// Detection rule: flags successful file open/creation/modification events where
// the acting process is a known GenAI tool (local LLM runtimes, AI coding
// assistants) and the target file is a shell persistence config or a
// credential/secret store.
// NOTE(review): `in` is case-sensitive in EQL, which is why the process list
// carries explicit case variants (e.g. "ollama" / "Ollama"); `like~` below is
// the case-insensitive wildcard match (`?` = any single character).
file where event.action in ("open", "creation", "modification") and event.outcome == "success" and
// GenAI process: local model servers and AI assistant/CLI binaries across
// Windows (.exe), Linux, and macOS naming conventions.
process.name in (
"ollama.exe", "ollama", "Ollama",
"textgen.exe", "textgen", "text-generation-webui.exe", "oobabooga.exe",
"lmstudio.exe", "lmstudio", "LM Studio",
"claude.exe", "claude", "Claude",
"cursor.exe", "cursor", "Cursor",
"copilot.exe", "copilot", "Copilot",
"codex.exe", "codex",
"Jan", "jan.exe", "jan",
"gpt4all.exe", "gpt4all", "GPT4All",
"gemini-cli.exe", "gemini-cli",
"genaiscript.exe", "genaiscript",
"grok.exe", "grok",
"qwen.exe", "qwen",
"koboldcpp.exe", "koboldcpp", "KoboldCpp",
"llama-server", "llama-cli"
) and
// Sensitive file paths
(
// Persistence via Shell configs: dotfiles sourced at login/shell startup,
// a common persistence target (exact, case-sensitive names).
file.name in (".bashrc", ".bash_profile", ".zshrc", ".zshenv", ".zprofile", ".profile", ".bash_logout") or
// Credentials In Files: browser password/cookie stores, OS keychains,
// cloud CLI token caches, and admin tool configs. Case-insensitive
// wildcard match; "key?.db" covers Firefox key3.db/key4.db.
file.name like~
("key?.db",
"logins.json",
"Login Data",
"Local State",
"signons.sqlite",
"Cookies",
"cookies.sqlite",
"Cookies.binarycookies",
"login.keychain-db",
"System.keychain",
"credentials.db",
"credentials",
"access_tokens.db",
"accessTokens.json",
"azureProfile.json",
"RDCMan.settings",
"known_hosts",
"KeePass.config.xml",
"Unattended.xml")
)
Install detection rules in Elastic Security
Detect GenAI Process Accessing Sensitive Files in the Elastic Security detection engine by installing this rule into your Elastic Stack.
To set up this rule, see the installation guide for Prebuilt Security Detection Rules.