---
# GitHub code-search query definitions for detecting leaked AI/LLM
# gateway and self-hosted-inference credentials.
#
# Each entry:
#   id          - unique rule identifier
#   name        - human-readable rule title
#   source      - search backend (all rules here target GitHub)
#   category    - rule grouping (all rules here are "infrastructure")
#   query       - GitHub code-search string (uses extension:/filename:
#                 qualifiers and literal key prefixes)
#   description - what the query is expected to surface
#   tags        - flow-style list of labels, including a tier marker

- id: openrouter-envfile
  name: "OpenRouter Key in .env files"
  source: github
  category: infrastructure
  query: 'sk-or-v1- extension:env'
  description: "Finds OpenRouter gateway keys (sk-or-v1- prefix) in .env files"
  tags: [openrouter, gateway, env, tier5]

- id: openrouter-pyfile
  name: "OpenRouter Key in Python files"
  source: github
  category: infrastructure
  query: 'sk-or-v1- extension:py'
  description: "Finds OpenRouter gateway keys hard-coded in Python source"
  tags: [openrouter, gateway, python, tier5]

- id: litellm-envfile
  name: "LiteLLM Master Key in .env files"
  source: github
  category: infrastructure
  query: 'LITELLM_MASTER_KEY extension:env'
  description: "Finds LiteLLM proxy master keys in .env files"
  tags: [litellm, proxy, env, tier5]

- id: portkey-envfile
  name: "Portkey API Key in .env files"
  source: github
  category: infrastructure
  query: 'PORTKEY_API_KEY extension:env'
  description: "Finds Portkey gateway keys in .env files"
  tags: [portkey, gateway, env, tier5]

- id: helicone-envfile
  name: "Helicone Key in .env files"
  source: github
  category: infrastructure
  query: 'sk-helicone- extension:env'
  description: "Finds Helicone observability keys in .env files"
  tags: [helicone, observability, env, tier5]

- id: cloudflare-ai-envfile
  name: "Cloudflare AI Token in repos"
  source: github
  category: infrastructure
  # Single-quoted so the embedded double quotes around ai.run stay literal.
  query: 'CF_API_TOKEN "ai.run"'
  description: "Finds Cloudflare API tokens next to Workers AI ai.run references"
  tags: [cloudflare, workers-ai, tier5]

- id: vercel-ai-envfile
  name: "Vercel AI SDK Key in .env files"
  source: github
  category: infrastructure
  query: 'VERCEL_AI extension:env'
  description: "Finds Vercel AI SDK credentials in .env files"
  tags: [vercel, env, tier5]

- id: ollama-config
  name: "Ollama host in docker-compose"
  source: github
  category: infrastructure
  query: 'OLLAMA_HOST filename:docker-compose.yaml'
  description: "Finds exposed self-hosted Ollama instances in docker-compose files"
  tags: [ollama, self-hosted, docker, tier8]

# NOTE(review): the name string of this entry was split across a line
# boundary in the mangled original ("vLLM" / "entrypoint in config.yaml");
# rejoined here with a single space.
- id: vllm-config
  name: "vLLM entrypoint in config.yaml"
  source: github
  category: infrastructure
  query: 'vllm.entrypoints filename:config.yaml'
  description: "Finds self-hosted vLLM deployments in config.yaml files"
  tags: [vllm, self-hosted, tier8]

- id: localai-envfile
  name: "LocalAI API Key in .env files"
  source: github
  category: infrastructure
  query: 'LOCALAI_API_KEY extension:env'
  description: "Finds LocalAI self-hosted gateway keys in .env files"
  tags: [localai, self-hosted, env, tier8]