feat(08-03): add 20 Shodan dorks for exposed LLM infrastructure
- frontier.yaml: 6 dorks (OpenAI/Anthropic proxies, Azure OpenAI certs, AWS Bedrock, LiteLLM)
- infrastructure.yaml: 14 dorks (Ollama, vLLM, LocalAI, LM Studio, text-generation-webui, Open WebUI, Triton, TGI, LangServe, FastChat, OpenRouter/Portkey/Helicone gateways)
- Real Shodan query syntax: http.title, http.html, ssl.cert.subject.cn, product, port, http.component
- Dual-located: pkg/dorks/definitions/shodan/ + dorks/shodan/
This commit is contained in:
42
pkg/dorks/definitions/shodan/frontier.yaml
Normal file
@@ -0,0 +1,42 @@
- id: shodan-openai-proxy
  name: "OpenAI proxy servers"
  source: shodan
  category: frontier
  query: 'http.title:"openai" http.html:"/v1/chat/completions"'
  description: "Exposed OpenAI-compatible proxy servers serving chat completions"
  tags: [openai, proxy, tier1]

- id: shodan-litellm-proxy
  name: "LiteLLM proxies on default port"
  source: shodan
  category: frontier
  query: 'http.title:"LiteLLM" port:4000'
  description: "LiteLLM gateway dashboards exposed on default port 4000"
  tags: [litellm, gateway, tier5]

- id: shodan-openai-nginx
  name: "Nginx front-ends leaking OPENAI_API_KEY"
  source: shodan
  category: frontier
  query: 'http.html:"OPENAI_API_KEY" http.component:nginx'
  description: "Nginx-fronted services exposing OPENAI_API_KEY in HTML"
  tags: [openai, nginx, tier1]

- id: shodan-azure-openai
  name: "Azure OpenAI certificate matches"
  source: shodan
  category: frontier
  query: 'ssl.cert.subject.cn:"openai.azure.com"'
  description: "TLS certificates referencing Azure OpenAI endpoints"
  tags: [azure, openai, tier1]

- id: shodan-bedrock-runtime
  name: "AWS Bedrock runtime certificates"
  source: shodan
  category: frontier
  query: 'ssl.cert.subject.cn:"bedrock-runtime"'
  description: "TLS certificates referencing AWS Bedrock runtime hosts"
  tags: [aws, bedrock, tier1]

- id: shodan-anthropic-proxy
  name: "Anthropic-compatible proxy servers"
  source: shodan
  category: frontier
  query: 'http.html:"anthropic" http.html:"messages"'
  description: "Proxy servers routing to Anthropic messages API"
  tags: [anthropic, proxy, tier1]
98
pkg/dorks/definitions/shodan/infrastructure.yaml
Normal file
@@ -0,0 +1,98 @@
- id: shodan-ollama-default
  name: "Ollama on default port 11434"
  source: shodan
  category: infrastructure
  query: 'product:"Ollama" port:11434'
  description: "Ollama servers banner-identified on the default port"
  tags: [ollama, self-hosted, tier8]

- id: shodan-ollama-tags
  name: "Ollama /api/tags endpoints"
  source: shodan
  category: infrastructure
  query: 'http.html:"/api/tags" http.title:"Ollama"'
  description: "Ollama servers exposing the model tags listing endpoint"
  tags: [ollama, self-hosted, tier8]

- id: shodan-vllm
  name: "vLLM /v1/models endpoints"
  source: shodan
  category: infrastructure
  query: 'http.html:"vLLM" http.html:"/v1/models"'
  description: "vLLM inference servers exposing the models endpoint"
  tags: [vllm, self-hosted, tier8]

- id: shodan-localai
  name: "LocalAI dashboards"
  source: shodan
  category: infrastructure
  query: 'http.title:"LocalAI"'
  description: "LocalAI self-hosted inference dashboards"
  tags: [localai, self-hosted, tier8]

- id: shodan-lmstudio
  name: "LM Studio servers"
  source: shodan
  category: infrastructure
  query: 'http.title:"LM Studio"'
  description: "Exposed LM Studio local inference servers"
  tags: [lmstudio, self-hosted, tier8]

- id: shodan-textgenwebui
  name: "text-generation-webui instances"
  source: shodan
  category: infrastructure
  query: 'http.title:"text-generation-webui"'
  description: "Exposed oobabooga text-generation-webui instances"
  tags: [textgen, self-hosted, tier8]

- id: shodan-openwebui
  name: "Open WebUI chat servers"
  source: shodan
  category: infrastructure
  query: 'http.title:"Open WebUI" http.html:"/api/chat"'
  description: "Exposed Open WebUI chat front-ends"
  tags: [openwebui, self-hosted, tier8]

- id: shodan-openrouter-proxy
  name: "OpenRouter-linked proxies"
  source: shodan
  category: infrastructure
  query: 'http.html:"openrouter.ai" port:443'
  description: "HTTPS hosts referencing openrouter.ai in page content"
  tags: [openrouter, gateway, tier5]

- id: shodan-portkey-gateway
  name: "Portkey gateway dashboards"
  source: shodan
  category: infrastructure
  query: 'http.title:"Portkey"'
  description: "Exposed Portkey AI gateway dashboards"
  tags: [portkey, gateway, tier5]

- id: shodan-helicone-gateway
  name: "Helicone gateway endpoints"
  source: shodan
  category: infrastructure
  query: 'http.html:"helicone" http.html:"/v1"'
  description: "Hosts referencing Helicone observability gateway endpoints"
  tags: [helicone, gateway, tier5]

- id: shodan-triton-server
  name: "NVIDIA Triton inference servers"
  source: shodan
  category: infrastructure
  query: 'http.html:"NVIDIA Triton" http.html:"/v2/models"'
  description: "Exposed NVIDIA Triton inference servers"
  tags: [triton, nvidia, tier8]

- id: shodan-tgi-hf
  name: "HF text-generation-inference servers"
  source: shodan
  category: infrastructure
  query: 'http.html:"text-generation-inference" "/generate"'
  description: "Hugging Face text-generation-inference servers exposing /generate"
  tags: [huggingface, tgi, tier8]

- id: shodan-langserve
  name: "LangServe endpoints"
  source: shodan
  category: infrastructure
  query: 'http.title:"LangServe"'
  description: "Exposed LangChain LangServe deployments"
  tags: [langserve, tier8]

- id: shodan-fastchat
  name: "FastChat servers"
  source: shodan
  category: infrastructure
  query: 'http.title:"FastChat"'
  description: "Exposed FastChat multi-model serving instances"
  tags: [fastchat, self-hosted, tier8]
Reference in New Issue
Block a user