feat(08-04): add 15 Censys + 10 ZoomEye dorks

- 15 Censys Search 2.0 queries for Ollama, vLLM, LocalAI, Open WebUI,
  LM Studio, Triton, TGI, LiteLLM, Portkey, LangServe, FastChat,
  text-generation-webui, Azure OpenAI certs, Bedrock certs, and OpenAI
  proxies (12 infrastructure + 3 frontier)
- 10 ZoomEye app/title/port/service queries covering the same LLM
  infrastructure surface (9 infrastructure + 1 frontier)
- Dual-located under pkg/dorks/definitions/ (embedded) and dorks/ (repo root)
This commit is contained in:
salvacybersec
2026-04-06 00:21:34 +03:00
parent 56c11e39a0
commit 1c86800c14
4 changed files with 396 additions and 0 deletions

View File

@@ -0,0 +1,119 @@
# Ollama on its default port; body match narrows the port hit to real Ollama.
- id: censys-ollama-11434
  name: "Ollama server on port 11434 (Censys)"
  source: censys
  category: infrastructure
  query: 'services.port: 11434 and services.http.response.body: "Ollama"'
  description: "Finds exposed Ollama LLM servers advertising on their default port via Censys."
  tags: [ollama, censys, infrastructure, tier1]
# vLLM identified by name plus its OpenAI-compatible /v1/models route.
- id: censys-vllm
  name: "vLLM inference server (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.body: "vLLM" and services.http.response.body: "/v1/models"'
  description: "Locates vLLM servers exposing their OpenAI-compatible models endpoint."
  tags: [vllm, censys, infrastructure]
# LocalAI matched on page title only (no distinctive port).
- id: censys-localai
  name: "LocalAI host (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.html_title: "LocalAI"'
  description: "Finds LocalAI self-hosted OpenAI-compatible servers by their HTML title."
  tags: [localai, censys, infrastructure]
# Open WebUI dashboards fronting LLM backends, matched on title.
- id: censys-openwebui
  name: "Open WebUI dashboard (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.html_title: "Open WebUI"'
  description: "Finds internet-exposed Open WebUI dashboards that front LLM backends."
  tags: [openwebui, censys, infrastructure]
# LM Studio local-model servers, matched on title.
- id: censys-lmstudio
  name: "LM Studio server (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.html_title: "LM Studio"'
  description: "Finds exposed LM Studio local-model servers."
  tags: [lmstudio, censys, infrastructure]
# Triton matched by name plus its v2 model-repository API path.
- id: censys-triton
  name: "NVIDIA Triton inference server (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.body: "NVIDIA Triton" and services.http.response.body: "/v2/models"'
  description: "Finds NVIDIA Triton model servers exposing their /v2/models catalog."
  tags: [triton, nvidia, censys, infrastructure]
# Hugging Face TGI, matched on its server name string in the body.
- id: censys-tgi
  name: "Hugging Face text-generation-inference (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.body: "text-generation-inference"'
  description: "Finds public text-generation-inference (TGI) instances."
  tags: [tgi, huggingface, censys, infrastructure]
# LiteLLM proxy; port 4000 is its default, combined with title for precision.
- id: censys-litellm
  name: "LiteLLM proxy on :4000 (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.html_title: "LiteLLM" and services.port: 4000'
  description: "Finds LiteLLM proxy servers on their default admin port."
  tags: [litellm, censys, infrastructure]
# Self-hosted Portkey AI gateways, matched on title.
- id: censys-portkey
  name: "Portkey AI gateway (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.html_title: "Portkey"'
  description: "Finds self-hosted Portkey AI gateway deployments."
  tags: [portkey, censys, infrastructure]
# LangServe (LangChain) API servers, matched on title.
- id: censys-langserve
  name: "LangServe endpoint (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.html_title: "LangServe"'
  description: "Finds LangServe (LangChain) API servers exposed on the public internet."
  tags: [langserve, langchain, censys, infrastructure]
# Frontier-tier: hosts presenting TLS leaf certs with an Azure OpenAI CN.
- id: censys-openai-azure-cert
  name: "Azure OpenAI TLS certificates (Censys)"
  source: censys
  category: frontier
  query: 'services.tls.certificates.leaf_data.subject.common_name: "openai.azure.com"'
  description: "Finds hosts presenting certificates for openai.azure.com subject CN."
  tags: [openai, azure, censys, frontier, tls]
# Frontier-tier: TLS leaf certs whose CN references the AWS Bedrock runtime.
- id: censys-bedrock-cert
  name: "AWS Bedrock runtime certificates (Censys)"
  source: censys
  category: frontier
  query: 'services.tls.certificates.leaf_data.subject.common_name: "bedrock-runtime"'
  description: "Finds hosts exposing certs referencing AWS Bedrock runtime CN."
  tags: [bedrock, aws, censys, frontier, tls]
# FastChat multi-model serving dashboards, matched on title.
- id: censys-fastchat
  name: "FastChat LLM server (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.html_title: "FastChat"'
  description: "Finds FastChat multi-model serving dashboards."
  tags: [fastchat, censys, infrastructure]
# oobabooga text-generation-webui instances, matched on title.
- id: censys-textgen-webui
  name: "oobabooga text-generation-webui (Censys)"
  source: censys
  category: infrastructure
  query: 'services.http.response.html_title: "text-generation-webui"'
  description: "Finds exposed oobabooga text-generation-webui instances."
  tags: [oobabooga, textgen, censys, infrastructure]
# Frontier-tier: OpenAI-compatible proxies whose pages leak the key env-var name.
- id: censys-openai-proxy
  name: "OpenAI-compatible proxy leaking key var (Censys)"
  source: censys
  category: frontier
  query: 'services.http.response.body: "/v1/chat/completions" and services.http.response.body: "OPENAI_API_KEY"'
  description: "Finds OpenAI-compatible proxies whose bodies leak the OPENAI_API_KEY env reference."
  tags: [openai, proxy, censys, frontier]