merge: phase 16 resolve conflicts

This commit is contained in:
salvacybersec
2026-04-06 16:47:10 +03:00
10 changed files with 999 additions and 7 deletions

View File

@@ -0,0 +1,94 @@
package sources
import (
"context"
"fmt"
"io"
"net/http"
"net/url"
"time"
"golang.org/x/time/rate"
"github.com/salvacybersec/keyhunter/pkg/providers"
"github.com/salvacybersec/keyhunter/pkg/recon"
)
// APKMirrorSource searches APKMirror for mobile app metadata (descriptions,
// changelogs, file listings) that may contain leaked API keys. This is a
// metadata scanner -- it does not decompile APKs. Full decompilation via
// apktool/jadx would require local binary dependencies and is out of scope
// for a network-based ReconSource.
type APKMirrorSource struct {
	// BaseURL overrides the APKMirror endpoint; empty means the public site.
	// Tests point this at an httptest server.
	BaseURL string
	// Registry drives query generation via BuildQueries.
	Registry *providers.Registry
	// Limiters is the shared per-source rate limiter registry; nil disables
	// rate limiting (used in tests).
	Limiters *recon.LimiterRegistry
	// Client is the HTTP client wrapper; nil means a fresh NewClient() per sweep.
	Client *Client
}
// Compile-time assertion that APKMirrorSource implements recon.ReconSource.
var _ recon.ReconSource = (*APKMirrorSource)(nil)

// Name returns the stable identifier used for limiter lookup and CLI listing.
func (s *APKMirrorSource) Name() string { return "apkmirror" }

// RateLimit allows one request every five seconds.
func (s *APKMirrorSource) RateLimit() rate.Limit { return rate.Every(5 * time.Second) }

// Burst permits up to two queued requests.
func (s *APKMirrorSource) Burst() int { return 2 }

// RespectsRobots reports that this source honors robots.txt.
func (s *APKMirrorSource) RespectsRobots() bool { return true }

// Enabled is always true: the source requires no credentials.
func (s *APKMirrorSource) Enabled(_ recon.Config) bool { return true }
// Sweep runs every registry-derived APKMirror query and emits a
// medium-confidence finding whenever a search results page matches the shared
// key pattern. The incoming query argument is intentionally ignored: queries
// come from BuildQueries(s.Registry, "apkmirror"). Individual request failures
// are skipped so one bad query cannot abort the sweep; only context
// cancellation (or limiter failure) is fatal.
func (s *APKMirrorSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
	base := s.BaseURL
	if base == "" {
		base = "https://www.apkmirror.com"
	}
	client := s.Client
	if client == nil {
		client = NewClient()
	}
	queries := BuildQueries(s.Registry, "apkmirror")
	if len(queries) == 0 {
		return nil
	}
	for _, q := range queries {
		if err := ctx.Err(); err != nil {
			return err
		}
		if s.Limiters != nil {
			if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
				return err
			}
		}
		searchURL := fmt.Sprintf(
			"%s/?post_type=app_release&searchtype=apk&s=%s",
			base, url.QueryEscape(q),
		)
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
		if err != nil {
			continue
		}
		resp, err := client.Do(ctx, req)
		if err != nil {
			continue
		}
		body, err := io.ReadAll(io.LimitReader(resp.Body, 512*1024))
		_ = resp.Body.Close()
		if err != nil {
			continue
		}
		// Only scan successful responses: error pages (403s, rate-limit
		// interstitials) would otherwise be matched against the key pattern.
		if resp.StatusCode != http.StatusOK {
			continue
		}
		if ciLogKeyPattern.Match(body) {
			finding := recon.Finding{
				ProviderName: q,
				Source:       searchURL,
				SourceType:   "recon:apkmirror",
				Confidence:   "medium",
				DetectedAt:   time.Now(),
			}
			// Respect cancellation even if the consumer stopped reading,
			// so the sweep cannot block forever on the send.
			select {
			case out <- finding:
			case <-ctx.Done():
				return ctx.Err()
			}
		}
	}
	return nil
}

View File

@@ -0,0 +1,115 @@
package sources
import (
"context"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/salvacybersec/keyhunter/pkg/providers"
"github.com/salvacybersec/keyhunter/pkg/recon"
)
// TestAPKMirror_Name checks the canonical source identifier.
func TestAPKMirror_Name(t *testing.T) {
	src := &APKMirrorSource{}
	if got := src.Name(); got != "apkmirror" {
		t.Fatalf("expected apkmirror, got %s", got)
	}
}
// TestAPKMirror_Enabled verifies the source needs no credentials.
func TestAPKMirror_Enabled(t *testing.T) {
	src := &APKMirrorSource{}
	if enabled := src.Enabled(recon.Config{}); !enabled {
		t.Fatal("APKMirrorSource should always be enabled")
	}
}
// TestAPKMirror_RespectsRobots verifies the robots.txt opt-in.
func TestAPKMirror_RespectsRobots(t *testing.T) {
	src := &APKMirrorSource{}
	if robots := src.RespectsRobots(); !robots {
		t.Fatal("APKMirrorSource should respect robots.txt")
	}
}
// TestAPKMirror_Sweep drives a full sweep against a mock APKMirror server
// whose search results embed a key-like string, and expects a finding tagged
// recon:apkmirror.
func TestAPKMirror_Sweep(t *testing.T) {
	page := `
<html><body>
<div class="appRow">
<h5 class="appRowTitle">AI Chat Pro</h5>
<p>Uses api_key = "sk-proj-ABCDEF1234567890abcdef" for backend</p>
</div>
</body></html>
`
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/html")
		_, _ = w.Write([]byte(page))
	}))
	defer server.Close()
	registry := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})
	source := &APKMirrorSource{
		BaseURL:  server.URL,
		Registry: registry,
		Client:   NewClient(),
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	results := make(chan recon.Finding, 10)
	if err := source.Sweep(ctx, "", results); err != nil {
		t.Fatalf("Sweep error: %v", err)
	}
	close(results)
	var got []recon.Finding
	for f := range results {
		got = append(got, f)
	}
	if len(got) == 0 {
		t.Fatal("expected at least one finding from APKMirror")
	}
	if st := got[0].SourceType; st != "recon:apkmirror" {
		t.Fatalf("expected recon:apkmirror, got %s", st)
	}
}
// TestAPKMirror_Sweep_NoMatch sweeps a mock server whose results contain no
// key-like content and expects zero findings.
func TestAPKMirror_Sweep_NoMatch(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/html")
		_, _ = w.Write([]byte(`<html><body><p>No API keys here</p></body></html>`))
	}))
	defer server.Close()
	registry := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})
	source := &APKMirrorSource{
		BaseURL:  server.URL,
		Registry: registry,
		Client:   NewClient(),
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	results := make(chan recon.Finding, 10)
	if err := source.Sweep(ctx, "", results); err != nil {
		t.Fatalf("Sweep error: %v", err)
	}
	close(results)
	count := 0
	for range results {
		count++
	}
	if count != 0 {
		t.Fatalf("expected no findings, got %d", count)
	}
}

177
pkg/recon/sources/crtsh.go Normal file
View File

@@ -0,0 +1,177 @@
package sources
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"time"
"golang.org/x/time/rate"
"github.com/salvacybersec/keyhunter/pkg/providers"
"github.com/salvacybersec/keyhunter/pkg/recon"
)
// CrtShSource discovers subdomains via certificate transparency logs (crt.sh)
// and probes their config endpoints (/.env, /api/config, /actuator/env) for
// leaked API keys.
type CrtShSource struct {
	// BaseURL overrides the crt.sh endpoint; empty means the public service.
	BaseURL string
	// Registry is the provider registry (not consulted for domain queries).
	Registry *providers.Registry
	// Limiters is the shared per-source rate limiter registry; nil disables
	// rate limiting (used in tests).
	Limiters *recon.LimiterRegistry
	// Client is the HTTP client wrapper used for the crt.sh lookup; nil means
	// a fresh NewClient() per sweep.
	Client *Client
	// ProbeBaseURL overrides the scheme+host used when probing discovered
	// subdomains. Tests set this to the httptest server URL.
	ProbeBaseURL string
}
// Compile-time assertion that CrtShSource implements recon.ReconSource.
var _ recon.ReconSource = (*CrtShSource)(nil)

// Name returns the stable identifier used for limiter lookup and CLI listing.
func (s *CrtShSource) Name() string { return "crtsh" }

// RateLimit allows one request every three seconds.
func (s *CrtShSource) RateLimit() rate.Limit { return rate.Every(3 * time.Second) }

// Burst permits up to three queued requests.
func (s *CrtShSource) Burst() int { return 3 }

// RespectsRobots reports false for this source.
func (s *CrtShSource) RespectsRobots() bool { return false }

// Enabled is always true: the source requires no credentials.
func (s *CrtShSource) Enabled(_ recon.Config) bool { return true }
// crtshEntry represents one row from the crt.sh JSON API.
type crtshEntry struct {
	// NameValue may hold several newline-separated DNS names.
	NameValue string `json:"name_value"`
	// CommonName is the certificate CN (currently unread by Sweep).
	CommonName string `json:"common_name"`
}

// configProbeEndpoints are the well-known config endpoints probed on each
// discovered subdomain.
var configProbeEndpoints = []string{
	"/.env",
	"/api/config",
	"/actuator/env",
}
// Sweep enumerates subdomains of the query domain via the crt.sh JSON API,
// deduplicates them (capped at 20), and probes each for leaked keys at the
// well-known config endpoints. Keyword-style queries (no dot) are skipped.
// crt.sh outages, error responses, and malformed JSON are treated as
// non-fatal; only context cancellation (or limiter failure) aborts the sweep.
func (s *CrtShSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
	base := s.BaseURL
	if base == "" {
		base = "https://crt.sh"
	}
	client := s.Client
	if client == nil {
		client = NewClient()
	}
	// query should be a domain. Skip keyword-like queries (no dots).
	if query == "" || !strings.Contains(query, ".") {
		return nil
	}
	if s.Limiters != nil {
		if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
			return err
		}
	}
	// Fetch subdomains from crt.sh. %%25 emits the literal "%25" (an
	// URL-encoded '%'), so the effective query is "%.<domain>".
	crtURL := fmt.Sprintf("%s/?q=%%25.%s&output=json", base, url.QueryEscape(query))
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, crtURL, nil)
	if err != nil {
		return err
	}
	resp, err := client.Do(ctx, req)
	if err != nil {
		return nil // non-fatal: crt.sh may be down
	}
	data, err := io.ReadAll(io.LimitReader(resp.Body, 1024*1024))
	_ = resp.Body.Close()
	// Reject error responses before parsing: a crt.sh error page is not a
	// subdomain listing.
	if err != nil || resp.StatusCode != http.StatusOK {
		return nil
	}
	var entries []crtshEntry
	if err := json.Unmarshal(data, &entries); err != nil {
		return nil
	}
	// Deduplicate name_value entries, capped to bound probe volume.
	const maxSubdomains = 20
	seen := make(map[string]struct{})
	var subdomains []string
collect:
	for _, e := range entries {
		// name_value can contain multiple names separated by newlines.
		for _, name := range strings.Split(e.NameValue, "\n") {
			name = strings.TrimSpace(name)
			if name == "" {
				continue
			}
			// Remove wildcard prefix.
			name = strings.TrimPrefix(name, "*.")
			if _, ok := seen[name]; ok {
				continue
			}
			seen[name] = struct{}{}
			subdomains = append(subdomains, name)
			if len(subdomains) >= maxSubdomains {
				break collect
			}
		}
	}
	// Probe config endpoints on each subdomain with a short timeout so one
	// dead host cannot stall the sweep.
	probeClient := &http.Client{Timeout: 5 * time.Second}
	for _, sub := range subdomains {
		if err := ctx.Err(); err != nil {
			return err
		}
		s.probeSubdomain(ctx, probeClient, sub, out)
	}
	return nil
}
// probeSubdomain fetches each well-known config endpoint on subdomain and
// emits a high-confidence finding for every 200 response that matches the
// shared key pattern. Individual request failures are skipped; the probe
// stops early on context cancellation, including while sending on out.
func (s *CrtShSource) probeSubdomain(ctx context.Context, probeClient *http.Client, subdomain string, out chan<- recon.Finding) {
	for _, ep := range configProbeEndpoints {
		if err := ctx.Err(); err != nil {
			return
		}
		var probeURL string
		if s.ProbeBaseURL != "" {
			// Test mode: route through the mock server, encoding the
			// subdomain as a path segment.
			probeURL = s.ProbeBaseURL + "/" + subdomain + ep
		} else {
			probeURL = "https://" + subdomain + ep
		}
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, probeURL, nil)
		if err != nil {
			continue
		}
		resp, err := probeClient.Do(req)
		if err != nil {
			continue
		}
		body, err := io.ReadAll(io.LimitReader(resp.Body, 64*1024))
		_ = resp.Body.Close()
		if err != nil {
			continue
		}
		if resp.StatusCode == http.StatusOK && ciLogKeyPattern.Match(body) {
			finding := recon.Finding{
				ProviderName: subdomain,
				Source:       probeURL,
				SourceType:   "recon:crtsh",
				Confidence:   "high",
				DetectedAt:   time.Now(),
			}
			// Respect cancellation even if the consumer stopped reading,
			// so the probe cannot block forever on the send.
			select {
			case out <- finding:
			case <-ctx.Done():
				return
			}
		}
	}
}

View File

@@ -0,0 +1,139 @@
package sources
import (
"context"
"net/http"
"net/http/httptest"
"strings"
"testing"
"time"
"github.com/salvacybersec/keyhunter/pkg/recon"
)
// TestCrtSh_Name checks the canonical source identifier.
func TestCrtSh_Name(t *testing.T) {
	src := &CrtShSource{}
	if got := src.Name(); got != "crtsh" {
		t.Fatalf("expected crtsh, got %s", got)
	}
}
// TestCrtSh_Enabled verifies the source needs no credentials.
func TestCrtSh_Enabled(t *testing.T) {
	src := &CrtShSource{}
	if enabled := src.Enabled(recon.Config{}); !enabled {
		t.Fatal("CrtShSource should always be enabled")
	}
}
// TestCrtSh_Sweep_SkipsKeywords verifies that dotless (keyword-style) queries
// are ignored and produce no findings.
func TestCrtSh_Sweep_SkipsKeywords(t *testing.T) {
	src := &CrtShSource{Client: NewClient()}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	results := make(chan recon.Finding, 10)
	// "sk-proj-" has no dot -- should be skipped as a keyword.
	if err := src.Sweep(ctx, "sk-proj-", results); err != nil {
		t.Fatalf("Sweep error: %v", err)
	}
	close(results)
	var n int
	for range results {
		n++
	}
	if n != 0 {
		t.Fatalf("expected no findings for keyword query, got %d", n)
	}
}
// TestCrtSh_Sweep wires a mock crt.sh API (returning two subdomains) to a
// mock probe server (serving a key-like /.env) and expects at least one
// finding tagged recon:crtsh.
func TestCrtSh_Sweep(t *testing.T) {
	// Mux handles both crt.sh API and probe endpoints.
	mux := http.NewServeMux()
	// crt.sh subdomain lookup.
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		// Sweep appends &output=json to the crt.sh URL; anything else 404s.
		if r.URL.Query().Get("output") == "json" {
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`[
{"name_value":"api.example.com","common_name":"api.example.com"},
{"name_value":"staging.example.com","common_name":"staging.example.com"}
]`))
			return
		}
		http.NotFound(w, r)
	})
	crtSrv := httptest.NewServer(mux)
	defer crtSrv.Close()
	// Probe server: serves /.env with key-like content. probeSubdomain builds
	// paths of the form /<subdomain>/.env when ProbeBaseURL is set, so a
	// suffix match is enough here.
	probeMux := http.NewServeMux()
	probeMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		if strings.HasSuffix(r.URL.Path, "/.env") {
			_, _ = w.Write([]byte(`API_KEY = "sk-proj-ABCDEF1234567890abcdef"`))
			return
		}
		http.NotFound(w, r)
	})
	probeSrv := httptest.NewServer(probeMux)
	defer probeSrv.Close()
	s := &CrtShSource{
		BaseURL:      crtSrv.URL,
		Client:       NewClient(),
		ProbeBaseURL: probeSrv.URL,
	}
	out := make(chan recon.Finding, 20)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	err := s.Sweep(ctx, "example.com", out)
	// Safe to close before draining: Sweep has returned, so no more sends.
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}
	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) == 0 {
		t.Fatal("expected at least one finding from crt.sh probe")
	}
	if findings[0].SourceType != "recon:crtsh" {
		t.Fatalf("expected recon:crtsh, got %s", findings[0].SourceType)
	}
}
// TestCrtSh_Sweep_NoSubdomains sweeps a domain for which crt.sh returns an
// empty certificate list and expects zero findings.
func TestCrtSh_Sweep_NoSubdomains(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`[]`))
	}))
	defer server.Close()
	src := &CrtShSource{
		BaseURL: server.URL,
		Client:  NewClient(),
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	results := make(chan recon.Finding, 10)
	if err := src.Sweep(ctx, "empty.example.com", results); err != nil {
		t.Fatalf("Sweep error: %v", err)
	}
	close(results)
	var n int
	for range results {
		n++
	}
	if n != 0 {
		t.Fatalf("expected no findings, got %d", n)
	}
}

View File

@@ -52,6 +52,9 @@ type SourcesConfig struct {
// Phase 14: CI/CD source tokens.
CircleCIToken string
// Phase 16: DNS/threat intel source tokens.
SecurityTrailsAPIKey string
// Registry drives query generation for every source via BuildQueries.
Registry *providers.Registry
// Limiters is the shared per-source rate-limiter registry.
@@ -61,8 +64,8 @@ type SourcesConfig struct {
// RegisterAll registers every Phase 10 code-hosting, Phase 11 search engine /
// paste site, Phase 12 IoT scanner / cloud storage, Phase 13 package
// registry / container / IaC, Phase 14 CI/CD log / web archive / frontend
// leak, and Phase 15 forum / collaboration tool / log aggregator source on
// engine (67 sources total).
// leak, Phase 15 forum / collaboration tool / log aggregator, and Phase 16
// mobile / DNS / threat intel source on engine (70 sources total).
//
// All sources are registered unconditionally so that cmd/recon.go can surface
// the full catalog via `keyhunter recon list` regardless of which credentials
@@ -282,4 +285,13 @@ func RegisterAll(engine *recon.Engine, cfg SourcesConfig) {
engine.Register(&SplunkSource{Registry: reg, Limiters: lim})
engine.Register(&GrafanaSource{Registry: reg, Limiters: lim})
engine.Register(&SentrySource{Registry: reg, Limiters: lim})
// Phase 16: Mobile, DNS, and threat intel sources.
engine.Register(&APKMirrorSource{Registry: reg, Limiters: lim})
engine.Register(&CrtShSource{Registry: reg, Limiters: lim})
engine.Register(&SecurityTrailsSource{
APIKey: cfg.SecurityTrailsAPIKey,
Registry: reg,
Limiters: lim,
})
}

View File

@@ -0,0 +1,189 @@
package sources
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"time"
"golang.org/x/time/rate"
"github.com/salvacybersec/keyhunter/pkg/providers"
"github.com/salvacybersec/keyhunter/pkg/recon"
)
// SecurityTrailsSource searches SecurityTrails DNS/subdomain data for API key
// exposure. It enumerates subdomains for a target domain and probes config
// endpoints, and also checks DNS history records (TXT records may contain keys).
type SecurityTrailsSource struct {
	// APIKey authenticates requests (sent as the APIKEY header); empty
	// disables the source via Enabled.
	APIKey string
	// BaseURL overrides the API endpoint; empty means the production v1 API.
	BaseURL string
	// Registry is the provider registry (not consulted for domain queries).
	Registry *providers.Registry
	// Limiters is the shared per-source rate limiter registry; nil disables
	// rate limiting (used in tests).
	Limiters *recon.LimiterRegistry
	// Client is the HTTP client wrapper used for API calls; nil means a
	// fresh NewClient() per sweep.
	Client *Client
	// ProbeBaseURL overrides the scheme+host used when probing discovered
	// subdomains. Tests set this to the httptest server URL.
	ProbeBaseURL string
}
// Compile-time assertion that SecurityTrailsSource implements recon.ReconSource.
var _ recon.ReconSource = (*SecurityTrailsSource)(nil)

// Name returns the stable identifier used for limiter lookup and CLI listing.
func (s *SecurityTrailsSource) Name() string { return "securitytrails" }

// RateLimit allows one request every two seconds.
func (s *SecurityTrailsSource) RateLimit() rate.Limit { return rate.Every(2 * time.Second) }

// Burst permits up to five queued requests.
func (s *SecurityTrailsSource) Burst() int { return 5 }

// RespectsRobots reports false: requests go to an authenticated API.
func (s *SecurityTrailsSource) RespectsRobots() bool { return false }

// Enabled reports whether an API key has been configured; without one the
// source is skipped.
func (s *SecurityTrailsSource) Enabled(_ recon.Config) bool {
	return s.APIKey != ""
}
// securityTrailsSubdomains represents the subdomain listing API response.
type securityTrailsSubdomains struct {
	// Subdomains are bare labels (e.g. "api"); Sweep joins each with the
	// queried domain to build an FQDN.
	Subdomains []string `json:"subdomains"`
}
// Sweep runs two phases against the SecurityTrails API for a domain query:
// (1) enumerate subdomains (capped at 20 FQDNs) and probe their well-known
// config endpoints; (2) fetch the domain's DNS details and scan the raw
// response for key patterns (TXT records may embed keys). Keyword-style
// queries without a dot are skipped; API failures and non-200 responses are
// non-fatal. Only context cancellation (or limiter failure) aborts the sweep.
func (s *SecurityTrailsSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
	base := s.BaseURL
	if base == "" {
		base = "https://api.securitytrails.com/v1"
	}
	client := s.Client
	if client == nil {
		client = NewClient()
	}
	if query == "" || !strings.Contains(query, ".") {
		return nil
	}
	// Phase 1: Enumerate subdomains.
	if s.Limiters != nil {
		if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
			return err
		}
	}
	subURL := fmt.Sprintf("%s/domain/%s/subdomains?children_only=false", base, query)
	subReq, err := http.NewRequestWithContext(ctx, http.MethodGet, subURL, nil)
	if err != nil {
		return err
	}
	subReq.Header.Set("APIKEY", s.APIKey)
	subResp, err := client.Do(ctx, subReq)
	if err != nil {
		return nil // non-fatal
	}
	subData, err := io.ReadAll(io.LimitReader(subResp.Body, 512*1024))
	_ = subResp.Body.Close()
	// Reject error responses before parsing: a 401/429 body is not a
	// subdomain listing.
	if err != nil || subResp.StatusCode != http.StatusOK {
		return nil
	}
	var subResult securityTrailsSubdomains
	if err := json.Unmarshal(subData, &subResult); err != nil {
		return nil
	}
	// Build FQDNs, capped at 20 to bound probe volume.
	var fqdns []string
	for _, sub := range subResult.Subdomains {
		fqdns = append(fqdns, sub+"."+query)
		if len(fqdns) >= 20 {
			break
		}
	}
	// Probe config endpoints with a short timeout so a dead host cannot
	// stall the sweep.
	probeClient := &http.Client{Timeout: 5 * time.Second}
	for _, fqdn := range fqdns {
		if err := ctx.Err(); err != nil {
			return err
		}
		s.probeSubdomain(ctx, probeClient, fqdn, out)
	}
	// Phase 2: Check DNS history for key patterns in TXT records.
	if s.Limiters != nil {
		if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
			return err
		}
	}
	dnsURL := fmt.Sprintf("%s/domain/%s", base, query)
	dnsReq, err := http.NewRequestWithContext(ctx, http.MethodGet, dnsURL, nil)
	if err != nil {
		return nil
	}
	dnsReq.Header.Set("APIKEY", s.APIKey)
	dnsResp, err := client.Do(ctx, dnsReq)
	if err != nil {
		return nil
	}
	dnsData, err := io.ReadAll(io.LimitReader(dnsResp.Body, 512*1024))
	_ = dnsResp.Body.Close()
	// Only scan successful responses; error bodies must not be matched
	// against the key pattern.
	if err != nil || dnsResp.StatusCode != http.StatusOK {
		return nil
	}
	if ciLogKeyPattern.Match(dnsData) {
		finding := recon.Finding{
			ProviderName: query,
			Source:       dnsURL,
			SourceType:   "recon:securitytrails",
			Confidence:   "medium",
			DetectedAt:   time.Now(),
		}
		// Respect cancellation even if the consumer stopped reading.
		select {
		case out <- finding:
		case <-ctx.Done():
			return ctx.Err()
		}
	}
	return nil
}
// probeSubdomain fetches each well-known config endpoint on subdomain and
// emits a high-confidence finding for every 200 response that matches the
// shared key pattern. Individual request failures are skipped; the probe
// stops early on context cancellation, including while sending on out.
func (s *SecurityTrailsSource) probeSubdomain(ctx context.Context, probeClient *http.Client, subdomain string, out chan<- recon.Finding) {
	for _, ep := range configProbeEndpoints {
		if err := ctx.Err(); err != nil {
			return
		}
		var probeURL string
		if s.ProbeBaseURL != "" {
			// Test mode: route through the mock server, encoding the
			// subdomain as a path segment.
			probeURL = s.ProbeBaseURL + "/" + subdomain + ep
		} else {
			probeURL = "https://" + subdomain + ep
		}
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, probeURL, nil)
		if err != nil {
			continue
		}
		resp, err := probeClient.Do(req)
		if err != nil {
			continue
		}
		body, err := io.ReadAll(io.LimitReader(resp.Body, 64*1024))
		_ = resp.Body.Close()
		if err != nil {
			continue
		}
		if resp.StatusCode == http.StatusOK && ciLogKeyPattern.Match(body) {
			finding := recon.Finding{
				ProviderName: subdomain,
				Source:       probeURL,
				SourceType:   "recon:securitytrails",
				Confidence:   "high",
				DetectedAt:   time.Now(),
			}
			// Respect cancellation even if the consumer stopped reading,
			// so the probe cannot block forever on the send.
			select {
			case out <- finding:
			case <-ctx.Done():
				return
			}
		}
	}
}

View File

@@ -0,0 +1,180 @@
package sources
import (
"context"
"net/http"
"net/http/httptest"
"strings"
"testing"
"time"
"github.com/salvacybersec/keyhunter/pkg/recon"
)
// TestSecurityTrails_Name checks the canonical source identifier.
func TestSecurityTrails_Name(t *testing.T) {
	src := &SecurityTrailsSource{}
	if got := src.Name(); got != "securitytrails" {
		t.Fatalf("expected securitytrails, got %s", got)
	}
}
// TestSecurityTrails_Enabled verifies the source is gated on an API key:
// disabled without one, enabled once set.
func TestSecurityTrails_Enabled(t *testing.T) {
	src := &SecurityTrailsSource{}
	if src.Enabled(recon.Config{}) {
		t.Fatal("SecurityTrailsSource should be disabled without API key")
	}
	src.APIKey = "test-key"
	if !src.Enabled(recon.Config{}) {
		t.Fatal("SecurityTrailsSource should be enabled with API key")
	}
}
// TestSecurityTrails_Sweep mocks both SecurityTrails API endpoints and a
// probe server, then asserts the sweep produces both a probe finding (/.env)
// and a DNS history finding, each tagged recon:securitytrails.
func TestSecurityTrails_Sweep(t *testing.T) {
	// API server mocks SecurityTrails endpoints.
	apiMux := http.NewServeMux()
	// Subdomain enumeration. Both handlers reject requests lacking the
	// APIKEY header, exercising the auth path in Sweep.
	apiMux.HandleFunc("/domain/example.com/subdomains", func(w http.ResponseWriter, r *http.Request) {
		if r.Header.Get("APIKEY") != "test-key" {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{"subdomains":["api","staging"]}`))
	})
	// DNS history. The TXT value embeds a key-like string so the raw-body
	// pattern match in Sweep's phase 2 fires.
	apiMux.HandleFunc("/domain/example.com", func(w http.ResponseWriter, r *http.Request) {
		if r.Header.Get("APIKEY") != "test-key" {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{"current_dns":{"txt":{"values":[{"value":"token = sk-proj-ABCDEF1234567890abcdef"}]}}}`))
	})
	apiSrv := httptest.NewServer(apiMux)
	defer apiSrv.Close()
	// Probe server. probeSubdomain builds /<fqdn>/.env paths when
	// ProbeBaseURL is set, so a suffix match is enough here.
	probeMux := http.NewServeMux()
	probeMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		if strings.HasSuffix(r.URL.Path, "/.env") {
			_, _ = w.Write([]byte(`SECRET_KEY = "sk-proj-ABCDEF1234567890abcdef"`))
			return
		}
		http.NotFound(w, r)
	})
	probeSrv := httptest.NewServer(probeMux)
	defer probeSrv.Close()
	s := &SecurityTrailsSource{
		APIKey:       "test-key",
		BaseURL:      apiSrv.URL,
		Client:       NewClient(),
		ProbeBaseURL: probeSrv.URL,
	}
	out := make(chan recon.Finding, 20)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	err := s.Sweep(ctx, "example.com", out)
	// Safe to close before draining: Sweep has returned, so no more sends.
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}
	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) == 0 {
		t.Fatal("expected at least one finding from SecurityTrails")
	}
	// Check that we got both probe findings and DNS history findings,
	// distinguishing them by the Source URL shape.
	var probeFound, dnsFound bool
	for _, f := range findings {
		if f.SourceType != "recon:securitytrails" {
			t.Fatalf("expected recon:securitytrails, got %s", f.SourceType)
		}
		if strings.Contains(f.Source, "/.env") {
			probeFound = true
		}
		if strings.Contains(f.Source, "/domain/example.com") && !strings.Contains(f.Source, "subdomains") {
			dnsFound = true
		}
	}
	if !probeFound {
		t.Fatal("expected probe finding from SecurityTrails")
	}
	if !dnsFound {
		t.Fatal("expected DNS history finding from SecurityTrails")
	}
}
// TestSecurityTrails_Sweep_SkipsKeywords verifies that dotless
// (keyword-style) queries are ignored and produce no findings.
func TestSecurityTrails_Sweep_SkipsKeywords(t *testing.T) {
	src := &SecurityTrailsSource{
		APIKey: "test-key",
		Client: NewClient(),
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	results := make(chan recon.Finding, 10)
	if err := src.Sweep(ctx, "sk-proj-", results); err != nil {
		t.Fatalf("Sweep error: %v", err)
	}
	close(results)
	var n int
	for range results {
		n++
	}
	if n != 0 {
		t.Fatalf("expected no findings for keyword query, got %d", n)
	}
}
// TestSecurityTrails_Sweep_NoSubdomains sweeps a domain whose API responses
// list no subdomains and no key-like DNS data, expecting zero findings.
func TestSecurityTrails_Sweep_NoSubdomains(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/domain/empty.example.com/subdomains", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{"subdomains":[]}`))
	})
	mux.HandleFunc("/domain/empty.example.com", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{"current_dns":{}}`))
	})
	server := httptest.NewServer(mux)
	defer server.Close()
	src := &SecurityTrailsSource{
		APIKey:  "test-key",
		BaseURL: server.URL,
		Client:  NewClient(),
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	results := make(chan recon.Finding, 10)
	if err := src.Sweep(ctx, "empty.example.com", results); err != nil {
		t.Fatalf("Sweep error: %v", err)
	}
	close(results)
	var n int
	for range results {
		n++
	}
	if n != 0 {
		t.Fatalf("expected no findings, got %d", n)
	}
}