// keyhunter/pkg/recon/sources/wayback.go
package sources
import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"

	"golang.org/x/time/rate"

	"github.com/salvacybersec/keyhunter/pkg/providers"
	"github.com/salvacybersec/keyhunter/pkg/recon"
)
// WaybackMachineSource searches the Internet Archive's Wayback Machine CDX API
// for archived pages that may contain leaked API keys. Developers sometimes
// remove secrets from live pages but cached versions persist in web archives.
type WaybackMachineSource struct {
	// BaseURL overrides the archive endpoint; empty means https://web.archive.org.
	BaseURL string
	// Registry supplies the provider dorks passed to BuildQueries.
	Registry *providers.Registry
	// Limiters, when non-nil, throttles every outbound request via Wait.
	Limiters *recon.LimiterRegistry
	// Client performs HTTP requests; a default is created lazily when nil.
	Client *Client
}
// Compile-time check that *WaybackMachineSource satisfies recon.ReconSource.
var _ recon.ReconSource = (*WaybackMachineSource)(nil)
// Name returns the identifier used for this source in limiter keys and output.
func (s *WaybackMachineSource) Name() string {
	return "wayback"
}
// RateLimit permits one request every five seconds, staying polite toward the
// Internet Archive.
func (s *WaybackMachineSource) RateLimit() rate.Limit {
	return rate.Every(5 * time.Second)
}
// Burst allows no bursting: at most one in-flight request per rate window.
func (s *WaybackMachineSource) Burst() int {
	return 1
}
// RespectsRobots reports that this source honors robots.txt directives.
func (s *WaybackMachineSource) RespectsRobots() bool {
	return true
}
// Enabled reports whether this source should run; wayback is always on
// regardless of the supplied configuration.
func (s *WaybackMachineSource) Enabled(_ recon.Config) bool {
	return true
}
// Sweep queries the Wayback Machine CDX API for archived URLs matching each
// provider dork, fetches a bounded prefix (256 KiB) of every snapshot, and
// emits a medium-confidence finding whenever the body matches apiKeyPattern.
//
// The target argument is unused: queries come from the provider registry via
// BuildQueries. Per-request failures are skipped so one bad snapshot does not
// abort the sweep; only context cancellation and limiter errors are returned.
func (s *WaybackMachineSource) Sweep(ctx context.Context, _ string, out chan<- recon.Finding) error {
	base := s.BaseURL
	if base == "" {
		base = "https://web.archive.org"
	}
	client := s.Client
	if client == nil {
		client = NewClient()
	}
	queries := BuildQueries(s.Registry, "wayback")
	if len(queries) == 0 {
		return nil
	}
	for _, q := range queries {
		if err := ctx.Err(); err != nil {
			return err
		}
		if s.Limiters != nil {
			if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
				return err
			}
		}
		// CDX API: search for archived URLs matching the query.
		// Percent-encode the dork so '&', '=', or spaces in it cannot break
		// the URL or inject extra CDX parameters.
		cdxURL := fmt.Sprintf("%s/cdx/search/cdx?url=*%s*&output=json&limit=10&fl=url,timestamp,statuscode&filter=statuscode:200", base, url.QueryEscape(q))
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, cdxURL, nil)
		if err != nil {
			continue
		}
		req.Header.Set("Accept", "application/json")
		resp, err := client.Do(ctx, req)
		if err != nil {
			continue
		}
		if resp.StatusCode != http.StatusOK {
			// Drain before closing so the transport can reuse the connection.
			_, _ = io.Copy(io.Discard, resp.Body)
			_ = resp.Body.Close()
			continue
		}
		var rows [][]string
		decodeErr := json.NewDecoder(resp.Body).Decode(&rows)
		_, _ = io.Copy(io.Discard, resp.Body)
		_ = resp.Body.Close()
		if decodeErr != nil {
			continue
		}
		// Skip the header row if present.
		start := 0
		if len(rows) > 0 && len(rows[0]) > 0 && rows[0][0] == "url" {
			start = 1
		}
		for _, row := range rows[start:] {
			if err := ctx.Err(); err != nil {
				return err
			}
			if len(row) < 2 {
				continue
			}
			archivedURL := row[0]
			timestamp := row[1]
			if s.Limiters != nil {
				if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
					return err
				}
			}
			// Fetch the archived page content. The "id_" flag asks the archive
			// for the raw capture without the Wayback toolbar wrapper.
			snapshotURL := fmt.Sprintf("%s/web/%sid_/%s", base, timestamp, archivedURL)
			snapReq, err := http.NewRequestWithContext(ctx, http.MethodGet, snapshotURL, nil)
			if err != nil {
				continue
			}
			snapResp, err := client.Do(ctx, snapReq)
			if err != nil {
				continue
			}
			// Cap reads at 256 KiB so a huge capture cannot exhaust memory.
			body, err := io.ReadAll(io.LimitReader(snapResp.Body, 256*1024))
			_ = snapResp.Body.Close()
			if err != nil {
				continue
			}
			if apiKeyPattern.Match(body) {
				finding := recon.Finding{
					ProviderName: q,
					Source:       snapshotURL,
					SourceType:   "recon:wayback",
					Confidence:   "medium",
					DetectedAt:   time.Now(),
				}
				// Guard the send with ctx so a cancelled sweep cannot block
				// forever on a consumer that has stopped reading.
				select {
				case out <- finding:
				case <-ctx.Done():
					return ctx.Err()
				}
			}
		}
	}
	return nil
}