Files
keyhunter/pkg/recon/sources/gistpaste.go
salvacybersec 3c500b5473 feat(11-02): add PastebinSource and GistPasteSource for paste site scanning
- PastebinSource: two-phase search+raw-fetch with keyword matching
- GistPasteSource: scrapes gist.github.com public search (no auth)
- Both implement recon.ReconSource with httptest-based tests
2026-04-06 11:53:00 +03:00

153 lines
3.9 KiB
Go

package sources
import (
"context"
"fmt"
"io"
"net/http"
"net/url"
"regexp"
"strings"
"time"
"golang.org/x/time/rate"
"github.com/salvacybersec/keyhunter/pkg/providers"
"github.com/salvacybersec/keyhunter/pkg/recon"
)
// GistPasteSource scrapes gist.github.com's public search (no auth required)
// for API key leaks. This is distinct from Phase 10's GistSource which uses
// the authenticated GitHub API.
//
// Auth: none. Rate: Every(3s), Burst 1.
//
// The zero value is usable: BaseURL defaults to https://gist.github.com and
// Client defaults to NewClient() inside Sweep. A nil Registry yields no
// queries/keywords and Sweep returns immediately; a nil Limiters disables
// rate limiting.
type GistPasteSource struct {
// BaseURL overrides the gist host (used by httptest-based tests); empty means production.
BaseURL string
// Registry supplies provider keywords used to build queries and match raw gist content.
Registry *providers.Registry
// Limiters coordinates per-source rate limiting across concurrent sweeps; may be nil.
Limiters *recon.LimiterRegistry
// Client performs HTTP requests; nil falls back to NewClient().
Client *Client
}
// gistPasteLinkRE matches gist permalink paths of the form /<user>/<hex-hash>,
// anchored to the whole string so nested paths (e.g. /<user>/<hash>/raw) and
// non-gist links on the search page are rejected.
var gistPasteLinkRE = regexp.MustCompile(`^/[^/]+/[a-f0-9]+$`)
// Compile-time assertion that *GistPasteSource satisfies recon.ReconSource.
var _ recon.ReconSource = (*GistPasteSource)(nil)
// Name returns the stable source identifier used for limiter keys and
// finding attribution.
func (s *GistPasteSource) Name() string {
	return "gistpaste"
}
// RateLimit allows one request every three seconds, keeping the unauthenticated
// scrape well under GitHub's abuse thresholds.
func (s *GistPasteSource) RateLimit() rate.Limit {
	const interval = 3 * time.Second
	return rate.Every(interval)
}
// Burst permits no request bursting: at most one in-flight token.
func (s *GistPasteSource) Burst() int {
	return 1
}
// RespectsRobots reports that this source honors robots.txt directives.
func (s *GistPasteSource) RespectsRobots() bool {
	return true
}
// Enabled always reports true: scraping the public gist search needs no
// credentials, so no configuration can disable it.
func (s *GistPasteSource) Enabled(_ recon.Config) bool {
	return true
}
// Sweep searches gist.github.com for each provider keyword, fetches raw gist
// content, and emits Findings for keyword matches.
//
// For every query it rate-limits, fetches the search page, extracts gist
// permalinks, then rate-limits and fetches each gist's /raw content (capped
// at 256 KiB). At most one Finding is emitted per gist, with confidence
// "low" since this is keyword co-occurrence, not a validated credential.
//
// Fatal errors (context cancellation, malformed requests, search fetch or
// parse failures) abort the sweep; per-gist fetch/read failures and non-200
// responses are skipped so one bad gist cannot sink the whole sweep.
func (s *GistPasteSource) Sweep(ctx context.Context, _ string, out chan<- recon.Finding) error {
	base := s.BaseURL
	if base == "" {
		base = "https://gist.github.com"
	}
	client := s.Client
	if client == nil {
		client = NewClient()
	}
	queries := BuildQueries(s.Registry, "gistpaste")
	if len(queries) == 0 {
		return nil
	}
	keywords := gistPasteKeywordSet(s.Registry)
	if len(keywords) == 0 {
		return nil
	}
	for _, q := range queries {
		if err := ctx.Err(); err != nil {
			return err
		}
		if s.Limiters != nil {
			if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
				return err
			}
		}
		searchURL := fmt.Sprintf("%s/search?q=%s", base, url.QueryEscape(q))
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
		if err != nil {
			return fmt.Errorf("gistpaste: build search req: %w", err)
		}
		resp, err := client.Do(ctx, req)
		if err != nil {
			return fmt.Errorf("gistpaste: search fetch: %w", err)
		}
		// Don't parse error pages (404/429/abuse interstitials) as results.
		// Drain the body so the transport can reuse the connection.
		if resp.StatusCode != http.StatusOK {
			_, _ = io.Copy(io.Discard, resp.Body)
			_ = resp.Body.Close()
			continue
		}
		links, err := extractAnchorHrefs(resp.Body, gistPasteLinkRE)
		_ = resp.Body.Close()
		if err != nil {
			return fmt.Errorf("gistpaste: parse search html: %w", err)
		}
		for _, gistPath := range links {
			if err := ctx.Err(); err != nil {
				return err
			}
			if s.Limiters != nil {
				if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
					return err
				}
			}
			rawURL := fmt.Sprintf("%s%s/raw", base, gistPath)
			rawReq, err := http.NewRequestWithContext(ctx, http.MethodGet, rawURL, nil)
			if err != nil {
				return fmt.Errorf("gistpaste: build raw req: %w", err)
			}
			rawResp, err := client.Do(ctx, rawReq)
			if err != nil {
				continue // skip this gist on error
			}
			// A non-200 raw response (deleted gist, rate-limit page) must not
			// be keyword-scanned: that would fabricate findings.
			if rawResp.StatusCode != http.StatusOK {
				_, _ = io.Copy(io.Discard, rawResp.Body)
				_ = rawResp.Body.Close()
				continue
			}
			// Cap the read at 256 KiB to bound memory on pathological gists.
			body, readErr := io.ReadAll(io.LimitReader(rawResp.Body, 256*1024))
			_ = rawResp.Body.Close()
			if readErr != nil {
				continue
			}
			content := string(body)
			// NOTE: map iteration order is random, so when multiple provider
			// keywords match the same gist, which provider is reported is
			// nondeterministic. Acceptable for low-confidence recon output.
			for kw, provName := range keywords {
				if strings.Contains(content, kw) {
					finding := recon.Finding{
						ProviderName: provName,
						Source:       fmt.Sprintf("%s%s", base, gistPath),
						SourceType:   "recon:gistpaste",
						Confidence:   "low",
						DetectedAt:   time.Now(),
					}
					// Select on ctx so a cancelled consumer that stopped
					// draining `out` cannot block this goroutine forever.
					select {
					case out <- finding:
					case <-ctx.Done():
						return ctx.Err()
					}
					break // one finding per gist
				}
			}
		}
	}
	return nil
}
// gistPasteKeywordSet maps each non-empty provider keyword to the name of
// the first provider (in registry order) that declares it. A nil registry
// yields an empty, non-nil map.
func gistPasteKeywordSet(reg *providers.Registry) map[string]string {
	keywordToProvider := make(map[string]string)
	if reg == nil {
		return keywordToProvider
	}
	for _, prov := range reg.List() {
		for _, keyword := range prov.Keywords {
			if keyword == "" {
				continue
			}
			// First provider to claim a keyword wins; later duplicates are ignored.
			if _, claimed := keywordToProvider[keyword]; claimed {
				continue
			}
			keywordToProvider[keyword] = prov.Name
		}
	}
	return keywordToProvider
}