feat(10-04): add GistSource for public gist keyword recon
- GistSource implements recon.ReconSource (RECON-CODE-04) - Lists /gists/public?per_page=100, fetches each file's raw content, scans against provider keyword set, emits one Finding per matching gist - Disabled when GitHub token empty - Rate: rate.Every(2s), burst 1 (30 req/min GitHub limit) - 256KB read cap per file; skips gists without keyword matches - httptest coverage: enable gating, sweep match, no-match, 401, ctx cancel
This commit is contained in:
184
pkg/recon/sources/gist.go
Normal file
184
pkg/recon/sources/gist.go
Normal file
@@ -0,0 +1,184 @@
|
||||
package sources
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/time/rate"
|
||||
|
||||
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||
)
|
||||
|
||||
// GistSource scans recent public GitHub Gists for provider keyword leaks
// (RECON-CODE-04).
//
// GitHub does not expose a dedicated /search/gists endpoint, so this source
// enumerates /gists/public (most-recent page) and fetches each file's raw URL
// to scan its content against the provider keyword set. Keep Phase 10 minimal:
// only the first page is walked; broader sweeps are a future optimization.
//
// Auth: GitHub token via Bearer header. Rate: 30 req/min (shared with GitHub
// search limits) → rate.Every(2s), burst 1.
type GistSource struct {
	// Token is the GitHub API token sent as a Bearer header; the source
	// is disabled when empty (see Enabled).
	Token string
	// BaseURL overrides the GitHub API root (used by tests); defaults to
	// https://api.github.com when empty.
	BaseURL string
	// Registry supplies the provider keyword set that gist content is
	// scanned against (flattened via keywordSet).
	Registry *providers.Registry
	// Limiters enforces the per-source token bucket before each HTTP call.
	Limiters *recon.LimiterRegistry

	// client is lazily constructed on the first Sweep when nil.
	client *Client
}
|
||||
|
||||
var _ recon.ReconSource = (*GistSource)(nil)
|
||||
|
||||
// Name returns the stable source identifier.
|
||||
func (s *GistSource) Name() string { return "gist" }
|
||||
|
||||
// RateLimit reports the per-source token bucket rate (30/min).
|
||||
func (s *GistSource) RateLimit() rate.Limit { return rate.Every(2 * time.Second) }
|
||||
|
||||
// Burst reports the token bucket burst capacity.
|
||||
func (s *GistSource) Burst() int { return 1 }
|
||||
|
||||
// RespectsRobots reports whether robots.txt applies (REST API → false).
|
||||
func (s *GistSource) RespectsRobots() bool { return false }
|
||||
|
||||
// Enabled reports whether the source runs. Requires a GitHub token.
|
||||
func (s *GistSource) Enabled(_ recon.Config) bool { return s.Token != "" }
|
||||
|
||||
// gistListEntry is the subset of the GitHub /gists/public list payload this
// source decodes: the gist's HTML URL (reported in Findings) and each file's
// raw URL (fetched for content scanning). All other response fields are
// intentionally ignored.
type gistListEntry struct {
	HTMLURL string `json:"html_url"`
	Files   map[string]struct {
		Filename string `json:"filename"`
		RawURL   string `json:"raw_url"`
	} `json:"files"`
}
|
||||
|
||||
// Sweep fetches /gists/public, scans each file's raw content against the
|
||||
// keyword set from the registry, and emits one Finding per gist that matches
|
||||
// any keyword (not one per file — gists often split a single leak across
|
||||
// helper files).
|
||||
func (s *GistSource) Sweep(ctx context.Context, _ string, out chan<- recon.Finding) error {
|
||||
if s.client == nil {
|
||||
s.client = NewClient()
|
||||
}
|
||||
base := s.BaseURL
|
||||
if base == "" {
|
||||
base = "https://api.github.com"
|
||||
}
|
||||
|
||||
keywords := s.keywordSet()
|
||||
if len(keywords) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
if s.Limiters != nil {
|
||||
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
listReq, err := http.NewRequest(http.MethodGet, base+"/gists/public?per_page=100", nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("gist: build list request: %w", err)
|
||||
}
|
||||
listReq.Header.Set("Authorization", "Bearer "+s.Token)
|
||||
listReq.Header.Set("Accept", "application/vnd.github+json")
|
||||
|
||||
listResp, err := s.client.Do(ctx, listReq)
|
||||
if err != nil {
|
||||
return fmt.Errorf("gist: list: %w", err)
|
||||
}
|
||||
var gists []gistListEntry
|
||||
dec := json.NewDecoder(listResp.Body)
|
||||
decodeErr := dec.Decode(&gists)
|
||||
_ = listResp.Body.Close()
|
||||
if decodeErr != nil {
|
||||
return fmt.Errorf("gist: decode list: %w", decodeErr)
|
||||
}
|
||||
|
||||
for _, g := range gists {
|
||||
if err := ctx.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
matched := false
|
||||
var matchedProvider string
|
||||
|
||||
fileLoop:
|
||||
for _, f := range g.Files {
|
||||
if f.RawURL == "" {
|
||||
continue
|
||||
}
|
||||
if s.Limiters != nil {
|
||||
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
rawReq, err := http.NewRequest(http.MethodGet, f.RawURL, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("gist: build raw request: %w", err)
|
||||
}
|
||||
rawReq.Header.Set("Authorization", "Bearer "+s.Token)
|
||||
rawResp, err := s.client.Do(ctx, rawReq)
|
||||
if err != nil {
|
||||
return fmt.Errorf("gist: fetch raw: %w", err)
|
||||
}
|
||||
// Cap read to 256KB to avoid pathological gists.
|
||||
body, readErr := io.ReadAll(io.LimitReader(rawResp.Body, 256*1024))
|
||||
_ = rawResp.Body.Close()
|
||||
if readErr != nil {
|
||||
return fmt.Errorf("gist: read raw: %w", readErr)
|
||||
}
|
||||
|
||||
content := string(body)
|
||||
for kw, provName := range keywords {
|
||||
if strings.Contains(content, kw) {
|
||||
matched = true
|
||||
matchedProvider = provName
|
||||
break fileLoop
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if matched {
|
||||
select {
|
||||
case out <- recon.Finding{
|
||||
ProviderName: matchedProvider,
|
||||
Source: g.HTMLURL,
|
||||
SourceType: "recon:gist",
|
||||
DetectedAt: time.Now().UTC(),
|
||||
}:
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// keywordSet flattens the registry into a keyword→providerName map for
|
||||
// content scanning. Empty keywords are skipped.
|
||||
func (s *GistSource) keywordSet() map[string]string {
|
||||
out := make(map[string]string)
|
||||
if s.Registry == nil {
|
||||
return out
|
||||
}
|
||||
for _, p := range s.Registry.List() {
|
||||
for _, k := range p.Keywords {
|
||||
if k == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := out[k]; !ok {
|
||||
out[k] = p.Name
|
||||
}
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
166
pkg/recon/sources/gist_test.go
Normal file
166
pkg/recon/sources/gist_test.go
Normal file
@@ -0,0 +1,166 @@
|
||||
package sources
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||
)
|
||||
|
||||
func gistTestRegistry() *providers.Registry {
|
||||
return providers.NewRegistryFromProviders([]providers.Provider{
|
||||
{Name: "openai", Keywords: []string{"sk-proj-"}},
|
||||
})
|
||||
}
|
||||
|
||||
func newGistSource(baseURL, token string) *GistSource {
|
||||
return &GistSource{
|
||||
Token: token,
|
||||
BaseURL: baseURL,
|
||||
Registry: gistTestRegistry(),
|
||||
Limiters: recon.NewLimiterRegistry(),
|
||||
}
|
||||
}
|
||||
|
||||
func TestGist_EnabledRequiresToken(t *testing.T) {
|
||||
cfg := recon.Config{}
|
||||
if newGistSource("", "").Enabled(cfg) {
|
||||
t.Fatal("expected disabled when token empty")
|
||||
}
|
||||
if !newGistSource("", "tok").Enabled(cfg) {
|
||||
t.Fatal("expected enabled when token set")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGist_SweepEmitsFindingsOnKeywordMatch(t *testing.T) {
|
||||
var gotAuth, gotListPath string
|
||||
|
||||
mux := http.NewServeMux()
|
||||
var srv *httptest.Server
|
||||
|
||||
mux.HandleFunc("/gists/public", func(w http.ResponseWriter, r *http.Request) {
|
||||
gotAuth = r.Header.Get("Authorization")
|
||||
gotListPath = r.URL.Path
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
body := fmt.Sprintf(`[
|
||||
{
|
||||
"html_url": "https://gist.github.com/alice/aaa",
|
||||
"files": {
|
||||
"leak.env": {"filename": "leak.env", "raw_url": "%s/raw/aaa"}
|
||||
}
|
||||
},
|
||||
{
|
||||
"html_url": "https://gist.github.com/bob/bbb",
|
||||
"files": {
|
||||
"notes.md": {"filename": "notes.md", "raw_url": "%s/raw/bbb"}
|
||||
}
|
||||
}
|
||||
]`, srv.URL, srv.URL)
|
||||
_, _ = w.Write([]byte(body))
|
||||
})
|
||||
mux.HandleFunc("/raw/aaa", func(w http.ResponseWriter, r *http.Request) {
|
||||
_, _ = w.Write([]byte("OPENAI_API_KEY=sk-proj-1234567890abcdefghijk"))
|
||||
})
|
||||
mux.HandleFunc("/raw/bbb", func(w http.ResponseWriter, r *http.Request) {
|
||||
_, _ = w.Write([]byte("just some unrelated notes here"))
|
||||
})
|
||||
|
||||
srv = httptest.NewServer(mux)
|
||||
t.Cleanup(srv.Close)
|
||||
|
||||
src := newGistSource(srv.URL, "tok")
|
||||
out := make(chan recon.Finding, 8)
|
||||
if err := src.Sweep(context.Background(), "", out); err != nil {
|
||||
t.Fatalf("Sweep: %v", err)
|
||||
}
|
||||
close(out)
|
||||
|
||||
if gotAuth != "Bearer tok" {
|
||||
t.Errorf("Authorization = %q", gotAuth)
|
||||
}
|
||||
if gotListPath != "/gists/public" {
|
||||
t.Errorf("list path = %q", gotListPath)
|
||||
}
|
||||
|
||||
var findings []recon.Finding
|
||||
for f := range out {
|
||||
findings = append(findings, f)
|
||||
}
|
||||
if len(findings) != 1 {
|
||||
t.Fatalf("findings count = %d, want 1 (only aaa matches sk-proj-)", len(findings))
|
||||
}
|
||||
f := findings[0]
|
||||
if !strings.Contains(f.Source, "alice/aaa") {
|
||||
t.Errorf("Source = %q, want gist alice/aaa", f.Source)
|
||||
}
|
||||
if f.SourceType != "recon:gist" {
|
||||
t.Errorf("SourceType = %q", f.SourceType)
|
||||
}
|
||||
if f.ProviderName != "openai" {
|
||||
t.Errorf("ProviderName = %q, want openai", f.ProviderName)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGist_NoMatch_NoFinding(t *testing.T) {
|
||||
var srv *httptest.Server
|
||||
mux := http.NewServeMux()
|
||||
mux.HandleFunc("/gists/public", func(w http.ResponseWriter, r *http.Request) {
|
||||
body := fmt.Sprintf(`[{"html_url":"https://gist.github.com/x/y","files":{"a.txt":{"filename":"a.txt","raw_url":"%s/raw/x"}}}]`, srv.URL)
|
||||
_, _ = w.Write([]byte(body))
|
||||
})
|
||||
mux.HandleFunc("/raw/x", func(w http.ResponseWriter, r *http.Request) {
|
||||
_, _ = w.Write([]byte("nothing interesting"))
|
||||
})
|
||||
srv = httptest.NewServer(mux)
|
||||
t.Cleanup(srv.Close)
|
||||
|
||||
src := newGistSource(srv.URL, "tok")
|
||||
out := make(chan recon.Finding, 4)
|
||||
if err := src.Sweep(context.Background(), "", out); err != nil {
|
||||
t.Fatalf("Sweep: %v", err)
|
||||
}
|
||||
close(out)
|
||||
n := 0
|
||||
for range out {
|
||||
n++
|
||||
}
|
||||
if n != 0 {
|
||||
t.Fatalf("findings = %d, want 0", n)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGist_Unauthorized(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
http.Error(w, "bad", http.StatusUnauthorized)
|
||||
}))
|
||||
t.Cleanup(srv.Close)
|
||||
src := newGistSource(srv.URL, "tok")
|
||||
out := make(chan recon.Finding, 1)
|
||||
err := src.Sweep(context.Background(), "", out)
|
||||
if !errors.Is(err, ErrUnauthorized) {
|
||||
t.Fatalf("err = %v, want ErrUnauthorized", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGist_ContextCancellation(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
time.Sleep(2 * time.Second)
|
||||
_, _ = w.Write([]byte(`[]`))
|
||||
}))
|
||||
t.Cleanup(srv.Close)
|
||||
src := newGistSource(srv.URL, "tok")
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
|
||||
defer cancel()
|
||||
out := make(chan recon.Finding, 1)
|
||||
err := src.Sweep(ctx, "", out)
|
||||
if err == nil {
|
||||
t.Fatal("expected error on cancelled ctx")
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user