diff --git a/pkg/recon/sources/hackernews.go b/pkg/recon/sources/hackernews.go
new file mode 100644
index 0000000..9439e75
--- /dev/null
+++ b/pkg/recon/sources/hackernews.go
@@ -0,0 +1,116 @@
+package sources
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"net/url"
+	"time"
+
+	"golang.org/x/time/rate"
+
+	"github.com/salvacybersec/keyhunter/pkg/providers"
+	"github.com/salvacybersec/keyhunter/pkg/recon"
+)
+
+// HackerNewsSource searches the Algolia-powered Hacker News search API for
+// comments containing leaked API keys. Developers occasionally paste
+// credentials in HN discussion threads about APIs and tools.
+type HackerNewsSource struct {
+	BaseURL  string
+	Registry *providers.Registry
+	Limiters *recon.LimiterRegistry
+	Client   *Client
+}
+
+var _ recon.ReconSource = (*HackerNewsSource)(nil)
+
+func (s *HackerNewsSource) Name() string          { return "hackernews" }
+func (s *HackerNewsSource) RateLimit() rate.Limit { return rate.Every(1 * time.Second) }
+func (s *HackerNewsSource) Burst() int            { return 5 }
+func (s *HackerNewsSource) RespectsRobots() bool  { return false }
+func (s *HackerNewsSource) Enabled(_ recon.Config) bool { return true }
+
+// hnSearchResponse represents the Algolia HN Search API response.
+type hnSearchResponse struct {
+	Hits []hnHit `json:"hits"`
+}
+
+type hnHit struct {
+	CommentText string `json:"comment_text"`
+	ObjectID    string `json:"objectID"`
+	StoryID     int    `json:"story_id"`
+}
+
+func (s *HackerNewsSource) Sweep(ctx context.Context, _ string, out chan<- recon.Finding) error {
+	base := s.BaseURL
+	if base == "" {
+		base = "https://hn.algolia.com"
+	}
+	client := s.Client
+	if client == nil {
+		client = NewClient()
+	}
+
+	queries := BuildQueries(s.Registry, "hackernews")
+	if len(queries) == 0 {
+		return nil
+	}
+
+	for _, q := range queries {
+		if err := ctx.Err(); err != nil {
+			return err
+		}
+
+		if s.Limiters != nil {
+			if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
+				return err
+			}
+		}
+
+		searchURL := fmt.Sprintf("%s/api/v1/search?query=%s&tags=comment&hitsPerPage=20",
+			base, url.QueryEscape(q))
+		req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
+		if err != nil {
+			continue
+		}
+		req.Header.Set("Accept", "application/json")
+
+		resp, err := client.Do(ctx, req)
+		if err != nil {
+			continue
+		}
+		// Skip non-200 responses (rate limiting, upstream errors) before decoding.
+		if resp.StatusCode != http.StatusOK {
+			_ = resp.Body.Close()
+			continue
+		}
+
+		body, err := io.ReadAll(io.LimitReader(resp.Body, 256*1024))
+		_ = resp.Body.Close()
+		if err != nil {
+			continue
+		}
+
+		var result hnSearchResponse
+		if err := json.Unmarshal(body, &result); err != nil {
+			continue
+		}
+
+		for _, hit := range result.Hits {
+			if ciLogKeyPattern.MatchString(hit.CommentText) {
+				itemURL := fmt.Sprintf("https://news.ycombinator.com/item?id=%s", hit.ObjectID)
+				out <- recon.Finding{
+					ProviderName: q,
+					Source:       itemURL,
+					SourceType:   "recon:hackernews",
+					Confidence:   "medium",
+					DetectedAt:   time.Now(),
+				}
+			}
+		}
+	}
+	return nil
+}
diff --git a/pkg/recon/sources/hackernews_test.go b/pkg/recon/sources/hackernews_test.go
new file mode 100644
index 0000000..2c550e1
--- /dev/null
+++ b/pkg/recon/sources/hackernews_test.go
@@ -0,0 +1,72 @@
+package sources
+
+import (
+	"context"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+	"time"
+
+	"github.com/salvacybersec/keyhunter/pkg/providers"
+	"github.com/salvacybersec/keyhunter/pkg/recon"
+)
+
+func TestHackerNews_Name(t *testing.T) {
+	s := &HackerNewsSource{}
+	if s.Name() != "hackernews" {
+		t.Fatalf("expected hackernews, got %s", s.Name())
+	}
+}
+
+func TestHackerNews_Enabled(t *testing.T) {
+	s := &HackerNewsSource{}
+	if !s.Enabled(recon.Config{}) {
+		t.Fatal("HackerNewsSource should always be enabled (credentialless)")
+	}
+}
+
+func TestHackerNews_Sweep(t *testing.T) {
+	mux := http.NewServeMux()
+	mux.HandleFunc("/api/v1/search", func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Content-Type", "application/json")
+		_, _ = w.Write([]byte(`{"hits":[{
+			"comment_text":"You should set your auth_token = \"sk-proj-ABCDEF1234567890abcdef\" in the config",
+			"objectID":"98765432",
+			"story_id":98765000
+		}]}`))
+	})
+
+	srv := httptest.NewServer(mux)
+	defer srv.Close()
+
+	reg := providers.NewRegistryFromProviders([]providers.Provider{
+		{Name: "openai", Keywords: []string{"sk-proj-"}},
+	})
+
+	s := &HackerNewsSource{
+		BaseURL:  srv.URL,
+		Registry: reg,
+		Client:   NewClient(),
+	}
+
+	out := make(chan recon.Finding, 10)
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+	defer cancel()
+
+	err := s.Sweep(ctx, "", out)
+	close(out)
+	if err != nil {
+		t.Fatalf("Sweep error: %v", err)
+	}
+
+	var findings []recon.Finding
+	for f := range out {
+		findings = append(findings, f)
+	}
+	if len(findings) == 0 {
+		t.Fatal("expected at least one finding from Hacker News search")
+	}
+	if findings[0].SourceType != "recon:hackernews" {
+		t.Fatalf("expected recon:hackernews, got %s", findings[0].SourceType)
+	}
+}
diff --git a/pkg/recon/sources/reddit.go b/pkg/recon/sources/reddit.go
new file mode 100644
index 0000000..f361021
--- /dev/null
+++ b/pkg/recon/sources/reddit.go
@@ -0,0 +1,126 @@
+package sources
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"net/url"
+	"time"
+
+	"golang.org/x/time/rate"
+
+	"github.com/salvacybersec/keyhunter/pkg/providers"
+	"github.com/salvacybersec/keyhunter/pkg/recon"
+)
+
+// RedditSource searches Reddit's public JSON API for posts containing leaked
+// API keys. Developers frequently share code snippets with credentials in
+// subreddits like r/learnprogramming, r/openai, and r/machinelearning.
+type RedditSource struct {
+	BaseURL  string
+	Registry *providers.Registry
+	Limiters *recon.LimiterRegistry
+	Client   *Client
+}
+
+var _ recon.ReconSource = (*RedditSource)(nil)
+
+func (s *RedditSource) Name() string          { return "reddit" }
+func (s *RedditSource) RateLimit() rate.Limit { return rate.Every(2 * time.Second) }
+func (s *RedditSource) Burst() int            { return 2 }
+func (s *RedditSource) RespectsRobots() bool  { return false }
+func (s *RedditSource) Enabled(_ recon.Config) bool { return true }
+
+// redditListingResponse represents the Reddit JSON API search response.
+type redditListingResponse struct {
+	Data redditListingData `json:"data"`
+}
+
+type redditListingData struct {
+	Children []redditChild `json:"children"`
+}
+
+type redditChild struct {
+	Data redditPost `json:"data"`
+}
+
+type redditPost struct {
+	Selftext  string `json:"selftext"`
+	Permalink string `json:"permalink"`
+	Title     string `json:"title"`
+}
+
+func (s *RedditSource) Sweep(ctx context.Context, _ string, out chan<- recon.Finding) error {
+	base := s.BaseURL
+	if base == "" {
+		base = "https://www.reddit.com"
+	}
+	client := s.Client
+	if client == nil {
+		client = NewClient()
+	}
+
+	queries := BuildQueries(s.Registry, "reddit")
+	if len(queries) == 0 {
+		return nil
+	}
+
+	for _, q := range queries {
+		if err := ctx.Err(); err != nil {
+			return err
+		}
+
+		if s.Limiters != nil {
+			if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
+				return err
+			}
+		}
+
+		searchURL := fmt.Sprintf("%s/search.json?q=%s&sort=new&limit=25&restrict_sr=false",
+			base, url.QueryEscape(q))
+		req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
+		if err != nil {
+			continue
+		}
+		req.Header.Set("Accept", "application/json")
+		// Reddit blocks requests with default User-Agent.
+		req.Header.Set("User-Agent", "keyhunter-recon/1.0 (API key scanner)")
+
+		resp, err := client.Do(ctx, req)
+		if err != nil {
+			continue
+		}
+		// Skip non-200 responses (rate limiting, upstream errors) before decoding.
+		if resp.StatusCode != http.StatusOK {
+			_ = resp.Body.Close()
+			continue
+		}
+
+		body, err := io.ReadAll(io.LimitReader(resp.Body, 256*1024))
+		_ = resp.Body.Close()
+		if err != nil {
+			continue
+		}
+
+		var result redditListingResponse
+		if err := json.Unmarshal(body, &result); err != nil {
+			continue
+		}
+
+		for _, child := range result.Data.Children {
+			if ciLogKeyPattern.MatchString(child.Data.Selftext) {
+				postURL := fmt.Sprintf("https://www.reddit.com%s", child.Data.Permalink)
+				out <- recon.Finding{
+					ProviderName: q,
+					Source:       postURL,
+					SourceType:   "recon:reddit",
+					Confidence:   "medium",
+					DetectedAt:   time.Now(),
+				}
+			}
+		}
+	}
+	return nil
+}
diff --git a/pkg/recon/sources/reddit_test.go b/pkg/recon/sources/reddit_test.go
new file mode 100644
index 0000000..1275fc3
--- /dev/null
+++ b/pkg/recon/sources/reddit_test.go
@@ -0,0 +1,74 @@
+package sources
+
+import (
+	"context"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+	"time"
+
+	"github.com/salvacybersec/keyhunter/pkg/providers"
+	"github.com/salvacybersec/keyhunter/pkg/recon"
+)
+
+func TestReddit_Name(t *testing.T) {
+	s := &RedditSource{}
+	if s.Name() != "reddit" {
+		t.Fatalf("expected reddit, got %s", s.Name())
+	}
+}
+
+func TestReddit_Enabled(t *testing.T) {
+	s := &RedditSource{}
+	if !s.Enabled(recon.Config{}) {
+		t.Fatal("RedditSource should always be enabled (credentialless)")
+	}
+}
+
+func TestReddit_Sweep(t *testing.T) {
+	mux := http.NewServeMux()
+	mux.HandleFunc("/search.json", func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Content-Type", "application/json")
+		_, _ = w.Write([]byte(`{"data":{"children":[{
+			"data":{
+				"selftext":"I set my api_key = \"sk-proj-ABCDEF1234567890abcdef\" but it does not work",
+				"permalink":"/r/openai/comments/abc123/help_with_api/",
+				"title":"Help with API"
+			}
+		}]}}`))
+	})
+
+	srv := httptest.NewServer(mux)
+	defer srv.Close()
+
+	reg := providers.NewRegistryFromProviders([]providers.Provider{
+		{Name: "openai", Keywords: []string{"sk-proj-"}},
+	})
+
+	s := &RedditSource{
+		BaseURL:  srv.URL,
+		Registry: reg,
+		Client:   NewClient(),
+	}
+
+	out := make(chan recon.Finding, 10)
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+	defer cancel()
+
+	err := s.Sweep(ctx, "", out)
+	close(out)
+	if err != nil {
+		t.Fatalf("Sweep error: %v", err)
+	}
+
+	var findings []recon.Finding
+	for f := range out {
+		findings = append(findings, f)
+	}
+	if len(findings) == 0 {
+		t.Fatal("expected at least one finding from Reddit search")
+	}
+	if findings[0].SourceType != "recon:reddit" {
+		t.Fatalf("expected recon:reddit, got %s", findings[0].SourceType)
+	}
+}
diff --git a/pkg/recon/sources/stackoverflow.go b/pkg/recon/sources/stackoverflow.go
new file mode 100644
index 0000000..550fbd9
--- /dev/null
+++ b/pkg/recon/sources/stackoverflow.go
@@ -0,0 +1,117 @@
+package sources
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"net/url"
+	"time"
+
+	"golang.org/x/time/rate"
+
+	"github.com/salvacybersec/keyhunter/pkg/providers"
+	"github.com/salvacybersec/keyhunter/pkg/recon"
+)
+
+// StackOverflowSource searches Stack Exchange API for questions and answers
+// containing leaked API keys. Developers frequently paste credentials in
+// code examples when asking for help debugging API integrations.
+type StackOverflowSource struct {
+	BaseURL  string
+	Registry *providers.Registry
+	Limiters *recon.LimiterRegistry
+	Client   *Client
+}
+
+var _ recon.ReconSource = (*StackOverflowSource)(nil)
+
+func (s *StackOverflowSource) Name() string          { return "stackoverflow" }
+func (s *StackOverflowSource) RateLimit() rate.Limit { return rate.Every(2 * time.Second) }
+func (s *StackOverflowSource) Burst() int            { return 3 }
+func (s *StackOverflowSource) RespectsRobots() bool  { return false }
+func (s *StackOverflowSource) Enabled(_ recon.Config) bool { return true }
+
+// stackExchangeResponse represents the Stack Exchange API v2.3 search/excerpts response.
+type stackExchangeResponse struct {
+	Items []stackExchangeItem `json:"items"`
+}
+
+type stackExchangeItem struct {
+	Body       string `json:"body"`
+	Excerpt    string `json:"excerpt"`
+	QuestionID int    `json:"question_id"`
+}
+
+func (s *StackOverflowSource) Sweep(ctx context.Context, _ string, out chan<- recon.Finding) error {
+	base := s.BaseURL
+	if base == "" {
+		base = "https://api.stackexchange.com"
+	}
+	client := s.Client
+	if client == nil {
+		client = NewClient()
+	}
+
+	queries := BuildQueries(s.Registry, "stackoverflow")
+	if len(queries) == 0 {
+		return nil
+	}
+
+	for _, q := range queries {
+		if err := ctx.Err(); err != nil {
+			return err
+		}
+
+		if s.Limiters != nil {
+			if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
+				return err
+			}
+		}
+
+		searchURL := fmt.Sprintf("%s/2.3/search/excerpts?order=desc&sort=relevance&q=%s&site=stackoverflow",
+			base, url.QueryEscape(q))
+		req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
+		if err != nil {
+			continue
+		}
+		req.Header.Set("Accept", "application/json")
+
+		resp, err := client.Do(ctx, req)
+		if err != nil {
+			continue
+		}
+		// Skip non-200 responses (rate limiting, upstream errors) before decoding.
+		if resp.StatusCode != http.StatusOK {
+			_ = resp.Body.Close()
+			continue
+		}
+
+		body, err := io.ReadAll(io.LimitReader(resp.Body, 256*1024))
+		_ = resp.Body.Close()
+		if err != nil {
+			continue
+		}
+
+		var result stackExchangeResponse
+		if err := json.Unmarshal(body, &result); err != nil {
+			continue
+		}
+
+		for _, item := range result.Items {
+			content := item.Body + " " + item.Excerpt
+			if ciLogKeyPattern.MatchString(content) {
+				itemURL := fmt.Sprintf("https://stackoverflow.com/q/%d", item.QuestionID)
+				out <- recon.Finding{
+					ProviderName: q,
+					Source:       itemURL,
+					SourceType:   "recon:stackoverflow",
+					Confidence:   "medium",
+					DetectedAt:   time.Now(),
+				}
+			}
+		}
+	}
+	return nil
+}
diff --git a/pkg/recon/sources/stackoverflow_test.go b/pkg/recon/sources/stackoverflow_test.go
new file mode 100644
index 0000000..270f4af
--- /dev/null
+++ b/pkg/recon/sources/stackoverflow_test.go
@@ -0,0 +1,72 @@
+package sources
+
+import (
+	"context"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+	"time"
+
+	"github.com/salvacybersec/keyhunter/pkg/providers"
+	"github.com/salvacybersec/keyhunter/pkg/recon"
+)
+
+func TestStackOverflow_Name(t *testing.T) {
+	s := &StackOverflowSource{}
+	if s.Name() != "stackoverflow" {
+		t.Fatalf("expected stackoverflow, got %s", s.Name())
+	}
+}
+
+func TestStackOverflow_Enabled(t *testing.T) {
+	s := &StackOverflowSource{}
+	if !s.Enabled(recon.Config{}) {
+		t.Fatal("StackOverflowSource should always be enabled (credentialless)")
+	}
+}
+
+func TestStackOverflow_Sweep(t *testing.T) {
+	mux := http.NewServeMux()
+	mux.HandleFunc("/2.3/search/excerpts", func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Content-Type", "application/json")
+		_, _ = w.Write([]byte(`{"items":[{
+			"body":"Here is my code: api_key = \"sk-proj-ABCDEF1234567890abcdef\"",
+			"excerpt":"Using OpenAI API key in Python",
+			"question_id":12345678
+		}]}`))
+	})
+
+	srv := httptest.NewServer(mux)
+	defer srv.Close()
+
+	reg := providers.NewRegistryFromProviders([]providers.Provider{
+		{Name: "openai", Keywords: []string{"sk-proj-"}},
+	})
+
+	s := &StackOverflowSource{
+		BaseURL:  srv.URL,
+		Registry: reg,
+		Client:   NewClient(),
+	}
+
+	out := make(chan recon.Finding, 10)
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+	defer cancel()
+
+	err := s.Sweep(ctx, "", out)
+	close(out)
+	if err != nil {
+		t.Fatalf("Sweep error: %v", err)
+	}
+
+	var findings []recon.Finding
+	for f := range out {
+		findings = append(findings, f)
+	}
+	if len(findings) == 0 {
+		t.Fatal("expected at least one finding from Stack Overflow search")
+	}
+	if findings[0].SourceType != "recon:stackoverflow" {
+		t.Fatalf("expected recon:stackoverflow, got %s", findings[0].SourceType)
+	}
+}