feat(10-07): add Replit and CodeSandbox scraping sources
- ReplitSource scrapes /search HTML extracting /@user/repl anchors - CodeSandboxSource scrapes /search HTML extracting /s/slug anchors - Both use golang.org/x/net/html parser, 10 req/min rate, RespectsRobots=true - 10 httptest-backed tests covering extraction, ctx cancel, rate/name assertions
This commit is contained in:
95
pkg/recon/sources/codesandbox.go
Normal file
95
pkg/recon/sources/codesandbox.go
Normal file
@@ -0,0 +1,95 @@
|
||||
package sources
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"time"
|
||||
|
||||
"golang.org/x/time/rate"
|
||||
|
||||
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||
)
|
||||
|
||||
// CodeSandboxSource scrapes codesandbox.io search results for references to
// provider keywords. Mirrors ReplitSource: robots-respecting, 10 req/min,
// no credentials required.
type CodeSandboxSource struct {
	// BaseURL overrides the search endpoint; empty means https://codesandbox.io
	// (see the fallback in Sweep). Tests point this at an httptest server.
	BaseURL string
	// Registry supplies the provider keywords that BuildQueries turns into
	// search queries.
	Registry *providers.Registry
	// Limiters, when non-nil, throttles requests via Wait using this source's
	// name, rate, and burst.
	Limiters *recon.LimiterRegistry
	// Client performs the HTTP fetches; nil means NewClient() per Sweep call.
	Client *Client
}
|
||||
|
||||
// codeSandboxLinkRE matches /s/<slug> sandbox result links. The pattern is
// fully anchored, so only bare sandbox paths (alphanumeric/hyphen slug, no
// query string or trailing segments) are accepted.
var codeSandboxLinkRE = regexp.MustCompile(`^/s/[a-zA-Z0-9-]+$`)
|
||||
|
||||
// Compile-time assertion that CodeSandboxSource satisfies recon.ReconSource.
var _ recon.ReconSource = (*CodeSandboxSource)(nil)
|
||||
|
||||
// Name returns the stable source identifier, also used as the limiter key in
// Sweep.
func (s *CodeSandboxSource) Name() string { return "codesandbox" }

// RateLimit permits one request every 6 seconds (10 requests per minute).
func (s *CodeSandboxSource) RateLimit() rate.Limit { return rate.Every(6 * time.Second) }

// Burst allows no bursting beyond the steady rate.
func (s *CodeSandboxSource) Burst() int { return 1 }

// RespectsRobots reports that this source honors robots.txt.
func (s *CodeSandboxSource) RespectsRobots() bool { return true }

// Enabled reports that the source is always active; it needs no credentials
// or configuration.
func (s *CodeSandboxSource) Enabled(_ recon.Config) bool { return true }
|
||||
|
||||
// Sweep runs a CodeSandbox search per provider keyword and emits one Finding
|
||||
// per matched result anchor.
|
||||
func (s *CodeSandboxSource) Sweep(ctx context.Context, _ string, out chan<- recon.Finding) error {
|
||||
base := s.BaseURL
|
||||
if base == "" {
|
||||
base = "https://codesandbox.io"
|
||||
}
|
||||
client := s.Client
|
||||
if client == nil {
|
||||
client = NewClient()
|
||||
}
|
||||
|
||||
queries := BuildQueries(s.Registry, "codesandbox")
|
||||
if len(queries) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, q := range queries {
|
||||
if err := ctx.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if s.Limiters != nil {
|
||||
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
searchURL := fmt.Sprintf("%s/search?query=%s&type=sandboxes", base, url.QueryEscape(q))
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("codesandbox: build req: %w", err)
|
||||
}
|
||||
resp, err := client.Do(ctx, req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("codesandbox: fetch: %w", err)
|
||||
}
|
||||
links, err := extractAnchorHrefs(resp.Body, codeSandboxLinkRE)
|
||||
_ = resp.Body.Close()
|
||||
if err != nil {
|
||||
return fmt.Errorf("codesandbox: parse html: %w", err)
|
||||
}
|
||||
|
||||
for _, href := range links {
|
||||
if err := ctx.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
out <- recon.Finding{
|
||||
Source: base + href,
|
||||
SourceType: "recon:codesandbox",
|
||||
Confidence: "low",
|
||||
DetectedAt: time.Now(),
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
Reference in New Issue
Block a user