Files
keyhunter/pkg/recon/sources/googledocs.go
salvacybersec 5d568333c7 feat(15-02): add Confluence and GoogleDocs ReconSource implementations
- ConfluenceSource searches exposed instances via /rest/api/content/search CQL
- GoogleDocsSource uses dorking + /export?format=txt for plain-text scanning
- HTML tag stripping for Confluence storage format
- Both are credentialless; tests with httptest mocks confirm findings
2026-04-06 13:50:14 +03:00

140 lines
3.4 KiB
Go

package sources
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"time"
"golang.org/x/time/rate"
"github.com/salvacybersec/keyhunter/pkg/providers"
"github.com/salvacybersec/keyhunter/pkg/recon"
)
// GoogleDocsSource searches publicly shared Google Docs for leaked API keys.
// Google Docs shared with "anyone with the link" are indexable by search
// engines. This source uses a dorking approach to discover public docs and
// then fetches their plain-text export for credential scanning.
type GoogleDocsSource struct {
// BaseURL overrides the default search backend; empty means the
// production endpoint (used by tests to point at an httptest server).
BaseURL string
// Registry supplies the provider patterns used to build dork queries.
Registry *providers.Registry
// Limiters, when non-nil, enforces per-source request rate limiting.
Limiters *recon.LimiterRegistry
// Client, when non-nil, replaces the default HTTP client (test seam).
Client *Client
}

// Compile-time check that GoogleDocsSource satisfies recon.ReconSource.
var _ recon.ReconSource = (*GoogleDocsSource)(nil)
func (s *GoogleDocsSource) Name() string { return "googledocs" }
func (s *GoogleDocsSource) RateLimit() rate.Limit { return rate.Every(3 * time.Second) }
func (s *GoogleDocsSource) Burst() int { return 2 }
func (s *GoogleDocsSource) RespectsRobots() bool { return true }
func (s *GoogleDocsSource) Enabled(_ recon.Config) bool { return true }
// googleDocsSearchResponse represents dork search results for Google Docs.
type googleDocsSearchResponse struct {
// Results is the list of matching documents returned by the search backend.
Results []googleDocsSearchResult `json:"results"`
}

// googleDocsSearchResult is a single search hit.
type googleDocsSearchResult struct {
// URL is the document's canonical docs.google.com address; the plain-text
// export is fetched by appending "/export?format=txt" to it.
URL string `json:"url"`
// Title is the document title as reported by the search backend (unused
// by Sweep but retained for future reporting).
Title string `json:"title"`
}
// Sweep dorks a search backend for publicly shared Google Docs matching the
// provider-derived queries, fetches each hit's plain-text export, and emits a
// recon.Finding on out for every document whose text matches ciLogKeyPattern.
//
// The sweep is best-effort: transport errors, non-200 responses, and malformed
// JSON for an individual query or document are skipped silently. Only context
// cancellation (including during rate-limit waits or the send on out) aborts
// the run, in which case the context's error is returned.
func (s *GoogleDocsSource) Sweep(ctx context.Context, _ string, out chan<- recon.Finding) error {
	base := s.BaseURL
	if base == "" {
		base = "https://search.googledocs.dev"
	}
	client := s.Client
	if client == nil {
		client = NewClient()
	}
	queries := BuildQueries(s.Registry, "googledocs")
	if len(queries) == 0 {
		return nil
	}
	for _, q := range queries {
		if err := ctx.Err(); err != nil {
			return err
		}
		if s.Limiters != nil {
			if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
				return err
			}
		}
		// Search for public Google Docs via dorking.
		searchURL := fmt.Sprintf("%s/search?q=%s&format=json",
			base, url.QueryEscape("site:docs.google.com "+q))
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
		if err != nil {
			continue
		}
		req.Header.Set("Accept", "application/json")
		resp, err := client.Do(ctx, req)
		if err != nil {
			continue
		}
		if resp.StatusCode != http.StatusOK {
			// Error pages are not valid search JSON; don't feed them
			// to the decoder.
			_ = resp.Body.Close()
			continue
		}
		body, err := io.ReadAll(io.LimitReader(resp.Body, 256*1024))
		_ = resp.Body.Close()
		if err != nil {
			continue
		}
		var results googleDocsSearchResponse
		if err := json.Unmarshal(body, &results); err != nil {
			continue
		}
		// Fetch each discovered doc's plain-text export.
		for _, result := range results.Results {
			if err := ctx.Err(); err != nil {
				return err
			}
			if s.Limiters != nil {
				if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
					return err
				}
			}
			exportURL := result.URL + "/export?format=txt"
			docReq, err := http.NewRequestWithContext(ctx, http.MethodGet, exportURL, nil)
			if err != nil {
				continue
			}
			docResp, err := client.Do(ctx, docReq)
			if err != nil {
				continue
			}
			if docResp.StatusCode != http.StatusOK {
				// 404/login pages would otherwise be scanned for key
				// patterns and could yield false findings.
				_ = docResp.Body.Close()
				continue
			}
			docBody, err := io.ReadAll(io.LimitReader(docResp.Body, 256*1024))
			_ = docResp.Body.Close()
			if err != nil {
				continue
			}
			if ciLogKeyPattern.Match(docBody) {
				finding := recon.Finding{
					ProviderName: q,
					Source:       result.URL,
					SourceType:   "recon:googledocs",
					Confidence:   "medium",
					DetectedAt:   time.Now(),
				}
				// Respect cancellation while sending so a departed
				// consumer cannot block this goroutine forever.
				select {
				case out <- finding:
				case <-ctx.Done():
					return ctx.Err()
				}
			}
		}
	}
	return nil
}