feat(15-03): add Elasticsearch, Kibana, and Splunk ReconSource implementations
- ElasticsearchSource: POST _search API with query_string, parse hits._source
- KibanaSource: GET saved_objects/_find API with kbn-xsrf header
- SplunkSource: GET search/jobs/export API with newline-delimited JSON parsing
- All sources use ciLogKeyPattern for key detection
- Tests use httptest mocks for each API endpoint
This commit is contained in:
118
pkg/recon/sources/elasticsearch.go
Normal file
118
pkg/recon/sources/elasticsearch.go
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"golang.org/x/time/rate"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ElasticsearchSource searches exposed Elasticsearch instances for documents
// containing API keys. Many ES deployments are left unauthenticated on the
// internet, allowing full-text search across all indexed data.
type ElasticsearchSource struct {
	// BaseURL is the root URL of the target instance; when empty, Sweep
	// falls back to http://localhost:9200.
	BaseURL string
	// Registry supplies the provider definitions that BuildQueries turns
	// into search terms.
	Registry *providers.Registry
	// Limiters, when non-nil, throttles requests under this source's name.
	Limiters *recon.LimiterRegistry
	// Client performs the HTTP requests; a default client is created when nil.
	Client *Client
}
|
||||||
|
|
||||||
|
// Compile-time check that ElasticsearchSource satisfies recon.ReconSource.
var _ recon.ReconSource = (*ElasticsearchSource)(nil)

// Name returns the stable identifier used for rate limiting and reporting.
func (s *ElasticsearchSource) Name() string { return "elasticsearch" }

// RateLimit allows one request every two seconds.
func (s *ElasticsearchSource) RateLimit() rate.Limit { return rate.Every(2 * time.Second) }

// Burst permits up to three requests in a burst.
func (s *ElasticsearchSource) Burst() int { return 3 }

// RespectsRobots reports that this source ignores robots.txt.
func (s *ElasticsearchSource) RespectsRobots() bool { return false }

// Enabled reports that the source is always active regardless of config.
func (s *ElasticsearchSource) Enabled(_ recon.Config) bool { return true }
|
||||||
|
|
||||||
|
// esSearchResponse represents the Elasticsearch _search response envelope.
type esSearchResponse struct {
	// Hits mirrors the nested hits.hits array of the ES response body.
	Hits struct {
		Hits []esHit `json:"hits"`
	} `json:"hits"`
}

// esHit is a single search hit. Source is kept as raw JSON so Sweep can
// pattern-match the document text without decoding its schema.
type esHit struct {
	Index  string          `json:"_index"`
	ID     string          `json:"_id"`
	Source json.RawMessage `json:"_source"`
}
|
||||||
|
|
||||||
|
func (s *ElasticsearchSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
|
||||||
|
base := s.BaseURL
|
||||||
|
if base == "" {
|
||||||
|
base = "http://localhost:9200"
|
||||||
|
}
|
||||||
|
// If no explicit target was provided (still default) and query is not a URL, skip.
|
||||||
|
if base == "http://localhost:9200" && query != "" && !strings.HasPrefix(query, "http") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
client := s.Client
|
||||||
|
if client == nil {
|
||||||
|
client = NewClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
queries := BuildQueries(s.Registry, "elasticsearch")
|
||||||
|
if len(queries) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, q := range queries {
|
||||||
|
if err := ctx.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if s.Limiters != nil {
|
||||||
|
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
searchURL := fmt.Sprintf("%s/_search", base)
|
||||||
|
body := fmt.Sprintf(`{"query":{"query_string":{"query":"%s"}},"size":20}`, q)
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodPost, searchURL, bytes.NewBufferString(body))
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
resp, err := client.Do(ctx, req)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := io.ReadAll(io.LimitReader(resp.Body, 512*1024))
|
||||||
|
_ = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
var result esSearchResponse
|
||||||
|
if err := json.Unmarshal(data, &result); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, hit := range result.Hits.Hits {
|
||||||
|
src := string(hit.Source)
|
||||||
|
if ciLogKeyPattern.MatchString(src) {
|
||||||
|
out <- recon.Finding{
|
||||||
|
ProviderName: q,
|
||||||
|
Source: fmt.Sprintf("%s/%s/%s", base, hit.Index, hit.ID),
|
||||||
|
SourceType: "recon:elasticsearch",
|
||||||
|
Confidence: "medium",
|
||||||
|
DetectedAt: time.Now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
120
pkg/recon/sources/elasticsearch_test.go
Normal file
120
pkg/recon/sources/elasticsearch_test.go
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestElasticsearch_Name(t *testing.T) {
|
||||||
|
s := &ElasticsearchSource{}
|
||||||
|
if s.Name() != "elasticsearch" {
|
||||||
|
t.Fatalf("expected elasticsearch, got %s", s.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestElasticsearch_Enabled(t *testing.T) {
|
||||||
|
s := &ElasticsearchSource{}
|
||||||
|
if !s.Enabled(recon.Config{}) {
|
||||||
|
t.Fatal("ElasticsearchSource should always be enabled")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestElasticsearch_Sweep exercises the happy path: a mocked _search
// endpoint returns one hit whose _source contains an API-key-looking
// string, and Sweep must emit it as a recon:elasticsearch finding.
func TestElasticsearch_Sweep(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/_search", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{
			"hits": {
				"hits": [
					{
						"_index": "logs",
						"_id": "abc123",
						"_source": {
							"message": "api_key = sk-proj-ABCDEF1234567890abcdef",
							"level": "error"
						}
					}
				]
			}
		}`))
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	// One provider keyword so BuildQueries yields at least one query.
	reg := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})

	s := &ElasticsearchSource{
		BaseURL:  srv.URL,
		Registry: reg,
		Client:   NewClient(),
	}

	// Buffered channel so Sweep can emit without a concurrent reader.
	out := make(chan recon.Finding, 10)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	err := s.Sweep(ctx, "", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) == 0 {
		t.Fatal("expected at least one finding from Elasticsearch")
	}
	if findings[0].SourceType != "recon:elasticsearch" {
		t.Fatalf("expected recon:elasticsearch, got %s", findings[0].SourceType)
	}
}
|
||||||
|
|
||||||
|
// TestElasticsearch_Sweep_NoHits verifies that an empty hits array
// produces no findings and no error.
func TestElasticsearch_Sweep_NoHits(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/_search", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{"hits":{"hits":[]}}`))
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	reg := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})

	s := &ElasticsearchSource{
		BaseURL:  srv.URL,
		Registry: reg,
		Client:   NewClient(),
	}

	out := make(chan recon.Finding, 10)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	err := s.Sweep(ctx, "", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) != 0 {
		t.Fatalf("expected no findings, got %d", len(findings))
	}
}
|
||||||
114
pkg/recon/sources/kibana.go
Normal file
114
pkg/recon/sources/kibana.go
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"golang.org/x/time/rate"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
// KibanaSource searches exposed Kibana instances for API keys in saved objects
// such as dashboards, visualizations, and index patterns. Many Kibana instances
// are left unauthenticated, exposing the saved objects API.
type KibanaSource struct {
	// BaseURL is the root URL of the target instance; when empty, Sweep
	// falls back to http://localhost:5601.
	BaseURL string
	// Registry supplies the provider definitions that BuildQueries turns
	// into search terms.
	Registry *providers.Registry
	// Limiters, when non-nil, throttles requests under this source's name.
	Limiters *recon.LimiterRegistry
	// Client performs the HTTP requests; a default client is created when nil.
	Client *Client
}
|
||||||
|
|
||||||
|
// Compile-time check that KibanaSource satisfies recon.ReconSource.
var _ recon.ReconSource = (*KibanaSource)(nil)

// Name returns the stable identifier used for rate limiting and reporting.
func (s *KibanaSource) Name() string { return "kibana" }

// RateLimit allows one request every two seconds.
func (s *KibanaSource) RateLimit() rate.Limit { return rate.Every(2 * time.Second) }

// Burst permits up to three requests in a burst.
func (s *KibanaSource) Burst() int { return 3 }

// RespectsRobots reports that this source ignores robots.txt.
func (s *KibanaSource) RespectsRobots() bool { return false }

// Enabled reports that the source is always active regardless of config.
func (s *KibanaSource) Enabled(_ recon.Config) bool { return true }
|
||||||
|
|
||||||
|
// kibanaSavedObjectsResponse represents the Kibana saved objects API response.
type kibanaSavedObjectsResponse struct {
	SavedObjects []kibanaSavedObject `json:"saved_objects"`
}

// kibanaSavedObject is one saved object. Attributes is kept as raw JSON so
// Sweep can pattern-match the object text without decoding its schema.
type kibanaSavedObject struct {
	ID         string          `json:"id"`
	Type       string          `json:"type"`
	Attributes json.RawMessage `json:"attributes"`
}
|
||||||
|
|
||||||
|
func (s *KibanaSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
|
||||||
|
base := s.BaseURL
|
||||||
|
if base == "" {
|
||||||
|
base = "http://localhost:5601"
|
||||||
|
}
|
||||||
|
client := s.Client
|
||||||
|
if client == nil {
|
||||||
|
client = NewClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
queries := BuildQueries(s.Registry, "kibana")
|
||||||
|
if len(queries) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, q := range queries {
|
||||||
|
if err := ctx.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if s.Limiters != nil {
|
||||||
|
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Search saved objects (dashboards and visualizations).
|
||||||
|
searchURL := fmt.Sprintf(
|
||||||
|
"%s/api/saved_objects/_find?type=visualization&type=dashboard&search=%s&per_page=20",
|
||||||
|
base, url.QueryEscape(q),
|
||||||
|
)
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
req.Header.Set("kbn-xsrf", "true")
|
||||||
|
|
||||||
|
resp, err := client.Do(ctx, req)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := io.ReadAll(io.LimitReader(resp.Body, 512*1024))
|
||||||
|
_ = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
var result kibanaSavedObjectsResponse
|
||||||
|
if err := json.Unmarshal(data, &result); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, obj := range result.SavedObjects {
|
||||||
|
attrs := string(obj.Attributes)
|
||||||
|
if ciLogKeyPattern.MatchString(attrs) {
|
||||||
|
out <- recon.Finding{
|
||||||
|
ProviderName: q,
|
||||||
|
Source: fmt.Sprintf("%s/app/kibana#/%s/%s", base, obj.Type, obj.ID),
|
||||||
|
SourceType: "recon:kibana",
|
||||||
|
Confidence: "medium",
|
||||||
|
DetectedAt: time.Now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
123
pkg/recon/sources/kibana_test.go
Normal file
123
pkg/recon/sources/kibana_test.go
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestKibana_Name(t *testing.T) {
|
||||||
|
s := &KibanaSource{}
|
||||||
|
if s.Name() != "kibana" {
|
||||||
|
t.Fatalf("expected kibana, got %s", s.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestKibana_Enabled(t *testing.T) {
|
||||||
|
s := &KibanaSource{}
|
||||||
|
if !s.Enabled(recon.Config{}) {
|
||||||
|
t.Fatal("KibanaSource should always be enabled")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestKibana_Sweep exercises the happy path: the mocked _find endpoint
// enforces the kbn-xsrf header and returns one visualization whose
// attributes contain an API-key-looking string, which Sweep must emit
// as a recon:kibana finding.
func TestKibana_Sweep(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/api/saved_objects/_find", func(w http.ResponseWriter, r *http.Request) {
		// Verify kbn-xsrf header is present.
		if r.Header.Get("kbn-xsrf") == "" {
			http.Error(w, "missing kbn-xsrf", http.StatusBadRequest)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{
			"saved_objects": [
				{
					"id": "vis-001",
					"type": "visualization",
					"attributes": {
						"title": "API Usage",
						"config": "api_key = sk-proj-ABCDEF1234567890abcdef"
					}
				}
			]
		}`))
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	// One provider keyword so BuildQueries yields at least one query.
	reg := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})

	s := &KibanaSource{
		BaseURL:  srv.URL,
		Registry: reg,
		Client:   NewClient(),
	}

	// Buffered channel so Sweep can emit without a concurrent reader.
	out := make(chan recon.Finding, 10)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	err := s.Sweep(ctx, "", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) == 0 {
		t.Fatal("expected at least one finding from Kibana")
	}
	if findings[0].SourceType != "recon:kibana" {
		t.Fatalf("expected recon:kibana, got %s", findings[0].SourceType)
	}
}
|
||||||
|
|
||||||
|
// TestKibana_Sweep_NoFindings verifies that an empty saved_objects array
// produces no findings and no error.
func TestKibana_Sweep_NoFindings(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/api/saved_objects/_find", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{"saved_objects":[]}`))
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	reg := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})

	s := &KibanaSource{
		BaseURL:  srv.URL,
		Registry: reg,
		Client:   NewClient(),
	}

	out := make(chan recon.Finding, 10)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	err := s.Sweep(ctx, "", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) != 0 {
		t.Fatalf("expected no findings, got %d", len(findings))
	}
}
|
||||||
122
pkg/recon/sources/splunk.go
Normal file
122
pkg/recon/sources/splunk.go
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"golang.org/x/time/rate"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SplunkSource searches exposed Splunk instances for API keys in log data.
// Exposed Splunk Web interfaces may allow unauthenticated search via the
// REST API, especially in development or misconfigured environments.
type SplunkSource struct {
	// BaseURL is the root URL of the target instance; when empty, Sweep
	// falls back to https://localhost:8089 (the Splunk management port).
	BaseURL string
	// Registry supplies the provider definitions that BuildQueries turns
	// into search terms.
	Registry *providers.Registry
	// Limiters, when non-nil, throttles requests under this source's name.
	Limiters *recon.LimiterRegistry
	// Client performs the HTTP requests; a default client is created when nil.
	Client *Client
}
|
||||||
|
|
||||||
|
// Compile-time check that SplunkSource satisfies recon.ReconSource.
var _ recon.ReconSource = (*SplunkSource)(nil)

// Name returns the stable identifier used for rate limiting and reporting.
func (s *SplunkSource) Name() string { return "splunk" }

// RateLimit allows one request every three seconds.
func (s *SplunkSource) RateLimit() rate.Limit { return rate.Every(3 * time.Second) }

// Burst permits up to two requests in a burst.
func (s *SplunkSource) Burst() int { return 2 }

// RespectsRobots reports that this source ignores robots.txt.
func (s *SplunkSource) RespectsRobots() bool { return false }

// Enabled reports that the source is always active regardless of config.
func (s *SplunkSource) Enabled(_ recon.Config) bool { return true }
|
||||||
|
|
||||||
|
// splunkResult represents a single result row from Splunk search export.
type splunkResult struct {
	// Result holds the raw JSON of the "result" object; used as fallback
	// match text when Raw is empty.
	Result json.RawMessage `json:"result"`
	// Raw is the top-level _raw event text, preferred for matching.
	Raw string `json:"_raw"`
}
|
||||||
|
|
||||||
|
func (s *SplunkSource) Sweep(ctx context.Context, query string, out chan<- recon.Finding) error {
|
||||||
|
base := s.BaseURL
|
||||||
|
if base == "" {
|
||||||
|
base = "https://localhost:8089"
|
||||||
|
}
|
||||||
|
// If no explicit target was provided (still default) and query is not a URL, skip.
|
||||||
|
if base == "https://localhost:8089" && query != "" && !strings.HasPrefix(query, "http") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
client := s.Client
|
||||||
|
if client == nil {
|
||||||
|
client = NewClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
queries := BuildQueries(s.Registry, "splunk")
|
||||||
|
if len(queries) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, q := range queries {
|
||||||
|
if err := ctx.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if s.Limiters != nil {
|
||||||
|
if err := s.Limiters.Wait(ctx, s.Name(), s.RateLimit(), s.Burst(), false); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
searchURL := fmt.Sprintf(
|
||||||
|
"%s/services/search/jobs/export?search=%s&output_mode=json&count=20",
|
||||||
|
base, url.QueryEscape("search "+q),
|
||||||
|
)
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := client.Do(ctx, req)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := io.ReadAll(io.LimitReader(resp.Body, 512*1024))
|
||||||
|
_ = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Splunk export returns newline-delimited JSON objects.
|
||||||
|
for _, line := range strings.Split(string(data), "\n") {
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if line == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
var sr splunkResult
|
||||||
|
if err := json.Unmarshal([]byte(line), &sr); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
content := sr.Raw
|
||||||
|
if content == "" {
|
||||||
|
content = string(sr.Result)
|
||||||
|
}
|
||||||
|
|
||||||
|
if ciLogKeyPattern.MatchString(content) {
|
||||||
|
out <- recon.Finding{
|
||||||
|
ProviderName: q,
|
||||||
|
Source: fmt.Sprintf("%s/services/search/jobs/export", base),
|
||||||
|
SourceType: "recon:splunk",
|
||||||
|
Confidence: "medium",
|
||||||
|
DetectedAt: time.Now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
110
pkg/recon/sources/splunk_test.go
Normal file
110
pkg/recon/sources/splunk_test.go
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/providers"
|
||||||
|
"github.com/salvacybersec/keyhunter/pkg/recon"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestSplunk_Name(t *testing.T) {
|
||||||
|
s := &SplunkSource{}
|
||||||
|
if s.Name() != "splunk" {
|
||||||
|
t.Fatalf("expected splunk, got %s", s.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSplunk_Enabled(t *testing.T) {
|
||||||
|
s := &SplunkSource{}
|
||||||
|
if !s.Enabled(recon.Config{}) {
|
||||||
|
t.Fatal("SplunkSource should always be enabled")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestSplunk_Sweep exercises the happy path: the mocked export endpoint
// returns two newline-delimited JSON events, one containing an
// API-key-looking string, which Sweep must emit as a recon:splunk finding.
func TestSplunk_Sweep(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/services/search/jobs/export", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		// Splunk returns newline-delimited JSON.
		_, _ = w.Write([]byte(`{"result":{"_raw":"Setting secret_key = sk-proj-ABCDEF1234567890abcdef"},"_raw":"Setting secret_key = sk-proj-ABCDEF1234567890abcdef"}
{"result":{"_raw":"normal log line no keys here"},"_raw":"normal log line no keys here"}
`))
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	// One provider keyword so BuildQueries yields at least one query.
	reg := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})

	s := &SplunkSource{
		BaseURL:  srv.URL,
		Registry: reg,
		Client:   NewClient(),
	}

	// Buffered channel so Sweep can emit without a concurrent reader.
	out := make(chan recon.Finding, 10)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	err := s.Sweep(ctx, "", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) == 0 {
		t.Fatal("expected at least one finding from Splunk")
	}
	if findings[0].SourceType != "recon:splunk" {
		t.Fatalf("expected recon:splunk, got %s", findings[0].SourceType)
	}
}
|
||||||
|
|
||||||
|
// TestSplunk_Sweep_NoResults verifies that an empty export body produces
// no findings and no error.
func TestSplunk_Sweep_NoResults(t *testing.T) {
	mux := http.NewServeMux()
	mux.HandleFunc("/services/search/jobs/export", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(``))
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	reg := providers.NewRegistryFromProviders([]providers.Provider{
		{Name: "openai", Keywords: []string{"sk-proj-"}},
	})

	s := &SplunkSource{
		BaseURL:  srv.URL,
		Registry: reg,
		Client:   NewClient(),
	}

	out := make(chan recon.Finding, 10)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	err := s.Sweep(ctx, "", out)
	close(out)
	if err != nil {
		t.Fatalf("Sweep error: %v", err)
	}

	var findings []recon.Finding
	for f := range out {
		findings = append(findings, f)
	}
	if len(findings) != 0 {
		t.Fatalf("expected no findings, got %d", len(findings))
	}
}
|
||||||
Reference in New Issue
Block a user