3 Commits

Author SHA1 Message Date
dependabot[bot]
0b27804472 chore(deps): bump pypdf from 6.9.1 to 6.10.0
Bumps [pypdf](https://github.com/py-pdf/pypdf) from 6.9.1 to 6.10.0.
- [Release notes](https://github.com/py-pdf/pypdf/releases)
- [Changelog](https://github.com/py-pdf/pypdf/blob/main/CHANGELOG.md)
- [Commits](https://github.com/py-pdf/pypdf/compare/6.9.1...6.10.0)

---
updated-dependencies:
- dependency-name: pypdf
  dependency-version: 6.10.0
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-04-10 22:01:14 +00:00
STJ
38b2700553 feat: Migrate from Poetry to uv (#379) 2026-03-31 17:20:41 -07:00
alex s
e78c931e4e feat: Better source-aware testing (#391) 2026-03-31 11:53:49 -07:00
26 changed files with 6374 additions and 9268 deletions

View File

@@ -30,15 +30,15 @@ jobs:
with:
python-version: '3.12'
- uses: snok/install-poetry@v1
- uses: astral-sh/setup-uv@v5
- name: Build
shell: bash
run: |
poetry install --with dev
poetry run pyinstaller strix.spec --noconfirm
uv sync --frozen
uv run pyinstaller strix.spec --noconfirm
VERSION=$(poetry version -s)
VERSION=$(grep '^version' pyproject.toml | head -1 | sed 's/.*"\(.*\)"/\1/')
mkdir -p dist/release
if [[ "${{ runner.os }}" == "Windows" ]]; then

View File

@@ -31,6 +31,7 @@ repos:
- id: check-toml
- id: check-merge-conflict
- id: check-added-large-files
args: ['--maxkb=1024']
- id: debug-statements
- id: check-case-conflict
- id: check-docstring-first

View File

@@ -8,7 +8,7 @@ Thank you for your interest in contributing to Strix! This guide will help you g
- Python 3.12+
- Docker (running)
- Poetry (for dependency management)
- [uv](https://docs.astral.sh/uv/) (for dependency management)
- Git
### Local Development
@@ -24,8 +24,8 @@ Thank you for your interest in contributing to Strix! This guide will help you g
make setup-dev
# or manually:
poetry install --with=dev
poetry run pre-commit install
uv sync
uv run pre-commit install
```
3. **Configure your LLM provider**
@@ -36,7 +36,7 @@ Thank you for your interest in contributing to Strix! This guide will help you g
4. **Run Strix in development mode**
```bash
poetry run strix --target https://example.com
uv run strix --target https://example.com
```
## 📚 Contributing Skills

View File

@@ -22,38 +22,38 @@ help:
@echo " clean - Clean up cache files and artifacts"
install:
poetry install --only=main
uv sync --no-dev
dev-install:
poetry install --with=dev
uv sync
setup-dev: dev-install
poetry run pre-commit install
uv run pre-commit install
@echo "✅ Development environment setup complete!"
@echo "Run 'make check-all' to verify everything works correctly."
format:
@echo "🎨 Formatting code with ruff..."
poetry run ruff format .
uv run ruff format .
@echo "✅ Code formatting complete!"
lint:
@echo "🔍 Linting code with ruff..."
poetry run ruff check . --fix
uv run ruff check . --fix
@echo "📝 Running additional linting with pylint..."
poetry run pylint strix/ --score=no --reports=no
uv run pylint strix/ --score=no --reports=no
@echo "✅ Linting complete!"
type-check:
@echo "🔍 Type checking with mypy..."
poetry run mypy strix/
uv run mypy strix/
@echo "🔍 Type checking with pyright..."
poetry run pyright strix/
uv run pyright strix/
@echo "✅ Type checking complete!"
security:
@echo "🔒 Running security checks with bandit..."
poetry run bandit -r strix/ -c pyproject.toml
uv run bandit -r strix/ -c pyproject.toml
@echo "✅ Security checks complete!"
check-all: format lint type-check security
@@ -61,18 +61,18 @@ check-all: format lint type-check security
test:
@echo "🧪 Running tests..."
poetry run pytest -v
uv run pytest -v
@echo "✅ Tests complete!"
test-cov:
@echo "🧪 Running tests with coverage..."
poetry run pytest -v --cov=strix --cov-report=term-missing --cov-report=html
uv run pytest -v --cov=strix --cov-report=term-missing --cov-report=html
@echo "✅ Tests with coverage complete!"
@echo "📊 Coverage report generated in htmlcov/"
pre-commit:
@echo "🔧 Running pre-commit hooks..."
poetry run pre-commit run --all-files
uv run pre-commit run --all-files
@echo "✅ Pre-commit hooks complete!"
clean:

View File

@@ -70,11 +70,7 @@ USER root
RUN cp /app/certs/ca.crt /usr/local/share/ca-certificates/ca.crt && \
update-ca-certificates
RUN curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python3 - && \
ln -s /opt/poetry/bin/poetry /usr/local/bin/poetry && \
chmod +x /usr/local/bin/poetry && \
python3 -m venv /app/venv && \
chown -R pentester:pentester /app/venv /opt/poetry
RUN curl -LsSf https://astral.sh/uv/install.sh | env UV_INSTALL_DIR=/usr/local/bin sh
USER pentester
WORKDIR /tmp
@@ -171,9 +167,8 @@ RUN apt-get autoremove -y && \
apt-get autoclean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
ENV PATH="/home/pentester/go/bin:/home/pentester/.local/bin:/home/pentester/.npm-global/bin:/app/venv/bin:$PATH"
ENV VIRTUAL_ENV="/app/venv"
ENV POETRY_HOME="/opt/poetry"
ENV PATH="/home/pentester/go/bin:/home/pentester/.local/bin:/home/pentester/.npm-global/bin:/app/.venv/bin:$PATH"
ENV VIRTUAL_ENV="/app/.venv"
WORKDIR /app
@@ -198,17 +193,16 @@ ENV SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
RUN mkdir -p /workspace && chown -R pentester:pentester /workspace /app
COPY pyproject.toml poetry.lock ./
COPY pyproject.toml uv.lock ./
RUN echo "# Sandbox Environment" > README.md && mkdir -p strix && touch strix/__init__.py
USER pentester
RUN poetry install --no-root --without dev --extras sandbox
RUN poetry run playwright install chromium
RUN uv sync --frozen --no-dev --extra sandbox
RUN /app/.venv/bin/python -m playwright install chromium
RUN /app/venv/bin/pip install -r /home/pentester/tools/jwt_tool/requirements.txt && \
RUN uv pip install -r /home/pentester/tools/jwt_tool/requirements.txt && \
ln -s /home/pentester/tools/jwt_tool/jwt_tool.py /home/pentester/.local/bin/jwt_tool
RUN echo "# Sandbox Environment" > README.md
COPY strix/__init__.py strix/
COPY strix/config/ /app/strix/config/
COPY strix/utils/ /app/strix/utils/

View File

@@ -155,12 +155,11 @@ echo "Starting tool server..."
cd /app
export PYTHONPATH=/app
export STRIX_SANDBOX_MODE=true
export POETRY_VIRTUALENVS_CREATE=false
export TOOL_SERVER_TIMEOUT="${STRIX_SANDBOX_EXECUTION_TIMEOUT:-120}"
TOOL_SERVER_LOG="/tmp/tool_server.log"
sudo -E -u pentester \
poetry run python -m strix.runtime.tool_server \
/app/.venv/bin/python -m strix.runtime.tool_server \
--token="$TOOL_SERVER_TOKEN" \
--host=0.0.0.0 \
--port="$TOOL_SERVER_PORT" \

View File

@@ -9,7 +9,7 @@ description: "Contribute to Strix development"
- Python 3.12+
- Docker (running)
- Poetry
- [uv](https://docs.astral.sh/uv/)
- Git
### Local Development
@@ -26,8 +26,8 @@ description: "Contribute to Strix development"
make setup-dev
# or manually:
poetry install --with=dev
poetry run pre-commit install
uv sync
uv run pre-commit install
```
</Step>
<Step title="Configure LLM">
@@ -38,7 +38,7 @@ description: "Contribute to Strix development"
</Step>
<Step title="Run Strix">
```bash
poetry run strix --target https://example.com
uv run strix --target https://example.com
```
</Step>
</Steps>

8794
poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +1,13 @@
[tool.poetry]
[project]
name = "strix-agent"
version = "0.8.3"
description = "Open-source AI Hackers for your apps"
authors = ["Strix <hi@usestrix.com>"]
readme = "README.md"
license = "Apache-2.0"
requires-python = ">=3.12"
authors = [
{ name = "Strix", email = "hi@usestrix.com" },
]
keywords = [
"cybersecurity",
"security",
@@ -29,81 +32,62 @@ classifiers = [
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
]
packages = [
{ include = "strix", format = ["sdist", "wheel"] }
]
include = [
"LICENSE",
"README.md",
"strix/agents/**/*.jinja",
"strix/skills/**/*.md",
"strix/**/*.xml",
"strix/**/*.tcss"
dependencies = [
"litellm[proxy]>=1.81.1,<1.82.0",
"tenacity>=9.0.0",
"pydantic[email]>=2.11.3",
"rich",
"docker>=7.1.0",
"textual>=6.0.0",
"xmltodict>=0.13.0",
"requests>=2.32.0",
"cvss>=3.2",
"traceloop-sdk>=0.53.0",
"opentelemetry-exporter-otlp-proto-http>=1.40.0",
"scrubadub>=2.0.1",
"defusedxml>=0.7.1",
]
[tool.poetry.scripts]
[project.scripts]
strix = "strix.interface.main:main"
[tool.poetry.dependencies]
python = "^3.12"
# Core CLI dependencies
litellm = { version = "~1.81.1", extras = ["proxy"] }
tenacity = "^9.0.0"
pydantic = {extras = ["email"], version = "^2.11.3"}
rich = "*"
docker = "^7.1.0"
textual = "^4.0.0"
xmltodict = "^0.13.0"
requests = "^2.32.0"
cvss = "^3.2"
traceloop-sdk = "^0.53.0"
opentelemetry-exporter-otlp-proto-http = "^1.40.0"
scrubadub = "^2.0.1"
[project.optional-dependencies]
vertex = ["google-cloud-aiplatform>=1.38"]
sandbox = [
"fastapi",
"uvicorn",
"ipython>=9.3.0",
"openhands-aci>=0.3.0",
"playwright>=1.48.0",
"gql[requests]>=3.5.3",
"pyte>=0.8.1",
"libtmux>=0.46.2",
"numpydoc>=1.8.0",
]
# Optional LLM provider dependencies
google-cloud-aiplatform = { version = ">=1.38", optional = true }
# Sandbox-only dependencies (only needed inside Docker container)
fastapi = { version = "*", optional = true }
uvicorn = { version = "*", optional = true }
ipython = { version = "^9.3.0", optional = true }
openhands-aci = { version = "^0.3.0", optional = true }
playwright = { version = "^1.48.0", optional = true }
gql = { version = "^3.5.3", extras = ["requests"], optional = true }
pyte = { version = "^0.8.1", optional = true }
libtmux = { version = "^0.46.2", optional = true }
numpydoc = { version = "^1.8.0", optional = true }
defusedxml = "^0.7.1"
[tool.poetry.extras]
vertex = ["google-cloud-aiplatform"]
sandbox = ["fastapi", "uvicorn", "ipython", "openhands-aci", "playwright", "gql", "pyte", "libtmux", "numpydoc"]
[tool.poetry.group.dev.dependencies]
# Type checking and static analysis
mypy = "^1.16.0"
ruff = "^0.11.13"
pyright = "^1.1.401"
pylint = "^3.3.7"
bandit = "^1.8.3"
# Testing
pytest = "^8.4.0"
pytest-asyncio = "^1.0.0"
pytest-cov = "^6.1.1"
pytest-mock = "^3.14.1"
# Development tools
pre-commit = "^4.2.0"
black = "^25.1.0"
isort = "^6.0.1"
# Build tools
pyinstaller = { version = "^6.17.0", python = ">=3.12,<3.15" }
[dependency-groups]
dev = [
"mypy>=1.16.0",
"ruff>=0.11.13",
"pyright>=1.1.401",
"pylint>=3.3.7",
"bandit>=1.8.3",
"pytest>=8.4.0",
"pytest-asyncio>=1.0.0",
"pytest-cov>=6.1.1",
"pytest-mock>=3.14.1",
"pre-commit>=4.2.0",
"black>=25.1.0",
"isort>=6.0.1",
"pyinstaller>=6.17.0; python_version >= '3.12' and python_version < '3.15'",
]
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["strix"]
# ============================================================================
# Type Checking Configuration

View File

@@ -33,23 +33,23 @@ echo -e "${YELLOW}Platform:${NC} $OS_NAME-$ARCH_NAME"
cd "$PROJECT_ROOT"
if ! command -v poetry &> /dev/null; then
echo -e "${RED}Error: Poetry is not installed${NC}"
echo "Please install Poetry first: https://python-poetry.org/docs/#installation"
if ! command -v uv &> /dev/null; then
echo -e "${RED}Error: uv is not installed${NC}"
echo "Please install uv first: https://docs.astral.sh/uv/getting-started/installation/"
exit 1
fi
echo -e "\n${BLUE}Installing dependencies...${NC}"
poetry install --with dev
uv sync --frozen
VERSION=$(poetry version -s)
VERSION=$(grep '^version' pyproject.toml | head -1 | sed 's/.*"\(.*\)"/\1/')
echo -e "${YELLOW}Version:${NC} $VERSION"
echo -e "\n${BLUE}Cleaning previous builds...${NC}"
rm -rf build/ dist/
echo -e "\n${BLUE}Building binary with PyInstaller...${NC}"
poetry run pyinstaller strix.spec --noconfirm
uv run pyinstaller strix.spec --noconfirm
RELEASE_DIR="dist/release"
mkdir -p "$RELEASE_DIR"

16
scripts/docker.sh Executable file
View File

@@ -0,0 +1,16 @@
#!/bin/bash
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
IMAGE="strix-sandbox"
TAG="${1:-dev}"
echo "Building $IMAGE:$TAG ..."
docker build \
-f "$PROJECT_ROOT/containers/Dockerfile" \
-t "$IMAGE:$TAG" \
"$PROJECT_ROOT"
echo "Done: $IMAGE:$TAG"

View File

@@ -116,8 +116,8 @@ WHITE-BOX TESTING (code provided):
- Static coverage target per repository: run one `semgrep` pass, one secrets pass (`gitleaks` and/or `trufflehog`), one `trivy fs` pass, and one AST-structural pass (`sg` and/or Tree-sitter); if any are skipped, record why in the shared wiki
- Keep AST artifacts bounded and high-signal: scope to relevant paths/hypotheses, avoid whole-repo generic function dumps
- AST target selection rule: build `sg-targets.txt` from `semgrep.json` scope first (`paths.scanned`, fallback to unique `results[].path`), then run `xargs ... sg run` against that file list. Only use path-heuristic fallback if semgrep scope is unavailable, and log fallback reason in the wiki.
- Shared memory: Use notes as shared working memory; discover wiki notes with `list_notes`, read `wiki:overview` first when available, then read `wiki:security` via `get_note(note_id=...)` before analysis
- Before `agent_finish`/`finish_scan`, update `wiki:security` with scanner summaries, key routes/sinks, and dynamic follow-up plan
- Shared memory: Use notes as shared working memory; discover wiki notes with `list_notes`, then read the selected one via `get_note(note_id=...)` before analysis
- Before `agent_finish`/`finish_scan`, update the shared repo wiki with scanner summaries, key routes/sinks, and dynamic follow-up plan
- Dynamic: Run the application and test live to validate exploitability
- NEVER rely solely on static code analysis when dynamic validation is possible
- Begin with fast source triage and dynamic run preparation in parallel; use static findings to prioritize live testing.
@@ -484,7 +484,7 @@ PROXY & INTERCEPTION:
- Ignore Caido proxy-generated 50x HTML error pages; these are proxy issues (might happen when requesting a wrong host or SSL/TLS issues, etc).
PROGRAMMING:
- Python 3, Poetry, Go, Node.js/npm
- Python 3, uv, Go, Node.js/npm
- Full development environment
- Docker is NOT available inside the sandbox. Do not run docker; rely on provided tools to run locally.
- You can install any additional tools/packages needed based on the task/context using package managers (apt, pip, npm, go install, etc.)

View File

@@ -1077,7 +1077,7 @@ class StrixTUIApp(App): # type: ignore[misc]
combined.append("\n")
StrixTUIApp._append_renderable(combined, sub)
else:
inner = getattr(item, "renderable", None)
inner = getattr(item, "content", None) or getattr(item, "renderable", None)
if inner is not None:
StrixTUIApp._append_renderable(combined, inner)
else:
@@ -1171,7 +1171,7 @@ class StrixTUIApp(App): # type: ignore[misc]
renderer = get_tool_renderer(tool_name)
if renderer:
widget = renderer.render(tool_data)
return widget.renderable
return widget.content
return self._render_default_streaming_tool(tool_name, args, is_complete)
@@ -1709,7 +1709,7 @@ class StrixTUIApp(App): # type: ignore[misc]
if renderer:
widget = renderer.render(tool_data)
return widget.renderable
return widget.content
text = Text()

View File

@@ -44,16 +44,14 @@ Coverage target per repository:
## Wiki Note Requirement (Source Map)
When source is present, maintain two stable wiki notes per repository and keep them current:
- `wiki:overview` for architecture/source-map context
- `wiki:security` for scanner and validation deltas
When source is present, maintain one wiki note per repository and keep it current.
Operational rules:
- At task start, call `list_notes` with `category=wiki`; read `wiki:overview` first, then `wiki:security` via `get_note(note_id=...)`.
- If wiki notes are missing, create them with `create_note`, `category=wiki`, and tags including `wiki:overview` or `wiki:security`.
- Update existing notes via `update_note`; avoid creating duplicates.
- Child agents should read both notes first, then extend with new evidence from their scope.
- Before calling `agent_finish`, each source-focused child agent should append a short delta update to `wiki:security` (scanner outputs, route/sink map deltas, dynamic follow-ups).
- At task start, call `list_notes` with `category=wiki`, then read the selected wiki with `get_note(note_id=...)`.
- If no repo wiki exists, create one with `create_note` and `category=wiki`.
- Update the same wiki via `update_note`; avoid creating duplicate wiki notes for the same repo.
- Child agents should read wiki notes first via `get_note`, then extend with new evidence from their scope.
- Before calling `agent_finish`, each source-focused child agent should append a short delta update to the shared repo wiki (scanner outputs, route/sink map deltas, dynamic follow-ups).
Recommended sections:
- Architecture overview

View File

@@ -19,12 +19,12 @@ Before scanning, check shared wiki memory:
```text
1) list_notes(category="wiki")
2) get_note(note_id=...) for `wiki:overview` first, then `wiki:security`
3) Reuse matching repo wiki notes if present
4) create_note(category="wiki") only if missing (with tags `wiki:overview` / `wiki:security`)
2) get_note(note_id=...) for the selected repo wiki before analysis
3) Reuse matching repo wiki note if present
4) create_note(category="wiki") only if missing
```
After every major source-analysis batch, update `wiki:security` with `update_note` so other agents can reuse your latest map.
After every major source-analysis batch, update the same repo wiki note with `update_note` so other agents can reuse your latest map.
## Baseline Coverage Bundle (Recommended)
@@ -74,7 +74,7 @@ trivy fs --scanners vuln,misconfig --timeout 30m --offline-scan \
--format json --output "$ART/trivy-fs.json" . || true
```
If one tool is skipped or fails, record that in `wiki:security` along with the reason.
If one tool is skipped or fails, record that in the shared wiki note along with the reason.
## Semgrep First Pass
@@ -143,7 +143,7 @@ trivy fs --scanners vuln,misconfig --timeout 30m --offline-scan \
## Wiki Update Template
Keep `wiki:overview` and `wiki:security` per repository. Update these sections in `wiki:security`:
Keep one wiki note per repository and update these sections:
```text
## Architecture
@@ -164,4 +164,4 @@ Before `agent_finish`, make one final `update_note` call to capture:
- Do not treat scanner output as final truth.
- Do not spend full cycles on low-signal pattern matches.
- Do not report source-only findings without validation evidence.
- Do not create duplicate `wiki:overview` or `wiki:security` notes for the same repository.
- Do not create multiple wiki notes for the same repository when one already exists.

View File

@@ -15,7 +15,7 @@ Thorough understanding before exploitation. Test every parameter, every endpoint
**Whitebox (source available)**
- Map every file, module, and code path in the repository
- Load and maintain shared `wiki` notes from the start (`list_notes(category="wiki")`, then `get_note(note_id=...)` for `wiki:overview` and `wiki:security`), then continuously update `wiki:security`
- Load and maintain shared `wiki` notes from the start (`list_notes(category="wiki")` then `get_note(note_id=...)`), then continuously update one repo note
- Start with broad source-aware triage (`semgrep`, `ast-grep`, `gitleaks`, `trufflehog`, `trivy fs`) and use outputs to drive deep review
- Execute at least one structural AST pass (`sg` and/or Tree-sitter) per repository and store artifacts for reuse
- Keep AST artifacts bounded and query-driven (target relevant paths/sinks first; avoid whole-repo generic function dumps)
@@ -31,7 +31,7 @@ Thorough understanding before exploitation. Test every parameter, every endpoint
- Review file handling: upload, download, processing
- Understand the deployment model and infrastructure assumptions
- Check all dependency versions and repository risks against CVE/misconfiguration data
- Before final completion, update `wiki:security` with scanner summary + dynamic follow-ups
- Before final completion, update the shared repo wiki with scanner summary + dynamic follow-ups
**Blackbox (no source)**
- Exhaustive subdomain enumeration with multiple sources and tools

View File

@@ -15,7 +15,7 @@ Optimize for fast feedback on critical security issues. Skip exhaustive enumerat
**Whitebox (source available)**
- Focus on recent changes: git diffs, new commits, modified files—these are most likely to contain fresh bugs
- Read existing `wiki` notes first (`list_notes(category="wiki")`, then `get_note(note_id=...)` for `wiki:overview` and `wiki:security`) to avoid remapping from scratch
- Read existing `wiki` notes first (`list_notes(category="wiki")` then `get_note(note_id=...)`) to avoid remapping from scratch
- Run a fast static triage on changed files first (`semgrep`, then targeted `sg` queries)
- Run at least one lightweight AST pass (`sg` or Tree-sitter) so structural mapping is not skipped
- Keep AST commands tightly scoped to changed or high-risk paths; avoid broad repository-wide pattern dumps
@@ -23,7 +23,7 @@ Optimize for fast feedback on critical security issues. Skip exhaustive enumerat
- Identify security-sensitive patterns in changed code: auth checks, input handling, database queries, file operations
- Trace user input through modified code paths
- Check if security controls were modified or bypassed
- Before completion, update `wiki:security` with what changed and what needs dynamic follow-up
- Before completion, update the shared repo wiki with what changed and what needs dynamic follow-up
**Blackbox (no source)**
- Map authentication and critical user flows

View File

@@ -15,7 +15,7 @@ Systematic testing across the full attack surface. Understand the application be
**Whitebox (source available)**
- Map codebase structure: modules, entry points, routing
- Start by loading existing `wiki` notes (`list_notes(category="wiki")`, then `get_note(note_id=...)` for `wiki:overview` and `wiki:security`) and update `wiki:security` as mapping evolves
- Start by loading existing `wiki` notes (`list_notes(category="wiki")` then `get_note(note_id=...)`) and update one shared repo note as mapping evolves
- Run `semgrep` first-pass triage to prioritize risky flows before deep manual review
- Run at least one AST-structural mapping pass (`sg` and/or Tree-sitter), then use outputs for route, sink, and trust-boundary mapping
- Keep AST output bounded to relevant paths and hypotheses; avoid whole-repo generic function dumps
@@ -25,7 +25,7 @@ Systematic testing across the full attack surface. Understand the application be
- Analyze database interactions and ORM usage
- Check dependencies and repo risks with `trivy fs`, `gitleaks`, and `trufflehog`
- Understand the data model and sensitive data locations
- Before completion, update `wiki:security` with source findings summary and dynamic validation next steps
- Before completion, update the shared repo wiki with source findings summary and dynamic validation next steps
**Blackbox (no source)**
- Crawl application thoroughly, interact with every feature

View File

@@ -1,9 +1,10 @@
import json
import logging
import threading
from collections.abc import Callable
from datetime import UTC, datetime
from pathlib import Path
from typing import Any, Callable, Optional
from typing import Any, Optional
from uuid import uuid4
from opentelemetry import trace
@@ -36,6 +37,7 @@ _OTEL_BOOTSTRAP_LOCK = threading.Lock()
_OTEL_BOOTSTRAPPED = False
_OTEL_REMOTE_ENABLED = False
def get_global_tracer() -> Optional["Tracer"]:
return _global_tracer

View File

@@ -124,7 +124,6 @@ def iso_from_unix_ns(unix_ns: int | None) -> str | None:
return None
def get_events_write_lock(output_path: Path) -> threading.Lock:
path_key = str(output_path.resolve(strict=False))
with _EVENTS_FILE_LOCKS_LOCK:

View File

@@ -44,32 +44,7 @@ def _extract_repo_tags(agent_state: Any | None) -> set[str]:
return repo_tags
def _extract_wiki_kind(note: dict[str, Any]) -> str:
note_kind = str(note.get("wiki_kind") or "").strip().lower()
if note_kind in {"overview", "security", "general"}:
return note_kind
note_tags = note.get("tags") or []
if isinstance(note_tags, list):
normalized_tags = {str(tag).strip().lower() for tag in note_tags if str(tag).strip()}
if "wiki:overview" in normalized_tags:
return "overview"
if "wiki:security" in normalized_tags:
return "security"
title = str(note.get("title") or "").lower()
if "overview" in title or "architecture" in title:
return "overview"
if "security" in title or "vuln" in title or "finding" in title:
return "security"
return "general"
def _load_primary_wiki_note(
agent_state: Any | None = None,
preferred_kind: str | None = None,
allow_kind_fallback: bool = True,
) -> dict[str, Any] | None:
def _load_primary_wiki_note(agent_state: Any | None = None) -> dict[str, Any] | None:
try:
from strix.tools.notes.notes_actions import get_note, list_notes
@@ -81,32 +56,19 @@ def _load_primary_wiki_note(
if not notes:
return None
candidate_notes = notes
selected_note_id = None
repo_tags = _extract_repo_tags(agent_state)
if repo_tags:
tagged_notes = []
for note in notes:
note_tags = note.get("tags") or []
if not isinstance(note_tags, list):
continue
normalized_note_tags = {str(tag).strip().lower() for tag in note_tags if str(tag).strip()}
if normalized_note_tags.intersection(repo_tags):
tagged_notes.append(note)
if tagged_notes:
candidate_notes = tagged_notes
normalized_kind = (preferred_kind or "").strip().lower()
if normalized_kind in {"overview", "security", "general"}:
for note in candidate_notes:
if _extract_wiki_kind(note) == normalized_kind:
selected_note_id = note.get("note_id")
break
if not selected_note_id and (not normalized_kind or allow_kind_fallback):
selected_note_id = candidate_notes[0].get("note_id")
note_id = selected_note_id
note_id = selected_note_id or notes[0].get("note_id")
if not isinstance(note_id, str) or not note_id:
return None
@@ -128,44 +90,26 @@ def _inject_wiki_context_for_whitebox(agent_state: Any) -> None:
if not _is_whitebox_agent(agent_state.agent_id):
return
overview_note = _load_primary_wiki_note(
agent_state,
preferred_kind="overview",
allow_kind_fallback=False,
)
security_note = _load_primary_wiki_note(
agent_state,
preferred_kind="security",
allow_kind_fallback=True,
)
wiki_note = _load_primary_wiki_note(agent_state)
if not wiki_note:
return
notes_to_embed: list[tuple[str, dict[str, Any]]] = []
if isinstance(overview_note, dict):
notes_to_embed.append(("overview", overview_note))
if isinstance(security_note, dict):
overview_note_id = str(overview_note.get("note_id")) if isinstance(overview_note, dict) else ""
security_note_id = str(security_note.get("note_id"))
if not overview_note_id or overview_note_id != security_note_id:
notes_to_embed.append(("security", security_note))
title = str(wiki_note.get("title") or "repo wiki")
content = str(wiki_note.get("content") or "").strip()
if not content:
return
max_chars = 4000
for wiki_kind, note in notes_to_embed:
title = str(note.get("title") or "repo wiki")
content = str(note.get("content") or "").strip()
if not content:
continue
truncated_content = content[:max_chars]
suffix = "\n\n[truncated for context size]" if len(content) > max_chars else ""
agent_state.add_message(
"user",
(
f"<shared_repo_wiki type=\"{wiki_kind}\" title=\"{title}\">\n"
f"{truncated_content}{suffix}\n"
"</shared_repo_wiki>"
),
)
truncated_content = content[:max_chars]
suffix = "\n\n[truncated for context size]" if len(content) > max_chars else ""
agent_state.add_message(
"user",
(
f"<shared_repo_wiki title=\"{title}\">\n"
f"{truncated_content}{suffix}\n"
"</shared_repo_wiki>"
),
)
def _append_wiki_update_on_finish(
@@ -181,11 +125,7 @@ def _append_wiki_update_on_finish(
try:
from strix.tools.notes.notes_actions import append_note_content
note = _load_primary_wiki_note(
agent_state,
preferred_kind="security",
allow_kind_fallback=True,
)
note = _load_primary_wiki_note(agent_state)
if not note:
return
@@ -239,10 +179,10 @@ def _run_agent_in_thread(
wiki_memory_instruction = ""
if getattr(getattr(agent, "llm_config", None), "is_whitebox", False):
wiki_memory_instruction = (
'\n - White-box memory (recommended): call list_notes(category="wiki"), read '
"wiki:overview first when available, then wiki:security via get_note(note_id=...) before substantive work (including terminal scans)"
"\n - Prefer two stable wiki notes per repo: one tagged wiki:overview and one tagged wiki:security; avoid duplicates"
"\n - Before agent_finish, call list_notes(category=\"wiki\") + get_note(note_id=...) again, then append a short scope delta via update_note to wiki:security (new routes/sinks, scanner results, dynamic follow-ups)"
'\n - White-box memory (recommended): call list_notes(category="wiki") and then '
"get_note(note_id=...) before substantive work (including terminal scans)"
"\n - Reuse one repo wiki note where possible and avoid duplicates"
"\n - Before agent_finish, call list_notes(category=\"wiki\") + get_note(note_id=...) again, then append a short scope delta via update_note (new routes/sinks, scanner results, dynamic follow-ups)"
"\n - If terminal output contains `command not found` or shell parse errors, correct and rerun before using the result"
"\n - Use ASCII-only shell commands; if a command includes unexpected non-ASCII characters, rerun with a clean ASCII command"
"\n - Keep AST artifacts bounded: target relevant paths and avoid whole-repo generic function dumps"
@@ -442,10 +382,10 @@ def create_agent(
"keep artifacts bounded and skip forced AST steps for purely dynamic validation tasks.\n"
"- Keep AST output bounded: scope to relevant paths/files, avoid whole-repo "
"generic function patterns, and cap artifact size.\n"
'- Use shared wiki memory by calling list_notes(category="wiki"), reading wiki:overview first '
"then wiki:security via get_note(note_id=...).\n"
'- Use shared wiki memory by calling list_notes(category="wiki") then '
"get_note(note_id=...).\n"
'- Before agent_finish, call list_notes(category="wiki") + get_note(note_id=...) '
"again, and append updates to wiki:security.\n"
"again, reuse one repo wiki, and call update_note.\n"
"- If terminal output contains `command not found` or shell parse errors, "
"correct and rerun before using the result."
)

View File

@@ -15,28 +15,6 @@ _loaded_notes_run_dir: str | None = None
_DEFAULT_CONTENT_PREVIEW_CHARS = 280
def _note_tag_set(note: dict[str, Any]) -> set[str]:
tags = note.get("tags", [])
if not isinstance(tags, list):
return set()
return {str(tag).strip().lower() for tag in tags if str(tag).strip()}
def _infer_wiki_kind(note: dict[str, Any]) -> str:
tag_set = _note_tag_set(note)
if "wiki:overview" in tag_set:
return "overview"
if "wiki:security" in tag_set:
return "security"
title = str(note.get("title", "")).lower()
if "overview" in title or "architecture" in title:
return "overview"
if "security" in title or "vuln" in title or "finding" in title:
return "security"
return "general"
def _get_run_dir() -> Path | None:
try:
from strix.telemetry.tracer import get_global_tracer
@@ -129,7 +107,6 @@ def _ensure_notes_loaded() -> None:
for note_id, note in _notes_storage.items():
if note.get("category") == "wiki":
_persist_wiki_note(note_id, note)
_persist_wiki_index()
except OSError:
pass
@@ -156,13 +133,6 @@ def _get_wiki_directory() -> Path | None:
return wiki_dir
def _get_wiki_index_path() -> Path | None:
wiki_dir = _get_wiki_directory()
if not wiki_dir:
return None
return wiki_dir / "index.json"
def _get_wiki_note_path(note_id: str, note: dict[str, Any]) -> Path | None:
wiki_dir = _get_wiki_directory()
if not wiki_dir:
@@ -197,34 +167,6 @@ def _persist_wiki_note(note_id: str, note: dict[str, Any]) -> None:
wiki_path.write_text(content, encoding="utf-8")
def _persist_wiki_index() -> None:
index_path = _get_wiki_index_path()
if not index_path:
return
notes: list[dict[str, Any]] = []
for note_id, note in _notes_storage.items():
if note.get("category") != "wiki":
continue
wiki_path = _get_wiki_note_path(note_id, note)
notes.append(
{
"note_id": note_id,
"title": str(note.get("title", "")),
"wiki_kind": _infer_wiki_kind(note),
"tags": note.get("tags", []),
"created_at": note.get("created_at", ""),
"updated_at": note.get("updated_at", ""),
"wiki_filename": note.get("wiki_filename", ""),
"wiki_path": wiki_path.name if wiki_path else "",
}
)
notes.sort(key=lambda item: item.get("updated_at", ""), reverse=True)
payload = {"generated_at": datetime.now(UTC).isoformat(), "notes": notes}
index_path.write_text(f"{json.dumps(payload, ensure_ascii=True, indent=2)}\n", encoding="utf-8")
def _remove_wiki_note(note_id: str, note: dict[str, Any]) -> None:
wiki_path = _get_wiki_note_path(note_id, note)
if not wiki_path:
@@ -284,9 +226,6 @@ def _to_note_listing_entry(
if isinstance(wiki_filename, str) and wiki_filename:
entry["wiki_filename"] = wiki_filename
if note.get("category") == "wiki":
entry["wiki_kind"] = _infer_wiki_kind(note)
content = str(note.get("content", ""))
if include_content:
entry["content"] = content
@@ -351,7 +290,6 @@ def create_note( # noqa: PLR0911
_append_note_event("create", note_id, note)
if category == "wiki":
_persist_wiki_note(note_id, note)
_persist_wiki_index()
except (ValueError, TypeError) as e:
return {"success": False, "error": f"Failed to create note: {e}", "note_id": None}
@@ -418,8 +356,6 @@ def get_note(note_id: str) -> dict[str, Any]:
note_with_id = note.copy()
note_with_id["note_id"] = note_id
if note.get("category") == "wiki":
note_with_id["wiki_kind"] = _infer_wiki_kind(note)
except (ValueError, TypeError) as e:
return {
@@ -484,7 +420,6 @@ def update_note(
_append_note_event("update", note_id, note)
if note.get("category") == "wiki":
_persist_wiki_note(note_id, note)
_persist_wiki_index()
return {
"success": True,
@@ -508,13 +443,10 @@ def delete_note(note_id: str) -> dict[str, Any]:
note = _notes_storage[note_id]
note_title = note["title"]
is_wiki = note.get("category") == "wiki"
if is_wiki:
if note.get("category") == "wiki":
_remove_wiki_note(note_id, note)
del _notes_storage[note_id]
_append_note_event("delete", note_id)
if is_wiki:
_persist_wiki_index()
except (ValueError, TypeError) as e:
return {"success": False, "error": f"Failed to delete note: {e}"}

View File

@@ -4,7 +4,6 @@
<details>Use this tool for documenting discoveries, observations, methodology notes, and questions.
This is your personal and shared run memory for recording information you want to remember or reference later.
Use category "wiki" for repository source maps shared across agents in the same run.
For Codewiki patterns, prefer wiki tags `wiki:overview` and `wiki:security` for stable note roles.
For tracking actionable tasks, use the todo tool instead.</details>
<parameters>
<parameter name="title" type="string" required="true">
@@ -110,7 +109,7 @@ The /api/internal/* endpoints are high priority as they appear to lack authentic
</parameter>
</parameters>
<returns type="Dict[str, Any]">
<description>Response containing: - notes: List of matching notes (metadata + optional content/content_preview; wiki entries include wiki_kind) - total_count: Total number of notes found</description>
<description>Response containing: - notes: List of matching notes (metadata + optional content/content_preview) - total_count: Total number of notes found</description>
</returns>
<examples>
# List all findings

View File

@@ -296,120 +296,3 @@ def test_load_primary_wiki_note_prefers_repo_tag_match(monkeypatch) -> None:
assert note is not None
assert note["note_id"] == "wiki-target"
assert selected_note_ids == ["wiki-target"]
def test_load_primary_wiki_note_prefers_requested_wiki_kind(monkeypatch) -> None:
    """_load_primary_wiki_note must honor ``preferred_kind`` when choosing notes.

    Two wiki notes are listed (security-tagged first, overview-tagged second);
    the loader must fetch the overview note when asked for "overview" and the
    security note when asked for "security", regardless of listing order.
    """
    fetched_note_ids: list[str] = []

    def fake_list_notes(category=None):
        # The loader is expected to restrict its listing to the wiki category.
        assert category == "wiki"
        listing = [
            {"note_id": "wiki-security", "tags": ["repo:appsmith", "wiki:security"]},
            {"note_id": "wiki-overview", "tags": ["repo:appsmith", "wiki:overview"]},
        ]
        return {"success": True, "notes": listing, "total_count": 2}

    def fake_get_note(note_id: str):
        fetched_note_ids.append(note_id)
        note_payload = {
            "note_id": note_id,
            "title": "Repo Wiki",
            "content": "content",
        }
        return {"success": True, "note": note_payload}

    monkeypatch.setattr("strix.tools.notes.notes_actions.list_notes", fake_list_notes)
    monkeypatch.setattr("strix.tools.notes.notes_actions.get_note", fake_get_note)

    agent_state = SimpleNamespace(task="analyze /workspace/appsmith")
    overview_note = agents_graph_actions._load_primary_wiki_note(
        agent_state, preferred_kind="overview", allow_kind_fallback=False
    )
    security_note = agents_graph_actions._load_primary_wiki_note(
        agent_state, preferred_kind="security", allow_kind_fallback=True
    )

    assert overview_note is not None
    assert overview_note["note_id"] == "wiki-overview"
    assert security_note is not None
    assert security_note["note_id"] == "wiki-security"
    # Exactly one fetch per request, in request order.
    assert fetched_note_ids == ["wiki-overview", "wiki-security"]
def test_agent_finish_prefers_security_wiki_for_append(monkeypatch) -> None:
    """agent_finish on a whitebox child agent should append its summary to the
    security-kind wiki note rather than the overview note.
    """
    monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")
    # Reset every module-level agent registry so the test starts from a clean graph.
    agents_graph_actions._agent_graph["nodes"].clear()
    agents_graph_actions._agent_graph["edges"].clear()
    agents_graph_actions._agent_messages.clear()
    agents_graph_actions._running_agents.clear()
    agents_graph_actions._agent_instances.clear()
    agents_graph_actions._agent_states.clear()
    # Build a minimal parent -> child graph; the child is the finishing agent.
    parent_id = "parent-sec"
    child_id = "child-sec"
    agents_graph_actions._agent_graph["nodes"][parent_id] = {
        "name": "Parent",
        "task": "parent task",
        "status": "running",
        "parent_id": None,
    }
    agents_graph_actions._agent_graph["nodes"][child_id] = {
        "name": "Child",
        "task": "child task",
        "status": "running",
        "parent_id": parent_id,
    }
    # Whitebox config is what should route the summary to the security wiki.
    agents_graph_actions._agent_instances[child_id] = SimpleNamespace(
        llm_config=LLMConfig(is_whitebox=True)
    )
    # Records which note agent_finish appends to, and with what text.
    captured: dict[str, str] = {}
    def fake_list_notes(category=None):
        # agent_finish is expected to list only wiki-category notes.
        assert category == "wiki"
        return {
            "success": True,
            "notes": [
                {"note_id": "wiki-overview", "tags": ["repo:appsmith", "wiki:overview"]},
                {"note_id": "wiki-security", "tags": ["repo:appsmith", "wiki:security"]},
            ],
            "total_count": 2,
        }
    def fake_get_note(note_id: str):
        return {
            "success": True,
            "note": {"note_id": note_id, "title": "Repo Wiki", "content": "Existing wiki content"},
        }
    def fake_append_note_content(note_id: str, delta: str):
        captured["note_id"] = note_id
        captured["delta"] = delta
        return {"success": True, "note_id": note_id}
    monkeypatch.setattr("strix.tools.notes.notes_actions.list_notes", fake_list_notes)
    monkeypatch.setattr("strix.tools.notes.notes_actions.get_note", fake_get_note)
    monkeypatch.setattr("strix.tools.notes.notes_actions.append_note_content", fake_append_note_content)
    state = SimpleNamespace(agent_id=child_id, parent_id=parent_id, task="analyze /workspace/appsmith")
    result = agents_graph_actions.agent_finish(
        agent_state=state,
        result_summary="Static triage completed",
        findings=["Found candidate sink"],
        success=True,
        final_recommendations=["Validate with dynamic PoC"],
    )
    assert result["agent_completed"] is True
    # The security note was chosen over the overview note listed before it.
    assert captured["note_id"] == "wiki-security"
    assert "Static triage completed" in captured["delta"]

View File

@@ -124,7 +124,7 @@ def test_get_note_returns_full_note(tmp_path: Path, monkeypatch) -> None:
title="Repo wiki",
content="entrypoints and sinks",
category="wiki",
tags=["repo:appsmith", "wiki:security"],
tags=["repo:appsmith"],
)
assert created["success"] is True
note_id = created["note_id"]
@@ -134,7 +134,6 @@ def test_get_note_returns_full_note(tmp_path: Path, monkeypatch) -> None:
assert result["success"] is True
assert result["note"]["note_id"] == note_id
assert result["note"]["content"] == "entrypoints and sinks"
assert result["note"]["wiki_kind"] == "security"
finally:
_reset_notes_state()
set_global_tracer(previous_tracer) # type: ignore[arg-type]
@@ -213,55 +212,3 @@ def test_list_and_get_note_handle_wiki_repersist_oserror_gracefully(
finally:
_reset_notes_state()
set_global_tracer(previous_tracer) # type: ignore[arg-type]
def test_wiki_index_tracks_overview_and_security_notes(tmp_path: Path, monkeypatch) -> None:
    """Creating/deleting wiki notes should keep the run's index.json in sync,
    including the inferred wiki_kind for overview- and security-tagged notes.
    """
    # Run inside an isolated directory so strix_runs/ is created under tmp_path.
    monkeypatch.chdir(tmp_path)
    _reset_notes_state()
    # Install a named tracer; its run id determines the wiki directory path below.
    previous_tracer = get_global_tracer()
    tracer = Tracer("wiki-index-run")
    set_global_tracer(tracer)
    try:
        overview = notes_actions.create_note(
            title="Repo overview wiki",
            content="architecture and entrypoints",
            category="wiki",
            tags=["repo:demo", "wiki:overview"],
        )
        assert overview["success"] is True
        overview_id = overview["note_id"]
        assert isinstance(overview_id, str)
        security = notes_actions.create_note(
            title="Repo security wiki",
            content="scanner summary and follow-ups",
            category="wiki",
            tags=["repo:demo", "wiki:security"],
        )
        assert security["success"] is True
        security_id = security["note_id"]
        assert isinstance(security_id, str)
        # Index path follows strix_runs/<run-id>/wiki/index.json under the cwd.
        wiki_index = tmp_path / "strix_runs" / "wiki-index-run" / "wiki" / "index.json"
        assert wiki_index.exists() is True
        index_data = wiki_index.read_text(encoding="utf-8")
        # Both kinds must be inferred from the wiki:* tags and serialized.
        assert '"wiki_kind": "overview"' in index_data
        assert '"wiki_kind": "security"' in index_data
        listed = notes_actions.list_notes(category="wiki")
        assert listed["success"] is True
        note_kinds = {note["note_id"]: note.get("wiki_kind") for note in listed["notes"]}
        assert note_kinds[overview_id] == "overview"
        assert note_kinds[security_id] == "security"
        # Deleting a wiki note must rewrite the index without the deleted entry.
        deleted = notes_actions.delete_note(note_id=overview_id)
        assert deleted["success"] is True
        index_after_delete = wiki_index.read_text(encoding="utf-8")
        assert overview_id not in index_after_delete
        assert security_id in index_after_delete
    finally:
        # Restore module state and the previous tracer even if assertions fail.
        _reset_notes_state()
        set_global_tracer(previous_tracer)  # type: ignore[arg-type]

6206
uv.lock generated Normal file

File diff suppressed because it is too large Load Diff