2 Commits

Author SHA1 Message Date
alex s
15c95718e6 fix: ensure LLM stats tracking is accurate by including completed subagents (#441) 2026-04-13 00:09:13 -04:00
Ahmed Allam
62e9af36d2 Add Strix GitHub Actions integration tip 2026-04-12 12:43:41 -07:00
7 changed files with 160 additions and 42 deletions

View File

@@ -32,9 +32,8 @@
</div>
> [!TIP]
> **New!** Strix integrates seamlessly with GitHub Actions and CI/CD pipelines. Automatically scan for vulnerabilities on every pull request and block insecure code before it reaches production!
> **New!** Strix integrates seamlessly with GitHub Actions and CI/CD pipelines. Automatically scan for vulnerabilities on every pull request and block insecure code before it reaches production - [Get started with no setup required](https://app.strix.ai).
---

View File

@@ -134,7 +134,8 @@ class BaseAgent(metaclass=AgentMeta):
}
agents_graph_actions._agent_graph["nodes"][self.state.agent_id] = node
agents_graph_actions._agent_instances[self.state.agent_id] = self
with agents_graph_actions._agent_llm_stats_lock:
agents_graph_actions._agent_instances[self.state.agent_id] = self
agents_graph_actions._agent_states[self.state.agent_id] = self.state
if self.state.parent_id:

View File

@@ -799,17 +799,25 @@ class Tracer:
)
def get_total_llm_stats(self) -> dict[str, Any]:
from strix.tools.agents_graph.agents_graph_actions import _agent_instances
from strix.tools.agents_graph.agents_graph_actions import (
_agent_instances,
_completed_agent_llm_totals,
_agent_llm_stats_lock,
)
with _agent_llm_stats_lock:
completed_totals = dict(_completed_agent_llm_totals)
active_agents = list(_agent_instances.values())
total_stats = {
"input_tokens": 0,
"output_tokens": 0,
"cached_tokens": 0,
"cost": 0.0,
"requests": 0,
"input_tokens": int(completed_totals.get("input_tokens", 0) or 0),
"output_tokens": int(completed_totals.get("output_tokens", 0) or 0),
"cached_tokens": int(completed_totals.get("cached_tokens", 0) or 0),
"cost": float(completed_totals.get("cost", 0.0) or 0.0),
"requests": int(completed_totals.get("requests", 0) or 0),
}
for agent_instance in _agent_instances.values():
for agent_instance in active_agents:
if hasattr(agent_instance, "llm") and hasattr(agent_instance.llm, "_total_stats"):
agent_stats = agent_instance.llm._total_stats
total_stats["input_tokens"] += agent_stats.input_tokens

View File

@@ -19,9 +19,55 @@ _running_agents: dict[str, threading.Thread] = {}
_agent_instances: dict[str, Any] = {}
_agent_llm_stats_lock = threading.Lock()
def _empty_llm_stats_totals() -> dict[str, int | float]:
return {
"input_tokens": 0,
"output_tokens": 0,
"cached_tokens": 0,
"cost": 0.0,
"requests": 0,
}
# Running totals of LLM usage folded in from agents that have already
# finished; merged with live-agent stats by Tracer.get_total_llm_stats().
# Guarded by _agent_llm_stats_lock.
_completed_agent_llm_totals: dict[str, int | float] = _empty_llm_stats_totals()

# Maps agent_id -> agent state object; presumably populated when an agent
# registers itself at creation — confirm against BaseAgent registration.
_agent_states: dict[str, Any] = {}
def _snapshot_agent_llm_stats(agent: Any) -> dict[str, int | float] | None:
if not hasattr(agent, "llm") or not hasattr(agent.llm, "_total_stats"):
return None
stats = agent.llm._total_stats
return {
"input_tokens": stats.input_tokens,
"output_tokens": stats.output_tokens,
"cached_tokens": stats.cached_tokens,
"cost": stats.cost,
"requests": stats.requests,
}
def _finalize_agent_llm_stats(agent_id: str, agent: Any) -> None:
    """Fold a finished agent's LLM usage into the completed-agent totals.

    Called when an agent thread ends (success or failure): snapshots the
    agent's cumulative LLM stats, adds them to the module-level
    ``_completed_agent_llm_totals`` accumulator, records the snapshot on the
    agent's graph node, and drops the instance from ``_agent_instances`` so
    the same usage is not also counted as a live agent.
    """
    # Snapshot outside the lock: reading the agent's own counters does not
    # touch the shared accumulator.
    stats = _snapshot_agent_llm_stats(agent)
    with _agent_llm_stats_lock:
        if stats is not None:
            _completed_agent_llm_totals["input_tokens"] += int(stats["input_tokens"])
            _completed_agent_llm_totals["output_tokens"] += int(stats["output_tokens"])
            _completed_agent_llm_totals["cached_tokens"] += int(stats["cached_tokens"])
            _completed_agent_llm_totals["cost"] += float(stats["cost"])
            _completed_agent_llm_totals["requests"] += int(stats["requests"])
        # Persist the final per-agent snapshot on the graph node (if one
        # exists) so per-agent stats stay inspectable after the instance is
        # removed. NOTE(review): this runs even when stats is None — confirm
        # a None llm_stats entry is acceptable to graph consumers.
        node = _agent_graph["nodes"].get(agent_id)
        if node is not None:
            node["llm_stats"] = stats
        # Remove the instance under the same lock so readers never observe
        # the agent both in the completed totals and in the active map.
        _agent_instances.pop(agent_id, None)
def _is_whitebox_agent(agent_id: str) -> bool:
    """Return True when the registered agent's LLM config flags whitebox mode.

    Missing agents, missing ``llm_config``, or a missing/falsy
    ``is_whitebox`` attribute all yield False.
    """
    instance = _agent_instances.get(agent_id)
    config = getattr(instance, "llm_config", None)
    return bool(getattr(config, "is_whitebox", False))
@@ -237,7 +283,7 @@ def _run_agent_in_thread(
_agent_graph["nodes"][state.agent_id]["finished_at"] = datetime.now(UTC).isoformat()
_agent_graph["nodes"][state.agent_id]["result"] = {"error": str(e)}
_running_agents.pop(state.agent_id, None)
_agent_instances.pop(state.agent_id, None)
_finalize_agent_llm_stats(state.agent_id, agent)
raise
else:
if state.stop_requested:
@@ -247,7 +293,7 @@ def _run_agent_in_thread(
_agent_graph["nodes"][state.agent_id]["finished_at"] = datetime.now(UTC).isoformat()
_agent_graph["nodes"][state.agent_id]["result"] = result
_running_agents.pop(state.agent_id, None)
_agent_instances.pop(state.agent_id, None)
_finalize_agent_llm_stats(state.agent_id, agent)
return {"result": result}
@@ -418,7 +464,8 @@ def create_agent(
if inherit_context:
inherited_messages = agent_state.get_conversation_history()
_agent_instances[state.agent_id] = agent
with _agent_llm_stats_lock:
_agent_instances[state.agent_id] = agent
thread = threading.Thread(
target=_run_agent_in_thread,

View File

@@ -10,6 +10,7 @@ from opentelemetry.sdk.trace.export import SimpleSpanProcessor, SpanExportResult
from strix.telemetry import tracer as tracer_module
from strix.telemetry import utils as telemetry_utils
from strix.telemetry.tracer import Tracer, set_global_tracer
from strix.tools.agents_graph import agents_graph_actions
def _load_events(events_path: Path) -> list[dict[str, Any]]:
@@ -255,6 +256,75 @@ def test_events_with_agent_id_include_agent_name(monkeypatch, tmp_path) -> None:
assert chat_event["actor"]["agent_name"] == "Root Agent"
def test_get_total_llm_stats_includes_completed_subagents(monkeypatch, tmp_path) -> None:
    """Tracer.get_total_llm_stats must sum live agents AND finished subagents.

    Regression test for the stats-rollup fix: totals accumulated in
    ``_completed_agent_llm_totals`` (agents that already finished) must be
    added to the stats gathered from still-registered ``_agent_instances``.
    """
    monkeypatch.chdir(tmp_path)

    # Minimal stand-ins for the real LLM objects: the tracer only reads the
    # five counter attributes off ``agent.llm._total_stats``.
    class DummyStats:
        def __init__(
            self,
            *,
            input_tokens: int,
            output_tokens: int,
            cached_tokens: int,
            cost: float,
            requests: int,
        ) -> None:
            self.input_tokens = input_tokens
            self.output_tokens = output_tokens
            self.cached_tokens = cached_tokens
            self.cost = cost
            self.requests = requests

    class DummyLLM:
        def __init__(self, stats: DummyStats) -> None:
            self._total_stats = stats

    class DummyAgent:
        def __init__(self, stats: DummyStats) -> None:
            self.llm = DummyLLM(stats)

    tracer = Tracer("cost-rollup")
    set_global_tracer(tracer)

    # One live (still-registered) agent...
    monkeypatch.setattr(
        agents_graph_actions,
        "_agent_instances",
        {
            "root-agent": DummyAgent(
                DummyStats(
                    input_tokens=1_000,
                    output_tokens=250,
                    cached_tokens=100,
                    cost=0.12345,
                    requests=2,
                )
            )
        },
    )
    # ...plus previously finalized subagent totals.
    monkeypatch.setattr(
        agents_graph_actions,
        "_completed_agent_llm_totals",
        {
            "input_tokens": 2_000,
            "output_tokens": 500,
            "cached_tokens": 400,
            "cost": 0.54321,
            "requests": 3,
        },
    )

    stats = tracer.get_total_llm_stats()

    # Expected = live + completed. Cost: 0.12345 + 0.54321 = 0.66666; the
    # expected 0.6667 implies the tracer rounds cost to 4 decimal places —
    # TODO confirm against get_total_llm_stats.
    assert stats["total"] == {
        "input_tokens": 3_000,
        "output_tokens": 750,
        "cached_tokens": 500,
        "cost": 0.6667,
        "requests": 5,
    }
    assert stats["total_tokens"] == 3_750
def test_run_metadata_is_only_on_run_lifecycle_events(monkeypatch, tmp_path) -> None:
monkeypatch.chdir(tmp_path)

View File

@@ -5,16 +5,24 @@ from strix.llm.config import LLMConfig
from strix.tools.agents_graph import agents_graph_actions
def test_create_agent_inherits_parent_whitebox_flag(monkeypatch) -> None:
monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")
def _reset_agent_graph_state() -> None:
    """Restore agents_graph_actions module-level state to a pristine baseline.

    Shared setup for the tests in this file: empties every registry/graph
    container in place (preserving object identity, since the module's other
    code holds direct references to these dicts) and re-zeroes the
    completed-agent LLM totals accumulator.
    """
    module = agents_graph_actions
    for graph_key in ("nodes", "edges"):
        module._agent_graph[graph_key].clear()
    for registry in (
        module._agent_messages,
        module._running_agents,
        module._agent_instances,
        module._agent_states,
    ):
        registry.clear()
    totals = module._completed_agent_llm_totals
    totals.clear()
    totals.update(module._empty_llm_stats_totals())
def test_create_agent_inherits_parent_whitebox_flag(monkeypatch) -> None:
monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")
_reset_agent_graph_state()
parent_id = "parent-agent"
parent_llm = LLMConfig(timeout=123, scan_mode="standard", is_whitebox=True)
agents_graph_actions._agent_instances[parent_id] = SimpleNamespace(
@@ -66,12 +74,7 @@ def test_create_agent_inherits_parent_whitebox_flag(monkeypatch) -> None:
def test_delegation_prompt_includes_wiki_memory_instruction_in_whitebox(monkeypatch) -> None:
monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")
agents_graph_actions._agent_graph["nodes"].clear()
agents_graph_actions._agent_graph["edges"].clear()
agents_graph_actions._agent_messages.clear()
agents_graph_actions._running_agents.clear()
agents_graph_actions._agent_instances.clear()
agents_graph_actions._agent_states.clear()
_reset_agent_graph_state()
parent_id = "parent-1"
child_id = "child-1"
@@ -116,12 +119,7 @@ def test_delegation_prompt_includes_wiki_memory_instruction_in_whitebox(monkeypa
def test_agent_finish_appends_wiki_update_for_whitebox(monkeypatch) -> None:
monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")
agents_graph_actions._agent_graph["nodes"].clear()
agents_graph_actions._agent_graph["edges"].clear()
agents_graph_actions._agent_messages.clear()
agents_graph_actions._running_agents.clear()
agents_graph_actions._agent_instances.clear()
agents_graph_actions._agent_states.clear()
_reset_agent_graph_state()
parent_id = "parent-2"
child_id = "child-2"
@@ -192,12 +190,7 @@ def test_agent_finish_appends_wiki_update_for_whitebox(monkeypatch) -> None:
def test_run_agent_in_thread_injects_shared_wiki_context_in_whitebox(monkeypatch) -> None:
monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")
agents_graph_actions._agent_graph["nodes"].clear()
agents_graph_actions._agent_graph["edges"].clear()
agents_graph_actions._agent_messages.clear()
agents_graph_actions._running_agents.clear()
agents_graph_actions._agent_instances.clear()
agents_graph_actions._agent_states.clear()
_reset_agent_graph_state()
parent_id = "parent-3"
child_id = "child-3"

16
uv.lock generated
View File

@@ -158,7 +158,7 @@ wheels = [
[[package]]
name = "anthropic"
version = "0.87.0"
version = "0.86.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -170,9 +170,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d6/8f/3281edf7c35cbac169810e5388eb9b38678c7ea9867c2d331237bd5dff08/anthropic-0.87.0.tar.gz", hash = "sha256:098fef3753cdd3c0daa86f95efb9c8d03a798d45c5170329525bb4653f6702d0", size = 588982, upload-time = "2026-03-31T17:52:41.697Z" }
sdist = { url = "https://files.pythonhosted.org/packages/37/7a/8b390dc47945d3169875d342847431e5f7d5fa716b2e37494d57cfc1db10/anthropic-0.86.0.tar.gz", hash = "sha256:60023a7e879aa4fbb1fed99d487fe407b2ebf6569603e5047cfe304cebdaa0e5", size = 583820, upload-time = "2026-03-18T18:43:08.017Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0d/02/99bf351933bdea0545a2b6e2d812ed878899e9a95f618351dfa3d0de0e69/anthropic-0.87.0-py3-none-any.whl", hash = "sha256:e2669b86d42c739d3df163f873c51719552e263a3d85179297180fb4fa00a236", size = 472126, upload-time = "2026-03-31T17:52:40.174Z" },
{ url = "https://files.pythonhosted.org/packages/63/5f/67db29c6e5d16c8c9c4652d3efb934d89cb750cad201539141781d8eae14/anthropic-0.86.0-py3-none-any.whl", hash = "sha256:9d2bbd339446acce98858c5627d33056efe01f70435b22b63546fe7edae0cd57", size = 469400, upload-time = "2026-03-18T18:43:06.526Z" },
]
[[package]]
@@ -2374,7 +2374,7 @@ name = "macholib"
version = "1.16.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "altgraph", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
{ name = "altgraph" },
]
sdist = { url = "https://files.pythonhosted.org/packages/10/2f/97589876ea967487978071c9042518d28b958d87b17dceb7cdc1d881f963/macholib-1.16.4.tar.gz", hash = "sha256:f408c93ab2e995cd2c46e34fe328b130404be143469e41bc366c807448979362", size = 59427, upload-time = "2025-11-22T08:28:38.373Z" }
wheels = [
@@ -3815,7 +3815,7 @@ name = "pexpect"
version = "4.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "ptyprocess", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
{ name = "ptyprocess" },
]
sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
wheels = [
@@ -4458,7 +4458,7 @@ name = "pyroscope-io"
version = "0.8.16"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "sys_platform != 'win32'" },
{ name = "cffi" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/50/607b38b120ba8adad954119ba512c53590c793f0cf7f009ba6549e4e1d77/pyroscope_io-0.8.16-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:e07edcfd59f5bdce42948b92c9b118c824edbd551730305f095a6b9af401a9e8", size = 3138869, upload-time = "2026-01-22T06:23:24.664Z" },
@@ -5439,7 +5439,7 @@ wheels = [
[[package]]
name = "strix-agent"
version = "0.8.3"
version = "0.8.2"
source = { editable = "." }
dependencies = [
{ name = "cvss" },
@@ -5511,7 +5511,7 @@ requires-dist = [
{ name = "rich" },
{ name = "scrubadub", specifier = ">=2.0.1" },
{ name = "tenacity", specifier = ">=9.0.0" },
{ name = "textual", specifier = ">=6.0.0" },
{ name = "textual", specifier = ">=4.0.0" },
{ name = "traceloop-sdk", specifier = ">=0.53.0" },
{ name = "uvicorn", marker = "extra == 'sandbox'" },
{ name = "xmltodict", specifier = ">=0.13.0" },