commit: feat: Better source-aware testing (#391)
new file: tests/interface/test_diff_scope.py (+153 lines) @@ -0,0 +1,153 @@
|
||||
import importlib.util
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _load_utils_module():
    """Load ``strix/interface/utils.py`` straight from its file path.

    Importing via an explicit spec sidesteps the full ``strix`` package
    import (and whatever heavy dependencies that would drag in), so these
    tests exercise only the helpers under test.
    """
    utils_path = Path(__file__).resolve().parents[2] / "strix" / "interface" / "utils.py"
    spec = importlib.util.spec_from_file_location("strix_interface_utils_test", utils_path)
    if spec is None or spec.loader is None:
        raise RuntimeError("Failed to load strix.interface.utils for tests")

    loaded = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(loaded)
    return loaded


# Module under test, shared by every test in this file.
utils = _load_utils_module()
|
||||
|
||||
|
||||
def test_parse_name_status_uses_rename_destination_path() -> None:
    """Renamed files must be analyzed under their destination path only."""
    # NUL-separated `git diff --name-status -z` payload covering rename,
    # modify, add, and delete entries.
    raw = (
        b"R100\x00old/path.py\x00new/path.py\x00"
        b"R75\x00legacy/module.py\x00modern/module.py\x00"
        b"M\x00src/app.py\x00"
        b"A\x00src/new_file.py\x00"
        b"D\x00src/deleted.py\x00"
    )

    classified = utils._classify_diff_entries(utils._parse_name_status_z(raw))

    # Renames: destination path is analyzable, source path is not.
    assert "new/path.py" in classified["analyzable_files"]
    assert "old/path.py" not in classified["analyzable_files"]
    assert "modern/module.py" in classified["analyzable_files"]
    assert classified["renamed_files"][0]["old_path"] == "old/path.py"
    assert classified["renamed_files"][0]["new_path"] == "new/path.py"
    # Deletions are tracked but never offered for analysis.
    assert "src/deleted.py" in classified["deleted_files"]
    assert "src/deleted.py" not in classified["analyzable_files"]
|
||||
|
||||
|
||||
def test_build_diff_scope_instruction_includes_added_modified_and_deleted_guidance() -> None:
    """The instruction text must mention every change category of the scope."""
    scope = utils.RepoDiffScope(
        source_path="/tmp/repo",
        workspace_subdir="repo",
        base_ref="refs/remotes/origin/main",
        merge_base="abc123",
        added_files=["src/added.py"],
        modified_files=["src/changed.py"],
        renamed_files=[{"old_path": "src/old.py", "new_path": "src/new.py", "similarity": 90}],
        deleted_files=["src/deleted.py"],
        analyzable_files=["src/added.py", "src/changed.py", "src/new.py"],
    )

    instruction = utils.build_diff_scope_instruction([scope])

    # Per-category guidance must survive into the rendered instruction.
    assert "For Added files, review the entire file content." in instruction
    assert "For Modified files, focus primarily on the changed areas." in instruction
    assert "Note: These files were deleted" in instruction
    assert "src/deleted.py" in instruction
    assert "src/old.py -> src/new.py" in instruction
|
||||
|
||||
|
||||
def test_resolve_base_ref_prefers_github_base_ref(monkeypatch) -> None:
    """GITHUB_BASE_REF should win over every other base-ref heuristic."""
    probed_refs: list[str] = []

    def fake_ref_exists(_repo_path: Path, ref: str) -> bool:
        probed_refs.append(ref)
        return ref == "refs/remotes/origin/release-2026"

    monkeypatch.setattr(utils, "_git_ref_exists", fake_ref_exists)
    # Disable the other resolution strategies so only the env path remains.
    monkeypatch.setattr(utils, "_extract_github_base_sha", lambda _env: None)
    monkeypatch.setattr(utils, "_resolve_origin_head_ref", lambda _repo_path: None)

    base_ref = utils._resolve_base_ref(
        Path("/tmp/repo"),
        diff_base=None,
        env={"GITHUB_BASE_REF": "release-2026"},
    )

    assert base_ref == "refs/remotes/origin/release-2026"
    # The env-provided ref must be the very first candidate probed.
    assert probed_refs[0] == "refs/remotes/origin/release-2026"
|
||||
|
||||
|
||||
def test_resolve_base_ref_falls_back_to_remote_main(monkeypatch) -> None:
    """Without any hints, resolution falls back to the remote-tracking main."""
    probed_refs: list[str] = []

    def fake_ref_exists(_repo_path: Path, ref: str) -> bool:
        probed_refs.append(ref)
        return ref == "refs/remotes/origin/main"

    monkeypatch.setattr(utils, "_git_ref_exists", fake_ref_exists)
    monkeypatch.setattr(utils, "_extract_github_base_sha", lambda _env: None)
    monkeypatch.setattr(utils, "_resolve_origin_head_ref", lambda _repo_path: None)

    base_ref = utils._resolve_base_ref(Path("/tmp/repo"), diff_base=None, env={})

    assert base_ref == "refs/remotes/origin/main"
    # Only the fully-qualified form should be probed, never the short alias.
    assert "refs/remotes/origin/main" in probed_refs
    assert "origin/main" not in probed_refs
|
||||
|
||||
|
||||
def test_resolve_diff_scope_context_auto_degrades_when_repo_scope_resolution_fails(
    monkeypatch,
) -> None:
    """In auto mode a failed repo-scope resolution degrades to inactive, not an error."""
    source = {"source_path": "/tmp/repo", "workspace_subdir": "repo"}

    monkeypatch.setattr(utils, "_should_activate_auto_scope", lambda *_args, **_kwargs: True)
    monkeypatch.setattr(utils, "_is_git_repo", lambda _repo_path: True)
    # Lambda-compatible way to raise: throw from an exhausted generator.
    monkeypatch.setattr(
        utils,
        "_resolve_repo_diff_scope",
        lambda *_args, **_kwargs: (_ for _ in ()).throw(ValueError("shallow history")),
    )

    result = utils.resolve_diff_scope_context(
        local_sources=[source],
        scope_mode="auto",
        diff_base=None,
        non_interactive=True,
        env={},
    )

    # Auto mode swallows the failure and records the skipped source instead.
    assert result.active is False
    assert result.mode == "auto"
    assert result.metadata["active"] is False
    assert result.metadata["mode"] == "auto"
    assert "skipped_diff_scope_sources" in result.metadata
    assert result.metadata["skipped_diff_scope_sources"] == [
        "/tmp/repo (diff-scope skipped: shallow history)"
    ]
|
||||
|
||||
|
||||
def test_resolve_diff_scope_context_diff_mode_still_raises_on_repo_scope_resolution_failure(
    monkeypatch,
) -> None:
    """Explicit diff mode must propagate resolution failures instead of degrading."""
    source = {"source_path": "/tmp/repo", "workspace_subdir": "repo"}

    monkeypatch.setattr(utils, "_is_git_repo", lambda _repo_path: True)
    # Lambda-compatible way to raise: throw from an exhausted generator.
    monkeypatch.setattr(
        utils,
        "_resolve_repo_diff_scope",
        lambda *_args, **_kwargs: (_ for _ in ()).throw(ValueError("shallow history")),
    )

    with pytest.raises(ValueError, match="shallow history"):
        utils.resolve_diff_scope_context(
            local_sources=[source],
            scope_mode="diff",
            diff_base=None,
            non_interactive=True,
            env={},
        )
|
||||
new file: tests/llm/test_source_aware_whitebox.py (+30 lines) @@ -0,0 +1,30 @@
|
||||
from strix.llm.config import LLMConfig
|
||||
from strix.llm.llm import LLM
|
||||
|
||||
|
||||
def test_llm_config_whitebox_defaults_to_false(monkeypatch) -> None:
    """``is_whitebox`` defaults to False so black-box stays the baseline."""
    monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")

    assert LLMConfig().is_whitebox is False
|
||||
|
||||
|
||||
def test_llm_config_whitebox_can_be_enabled(monkeypatch) -> None:
    """``is_whitebox`` can be switched on explicitly at construction time."""
    monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")

    assert LLMConfig(is_whitebox=True).is_whitebox is True
|
||||
|
||||
|
||||
def test_whitebox_prompt_loads_source_aware_coordination_skill(monkeypatch) -> None:
    """White-box mode swaps the dynamic-first prompt for source-aware skills."""
    monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")

    whitebox_llm = LLM(LLMConfig(scan_mode="quick", is_whitebox=True), agent_name="StrixAgent")
    # Source-aware skill sections are present...
    assert "<source_aware_whitebox>" in whitebox_llm.system_prompt
    assert "<source_aware_sast>" in whitebox_llm.system_prompt
    assert "Begin with fast source triage" in whitebox_llm.system_prompt
    # ...and the dynamic-testing-first mandate is dropped.
    assert "You MUST begin at the very first step by running the code and testing live." not in (
        whitebox_llm.system_prompt
    )

    non_whitebox_llm = LLM(LLMConfig(scan_mode="quick", is_whitebox=False), agent_name="StrixAgent")
    assert "<source_aware_whitebox>" not in non_whitebox_llm.system_prompt
    assert "<source_aware_sast>" not in non_whitebox_llm.system_prompt
|
||||
new file: tests/tools/test_agents_graph_whitebox.py (+298 lines) @@ -0,0 +1,298 @@
|
||||
from types import SimpleNamespace
|
||||
|
||||
import strix.agents as agents_module
|
||||
from strix.llm.config import LLMConfig
|
||||
from strix.tools.agents_graph import agents_graph_actions
|
||||
|
||||
|
||||
def test_create_agent_inherits_parent_whitebox_flag(monkeypatch) -> None:
    """A spawned child agent inherits the parent's LLMConfig, including is_whitebox."""
    monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")

    # Start from a clean module-level agent registry.
    for registry in (
        agents_graph_actions._agent_graph["nodes"],
        agents_graph_actions._agent_graph["edges"],
        agents_graph_actions._agent_messages,
        agents_graph_actions._running_agents,
        agents_graph_actions._agent_instances,
        agents_graph_actions._agent_states,
    ):
        registry.clear()

    parent_id = "parent-agent"
    parent_llm = LLMConfig(timeout=123, scan_mode="standard", is_whitebox=True)
    agents_graph_actions._agent_instances[parent_id] = SimpleNamespace(
        llm_config=parent_llm,
        non_interactive=True,
    )

    captured_config: dict[str, object] = {}

    class FakeStrixAgent:
        """Records the config it is constructed with instead of running."""

        def __init__(self, config: dict[str, object]):
            captured_config["agent_config"] = config

    class FakeThread:
        """Thread stand-in whose start() is a no-op (keeps the test synchronous)."""

        def __init__(self, target, args, daemon, name):
            self.target = target
            self.args = args
            self.daemon = daemon
            self.name = name

        def start(self) -> None:
            return None

    monkeypatch.setattr(agents_module, "StrixAgent", FakeStrixAgent)
    monkeypatch.setattr(agents_graph_actions.threading, "Thread", FakeThread)

    agent_state = SimpleNamespace(
        agent_id=parent_id,
        get_conversation_history=list,
    )
    result = agents_graph_actions.create_agent(
        agent_state=agent_state,
        task="source-aware child task",
        name="SourceAwareChild",
        inherit_context=False,
    )

    assert result["success"] is True
    llm_config = captured_config["agent_config"]["llm_config"]
    assert isinstance(llm_config, LLMConfig)
    # Every parent LLM setting carries over to the child verbatim.
    assert llm_config.timeout == 123
    assert llm_config.scan_mode == "standard"
    assert llm_config.is_whitebox is True
    # White-box guidance is advisory in the child task, never a hard mandate.
    child_task = captured_config["agent_config"]["state"].task
    assert "White-box execution guidance (recommended when source is available):" in child_task
    assert "mandatory" not in child_task.lower()
|
||||
|
||||
|
||||
def test_delegation_prompt_includes_wiki_memory_instruction_in_whitebox(monkeypatch) -> None:
    """White-box child agents are told how to consult and update the shared wiki."""
    monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")

    for registry in (
        agents_graph_actions._agent_graph["nodes"],
        agents_graph_actions._agent_graph["edges"],
        agents_graph_actions._agent_messages,
        agents_graph_actions._running_agents,
        agents_graph_actions._agent_instances,
        agents_graph_actions._agent_states,
    ):
        registry.clear()

    parent_id = "parent-1"
    child_id = "child-1"
    agents_graph_actions._agent_graph["nodes"][parent_id] = {"name": "Parent", "status": "running"}
    agents_graph_actions._agent_graph["nodes"][child_id] = {"name": "Child", "status": "running"}

    class FakeState:
        """Minimal agent state that records every message it receives."""

        def __init__(self) -> None:
            self.agent_id = child_id
            self.agent_name = "Child"
            self.parent_id = parent_id
            self.task = "analyze source risks"
            self.stop_requested = False
            self.messages: list[tuple[str, str]] = []

        def add_message(self, role: str, content: str) -> None:
            self.messages.append((role, content))

        def model_dump(self) -> dict[str, str]:
            return {"agent_id": self.agent_id}

    class FakeAgent:
        """White-box agent whose loop returns immediately."""

        def __init__(self) -> None:
            self.llm_config = LLMConfig(is_whitebox=True)

        async def agent_loop(self, _task: str) -> dict[str, bool]:
            return {"ok": True}

    state = FakeState()
    agent = FakeAgent()
    agents_graph_actions._agent_instances[child_id] = agent
    result = agents_graph_actions._run_agent_in_thread(agent, state, inherited_messages=[])

    assert result["result"] == {"ok": True}
    user_task_messages = [msg for role, msg in state.messages if role == "user"]
    assert user_task_messages
    # The final user message must spell out the wiki read/update workflow.
    assert 'list_notes(category="wiki")' in user_task_messages[-1]
    assert "get_note(note_id=...)" in user_task_messages[-1]
    assert "Before agent_finish" in user_task_messages[-1]
|
||||
|
||||
|
||||
def test_agent_finish_appends_wiki_update_for_whitebox(monkeypatch) -> None:
    """agent_finish in white-box mode appends the child's summary to the wiki note."""
    monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")

    for registry in (
        agents_graph_actions._agent_graph["nodes"],
        agents_graph_actions._agent_graph["edges"],
        agents_graph_actions._agent_messages,
        agents_graph_actions._running_agents,
        agents_graph_actions._agent_instances,
        agents_graph_actions._agent_states,
    ):
        registry.clear()

    parent_id = "parent-2"
    child_id = "child-2"
    agents_graph_actions._agent_graph["nodes"][parent_id] = {
        "name": "Parent",
        "task": "parent task",
        "status": "running",
        "parent_id": None,
    }
    agents_graph_actions._agent_graph["nodes"][child_id] = {
        "name": "Child",
        "task": "child task",
        "status": "running",
        "parent_id": parent_id,
    }
    agents_graph_actions._agent_instances[child_id] = SimpleNamespace(
        llm_config=LLMConfig(is_whitebox=True)
    )

    captured: dict[str, str] = {}

    def fake_list_notes(category=None):
        assert category == "wiki"
        return {
            "success": True,
            "notes": [{"note_id": "wiki-note-1", "content": "Existing wiki content"}],
            "total_count": 1,
        }

    captured_get: dict[str, str] = {}

    def fake_get_note(note_id: str):
        captured_get["note_id"] = note_id
        return {
            "success": True,
            "note": {
                "note_id": note_id,
                "title": "Repo Wiki",
                "content": "Existing wiki content",
            },
        }

    def fake_append_note_content(note_id: str, delta: str):
        captured["note_id"] = note_id
        captured["delta"] = delta
        return {"success": True, "note_id": note_id}

    monkeypatch.setattr("strix.tools.notes.notes_actions.list_notes", fake_list_notes)
    monkeypatch.setattr("strix.tools.notes.notes_actions.get_note", fake_get_note)
    monkeypatch.setattr(
        "strix.tools.notes.notes_actions.append_note_content", fake_append_note_content
    )

    state = SimpleNamespace(agent_id=child_id, parent_id=parent_id)
    result = agents_graph_actions.agent_finish(
        agent_state=state,
        result_summary="AST pass completed",
        findings=["Found route sink candidate"],
        success=True,
        final_recommendations=["Validate sink with dynamic PoC"],
    )

    assert result["agent_completed"] is True
    # The existing wiki note is looked up and appended to — never replaced.
    assert captured_get["note_id"] == "wiki-note-1"
    assert captured["note_id"] == "wiki-note-1"
    assert "Agent Update: Child" in captured["delta"]
    assert "AST pass completed" in captured["delta"]
|
||||
|
||||
|
||||
def test_run_agent_in_thread_injects_shared_wiki_context_in_whitebox(monkeypatch) -> None:
    """Starting a white-box child injects the shared repo wiki into its first message."""
    monkeypatch.setenv("STRIX_LLM", "openai/gpt-5")

    for registry in (
        agents_graph_actions._agent_graph["nodes"],
        agents_graph_actions._agent_graph["edges"],
        agents_graph_actions._agent_messages,
        agents_graph_actions._running_agents,
        agents_graph_actions._agent_instances,
        agents_graph_actions._agent_states,
    ):
        registry.clear()

    parent_id = "parent-3"
    child_id = "child-3"
    agents_graph_actions._agent_graph["nodes"][parent_id] = {"name": "Parent", "status": "running"}
    agents_graph_actions._agent_graph["nodes"][child_id] = {"name": "Child", "status": "running"}

    class FakeState:
        """Minimal agent state that records every message it receives."""

        def __init__(self) -> None:
            self.agent_id = child_id
            self.agent_name = "Child"
            self.parent_id = parent_id
            self.task = "map source"
            self.stop_requested = False
            self.messages: list[tuple[str, str]] = []

        def add_message(self, role: str, content: str) -> None:
            self.messages.append((role, content))

        def model_dump(self) -> dict[str, str]:
            return {"agent_id": self.agent_id}

    class FakeAgent:
        """White-box agent whose loop returns immediately."""

        def __init__(self) -> None:
            self.llm_config = LLMConfig(is_whitebox=True)

        async def agent_loop(self, _task: str) -> dict[str, bool]:
            return {"ok": True}

    captured_get: dict[str, str] = {}

    def fake_list_notes(category=None):
        assert category == "wiki"
        return {
            "success": True,
            "notes": [{"note_id": "wiki-ctx-1"}],
            "total_count": 1,
        }

    def fake_get_note(note_id: str):
        captured_get["note_id"] = note_id
        return {
            "success": True,
            "note": {
                "note_id": note_id,
                "title": "Shared Repo Wiki",
                "content": "Architecture: server/client split",
            },
        }

    monkeypatch.setattr("strix.tools.notes.notes_actions.list_notes", fake_list_notes)
    monkeypatch.setattr("strix.tools.notes.notes_actions.get_note", fake_get_note)

    state = FakeState()
    agent = FakeAgent()
    agents_graph_actions._agent_instances[child_id] = agent
    result = agents_graph_actions._run_agent_in_thread(agent, state, inherited_messages=[])

    assert result["result"] == {"ok": True}
    assert captured_get["note_id"] == "wiki-ctx-1"
    user_messages = [content for role, content in state.messages if role == "user"]
    assert user_messages
    # The wiki context block arrives in the very first user message.
    assert "<shared_repo_wiki" in user_messages[0]
    assert "Architecture: server/client split" in user_messages[0]
|
||||
|
||||
|
||||
def test_load_primary_wiki_note_prefers_repo_tag_match(monkeypatch) -> None:
    """When several wiki notes exist, the one tagged for this repo wins."""
    selected_note_ids: list[str] = []

    def fake_list_notes(category=None):
        assert category == "wiki"
        return {
            "success": True,
            "notes": [
                {"note_id": "wiki-other", "tags": ["repo:other"]},
                {"note_id": "wiki-target", "tags": ["repo:appsmith"]},
            ],
            "total_count": 2,
        }

    def fake_get_note(note_id: str):
        selected_note_ids.append(note_id)
        return {
            "success": True,
            "note": {"note_id": note_id, "title": "Repo Wiki", "content": "content"},
        }

    monkeypatch.setattr("strix.tools.notes.notes_actions.list_notes", fake_list_notes)
    monkeypatch.setattr("strix.tools.notes.notes_actions.get_note", fake_get_note)

    agent_state = SimpleNamespace(
        task="analyze /workspace/appsmith",
        context={"whitebox_repo_tags": ["repo:appsmith"]},
    )
    note = agents_graph_actions._load_primary_wiki_note(agent_state)

    assert note is not None
    assert note["note_id"] == "wiki-target"
    # Only the tag-matching note is ever fetched — no wasted lookups.
    assert selected_note_ids == ["wiki-target"]
|
||||
new file: tests/tools/test_notes_wiki.py (+214 lines) @@ -0,0 +1,214 @@
|
||||
from pathlib import Path
|
||||
|
||||
from strix.telemetry.tracer import Tracer, get_global_tracer, set_global_tracer
|
||||
from strix.tools.notes import notes_actions
|
||||
|
||||
|
||||
def _reset_notes_state() -> None:
    """Wipe the notes module's in-memory cache so each test starts cold."""
    notes_actions._notes_storage.clear()
    notes_actions._loaded_notes_run_dir = None
|
||||
|
||||
|
||||
def test_wiki_notes_are_persisted_and_removed(tmp_path: Path, monkeypatch) -> None:
    """Wiki notes get a markdown file on disk that tracks create/update/delete."""
    monkeypatch.chdir(tmp_path)
    _reset_notes_state()

    previous_tracer = get_global_tracer()
    set_global_tracer(Tracer("wiki-test-run"))

    try:
        created = notes_actions.create_note(
            title="Repo Map",
            content="## Architecture\n- monolith",
            category="wiki",
            tags=["source-map"],
        )
        assert created["success"] is True
        note_id = created["note_id"]
        assert isinstance(note_id, str)

        # Creating a wiki note should also record a backing filename.
        wiki_filename = notes_actions._notes_storage[note_id].get("wiki_filename")
        assert isinstance(wiki_filename, str)

        wiki_path = tmp_path / "strix_runs" / "wiki-test-run" / "wiki" / wiki_filename
        assert wiki_path.exists()
        assert "## Architecture" in wiki_path.read_text(encoding="utf-8")

        # Updating the note rewrites the file on disk.
        updated = notes_actions.update_note(
            note_id=note_id,
            content="## Architecture\n- service-oriented",
        )
        assert updated["success"] is True
        assert "service-oriented" in wiki_path.read_text(encoding="utf-8")

        # Deleting the note removes the file as well.
        deleted = notes_actions.delete_note(note_id=note_id)
        assert deleted["success"] is True
        assert wiki_path.exists() is False
    finally:
        _reset_notes_state()
        set_global_tracer(previous_tracer)  # type: ignore[arg-type]
|
||||
|
||||
|
||||
def test_notes_jsonl_replay_survives_memory_reset(tmp_path: Path, monkeypatch) -> None:
    """Every mutation is replayable from notes.jsonl after the cache is wiped."""
    monkeypatch.chdir(tmp_path)
    _reset_notes_state()

    previous_tracer = get_global_tracer()
    set_global_tracer(Tracer("notes-replay-run"))

    try:
        created = notes_actions.create_note(
            title="Auth findings",
            content="initial finding",
            category="findings",
            tags=["auth"],
        )
        assert created["success"] is True
        note_id = created["note_id"]
        assert isinstance(note_id, str)

        notes_path = tmp_path / "strix_runs" / "notes-replay-run" / "notes" / "notes.jsonl"
        assert notes_path.exists() is True

        # Drop the in-memory cache: list_notes must rebuild from the journal.
        _reset_notes_state()
        listed = notes_actions.list_notes(category="findings")
        assert listed["success"] is True
        assert listed["total_count"] == 1
        assert listed["notes"][0]["note_id"] == note_id
        # Listings carry a preview, not the full content.
        assert "content" not in listed["notes"][0]
        assert "content_preview" in listed["notes"][0]

        updated = notes_actions.update_note(note_id=note_id, content="updated finding")
        assert updated["success"] is True

        # Updates also survive a reset, and search sees the new content.
        _reset_notes_state()
        listed_after_update = notes_actions.list_notes(search="updated finding")
        assert listed_after_update["success"] is True
        assert listed_after_update["total_count"] == 1
        assert listed_after_update["notes"][0]["note_id"] == note_id
        assert listed_after_update["notes"][0]["content_preview"] == "updated finding"

        listed_with_content = notes_actions.list_notes(
            category="findings",
            include_content=True,
        )
        assert listed_with_content["success"] is True
        assert listed_with_content["total_count"] == 1
        assert listed_with_content["notes"][0]["content"] == "updated finding"

        deleted = notes_actions.delete_note(note_id=note_id)
        assert deleted["success"] is True

        # Deletions survive a reset too.
        _reset_notes_state()
        listed_after_delete = notes_actions.list_notes(category="findings")
        assert listed_after_delete["success"] is True
        assert listed_after_delete["total_count"] == 0
    finally:
        _reset_notes_state()
        set_global_tracer(previous_tracer)  # type: ignore[arg-type]
|
||||
|
||||
|
||||
def test_get_note_returns_full_note(tmp_path: Path, monkeypatch) -> None:
    """get_note returns the complete note, full content included."""
    monkeypatch.chdir(tmp_path)
    _reset_notes_state()

    previous_tracer = get_global_tracer()
    set_global_tracer(Tracer("get-note-run"))

    try:
        created = notes_actions.create_note(
            title="Repo wiki",
            content="entrypoints and sinks",
            category="wiki",
            tags=["repo:appsmith"],
        )
        assert created["success"] is True
        note_id = created["note_id"]
        assert isinstance(note_id, str)

        result = notes_actions.get_note(note_id=note_id)
        assert result["success"] is True
        assert result["note"]["note_id"] == note_id
        assert result["note"]["content"] == "entrypoints and sinks"
    finally:
        _reset_notes_state()
        set_global_tracer(previous_tracer)  # type: ignore[arg-type]
|
||||
|
||||
|
||||
def test_append_note_content_appends_delta(tmp_path: Path, monkeypatch) -> None:
    """append_note_content concatenates the delta after the existing content."""
    monkeypatch.chdir(tmp_path)
    _reset_notes_state()

    previous_tracer = get_global_tracer()
    set_global_tracer(Tracer("append-note-run"))

    try:
        created = notes_actions.create_note(
            title="Repo wiki",
            content="base",
            category="wiki",
            tags=["repo:demo"],
        )
        assert created["success"] is True
        note_id = created["note_id"]
        assert isinstance(note_id, str)

        appended = notes_actions.append_note_content(
            note_id=note_id,
            delta="\n\n## Agent Update: worker\nSummary: done",
        )
        assert appended["success"] is True

        loaded = notes_actions.get_note(note_id=note_id)
        assert loaded["success"] is True
        assert loaded["note"]["content"] == "base\n\n## Agent Update: worker\nSummary: done"
    finally:
        _reset_notes_state()
        set_global_tracer(previous_tracer)  # type: ignore[arg-type]
|
||||
|
||||
|
||||
def test_list_and_get_note_handle_wiki_repersist_oserror_gracefully(
    tmp_path: Path, monkeypatch
) -> None:
    """A failing wiki re-persist (e.g. disk full) must not break reads."""
    monkeypatch.chdir(tmp_path)
    _reset_notes_state()

    previous_tracer = get_global_tracer()
    set_global_tracer(Tracer("wiki-repersist-oserror-run"))

    try:
        created = notes_actions.create_note(
            title="Repo wiki",
            content="initial wiki content",
            category="wiki",
            tags=["repo:demo"],
        )
        assert created["success"] is True
        note_id = created["note_id"]
        assert isinstance(note_id, str)

        # Force a journal replay, during which re-persisting the wiki file fails.
        _reset_notes_state()

        def _raise_oserror(*_args, **_kwargs) -> None:
            raise OSError("disk full")

        monkeypatch.setattr(notes_actions, "_persist_wiki_note", _raise_oserror)

        # Reads still succeed from the replayed in-memory state.
        listed = notes_actions.list_notes(category="wiki")
        assert listed["success"] is True
        assert listed["total_count"] == 1
        assert listed["notes"][0]["note_id"] == note_id

        fetched = notes_actions.get_note(note_id=note_id)
        assert fetched["success"] is True
        assert fetched["note"]["note_id"] == note_id
        assert fetched["note"]["content"] == "initial wiki content"
    finally:
        _reset_notes_state()
        set_global_tracer(previous_tracer)  # type: ignore[arg-type]
|
||||
Reference in New Issue
Block a user