Use Pi runtime hooks for research context hygiene
This commit is contained in:
@@ -1,11 +1,14 @@
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
|
||||
import { registerAlphaTools } from "./research-tools/alpha.js";
|
||||
import { registerAutoLog } from "./research-tools/autolog.js";
|
||||
import { registerContextReportTool } from "./research-tools/context.js";
|
||||
import { registerDiscoveryCommands } from "./research-tools/discovery.js";
|
||||
import { registerFeynmanModelCommand } from "./research-tools/feynman-model.js";
|
||||
import { installFeynmanHeader } from "./research-tools/header.js";
|
||||
import { registerHelpCommand } from "./research-tools/help.js";
|
||||
import { registerInitCommand, registerOutputsCommand } from "./research-tools/project.js";
|
||||
import { registerResumePacket } from "./research-tools/resume.js";
|
||||
import { registerServiceTierControls } from "./research-tools/service-tier.js";
|
||||
|
||||
export default function researchTools(pi: ExtensionAPI): void {
|
||||
@@ -17,10 +20,13 @@ export default function researchTools(pi: ExtensionAPI): void {
|
||||
});
|
||||
|
||||
registerAlphaTools(pi);
|
||||
registerAutoLog(pi);
|
||||
registerContextReportTool(pi);
|
||||
registerDiscoveryCommands(pi);
|
||||
registerFeynmanModelCommand(pi);
|
||||
registerHelpCommand(pi);
|
||||
registerInitCommand(pi);
|
||||
registerOutputsCommand(pi);
|
||||
registerResumePacket(pi);
|
||||
registerServiceTierControls(pi);
|
||||
}
|
||||
|
||||
@@ -7,7 +7,11 @@ import {
|
||||
readPaperCode,
|
||||
searchPapers,
|
||||
} from "@companion-ai/alpha-hub/lib";
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
import { createHash } from "node:crypto";
|
||||
import { mkdirSync, writeFileSync } from "node:fs";
|
||||
import { dirname, resolve } from "node:path";
|
||||
|
||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
||||
import { Type } from "@sinclair/typebox";
|
||||
|
||||
function formatText(value: unknown): string {
|
||||
@@ -15,6 +19,44 @@ function formatText(value: unknown): string {
|
||||
return JSON.stringify(value, null, 2);
|
||||
}
|
||||
|
||||
function toolOutputCapChars(): number {
|
||||
const raw = Number(process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS);
|
||||
return Number.isFinite(raw) && raw > 0 ? Math.floor(raw) : 32_000;
|
||||
}
|
||||
|
||||
function spillPath(ctx: ExtensionContext, toolName: string, text: string): string {
|
||||
const hash = createHash("sha256").update(text).digest("hex").slice(0, 12);
|
||||
return resolve(ctx.cwd, "outputs", ".cache", `${toolName}-${hash}.md`);
|
||||
}
|
||||
|
||||
export function formatToolResultWithSpillover(
|
||||
ctx: ExtensionContext,
|
||||
toolName: string,
|
||||
result: unknown,
|
||||
): { text: string; details: unknown } {
|
||||
const text = formatText(result);
|
||||
const cap = toolOutputCapChars();
|
||||
if (text.length <= cap) {
|
||||
return { text, details: result };
|
||||
}
|
||||
|
||||
const path = spillPath(ctx, toolName, text);
|
||||
mkdirSync(dirname(path), { recursive: true });
|
||||
writeFileSync(path, text, "utf8");
|
||||
|
||||
const head = text.slice(0, Math.min(cap, 4_000));
|
||||
const pointer = {
|
||||
feynman_spillover: true,
|
||||
tool: toolName,
|
||||
path,
|
||||
bytes: Buffer.byteLength(text, "utf8"),
|
||||
sha256: createHash("sha256").update(text).digest("hex"),
|
||||
note: "Full tool output was written to disk. Read the path in bounded chunks instead of asking the tool to return everything again.",
|
||||
head,
|
||||
};
|
||||
return { text: JSON.stringify(pointer, null, 2), details: pointer };
|
||||
}
|
||||
|
||||
export function registerAlphaTools(pi: ExtensionAPI): void {
|
||||
pi.registerTool({
|
||||
name: "alpha_search",
|
||||
@@ -27,9 +69,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
||||
Type.String({ description: "Search mode: semantic, keyword, both, agentic, or all." }),
|
||||
),
|
||||
}),
|
||||
async execute(_toolCallId, params) {
|
||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
||||
const result = await searchPapers(params.query, params.mode?.trim() || "semantic");
|
||||
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_search", result);
|
||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
||||
},
|
||||
});
|
||||
|
||||
@@ -41,9 +84,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
||||
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
|
||||
fullText: Type.Optional(Type.Boolean({ description: "Return raw full text instead of AI report." })),
|
||||
}),
|
||||
async execute(_toolCallId, params) {
|
||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
||||
const result = await getPaper(params.paper, { fullText: params.fullText });
|
||||
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_get_paper", result);
|
||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
||||
},
|
||||
});
|
||||
|
||||
@@ -55,9 +99,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
||||
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
|
||||
question: Type.String({ description: "Question about the paper." }),
|
||||
}),
|
||||
async execute(_toolCallId, params) {
|
||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
||||
const result = await askPaper(params.paper, params.question);
|
||||
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_ask_paper", result);
|
||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
||||
},
|
||||
});
|
||||
|
||||
@@ -70,13 +115,14 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
||||
note: Type.Optional(Type.String({ description: "Annotation text. Omit when clear=true." })),
|
||||
clear: Type.Optional(Type.Boolean({ description: "Clear the existing annotation." })),
|
||||
}),
|
||||
async execute(_toolCallId, params) {
|
||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
||||
const result = params.clear
|
||||
? await clearPaperAnnotation(params.paper)
|
||||
: params.note
|
||||
? await annotatePaper(params.paper, params.note)
|
||||
: (() => { throw new Error("Provide either note or clear=true."); })();
|
||||
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_annotate_paper", result);
|
||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
||||
},
|
||||
});
|
||||
|
||||
@@ -85,9 +131,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
||||
label: "Alpha List Annotations",
|
||||
description: "List all persistent local paper annotations.",
|
||||
parameters: Type.Object({}),
|
||||
async execute() {
|
||||
async execute(_toolCallId, _params, _signal, _onUpdate, ctx) {
|
||||
const result = await listPaperAnnotations();
|
||||
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_list_annotations", result);
|
||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
||||
},
|
||||
});
|
||||
|
||||
@@ -99,9 +146,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
||||
githubUrl: Type.String({ description: "GitHub repository URL." }),
|
||||
path: Type.Optional(Type.String({ description: "File or directory path. Default: '/'" })),
|
||||
}),
|
||||
async execute(_toolCallId, params) {
|
||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
||||
const result = await readPaperCode(params.githubUrl, params.path?.trim() || "/");
|
||||
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_read_code", result);
|
||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
84
extensions/research-tools/autolog.ts
Normal file
84
extensions/research-tools/autolog.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import { appendFileSync, mkdirSync, readFileSync } from "node:fs";
|
||||
import { dirname, resolve } from "node:path";
|
||||
|
||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
||||
|
||||
type AutoLogMode = "off" | "events" | "full";
|
||||
|
||||
function readAgentSettings(): Record<string, unknown> {
|
||||
const agentDir = process.env.PI_CODING_AGENT_DIR;
|
||||
if (!agentDir) return {};
|
||||
try {
|
||||
return JSON.parse(readFileSync(resolve(agentDir, "settings.json"), "utf8")) as Record<string, unknown>;
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeMode(value: unknown): AutoLogMode | undefined {
|
||||
if (typeof value !== "string") return undefined;
|
||||
const normalized = value.trim().toLowerCase();
|
||||
if (normalized === "off" || normalized === "events" || normalized === "full") return normalized;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function getAutoLogMode(): AutoLogMode {
|
||||
return normalizeMode(process.env.FEYNMAN_AUTO_LOG) ??
|
||||
normalizeMode(readAgentSettings().autoLog) ??
|
||||
"events";
|
||||
}
|
||||
|
||||
function extractMessageText(message: unknown): string {
|
||||
if (!message || typeof message !== "object") return "";
|
||||
const content = (message as { content?: unknown }).content;
|
||||
if (typeof content === "string") return content;
|
||||
if (!Array.isArray(content)) return "";
|
||||
return content
|
||||
.map((item) => {
|
||||
if (!item || typeof item !== "object") return "";
|
||||
const record = item as { type?: string; text?: unknown; thinking?: unknown; name?: unknown };
|
||||
if (record.type === "text" && typeof record.text === "string") return record.text;
|
||||
if (record.type === "thinking" && typeof record.thinking === "string") return "[thinking omitted]";
|
||||
if (record.type === "toolCall") return `[tool:${typeof record.name === "string" ? record.name : "unknown"}]`;
|
||||
return "";
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
function clip(text: string, maxChars: number): string {
|
||||
return text.length > maxChars ? `${text.slice(0, maxChars)}\n...[truncated ${text.length - maxChars} chars]` : text;
|
||||
}
|
||||
|
||||
export function autoLogPath(cwd: string, date = new Date()): string {
|
||||
const day = date.toISOString().slice(0, 10);
|
||||
return resolve(cwd, "notes", "feynman-autolog", `${day}.jsonl`);
|
||||
}
|
||||
|
||||
export function writeAutoLogEntry(cwd: string, entry: Record<string, unknown>): void {
|
||||
const path = autoLogPath(cwd);
|
||||
mkdirSync(dirname(path), { recursive: true });
|
||||
appendFileSync(path, `${JSON.stringify(entry)}\n`, "utf8");
|
||||
}
|
||||
|
||||
export function registerAutoLog(pi: ExtensionAPI): void {
|
||||
pi.on("message_end", async (event, ctx: ExtensionContext) => {
|
||||
const mode = getAutoLogMode();
|
||||
if (mode === "off") return;
|
||||
|
||||
const message = event.message as any;
|
||||
if (message.role !== "user" && message.role !== "assistant") return;
|
||||
|
||||
const text = extractMessageText(message).replace(/\s+/g, " ").trim();
|
||||
if (!text) return;
|
||||
|
||||
writeAutoLogEntry(ctx.cwd, {
|
||||
timestamp: new Date(message.timestamp ?? Date.now()).toISOString(),
|
||||
session: ctx.sessionManager.getSessionId(),
|
||||
role: message.role,
|
||||
model: message.role === "assistant" ? `${message.provider}/${message.model}` : undefined,
|
||||
mode,
|
||||
text: mode === "full" ? text : clip(text, 500),
|
||||
});
|
||||
});
|
||||
}
|
||||
53
extensions/research-tools/context.ts
Normal file
53
extensions/research-tools/context.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
||||
import { Type } from "@sinclair/typebox";
|
||||
|
||||
type ContextPosture = {
|
||||
model: string;
|
||||
contextWindow: number | null;
|
||||
estimatedInputTokens: number | null;
|
||||
utilizationPct: number | null;
|
||||
compactionThresholdHit: boolean;
|
||||
recommendedMaxWorkers: number;
|
||||
};
|
||||
|
||||
export function computeContextPosture(ctx: ExtensionContext): ContextPosture {
|
||||
const usage = ctx.getContextUsage();
|
||||
const modelWindow = typeof ctx.model?.contextWindow === "number" ? ctx.model.contextWindow : null;
|
||||
const contextWindow = usage?.contextWindow ?? modelWindow;
|
||||
const estimatedInputTokens = usage?.tokens ?? null;
|
||||
const utilizationPct = usage?.percent ?? (contextWindow && estimatedInputTokens
|
||||
? Math.round((estimatedInputTokens / contextWindow) * 1000) / 10
|
||||
: null);
|
||||
const compactionThresholdHit = utilizationPct !== null && utilizationPct >= 70;
|
||||
const availableForWorkers = contextWindow
|
||||
? Math.max(0, contextWindow - 16_384 - (estimatedInputTokens ?? 0))
|
||||
: 0;
|
||||
const recommendedMaxWorkers = contextWindow === null
|
||||
? 1
|
||||
: Math.max(1, Math.min(4, Math.floor(availableForWorkers / 24_000) || 1));
|
||||
|
||||
return {
|
||||
model: ctx.model ? `${ctx.model.provider}/${ctx.model.id}` : "not set",
|
||||
contextWindow,
|
||||
estimatedInputTokens,
|
||||
utilizationPct,
|
||||
compactionThresholdHit,
|
||||
recommendedMaxWorkers,
|
||||
};
|
||||
}
|
||||
|
||||
export function registerContextReportTool(pi: ExtensionAPI): void {
|
||||
pi.registerTool({
|
||||
name: "context_report",
|
||||
label: "Context Report",
|
||||
description: "Report current Pi context usage, compaction threshold posture, and safe worker-count guidance.",
|
||||
parameters: Type.Object({}),
|
||||
async execute(_toolCallId, _params, _signal, _onUpdate, ctx) {
|
||||
const report = computeContextPosture(ctx);
|
||||
return {
|
||||
content: [{ type: "text", text: JSON.stringify(report, null, 2) }],
|
||||
details: report,
|
||||
};
|
||||
},
|
||||
});
|
||||
}
|
||||
92
extensions/research-tools/resume.ts
Normal file
92
extensions/research-tools/resume.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
|
||||
import { join, resolve } from "node:path";
|
||||
|
||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
||||
|
||||
type ResumeArtifact = {
|
||||
path: string;
|
||||
mtimeMs: number;
|
||||
};
|
||||
|
||||
function collectFiles(root: string, predicate: (path: string) => boolean): ResumeArtifact[] {
|
||||
if (!existsSync(root)) return [];
|
||||
const files: ResumeArtifact[] = [];
|
||||
for (const entry of readdirSync(root, { withFileTypes: true })) {
|
||||
const path = join(root, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
files.push(...collectFiles(path, predicate));
|
||||
continue;
|
||||
}
|
||||
if (!entry.isFile() || !predicate(path)) continue;
|
||||
try {
|
||||
files.push({ path, mtimeMs: statSync(path).mtimeMs });
|
||||
} catch {}
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
function tail(text: string, maxChars: number): string {
|
||||
return text.length <= maxChars ? text : text.slice(text.length - maxChars);
|
||||
}
|
||||
|
||||
/**
 * Assemble a bounded, plain-text summary of on-disk project state so a fresh
 * session can resume without replaying history.
 *
 * Sections, in order: up to 3 most-recent plan files (plus a tail of the
 * newest plan), up to 5 most-recent state files, and a tail of CHANGELOG.md.
 *
 * @param cwd Project root to scan.
 * @param maxChars Hard cap on the packet size; truncation keeps the END.
 * @returns The packet text, or undefined when no artifacts exist on disk.
 */
export function buildResumePacket(cwd: string, maxChars = 4_000): string | undefined {
	// Newest-first: sort by mtime descending, keep the 3 most recent plans.
	const plans = collectFiles(resolve(cwd, "outputs", ".plans"), (path) => path.endsWith(".md"))
		.sort((a, b) => b.mtimeMs - a.mtimeMs)
		.slice(0, 3);
	// Same ordering for state artifacts (json/jsonl/md), keeping the 5 newest.
	const stateFiles = collectFiles(resolve(cwd, "outputs", ".state"), (path) => /\.(json|jsonl|md)$/i.test(path))
		.sort((a, b) => b.mtimeMs - a.mtimeMs)
		.slice(0, 5);
	const changelogPath = resolve(cwd, "CHANGELOG.md");

	// Nothing to summarize: signal "no packet" rather than emit an empty one.
	if (plans.length === 0 && stateFiles.length === 0 && !existsSync(changelogPath)) {
		return undefined;
	}

	const lines: string[] = [
		"[feynman resume packet]",
		"This is a bounded project-state summary from disk. Prefer these paths over guessing prior workflow state.",
	];

	if (plans.length > 0) {
		lines.push("", "Recent plans:");
		for (const plan of plans) {
			lines.push(`- ${plan.path}`);
		}
		// plans[0] is the most recently modified plan (sorted above); the `!`
		// is safe because plans.length > 0 was just checked.
		const newestPlan = plans[0]!;
		// Best-effort read: a plan deleted between stat and read is omitted.
		try {
			lines.push("", `Newest plan tail (${newestPlan.path}):`, tail(readFileSync(newestPlan.path, "utf8"), 1_500));
		} catch {}
	}

	if (stateFiles.length > 0) {
		lines.push("", "Recent state files:");
		for (const file of stateFiles) {
			lines.push(`- ${file.path}`);
		}
	}

	if (existsSync(changelogPath)) {
		// Best-effort, same as the plan tail above.
		try {
			lines.push("", "CHANGELOG tail:", tail(readFileSync(changelogPath, "utf8"), 1_200));
		} catch {}
	}

	// Final clamp keeps the END of the packet (most recent info) under maxChars.
	return tail(lines.join("\n"), maxChars);
}
|
||||
|
||||
export function registerResumePacket(pi: ExtensionAPI): void {
|
||||
pi.on("session_start", async (_event, ctx: ExtensionContext) => {
|
||||
if (process.env.FEYNMAN_RESUME_PACKET === "off") return;
|
||||
const packet = buildResumePacket(ctx.cwd);
|
||||
if (!packet) return;
|
||||
pi.sendMessage(
|
||||
{
|
||||
customType: "feynman_resume_packet",
|
||||
content: packet,
|
||||
display: false,
|
||||
details: { source: "outputs/.plans outputs/.state CHANGELOG.md" },
|
||||
},
|
||||
{ triggerTurn: false, deliverAs: "nextTurn" },
|
||||
);
|
||||
});
|
||||
}
|
||||
Reference in New Issue
Block a user