1 Commits

Author SHA1 Message Date
Advait Paliwal
f0bbb25910 Use Pi runtime hooks for research context hygiene 2026-04-17 10:38:42 -07:00
18 changed files with 480 additions and 25 deletions

View File

@@ -25,7 +25,7 @@ curl -fsSL https://feynman.is/install | bash
irm https://feynman.is/install.ps1 | iex irm https://feynman.is/install.ps1 | iex
``` ```
The one-line installer fetches the latest tagged release. To pin a version, pass it explicitly, for example `curl -fsSL https://feynman.is/install | bash -s -- 0.2.25`. The one-line installer fetches the latest tagged release. To pin a version, pass it explicitly, for example `curl -fsSL https://feynman.is/install | bash -s -- 0.2.26`.
The installer downloads a standalone native bundle with its own Node.js runtime. The installer downloads a standalone native bundle with its own Node.js runtime.
@@ -35,6 +35,8 @@ To uninstall the standalone app, remove the launcher and runtime bundle, then op
Local models are supported through the setup flow. For LM Studio, run `feynman setup`, choose `LM Studio`, and keep the default `http://localhost:1234/v1` unless you changed the server port. For LiteLLM, choose `LiteLLM Proxy` and keep the default `http://localhost:4000/v1`. For Ollama or vLLM, choose `Custom provider (baseUrl + API key)`, use `openai-completions`, and point it at the local `/v1` endpoint. Local models are supported through the setup flow. For LM Studio, run `feynman setup`, choose `LM Studio`, and keep the default `http://localhost:1234/v1` unless you changed the server port. For LiteLLM, choose `LiteLLM Proxy` and keep the default `http://localhost:4000/v1`. For Ollama or vLLM, choose `Custom provider (baseUrl + API key)`, use `openai-completions`, and point it at the local `/v1` endpoint.
Feynman uses Pi's own runtime hooks for context hygiene: Pi compaction/retry settings are seeded by default, `context_report` exposes the current Pi context usage to the model, oversized alphaXiv tool results spill over to `outputs/.cache/`, and a bounded resume packet is injected from `outputs/.plans/`, `outputs/.state/`, and `CHANGELOG.md` when those files exist. Automatic session logging writes JSONL snippets to `notes/feynman-autolog/`; set `FEYNMAN_AUTO_LOG=off` to disable it or `FEYNMAN_AUTO_LOG=full` for full text.
### Skills Only ### Skills Only
If you want just the research skills without the full terminal app: If you want just the research skills without the full terminal app:

View File

@@ -1,11 +1,14 @@
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent"; import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { registerAlphaTools } from "./research-tools/alpha.js"; import { registerAlphaTools } from "./research-tools/alpha.js";
import { registerAutoLog } from "./research-tools/autolog.js";
import { registerContextReportTool } from "./research-tools/context.js";
import { registerDiscoveryCommands } from "./research-tools/discovery.js"; import { registerDiscoveryCommands } from "./research-tools/discovery.js";
import { registerFeynmanModelCommand } from "./research-tools/feynman-model.js"; import { registerFeynmanModelCommand } from "./research-tools/feynman-model.js";
import { installFeynmanHeader } from "./research-tools/header.js"; import { installFeynmanHeader } from "./research-tools/header.js";
import { registerHelpCommand } from "./research-tools/help.js"; import { registerHelpCommand } from "./research-tools/help.js";
import { registerInitCommand, registerOutputsCommand } from "./research-tools/project.js"; import { registerInitCommand, registerOutputsCommand } from "./research-tools/project.js";
import { registerResumePacket } from "./research-tools/resume.js";
import { registerServiceTierControls } from "./research-tools/service-tier.js"; import { registerServiceTierControls } from "./research-tools/service-tier.js";
export default function researchTools(pi: ExtensionAPI): void { export default function researchTools(pi: ExtensionAPI): void {
@@ -17,10 +20,13 @@ export default function researchTools(pi: ExtensionAPI): void {
}); });
registerAlphaTools(pi); registerAlphaTools(pi);
registerAutoLog(pi);
registerContextReportTool(pi);
registerDiscoveryCommands(pi); registerDiscoveryCommands(pi);
registerFeynmanModelCommand(pi); registerFeynmanModelCommand(pi);
registerHelpCommand(pi); registerHelpCommand(pi);
registerInitCommand(pi); registerInitCommand(pi);
registerOutputsCommand(pi); registerOutputsCommand(pi);
registerResumePacket(pi);
registerServiceTierControls(pi); registerServiceTierControls(pi);
} }

View File

@@ -7,7 +7,11 @@ import {
readPaperCode, readPaperCode,
searchPapers, searchPapers,
} from "@companion-ai/alpha-hub/lib"; } from "@companion-ai/alpha-hub/lib";
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent"; import { createHash } from "node:crypto";
import { mkdirSync, writeFileSync } from "node:fs";
import { dirname, resolve } from "node:path";
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
import { Type } from "@sinclair/typebox"; import { Type } from "@sinclair/typebox";
function formatText(value: unknown): string { function formatText(value: unknown): string {
@@ -15,6 +19,44 @@ function formatText(value: unknown): string {
return JSON.stringify(value, null, 2); return JSON.stringify(value, null, 2);
} }
/**
 * Character cap for tool output returned inline to the model.
 * Override with FEYNMAN_TOOL_OUTPUT_CAP_CHARS (any positive number, floored);
 * non-numeric or non-positive values fall back to the 32k default.
 */
function toolOutputCapChars(): number {
  const configured = Number(process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS);
  if (!Number.isFinite(configured) || configured <= 0) {
    return 32_000;
  }
  return Math.floor(configured);
}
/**
 * Deterministic spill-file location under <cwd>/outputs/.cache, keyed by the
 * tool name plus a 12-hex-char sha256 prefix of the payload so identical
 * outputs reuse the same file.
 */
function spillPath(ctx: ExtensionContext, toolName: string, text: string): string {
  const digest = createHash("sha256").update(text).digest("hex");
  const fileName = `${toolName}-${digest.slice(0, 12)}.md`;
  return resolve(ctx.cwd, "outputs", ".cache", fileName);
}
/**
 * Cap a tool result for inline return to the model.
 *
 * Results at or under the configured cap pass through unchanged. Larger
 * results are written whole to a spill file under outputs/.cache and replaced
 * by a compact JSON pointer carrying the path, byte size, sha256, and a short
 * head preview, so the model can re-read the file in bounded chunks.
 */
export function formatToolResultWithSpillover(
  ctx: ExtensionContext,
  toolName: string,
  result: unknown,
): { text: string; details: unknown } {
  const rendered = formatText(result);
  const cap = toolOutputCapChars();
  if (rendered.length <= cap) {
    return { text: rendered, details: result };
  }
  // Persist the full payload on disk before handing back the pointer.
  const path = spillPath(ctx, toolName, rendered);
  mkdirSync(dirname(path), { recursive: true });
  writeFileSync(path, rendered, "utf8");
  const pointer = {
    feynman_spillover: true,
    tool: toolName,
    path,
    bytes: Buffer.byteLength(rendered, "utf8"),
    sha256: createHash("sha256").update(rendered).digest("hex"),
    note: "Full tool output was written to disk. Read the path in bounded chunks instead of asking the tool to return everything again.",
    // Preview is capped at 4k even when the configured cap is larger.
    head: rendered.slice(0, Math.min(cap, 4_000)),
  };
  return { text: JSON.stringify(pointer, null, 2), details: pointer };
}
export function registerAlphaTools(pi: ExtensionAPI): void { export function registerAlphaTools(pi: ExtensionAPI): void {
pi.registerTool({ pi.registerTool({
name: "alpha_search", name: "alpha_search",
@@ -27,9 +69,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
Type.String({ description: "Search mode: semantic, keyword, both, agentic, or all." }), Type.String({ description: "Search mode: semantic, keyword, both, agentic, or all." }),
), ),
}), }),
async execute(_toolCallId, params) { async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
const result = await searchPapers(params.query, params.mode?.trim() || "semantic"); const result = await searchPapers(params.query, params.mode?.trim() || "semantic");
return { content: [{ type: "text", text: formatText(result) }], details: result }; const formatted = formatToolResultWithSpillover(ctx, "alpha_search", result);
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -41,9 +84,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }), paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
fullText: Type.Optional(Type.Boolean({ description: "Return raw full text instead of AI report." })), fullText: Type.Optional(Type.Boolean({ description: "Return raw full text instead of AI report." })),
}), }),
async execute(_toolCallId, params) { async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
const result = await getPaper(params.paper, { fullText: params.fullText }); const result = await getPaper(params.paper, { fullText: params.fullText });
return { content: [{ type: "text", text: formatText(result) }], details: result }; const formatted = formatToolResultWithSpillover(ctx, "alpha_get_paper", result);
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -55,9 +99,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }), paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
question: Type.String({ description: "Question about the paper." }), question: Type.String({ description: "Question about the paper." }),
}), }),
async execute(_toolCallId, params) { async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
const result = await askPaper(params.paper, params.question); const result = await askPaper(params.paper, params.question);
return { content: [{ type: "text", text: formatText(result) }], details: result }; const formatted = formatToolResultWithSpillover(ctx, "alpha_ask_paper", result);
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -70,13 +115,14 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
note: Type.Optional(Type.String({ description: "Annotation text. Omit when clear=true." })), note: Type.Optional(Type.String({ description: "Annotation text. Omit when clear=true." })),
clear: Type.Optional(Type.Boolean({ description: "Clear the existing annotation." })), clear: Type.Optional(Type.Boolean({ description: "Clear the existing annotation." })),
}), }),
async execute(_toolCallId, params) { async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
const result = params.clear const result = params.clear
? await clearPaperAnnotation(params.paper) ? await clearPaperAnnotation(params.paper)
: params.note : params.note
? await annotatePaper(params.paper, params.note) ? await annotatePaper(params.paper, params.note)
: (() => { throw new Error("Provide either note or clear=true."); })(); : (() => { throw new Error("Provide either note or clear=true."); })();
return { content: [{ type: "text", text: formatText(result) }], details: result }; const formatted = formatToolResultWithSpillover(ctx, "alpha_annotate_paper", result);
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -85,9 +131,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
label: "Alpha List Annotations", label: "Alpha List Annotations",
description: "List all persistent local paper annotations.", description: "List all persistent local paper annotations.",
parameters: Type.Object({}), parameters: Type.Object({}),
async execute() { async execute(_toolCallId, _params, _signal, _onUpdate, ctx) {
const result = await listPaperAnnotations(); const result = await listPaperAnnotations();
return { content: [{ type: "text", text: formatText(result) }], details: result }; const formatted = formatToolResultWithSpillover(ctx, "alpha_list_annotations", result);
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -99,9 +146,10 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
githubUrl: Type.String({ description: "GitHub repository URL." }), githubUrl: Type.String({ description: "GitHub repository URL." }),
path: Type.Optional(Type.String({ description: "File or directory path. Default: '/'" })), path: Type.Optional(Type.String({ description: "File or directory path. Default: '/'" })),
}), }),
async execute(_toolCallId, params) { async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
const result = await readPaperCode(params.githubUrl, params.path?.trim() || "/"); const result = await readPaperCode(params.githubUrl, params.path?.trim() || "/");
return { content: [{ type: "text", text: formatText(result) }], details: result }; const formatted = formatToolResultWithSpillover(ctx, "alpha_read_code", result);
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
} }

View File

@@ -0,0 +1,84 @@
import { appendFileSync, mkdirSync, readFileSync } from "node:fs";
import { dirname, resolve } from "node:path";
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
type AutoLogMode = "off" | "events" | "full";

/** Best-effort read of Pi's settings.json from PI_CODING_AGENT_DIR; {} when unset or unreadable. */
function readAgentSettings(): Record<string, unknown> {
  const agentDir = process.env.PI_CODING_AGENT_DIR;
  if (!agentDir) return {};
  try {
    const raw = readFileSync(resolve(agentDir, "settings.json"), "utf8");
    return JSON.parse(raw) as Record<string, unknown>;
  } catch {
    return {};
  }
}

/** Coerce an arbitrary value to a known AutoLogMode (case/whitespace-insensitive), else undefined. */
function normalizeMode(value: unknown): AutoLogMode | undefined {
  if (typeof value !== "string") return undefined;
  const candidate = value.trim().toLowerCase();
  switch (candidate) {
    case "off":
    case "events":
    case "full":
      return candidate;
    default:
      return undefined;
  }
}

/** Resolve the active auto-log mode: FEYNMAN_AUTO_LOG env first, then agent settings, defaulting to "events". */
export function getAutoLogMode(): AutoLogMode {
  const fromEnv = normalizeMode(process.env.FEYNMAN_AUTO_LOG);
  if (fromEnv) return fromEnv;
  return normalizeMode(readAgentSettings().autoLog) ?? "events";
}
/**
 * Pull loggable text out of a chat message's content.
 * String content passes through as-is. Array content is flattened part by
 * part: text parts verbatim, thinking parts redacted to a placeholder, tool
 * calls rendered as "[tool:<name>]"; everything else is dropped.
 */
function extractMessageText(message: unknown): string {
  if (!message || typeof message !== "object") return "";
  const content = (message as { content?: unknown }).content;
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return "";
  const parts: string[] = [];
  for (const item of content) {
    if (!item || typeof item !== "object") continue;
    const record = item as { type?: string; text?: unknown; thinking?: unknown; name?: unknown };
    if (record.type === "text" && typeof record.text === "string") {
      // Skip empty text parts (original behavior via filter(Boolean)).
      if (record.text) parts.push(record.text);
    } else if (record.type === "thinking" && typeof record.thinking === "string") {
      parts.push("[thinking omitted]");
    } else if (record.type === "toolCall") {
      parts.push(`[tool:${typeof record.name === "string" ? record.name : "unknown"}]`);
    }
  }
  return parts.join("\n");
}
/** Truncate text to maxChars, appending a note stating how many characters were dropped. */
function clip(text: string, maxChars: number): string {
  if (text.length <= maxChars) return text;
  const dropped = text.length - maxChars;
  return `${text.slice(0, maxChars)}\n...[truncated ${dropped} chars]`;
}
/** Path of the auto-log file for `date`: <cwd>/notes/feynman-autolog/YYYY-MM-DD.jsonl (UTC date). */
export function autoLogPath(cwd: string, date = new Date()): string {
  return resolve(cwd, "notes", "feynman-autolog", `${date.toISOString().slice(0, 10)}.jsonl`);
}

/** Append `entry` as one JSON line to today's auto-log, creating parent directories as needed. */
export function writeAutoLogEntry(cwd: string, entry: Record<string, unknown>): void {
  const target = autoLogPath(cwd);
  mkdirSync(dirname(target), { recursive: true });
  appendFileSync(target, `${JSON.stringify(entry)}\n`, "utf8");
}
/**
 * Hook message_end to append user/assistant messages to the JSONL auto-log.
 * Mode comes from getAutoLogMode(): "off" skips, "events" clips text to 500
 * chars, "full" keeps the whole (whitespace-collapsed) text.
 *
 * Fix: the original cast `event.message as any` and dereferenced it blindly,
 * so a missing or non-object message would throw inside the async handler.
 * Narrow from unknown and skip malformed events instead.
 */
export function registerAutoLog(pi: ExtensionAPI): void {
  pi.on("message_end", async (event, ctx: ExtensionContext) => {
    const mode = getAutoLogMode();
    if (mode === "off") return;
    const raw: unknown = event.message;
    if (!raw || typeof raw !== "object") return; // tolerate malformed events
    const message = raw as Record<string, unknown>;
    const role = message.role;
    if (role !== "user" && role !== "assistant") return;
    const text = extractMessageText(message).replace(/\s+/g, " ").trim();
    if (!text) return;
    // assumes timestamp is epoch ms, an ISO string, or a Date — TODO confirm against Pi's message type
    const ts = message.timestamp;
    const timestamp =
      typeof ts === "number" || typeof ts === "string" || ts instanceof Date
        ? new Date(ts).toISOString()
        : new Date().toISOString();
    writeAutoLogEntry(ctx.cwd, {
      timestamp,
      session: ctx.sessionManager.getSessionId(),
      role,
      model: role === "assistant" ? `${message.provider}/${message.model}` : undefined,
      mode,
      text: mode === "full" ? text : clip(text, 500),
    });
  });
}

View File

@@ -0,0 +1,53 @@
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
import { Type } from "@sinclair/typebox";
type ContextPosture = {
  model: string;
  contextWindow: number | null;
  estimatedInputTokens: number | null;
  utilizationPct: number | null;
  compactionThresholdHit: boolean;
  recommendedMaxWorkers: number;
};

/**
 * Snapshot the current Pi context posture for the active session.
 *
 * Prefers live numbers from ctx.getContextUsage(), falling back to the active
 * model's declared context window. utilizationPct is rounded to one decimal;
 * compactionThresholdHit flags utilization at or above 70%.
 * recommendedMaxWorkers budgets ~24k tokens per worker after a 16k reserve,
 * clamped to 1..4 (1 when the window is unknown).
 *
 * Fix: the original fallback used truthiness (`contextWindow &&
 * estimatedInputTokens`), so a valid 0-token reading reported utilization as
 * null instead of 0%. Also dropped a redundant `|| 1` that was already
 * covered by the Math.max(1, ...) clamp.
 */
export function computeContextPosture(ctx: ExtensionContext): ContextPosture {
  const usage = ctx.getContextUsage();
  const modelWindow = typeof ctx.model?.contextWindow === "number" ? ctx.model.contextWindow : null;
  const contextWindow = usage?.contextWindow ?? modelWindow;
  const estimatedInputTokens = usage?.tokens ?? null;
  // Use the reported percent when present; otherwise derive it, treating
  // 0 tokens as a valid 0% reading.
  const utilizationPct = usage?.percent ?? (contextWindow !== null && contextWindow > 0 && estimatedInputTokens !== null
    ? Math.round((estimatedInputTokens / contextWindow) * 1000) / 10
    : null);
  const compactionThresholdHit = utilizationPct !== null && utilizationPct >= 70;
  const availableForWorkers = contextWindow !== null && contextWindow > 0
    ? Math.max(0, contextWindow - 16_384 - (estimatedInputTokens ?? 0))
    : 0;
  const recommendedMaxWorkers = contextWindow === null
    ? 1
    : Math.max(1, Math.min(4, Math.floor(availableForWorkers / 24_000)));
  return {
    model: ctx.model ? `${ctx.model.provider}/${ctx.model.id}` : "not set",
    contextWindow,
    estimatedInputTokens,
    utilizationPct,
    compactionThresholdHit,
    recommendedMaxWorkers,
  };
}
/** Register the `context_report` tool: returns the posture snapshot as pretty-printed JSON plus structured details. */
export function registerContextReportTool(pi: ExtensionAPI): void {
  pi.registerTool({
    name: "context_report",
    label: "Context Report",
    description: "Report current Pi context usage, compaction threshold posture, and safe worker-count guidance.",
    parameters: Type.Object({}),
    async execute(_toolCallId, _params, _signal, _onUpdate, ctx) {
      const posture = computeContextPosture(ctx);
      const text = JSON.stringify(posture, null, 2);
      return { content: [{ type: "text", text }], details: posture };
    },
  });
}

View File

@@ -0,0 +1,92 @@
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
import { join, resolve } from "node:path";
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
type ResumeArtifact = {
  path: string;
  mtimeMs: number;
};

/**
 * Recursively gather files under `root` whose path satisfies `predicate`,
 * tagging each with its mtime. A missing root yields []; files whose stat
 * fails (e.g. racing deletion) are silently skipped.
 */
function collectFiles(root: string, predicate: (path: string) => boolean): ResumeArtifact[] {
  if (!existsSync(root)) return [];
  return readdirSync(root, { withFileTypes: true }).flatMap((entry): ResumeArtifact[] => {
    const fullPath = join(root, entry.name);
    if (entry.isDirectory()) {
      return collectFiles(fullPath, predicate);
    }
    if (!entry.isFile() || !predicate(fullPath)) {
      return [];
    }
    try {
      return [{ path: fullPath, mtimeMs: statSync(fullPath).mtimeMs }];
    } catch {
      return [];
    }
  });
}
/** Return at most the last maxChars characters of text. */
function tail(text: string, maxChars: number): string {
  const excess = text.length - maxChars;
  return excess <= 0 ? text : text.slice(excess);
}
export function buildResumePacket(cwd: string, maxChars = 4_000): string | undefined {
const plans = collectFiles(resolve(cwd, "outputs", ".plans"), (path) => path.endsWith(".md"))
.sort((a, b) => b.mtimeMs - a.mtimeMs)
.slice(0, 3);
const stateFiles = collectFiles(resolve(cwd, "outputs", ".state"), (path) => /\.(json|jsonl|md)$/i.test(path))
.sort((a, b) => b.mtimeMs - a.mtimeMs)
.slice(0, 5);
const changelogPath = resolve(cwd, "CHANGELOG.md");
if (plans.length === 0 && stateFiles.length === 0 && !existsSync(changelogPath)) {
return undefined;
}
const lines: string[] = [
"[feynman resume packet]",
"This is a bounded project-state summary from disk. Prefer these paths over guessing prior workflow state.",
];
if (plans.length > 0) {
lines.push("", "Recent plans:");
for (const plan of plans) {
lines.push(`- ${plan.path}`);
}
const newestPlan = plans[0]!;
try {
lines.push("", `Newest plan tail (${newestPlan.path}):`, tail(readFileSync(newestPlan.path, "utf8"), 1_500));
} catch {}
}
if (stateFiles.length > 0) {
lines.push("", "Recent state files:");
for (const file of stateFiles) {
lines.push(`- ${file.path}`);
}
}
if (existsSync(changelogPath)) {
try {
lines.push("", "CHANGELOG tail:", tail(readFileSync(changelogPath, "utf8"), 1_200));
} catch {}
}
return tail(lines.join("\n"), maxChars);
}
/**
 * On session start, inject the resume packet (when one can be built) as a
 * hidden message delivered on the next turn without triggering a turn.
 * Set FEYNMAN_RESUME_PACKET=off to disable.
 */
export function registerResumePacket(pi: ExtensionAPI): void {
  pi.on("session_start", async (_event, ctx: ExtensionContext) => {
    if (process.env.FEYNMAN_RESUME_PACKET === "off") return;
    const packet = buildResumePacket(ctx.cwd);
    if (!packet) return;
    const message = {
      customType: "feynman_resume_packet",
      content: packet,
      display: false,
      details: { source: "outputs/.plans outputs/.state CHANGELOG.md" },
    };
    pi.sendMessage(message, { triggerTurn: false, deliverAs: "nextTurn" });
  });
}

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{ {
"name": "@companion-ai/feynman", "name": "@companion-ai/feynman",
"version": "0.2.25", "version": "0.2.26",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "@companion-ai/feynman", "name": "@companion-ai/feynman",
"version": "0.2.25", "version": "0.2.26",
"hasInstallScript": true, "hasInstallScript": true,
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {

View File

@@ -1,6 +1,6 @@
{ {
"name": "@companion-ai/feynman", "name": "@companion-ai/feynman",
"version": "0.2.25", "version": "0.2.26",
"description": "Research-first CLI agent built on Pi and alphaXiv", "description": "Research-first CLI agent built on Pi and alphaXiv",
"license": "MIT", "license": "MIT",
"type": "module", "type": "module",

View File

@@ -110,7 +110,7 @@ This usually means the release exists, but not all platform bundles were uploade
Workarounds: Workarounds:
- try again after the release finishes publishing - try again after the release finishes publishing
- pass the latest published version explicitly, e.g.: - pass the latest published version explicitly, e.g.:
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.25 & ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.26
"@ "@
} }

View File

@@ -261,7 +261,7 @@ This usually means the release exists, but not all platform bundles were uploade
Workarounds: Workarounds:
- try again after the release finishes publishing - try again after the release finishes publishing
- pass the latest published version explicitly, e.g.: - pass the latest published version explicitly, e.g.:
curl -fsSL https://feynman.is/install | bash -s -- 0.2.25 curl -fsSL https://feynman.is/install | bash -s -- 0.2.26
EOF EOF
exit 1 exit 1
fi fi

View File

@@ -1,2 +1,3 @@
export const PI_SUBAGENTS_PATCH_TARGETS: string[]; export const PI_SUBAGENTS_PATCH_TARGETS: string[];
export function patchPiSubagentsSource(relativePath: string, source: string): string; export function patchPiSubagentsSource(relativePath: string, source: string): string;
export function stripPiSubagentBuiltinModelSource(source: string): string;

View File

@@ -127,6 +127,19 @@ export function normalizeFeynmanSettings(
settings.theme = "feynman"; settings.theme = "feynman";
settings.quietStartup = true; settings.quietStartup = true;
settings.collapseChangelog = true; settings.collapseChangelog = true;
settings.compaction = {
enabled: true,
reserveTokens: 16384,
keepRecentTokens: 20000,
...(settings.compaction && typeof settings.compaction === "object" ? settings.compaction : {}),
};
settings.retry = {
enabled: true,
maxRetries: 3,
baseDelayMs: 2000,
maxDelayMs: 60000,
...(settings.retry && typeof settings.retry === "object" ? settings.retry : {}),
};
const supportedCorePackages = filterPackageSourcesForCurrentNode(CORE_PACKAGE_SOURCES); const supportedCorePackages = filterPackageSourcesForCurrentNode(CORE_PACKAGE_SOURCES);
if (!Array.isArray(settings.packages) || settings.packages.length === 0) { if (!Array.isArray(settings.packages) || settings.packages.length === 0) {
settings.packages = supportedCorePackages; settings.packages = supportedCorePackages;

View File

@@ -12,6 +12,11 @@ import { buildModelStatusSnapshotFromRecords, getAvailableModelRecords, getSuppo
import { createModelRegistry, getModelsJsonPath } from "../model/registry.js"; import { createModelRegistry, getModelsJsonPath } from "../model/registry.js";
import { getConfiguredServiceTier } from "../model/service-tier.js"; import { getConfiguredServiceTier } from "../model/service-tier.js";
type ContextRiskSummary = {
level: "low" | "medium" | "high" | "unknown";
lines: string[];
};
function findProvidersMissingApiKey(modelsJsonPath: string): string[] { function findProvidersMissingApiKey(modelsJsonPath: string): string[] {
try { try {
const raw = readFileSync(modelsJsonPath, "utf8").trim(); const raw = readFileSync(modelsJsonPath, "utf8").trim();
@@ -35,6 +40,50 @@ function findProvidersMissingApiKey(modelsJsonPath: string): string[] {
} }
} }
/** Walk `path` into nested settings objects; return the finite numeric leaf, or `fallback` when absent or non-numeric. */
function numberSetting(settings: Record<string, unknown>, path: string[], fallback: number): number {
  let cursor: unknown = settings;
  for (const key of path) {
    if (!cursor || typeof cursor !== "object") return fallback;
    cursor = (cursor as Record<string, unknown>)[key];
  }
  if (typeof cursor === "number" && Number.isFinite(cursor)) return cursor;
  return fallback;
}

/**
 * Summarize context-overflow risk for the doctor report.
 * Risk level is keyed off the model's context window (<64k high, <128k
 * medium, otherwise low); the lines echo the effective Pi compaction and
 * retry settings, falling back to the seeded defaults.
 */
export function buildContextRiskSummary(
  settings: Record<string, unknown>,
  model: { provider: string; id: string; contextWindow: number; maxTokens: number; reasoning: boolean } | undefined,
): ContextRiskSummary {
  if (!model) {
    return { level: "unknown", lines: ["context risk: unknown (no active model)"] };
  }
  const reserveTokens = numberSetting(settings, ["compaction", "reserveTokens"], 16384);
  const keepRecentTokens = numberSetting(settings, ["compaction", "keepRecentTokens"], 20000);
  const retryMax = numberSetting(settings, ["retry", "maxRetries"], 3);
  const usableWindow = Math.max(0, model.contextWindow - reserveTokens);
  let level: ContextRiskSummary["level"];
  if (model.contextWindow < 64_000) {
    level = "high";
  } else if (model.contextWindow < 128_000) {
    level = "medium";
  } else {
    level = "low";
  }
  const lines = [
    `context risk: ${level}`,
    ` model: ${model.provider}/${model.id}`,
    ` context window: ${model.contextWindow}`,
    ` usable before Pi compaction reserve: ${usableWindow}`,
    ` Pi compaction: reserve=${reserveTokens}, keepRecent=${keepRecentTokens}`,
    ` Pi retry: maxRetries=${retryMax}`,
    ` reasoning: ${model.reasoning ? "supported" : "off/not supported"}`,
  ];
  return { level, lines };
}
export type DoctorOptions = { export type DoctorOptions = {
settingsPath: string; settingsPath: string;
authPath: string; authPath: string;
@@ -164,6 +213,10 @@ export function runDoctor(options: DoctorOptions): void {
: "not set"}`, : "not set"}`,
); );
const modelStatus = collectStatusSnapshot(options); const modelStatus = collectStatusSnapshot(options);
const currentModel = typeof settings.defaultProvider === "string" && typeof settings.defaultModel === "string"
? modelRegistry.find(settings.defaultProvider, settings.defaultModel)
: undefined;
const contextRisk = buildContextRiskSummary(settings, currentModel);
console.log(`default model valid: ${modelStatus.modelValid ? "yes" : "no"}`); console.log(`default model valid: ${modelStatus.modelValid ? "yes" : "no"}`);
console.log(`authenticated providers: ${modelStatus.authenticatedProviderCount}`); console.log(`authenticated providers: ${modelStatus.authenticatedProviderCount}`);
console.log(`authenticated models: ${modelStatus.authenticatedModelCount}`); console.log(`authenticated models: ${modelStatus.authenticatedModelCount}`);
@@ -172,6 +225,9 @@ export function runDoctor(options: DoctorOptions): void {
if (modelStatus.recommendedModelReason) { if (modelStatus.recommendedModelReason) {
console.log(` why: ${modelStatus.recommendedModelReason}`); console.log(` why: ${modelStatus.recommendedModelReason}`);
} }
for (const line of contextRisk.lines) {
console.log(line);
}
const modelsError = modelRegistry.getError(); const modelsError = modelRegistry.getError();
if (modelsError) { if (modelsError) {
console.log("models.json: error"); console.log("models.json: error");

View File

@@ -258,7 +258,7 @@ test("updateConfiguredPackages batches multiple npm updates into a single instal
globalThis.fetch = (async () => ({ globalThis.fetch = (async () => ({
ok: true, ok: true,
json: async () => ({ version: "2.0.0" }), json: async () => ({ version: "2.0.0" }),
})) as typeof fetch; })) as unknown as typeof fetch;
try { try {
const result = await updateConfiguredPackages(workingDir, agentDir); const result = await updateConfiguredPackages(workingDir, agentDir);
@@ -306,7 +306,7 @@ test("updateConfiguredPackages skips native package updates on unsupported Node
globalThis.fetch = (async () => ({ globalThis.fetch = (async () => ({
ok: true, ok: true,
json: async () => ({ version: "2.0.0" }), json: async () => ({ version: "2.0.0" }),
})) as typeof fetch; })) as unknown as typeof fetch;
Object.defineProperty(process.versions, "node", { value: "25.0.0", configurable: true }); Object.defineProperty(process.versions, "node", { value: "25.0.0", configurable: true });
try { try {

View File

@@ -0,0 +1,100 @@
import test from "node:test";
import assert from "node:assert/strict";
import { existsSync, mkdirSync, mkdtempSync, readFileSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { dirname, join, resolve } from "node:path";
import type { ExtensionContext } from "@mariozechner/pi-coding-agent";
import { formatToolResultWithSpillover } from "../extensions/research-tools/alpha.js";
import { autoLogPath, writeAutoLogEntry } from "../extensions/research-tools/autolog.js";
import { computeContextPosture } from "../extensions/research-tools/context.js";
import { buildResumePacket } from "../extensions/research-tools/resume.js";
import { buildContextRiskSummary } from "../src/setup/doctor.js";
// Minimal fake ExtensionContext: just the surface the helpers under test
// read (cwd, model, getContextUsage, sessionManager). Cast via unknown
// because the real interface is much larger.
function fakeCtx(cwd: string): ExtensionContext {
  return {
    cwd,
    model: {
      provider: "test",
      id: "small",
      contextWindow: 32_000,
    },
    getContextUsage: () => ({
      tokens: 24_000,
      contextWindow: 32_000,
      percent: 75,
    }),
    sessionManager: {
      getSessionId: () => "session-1",
    },
  } as unknown as ExtensionContext;
}

// A 500-char result against a 64-char cap must be spilled: the returned text
// is a JSON pointer whose path exists on disk under outputs/.cache and holds
// the full payload. The cap env var is restored in finally so other tests
// see the original value.
test("alpha tool spillover writes oversized output to outputs cache", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-spill-"));
  const originalCap = process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS;
  process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS = "64";
  try {
    const result = formatToolResultWithSpillover(fakeCtx(root), "alpha_get_paper", { text: "x".repeat(500) });
    const parsed = JSON.parse(result.text) as { path: string; feynman_spillover: boolean };
    assert.equal(parsed.feynman_spillover, true);
    assert.equal(existsSync(parsed.path), true);
    assert.match(readFileSync(parsed.path, "utf8"), /xxxxx/);
    // NOTE(review): forward-slash regex assumes a POSIX path — confirm on Windows CI.
    assert.match(parsed.path, /outputs\/\.cache\/alpha_get_paper-/);
  } finally {
    if (originalCap === undefined) {
      delete process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS;
    } else {
      process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS = originalCap;
    }
  }
});

// With fakeCtx reporting 24k/32k (75%), the posture must echo the usage
// numbers, flag the compaction threshold, and recommend a single worker.
test("context_report posture uses Pi context usage directly", () => {
  const report = computeContextPosture(fakeCtx("/tmp"));
  assert.equal(report.model, "test/small");
  assert.equal(report.contextWindow, 32_000);
  assert.equal(report.estimatedInputTokens, 24_000);
  assert.equal(report.compactionThresholdHit, true);
  assert.equal(report.recommendedMaxWorkers, 1);
});

// writeAutoLogEntry must create the dated JSONL file under
// notes/feynman-autolog/ and round-trip the entry verbatim.
test("autolog writes dated jsonl entries under notes", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-autolog-"));
  writeAutoLogEntry(root, { role: "user", text: "hello" });
  const path = autoLogPath(root);
  assert.equal(existsSync(path), true);
  assert.deepEqual(JSON.parse(readFileSync(path, "utf8").trim()), { role: "user", text: "hello" });
});

// Seeds a plan file, a state file, and a CHANGELOG on disk, then checks the
// packet mentions each section. Uses writeFileSyncSafe (declared below;
// function declarations hoist).
test("resume packet summarizes recent plans and changelog from disk", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-resume-"));
  mkdirSync(resolve(root, "outputs", ".plans"), { recursive: true });
  mkdirSync(resolve(root, "outputs", ".state"), { recursive: true });
  const planPath = resolve(root, "outputs", ".plans", "demo.md");
  const statePath = resolve(root, "outputs", ".state", "demo.jobs.jsonl");
  writeFileSyncSafe(planPath, "# Plan\n\n- next step");
  writeFileSyncSafe(statePath, "{\"status\":\"running\"}\n");
  writeFileSyncSafe(resolve(root, "CHANGELOG.md"), "## Entry\n- verified\n");
  const packet = buildResumePacket(root);
  assert.ok(packet);
  assert.match(packet!, /Recent plans/);
  assert.match(packet!, /demo\.md/);
  assert.match(packet!, /CHANGELOG tail/);
});

// A 32k-window model must rank as "high" risk, and the summary lines must
// echo the compaction/retry overrides passed in settings.
test("doctor context risk uses Pi model context window and compaction settings", () => {
  const summary = buildContextRiskSummary(
    { compaction: { reserveTokens: 4096, keepRecentTokens: 8000 }, retry: { maxRetries: 2 } },
    { provider: "local", id: "qwen", contextWindow: 32_000, maxTokens: 4096, reasoning: true },
  );
  assert.equal(summary.level, "high");
  assert.match(summary.lines.join("\n"), /Pi compaction: reserve=4096, keepRecent=8000/);
  assert.match(summary.lines.join("\n"), /Pi retry: maxRetries=2/);
});

// Test helper: write a file, creating its parent directories first.
function writeFileSyncSafe(path: string, text: string): void {
  mkdirSync(dirname(path), { recursive: true });
  writeFileSync(path, text, "utf8");
}

View File

@@ -261,7 +261,7 @@ This usually means the release exists, but not all platform bundles were uploade
Workarounds: Workarounds:
- try again after the release finishes publishing - try again after the release finishes publishing
- pass the latest published version explicitly, e.g.: - pass the latest published version explicitly, e.g.:
curl -fsSL https://feynman.is/install | bash -s -- 0.2.25 curl -fsSL https://feynman.is/install | bash -s -- 0.2.26
EOF EOF
exit 1 exit 1
fi fi

View File

@@ -110,7 +110,7 @@ This usually means the release exists, but not all platform bundles were uploade
Workarounds: Workarounds:
- try again after the release finishes publishing - try again after the release finishes publishing
- pass the latest published version explicitly, e.g.: - pass the latest published version explicitly, e.g.:
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.25 & ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.26
"@ "@
} }

View File

@@ -117,13 +117,13 @@ These installers download the bundled `skills/` and `prompts/` trees plus the re
The one-line installer already targets the latest tagged release. To pin an exact version, pass it explicitly: The one-line installer already targets the latest tagged release. To pin an exact version, pass it explicitly:
```bash ```bash
curl -fsSL https://feynman.is/install | bash -s -- 0.2.25 curl -fsSL https://feynman.is/install | bash -s -- 0.2.26
``` ```
On Windows: On Windows:
```powershell ```powershell
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.25 & ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.26
``` ```
## Post-install setup ## Post-install setup