2 Commits

Author SHA1 Message Date
Advait Paliwal
ec4cbfb57e Update Pi runtime packages 2026-04-17 13:45:16 -07:00
Advait Paliwal
1cd1a147f2 Remove runtime hygiene extension bloat 2026-04-17 11:47:18 -07:00
26 changed files with 667 additions and 1424 deletions

View File

@@ -25,7 +25,7 @@ curl -fsSL https://feynman.is/install | bash
irm https://feynman.is/install.ps1 | iex irm https://feynman.is/install.ps1 | iex
``` ```
The one-line installer fetches the latest tagged release. To pin a version, pass it explicitly, for example `curl -fsSL https://feynman.is/install | bash -s -- 0.2.27`. The one-line installer fetches the latest tagged release. To pin a version, pass it explicitly, for example `curl -fsSL https://feynman.is/install | bash -s -- 0.2.29`.
The installer downloads a standalone native bundle with its own Node.js runtime. The installer downloads a standalone native bundle with its own Node.js runtime.
@@ -35,8 +35,6 @@ To uninstall the standalone app, remove the launcher and runtime bundle, then op
Local models are supported through the setup flow. For LM Studio, run `feynman setup`, choose `LM Studio`, and keep the default `http://localhost:1234/v1` unless you changed the server port. For LiteLLM, choose `LiteLLM Proxy` and keep the default `http://localhost:4000/v1`. For Ollama or vLLM, choose `Custom provider (baseUrl + API key)`, use `openai-completions`, and point it at the local `/v1` endpoint. Local models are supported through the setup flow. For LM Studio, run `feynman setup`, choose `LM Studio`, and keep the default `http://localhost:1234/v1` unless you changed the server port. For LiteLLM, choose `LiteLLM Proxy` and keep the default `http://localhost:4000/v1`. For Ollama or vLLM, choose `Custom provider (baseUrl + API key)`, use `openai-completions`, and point it at the local `/v1` endpoint.
Feynman uses Pi's own runtime hooks for context hygiene: Pi compaction/retry settings are seeded by default, `context_report` exposes the current Pi context usage to the model, oversized alphaXiv tool returns spill to `outputs/.cache/`, oversized custom/subagent returns spill to `outputs/.runs/`, and a bounded resume packet is injected from `outputs/.plans/`, `outputs/.state/`, and `CHANGELOG.md` when those files exist. Automatic session logging writes JSONL snippets to `notes/feynman-autolog/`; set `FEYNMAN_AUTO_LOG=off` to disable it or `FEYNMAN_AUTO_LOG=full` for full text. Feynman also locks new plan slugs under `outputs/.state/` to prevent concurrent workflow collisions and garbage-collects stale managed caches on startup.
### Skills Only ### Skills Only
If you want just the research skills without the full terminal app: If you want just the research skills without the full terminal app:

View File

@@ -1,16 +1,12 @@
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent"; import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { registerAlphaTools } from "./research-tools/alpha.js"; import { registerAlphaTools } from "./research-tools/alpha.js";
import { registerAutoLog } from "./research-tools/autolog.js";
import { registerContextReportTool } from "./research-tools/context.js";
import { registerDiscoveryCommands } from "./research-tools/discovery.js"; import { registerDiscoveryCommands } from "./research-tools/discovery.js";
import { registerFeynmanModelCommand } from "./research-tools/feynman-model.js"; import { registerFeynmanModelCommand } from "./research-tools/feynman-model.js";
import { installFeynmanHeader } from "./research-tools/header.js"; import { installFeynmanHeader } from "./research-tools/header.js";
import { registerHelpCommand } from "./research-tools/help.js"; import { registerHelpCommand } from "./research-tools/help.js";
import { registerInitCommand, registerOutputsCommand } from "./research-tools/project.js"; import { registerInitCommand, registerOutputsCommand } from "./research-tools/project.js";
import { registerResumePacket } from "./research-tools/resume.js";
import { registerServiceTierControls } from "./research-tools/service-tier.js"; import { registerServiceTierControls } from "./research-tools/service-tier.js";
import { registerStateManagement } from "./research-tools/state.js";
export default function researchTools(pi: ExtensionAPI): void { export default function researchTools(pi: ExtensionAPI): void {
const cache: { agentSummaryPromise?: Promise<{ agents: string[]; chains: string[] }> } = {}; const cache: { agentSummaryPromise?: Promise<{ agents: string[]; chains: string[] }> } = {};
@@ -21,14 +17,10 @@ export default function researchTools(pi: ExtensionAPI): void {
}); });
registerAlphaTools(pi); registerAlphaTools(pi);
registerAutoLog(pi);
registerContextReportTool(pi);
registerDiscoveryCommands(pi); registerDiscoveryCommands(pi);
registerFeynmanModelCommand(pi); registerFeynmanModelCommand(pi);
registerHelpCommand(pi); registerHelpCommand(pi);
registerInitCommand(pi); registerInitCommand(pi);
registerOutputsCommand(pi); registerOutputsCommand(pi);
registerResumePacket(pi);
registerServiceTierControls(pi); registerServiceTierControls(pi);
registerStateManagement(pi);
} }

View File

@@ -7,11 +7,7 @@ import {
readPaperCode, readPaperCode,
searchPapers, searchPapers,
} from "@companion-ai/alpha-hub/lib"; } from "@companion-ai/alpha-hub/lib";
import { createHash } from "node:crypto"; import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { mkdirSync, writeFileSync } from "node:fs";
import { dirname, resolve } from "node:path";
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
import { Type } from "@sinclair/typebox"; import { Type } from "@sinclair/typebox";
function formatText(value: unknown): string { function formatText(value: unknown): string {
@@ -19,44 +15,6 @@ function formatText(value: unknown): string {
return JSON.stringify(value, null, 2); return JSON.stringify(value, null, 2);
} }
// Character cap for inline alphaXiv tool output before spilling to disk.
// Override with FEYNMAN_TOOL_OUTPUT_CAP_CHARS; defaults to 32k characters.
function toolOutputCapChars(): number {
  const fromEnv = Number(process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS);
  if (Number.isFinite(fromEnv) && fromEnv > 0) {
    return Math.floor(fromEnv);
  }
  return 32_000;
}
// Deterministic on-disk location for spilled tool output under
// outputs/.cache/: keyed by tool name plus a short content hash so identical
// output always maps to the same file.
function spillPath(ctx: ExtensionContext, toolName: string, text: string): string {
  const digest = createHash("sha256").update(text).digest("hex");
  const shortHash = digest.slice(0, 12);
  const fileName = `${toolName}-${shortHash}.md`;
  return resolve(ctx.cwd, "outputs", ".cache", fileName);
}
// Format a tool result for the model, spilling oversized output to disk.
// Results at or under the configured cap are returned inline; larger results
// are written to outputs/.cache/ and replaced by a JSON pointer carrying the
// file path, byte size, sha256, and a bounded head excerpt.
export function formatToolResultWithSpillover(
  ctx: ExtensionContext,
  toolName: string,
  result: unknown,
): { text: string; details: unknown } {
  const text = formatText(result);
  const cap = toolOutputCapChars();
  // Under the cap: pass the formatted text straight through unchanged.
  if (text.length <= cap) {
    return { text, details: result };
  }
  // Over the cap: persist the full text, then hand the model a pointer.
  const path = spillPath(ctx, toolName, text);
  mkdirSync(dirname(path), { recursive: true });
  writeFileSync(path, text, "utf8");
  // Keep a short preview (at most 4k chars) inline so the model has context.
  const head = text.slice(0, Math.min(cap, 4_000));
  const pointer = {
    feynman_spillover: true,
    tool: toolName,
    path,
    bytes: Buffer.byteLength(text, "utf8"),
    sha256: createHash("sha256").update(text).digest("hex"),
    note: "Full tool output was written to disk. Read the path in bounded chunks instead of asking the tool to return everything again.",
    head,
  };
  return { text: JSON.stringify(pointer, null, 2), details: pointer };
}
export function registerAlphaTools(pi: ExtensionAPI): void { export function registerAlphaTools(pi: ExtensionAPI): void {
pi.registerTool({ pi.registerTool({
name: "alpha_search", name: "alpha_search",
@@ -69,10 +27,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
Type.String({ description: "Search mode: semantic, keyword, both, agentic, or all." }), Type.String({ description: "Search mode: semantic, keyword, both, agentic, or all." }),
), ),
}), }),
async execute(_toolCallId, params, _signal, _onUpdate, ctx) { async execute(_toolCallId, params) {
const result = await searchPapers(params.query, params.mode?.trim() || "semantic"); const result = await searchPapers(params.query, params.mode?.trim() || "semantic");
const formatted = formatToolResultWithSpillover(ctx, "alpha_search", result); return { content: [{ type: "text", text: formatText(result) }], details: result };
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -84,10 +41,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }), paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
fullText: Type.Optional(Type.Boolean({ description: "Return raw full text instead of AI report." })), fullText: Type.Optional(Type.Boolean({ description: "Return raw full text instead of AI report." })),
}), }),
async execute(_toolCallId, params, _signal, _onUpdate, ctx) { async execute(_toolCallId, params) {
const result = await getPaper(params.paper, { fullText: params.fullText }); const result = await getPaper(params.paper, { fullText: params.fullText });
const formatted = formatToolResultWithSpillover(ctx, "alpha_get_paper", result); return { content: [{ type: "text", text: formatText(result) }], details: result };
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -99,10 +55,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }), paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
question: Type.String({ description: "Question about the paper." }), question: Type.String({ description: "Question about the paper." }),
}), }),
async execute(_toolCallId, params, _signal, _onUpdate, ctx) { async execute(_toolCallId, params) {
const result = await askPaper(params.paper, params.question); const result = await askPaper(params.paper, params.question);
const formatted = formatToolResultWithSpillover(ctx, "alpha_ask_paper", result); return { content: [{ type: "text", text: formatText(result) }], details: result };
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -115,14 +70,13 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
note: Type.Optional(Type.String({ description: "Annotation text. Omit when clear=true." })), note: Type.Optional(Type.String({ description: "Annotation text. Omit when clear=true." })),
clear: Type.Optional(Type.Boolean({ description: "Clear the existing annotation." })), clear: Type.Optional(Type.Boolean({ description: "Clear the existing annotation." })),
}), }),
async execute(_toolCallId, params, _signal, _onUpdate, ctx) { async execute(_toolCallId, params) {
const result = params.clear const result = params.clear
? await clearPaperAnnotation(params.paper) ? await clearPaperAnnotation(params.paper)
: params.note : params.note
? await annotatePaper(params.paper, params.note) ? await annotatePaper(params.paper, params.note)
: (() => { throw new Error("Provide either note or clear=true."); })(); : (() => { throw new Error("Provide either note or clear=true."); })();
const formatted = formatToolResultWithSpillover(ctx, "alpha_annotate_paper", result); return { content: [{ type: "text", text: formatText(result) }], details: result };
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -131,10 +85,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
label: "Alpha List Annotations", label: "Alpha List Annotations",
description: "List all persistent local paper annotations.", description: "List all persistent local paper annotations.",
parameters: Type.Object({}), parameters: Type.Object({}),
async execute(_toolCallId, _params, _signal, _onUpdate, ctx) { async execute() {
const result = await listPaperAnnotations(); const result = await listPaperAnnotations();
const formatted = formatToolResultWithSpillover(ctx, "alpha_list_annotations", result); return { content: [{ type: "text", text: formatText(result) }], details: result };
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
@@ -146,10 +99,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
githubUrl: Type.String({ description: "GitHub repository URL." }), githubUrl: Type.String({ description: "GitHub repository URL." }),
path: Type.Optional(Type.String({ description: "File or directory path. Default: '/'" })), path: Type.Optional(Type.String({ description: "File or directory path. Default: '/'" })),
}), }),
async execute(_toolCallId, params, _signal, _onUpdate, ctx) { async execute(_toolCallId, params) {
const result = await readPaperCode(params.githubUrl, params.path?.trim() || "/"); const result = await readPaperCode(params.githubUrl, params.path?.trim() || "/");
const formatted = formatToolResultWithSpillover(ctx, "alpha_read_code", result); return { content: [{ type: "text", text: formatText(result) }], details: result };
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
}, },
}); });
} }

View File

@@ -1,84 +0,0 @@
import { appendFileSync, mkdirSync, readFileSync } from "node:fs";
import { dirname, resolve } from "node:path";
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
type AutoLogMode = "off" | "events" | "full";
// Best-effort load of the Pi agent's settings.json. Any failure — missing
// PI_CODING_AGENT_DIR, unreadable file, malformed JSON — degrades to {}.
function readAgentSettings(): Record<string, unknown> {
  const agentDir = process.env.PI_CODING_AGENT_DIR;
  if (!agentDir) return {};
  const settingsPath = resolve(agentDir, "settings.json");
  try {
    const raw = readFileSync(settingsPath, "utf8");
    return JSON.parse(raw) as Record<string, unknown>;
  } catch {
    return {};
  }
}
// Coerce arbitrary input into a known AutoLogMode; undefined when invalid.
// Comparison is case-insensitive and ignores surrounding whitespace.
function normalizeMode(value: unknown): AutoLogMode | undefined {
  if (typeof value !== "string") return undefined;
  switch (value.trim().toLowerCase()) {
    case "off":
      return "off";
    case "events":
      return "events";
    case "full":
      return "full";
    default:
      return undefined;
  }
}
// Resolve the active autolog mode: the FEYNMAN_AUTO_LOG env var wins, then
// the "autoLog" key from the Pi agent's settings.json, then the "events"
// default (event records with clipped text).
export function getAutoLogMode(): AutoLogMode {
  return normalizeMode(process.env.FEYNMAN_AUTO_LOG) ??
    normalizeMode(readAgentSettings().autoLog) ??
    "events";
}
// Flatten a chat message's content into plain text. String content passes
// through unchanged; array content keeps text parts, replaces thinking parts
// with a placeholder, and renders tool calls as "[tool:<name>]" markers.
// Anything else contributes nothing.
function extractMessageText(message: unknown): string {
  if (!message || typeof message !== "object") return "";
  const content = (message as { content?: unknown }).content;
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return "";
  const parts: string[] = [];
  for (const item of content) {
    if (!item || typeof item !== "object") continue;
    const record = item as { type?: string; text?: unknown; thinking?: unknown; name?: unknown };
    let piece = "";
    if (record.type === "text" && typeof record.text === "string") {
      piece = record.text;
    } else if (record.type === "thinking" && typeof record.thinking === "string") {
      piece = "[thinking omitted]";
    } else if (record.type === "toolCall") {
      piece = `[tool:${typeof record.name === "string" ? record.name : "unknown"}]`;
    }
    if (piece) parts.push(piece);
  }
  return parts.join("\n");
}
// Truncate text to maxChars, appending a note with the dropped char count.
function clip(text: string, maxChars: number): string {
  if (text.length <= maxChars) return text;
  const dropped = text.length - maxChars;
  return `${text.slice(0, maxChars)}\n...[truncated ${dropped} chars]`;
}
// Path of the per-day JSONL autolog file under notes/feynman-autolog/.
// The file name is the UTC calendar date (YYYY-MM-DD) of `date`.
export function autoLogPath(cwd: string, date = new Date()): string {
  const isoDay = date.toISOString().slice(0, 10);
  return resolve(cwd, "notes", "feynman-autolog", `${isoDay}.jsonl`);
}
// Append one JSON line to today's autolog file, creating parent directories
// as needed.
export function writeAutoLogEntry(cwd: string, entry: Record<string, unknown>): void {
  const path = autoLogPath(cwd);
  mkdirSync(dirname(path), { recursive: true });
  appendFileSync(path, `${JSON.stringify(entry)}\n`, "utf8");
}
// Subscribe to message_end and append one compact JSONL record per user or
// assistant message. The mode is re-read on every event, so env/settings
// changes take effect without restarting the session.
export function registerAutoLog(pi: ExtensionAPI): void {
  pi.on("message_end", async (event, ctx: ExtensionContext) => {
    const mode = getAutoLogMode();
    if (mode === "off") return;
    // NOTE(review): event.message is untyped here; the as-any cast assumes a
    // Pi message shape (role/provider/model/timestamp) — confirm against the
    // pi-coding-agent API rather than relying on this cast.
    const message = event.message as any;
    if (message.role !== "user" && message.role !== "assistant") return;
    // Collapse whitespace so each record stays one JSONL-friendly line.
    const text = extractMessageText(message).replace(/\s+/g, " ").trim();
    if (!text) return;
    writeAutoLogEntry(ctx.cwd, {
      timestamp: new Date(message.timestamp ?? Date.now()).toISOString(),
      session: ctx.sessionManager.getSessionId(),
      role: message.role,
      model: message.role === "assistant" ? `${message.provider}/${message.model}` : undefined,
      mode,
      // "events" mode clips to 500 chars; "full" keeps the whole text.
      text: mode === "full" ? text : clip(text, 500),
    });
  });
}

View File

@@ -1,53 +0,0 @@
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
import { Type } from "@sinclair/typebox";
type ContextPosture = {
model: string;
contextWindow: number | null;
estimatedInputTokens: number | null;
utilizationPct: number | null;
compactionThresholdHit: boolean;
recommendedMaxWorkers: number;
};
// Snapshot current context usage plus derived guidance: whether the 70%
// compaction threshold has been reached, and how many parallel workers fit
// in the remaining window (16,384 tokens reserved for the lead, ~24k tokens
// budgeted per worker, clamped to 1..4).
export function computeContextPosture(ctx: ExtensionContext): ContextPosture {
  const usage = ctx.getContextUsage();
  const modelWindow = typeof ctx.model?.contextWindow === "number" ? ctx.model.contextWindow : null;
  const contextWindow = usage?.contextWindow ?? modelWindow;
  const estimatedInputTokens = usage?.tokens ?? null;

  // Prefer the runtime-reported percentage; otherwise derive it to one
  // decimal place when both window and token count are known.
  let utilizationPct = usage?.percent ?? null;
  if (utilizationPct === null && contextWindow && estimatedInputTokens) {
    utilizationPct = Math.round((estimatedInputTokens / contextWindow) * 1000) / 10;
  }

  const compactionThresholdHit = utilizationPct !== null && utilizationPct >= 70;

  let recommendedMaxWorkers = 1;
  if (contextWindow !== null) {
    const availableForWorkers = contextWindow
      ? Math.max(0, contextWindow - 16_384 - (estimatedInputTokens ?? 0))
      : 0;
    recommendedMaxWorkers = Math.max(1, Math.min(4, Math.floor(availableForWorkers / 24_000) || 1));
  }

  return {
    model: ctx.model ? `${ctx.model.provider}/${ctx.model.id}` : "not set",
    contextWindow,
    estimatedInputTokens,
    utilizationPct,
    compactionThresholdHit,
    recommendedMaxWorkers,
  };
}
// Register the context_report tool: a zero-argument probe the model can call
// to see current context usage, compaction posture, and safe parallelism
// guidance as pretty-printed JSON.
export function registerContextReportTool(pi: ExtensionAPI): void {
  pi.registerTool({
    name: "context_report",
    label: "Context Report",
    description: "Report current Pi context usage, compaction threshold posture, and safe worker-count guidance.",
    parameters: Type.Object({}),
    async execute(_toolCallId, _params, _signal, _onUpdate, ctx) {
      const report = computeContextPosture(ctx);
      return {
        content: [{ type: "text", text: JSON.stringify(report, null, 2) }],
        details: report,
      };
    },
  });
}

View File

@@ -5,7 +5,6 @@ import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { getExtensionCommandSpec } from "../../metadata/commands.mjs"; import { getExtensionCommandSpec } from "../../metadata/commands.mjs";
import { buildProjectAgentsTemplate, buildSessionLogsReadme } from "./project-scaffold.js"; import { buildProjectAgentsTemplate, buildSessionLogsReadme } from "./project-scaffold.js";
import { collectManagedGc } from "./state.js";
async function pathExists(path: string): Promise<boolean> { async function pathExists(path: string): Promise<boolean> {
try { try {
@@ -105,15 +104,7 @@ export function registerInitCommand(pi: ExtensionAPI): void {
export function registerOutputsCommand(pi: ExtensionAPI): void { export function registerOutputsCommand(pi: ExtensionAPI): void {
pi.registerCommand("outputs", { pi.registerCommand("outputs", {
description: "Browse all research artifacts (papers, outputs, experiments, notes).", description: "Browse all research artifacts (papers, outputs, experiments, notes).",
handler: async (args, ctx) => { handler: async (_args, ctx) => {
const trimmedArgs = args.trim();
if (trimmedArgs === "gc" || trimmedArgs === "gc --dry-run") {
const dryRun = trimmedArgs.includes("--dry-run");
const result = collectManagedGc(ctx.cwd, Date.now(), undefined, { dryRun });
ctx.ui.notify(`${dryRun ? "Would remove" : "Removed"} ${result.deleted.length} managed cache file(s).`, "info");
return;
}
const items = await collectArtifacts(ctx.cwd); const items = await collectArtifacts(ctx.cwd);
if (items.length === 0) { if (items.length === 0) {
ctx.ui.notify("No artifacts found. Use /lit, /draft, /review, or /deepresearch to create some.", "info"); ctx.ui.notify("No artifacts found. Use /lit, /draft, /review, or /deepresearch to create some.", "info");

View File

@@ -1,92 +0,0 @@
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
import { join, resolve } from "node:path";
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
type ResumeArtifact = {
path: string;
mtimeMs: number;
};
// Recursively gather files under `root` that satisfy `predicate`, recording
// each file's mtime. A missing root yields an empty list; per-file stat
// failures are silently skipped (best-effort scan).
function collectFiles(root: string, predicate: (path: string) => boolean): ResumeArtifact[] {
  if (!existsSync(root)) return [];
  const files: ResumeArtifact[] = [];
  for (const entry of readdirSync(root, { withFileTypes: true })) {
    const path = join(root, entry.name);
    if (entry.isDirectory()) {
      files.push(...collectFiles(path, predicate));
      continue;
    }
    if (!entry.isFile() || !predicate(path)) continue;
    try {
      files.push({ path, mtimeMs: statSync(path).mtimeMs });
    } catch {}
  }
  return files;
}
// Return the last maxChars characters of text (the whole text if shorter).
function tail(text: string, maxChars: number): string {
  if (text.length <= maxChars) return text;
  return text.slice(text.length - maxChars);
}
// Build a bounded plain-text summary of on-disk workflow state: up to 3 most
// recent plans (plus the newest plan's tail), up to 5 recent state files, and
// the tail of CHANGELOG.md. Returns undefined when none of those sources
// exist. The finished packet is clamped to maxChars, keeping the end.
export function buildResumePacket(cwd: string, maxChars = 4_000): string | undefined {
  const plans = collectFiles(resolve(cwd, "outputs", ".plans"), (path) => path.endsWith(".md"))
    .sort((a, b) => b.mtimeMs - a.mtimeMs)
    .slice(0, 3);
  const stateFiles = collectFiles(resolve(cwd, "outputs", ".state"), (path) => /\.(json|jsonl|md)$/i.test(path))
    .sort((a, b) => b.mtimeMs - a.mtimeMs)
    .slice(0, 5);
  const changelogPath = resolve(cwd, "CHANGELOG.md");
  if (plans.length === 0 && stateFiles.length === 0 && !existsSync(changelogPath)) {
    return undefined;
  }
  const lines: string[] = [
    "[feynman resume packet]",
    "This is a bounded project-state summary from disk. Prefer these paths over guessing prior workflow state.",
  ];
  if (plans.length > 0) {
    lines.push("", "Recent plans:");
    for (const plan of plans) {
      lines.push(`- ${plan.path}`);
    }
    const newestPlan = plans[0]!;
    // Read failures are tolerated: the packet simply omits the tail.
    try {
      lines.push("", `Newest plan tail (${newestPlan.path}):`, tail(readFileSync(newestPlan.path, "utf8"), 1_500));
    } catch {}
  }
  if (stateFiles.length > 0) {
    lines.push("", "Recent state files:");
    for (const file of stateFiles) {
      lines.push(`- ${file.path}`);
    }
  }
  if (existsSync(changelogPath)) {
    try {
      lines.push("", "CHANGELOG tail:", tail(readFileSync(changelogPath, "utf8"), 1_200));
    } catch {}
  }
  // Clamp from the end so the newest information survives truncation.
  return tail(lines.join("\n"), maxChars);
}
// On session start, inject the resume packet as a hidden message delivered on
// the next turn. Disabled entirely via FEYNMAN_RESUME_PACKET=off.
export function registerResumePacket(pi: ExtensionAPI): void {
  pi.on("session_start", async (_event, ctx: ExtensionContext) => {
    if (process.env.FEYNMAN_RESUME_PACKET === "off") return;
    const packet = buildResumePacket(ctx.cwd);
    if (!packet) return;
    pi.sendMessage(
      {
        customType: "feynman_resume_packet",
        content: packet,
        display: false,
        details: { source: "outputs/.plans outputs/.state CHANGELOG.md" },
      },
      // Queue without triggering a turn so the packet rides along with the
      // user's next prompt instead of starting one itself.
      { triggerTurn: false, deliverAs: "nextTurn" },
    );
  });
}

View File

@@ -1,276 +0,0 @@
import { createHash } from "node:crypto";
import { appendFileSync, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, writeFileSync } from "node:fs";
import { basename, dirname, relative, resolve } from "node:path";
import { isToolCallEventType, type ExtensionAPI, type ExtensionContext, type ToolCallEvent } from "@mariozechner/pi-coding-agent";
type SlugLock = {
pid: number;
sessionId: string;
startedAt: string;
planPath: string;
};
type GcResult = {
deleted: string[];
kept: string[];
};
type SpillResult = {
content: { type: "text"; text: string }[];
details: unknown;
} | undefined;
type ToolResultPatch = {
content?: { type: "text"; text: string }[];
details?: unknown;
isError?: boolean;
};
const BUILT_IN_TOOL_NAMES = new Set(["bash", "read", "write", "edit", "grep", "find", "ls"]);
// True when child resolves to parent itself or to a path beneath it.
function isPathInside(parent: string, child: string): boolean {
  const rel = relative(parent, child);
  if (rel === "") return true;
  return !rel.startsWith("..") && !rel.startsWith("/");
}
// Probe whether a PID refers to a live process. Non-positive or fractional
// PIDs are rejected up front.
function pidIsLive(pid: number): boolean {
  const isValidPid = Number.isInteger(pid) && pid > 0;
  if (!isValidPid) return false;
  try {
    // Signal 0 performs existence/permission checks without delivering a signal.
    process.kill(pid, 0);
  } catch {
    return false;
  }
  return true;
}
// Parse a slug lock file; undefined when the file is missing or unparsable.
function readLock(path: string): SlugLock | undefined {
  try {
    const raw = readFileSync(path, "utf8");
    return JSON.parse(raw) as SlugLock;
  } catch {
    return undefined;
  }
}
// A lock is live only when it parsed, its start time is valid and within the
// timeout window, and the owning process still exists.
function lockIsLive(lock: SlugLock | undefined, timeoutMs: number, now = Date.now()): boolean {
  if (!lock) return false;
  const startedAtMs = Date.parse(lock.startedAt);
  if (!Number.isFinite(startedAtMs)) return false;
  if (now - startedAtMs > timeoutMs) return false;
  return pidIsLive(lock.pid);
}
// Classify a write/edit target: when it is a markdown file under
// outputs/.plans, derive its slug and the matching lock path under
// outputs/.state. Returns undefined for paths outside plan management.
function planPathInfo(cwd: string, inputPath: string): { absPath: string; slug: string; lockPath: string } | undefined {
  const absPath = resolve(cwd, inputPath);
  const plansRoot = resolve(cwd, "outputs", ".plans");
  if (!isPathInside(plansRoot, absPath) || !absPath.endsWith(".md")) return undefined;
  const slug = basename(absPath, ".md");
  const lockPath = resolve(cwd, "outputs", ".state", `${slug}.lock`);
  return { absPath, slug, lockPath };
}
// Try to claim an exclusive lock on a plan slug before a write/edit lands.
// Non-plan paths always succeed. The collision strategy defaults to "error"
// (overridable via options or FEYNMAN_SLUG_COLLISION_STRATEGY); "overwrite"
// skips locking entirely. A live lock held by another session, or an
// existing plan file not owned by this session, rejects the claim with an
// actionable reason. On success the lock file is (re)written for this
// process/session.
export function claimPlanSlug(
  cwd: string,
  sessionId: string,
  inputPath: string,
  options?: { timeoutMinutes?: number; strategy?: "suffix" | "error" | "overwrite"; now?: number },
): { ok: true; lockPath?: string } | { ok: false; reason: string } {
  const info = planPathInfo(cwd, inputPath);
  if (!info) return { ok: true };
  const strategy = options?.strategy ?? (process.env.FEYNMAN_SLUG_COLLISION_STRATEGY as "suffix" | "error" | "overwrite" | undefined) ?? "error";
  if (strategy === "overwrite") return { ok: true };
  // Locks expire after timeoutMinutes (default 30, env-overridable) or once
  // the owning PID is gone.
  const timeoutMinutes = options?.timeoutMinutes ?? (Number(process.env.FEYNMAN_SLUG_LOCK_TIMEOUT_MINUTES) || 30);
  const timeoutMs = timeoutMinutes * 60_000;
  const existingLock = readLock(info.lockPath);
  const live = lockIsLive(existingLock, timeoutMs, options?.now);
  if (live && existingLock?.sessionId !== sessionId) {
    return {
      ok: false,
      reason: `Slug "${info.slug}" is locked by another Feynman session. Use a unique slug such as ${info.slug}-2, or wait for ${info.lockPath} to expire.`,
    };
  }
  if (existsSync(info.absPath) && existingLock?.sessionId !== sessionId) {
    return {
      ok: false,
      reason: `Plan already exists at ${relative(cwd, info.absPath)}. Use a unique slug such as ${info.slug}-2 to avoid overwriting another run.`,
    };
  }
  mkdirSync(dirname(info.lockPath), { recursive: true });
  writeFileSync(
    info.lockPath,
    JSON.stringify({
      pid: process.pid,
      sessionId,
      startedAt: new Date(options?.now ?? Date.now()).toISOString(),
      planPath: info.absPath,
    }, null, 2) + "\n",
    "utf8",
  );
  return { ok: true, lockPath: info.lockPath };
}
// Retention window (days) for managed-cache GC. FEYNMAN_CACHE_RETENTION_DAYS
// overrides the 14-day default; zero is allowed (everything aged is eligible).
function managedRetentionDays(): number {
  const fromEnv = Number(process.env.FEYNMAN_CACHE_RETENTION_DAYS);
  if (Number.isFinite(fromEnv) && fromEnv >= 0) return fromEnv;
  return 14;
}
// A file is exempt from GC when it is a .gcignore marker or its leading 500
// characters contain frontmatter with "retain: true". Unreadable files are
// NOT exempt (they fall through to normal retention rules).
function gcIgnored(path: string): boolean {
  if (path.endsWith(".gcignore")) return true;
  let head: string;
  try {
    head = readFileSync(path, "utf8").slice(0, 500);
  } catch {
    return false;
  }
  return /^---[\s\S]*?retain:\s*true/im.test(head);
}
// Garbage-collect managed caches (outputs/.cache, outputs/.runs,
// outputs/.notes): delete files older than the retention cutoff, keep newer
// or exempt files, and prune directories left empty. With dryRun, file
// deletions are only reported, not performed.
// NOTE(review): empty-directory pruning is not gated on dryRun, so a dry run
// can still remove already-empty subdirectories — confirm whether intended.
export function collectManagedGc(
  cwd: string,
  now = Date.now(),
  retentionDays = managedRetentionDays(),
  options?: { dryRun?: boolean },
): GcResult {
  const roots = [
    resolve(cwd, "outputs", ".cache"),
    resolve(cwd, "outputs", ".runs"),
    resolve(cwd, "outputs", ".notes"),
  ];
  const cutoff = now - retentionDays * 24 * 60 * 60 * 1000;
  const result: GcResult = { deleted: [], kept: [] };
  const visit = (path: string) => {
    if (!existsSync(path)) return;
    for (const entry of readdirSync(path, { withFileTypes: true })) {
      const child = resolve(path, entry.name);
      if (entry.isDirectory()) {
        // Depth-first so emptied subdirectories can be removed on the way out.
        visit(child);
        try {
          if (readdirSync(child).length === 0) rmSync(child, { recursive: true, force: true });
        } catch {}
        continue;
      }
      if (!entry.isFile()) continue;
      const stat = statSync(child);
      if (gcIgnored(child) || stat.mtimeMs >= cutoff) {
        result.kept.push(child);
        continue;
      }
      if (!options?.dryRun) {
        rmSync(child, { force: true });
      }
      result.deleted.push(child);
    }
  };
  for (const root of roots) visit(root);
  return result;
}
// Concatenate the non-empty text portions of a tool result, ignoring image
// parts, joined by newlines.
function textFromToolContent(content: ToolResultContent): string {
  const pieces: string[] = [];
  for (const item of content) {
    if (item.type === "text" && item.text) pieces.push(item.text);
  }
  return pieces.join("\n");
}
type ToolResultContent = Array<{ type: "text"; text: string } | { type: "image"; data: string; mimeType: string }>;
// Character cap for inline custom/subagent tool results before spilling to
// outputs/.runs/. Override with FEYNMAN_CUSTOM_TOOL_CAP_CHARS; default 24k.
function customToolOutputCapChars(): number {
  const raw = Number(process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS);
  // Floor so a fractional env override cannot yield a non-integer cap —
  // keeps behavior consistent with toolOutputCapChars in alpha.ts.
  return Number.isFinite(raw) && raw > 0 ? Math.floor(raw) : 24_000;
}
// Spill an oversized custom/subagent tool result to outputs/.runs/ and return
// replacement content pointing at the file. Built-in tools and results at or
// under the cap return undefined (no patch applied).
export function spillLargeCustomToolResult(
  cwd: string,
  toolName: string,
  toolCallId: string,
  content: ToolResultContent,
  details: unknown,
): SpillResult {
  if (BUILT_IN_TOOL_NAMES.has(toolName)) return undefined;
  const text = textFromToolContent(content);
  const cap = customToolOutputCapChars();
  if (text.length <= cap) return undefined;
  const hash = createHash("sha256").update(text).digest("hex");
  // Sanitize the tool name so it is safe to embed in a filename.
  const safeToolName = toolName.replace(/[^a-zA-Z0-9._-]+/g, "-").slice(0, 60) || "tool";
  const path = resolve(cwd, "outputs", ".runs", `${safeToolName}-${toolCallId}-${hash.slice(0, 12)}.md`);
  mkdirSync(dirname(path), { recursive: true });
  writeFileSync(path, text, "utf8");
  const pointer = {
    feynman_spillover: true,
    tool: toolName,
    toolCallId,
    path,
    bytes: Buffer.byteLength(text, "utf8"),
    sha256: hash,
    // Bounded preview (at most 2k chars) kept inline for the model.
    head: text.slice(0, Math.min(cap, 2_000)),
    note: "Full custom/subagent tool result was written to disk. Read the path in bounded chunks when needed.",
    originalDetails: details,
  };
  return {
    content: [{ type: "text", text: JSON.stringify(pointer, null, 2) }],
    details: pointer,
  };
}
// Append one JSON line to path, creating parent directories as needed.
function appendJsonl(path: string, value: unknown): void {
  mkdirSync(dirname(path), { recursive: true });
  appendFileSync(path, `${JSON.stringify(value)}\n`, "utf8");
}
// Record a per-tool-call checkpoint (including current context usage, when
// the API exposes it) to outputs/.state/feynman.checkpoint.jsonl.
function recordCheckpoint(ctx: ExtensionContext, toolName: string, isError: boolean): void {
  appendJsonl(resolve(ctx.cwd, "outputs", ".state", "feynman.checkpoint.jsonl"), {
    timestamp: new Date().toISOString(),
    sessionId: ctx.sessionManager.getSessionId(),
    toolName,
    isError,
    context: ctx.getContextUsage?.(),
  });
}
// Record a subagent job lifecycle event (running/done/failed) to
// outputs/.state/subagent.jobs.jsonl.
function recordJobEvent(ctx: ExtensionContext, toolName: string, status: "running" | "done" | "failed", data: unknown): void {
  appendJsonl(resolve(ctx.cwd, "outputs", ".state", "subagent.jobs.jsonl"), {
    timestamp: new Date().toISOString(),
    sessionId: ctx.sessionManager.getSessionId(),
    toolName,
    status,
    data,
  });
}
// Heuristic: tool names mentioning subagent/parallel/chain/run are treated as
// subagent-style tools for job-event bookkeeping (case-insensitive).
function looksLikeSubagentTool(toolName: string): boolean {
  const subagentPattern = /subagent|parallel|chain|run/i;
  return subagentPattern.test(toolName);
}
// Wire state management into the Pi event loop: startup GC of managed caches
// (unless FEYNMAN_OUTPUTS_GC=off), plan-slug locking on write/edit tool
// calls, subagent job logging, per-call checkpoints, and spillover of
// oversized custom tool results.
export function registerStateManagement(pi: ExtensionAPI): void {
  pi.on("session_start", async (_event, ctx) => {
    if (process.env.FEYNMAN_OUTPUTS_GC === "off") return;
    collectManagedGc(ctx.cwd);
  });
  pi.on("tool_call", async (event: ToolCallEvent, ctx) => {
    const sessionId = ctx.sessionManager.getSessionId();
    // Block writes/edits that would collide with another session's plan slug.
    if (isToolCallEventType("write", event)) {
      const claim = claimPlanSlug(ctx.cwd, sessionId, event.input.path);
      if (!claim.ok) return { block: true, reason: claim.reason };
    }
    if (isToolCallEventType("edit", event)) {
      const claim = claimPlanSlug(ctx.cwd, sessionId, event.input.path);
      if (!claim.ok) return { block: true, reason: claim.reason };
    }
    if (looksLikeSubagentTool(event.toolName)) {
      recordJobEvent(ctx, event.toolName, "running", event.input);
    }
    return undefined;
  });
  pi.on("tool_result", async (event, ctx): Promise<ToolResultPatch | undefined> => {
    recordCheckpoint(ctx, event.toolName, event.isError);
    if (looksLikeSubagentTool(event.toolName)) {
      recordJobEvent(ctx, event.toolName, event.isError ? "failed" : "done", event.details ?? event.content);
    }
    // Returning a patch replaces the tool result content with a spill pointer.
    return spillLargeCustomToolResult(ctx.cwd, event.toolName, event.toolCallId, event.content as ToolResultContent, event.details);
  });
}

1105
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{ {
"name": "@companion-ai/feynman", "name": "@companion-ai/feynman",
"version": "0.2.27", "version": "0.2.29",
"description": "Research-first CLI agent built on Pi and alphaXiv", "description": "Research-first CLI agent built on Pi and alphaXiv",
"license": "MIT", "license": "MIT",
"type": "module", "type": "module",
@@ -61,16 +61,16 @@
"dependencies": { "dependencies": {
"@clack/prompts": "^1.2.0", "@clack/prompts": "^1.2.0",
"@companion-ai/alpha-hub": "^0.1.3", "@companion-ai/alpha-hub": "^0.1.3",
"@mariozechner/pi-ai": "^0.66.1", "@mariozechner/pi-ai": "^0.67.6",
"@mariozechner/pi-coding-agent": "^0.66.1", "@mariozechner/pi-coding-agent": "^0.67.6",
"@sinclair/typebox": "^0.34.48", "@sinclair/typebox": "^0.34.49",
"dotenv": "^17.3.1" "dotenv": "^17.4.2"
}, },
"overrides": { "overrides": {
"basic-ftp": "5.2.2", "basic-ftp": "5.3.0",
"@modelcontextprotocol/sdk": { "@modelcontextprotocol/sdk": {
"@hono/node-server": "1.19.13", "@hono/node-server": "1.19.14",
"hono": "4.12.12" "hono": "4.12.14"
}, },
"express": { "express": {
"router": { "router": {
@@ -80,16 +80,17 @@
"proxy-agent": { "proxy-agent": {
"pac-proxy-agent": { "pac-proxy-agent": {
"get-uri": { "get-uri": {
"basic-ftp": "5.2.2" "basic-ftp": "5.3.0"
} }
} }
}, },
"protobufjs": "7.5.5",
"minimatch": { "minimatch": {
"brace-expansion": "5.0.5" "brace-expansion": "5.0.5"
} }
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^25.5.0", "@types/node": "^25.6.0",
"tsx": "^4.21.0", "tsx": "^4.21.0",
"typescript": "^5.9.3" "typescript": "^5.9.3"
}, },

View File

@@ -6,6 +6,8 @@ topLevelCli: true
--- ---
Run a deep research workflow for: $@ Run a deep research workflow for: $@
This is an execution request, not a request to explain or implement the workflow instructions. Carry out the workflow with tools and durable files. Do not answer by describing the protocol, converting it into programming steps, or saying how someone could implement it.
You are the Lead Researcher. You plan, delegate, evaluate, verify, write, and cite. Internal orchestration is invisible to the user unless they ask. You are the Lead Researcher. You plan, delegate, evaluate, verify, write, and cite. Internal orchestration is invisible to the user unless they ask.
## 1. Plan ## 1. Plan

View File

@@ -110,7 +110,7 @@ This usually means the release exists, but not all platform bundles were uploade
Workarounds: Workarounds:
- try again after the release finishes publishing - try again after the release finishes publishing
- pass the latest published version explicitly, e.g.: - pass the latest published version explicitly, e.g.:
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.27 & ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.29
"@ "@
} }

View File

@@ -261,7 +261,7 @@ This usually means the release exists, but not all platform bundles were uploade
Workarounds: Workarounds:
- try again after the release finishes publishing - try again after the release finishes publishing
- pass the latest published version explicitly, e.g.: - pass the latest published version explicitly, e.g.:
curl -fsSL https://feynman.is/install | bash -s -- 0.2.27 curl -fsSL https://feynman.is/install | bash -s -- 0.2.29
EOF EOF
exit 1 exit 1
fi fi

View File

@@ -1,4 +1,5 @@
import { existsSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, writeFileSync } from "node:fs"; import { existsSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, writeFileSync } from "node:fs";
import { createHash } from "node:crypto";
import { resolve } from "node:path"; import { resolve } from "node:path";
import { spawnSync } from "node:child_process"; import { spawnSync } from "node:child_process";
@@ -6,6 +7,8 @@ import { stripPiSubagentBuiltinModelSource } from "./lib/pi-subagents-patch.mjs"
const appRoot = resolve(import.meta.dirname, ".."); const appRoot = resolve(import.meta.dirname, "..");
const settingsPath = resolve(appRoot, ".feynman", "settings.json"); const settingsPath = resolve(appRoot, ".feynman", "settings.json");
const packageJsonPath = resolve(appRoot, "package.json");
const packageLockPath = resolve(appRoot, "package-lock.json");
const feynmanDir = resolve(appRoot, ".feynman"); const feynmanDir = resolve(appRoot, ".feynman");
const workspaceDir = resolve(appRoot, ".feynman", "npm"); const workspaceDir = resolve(appRoot, ".feynman", "npm");
const workspaceNodeModulesDir = resolve(workspaceDir, "node_modules"); const workspaceNodeModulesDir = resolve(workspaceDir, "node_modules");
@@ -13,16 +16,29 @@ const manifestPath = resolve(workspaceDir, ".runtime-manifest.json");
const workspacePackageJsonPath = resolve(workspaceDir, "package.json"); const workspacePackageJsonPath = resolve(workspaceDir, "package.json");
const workspaceArchivePath = resolve(feynmanDir, "runtime-workspace.tgz"); const workspaceArchivePath = resolve(feynmanDir, "runtime-workspace.tgz");
const PRUNE_VERSION = 4; const PRUNE_VERSION = 4;
const PINNED_RUNTIME_PACKAGES = [
"@mariozechner/pi-agent-core",
"@mariozechner/pi-ai",
"@mariozechner/pi-coding-agent",
"@mariozechner/pi-tui",
];
function readPackageSpecs() { function readPackageSpecs() {
const settings = JSON.parse(readFileSync(settingsPath, "utf8")); const settings = JSON.parse(readFileSync(settingsPath, "utf8"));
if (!Array.isArray(settings.packages)) { const packageSpecs = Array.isArray(settings.packages)
return []; ? settings.packages
.filter((value) => typeof value === "string" && value.startsWith("npm:"))
.map((value) => value.slice(4))
: [];
for (const packageName of PINNED_RUNTIME_PACKAGES) {
const version = readLockedPackageVersion(packageName);
if (version) {
packageSpecs.push(`${packageName}@${version}`);
}
} }
return settings.packages return Array.from(new Set(packageSpecs));
.filter((value) => typeof value === "string" && value.startsWith("npm:"))
.map((value) => value.slice(4));
} }
function parsePackageName(spec) { function parsePackageName(spec) {
@@ -30,10 +46,41 @@ function parsePackageName(spec) {
return match?.[1] ?? spec; return match?.[1] ?? spec;
} }
function readLockedPackageVersion(packageName) {
if (!existsSync(packageLockPath)) {
return undefined;
}
try {
const lockfile = JSON.parse(readFileSync(packageLockPath, "utf8"));
const entry = lockfile.packages?.[`node_modules/${packageName}`];
return typeof entry?.version === "string" ? entry.version : undefined;
} catch {
return undefined;
}
}
function arraysMatch(left, right) { function arraysMatch(left, right) {
return left.length === right.length && left.every((value, index) => value === right[index]); return left.length === right.length && left.every((value, index) => value === right[index]);
} }
function hashFile(path) {
if (!existsSync(path)) {
return null;
}
return createHash("sha256").update(readFileSync(path)).digest("hex");
}
function getRuntimeInputHash() {
const hash = createHash("sha256");
for (const path of [packageJsonPath, packageLockPath, settingsPath]) {
hash.update(path);
hash.update("\0");
hash.update(hashFile(path) ?? "missing");
hash.update("\0");
}
return hash.digest("hex");
}
function workspaceIsCurrent(packageSpecs) { function workspaceIsCurrent(packageSpecs) {
if (!existsSync(manifestPath) || !existsSync(workspaceNodeModulesDir)) { if (!existsSync(manifestPath) || !existsSync(workspaceNodeModulesDir)) {
return false; return false;
@@ -44,6 +91,9 @@ function workspaceIsCurrent(packageSpecs) {
if (!Array.isArray(manifest.packageSpecs) || !arraysMatch(manifest.packageSpecs, packageSpecs)) { if (!Array.isArray(manifest.packageSpecs) || !arraysMatch(manifest.packageSpecs, packageSpecs)) {
return false; return false;
} }
if (manifest.runtimeInputHash !== getRuntimeInputHash()) {
return false;
}
if ( if (
manifest.nodeAbi !== process.versions.modules || manifest.nodeAbi !== process.versions.modules ||
manifest.platform !== process.platform || manifest.platform !== process.platform ||
@@ -97,8 +147,8 @@ function prepareWorkspace(packageSpecs) {
const result = spawnSync( const result = spawnSync(
process.env.npm_execpath ? process.execPath : "npm", process.env.npm_execpath ? process.execPath : "npm",
process.env.npm_execpath process.env.npm_execpath
? [process.env.npm_execpath, "install", "--prefer-offline", "--no-audit", "--no-fund", "--no-dry-run", "--loglevel", "error", "--prefix", workspaceDir, ...packageSpecs] ? [process.env.npm_execpath, "install", "--prefer-offline", "--no-audit", "--no-fund", "--no-dry-run", "--legacy-peer-deps", "--loglevel", "error", "--prefix", workspaceDir, ...packageSpecs]
: ["install", "--prefer-offline", "--no-audit", "--no-fund", "--no-dry-run", "--loglevel", "error", "--prefix", workspaceDir, ...packageSpecs], : ["install", "--prefer-offline", "--no-audit", "--no-fund", "--no-dry-run", "--legacy-peer-deps", "--loglevel", "error", "--prefix", workspaceDir, ...packageSpecs],
{ stdio: "inherit", env: childNpmInstallEnv() }, { stdio: "inherit", env: childNpmInstallEnv() },
); );
if (result.status !== 0) { if (result.status !== 0) {
@@ -110,15 +160,16 @@ function writeManifest(packageSpecs) {
writeFileSync( writeFileSync(
manifestPath, manifestPath,
JSON.stringify( JSON.stringify(
{ {
packageSpecs, packageSpecs,
generatedAt: new Date().toISOString(), runtimeInputHash: getRuntimeInputHash(),
nodeAbi: process.versions.modules, generatedAt: new Date().toISOString(),
nodeVersion: process.version, nodeAbi: process.versions.modules,
platform: process.platform, nodeVersion: process.version,
arch: process.arch, platform: process.platform,
pruneVersion: PRUNE_VERSION, arch: process.arch,
}, pruneVersion: PRUNE_VERSION,
},
null, null,
2, 2,
) + "\n", ) + "\n",

View File

@@ -558,6 +558,7 @@ export async function main(): Promise<void> {
normalizeFeynmanSettings(feynmanSettingsPath, bundledSettingsPath, thinkingLevel, feynmanAuthPath); normalizeFeynmanSettings(feynmanSettingsPath, bundledSettingsPath, thinkingLevel, feynmanAuthPath);
} }
const workflowCommandNames = new Set(readPromptSpecs(appRoot).filter((s) => s.topLevelCli).map((s) => s.name));
await launchPiChat({ await launchPiChat({
appRoot, appRoot,
workingDir, workingDir,
@@ -568,6 +569,6 @@ export async function main(): Promise<void> {
thinkingLevel, thinkingLevel,
explicitModelSpec, explicitModelSpec,
oneShotPrompt: values.prompt, oneShotPrompt: values.prompt,
initialPrompt: resolveInitialPrompt(command, rest, values.prompt, new Set(readPromptSpecs(appRoot).filter((s) => s.topLevelCli).map((s) => s.name))), initialPrompt: resolveInitialPrompt(command, rest, values.prompt, workflowCommandNames),
}); });
} }

View File

@@ -127,19 +127,6 @@ export function normalizeFeynmanSettings(
settings.theme = "feynman"; settings.theme = "feynman";
settings.quietStartup = true; settings.quietStartup = true;
settings.collapseChangelog = true; settings.collapseChangelog = true;
settings.compaction = {
enabled: true,
reserveTokens: 16384,
keepRecentTokens: 20000,
...(settings.compaction && typeof settings.compaction === "object" ? settings.compaction : {}),
};
settings.retry = {
enabled: true,
maxRetries: 3,
baseDelayMs: 2000,
maxDelayMs: 60000,
...(settings.retry && typeof settings.retry === "object" ? settings.retry : {}),
};
const supportedCorePackages = filterPackageSourcesForCurrentNode(CORE_PACKAGE_SOURCES); const supportedCorePackages = filterPackageSourcesForCurrentNode(CORE_PACKAGE_SOURCES);
if (!Array.isArray(settings.packages) || settings.packages.length === 0) { if (!Array.isArray(settings.packages) || settings.packages.length === 0) {
settings.packages = supportedCorePackages; settings.packages = supportedCorePackages;

View File

@@ -12,11 +12,6 @@ import { buildModelStatusSnapshotFromRecords, getAvailableModelRecords, getSuppo
import { createModelRegistry, getModelsJsonPath } from "../model/registry.js"; import { createModelRegistry, getModelsJsonPath } from "../model/registry.js";
import { getConfiguredServiceTier } from "../model/service-tier.js"; import { getConfiguredServiceTier } from "../model/service-tier.js";
type ContextRiskSummary = {
level: "low" | "medium" | "high" | "unknown";
lines: string[];
};
function findProvidersMissingApiKey(modelsJsonPath: string): string[] { function findProvidersMissingApiKey(modelsJsonPath: string): string[] {
try { try {
const raw = readFileSync(modelsJsonPath, "utf8").trim(); const raw = readFileSync(modelsJsonPath, "utf8").trim();
@@ -40,50 +35,6 @@ function findProvidersMissingApiKey(modelsJsonPath: string): string[] {
} }
} }
function numberSetting(settings: Record<string, unknown>, path: string[], fallback: number): number {
let value: unknown = settings;
for (const key of path) {
if (!value || typeof value !== "object") return fallback;
value = (value as Record<string, unknown>)[key];
}
return typeof value === "number" && Number.isFinite(value) ? value : fallback;
}
export function buildContextRiskSummary(
settings: Record<string, unknown>,
model: { provider: string; id: string; contextWindow: number; maxTokens: number; reasoning: boolean } | undefined,
): ContextRiskSummary {
if (!model) {
return {
level: "unknown",
lines: ["context risk: unknown (no active model)"],
};
}
const reserveTokens = numberSetting(settings, ["compaction", "reserveTokens"], 16384);
const keepRecentTokens = numberSetting(settings, ["compaction", "keepRecentTokens"], 20000);
const retryMax = numberSetting(settings, ["retry", "maxRetries"], 3);
const usableWindow = Math.max(0, model.contextWindow - reserveTokens);
const level = model.contextWindow < 64_000
? "high"
: model.contextWindow < 128_000
? "medium"
: "low";
return {
level,
lines: [
`context risk: ${level}`,
` model: ${model.provider}/${model.id}`,
` context window: ${model.contextWindow}`,
` usable before Pi compaction reserve: ${usableWindow}`,
` Pi compaction: reserve=${reserveTokens}, keepRecent=${keepRecentTokens}`,
` Pi retry: maxRetries=${retryMax}`,
` reasoning: ${model.reasoning ? "supported" : "off/not supported"}`,
],
};
}
export type DoctorOptions = { export type DoctorOptions = {
settingsPath: string; settingsPath: string;
authPath: string; authPath: string;
@@ -213,10 +164,6 @@ export function runDoctor(options: DoctorOptions): void {
: "not set"}`, : "not set"}`,
); );
const modelStatus = collectStatusSnapshot(options); const modelStatus = collectStatusSnapshot(options);
const currentModel = typeof settings.defaultProvider === "string" && typeof settings.defaultModel === "string"
? modelRegistry.find(settings.defaultProvider, settings.defaultModel)
: undefined;
const contextRisk = buildContextRiskSummary(settings, currentModel);
console.log(`default model valid: ${modelStatus.modelValid ? "yes" : "no"}`); console.log(`default model valid: ${modelStatus.modelValid ? "yes" : "no"}`);
console.log(`authenticated providers: ${modelStatus.authenticatedProviderCount}`); console.log(`authenticated providers: ${modelStatus.authenticatedProviderCount}`);
console.log(`authenticated models: ${modelStatus.authenticatedModelCount}`); console.log(`authenticated models: ${modelStatus.authenticatedModelCount}`);
@@ -225,9 +172,6 @@ export function runDoctor(options: DoctorOptions): void {
if (modelStatus.recommendedModelReason) { if (modelStatus.recommendedModelReason) {
console.log(` why: ${modelStatus.recommendedModelReason}`); console.log(` why: ${modelStatus.recommendedModelReason}`);
} }
for (const line of contextRisk.lines) {
console.log(line);
}
const modelsError = modelRegistry.getError(); const modelsError = modelRegistry.getError();
if (modelsError) { if (modelsError) {
console.log("models.json: error"); console.log("models.json: error");

View File

@@ -65,6 +65,8 @@ test("deepresearch workflow requires durable artifacts even when blocked", () =>
assert.match(systemPrompt, /Do not claim you are only a static model/i); assert.match(systemPrompt, /Do not claim you are only a static model/i);
assert.match(systemPrompt, /write the requested durable artifact/i); assert.match(systemPrompt, /write the requested durable artifact/i);
assert.match(deepResearchPrompt, /Do not stop after planning/i); assert.match(deepResearchPrompt, /Do not stop after planning/i);
assert.match(deepResearchPrompt, /not a request to explain or implement/i);
assert.match(deepResearchPrompt, /Do not answer by describing the protocol/i);
assert.match(deepResearchPrompt, /degraded mode/i); assert.match(deepResearchPrompt, /degraded mode/i);
assert.match(deepResearchPrompt, /Verification: BLOCKED/i); assert.match(deepResearchPrompt, /Verification: BLOCKED/i);
assert.match(deepResearchPrompt, /Never end with only an explanation in chat/i); assert.match(deepResearchPrompt, /Never end with only an explanation in chat/i);

View File

@@ -243,6 +243,10 @@ test("updateConfiguredPackages batches multiple npm updates into a single instal
` console.log(resolve(${JSON.stringify(root)}, "npm-global", "lib", "node_modules"));`, ` console.log(resolve(${JSON.stringify(root)}, "npm-global", "lib", "node_modules"));`,
` process.exit(0);`, ` process.exit(0);`,
`}`, `}`,
`if (args.length >= 4 && args[0] === "view" && args[2] === "version" && args[3] === "--json") {`,
` console.log(JSON.stringify("2.0.0"));`,
` process.exit(0);`,
`}`,
`appendFileSync(${JSON.stringify(logPath)}, JSON.stringify(args) + "\\n", "utf8");`, `appendFileSync(${JSON.stringify(logPath)}, JSON.stringify(args) + "\\n", "utf8");`,
"process.exit(0);", "process.exit(0);",
].join("\n")); ].join("\n"));
@@ -290,6 +294,10 @@ test("updateConfiguredPackages skips native package updates on unsupported Node
` console.log(resolve(${JSON.stringify(root)}, "npm-global", "lib", "node_modules"));`, ` console.log(resolve(${JSON.stringify(root)}, "npm-global", "lib", "node_modules"));`,
` process.exit(0);`, ` process.exit(0);`,
`}`, `}`,
`if (args.length >= 4 && args[0] === "view" && args[2] === "version" && args[3] === "--json") {`,
` console.log(JSON.stringify("2.0.0"));`,
` process.exit(0);`,
`}`,
`appendFileSync(${JSON.stringify(logPath)}, JSON.stringify(args) + "\\n", "utf8");`, `appendFileSync(${JSON.stringify(logPath)}, JSON.stringify(args) + "\\n", "utf8");`,
"process.exit(0);", "process.exit(0);",
].join("\n")); ].join("\n"));

View File

@@ -1,156 +0,0 @@
import test from "node:test";
import assert from "node:assert/strict";
import { existsSync, mkdirSync, mkdtempSync, readFileSync, utimesSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { dirname, join, resolve } from "node:path";
import type { ExtensionContext } from "@mariozechner/pi-coding-agent";
import { formatToolResultWithSpillover } from "../extensions/research-tools/alpha.js";
import { autoLogPath, writeAutoLogEntry } from "../extensions/research-tools/autolog.js";
import { computeContextPosture } from "../extensions/research-tools/context.js";
import { buildResumePacket } from "../extensions/research-tools/resume.js";
import { buildContextRiskSummary } from "../src/setup/doctor.js";
import { claimPlanSlug, collectManagedGc, spillLargeCustomToolResult } from "../extensions/research-tools/state.js";
/**
 * Build a minimal stand-in for Pi's ExtensionContext: a small model with a
 * 32k context window, 75% reported usage, and a fixed session id.
 * The cast is deliberate — tests only touch the fields stubbed here.
 */
function fakeCtx(cwd: string): ExtensionContext {
	const stub = {
		cwd,
		model: { provider: "test", id: "small", contextWindow: 32_000 },
		getContextUsage: () => ({ tokens: 24_000, contextWindow: 32_000, percent: 75 }),
		sessionManager: { getSessionId: () => "session-1" },
	};
	return stub as unknown as ExtensionContext;
}
test("alpha tool spillover writes oversized output to outputs cache", () => {
	// Force a tiny output cap so a 500-char payload must spill to disk.
	const root = mkdtempSync(join(tmpdir(), "feynman-spill-"));
	const savedCap = process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS;
	process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS = "64";
	try {
		const spilled = formatToolResultWithSpillover(fakeCtx(root), "alpha_get_paper", { text: "x".repeat(500) });
		const payload = JSON.parse(spilled.text) as { path: string; feynman_spillover: boolean };
		assert.equal(payload.feynman_spillover, true);
		assert.equal(existsSync(payload.path), true);
		assert.match(readFileSync(payload.path, "utf8"), /xxxxx/);
		assert.match(payload.path, /outputs\/\.cache\/alpha_get_paper-/);
	} finally {
		// Restore the env var exactly as it was so later tests are unaffected.
		if (savedCap === undefined) delete process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS;
		else process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS = savedCap;
	}
});
test("context_report posture uses Pi context usage directly", () => {
	// The posture must mirror the stubbed context exactly: 24k of 32k used.
	const posture = computeContextPosture(fakeCtx("/tmp"));
	assert.equal(posture.model, "test/small");
	assert.equal(posture.contextWindow, 32_000);
	assert.equal(posture.estimatedInputTokens, 24_000);
	assert.equal(posture.compactionThresholdHit, true);
	assert.equal(posture.recommendedMaxWorkers, 1);
});
test("autolog writes dated jsonl entries under notes", () => {
	const root = mkdtempSync(join(tmpdir(), "feynman-autolog-"));
	writeAutoLogEntry(root, { role: "user", text: "hello" });
	const logFile = autoLogPath(root);
	assert.equal(existsSync(logFile), true);
	// The log is JSONL: the (single) trimmed line must round-trip the entry.
	const firstLine = readFileSync(logFile, "utf8").trim();
	assert.deepEqual(JSON.parse(firstLine), { role: "user", text: "hello" });
});
test("resume packet summarizes recent plans and changelog from disk", () => {
	// Seed a plan, a job-state log, and a changelog, then build the packet.
	const root = mkdtempSync(join(tmpdir(), "feynman-resume-"));
	mkdirSync(resolve(root, "outputs", ".plans"), { recursive: true });
	mkdirSync(resolve(root, "outputs", ".state"), { recursive: true });
	writeFileSyncSafe(resolve(root, "outputs", ".plans", "demo.md"), "# Plan\n\n- next step");
	writeFileSyncSafe(resolve(root, "outputs", ".state", "demo.jobs.jsonl"), "{\"status\":\"running\"}\n");
	writeFileSyncSafe(resolve(root, "CHANGELOG.md"), "## Entry\n- verified\n");
	const packet = buildResumePacket(root);
	assert.ok(packet);
	for (const expected of [/Recent plans/, /demo\.md/, /CHANGELOG tail/]) {
		assert.match(packet!, expected);
	}
});
test("doctor context risk uses Pi model context window and compaction settings", () => {
	const settings = { compaction: { reserveTokens: 4096, keepRecentTokens: 8000 }, retry: { maxRetries: 2 } };
	const model = { provider: "local", id: "qwen", contextWindow: 32_000, maxTokens: 4096, reasoning: true };
	const summary = buildContextRiskSummary(settings, model);
	// A 32k window is below the 64k threshold, so the risk level is "high".
	assert.equal(summary.level, "high");
	const rendered = summary.lines.join("\n");
	assert.match(rendered, /Pi compaction: reserve=4096, keepRecent=8000/);
	assert.match(rendered, /Pi retry: maxRetries=2/);
});
test("slug lock blocks overwriting an existing plan from another session", () => {
	// A plan written by one session must not be claimable by another.
	const root = mkdtempSync(join(tmpdir(), "feynman-slug-"));
	writeFileSyncSafe(resolve(root, "outputs", ".plans", "demo.md"), "# Existing\n");
	const claim = claimPlanSlug(root, "session-2", "outputs/.plans/demo.md");
	assert.equal(claim.ok, false);
	if (!claim.ok) {
		assert.match(claim.reason, /Plan already exists/);
	}
});
test("managed cache gc deletes stale cache files and honors dry-run", () => {
	const root = mkdtempSync(join(tmpdir(), "feynman-gc-"));
	const cachePath = resolve(root, "outputs", ".cache", "old.md");
	writeFileSyncSafe(cachePath, "old");
	// Age the file 30 days so a 14-day cutoff considers it stale.
	const staleTime = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
	utimesSync(cachePath, staleTime, staleTime);
	const preview = collectManagedGc(root, Date.now(), 14, { dryRun: true });
	assert.equal(preview.deleted.length, 1);
	assert.equal(existsSync(cachePath), true); // dry run must not delete anything
	const sweep = collectManagedGc(root, Date.now(), 14);
	assert.equal(sweep.deleted.length, 1);
	assert.equal(existsSync(cachePath), false);
});
test("large custom tool results spill to outputs runs", () => {
	// Shrink the custom-tool cap so a 200-char result must spill to disk.
	const root = mkdtempSync(join(tmpdir(), "feynman-subagent-spill-"));
	const savedCap = process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS;
	process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS = "50";
	try {
		const patch = spillLargeCustomToolResult(
			root,
			"subagent",
			"call-1",
			[{ type: "text", text: "x".repeat(200) }],
			{ ok: true },
		);
		assert.ok(patch);
		const payload = JSON.parse(patch!.content[0]!.text) as { path: string; feynman_spillover: boolean };
		assert.equal(payload.feynman_spillover, true);
		assert.match(payload.path, /outputs\/\.runs\/subagent-call-1-/);
		assert.equal(existsSync(payload.path), true);
	} finally {
		// Restore the env var exactly as it was so later tests are unaffected.
		if (savedCap === undefined) delete process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS;
		else process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS = savedCap;
	}
});
/** Write `text` to `path` as UTF-8, creating any missing parent directories first. */
function writeFileSyncSafe(path: string, text: string): void {
	const parent = dirname(path);
	mkdirSync(parent, { recursive: true });
	writeFileSync(path, text, "utf8");
}

View File

@@ -1544,9 +1544,9 @@
} }
}, },
"node_modules/@hono/node-server": { "node_modules/@hono/node-server": {
"version": "1.19.13", "version": "1.19.14",
"resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.13.tgz", "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.14.tgz",
"integrity": "sha512-TsQLe4i2gvoTtrHje625ngThGBySOgSK3Xo2XRYOdqGN1teR8+I7vchQC46uLJi8OF62YTYA3AhSpumtkhsaKQ==", "integrity": "sha512-GwtvgtXxnWsucXvbQXkRgqksiH2Qed37H9xHZocE5sA3N8O8O8/8FA3uclQXxXVzc9XBZuEOMK7+r02FmSpHtw==",
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">=18.14.1" "node": ">=18.14.1"
@@ -7998,9 +7998,9 @@
} }
}, },
"node_modules/hono": { "node_modules/hono": {
"version": "4.12.12", "version": "4.12.14",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.12.12.tgz", "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.14.tgz",
"integrity": "sha512-p1JfQMKaceuCbpJKAPKVqyqviZdS0eUxH9v82oWo1kb9xjQ5wA6iP3FNVAPDFlz5/p7d45lO+BpSk1tuSZMF4Q==", "integrity": "sha512-am5zfg3yu6sqn5yjKBNqhnTX7Cv+m00ox+7jbaKkrLMRJ4rAdldd1xPd/JzbBWspqaQv6RSTrgFN95EsfhC+7w==",
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">=16.9.0" "node": ">=16.9.0"

View File

@@ -36,8 +36,8 @@
}, },
"overrides": { "overrides": {
"@modelcontextprotocol/sdk": { "@modelcontextprotocol/sdk": {
"@hono/node-server": "1.19.13", "@hono/node-server": "1.19.14",
"hono": "4.12.12" "hono": "4.12.14"
}, },
"router": { "router": {
"path-to-regexp": "8.4.2" "path-to-regexp": "8.4.2"

View File

@@ -261,7 +261,7 @@ This usually means the release exists, but not all platform bundles were uploade
Workarounds: Workarounds:
- try again after the release finishes publishing - try again after the release finishes publishing
- pass the latest published version explicitly, e.g.: - pass the latest published version explicitly, e.g.:
curl -fsSL https://feynman.is/install | bash -s -- 0.2.27 curl -fsSL https://feynman.is/install | bash -s -- 0.2.29
EOF EOF
exit 1 exit 1
fi fi

View File

@@ -110,7 +110,7 @@ This usually means the release exists, but not all platform bundles were uploade
Workarounds: Workarounds:
- try again after the release finishes publishing - try again after the release finishes publishing
- pass the latest published version explicitly, e.g.: - pass the latest published version explicitly, e.g.:
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.27 & ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.29
"@ "@
} }

View File

@@ -117,13 +117,13 @@ These installers download the bundled `skills/` and `prompts/` trees plus the re
The one-line installer already targets the latest tagged release. To pin an exact version, pass it explicitly: The one-line installer already targets the latest tagged release. To pin an exact version, pass it explicitly:
```bash ```bash
curl -fsSL https://feynman.is/install | bash -s -- 0.2.27 curl -fsSL https://feynman.is/install | bash -s -- 0.2.29
``` ```
On Windows: On Windows:
```powershell ```powershell
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.27 & ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.29
``` ```
## Post-install setup ## Post-install setup

View File

@@ -22,7 +22,9 @@ These are installed by default with every Feynman installation. They provide the
| `pi-mermaid` | Render Mermaid diagrams in the terminal UI | | `pi-mermaid` | Render Mermaid diagrams in the terminal UI |
| `@aliou/pi-processes` | Manage long-running experiments, background tasks, and log tailing | | `@aliou/pi-processes` | Manage long-running experiments, background tasks, and log tailing |
| `pi-zotero` | Integration with Zotero for citation library management | | `pi-zotero` | Integration with Zotero for citation library management |
| `@kaiserlich-dev/pi-session-search` | Indexed session recall with summarize and resume UI. Powers session lookup |
| `pi-schedule-prompt` | Schedule recurring and deferred research jobs. Powers the `/watch` workflow | | `pi-schedule-prompt` | Schedule recurring and deferred research jobs. Powers the `/watch` workflow |
| `@samfp/pi-memory` | Pi-managed preference and correction memory across sessions |
| `@tmustier/pi-ralph-wiggum` | Long-running agent loops for iterative development. Powers `/autoresearch` | | `@tmustier/pi-ralph-wiggum` | Long-running agent loops for iterative development. Powers `/autoresearch` |
These packages are updated together when you run `feynman update`. You do not need to install them individually. These packages are updated together when you run `feynman update`. You do not need to install them individually.
@@ -34,8 +36,6 @@ Install on demand with `feynman packages install <preset>`. These extend Feynman
| Package | Preset | Purpose | | Package | Preset | Purpose |
| --- | --- | --- | | --- | --- | --- |
| `pi-generative-ui` | `generative-ui` | Interactive HTML-style widgets for rich output | | `pi-generative-ui` | `generative-ui` | Interactive HTML-style widgets for rich output |
| `@kaiserlich-dev/pi-session-search` | `session-search` | Indexed session recall with summarize and resume UI. Powers `/search` |
| `@samfp/pi-memory` | `memory` | Automatic preference and correction memory across sessions |
## Installing and managing packages ## Installing and managing packages
@@ -48,17 +48,9 @@ feynman packages list
Install a specific optional preset: Install a specific optional preset:
```bash ```bash
feynman packages install session-search
feynman packages install memory
feynman packages install generative-ui feynman packages install generative-ui
``` ```
Install all optional packages at once:
```bash
feynman packages install all-extras
```
## Updating packages ## Updating packages
Update all installed packages to their latest versions: Update all installed packages to their latest versions: