Compare commits
1 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1cd1a147f2 |
@@ -25,7 +25,7 @@ curl -fsSL https://feynman.is/install | bash
|
|||||||
irm https://feynman.is/install.ps1 | iex
|
irm https://feynman.is/install.ps1 | iex
|
||||||
```
|
```
|
||||||
|
|
||||||
The one-line installer fetches the latest tagged release. To pin a version, pass it explicitly, for example `curl -fsSL https://feynman.is/install | bash -s -- 0.2.27`.
|
The one-line installer fetches the latest tagged release. To pin a version, pass it explicitly, for example `curl -fsSL https://feynman.is/install | bash -s -- 0.2.28`.
|
||||||
|
|
||||||
The installer downloads a standalone native bundle with its own Node.js runtime.
|
The installer downloads a standalone native bundle with its own Node.js runtime.
|
||||||
|
|
||||||
@@ -35,8 +35,6 @@ To uninstall the standalone app, remove the launcher and runtime bundle, then op
|
|||||||
|
|
||||||
Local models are supported through the setup flow. For LM Studio, run `feynman setup`, choose `LM Studio`, and keep the default `http://localhost:1234/v1` unless you changed the server port. For LiteLLM, choose `LiteLLM Proxy` and keep the default `http://localhost:4000/v1`. For Ollama or vLLM, choose `Custom provider (baseUrl + API key)`, use `openai-completions`, and point it at the local `/v1` endpoint.
|
Local models are supported through the setup flow. For LM Studio, run `feynman setup`, choose `LM Studio`, and keep the default `http://localhost:1234/v1` unless you changed the server port. For LiteLLM, choose `LiteLLM Proxy` and keep the default `http://localhost:4000/v1`. For Ollama or vLLM, choose `Custom provider (baseUrl + API key)`, use `openai-completions`, and point it at the local `/v1` endpoint.
|
||||||
|
|
||||||
Feynman uses Pi's own runtime hooks for context hygiene: Pi compaction/retry settings are seeded by default, `context_report` exposes the current Pi context usage to the model, oversized alphaXiv tool returns spill to `outputs/.cache/`, oversized custom/subagent returns spill to `outputs/.runs/`, and a bounded resume packet is injected from `outputs/.plans/`, `outputs/.state/`, and `CHANGELOG.md` when those files exist. Automatic session logging writes JSONL snippets to `notes/feynman-autolog/`; set `FEYNMAN_AUTO_LOG=off` to disable it or `FEYNMAN_AUTO_LOG=full` for full text. Feynman also locks new plan slugs under `outputs/.state/` to prevent concurrent workflow collisions and garbage-collects stale managed caches on startup.
|
|
||||||
|
|
||||||
### Skills Only
|
### Skills Only
|
||||||
|
|
||||||
If you want just the research skills without the full terminal app:
|
If you want just the research skills without the full terminal app:
|
||||||
|
|||||||
@@ -1,16 +1,12 @@
|
|||||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||||
|
|
||||||
import { registerAlphaTools } from "./research-tools/alpha.js";
|
import { registerAlphaTools } from "./research-tools/alpha.js";
|
||||||
import { registerAutoLog } from "./research-tools/autolog.js";
|
|
||||||
import { registerContextReportTool } from "./research-tools/context.js";
|
|
||||||
import { registerDiscoveryCommands } from "./research-tools/discovery.js";
|
import { registerDiscoveryCommands } from "./research-tools/discovery.js";
|
||||||
import { registerFeynmanModelCommand } from "./research-tools/feynman-model.js";
|
import { registerFeynmanModelCommand } from "./research-tools/feynman-model.js";
|
||||||
import { installFeynmanHeader } from "./research-tools/header.js";
|
import { installFeynmanHeader } from "./research-tools/header.js";
|
||||||
import { registerHelpCommand } from "./research-tools/help.js";
|
import { registerHelpCommand } from "./research-tools/help.js";
|
||||||
import { registerInitCommand, registerOutputsCommand } from "./research-tools/project.js";
|
import { registerInitCommand, registerOutputsCommand } from "./research-tools/project.js";
|
||||||
import { registerResumePacket } from "./research-tools/resume.js";
|
|
||||||
import { registerServiceTierControls } from "./research-tools/service-tier.js";
|
import { registerServiceTierControls } from "./research-tools/service-tier.js";
|
||||||
import { registerStateManagement } from "./research-tools/state.js";
|
|
||||||
|
|
||||||
export default function researchTools(pi: ExtensionAPI): void {
|
export default function researchTools(pi: ExtensionAPI): void {
|
||||||
const cache: { agentSummaryPromise?: Promise<{ agents: string[]; chains: string[] }> } = {};
|
const cache: { agentSummaryPromise?: Promise<{ agents: string[]; chains: string[] }> } = {};
|
||||||
@@ -21,14 +17,10 @@ export default function researchTools(pi: ExtensionAPI): void {
|
|||||||
});
|
});
|
||||||
|
|
||||||
registerAlphaTools(pi);
|
registerAlphaTools(pi);
|
||||||
registerAutoLog(pi);
|
|
||||||
registerContextReportTool(pi);
|
|
||||||
registerDiscoveryCommands(pi);
|
registerDiscoveryCommands(pi);
|
||||||
registerFeynmanModelCommand(pi);
|
registerFeynmanModelCommand(pi);
|
||||||
registerHelpCommand(pi);
|
registerHelpCommand(pi);
|
||||||
registerInitCommand(pi);
|
registerInitCommand(pi);
|
||||||
registerOutputsCommand(pi);
|
registerOutputsCommand(pi);
|
||||||
registerResumePacket(pi);
|
|
||||||
registerServiceTierControls(pi);
|
registerServiceTierControls(pi);
|
||||||
registerStateManagement(pi);
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,11 +7,7 @@ import {
|
|||||||
readPaperCode,
|
readPaperCode,
|
||||||
searchPapers,
|
searchPapers,
|
||||||
} from "@companion-ai/alpha-hub/lib";
|
} from "@companion-ai/alpha-hub/lib";
|
||||||
import { createHash } from "node:crypto";
|
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||||
import { mkdirSync, writeFileSync } from "node:fs";
|
|
||||||
import { dirname, resolve } from "node:path";
|
|
||||||
|
|
||||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
|
||||||
import { Type } from "@sinclair/typebox";
|
import { Type } from "@sinclair/typebox";
|
||||||
|
|
||||||
function formatText(value: unknown): string {
|
function formatText(value: unknown): string {
|
||||||
@@ -19,44 +15,6 @@ function formatText(value: unknown): string {
|
|||||||
return JSON.stringify(value, null, 2);
|
return JSON.stringify(value, null, 2);
|
||||||
}
|
}
|
||||||
|
|
||||||
function toolOutputCapChars(): number {
|
|
||||||
const raw = Number(process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS);
|
|
||||||
return Number.isFinite(raw) && raw > 0 ? Math.floor(raw) : 32_000;
|
|
||||||
}
|
|
||||||
|
|
||||||
function spillPath(ctx: ExtensionContext, toolName: string, text: string): string {
|
|
||||||
const hash = createHash("sha256").update(text).digest("hex").slice(0, 12);
|
|
||||||
return resolve(ctx.cwd, "outputs", ".cache", `${toolName}-${hash}.md`);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function formatToolResultWithSpillover(
|
|
||||||
ctx: ExtensionContext,
|
|
||||||
toolName: string,
|
|
||||||
result: unknown,
|
|
||||||
): { text: string; details: unknown } {
|
|
||||||
const text = formatText(result);
|
|
||||||
const cap = toolOutputCapChars();
|
|
||||||
if (text.length <= cap) {
|
|
||||||
return { text, details: result };
|
|
||||||
}
|
|
||||||
|
|
||||||
const path = spillPath(ctx, toolName, text);
|
|
||||||
mkdirSync(dirname(path), { recursive: true });
|
|
||||||
writeFileSync(path, text, "utf8");
|
|
||||||
|
|
||||||
const head = text.slice(0, Math.min(cap, 4_000));
|
|
||||||
const pointer = {
|
|
||||||
feynman_spillover: true,
|
|
||||||
tool: toolName,
|
|
||||||
path,
|
|
||||||
bytes: Buffer.byteLength(text, "utf8"),
|
|
||||||
sha256: createHash("sha256").update(text).digest("hex"),
|
|
||||||
note: "Full tool output was written to disk. Read the path in bounded chunks instead of asking the tool to return everything again.",
|
|
||||||
head,
|
|
||||||
};
|
|
||||||
return { text: JSON.stringify(pointer, null, 2), details: pointer };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function registerAlphaTools(pi: ExtensionAPI): void {
|
export function registerAlphaTools(pi: ExtensionAPI): void {
|
||||||
pi.registerTool({
|
pi.registerTool({
|
||||||
name: "alpha_search",
|
name: "alpha_search",
|
||||||
@@ -69,10 +27,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
|||||||
Type.String({ description: "Search mode: semantic, keyword, both, agentic, or all." }),
|
Type.String({ description: "Search mode: semantic, keyword, both, agentic, or all." }),
|
||||||
),
|
),
|
||||||
}),
|
}),
|
||||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
async execute(_toolCallId, params) {
|
||||||
const result = await searchPapers(params.query, params.mode?.trim() || "semantic");
|
const result = await searchPapers(params.query, params.mode?.trim() || "semantic");
|
||||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_search", result);
|
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -84,10 +41,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
|||||||
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
|
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
|
||||||
fullText: Type.Optional(Type.Boolean({ description: "Return raw full text instead of AI report." })),
|
fullText: Type.Optional(Type.Boolean({ description: "Return raw full text instead of AI report." })),
|
||||||
}),
|
}),
|
||||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
async execute(_toolCallId, params) {
|
||||||
const result = await getPaper(params.paper, { fullText: params.fullText });
|
const result = await getPaper(params.paper, { fullText: params.fullText });
|
||||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_get_paper", result);
|
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -99,10 +55,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
|||||||
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
|
paper: Type.String({ description: "arXiv ID, arXiv URL, or alphaXiv URL." }),
|
||||||
question: Type.String({ description: "Question about the paper." }),
|
question: Type.String({ description: "Question about the paper." }),
|
||||||
}),
|
}),
|
||||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
async execute(_toolCallId, params) {
|
||||||
const result = await askPaper(params.paper, params.question);
|
const result = await askPaper(params.paper, params.question);
|
||||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_ask_paper", result);
|
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -115,14 +70,13 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
|||||||
note: Type.Optional(Type.String({ description: "Annotation text. Omit when clear=true." })),
|
note: Type.Optional(Type.String({ description: "Annotation text. Omit when clear=true." })),
|
||||||
clear: Type.Optional(Type.Boolean({ description: "Clear the existing annotation." })),
|
clear: Type.Optional(Type.Boolean({ description: "Clear the existing annotation." })),
|
||||||
}),
|
}),
|
||||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
async execute(_toolCallId, params) {
|
||||||
const result = params.clear
|
const result = params.clear
|
||||||
? await clearPaperAnnotation(params.paper)
|
? await clearPaperAnnotation(params.paper)
|
||||||
: params.note
|
: params.note
|
||||||
? await annotatePaper(params.paper, params.note)
|
? await annotatePaper(params.paper, params.note)
|
||||||
: (() => { throw new Error("Provide either note or clear=true."); })();
|
: (() => { throw new Error("Provide either note or clear=true."); })();
|
||||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_annotate_paper", result);
|
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -131,10 +85,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
|||||||
label: "Alpha List Annotations",
|
label: "Alpha List Annotations",
|
||||||
description: "List all persistent local paper annotations.",
|
description: "List all persistent local paper annotations.",
|
||||||
parameters: Type.Object({}),
|
parameters: Type.Object({}),
|
||||||
async execute(_toolCallId, _params, _signal, _onUpdate, ctx) {
|
async execute() {
|
||||||
const result = await listPaperAnnotations();
|
const result = await listPaperAnnotations();
|
||||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_list_annotations", result);
|
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -146,10 +99,9 @@ export function registerAlphaTools(pi: ExtensionAPI): void {
|
|||||||
githubUrl: Type.String({ description: "GitHub repository URL." }),
|
githubUrl: Type.String({ description: "GitHub repository URL." }),
|
||||||
path: Type.Optional(Type.String({ description: "File or directory path. Default: '/'" })),
|
path: Type.Optional(Type.String({ description: "File or directory path. Default: '/'" })),
|
||||||
}),
|
}),
|
||||||
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
|
async execute(_toolCallId, params) {
|
||||||
const result = await readPaperCode(params.githubUrl, params.path?.trim() || "/");
|
const result = await readPaperCode(params.githubUrl, params.path?.trim() || "/");
|
||||||
const formatted = formatToolResultWithSpillover(ctx, "alpha_read_code", result);
|
return { content: [{ type: "text", text: formatText(result) }], details: result };
|
||||||
return { content: [{ type: "text", text: formatted.text }], details: formatted.details };
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,84 +0,0 @@
|
|||||||
import { appendFileSync, mkdirSync, readFileSync } from "node:fs";
|
|
||||||
import { dirname, resolve } from "node:path";
|
|
||||||
|
|
||||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
|
||||||
|
|
||||||
type AutoLogMode = "off" | "events" | "full";
|
|
||||||
|
|
||||||
function readAgentSettings(): Record<string, unknown> {
|
|
||||||
const agentDir = process.env.PI_CODING_AGENT_DIR;
|
|
||||||
if (!agentDir) return {};
|
|
||||||
try {
|
|
||||||
return JSON.parse(readFileSync(resolve(agentDir, "settings.json"), "utf8")) as Record<string, unknown>;
|
|
||||||
} catch {
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function normalizeMode(value: unknown): AutoLogMode | undefined {
|
|
||||||
if (typeof value !== "string") return undefined;
|
|
||||||
const normalized = value.trim().toLowerCase();
|
|
||||||
if (normalized === "off" || normalized === "events" || normalized === "full") return normalized;
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getAutoLogMode(): AutoLogMode {
|
|
||||||
return normalizeMode(process.env.FEYNMAN_AUTO_LOG) ??
|
|
||||||
normalizeMode(readAgentSettings().autoLog) ??
|
|
||||||
"events";
|
|
||||||
}
|
|
||||||
|
|
||||||
function extractMessageText(message: unknown): string {
|
|
||||||
if (!message || typeof message !== "object") return "";
|
|
||||||
const content = (message as { content?: unknown }).content;
|
|
||||||
if (typeof content === "string") return content;
|
|
||||||
if (!Array.isArray(content)) return "";
|
|
||||||
return content
|
|
||||||
.map((item) => {
|
|
||||||
if (!item || typeof item !== "object") return "";
|
|
||||||
const record = item as { type?: string; text?: unknown; thinking?: unknown; name?: unknown };
|
|
||||||
if (record.type === "text" && typeof record.text === "string") return record.text;
|
|
||||||
if (record.type === "thinking" && typeof record.thinking === "string") return "[thinking omitted]";
|
|
||||||
if (record.type === "toolCall") return `[tool:${typeof record.name === "string" ? record.name : "unknown"}]`;
|
|
||||||
return "";
|
|
||||||
})
|
|
||||||
.filter(Boolean)
|
|
||||||
.join("\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
function clip(text: string, maxChars: number): string {
|
|
||||||
return text.length > maxChars ? `${text.slice(0, maxChars)}\n...[truncated ${text.length - maxChars} chars]` : text;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function autoLogPath(cwd: string, date = new Date()): string {
|
|
||||||
const day = date.toISOString().slice(0, 10);
|
|
||||||
return resolve(cwd, "notes", "feynman-autolog", `${day}.jsonl`);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function writeAutoLogEntry(cwd: string, entry: Record<string, unknown>): void {
|
|
||||||
const path = autoLogPath(cwd);
|
|
||||||
mkdirSync(dirname(path), { recursive: true });
|
|
||||||
appendFileSync(path, `${JSON.stringify(entry)}\n`, "utf8");
|
|
||||||
}
|
|
||||||
|
|
||||||
export function registerAutoLog(pi: ExtensionAPI): void {
|
|
||||||
pi.on("message_end", async (event, ctx: ExtensionContext) => {
|
|
||||||
const mode = getAutoLogMode();
|
|
||||||
if (mode === "off") return;
|
|
||||||
|
|
||||||
const message = event.message as any;
|
|
||||||
if (message.role !== "user" && message.role !== "assistant") return;
|
|
||||||
|
|
||||||
const text = extractMessageText(message).replace(/\s+/g, " ").trim();
|
|
||||||
if (!text) return;
|
|
||||||
|
|
||||||
writeAutoLogEntry(ctx.cwd, {
|
|
||||||
timestamp: new Date(message.timestamp ?? Date.now()).toISOString(),
|
|
||||||
session: ctx.sessionManager.getSessionId(),
|
|
||||||
role: message.role,
|
|
||||||
model: message.role === "assistant" ? `${message.provider}/${message.model}` : undefined,
|
|
||||||
mode,
|
|
||||||
text: mode === "full" ? text : clip(text, 500),
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -1,53 +0,0 @@
|
|||||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
|
||||||
import { Type } from "@sinclair/typebox";
|
|
||||||
|
|
||||||
type ContextPosture = {
|
|
||||||
model: string;
|
|
||||||
contextWindow: number | null;
|
|
||||||
estimatedInputTokens: number | null;
|
|
||||||
utilizationPct: number | null;
|
|
||||||
compactionThresholdHit: boolean;
|
|
||||||
recommendedMaxWorkers: number;
|
|
||||||
};
|
|
||||||
|
|
||||||
export function computeContextPosture(ctx: ExtensionContext): ContextPosture {
|
|
||||||
const usage = ctx.getContextUsage();
|
|
||||||
const modelWindow = typeof ctx.model?.contextWindow === "number" ? ctx.model.contextWindow : null;
|
|
||||||
const contextWindow = usage?.contextWindow ?? modelWindow;
|
|
||||||
const estimatedInputTokens = usage?.tokens ?? null;
|
|
||||||
const utilizationPct = usage?.percent ?? (contextWindow && estimatedInputTokens
|
|
||||||
? Math.round((estimatedInputTokens / contextWindow) * 1000) / 10
|
|
||||||
: null);
|
|
||||||
const compactionThresholdHit = utilizationPct !== null && utilizationPct >= 70;
|
|
||||||
const availableForWorkers = contextWindow
|
|
||||||
? Math.max(0, contextWindow - 16_384 - (estimatedInputTokens ?? 0))
|
|
||||||
: 0;
|
|
||||||
const recommendedMaxWorkers = contextWindow === null
|
|
||||||
? 1
|
|
||||||
: Math.max(1, Math.min(4, Math.floor(availableForWorkers / 24_000) || 1));
|
|
||||||
|
|
||||||
return {
|
|
||||||
model: ctx.model ? `${ctx.model.provider}/${ctx.model.id}` : "not set",
|
|
||||||
contextWindow,
|
|
||||||
estimatedInputTokens,
|
|
||||||
utilizationPct,
|
|
||||||
compactionThresholdHit,
|
|
||||||
recommendedMaxWorkers,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function registerContextReportTool(pi: ExtensionAPI): void {
|
|
||||||
pi.registerTool({
|
|
||||||
name: "context_report",
|
|
||||||
label: "Context Report",
|
|
||||||
description: "Report current Pi context usage, compaction threshold posture, and safe worker-count guidance.",
|
|
||||||
parameters: Type.Object({}),
|
|
||||||
async execute(_toolCallId, _params, _signal, _onUpdate, ctx) {
|
|
||||||
const report = computeContextPosture(ctx);
|
|
||||||
return {
|
|
||||||
content: [{ type: "text", text: JSON.stringify(report, null, 2) }],
|
|
||||||
details: report,
|
|
||||||
};
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -5,7 +5,6 @@ import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
|||||||
|
|
||||||
import { getExtensionCommandSpec } from "../../metadata/commands.mjs";
|
import { getExtensionCommandSpec } from "../../metadata/commands.mjs";
|
||||||
import { buildProjectAgentsTemplate, buildSessionLogsReadme } from "./project-scaffold.js";
|
import { buildProjectAgentsTemplate, buildSessionLogsReadme } from "./project-scaffold.js";
|
||||||
import { collectManagedGc } from "./state.js";
|
|
||||||
|
|
||||||
async function pathExists(path: string): Promise<boolean> {
|
async function pathExists(path: string): Promise<boolean> {
|
||||||
try {
|
try {
|
||||||
@@ -105,15 +104,7 @@ export function registerInitCommand(pi: ExtensionAPI): void {
|
|||||||
export function registerOutputsCommand(pi: ExtensionAPI): void {
|
export function registerOutputsCommand(pi: ExtensionAPI): void {
|
||||||
pi.registerCommand("outputs", {
|
pi.registerCommand("outputs", {
|
||||||
description: "Browse all research artifacts (papers, outputs, experiments, notes).",
|
description: "Browse all research artifacts (papers, outputs, experiments, notes).",
|
||||||
handler: async (args, ctx) => {
|
handler: async (_args, ctx) => {
|
||||||
const trimmedArgs = args.trim();
|
|
||||||
if (trimmedArgs === "gc" || trimmedArgs === "gc --dry-run") {
|
|
||||||
const dryRun = trimmedArgs.includes("--dry-run");
|
|
||||||
const result = collectManagedGc(ctx.cwd, Date.now(), undefined, { dryRun });
|
|
||||||
ctx.ui.notify(`${dryRun ? "Would remove" : "Removed"} ${result.deleted.length} managed cache file(s).`, "info");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const items = await collectArtifacts(ctx.cwd);
|
const items = await collectArtifacts(ctx.cwd);
|
||||||
if (items.length === 0) {
|
if (items.length === 0) {
|
||||||
ctx.ui.notify("No artifacts found. Use /lit, /draft, /review, or /deepresearch to create some.", "info");
|
ctx.ui.notify("No artifacts found. Use /lit, /draft, /review, or /deepresearch to create some.", "info");
|
||||||
|
|||||||
@@ -1,92 +0,0 @@
|
|||||||
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
|
|
||||||
import { join, resolve } from "node:path";
|
|
||||||
|
|
||||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
|
||||||
|
|
||||||
type ResumeArtifact = {
|
|
||||||
path: string;
|
|
||||||
mtimeMs: number;
|
|
||||||
};
|
|
||||||
|
|
||||||
function collectFiles(root: string, predicate: (path: string) => boolean): ResumeArtifact[] {
|
|
||||||
if (!existsSync(root)) return [];
|
|
||||||
const files: ResumeArtifact[] = [];
|
|
||||||
for (const entry of readdirSync(root, { withFileTypes: true })) {
|
|
||||||
const path = join(root, entry.name);
|
|
||||||
if (entry.isDirectory()) {
|
|
||||||
files.push(...collectFiles(path, predicate));
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (!entry.isFile() || !predicate(path)) continue;
|
|
||||||
try {
|
|
||||||
files.push({ path, mtimeMs: statSync(path).mtimeMs });
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
return files;
|
|
||||||
}
|
|
||||||
|
|
||||||
function tail(text: string, maxChars: number): string {
|
|
||||||
return text.length <= maxChars ? text : text.slice(text.length - maxChars);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function buildResumePacket(cwd: string, maxChars = 4_000): string | undefined {
|
|
||||||
const plans = collectFiles(resolve(cwd, "outputs", ".plans"), (path) => path.endsWith(".md"))
|
|
||||||
.sort((a, b) => b.mtimeMs - a.mtimeMs)
|
|
||||||
.slice(0, 3);
|
|
||||||
const stateFiles = collectFiles(resolve(cwd, "outputs", ".state"), (path) => /\.(json|jsonl|md)$/i.test(path))
|
|
||||||
.sort((a, b) => b.mtimeMs - a.mtimeMs)
|
|
||||||
.slice(0, 5);
|
|
||||||
const changelogPath = resolve(cwd, "CHANGELOG.md");
|
|
||||||
|
|
||||||
if (plans.length === 0 && stateFiles.length === 0 && !existsSync(changelogPath)) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
const lines: string[] = [
|
|
||||||
"[feynman resume packet]",
|
|
||||||
"This is a bounded project-state summary from disk. Prefer these paths over guessing prior workflow state.",
|
|
||||||
];
|
|
||||||
|
|
||||||
if (plans.length > 0) {
|
|
||||||
lines.push("", "Recent plans:");
|
|
||||||
for (const plan of plans) {
|
|
||||||
lines.push(`- ${plan.path}`);
|
|
||||||
}
|
|
||||||
const newestPlan = plans[0]!;
|
|
||||||
try {
|
|
||||||
lines.push("", `Newest plan tail (${newestPlan.path}):`, tail(readFileSync(newestPlan.path, "utf8"), 1_500));
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (stateFiles.length > 0) {
|
|
||||||
lines.push("", "Recent state files:");
|
|
||||||
for (const file of stateFiles) {
|
|
||||||
lines.push(`- ${file.path}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (existsSync(changelogPath)) {
|
|
||||||
try {
|
|
||||||
lines.push("", "CHANGELOG tail:", tail(readFileSync(changelogPath, "utf8"), 1_200));
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
|
|
||||||
return tail(lines.join("\n"), maxChars);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function registerResumePacket(pi: ExtensionAPI): void {
|
|
||||||
pi.on("session_start", async (_event, ctx: ExtensionContext) => {
|
|
||||||
if (process.env.FEYNMAN_RESUME_PACKET === "off") return;
|
|
||||||
const packet = buildResumePacket(ctx.cwd);
|
|
||||||
if (!packet) return;
|
|
||||||
pi.sendMessage(
|
|
||||||
{
|
|
||||||
customType: "feynman_resume_packet",
|
|
||||||
content: packet,
|
|
||||||
display: false,
|
|
||||||
details: { source: "outputs/.plans outputs/.state CHANGELOG.md" },
|
|
||||||
},
|
|
||||||
{ triggerTurn: false, deliverAs: "nextTurn" },
|
|
||||||
);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -1,276 +0,0 @@
|
|||||||
import { createHash } from "node:crypto";
|
|
||||||
import { appendFileSync, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, writeFileSync } from "node:fs";
|
|
||||||
import { basename, dirname, relative, resolve } from "node:path";
|
|
||||||
|
|
||||||
import { isToolCallEventType, type ExtensionAPI, type ExtensionContext, type ToolCallEvent } from "@mariozechner/pi-coding-agent";
|
|
||||||
|
|
||||||
type SlugLock = {
|
|
||||||
pid: number;
|
|
||||||
sessionId: string;
|
|
||||||
startedAt: string;
|
|
||||||
planPath: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
type GcResult = {
|
|
||||||
deleted: string[];
|
|
||||||
kept: string[];
|
|
||||||
};
|
|
||||||
|
|
||||||
type SpillResult = {
|
|
||||||
content: { type: "text"; text: string }[];
|
|
||||||
details: unknown;
|
|
||||||
} | undefined;
|
|
||||||
|
|
||||||
type ToolResultPatch = {
|
|
||||||
content?: { type: "text"; text: string }[];
|
|
||||||
details?: unknown;
|
|
||||||
isError?: boolean;
|
|
||||||
};
|
|
||||||
|
|
||||||
const BUILT_IN_TOOL_NAMES = new Set(["bash", "read", "write", "edit", "grep", "find", "ls"]);
|
|
||||||
|
|
||||||
function isPathInside(parent: string, child: string): boolean {
|
|
||||||
const rel = relative(parent, child);
|
|
||||||
return rel === "" || (!rel.startsWith("..") && !rel.startsWith("/"));
|
|
||||||
}
|
|
||||||
|
|
||||||
function pidIsLive(pid: number): boolean {
|
|
||||||
if (!Number.isInteger(pid) || pid <= 0) return false;
|
|
||||||
try {
|
|
||||||
process.kill(pid, 0);
|
|
||||||
return true;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function readLock(path: string): SlugLock | undefined {
|
|
||||||
try {
|
|
||||||
return JSON.parse(readFileSync(path, "utf8")) as SlugLock;
|
|
||||||
} catch {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function lockIsLive(lock: SlugLock | undefined, timeoutMs: number, now = Date.now()): boolean {
|
|
||||||
if (!lock) return false;
|
|
||||||
const started = Date.parse(lock.startedAt);
|
|
||||||
if (!Number.isFinite(started) || now - started > timeoutMs) return false;
|
|
||||||
return pidIsLive(lock.pid);
|
|
||||||
}
|
|
||||||
|
|
||||||
function planPathInfo(cwd: string, inputPath: string): { absPath: string; slug: string; lockPath: string } | undefined {
|
|
||||||
const absPath = resolve(cwd, inputPath);
|
|
||||||
const plansRoot = resolve(cwd, "outputs", ".plans");
|
|
||||||
if (!isPathInside(plansRoot, absPath) || !absPath.endsWith(".md")) return undefined;
|
|
||||||
const slug = basename(absPath, ".md");
|
|
||||||
const lockPath = resolve(cwd, "outputs", ".state", `${slug}.lock`);
|
|
||||||
return { absPath, slug, lockPath };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function claimPlanSlug(
|
|
||||||
cwd: string,
|
|
||||||
sessionId: string,
|
|
||||||
inputPath: string,
|
|
||||||
options?: { timeoutMinutes?: number; strategy?: "suffix" | "error" | "overwrite"; now?: number },
|
|
||||||
): { ok: true; lockPath?: string } | { ok: false; reason: string } {
|
|
||||||
const info = planPathInfo(cwd, inputPath);
|
|
||||||
if (!info) return { ok: true };
|
|
||||||
|
|
||||||
const strategy = options?.strategy ?? (process.env.FEYNMAN_SLUG_COLLISION_STRATEGY as "suffix" | "error" | "overwrite" | undefined) ?? "error";
|
|
||||||
if (strategy === "overwrite") return { ok: true };
|
|
||||||
|
|
||||||
const timeoutMinutes = options?.timeoutMinutes ?? (Number(process.env.FEYNMAN_SLUG_LOCK_TIMEOUT_MINUTES) || 30);
|
|
||||||
const timeoutMs = timeoutMinutes * 60_000;
|
|
||||||
const existingLock = readLock(info.lockPath);
|
|
||||||
const live = lockIsLive(existingLock, timeoutMs, options?.now);
|
|
||||||
if (live && existingLock?.sessionId !== sessionId) {
|
|
||||||
return {
|
|
||||||
ok: false,
|
|
||||||
reason: `Slug "${info.slug}" is locked by another Feynman session. Use a unique slug such as ${info.slug}-2, or wait for ${info.lockPath} to expire.`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (existsSync(info.absPath) && existingLock?.sessionId !== sessionId) {
|
|
||||||
return {
|
|
||||||
ok: false,
|
|
||||||
reason: `Plan already exists at ${relative(cwd, info.absPath)}. Use a unique slug such as ${info.slug}-2 to avoid overwriting another run.`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
mkdirSync(dirname(info.lockPath), { recursive: true });
|
|
||||||
writeFileSync(
|
|
||||||
info.lockPath,
|
|
||||||
JSON.stringify({
|
|
||||||
pid: process.pid,
|
|
||||||
sessionId,
|
|
||||||
startedAt: new Date(options?.now ?? Date.now()).toISOString(),
|
|
||||||
planPath: info.absPath,
|
|
||||||
}, null, 2) + "\n",
|
|
||||||
"utf8",
|
|
||||||
);
|
|
||||||
return { ok: true, lockPath: info.lockPath };
|
|
||||||
}
|
|
||||||
|
|
||||||
function managedRetentionDays(): number {
|
|
||||||
const raw = Number(process.env.FEYNMAN_CACHE_RETENTION_DAYS);
|
|
||||||
return Number.isFinite(raw) && raw >= 0 ? raw : 14;
|
|
||||||
}
|
|
||||||
|
|
||||||
function gcIgnored(path: string): boolean {
|
|
||||||
if (path.endsWith(".gcignore")) return true;
|
|
||||||
try {
|
|
||||||
return /^---[\s\S]*?retain:\s*true/im.test(readFileSync(path, "utf8").slice(0, 500));
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function collectManagedGc(
|
|
||||||
cwd: string,
|
|
||||||
now = Date.now(),
|
|
||||||
retentionDays = managedRetentionDays(),
|
|
||||||
options?: { dryRun?: boolean },
|
|
||||||
): GcResult {
|
|
||||||
const roots = [
|
|
||||||
resolve(cwd, "outputs", ".cache"),
|
|
||||||
resolve(cwd, "outputs", ".runs"),
|
|
||||||
resolve(cwd, "outputs", ".notes"),
|
|
||||||
];
|
|
||||||
const cutoff = now - retentionDays * 24 * 60 * 60 * 1000;
|
|
||||||
const result: GcResult = { deleted: [], kept: [] };
|
|
||||||
|
|
||||||
const visit = (path: string) => {
|
|
||||||
if (!existsSync(path)) return;
|
|
||||||
for (const entry of readdirSync(path, { withFileTypes: true })) {
|
|
||||||
const child = resolve(path, entry.name);
|
|
||||||
if (entry.isDirectory()) {
|
|
||||||
visit(child);
|
|
||||||
try {
|
|
||||||
if (readdirSync(child).length === 0) rmSync(child, { recursive: true, force: true });
|
|
||||||
} catch {}
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (!entry.isFile()) continue;
|
|
||||||
const stat = statSync(child);
|
|
||||||
if (gcIgnored(child) || stat.mtimeMs >= cutoff) {
|
|
||||||
result.kept.push(child);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (!options?.dryRun) {
|
|
||||||
rmSync(child, { force: true });
|
|
||||||
}
|
|
||||||
result.deleted.push(child);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
for (const root of roots) visit(root);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
function textFromToolContent(content: ToolResultContent): string {
|
|
||||||
return content
|
|
||||||
.map((item) => item.type === "text" ? item.text : "")
|
|
||||||
.filter(Boolean)
|
|
||||||
.join("\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
type ToolResultContent = Array<{ type: "text"; text: string } | { type: "image"; data: string; mimeType: string }>;
|
|
||||||
|
|
||||||
function customToolOutputCapChars(): number {
|
|
||||||
const raw = Number(process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS);
|
|
||||||
return Number.isFinite(raw) && raw > 0 ? raw : 24_000;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function spillLargeCustomToolResult(
|
|
||||||
cwd: string,
|
|
||||||
toolName: string,
|
|
||||||
toolCallId: string,
|
|
||||||
content: ToolResultContent,
|
|
||||||
details: unknown,
|
|
||||||
): SpillResult {
|
|
||||||
if (BUILT_IN_TOOL_NAMES.has(toolName)) return undefined;
|
|
||||||
const text = textFromToolContent(content);
|
|
||||||
const cap = customToolOutputCapChars();
|
|
||||||
if (text.length <= cap) return undefined;
|
|
||||||
|
|
||||||
const hash = createHash("sha256").update(text).digest("hex");
|
|
||||||
const safeToolName = toolName.replace(/[^a-zA-Z0-9._-]+/g, "-").slice(0, 60) || "tool";
|
|
||||||
const path = resolve(cwd, "outputs", ".runs", `${safeToolName}-${toolCallId}-${hash.slice(0, 12)}.md`);
|
|
||||||
mkdirSync(dirname(path), { recursive: true });
|
|
||||||
writeFileSync(path, text, "utf8");
|
|
||||||
const pointer = {
|
|
||||||
feynman_spillover: true,
|
|
||||||
tool: toolName,
|
|
||||||
toolCallId,
|
|
||||||
path,
|
|
||||||
bytes: Buffer.byteLength(text, "utf8"),
|
|
||||||
sha256: hash,
|
|
||||||
head: text.slice(0, Math.min(cap, 2_000)),
|
|
||||||
note: "Full custom/subagent tool result was written to disk. Read the path in bounded chunks when needed.",
|
|
||||||
originalDetails: details,
|
|
||||||
};
|
|
||||||
return {
|
|
||||||
content: [{ type: "text", text: JSON.stringify(pointer, null, 2) }],
|
|
||||||
details: pointer,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function appendJsonl(path: string, value: unknown): void {
|
|
||||||
mkdirSync(dirname(path), { recursive: true });
|
|
||||||
appendFileSync(path, `${JSON.stringify(value)}\n`, "utf8");
|
|
||||||
}
|
|
||||||
|
|
||||||
function recordCheckpoint(ctx: ExtensionContext, toolName: string, isError: boolean): void {
|
|
||||||
appendJsonl(resolve(ctx.cwd, "outputs", ".state", "feynman.checkpoint.jsonl"), {
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
sessionId: ctx.sessionManager.getSessionId(),
|
|
||||||
toolName,
|
|
||||||
isError,
|
|
||||||
context: ctx.getContextUsage?.(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function recordJobEvent(ctx: ExtensionContext, toolName: string, status: "running" | "done" | "failed", data: unknown): void {
|
|
||||||
appendJsonl(resolve(ctx.cwd, "outputs", ".state", "subagent.jobs.jsonl"), {
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
sessionId: ctx.sessionManager.getSessionId(),
|
|
||||||
toolName,
|
|
||||||
status,
|
|
||||||
data,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function looksLikeSubagentTool(toolName: string): boolean {
|
|
||||||
return /subagent|parallel|chain|run/i.test(toolName);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function registerStateManagement(pi: ExtensionAPI): void {
|
|
||||||
pi.on("session_start", async (_event, ctx) => {
|
|
||||||
if (process.env.FEYNMAN_OUTPUTS_GC === "off") return;
|
|
||||||
collectManagedGc(ctx.cwd);
|
|
||||||
});
|
|
||||||
|
|
||||||
pi.on("tool_call", async (event: ToolCallEvent, ctx) => {
|
|
||||||
const sessionId = ctx.sessionManager.getSessionId();
|
|
||||||
if (isToolCallEventType("write", event)) {
|
|
||||||
const claim = claimPlanSlug(ctx.cwd, sessionId, event.input.path);
|
|
||||||
if (!claim.ok) return { block: true, reason: claim.reason };
|
|
||||||
}
|
|
||||||
if (isToolCallEventType("edit", event)) {
|
|
||||||
const claim = claimPlanSlug(ctx.cwd, sessionId, event.input.path);
|
|
||||||
if (!claim.ok) return { block: true, reason: claim.reason };
|
|
||||||
}
|
|
||||||
if (looksLikeSubagentTool(event.toolName)) {
|
|
||||||
recordJobEvent(ctx, event.toolName, "running", event.input);
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
});
|
|
||||||
|
|
||||||
pi.on("tool_result", async (event, ctx): Promise<ToolResultPatch | undefined> => {
|
|
||||||
recordCheckpoint(ctx, event.toolName, event.isError);
|
|
||||||
if (looksLikeSubagentTool(event.toolName)) {
|
|
||||||
recordJobEvent(ctx, event.toolName, event.isError ? "failed" : "done", event.details ?? event.content);
|
|
||||||
}
|
|
||||||
return spillLargeCustomToolResult(ctx.cwd, event.toolName, event.toolCallId, event.content as ToolResultContent, event.details);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "@companion-ai/feynman",
|
"name": "@companion-ai/feynman",
|
||||||
"version": "0.2.27",
|
"version": "0.2.28",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "@companion-ai/feynman",
|
"name": "@companion-ai/feynman",
|
||||||
"version": "0.2.27",
|
"version": "0.2.28",
|
||||||
"hasInstallScript": true,
|
"hasInstallScript": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@companion-ai/feynman",
|
"name": "@companion-ai/feynman",
|
||||||
"version": "0.2.27",
|
"version": "0.2.28",
|
||||||
"description": "Research-first CLI agent built on Pi and alphaXiv",
|
"description": "Research-first CLI agent built on Pi and alphaXiv",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
|
|||||||
@@ -110,7 +110,7 @@ This usually means the release exists, but not all platform bundles were uploade
|
|||||||
Workarounds:
|
Workarounds:
|
||||||
- try again after the release finishes publishing
|
- try again after the release finishes publishing
|
||||||
- pass the latest published version explicitly, e.g.:
|
- pass the latest published version explicitly, e.g.:
|
||||||
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.27
|
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.28
|
||||||
"@
|
"@
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -261,7 +261,7 @@ This usually means the release exists, but not all platform bundles were uploade
|
|||||||
Workarounds:
|
Workarounds:
|
||||||
- try again after the release finishes publishing
|
- try again after the release finishes publishing
|
||||||
- pass the latest published version explicitly, e.g.:
|
- pass the latest published version explicitly, e.g.:
|
||||||
curl -fsSL https://feynman.is/install | bash -s -- 0.2.27
|
curl -fsSL https://feynman.is/install | bash -s -- 0.2.28
|
||||||
EOF
|
EOF
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -127,19 +127,6 @@ export function normalizeFeynmanSettings(
|
|||||||
settings.theme = "feynman";
|
settings.theme = "feynman";
|
||||||
settings.quietStartup = true;
|
settings.quietStartup = true;
|
||||||
settings.collapseChangelog = true;
|
settings.collapseChangelog = true;
|
||||||
settings.compaction = {
|
|
||||||
enabled: true,
|
|
||||||
reserveTokens: 16384,
|
|
||||||
keepRecentTokens: 20000,
|
|
||||||
...(settings.compaction && typeof settings.compaction === "object" ? settings.compaction : {}),
|
|
||||||
};
|
|
||||||
settings.retry = {
|
|
||||||
enabled: true,
|
|
||||||
maxRetries: 3,
|
|
||||||
baseDelayMs: 2000,
|
|
||||||
maxDelayMs: 60000,
|
|
||||||
...(settings.retry && typeof settings.retry === "object" ? settings.retry : {}),
|
|
||||||
};
|
|
||||||
const supportedCorePackages = filterPackageSourcesForCurrentNode(CORE_PACKAGE_SOURCES);
|
const supportedCorePackages = filterPackageSourcesForCurrentNode(CORE_PACKAGE_SOURCES);
|
||||||
if (!Array.isArray(settings.packages) || settings.packages.length === 0) {
|
if (!Array.isArray(settings.packages) || settings.packages.length === 0) {
|
||||||
settings.packages = supportedCorePackages;
|
settings.packages = supportedCorePackages;
|
||||||
|
|||||||
@@ -12,11 +12,6 @@ import { buildModelStatusSnapshotFromRecords, getAvailableModelRecords, getSuppo
|
|||||||
import { createModelRegistry, getModelsJsonPath } from "../model/registry.js";
|
import { createModelRegistry, getModelsJsonPath } from "../model/registry.js";
|
||||||
import { getConfiguredServiceTier } from "../model/service-tier.js";
|
import { getConfiguredServiceTier } from "../model/service-tier.js";
|
||||||
|
|
||||||
// Risk assessment of the active model's context window for long sessions,
// with preformatted report lines for the doctor output.
type ContextRiskSummary = {
  level: "low" | "medium" | "high" | "unknown";
  lines: string[];
};
|
|
||||||
|
|
||||||
function findProvidersMissingApiKey(modelsJsonPath: string): string[] {
|
function findProvidersMissingApiKey(modelsJsonPath: string): string[] {
|
||||||
try {
|
try {
|
||||||
const raw = readFileSync(modelsJsonPath, "utf8").trim();
|
const raw = readFileSync(modelsJsonPath, "utf8").trim();
|
||||||
@@ -40,50 +35,6 @@ function findProvidersMissingApiKey(modelsJsonPath: string): string[] {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function numberSetting(settings: Record<string, unknown>, path: string[], fallback: number): number {
|
|
||||||
let value: unknown = settings;
|
|
||||||
for (const key of path) {
|
|
||||||
if (!value || typeof value !== "object") return fallback;
|
|
||||||
value = (value as Record<string, unknown>)[key];
|
|
||||||
}
|
|
||||||
return typeof value === "number" && Number.isFinite(value) ? value : fallback;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function buildContextRiskSummary(
|
|
||||||
settings: Record<string, unknown>,
|
|
||||||
model: { provider: string; id: string; contextWindow: number; maxTokens: number; reasoning: boolean } | undefined,
|
|
||||||
): ContextRiskSummary {
|
|
||||||
if (!model) {
|
|
||||||
return {
|
|
||||||
level: "unknown",
|
|
||||||
lines: ["context risk: unknown (no active model)"],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const reserveTokens = numberSetting(settings, ["compaction", "reserveTokens"], 16384);
|
|
||||||
const keepRecentTokens = numberSetting(settings, ["compaction", "keepRecentTokens"], 20000);
|
|
||||||
const retryMax = numberSetting(settings, ["retry", "maxRetries"], 3);
|
|
||||||
const usableWindow = Math.max(0, model.contextWindow - reserveTokens);
|
|
||||||
const level = model.contextWindow < 64_000
|
|
||||||
? "high"
|
|
||||||
: model.contextWindow < 128_000
|
|
||||||
? "medium"
|
|
||||||
: "low";
|
|
||||||
|
|
||||||
return {
|
|
||||||
level,
|
|
||||||
lines: [
|
|
||||||
`context risk: ${level}`,
|
|
||||||
` model: ${model.provider}/${model.id}`,
|
|
||||||
` context window: ${model.contextWindow}`,
|
|
||||||
` usable before Pi compaction reserve: ${usableWindow}`,
|
|
||||||
` Pi compaction: reserve=${reserveTokens}, keepRecent=${keepRecentTokens}`,
|
|
||||||
` Pi retry: maxRetries=${retryMax}`,
|
|
||||||
` reasoning: ${model.reasoning ? "supported" : "off/not supported"}`,
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export type DoctorOptions = {
|
export type DoctorOptions = {
|
||||||
settingsPath: string;
|
settingsPath: string;
|
||||||
authPath: string;
|
authPath: string;
|
||||||
@@ -213,10 +164,6 @@ export function runDoctor(options: DoctorOptions): void {
|
|||||||
: "not set"}`,
|
: "not set"}`,
|
||||||
);
|
);
|
||||||
const modelStatus = collectStatusSnapshot(options);
|
const modelStatus = collectStatusSnapshot(options);
|
||||||
const currentModel = typeof settings.defaultProvider === "string" && typeof settings.defaultModel === "string"
|
|
||||||
? modelRegistry.find(settings.defaultProvider, settings.defaultModel)
|
|
||||||
: undefined;
|
|
||||||
const contextRisk = buildContextRiskSummary(settings, currentModel);
|
|
||||||
console.log(`default model valid: ${modelStatus.modelValid ? "yes" : "no"}`);
|
console.log(`default model valid: ${modelStatus.modelValid ? "yes" : "no"}`);
|
||||||
console.log(`authenticated providers: ${modelStatus.authenticatedProviderCount}`);
|
console.log(`authenticated providers: ${modelStatus.authenticatedProviderCount}`);
|
||||||
console.log(`authenticated models: ${modelStatus.authenticatedModelCount}`);
|
console.log(`authenticated models: ${modelStatus.authenticatedModelCount}`);
|
||||||
@@ -225,9 +172,6 @@ export function runDoctor(options: DoctorOptions): void {
|
|||||||
if (modelStatus.recommendedModelReason) {
|
if (modelStatus.recommendedModelReason) {
|
||||||
console.log(` why: ${modelStatus.recommendedModelReason}`);
|
console.log(` why: ${modelStatus.recommendedModelReason}`);
|
||||||
}
|
}
|
||||||
for (const line of contextRisk.lines) {
|
|
||||||
console.log(line);
|
|
||||||
}
|
|
||||||
const modelsError = modelRegistry.getError();
|
const modelsError = modelRegistry.getError();
|
||||||
if (modelsError) {
|
if (modelsError) {
|
||||||
console.log("models.json: error");
|
console.log("models.json: error");
|
||||||
|
|||||||
@@ -1,156 +0,0 @@
|
|||||||
import test from "node:test";
|
|
||||||
import assert from "node:assert/strict";
|
|
||||||
import { existsSync, mkdirSync, mkdtempSync, readFileSync, utimesSync, writeFileSync } from "node:fs";
|
|
||||||
import { tmpdir } from "node:os";
|
|
||||||
import { dirname, join, resolve } from "node:path";
|
|
||||||
|
|
||||||
import type { ExtensionContext } from "@mariozechner/pi-coding-agent";
|
|
||||||
|
|
||||||
import { formatToolResultWithSpillover } from "../extensions/research-tools/alpha.js";
|
|
||||||
import { autoLogPath, writeAutoLogEntry } from "../extensions/research-tools/autolog.js";
|
|
||||||
import { computeContextPosture } from "../extensions/research-tools/context.js";
|
|
||||||
import { buildResumePacket } from "../extensions/research-tools/resume.js";
|
|
||||||
import { buildContextRiskSummary } from "../src/setup/doctor.js";
|
|
||||||
import { claimPlanSlug, collectManagedGc, spillLargeCustomToolResult } from "../extensions/research-tools/state.js";
|
|
||||||
|
|
||||||
function fakeCtx(cwd: string): ExtensionContext {
|
|
||||||
return {
|
|
||||||
cwd,
|
|
||||||
model: {
|
|
||||||
provider: "test",
|
|
||||||
id: "small",
|
|
||||||
contextWindow: 32_000,
|
|
||||||
},
|
|
||||||
getContextUsage: () => ({
|
|
||||||
tokens: 24_000,
|
|
||||||
contextWindow: 32_000,
|
|
||||||
percent: 75,
|
|
||||||
}),
|
|
||||||
sessionManager: {
|
|
||||||
getSessionId: () => "session-1",
|
|
||||||
},
|
|
||||||
} as unknown as ExtensionContext;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Oversized alphaXiv tool output must be replaced by a spill pointer whose
// file lands under outputs/.cache; the env cap is restored afterwards.
test("alpha tool spillover writes oversized output to outputs cache", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-spill-"));
  const originalCap = process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS;
  process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS = "64";
  try {
    const result = formatToolResultWithSpillover(fakeCtx(root), "alpha_get_paper", { text: "x".repeat(500) });
    const parsed = JSON.parse(result.text) as { path: string; feynman_spillover: boolean };
    assert.equal(parsed.feynman_spillover, true);
    assert.equal(existsSync(parsed.path), true);
    assert.match(readFileSync(parsed.path, "utf8"), /xxxxx/);
    assert.match(parsed.path, /outputs\/\.cache\/alpha_get_paper-/);
  } finally {
    // Restore the env var exactly as it was (unset vs. previous value).
    if (originalCap === undefined) {
      delete process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS;
    } else {
      process.env.FEYNMAN_TOOL_OUTPUT_CAP_CHARS = originalCap;
    }
  }
});

// The posture report should mirror fakeCtx's 24k/32k usage numbers directly.
test("context_report posture uses Pi context usage directly", () => {
  const report = computeContextPosture(fakeCtx("/tmp"));
  assert.equal(report.model, "test/small");
  assert.equal(report.contextWindow, 32_000);
  assert.equal(report.estimatedInputTokens, 24_000);
  assert.equal(report.compactionThresholdHit, true);
  assert.equal(report.recommendedMaxWorkers, 1);
});

// Autolog entries are written as JSONL at the path reported by autoLogPath.
test("autolog writes dated jsonl entries under notes", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-autolog-"));
  writeAutoLogEntry(root, { role: "user", text: "hello" });
  const path = autoLogPath(root);
  assert.equal(existsSync(path), true);
  assert.deepEqual(JSON.parse(readFileSync(path, "utf8").trim()), { role: "user", text: "hello" });
});

// Resume packet should pick up plans/state files and the CHANGELOG tail.
test("resume packet summarizes recent plans and changelog from disk", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-resume-"));
  mkdirSync(resolve(root, "outputs", ".plans"), { recursive: true });
  mkdirSync(resolve(root, "outputs", ".state"), { recursive: true });
  const planPath = resolve(root, "outputs", ".plans", "demo.md");
  const statePath = resolve(root, "outputs", ".state", "demo.jobs.jsonl");
  writeFileSyncSafe(planPath, "# Plan\n\n- next step");
  writeFileSyncSafe(statePath, "{\"status\":\"running\"}\n");
  writeFileSyncSafe(resolve(root, "CHANGELOG.md"), "## Entry\n- verified\n");
  const packet = buildResumePacket(root);
  assert.ok(packet);
  assert.match(packet!, /Recent plans/);
  assert.match(packet!, /demo\.md/);
  assert.match(packet!, /CHANGELOG tail/);
});

// A 32k window must be classified "high" risk, and the report lines must
// echo the configured compaction/retry overrides.
test("doctor context risk uses Pi model context window and compaction settings", () => {
  const summary = buildContextRiskSummary(
    { compaction: { reserveTokens: 4096, keepRecentTokens: 8000 }, retry: { maxRetries: 2 } },
    { provider: "local", id: "qwen", contextWindow: 32_000, maxTokens: 4096, reasoning: true },
  );
  assert.equal(summary.level, "high");
  assert.match(summary.lines.join("\n"), /Pi compaction: reserve=4096, keepRecent=8000/);
  assert.match(summary.lines.join("\n"), /Pi retry: maxRetries=2/);
});

// A second session must not be allowed to claim a slug whose plan file exists.
test("slug lock blocks overwriting an existing plan from another session", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-slug-"));
  const planPath = resolve(root, "outputs", ".plans", "demo.md");
  writeFileSyncSafe(planPath, "# Existing\n");

  const result = claimPlanSlug(root, "session-2", "outputs/.plans/demo.md");

  assert.equal(result.ok, false);
  if (!result.ok) {
    assert.match(result.reason, /Plan already exists/);
  }
});

// Dry-run reports the stale file without deleting; a real run deletes it.
test("managed cache gc deletes stale cache files and honors dry-run", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-gc-"));
  const cachePath = resolve(root, "outputs", ".cache", "old.md");
  writeFileSyncSafe(cachePath, "old");
  // Backdate the file 30 days so it falls past the 14-day retention cutoff.
  const old = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
  utimesSync(cachePath, old, old);

  const preview = collectManagedGc(root, Date.now(), 14, { dryRun: true });
  assert.equal(preview.deleted.length, 1);
  assert.equal(existsSync(cachePath), true);

  const actual = collectManagedGc(root, Date.now(), 14);
  assert.equal(actual.deleted.length, 1);
  assert.equal(existsSync(cachePath), false);
});

// Oversized custom/subagent results spill under outputs/.runs with a pointer.
test("large custom tool results spill to outputs runs", () => {
  const root = mkdtempSync(join(tmpdir(), "feynman-subagent-spill-"));
  const originalCap = process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS;
  process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS = "50";
  try {
    const result = spillLargeCustomToolResult(
      root,
      "subagent",
      "call-1",
      [{ type: "text", text: "x".repeat(200) }],
      { ok: true },
    );
    assert.ok(result);
    const parsed = JSON.parse(result!.content[0]!.text) as { path: string; feynman_spillover: boolean };
    assert.equal(parsed.feynman_spillover, true);
    assert.match(parsed.path, /outputs\/\.runs\/subagent-call-1-/);
    assert.equal(existsSync(parsed.path), true);
  } finally {
    // Restore the env var exactly as it was (unset vs. previous value).
    if (originalCap === undefined) {
      delete process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS;
    } else {
      process.env.FEYNMAN_CUSTOM_TOOL_CAP_CHARS = originalCap;
    }
  }
});
|
|
||||||
|
|
||||||
function writeFileSyncSafe(path: string, text: string): void {
|
|
||||||
mkdirSync(dirname(path), { recursive: true });
|
|
||||||
writeFileSync(path, text, "utf8");
|
|
||||||
}
|
|
||||||
@@ -261,7 +261,7 @@ This usually means the release exists, but not all platform bundles were uploade
|
|||||||
Workarounds:
|
Workarounds:
|
||||||
- try again after the release finishes publishing
|
- try again after the release finishes publishing
|
||||||
- pass the latest published version explicitly, e.g.:
|
- pass the latest published version explicitly, e.g.:
|
||||||
curl -fsSL https://feynman.is/install | bash -s -- 0.2.27
|
curl -fsSL https://feynman.is/install | bash -s -- 0.2.28
|
||||||
EOF
|
EOF
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -110,7 +110,7 @@ This usually means the release exists, but not all platform bundles were uploade
|
|||||||
Workarounds:
|
Workarounds:
|
||||||
- try again after the release finishes publishing
|
- try again after the release finishes publishing
|
||||||
- pass the latest published version explicitly, e.g.:
|
- pass the latest published version explicitly, e.g.:
|
||||||
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.27
|
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.28
|
||||||
"@
|
"@
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -117,13 +117,13 @@ These installers download the bundled `skills/` and `prompts/` trees plus the re
|
|||||||
The one-line installer already targets the latest tagged release. To pin an exact version, pass it explicitly:
|
The one-line installer already targets the latest tagged release. To pin an exact version, pass it explicitly:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
curl -fsSL https://feynman.is/install | bash -s -- 0.2.27
|
curl -fsSL https://feynman.is/install | bash -s -- 0.2.28
|
||||||
```
|
```
|
||||||
|
|
||||||
On Windows:
|
On Windows:
|
||||||
|
|
||||||
```powershell
|
```powershell
|
||||||
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.27
|
& ([scriptblock]::Create((irm https://feynman.is/install.ps1))) -Version 0.2.28
|
||||||
```
|
```
|
||||||
|
|
||||||
## Post-install setup
|
## Post-install setup
|
||||||
|
|||||||
Reference in New Issue
Block a user