Replace Pi tool registrations with skills and CLI integration

- Remove all manually registered Pi tools (alpha_search, alpha_get_paper,
  alpha_ask_paper, alpha_annotate_paper, alpha_list_annotations,
  alpha_read_code, session_search, preview_file) and their wrappers
  (alpha.ts, preview.ts, session-search.ts, alpha-tools.test.ts)
- Add Pi skill files for alpha-research, session-search, preview,
  modal-compute, and runpod-compute in skills/
- Sync skills to ~/.feynman/agent/skills/ on startup via syncBundledAssets
- Add node_modules/.bin to Pi subprocess PATH so alpha CLI is accessible
- Add /outputs extension command to browse research artifacts via dialog
- Add Modal and RunPod as execution environments in /replicate and
  /autoresearch prompts
- Remove redundant /alpha-login, /alpha-logout, and /alpha-status REPL
  commands (the feynman alpha CLI still works)
- Update README, researcher agent, metadata, and website docs

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Advait Paliwal
2026-03-25 00:38:45 -07:00
parent 5fab329ad1
commit 7024a86024
26 changed files with 320 additions and 1009 deletions

View File

@@ -1,9 +1,8 @@
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { registerAlphaCommands, registerAlphaTools } from "./research-tools/alpha.js";
import { installFeynmanHeader } from "./research-tools/header.js";
import { registerHelpCommand } from "./research-tools/help.js";
import { registerInitCommand, registerPreviewTool, registerSessionSearchTool } from "./research-tools/project.js";
import { registerInitCommand, registerOutputsCommand } from "./research-tools/project.js";
export default function researchTools(pi: ExtensionAPI): void {
const cache: { agentSummaryPromise?: Promise<{ agents: string[]; chains: string[] }> } = {};
@@ -16,10 +15,7 @@ export default function researchTools(pi: ExtensionAPI): void {
await installFeynmanHeader(pi, ctx, cache);
});
registerAlphaCommands(pi);
registerHelpCommand(pi);
registerInitCommand(pi);
registerSessionSearchTool(pi);
registerAlphaTools(pi);
registerPreviewTool(pi);
registerOutputsCommand(pi);
}

View File

@@ -1,380 +0,0 @@
import {
annotatePaper,
askPaper,
clearPaperAnnotation,
disconnect,
getPaper,
getUserName as getAlphaUserName,
isLoggedIn as isAlphaLoggedIn,
listPaperAnnotations,
login as loginAlpha,
logout as logoutAlpha,
readPaperCode,
searchPapers,
} from "@companion-ai/alpha-hub/lib";
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { Type } from "@sinclair/typebox";
import { getExtensionCommandSpec } from "../../metadata/commands.mjs";
import { collapseExcessBlankLines, formatToolText } from "./shared.js";
// Generic JSON object shape used while narrowing unknown payload records.
type JsonRecord = Record<string, unknown>;
// One normalized search result. Every field is optional because the upstream
// alphaXiv payload varies by retrieval mode; sanitizeHit fills what it can.
type AlphaSearchHit = {
  rank?: number;
  title?: string;
  publishedAt?: string;
  organizations?: string;
  authors?: string;
  abstract?: string;
  arxivId?: string;
  arxivUrl?: string;
  alphaXivUrl?: string;
};
// Results for a single retrieval mode; `note` carries raw text captured when
// no structured results could be parsed.
type AlphaSearchSection = {
  count: number;
  results: AlphaSearchHit[];
  note?: string;
};
// Top-level sanitized search payload: optional flat results plus one section
// per retrieval mode that appeared in the raw response.
type AlphaSearchPayload = {
  query?: string;
  mode?: string;
  results?: AlphaSearchHit[];
  semantic?: AlphaSearchSection;
  keyword?: AlphaSearchSection;
  agentic?: AlphaSearchSection;
};
// Type guard for plain object records; arrays and null are objects per
// typeof, so they are excluded explicitly.
function isRecord(value: unknown): value is JsonRecord {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
/**
 * Normalize a free-text field to a single line bounded to `maxLength`.
 *
 * Collapses blank-line runs and internal whitespace; returns `undefined`
 * for non-string or effectively empty input. Truncated output now ends
 * with an ellipsis so readers can tell the text was cut short.
 */
function cleanText(value: unknown, maxLength = 320): string | undefined {
  if (typeof value !== "string") {
    return undefined;
  }
  const collapsed = collapseExcessBlankLines(value)
    .replace(/\s*\n\s*/g, " ")
    .replace(/[ \t]+/g, " ");
  if (!collapsed) {
    return undefined;
  }
  if (collapsed.length <= maxLength) {
    return collapsed;
  }
  // The original sliced to maxLength - 1 (budgeting one char for a
  // truncation marker) but never appended the marker itself.
  return `${collapsed.slice(0, maxLength - 1).trimEnd()}…`;
}
// Convert one raw payload entry into an AlphaSearchHit, or null when the
// entry is not a record or lacks a usable title.
function sanitizeHit(value: unknown, fallbackRank: number): AlphaSearchHit | null {
  if (!isRecord(value)) {
    return null;
  }
  const title = cleanText(value.title, 220);
  if (!title) {
    return null;
  }
  const rank = typeof value.rank === "number" ? value.rank : fallbackRank;
  return {
    rank,
    title,
    publishedAt: cleanText(value.publishedAt, 48),
    organizations: cleanText(value.organizations, 180),
    authors: cleanText(value.authors, 220),
    abstract: cleanText(value.abstract, 360),
    arxivId: cleanText(value.arxivId, 32),
    arxivUrl: cleanText(value.arxivUrl, 160),
    alphaXivUrl: cleanText(value.alphaXivUrl, 160),
  };
}
// Sanitize a raw result list; non-arrays yield an empty list and entries
// that fail sanitizeHit are dropped. Ranks default to 1-based position.
function sanitizeHits(value: unknown): AlphaSearchHit[] {
  if (!Array.isArray(value)) {
    return [];
  }
  const hits: AlphaSearchHit[] = [];
  value.forEach((entry, index) => {
    const hit = sanitizeHit(entry, index + 1);
    if (hit !== null) {
      hits.push(hit);
    }
  });
  return hits;
}
// Normalize one retrieval-mode section. When nothing structured parsed,
// keep a trimmed copy of the raw text as a note for display.
function sanitizeSection(value: unknown): AlphaSearchSection {
  if (!isRecord(value)) {
    return { count: 0, results: [] };
  }
  const results = sanitizeHits(value.results);
  const note = results.length === 0 ? cleanText(value.raw, 600) : undefined;
  const section: AlphaSearchSection = { count: results.length, results };
  if (note) {
    section.note = note;
  }
  return section;
}
/**
 * Sanitize a raw alphaXiv search response into an AlphaSearchPayload.
 * Flat top-level results are kept only when non-empty; per-mode sections
 * are sanitized whenever the corresponding key exists in the response.
 */
export function sanitizeAlphaSearchPayload(value: unknown): AlphaSearchPayload {
  if (!isRecord(value)) {
    return {};
  }
  const payload: AlphaSearchPayload = {
    query: cleanText(value.query, 240),
    mode: cleanText(value.mode, 32),
  };
  const flatHits = sanitizeHits(value.results);
  if (flatHits.length > 0) {
    payload.results = flatHits;
  }
  const sectionKeys = ["semantic", "keyword", "agentic"] as const;
  for (const key of sectionKeys) {
    if (key in value) {
      payload[key] = sanitizeSection(value[key]);
    }
  }
  return payload;
}
// Append the display lines for one hit: a headline row followed by one
// indented row per populated field.
function pushHitLines(lines: string[], hit: AlphaSearchHit): void {
  lines.push(`${hit.rank ?? "?"}. ${hit.title ?? "Untitled result"}`);
  const rows: Array<[string | undefined, string]> = [
    [hit.arxivId, " arXiv: "],
    [hit.publishedAt, " published: "],
    [hit.organizations, " orgs: "],
    [hit.authors, " authors: "],
    [hit.abstract, " abstract: "],
    [hit.arxivUrl, " arXiv URL: "],
    [hit.alphaXivUrl, " alphaXiv URL: "],
  ];
  for (const [fieldValue, prefix] of rows) {
    if (fieldValue) {
      lines.push(`${prefix}${fieldValue}`);
    }
  }
}
// Append a section header plus either its hits or a fallback row when
// no results were parsed (surfacing the raw note when one was captured).
function pushSectionLines(lines: string[], label: string, section: AlphaSearchSection): void {
  lines.push(`${label} (${section.count})`);
  if (section.results.length > 0) {
    for (const hit of section.results) {
      pushHitLines(lines, hit);
    }
    return;
  }
  lines.push(section.note ? ` note: ${section.note}` : " no parsed results");
}
export function formatAlphaSearchContext(value: unknown): string {
const payload = sanitizeAlphaSearchPayload(value);
const lines: string[] = [];
if (payload.query) lines.push(`query: ${payload.query}`);
if (payload.mode) lines.push(`mode: ${payload.mode}`);
if (payload.results) {
pushSectionLines(lines, "results", { count: payload.results.length, results: payload.results });
}
for (const [label, section] of [
["semantic", payload.semantic],
["keyword", payload.keyword],
["agentic", payload.agentic],
] as const) {
if (section) {
pushSectionLines(lines, label, section);
}
}
return lines.length > 0 ? lines.join("\n") : "No alpha search results returned.";
}
/**
 * Register the alphaXiv auth REPL commands: /alpha-login, /alpha-logout,
 * and /alpha-status. Descriptions come from the extension command metadata
 * with literal fallbacks.
 */
export function registerAlphaCommands(pi: ExtensionAPI): void {
  pi.registerCommand("alpha-login", {
    description: getExtensionCommandSpec("alpha-login")?.description ?? "Sign in to alphaXiv from inside Feynman.",
    handler: async (_args, ctx) => {
      // Skip the interactive login flow when credentials already exist.
      if (isAlphaLoggedIn()) {
        const existing = getAlphaUserName();
        const message = existing ? `alphaXiv already connected as ${existing}` : "alphaXiv already connected";
        ctx.ui.notify(message, "info");
        return;
      }
      await loginAlpha();
      const who = getAlphaUserName();
      ctx.ui.notify(who ? `alphaXiv connected as ${who}` : "alphaXiv login complete", "info");
    },
  });
  pi.registerCommand("alpha-logout", {
    description: getExtensionCommandSpec("alpha-logout")?.description ?? "Clear alphaXiv auth from inside Feynman.",
    handler: async (_args, ctx) => {
      logoutAlpha();
      ctx.ui.notify("alphaXiv auth cleared", "info");
    },
  });
  pi.registerCommand("alpha-status", {
    description: getExtensionCommandSpec("alpha-status")?.description ?? "Show alphaXiv authentication status.",
    handler: async (_args, ctx) => {
      if (!isAlphaLoggedIn()) {
        ctx.ui.notify("alphaXiv not connected", "warning");
        return;
      }
      const who = getAlphaUserName();
      const message = who ? `alphaXiv connected as ${who}` : "alphaXiv connected";
      ctx.ui.notify(message, "info");
    },
  });
}
/**
 * Register the alphaXiv-backed paper tools on the Pi agent: search, paper
 * report/full-text fetch, targeted Q&A, local annotations, and repository
 * file reading.
 */
export function registerAlphaTools(pi: ExtensionAPI): void {
  pi.registerTool({
    name: "alpha_search",
    label: "Alpha Search",
    description: "Search papers through alphaXiv using semantic, keyword, both, agentic, or all retrieval modes.",
    parameters: Type.Object({
      query: Type.String({ description: "Paper search query." }),
      mode: Type.Optional(
        Type.String({
          description: "Search mode: semantic, keyword, both, agentic, or all.",
        }),
      ),
    }),
    async execute(_toolCallId, params) {
      try {
        // Empty or whitespace-only mode falls back to "all".
        const result = await searchPapers(params.query, params.mode?.trim() || "all");
        const sanitized = sanitizeAlphaSearchPayload(result);
        return {
          content: [{ type: "text", text: formatAlphaSearchContext(sanitized) }],
          details: sanitized,
        };
      } finally {
        // Tear the alphaXiv connection down even when the search throws.
        await disconnect();
      }
    },
  });
  pi.registerTool({
    name: "alpha_get_paper",
    label: "Alpha Get Paper",
    description: "Fetch a paper report or full text, plus any local annotation, using alphaXiv.",
    parameters: Type.Object({
      paper: Type.String({
        description: "arXiv ID, arXiv URL, or alphaXiv URL.",
      }),
      fullText: Type.Optional(
        Type.Boolean({
          description: "Return raw full text instead of the AI report.",
        }),
      ),
    }),
    async execute(_toolCallId, params) {
      try {
        const result = await getPaper(params.paper, { fullText: params.fullText });
        return {
          content: [{ type: "text", text: formatToolText(result) }],
          details: result,
        };
      } finally {
        await disconnect();
      }
    },
  });
  pi.registerTool({
    name: "alpha_ask_paper",
    label: "Alpha Ask Paper",
    description: "Ask a targeted question about a paper using alphaXiv's PDF analysis.",
    parameters: Type.Object({
      paper: Type.String({
        description: "arXiv ID, arXiv URL, or alphaXiv URL.",
      }),
      question: Type.String({
        description: "Question to ask about the paper.",
      }),
    }),
    async execute(_toolCallId, params) {
      try {
        const result = await askPaper(params.paper, params.question);
        return {
          content: [{ type: "text", text: formatToolText(result) }],
          details: result,
        };
      } finally {
        await disconnect();
      }
    },
  });
  pi.registerTool({
    name: "alpha_annotate_paper",
    label: "Alpha Annotate Paper",
    description: "Write or clear a persistent local annotation for a paper.",
    parameters: Type.Object({
      paper: Type.String({
        description: "Paper ID to annotate.",
      }),
      note: Type.Optional(
        Type.String({
          description: "Annotation text. Omit when clear=true.",
        }),
      ),
      clear: Type.Optional(
        Type.Boolean({
          description: "Clear the existing annotation instead of writing one.",
        }),
      ),
    }),
    async execute(_toolCallId, params) {
      // Exactly one of `clear` or `note` must be supplied; the trailing
      // IIFE throws when neither is, keeping the chain an expression.
      const result = params.clear
        ? await clearPaperAnnotation(params.paper)
        : params.note
          ? await annotatePaper(params.paper, params.note)
          : (() => {
              throw new Error("Provide either note or clear=true.");
            })();
      return {
        content: [{ type: "text", text: formatToolText(result) }],
        details: result,
      };
    },
  });
  pi.registerTool({
    name: "alpha_list_annotations",
    label: "Alpha List Annotations",
    description: "List all persistent local paper annotations.",
    parameters: Type.Object({}),
    async execute() {
      // Local-only operation, so no disconnect() teardown is needed here.
      const result = await listPaperAnnotations();
      return {
        content: [{ type: "text", text: formatToolText(result) }],
        details: result,
      };
    },
  });
  pi.registerTool({
    name: "alpha_read_code",
    label: "Alpha Read Code",
    description: "Read files from a paper's GitHub repository through alphaXiv.",
    parameters: Type.Object({
      githubUrl: Type.String({
        description: "GitHub repository URL for the paper implementation.",
      }),
      path: Type.Optional(
        Type.String({
          description: "Repository path to inspect. Use / for the repo overview.",
        }),
      ),
    }),
    async execute(_toolCallId, params) {
      try {
        // Blank path defaults to the repository root overview.
        const result = await readPaperCode(params.githubUrl, params.path?.trim() || "/");
        return {
          content: [{ type: "text", text: formatToolText(result) }],
          details: result,
        };
      } finally {
        await disconnect();
      }
    },
  });
}

View File

@@ -1,183 +0,0 @@
import { execFile, spawn } from "node:child_process";
import { mkdir, mkdtemp, readFile, stat, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { basename, dirname, extname, join } from "node:path";
import { pathToFileURL } from "node:url";
import { promisify } from "node:util";
// Promise-returning execFile, used to await the platform "open" commands.
const execFileAsync = promisify(execFile);
// True when the file extension marks plain-text/markdown content
// (.md, .markdown, .txt), compared case-insensitively.
function isMarkdownPath(path: string): boolean {
  const ext = extname(path).toLowerCase();
  return ext === ".md" || ext === ".markdown" || ext === ".txt";
}
// True when the file extension is .tex (case-insensitive).
function isLatexPath(path: string): boolean {
  const ext = extname(path).toLowerCase();
  return ext === ".tex";
}
// Fence raw source as a markdown code block (headed by the file name) so
// pandoc renders it with syntax highlighting. Extension-less files use
// the "text" language tag.
function wrapCodeAsMarkdown(source: string, filePath: string): string {
  const ext = extname(filePath).replace(/^\./, "");
  const language = ext === "" ? "text" : ext;
  return ["# " + basename(filePath), "", "```" + language, source, "```", ""].join("\n");
}
/**
 * Open a file in the OS default application. Dispatches per platform:
 * `open` on macOS, `cmd /c start` on Windows, `xdg-open` elsewhere.
 * The path is passed as a file:// URL.
 */
export async function openWithDefaultApp(targetPath: string): Promise<void> {
  const url = pathToFileURL(targetPath).href;
  switch (process.platform) {
    case "darwin":
      await execFileAsync("open", [url]);
      break;
    case "win32":
      // The empty string is the window title slot required by `start`.
      await execFileAsync("cmd", ["/c", "start", "", url]);
      break;
    default:
      await execFileAsync("xdg-open", [url]);
  }
}
/**
 * Spawn `command`, feed `input` on stdin, and buffer both output streams.
 * Resolves with decoded stdout/stderr on exit code 0; rejects otherwise,
 * including the trimmed stderr in the error message when present.
 */
async function runCommandWithInput(
  command: string,
  args: string[],
  input: string,
): Promise<{ stdout: string; stderr: string }> {
  return await new Promise((resolve, reject) => {
    const child = spawn(command, args, { stdio: ["pipe", "pipe", "pipe"] });
    const outChunks: Buffer[] = [];
    const errChunks: Buffer[] = [];
    const asBuffer = (chunk: Buffer | string): Buffer =>
      Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
    child.stdout.on("data", (chunk: Buffer | string) => outChunks.push(asBuffer(chunk)));
    child.stderr.on("data", (chunk: Buffer | string) => errChunks.push(asBuffer(chunk)));
    child.once("error", reject);
    child.once("close", (code) => {
      const outText = Buffer.concat(outChunks).toString("utf8");
      const errText = Buffer.concat(errChunks).toString("utf8");
      if (code !== 0) {
        const suffix = errText ? `: ${errText.trim()}` : "";
        reject(new Error(`${command} failed with exit code ${code}${suffix}`));
        return;
      }
      resolve({ stdout: outText, stderr: errText });
    });
    // Closing stdin lets read-to-EOF commands (like pandoc) finish.
    child.stdin.end(input);
  });
}
/**
 * Render a file to a standalone, styled HTML document via pandoc and
 * return the path of the generated temp file.
 *
 * LaTeX sources use the "latex" reader; everything else goes through the
 * markdown reader (non-markdown sources are fenced as a code block first).
 * Math is emitted as MathML. The <base> tag makes relative links/images
 * resolve against the source file's directory, and the inline CSS applies
 * a light/dark "paper" theme via prefers-color-scheme.
 */
export async function renderHtmlPreview(filePath: string): Promise<string> {
  const source = await readFile(filePath, "utf8");
  // PANDOC_PATH lets callers point at a non-PATH pandoc binary.
  const pandocCommand = process.env.PANDOC_PATH?.trim() || "pandoc";
  const inputFormat = isLatexPath(filePath)
    ? "latex"
    : "markdown+lists_without_preceding_blankline+tex_math_dollars+autolink_bare_uris-raw_html";
  const markdown = isLatexPath(filePath) || isMarkdownPath(filePath) ? source : wrapCodeAsMarkdown(source, filePath);
  const args = ["-f", inputFormat, "-t", "html5", "--mathml", "--wrap=none", `--resource-path=${dirname(filePath)}`];
  const { stdout } = await runCommandWithInput(pandocCommand, args, markdown);
  const html = `<!doctype html><html><head><meta charset="utf-8" /><base href="${pathToFileURL(dirname(filePath) + "/").href}" /><title>${basename(filePath)}</title><style>
:root{
--bg:#faf7f2;
--paper:#fffdf9;
--border:#d7cec1;
--text:#1f1c18;
--muted:#6c645a;
--code:#f3eee6;
--link:#0f6d8c;
--quote:#8b7f70;
}
@media (prefers-color-scheme: dark){
:root{
--bg:#161311;
--paper:#1d1916;
--border:#3b342d;
--text:#ebe3d6;
--muted:#b4ab9f;
--code:#221d19;
--link:#8ac6d6;
--quote:#a89d8f;
}
}
body{
font-family:Charter,"Iowan Old Style","Palatino Linotype","Book Antiqua",Palatino,Georgia,serif;
margin:0;
background:var(--bg);
color:var(--text);
line-height:1.7;
}
main{
max-width:900px;
margin:2rem auto 4rem;
padding:2.5rem 3rem;
background:var(--paper);
border:1px solid var(--border);
border-radius:18px;
box-shadow:0 12px 40px rgba(0,0,0,.06);
}
h1,h2,h3,h4,h5,h6{
font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;
line-height:1.2;
margin-top:1.5em;
}
h1{font-size:2.2rem;border-bottom:1px solid var(--border);padding-bottom:.35rem;}
h2{font-size:1.6rem;border-bottom:1px solid var(--border);padding-bottom:.25rem;}
p,ul,ol,blockquote,table{margin:1rem 0;}
pre,code{font-family:ui-monospace,SFMono-Regular,Menlo,monospace}
pre{
background:var(--code);
border:1px solid var(--border);
border-radius:12px;
padding:1rem 1.1rem;
overflow:auto;
}
code{
background:var(--code);
padding:.12rem .28rem;
border-radius:6px;
}
a{color:var(--link);text-decoration:none}
a:hover{text-decoration:underline}
img{max-width:100%}
blockquote{
border-left:4px solid var(--border);
padding-left:1rem;
color:var(--quote);
}
table{border-collapse:collapse;width:100%}
th,td{border:1px solid var(--border);padding:.55rem .7rem;text-align:left}
</style></head><body><main>${stdout}</main></body></html>`;
  // The preview is disposable, so it is written to a fresh temp dir.
  const tempDir = await mkdtemp(join(tmpdir(), "feynman-preview-"));
  const htmlPath = join(tempDir, `${basename(filePath)}.html`);
  await writeFile(htmlPath, html, "utf8");
  return htmlPath;
}
/**
 * Render a file to a temporary PDF via pandoc and return the PDF path.
 * Input handling mirrors renderHtmlPreview: LaTeX uses the latex reader,
 * markdown passes through, and other files are fenced as code first.
 * PANDOC_PATH / PANDOC_PDF_ENGINE override the pandoc binary and engine
 * (default engine: xelatex).
 */
export async function renderPdfPreview(filePath: string): Promise<string> {
  const source = await readFile(filePath, "utf8");
  const pandocCommand = process.env.PANDOC_PATH?.trim() || "pandoc";
  const pdfEngine = process.env.PANDOC_PDF_ENGINE?.trim() || "xelatex";
  const passThrough = isLatexPath(filePath) || isMarkdownPath(filePath);
  const inputFormat = isLatexPath(filePath)
    ? "latex"
    : "markdown+lists_without_preceding_blankline+tex_math_dollars+autolink_bare_uris-raw_html";
  const markdown = passThrough ? source : wrapCodeAsMarkdown(source, filePath);
  const tempDir = await mkdtemp(join(tmpdir(), "feynman-preview-"));
  const pdfPath = join(tempDir, `${basename(filePath)}.pdf`);
  const pandocArgs = [
    "-f",
    inputFormat,
    "-o",
    pdfPath,
    `--pdf-engine=${pdfEngine}`,
    `--resource-path=${dirname(filePath)}`,
  ];
  await runCommandWithInput(pandocCommand, pandocArgs, markdown);
  return pdfPath;
}
// stat() rejects for missing paths; translate that into a boolean so
// callers can probe without try/catch.
export async function pathExists(path: string): Promise<boolean> {
  return stat(path).then(
    () => true,
    () => false,
  );
}

View File

@@ -1,14 +1,70 @@
import { mkdir, stat, writeFile } from "node:fs/promises";
import { dirname, resolve as resolvePath } from "node:path";
import { mkdir, readdir, readFile, stat, writeFile } from "node:fs/promises";
import { join, relative, resolve as resolvePath } from "node:path";
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { Type } from "@sinclair/typebox";
import { getExtensionCommandSpec } from "../../metadata/commands.mjs";
import { renderHtmlPreview, renderPdfPreview, openWithDefaultApp, pathExists } from "./preview.js";
import { buildProjectAgentsTemplate, buildSessionLogsReadme } from "./project-scaffold.js";
import { formatToolText } from "./shared.js";
import { searchSessionTranscripts } from "./session-search.js";
// Probe a path for existence without throwing (any stat error = missing).
async function pathExists(path: string): Promise<boolean> {
  try {
    await stat(path);
    return true;
  } catch {
    return false;
  }
}
// Project subdirectories scanned for research artifacts.
const ARTIFACT_DIRS = ["papers", "outputs", "experiments", "notes"];
// File extensions treated as browsable artifacts (lowercase, with dot).
const ARTIFACT_EXTS = new Set([".md", ".tex", ".pdf", ".py", ".csv", ".json", ".html", ".txt", ".log"]);
/**
 * Recursively collect artifact files under the known artifact directories.
 *
 * Returns entries sorted by modification time (newest first). Each label
 * shows the cwd-relative path, the first Markdown H1 title when one is
 * found in the file head, and a human-readable size.
 */
async function collectArtifacts(cwd: string): Promise<{ label: string; path: string }[]> {
  const items: { label: string; path: string; mtime: number }[] = [];
  for (const dir of ARTIFACT_DIRS) {
    const dirPath = resolvePath(cwd, dir);
    if (!(await pathExists(dirPath))) continue;
    const walk = async (current: string): Promise<void> => {
      let entries;
      try {
        entries = await readdir(current, { withFileTypes: true });
      } catch {
        return; // unreadable directory: skip silently
      }
      for (const entry of entries) {
        const full = join(current, entry.name);
        if (entry.isDirectory()) {
          await walk(full);
          continue;
        }
        // Guard against extension-less names (lastIndexOf(".") === -1 would
        // otherwise slice the final character) and compare extensions
        // case-insensitively so e.g. "REPORT.MD" is included.
        const dot = entry.name.lastIndexOf(".");
        if (dot === -1 || !ARTIFACT_EXTS.has(entry.name.slice(dot).toLowerCase())) continue;
        const rel = relative(cwd, full);
        let title = "";
        try {
          const head = await readFile(full, "utf8").then((c) => c.slice(0, 200));
          const match = head.match(/^#\s+(.+)/m);
          if (match) title = match[1]!.trim();
        } catch {
          // Binary or unreadable file: list it without a title.
        }
        const info = await stat(full).catch(() => null);
        const mtime = info?.mtimeMs ?? 0;
        const size = info ? formatSize(info.size) : "";
        // Separate the title from the path so the label stays readable
        // (previously the title was concatenated directly onto the path).
        const titlePart = title ? ` — ${title}` : "";
        items.push({ label: `${rel}${titlePart} (${size})`, path: rel, mtime });
      }
    };
    await walk(dirPath);
  }
  items.sort((a, b) => b.mtime - a.mtime);
  return items;
}
// Render a byte count as B/KB/MB for the picker label.
function formatSize(bytes: number): string {
  if (bytes < 1024) return `${bytes}B`;
  if (bytes < 1024 * 1024) return `${Math.round(bytes / 1024)}KB`;
  return `${(bytes / (1024 * 1024)).toFixed(1)}MB`;
}
export function registerInitCommand(pi: ExtensionAPI): void {
pi.registerCommand("init", {
@@ -45,73 +101,23 @@ export function registerInitCommand(pi: ExtensionAPI): void {
});
}
/**
 * Register the session_search tool, which lets the agent query prior
 * Feynman session transcripts. The requested result count is clamped to
 * the inclusive range [1, 8], defaulting to 3.
 */
export function registerSessionSearchTool(pi: ExtensionAPI): void {
  pi.registerTool({
    name: "session_search",
    label: "Session Search",
    description: "Search prior Feynman session transcripts to recover what was done, said, or written before.",
    parameters: Type.Object({
      query: Type.String({
        description: "Search query to look for in past sessions.",
      }),
      limit: Type.Optional(
        Type.Number({
          description: "Maximum number of sessions to return. Defaults to 3.",
        }),
      ),
    }),
    async execute(_toolCallId, params) {
      const requested = params.limit ?? 3;
      const cappedLimit = Math.min(Math.max(requested, 1), 8);
      const result = await searchSessionTranscripts(params.query, cappedLimit);
      return {
        content: [{ type: "text", text: formatToolText(result) }],
        details: result,
      };
    },
  });
}
export function registerPreviewTool(pi: ExtensionAPI): void {
pi.registerTool({
name: "preview_file",
label: "Preview File",
description: "Open a markdown, LaTeX, PDF, or code artifact in the browser or a PDF viewer for human review. Rendered HTML/PDF previews are temporary and do not replace the source artifact.",
parameters: Type.Object({
path: Type.String({
description: "Path to the file to preview.",
}),
target: Type.Optional(
Type.String({
description: "Preview target: browser or pdf. Defaults to browser.",
}),
),
}),
async execute(_toolCallId, params, _signal, _onUpdate, ctx) {
const target = (params.target?.trim().toLowerCase() || "browser");
if (target !== "browser" && target !== "pdf") {
throw new Error("target must be browser or pdf");
export function registerOutputsCommand(pi: ExtensionAPI): void {
pi.registerCommand("outputs", {
description: "Browse all research artifacts (papers, outputs, experiments, notes).",
handler: async (_args, ctx) => {
const items = await collectArtifacts(ctx.cwd);
if (items.length === 0) {
ctx.ui.notify("No artifacts found. Use /lit, /draft, /review, or /deepresearch to create some.", "info");
return;
}
const resolvedPath = resolvePath(ctx.cwd, params.path);
const openedPath =
resolvePath(resolvedPath).toLowerCase().endsWith(".pdf") && target === "pdf"
? resolvedPath
: target === "pdf"
? await renderPdfPreview(resolvedPath)
: await renderHtmlPreview(resolvedPath);
const selected = await ctx.ui.select(`Artifacts (${items.length})`, items.map((i) => i.label));
if (!selected) return;
await mkdir(dirname(openedPath), { recursive: true }).catch(() => {});
await openWithDefaultApp(openedPath);
const result = {
sourcePath: resolvedPath,
target,
openedPath,
temporaryPreview: openedPath !== resolvedPath,
};
return {
content: [{ type: "text", text: formatToolText(result) }],
details: result,
};
const match = items.find((i) => i.label === selected);
if (match) {
ctx.ui.setEditorText(`read ${match.path}`);
}
},
});
}

View File

@@ -1,223 +0,0 @@
import { readdir, readFile, stat } from "node:fs/promises";
import { basename, join } from "node:path";
import { pathToFileURL } from "node:url";
import { getFeynmanHome } from "./shared.js";
// Flatten a transcript message into searchable plain text: string content
// passes through, array content yields text parts plus "[tool:name] args"
// lines for tool calls; anything else contributes nothing.
function extractMessageText(message: unknown): string {
  if (!message || typeof message !== "object") {
    return "";
  }
  const content = (message as { content?: unknown }).content;
  if (typeof content === "string") {
    return content;
  }
  if (!Array.isArray(content)) {
    return "";
  }
  const pieces: string[] = [];
  for (const item of content) {
    if (!item || typeof item !== "object") {
      continue;
    }
    const record = item as { type?: string; text?: unknown; arguments?: unknown; name?: unknown };
    if (record.type === "text" && typeof record.text === "string") {
      pieces.push(record.text);
    } else if (record.type === "toolCall") {
      const name = typeof record.name === "string" ? record.name : "tool";
      let args = "";
      if (typeof record.arguments === "string") {
        args = record.arguments;
      } else if (record.arguments) {
        args = JSON.stringify(record.arguments);
      }
      pieces.push(`[tool:${name}] ${args}`);
    }
  }
  // Drop empty text parts before joining, matching the filter(Boolean)
  // behavior of the original pipeline.
  return pieces.filter(Boolean).join("\n");
}
// Produce a one-line excerpt around the first (case-insensitive) occurrence
// of `query`, padded by `radius` chars on each side with "..." markers where
// text was cut. When the query is absent, the head of the text is returned.
function buildExcerpt(text: string, query: string, radius = 180): string {
  const normalized = text.replace(/\s+/g, " ").trim();
  if (!normalized) {
    return "";
  }
  const hit = normalized.toLowerCase().indexOf(query.toLowerCase());
  if (hit < 0) {
    const head = normalized.slice(0, radius * 2);
    return normalized.length > radius * 2 ? `${head}...` : head;
  }
  const from = Math.max(0, hit - radius);
  const to = Math.min(normalized.length, hit + query.length + radius);
  const leading = from > 0 ? "..." : "";
  const trailing = to < normalized.length ? "..." : "";
  return `${leading}${normalized.slice(from, to)}${trailing}`;
}
/**
 * Search prior Feynman session transcripts for `query`.
 *
 * Strategy: when FEYNMAN_PI_NPM_ROOT is set, try the optional
 * pi-session-search indexer package first; if it is missing, throws, or
 * returns nothing, fall back to scanning the JSONL session logs under
 * <feynman-home>/sessions directly.
 *
 * Returns at most `limit` sessions, ranked by match count and then by
 * session-file modification time (newest first).
 */
export async function searchSessionTranscripts(query: string, limit: number): Promise<{
  query: string;
  results: Array<{
    sessionId: string;
    sessionFile: string;
    startedAt?: string;
    cwd?: string;
    matchCount: number;
    topMatches: Array<{ role: string; timestamp?: string; excerpt: string }>;
  }>;
}> {
  const packageRoot = process.env.FEYNMAN_PI_NPM_ROOT;
  if (packageRoot) {
    try {
      // Dynamic import via file URL so the optional package is a
      // recoverable failure rather than a hard dependency.
      const indexerPath = pathToFileURL(
        join(packageRoot, "@kaiserlich-dev", "pi-session-search", "extensions", "indexer.ts"),
      ).href;
      const indexer = await import(indexerPath) as {
        updateIndex?: (onProgress?: (msg: string) => void) => Promise<number>;
        search?: (query: string, limit?: number) => Array<{
          sessionPath: string;
          project: string;
          timestamp: string;
          snippet: string;
          rank: number;
          title: string | null;
        }>;
        getSessionSnippets?: (sessionPath: string, query: string, limit?: number) => string[];
      };
      await indexer.updateIndex?.();
      const results = indexer.search?.(query, limit) ?? [];
      if (results.length > 0) {
        return {
          query,
          results: results.map((result) => ({
            sessionId: basename(result.sessionPath),
            sessionFile: result.sessionPath,
            startedAt: result.timestamp,
            cwd: result.project,
            // The indexer returns one ranked row per session, so a true
            // per-session match count is not available here.
            matchCount: 1,
            topMatches: (indexer.getSessionSnippets?.(result.sessionPath, query, 4) ?? [result.snippet])
              .filter(Boolean)
              .map((excerpt) => ({
                role: "match",
                excerpt,
              })),
          })),
        };
      }
    } catch {
      // Fall back to direct JSONL scanning below.
    }
  }
  const sessionDir = join(getFeynmanHome(), "sessions");
  // Individual terms (>= 2 chars) are matched in addition to the full
  // query string, so partial matches still surface sessions.
  const terms = query
    .toLowerCase()
    .split(/\s+/)
    .map((term) => term.trim())
    .filter((term) => term.length >= 2);
  const needle = query.toLowerCase();
  let files: string[] = [];
  try {
    files = (await readdir(sessionDir))
      .filter((entry) => entry.endsWith(".jsonl"))
      .map((entry) => join(sessionDir, entry));
  } catch {
    // No session directory yet: report zero results instead of throwing.
    return { query, results: [] };
  }
  const sessions = [];
  for (const file of files) {
    const raw = await readFile(file, "utf8").catch(() => "");
    if (!raw) {
      continue;
    }
    let sessionId = basename(file);
    let startedAt: string | undefined;
    let cwd: string | undefined;
    const matches: Array<{ role: string; timestamp?: string; excerpt: string }> = [];
    for (const line of raw.split("\n")) {
      if (!line.trim()) {
        continue;
      }
      try {
        const record = JSON.parse(line) as {
          type?: string;
          id?: string;
          timestamp?: string;
          cwd?: string;
          message?: { role?: string; content?: unknown };
        };
        // The "session" header record carries metadata for the whole file.
        if (record.type === "session") {
          sessionId = record.id ?? sessionId;
          startedAt = record.timestamp;
          cwd = record.cwd;
          continue;
        }
        if (record.type !== "message" || !record.message) {
          continue;
        }
        const text = extractMessageText(record.message);
        if (!text) {
          continue;
        }
        const lower = text.toLowerCase();
        // Match on the whole query or on any individual term.
        const matched = lower.includes(needle) || terms.some((term) => lower.includes(term));
        if (!matched) {
          continue;
        }
        matches.push({
          role: record.message.role ?? "unknown",
          timestamp: record.timestamp,
          excerpt: buildExcerpt(text, query),
        });
      } catch {
        // Skip malformed JSONL lines.
        continue;
      }
    }
    if (matches.length === 0) {
      continue;
    }
    let mtime = 0;
    try {
      mtime = (await stat(file)).mtimeMs;
    } catch {
      mtime = 0;
    }
    sessions.push({
      sessionId,
      sessionFile: file,
      startedAt,
      cwd,
      matchCount: matches.length,
      topMatches: matches.slice(0, 4),
      mtime,
    });
  }
  // Rank by number of matching messages, breaking ties by recency.
  sessions.sort((a, b) => {
    if (b.matchCount !== a.matchCount) {
      return b.matchCount - a.matchCount;
    }
    return b.mtime - a.mtime;
  });
  return {
    query,
    // mtime was only needed for sorting; strip it from the public result.
    results: sessions.slice(0, limit).map(({ mtime: _mtime, ...session }) => session),
  };
}

View File

@@ -1,5 +1,4 @@
import { readFileSync } from "node:fs";
import { homedir } from "node:os";
import { dirname, resolve as resolvePath } from "node:path";
import { fileURLToPath } from "node:url";
@@ -15,30 +14,3 @@ export const FEYNMAN_VERSION = (() => {
})();
export { FEYNMAN_ASCII_LOGO as FEYNMAN_AGENT_LOGO } from "../../logo.mjs";
// Names of the Pi tools registered by the Feynman research extensions
// (alphaXiv paper tools, session transcript search, and file preview).
export const FEYNMAN_RESEARCH_TOOLS = [
  "alpha_search",
  "alpha_get_paper",
  "alpha_ask_paper",
  "alpha_annotate_paper",
  "alpha_list_annotations",
  "alpha_read_code",
  "session_search",
  "preview_file",
];
// Normalize CRLF to LF, squeeze runs of three or more newlines down to a
// single blank line, and strip leading/trailing whitespace.
export function collapseExcessBlankLines(text: string): string {
  const unixText = text.replace(/\r\n/g, "\n");
  return unixText.replace(/\n{3,}/g, "\n\n").trim();
}
// Render a tool result as display text: strings pass through, everything
// else is pretty-printed JSON; blank-line runs are collapsed either way.
export function formatToolText(result: unknown): string {
  if (typeof result === "string") {
    return collapseExcessBlankLines(result);
  }
  return collapseExcessBlankLines(JSON.stringify(result, null, 2));
}
// Resolve the Feynman home directory: the parent of the agent directory,
// which comes from FEYNMAN_CODING_AGENT_DIR, then PI_CODING_AGENT_DIR,
// then the default ~/.feynman/agent.
export function getFeynmanHome(): string {
  const envDir = process.env.FEYNMAN_CODING_AGENT_DIR ?? process.env.PI_CODING_AGENT_DIR;
  const agentDir = envDir ?? resolvePath(homedir(), ".feynman", "agent");
  return dirname(agentDir);
}