Prune removed bundled skills during bootstrap sync

This commit is contained in:
Advait Paliwal
2026-03-25 01:37:08 -07:00
parent 75b0467761
commit 151956ea24
2 changed files with 91 additions and 9 deletions

View File

@@ -1,5 +1,5 @@
import { createHash } from "node:crypto"; import { createHash } from "node:crypto";
import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from "node:fs"; import { existsSync, mkdirSync, readdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
import { dirname, relative, resolve } from "node:path"; import { dirname, relative, resolve } from "node:path";
import { getBootstrapStatePath } from "../config/paths.js"; import { getBootstrapStatePath } from "../config/paths.js";
@@ -64,27 +64,76 @@ function listFiles(root: string): string[] {
return files.sort(); return files.sort();
} }
/**
 * Walks upward from the parent of `path`, deleting each directory that has
 * become empty, and stops at the first non-empty ancestor. The `stopAt` root
 * itself is never removed, and paths outside `stopAt` are left untouched.
 *
 * @param path - File path whose now-empty parent directories should be pruned
 *   (typically a managed file that was just deleted).
 * @param stopAt - Root directory bounding the upward walk (exclusive).
 */
function removeEmptyParentDirectories(path: string, stopAt: string): void {
  let current = dirname(path);
  for (;;) {
    // Containment check via relative(): a raw startsWith prefix test would
    // wrongly treat a sibling such as "/root2/x" as being inside "/root",
    // and is fragile across separator styles. rel === "" means current IS
    // stopAt; a ".." prefix means current lies outside it.
    // NOTE(review): on Windows a cross-drive path yields an absolute rel
    // without "..", which this does not catch — acceptable here since
    // callers derive `path` from inside `stopAt`.
    const rel = relative(stopAt, current);
    if (rel === "" || rel.startsWith("..")) {
      return;
    }
    if (!existsSync(current)) {
      // Already gone (e.g. removed out-of-band); keep climbing toward stopAt.
      current = dirname(current);
      continue;
    }
    if (readdirSync(current).length > 0) {
      // First non-empty ancestor: everything above it is still in use.
      return;
    }
    rmSync(current, { recursive: true, force: true });
    current = dirname(current);
  }
}
function syncManagedFiles( function syncManagedFiles(
sourceRoot: string, sourceRoot: string,
targetRoot: string, targetRoot: string,
scope: string,
state: BootstrapState, state: BootstrapState,
result: BootstrapSyncResult, result: BootstrapSyncResult,
): void { ): void {
const sourcePaths = new Set(listFiles(sourceRoot).map((sourcePath) => relative(sourceRoot, sourcePath)));
for (const targetPath of listFiles(targetRoot)) {
const key = relative(targetRoot, targetPath);
if (sourcePaths.has(key)) continue;
const scopedKey = `${scope}:${key}`;
const previous = state.files[scopedKey] ?? state.files[key];
if (!previous) {
continue;
}
if (!existsSync(targetPath)) {
delete state.files[scopedKey];
delete state.files[key];
continue;
}
const currentTargetText = readFileSync(targetPath, "utf8");
const currentTargetHash = sha256(currentTargetText);
if (currentTargetHash !== previous.lastAppliedTargetHash) {
result.skipped.push(key);
continue;
}
rmSync(targetPath, { force: true });
removeEmptyParentDirectories(targetPath, targetRoot);
delete state.files[scopedKey];
delete state.files[key];
}
for (const sourcePath of listFiles(sourceRoot)) { for (const sourcePath of listFiles(sourceRoot)) {
const key = relative(sourceRoot, sourcePath); const key = relative(sourceRoot, sourcePath);
const targetPath = resolve(targetRoot, key); const targetPath = resolve(targetRoot, key);
const sourceText = readFileSync(sourcePath, "utf8"); const sourceText = readFileSync(sourcePath, "utf8");
const sourceHash = sha256(sourceText); const sourceHash = sha256(sourceText);
const previous = state.files[key]; const scopedKey = `${scope}:${key}`;
const previous = state.files[scopedKey] ?? state.files[key];
mkdirSync(dirname(targetPath), { recursive: true }); mkdirSync(dirname(targetPath), { recursive: true });
if (!existsSync(targetPath)) { if (!existsSync(targetPath)) {
writeFileSync(targetPath, sourceText, "utf8"); writeFileSync(targetPath, sourceText, "utf8");
state.files[key] = { state.files[scopedKey] = {
lastAppliedSourceHash: sourceHash, lastAppliedSourceHash: sourceHash,
lastAppliedTargetHash: sourceHash, lastAppliedTargetHash: sourceHash,
}; };
delete state.files[key];
result.copied.push(key); result.copied.push(key);
continue; continue;
} }
@@ -93,10 +142,11 @@ function syncManagedFiles(
const currentTargetHash = sha256(currentTargetText); const currentTargetHash = sha256(currentTargetText);
if (currentTargetHash === sourceHash) { if (currentTargetHash === sourceHash) {
state.files[key] = { state.files[scopedKey] = {
lastAppliedSourceHash: sourceHash, lastAppliedSourceHash: sourceHash,
lastAppliedTargetHash: currentTargetHash, lastAppliedTargetHash: currentTargetHash,
}; };
delete state.files[key];
continue; continue;
} }
@@ -111,10 +161,11 @@ function syncManagedFiles(
} }
writeFileSync(targetPath, sourceText, "utf8"); writeFileSync(targetPath, sourceText, "utf8");
state.files[key] = { state.files[scopedKey] = {
lastAppliedSourceHash: sourceHash, lastAppliedSourceHash: sourceHash,
lastAppliedTargetHash: sourceHash, lastAppliedTargetHash: sourceHash,
}; };
delete state.files[key];
result.updated.push(key); result.updated.push(key);
} }
} }
@@ -128,9 +179,9 @@ export function syncBundledAssets(appRoot: string, agentDir: string): BootstrapS
skipped: [], skipped: [],
}; };
syncManagedFiles(resolve(appRoot, ".feynman", "themes"), resolve(agentDir, "themes"), state, result); syncManagedFiles(resolve(appRoot, ".feynman", "themes"), resolve(agentDir, "themes"), "themes", state, result);
syncManagedFiles(resolve(appRoot, ".feynman", "agents"), resolve(agentDir, "agents"), state, result); syncManagedFiles(resolve(appRoot, ".feynman", "agents"), resolve(agentDir, "agents"), "agents", state, result);
syncManagedFiles(resolve(appRoot, "skills"), resolve(agentDir, "skills"), state, result); syncManagedFiles(resolve(appRoot, "skills"), resolve(agentDir, "skills"), "skills", state, result);
writeBootstrapState(statePath, state); writeBootstrapState(statePath, state);
return result; return result;

View File

@@ -1,6 +1,6 @@
import test from "node:test"; import test from "node:test";
import assert from "node:assert/strict"; import assert from "node:assert/strict";
import { mkdtempSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; import { existsSync, mkdtempSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os"; import { tmpdir } from "node:os";
import { join } from "node:path"; import { join } from "node:path";
@@ -49,3 +49,34 @@ test("syncBundledAssets preserves user-modified files and updates managed files"
assert.equal(readFileSync(join(agentDir, "themes", "feynman.json"), "utf8"), '{"theme":"v2"}\n'); assert.equal(readFileSync(join(agentDir, "themes", "feynman.json"), "utf8"), '{"theme":"v2"}\n');
assert.equal(readFileSync(join(agentDir, "agents", "researcher.md"), "utf8"), "# user-custom\n"); assert.equal(readFileSync(join(agentDir, "agents", "researcher.md"), "utf8"), "# user-custom\n");
}); });
// Covers the pruning path of syncManagedFiles: managed files whose bundled
// source was removed are deleted on the next sync, UNLESS the user has
// modified the target copy, in which case the stale file is preserved and
// reported via result.skipped. (Statement order matters: each sync mutates
// the persisted bootstrap state that the next sync reads.)
test("syncBundledAssets removes deleted managed files but preserves user-modified stale files", () => {
const appRoot = createAppRoot();
const home = mkdtempSync(join(tmpdir(), "feynman-home-"));
// Point state/target resolution at the temp home so the test is hermetic.
process.env.FEYNMAN_HOME = home;
const agentDir = join(home, "agent");
mkdirSync(agentDir, { recursive: true });
// Seed a bundled skill and sync it so it is recorded as managed.
mkdirSync(join(appRoot, "skills", "paper-eli5"), { recursive: true });
writeFileSync(join(appRoot, "skills", "paper-eli5", "SKILL.md"), "# old skill\n", "utf8");
syncBundledAssets(appRoot, agentDir);
// Simulate a bundle update: the old skill is removed, a new one appears.
rmSync(join(appRoot, "skills", "paper-eli5"), { recursive: true, force: true });
mkdirSync(join(appRoot, "skills", "eli5"), { recursive: true });
writeFileSync(join(appRoot, "skills", "eli5", "SKILL.md"), "# new skill\n", "utf8");
const firstResult = syncBundledAssets(appRoot, agentDir);
assert.deepEqual(firstResult.copied, ["eli5/SKILL.md"]);
// Unmodified managed copy of the removed skill is pruned from the target.
assert.equal(existsSync(join(agentDir, "skills", "paper-eli5", "SKILL.md")), false);
assert.equal(readFileSync(join(agentDir, "skills", "eli5", "SKILL.md"), "utf8"), "# new skill\n");
// Second scenario: sync a skill, then let the user edit the target copy
// before the bundled source disappears.
mkdirSync(join(appRoot, "skills", "legacy"), { recursive: true });
writeFileSync(join(appRoot, "skills", "legacy", "SKILL.md"), "# managed legacy\n", "utf8");
syncBundledAssets(appRoot, agentDir);
writeFileSync(join(agentDir, "skills", "legacy", "SKILL.md"), "# user legacy override\n", "utf8");
rmSync(join(appRoot, "skills", "legacy"), { recursive: true, force: true });
const secondResult = syncBundledAssets(appRoot, agentDir);
// User-modified stale file is kept and surfaced as skipped, not deleted.
assert.deepEqual(secondResult.skipped, ["legacy/SKILL.md"]);
assert.equal(readFileSync(join(agentDir, "skills", "legacy", "SKILL.md"), "utf8"), "# user legacy override\n");
});