feat: add API key and custom provider configuration (#4)

* feat: add API key and custom provider configuration

Previously, model setup only offered OAuth login. This adds:

- API key configuration for 17 built-in providers (OpenAI, Anthropic,
  Google, Mistral, Groq, xAI, OpenRouter, etc.)
- Custom provider setup via models.json (for Ollama, vLLM, LM Studio,
  proxies, or any OpenAI/Anthropic/Google-compatible endpoint)
- Interactive prompts with smart defaults and auto-detection of models
- Verification flow that probes endpoints and provides actionable tips
- Doctor diagnostics for models.json path and missing apiKey warnings
- Dev environment fallback for running without dist/ build artifacts
- Unified auth flow: `feynman model login` now offers both API key
  and OAuth options (OAuth-only when a specific provider is given)

New files:
- src/model/models-json.ts: Read/write models.json with proper merging
- src/model/registry.ts: Centralized ModelRegistry creation with modelsJsonPath
- tests/models-json.test.ts: Unit tests for provider config upsert

* fix: harden runtime env and custom provider auth

---------

Co-authored-by: Advait Paliwal <advaitspaliwal@gmail.com>
This commit is contained in:
Mochamad Chairulridjal
2026-03-27 07:09:38 +07:00
committed by GitHub
parent dbd89d8e3d
commit 30d07246d1
13 changed files with 745 additions and 23 deletions

View File

@@ -6,6 +6,20 @@ FEYNMAN_THINKING=medium
OPENAI_API_KEY= OPENAI_API_KEY=
ANTHROPIC_API_KEY= ANTHROPIC_API_KEY=
GEMINI_API_KEY=
OPENROUTER_API_KEY=
ZAI_API_KEY=
KIMI_API_KEY=
MINIMAX_API_KEY=
MINIMAX_CN_API_KEY=
MISTRAL_API_KEY=
GROQ_API_KEY=
XAI_API_KEY=
CEREBRAS_API_KEY=
HF_TOKEN=
OPENCODE_API_KEY=
AI_GATEWAY_API_KEY=
AZURE_OPENAI_API_KEY=
RUNPOD_API_KEY= RUNPOD_API_KEY=
MODAL_TOKEN_ID= MODAL_TOKEN_ID=

View File

@@ -11,7 +11,7 @@ import {
login as loginAlpha, login as loginAlpha,
logout as logoutAlpha, logout as logoutAlpha,
} from "@companion-ai/alpha-hub/lib"; } from "@companion-ai/alpha-hub/lib";
import { AuthStorage, DefaultPackageManager, ModelRegistry, SettingsManager } from "@mariozechner/pi-coding-agent"; import { DefaultPackageManager, SettingsManager } from "@mariozechner/pi-coding-agent";
import { syncBundledAssets } from "./bootstrap/sync.js"; import { syncBundledAssets } from "./bootstrap/sync.js";
import { ensureFeynmanHome, getDefaultSessionDir, getFeynmanAgentDir, getFeynmanHome } from "./config/paths.js"; import { ensureFeynmanHome, getDefaultSessionDir, getFeynmanAgentDir, getFeynmanHome } from "./config/paths.js";
@@ -19,6 +19,7 @@ import { launchPiChat } from "./pi/launch.js";
import { CORE_PACKAGE_SOURCES, getOptionalPackagePresetSources, listOptionalPackagePresets } from "./pi/package-presets.js"; import { CORE_PACKAGE_SOURCES, getOptionalPackagePresetSources, listOptionalPackagePresets } from "./pi/package-presets.js";
import { normalizeFeynmanSettings, normalizeThinkingLevel, parseModelSpec } from "./pi/settings.js"; import { normalizeFeynmanSettings, normalizeThinkingLevel, parseModelSpec } from "./pi/settings.js";
import { import {
authenticateModelProvider,
getCurrentModelSpec, getCurrentModelSpec,
loginModelProvider, loginModelProvider,
logoutModelProvider, logoutModelProvider,
@@ -30,6 +31,7 @@ import { runDoctor, runStatus } from "./setup/doctor.js";
import { setupPreviewDependencies } from "./setup/preview.js"; import { setupPreviewDependencies } from "./setup/preview.js";
import { runSetup } from "./setup/setup.js"; import { runSetup } from "./setup/setup.js";
import { ASH, printAsciiHeader, printInfo, printPanel, printSection, RESET, SAGE } from "./ui/terminal.js"; import { ASH, printAsciiHeader, printInfo, printPanel, printSection, RESET, SAGE } from "./ui/terminal.js";
import { createModelRegistry } from "./model/registry.js";
import { import {
cliCommandSections, cliCommandSections,
formatCliWorkflowUsage, formatCliWorkflowUsage,
@@ -124,7 +126,13 @@ async function handleModelCommand(subcommand: string | undefined, args: string[]
} }
if (subcommand === "login") { if (subcommand === "login") {
await loginModelProvider(feynmanAuthPath, args[0], feynmanSettingsPath); if (args[0]) {
// Specific provider given - use OAuth login directly
await loginModelProvider(feynmanAuthPath, args[0], feynmanSettingsPath);
} else {
// No provider specified - show auth method choice
await authenticateModelProvider(feynmanAuthPath, feynmanSettingsPath);
}
return; return;
} }
@@ -427,7 +435,7 @@ export async function main(): Promise<void> {
const explicitModelSpec = values.model ?? process.env.FEYNMAN_MODEL; const explicitModelSpec = values.model ?? process.env.FEYNMAN_MODEL;
if (explicitModelSpec) { if (explicitModelSpec) {
const modelRegistry = new ModelRegistry(AuthStorage.create(feynmanAuthPath)); const modelRegistry = createModelRegistry(feynmanAuthPath);
const explicitModel = parseModelSpec(explicitModelSpec, modelRegistry); const explicitModel = parseModelSpec(explicitModelSpec, modelRegistry);
if (!explicitModel) { if (!explicitModel) {
throw new Error(`Unknown model: ${explicitModelSpec}`); throw new Error(`Unknown model: ${explicitModelSpec}`);

View File

@@ -1,4 +1,4 @@
import { AuthStorage, ModelRegistry } from "@mariozechner/pi-coding-agent"; import { createModelRegistry } from "./registry.js";
type ModelRecord = { type ModelRecord = {
provider: string; provider: string;
@@ -166,10 +166,6 @@ function sortProviders(left: ProviderStatus, right: ProviderStatus): number {
return left.label.localeCompare(right.label); return left.label.localeCompare(right.label);
} }
function createModelRegistry(authPath: string): ModelRegistry {
return new ModelRegistry(AuthStorage.create(authPath));
}
export function getAvailableModelRecords(authPath: string): ModelRecord[] { export function getAvailableModelRecords(authPath: string): ModelRecord[] {
return createModelRegistry(authPath) return createModelRegistry(authPath)
.getAvailable() .getAvailable()
@@ -258,7 +254,9 @@ export function buildModelStatusSnapshotFromRecords(
const guidance: string[] = []; const guidance: string[] = [];
if (available.length === 0) { if (available.length === 0) {
guidance.push("No authenticated Pi models are available yet."); guidance.push("No authenticated Pi models are available yet.");
guidance.push("Run `feynman model login <provider>` or add provider credentials that Pi can see."); guidance.push(
"Run `feynman model login <provider>` (OAuth) or configure an API key (env var, auth.json, or models.json for custom providers).",
);
guidance.push("After auth is in place, rerun `feynman model list` or `feynman setup model`."); guidance.push("After auth is in place, rerun `feynman model list` or `feynman setup model`.");
} else if (!current) { } else if (!current) {
guidance.push(`No default research model is set. Recommended: ${recommended?.spec}.`); guidance.push(`No default research model is set. Recommended: ${recommended?.spec}.`);

View File

@@ -1,5 +1,7 @@
import { AuthStorage } from "@mariozechner/pi-coding-agent"; import { AuthStorage } from "@mariozechner/pi-coding-agent";
import { writeFileSync } from "node:fs"; import { writeFileSync } from "node:fs";
import { exec as execCallback } from "node:child_process";
import { promisify } from "node:util";
import { readJson } from "../pi/settings.js"; import { readJson } from "../pi/settings.js";
import { promptChoice, promptText } from "../setup/prompts.js"; import { promptChoice, promptText } from "../setup/prompts.js";
@@ -12,6 +14,10 @@ import {
getSupportedModelRecords, getSupportedModelRecords,
type ModelStatusSnapshot, type ModelStatusSnapshot,
} from "./catalog.js"; } from "./catalog.js";
import { createModelRegistry, getModelsJsonPath } from "./registry.js";
import { upsertProviderBaseUrl, upsertProviderConfig } from "./models-json.js";
const exec = promisify(execCallback);
function collectModelStatus(settingsPath: string, authPath: string): ModelStatusSnapshot { function collectModelStatus(settingsPath: string, authPath: string): ModelStatusSnapshot {
return buildModelStatusSnapshotFromRecords( return buildModelStatusSnapshotFromRecords(
@@ -58,6 +64,453 @@ async function selectOAuthProvider(authPath: string, action: "login" | "logout")
return providers[selection]; return providers[selection];
} }
/**
 * A provider that can be authenticated with a plain API key.
 * `envVar` is the conventional environment variable for that provider's key,
 * when one exists; it is shown in the picker and suggested as a way to avoid
 * writing the secret to disk.
 */
type ApiKeyProviderInfo = {
  id: string;
  label: string;
  envVar?: string;
};
// Built-in API-key providers offered by the interactive picker, in menu order
// (selection indices depend on this order). The "__custom__" sentinel routes
// to the custom-provider (models.json) flow instead of plain key entry.
const API_KEY_PROVIDERS: ApiKeyProviderInfo[] = [
  { id: "__custom__", label: "Custom provider (baseUrl + API key)" },
  { id: "openai", label: "OpenAI Platform API", envVar: "OPENAI_API_KEY" },
  { id: "anthropic", label: "Anthropic API", envVar: "ANTHROPIC_API_KEY" },
  { id: "google", label: "Google Gemini API", envVar: "GEMINI_API_KEY" },
  { id: "openrouter", label: "OpenRouter", envVar: "OPENROUTER_API_KEY" },
  { id: "zai", label: "Z.AI / GLM", envVar: "ZAI_API_KEY" },
  { id: "kimi-coding", label: "Kimi / Moonshot", envVar: "KIMI_API_KEY" },
  { id: "minimax", label: "MiniMax", envVar: "MINIMAX_API_KEY" },
  { id: "minimax-cn", label: "MiniMax (China)", envVar: "MINIMAX_CN_API_KEY" },
  { id: "mistral", label: "Mistral", envVar: "MISTRAL_API_KEY" },
  { id: "groq", label: "Groq", envVar: "GROQ_API_KEY" },
  { id: "xai", label: "xAI", envVar: "XAI_API_KEY" },
  { id: "cerebras", label: "Cerebras", envVar: "CEREBRAS_API_KEY" },
  // NOTE: opencode and opencode-go intentionally share OPENCODE_API_KEY.
  { id: "vercel-ai-gateway", label: "Vercel AI Gateway", envVar: "AI_GATEWAY_API_KEY" },
  { id: "huggingface", label: "Hugging Face", envVar: "HF_TOKEN" },
  { id: "opencode", label: "OpenCode Zen", envVar: "OPENCODE_API_KEY" },
  { id: "opencode-go", label: "OpenCode Go", envVar: "OPENCODE_API_KEY" },
  { id: "azure-openai-responses", label: "Azure OpenAI (Responses)", envVar: "AZURE_OPENAI_API_KEY" },
];
/**
 * Prompt the user to pick an API-key provider (or the custom-provider flow).
 *
 * @returns The chosen provider entry, or undefined when the user cancels.
 */
async function selectApiKeyProvider(): Promise<ApiKeyProviderInfo | undefined> {
  // Render as "<id> — <label> (<ENV_VAR>)". The original concatenated id and
  // label with no separator ("openaiOpenAI Platform API"); use the same
  // " — " convention as the other choice menus in this file.
  const choices = API_KEY_PROVIDERS.map(
    (provider) => `${provider.id} — ${provider.label}${provider.envVar ? ` (${provider.envVar})` : ""}`,
  );
  choices.push("Cancel");
  const selection = await promptChoice("Choose an API-key provider:", choices, 0);
  // "Cancel" is appended after the provider list, so any index past the end
  // of API_KEY_PROVIDERS means the user backed out.
  if (selection >= API_KEY_PROVIDERS.length) {
    return undefined;
  }
  return API_KEY_PROVIDERS[selection];
}
/**
 * Everything collected by the interactive custom-provider flow; persisted to
 * models.json via upsertProviderConfig and probed by verifyCustomProvider.
 */
type CustomProviderSetup = {
  providerId: string;
  modelIds: string[];
  baseUrl: string;
  api: "openai-completions" | "openai-responses" | "anthropic-messages" | "google-generative-ai";
  // Literal secret, env var name, or "!command" — resolved at runtime by
  // resolveApiKeyConfig.
  apiKeyConfig: string;
  /**
   * If true, add `Authorization: Bearer <apiKey>` to requests in addition to
   * whatever the API mode uses (useful for proxies that implement /v1/messages
   * but expect Bearer auth instead of x-api-key).
   */
  authHeader: boolean;
};
/** Canonicalize a user-supplied provider id: trim, lowercase, whitespace runs → "-". */
function normalizeProviderId(value: string): string {
  const lowered = value.trim().toLowerCase();
  return lowered.replace(/\s+/g, "-");
}
/** Split a comma-separated model-id list, dropping blanks and duplicates (order kept). */
function normalizeModelIds(value: string): string[] {
  const seen = new Set<string>();
  for (const part of value.split(",")) {
    const id = part.trim();
    if (id) {
      seen.add(id);
    }
  }
  return [...seen];
}
/** Trim whitespace and strip every trailing slash from a base URL. */
function normalizeBaseUrl(value: string): string {
  let result = value.trim();
  while (result.endsWith("/")) {
    result = result.slice(0, -1);
  }
  return result;
}
/**
 * Normalize a custom-provider base URL for the given API mode.
 * In Anthropic mode a trailing `/v1` is removed, because Pi appends
 * `/v1/messages` itself; a human-readable note is returned when that happens.
 */
function normalizeCustomProviderBaseUrl(
  api: CustomProviderSetup["api"],
  baseUrl: string,
): { baseUrl: string; note?: string } {
  const normalized = baseUrl.trim().replace(/\/+$/, "");
  if (!normalized) {
    return { baseUrl: normalized };
  }
  const needsV1Strip = api === "anthropic-messages" && /\/v1$/i.test(normalized);
  if (needsV1Strip) {
    // Pi expects Anthropic baseUrl without `/v1` (it appends `/v1/messages` internally).
    return { baseUrl: normalized.replace(/\/v1$/i, ""), note: "Stripped trailing /v1 for Anthropic mode." };
  }
  return { baseUrl: normalized };
}
/** True when the base URL targets the local machine (localhost / 127.0.0.1 / 0.0.0.0). */
function isLocalBaseUrl(baseUrl: string): boolean {
  const localHostPattern = /^(https?:\/\/)?(localhost|127\.0\.0\.1|0\.0\.0\.0)(:|\/|$)/i;
  return localHostPattern.test(baseUrl);
}
/**
 * Resolve an apiKey config value to a concrete secret:
 * - "!command" → run the command in the user's shell and use trimmed stdout
 * - a set environment-variable name → that variable's trimmed value
 * - anything else → the literal value itself
 * Returns undefined when nothing usable could be resolved.
 */
async function resolveApiKeyConfig(apiKeyConfig: string): Promise<string | undefined> {
  const trimmed = apiKeyConfig.trim();
  if (!trimmed) {
    return undefined;
  }
  if (!trimmed.startsWith("!")) {
    const envValue = process.env[trimmed];
    if (typeof envValue === "string" && envValue.trim()) {
      return envValue.trim();
    }
    // Not a set env var — fall back to treating the value as a literal secret.
    return trimmed;
  }
  const command = trimmed.slice(1).trim();
  if (!command) {
    return undefined;
  }
  const shell =
    process.platform === "win32" ? process.env.ComSpec || "cmd.exe" : process.env.SHELL || "/bin/sh";
  try {
    const { stdout } = await exec(command, { shell, maxBuffer: 1024 * 1024 });
    return stdout.trim() || undefined;
  } catch {
    // Command failed — report "unresolved" rather than aborting setup.
    return undefined;
  }
}
/**
 * Best-effort: GET `<baseUrl>/models` (OpenAI-compatible list endpoint) and
 * return the model ids found there, so prompts can suggest correct ids.
 *
 * @param baseUrl    OpenAI-compatible base URL (already normalized, no trailing /).
 * @param apiKey     Resolved secret, sent only when `authHeader` is true.
 * @param authHeader Whether to send `Authorization: Bearer <apiKey>`.
 * @returns Model ids, or undefined on any failure (non-2xx, timeout,
 *          unexpected payload shape) — callers treat undefined as "no hint".
 */
async function bestEffortFetchOpenAiModelIds(
  baseUrl: string,
  apiKey: string,
  authHeader: boolean,
): Promise<string[] | undefined> {
  const url = `${baseUrl}/models`;
  const controller = new AbortController();
  // 5s cap: this is a UX nicety, not a correctness check.
  const timer = setTimeout(() => controller.abort(), 5000);
  try {
    const response = await fetch(url, {
      method: "GET",
      headers: authHeader ? { Authorization: `Bearer ${apiKey}` } : undefined,
      signal: controller.signal,
    });
    if (!response.ok) {
      return undefined;
    }
    // Narrow the untrusted payload instead of casting to `any`.
    const json: unknown = await response.json();
    const data = (json as { data?: unknown } | null)?.data;
    if (!Array.isArray(data)) return undefined;
    return data
      .map((entry: unknown) => {
        const id = (entry as { id?: unknown } | null)?.id;
        return typeof id === "string" ? id : undefined;
      })
      .filter((id): id is string => id !== undefined);
  } catch {
    // Network/timeout/parse errors are all non-fatal here.
    return undefined;
  } finally {
    clearTimeout(timer);
  }
}
/**
 * Interactively collect a full custom-provider configuration:
 * provider id → API mode → base URL → auth-header preference → API key →
 * model ids (with best-effort auto-detection for OpenAI-compatible modes).
 *
 * @returns The collected setup, or undefined whenever the user cancels or
 *          supplies an unusable value (callers treat undefined as "aborted").
 */
async function promptCustomProviderSetup(): Promise<CustomProviderSetup | undefined> {
  printSection("Custom Provider");
  const providerIdInput = await promptText("Provider id (e.g. my-proxy)", "custom");
  const providerId = normalizeProviderId(providerIdInput);
  // "__custom__" is reserved as the picker sentinel, so it cannot be a real id.
  if (!providerId || providerId === "__custom__") {
    printWarning("Invalid provider id.");
    return undefined;
  }
  const apiChoices = [
    "openai-completions — OpenAI Chat Completions compatible (e.g. /v1/chat/completions)",
    "openai-responses — OpenAI Responses compatible (e.g. /v1/responses)",
    "anthropic-messages — Anthropic Messages compatible (e.g. /v1/messages)",
    "google-generative-ai — Google Generative AI compatible (generativelanguage.googleapis.com)",
    "Cancel",
  ];
  const apiSelection = await promptChoice("API mode:", apiChoices, 0);
  // Index 4 is "Cancel".
  if (apiSelection >= 4) {
    return undefined;
  }
  // Map the menu index back to the API-mode literal (same order as apiChoices).
  const api = ["openai-completions", "openai-responses", "anthropic-messages", "google-generative-ai"][apiSelection] as CustomProviderSetup["api"];
  // Default base URL per mode; the Ollama default covers the common local case.
  const baseUrlDefault = ((): string => {
    if (api === "openai-completions" || api === "openai-responses") return "http://localhost:11434/v1";
    if (api === "anthropic-messages") return "https://api.anthropic.com";
    if (api === "google-generative-ai") return "https://generativelanguage.googleapis.com";
    return "http://localhost:11434/v1";
  })();
  const baseUrlPrompt =
    api === "openai-completions" || api === "openai-responses"
      ? "Base URL (include /v1 for OpenAI-compatible endpoints)"
      : api === "anthropic-messages"
        ? "Base URL (no trailing /, no /v1)"
        : "Base URL (no trailing /)";
  const baseUrlRaw = await promptText(baseUrlPrompt, baseUrlDefault);
  const { baseUrl, note: baseUrlNote } = normalizeCustomProviderBaseUrl(api, baseUrlRaw);
  if (!baseUrl) {
    printWarning("Base URL is required.");
    return undefined;
  }
  if (baseUrlNote) {
    printInfo(baseUrlNote);
  }
  let authHeader = false;
  // OpenAI modes: default to sending Bearer auth for remote hosts, not for
  // local servers (Ollama/vLLM/LM Studio typically need none).
  if (api === "openai-completions" || api === "openai-responses") {
    const defaultAuthHeader = !isLocalBaseUrl(baseUrl);
    const authHeaderChoices = [
      "Yes (send Authorization: Bearer <apiKey>)",
      "No (common for local Ollama/vLLM/LM Studio)",
      "Cancel",
    ];
    const authHeaderSelection = await promptChoice(
      "Send Authorization header?",
      authHeaderChoices,
      defaultAuthHeader ? 0 : 1,
    );
    if (authHeaderSelection >= 2) {
      return undefined;
    }
    authHeader = authHeaderSelection === 0;
  }
  // Anthropic mode: standard API uses x-api-key only, but local proxies often
  // expect Bearer as well — hence the inverted default vs the OpenAI modes.
  if (api === "anthropic-messages") {
    const defaultAuthHeader = isLocalBaseUrl(baseUrl);
    const authHeaderChoices = [
      "Yes (also send Authorization: Bearer <apiKey>)",
      "No (standard Anthropic uses x-api-key only)",
      "Cancel",
    ];
    const authHeaderSelection = await promptChoice(
      "Also send Authorization header?",
      authHeaderChoices,
      defaultAuthHeader ? 0 : 1,
    );
    if (authHeaderSelection >= 2) {
      return undefined;
    }
    authHeader = authHeaderSelection === 0;
  }
  printInfo("API key value supports:");
  printInfo(" - literal secret (stored in models.json)");
  printInfo(" - env var name (resolved at runtime)");
  printInfo(" - !command (executes and uses stdout)");
  const apiKeyConfigRaw = (await promptText("API key / resolver", "")).trim();
  const apiKeyConfig = apiKeyConfigRaw || "local";
  if (!apiKeyConfigRaw) {
    printInfo("Using placeholder apiKey value (required by Pi for custom providers).");
  }
  let modelIdsDefault = "my-model";
  if (api === "openai-completions" || api === "openai-responses") {
    // Best-effort: hit /models so users can pick correct ids (especially for proxies).
    const resolvedKey = await resolveApiKeyConfig(apiKeyConfig);
    const modelIds = resolvedKey ? await bestEffortFetchOpenAiModelIds(baseUrl, resolvedKey, authHeader) : undefined;
    if (modelIds && modelIds.length > 0) {
      const sample = modelIds.slice(0, 10).join(", ");
      printInfo(`Detected models: ${sample}${modelIds.length > 10 ? ", ..." : ""}`);
      modelIdsDefault = modelIds.includes("sonnet") ? "sonnet" : modelIds[0]!;
    }
  }
  const modelIdsRaw = await promptText("Model id(s) (comma-separated)", modelIdsDefault);
  const modelIds = normalizeModelIds(modelIdsRaw);
  if (modelIds.length === 0) {
    printWarning("At least one model id is required.");
    return undefined;
  }
  return { providerId, modelIds, baseUrl, api, apiKeyConfig, authHeader };
}
/**
 * Verify a freshly saved custom provider, failing soft at each stage:
 * 1. models.json loads without errors,
 * 2. the registry contains the configured provider/model ids,
 * 3. the provider counts as authenticated/available,
 * 4. the API key resolves,
 * 5. best-effort network probe of the endpoint (mode-specific).
 * Every failure prints a warning/tip and returns; nothing throws.
 */
async function verifyCustomProvider(setup: CustomProviderSetup, authPath: string): Promise<void> {
  const registry = createModelRegistry(authPath);
  const modelsError = registry.getError();
  if (modelsError) {
    printWarning("Verification: models.json failed to load.");
    for (const line of modelsError.split("\n")) {
      printInfo(` ${line}`);
    }
    return;
  }
  const all = registry.getAll();
  // At least one configured (provider, model) pair must exist in the registry.
  const hasModel = setup.modelIds.some((id) => all.some((model) => model.provider === setup.providerId && model.id === id));
  if (!hasModel) {
    printWarning("Verification: model registry does not contain the configured provider/model ids.");
    return;
  }
  const available = registry.getAvailable();
  const hasAvailable = setup.modelIds.some((id) =>
    available.some((model) => model.provider === setup.providerId && model.id === id),
  );
  if (!hasAvailable) {
    printWarning("Verification: provider is not considered authenticated/available.");
    return;
  }
  const apiKey = await registry.getApiKeyForProvider(setup.providerId);
  if (!apiKey) {
    printWarning("Verification: API key could not be resolved (check env var name / !command).");
    return;
  }
  const timeoutMs = 8000;
  // Best-effort network check for OpenAI-compatible endpoints
  if (setup.api === "openai-completions" || setup.api === "openai-responses") {
    const url = `${setup.baseUrl}/models`;
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);
    try {
      const response = await fetch(url, {
        method: "GET",
        headers: setup.authHeader ? { Authorization: `Bearer ${apiKey}` } : undefined,
        signal: controller.signal,
      });
      if (!response.ok) {
        printWarning(`Verification: ${url} returned ${response.status} ${response.statusText}`);
        return;
      }
      const json = (await response.json()) as unknown;
      const modelIds = Array.isArray((json as any)?.data)
        ? (json as any).data.map((entry: any) => (typeof entry?.id === "string" ? entry.id : undefined)).filter(Boolean)
        : [];
      // Only flag missing ids when the endpoint actually returned a list;
      // an empty list is treated as "endpoint does not enumerate models".
      const missing = setup.modelIds.filter((id) => modelIds.length > 0 && !modelIds.includes(id));
      if (modelIds.length > 0 && missing.length > 0) {
        printWarning(`Verification: /models does not list configured model id(s): ${missing.join(", ")}`);
        return;
      }
      printSuccess("Verification: endpoint reachable and authorized.");
    } catch (error) {
      printWarning(`Verification: failed to reach ${url}: ${error instanceof Error ? error.message : String(error)}`);
    } finally {
      clearTimeout(timer);
    }
    return;
  }
  if (setup.api === "anthropic-messages") {
    // baseUrl is stored without /v1 in Anthropic mode, so append it here.
    const url = `${setup.baseUrl}/v1/models?limit=1`;
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);
    try {
      const headers: Record<string, string> = {
        "x-api-key": apiKey,
        "anthropic-version": "2023-06-01",
      };
      if (setup.authHeader) {
        headers.Authorization = `Bearer ${apiKey}`;
      }
      const response = await fetch(url, {
        method: "GET",
        headers,
        signal: controller.signal,
      });
      if (!response.ok) {
        printWarning(`Verification: ${url} returned ${response.status} ${response.statusText}`);
        // Actionable tips for the two most common misconfigurations.
        if (response.status === 404) {
          printInfo(" Tip: For Anthropic mode, use a base URL without /v1 (e.g. https://api.anthropic.com).");
        }
        if ((response.status === 401 || response.status === 403) && !setup.authHeader) {
          printInfo(" Tip: Some proxies require `Authorization: Bearer <apiKey>` even in Anthropic mode.");
        }
        return;
      }
      printSuccess("Verification: endpoint reachable and authorized.");
    } catch (error) {
      printWarning(`Verification: failed to reach ${url}: ${error instanceof Error ? error.message : String(error)}`);
    } finally {
      clearTimeout(timer);
    }
    return;
  }
  if (setup.api === "google-generative-ai") {
    // Google passes the key as a query parameter rather than a header.
    const url = `${setup.baseUrl}/v1beta/models?key=${encodeURIComponent(apiKey)}`;
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);
    try {
      const response = await fetch(url, { method: "GET", signal: controller.signal });
      if (!response.ok) {
        printWarning(`Verification: ${url} returned ${response.status} ${response.statusText}`);
        return;
      }
      printSuccess("Verification: endpoint reachable and authorized.");
    } catch (error) {
      printWarning(`Verification: failed to reach ${url}: ${error instanceof Error ? error.message : String(error)}`);
    } finally {
      clearTimeout(timer);
    }
    return;
  }
  printInfo("Verification: skipped network probe for this API mode.");
}
/**
 * Drive the API-key configuration flow: pick a provider, then either run the
 * custom-provider setup (persisted to models.json) or store a plain API key
 * in auth storage, with an optional baseUrl override.
 *
 * @returns true when a provider was configured, false on cancel/failure.
 */
async function configureApiKeyProvider(authPath: string): Promise<boolean> {
  const provider = await selectApiKeyProvider();
  if (!provider) {
    printInfo("API key setup cancelled.");
    return false;
  }
  // Custom providers live entirely in models.json (baseUrl, api mode, key).
  if (provider.id === "__custom__") {
    const setup = await promptCustomProviderSetup();
    if (!setup) {
      printInfo("Custom provider setup cancelled.");
      return false;
    }
    const modelsJsonPath = getModelsJsonPath(authPath);
    const result = upsertProviderConfig(modelsJsonPath, setup.providerId, {
      baseUrl: setup.baseUrl,
      apiKey: setup.apiKeyConfig,
      api: setup.api,
      authHeader: setup.authHeader,
      models: setup.modelIds.map((id) => ({ id })),
    });
    if (!result.ok) {
      printWarning(result.error);
      return false;
    }
    printSuccess(`Saved custom provider: ${setup.providerId}`);
    // Verification is best-effort and never blocks a successful save.
    await verifyCustomProvider(setup, authPath);
    return true;
  }
  printSection(`API Key: ${provider.label}`);
  if (provider.envVar) {
    printInfo(`Tip: to avoid writing secrets to disk, set ${provider.envVar} in your shell or .env.`);
  }
  const apiKey = await promptText("Paste API key (leave empty to use env var instead)", "");
  if (!apiKey) {
    // Empty input is a valid choice: the user can rely on the env var instead.
    if (provider.envVar) {
      printInfo(`Set ${provider.envVar} and rerun setup (or run \`feynman model list\`).`);
    } else {
      printInfo("No API key provided.");
    }
    return false;
  }
  AuthStorage.create(authPath).set(provider.id, { type: "api_key", key: apiKey });
  printSuccess(`Saved API key for ${provider.id} in auth storage.`);
  // Built-in providers can still point at a proxy via a models.json baseUrl.
  const baseUrl = await promptText("Base URL override (optional, include /v1 for OpenAI-compatible endpoints)", "");
  if (baseUrl) {
    const modelsJsonPath = getModelsJsonPath(authPath);
    const result = upsertProviderBaseUrl(modelsJsonPath, provider.id, baseUrl);
    if (result.ok) {
      printSuccess(`Saved baseUrl override for ${provider.id} in models.json.`);
    } else {
      printWarning(result.error);
    }
  }
  return true;
}
function resolveAvailableModelSpec(authPath: string, input: string): string | undefined { function resolveAvailableModelSpec(authPath: string, input: string): string | undefined {
const normalizedInput = input.trim().toLowerCase(); const normalizedInput = input.trim().toLowerCase();
if (!normalizedInput) { if (!normalizedInput) {
@@ -111,14 +564,46 @@ export function printModelList(settingsPath: string, authPath: string): void {
} }
} }
export async function loginModelProvider(authPath: string, providerId?: string, settingsPath?: string): Promise<void> { export async function authenticateModelProvider(authPath: string, settingsPath?: string): Promise<boolean> {
const choices = [
"API key (OpenAI, Anthropic, Google, custom provider, ...)",
"OAuth login (ChatGPT Plus/Pro, Claude Pro/Max, Copilot, ...)",
"Cancel",
];
const selection = await promptChoice("How do you want to authenticate?", choices, 0);
if (selection === 0) {
const configured = await configureApiKeyProvider(authPath);
if (configured && settingsPath) {
const currentSpec = getCurrentModelSpec(settingsPath);
const available = getAvailableModelRecords(authPath);
const currentValid = currentSpec ? available.some((m) => `${m.provider}/${m.id}` === currentSpec) : false;
if ((!currentSpec || !currentValid) && available.length > 0) {
const recommended = chooseRecommendedModel(authPath);
if (recommended) {
setDefaultModelSpec(settingsPath, authPath, recommended.spec);
}
}
}
return configured;
}
if (selection === 1) {
return loginModelProvider(authPath, undefined, settingsPath);
}
printInfo("Authentication cancelled.");
return false;
}
export async function loginModelProvider(authPath: string, providerId?: string, settingsPath?: string): Promise<boolean> {
const provider = providerId ? resolveOAuthProvider(authPath, providerId) : await selectOAuthProvider(authPath, "login"); const provider = providerId ? resolveOAuthProvider(authPath, providerId) : await selectOAuthProvider(authPath, "login");
if (!provider) { if (!provider) {
if (providerId) { if (providerId) {
throw new Error(`Unknown OAuth model provider: ${providerId}`); throw new Error(`Unknown OAuth model provider: ${providerId}`);
} }
printInfo("Login cancelled."); printInfo("Login cancelled.");
return; return false;
} }
const authStorage = AuthStorage.create(authPath); const authStorage = AuthStorage.create(authPath);
@@ -166,6 +651,8 @@ export async function loginModelProvider(authPath: string, providerId?: string,
} }
} }
} }
return true;
} }
export async function logoutModelProvider(authPath: string, providerId?: string): Promise<void> { export async function logoutModelProvider(authPath: string, providerId?: string): Promise<void> {
@@ -200,11 +687,34 @@ export function setDefaultModelSpec(settingsPath: string, authPath: string, spec
export async function runModelSetup(settingsPath: string, authPath: string): Promise<void> { export async function runModelSetup(settingsPath: string, authPath: string): Promise<void> {
let status = collectModelStatus(settingsPath, authPath); let status = collectModelStatus(settingsPath, authPath);
if (status.availableModels.length === 0) { while (status.availableModels.length === 0) {
await loginModelProvider(authPath, undefined, settingsPath); const choices = [
"API key (OpenAI, Anthropic, ZAI, Kimi, MiniMax, ...)",
"OAuth login (ChatGPT Plus/Pro, Claude Pro/Max, Copilot, ...)",
"Cancel",
];
const selection = await promptChoice("Choose how to configure model access:", choices, 0);
if (selection === 0) {
const configured = await configureApiKeyProvider(authPath);
if (!configured) {
status = collectModelStatus(settingsPath, authPath);
continue;
}
} else if (selection === 1) {
const loggedIn = await loginModelProvider(authPath, undefined, settingsPath);
if (!loggedIn) {
status = collectModelStatus(settingsPath, authPath);
continue;
}
} else {
printInfo("Setup cancelled.");
return;
}
status = collectModelStatus(settingsPath, authPath); status = collectModelStatus(settingsPath, authPath);
if (status.availableModels.length === 0) { if (status.availableModels.length === 0) {
return; printWarning("No authenticated models are available yet.");
printInfo("If you configured a custom provider, ensure it has `apiKey` set in models.json.");
printInfo("Tip: run `feynman doctor` to see models.json path + load errors.");
} }
} }

91
src/model/models-json.ts Normal file
View File

@@ -0,0 +1,91 @@
import { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";
// Minimal shape of models.json that this module needs: a map of provider id →
// provider configuration object. Any other top-level fields are carried along
// untouched when the file is rewritten.
type ModelsJson = {
  providers?: Record<string, Record<string, unknown>>;
};
/**
 * Load and parse models.json. A missing or empty file counts as a valid,
 * empty configuration. Returns a result object instead of throwing so callers
 * can surface load problems as warnings.
 */
function readModelsJson(modelsJsonPath: string): { ok: true; value: ModelsJson } | { ok: false; error: string } {
  if (!existsSync(modelsJsonPath)) {
    return { ok: true, value: { providers: {} } };
  }
  let raw: string;
  try {
    raw = readFileSync(modelsJsonPath, "utf8").trim();
  } catch (error) {
    return {
      ok: false,
      error: `Failed to read models.json: ${error instanceof Error ? error.message : String(error)}`,
    };
  }
  if (!raw) {
    return { ok: true, value: { providers: {} } };
  }
  let parsed: unknown;
  try {
    parsed = JSON.parse(raw);
  } catch (error) {
    return {
      ok: false,
      error: `Failed to read models.json: ${error instanceof Error ? error.message : String(error)}`,
    };
  }
  if (!parsed || typeof parsed !== "object") {
    return { ok: false, error: `Invalid models.json (expected an object): ${modelsJsonPath}` };
  }
  return { ok: true, value: parsed as ModelsJson };
}
export function upsertProviderBaseUrl(
modelsJsonPath: string,
providerId: string,
baseUrl: string,
): { ok: true } | { ok: false; error: string } {
return upsertProviderConfig(modelsJsonPath, providerId, { baseUrl });
}
/**
 * Partial provider configuration merged into models.json by
 * upsertProviderConfig. Only fields that are present (non-undefined)
 * overwrite the stored values; omitted fields are left as-is.
 */
export type ProviderConfigPatch = {
  baseUrl?: string;
  apiKey?: string;
  api?: string;
  authHeader?: boolean;
  headers?: Record<string, string>;
  models?: Array<{ id: string }>;
};
/**
 * Merge a partial provider configuration into models.json, creating the file
 * (and its parent directory) when needed.
 *
 * Only the fields present in `patch` are overwritten; other stored fields for
 * the provider are preserved, as are unrelated providers and unknown
 * top-level keys. Returns a result object instead of throwing so callers can
 * surface failures as warnings.
 */
export function upsertProviderConfig(
  modelsJsonPath: string,
  providerId: string,
  patch: ProviderConfigPatch,
): { ok: true } | { ok: false; error: string } {
  const loaded = readModelsJson(modelsJsonPath);
  if (!loaded.ok) {
    return loaded;
  }
  const value: ModelsJson = loaded.value;
  // Shallow-copy the providers map so the parsed input is never mutated.
  const providers: Record<string, Record<string, unknown>> = {
    ...(value.providers && typeof value.providers === "object" ? value.providers : {}),
  };
  const existing = providers[providerId];
  // Guard against malformed entries (null or arrays) in a hand-edited file;
  // spreading an array here would produce bogus numeric keys.
  const currentProvider =
    existing && typeof existing === "object" && !Array.isArray(existing) ? existing : {};
  const nextProvider: Record<string, unknown> = { ...currentProvider };
  if (patch.baseUrl !== undefined) nextProvider.baseUrl = patch.baseUrl;
  if (patch.apiKey !== undefined) nextProvider.apiKey = patch.apiKey;
  if (patch.api !== undefined) nextProvider.api = patch.api;
  if (patch.authHeader !== undefined) nextProvider.authHeader = patch.authHeader;
  if (patch.headers !== undefined) nextProvider.headers = patch.headers;
  if (patch.models !== undefined) nextProvider.models = patch.models;
  providers[providerId] = nextProvider;
  const next: ModelsJson = { ...value, providers };
  try {
    mkdirSync(dirname(modelsJsonPath), { recursive: true });
    // models.json can contain API keys/headers; create it with user-only
    // permissions so the secret is never world-readable, even briefly.
    writeFileSync(modelsJsonPath, JSON.stringify(next, null, 2) + "\n", { encoding: "utf8", mode: 0o600 });
    // `mode` only applies on creation, so tighten pre-existing files too.
    try {
      chmodSync(modelsJsonPath, 0o600);
    } catch {
      // ignore permission errors (best-effort)
    }
    return { ok: true };
  } catch (error) {
    return { ok: false, error: `Failed to write models.json: ${error instanceof Error ? error.message : String(error)}` };
  }
}

12
src/model/registry.ts Normal file
View File

@@ -0,0 +1,12 @@
import { dirname, resolve } from "node:path";
import { AuthStorage, ModelRegistry } from "@mariozechner/pi-coding-agent";
/** Resolve the models.json path that sits alongside the given auth.json file. */
export function getModelsJsonPath(authPath: string): string {
  const authDir = dirname(authPath);
  return resolve(authDir, "models.json");
}
export function createModelRegistry(authPath: string): ModelRegistry {
return new ModelRegistry(AuthStorage.create(authPath), getModelsJsonPath(authPath));
}

View File

@@ -7,11 +7,14 @@ import { ensureSupportedNodeVersion } from "../system/node-version.js";
export async function launchPiChat(options: PiRuntimeOptions): Promise<void> { export async function launchPiChat(options: PiRuntimeOptions): Promise<void> {
ensureSupportedNodeVersion(); ensureSupportedNodeVersion();
const { piCliPath, promisePolyfillPath } = resolvePiPaths(options.appRoot); const { piCliPath, promisePolyfillPath, promisePolyfillSourcePath, tsxLoaderPath } = resolvePiPaths(options.appRoot);
if (!existsSync(piCliPath)) { if (!existsSync(piCliPath)) {
throw new Error(`Pi CLI not found: ${piCliPath}`); throw new Error(`Pi CLI not found: ${piCliPath}`);
} }
if (!existsSync(promisePolyfillPath)) {
const useBuiltPolyfill = existsSync(promisePolyfillPath);
const useDevPolyfill = !useBuiltPolyfill && existsSync(promisePolyfillSourcePath) && existsSync(tsxLoaderPath);
if (!useBuiltPolyfill && !useDevPolyfill) {
throw new Error(`Promise polyfill not found: ${promisePolyfillPath}`); throw new Error(`Promise polyfill not found: ${promisePolyfillPath}`);
} }
@@ -19,7 +22,11 @@ export async function launchPiChat(options: PiRuntimeOptions): Promise<void> {
process.stdout.write("\x1b[2J\x1b[3J\x1b[H"); process.stdout.write("\x1b[2J\x1b[3J\x1b[H");
} }
const child = spawn(process.execPath, ["--import", promisePolyfillPath, piCliPath, ...buildPiArgs(options)], { const importArgs = useDevPolyfill
? ["--import", tsxLoaderPath, "--import", promisePolyfillSourcePath]
: ["--import", promisePolyfillPath];
const child = spawn(process.execPath, [...importArgs, piCliPath, ...buildPiArgs(options)], {
cwd: options.workingDir, cwd: options.workingDir,
stdio: "inherit", stdio: "inherit",
env: buildPiEnv(options), env: buildPiEnv(options),

View File

@@ -25,6 +25,8 @@ export function resolvePiPaths(appRoot: string) {
piPackageRoot: resolve(appRoot, "node_modules", "@mariozechner", "pi-coding-agent"), piPackageRoot: resolve(appRoot, "node_modules", "@mariozechner", "pi-coding-agent"),
piCliPath: resolve(appRoot, "node_modules", "@mariozechner", "pi-coding-agent", "dist", "cli.js"), piCliPath: resolve(appRoot, "node_modules", "@mariozechner", "pi-coding-agent", "dist", "cli.js"),
promisePolyfillPath: resolve(appRoot, "dist", "system", "promise-polyfill.js"), promisePolyfillPath: resolve(appRoot, "dist", "system", "promise-polyfill.js"),
promisePolyfillSourcePath: resolve(appRoot, "src", "system", "promise-polyfill.ts"),
tsxLoaderPath: resolve(appRoot, "node_modules", "tsx", "dist", "loader.mjs"),
researchToolsPath: resolve(appRoot, "extensions", "research-tools.ts"), researchToolsPath: resolve(appRoot, "extensions", "research-tools.ts"),
promptTemplatePath: resolve(appRoot, "prompts"), promptTemplatePath: resolve(appRoot, "prompts"),
systemPromptPath: resolve(appRoot, ".feynman", "SYSTEM.md"), systemPromptPath: resolve(appRoot, ".feynman", "SYSTEM.md"),
@@ -38,7 +40,11 @@ export function validatePiInstallation(appRoot: string): string[] {
const missing: string[] = []; const missing: string[] = [];
if (!existsSync(paths.piCliPath)) missing.push(paths.piCliPath); if (!existsSync(paths.piCliPath)) missing.push(paths.piCliPath);
if (!existsSync(paths.promisePolyfillPath)) missing.push(paths.promisePolyfillPath); if (!existsSync(paths.promisePolyfillPath)) {
// Dev fallback: allow running from source without `dist/` build artifacts.
const hasDevPolyfill = existsSync(paths.promisePolyfillSourcePath) && existsSync(paths.tsxLoaderPath);
if (!hasDevPolyfill) missing.push(paths.promisePolyfillPath);
}
if (!existsSync(paths.researchToolsPath)) missing.push(paths.researchToolsPath); if (!existsSync(paths.researchToolsPath)) missing.push(paths.researchToolsPath);
if (!existsSync(paths.promptTemplatePath)) missing.push(paths.promptTemplatePath); if (!existsSync(paths.promptTemplatePath)) missing.push(paths.promptTemplatePath);
@@ -94,6 +100,8 @@ export function buildPiEnv(options: PiRuntimeOptions): NodeJS.ProcessEnv {
FEYNMAN_NODE_EXECUTABLE: process.execPath, FEYNMAN_NODE_EXECUTABLE: process.execPath,
FEYNMAN_BIN_PATH: resolve(options.appRoot, "bin", "feynman.js"), FEYNMAN_BIN_PATH: resolve(options.appRoot, "bin", "feynman.js"),
FEYNMAN_NPM_PREFIX: feynmanNpmPrefixPath, FEYNMAN_NPM_PREFIX: feynmanNpmPrefixPath,
// Ensure the Pi child process uses Feynman's agent dir for auth/models/settings.
PI_CODING_AGENT_DIR: options.feynmanAgentDir,
PANDOC_PATH: process.env.PANDOC_PATH ?? resolveExecutable("pandoc", PANDOC_FALLBACK_PATHS), PANDOC_PATH: process.env.PANDOC_PATH ?? resolveExecutable("pandoc", PANDOC_FALLBACK_PATHS),
PI_HARDWARE_CURSOR: process.env.PI_HARDWARE_CURSOR ?? "1", PI_HARDWARE_CURSOR: process.env.PI_HARDWARE_CURSOR ?? "1",
PI_SKIP_VERSION_CHECK: process.env.PI_SKIP_VERSION_CHECK ?? "1", PI_SKIP_VERSION_CHECK: process.env.PI_SKIP_VERSION_CHECK ?? "1",

View File

@@ -1,9 +1,10 @@
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { dirname } from "node:path"; import { dirname } from "node:path";
import { AuthStorage, ModelRegistry, type PackageSource } from "@mariozechner/pi-coding-agent"; import { ModelRegistry, type PackageSource } from "@mariozechner/pi-coding-agent";
import { CORE_PACKAGE_SOURCES, shouldPruneLegacyDefaultPackages } from "./package-presets.js"; import { CORE_PACKAGE_SOURCES, shouldPruneLegacyDefaultPackages } from "./package-presets.js";
import { createModelRegistry } from "../model/registry.js";
export type ThinkingLevel = "off" | "minimal" | "low" | "medium" | "high" | "xhigh"; export type ThinkingLevel = "off" | "minimal" | "low" | "medium" | "high" | "xhigh";
@@ -115,8 +116,7 @@ export function normalizeFeynmanSettings(
settings.packages = [...CORE_PACKAGE_SOURCES]; settings.packages = [...CORE_PACKAGE_SOURCES];
} }
const authStorage = AuthStorage.create(authPath); const modelRegistry = createModelRegistry(authPath);
const modelRegistry = new ModelRegistry(authStorage);
const availableModels = modelRegistry.getAvailable().map((model) => ({ const availableModels = modelRegistry.getAvailable().map((model) => ({
provider: model.provider, provider: model.provider,
id: model.id, id: model.id,

View File

@@ -1,6 +1,7 @@
import { AuthStorage, ModelRegistry } from "@mariozechner/pi-coding-agent";
import { getUserName as getAlphaUserName, isLoggedIn as isAlphaLoggedIn } from "@companion-ai/alpha-hub/lib"; import { getUserName as getAlphaUserName, isLoggedIn as isAlphaLoggedIn } from "@companion-ai/alpha-hub/lib";
import { readFileSync } from "node:fs";
import { formatPiWebAccessDoctorLines, getPiWebAccessStatus } from "../pi/web-access.js"; import { formatPiWebAccessDoctorLines, getPiWebAccessStatus } from "../pi/web-access.js";
import { BROWSER_FALLBACK_PATHS, PANDOC_FALLBACK_PATHS, resolveExecutable } from "../system/executables.js"; import { BROWSER_FALLBACK_PATHS, PANDOC_FALLBACK_PATHS, resolveExecutable } from "../system/executables.js";
import { readJson } from "../pi/settings.js"; import { readJson } from "../pi/settings.js";
@@ -8,6 +9,30 @@ import { validatePiInstallation } from "../pi/runtime.js";
import { printInfo, printPanel, printSection } from "../ui/terminal.js"; import { printInfo, printPanel, printSection } from "../ui/terminal.js";
import { getCurrentModelSpec } from "../model/commands.js"; import { getCurrentModelSpec } from "../model/commands.js";
import { buildModelStatusSnapshotFromRecords, getAvailableModelRecords, getSupportedModelRecords } from "../model/catalog.js"; import { buildModelStatusSnapshotFromRecords, getAvailableModelRecords, getSupportedModelRecords } from "../model/catalog.js";
import { createModelRegistry, getModelsJsonPath } from "../model/registry.js";
/**
 * Scan a models.json file for providers that declare a models[] list but
 * have no usable apiKey.
 *
 * A provider with an explicit, non-empty models list is configured entirely
 * through models.json, so a missing or blank apiKey means its models will
 * not be available at runtime; such provider ids are returned so the doctor
 * command can warn about them. Providers without a models list are skipped.
 *
 * Best-effort by design: any read or parse failure yields an empty result
 * so diagnostics never crash on a missing or malformed file.
 *
 * @param modelsJsonPath path to the models.json file to inspect
 * @returns provider ids missing an apiKey, in file (insertion) order
 */
function findProvidersMissingApiKey(modelsJsonPath: string): string[] {
  try {
    const raw = readFileSync(modelsJsonPath, "utf8").trim();
    if (!raw) return [];
    // Narrow the parsed JSON step by step instead of casting to `any`.
    const parsed: unknown = JSON.parse(raw);
    if (!parsed || typeof parsed !== "object") return [];
    const providers = (parsed as Record<string, unknown>).providers;
    if (!providers || typeof providers !== "object") return [];
    const missing: string[] = [];
    for (const [providerId, config] of Object.entries(providers as Record<string, unknown>)) {
      if (!config || typeof config !== "object") continue;
      const { models, apiKey } = config as { models?: unknown; apiKey?: unknown };
      // Only providers that pin a concrete model list require an apiKey here.
      if (!Array.isArray(models) || models.length === 0) continue;
      if (typeof apiKey !== "string" || apiKey.trim().length === 0) {
        missing.push(providerId);
      }
    }
    return missing;
  } catch {
    // Unreadable/malformed models.json is reported elsewhere; stay quiet here.
    return [];
  }
}
export type DoctorOptions = { export type DoctorOptions = {
settingsPath: string; settingsPath: string;
@@ -104,7 +129,7 @@ export function runStatus(options: DoctorOptions): void {
export function runDoctor(options: DoctorOptions): void { export function runDoctor(options: DoctorOptions): void {
const settings = readJson(options.settingsPath); const settings = readJson(options.settingsPath);
const modelRegistry = new ModelRegistry(AuthStorage.create(options.authPath)); const modelRegistry = createModelRegistry(options.authPath);
const availableModels = modelRegistry.getAvailable(); const availableModels = modelRegistry.getAvailable();
const pandocPath = resolveExecutable("pandoc", PANDOC_FALLBACK_PATHS); const pandocPath = resolveExecutable("pandoc", PANDOC_FALLBACK_PATHS);
const browserPath = process.env.PUPPETEER_EXECUTABLE_PATH ?? resolveExecutable("google-chrome", BROWSER_FALLBACK_PATHS); const browserPath = process.env.PUPPETEER_EXECUTABLE_PATH ?? resolveExecutable("google-chrome", BROWSER_FALLBACK_PATHS);
@@ -144,6 +169,21 @@ export function runDoctor(options: DoctorOptions): void {
if (modelStatus.recommendedModelReason) { if (modelStatus.recommendedModelReason) {
console.log(` why: ${modelStatus.recommendedModelReason}`); console.log(` why: ${modelStatus.recommendedModelReason}`);
} }
const modelsError = modelRegistry.getError();
if (modelsError) {
console.log("models.json: error");
for (const line of modelsError.split("\n")) {
console.log(` ${line}`);
}
} else {
const modelsJsonPath = getModelsJsonPath(options.authPath);
console.log(`models.json: ${modelsJsonPath}`);
const missingApiKeyProviders = findProvidersMissingApiKey(modelsJsonPath);
if (missingApiKeyProviders.length > 0) {
console.log(` warning: provider(s) missing apiKey: ${missingApiKeyProviders.join(", ")}`);
console.log(" note: custom providers with a models[] list need apiKey in models.json to be available.");
}
}
console.log(`pandoc: ${pandocPath ?? "missing"}`); console.log(`pandoc: ${pandocPath ?? "missing"}`);
console.log(`browser preview runtime: ${browserPath ?? "missing"}`); console.log(`browser preview runtime: ${browserPath ?? "missing"}`);
for (const line of formatPiWebAccessDoctorLines()) { for (const line of formatPiWebAccessDoctorLines()) {

View File

@@ -29,6 +29,7 @@ function printNonInteractiveSetupGuidance(): void {
printInfo("Non-interactive terminal. Use explicit commands:"); printInfo("Non-interactive terminal. Use explicit commands:");
printInfo(" feynman model login <provider>"); printInfo(" feynman model login <provider>");
printInfo(" feynman model set <provider/model>"); printInfo(" feynman model set <provider/model>");
printInfo(" # or configure API keys via env vars/auth.json and rerun `feynman model list`");
printInfo(" feynman alpha login"); printInfo(" feynman alpha login");
printInfo(" feynman doctor"); printInfo(" feynman doctor");
} }

32
tests/models-json.test.ts Normal file
View File

@@ -0,0 +1,32 @@
import test from "node:test";
import assert from "node:assert/strict";
import { mkdtempSync, readFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { upsertProviderConfig } from "../src/model/models-json.js";
test("upsertProviderConfig creates models.json and merges provider config", () => {
  // Use an isolated temp directory so parallel runs never collide.
  const workDir = mkdtempSync(join(tmpdir(), "feynman-models-"));
  const modelsJsonPath = join(workDir, "models.json");

  // First upsert: file does not exist yet; full provider config is written.
  const createResult = upsertProviderConfig(modelsJsonPath, "custom", {
    baseUrl: "http://localhost:11434/v1",
    apiKey: "ollama",
    api: "openai-completions",
    authHeader: true,
    models: [{ id: "llama3.1:8b" }],
  });
  assert.deepEqual(createResult, { ok: true });

  // Second upsert: a partial config must merge into (not replace) the entry.
  const mergeResult = upsertProviderConfig(modelsJsonPath, "custom", {
    baseUrl: "http://localhost:9999/v1",
  });
  assert.deepEqual(mergeResult, { ok: true });

  // Verify on-disk state: baseUrl updated, all other fields preserved.
  const onDisk = JSON.parse(readFileSync(modelsJsonPath, "utf8")) as any;
  assert.equal(onDisk.providers.custom.baseUrl, "http://localhost:9999/v1");
  assert.equal(onDisk.providers.custom.api, "openai-completions");
  assert.equal(onDisk.providers.custom.authHeader, true);
  assert.deepEqual(onDisk.providers.custom.models, [{ id: "llama3.1:8b" }]);
});

View File

@@ -50,6 +50,7 @@ test("buildPiEnv wires Feynman paths into the Pi environment", () => {
assert.equal(env.FEYNMAN_NPM_PREFIX, "/home/.feynman/npm-global"); assert.equal(env.FEYNMAN_NPM_PREFIX, "/home/.feynman/npm-global");
assert.equal(env.NPM_CONFIG_PREFIX, "/home/.feynman/npm-global"); assert.equal(env.NPM_CONFIG_PREFIX, "/home/.feynman/npm-global");
assert.equal(env.npm_config_prefix, "/home/.feynman/npm-global"); assert.equal(env.npm_config_prefix, "/home/.feynman/npm-global");
assert.equal(env.PI_CODING_AGENT_DIR, "/home/.feynman/agent");
assert.ok( assert.ok(
env.PATH?.startsWith( env.PATH?.startsWith(
"/repo/feynman/node_modules/.bin:/repo/feynman/.feynman/npm/node_modules/.bin:/home/.feynman/npm-global/bin:", "/repo/feynman/node_modules/.bin:/repo/feynman/.feynman/npm/node_modules/.bin:/home/.feynman/npm-global/bin:",