Models/Config: default missing Anthropic model api fields

This commit is contained in:
Vignesh Natarajan
2026-02-21 22:50:43 -08:00
parent 7f611f0e13
commit 29a782b9cd
4 changed files with 119 additions and 2 deletions

View File

@@ -31,6 +31,7 @@ Docs: https://docs.openclaw.ai
- Agents/Fallbacks: treat JSON payloads with `type: "api_error"` + `"Internal server error"` as transient failover errors so Anthropic 500-style failures trigger model fallback. (#23193) Thanks @jarvis-lane.
- Agents/Diagnostics: include resolved lifecycle error text in `embedded run agent end` warnings so UI/TUI “Connection error” runs expose actionable provider failure reasons in gateway logs. (#23054) Thanks @Raize.
- Plugins/Hooks: run legacy `before_agent_start` once per agent turn and reuse that result across model-resolve and prompt-build compatibility paths, preventing duplicate hook side effects (for example duplicate external API calls). (#23289) Thanks @ksato8710.
- Models/Config: default missing Anthropic provider/model `api` fields to `anthropic-messages` during config validation so custom relay model entries are preserved instead of being dropped by runtime model registry validation. (#23332) Thanks @bigbigmonkey123.
- Gateway/Pairing: treat operator.admin pairing tokens as satisfying operator.write requests so legacy devices stop looping through scope-upgrade prompts introduced in 2026.2.19. (#23125, #23006) Thanks @vignesh07.
- Gateway/Pairing: treat `operator.admin` as satisfying other `operator.*` scope checks during device-auth verification so local CLI/TUI sessions stop entering pairing-required loops for pairing/approval-scoped commands. (#22062, #22193, #21191) Thanks @Botaccess, @jhartshorn, and @ctbritt.
- Gateway/Pairing: preserve existing approved token scopes when processing repair pairings that omit `scopes`, preventing empty-scope token regressions on reconnecting clients. (#21906) Thanks @paki81.

View File

@@ -2,6 +2,7 @@ import fs from "node:fs/promises";
import path from "node:path";
import { describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { validateConfigObject } from "../config/validation.js";
import { resolveOpenClawAgentDir } from "./agent-paths.js";
import {
CUSTOM_PROXY_MODELS_CONFIG,
@@ -13,6 +14,37 @@ import { ensureOpenClawModelsJson } from "./models-config.js";
installModelsConfigTestHooks();
describe("models-config", () => {
it("keeps anthropic api defaults when model entries omit api", async () => {
await withTempHome(async () => {
const validated = validateConfigObject({
models: {
providers: {
anthropic: {
baseUrl: "https://relay.example.com/api",
apiKey: "cr_xxxx",
models: [{ id: "claude-opus-4-6", name: "Claude Opus 4.6" }],
},
},
},
});
expect(validated.ok).toBe(true);
if (!validated.ok) {
throw new Error("expected config to validate");
}
await ensureOpenClawModelsJson(validated.config);
const modelPath = path.join(resolveOpenClawAgentDir(), "models.json");
const raw = await fs.readFile(modelPath, "utf8");
const parsed = JSON.parse(raw) as {
providers: Record<string, { api?: string; models?: Array<{ id: string; api?: string }> }>;
};
expect(parsed.providers.anthropic?.api).toBe("anthropic-messages");
expect(parsed.providers.anthropic?.models?.[0]?.api).toBe("anthropic-messages");
});
});
it("fills missing provider.apiKey from env var name when models exist", async () => {
await withTempHome(async () => {
const prevKey = process.env.MINIMAX_API_KEY;

View File

@@ -1,5 +1,5 @@
import { DEFAULT_CONTEXT_TOKENS } from "../agents/defaults.js";
import { parseModelRef } from "../agents/model-selection.js";
import { normalizeProviderId, parseModelRef } from "../agents/model-selection.js";
import { DEFAULT_AGENT_MAX_CONCURRENT, DEFAULT_SUBAGENT_MAX_CONCURRENT } from "./agent-limits.js";
import { resolveTalkApiKey } from "./talk.js";
import type { OpenClawConfig } from "./types.js";
@@ -37,6 +37,16 @@ const DEFAULT_MODEL_MAX_TOKENS = 8192;
// Loosened model entry accepted during defaulting: only `id` and `name` are
// required; every other ModelDefinitionConfig field may be absent and gets filled.
type ModelDefinitionLike = Partial<ModelDefinitionConfig> &
Pick<ModelDefinitionConfig, "id" | "name">;
/**
 * Resolve the effective provider-level `api` value.
 *
 * An explicitly configured `api` always wins. When it is absent, providers
 * whose normalized id is "anthropic" default to "anthropic-messages"; all
 * other providers keep `undefined`.
 */
function resolveDefaultProviderApi(
  providerId: string,
  providerApi: ModelDefinitionConfig["api"] | undefined,
): ModelDefinitionConfig["api"] | undefined {
  if (!providerApi && normalizeProviderId(providerId) === "anthropic") {
    return "anthropic-messages";
  }
  return providerApi;
}
/** Type guard: true only for finite numbers strictly greater than zero. */
function isPositiveNumber(value: unknown): value is number {
  if (typeof value !== "number") {
    return false;
  }
  return Number.isFinite(value) && value > 0;
}
@@ -181,6 +191,12 @@ export function applyModelDefaults(cfg: OpenClawConfig): OpenClawConfig {
if (!Array.isArray(models) || models.length === 0) {
continue;
}
const providerApi = resolveDefaultProviderApi(providerId, provider.api);
let nextProvider = provider;
if (providerApi && provider.api !== providerApi) {
mutated = true;
nextProvider = { ...nextProvider, api: providerApi };
}
let providerMutated = false;
const nextModels = models.map((model) => {
const raw = model as ModelDefinitionLike;
@@ -220,6 +236,10 @@ export function applyModelDefaults(cfg: OpenClawConfig): OpenClawConfig {
if (raw.maxTokens !== maxTokens) {
modelMutated = true;
}
const api = raw.api ?? providerApi;
if (raw.api !== api) {
modelMutated = true;
}
if (!modelMutated) {
return model;
@@ -232,13 +252,17 @@ export function applyModelDefaults(cfg: OpenClawConfig): OpenClawConfig {
cost,
contextWindow,
maxTokens,
api,
} as ModelDefinitionConfig;
});
if (!providerMutated) {
if (nextProvider !== provider) {
nextProviders[providerId] = nextProvider;
}
continue;
}
nextProviders[providerId] = { ...provider, models: nextModels };
nextProviders[providerId] = { ...nextProvider, models: nextModels };
mutated = true;
}

View File

@@ -104,4 +104,64 @@ describe("applyModelDefaults", () => {
expect(model?.contextWindow).toBe(32768);
expect(model?.maxTokens).toBe(32768);
});
it("defaults anthropic provider and model api to anthropic-messages", () => {
const cfg = {
models: {
providers: {
anthropic: {
baseUrl: "https://relay.example.com/api",
apiKey: "cr_xxxx",
models: [
{
id: "claude-opus-4-6",
name: "Claude Opus 4.6",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200_000,
maxTokens: 8192,
},
],
},
},
},
} satisfies OpenClawConfig;
const next = applyModelDefaults(cfg);
const provider = next.models?.providers?.anthropic;
const model = provider?.models?.[0];
expect(provider?.api).toBe("anthropic-messages");
expect(model?.api).toBe("anthropic-messages");
});
it("propagates provider api to models when model api is missing", () => {
const cfg = {
models: {
providers: {
myproxy: {
baseUrl: "https://proxy.example/v1",
apiKey: "sk-test",
api: "openai-completions",
models: [
{
id: "gpt-5.2",
name: "GPT-5.2",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200_000,
maxTokens: 8192,
},
],
},
},
},
} satisfies OpenClawConfig;
const next = applyModelDefaults(cfg);
const model = next.models?.providers?.myproxy?.models?.[0];
expect(model?.api).toBe("openai-completions");
});
});