TUI: filter model picker to allowlisted models

This commit is contained in:
Vignesh Natarajan
2026-02-21 19:03:05 -08:00
parent 853ae626fa
commit 4550a52007
3 changed files with 106 additions and 1 deletion

View File

@@ -94,6 +94,7 @@ Docs: https://docs.openclaw.ai
- CLI/Pairing: default `pairing list` and `pairing approve` to the sole available pairing channel when omitted, so TUI-only setups can recover from `pairing required` without guessing channel arguments. (#21527) Thanks @losts1.
- TUI/Pairing: show explicit pairing-required recovery guidance after gateway disconnects that return `pairing required`, including approval steps to unblock quickstart TUI hatching on fresh installs. (#21841) Thanks @nicolinux.
- TUI/Input: suppress duplicate backspace events arriving in the same input burst window so SSH sessions no longer delete two characters per backspace press in the composer. (#19318) Thanks @eheimer.
- TUI/Models: scope `models.list` to the configured model allowlist (`agents.defaults.models`) so `/model` picker no longer floods with unrelated catalog entries by default. (#18816) Thanks @fwends.
- TUI/Heartbeat: suppress heartbeat ACK/prompt noise in chat streaming when `showOk` is disabled, while still preserving non-ACK heartbeat alerts in final output. (#20228) Thanks @bhalliburton.
- TUI/History: cap chat-log component growth and prune stale render nodes/references so large default history loads no longer overflow render recursion with `RangeError: Maximum call stack size exceeded`. (#18068) Thanks @JaniJegoroff.
- Memory/QMD: diversify mixed-source search ranking when both session and memory collections are present so session transcript hits no longer crowd out durable memory-file matches in top results. (#19913) Thanks @alextempr.

View File

@@ -1,3 +1,6 @@
import { DEFAULT_PROVIDER } from "../../agents/defaults.js";
import { buildAllowedModelSet } from "../../agents/model-selection.js";
import { loadConfig } from "../../config/config.js";
import {
ErrorCodes,
errorShape,
@@ -20,7 +23,14 @@ export const modelsHandlers: GatewayRequestHandlers = {
return;
}
try {
const models = await context.loadGatewayModelCatalog();
const catalog = await context.loadGatewayModelCatalog();
const cfg = loadConfig();
const { allowedCatalog } = buildAllowedModelSet({
cfg,
catalog,
defaultProvider: DEFAULT_PROVIDER,
});
const models = allowedCatalog.length > 0 ? allowedCatalog : catalog;
respond(true, { models }, undefined);
} catch (err) {
respond(false, undefined, errorShape(ErrorCodes.UNAVAILABLE, String(err)));

View File

@@ -5,6 +5,7 @@ import { afterAll, beforeAll, describe, expect, test } from "vitest";
import { WebSocket } from "ws";
import { getChannelPlugin } from "../channels/plugins/index.js";
import type { ChannelOutboundAdapter } from "../channels/plugins/types.js";
import { clearConfigCache } from "../config/config.js";
import { resolveCanvasHostUrl } from "../infra/canvas-host-url.js";
import { GatewayLockError } from "../infra/gateway-lock.js";
import { getActivePluginRegistry, setActivePluginRegistry } from "../plugins/runtime.js";
@@ -251,6 +252,99 @@ describe("gateway server models + voicewake", () => {
expect(piSdkMock.discoverCalls).toBe(1);
});
// Verifies that models.list returns only models present in the configured
// allowlist (`agents.defaults.models`), even when the provider catalog
// (mocked via piSdkMock) contains additional entries.
test("models.list filters to allowlisted configured models by default", async () => {
const configPath = process.env.OPENCLAW_CONFIG_PATH;
if (!configPath) {
throw new Error("Missing OPENCLAW_CONFIG_PATH");
}
// Snapshot any pre-existing config so it can be restored in `finally`.
// ENOENT is tolerated: it means no config existed before this test.
let previousConfig: string | undefined;
try {
previousConfig = await fs.readFile(configPath, "utf-8");
} catch (err) {
const code = (err as NodeJS.ErrnoException | undefined)?.code;
if (code !== "ENOENT") {
throw err;
}
}
try {
await fs.mkdir(path.dirname(configPath), { recursive: true });
// Write a config whose allowlist contains exactly two models; the mock
// catalog below deliberately includes two more that must be filtered out.
await fs.writeFile(
configPath,
JSON.stringify(
{
agents: {
defaults: {
model: { primary: "openai/gpt-test-z" },
models: {
"openai/gpt-test-z": {},
"anthropic/claude-test-a": {},
},
},
},
},
null,
2,
),
"utf-8",
);
// Drop any cached config so the handler re-reads the file just written.
clearConfigCache();
piSdkMock.enabled = true;
// Mock catalog: four models, only two of which are allowlisted.
// "gpt-test-z" has no name and a contextWindow of 0 to exercise the
// catalog's normalization of missing/falsy fields (see expectation below).
piSdkMock.models = [
{ id: "gpt-test-z", provider: "openai", contextWindow: 0 },
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
];
const res = await rpcReq<{
models: Array<{
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
expect(res.ok).toBe(true);
// Only the two allowlisted models survive; "gpt-test-a" and
// "claude-test-b" are excluded despite being in the catalog.
// NOTE(review): the expected ordering (anthropic before openai) appears
// to be alphabetical by provider/id — confirm against the catalog/
// buildAllowedModelSet sorting rules.
// NOTE(review): "gpt-test-z" presumably gets `name` defaulted to its id
// and the falsy contextWindow (0) dropped by catalog normalization —
// confirm against loadGatewayModelCatalog.
expect(res.payload?.models).toEqual([
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
} finally {
// Restore the pre-test config state: remove the file if none existed,
// otherwise write back the original bytes; then invalidate the cache so
// subsequent tests do not see this test's config.
if (previousConfig === undefined) {
await fs.rm(configPath, { force: true });
} else {
await fs.writeFile(configPath, previousConfig, "utf-8");
}
clearConfigCache();
}
});
test("models.list rejects unknown params", async () => {
piSdkMock.enabled = true;
piSdkMock.models = [{ id: "gpt-test-a", name: "A", provider: "openai" }];