fix(gateway): tighten openai-http edge handling

This commit is contained in:
Peter Steinberger
2026-02-22 11:28:55 +00:00
parent 05358173da
commit 0f989d3109
3 changed files with 258 additions and 222 deletions

View File

@@ -334,6 +334,21 @@ describe("OpenAI-compatible HTTP API (e2e)", () => {
expect(msg.content).toBe("hello");
}
{
agentCommand.mockClear();
agentCommand.mockResolvedValueOnce({ payloads: [{ text: "" }] } as never);
const res = await postChatCompletions(port, {
stream: false,
model: "openclaw",
messages: [{ role: "user", content: "hi" }],
});
expect(res.status).toBe(200);
const json = (await res.json()) as Record<string, unknown>;
const choice0 = (json.choices as Array<Record<string, unknown>>)[0] ?? {};
const msg = (choice0.message as Record<string, unknown> | undefined) ?? {};
expect(msg.content).toBe("No response from OpenClaw.");
}
{
const res = await postChatCompletions(port, {
model: "openclaw",
@@ -475,6 +490,31 @@ describe("OpenAI-compatible HTTP API (e2e)", () => {
expect(fallbackText).toContain("[DONE]");
expect(fallbackText).toContain("hello");
}
{
agentCommand.mockClear();
agentCommand.mockRejectedValueOnce(new Error("boom"));
const errorRes = await postChatCompletions(port, {
stream: true,
model: "openclaw",
messages: [{ role: "user", content: "hi" }],
});
expect(errorRes.status).toBe(200);
const errorText = await errorRes.text();
const errorData = parseSseDataLines(errorText);
expect(errorData[errorData.length - 1]).toBe("[DONE]");
const errorChunks = errorData
.filter((d) => d !== "[DONE]")
.map((d) => JSON.parse(d) as Record<string, unknown>);
const stopChoice = errorChunks
.flatMap((c) => (c.choices as Array<Record<string, unknown>> | undefined) ?? [])
.find((choice) => choice.finish_reason === "stop");
expect((stopChoice?.delta as Record<string, unknown> | undefined)?.content).toBe(
"Error: internal error",
);
}
} finally {
// shared server
}

View File

@@ -41,6 +41,51 @@ function writeSse(res: ServerResponse, data: unknown) {
res.write(`data: ${JSON.stringify(data)}\n\n`);
}
/**
 * Assemble the agentCommand input payload for one HTTP chat request.
 * Delivery is pinned off (`deliver: false`, `bestEffortDeliver: false`) on the
 * "webchat" channel because the HTTP layer writes the response itself.
 */
function buildAgentCommandInput(params: {
  prompt: { message: string; extraSystemPrompt?: string };
  sessionKey: string;
  runId: string;
}) {
  const { prompt, sessionKey, runId } = params;
  return {
    message: prompt.message,
    extraSystemPrompt: prompt.extraSystemPrompt,
    sessionKey,
    runId,
    deliver: false as const,
    messageChannel: "webchat" as const,
    bestEffortDeliver: false as const,
  };
}
/**
 * Emit the initial SSE chunk of a streamed completion: a delta carrying only
 * the assistant role, with no content yet.
 */
function writeAssistantRoleChunk(res: ServerResponse, params: { runId: string; model: string }) {
  const { runId, model } = params;
  const createdAt = Math.floor(Date.now() / 1000);
  writeSse(res, {
    id: runId,
    object: "chat.completion.chunk",
    created: createdAt,
    model,
    choices: [{ index: 0, delta: { role: "assistant" } }],
  });
}
/**
 * Emit one SSE content chunk for a streamed completion. Pass
 * `finishReason: "stop"` on the final chunk, `null` for intermediate ones.
 */
function writeAssistantContentChunk(
  res: ServerResponse,
  params: { runId: string; model: string; content: string; finishReason: "stop" | null },
) {
  const { runId, model, content, finishReason } = params;
  const choice = {
    index: 0,
    delta: { content },
    finish_reason: finishReason,
  };
  writeSse(res, {
    id: runId,
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model,
    choices: [choice],
  });
}
/** Coerce an unknown request field to a message array; non-arrays become []. */
function asMessages(val: unknown): OpenAiChatMessage[] {
  if (!Array.isArray(val)) {
    return [];
  }
  return val as OpenAiChatMessage[];
}
@@ -194,22 +239,15 @@ export async function handleOpenAiHttpRequest(
const runId = `chatcmpl_${randomUUID()}`;
const deps = createDefaultDeps();
const commandInput = buildAgentCommandInput({
prompt,
sessionKey,
runId,
});
if (!stream) {
try {
const result = await agentCommand(
{
message: prompt.message,
extraSystemPrompt: prompt.extraSystemPrompt,
sessionKey,
runId,
deliver: false,
messageChannel: "webchat",
bestEffortDeliver: false,
},
defaultRuntime,
deps,
);
const result = await agentCommand(commandInput, defaultRuntime, deps);
const content = resolveAgentResponseText(result);
@@ -258,28 +296,15 @@ export async function handleOpenAiHttpRequest(
if (!wroteRole) {
wroteRole = true;
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model,
choices: [{ index: 0, delta: { role: "assistant" } }],
});
writeAssistantRoleChunk(res, { runId, model });
}
sawAssistantDelta = true;
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
writeAssistantContentChunk(res, {
runId,
model,
choices: [
{
index: 0,
delta: { content },
finish_reason: null,
},
],
content,
finishReason: null,
});
return;
}
@@ -302,19 +327,7 @@ export async function handleOpenAiHttpRequest(
void (async () => {
try {
const result = await agentCommand(
{
message: prompt.message,
extraSystemPrompt: prompt.extraSystemPrompt,
sessionKey,
runId,
deliver: false,
messageChannel: "webchat",
bestEffortDeliver: false,
},
defaultRuntime,
deps,
);
const result = await agentCommand(commandInput, defaultRuntime, deps);
if (closed) {
return;
@@ -323,30 +336,17 @@ export async function handleOpenAiHttpRequest(
if (!sawAssistantDelta) {
if (!wroteRole) {
wroteRole = true;
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model,
choices: [{ index: 0, delta: { role: "assistant" } }],
});
writeAssistantRoleChunk(res, { runId, model });
}
const content = resolveAgentResponseText(result);
sawAssistantDelta = true;
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
writeAssistantContentChunk(res, {
runId,
model,
choices: [
{
index: 0,
delta: { content },
finish_reason: null,
},
],
content,
finishReason: null,
});
}
} catch (err) {
@@ -354,18 +354,11 @@ export async function handleOpenAiHttpRequest(
if (closed) {
return;
}
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
writeAssistantContentChunk(res, {
runId,
model,
choices: [
{
index: 0,
delta: { content: "Error: internal error" },
finish_reason: "stop",
},
],
content: "Error: internal error",
finishReason: "stop",
});
emitAgentEvent({
runId,

View File

@@ -81,7 +81,106 @@ const whatsappRegistry = createRegistry([
]);
const emptyRegistry = createRegistry([]);
// Shape of one model entry as returned by the models.list RPC.
type ModelCatalogRpcEntry = {
id: string;
name: string;
provider: string;
contextWindow?: number;
};
// Shape of one entry seeded into the mocked Pi SDK catalog. `name` is
// optional here; the expected RPC output (see expectedSortedCatalog) shows
// it falling back to the id when absent.
type PiCatalogFixtureEntry = {
id: string;
provider: string;
name?: string;
contextWindow?: number;
};
/**
 * Deliberately unsorted Pi SDK catalog fixture: providers interleaved, one
 * entry with no display name and a contextWindow of 0, so models.list's
 * normalization and sorting can be exercised.
 */
const buildPiCatalogFixture = (): PiCatalogFixtureEntry[] => {
  const unnamedOpenAi: PiCatalogFixtureEntry = {
    id: "gpt-test-z",
    provider: "openai",
    contextWindow: 0,
  };
  const namedOpenAi: PiCatalogFixtureEntry = {
    id: "gpt-test-a",
    name: "A-Model",
    provider: "openai",
    contextWindow: 8000,
  };
  const anthropicB: PiCatalogFixtureEntry = {
    id: "claude-test-b",
    name: "B-Model",
    provider: "anthropic",
    contextWindow: 1000,
  };
  const anthropicA: PiCatalogFixtureEntry = {
    id: "claude-test-a",
    name: "A-Model",
    provider: "anthropic",
    contextWindow: 200_000,
  };
  return [unnamedOpenAi, namedOpenAi, anthropicB, anthropicA];
};
/**
 * Expected models.list output for buildPiCatalogFixture: sorted by provider
 * then name/id, with the unnamed entry's name defaulted to its id and its
 * zero contextWindow dropped.
 */
const expectedSortedCatalog = (): ModelCatalogRpcEntry[] => {
  const anthropic: ModelCatalogRpcEntry[] = [
    { id: "claude-test-a", name: "A-Model", provider: "anthropic", contextWindow: 200_000 },
    { id: "claude-test-b", name: "B-Model", provider: "anthropic", contextWindow: 1000 },
  ];
  const openai: ModelCatalogRpcEntry[] = [
    { id: "gpt-test-a", name: "A-Model", provider: "openai", contextWindow: 8000 },
    { id: "gpt-test-z", name: "gpt-test-z", provider: "openai" },
  ];
  return [...anthropic, ...openai];
};
describe("gateway server models + voicewake", () => {
// Issue a models.list RPC over the shared test websocket, typed to the catalog shape.
const listModels = async () => rpcReq<{ models: ModelCatalogRpcEntry[] }>(ws, "models.list");
// Enable the mocked Pi SDK and load it with the unsorted catalog fixture.
const seedPiCatalog = () => {
piSdkMock.enabled = true;
piSdkMock.models = buildPiCatalogFixture();
};
/**
 * Run `run` with the OpenClaw config file temporarily replaced by `config`
 * (serialized as pretty-printed JSON), then restore the original file — or
 * delete it if none existed. The config cache is cleared after both the
 * write and the restore so each phase re-reads from disk.
 */
const withModelsConfig = async <T>(config: unknown, run: () => Promise<T>): Promise<T> => {
  const configPath = process.env.OPENCLAW_CONFIG_PATH;
  if (!configPath) {
    throw new Error("Missing OPENCLAW_CONFIG_PATH");
  }
  // Snapshot the current file; ENOENT means "restore by deleting" later.
  let savedConfig: string | undefined;
  try {
    savedConfig = await fs.readFile(configPath, "utf-8");
  } catch (err) {
    const code = (err as NodeJS.ErrnoException | undefined)?.code;
    if (code !== "ENOENT") {
      throw err;
    }
  }
  try {
    await fs.mkdir(path.dirname(configPath), { recursive: true });
    await fs.writeFile(configPath, JSON.stringify(config, null, 2), "utf-8");
    clearConfigCache();
    return await run();
  } finally {
    if (savedConfig !== undefined) {
      await fs.writeFile(configPath, savedConfig, "utf-8");
    } else {
      await fs.rm(configPath, { force: true });
    }
    clearConfigCache();
  }
};
const withTempHome = async <T>(fn: (homeDir: string) => Promise<T>): Promise<T> => {
const tempHome = await createTempHomeEnv("openclaw-home-");
try {
@@ -178,171 +277,75 @@ describe("gateway server models + voicewake", () => {
});
test("models.list returns model catalog", async () => {
piSdkMock.enabled = true;
piSdkMock.models = [
{ id: "gpt-test-z", provider: "openai", contextWindow: 0 },
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
];
seedPiCatalog();
const res1 = await rpcReq<{
models: Array<{
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
const res2 = await rpcReq<{
models: Array<{
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
const res1 = await listModels();
const res2 = await listModels();
expect(res1.ok).toBe(true);
expect(res2.ok).toBe(true);
const models = res1.payload?.models ?? [];
expect(models).toEqual([
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
expect(models).toEqual(expectedSortedCatalog());
expect(piSdkMock.discoverCalls).toBe(1);
});
test("models.list filters to allowlisted configured models by default", async () => {
const configPath = process.env.OPENCLAW_CONFIG_PATH;
if (!configPath) {
throw new Error("Missing OPENCLAW_CONFIG_PATH");
}
let previousConfig: string | undefined;
try {
previousConfig = await fs.readFile(configPath, "utf-8");
} catch (err) {
const code = (err as NodeJS.ErrnoException | undefined)?.code;
if (code !== "ENOENT") {
throw err;
}
}
try {
await fs.mkdir(path.dirname(configPath), { recursive: true });
await fs.writeFile(
configPath,
JSON.stringify(
{
agents: {
defaults: {
model: { primary: "openai/gpt-test-z" },
models: {
"openai/gpt-test-z": {},
"anthropic/claude-test-a": {},
},
},
await withModelsConfig(
{
agents: {
defaults: {
model: { primary: "openai/gpt-test-z" },
models: {
"openai/gpt-test-z": {},
"anthropic/claude-test-a": {},
},
},
null,
2,
),
"utf-8",
);
clearConfigCache();
},
},
async () => {
seedPiCatalog();
const res = await listModels();
piSdkMock.enabled = true;
piSdkMock.models = [
{ id: "gpt-test-z", provider: "openai", contextWindow: 0 },
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
];
expect(res.ok).toBe(true);
expect(res.payload?.models).toEqual([
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
},
);
});
const res = await rpcReq<{
models: Array<{
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
test("models.list falls back to full catalog when allowlist has no catalog match", async () => {
await withModelsConfig(
{
agents: {
defaults: {
model: { primary: "openai/not-in-catalog" },
models: {
"openai/not-in-catalog": {},
},
},
},
},
async () => {
seedPiCatalog();
const res = await listModels();
expect(res.ok).toBe(true);
expect(res.payload?.models).toEqual([
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
} finally {
if (previousConfig === undefined) {
await fs.rm(configPath, { force: true });
} else {
await fs.writeFile(configPath, previousConfig, "utf-8");
}
clearConfigCache();
}
expect(res.ok).toBe(true);
expect(res.payload?.models).toEqual(expectedSortedCatalog());
},
);
});
test("models.list rejects unknown params", async () => {