perf(test): trim duplicate raw-body and streaming queue scenarios

This commit is contained in:
Peter Steinberger
2026-02-14 01:49:54 +00:00
parent c4f550ef2a
commit b4430c126a
2 changed files with 0 additions and 143 deletions

View File

@@ -7,7 +7,6 @@ import { getReplyFromConfig } from "./reply.js";
type RunEmbeddedPiAgent = typeof import("../agents/pi-embedded.js").runEmbeddedPiAgent;
type RunEmbeddedPiAgentParams = Parameters<RunEmbeddedPiAgent>[0];
type ReplyResult = Awaited<ReturnType<RunEmbeddedPiAgent>>;
const piEmbeddedMock = vi.hoisted(() => ({
abortEmbeddedPiRun: vi.fn().mockReturnValue(false),
@@ -78,16 +77,6 @@ async function withTempHome<T>(fn: (home: string) => Promise<T>): Promise<T> {
}
}
// Build a minimal successful agent reply carrying `text`, with fixed
// timing/metadata stubs so tests can assert on payload text alone.
function makeAgentResult(text: string): ReplyResult {
  const payloads = [{ text }];
  const agentMeta = { sessionId: "s", provider: "p", model: "m" };
  return { payloads, meta: { durationMs: 5, agentMeta } };
}
describe("block streaming", () => {
beforeAll(async () => {
fixtureRoot = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-stream-"));
@@ -267,99 +256,6 @@ describe("block streaming", () => {
expect(resStreamMode?.text).toBe("final");
expect(onBlockReplyStreamMode).not.toHaveBeenCalled();
vi.useFakeTimers();
try {
piEmbeddedMock.runEmbeddedPiAgent.mockReset();
const prompts: string[] = [];
piEmbeddedMock.runEmbeddedPiAgent.mockImplementation(async (params) => {
prompts.push(params.prompt);
return makeAgentResult("ok");
});
const collectCfg = {
agents: {
defaults: {
model: "anthropic/claude-opus-4-5",
workspace: path.join(home, "openclaw"),
},
},
channels: { whatsapp: { allowFrom: ["*"] } },
session: { store: path.join(home, "sessions.json") },
messages: {
queue: {
mode: "collect",
debounceMs: 200,
cap: 10,
drop: "summarize",
},
},
};
piEmbeddedMock.isEmbeddedPiRunActive.mockReturnValue(true);
piEmbeddedMock.isEmbeddedPiRunStreaming.mockReturnValue(true);
const first = await getReplyFromConfig(
{ Body: "first", From: "+1001", To: "+2000", MessageSid: "m-1" },
{},
collectCfg,
);
expect(first).toBeUndefined();
expect(piEmbeddedMock.runEmbeddedPiAgent).not.toHaveBeenCalled();
piEmbeddedMock.isEmbeddedPiRunActive.mockReturnValue(false);
piEmbeddedMock.isEmbeddedPiRunStreaming.mockReturnValue(false);
const second = await getReplyFromConfig(
{ Body: "second", From: "+1001", To: "+2000" },
{},
collectCfg,
);
const secondText = Array.isArray(second) ? second[0]?.text : second?.text;
expect(secondText).toBe("ok");
await vi.advanceTimersByTimeAsync(500);
await Promise.resolve();
const queuedPrompt =
prompts.find((p) => p.includes("[Queued messages while agent was busy]")) ?? "";
expect(queuedPrompt).toContain("Queued #1");
expect(queuedPrompt).toContain("first");
expect(queuedPrompt).not.toContain("[message_id:");
prompts.length = 0;
piEmbeddedMock.isEmbeddedPiRunActive.mockReturnValue(true);
piEmbeddedMock.isEmbeddedPiRunStreaming.mockReturnValue(false);
const followupCfg = {
agents: {
defaults: {
model: "anthropic/claude-opus-4-5",
workspace: path.join(home, "openclaw"),
},
},
channels: { whatsapp: { allowFrom: ["*"] } },
session: { store: path.join(home, "sessions-2.json") },
messages: {
queue: {
mode: "followup",
debounceMs: 0,
cap: 1,
drop: "summarize",
},
},
};
await getReplyFromConfig({ Body: "one", From: "+1002", To: "+2000" }, {}, followupCfg);
await getReplyFromConfig({ Body: "two", From: "+1002", To: "+2000" }, {}, followupCfg);
piEmbeddedMock.isEmbeddedPiRunActive.mockReturnValue(false);
await getReplyFromConfig({ Body: "three", From: "+1002", To: "+2000" }, {}, followupCfg);
await vi.advanceTimersByTimeAsync(50);
await Promise.resolve();
expect(prompts.some((p) => p.includes("[Queue overflow]"))).toBe(true);
} finally {
vi.useRealTimers();
}
});
});
});

View File

@@ -80,9 +80,6 @@ async function withTempHome<T>(fn: (home: string) => Promise<T>): Promise<T> {
}
describe("RawBody directive parsing", () => {
type ReplyMessage = Parameters<typeof getReplyFromConfig>[0];
type ReplyConfig = Parameters<typeof getReplyFromConfig>[2];
beforeAll(async () => {
fixtureRoot = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-rawbody-"));
});
@@ -104,42 +101,6 @@ describe("RawBody directive parsing", () => {
it("handles directives, history, and non-default agent session files", async () => {
await withTempHome(async (home) => {
// Send one message through getReplyFromConfig and verify the reply text
// contains every expected fragment, and that the embedded agent was never
// invoked (i.e. the directive was handled locally as a command).
const assertCommandReply = async (input: {
  message: ReplyMessage;
  config: ReplyConfig;
  expectedIncludes: string[];
}) => {
  vi.mocked(runEmbeddedPiAgent).mockReset();
  const reply = await getReplyFromConfig(input.message, {}, input.config);
  const replyText = Array.isArray(reply) ? reply[0]?.text : reply?.text;
  for (const fragment of input.expectedIncludes) {
    expect(replyText).toContain(fragment);
  }
  expect(runEmbeddedPiAgent).not.toHaveBeenCalled();
};
await assertCommandReply({
message: {
Body: `[Chat messages since your last reply - for context]\\n[WhatsApp ...] Someone: hello\\n\\n[Current message - respond to this]\\n[WhatsApp ...] Jake: /think:high\\n[from: Jake McInteer (+6421807830)]`,
RawBody: "/think:high",
From: "+1222",
To: "+1222",
ChatType: "group",
CommandAuthorized: true,
},
config: {
agents: {
defaults: {
model: "anthropic/claude-opus-4-5",
workspace: path.join(home, "openclaw-1"),
},
},
channels: { whatsapp: { allowFrom: ["*"] } },
session: { store: path.join(home, "sessions-1.json") },
},
expectedIncludes: ["Thinking level set to high."],
});
vi.mocked(runEmbeddedPiAgent).mockResolvedValue({
payloads: [{ text: "ok" }],
meta: {