refactor(agents): dedupe model and tool test helpers

This commit is contained in:
Peter Steinberger
2026-03-02 21:30:12 +00:00
parent 067855e623
commit ab8b8dae70
13 changed files with 302 additions and 374 deletions

View File

@@ -109,6 +109,62 @@ type ModelFallbackRunResult<T> = {
attempts: FallbackAttempt[];
};
/**
 * Packages a successful fallback run into the standard result shape,
 * recording which provider/model produced the value and the attempt
 * history accumulated before it.
 */
function buildFallbackSuccess<T>(params: {
  result: T;
  provider: string;
  model: string;
  attempts: FallbackAttempt[];
}): ModelFallbackRunResult<T> {
  const { result, provider, model, attempts } = params;
  return { result, provider, model, attempts };
}
/**
 * Executes one provider/model candidate and folds the outcome into a tagged
 * result. Abort-style errors are rethrown immediately (per shouldRethrowAbort)
 * so cancellation is never swallowed by the fallback machinery; every other
 * failure is returned as `{ ok: false, error }` for the caller to record.
 */
async function runFallbackCandidate<T>(params: {
  run: (provider: string, model: string) => Promise<T>;
  provider: string;
  model: string;
}): Promise<{ ok: true; result: T } | { ok: false; error: unknown }> {
  const { run, provider, model } = params;
  try {
    const result = await run(provider, model);
    return { ok: true, result };
  } catch (error) {
    if (shouldRethrowAbort(error)) {
      throw error;
    }
    return { ok: false, error };
  }
}
/**
 * Runs a single fallback candidate and, on success, wraps the value into the
 * full ModelFallbackRunResult (including the attempt history). A non-abort
 * failure surfaces the raw error so the caller can log the attempt and move
 * on to the next candidate.
 */
async function runFallbackAttempt<T>(params: {
  run: (provider: string, model: string) => Promise<T>;
  provider: string;
  model: string;
  attempts: FallbackAttempt[];
}): Promise<{ success: ModelFallbackRunResult<T> } | { error: unknown }> {
  const outcome = await runFallbackCandidate({
    run: params.run,
    provider: params.provider,
    model: params.model,
  });
  if (!outcome.ok) {
    return { error: outcome.error };
  }
  return {
    success: buildFallbackSuccess({
      result: outcome.result,
      provider: params.provider,
      model: params.model,
      attempts: params.attempts,
    }),
  };
}
/** True when both candidates name the same provider and the same model. */
function sameModelCandidate(a: ModelCandidate, b: ModelCandidate): boolean {
  if (a.provider !== b.provider) {
    return false;
  }
  return a.model === b.model;
}
@@ -444,18 +500,12 @@ export async function runWithModelFallback<T>(params: {
}
}
try {
const result = await params.run(candidate.provider, candidate.model);
return {
result,
provider: candidate.provider,
model: candidate.model,
attempts,
};
} catch (err) {
if (shouldRethrowAbort(err)) {
throw err;
}
const attemptRun = await runFallbackAttempt({ run: params.run, ...candidate, attempts });
if ("success" in attemptRun) {
return attemptRun.success;
}
const err = attemptRun.error;
{
// Context overflow errors should be handled by the inner runner's
// compaction/retry logic, not by model fallback. If one escapes as a
// throw, rethrow it immediately rather than trying a different model
@@ -532,18 +582,12 @@ export async function runWithImageModelFallback<T>(params: {
for (let i = 0; i < candidates.length; i += 1) {
const candidate = candidates[i];
try {
const result = await params.run(candidate.provider, candidate.model);
return {
result,
provider: candidate.provider,
model: candidate.model,
attempts,
};
} catch (err) {
if (shouldRethrowAbort(err)) {
throw err;
}
const attemptRun = await runFallbackAttempt({ run: params.run, ...candidate, attempts });
if ("success" in attemptRun) {
return attemptRun.success;
}
{
const err = attemptRun.error;
lastError = err;
attempts.push({
provider: candidate.provider,

View File

@@ -13,40 +13,40 @@ import { ensureOpenClawModelsJson } from "./models-config.js";
installModelsConfigTestHooks({ restoreFetch: true });
/**
 * Test helper: creates the agent directory (if missing) and writes a
 * version-1 auth-profiles.json containing the given profile map.
 */
async function writeAuthProfiles(agentDir: string, profiles: Record<string, unknown>) {
  await fs.mkdir(agentDir, { recursive: true });
  const payload = JSON.stringify({ version: 1, profiles }, null, 2);
  await fs.writeFile(path.join(agentDir, "auth-profiles.json"), payload);
}
/**
 * Asserts that the first call recorded by the fetch mock carried an
 * Authorization header of the form `Bearer <token>`.
 */
function expectBearerAuthHeader(fetchMock: { mock: { calls: unknown[][] } }, token: string) {
  const firstCall = fetchMock.mock.calls[0] as [string, { headers?: Record<string, string> }];
  const headers = firstCall[1]?.headers;
  expect(headers?.Authorization).toBe(`Bearer ${token}`);
}
describe("models-config", () => {
it("uses the first github-copilot profile when env tokens are missing", async () => {
await withTempHome(async (home) => {
await withUnsetCopilotTokenEnv(async () => {
const fetchMock = mockCopilotTokenExchangeSuccess();
const agentDir = path.join(home, "agent-profiles");
await fs.mkdir(agentDir, { recursive: true });
await fs.writeFile(
path.join(agentDir, "auth-profiles.json"),
JSON.stringify(
{
version: 1,
profiles: {
"github-copilot:alpha": {
type: "token",
provider: "github-copilot",
token: "alpha-token",
},
"github-copilot:beta": {
type: "token",
provider: "github-copilot",
token: "beta-token",
},
},
},
null,
2,
),
);
await writeAuthProfiles(agentDir, {
"github-copilot:alpha": {
type: "token",
provider: "github-copilot",
token: "alpha-token",
},
"github-copilot:beta": {
type: "token",
provider: "github-copilot",
token: "beta-token",
},
});
await ensureOpenClawModelsJson({ models: { providers: {} } }, agentDir);
const [, opts] = fetchMock.mock.calls[0] as [string, { headers?: Record<string, string> }];
expect(opts?.headers?.Authorization).toBe("Bearer alpha-token");
expectBearerAuthHeader(fetchMock, "alpha-token");
});
});
});
@@ -82,31 +82,21 @@ describe("models-config", () => {
await withUnsetCopilotTokenEnv(async () => {
const fetchMock = mockCopilotTokenExchangeSuccess();
const agentDir = path.join(home, "agent-profiles");
await fs.mkdir(agentDir, { recursive: true });
process.env.COPILOT_REF_TOKEN = "token-from-ref-env";
await fs.writeFile(
path.join(agentDir, "auth-profiles.json"),
JSON.stringify(
{
version: 1,
profiles: {
"github-copilot:default": {
type: "token",
provider: "github-copilot",
tokenRef: { source: "env", provider: "default", id: "COPILOT_REF_TOKEN" },
},
},
try {
await writeAuthProfiles(agentDir, {
"github-copilot:default": {
type: "token",
provider: "github-copilot",
tokenRef: { source: "env", provider: "default", id: "COPILOT_REF_TOKEN" },
},
null,
2,
),
);
});
await ensureOpenClawModelsJson({ models: { providers: {} } }, agentDir);
const [, opts] = fetchMock.mock.calls[0] as [string, { headers?: Record<string, string> }];
expect(opts?.headers?.Authorization).toBe("Bearer token-from-ref-env");
delete process.env.COPILOT_REF_TOKEN;
await ensureOpenClawModelsJson({ models: { providers: {} } }, agentDir);
expectBearerAuthHeader(fetchMock, "token-from-ref-env");
} finally {
delete process.env.COPILOT_REF_TOKEN;
}
});
});
});

View File

@@ -285,19 +285,8 @@ describe("openclaw-tools: subagents (sessions_spawn allowlist)", () => {
list: [{ id: "main", subagents: { allowAgents: ["*"] } }, { id: "my-research_agent01" }],
},
});
callGatewayMock.mockImplementation(async () => ({
runId: "run-1",
status: "accepted",
acceptedAt: 1000,
}));
const tool = await getSessionsSpawnTool({
agentSessionKey: "main",
agentChannel: "whatsapp",
});
const result = await tool.execute("call-valid", {
task: "do thing",
agentId: "my-research_agent01",
});
mockAcceptedSpawn(1000);
const result = await executeSpawn("call-valid", "my-research_agent01");
const details = result.details as { status?: string };
expect(details.status).toBe("accepted");
});
@@ -312,19 +301,8 @@ describe("openclaw-tools: subagents (sessions_spawn allowlist)", () => {
],
},
});
callGatewayMock.mockImplementation(async () => ({
runId: "run-1",
status: "accepted",
acceptedAt: 1000,
}));
const tool = await getSessionsSpawnTool({
agentSessionKey: "main",
agentChannel: "whatsapp",
});
const result = await tool.execute("call-unconfigured", {
task: "do thing",
agentId: "research",
});
mockAcceptedSpawn(1000);
const result = await executeSpawn("call-unconfigured", "research");
const details = result.details as { status?: string };
// Must pass: "research" is in allowAgents even though not in agents.list
expect(details.status).toBe("accepted");

View File

@@ -40,6 +40,19 @@ describe("buildEmbeddedRunPayloads", () => {
expect(payloads[0]?.text).toBe(OVERLOADED_FALLBACK_TEXT);
};
function expectNoSyntheticCompletionForSession(sessionKey: string) {
const payloads = buildPayloads({
sessionKey,
toolMetas: [{ toolName: "write", meta: "/tmp/out.md" }],
lastAssistant: makeAssistant({
stopReason: "stop",
errorMessage: undefined,
content: [],
}),
});
expect(payloads).toHaveLength(0);
}
it("suppresses raw API error JSON when the assistant errored", () => {
const payloads = buildPayloads({
assistantTexts: [errorJson],
@@ -140,31 +153,11 @@ describe("buildEmbeddedRunPayloads", () => {
});
it("does not add synthetic completion text for channel sessions", () => {
const payloads = buildPayloads({
sessionKey: "agent:main:discord:channel:c123",
toolMetas: [{ toolName: "write", meta: "/tmp/out.md" }],
lastAssistant: makeAssistant({
stopReason: "stop",
errorMessage: undefined,
content: [],
}),
});
expect(payloads).toHaveLength(0);
expectNoSyntheticCompletionForSession("agent:main:discord:channel:c123");
});
it("does not add synthetic completion text for group sessions", () => {
const payloads = buildPayloads({
sessionKey: "agent:main:telegram:group:g123",
toolMetas: [{ toolName: "write", meta: "/tmp/out.md" }],
lastAssistant: makeAssistant({
stopReason: "stop",
errorMessage: undefined,
content: [],
}),
});
expect(payloads).toHaveLength(0);
expectNoSyntheticCompletionForSession("agent:main:telegram:group:g123");
});
it("does not add synthetic completion text when messaging tool already delivered output", () => {

View File

@@ -102,6 +102,23 @@ const createCompactionContext = (params: {
},
}) as unknown as Partial<ExtensionContext>;
async function runCompactionScenario(params: {
sessionManager: ExtensionContext["sessionManager"];
event: unknown;
apiKey: string | null;
}) {
const compactionHandler = createCompactionHandler();
const getApiKeyMock = vi.fn().mockResolvedValue(params.apiKey);
const mockContext = createCompactionContext({
sessionManager: params.sessionManager,
getApiKeyMock,
});
const result = (await compactionHandler(params.event, mockContext)) as {
cancel?: boolean;
};
return { result, getApiKeyMock };
}
describe("compaction-safeguard tool failures", () => {
it("formats tool failures with meta and summary", () => {
const messages: AgentMessage[] = [
@@ -377,23 +394,16 @@ describe("compaction-safeguard extension model fallback", () => {
// Set up runtime with model (mimics buildEmbeddedExtensionPaths behavior)
setCompactionSafeguardRuntime(sessionManager, { model });
const compactionHandler = createCompactionHandler();
const mockEvent = createCompactionEvent({
messageText: "test message",
tokensBefore: 1000,
});
const getApiKeyMock = vi.fn().mockResolvedValue(null);
const mockContext = createCompactionContext({
const { result, getApiKeyMock } = await runCompactionScenario({
sessionManager,
getApiKeyMock,
event: mockEvent,
apiKey: null,
});
// Call the handler and wait for result
const result = (await compactionHandler(mockEvent, mockContext)) as {
cancel?: boolean;
};
expect(result).toEqual({ cancel: true });
// KEY ASSERTION: Prove the fallback path was exercised
@@ -410,22 +420,16 @@ describe("compaction-safeguard extension model fallback", () => {
// Do NOT set runtime.model (both ctx.model and runtime.model will be undefined)
const compactionHandler = createCompactionHandler();
const mockEvent = createCompactionEvent({
messageText: "test",
tokensBefore: 500,
});
const getApiKeyMock = vi.fn().mockResolvedValue(null);
const mockContext = createCompactionContext({
const { result, getApiKeyMock } = await runCompactionScenario({
sessionManager,
getApiKeyMock,
event: mockEvent,
apiKey: null,
});
const result = (await compactionHandler(mockEvent, mockContext)) as {
cancel?: boolean;
};
expect(result).toEqual({ cancel: true });
// Verify early return: getApiKey should NOT have been called when both models are missing
@@ -439,7 +443,6 @@ describe("compaction-safeguard double-compaction guard", () => {
const model = createAnthropicModelFixture();
setCompactionSafeguardRuntime(sessionManager, { model });
const compactionHandler = createCompactionHandler();
const mockEvent = {
preparation: {
messagesToSummarize: [] as AgentMessage[],
@@ -451,16 +454,11 @@ describe("compaction-safeguard double-compaction guard", () => {
customInstructions: "",
signal: new AbortController().signal,
};
const getApiKeyMock = vi.fn().mockResolvedValue("sk-test");
const mockContext = createCompactionContext({
const { result, getApiKeyMock } = await runCompactionScenario({
sessionManager,
getApiKeyMock,
event: mockEvent,
apiKey: "sk-test",
});
const result = (await compactionHandler(mockEvent, mockContext)) as {
cancel?: boolean;
};
expect(result).toEqual({ cancel: true });
expect(getApiKeyMock).not.toHaveBeenCalled();
});
@@ -470,20 +468,15 @@ describe("compaction-safeguard double-compaction guard", () => {
const model = createAnthropicModelFixture();
setCompactionSafeguardRuntime(sessionManager, { model });
const compactionHandler = createCompactionHandler();
const mockEvent = createCompactionEvent({
messageText: "real message",
tokensBefore: 1500,
});
const getApiKeyMock = vi.fn().mockResolvedValue(null);
const mockContext = createCompactionContext({
const { result, getApiKeyMock } = await runCompactionScenario({
sessionManager,
getApiKeyMock,
event: mockEvent,
apiKey: null,
});
const result = (await compactionHandler(mockEvent, mockContext)) as {
cancel?: boolean;
};
expect(result).toEqual({ cancel: true });
expect(getApiKeyMock).toHaveBeenCalled();
});

View File

@@ -43,6 +43,20 @@ function writeRuntimeOpenRouterProfile(agentDir: string): void {
);
}
/**
 * Test helper: serializes the given entries into the legacy auth.json inside
 * the agent directory. Assumes the directory already exists.
 */
async function writeLegacyAuthJson(
  agentDir: string,
  authEntries: Record<string, unknown>,
): Promise<void> {
  const target = path.join(agentDir, "auth.json");
  await fs.writeFile(target, JSON.stringify(authEntries, null, 2));
}
/** Test helper: reads and parses the legacy auth.json from the agent dir. */
async function readLegacyAuthJson(agentDir: string): Promise<Record<string, unknown>> {
  const raw = await fs.readFile(path.join(agentDir, "auth.json"), "utf8");
  return JSON.parse(raw) as Record<string, unknown>;
}
describe("discoverAuthStorage", () => {
it("loads runtime credentials from auth-profiles without writing auth.json", async () => {
await withAgentDir(async (agentDir) => {
@@ -91,28 +105,19 @@ describe("discoverAuthStorage", () => {
it("scrubs static api_key entries from legacy auth.json and keeps oauth entries", async () => {
await withAgentDir(async (agentDir) => {
writeRuntimeOpenRouterProfile(agentDir);
await fs.writeFile(
path.join(agentDir, "auth.json"),
JSON.stringify(
{
openrouter: { type: "api_key", key: "legacy-static-key" },
"openai-codex": {
type: "oauth",
access: "oauth-access",
refresh: "oauth-refresh",
expires: Date.now() + 60_000,
},
},
null,
2,
),
);
await writeLegacyAuthJson(agentDir, {
openrouter: { type: "api_key", key: "legacy-static-key" },
"openai-codex": {
type: "oauth",
access: "oauth-access",
refresh: "oauth-refresh",
expires: Date.now() + 60_000,
},
});
discoverAuthStorage(agentDir);
const parsed = JSON.parse(await fs.readFile(path.join(agentDir, "auth.json"), "utf8")) as {
[key: string]: unknown;
};
const parsed = await readLegacyAuthJson(agentDir);
expect(parsed.openrouter).toBeUndefined();
expect(parsed["openai-codex"]).toMatchObject({
type: "oauth",
@@ -127,22 +132,13 @@ describe("discoverAuthStorage", () => {
process.env.OPENCLAW_AUTH_STORE_READONLY = "1";
try {
writeRuntimeOpenRouterProfile(agentDir);
await fs.writeFile(
path.join(agentDir, "auth.json"),
JSON.stringify(
{
openrouter: { type: "api_key", key: "legacy-static-key" },
},
null,
2,
),
);
await writeLegacyAuthJson(agentDir, {
openrouter: { type: "api_key", key: "legacy-static-key" },
});
discoverAuthStorage(agentDir);
const parsed = JSON.parse(await fs.readFile(path.join(agentDir, "auth.json"), "utf8")) as {
[key: string]: unknown;
};
const parsed = await readLegacyAuthJson(agentDir);
expect(parsed.openrouter).toMatchObject({ type: "api_key", key: "legacy-static-key" });
} finally {
if (previous === undefined) {

View File

@@ -106,6 +106,36 @@ async function createHostEscapeFixture(stateDir: string) {
return { workspaceDir, outsideFile };
}
// Shared scenario: mkdirp on an already-existing in-boundary subdirectory must
// resolve successfully and still issue the sandboxed `mkdir -p` against the
// mapped /workspace path. With forceBoundaryIoFallback, the first boundary
// open is mocked to fail with an io/EISDIR result so the fallback branch is
// exercised. NOTE: the mockImplementationOnce must be queued BEFORE the bridge
// is created/used, so the statement order here is load-bearing.
async function expectMkdirpAllowsExistingDirectory(params?: { forceBoundaryIoFallback?: boolean }) {
  await withTempDir("openclaw-fs-bridge-mkdirp-", async (stateDir) => {
    const workspaceDir = path.join(stateDir, "workspace");
    const nestedDir = path.join(workspaceDir, "memory", "kemik");
    // Pre-create the target so mkdirp runs against an existing directory.
    await fs.mkdir(nestedDir, { recursive: true });
    if (params?.forceBoundaryIoFallback) {
      // Simulate the boundary-open probe failing once with EISDIR (reason "io").
      mockedOpenBoundaryFile.mockImplementationOnce(async () => ({
        ok: false,
        reason: "io",
        error: Object.assign(new Error("EISDIR"), { code: "EISDIR" }),
      }));
    }
    const bridge = createSandboxFsBridge({
      sandbox: createSandbox({
        workspaceDir,
        agentWorkspaceDir: workspaceDir,
      }),
    });
    await expect(bridge.mkdirp({ filePath: "memory/kemik" })).resolves.toBeUndefined();
    // Verify the sandboxed mkdir was invoked with the container-side path.
    const mkdirCall = findCallByScriptFragment('mkdir -p -- "$1"');
    expect(mkdirCall).toBeDefined();
    const mkdirPath = mkdirCall ? getDockerPathArg(mkdirCall[0]) : "";
    expect(mkdirPath).toBe("/workspace/memory/kemik");
  });
}
describe("sandbox fs bridge shell compatibility", () => {
beforeEach(() => {
mockedExecDockerRaw.mockClear();
@@ -235,53 +265,11 @@ describe("sandbox fs bridge shell compatibility", () => {
});
it("allows mkdirp for existing in-boundary subdirectories", async () => {
await withTempDir("openclaw-fs-bridge-mkdirp-", async (stateDir) => {
const workspaceDir = path.join(stateDir, "workspace");
const nestedDir = path.join(workspaceDir, "memory", "kemik");
await fs.mkdir(nestedDir, { recursive: true });
const bridge = createSandboxFsBridge({
sandbox: createSandbox({
workspaceDir,
agentWorkspaceDir: workspaceDir,
}),
});
await expect(bridge.mkdirp({ filePath: "memory/kemik" })).resolves.toBeUndefined();
const mkdirCall = findCallByScriptFragment('mkdir -p -- "$1"');
expect(mkdirCall).toBeDefined();
const mkdirPath = mkdirCall ? getDockerPathArg(mkdirCall[0]) : "";
expect(mkdirPath).toBe("/workspace/memory/kemik");
});
await expectMkdirpAllowsExistingDirectory();
});
it("allows mkdirp when boundary open reports io for an existing directory", async () => {
await withTempDir("openclaw-fs-bridge-mkdirp-io-", async (stateDir) => {
const workspaceDir = path.join(stateDir, "workspace");
const nestedDir = path.join(workspaceDir, "memory", "kemik");
await fs.mkdir(nestedDir, { recursive: true });
mockedOpenBoundaryFile.mockImplementationOnce(async () => ({
ok: false,
reason: "io",
error: Object.assign(new Error("EISDIR"), { code: "EISDIR" }),
}));
const bridge = createSandboxFsBridge({
sandbox: createSandbox({
workspaceDir,
agentWorkspaceDir: workspaceDir,
}),
});
await expect(bridge.mkdirp({ filePath: "memory/kemik" })).resolves.toBeUndefined();
const mkdirCall = findCallByScriptFragment('mkdir -p -- "$1"');
expect(mkdirCall).toBeDefined();
const mkdirPath = mkdirCall ? getDockerPathArg(mkdirCall[0]) : "";
expect(mkdirPath).toBe("/workspace/memory/kemik");
});
await expectMkdirpAllowsExistingDirectory({ forceBoundaryIoFallback: true });
});
it("rejects mkdirp when target exists as a file", async () => {

View File

@@ -47,6 +47,34 @@ async function expectCurrentPidOwnsLock(params: {
await lock.release();
}
/**
 * Shared scenario: while a session write lock is held in-process, an
 * on-disk lock file naming our own pid (optionally carrying a legacy or
 * malformed `starttime` field) must NOT be reclaimed — a second
 * non-reentrant acquire has to time out with "session file locked".
 */
async function expectActiveInProcessLockIsNotReclaimed(params?: {
  legacyStarttime?: unknown;
}): Promise<void> {
  const root = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-lock-"));
  try {
    const sessionFile = path.join(root, "sessions.json");
    const lock = await acquireSessionWriteLock({ sessionFile, timeoutMs: 500 });
    // Overwrite the lock file to look like a (possibly legacy) live lock.
    const payload: Record<string, unknown> = {
      pid: process.pid,
      createdAt: new Date().toISOString(),
    };
    if (params && "legacyStarttime" in params) {
      payload.starttime = params.legacyStarttime;
    }
    await fs.writeFile(`${sessionFile}.lock`, JSON.stringify(payload), "utf8");
    await expect(
      acquireSessionWriteLock({
        sessionFile,
        timeoutMs: 50,
        allowReentrant: false,
      }),
    ).rejects.toThrow(/session file locked/);
    await lock.release();
  } finally {
    await fs.rm(root, { recursive: true, force: true });
  }
}
describe("acquireSessionWriteLock", () => {
it("reuses locks across symlinked session paths", async () => {
if (process.platform === "win32") {
@@ -323,60 +351,11 @@ describe("acquireSessionWriteLock", () => {
});
it("does not reclaim active in-process lock files without starttime", async () => {
const root = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-lock-"));
try {
const sessionFile = path.join(root, "sessions.json");
const lockPath = `${sessionFile}.lock`;
const lock = await acquireSessionWriteLock({ sessionFile, timeoutMs: 500 });
await fs.writeFile(
lockPath,
JSON.stringify({
pid: process.pid,
createdAt: new Date().toISOString(),
}),
"utf8",
);
await expect(
acquireSessionWriteLock({
sessionFile,
timeoutMs: 50,
allowReentrant: false,
}),
).rejects.toThrow(/session file locked/);
await lock.release();
} finally {
await fs.rm(root, { recursive: true, force: true });
}
await expectActiveInProcessLockIsNotReclaimed();
});
it("does not reclaim active in-process lock files with malformed starttime", async () => {
const root = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-lock-"));
try {
const sessionFile = path.join(root, "sessions.json");
const lockPath = `${sessionFile}.lock`;
const lock = await acquireSessionWriteLock({ sessionFile, timeoutMs: 500 });
await fs.writeFile(
lockPath,
JSON.stringify({
pid: process.pid,
createdAt: new Date().toISOString(),
starttime: 123.5,
}),
"utf8",
);
await expect(
acquireSessionWriteLock({
sessionFile,
timeoutMs: 50,
allowReentrant: false,
}),
).rejects.toThrow(/session file locked/);
await lock.release();
} finally {
await fs.rm(root, { recursive: true, force: true });
}
await expectActiveInProcessLockIsNotReclaimed({ legacyStarttime: 123.5 });
});
it("registers cleanup for SIGQUIT and SIGABRT", () => {

View File

@@ -410,56 +410,47 @@ export async function spawnSubagentDirect(
}
thinkingOverride = normalized;
}
try {
await callGateway({
method: "sessions.patch",
params: { key: childSessionKey, spawnDepth: childDepth },
timeoutMs: 10_000,
});
} catch (err) {
const messageText =
err instanceof Error ? err.message : typeof err === "string" ? err : "error";
const patchChildSession = async (patch: Record<string, unknown>): Promise<string | undefined> => {
try {
await callGateway({
method: "sessions.patch",
params: { key: childSessionKey, ...patch },
timeoutMs: 10_000,
});
return undefined;
} catch (err) {
return err instanceof Error ? err.message : typeof err === "string" ? err : "error";
}
};
const spawnDepthPatchError = await patchChildSession({ spawnDepth: childDepth });
if (spawnDepthPatchError) {
return {
status: "error",
error: messageText,
error: spawnDepthPatchError,
childSessionKey,
};
}
if (resolvedModel) {
try {
await callGateway({
method: "sessions.patch",
params: { key: childSessionKey, model: resolvedModel },
timeoutMs: 10_000,
});
modelApplied = true;
} catch (err) {
const messageText =
err instanceof Error ? err.message : typeof err === "string" ? err : "error";
const modelPatchError = await patchChildSession({ model: resolvedModel });
if (modelPatchError) {
return {
status: "error",
error: messageText,
error: modelPatchError,
childSessionKey,
};
}
modelApplied = true;
}
if (thinkingOverride !== undefined) {
try {
await callGateway({
method: "sessions.patch",
params: {
key: childSessionKey,
thinkingLevel: thinkingOverride === "off" ? null : thinkingOverride,
},
timeoutMs: 10_000,
});
} catch (err) {
const messageText =
err instanceof Error ? err.message : typeof err === "string" ? err : "error";
const thinkingPatchError = await patchChildSession({
thinkingLevel: thinkingOverride === "off" ? null : thinkingOverride,
});
if (thinkingPatchError) {
return {
status: "error",
error: messageText,
error: thinkingPatchError,
childSessionKey,
};
}

View File

@@ -327,36 +327,22 @@ export async function handleDiscordGuildAction(
integer: true,
});
const availableTags = parseAvailableTags(params.availableTags);
const editPayload = {
channelId,
name: name ?? undefined,
topic: topic ?? undefined,
position: position ?? undefined,
parentId,
nsfw,
rateLimitPerUser: rateLimitPerUser ?? undefined,
archived,
locked,
autoArchiveDuration: autoArchiveDuration ?? undefined,
availableTags,
};
const channel = accountId
? await editChannelDiscord(
{
channelId,
name: name ?? undefined,
topic: topic ?? undefined,
position: position ?? undefined,
parentId,
nsfw,
rateLimitPerUser: rateLimitPerUser ?? undefined,
archived,
locked,
autoArchiveDuration: autoArchiveDuration ?? undefined,
availableTags,
},
{ accountId },
)
: await editChannelDiscord({
channelId,
name: name ?? undefined,
topic: topic ?? undefined,
position: position ?? undefined,
parentId,
nsfw,
rateLimitPerUser: rateLimitPerUser ?? undefined,
archived,
locked,
autoArchiveDuration: autoArchiveDuration ?? undefined,
availableTags,
});
? await editChannelDiscord(editPayload, { accountId })
: await editChannelDiscord(editPayload);
return jsonResult({ ok: true, channel });
}
case "channelDelete": {

View File

@@ -2,7 +2,7 @@ import { type Context, complete } from "@mariozechner/pi-ai";
import { Type } from "@sinclair/typebox";
import type { OpenClawConfig } from "../../config/config.js";
import { resolveUserPath } from "../../utils.js";
import { getDefaultLocalRoots, loadWebMedia } from "../../web/media.js";
import { loadWebMedia } from "../../web/media.js";
import { minimaxUnderstandImage } from "../minimax-vlm.js";
import {
coerceImageAssistantText,
@@ -15,6 +15,7 @@ import {
applyImageModelConfigDefaults,
buildTextToolResult,
resolveModelFromRegistry,
resolveMediaToolLocalRoots,
resolveModelRuntimeApiKey,
resolvePromptAndModelOverride,
} from "./media-tool-shared.js";
@@ -24,7 +25,6 @@ import {
discoverAuthStorage,
discoverModels,
ensureOpenClawModelsJson,
normalizeWorkspaceDir,
resolveSandboxedBridgeMediaPath,
runWithImageModelFallback,
type AnyAgentTool,
@@ -298,17 +298,9 @@ export function createImageTool(options?: {
? "Analyze one or more images with a vision model. Use image for a single path/URL, or images for multiple (up to 20). Only use this tool when images were NOT already provided in the user's message. Images mentioned in the prompt are automatically visible to you."
: "Analyze one or more images with the configured image model (agents.defaults.imageModel). Use image for a single path/URL, or images for multiple (up to 20). Provide a prompt describing what to analyze.";
const localRoots = (() => {
const workspaceDir = normalizeWorkspaceDir(options?.workspaceDir);
if (options?.fsPolicy?.workspaceOnly) {
return workspaceDir ? [workspaceDir] : [];
}
const roots = getDefaultLocalRoots();
if (!workspaceDir) {
return roots;
}
return Array.from(new Set([...roots, workspaceDir]));
})();
const localRoots = resolveMediaToolLocalRoots(options?.workspaceDir, {
workspaceOnly: options?.fsPolicy?.workspaceOnly === true,
});
return {
label: "Image",

View File

@@ -36,9 +36,15 @@ export function applyImageModelConfigDefaults(
};
}
export function resolveMediaToolLocalRoots(workspaceDirRaw: string | undefined): string[] {
const roots = getDefaultLocalRoots();
export function resolveMediaToolLocalRoots(
workspaceDirRaw: string | undefined,
options?: { workspaceOnly?: boolean },
): string[] {
const workspaceDir = normalizeWorkspaceDir(workspaceDirRaw);
if (options?.workspaceOnly) {
return workspaceDir ? [workspaceDir] : [];
}
const roots = getDefaultLocalRoots();
if (!workspaceDir) {
return [...roots];
}

View File

@@ -3,7 +3,7 @@ import { Type } from "@sinclair/typebox";
import type { OpenClawConfig } from "../../config/config.js";
import { extractPdfContent, type PdfExtractedContent } from "../../media/pdf-extract.js";
import { resolveUserPath } from "../../utils.js";
import { getDefaultLocalRoots, loadWebMediaRaw } from "../../web/media.js";
import { loadWebMediaRaw } from "../../web/media.js";
import {
coerceImageModelConfig,
type ImageModelConfig,
@@ -13,6 +13,7 @@ import {
applyImageModelConfigDefaults,
buildTextToolResult,
resolveModelFromRegistry,
resolveMediaToolLocalRoots,
resolveModelRuntimeApiKey,
resolvePromptAndModelOverride,
} from "./media-tool-shared.js";
@@ -30,7 +31,6 @@ import {
discoverAuthStorage,
discoverModels,
ensureOpenClawModelsJson,
normalizeWorkspaceDir,
resolveSandboxedBridgeMediaPath,
runWithImageModelFallback,
type AnyAgentTool,
@@ -327,17 +327,9 @@ export function createPdfTool(options?: {
? Math.floor(maxPagesDefault)
: DEFAULT_MAX_PAGES;
const localRoots = (() => {
const workspaceDir = normalizeWorkspaceDir(options?.workspaceDir);
if (options?.fsPolicy?.workspaceOnly) {
return workspaceDir ? [workspaceDir] : [];
}
const roots = getDefaultLocalRoots();
if (!workspaceDir) {
return roots;
}
return Array.from(new Set([...roots, workspaceDir]));
})();
const localRoots = resolveMediaToolLocalRoots(options?.workspaceDir, {
workspaceOnly: options?.fsPolicy?.workspaceOnly === true,
});
const description =
"Analyze one or more PDF documents with a model. Supports native PDF analysis for Anthropic and Google models, with text/image extraction fallback for other providers. Use pdf for a single path/URL, or pdfs for multiple (up to 10). Provide a prompt describing what to analyze.";