Merge branch 'openclaw:main' into qianfan

This commit is contained in:
ide-rea
2026-02-07 14:07:52 +08:00
committed by GitHub
231 changed files with 6276 additions and 1432 deletions

View File

@@ -1,4 +1,8 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import type { CliBackendConfig } from "../config/types.js";
import { runCliAgent } from "./cli-runner.js";
import { cleanupSuspendedCliProcesses } from "./cli-runner/helpers.js";
@@ -58,6 +62,85 @@ describe("runCliAgent resume cleanup", () => {
expect(pkillArgs[1]).toContain("resume");
expect(pkillArgs[1]).toContain("thread-123");
});
it("falls back to per-agent workspace when workspaceDir is missing", async () => {
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-cli-runner-"));
const fallbackWorkspace = path.join(tempDir, "workspace-main");
await fs.mkdir(fallbackWorkspace, { recursive: true });
const cfg = {
agents: {
defaults: {
workspace: fallbackWorkspace,
},
},
} satisfies OpenClawConfig;
runExecMock.mockResolvedValue({ stdout: "", stderr: "" });
runCommandWithTimeoutMock.mockResolvedValueOnce({
stdout: "ok",
stderr: "",
code: 0,
signal: null,
killed: false,
});
try {
await runCliAgent({
sessionId: "s1",
sessionKey: "agent:main:subagent:missing-workspace",
sessionFile: "/tmp/session.jsonl",
workspaceDir: undefined as unknown as string,
config: cfg,
prompt: "hi",
provider: "codex-cli",
model: "gpt-5.2-codex",
timeoutMs: 1_000,
runId: "run-1",
});
} finally {
await fs.rm(tempDir, { recursive: true, force: true });
}
const options = runCommandWithTimeoutMock.mock.calls[0]?.[1] as { cwd?: string };
expect(options.cwd).toBe(path.resolve(fallbackWorkspace));
});
it("throws when sessionKey is malformed", async () => {
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-cli-runner-"));
const mainWorkspace = path.join(tempDir, "workspace-main");
const researchWorkspace = path.join(tempDir, "workspace-research");
await fs.mkdir(mainWorkspace, { recursive: true });
await fs.mkdir(researchWorkspace, { recursive: true });
const cfg = {
agents: {
defaults: {
workspace: mainWorkspace,
},
list: [{ id: "research", workspace: researchWorkspace }],
},
} satisfies OpenClawConfig;
try {
await expect(
runCliAgent({
sessionId: "s1",
sessionKey: "agent::broken",
agentId: "research",
sessionFile: "/tmp/session.jsonl",
workspaceDir: undefined as unknown as string,
config: cfg,
prompt: "hi",
provider: "codex-cli",
model: "gpt-5.2-codex",
timeoutMs: 1_000,
runId: "run-2",
}),
).rejects.toThrow("Malformed agent session key");
} finally {
await fs.rm(tempDir, { recursive: true, force: true });
}
expect(runCommandWithTimeoutMock).not.toHaveBeenCalled();
});
});
describe("cleanupSuspendedCliProcesses", () => {

View File

@@ -7,7 +7,6 @@ import { shouldLogVerbose } from "../globals.js";
import { isTruthyEnvValue } from "../infra/env.js";
import { createSubsystemLogger } from "../logging/subsystem.js";
import { runCommandWithTimeout } from "../process/exec.js";
import { resolveUserPath } from "../utils.js";
import { resolveSessionAgentIds } from "./agent-scope.js";
import { makeBootstrapWarn, resolveBootstrapContextForRun } from "./bootstrap-files.js";
import { resolveCliBackendConfig } from "./cli-backends.js";
@@ -29,12 +28,14 @@ import {
import { resolveOpenClawDocsPath } from "./docs-path.js";
import { FailoverError, resolveFailoverStatus } from "./failover-error.js";
import { classifyFailoverReason, isFailoverErrorMessage } from "./pi-embedded-helpers.js";
import { redactRunIdentifier, resolveRunWorkspaceDir } from "./workspace-run.js";
const log = createSubsystemLogger("agent/claude-cli");
export async function runCliAgent(params: {
sessionId: string;
sessionKey?: string;
agentId?: string;
sessionFile: string;
workspaceDir: string;
config?: OpenClawConfig;
@@ -51,7 +52,21 @@ export async function runCliAgent(params: {
images?: ImageContent[];
}): Promise<EmbeddedPiRunResult> {
const started = Date.now();
const resolvedWorkspace = resolveUserPath(params.workspaceDir);
const workspaceResolution = resolveRunWorkspaceDir({
workspaceDir: params.workspaceDir,
sessionKey: params.sessionKey,
agentId: params.agentId,
config: params.config,
});
const resolvedWorkspace = workspaceResolution.workspaceDir;
const redactedSessionId = redactRunIdentifier(params.sessionId);
const redactedSessionKey = redactRunIdentifier(params.sessionKey);
const redactedWorkspace = redactRunIdentifier(resolvedWorkspace);
if (workspaceResolution.usedFallback) {
log.warn(
`[workspace-fallback] caller=runCliAgent reason=${workspaceResolution.fallbackReason} run=${params.runId} session=${redactedSessionId} sessionKey=${redactedSessionKey} agent=${workspaceResolution.agentId} workspace=${redactedWorkspace}`,
);
}
const workspaceDir = resolvedWorkspace;
const backendResolved = resolveCliBackendConfig(params.provider, params.config);
@@ -311,6 +326,7 @@ export async function runCliAgent(params: {
export async function runClaudeCliAgent(params: {
sessionId: string;
sessionKey?: string;
agentId?: string;
sessionFile: string;
workspaceDir: string;
config?: OpenClawConfig;
@@ -328,6 +344,7 @@ export async function runClaudeCliAgent(params: {
return runCliAgent({
sessionId: params.sessionId,
sessionKey: params.sessionKey,
agentId: params.agentId,
sessionFile: params.sessionFile,
workspaceDir: params.workspaceDir,
config: params.config,

View File

@@ -9,7 +9,7 @@ export type ResolvedMemorySearchConfig = {
enabled: boolean;
sources: Array<"memory" | "sessions">;
extraPaths: string[];
provider: "openai" | "local" | "gemini" | "auto";
provider: "openai" | "local" | "gemini" | "voyage" | "auto";
remote?: {
baseUrl?: string;
apiKey?: string;
@@ -25,7 +25,7 @@ export type ResolvedMemorySearchConfig = {
experimental: {
sessionMemory: boolean;
};
fallback: "openai" | "gemini" | "local" | "none";
fallback: "openai" | "gemini" | "local" | "voyage" | "none";
model: string;
local: {
modelPath?: string;
@@ -72,6 +72,7 @@ export type ResolvedMemorySearchConfig = {
const DEFAULT_OPENAI_MODEL = "text-embedding-3-small";
const DEFAULT_GEMINI_MODEL = "gemini-embedding-001";
const DEFAULT_VOYAGE_MODEL = "voyage-4-large";
const DEFAULT_CHUNK_TOKENS = 400;
const DEFAULT_CHUNK_OVERLAP = 80;
const DEFAULT_WATCH_DEBOUNCE_MS = 1500;
@@ -136,7 +137,11 @@ function mergeConfig(
defaultRemote?.headers,
);
const includeRemote =
hasRemoteConfig || provider === "openai" || provider === "gemini" || provider === "auto";
hasRemoteConfig ||
provider === "openai" ||
provider === "gemini" ||
provider === "voyage" ||
provider === "auto";
const batch = {
enabled: overrideRemote?.batch?.enabled ?? defaultRemote?.batch?.enabled ?? true,
wait: overrideRemote?.batch?.wait ?? defaultRemote?.batch?.wait ?? true,
@@ -163,7 +168,9 @@ function mergeConfig(
? DEFAULT_GEMINI_MODEL
: provider === "openai"
? DEFAULT_OPENAI_MODEL
: undefined;
: provider === "voyage"
? DEFAULT_VOYAGE_MODEL
: undefined;
const model = overrides?.model ?? defaults?.model ?? modelDefault ?? "";
const local = {
modelPath: overrides?.local?.modelPath ?? defaults?.local?.modelPath,

View File

@@ -463,4 +463,28 @@ describe("getApiKeyForModel", () => {
}
}
});
it("accepts VOYAGE_API_KEY for voyage", async () => {
const previous = process.env.VOYAGE_API_KEY;
try {
process.env.VOYAGE_API_KEY = "voyage-test-key";
vi.resetModules();
const { resolveApiKeyForProvider } = await import("./model-auth.js");
const resolved = await resolveApiKeyForProvider({
provider: "voyage",
store: { version: 1, profiles: {} },
});
expect(resolved.apiKey).toBe("voyage-test-key");
expect(resolved.source).toContain("VOYAGE_API_KEY");
} finally {
if (previous === undefined) {
delete process.env.VOYAGE_API_KEY;
} else {
process.env.VOYAGE_API_KEY = previous;
}
}
});
});

View File

@@ -287,6 +287,7 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
const envMap: Record<string, string> = {
openai: "OPENAI_API_KEY",
google: "GEMINI_API_KEY",
voyage: "VOYAGE_API_KEY",
groq: "GROQ_API_KEY",
deepgram: "DEEPGRAM_API_KEY",
cerebras: "CEREBRAS_API_KEY",

View File

@@ -45,8 +45,12 @@ describe("sessions_spawn thinking defaults", () => {
const agentCall = calls
.map((call) => call[0] as { method: string; params?: Record<string, unknown> })
.findLast((call) => call.method === "agent");
const thinkingPatch = calls
.map((call) => call[0] as { method: string; params?: Record<string, unknown> })
.findLast((call) => call.method === "sessions.patch" && call.params?.thinkingLevel);
expect(agentCall?.params?.thinking).toBe("high");
expect(thinkingPatch?.params?.thinkingLevel).toBe("high");
});
it("prefers explicit sessions_spawn.thinking over config default", async () => {
@@ -60,7 +64,11 @@ describe("sessions_spawn thinking defaults", () => {
const agentCall = calls
.map((call) => call[0] as { method: string; params?: Record<string, unknown> })
.findLast((call) => call.method === "agent");
const thinkingPatch = calls
.map((call) => call[0] as { method: string; params?: Record<string, unknown> })
.findLast((call) => call.method === "sessions.patch" && call.params?.thinkingLevel);
expect(agentCall?.params?.thinking).toBe("low");
expect(thinkingPatch?.params?.thinkingLevel).toBe("low");
});
});

View File

@@ -219,6 +219,75 @@ describe("runEmbeddedPiAgent", () => {
await expect(fs.stat(path.join(agentDir, "models.json"))).resolves.toBeTruthy();
});
it("falls back to per-agent workspace when runtime workspaceDir is missing", async () => {
const sessionFile = nextSessionFile();
const fallbackWorkspace = path.join(tempRoot ?? os.tmpdir(), "workspace-fallback-main");
const cfg = {
...makeOpenAiConfig(["mock-1"]),
agents: {
defaults: {
workspace: fallbackWorkspace,
},
},
} satisfies OpenClawConfig;
await ensureModels(cfg);
const result = await runEmbeddedPiAgent({
sessionId: "session:test-fallback",
sessionKey: "agent:main:subagent:fallback-workspace",
sessionFile,
workspaceDir: undefined as unknown as string,
config: cfg,
prompt: "hello",
provider: "openai",
model: "mock-1",
timeoutMs: 5_000,
agentDir,
runId: "run-fallback-workspace",
enqueue: immediateEnqueue,
});
expect(result.payloads?.[0]?.text).toBe("ok");
await expect(fs.stat(fallbackWorkspace)).resolves.toBeTruthy();
});
it("throws when sessionKey is malformed", async () => {
const sessionFile = nextSessionFile();
const cfg = {
...makeOpenAiConfig(["mock-1"]),
agents: {
defaults: {
workspace: path.join(tempRoot ?? os.tmpdir(), "workspace-fallback-main"),
},
list: [
{
id: "research",
workspace: path.join(tempRoot ?? os.tmpdir(), "workspace-fallback-research"),
},
],
},
} satisfies OpenClawConfig;
await ensureModels(cfg);
await expect(
runEmbeddedPiAgent({
sessionId: "session:test-fallback-malformed",
sessionKey: "agent::broken",
agentId: "research",
sessionFile,
workspaceDir: undefined as unknown as string,
config: cfg,
prompt: "hello",
provider: "openai",
model: "mock-1",
timeoutMs: 5_000,
agentDir,
runId: "run-fallback-workspace-malformed",
enqueue: immediateEnqueue,
}),
).rejects.toThrow("Malformed agent session key");
});
itIfNotWin32(
"persists the first user message before assistant output",
{ timeout: 120_000 },

View File

@@ -172,6 +172,41 @@ describe("resolveModel", () => {
});
});
it("builds an anthropic forward-compat fallback for claude-opus-4-6", () => {
const templateModel = {
id: "claude-opus-4-5",
name: "Claude Opus 4.5",
provider: "anthropic",
api: "anthropic-messages",
baseUrl: "https://api.anthropic.com",
reasoning: true,
input: ["text", "image"] as const,
cost: { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
contextWindow: 200000,
maxTokens: 64000,
};
vi.mocked(discoverModels).mockReturnValue({
find: vi.fn((provider: string, modelId: string) => {
if (provider === "anthropic" && modelId === "claude-opus-4-5") {
return templateModel;
}
return null;
}),
} as unknown as ReturnType<typeof discoverModels>);
const result = resolveModel("anthropic", "claude-opus-4-6", "/tmp/agent");
expect(result.error).toBeUndefined();
expect(result.model).toMatchObject({
provider: "anthropic",
id: "claude-opus-4-6",
api: "anthropic-messages",
baseUrl: "https://api.anthropic.com",
reasoning: true,
});
});
it("keeps unknown-model errors for non-gpt-5 openai-codex ids", () => {
const result = resolveModel("openai-codex", "gpt-4.1-mini", "/tmp/agent");
expect(result.model).toBeUndefined();

View File

@@ -23,6 +23,12 @@ const OPENAI_CODEX_GPT_53_MODEL_ID = "gpt-5.3-codex";
const OPENAI_CODEX_TEMPLATE_MODEL_IDS = ["gpt-5.2-codex"] as const;
// pi-ai's built-in Anthropic catalog can lag behind OpenClaw's defaults/docs.
// Add forward-compat fallbacks for known-new IDs by cloning an older template model.
const ANTHROPIC_OPUS_46_MODEL_ID = "claude-opus-4-6";
const ANTHROPIC_OPUS_46_DOT_MODEL_ID = "claude-opus-4.6";
const ANTHROPIC_OPUS_TEMPLATE_MODEL_IDS = ["claude-opus-4-5", "claude-opus-4.5"] as const;
function resolveOpenAICodexGpt53FallbackModel(
provider: string,
modelId: string,
@@ -63,6 +69,51 @@ function resolveOpenAICodexGpt53FallbackModel(
} as Model<Api>);
}
function resolveAnthropicOpus46ForwardCompatModel(
provider: string,
modelId: string,
modelRegistry: ModelRegistry,
): Model<Api> | undefined {
const normalizedProvider = normalizeProviderId(provider);
if (normalizedProvider !== "anthropic") {
return undefined;
}
const trimmedModelId = modelId.trim();
const lower = trimmedModelId.toLowerCase();
const isOpus46 =
lower === ANTHROPIC_OPUS_46_MODEL_ID ||
lower === ANTHROPIC_OPUS_46_DOT_MODEL_ID ||
lower.startsWith(`${ANTHROPIC_OPUS_46_MODEL_ID}-`) ||
lower.startsWith(`${ANTHROPIC_OPUS_46_DOT_MODEL_ID}-`);
if (!isOpus46) {
return undefined;
}
const templateIds: string[] = [];
if (lower.startsWith(ANTHROPIC_OPUS_46_MODEL_ID)) {
templateIds.push(lower.replace(ANTHROPIC_OPUS_46_MODEL_ID, "claude-opus-4-5"));
}
if (lower.startsWith(ANTHROPIC_OPUS_46_DOT_MODEL_ID)) {
templateIds.push(lower.replace(ANTHROPIC_OPUS_46_DOT_MODEL_ID, "claude-opus-4.5"));
}
templateIds.push(...ANTHROPIC_OPUS_TEMPLATE_MODEL_IDS);
for (const templateId of [...new Set(templateIds)].filter(Boolean)) {
const template = modelRegistry.find(normalizedProvider, templateId) as Model<Api> | null;
if (!template) {
continue;
}
return normalizeModelCompat({
...template,
id: trimmedModelId,
name: trimmedModelId,
} as Model<Api>);
}
return undefined;
}
export function buildInlineProviderModels(
providers: Record<string, InlineProviderConfig>,
): InlineModelEntry[] {
@@ -140,6 +191,14 @@ export function resolveModel(
if (codexForwardCompat) {
return { model: codexForwardCompat, authStorage, modelRegistry };
}
const anthropicForwardCompat = resolveAnthropicOpus46ForwardCompatModel(
provider,
modelId,
modelRegistry,
);
if (anthropicForwardCompat) {
return { model: anthropicForwardCompat, authStorage, modelRegistry };
}
const providerCfg = providers[provider];
if (providerCfg || modelId.startsWith("mock-")) {
const fallbackModel: Model<Api> = normalizeModelCompat({

View File

@@ -3,7 +3,6 @@ import type { ThinkLevel } from "../../auto-reply/thinking.js";
import type { RunEmbeddedPiAgentParams } from "./run/params.js";
import type { EmbeddedPiAgentMeta, EmbeddedPiRunResult } from "./types.js";
import { enqueueCommandInLane } from "../../process/command-queue.js";
import { resolveUserPath } from "../../utils.js";
import { isMarkdownCapableMessageChannel } from "../../utils/message-channel.js";
import { resolveOpenClawAgentDir } from "../agent-paths.js";
import {
@@ -46,6 +45,7 @@ import {
type FailoverReason,
} from "../pi-embedded-helpers.js";
import { normalizeUsage, type UsageLike } from "../usage.js";
import { redactRunIdentifier, resolveRunWorkspaceDir } from "../workspace-run.js";
import { compactEmbeddedPiSessionDirect } from "./compact.js";
import { resolveGlobalLane, resolveSessionLane } from "./lanes.js";
import { log } from "./logger.js";
@@ -92,7 +92,21 @@ export async function runEmbeddedPiAgent(
return enqueueSession(() =>
enqueueGlobal(async () => {
const started = Date.now();
const resolvedWorkspace = resolveUserPath(params.workspaceDir);
const workspaceResolution = resolveRunWorkspaceDir({
workspaceDir: params.workspaceDir,
sessionKey: params.sessionKey,
agentId: params.agentId,
config: params.config,
});
const resolvedWorkspace = workspaceResolution.workspaceDir;
const redactedSessionId = redactRunIdentifier(params.sessionId);
const redactedSessionKey = redactRunIdentifier(params.sessionKey);
const redactedWorkspace = redactRunIdentifier(resolvedWorkspace);
if (workspaceResolution.usedFallback) {
log.warn(
`[workspace-fallback] caller=runEmbeddedPiAgent reason=${workspaceResolution.fallbackReason} run=${params.runId} session=${redactedSessionId} sessionKey=${redactedSessionKey} agent=${workspaceResolution.agentId} workspace=${redactedWorkspace}`,
);
}
const prevCwd = process.cwd();
const provider = (params.provider ?? DEFAULT_PROVIDER).trim() || DEFAULT_PROVIDER;
@@ -333,7 +347,7 @@ export async function runEmbeddedPiAgent(
replyToMode: params.replyToMode,
hasRepliedRef: params.hasRepliedRef,
sessionFile: params.sessionFile,
workspaceDir: params.workspaceDir,
workspaceDir: resolvedWorkspace,
agentDir,
config: params.config,
skillsSnapshot: params.skillsSnapshot,
@@ -345,6 +359,7 @@ export async function runEmbeddedPiAgent(
model,
authStorage,
modelRegistry,
agentId: workspaceResolution.agentId,
thinkLevel,
verboseLevel: params.verboseLevel,
reasoningLevel: params.reasoningLevel,
@@ -401,7 +416,7 @@ export async function runEmbeddedPiAgent(
agentAccountId: params.agentAccountId,
authProfileId: lastProfileId,
sessionFile: params.sessionFile,
workspaceDir: params.workspaceDir,
workspaceDir: resolvedWorkspace,
agentDir,
config: params.config,
skillsSnapshot: params.skillsSnapshot,

View File

@@ -10,7 +10,7 @@ import { resolveChannelCapabilities } from "../../../config/channel-capabilities
import { getMachineDisplayName } from "../../../infra/machine-name.js";
import { MAX_IMAGE_BYTES } from "../../../media/constants.js";
import { getGlobalHookRunner } from "../../../plugins/hook-runner-global.js";
import { isSubagentSessionKey } from "../../../routing/session-key.js";
import { isSubagentSessionKey, normalizeAgentId } from "../../../routing/session-key.js";
import { resolveSignalReactionLevel } from "../../../signal/reaction-level.js";
import { resolveTelegramInlineButtonsScope } from "../../../telegram/inline-buttons.js";
import { resolveTelegramReactionLevel } from "../../../telegram/reaction-level.js";
@@ -705,6 +705,13 @@ export async function runEmbeddedAttempt(
// Get hook runner once for both before_agent_start and agent_end hooks
const hookRunner = getGlobalHookRunner();
const hookAgentId =
typeof params.agentId === "string" && params.agentId.trim()
? normalizeAgentId(params.agentId)
: resolveSessionAgentIds({
sessionKey: params.sessionKey,
config: params.config,
}).sessionAgentId;
let promptError: unknown = null;
try {
@@ -720,7 +727,7 @@ export async function runEmbeddedAttempt(
messages: activeSession.messages,
},
{
agentId: params.sessionKey?.split(":")[0] ?? "main",
agentId: hookAgentId,
sessionKey: params.sessionKey,
workspaceDir: params.workspaceDir,
messageProvider: params.messageProvider ?? undefined,
@@ -850,7 +857,7 @@ export async function runEmbeddedAttempt(
durationMs: Date.now() - promptStartedAt,
},
{
agentId: params.sessionKey?.split(":")[0] ?? "main",
agentId: hookAgentId,
sessionKey: params.sessionKey,
workspaceDir: params.workspaceDir,
messageProvider: params.messageProvider ?? undefined,

View File

@@ -20,6 +20,7 @@ export type ClientToolDefinition = {
export type RunEmbeddedPiAgentParams = {
sessionId: string;
sessionKey?: string;
agentId?: string;
messageChannel?: string;
messageProvider?: string;
agentAccountId?: string;

View File

@@ -14,6 +14,7 @@ import type { ClientToolDefinition } from "./params.js";
export type EmbeddedRunAttemptParams = {
sessionId: string;
sessionKey?: string;
agentId?: string;
messageChannel?: string;
messageProvider?: string;
agentAccountId?: string;

View File

@@ -148,7 +148,7 @@ describe("Agent-specific tool filtering", () => {
workspaceDir: "/tmp/test-provider",
agentDir: "/tmp/agent-provider",
modelProvider: "google-antigravity",
modelId: "claude-opus-4-5-thinking",
modelId: "claude-opus-4-6-thinking",
});
const toolNames = tools.map((t) => t.name);
@@ -176,7 +176,7 @@ describe("Agent-specific tool filtering", () => {
workspaceDir: "/tmp/test-provider-profile",
agentDir: "/tmp/agent-provider-profile",
modelProvider: "google-antigravity",
modelId: "claude-opus-4-5-thinking",
modelId: "claude-opus-4-6-thinking",
});
const toolNames = tools.map((t) => t.name);

View File

@@ -30,8 +30,8 @@ describe("cron tool", () => {
],
["remove", { action: "remove", jobId: "job-1" }, { id: "job-1" }],
["remove", { action: "remove", id: "job-2" }, { id: "job-2" }],
["run", { action: "run", jobId: "job-1" }, { id: "job-1" }],
["run", { action: "run", id: "job-2" }, { id: "job-2" }],
["run", { action: "run", jobId: "job-1" }, { id: "job-1", mode: "force" }],
["run", { action: "run", id: "job-2" }, { id: "job-2", mode: "force" }],
["runs", { action: "runs", jobId: "job-1" }, { id: "job-1" }],
["runs", { action: "runs", id: "job-2" }, { id: "job-2" }],
])("%s sends id to gateway", async (action, args, expectedParams) => {
@@ -58,7 +58,21 @@ describe("cron tool", () => {
const call = callGatewayMock.mock.calls[0]?.[0] as {
params?: unknown;
};
expect(call?.params).toEqual({ id: "job-primary" });
expect(call?.params).toEqual({ id: "job-primary", mode: "force" });
});
it("supports due-only run mode", async () => {
const tool = createCronTool();
await tool.execute("call-due", {
action: "run",
jobId: "job-due",
runMode: "due",
});
const call = callGatewayMock.mock.calls[0]?.[0] as {
params?: unknown;
};
expect(call?.params).toEqual({ id: "job-due", mode: "due" });
});
it("normalizes cron.add job payloads", async () => {
@@ -86,7 +100,7 @@ describe("cron tool", () => {
deleteAfterRun: true,
schedule: { kind: "at", at: new Date(123).toISOString() },
sessionTarget: "main",
wakeMode: "next-heartbeat",
wakeMode: "now",
payload: { kind: "systemEvent", text: "hello" },
});
});

View File

@@ -18,6 +18,7 @@ import { resolveInternalSessionKey, resolveMainSessionAlias } from "./sessions-h
const CRON_ACTIONS = ["status", "list", "add", "update", "remove", "run", "runs", "wake"] as const;
const CRON_WAKE_MODES = ["now", "next-heartbeat"] as const;
const CRON_RUN_MODES = ["due", "force"] as const;
const REMINDER_CONTEXT_MESSAGES_MAX = 10;
const REMINDER_CONTEXT_PER_MESSAGE_MAX = 220;
@@ -37,6 +38,7 @@ const CronToolSchema = Type.Object({
patch: Type.Optional(Type.Object({}, { additionalProperties: true })),
text: Type.Optional(Type.String()),
mode: optionalStringEnum(CRON_WAKE_MODES),
runMode: optionalStringEnum(CRON_RUN_MODES),
contextMessages: Type.Optional(
Type.Number({ minimum: 0, maximum: REMINDER_CONTEXT_MESSAGES_MAX }),
),
@@ -312,7 +314,6 @@ Use jobId as the canonical identifier; id is accepted for compatibility. Use con
}
}
// [Fix Issue 3] Infer delivery target from session key for isolated jobs if not provided
if (
opts?.agentSessionKey &&
job &&
@@ -393,7 +394,9 @@ Use jobId as the canonical identifier; id is accepted for compatibility. Use con
if (!id) {
throw new Error("jobId required (id accepted for backward compatibility)");
}
return jsonResult(await callGatewayTool("cron.run", gatewayOpts, { id }));
const runMode =
params.runMode === "due" || params.runMode === "force" ? params.runMode : "force";
return jsonResult(await callGatewayTool("cron.run", gatewayOpts, { id, mode: runMode }));
}
case "runs": {
const id = readStringParam(params, "jobId") ?? readStringParam(params, "id");

View File

@@ -214,6 +214,26 @@ export function createSessionsSpawnTool(opts?: {
modelWarning = messageText;
}
}
if (thinkingOverride !== undefined) {
try {
await callGateway({
method: "sessions.patch",
params: {
key: childSessionKey,
thinkingLevel: thinkingOverride === "off" ? null : thinkingOverride,
},
timeoutMs: 10_000,
});
} catch (err) {
const messageText =
err instanceof Error ? err.message : typeof err === "string" ? err : "error";
return jsonResult({
status: "error",
error: messageText,
childSessionKey,
});
}
}
const childSystemPrompt = buildSubagentSystemPrompt({
requesterSessionKey,
requesterOrigin,

View File

@@ -0,0 +1,139 @@
import os from "node:os";
import path from "node:path";
import { describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { resolveRunWorkspaceDir } from "./workspace-run.js";
import { DEFAULT_AGENT_WORKSPACE_DIR } from "./workspace.js";
// Unit tests for resolveRunWorkspaceDir: explicit-workspace passthrough,
// per-agent and default fallbacks, and malformed-session-key rejection.
describe("resolveRunWorkspaceDir", () => {
// An explicit, non-blank workspace string is used as-is (path-resolved),
// with no fallback — the agent id is still parsed from the session key.
it("resolves explicit workspace values without fallback", () => {
const explicit = path.join(process.cwd(), "tmp", "workspace-run-explicit");
const result = resolveRunWorkspaceDir({
workspaceDir: explicit,
sessionKey: "agent:main:subagent:test",
});
expect(result.usedFallback).toBe(false);
expect(result.agentId).toBe("main");
expect(result.workspaceDir).toBe(path.resolve(explicit));
});
// workspaceDir === undefined → fallbackReason "missing"; the per-agent
// workspace from config.agents.list wins over the defaults entry.
it("falls back to configured per-agent workspace when input is missing", () => {
const defaultWorkspace = path.join(process.cwd(), "tmp", "workspace-default-main");
const researchWorkspace = path.join(process.cwd(), "tmp", "workspace-research");
const cfg = {
agents: {
defaults: { workspace: defaultWorkspace },
list: [{ id: "research", workspace: researchWorkspace }],
},
} satisfies OpenClawConfig;
const result = resolveRunWorkspaceDir({
workspaceDir: undefined,
sessionKey: "agent:research:subagent:test",
config: cfg,
});
expect(result.usedFallback).toBe(true);
expect(result.fallbackReason).toBe("missing");
expect(result.agentId).toBe("research");
expect(result.workspaceDir).toBe(path.resolve(researchWorkspace));
});
// A whitespace-only string is distinguished from an absent value:
// fallbackReason is "blank", not "missing".
it("falls back to default workspace for blank strings", () => {
const defaultWorkspace = path.join(process.cwd(), "tmp", "workspace-default-main");
const cfg = {
agents: {
defaults: { workspace: defaultWorkspace },
},
} satisfies OpenClawConfig;
const result = resolveRunWorkspaceDir({
workspaceDir: " ",
sessionKey: "agent:main:subagent:test",
config: cfg,
});
expect(result.usedFallback).toBe(true);
expect(result.fallbackReason).toBe("blank");
expect(result.agentId).toBe("main");
expect(result.workspaceDir).toBe(path.resolve(defaultWorkspace));
});
// With no config at all, the built-in DEFAULT_AGENT_WORKSPACE_DIR is the
// last-resort fallback. workspaceDir: null also counts as "missing".
it("falls back to built-in main workspace when config is unavailable", () => {
const result = resolveRunWorkspaceDir({
workspaceDir: null,
sessionKey: "agent:main:subagent:test",
config: undefined,
});
expect(result.usedFallback).toBe(true);
expect(result.fallbackReason).toBe("missing");
expect(result.agentId).toBe("main");
expect(result.workspaceDir).toBe(path.resolve(DEFAULT_AGENT_WORKSPACE_DIR));
});
// "agent::broken" has the agent prefix but an empty agent id — resolution
// must refuse rather than guess a workspace.
it("throws for malformed agent session keys", () => {
expect(() =>
resolveRunWorkspaceDir({
workspaceDir: undefined,
sessionKey: "agent::broken",
config: undefined,
}),
).toThrow("Malformed agent session key");
});
// An explicit agentId overrides whatever the session key says; without
// config the per-agent fallback lands under ~/.openclaw/workspace-<id>.
it("uses explicit agent id for per-agent fallback when config is unavailable", () => {
const result = resolveRunWorkspaceDir({
workspaceDir: undefined,
sessionKey: "definitely-not-a-valid-session-key",
agentId: "research",
config: undefined,
});
expect(result.agentId).toBe("research");
expect(result.agentIdSource).toBe("explicit");
expect(result.workspaceDir).toBe(path.resolve(os.homedir(), ".openclaw", "workspace-research"));
});
// Malformed keys must throw even when config could supply a default agent —
// the error takes precedence over any fallback path.
it("throws for malformed agent session keys even when config has a default agent", () => {
const mainWorkspace = path.join(process.cwd(), "tmp", "workspace-main-default");
const researchWorkspace = path.join(process.cwd(), "tmp", "workspace-research-default");
const cfg = {
agents: {
defaults: { workspace: mainWorkspace },
list: [
{ id: "main", workspace: mainWorkspace },
{ id: "research", workspace: researchWorkspace, default: true },
],
},
} satisfies OpenClawConfig;
expect(() =>
resolveRunWorkspaceDir({
workspaceDir: undefined,
sessionKey: "agent::broken",
config: cfg,
}),
).toThrow("Malformed agent session key");
});
// Keys without the "agent:" prefix are legacy/alias keys, not malformed —
// they resolve to the default agent's workspace.
it("treats non-agent legacy keys as default, not malformed", () => {
const fallbackWorkspace = path.join(process.cwd(), "tmp", "workspace-default-legacy");
const cfg = {
agents: {
defaults: { workspace: fallbackWorkspace },
},
} satisfies OpenClawConfig;
const result = resolveRunWorkspaceDir({
workspaceDir: undefined,
sessionKey: "custom-main-key",
config: cfg,
});
expect(result.agentId).toBe("main");
expect(result.agentIdSource).toBe("default");
expect(result.workspaceDir).toBe(path.resolve(fallbackWorkspace));
});
});

106
src/agents/workspace-run.ts Normal file
View File

@@ -0,0 +1,106 @@
import type { OpenClawConfig } from "../config/config.js";
import { redactIdentifier } from "../logging/redact-identifier.js";
import {
classifySessionKeyShape,
DEFAULT_AGENT_ID,
normalizeAgentId,
parseAgentSessionKey,
} from "../routing/session-key.js";
import { resolveUserPath } from "../utils.js";
import { resolveAgentWorkspaceDir, resolveDefaultAgentId } from "./agent-scope.js";
export type WorkspaceFallbackReason = "missing" | "blank" | "invalid_type";
type AgentIdSource = "explicit" | "session_key" | "default";
export type ResolveRunWorkspaceResult = {
workspaceDir: string;
usedFallback: boolean;
fallbackReason?: WorkspaceFallbackReason;
agentId: string;
agentIdSource: AgentIdSource;
};
function resolveRunAgentId(params: {
sessionKey?: string;
agentId?: string;
config?: OpenClawConfig;
}): {
agentId: string;
agentIdSource: AgentIdSource;
} {
const rawSessionKey = params.sessionKey?.trim() ?? "";
const shape = classifySessionKeyShape(rawSessionKey);
if (shape === "malformed_agent") {
throw new Error("Malformed agent session key; refusing workspace resolution.");
}
const explicit =
typeof params.agentId === "string" && params.agentId.trim()
? normalizeAgentId(params.agentId)
: undefined;
if (explicit) {
return { agentId: explicit, agentIdSource: "explicit" };
}
const defaultAgentId = resolveDefaultAgentId(params.config ?? {});
if (shape === "missing" || shape === "legacy_or_alias") {
return {
agentId: defaultAgentId || DEFAULT_AGENT_ID,
agentIdSource: "default",
};
}
const parsed = parseAgentSessionKey(rawSessionKey);
if (parsed?.agentId) {
return {
agentId: normalizeAgentId(parsed.agentId),
agentIdSource: "session_key",
};
}
// Defensive fallback, should be unreachable for non-malformed shapes.
return {
agentId: defaultAgentId || DEFAULT_AGENT_ID,
agentIdSource: "default",
};
}
/**
 * Redact a run/session identifier for log output, delegating to the shared
 * `redactIdentifier` helper with a fixed length of 12.
 *
 * @param value - The identifier to redact; may be undefined.
 * @returns The redacted string produced by `redactIdentifier`.
 */
export function redactRunIdentifier(value: string | undefined): string {
  return redactIdentifier(value, { len: 12 });
}
/**
 * Resolve the working directory for an agent run.
 *
 * A non-blank string in `workspaceDir` is honored as-is (after user-path
 * expansion). Anything else — null/undefined, a blank string, or a
 * non-string value — falls back to the per-agent workspace derived from
 * config, with the reason recorded so callers can log a warning.
 *
 * @throws Error (via agent-id resolution) for malformed agent session keys.
 */
export function resolveRunWorkspaceDir(params: {
  workspaceDir: unknown;
  sessionKey?: string;
  agentId?: string;
  config?: OpenClawConfig;
}): ResolveRunWorkspaceResult {
  // Resolve the agent first: a malformed session key must throw before any
  // workspace decision is made.
  const { agentId, agentIdSource } = resolveRunAgentId({
    sessionKey: params.sessionKey,
    agentId: params.agentId,
    config: params.config,
  });
  const requested = params.workspaceDir;
  if (typeof requested === "string" && requested.trim()) {
    return {
      workspaceDir: resolveUserPath(requested.trim()),
      usedFallback: false,
      agentId,
      agentIdSource,
    };
  }
  // Classify why the requested value was unusable (for the warning log).
  let fallbackReason: WorkspaceFallbackReason;
  if (requested == null) {
    fallbackReason = "missing";
  } else if (typeof requested === "string") {
    fallbackReason = "blank";
  } else {
    fallbackReason = "invalid_type";
  }
  const fallbackWorkspace = resolveAgentWorkspaceDir(params.config ?? {}, agentId);
  return {
    workspaceDir: resolveUserPath(fallbackWorkspace),
    usedFallback: true,
    fallbackReason,
    agentId,
    agentIdSource,
  };
}

View File

@@ -178,6 +178,7 @@ export async function runAgentTurnWithFallback(params: {
const result = await runCliAgent({
sessionId: params.followupRun.run.sessionId,
sessionKey: params.sessionKey,
agentId: params.followupRun.run.agentId,
sessionFile: params.followupRun.run.sessionFile,
workspaceDir: params.followupRun.run.workspaceDir,
config: params.followupRun.run.config,
@@ -255,6 +256,7 @@ export async function runAgentTurnWithFallback(params: {
return runEmbeddedPiAgent({
sessionId: params.followupRun.run.sessionId,
sessionKey: params.sessionKey,
agentId: params.followupRun.run.agentId,
messageProvider: params.sessionCtx.Provider?.trim().toLowerCase() || undefined,
agentAccountId: params.sessionCtx.AccountId,
messageTo: params.sessionCtx.OriginatingTo ?? params.sessionCtx.To,

View File

@@ -113,6 +113,7 @@ export async function runMemoryFlushIfNeeded(params: {
return runEmbeddedPiAgent({
sessionId: params.followupRun.run.sessionId,
sessionKey: params.sessionKey,
agentId: params.followupRun.run.agentId,
messageProvider: params.sessionCtx.Provider?.trim().toLowerCase() || undefined,
agentAccountId: params.sessionCtx.AccountId,
messageTo: params.sessionCtx.OriginatingTo ?? params.sessionCtx.To,

View File

@@ -140,6 +140,7 @@ export function createFollowupRunner(params: {
return runEmbeddedPiAgent({
sessionId: queued.run.sessionId,
sessionKey: queued.run.sessionKey,
agentId: queued.run.agentId,
messageProvider: queued.run.messageProvider,
agentAccountId: queued.run.agentAccountId,
messageTo: queued.originatingTo,

View File

@@ -71,7 +71,7 @@ export function registerCronAddCommand(cron: Command) {
.option("--keep-after-run", "Keep one-shot job after it succeeds", false)
.option("--agent <id>", "Agent id for this job")
.option("--session <target>", "Session target (main|isolated)")
.option("--wake <mode>", "Wake mode (now|next-heartbeat)", "next-heartbeat")
.option("--wake <mode>", "Wake mode (now|next-heartbeat)", "now")
.option("--at <when>", "Run once at time (ISO) or +duration (e.g. 20m)")
.option("--every <duration>", "Run every duration (e.g. 10m, 1h)")
.option("--cron <expr>", "Cron expression (5-field)")
@@ -122,8 +122,8 @@ export function registerCronAddCommand(cron: Command) {
};
})();
const wakeModeRaw = typeof opts.wake === "string" ? opts.wake : "next-heartbeat";
const wakeMode = wakeModeRaw.trim() || "next-heartbeat";
const wakeModeRaw = typeof opts.wake === "string" ? opts.wake : "now";
const wakeMode = wakeModeRaw.trim() || "now";
if (wakeMode !== "now" && wakeMode !== "next-heartbeat") {
throw new Error("--wake must be now or next-heartbeat");
}

View File

@@ -92,12 +92,12 @@ export function registerCronSimpleCommands(cron: Command) {
.command("run")
.description("Run a cron job now (debug)")
.argument("<id>", "Job id")
.option("--force", "Run even if not due", false)
.option("--due", "Run only when due (default behavior in older versions)", false)
.action(async (id, opts) => {
try {
const res = await callGatewayFromCli("cron.run", opts, {
id,
mode: opts.force ? "force" : "due",
mode: opts.due ? "due" : "force",
});
defaultRuntime.log(JSON.stringify(res, null, 2));
} catch (err) {

View File

@@ -398,6 +398,7 @@ export async function agentCommand(
return runCliAgent({
sessionId,
sessionKey,
agentId: sessionAgentId,
sessionFile,
workspaceDir,
config: cfg,
@@ -418,6 +419,7 @@ export async function agentCommand(
return runEmbeddedPiAgent({
sessionId,
sessionKey,
agentId: sessionAgentId,
messageChannel,
agentAccountId: runContext.accountId,
messageTo: opts.replyTo ?? opts.to,

View File

@@ -319,6 +319,7 @@ async function probeTarget(params: {
await runEmbeddedPiAgent({
sessionId,
sessionFile,
agentId,
workspaceDir,
agentDir,
config: cfg,

View File

@@ -1,4 +1,3 @@
import crypto from "node:crypto";
import fs from "node:fs";
import type {
ChannelAccountSnapshot,
@@ -8,6 +7,7 @@ import type {
import type { OpenClawConfig } from "../../config/config.js";
import { resolveChannelDefaultAccountId } from "../../channels/plugins/helpers.js";
import { listChannelPlugins } from "../../channels/plugins/index.js";
import { sha256HexPrefix } from "../../logging/redact-identifier.js";
import { formatAge } from "./format.js";
export type ChannelRow = {
@@ -57,17 +57,13 @@ function existsSyncMaybe(p: string | undefined): boolean | null {
}
}
function sha256HexPrefix(value: string, len = 8): string {
return crypto.createHash("sha256").update(value).digest("hex").slice(0, len);
}
function formatTokenHint(token: string, opts: { showSecrets: boolean }): string {
const t = token.trim();
if (!t) {
return "empty";
}
if (!opts.showSecrets) {
return `sha256:${sha256HexPrefix(t)} · len ${t.length}`;
return `sha256:${sha256HexPrefix(t, 8)} · len ${t.length}`;
}
const head = t.slice(0, 4);
const tail = t.slice(-4);

View File

@@ -542,7 +542,8 @@ const FIELD_HELP: Record<string, string> = {
"Extra paths to include in memory search (directories or .md files; relative paths resolved from workspace).",
"agents.defaults.memorySearch.experimental.sessionMemory":
"Enable experimental session transcript indexing for memory search (default: false).",
"agents.defaults.memorySearch.provider": 'Embedding provider ("openai", "gemini", or "local").',
"agents.defaults.memorySearch.provider":
'Embedding provider ("openai", "gemini", "voyage", or "local").',
"agents.defaults.memorySearch.remote.baseUrl":
"Custom base URL for remote embeddings (OpenAI-compatible proxies or Gemini overrides).",
"agents.defaults.memorySearch.remote.apiKey": "Custom API key for the remote embedding provider.",

View File

@@ -234,7 +234,7 @@ export type MemorySearchConfig = {
sessionMemory?: boolean;
};
/** Embedding provider mode. */
provider?: "openai" | "gemini" | "local";
provider?: "openai" | "gemini" | "local" | "voyage";
remote?: {
baseUrl?: string;
apiKey?: string;
@@ -253,7 +253,7 @@ export type MemorySearchConfig = {
};
};
/** Fallback behavior when embeddings fail. */
fallback?: "openai" | "gemini" | "local" | "none";
fallback?: "openai" | "gemini" | "local" | "voyage" | "none";
/** Embedding model id (remote) or alias (local). */
model?: string;
/** Local embedding settings (node-llama-cpp). */

View File

@@ -318,7 +318,9 @@ export const MemorySearchSchema = z
})
.strict()
.optional(),
provider: z.union([z.literal("openai"), z.literal("local"), z.literal("gemini")]).optional(),
provider: z
.union([z.literal("openai"), z.literal("local"), z.literal("gemini"), z.literal("voyage")])
.optional(),
remote: z
.object({
baseUrl: z.string().optional(),
@@ -338,7 +340,13 @@ export const MemorySearchSchema = z
.strict()
.optional(),
fallback: z
.union([z.literal("openai"), z.literal("gemini"), z.literal("local"), z.literal("none")])
.union([
z.literal("openai"),
z.literal("gemini"),
z.literal("local"),
z.literal("voyage"),
z.literal("none"),
])
.optional(),
model: z.string().optional(),
local: z

45
src/cron/delivery.test.ts Normal file
View File

@@ -0,0 +1,45 @@
import { describe, expect, it } from "vitest";
import type { CronJob } from "./types.js";
import { resolveCronDeliveryPlan } from "./delivery.js";
/** Build a CronJob fixture with sane defaults; any field can be overridden per test. */
function makeJob(overrides: Partial<CronJob>): CronJob {
  const timestamp = Date.now();
  const base: CronJob = {
    id: "job-1",
    name: "test",
    enabled: true,
    createdAtMs: timestamp,
    updatedAtMs: timestamp,
    schedule: { kind: "every", everyMs: 60_000 },
    sessionTarget: "isolated",
    wakeMode: "next-heartbeat",
    payload: { kind: "agentTurn", message: "hello" },
    state: {},
  };
  return { ...base, ...overrides };
}
describe("resolveCronDeliveryPlan", () => {
  it("defaults to announce when delivery object has no mode", () => {
    const job = makeJob({
      delivery: { channel: "telegram", to: "123", mode: undefined as never },
    });
    const plan = resolveCronDeliveryPlan(job);
    expect(plan.mode).toBe("announce");
    expect(plan.requested).toBe(true);
    expect(plan.channel).toBe("telegram");
    expect(plan.to).toBe("123");
  });
  it("respects legacy payload deliver=false", () => {
    const job = makeJob({
      delivery: undefined,
      payload: { kind: "agentTurn", message: "hello", deliver: false },
    });
    const plan = resolveCronDeliveryPlan(job);
    expect(plan.mode).toBe("none");
    expect(plan.requested).toBe(false);
  });
});

View File

@@ -32,12 +32,13 @@ export function resolveCronDeliveryPlan(job: CronJob): CronDeliveryPlan {
const delivery = job.delivery;
const hasDelivery = delivery && typeof delivery === "object";
const rawMode = hasDelivery ? (delivery as { mode?: unknown }).mode : undefined;
const normalizedMode = typeof rawMode === "string" ? rawMode.trim().toLowerCase() : rawMode;
const mode =
rawMode === "announce"
normalizedMode === "announce"
? "announce"
: rawMode === "none"
: normalizedMode === "none"
? "none"
: rawMode === "deliver"
: normalizedMode === "deliver"
? "announce"
: undefined;
@@ -51,7 +52,7 @@ export function resolveCronDeliveryPlan(job: CronJob): CronDeliveryPlan {
const channel = deliveryChannel ?? payloadChannel ?? "last";
const to = deliveryTo ?? payloadTo;
if (hasDelivery) {
const resolvedMode = mode ?? "none";
const resolvedMode = mode ?? "announce";
return {
mode: resolvedMode,
channel,

View File

@@ -134,6 +134,48 @@ describe("runCronIsolatedAgentTurn", () => {
});
});
it("announces only the final payload text", async () => {
await withTempHome(async (home) => {
const storePath = await writeSessionStore(home);
const deps: CliDeps = {
sendMessageWhatsApp: vi.fn(),
sendMessageTelegram: vi.fn(),
sendMessageDiscord: vi.fn(),
sendMessageSignal: vi.fn(),
sendMessageIMessage: vi.fn(),
};
vi.mocked(runEmbeddedPiAgent).mockResolvedValue({
payloads: [{ text: "Working on it..." }, { text: "Final weather summary" }],
meta: {
durationMs: 5,
agentMeta: { sessionId: "s", provider: "p", model: "m" },
},
});
const res = await runCronIsolatedAgentTurn({
cfg: makeCfg(home, storePath, {
channels: { telegram: { botToken: "t-1" } },
}),
deps,
job: {
...makeJob({ kind: "agentTurn", message: "do it" }),
delivery: { mode: "announce", channel: "telegram", to: "123" },
},
message: "do it",
sessionKey: "cron:job-1",
lane: "cron",
});
expect(res.status).toBe("ok");
expect(deps.sendMessageTelegram).toHaveBeenCalledTimes(1);
expect(deps.sendMessageTelegram).toHaveBeenCalledWith(
"123",
"Final weather summary",
expect.any(Object),
);
});
});
it("skips announce when messaging tool already sent to target", async () => {
await withTempHome(async (home) => {
const storePath = await writeSessionStore(home);

View File

@@ -48,7 +48,7 @@ async function writeSessionStore(home: string) {
async function readSessionEntry(storePath: string, key: string) {
const raw = await fs.readFile(storePath, "utf-8");
const store = JSON.parse(raw) as Record<string, { sessionId?: string }>;
const store = JSON.parse(raw) as Record<string, { sessionId?: string; label?: string }>;
return store[key];
}
@@ -90,6 +90,38 @@ describe("runCronIsolatedAgentTurn", () => {
vi.mocked(loadModelCatalog).mockResolvedValue([]);
});
it("treats blank model overrides as unset", async () => {
await withTempHome(async (home) => {
const storePath = await writeSessionStore(home);
const deps: CliDeps = {
sendMessageWhatsApp: vi.fn(),
sendMessageTelegram: vi.fn(),
sendMessageDiscord: vi.fn(),
sendMessageSignal: vi.fn(),
sendMessageIMessage: vi.fn(),
};
vi.mocked(runEmbeddedPiAgent).mockResolvedValue({
payloads: [{ text: "ok" }],
meta: {
durationMs: 5,
agentMeta: { sessionId: "s", provider: "p", model: "m" },
},
});
const res = await runCronIsolatedAgentTurn({
cfg: makeCfg(home, storePath),
deps,
job: makeJob({ kind: "agentTurn", message: "do it", model: " " }),
message: "do it",
sessionKey: "cron:job-1",
lane: "cron",
});
expect(res.status).toBe("ok");
expect(vi.mocked(runEmbeddedPiAgent)).toHaveBeenCalledTimes(1);
});
});
it("uses last non-empty agent text as summary", async () => {
await withTempHome(async (home) => {
const storePath = await writeSessionStore(home);
@@ -585,6 +617,49 @@ describe("runCronIsolatedAgentTurn", () => {
expect(first?.sessionId).toBeDefined();
expect(second?.sessionId).toBeDefined();
expect(second?.sessionId).not.toBe(first?.sessionId);
expect(first?.label).toBe("Cron: job-1");
expect(second?.label).toBe("Cron: job-1");
});
});
it("preserves an existing cron session label", async () => {
await withTempHome(async (home) => {
const storePath = await writeSessionStore(home);
const raw = await fs.readFile(storePath, "utf-8");
const store = JSON.parse(raw) as Record<string, Record<string, unknown>>;
store["agent:main:cron:job-1"] = {
sessionId: "old",
updatedAt: Date.now(),
label: "Nightly digest",
};
await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
const deps: CliDeps = {
sendMessageWhatsApp: vi.fn(),
sendMessageTelegram: vi.fn(),
sendMessageDiscord: vi.fn(),
sendMessageSignal: vi.fn(),
sendMessageIMessage: vi.fn(),
};
vi.mocked(runEmbeddedPiAgent).mockResolvedValue({
payloads: [{ text: "ok" }],
meta: {
durationMs: 5,
agentMeta: { sessionId: "s", provider: "p", model: "m" },
},
});
await runCronIsolatedAgentTurn({
cfg: makeCfg(home, storePath),
deps,
job: makeJob({ kind: "agentTurn", message: "ping", deliver: false }),
message: "ping",
sessionKey: "cron:job-1",
lane: "cron",
});
const entry = await readSessionEntry(storePath, "agent:main:cron:job-1");
expect(entry?.label).toBe("Nightly digest");
});
});
});

View File

@@ -30,6 +30,7 @@ export async function resolveDeliveryTarget(
}> {
const requestedChannel = typeof jobPayload.channel === "string" ? jobPayload.channel : "last";
const explicitTo = typeof jobPayload.to === "string" ? jobPayload.to : undefined;
const allowMismatchedLastTo = requestedChannel === "last";
const sessionCfg = cfg.session;
const mainSessionKey = resolveAgentMainSessionKey({ cfg, agentId });
@@ -41,7 +42,7 @@ export async function resolveDeliveryTarget(
entry: main,
requestedChannel,
explicitTo,
allowMismatchedLastTo: true,
allowMismatchedLastTo,
});
let fallbackChannel: Exclude<OutboundChannel, "none"> | undefined;
@@ -60,7 +61,7 @@ export async function resolveDeliveryTarget(
requestedChannel,
explicitTo,
fallbackChannel,
allowMismatchedLastTo: true,
allowMismatchedLastTo,
mode: preliminary.mode,
})
: preliminary;

View File

@@ -8,6 +8,7 @@ type DeliveryPayload = {
text?: string;
mediaUrl?: string;
mediaUrls?: string[];
channelData?: Record<string, unknown>;
};
export function pickSummaryFromOutput(text: string | undefined) {
@@ -39,6 +40,19 @@ export function pickLastNonEmptyTextFromPayloads(payloads: Array<{ text?: string
return undefined;
}
/**
 * Walk payloads from newest to oldest and return the first one that carries
 * deliverable content: non-blank text, media (single URL or list), or
 * channel-specific data. Returns undefined when nothing is worth sending.
 */
export function pickLastDeliverablePayload(payloads: DeliveryPayload[]) {
  const isDeliverable = (payload: DeliveryPayload | undefined): boolean => {
    if (!payload) {
      return false;
    }
    if ((payload.text ?? "").trim()) {
      return true;
    }
    if (payload.mediaUrl || (payload.mediaUrls?.length ?? 0) > 0) {
      return true;
    }
    return Object.keys(payload.channelData ?? {}).length > 0;
  };
  for (let index = payloads.length - 1; index >= 0; index--) {
    const candidate = payloads[index];
    if (isDeliverable(candidate)) {
      return candidate;
    }
  }
  return undefined;
}
/**
* Check if all payloads are just heartbeat ack responses (HEARTBEAT_OK).
* Returns true if delivery should be skipped because there's no real content.

View File

@@ -56,6 +56,7 @@ import { resolveCronDeliveryPlan } from "../delivery.js";
import { resolveDeliveryTarget } from "./delivery-target.js";
import {
isHeartbeatOnlyResponse,
pickLastDeliverablePayload,
pickLastNonEmptyTextFromPayloads,
pickSummaryFromOutput,
pickSummaryFromPayloads,
@@ -97,6 +98,8 @@ export type RunCronAgentTurnResult = {
/** Last non-empty agent text output (not truncated). */
outputText?: string;
error?: string;
sessionId?: string;
sessionKey?: string;
};
export async function runCronIsolatedAgentTurn(params: {
@@ -187,14 +190,12 @@ export async function runCronIsolatedAgentTurn(params: {
}
const modelOverrideRaw =
params.job.payload.kind === "agentTurn" ? params.job.payload.model : undefined;
if (modelOverrideRaw !== undefined) {
if (typeof modelOverrideRaw !== "string") {
return { status: "error", error: "invalid model: expected string" };
}
const modelOverride = typeof modelOverrideRaw === "string" ? modelOverrideRaw.trim() : undefined;
if (modelOverride !== undefined && modelOverride.length > 0) {
const resolvedOverride = resolveAllowedModelRef({
cfg: cfgWithAgentDefaults,
catalog: await loadCatalog(),
raw: modelOverrideRaw,
raw: modelOverride,
defaultProvider: resolvedDefault.provider,
defaultModel: resolvedDefault.model,
});
@@ -211,6 +212,36 @@ export async function runCronIsolatedAgentTurn(params: {
agentId,
nowMs: now,
});
const runSessionId = cronSession.sessionEntry.sessionId;
const runSessionKey = baseSessionKey.startsWith("cron:")
? `${agentSessionKey}:run:${runSessionId}`
: agentSessionKey;
const persistSessionEntry = async () => {
cronSession.store[agentSessionKey] = cronSession.sessionEntry;
if (runSessionKey !== agentSessionKey) {
cronSession.store[runSessionKey] = cronSession.sessionEntry;
}
await updateSessionStore(cronSession.storePath, (store) => {
store[agentSessionKey] = cronSession.sessionEntry;
if (runSessionKey !== agentSessionKey) {
store[runSessionKey] = cronSession.sessionEntry;
}
});
};
const withRunSession = (
result: Omit<RunCronAgentTurnResult, "sessionId" | "sessionKey">,
): RunCronAgentTurnResult => ({
...result,
sessionId: runSessionId,
sessionKey: runSessionKey,
});
if (!cronSession.sessionEntry.label?.trim() && baseSessionKey.startsWith("cron:")) {
const labelSuffix =
typeof params.job.name === "string" && params.job.name.trim()
? params.job.name.trim()
: params.job.id;
cronSession.sessionEntry.label = `Cron: ${labelSuffix}`;
}
// Resolve thinking level - job thinking > hooks.gmail.thinking > agent default
const hooksGmailThinking = isGmailHook
@@ -317,18 +348,12 @@ export async function runCronIsolatedAgentTurn(params: {
updatedAt: Date.now(),
skillsSnapshot,
};
cronSession.store[agentSessionKey] = cronSession.sessionEntry;
await updateSessionStore(cronSession.storePath, (store) => {
store[agentSessionKey] = cronSession.sessionEntry;
});
await persistSessionEntry();
}
// Persist systemSent before the run, mirroring the inbound auto-reply behavior.
cronSession.sessionEntry.systemSent = true;
cronSession.store[agentSessionKey] = cronSession.sessionEntry;
await updateSessionStore(cronSession.storePath, (store) => {
store[agentSessionKey] = cronSession.sessionEntry;
});
await persistSessionEntry();
let runResult: Awaited<ReturnType<typeof runEmbeddedPiAgent>>;
let fallbackProvider = provider;
@@ -356,6 +381,7 @@ export async function runCronIsolatedAgentTurn(params: {
return runCliAgent({
sessionId: cronSession.sessionEntry.sessionId,
sessionKey: agentSessionKey,
agentId,
sessionFile,
workspaceDir,
config: cfgWithAgentDefaults,
@@ -371,6 +397,7 @@ export async function runCronIsolatedAgentTurn(params: {
return runEmbeddedPiAgent({
sessionId: cronSession.sessionEntry.sessionId,
sessionKey: agentSessionKey,
agentId,
messageChannel,
agentAccountId: resolvedDelivery.accountId,
sessionFile,
@@ -394,7 +421,7 @@ export async function runCronIsolatedAgentTurn(params: {
fallbackProvider = fallbackResult.provider;
fallbackModel = fallbackResult.model;
} catch (err) {
return { status: "error", error: String(err) };
return withRunSession({ status: "error", error: String(err) });
}
const payloads = runResult.payloads ?? [];
@@ -425,14 +452,19 @@ export async function runCronIsolatedAgentTurn(params: {
cronSession.sessionEntry.totalTokens =
promptTokens > 0 ? promptTokens : (usage.total ?? input);
}
cronSession.store[agentSessionKey] = cronSession.sessionEntry;
await updateSessionStore(cronSession.storePath, (store) => {
store[agentSessionKey] = cronSession.sessionEntry;
});
await persistSessionEntry();
}
const firstText = payloads[0]?.text ?? "";
const summary = pickSummaryFromPayloads(payloads) ?? pickSummaryFromOutput(firstText);
const outputText = pickLastNonEmptyTextFromPayloads(payloads);
const synthesizedText = outputText?.trim() || summary?.trim() || undefined;
const deliveryPayload = pickLastDeliverablePayload(payloads);
const deliveryPayloads =
deliveryPayload !== undefined
? [deliveryPayload]
: synthesizedText
? [{ text: synthesizedText }]
: [];
const deliveryBestEffort = resolveCronDeliveryBestEffort(params.job);
// Skip delivery for heartbeat-only responses (HEARTBEAT_OK with no real content).
@@ -452,28 +484,28 @@ export async function runCronIsolatedAgentTurn(params: {
if (deliveryRequested && !skipHeartbeatDelivery && !skipMessagingToolDelivery) {
if (resolvedDelivery.error) {
if (!deliveryBestEffort) {
return {
return withRunSession({
status: "error",
error: resolvedDelivery.error.message,
summary,
outputText,
};
});
}
logWarn(`[cron:${params.job.id}] ${resolvedDelivery.error.message}`);
return { status: "ok", summary, outputText };
return withRunSession({ status: "ok", summary, outputText });
}
if (!resolvedDelivery.to) {
const message = "cron delivery target is missing";
if (!deliveryBestEffort) {
return {
return withRunSession({
status: "error",
error: message,
summary,
outputText,
};
});
}
logWarn(`[cron:${params.job.id}] ${message}`);
return { status: "ok", summary, outputText };
return withRunSession({ status: "ok", summary, outputText });
}
try {
await deliverOutboundPayloads({
@@ -482,16 +514,16 @@ export async function runCronIsolatedAgentTurn(params: {
to: resolvedDelivery.to,
accountId: resolvedDelivery.accountId,
threadId: resolvedDelivery.threadId,
payloads,
payloads: deliveryPayloads,
bestEffort: deliveryBestEffort,
deps: createOutboundSendDeps(params.deps),
});
} catch (err) {
if (!deliveryBestEffort) {
return { status: "error", summary, outputText, error: String(err) };
return withRunSession({ status: "error", summary, outputText, error: String(err) });
}
}
}
return { status: "ok", summary, outputText };
return withRunSession({ status: "ok", summary, outputText });
}

View File

@@ -28,6 +28,8 @@ export function resolveCronSession(params: {
lastChannel: entry?.lastChannel,
lastTo: entry?.lastTo,
lastAccountId: entry?.lastAccountId,
label: entry?.label,
displayName: entry?.displayName,
skillsSnapshot: entry?.skillsSnapshot,
};
return { storePath, store, sessionEntry, systemSent, isNewSession: true };

View File

@@ -234,4 +234,62 @@ describe("normalizeCronJobCreate", () => {
expect(delivery.mode).toBe("announce");
expect((normalized as { isolation?: unknown }).isolation).toBeUndefined();
});
it("infers payload kind/session target and name for message-only jobs", () => {
const normalized = normalizeCronJobCreate({
schedule: { kind: "every", everyMs: 60_000 },
payload: { message: "Nightly backup" },
}) as unknown as Record<string, unknown>;
const payload = normalized.payload as Record<string, unknown>;
expect(payload.kind).toBe("agentTurn");
expect(payload.message).toBe("Nightly backup");
expect(normalized.sessionTarget).toBe("isolated");
expect(normalized.wakeMode).toBe("now");
expect(typeof normalized.name).toBe("string");
});
it("maps top-level model/thinking/timeout into payload for legacy add params", () => {
const normalized = normalizeCronJobCreate({
name: "legacy root fields",
schedule: { kind: "every", everyMs: 60_000 },
payload: { kind: "agentTurn", message: "hello" },
model: " openrouter/deepseek/deepseek-r1 ",
thinking: " high ",
timeoutSeconds: 45,
allowUnsafeExternalContent: true,
}) as unknown as Record<string, unknown>;
const payload = normalized.payload as Record<string, unknown>;
expect(payload.model).toBe("openrouter/deepseek/deepseek-r1");
expect(payload.thinking).toBe("high");
expect(payload.timeoutSeconds).toBe(45);
expect(payload.allowUnsafeExternalContent).toBe(true);
});
it("coerces sessionTarget and wakeMode casing", () => {
const normalized = normalizeCronJobCreate({
name: "casing",
schedule: { kind: "cron", expr: "* * * * *" },
sessionTarget: " IsOlAtEd ",
wakeMode: " NOW ",
payload: { kind: "agentTurn", message: "hello" },
}) as unknown as Record<string, unknown>;
expect(normalized.sessionTarget).toBe("isolated");
expect(normalized.wakeMode).toBe("now");
});
it("strips invalid delivery mode from partial delivery objects", () => {
const normalized = normalizeCronJobCreate({
name: "delivery mode",
schedule: { kind: "cron", expr: "* * * * *" },
payload: { kind: "agentTurn", message: "hello" },
delivery: { mode: "bogus", to: "123" },
}) as unknown as Record<string, unknown>;
const delivery = normalized.delivery as Record<string, unknown>;
expect(delivery.mode).toBeUndefined();
expect(delivery.to).toBe("123");
});
});

View File

@@ -2,6 +2,7 @@ import type { CronJobCreate, CronJobPatch } from "./types.js";
import { sanitizeAgentId } from "../routing/session-key.js";
import { parseAbsoluteTimeMs } from "./parse.js";
import { migrateLegacyCronPayload } from "./payload-migration.js";
import { inferLegacyName } from "./service/normalize.js";
type UnknownRecord = Record<string, unknown>;
@@ -19,7 +20,8 @@ function isRecord(value: unknown): value is UnknownRecord {
function coerceSchedule(schedule: UnknownRecord) {
const next: UnknownRecord = { ...schedule };
const kind = typeof schedule.kind === "string" ? schedule.kind : undefined;
const rawKind = typeof schedule.kind === "string" ? schedule.kind.trim().toLowerCase() : "";
const kind = rawKind === "at" || rawKind === "every" || rawKind === "cron" ? rawKind : undefined;
const atMsRaw = schedule.atMs;
const atRaw = schedule.at;
const atString = typeof atRaw === "string" ? atRaw.trim() : "";
@@ -32,7 +34,9 @@ function coerceSchedule(schedule: UnknownRecord) {
? parseAbsoluteTimeMs(atString)
: null;
if (!kind) {
if (kind) {
next.kind = kind;
} else {
if (
typeof schedule.atMs === "number" ||
typeof schedule.at === "string" ||
@@ -47,7 +51,7 @@ function coerceSchedule(schedule: UnknownRecord) {
}
if (atString) {
next.at = parsedAtMs ? new Date(parsedAtMs).toISOString() : atString;
next.at = parsedAtMs !== null ? new Date(parsedAtMs).toISOString() : atString;
} else if (parsedAtMs !== null) {
next.at = new Date(parsedAtMs).toISOString();
}
@@ -62,6 +66,72 @@ function coercePayload(payload: UnknownRecord) {
const next: UnknownRecord = { ...payload };
// Back-compat: older configs used `provider` for delivery channel.
migrateLegacyCronPayload(next);
const kindRaw = typeof next.kind === "string" ? next.kind.trim().toLowerCase() : "";
if (kindRaw === "agentturn") {
next.kind = "agentTurn";
} else if (kindRaw === "systemevent") {
next.kind = "systemEvent";
} else if (kindRaw) {
next.kind = kindRaw;
}
if (!next.kind) {
const hasMessage = typeof next.message === "string" && next.message.trim().length > 0;
const hasText = typeof next.text === "string" && next.text.trim().length > 0;
if (hasMessage) {
next.kind = "agentTurn";
} else if (hasText) {
next.kind = "systemEvent";
}
}
if (typeof next.message === "string") {
const trimmed = next.message.trim();
if (trimmed) {
next.message = trimmed;
}
}
if (typeof next.text === "string") {
const trimmed = next.text.trim();
if (trimmed) {
next.text = trimmed;
}
}
if ("model" in next) {
if (typeof next.model === "string") {
const trimmed = next.model.trim();
if (trimmed) {
next.model = trimmed;
} else {
delete next.model;
}
} else {
delete next.model;
}
}
if ("thinking" in next) {
if (typeof next.thinking === "string") {
const trimmed = next.thinking.trim();
if (trimmed) {
next.thinking = trimmed;
} else {
delete next.thinking;
}
} else {
delete next.thinking;
}
}
if ("timeoutSeconds" in next) {
if (typeof next.timeoutSeconds === "number" && Number.isFinite(next.timeoutSeconds)) {
next.timeoutSeconds = Math.max(1, Math.floor(next.timeoutSeconds));
} else {
delete next.timeoutSeconds;
}
}
if (
"allowUnsafeExternalContent" in next &&
typeof next.allowUnsafeExternalContent !== "boolean"
) {
delete next.allowUnsafeExternalContent;
}
return next;
}
@@ -69,7 +139,15 @@ function coerceDelivery(delivery: UnknownRecord) {
const next: UnknownRecord = { ...delivery };
if (typeof delivery.mode === "string") {
const mode = delivery.mode.trim().toLowerCase();
next.mode = mode === "deliver" ? "announce" : mode;
if (mode === "deliver") {
next.mode = "announce";
} else if (mode === "announce" || mode === "none") {
next.mode = mode;
} else {
delete next.mode;
}
} else if ("mode" in next) {
delete next.mode;
}
if (typeof delivery.channel === "string") {
const trimmed = delivery.channel.trim().toLowerCase();
@@ -147,6 +225,95 @@ function unwrapJob(raw: UnknownRecord) {
return raw;
}
/** Coerce a raw sessionTarget value to "main" | "isolated"; anything else yields undefined. */
function normalizeSessionTarget(raw: unknown) {
  if (typeof raw !== "string") {
    return undefined;
  }
  const candidate = raw.trim().toLowerCase();
  return candidate === "main" || candidate === "isolated" ? candidate : undefined;
}
/** Coerce a raw wakeMode value to "now" | "next-heartbeat"; anything else yields undefined. */
function normalizeWakeMode(raw: unknown) {
  if (typeof raw !== "string") {
    return undefined;
  }
  const candidate = raw.trim().toLowerCase();
  return candidate === "now" || candidate === "next-heartbeat" ? candidate : undefined;
}
/**
 * Back-compat: mirror legacy top-level agentTurn fields (model, thinking,
 * timeoutSeconds, allowUnsafeExternalContent) into the payload, but only when
 * the payload does not already carry a usable value of its own.
 */
function copyTopLevelAgentTurnFields(next: UnknownRecord, payload: UnknownRecord) {
  for (const field of ["model", "thinking"] as const) {
    const existing = payload[field];
    if (typeof existing === "string" && existing.trim()) {
      continue; // payload already has a non-blank value; keep it
    }
    const legacy = next[field];
    if (typeof legacy === "string" && legacy.trim()) {
      payload[field] = legacy.trim();
    }
  }
  const hasTimeout = typeof payload.timeoutSeconds === "number";
  if (!hasTimeout && typeof next.timeoutSeconds === "number") {
    payload.timeoutSeconds = next.timeoutSeconds;
  }
  const hasUnsafeFlag = typeof payload.allowUnsafeExternalContent === "boolean";
  if (!hasUnsafeFlag && typeof next.allowUnsafeExternalContent === "boolean") {
    payload.allowUnsafeExternalContent = next.allowUnsafeExternalContent;
  }
}
/**
 * Back-compat: mirror legacy top-level delivery fields (deliver, channel, to,
 * bestEffortDeliver, provider) into the payload when the payload itself does
 * not already define them. String fields are trimmed and only copied when
 * non-blank.
 */
function copyTopLevelLegacyDeliveryFields(next: UnknownRecord, payload: UnknownRecord) {
  const copyBoolean = (field: "deliver" | "bestEffortDeliver") => {
    if (typeof payload[field] !== "boolean" && typeof next[field] === "boolean") {
      payload[field] = next[field];
    }
  };
  const copyTrimmedString = (field: "channel" | "to" | "provider") => {
    if (typeof payload[field] === "string") {
      return; // payload value takes precedence
    }
    const legacy = next[field];
    if (typeof legacy === "string" && legacy.trim()) {
      payload[field] = legacy.trim();
    }
  };
  copyBoolean("deliver");
  copyTrimmedString("channel");
  copyTrimmedString("to");
  copyBoolean("bestEffortDeliver");
  copyTrimmedString("provider");
}
/** Remove legacy top-level fields after they have been folded into payload/delivery. */
function stripLegacyTopLevelFields(next: UnknownRecord) {
  const legacyFields = [
    "model",
    "thinking",
    "timeoutSeconds",
    "allowUnsafeExternalContent",
    "message",
    "text",
    "deliver",
    "channel",
    "to",
    "bestEffortDeliver",
    "provider",
  ] as const;
  for (const field of legacyFields) {
    delete next[field];
  }
}
export function normalizeCronJobInput(
raw: unknown,
options: NormalizeOptions = DEFAULT_OPTIONS,
@@ -186,10 +353,38 @@ export function normalizeCronJobInput(
}
}
if ("sessionTarget" in base) {
const normalized = normalizeSessionTarget(base.sessionTarget);
if (normalized) {
next.sessionTarget = normalized;
} else {
delete next.sessionTarget;
}
}
if ("wakeMode" in base) {
const normalized = normalizeWakeMode(base.wakeMode);
if (normalized) {
next.wakeMode = normalized;
} else {
delete next.wakeMode;
}
}
if (isRecord(base.schedule)) {
next.schedule = coerceSchedule(base.schedule);
}
if (!("payload" in next) || !isRecord(next.payload)) {
const message = typeof next.message === "string" ? next.message.trim() : "";
const text = typeof next.text === "string" ? next.text.trim() : "";
if (message) {
next.payload = { kind: "agentTurn", message };
} else if (text) {
next.payload = { kind: "systemEvent", text };
}
}
if (isRecord(base.payload)) {
next.payload = coercePayload(base.payload);
}
@@ -198,17 +393,39 @@ export function normalizeCronJobInput(
next.delivery = coerceDelivery(base.delivery);
}
if (isRecord(base.isolation)) {
if ("isolation" in next) {
delete next.isolation;
}
const payload = isRecord(next.payload) ? next.payload : null;
if (payload && payload.kind === "agentTurn") {
copyTopLevelAgentTurnFields(next, payload);
copyTopLevelLegacyDeliveryFields(next, payload);
}
stripLegacyTopLevelFields(next);
if (options.applyDefaults) {
if (!next.wakeMode) {
next.wakeMode = "next-heartbeat";
next.wakeMode = "now";
}
if (typeof next.enabled !== "boolean") {
next.enabled = true;
}
if (
(typeof next.name !== "string" || !next.name.trim()) &&
isRecord(next.schedule) &&
isRecord(next.payload)
) {
next.name = inferLegacyName({
schedule: next.schedule as { kind?: unknown; everyMs?: unknown; expr?: unknown },
payload: next.payload as { kind?: unknown; text?: unknown; message?: unknown },
});
} else if (typeof next.name === "string") {
const trimmed = next.name.trim();
if (trimmed) {
next.name = trimmed;
}
}
if (!next.sessionTarget && isRecord(next.payload)) {
const kind = typeof next.payload.kind === "string" ? next.payload.kind : "";
if (kind === "systemEvent") {

View File

@@ -65,6 +65,8 @@ describe("cron run log", () => {
jobId: "a",
action: "finished",
status: "skipped",
sessionId: "run-123",
sessionKey: "agent:main:cron:a:run:run-123",
});
const allA = await readCronRunLogEntries(logPathA, { limit: 10 });
@@ -78,6 +80,8 @@ describe("cron run log", () => {
const lastOne = await readCronRunLogEntries(logPathA, { limit: 1 });
expect(lastOne.map((e) => e.ts)).toEqual([3]);
expect(lastOne[0]?.sessionId).toBe("run-123");
expect(lastOne[0]?.sessionKey).toBe("agent:main:cron:a:run:run-123");
const onlyB = await readCronRunLogEntries(logPathB, {
limit: 10,

View File

@@ -8,6 +8,8 @@ export type CronRunLogEntry = {
status?: "ok" | "error" | "skipped";
error?: string;
summary?: string;
sessionId?: string;
sessionKey?: string;
runAtMs?: number;
durationMs?: number;
nextRunAtMs?: number;
@@ -93,7 +95,24 @@ export async function readCronRunLogEntries(
if (jobId && obj.jobId !== jobId) {
continue;
}
parsed.push(obj as CronRunLogEntry);
const entry: CronRunLogEntry = {
ts: obj.ts,
jobId: obj.jobId,
action: "finished",
status: obj.status,
error: obj.error,
summary: obj.summary,
runAtMs: obj.runAtMs,
durationMs: obj.durationMs,
nextRunAtMs: obj.nextRunAtMs,
};
if (typeof obj.sessionId === "string" && obj.sessionId.trim().length > 0) {
entry.sessionId = obj.sessionId;
}
if (typeof obj.sessionKey === "string" && obj.sessionKey.trim().length > 0) {
entry.sessionKey = obj.sessionKey;
}
parsed.push(entry);
} catch {
// ignore invalid lines
}

View File

@@ -2,6 +2,14 @@ import { Cron } from "croner";
import type { CronSchedule } from "./types.js";
import { parseAbsoluteTimeMs } from "./parse.js";
/**
 * Resolve the timezone string to hand to the cron library.
 * A non-blank `tz` wins (whitespace-trimmed); otherwise fall back to the
 * host's current timezone as reported by Intl.
 */
function resolveCronTimezone(tz?: string) {
  const explicit = (tz ?? "").trim();
  return explicit.length > 0 ? explicit : Intl.DateTimeFormat().resolvedOptions().timeZone;
}
export function computeNextRunAtMs(schedule: CronSchedule, nowMs: number): number | undefined {
if (schedule.kind === "at") {
// Handle both canonical `at` (string) and legacy `atMs` (number) fields.
@@ -38,9 +46,20 @@ export function computeNextRunAtMs(schedule: CronSchedule, nowMs: number): numbe
return undefined;
}
const cron = new Cron(expr, {
timezone: schedule.tz?.trim() || undefined,
timezone: resolveCronTimezone(schedule.tz),
catch: false,
});
const next = cron.nextRun(new Date(nowMs));
return next ? next.getTime() : undefined;
let cursor = nowMs;
for (let attempt = 0; attempt < 3; attempt++) {
const next = cron.nextRun(new Date(cursor));
if (!next) {
return undefined;
}
const nextMs = next.getTime();
if (Number.isFinite(nextMs) && nextMs > nowMs) {
return nextMs;
}
cursor += 1_000;
}
return undefined;
}

View File

@@ -0,0 +1,92 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { describe, expect, it, vi } from "vitest";
import { CronService } from "./service.js";
// Silent logger stub: CronService needs a logger, but these tests assert
// behavior through mocks, so every level is a no-op spy.
const noopLogger = {
  debug: vi.fn(),
  info: vi.fn(),
  warn: vi.fn(),
  error: vi.fn(),
};
/**
 * Allocate a fresh temp directory and derive the cron store path inside it.
 * Callers invoke `cleanup` when done to delete the whole tree.
 */
async function makeStorePath() {
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-cron-delivery-"));
  const storePath = path.join(tempRoot, "cron", "jobs.json");
  const cleanup = async () => {
    await fs.rm(tempRoot, { recursive: true, force: true });
  };
  return { storePath, cleanup };
}
// Regression suite for delivery-plan handling of isolated cron jobs: legacy
// `deliver` flags and partially-specified `delivery` objects must map onto a
// consistent announce/none decision.
describe("CronService delivery plan consistency", () => {
  // Legacy payload field `deliver: false` must suppress the summary post to
  // the main session after an isolated run.
  it("does not post isolated summary when legacy deliver=false", async () => {
    const store = await makeStorePath();
    const enqueueSystemEvent = vi.fn();
    const cron = new CronService({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      enqueueSystemEvent,
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn(async () => ({ status: "ok", summary: "done" })),
    });
    await cron.start();
    const job = await cron.add({
      name: "legacy-off",
      schedule: { kind: "every", everyMs: 60_000, anchorMs: Date.now() },
      sessionTarget: "isolated",
      wakeMode: "next-heartbeat",
      payload: {
        kind: "agentTurn",
        message: "hello",
        deliver: false,
      },
    });
    const result = await cron.run(job.id, "force");
    expect(result).toEqual({ ok: true, ran: true });
    // No summary event: deliver=false means the run result stays silent.
    expect(enqueueSystemEvent).not.toHaveBeenCalled();
    cron.stop();
    await store.cleanup();
  });
  // A delivery object carrying only channel/to (no `mode`) should behave as
  // announce: the run summary is still posted to the main session.
  it("treats delivery object without mode as announce", async () => {
    const store = await makeStorePath();
    const enqueueSystemEvent = vi.fn();
    const cron = new CronService({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      enqueueSystemEvent,
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn(async () => ({ status: "ok", summary: "done" })),
    });
    await cron.start();
    const job = await cron.add({
      name: "partial-delivery",
      schedule: { kind: "every", everyMs: 60_000, anchorMs: Date.now() },
      sessionTarget: "isolated",
      wakeMode: "next-heartbeat",
      payload: {
        kind: "agentTurn",
        message: "hello",
      },
      // Cast simulates persisted data that predates the required `mode` field.
      delivery: { channel: "telegram", to: "123" } as unknown as {
        mode: "none" | "announce";
        channel?: string;
        to?: string;
      },
    });
    const result = await cron.run(job.id, "force");
    expect(result).toEqual({ ok: true, ran: true });
    expect(enqueueSystemEvent).toHaveBeenCalledWith("Cron: done", { agentId: undefined });
    cron.stop();
    await store.cleanup();
  });
});

View File

@@ -2,6 +2,7 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { CronJob } from "./types.js";
import { CronService } from "./service.js";
const noopLogger = {
@@ -21,6 +22,23 @@ async function makeStorePath() {
};
}
/**
 * Poll the cron service until `predicate` accepts the job with the given id.
 * Flushes pending fake timers between polls so queued async work can finish;
 * gives up after 30 attempts and returns whatever was last observed
 * (possibly undefined).
 */
async function waitForJob(
  cron: CronService,
  id: string,
  predicate: (job: CronJob | undefined) => boolean,
) {
  let observed: CronJob | undefined;
  let attempts = 30;
  while (attempts-- > 0) {
    const snapshot = await cron.list({ includeDisabled: true });
    observed = snapshot.find((candidate) => candidate.id === id);
    if (predicate(observed)) {
      break;
    }
    await vi.runOnlyPendingTimersAsync();
  }
  return observed;
}
describe("CronService interval/cron jobs fire on time", () => {
beforeEach(() => {
vi.useFakeTimers();
@@ -66,9 +84,7 @@ describe("CronService interval/cron jobs fire on time", () => {
vi.setSystemTime(new Date(firstDueAt + 5));
await vi.runOnlyPendingTimersAsync();
// Wait for the async onTimer to complete via the lock queue.
const jobs = await cron.list();
const updated = jobs.find((j) => j.id === job.id);
const updated = await waitForJob(cron, job.id, (current) => current?.state.lastStatus === "ok");
expect(enqueueSystemEvent).toHaveBeenCalledWith("tick", { agentId: undefined });
expect(updated?.state.lastStatus).toBe("ok");
@@ -112,9 +128,7 @@ describe("CronService interval/cron jobs fire on time", () => {
vi.setSystemTime(new Date(firstDueAt + 5));
await vi.runOnlyPendingTimersAsync();
// Wait for the async onTimer to complete via the lock queue.
const jobs = await cron.list();
const updated = jobs.find((j) => j.id === job.id);
const updated = await waitForJob(cron, job.id, (current) => current?.state.lastStatus === "ok");
expect(enqueueSystemEvent).toHaveBeenCalledWith("cron-tick", { agentId: undefined });
expect(updated?.state.lastStatus).toBe("ok");
@@ -124,4 +138,88 @@ describe("CronService interval/cron jobs fire on time", () => {
cron.stop();
await store.cleanup();
});
  // Seeds the store with a legacy "every" job lacking anchorMs plus a
  // once-a-minute cron job, then forces the minute job repeatedly to verify
  // the recomputation of cron schedules does not starve the legacy job.
  it("keeps legacy every jobs due while minute cron jobs recompute schedules", async () => {
    const store = await makeStorePath();
    const enqueueSystemEvent = vi.fn();
    const requestHeartbeatNow = vi.fn();
    const nowMs = Date.parse("2025-12-13T00:00:00.000Z");
    await fs.mkdir(path.dirname(store.storePath), { recursive: true });
    // Pre-seed the persisted store; note the legacy job has no anchorMs.
    await fs.writeFile(
      store.storePath,
      JSON.stringify(
        {
          version: 1,
          jobs: [
            {
              id: "legacy-every",
              name: "legacy every",
              enabled: true,
              createdAtMs: nowMs,
              updatedAtMs: nowMs,
              schedule: { kind: "every", everyMs: 120_000 },
              sessionTarget: "main",
              wakeMode: "now",
              payload: { kind: "systemEvent", text: "sf-tick" },
              state: { nextRunAtMs: nowMs + 120_000 },
            },
            {
              id: "minute-cron",
              name: "minute cron",
              enabled: true,
              createdAtMs: nowMs,
              updatedAtMs: nowMs,
              schedule: { kind: "cron", expr: "* * * * *", tz: "UTC" },
              sessionTarget: "main",
              wakeMode: "now",
              payload: { kind: "systemEvent", text: "minute-tick" },
              state: { nextRunAtMs: nowMs + 60_000 },
            },
          ],
        },
        null,
        2,
      ),
      "utf-8",
    );
    const cron = new CronService({
      storePath: store.storePath,
      cronEnabled: true,
      log: noopLogger,
      enqueueSystemEvent,
      requestHeartbeatNow,
      runIsolatedAgentJob: vi.fn(async () => ({ status: "ok" })),
    });
    await cron.start();
    // Force the minute job once per simulated minute for six minutes.
    for (let minute = 1; minute <= 6; minute++) {
      vi.setSystemTime(new Date(nowMs + minute * 60_000));
      const minuteRun = await cron.run("minute-cron", "force");
      expect(minuteRun).toEqual({ ok: true, ran: true });
    }
    vi.setSystemTime(new Date(nowMs + 6 * 60_000));
    // The legacy job must still be considered due under "due" mode.
    const sfRun = await cron.run("legacy-every", "due");
    expect(sfRun).toEqual({ ok: true, ran: true });
    const sfRuns = enqueueSystemEvent.mock.calls.filter((args) => args[0] === "sf-tick").length;
    const minuteRuns = enqueueSystemEvent.mock.calls.filter(
      (args) => args[0] === "minute-tick",
    ).length;
    expect(minuteRuns).toBeGreaterThan(0);
    expect(sfRuns).toBeGreaterThan(0);
    const jobs = await cron.list({ includeDisabled: true });
    const sfJob = jobs.find((job) => job.id === "legacy-every");
    expect(sfJob?.state.lastStatus).toBe("ok");
    expect(sfJob?.schedule.kind).toBe("every");
    // The missing anchorMs is expected to be backfilled to the seed time.
    if (sfJob?.schedule.kind === "every") {
      expect(sfJob.schedule.anchorMs).toBe(nowMs);
    }
    cron.stop();
    await store.cleanup();
  });
});

View File

@@ -0,0 +1,346 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { setTimeout as delay } from "node:timers/promises";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { CronJob } from "./types.js";
import { CronService } from "./service.js";
import { createCronServiceState, type CronEvent } from "./service/state.js";
import { onTimer } from "./service/timer.js";
// Silent logger stub (includes `trace`, which this suite's service state
// helpers may call); every level is a no-op spy.
const noopLogger = {
  info: vi.fn(),
  warn: vi.fn(),
  error: vi.fn(),
  debug: vi.fn(),
  trace: vi.fn(),
};
/**
 * Create a disposable jobs.json location under a unique temp directory.
 * `cleanup` removes the directory tree (best-effort via force: true).
 */
async function makeStorePath() {
  const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "cron-issues-"));
  const cleanup = async () => {
    await fs.rm(tempDir, { recursive: true, force: true });
  };
  return { storePath: path.join(tempDir, "jobs.json"), cleanup };
}
/**
 * Build a minimal isolated agentTurn CronJob whose one-shot "at" schedule
 * fires at `nextRunAtMs`. The id doubles as the job name and the prompt
 * message; delivery is silenced (`mode: "none"`).
 */
function createDueIsolatedJob(params: {
  id: string;
  nowMs: number;
  nextRunAtMs: number;
  deleteAfterRun?: boolean;
}): CronJob {
  const { id, nowMs, nextRunAtMs, deleteAfterRun = false } = params;
  const job: CronJob = {
    id,
    name: id,
    enabled: true,
    deleteAfterRun,
    createdAtMs: nowMs,
    updatedAtMs: nowMs,
    schedule: { kind: "at", at: new Date(nextRunAtMs).toISOString() },
    sessionTarget: "isolated",
    wakeMode: "next-heartbeat",
    payload: { kind: "agentTurn", message: id },
    delivery: { mode: "none" },
    state: { nextRunAtMs },
  };
  return job;
}
// Regression suite covering previously-reported cron issues: schedule
// recomputation on update, force-run semantics, wake scheduling, payload
// patch persistence, timer-delay capping, and overlap/batch bookkeeping.
// Uses fake timers pinned to a fixed instant so cron math is deterministic.
describe("Cron issue regressions", () => {
  beforeEach(() => {
    vi.useFakeTimers();
    vi.setSystemTime(new Date("2026-02-06T10:05:00.000Z"));
  });
  afterEach(() => {
    vi.useRealTimers();
    vi.clearAllMocks();
  });
  // Updating a job's cron expression must recompute nextRunAtMs immediately.
  it("recalculates nextRunAtMs when schedule changes", async () => {
    const store = await makeStorePath();
    const cron = new CronService({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      enqueueSystemEvent: vi.fn(),
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn().mockResolvedValue({ status: "ok", summary: "ok" }),
    });
    await cron.start();
    const created = await cron.add({
      name: "hourly",
      schedule: { kind: "cron", expr: "0 * * * *", tz: "UTC" },
      sessionTarget: "main",
      payload: { kind: "systemEvent", text: "tick" },
    });
    // System time is 10:05, so the hourly job is next due at 11:00 UTC.
    expect(created.state.nextRunAtMs).toBe(Date.parse("2026-02-06T11:00:00.000Z"));
    const updated = await cron.update(created.id, {
      schedule: { kind: "cron", expr: "0 */2 * * *", tz: "UTC" },
    });
    // After switching to every-2-hours, the next even hour is 12:00 UTC.
    expect(updated.state.nextRunAtMs).toBe(Date.parse("2026-02-06T12:00:00.000Z"));
    cron.stop();
    await store.cleanup();
  });
  // "force" must execute a job whose schedule has not come due yet.
  it("runs immediately with force mode even when not due", async () => {
    const store = await makeStorePath();
    const enqueueSystemEvent = vi.fn();
    const cron = new CronService({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      enqueueSystemEvent,
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn().mockResolvedValue({ status: "ok", summary: "ok" }),
    });
    await cron.start();
    const created = await cron.add({
      name: "force-now",
      schedule: { kind: "every", everyMs: 60_000, anchorMs: Date.now() },
      sessionTarget: "main",
      payload: { kind: "systemEvent", text: "force" },
    });
    const result = await cron.run(created.id, "force");
    expect(result).toEqual({ ok: true, ran: true });
    expect(enqueueSystemEvent).toHaveBeenCalledWith("force", { agentId: undefined });
    cron.stop();
    await store.cleanup();
  });
  // Isolated jobs must still contribute to the service's next wake time.
  it("schedules isolated jobs with next wake time", async () => {
    const store = await makeStorePath();
    const cron = new CronService({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      enqueueSystemEvent: vi.fn(),
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn().mockResolvedValue({ status: "ok", summary: "ok" }),
    });
    await cron.start();
    const job = await cron.add({
      name: "isolated",
      schedule: { kind: "every", everyMs: 60_000, anchorMs: Date.now() },
      sessionTarget: "isolated",
      payload: { kind: "agentTurn", message: "hi" },
    });
    const status = await cron.status();
    expect(typeof job.state.nextRunAtMs).toBe("number");
    expect(typeof status.nextWakeAtMs).toBe("number");
    cron.stop();
    await store.cleanup();
  });
  // Patching only allowUnsafeExternalContent must keep the existing message.
  it("persists allowUnsafeExternalContent on agentTurn payload patches", async () => {
    const store = await makeStorePath();
    const cron = new CronService({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      enqueueSystemEvent: vi.fn(),
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn().mockResolvedValue({ status: "ok", summary: "ok" }),
    });
    await cron.start();
    const created = await cron.add({
      name: "unsafe toggle",
      schedule: { kind: "every", everyMs: 60_000, anchorMs: Date.now() },
      sessionTarget: "isolated",
      payload: { kind: "agentTurn", message: "hi" },
    });
    const updated = await cron.update(created.id, {
      payload: { kind: "agentTurn", allowUnsafeExternalContent: true },
    });
    expect(updated.payload.kind).toBe("agentTurn");
    if (updated.payload.kind === "agentTurn") {
      expect(updated.payload.allowUnsafeExternalContent).toBe(true);
      // Patch must merge, not replace: the original message survives.
      expect(updated.payload.message).toBe("hi");
    }
    cron.stop();
    await store.cleanup();
  });
  // A job years in the future must not arm a single giant setTimeout; the
  // service is expected to re-arm in <= 60s hops.
  it("caps timer delay to 60s for far-future schedules", async () => {
    const timeoutSpy = vi.spyOn(globalThis, "setTimeout");
    const store = await makeStorePath();
    const cron = new CronService({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      enqueueSystemEvent: vi.fn(),
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn().mockResolvedValue({ status: "ok", summary: "ok" }),
    });
    await cron.start();
    const callsBeforeAdd = timeoutSpy.mock.calls.length;
    await cron.add({
      name: "far-future",
      schedule: { kind: "at", at: "2035-01-01T00:00:00.000Z" },
      sessionTarget: "main",
      wakeMode: "next-heartbeat",
      payload: { kind: "systemEvent", text: "future" },
    });
    // Only inspect setTimeout calls made after the add.
    const delaysAfterAdd = timeoutSpy.mock.calls
      .slice(callsBeforeAdd)
      .map(([, delay]) => delay)
      .filter((delay): delay is number => typeof delay === "number");
    expect(delaysAfterAdd.some((delay) => delay === 60_000)).toBe(true);
    cron.stop();
    timeoutSpy.mockRestore();
    await store.cleanup();
  });
  // With state.running already set, onTimer must not arm a zero-delay timer
  // for the still-due job (which would busy-loop).
  it("does not hot-loop zero-delay timers while a run is already in progress", async () => {
    const timeoutSpy = vi.spyOn(globalThis, "setTimeout");
    const store = await makeStorePath();
    const now = Date.parse("2026-02-06T10:05:00.000Z");
    const state = createCronServiceState({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      nowMs: () => now,
      enqueueSystemEvent: vi.fn(),
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn().mockResolvedValue({ status: "ok", summary: "ok" }),
    });
    state.running = true;
    state.store = {
      version: 1,
      jobs: [createDueIsolatedJob({ id: "due", nowMs: now, nextRunAtMs: now - 1 })],
    };
    await onTimer(state);
    expect(timeoutSpy).not.toHaveBeenCalled();
    expect(state.timer).toBeNull();
    timeoutSpy.mockRestore();
    await store.cleanup();
  });
  // Uses real timers: a manual "force" run during an in-flight timer run
  // must be rejected with reason "already-running", not queued or doubled.
  it("skips forced manual runs while a timer-triggered run is in progress", async () => {
    vi.useRealTimers();
    const store = await makeStorePath();
    let resolveRun:
      | ((value: { status: "ok" | "error" | "skipped"; summary?: string; error?: string }) => void)
      | undefined;
    // The isolated run blocks until the test resolves it explicitly.
    const runIsolatedAgentJob = vi.fn(
      async () =>
        await new Promise<{ status: "ok" | "error" | "skipped"; summary?: string; error?: string }>(
          (resolve) => {
            resolveRun = resolve;
          },
        ),
    );
    const cron = new CronService({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      enqueueSystemEvent: vi.fn(),
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob,
    });
    await cron.start();
    const runAt = Date.now() + 30;
    const job = await cron.add({
      name: "timer-overlap",
      enabled: true,
      schedule: { kind: "at", at: new Date(runAt).toISOString() },
      sessionTarget: "isolated",
      wakeMode: "next-heartbeat",
      payload: { kind: "agentTurn", message: "long task" },
      delivery: { mode: "none" },
    });
    // Poll (real time) until the timer-triggered run has started.
    for (let i = 0; i < 25 && runIsolatedAgentJob.mock.calls.length === 0; i++) {
      await delay(20);
    }
    expect(runIsolatedAgentJob).toHaveBeenCalledTimes(1);
    const manualResult = await cron.run(job.id, "force");
    expect(manualResult).toEqual({ ok: true, ran: false, reason: "already-running" });
    expect(runIsolatedAgentJob).toHaveBeenCalledTimes(1);
    resolveRun?.({ status: "ok", summary: "done" });
    // Let the run finish before stopping so teardown is clean.
    for (let i = 0; i < 25; i++) {
      const jobs = await cron.list({ includeDisabled: true });
      if (jobs.some((j) => j.id === job.id && j.state.lastStatus === "ok")) {
        break;
      }
      await delay(20);
    }
    cron.stop();
    await store.cleanup();
  });
  // When two jobs come due in the same tick, each must get its own start
  // timestamp and duration instead of sharing the batch's start time.
  it("records per-job start time and duration for batched due jobs", async () => {
    const store = await makeStorePath();
    const dueAt = Date.parse("2026-02-06T10:05:01.000Z");
    const first = createDueIsolatedJob({ id: "batch-first", nowMs: dueAt, nextRunAtMs: dueAt });
    const second = createDueIsolatedJob({ id: "batch-second", nowMs: dueAt, nextRunAtMs: dueAt });
    await fs.writeFile(
      store.storePath,
      JSON.stringify({ version: 1, jobs: [first, second] }, null, 2),
      "utf-8",
    );
    // Manual clock advanced by the mock run below (50ms then 20ms).
    let now = dueAt;
    const events: CronEvent[] = [];
    const state = createCronServiceState({
      cronEnabled: true,
      storePath: store.storePath,
      log: noopLogger,
      nowMs: () => now,
      enqueueSystemEvent: vi.fn(),
      requestHeartbeatNow: vi.fn(),
      onEvent: (evt) => {
        events.push(evt);
      },
      runIsolatedAgentJob: vi.fn(async (params: { job: { id: string } }) => {
        now += params.job.id === first.id ? 50 : 20;
        return { status: "ok" as const, summary: "ok" };
      }),
    });
    await onTimer(state);
    const jobs = state.store?.jobs ?? [];
    const firstDone = jobs.find((job) => job.id === first.id);
    const secondDone = jobs.find((job) => job.id === second.id);
    const startedAtEvents = events
      .filter((evt) => evt.action === "started")
      .map((evt) => evt.runAtMs);
    expect(firstDone?.state.lastRunAtMs).toBe(dueAt);
    expect(firstDone?.state.lastDurationMs).toBe(50);
    expect(secondDone?.state.lastRunAtMs).toBe(dueAt + 50);
    expect(secondDone?.state.lastDurationMs).toBe(20);
    expect(startedAtEvents).toEqual([dueAt, dueAt + 50]);
    await store.cleanup();
  });
});

View File

@@ -0,0 +1,104 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { setTimeout as delay } from "node:timers/promises";
import { describe, expect, it, vi } from "vitest";
import { CronService } from "./service.js";
// Silent logger stub passed to CronService; assertions go through mocks.
const noopLogger = {
  debug: vi.fn(),
  info: vi.fn(),
  warn: vi.fn(),
  error: vi.fn(),
};
/**
 * Provision a unique temp directory for the cron store.
 * Returns the jobs.json path plus a cleanup callback that removes the tree.
 */
async function makeStorePath() {
  const base = path.join(os.tmpdir(), "openclaw-cron-");
  const tempRoot = await fs.mkdtemp(base);
  async function cleanup() {
    await fs.rm(tempRoot, { recursive: true, force: true });
  }
  return { storePath: path.join(tempRoot, "cron", "jobs.json"), cleanup };
}
// Verifies that read operations (list/status) are not serialized behind a
// long-running isolated job — they must resolve promptly while a run is
// blocked, and the job must still finish normally afterwards.
describe("CronService read ops while job is running", () => {
  it("keeps list and status responsive during a long isolated run", async () => {
    const store = await makeStorePath();
    const enqueueSystemEvent = vi.fn();
    const requestHeartbeatNow = vi.fn();
    let resolveRun:
      | ((value: { status: "ok" | "error" | "skipped"; summary?: string; error?: string }) => void)
      | undefined;
    // The isolated run blocks until the test resolves it explicitly.
    const runIsolatedAgentJob = vi.fn(
      async () =>
        await new Promise<{ status: "ok" | "error" | "skipped"; summary?: string; error?: string }>(
          (resolve) => {
            resolveRun = resolve;
          },
        ),
    );
    const cron = new CronService({
      storePath: store.storePath,
      cronEnabled: true,
      log: noopLogger,
      enqueueSystemEvent,
      requestHeartbeatNow,
      runIsolatedAgentJob,
    });
    await cron.start();
    const runAt = Date.now() + 30;
    await cron.add({
      name: "slow isolated",
      enabled: true,
      deleteAfterRun: false,
      schedule: { kind: "at", at: new Date(runAt).toISOString() },
      sessionTarget: "isolated",
      wakeMode: "next-heartbeat",
      payload: { kind: "agentTurn", message: "long task" },
      delivery: { mode: "none" },
    });
    // Poll (real time) until the timer-triggered run has started.
    for (let i = 0; i < 25 && runIsolatedAgentJob.mock.calls.length === 0; i++) {
      await delay(20);
    }
    expect(runIsolatedAgentJob).toHaveBeenCalledTimes(1);
    // list() must win the race against a 200ms timeout while the run hangs.
    const listRace = await Promise.race([
      cron.list({ includeDisabled: true }).then(() => "ok"),
      delay(200).then(() => "timeout"),
    ]);
    expect(listRace).toBe("ok");
    const statusRace = await Promise.race([
      cron.status().then(() => "ok"),
      delay(200).then(() => "timeout"),
    ]);
    expect(statusRace).toBe("ok");
    const running = await cron.list({ includeDisabled: true });
    expect(running[0]?.state.runningAtMs).toBeTypeOf("number");
    resolveRun?.({ status: "ok", summary: "done" });
    // Wait for the unblocked run to be recorded before final assertions.
    for (let i = 0; i < 25; i++) {
      const jobs = await cron.list({ includeDisabled: true });
      if (jobs[0]?.state.lastStatus === "ok") {
        break;
      }
      await delay(20);
    }
    const finished = await cron.list({ includeDisabled: true });
    expect(finished[0]?.state.lastStatus).toBe("ok");
    cron.stop();
    await store.cleanup();
  });
});

View File

@@ -0,0 +1,165 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { CronService } from "./service.js";
// Silent logger spies; beforeEach clears them so warn-call assertions in
// this suite see only the current test's output.
const noopLogger = {
  debug: vi.fn(),
  info: vi.fn(),
  warn: vi.fn(),
  error: vi.fn(),
};
/**
 * Build a per-test cron store path under a fresh temp directory and return
 * it with a cleanup function that deletes the directory tree.
 */
async function makeStorePath() {
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-cron-"));
  return {
    storePath: [tempRoot, "cron", "jobs.json"].reduce((acc, part) => path.join(acc, part)),
    cleanup: async () => fs.rm(tempRoot, { recursive: true, force: true }).then(() => undefined),
  };
}
// After a restart, jobs whose nextRunAtMs fell in the downtime window must
// be caught up immediately, and stale `runningAtMs` markers left by a crash
// must be cleared. Fake timers pin "now" to 17:00 UTC.
describe("CronService restart catch-up", () => {
  beforeEach(() => {
    vi.useFakeTimers();
    vi.setSystemTime(new Date("2025-12-13T17:00:00.000Z"));
    // Reset shared logger spies so warn-assertions see only this test.
    noopLogger.debug.mockClear();
    noopLogger.info.mockClear();
    noopLogger.warn.mockClear();
    noopLogger.error.mockClear();
  });
  afterEach(() => {
    vi.useRealTimers();
  });
  // A daily job due at 15:00 that was missed must fire during start().
  it("executes an overdue recurring job immediately on start", async () => {
    const store = await makeStorePath();
    const enqueueSystemEvent = vi.fn();
    const requestHeartbeatNow = vi.fn();
    const dueAt = Date.parse("2025-12-13T15:00:00.000Z");
    const lastRunAt = Date.parse("2025-12-12T15:00:00.000Z");
    await fs.mkdir(path.dirname(store.storePath), { recursive: true });
    // Seed the persisted store before the service boots.
    await fs.writeFile(
      store.storePath,
      JSON.stringify(
        {
          version: 1,
          jobs: [
            {
              id: "restart-overdue-job",
              name: "daily digest",
              enabled: true,
              createdAtMs: Date.parse("2025-12-10T12:00:00.000Z"),
              updatedAtMs: Date.parse("2025-12-12T15:00:00.000Z"),
              schedule: { kind: "cron", expr: "0 15 * * *", tz: "UTC" },
              sessionTarget: "main",
              wakeMode: "next-heartbeat",
              payload: { kind: "systemEvent", text: "digest now" },
              state: {
                nextRunAtMs: dueAt,
                lastRunAtMs: lastRunAt,
                lastStatus: "ok",
              },
            },
          ],
        },
        null,
        2,
      ),
      "utf-8",
    );
    const cron = new CronService({
      storePath: store.storePath,
      cronEnabled: true,
      log: noopLogger,
      enqueueSystemEvent,
      requestHeartbeatNow,
      runIsolatedAgentJob: vi.fn(async () => ({ status: "ok" })),
    });
    await cron.start();
    expect(enqueueSystemEvent).toHaveBeenCalledWith("digest now", { agentId: undefined });
    expect(requestHeartbeatNow).toHaveBeenCalled();
    const jobs = await cron.list({ includeDisabled: true });
    const updated = jobs.find((job) => job.id === "restart-overdue-job");
    expect(updated?.state.lastStatus).toBe("ok");
    // The catch-up run is stamped with the current (restart) time...
    expect(updated?.state.lastRunAtMs).toBe(Date.parse("2025-12-13T17:00:00.000Z"));
    // ...and the schedule is advanced past it.
    expect(updated?.state.nextRunAtMs).toBeGreaterThan(Date.parse("2025-12-13T17:00:00.000Z"));
    cron.stop();
    await store.cleanup();
  });
  // A runningAtMs marker persisted by a crashed process must be cleared
  // (with a warning) and the overdue job still caught up.
  it("clears stale running markers and catches up overdue jobs on startup", async () => {
    const store = await makeStorePath();
    const enqueueSystemEvent = vi.fn();
    const requestHeartbeatNow = vi.fn();
    const dueAt = Date.parse("2025-12-13T16:00:00.000Z");
    const staleRunningAt = Date.parse("2025-12-13T16:30:00.000Z");
    await fs.mkdir(path.dirname(store.storePath), { recursive: true });
    await fs.writeFile(
      store.storePath,
      JSON.stringify(
        {
          version: 1,
          jobs: [
            {
              id: "restart-stale-running",
              name: "daily stale marker",
              enabled: true,
              createdAtMs: Date.parse("2025-12-10T12:00:00.000Z"),
              updatedAtMs: Date.parse("2025-12-13T16:30:00.000Z"),
              schedule: { kind: "cron", expr: "0 16 * * *", tz: "UTC" },
              sessionTarget: "main",
              wakeMode: "next-heartbeat",
              payload: { kind: "systemEvent", text: "resume stale marker" },
              state: {
                nextRunAtMs: dueAt,
                runningAtMs: staleRunningAt,
              },
            },
          ],
        },
        null,
        2,
      ),
      "utf-8",
    );
    const cron = new CronService({
      storePath: store.storePath,
      cronEnabled: true,
      log: noopLogger,
      enqueueSystemEvent,
      requestHeartbeatNow,
      runIsolatedAgentJob: vi.fn(async () => ({ status: "ok" })),
    });
    await cron.start();
    expect(enqueueSystemEvent).toHaveBeenCalledWith("resume stale marker", { agentId: undefined });
    expect(noopLogger.warn).toHaveBeenCalledWith(
      expect.objectContaining({ jobId: "restart-stale-running" }),
      "cron: clearing stale running marker on startup",
    );
    const jobs = await cron.list({ includeDisabled: true });
    const updated = jobs.find((job) => job.id === "restart-stale-running");
    expect(updated?.state.runningAtMs).toBeUndefined();
    expect(updated?.state.lastStatus).toBe("ok");
    expect(updated?.state.lastRunAtMs).toBe(Date.parse("2025-12-13T17:00:00.000Z"));
    cron.stop();
    await store.cleanup();
  });
});

View File

@@ -3,6 +3,7 @@ import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { HeartbeatRunResult } from "../infra/heartbeat-wake.js";
import type { CronJob } from "./types.js";
import { CronService } from "./service.js";
const noopLogger = {
@@ -22,6 +23,18 @@ async function makeStorePath() {
};
}
/**
 * Poll cron.list (including disabled jobs) until `predicate` accepts the
 * snapshot, flushing pending fake timers between attempts. After 30 tries
 * the last snapshot seen is returned even if the predicate never matched.
 */
async function waitForJobs(cron: CronService, predicate: (jobs: CronJob[]) => boolean) {
  let snapshot: CronJob[] = [];
  for (let attempt = 0; attempt < 30; attempt += 1) {
    snapshot = await cron.list({ includeDisabled: true });
    if (predicate(snapshot)) {
      break;
    }
    await vi.runOnlyPendingTimersAsync();
  }
  return snapshot;
}
describe("CronService", () => {
beforeEach(() => {
vi.useFakeTimers();
@@ -67,7 +80,9 @@ describe("CronService", () => {
vi.setSystemTime(new Date("2025-12-13T00:00:02.000Z"));
await vi.runOnlyPendingTimersAsync();
const jobs = await cron.list({ includeDisabled: true });
const jobs = await waitForJobs(cron, (items) =>
items.some((item) => item.id === job.id && !item.enabled),
);
const updated = jobs.find((j) => j.id === job.id);
expect(updated?.enabled).toBe(false);
expect(enqueueSystemEvent).toHaveBeenCalledWith("hello", {
@@ -108,7 +123,7 @@ describe("CronService", () => {
vi.setSystemTime(new Date("2025-12-13T00:00:02.000Z"));
await vi.runOnlyPendingTimersAsync();
const jobs = await cron.list({ includeDisabled: true });
const jobs = await waitForJobs(cron, (items) => !items.some((item) => item.id === job.id));
expect(jobs.find((j) => j.id === job.id)).toBeUndefined();
expect(enqueueSystemEvent).toHaveBeenCalledWith("hello", {
agentId: undefined,
@@ -185,6 +200,49 @@ describe("CronService", () => {
await store.cleanup();
});
  // When the main lane keeps reporting "requests-in-flight", a wakeMode=now
  // job must degrade gracefully: try runHeartbeatOnce, then fall back to a
  // queued heartbeat request, and still record the run as ok.
  it("wakeMode now falls back to queued heartbeat when main lane stays busy", async () => {
    const store = await makeStorePath();
    const enqueueSystemEvent = vi.fn();
    const requestHeartbeatNow = vi.fn();
    // Immediate heartbeat always reports the lane as busy.
    const runHeartbeatOnce = vi.fn(async () => ({
      status: "skipped" as const,
      reason: "requests-in-flight",
    }));
    const cron = new CronService({
      storePath: store.storePath,
      cronEnabled: true,
      log: noopLogger,
      enqueueSystemEvent,
      requestHeartbeatNow,
      runHeartbeatOnce,
      runIsolatedAgentJob: vi.fn(async () => ({ status: "ok" })),
    });
    await cron.start();
    const job = await cron.add({
      name: "wakeMode now fallback",
      enabled: true,
      schedule: { kind: "at", at: new Date(1).toISOString() },
      sessionTarget: "main",
      wakeMode: "now",
      payload: { kind: "systemEvent", text: "hello" },
    });
    const runPromise = cron.run(job.id, "force");
    // Advance fake time past the retry window so the fallback path engages.
    await vi.advanceTimersByTimeAsync(125_000);
    await runPromise;
    expect(runHeartbeatOnce).toHaveBeenCalled();
    expect(requestHeartbeatNow).toHaveBeenCalled();
    expect(job.state.lastStatus).toBe("ok");
    expect(job.state.lastError).toBeUndefined();
    await cron.list({ includeDisabled: true });
    cron.stop();
    await store.cleanup();
  });
it("runs an isolated job and posts summary to main", async () => {
const store = await makeStorePath();
const enqueueSystemEvent = vi.fn();
@@ -218,7 +276,7 @@ describe("CronService", () => {
vi.setSystemTime(new Date("2025-12-13T00:00:01.000Z"));
await vi.runOnlyPendingTimersAsync();
await cron.list({ includeDisabled: true });
await waitForJobs(cron, (items) => items.some((item) => item.state.lastStatus === "ok"));
expect(runIsolatedAgentJob).toHaveBeenCalledTimes(1);
expect(enqueueSystemEvent).toHaveBeenCalledWith("Cron: done", {
agentId: undefined,
@@ -366,7 +424,7 @@ describe("CronService", () => {
vi.setSystemTime(new Date("2025-12-13T00:00:01.000Z"));
await vi.runOnlyPendingTimersAsync();
await cron.list({ includeDisabled: true });
await waitForJobs(cron, (items) => items.some((item) => item.state.lastStatus === "error"));
expect(enqueueSystemEvent).toHaveBeenCalledWith("Cron (error): last output", {
agentId: undefined,
@@ -460,7 +518,7 @@ describe("CronService", () => {
expect(enqueueSystemEvent).not.toHaveBeenCalled();
expect(requestHeartbeatNow).not.toHaveBeenCalled();
const jobs = await cron.list({ includeDisabled: true });
const jobs = await waitForJobs(cron, (items) => items[0]?.state.lastStatus === "skipped");
expect(jobs[0]?.state.lastStatus).toBe("skipped");
expect(jobs[0]?.state.lastError).toMatch(/main job requires/i);

View File

@@ -2,6 +2,7 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { CronJob } from "./types.js";
import { CronService } from "./service.js";
const noopLogger = {
@@ -21,6 +22,22 @@ async function makeStorePath() {
};
}
/**
 * Poll until `predicate` accepts the first job in the cron listing, flushing
 * pending fake timers between polls. Gives up after 30 attempts, returning
 * whatever was last observed (possibly undefined).
 */
async function waitForFirstJob(
  cron: CronService,
  predicate: (job: CronJob | undefined) => boolean,
) {
  let head: CronJob | undefined;
  let remaining = 30;
  while (remaining > 0) {
    remaining -= 1;
    const listing = await cron.list({ includeDisabled: true });
    head = listing[0];
    if (predicate(head)) {
      return head;
    }
    await vi.runOnlyPendingTimersAsync();
  }
  return head;
}
describe("CronService", () => {
beforeEach(() => {
vi.useFakeTimers();
@@ -66,9 +83,9 @@ describe("CronService", () => {
expect(enqueueSystemEvent).not.toHaveBeenCalled();
expect(requestHeartbeatNow).not.toHaveBeenCalled();
const jobs = await cron.list({ includeDisabled: true });
expect(jobs[0]?.state.lastStatus).toBe("skipped");
expect(jobs[0]?.state.lastError).toMatch(/non-empty/i);
const job = await waitForFirstJob(cron, (current) => current?.state.lastStatus === "skipped");
expect(job?.state.lastStatus).toBe("skipped");
expect(job?.state.lastError).toMatch(/non-empty/i);
cron.stop();
await store.cleanup();

View File

@@ -0,0 +1,124 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { CronService } from "./service.js";
// Silent logger spies; cleared in beforeEach so each migration test starts
// with empty call histories.
const noopLogger = {
  debug: vi.fn(),
  info: vi.fn(),
  warn: vi.fn(),
  error: vi.fn(),
};
/**
 * Create an isolated temp home for the cron store.
 * Returns the jobs.json path plus a cleanup callback removing everything.
 */
async function makeStorePath() {
  const root = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-cron-"));
  const storePath = path.join(root, "cron", "jobs.json");
  return {
    storePath,
    cleanup: async () => {
      await fs.rm(root, { force: true, recursive: true });
    },
  };
}
// Exercises the on-load migration path: a legacy v1 store file that keeps
// model/thinking/timeout/delivery fields at the job top level (and has no
// `state` object) must be rewritten into the nested payload/delivery shape
// and persisted back to disk.
describe("CronService store migrations", () => {
  beforeEach(() => {
    // Deterministic clock so schedule computations are stable across runs.
    vi.useFakeTimers();
    vi.setSystemTime(new Date("2026-02-06T17:00:00.000Z"));
    noopLogger.debug.mockClear();
    noopLogger.info.mockClear();
    noopLogger.warn.mockClear();
    noopLogger.error.mockClear();
  });
  afterEach(() => {
    vi.useRealTimers();
  });
  it("migrates legacy top-level agentTurn fields and initializes missing state", async () => {
    const store = await makeStorePath();
    // Seed a legacy-format store file directly on disk.
    await fs.mkdir(path.dirname(store.storePath), { recursive: true });
    await fs.writeFile(
      store.storePath,
      JSON.stringify(
        {
          version: 1,
          jobs: [
            {
              id: "legacy-agentturn-job",
              name: "legacy agentturn",
              enabled: true,
              createdAtMs: Date.parse("2026-02-01T12:00:00.000Z"),
              updatedAtMs: Date.parse("2026-02-05T12:00:00.000Z"),
              schedule: { kind: "cron", expr: "0 23 * * *", tz: "UTC" },
              sessionTarget: "isolated",
              wakeMode: "next-heartbeat",
              model: "openrouter/deepseek/deepseek-r1",
              thinking: "high",
              timeoutSeconds: 120,
              allowUnsafeExternalContent: true,
              deliver: true,
              channel: "telegram",
              to: "12345",
              bestEffortDeliver: true,
              payload: { kind: "agentTurn", message: "legacy payload fields" },
            },
          ],
        },
        null,
        2,
      ),
      "utf-8",
    );
    // Starting the service loads the store, which runs the migration.
    const cron = new CronService({
      storePath: store.storePath,
      cronEnabled: true,
      log: noopLogger,
      enqueueSystemEvent: vi.fn(),
      requestHeartbeatNow: vi.fn(),
      runIsolatedAgentJob: vi.fn(async () => ({ status: "ok", summary: "ok" })),
    });
    await cron.start();
    const status = await cron.status();
    expect(status.enabled).toBe(true);
    // In-memory view: legacy fields must now live inside payload/delivery.
    const jobs = await cron.list({ includeDisabled: true });
    const job = jobs.find((entry) => entry.id === "legacy-agentturn-job");
    expect(job).toBeDefined();
    expect(job?.state).toBeDefined();
    expect(job?.sessionTarget).toBe("isolated");
    expect(job?.payload.kind).toBe("agentTurn");
    if (job?.payload.kind === "agentTurn") {
      expect(job.payload.model).toBe("openrouter/deepseek/deepseek-r1");
      expect(job.payload.thinking).toBe("high");
      expect(job.payload.timeoutSeconds).toBe(120);
      expect(job.payload.allowUnsafeExternalContent).toBe(true);
    }
    expect(job?.delivery).toEqual({
      mode: "announce",
      channel: "telegram",
      to: "12345",
      bestEffort: true,
    });
    // Persisted view: re-read the raw file to confirm the migration was
    // written back and the legacy top-level fields were stripped.
    const persisted = JSON.parse(await fs.readFile(store.storePath, "utf-8")) as {
      jobs: Array<Record<string, unknown>>;
    };
    const persistedJob = persisted.jobs.find((entry) => entry.id === "legacy-agentturn-job");
    expect(persistedJob).toBeDefined();
    expect(persistedJob?.state).toEqual(expect.any(Object));
    expect(persistedJob?.model).toBeUndefined();
    expect(persistedJob?.thinking).toBeUndefined();
    expect(persistedJob?.timeoutSeconds).toBeUndefined();
    expect(persistedJob?.deliver).toBeUndefined();
    expect(persistedJob?.channel).toBeUndefined();
    expect(persistedJob?.to).toBeUndefined();
    expect(persistedJob?.bestEffortDeliver).toBeUndefined();
    cron.stop();
    await store.cleanup();
  });
});

View File

@@ -98,4 +98,49 @@ describe("cron store migration", () => {
await store.cleanup();
});
it("adds anchorMs to legacy every schedules", async () => {
  // Legacy "every" schedules were stored without an anchor timestamp; on
  // load the service must backfill anchorMs from the job's createdAtMs so
  // the recurrence grid stays stable across restarts.
  const store = await makeStorePath();
  const createdAtMs = 1_700_000_000_000;
  const legacyJob = {
    id: "job-every-legacy",
    agentId: undefined,
    name: "Legacy every",
    description: null,
    enabled: true,
    deleteAfterRun: false,
    createdAtMs,
    updatedAtMs: createdAtMs,
    schedule: { kind: "every", everyMs: 120_000 },
    sessionTarget: "main",
    wakeMode: "next-heartbeat",
    payload: {
      kind: "systemEvent",
      text: "tick",
    },
    state: {},
  };
  await fs.mkdir(path.dirname(store.storePath), { recursive: true });
  await fs.writeFile(store.storePath, JSON.stringify({ version: 1, jobs: [legacyJob] }, null, 2));
  // Starting the service triggers the load-time migration and persists the
  // normalized store.
  const cron = new CronService({
    storePath: store.storePath,
    cronEnabled: true,
    log: noopLogger,
    enqueueSystemEvent: vi.fn(),
    requestHeartbeatNow: vi.fn(),
    runIsolatedAgentJob: vi.fn(async () => ({ status: "ok" })),
  });
  await cron.start();
  cron.stop();
  // Re-read from disk to confirm the migration was persisted, not just
  // applied in memory.
  const loaded = await loadCronStore(store.storePath);
  const migrated = loaded.jobs[0] as Record<string, unknown>;
  const schedule = migrated.schedule as Record<string, unknown>;
  expect(schedule.kind).toBe("every");
  expect(schedule.anchorMs).toBe(createdAtMs);
  await store.cleanup();
});
});

View File

@@ -20,6 +20,17 @@ import {
const STUCK_RUN_MS = 2 * 60 * 60 * 1000;
/**
 * Pick the anchor timestamp for an "every" schedule: the schedule's own
 * finite anchorMs when present, otherwise the caller-supplied fallback.
 * The chosen value is floored and clamped to be non-negative.
 */
function resolveEveryAnchorMs(params: {
  schedule: { everyMs: number; anchorMs?: number };
  fallbackAnchorMs: number;
}) {
  const { anchorMs } = params.schedule;
  const candidate =
    typeof anchorMs === "number" && Number.isFinite(anchorMs)
      ? anchorMs
      : params.fallbackAnchorMs;
  return Math.max(0, Math.floor(candidate));
}
export function assertSupportedJobSpec(job: Pick<CronJob, "sessionTarget" | "payload">) {
if (job.sessionTarget === "main" && job.payload.kind !== "systemEvent") {
throw new Error('main cron jobs require payload.kind="systemEvent"');
@@ -47,6 +58,13 @@ export function computeJobNextRunAtMs(job: CronJob, nowMs: number): number | und
if (!job.enabled) {
return undefined;
}
if (job.schedule.kind === "every") {
const anchorMs = resolveEveryAnchorMs({
schedule: job.schedule,
fallbackAnchorMs: job.createdAtMs,
});
return computeNextRunAtMs({ ...job.schedule, anchorMs }, nowMs);
}
if (job.schedule.kind === "at") {
// One-shot jobs stay due until they successfully finish.
if (job.state.lastStatus === "ok" && job.state.lastRunAtMs) {
@@ -69,18 +87,26 @@ export function computeJobNextRunAtMs(job: CronJob, nowMs: number): number | und
return computeNextRunAtMs(job.schedule, nowMs);
}
export function recomputeNextRuns(state: CronServiceState) {
export function recomputeNextRuns(state: CronServiceState): boolean {
if (!state.store) {
return;
return false;
}
let changed = false;
const now = state.deps.nowMs();
for (const job of state.store.jobs) {
if (!job.state) {
job.state = {};
changed = true;
}
if (!job.enabled) {
job.state.nextRunAtMs = undefined;
job.state.runningAtMs = undefined;
if (job.state.nextRunAtMs !== undefined) {
job.state.nextRunAtMs = undefined;
changed = true;
}
if (job.state.runningAtMs !== undefined) {
job.state.runningAtMs = undefined;
changed = true;
}
continue;
}
const runningAt = job.state.runningAtMs;
@@ -90,9 +116,15 @@ export function recomputeNextRuns(state: CronServiceState) {
"cron: clearing stuck running marker",
);
job.state.runningAtMs = undefined;
changed = true;
}
const newNext = computeJobNextRunAtMs(job, now);
if (job.state.nextRunAtMs !== newNext) {
job.state.nextRunAtMs = newNext;
changed = true;
}
job.state.nextRunAtMs = computeJobNextRunAtMs(job, now);
}
return changed;
}
export function nextWakeAtMs(state: CronServiceState) {
@@ -110,10 +142,20 @@ export function nextWakeAtMs(state: CronServiceState) {
export function createJob(state: CronServiceState, input: CronJobCreate): CronJob {
const now = state.deps.nowMs();
const id = crypto.randomUUID();
const schedule =
input.schedule.kind === "every"
? {
...input.schedule,
anchorMs: resolveEveryAnchorMs({
schedule: input.schedule,
fallbackAnchorMs: now,
}),
}
: input.schedule;
const deleteAfterRun =
typeof input.deleteAfterRun === "boolean"
? input.deleteAfterRun
: input.schedule.kind === "at"
: schedule.kind === "at"
? true
: undefined;
const enabled = typeof input.enabled === "boolean" ? input.enabled : true;
@@ -126,7 +168,7 @@ export function createJob(state: CronServiceState, input: CronJobCreate): CronJo
deleteAfterRun,
createdAtMs: now,
updatedAtMs: now,
schedule: input.schedule,
schedule,
sessionTarget: input.sessionTarget,
wakeMode: input.wakeMode,
payload: input.payload,
@@ -223,6 +265,9 @@ function mergeCronPayload(existing: CronPayload, patch: CronPayloadPatch): CronP
if (typeof patch.timeoutSeconds === "number") {
next.timeoutSeconds = patch.timeoutSeconds;
}
if (typeof patch.allowUnsafeExternalContent === "boolean") {
next.allowUnsafeExternalContent = patch.allowUnsafeExternalContent;
}
if (typeof patch.deliver === "boolean") {
next.deliver = patch.deliver;
}
@@ -297,6 +342,7 @@ function buildPayloadFromPatch(patch: CronPayloadPatch): CronPayload {
model: patch.model,
thinking: patch.thinking,
timeoutSeconds: patch.timeoutSeconds,
allowUnsafeExternalContent: patch.allowUnsafeExternalContent,
deliver: patch.deliver,
channel: patch.channel,
to: patch.to,
@@ -334,6 +380,9 @@ function mergeCronDelivery(
}
export function isJobDue(job: CronJob, nowMs: number, opts: { forced: boolean }) {
if (typeof job.state.runningAtMs === "number") {
return false;
}
if (opts.forced) {
return true;
}

View File

@@ -11,7 +11,7 @@ import {
} from "./jobs.js";
import { locked } from "./locked.js";
import { ensureLoaded, persist, warnIfDisabled } from "./store.js";
import { armTimer, emit, executeJob, stopTimer, wake } from "./timer.js";
import { armTimer, emit, executeJob, runMissedJobs, stopTimer, wake } from "./timer.js";
export async function start(state: CronServiceState) {
await locked(state, async () => {
@@ -19,7 +19,18 @@ export async function start(state: CronServiceState) {
state.deps.log.info({ enabled: false }, "cron: disabled");
return;
}
await ensureLoaded(state);
await ensureLoaded(state, { skipRecompute: true });
const jobs = state.store?.jobs ?? [];
for (const job of jobs) {
if (typeof job.state.runningAtMs === "number") {
state.deps.log.warn(
{ jobId: job.id, runningAtMs: job.state.runningAtMs },
"cron: clearing stale running marker on startup",
);
job.state.runningAtMs = undefined;
}
}
await runMissedJobs(state);
recomputeNextRuns(state);
await persist(state);
armTimer(state);
@@ -40,7 +51,7 @@ export function stop(state: CronServiceState) {
export async function status(state: CronServiceState) {
return await locked(state, async () => {
await ensureLoaded(state);
await ensureLoaded(state, { skipRecompute: true });
return {
enabled: state.deps.cronEnabled,
storePath: state.deps.storePath,
@@ -52,7 +63,7 @@ export async function status(state: CronServiceState) {
export async function list(state: CronServiceState, opts?: { includeDisabled?: boolean }) {
return await locked(state, async () => {
await ensureLoaded(state);
await ensureLoaded(state, { skipRecompute: true });
const includeDisabled = opts?.includeDisabled === true;
const jobs = (state.store?.jobs ?? []).filter((j) => includeDisabled || j.enabled);
return jobs.toSorted((a, b) => (a.state.nextRunAtMs ?? 0) - (b.state.nextRunAtMs ?? 0));
@@ -83,6 +94,22 @@ export async function update(state: CronServiceState, id: string, patch: CronJob
const job = findJobOrThrow(state, id);
const now = state.deps.nowMs();
applyJobPatch(job, patch);
if (job.schedule.kind === "every") {
const anchor = job.schedule.anchorMs;
if (typeof anchor !== "number" || !Number.isFinite(anchor)) {
const patchSchedule = patch.schedule;
const fallbackAnchorMs =
patchSchedule?.kind === "every"
? now
: typeof job.createdAtMs === "number" && Number.isFinite(job.createdAtMs)
? job.createdAtMs
: now;
job.schedule = {
...job.schedule,
anchorMs: Math.max(0, Math.floor(fallbackAnchorMs)),
};
}
}
job.updatedAtMs = now;
if (job.enabled) {
job.state.nextRunAtMs = computeJobNextRunAtMs(job, now);
@@ -124,14 +151,18 @@ export async function remove(state: CronServiceState, id: string) {
export async function run(state: CronServiceState, id: string, mode?: "due" | "force") {
return await locked(state, async () => {
warnIfDisabled(state, "run");
await ensureLoaded(state);
await ensureLoaded(state, { skipRecompute: true });
const job = findJobOrThrow(state, id);
if (typeof job.state.runningAtMs === "number") {
return { ok: true, ran: false, reason: "already-running" as const };
}
const now = state.deps.nowMs();
const due = isJobDue(job, now, { forced: mode === "force" });
if (!due) {
return { ok: true, ran: false, reason: "not-due" as const };
}
await executeJob(state, job, now, { forced: mode === "force" });
recomputeNextRuns(state);
await persist(state);
armTimer(state);
return { ok: true, ran: true } as const;

View File

@@ -9,6 +9,8 @@ export type CronEvent = {
status?: "ok" | "error" | "skipped";
error?: string;
summary?: string;
sessionId?: string;
sessionKey?: string;
nextRunAtMs?: number;
};
@@ -33,6 +35,8 @@ export type CronServiceDeps = {
/** Last non-empty agent text output (not truncated). */
outputText?: string;
error?: string;
sessionId?: string;
sessionKey?: string;
}>;
onEvent?: (evt: CronEvent) => void;
};
@@ -78,6 +82,7 @@ export type CronStatusSummary = {
export type CronRunResult =
| { ok: true; ran: true }
| { ok: true; ran: false; reason: "not-due" }
| { ok: true; ran: false; reason: "already-running" }
| { ok: false };
export type CronRemoveResult = { ok: true; removed: boolean } | { ok: false; removed: false };

View File

@@ -117,6 +117,141 @@ function stripLegacyDeliveryFields(payload: Record<string, unknown>) {
}
}
/**
 * Canonicalize a case/whitespace-mangled payload kind ("AgentTurn ",
 * "SYSTEMEVENT", …) to its exact form. Returns true when a recognized kind
 * was written back, false when the kind is absent or unrecognized.
 */
function normalizePayloadKind(payload: Record<string, unknown>) {
  const canonicalByLower: Record<string, string> = {
    agentturn: "agentTurn",
    systemevent: "systemEvent",
  };
  const key =
    typeof payload.kind === "string" ? payload.kind.trim().toLowerCase() : "";
  const canonical = canonicalByLower[key];
  if (canonical === undefined) {
    return false;
  }
  payload.kind = canonical;
  return true;
}
/**
 * Legacy jobs stored their message/text at the job top level with no payload
 * object at all. Synthesize the modern payload from those fields — `message`
 * wins over `text` — and report whether anything was written.
 */
function inferPayloadIfMissing(raw: Record<string, unknown>) {
  const trimmedField = (field: "message" | "text"): string => {
    const value = raw[field];
    return typeof value === "string" ? value.trim() : "";
  };
  const message = trimmedField("message");
  if (message) {
    raw.payload = { kind: "agentTurn", message };
    return true;
  }
  const text = trimmedField("text");
  if (text) {
    raw.payload = { kind: "systemEvent", text };
    return true;
  }
  return false;
}
/**
 * Migrate legacy top-level agentTurn fields (model, thinking, timeout,
 * delivery options, …) into the payload object. Existing payload values are
 * never overwritten; string inputs are trimmed and timeoutSeconds is floored
 * with a minimum of 1. Returns true when anything was copied.
 */
function copyTopLevelAgentTurnFields(
  raw: Record<string, unknown>,
  payload: Record<string, unknown>,
) {
  let mutated = false;

  // model/thinking: only skipped when payload already has a non-blank string.
  for (const field of ["model", "thinking"] as const) {
    const current = payload[field];
    const alreadySet = typeof current === "string" && current.trim() !== "";
    const candidate = raw[field];
    if (!alreadySet && typeof candidate === "string" && candidate.trim() !== "") {
      payload[field] = candidate.trim();
      mutated = true;
    }
  }

  if (
    typeof payload.timeoutSeconds !== "number" &&
    typeof raw.timeoutSeconds === "number" &&
    Number.isFinite(raw.timeoutSeconds)
  ) {
    payload.timeoutSeconds = Math.max(1, Math.floor(raw.timeoutSeconds));
    mutated = true;
  }

  // boolean flags: skipped only when payload already holds a boolean.
  for (const field of [
    "allowUnsafeExternalContent",
    "deliver",
    "bestEffortDeliver",
  ] as const) {
    if (typeof payload[field] !== "boolean" && typeof raw[field] === "boolean") {
      payload[field] = raw[field];
      mutated = true;
    }
  }

  // delivery strings: skipped when payload already holds any string,
  // even a blank one (matches the historical migration behaviour).
  for (const field of ["channel", "to", "provider"] as const) {
    const candidate = raw[field];
    if (
      typeof payload[field] !== "string" &&
      typeof candidate === "string" &&
      candidate.trim() !== ""
    ) {
      payload[field] = candidate.trim();
      mutated = true;
    }
  }

  return mutated;
}
/**
 * Delete every legacy top-level field from a job record once it has been
 * migrated into payload/delivery. Mutates `raw` in place.
 */
function stripLegacyTopLevelFields(raw: Record<string, unknown>) {
  const legacyFields = [
    "model",
    "thinking",
    "timeoutSeconds",
    "allowUnsafeExternalContent",
    "message",
    "text",
    "deliver",
    "channel",
    "to",
    "bestEffortDeliver",
    "provider",
  ] as const;
  for (const field of legacyFields) {
    if (field in raw) {
      delete raw[field];
    }
  }
}
async function getFileMtimeMs(path: string): Promise<number | null> {
try {
const stats = await fs.promises.stat(path);
@@ -148,6 +283,12 @@ export async function ensureLoaded(
const jobs = (loaded.jobs ?? []) as unknown as Array<Record<string, unknown>>;
let mutated = false;
for (const raw of jobs) {
const state = raw.state;
if (!state || typeof state !== "object" || Array.isArray(state)) {
raw.state = {};
mutated = true;
}
const nameRaw = raw.name;
if (typeof nameRaw !== "string" || nameRaw.trim().length === 0) {
raw.name = inferLegacyName({
@@ -171,8 +312,57 @@ export async function ensureLoaded(
}
const payload = raw.payload;
if (payload && typeof payload === "object" && !Array.isArray(payload)) {
if (migrateLegacyCronPayload(payload as Record<string, unknown>)) {
if (
(!payload || typeof payload !== "object" || Array.isArray(payload)) &&
inferPayloadIfMissing(raw)
) {
mutated = true;
}
const payloadRecord =
raw.payload && typeof raw.payload === "object" && !Array.isArray(raw.payload)
? (raw.payload as Record<string, unknown>)
: null;
if (payloadRecord) {
if (normalizePayloadKind(payloadRecord)) {
mutated = true;
}
if (!payloadRecord.kind) {
if (typeof payloadRecord.message === "string" && payloadRecord.message.trim()) {
payloadRecord.kind = "agentTurn";
mutated = true;
} else if (typeof payloadRecord.text === "string" && payloadRecord.text.trim()) {
payloadRecord.kind = "systemEvent";
mutated = true;
}
}
if (payloadRecord.kind === "agentTurn") {
if (copyTopLevelAgentTurnFields(raw, payloadRecord)) {
mutated = true;
}
}
}
const hadLegacyTopLevelFields =
"model" in raw ||
"thinking" in raw ||
"timeoutSeconds" in raw ||
"allowUnsafeExternalContent" in raw ||
"message" in raw ||
"text" in raw ||
"deliver" in raw ||
"channel" in raw ||
"to" in raw ||
"bestEffortDeliver" in raw ||
"provider" in raw;
if (hadLegacyTopLevelFields) {
stripLegacyTopLevelFields(raw);
mutated = true;
}
if (payloadRecord) {
if (migrateLegacyCronPayload(payloadRecord)) {
mutated = true;
}
}
@@ -202,6 +392,27 @@ export async function ensureLoaded(
}
mutated = true;
}
const everyMsRaw = sched.everyMs;
const everyMs =
typeof everyMsRaw === "number" && Number.isFinite(everyMsRaw)
? Math.floor(everyMsRaw)
: null;
if ((kind === "every" || sched.kind === "every") && everyMs !== null) {
const anchorRaw = sched.anchorMs;
const normalizedAnchor =
typeof anchorRaw === "number" && Number.isFinite(anchorRaw)
? Math.max(0, Math.floor(anchorRaw))
: typeof raw.createdAtMs === "number" && Number.isFinite(raw.createdAtMs)
? Math.max(0, Math.floor(raw.createdAtMs))
: typeof raw.updatedAtMs === "number" && Number.isFinite(raw.updatedAtMs)
? Math.max(0, Math.floor(raw.updatedAtMs))
: null;
if (normalizedAnchor !== null && anchorRaw !== normalizedAnchor) {
sched.anchorMs = normalizedAnchor;
mutated = true;
}
}
}
const delivery = raw.delivery;
@@ -213,6 +424,11 @@ export async function ensureLoaded(
(delivery as { mode?: unknown }).mode = "announce";
mutated = true;
}
} else if (modeRaw === undefined || modeRaw === null) {
// Explicitly persist the default so existing jobs don't silently
// change behaviour when the runtime default shifts.
(delivery as { mode?: unknown }).mode = "announce";
mutated = true;
}
}
@@ -222,10 +438,6 @@ export async function ensureLoaded(
mutated = true;
}
const payloadRecord =
payload && typeof payload === "object" && !Array.isArray(payload)
? (payload as Record<string, unknown>)
: null;
const payloadKind =
payloadRecord && typeof payloadRecord.kind === "string" ? payloadRecord.kind : "";
const sessionTarget =

View File

@@ -1,6 +1,7 @@
import type { HeartbeatRunResult } from "../../infra/heartbeat-wake.js";
import type { CronJob } from "../types.js";
import type { CronEvent, CronServiceState } from "./state.js";
import { resolveCronDeliveryPlan } from "../delivery.js";
import {
computeJobNextRunAtMs,
nextWakeAtMs,
@@ -10,7 +11,7 @@ import {
import { locked } from "./locked.js";
import { ensureLoaded, persist } from "./store.js";
const MAX_TIMEOUT_MS = 2 ** 31 - 1;
const MAX_TIMER_DELAY_MS = 60_000;
export function armTimer(state: CronServiceState) {
if (state.timer) {
@@ -25,12 +26,15 @@ export function armTimer(state: CronServiceState) {
return;
}
const delay = Math.max(nextAt - state.deps.nowMs(), 0);
// Avoid TimeoutOverflowWarning when a job is far in the future.
const clampedDelay = Math.min(delay, MAX_TIMEOUT_MS);
state.timer = setTimeout(() => {
void onTimer(state).catch((err) => {
// Wake at least once a minute to avoid schedule drift and recover quickly
// when the process was paused or wall-clock time jumps.
const clampedDelay = Math.min(delay, MAX_TIMER_DELAY_MS);
state.timer = setTimeout(async () => {
try {
await onTimer(state);
} catch (err) {
state.deps.log.error({ err: String(err) }, "cron: timer tick failed");
});
}
}, clampedDelay);
}
@@ -40,22 +44,169 @@ export async function onTimer(state: CronServiceState) {
}
state.running = true;
try {
await locked(state, async () => {
// Reload persisted due-times without recomputing so runDueJobs sees
// the original nextRunAtMs values. Recomputing first would advance
// every/cron slots past the current tick when the timer fires late (#9788).
const dueJobs = await locked(state, async () => {
await ensureLoaded(state, { forceReload: true, skipRecompute: true });
await runDueJobs(state);
recomputeNextRuns(state);
const due = findDueJobs(state);
if (due.length === 0) {
const changed = recomputeNextRuns(state);
if (changed) {
await persist(state);
}
return [];
}
const now = state.deps.nowMs();
for (const job of due) {
job.state.runningAtMs = now;
job.state.lastError = undefined;
}
await persist(state);
return due.map((j) => ({
id: j.id,
job: j,
}));
});
const results: Array<{
jobId: string;
status: "ok" | "error" | "skipped";
error?: string;
summary?: string;
sessionId?: string;
sessionKey?: string;
startedAt: number;
endedAt: number;
}> = [];
for (const { id, job } of dueJobs) {
const startedAt = state.deps.nowMs();
job.state.runningAtMs = startedAt;
emit(state, { jobId: job.id, action: "started", runAtMs: startedAt });
try {
const result = await executeJobCore(state, job);
results.push({ jobId: id, ...result, startedAt, endedAt: state.deps.nowMs() });
} catch (err) {
results.push({
jobId: id,
status: "error",
error: String(err),
startedAt,
endedAt: state.deps.nowMs(),
});
}
}
if (results.length > 0) {
await locked(state, async () => {
await ensureLoaded(state, { forceReload: true, skipRecompute: true });
for (const result of results) {
const job = state.store?.jobs.find((j) => j.id === result.jobId);
if (!job) {
continue;
}
const startedAt = result.startedAt;
job.state.runningAtMs = undefined;
job.state.lastRunAtMs = startedAt;
job.state.lastStatus = result.status;
job.state.lastDurationMs = Math.max(0, result.endedAt - startedAt);
job.state.lastError = result.error;
const shouldDelete =
job.schedule.kind === "at" && result.status === "ok" && job.deleteAfterRun === true;
if (!shouldDelete) {
if (job.schedule.kind === "at" && result.status === "ok") {
job.enabled = false;
job.state.nextRunAtMs = undefined;
} else if (job.enabled) {
job.state.nextRunAtMs = computeJobNextRunAtMs(job, result.endedAt);
} else {
job.state.nextRunAtMs = undefined;
}
}
emit(state, {
jobId: job.id,
action: "finished",
status: result.status,
error: result.error,
summary: result.summary,
sessionId: result.sessionId,
sessionKey: result.sessionKey,
runAtMs: startedAt,
durationMs: job.state.lastDurationMs,
nextRunAtMs: job.state.nextRunAtMs,
});
if (shouldDelete && state.store) {
state.store.jobs = state.store.jobs.filter((j) => j.id !== job.id);
emit(state, { jobId: job.id, action: "removed" });
}
job.updatedAtMs = result.endedAt;
}
recomputeNextRuns(state);
await persist(state);
});
}
} finally {
state.running = false;
// Always re-arm so transient errors (e.g. ENOSPC) don't kill the scheduler.
armTimer(state);
}
}
/**
 * Collect jobs that are due right now: enabled, not already running, and
 * whose persisted nextRunAtMs has been reached. Returns [] when the store
 * has not been loaded yet.
 */
function findDueJobs(state: CronServiceState): CronJob[] {
  const store = state.store;
  if (!store) {
    return [];
  }
  const now = state.deps.nowMs();
  const isDue = (job: CronJob): boolean => {
    if (!job.enabled || typeof job.state.runningAtMs === "number") {
      return false;
    }
    const nextAt = job.state.nextRunAtMs;
    return typeof nextAt === "number" && nextAt <= now;
  };
  return store.jobs.filter(isDue);
}
export async function runMissedJobs(state: CronServiceState) {
if (!state.store) {
return;
}
const now = state.deps.nowMs();
const missed = state.store.jobs.filter((j) => {
if (!j.enabled) {
return false;
}
if (typeof j.state.runningAtMs === "number") {
return false;
}
const next = j.state.nextRunAtMs;
if (j.schedule.kind === "at" && j.state.lastStatus === "ok") {
return false;
}
return typeof next === "number" && now >= next;
});
if (missed.length > 0) {
state.deps.log.info(
{ count: missed.length, jobIds: missed.map((j) => j.id) },
"cron: running missed jobs after restart",
);
for (const job of missed) {
await executeJob(state, job, now, { forced: false });
}
}
}
export async function runDueJobs(state: CronServiceState) {
if (!state.store) {
return;
@@ -76,6 +227,99 @@ export async function runDueJobs(state: CronServiceState) {
}
}
/**
 * Run a single cron job and return the outcome without touching persisted
 * job state — callers are responsible for recording lastStatus/lastRunAtMs
 * and re-arming schedules.
 *
 * Main-session jobs are enqueued as system events (optionally forcing a
 * heartbeat); isolated jobs run via runIsolatedAgentJob with an optional
 * summary announced back to the main session.
 */
async function executeJobCore(
  state: CronServiceState,
  job: CronJob,
): Promise<{
  status: "ok" | "error" | "skipped";
  error?: string;
  summary?: string;
  sessionId?: string;
  sessionKey?: string;
}> {
  if (job.sessionTarget === "main") {
    const text = resolveJobPayloadTextForMain(job);
    if (!text) {
      // Main jobs only support non-empty systemEvent payloads; anything
      // else is skipped with a descriptive error.
      const kind = job.payload.kind;
      return {
        status: "skipped",
        error:
          kind === "systemEvent"
            ? "main job requires non-empty systemEvent text"
            : 'main job requires payload.kind="systemEvent"',
      };
    }
    state.deps.enqueueSystemEvent(text, { agentId: job.agentId });
    if (job.wakeMode === "now" && state.deps.runHeartbeatOnce) {
      const reason = `cron:${job.id}`;
      const delay = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));
      // Retry while the main lane is busy ("requests-in-flight"), polling
      // every 250ms for up to two minutes.
      const maxWaitMs = 2 * 60_000;
      const waitStartedAt = state.deps.nowMs();
      let heartbeatResult: HeartbeatRunResult;
      for (;;) {
        heartbeatResult = await state.deps.runHeartbeatOnce({ reason });
        if (
          heartbeatResult.status !== "skipped" ||
          heartbeatResult.reason !== "requests-in-flight"
        ) {
          break;
        }
        if (state.deps.nowMs() - waitStartedAt > maxWaitMs) {
          // Gave up waiting for an idle lane: fall back to requesting a
          // heartbeat and report success — the event is already enqueued.
          state.deps.requestHeartbeatNow({ reason });
          return { status: "ok", summary: text };
        }
        await delay(250);
      }
      if (heartbeatResult.status === "ran") {
        return { status: "ok", summary: text };
      } else if (heartbeatResult.status === "skipped") {
        return { status: "skipped", error: heartbeatResult.reason, summary: text };
      } else {
        return { status: "error", error: heartbeatResult.reason, summary: text };
      }
    } else {
      // wakeMode "next-heartbeat" (or no runHeartbeatOnce available): just
      // request a wake and report success.
      state.deps.requestHeartbeatNow({ reason: `cron:${job.id}` });
      return { status: "ok", summary: text };
    }
  }
  if (job.payload.kind !== "agentTurn") {
    return { status: "skipped", error: "isolated job requires payload.kind=agentTurn" };
  }
  const res = await state.deps.runIsolatedAgentJob({
    job,
    message: job.payload.message,
  });
  // Post a short summary back to the main session.
  const summaryText = res.summary?.trim();
  const deliveryPlan = resolveCronDeliveryPlan(job);
  if (summaryText && deliveryPlan.requested) {
    const prefix = "Cron";
    const label =
      res.status === "error" ? `${prefix} (error): ${summaryText}` : `${prefix}: ${summaryText}`;
    state.deps.enqueueSystemEvent(label, { agentId: job.agentId });
    if (job.wakeMode === "now") {
      state.deps.requestHeartbeatNow({ reason: `cron:${job.id}` });
    }
  }
  return {
    status: res.status,
    error: res.error,
    summary: res.summary,
    sessionId: res.sessionId,
    sessionKey: res.sessionKey,
  };
}
/**
* Execute a job. This version is used by the `run` command and other
* places that need the full execution with state updates.
*/
export async function executeJob(
state: CronServiceState,
job: CronJob,
@@ -89,7 +333,12 @@ export async function executeJob(
let deleted = false;
const finish = async (status: "ok" | "error" | "skipped", err?: string, summary?: string) => {
const finish = async (
status: "ok" | "error" | "skipped",
err?: string,
summary?: string,
session?: { sessionId?: string; sessionKey?: string },
) => {
const endedAt = state.deps.nowMs();
job.state.runningAtMs = undefined;
job.state.lastRunAtMs = startedAt;
@@ -102,7 +351,6 @@ export async function executeJob(
if (!shouldDelete) {
if (job.schedule.kind === "at" && status === "ok") {
// One-shot job completed successfully; disable it.
job.enabled = false;
job.state.nextRunAtMs = undefined;
} else if (job.enabled) {
@@ -118,6 +366,8 @@ export async function executeJob(
status,
error: err,
summary,
sessionId: session?.sessionId,
sessionKey: session?.sessionKey,
runAtMs: startedAt,
durationMs: job.state.lastDurationMs,
nextRunAtMs: job.state.nextRunAtMs,
@@ -131,96 +381,16 @@ export async function executeJob(
};
try {
if (job.sessionTarget === "main") {
const text = resolveJobPayloadTextForMain(job);
if (!text) {
const kind = job.payload.kind;
await finish(
"skipped",
kind === "systemEvent"
? "main job requires non-empty systemEvent text"
: 'main job requires payload.kind="systemEvent"',
);
return;
}
state.deps.enqueueSystemEvent(text, { agentId: job.agentId });
if (job.wakeMode === "now" && state.deps.runHeartbeatOnce) {
const reason = `cron:${job.id}`;
const delay = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));
const maxWaitMs = 2 * 60_000;
const waitStartedAt = state.deps.nowMs();
let heartbeatResult: HeartbeatRunResult;
for (;;) {
heartbeatResult = await state.deps.runHeartbeatOnce({ reason });
if (
heartbeatResult.status !== "skipped" ||
heartbeatResult.reason !== "requests-in-flight"
) {
break;
}
if (state.deps.nowMs() - waitStartedAt > maxWaitMs) {
heartbeatResult = {
status: "skipped",
reason: "timeout waiting for main lane to become idle",
};
break;
}
await delay(250);
}
if (heartbeatResult.status === "ran") {
await finish("ok", undefined, text);
} else if (heartbeatResult.status === "skipped") {
await finish("skipped", heartbeatResult.reason, text);
} else {
await finish("error", heartbeatResult.reason, text);
}
} else {
// wakeMode is "next-heartbeat" or runHeartbeatOnce not available
state.deps.requestHeartbeatNow({ reason: `cron:${job.id}` });
await finish("ok", undefined, text);
}
return;
}
if (job.payload.kind !== "agentTurn") {
await finish("skipped", "isolated job requires payload.kind=agentTurn");
return;
}
const res = await state.deps.runIsolatedAgentJob({
job,
message: job.payload.message,
const result = await executeJobCore(state, job);
await finish(result.status, result.error, result.summary, {
sessionId: result.sessionId,
sessionKey: result.sessionKey,
});
// Post a short summary back to the main session so the user sees
// the cron result without opening the isolated session.
const summaryText = res.summary?.trim();
const deliveryMode = job.delivery?.mode ?? "announce";
if (summaryText && deliveryMode !== "none") {
const prefix = "Cron";
const label =
res.status === "error" ? `${prefix} (error): ${summaryText}` : `${prefix}: ${summaryText}`;
state.deps.enqueueSystemEvent(label, { agentId: job.agentId });
if (job.wakeMode === "now") {
state.deps.requestHeartbeatNow({ reason: `cron:${job.id}` });
}
}
if (res.status === "ok") {
await finish("ok", undefined, res.summary);
} else if (res.status === "skipped") {
await finish("skipped", undefined, res.summary);
} else {
await finish("error", res.error ?? "cron job failed", res.summary);
}
} catch (err) {
await finish("error", String(err));
} finally {
job.updatedAtMs = nowMs;
if (!opts.forced && job.enabled && !deleted) {
// Keep nextRunAtMs in sync in case the schedule advanced during a long run.
job.state.nextRunAtMs = computeJobNextRunAtMs(job, state.deps.nowMs());
}
}

32
src/cron/store.test.ts Normal file
View File

@@ -0,0 +1,32 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { describe, expect, it } from "vitest";
import { loadCronStore } from "./store.js";
/**
 * Create a fresh temp directory for a store file and return both paths plus
 * a cleanup callback that removes the whole directory tree.
 */
async function makeStorePath() {
  const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-cron-store-"));
  const storePath = path.join(dir, "jobs.json");
  const cleanup = async () => {
    await fs.rm(dir, { recursive: true, force: true });
  };
  return { dir, storePath, cleanup };
}
describe("cron store", () => {
  it("returns empty store when file does not exist", async () => {
    // A missing file is not an error: first-run setups need no bootstrap
    // step, so ENOENT yields an empty v1 store.
    const store = await makeStorePath();
    const loaded = await loadCronStore(store.storePath);
    expect(loaded).toEqual({ version: 1, jobs: [] });
    await store.cleanup();
  });
  it("throws when store contains invalid JSON", async () => {
    // Corrupt content must surface loudly rather than being swallowed and
    // silently wiping the user's jobs.
    const store = await makeStorePath();
    await fs.writeFile(store.storePath, "{ not json", "utf-8");
    await expect(loadCronStore(store.storePath)).rejects.toThrow(/Failed to parse cron store/i);
    await store.cleanup();
  });
});

View File

@@ -22,14 +22,28 @@ export function resolveCronStorePath(storePath?: string) {
export async function loadCronStore(storePath: string): Promise<CronStoreFile> {
try {
const raw = await fs.promises.readFile(storePath, "utf-8");
const parsed = JSON5.parse(raw);
const jobs = Array.isArray(parsed?.jobs) ? (parsed?.jobs as never[]) : [];
let parsed: unknown;
try {
parsed = JSON5.parse(raw);
} catch (err) {
throw new Error(`Failed to parse cron store at ${storePath}: ${String(err)}`, {
cause: err,
});
}
const parsedRecord =
parsed && typeof parsed === "object" && !Array.isArray(parsed)
? (parsed as Record<string, unknown>)
: {};
const jobs = Array.isArray(parsedRecord.jobs) ? (parsedRecord.jobs as never[]) : [];
return {
version: 1,
jobs: jobs.filter(Boolean) as never as CronStoreFile["jobs"],
};
} catch {
return { version: 1, jobs: [] };
} catch (err) {
if ((err as { code?: unknown })?.code === "ENOENT") {
return { version: 1, jobs: [] };
}
throw err;
}
}

View File

@@ -42,6 +42,11 @@ export const CronPayloadSchema = Type.Union([
model: Type.Optional(Type.String()),
thinking: Type.Optional(Type.String()),
timeoutSeconds: Type.Optional(Type.Integer({ minimum: 1 })),
allowUnsafeExternalContent: Type.Optional(Type.Boolean()),
deliver: Type.Optional(Type.Boolean()),
channel: Type.Optional(Type.String()),
to: Type.Optional(Type.String()),
bestEffortDeliver: Type.Optional(Type.Boolean()),
},
{ additionalProperties: false },
),
@@ -62,6 +67,11 @@ export const CronPayloadPatchSchema = Type.Union([
model: Type.Optional(Type.String()),
thinking: Type.Optional(Type.String()),
timeoutSeconds: Type.Optional(Type.Integer({ minimum: 1 })),
allowUnsafeExternalContent: Type.Optional(Type.Boolean()),
deliver: Type.Optional(Type.Boolean()),
channel: Type.Optional(Type.String()),
to: Type.Optional(Type.String()),
bestEffortDeliver: Type.Optional(Type.Boolean()),
},
{ additionalProperties: false },
),
@@ -239,6 +249,8 @@ export const CronRunLogEntrySchema = Type.Object(
),
error: Type.Optional(Type.String()),
summary: Type.Optional(Type.String()),
sessionId: Type.Optional(NonEmptyString),
sessionKey: Type.Optional(NonEmptyString),
runAtMs: Type.Optional(Type.Integer({ minimum: 0 })),
durationMs: Type.Optional(Type.Integer({ minimum: 0 })),
nextRunAtMs: Type.Optional(Type.Integer({ minimum: 0 })),

View File

@@ -90,6 +90,8 @@ export function buildGatewayCronService(params: {
status: evt.status,
error: evt.error,
summary: evt.summary,
sessionId: evt.sessionId,
sessionKey: evt.sessionKey,
runAtMs: evt.runAtMs,
durationMs: evt.durationMs,
nextRunAtMs: evt.nextRunAtMs,

View File

@@ -189,7 +189,7 @@ export const cronHandlers: GatewayRequestHandlers = {
);
return;
}
const result = await context.cron.run(jobId, p.mode);
const result = await context.cron.run(jobId, p.mode ?? "force");
respond(true, result, undefined);
},
"cron.runs": async ({ params, respond, context }) => {

View File

@@ -117,7 +117,7 @@ describe("gateway server cron", () => {
| { schedule?: unknown; sessionTarget?: unknown; wakeMode?: unknown }
| undefined;
expect(wrappedPayload?.sessionTarget).toBe("main");
expect(wrappedPayload?.wakeMode).toBe("next-heartbeat");
expect(wrappedPayload?.wakeMode).toBe("now");
expect((wrappedPayload?.schedule as { kind?: unknown } | undefined)?.kind).toBe("at");
const patchRes = await rpcReq(ws, "cron.add", {
@@ -181,6 +181,32 @@ describe("gateway server cron", () => {
expect(merged?.delivery?.channel).toBe("telegram");
expect(merged?.delivery?.to).toBe("19098680");
const legacyDeliveryPatchRes = await rpcReq(ws, "cron.update", {
id: mergeJobId,
patch: {
payload: {
kind: "agentTurn",
deliver: true,
channel: "signal",
to: "+15550001111",
bestEffortDeliver: true,
},
},
});
expect(legacyDeliveryPatchRes.ok).toBe(true);
const legacyDeliveryPatched = legacyDeliveryPatchRes.payload as
| {
payload?: { kind?: unknown; message?: unknown };
delivery?: { mode?: unknown; channel?: unknown; to?: unknown; bestEffort?: unknown };
}
| undefined;
expect(legacyDeliveryPatched?.payload?.kind).toBe("agentTurn");
expect(legacyDeliveryPatched?.payload?.message).toBe("hello");
expect(legacyDeliveryPatched?.delivery?.mode).toBe("announce");
expect(legacyDeliveryPatched?.delivery?.channel).toBe("signal");
expect(legacyDeliveryPatched?.delivery?.to).toBe("+15550001111");
expect(legacyDeliveryPatched?.delivery?.bestEffort).toBe(true);
const rejectRes = await rpcReq(ws, "cron.add", {
name: "patch reject",
enabled: true,

View File

@@ -331,4 +331,29 @@ describe("listSessionsFromStore search", () => {
});
expect(result.sessions.length).toBe(1);
});
  // Regression: per-run alias keys ("...:cron:<jobId>:run:<runId>") must be
  // filtered out of the sessions list; only the stable cron alias key survives.
  test("hides cron run alias session keys from sessions list", () => {
    const now = Date.now();
    // Both entries point at the same underlying session; the ":run:" key is an alias.
    const store: Record<string, SessionEntry> = {
      "agent:main:cron:job-1": {
        sessionId: "run-abc",
        updatedAt: now,
        label: "Cron: job-1",
      } as SessionEntry,
      "agent:main:cron:job-1:run:run-abc": {
        sessionId: "run-abc",
        updatedAt: now,
        label: "Cron: job-1",
      } as SessionEntry,
    };
    const result = listSessionsFromStore({
      cfg: baseCfg,
      storePath: "/tmp/sessions.json",
      store,
      opts: {},
    });
    // Only the stable alias should be listed.
    expect(result.sessions.map((session) => session.key)).toEqual(["agent:main:cron:job-1"]);
  });
});

View File

@@ -207,6 +207,12 @@ export function classifySessionKey(key: string, entry?: SessionEntry): GatewaySe
return "direct";
}
/**
 * True when `key` is a per-run cron alias ("cron:<jobId>:run:<runId>" after
 * any agent prefix). These alias keys are hidden from session listings.
 */
function isCronRunSessionKey(key: string): boolean {
  // Strip the "agent:<id>:" prefix when present; otherwise match the raw key.
  const suffix = parseAgentSessionKey(key)?.rest ?? key;
  return /^cron:[^:]+:run:[^:]+$/.test(suffix);
}
export function parseGroupKey(
key: string,
): { channel?: string; kind?: "group" | "channel"; id?: string } | null {
@@ -568,6 +574,9 @@ export function listSessionsFromStore(params: {
let sessions = Object.entries(store)
.filter(([key]) => {
if (isCronRunSessionKey(key)) {
return false;
}
if (!includeGlobal && key === "global") {
return false;
}

View File

@@ -12,9 +12,12 @@ import { fileURLToPath } from "node:url";
import type { OpenClawConfig } from "../../../config/config.js";
import type { HookHandler } from "../../hooks.js";
import { resolveAgentWorkspaceDir } from "../../../agents/agent-scope.js";
import { createSubsystemLogger } from "../../../logging/subsystem.js";
import { resolveAgentIdFromSessionKey } from "../../../routing/session-key.js";
import { resolveHookConfig } from "../../config.js";
const log = createSubsystemLogger("hooks/session-memory");
/**
* Read recent messages from session file for slug generation
*/
@@ -69,7 +72,7 @@ const saveSessionToMemory: HookHandler = async (event) => {
}
try {
console.log("[session-memory] Hook triggered for /new command");
log.debug("Hook triggered for /new command");
const context = event.context || {};
const cfg = context.cfg as OpenClawConfig | undefined;
@@ -92,9 +95,11 @@ const saveSessionToMemory: HookHandler = async (event) => {
const currentSessionId = sessionEntry.sessionId as string;
const currentSessionFile = sessionEntry.sessionFile as string;
console.log("[session-memory] Current sessionId:", currentSessionId);
console.log("[session-memory] Current sessionFile:", currentSessionFile);
console.log("[session-memory] cfg present:", !!cfg);
log.debug("Session context resolved", {
sessionId: currentSessionId,
sessionFile: currentSessionFile,
hasCfg: Boolean(cfg),
});
const sessionFile = currentSessionFile || undefined;
@@ -111,10 +116,13 @@ const saveSessionToMemory: HookHandler = async (event) => {
if (sessionFile) {
// Get recent conversation content
sessionContent = await getRecentSessionContent(sessionFile, messageCount);
console.log("[session-memory] sessionContent length:", sessionContent?.length || 0);
log.debug("Session content loaded", {
length: sessionContent?.length ?? 0,
messageCount,
});
if (sessionContent && cfg) {
console.log("[session-memory] Calling generateSlugViaLLM...");
log.debug("Calling generateSlugViaLLM...");
// Dynamically import the LLM slug generator (avoids module caching issues)
// When compiled, handler is at dist/hooks/bundled/session-memory/handler.js
// Going up ../.. puts us at dist/hooks/, so just add llm-slug-generator.js
@@ -124,7 +132,7 @@ const saveSessionToMemory: HookHandler = async (event) => {
// Use LLM to generate a descriptive slug
slug = await generateSlugViaLLM({ sessionContent, cfg });
console.log("[session-memory] Generated slug:", slug);
log.debug("Generated slug", { slug });
}
}
@@ -132,14 +140,16 @@ const saveSessionToMemory: HookHandler = async (event) => {
if (!slug) {
const timeSlug = now.toISOString().split("T")[1].split(".")[0].replace(/:/g, "");
slug = timeSlug.slice(0, 4); // HHMM
console.log("[session-memory] Using fallback timestamp slug:", slug);
log.debug("Using fallback timestamp slug", { slug });
}
// Create filename with date and slug
const filename = `${dateStr}-${slug}.md`;
const memoryFilePath = path.join(memoryDir, filename);
console.log("[session-memory] Generated filename:", filename);
console.log("[session-memory] Full path:", memoryFilePath);
log.debug("Memory file path resolved", {
filename,
path: memoryFilePath.replace(os.homedir(), "~"),
});
// Format time as HH:MM:SS UTC
const timeStr = now.toISOString().split("T")[1].split(".")[0];
@@ -167,16 +177,21 @@ const saveSessionToMemory: HookHandler = async (event) => {
// Write to new memory file
await fs.writeFile(memoryFilePath, entry, "utf-8");
console.log("[session-memory] Memory file written successfully");
log.debug("Memory file written successfully");
// Log completion (but don't send user-visible confirmation - it's internal housekeeping)
const relPath = memoryFilePath.replace(os.homedir(), "~");
console.log(`[session-memory] Session context saved to ${relPath}`);
log.info(`Session context saved to ${relPath}`);
} catch (err) {
console.error(
"[session-memory] Failed to save session memory:",
err instanceof Error ? err.message : String(err),
);
if (err instanceof Error) {
log.error("Failed to save session memory", {
errorName: err.name,
errorMessage: err.message,
stack: err.stack,
});
} else {
log.error("Failed to save session memory", { error: String(err) });
}
}
};

View File

@@ -41,6 +41,7 @@ Reply with ONLY the slug, nothing else. Examples: "vendor-pitch", "api-design",
const result = await runEmbeddedPiAgent({
sessionId: `slug-generator-${Date.now()}`,
sessionKey: "temp:slug-generator",
agentId,
sessionFile: tempSessionFile,
workspaceDir,
agentDir,

View File

@@ -0,0 +1,14 @@
import crypto from "node:crypto";
/**
 * Hex SHA-256 digest of `value`, truncated to `len` characters.
 * Non-finite lengths fall back to 12; fractional/too-small lengths are
 * floored and clamped to at least 1.
 */
export function sha256HexPrefix(value: string, len = 12): string {
  let width = 12;
  if (Number.isFinite(len)) {
    width = Math.floor(len);
    if (width < 1) {
      width = 1;
    }
  }
  const digest = crypto.createHash("sha256").update(value).digest("hex");
  return digest.slice(0, width);
}
/**
 * Redact an identifier for logging: empty/blank/undefined values become "-",
 * anything else becomes "sha256:<hex prefix>" of the trimmed value.
 * `opts.len` controls the prefix width (default 12, clamped to >= 1).
 */
export function redactIdentifier(value: string | undefined, opts?: { len?: number }): string {
  const normalized = value?.trim();
  if (!normalized) {
    return "-";
  }
  const requested = opts?.len ?? 12;
  const width = Number.isFinite(requested) ? Math.max(1, Math.floor(requested)) : 12;
  const digest = crypto.createHash("sha256").update(normalized).digest("hex");
  return `sha256:${digest.slice(0, width)}`;
}

View File

@@ -0,0 +1,174 @@
import { ReadableStream } from "node:stream/web";
import { afterEach, describe, expect, it, vi } from "vitest";
import type { VoyageBatchOutputLine, VoyageBatchRequest } from "./batch-voyage.js";
import type { VoyageEmbeddingClient } from "./embeddings-voyage.js";
// runWithConcurrency from internal.js is simple enough to keep real.
// retryAsync IS mocked so tests avoid real retry delays/backoff logic.
vi.mock("../infra/retry.js", () => ({
  retryAsync: async <T>(fn: () => Promise<T>) => fn(),
}));

describe("runVoyageEmbeddingBatches", () => {
  afterEach(() => {
    vi.resetAllMocks();
    vi.unstubAllGlobals();
  });

  // Minimal client shape consumed by the batch runner.
  const mockClient: VoyageEmbeddingClient = {
    baseUrl: "https://api.voyageai.com/v1",
    headers: { Authorization: "Bearer test-key" },
    model: "voyage-4-large",
  };

  const mockRequests: VoyageBatchRequest[] = [
    { custom_id: "req-1", body: { input: "text1" } },
    { custom_id: "req-2", body: { input: "text2" } },
  ];

  it("successfully submits batch, waits, and streams results", async () => {
    const fetchMock = vi.fn();
    vi.stubGlobal("fetch", fetchMock);

    // Sequence of fetch calls:
    // 1. Upload file
    fetchMock.mockResolvedValueOnce({
      ok: true,
      json: async () => ({ id: "file-123" }),
    });
    // 2. Create batch
    fetchMock.mockResolvedValueOnce({
      ok: true,
      json: async () => ({ id: "batch-abc", status: "pending" }),
    });
    // 3. Poll status (pending) - Optional depending on wait loop, let's say it finishes immediately for this test
    // Actually the code does: initial check (if completed) -> wait loop.
    // If create returns "pending", it enters waitForVoyageBatch.
    // waitForVoyageBatch fetches status.
    // 3. Poll status (completed)
    fetchMock.mockResolvedValueOnce({
      ok: true,
      json: async () => ({
        id: "batch-abc",
        status: "completed",
        output_file_id: "file-out-999",
      }),
    });
    // 4. Download content (Streaming)
    const outputLines: VoyageBatchOutputLine[] = [
      {
        custom_id: "req-1",
        response: { status_code: 200, body: { data: [{ embedding: [0.1, 0.1] }] } },
      },
      {
        custom_id: "req-2",
        response: { status_code: 200, body: { data: [{ embedding: [0.2, 0.2] }] } },
      },
    ];
    // Create a stream that emits the NDJSON lines
    const stream = new ReadableStream({
      start(controller) {
        const text = outputLines.map((l) => JSON.stringify(l)).join("\n");
        controller.enqueue(new TextEncoder().encode(text));
        controller.close();
      },
    });
    fetchMock.mockResolvedValueOnce({
      ok: true,
      body: stream,
    });

    const { runVoyageEmbeddingBatches } = await import("./batch-voyage.js");
    const results = await runVoyageEmbeddingBatches({
      client: mockClient,
      agentId: "agent-1",
      requests: mockRequests,
      wait: true,
      pollIntervalMs: 1, // fast poll
      timeoutMs: 1000,
      concurrency: 1,
    });

    expect(results.size).toBe(2);
    expect(results.get("req-1")).toEqual([0.1, 0.1]);
    expect(results.get("req-2")).toEqual([0.2, 0.2]);

    // Verify calls
    expect(fetchMock).toHaveBeenCalledTimes(4);

    // Verify File Upload
    expect(fetchMock.mock.calls[0][0]).toContain("/files");
    const uploadBody = fetchMock.mock.calls[0][1].body as FormData;
    expect(uploadBody).toBeInstanceOf(FormData);
    expect(uploadBody.get("purpose")).toBe("batch");

    // Verify Batch Create
    expect(fetchMock.mock.calls[1][0]).toContain("/batches");
    const createBody = JSON.parse(fetchMock.mock.calls[1][1].body);
    expect(createBody.input_file_id).toBe("file-123");
    expect(createBody.completion_window).toBe("12h");
    expect(createBody.request_params).toEqual({
      model: "voyage-4-large",
      input_type: "document",
    });

    // Verify Content Fetch
    expect(fetchMock.mock.calls[3][0]).toContain("/files/file-out-999/content");
  });

  it("handles empty lines and stream chunks correctly", async () => {
    const fetchMock = vi.fn();
    vi.stubGlobal("fetch", fetchMock);

    // 1. Upload
    fetchMock.mockResolvedValueOnce({ ok: true, json: async () => ({ id: "f1" }) });
    // 2. Create (completed immediately)
    fetchMock.mockResolvedValueOnce({
      ok: true,
      json: async () => ({ id: "b1", status: "completed", output_file_id: "out1" }),
    });
    // 3. Download Content (Streaming with chunks and newlines)
    const stream = new ReadableStream({
      start(controller) {
        const line1 = JSON.stringify({
          custom_id: "req-1",
          response: { body: { data: [{ embedding: [1] }] } },
        });
        const line2 = JSON.stringify({
          custom_id: "req-2",
          response: { body: { data: [{ embedding: [2] }] } },
        });
        // Split across chunks
        controller.enqueue(new TextEncoder().encode(line1 + "\n"));
        controller.enqueue(new TextEncoder().encode("\n")); // empty line
        controller.enqueue(new TextEncoder().encode(line2)); // no newline at EOF
        controller.close();
      },
    });
    fetchMock.mockResolvedValueOnce({ ok: true, body: stream });

    const { runVoyageEmbeddingBatches } = await import("./batch-voyage.js");
    const results = await runVoyageEmbeddingBatches({
      client: mockClient,
      agentId: "a1",
      requests: mockRequests,
      wait: true,
      pollIntervalMs: 1,
      timeoutMs: 1000,
      concurrency: 1,
    });

    expect(results.get("req-1")).toEqual([1]);
    expect(results.get("req-2")).toEqual([2]);
  });
});

363
src/memory/batch-voyage.ts Normal file
View File

@@ -0,0 +1,363 @@
import { createInterface } from "node:readline";
import { Readable } from "node:stream";
import type { VoyageEmbeddingClient } from "./embeddings-voyage.js";
import { retryAsync } from "../infra/retry.js";
import { hashText, runWithConcurrency } from "./internal.js";
/**
* Voyage Batch API Input Line format.
* See: https://docs.voyageai.com/docs/batch-inference
*/
export type VoyageBatchRequest = {
custom_id: string;
body: {
input: string | string[];
};
};
export type VoyageBatchStatus = {
id?: string;
status?: string;
output_file_id?: string | null;
error_file_id?: string | null;
};
export type VoyageBatchOutputLine = {
custom_id?: string;
response?: {
status_code?: number;
body?: {
data?: Array<{ embedding?: number[]; index?: number }>;
error?: { message?: string };
};
};
error?: { message?: string };
};
export const VOYAGE_BATCH_ENDPOINT = "/v1/embeddings";
const VOYAGE_BATCH_COMPLETION_WINDOW = "12h";
const VOYAGE_BATCH_MAX_REQUESTS = 50000;
/** Base URL for Voyage API calls with any single trailing slash removed. */
function getVoyageBaseUrl(client: VoyageEmbeddingClient): string {
  const base = client.baseUrl ?? "";
  return base.replace(/\/$/, "");
}
/**
 * Copy the client headers, ensuring a JSON Content-Type for JSON requests and
 * stripping any Content-Type for multipart requests (so fetch can set the
 * multipart boundary itself). Both header-name casings are handled.
 */
function getVoyageHeaders(
  client: VoyageEmbeddingClient,
  params: { json: boolean },
): Record<string, string> {
  const headers: Record<string, string> = { ...(client.headers ?? {}) };
  if (params.json) {
    const hasContentType = Boolean(headers["Content-Type"] || headers["content-type"]);
    if (!hasContentType) {
      headers["Content-Type"] = "application/json";
    }
  } else {
    delete headers["Content-Type"];
    delete headers["content-type"];
  }
  return headers;
}
/**
 * Partition requests into groups no larger than VOYAGE_BATCH_MAX_REQUESTS.
 * When everything fits in one batch the original array is returned as-is.
 */
function splitVoyageBatchRequests(requests: VoyageBatchRequest[]): VoyageBatchRequest[][] {
  if (requests.length <= VOYAGE_BATCH_MAX_REQUESTS) {
    return [requests];
  }
  const chunks: VoyageBatchRequest[][] = [];
  let start = 0;
  while (start < requests.length) {
    chunks.push(requests.slice(start, start + VOYAGE_BATCH_MAX_REQUESTS));
    start += VOYAGE_BATCH_MAX_REQUESTS;
  }
  return chunks;
}
/**
 * Upload a JSONL file of embedding requests and create a Voyage batch job.
 *
 * Two-step flow against the Voyage Batch API: (1) upload the request file via
 * the Files API, (2) create a batch referencing the uploaded file id. Batch
 * creation is retried on 429/5xx; the file upload itself is not retried.
 *
 * @returns the status object returned by the batch-create call.
 * @throws on a non-OK upload/create response or when the upload returns no id.
 */
async function submitVoyageBatch(params: {
  client: VoyageEmbeddingClient;
  requests: VoyageBatchRequest[];
  agentId: string;
}): Promise<VoyageBatchStatus> {
  const baseUrl = getVoyageBaseUrl(params.client);
  // One JSON object per line (JSONL) as the batch input format requires.
  const jsonl = params.requests.map((request) => JSON.stringify(request)).join("\n");
  const form = new FormData();
  form.append("purpose", "batch");
  form.append(
    "file",
    new Blob([jsonl], { type: "application/jsonl" }),
    // Hash of the current timestamp keeps uploaded file names unique.
    `memory-embeddings.${hashText(String(Date.now()))}.jsonl`,
  );
  // 1. Upload file using Voyage Files API
  const fileRes = await fetch(`${baseUrl}/files`, {
    method: "POST",
    // json:false strips Content-Type so fetch can set the multipart boundary.
    headers: getVoyageHeaders(params.client, { json: false }),
    body: form,
  });
  if (!fileRes.ok) {
    const text = await fileRes.text();
    throw new Error(`voyage batch file upload failed: ${fileRes.status} ${text}`);
  }
  const filePayload = (await fileRes.json()) as { id?: string };
  if (!filePayload.id) {
    throw new Error("voyage batch file upload failed: missing file id");
  }
  // 2. Create batch job using Voyage Batches API
  const batchRes = await retryAsync(
    async () => {
      const res = await fetch(`${baseUrl}/batches`, {
        method: "POST",
        headers: getVoyageHeaders(params.client, { json: true }),
        body: JSON.stringify({
          input_file_id: filePayload.id,
          endpoint: VOYAGE_BATCH_ENDPOINT,
          completion_window: VOYAGE_BATCH_COMPLETION_WINDOW,
          request_params: {
            model: params.client.model,
            input_type: "document",
          },
          metadata: {
            source: "clawdbot-memory",
            agent: params.agentId,
          },
        }),
      });
      if (!res.ok) {
        const text = await res.text();
        // Attach the HTTP status so shouldRetry below can inspect it.
        const err = new Error(`voyage batch create failed: ${res.status} ${text}`) as Error & {
          status?: number;
        };
        err.status = res.status;
        throw err;
      }
      return res;
    },
    {
      attempts: 3,
      minDelayMs: 300,
      maxDelayMs: 2000,
      jitter: 0.2,
      // Retry only rate limits and server-side failures.
      shouldRetry: (err) => {
        const status = (err as { status?: number }).status;
        return status === 429 || (typeof status === "number" && status >= 500);
      },
    },
  );
  return (await batchRes.json()) as VoyageBatchStatus;
}
/**
 * Fetch the current status object for a Voyage batch job.
 * @throws on a non-OK response, including the status code and body text.
 */
async function fetchVoyageBatchStatus(params: {
  client: VoyageEmbeddingClient;
  batchId: string;
}): Promise<VoyageBatchStatus> {
  const url = `${getVoyageBaseUrl(params.client)}/batches/${params.batchId}`;
  const res = await fetch(url, {
    headers: getVoyageHeaders(params.client, { json: true }),
  });
  if (res.ok) {
    return (await res.json()) as VoyageBatchStatus;
  }
  const text = await res.text();
  throw new Error(`voyage batch status failed: ${res.status} ${text}`);
}
/**
 * Best-effort read of a Voyage batch error file and extraction of the first
 * error message found in its NDJSON lines. Never throws: any failure along
 * the way is reported as "error file unavailable: ...", and an empty file
 * yields undefined.
 */
async function readVoyageBatchError(params: {
  client: VoyageEmbeddingClient;
  errorFileId: string;
}): Promise<string | undefined> {
  try {
    const url = `${getVoyageBaseUrl(params.client)}/files/${params.errorFileId}/content`;
    const res = await fetch(url, {
      headers: getVoyageHeaders(params.client, { json: true }),
    });
    if (!res.ok) {
      const body = await res.text();
      throw new Error(`voyage batch error file content failed: ${res.status} ${body}`);
    }
    const text = await res.text();
    if (!text.trim()) {
      return undefined;
    }
    // Parse the NDJSON payload, skipping blank lines.
    const entries = text
      .split("\n")
      .map((line) => line.trim())
      .filter(Boolean)
      .map((line) => JSON.parse(line) as VoyageBatchOutputLine);
    // First line carrying a top-level or nested error wins.
    const first = entries.find((entry) => entry.error?.message || entry.response?.body?.error);
    const nested =
      typeof first?.response?.body?.error?.message === "string"
        ? first?.response?.body?.error?.message
        : undefined;
    return first?.error?.message ?? nested;
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    return message ? `error file unavailable: ${message}` : undefined;
  }
}
/**
 * Poll a Voyage batch until it reaches a terminal state.
 *
 * Resolves with the output (and optional error) file ids on completion.
 * Throws when the batch fails/expires/cancels (attaching error-file detail
 * when available), when `wait` is false and the batch is still running, or
 * when `timeoutMs` elapses.
 *
 * `initial` lets the caller pass a status it already holds (e.g. the create
 * response) so the first iteration skips one status request.
 */
async function waitForVoyageBatch(params: {
  client: VoyageEmbeddingClient;
  batchId: string;
  wait: boolean;
  pollIntervalMs: number;
  timeoutMs: number;
  debug?: (message: string, data?: Record<string, unknown>) => void;
  initial?: VoyageBatchStatus;
}): Promise<{ outputFileId: string; errorFileId?: string }> {
  const start = Date.now();
  let current: VoyageBatchStatus | undefined = params.initial;
  while (true) {
    // Use the caller-supplied status once, then fetch fresh on each loop.
    const status =
      current ??
      (await fetchVoyageBatchStatus({
        client: params.client,
        batchId: params.batchId,
      }));
    const state = status.status ?? "unknown";
    if (state === "completed") {
      if (!status.output_file_id) {
        throw new Error(`voyage batch ${params.batchId} completed without output file`);
      }
      return {
        outputFileId: status.output_file_id,
        errorFileId: status.error_file_id ?? undefined,
      };
    }
    // Both "cancelled"/"canceled" spellings are treated as terminal.
    if (["failed", "expired", "cancelled", "canceled"].includes(state)) {
      const detail = status.error_file_id
        ? await readVoyageBatchError({ client: params.client, errorFileId: status.error_file_id })
        : undefined;
      const suffix = detail ? `: ${detail}` : "";
      throw new Error(`voyage batch ${params.batchId} ${state}${suffix}`);
    }
    if (!params.wait) {
      throw new Error(`voyage batch ${params.batchId} still ${state}; wait disabled`);
    }
    if (Date.now() - start > params.timeoutMs) {
      throw new Error(`voyage batch ${params.batchId} timed out after ${params.timeoutMs}ms`);
    }
    params.debug?.(`voyage batch ${params.batchId} ${state}; waiting ${params.pollIntervalMs}ms`);
    await new Promise((resolve) => setTimeout(resolve, params.pollIntervalMs));
    // Force a fresh status fetch on the next iteration.
    current = undefined;
  }
}
/**
 * Run embedding requests through the Voyage Batch API and collect the results.
 *
 * Splits `requests` into groups of at most VOYAGE_BATCH_MAX_REQUESTS, submits
 * each group as its own batch (up to `concurrency` groups in flight),
 * optionally waits for completion, then streams each batch's NDJSON output
 * file line-by-line and maps every embedding back to its `custom_id`.
 *
 * @returns map from custom_id to embedding vector.
 * @throws if wait is disabled and a batch is not immediately complete, if any
 *   output line reports an error or an empty embedding, or if any submitted
 *   request is missing from the output file.
 */
export async function runVoyageEmbeddingBatches(params: {
  client: VoyageEmbeddingClient;
  agentId: string;
  requests: VoyageBatchRequest[];
  wait: boolean;
  pollIntervalMs: number;
  timeoutMs: number;
  concurrency: number;
  debug?: (message: string, data?: Record<string, unknown>) => void;
}): Promise<Map<string, number[]>> {
  if (params.requests.length === 0) return new Map();
  const groups = splitVoyageBatchRequests(params.requests);
  // Shared accumulator; each task writes disjoint custom_ids from its group.
  const byCustomId = new Map<string, number[]>();
  const tasks = groups.map((group, groupIndex) => async () => {
    const batchInfo = await submitVoyageBatch({
      client: params.client,
      requests: group,
      agentId: params.agentId,
    });
    if (!batchInfo.id) {
      throw new Error("voyage batch create failed: missing batch id");
    }
    params.debug?.("memory embeddings: voyage batch created", {
      batchId: batchInfo.id,
      status: batchInfo.status,
      group: groupIndex + 1,
      groups: groups.length,
      requests: group.length,
    });
    if (!params.wait && batchInfo.status !== "completed") {
      throw new Error(
        `voyage batch ${batchInfo.id} submitted; enable remote.batch.wait to await completion`,
      );
    }
    // Skip polling entirely when the create response is already terminal.
    const completed =
      batchInfo.status === "completed"
        ? {
            outputFileId: batchInfo.output_file_id ?? "",
            errorFileId: batchInfo.error_file_id ?? undefined,
          }
        : await waitForVoyageBatch({
            client: params.client,
            batchId: batchInfo.id,
            wait: params.wait,
            pollIntervalMs: params.pollIntervalMs,
            timeoutMs: params.timeoutMs,
            debug: params.debug,
            initial: batchInfo,
          });
    if (!completed.outputFileId) {
      throw new Error(`voyage batch ${batchInfo.id} completed without output file`);
    }
    const baseUrl = getVoyageBaseUrl(params.client);
    const contentRes = await fetch(`${baseUrl}/files/${completed.outputFileId}/content`, {
      headers: getVoyageHeaders(params.client, { json: true }),
    });
    if (!contentRes.ok) {
      const text = await contentRes.text();
      throw new Error(`voyage batch file content failed: ${contentRes.status} ${text}`);
    }
    const errors: string[] = [];
    // Track which custom_ids still lack a response line.
    const remaining = new Set(group.map((request) => request.custom_id));
    if (contentRes.body) {
      // readline splits the NDJSON stream regardless of chunk boundaries.
      const reader = createInterface({
        // NOTE(review): `as any` bridges the web/node ReadableStream typings mismatch.
        input: Readable.fromWeb(contentRes.body as any),
        terminal: false,
      });
      for await (const rawLine of reader) {
        if (!rawLine.trim()) continue;
        const line = JSON.parse(rawLine) as VoyageBatchOutputLine;
        const customId = line.custom_id;
        if (!customId) continue;
        remaining.delete(customId);
        if (line.error?.message) {
          errors.push(`${customId}: ${line.error.message}`);
          continue;
        }
        const response = line.response;
        const statusCode = response?.status_code ?? 0;
        if (statusCode >= 400) {
          const message =
            response?.body?.error?.message ??
            (typeof response?.body === "string" ? response.body : undefined) ??
            "unknown error";
          errors.push(`${customId}: ${message}`);
          continue;
        }
        const data = response?.body?.data ?? [];
        const embedding = data[0]?.embedding ?? [];
        if (embedding.length === 0) {
          errors.push(`${customId}: empty embedding`);
          continue;
        }
        byCustomId.set(customId, embedding);
      }
    }
    if (errors.length > 0) {
      throw new Error(`voyage batch ${batchInfo.id} failed: ${errors.join("; ")}`);
    }
    if (remaining.size > 0) {
      throw new Error(`voyage batch ${batchInfo.id} missing ${remaining.size} embedding responses`);
    }
  });
  params.debug?.("memory embeddings: voyage batch submit", {
    requests: params.requests.length,
    groups: groups.length,
    wait: params.wait,
    concurrency: params.concurrency,
    pollIntervalMs: params.pollIntervalMs,
    timeoutMs: params.timeoutMs,
  });
  await runWithConcurrency(tasks, params.concurrency);
  return byCustomId;
}

View File

@@ -0,0 +1,138 @@
import { afterEach, describe, expect, it, vi } from "vitest";
// Mock auth resolution so no real credential lookup happens; requireApiKey
// keeps its real contract (return key or throw).
vi.mock("../agents/model-auth.js", () => ({
  resolveApiKeyForProvider: vi.fn(),
  requireApiKey: (auth: { apiKey?: string; mode?: string }, provider: string) => {
    if (auth?.apiKey) return auth.apiKey;
    throw new Error(`No API key resolved for provider "${provider}" (auth mode: ${auth?.mode}).`);
  },
}));

// Fetch stub returning a single fixed embedding payload.
const createFetchMock = () =>
  vi.fn(async () => ({
    ok: true,
    status: 200,
    json: async () => ({ data: [{ embedding: [0.1, 0.2, 0.3] }] }),
  })) as unknown as typeof fetch;

describe("voyage embedding provider", () => {
  afterEach(() => {
    vi.resetAllMocks();
    // resetModules so each test's dynamic import gets a fresh module instance.
    vi.resetModules();
    vi.unstubAllGlobals();
  });

  it("configures client with correct defaults and headers", async () => {
    const fetchMock = createFetchMock();
    vi.stubGlobal("fetch", fetchMock);
    const { createVoyageEmbeddingProvider } = await import("./embeddings-voyage.js");
    const authModule = await import("../agents/model-auth.js");
    vi.mocked(authModule.resolveApiKeyForProvider).mockResolvedValue({
      apiKey: "voyage-key-123",
      mode: "api-key",
      source: "test",
    });

    const result = await createVoyageEmbeddingProvider({
      config: {} as never,
      provider: "voyage",
      model: "voyage-4-large",
      fallback: "none",
    });
    await result.provider.embedQuery("test query");

    expect(authModule.resolveApiKeyForProvider).toHaveBeenCalledWith(
      expect.objectContaining({ provider: "voyage" }),
    );
    const [url, init] = fetchMock.mock.calls[0] ?? [];
    expect(url).toBe("https://api.voyageai.com/v1/embeddings");
    const headers = (init?.headers ?? {}) as Record<string, string>;
    expect(headers.Authorization).toBe("Bearer voyage-key-123");
    expect(headers["Content-Type"]).toBe("application/json");
    const body = JSON.parse(init?.body as string);
    expect(body).toEqual({
      model: "voyage-4-large",
      input: ["test query"],
      input_type: "query",
    });
  });

  it("respects remote overrides for baseUrl and apiKey", async () => {
    const fetchMock = createFetchMock();
    vi.stubGlobal("fetch", fetchMock);
    const { createVoyageEmbeddingProvider } = await import("./embeddings-voyage.js");

    // remote.apiKey short-circuits auth resolution entirely.
    const result = await createVoyageEmbeddingProvider({
      config: {} as never,
      provider: "voyage",
      model: "voyage-4-lite",
      fallback: "none",
      remote: {
        baseUrl: "https://proxy.example.com",
        apiKey: "remote-override-key",
        headers: { "X-Custom": "123" },
      },
    });
    await result.provider.embedQuery("test");

    const [url, init] = fetchMock.mock.calls[0] ?? [];
    expect(url).toBe("https://proxy.example.com/embeddings");
    const headers = (init?.headers ?? {}) as Record<string, string>;
    expect(headers.Authorization).toBe("Bearer remote-override-key");
    expect(headers["X-Custom"]).toBe("123");
  });

  it("passes input_type=document for embedBatch", async () => {
    const fetchMock = vi.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({
        data: [{ embedding: [0.1, 0.2] }, { embedding: [0.3, 0.4] }],
      }),
    })) as unknown as typeof fetch;
    vi.stubGlobal("fetch", fetchMock);
    const { createVoyageEmbeddingProvider } = await import("./embeddings-voyage.js");
    const authModule = await import("../agents/model-auth.js");
    vi.mocked(authModule.resolveApiKeyForProvider).mockResolvedValue({
      apiKey: "voyage-key-123",
      mode: "api-key",
      source: "test",
    });

    const result = await createVoyageEmbeddingProvider({
      config: {} as never,
      provider: "voyage",
      model: "voyage-4-large",
      fallback: "none",
    });
    await result.provider.embedBatch(["doc1", "doc2"]);

    const [, init] = fetchMock.mock.calls[0] ?? [];
    const body = JSON.parse(init?.body as string);
    expect(body).toEqual({
      model: "voyage-4-large",
      input: ["doc1", "doc2"],
      input_type: "document",
    });
  });

  it("normalizes model names", async () => {
    const { normalizeVoyageModel } = await import("./embeddings-voyage.js");
    expect(normalizeVoyageModel("voyage/voyage-large-2")).toBe("voyage-large-2");
    expect(normalizeVoyageModel("voyage-4-large")).toBe("voyage-4-large");
    expect(normalizeVoyageModel(" voyage-lite ")).toBe("voyage-lite");
    expect(normalizeVoyageModel("")).toBe("voyage-4-large"); // Default
  });
});

View File

@@ -0,0 +1,92 @@
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.js";
import { requireApiKey, resolveApiKeyForProvider } from "../agents/model-auth.js";
export type VoyageEmbeddingClient = {
baseUrl: string;
headers: Record<string, string>;
model: string;
};
export const DEFAULT_VOYAGE_EMBEDDING_MODEL = "voyage-4-large";
const DEFAULT_VOYAGE_BASE_URL = "https://api.voyageai.com/v1";

/**
 * Normalize a configured model name: trim whitespace, strip an optional
 * "voyage/" provider prefix, and fall back to the default model when empty.
 */
export function normalizeVoyageModel(model: string): string {
  const name = model.trim();
  if (name.length === 0) {
    return DEFAULT_VOYAGE_EMBEDDING_MODEL;
  }
  return name.startsWith("voyage/") ? name.slice("voyage/".length) : name;
}
/**
 * Build an EmbeddingProvider backed by the Voyage embeddings endpoint.
 * Queries are sent with input_type="query", batches with "document".
 * Returns both the provider and the resolved client (base URL/headers/model).
 */
export async function createVoyageEmbeddingProvider(
  options: EmbeddingProviderOptions,
): Promise<{ provider: EmbeddingProvider; client: VoyageEmbeddingClient }> {
  const client = await resolveVoyageEmbeddingClient(options);
  const endpoint = `${client.baseUrl.replace(/\/$/, "")}/embeddings`;

  // Shared request helper for both query and document embedding calls.
  const requestEmbeddings = async (
    input: string[],
    input_type?: "query" | "document",
  ): Promise<number[][]> => {
    if (input.length === 0) {
      return [];
    }
    const body: { model: string; input: string[]; input_type?: "query" | "document" } = {
      model: client.model,
      input,
    };
    if (input_type) {
      body.input_type = input_type;
    }
    const res = await fetch(endpoint, {
      method: "POST",
      headers: client.headers,
      body: JSON.stringify(body),
    });
    if (!res.ok) {
      const text = await res.text();
      throw new Error(`voyage embeddings failed: ${res.status} ${text}`);
    }
    const payload = (await res.json()) as {
      data?: Array<{ embedding?: number[] }>;
    };
    return (payload.data ?? []).map((entry) => entry.embedding ?? []);
  };

  const provider: EmbeddingProvider = {
    id: "voyage",
    model: client.model,
    embedQuery: async (text) => {
      const vectors = await requestEmbeddings([text], "query");
      return vectors[0] ?? [];
    },
    embedBatch: async (texts) => requestEmbeddings(texts, "document"),
  };
  return { provider, client };
}
/**
 * Resolves the Voyage API client settings (base URL, headers, model) from
 * the remote override, provider config, and credential store, in that order
 * of precedence.
 *
 * @param options Provider options carrying config and optional remote override.
 * @returns Resolved client settings ready for HTTP requests.
 */
export async function resolveVoyageEmbeddingClient(
  options: EmbeddingProviderOptions,
): Promise<VoyageEmbeddingClient> {
  const { remote, config } = options;
  // An explicit key on the remote override wins; otherwise fall back to the
  // configured credential resolution for the "voyage" provider.
  const explicitKey = remote?.apiKey?.trim();
  const apiKey =
    explicitKey ||
    requireApiKey(
      await resolveApiKeyForProvider({
        provider: "voyage",
        cfg: config,
        agentDir: options.agentDir,
      }),
      "voyage",
    );
  const providerConfig = config.models?.providers?.voyage;
  const baseUrl = remote?.baseUrl?.trim() || providerConfig?.baseUrl?.trim() || DEFAULT_VOYAGE_BASE_URL;
  // Later spreads win: caller-supplied headers may intentionally override
  // Content-Type or Authorization.
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    Authorization: `Bearer ${apiKey}`,
    ...providerConfig?.headers,
    ...remote?.headers,
  };
  return { baseUrl, headers, model: normalizeVoyageModel(options.model) };
}

View File

@@ -4,6 +4,7 @@ import type { OpenClawConfig } from "../config/config.js";
import { resolveUserPath } from "../utils.js";
import { createGeminiEmbeddingProvider, type GeminiEmbeddingClient } from "./embeddings-gemini.js";
import { createOpenAiEmbeddingProvider, type OpenAiEmbeddingClient } from "./embeddings-openai.js";
import { createVoyageEmbeddingProvider, type VoyageEmbeddingClient } from "./embeddings-voyage.js";
import { importNodeLlamaCpp } from "./node-llama.js";
function sanitizeAndNormalizeEmbedding(vec: number[]): number[] {
@@ -17,6 +18,7 @@ function sanitizeAndNormalizeEmbedding(vec: number[]): number[] {
export type { GeminiEmbeddingClient } from "./embeddings-gemini.js";
export type { OpenAiEmbeddingClient } from "./embeddings-openai.js";
export type { VoyageEmbeddingClient } from "./embeddings-voyage.js";
export type EmbeddingProvider = {
id: string;
@@ -27,24 +29,25 @@ export type EmbeddingProvider = {
export type EmbeddingProviderResult = {
provider: EmbeddingProvider;
requestedProvider: "openai" | "local" | "gemini" | "auto";
fallbackFrom?: "openai" | "local" | "gemini";
requestedProvider: "openai" | "local" | "gemini" | "voyage" | "auto";
fallbackFrom?: "openai" | "local" | "gemini" | "voyage";
fallbackReason?: string;
openAi?: OpenAiEmbeddingClient;
gemini?: GeminiEmbeddingClient;
voyage?: VoyageEmbeddingClient;
};
export type EmbeddingProviderOptions = {
config: OpenClawConfig;
agentDir?: string;
provider: "openai" | "local" | "gemini" | "auto";
provider: "openai" | "local" | "gemini" | "voyage" | "auto";
remote?: {
baseUrl?: string;
apiKey?: string;
headers?: Record<string, string>;
};
model: string;
fallback: "openai" | "gemini" | "local" | "none";
fallback: "openai" | "gemini" | "local" | "voyage" | "none";
local?: {
modelPath?: string;
modelCacheDir?: string;
@@ -128,7 +131,7 @@ export async function createEmbeddingProvider(
const requestedProvider = options.provider;
const fallback = options.fallback;
const createProvider = async (id: "openai" | "local" | "gemini") => {
const createProvider = async (id: "openai" | "local" | "gemini" | "voyage") => {
if (id === "local") {
const provider = await createLocalEmbeddingProvider(options);
return { provider };
@@ -137,11 +140,15 @@ export async function createEmbeddingProvider(
const { provider, client } = await createGeminiEmbeddingProvider(options);
return { provider, gemini: client };
}
if (id === "voyage") {
const { provider, client } = await createVoyageEmbeddingProvider(options);
return { provider, voyage: client };
}
const { provider, client } = await createOpenAiEmbeddingProvider(options);
return { provider, openAi: client };
};
const formatPrimaryError = (err: unknown, provider: "openai" | "local" | "gemini") =>
const formatPrimaryError = (err: unknown, provider: "openai" | "local" | "gemini" | "voyage") =>
provider === "local" ? formatLocalSetupError(err) : formatError(err);
if (requestedProvider === "auto") {
@@ -157,7 +164,7 @@ export async function createEmbeddingProvider(
}
}
for (const provider of ["openai", "gemini"] as const) {
for (const provider of ["openai", "gemini", "voyage"] as const) {
try {
const result = await createProvider(provider);
return { ...result, requestedProvider };
@@ -240,6 +247,7 @@ function formatLocalSetupError(err: unknown): string {
: null,
"3) If you use pnpm: pnpm approve-builds (select node-llama-cpp), then pnpm rebuild node-llama-cpp",
'Or set agents.defaults.memorySearch.provider = "openai" (remote).',
'Or set agents.defaults.memorySearch.provider = "voyage" (remote).',
]
.filter(Boolean)
.join("\n");

View File

@@ -275,3 +275,33 @@ export function cosineSimilarity(a: number[], b: number[]): number {
}
return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}
/**
 * Runs async task factories with a bounded number of concurrent workers.
 *
 * Tasks start in array order and results are returned in the same order.
 * After the first failure no new tasks are started; the error is rethrown
 * once in-flight work has settled.
 *
 * @param tasks Factories producing the promises to run.
 * @param limit Maximum tasks in flight; clamped to [1, tasks.length].
 * @returns Results aligned with `tasks` by index.
 * @throws The first task error — including falsy thrown values such as
 *         `throw undefined`, which the previous truthiness check swallowed.
 */
export async function runWithConcurrency<T>(
  tasks: Array<() => Promise<T>>,
  limit: number,
): Promise<T[]> {
  if (tasks.length === 0) return [];
  const resolvedLimit = Math.max(1, Math.min(limit, tasks.length));
  const results: T[] = Array.from({ length: tasks.length });
  let next = 0;
  // Track failure with a dedicated flag so a falsy rejection value is still
  // recorded and rethrown instead of being treated as "no error".
  let failed = false;
  let firstError: unknown = null;
  const workers = Array.from({ length: resolvedLimit }, async () => {
    while (!failed) {
      const index = next;
      next += 1;
      if (index >= tasks.length) return;
      const task = tasks[index];
      if (!task) return; // defensive: sparse input array
      try {
        results[index] = await task();
      } catch (err) {
        if (!failed) {
          failed = true;
          firstError = err;
        }
        return;
      }
    }
  });
  await Promise.allSettled(workers);
  if (failed) throw firstError;
  return results;
}

View File

@@ -26,14 +26,17 @@ import {
type OpenAiBatchRequest,
runOpenAiEmbeddingBatches,
} from "./batch-openai.js";
import { type VoyageBatchRequest, runVoyageEmbeddingBatches } from "./batch-voyage.js";
import { DEFAULT_GEMINI_EMBEDDING_MODEL } from "./embeddings-gemini.js";
import { DEFAULT_OPENAI_EMBEDDING_MODEL } from "./embeddings-openai.js";
import { DEFAULT_VOYAGE_EMBEDDING_MODEL } from "./embeddings-voyage.js";
import {
createEmbeddingProvider,
type EmbeddingProvider,
type EmbeddingProviderResult,
type GeminiEmbeddingClient,
type OpenAiEmbeddingClient,
type VoyageEmbeddingClient,
} from "./embeddings.js";
import { bm25RankToScore, buildFtsQuery, mergeHybridResults } from "./hybrid.js";
import {
@@ -47,6 +50,7 @@ import {
type MemoryChunk,
type MemoryFileEntry,
parseEmbedding,
runWithConcurrency,
} from "./internal.js";
import { searchKeyword, searchVector } from "./manager-search.js";
import { ensureMemoryIndexSchema } from "./memory-schema.js";
@@ -112,11 +116,12 @@ export class MemoryIndexManager implements MemorySearchManager {
private readonly workspaceDir: string;
private readonly settings: ResolvedMemorySearchConfig;
private provider: EmbeddingProvider;
private readonly requestedProvider: "openai" | "local" | "gemini" | "auto";
private fallbackFrom?: "openai" | "local" | "gemini";
private readonly requestedProvider: "openai" | "local" | "gemini" | "voyage" | "auto";
private fallbackFrom?: "openai" | "local" | "gemini" | "voyage";
private fallbackReason?: string;
private openAi?: OpenAiEmbeddingClient;
private gemini?: GeminiEmbeddingClient;
private voyage?: VoyageEmbeddingClient;
private batch: {
enabled: boolean;
wait: boolean;
@@ -217,6 +222,7 @@ export class MemoryIndexManager implements MemorySearchManager {
this.fallbackReason = params.providerResult.fallbackReason;
this.openAi = params.providerResult.openAi;
this.gemini = params.providerResult.gemini;
this.voyage = params.providerResult.voyage;
this.sources = new Set(params.settings.sources);
this.db = this.openDatabase();
this.providerKey = this.computeProviderKey();
@@ -1109,7 +1115,7 @@ export class MemoryIndexManager implements MemorySearchManager {
});
}
});
await this.runWithConcurrency(tasks, this.getIndexConcurrency());
await runWithConcurrency(tasks, this.getIndexConcurrency());
const staleRows = this.db
.prepare(`SELECT path FROM files WHERE source = ?`)
@@ -1206,7 +1212,7 @@ export class MemoryIndexManager implements MemorySearchManager {
});
}
});
await this.runWithConcurrency(tasks, this.getIndexConcurrency());
await runWithConcurrency(tasks, this.getIndexConcurrency());
const staleRows = this.db
.prepare(`SELECT path FROM files WHERE source = ?`)
@@ -1346,7 +1352,8 @@ export class MemoryIndexManager implements MemorySearchManager {
const enabled = Boolean(
batch?.enabled &&
((this.openAi && this.provider.id === "openai") ||
(this.gemini && this.provider.id === "gemini")),
(this.gemini && this.provider.id === "gemini") ||
(this.voyage && this.provider.id === "voyage")),
);
return {
enabled,
@@ -1365,14 +1372,16 @@ export class MemoryIndexManager implements MemorySearchManager {
if (this.fallbackFrom) {
return false;
}
const fallbackFrom = this.provider.id as "openai" | "gemini" | "local";
const fallbackFrom = this.provider.id as "openai" | "gemini" | "local" | "voyage";
const fallbackModel =
fallback === "gemini"
? DEFAULT_GEMINI_EMBEDDING_MODEL
: fallback === "openai"
? DEFAULT_OPENAI_EMBEDDING_MODEL
: this.settings.model;
: fallback === "voyage"
? DEFAULT_VOYAGE_EMBEDDING_MODEL
: this.settings.model;
const fallbackResult = await createEmbeddingProvider({
config: this.cfg,
@@ -1389,6 +1398,7 @@ export class MemoryIndexManager implements MemorySearchManager {
this.provider = fallbackResult.provider;
this.openAi = fallbackResult.openAi;
this.gemini = fallbackResult.gemini;
this.voyage = fallbackResult.voyage;
this.providerKey = this.computeProviderKey();
this.batch = this.resolveBatchConfig();
log.warn(`memory embeddings: switched to fallback provider (${fallback})`, { reason });
@@ -1865,9 +1875,82 @@ export class MemoryIndexManager implements MemorySearchManager {
if (this.provider.id === "gemini" && this.gemini) {
return this.embedChunksWithGeminiBatch(chunks, entry, source);
}
if (this.provider.id === "voyage" && this.voyage) {
return this.embedChunksWithVoyageBatch(chunks, entry, source);
}
return this.embedChunksInBatches(chunks);
}
// Embeds memory chunks via the Voyage batch API, consulting the local
// embedding cache first and falling back to synchronous per-request
// embedding when no Voyage client is configured or the batch run fails.
// Returns one embedding per input chunk, index-aligned with `chunks`;
// chunks whose batch result is missing keep an empty vector.
private async embedChunksWithVoyageBatch(
chunks: MemoryChunk[],
entry: MemoryFileEntry | SessionFileEntry,
source: MemorySource,
): Promise<number[][]> {
const voyage = this.voyage;
// No Voyage client resolved: use the generic non-batch embedding path.
if (!voyage) {
return this.embedChunksInBatches(chunks);
}
if (chunks.length === 0) return [];
// Look up previously computed embeddings by chunk hash.
const cached = this.loadEmbeddingCache(chunks.map((chunk) => chunk.hash));
// Pre-fill with empty vectors so every index has a value even on partial results.
const embeddings: number[][] = Array.from({ length: chunks.length }, () => []);
const missing: Array<{ index: number; chunk: MemoryChunk }> = [];
for (let i = 0; i < chunks.length; i += 1) {
const chunk = chunks[i];
const hit = chunk?.hash ? cached.get(chunk.hash) : undefined;
// Only accept non-empty cached vectors; empty ones are re-embedded.
if (hit && hit.length > 0) {
embeddings[i] = hit;
} else if (chunk) {
missing.push({ index: i, chunk });
}
}
if (missing.length === 0) return embeddings;
// Build one batch request per missing chunk; custom_id ties the batch
// result back to the chunk's index and cache hash.
const requests: VoyageBatchRequest[] = [];
const mapping = new Map<string, { index: number; hash: string }>();
for (const item of missing) {
const chunk = item.chunk;
// Include index in the hashed id so identical chunks get distinct ids.
const customId = hashText(
`${source}:${entry.path}:${chunk.startLine}:${chunk.endLine}:${chunk.hash}:${item.index}`,
);
mapping.set(customId, { index: item.index, hash: chunk.hash });
requests.push({
custom_id: customId,
body: {
input: chunk.text,
},
});
}
const batchResult = await this.runBatchWithFallback({
provider: "voyage",
run: async () =>
await runVoyageEmbeddingBatches({
client: voyage,
agentId: this.agentId,
requests,
wait: this.batch.wait,
concurrency: this.batch.concurrency,
pollIntervalMs: this.batch.pollIntervalMs,
timeoutMs: this.batch.timeoutMs,
debug: (message, data) => log.debug(message, { ...data, source, chunks: chunks.length }),
}),
fallback: async () => await this.embedChunksInBatches(chunks),
});
// An array result means the fallback path ran and already produced
// index-aligned embeddings for every chunk.
if (Array.isArray(batchResult)) return batchResult;
const byCustomId = batchResult;
const toCache: Array<{ hash: string; embedding: number[] }> = [];
for (const [customId, embedding] of byCustomId.entries()) {
const mapped = mapping.get(customId);
// Ignore results we did not request (defensive against stray ids).
if (!mapped) continue;
embeddings[mapped.index] = embedding;
toCache.push({ hash: mapped.hash, embedding });
}
// Persist newly computed embeddings for future cache hits.
this.upsertEmbeddingCache(toCache);
return embeddings;
}
private async embedChunksWithOpenAiBatch(
chunks: MemoryChunk[],
entry: MemoryFileEntry | SessionFileEntry,
@@ -2108,41 +2191,6 @@ export class MemoryIndexManager implements MemorySearchManager {
}
}
private async runWithConcurrency<T>(tasks: Array<() => Promise<T>>, limit: number): Promise<T[]> {
if (tasks.length === 0) {
return [];
}
const resolvedLimit = Math.max(1, Math.min(limit, tasks.length));
const results: T[] = Array.from({ length: tasks.length });
let next = 0;
let firstError: unknown = null;
const workers = Array.from({ length: resolvedLimit }, async () => {
while (true) {
if (firstError) {
return;
}
const index = next;
next += 1;
if (index >= tasks.length) {
return;
}
try {
results[index] = await tasks[index]();
} catch (err) {
firstError = err;
return;
}
}
});
await Promise.allSettled(workers);
if (firstError) {
throw firstError;
}
return results;
}
private async withBatchFailureLock<T>(fn: () => Promise<T>): Promise<T> {
let release: () => void;
const wait = this.batchFailureLock;

View File

@@ -0,0 +1,25 @@
import { describe, expect, it } from "vitest";
import { classifySessionKeyShape } from "./session-key.js";
// Shape classification of session keys: missing, well-formed agent keys,
// malformed "agent:"-prefixed keys, and legacy/alias keys.
describe("classifySessionKeyShape", () => {
  it("classifies empty keys as missing", () => {
    for (const key of [undefined, " "]) {
      expect(classifySessionKeyShape(key)).toBe("missing");
    }
  });
  it("classifies valid agent keys", () => {
    for (const key of ["agent:main:main", "agent:research:subagent:worker"]) {
      expect(classifySessionKeyShape(key)).toBe("agent");
    }
  });
  it("classifies malformed agent keys", () => {
    for (const key of ["agent::broken", "agent:main"]) {
      expect(classifySessionKeyShape(key)).toBe("malformed_agent");
    }
  });
  it("treats non-agent legacy or alias keys as non-malformed", () => {
    for (const key of ["main", "custom-main", "subagent:worker"]) {
      expect(classifySessionKeyShape(key)).toBe("legacy_or_alias");
    }
  });
});

View File

@@ -10,6 +10,7 @@ export {
export const DEFAULT_AGENT_ID = "main";
export const DEFAULT_MAIN_KEY = "main";
export const DEFAULT_ACCOUNT_ID = "default";
export type SessionKeyShape = "missing" | "agent" | "legacy_or_alias" | "malformed_agent";
// Pre-compiled regex
const VALID_ID_RE = /^[a-z0-9][a-z0-9_-]{0,63}$/i;
@@ -58,6 +59,17 @@ export function resolveAgentIdFromSessionKey(sessionKey: string | undefined | nu
return normalizeAgentId(parsed?.agentId ?? DEFAULT_AGENT_ID);
}
/**
 * Classifies the structural shape of a session key without resolving it.
 *
 * @param sessionKey Raw key; null/undefined/blank count as missing.
 * @returns "missing" for blank input, "agent" when the key parses as a
 *          well-formed agent session key, "malformed_agent" when it carries
 *          the "agent:" prefix but does not parse, otherwise
 *          "legacy_or_alias".
 */
export function classifySessionKeyShape(sessionKey: string | undefined | null): SessionKeyShape {
  const key = (sessionKey ?? "").trim();
  if (key.length === 0) {
    return "missing";
  }
  if (parseAgentSessionKey(key)) {
    return "agent";
  }
  // Prefixed like an agent key but unparsable -> caller should warn/reject.
  if (key.toLowerCase().startsWith("agent:")) {
    return "malformed_agent";
  }
  return "legacy_or_alias";
}
export function normalizeAgentId(value: string | undefined | null): string {
const trimmed = (value ?? "").trim();
if (!trimmed) {

View File

@@ -128,6 +128,9 @@ export function createTelegramBot(opts: TelegramBotOptions) {
network: telegramCfg.network,
});
const shouldProvideFetch = Boolean(fetchImpl);
// grammY's ApiClientOptions types still track `node-fetch` types; Node 22+ global fetch
// (undici) is structurally compatible at runtime but not assignable in TS.
const fetchForClient = fetchImpl as unknown as NonNullable<ApiClientOptions["fetch"]>;
const timeoutSeconds =
typeof telegramCfg?.timeoutSeconds === "number" && Number.isFinite(telegramCfg.timeoutSeconds)
? Math.max(1, Math.floor(telegramCfg.timeoutSeconds))
@@ -135,7 +138,7 @@ export function createTelegramBot(opts: TelegramBotOptions) {
const client: ApiClientOptions | undefined =
shouldProvideFetch || timeoutSeconds
? {
...(shouldProvideFetch && fetchImpl ? { fetch: fetchImpl } : {}),
...(shouldProvideFetch && fetchImpl ? { fetch: fetchForClient } : {}),
...(timeoutSeconds ? { timeoutSeconds } : {}),
}
: undefined;

View File

@@ -79,15 +79,12 @@ describe("jidToE164", () => {
it("maps @lid using reverse mapping file", () => {
const mappingPath = path.join(CONFIG_DIR, "credentials", "lid-mapping-123_reverse.json");
const original = fs.readFileSync;
const spy = vi
.spyOn(fs, "readFileSync")
// oxlint-disable-next-line typescript/no-explicit-any
.mockImplementation((path: any, encoding?: any) => {
if (path === mappingPath) {
return `"5551234"`;
}
return original(path, encoding);
});
const spy = vi.spyOn(fs, "readFileSync").mockImplementation((...args) => {
if (args[0] === mappingPath) {
return `"5551234"`;
}
return original(...args);
});
expect(jidToE164("123@lid")).toBe("+5551234");
spy.mockRestore();
});
@@ -167,4 +164,9 @@ describe("resolveUserPath", () => {
it("resolves relative paths", () => {
expect(resolveUserPath("tmp/dir")).toBe(path.resolve("tmp/dir"));
});
it("keeps blank paths blank", () => {
expect(resolveUserPath("")).toBe("");
expect(resolveUserPath(" ")).toBe("");
});
});