fix(test): update media-understanding tests for the whisper skip-empty-audio feature

Increase test audio file sizes to meet MIN_AUDIO_FILE_BYTES (1024) threshold
introduced by the skip-empty-audio feature. Fix localPathRoots in skip-tiny-audio
tests so temp files pass path validation. Remove undefined loadApply() call
in apply.test.ts.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Glucksberg
2026-02-27 15:59:41 +00:00
committed by Peter Steinberger
parent 5f19112217
commit f7b0378ccb
3 changed files with 18 additions and 13 deletions

View File

@@ -174,7 +174,7 @@ async function createAudioCtx(params?: {
}): Promise<MsgContext> {
const mediaPath = await createTempMediaFile({
fileName: params?.fileName ?? "note.ogg",
content: params?.content ?? Buffer.from([0, 255, 0, 1, 2, 3, 4, 5, 6, 7, 8]),
content: params?.content ?? Buffer.alloc(2048, 0xab),
});
return {
Body: params?.body ?? "<media:audio>",
@@ -190,7 +190,7 @@ async function setupAudioAutoDetectCase(stdout: string): Promise<{
const ctx = await createAudioCtx({
fileName: "sample.wav",
mediaType: "audio/wav",
content: "audio",
content: Buffer.alloc(2048, 0xab),
});
const cfg: OpenClawConfig = { tools: { media: { audio: {} } } };
mockedRunExec.mockResolvedValueOnce({
@@ -249,7 +249,7 @@ describe("applyMediaUnderstanding", () => {
mockedFetchRemoteMedia.mockClear();
mockedRunExec.mockReset();
mockedFetchRemoteMedia.mockResolvedValue({
buffer: Buffer.from([0, 255, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]),
buffer: Buffer.alloc(2048, 0xab),
contentType: "audio/ogg",
fileName: "note.ogg",
});
@@ -288,7 +288,7 @@ describe("applyMediaUnderstanding", () => {
const ctx = await createAudioCtx({
fileName: "data.mp3",
mediaType: "audio/mpeg",
content: '"a","b"\n"1","2"',
content: `"a","b"\n"1","2"\n${"x".repeat(2048)}`,
});
const result = await applyMediaUnderstanding({
ctx,
@@ -361,7 +361,6 @@ describe("applyMediaUnderstanding", () => {
});
it("skips URL-only audio when remote file is too small", async () => {
const { applyMediaUnderstanding } = await loadApply();
// Override the default mock to return a tiny buffer (below MIN_AUDIO_FILE_BYTES)
mockedFetchRemoteMedia.mockResolvedValueOnce({
buffer: Buffer.alloc(100),
@@ -541,7 +540,7 @@ describe("applyMediaUnderstanding", () => {
const ctx = await createAudioCtx({
fileName: "sample.wav",
mediaType: "audio/wav",
content: "audio",
content: Buffer.alloc(2048, 0xab),
});
const cfg: OpenClawConfig = { tools: { media: { audio: {} } } };
mockedResolveApiKey.mockResolvedValue({
@@ -655,7 +654,7 @@ describe("applyMediaUnderstanding", () => {
it("uses active model when enabled and models are missing", async () => {
const audioPath = await createTempMediaFile({
fileName: "fallback.ogg",
content: Buffer.from([0, 255, 0, 1, 2, 3, 4, 5, 6]),
content: Buffer.alloc(2048, 0xab),
});
const ctx: MsgContext = {
@@ -691,7 +690,7 @@ describe("applyMediaUnderstanding", () => {
it("handles multiple audio attachments when attachment mode is all", async () => {
const dir = await createTempMediaDir();
const audioBytes = Buffer.from([200, 201, 202, 203, 204, 205, 206, 207, 208]);
const audioBytes = Buffer.alloc(2048, 0xab);
const audioPathA = path.join(dir, "note-a.ogg");
const audioPathB = path.join(dir, "note-b.ogg");
await fs.writeFile(audioPathA, audioBytes);
@@ -738,7 +737,7 @@ describe("applyMediaUnderstanding", () => {
const audioPath = path.join(dir, "note.ogg");
const videoPath = path.join(dir, "clip.mp4");
await fs.writeFile(imagePath, "image-bytes");
await fs.writeFile(audioPath, Buffer.from([200, 201, 202, 203, 204, 205, 206, 207, 208]));
await fs.writeFile(audioPath, Buffer.alloc(2048, 0xab));
await fs.writeFile(videoPath, "video-bytes");
const ctx: MsgContext = {

View File

@@ -24,7 +24,9 @@ describe("runCapability skips tiny audio files", () => {
const ctx: MsgContext = { MediaPath: tmpPath, MediaType: "audio/wav" };
const media = normalizeMediaAttachments(ctx);
const cache = createMediaAttachmentCache(media);
const cache = createMediaAttachmentCache(media, {
localPathRoots: [path.dirname(tmpPath)],
});
let transcribeCalled = false;
const providerRegistry = buildProviderRegistry({
@@ -85,7 +87,9 @@ describe("runCapability skips tiny audio files", () => {
const ctx: MsgContext = { MediaPath: tmpPath, MediaType: "audio/ogg" };
const media = normalizeMediaAttachments(ctx);
const cache = createMediaAttachmentCache(media);
const cache = createMediaAttachmentCache(media, {
localPathRoots: [path.dirname(tmpPath)],
});
let transcribeCalled = false;
const providerRegistry = buildProviderRegistry({
@@ -139,7 +143,9 @@ describe("runCapability skips tiny audio files", () => {
const ctx: MsgContext = { MediaPath: tmpPath, MediaType: "audio/wav" };
const media = normalizeMediaAttachments(ctx);
const cache = createMediaAttachmentCache(media);
const cache = createMediaAttachmentCache(media, {
localPathRoots: [path.dirname(tmpPath)],
});
let transcribeCalled = false;
const providerRegistry = buildProviderRegistry({

View File

@@ -49,7 +49,7 @@ export async function withAudioFixture(
filePrefix,
extension: "wav",
mediaType: "audio/wav",
fileContents: Buffer.from("RIFF"),
fileContents: Buffer.alloc(2048, 0x52),
},
run,
);