refactor(memory): reuse runWithConcurrency

This commit is contained in:
Peter Steinberger
2026-02-14 13:34:16 +00:00
parent 60a7625f2a
commit e707a7bd36
2 changed files with 2 additions and 72 deletions

View File

@@ -1,7 +1,7 @@
import type { GeminiEmbeddingClient } from "./embeddings-gemini.js";
import { isTruthyEnvValue } from "../infra/env.js";
import { createSubsystemLogger } from "../logging/subsystem.js";
import { hashText } from "./internal.js";
import { hashText, runWithConcurrency } from "./internal.js";
export type GeminiBatchRequest = {
custom_id: string;
@@ -277,41 +277,6 @@ async function waitForGeminiBatch(params: {
}
}
/**
 * Runs async task factories with at most `limit` in flight at once.
 *
 * Results are returned in task order (not completion order). The first
 * rejection stops workers from picking up new tasks (already in-flight
 * tasks finish) and its reason is rethrown after all workers settle.
 *
 * @param tasks Factories producing the promises to run; each is invoked once.
 * @param limit Maximum concurrent tasks; clamped to [1, tasks.length].
 * @returns Results in the same order as `tasks`.
 * @throws The first rejection reason observed, verbatim.
 */
async function runWithConcurrency<T>(tasks: Array<() => Promise<T>>, limit: number): Promise<T[]> {
  if (tasks.length === 0) {
    return [];
  }
  const resolvedLimit = Math.max(1, Math.min(limit, tasks.length));
  // Sparse array sized up front; each worker writes only its claimed slots.
  const results: T[] = Array.from({ length: tasks.length });
  let next = 0;
  // Track failure with an explicit boolean: checking `if (firstError)` would
  // silently swallow falsy rejection reasons (undefined, 0, "", null), e.g.
  // a bare `Promise.reject()`, and let the run resolve as if it succeeded.
  let failed = false;
  let firstError: unknown = null;
  const workers = Array.from({ length: resolvedLimit }, async () => {
    while (true) {
      if (failed) {
        return;
      }
      // Claim the next index synchronously (single-threaded event loop, no
      // await between read and increment, so no two workers share a slot).
      const index = next;
      next += 1;
      if (index >= tasks.length) {
        return;
      }
      try {
        results[index] = await tasks[index]();
      } catch (err) {
        if (!failed) {
          failed = true;
          firstError = err;
        }
        return;
      }
    }
  });
  // allSettled (not all): worker promises never reject, but this also keeps
  // us waiting for every in-flight task before reporting the outcome.
  await Promise.allSettled(workers);
  if (failed) {
    throw firstError;
  }
  return results;
}
export async function runGeminiEmbeddingBatches(params: {
gemini: GeminiEmbeddingClient;
agentId: string;

View File

@@ -1,6 +1,6 @@
import type { OpenAiEmbeddingClient } from "./embeddings-openai.js";
import { retryAsync } from "../infra/retry.js";
import { hashText } from "./internal.js";
import { hashText, runWithConcurrency } from "./internal.js";
export type OpenAiBatchRequest = {
custom_id: string;
@@ -245,41 +245,6 @@ async function waitForOpenAiBatch(params: {
}
}
/**
 * Executes the given task factories with a bounded number in flight.
 *
 * Output preserves task order regardless of completion order. When a task
 * rejects, remaining work is abandoned (tasks already started still run to
 * completion) and the first rejection reason is rethrown once all workers
 * have settled.
 *
 * @param tasks Zero-argument factories, each invoked at most once.
 * @param limit Desired concurrency; effective value is clamped to [1, tasks.length].
 * @returns Per-task results, index-aligned with `tasks`.
 */
async function runWithConcurrency<T>(tasks: Array<() => Promise<T>>, limit: number): Promise<T[]> {
  const total = tasks.length;
  if (total === 0) {
    return [];
  }
  const workerCount = Math.max(1, Math.min(limit, total));
  const output: T[] = Array.from({ length: total });
  let cursor = 0;
  let firstError: unknown = null;
  // Each worker repeatedly claims the next unprocessed slot until the queue
  // is exhausted or a failure has been recorded.
  const drain = async (): Promise<void> => {
    for (;;) {
      if (firstError) {
        return;
      }
      const slot = cursor++;
      if (slot >= total) {
        return;
      }
      try {
        output[slot] = await tasks[slot]();
      } catch (err) {
        firstError = err;
        return;
      }
    }
  };
  // Wait for every worker; they capture errors internally, so allSettled is
  // just "join all" here.
  await Promise.allSettled(Array.from({ length: workerCount }, drain));
  if (firstError) {
    throw firstError;
  }
  return output;
}
export async function runOpenAiEmbeddingBatches(params: {
openAi: OpenAiEmbeddingClient;
agentId: string;