From debb396b445e21d75d5160214ed768d95b313a8c Mon Sep 17 00:00:00 2001
From: Hongbo Wu
Date: Mon, 29 Apr 2024 17:34:10 +0800
Subject: [PATCH] better error handling

---
 packages/api/src/jobs/ai/create_digest.ts | 28 +++++++++++++----------
 1 file changed, 16 insertions(+), 12 deletions(-)

diff --git a/packages/api/src/jobs/ai/create_digest.ts b/packages/api/src/jobs/ai/create_digest.ts
index ff4ccf3a9..487401926 100644
--- a/packages/api/src/jobs/ai/create_digest.ts
+++ b/packages/api/src/jobs/ai/create_digest.ts
@@ -379,17 +379,6 @@ const summarizeItems = async (
     digestDefinition.summaryPrompt
   )
 
-  const prompts = await Promise.all(
-    rankedCandidates.map(
-      async (item) =>
-        await contextualTemplate.format({
-          title: item.libraryItem.title,
-          author: item.libraryItem.author ?? '',
-          content: item.libraryItem.readableContent, // markdown content
-        })
-    )
-  )
-
   //   // send all the ranked candidates to openAI at once in a batch
   //   const summaries = await chain.batch(
   //     rankedCandidates.map((item) => ({
@@ -399,7 +388,22 @@
   //     }))
   //   )
 
-  const summaries = await llm.batch(prompts)
+  const summaries = await Promise.all(
+    rankedCandidates.map(async (item) => {
+      try {
+        const prompt = await contextualTemplate.format({
+          title: item.libraryItem.title,
+          author: item.libraryItem.author ?? '',
+          content: item.libraryItem.readableContent, // markdown content
+        })
+
+        return await llm.invoke(prompt)
+      } catch (error) {
+        logger.error('summarizeItems error', error)
+        return { content: '' }
+      }
+    })
+  )
 
   summaries.forEach(
     (summary, index) =>