Merge pull request #4124 from omnivore-app/fix/digest-score-client

fix: return a stub score (0) in case the score API throws an error
This commit is contained in:
Hongbo Wu
2024-07-01 12:21:50 +08:00
committed by GitHub
4 changed files with 46 additions and 26 deletions

View File

@ -97,24 +97,22 @@ export const scoreLibraryItem = async (
items: itemFeatures,
})
logger.info('Scores', scores)
const score = scores[libraryItem.id]['score']
const score = scores[libraryItem.id].score
if (!score) {
logger.error('Failed to score library item', data)
throw new Error('Failed to score library item')
} else {
await updateLibraryItem(
libraryItem.id,
{
score,
},
userId,
undefined,
true
)
logger.info('Library item score updated', data)
}
await updateLibraryItem(
libraryItem.id,
{
score,
},
userId,
undefined,
true
)
logger.info('Library item scored', data)
try {
await enqueueUpdateHomeJob({
userId,

View File

@ -248,7 +248,7 @@ const rankCandidates = async (
const scores = await scoreClient.getScores(data)
// update scores for candidates
candidates.forEach((item) => {
item.score = scores[item.id]['score'] || 0
item.score = scores[item.id].score || 0
})
// rank candidates by score in descending order
@ -494,8 +494,6 @@ const latency = new client.Histogram({
buckets: [0.1, 0.5, 1, 2, 5, 10],
})
latency.observe(10)
registerMetric(latency)
export const updateHome = async (data: UpdateHomeJobData) => {

View File

@ -92,8 +92,6 @@ const jobLatency = new client.Histogram({
buckets: [0, 1, 5, 10, 50, 100, 500],
})
jobLatency.observe(10)
registerMetric(jobLatency)
export const getBackendQueue = async (

View File

@ -1,5 +1,8 @@
import axios from 'axios'
import client from 'prom-client'
import { env } from '../env'
import { registerMetric } from '../prometheus'
import { logError } from '../utils/logger'
export interface Feature {
library_item_id?: string
@ -37,6 +40,15 @@ export type ScoreBody = {
score: number
}
// use prometheus to monitor the latency of digest score api
const latency = new client.Histogram({
name: 'omnivore_digest_score_latency',
help: 'Latency of digest score API in seconds',
buckets: [0.1, 0.5, 1, 2, 5, 10, 20, 30, 60],
})
registerMetric(latency)
export type ScoreApiResponse = Record<string, ScoreBody> // item_id -> score
interface ScoreClient {
getScores(data: ScoreApiRequestBody): Promise<ScoreApiResponse>
@ -63,14 +75,28 @@ class ScoreClientImpl implements ScoreClient {
}
async getScores(data: ScoreApiRequestBody): Promise<ScoreApiResponse> {
const response = await axios.post<ScoreApiResponse>(this.apiUrl, data, {
headers: {
'Content-Type': 'application/json',
},
timeout: 5000,
})
const start = Date.now()
return response.data
try {
const response = await axios.post<ScoreApiResponse>(this.apiUrl, data, {
headers: {
'Content-Type': 'application/json',
},
timeout: 5000,
})
return response.data
} catch (error) {
logError(error)
// Returns a stub score (0) in case of an error
return {
[Object.keys(data.items)[0]]: { score: 0 },
}
} finally {
const duration = (Date.now() - start) / 1000 // in seconds
latency.observe(duration)
}
}
}