queue email sending jobs in exporter and importer

This commit is contained in:
Hongbo Wu
2024-07-21 11:11:29 +08:00
parent f749090a7d
commit 95657e3d3e
14 changed files with 104 additions and 35 deletions

View File

@ -46,7 +46,12 @@ const parseDate = (date: string): Date => {
export const importCsv = async (ctx: ImportContext, stream: Stream) => {
// create metrics in redis
await createMetrics(ctx.redisClient, ctx.userId, ctx.taskId, ctx.source)
await createMetrics(
ctx.redisDataSource.cacheClient,
ctx.userId,
ctx.taskId,
ctx.source
)
const parser = parse({
headers: true,
@ -68,7 +73,7 @@ export const importCsv = async (ctx: ImportContext, stream: Stream) => {
// update total counter
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.TOTAL
@ -79,7 +84,7 @@ export const importCsv = async (ctx: ImportContext, stream: Stream) => {
ctx.countImported += 1
// update started counter
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.STARTED
@ -96,7 +101,7 @@ export const importCsv = async (ctx: ImportContext, stream: Stream) => {
ctx.countFailed += 1
// update invalid counter
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.INVALID

View File

@ -4,13 +4,13 @@ import { RedisDataSource } from '@omnivore/utils'
import * as Sentry from '@sentry/serverless'
import axios from 'axios'
import 'dotenv/config'
import Redis from 'ioredis'
import * as jwt from 'jsonwebtoken'
import { Stream } from 'node:stream'
import * as path from 'path'
import { promisify } from 'util'
import { v4 as uuid } from 'uuid'
import { importCsv } from './csv'
import { queueEmailJob } from './job'
import { importMatterArchive } from './matterHistory'
import { ImportStatus, updateMetrics } from './metrics'
import { CONTENT_FETCH_URL, createCloudTask, emailUserUrl } from './task'
@ -57,7 +57,7 @@ export type ImportContext = {
countFailed: number
urlHandler: UrlHandler
contentHandler: ContentHandler
redisClient: Redis
redisDataSource: RedisDataSource
taskId: string
source: string
}
@ -140,32 +140,40 @@ const createEmailCloudTask = async (userId: string, payload: unknown) => {
)
}
const sendImportFailedEmail = async (userId: string) => {
return createEmailCloudTask(userId, {
const sendImportFailedEmail = async (
redisDataSource: RedisDataSource,
userId: string
) => {
return queueEmailJob(redisDataSource, {
userId,
subject: 'Your Omnivore import failed.',
body: `There was an error importing your file. Please ensure you uploaded the correct file type, if you need help, please email feedback@omnivore.app`,
html: `There was an error importing your file. Please ensure you uploaded the correct file type, if you need help, please email feedback@omnivore.app`,
})
}
export const sendImportStartedEmail = async (
redisDataSource: RedisDataSource,
userId: string,
urlsEnqueued: number,
urlsFailed: number
) => {
return createEmailCloudTask(userId, {
return queueEmailJob(redisDataSource, {
userId,
subject: 'Your Omnivore import has started',
body: `We have started processing ${urlsEnqueued} URLs. ${urlsFailed} URLs are invalid.`,
html: `We have started processing ${urlsEnqueued} URLs. ${urlsFailed} URLs are invalid.`,
})
}
export const sendImportCompletedEmail = async (
redisDataSource: RedisDataSource,
userId: string,
urlsImported: number,
urlsFailed: number
) => {
return createEmailCloudTask(userId, {
return queueEmailJob(redisDataSource, {
userId,
subject: 'Your Omnivore import has finished',
body: `We have finished processing ${
html: `We have finished processing ${
urlsImported + urlsFailed
} URLs. ${urlsImported} URLs have been added to your library. ${urlsFailed} URLs failed to be parsed.`,
})
@ -298,7 +306,10 @@ const contentHandler = async (
return Promise.resolve()
}
const handleEvent = async (data: StorageEvent, redisClient: Redis) => {
const handleEvent = async (
data: StorageEvent,
redisDataSource: RedisDataSource
) => {
if (shouldHandle(data)) {
const handler = handlerForFile(data.name)
if (!handler) {
@ -329,7 +340,7 @@ const handleEvent = async (data: StorageEvent, redisClient: Redis) => {
countFailed: 0,
urlHandler,
contentHandler,
redisClient,
redisDataSource,
taskId: data.name,
source: importSource(data.name),
}
@ -337,9 +348,14 @@ const handleEvent = async (data: StorageEvent, redisClient: Redis) => {
await handler(ctx, stream)
if (ctx.countImported > 0) {
await sendImportStartedEmail(userId, ctx.countImported, ctx.countFailed)
await sendImportStartedEmail(
ctx.redisDataSource,
userId,
ctx.countImported,
ctx.countFailed
)
} else {
await sendImportFailedEmail(userId)
await sendImportFailedEmail(ctx.redisDataSource, userId)
}
}
}
@ -377,7 +393,7 @@ export const importHandler = Sentry.GCPFunction.wrapHttpFunction(
})
try {
await handleEvent(obj, redisDataSource.cacheClient)
await handleEvent(obj, redisDataSource)
} catch (err) {
console.log('error handling event', { err, obj })
throw err
@ -436,7 +452,7 @@ export const importMetricsCollector = Sentry.GCPFunction.wrapHttpFunction(
try {
// update metrics
await updateMetrics(
redisDataSource.cacheClient,
redisDataSource,
userId,
req.body.taskId,
req.body.status

View File

@ -0,0 +1,23 @@
import { RedisDataSource } from '@omnivore/utils'
import { Queue } from 'bullmq'
// BullMQ queue shared with the backend; the worker there consumes these jobs.
const QUEUE_NAME = 'omnivore-backend-queue'
// Job name the backend worker matches on to dispatch email sending.
export const SEND_EMAIL_JOB = 'send-email'
// Payload for a send-email job.
// NOTE(review): field semantics are defined by the consuming worker —
// presumably `userId` resolves the recipient address and `html` is the
// message body; confirm against the backend job handler.
interface SendEmailJobData {
  userId: string
  from?: string
  subject?: string
  html?: string
}
/**
 * Enqueue a send-email job on the shared backend queue.
 *
 * @param redisDataSource - provides the Redis connection used by BullMQ
 * @param data - email job payload forwarded to the backend worker
 * @returns resolves once the job has been added to the queue
 */
export const queueEmailJob = async (
  redisDataSource: RedisDataSource,
  data: SendEmailJobData
) => {
  const queue = new Queue(QUEUE_NAME, {
    connection: redisDataSource.queueRedisClient,
  })
  try {
    await queue.add(SEND_EMAIL_JOB, data)
  } finally {
    // A Queue instance registers Redis event listeners; close it so
    // repeated calls (one per email) do not leak handles/listeners.
    await queue.close()
  }
}

View File

@ -37,7 +37,7 @@ export const importMatterHistoryCsv = async (
const url = new URL(row['URL'])
// update total counter
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.TOTAL
@ -46,7 +46,7 @@ export const importMatterHistoryCsv = async (
ctx.countImported += 1
// update started counter
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.STARTED
@ -219,7 +219,7 @@ const handleMatterHistoryRow = async (
ctx.countFailed += 1
// update failed counter
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.FAILED
@ -254,7 +254,7 @@ export const importMatterArchive = async (
try {
// create metrics in redis
await createMetrics(
ctx.redisClient,
ctx.redisDataSource.cacheClient,
ctx.userId,
ctx.taskId,
'matter-importer'
@ -273,7 +273,7 @@ export const importMatterArchive = async (
try {
// update total metrics
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.TOTAL
@ -284,7 +284,7 @@ export const importMatterArchive = async (
ctx.countImported += 1
// update started metrics
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.STARTED
@ -294,7 +294,7 @@ export const importMatterArchive = async (
ctx.countFailed += 1
// update failed metrics
await updateMetrics(
ctx.redisClient,
ctx.redisDataSource,
ctx.userId,
ctx.taskId,
ImportStatus.FAILED

View File

@ -47,13 +47,14 @@ export const createMetrics = async (
}
export const updateMetrics = async (
redisClient: Redis,
redisDataSource: RedisDataSource,
userId: string,
taskId: string,
status: ImportStatus
) => {
const key = `import:${userId}:${taskId}`
const redisClient = redisDataSource.cacheClient
/**
* Define our command
*/
@ -109,7 +110,12 @@ export const updateMetrics = async (
if ((state as ImportTaskState) == ImportTaskState.FINISHED) {
const metrics = await getMetrics(redisClient, userId, taskId)
if (metrics) {
await sendImportCompletedEmail(userId, metrics.imported, metrics.failed)
await sendImportCompletedEmail(
redisDataSource,
userId,
metrics.imported,
metrics.failed
)
}
}
} catch (error) {

View File

@ -26,7 +26,7 @@ describe('Test csv importer', () => {
},
})
stub = stubImportCtx(redisDataSource.cacheClient)
stub = stubImportCtx(redisDataSource)
})
afterEach(async () => {

View File

@ -30,7 +30,7 @@ describe('matter importer', () => {
},
})
stub = stubImportCtx(redisDataSource.cacheClient)
stub = stubImportCtx(redisDataSource)
})
afterEach(async () => {

View File

@ -1,8 +1,10 @@
import { Readability } from '@omnivore/readability'
import Redis from 'ioredis'
import { RedisDataSource } from '@omnivore/utils'
import { ArticleSavingRequestStatus, ImportContext } from '../src'
export const stubImportCtx = (redisClient: Redis): ImportContext => {
export const stubImportCtx = (
redisDataSource: RedisDataSource
): ImportContext => {
return {
userId: '',
countImported: 0,
@ -24,7 +26,7 @@ export const stubImportCtx = (redisClient: Redis): ImportContext => {
): Promise<void> => {
return Promise.resolve()
},
redisClient,
redisDataSource,
taskId: '',
source: 'csv-importer',
}