Merge pull request #1046 from omnivore-app/fix/email-subject-parsing-error

Use parsed from, to and subject from email headers
Hongbo Wu authored on 2022-08-05 15:53:51 +08:00 · committed by GitHub
4 changed files with 87 additions and 103 deletions

View File

@@ -246,7 +246,15 @@ export interface SearchItem {
   highlights?: Highlight[]
 }
 
-const keys = ['_id', 'url', 'slug', 'userId', 'uploadFileId', 'state'] as const
+const keys = [
+  '_id',
+  'url',
+  'slug',
+  'userId',
+  'uploadFileId',
+  'state',
+  'id',
+] as const
 
 export type ParamSet = PickTuple<Page, typeof keys>
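A note on what the extra 'id' entry buys: `PickTuple` is not shown in this diff, but the call sites added below (`deletePagesByParam({ url }, ctx)`, `deletePagesByParam({ userId: user.id }, ctx)`) suggest a partial pick over the listed keys. The `Partial` wrapper and the slimmed-down `Page` stub in this sketch are assumptions for illustration, not the repo's actual definitions:

```ts
// Minimal stub for illustration; the real Page interface has many more fields.
interface Page {
  _id?: string
  id: string
  url: string
  slug: string
  userId: string
  uploadFileId?: string
  state: string
}

// Assumed shape of PickTuple: pick the properties named by a readonly tuple.
type PickTuple<T, K extends ReadonlyArray<keyof T>> = Partial<Pick<T, K[number]>>

const keys = ['_id', 'url', 'slug', 'userId', 'uploadFileId', 'state', 'id'] as const
type ParamSet = PickTuple<Page, typeof keys>

// Adding 'id' to `keys` is what allows lookups like this one:
const byId: ParamSet = { id: 'abc123' }
```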

View File

@@ -112,7 +112,7 @@ export const savePage = async (
         existingPage.id,
         {
           savedAt: new Date(),
-          archivedAt: undefined,
+          archivedAt: null,
         },
         ctx
       ))
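The `undefined` to `null` swap is more than style, assuming the update object is serialized to JSON on its way to the search index: `JSON.stringify` drops `undefined` properties entirely, so the old code never actually cleared `archivedAt` when an archived page was saved again. A quick illustration:

```ts
const broken = { savedAt: new Date(), archivedAt: undefined }
console.log(JSON.stringify(broken))
// {"savedAt":"2022-08-05T07:53:51.000Z"}
// archivedAt vanished, so a partial update leaves the stored value alone
// and the page stays archived.

const fixed = { savedAt: new Date(), archivedAt: null }
console.log(JSON.stringify(fixed))
// {"savedAt":"2022-08-05T07:53:51.000Z","archivedAt":null}
// null survives serialization and explicitly clears the field.
```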

View File

@@ -24,10 +24,12 @@ import { getRepository } from '../../src/entity/utils'
 import {
   createPage,
   deletePage,
+  deletePagesByParam,
   getPageById,
   updatePage,
 } from '../../src/elastic/pages'
 import { addHighlightToPage } from '../../src/elastic/highlights'
+import { refreshIndex } from '../../src/elastic'
 
 chai.use(chaiString)
@@ -401,7 +403,7 @@ describe('Article API', () => {
       document = '<p>test</p>'
       title = 'new title'
 
-      await createPage(
+      pageId = (await createPage(
         {
           content: document,
           createdAt: new Date(),
@@ -419,7 +421,11 @@
           archivedAt: new Date(),
         },
         ctx
-      )
+      ))!
     })
 
+    after(async () => {
+      await deletePage(pageId, ctx)
+    })
+
     it('should unarchive the article', async () => {
@@ -438,7 +444,8 @@
     let pageId: string
 
     before(async () => {
-      const page = {
+      const page: Page = {
+        state: ArticleSavingRequestStatus.Succeeded,
         id: '',
         hash: 'test hash',
         userId: user.id,
@@ -460,11 +467,11 @@
             patch: 'test patch',
             quote: 'test quote',
             updatedAt: new Date(),
+            userId: user.id,
           },
         ],
-      } as Page
-      const id = await createPage(page, ctx)
-      id && (pageId = id)
+      }
+      pageId = (await createPage(page, ctx))!
     })
 
     after(async () => {
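Two details worth calling out in this before hook. Annotating the literal as `const page: Page` instead of casting with `} as Page` is what forced the missing `state` and `userId` fields to surface: a type assertion silences missing-property errors, while an annotation makes the compiler check them. A self-contained illustration:

```ts
interface Demo {
  id: string
  state: string
}

// The cast compiles even though `state` is missing; the old test relied on this.
const cast = { id: '' } as Demo

// The annotation is actually checked:
// const typed: Demo = { id: '' }
// error TS2741: Property 'state' is missing in type '{ id: string; }'
```

The `(await createPage(page, ctx))!` non-null assertion replaces the old `id && (pageId = id)` dance; if creation ever fails, the test now blows up on the undefined id instead of silently skipping the assignment.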
@@ -538,6 +545,10 @@
     })
 
     context('when we save a new page', () => {
+      after(async () => {
+        await deletePagesByParam({ url }, ctx)
+      })
+
       it('should return a slugged url', async () => {
         const res = await graphqlRequest(query, authToken).expect(200)
         expect(res.body.data.savePage.url).to.startsWith(
@@ -547,45 +558,48 @@
       })
     })
 
     context('when we save a page that is already archived', () => {
-      it('it should return that page in the GetArticles Query', async () => {
+      before(() => {
+        url = 'https://example.com/new-url'
+      })
+
+      after(async () => {
+        await deletePagesByParam({ url }, ctx)
+      })
+
+      it('it should return that page in the GetArticles Query', async () => {
         await graphqlRequest(
           savePageQuery(url, title, originalContent),
           authToken
         ).expect(200)
+        await refreshIndex()
 
-        let allLinks
-        // Save a link, then archive it
-        // set a slight delay to make sure the page is updated
-        setTimeout(async () => {
-          let allLinks = await graphqlRequest(
-            articlesQuery(''),
-            authToken
-          ).expect(200)
-          const justSavedId = allLinks.body.data.articles.edges[0].node.id
-          await archiveLink(authToken, justSavedId)
-        }, 100)
+        // refresh the index to make sure the page is updated
+        let allLinks = await graphqlRequest(
+          articlesQuery(''),
+          authToken
+        ).expect(200)
+        const justSavedId = allLinks.body.data.articles.edges[0].node.id
+        await archiveLink(authToken, justSavedId)
+        await refreshIndex()
 
         // test the negative case, ensuring the archive link wasn't returned
-        setTimeout(async () => {
-          allLinks = await graphqlRequest(articlesQuery(''), authToken).expect(
-            200
-          )
-          expect(allLinks.body.data.articles.edges[0].node.url).to.not.eq(url)
-        }, 100)
+        allLinks = await graphqlRequest(articlesQuery(''), authToken).expect(
+          200
+        )
+        expect(allLinks.body.data.articles.edges[0]?.node?.url).to.not.eq(url)
 
         // Now save the link again, and ensure it is returned
         await graphqlRequest(
           savePageQuery(url, title, originalContent),
           authToken
         ).expect(200)
+        await refreshIndex()
 
-        setTimeout(async () => {
-          allLinks = await graphqlRequest(articlesQuery(''), authToken).expect(
-            200
-          )
-          expect(allLinks.body.data.articles.edges[0].node.url).to.eq(url)
-        }, 100)
+        allLinks = await graphqlRequest(articlesQuery(''), authToken).expect(
+          200
+        )
+        expect(allLinks.body.data.articles.edges[0].node.url).to.eq(url)
       })
     })
   })
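This rewrite is the heart of the test changes. Assertions inside a `setTimeout` callback are never awaited by Mocha: the test function returns before the callback fires, so a failing `expect` there is swallowed or surfaces as an unrelated unhandled rejection, and the test passes vacuously. Awaiting an explicit index refresh makes the reads deterministic instead. The `refreshIndex` helper imported above is not shown in this diff; with the official Elasticsearch client it is presumably something close to:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Assumed shape of the helper: force a refresh so documents written by the
// test become searchable immediately, rather than after the index's
// periodic (roughly 1s) refresh interval.
export const refreshIndex = async (index = 'pages'): Promise<void> => {
  await client.indices.refresh({ index })
}
```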
@@ -599,6 +613,10 @@
     })
 
     context('when we save a new url', () => {
+      after(async () => {
+        await deletePagesByParam({ url }, ctx)
+      })
+
       it('should return a slugged url', async () => {
         const res = await graphqlRequest(query, authToken).expect(200)
         expect(res.body.data.saveUrl.url).to.startsWith(
@@ -606,43 +624,6 @@
         )
       })
     })
-
-    context('when we save a url that is already archived', () => {
-      it('it should return that page in the GetArticles Query', async () => {
-        url = 'https://example.com/new-url'
-        await graphqlRequest(saveUrlQuery(url), authToken).expect(200)
-
-        let allLinks
-        // Save a link, then archive it
-        // set a slight delay to make sure the page is updated
-        setTimeout(async () => {
-          let allLinks = await graphqlRequest(
-            articlesQuery(''),
-            authToken
-          ).expect(200)
-          const justSavedId = allLinks.body.data.articles.edges[0].node.id
-          await archiveLink(authToken, justSavedId)
-        }, 100)
-
-        // test the negative case, ensuring the archive link wasn't returned
-        setTimeout(async () => {
-          allLinks = await graphqlRequest(articlesQuery(''), authToken).expect(
-            200
-          )
-          expect(allLinks.body.data.articles.edges[0].node.url).to.not.eq(url)
-        }, 100)
-
-        // Now save the link again, and ensure it is returned
-        await graphqlRequest(saveUrlQuery(url), authToken).expect(200)
-
-        setTimeout(async () => {
-          allLinks = await graphqlRequest(articlesQuery(''), authToken).expect(
-            200
-          )
-          expect(allLinks.body.data.articles.edges[0].node.url).to.eq(url)
-        }, 100)
-      })
-    })
   })
 
   describe('setBookmarkArticle', () => {
@@ -713,13 +694,11 @@
     let pageId = ''
 
     before(async () => {
-      pageId = (await createTestElasticPage(user)).id
+      pageId = (await createTestElasticPage(user)).id!
     })
 
     after(async () => {
-      if (pageId) {
-        await deletePage(pageId, ctx)
-      }
+      await deletePage(pageId, ctx)
     })
 
     beforeEach(() => {
@@ -749,24 +728,23 @@
         firstRes.body.data.saveArticleReadingProgress.updatedArticle
           .readingProgressPercent
       ).to.eq(75)
+      await refreshIndex()
 
       // Now try to set to a lower value (50), value should not be updated
-      // have a slight delay to ensure the reading progress is updated
-      setTimeout(async () => {
-        const secondQuery = saveArticleReadingProgressQuery(articleId, 50)
-        const secondRes = await graphqlRequest(secondQuery, authToken).expect(
-          200
-        )
-        expect(
-          secondRes.body.data.saveArticleReadingProgress.updatedArticle
-            .readingProgressPercent
-        ).to.eq(75)
-      }, 100)
+      // refresh index to ensure the reading progress is updated
+      const secondQuery = saveArticleReadingProgressQuery(articleId, 50)
+      const secondRes = await graphqlRequest(secondQuery, authToken).expect(
+        200
+      )
+      expect(
+        secondRes.body.data.saveArticleReadingProgress.updatedArticle
+          .readingProgressPercent
+      ).to.eq(75)
     })
   })
 })
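Same `setTimeout` fix as above, applied to the reading-progress test. The behavior being pinned down is that reading progress is monotonic: writing 50 after 75 must leave 75 in place. The resolver itself is not part of this diff; a hypothetical sketch of the rule it enforces:

```ts
// Hypothetical: reading progress only ever moves forward.
const nextReadingProgress = (stored: number, incoming: number): number =>
  Math.max(stored, incoming)

nextReadingProgress(75, 50) // 75, the regression is ignored
nextReadingProgress(75, 90) // 90, forward progress is kept
```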
-describe('SaveFile', () => {
+xdescribe('SaveFile', () => {
   let query = ''
   let url = ''
   let uploadFileId = ''
@@ -781,7 +759,7 @@
     uploadFileId = generateFakeUuid()
   })
 
-  xit('should return Unauthorized error', async () => {
+  it('should return Unauthorized error', async () => {
     const res = await graphqlRequest(query, authToken).expect(200)
     expect(res.body.data.saveFile.errorCodes).to.eql(['UNAUTHORIZED'])
   })
@@ -800,7 +778,7 @@
     uploadFileId = uploadFile.id
   })
 
-  xit('should return the new url', async () => {
+  it('should return the new url', async () => {
     const res = await graphqlRequest(query, authToken).expect(200)
     expect(res.body.data.saveFile.url).to.startsWith(
       'http://localhost:3000/fakeUser/links'
@@ -836,11 +814,7 @@
         savedAt: new Date(),
         state: ArticleSavingRequestStatus.Succeeded,
       }
-      const pageId = await createPage(page, ctx)
-      if (!pageId) {
-        expect.fail('Failed to create page')
-      }
-      page.id = pageId
+      page.id = (await createPage(page, ctx))!
       pages.push(page)
 
       // Create some test highlights
@@ -853,7 +827,7 @@
         createdAt: new Date(),
         updatedAt: new Date(),
       }
-      await addHighlightToPage(pageId, highlight, ctx)
+      await addHighlightToPage(page.id, highlight, ctx)
       highlights.push(highlight)
     }
   })
@@ -862,6 +836,10 @@
     query = searchQuery(keyword)
   })
 
+  after(async () => {
+    await deletePagesByParam({ userId: user.id }, ctx)
+  })
+
   context('when type:highlights is not in the query', () => {
     before(() => {
       keyword = 'search'
@@ -948,11 +926,7 @@
         savedAt: new Date(),
         state: ArticleSavingRequestStatus.Succeeded,
       }
-      const pageId = await createPage(page, ctx)
-      if (!pageId) {
-        expect.fail('Failed to create page')
-      }
-      page.id = pageId
+      page.id = (await createPage(page, ctx))!
       pages.push(page)
     }
   })
@@ -961,6 +935,10 @@
     query = typeaheadSearchQuery(keyword)
   })
 
+  after(async () => {
+    await deletePagesByParam({ userId: user.id }, ctx)
+  })
+
   it('should return pages with typeahead prefix', async () => {
     const res = await graphqlRequest(query, authToken).expect(200)
@@ -1046,9 +1024,7 @@
   after(async () => {
     // Delete all pages
-    for (let i = 0; i < pages.length; i++) {
-      await deletePage(pages[i].id, ctx)
-    }
+    await deletePagesByParam({ userId: user.id }, ctx)
   })
 
   it('returns pages deleted after since', async () => {
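`deletePagesByParam` replaces the delete-one-page-at-a-time loops in the teardowns above. Its implementation is not in this diff; given the `{ field: value }` call sites, a plausible sketch on top of Elasticsearch's delete-by-query API (the real helper also takes the `ctx` argument and uses Omnivore's own client wiring) would be:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Hypothetical sketch: remove every document matching all given fields in
// one round trip, instead of N deletePage calls in a loop.
const deletePagesByParam = async (
  param: Record<string, string>,
  index = 'pages'
): Promise<void> => {
  await client.deleteByQuery({
    index,
    body: {
      query: {
        bool: {
          // one term filter per { field: value } pair
          filter: Object.entries(param).map(([field, value]) => ({
            term: { [field]: value },
          })),
        },
      },
    },
  })
}
```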

View File

@@ -66,8 +66,8 @@ export const inboundEmailHandler = Sentry.GCPFunction.wrapHttpFunction(
       console.log('headers: ', headers)
 
       // original sender email address
-      const from = headers['from'].toString()
-      const subject = headers['subject']?.toString()
+      const from = parsed['from']
+      const subject = parsed['subject']
       const html = parsed['html']
       const text = parsed['text']
@@ -80,7 +80,7 @@
       const forwardedFrom = headers['x-forwarded-for']?.toString().split(' ')[0]
 
       // if an email is forwarded to the inbox, the to is the forwarding email recipient
-      const to = forwardedTo || headers['to'].toString()
+      const to = forwardedTo || parsed['to']
       const postHeader = headers['list-post']?.toString()
       const unSubHeader = headers['list-unsubscribe']?.toString()
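This last file is the fix the PR is named for. Stringifying raw header values is fragile in two ways: it can throw when a header is absent (`headers['from'].toString()` on an undefined value), and raw values keep their RFC 2047 encoding, so a non-ASCII subject comes through as `=?UTF-8?B?...?=` gibberish. Reading from the parsed message returns decoded values instead. Assuming `parsed` comes from mailparser's `simpleParser` (the wiring is outside this hunk), the difference looks like this:

```ts
import { simpleParser } from 'mailparser'

const raw = [
  'From: =?UTF-8?B?SG9uZ2Jv?= <sender@example.com>',
  'To: reader@example.com',
  'Subject: =?UTF-8?B?SGVsbG8gd29ybGQ=?=',
  '',
  'Hi there',
].join('\r\n')

const main = async () => {
  const parsed = await simpleParser(raw)
  // simpleParser decodes MIME encoded-words, so these are plain text:
  console.log(parsed.subject) // 'Hello world'
  console.log(parsed.from?.text) // 'Hongbo <sender@example.com>'
  // The raw subject header, by contrast, is still '=?UTF-8?B?SGVsbG8gd29ybGQ=?='.
}

void main()
```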