* Add elastic to our docker compose * add AND/OR/NOT search operations * add elastic and create article in elastic * change error code when elastic throws error * add search pages in elastic * add search by labels * Add elastic to GitHub Action * Update elastic version * Fix port for elastic * add url in search query * Set elastic features when running tests * add debug logs * Use localhost instead of service hostname * refresh elastic after create/update * update search labels query * add typescript support * search pages in elastic * fix search queries * use elastic for saving page * fix test failure * update getArticle api to use elastic * use generic get page function * add elastic migration python script * fix bulk helper param * save elastic page id in article_saving_request instead of postgres article_id * fix page archiving and deleting * add tests for deleteArticle * remove custom date type in elastic mappings which not exist in older version of elastic * fix timestamp format issue * add tests for save reading progress * add tests for save file * optimize search results * add alias to index * update migration script to receive env var as params * Add failing test to validate we don't decrease reading progress This test is failing with Elastic because we aren't fetching the reading progress from elastic here, and are fetching it from postgres. * Rename readingProgress to readingProgressPercent This is the name stored in elastic, so fixes issues pulling the value out. * Linting * Add failing test for creating highlights w/elastic This test fails because the highlight can't be looked up. Is there a different ID we should be passing in to query for highlights, or do we need to update the query to look for elastic_id? 
* add tests code coverage threshold * update nyc config * include more files in test coverage * change alias name * update updateContent to update pages in elastic * remove debug log * fix createhighlight test * search pages by alias in elastic * update set labels and delete labels in elastic * migration script enumeration * make BULK_SIZE an env var * fix pdf search indexing * debug github action exit issue * call pubsub when create/update/delete page in elastic * fix json parsing bug and reduce reading data from file * replace a deprecated pubsub api call * debug github action exit issue * debug github action exit issue * add handler to upload elastic page data to GCS * fix tests * Use http_auth instead of basic_auth * add index creation and existing postgres tables update in migration script * fix a typo to connect to elastic * rename readingProgress to readingProgressPercent * migrate elastic_page_id in highlights and article_saving_request tables * update migration script to include number of updated rows * update db migration query * read index mappings from file * fix upload pages to gcs * fix tests failure due to pageContext * fix upload file id not exist error * Handle savedAt & isArchived attributes w/out querying elastic * Fix prettier issues * fix content-type mismatching * revert pageId to linkId because frontend was not deployed yet * fix newsletters and attachment not saved in elastic * put linkId in article for setting labels * exclude originalHtml in the result of searching to improve performance * exclude content in the result of searching to improve performance * remove score sorting * do not refresh immediately to reduce searching and indexing time * do not replace the backup data in gcs * fix no article id defined in articleSavingRequest * add logging of elastic api running time * reduce home feed pagination size to 15 * reduce home feed pagination size to 10 * stop revalidating first page * do not use a separate api to fetch reading progress *
Remove unused comment * get reading progress if not exists * replace ngram tokenizer with standard tokenizer * fix tests * remove .env.local * add sort keyword in searching to sort by score Co-authored-by: Hongbo Wu <hongbo@omnivore.app>
704 lines
18 KiB
TypeScript
704 lines
18 KiB
TypeScript
import { createTestLabel, createTestUser, deleteTestUser } from '../db'
|
|
import {
|
|
createTestElasticPage,
|
|
generateFakeUuid,
|
|
graphqlRequest,
|
|
request,
|
|
} from '../util'
|
|
import * as chai from 'chai'
|
|
import { expect } from 'chai'
|
|
import 'mocha'
|
|
import { User } from '../../src/entity/user'
|
|
import chaiString from 'chai-string'
|
|
import { Label } from '../../src/entity/label'
|
|
import {
|
|
createPage,
|
|
deletePage,
|
|
getPageById,
|
|
updatePage,
|
|
} from '../../src/elastic'
|
|
import { PageType, UploadFileStatus } from '../../src/generated/graphql'
|
|
import { Page, PageContext } from '../../src/elastic/types'
|
|
import { getRepository } from 'typeorm'
|
|
import { UploadFile } from '../../src/entity/upload_file'
|
|
import { createPubSubClient } from '../../src/datalayer/pubsub'
|
|
|
|
chai.use(chaiString)
|
|
|
|
const ctx: PageContext = { pubsub: createPubSubClient(), refresh: true }
|
|
const archiveLink = async (authToken: string, linkId: string) => {
|
|
const query = `
|
|
mutation {
|
|
setLinkArchived(
|
|
input: {
|
|
linkId: "${linkId}",
|
|
archived: ${true}
|
|
}
|
|
) {
|
|
... on ArchiveLinkSuccess {
|
|
linkId
|
|
}
|
|
... on ArchiveLinkError {
|
|
errorCodes
|
|
}
|
|
}
|
|
}
|
|
`
|
|
return graphqlRequest(query, authToken).expect(200)
|
|
}
|
|
|
|
const createArticleQuery = (
|
|
url: string,
|
|
source: string,
|
|
document: string,
|
|
title: string
|
|
) => {
|
|
return `
|
|
mutation {
|
|
createArticle(input: {
|
|
url: "${url}"
|
|
source: "${source}"
|
|
preparedDocument: {
|
|
document: "${document}"
|
|
pageInfo: {
|
|
contentType: "text/html"
|
|
title: "${title}"
|
|
}
|
|
}
|
|
}) {
|
|
... on CreateArticleSuccess {
|
|
createdArticle {
|
|
id
|
|
title
|
|
content
|
|
}
|
|
user {
|
|
id
|
|
name
|
|
}
|
|
created
|
|
}
|
|
... on CreateArticleError {
|
|
errorCodes
|
|
}
|
|
}
|
|
}
|
|
`
|
|
}
|
|
|
|
const articlesQuery = (after = '', order = 'ASCENDING') => {
|
|
return `
|
|
query {
|
|
articles(
|
|
sharedOnly: ${false}
|
|
sort: {
|
|
order: ${order}
|
|
by: UPDATED_TIME
|
|
}
|
|
after: "${after}"
|
|
first: 5
|
|
query: "") {
|
|
... on ArticlesSuccess {
|
|
edges {
|
|
cursor
|
|
node {
|
|
id
|
|
url
|
|
linkId
|
|
labels {
|
|
id
|
|
name
|
|
color
|
|
}
|
|
}
|
|
}
|
|
pageInfo {
|
|
hasNextPage
|
|
hasPreviousPage
|
|
startCursor
|
|
endCursor
|
|
totalCount
|
|
}
|
|
}
|
|
... on ArticlesError {
|
|
errorCodes
|
|
}
|
|
}
|
|
}
|
|
`
|
|
}
|
|
|
|
const getArticleQuery = (slug: string) => {
|
|
return `
|
|
query {
|
|
article(slug: "${slug}", username: "") {
|
|
... on ArticleSuccess {
|
|
article {
|
|
id
|
|
slug
|
|
}
|
|
}
|
|
... on ArticleError {
|
|
errorCodes
|
|
}
|
|
}
|
|
}
|
|
`
|
|
}
|
|
|
|
const savePageQuery = (url: string, title: string, originalContent: string) => {
|
|
return `
|
|
mutation {
|
|
savePage(
|
|
input: {
|
|
url: "${url}",
|
|
source: "test",
|
|
clientRequestId: "${generateFakeUuid()}",
|
|
title: "${title}",
|
|
originalContent: "${originalContent}"
|
|
}
|
|
) {
|
|
... on SaveSuccess {
|
|
url
|
|
}
|
|
... on SaveError {
|
|
errorCodes
|
|
}
|
|
}
|
|
}
|
|
`
|
|
}
|
|
|
|
const saveFileQuery = (url: string, uploadFileId: string) => {
|
|
return `
|
|
mutation {
|
|
saveFile (
|
|
input: {
|
|
url: "${url}",
|
|
source: "test",
|
|
clientRequestId: "${generateFakeUuid()}",
|
|
uploadFileId: "${uploadFileId}",
|
|
}
|
|
) {
|
|
... on SaveSuccess {
|
|
url
|
|
}
|
|
... on SaveError {
|
|
errorCodes
|
|
}
|
|
}
|
|
}
|
|
`
|
|
}
|
|
|
|
const setBookmarkQuery = (articleId: string, bookmark: boolean) => {
|
|
return `
|
|
mutation {
|
|
setBookmarkArticle(
|
|
input: {
|
|
articleID: "${articleId}",
|
|
bookmark: ${bookmark}
|
|
}
|
|
) {
|
|
... on SetBookmarkArticleSuccess {
|
|
bookmarkedArticle {
|
|
id
|
|
}
|
|
}
|
|
... on SetBookmarkArticleError {
|
|
errorCodes
|
|
}
|
|
}
|
|
}
|
|
`
|
|
}
|
|
|
|
const saveArticleReadingProgressQuery = (
|
|
articleId: string,
|
|
progress: number
|
|
) => {
|
|
return `
|
|
mutation {
|
|
saveArticleReadingProgress(
|
|
input: {
|
|
id: "${articleId}",
|
|
readingProgressPercent: ${progress}
|
|
readingProgressAnchorIndex: 0
|
|
}
|
|
) {
|
|
... on SaveArticleReadingProgressSuccess {
|
|
updatedArticle {
|
|
id
|
|
readingProgressPercent
|
|
}
|
|
}
|
|
... on SaveArticleReadingProgressError {
|
|
errorCodes
|
|
}
|
|
}
|
|
}
|
|
`
|
|
}
|
|
|
|
describe('Article API', () => {
|
|
const username = 'fakeUser'
|
|
let authToken: string
|
|
let user: User
|
|
|
|
before(async () => {
|
|
// create test user and login
|
|
user = await createTestUser(username)
|
|
const res = await request
|
|
.post('/local/debug/fake-user-login')
|
|
.send({ fakeEmail: user.email })
|
|
|
|
authToken = res.body.authToken
|
|
})
|
|
|
|
after(async () => {
|
|
// clean up
|
|
await deleteTestUser(username)
|
|
})
|
|
|
|
describe('CreateArticle', () => {
|
|
let query = ''
|
|
let url = ''
|
|
let source = ''
|
|
let document = ''
|
|
let title = ''
|
|
let pageId = ''
|
|
|
|
beforeEach(async () => {
|
|
query = createArticleQuery(url, source, document, title)
|
|
})
|
|
|
|
context('when saving from document', () => {
|
|
before(() => {
|
|
url = 'https://blog.omnivore.app/p/testing-is-fun-with-omnivore'
|
|
source = 'puppeteer-parse'
|
|
document = '<p>test</p>'
|
|
title = 'new title'
|
|
})
|
|
|
|
after(async () => {
|
|
await deletePage(pageId, ctx)
|
|
})
|
|
|
|
it('should create an article', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
|
|
expect(res.body.data.createArticle.createdArticle.title).to.eql(title)
|
|
pageId = res.body.data.createArticle.createdArticle.id
|
|
})
|
|
})
|
|
})
|
|
|
|
describe('GetArticle', () => {
|
|
const realSlug = 'testing-is-really-fun-with-omnivore'
|
|
let query = ''
|
|
let slug = ''
|
|
let pageId: string | undefined
|
|
|
|
before(async () => {
|
|
const page = {
|
|
id: '',
|
|
hash: 'test hash',
|
|
userId: user.id,
|
|
pageType: PageType.Article,
|
|
title: 'test title',
|
|
content: '<p>test</p>',
|
|
slug: realSlug,
|
|
createdAt: new Date(),
|
|
updatedAt: new Date(),
|
|
readingProgressPercent: 100,
|
|
readingProgressAnchorIndex: 0,
|
|
url: 'https://blog.omnivore.app/test-with-omnivore',
|
|
savedAt: new Date(),
|
|
} as Page
|
|
pageId = await createPage(page, ctx)
|
|
})
|
|
|
|
after(async () => {
|
|
if (pageId) {
|
|
await deletePage(pageId, ctx)
|
|
}
|
|
})
|
|
|
|
beforeEach(async () => {
|
|
query = getArticleQuery(slug)
|
|
})
|
|
|
|
context('when article exists', () => {
|
|
before(() => {
|
|
slug = realSlug
|
|
})
|
|
|
|
it('should return the article', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
|
|
expect(res.body.data.article.article.slug).to.eql(slug)
|
|
})
|
|
})
|
|
|
|
context('when article does not exist', () => {
|
|
before(() => {
|
|
slug = 'not-a-real-slug'
|
|
})
|
|
|
|
it('should return an error', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
|
|
expect(res.body.data.article.errorCodes).to.eql(['NOT_FOUND'])
|
|
})
|
|
})
|
|
})
|
|
|
|
describe('GetArticles', () => {
|
|
let query = ''
|
|
let after = ''
|
|
let pages: Page[] = []
|
|
let label: Label
|
|
|
|
before(async () => {
|
|
// Create some test pages
|
|
for (let i = 0; i < 15; i++) {
|
|
const page = {
|
|
id: '',
|
|
hash: 'test hash',
|
|
userId: user.id,
|
|
pageType: PageType.Article,
|
|
title: 'test title',
|
|
content: '<p>test</p>',
|
|
slug: 'test slug',
|
|
createdAt: new Date(),
|
|
updatedAt: new Date(),
|
|
readingProgressPercent: 100,
|
|
readingProgressAnchorIndex: 0,
|
|
url: 'https://blog.omnivore.app/p/getting-started-with-omnivore',
|
|
savedAt: new Date(),
|
|
} as Page
|
|
const pageId = await createPage(page, ctx)
|
|
if (!pageId) {
|
|
expect.fail('Failed to create page')
|
|
}
|
|
page.id = pageId
|
|
pages.push(page)
|
|
}
|
|
// create testing labels
|
|
label = await createTestLabel(user, 'label', '#ffffff')
|
|
// set label to a link
|
|
await updatePage(
|
|
pages[0].id,
|
|
{
|
|
...pages[0],
|
|
labels: [{ id: label.id, name: label.name, color: label.color }],
|
|
},
|
|
ctx
|
|
)
|
|
})
|
|
|
|
beforeEach(async () => {
|
|
query = articlesQuery(after)
|
|
})
|
|
|
|
it('should return labels', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
|
|
expect(res.body.data.articles.edges[0].node.labels[0].id).to.eql(label.id)
|
|
})
|
|
|
|
context('when we fetch the first page', () => {
|
|
it('should return the first five items', async () => {
|
|
after = ''
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
|
|
expect(res.body.data.articles.edges.length).to.eql(5)
|
|
expect(res.body.data.articles.edges[0].node.id).to.eql(pages[0].id)
|
|
expect(res.body.data.articles.edges[1].node.id).to.eql(pages[1].id)
|
|
expect(res.body.data.articles.edges[2].node.id).to.eql(pages[2].id)
|
|
expect(res.body.data.articles.edges[3].node.id).to.eql(pages[3].id)
|
|
expect(res.body.data.articles.edges[4].node.id).to.eql(pages[4].id)
|
|
})
|
|
|
|
it('should set the pageInfo', async () => {
|
|
after = ''
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
expect(res.body.data.articles.pageInfo.endCursor).to.eql('5')
|
|
expect(res.body.data.articles.pageInfo.startCursor).to.eql('')
|
|
expect(res.body.data.articles.pageInfo.totalCount, 'totalCount').to.eql(
|
|
15
|
|
)
|
|
expect(
|
|
res.body.data.articles.pageInfo.hasNextPage,
|
|
'hasNextPage'
|
|
).to.eql(true)
|
|
})
|
|
})
|
|
|
|
context('when we fetch the second page', () => {
|
|
before(() => {
|
|
after = '5'
|
|
})
|
|
|
|
it('should return the second five items', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
|
|
expect(res.body.data.articles.edges.length).to.eql(5)
|
|
expect(res.body.data.articles.edges[0].node.id).to.eql(pages[5].id)
|
|
expect(res.body.data.articles.edges[1].node.id).to.eql(pages[6].id)
|
|
expect(res.body.data.articles.edges[2].node.id).to.eql(pages[7].id)
|
|
expect(res.body.data.articles.edges[3].node.id).to.eql(pages[8].id)
|
|
expect(res.body.data.articles.edges[4].node.id).to.eql(pages[9].id)
|
|
})
|
|
|
|
it('should set the pageInfo', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
expect(res.body.data.articles.pageInfo.totalCount, 'totalCount').to.eql(
|
|
15
|
|
)
|
|
expect(
|
|
res.body.data.articles.pageInfo.startCursor,
|
|
'startCursor'
|
|
).to.eql('5')
|
|
expect(res.body.data.articles.pageInfo.endCursor, 'endCursor').to.eql(
|
|
'10'
|
|
)
|
|
expect(
|
|
res.body.data.articles.pageInfo.hasNextPage,
|
|
'hasNextPage'
|
|
).to.eql(true)
|
|
// We don't implement hasPreviousPage in the API and should probably remove it
|
|
// expect(res.body.data.articles.pageInfo.hasPreviousPage).to.eql(true)
|
|
})
|
|
})
|
|
})
|
|
|
|
describe('SavePage', () => {
|
|
let query = ''
|
|
let title = 'Example Title'
|
|
let url = 'https://example.com'
|
|
let originalContent = '<div>Example Content</div>'
|
|
|
|
beforeEach(() => {
|
|
query = savePageQuery(url, title, originalContent)
|
|
})
|
|
|
|
context('when we save a new page', () => {
|
|
it('should return a slugged url', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
expect(res.body.data.savePage.url).to.startsWith(
|
|
'http://localhost:3000/fakeUser/example-title-'
|
|
)
|
|
})
|
|
})
|
|
|
|
context('when we save a page that is already archived', () => {
|
|
it('it should return that page in the GetArticles Query', async () => {
|
|
url = 'https://example.com/new-url'
|
|
await graphqlRequest(
|
|
savePageQuery(url, title, originalContent),
|
|
authToken
|
|
).expect(200)
|
|
|
|
let allLinks
|
|
// Save a link, then archive it
|
|
// set a slight delay to make sure the page is updated
|
|
setTimeout(async () => {
|
|
let allLinks = await graphqlRequest(
|
|
articlesQuery('', 'DESCENDING'),
|
|
authToken
|
|
).expect(200)
|
|
const justSavedId = allLinks.body.data.articles.edges[0].node.id
|
|
await archiveLink(authToken, justSavedId)
|
|
}, 100)
|
|
|
|
// test the negative case, ensuring the archive link wasn't returned
|
|
setTimeout(async () => {
|
|
allLinks = await graphqlRequest(
|
|
articlesQuery('', 'DESCENDING'),
|
|
authToken
|
|
).expect(200)
|
|
expect(allLinks.body.data.articles.edges[0].node.url).to.not.eq(url)
|
|
}, 100)
|
|
|
|
// Now save the link again, and ensure it is returned
|
|
await graphqlRequest(
|
|
savePageQuery(url, title, originalContent),
|
|
authToken
|
|
).expect(200)
|
|
|
|
setTimeout(async () => {
|
|
allLinks = await graphqlRequest(
|
|
articlesQuery('', 'DESCENDING'),
|
|
authToken
|
|
).expect(200)
|
|
expect(allLinks.body.data.articles.edges[0].node.url).to.eq(url)
|
|
}, 100)
|
|
})
|
|
})
|
|
})
|
|
|
|
describe('setBookmarkArticle', () => {
|
|
let query = ''
|
|
let articleId = ''
|
|
let bookmark = true
|
|
let pageId = ''
|
|
|
|
before(async () => {
|
|
const page: Page = {
|
|
id: '',
|
|
hash: 'test hash',
|
|
userId: user.id,
|
|
pageType: PageType.Article,
|
|
title: 'test title',
|
|
content: '<p>test</p>',
|
|
createdAt: new Date(),
|
|
url: 'https://blog.omnivore.app/setBookmarkArticle',
|
|
slug: 'test-with-omnivore',
|
|
}
|
|
const newPageId = await createPage(page, ctx)
|
|
if (newPageId) {
|
|
pageId = newPageId
|
|
}
|
|
})
|
|
|
|
after(async () => {
|
|
if (pageId) {
|
|
await deletePage(pageId, ctx)
|
|
}
|
|
})
|
|
|
|
beforeEach(() => {
|
|
query = setBookmarkQuery(articleId, bookmark)
|
|
})
|
|
|
|
context('when we set a bookmark on an article', () => {
|
|
before(async () => {
|
|
articleId = pageId
|
|
bookmark = true
|
|
})
|
|
|
|
it('should bookmark an article', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
expect(res.body.data.setBookmarkArticle.bookmarkedArticle.id).to.eq(
|
|
articleId
|
|
)
|
|
})
|
|
})
|
|
|
|
context('when we unset a bookmark on an article', () => {
|
|
before(async () => {
|
|
articleId = pageId
|
|
bookmark = false
|
|
})
|
|
|
|
it('should delete an article', async () => {
|
|
await graphqlRequest(query, authToken).expect(200)
|
|
const pageId = await getPageById(articleId)
|
|
expect(pageId).to.undefined
|
|
})
|
|
})
|
|
})
|
|
|
|
describe('saveArticleReadingProgressResolver', () => {
|
|
let query = ''
|
|
let articleId = ''
|
|
let progress = 0.5
|
|
let pageId = ''
|
|
|
|
before(async () => {
|
|
pageId = (await createTestElasticPage(user)).id
|
|
})
|
|
|
|
after(async () => {
|
|
if (pageId) {
|
|
await deletePage(pageId, ctx)
|
|
}
|
|
})
|
|
|
|
beforeEach(() => {
|
|
query = saveArticleReadingProgressQuery(articleId, progress)
|
|
})
|
|
|
|
context('when we save a reading progress on an article', () => {
|
|
before(async () => {
|
|
articleId = pageId
|
|
progress = 0.5
|
|
})
|
|
|
|
it('should save a reading progress on an article', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
expect(
|
|
res.body.data.saveArticleReadingProgress.updatedArticle
|
|
.readingProgressPercent
|
|
).to.eq(progress)
|
|
})
|
|
|
|
it('should not allow setting the reading progress lower than current progress', async () => {
|
|
const firstQuery = saveArticleReadingProgressQuery(articleId, 75)
|
|
const firstRes = await graphqlRequest(firstQuery, authToken).expect(200)
|
|
expect(
|
|
firstRes.body.data.saveArticleReadingProgress.updatedArticle
|
|
.readingProgressPercent
|
|
).to.eq(75)
|
|
|
|
// Now try to set to a lower value (50), value should not be updated
|
|
// have a slight delay to ensure the reading progress is updated
|
|
setTimeout(async () => {
|
|
const secondQuery = saveArticleReadingProgressQuery(articleId, 50)
|
|
const secondRes = await graphqlRequest(secondQuery, authToken).expect(
|
|
200
|
|
)
|
|
expect(
|
|
secondRes.body.data.saveArticleReadingProgress.updatedArticle
|
|
.readingProgressPercent
|
|
).to.eq(75)
|
|
}, 100)
|
|
})
|
|
})
|
|
})
|
|
|
|
describe('SaveFile', () => {
|
|
let query = ''
|
|
let url = ''
|
|
let uploadFileId = ''
|
|
|
|
beforeEach(() => {
|
|
query = saveFileQuery(url, uploadFileId)
|
|
})
|
|
|
|
context('when the file is not uploaded', () => {
|
|
before(async () => {
|
|
url = 'fake url'
|
|
uploadFileId = generateFakeUuid()
|
|
})
|
|
|
|
it('should return Unauthorized error', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
expect(res.body.data.saveFile.errorCodes).to.eql(['UNAUTHORIZED'])
|
|
})
|
|
})
|
|
|
|
context('when the file is uploaded', () => {
|
|
before(async () => {
|
|
url = 'https://example.com/'
|
|
const uploadFile = await getRepository(UploadFile).save({
|
|
fileName: 'test.pdf',
|
|
contentType: 'application/pdf',
|
|
url: url,
|
|
user: user,
|
|
status: UploadFileStatus.Initialized,
|
|
})
|
|
uploadFileId = uploadFile.id
|
|
})
|
|
|
|
it('should return the new url', async () => {
|
|
const res = await graphqlRequest(query, authToken).expect(200)
|
|
expect(res.body.data.saveFile.url).to.startsWith(
|
|
'http://localhost:3000/fakeUser/links'
|
|
)
|
|
})
|
|
})
|
|
})
|
|
})
|