From 3f4be32df8bdd10d657066945a0eac9a17ff2f94 Mon Sep 17 00:00:00 2001 From: Jackson Harper Date: Fri, 13 Oct 2023 18:26:02 +0800 Subject: [PATCH] Merge library item imports --- packages/api/src/services/library_item.ts | 16 +- packages/api/src/utils/search.ts | 5 + packages/api/test/resolvers/article.test.ts | 145 ++++++++++++------ packages/api/test/utils/search.test.ts | 7 + ...34.do.rename_site_tsv_to_site_name_tsv.sql | 33 ++++ ....undo.rename_site_tsv_to_site_name_tsv.sql | 9 ++ 6 files changed, 166 insertions(+), 49 deletions(-) create mode 100755 packages/db/migrations/0134.do.rename_site_tsv_to_site_name_tsv.sql create mode 100755 packages/db/migrations/0134.undo.rename_site_tsv_to_site_name_tsv.sql diff --git a/packages/api/src/services/library_item.ts b/packages/api/src/services/library_item.ts index 628873cfb..07be9f885 100644 --- a/packages/api/src/services/library_item.ts +++ b/packages/api/src/services/library_item.ts @@ -1,4 +1,4 @@ -import { DeepPartial, SelectQueryBuilder } from 'typeorm' +import { Brackets, DeepPartial, SelectQueryBuilder } from 'typeorm' import { QueryDeepPartialEntity } from 'typeorm/query-builder/QueryPartialEntity' import { EntityLabel } from '../entity/entity_label' import { Highlight } from '../entity/highlight' @@ -223,10 +223,16 @@ const buildWhereClause = ( args.matchFilters.forEach((filter) => { const param = `match_${filter.field}` queryBuilder.andWhere( - `websearch_to_tsquery('english', :${param}) @@ library_item.${filter.field}_tsv`, - { - [param]: filter.value, - } + new Brackets((qb) => { + qb.andWhere( + `websearch_to_tsquery('english', :${param}) @@ library_item.${filter.field}_tsv`, + { + [param]: filter.value, + } + ).orWhere(`${filter.field} ILIKE :value`, { + value: `%${filter.value}%`, + }) + }) ) }) } diff --git a/packages/api/src/utils/search.ts b/packages/api/src/utils/search.ts index 357875449..c4799ba7b 100644 --- a/packages/api/src/utils/search.ts +++ b/packages/api/src/utils/search.ts @@ -325,6 
+325,11 @@ const parseFieldFilter = ( field: 'subscription', value, } + case 'SITE': + return { + field: 'site_name', + value, + } } return { diff --git a/packages/api/test/resolvers/article.test.ts b/packages/api/test/resolvers/article.test.ts index aeb76604a..1bf9fc804 100644 --- a/packages/api/test/resolvers/article.test.ts +++ b/packages/api/test/resolvers/article.test.ts @@ -1016,6 +1016,60 @@ describe('Article API', () => { }) }) + context('when site is in the query', () => { + let items: LibraryItem[] = [] + let label: Label + + before(async () => { + // Create some test items + label = await createLabel('test', '', user.id) + items = await createLibraryItems( + [ + { + user, + title: 'test title 1', + readableContent: '

test 1

', + slug: 'test slug 1', + originalUrl: `${url}/test1`, + state: LibraryItemState.Succeeded, + siteName: 'yes-app.com', + }, + { + user, + title: 'test title 2', + readableContent: '

test 2

', + slug: 'test slug 2', + originalUrl: `${url}/test2`, + state: LibraryItemState.Succeeded, + siteName: 'no-app.com', + }, + ], + user.id + ) + }) + + after(async () => { + await deleteLibraryItems(items, user.id) + }) + + it('returns item with matching site', async () => { + keyword = 'site:yes-app.com' + const res = await graphqlRequest(query, authToken).expect(200) + + expect(res.body.data.search.pageInfo.totalCount).to.eq(1) + expect(res.body.data.search.edges[0].node.id).to.eq(items[0].id) + }) + + it('returns item with matching search query', async () => { + keyword = 'site:app.com' + const res = await graphqlRequest(query, authToken).expect(200) + + expect(res.body.data.search.pageInfo.totalCount).to.eq(2) + expect(res.body.data.search.edges[0].node.id).to.eq(items[0].id) + expect(res.body.data.search.edges[1].node.id).to.eq(items[1].id) + }) + }) + context("when in:library label:test' is in the query", () => { let items: LibraryItem[] = [] let label: Label @@ -1322,54 +1376,57 @@ describe('Article API', () => { }) }) - context('when wordsCount:>=10000 wordsCount:<=20000 is in the query', () => { - let items: LibraryItem[] = [] + context( + 'when wordsCount:>=10000 wordsCount:<=20000 is in the query', + () => { + let items: LibraryItem[] = [] - before(async () => { - keyword = 'wordsCount:>=10000 wordsCount:<=20000' - // Create some test items - items = await createLibraryItems( - [ - { - user, - title: 'test title 1', - readableContent: '

test 1

', - slug: 'test slug 1', - originalUrl: `${url}/test1`, - wordCount: 10000, - }, - { - user, - title: 'test title 2', - readableContent: '

test 2

', - slug: 'test slug 2', - originalUrl: `${url}/test2`, - wordCount: 8000, - }, - { - user, - title: 'test title 3', - readableContent: '

test 3

', - slug: 'test slug 3', - originalUrl: `${url}/test3`, - wordCount: 100000, - }, - ], - user.id - ) - }) + before(async () => { + keyword = 'wordsCount:>=10000 wordsCount:<=20000' + // Create some test items + items = await createLibraryItems( + [ + { + user, + title: 'test title 1', + readableContent: '

test 1

', + slug: 'test slug 1', + originalUrl: `${url}/test1`, + wordCount: 10000, + }, + { + user, + title: 'test title 2', + readableContent: '

test 2

', + slug: 'test slug 2', + originalUrl: `${url}/test2`, + wordCount: 8000, + }, + { + user, + title: 'test title 3', + readableContent: '

test 3

', + slug: 'test slug 3', + originalUrl: `${url}/test3`, + wordCount: 100000, + }, + ], + user.id + ) + }) - after(async () => { - await deleteLibraryItems(items, user.id) - }) + after(async () => { + await deleteLibraryItems(items, user.id) + }) - it('returns items with words count between 10000 and 20000 inclusively', async () => { - const res = await graphqlRequest(query, authToken).expect(200) + it('returns items with words count between 10000 and 20000 inclusively', async () => { + const res = await graphqlRequest(query, authToken).expect(200) - expect(res.body.data.search.pageInfo.totalCount).to.eq(1) - expect(res.body.data.search.edges[0].node.id).to.eq(items[0].id) - }) - }) + expect(res.body.data.search.pageInfo.totalCount).to.eq(1) + expect(res.body.data.search.edges[0].node.id).to.eq(items[0].id) + }) + } + ) }) describe('TypeaheadSearch API', () => { diff --git a/packages/api/test/utils/search.test.ts b/packages/api/test/utils/search.test.ts index 6ad4e7a24..5518e84b0 100644 --- a/packages/api/test/utils/search.test.ts +++ b/packages/api/test/utils/search.test.ts @@ -159,3 +159,10 @@ describe('query with author set', () => { }) }) +describe('query with site set', () => { + it('adds site_name to the match filters', () => { + const result = parseSearchQuery('site:omnivore.app') + expect(result.matchFilters[0].field).to.equal('site_name') + expect(result.matchFilters[0].value).to.equal('omnivore.app') + }) +}) diff --git a/packages/db/migrations/0134.do.rename_site_tsv_to_site_name_tsv.sql b/packages/db/migrations/0134.do.rename_site_tsv_to_site_name_tsv.sql new file mode 100755 index 000000000..9899b3fbf --- /dev/null +++ b/packages/db/migrations/0134.do.rename_site_tsv_to_site_name_tsv.sql @@ -0,0 +1,33 @@ +-- Type: DO +-- Name: Rename site to site_name +-- Description: Rename the site_tsv column to site_name_tsv to make it more consistent + +BEGIN; + +ALTER TABLE omnivore.library_item RENAME COLUMN site_tsv TO site_name_tsv ; + +CREATE OR REPLACE 
FUNCTION update_library_item_tsv() RETURNS trigger AS $$ +begin + new.content_tsv := to_tsvector('pg_catalog.english', coalesce(new.readable_content, '')); + new.site_name_tsv := to_tsvector('pg_catalog.english', coalesce(new.site_name, '')); + new.title_tsv := to_tsvector('pg_catalog.english', coalesce(new.title, '')); + new.author_tsv := to_tsvector('pg_catalog.english', coalesce(new.author, '')); + new.description_tsv := to_tsvector('pg_catalog.english', coalesce(new.description, '')); + -- note_tsv is generated by both note and highlight_annotations + new.note_tsv := to_tsvector('pg_catalog.english', coalesce(new.note, '') || ' ' || array_to_string(new.highlight_annotations, ' ')); + new.search_tsv := + setweight(new.title_tsv, 'A') || + setweight(new.author_tsv, 'A') || + setweight(new.site_name_tsv, 'A') || + setweight(new.description_tsv, 'A') || + -- full hostname (eg www.omnivore.app) + setweight(to_tsvector('pg_catalog.english', coalesce(regexp_replace(new.original_url, '^((http[s]?):\/)?\/?([^:\/\s]+)((\/\w+)*\/)([\w\-\.]+[^#?\s]+)(.*)?(#[\w\-]+)?$', '\3'), '')), 'A') || + -- secondary hostname (eg omnivore) + setweight(to_tsvector('pg_catalog.english', coalesce(regexp_replace(new.original_url, '^((http[s]?):\/)?\/?(.*\.)?([^:\/\s]+)(\..*)((\/+)*\/)?([\w\-\.]+[^#?\s]+)(.*)?(#[\w\-]+)?$', '\4'), '')), 'A') || + setweight(new.note_tsv, 'A') || + setweight(new.content_tsv, 'B'); + return new; +end +$$ LANGUAGE plpgsql; + +COMMIT; diff --git a/packages/db/migrations/0134.undo.rename_site_tsv_to_site_name_tsv.sql b/packages/db/migrations/0134.undo.rename_site_tsv_to_site_name_tsv.sql new file mode 100755 index 000000000..2e1329a7c --- /dev/null +++ b/packages/db/migrations/0134.undo.rename_site_tsv_to_site_name_tsv.sql @@ -0,0 +1,9 @@ +-- Type: UNDO +-- Name: Rename site to site_name +-- Description: Rename the site_name_tsv column back to site_tsv, reverting the DO migration + +BEGIN; + +ALTER TABLE omnivore.library_item RENAME COLUMN site_name_tsv TO site_tsv ; + 
+COMMIT;