Merge pull request #2926 from omnivore-app/fix/api-site-name-query
Add ILIKE queries to all field queries
This commit is contained in:
@ -1,4 +1,4 @@
|
||||
import { DeepPartial, SelectQueryBuilder } from 'typeorm'
|
||||
import { Brackets, DeepPartial, SelectQueryBuilder } from 'typeorm'
|
||||
import { QueryDeepPartialEntity } from 'typeorm/query-builder/QueryPartialEntity'
|
||||
import { EntityLabel } from '../entity/entity_label'
|
||||
import { Highlight } from '../entity/highlight'
|
||||
@ -223,10 +223,16 @@ const buildWhereClause = (
|
||||
args.matchFilters.forEach((filter) => {
|
||||
const param = `match_${filter.field}`
|
||||
queryBuilder.andWhere(
|
||||
`websearch_to_tsquery('english', :${param}) @@ library_item.${filter.field}_tsv`,
|
||||
{
|
||||
[param]: filter.value,
|
||||
}
|
||||
new Brackets((qb) => {
|
||||
qb.andWhere(
|
||||
`websearch_to_tsquery('english', :${param}) @@ library_item.${filter.field}_tsv`,
|
||||
{
|
||||
[param]: filter.value,
|
||||
}
|
||||
).orWhere(`${filter.field} ILIKE :value`, {
|
||||
value: `%${filter.value}%`,
|
||||
})
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@ -325,6 +325,11 @@ const parseFieldFilter = (
|
||||
field: 'subscription',
|
||||
value,
|
||||
}
|
||||
case 'SITE':
|
||||
return {
|
||||
field: 'site_name',
|
||||
value,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@ -1016,6 +1016,89 @@ describe('Article API', () => {
|
||||
})
|
||||
})
|
||||
|
||||
context('when site is in the query', () => {
|
||||
let items: LibraryItem[] = []
|
||||
|
||||
before(async () => {
|
||||
keyword = 'site:yes-app.com'
|
||||
items = await createLibraryItems(
|
||||
[
|
||||
{
|
||||
user,
|
||||
title: 'test title 1',
|
||||
readableContent: '<p>test 1</p>',
|
||||
slug: 'test slug 1',
|
||||
originalUrl: `${url}/test1`,
|
||||
state: LibraryItemState.Succeeded,
|
||||
siteName: 'yes-app.com',
|
||||
},
|
||||
{
|
||||
user,
|
||||
title: 'test title 2',
|
||||
readableContent: '<p>test 2</p>',
|
||||
slug: 'test slug 2',
|
||||
originalUrl: `${url}/test2`,
|
||||
state: LibraryItemState.Succeeded,
|
||||
siteName: 'no-app.com',
|
||||
},
|
||||
],
|
||||
user.id
|
||||
)
|
||||
})
|
||||
|
||||
after(async () => {
|
||||
await deleteLibraryItems(items, user.id)
|
||||
})
|
||||
|
||||
it('returns item with matching site', async () => {
|
||||
const res = await graphqlRequest(query, authToken).expect(200)
|
||||
|
||||
expect(res.body.data.search.pageInfo.totalCount).to.eq(1)
|
||||
expect(res.body.data.search.edges[0].node.id).to.eq(items[0].id)
|
||||
})
|
||||
})
|
||||
|
||||
context('when wildcard site is in the query', () => {
|
||||
let items: LibraryItem[] = []
|
||||
|
||||
before(async () => {
|
||||
keyword = 'site:app.com'
|
||||
items = await createLibraryItems(
|
||||
[
|
||||
{
|
||||
user,
|
||||
title: 'test title 1',
|
||||
readableContent: '<p>test 1</p>',
|
||||
slug: 'test slug 1',
|
||||
originalUrl: `${url}/test1`,
|
||||
state: LibraryItemState.Succeeded,
|
||||
siteName: 'yes-app.com',
|
||||
},
|
||||
{
|
||||
user,
|
||||
title: 'test title 2',
|
||||
readableContent: '<p>test 2</p>',
|
||||
slug: 'test slug 2',
|
||||
originalUrl: `${url}/test2`,
|
||||
state: LibraryItemState.Succeeded,
|
||||
siteName: 'no-app.com',
|
||||
},
|
||||
],
|
||||
user.id
|
||||
)
|
||||
})
|
||||
|
||||
after(async () => {
|
||||
await deleteLibraryItems(items, user.id)
|
||||
})
|
||||
|
||||
it('returns item with matching search query', async () => {
|
||||
const res = await graphqlRequest(query, authToken).expect(200)
|
||||
|
||||
expect(res.body.data.search.pageInfo.totalCount).to.eq(2)
|
||||
})
|
||||
})
|
||||
|
||||
context("when in:library label:test' is in the query", () => {
|
||||
let items: LibraryItem[] = []
|
||||
let label: Label
|
||||
@ -1322,54 +1405,57 @@ describe('Article API', () => {
|
||||
})
|
||||
})
|
||||
|
||||
context('when wordsCount:>=10000 wordsCount:<=20000 is in the query', () => {
|
||||
let items: LibraryItem[] = []
|
||||
context(
|
||||
'when wordsCount:>=10000 wordsCount:<=20000 is in the query',
|
||||
() => {
|
||||
let items: LibraryItem[] = []
|
||||
|
||||
before(async () => {
|
||||
keyword = 'wordsCount:>=10000 wordsCount:<=20000'
|
||||
// Create some test items
|
||||
items = await createLibraryItems(
|
||||
[
|
||||
{
|
||||
user,
|
||||
title: 'test title 1',
|
||||
readableContent: '<p>test 1</p>',
|
||||
slug: 'test slug 1',
|
||||
originalUrl: `${url}/test1`,
|
||||
wordCount: 10000,
|
||||
},
|
||||
{
|
||||
user,
|
||||
title: 'test title 2',
|
||||
readableContent: '<p>test 2</p>',
|
||||
slug: 'test slug 2',
|
||||
originalUrl: `${url}/test2`,
|
||||
wordCount: 8000,
|
||||
},
|
||||
{
|
||||
user,
|
||||
title: 'test title 3',
|
||||
readableContent: '<p>test 3</p>',
|
||||
slug: 'test slug 3',
|
||||
originalUrl: `${url}/test3`,
|
||||
wordCount: 100000,
|
||||
},
|
||||
],
|
||||
user.id
|
||||
)
|
||||
})
|
||||
before(async () => {
|
||||
keyword = 'wordsCount:>=10000 wordsCount:<=20000'
|
||||
// Create some test items
|
||||
items = await createLibraryItems(
|
||||
[
|
||||
{
|
||||
user,
|
||||
title: 'test title 1',
|
||||
readableContent: '<p>test 1</p>',
|
||||
slug: 'test slug 1',
|
||||
originalUrl: `${url}/test1`,
|
||||
wordCount: 10000,
|
||||
},
|
||||
{
|
||||
user,
|
||||
title: 'test title 2',
|
||||
readableContent: '<p>test 2</p>',
|
||||
slug: 'test slug 2',
|
||||
originalUrl: `${url}/test2`,
|
||||
wordCount: 8000,
|
||||
},
|
||||
{
|
||||
user,
|
||||
title: 'test title 3',
|
||||
readableContent: '<p>test 3</p>',
|
||||
slug: 'test slug 3',
|
||||
originalUrl: `${url}/test3`,
|
||||
wordCount: 100000,
|
||||
},
|
||||
],
|
||||
user.id
|
||||
)
|
||||
})
|
||||
|
||||
after(async () => {
|
||||
await deleteLibraryItems(items, user.id)
|
||||
})
|
||||
after(async () => {
|
||||
await deleteLibraryItems(items, user.id)
|
||||
})
|
||||
|
||||
it('returns items with words count between 10000 and 20000 inclusively', async () => {
|
||||
const res = await graphqlRequest(query, authToken).expect(200)
|
||||
it('returns items with words count between 10000 and 20000 inclusively', async () => {
|
||||
const res = await graphqlRequest(query, authToken).expect(200)
|
||||
|
||||
expect(res.body.data.search.pageInfo.totalCount).to.eq(1)
|
||||
expect(res.body.data.search.edges[0].node.id).to.eq(items[0].id)
|
||||
})
|
||||
})
|
||||
expect(res.body.data.search.pageInfo.totalCount).to.eq(1)
|
||||
expect(res.body.data.search.edges[0].node.id).to.eq(items[0].id)
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('TypeaheadSearch API', () => {
|
||||
|
||||
@ -159,3 +159,10 @@ describe('query with author set', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('query with site set', () => {
|
||||
it('adds site_name to the match filters', () => {
|
||||
const result = parseSearchQuery('site:omnivore.app')
|
||||
expect(result.matchFilters[0].field).to.equal('site_name')
|
||||
expect(result.matchFilters[0].value).to.equal('omnivore.app')
|
||||
})
|
||||
})
|
||||
|
||||
33
packages/db/migrations/0134.do.rename_site_tsv_to_site_name_tsv.sql
Executable file
33
packages/db/migrations/0134.do.rename_site_tsv_to_site_name_tsv.sql
Executable file
@ -0,0 +1,33 @@
|
||||
-- Type: DO
|
||||
-- Name: Rename site to site_name
|
||||
-- Description: Rename the site_tsv column to site_name_tsv to make it more consistent
|
||||
|
||||
BEGIN;
|
||||
|
||||
ALTER TABLE omnivore.library_item RENAME COLUMN site_tsv TO site_name_tsv ;
|
||||
|
||||
-- Redefine the tsvector-maintenance trigger function so it assigns the
-- renamed column (new.site_name_tsv) instead of the old new.site_tsv.
CREATE OR REPLACE FUNCTION update_library_item_tsv() RETURNS trigger AS $$
|
||||
begin
|
||||
new.content_tsv := to_tsvector('pg_catalog.english', coalesce(new.readable_content, ''));
|
||||
new.site_name_tsv := to_tsvector('pg_catalog.english', coalesce(new.site_name, ''));
|
||||
new.title_tsv := to_tsvector('pg_catalog.english', coalesce(new.title, ''));
|
||||
new.author_tsv := to_tsvector('pg_catalog.english', coalesce(new.author, ''));
|
||||
new.description_tsv := to_tsvector('pg_catalog.english', coalesce(new.description, ''));
|
||||
-- note_tsv is generated by both note and highlight_annotations
|
||||
new.note_tsv := to_tsvector('pg_catalog.english', coalesce(new.note, '') || ' ' || array_to_string(new.highlight_annotations, ' '));
|
||||
new.search_tsv :=
|
||||
setweight(new.title_tsv, 'A') ||
|
||||
setweight(new.author_tsv, 'A') ||
|
||||
setweight(new.site_name_tsv, 'A') ||
|
||||
setweight(new.description_tsv, 'A') ||
|
||||
-- full hostname (eg www.omnivore.app)
|
||||
setweight(to_tsvector('pg_catalog.english', coalesce(regexp_replace(new.original_url, '^((http[s]?):\/)?\/?([^:\/\s]+)((\/\w+)*\/)([\w\-\.]+[^#?\s]+)(.*)?(#[\w\-]+)?$', '\3'), '')), 'A') ||
|
||||
-- secondary hostname (eg omnivore)
|
||||
setweight(to_tsvector('pg_catalog.english', coalesce(regexp_replace(new.original_url, '^((http[s]?):\/)?\/?(.*\.)?([^:\/\s]+)(\..*)((\/+)*\/)?([\w\-\.]+[^#?\s]+)(.*)?(#[\w\-]+)?$', '\4'), '')), 'A') ||
|
||||
setweight(new.note_tsv, 'A') ||
|
||||
setweight(new.content_tsv, 'B');
|
||||
return new;
|
||||
end
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
COMMIT;
|
||||
9
packages/db/migrations/0134.undo.rename_site_tsv_to_site_name_tsv.sql
Executable file
9
packages/db/migrations/0134.undo.rename_site_tsv_to_site_name_tsv.sql
Executable file
@ -0,0 +1,9 @@
|
||||
-- Type: UNDO
-- Name: Rename site to site_name
-- Description: Rename the site_name_tsv column back to site_tsv, reverting
--              migration 0134.do

BEGIN;

ALTER TABLE omnivore.library_item RENAME COLUMN site_name_tsv TO site_tsv ;

-- The DO migration also redefined update_library_item_tsv() to assign
-- new.site_name_tsv. After the rename above that column no longer exists,
-- so the function must be restored to reference site_tsv — otherwise every
-- INSERT/UPDATE on library_item would fail inside the trigger.
CREATE OR REPLACE FUNCTION update_library_item_tsv() RETURNS trigger AS $$
begin
    new.content_tsv := to_tsvector('pg_catalog.english', coalesce(new.readable_content, ''));
    new.site_tsv := to_tsvector('pg_catalog.english', coalesce(new.site_name, ''));
    new.title_tsv := to_tsvector('pg_catalog.english', coalesce(new.title, ''));
    new.author_tsv := to_tsvector('pg_catalog.english', coalesce(new.author, ''));
    new.description_tsv := to_tsvector('pg_catalog.english', coalesce(new.description, ''));
    -- note_tsv is generated by both note and highlight_annotations
    new.note_tsv := to_tsvector('pg_catalog.english', coalesce(new.note, '') || ' ' || array_to_string(new.highlight_annotations, ' '));
    new.search_tsv :=
      setweight(new.title_tsv, 'A') ||
      setweight(new.author_tsv, 'A') ||
      setweight(new.site_tsv, 'A') ||
      setweight(new.description_tsv, 'A') ||
      -- full hostname (eg www.omnivore.app)
      setweight(to_tsvector('pg_catalog.english', coalesce(regexp_replace(new.original_url, '^((http[s]?):\/)?\/?([^:\/\s]+)((\/\w+)*\/)([\w\-\.]+[^#?\s]+)(.*)?(#[\w\-]+)?$', '\3'), '')), 'A') ||
      -- secondary hostname (eg omnivore)
      setweight(to_tsvector('pg_catalog.english', coalesce(regexp_replace(new.original_url, '^((http[s]?):\/)?\/?(.*\.)?([^:\/\s]+)(\..*)((\/+)*\/)?([\w\-\.]+[^#?\s]+)(.*)?(#[\w\-]+)?$', '\4'), '')), 'A') ||
      setweight(new.note_tsv, 'A') ||
      setweight(new.content_tsv, 'B');
    return new;
end
$$ LANGUAGE plpgsql;

COMMIT;
|
||||
Reference in New Issue
Block a user