Merge pull request #1812 from omnivore-app/feature/pocket-import

feature/pocket import
This commit is contained in:
Hongbo Wu
2023-04-12 17:05:19 +08:00
committed by GitHub
53 changed files with 1701 additions and 495 deletions

View File

@ -72,6 +72,8 @@ export enum ArticleSavingRequestStatus {
Processing = 'PROCESSING',
Succeeded = 'SUCCEEDED',
Deleted = 'DELETED',
Archived = 'ARCHIVED',
}
export enum HighlightType {
@ -84,7 +86,7 @@ export interface Label {
id: string
name: string
color: string
description?: string
description?: string | null
createdAt?: Date
}

View File

@ -10,7 +10,8 @@ import {
import { User } from './user'
export enum IntegrationType {
Readwise = 'READWISE',
Export = 'EXPORT',
Import = 'IMPORT',
}
@Entity({ name: 'integrations' })
@ -22,7 +23,13 @@ export class Integration {
@JoinColumn({ name: 'user_id' })
user!: User
@Column('enum', { enum: IntegrationType })
@Column('varchar', { length: 40 })
name!: string
@Column('enum', {
enum: IntegrationType,
default: IntegrationType.Export,
})
type!: IntegrationType
@Column('varchar', { length: 255 })

View File

@ -24,7 +24,7 @@ export class Label {
color!: string
@Column('text', { nullable: true })
description?: string
description?: string | null
@CreateDateColumn()
createdAt!: Date

View File

@ -188,6 +188,7 @@ export enum ArticleSavingRequestErrorCode {
export type ArticleSavingRequestResult = ArticleSavingRequestError | ArticleSavingRequestSuccess;
export enum ArticleSavingRequestStatus {
Archived = 'ARCHIVED',
Deleted = 'DELETED',
Failed = 'FAILED',
Processing = 'PROCESSING',
@ -264,9 +265,11 @@ export enum CreateArticleErrorCode {
export type CreateArticleInput = {
articleSavingRequestId?: InputMaybe<Scalars['ID']>;
labels?: InputMaybe<Array<CreateLabelInput>>;
preparedDocument?: InputMaybe<PreparedDocumentInput>;
skipParsing?: InputMaybe<Scalars['Boolean']>;
source?: InputMaybe<Scalars['String']>;
state?: InputMaybe<ArticleSavingRequestStatus>;
uploadFileId?: InputMaybe<Scalars['ID']>;
url: Scalars['String'];
};
@ -401,7 +404,7 @@ export enum CreateLabelErrorCode {
}
export type CreateLabelInput = {
color: Scalars['String'];
color?: InputMaybe<Scalars['String']>;
description?: InputMaybe<Scalars['String']>;
name: Scalars['String'];
};
@ -940,18 +943,37 @@ export enum HighlightType {
Redaction = 'REDACTION'
}
export type ImportFromIntegrationError = {
__typename?: 'ImportFromIntegrationError';
errorCodes: Array<ImportFromIntegrationErrorCode>;
};
export enum ImportFromIntegrationErrorCode {
BadRequest = 'BAD_REQUEST',
Unauthorized = 'UNAUTHORIZED'
}
export type ImportFromIntegrationResult = ImportFromIntegrationError | ImportFromIntegrationSuccess;
export type ImportFromIntegrationSuccess = {
__typename?: 'ImportFromIntegrationSuccess';
success: Scalars['Boolean'];
};
export type Integration = {
__typename?: 'Integration';
createdAt: Scalars['Date'];
enabled: Scalars['Boolean'];
id: Scalars['ID'];
name: Scalars['String'];
token: Scalars['String'];
type: IntegrationType;
updatedAt: Scalars['Date'];
};
export enum IntegrationType {
Readwise = 'READWISE'
Export = 'EXPORT',
Import = 'IMPORT'
}
export type IntegrationsError = {
@ -1223,6 +1245,7 @@ export type Mutation = {
generateApiKey: GenerateApiKeyResult;
googleLogin: LoginResult;
googleSignup: GoogleSignupResult;
importFromIntegration: ImportFromIntegrationResult;
joinGroup: JoinGroupResult;
leaveGroup: LeaveGroupResult;
logOut: LogOutResult;
@ -1389,6 +1412,11 @@ export type MutationGoogleSignupArgs = {
};
export type MutationImportFromIntegrationArgs = {
integrationId: Scalars['ID'];
};
export type MutationJoinGroupArgs = {
inviteCode: Scalars['String'];
};
@ -2167,7 +2195,9 @@ export enum SaveErrorCode {
export type SaveFileInput = {
clientRequestId: Scalars['ID'];
labels?: InputMaybe<Array<CreateLabelInput>>;
source: Scalars['String'];
state?: InputMaybe<ArticleSavingRequestStatus>;
uploadFileId: Scalars['ID'];
url: Scalars['String'];
};
@ -2199,9 +2229,11 @@ export type SaveFilterSuccess = {
export type SavePageInput = {
clientRequestId: Scalars['ID'];
labels?: InputMaybe<Array<CreateLabelInput>>;
originalContent: Scalars['String'];
parseResult?: InputMaybe<ParseResult>;
source: Scalars['String'];
state?: InputMaybe<ArticleSavingRequestStatus>;
title?: InputMaybe<Scalars['String']>;
url: Scalars['String'];
};
@ -2216,7 +2248,9 @@ export type SaveSuccess = {
export type SaveUrlInput = {
clientRequestId: Scalars['ID'];
labels?: InputMaybe<Array<CreateLabelInput>>;
source: Scalars['String'];
state?: InputMaybe<ArticleSavingRequestStatus>;
url: Scalars['String'];
};
@ -2387,8 +2421,9 @@ export enum SetIntegrationErrorCode {
export type SetIntegrationInput = {
enabled: Scalars['Boolean'];
id?: InputMaybe<Scalars['ID']>;
name: Scalars['String'];
token: Scalars['String'];
type: IntegrationType;
type?: InputMaybe<IntegrationType>;
};
export type SetIntegrationResult = SetIntegrationError | SetIntegrationSuccess;
@ -3398,6 +3433,10 @@ export type ResolversTypes = {
HighlightStats: ResolverTypeWrapper<HighlightStats>;
HighlightType: HighlightType;
ID: ResolverTypeWrapper<Scalars['ID']>;
ImportFromIntegrationError: ResolverTypeWrapper<ImportFromIntegrationError>;
ImportFromIntegrationErrorCode: ImportFromIntegrationErrorCode;
ImportFromIntegrationResult: ResolversTypes['ImportFromIntegrationError'] | ResolversTypes['ImportFromIntegrationSuccess'];
ImportFromIntegrationSuccess: ResolverTypeWrapper<ImportFromIntegrationSuccess>;
Int: ResolverTypeWrapper<Scalars['Int']>;
Integration: ResolverTypeWrapper<Integration>;
IntegrationType: IntegrationType;
@ -3842,6 +3881,9 @@ export type ResolversParentTypes = {
HighlightReply: HighlightReply;
HighlightStats: HighlightStats;
ID: Scalars['ID'];
ImportFromIntegrationError: ImportFromIntegrationError;
ImportFromIntegrationResult: ResolversParentTypes['ImportFromIntegrationError'] | ResolversParentTypes['ImportFromIntegrationSuccess'];
ImportFromIntegrationSuccess: ImportFromIntegrationSuccess;
Int: Scalars['Int'];
Integration: Integration;
IntegrationsError: IntegrationsError;
@ -4764,10 +4806,25 @@ export type HighlightStatsResolvers<ContextType = ResolverContext, ParentType ex
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
};
export type ImportFromIntegrationErrorResolvers<ContextType = ResolverContext, ParentType extends ResolversParentTypes['ImportFromIntegrationError'] = ResolversParentTypes['ImportFromIntegrationError']> = {
errorCodes?: Resolver<Array<ResolversTypes['ImportFromIntegrationErrorCode']>, ParentType, ContextType>;
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
};
export type ImportFromIntegrationResultResolvers<ContextType = ResolverContext, ParentType extends ResolversParentTypes['ImportFromIntegrationResult'] = ResolversParentTypes['ImportFromIntegrationResult']> = {
__resolveType: TypeResolveFn<'ImportFromIntegrationError' | 'ImportFromIntegrationSuccess', ParentType, ContextType>;
};
export type ImportFromIntegrationSuccessResolvers<ContextType = ResolverContext, ParentType extends ResolversParentTypes['ImportFromIntegrationSuccess'] = ResolversParentTypes['ImportFromIntegrationSuccess']> = {
success?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType>;
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
};
export type IntegrationResolvers<ContextType = ResolverContext, ParentType extends ResolversParentTypes['Integration'] = ResolversParentTypes['Integration']> = {
createdAt?: Resolver<ResolversTypes['Date'], ParentType, ContextType>;
enabled?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType>;
id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
name?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
token?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
type?: Resolver<ResolversTypes['IntegrationType'], ParentType, ContextType>;
updatedAt?: Resolver<ResolversTypes['Date'], ParentType, ContextType>;
@ -4975,6 +5032,7 @@ export type MutationResolvers<ContextType = ResolverContext, ParentType extends
generateApiKey?: Resolver<ResolversTypes['GenerateApiKeyResult'], ParentType, ContextType, RequireFields<MutationGenerateApiKeyArgs, 'input'>>;
googleLogin?: Resolver<ResolversTypes['LoginResult'], ParentType, ContextType, RequireFields<MutationGoogleLoginArgs, 'input'>>;
googleSignup?: Resolver<ResolversTypes['GoogleSignupResult'], ParentType, ContextType, RequireFields<MutationGoogleSignupArgs, 'input'>>;
importFromIntegration?: Resolver<ResolversTypes['ImportFromIntegrationResult'], ParentType, ContextType, RequireFields<MutationImportFromIntegrationArgs, 'integrationId'>>;
joinGroup?: Resolver<ResolversTypes['JoinGroupResult'], ParentType, ContextType, RequireFields<MutationJoinGroupArgs, 'inviteCode'>>;
leaveGroup?: Resolver<ResolversTypes['LeaveGroupResult'], ParentType, ContextType, RequireFields<MutationLeaveGroupArgs, 'groupId'>>;
logOut?: Resolver<ResolversTypes['LogOutResult'], ParentType, ContextType>;
@ -6086,6 +6144,9 @@ export type Resolvers<ContextType = ResolverContext> = {
Highlight?: HighlightResolvers<ContextType>;
HighlightReply?: HighlightReplyResolvers<ContextType>;
HighlightStats?: HighlightStatsResolvers<ContextType>;
ImportFromIntegrationError?: ImportFromIntegrationErrorResolvers<ContextType>;
ImportFromIntegrationResult?: ImportFromIntegrationResultResolvers<ContextType>;
ImportFromIntegrationSuccess?: ImportFromIntegrationSuccessResolvers<ContextType>;
Integration?: IntegrationResolvers<ContextType>;
IntegrationsError?: IntegrationsErrorResolvers<ContextType>;
IntegrationsResult?: IntegrationsResultResolvers<ContextType>;

View File

@ -153,6 +153,7 @@ enum ArticleSavingRequestErrorCode {
union ArticleSavingRequestResult = ArticleSavingRequestError | ArticleSavingRequestSuccess
enum ArticleSavingRequestStatus {
ARCHIVED
DELETED
FAILED
PROCESSING
@ -222,9 +223,11 @@ enum CreateArticleErrorCode {
input CreateArticleInput {
articleSavingRequestId: ID
labels: [CreateLabelInput!]
preparedDocument: PreparedDocumentInput
skipParsing: Boolean
source: String
state: ArticleSavingRequestStatus
uploadFileId: ID
url: String!
}
@ -349,7 +352,7 @@ enum CreateLabelErrorCode {
}
input CreateLabelInput {
color: String!
color: String
description: String
name: String!
}
@ -835,17 +838,34 @@ enum HighlightType {
REDACTION
}
# Error payload for the importFromIntegration mutation.
type ImportFromIntegrationError {
errorCodes: [ImportFromIntegrationErrorCode!]!
}

enum ImportFromIntegrationErrorCode {
# The import could not be started (unexpected server-side failure).
BAD_REQUEST
# No matching integration owned by the caller was found.
UNAUTHORIZED
}

union ImportFromIntegrationResult = ImportFromIntegrationError | ImportFromIntegrationSuccess

# Returned when the import task was enqueued; the import itself runs
# asynchronously out of band.
type ImportFromIntegrationSuccess {
success: Boolean!
}
type Integration {
createdAt: Date!
enabled: Boolean!
id: ID!
name: String!
token: String!
type: IntegrationType!
updatedAt: Date!
}
enum IntegrationType {
READWISE
EXPORT
IMPORT
}
type IntegrationsError {
@ -1093,6 +1113,7 @@ type Mutation {
generateApiKey(input: GenerateApiKeyInput!): GenerateApiKeyResult!
googleLogin(input: GoogleLoginInput!): LoginResult!
googleSignup(input: GoogleSignupInput!): GoogleSignupResult!
importFromIntegration(integrationId: ID!): ImportFromIntegrationResult!
joinGroup(inviteCode: String!): JoinGroupResult!
leaveGroup(groupId: ID!): LeaveGroupResult!
logOut: LogOutResult!
@ -1571,7 +1592,9 @@ enum SaveErrorCode {
input SaveFileInput {
clientRequestId: ID!
labels: [CreateLabelInput!]
source: String!
state: ArticleSavingRequestStatus
uploadFileId: ID!
url: String!
}
@ -1601,9 +1624,11 @@ type SaveFilterSuccess {
input SavePageInput {
clientRequestId: ID!
labels: [CreateLabelInput!]
originalContent: String!
parseResult: ParseResult
source: String!
state: ArticleSavingRequestStatus
title: String
url: String!
}
@ -1617,7 +1642,9 @@ type SaveSuccess {
input SaveUrlInput {
clientRequestId: ID!
labels: [CreateLabelInput!]
source: String!
state: ArticleSavingRequestStatus
url: String!
}
@ -1775,8 +1802,9 @@ enum SetIntegrationErrorCode {
input SetIntegrationInput {
enabled: Boolean!
id: ID
name: String!
token: String!
type: IntegrationType!
type: IntegrationType
}
union SetIntegrationResult = SetIntegrationError | SetIntegrationSuccess

View File

@ -72,6 +72,7 @@ import {
UpdatesSinceSuccess,
} from '../../generated/graphql'
import { createPageSaveRequest } from '../../services/create_page_save_request'
import { createLabels } from '../../services/labels'
import { parsedContentToPage } from '../../services/save_page'
import { traceAs } from '../../tracing'
import { Merge } from '../../util'
@ -146,6 +147,8 @@ export const createArticleResolver = authorized<
uploadFileId,
skipParsing,
source,
state,
labels: inputLabels,
},
},
ctx
@ -219,6 +222,19 @@ export const createArticleResolver = authorized<
isArchived: false,
},
}
// save state
let archivedAt =
state === ArticleSavingRequestStatus.Archived ? new Date() : null
if (pageId) {
const reminder = await models.reminder.getByRequestId(uid, pageId)
if (reminder && reminder.archiveUntil) {
archivedAt = new Date()
}
}
// add labels to page
const labels = inputLabels
? await createLabels(ctx, inputLabels)
: undefined
if (uploadFileId) {
/* We do not trust the values from client, lookup upload file by querying
@ -248,7 +264,7 @@ export const createArticleResolver = authorized<
source !== 'puppeteer-parse' &&
FORCE_PUPPETEER_URLS.some((regex) => regex.test(url))
) {
await createPageSaveRequest(uid, url, models)
await createPageSaveRequest({ userId: uid, url, archivedAt, labels })
return DUMMY_RESPONSE
} else if (!skipParsing && preparedDocument?.document) {
const parseResults = await traceAs<Promise<ParsedContentPuppeteer>>(
@ -264,7 +280,7 @@ export const createArticleResolver = authorized<
} else if (!preparedDocument?.document) {
// We have a URL but no document, so we try to send this to puppeteer
// and return a dummy response.
await createPageSaveRequest(uid, url, models)
await createPageSaveRequest({ userId: uid, url, archivedAt, labels })
return DUMMY_RESPONSE
}
@ -287,14 +303,6 @@ export const createArticleResolver = authorized<
saveTime,
})
let archive = false
if (pageId) {
const reminder = await models.reminder.getByRequestId(uid, pageId)
if (reminder) {
archive = reminder.archiveUntil || false
}
}
log.info('New article saving', {
parsedArticle: Object.assign({}, articleToSave, {
content: undefined,
@ -308,7 +316,6 @@ export const createArticleResolver = authorized<
},
})
let uploadFileUrlOverride = ''
if (uploadFileId) {
const uploadFileData = await authTrx(async (tx) => {
return models.uploadFile.setFileUploadComplete(uploadFileId, tx)
@ -322,12 +329,11 @@ export const createArticleResolver = authorized<
pageId
)
}
uploadFileUrlOverride = await makeStorageFilePublic(
uploadFileData.id,
uploadFileData.fileName
)
await makeStorageFilePublic(uploadFileData.id, uploadFileData.fileName)
}
// save page's state and labels
articleToSave.archivedAt = archivedAt
articleToSave.labels = labels
if (
pageId ||
(pageId = (
@ -338,7 +344,6 @@ export const createArticleResolver = authorized<
)?.id)
) {
// update existing page's state from processing to succeeded
articleToSave.archivedAt = archive ? saveTime : null
const updated = await updatePage(pageId, articleToSave, {
...ctx,
uid,

View File

@ -1,5 +1,7 @@
/* eslint-disable prefer-const */
import { getPageByParam } from '../../elastic/pages'
import { User } from '../../entity/user'
import { getRepository } from '../../entity/utils'
import { env } from '../../env'
import {
ArticleSavingRequestError,
@ -25,7 +27,7 @@ export const createArticleSavingRequestResolver = authorized<
CreateArticleSavingRequestSuccess,
CreateArticleSavingRequestError,
MutationCreateArticleSavingRequestArgs
>(async (_, { input: { url } }, { models, claims, pubsub }) => {
>(async (_, { input: { url } }, { claims, pubsub }) => {
analytics.track({
userId: claims.uid,
event: 'link_saved',
@ -37,7 +39,11 @@ export const createArticleSavingRequestResolver = authorized<
})
try {
const request = await createPageSaveRequest(claims.uid, url, models, pubsub)
const request = await createPageSaveRequest({
userId: claims.uid,
url,
pubsub,
})
return {
articleSavingRequest: request,
}
@ -56,11 +62,14 @@ export const articleSavingRequestResolver = authorized<
ArticleSavingRequestSuccess,
ArticleSavingRequestError,
QueryArticleSavingRequestArgs
>(async (_, { id, url }, { models, claims }) => {
>(async (_, { id, url }, { claims }) => {
if (!id && !url) {
return { errorCodes: [ArticleSavingRequestErrorCode.BadData] }
}
const user = await models.user.get(claims.uid)
const user = await getRepository(User).findOne({
where: { id: claims.uid },
relations: ['profile'],
})
if (!user) {
return { errorCodes: [ArticleSavingRequestErrorCode.Unauthorized] }
}

View File

@ -62,6 +62,7 @@ import {
googleLoginResolver,
googleSignupResolver,
groupsResolver,
importFromIntegrationResolver,
integrationsResolver,
joinGroupResolver,
labelsResolver,
@ -202,6 +203,7 @@ export const functionResolvers = {
uploadImportFile: uploadImportFileResolver,
markEmailAsItem: markEmailAsItemResolver,
bulkAction: bulkActionResolver,
importFromIntegration: importFromIntegrationResolver,
},
Query: {
me: getMeUserResolver,
@ -650,4 +652,5 @@ export const functionResolvers = {
...resultResolveTypeResolver('RecentEmails'),
...resultResolveTypeResolver('MarkEmailAsItem'),
...resultResolveTypeResolver('BulkAction'),
...resultResolveTypeResolver('ImportFromIntegration'),
}

View File

@ -1,24 +1,32 @@
import { authorized } from '../../utils/helpers'
import { Integration, IntegrationType } from '../../entity/integration'
import { User } from '../../entity/user'
import { getRepository } from '../../entity/utils'
import { env } from '../../env'
import {
DeleteIntegrationError,
DeleteIntegrationErrorCode,
DeleteIntegrationSuccess,
ImportFromIntegrationError,
ImportFromIntegrationErrorCode,
ImportFromIntegrationSuccess,
IntegrationsError,
IntegrationsErrorCode,
IntegrationsSuccess,
MutationDeleteIntegrationArgs,
MutationImportFromIntegrationArgs,
MutationSetIntegrationArgs,
SetIntegrationError,
SetIntegrationErrorCode,
SetIntegrationSuccess,
} from '../../generated/graphql'
import { getRepository } from '../../entity/utils'
import { User } from '../../entity/user'
import { Integration } from '../../entity/integration'
import { getIntegrationService } from '../../services/integrations'
import { analytics } from '../../utils/analytics'
import { env } from '../../env'
import { validateToken } from '../../services/integrations'
import { deleteTask, enqueueSyncWithIntegration } from '../../utils/createTask'
import {
deleteTask,
enqueueImportFromIntegration,
enqueueSyncWithIntegration,
} from '../../utils/createTask'
import { authorized } from '../../utils/helpers'
export const setIntegrationResolver = authorized<
SetIntegrationSuccess,
@ -35,8 +43,11 @@ export const setIntegrationResolver = authorized<
}
}
let integrationToSave: Partial<Integration> = {
const integrationToSave: Partial<Integration> = {
...input,
user,
id: input.id || undefined,
type: input.type || IntegrationType.Export,
}
if (input.id) {
// Update
@ -55,46 +66,30 @@ export const setIntegrationResolver = authorized<
}
}
integrationToSave = {
...integrationToSave,
id: existingIntegration.id,
enabled: input.enabled,
token: input.token,
taskName: existingIntegration.taskName,
}
integrationToSave.id = existingIntegration.id
integrationToSave.taskName = existingIntegration.taskName
} else {
// Create
const existingIntegration = await getRepository(Integration).findOneBy({
user: { id: uid },
type: input.type,
})
if (existingIntegration) {
return {
errorCodes: [SetIntegrationErrorCode.AlreadyExists],
}
}
// validate token
if (!(await validateToken(input.token, input.type))) {
const integrationService = getIntegrationService(input.name)
// authorize and get access token
const token = await integrationService.accessToken(input.token)
if (!token) {
return {
errorCodes: [SetIntegrationErrorCode.InvalidToken],
}
}
integrationToSave = {
...integrationToSave,
token: input.token,
type: input.type,
enabled: true,
}
integrationToSave.token = token
}
// save integration
const integration = await getRepository(Integration).save(integrationToSave)
if (!integrationToSave.id || integrationToSave.enabled) {
// create a task to sync all the pages if new integration or enable integration
const taskName = await enqueueSyncWithIntegration(user.id, input.type)
if (
integrationToSave.type === IntegrationType.Export &&
(!integrationToSave.id || integrationToSave.enabled)
) {
// create a task to sync all the pages if new integration or enable integration (export type)
const taskName = await enqueueSyncWithIntegration(user.id, input.name)
log.info('enqueued task', taskName)
// update task name in integration
@ -215,7 +210,7 @@ export const deleteIntegrationResolver = authorized<
})
return {
integration: deletedIntegration,
integration,
}
} catch (error) {
log.error(error)
@ -225,3 +220,56 @@ export const deleteIntegrationResolver = authorized<
}
}
})
// Resolver for the importFromIntegration mutation: kicks off an async
// Cloud Task that imports pages from one of the user's integrations
// (e.g. Pocket). Returns success as soon as the task is enqueued; the
// actual import runs out of band in the task handler.
export const importFromIntegrationResolver = authorized<
ImportFromIntegrationSuccess,
ImportFromIntegrationError,
MutationImportFromIntegrationArgs
>(async (_, { integrationId }, { claims: { uid }, log, signToken }) => {
log.info('importFromIntegrationResolver')
try {
// Look up the integration scoped to the caller; a miss (bad id OR
// integration owned by someone else) is reported as Unauthorized.
const integration = await getRepository(Integration).findOne({
where: { id: integrationId, user: { id: uid } },
relations: ['user'],
})
if (!integration) {
return {
errorCodes: [ImportFromIntegrationErrorCode.Unauthorized],
}
}
// Mint a short-lived JWT the task handler will use to call back into
// the API on behalf of this user.
const exp = Math.floor(Date.now() / 1000) + 60 * 60 * 24 // 1 day
const authToken = (await signToken(
{ uid, exp },
env.server.jwtSecret
)) as string
// create a task to import all the pages
const taskName = await enqueueImportFromIntegration(
uid,
integration.id,
authToken
)
// update task name in integration
await getRepository(Integration).update(integration.id, { taskName })
analytics.track({
userId: uid,
event: 'integration_import',
properties: {
integrationId,
},
})
return {
success: true,
}
} catch (error) {
// Any unexpected failure (DB, token signing, task enqueue) maps to
// BadRequest for the client.
log.error(error)
return {
errorCodes: [ImportFromIntegrationErrorCode.BadRequest],
}
}
})

View File

@ -1,4 +1,17 @@
import { authorized } from '../../utils/helpers'
import { Between, ILike } from 'typeorm'
import { createPubSubClient } from '../../datalayer/pubsub'
import { getHighlightById } from '../../elastic/highlights'
import {
deleteLabel,
setLabelsForHighlight,
updateLabel,
updateLabelsInPage,
} from '../../elastic/labels'
import { getPageById } from '../../elastic/pages'
import { Label } from '../../entity/label'
import { User } from '../../entity/user'
import { getRepository, setClaims } from '../../entity/utils'
import { env } from '../../env'
import {
CreateLabelError,
CreateLabelErrorCode,
@ -25,23 +38,10 @@ import {
UpdateLabelErrorCode,
UpdateLabelSuccess,
} from '../../generated/graphql'
import { analytics } from '../../utils/analytics'
import { env } from '../../env'
import { User } from '../../entity/user'
import { Label } from '../../entity/label'
import { Between, ILike } from 'typeorm'
import { getRepository, setClaims } from '../../entity/utils'
import { createPubSubClient } from '../../datalayer/pubsub'
import { AppDataSource } from '../../server'
import { getPageById } from '../../elastic/pages'
import {
deleteLabel,
setLabelsForHighlight,
updateLabel,
updateLabelsInPage,
} from '../../elastic/labels'
import { getHighlightById } from '../../elastic/highlights'
import { getLabelsByIds } from '../../services/labels'
import { analytics } from '../../utils/analytics'
import { authorized, generateRandomColor } from '../../utils/helpers'
export const labelsResolver = authorized<LabelsSuccess, LabelsError>(
async (_obj, _params, { claims: { uid }, log }) => {
@ -114,7 +114,7 @@ export const createLabelResolver = authorized<
const label = await getRepository(Label).save({
user,
name,
color,
color: color || generateRandomColor(),
description: description || '',
})

View File

@ -4,7 +4,6 @@ import { htmlToSpeechFile } from '@omnivore/text-to-speech-handler'
import cors from 'cors'
import express from 'express'
import * as jwt from 'jsonwebtoken'
import { kx } from '../datalayer/knex_config'
import { createPubSubClient } from '../datalayer/pubsub'
import { getPageById, updatePage } from '../elastic/pages'
import { Speech, SpeechState } from '../entity/speech'
@ -12,7 +11,6 @@ import { getRepository } from '../entity/utils'
import { env } from '../env'
import { CreateArticleErrorCode } from '../generated/graphql'
import { Claims } from '../resolvers/types'
import { initModels } from '../server'
import { createPageSaveRequest } from '../services/create_page_save_request'
import { getClaimsByToken } from '../utils/auth'
import { isSiteBlockedForParse } from '../utils/blocked'
@ -59,8 +57,7 @@ export function articleRouter() {
return res.status(400).send({ errorCode: 'BAD_DATA' })
}
const models = initModels(kx, false)
const result = await createPageSaveRequest(uid, url, models)
const result = await createPageSaveRequest({ userId: uid, url })
if (isSiteBlockedForParse(url)) {
return res

View File

@ -0,0 +1,61 @@
import cors from 'cors'
import express from 'express'
import { corsConfig } from '../utils/corsConfig'
import { env } from '../env'
import axios from 'axios'
import { buildLogger } from '../utils/logger'
import { getClaimsByToken } from '../utils/auth'
const logger = buildLogger('app.dispatch')
/**
 * Router for third-party integration OAuth endpoints.
 *
 * POST /pocket/auth — step 1 of the Pocket OAuth flow: obtains a request
 * token from Pocket, stores it in a cookie, and redirects the user to
 * Pocket's authorization page. Requires an authenticated Omnivore session
 * (auth cookie or Authorization header).
 */
export function integrationRouter() {
  const router = express.Router()

  // request token from pocket
  router.post(
    '/pocket/auth',
    cors<express.Request>(corsConfig),
    async (req: express.Request, res: express.Response) => {
      // Log label matches the mounted route (was 'pocket/request-token',
      // which did not correspond to any endpoint).
      logger.info('pocket/auth')
      // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
      const token = (req.cookies.auth as string) || req.headers.authorization
      const claims = await getClaimsByToken(token)
      if (!claims) {
        return res.status(401).send('UNAUTHORIZED')
      }

      const consumerKey = env.pocket.consumerKey
      // Pocket sends the user back here after they approve or deny access.
      const redirectUri = `${env.client.url}/settings/integrations?state=pocketAuthorizationFinished`
      try {
        // make a POST request to Pocket to get a request token
        const response = await axios.post<{ code: string }>(
          'https://getpocket.com/v3/oauth/request',
          {
            consumer_key: consumerKey,
            redirect_uri: redirectUri,
          },
          {
            headers: {
              'Content-Type': 'application/json',
              'X-Accept': 'application/json',
            },
          }
        )
        const { code } = response.data

        // Store the request token in a short-lived cookie so the follow-up
        // authorization-finished step can exchange it for an access token.
        // NOTE(review): consider httpOnly/secure flags here unless the web
        // client must read this cookie — confirm with the frontend flow.
        res.cookie('pocketRequestToken', code, {
          maxAge: 1000 * 60 * 60, // 1 hour
        })
        // redirect the user to Pocket to authorize the request token
        res.redirect(
          `https://getpocket.com/auth/authorize?request_token=${code}&redirect_uri=${redirectUri}`
        )
      } catch (e) {
        // A failed request-token exchange is an error, not routine info:
        // log at error level, then bounce the user back with an error code.
        logger.error('pocket/auth request token exception:', e)
        res.redirect(
          `${env.client.url}/settings/integrations?errorCodes=UNKNOWN`
        )
      }
    }
  )

  return router
}

View File

@ -2,14 +2,19 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
import express from 'express'
import { DateTime } from 'luxon'
import { v4 as uuidv4 } from 'uuid'
import { EntityType, readPushSubscription } from '../../datalayer/pubsub'
import { getPageById, searchPages } from '../../elastic/pages'
import { Page } from '../../elastic/types'
import { Integration, IntegrationType } from '../../entity/integration'
import { getRepository } from '../../entity/utils'
import { syncWithIntegration } from '../../services/integrations'
import { Claims } from '../../resolvers/types'
import { getIntegrationService } from '../../services/integrations'
import { getClaimsByToken } from '../../utils/auth'
import { buildLogger } from '../../utils/logger'
import { DateFilter } from '../../utils/search'
import { createGCSFile } from '../../utils/uploads'
export interface Message {
type?: EntityType
@ -19,15 +24,22 @@ export interface Message {
articleId?: string
}
// Payload expected in the Cloud Task request body for an import job.
interface ImportEvent {
  integrationId: string
}

// Type guard for ImportEvent. Takes `unknown` instead of `any` so untrusted
// request bodies must be narrowed, and checks for a non-null object first:
// the bare `'integrationId' in event` form throws a TypeError when the body
// is null or a primitive.
const isImportEvent = (event: unknown): event is ImportEvent =>
  typeof event === 'object' && event !== null && 'integrationId' in event
const logger = buildLogger('app.dispatch')
export function integrationsServiceRouter() {
const router = express.Router()
router.post('/:integrationType/:action', async (req, res) => {
router.post('/:integrationName/:action', async (req, res) => {
logger.info('start to sync with integration', {
action: req.params.action,
integrationType: req.params.integrationType,
integrationName: req.params.integrationName,
})
const { message: msgStr, expired } = readPushSubscription(req)
@ -54,7 +66,8 @@ export function integrationsServiceRouter() {
const integration = await getRepository(Integration).findOneBy({
user: { id: userId },
type: req.params.integrationType.toUpperCase() as IntegrationType,
name: req.params.integrationName.toUpperCase(),
type: IntegrationType.Export,
enabled: true,
})
if (!integration) {
@ -64,6 +77,7 @@ export function integrationsServiceRouter() {
}
const action = req.params.action.toUpperCase()
const integrationService = getIntegrationService(integration.name)
if (action === 'SYNC_UPDATED') {
// get updated page by id
let id: string | undefined
@ -99,7 +113,7 @@ export function integrationsServiceRouter() {
pageId: page.id,
})
const synced = await syncWithIntegration(integration, [page])
const synced = await integrationService.export(integration, [page])
if (!synced) {
logger.info('failed to sync page', {
integrationId: integration.id,
@ -125,12 +139,12 @@ export function integrationsServiceRouter() {
;[pages, count] = (await searchPages(
{ from: after, size, dateFilters },
userId
))!
)) as [Page[], number]
const pageIds = pages.map((p) => p.id)
logger.info('syncing pages', { pageIds })
const synced = await syncWithIntegration(integration, pages)
const synced = await integrationService.export(integration, pages)
if (!synced) {
logger.info('failed to sync pages', {
pageIds,
@ -156,6 +170,95 @@ export function integrationsServiceRouter() {
res.status(500).send(err)
}
})
// import pages from integration task handler
// Invoked by the Cloud Task enqueued from importFromIntegrationResolver.
// Pages through the integration's API and streams the saved pages as CSV
// rows (url, state, labels) into a GCS object for a downstream importer.
router.post('/import', async (req, res) => {
  logger.info('start cloud task to import pages from integration')

  // Authenticate the task request via auth cookie or Authorization header.
  const token = req.cookies?.auth || req.headers?.authorization
  let claims: Claims | undefined
  try {
    claims = await getClaimsByToken(token)
    if (!claims) {
      return res.status(401).send('UNAUTHORIZED')
    }
  } catch (err) {
    logger.error('failed to get claims from token', err)
    return res.status(401).send('UNAUTHORIZED')
  }

  if (!isImportEvent(req.body)) {
    logger.info('Invalid message')
    return res.status(400).send('Bad Request')
  }

  let writeStream: NodeJS.WritableStream | undefined
  try {
    const userId = claims.uid
    // Only an enabled IMPORT-type integration owned by this user qualifies.
    const integration = await getRepository(Integration).findOneBy({
      user: { id: userId },
      id: req.body.integrationId,
      enabled: true,
      type: IntegrationType.Import,
    })
    if (!integration) {
      logger.info('No active integration found for user', { userId })
      // 200 (not 4xx) so the task queue does not retry a permanent miss.
      return res.status(200).send('No integration found')
    }
    const integrationService = getIntegrationService(integration.name)
    // import pages from integration
    logger.info('importing pages from integration', {
      integrationId: integration.id,
    })

    // write the list of urls to a csv file and upload it to gcs
    // path style: imports/<uid>/<date>/<type>-<uuid>.csv
    const dateStr = DateTime.now().toISODate()
    const fileUuid = uuidv4()
    const fullPath = `imports/${userId}/${dateStr}/URL_LIST-${fileUuid}.csv`
    // open a write_stream to the file
    const file = createGCSFile(fullPath)
    writeStream = file.createWriteStream({
      contentType: 'text/csv',
    })

    // Page through the integration until it reports no more data.
    let hasMore = true
    let offset = 0
    let since = integration.syncedAt?.getTime() || 0
    while (hasMore) {
      // get pages from integration
      const retrieved = await integrationService.retrieve({
        token: integration.token,
        since,
        offset,
      })
      const retrievedData = retrieved.data
      if (retrievedData.length === 0) {
        break
      }

      // One CSV row per page: url, state, and a quoted label list.
      // NOTE(review): url/state are not CSV-escaped — a comma or quote in a
      // URL would corrupt the row; confirm the downstream parser's format.
      const csvRows = retrievedData.map((page) => {
        const { url, state, labels } = page
        return [url, state, `"[${labels?.join(',') || ''}]"`].join(',')
      })
      // Terminate each batch with '\n' — without it, the last row of one
      // batch and the first row of the next batch fuse into a single line.
      writeStream.write(csvRows.join('\n') + '\n')

      hasMore = !!retrieved.hasMore
      offset += retrievedData.length
      since = retrieved.since || Date.now()
    }

    // update the integration's syncedAt so the next import is incremental
    await getRepository(Integration).update(integration.id, {
      syncedAt: new Date(since),
    })
  } catch (err) {
    logger.error('import pages from integration failed', err)
    return res.status(500).send(err)
  } finally {
    // Always close the GCS stream, including on early return or throw.
    writeStream?.end()
  }

  res.status(200).send('OK')
})
return router
}

View File

@ -3,9 +3,7 @@
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import express from 'express'
import { readPushSubscription } from '../../datalayer/pubsub'
import { kx } from '../../datalayer/knex_config'
import { createPageSaveRequest } from '../../services/create_page_save_request'
import { initModels } from '../../server'
interface CreateLinkRequestMessage {
url: string
@ -39,10 +37,11 @@ export function linkServiceRouter() {
}
const msg = data as CreateLinkRequestMessage
const models = initModels(kx, false)
try {
const request = await createPageSaveRequest(msg.userId, msg.url, models)
const request = await createPageSaveRequest({
userId: msg.userId,
url: msg.url,
})
console.log('create link request', request)
res.status(200).send(request)

View File

@ -488,6 +488,8 @@ const schema = gql`
uploadFileId: ID
skipParsing: Boolean
source: String
state: ArticleSavingRequestStatus
labels: [CreateLabelInput!]
}
enum CreateArticleErrorCode {
UNABLE_TO_FETCH
@ -530,6 +532,8 @@ const schema = gql`
source: String!
clientRequestId: ID!
uploadFileId: ID!
state: ArticleSavingRequestStatus
labels: [CreateLabelInput!]
}
input ParseResult {
@ -554,12 +558,16 @@ const schema = gql`
title: String
originalContent: String!
parseResult: ParseResult
state: ArticleSavingRequestStatus
labels: [CreateLabelInput!]
}
input SaveUrlInput {
url: String!
source: String!
clientRequestId: ID!
state: ArticleSavingRequestStatus
labels: [CreateLabelInput!]
}
union SaveResult = SaveSuccess | SaveError
@ -1073,6 +1081,7 @@ const schema = gql`
SUCCEEDED
FAILED
DELETED
ARCHIVED
}
type ArticleSavingRequest {
@ -1427,7 +1436,7 @@ const schema = gql`
input CreateLabelInput {
name: String! @sanitize(maxLength: 64)
color: String! @sanitize(pattern: "^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$")
color: String @sanitize(pattern: "^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$")
description: String @sanitize(maxLength: 100)
}
@ -1903,6 +1912,7 @@ const schema = gql`
type Integration {
id: ID!
name: String!
type: IntegrationType!
token: String!
enabled: Boolean!
@ -1911,7 +1921,8 @@ const schema = gql`
}
enum IntegrationType {
READWISE
EXPORT
IMPORT
}
type SetIntegrationError {
@ -1928,7 +1939,8 @@ const schema = gql`
input SetIntegrationInput {
id: ID
type: IntegrationType!
name: String!
type: IntegrationType
token: String!
enabled: Boolean!
}
@ -2416,6 +2428,23 @@ const schema = gql`
UNAUTHORIZED
}
union ImportFromIntegrationResult =
ImportFromIntegrationSuccess
| ImportFromIntegrationError
type ImportFromIntegrationSuccess {
success: Boolean!
}
type ImportFromIntegrationError {
errorCodes: [ImportFromIntegrationErrorCode!]!
}
enum ImportFromIntegrationErrorCode {
UNAUTHORIZED
BAD_REQUEST
}
# Mutations
type Mutation {
googleLogin(input: GoogleLoginInput!): LoginResult!
@ -2506,6 +2535,7 @@ const schema = gql`
): UploadImportFileResult!
markEmailAsItem(recentEmailId: ID!): MarkEmailAsItemResult!
bulkAction(query: String, action: BulkActionType!): BulkActionResult!
importFromIntegration(integrationId: ID!): ImportFromIntegrationResult!
}
# FIXME: remove sort from feedArticles after all cached tabs are closed

View File

@ -49,6 +49,7 @@ import { textToSpeechRouter } from './routers/text_to_speech'
import * as httpContext from 'express-http-context'
import { notificationRouter } from './routers/notification_router'
import { userRouter } from './routers/user_router'
import { integrationRouter } from './routers/integration_router'
const PORT = process.env.PORT || 4000
@ -135,6 +136,7 @@ export const createApp = (): {
app.use('/api/mobile-auth', mobileAuthRouter())
app.use('/api/text-to-speech', textToSpeechRouter())
app.use('/api/notification', notificationRouter())
app.use('/api/integration', integrationRouter())
app.use('/svc/pubsub/content', contentServiceRouter())
app.use('/svc/pubsub/links', linkServiceRouter())
app.use('/svc/pubsub/newsletters', newsletterServiceRouter())

View File

@ -8,16 +8,26 @@ import {
getPageByParam,
updatePage,
} from '../elastic/pages'
import { ArticleSavingRequestStatus, PageType } from '../elastic/types'
import { ArticleSavingRequestStatus, Label, PageType } from '../elastic/types'
import { User } from '../entity/user'
import { getRepository } from '../entity/utils'
import {
ArticleSavingRequest,
CreateArticleSavingRequestErrorCode,
} from '../generated/graphql'
// TODO: switch to a proper Entity instead of using the old data models.
import { DataModels } from '../resolvers/types'
import { enqueueParseRequest } from '../utils/createTask'
import { generateSlug, pageToArticleSavingRequest } from '../utils/helpers'
interface PageSaveRequest {
userId: string
url: string
pubsub?: PubsubClient
articleSavingRequestId?: string
archivedAt?: Date | null
labels?: Label[]
priority?: 'low' | 'high'
}
const SAVING_CONTENT = 'Your link is being saved...'
const isPrivateIP = privateIpLib.default
@ -58,14 +68,15 @@ export const validateUrl = (url: string): URL => {
return u
}
export const createPageSaveRequest = async (
userId: string,
url: string,
models: DataModels,
pubsub: PubsubClient = createPubSubClient(),
export const createPageSaveRequest = async ({
userId,
url,
pubsub = createPubSubClient(),
articleSavingRequestId = uuidv4(),
priority?: 'low' | 'high'
): Promise<ArticleSavingRequest> => {
archivedAt,
priority,
labels,
}: PageSaveRequest): Promise<ArticleSavingRequest> => {
try {
validateUrl(url)
} catch (error) {
@ -75,7 +86,10 @@ export const createPageSaveRequest = async (
})
}
const user = await models.user.get(userId)
const user = await getRepository(User).findOne({
where: { id: userId },
relations: ['profile'],
})
if (!user) {
console.log('User not found', userId)
return Promise.reject({
@ -116,6 +130,8 @@ export const createPageSaveRequest = async (
state: ArticleSavingRequestStatus.Processing,
createdAt: new Date(),
savedAt: new Date(),
archivedAt,
labels,
}
// create processing page
@ -137,8 +153,20 @@ export const createPageSaveRequest = async (
ctx
)
}
const labelsInput = labels?.map((label) => ({
name: label.name,
color: label.color,
description: label.description,
}))
// enqueue task to parse page
await enqueueParseRequest(url, userId, page.id, priority)
await enqueueParseRequest({
url,
userId,
saveRequestId: page.id,
priority,
state: archivedAt ? ArticleSavingRequestStatus.Archived : undefined,
labels: labelsInput,
})
return pageToArticleSavingRequest(user, page)
}

View File

@ -1,170 +0,0 @@
import { IntegrationType } from '../generated/graphql'
import { env } from '../env'
import axios from 'axios'
import { wait } from '../utils/helpers'
import { HighlightType, Page } from '../elastic/types'
import { getHighlightUrl } from './highlights'
import { Integration } from '../entity/integration'
import { getRepository } from '../entity/utils'
interface ReadwiseHighlight {
// The highlight text, (technically the only field required in a highlight object)
text: string
// The title of the page the highlight is on
title?: string
// The author of the page the highlight is on
author?: string
// The URL of the page image
image_url?: string
// The URL of the page
source_url?: string
// A meaningful unique identifier for your app
source_type?: string
// One of: books, articles, tweets or podcasts
category?: string
// Annotation note attached to the specific highlight
note?: string
// Highlight's location in the source text. Used to order the highlights
location?: number
// One of: page, order or time_offset
location_type?: string
// A datetime representing when the highlight was taken in the ISO 8601 format
highlighted_at?: string
// Unique url of the specific highlight
highlight_url?: string
}
export const READWISE_API_URL = 'https://readwise.io/api/v2'
/**
 * Validates an integration token for the given integration type.
 * Only Readwise tokens can currently be validated; every other type
 * is rejected outright.
 */
export const validateToken = async (
  token: string,
  type: IntegrationType
): Promise<boolean> => {
  if (type === IntegrationType.Readwise) {
    return validateReadwiseToken(token)
  }
  return false
}
/**
 * Checks a Readwise API token against the auth endpoint.
 * Readwise answers 204 No Content for a valid token; any error
 * (network, 401, ...) is treated as invalid.
 */
const validateReadwiseToken = async (token: string): Promise<boolean> => {
  const authUrl = `${env.readwise.apiUrl || READWISE_API_URL}/auth`
  try {
    const { status } = await axios.get(authUrl, {
      headers: { Authorization: `Token ${token}` },
    })
    return status === 204
  } catch (error) {
    console.log('error validating readwise token', error)
    return false
  }
}
/**
 * Converts a page's highlights to Readwise's highlight payload format.
 * Only entries of type Highlight that carry a quote are exported;
 * everything else is dropped.
 */
const pageToReadwiseHighlight = (page: Page): ReadwiseHighlight[] => {
  if (!page.highlights) return []
  // Readwise's category depends on the source site
  const category = page.siteName === 'Twitter' ? 'tweets' : 'articles'
  // flatMap filters and maps in one pass, avoiding a non-null assertion
  return page.highlights.flatMap((highlight) => {
    if (highlight.type !== HighlightType.Highlight || !highlight.quote) {
      return []
    }
    return {
      text: highlight.quote,
      title: page.title,
      author: page.author || undefined,
      highlight_url: getHighlightUrl(page.slug, highlight.id),
      highlighted_at: new Date(highlight.createdAt).toISOString(),
      category,
      image_url: page.image || undefined,
      // location: highlight.highlightPositionAnchorIndex || undefined,
      location_type: 'order',
      note: highlight.annotation || undefined,
      source_type: 'omnivore',
      source_url: page.url,
    }
  })
}
/**
 * Pushes the given pages out to the integration's backing service.
 * Currently only Readwise is supported; any other type returns false.
 * On success the integration's syncedAt timestamp is bumped so the next
 * sync can start from where this one left off.
 */
export const syncWithIntegration = async (
  integration: Integration,
  pages: Page[]
): Promise<boolean> => {
  let result = true
  switch (integration.type) {
    case IntegrationType.Readwise: {
      const highlights = pages.flatMap(pageToReadwiseHighlight)
      // If there are no highlights, we will skip the sync
      if (highlights.length > 0) {
        result = await syncWithReadwise(integration.token, highlights)
      }
      break
    }
    default:
      // unknown integration type: nothing synced, syncedAt untouched
      return false
  }
  // update integration syncedAt if successful
  if (result) {
    console.log('updating integration syncedAt')
    await getRepository(Integration).update(integration.id, {
      syncedAt: new Date(),
    })
  }
  return result
}
/**
 * POSTs a batch of highlights to the Readwise API.
 * Retries up to 3 times on HTTP 429, honoring the Retry-After header.
 * @returns true only when the API responds with HTTP 200
 */
export const syncWithReadwise = async (
  token: string,
  highlights: ReadwiseHighlight[],
  retryCount = 0
): Promise<boolean> => {
  const url = `${env.readwise.apiUrl || READWISE_API_URL}/highlights`
  try {
    const response = await axios.post(
      url,
      {
        highlights,
      },
      {
        headers: {
          Authorization: `Token ${token}`,
          // NOTE(review): likely meant 'Content-Type' — axios's default JSON
          // content type masks this; confirm before changing
          ContentType: 'application/json',
        },
      }
    )
    return response.status === 200
  } catch (error) {
    if (axios.isAxiosError(error)) {
      if (error.response) {
        if (error.response.status === 429 && retryCount < 3) {
          console.log('Readwise API rate limit exceeded, retrying...')
          // wait for Retry-After seconds in the header if rate limited
          // max retry count is 3
          const retryAfter = error.response?.headers['retry-after'] || '10' // default to 10 seconds
          await wait(parseInt(retryAfter, 10) * 1000)
          return syncWithReadwise(token, highlights, retryCount + 1)
        }
        // The request was made and the server responded with a status code
        // that falls out of the range of 2xx
        console.error('Readwise error, response data', error.response.data)
      } else if (error.request) {
        // The request was made but no response was received
        // `error.request` is an instance of XMLHttpRequest in the browser and an instance of
        // http.ClientRequest in node.js
        console.error('Readwise error, request', error.request)
      } else {
        // Something happened in setting up the request that triggered an Error
        console.error('Error', error.message)
      }
    } else {
      console.error('Error syncing with readwise', error)
    }
    return false
  }
}

View File

@ -0,0 +1,16 @@
import { ReadwiseIntegration } from './readwise'
import { IntegrationService } from './integration'
import { PocketIntegration } from './pocket'
// Registry of every available integration service.
const integrations: IntegrationService[] = [
  new ReadwiseIntegration(),
  new PocketIntegration(),
]

/**
 * Looks up an integration service by name (e.g. 'READWISE', 'POCKET').
 * @throws Error when no service with that name is registered
 */
export const getIntegrationService = (name: string): IntegrationService => {
  for (const candidate of integrations) {
    if (candidate.name === name) {
      return candidate
    }
  }
  throw new Error(`Integration service not found: ${name}`)
}

View File

@ -0,0 +1,37 @@
import { Integration } from '../../entity/integration'
import { ArticleSavingRequestStatus, Page } from '../../elastic/types'
// A single imported item as produced by an integration's retrieve() call.
export interface RetrievedData {
  url: string
  labels?: string[]
  state?: ArticleSavingRequestStatus
}
// One batch of results returned by an integration's retrieve() call.
export interface RetrievedResult {
  data: RetrievedData[]
  hasMore?: boolean // true when the integration has more items to fetch
  since?: number // unix timestamp in milliseconds
}
// Parameters for fetching a batch of items from an integration.
export interface RetrieveRequest {
  token: string
  since?: number // unix timestamp in milliseconds
  count?: number
  offset?: number
}
/**
 * Base class for third-party integrations (Readwise, Pocket, ...).
 * Subclasses override the hooks they support; the defaults are no-ops.
 */
export abstract class IntegrationService {
  /** Unique integration name used for lookups, e.g. 'READWISE'. */
  abstract name: string

  /** Exchanges/validates a user-supplied token; null means unsupported or invalid. */
  accessToken = async (token: string): Promise<string | null> => null

  /** Pushes pages out to the integration; false means nothing was exported. */
  export = async (
    integration: Integration,
    pages: Page[]
  ): Promise<boolean> => false

  /** Pulls one batch of items from the integration; default is an empty batch. */
  retrieve = async (req: RetrieveRequest): Promise<RetrievedResult> => ({
    data: [],
  })
}

View File

@ -0,0 +1,142 @@
import {
IntegrationService,
RetrievedResult,
RetrieveRequest,
} from './integration'
import axios from 'axios'
import { env } from '../../env'
import { ArticleSavingRequestStatus } from '../../elastic/types'
// Response shape of Pocket's /v3/get endpoint.
interface PocketResponse {
  status: number // 1 if success
  complete: number // 1 if all items have been returned
  list: {
    [key: string]: PocketItem
  }
  since: number // unix timestamp in seconds
  search_meta: {
    search_type: string
  }
  error: string
}
// A single saved item in the user's Pocket list.
interface PocketItem {
  item_id: string
  resolved_id: string
  given_url: string // URL as originally saved by the user
  resolved_url: string
  given_title: string
  resolved_title: string
  favorite: string
  status: string // '0' = active, '1' = archived, '2' = deleted (see statusToState in retrieve)
  excerpt: string
  word_count: string
  tags?: {
    [key: string]: Tag
  }
  authors?: {
    [key: string]: Author
  }
}
// A tag attached to a Pocket item; `tag` carries the tag text.
interface Tag {
  item_id: string
  tag: string
}
// An author attached to a Pocket item.
interface Author {
  item_id: string
  author_id: string
  name: string
}
/**
 * Pocket (getpocket.com) integration used for importing a user's saved
 * items via the v3 REST API.
 */
export class PocketIntegration extends IntegrationService {
  name = 'POCKET'
  POCKET_API_URL = 'https://getpocket.com/v3'
  headers = {
    'Content-Type': 'application/json',
    'X-Accept': 'application/json',
  }

  /**
   * Exchanges a Pocket OAuth request code for an access token.
   * @returns the access token, or null if the exchange fails
   */
  accessToken = async (token: string): Promise<string | null> => {
    const url = `${this.POCKET_API_URL}/oauth/authorize`
    try {
      const response = await axios.post<{ access_token: string }>(
        url,
        {
          consumer_key: env.pocket.consumerKey,
          code: token,
        },
        {
          headers: this.headers,
        }
      )
      return response.data.access_token
    } catch (error) {
      console.log('error validating pocket token', error)
      return null
    }
  }

  /**
   * Fetches one page of the user's Pocket list.
   * @param since unix timestamp in SECONDS (Pocket API convention)
   * @throws Error when the Pocket API call fails
   */
  retrievePocketData = async (
    accessToken: string,
    since: number, // unix timestamp in seconds
    count = 100,
    offset = 0
  ): Promise<PocketResponse> => {
    const url = `${this.POCKET_API_URL}/get`
    try {
      const response = await axios.post<PocketResponse>(
        url,
        {
          consumer_key: env.pocket.consumerKey,
          access_token: accessToken,
          state: 'all', // include unread, archived and deleted items
          detailType: 'complete', // include tags and authors
          since,
          sort: 'oldest',
          count,
          offset,
        },
        {
          headers: this.headers,
        }
      )
      console.debug('pocket data', response.data)
      return response.data
    } catch (error) {
      console.log('error retrieving pocket data', error)
      throw new Error('Error retrieving pocket data')
    }
  }

  /**
   * Retrieves a batch of items. `since` is in milliseconds (service-wide
   * convention) and is converted to whole seconds for the Pocket API;
   * the returned `since` is converted back to milliseconds.
   */
  retrieve = async ({
    token,
    since = 0,
    count = 100,
    offset = 0,
  }: RetrieveRequest): Promise<RetrievedResult> => {
    const pocketData = await this.retrievePocketData(
      token,
      // fix: Pocket expects an integer unix timestamp in seconds; plain
      // division of a millisecond value can yield a fractional number
      Math.floor(since / 1000),
      count,
      offset
    )
    const pocketItems = Object.values(pocketData.list)
    // map Pocket item status codes onto our saving-request states
    const statusToState: Record<string, ArticleSavingRequestStatus> = {
      '0': ArticleSavingRequestStatus.Succeeded,
      '1': ArticleSavingRequestStatus.Archived,
      '2': ArticleSavingRequestStatus.Deleted,
    }
    const data = pocketItems.map((item) => ({
      url: item.given_url,
      labels: Object.values(item.tags ?? {}).map((tag) => tag.tag),
      state: statusToState[item.status],
    }))
    return {
      data,
      hasMore: pocketData.complete !== 1, // complete === 1 -> all items returned
      since: pocketData.since * 1000, // seconds -> milliseconds
    }
  }
}

View File

@ -0,0 +1,143 @@
import axios from 'axios'
import { HighlightType, Page } from '../../elastic/types'
import { Integration } from '../../entity/integration'
import { getRepository } from '../../entity/utils'
import { env } from '../../env'
import { wait } from '../../utils/helpers'
import { getHighlightUrl } from '../highlights'
import { IntegrationService } from './integration'
interface ReadwiseHighlight {
// The highlight text, (technically the only field required in a highlight object)
text: string
// The title of the page the highlight is on
title?: string
// The author of the page the highlight is on
author?: string
// The URL of the page image
image_url?: string
// The URL of the page
source_url?: string
// A meaningful unique identifier for your app
source_type?: string
// One of: books, articles, tweets or podcasts
category?: string
// Annotation note attached to the specific highlight
note?: string
// Highlight's location in the source text. Used to order the highlights
location?: number
// One of: page, order or time_offset
location_type?: string
// A datetime representing when the highlight was taken in the ISO 8601 format
highlighted_at?: string
// Unique url of the specific highlight
highlight_url?: string
}
export const READWISE_API_URL = 'https://readwise.io/api/v2'
/**
 * Readwise (readwise.io) integration used for exporting highlights.
 */
export class ReadwiseIntegration extends IntegrationService {
  name = 'READWISE'

  /**
   * Validates a Readwise API token; the auth endpoint answers 204 for a
   * valid token. Returns the token itself when valid, null otherwise.
   */
  accessToken = async (token: string): Promise<string | null> => {
    const authUrl = `${env.readwise.apiUrl || READWISE_API_URL}/auth`
    try {
      const response = await axios.get(authUrl, {
        headers: {
          Authorization: `Token ${token}`,
        },
      })
      return response.status === 204 ? token : null
    } catch (error) {
      console.log('error validating readwise token', error)
      return null
    }
  }

  /**
   * Exports page highlights to Readwise. Returns true (and bumps the
   * integration's syncedAt) when the sync succeeded or there was nothing
   * to send.
   */
  export = async (
    integration: Integration,
    pages: Page[]
  ): Promise<boolean> => {
    let result = true
    const highlights = pages.flatMap(this.pageToReadwiseHighlight)
    // If there are no highlights, we will skip the sync
    if (highlights.length > 0) {
      result = await this.syncWithReadwise(integration.token, highlights)
    }
    // update integration syncedAt if successful
    if (result) {
      console.log('updating integration syncedAt')
      await getRepository(Integration).update(integration.id, {
        syncedAt: new Date(),
      })
    }
    return result
  }

  /**
   * Converts a page's highlights into Readwise's highlight format.
   * Highlights without a quote, or not of type Highlight, are dropped.
   */
  pageToReadwiseHighlight = (page: Page): ReadwiseHighlight[] => {
    const { highlights } = page
    if (!highlights) return []
    const category = page.siteName === 'Twitter' ? 'tweets' : 'articles'
    // flatMap filters and maps in one pass, avoiding the unsafe `as` cast
    // the previous map/filter combination needed
    return highlights.flatMap((highlight) => {
      if (highlight.type !== HighlightType.Highlight || !highlight.quote) {
        return []
      }
      return {
        text: highlight.quote,
        title: page.title,
        author: page.author || undefined,
        highlight_url: getHighlightUrl(page.slug, highlight.id),
        highlighted_at: new Date(highlight.createdAt).toISOString(),
        category,
        image_url: page.image || undefined,
        // location: highlight.highlightPositionAnchorIndex || undefined,
        location_type: 'order',
        note: highlight.annotation || undefined,
        source_type: 'omnivore',
        source_url: page.url,
      }
    })
  }

  /**
   * POSTs highlights to the Readwise API, retrying up to 3 times on
   * HTTP 429 while honoring the Retry-After header.
   * @returns true only when the API responds with HTTP 200
   */
  syncWithReadwise = async (
    token: string,
    highlights: ReadwiseHighlight[],
    retryCount = 0
  ): Promise<boolean> => {
    const url = `${env.readwise.apiUrl || READWISE_API_URL}/highlights`
    try {
      const response = await axios.post(
        url,
        {
          highlights,
        },
        {
          headers: {
            Authorization: `Token ${token}`,
            // fix: header key was 'ContentType', which sent a bogus header
            // and only worked because axios defaults JSON bodies to
            // 'Content-Type: application/json'
            'Content-Type': 'application/json',
          },
        }
      )
      return response.status === 200
    } catch (error) {
      if (
        axios.isAxiosError(error) &&
        error.response?.status === 429 &&
        retryCount < 3
      ) {
        console.log('Readwise API rate limit exceeded, retrying...')
        // wait for Retry-After seconds in the header if rate limited
        // max retry count is 3
        const retryAfter = error.response?.headers['retry-after'] || '10' // default to 10 seconds
        await wait(parseInt(retryAfter, 10) * 1000)
        return this.syncWithReadwise(token, highlights, retryCount + 1)
      }
      console.log('Error creating highlights in Readwise', error)
      return false
    }
  }
}

View File

@ -1,11 +1,12 @@
import { Label } from '../entity/label'
import { ILike, In } from 'typeorm'
import { PageContext } from '../elastic/types'
import { User } from '../entity/user'
import { addLabelInPage } from '../elastic/labels'
import { getRepository } from '../entity/utils'
import { Link } from '../entity/link'
import DataLoader from 'dataloader'
import { In } from 'typeorm'
import { addLabelInPage } from '../elastic/labels'
import { PageContext } from '../elastic/types'
import { Label } from '../entity/label'
import { Link } from '../entity/link'
import { User } from '../entity/user'
import { getRepository } from '../entity/utils'
import { CreateLabelInput } from '../generated/graphql'
import { generateRandomColor } from '../utils/helpers'
const batchGetLabelsFromLinkIds = async (
@ -39,10 +40,11 @@ export const addLabelToPage = async (
return false
}
let labelEntity = await getRepository(Label).findOneBy({
user: { id: user.id },
name: ILike(label.name),
})
let labelEntity = await getRepository(Label)
.createQueryBuilder()
.where({ user: { id: user.id } })
.andWhere('LOWER(name) = LOWER(:name)', { name: label.name })
.getOne()
if (!labelEntity) {
console.log('creating new label', label.name)
@ -86,10 +88,11 @@ export const createLabel = async (
description?: string
}
): Promise<Label> => {
const existingLabel = await getRepository(Label).findOneBy({
user: { id: userId },
name: ILike(label.name),
})
const existingLabel = await getRepository(Label)
.createQueryBuilder()
.where({ user: { id: userId } })
.andWhere('LOWER(name) = LOWER(:name)', { name: label.name })
.getOne()
if (existingLabel) {
return existingLabel
@ -103,3 +106,39 @@ export const createLabel = async (
user: { id: userId },
})
}
/**
 * Finds-or-creates labels for the context's user.
 * Matching against existing labels is case-insensitive ('Tech' matches
 * 'tech'); labels that don't exist yet are created with the given color
 * or a random one.
 * @returns the combined list of existing and newly created labels; an
 *          empty array when the user is missing or no labels were given
 */
export const createLabels = async (
  ctx: PageContext,
  labels: CreateLabelInput[]
): Promise<Label[]> => {
  const user = await getRepository(User).findOneBy({
    id: ctx.uid,
  })
  if (!user) {
    console.error('user not found')
    return []
  }
  // guard: an empty `IN ()` clause would be invalid SQL
  if (labels.length === 0) {
    return []
  }

  const labelEntities = await getRepository(Label)
    .createQueryBuilder()
    .where({
      user: { id: user.id },
    })
    .andWhere('LOWER(name) IN (:...names)', {
      names: labels.map((l) => l.name.toLowerCase()),
    })
    .getMany()

  // fix: compare case-insensitively to match the DB lookup above — a
  // case-sensitive includes() let inputs differing only in case slip
  // through and create duplicate labels. The same set also dedupes
  // repeated names within the input batch.
  const seenNames = new Set(labelEntities.map((l) => l.name.toLowerCase()))
  const newLabels = labels.filter((l) => {
    const key = l.name.toLowerCase()
    if (seenNames.has(key)) return false
    seenNames.add(key)
    return true
  })

  // create the labels that don't exist yet
  const newLabelEntities = await getRepository(Label).save(
    newLabels.map((l) => ({
      name: l.name,
      description: l.description,
      color: l.color || generateRandomColor(),
      user,
    }))
  )
  return [...labelEntities, ...newLabelEntities]
}

View File

@ -2,8 +2,15 @@ import { Knex } from 'knex'
import { PubsubClient } from '../datalayer/pubsub'
import { UserData } from '../datalayer/user/model'
import { homePageURL } from '../env'
import { SaveErrorCode, SaveFileInput, SaveResult } from '../generated/graphql'
import {
ArticleSavingRequestStatus,
SaveErrorCode,
SaveFileInput,
SaveResult,
} from '../generated/graphql'
import { DataModels } from '../resolvers/types'
import { createLabels } from './labels'
import { updatePage } from '../elastic/pages'
import { getStorageFileDetails } from '../utils/uploads'
type SaveContext = {
@ -22,7 +29,7 @@ export const saveFile = async (
input: SaveFileInput
): Promise<SaveResult> => {
console.log('saving file with input', input)
const pageId = input.clientRequestId
const uploadFile = await ctx.models.uploadFile.getWhere({
id: input.uploadFileId,
userId: saver.id,
@ -36,10 +43,40 @@ export const saveFile = async (
await getStorageFileDetails(input.uploadFileId, uploadFile.fileName)
await ctx.authTrx(async (tx) => {
const uploadFileData = await ctx.authTrx(async (tx) => {
return ctx.models.uploadFile.setFileUploadComplete(input.uploadFileId, tx)
})
if (!uploadFileData) {
return {
errorCodes: [SaveErrorCode.Unknown],
}
}
// save state
const archivedAt =
input.state === ArticleSavingRequestStatus.Archived ? new Date() : null
// add labels to page
const labels = input.labels
? await createLabels({ ...ctx, uid: saver.id }, input.labels)
: undefined
if (input.state || input.labels) {
const updated = await updatePage(
pageId,
{
archivedAt,
labels,
},
ctx
)
if (!updated) {
console.log('error updating page', pageId)
return {
errorCodes: [SaveErrorCode.Unknown],
}
}
}
return {
clientRequestId: input.clientRequestId,
url: `${homePageURL()}/${saver.profile.username}/links/${

View File

@ -22,6 +22,7 @@ import {
} from '../utils/helpers'
import { parsePreparedContent } from '../utils/parser'
import { createPageSaveRequest } from './create_page_save_request'
import { createLabels } from './labels'
type SaveContext = {
pubsub: PubsubClient
@ -107,6 +108,14 @@ export const savePage = async (
userId: saver.userId,
url: articleToSave.url,
})
// save state
articleToSave.archivedAt =
input.state === ArticleSavingRequestStatus.Archived ? new Date() : null
// add labels to page
articleToSave.labels = input.labels
? await createLabels(ctx, input.labels)
: undefined
if (existingPage) {
pageId = existingPage.id
slug = existingPage.slug
@ -116,7 +125,6 @@ export const savePage = async (
{
// update the page with the new content
...articleToSave,
archivedAt: null, // unarchive if it was archived
id: pageId, // we don't want to update the id
slug, // we don't want to update the slug
createdAt: existingPage.createdAt, // we don't want to update the createdAt
@ -131,13 +139,14 @@ export const savePage = async (
}
} else if (shouldParseInBackend(input)) {
try {
await createPageSaveRequest(
saver.userId,
articleToSave.url,
ctx.models,
ctx.pubsub,
input.clientRequestId
)
await createPageSaveRequest({
userId: saver.userId,
url: articleToSave.url,
pubsub: ctx.pubsub,
articleSavingRequestId: input.clientRequestId,
archivedAt: articleToSave.archivedAt,
labels: articleToSave.labels,
})
} catch (e) {
return {
errorCodes: [SaveErrorCode.Unknown],
@ -165,12 +174,7 @@ export const savePage = async (
type: HighlightType.Highlight,
}
if (
!(await addHighlightToPage(pageId, highlight, {
pubsub: ctx.pubsub,
uid: ctx.uid,
}))
) {
if (!(await addHighlightToPage(pageId, highlight, ctx))) {
return {
errorCodes: [SaveErrorCode.EmbeddedHighlightFailed],
message: 'Failed to save highlight',

View File

@ -4,6 +4,8 @@ import { homePageURL } from '../env'
import { SaveErrorCode, SaveResult, SaveUrlInput } from '../generated/graphql'
import { DataModels } from '../resolvers/types'
import { createPageSaveRequest } from './create_page_save_request'
import { ArticleSavingRequestStatus } from '../elastic/types'
import { createLabels } from './labels'
type SaveContext = {
pubsub: PubsubClient
@ -16,13 +18,22 @@ export const saveUrl = async (
input: SaveUrlInput
): Promise<SaveResult> => {
try {
const pageSaveRequest = await createPageSaveRequest(
saver.id,
input.url,
ctx.models,
ctx.pubsub,
input.clientRequestId
)
// save state
const archivedAt =
input.state === ArticleSavingRequestStatus.Archived ? new Date() : null
// add labels to page
const labels = input.labels
? await createLabels({ ...ctx, uid: saver.id }, input.labels)
: undefined
const pageSaveRequest = await createPageSaveRequest({
userId: saver.id,
url: input.url,
pubsub: ctx.pubsub,
articleSavingRequestId: input.clientRequestId,
archivedAt,
labels,
})
return {
clientRequestId: pageSaveRequest.id,

View File

@ -98,6 +98,10 @@ interface BackendEnv {
gcp: {
location: string
}
pocket: {
consumerKey: string
}
}
/***
@ -154,6 +158,7 @@ const nullableEnvVars = [
'AZURE_SPEECH_REGION',
'GCP_LOCATION',
'RECOMMENDATION_TASK_HANDLER_URL',
'POCKET_CONSUMER_KEY',
] // Allow some vars to be null/empty
/* If not in GAE and Prod/QA/Demo env (f.e. on localhost/dev env), allow following env vars to be null */
@ -284,6 +289,10 @@ export function getEnv(): BackendEnv {
location: parse('GCP_LOCATION'),
}
const pocket = {
consumerKey: parse('POCKET_CONSUMER_KEY'),
}
return {
pg,
client,
@ -304,6 +313,7 @@ export function getEnv(): BackendEnv {
readwise,
azure,
gcp,
pocket,
}
}

View File

@ -2,15 +2,18 @@
/* eslint-disable @typescript-eslint/restrict-template-expressions */
// Imports the Google Cloud Tasks library.
import { CloudTasksClient, protos } from '@google-cloud/tasks'
import { google } from '@google-cloud/tasks/build/protos/protos'
import axios from 'axios'
import { nanoid } from 'nanoid'
import { Recommendation } from '../elastic/types'
import { env } from '../env'
import {
ArticleSavingRequestStatus,
CreateLabelInput,
} from '../generated/graphql'
import { signFeatureToken } from '../services/features'
import { CreateTaskError } from './errors'
import { buildLogger } from './logger'
import { nanoid } from 'nanoid'
import { google } from '@google-cloud/tasks/build/protos/protos'
import { IntegrationType } from '../entity/integration'
import { signFeatureToken } from '../services/features'
import { Recommendation } from '../elastic/types'
import View = google.cloud.tasks.v2.Task.View
const logger = buildLogger('app.dispatch')
@ -195,18 +198,30 @@ export const deleteTask = async (
* @param queue - Queue name
* @returns Name of the task created
*/
export const enqueueParseRequest = async (
url: string,
userId: string,
saveRequestId: string,
priority: 'low' | 'high' = 'high',
queue = env.queue.name
): Promise<string> => {
export const enqueueParseRequest = async ({
url,
userId,
saveRequestId,
priority = 'high',
queue = env.queue.name,
state,
labels,
}: {
url: string
userId: string
saveRequestId: string
priority?: 'low' | 'high'
queue?: string
state?: ArticleSavingRequestStatus
labels?: CreateLabelInput[]
}): Promise<string> => {
const { GOOGLE_CLOUD_PROJECT } = process.env
const payload = {
url,
userId,
saveRequestId,
state,
labels,
}
// If there is no Google Cloud Project Id exposed, it means that we are in local environment
@ -280,7 +295,7 @@ export const enqueueReminder = async (
export const enqueueSyncWithIntegration = async (
userId: string,
integrationType: IntegrationType
integrationName: string
): Promise<string> => {
const { GOOGLE_CLOUD_PROJECT, PUBSUB_VERIFICATION_TOKEN } = process.env
// use pubsub data format to send the userId to the task handler
@ -305,7 +320,7 @@ export const enqueueSyncWithIntegration = async (
payload,
taskHandlerUrl: `${
env.queue.integrationTaskHandlerUrl
}/${integrationType.toLowerCase()}/sync_all?token=${PUBSUB_VERIFICATION_TOKEN}`,
}/${integrationName.toLowerCase()}/sync_all?token=${PUBSUB_VERIFICATION_TOKEN}`,
priority: 'low',
})
@ -442,4 +457,40 @@ export const enqueueRecommendation = async (
return createdTasks[0].name
}
/**
 * Enqueues a Cloud Task that triggers an import from the given integration.
 * The caller's auth cookie is forwarded so the task handler can act on the
 * user's behalf.
 * @returns the created task's name (a random id in local/dev environments)
 * @throws CreateTaskError when the task was created but has no name
 */
export const enqueueImportFromIntegration = async (
  userId: string, // currently unused; kept for call-site compatibility
  integrationId: string,
  authToken: string
): Promise<string> => {
  const { GOOGLE_CLOUD_PROJECT } = process.env

  const payload = {
    integrationId,
  }
  // forward the auth cookie to the task handler
  const requestHeaders = {
    Cookie: `auth=${authToken}`,
  }

  // If there is no Google Cloud Project Id exposed, it means that we are in local environment
  if (env.dev.isLocal || !GOOGLE_CLOUD_PROJECT) {
    return nanoid()
  }

  const createdTasks = await createHttpTaskWithToken({
    project: GOOGLE_CLOUD_PROJECT,
    payload,
    taskHandlerUrl: `${env.queue.integrationTaskHandlerUrl}/import`,
    priority: 'low',
    requestHeaders,
  })

  if (!createdTasks || !createdTasks[0].name) {
    logger.error(`Unable to get the name of the task`, {
      payload,
      createdTasks,
    })
    throw new CreateTaskError(`Unable to get the name of the task`)
  }
  return createdTasks[0].name
}
export default createHttpTaskWithToken

View File

@ -1,4 +1,14 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import crypto from 'crypto'
import normalizeUrl from 'normalize-url'
import path from 'path'
import _ from 'underscore'
import slugify from 'voca/slugify'
import wordsCounter from 'word-counting'
import { RegistrationType, UserData } from '../datalayer/user/model'
import { updatePage } from '../elastic/pages'
import { ArticleSavingRequestStatus, Page } from '../elastic/types'
import { User } from '../entity/user'
import {
ArticleSavingRequest,
CreateArticleError,
@ -6,18 +16,9 @@ import {
Profile,
ResolverFn,
} from '../generated/graphql'
import { Claims, WithDataSourcesContext } from '../resolvers/types'
import { RegistrationType, UserData } from '../datalayer/user/model'
import crypto from 'crypto'
import slugify from 'voca/slugify'
import { Merge } from '../util'
import { CreateArticlesSuccessPartial } from '../resolvers'
import { ArticleSavingRequestStatus, Page } from '../elastic/types'
import { updatePage } from '../elastic/pages'
import path from 'path'
import normalizeUrl from 'normalize-url'
import wordsCounter from 'word-counting'
import _ from 'underscore'
import { Claims, WithDataSourcesContext } from '../resolvers/types'
import { Merge } from '../util'
interface InputObject {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@ -187,7 +188,7 @@ export const pageError = async (
}
export const pageToArticleSavingRequest = (
user: UserData,
user: User,
page: Page
): ArticleSavingRequest => ({
...page,

View File

@ -384,7 +384,7 @@ const getJSONLdLinkMetadata = async (
return result
} catch (error) {
logger.warning(`Unable to get JSONLD link of the article`, error)
logger.warning(`Unable to get JSONLD link of the article`, { error })
return result
}
}

View File

@ -0,0 +1,55 @@
import { Writable } from 'stream'
// In-memory stand-ins for @google-cloud/storage used by tests: a storage of
// named buckets, each holding files whose contents are captured through a
// writable stream instead of hitting real GCS.
class MockStorage {
  buckets: { [name: string]: MockBucket }

  constructor() {
    this.buckets = {}
  }

  /** Returns the bucket with the given name, creating it on first access. */
  bucket(name: string) {
    let existing = this.buckets[name]
    if (!existing) {
      existing = new MockBucket(name)
      this.buckets[name] = existing
    }
    return existing
  }
}

export class MockBucket {
  name: string
  files: { [path: string]: MockFile }

  constructor(name: string) {
    this.name = name
    this.files = {}
  }

  /** Returns the file at the given path, creating an empty one on first access. */
  file(path: string) {
    let existing = this.files[path]
    if (!existing) {
      existing = new MockFile(path)
      this.files[path] = existing
    }
    return existing
  }
}

class MockFile {
  path: string
  contents: Buffer

  constructor(path: string) {
    this.path = path
    this.contents = Buffer.alloc(0)
  }

  /** Opens a write stream that appends every chunk to this file's contents. */
  createWriteStream() {
    return new MockWriteStream(this)
  }
}

class MockWriteStream extends Writable {
  file: MockFile

  constructor(file: MockFile) {
    super()
    this.file = file
  }

  // Appends each written chunk to the backing file's buffer.
  _write(data: Buffer, encoding: string, done: (error?: Error) => void) {
    this.file.contents = Buffer.concat([this.file.contents, data])
    done()
  }
}

View File

@ -10,6 +10,7 @@ import {
deletePage,
deletePagesByParam,
getPageById,
getPageByParam,
updatePage,
} from '../../src/elastic/pages'
import {
@ -36,6 +37,9 @@ import {
graphqlRequest,
request,
} from '../util'
import sinon from 'sinon'
import * as createTask from '../../src/utils/createTask'
import * as uploads from '../../src/utils/uploads'
chai.use(chaiString)
@ -208,7 +212,13 @@ const searchQuery = (keyword = '') => {
`
}
const savePageQuery = (url: string, title: string, originalContent: string) => {
const savePageQuery = (
url: string,
title: string,
originalContent: string,
state: ArticleSavingRequestStatus | null = null,
labels: string[] | null = null
) => {
return `
mutation {
savePage(
@ -218,6 +228,12 @@ const savePageQuery = (url: string, title: string, originalContent: string) => {
clientRequestId: "${generateFakeUuid()}",
title: "${title}",
originalContent: "${originalContent}"
state: ${state}
labels: ${
labels
? '[' + labels.map((label) => `{ name: "${label}" }`) + ']'
: null
}
}
) {
... on SaveSuccess {
@ -253,7 +269,11 @@ const saveFileQuery = (url: string, uploadFileId: string) => {
`
}
const saveUrlQuery = (url: string) => {
const saveUrlQuery = (
url: string,
state: ArticleSavingRequestStatus | null = null,
labels: string[] | null = null
) => {
return `
mutation {
saveUrl(
@ -261,6 +281,12 @@ const saveUrlQuery = (url: string) => {
url: "${url}",
source: "test",
clientRequestId: "${generateFakeUuid()}",
state: ${state}
labels: ${
labels
? '[' + labels.map((label) => `{ name: "${label}" }`) + ']'
: null
}
}
) {
... on SaveSuccess {
@ -611,21 +637,50 @@ describe('Article API', () => {
expect(allLinks.body.data.articles.edges[0].node.url).to.eq(url)
})
})
context('when we also want to save labels and archives the page', () => {
after(async () => {
await deletePagesByParam({ url }, ctx)
})
it('saves the labels and archives the page', async () => {
url = 'https://blog.omnivore.app/new-url-2'
const state = ArticleSavingRequestStatus.Archived
const labels = ['test name', 'test name 2']
await graphqlRequest(
savePageQuery(url, title, originalContent, state, labels),
authToken
).expect(200)
await refreshIndex()
const savedPage = await getPageByParam({ url })
expect(savedPage?.archivedAt).to.not.be.null
expect(savedPage?.labels?.map((l) => l.name)).to.eql(labels)
})
})
})
describe('SaveUrl', () => {
let query = ''
let url = 'https://blog.omnivore.app/new-url-1'
before(() => {
sinon.replace(createTask, 'enqueueParseRequest', sinon.fake.resolves(''))
})
beforeEach(() => {
query = saveUrlQuery(url)
})
context('when we save a new url', () => {
after(async () => {
await deletePagesByParam({ url }, ctx)
})
after(() => {
sinon.restore()
})
afterEach(async () => {
await deletePagesByParam({ url }, ctx)
})
context('when we save a new url', () => {
it('should return a slugged url', async () => {
const res = await graphqlRequest(query, authToken).expect(200)
expect(res.body.data.saveUrl.url).to.startsWith(
@ -633,6 +688,23 @@ describe('Article API', () => {
)
})
})
context('when we save labels', () => {
it('saves the labels and archives the page', async () => {
url = 'https://blog.omnivore.app/new-url-2'
const state = ArticleSavingRequestStatus.Archived
const labels = ['test name', 'test name 2']
await graphqlRequest(
saveUrlQuery(url, state, labels),
authToken
).expect(200)
await refreshIndex()
const savedPage = await getPageByParam({ url })
expect(savedPage?.archivedAt).to.not.be.null
expect(savedPage?.labels?.map((l) => l.name)).to.eql(labels)
})
})
})
describe('setBookmarkArticle', () => {
@ -779,15 +851,27 @@ describe('Article API', () => {
})
})
xdescribe('SaveFile', () => {
describe('SaveFile', () => {
let query = ''
let url = ''
let uploadFileId = ''
before(() => {
sinon.replace(
uploads,
'getStorageFileDetails',
sinon.fake.resolves({ fileUrl: 'fake url', md5Hash: 'fake hash' })
)
})
beforeEach(() => {
query = saveFileQuery(url, uploadFileId)
})
after(() => {
sinon.restore()
})
context('when the file is not uploaded', () => {
before(async () => {
url = 'fake url'

View File

@ -2,18 +2,15 @@ import 'mocha'
import { User } from '../../src/entity/user'
import { createTestUser, deleteTestIntegrations, deleteTestUser } from '../db'
import { generateFakeUuid, graphqlRequest, request } from '../util'
import {
IntegrationType,
SetIntegrationErrorCode,
} from '../../src/generated/graphql'
import { expect } from 'chai'
import { SetIntegrationErrorCode } from '../../src/generated/graphql'
import chai, { expect } from 'chai'
import { getRepository } from '../../src/entity/utils'
import {
Integration,
IntegrationType as DataIntegrationType,
} from '../../src/entity/integration'
import { Integration } from '../../src/entity/integration'
import nock from 'nock'
import { READWISE_API_URL } from '../../src/services/integrations'
import { READWISE_API_URL } from '../../src/services/integrations/readwise'
import sinonChai from 'sinon-chai'
chai.use(sinonChai)
describe('Integrations resolvers', () => {
let loginUser: User
@ -37,14 +34,14 @@ describe('Integrations resolvers', () => {
const validToken = 'valid-token'
const query = (
id = '',
type: IntegrationType = IntegrationType.Readwise,
name = 'READWISE',
token: string = 'test token',
enabled = true
) => `
mutation {
setIntegration(input: {
id: "${id}",
type: ${type},
name: "${name}",
token: "${token}",
enabled: ${enabled},
}) {
@ -62,7 +59,7 @@ describe('Integrations resolvers', () => {
`
let integrationId: string
let token: string
let integrationType: IntegrationType
let integrationName: string
let enabled: boolean
let scope: nock.Scope
@ -74,6 +71,7 @@ describe('Integrations resolvers', () => {
.get('/auth')
.reply(204)
.persist()
integrationName = 'READWISE'
})
after(() => {
@ -85,80 +83,56 @@ describe('Integrations resolvers', () => {
integrationId = ''
})
context('when integration exists', () => {
let existingIntegration: Integration
before(async () => {
existingIntegration = await getRepository(Integration).save({
user: { id: loginUser.id },
type: DataIntegrationType.Readwise,
token: 'fakeToken',
context('when token is invalid', () => {
before(() => {
token = 'invalid token'
nock(READWISE_API_URL, {
reqheaders: { Authorization: `Token ${token}` },
})
integrationType = existingIntegration.type
.get('/auth')
.reply(401)
})
after(async () => {
await deleteTestIntegrations(loginUser.id, [existingIntegration.id])
})
it('returns AlreadyExists error code', async () => {
it('returns InvalidToken error code', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType),
query(integrationId, integrationName, token),
authToken
)
expect(res.body.data.setIntegration.errorCodes).to.eql([
SetIntegrationErrorCode.AlreadyExists,
SetIntegrationErrorCode.InvalidToken,
])
})
})
context('when integration does not exist', () => {
context('when token is invalid', () => {
before(() => {
token = 'invalid token'
})
context('when token is valid', () => {
before(() => {
token = validToken
})
it('returns InvalidToken error code', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType, token),
authToken
)
expect(res.body.data.setIntegration.errorCodes).to.eql([
SetIntegrationErrorCode.InvalidToken,
])
afterEach(async () => {
await deleteTestIntegrations(loginUser.id, {
user: { id: loginUser.id },
name: integrationName,
})
})
context('when token is valid', () => {
before(() => {
token = validToken
})
it('creates new integration', async () => {
const res = await graphqlRequest(
query(integrationId, integrationName, token),
authToken
)
expect(res.body.data.setIntegration.integration.enabled).to.be.true
})
afterEach(async () => {
await deleteTestIntegrations(loginUser.id, {
user: { id: loginUser.id },
type: integrationType,
})
})
it('creates new integration', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType, token),
authToken
)
expect(res.body.data.setIntegration.integration.enabled).to.be.true
})
it('creates new cloud task to sync all existing articles and highlights', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType, token),
authToken
)
const integration = await getRepository(Integration).findOneBy({
id: res.body.data.setIntegration.integration.id,
})
expect(integration?.taskName).not.to.be.null
it('creates new cloud task to sync all existing articles and highlights', async () => {
const res = await graphqlRequest(
query(integrationId, integrationName, token),
authToken
)
const integration = await getRepository(Integration).findOneBy({
id: res.body.data.setIntegration.integration.id,
})
expect(integration?.taskName).not.to.be.null
})
})
})
@ -173,7 +147,7 @@ describe('Integrations resolvers', () => {
it('returns NotFound error code', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType),
query(integrationId, integrationName),
authToken
)
expect(res.body.data.setIntegration.errorCodes).to.eql([
@ -190,7 +164,7 @@ describe('Integrations resolvers', () => {
otherUser = await createTestUser('otherUser')
existingIntegration = await getRepository(Integration).save({
user: { id: otherUser.id },
type: DataIntegrationType.Readwise,
name: 'READWISE',
token: 'fakeToken',
})
integrationId = existingIntegration.id
@ -198,12 +172,12 @@ describe('Integrations resolvers', () => {
after(async () => {
await deleteTestUser(otherUser.id)
await deleteTestIntegrations(loginUser.id, [existingIntegration.id])
await deleteTestIntegrations(otherUser.id, [existingIntegration.id])
})
it('returns Unauthorized error code', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType),
query(integrationId, integrationName),
authToken
)
expect(res.body.data.setIntegration.errorCodes).to.eql([
@ -216,7 +190,7 @@ describe('Integrations resolvers', () => {
before(async () => {
existingIntegration = await getRepository(Integration).save({
user: { id: loginUser.id },
type: DataIntegrationType.Readwise,
name: 'READWISE',
token: 'fakeToken',
})
integrationId = existingIntegration.id
@ -240,7 +214,7 @@ describe('Integrations resolvers', () => {
it('disables integration', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType, token, enabled),
query(integrationId, integrationName, token, enabled),
authToken
)
expect(res.body.data.setIntegration.integration.enabled).to.be
@ -249,7 +223,7 @@ describe('Integrations resolvers', () => {
it('deletes cloud task', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType, token, enabled),
query(integrationId, integrationName, token, enabled),
authToken
)
const integration = await getRepository(Integration).findOneBy({
@ -273,7 +247,7 @@ describe('Integrations resolvers', () => {
it('enables integration', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType, token, enabled),
query(integrationId, integrationName, token, enabled),
authToken
)
expect(res.body.data.setIntegration.integration.enabled).to.be
@ -282,7 +256,7 @@ describe('Integrations resolvers', () => {
it('creates new cloud task to sync all existing articles and highlights', async () => {
const res = await graphqlRequest(
query(integrationId, integrationType, token, enabled),
query(integrationId, integrationName, token, enabled),
authToken
)
const integration = await getRepository(Integration).findOneBy({
@ -316,7 +290,7 @@ describe('Integrations resolvers', () => {
before(async () => {
existingIntegration = await getRepository(Integration).save({
user: { id: loginUser.id },
type: DataIntegrationType.Readwise,
name: 'READWISE',
token: 'fakeToken',
})
})
@ -362,7 +336,7 @@ describe('Integrations resolvers', () => {
beforeEach(async () => {
existingIntegration = await getRepository(Integration).save({
user: { id: loginUser.id },
type: DataIntegrationType.Readwise,
name: 'READWISE',
token: 'fakeToken',
taskName: 'some task name',
})
@ -385,4 +359,59 @@ describe('Integrations resolvers', () => {
})
})
})
// Resolver tests for the importFromIntegration mutation: a valid integration
// id should enqueue a cloud task; an unknown id should be rejected.
describe('importFromIntegration API', () => {
  // GraphQL mutation asking the server to import from the given integration
  const query = (integrationId: string) => `
  mutation {
    importFromIntegration(integrationId: "${integrationId}") {
      ... on ImportFromIntegrationSuccess {
        success
      }
      ... on ImportFromIntegrationError {
        errorCodes
      }
    }
  }
  `
  let existingIntegration: Integration

  context('when integration exists', () => {
    before(async () => {
      // integration owned by the logged-in test user
      existingIntegration = await getRepository(Integration).save({
        user: { id: loginUser.id },
        name: 'POCKET',
        token: 'fakeToken',
      })
    })

    after(async () => {
      await deleteTestIntegrations(loginUser.id, [existingIntegration.id])
    })

    it('returns success and starts cloud task', async () => {
      const res = await graphqlRequest(
        query(existingIntegration.id),
        authToken
      ).expect(200)
      expect(res.body.data.importFromIntegration.success).to.be.true
      // the resolver should have enqueued a cloud task and stored its name
      const integration = await getRepository(Integration).findOneBy({
        id: existingIntegration.id,
      })
      expect(integration?.taskName).not.to.be.null
    })
  })

  context('when integration does not exist', () => {
    it('returns error', async () => {
      const invalidIntegrationId = generateFakeUuid()
      const res = await graphqlRequest(
        query(invalidIntegrationId),
        authToken
      ).expect(200)
      // an unknown id is reported as UNAUTHORIZED, same as another user's id
      expect(res.body.data.importFromIntegration.errorCodes).to.eql([
        'UNAUTHORIZED',
      ])
    })
  })
})
})

View File

@ -6,6 +6,8 @@ import { graphqlRequest, request } from '../util'
import { getRepository } from '../../src/entity/utils'
import { ReceivedEmail } from '../../src/entity/received_email'
import { NewsletterEmail } from '../../src/entity/newsletter_email'
import sinon from 'sinon'
import * as sendEmail from '../../src/utils/sendEmail'
describe('Recent Emails Resolver', () => {
const recentEmailsQuery = `
@ -121,11 +123,13 @@ describe('Recent Emails Resolver', () => {
to: newsletterEmail.address,
type: 'non-article',
})
sinon.replace(sendEmail, 'sendEmail', sinon.fake.resolves(true))
})
after(async () => {
// clean up
await getRepository(ReceivedEmail).delete(recentEmail.id)
sinon.restore()
})
it('marks email as item', async () => {

View File

@ -5,6 +5,8 @@ import nock from 'nock'
import 'mocha'
import { env } from '../../src/env'
import { User } from '../../src/entity/user'
import sinon from 'sinon'
import * as createTask from '../../src/utils/createTask'
describe('/article/save API', () => {
let user: User
@ -33,6 +35,14 @@ describe('/article/save API', () => {
describe('POST /article/save', () => {
const url = 'https://blog.omnivore.app'
before(() => {
sinon.replace(createTask, 'enqueueParseRequest', sinon.fake.resolves(''))
})
after(() => {
sinon.restore()
})
context('when token and url are valid', () => {
it('should create an article saving request', async () => {
const response = await request

View File

@ -17,20 +17,41 @@ import {
PageContext,
} from '../../src/elastic/types'
import nock from 'nock'
import { READWISE_API_URL } from '../../src/services/integrations'
import { addHighlightToPage } from '../../src/elastic/highlights'
import { getHighlightUrl } from '../../src/services/highlights'
import { deletePage } from '../../src/elastic/pages'
import { READWISE_API_URL } from '../../src/services/integrations/readwise'
import sinon from 'sinon'
import { Storage } from '@google-cloud/storage'
import { MockBucket } from '../mock_storage'
import { env } from '../../src/env'
describe('Integrations routers', () => {
const baseUrl = '/svc/pubsub/integrations'
let token: string
let user: User
let authToken: string
before(async () => {
user = await createTestUser('fakeUser')
const res = await request
.post('/local/debug/fake-user-login')
.send({ fakeEmail: user.email })
const body = res.body as { authToken: string }
authToken = body.authToken
})
after(async () => {
await deleteTestUser(user.id)
})
describe('sync with integrations', () => {
const endpoint = (token: string, type = 'type', action = 'action') =>
`/svc/pubsub/integrations/${type}/${action}?token=${token}`
const endpoint = (token: string, name = 'name', action = 'action') =>
`${baseUrl}/${name}/${action}?token=${token}`
let action: string
let data: PubSubRequestBody
let integrationType: string
let integrationName: string
context('when token is invalid', () => {
before(() => {
@ -44,7 +65,7 @@ describe('Integrations routers', () => {
context('when token is valid', () => {
before(() => {
token = process.env.PUBSUB_VERIFICATION_TOKEN!
token = process.env.PUBSUB_VERIFICATION_TOKEN as string
})
context('when data is expired', () => {
@ -83,19 +104,9 @@ describe('Integrations routers', () => {
})
context('when user exists', () => {
let user: User
before(async () => {
user = await createTestUser('fakeUser')
})
after(async () => {
await deleteTestUser(user.id)
})
context('when integration not found', () => {
before(() => {
integrationType = IntegrationType.Readwise
integrationName = 'READWISE'
data = {
message: {
data: Buffer.from(
@ -108,7 +119,7 @@ describe('Integrations routers', () => {
it('returns 200 with No integration found', async () => {
const res = await request
.post(endpoint(token, integrationType))
.post(endpoint(token, integrationName))
.send(data)
.expect(200)
expect(res.text).to.eql('No integration found')
@ -125,10 +136,10 @@ describe('Integrations routers', () => {
before(async () => {
integration = await getRepository(Integration).save({
user: { id: user.id },
type: IntegrationType.Readwise,
name: 'READWISE',
token: 'token',
})
integrationType = integration.type
integrationName = integration.name
// create page
page = await createTestElasticPage(user.id)
ctx = {
@ -177,7 +188,7 @@ describe('Integrations routers', () => {
})
context('when action is sync_updated', () => {
before(async () => {
before(() => {
action = 'sync_updated'
})
@ -208,7 +219,7 @@ describe('Integrations routers', () => {
it('returns 200 with OK', async () => {
const res = await request
.post(endpoint(token, integrationType, action))
.post(endpoint(token, integrationName, action))
.send(data)
.expect(200)
expect(res.text).to.eql('OK')
@ -240,7 +251,7 @@ describe('Integrations routers', () => {
it('returns 200 with OK', async () => {
const res = await request
.post(endpoint(token, integrationType, action))
.post(endpoint(token, integrationName, action))
.send(data)
.expect(200)
expect(res.text).to.eql('OK')
@ -275,7 +286,7 @@ describe('Integrations routers', () => {
it('returns 200 with OK', async () => {
const res = await request
.post(endpoint(token, integrationType, action))
.post(endpoint(token, integrationName, action))
.send(data)
.expect(200)
expect(res.text).to.eql('OK')
@ -313,7 +324,7 @@ describe('Integrations routers', () => {
it('returns 200 with OK', async () => {
const res = await request
.post(endpoint(token, integrationType, action))
.post(endpoint(token, integrationName, action))
.send(data)
.expect(200)
expect(res.text).to.eql('OK')
@ -323,4 +334,72 @@ describe('Integrations routers', () => {
})
})
})
describe('import from integrations router', () => {
let integration: Integration
before(async () => {
token = 'test token'
// create integration
integration = await getRepository(Integration).save({
user: { id: user.id },
name: 'POCKET',
token,
type: IntegrationType.Import,
})
// mock Pocket API
nock('https://getpocket.com', {
reqheaders: {
'content-type': 'application/json',
'x-accept': 'application/json',
},
})
.post('/v3/get', {
access_token: token,
consumer_key: env.pocket.consumerKey,
state: 'all',
detailType: 'complete',
since: 0,
sort: 'oldest',
count: 100,
offset: 0,
})
.reply(200, {
complete: 1,
list: {
'123': {
given_url: 'https://omnivore.app/pocket-import-test',
state: '0',
},
},
since: Date.now() / 1000,
})
// mock cloud storage
const mockBucket = new MockBucket('test')
sinon.replace(
Storage.prototype,
'bucket',
sinon.fake.returns(mockBucket as never)
)
})
after(async () => {
sinon.restore()
await deleteTestIntegrations(user.id, [integration.id])
})
context('when integration is pocket', () => {
it('returns 200 with OK', async () => {
return request
.post(`${baseUrl}/import`)
.send({
integrationId: integration.id,
})
.set('Cookie', `auth=${authToken}`)
.expect(200)
})
})
})
})

View File

@ -11,19 +11,23 @@ import { User } from '../../src/entity/user'
describe('saveEmail', () => {
const fakeContent = 'fake content'
let user: User
let scope: nock.Scope
before(async () => {
// create test user
user = await createTestUser('fakeUser')
scope = nock('https://blog.omnivore.app')
.get('/fake-url')
.reply(200)
.persist()
})
after(async () => {
await deleteTestUser(user.id)
scope.persist(false)
})
it('doesnt fail if saved twice', async () => {
nock('https://blog.omnivore.app').get('/fake-url').reply(404)
const url = 'https://blog.omnivore.app/fake-url'
const title = 'fake title'
const author = 'fake author'

View File

@ -50,7 +50,8 @@ describe('saveNewsletterEmail', () => {
})
it('adds the newsletter to the library', async () => {
nock('https://blog.omnivore.app').get('/fake-url').reply(404)
nock('https://blog.omnivore.app').get('/fake-url').reply(200)
nock('https://blog.omnivore.app').head('/fake-url').reply(200)
const url = 'https://blog.omnivore.app/fake-url'
await saveNewsletterEmail(
@ -88,6 +89,8 @@ describe('saveNewsletterEmail', () => {
})
it('adds a Newsletter label to that page', async () => {
nock('https://blog.omnivore.app').get('/new-fake-url').reply(200)
nock('https://blog.omnivore.app').head('/new-fake-url').reply(200)
const url = 'https://blog.omnivore.app/new-fake-url'
const newLabel = {
name: 'Newsletter',

View File

@ -21,10 +21,10 @@ const load = (path: string): string => {
return fs.readFileSync(path, 'utf8')
}
describe('parseMetadata', async () => {
it('gets author, title, image, description', async () => {
describe('parseMetadata', () => {
it('gets author, title, image, description', () => {
const html = load('./test/utils/data/substack-post.html')
const metadata = await parsePageMetadata(html)
const metadata = parsePageMetadata(html)
expect(metadata?.author).to.deep.equal('Omnivore')
expect(metadata?.title).to.deep.equal('Code Block Syntax Highlighting')
expect(metadata?.previewImage).to.deep.equal(
@ -36,7 +36,7 @@ describe('parseMetadata', async () => {
})
})
describe('parsePreparedContent', async () => {
describe('parsePreparedContent', () => {
it('gets published date when JSONLD fails to load', async () => {
nock('https://stratechery.com:443', { encodedQueryParams: true })
.get('/wp-json/oembed/1.0/embed')
@ -78,7 +78,7 @@ describe('parsePreparedContent', async () => {
})
})
describe('parsePreparedContent', async () => {
describe('parsePreparedContent', () => {
nock('https://oembeddata').get('/').reply(200, {
version: '1.0',
provider_name: 'Hippocratic Adventures',

View File

@ -0,0 +1,16 @@
-- Type: DO
-- Name: change_type_in_integration
-- Description: Change type field in integration table

BEGIN;

-- Repurpose the old enum-typed "type" column (READWISE/POCKET) as a
-- free-form integration "name"
ALTER TABLE omnivore.integrations RENAME COLUMN "type" TO "name";

-- Drop the unique (user_id, type) constraint and widen "name" to varchar
ALTER TABLE omnivore.integrations
    DROP CONSTRAINT integrations_user_id_type_key,
    ALTER COLUMN "name" TYPE VARCHAR(40) USING "name"::VARCHAR(40);

-- Recreate integration_type to mean the direction of the integration
DROP TYPE omnivore.integration_type;
CREATE TYPE omnivore.integration_type AS ENUM ('EXPORT', 'IMPORT');

-- Re-add "type" with its new meaning; existing rows default to EXPORT
ALTER TABLE omnivore.integrations
    ADD COLUMN "type" omnivore.integration_type NOT NULL DEFAULT 'EXPORT';

COMMIT;

View File

@ -0,0 +1,15 @@
-- Type: UNDO
-- Name: change_type_in_integration
-- Description: Change type field in integration table

BEGIN;

-- Remove the direction column and its enum added by the DO migration
ALTER TABLE omnivore.integrations DROP COLUMN "type";
DROP TYPE omnivore.integration_type;

-- Restore the original provider enum. NOTE(review): this assumes every
-- "name" value is READWISE or POCKET; other names would fail the cast.
CREATE TYPE omnivore.integration_type AS ENUM ('READWISE', 'POCKET');
ALTER TABLE omnivore.integrations
    ALTER COLUMN "name" TYPE omnivore.integration_type USING "name"::omnivore.integration_type,
    ADD CONSTRAINT integrations_user_id_type_key UNIQUE (user_id, "name");

-- Rename back so the column is again called "type"
ALTER TABLE omnivore.integrations RENAME COLUMN "name" TO "type";

COMMIT;

View File

@ -13,7 +13,10 @@ export const importCsv = async (ctx: ImportContext, stream: Stream) => {
for await (const row of parser) {
try {
const url = new URL(row[0])
await ctx.urlHandler(ctx, url)
const state = row.length > 1 ? row[1] : undefined
// labels follows format: "[label1, label2]"
const labels = row.length > 2 ? row[2].slice(1, -1).split(',') : undefined
await ctx.urlHandler(ctx, url, state, labels)
ctx.countImported += 1
} catch (error) {
console.log('invalid url', row, error)

View File

@ -1,7 +1,7 @@
import { Storage } from '@google-cloud/storage'
import { importCsv } from './csv'
import * as path from 'path'
import { importMatterArchive, importMatterHistoryCsv } from './matterHistory'
import { importMatterArchive } from './matterHistory'
import { Stream } from 'node:stream'
import { v4 as uuid } from 'uuid'
import { CONTENT_FETCH_URL, createCloudTask, emailUserUrl } from './task'
@ -13,6 +13,15 @@ import { Readability } from '@omnivore/readability'
import * as Sentry from '@sentry/serverless'
export enum ArticleSavingRequestStatus {
Failed = 'FAILED',
Processing = 'PROCESSING',
Succeeded = 'SUCCEEDED',
Deleted = 'DELETED',
Archived = 'ARCHIVED',
}
Sentry.GCPFunction.init({
dsn: process.env.SENTRY_DSN,
tracesSampleRate: 0,
@ -24,7 +33,12 @@ const storage = new Storage()
const CONTENT_TYPES = ['text/csv', 'application/zip']
export type UrlHandler = (ctx: ImportContext, url: URL) => Promise<void>
export type UrlHandler = (
ctx: ImportContext,
url: URL,
state?: ArticleSavingRequestStatus,
labels?: string[]
) => Promise<void>
export type ContentHandler = (
ctx: ImportContext,
url: URL,
@ -66,13 +80,19 @@ const shouldHandle = (data: StorageEvent) => {
const importURL = async (
userId: string,
url: URL,
source: string
source: string,
state?: ArticleSavingRequestStatus,
labels?: string[]
): Promise<string | undefined> => {
return createCloudTask(CONTENT_FETCH_URL, {
userId,
source,
url: url.toString(),
saveRequestId: uuid(),
state,
labels: labels?.map((l) => {
return { name: l }
}),
})
}
@ -122,10 +142,21 @@ const handlerForFile = (name: string): importHandlerFunc | undefined => {
return undefined
}
const urlHandler = async (ctx: ImportContext, url: URL): Promise<void> => {
const urlHandler = async (
ctx: ImportContext,
url: URL,
state?: ArticleSavingRequestStatus,
labels?: string[]
): Promise<void> => {
try {
// Imports are stored in the format imports/<user id>/<type>-<uuid>.csv
const result = await importURL(ctx.userId, url, 'csv-importer')
const result = await importURL(
ctx.userId,
url,
'csv-importer',
state,
labels
)
if (result) {
ctx.countImported += 1
}

View File

@ -4,7 +4,7 @@ import { expect } from 'chai'
import chaiString from 'chai-string'
import * as fs from 'fs'
import { importCsv } from '../../src/csv'
import { ImportContext } from '../../src'
import { ArticleSavingRequestStatus, ImportContext } from '../../src'
import { stubImportCtx } from '../util'
chai.use(chaiString)
@ -28,3 +28,44 @@ describe('Load a simple CSV file', () => {
])
})
})
// Verifies that importCsv forwards the optional state and labels columns of
// each CSV row to the url handler, alongside the URL itself.
describe('Load a complex CSV file', () => {
  it('should call the handler for each URL, state and labels', async () => {
    // arguments captured from every urlHandler invocation
    const results: {
      url: URL
      state?: ArticleSavingRequestStatus
      labels?: string[]
    }[] = []
    const stream = fs.createReadStream('./test/csv/data/complex.csv')
    const stub = stubImportCtx()
    // record the handler arguments instead of performing a real import
    stub.urlHandler = (
      ctx: ImportContext,
      url,
      state,
      labels
    ): Promise<void> => {
      results.push({
        url,
        state,
        labels,
      })
      return Promise.resolve()
    }
    await importCsv(stub, stream)
    expect(stub.countFailed).to.equal(0)
    expect(stub.countImported).to.equal(2)
    // fixture rows carry a URL, a saving state, and a "[a,b]" label list
    expect(results).to.eql([
      {
        url: new URL('https://omnivore.app'),
        state: 'ARCHIVED',
        labels: ['test'],
      },
      {
        url: new URL('https://google.com'),
        state: 'SUCCEEDED',
        labels: ['test', 'development'],
      },
    ])
  })
})

View File

@ -0,0 +1,2 @@
"https://omnivore.app",ARCHIVED,"[test]"
"https://google.com",SUCCEEDED,"[test,development]"
1 https://omnivore.app ARCHIVED [test]
2 https://google.com SUCCEEDED [test,development]

View File

@ -1,12 +1,17 @@
import { Readability } from '@omnivore/readability'
import { ImportContext } from '../src'
import { ArticleSavingRequestStatus, ImportContext } from '../src'
export const stubImportCtx = () => {
return {
userId: '',
countImported: 0,
countFailed: 0,
urlHandler: (ctx: ImportContext, url: URL): Promise<void> => {
urlHandler: (
ctx: ImportContext,
url: URL,
state?: ArticleSavingRequestStatus,
labels?: string[]
): Promise<void> => {
return Promise.resolve()
},
contentHandler: (

View File

@ -248,6 +248,8 @@ async function fetchContent(req, res) {
let url = getUrl(req);
const userId = (req.query ? req.query.userId : undefined) || (req.body ? req.body.userId : undefined);
const articleSavingRequestId = (req.query ? req.query.saveRequestId : undefined) || (req.body ? req.body.saveRequestId : undefined);
const state = req.body.state
const labels = req.body.labels
let logRecord = {
url,
@ -256,6 +258,8 @@ async function fetchContent(req, res) {
labels: {
source: 'parseContent',
},
state,
labelsToAdd: labels
};
console.info(`Article parsing request`, logRecord);
@ -333,6 +337,8 @@ async function fetchContent(req, res) {
title,
originalContent: content,
parseResult: readabilityResult,
state,
labels,
});
logRecord.totalTime = Date.now() - functionStartTime;
@ -367,6 +373,8 @@ async function fetchContent(req, res) {
title,
originalContent: content,
parseResult: readabilityResult,
state,
labels,
});
logRecord.totalTime = Date.now() - functionStartTime;

View File

@ -26,7 +26,7 @@ const Header = styled(Box, {
export function Readwise(): JSX.Element {
const { integrations, revalidate } = useGetIntegrationsQuery()
const readwiseIntegration = useMemo(() => {
return integrations.find((i) => i.type == 'READWISE')
return integrations.find((i) => i.name == 'READWISE' && i.type == 'EXPORT')
}, [integrations])
return (
@ -82,7 +82,8 @@ function AddReadwiseForm(): JSX.Element {
try {
const result = await setIntegrationMutation({
token,
type: 'READWISE',
name: 'READWISE',
type: 'EXPORT',
enabled: true,
})
if (result) {

View File

@ -0,0 +1,35 @@
import { gqlFetcher } from '../networkHelpers'
interface ImportFromIntegrationDataResponseData {
  importFromIntegration?: ImportFromIntegrationData
}

interface ImportFromIntegrationData {
  success: boolean
  errorCodes?: unknown[]
}

/**
 * Kicks off a server-side import of the user's saved items from the given
 * integration (e.g. Pocket) via the importFromIntegration mutation.
 *
 * @param integrationId - id of the integration to import from
 * @throws the first error code returned by the mutation, if any
 */
export async function importFromIntegrationMutation(
  integrationId: string
): Promise<void> {
  const mutation = `
  mutation ImportFromIntegration($integrationId: ID!) {
    importFromIntegration(integrationId:$integrationId) {
      ... on ImportFromIntegrationError {
        errorCodes
      }
      ... on ImportFromIntegrationSuccess {
        success
      }
    }
  }`

  const data = await gqlFetcher(mutation, { integrationId })
  // (removed leftover console.log debug statements that dumped the raw
  // response and error to the browser console)
  const output = data as ImportFromIntegrationDataResponseData | undefined
  // Surface only the first error code; callers treat it as opaque.
  const error = output?.importFromIntegration?.errorCodes?.find(() => true)
  if (error) {
    throw error
  }
}

View File

@ -4,8 +4,9 @@ import { IntegrationType } from '../queries/useGetIntegrationsQuery'
export type SetIntegrationInput = {
id?: string
name: string
type: IntegrationType
token: string,
token: string
enabled: boolean
}
@ -20,6 +21,7 @@ type SetIntegrationData = {
type Integration = {
id: string
name: string
type: IntegrationType
token: string
enabled: boolean
@ -31,13 +33,12 @@ export async function setIntegrationMutation(
input: SetIntegrationInput
): Promise<Integration | undefined> {
const mutation = gql`
mutation SetIntegration(
$input: SetIntegrationInput!
) {
mutation SetIntegration($input: SetIntegrationInput!) {
setIntegration(input: $input) {
... on SetIntegrationSuccess {
integration {
id
name
type
token
enabled
@ -52,12 +53,11 @@ export async function setIntegrationMutation(
}
`
const data = await gqlFetcher(mutation, { input }) as SetIntegrationResult
const data = (await gqlFetcher(mutation, { input })) as SetIntegrationResult
const output = data as any
const error = data.setIntegration?.errorCodes?.find(() => true)
if (error) {
if (error === 'INVALID_TOKEN')
throw 'Your token is invalid.'
if (error === 'INVALID_TOKEN') throw 'Your token is invalid.'
throw error
}
return output.setIntegration?.integration

View File

@ -4,6 +4,7 @@ import { publicGqlFetcher } from '../networkHelpers'
export interface Integration {
id: string
name: string
type: IntegrationType
token: string
enabled: boolean
@ -11,7 +12,7 @@ export interface Integration {
updatedAt: Date
}
export type IntegrationType = 'READWISE'
export type IntegrationType = 'EXPORT' | 'IMPORT'
interface IntegrationsQueryResponse {
isValidating: boolean
@ -34,6 +35,7 @@ export function useGetIntegrationsQuery(): IntegrationsQueryResponse {
... on IntegrationsSuccess {
integrations {
id
name
type
token
enabled

View File

@ -18,6 +18,10 @@ import { useGetIntegrationsQuery } from '../../lib/networking/queries/useGetInte
import { useGetWebhooksQuery } from '../../lib/networking/queries/useGetWebhooksQuery'
import { deleteIntegrationMutation } from '../../lib/networking/mutations/deleteIntegrationMutation'
import { showErrorToast, showSuccessToast } from '../../lib/toastHelpers'
import { fetchEndpoint } from '../../lib/appConfig'
import { setIntegrationMutation } from '../../lib/networking/mutations/setIntegrationMutation'
import { cookieValue } from '../../lib/cookieHelpers'
import { importFromIntegrationMutation } from '../../lib/networking/mutations/importFromIntegrationMutation'
// Styles
const Header = styled(Box, {
@ -65,7 +69,10 @@ export default function Integrations(): JSX.Element {
const router = useRouter()
const readwiseConnected = useMemo(() => {
return integrations.find((i) => i.type == 'READWISE')
return integrations.find((i) => i.name == 'READWISE' && i.type == 'EXPORT')
}, [integrations])
const pocketConnected = useMemo(() => {
return integrations.find((i) => i.name == 'POCKET' && i.type == 'IMPORT')
}, [integrations])
const deleteIntegration = async (id: string) => {
@ -78,6 +85,58 @@ export default function Integrations(): JSX.Element {
}
}
const importFromIntegration = async (id: string) => {
try {
await importFromIntegrationMutation(id)
showSuccessToast('Import started')
} catch (err) {
showErrorToast('Error: ' + err)
}
}
const redirectToPocket = () => {
// create a form and submit it to the backend
const form = document.createElement('form')
form.method = 'POST'
form.action = `${fetchEndpoint}/integration/pocket/auth`
document.body.appendChild(form)
form.submit()
}
useEffect(() => {
const connectToPocket = async () => {
try {
// get the token from cookies
const token = cookieValue('pocketRequestToken', document.cookie)
if (!token) {
showErrorToast('There was an error connecting to Pocket.')
return
}
const result = await setIntegrationMutation({
token,
name: 'POCKET',
type: 'IMPORT',
enabled: true,
})
if (result) {
revalidate()
showSuccessToast('Connected with Pocket.')
} else {
showErrorToast('There was an error connecting to Pocket.')
}
} catch (err) {
showErrorToast('Error: ' + err)
}
}
if (!router.isReady) return
if (
router.query.state == 'pocketAuthorizationFinished' &&
!pocketConnected
) {
connectToPocket()
}
}, [router])
useEffect(() => {
setIntegrationsArray([
{
@ -121,8 +180,23 @@ export default function Integrations(): JSX.Element {
action: () => router.push('/settings/webhooks'),
},
},
{
icon: '/static/icons/pocket.svg',
title: 'Pocket',
subText: 'Pocket is a place to save articles, videos, and more.',
button: {
text: pocketConnected ? 'Import' : 'Connect to Pocket',
icon: <Link size={16} weight={'bold'} />,
style: 'ctaDarkYellow',
action: () => {
pocketConnected
? importFromIntegration(pocketConnected.id)
: redirectToPocket()
},
},
},
])
}, [readwiseConnected, router, webhooks])
}, [pocketConnected, readwiseConnected, webhooks])
return (
<SettingsLayout>
@ -142,7 +216,7 @@ export default function Integrations(): JSX.Element {
css={{
width: '80%',
margin: '0 auto',
height: '100%',
height: '800px',
'@smDown': {
width: '100%',
},

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 333334 300072" shape-rendering="geometricPrecision" text-rendering="geometricPrecision" image-rendering="optimizeQuality" fill-rule="evenodd" clip-rule="evenodd"><path d="M29158 0C10996 1184 0 11552 0 29993v110439c0 89700 89214 160194 166389 159638 86778-626 166945-73904 166945-159638V29993c0-18511-11691-28949-29993-29993H29158zm70286 89840l66945 63604 67015-63604c29993-12595 43075 21643 30758 31663l-87265 83298c-2853 2714-18093 2714-20946 0l-87265-83299c-11761-11065 3548-45859 30758-31663z" fill="#ef4056"/></svg>

After

Width:  |  Height:  |  Size: 572 B