For URLs that will be forced to a backend fetch, don't send full content from the extension

This is useful with YouTube, which has massive page sizes.
This commit is contained in:
Jackson Harper
2022-12-12 11:13:34 +08:00
parent 58209b5c41
commit e22c014385
3 changed files with 27 additions and 1 deletions

View File

@ -321,7 +321,12 @@ async function saveArticle (tab) {
}
const requestId = uuidv4()
const { type, pageInfo, doc, uploadContentObjUrl } = response;
var { type } = response;
const { pageInfo, doc, uploadContentObjUrl } = response;
if (type == 'html' && handleBackendUrl(tab.url)) {
type = 'url'
}
switch(type) {
case 'html': {

View File

@ -98,3 +98,17 @@ window.CREATE_ARTICLE_SAVING_REQUEST_QUERY = `mutation CreateArticleSavingReques
}
}
}`;
/**
 * Decide whether a URL's content should be fetched by the backend instead of
 * being uploaded from the extension (useful for very large pages, e.g. YouTube).
 *
 * @param {string} url - The page URL to test.
 * @returns {boolean} true if the URL matches a force-backend-fetch pattern;
 *   false otherwise (including on any unexpected error — safe default).
 */
function handleBackendUrl(url) {
  // Patterns for sites whose content the backend should fetch itself.
  const FORCE_CONTENT_FETCH_URLS = [
    // twitter status url regex
    /twitter\.com\/(?:#!\/)?(\w+)\/status(?:es)?\/(\d+)(?:\/.*)?/,
    // youtube watch/embed/v and youtu.be short links
    // (fix: the dot in "youtu.be" is escaped so hosts like "youtuXbe" no longer match)
    /^((?:https?:)?\/\/)?((?:www|m)\.)?((?:youtube\.com|youtu\.be))(\/(?:[\w-]+\?v=|embed\/|v\/)?)([\w-]+)(\S+)?$/,
  ];
  try {
    return FORCE_CONTENT_FETCH_URLS.some((regex) => regex.test(url));
  } catch (error) {
    // Log the actual error (previously dropped) and fall through to false.
    console.log('error checking url', url, error);
  }
  return false;
}

View File

@ -187,6 +187,13 @@
if (pdfContent) {
return pdfContent
}
try {
if (handleBackendUrl(window.location.href)) {
return { type: 'url' }
}
} catch {
console.log('error checking url')
}
async function scrollPage () {
const scrollingEl = (document.scrollingElement || document.body);