diff --git a/package.json b/package.json
index db456c96..e23a30ab 100644
--- a/package.json
+++ b/package.json
@@ -54,6 +54,7 @@
     "electron-updater": "^5.0.1",
     "fast-xml-parser": "^4.0.6",
     "got": "11.8.3",
+    "hpagent": "^1.0.0",
     "keytar": "^7.9.0",
     "node-html-parser": "^5.3.3",
     "realm": "^10.13.0",
diff --git a/packages/main/index.ts b/packages/main/index.ts
index 8a3ec940..573a9e97 100644
--- a/packages/main/index.ts
+++ b/packages/main/index.ts
@@ -42,8 +42,8 @@ async function createWindow() {
     title: "Main window",
     width: 1440,
     height: 860,
-    minWidth: 1440,
-    minHeight: 860,
+    minWidth: 800,
+    minHeight: 600,
     useContentSize: true,
     webPreferences: {
       preload: join(__dirname, "../preload/index.cjs"),
diff --git a/packages/preload/interactors/entity-interactor.ts b/packages/preload/interactors/entity-interactor.ts
index 52deab3d..ec4ce91a 100644
--- a/packages/preload/interactors/entity-interactor.ts
+++ b/packages/preload/interactors/entity-interactor.ts
@@ -164,6 +164,22 @@ export class EntityInteractor {
     );
   }
 
+  async addWholeFolder(folder: string) {
+    const files = await this.fileRepository.listPDFs(folder);
+    const PDFfiles = files.filter((file) => path.extname(file) === ".pdf");
+
+    this.add(PDFfiles);
+  }
+
+  async addFromZoteroCSV(csvFile: string) {
+    if (path.extname(csvFile) === ".csv") {
+      const paperEntityDrafts = await this.fileRepository.parseZoteroCSV(
+        csvFile
+      );
+      this.update(JSON.stringify(paperEntityDrafts));
+    }
+  }
+
   // ============================================================
   // Delete
   async delete(ids: string[]) {
@@ -251,7 +267,7 @@ export class EntityInteractor {
     const updatePromise = async (entityDrafts: PaperEntityDraft[]) => {
       const movedEntityDrafts = await Promise.all(
         entityDrafts.map((entityDraft: PaperEntityDraft) =>
-          this.fileRepository.move(entityDraft)
+          this.fileRepository.move(entityDraft, true)
         )
       );
diff --git a/packages/preload/models/PaperEntityDraft.ts b/packages/preload/models/PaperEntityDraft.ts
index 6f0ba634..2942e2f7 100644
--- a/packages/preload/models/PaperEntityDraft.ts
+++ b/packages/preload/models/PaperEntityDraft.ts
@@ -102,8 +102,8 @@ export class PaperEntityDraft {
     return entity;
   }
 
-  setValue(key: string, value: unknown) {
-    if (value && value !== "undefined") {
+  setValue(key: string, value: unknown, allowEmpty = false) {
+    if ((value || allowEmpty) && value !== "undefined") {
       this[key] = value;
     }
   }
diff --git a/packages/preload/repositories/exporter-repository/exporters/bib-exporter.ts b/packages/preload/repositories/exporter-repository/exporters/bib-exporter.ts
index 50f82a9c..b7d11f31 100644
--- a/packages/preload/repositories/exporter-repository/exporters/bib-exporter.ts
+++ b/packages/preload/repositories/exporter-repository/exporters/bib-exporter.ts
@@ -13,7 +13,12 @@ export class BibExporter extends Exporter {
     for (const entity of entityDrafts) {
       let citeKey = "";
-      const nameArray = entity.authors.split(", ")[0].split(" ");
+      let nameArray;
+      if (entity.authors.includes(";")) {
+        nameArray = entity.authors.split(";")[0].split(" ");
+      } else {
+        nameArray = entity.authors.split(", ")[0].split(" ");
+      }
       const lastName = nameArray[nameArray.length - 1];
       citeKey += lastName.toLowerCase();
       citeKey += entity.pubTime;
@@ -36,7 +41,7 @@
     const pubDetails = {
       volume: entity.volume,
-      number: entity.number,
+      issue: entity.number,
       pages: entity.pages,
       publisher: entity.publisher,
     };
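Note on the bib-exporter change above: cite keys are now derived correctly for both author-separator styles. A standalone sketch that approximates the new logic (this helper is illustrative, not part of the patch):

    // Derive a cite key of the form "<lastname><year>" from an authors string
    // that may be ";"-separated (e.g. Zotero exports) or ", "-separated.
    function makeCiteKey(authors: string, pubTime: string): string {
      const firstAuthor = authors.includes(";")
        ? authors.split(";")[0]
        : authors.split(", ")[0];
      const nameParts = firstAuthor.trim().split(" ");
      return nameParts[nameParts.length - 1].toLowerCase() + pubTime;
    }

    // makeCiteKey("Jane Doe; John Smith", "2022") -> "doe2022"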
diff --git a/packages/preload/repositories/file-repository/backends/backend.ts b/packages/preload/repositories/file-repository/backends/backend.ts
index f4f353d3..f38fdc50 100644
--- a/packages/preload/repositories/file-repository/backends/backend.ts
+++ b/packages/preload/repositories/file-repository/backends/backend.ts
@@ -8,7 +8,10 @@ export interface FileBackend {
   check(): void;
   access(url: string, download: boolean): Promise<string>;
-  move(entity: PaperEntityDraft): Promise<PaperEntityDraft | null>;
+  move(
+    entity: PaperEntityDraft,
+    forceDelete: boolean
+  ): Promise<PaperEntityDraft | null>;
   remove(entity: PaperEntityDraft): Promise<boolean>;
   removeFile(url: string): Promise<boolean>;
 }
diff --git a/packages/preload/repositories/file-repository/backends/local-backend.ts b/packages/preload/repositories/file-repository/backends/local-backend.ts
index 99a859ff..13c4f87a 100644
--- a/packages/preload/repositories/file-repository/backends/local-backend.ts
+++ b/packages/preload/repositories/file-repository/backends/local-backend.ts
@@ -32,15 +32,23 @@ export class LocalFileBackend implements FileBackend {
       this.preference.get("appLibFolder") as string,
       "file://"
     );
-    const pathStat = await fsPromise.lstat(fileURL.replace("file://", ""));
-    if (existsSync(fileURL.replace("file://", "")) && pathStat.isFile()) {
-      return Promise.resolve(fileURL);
+    if (existsSync(fileURL.replace("file://", ""))) {
+      const pathStat = await fsPromise.lstat(fileURL.replace("file://", ""));
+      if (pathStat.isFile()) {
+        return Promise.resolve(fileURL);
+      } else {
+        return Promise.resolve("");
+      }
     } else {
       return Promise.resolve("");
     }
   }
 
-  async _move(sourceURL: string, targetURL: string): Promise<boolean> {
+  async _move(
+    sourceURL: string,
+    targetURL: string,
+    forceDelete: boolean = false
+  ): Promise<boolean> {
     const _sourceURL = sourceURL.replace("file://", "");
     const _targetURL = targetURL.replace("file://", "");
     const stat = await fsPromise.lstat(_sourceURL);
@@ -50,7 +58,7 @@ export class LocalFileBackend implements FileBackend {
     try {
       await fsPromise.copyFile(_sourceURL, _targetURL);
       if (
-        (this.preference.get("deleteSourceFile") as boolean) &&
+        ((this.preference.get("deleteSourceFile") as boolean) || forceDelete) &&
         _sourceURL !== _targetURL
       ) {
         await fsPromise.unlink(sourceURL);
@@ -65,11 +73,34 @@ export class LocalFileBackend implements FileBackend {
     }
   }
 
-  async move(entity: PaperEntityDraft): Promise<PaperEntityDraft | null> {
-    const targetFileName =
-      entity.title.replace(/[^a-zA-Z0-9 ]/g, "").replace(/\s/g, "_") +
-      "_" +
-      entity._id.toString();
+  async move(
+    entity: PaperEntityDraft,
+    forceDelete: boolean = false
+  ): Promise<PaperEntityDraft | null> {
+    let title = entity.title.replace(/[^a-zA-Z0-9 ]/g, "").replace(/\s/g, "_");
+    let id = entity._id.toString();
+    if (this.preference.get("renamingFormat") === "short") {
+      title = title
+        .split("_")
+        .map((word: string) => {
+          if (word) {
+            return word.slice(0, 1);
+          } else {
+            return "";
+          }
+        })
+        .filter((c: string) => c && c === c.toUpperCase())
+        .join("");
+    } else if (this.preference.get("renamingFormat") === "authortitle") {
+      let author = entity.authors.split(",")[0];
+      if (author !== entity.authors) {
+        author = `${author} et al`;
+      }
+      title = `${author} - ${title.slice(0, 20)}`;
+      id = id.slice(-5, -1);
+    }
+
+    const targetFileName = title + "_" + id;
 
     // 1. Move main file.
     const sourceMainURL = constructFileURL(
@@ -84,7 +115,11 @@ export class LocalFileBackend implements FileBackend {
       false,
       this.preference.get("appLibFolder") as string
     );
-    const mainSuccess = await this._move(sourceMainURL, targetMainURL);
+    const mainSuccess = await this._move(
+      sourceMainURL,
+      targetMainURL,
+      forceDelete
+    );
     if (mainSuccess) {
       entity.mainURL = path.basename(targetMainURL);
     } else {
@@ -106,7 +141,11 @@ export class LocalFileBackend implements FileBackend {
       sourceSupURL: string,
       targetSupURL: string
     ) => {
-      const supSuccess = await this._move(sourceSupURL, targetSupURL);
+      const supSuccess = await this._move(
+        sourceSupURL,
+        targetSupURL,
+        forceDelete
+      );
       if (supSuccess) {
         return path.basename(targetSupURL);
       } else {
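The renaming block above is duplicated verbatim in the webdav backend below; extracting a shared helper would keep the two in sync. For reference, roughly what the three renamingFormat options produce for a title of "Deep Residual Learning for Image Recognition" by "Kaiming He, ..." (hypothetical values):

    // "full"        -> Deep_Residual_Learning_for_Image_Recognition_<id>
    // "short"       -> DRLIR_<id>  (initials of capitalized words only)
    // "authortitle" -> Kaiming He et al - Deep_Residual_Learni_<last 4 chars of id>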
diff --git a/packages/preload/repositories/file-repository/backends/webdav-backend.ts b/packages/preload/repositories/file-repository/backends/webdav-backend.ts
index b470ec8c..34950858 100644
--- a/packages/preload/repositories/file-repository/backends/webdav-backend.ts
+++ b/packages/preload/repositories/file-repository/backends/webdav-backend.ts
@@ -71,18 +71,14 @@ export class WebDavFileBackend implements FileBackend {
   async access(url: string, download = true): Promise<string> {
     await this.check();
     const basename = path.basename(url);
-    const localURL = path.join(
-      this.sharedState.dbState.defaultPath.get() as string,
-      "file_cache",
-      basename
+    const localURL = constructFileURL(
+      basename,
+      true,
+      false,
+      this.preference.get("appLibFolder") as string
     );
     // Check if file exists on local temp disk.
-    const pathStat = await fsPromise.lstat(localURL.replace("file://", ""));
-    if (!pathStat.isFile()) {
-      return "";
-    }
     const isExist = existsSync(localURL);
-
     if (!isExist) {
       if (download) {
         try {
@@ -109,6 +105,11 @@ export class WebDavFileBackend implements FileBackend {
           return "";
         }
       }
+    } else {
+      const pathStat = await fsPromise.lstat(localURL.replace("file://", ""));
+      if (!pathStat.isFile()) {
+        return "";
+      }
     }
 
     return Promise.resolve(
@@ -132,6 +133,28 @@ export class WebDavFileBackend implements FileBackend {
     return true;
   }
 
+  async _local2localMove(
+    sourceURL: string,
+    targetURL: string
+  ): Promise<boolean> {
+    const _sourceURL = sourceURL.replace("file://", "");
+    const _targetURL = targetURL.replace("file://", "");
+    const stat = await fsPromise.lstat(_sourceURL);
+    if (stat.isDirectory()) {
+      return false;
+    }
+    try {
+      await fsPromise.copyFile(_sourceURL, _targetURL);
+      return true;
+    } catch (error) {
+      this.sharedState.set(
+        "viewState.alertInformation",
+        `Could not copy file: ${error as string}`
+      );
+      return false;
+    }
+  }
+
   async _local2serverMove(
     sourceURL: string,
     targetURL: string
@@ -163,17 +186,27 @@ export class WebDavFileBackend implements FileBackend {
     return true;
   }
 
-  async _move(sourceURL: string, targetURL: string): Promise<boolean> {
+  async _move(
+    sourceURL: string,
+    targetURL: string,
+    targetCacheURL: string,
+    forceDelete: boolean = false
+  ): Promise<boolean> {
     try {
       let success;
       if (sourceURL.startsWith("file://")) {
+        success = await this._local2localMove(sourceURL, targetCacheURL);
         success = await this._local2serverMove(sourceURL, targetURL);
         if (this.preference.get("deleteSourceFile") as boolean) {
           await fsPromise.unlink(sourceURL);
         }
       } else if (sourceURL.startsWith("webdav://")) {
         success = await this._server2serverMove(sourceURL, targetURL);
-        if (this.preference.get("deleteSourceFile") as boolean) {
+        if (
+          ((this.preference.get("deleteSourceFile") as boolean) ||
+            forceDelete) &&
+          sourceURL !== targetURL
+        ) {
           await this.webdavClient?.deleteFile(
             sourceURL.replace("webdav://", "/paperlib/")
          );
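In the webdav backend's _move above, moving a local file into the library is now a two-step flow: copy into the on-disk cache, then upload to the server. A simplified sketch of the pattern (assumed helper signature, not the backend's exact code):

    import fsPromise from "fs/promises";

    // Copy to the local cache, then upload; the upload result decides success.
    // In the patch itself, note that the cache-copy result is immediately
    // overwritten by the upload result, so a failed cache copy goes unnoticed.
    async function cacheThenUpload(
      sourceURL: string,
      targetCacheURL: string,
      upload: (src: string) => Promise<boolean>
    ): Promise<boolean> {
      await fsPromise.copyFile(
        sourceURL.replace("file://", ""),
        targetCacheURL.replace("file://", "")
      );
      return upload(sourceURL);
    }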
@@ -191,12 +224,35 @@ export class WebDavFileBackend implements FileBackend {
     }
   }
 
-  async move(entity: PaperEntityDraft): Promise<PaperEntityDraft | null> {
+  async move(
+    entity: PaperEntityDraft,
+    forceDelete: boolean = false
+  ): Promise<PaperEntityDraft | null> {
     await this.check();
-    const targetFileName =
-      entity.title.replace(/[^a-zA-Z0-9 ]/g, "").replace(/\s/g, "_") +
-      "_" +
-      entity._id.toString();
+    let title = entity.title.replace(/[^a-zA-Z0-9 ]/g, "").replace(/\s/g, "_");
+    let id = entity._id.toString();
+    if (this.preference.get("renamingFormat") === "short") {
+      title = title
+        .split("_")
+        .map((word: string) => {
+          if (word) {
+            return word.slice(0, 1);
+          } else {
+            return "";
+          }
+        })
+        .filter((c: string) => c && c === c.toUpperCase())
+        .join("");
+    } else if (this.preference.get("renamingFormat") === "authortitle") {
+      let author = entity.authors.split(",")[0];
+      if (author !== entity.authors) {
+        author = `${author} et al`;
+      }
+      title = `${author} - ${title.slice(0, 20)}`;
+      id = id.slice(-5, -1);
+    }
+
+    const targetFileName = title + "_" + id;
 
     // 1. Move main file.
     let sourceMainURL;
@@ -218,7 +274,19 @@ export class WebDavFileBackend implements FileBackend {
       "",
       "webdav://"
     );
-    const mainSuccess = await this._move(sourceMainURL, targetMainURL);
+    const targetMainCacheURL = constructFileURL(
+      targetFileName + "_main" + path.extname(sourceMainURL),
+      true,
+      false,
+      this.preference.get("appLibFolder") as string
+    );
+
+    const mainSuccess = await this._move(
+      sourceMainURL,
+      targetMainURL,
+      targetMainCacheURL,
+      forceDelete
+    );
     if (mainSuccess) {
       entity.mainURL = path.basename(targetMainURL);
     } else {
@@ -239,9 +307,15 @@ export class WebDavFileBackend implements FileBackend {
 
     const SupMovePromise = async (
       sourceSupURL: string,
-      targetSupURL: string
+      targetSupURL: string,
+      targetSupCacheURL: string
     ) => {
-      const supSuccess = await this._move(sourceSupURL, targetSupURL);
+      const supSuccess = await this._move(
+        sourceSupURL,
+        targetSupURL,
+        targetSupCacheURL,
+        forceDelete
+      );
       if (supSuccess) {
         return path.basename(targetSupURL);
       } else {
@@ -258,7 +332,18 @@ export class WebDavFileBackend implements FileBackend {
       "",
       "webdav://"
     );
-    const supMovePromise = SupMovePromise(sourceSupURL, targetSupURL);
+    const targetSupCacheURL = constructFileURL(
+      targetFileName + `_sup${i}` + path.extname(sourceSupURL),
+      true,
+      false,
+      this.preference.get("appLibFolder") as string
+    );
+
+    const supMovePromise = SupMovePromise(
+      sourceSupURL,
+      targetSupURL,
+      targetSupCacheURL
+    );
     supMovePromiseList.push(supMovePromise);
   }
 
@@ -288,10 +373,11 @@ export class WebDavFileBackend implements FileBackend {
 
   async _removeFileCache(url: string) {
     const basename = path.basename(url);
-    const localURL = path.join(
-      this.sharedState.dbState.defaultPath.get() as string,
-      "file_cache",
-      basename
+    const localURL = constructFileURL(
+      basename,
+      true,
+      false,
+      this.preference.get("appLibFolder") as string
     );
     await fsPromise.unlink(localURL);
   }
diff --git a/packages/preload/repositories/file-repository/file-repository.ts b/packages/preload/repositories/file-repository/file-repository.ts
index 4029e83e..f7f48e83 100644
--- a/packages/preload/repositories/file-repository/file-repository.ts
+++ b/packages/preload/repositories/file-repository/file-repository.ts
@@ -1,6 +1,8 @@
+import fs from "fs";
 import { PaperEntityDraft } from "../..//models/PaperEntityDraft";
 import { SharedState } from "../../utils/appstate";
 import { Preference } from "../../utils/preference";
+import { getAllFiles } from "../../utils/path";
 import { FileBackend } from "./backends/backend";
 import { LocalFileBackend } from "./backends/local-backend";
 import { WebDavFileBackend } from "./backends/webdav-backend";
@@ -29,8 +31,11 @@ export class FileRepository {
   async access(url: string, download: boolean): Promise<string> {
     return await this.backend.access(url, download);
   }
-  async move(entity: PaperEntityDraft): Promise<PaperEntityDraft | null> {
-    return await this.backend.move(entity);
+  async move(
+    entity: PaperEntityDraft,
+    forceDelete = false
+  ): Promise<PaperEntityDraft | null> {
+    return await this.backend.move(entity, forceDelete);
   }
   async remove(entity: PaperEntityDraft): Promise<boolean> {
     return await this.backend.remove(entity);
@@ -39,6 +44,89 @@ export class FileRepository {
     return await this.backend.removeFile(url);
   }
 
+  async listPDFs(folderUrl: string): Promise<string[]> {
+    return new Promise((resolve, reject) => {
+      try {
+        const files = getAllFiles(folderUrl);
+        resolve(files);
+      } catch (e) {
+        console.error(e);
+        reject([]);
+      }
+    });
+  }
+
+  async parseZoteroCSV(csvUrl: string) {
+    const data = fs.readFileSync(csvUrl, "utf8");
+    let dataList = data.split("\n");
+
+    const keys = dataList[0].split('","');
+    const values = dataList.slice(1).map((line) => {
+      if (line) {
+        const vs = line.split('","');
+        return vs.reduce((acc, v, i) => {
+          acc[keys[i]] = v === '""' ? "" : v;
+          return acc;
+        }, {} as any);
+      }
+    });
+
+    let paperEntityDrafts = [];
+    for (const value of values) {
+      try {
+        if (value) {
+          const entityDraft = new PaperEntityDraft(true);
+          entityDraft.setValue("title", value.Title);
+          if (value.Author) {
+            const authors = value.Author.split(";")
+              .map((author: string) => {
+                if (author.trim()) {
+                  const first_last = author.split(",").map((author: string) => {
+                    return author.trim();
+                  });
+                  first_last.reverse();
+                  return first_last.join(" ");
+                }
+              })
+              .join(", ");
+            entityDraft.setValue("authors", authors);
+          }
+          entityDraft.setValue("publication", value["Publication Title"]);
+          entityDraft.setValue("pubTime", value["Publication Year"]);
+          entityDraft.setValue("doi", value["DOI"]);
+          entityDraft.setValue("addTime", new Date(value["Date Added"]));
+          const pubType = [
+            "journalArticle",
+            "conferencePaper",
+            "others",
+            "book",
+          ].indexOf(value["Item Type"]);
+          entityDraft.setValue("pubType", pubType > -1 ? pubType : 2);
+          console.log(value["File Attachments"]);
+          const attachments = value["File Attachments"].split(";");
+          const mainURL = attachments[0];
+          const supURLs = attachments.slice(1).map((url: string) => url.trim());
+          if (mainURL) {
+            entityDraft.setValue("mainURL", mainURL);
+          }
+          if (supURLs.length > 0) {
+            entityDraft.setValue("supURLs", supURLs);
+          }
+          entityDraft.setValue("pages", value["Pages"]);
+          entityDraft.setValue("volume", value["Volume"]);
+          entityDraft.setValue("number", value["Issue"]);
+          entityDraft.setValue("publisher", value["Publisher"]);
+
+          paperEntityDrafts.push(entityDraft);
+        }
+      } catch (e) {
+        console.error(e);
+      }
+    }
+
+    return paperEntityDrafts;
+  }
+
   initBackend(): FileBackend {
     if (this.preference.get("syncFileStorage") === "local") {
       return new LocalFileBackend(this.sharedState, this.preference);
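The Zotero parser above splits rows on '","', which holds for typical Zotero exports because every field is quoted; fields containing escaped quotes or embedded newlines would still be mis-split, so this should be treated as Zotero-specific rather than general CSV handling. A usage sketch of the two new import paths (hypothetical paths, assuming an EntityInteractor instance named entityInteractor):

    // Recursively import every PDF found under a folder.
    await entityInteractor.addWholeFolder("/Users/me/Papers");

    // Import metadata and attachment paths from a Zotero CSV export.
    await entityInteractor.addFromZoteroCSV("/Users/me/zotero-export.csv");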
"../../utils/preference"; +import { getAllFiles } from "../../utils/path"; import { FileBackend } from "./backends/backend"; import { LocalFileBackend } from "./backends/local-backend"; import { WebDavFileBackend } from "./backends/webdav-backend"; @@ -29,8 +31,11 @@ export class FileRepository { async access(url: string, download: boolean): Promise { return await this.backend.access(url, download); } - async move(entity: PaperEntityDraft): Promise { - return await this.backend.move(entity); + async move( + entity: PaperEntityDraft, + fourceDelete = false + ): Promise { + return await this.backend.move(entity, fourceDelete); } async remove(entity: PaperEntityDraft): Promise { return await this.backend.remove(entity); @@ -39,6 +44,89 @@ export class FileRepository { return await this.backend.removeFile(url); } + async listPDFs(folderUrl: string): Promise { + return new Promise((resolve, reject) => { + try { + const files = getAllFiles(folderUrl); + resolve(files); + } catch (e) { + console.error(e); + reject([]); + } + }); + } + + async parseZoteroCSV(csvUrl: string) { + const data = fs.readFileSync(csvUrl, "utf8"); + let dataList = data.split("\n"); + + const keys = dataList[0].split('","'); + const values = dataList.slice(1).map((line) => { + if (line) { + const vs = line.split('","'); + return vs.reduce((acc, v, i) => { + acc[keys[i]] = v === '""' ? "" : v; + return acc; + }, {} as any); + } + }); + + let paperEntityDrafts = []; + for (const value of values) { + try { + if (value) { + const entityDraft = new PaperEntityDraft(true); + entityDraft.setValue("title", value.Title); + if (value.Author) { + const authors = value.Author.split(";") + .map((author: string) => { + if (author.trim()) { + const first_last = author.split(",").map((author: string) => { + return author.trim(); + }); + first_last.reverse(); + return first_last.join(" "); + } + }) + .join(", "); + entityDraft.setValue("authors", authors); + } + entityDraft.setValue("publication", value["Publication Title"]); + entityDraft.setValue("pubTime", value["Publication Year"]); + entityDraft.setValue("doi", value["DOI"]); + entityDraft.setValue("addTime", new Date(value["Date Added"])); + const pubType = [ + "journalArticle", + "conferencePaper", + "others", + "book", + ].indexOf(value["Item Type"]); + entityDraft.setValue("pubType", pubType > -1 ? 
diff --git a/packages/preload/repositories/scraper-repository/scrapers/google-scholar.ts b/packages/preload/repositories/scraper-repository/scrapers/google-scholar.ts
index 79965329..67b1d590 100644
--- a/packages/preload/repositories/scraper-repository/scrapers/google-scholar.ts
+++ b/packages/preload/repositories/scraper-repository/scrapers/google-scholar.ts
@@ -17,7 +17,8 @@ async function scrapeImpl(
   ) as ScraperRequestType;
 
   if (enable) {
-    const response = await safeGot(scrapeURL, headers);
+    const agent = this.getProxyAgent();
+    const response = await safeGot(scrapeURL, headers, agent);
     const root = parse(response?.body);
     const results = root.querySelector("#gs_res_ccl_mid");
@@ -52,7 +53,7 @@
       if (dataid) {
         const citeUrl = `https://scholar.google.com/scholar?q=info:${dataid}:scholar.google.com/&output=cite&scirp=1&hl=en`;
-        const citeResponse = await safeGot(citeUrl, headers);
+        const citeResponse = await safeGot(citeUrl, headers, agent);
         const citeRoot = parse(citeResponse?.body);
         const citeBibtexNode = citeRoot.lastChild
           .childNodes[0] as any as HTMLElement;
@@ -62,7 +63,8 @@
         if (citeBibtexUrl) {
           const citeBibtexResponse = await safeGot(
             citeBibtexUrl,
-            headers
+            headers,
+            agent
           );
           bibtex = citeBibtexResponse?.body;
         }
@@ -119,7 +121,7 @@ export class GoogleScholarScraper extends Scraper {
     const bibtexs = BibtexParser.parseToJSON(rawResponse);
     for (const bibtex of bibtexs) {
       if (bibtex.year) {
-        entityDraft.year = bibtex.year;
+        entityDraft.pubTime = `${bibtex.year}`;
       }
       if (bibtex.author) {
         const authors = bibtex.author
diff --git a/packages/preload/repositories/scraper-repository/scrapers/pdf.ts b/packages/preload/repositories/scraper-repository/scrapers/pdf.ts
index b54bfd1a..6cd86546 100644
--- a/packages/preload/repositories/scraper-repository/scrapers/pdf.ts
+++ b/packages/preload/repositories/scraper-repository/scrapers/pdf.ts
@@ -68,8 +68,18 @@ export class PDFScraper extends Scraper {
     };
 
     const firstPageText = rawResponse.firstPageText;
 
-    entityDraft.setValue("title", metaData.info.Title);
-    entityDraft.setValue("authors", metaData.info.Author);
+    entityDraft.setValue("title", metaData.info.Title || "");
+    let authors;
+    if (metaData.info.Author?.includes(";")) {
+      authors = metaData.info.Author.split(";")
+        .map((author) => {
+          return author.trim();
+        })
+        .join(", ");
+    } else {
+      authors = metaData.info.Author || "";
+    }
+    entityDraft.setValue("authors", authors);
 
     // Extract arXiv ID
     const arxivIds = firstPageText.match(
diff --git a/packages/preload/repositories/scraper-repository/scrapers/scraper.ts b/packages/preload/repositories/scraper-repository/scrapers/scraper.ts
index 85ae905b..647e996a 100644
--- a/packages/preload/repositories/scraper-repository/scrapers/scraper.ts
+++ b/packages/preload/repositories/scraper-repository/scrapers/scraper.ts
@@ -1,4 +1,5 @@
 import got, { Response } from "got";
+import { HttpProxyAgent, HttpsProxyAgent } from "hpagent";
 
 import { PDFFileResponseType } from "./pdf";
 import { PaperEntityDraft } from "../../../models/PaperEntityDraft";
@@ -20,6 +21,7 @@ export interface ScraperType {
     entityDraft: PaperEntityDraft
   ): PaperEntityDraft | void;
   scrapeImpl: (_: PaperEntityDraft) => Promise<PaperEntityDraft>;
+  getProxyAgent(): Record<string, unknown>;
 }
 
 export class Scraper implements ScraperType {
@@ -35,6 +37,47 @@ export class Scraper implements ScraperType {
     return this.scrapeImpl(entityDraft);
   }
 
+  getProxyAgent() {
+    const httpproxyUrl = this.preference.get("httpproxy") as string;
+    const httpsproxyUrl = this.preference.get("httpsproxy") as string;
+
+    let agents = {};
+    if (httpproxyUrl || httpsproxyUrl) {
+      let validHttpproxyUrl, validHttpsproxyUrl;
+      if (httpproxyUrl) {
+        validHttpproxyUrl = httpproxyUrl;
+      } else {
+        validHttpproxyUrl = httpsproxyUrl;
+      }
+      if (httpsproxyUrl) {
+        validHttpsproxyUrl = httpsproxyUrl;
+      } else {
+        validHttpsproxyUrl = httpproxyUrl;
+      }
+      // @ts-ignore
+      agents["http"] = new HttpProxyAgent({
+        keepAlive: true,
+        keepAliveMsecs: 1000,
+        maxSockets: 256,
+        maxFreeSockets: 256,
+        scheduling: "lifo",
+        proxy: validHttpproxyUrl,
+      });
+
+      // @ts-ignore
+      agents["https"] = new HttpsProxyAgent({
+        keepAlive: true,
+        keepAliveMsecs: 1000,
+        maxSockets: 256,
+        maxFreeSockets: 256,
+        scheduling: "lifo",
+        proxy: validHttpsproxyUrl,
+      });
+    }
+
+    return agents;
+  }
+
   preProcess(_entityDraft: PaperEntityDraft): ScraperRequestType | void {
     throw new Error("Method not implemented.");
   }
@@ -58,10 +101,12 @@ async function scrapeImpl(
   ) as ScraperRequestType;
 
   if (enable) {
-    const options = {
+    const agent = this.getProxyAgent();
+    let options = {
       headers: headers,
       retry: 0,
      timeout: 5000,
+      agent: agent,
     };
     const response = await got(scrapeURL, options);
     return this.parsingProcess(response, entityDraft) as PaperEntityDraft;
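got accepts per-protocol agents via its agent option, which is exactly what getProxyAgent builds with hpagent above. A minimal standalone sketch of the same pattern (hypothetical proxy address):

    import got from "got";
    import { HttpProxyAgent, HttpsProxyAgent } from "hpagent";

    const proxy = "http://127.0.0.1:7890"; // hypothetical local proxy
    const agent = {
      http: new HttpProxyAgent({ keepAlive: true, proxy }),
      https: new HttpsProxyAgent({ keepAlive: true, proxy }),
    };

    // Inside an async function: the request is tunneled through the proxy.
    const response = await got("https://example.com", { agent });

Since getProxyAgent returns an empty object when no proxy is configured, passing it unconditionally leaves got's default agents in place.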
"lifo", + proxy: validHttpproxyUrl, + }); + + // @ts-ignore + agnets["https"] = new HttpsProxyAgent({ + keepAlive: true, + keepAliveMsecs: 1000, + maxSockets: 256, + maxFreeSockets: 256, + scheduling: "lifo", + proxy: validHttpsproxyUrl, + }); + } + + return agnets; + } + preProcess(_entityDraft: PaperEntityDraft): ScraperRequestType | void { throw new Error("Method not implemented."); } @@ -58,10 +101,12 @@ async function scrapeImpl( ) as ScraperRequestType; if (enable) { - const options = { + const agent = this.getProxyAgent(); + let options = { headers: headers, retry: 0, timeout: 5000, + agent: agent, }; const response = await got(scrapeURL, options); return this.parsingProcess(response, entityDraft) as PaperEntityDraft; diff --git a/packages/preload/repositories/web-importer-repository/importers/google-scholar.ts b/packages/preload/repositories/web-importer-repository/importers/google-scholar.ts index cce372a7..d98d3d80 100644 --- a/packages/preload/repositories/web-importer-repository/importers/google-scholar.ts +++ b/packages/preload/repositories/web-importer-repository/importers/google-scholar.ts @@ -19,6 +19,7 @@ export class GoogleScholarWebImporter extends WebImporter { const paper = parse(webContent.document); if (paper) { + const agent = this.getProxyAgent(); entityDraft = new PaperEntityDraft(true); const fileUrlNode = paper.querySelector(".gs_or_ggsm")?.firstChild; // @ts-ignore @@ -44,13 +45,13 @@ export class GoogleScholarWebImporter extends WebImporter { " ", "+" )}`; - await safeGot(scrapeUrl, headers); + await safeGot(scrapeUrl, headers, agent); if (titleStr) { const dataid = title.parentNode.parentNode.attributes["data-aid"]; if (dataid) { const citeUrl = `https://scholar.google.com/scholar?q=info:${dataid}:scholar.google.com/&output=cite&scirp=1&hl=en`; - const citeResponse = await safeGot(citeUrl, headers); + const citeResponse = await safeGot(citeUrl, headers, agent); const citeRoot = parse(citeResponse?.body); const citeBibtexNode = citeRoot.lastChild .childNodes[0] as any as HTMLElement; @@ -60,7 +61,8 @@ export class GoogleScholarWebImporter extends WebImporter { if (citeBibtexUrl) { const citeBibtexResponse = await safeGot( citeBibtexUrl, - headers + headers, + agent ); const bibtexStr = citeBibtexResponse?.body; if (bibtexStr) { diff --git a/packages/preload/repositories/web-importer-repository/importers/importer.ts b/packages/preload/repositories/web-importer-repository/importers/importer.ts index 7a48eab9..ac0b3ede 100644 --- a/packages/preload/repositories/web-importer-repository/importers/importer.ts +++ b/packages/preload/repositories/web-importer-repository/importers/importer.ts @@ -3,6 +3,7 @@ import os from "os"; import stream from "stream"; import { promisify } from "util"; import got from "got"; +import { HttpProxyAgent, HttpsProxyAgent } from "hpagent"; import { createWriteStream } from "fs"; import { PaperEntityDraft } from "../../../models/PaperEntityDraft"; @@ -24,6 +25,8 @@ export interface WebImporterType { parsingProcess( webContent: WebContentType ): Promise; + + getProxyAgent(): Record; } export class WebImporter implements WebImporterType { @@ -49,6 +52,47 @@ export class WebImporter implements WebImporterType { return this.urlRegExp.test(webContent.url); } + getProxyAgent() { + const httpproxyUrl = this.preference.get("httpproxy") as string; + const httpsproxyUrl = this.preference.get("httpsproxy") as string; + + let agnets = {}; + if (httpproxyUrl || httpsproxyUrl) { + let validHttpproxyUrl, validHttpsproxyUrl; + if (httpproxyUrl) { + 
diff --git a/packages/preload/utils/got.ts b/packages/preload/utils/got.ts
index 6bfb0627..d3651cd2 100644
--- a/packages/preload/utils/got.ts
+++ b/packages/preload/utils/got.ts
@@ -1,13 +1,18 @@
 import got, { HTTPError } from "got";
 import { ipcRenderer } from "electron";
 
-export async function safeGot(url: string, headers: Record<string, string>) {
+export async function safeGot(
+  url: string,
+  headers: Record<string, string>,
+  agent: any
+) {
   const options = {
     headers: headers,
     retry: 0,
     timeout: {
       request: 5000,
     },
+    agent: agent,
   };
 
   let response;
diff --git a/packages/preload/utils/path.ts b/packages/preload/utils/path.ts
index 15e1b625..4ae0daa5 100644
--- a/packages/preload/utils/path.ts
+++ b/packages/preload/utils/path.ts
@@ -1,5 +1,6 @@
 import path from "path";
 import os from "os";
+import fs from "fs";
 
 export const constructFileURL = (
   url: string,
@@ -40,3 +41,23 @@ export const constructFileURL = (
     }
   }
 };
+
+export const getAllFiles = function (
+  dirPath: string,
+  arrayOfFiles: string[] | null = null
+): string[] {
+  let files = fs.readdirSync(dirPath);
+
+  arrayOfFiles = (arrayOfFiles || []) as string[];
+
+  files.forEach(function (file) {
+    if (fs.statSync(dirPath + "/" + file).isDirectory()) {
+      arrayOfFiles = getAllFiles(dirPath + "/" + file, arrayOfFiles);
+    } else {
+      arrayOfFiles = arrayOfFiles as string[];
+      arrayOfFiles.push(path.join(dirPath, "/", file));
+    }
+  });
+
+  return arrayOfFiles;
+};
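getAllFiles walks a directory tree synchronously and returns every file path it finds, which is what listPDFs (and in turn addWholeFolder) build on. A quick usage sketch (hypothetical folder):

    import path from "path";
    import { getAllFiles } from "./utils/path";

    // Recursively collect every file, then keep only the PDFs.
    const files = getAllFiles("/Users/me/Papers");
    const pdfs = files.filter((f) => path.extname(f) === ".pdf");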
diff --git a/packages/preload/utils/preference.ts b/packages/preload/utils/preference.ts
index 798dc2e7..46c88fe1 100644
--- a/packages/preload/utils/preference.ts
+++ b/packages/preload/utils/preference.ts
@@ -11,6 +11,7 @@ export interface PreferenceStore {
   invertColor: boolean;
   sidebarSortBy: "name" | "count" | "color";
   sidebarSortOrder: "asce" | "desc";
+  renamingFormat: "full" | "short" | "authortitle";
 
   enableExportReplacement: boolean;
   exportReplacement: Array<{ from: string; to: string }>;
@@ -40,6 +41,9 @@ export interface PreferenceStore {
   pwcScraper: boolean;
   googlescholarScraper: boolean;
 
+  httpproxy: string;
+  httpsproxy: string;
+
   lastVersion: string;
 
   [Key: string]: unknown;
@@ -54,6 +58,7 @@ const defaultPreferences: PreferenceStore = {
   invertColor: true,
   sidebarSortBy: "name",
   sidebarSortOrder: "asce",
+  renamingFormat: "full",
 
   enableExportReplacement: true,
   exportReplacement: [],
@@ -83,6 +88,9 @@ const defaultPreferences: PreferenceStore = {
   pwcScraper: true,
   googlescholarScraper: true,
 
+  httpproxy: "",
+  httpsproxy: "",
+
   lastVersion: "",
 };
diff --git a/packages/renderer/src/ui/edit-view/components/multiselect-box.vue b/packages/renderer/src/ui/edit-view/components/multiselect-box.vue
index b1d42694..5ffb34d6 100644
--- a/packages/renderer/src/ui/edit-view/components/multiselect-box.vue
+++ b/packages/renderer/src/ui/edit-view/components/multiselect-box.vue
@@ -10,9 +10,10 @@ const props = defineProps({
     type: Array as () => String[],
     required: true,
   },
-  values: {
+  existValues: {
     type: Array as () => String[],
     required: true,
+    default: () => [],
   },
 });
 
@@ -25,7 +26,7 @@ const onSelected = (value: string) => {
 const onDeselected = (value: string) => {
   emit(
     "changed",
-    props.values.filter((v) => v !== value)
+    props.existValues.filter((v) => v !== value)
   );
 };
@@ -108,7 +109,7 @@ const onDeselected = (value: string) => {
+  [template markup not preserved]
       tag.replaceAll(' ', ''))
[diff header not preserved; the onSaveClicked context suggests this hunk comes from edit-view.vue]
@@ -130,7 +130,7 @@ const onSaveClicked = async () => {
+  [template markup not preserved]
      folder.replaceAll(' ', ''))
diff --git a/packages/renderer/src/ui/main-view/detail-view/components/pub-details.vue b/packages/renderer/src/ui/main-view/detail-view/components/pub-details.vue
index 6e123f84..5b89d5f2 100644
--- a/packages/renderer/src/ui/main-view/detail-view/components/pub-details.vue
+++ b/packages/renderer/src/ui/main-view/detail-view/components/pub-details.vue
@@ -41,7 +41,7 @@ const isExpanded = ref(false);
     >
       volume: {{ volume }}
       pages: {{ pages }}
-      number: {{ number }}
+      issue/number: {{ number }}
       publisher: {{ publisher }}
diff --git a/packages/renderer/src/ui/preference-view/general-view.vue b/packages/renderer/src/ui/preference-view/general-view.vue
index 83ac470f..5249ea30 100644
--- a/packages/renderer/src/ui/preference-view/general-view.vue
+++ b/packages/renderer/src/ui/preference-view/general-view.vue
@@ -46,12 +46,26 @@ const onThemeUpdate = (value: string) => {
+  [added template lines not preserved; net +14 lines, consistent with a picker for the new "renamingFormat" preference]
   General Options
diff --git a/packages/renderer/src/ui/preference-view/import-view.vue b/packages/renderer/src/ui/preference-view/import-view.vue
new file mode 100644
index 00000000..71f13a61
--- /dev/null
+++ b/packages/renderer/src/ui/preference-view/import-view.vue
@@ -0,0 +1,84 @@
+[84-line new component; its script and template were not preserved in this excerpt]
diff --git a/packages/renderer/src/ui/preference-view/preference-view.vue b/packages/renderer/src/ui/preference-view/preference-view.vue
index 91d5ec07..d5a0a7a1 100644
--- a/packages/renderer/src/ui/preference-view/preference-view.vue
+++ b/packages/renderer/src/ui/preference-view/preference-view.vue
@@ -6,6 +6,8 @@ import {
   BIconBinoculars,
   BIconCloudArrowUp,
   BIconBoxArrowDown,
+  BIconBoxArrowInDown,
+  BIconGlobe,
   BIconInfoCircle,
 } from "bootstrap-icons-vue";
 
@@ -15,6 +17,8 @@ import ScraperView from "./scraper-view.vue";
 import CloudView from "./cloud-view.vue";
 import AboutView from "./about-view.vue";
 import ExportView from "./export-view.vue";
+import ProxyView from "./proxy-view.vue";
+import ImportView from "./import-view.vue";
 
 import { PreferenceStore } from "../../../../preload/utils/preference";
 
@@ -74,6 +78,13 @@ window.appInteractor.registerState(
     >
+  [added template lines not preserved; they register the new Import and Proxy tabs in the sidebar]
diff --git a/packages/renderer/src/ui/preference-view/proxy-view.vue b/packages/renderer/src/ui/preference-view/proxy-view.vue
[new-file header lines not preserved; the surviving script section follows]
+<script setup lang="ts">
+import { ref } from "vue";
+
+import { PreferenceStore } from "../../../../preload/utils/preference";
+
+const props = defineProps({
+  preference: {
+    type: Object as () => PreferenceStore,
+    required: true,
+  },
+});
+
+const httpproxy = ref(props.preference.httpproxy);
+const httpsproxy = ref(props.preference.httpsproxy);
+
+const onUpdate = (key: string, value: unknown) => {
+  window.appInteractor.updatePreference(key, value);
+};
+</script>
+[template markup not preserved]