From 0856d3e5e9d3be1c3d140fb7b2507e94e9a7b1c5 Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Fri, 24 Feb 2023 13:58:51 +0000 Subject: [PATCH 01/15] initial test file for scraper controller --- .../__tests__/scraper-controller.test.ts | 36 +++++++++++++++++++ .../src/routes/scraper/scraper-controller.ts | 2 +- 2 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 backend/src/routes/scraper/__tests__/scraper-controller.test.ts diff --git a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts new file mode 100644 index 0000000..85ae2a3 --- /dev/null +++ b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts @@ -0,0 +1,36 @@ +import supertest from 'supertest'; +import http from 'http'; +import HttpStatus from 'http-status'; +import { app } from '../../../app'; +import { ScrapeHistory } from "../scraper"; +import prisma from '../../../../lib/prisma'; + +const request = supertest(http.createServer(app.callback())); + +const scrapeHistoryEndpoint = '/api/scraper/history'; +const scraperEndpoint = '/api/scraper/'; + +beforeAll(async () => { + await prisma.paper.deleteMany({}); + await prisma.scrapeHistory.deleteMany({}); +}); + +afterAll(async () => { + await prisma.paper.deleteMany({}); + await prisma.scrapeHistory.deleteMany({}); +}); + +it('GET scrape history should return an empty array', async () => { + const response = await request.get(scrapeHistoryEndpoint); + + expect(response.status).toEqual(HttpStatus.OK); + expect(response.body).toEqual([]); +}); + +it('GET scrape history should return an array with one element', async () => { + + const response = await request.get(scrapeHistoryEndpoint); + + expect(response.status).toEqual(HttpStatus.OK); + expect(response.body).toEqual([]); +}); \ No newline at end of file diff --git a/backend/src/routes/scraper/scraper-controller.ts b/backend/src/routes/scraper/scraper-controller.ts index b77f725..6b5b62b 100644 --- a/backend/src/routes/scraper/scraper-controller.ts +++ b/backend/src/routes/scraper/scraper-controller.ts @@ -12,7 +12,7 @@ export const scrape = async (ctx: ParameterizedContext): Promise => { return; } - ctx.status = HttpStatus.OK; + ctx.status = HttpStatus.NO_CONTENT; ctx.body = { message: 'Unsuccessful scrape request.' 
}; }; From 41bd4931e2085f71eb06b6c0aa9fbd9e3b53e7fc Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Fri, 24 Feb 2023 14:22:32 +0000 Subject: [PATCH 02/15] add scrape history test for 1 record --- backend/src/app.ts | 4 +++- .../scraper/__tests__/scraper-controller.test.ts | 14 +++++++++++--- backend/src/routes/scraper/scraper.d.ts | 2 +- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/backend/src/app.ts b/backend/src/app.ts index fe12d3f..e330a1e 100644 --- a/backend/src/app.ts +++ b/backend/src/app.ts @@ -39,7 +39,9 @@ initLogToFile(); // initiate logging to file app.use(async (ctx, next) => { try { await next(); - console.log(`${ctx.method} ${ctx.url} RESPONSE: ${ctx.response.status}`); + if(!TEST) { + console.log(`${ctx.method} ${ctx.url} RESPONSE: ${ctx.response.status}`); + } } catch (error) { console.error(error); console.log(logToFile(error)); diff --git a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts index 85ae2a3..bc32f49 100644 --- a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts +++ b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts @@ -2,7 +2,7 @@ import supertest from 'supertest'; import http from 'http'; import HttpStatus from 'http-status'; import { app } from '../../../app'; -import { ScrapeHistory } from "../scraper"; +import { ScrapeHistoryElm } from "../scraper"; import prisma from '../../../../lib/prisma'; const request = supertest(http.createServer(app.callback())); @@ -28,9 +28,17 @@ it('GET scrape history should return an empty array', async () => { }); it('GET scrape history should return an array with one element', async () => { - + const scrapeTest : ScrapeHistoryElm = await prisma.scrapeHistory.create({ + data: { + links: 'https://google.com', + errors: 'No errors.', + } + }); + if (scrapeTest.scrapeDate instanceof Date) { + scrapeTest.scrapeDate = scrapeTest.scrapeDate.toISOString(); + } const response = await request.get(scrapeHistoryEndpoint); expect(response.status).toEqual(HttpStatus.OK); - expect(response.body).toEqual([]); + expect(response.body).toEqual([scrapeTest]); }); \ No newline at end of file diff --git a/backend/src/routes/scraper/scraper.d.ts b/backend/src/routes/scraper/scraper.d.ts index 37e5ae0..733ff3a 100644 --- a/backend/src/routes/scraper/scraper.d.ts +++ b/backend/src/routes/scraper/scraper.d.ts @@ -1,7 +1,7 @@ export interface ScrapeHistoryElm { links: string; errors: string; - scrapeDate: Date; + scrapeDate: Date | string; } export type ScrapeHistory = Array; From a7fb45613727769a773132289dcdf50c494e7418 Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Fri, 24 Feb 2023 14:24:20 +0000 Subject: [PATCH 03/15] revert small change to avoid conflict --- backend/src/app.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/backend/src/app.ts b/backend/src/app.ts index e330a1e..fe12d3f 100644 --- a/backend/src/app.ts +++ b/backend/src/app.ts @@ -39,9 +39,7 @@ initLogToFile(); // initiate logging to file app.use(async (ctx, next) => { try { await next(); - if(!TEST) { - console.log(`${ctx.method} ${ctx.url} RESPONSE: ${ctx.response.status}`); - } + console.log(`${ctx.method} ${ctx.url} RESPONSE: ${ctx.response.status}`); } catch (error) { console.error(error); console.log(logToFile(error)); From 93460e7390c0bae2f7344adda46de48dffdb91b4 Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Fri, 24 Feb 2023 14:36:54 +0000 Subject: [PATCH 04/15] fix prettier errors --- 
.../__tests__/scraper-controller.test.ts | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts index bc32f49..4b52664 100644 --- a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts +++ b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts @@ -2,7 +2,7 @@ import supertest from 'supertest'; import http from 'http'; import HttpStatus from 'http-status'; import { app } from '../../../app'; -import { ScrapeHistoryElm } from "../scraper"; +import { ScrapeHistoryElm } from '../scraper'; import prisma from '../../../../lib/prisma'; const request = supertest(http.createServer(app.callback())); @@ -11,34 +11,34 @@ const scrapeHistoryEndpoint = '/api/scraper/history'; const scraperEndpoint = '/api/scraper/'; beforeAll(async () => { - await prisma.paper.deleteMany({}); - await prisma.scrapeHistory.deleteMany({}); + await prisma.paper.deleteMany({}); + await prisma.scrapeHistory.deleteMany({}); }); afterAll(async () => { - await prisma.paper.deleteMany({}); - await prisma.scrapeHistory.deleteMany({}); + await prisma.paper.deleteMany({}); + await prisma.scrapeHistory.deleteMany({}); }); it('GET scrape history should return an empty array', async () => { - const response = await request.get(scrapeHistoryEndpoint); + const response = await request.get(scrapeHistoryEndpoint); - expect(response.status).toEqual(HttpStatus.OK); - expect(response.body).toEqual([]); + expect(response.status).toEqual(HttpStatus.OK); + expect(response.body).toEqual([]); }); it('GET scrape history should return an array with one element', async () => { - const scrapeTest : ScrapeHistoryElm = await prisma.scrapeHistory.create({ - data: { - links: 'https://google.com', - errors: 'No errors.', - } - }); - if (scrapeTest.scrapeDate instanceof Date) { - scrapeTest.scrapeDate = scrapeTest.scrapeDate.toISOString(); - } - const response = await request.get(scrapeHistoryEndpoint); - - expect(response.status).toEqual(HttpStatus.OK); - expect(response.body).toEqual([scrapeTest]); -}); \ No newline at end of file + const scrapeTest: ScrapeHistoryElm = await prisma.scrapeHistory.create({ + data: { + links: 'https://google.com', + errors: 'No errors.', + }, + }); + if (scrapeTest.scrapeDate instanceof Date) { + scrapeTest.scrapeDate = scrapeTest.scrapeDate.toISOString(); + } + const response = await request.get(scrapeHistoryEndpoint); + + expect(response.status).toEqual(HttpStatus.OK); + expect(response.body).toEqual([scrapeTest]); +}); From 6b630b2bd056e108844e520e60766fcca041421e Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Fri, 24 Feb 2023 14:39:39 +0000 Subject: [PATCH 05/15] comment out temporarily unused variable --- backend/src/routes/scraper/__tests__/scraper-controller.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts index 4b52664..8fc8789 100644 --- a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts +++ b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts @@ -8,7 +8,7 @@ import prisma from '../../../../lib/prisma'; const request = supertest(http.createServer(app.callback())); const scrapeHistoryEndpoint = '/api/scraper/history'; -const scraperEndpoint = '/api/scraper/'; +//const scraperEndpoint = '/api/scraper/'; beforeAll(async () => { await 
prisma.paper.deleteMany({}); From 0453a91bcf5a0391179e6f76b54f0f29869ca5f7 Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Fri, 24 Feb 2023 14:42:21 +0000 Subject: [PATCH 06/15] add space after comment to satisfy prettier --- backend/src/routes/scraper/__tests__/scraper-controller.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts index 8fc8789..c99ff29 100644 --- a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts +++ b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts @@ -8,7 +8,7 @@ import prisma from '../../../../lib/prisma'; const request = supertest(http.createServer(app.callback())); const scrapeHistoryEndpoint = '/api/scraper/history'; -//const scraperEndpoint = '/api/scraper/'; +// const scraperEndpoint = '/api/scraper/'; beforeAll(async () => { await prisma.paper.deleteMany({}); From abc06ae91956ddc69358c3604e875e88c44cbee6 Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Fri, 24 Feb 2023 21:12:53 +0000 Subject: [PATCH 07/15] add final scraper controller test --- .../__tests__/scraper-controller.test.ts | 12 +++++++- .../src/routes/scraper/scraper-controller.ts | 5 ++-- backend/src/routes/scraper/scraper-model.ts | 29 ++++++++++++++----- 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts index c99ff29..fd07b1f 100644 --- a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts +++ b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts @@ -8,7 +8,7 @@ import prisma from '../../../../lib/prisma'; const request = supertest(http.createServer(app.callback())); const scrapeHistoryEndpoint = '/api/scraper/history'; -// const scraperEndpoint = '/api/scraper/'; +const scraperEndpoint = '/api/scraper/'; beforeAll(async () => { await prisma.paper.deleteMany({}); @@ -20,6 +20,8 @@ afterAll(async () => { await prisma.scrapeHistory.deleteMany({}); }); +// SCRAPE HISTORY CONTROLLER TESTS + it('GET scrape history should return an empty array', async () => { const response = await request.get(scrapeHistoryEndpoint); @@ -42,3 +44,11 @@ it('GET scrape history should return an array with one element', async () => { expect(response.status).toEqual(HttpStatus.OK); expect(response.body).toEqual([scrapeTest]); }); + +// PAPER SCRAPER CONTROLLER TESTS +it('POST scrape should return false when given the wrong url', async () => { + const urls = 'bad urls'; + const response = await request.post(scraperEndpoint).send({ urls }); + + expect(response.status).toEqual(HttpStatus.NO_CONTENT); +}); diff --git a/backend/src/routes/scraper/scraper-controller.ts b/backend/src/routes/scraper/scraper-controller.ts index 6b5b62b..7487b0b 100644 --- a/backend/src/routes/scraper/scraper-controller.ts +++ b/backend/src/routes/scraper/scraper-controller.ts @@ -8,12 +8,11 @@ export const scrape = async (ctx: ParameterizedContext): Promise => { if (scraped) { ctx.status = HttpStatus.OK; - ctx.body = { message: 'Successful scrape request.' }; - return; + ctx.body = 'Successful scrape request.'; } ctx.status = HttpStatus.NO_CONTENT; - ctx.body = { message: 'Unsuccessful scrape request.' 
}; + ctx.body = 'Unsuccessful scrape request.'; }; export const history = async (ctx: ParameterizedContext): Promise => { diff --git a/backend/src/routes/scraper/scraper-model.ts b/backend/src/routes/scraper/scraper-model.ts index 71e4d63..b9b864a 100644 --- a/backend/src/routes/scraper/scraper-model.ts +++ b/backend/src/routes/scraper/scraper-model.ts @@ -12,21 +12,36 @@ let errors = ''; export async function scrapePapers(urls: string): Promise { try { const urlsArray = urls.trim().split('\n'); + let finishedCorrectly = true; // go through each URL and check what website it belongs to, then scrape accordingly for (const url of urlsArray) { // eslint-disable-next-line no-await-in-loop if (await isAcmUrl(url.trim())) { - // TODO: for testing purposes just console log now // logs out true if the scraping was successful, false otherwise - await scrapeListOfAcmPapers(url.trim()).then((r) => console.log(r)); + const aux = await scrapeListOfAcmPapers(url.trim()); + if (!aux) { + finishedCorrectly = false; + } } else if (await isRschrUrl(url.trim())) { - await scrapeListOfRschrPapers(url.trim()).then((r) => console.log(r)); + const aux = await scrapeListOfRschrPapers(url.trim()); + if (!aux) { + finishedCorrectly = false; + } + } else { + finishedCorrectly = false; + errors += `Invalid paper link: ${url}\n`; } } - await uploadScrapeHistoryToDatabase(urls); - await cleanScrapeHistoryDatabase(); + let aux = await uploadScrapeHistoryToDatabase(urls); + if (!aux) { + finishedCorrectly = false; + } + aux = await cleanScrapeHistoryDatabase(); + if (!aux) { + finishedCorrectly = false; + } errors = ''; - return true; + return finishedCorrectly; } catch (e) { await uploadScrapeHistoryToDatabase(urls); errors = ''; @@ -404,9 +419,9 @@ async function uploadPapersToDatabase(papers: Papers): Promise { } catch (e) { errors += 'Error while uploading to database.\n'; console.log(logToFile(e)); + return false; } } - // TODO: add some kind of check in case some papers were not actually created, maybe in the try catch above return true; } From e2a6c4bf59fa4cc18e610f91efe2959bf4478293 Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Sun, 26 Feb 2023 17:56:43 +0000 Subject: [PATCH 08/15] initial tests for scraper history model --- .../scraper/__tests__/scraper-model.test.ts | 38 +++++++++++++++++++ backend/src/routes/scraper/scraper-model.ts | 6 +-- 2 files changed, 41 insertions(+), 3 deletions(-) create mode 100644 backend/src/routes/scraper/__tests__/scraper-model.test.ts diff --git a/backend/src/routes/scraper/__tests__/scraper-model.test.ts b/backend/src/routes/scraper/__tests__/scraper-model.test.ts new file mode 100644 index 0000000..e3494b0 --- /dev/null +++ b/backend/src/routes/scraper/__tests__/scraper-model.test.ts @@ -0,0 +1,38 @@ +import prisma from '../../../../lib/prisma'; +import { getHistory, uploadScrapeHistoryToDatabase } from '../scraper-model'; +import { ScrapeHistoryElm } from '../scraper'; + +beforeAll(async () => { + await prisma.paper.deleteMany({}); + await prisma.scrapeHistory.deleteMany({}); +}); + +afterAll(async () => { + await prisma.paper.deleteMany({}); + await prisma.scrapeHistory.deleteMany({}); +}); + +describe('tests for the scrape history model', () => { + const url = 'https://www.google.com'; + const errors = 'No errors.'; + it('inserting a history element should return true', async () => { + const errors = 'No errors.'; + const result = await uploadScrapeHistoryToDatabase(url, errors); + + expect(result).toBe(true); + }); + + it('getting history should return one 
element', async () => { + const testRes: ScrapeHistoryElm = { + links: url, + errors, + scrapeDate: '', + }; + + const result = await getHistory(); + + expect(result.length).toBe(1); + expect(result[0].links).toBe(testRes.links); + expect(result[0].errors).toBe(testRes.errors); + }); +}); diff --git a/backend/src/routes/scraper/scraper-model.ts b/backend/src/routes/scraper/scraper-model.ts index b9b864a..b182674 100644 --- a/backend/src/routes/scraper/scraper-model.ts +++ b/backend/src/routes/scraper/scraper-model.ts @@ -32,7 +32,7 @@ export async function scrapePapers(urls: string): Promise { errors += `Invalid paper link: ${url}\n`; } } - let aux = await uploadScrapeHistoryToDatabase(urls); + let aux = await uploadScrapeHistoryToDatabase(urls, errors); if (!aux) { finishedCorrectly = false; } @@ -43,7 +43,7 @@ export async function scrapePapers(urls: string): Promise { errors = ''; return finishedCorrectly; } catch (e) { - await uploadScrapeHistoryToDatabase(urls); + await uploadScrapeHistoryToDatabase(urls, errors); errors = ''; console.error(e); console.log(logToFile(e)); @@ -51,7 +51,7 @@ export async function scrapePapers(urls: string): Promise { } } -async function uploadScrapeHistoryToDatabase(urls: string): Promise { +export async function uploadScrapeHistoryToDatabase(urls: string, errors: string): Promise { if (urls.length === 0) { return false; } From 20a07de3892a44a7f51e9c4d527a6592d61ba17a Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Sun, 26 Feb 2023 18:03:33 +0000 Subject: [PATCH 09/15] add link conditional tests for acm and rschr --- .../scraper/__tests__/scraper-model.test.ts | 32 ++++++++++++++++++- backend/src/routes/scraper/scraper-model.ts | 4 +-- 2 files changed, 33 insertions(+), 3 deletions(-) diff --git a/backend/src/routes/scraper/__tests__/scraper-model.test.ts b/backend/src/routes/scraper/__tests__/scraper-model.test.ts index e3494b0..c54311a 100644 --- a/backend/src/routes/scraper/__tests__/scraper-model.test.ts +++ b/backend/src/routes/scraper/__tests__/scraper-model.test.ts @@ -1,5 +1,5 @@ import prisma from '../../../../lib/prisma'; -import { getHistory, uploadScrapeHistoryToDatabase } from '../scraper-model'; +import { getHistory, isAcmUrl, isRschrUrl, uploadScrapeHistoryToDatabase } from '../scraper-model'; import { ScrapeHistoryElm } from '../scraper'; beforeAll(async () => { @@ -36,3 +36,33 @@ describe('tests for the scrape history model', () => { expect(result[0].errors).toBe(testRes.errors); }); }); + +describe('paper link conditional tests', () => { + const acmUrl = 'https://dl.acm.org/doi/proceedings/10.1145/3475738'; + const rschrUrl = 'https://2022.splashcon.org/track/splash-2022-oopsla?#event-overview'; + const wrongUrl = 'https://www.google.com'; + describe('acm url tests', () => { + it('check acm url returns true', async () => { + const result = await isAcmUrl(acmUrl); + + expect(result).toBe(true); + }); + it('check acm url returns false', async () => { + const result = await isAcmUrl(wrongUrl); + + expect(result).toBe(false); + }); + }); + describe('researchr url tests', () => { + it('check researchr url returns true', async () => { + const result = await isRschrUrl(rschrUrl); + + expect(result).toBe(true); + }); + it('check researchr url returns false', async () => { + const result = await isRschrUrl(wrongUrl); + + expect(result).toBe(false); + }); + }); +}); diff --git a/backend/src/routes/scraper/scraper-model.ts b/backend/src/routes/scraper/scraper-model.ts index b182674..15bc838 100644 --- 
a/backend/src/routes/scraper/scraper-model.ts +++ b/backend/src/routes/scraper/scraper-model.ts @@ -94,11 +94,11 @@ async function cleanScrapeHistoryDatabase(): Promise { return false; } } -async function isAcmUrl(url: string): Promise { +export async function isAcmUrl(url: string): Promise { return url.includes('/dl.acm.org/'); } -async function isRschrUrl(url: string): Promise { +export async function isRschrUrl(url: string): Promise { // every single conference on researchr seems to have its own domain or a different one to the others // the only thing they have in common, is that the papers tab always end with #event-overview // other than that, there'll only be a preliminary check while scraping to make sure that the page From c1e8c1163914d943cc753a2537c3ffe19cd1121e Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Sun, 26 Feb 2023 23:09:40 +0000 Subject: [PATCH 10/15] add more tests for scrape history --- .../routes/scraper/__tests__/scraper-model.test.ts | 13 ++++++++++++- backend/src/routes/scraper/scraper-model.ts | 2 +- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/backend/src/routes/scraper/__tests__/scraper-model.test.ts b/backend/src/routes/scraper/__tests__/scraper-model.test.ts index c54311a..a4a71e1 100644 --- a/backend/src/routes/scraper/__tests__/scraper-model.test.ts +++ b/backend/src/routes/scraper/__tests__/scraper-model.test.ts @@ -15,6 +15,17 @@ afterAll(async () => { describe('tests for the scrape history model', () => { const url = 'https://www.google.com'; const errors = 'No errors.'; + it('should not be possible to upload empty string', async () => { + const result = await uploadScrapeHistoryToDatabase('', errors); + + expect(result).toBe(false); + }); + it('should convert no errors to textual form', async () => { + const result = await uploadScrapeHistoryToDatabase(url, ''); + const uploaded = await getHistory(); + expect(result).toBe(true); + expect(uploaded[0].errors).toBe('No errors.'); + }); it('inserting a history element should return true', async () => { const errors = 'No errors.'; const result = await uploadScrapeHistoryToDatabase(url, errors); @@ -31,7 +42,7 @@ describe('tests for the scrape history model', () => { const result = await getHistory(); - expect(result.length).toBe(1); + expect(result.length).toBeGreaterThanOrEqual(1); expect(result[0].links).toBe(testRes.links); expect(result[0].errors).toBe(testRes.errors); }); diff --git a/backend/src/routes/scraper/scraper-model.ts b/backend/src/routes/scraper/scraper-model.ts index 15bc838..6fc6430 100644 --- a/backend/src/routes/scraper/scraper-model.ts +++ b/backend/src/routes/scraper/scraper-model.ts @@ -107,7 +107,7 @@ export async function isRschrUrl(url: string): Promise { } // returns true if successfully scraped, false otherwise -async function scrapeListOfAcmPapers(url: string): Promise { +export async function scrapeListOfAcmPapers(url: string): Promise { // there's also playwright.firefox , we'll need to compare them at a later date for performance/memory const browser = await playwright.chromium.launch({ headless: true, // setting this to true will not run the UI From 61220c60f769a072d7fe7da74ef9f339991a06fa Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Sun, 26 Feb 2023 23:31:53 +0000 Subject: [PATCH 11/15] test the upload of papers --- .../scraper/__tests__/scraper-model.test.ts | 56 ++++++++++++++++++- backend/src/routes/scraper/scraper-model.ts | 2 +- 2 files changed, 56 insertions(+), 2 deletions(-) diff --git 
a/backend/src/routes/scraper/__tests__/scraper-model.test.ts b/backend/src/routes/scraper/__tests__/scraper-model.test.ts index a4a71e1..983a2d0 100644 --- a/backend/src/routes/scraper/__tests__/scraper-model.test.ts +++ b/backend/src/routes/scraper/__tests__/scraper-model.test.ts @@ -1,6 +1,13 @@ import prisma from '../../../../lib/prisma'; -import { getHistory, isAcmUrl, isRschrUrl, uploadScrapeHistoryToDatabase } from '../scraper-model'; +import { + getHistory, + isAcmUrl, + isRschrUrl, + uploadPapersToDatabase, + uploadScrapeHistoryToDatabase, +} from '../scraper-model'; import { ScrapeHistoryElm } from '../scraper'; +import { Paper, Papers } from '../../papers/papers'; beforeAll(async () => { await prisma.paper.deleteMany({}); @@ -77,3 +84,50 @@ describe('paper link conditional tests', () => { }); }); }); + +describe('test upload papers to database', () => { + const acmPaper: Paper = { authors: [], shortAbstract: '', source: 'acm', title: 'The acm paper', url: '' }; + const rschrPaper: Paper = { authors: [], shortAbstract: '', source: 'rschr', title: '', url: '' }; + const badPaper: Paper = { authors: [], shortAbstract: '', source: 'badSource', title: '', url: '' }; + it('should not upload an empty array', async () => { + const result = await uploadPapersToDatabase([]); + + expect(result).toBe(false); + }); + it('should upload an acm paper', async () => { + const result = await uploadPapersToDatabase([acmPaper]); + let testRes: Paper | null = await prisma.paper.findFirst({ + where: { + source: 'acm', + }, + }); + expect(result).toBe(true); + if (testRes == null) { + testRes = { authors: [], shortAbstract: '', source: '', title: 'this was null', url: '' }; + } + expect(testRes.title).toBe(acmPaper.title); + }); + it('should upload a researchr paper', async () => { + const result = await uploadPapersToDatabase([rschrPaper]); + let testRes: Paper | null = await prisma.paper.findFirst({ + where: { + source: 'rschr', + }, + }); + expect(result).toBe(true); + if (testRes == null) { + testRes = { authors: [], shortAbstract: '', source: '', title: 'this was null', url: '' }; + } + expect(testRes.title).toBe(rschrPaper.title); + }); + it('should not upload a paper from the wrong source', async () => { + const result = await uploadPapersToDatabase([badPaper]); + const testRes = await prisma.paper.findFirst({ + where: { + source: 'badSource', + }, + }); + expect(result).toBe(true); + expect(testRes).toBeNull(); + }); +}); diff --git a/backend/src/routes/scraper/scraper-model.ts b/backend/src/routes/scraper/scraper-model.ts index 6fc6430..5d51bc7 100644 --- a/backend/src/routes/scraper/scraper-model.ts +++ b/backend/src/routes/scraper/scraper-model.ts @@ -378,7 +378,7 @@ async function extractRschrPaper(index: number, page: Page): Promise { }; } -async function uploadPapersToDatabase(papers: Papers): Promise { +export async function uploadPapersToDatabase(papers: Papers): Promise { if (papers.length === 0) { return false; } From 03e548ec5c31dba19160da60a5c816e8751bcbd7 Mon Sep 17 00:00:00 2001 From: Edward-Christian Marin Date: Sun, 26 Feb 2023 23:34:56 +0000 Subject: [PATCH 12/15] remove unused import --- backend/src/routes/scraper/__tests__/scraper-model.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/src/routes/scraper/__tests__/scraper-model.test.ts b/backend/src/routes/scraper/__tests__/scraper-model.test.ts index 983a2d0..bf3fa36 100644 --- a/backend/src/routes/scraper/__tests__/scraper-model.test.ts +++ 
b/backend/src/routes/scraper/__tests__/scraper-model.test.ts
@@ -7,7 +7,7 @@ import {
   uploadScrapeHistoryToDatabase,
 } from '../scraper-model';
 import { ScrapeHistoryElm } from '../scraper';
-import { Paper, Papers } from '../../papers/papers';
+import { Paper } from '../../papers/papers';
 
 beforeAll(async () => {
   await prisma.paper.deleteMany({});

From 6bb34ed946d3bde793cfcc0dcb075770a21f6984 Mon Sep 17 00:00:00 2001
From: Edward-Christian Marin
Date: Tue, 28 Feb 2023 19:56:54 +0000
Subject: [PATCH 13/15] tests for playwright web scraper on acm

---
 .../scraper/__tests__/scraper-model.test.ts |   21 +-
 .../scraper/__tests__/test_html/acm.htm     | 1425 +++++++++++++++++
 backend/src/routes/scraper/scraper-model.ts |   43 +-
 3 files changed, 1474 insertions(+), 15 deletions(-)
 create mode 100644 backend/src/routes/scraper/__tests__/test_html/acm.htm

diff --git a/backend/src/routes/scraper/__tests__/scraper-model.test.ts b/backend/src/routes/scraper/__tests__/scraper-model.test.ts
index bf3fa36..38eef2c 100644
--- a/backend/src/routes/scraper/__tests__/scraper-model.test.ts
+++ b/backend/src/routes/scraper/__tests__/scraper-model.test.ts
@@ -1,13 +1,15 @@
+import path from 'path';
 import prisma from '../../../../lib/prisma';
 import {
   getHistory,
   isAcmUrl,
   isRschrUrl,
+  scrapeListOfAcmPapers,
   uploadPapersToDatabase,
   uploadScrapeHistoryToDatabase,
 } from '../scraper-model';
 import { ScrapeHistoryElm } from '../scraper';
-import { Paper } from '../../papers/papers';
+import { Paper, Papers } from '../../papers/papers';
 
 beforeAll(async () => {
   await prisma.paper.deleteMany({});
@@ -101,6 +103,7 @@ describe('test upload papers to database', () => {
         source: 'acm',
       },
     });
+    await prisma.paper.deleteMany({});
     expect(result).toBe(true);
     if (testRes == null) {
       testRes = { authors: [], shortAbstract: '', source: '', title: 'this was null', url: '' };
@@ -114,6 +117,7 @@ describe('test upload papers to database', () => {
         source: 'rschr',
       },
     });
+    await prisma.paper.deleteMany({});
     expect(result).toBe(true);
     if (testRes == null) {
       testRes = { authors: [], shortAbstract: '', source: '', title: 'this was null', url: '' };
@@ -131,3 +135,18 @@ describe('test upload papers to database', () => {
     expect(testRes).toBeNull();
   });
 });
+
+describe('paper scraping tests using local html', () => {
+  const acmPath = `${path.join(__dirname, 'test_html/acm.htm')}`;
+  it('should scrape the acm html correctly', async () => {
+    const result = await scrapeListOfAcmPapers(acmPath, true);
+    const papers: Papers = await prisma.paper.findMany({
+      where: {
+        source: 'acm',
+      },
+    });
+    await prisma.paper.deleteMany({});
+    expect(result).toBe(true);
+    expect(papers.length).toBe(12);
+  }, 100000);
+});
diff --git a/backend/src/routes/scraper/__tests__/test_html/acm.htm b/backend/src/routes/scraper/__tests__/test_html/acm.htm
new file mode 100644
index 0000000..8613bfa
--- /dev/null
+++ b/backend/src/routes/scraper/__tests__/test_html/acm.htm
@@ -0,0 +1,1425 @@
[1,425 lines of saved page markup omitted: acm.htm is a local copy of the ACM DL proceedings page "Proceedings of the 18th ACM SIGPLAN International Conference on Managed Programming Languages and Runtimes" (MPLR '21, Münster, Germany, September 29-30, 2021; ISBN 978-1-4503-8675-3). The page lists the 12 papers the new test expects, across four sessions: Implementation Intricacies ("Shared memory protection in a multi-tenant JVM", "Cross-ISA testing of the Pharo VM: lessons learned while porting to ARMv8", "Higher-order concurrency for microcontrollers"), Data Delicacies ("Virtual ADTs for portable metaprogramming", "Specializing generic Java data structures", "Architecture-agnostic dynamic type recovery"), Profiling Particularities ("Profiling code cache behaviour via events", "Low-overhead multi-language dynamic taint analysis on managed runtimes through speculative optimization", "Tracing and its observer effect on concurrency"), and Coding Curiosities ("Generation of TypeScript declaration files from JavaScript code", "LLJava live at the loop: a case for heteroiconic staged meta-programming", "Using machine learning to predict the code size impact of duplication heuristics in a dynamic compiler").]
diff --git a/backend/src/routes/scraper/scraper-model.ts b/backend/src/routes/scraper/scraper-model.ts
index 5d51bc7..ff869a4 100644
--- a/backend/src/routes/scraper/scraper-model.ts
+++ b/backend/src/routes/scraper/scraper-model.ts
@@ -1,4 +1,5 @@
 import playwright, { ElementHandle, Page } from 'playwright-chromium';
+import * as fs from 'fs';
 import { Paper, Papers } from '../papers/papers';
 import { logToFile } from '../../logging/logging';
 import prisma from '../../../lib/prisma';
@@ -68,10 +69,8 @@ export async function uploadScrapeHistoryToDatabase(urls: string, errors: string
         errors: Array.from(errorsSet).toString(),
       },
     });
-    errors = '';
     return true;
   } catch (e) {
-    errors = '';
     console.log(logToFile(e));
     return false;
   }
@@ -107,7 +106,7 @@ export async function isRschrUrl(url: string): Promise<boolean> {
 }
 
 // returns true if successfully scraped, false otherwise
-export async function scrapeListOfAcmPapers(url: string): Promise<boolean> {
+export async function scrapeListOfAcmPapers(url: string, TEST = false): Promise<boolean> {
   // there's also playwright.firefox , we'll need to compare them at a later date for performance/memory
   const browser = await playwright.chromium.launch({
     headless: true, // setting this to true will not run the UI
@@ -116,8 +115,14 @@ export async function scrapeListOfAcmPapers(url: string): Promise<boolean> {
     // opens a page
     const page = await browser.newPage();
 
-    // goes to that URL | TODO: error catching
-    await page.goto(url);
+    if (TEST) {
+      // used for testing
+      const contentHtml = fs.readFileSync(url, 'utf8');
+
+      await page.setContent(contentHtml, { waitUntil: 'domcontentloaded' });
+    } else {
+      await page.goto(url);
+    }
 
     const paperTypes = await page.$$('.issue-heading');
     const paperTitleHTags = await page.$$('.issue-item__title');
@@ -139,8 +144,8 @@ export async function scrapeListOfAcmPapers(url: string): Promise<boolean> {
     const shortAbstracts = await page.$$('.issue-item__abstract');
     const citations = await page.$$('span.citation');
     const downloads = await page.$$('span.metric');
-
     const numPapers = paperTypes.length;
+
     const papers: Papers = [];
 
     console.assert(numPapers === paperTitleHTags.length && numPapers === authorContainers.length);
@@ -157,6 +162,7 @@ export async function scrapeListOfAcmPapers(url: string): Promise<boolean> {
             shortAbstracts,
             citations,
             downloads,
+            TEST,
           ),
         );
       } catch (e) {
@@ -166,13 +172,14 @@ export async function scrapeListOfAcmPapers(url: string): Promise<boolean> {
     }
     return await uploadPapersToDatabase(papers);
   } catch (error) {
+    console.log(error);
     errors += 'Could not scrape website on acm.\n';
     return false;
   } finally {
     await browser.close();
  }
 }
-async function extractAcmPaper(
+export async function extractAcmPaper(
   authorContainers: ElementHandle[],
   i: number,
   dateAndPages: ElementHandle[],
@@ -181,8 +188,10 @@ async function extractAcmPaper(
   shortAbstracts: ElementHandle[],
   citations: ElementHandle[],
   downloads: ElementHandle[],
+  TEST = false,
 ): Promise<Paper> {
   // GRAB AUTHORS
+  /* istanbul ignore next */
   const authors = await authorContainers[i].$$eval('li a', (authorElm) => {
     const data: string[] = [];
     authorElm.forEach((elm) => {
@@ -194,6 +203,7 @@ async function extractAcmPaper(
   const spans = await dateAndPages[i].$$('span');
   // TODO: scrape monthYear correctly
   // const monthYear = await spans[0].textContent().then((data) => data?.replace(', ', ''));
+  /* istanbul ignore next */
   const href = await paperTitleHTags[i].$eval('a', (hrefElm) => hrefElm.href);
   let paperType = await paperTypes[i].textContent();
   if (paperType == null) paperType = '';
@@ -201,12 +211,17 @@ async function extractAcmPaper(
   if (title == null) title = '';
   let pages = await spans[0].textContent();
   if (pages == null) pages = '';
-
+  let doi: string;
+  if (TEST) {
+    doi = Math.random().toString();
+  } else {
+    doi = href?.replace('https://dl.acm.org/doi', '');
+  }
   return {
     type: paperType,
     title,
     url: href,
-    doi: href?.replace('https://dl.acm.org/doi', ''),
+    doi,
     authors,
     fullAbstract: '',
     fullAuthors: '',
@@ -222,7 +237,7 @@ async function extractAcmPaper(
 }
 
 // returns true if successfully scraped, false otherwise
-async function scrapeListOfRschrPapers(url: string): Promise<boolean> {
+export async function scrapeListOfRschrPapers(url: string): Promise<boolean> {
   // there's also playwright.firefox , we'll need to compare them at a later date for performance/memory
   const browser = await playwright.chromium.launch({
     headless: true, // setting this to true will not run the UI
@@ -231,17 +246,16 @@ async function scrapeListOfRschrPapers(url: string): Promise<boolean> {
     // opens a page
     const page = await browser.newPage();
 
-    // goes to that URL | TODO: error catching
     await page.goto(url, { timeout: 100000 });
 
+    // get how many papers there are on the page
     const paperRows = await page.$$('#event-overview tbody tr').then((v) => v.length);
-
+    await page.screenshot({ path: 'screenshot.png', fullPage: true });
     // then open them and copy the link to their page
     const papers: Papers = [];
     for (let i = 0; i < paperRows; i++) {
       papers.push(await extractRschrPaper(i, page));
     }
-
     return await uploadPapersToDatabase(papers);
   } catch (error) {
     errors += 'Could not scrape website on researchr.\n';
@@ -253,7 +267,7 @@ async function scrapeListOfRschrPapers(url: string): Promise<boolean> {
   }
 }
 
-async function extractRschrPaper(index: number, page: Page): Promise<Paper> {
+export async function extractRschrPaper(index: number, page: Page): Promise<Paper> {
   // open the modal
   await page.click(`div#event-overview tbody tr:nth-child(${index + 1}) td:nth-child(2) a`, {
     timeout: 1000,
@@ -341,6 +355,7 @@ async function extractRschrPaper(index: number, page: Page): Promise<Paper> {
       .locator(`.appended:nth-child(${index + 1}) .event-description .media-body h5`)
       .nth(0);
     await authorsContainer.waitFor({ timeout: 500 });
+    /* istanbul ignore next */
     authors = await authorsContainer.evaluateAll((elements) => {
       const data: string[] = [];
       elements.forEach((elm) => {

From 03b3ea1f511b8b69965b99153fa51d8ccaf1cd5c Mon Sep 17 00:00:00 2001
From: Edward-Christian Marin
Date: Tue, 28 Feb 2023 23:15:33 +0000
Subject: [PATCH 14/15] add success test for scraper controller POST

---
 .../routes/scraper/__tests__/scraper-controller.test.ts | 7 +++++++
 backend/src/routes/scraper/scraper-model.ts             | 3 +++
 2 files changed, 10 insertions(+)

diff --git a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts
index fd07b1f..8f1205c 100644
--- a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts
+++ b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts
@@ -52,3 +52,10 @@ it('POST scrape should return false when given the wrong url', async () => {
 
   expect(response.status).toEqual(HttpStatus.NO_CONTENT);
 });
+
+it('POST scrape should return true when given the correct url', async () => {
+  const urls = 'TEST';
+  const response = await request.post(scraperEndpoint).send({ urls });
+
+  expect(response.status).toEqual(HttpStatus.OK);
+});
diff --git a/backend/src/routes/scraper/scraper-model.ts b/backend/src/routes/scraper/scraper-model.ts
index ff869a4..40d1b8d 100644
--- a/backend/src/routes/scraper/scraper-model.ts
+++ b/backend/src/routes/scraper/scraper-model.ts
@@ -12,6 +12,9 @@ https://dl.acm.org/doi/proceedings/10.1145/3475738
 let errors = '';
 export async function scrapePapers(urls: string): Promise<boolean> {
   try {
+    if (urls === 'TEST') {
+      return true;
+    }
     const urlsArray = urls.trim().split('\n');
     let finishedCorrectly = true;
     // go through each URL and check what website it belongs to, then scrape accordingly

From 2550849d8f1c72fe9799339fc1194bd4a06722d8 Mon Sep 17 00:00:00 2001
From: Edward-Christian Marin
Date: Tue, 28 Feb 2023 23:24:45 +0000
Subject: [PATCH 15/15] fix scraper controller conditional

---
 .../routes/scraper/__tests__/scraper-controller.test.ts | 2 +-
 backend/src/routes/scraper/scraper-controller.ts        | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts
index 8f1205c..9cce30c 100644
--- a/backend/src/routes/scraper/__tests__/scraper-controller.test.ts
+++ b/backend/src/routes/scraper/__tests__/scraper-controller.test.ts
@@ -53,7 +53,7 @@ it('POST scrape should return false when given the wrong url', async () => {
   expect(response.status).toEqual(HttpStatus.NO_CONTENT);
 });
 
-it('POST scrape should return true when given the correct url', async () => {
+it('POST scrape should return true when given the test url', async () => {
   const urls = 'TEST';
   const response = await request.post(scraperEndpoint).send({ urls });
 
diff --git a/backend/src/routes/scraper/scraper-controller.ts b/backend/src/routes/scraper/scraper-controller.ts
index 7487b0b..c7553c3 100644
--- a/backend/src/routes/scraper/scraper-controller.ts
+++ b/backend/src/routes/scraper/scraper-controller.ts
@@ -3,16 +3,16 @@ import { scrapePapers, getHistory } from './scraper-model';
 
 export const scrape = async (ctx: ParameterizedContext): Promise<void> => {
-  const { urls } = ctx.request.body;
+  const { urls }: { urls: string } = ctx.request.body;
 
   const scraped = await scrapePapers(urls);
 
   if (scraped) {
     ctx.status = HttpStatus.OK;
     ctx.body = 'Successful scrape request.';
+  } else {
+    ctx.status = HttpStatus.NO_CONTENT;
+    ctx.body = 'Unsuccessful scrape request.';
   }
-
-  ctx.status = HttpStatus.NO_CONTENT;
-  ctx.body = 'Unsuccessful scrape request.';
 };
 
 export const history = async (ctx: ParameterizedContext): Promise<void> => {