This repository has been archived by the owner on Jan 11, 2023. It is now read-only.

Handle i/o backpressure with sapper export #869

Merged
merged 8 commits on Jun 9, 2020
6 changes: 0 additions & 6 deletions package-lock.json

Some generated files are not rendered by default.

3 changes: 1 addition & 2 deletions package.json
@@ -57,8 +57,7 @@
"svelte": "^3.6.9",
"svelte-loader": "^2.13.6",
"webpack": "^4.38.0",
"webpack-format-messages": "^2.0.5",
"yootils": "0.0.16"
"webpack-format-messages": "^2.0.5"
},
"peerDependencies": {
"svelte": "^3.5.0"
2 changes: 1 addition & 1 deletion src/api.ts
@@ -1,4 +1,4 @@
export { dev } from './api/dev';
export { build } from './api/build';
export { export } from './api/export';
export { find_page } from './api/find_page';
export { find_page } from './api/find_page';
202 changes: 120 additions & 82 deletions src/api/export.ts
@@ -2,16 +2,19 @@ import * as child_process from 'child_process';
import * as fs from 'fs';
import * as path from 'path';
import * as url from 'url';
import { promisify } from 'util';
import fetch from 'node-fetch';
import * as yootils from 'yootils';
import * as ports from 'port-authority';
import { exportQueue, FetchOpts, FetchRet } from './utils/export_queue';
import clean_html from './utils/clean_html';
import minify_html from './utils/minify_html';
import Deferred from './utils/Deferred';
import { noop } from './utils/noop';
import { parse as parseLinkHeader } from 'http-link-header';
import { rimraf, copy, mkdirp } from './utils/fs_utils';

const writeFile = promisify(fs.writeFile)

type Opts = {
build_dir?: string,
export_dir?: string,
@@ -32,6 +35,8 @@ type Ref = {
as: string
};

type URL = url.UrlWithStringQuery;

function resolve(from: string, to: string) {
return url.parse(url.resolve(from, to));
}
@@ -40,7 +45,10 @@ function cleanPath(path: string) {
return path.replace(/^\/|\/$|\/*index(.html)*$|.html$/g, '')
}

type URL = url.UrlWithStringQuery;
function get_href(attrs: string) {
const match = /href\s*=\s*(?:"(.*?)"|'(.*?)'|([^\s>]*))/.exec(attrs);
return match && (match[1] || match[2] || match[3]);
}

export { _export as export };

@@ -100,10 +108,9 @@ async function _export({

const seen = new Set();
const saved = new Set();
const q = yootils.queue(concurrent);

function save(url: string, status: number, type: string, body: string) {
const { pathname } = resolve(origin, url);
let { pathname } = resolve(origin, url);
let file = decodeURIComponent(pathname.slice(1));

if (saved.has(file)) return;
@@ -127,122 +134,153 @@
const export_file = path.join(export_dir, file);
if (fs.existsSync(export_file)) return;
mkdirp(path.dirname(export_file));
fs.writeFileSync(export_file, body);
}

proc.on('message', message => {
if (!message.__sapper__ || message.event !== 'file') return;
save(message.url, message.status, message.type, message.body);
});
return writeFile(export_file, body);
}

async function handle(url: URL) {
function handle(url: URL, fetchOpts: FetchOpts, addCallback: Function) {
let pathname = url.pathname;
if (pathname !== '/service-worker-index.html') {
pathname = pathname.replace(root.pathname, '') || '/'
pathname = pathname.replace(fetchOpts.root.pathname, '') || '/'
}

if (seen.has(pathname)) return;
seen.add(pathname);

const r = await q.add(async () => {
const timeout_deferred = new Deferred();
const the_timeout = setTimeout(() => {
timeout_deferred.reject(new Error(`Timed out waiting for ${url.href}`));
}, timeout);

const r = await Promise.race([
fetch(url.href, {
headers: { host: host_header || host },
redirect: 'manual'
}),
timeout_deferred.promise
]);
seen.add(pathname);
addCallback(url);
}

clearTimeout(the_timeout); // prevent it hanging at the end
async function handleFetch(url: URL, { timeout, host, host_header }: FetchOpts) {
const href = url.href;
const timeout_deferred = new Deferred();
const the_timeout = setTimeout(() => {
timeout_deferred.reject(new Error(`Timed out waiting for ${href}`));
}, timeout);

const r = await Promise.race([
fetch(href, {
headers: { host: host_header || host },
redirect: 'manual'
}),
timeout_deferred.promise
]);

clearTimeout(the_timeout); // prevent it hanging at the end

return {
response: r,
url,
};
}

return r;
}) as Response;
async function handleResponse(fetched: Promise<FetchRet>, fetchOpts: FetchOpts) {
const { response, url } = await fetched;
const { protocol, host, root } = fetchOpts;
let pathname = url.pathname;

let type = r.headers.get('Content-Type');
if (pathname !== '/service-worker-index.html') {
pathname = pathname.replace(root.pathname, '') || '/';
}

let body = await r.text();
let type = response.headers.get('Content-Type');

const range = ~~(r.status / 100);
let body = await response.text();

let tasks = [];
const range = ~~(response.status / 100);

if (range === 2) {
if (type === 'text/html') {
// parse link rel=preload headers and embed them in the HTML
let link = parseLinkHeader(r.headers.get('Link') || '');
link.refs.forEach((ref: Ref) => {
if (ref.rel === 'preload') {
body = body.replace('</head>',
`<link rel="preload" as=${JSON.stringify(ref.as)} href=${JSON.stringify(ref.uri)}></head>`)
}
});
if (range === 2 && type === 'text/html') {
// parse link rel=preload headers and embed them in the HTML
let link = parseLinkHeader(response.headers.get('Link') || '');
link.refs.forEach((ref: Ref) => {
if (ref.rel === 'preload') {
body = body.replace('</head>',
`<link rel="preload" as=${JSON.stringify(ref.as)} href=${JSON.stringify(ref.uri)}></head>`)
}
});

if (pathname !== '/service-worker-index.html') {
const cleaned = clean_html(body);
if (pathname !== '/service-worker-index.html') {
const cleaned = clean_html(body);

const base_match = /<base ([\s\S]+?)>/m.exec(cleaned);
const base_href = base_match && get_href(base_match[1]);
const base = resolve(url.href, base_href);
const base_match = /<base ([\s\S]+?)>/m.exec(cleaned);
const base_href = base_match && get_href(base_match[1]);
const base = resolve(url.href, base_href);

let match;
let pattern = /<a ([\s\S]+?)>/gm;
let match;
let pattern = /<a ([\s\S]+?)>/gm;

while (match = pattern.exec(cleaned)) {
const attrs = match[1];
const href = get_href(attrs);
while (match = pattern.exec(cleaned)) {
const attrs = match[1];
const href = get_href(attrs);

if (href) {
const url = resolve(base.href, href);
if (href) {
const url = resolve(base.href, href);

if (url.protocol === protocol && url.host === host) {
tasks.push(handle(url));
}
if (url.protocol === protocol && url.host === host) {
handle(url, fetchOpts, queue.add)
}
}
}
}
}

if (range === 3) {
const location = r.headers.get('Location');
const location = response.headers.get('Location');

type = 'text/html';
body = `<script>window.location.href = "${location.replace(origin, '')}"</script>`;

tasks.push(handle(resolve(root.href, location)));
handle(resolve(root.href, location), fetchOpts, queue.add);
}

save(pathname, r.status, type, body);

await Promise.all(tasks);
return save(pathname, response.status, type, body);
}

try {
await ports.wait(port);
const fetchOpts = {
timeout: timeout === false ? 0 : timeout,
host,
host_header,
protocol,
root,
};

const queue = exportQueue({
concurrent,
seen,
saved,
fetchOpts,
handleFetch,
handleResponse,
callbacks: {
onDone: () => {},
},
});

for (const entryPoint of entryPoints) {
oninfo({
message: `Crawling ${entryPoint.href}`
});
await handle(entryPoint);
}
proc.on('message', message => {
if (!message.__sapper__ || message.event !== 'file') return;
queue.addSave(save(message.url, message.status, message.type, message.body));
});

await handle(resolve(root.href, 'service-worker-index.html'));
await q.close();
return new Promise(async (res, rej) => {
queue.setCallback('onDone', () => {
proc.kill();
res();
});

proc.kill()
} catch (err) {
proc.kill();
throw err;
}
}
try {
await ports.wait(port);

function get_href(attrs: string) {
const match = /href\s*=\s*(?:"(.*?)"|'(.*?)'|([^\s>]*))/.exec(attrs);
return match && (match[1] || match[2] || match[3]);
for (const entryPoint of entryPoints) {
oninfo({
message: `Crawling ${entryPoint.href}`
});
handle(entryPoint, fetchOpts, queue.add);
}

const workerUrl = resolve(root.href, 'service-worker-index.html');
handle(workerUrl, fetchOpts, queue.add);
} catch (err) {
proc.kill();
rej(err);
}
});
}
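
A note on the new helper: exportQueue, imported from ./utils/export_queue, is added elsewhere in this PR and does not appear in this diff. Purely for orientation, the sketch below shows a queue with the surface the call sites above rely on (add, addSave, setCallback('onDone', ...)) and the backpressure behaviour the PR title describes: at most `concurrent` fetches are in flight at once, and completion is only signalled after every queued write has settled. All shapes are inferred from usage in export.ts and are assumptions, not the PR's actual implementation.

import * as url from 'url';

// Illustrative sketch only: shapes are inferred from the call sites in
// export.ts above. The real src/api/utils/export_queue.ts added by this
// PR may differ in both API and behaviour.

type URL = url.UrlWithStringQuery;

export type FetchOpts = {
	timeout: number;
	host: string;
	host_header?: string;
	protocol: string;
	root: URL;
};

export type FetchRet = {
	response: any; // node-fetch Response
	url: URL;
};

type QueueOpts = {
	concurrent: number;
	seen: Set<string>;   // passed by export.ts; unused in this sketch
	saved: Set<string>;  // passed by export.ts; unused in this sketch
	fetchOpts: FetchOpts;
	handleFetch: (u: URL, opts: FetchOpts) => Promise<FetchRet>;
	handleResponse: (fetched: Promise<FetchRet>, opts: FetchOpts) => Promise<unknown>;
	callbacks: { onDone: () => void };
};

export function exportQueue({ concurrent, fetchOpts, handleFetch, handleResponse, callbacks }: QueueOpts) {
	const pending: URL[] = [];                  // URLs discovered but not yet fetched
	const saves: Array<Promise<unknown>> = [];  // in-flight file-write promises
	let inflight = 0;                           // fetches currently running

	function next() {
		// Backpressure: never start more than `concurrent` fetches at once;
		// everything else waits in `pending`.
		while (inflight < concurrent && pending.length > 0) {
			const target = pending.shift()!;
			inflight += 1;
			handleResponse(handleFetch(target, fetchOpts), fetchOpts)
				.catch(err => console.error(err)) // keep draining even if one page fails
				.then(() => {
					inflight -= 1;
					check_done();
					next();
				});
		}
	}

	function check_done() {
		// Only report completion once the crawl frontier is empty and
		// every queued save has settled.
		if (inflight === 0 && pending.length === 0) {
			Promise.all(saves).then(() => callbacks.onDone());
		}
	}

	return {
		add: (target: URL) => {                 // enqueue a URL to crawl
			pending.push(target);
			next();
		},
		addSave: (save: Promise<unknown>) => {  // track a pending file write
			saves.push(save);
		},
		setCallback: (name: 'onDone', fn: () => void) => {
			callbacks[name] = fn;
		}
	};
}

Because export.ts hands queue.add around as a bare callback (handle(entryPoint, fetchOpts, queue.add)), the sketch returns arrow functions rather than methods that depend on their receiver.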