decrease build times for pack command
- We can reuse the tarball and append new files (node binaries in this
case) instead of compressing/extracting multiple times. This prevents
unnecessary copying of files into the built tarball (see the sketch below).
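
A minimal sketch of the append approach (simplified from the change to
`src/tarballs/build.ts` below; the paths here are placeholders):

```ts
// Sketch only: copy the shared base tarball, then append the per-target
// node binary with tar.replace instead of re-packing the whole workspace.
import * as path from 'path'
import * as fs from 'fs-extra'
import * as tar from 'tar'

async function addNodeToTarball(baseTar: string, targetTar: string, nodeBinary: string) {
  // Reuse the already-packed base archive instead of re-creating it per target.
  await fs.copy(baseTar, targetTar)

  // tar.replace appends entries to the end of an existing (uncompressed)
  // archive, so the files already in the tarball are never touched again.
  const cwd = path.dirname(path.dirname(nodeBinary)) // store the entry as e.g. bin/node
  await tar.replace({file: targetTar, cwd}, [path.relative(cwd, nodeBinary)])
}
```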

We can also build all `targets` in parallel in the following way:
- Download Node.js versions for targets in parallel instead of serially
- Use [workerpool][1] to create gzip files in parallel (a sketch of this
step follows the list)
- Run all `xz` compression with the maximum available threads. xz has
[supported multi-threaded compression since 5.2 (2014)][2].
- Like gzip, run all `xz` compression in parallel instead of waiting for
each target to build.
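
A minimal sketch of the compression step (simplified from the `compress`
helper added in `src/tarballs/build.ts` below; the tarball path is a
placeholder):

```ts
// Sketch only: gzip runs in a workerpool worker while xz -T0 uses all cores.
import * as path from 'path'
import * as qq from 'qqjs'
import {pool} from 'workerpool'

const gzipPool = pool(path.join(__dirname, './gzip-worker.js'))

async function gzip(filePath: string, target: string): Promise<void> {
  // Dispatch to the worker script's exported `gzip` function.
  await gzipPool.exec('gzip', [filePath, target])
}

async function compress(tarballPath: string, xz: boolean): Promise<void> {
  const jobs: Promise<any>[] = [gzip(tarballPath, tarballPath + '.gz')]
  if (xz) {
    // -T0 lets xz choose the thread count (multi-threaded since xz 5.2).
    jobs.push(qq.x(`xz -T0 --compress --force --keep ${tarballPath}`))
  }
  await Promise.all(jobs)
}
```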

Running this in the Heroku CLI produced identical tarballs in roughly a
third of the time (~111 seconds vs ~344 seconds before this change). The
improvement is likely even a bit larger, since I was running it locally,
where ts-node compiling TypeScript at runtime adds some boot-up time.

[1]: https://www.npmjs.com/package/workerpool
[2]: https://www.phoronix.com/scan.php?page=news_item&px=MTg3MDM
fivetanley committed Mar 15, 2021
1 parent ea4bae7 commit 8c60eff
Showing 7 changed files with 549 additions and 129 deletions.
5 changes: 4 additions & 1 deletion .eslintrc
@@ -4,6 +4,9 @@
"oclif-typescript"
],
"rules": {
"unicorn/no-abusive-eslint-disable": "off"
"unicorn/no-abusive-eslint-disable": "off",
"@typescript-eslint/no-use-before-define": ["error", {
"functions": false
}]
}
}
16 changes: 12 additions & 4 deletions package.json
@@ -14,25 +14,33 @@
"@oclif/plugin-help": "^3.2.0",
"cli-ux": "^5.2.1",
"debug": "^4.1.1",
"execa": "^5.0.0",
"find-yarn-workspace-root": "^2.0.0",
"fs-extra": "^8.1",
"github-slugger": "^1.2.1",
"lodash": "^4.17.11",
"normalize-package-data": "^3.0.0",
"qqjs": "^0.3.10",
"tslib": "^2.0.3"
"stream.pipeline-shim": "^1.1.0",
"tar": "^6.1.0",
"tmp": "^0.2.1",
"tslib": "^2.0.3",
"workerpool": "^6.1.0"
},
"devDependencies": {
"@oclif/plugin-legacy": "^1.1.4",
"@oclif/test": "^1.2.4",
"@types/chai": "^4.1.7",
"@types/execa": "^0.9.0",
"@types/execa": "^2.0.0",
"@types/fs-extra": "^9.0",
"@types/lodash": "^4.14.123",
"@types/lodash.template": "^4.4.6",
"@types/mocha": "^7.0.2",
"@types/node": "^14.0.14",
"@types/supports-color": "^5.3.0",
"@types/supports-color": "^7.2.0",
"@types/tar": "^4.0.4",
"@types/tmp": "^0.2.0",
"@types/workerpool": "^6.0.0",
"@types/write-json-file": "^3.2.1",
"aws-sdk": "^2.443.0",
"chai": "^4.2.0",
@@ -46,7 +54,7 @@
"typescript": "3.8.3"
},
"engines": {
"node": ">=8.10.0"
"node": ">= 8.0.0"
},
"files": [
"/oclif.manifest.json",
9 changes: 6 additions & 3 deletions src/tarballs/bin.ts
@@ -1,14 +1,17 @@
/* eslint-disable no-useless-escape */
import * as Config from '@oclif/config'
import * as qq from 'qqjs'
import * as fs from 'fs-extra'
import * as path from 'path'

export async function writeBinScripts({config, baseWorkspace, nodeVersion}: {config: Config.IConfig; baseWorkspace: string; nodeVersion: string}) {
const binPathEnvVar = config.scopedEnvVarKey('BINPATH')
const redirectedEnvVar = config.scopedEnvVarKey('REDIRECTED')
const clientHomeEnvVar = config.scopedEnvVarKey('OCLIF_CLIENT_HOME')
await qq.mkdirp(qq.join([baseWorkspace, 'bin']))
const writeWin32 = async () => {
const {bin} = config
await qq.write([baseWorkspace, 'bin', `${config.bin}.cmd`], `@echo off
await fs.writeFile(path.join(baseWorkspace, 'bin', `${config.bin}.cmd`), `@echo off
setlocal enableextensions
if not "%${redirectedEnvVar}%"=="1" if exist "%LOCALAPPDATA%\\${bin}\\client\\bin\\${bin}.cmd" (
@@ -34,8 +37,8 @@ if exist "%~dp0..\\bin\\node.exe" (
// `)
}
const writeUnix = async () => {
const bin = qq.join([baseWorkspace, 'bin', config.bin])
await qq.write(bin, `#!/usr/bin/env bash
const bin = path.join(baseWorkspace, 'bin', config.bin)
await fs.writeFile(bin, `#!/usr/bin/env bash
set -e
echoerr() { echo "$@" 1>&2; }
206 changes: 144 additions & 62 deletions src/tarballs/build.ts
@@ -3,29 +3,61 @@ import * as Errors from '@oclif/errors'
import * as findYarnWorkspaceRoot from 'find-yarn-workspace-root'
import * as path from 'path'
import * as qq from 'qqjs'
import * as tar from 'tar'
import * as fs from 'fs-extra'
import streamPipeline = require('stream.pipeline-shim');
import {promisify} from 'util'
import {pool} from 'workerpool'

const pipeline = promisify(streamPipeline)

import {log} from '../log'

import {writeBinScripts} from './bin'
import {IConfig, IManifest} from './config'
import {fetchNodeBinary} from './node'
import {fetchNodeBinary, downloadSHASums} from './node'

const gzipPool = pool(path.join(__dirname, './gzip-worker.js'))

async function gzip(filePath: string, target: string) {
await gzipPool.exec('gzip', [filePath, target])
}

function targetNodeLocation(target: any, config: IConfig) {
const workspace = config.workspace(target)
return path.join(workspace, 'bin', 'node')
}

const pack = async (from: string, to: string) => {
const prevCwd = qq.cwd()
qq.cd(path.dirname(from))
await qq.mkdirp(path.dirname(to))
const toDir = path.dirname(to)
const fromBase = path.basename(from)

await qq.mkdirp(toDir)

log(`packing tarball from ${qq.prettifyPaths(from)} to ${qq.prettifyPaths(to)}`)
await (to.endsWith('gz') ?
qq.x('tar', ['czf', to, path.basename(from)]) :
qq.x(`tar c ${path.basename(from)} | xz > ${to}`))
qq.cd(prevCwd)

const tarStream = tar.c(
{
gzip: false,
cwd: path.dirname(from),
},
[fromBase],
)

await pipeline(tarStream, fs.createWriteStream(to))
}

export async function build(c: IConfig, options: {
function tarballBasePath(c: IConfig) {
const {config} = c
return c.dist(config.s3Key('versioned', '.tar.gz')).replace('.tar.gz', '.tar')
}

async function doBuild(c: IConfig, options: {
platform?: string;
pack?: boolean;
} = {}) {
const {xz, config} = c
const baseTarballPath = tarballBasePath(c)
const prevCwd = qq.cwd()
const packCLI = async () => {
const stdout = await qq.x.stdout('npm', ['pack', '--unsafe-perm'], {cwd: c.root})
@@ -37,9 +69,10 @@ export async function build(c: IConfig, options: {
tarball = path.basename(tarball)
tarball = qq.join([c.workspace(), tarball])
qq.cd(c.workspace())
await qq.x(`tar -xzf ${tarball}`)
// eslint-disable-next-line no-await-in-loop
for (const f of await qq.ls('package', {fullpath: true})) await qq.mv(f, '.')
await tar.x({
file: tarball,
stripComponents: 1,
})
await qq.rm('package', tarball, 'bin/run.cmd')
}
const updatePJSON = async () => {
@@ -70,73 +103,122 @@
const buildTarget = async (target: {platform: PlatformTypes; arch: ArchTypes}) => {
const workspace = c.workspace(target)
const key = config.s3Key('versioned', '.tar.gz', target)
const tarballPath = key.replace('tar.gz', 'tar')

const base = path.basename(key)
const tarballDist = c.dist(tarballPath)
const nodePath = targetNodeLocation(target, c)
const workspaceParent = path.dirname(workspace)

log(`building target ${base}`)
await qq.rm(workspace)
await qq.cp(c.workspace(), workspace)
await fetchNodeBinary({
nodeVersion: c.nodeVersion,
output: path.join(workspace, 'bin', 'node'),
platform: target.platform,
arch: target.arch,
tmp: qq.join(config.root, 'tmp'),
})
if (xz) {
const baseXZ = base.replace('.tar.gz', '.tar.xz')
log(`building target ${baseXZ}`)
}

await qq.cp(baseTarballPath, tarballDist)

await tar.replace({
file: tarballDist,
cwd: workspaceParent,
}, [path.relative(workspaceParent, nodePath)])

if (options.pack === false) return
await pack(workspace, c.dist(key))
if (xz) await pack(workspace, c.dist(config.s3Key('versioned', '.tar.xz', target)))
await compress(tarballDist, xz)
if (!c.updateConfig.s3.host) return
const rollout = (typeof c.updateConfig.autoupdate === 'object' && c.updateConfig.autoupdate.rollout)
const manifest: IManifest = {
rollout: rollout === false ? undefined : rollout,
version: c.version,
channel: c.channel,
baseDir: config.s3Key('baseDir', target),
gz: config.s3Url(config.s3Key('versioned', '.tar.gz', target)),
xz: xz ? config.s3Url(config.s3Key('versioned', '.tar.xz', target)) : undefined,
sha256gz: await qq.hash('sha256', c.dist(config.s3Key('versioned', '.tar.gz', target))),
sha256xz: xz ? await qq.hash('sha256', c.dist(config.s3Key('versioned', '.tar.xz', target))) : undefined,
node: {
compatible: config.pjson.engines.node,
recommended: c.nodeVersion,
},
}
await qq.writeJSON(c.dist(config.s3Key('manifest', target)), manifest)
await writeManifest(target, c, config, xz)
}
const buildBaseTarball = async () => {
if (options.pack === false) return
await pack(c.workspace(), c.dist(config.s3Key('versioned', '.tar.gz')))
if (xz) await pack(c.workspace(), c.dist(config.s3Key('versioned', '.tar.xz')))
await pack(c.workspace(), baseTarballPath)
await compress(baseTarballPath, xz)
if (!c.updateConfig.s3.host) {
Errors.warn('No S3 bucket or host configured. CLI will not be able to update.')
return
}
const manifest: IManifest = {
version: c.version,
baseDir: config.s3Key('baseDir'),
channel: config.channel,
gz: config.s3Url(config.s3Key('versioned', '.tar.gz')),
xz: config.s3Url(config.s3Key('versioned', '.tar.xz')),
sha256gz: await qq.hash('sha256', c.dist(config.s3Key('versioned', '.tar.gz'))),
sha256xz: xz ? await qq.hash('sha256', c.dist(config.s3Key('versioned', '.tar.xz'))) : undefined,
rollout: (typeof c.updateConfig.autoupdate === 'object' && c.updateConfig.autoupdate.rollout) as number,
node: {
compatible: config.pjson.engines.node,
recommended: c.nodeVersion,
},
}
await qq.writeJSON(c.dist(config.s3Key('manifest')), manifest)
}
log(`gathering workspace for ${config.bin} to ${c.workspace()}`)
await extractCLI(await packCLI())
await updatePJSON()
await addDependencies()
await writeBinScripts({config, baseWorkspace: c.workspace(), nodeVersion: c.nodeVersion})
await buildBaseTarball()
for (const target of c.targets) {
if (!options.platform || options.platform === target.platform) {
// eslint-disable-next-line no-await-in-loop
await buildTarget(target)
await writeManifest(undefined, c, config, xz)
await downloadNodeBinaries(c)
const targetsToBuild =
options.platform ?
c.targets.filter(t => options.platform === t.platform) :
c.targets
const buildPromises = targetsToBuild.map(buildTarget)
await Promise.all(buildPromises)
log('done building')

qq.cd(prevCwd)
}

export async function build(c: IConfig, options: {
platform?: string;
pack?: boolean;
} = {}) {
try {
await doBuild(c, options)
} finally {
await gzipPool.terminate()
}
}

async function writeManifest(target: any, c: IConfig, config: IConfig['config'], xz: boolean) {
const rollout = (typeof c.updateConfig.autoupdate === 'object' && c.updateConfig.autoupdate.rollout)
const gz = config.s3Key('versioned', '.tar.gz', target)

let manifest: IManifest = {
rollout: rollout === false ? undefined : rollout,
version: c.version,
channel: c.channel,
baseDir: config.s3Key('baseDir', target),
gz: config.s3Url(gz),
sha256gz: await qq.hash('sha256', c.dist(gz)),
node: {
compatible: config.pjson.engines.node,
recommended: c.nodeVersion,
},
}

if (xz) {
const s3XZ = config.s3Key('versioned', '.tar.xz', target)
manifest = {
...manifest,
xz: config.s3Url(s3XZ),
sha256xz: await qq.hash('sha256', c.dist(s3XZ)),
}
}
qq.cd(prevCwd)

await qq.writeJSON(c.dist(config.s3Key('manifest', target)), manifest)
}

async function compress(tarballPath: string, xz: boolean) {
const gzpath = tarballPath + '.gz'
const gzipPromise = gzip(tarballPath, gzpath)
const promises: Promise<any>[] = [gzipPromise]

if (xz) {
promises.push(qq.x(`xz -T0 --compress --force --keep ${tarballPath}`))
}

await Promise.all(promises)
}

async function downloadNodeBinaries(config: IConfig) {
const shasums = await downloadSHASums(config.nodeVersion)
const promises = config.targets.map(async target => {
await fetchNodeBinary({
nodeVersion: config.nodeVersion,
output: targetNodeLocation(target, config),
platform: target.platform,
arch: target.arch,
tmp: qq.join(config.root, 'tmp'),
shasums,
})
})

await Promise.all(promises)
}
18 changes: 18 additions & 0 deletions src/tarballs/gzip-worker.js
@@ -0,0 +1,18 @@
const {worker} = require('workerpool');
const {createGzip} = require('zlib');
const {pipeline: streamPipeline} = require('stream');
const {promisify} = require('util');
const fs = require('fs');

const pipeline = promisify(streamPipeline);

function gzip(filePath, target) {
const gzip = createGzip({level: 6});
const readStream = fs.createReadStream(filePath);
const writeStream = fs.createWriteStream(target);
return pipeline(readStream, gzip, writeStream).then(() => true);
}

worker({
gzip: gzip
})
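
For context, a minimal usage sketch (not part of the commit) of how a
workerpool pool drives this worker, as `src/tarballs/build.ts` above does;
the tarball paths are placeholders:

```ts
// Sketch only: each exec call runs the worker's exported `gzip(filePath, target)`.
import * as path from 'path'
import {pool} from 'workerpool'

const gzipPool = pool(path.join(__dirname, './gzip-worker.js'))

async function main() {
  try {
    // Compress several tarballs concurrently; each job lands on a pool worker.
    await Promise.all([
      gzipPool.exec('gzip', ['dist/cli-linux-x64.tar', 'dist/cli-linux-x64.tar.gz']),
      gzipPool.exec('gzip', ['dist/cli-darwin-x64.tar', 'dist/cli-darwin-x64.tar.gz']),
    ])
  } finally {
    // Terminate the pool so idle workers do not keep the process running.
    await gzipPool.terminate()
  }
}

main()
```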