Refactor S3 calls from deploy-with-s3 to s3-utils
rogermparent committed Apr 15, 2020
1 parent 796ca4d commit f1892f6
Showing 2 changed files with 139 additions and 102 deletions.
127 changes: 27 additions & 100 deletions scripts/deploy-with-s3.js
@@ -1,5 +1,6 @@
#!/usr/bin/env node
'use strict'
const PRODUCTION_PREFIX = 'dvc-org-prod'

/**
* Build gatsby site and deploy public/ to s3.
@@ -18,107 +19,39 @@
* - HEROKU_APP_NAME: (optional) app name to specify the ID of the PR if any.
**/
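// Hypothetical invocation sketch (not part of this commit): the script is
// configured entirely through the environment variables listed above, so a
// CI step might run, e.g.:
//   AWS_REGION=us-east-1 HEROKU_APP_NAME=dvc-org-pr-123 node scripts/deploy-with-s3.js
// (the region and app name here are placeholder values)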

const path = require('path')
const { execSync } = require('child_process')
const { remove, move, ensureDir } = require('fs-extra')
const { s3Prefix, s3Bucket, s3Client } = require('./s3-utils')

const publicDirName = 'public'
const publicDirEntry = [publicDirName, '/']
const cacheDirEntry = ['.cache', '-cache/']
const cacheDirs = [
[publicDirName, '/'],
['.cache', '-cache/']
]

const rootDir = process.cwd()
function localPath(dirName) {
return path.join(rootDir, dirName)
}

const cacheDirs = [cacheDirEntry, publicDirEntry]
const { s3Prefix, withEntries, prefixIsEmpty } = require('./s3-utils')
const { downloadAllFromS3, uploadAllToS3, cleanAllLocal } = withEntries(
cacheDirs
)

function run(command) {
execSync(command, {
stdio: ['pipe', process.stdout, process.stderr]
})
}

function syncCall(method, ...args) {
return new Promise((resolve, reject) => {
const synchroniser = s3Client[method](...args)
synchroniser.on('error', reject)
synchroniser.on('end', resolve)
})
}

async function prefixIsEmpty(prefix) {
try {
await s3Client.s3
.headObject({
Bucket: s3Bucket,
Key: `${prefix}/index.html`
})
.promise()
return false
} catch (e) {
return true
}
}

async function downloadFromS3([dir, childPrefix], basePrefix = s3Prefix) {
try {
const prefix = basePrefix + childPrefix
const localDirPath = localPath(dir)
await ensureDir(localDirPath)

console.log(`Downloading "${dir}" from s3://${s3Bucket}/${prefix}`)
console.time(`"${dir}" downloaded in`)
await syncCall('downloadDir', {
localDir: localDirPath,
s3Params: {
Bucket: s3Bucket,
Prefix: prefix
}
})
console.timeEnd(`"${dir}" downloaded in`)
} catch (downloadError) {
console.error('Error downloading initial data', downloadError)
// Don't propagate. It's just a cache warming step
}
}

async function uploadToS3([dir, childPrefix], basePrefix = s3Prefix) {
const prefix = basePrefix + childPrefix
console.log(`Uploading "${dir}" to s3://${s3Bucket}/${prefix}`)
console.time(`"${dir}" uploaded in`)
await syncCall('uploadDir', {
localDir: localPath(dir),
deleteRemoved: true,
s3Params: {
Bucket: s3Bucket,
Prefix: prefix
}
})
console.timeEnd(`"${dir}" uploaded in`)
}

async function downloadAllFromS3(basePrefix) {
return Promise.all(cacheDirs.map(dir => downloadFromS3(dir, basePrefix)))
}

async function uploadAllToS3(basePrefix) {
return Promise.all(cacheDirs.map(dir => uploadToS3(dir, basePrefix)))
}

async function clean() {
return Promise.all(cacheDirs.map(([dir]) => remove(localPath(dir))))
}

async function main() {
const emptyPrefix = await prefixIsEmpty(s3Prefix)

// First build of a PR is slow because it can't reuse cache.
// But we can download from prod to warm cache up.
const cacheWarmPrefix = emptyPrefix ? 'dvc-org-prod' : s3Prefix

await downloadAllFromS3(cacheWarmPrefix)
// Check if the prefix we're working with has a build in it.
const emptyPrefix = await prefixIsEmpty()
// If not, we download production's cache.
// This greatly speeds up PR initial build time.

if (emptyPrefix) {
console.warn(
`The current prefix "${s3Prefix}" is empty! Attempting to fall back on production cache.`
)
await downloadAllFromS3(PRODUCTION_PREFIX)
} else {
await downloadAllFromS3(s3Prefix)
}

try {
run('yarn build')
@@ -127,22 +60,16 @@ async function main() {
// Clear it and try again.

console.error('------------------------\n\n')
console.error('The first Gatsby build attempt failed!\n')
console.error(buildError)
console.error('\nAssuming bad cache and retrying:\n')
console.error('\nRetrying with a cleared cache:\n')

await clean()
await cleanAllLocal()
run('yarn build')
}

await move(
path.join(localPath(publicDirName), '404.html'),
path.join(rootDir, '404.html'),
{
overwrite: true
}
)
await uploadAllToS3()
await clean()
await uploadAllToS3(s3Prefix)
await cleanAllLocal()
}

main().catch(e => {
114 changes: 112 additions & 2 deletions scripts/s3-utils.js
@@ -1,7 +1,9 @@
'use strict'

const { s3Prefix, s3Bucket } = require('../src/server/config')
const path = require('path')
const s3 = require('s3-client')
const { s3Prefix, s3Bucket } = require('../src/server/config')
const { remove, move, ensureDir } = require('fs-extra')

const {
AWS_REGION,
@@ -25,8 +27,116 @@ console.log({
hasCreds: Boolean(AWS_ACCESS_KEY_ID && AWS_SECRET_ACCESS_KEY)
})

const rootDir = process.cwd()

function localPath(dirName) {
return path.join(rootDir, dirName)
}

function syncCall(method, ...args) {
return new Promise((resolve, reject) => {
const synchroniser = s3Client[method](...args)
synchroniser.on('error', reject)
synchroniser.on('end', resolve)
})
}
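
// Usage sketch for syncCall (illustrative only; it mirrors the calls below):
// syncCall adapts s3-client's EventEmitter-style methods to a promise,
// rejecting on the 'error' event and resolving on 'end', e.g.:
//
//   await syncCall('uploadDir', {
//     localDir: localPath('public'),
//     s3Params: { Bucket: s3Bucket, Prefix: s3Prefix + '/' }
//   })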

async function prefixIsEmpty(prefix = s3Prefix) {
try {
await s3Client.s3
.headObject({
Bucket: s3Bucket,
Key: `${prefix}/index.html`
})
.promise()
return false
} catch (e) {
return true
}
}
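
// Note: the probe above treats any headObject failure (missing key,
// permissions error) as an empty prefix; at worst this triggers a redundant
// production-cache warm-up in deploy-with-s3.js.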

async function downloadFromS3(dir, childPrefix, basePrefix = s3Prefix) {
try {
const prefix = basePrefix + childPrefix
const localDirPath = localPath(dir)
await ensureDir(localDirPath)

console.log(`Downloading "${dir}" from s3://${s3Bucket}/${prefix}`)
const timeString = `"${dir}" downloaded in`
console.time(timeString)
await syncCall('downloadDir', {
localDir: localDirPath,
s3Params: {
Bucket: s3Bucket,
Prefix: prefix
}
})
console.timeEnd(timeString)
} catch (downloadError) {
console.error('Error downloading initial data', downloadError)
// Don't propagate. It's just a cache warming step
}
}

async function uploadToS3(dir, childPrefix, basePrefix = s3Prefix) {
const prefix = basePrefix + childPrefix
console.log(`Uploading "${dir}" to s3://${s3Bucket}/${prefix}`)
const timeString = `"${dir}" uploaded in`
console.time(timeString)
await syncCall('uploadDir', {
localDir: localPath(dir),
deleteRemoved: true,
s3Params: {
Bucket: s3Bucket,
Prefix: prefix
}
})
console.timeEnd(timeString)
}

async function cleanAllLocal(entries) {
return Promise.all(entries.map(([dir]) => remove(localPath(dir))))
}

async function downloadAllFromS3(entries, basePrefix) {
return Promise.all(
entries.map(([dir, childPrefix]) =>
downloadFromS3(dir, childPrefix, basePrefix)
)
)
}

async function uploadAllToS3(entries, basePrefix) {
return Promise.all(
entries.map(([dir, childPrefix]) =>
uploadToS3(dir, childPrefix, basePrefix)
)
)
}

// Some syntax sugar that generates versions of the entry-based
// functions with an entries list baked in.
function withEntries(entries) {
return {
async downloadAllFromS3(basePrefix) {
return downloadAllFromS3(entries, basePrefix)
},
async uploadAllToS3(basePrefix) {
return uploadAllToS3(entries, basePrefix)
},
async cleanAllLocal() {
return cleanAllLocal(entries)
}
}
}
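
// Usage sketch (this is how deploy-with-s3.js above consumes withEntries):
//
//   const { downloadAllFromS3, uploadAllToS3, cleanAllLocal } = withEntries([
//     ['public', '/'],
//     ['.cache', '-cache/']
//   ])
//   await downloadAllFromS3(s3Prefix)
//
// Each generated function closes over the entries list, so callers only pass
// the base prefix that varies between builds.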

module.exports = {
s3Bucket,
s3Prefix,
s3Client
s3Client,
uploadAllToS3,
downloadAllFromS3,
withEntries,
cleanAllLocal,
prefixIsEmpty
}
