fix: compile using electron-compile not in place, but using cache
Close #807
develar committed Mar 24, 2017
1 parent e208f53 commit 08893e3
Showing 5 changed files with 176 additions and 146 deletions.
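For context, the core change is that electron-compile output now goes into a separate cache directory that is packed alongside the untouched sources, instead of the sources being rewritten in place. Below is a minimal sketch of that flow; it uses only the calls that appear in the diff (createCompilerHostFromProjectRoot, compile, saveConfiguration), while the project path and file list are assumptions for illustration.

```ts
// Sketch only: precompile app sources into a cache dir instead of in place.
// The electron-compile calls mirror those used in asarUtil.ts/fileTransformer.ts below.
import * as path from "path"

async function precompileToCache(projectDir: string, files: Array<string>): Promise<string> {
  const cacheDir = path.join(projectDir, ".cache")
  const electronCompileLib = path.join(projectDir, "node_modules", "electron-compile", "lib")
  // bind the compiler host to cacheDir so compiled output never touches the sources
  const compilerHost = await require(path.join(electronCompileLib, "config-parser"))
    .createCompilerHostFromProjectRoot(projectDir, cacheDir)
  for (const file of files) {
    await compilerHost.compile(file)
  }
  // persist the compiler configuration next to the cache, as the new asarUtil.ts code does
  await compilerHost.saveConfiguration()
  return cacheDir
}
```

The diff below wires this idea into AsarPackager.pack, so the compiled cache files and the original sources both end up inside app.asar.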
1 change: 0 additions & 1 deletion packages/electron-builder/package.json
@@ -63,7 +63,6 @@
"isbinaryfile": "^3.0.2",
"js-yaml": "^3.8.2",
"minimatch": "^3.0.3",
"mime": "^1.3.4",
"node-forge": "^0.7.0",
"normalize-package-data": "^2.3.6",
"parse-color": "^1.0.0",
86 changes: 70 additions & 16 deletions packages/electron-builder/src/asarUtil.ts
@@ -3,9 +3,10 @@ import { AsarOptions } from "electron-builder-core"
import { debug } from "electron-builder-util"
import { CONCURRENCY, FileCopier, FileTransformer, Filter, MAX_FILE_REQUESTS, statOrNull, walk } from "electron-builder-util/out/fs"
import { log } from "electron-builder-util/out/log"
import { createReadStream, createWriteStream, ensureDir, readFile, readlink, stat, Stats, writeFile } from "fs-extra-p"
import { createReadStream, createWriteStream, emptyDir, ensureDir, readFile, readlink, stat, Stats, writeFile } from "fs-extra-p"
import * as path from "path"
import { AsarFilesystem, Node, readAsar } from "./asar"
import { createElectronCompilerHost } from "./fileTransformer"

const isBinaryFile: any = BluebirdPromise.promisify(require("isbinaryfile"))
const pickle = require ("chromium-pickle-js")
@@ -26,7 +27,7 @@ function addValue(map: Map<string, Array<string>>, key: string, value: string) {
interface UnpackedFileTask {
stats: Stats
src?: string
data?: string
data?: string | Buffer
destination: string
}

@@ -44,15 +45,18 @@ function writeUnpackedFiles(filesToUnpack: Array<UnpackedFileTask>, fileCopier:
export class AsarPackager {
private readonly fs = new AsarFilesystem(this.src)
private readonly outFile: string

private transformedFiles: Array<string | Buffer | true | null>
private readonly metadata = new Map<string, Stats>()

constructor(private readonly src: string, destination: string, private readonly options: AsarOptions, private readonly unpackPattern: Filter | null) {
constructor(private readonly src: string, destination: string, private readonly options: AsarOptions, private readonly unpackPattern: Filter | null, private readonly transformer: FileTransformer) {
this.outFile = path.join(destination, "app.asar")
}

// sort files to minimize file change (i.e. asar file is not changed dramatically on small change)
async pack(filter: Filter, transformer: ((path: string) => any) | null) {
const metadata = new Map<string, Stats>()
const files = await walk(this.src, filter, (file, fileStat) => {
async pack(filter: Filter, isElectronCompile: boolean) {
const metadata = this.metadata
let files = await walk(this.src, filter, (file, fileStat) => {
metadata.set(file, fileStat)
if (fileStat.isSymbolicLink()) {
return readlink(file)
@@ -76,13 +80,61 @@ export class AsarPackager {
}
return null
})

// transform before electron-compile to avoid filtering (cache files in any case should be not transformed)
const transformer = this.transformer
this.transformedFiles = await BluebirdPromise.map(files, it => metadata.get(it)!.isFile() ? transformer(it) : null, CONCURRENCY)

if (isElectronCompile) {
files = await this.compileUsingElectronCompile(files)
}

await this.createPackageFromFiles(this.options.ordering == null ? files : await this.order(files))
}

async compileUsingElectronCompile(files: Array<string>): Promise<Array<string>> {
log("Compiling using electron-compile")

const metadata = this.metadata
const cacheDir = path.join(this.src, ".cache")
// clear and create cache dir
await emptyDir(cacheDir)
const compilerHost = await createElectronCompilerHost(this.src, cacheDir)
const nextSlashIndex = this.src.length + 1
// pre-compute electron-compile to cache dir - we need to process only subdirectories, not direct files of app dir
await BluebirdPromise.map(files, file => {
if (file.includes("/node_modules/") || file.includes("/bower_components/")
|| !file.includes("/", nextSlashIndex) // ignore not root files
|| !metadata.get(file)!.isFile()) {
return null
}
return compilerHost.compile(file)
.then((it: any) => null)
}, CONCURRENCY)

await this.createPackageFromFiles(this.options.ordering == null ? files : await this.order(files), metadata, transformer)
await compilerHost.saveConfiguration()

const cacheFiles = await walk(cacheDir, (file, stat) => !file.startsWith("."), (file, fileStat) => {
this.metadata.set(file, fileStat)
return null
})

// add es6-shim.js
const es6ShimPath = `${this.src}/es6-shim.js`
cacheFiles.push(es6ShimPath)
metadata.set(es6ShimPath, <any>{isFile: () => true, isDirectory: () => false})

this.transformedFiles = (new Array(cacheFiles.length)).concat(this.transformedFiles)

this.transformedFiles[cacheFiles.length - 1] = await readFile(path.join(this.src, "node_modules", "electron-compile", "lib", "es6-shim.js"))

// cache files should be first (better IO)
return cacheFiles.concat(files)
}

async detectUnpackedDirs(files: Array<string>, metadata: Map<string, Stats>, autoUnpackDirs: Set<string>, unpackedDest: string) {
async detectUnpackedDirs(files: Array<string>, autoUnpackDirs: Set<string>, unpackedDest: string) {
const dirToCreate = new Map<string, Array<string>>()

const metadata = this.metadata
/* tslint:disable:rule1 prefer-const */
for (let i = 0, n = files.length; i < n; i++) {
const file = files[i]
@@ -147,19 +199,20 @@
}
}

private async createPackageFromFiles(files: Array<string>, metadata: Map<string, Stats>, transformer: FileTransformer | null) {
async createPackageFromFiles(files: Array<string>) {
const metadata = this.metadata
// search auto unpacked dir
const unpackedDirs = new Set<string>()
const unpackedDest = `${this.outFile}.unpacked`
await ensureDir(path.dirname(this.outFile))

if (this.options.smartUnpack !== false) {
await this.detectUnpackedDirs(files, metadata, unpackedDirs, unpackedDest)
await this.detectUnpackedDirs(files, unpackedDirs, unpackedDest)
}

const dirToCreateForUnpackedFiles = new Set<string>(unpackedDirs)

const transformedFiles = transformer == null ? new Array(files.length) : await BluebirdPromise.map(files, it => metadata.get(it)!.isFile() ? transformer(it) : null, CONCURRENCY)
const transformedFiles = this.transformedFiles
const filesToUnpack: Array<UnpackedFileTask> = []
const fileCopier = new FileCopier()
/* tslint:disable:rule1 prefer-const */
@@ -170,9 +223,9 @@
const fileParent = path.dirname(file)
const dirNode = this.fs.getOrCreateNode(fileParent)

const newData = transformedFiles == null ? null : transformedFiles[i]
const newData = transformedFiles == null ? null : <string | Buffer>transformedFiles[i]
const node = this.fs.getOrCreateNode(file)
node.size = newData == null ? stat.size : Buffer.byteLength(newData)
node.size = newData == null ? stat.size : Buffer.byteLength(<any>newData)
if (dirNode.unpacked || (this.unpackPattern != null && this.unpackPattern(file, stat))) {
node.unpacked = true
if (newData != null) {
@@ -227,10 +280,10 @@
await writeUnpackedFiles(filesToUnpack, fileCopier)
}

await this.writeAsarFile(files, transformedFiles)
await this.writeAsarFile(files)
}

private writeAsarFile(files: Array<string>, transformedFiles: Array<string | Buffer | null | true>): Promise<any> {
private writeAsarFile(files: Array<string>): Promise<any> {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(this.fs.header))
const headerBuf = headerPickle.toBuffer()
@@ -239,6 +292,7 @@
sizePickle.writeUInt32(headerBuf.length)
const sizeBuf = sizePickle.toBuffer()

const transformedFiles = this.transformedFiles
const writeStream = createWriteStream(this.outFile)
return new BluebirdPromise((resolve, reject) => {
writeStream.on("error", reject)
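One detail worth noting in the new compileUsingElectronCompile: the cache files are prepended to the file list, and an equal number of empty slots is prepended to transformedFiles, so the two arrays stay index-aligned and only the es6-shim slot receives content. A small standalone sketch of that invariant follows; the file names and contents are made up.

```ts
// Standalone illustration (not library code) of the files/transformedFiles alignment.
const files = ["/app/src/index.js", "/app/src/util.js"]                  // assumed app files
let transformedFiles: Array<string | Buffer | null> = ["// transformed index", null]

const cacheFiles = ["/app/.cache/abc123", "/app/es6-shim.js"]            // assumed compile output
transformedFiles = new Array(cacheFiles.length).concat(transformedFiles)
transformedFiles[cacheFiles.length - 1] = "/* contents of es6-shim.js */" // only the shim slot is filled

const allFiles = cacheFiles.concat(files)
// indices still match: allFiles[2] is index.js and transformedFiles[2] is its transformed content
console.log(allFiles[2], transformedFiles[2])
```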
75 changes: 75 additions & 0 deletions packages/electron-builder/src/fileMatcher.ts
@@ -1,10 +1,13 @@
import BluebirdPromise from "bluebird-lst"
import { FilePattern, PlatformSpecificBuildOptions } from "electron-builder-core"
import { asArray } from "electron-builder-util"
import { copyDir, copyFile, Filter, statOrNull } from "electron-builder-util/out/fs"
import { warn } from "electron-builder-util/out/log"
import { mkdirs } from "fs-extra-p"
import { Minimatch } from "minimatch"
import * as path from "path"
import { Config } from "./metadata"
import { BuildInfo } from "./packagerApi"
import { createFilter, hasMagic } from "./util/filter"

export class FileMatcher {
@@ -71,6 +74,78 @@ export class FileMatcher {
}
}

export function createFileMatcher(info: BuildInfo, appDir: string, resourcesPath: string, macroExpander: (pattern: string) => string, platformSpecificBuildOptions: PlatformSpecificBuildOptions) {
const patterns = info.isPrepackedAppAsar ? null : getFileMatchers(info.config, "files", appDir, path.join(resourcesPath, "app"), false, macroExpander, platformSpecificBuildOptions)
const matcher = patterns == null ? new FileMatcher(appDir, path.join(resourcesPath, "app"), macroExpander) : patterns[0]
if (matcher.isEmpty() || matcher.containsOnlyIgnore()) {
matcher.addAllPattern()
}
else {
matcher.addPattern("package.json")
}
matcher.addPattern("!**/node_modules/*/{CHANGELOG.md,ChangeLog,changelog.md,README.md,README,readme.md,readme,test,__tests__,tests,powered-test,example,examples,*.d.ts}")
matcher.addPattern("!**/node_modules/.bin")
matcher.addPattern("!**/*.{o,hprof,orig,pyc,pyo,rbc,swp}")
matcher.addPattern("!**/._*")
matcher.addPattern("!*.iml")
//noinspection SpellCheckingInspection
matcher.addPattern("!**/{.git,.hg,.svn,CVS,RCS,SCCS," +
"__pycache__,.DS_Store,thumbs.db,.gitignore,.gitattributes," +
".editorconfig,.flowconfig,.jshintrc,.eslintrc," +
".yarn-integrity,.yarn-metadata.json,yarn-error.log,yarn.lock,npm-debug.log," +
".idea," +
"appveyor.yml,.travis.yml,circle.yml," +
".nyc_output}")

return matcher
}

export function getFileMatchers(config: Config, name: "files" | "extraFiles" | "extraResources" | "asarUnpack", defaultSrc: string, defaultDest: string, allowAdvancedMatching: boolean, macroExpander: (pattern: string) => string, customBuildOptions: PlatformSpecificBuildOptions): Array<FileMatcher> | null {
const globalPatterns: Array<string | FilePattern> | string | null | FilePattern = (<any>config)[name]
const platformSpecificPatterns: Array<string | FilePattern> | string | null = (<any>customBuildOptions)[name]

const defaultMatcher = new FileMatcher(defaultSrc, defaultDest, macroExpander)
const fileMatchers: Array<FileMatcher> = []

function addPatterns(patterns: Array<string | FilePattern> | string | null | FilePattern) {
if (patterns == null) {
return
}
else if (!Array.isArray(patterns)) {
if (typeof patterns === "string") {
defaultMatcher.addPattern(patterns)
return
}
patterns = [patterns]
}

for (const pattern of patterns) {
if (typeof pattern === "string") {
// use normalize to transform ./foo to foo
defaultMatcher.addPattern(pattern)
}
else if (allowAdvancedMatching) {
const from = pattern.from == null ? defaultSrc : path.resolve(defaultSrc, pattern.from)
const to = pattern.to == null ? defaultDest : path.resolve(defaultDest, pattern.to)
fileMatchers.push(new FileMatcher(from, to, macroExpander, pattern.filter))
}
else {
throw new Error(`Advanced file copying not supported for "${name}"`)
}
}
}

addPatterns(globalPatterns)
addPatterns(platformSpecificPatterns)

if (!defaultMatcher.isEmpty()) {
// default matcher should be first in the array
fileMatchers.unshift(defaultMatcher)
}

return fileMatchers.length === 0 ? null : fileMatchers
}

export function copyFiles(patterns: Array<FileMatcher> | null): Promise<any> {
if (patterns == null || patterns.length === 0) {
return BluebirdPromise.resolve()
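A usage note for getFileMatchers above: the config value may be a single glob string, an array, or {from, to} objects. Bare strings land on the default matcher rooted at defaultSrc/defaultDest, while {from, to} entries create additional matchers and are accepted only when the caller passes allowAdvancedMatching (otherwise the "Advanced file copying not supported" error is thrown). A hypothetical value showing the shapes addPatterns handles; the globs are made up.

```ts
// Hypothetical "extraResources"-style value (assumption, not from the diff).
const patterns: Array<string | {from?: string, to?: string}> = [
  "dist/**/*",                      // string: added to the default matcher
  "!**/*.map",                      // negative glob: also on the default matcher
  {from: "static", to: "static"},   // object: extra FileMatcher, needs allowAdvancedMatching
]
```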
54 changes: 12 additions & 42 deletions packages/electron-builder/src/fileTransformer.ts
@@ -1,14 +1,12 @@
import { debug } from "electron-builder-util"
import { deepAssign } from "electron-builder-util/out/deepAssign"
import { FileTransformer } from "electron-builder-util/out/fs"
import { log, warn } from "electron-builder-util/out/log"
import { warn } from "electron-builder-util/out/log"
import { readJson } from "fs-extra-p"
import mime from "mime"
import * as path from "path"
import { BuildInfo } from "./packagerApi"
import { PlatformPackager } from "./platformPackager"

function isElectronCompileUsed(info: BuildInfo): boolean {
export function isElectronCompileUsed(info: BuildInfo): boolean {
const depList = [(<any>info.metadata).devDependencies, info.metadata.dependencies]
if (info.isTwoPackageJsonProjectLayoutUsed) {
depList.push((<any>info.devMetadata).devDependencies)
@@ -17,19 +15,17 @@ function isElectronCompileUsed(info: BuildInfo): boolean {

for (const deps of depList) {
if (deps != null && "electron-compile" in deps) {
log("electron-compile detected — files will be compiled")
return true
}
}

return false
}

export async function createTransformer(projectDir: string, srcDir: string, packager: PlatformPackager<any>): Promise<FileTransformer> {
const extraMetadata = packager.packagerOptions.extraMetadata
export async function createTransformer(srcDir: string, extraMetadata: any): Promise<FileTransformer> {
const mainPackageJson = path.join(srcDir, "package.json")
const defaultTransformer: FileTransformer = file => {

return file => {
if (file === mainPackageJson) {
return modifyMainPackageJson(file, extraMetadata)
}
@@ -42,43 +38,17 @@ export async function createTransformer(projectDir: string, srcDir: string, pack
return null
}
}
}

export interface CompilerHost {
compile(file: string): any

return isElectronCompileUsed(packager.info) ? await createElectronCompileTransformer(projectDir, defaultTransformer) : defaultTransformer
saveConfiguration(): Promise<any>
}

async function createElectronCompileTransformer(projectDir: string, defaultTransformer: FileTransformer) {
export function createElectronCompilerHost(projectDir: string, cacheDir: string): Promise<CompilerHost> {
const electronCompilePath = path.join(projectDir, "node_modules", "electron-compile", "lib")
const CompilerHost = require(path.join(electronCompilePath, "compiler-host")).default
const compilerHost = await require(path.join(electronCompilePath, "config-parser")).createCompilerHostFromProjectRoot(projectDir)
return async (file: string) => {
const defaultResult = defaultTransformer(file)
if (defaultResult != null) {
return await defaultResult
}

if (file.includes("/node_modules/") || file.includes("/bower_components/")) {
return null
}

const hashInfo = await compilerHost.fileChangeCache.getHashForPath(file)

if (CompilerHost.shouldPassthrough(hashInfo)) {
return null
}

// we don't use @paulcbetts/mime-types to lookup mime-type because it doesn't any value except size (@develar 20.03.17)
// as we already depends on mime module (github publisher)
// https://github.com/electron/electron-compile/pull/148#issuecomment-266669293
const type = mime.lookup(file)
const compiler = type == null ? null : compilerHost.compilersByMimeType[type]
if (compiler == null) {
return null
}

const cache = compilerHost.cachesForCompilers.get(compiler)
const result = await cache.getOrFetch(file, (file: string, hashInfo: any) => compilerHost.compileUncached(file, hashInfo, compiler))
return result.code || result.binaryData
}
return require(path.join(electronCompilePath, "config-parser")).createCompilerHostFromProjectRoot(projectDir, cacheDir)
}

function cleanupPackageJson(data: any): any {
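A hedged sketch of how the reworked createTransformer might be driven; the module path, srcDir, and extraMetadata value are assumptions. Per the diff, the returned FileTransformer rewrites the app's own package.json (with extraMetadata applied) and returns null for files it leaves untouched, while electron-compile handling now lives behind the separate createElectronCompilerHost used by asarUtil.ts.

```ts
// Sketch only: exercising the transformer returned by createTransformer.
import * as path from "path"
import { createTransformer } from "electron-builder/out/fileTransformer"  // assumed build output path

async function example(srcDir: string) {
  const transformer = await createTransformer(srcDir, {version: "1.2.3-beta.1"})
  // The app's main package.json comes back rewritten (extraMetadata deep-assigned in);
  // files the transformer does not care about yield null and are copied verbatim.
  const result = await transformer(path.join(srcDir, "package.json"))
  console.log(result == null ? "copied as-is" : "rewritten package.json")
}
```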