Commit

fix: Unable to build package because of asarUnpack
BREAKING CHANGE: asar.unpackDir, asar.unpack, asar-unpack-dir, asar-unpack were removed — please use build.asarUnpack instead

Closes #937
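
For reference, a minimal sketch of the migration this breaking change asks for, assuming the electron-builder configuration lives in the "build" section of package.json (the module name and glob pattern below are purely illustrative):

{
  "build": {
    "asar": true,
    "asarUnpack": [
      "**/node_modules/hypothetical-native-module/**/*"
    ]
  }
}

The removed forms (asar-unpack, asar-unpack-dir, and the unpack/unpackDir properties of an asar object) now fail the build with an error instead of a deprecation warning, as the platformPackager.ts diff below shows.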
develar committed Nov 26, 2016
1 parent a55c573 commit e3cfa8e
Showing 8 changed files with 106 additions and 115 deletions.
91 changes: 38 additions & 53 deletions src/asarUtil.ts
@@ -1,13 +1,21 @@
import { AsarFileInfo, listPackage, statFile, AsarOptions } from "asar-electron-builder"
import { statOrNull, debug } from "./util/util"
import { statOrNull, debug, isCi } from "./util/util"
import {
lstat, readdir, readFile, Stats, createWriteStream, ensureDir, createReadStream, readJson,
writeFile, realpath
lstat,
readdir,
readFile,
Stats,
createWriteStream,
ensureDir,
createReadStream,
readJson,
writeFile,
realpath,
link
} from "fs-extra-p"
import BluebirdPromise from "bluebird-lst-c"
import * as path from "path"
import { log } from "./util/log"
import { Minimatch } from "minimatch"
import { deepAssign } from "./util/deepAssign"
import { Filter } from "./util/filter"

@@ -71,10 +79,6 @@ export async function createAsarArchive(src: string, resourcesPath: string, opti
await new AsarPackager(src, resourcesPath, options, unpackPattern).pack(filter)
}

function isUnpackDir(path: string, pattern: Minimatch, rawPattern: string): boolean {
return path.startsWith(rawPattern) || pattern.match(path)
}

function addValue(map: Map<string, Array<string>>, key: string, value: string) {
let list = map.get(key)
if (list == null) {
@@ -193,19 +197,15 @@ class AsarPackager

async createPackageFromFiles(files: Array<string>, metadata: Map<string, Stats>) {
// search auto unpacked dir
const autoUnpackDirs = new Set<string>()
const unpackedDirs = new Set<string>()
const unpackedDest = `${this.outFile}.unpacked`
const fileIndexToModulePackageData = new Map<number, BluebirdPromise<string>>()
await ensureDir(path.dirname(this.outFile))

if (this.options.smartUnpack !== false) {
await this.detectUnpackedDirs(files, metadata, autoUnpackDirs, unpackedDest, fileIndexToModulePackageData)
await this.detectUnpackedDirs(files, metadata, unpackedDirs, unpackedDest, fileIndexToModulePackageData)
}

const unpackDir = this.options.unpackDir == null ? null : new Minimatch(this.options.unpackDir)
const unpack = this.options.unpack == null ? null : new Minimatch(this.options.unpack, {
matchBase: true
})

const createDirPromises: Array<Promise<any>> = [ensureDir(path.dirname(this.outFile))]
const copyPromises: Array<Promise<any>> = []
const mainPackageJson = path.join(this.src, "package.json")
for (let i = 0, n = files.length; i < n; i++) {
@@ -214,12 +214,6 @@ class AsarPackager
if (stat.isFile()) {
const fileParent = path.dirname(file)
const dirNode = this.fs.searchNodeFromPath(fileParent)

if (dirNode.unpacked && createDirPromises.length > 0) {
await BluebirdPromise.all(createDirPromises)
createDirPromises.length = 0
}

const packageDataPromise = fileIndexToModulePackageData.get(i)
let newData: any | null = null
if (packageDataPromise == null) {
@@ -234,19 +228,12 @@ class AsarPackager
const fileSize = newData == null ? stat.size : Buffer.byteLength(newData)
const node = this.fs.searchNodeFromPath(file)
node.size = fileSize
if (dirNode.unpacked || (this.unpackPattern != null && this.unpackPattern(file, stat)) || (unpack != null && unpack.match(file))) {
if (dirNode.unpacked || (this.unpackPattern != null && this.unpackPattern(file, stat))) {
node.unpacked = true

if (!dirNode.unpacked) {
const promise = ensureDir(path.join(unpackedDest, path.relative(this.src, fileParent)))
if (createDirPromises.length === 0) {
await createDirPromises
}
else {
createDirPromises.push(promise)
await BluebirdPromise.all(createDirPromises)
createDirPromises.length = 0
}
if (!dirNode.unpacked && !unpackedDirs.has(fileParent)) {
unpackedDirs.add(fileParent)
await ensureDir(path.join(unpackedDest, path.relative(this.src, fileParent)))
}

const unpackedFile = path.join(unpackedDest, path.relative(this.src, file))
@@ -276,24 +263,18 @@ class AsarPackager
}
else if (stat.isDirectory()) {
let unpacked = false
if (autoUnpackDirs.has(file)) {
if (unpackedDirs.has(file)) {
unpacked = true
}
else {
unpacked = unpackDir != null && isUnpackDir(path.relative(this.src, file), unpackDir, this.options.unpackDir!)
if (unpacked) {
createDirPromises.push(ensureDir(path.join(unpackedDest, path.relative(this.src, file))))
}
else {
for (let d of autoUnpackDirs) {
if (file.length > (d.length + 2) && file[d.length] === path.sep && file.startsWith(d)) {
unpacked = true
autoUnpackDirs.add(file)
// not all dirs are marked as unpacked after the first iteration, because a node module dir can be marked as unpacked only after its content has been processed
// e.g. node-notifier/example/advanced.js is processed first, but the module is marked as unpacked only when vendor/terminal-notifier.app is processed
createDirPromises.push(ensureDir(path.join(unpackedDest, path.relative(this.src, file))))
break
}
for (const dir of unpackedDirs) {
if (file.length > (dir.length + 2) && file[dir.length] === path.sep && file.startsWith(dir)) {
unpacked = true
unpackedDirs.add(file)
// not all dirs are marked as unpacked after the first iteration, because a node module dir can be marked as unpacked only after its content has been processed
// e.g. node-notifier/example/advanced.js is processed first, but the module is marked as unpacked only when vendor/terminal-notifier.app is processed
await ensureDir(path.join(unpackedDest, path.relative(this.src, file)))
break
}
}
}
@@ -362,7 +343,7 @@ class AsarPackager
})
}

async order(filenames: Array<string>) {
private async order(filenames: Array<string>) {
const orderingFiles = (await readFile(this.options.ordering!, "utf8")).split("\n").map(line => {
if (line.indexOf(":") !== -1) {
line = line.split(":").pop()!
@@ -375,7 +356,7 @@ class AsarPackager
})

const ordering: Array<string> = []
for (let file of orderingFiles) {
for (const file of orderingFiles) {
let pathComponents = file.split(path.sep)
let str = this.src
for (let pathComponent of pathComponents) {
@@ -387,12 +368,12 @@ class AsarPackager
const filenamesSorted: Array<string> = []
let missing = 0
const total = filenames.length
for (let file of ordering) {
for (const file of ordering) {
if (!filenamesSorted.includes(file) && filenames.includes(file)) {
filenamesSorted.push(file)
}
}
for (let file of filenames) {
for (const file of filenames) {
if (!filenamesSorted.includes(file)) {
filenamesSorted.push(file)
missing += 1
Expand All @@ -406,7 +387,7 @@ class AsarPackager {
function cleanupPackageJson(data: any): any {
try {
let changed = false
for (let prop of Object.getOwnPropertyNames(data)) {
for (const prop of Object.getOwnPropertyNames(data)) {
if (prop[0] === "_" || prop === "dist" || prop === "gitHead" || prop === "keywords") {
delete data[prop]
changed = true
@@ -459,6 +440,10 @@ export async function checkFileInArchive(asarFile: string, relativeFile: string,
}

function copyFile(src: string, dest: string, stats: Stats) {
if (process.platform != "win32" && (isCi || process.env.USE_HARD_LINKS === "true")) {
return link(src, dest)
}

return new BluebirdPromise(function (resolve, reject) {
const readStream = createReadStream(src)
const writeStream = createWriteStream(dest, {mode: stats.mode})
5 changes: 5 additions & 0 deletions src/fileMatcher.ts
@@ -30,6 +30,11 @@ export class FileMatcher
this.patterns.push(pattern)
}

addAllPattern() {
// must be first, see minimatchAll implementation
this.patterns.unshift("**/*")
}

isEmpty() {
return this.patterns.length === 0
}
52 changes: 30 additions & 22 deletions src/platformPackager.ts
@@ -18,6 +18,7 @@ import { PublishConfiguration, GithubOptions, BintrayOptions, GenericServerOptio
import { getRepositoryInfo } from "./repositoryInfo"
import { dependencies } from "./yarn"
import { Target } from "./targets/targetFactory"
import { deepAssign } from "./util/deepAssign"
import EventEmitter = NodeJS.EventEmitter

export interface PackagerOptions {
@@ -198,7 +199,7 @@ export abstract class PlatformPackager<DC extends PlatformSpecificBuildOptions>
const patterns = this.getFileMatchers("files", appDir, path.join(resourcesPath, "app"), false, fileMatchOptions, platformSpecificBuildOptions)
let defaultMatcher = patterns == null ? new FileMatcher(appDir, path.join(resourcesPath, "app"), fileMatchOptions) : patterns[0]
if (defaultMatcher.isEmpty() || defaultMatcher.containsOnlyIgnore()) {
defaultMatcher.addPattern("**/*")
defaultMatcher.addAllPattern()
}
else {
defaultMatcher.addPattern("package.json")
@@ -288,34 +289,41 @@ export abstract class PlatformPackager<DC extends PlatformSpecificBuildOptions>
}

private computeAsarOptions(customBuildOptions: DC): AsarOptions | null {
let result = this.devMetadata.build.asar
let platformSpecific = customBuildOptions.asar
if (platformSpecific != null) {
result = platformSpecific
}

if (result === false) {
return null
function errorMessage(name: string) {
return `${name} is deprecated and not supported — please use build.asarUnpack`
}

const buildMetadata = <any>this.devMetadata.build
if (buildMetadata["asar-unpack"] != null) {
warn("asar-unpack is deprecated, please set as asar.unpack")
throw new Error(errorMessage("asar-unpack"))
}
if (buildMetadata["asar-unpack-dir"] != null) {
warn("asar-unpack-dir is deprecated, please set as asar.unpackDir")
throw new Error(errorMessage("asar-unpack-dir"))
}

const platformSpecific = customBuildOptions.asar
const result = platformSpecific == null ? this.devMetadata.build.asar : platformSpecific

if (result === false) {
return null
}

const defaultOptions = {
extraMetadata: this.options.extraMetadata,
}

if (result == null || result === true) {
result = {
unpack: buildMetadata["asar-unpack"],
unpackDir: buildMetadata["asar-unpack-dir"]
}
return defaultOptions
}

return Object.assign(result, {
extraMetadata: this.options.extraMetadata
})
if ((<any>result).unpackDir != null) {
throw new Error(errorMessage("asar.unpackDir"))
}
if ((<any>result).unpack != null) {
throw new Error(errorMessage("asar.unpack"))
}

return deepAssign({}, result, defaultOptions)
}

private doCopyExtraFiles(patterns: Array<FileMatcher> | null): Promise<any> {
Expand All @@ -324,11 +332,11 @@ export abstract class PlatformPackager<DC extends PlatformSpecificBuildOptions>
}
else {
const promises: Array<Promise<any>> = []
for (let i = 0; i < patterns.length; i++) {
if (patterns[i].isEmpty()) {
patterns[i].addPattern("**/*")
for (const pattern of patterns) {
if (pattern.isEmpty() || pattern.containsOnlyIgnore()) {
pattern.addAllPattern()
}
promises.push(copyFiltered(patterns[i].from, patterns[i].to, patterns[i].createFilter(), this.platform === Platform.WINDOWS))
promises.push(copyFiltered(pattern.from, pattern.to, pattern.createFilter(), this.platform === Platform.WINDOWS))
}
return BluebirdPromise.all(promises)
}
3 changes: 2 additions & 1 deletion test/jestSetup.js
@@ -36,4 +36,5 @@ test.ifDevOrLinuxCi = !isCi || process.platform === "linux" ? test : skip
test.ifWinCi = isCi && isWindows ? test : skip

delete process.env.CSC_NAME
process.env.CSC_IDENTITY_AUTO_DISCOVERY = "false"
process.env.USE_HARD_LINKS = "true"
56 changes: 25 additions & 31 deletions test/src/extraMetadataTest.ts
@@ -1,11 +1,12 @@
import { assertPack, modifyPackageJson, appTwoThrows } from "./helpers/packTester"
import { modifyPackageJson, appTwoThrows, app, appTwo } from "./helpers/packTester"
import { Platform, DIR_TARGET } from "out"
import { assertThat } from "./helpers/fileAssert"
import * as path from "path"
import { extractFile } from "asar-electron-builder"

test.ifDevOrLinuxCi("extra metadata", () => {
const extraMetadata = {
test.ifDevOrLinuxCi("extra metadata", app({
targets: Platform.LINUX.createTarget(DIR_TARGET),
extraMetadata: {
foo: {
bar: 12,
},
@@ -14,46 +15,39 @@ test.ifDevOrLinuxCi("extra metadata", () => {
executableName: "new-name"
}
}
}
return assertPack("test-app-one", {
targets: Platform.LINUX.createTarget(DIR_TARGET),
extraMetadata: extraMetadata,
}, {
projectDirCreated: projectDir => modifyPackageJson(projectDir, data => {
data.foo = {
bar: 42,
},
}, {
projectDirCreated: projectDir => modifyPackageJson(projectDir, data => {
data.foo = {
bar: 42,
existingProp: 22,
}
}),
packed: async context => {
await assertThat(path.join(context.getContent(Platform.LINUX), "new-name")).isFile()
assertThat(JSON.parse(extractFile(path.join(context.getResources(Platform.LINUX), "app.asar"), "package.json").toString())).hasProperties({
foo: {
bar: 12,
existingProp: 22,
}
}),
packed: async context => {
await assertThat(path.join(context.getContent(Platform.LINUX), "new-name")).isFile()
assertThat(JSON.parse(extractFile(path.join(context.getResources(Platform.LINUX), "app.asar"), "package.json").toString())).hasProperties({
foo: {
bar: 12,
existingProp: 22,
}
})
}
})
})
})
}
}))

test.ifDevOrLinuxCi("extra metadata - two", () => {
const extraMetadata = {
test.ifDevOrLinuxCi("extra metadata - two", appTwo({
targets: Platform.LINUX.createTarget(DIR_TARGET),
extraMetadata: {
build: {
linux: {
executableName: "new-name"
}
}
}
return assertPack("test-app", {
targets: Platform.LINUX.createTarget(DIR_TARGET),
extraMetadata: extraMetadata,
},
}, {
packed: async context => {
await assertThat(path.join(context.getContent(Platform.LINUX), "new-name")).isFile()
}
})
})
}))

test.ifMac("extra metadata - override icon", appTwoThrows(/ENOENT: no such file or directory/, {
targets: Platform.MAC.createTarget(DIR_TARGET),
