Skip to content

Commit

Permalink
refactor: remove total file size from blockMap — no need since variab…
Browse files Browse the repository at this point in the history
…le block size is used
  • Loading branch information
develar committed Oct 18, 2017
1 parent b0fa409 commit eff6a8d
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 10 deletions.
7 changes: 3 additions & 4 deletions packages/app-package-builder/src/blockMap.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import BluebirdPromise from "bluebird-lst"
import { hashFile } from "builder-util"
import { PackageFileInfo } from "builder-util-runtime"
import { BlockMap, SIGNATURE_HEADER_SIZE } from "builder-util-runtime/out/blockMapApi"
import { BlockMap, SIGNATURE_HEADER_SIZE, BlockMapFile } from "builder-util-runtime/out/blockMapApi"
import { close, open, stat, write, writeFile } from "fs-extra-p"
import { safeDump } from "js-yaml"
import { Archive } from "./Archive"
Expand Down Expand Up @@ -47,7 +47,7 @@ async function appendBlockMapData(blockMap: BlockMap, archiveFile: string, fd: n
// lzma doesn't make a lot of sense (151 KB lzma vs 156 KB deflate) for small text file where most of the data are unique strings (encoded checksums)
// protobuf size — BlockMap size: 153104, compressed: 151256 So, it means that it doesn't make sense - better to use deflate instead of complicating (another runtime dependency (google-protobuf), proto files and so on)
// size encoding in a form where next value is a relative to previous doesn't make sense (zero savings in tests), since in our case next size can be less than previous (so, int will be negative and `-` symbol will be added)
// sha2556 secure hash is not required, md5 collision-resistance is good for our purpose, secure hash algo not required, in any case sha512 checksum is checked for the whole file. And size of matched block is checked in addition to.
// sha256 secure hash is not required, md5 collision-resistance is good for our purpose, secure hash algorithm not required, in any case sha512 checksum is checked for the whole file. And the size of a matched block is checked in addition.
const blockMapDataString = safeDump(blockMap, {
indent: 0,
flowLevel: 0,
Expand Down Expand Up @@ -160,7 +160,7 @@ async function doComputeBlockMap(files: Array<SubFileDescriptor>, fd: number): P
}

const stats: Array<string> = []
const blocks = await BluebirdPromise.map(files, async file => {
const blocks = await BluebirdPromise.map(files, async (file): Promise<BlockMapFile> => {
const chunker = new ContentDefinedChunker()
const blocks = await chunker.computeChunks(fd, file.dataStart, file.dataEnd, file.name)

Expand All @@ -171,7 +171,6 @@ async function doComputeBlockMap(files: Array<SubFileDescriptor>, fd: number): P
return {
name: file.name.replace(/\\/g, "/"),
offset: file.dataStart,
size: file.dataEnd - file.dataStart,
...blocks,
}
}, {concurrency: 2})
Expand Down
1 change: 0 additions & 1 deletion packages/builder-util-runtime/src/blockMapApi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ export interface BlockMap {
export interface BlockMapFile extends FileChunks {
name: string
offset: number
size: number
}

export async function readBlockMapDataFromAppImage(file: string) {
Expand Down
3 changes: 1 addition & 2 deletions packages/electron-builder/src/util/NodeModuleCopyHelper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -70,8 +70,7 @@ export class NodeModuleCopyHelper {
const childNames = await readdir(dirPath)
childNames.sort()

const isTopLevel = !dirPath.includes(path.sep, dep.path.length + 1)

const isTopLevel = dirPath === dep.path
const dirs: Array<string> = []
// our handler is async, but we should add sorted files, so, we add file to result not in the mapper, but after map
const sortedFilePaths = await BluebirdPromise.map(childNames, name => {
Expand Down
5 changes: 2 additions & 3 deletions packages/electron-updater/src/differentialPackage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -214,14 +214,13 @@ export class DifferentialDownloader {
const operations: Array<Operation> = []
for (const blockMapFile of newBlockMap.files) {
const name = blockMapFile.name
const oldEntry = blockMapFile.size === 0 ? null : oldEntryMap.get(name)
// block map doesn't contain empty files, but we check this case just to be sure
const oldEntry = oldEntryMap.get(name)
if (oldEntry == null) {
// new file
operations.push({
kind: OperationKind.DOWNLOAD,
start: blockMapFile.offset,
end: blockMapFile.size - blockMapFile.offset,
end: blockMapFile.offset + blockMapFile.sizes.reduce((accumulator, currentValue) => accumulator + currentValue),
})
continue
}
Expand Down

0 comments on commit eff6a8d

Please sign in to comment.