diff --git a/CHANGES.md b/CHANGES.md index 3b575ba..5f578f9 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,8 @@ +## 2.0.1: + +* Fixed a bug where small files requiring a 64-bit offset (but not size) were corrupted. +* Never use 64-bit sizes in data descriptor for small files, even at large offsets. + ## 2.0.0: * breaking: now targets ES2020 because we need BigInts, because… diff --git a/src/zip.ts b/src/zip.ts index 48368ac..bba889b 100644 --- a/src/zip.ts +++ b/src/zip.ts @@ -7,12 +7,13 @@ const fileHeaderSignature = 0x504b_0304, fileHeaderLength = 30 const descriptorSignature = 0x504b_0708, descriptorLength = 16 const centralHeaderSignature = 0x504b_0102, centralHeaderLength = 46 const endSignature = 0x504b_0506, endLength = 22 -const zip64HeaderLength = 28 const zip64endRecordSignature = 0x504b_0606, zip64endRecordLength = 56 const zip64endLocatorSignature = 0x504b_0607, zip64endLocatorLength = 20 export type ForAwaitable<T> = AsyncIterable<T> | Iterable<T> +type Zip64FieldLength = 0 | 12 | 28 + export async function* loadFiles(files: ForAwaitable<ZipFileDescription>) { const centralRecord: Uint8Array[] = [] let offset = 0n @@ -24,18 +25,18 @@ export async function* loadFiles(files: ForAwaitable<ZipFileDescription>) { yield fileHeader(file) yield file.encodedName yield* fileData(file) - const fileNeedsZip64 = file.uncompressedSize! >= 0xffffffffn || offset >= 0xffffffffn - yield dataDescriptor(file, fileNeedsZip64) - - centralRecord.push(centralHeader(file, offset, fileNeedsZip64)) + const bigFile = file.uncompressedSize! 
>= 0xffffffffn + const bigOffset = offset >= 0xffffffffn + const zip64HeaderLength = (+bigOffset * 12 | +bigFile * 28) as Zip64FieldLength + yield dataDescriptor(file, bigFile) + + centralRecord.push(centralHeader(file, offset, zip64HeaderLength)) centralRecord.push(file.encodedName) - if (fileNeedsZip64) { - centralRecord.push(zip64ExtraField(file, offset)) - offset += 8n - } + if (zip64HeaderLength) centralRecord.push(zip64ExtraField(file, offset, zip64HeaderLength)) + if (bigFile) offset += 8n // because the data descriptor will have 64-bit sizes fileCount++ offset += BigInt(fileHeaderLength + descriptorLength + file.encodedName.length) + file.uncompressedSize! - archiveNeedsZip64 ||= fileNeedsZip64 + archiveNeedsZip64 ||= bigFile } // write central repository @@ -123,7 +124,7 @@ export function dataDescriptor(file: ZipFileDescription, needsZip64: boolean) { return makeUint8Array(header) } -export function centralHeader(file: ZipFileDescription, offset: bigint, needsZip64: boolean) { +export function centralHeader(file: ZipFileDescription, offset: bigint, zip64HeaderLength: Zip64FieldLength = 0) { const header = makeBuffer(centralHeaderLength) header.setUint32(0, centralHeaderSignature) header.setUint32(4, 0x2d03_2d_00) // UNIX app version 4.5 | ZIP version 4.5 @@ -134,7 +135,7 @@ export function centralHeader(file: ZipFileDescription, offset: bigint, needsZip header.setUint32(20, clampInt32(file.uncompressedSize!), true) header.setUint32(24, clampInt32(file.uncompressedSize!), true) header.setUint16(28, file.encodedName.length, true) - header.setUint16(30, needsZip64 ? 
zip64HeaderLength : 0, true) + header.setUint16(30, zip64HeaderLength, true) // useless disk fields = zero (4 bytes) // useless attributes = zero (4 bytes) header.setUint16(40, 0o100664, true) // UNIX regular file, permissions 664 @@ -142,12 +143,14 @@ export function centralHeader(file: ZipFileDescription, offset: bigint, needsZip return makeUint8Array(header) } -export function zip64ExtraField(file: ZipFileDescription, offset: bigint) { +export function zip64ExtraField(file: ZipFileDescription, offset: bigint, zip64HeaderLength: Exclude<Zip64FieldLength, 0>) { const header = makeBuffer(zip64HeaderLength) header.setUint16(0, 1, true) header.setUint16(2, zip64HeaderLength - 4, true) - header.setBigUint64(4, file.uncompressedSize!, true) - header.setBigUint64(12, file.uncompressedSize!, true) - header.setBigUint64(20, offset, true) + if (zip64HeaderLength & 16) { + header.setBigUint64(4, file.uncompressedSize!, true) + header.setBigUint64(12, file.uncompressedSize!, true) + } + header.setBigUint64(zip64HeaderLength - 8, offset, true) return makeUint8Array(header) } diff --git a/test/crc32.test.ts b/test/crc32.test.ts index d28c4e1..4e05f35 100644 --- a/test/crc32.test.ts +++ b/test/crc32.test.ts @@ -1,4 +1,4 @@ -import { assertEquals } from "https://deno.land/std/testing/asserts.ts" +import { assertEquals } from "https://deno.land/std@0.132.0/testing/asserts.ts" import { crc32, memory } from "../src/crc32.ts" const table = await Deno.readFile("./test/table.array") diff --git a/test/datetime.test.ts b/test/datetime.test.ts index 7684eba..24921c1 100644 --- a/test/datetime.test.ts +++ b/test/datetime.test.ts @@ -1,4 +1,4 @@ -import { assertEquals } from "https://deno.land/std/testing/asserts.ts" +import { assertEquals } from "https://deno.land/std@0.132.0/testing/asserts.ts" import { formatDOSDateTime } from "../src/datetime.ts" import { makeBuffer } from "../src/utils.ts" diff --git a/test/zip.test.ts b/test/zip.test.ts index 1231be4..e4a48aa 100644 --- a/test/zip.test.ts +++ 
b/test/zip.test.ts @@ -1,4 +1,5 @@ -import { assertEquals, assertStrictEquals } from "https://deno.land/std/testing/asserts.ts" +import { assertEquals, assertStrictEquals } from "https://deno.land/std@0.132.0/testing/asserts.ts" +import { Buffer } from "https://deno.land/std@0.132.0/io/buffer.ts" import { fileHeader, fileData, dataDescriptor, centralHeader, zip64ExtraField } from "../src/zip.ts" import type { ZipFileDescription } from "../src/input.ts" @@ -19,7 +20,7 @@ Deno.test("the ZIP fileHeader function makes file headers", () => { Deno.test("the ZIP fileData function yields all the file's data", async () => { const file = {...baseFile} - const actual = new Deno.Buffer() + const actual = new Buffer() for await (const chunk of fileData(file)) actual.writeSync(chunk) assertEquals(actual.bytes({copy: false}), zipSpec) }) @@ -50,7 +51,7 @@ Deno.test("the ZIP dataDescriptor function makes ZIP64 data descriptors", () => Deno.test("the ZIP centralHeader function makes central record file headers", () => { const file = {...baseFile, uncompressedSize: 0x10203040n, crc: 0x12345678} const offset = 0x01020304n - const actual = centralHeader(file, offset, false) + const actual = centralHeader(file, offset, 0) const expected = BufferFromHex("504b01022d032d000800000000109a4e7856341240302010403020100b0000000000000000000000b48104030201") assertEquals(actual, expected) }) @@ -58,7 +59,7 @@ Deno.test("the ZIP centralHeader function makes central record file headers", () Deno.test("the ZIP centralHeader function makes ZIP64 central record file headers", () => { const file = {...baseFile, uncompressedSize: 0x110203040n, crc: 0x12345678} const offset = 0x101020304n - const actual = centralHeader(file, offset, true) + const actual = centralHeader(file, offset, 28) const expected = BufferFromHex("504b01022d032d000800000000109a4e78563412ffffffffffffffff0b001c000000000000000000b481ffffffff") assertEquals(actual, expected) }) @@ -66,7 +67,7 @@ Deno.test("the ZIP centralHeader function 
makes ZIP64 central record file header Deno.test("the ZIP zip64ExtraField function makes Zip64 extra fields", () => { const file = {...baseFile, uncompressedSize: 0x10203040n, crc: 0x12345678} const offset = 0x01020304n - const actual = zip64ExtraField(file, offset) + const actual = zip64ExtraField(file, offset, 28) const expected = BufferFromHex("01001800403020100000000040302010000000000403020100000000") assertEquals(actual, expected) })