From f27da188637b7b9ab84db35d73bf3d2ef7fb085d Mon Sep 17 00:00:00 2001 From: Michi Mutsuzaki Date: Sat, 9 Mar 2024 06:59:59 +0000 Subject: [PATCH] Don't depend on github.com/mholt/archiver CNCF license check complains about the usage of github.com/xi2/xz [^1], which cilium-cli indirectly depends on via github.com/mholt/archiver. Use the archive and compress packages Go standard library instead to: - Untar Cilium and Tetragon bugtool outputs. - Zip the sysdump output directory. [^1]: https://github.com/cilium/cilium/actions/runs/8191544402/job/22401012795?pr=31077 Signed-off-by: Michi Mutsuzaki --- go.mod | 9 - go.sum | 25 - sysdump/sysdump.go | 132 +- sysdump/sysdump_test.go | 13 + vendor/github.com/andybalholm/brotli/LICENSE | 19 - .../github.com/andybalholm/brotli/README.md | 7 - .../andybalholm/brotli/backward_references.go | 185 - .../brotli/backward_references_hq.go | 796 - .../github.com/andybalholm/brotli/bit_cost.go | 436 - .../andybalholm/brotli/bit_reader.go | 266 - .../andybalholm/brotli/block_splitter.go | 144 - .../brotli/block_splitter_command.go | 434 - .../brotli/block_splitter_distance.go | 433 - .../brotli/block_splitter_literal.go | 433 - .../andybalholm/brotli/brotli_bit_stream.go | 1283 - .../github.com/andybalholm/brotli/cluster.go | 30 - .../andybalholm/brotli/cluster_command.go | 326 - .../andybalholm/brotli/cluster_distance.go | 326 - .../andybalholm/brotli/cluster_literal.go | 326 - .../github.com/andybalholm/brotli/command.go | 254 - .../andybalholm/brotli/compress_fragment.go | 685 - .../brotli/compress_fragment_two_pass.go | 595 - .../andybalholm/brotli/constants.go | 77 - .../github.com/andybalholm/brotli/context.go | 2176 - .../github.com/andybalholm/brotli/decode.go | 2632 - .../andybalholm/brotli/dictionary.go | 122890 --------------- .../andybalholm/brotli/dictionary_hash.go | 32779 ---- .../github.com/andybalholm/brotli/encode.go | 1158 
- .../andybalholm/brotli/encoder_dict.go | 22 - .../andybalholm/brotli/entropy_encode.go | 592 - .../brotli/entropy_encode_static.go | 4396 - .../github.com/andybalholm/brotli/fast_log.go | 296 - .../andybalholm/brotli/find_match_length.go | 45 - vendor/github.com/andybalholm/brotli/h10.go | 287 - vendor/github.com/andybalholm/brotli/h5.go | 214 - vendor/github.com/andybalholm/brotli/h6.go | 216 - vendor/github.com/andybalholm/brotli/hash.go | 344 - .../andybalholm/brotli/hash_composite.go | 93 - .../brotli/hash_forgetful_chain.go | 253 - .../brotli/hash_longest_match_quickly.go | 214 - .../andybalholm/brotli/hash_rolling.go | 169 - .../andybalholm/brotli/histogram.go | 226 - vendor/github.com/andybalholm/brotli/http.go | 192 - .../github.com/andybalholm/brotli/huffman.go | 653 - .../andybalholm/brotli/literal_cost.go | 182 - .../github.com/andybalholm/brotli/memory.go | 66 - .../andybalholm/brotli/metablock.go | 574 - .../andybalholm/brotli/metablock_command.go | 165 - .../andybalholm/brotli/metablock_distance.go | 165 - .../andybalholm/brotli/metablock_literal.go | 165 - .../github.com/andybalholm/brotli/params.go | 37 - .../github.com/andybalholm/brotli/platform.go | 103 - .../github.com/andybalholm/brotli/prefix.go | 30 - .../andybalholm/brotli/prefix_dec.go | 723 - .../github.com/andybalholm/brotli/quality.go | 196 - .../github.com/andybalholm/brotli/reader.go | 100 - .../andybalholm/brotli/ringbuffer.go | 134 - vendor/github.com/andybalholm/brotli/state.go | 295 - .../andybalholm/brotli/static_dict.go | 666 - .../andybalholm/brotli/static_dict_lut.go | 75094 --------- .../andybalholm/brotli/symbol_list.go | 22 - .../andybalholm/brotli/transform.go | 641 - .../andybalholm/brotli/utf8_util.go | 71 - vendor/github.com/andybalholm/brotli/util.go | 7 - .../andybalholm/brotli/write_bits.go | 94 - .../github.com/andybalholm/brotli/writer.go 
| 118 - vendor/github.com/dsnet/compress/.travis.yml | 36 - vendor/github.com/dsnet/compress/LICENSE.md | 24 - vendor/github.com/dsnet/compress/README.md | 75 - vendor/github.com/dsnet/compress/api.go | 74 - vendor/github.com/dsnet/compress/bzip2/bwt.go | 110 - .../github.com/dsnet/compress/bzip2/common.go | 110 - .../dsnet/compress/bzip2/fuzz_off.go | 13 - .../dsnet/compress/bzip2/fuzz_on.go | 77 - .../compress/bzip2/internal/sais/common.go | 28 - .../compress/bzip2/internal/sais/sais_byte.go | 661 - .../compress/bzip2/internal/sais/sais_int.go | 661 - .../dsnet/compress/bzip2/mtf_rle2.go | 131 - .../github.com/dsnet/compress/bzip2/prefix.go | 374 - .../github.com/dsnet/compress/bzip2/reader.go | 274 - .../github.com/dsnet/compress/bzip2/rle1.go | 101 - .../github.com/dsnet/compress/bzip2/writer.go | 307 - .../dsnet/compress/internal/common.go | 107 - .../dsnet/compress/internal/debug.go | 12 - .../dsnet/compress/internal/errors/errors.go | 120 - .../dsnet/compress/internal/gofuzz.go | 12 - .../dsnet/compress/internal/prefix/debug.go | 159 - .../dsnet/compress/internal/prefix/decoder.go | 136 - .../dsnet/compress/internal/prefix/encoder.go | 66 - .../dsnet/compress/internal/prefix/prefix.go | 400 - .../dsnet/compress/internal/prefix/range.go | 93 - .../dsnet/compress/internal/prefix/reader.go | 335 - .../dsnet/compress/internal/prefix/wrap.go | 146 - .../dsnet/compress/internal/prefix/writer.go | 166 - .../dsnet/compress/internal/release.go | 21 - vendor/github.com/dsnet/compress/zbench.sh | 12 - vendor/github.com/dsnet/compress/zfuzz.sh | 10 - vendor/github.com/dsnet/compress/zprof.sh | 54 - vendor/github.com/dsnet/compress/ztest.sh | 54 - vendor/github.com/golang/snappy/.gitignore | 16 - vendor/github.com/golang/snappy/AUTHORS | 18 - vendor/github.com/golang/snappy/CONTRIBUTORS | 41 - 
vendor/github.com/golang/snappy/LICENSE | 27 - vendor/github.com/golang/snappy/README | 107 - vendor/github.com/golang/snappy/decode.go | 264 - .../github.com/golang/snappy/decode_amd64.s | 490 - .../github.com/golang/snappy/decode_arm64.s | 494 - vendor/github.com/golang/snappy/decode_asm.go | 15 - .../github.com/golang/snappy/decode_other.go | 115 - vendor/github.com/golang/snappy/encode.go | 289 - .../github.com/golang/snappy/encode_amd64.s | 730 - .../github.com/golang/snappy/encode_arm64.s | 722 - vendor/github.com/golang/snappy/encode_asm.go | 30 - .../github.com/golang/snappy/encode_other.go | 238 - vendor/github.com/golang/snappy/snappy.go | 98 - .../klauspost/compress/flate/deflate.go | 1017 - .../klauspost/compress/flate/dict_decoder.go | 184 - .../klauspost/compress/flate/fast_encoder.go | 193 - .../compress/flate/huffman_bit_writer.go | 1182 - .../klauspost/compress/flate/huffman_code.go | 417 - .../compress/flate/huffman_sortByFreq.go | 159 - .../compress/flate/huffman_sortByLiteral.go | 201 - .../klauspost/compress/flate/inflate.go | 793 - .../klauspost/compress/flate/inflate_gen.go | 1283 - .../klauspost/compress/flate/level1.go | 241 - .../klauspost/compress/flate/level2.go | 214 - .../klauspost/compress/flate/level3.go | 241 - .../klauspost/compress/flate/level4.go | 221 - .../klauspost/compress/flate/level5.go | 708 - .../klauspost/compress/flate/level6.go | 325 - .../compress/flate/matchlen_amd64.go | 16 - .../klauspost/compress/flate/matchlen_amd64.s | 68 - .../compress/flate/matchlen_generic.go | 33 - .../klauspost/compress/flate/regmask_amd64.go | 37 - .../klauspost/compress/flate/regmask_other.go | 40 - .../klauspost/compress/flate/stateless.go | 318 - .../klauspost/compress/flate/token.go | 379 - .../klauspost/compress/gzip/gunzip.go | 375 - .../klauspost/compress/gzip/gzip.go | 290 - .../klauspost/compress/zip/reader.go | 901 - 
.../klauspost/compress/zip/register.go | 148 - .../klauspost/compress/zip/struct.go | 392 - .../klauspost/compress/zip/writer.go | 641 - vendor/github.com/klauspost/pgzip/.gitignore | 24 - vendor/github.com/klauspost/pgzip/.travis.yml | 24 - vendor/github.com/klauspost/pgzip/GO_LICENSE | 27 - vendor/github.com/klauspost/pgzip/LICENSE | 21 - vendor/github.com/klauspost/pgzip/README.md | 135 - vendor/github.com/klauspost/pgzip/gunzip.go | 584 - vendor/github.com/klauspost/pgzip/gzip.go | 519 - .../github.com/mholt/archiver/v3/.gitignore | 10 - .../mholt/archiver/v3/.goreleaser.yml | 41 - .../github.com/mholt/archiver/v3/.prettierrc | 4 - vendor/github.com/mholt/archiver/v3/LICENSE | 21 - vendor/github.com/mholt/archiver/v3/README.md | 324 - .../github.com/mholt/archiver/v3/SECURITY.md | 15 - .../github.com/mholt/archiver/v3/archiver.go | 540 - vendor/github.com/mholt/archiver/v3/brotli.go | 55 - .../github.com/mholt/archiver/v3/build.bash | 13 - vendor/github.com/mholt/archiver/v3/bz2.go | 64 - vendor/github.com/mholt/archiver/v3/error.go | 27 - .../mholt/archiver/v3/filecompressor.go | 67 - vendor/github.com/mholt/archiver/v3/gz.go | 76 - vendor/github.com/mholt/archiver/v3/lz4.go | 63 - vendor/github.com/mholt/archiver/v3/rar.go | 446 - vendor/github.com/mholt/archiver/v3/sz.go | 51 - vendor/github.com/mholt/archiver/v3/tar.go | 659 - .../github.com/mholt/archiver/v3/tarbrotli.go | 114 - vendor/github.com/mholt/archiver/v3/tarbz2.go | 126 - vendor/github.com/mholt/archiver/v3/targz.go | 137 - vendor/github.com/mholt/archiver/v3/tarlz4.go | 129 - vendor/github.com/mholt/archiver/v3/tarsz.go | 114 - vendor/github.com/mholt/archiver/v3/tarxz.go | 119 - vendor/github.com/mholt/archiver/v3/tarzst.go | 120 - 
vendor/github.com/mholt/archiver/v3/xz.go | 58 - vendor/github.com/mholt/archiver/v3/zip.go | 711 - vendor/github.com/mholt/archiver/v3/zstd.go | 61 - vendor/github.com/nwaples/rardecode/LICENSE | 23 - vendor/github.com/nwaples/rardecode/README.md | 4 - .../github.com/nwaples/rardecode/archive.go | 309 - .../github.com/nwaples/rardecode/archive15.go | 468 - .../github.com/nwaples/rardecode/archive50.go | 475 - .../nwaples/rardecode/bit_reader.go | 119 - .../github.com/nwaples/rardecode/decode29.go | 264 - .../nwaples/rardecode/decode29_lz.go | 247 - .../nwaples/rardecode/decode29_ppm.go | 132 - .../github.com/nwaples/rardecode/decode50.go | 294 - .../nwaples/rardecode/decode_reader.go | 290 - .../nwaples/rardecode/decrypt_reader.go | 126 - .../github.com/nwaples/rardecode/filters.go | 416 - .../github.com/nwaples/rardecode/huffman.go | 208 - .../github.com/nwaples/rardecode/ppm_model.go | 1096 - vendor/github.com/nwaples/rardecode/reader.go | 376 - vendor/github.com/nwaples/rardecode/vm.go | 687 - vendor/github.com/pierrec/lz4/v4/.gitignore | 36 - vendor/github.com/pierrec/lz4/v4/LICENSE | 28 - vendor/github.com/pierrec/lz4/v4/README.md | 90 - .../pierrec/lz4/v4/internal/lz4block/block.go | 469 - .../lz4/v4/internal/lz4block/blocks.go | 88 - .../lz4/v4/internal/lz4block/decode_amd64.s | 369 - .../lz4/v4/internal/lz4block/decode_arm.s | 197 - .../lz4/v4/internal/lz4block/decode_asm.go | 9 - .../lz4/v4/internal/lz4block/decode_other.go | 108 - .../lz4/v4/internal/lz4errors/errors.go | 19 - .../lz4/v4/internal/lz4stream/block.go | 332 - .../lz4/v4/internal/lz4stream/frame.go | 200 - .../lz4/v4/internal/lz4stream/frame_gen.go | 103 - .../lz4/v4/internal/xxh32/xxh32zero.go | 212 - .../lz4/v4/internal/xxh32/xxh32zero_arm.go | 11 - .../lz4/v4/internal/xxh32/xxh32zero_arm.s | 259 - .../lz4/v4/internal/xxh32/xxh32zero_other.go | 10 
- vendor/github.com/pierrec/lz4/v4/lz4.go | 147 - vendor/github.com/pierrec/lz4/v4/options.go | 213 - .../github.com/pierrec/lz4/v4/options_gen.go | 92 - vendor/github.com/pierrec/lz4/v4/reader.go | 243 - vendor/github.com/pierrec/lz4/v4/state.go | 75 - vendor/github.com/pierrec/lz4/v4/state_gen.go | 28 - vendor/github.com/pierrec/lz4/v4/writer.go | 233 - vendor/github.com/ulikunitz/xz/.gitignore | 25 - vendor/github.com/ulikunitz/xz/LICENSE | 26 - vendor/github.com/ulikunitz/xz/README.md | 73 - vendor/github.com/ulikunitz/xz/SECURITY.md | 10 - vendor/github.com/ulikunitz/xz/TODO.md | 363 - vendor/github.com/ulikunitz/xz/bits.go | 79 - vendor/github.com/ulikunitz/xz/crc.go | 54 - vendor/github.com/ulikunitz/xz/format.go | 721 - .../github.com/ulikunitz/xz/fox-check-none.xz | Bin 96 -> 0 bytes vendor/github.com/ulikunitz/xz/fox.xz | Bin 104 -> 0 bytes .../ulikunitz/xz/internal/hash/cyclic_poly.go | 181 - .../ulikunitz/xz/internal/hash/doc.go | 14 - .../ulikunitz/xz/internal/hash/rabin_karp.go | 66 - .../ulikunitz/xz/internal/hash/roller.go | 29 - .../ulikunitz/xz/internal/xlog/xlog.go | 457 - .../github.com/ulikunitz/xz/lzma/bintree.go | 522 - vendor/github.com/ulikunitz/xz/lzma/bitops.go | 47 - .../github.com/ulikunitz/xz/lzma/breader.go | 39 - vendor/github.com/ulikunitz/xz/lzma/buffer.go | 171 - .../ulikunitz/xz/lzma/bytewriter.go | 37 - .../github.com/ulikunitz/xz/lzma/decoder.go | 277 - .../ulikunitz/xz/lzma/decoderdict.go | 128 - .../ulikunitz/xz/lzma/directcodec.go | 38 - .../github.com/ulikunitz/xz/lzma/distcodec.go | 140 - .../github.com/ulikunitz/xz/lzma/encoder.go | 268 - .../ulikunitz/xz/lzma/encoderdict.go | 149 - vendor/github.com/ulikunitz/xz/lzma/fox.lzma | Bin 67 -> 0 bytes .../github.com/ulikunitz/xz/lzma/hashtable.go | 309 - 
vendor/github.com/ulikunitz/xz/lzma/header.go | 167 - .../github.com/ulikunitz/xz/lzma/header2.go | 398 - .../ulikunitz/xz/lzma/lengthcodec.go | 116 - .../ulikunitz/xz/lzma/literalcodec.go | 125 - .../ulikunitz/xz/lzma/matchalgorithm.go | 52 - .../github.com/ulikunitz/xz/lzma/operation.go | 55 - vendor/github.com/ulikunitz/xz/lzma/prob.go | 53 - .../ulikunitz/xz/lzma/properties.go | 69 - .../ulikunitz/xz/lzma/rangecodec.go | 222 - vendor/github.com/ulikunitz/xz/lzma/reader.go | 100 - .../github.com/ulikunitz/xz/lzma/reader2.go | 231 - vendor/github.com/ulikunitz/xz/lzma/state.go | 145 - .../ulikunitz/xz/lzma/treecodecs.go | 133 - vendor/github.com/ulikunitz/xz/lzma/writer.go | 209 - .../github.com/ulikunitz/xz/lzma/writer2.go | 305 - vendor/github.com/ulikunitz/xz/lzmafilter.go | 117 - vendor/github.com/ulikunitz/xz/make-docs | 5 - vendor/github.com/ulikunitz/xz/none-check.go | 23 - vendor/github.com/ulikunitz/xz/reader.go | 359 - vendor/github.com/ulikunitz/xz/writer.go | 399 - vendor/github.com/xi2/xz/AUTHORS | 8 - vendor/github.com/xi2/xz/LICENSE | 18 - vendor/github.com/xi2/xz/README.md | 10 - vendor/github.com/xi2/xz/dec_bcj.go | 461 - vendor/github.com/xi2/xz/dec_delta.go | 55 - vendor/github.com/xi2/xz/dec_lzma2.go | 1235 - vendor/github.com/xi2/xz/dec_stream.go | 932 - vendor/github.com/xi2/xz/dec_util.go | 52 - vendor/github.com/xi2/xz/dec_xz.go | 124 - vendor/github.com/xi2/xz/doc.go | 35 - vendor/github.com/xi2/xz/reader.go | 256 - vendor/modules.txt | 42 - 278 files changed, 130 insertions(+), 301607 deletions(-) delete mode 100644 vendor/github.com/andybalholm/brotli/LICENSE delete mode 100644 vendor/github.com/andybalholm/brotli/README.md delete mode 100644 vendor/github.com/andybalholm/brotli/backward_references.go delete mode 
100644 vendor/github.com/andybalholm/brotli/backward_references_hq.go delete mode 100644 vendor/github.com/andybalholm/brotli/bit_cost.go delete mode 100644 vendor/github.com/andybalholm/brotli/bit_reader.go delete mode 100644 vendor/github.com/andybalholm/brotli/block_splitter.go delete mode 100644 vendor/github.com/andybalholm/brotli/block_splitter_command.go delete mode 100644 vendor/github.com/andybalholm/brotli/block_splitter_distance.go delete mode 100644 vendor/github.com/andybalholm/brotli/block_splitter_literal.go delete mode 100644 vendor/github.com/andybalholm/brotli/brotli_bit_stream.go delete mode 100644 vendor/github.com/andybalholm/brotli/cluster.go delete mode 100644 vendor/github.com/andybalholm/brotli/cluster_command.go delete mode 100644 vendor/github.com/andybalholm/brotli/cluster_distance.go delete mode 100644 vendor/github.com/andybalholm/brotli/cluster_literal.go delete mode 100644 vendor/github.com/andybalholm/brotli/command.go delete mode 100644 vendor/github.com/andybalholm/brotli/compress_fragment.go delete mode 100644 vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go delete mode 100644 vendor/github.com/andybalholm/brotli/constants.go delete mode 100644 vendor/github.com/andybalholm/brotli/context.go delete mode 100644 vendor/github.com/andybalholm/brotli/decode.go delete mode 100644 vendor/github.com/andybalholm/brotli/dictionary.go delete mode 100644 vendor/github.com/andybalholm/brotli/dictionary_hash.go delete mode 100644 vendor/github.com/andybalholm/brotli/encode.go delete mode 100644 vendor/github.com/andybalholm/brotli/encoder_dict.go delete mode 100644 vendor/github.com/andybalholm/brotli/entropy_encode.go delete mode 100644 vendor/github.com/andybalholm/brotli/entropy_encode_static.go delete mode 100644 
vendor/github.com/andybalholm/brotli/fast_log.go delete mode 100644 vendor/github.com/andybalholm/brotli/find_match_length.go delete mode 100644 vendor/github.com/andybalholm/brotli/h10.go delete mode 100644 vendor/github.com/andybalholm/brotli/h5.go delete mode 100644 vendor/github.com/andybalholm/brotli/h6.go delete mode 100644 vendor/github.com/andybalholm/brotli/hash.go delete mode 100644 vendor/github.com/andybalholm/brotli/hash_composite.go delete mode 100644 vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go delete mode 100644 vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go delete mode 100644 vendor/github.com/andybalholm/brotli/hash_rolling.go delete mode 100644 vendor/github.com/andybalholm/brotli/histogram.go delete mode 100644 vendor/github.com/andybalholm/brotli/http.go delete mode 100644 vendor/github.com/andybalholm/brotli/huffman.go delete mode 100644 vendor/github.com/andybalholm/brotli/literal_cost.go delete mode 100644 vendor/github.com/andybalholm/brotli/memory.go delete mode 100644 vendor/github.com/andybalholm/brotli/metablock.go delete mode 100644 vendor/github.com/andybalholm/brotli/metablock_command.go delete mode 100644 vendor/github.com/andybalholm/brotli/metablock_distance.go delete mode 100644 vendor/github.com/andybalholm/brotli/metablock_literal.go delete mode 100644 vendor/github.com/andybalholm/brotli/params.go delete mode 100644 vendor/github.com/andybalholm/brotli/platform.go delete mode 100644 vendor/github.com/andybalholm/brotli/prefix.go delete mode 100644 vendor/github.com/andybalholm/brotli/prefix_dec.go delete mode 100644 vendor/github.com/andybalholm/brotli/quality.go delete mode 100644 vendor/github.com/andybalholm/brotli/reader.go delete mode 100644 
vendor/github.com/andybalholm/brotli/ringbuffer.go delete mode 100644 vendor/github.com/andybalholm/brotli/state.go delete mode 100644 vendor/github.com/andybalholm/brotli/static_dict.go delete mode 100644 vendor/github.com/andybalholm/brotli/static_dict_lut.go delete mode 100644 vendor/github.com/andybalholm/brotli/symbol_list.go delete mode 100644 vendor/github.com/andybalholm/brotli/transform.go delete mode 100644 vendor/github.com/andybalholm/brotli/utf8_util.go delete mode 100644 vendor/github.com/andybalholm/brotli/util.go delete mode 100644 vendor/github.com/andybalholm/brotli/write_bits.go delete mode 100644 vendor/github.com/andybalholm/brotli/writer.go delete mode 100644 vendor/github.com/dsnet/compress/.travis.yml delete mode 100644 vendor/github.com/dsnet/compress/LICENSE.md delete mode 100644 vendor/github.com/dsnet/compress/README.md delete mode 100644 vendor/github.com/dsnet/compress/api.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/bwt.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/common.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/fuzz_off.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/fuzz_on.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/internal/sais/common.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/internal/sais/sais_byte.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/internal/sais/sais_int.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/mtf_rle2.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/prefix.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/reader.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/rle1.go delete mode 100644 vendor/github.com/dsnet/compress/bzip2/writer.go delete 
mode 100644 vendor/github.com/dsnet/compress/internal/common.go delete mode 100644 vendor/github.com/dsnet/compress/internal/debug.go delete mode 100644 vendor/github.com/dsnet/compress/internal/errors/errors.go delete mode 100644 vendor/github.com/dsnet/compress/internal/gofuzz.go delete mode 100644 vendor/github.com/dsnet/compress/internal/prefix/debug.go delete mode 100644 vendor/github.com/dsnet/compress/internal/prefix/decoder.go delete mode 100644 vendor/github.com/dsnet/compress/internal/prefix/encoder.go delete mode 100644 vendor/github.com/dsnet/compress/internal/prefix/prefix.go delete mode 100644 vendor/github.com/dsnet/compress/internal/prefix/range.go delete mode 100644 vendor/github.com/dsnet/compress/internal/prefix/reader.go delete mode 100644 vendor/github.com/dsnet/compress/internal/prefix/wrap.go delete mode 100644 vendor/github.com/dsnet/compress/internal/prefix/writer.go delete mode 100644 vendor/github.com/dsnet/compress/internal/release.go delete mode 100644 vendor/github.com/dsnet/compress/zbench.sh delete mode 100644 vendor/github.com/dsnet/compress/zfuzz.sh delete mode 100644 vendor/github.com/dsnet/compress/zprof.sh delete mode 100644 vendor/github.com/dsnet/compress/ztest.sh delete mode 100644 vendor/github.com/golang/snappy/.gitignore delete mode 100644 vendor/github.com/golang/snappy/AUTHORS delete mode 100644 vendor/github.com/golang/snappy/CONTRIBUTORS delete mode 100644 vendor/github.com/golang/snappy/LICENSE delete mode 100644 vendor/github.com/golang/snappy/README delete mode 100644 vendor/github.com/golang/snappy/decode.go delete mode 100644 vendor/github.com/golang/snappy/decode_amd64.s delete mode 100644 vendor/github.com/golang/snappy/decode_arm64.s delete mode 100644 vendor/github.com/golang/snappy/decode_asm.go delete 
mode 100644 vendor/github.com/golang/snappy/decode_other.go delete mode 100644 vendor/github.com/golang/snappy/encode.go delete mode 100644 vendor/github.com/golang/snappy/encode_amd64.s delete mode 100644 vendor/github.com/golang/snappy/encode_arm64.s delete mode 100644 vendor/github.com/golang/snappy/encode_asm.go delete mode 100644 vendor/github.com/golang/snappy/encode_other.go delete mode 100644 vendor/github.com/golang/snappy/snappy.go delete mode 100644 vendor/github.com/klauspost/compress/flate/deflate.go delete mode 100644 vendor/github.com/klauspost/compress/flate/dict_decoder.go delete mode 100644 vendor/github.com/klauspost/compress/flate/fast_encoder.go delete mode 100644 vendor/github.com/klauspost/compress/flate/huffman_bit_writer.go delete mode 100644 vendor/github.com/klauspost/compress/flate/huffman_code.go delete mode 100644 vendor/github.com/klauspost/compress/flate/huffman_sortByFreq.go delete mode 100644 vendor/github.com/klauspost/compress/flate/huffman_sortByLiteral.go delete mode 100644 vendor/github.com/klauspost/compress/flate/inflate.go delete mode 100644 vendor/github.com/klauspost/compress/flate/inflate_gen.go delete mode 100644 vendor/github.com/klauspost/compress/flate/level1.go delete mode 100644 vendor/github.com/klauspost/compress/flate/level2.go delete mode 100644 vendor/github.com/klauspost/compress/flate/level3.go delete mode 100644 vendor/github.com/klauspost/compress/flate/level4.go delete mode 100644 vendor/github.com/klauspost/compress/flate/level5.go delete mode 100644 vendor/github.com/klauspost/compress/flate/level6.go delete mode 100644 vendor/github.com/klauspost/compress/flate/matchlen_amd64.go delete mode 100644 vendor/github.com/klauspost/compress/flate/matchlen_amd64.s delete mode 100644 
vendor/github.com/klauspost/compress/flate/matchlen_generic.go delete mode 100644 vendor/github.com/klauspost/compress/flate/regmask_amd64.go delete mode 100644 vendor/github.com/klauspost/compress/flate/regmask_other.go delete mode 100644 vendor/github.com/klauspost/compress/flate/stateless.go delete mode 100644 vendor/github.com/klauspost/compress/flate/token.go delete mode 100644 vendor/github.com/klauspost/compress/gzip/gunzip.go delete mode 100644 vendor/github.com/klauspost/compress/gzip/gzip.go delete mode 100644 vendor/github.com/klauspost/compress/zip/reader.go delete mode 100644 vendor/github.com/klauspost/compress/zip/register.go delete mode 100644 vendor/github.com/klauspost/compress/zip/struct.go delete mode 100644 vendor/github.com/klauspost/compress/zip/writer.go delete mode 100644 vendor/github.com/klauspost/pgzip/.gitignore delete mode 100644 vendor/github.com/klauspost/pgzip/.travis.yml delete mode 100644 vendor/github.com/klauspost/pgzip/GO_LICENSE delete mode 100644 vendor/github.com/klauspost/pgzip/LICENSE delete mode 100644 vendor/github.com/klauspost/pgzip/README.md delete mode 100644 vendor/github.com/klauspost/pgzip/gunzip.go delete mode 100644 vendor/github.com/klauspost/pgzip/gzip.go delete mode 100644 vendor/github.com/mholt/archiver/v3/.gitignore delete mode 100644 vendor/github.com/mholt/archiver/v3/.goreleaser.yml delete mode 100644 vendor/github.com/mholt/archiver/v3/.prettierrc delete mode 100644 vendor/github.com/mholt/archiver/v3/LICENSE delete mode 100644 vendor/github.com/mholt/archiver/v3/README.md delete mode 100644 vendor/github.com/mholt/archiver/v3/SECURITY.md delete mode 100644 vendor/github.com/mholt/archiver/v3/archiver.go delete mode 100644 vendor/github.com/mholt/archiver/v3/brotli.go delete mode 100644 
vendor/github.com/mholt/archiver/v3/build.bash delete mode 100644 vendor/github.com/mholt/archiver/v3/bz2.go delete mode 100644 vendor/github.com/mholt/archiver/v3/error.go delete mode 100644 vendor/github.com/mholt/archiver/v3/filecompressor.go delete mode 100644 vendor/github.com/mholt/archiver/v3/gz.go delete mode 100644 vendor/github.com/mholt/archiver/v3/lz4.go delete mode 100644 vendor/github.com/mholt/archiver/v3/rar.go delete mode 100644 vendor/github.com/mholt/archiver/v3/sz.go delete mode 100644 vendor/github.com/mholt/archiver/v3/tar.go delete mode 100644 vendor/github.com/mholt/archiver/v3/tarbrotli.go delete mode 100644 vendor/github.com/mholt/archiver/v3/tarbz2.go delete mode 100644 vendor/github.com/mholt/archiver/v3/targz.go delete mode 100644 vendor/github.com/mholt/archiver/v3/tarlz4.go delete mode 100644 vendor/github.com/mholt/archiver/v3/tarsz.go delete mode 100644 vendor/github.com/mholt/archiver/v3/tarxz.go delete mode 100644 vendor/github.com/mholt/archiver/v3/tarzst.go delete mode 100644 vendor/github.com/mholt/archiver/v3/xz.go delete mode 100644 vendor/github.com/mholt/archiver/v3/zip.go delete mode 100644 vendor/github.com/mholt/archiver/v3/zstd.go delete mode 100644 vendor/github.com/nwaples/rardecode/LICENSE delete mode 100644 vendor/github.com/nwaples/rardecode/README.md delete mode 100644 vendor/github.com/nwaples/rardecode/archive.go delete mode 100644 vendor/github.com/nwaples/rardecode/archive15.go delete mode 100644 vendor/github.com/nwaples/rardecode/archive50.go delete mode 100644 vendor/github.com/nwaples/rardecode/bit_reader.go delete mode 100644 vendor/github.com/nwaples/rardecode/decode29.go delete mode 100644 vendor/github.com/nwaples/rardecode/decode29_lz.go delete mode 100644 
vendor/github.com/nwaples/rardecode/decode29_ppm.go delete mode 100644 vendor/github.com/nwaples/rardecode/decode50.go delete mode 100644 vendor/github.com/nwaples/rardecode/decode_reader.go delete mode 100644 vendor/github.com/nwaples/rardecode/decrypt_reader.go delete mode 100644 vendor/github.com/nwaples/rardecode/filters.go delete mode 100644 vendor/github.com/nwaples/rardecode/huffman.go delete mode 100644 vendor/github.com/nwaples/rardecode/ppm_model.go delete mode 100644 vendor/github.com/nwaples/rardecode/reader.go delete mode 100644 vendor/github.com/nwaples/rardecode/vm.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/.gitignore delete mode 100644 vendor/github.com/pierrec/lz4/v4/LICENSE delete mode 100644 vendor/github.com/pierrec/lz4/v4/README.md delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4block/block.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4block/blocks.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_amd64.s delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_arm.s delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_asm.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_other.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4errors/errors.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4stream/block.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4stream/frame.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/lz4stream/frame_gen.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_arm.go delete mode 100644 
vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_arm.s delete mode 100644 vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_other.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/lz4.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/options.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/options_gen.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/reader.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/state.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/state_gen.go delete mode 100644 vendor/github.com/pierrec/lz4/v4/writer.go delete mode 100644 vendor/github.com/ulikunitz/xz/.gitignore delete mode 100644 vendor/github.com/ulikunitz/xz/LICENSE delete mode 100644 vendor/github.com/ulikunitz/xz/README.md delete mode 100644 vendor/github.com/ulikunitz/xz/SECURITY.md delete mode 100644 vendor/github.com/ulikunitz/xz/TODO.md delete mode 100644 vendor/github.com/ulikunitz/xz/bits.go delete mode 100644 vendor/github.com/ulikunitz/xz/crc.go delete mode 100644 vendor/github.com/ulikunitz/xz/format.go delete mode 100644 vendor/github.com/ulikunitz/xz/fox-check-none.xz delete mode 100644 vendor/github.com/ulikunitz/xz/fox.xz delete mode 100644 vendor/github.com/ulikunitz/xz/internal/hash/cyclic_poly.go delete mode 100644 vendor/github.com/ulikunitz/xz/internal/hash/doc.go delete mode 100644 vendor/github.com/ulikunitz/xz/internal/hash/rabin_karp.go delete mode 100644 vendor/github.com/ulikunitz/xz/internal/hash/roller.go delete mode 100644 vendor/github.com/ulikunitz/xz/internal/xlog/xlog.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/bintree.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/bitops.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/breader.go delete mode 100644 
vendor/github.com/ulikunitz/xz/lzma/buffer.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/bytewriter.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/decoder.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/decoderdict.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/directcodec.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/distcodec.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/encoder.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/encoderdict.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/fox.lzma delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/hashtable.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/header.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/header2.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/lengthcodec.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/literalcodec.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/matchalgorithm.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/operation.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/prob.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/properties.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/rangecodec.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/reader.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/reader2.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/state.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/treecodecs.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/writer.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzma/writer2.go delete mode 100644 vendor/github.com/ulikunitz/xz/lzmafilter.go delete mode 100644 
vendor/github.com/ulikunitz/xz/make-docs delete mode 100644 vendor/github.com/ulikunitz/xz/none-check.go delete mode 100644 vendor/github.com/ulikunitz/xz/reader.go delete mode 100644 vendor/github.com/ulikunitz/xz/writer.go delete mode 100644 vendor/github.com/xi2/xz/AUTHORS delete mode 100644 vendor/github.com/xi2/xz/LICENSE delete mode 100644 vendor/github.com/xi2/xz/README.md delete mode 100644 vendor/github.com/xi2/xz/dec_bcj.go delete mode 100644 vendor/github.com/xi2/xz/dec_delta.go delete mode 100644 vendor/github.com/xi2/xz/dec_lzma2.go delete mode 100644 vendor/github.com/xi2/xz/dec_stream.go delete mode 100644 vendor/github.com/xi2/xz/dec_util.go delete mode 100644 vendor/github.com/xi2/xz/dec_xz.go delete mode 100644 vendor/github.com/xi2/xz/doc.go delete mode 100644 vendor/github.com/xi2/xz/reader.go diff --git a/go.mod b/go.mod index 41708f7725..8514c78896 100644 --- a/go.mod +++ b/go.mod @@ -21,7 +21,6 @@ require ( github.com/cloudflare/cfssl v1.6.4 github.com/go-openapi/strfmt v0.22.1 github.com/google/gops v0.3.28 - github.com/mholt/archiver/v3 v3.5.1 github.com/osrg/gobgp/v3 v3.23.0 github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c github.com/spf13/cobra v1.8.0 @@ -93,7 +92,6 @@ require ( github.com/Masterminds/semver/v3 v3.2.1 // indirect github.com/Masterminds/sprig/v3 v3.2.3 // indirect github.com/Masterminds/squirrel v1.5.4 // indirect - github.com/andybalholm/brotli v1.0.1 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/census-instrumentation/opencensus-proto v0.4.1 // indirect @@ -109,7 +107,6 @@ require ( github.com/docker/docker-credential-helpers v0.7.0 // indirect 
github.com/docker/go-connections v0.5.0 // indirect github.com/docker/go-metrics v0.0.1 // indirect - github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect github.com/emicklei/go-restful/v3 v3.11.2 // indirect github.com/envoyproxy/protoc-gen-validate v1.0.4 // indirect github.com/evanphx/json-patch v5.9.0+incompatible // indirect @@ -133,7 +130,6 @@ require ( github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect - github.com/golang/snappy v0.0.4 // indirect github.com/google/btree v1.1.2 // indirect github.com/google/certificate-transparency-go v1.1.4 // indirect github.com/google/go-cmp v0.6.0 // indirect @@ -151,7 +147,6 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.17.0 // indirect - github.com/klauspost/pgzip v1.2.5 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect @@ -174,7 +169,6 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect - github.com/nwaples/rardecode v1.1.0 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0-rc6 // indirect @@ -182,7 +176,6 @@ require ( github.com/pelletier/go-toml/v2 v2.1.0 // indirect github.com/peterbourgon/diskv v2.0.1+incompatible // indirect github.com/petermattis/goid 
v0.0.0-20230904192822-1876fd5063bc // indirect - github.com/pierrec/lz4/v4 v4.1.2 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_golang v1.19.0 // indirect @@ -201,14 +194,12 @@ require ( github.com/spf13/viper v1.18.2 // indirect github.com/stretchr/testify v1.9.0 github.com/subosito/gotenv v1.6.0 // indirect - github.com/ulikunitz/xz v0.5.10 // indirect github.com/vishvananda/netlink v1.2.1-beta.2.0.20231127184239-0ced8385386a // indirect github.com/vishvananda/netns v0.0.4 // indirect github.com/weppos/publicsuffix-go v0.15.1-0.20210511084619-b1f36a2d6c0b // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect - github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect github.com/xlab/treeprint v1.2.0 // indirect github.com/zmap/zcrypto v0.0.0-20210511125630-18f1e0152cfc // indirect github.com/zmap/zlint/v3 v3.1.0 // indirect diff --git a/go.sum b/go.sum index c49ed5fa0b..3f4875cdc3 100644 --- a/go.sum +++ b/go.sum @@ -30,8 +30,6 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/andybalholm/brotli v1.0.1 h1:KqhlKozYbRtJvsPrrEeXcO+N2l6NYT5A2QAFmSULpEc= -github.com/andybalholm/brotli 
v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= @@ -135,9 +133,6 @@ github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQ github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1 h1:ZClxb8laGDf5arXfYcAtECDFgAgHklGI8CxgjHnXKJ4= github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= -github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY= -github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= -github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= github.com/emicklei/go-restful/v3 v3.11.2 h1:1onLa9DcsMYO9P+CXaL0dStDqQ2EHHXLiz+BtnqkLAU= github.com/emicklei/go-restful/v3 v3.11.2/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -236,9 +231,6 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy 
v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= -github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/gomodule/redigo v1.8.2 h1:H5XSIre1MB5NbPYFp+i1NBbb5qN1W8Y8YAQoAYbkm8k= github.com/gomodule/redigo v1.8.2/go.mod h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -355,13 +347,8 @@ github.com/karrick/godirwalk v1.16.1 h1:DynhcF+bztK8gooS0+NDJFrdNZjJ3gzVzC545UNA github.com/karrick/godirwalk v1.16.1/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= -github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= -github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= -github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -414,8 +401,6 @@ 
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo= -github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.41 h1:WMszZWJG0XmzbK9FEmzH2TVcqYzFesusSIB41b8KHxY= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= @@ -456,8 +441,6 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8m github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= -github.com/nwaples/rardecode v1.1.0 h1:vSxaY8vQhOcVr4mm5e8XllHWTiM4JF507A0Katqw7MQ= -github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= @@ -486,8 +469,6 @@ github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc h1:8bQZVK1X6BJR/6 github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc/go.mod 
h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4= github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1Hc+ETb5K+23HdAMvESYE3ZJ5b5cMI= github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= -github.com/pierrec/lz4/v4 v4.1.2 h1:qvY3YFXRQE/XB8MlLzJH7mSzBs74eA2gg52YTk6jUPM= -github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= @@ -596,10 +577,6 @@ github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhso github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= -github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/ulikunitz/xz v0.5.10 h1:t92gobL9l3HE202wg3rlk19F6X+JOxl9BBrCCMYEYd8= -github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/vishvananda/netlink v1.2.1-beta.2.0.20231127184239-0ced8385386a h1:PdKmLjqKUM8AfjGqDbrF/C56RvuGFDMYB0Z+8TMmGpU= github.com/vishvananda/netlink v1.2.1-beta.2.0.20231127184239-0ced8385386a/go.mod h1:whJevzBpTrid75eZy99s3DqCmy05NfibNaF2Ol5Ox5A= github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod 
h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= @@ -615,8 +592,6 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo= -github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos= github.com/xlab/treeprint v1.2.0 h1:HzHnuAF1plUN2zGlAFHbSQP2qJ0ZAD3XF5XD7OesXRQ= github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= diff --git a/sysdump/sysdump.go b/sysdump/sysdump.go index c141e6e430..abaa774732 100644 --- a/sysdump/sysdump.go +++ b/sysdump/sysdump.go @@ -4,6 +4,9 @@ package sysdump import ( + "archive/tar" + "archive/zip" + "compress/gzip" "context" "fmt" "io" @@ -18,7 +21,6 @@ import ( "github.com/cilium/cilium/pkg/versioncheck" "github.com/cilium/workerpool" - "github.com/mholt/archiver/v3" "github.com/spf13/cobra" "github.com/spf13/pflag" corev1 "k8s.io/api/core/v1" @@ -1593,7 +1595,7 @@ func (c *Collector) Run() error { // Create the zip file in the current directory. 
c.log("🗳 Compiling sysdump") f := c.replaceTimestamp(c.Options.OutputFileName) + ".zip" - if err := archiver.Archive([]string{c.sysdumpDir}, f); err != nil { + if err := zipDirectory(c.sysdumpDir, f); err != nil { return fmt.Errorf("failed to create zip file: %w", err) } c.log("✅ The sysdump has been saved to %s", f) @@ -1947,13 +1949,7 @@ func (c *Collector) SubmitTetragonBugtoolTasks(pods []*corev1.Pod, tetragonAgent if err != nil { return fmt.Errorf("failed to collect 'tetragon-bugtool' output for %q: %w", p.Name, err) } - // Untar the resulting file. - t := archiver.TarGz{ - Tar: &archiver.Tar{ - StripComponents: 1, - }, - } - if err := t.Unarchive(f, strings.Replace(f, ".tar.gz", "", -1)); err != nil { + if err := untar(f, strings.Replace(f, ".tar.gz", "", -1)); err != nil { c.logWarn("Failed to unarchive 'tetragon-bugtool' output for %q: %v", p.Name, err) return nil } @@ -1974,6 +1970,117 @@ func (c *Collector) SubmitTetragonBugtoolTasks(pods []*corev1.Pod, tetragonAgent return nil } +// removeTopDirectory removes the top directory from a relative file path +// (e.g. "a/b/c" => "b/c"). Don't pass an absolute path. It doesn't do what +// you think it does. +func removeTopDirectory(path string) (string, error) { + index := strings.IndexByte(path, filepath.Separator) + if index < 0 { + return "", fmt.Errorf("invalid path %q", path) + } + return path[index+1:], nil +} + +func untar(src string, dst string) error { + reader, err := os.Open(src) + if err != nil { + return err + } + defer reader.Close() + gz, err := gzip.NewReader(reader) + if err != nil { + return err + } + defer gz.Close() + tr := tar.NewReader(gz) + for { + header, err := tr.Next() + if err == io.EOF { + return nil + } else if err != nil { + return err + } + // Cilium and Tetragon bugtool tar files don't contain headers for + // directories, so create a directory for each file instead. 
+ if header.Typeflag != tar.TypeReg { + continue + } + name, err := removeTopDirectory(header.Name) + if err != nil { + return err + } + filename := filepath.Join(dst, name) + directory := filepath.Dir(filename) + if err := os.MkdirAll(directory, 0755); err != nil { + return err + } + f, err := os.OpenFile(filename, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode)) + if err != nil { + return err + } + if err = copyN(f, tr, 1024); err != nil { + f.Close() + return err + } + f.Close() + } +} + +// copyN copies from src to dst n bytes at a time to avoid this lint error: +// G110: Potential DoS vulnerability via decompression bomb (gosec) +func copyN(dst io.Writer, src io.Reader, n int64) error { + for { + _, err := io.CopyN(dst, src, n) + if err == io.EOF { + return nil + } else if err != nil { + return err + } + } +} + +func zipDirectory(src string, dst string) error { + f, err := os.Create(dst) + if err != nil { + return err + } + defer f.Close() + writer := zip.NewWriter(f) + defer writer.Close() + return filepath.WalkDir(src, func(path string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + fi, err := d.Info() + if err != nil { + return err + } + header, err := zip.FileInfoHeader(fi) + if err != nil { + return err + } + header.Name, err = filepath.Rel(filepath.Dir(src), path) + if err != nil { + return err + } + header.Method = zip.Deflate + dstFile, err := writer.CreateHeader(header) + if err != nil { + return err + } + srcFile, err := os.Open(path) + if err != nil { + return err + } + defer srcFile.Close() + _, err = io.Copy(dstFile, srcFile) + return err + }) +} + +func (c *Collector) submitCiliumBugtoolTasks(pods []*corev1.Pod) error { + for _, p := range pods { + p := p @@ -2024,12 +2131,7 @@ func (c *Collector) submitCiliumBugtoolTasks(pods []*corev1.Pod) error { return fmt.Errorf("failed to collect 'cilium-bugtool' output for %q: %w", p.Name, err) } // Untar the resulting file.
- t := archiver.TarGz{ - Tar: &archiver.Tar{ - StripComponents: 1, - }, - } - if err := t.Unarchive(f, strings.Replace(f, ".tar.gz", "", -1)); err != nil { + if err := untar(f, strings.Replace(f, ".tar.gz", "", -1)); err != nil { c.logWarn("Failed to unarchive 'cilium-bugtool' output for %q: %v", p.Name, err) return nil } diff --git a/sysdump/sysdump_test.go b/sysdump/sysdump_test.go index 338e210098..280a36a1e5 100644 --- a/sysdump/sysdump_test.go +++ b/sysdump/sysdump_test.go @@ -504,3 +504,16 @@ func (c *fakeClient) GetNamespace(_ context.Context, ns string, _ metav1.GetOpti }, } } + +func Test_removeTopDirectory(t *testing.T) { + result, err := removeTopDirectory("/") + assert.NoError(t, err) + assert.Equal(t, "", result) + + result, err = removeTopDirectory("a/b/c") + assert.NoError(t, err) + assert.Equal(t, "b/c", result) + + _, err = removeTopDirectory("") + assert.Error(t, err) +} diff --git a/vendor/github.com/andybalholm/brotli/LICENSE b/vendor/github.com/andybalholm/brotli/LICENSE deleted file mode 100644 index 33b7cdd2db..0000000000 --- a/vendor/github.com/andybalholm/brotli/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/andybalholm/brotli/README.md b/vendor/github.com/andybalholm/brotli/README.md deleted file mode 100644 index 1ea7fdb759..0000000000 --- a/vendor/github.com/andybalholm/brotli/README.md +++ /dev/null @@ -1,7 +0,0 @@ -This package is a brotli compressor and decompressor implemented in Go. -It was translated from the reference implementation (https://github.com/google/brotli) -with the `c2go` tool at https://github.com/andybalholm/c2go. - -I am using it in production with https://github.com/andybalholm/redwood. - -API documentation is found at https://pkg.go.dev/github.com/andybalholm/brotli?tab=doc. diff --git a/vendor/github.com/andybalholm/brotli/backward_references.go b/vendor/github.com/andybalholm/brotli/backward_references.go deleted file mode 100644 index 008c054d1c..0000000000 --- a/vendor/github.com/andybalholm/brotli/backward_references.go +++ /dev/null @@ -1,185 +0,0 @@ -package brotli - -import ( - "sync" -) - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Function to find backward reference copies. 
*/ - -func computeDistanceCode(distance uint, max_distance uint, dist_cache []int) uint { - if distance <= max_distance { - var distance_plus_3 uint = distance + 3 - var offset0 uint = distance_plus_3 - uint(dist_cache[0]) - var offset1 uint = distance_plus_3 - uint(dist_cache[1]) - if distance == uint(dist_cache[0]) { - return 0 - } else if distance == uint(dist_cache[1]) { - return 1 - } else if offset0 < 7 { - return (0x9750468 >> (4 * offset0)) & 0xF - } else if offset1 < 7 { - return (0xFDB1ACE >> (4 * offset1)) & 0xF - } else if distance == uint(dist_cache[2]) { - return 2 - } else if distance == uint(dist_cache[3]) { - return 3 - } - } - - return distance + numDistanceShortCodes - 1 -} - -var hasherSearchResultPool sync.Pool - -func createBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher hasherHandle, dist_cache []int, last_insert_len *uint, commands *[]command, num_literals *uint) { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var insert_length uint = *last_insert_len - var pos_end uint = position + num_bytes - var store_end uint - if num_bytes >= hasher.StoreLookahead() { - store_end = position + num_bytes - hasher.StoreLookahead() + 1 - } else { - store_end = position - } - var random_heuristics_window_size uint = literalSpreeLengthForSparseSearch(params) - var apply_random_heuristics uint = position + random_heuristics_window_size - var gap uint = 0 - /* Set maximum distance, see section 9.1. of the spec. */ - - const kMinScore uint = scoreBase + 100 - - /* For speed up heuristics for random data. */ - - /* Minimum score to accept a backward reference. 
*/ - hasher.PrepareDistanceCache(dist_cache) - sr2, _ := hasherSearchResultPool.Get().(*hasherSearchResult) - if sr2 == nil { - sr2 = &hasherSearchResult{} - } - sr, _ := hasherSearchResultPool.Get().(*hasherSearchResult) - if sr == nil { - sr = &hasherSearchResult{} - } - - for position+hasher.HashTypeLength() < pos_end { - var max_length uint = pos_end - position - var max_distance uint = brotli_min_size_t(position, max_backward_limit) - sr.len = 0 - sr.len_code_delta = 0 - sr.distance = 0 - sr.score = kMinScore - hasher.FindLongestMatch(¶ms.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, sr) - if sr.score > kMinScore { - /* Found a match. Let's look for something even better ahead. */ - var delayed_backward_references_in_row int = 0 - max_length-- - for ; ; max_length-- { - var cost_diff_lazy uint = 175 - if params.quality < minQualityForExtensiveReferenceSearch { - sr2.len = brotli_min_size_t(sr.len-1, max_length) - } else { - sr2.len = 0 - } - sr2.len_code_delta = 0 - sr2.distance = 0 - sr2.score = kMinScore - max_distance = brotli_min_size_t(position+1, max_backward_limit) - hasher.FindLongestMatch(¶ms.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, sr2) - if sr2.score >= sr.score+cost_diff_lazy { - /* Ok, let's just write one byte for now and start a match from the - next byte. */ - position++ - - insert_length++ - *sr = *sr2 - delayed_backward_references_in_row++ - if delayed_backward_references_in_row < 4 && position+hasher.HashTypeLength() < pos_end { - continue - } - } - - break - } - - apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size - max_distance = brotli_min_size_t(position, max_backward_limit) - { - /* The first 16 codes are special short-codes, - and the minimum offset is 1. 
*/ - var distance_code uint = computeDistanceCode(sr.distance, max_distance+gap, dist_cache) - if (sr.distance <= (max_distance + gap)) && distance_code > 0 { - dist_cache[3] = dist_cache[2] - dist_cache[2] = dist_cache[1] - dist_cache[1] = dist_cache[0] - dist_cache[0] = int(sr.distance) - hasher.PrepareDistanceCache(dist_cache) - } - - *commands = append(*commands, makeCommand(¶ms.dist, insert_length, sr.len, sr.len_code_delta, distance_code)) - } - - *num_literals += insert_length - insert_length = 0 - /* Put the hash keys into the table, if there are enough bytes left. - Depending on the hasher implementation, it can push all positions - in the given range or only a subset of them. - Avoid hash poisoning with RLE data. */ - { - var range_start uint = position + 2 - var range_end uint = brotli_min_size_t(position+sr.len, store_end) - if sr.distance < sr.len>>2 { - range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2))) - } - - hasher.StoreRange(ringbuffer, ringbuffer_mask, range_start, range_end) - } - - position += sr.len - } else { - insert_length++ - position++ - - /* If we have not seen matches for a long time, we can skip some - match lookups. Unsuccessful match lookups are very very expensive - and this kind of a heuristic speeds up compression quite - a lot. */ - if position > apply_random_heuristics { - /* Going through uncompressible data, jump. */ - if position > apply_random_heuristics+4*random_heuristics_window_size { - var kMargin uint = brotli_max_size_t(hasher.StoreLookahead()-1, 4) - /* It is quite a long time since we saw a copy, so we assume - that this data is not compressible, and store hashes less - often. Hashes of non compressible data are less likely to - turn out to be useful in the future, too, so we store less of - them to not to flood out the hash table of good compressible - data. 
*/ - - var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin) - for ; position < pos_jump; position += 4 { - hasher.Store(ringbuffer, ringbuffer_mask, position) - insert_length += 4 - } - } else { - var kMargin uint = brotli_max_size_t(hasher.StoreLookahead()-1, 2) - var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin) - for ; position < pos_jump; position += 2 { - hasher.Store(ringbuffer, ringbuffer_mask, position) - insert_length += 2 - } - } - } - } - } - - insert_length += pos_end - position - *last_insert_len = insert_length - - hasherSearchResultPool.Put(sr) - hasherSearchResultPool.Put(sr2) -} diff --git a/vendor/github.com/andybalholm/brotli/backward_references_hq.go b/vendor/github.com/andybalholm/brotli/backward_references_hq.go deleted file mode 100644 index 21629c1cdb..0000000000 --- a/vendor/github.com/andybalholm/brotli/backward_references_hq.go +++ /dev/null @@ -1,796 +0,0 @@ -package brotli - -import "math" - -type zopfliNode struct { - length uint32 - distance uint32 - dcode_insert_length uint32 - u struct { - cost float32 - next uint32 - shortcut uint32 - } -} - -const maxEffectiveDistanceAlphabetSize = 544 - -const kInfinity float32 = 1.7e38 /* ~= 2 ^ 127 */ - -var kDistanceCacheIndex = []uint32{0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1} - -var kDistanceCacheOffset = []int{0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3} - -func initZopfliNodes(array []zopfliNode, length uint) { - var stub zopfliNode - var i uint - stub.length = 1 - stub.distance = 0 - stub.dcode_insert_length = 0 - stub.u.cost = kInfinity - for i = 0; i < length; i++ { - array[i] = stub - } -} - -func zopfliNodeCopyLength(self *zopfliNode) uint32 { - return self.length & 0x1FFFFFF -} - -func zopfliNodeLengthCode(self *zopfliNode) uint32 { - var modifier uint32 = self.length >> 25 - return zopfliNodeCopyLength(self) + 9 - modifier -} - -func zopfliNodeCopyDistance(self *zopfliNode) uint32 { - return self.distance -} - 
-func zopfliNodeDistanceCode(self *zopfliNode) uint32 { - var short_code uint32 = self.dcode_insert_length >> 27 - if short_code == 0 { - return zopfliNodeCopyDistance(self) + numDistanceShortCodes - 1 - } else { - return short_code - 1 - } -} - -func zopfliNodeCommandLength(self *zopfliNode) uint32 { - return zopfliNodeCopyLength(self) + (self.dcode_insert_length & 0x7FFFFFF) -} - -/* Histogram based cost model for zopflification. */ -type zopfliCostModel struct { - cost_cmd_ [numCommandSymbols]float32 - cost_dist_ []float32 - distance_histogram_size uint32 - literal_costs_ []float32 - min_cost_cmd_ float32 - num_bytes_ uint -} - -func initZopfliCostModel(self *zopfliCostModel, dist *distanceParams, num_bytes uint) { - var distance_histogram_size uint32 = dist.alphabet_size - if distance_histogram_size > maxEffectiveDistanceAlphabetSize { - distance_histogram_size = maxEffectiveDistanceAlphabetSize - } - - self.num_bytes_ = num_bytes - self.literal_costs_ = make([]float32, (num_bytes + 2)) - self.cost_dist_ = make([]float32, (dist.alphabet_size)) - self.distance_histogram_size = distance_histogram_size -} - -func cleanupZopfliCostModel(self *zopfliCostModel) { - self.literal_costs_ = nil - self.cost_dist_ = nil -} - -func setCost(histogram []uint32, histogram_size uint, literal_histogram bool, cost []float32) { - var sum uint = 0 - var missing_symbol_sum uint - var log2sum float32 - var missing_symbol_cost float32 - var i uint - for i = 0; i < histogram_size; i++ { - sum += uint(histogram[i]) - } - - log2sum = float32(fastLog2(sum)) - missing_symbol_sum = sum - if !literal_histogram { - for i = 0; i < histogram_size; i++ { - if histogram[i] == 0 { - missing_symbol_sum++ - } - } - } - - missing_symbol_cost = float32(fastLog2(missing_symbol_sum)) + 2 - for i = 0; i < histogram_size; i++ { - if histogram[i] == 0 { - cost[i] = missing_symbol_cost - continue - } - - /* Shannon bits for this symbol. 
*/ - cost[i] = log2sum - float32(fastLog2(uint(histogram[i]))) - - /* Cannot be coded with less than 1 bit */ - if cost[i] < 1 { - cost[i] = 1 - } - } -} - -func zopfliCostModelSetFromCommands(self *zopfliCostModel, position uint, ringbuffer []byte, ringbuffer_mask uint, commands []command, last_insert_len uint) { - var histogram_literal [numLiteralSymbols]uint32 - var histogram_cmd [numCommandSymbols]uint32 - var histogram_dist [maxEffectiveDistanceAlphabetSize]uint32 - var cost_literal [numLiteralSymbols]float32 - var pos uint = position - last_insert_len - var min_cost_cmd float32 = kInfinity - var cost_cmd []float32 = self.cost_cmd_[:] - var literal_costs []float32 - - histogram_literal = [numLiteralSymbols]uint32{} - histogram_cmd = [numCommandSymbols]uint32{} - histogram_dist = [maxEffectiveDistanceAlphabetSize]uint32{} - - for i := range commands { - var inslength uint = uint(commands[i].insert_len_) - var copylength uint = uint(commandCopyLen(&commands[i])) - var distcode uint = uint(commands[i].dist_prefix_) & 0x3FF - var cmdcode uint = uint(commands[i].cmd_prefix_) - var j uint - - histogram_cmd[cmdcode]++ - if cmdcode >= 128 { - histogram_dist[distcode]++ - } - - for j = 0; j < inslength; j++ { - histogram_literal[ringbuffer[(pos+j)&ringbuffer_mask]]++ - } - - pos += inslength + copylength - } - - setCost(histogram_literal[:], numLiteralSymbols, true, cost_literal[:]) - setCost(histogram_cmd[:], numCommandSymbols, false, cost_cmd) - setCost(histogram_dist[:], uint(self.distance_histogram_size), false, self.cost_dist_) - - for i := 0; i < numCommandSymbols; i++ { - min_cost_cmd = brotli_min_float(min_cost_cmd, cost_cmd[i]) - } - - self.min_cost_cmd_ = min_cost_cmd - { - literal_costs = self.literal_costs_ - var literal_carry float32 = 0.0 - num_bytes := int(self.num_bytes_) - literal_costs[0] = 0.0 - for i := 0; i < num_bytes; i++ { - literal_carry += cost_literal[ringbuffer[(position+uint(i))&ringbuffer_mask]] - literal_costs[i+1] = literal_costs[i] + 
literal_carry - literal_carry -= literal_costs[i+1] - literal_costs[i] - } - } -} - -func zopfliCostModelSetFromLiteralCosts(self *zopfliCostModel, position uint, ringbuffer []byte, ringbuffer_mask uint) { - var literal_costs []float32 = self.literal_costs_ - var literal_carry float32 = 0.0 - var cost_dist []float32 = self.cost_dist_ - var cost_cmd []float32 = self.cost_cmd_[:] - var num_bytes uint = self.num_bytes_ - var i uint - estimateBitCostsForLiterals(position, num_bytes, ringbuffer_mask, ringbuffer, literal_costs[1:]) - literal_costs[0] = 0.0 - for i = 0; i < num_bytes; i++ { - literal_carry += literal_costs[i+1] - literal_costs[i+1] = literal_costs[i] + literal_carry - literal_carry -= literal_costs[i+1] - literal_costs[i] - } - - for i = 0; i < numCommandSymbols; i++ { - cost_cmd[i] = float32(fastLog2(uint(11 + uint32(i)))) - } - - for i = 0; uint32(i) < self.distance_histogram_size; i++ { - cost_dist[i] = float32(fastLog2(uint(20 + uint32(i)))) - } - - self.min_cost_cmd_ = float32(fastLog2(11)) -} - -func zopfliCostModelGetCommandCost(self *zopfliCostModel, cmdcode uint16) float32 { - return self.cost_cmd_[cmdcode] -} - -func zopfliCostModelGetDistanceCost(self *zopfliCostModel, distcode uint) float32 { - return self.cost_dist_[distcode] -} - -func zopfliCostModelGetLiteralCosts(self *zopfliCostModel, from uint, to uint) float32 { - return self.literal_costs_[to] - self.literal_costs_[from] -} - -func zopfliCostModelGetMinCostCmd(self *zopfliCostModel) float32 { - return self.min_cost_cmd_ -} - -/* REQUIRES: len >= 2, start_pos <= pos */ -/* REQUIRES: cost < kInfinity, nodes[start_pos].cost < kInfinity */ -/* Maintains the "ZopfliNode array invariant". 
*/ -func updateZopfliNode(nodes []zopfliNode, pos uint, start_pos uint, len uint, len_code uint, dist uint, short_code uint, cost float32) { - var next *zopfliNode = &nodes[pos+len] - next.length = uint32(len | (len+9-len_code)<<25) - next.distance = uint32(dist) - next.dcode_insert_length = uint32(short_code<<27 | (pos - start_pos)) - next.u.cost = cost -} - -type posData struct { - pos uint - distance_cache [4]int - costdiff float32 - cost float32 -} - -/* Maintains the smallest 8 cost difference together with their positions */ -type startPosQueue struct { - q_ [8]posData - idx_ uint -} - -func initStartPosQueue(self *startPosQueue) { - self.idx_ = 0 -} - -func startPosQueueSize(self *startPosQueue) uint { - return brotli_min_size_t(self.idx_, 8) -} - -func startPosQueuePush(self *startPosQueue, posdata *posData) { - var offset uint = ^(self.idx_) & 7 - self.idx_++ - var len uint = startPosQueueSize(self) - var i uint - var q []posData = self.q_[:] - q[offset] = *posdata - - /* Restore the sorted order. In the list of |len| items at most |len - 1| - adjacent element comparisons / swaps are required. */ - for i = 1; i < len; i++ { - if q[offset&7].costdiff > q[(offset+1)&7].costdiff { - var tmp posData = q[offset&7] - q[offset&7] = q[(offset+1)&7] - q[(offset+1)&7] = tmp - } - - offset++ - } -} - -func startPosQueueAt(self *startPosQueue, k uint) *posData { - return &self.q_[(k-self.idx_)&7] -} - -/* Returns the minimum possible copy length that can improve the cost of any */ -/* future position. */ -func computeMinimumCopyLength(start_cost float32, nodes []zopfliNode, num_bytes uint, pos uint) uint { - var min_cost float32 = start_cost - var len uint = 2 - var next_len_bucket uint = 4 - /* Compute the minimum possible cost of reaching any future position. 
*/ - - var next_len_offset uint = 10 - for pos+len <= num_bytes && nodes[pos+len].u.cost <= min_cost { - /* We already reached (pos + len) with no more cost than the minimum - possible cost of reaching anything from this pos, so there is no point in - looking for lengths <= len. */ - len++ - - if len == next_len_offset { - /* We reached the next copy length code bucket, so we add one more - extra bit to the minimum cost. */ - min_cost += 1.0 - - next_len_offset += next_len_bucket - next_len_bucket *= 2 - } - } - - return uint(len) -} - -/* REQUIRES: nodes[pos].cost < kInfinity - REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant". */ -func computeDistanceShortcut(block_start uint, pos uint, max_backward_limit uint, gap uint, nodes []zopfliNode) uint32 { - var clen uint = uint(zopfliNodeCopyLength(&nodes[pos])) - var ilen uint = uint(nodes[pos].dcode_insert_length & 0x7FFFFFF) - var dist uint = uint(zopfliNodeCopyDistance(&nodes[pos])) - - /* Since |block_start + pos| is the end position of the command, the copy part - starts from |block_start + pos - clen|. Distances that are greater than - this or greater than |max_backward_limit| + |gap| are static dictionary - references, and do not update the last distances. - Also distance code 0 (last distance) does not update the last distances. */ - if pos == 0 { - return 0 - } else if dist+clen <= block_start+pos+gap && dist <= max_backward_limit+gap && zopfliNodeDistanceCode(&nodes[pos]) > 0 { - return uint32(pos) - } else { - return nodes[pos-clen-ilen].u.shortcut - } -} - -/* Fills in dist_cache[0..3] with the last four distances (as defined by - Section 4. of the Spec) that would be used at (block_start + pos) if we - used the shortest path of commands from block_start, computed from - nodes[0..pos]. The last four distances at block_start are in - starting_dist_cache[0..3]. - REQUIRES: nodes[pos].cost < kInfinity - REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant". 
*/ -func computeDistanceCache(pos uint, starting_dist_cache []int, nodes []zopfliNode, dist_cache []int) { - var idx int = 0 - var p uint = uint(nodes[pos].u.shortcut) - for idx < 4 && p > 0 { - var ilen uint = uint(nodes[p].dcode_insert_length & 0x7FFFFFF) - var clen uint = uint(zopfliNodeCopyLength(&nodes[p])) - var dist uint = uint(zopfliNodeCopyDistance(&nodes[p])) - dist_cache[idx] = int(dist) - idx++ - - /* Because of prerequisite, p >= clen + ilen >= 2. */ - p = uint(nodes[p-clen-ilen].u.shortcut) - } - - for ; idx < 4; idx++ { - dist_cache[idx] = starting_dist_cache[0] - starting_dist_cache = starting_dist_cache[1:] - } -} - -/* Maintains "ZopfliNode array invariant" and pushes node to the queue, if it - is eligible. */ -func evaluateNode(block_start uint, pos uint, max_backward_limit uint, gap uint, starting_dist_cache []int, model *zopfliCostModel, queue *startPosQueue, nodes []zopfliNode) { - /* Save cost, because ComputeDistanceCache invalidates it. */ - var node_cost float32 = nodes[pos].u.cost - nodes[pos].u.shortcut = computeDistanceShortcut(block_start, pos, max_backward_limit, gap, nodes) - if node_cost <= zopfliCostModelGetLiteralCosts(model, 0, pos) { - var posdata posData - posdata.pos = pos - posdata.cost = node_cost - posdata.costdiff = node_cost - zopfliCostModelGetLiteralCosts(model, 0, pos) - computeDistanceCache(pos, starting_dist_cache, nodes, posdata.distance_cache[:]) - startPosQueuePush(queue, &posdata) - } -} - -/* Returns longest copy length. 
*/ -func updateNodes(num_bytes uint, block_start uint, pos uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, max_backward_limit uint, starting_dist_cache []int, num_matches uint, matches []backwardMatch, model *zopfliCostModel, queue *startPosQueue, nodes []zopfliNode) uint { - var cur_ix uint = block_start + pos - var cur_ix_masked uint = cur_ix & ringbuffer_mask - var max_distance uint = brotli_min_size_t(cur_ix, max_backward_limit) - var max_len uint = num_bytes - pos - var max_zopfli_len uint = maxZopfliLen(params) - var max_iters uint = maxZopfliCandidates(params) - var min_len uint - var result uint = 0 - var k uint - var gap uint = 0 - - evaluateNode(block_start, pos, max_backward_limit, gap, starting_dist_cache, model, queue, nodes) - { - var posdata *posData = startPosQueueAt(queue, 0) - var min_cost float32 = (posdata.cost + zopfliCostModelGetMinCostCmd(model) + zopfliCostModelGetLiteralCosts(model, posdata.pos, pos)) - min_len = computeMinimumCopyLength(min_cost, nodes, num_bytes, pos) - } - - /* Go over the command starting positions in order of increasing cost - difference. */ - for k = 0; k < max_iters && k < startPosQueueSize(queue); k++ { - var posdata *posData = startPosQueueAt(queue, k) - var start uint = posdata.pos - var inscode uint16 = getInsertLengthCode(pos - start) - var start_costdiff float32 = posdata.costdiff - var base_cost float32 = start_costdiff + float32(getInsertExtra(inscode)) + zopfliCostModelGetLiteralCosts(model, 0, pos) - var best_len uint = min_len - 1 - var j uint = 0 - /* Look for last distance matches using the distance cache from this - starting position. 
*/ - for ; j < numDistanceShortCodes && best_len < max_len; j++ { - var idx uint = uint(kDistanceCacheIndex[j]) - var backward uint = uint(posdata.distance_cache[idx] + kDistanceCacheOffset[j]) - var prev_ix uint = cur_ix - backward - var len uint = 0 - var continuation byte = ringbuffer[cur_ix_masked+best_len] - if cur_ix_masked+best_len > ringbuffer_mask { - break - } - - if backward > max_distance+gap { - /* Word dictionary -> ignore. */ - continue - } - - if backward <= max_distance { - /* Regular backward reference. */ - if prev_ix >= cur_ix { - continue - } - - prev_ix &= ringbuffer_mask - if prev_ix+best_len > ringbuffer_mask || continuation != ringbuffer[prev_ix+best_len] { - continue - } - - len = findMatchLengthWithLimit(ringbuffer[prev_ix:], ringbuffer[cur_ix_masked:], max_len) - } else { - continue - } - { - var dist_cost float32 = base_cost + zopfliCostModelGetDistanceCost(model, j) - var l uint - for l = best_len + 1; l <= len; l++ { - var copycode uint16 = getCopyLengthCode(l) - var cmdcode uint16 = combineLengthCodes(inscode, copycode, j == 0) - var tmp float32 - if cmdcode < 128 { - tmp = base_cost - } else { - tmp = dist_cost - } - var cost float32 = tmp + float32(getCopyExtra(copycode)) + zopfliCostModelGetCommandCost(model, cmdcode) - if cost < nodes[pos+l].u.cost { - updateZopfliNode(nodes, pos, start, l, l, backward, j+1, cost) - result = brotli_max_size_t(result, l) - } - - best_len = l - } - } - } - - /* At higher iterations look only for new last distance matches, since - looking only for new command start positions with the same distances - does not help much. */ - if k >= 2 { - continue - } - { - /* Loop through all possible copy lengths at this position. 
*/ - var len uint = min_len - for j = 0; j < num_matches; j++ { - var match backwardMatch = matches[j] - var dist uint = uint(match.distance) - var is_dictionary_match bool = (dist > max_distance+gap) - var dist_code uint = dist + numDistanceShortCodes - 1 - var dist_symbol uint16 - var distextra uint32 - var distnumextra uint32 - var dist_cost float32 - var max_match_len uint - /* We already tried all possible last distance matches, so we can use - normal distance code here. */ - prefixEncodeCopyDistance(dist_code, uint(params.dist.num_direct_distance_codes), uint(params.dist.distance_postfix_bits), &dist_symbol, &distextra) - - distnumextra = uint32(dist_symbol) >> 10 - dist_cost = base_cost + float32(distnumextra) + zopfliCostModelGetDistanceCost(model, uint(dist_symbol)&0x3FF) - - /* Try all copy lengths up until the maximum copy length corresponding - to this distance. If the distance refers to the static dictionary, or - the maximum length is long enough, try only one maximum length. 
*/ - max_match_len = backwardMatchLength(&match) - - if len < max_match_len && (is_dictionary_match || max_match_len > max_zopfli_len) { - len = max_match_len - } - - for ; len <= max_match_len; len++ { - var len_code uint - if is_dictionary_match { - len_code = backwardMatchLengthCode(&match) - } else { - len_code = len - } - var copycode uint16 = getCopyLengthCode(len_code) - var cmdcode uint16 = combineLengthCodes(inscode, copycode, false) - var cost float32 = dist_cost + float32(getCopyExtra(copycode)) + zopfliCostModelGetCommandCost(model, cmdcode) - if cost < nodes[pos+len].u.cost { - updateZopfliNode(nodes, pos, start, uint(len), len_code, dist, 0, cost) - if len > result { - result = len - } - } - } - } - } - } - - return result -} - -func computeShortestPathFromNodes(num_bytes uint, nodes []zopfliNode) uint { - var index uint = num_bytes - var num_commands uint = 0 - for nodes[index].dcode_insert_length&0x7FFFFFF == 0 && nodes[index].length == 1 { - index-- - } - nodes[index].u.next = math.MaxUint32 - for index != 0 { - var len uint = uint(zopfliNodeCommandLength(&nodes[index])) - index -= uint(len) - nodes[index].u.next = uint32(len) - num_commands++ - } - - return num_commands -} - -/* REQUIRES: nodes != NULL and len(nodes) >= num_bytes + 1 */ -func zopfliCreateCommands(num_bytes uint, block_start uint, nodes []zopfliNode, dist_cache []int, last_insert_len *uint, params *encoderParams, commands *[]command, num_literals *uint) { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var pos uint = 0 - var offset uint32 = nodes[0].u.next - var i uint - var gap uint = 0 - for i = 0; offset != math.MaxUint32; i++ { - var next *zopfliNode = &nodes[uint32(pos)+offset] - var copy_length uint = uint(zopfliNodeCopyLength(next)) - var insert_length uint = uint(next.dcode_insert_length & 0x7FFFFFF) - pos += insert_length - offset = next.u.next - if i == 0 { - insert_length += *last_insert_len - *last_insert_len = 0 - } - { - var distance uint = 
uint(zopfliNodeCopyDistance(next)) - var len_code uint = uint(zopfliNodeLengthCode(next)) - var max_distance uint = brotli_min_size_t(block_start+pos, max_backward_limit) - var is_dictionary bool = (distance > max_distance+gap) - var dist_code uint = uint(zopfliNodeDistanceCode(next)) - *commands = append(*commands, makeCommand(¶ms.dist, insert_length, copy_length, int(len_code)-int(copy_length), dist_code)) - - if !is_dictionary && dist_code > 0 { - dist_cache[3] = dist_cache[2] - dist_cache[2] = dist_cache[1] - dist_cache[1] = dist_cache[0] - dist_cache[0] = int(distance) - } - } - - *num_literals += insert_length - pos += copy_length - } - - *last_insert_len += num_bytes - pos -} - -func zopfliIterate(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, gap uint, dist_cache []int, model *zopfliCostModel, num_matches []uint32, matches []backwardMatch, nodes []zopfliNode) uint { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var max_zopfli_len uint = maxZopfliLen(params) - var queue startPosQueue - var cur_match_pos uint = 0 - var i uint - nodes[0].length = 0 - nodes[0].u.cost = 0 - initStartPosQueue(&queue) - for i = 0; i+3 < num_bytes; i++ { - var skip uint = updateNodes(num_bytes, position, i, ringbuffer, ringbuffer_mask, params, max_backward_limit, dist_cache, uint(num_matches[i]), matches[cur_match_pos:], model, &queue, nodes) - if skip < longCopyQuickStep { - skip = 0 - } - cur_match_pos += uint(num_matches[i]) - if num_matches[i] == 1 && backwardMatchLength(&matches[cur_match_pos-1]) > max_zopfli_len { - skip = brotli_max_size_t(backwardMatchLength(&matches[cur_match_pos-1]), skip) - } - - if skip > 1 { - skip-- - for skip != 0 { - i++ - if i+3 >= num_bytes { - break - } - evaluateNode(position, i, max_backward_limit, gap, dist_cache, model, &queue, nodes) - cur_match_pos += uint(num_matches[i]) - skip-- - } - } - } - - return computeShortestPathFromNodes(num_bytes, nodes) -} - -/* Computes the 
shortest path of commands from position to at most - position + num_bytes. - - On return, path->size() is the number of commands found and path[i] is the - length of the i-th command (copy length plus insert length). - Note that the sum of the lengths of all commands can be less than num_bytes. - - On return, the nodes[0..num_bytes] array will have the following - "ZopfliNode array invariant": - For each i in [1..num_bytes], if nodes[i].cost < kInfinity, then - (1) nodes[i].copy_length() >= 2 - (2) nodes[i].command_length() <= i and - (3) nodes[i - nodes[i].command_length()].cost < kInfinity - - REQUIRES: nodes != nil and len(nodes) >= num_bytes + 1 */ -func zopfliComputeShortestPath(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, dist_cache []int, hasher *h10, nodes []zopfliNode) uint { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var max_zopfli_len uint = maxZopfliLen(params) - var model zopfliCostModel - var queue startPosQueue - var matches [2 * (maxNumMatchesH10 + 64)]backwardMatch - var store_end uint - if num_bytes >= hasher.StoreLookahead() { - store_end = position + num_bytes - hasher.StoreLookahead() + 1 - } else { - store_end = position - } - var i uint - var gap uint = 0 - var lz_matches_offset uint = 0 - nodes[0].length = 0 - nodes[0].u.cost = 0 - initZopfliCostModel(&model, ¶ms.dist, num_bytes) - zopfliCostModelSetFromLiteralCosts(&model, position, ringbuffer, ringbuffer_mask) - initStartPosQueue(&queue) - for i = 0; i+hasher.HashTypeLength()-1 < num_bytes; i++ { - var pos uint = position + i - var max_distance uint = brotli_min_size_t(pos, max_backward_limit) - var skip uint - var num_matches uint - num_matches = findAllMatchesH10(hasher, ¶ms.dictionary, ringbuffer, ringbuffer_mask, pos, num_bytes-i, max_distance, gap, params, matches[lz_matches_offset:]) - if num_matches > 0 && backwardMatchLength(&matches[num_matches-1]) > max_zopfli_len { - matches[0] = matches[num_matches-1] - 
num_matches = 1 - } - - skip = updateNodes(num_bytes, position, i, ringbuffer, ringbuffer_mask, params, max_backward_limit, dist_cache, num_matches, matches[:], &model, &queue, nodes) - if skip < longCopyQuickStep { - skip = 0 - } - if num_matches == 1 && backwardMatchLength(&matches[0]) > max_zopfli_len { - skip = brotli_max_size_t(backwardMatchLength(&matches[0]), skip) - } - - if skip > 1 { - /* Add the tail of the copy to the hasher. */ - hasher.StoreRange(ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+skip, store_end)) - - skip-- - for skip != 0 { - i++ - if i+hasher.HashTypeLength()-1 >= num_bytes { - break - } - evaluateNode(position, i, max_backward_limit, gap, dist_cache, &model, &queue, nodes) - skip-- - } - } - } - - cleanupZopfliCostModel(&model) - return computeShortestPathFromNodes(num_bytes, nodes) -} - -func createZopfliBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher *h10, dist_cache []int, last_insert_len *uint, commands *[]command, num_literals *uint) { - var nodes []zopfliNode - nodes = make([]zopfliNode, (num_bytes + 1)) - initZopfliNodes(nodes, num_bytes+1) - zopfliComputeShortestPath(num_bytes, position, ringbuffer, ringbuffer_mask, params, dist_cache, hasher, nodes) - zopfliCreateCommands(num_bytes, position, nodes, dist_cache, last_insert_len, params, commands, num_literals) - nodes = nil -} - -func createHqZopfliBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *encoderParams, hasher hasherHandle, dist_cache []int, last_insert_len *uint, commands *[]command, num_literals *uint) { - var max_backward_limit uint = maxBackwardLimit(params.lgwin) - var num_matches []uint32 = make([]uint32, num_bytes) - var matches_size uint = 4 * num_bytes - var store_end uint - if num_bytes >= hasher.StoreLookahead() { - store_end = position + num_bytes - hasher.StoreLookahead() + 1 - } else { - store_end = position - } - var 
cur_match_pos uint = 0 - var i uint - var orig_num_literals uint - var orig_last_insert_len uint - var orig_dist_cache [4]int - var orig_num_commands int - var model zopfliCostModel - var nodes []zopfliNode - var matches []backwardMatch = make([]backwardMatch, matches_size) - var gap uint = 0 - var shadow_matches uint = 0 - var new_array []backwardMatch - for i = 0; i+hasher.HashTypeLength()-1 < num_bytes; i++ { - var pos uint = position + i - var max_distance uint = brotli_min_size_t(pos, max_backward_limit) - var max_length uint = num_bytes - i - var num_found_matches uint - var cur_match_end uint - var j uint - - /* Ensure that we have enough free slots. */ - if matches_size < cur_match_pos+maxNumMatchesH10+shadow_matches { - var new_size uint = matches_size - if new_size == 0 { - new_size = cur_match_pos + maxNumMatchesH10 + shadow_matches - } - - for new_size < cur_match_pos+maxNumMatchesH10+shadow_matches { - new_size *= 2 - } - - new_array = make([]backwardMatch, new_size) - if matches_size != 0 { - copy(new_array, matches[:matches_size]) - } - - matches = new_array - matches_size = new_size - } - - num_found_matches = findAllMatchesH10(hasher.(*h10), ¶ms.dictionary, ringbuffer, ringbuffer_mask, pos, max_length, max_distance, gap, params, matches[cur_match_pos+shadow_matches:]) - cur_match_end = cur_match_pos + num_found_matches - for j = cur_match_pos; j+1 < cur_match_end; j++ { - assert(backwardMatchLength(&matches[j]) <= backwardMatchLength(&matches[j+1])) - } - - num_matches[i] = uint32(num_found_matches) - if num_found_matches > 0 { - var match_len uint = backwardMatchLength(&matches[cur_match_end-1]) - if match_len > maxZopfliLenQuality11 { - var skip uint = match_len - 1 - matches[cur_match_pos] = matches[cur_match_end-1] - cur_match_pos++ - num_matches[i] = 1 - - /* Add the tail of the copy to the hasher. 
*/ - hasher.StoreRange(ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+match_len, store_end)) - var pos uint = i - for i := 0; i < int(skip); i++ { - num_matches[pos+1:][i] = 0 - } - i += skip - } else { - cur_match_pos = cur_match_end - } - } - } - - orig_num_literals = *num_literals - orig_last_insert_len = *last_insert_len - copy(orig_dist_cache[:], dist_cache[:4]) - orig_num_commands = len(*commands) - nodes = make([]zopfliNode, (num_bytes + 1)) - initZopfliCostModel(&model, ¶ms.dist, num_bytes) - for i = 0; i < 2; i++ { - initZopfliNodes(nodes, num_bytes+1) - if i == 0 { - zopfliCostModelSetFromLiteralCosts(&model, position, ringbuffer, ringbuffer_mask) - } else { - zopfliCostModelSetFromCommands(&model, position, ringbuffer, ringbuffer_mask, (*commands)[orig_num_commands:], orig_last_insert_len) - } - - *commands = (*commands)[:orig_num_commands] - *num_literals = orig_num_literals - *last_insert_len = orig_last_insert_len - copy(dist_cache, orig_dist_cache[:4]) - zopfliIterate(num_bytes, position, ringbuffer, ringbuffer_mask, params, gap, dist_cache, &model, num_matches, matches, nodes) - zopfliCreateCommands(num_bytes, position, nodes, dist_cache, last_insert_len, params, commands, num_literals) - } - - cleanupZopfliCostModel(&model) - nodes = nil - matches = nil - num_matches = nil -} diff --git a/vendor/github.com/andybalholm/brotli/bit_cost.go b/vendor/github.com/andybalholm/brotli/bit_cost.go deleted file mode 100644 index 0005fc15e6..0000000000 --- a/vendor/github.com/andybalholm/brotli/bit_cost.go +++ /dev/null @@ -1,436 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Functions to estimate the bit cost of Huffman trees. 
*/ -func shannonEntropy(population []uint32, size uint, total *uint) float64 { - var sum uint = 0 - var retval float64 = 0 - var population_end []uint32 = population[size:] - var p uint - for -cap(population) < -cap(population_end) { - p = uint(population[0]) - population = population[1:] - sum += p - retval -= float64(p) * fastLog2(p) - } - - if sum != 0 { - retval += float64(sum) * fastLog2(sum) - } - *total = sum - return retval -} - -func bitsEntropy(population []uint32, size uint) float64 { - var sum uint - var retval float64 = shannonEntropy(population, size, &sum) - if retval < float64(sum) { - /* At least one bit per literal is needed. */ - retval = float64(sum) - } - - return retval -} - -const kOneSymbolHistogramCost float64 = 12 -const kTwoSymbolHistogramCost float64 = 20 -const kThreeSymbolHistogramCost float64 = 28 -const kFourSymbolHistogramCost float64 = 37 - -func populationCostLiteral(histogram *histogramLiteral) float64 { - var data_size uint = histogramDataSizeLiteral() - var count int = 0 - var s [5]uint - var bits float64 = 0.0 - var i uint - if histogram.total_count_ == 0 { - return kOneSymbolHistogramCost - } - - for i = 0; i < data_size; i++ { - if histogram.data_[i] > 0 { - s[count] = i - count++ - if count > 4 { - break - } - } - } - - if count == 1 { - return kOneSymbolHistogramCost - } - - if count == 2 { - return kTwoSymbolHistogramCost + float64(histogram.total_count_) - } - - if count == 3 { - var histo0 uint32 = histogram.data_[s[0]] - var histo1 uint32 = histogram.data_[s[1]] - var histo2 uint32 = histogram.data_[s[2]] - var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2)) - return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax) - } - - if count == 4 { - var histo [4]uint32 - var h23 uint32 - var histomax uint32 - for i = 0; i < 4; i++ { - histo[i] = histogram.data_[s[i]] - } - - /* Sort */ - for i = 0; i < 4; i++ { - var j uint - for j = i + 1; j 
< 4; j++ { - if histo[j] > histo[i] { - var tmp uint32 = histo[j] - histo[j] = histo[i] - histo[i] = tmp - } - } - } - - h23 = histo[2] + histo[3] - histomax = brotli_max_uint32_t(h23, histo[0]) - return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax) - } - { - var max_depth uint = 1 - var depth_histo = [codeLengthCodes]uint32{0} - /* In this loop we compute the entropy of the histogram and simultaneously - build a simplified histogram of the code length codes where we use the - zero repeat code 17, but we don't use the non-zero repeat code 16. */ - - var log2total float64 = fastLog2(histogram.total_count_) - for i = 0; i < data_size; { - if histogram.data_[i] > 0 { - var log2p float64 = log2total - fastLog2(uint(histogram.data_[i])) - /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) = - = log2(total_count) - log2(count(symbol)) */ - - var depth uint = uint(log2p + 0.5) - /* Approximate the bit depth by round(-log2(P(symbol))) */ - bits += float64(histogram.data_[i]) * log2p - - if depth > 15 { - depth = 15 - } - - if depth > max_depth { - max_depth = depth - } - - depth_histo[depth]++ - i++ - } else { - var reps uint32 = 1 - /* Compute the run length of zeros and add the appropriate number of 0 - and 17 code length codes to the code length code histogram. */ - - var k uint - for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ { - reps++ - } - - i += uint(reps) - if i == data_size { - /* Don't add any cost for the last zero run, since these are encoded - only implicitly. */ - break - } - - if reps < 3 { - depth_histo[0] += reps - } else { - reps -= 2 - for reps > 0 { - depth_histo[repeatZeroCodeLength]++ - - /* Add the 3 extra bits for the 17 code length code. */ - bits += 3 - - reps >>= 3 - } - } - } - } - - /* Add the estimated encoding cost of the code length code histogram. */ - bits += float64(18 + 2*max_depth) - - /* Add the entropy of the code length code histogram. 
*/ - bits += bitsEntropy(depth_histo[:], codeLengthCodes) - } - - return bits -} - -func populationCostCommand(histogram *histogramCommand) float64 { - var data_size uint = histogramDataSizeCommand() - var count int = 0 - var s [5]uint - var bits float64 = 0.0 - var i uint - if histogram.total_count_ == 0 { - return kOneSymbolHistogramCost - } - - for i = 0; i < data_size; i++ { - if histogram.data_[i] > 0 { - s[count] = i - count++ - if count > 4 { - break - } - } - } - - if count == 1 { - return kOneSymbolHistogramCost - } - - if count == 2 { - return kTwoSymbolHistogramCost + float64(histogram.total_count_) - } - - if count == 3 { - var histo0 uint32 = histogram.data_[s[0]] - var histo1 uint32 = histogram.data_[s[1]] - var histo2 uint32 = histogram.data_[s[2]] - var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2)) - return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax) - } - - if count == 4 { - var histo [4]uint32 - var h23 uint32 - var histomax uint32 - for i = 0; i < 4; i++ { - histo[i] = histogram.data_[s[i]] - } - - /* Sort */ - for i = 0; i < 4; i++ { - var j uint - for j = i + 1; j < 4; j++ { - if histo[j] > histo[i] { - var tmp uint32 = histo[j] - histo[j] = histo[i] - histo[i] = tmp - } - } - } - - h23 = histo[2] + histo[3] - histomax = brotli_max_uint32_t(h23, histo[0]) - return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax) - } - { - var max_depth uint = 1 - var depth_histo = [codeLengthCodes]uint32{0} - /* In this loop we compute the entropy of the histogram and simultaneously - build a simplified histogram of the code length codes where we use the - zero repeat code 17, but we don't use the non-zero repeat code 16. 
*/ - - var log2total float64 = fastLog2(histogram.total_count_) - for i = 0; i < data_size; { - if histogram.data_[i] > 0 { - var log2p float64 = log2total - fastLog2(uint(histogram.data_[i])) - /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) = - = log2(total_count) - log2(count(symbol)) */ - - var depth uint = uint(log2p + 0.5) - /* Approximate the bit depth by round(-log2(P(symbol))) */ - bits += float64(histogram.data_[i]) * log2p - - if depth > 15 { - depth = 15 - } - - if depth > max_depth { - max_depth = depth - } - - depth_histo[depth]++ - i++ - } else { - var reps uint32 = 1 - /* Compute the run length of zeros and add the appropriate number of 0 - and 17 code length codes to the code length code histogram. */ - - var k uint - for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ { - reps++ - } - - i += uint(reps) - if i == data_size { - /* Don't add any cost for the last zero run, since these are encoded - only implicitly. */ - break - } - - if reps < 3 { - depth_histo[0] += reps - } else { - reps -= 2 - for reps > 0 { - depth_histo[repeatZeroCodeLength]++ - - /* Add the 3 extra bits for the 17 code length code. */ - bits += 3 - - reps >>= 3 - } - } - } - } - - /* Add the estimated encoding cost of the code length code histogram. */ - bits += float64(18 + 2*max_depth) - - /* Add the entropy of the code length code histogram. 
*/ - bits += bitsEntropy(depth_histo[:], codeLengthCodes) - } - - return bits -} - -func populationCostDistance(histogram *histogramDistance) float64 { - var data_size uint = histogramDataSizeDistance() - var count int = 0 - var s [5]uint - var bits float64 = 0.0 - var i uint - if histogram.total_count_ == 0 { - return kOneSymbolHistogramCost - } - - for i = 0; i < data_size; i++ { - if histogram.data_[i] > 0 { - s[count] = i - count++ - if count > 4 { - break - } - } - } - - if count == 1 { - return kOneSymbolHistogramCost - } - - if count == 2 { - return kTwoSymbolHistogramCost + float64(histogram.total_count_) - } - - if count == 3 { - var histo0 uint32 = histogram.data_[s[0]] - var histo1 uint32 = histogram.data_[s[1]] - var histo2 uint32 = histogram.data_[s[2]] - var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2)) - return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax) - } - - if count == 4 { - var histo [4]uint32 - var h23 uint32 - var histomax uint32 - for i = 0; i < 4; i++ { - histo[i] = histogram.data_[s[i]] - } - - /* Sort */ - for i = 0; i < 4; i++ { - var j uint - for j = i + 1; j < 4; j++ { - if histo[j] > histo[i] { - var tmp uint32 = histo[j] - histo[j] = histo[i] - histo[i] = tmp - } - } - } - - h23 = histo[2] + histo[3] - histomax = brotli_max_uint32_t(h23, histo[0]) - return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax) - } - { - var max_depth uint = 1 - var depth_histo = [codeLengthCodes]uint32{0} - /* In this loop we compute the entropy of the histogram and simultaneously - build a simplified histogram of the code length codes where we use the - zero repeat code 17, but we don't use the non-zero repeat code 16. 
*/ - - var log2total float64 = fastLog2(histogram.total_count_) - for i = 0; i < data_size; { - if histogram.data_[i] > 0 { - var log2p float64 = log2total - fastLog2(uint(histogram.data_[i])) - /* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) = - = log2(total_count) - log2(count(symbol)) */ - - var depth uint = uint(log2p + 0.5) - /* Approximate the bit depth by round(-log2(P(symbol))) */ - bits += float64(histogram.data_[i]) * log2p - - if depth > 15 { - depth = 15 - } - - if depth > max_depth { - max_depth = depth - } - - depth_histo[depth]++ - i++ - } else { - var reps uint32 = 1 - /* Compute the run length of zeros and add the appropriate number of 0 - and 17 code length codes to the code length code histogram. */ - - var k uint - for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ { - reps++ - } - - i += uint(reps) - if i == data_size { - /* Don't add any cost for the last zero run, since these are encoded - only implicitly. */ - break - } - - if reps < 3 { - depth_histo[0] += reps - } else { - reps -= 2 - for reps > 0 { - depth_histo[repeatZeroCodeLength]++ - - /* Add the 3 extra bits for the 17 code length code. */ - bits += 3 - - reps >>= 3 - } - } - } - } - - /* Add the estimated encoding cost of the code length code histogram. */ - bits += float64(18 + 2*max_depth) - - /* Add the entropy of the code length code histogram. */ - bits += bitsEntropy(depth_histo[:], codeLengthCodes) - } - - return bits -} diff --git a/vendor/github.com/andybalholm/brotli/bit_reader.go b/vendor/github.com/andybalholm/brotli/bit_reader.go deleted file mode 100644 index fba8687c69..0000000000 --- a/vendor/github.com/andybalholm/brotli/bit_reader.go +++ /dev/null @@ -1,266 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Bit reading helpers */ - -const shortFillBitWindowRead = (8 >> 1) - -var kBitMask = [33]uint32{ - 0x00000000, - 0x00000001, - 0x00000003, - 0x00000007, - 0x0000000F, - 0x0000001F, - 0x0000003F, - 0x0000007F, - 0x000000FF, - 0x000001FF, - 0x000003FF, - 0x000007FF, - 0x00000FFF, - 0x00001FFF, - 0x00003FFF, - 0x00007FFF, - 0x0000FFFF, - 0x0001FFFF, - 0x0003FFFF, - 0x0007FFFF, - 0x000FFFFF, - 0x001FFFFF, - 0x003FFFFF, - 0x007FFFFF, - 0x00FFFFFF, - 0x01FFFFFF, - 0x03FFFFFF, - 0x07FFFFFF, - 0x0FFFFFFF, - 0x1FFFFFFF, - 0x3FFFFFFF, - 0x7FFFFFFF, - 0xFFFFFFFF, -} - -func bitMask(n uint32) uint32 { - return kBitMask[n] -} - -type bitReader struct { - val_ uint64 - bit_pos_ uint32 - input []byte - input_len uint - byte_pos uint -} - -type bitReaderState struct { - val_ uint64 - bit_pos_ uint32 - input []byte - input_len uint - byte_pos uint -} - -/* Initializes the BrotliBitReader fields. */ - -/* Ensures that accumulator is not empty. - May consume up to sizeof(brotli_reg_t) - 1 bytes of input. - Returns false if data is required but there is no input available. - For BROTLI_ALIGNED_READ this function also prepares bit reader for aligned - reading. */ -func bitReaderSaveState(from *bitReader, to *bitReaderState) { - to.val_ = from.val_ - to.bit_pos_ = from.bit_pos_ - to.input = from.input - to.input_len = from.input_len - to.byte_pos = from.byte_pos -} - -func bitReaderRestoreState(to *bitReader, from *bitReaderState) { - to.val_ = from.val_ - to.bit_pos_ = from.bit_pos_ - to.input = from.input - to.input_len = from.input_len - to.byte_pos = from.byte_pos -} - -func getAvailableBits(br *bitReader) uint32 { - return 64 - br.bit_pos_ -} - -/* Returns amount of unread bytes the bit reader still has buffered from the - BrotliInput, including whole bytes in br->val_. 
*/ -func getRemainingBytes(br *bitReader) uint { - return uint(uint32(br.input_len-br.byte_pos) + (getAvailableBits(br) >> 3)) -} - -/* Checks if there is at least |num| bytes left in the input ring-buffer - (excluding the bits remaining in br->val_). */ -func checkInputAmount(br *bitReader, num uint) bool { - return br.input_len-br.byte_pos >= num -} - -/* Guarantees that there are at least |n_bits| + 1 bits in accumulator. - Precondition: accumulator contains at least 1 bit. - |n_bits| should be in the range [1..24] for regular build. For portable - non-64-bit little-endian build only 16 bits are safe to request. */ -func fillBitWindow(br *bitReader, n_bits uint32) { - if br.bit_pos_ >= 32 { - br.val_ >>= 32 - br.bit_pos_ ^= 32 /* here same as -= 32 because of the if condition */ - br.val_ |= (uint64(binary.LittleEndian.Uint32(br.input[br.byte_pos:]))) << 32 - br.byte_pos += 4 - } -} - -/* Mostly like BrotliFillBitWindow, but guarantees only 16 bits and reads no - more than BROTLI_SHORT_FILL_BIT_WINDOW_READ bytes of input. */ -func fillBitWindow16(br *bitReader) { - fillBitWindow(br, 17) -} - -/* Tries to pull one byte of input to accumulator. - Returns false if there is no input available. */ -func pullByte(br *bitReader) bool { - if br.byte_pos == br.input_len { - return false - } - - br.val_ >>= 8 - br.val_ |= (uint64(br.input[br.byte_pos])) << 56 - br.bit_pos_ -= 8 - br.byte_pos++ - return true -} - -/* Returns currently available bits. - The number of valid bits could be calculated by BrotliGetAvailableBits. */ -func getBitsUnmasked(br *bitReader) uint64 { - return br.val_ >> br.bit_pos_ -} - -/* Like BrotliGetBits, but does not mask the result. - The result contains at least 16 valid bits. */ -func get16BitsUnmasked(br *bitReader) uint32 { - fillBitWindow(br, 16) - return uint32(getBitsUnmasked(br)) -} - -/* Returns the specified number of bits from |br| without advancing bit - position. 
*/ -func getBits(br *bitReader, n_bits uint32) uint32 { - fillBitWindow(br, n_bits) - return uint32(getBitsUnmasked(br)) & bitMask(n_bits) -} - -/* Tries to peek the specified amount of bits. Returns false, if there - is not enough input. */ -func safeGetBits(br *bitReader, n_bits uint32, val *uint32) bool { - for getAvailableBits(br) < n_bits { - if !pullByte(br) { - return false - } - } - - *val = uint32(getBitsUnmasked(br)) & bitMask(n_bits) - return true -} - -/* Advances the bit pos by |n_bits|. */ -func dropBits(br *bitReader, n_bits uint32) { - br.bit_pos_ += n_bits -} - -func bitReaderUnload(br *bitReader) { - var unused_bytes uint32 = getAvailableBits(br) >> 3 - var unused_bits uint32 = unused_bytes << 3 - br.byte_pos -= uint(unused_bytes) - if unused_bits == 64 { - br.val_ = 0 - } else { - br.val_ <<= unused_bits - } - - br.bit_pos_ += unused_bits -} - -/* Reads the specified number of bits from |br| and advances the bit pos. - Precondition: accumulator MUST contain at least |n_bits|. */ -func takeBits(br *bitReader, n_bits uint32, val *uint32) { - *val = uint32(getBitsUnmasked(br)) & bitMask(n_bits) - dropBits(br, n_bits) -} - -/* Reads the specified number of bits from |br| and advances the bit pos. - Assumes that there is enough input to perform BrotliFillBitWindow. */ -func readBits(br *bitReader, n_bits uint32) uint32 { - var val uint32 - fillBitWindow(br, n_bits) - takeBits(br, n_bits, &val) - return val -} - -/* Tries to read the specified amount of bits. Returns false, if there - is not enough input. |n_bits| MUST be positive. */ -func safeReadBits(br *bitReader, n_bits uint32, val *uint32) bool { - for getAvailableBits(br) < n_bits { - if !pullByte(br) { - return false - } - } - - takeBits(br, n_bits, val) - return true -} - -/* Advances the bit reader position to the next byte boundary and verifies - that any skipped bits are set to zero. 
*/ -func bitReaderJumpToByteBoundary(br *bitReader) bool { - var pad_bits_count uint32 = getAvailableBits(br) & 0x7 - var pad_bits uint32 = 0 - if pad_bits_count != 0 { - takeBits(br, pad_bits_count, &pad_bits) - } - - return pad_bits == 0 -} - -/* Copies remaining input bytes stored in the bit reader to the output. Value - |num| may not be larger than BrotliGetRemainingBytes. The bit reader must be - warmed up again after this. */ -func copyBytes(dest []byte, br *bitReader, num uint) { - for getAvailableBits(br) >= 8 && num > 0 { - dest[0] = byte(getBitsUnmasked(br)) - dropBits(br, 8) - dest = dest[1:] - num-- - } - - copy(dest, br.input[br.byte_pos:][:num]) - br.byte_pos += num -} - -func initBitReader(br *bitReader) { - br.val_ = 0 - br.bit_pos_ = 64 -} - -func warmupBitReader(br *bitReader) bool { - /* Fixing alignment after unaligned BrotliFillWindow would result accumulator - overflow. If unalignment is caused by BrotliSafeReadBits, then there is - enough space in accumulator to fix alignment. */ - if getAvailableBits(br) == 0 { - if !pullByte(br) { - return false - } - } - - return true -} diff --git a/vendor/github.com/andybalholm/brotli/block_splitter.go b/vendor/github.com/andybalholm/brotli/block_splitter.go deleted file mode 100644 index 978a131474..0000000000 --- a/vendor/github.com/andybalholm/brotli/block_splitter.go +++ /dev/null @@ -1,144 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Block split point selection utilities. 
*/ - -type blockSplit struct { - num_types uint - num_blocks uint - types []byte - lengths []uint32 - types_alloc_size uint - lengths_alloc_size uint -} - -const ( - kMaxLiteralHistograms uint = 100 - kMaxCommandHistograms uint = 50 - kLiteralBlockSwitchCost float64 = 28.1 - kCommandBlockSwitchCost float64 = 13.5 - kDistanceBlockSwitchCost float64 = 14.6 - kLiteralStrideLength uint = 70 - kCommandStrideLength uint = 40 - kSymbolsPerLiteralHistogram uint = 544 - kSymbolsPerCommandHistogram uint = 530 - kSymbolsPerDistanceHistogram uint = 544 - kMinLengthForBlockSplitting uint = 128 - kIterMulForRefining uint = 2 - kMinItersForRefining uint = 100 -) - -func countLiterals(cmds []command) uint { - var total_length uint = 0 - /* Count how many we have. */ - - for i := range cmds { - total_length += uint(cmds[i].insert_len_) - } - - return total_length -} - -func copyLiteralsToByteArray(cmds []command, data []byte, offset uint, mask uint, literals []byte) { - var pos uint = 0 - var from_pos uint = offset & mask - for i := range cmds { - var insert_len uint = uint(cmds[i].insert_len_) - if from_pos+insert_len > mask { - var head_size uint = mask + 1 - from_pos - copy(literals[pos:], data[from_pos:][:head_size]) - from_pos = 0 - pos += head_size - insert_len -= head_size - } - - if insert_len > 0 { - copy(literals[pos:], data[from_pos:][:insert_len]) - pos += insert_len - } - - from_pos = uint((uint32(from_pos+insert_len) + commandCopyLen(&cmds[i])) & uint32(mask)) - } -} - -func myRand(seed *uint32) uint32 { - /* Initial seed should be 7. In this case, loop length is (1 << 29). 
*/ - *seed *= 16807 - - return *seed -} - -func bitCost(count uint) float64 { - if count == 0 { - return -2.0 - } else { - return fastLog2(count) - } -} - -const histogramsPerBatch = 64 - -const clustersPerBatch = 16 - -func initBlockSplit(self *blockSplit) { - self.num_types = 0 - self.num_blocks = 0 - self.types = self.types[:0] - self.lengths = self.lengths[:0] - self.types_alloc_size = 0 - self.lengths_alloc_size = 0 -} - -func splitBlock(cmds []command, data []byte, pos uint, mask uint, params *encoderParams, literal_split *blockSplit, insert_and_copy_split *blockSplit, dist_split *blockSplit) { - { - var literals_count uint = countLiterals(cmds) - var literals []byte = make([]byte, literals_count) - - /* Create a continuous array of literals. */ - copyLiteralsToByteArray(cmds, data, pos, mask, literals) - - /* Create the block split on the array of literals. - Literal histograms have alphabet size 256. */ - splitByteVectorLiteral(literals, literals_count, kSymbolsPerLiteralHistogram, kMaxLiteralHistograms, kLiteralStrideLength, kLiteralBlockSwitchCost, params, literal_split) - - literals = nil - } - { - var insert_and_copy_codes []uint16 = make([]uint16, len(cmds)) - /* Compute prefix codes for commands. */ - - for i := range cmds { - insert_and_copy_codes[i] = cmds[i].cmd_prefix_ - } - - /* Create the block split on the array of command prefixes. */ - splitByteVectorCommand(insert_and_copy_codes, kSymbolsPerCommandHistogram, kMaxCommandHistograms, kCommandStrideLength, kCommandBlockSwitchCost, params, insert_and_copy_split) - - /* TODO: reuse for distances? */ - - insert_and_copy_codes = nil - } - { - var distance_prefixes []uint16 = make([]uint16, len(cmds)) - var j uint = 0 - /* Create a continuous array of distance prefixes. 
*/ - - for i := range cmds { - var cmd *command = &cmds[i] - if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 { - distance_prefixes[j] = cmd.dist_prefix_ & 0x3FF - j++ - } - } - - /* Create the block split on the array of distance prefixes. */ - splitByteVectorDistance(distance_prefixes, j, kSymbolsPerDistanceHistogram, kMaxCommandHistograms, kCommandStrideLength, kDistanceBlockSwitchCost, params, dist_split) - - distance_prefixes = nil - } -} diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_command.go b/vendor/github.com/andybalholm/brotli/block_splitter_command.go deleted file mode 100644 index 9dec13e4d9..0000000000 --- a/vendor/github.com/andybalholm/brotli/block_splitter_command.go +++ /dev/null @@ -1,434 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func initialEntropyCodesCommand(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramCommand) { - var seed uint32 = 7 - var block_length uint = length / num_histograms - var i uint - clearHistogramsCommand(histograms, num_histograms) - for i = 0; i < num_histograms; i++ { - var pos uint = length * i / num_histograms - if i != 0 { - pos += uint(myRand(&seed) % uint32(block_length)) - } - - if pos+stride >= length { - pos = length - stride - 1 - } - - histogramAddVectorCommand(&histograms[i], data[pos:], stride) - } -} - -func randomSampleCommand(seed *uint32, data []uint16, length uint, stride uint, sample *histogramCommand) { - var pos uint = 0 - if stride >= length { - stride = length - } else { - pos = uint(myRand(seed) % uint32(length-stride+1)) - } - - histogramAddVectorCommand(sample, data[pos:], stride) -} - -func refineEntropyCodesCommand(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramCommand) { - var iters uint = 
kIterMulForRefining*length/stride + kMinItersForRefining - var seed uint32 = 7 - var iter uint - iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms - for iter = 0; iter < iters; iter++ { - var sample histogramCommand - histogramClearCommand(&sample) - randomSampleCommand(&seed, data, length, stride, &sample) - histogramAddHistogramCommand(&histograms[iter%num_histograms], &sample) - } -} - -/* Assigns a block id from the range [0, num_histograms) to each data element - in data[0..length) and fills in block_id[0..length) with the assigned values. - Returns the number of blocks, i.e. one plus the number of block switches. */ -func findBlocksCommand(data []uint16, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramCommand, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint { - var data_size uint = histogramDataSizeCommand() - var bitmaplen uint = (num_histograms + 7) >> 3 - var num_blocks uint = 1 - var i uint - var j uint - assert(num_histograms <= 256) - if num_histograms <= 1 { - for i = 0; i < length; i++ { - block_id[i] = 0 - } - - return 1 - } - - for i := 0; i < int(data_size*num_histograms); i++ { - insert_cost[i] = 0 - } - for i = 0; i < num_histograms; i++ { - insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_))) - } - - for i = data_size; i != 0; { - i-- - for j = 0; j < num_histograms; j++ { - insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i])) - } - } - - for i := 0; i < int(num_histograms); i++ { - cost[i] = 0 - } - for i := 0; i < int(length*bitmaplen); i++ { - switch_signal[i] = 0 - } - - /* After each iteration of this loop, cost[k] will contain the difference - between the minimum cost of arriving at the current byte position using - entropy code k, and the minimum cost of arriving at the current byte - position. 
This difference is capped at the block switch cost, and if it - reaches block switch cost, it means that when we trace back from the last - position, we need to switch here. */ - for i = 0; i < length; i++ { - var byte_ix uint = i - var ix uint = byte_ix * bitmaplen - var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms - var min_cost float64 = 1e99 - var block_switch_cost float64 = block_switch_bitcost - var k uint - for k = 0; k < num_histograms; k++ { - /* We are coding the symbol in data[byte_ix] with entropy code k. */ - cost[k] += insert_cost[insert_cost_ix+k] - - if cost[k] < min_cost { - min_cost = cost[k] - block_id[byte_ix] = byte(k) - } - } - - /* More blocks for the beginning. */ - if byte_ix < 2000 { - block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000 - } - - for k = 0; k < num_histograms; k++ { - cost[k] -= min_cost - if cost[k] >= block_switch_cost { - var mask byte = byte(1 << (k & 7)) - cost[k] = block_switch_cost - assert(k>>3 < bitmaplen) - switch_signal[ix+(k>>3)] |= mask - /* Trace back from the last position and switch at the marked places. 
*/ - } - } - } - { - var byte_ix uint = length - 1 - var ix uint = byte_ix * bitmaplen - var cur_id byte = block_id[byte_ix] - for byte_ix > 0 { - var mask byte = byte(1 << (cur_id & 7)) - assert(uint(cur_id)>>3 < bitmaplen) - byte_ix-- - ix -= bitmaplen - if switch_signal[ix+uint(cur_id>>3)]&mask != 0 { - if cur_id != block_id[byte_ix] { - cur_id = block_id[byte_ix] - num_blocks++ - } - } - - block_id[byte_ix] = cur_id - } - } - - return num_blocks -} - -var remapBlockIdsCommand_kInvalidId uint16 = 256 - -func remapBlockIdsCommand(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint { - var next_id uint16 = 0 - var i uint - for i = 0; i < num_histograms; i++ { - new_id[i] = remapBlockIdsCommand_kInvalidId - } - - for i = 0; i < length; i++ { - assert(uint(block_ids[i]) < num_histograms) - if new_id[block_ids[i]] == remapBlockIdsCommand_kInvalidId { - new_id[block_ids[i]] = next_id - next_id++ - } - } - - for i = 0; i < length; i++ { - block_ids[i] = byte(new_id[block_ids[i]]) - assert(uint(block_ids[i]) < num_histograms) - } - - assert(uint(next_id) <= num_histograms) - return uint(next_id) -} - -func buildBlockHistogramsCommand(data []uint16, length uint, block_ids []byte, num_histograms uint, histograms []histogramCommand) { - var i uint - clearHistogramsCommand(histograms, num_histograms) - for i = 0; i < length; i++ { - histogramAddCommand(&histograms[block_ids[i]], uint(data[i])) - } -} - -var clusterBlocksCommand_kInvalidIndex uint32 = math.MaxUint32 - -func clusterBlocksCommand(data []uint16, length uint, num_blocks uint, block_ids []byte, split *blockSplit) { - var histogram_symbols []uint32 = make([]uint32, num_blocks) - var block_lengths []uint32 = make([]uint32, num_blocks) - var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch - var all_histograms_size uint = 0 - var all_histograms_capacity uint = expected_num_clusters - var all_histograms []histogramCommand = 
make([]histogramCommand, all_histograms_capacity) - var cluster_size_size uint = 0 - var cluster_size_capacity uint = expected_num_clusters - var cluster_size []uint32 = make([]uint32, cluster_size_capacity) - var num_clusters uint = 0 - var histograms []histogramCommand = make([]histogramCommand, brotli_min_size_t(num_blocks, histogramsPerBatch)) - var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2 - var pairs_capacity uint = max_num_pairs + 1 - var pairs []histogramPair = make([]histogramPair, pairs_capacity) - var pos uint = 0 - var clusters []uint32 - var num_final_clusters uint - var new_index []uint32 - var i uint - var sizes = [histogramsPerBatch]uint32{0} - var new_clusters = [histogramsPerBatch]uint32{0} - var symbols = [histogramsPerBatch]uint32{0} - var remap = [histogramsPerBatch]uint32{0} - - for i := 0; i < int(num_blocks); i++ { - block_lengths[i] = 0 - } - { - var block_idx uint = 0 - for i = 0; i < length; i++ { - assert(block_idx < num_blocks) - block_lengths[block_idx]++ - if i+1 == length || block_ids[i] != block_ids[i+1] { - block_idx++ - } - } - - assert(block_idx == num_blocks) - } - - for i = 0; i < num_blocks; i += histogramsPerBatch { - var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - var k uint - histogramClearCommand(&histograms[j]) - for k = 0; uint32(k) < block_lengths[i+j]; k++ { - histogramAddCommand(&histograms[j], uint(data[pos])) - pos++ - } - - histograms[j].bit_cost_ = populationCostCommand(&histograms[j]) - new_clusters[j] = uint32(j) - symbols[j] = uint32(j) - sizes[j] = 1 - } - - num_new_clusters = histogramCombineCommand(histograms, sizes[:], symbols[:], new_clusters[:], []histogramPair(pairs), num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs) - if all_histograms_capacity < (all_histograms_size + num_new_clusters) { - var _new_size uint - if all_histograms_capacity == 0 { - 
_new_size = all_histograms_size + num_new_clusters - } else { - _new_size = all_histograms_capacity - } - var new_array []histogramCommand - for _new_size < (all_histograms_size + num_new_clusters) { - _new_size *= 2 - } - new_array = make([]histogramCommand, _new_size) - if all_histograms_capacity != 0 { - copy(new_array, all_histograms[:all_histograms_capacity]) - } - - all_histograms = new_array - all_histograms_capacity = _new_size - } - - brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters) - for j = 0; j < num_new_clusters; j++ { - all_histograms[all_histograms_size] = histograms[new_clusters[j]] - all_histograms_size++ - cluster_size[cluster_size_size] = sizes[new_clusters[j]] - cluster_size_size++ - remap[new_clusters[j]] = uint32(j) - } - - for j = 0; j < num_to_combine; j++ { - histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]] - } - - num_clusters += num_new_clusters - assert(num_clusters == cluster_size_size) - assert(num_clusters == all_histograms_size) - } - - histograms = nil - - max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < max_num_pairs+1 { - pairs = nil - pairs = make([]histogramPair, (max_num_pairs + 1)) - } - - clusters = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - clusters[i] = uint32(i) - } - - num_final_clusters = histogramCombineCommand(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs) - pairs = nil - cluster_size = nil - - new_index = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - new_index[i] = clusterBlocksCommand_kInvalidIndex - } - pos = 0 - { - var next_index uint32 = 0 - for i = 0; i < num_blocks; i++ { - var histo histogramCommand - var j uint - var best_out uint32 - var best_bits float64 - histogramClearCommand(&histo) - for j = 0; uint32(j) < block_lengths[i]; j++ { - 
histogramAddCommand(&histo, uint(data[pos])) - pos++ - } - - if i == 0 { - best_out = histogram_symbols[0] - } else { - best_out = histogram_symbols[i-1] - } - best_bits = histogramBitCostDistanceCommand(&histo, &all_histograms[best_out]) - for j = 0; j < num_final_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceCommand(&histo, &all_histograms[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - histogram_symbols[i] = best_out - if new_index[best_out] == clusterBlocksCommand_kInvalidIndex { - new_index[best_out] = next_index - next_index++ - } - } - } - - clusters = nil - all_histograms = nil - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks) - { - var cur_length uint32 = 0 - var block_idx uint = 0 - var max_type byte = 0 - for i = 0; i < num_blocks; i++ { - cur_length += block_lengths[i] - if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] { - var id byte = byte(new_index[histogram_symbols[i]]) - split.types[block_idx] = id - split.lengths[block_idx] = cur_length - max_type = brotli_max_uint8_t(max_type, id) - cur_length = 0 - block_idx++ - } - } - - split.num_blocks = block_idx - split.num_types = uint(max_type) + 1 - } - - new_index = nil - block_lengths = nil - histogram_symbols = nil -} - -func splitByteVectorCommand(data []uint16, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) { - length := uint(len(data)) - var data_size uint = histogramDataSizeCommand() - var num_histograms uint = length/literals_per_histogram + 1 - var histograms []histogramCommand - if num_histograms > max_histograms { - num_histograms = max_histograms - } - - if length == 0 { - split.num_types = 1 - return - } else if length < kMinLengthForBlockSplitting { - 
brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1) - split.num_types = 1 - split.types[split.num_blocks] = 0 - split.lengths[split.num_blocks] = uint32(length) - split.num_blocks++ - return - } - - histograms = make([]histogramCommand, num_histograms) - - /* Find good entropy codes. */ - initialEntropyCodesCommand(data, length, sampling_stride_length, num_histograms, histograms) - - refineEntropyCodesCommand(data, length, sampling_stride_length, num_histograms, histograms) - { - var block_ids []byte = make([]byte, length) - var num_blocks uint = 0 - var bitmaplen uint = (num_histograms + 7) >> 3 - var insert_cost []float64 = make([]float64, (data_size * num_histograms)) - var cost []float64 = make([]float64, num_histograms) - var switch_signal []byte = make([]byte, (length * bitmaplen)) - var new_id []uint16 = make([]uint16, num_histograms) - var iters uint - if params.quality < hqZopflificationQuality { - iters = 3 - } else { - iters = 10 - } - /* Find a good path through literals with the good entropy codes. 
*/ - - var i uint - for i = 0; i < iters; i++ { - num_blocks = findBlocksCommand(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids) - num_histograms = remapBlockIdsCommand(block_ids, length, new_id, num_histograms) - buildBlockHistogramsCommand(data, length, block_ids, num_histograms, histograms) - } - - insert_cost = nil - cost = nil - switch_signal = nil - new_id = nil - histograms = nil - clusterBlocksCommand(data, length, num_blocks, block_ids, split) - block_ids = nil - } -} diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_distance.go b/vendor/github.com/andybalholm/brotli/block_splitter_distance.go deleted file mode 100644 index 953530d518..0000000000 --- a/vendor/github.com/andybalholm/brotli/block_splitter_distance.go +++ /dev/null @@ -1,433 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func initialEntropyCodesDistance(data []uint16, length uint, stride uint, num_histograms uint, histograms []histogramDistance) { - var seed uint32 = 7 - var block_length uint = length / num_histograms - var i uint - clearHistogramsDistance(histograms, num_histograms) - for i = 0; i < num_histograms; i++ { - var pos uint = length * i / num_histograms - if i != 0 { - pos += uint(myRand(&seed) % uint32(block_length)) - } - - if pos+stride >= length { - pos = length - stride - 1 - } - - histogramAddVectorDistance(&histograms[i], data[pos:], stride) - } -} - -func randomSampleDistance(seed *uint32, data []uint16, length uint, stride uint, sample *histogramDistance) { - var pos uint = 0 - if stride >= length { - stride = length - } else { - pos = uint(myRand(seed) % uint32(length-stride+1)) - } - - histogramAddVectorDistance(sample, data[pos:], stride) -} - -func refineEntropyCodesDistance(data []uint16, length uint, 
stride uint, num_histograms uint, histograms []histogramDistance) { - var iters uint = kIterMulForRefining*length/stride + kMinItersForRefining - var seed uint32 = 7 - var iter uint - iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms - for iter = 0; iter < iters; iter++ { - var sample histogramDistance - histogramClearDistance(&sample) - randomSampleDistance(&seed, data, length, stride, &sample) - histogramAddHistogramDistance(&histograms[iter%num_histograms], &sample) - } -} - -/* Assigns a block id from the range [0, num_histograms) to each data element - in data[0..length) and fills in block_id[0..length) with the assigned values. - Returns the number of blocks, i.e. one plus the number of block switches. */ -func findBlocksDistance(data []uint16, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramDistance, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint { - var data_size uint = histogramDataSizeDistance() - var bitmaplen uint = (num_histograms + 7) >> 3 - var num_blocks uint = 1 - var i uint - var j uint - assert(num_histograms <= 256) - if num_histograms <= 1 { - for i = 0; i < length; i++ { - block_id[i] = 0 - } - - return 1 - } - - for i := 0; i < int(data_size*num_histograms); i++ { - insert_cost[i] = 0 - } - for i = 0; i < num_histograms; i++ { - insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_))) - } - - for i = data_size; i != 0; { - i-- - for j = 0; j < num_histograms; j++ { - insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i])) - } - } - - for i := 0; i < int(num_histograms); i++ { - cost[i] = 0 - } - for i := 0; i < int(length*bitmaplen); i++ { - switch_signal[i] = 0 - } - - /* After each iteration of this loop, cost[k] will contain the difference - between the minimum cost of arriving at the current byte position using - entropy code k, and the minimum cost of arriving at the current byte - position. 
This difference is capped at the block switch cost, and if it - reaches block switch cost, it means that when we trace back from the last - position, we need to switch here. */ - for i = 0; i < length; i++ { - var byte_ix uint = i - var ix uint = byte_ix * bitmaplen - var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms - var min_cost float64 = 1e99 - var block_switch_cost float64 = block_switch_bitcost - var k uint - for k = 0; k < num_histograms; k++ { - /* We are coding the symbol in data[byte_ix] with entropy code k. */ - cost[k] += insert_cost[insert_cost_ix+k] - - if cost[k] < min_cost { - min_cost = cost[k] - block_id[byte_ix] = byte(k) - } - } - - /* More blocks for the beginning. */ - if byte_ix < 2000 { - block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000 - } - - for k = 0; k < num_histograms; k++ { - cost[k] -= min_cost - if cost[k] >= block_switch_cost { - var mask byte = byte(1 << (k & 7)) - cost[k] = block_switch_cost - assert(k>>3 < bitmaplen) - switch_signal[ix+(k>>3)] |= mask - /* Trace back from the last position and switch at the marked places. 
*/ - } - } - } - { - var byte_ix uint = length - 1 - var ix uint = byte_ix * bitmaplen - var cur_id byte = block_id[byte_ix] - for byte_ix > 0 { - var mask byte = byte(1 << (cur_id & 7)) - assert(uint(cur_id)>>3 < bitmaplen) - byte_ix-- - ix -= bitmaplen - if switch_signal[ix+uint(cur_id>>3)]&mask != 0 { - if cur_id != block_id[byte_ix] { - cur_id = block_id[byte_ix] - num_blocks++ - } - } - - block_id[byte_ix] = cur_id - } - } - - return num_blocks -} - -var remapBlockIdsDistance_kInvalidId uint16 = 256 - -func remapBlockIdsDistance(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint { - var next_id uint16 = 0 - var i uint - for i = 0; i < num_histograms; i++ { - new_id[i] = remapBlockIdsDistance_kInvalidId - } - - for i = 0; i < length; i++ { - assert(uint(block_ids[i]) < num_histograms) - if new_id[block_ids[i]] == remapBlockIdsDistance_kInvalidId { - new_id[block_ids[i]] = next_id - next_id++ - } - } - - for i = 0; i < length; i++ { - block_ids[i] = byte(new_id[block_ids[i]]) - assert(uint(block_ids[i]) < num_histograms) - } - - assert(uint(next_id) <= num_histograms) - return uint(next_id) -} - -func buildBlockHistogramsDistance(data []uint16, length uint, block_ids []byte, num_histograms uint, histograms []histogramDistance) { - var i uint - clearHistogramsDistance(histograms, num_histograms) - for i = 0; i < length; i++ { - histogramAddDistance(&histograms[block_ids[i]], uint(data[i])) - } -} - -var clusterBlocksDistance_kInvalidIndex uint32 = math.MaxUint32 - -func clusterBlocksDistance(data []uint16, length uint, num_blocks uint, block_ids []byte, split *blockSplit) { - var histogram_symbols []uint32 = make([]uint32, num_blocks) - var block_lengths []uint32 = make([]uint32, num_blocks) - var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch - var all_histograms_size uint = 0 - var all_histograms_capacity uint = expected_num_clusters - var all_histograms []histogramDistance = 
make([]histogramDistance, all_histograms_capacity) - var cluster_size_size uint = 0 - var cluster_size_capacity uint = expected_num_clusters - var cluster_size []uint32 = make([]uint32, cluster_size_capacity) - var num_clusters uint = 0 - var histograms []histogramDistance = make([]histogramDistance, brotli_min_size_t(num_blocks, histogramsPerBatch)) - var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2 - var pairs_capacity uint = max_num_pairs + 1 - var pairs []histogramPair = make([]histogramPair, pairs_capacity) - var pos uint = 0 - var clusters []uint32 - var num_final_clusters uint - var new_index []uint32 - var i uint - var sizes = [histogramsPerBatch]uint32{0} - var new_clusters = [histogramsPerBatch]uint32{0} - var symbols = [histogramsPerBatch]uint32{0} - var remap = [histogramsPerBatch]uint32{0} - - for i := 0; i < int(num_blocks); i++ { - block_lengths[i] = 0 - } - { - var block_idx uint = 0 - for i = 0; i < length; i++ { - assert(block_idx < num_blocks) - block_lengths[block_idx]++ - if i+1 == length || block_ids[i] != block_ids[i+1] { - block_idx++ - } - } - - assert(block_idx == num_blocks) - } - - for i = 0; i < num_blocks; i += histogramsPerBatch { - var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - var k uint - histogramClearDistance(&histograms[j]) - for k = 0; uint32(k) < block_lengths[i+j]; k++ { - histogramAddDistance(&histograms[j], uint(data[pos])) - pos++ - } - - histograms[j].bit_cost_ = populationCostDistance(&histograms[j]) - new_clusters[j] = uint32(j) - symbols[j] = uint32(j) - sizes[j] = 1 - } - - num_new_clusters = histogramCombineDistance(histograms, sizes[:], symbols[:], new_clusters[:], []histogramPair(pairs), num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs) - if all_histograms_capacity < (all_histograms_size + num_new_clusters) { - var _new_size uint - if all_histograms_capacity == 0 { - 
_new_size = all_histograms_size + num_new_clusters - } else { - _new_size = all_histograms_capacity - } - var new_array []histogramDistance - for _new_size < (all_histograms_size + num_new_clusters) { - _new_size *= 2 - } - new_array = make([]histogramDistance, _new_size) - if all_histograms_capacity != 0 { - copy(new_array, all_histograms[:all_histograms_capacity]) - } - - all_histograms = new_array - all_histograms_capacity = _new_size - } - - brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters) - for j = 0; j < num_new_clusters; j++ { - all_histograms[all_histograms_size] = histograms[new_clusters[j]] - all_histograms_size++ - cluster_size[cluster_size_size] = sizes[new_clusters[j]] - cluster_size_size++ - remap[new_clusters[j]] = uint32(j) - } - - for j = 0; j < num_to_combine; j++ { - histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]] - } - - num_clusters += num_new_clusters - assert(num_clusters == cluster_size_size) - assert(num_clusters == all_histograms_size) - } - - histograms = nil - - max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < max_num_pairs+1 { - pairs = nil - pairs = make([]histogramPair, (max_num_pairs + 1)) - } - - clusters = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - clusters[i] = uint32(i) - } - - num_final_clusters = histogramCombineDistance(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs) - pairs = nil - cluster_size = nil - - new_index = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - new_index[i] = clusterBlocksDistance_kInvalidIndex - } - pos = 0 - { - var next_index uint32 = 0 - for i = 0; i < num_blocks; i++ { - var histo histogramDistance - var j uint - var best_out uint32 - var best_bits float64 - histogramClearDistance(&histo) - for j = 0; uint32(j) < block_lengths[i]; j++ { - 
histogramAddDistance(&histo, uint(data[pos])) - pos++ - } - - if i == 0 { - best_out = histogram_symbols[0] - } else { - best_out = histogram_symbols[i-1] - } - best_bits = histogramBitCostDistanceDistance(&histo, &all_histograms[best_out]) - for j = 0; j < num_final_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceDistance(&histo, &all_histograms[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - histogram_symbols[i] = best_out - if new_index[best_out] == clusterBlocksDistance_kInvalidIndex { - new_index[best_out] = next_index - next_index++ - } - } - } - - clusters = nil - all_histograms = nil - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks) - { - var cur_length uint32 = 0 - var block_idx uint = 0 - var max_type byte = 0 - for i = 0; i < num_blocks; i++ { - cur_length += block_lengths[i] - if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] { - var id byte = byte(new_index[histogram_symbols[i]]) - split.types[block_idx] = id - split.lengths[block_idx] = cur_length - max_type = brotli_max_uint8_t(max_type, id) - cur_length = 0 - block_idx++ - } - } - - split.num_blocks = block_idx - split.num_types = uint(max_type) + 1 - } - - new_index = nil - block_lengths = nil - histogram_symbols = nil -} - -func splitByteVectorDistance(data []uint16, length uint, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) { - var data_size uint = histogramDataSizeDistance() - var num_histograms uint = length/literals_per_histogram + 1 - var histograms []histogramDistance - if num_histograms > max_histograms { - num_histograms = max_histograms - } - - if length == 0 { - split.num_types = 1 - return - } else if length < kMinLengthForBlockSplitting { - 
brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1) - split.num_types = 1 - split.types[split.num_blocks] = 0 - split.lengths[split.num_blocks] = uint32(length) - split.num_blocks++ - return - } - - histograms = make([]histogramDistance, num_histograms) - - /* Find good entropy codes. */ - initialEntropyCodesDistance(data, length, sampling_stride_length, num_histograms, histograms) - - refineEntropyCodesDistance(data, length, sampling_stride_length, num_histograms, histograms) - { - var block_ids []byte = make([]byte, length) - var num_blocks uint = 0 - var bitmaplen uint = (num_histograms + 7) >> 3 - var insert_cost []float64 = make([]float64, (data_size * num_histograms)) - var cost []float64 = make([]float64, num_histograms) - var switch_signal []byte = make([]byte, (length * bitmaplen)) - var new_id []uint16 = make([]uint16, num_histograms) - var iters uint - if params.quality < hqZopflificationQuality { - iters = 3 - } else { - iters = 10 - } - /* Find a good path through literals with the good entropy codes. 
*/ - - var i uint - for i = 0; i < iters; i++ { - num_blocks = findBlocksDistance(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids) - num_histograms = remapBlockIdsDistance(block_ids, length, new_id, num_histograms) - buildBlockHistogramsDistance(data, length, block_ids, num_histograms, histograms) - } - - insert_cost = nil - cost = nil - switch_signal = nil - new_id = nil - histograms = nil - clusterBlocksDistance(data, length, num_blocks, block_ids, split) - block_ids = nil - } -} diff --git a/vendor/github.com/andybalholm/brotli/block_splitter_literal.go b/vendor/github.com/andybalholm/brotli/block_splitter_literal.go deleted file mode 100644 index 1c895cf388..0000000000 --- a/vendor/github.com/andybalholm/brotli/block_splitter_literal.go +++ /dev/null @@ -1,433 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func initialEntropyCodesLiteral(data []byte, length uint, stride uint, num_histograms uint, histograms []histogramLiteral) { - var seed uint32 = 7 - var block_length uint = length / num_histograms - var i uint - clearHistogramsLiteral(histograms, num_histograms) - for i = 0; i < num_histograms; i++ { - var pos uint = length * i / num_histograms - if i != 0 { - pos += uint(myRand(&seed) % uint32(block_length)) - } - - if pos+stride >= length { - pos = length - stride - 1 - } - - histogramAddVectorLiteral(&histograms[i], data[pos:], stride) - } -} - -func randomSampleLiteral(seed *uint32, data []byte, length uint, stride uint, sample *histogramLiteral) { - var pos uint = 0 - if stride >= length { - stride = length - } else { - pos = uint(myRand(seed) % uint32(length-stride+1)) - } - - histogramAddVectorLiteral(sample, data[pos:], stride) -} - -func refineEntropyCodesLiteral(data []byte, length uint, stride uint, 
num_histograms uint, histograms []histogramLiteral) { - var iters uint = kIterMulForRefining*length/stride + kMinItersForRefining - var seed uint32 = 7 - var iter uint - iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms - for iter = 0; iter < iters; iter++ { - var sample histogramLiteral - histogramClearLiteral(&sample) - randomSampleLiteral(&seed, data, length, stride, &sample) - histogramAddHistogramLiteral(&histograms[iter%num_histograms], &sample) - } -} - -/* Assigns a block id from the range [0, num_histograms) to each data element - in data[0..length) and fills in block_id[0..length) with the assigned values. - Returns the number of blocks, i.e. one plus the number of block switches. */ -func findBlocksLiteral(data []byte, length uint, block_switch_bitcost float64, num_histograms uint, histograms []histogramLiteral, insert_cost []float64, cost []float64, switch_signal []byte, block_id []byte) uint { - var data_size uint = histogramDataSizeLiteral() - var bitmaplen uint = (num_histograms + 7) >> 3 - var num_blocks uint = 1 - var i uint - var j uint - assert(num_histograms <= 256) - if num_histograms <= 1 { - for i = 0; i < length; i++ { - block_id[i] = 0 - } - - return 1 - } - - for i := 0; i < int(data_size*num_histograms); i++ { - insert_cost[i] = 0 - } - for i = 0; i < num_histograms; i++ { - insert_cost[i] = fastLog2(uint(uint32(histograms[i].total_count_))) - } - - for i = data_size; i != 0; { - i-- - for j = 0; j < num_histograms; j++ { - insert_cost[i*num_histograms+j] = insert_cost[j] - bitCost(uint(histograms[j].data_[i])) - } - } - - for i := 0; i < int(num_histograms); i++ { - cost[i] = 0 - } - for i := 0; i < int(length*bitmaplen); i++ { - switch_signal[i] = 0 - } - - /* After each iteration of this loop, cost[k] will contain the difference - between the minimum cost of arriving at the current byte position using - entropy code k, and the minimum cost of arriving at the current byte - position. 
This difference is capped at the block switch cost, and if it - reaches block switch cost, it means that when we trace back from the last - position, we need to switch here. */ - for i = 0; i < length; i++ { - var byte_ix uint = i - var ix uint = byte_ix * bitmaplen - var insert_cost_ix uint = uint(data[byte_ix]) * num_histograms - var min_cost float64 = 1e99 - var block_switch_cost float64 = block_switch_bitcost - var k uint - for k = 0; k < num_histograms; k++ { - /* We are coding the symbol in data[byte_ix] with entropy code k. */ - cost[k] += insert_cost[insert_cost_ix+k] - - if cost[k] < min_cost { - min_cost = cost[k] - block_id[byte_ix] = byte(k) - } - } - - /* More blocks for the beginning. */ - if byte_ix < 2000 { - block_switch_cost *= 0.77 + 0.07*float64(byte_ix)/2000 - } - - for k = 0; k < num_histograms; k++ { - cost[k] -= min_cost - if cost[k] >= block_switch_cost { - var mask byte = byte(1 << (k & 7)) - cost[k] = block_switch_cost - assert(k>>3 < bitmaplen) - switch_signal[ix+(k>>3)] |= mask - /* Trace back from the last position and switch at the marked places. 
*/ - } - } - } - { - var byte_ix uint = length - 1 - var ix uint = byte_ix * bitmaplen - var cur_id byte = block_id[byte_ix] - for byte_ix > 0 { - var mask byte = byte(1 << (cur_id & 7)) - assert(uint(cur_id)>>3 < bitmaplen) - byte_ix-- - ix -= bitmaplen - if switch_signal[ix+uint(cur_id>>3)]&mask != 0 { - if cur_id != block_id[byte_ix] { - cur_id = block_id[byte_ix] - num_blocks++ - } - } - - block_id[byte_ix] = cur_id - } - } - - return num_blocks -} - -var remapBlockIdsLiteral_kInvalidId uint16 = 256 - -func remapBlockIdsLiteral(block_ids []byte, length uint, new_id []uint16, num_histograms uint) uint { - var next_id uint16 = 0 - var i uint - for i = 0; i < num_histograms; i++ { - new_id[i] = remapBlockIdsLiteral_kInvalidId - } - - for i = 0; i < length; i++ { - assert(uint(block_ids[i]) < num_histograms) - if new_id[block_ids[i]] == remapBlockIdsLiteral_kInvalidId { - new_id[block_ids[i]] = next_id - next_id++ - } - } - - for i = 0; i < length; i++ { - block_ids[i] = byte(new_id[block_ids[i]]) - assert(uint(block_ids[i]) < num_histograms) - } - - assert(uint(next_id) <= num_histograms) - return uint(next_id) -} - -func buildBlockHistogramsLiteral(data []byte, length uint, block_ids []byte, num_histograms uint, histograms []histogramLiteral) { - var i uint - clearHistogramsLiteral(histograms, num_histograms) - for i = 0; i < length; i++ { - histogramAddLiteral(&histograms[block_ids[i]], uint(data[i])) - } -} - -var clusterBlocksLiteral_kInvalidIndex uint32 = math.MaxUint32 - -func clusterBlocksLiteral(data []byte, length uint, num_blocks uint, block_ids []byte, split *blockSplit) { - var histogram_symbols []uint32 = make([]uint32, num_blocks) - var block_lengths []uint32 = make([]uint32, num_blocks) - var expected_num_clusters uint = clustersPerBatch * (num_blocks + histogramsPerBatch - 1) / histogramsPerBatch - var all_histograms_size uint = 0 - var all_histograms_capacity uint = expected_num_clusters - var all_histograms []histogramLiteral = 
make([]histogramLiteral, all_histograms_capacity) - var cluster_size_size uint = 0 - var cluster_size_capacity uint = expected_num_clusters - var cluster_size []uint32 = make([]uint32, cluster_size_capacity) - var num_clusters uint = 0 - var histograms []histogramLiteral = make([]histogramLiteral, brotli_min_size_t(num_blocks, histogramsPerBatch)) - var max_num_pairs uint = histogramsPerBatch * histogramsPerBatch / 2 - var pairs_capacity uint = max_num_pairs + 1 - var pairs []histogramPair = make([]histogramPair, pairs_capacity) - var pos uint = 0 - var clusters []uint32 - var num_final_clusters uint - var new_index []uint32 - var i uint - var sizes = [histogramsPerBatch]uint32{0} - var new_clusters = [histogramsPerBatch]uint32{0} - var symbols = [histogramsPerBatch]uint32{0} - var remap = [histogramsPerBatch]uint32{0} - - for i := 0; i < int(num_blocks); i++ { - block_lengths[i] = 0 - } - { - var block_idx uint = 0 - for i = 0; i < length; i++ { - assert(block_idx < num_blocks) - block_lengths[block_idx]++ - if i+1 == length || block_ids[i] != block_ids[i+1] { - block_idx++ - } - } - - assert(block_idx == num_blocks) - } - - for i = 0; i < num_blocks; i += histogramsPerBatch { - var num_to_combine uint = brotli_min_size_t(num_blocks-i, histogramsPerBatch) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - var k uint - histogramClearLiteral(&histograms[j]) - for k = 0; uint32(k) < block_lengths[i+j]; k++ { - histogramAddLiteral(&histograms[j], uint(data[pos])) - pos++ - } - - histograms[j].bit_cost_ = populationCostLiteral(&histograms[j]) - new_clusters[j] = uint32(j) - symbols[j] = uint32(j) - sizes[j] = 1 - } - - num_new_clusters = histogramCombineLiteral(histograms, sizes[:], symbols[:], new_clusters[:], []histogramPair(pairs), num_to_combine, num_to_combine, histogramsPerBatch, max_num_pairs) - if all_histograms_capacity < (all_histograms_size + num_new_clusters) { - var _new_size uint - if all_histograms_capacity == 0 { - 
_new_size = all_histograms_size + num_new_clusters - } else { - _new_size = all_histograms_capacity - } - var new_array []histogramLiteral - for _new_size < (all_histograms_size + num_new_clusters) { - _new_size *= 2 - } - new_array = make([]histogramLiteral, _new_size) - if all_histograms_capacity != 0 { - copy(new_array, all_histograms[:all_histograms_capacity]) - } - - all_histograms = new_array - all_histograms_capacity = _new_size - } - - brotli_ensure_capacity_uint32_t(&cluster_size, &cluster_size_capacity, cluster_size_size+num_new_clusters) - for j = 0; j < num_new_clusters; j++ { - all_histograms[all_histograms_size] = histograms[new_clusters[j]] - all_histograms_size++ - cluster_size[cluster_size_size] = sizes[new_clusters[j]] - cluster_size_size++ - remap[new_clusters[j]] = uint32(j) - } - - for j = 0; j < num_to_combine; j++ { - histogram_symbols[i+j] = uint32(num_clusters) + remap[symbols[j]] - } - - num_clusters += num_new_clusters - assert(num_clusters == cluster_size_size) - assert(num_clusters == all_histograms_size) - } - - histograms = nil - - max_num_pairs = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < max_num_pairs+1 { - pairs = nil - pairs = make([]histogramPair, (max_num_pairs + 1)) - } - - clusters = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - clusters[i] = uint32(i) - } - - num_final_clusters = histogramCombineLiteral(all_histograms, cluster_size, histogram_symbols, clusters, pairs, num_clusters, num_blocks, maxNumberOfBlockTypes, max_num_pairs) - pairs = nil - cluster_size = nil - - new_index = make([]uint32, num_clusters) - for i = 0; i < num_clusters; i++ { - new_index[i] = clusterBlocksLiteral_kInvalidIndex - } - pos = 0 - { - var next_index uint32 = 0 - for i = 0; i < num_blocks; i++ { - var histo histogramLiteral - var j uint - var best_out uint32 - var best_bits float64 - histogramClearLiteral(&histo) - for j = 0; uint32(j) < block_lengths[i]; j++ { - 
histogramAddLiteral(&histo, uint(data[pos])) - pos++ - } - - if i == 0 { - best_out = histogram_symbols[0] - } else { - best_out = histogram_symbols[i-1] - } - best_bits = histogramBitCostDistanceLiteral(&histo, &all_histograms[best_out]) - for j = 0; j < num_final_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceLiteral(&histo, &all_histograms[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - histogram_symbols[i] = best_out - if new_index[best_out] == clusterBlocksLiteral_kInvalidIndex { - new_index[best_out] = next_index - next_index++ - } - } - } - - clusters = nil - all_histograms = nil - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, num_blocks) - { - var cur_length uint32 = 0 - var block_idx uint = 0 - var max_type byte = 0 - for i = 0; i < num_blocks; i++ { - cur_length += block_lengths[i] - if i+1 == num_blocks || histogram_symbols[i] != histogram_symbols[i+1] { - var id byte = byte(new_index[histogram_symbols[i]]) - split.types[block_idx] = id - split.lengths[block_idx] = cur_length - max_type = brotli_max_uint8_t(max_type, id) - cur_length = 0 - block_idx++ - } - } - - split.num_blocks = block_idx - split.num_types = uint(max_type) + 1 - } - - new_index = nil - block_lengths = nil - histogram_symbols = nil -} - -func splitByteVectorLiteral(data []byte, length uint, literals_per_histogram uint, max_histograms uint, sampling_stride_length uint, block_switch_cost float64, params *encoderParams, split *blockSplit) { - var data_size uint = histogramDataSizeLiteral() - var num_histograms uint = length/literals_per_histogram + 1 - var histograms []histogramLiteral - if num_histograms > max_histograms { - num_histograms = max_histograms - } - - if length == 0 { - split.num_types = 1 - return - } else if length < kMinLengthForBlockSplitting { - 
brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, split.num_blocks+1) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, split.num_blocks+1) - split.num_types = 1 - split.types[split.num_blocks] = 0 - split.lengths[split.num_blocks] = uint32(length) - split.num_blocks++ - return - } - - histograms = make([]histogramLiteral, num_histograms) - - /* Find good entropy codes. */ - initialEntropyCodesLiteral(data, length, sampling_stride_length, num_histograms, histograms) - - refineEntropyCodesLiteral(data, length, sampling_stride_length, num_histograms, histograms) - { - var block_ids []byte = make([]byte, length) - var num_blocks uint = 0 - var bitmaplen uint = (num_histograms + 7) >> 3 - var insert_cost []float64 = make([]float64, (data_size * num_histograms)) - var cost []float64 = make([]float64, num_histograms) - var switch_signal []byte = make([]byte, (length * bitmaplen)) - var new_id []uint16 = make([]uint16, num_histograms) - var iters uint - if params.quality < hqZopflificationQuality { - iters = 3 - } else { - iters = 10 - } - /* Find a good path through literals with the good entropy codes. 
*/ - - var i uint - for i = 0; i < iters; i++ { - num_blocks = findBlocksLiteral(data, length, block_switch_cost, num_histograms, histograms, insert_cost, cost, switch_signal, block_ids) - num_histograms = remapBlockIdsLiteral(block_ids, length, new_id, num_histograms) - buildBlockHistogramsLiteral(data, length, block_ids, num_histograms, histograms) - } - - insert_cost = nil - cost = nil - switch_signal = nil - new_id = nil - histograms = nil - clusterBlocksLiteral(data, length, num_blocks, block_ids, split) - block_ids = nil - } -} diff --git a/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go b/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go deleted file mode 100644 index 2470f84e4b..0000000000 --- a/vendor/github.com/andybalholm/brotli/brotli_bit_stream.go +++ /dev/null @@ -1,1283 +0,0 @@ -package brotli - -import ( - "math" - "sync" -) - -const maxHuffmanTreeSize = (2*numCommandSymbols + 1) - -/* The maximum size of Huffman dictionary for distances assuming that - NPOSTFIX = 0 and NDIRECT = 0. 
*/ -const maxSimpleDistanceAlphabetSize = 140 - -/* Represents the range of values belonging to a prefix code: - [offset, offset + 2^nbits) */ -type prefixCodeRange struct { - offset uint32 - nbits uint32 -} - -var kBlockLengthPrefixCode = [numBlockLenSymbols]prefixCodeRange{ - prefixCodeRange{1, 2}, - prefixCodeRange{5, 2}, - prefixCodeRange{9, 2}, - prefixCodeRange{13, 2}, - prefixCodeRange{17, 3}, - prefixCodeRange{25, 3}, - prefixCodeRange{33, 3}, - prefixCodeRange{41, 3}, - prefixCodeRange{49, 4}, - prefixCodeRange{65, 4}, - prefixCodeRange{81, 4}, - prefixCodeRange{97, 4}, - prefixCodeRange{113, 5}, - prefixCodeRange{145, 5}, - prefixCodeRange{177, 5}, - prefixCodeRange{209, 5}, - prefixCodeRange{241, 6}, - prefixCodeRange{305, 6}, - prefixCodeRange{369, 7}, - prefixCodeRange{497, 8}, - prefixCodeRange{753, 9}, - prefixCodeRange{1265, 10}, - prefixCodeRange{2289, 11}, - prefixCodeRange{4337, 12}, - prefixCodeRange{8433, 13}, - prefixCodeRange{16625, 24}, -} - -func blockLengthPrefixCode(len uint32) uint32 { - var code uint32 - if len >= 177 { - if len >= 753 { - code = 20 - } else { - code = 14 - } - } else if len >= 41 { - code = 7 - } else { - code = 0 - } - for code < (numBlockLenSymbols-1) && len >= kBlockLengthPrefixCode[code+1].offset { - code++ - } - return code -} - -func getBlockLengthPrefixCode(len uint32, code *uint, n_extra *uint32, extra *uint32) { - *code = uint(blockLengthPrefixCode(uint32(len))) - *n_extra = kBlockLengthPrefixCode[*code].nbits - *extra = len - kBlockLengthPrefixCode[*code].offset -} - -type blockTypeCodeCalculator struct { - last_type uint - second_last_type uint -} - -func initBlockTypeCodeCalculator(self *blockTypeCodeCalculator) { - self.last_type = 1 - self.second_last_type = 0 -} - -func nextBlockTypeCode(calculator *blockTypeCodeCalculator, type_ byte) uint { - var type_code uint - if uint(type_) == calculator.last_type+1 { - type_code = 1 - } else if uint(type_) == calculator.second_last_type { - type_code = 0 - } else 
{ - type_code = uint(type_) + 2 - } - calculator.second_last_type = calculator.last_type - calculator.last_type = uint(type_) - return type_code -} - -/* |nibblesbits| represents the 2 bits to encode MNIBBLES (0-3) - REQUIRES: length > 0 - REQUIRES: length <= (1 << 24) */ -func encodeMlen(length uint, bits *uint64, numbits *uint, nibblesbits *uint64) { - var lg uint - if length == 1 { - lg = 1 - } else { - lg = uint(log2FloorNonZero(uint(uint32(length-1)))) + 1 - } - var tmp uint - if lg < 16 { - tmp = 16 - } else { - tmp = (lg + 3) - } - var mnibbles uint = tmp / 4 - assert(length > 0) - assert(length <= 1<<24) - assert(lg <= 24) - *nibblesbits = uint64(mnibbles) - 4 - *numbits = mnibbles * 4 - *bits = uint64(length) - 1 -} - -func storeCommandExtra(cmd *command, bw *bitWriter) { - var copylen_code uint32 = commandCopyLenCode(cmd) - var inscode uint16 = getInsertLengthCode(uint(cmd.insert_len_)) - var copycode uint16 = getCopyLengthCode(uint(copylen_code)) - var insnumextra uint32 = getInsertExtra(inscode) - var insextraval uint64 = uint64(cmd.insert_len_) - uint64(getInsertBase(inscode)) - var copyextraval uint64 = uint64(copylen_code) - uint64(getCopyBase(copycode)) - var bits uint64 = copyextraval< 0 - REQUIRES: length <= (1 << 24) */ -func storeCompressedMetaBlockHeader(is_final_block bool, length uint, bw *bitWriter) { - var lenbits uint64 - var nlenbits uint - var nibblesbits uint64 - var is_final uint64 - if is_final_block { - is_final = 1 - } else { - is_final = 0 - } - - /* Write ISLAST bit. */ - bw.writeBits(1, is_final) - - /* Write ISEMPTY bit. */ - if is_final_block { - bw.writeBits(1, 0) - } - - encodeMlen(length, &lenbits, &nlenbits, &nibblesbits) - bw.writeBits(2, nibblesbits) - bw.writeBits(nlenbits, lenbits) - - if !is_final_block { - /* Write ISUNCOMPRESSED bit. */ - bw.writeBits(1, 0) - } -} - -/* Stores the uncompressed meta-block header. 
- REQUIRES: length > 0 - REQUIRES: length <= (1 << 24) */ -func storeUncompressedMetaBlockHeader(length uint, bw *bitWriter) { - var lenbits uint64 - var nlenbits uint - var nibblesbits uint64 - - /* Write ISLAST bit. - Uncompressed block cannot be the last one, so set to 0. */ - bw.writeBits(1, 0) - - encodeMlen(length, &lenbits, &nlenbits, &nibblesbits) - bw.writeBits(2, nibblesbits) - bw.writeBits(nlenbits, lenbits) - - /* Write ISUNCOMPRESSED bit. */ - bw.writeBits(1, 1) -} - -var storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder = [codeLengthCodes]byte{1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15} - -var storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeSymbols = [6]byte{0, 7, 3, 2, 1, 15} -var storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeBitLengths = [6]byte{2, 4, 3, 2, 2, 4} - -func storeHuffmanTreeOfHuffmanTreeToBitMask(num_codes int, code_length_bitdepth []byte, bw *bitWriter) { - var skip_some uint = 0 - var codes_to_store uint = codeLengthCodes - /* The bit lengths of the Huffman code over the code length alphabet - are compressed with the following static Huffman code: - Symbol Code - ------ ---- - 0 00 - 1 1110 - 2 110 - 3 01 - 4 10 - 5 1111 */ - - /* Throw away trailing zeros: */ - if num_codes > 1 { - for ; codes_to_store > 0; codes_to_store-- { - if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[codes_to_store-1]] != 0 { - break - } - } - } - - if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[0]] == 0 && code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[1]] == 0 { - skip_some = 2 /* skips two. */ - if code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[2]] == 0 { - skip_some = 3 /* skips three. 
*/ - } - } - - bw.writeBits(2, uint64(skip_some)) - { - var i uint - for i = skip_some; i < codes_to_store; i++ { - var l uint = uint(code_length_bitdepth[storeHuffmanTreeOfHuffmanTreeToBitMask_kStorageOrder[i]]) - bw.writeBits(uint(storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeBitLengths[l]), uint64(storeHuffmanTreeOfHuffmanTreeToBitMask_kHuffmanBitLengthHuffmanCodeSymbols[l])) - } - } -} - -func storeHuffmanTreeToBitMask(huffman_tree_size uint, huffman_tree []byte, huffman_tree_extra_bits []byte, code_length_bitdepth []byte, code_length_bitdepth_symbols []uint16, bw *bitWriter) { - var i uint - for i = 0; i < huffman_tree_size; i++ { - var ix uint = uint(huffman_tree[i]) - bw.writeBits(uint(code_length_bitdepth[ix]), uint64(code_length_bitdepth_symbols[ix])) - - /* Extra bits */ - switch ix { - case repeatPreviousCodeLength: - bw.writeBits(2, uint64(huffman_tree_extra_bits[i])) - - case repeatZeroCodeLength: - bw.writeBits(3, uint64(huffman_tree_extra_bits[i])) - } - } -} - -func storeSimpleHuffmanTree(depths []byte, symbols []uint, num_symbols uint, max_bits uint, bw *bitWriter) { - /* value of 1 indicates a simple Huffman code */ - bw.writeBits(2, 1) - - bw.writeBits(2, uint64(num_symbols)-1) /* NSYM - 1 */ - { - /* Sort */ - var i uint - for i = 0; i < num_symbols; i++ { - var j uint - for j = i + 1; j < num_symbols; j++ { - if depths[symbols[j]] < depths[symbols[i]] { - var tmp uint = symbols[j] - symbols[j] = symbols[i] - symbols[i] = tmp - } - } - } - } - - if num_symbols == 2 { - bw.writeBits(max_bits, uint64(symbols[0])) - bw.writeBits(max_bits, uint64(symbols[1])) - } else if num_symbols == 3 { - bw.writeBits(max_bits, uint64(symbols[0])) - bw.writeBits(max_bits, uint64(symbols[1])) - bw.writeBits(max_bits, uint64(symbols[2])) - } else { - bw.writeBits(max_bits, uint64(symbols[0])) - bw.writeBits(max_bits, uint64(symbols[1])) - bw.writeBits(max_bits, uint64(symbols[2])) - bw.writeBits(max_bits, uint64(symbols[3])) - - /* tree-select 
*/ - var tmp int - if depths[symbols[0]] == 1 { - tmp = 1 - } else { - tmp = 0 - } - bw.writeBits(1, uint64(tmp)) - } -} - -/* num = alphabet size - depths = symbol depths */ -func storeHuffmanTree(depths []byte, num uint, tree []huffmanTree, bw *bitWriter) { - var huffman_tree [numCommandSymbols]byte - var huffman_tree_extra_bits [numCommandSymbols]byte - var huffman_tree_size uint = 0 - var code_length_bitdepth = [codeLengthCodes]byte{0} - var code_length_bitdepth_symbols [codeLengthCodes]uint16 - var huffman_tree_histogram = [codeLengthCodes]uint32{0} - var i uint - var num_codes int = 0 - /* Write the Huffman tree into the brotli-representation. - The command alphabet is the largest, so this allocation will fit all - alphabets. */ - - var code uint = 0 - - assert(num <= numCommandSymbols) - - writeHuffmanTree(depths, num, &huffman_tree_size, huffman_tree[:], huffman_tree_extra_bits[:]) - - /* Calculate the statistics of the Huffman tree in brotli-representation. */ - for i = 0; i < huffman_tree_size; i++ { - huffman_tree_histogram[huffman_tree[i]]++ - } - - for i = 0; i < codeLengthCodes; i++ { - if huffman_tree_histogram[i] != 0 { - if num_codes == 0 { - code = i - num_codes = 1 - } else if num_codes == 1 { - num_codes = 2 - break - } - } - } - - /* Calculate another Huffman tree to use for compressing both the - earlier Huffman tree with. */ - createHuffmanTree(huffman_tree_histogram[:], codeLengthCodes, 5, tree, code_length_bitdepth[:]) - - convertBitDepthsToSymbols(code_length_bitdepth[:], codeLengthCodes, code_length_bitdepth_symbols[:]) - - /* Now, we have all the data, let's start storing it */ - storeHuffmanTreeOfHuffmanTreeToBitMask(num_codes, code_length_bitdepth[:], bw) - - if num_codes == 1 { - code_length_bitdepth[code] = 0 - } - - /* Store the real Huffman tree now. 
*/ - storeHuffmanTreeToBitMask(huffman_tree_size, huffman_tree[:], huffman_tree_extra_bits[:], code_length_bitdepth[:], code_length_bitdepth_symbols[:], bw) -} - -/* Builds a Huffman tree from histogram[0:length] into depth[0:length] and - bits[0:length] and stores the encoded tree to the bit stream. */ -func buildAndStoreHuffmanTree(histogram []uint32, histogram_length uint, alphabet_size uint, tree []huffmanTree, depth []byte, bits []uint16, bw *bitWriter) { - var count uint = 0 - var s4 = [4]uint{0} - var i uint - var max_bits uint = 0 - for i = 0; i < histogram_length; i++ { - if histogram[i] != 0 { - if count < 4 { - s4[count] = i - } else if count > 4 { - break - } - - count++ - } - } - { - var max_bits_counter uint = alphabet_size - 1 - for max_bits_counter != 0 { - max_bits_counter >>= 1 - max_bits++ - } - } - - if count <= 1 { - bw.writeBits(4, 1) - bw.writeBits(max_bits, uint64(s4[0])) - depth[s4[0]] = 0 - bits[s4[0]] = 0 - return - } - - for i := 0; i < int(histogram_length); i++ { - depth[i] = 0 - } - createHuffmanTree(histogram, histogram_length, 15, tree, depth) - convertBitDepthsToSymbols(depth, histogram_length, bits) - - if count <= 4 { - storeSimpleHuffmanTree(depth, s4[:], count, max_bits, bw) - } else { - storeHuffmanTree(depth, histogram_length, tree, bw) - } -} - -func sortHuffmanTree1(v0 huffmanTree, v1 huffmanTree) bool { - return v0.total_count_ < v1.total_count_ -} - -var huffmanTreePool sync.Pool - -func buildAndStoreHuffmanTreeFast(histogram []uint32, histogram_total uint, max_bits uint, depth []byte, bits []uint16, bw *bitWriter) { - var count uint = 0 - var symbols = [4]uint{0} - var length uint = 0 - var total uint = histogram_total - for total != 0 { - if histogram[length] != 0 { - if count < 4 { - symbols[count] = length - } - - count++ - total -= uint(histogram[length]) - } - - length++ - } - - if count <= 1 { - bw.writeBits(4, 1) - bw.writeBits(max_bits, uint64(symbols[0])) - depth[symbols[0]] = 0 - bits[symbols[0]] = 0 - return - 
} - - for i := 0; i < int(length); i++ { - depth[i] = 0 - } - { - var max_tree_size uint = 2*length + 1 - tree, _ := huffmanTreePool.Get().(*[]huffmanTree) - if tree == nil || cap(*tree) < int(max_tree_size) { - tmp := make([]huffmanTree, max_tree_size) - tree = &tmp - } else { - *tree = (*tree)[:max_tree_size] - } - var count_limit uint32 - for count_limit = 1; ; count_limit *= 2 { - var node int = 0 - var l uint - for l = length; l != 0; { - l-- - if histogram[l] != 0 { - if histogram[l] >= count_limit { - initHuffmanTree(&(*tree)[node:][0], histogram[l], -1, int16(l)) - } else { - initHuffmanTree(&(*tree)[node:][0], count_limit, -1, int16(l)) - } - - node++ - } - } - { - var n int = node - /* Points to the next leaf node. */ /* Points to the next non-leaf node. */ - var sentinel huffmanTree - var i int = 0 - var j int = n + 1 - var k int - - sortHuffmanTreeItems(*tree, uint(n), huffmanTreeComparator(sortHuffmanTree1)) - - /* The nodes are: - [0, n): the sorted leaf nodes that we start with. - [n]: we add a sentinel here. - [n + 1, 2n): new parent nodes are added here, starting from - (n+1). These are naturally in ascending order. - [2n]: we add a sentinel at the end as well. - There will be (2n+1) elements at the end. */ - initHuffmanTree(&sentinel, math.MaxUint32, -1, -1) - - (*tree)[node] = sentinel - node++ - (*tree)[node] = sentinel - node++ - - for k = n - 1; k > 0; k-- { - var left int - var right int - if (*tree)[i].total_count_ <= (*tree)[j].total_count_ { - left = i - i++ - } else { - left = j - j++ - } - - if (*tree)[i].total_count_ <= (*tree)[j].total_count_ { - right = i - i++ - } else { - right = j - j++ - } - - /* The sentinel node becomes the parent node. */ - (*tree)[node-1].total_count_ = (*tree)[left].total_count_ + (*tree)[right].total_count_ - - (*tree)[node-1].index_left_ = int16(left) - (*tree)[node-1].index_right_or_value_ = int16(right) - - /* Add back the last sentinel node. 
*/ - (*tree)[node] = sentinel - node++ - } - - if setDepth(2*n-1, *tree, depth, 14) { - /* We need to pack the Huffman tree in 14 bits. If this was not - successful, add fake entities to the lowest values and retry. */ - break - } - } - } - - huffmanTreePool.Put(tree) - } - - convertBitDepthsToSymbols(depth, length, bits) - if count <= 4 { - var i uint - - /* value of 1 indicates a simple Huffman code */ - bw.writeBits(2, 1) - - bw.writeBits(2, uint64(count)-1) /* NSYM - 1 */ - - /* Sort */ - for i = 0; i < count; i++ { - var j uint - for j = i + 1; j < count; j++ { - if depth[symbols[j]] < depth[symbols[i]] { - var tmp uint = symbols[j] - symbols[j] = symbols[i] - symbols[i] = tmp - } - } - } - - if count == 2 { - bw.writeBits(max_bits, uint64(symbols[0])) - bw.writeBits(max_bits, uint64(symbols[1])) - } else if count == 3 { - bw.writeBits(max_bits, uint64(symbols[0])) - bw.writeBits(max_bits, uint64(symbols[1])) - bw.writeBits(max_bits, uint64(symbols[2])) - } else { - bw.writeBits(max_bits, uint64(symbols[0])) - bw.writeBits(max_bits, uint64(symbols[1])) - bw.writeBits(max_bits, uint64(symbols[2])) - bw.writeBits(max_bits, uint64(symbols[3])) - - /* tree-select */ - bw.writeSingleBit(depth[symbols[0]] == 1) - } - } else { - var previous_value byte = 8 - var i uint - - /* Complex Huffman Tree */ - storeStaticCodeLengthCode(bw) - - /* Actual RLE coding. 
*/ - for i = 0; i < length; { - var value byte = depth[i] - var reps uint = 1 - var k uint - for k = i + 1; k < length && depth[k] == value; k++ { - reps++ - } - - i += reps - if value == 0 { - bw.writeBits(uint(kZeroRepsDepth[reps]), kZeroRepsBits[reps]) - } else { - if previous_value != value { - bw.writeBits(uint(kCodeLengthDepth[value]), uint64(kCodeLengthBits[value])) - reps-- - } - - if reps < 3 { - for reps != 0 { - reps-- - bw.writeBits(uint(kCodeLengthDepth[value]), uint64(kCodeLengthBits[value])) - } - } else { - reps -= 3 - bw.writeBits(uint(kNonZeroRepsDepth[reps]), kNonZeroRepsBits[reps]) - } - - previous_value = value - } - } - } -} - -func indexOf(v []byte, v_size uint, value byte) uint { - var i uint = 0 - for ; i < v_size; i++ { - if v[i] == value { - return i - } - } - - return i -} - -func moveToFront(v []byte, index uint) { - var value byte = v[index] - var i uint - for i = index; i != 0; i-- { - v[i] = v[i-1] - } - - v[0] = value -} - -func moveToFrontTransform(v_in []uint32, v_size uint, v_out []uint32) { - var i uint - var mtf [256]byte - var max_value uint32 - if v_size == 0 { - return - } - - max_value = v_in[0] - for i = 1; i < v_size; i++ { - if v_in[i] > max_value { - max_value = v_in[i] - } - } - - assert(max_value < 256) - for i = 0; uint32(i) <= max_value; i++ { - mtf[i] = byte(i) - } - { - var mtf_size uint = uint(max_value + 1) - for i = 0; i < v_size; i++ { - var index uint = indexOf(mtf[:], mtf_size, byte(v_in[i])) - assert(index < mtf_size) - v_out[i] = uint32(index) - moveToFront(mtf[:], index) - } - } -} - -/* Finds runs of zeros in v[0..in_size) and replaces them with a prefix code of - the run length plus extra bits (lower 9 bits is the prefix code and the rest - are the extra bits). Non-zero values in v[] are shifted by - *max_length_prefix. Will not create prefix codes bigger than the initial - value of *max_run_length_prefix. 
The prefix code of run length L is simply - Log2Floor(L) and the number of extra bits is the same as the prefix code. */ -func runLengthCodeZeros(in_size uint, v []uint32, out_size *uint, max_run_length_prefix *uint32) { - var max_reps uint32 = 0 - var i uint - var max_prefix uint32 - for i = 0; i < in_size; { - var reps uint32 = 0 - for ; i < in_size && v[i] != 0; i++ { - } - for ; i < in_size && v[i] == 0; i++ { - reps++ - } - - max_reps = brotli_max_uint32_t(reps, max_reps) - } - - if max_reps > 0 { - max_prefix = log2FloorNonZero(uint(max_reps)) - } else { - max_prefix = 0 - } - max_prefix = brotli_min_uint32_t(max_prefix, *max_run_length_prefix) - *max_run_length_prefix = max_prefix - *out_size = 0 - for i = 0; i < in_size; { - assert(*out_size <= i) - if v[i] != 0 { - v[*out_size] = v[i] + *max_run_length_prefix - i++ - (*out_size)++ - } else { - var reps uint32 = 1 - var k uint - for k = i + 1; k < in_size && v[k] == 0; k++ { - reps++ - } - - i += uint(reps) - for reps != 0 { - if reps < 2< 0) - bw.writeSingleBit(use_rle) - if use_rle { - bw.writeBits(4, uint64(max_run_length_prefix)-1) - } - } - - buildAndStoreHuffmanTree(histogram[:], uint(uint32(num_clusters)+max_run_length_prefix), uint(uint32(num_clusters)+max_run_length_prefix), tree, depths[:], bits[:], bw) - for i = 0; i < num_rle_symbols; i++ { - var rle_symbol uint32 = rle_symbols[i] & encodeContextMap_kSymbolMask - var extra_bits_val uint32 = rle_symbols[i] >> symbolBits - bw.writeBits(uint(depths[rle_symbol]), uint64(bits[rle_symbol])) - if rle_symbol > 0 && rle_symbol <= max_run_length_prefix { - bw.writeBits(uint(rle_symbol), uint64(extra_bits_val)) - } - } - - bw.writeBits(1, 1) /* use move-to-front */ - rle_symbols = nil -} - -/* Stores the block switch command with index block_ix to the bit stream. 
*/ -func storeBlockSwitch(code *blockSplitCode, block_len uint32, block_type byte, is_first_block bool, bw *bitWriter) { - var typecode uint = nextBlockTypeCode(&code.type_code_calculator, block_type) - var lencode uint - var len_nextra uint32 - var len_extra uint32 - if !is_first_block { - bw.writeBits(uint(code.type_depths[typecode]), uint64(code.type_bits[typecode])) - } - - getBlockLengthPrefixCode(block_len, &lencode, &len_nextra, &len_extra) - - bw.writeBits(uint(code.length_depths[lencode]), uint64(code.length_bits[lencode])) - bw.writeBits(uint(len_nextra), uint64(len_extra)) -} - -/* Builds a BlockSplitCode data structure from the block split given by the - vector of block types and block lengths and stores it to the bit stream. */ -func buildAndStoreBlockSplitCode(types []byte, lengths []uint32, num_blocks uint, num_types uint, tree []huffmanTree, code *blockSplitCode, bw *bitWriter) { - var type_histo [maxBlockTypeSymbols]uint32 - var length_histo [numBlockLenSymbols]uint32 - var i uint - var type_code_calculator blockTypeCodeCalculator - for i := 0; i < int(num_types+2); i++ { - type_histo[i] = 0 - } - length_histo = [numBlockLenSymbols]uint32{} - initBlockTypeCodeCalculator(&type_code_calculator) - for i = 0; i < num_blocks; i++ { - var type_code uint = nextBlockTypeCode(&type_code_calculator, types[i]) - if i != 0 { - type_histo[type_code]++ - } - length_histo[blockLengthPrefixCode(lengths[i])]++ - } - - storeVarLenUint8(num_types-1, bw) - if num_types > 1 { /* TODO: else? could StoreBlockSwitch occur? */ - buildAndStoreHuffmanTree(type_histo[0:], num_types+2, num_types+2, tree, code.type_depths[0:], code.type_bits[0:], bw) - buildAndStoreHuffmanTree(length_histo[0:], numBlockLenSymbols, numBlockLenSymbols, tree, code.length_depths[0:], code.length_bits[0:], bw) - storeBlockSwitch(code, lengths[0], types[0], true, bw) - } -} - -/* Stores a context map where the histogram type is always the block type. 
*/ -func storeTrivialContextMap(num_types uint, context_bits uint, tree []huffmanTree, bw *bitWriter) { - storeVarLenUint8(num_types-1, bw) - if num_types > 1 { - var repeat_code uint = context_bits - 1 - var repeat_bits uint = (1 << repeat_code) - 1 - var alphabet_size uint = num_types + repeat_code - var histogram [maxContextMapSymbols]uint32 - var depths [maxContextMapSymbols]byte - var bits [maxContextMapSymbols]uint16 - var i uint - for i := 0; i < int(alphabet_size); i++ { - histogram[i] = 0 - } - - /* Write RLEMAX. */ - bw.writeBits(1, 1) - - bw.writeBits(4, uint64(repeat_code)-1) - histogram[repeat_code] = uint32(num_types) - histogram[0] = 1 - for i = context_bits; i < alphabet_size; i++ { - histogram[i] = 1 - } - - buildAndStoreHuffmanTree(histogram[:], alphabet_size, alphabet_size, tree, depths[:], bits[:], bw) - for i = 0; i < num_types; i++ { - var tmp uint - if i == 0 { - tmp = 0 - } else { - tmp = i + context_bits - 1 - } - var code uint = tmp - bw.writeBits(uint(depths[code]), uint64(bits[code])) - bw.writeBits(uint(depths[repeat_code]), uint64(bits[repeat_code])) - bw.writeBits(repeat_code, uint64(repeat_bits)) - } - - /* Write IMTF (inverse-move-to-front) bit. */ - bw.writeBits(1, 1) - } -} - -/* Manages the encoding of one block category (literal, command or distance). 
*/ -type blockEncoder struct { - histogram_length_ uint - num_block_types_ uint - block_types_ []byte - block_lengths_ []uint32 - num_blocks_ uint - block_split_code_ blockSplitCode - block_ix_ uint - block_len_ uint - entropy_ix_ uint - depths_ []byte - bits_ []uint16 -} - -var blockEncoderPool sync.Pool - -func getBlockEncoder(histogram_length uint, num_block_types uint, block_types []byte, block_lengths []uint32, num_blocks uint) *blockEncoder { - self, _ := blockEncoderPool.Get().(*blockEncoder) - - if self != nil { - self.block_ix_ = 0 - self.entropy_ix_ = 0 - self.depths_ = self.depths_[:0] - self.bits_ = self.bits_[:0] - } else { - self = &blockEncoder{} - } - - self.histogram_length_ = histogram_length - self.num_block_types_ = num_block_types - self.block_types_ = block_types - self.block_lengths_ = block_lengths - self.num_blocks_ = num_blocks - initBlockTypeCodeCalculator(&self.block_split_code_.type_code_calculator) - if num_blocks == 0 { - self.block_len_ = 0 - } else { - self.block_len_ = uint(block_lengths[0]) - } - - return self -} - -func cleanupBlockEncoder(self *blockEncoder) { - blockEncoderPool.Put(self) -} - -/* Creates entropy codes of block lengths and block types and stores them - to the bit stream. */ -func buildAndStoreBlockSwitchEntropyCodes(self *blockEncoder, tree []huffmanTree, bw *bitWriter) { - buildAndStoreBlockSplitCode(self.block_types_, self.block_lengths_, self.num_blocks_, self.num_block_types_, tree, &self.block_split_code_, bw) -} - -/* Stores the next symbol with the entropy code of the current block type. - Updates the block type and block length at block boundaries. 
*/ -func storeSymbol(self *blockEncoder, symbol uint, bw *bitWriter) { - if self.block_len_ == 0 { - self.block_ix_++ - var block_ix uint = self.block_ix_ - var block_len uint32 = self.block_lengths_[block_ix] - var block_type byte = self.block_types_[block_ix] - self.block_len_ = uint(block_len) - self.entropy_ix_ = uint(block_type) * self.histogram_length_ - storeBlockSwitch(&self.block_split_code_, block_len, block_type, false, bw) - } - - self.block_len_-- - { - var ix uint = self.entropy_ix_ + symbol - bw.writeBits(uint(self.depths_[ix]), uint64(self.bits_[ix])) - } -} - -/* Stores the next symbol with the entropy code of the current block type and - context value. - Updates the block type and block length at block boundaries. */ -func storeSymbolWithContext(self *blockEncoder, symbol uint, context uint, context_map []uint32, bw *bitWriter, context_bits uint) { - if self.block_len_ == 0 { - self.block_ix_++ - var block_ix uint = self.block_ix_ - var block_len uint32 = self.block_lengths_[block_ix] - var block_type byte = self.block_types_[block_ix] - self.block_len_ = uint(block_len) - self.entropy_ix_ = uint(block_type) << context_bits - storeBlockSwitch(&self.block_split_code_, block_len, block_type, false, bw) - } - - self.block_len_-- - { - var histo_ix uint = uint(context_map[self.entropy_ix_+context]) - var ix uint = histo_ix*self.histogram_length_ + symbol - bw.writeBits(uint(self.depths_[ix]), uint64(self.bits_[ix])) - } -} - -func buildAndStoreEntropyCodesLiteral(self *blockEncoder, histograms []histogramLiteral, histograms_size uint, alphabet_size uint, tree []huffmanTree, bw *bitWriter) { - var table_size uint = histograms_size * self.histogram_length_ - if cap(self.depths_) < int(table_size) { - self.depths_ = make([]byte, table_size) - } else { - self.depths_ = self.depths_[:table_size] - } - if cap(self.bits_) < int(table_size) { - self.bits_ = make([]uint16, table_size) - } else { - self.bits_ = self.bits_[:table_size] - } - { - var i uint - for 
i = 0; i < histograms_size; i++ { - var ix uint = i * self.histogram_length_ - buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], bw) - } - } -} - -func buildAndStoreEntropyCodesCommand(self *blockEncoder, histograms []histogramCommand, histograms_size uint, alphabet_size uint, tree []huffmanTree, bw *bitWriter) { - var table_size uint = histograms_size * self.histogram_length_ - if cap(self.depths_) < int(table_size) { - self.depths_ = make([]byte, table_size) - } else { - self.depths_ = self.depths_[:table_size] - } - if cap(self.bits_) < int(table_size) { - self.bits_ = make([]uint16, table_size) - } else { - self.bits_ = self.bits_[:table_size] - } - { - var i uint - for i = 0; i < histograms_size; i++ { - var ix uint = i * self.histogram_length_ - buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], bw) - } - } -} - -func buildAndStoreEntropyCodesDistance(self *blockEncoder, histograms []histogramDistance, histograms_size uint, alphabet_size uint, tree []huffmanTree, bw *bitWriter) { - var table_size uint = histograms_size * self.histogram_length_ - if cap(self.depths_) < int(table_size) { - self.depths_ = make([]byte, table_size) - } else { - self.depths_ = self.depths_[:table_size] - } - if cap(self.bits_) < int(table_size) { - self.bits_ = make([]uint16, table_size) - } else { - self.bits_ = self.bits_[:table_size] - } - { - var i uint - for i = 0; i < histograms_size; i++ { - var ix uint = i * self.histogram_length_ - buildAndStoreHuffmanTree(histograms[i].data_[0:], self.histogram_length_, alphabet_size, tree, self.depths_[ix:], self.bits_[ix:], bw) - } - } -} - -func storeMetaBlock(input []byte, start_pos uint, length uint, mask uint, prev_byte byte, prev_byte2 byte, is_last bool, params *encoderParams, literal_context_mode int, commands []command, mb *metaBlockSplit, bw *bitWriter) { - var pos uint 
= start_pos - var i uint - var num_distance_symbols uint32 = params.dist.alphabet_size - var num_effective_distance_symbols uint32 = num_distance_symbols - var tree []huffmanTree - var literal_context_lut contextLUT = getContextLUT(literal_context_mode) - var dist *distanceParams = ¶ms.dist - if params.large_window && num_effective_distance_symbols > numHistogramDistanceSymbols { - num_effective_distance_symbols = numHistogramDistanceSymbols - } - - storeCompressedMetaBlockHeader(is_last, length, bw) - - tree = make([]huffmanTree, maxHuffmanTreeSize) - literal_enc := getBlockEncoder(numLiteralSymbols, mb.literal_split.num_types, mb.literal_split.types, mb.literal_split.lengths, mb.literal_split.num_blocks) - command_enc := getBlockEncoder(numCommandSymbols, mb.command_split.num_types, mb.command_split.types, mb.command_split.lengths, mb.command_split.num_blocks) - distance_enc := getBlockEncoder(uint(num_effective_distance_symbols), mb.distance_split.num_types, mb.distance_split.types, mb.distance_split.lengths, mb.distance_split.num_blocks) - - buildAndStoreBlockSwitchEntropyCodes(literal_enc, tree, bw) - buildAndStoreBlockSwitchEntropyCodes(command_enc, tree, bw) - buildAndStoreBlockSwitchEntropyCodes(distance_enc, tree, bw) - - bw.writeBits(2, uint64(dist.distance_postfix_bits)) - bw.writeBits(4, uint64(dist.num_direct_distance_codes)>>dist.distance_postfix_bits) - for i = 0; i < mb.literal_split.num_types; i++ { - bw.writeBits(2, uint64(literal_context_mode)) - } - - if mb.literal_context_map_size == 0 { - storeTrivialContextMap(mb.literal_histograms_size, literalContextBits, tree, bw) - } else { - encodeContextMap(mb.literal_context_map, mb.literal_context_map_size, mb.literal_histograms_size, tree, bw) - } - - if mb.distance_context_map_size == 0 { - storeTrivialContextMap(mb.distance_histograms_size, distanceContextBits, tree, bw) - } else { - encodeContextMap(mb.distance_context_map, mb.distance_context_map_size, mb.distance_histograms_size, tree, bw) - } - 
- buildAndStoreEntropyCodesLiteral(literal_enc, mb.literal_histograms, mb.literal_histograms_size, numLiteralSymbols, tree, bw) - buildAndStoreEntropyCodesCommand(command_enc, mb.command_histograms, mb.command_histograms_size, numCommandSymbols, tree, bw) - buildAndStoreEntropyCodesDistance(distance_enc, mb.distance_histograms, mb.distance_histograms_size, uint(num_distance_symbols), tree, bw) - tree = nil - - for _, cmd := range commands { - var cmd_code uint = uint(cmd.cmd_prefix_) - storeSymbol(command_enc, cmd_code, bw) - storeCommandExtra(&cmd, bw) - if mb.literal_context_map_size == 0 { - var j uint - for j = uint(cmd.insert_len_); j != 0; j-- { - storeSymbol(literal_enc, uint(input[pos&mask]), bw) - pos++ - } - } else { - var j uint - for j = uint(cmd.insert_len_); j != 0; j-- { - var context uint = uint(getContext(prev_byte, prev_byte2, literal_context_lut)) - var literal byte = input[pos&mask] - storeSymbolWithContext(literal_enc, uint(literal), context, mb.literal_context_map, bw, literalContextBits) - prev_byte2 = prev_byte - prev_byte = literal - pos++ - } - } - - pos += uint(commandCopyLen(&cmd)) - if commandCopyLen(&cmd) != 0 { - prev_byte2 = input[(pos-2)&mask] - prev_byte = input[(pos-1)&mask] - if cmd.cmd_prefix_ >= 128 { - var dist_code uint = uint(cmd.dist_prefix_) & 0x3FF - var distnumextra uint32 = uint32(cmd.dist_prefix_) >> 10 - var distextra uint64 = uint64(cmd.dist_extra_) - if mb.distance_context_map_size == 0 { - storeSymbol(distance_enc, dist_code, bw) - } else { - var context uint = uint(commandDistanceContext(&cmd)) - storeSymbolWithContext(distance_enc, dist_code, context, mb.distance_context_map, bw, distanceContextBits) - } - - bw.writeBits(uint(distnumextra), distextra) - } - } - } - - cleanupBlockEncoder(distance_enc) - cleanupBlockEncoder(command_enc) - cleanupBlockEncoder(literal_enc) - if is_last { - bw.jumpToByteBoundary() - } -} - -func buildHistograms(input []byte, start_pos uint, mask uint, commands []command, lit_histo 
*histogramLiteral, cmd_histo *histogramCommand, dist_histo *histogramDistance) { - var pos uint = start_pos - for _, cmd := range commands { - var j uint - histogramAddCommand(cmd_histo, uint(cmd.cmd_prefix_)) - for j = uint(cmd.insert_len_); j != 0; j-- { - histogramAddLiteral(lit_histo, uint(input[pos&mask])) - pos++ - } - - pos += uint(commandCopyLen(&cmd)) - if commandCopyLen(&cmd) != 0 && cmd.cmd_prefix_ >= 128 { - histogramAddDistance(dist_histo, uint(cmd.dist_prefix_)&0x3FF) - } - } -} - -func storeDataWithHuffmanCodes(input []byte, start_pos uint, mask uint, commands []command, lit_depth []byte, lit_bits []uint16, cmd_depth []byte, cmd_bits []uint16, dist_depth []byte, dist_bits []uint16, bw *bitWriter) { - var pos uint = start_pos - for _, cmd := range commands { - var cmd_code uint = uint(cmd.cmd_prefix_) - var j uint - bw.writeBits(uint(cmd_depth[cmd_code]), uint64(cmd_bits[cmd_code])) - storeCommandExtra(&cmd, bw) - for j = uint(cmd.insert_len_); j != 0; j-- { - var literal byte = input[pos&mask] - bw.writeBits(uint(lit_depth[literal]), uint64(lit_bits[literal])) - pos++ - } - - pos += uint(commandCopyLen(&cmd)) - if commandCopyLen(&cmd) != 0 && cmd.cmd_prefix_ >= 128 { - var dist_code uint = uint(cmd.dist_prefix_) & 0x3FF - var distnumextra uint32 = uint32(cmd.dist_prefix_) >> 10 - var distextra uint32 = cmd.dist_extra_ - bw.writeBits(uint(dist_depth[dist_code]), uint64(dist_bits[dist_code])) - bw.writeBits(uint(distnumextra), uint64(distextra)) - } - } -} - -func storeMetaBlockTrivial(input []byte, start_pos uint, length uint, mask uint, is_last bool, params *encoderParams, commands []command, bw *bitWriter) { - var lit_histo histogramLiteral - var cmd_histo histogramCommand - var dist_histo histogramDistance - var lit_depth [numLiteralSymbols]byte - var lit_bits [numLiteralSymbols]uint16 - var cmd_depth [numCommandSymbols]byte - var cmd_bits [numCommandSymbols]uint16 - var dist_depth [maxSimpleDistanceAlphabetSize]byte - var dist_bits 
[maxSimpleDistanceAlphabetSize]uint16 - var tree []huffmanTree - var num_distance_symbols uint32 = params.dist.alphabet_size - - storeCompressedMetaBlockHeader(is_last, length, bw) - - histogramClearLiteral(&lit_histo) - histogramClearCommand(&cmd_histo) - histogramClearDistance(&dist_histo) - - buildHistograms(input, start_pos, mask, commands, &lit_histo, &cmd_histo, &dist_histo) - - bw.writeBits(13, 0) - - tree = make([]huffmanTree, maxHuffmanTreeSize) - buildAndStoreHuffmanTree(lit_histo.data_[:], numLiteralSymbols, numLiteralSymbols, tree, lit_depth[:], lit_bits[:], bw) - buildAndStoreHuffmanTree(cmd_histo.data_[:], numCommandSymbols, numCommandSymbols, tree, cmd_depth[:], cmd_bits[:], bw) - buildAndStoreHuffmanTree(dist_histo.data_[:], maxSimpleDistanceAlphabetSize, uint(num_distance_symbols), tree, dist_depth[:], dist_bits[:], bw) - tree = nil - storeDataWithHuffmanCodes(input, start_pos, mask, commands, lit_depth[:], lit_bits[:], cmd_depth[:], cmd_bits[:], dist_depth[:], dist_bits[:], bw) - if is_last { - bw.jumpToByteBoundary() - } -} - -func storeMetaBlockFast(input []byte, start_pos uint, length uint, mask uint, is_last bool, params *encoderParams, commands []command, bw *bitWriter) { - var num_distance_symbols uint32 = params.dist.alphabet_size - var distance_alphabet_bits uint32 = log2FloorNonZero(uint(num_distance_symbols-1)) + 1 - - storeCompressedMetaBlockHeader(is_last, length, bw) - - bw.writeBits(13, 0) - - if len(commands) <= 128 { - var histogram = [numLiteralSymbols]uint32{0} - var pos uint = start_pos - var num_literals uint = 0 - var lit_depth [numLiteralSymbols]byte - var lit_bits [numLiteralSymbols]uint16 - for _, cmd := range commands { - var j uint - for j = uint(cmd.insert_len_); j != 0; j-- { - histogram[input[pos&mask]]++ - pos++ - } - - num_literals += uint(cmd.insert_len_) - pos += uint(commandCopyLen(&cmd)) - } - - buildAndStoreHuffmanTreeFast(histogram[:], num_literals, /* max_bits = */ - 8, lit_depth[:], lit_bits[:], bw) - - 
storeStaticCommandHuffmanTree(bw) - storeStaticDistanceHuffmanTree(bw) - storeDataWithHuffmanCodes(input, start_pos, mask, commands, lit_depth[:], lit_bits[:], kStaticCommandCodeDepth[:], kStaticCommandCodeBits[:], kStaticDistanceCodeDepth[:], kStaticDistanceCodeBits[:], bw) - } else { - var lit_histo histogramLiteral - var cmd_histo histogramCommand - var dist_histo histogramDistance - var lit_depth [numLiteralSymbols]byte - var lit_bits [numLiteralSymbols]uint16 - var cmd_depth [numCommandSymbols]byte - var cmd_bits [numCommandSymbols]uint16 - var dist_depth [maxSimpleDistanceAlphabetSize]byte - var dist_bits [maxSimpleDistanceAlphabetSize]uint16 - histogramClearLiteral(&lit_histo) - histogramClearCommand(&cmd_histo) - histogramClearDistance(&dist_histo) - buildHistograms(input, start_pos, mask, commands, &lit_histo, &cmd_histo, &dist_histo) - buildAndStoreHuffmanTreeFast(lit_histo.data_[:], lit_histo.total_count_, /* max_bits = */ - 8, lit_depth[:], lit_bits[:], bw) - - buildAndStoreHuffmanTreeFast(cmd_histo.data_[:], cmd_histo.total_count_, /* max_bits = */ - 10, cmd_depth[:], cmd_bits[:], bw) - - buildAndStoreHuffmanTreeFast(dist_histo.data_[:], dist_histo.total_count_, /* max_bits = */ - uint(distance_alphabet_bits), dist_depth[:], dist_bits[:], bw) - - storeDataWithHuffmanCodes(input, start_pos, mask, commands, lit_depth[:], lit_bits[:], cmd_depth[:], cmd_bits[:], dist_depth[:], dist_bits[:], bw) - } - - if is_last { - bw.jumpToByteBoundary() - } -} - -/* This is for storing uncompressed blocks (simple raw storage of - bytes-as-bytes). 
*/ -func storeUncompressedMetaBlock(is_final_block bool, input []byte, position uint, mask uint, len uint, bw *bitWriter) { - var masked_pos uint = position & mask - storeUncompressedMetaBlockHeader(uint(len), bw) - bw.jumpToByteBoundary() - - if masked_pos+len > mask+1 { - var len1 uint = mask + 1 - masked_pos - bw.writeBytes(input[masked_pos:][:len1]) - len -= len1 - masked_pos = 0 - } - - bw.writeBytes(input[masked_pos:][:len]) - - /* Since the uncompressed block itself may not be the final block, add an - empty one after this. */ - if is_final_block { - bw.writeBits(1, 1) /* islast */ - bw.writeBits(1, 1) /* isempty */ - bw.jumpToByteBoundary() - } -} diff --git a/vendor/github.com/andybalholm/brotli/cluster.go b/vendor/github.com/andybalholm/brotli/cluster.go deleted file mode 100644 index df8a328224..0000000000 --- a/vendor/github.com/andybalholm/brotli/cluster.go +++ /dev/null @@ -1,30 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Functions for clustering similar histograms together. */ - -type histogramPair struct { - idx1 uint32 - idx2 uint32 - cost_combo float64 - cost_diff float64 -} - -func histogramPairIsLess(p1 *histogramPair, p2 *histogramPair) bool { - if p1.cost_diff != p2.cost_diff { - return p1.cost_diff > p2.cost_diff - } - - return (p1.idx2 - p1.idx1) > (p2.idx2 - p2.idx1) -} - -/* Returns entropy reduction of the context map when we combine two clusters. 
*/ -func clusterCostDiff(size_a uint, size_b uint) float64 { - var size_c uint = size_a + size_b - return float64(size_a)*fastLog2(size_a) + float64(size_b)*fastLog2(size_b) - float64(size_c)*fastLog2(size_c) -} diff --git a/vendor/github.com/andybalholm/brotli/cluster_command.go b/vendor/github.com/andybalholm/brotli/cluster_command.go deleted file mode 100644 index 7449751b21..0000000000 --- a/vendor/github.com/andybalholm/brotli/cluster_command.go +++ /dev/null @@ -1,326 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if - it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. */ -func compareAndPushToQueueCommand(out []histogramCommand, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) { - var is_good_pair bool = false - var p histogramPair - p.idx2 = 0 - p.idx1 = p.idx2 - p.cost_combo = 0 - p.cost_diff = p.cost_combo - if idx1 == idx2 { - return - } - - if idx2 < idx1 { - var t uint32 = idx2 - idx2 = idx1 - idx1 = t - } - - p.idx1 = idx1 - p.idx2 = idx2 - p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2])) - p.cost_diff -= out[idx1].bit_cost_ - p.cost_diff -= out[idx2].bit_cost_ - - if out[idx1].total_count_ == 0 { - p.cost_combo = out[idx2].bit_cost_ - is_good_pair = true - } else if out[idx2].total_count_ == 0 { - p.cost_combo = out[idx1].bit_cost_ - is_good_pair = true - } else { - var threshold float64 - if *num_pairs == 0 { - threshold = 1e99 - } else { - threshold = brotli_max_double(0.0, pairs[0].cost_diff) - } - var combo histogramCommand = out[idx1] - var cost_combo float64 - histogramAddHistogramCommand(&combo, &out[idx2]) - cost_combo = populationCostCommand(&combo) - if 
cost_combo < threshold-p.cost_diff { - p.cost_combo = cost_combo - is_good_pair = true - } - } - - if is_good_pair { - p.cost_diff += p.cost_combo - if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) { - /* Replace the top of the queue if needed. */ - if *num_pairs < max_num_pairs { - pairs[*num_pairs] = pairs[0] - (*num_pairs)++ - } - - pairs[0] = p - } else if *num_pairs < max_num_pairs { - pairs[*num_pairs] = p - (*num_pairs)++ - } - } -} - -func histogramCombineCommand(out []histogramCommand, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint { - var cost_diff_threshold float64 = 0.0 - var min_cluster_size uint = 1 - var num_pairs uint = 0 - { - /* We maintain a vector of histogram pairs, with the property that the pair - with the maximum bit cost reduction is the first. */ - var idx1 uint - for idx1 = 0; idx1 < num_clusters; idx1++ { - var idx2 uint - for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ { - compareAndPushToQueueCommand(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs) - } - } - } - - for num_clusters > min_cluster_size { - var best_idx1 uint32 - var best_idx2 uint32 - var i uint - if pairs[0].cost_diff >= cost_diff_threshold { - cost_diff_threshold = 1e99 - min_cluster_size = max_clusters - continue - } - - /* Take the best pair from the top of heap. 
*/ - best_idx1 = pairs[0].idx1 - - best_idx2 = pairs[0].idx2 - histogramAddHistogramCommand(&out[best_idx1], &out[best_idx2]) - out[best_idx1].bit_cost_ = pairs[0].cost_combo - cluster_size[best_idx1] += cluster_size[best_idx2] - for i = 0; i < symbols_size; i++ { - if symbols[i] == best_idx2 { - symbols[i] = best_idx1 - } - } - - for i = 0; i < num_clusters; i++ { - if clusters[i] == best_idx2 { - copy(clusters[i:], clusters[i+1:][:num_clusters-i-1]) - break - } - } - - num_clusters-- - { - /* Remove pairs intersecting the just combined best pair. */ - var copy_to_idx uint = 0 - for i = 0; i < num_pairs; i++ { - var p *histogramPair = &pairs[i] - if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 { - /* Remove invalid pair from the queue. */ - continue - } - - if histogramPairIsLess(&pairs[0], p) { - /* Replace the top of the queue if needed. */ - var front histogramPair = pairs[0] - pairs[0] = *p - pairs[copy_to_idx] = front - } else { - pairs[copy_to_idx] = *p - } - - copy_to_idx++ - } - - num_pairs = copy_to_idx - } - - /* Push new pairs formed with the combined histogram to the heap. */ - for i = 0; i < num_clusters; i++ { - compareAndPushToQueueCommand(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs) - } - } - - return num_clusters -} - -/* What is the bit cost of moving histogram from cur_symbol to candidate. */ -func histogramBitCostDistanceCommand(histogram *histogramCommand, candidate *histogramCommand) float64 { - if histogram.total_count_ == 0 { - return 0.0 - } else { - var tmp histogramCommand = *histogram - histogramAddHistogramCommand(&tmp, candidate) - return populationCostCommand(&tmp) - candidate.bit_cost_ - } -} - -/* Find the best 'out' histogram for each of the 'in' histograms. - When called, clusters[0..num_clusters) contains the unique values from - symbols[0..in_size), but this property is not preserved in this function. 
- Note: we assume that out[]->bit_cost_ is already up-to-date. */ -func histogramRemapCommand(in []histogramCommand, in_size uint, clusters []uint32, num_clusters uint, out []histogramCommand, symbols []uint32) { - var i uint - for i = 0; i < in_size; i++ { - var best_out uint32 - if i == 0 { - best_out = symbols[0] - } else { - best_out = symbols[i-1] - } - var best_bits float64 = histogramBitCostDistanceCommand(&in[i], &out[best_out]) - var j uint - for j = 0; j < num_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceCommand(&in[i], &out[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - symbols[i] = best_out - } - - /* Recompute each out based on raw and symbols. */ - for i = 0; i < num_clusters; i++ { - histogramClearCommand(&out[clusters[i]]) - } - - for i = 0; i < in_size; i++ { - histogramAddHistogramCommand(&out[symbols[i]], &in[i]) - } -} - -/* Reorders elements of the out[0..length) array and changes values in - symbols[0..length) array in the following way: - * when called, symbols[] contains indexes into out[], and has N unique - values (possibly N < length) - * on return, symbols'[i] = f(symbols[i]) and - out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length, - where f is a bijection between the range of symbols[] and [0..N), and - the first occurrences of values in symbols'[i] come in consecutive - increasing order. - Returns N, the number of unique values in symbols[]. 
*/ - -var histogramReindexCommand_kInvalidIndex uint32 = math.MaxUint32 - -func histogramReindexCommand(out []histogramCommand, symbols []uint32, length uint) uint { - var new_index []uint32 = make([]uint32, length) - var next_index uint32 - var tmp []histogramCommand - var i uint - for i = 0; i < length; i++ { - new_index[i] = histogramReindexCommand_kInvalidIndex - } - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == histogramReindexCommand_kInvalidIndex { - new_index[symbols[i]] = next_index - next_index++ - } - } - - /* TODO: by using idea of "cycle-sort" we can avoid allocation of - tmp and reduce the number of copying by the factor of 2. */ - tmp = make([]histogramCommand, next_index) - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == next_index { - tmp[next_index] = out[symbols[i]] - next_index++ - } - - symbols[i] = new_index[symbols[i]] - } - - new_index = nil - for i = 0; uint32(i) < next_index; i++ { - out[i] = tmp[i] - } - - tmp = nil - return uint(next_index) -} - -func clusterHistogramsCommand(in []histogramCommand, in_size uint, max_histograms uint, out []histogramCommand, out_size *uint, histogram_symbols []uint32) { - var cluster_size []uint32 = make([]uint32, in_size) - var clusters []uint32 = make([]uint32, in_size) - var num_clusters uint = 0 - var max_input_histograms uint = 64 - var pairs_capacity uint = max_input_histograms * max_input_histograms / 2 - var pairs []histogramPair = make([]histogramPair, (pairs_capacity + 1)) - var i uint - - /* For the first pass of clustering, we allow all pairs. 
*/ - for i = 0; i < in_size; i++ { - cluster_size[i] = 1 - } - - for i = 0; i < in_size; i++ { - out[i] = in[i] - out[i].bit_cost_ = populationCostCommand(&in[i]) - histogram_symbols[i] = uint32(i) - } - - for i = 0; i < in_size; i += max_input_histograms { - var num_to_combine uint = brotli_min_size_t(in_size-i, max_input_histograms) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - clusters[num_clusters+j] = uint32(i + j) - } - - num_new_clusters = histogramCombineCommand(out, cluster_size, histogram_symbols[i:], clusters[num_clusters:], pairs, num_to_combine, num_to_combine, max_histograms, pairs_capacity) - num_clusters += num_new_clusters - } - { - /* For the second pass, we limit the total number of histogram pairs. - After this limit is reached, we only keep searching for the best pair. */ - var max_num_pairs uint = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < (max_num_pairs + 1) { - var _new_size uint - if pairs_capacity == 0 { - _new_size = max_num_pairs + 1 - } else { - _new_size = pairs_capacity - } - var new_array []histogramPair - for _new_size < (max_num_pairs + 1) { - _new_size *= 2 - } - new_array = make([]histogramPair, _new_size) - if pairs_capacity != 0 { - copy(new_array, pairs[:pairs_capacity]) - } - - pairs = new_array - pairs_capacity = _new_size - } - - /* Collapse similar histograms. */ - num_clusters = histogramCombineCommand(out, cluster_size, histogram_symbols, clusters, pairs, num_clusters, in_size, max_histograms, max_num_pairs) - } - - pairs = nil - cluster_size = nil - - /* Find the optimal map from original histograms to the final ones. */ - histogramRemapCommand(in, in_size, clusters, num_clusters, out, histogram_symbols) - - clusters = nil - - /* Convert the context map to a canonical form. 
*/ - *out_size = histogramReindexCommand(out, histogram_symbols, in_size) -} diff --git a/vendor/github.com/andybalholm/brotli/cluster_distance.go b/vendor/github.com/andybalholm/brotli/cluster_distance.go deleted file mode 100644 index 1aaa86e6ed..0000000000 --- a/vendor/github.com/andybalholm/brotli/cluster_distance.go +++ /dev/null @@ -1,326 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if - it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. */ -func compareAndPushToQueueDistance(out []histogramDistance, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) { - var is_good_pair bool = false - var p histogramPair - p.idx2 = 0 - p.idx1 = p.idx2 - p.cost_combo = 0 - p.cost_diff = p.cost_combo - if idx1 == idx2 { - return - } - - if idx2 < idx1 { - var t uint32 = idx2 - idx2 = idx1 - idx1 = t - } - - p.idx1 = idx1 - p.idx2 = idx2 - p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2])) - p.cost_diff -= out[idx1].bit_cost_ - p.cost_diff -= out[idx2].bit_cost_ - - if out[idx1].total_count_ == 0 { - p.cost_combo = out[idx2].bit_cost_ - is_good_pair = true - } else if out[idx2].total_count_ == 0 { - p.cost_combo = out[idx1].bit_cost_ - is_good_pair = true - } else { - var threshold float64 - if *num_pairs == 0 { - threshold = 1e99 - } else { - threshold = brotli_max_double(0.0, pairs[0].cost_diff) - } - var combo histogramDistance = out[idx1] - var cost_combo float64 - histogramAddHistogramDistance(&combo, &out[idx2]) - cost_combo = populationCostDistance(&combo) - if cost_combo < threshold-p.cost_diff { - p.cost_combo = cost_combo - is_good_pair = true - } - } - - if is_good_pair { - p.cost_diff 
+= p.cost_combo - if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) { - /* Replace the top of the queue if needed. */ - if *num_pairs < max_num_pairs { - pairs[*num_pairs] = pairs[0] - (*num_pairs)++ - } - - pairs[0] = p - } else if *num_pairs < max_num_pairs { - pairs[*num_pairs] = p - (*num_pairs)++ - } - } -} - -func histogramCombineDistance(out []histogramDistance, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint { - var cost_diff_threshold float64 = 0.0 - var min_cluster_size uint = 1 - var num_pairs uint = 0 - { - /* We maintain a vector of histogram pairs, with the property that the pair - with the maximum bit cost reduction is the first. */ - var idx1 uint - for idx1 = 0; idx1 < num_clusters; idx1++ { - var idx2 uint - for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ { - compareAndPushToQueueDistance(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs) - } - } - } - - for num_clusters > min_cluster_size { - var best_idx1 uint32 - var best_idx2 uint32 - var i uint - if pairs[0].cost_diff >= cost_diff_threshold { - cost_diff_threshold = 1e99 - min_cluster_size = max_clusters - continue - } - - /* Take the best pair from the top of heap. */ - best_idx1 = pairs[0].idx1 - - best_idx2 = pairs[0].idx2 - histogramAddHistogramDistance(&out[best_idx1], &out[best_idx2]) - out[best_idx1].bit_cost_ = pairs[0].cost_combo - cluster_size[best_idx1] += cluster_size[best_idx2] - for i = 0; i < symbols_size; i++ { - if symbols[i] == best_idx2 { - symbols[i] = best_idx1 - } - } - - for i = 0; i < num_clusters; i++ { - if clusters[i] == best_idx2 { - copy(clusters[i:], clusters[i+1:][:num_clusters-i-1]) - break - } - } - - num_clusters-- - { - /* Remove pairs intersecting the just combined best pair. 
*/ - var copy_to_idx uint = 0 - for i = 0; i < num_pairs; i++ { - var p *histogramPair = &pairs[i] - if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 { - /* Remove invalid pair from the queue. */ - continue - } - - if histogramPairIsLess(&pairs[0], p) { - /* Replace the top of the queue if needed. */ - var front histogramPair = pairs[0] - pairs[0] = *p - pairs[copy_to_idx] = front - } else { - pairs[copy_to_idx] = *p - } - - copy_to_idx++ - } - - num_pairs = copy_to_idx - } - - /* Push new pairs formed with the combined histogram to the heap. */ - for i = 0; i < num_clusters; i++ { - compareAndPushToQueueDistance(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs) - } - } - - return num_clusters -} - -/* What is the bit cost of moving histogram from cur_symbol to candidate. */ -func histogramBitCostDistanceDistance(histogram *histogramDistance, candidate *histogramDistance) float64 { - if histogram.total_count_ == 0 { - return 0.0 - } else { - var tmp histogramDistance = *histogram - histogramAddHistogramDistance(&tmp, candidate) - return populationCostDistance(&tmp) - candidate.bit_cost_ - } -} - -/* Find the best 'out' histogram for each of the 'in' histograms. - When called, clusters[0..num_clusters) contains the unique values from - symbols[0..in_size), but this property is not preserved in this function. - Note: we assume that out[]->bit_cost_ is already up-to-date. 
*/ -func histogramRemapDistance(in []histogramDistance, in_size uint, clusters []uint32, num_clusters uint, out []histogramDistance, symbols []uint32) { - var i uint - for i = 0; i < in_size; i++ { - var best_out uint32 - if i == 0 { - best_out = symbols[0] - } else { - best_out = symbols[i-1] - } - var best_bits float64 = histogramBitCostDistanceDistance(&in[i], &out[best_out]) - var j uint - for j = 0; j < num_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceDistance(&in[i], &out[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - symbols[i] = best_out - } - - /* Recompute each out based on raw and symbols. */ - for i = 0; i < num_clusters; i++ { - histogramClearDistance(&out[clusters[i]]) - } - - for i = 0; i < in_size; i++ { - histogramAddHistogramDistance(&out[symbols[i]], &in[i]) - } -} - -/* Reorders elements of the out[0..length) array and changes values in - symbols[0..length) array in the following way: - * when called, symbols[] contains indexes into out[], and has N unique - values (possibly N < length) - * on return, symbols'[i] = f(symbols[i]) and - out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length, - where f is a bijection between the range of symbols[] and [0..N), and - the first occurrences of values in symbols'[i] come in consecutive - increasing order. - Returns N, the number of unique values in symbols[]. 
*/ - -var histogramReindexDistance_kInvalidIndex uint32 = math.MaxUint32 - -func histogramReindexDistance(out []histogramDistance, symbols []uint32, length uint) uint { - var new_index []uint32 = make([]uint32, length) - var next_index uint32 - var tmp []histogramDistance - var i uint - for i = 0; i < length; i++ { - new_index[i] = histogramReindexDistance_kInvalidIndex - } - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == histogramReindexDistance_kInvalidIndex { - new_index[symbols[i]] = next_index - next_index++ - } - } - - /* TODO: by using idea of "cycle-sort" we can avoid allocation of - tmp and reduce the number of copying by the factor of 2. */ - tmp = make([]histogramDistance, next_index) - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == next_index { - tmp[next_index] = out[symbols[i]] - next_index++ - } - - symbols[i] = new_index[symbols[i]] - } - - new_index = nil - for i = 0; uint32(i) < next_index; i++ { - out[i] = tmp[i] - } - - tmp = nil - return uint(next_index) -} - -func clusterHistogramsDistance(in []histogramDistance, in_size uint, max_histograms uint, out []histogramDistance, out_size *uint, histogram_symbols []uint32) { - var cluster_size []uint32 = make([]uint32, in_size) - var clusters []uint32 = make([]uint32, in_size) - var num_clusters uint = 0 - var max_input_histograms uint = 64 - var pairs_capacity uint = max_input_histograms * max_input_histograms / 2 - var pairs []histogramPair = make([]histogramPair, (pairs_capacity + 1)) - var i uint - - /* For the first pass of clustering, we allow all pairs. 
*/ - for i = 0; i < in_size; i++ { - cluster_size[i] = 1 - } - - for i = 0; i < in_size; i++ { - out[i] = in[i] - out[i].bit_cost_ = populationCostDistance(&in[i]) - histogram_symbols[i] = uint32(i) - } - - for i = 0; i < in_size; i += max_input_histograms { - var num_to_combine uint = brotli_min_size_t(in_size-i, max_input_histograms) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - clusters[num_clusters+j] = uint32(i + j) - } - - num_new_clusters = histogramCombineDistance(out, cluster_size, histogram_symbols[i:], clusters[num_clusters:], pairs, num_to_combine, num_to_combine, max_histograms, pairs_capacity) - num_clusters += num_new_clusters - } - { - /* For the second pass, we limit the total number of histogram pairs. - After this limit is reached, we only keep searching for the best pair. */ - var max_num_pairs uint = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < (max_num_pairs + 1) { - var _new_size uint - if pairs_capacity == 0 { - _new_size = max_num_pairs + 1 - } else { - _new_size = pairs_capacity - } - var new_array []histogramPair - for _new_size < (max_num_pairs + 1) { - _new_size *= 2 - } - new_array = make([]histogramPair, _new_size) - if pairs_capacity != 0 { - copy(new_array, pairs[:pairs_capacity]) - } - - pairs = new_array - pairs_capacity = _new_size - } - - /* Collapse similar histograms. */ - num_clusters = histogramCombineDistance(out, cluster_size, histogram_symbols, clusters, pairs, num_clusters, in_size, max_histograms, max_num_pairs) - } - - pairs = nil - cluster_size = nil - - /* Find the optimal map from original histograms to the final ones. */ - histogramRemapDistance(in, in_size, clusters, num_clusters, out, histogram_symbols) - - clusters = nil - - /* Convert the context map to a canonical form. 
*/ - *out_size = histogramReindexDistance(out, histogram_symbols, in_size) -} diff --git a/vendor/github.com/andybalholm/brotli/cluster_literal.go b/vendor/github.com/andybalholm/brotli/cluster_literal.go deleted file mode 100644 index 6ba66f31b2..0000000000 --- a/vendor/github.com/andybalholm/brotli/cluster_literal.go +++ /dev/null @@ -1,326 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if - it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. */ -func compareAndPushToQueueLiteral(out []histogramLiteral, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) { - var is_good_pair bool = false - var p histogramPair - p.idx2 = 0 - p.idx1 = p.idx2 - p.cost_combo = 0 - p.cost_diff = p.cost_combo - if idx1 == idx2 { - return - } - - if idx2 < idx1 { - var t uint32 = idx2 - idx2 = idx1 - idx1 = t - } - - p.idx1 = idx1 - p.idx2 = idx2 - p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2])) - p.cost_diff -= out[idx1].bit_cost_ - p.cost_diff -= out[idx2].bit_cost_ - - if out[idx1].total_count_ == 0 { - p.cost_combo = out[idx2].bit_cost_ - is_good_pair = true - } else if out[idx2].total_count_ == 0 { - p.cost_combo = out[idx1].bit_cost_ - is_good_pair = true - } else { - var threshold float64 - if *num_pairs == 0 { - threshold = 1e99 - } else { - threshold = brotli_max_double(0.0, pairs[0].cost_diff) - } - var combo histogramLiteral = out[idx1] - var cost_combo float64 - histogramAddHistogramLiteral(&combo, &out[idx2]) - cost_combo = populationCostLiteral(&combo) - if cost_combo < threshold-p.cost_diff { - p.cost_combo = cost_combo - is_good_pair = true - } - } - - if is_good_pair { - p.cost_diff += 
p.cost_combo - if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) { - /* Replace the top of the queue if needed. */ - if *num_pairs < max_num_pairs { - pairs[*num_pairs] = pairs[0] - (*num_pairs)++ - } - - pairs[0] = p - } else if *num_pairs < max_num_pairs { - pairs[*num_pairs] = p - (*num_pairs)++ - } - } -} - -func histogramCombineLiteral(out []histogramLiteral, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint { - var cost_diff_threshold float64 = 0.0 - var min_cluster_size uint = 1 - var num_pairs uint = 0 - { - /* We maintain a vector of histogram pairs, with the property that the pair - with the maximum bit cost reduction is the first. */ - var idx1 uint - for idx1 = 0; idx1 < num_clusters; idx1++ { - var idx2 uint - for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ { - compareAndPushToQueueLiteral(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs) - } - } - } - - for num_clusters > min_cluster_size { - var best_idx1 uint32 - var best_idx2 uint32 - var i uint - if pairs[0].cost_diff >= cost_diff_threshold { - cost_diff_threshold = 1e99 - min_cluster_size = max_clusters - continue - } - - /* Take the best pair from the top of heap. */ - best_idx1 = pairs[0].idx1 - - best_idx2 = pairs[0].idx2 - histogramAddHistogramLiteral(&out[best_idx1], &out[best_idx2]) - out[best_idx1].bit_cost_ = pairs[0].cost_combo - cluster_size[best_idx1] += cluster_size[best_idx2] - for i = 0; i < symbols_size; i++ { - if symbols[i] == best_idx2 { - symbols[i] = best_idx1 - } - } - - for i = 0; i < num_clusters; i++ { - if clusters[i] == best_idx2 { - copy(clusters[i:], clusters[i+1:][:num_clusters-i-1]) - break - } - } - - num_clusters-- - { - /* Remove pairs intersecting the just combined best pair. 
*/ - var copy_to_idx uint = 0 - for i = 0; i < num_pairs; i++ { - var p *histogramPair = &pairs[i] - if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 { - /* Remove invalid pair from the queue. */ - continue - } - - if histogramPairIsLess(&pairs[0], p) { - /* Replace the top of the queue if needed. */ - var front histogramPair = pairs[0] - pairs[0] = *p - pairs[copy_to_idx] = front - } else { - pairs[copy_to_idx] = *p - } - - copy_to_idx++ - } - - num_pairs = copy_to_idx - } - - /* Push new pairs formed with the combined histogram to the heap. */ - for i = 0; i < num_clusters; i++ { - compareAndPushToQueueLiteral(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs) - } - } - - return num_clusters -} - -/* What is the bit cost of moving histogram from cur_symbol to candidate. */ -func histogramBitCostDistanceLiteral(histogram *histogramLiteral, candidate *histogramLiteral) float64 { - if histogram.total_count_ == 0 { - return 0.0 - } else { - var tmp histogramLiteral = *histogram - histogramAddHistogramLiteral(&tmp, candidate) - return populationCostLiteral(&tmp) - candidate.bit_cost_ - } -} - -/* Find the best 'out' histogram for each of the 'in' histograms. - When called, clusters[0..num_clusters) contains the unique values from - symbols[0..in_size), but this property is not preserved in this function. - Note: we assume that out[]->bit_cost_ is already up-to-date. 
*/ -func histogramRemapLiteral(in []histogramLiteral, in_size uint, clusters []uint32, num_clusters uint, out []histogramLiteral, symbols []uint32) { - var i uint - for i = 0; i < in_size; i++ { - var best_out uint32 - if i == 0 { - best_out = symbols[0] - } else { - best_out = symbols[i-1] - } - var best_bits float64 = histogramBitCostDistanceLiteral(&in[i], &out[best_out]) - var j uint - for j = 0; j < num_clusters; j++ { - var cur_bits float64 = histogramBitCostDistanceLiteral(&in[i], &out[clusters[j]]) - if cur_bits < best_bits { - best_bits = cur_bits - best_out = clusters[j] - } - } - - symbols[i] = best_out - } - - /* Recompute each out based on raw and symbols. */ - for i = 0; i < num_clusters; i++ { - histogramClearLiteral(&out[clusters[i]]) - } - - for i = 0; i < in_size; i++ { - histogramAddHistogramLiteral(&out[symbols[i]], &in[i]) - } -} - -/* Reorders elements of the out[0..length) array and changes values in - symbols[0..length) array in the following way: - * when called, symbols[] contains indexes into out[], and has N unique - values (possibly N < length) - * on return, symbols'[i] = f(symbols[i]) and - out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length, - where f is a bijection between the range of symbols[] and [0..N), and - the first occurrences of values in symbols'[i] come in consecutive - increasing order. - Returns N, the number of unique values in symbols[]. 
*/ - -var histogramReindexLiteral_kInvalidIndex uint32 = math.MaxUint32 - -func histogramReindexLiteral(out []histogramLiteral, symbols []uint32, length uint) uint { - var new_index []uint32 = make([]uint32, length) - var next_index uint32 - var tmp []histogramLiteral - var i uint - for i = 0; i < length; i++ { - new_index[i] = histogramReindexLiteral_kInvalidIndex - } - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == histogramReindexLiteral_kInvalidIndex { - new_index[symbols[i]] = next_index - next_index++ - } - } - - /* TODO: by using idea of "cycle-sort" we can avoid allocation of - tmp and reduce the number of copying by the factor of 2. */ - tmp = make([]histogramLiteral, next_index) - - next_index = 0 - for i = 0; i < length; i++ { - if new_index[symbols[i]] == next_index { - tmp[next_index] = out[symbols[i]] - next_index++ - } - - symbols[i] = new_index[symbols[i]] - } - - new_index = nil - for i = 0; uint32(i) < next_index; i++ { - out[i] = tmp[i] - } - - tmp = nil - return uint(next_index) -} - -func clusterHistogramsLiteral(in []histogramLiteral, in_size uint, max_histograms uint, out []histogramLiteral, out_size *uint, histogram_symbols []uint32) { - var cluster_size []uint32 = make([]uint32, in_size) - var clusters []uint32 = make([]uint32, in_size) - var num_clusters uint = 0 - var max_input_histograms uint = 64 - var pairs_capacity uint = max_input_histograms * max_input_histograms / 2 - var pairs []histogramPair = make([]histogramPair, (pairs_capacity + 1)) - var i uint - - /* For the first pass of clustering, we allow all pairs. 
*/ - for i = 0; i < in_size; i++ { - cluster_size[i] = 1 - } - - for i = 0; i < in_size; i++ { - out[i] = in[i] - out[i].bit_cost_ = populationCostLiteral(&in[i]) - histogram_symbols[i] = uint32(i) - } - - for i = 0; i < in_size; i += max_input_histograms { - var num_to_combine uint = brotli_min_size_t(in_size-i, max_input_histograms) - var num_new_clusters uint - var j uint - for j = 0; j < num_to_combine; j++ { - clusters[num_clusters+j] = uint32(i + j) - } - - num_new_clusters = histogramCombineLiteral(out, cluster_size, histogram_symbols[i:], clusters[num_clusters:], pairs, num_to_combine, num_to_combine, max_histograms, pairs_capacity) - num_clusters += num_new_clusters - } - { - /* For the second pass, we limit the total number of histogram pairs. - After this limit is reached, we only keep searching for the best pair. */ - var max_num_pairs uint = brotli_min_size_t(64*num_clusters, (num_clusters/2)*num_clusters) - if pairs_capacity < (max_num_pairs + 1) { - var _new_size uint - if pairs_capacity == 0 { - _new_size = max_num_pairs + 1 - } else { - _new_size = pairs_capacity - } - var new_array []histogramPair - for _new_size < (max_num_pairs + 1) { - _new_size *= 2 - } - new_array = make([]histogramPair, _new_size) - if pairs_capacity != 0 { - copy(new_array, pairs[:pairs_capacity]) - } - - pairs = new_array - pairs_capacity = _new_size - } - - /* Collapse similar histograms. */ - num_clusters = histogramCombineLiteral(out, cluster_size, histogram_symbols, clusters, pairs, num_clusters, in_size, max_histograms, max_num_pairs) - } - - pairs = nil - cluster_size = nil - - /* Find the optimal map from original histograms to the final ones. */ - histogramRemapLiteral(in, in_size, clusters, num_clusters, out, histogram_symbols) - - clusters = nil - - /* Convert the context map to a canonical form. 
*/ - *out_size = histogramReindexLiteral(out, histogram_symbols, in_size) -} diff --git a/vendor/github.com/andybalholm/brotli/command.go b/vendor/github.com/andybalholm/brotli/command.go deleted file mode 100644 index b1662a5555..0000000000 --- a/vendor/github.com/andybalholm/brotli/command.go +++ /dev/null @@ -1,254 +0,0 @@ -package brotli - -var kInsBase = []uint32{ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 8, - 10, - 14, - 18, - 26, - 34, - 50, - 66, - 98, - 130, - 194, - 322, - 578, - 1090, - 2114, - 6210, - 22594, -} - -var kInsExtra = []uint32{ - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 2, - 2, - 3, - 3, - 4, - 4, - 5, - 5, - 6, - 7, - 8, - 9, - 10, - 12, - 14, - 24, -} - -var kCopyBase = []uint32{ - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 12, - 14, - 18, - 22, - 30, - 38, - 54, - 70, - 102, - 134, - 198, - 326, - 582, - 1094, - 2118, -} - -var kCopyExtra = []uint32{ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 2, - 2, - 3, - 3, - 4, - 4, - 5, - 5, - 6, - 7, - 8, - 9, - 10, - 24, -} - -func getInsertLengthCode(insertlen uint) uint16 { - if insertlen < 6 { - return uint16(insertlen) - } else if insertlen < 130 { - var nbits uint32 = log2FloorNonZero(insertlen-2) - 1 - return uint16((nbits << 1) + uint32((insertlen-2)>>nbits) + 2) - } else if insertlen < 2114 { - return uint16(log2FloorNonZero(insertlen-66) + 10) - } else if insertlen < 6210 { - return 21 - } else if insertlen < 22594 { - return 22 - } else { - return 23 - } -} - -func getCopyLengthCode(copylen uint) uint16 { - if copylen < 10 { - return uint16(copylen - 2) - } else if copylen < 134 { - var nbits uint32 = log2FloorNonZero(copylen-6) - 1 - return uint16((nbits << 1) + uint32((copylen-6)>>nbits) + 4) - } else if copylen < 2118 { - return uint16(log2FloorNonZero(copylen-70) + 12) - } else { - return 23 - } -} - -func combineLengthCodes(inscode uint16, copycode uint16, use_last_distance bool) uint16 { - var bits64 uint16 = uint16(copycode&0x7 | (inscode&0x7)<<3) - if 
use_last_distance && inscode < 8 && copycode < 16 { - if copycode < 8 { - return bits64 - } else { - return bits64 | 64 - } - } else { - /* Specification: 5 Encoding of ... (last table) */ - /* offset = 2 * index, where index is in range [0..8] */ - var offset uint32 = 2 * ((uint32(copycode) >> 3) + 3*(uint32(inscode)>>3)) - - /* All values in specification are K * 64, - where K = [2, 3, 6, 4, 5, 8, 7, 9, 10], - i + 1 = [1, 2, 3, 4, 5, 6, 7, 8, 9], - K - i - 1 = [1, 1, 3, 0, 0, 2, 0, 1, 2] = D. - All values in D require only 2 bits to encode. - Magic constant is shifted 6 bits left, to avoid final multiplication. */ - offset = (offset << 5) + 0x40 + ((0x520D40 >> offset) & 0xC0) - - return uint16(offset | uint32(bits64)) - } -} - -func getLengthCode(insertlen uint, copylen uint, use_last_distance bool, code *uint16) { - var inscode uint16 = getInsertLengthCode(insertlen) - var copycode uint16 = getCopyLengthCode(copylen) - *code = combineLengthCodes(inscode, copycode, use_last_distance) -} - -func getInsertBase(inscode uint16) uint32 { - return kInsBase[inscode] -} - -func getInsertExtra(inscode uint16) uint32 { - return kInsExtra[inscode] -} - -func getCopyBase(copycode uint16) uint32 { - return kCopyBase[copycode] -} - -func getCopyExtra(copycode uint16) uint32 { - return kCopyExtra[copycode] -} - -type command struct { - insert_len_ uint32 - copy_len_ uint32 - dist_extra_ uint32 - cmd_prefix_ uint16 - dist_prefix_ uint16 -} - -/* distance_code is e.g. 0 for same-as-last short code, or 16 for offset 1. */ -func makeCommand(dist *distanceParams, insertlen uint, copylen uint, copylen_code_delta int, distance_code uint) (cmd command) { - /* Don't rely on signed int representation, use honest casts. 
*/ - var delta uint32 = uint32(byte(int8(copylen_code_delta))) - cmd.insert_len_ = uint32(insertlen) - cmd.copy_len_ = uint32(uint32(copylen) | delta<<25) - - /* The distance prefix and extra bits are stored in this Command as if - npostfix and ndirect were 0, they are only recomputed later after the - clustering if needed. */ - prefixEncodeCopyDistance(distance_code, uint(dist.num_direct_distance_codes), uint(dist.distance_postfix_bits), &cmd.dist_prefix_, &cmd.dist_extra_) - getLengthCode(insertlen, uint(int(copylen)+copylen_code_delta), (cmd.dist_prefix_&0x3FF == 0), &cmd.cmd_prefix_) - - return cmd -} - -func makeInsertCommand(insertlen uint) (cmd command) { - cmd.insert_len_ = uint32(insertlen) - cmd.copy_len_ = 4 << 25 - cmd.dist_extra_ = 0 - cmd.dist_prefix_ = numDistanceShortCodes - getLengthCode(insertlen, 4, false, &cmd.cmd_prefix_) - return cmd -} - -func commandRestoreDistanceCode(self *command, dist *distanceParams) uint32 { - if uint32(self.dist_prefix_&0x3FF) < numDistanceShortCodes+dist.num_direct_distance_codes { - return uint32(self.dist_prefix_) & 0x3FF - } else { - var dcode uint32 = uint32(self.dist_prefix_) & 0x3FF - var nbits uint32 = uint32(self.dist_prefix_) >> 10 - var extra uint32 = self.dist_extra_ - var postfix_mask uint32 = (1 << dist.distance_postfix_bits) - 1 - var hcode uint32 = (dcode - dist.num_direct_distance_codes - numDistanceShortCodes) >> dist.distance_postfix_bits - var lcode uint32 = (dcode - dist.num_direct_distance_codes - numDistanceShortCodes) & postfix_mask - var offset uint32 = ((2 + (hcode & 1)) << nbits) - 4 - return ((offset + extra) << dist.distance_postfix_bits) + lcode + dist.num_direct_distance_codes + numDistanceShortCodes - } -} - -func commandDistanceContext(self *command) uint32 { - var r uint32 = uint32(self.cmd_prefix_) >> 6 - var c uint32 = uint32(self.cmd_prefix_) & 7 - if (r == 0 || r == 2 || r == 4 || r == 7) && (c <= 2) { - return c - } - - return 3 -} - -func commandCopyLen(self *command) uint32 { - 
return self.copy_len_ & 0x1FFFFFF -} - -func commandCopyLenCode(self *command) uint32 { - var modifier uint32 = self.copy_len_ >> 25 - var delta int32 = int32(int8(byte(modifier | (modifier&0x40)<<1))) - return uint32(int32(self.copy_len_&0x1FFFFFF) + delta) -} diff --git a/vendor/github.com/andybalholm/brotli/compress_fragment.go b/vendor/github.com/andybalholm/brotli/compress_fragment.go deleted file mode 100644 index dbf0c43bf2..0000000000 --- a/vendor/github.com/andybalholm/brotli/compress_fragment.go +++ /dev/null @@ -1,685 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Function for fast encoding of an input fragment, independently from the input - history. This function uses one-pass processing: when we find a backward - match, we immediately emit the corresponding command and literal codes to - the bit stream. - - Adapted from the CompressFragment() function in - https://github.com/google/snappy/blob/master/snappy.cc */ - -const maxDistance_compress_fragment = 262128 - -func hash5(p []byte, shift uint) uint32 { - var h uint64 = (binary.LittleEndian.Uint64(p) << 24) * uint64(kHashMul32) - return uint32(h >> shift) -} - -func hashBytesAtOffset5(v uint64, offset int, shift uint) uint32 { - assert(offset >= 0) - assert(offset <= 3) - { - var h uint64 = ((v >> uint(8*offset)) << 24) * uint64(kHashMul32) - return uint32(h >> shift) - } -} - -func isMatch5(p1 []byte, p2 []byte) bool { - return binary.LittleEndian.Uint32(p1) == binary.LittleEndian.Uint32(p2) && - p1[4] == p2[4] -} - -/* Builds a literal prefix code into "depths" and "bits" based on the statistics - of the "input" string and stores it into the bit stream. 
- Note that the prefix code here is built from the pre-LZ77 input, therefore - we can only approximate the statistics of the actual literal stream. - Moreover, for long inputs we build a histogram from a sample of the input - and thus have to assign a non-zero depth for each literal. - Returns estimated compression ratio millibytes/char for encoding given input - with generated code. */ -func buildAndStoreLiteralPrefixCode(input []byte, input_size uint, depths []byte, bits []uint16, bw *bitWriter) uint { - var histogram = [256]uint32{0} - var histogram_total uint - var i uint - if input_size < 1<<15 { - for i = 0; i < input_size; i++ { - histogram[input[i]]++ - } - - histogram_total = input_size - for i = 0; i < 256; i++ { - /* We weigh the first 11 samples with weight 3 to account for the - balancing effect of the LZ77 phase on the histogram. */ - var adjust uint32 = 2 * brotli_min_uint32_t(histogram[i], 11) - histogram[i] += adjust - histogram_total += uint(adjust) - } - } else { - const kSampleRate uint = 29 - for i = 0; i < input_size; i += kSampleRate { - histogram[input[i]]++ - } - - histogram_total = (input_size + kSampleRate - 1) / kSampleRate - for i = 0; i < 256; i++ { - /* We add 1 to each population count to avoid 0 bit depths (since this is - only a sample and we don't know if the symbol appears or not), and we - weigh the first 11 samples with weight 3 to account for the balancing - effect of the LZ77 phase on the histogram (more frequent symbols are - more likely to be in backward references instead as literals). 
*/ - var adjust uint32 = 1 + 2*brotli_min_uint32_t(histogram[i], 11) - histogram[i] += adjust - histogram_total += uint(adjust) - } - } - - buildAndStoreHuffmanTreeFast(histogram[:], histogram_total, /* max_bits = */ - 8, depths, bits, bw) - { - var literal_ratio uint = 0 - for i = 0; i < 256; i++ { - if histogram[i] != 0 { - literal_ratio += uint(histogram[i] * uint32(depths[i])) - } - } - - /* Estimated encoding ratio, millibytes per symbol. */ - return (literal_ratio * 125) / histogram_total - } -} - -/* Builds a command and distance prefix code (each 64 symbols) into "depth" and - "bits" based on "histogram" and stores it into the bit stream. */ -func buildAndStoreCommandPrefixCode1(histogram []uint32, depth []byte, bits []uint16, bw *bitWriter) { - var tree [129]huffmanTree - var cmd_depth = [numCommandSymbols]byte{0} - /* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */ - - var cmd_bits [64]uint16 - - createHuffmanTree(histogram, 64, 15, tree[:], depth) - createHuffmanTree(histogram[64:], 64, 14, tree[:], depth[64:]) - - /* We have to jump through a few hoops here in order to compute - the command bits because the symbols are in a different order than in - the full alphabet. This looks complicated, but having the symbols - in this order in the command bits saves a few branches in the Emit* - functions. */ - copy(cmd_depth[:], depth[:24]) - - copy(cmd_depth[24:][:], depth[40:][:8]) - copy(cmd_depth[32:][:], depth[24:][:8]) - copy(cmd_depth[40:][:], depth[48:][:8]) - copy(cmd_depth[48:][:], depth[32:][:8]) - copy(cmd_depth[56:][:], depth[56:][:8]) - convertBitDepthsToSymbols(cmd_depth[:], 64, cmd_bits[:]) - copy(bits, cmd_bits[:24]) - copy(bits[24:], cmd_bits[32:][:8]) - copy(bits[32:], cmd_bits[48:][:8]) - copy(bits[40:], cmd_bits[24:][:8]) - copy(bits[48:], cmd_bits[40:][:8]) - copy(bits[56:], cmd_bits[56:][:8]) - convertBitDepthsToSymbols(depth[64:], 64, bits[64:]) - { - /* Create the bit length array for the full command alphabet. 
*/ - var i uint - for i := 0; i < int(64); i++ { - cmd_depth[i] = 0 - } /* only 64 first values were used */ - copy(cmd_depth[:], depth[:8]) - copy(cmd_depth[64:][:], depth[8:][:8]) - copy(cmd_depth[128:][:], depth[16:][:8]) - copy(cmd_depth[192:][:], depth[24:][:8]) - copy(cmd_depth[384:][:], depth[32:][:8]) - for i = 0; i < 8; i++ { - cmd_depth[128+8*i] = depth[40+i] - cmd_depth[256+8*i] = depth[48+i] - cmd_depth[448+8*i] = depth[56+i] - } - - storeHuffmanTree(cmd_depth[:], numCommandSymbols, tree[:], bw) - } - - storeHuffmanTree(depth[64:], 64, tree[:], bw) -} - -/* REQUIRES: insertlen < 6210 */ -func emitInsertLen1(insertlen uint, depth []byte, bits []uint16, histo []uint32, bw *bitWriter) { - if insertlen < 6 { - var code uint = insertlen + 40 - bw.writeBits(uint(depth[code]), uint64(bits[code])) - histo[code]++ - } else if insertlen < 130 { - var tail uint = insertlen - 2 - var nbits uint32 = log2FloorNonZero(tail) - 1 - var prefix uint = tail >> nbits - var inscode uint = uint((nbits << 1) + uint32(prefix) + 42) - bw.writeBits(uint(depth[inscode]), uint64(bits[inscode])) - bw.writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<> nbits - var code uint = uint((nbits << 1) + uint32(prefix) + 20) - bw.writeBits(uint(depth[code]), uint64(bits[code])) - bw.writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<> nbits - var code uint = uint((nbits << 1) + uint32(prefix) + 4) - bw.writeBits(uint(depth[code]), uint64(bits[code])) - bw.writeBits(uint(nbits), uint64(tail)-(uint64(prefix)<> 5) + 30 - bw.writeBits(uint(depth[code]), uint64(bits[code])) - bw.writeBits(5, uint64(tail)&31) - bw.writeBits(uint(depth[64]), uint64(bits[64])) - histo[code]++ - histo[64]++ - } else if copylen < 2120 { - var tail uint = copylen - 72 - var nbits uint32 = log2FloorNonZero(tail) - var code uint = uint(nbits + 28) - bw.writeBits(uint(depth[code]), uint64(bits[code])) - bw.writeBits(uint(nbits), uint64(tail)-(uint64(uint(1))<> nbits) & 1 - var offset uint = (2 + prefix) << nbits - var 
distcode uint = uint(2*(nbits-1) + uint32(prefix) + 80) - bw.writeBits(uint(depth[distcode]), uint64(bits[distcode])) - bw.writeBits(uint(nbits), uint64(d)-uint64(offset)) - histo[distcode]++ -} - -func emitLiterals(input []byte, len uint, depth []byte, bits []uint16, bw *bitWriter) { - var j uint - for j = 0; j < len; j++ { - var lit byte = input[j] - bw.writeBits(uint(depth[lit]), uint64(bits[lit])) - } -} - -/* REQUIRES: len <= 1 << 24. */ -func storeMetaBlockHeader1(len uint, is_uncompressed bool, bw *bitWriter) { - var nibbles uint = 6 - - /* ISLAST */ - bw.writeBits(1, 0) - - if len <= 1<<16 { - nibbles = 4 - } else if len <= 1<<20 { - nibbles = 5 - } - - bw.writeBits(2, uint64(nibbles)-4) - bw.writeBits(nibbles*4, uint64(len)-1) - - /* ISUNCOMPRESSED */ - bw.writeSingleBit(is_uncompressed) -} - -var shouldMergeBlock_kSampleRate uint = 43 - -func shouldMergeBlock(data []byte, len uint, depths []byte) bool { - var histo = [256]uint{0} - var i uint - for i = 0; i < len; i += shouldMergeBlock_kSampleRate { - histo[data[i]]++ - } - { - var total uint = (len + shouldMergeBlock_kSampleRate - 1) / shouldMergeBlock_kSampleRate - var r float64 = (fastLog2(total)+0.5)*float64(total) + 200 - for i = 0; i < 256; i++ { - r -= float64(histo[i]) * (float64(depths[i]) + fastLog2(histo[i])) - } - - return r >= 0.0 - } -} - -func shouldUseUncompressedMode(metablock_start []byte, next_emit []byte, insertlen uint, literal_ratio uint) bool { - var compressed uint = uint(-cap(next_emit) + cap(metablock_start)) - if compressed*50 > insertlen { - return false - } else { - return literal_ratio > 980 - } -} - -func emitUncompressedMetaBlock1(data []byte, storage_ix_start uint, bw *bitWriter) { - bw.rewind(storage_ix_start) - storeMetaBlockHeader1(uint(len(data)), true, bw) - bw.jumpToByteBoundary() - bw.writeBytes(data) -} - -var kCmdHistoSeed = [128]uint32{ - 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 
1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 0, 0, 0, 0, -} - -var compressFragmentFastImpl_kFirstBlockSize uint = 3 << 15 -var compressFragmentFastImpl_kMergeBlockSize uint = 1 << 16 - -func compressFragmentFastImpl(in []byte, input_size uint, is_last bool, table []int, table_bits uint, cmd_depth []byte, cmd_bits []uint16, cmd_code_numbits *uint, cmd_code []byte, bw *bitWriter) { - var cmd_histo [128]uint32 - var ip_end int - var next_emit int = 0 - var base_ip int = 0 - var input int = 0 - const kInputMarginBytes uint = windowGap - const kMinMatchLen uint = 5 - var metablock_start int = input - var block_size uint = brotli_min_size_t(input_size, compressFragmentFastImpl_kFirstBlockSize) - var total_block_size uint = block_size - var mlen_storage_ix uint = bw.getPos() + 3 - var lit_depth [256]byte - var lit_bits [256]uint16 - var literal_ratio uint - var ip int - var last_distance int - var shift uint = 64 - table_bits - - /* "next_emit" is a pointer to the first byte that is not covered by a - previous copy. Bytes between "next_emit" and the start of the next copy or - the end of the input will be emitted as literal bytes. */ - - /* Save the start of the first block for position and distance computations. - */ - - /* Save the bit position of the MLEN field of the meta-block header, so that - we can update it later if we decide to extend this meta-block. */ - storeMetaBlockHeader1(block_size, false, bw) - - /* No block splits, no contexts. */ - bw.writeBits(13, 0) - - literal_ratio = buildAndStoreLiteralPrefixCode(in[input:], block_size, lit_depth[:], lit_bits[:], bw) - { - /* Store the pre-compressed command and distance prefix codes. 
*/ - var i uint - for i = 0; i+7 < *cmd_code_numbits; i += 8 { - bw.writeBits(8, uint64(cmd_code[i>>3])) - } - } - - bw.writeBits(*cmd_code_numbits&7, uint64(cmd_code[*cmd_code_numbits>>3])) - - /* Initialize the command and distance histograms. We will gather - statistics of command and distance codes during the processing - of this block and use it to update the command and distance - prefix codes for the next block. */ -emit_commands: - copy(cmd_histo[:], kCmdHistoSeed[:]) - - /* "ip" is the input pointer. */ - ip = input - - last_distance = -1 - ip_end = int(uint(input) + block_size) - - if block_size >= kInputMarginBytes { - var len_limit uint = brotli_min_size_t(block_size-kMinMatchLen, input_size-kInputMarginBytes) - var ip_limit int = int(uint(input) + len_limit) - /* For the last block, we need to keep a 16 bytes margin so that we can be - sure that all distances are at most window size - 16. - For all other blocks, we only need to keep a margin of 5 bytes so that - we don't go over the block size with a copy. */ - - var next_hash uint32 - ip++ - for next_hash = hash5(in[ip:], shift); ; { - var skip uint32 = 32 - var next_ip int = ip - /* Step 1: Scan forward in the input looking for a 5-byte-long match. - If we get close to exhausting the input then goto emit_remainder. - - Heuristic match skipping: If 32 bytes are scanned with no matches - found, start looking only at every other byte. If 32 more bytes are - scanned, look at every third byte, etc.. When a match is found, - immediately go back to looking at every byte. This is a small loss - (~5% performance, ~0.1% density) for compressible data due to more - bookkeeping, but for non-compressible data (such as JPEG) it's a huge - win since the compressor quickly "realizes" the data is incompressible - and doesn't bother looking for matches everywhere. - - The "skip" variable keeps track of how many bytes there are since the - last match; dividing it by 32 (i.e. 
right-shifting by five) gives the - number of bytes to move ahead for each iteration. */ - - var candidate int - assert(next_emit < ip) - - trawl: - for { - var hash uint32 = next_hash - var bytes_between_hash_lookups uint32 = skip >> 5 - skip++ - assert(hash == hash5(in[next_ip:], shift)) - ip = next_ip - next_ip = int(uint32(ip) + bytes_between_hash_lookups) - if next_ip > ip_limit { - goto emit_remainder - } - - next_hash = hash5(in[next_ip:], shift) - candidate = ip - last_distance - if isMatch5(in[ip:], in[candidate:]) { - if candidate < ip { - table[hash] = int(ip - base_ip) - break - } - } - - candidate = base_ip + table[hash] - assert(candidate >= base_ip) - assert(candidate < ip) - - table[hash] = int(ip - base_ip) - if !(!isMatch5(in[ip:], in[candidate:])) { - break - } - } - - /* Check copy distance. If candidate is not feasible, continue search. - Checking is done outside of hot loop to reduce overhead. */ - if ip-candidate > maxDistance_compress_fragment { - goto trawl - } - - /* Step 2: Emit the found match together with the literal bytes from - "next_emit" to the bit stream, and then see if we can find a next match - immediately afterwards. Repeat until we find no match for the input - without emitting some literal bytes. */ - { - var base int = ip - /* > 0 */ - var matched uint = 5 + findMatchLengthWithLimit(in[candidate+5:], in[ip+5:], uint(ip_end-ip)-5) - var distance int = int(base - candidate) - /* We have a 5-byte match at ip, and we need to emit bytes in - [next_emit, ip). 
*/ - - var insert uint = uint(base - next_emit) - ip += int(matched) - if insert < 6210 { - emitInsertLen1(insert, cmd_depth, cmd_bits, cmd_histo[:], bw) - } else if shouldUseUncompressedMode(in[metablock_start:], in[next_emit:], insert, literal_ratio) { - emitUncompressedMetaBlock1(in[metablock_start:base], mlen_storage_ix-3, bw) - input_size -= uint(base - input) - input = base - next_emit = input - goto next_block - } else { - emitLongInsertLen(insert, cmd_depth, cmd_bits, cmd_histo[:], bw) - } - - emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], bw) - if distance == last_distance { - bw.writeBits(uint(cmd_depth[64]), uint64(cmd_bits[64])) - cmd_histo[64]++ - } else { - emitDistance1(uint(distance), cmd_depth, cmd_bits, cmd_histo[:], bw) - last_distance = distance - } - - emitCopyLenLastDistance1(matched, cmd_depth, cmd_bits, cmd_histo[:], bw) - - next_emit = ip - if ip >= ip_limit { - goto emit_remainder - } - - /* We could immediately start working at ip now, but to improve - compression we first update "table" with the hashes of some positions - within the last copy. */ - { - var input_bytes uint64 = binary.LittleEndian.Uint64(in[ip-3:]) - var prev_hash uint32 = hashBytesAtOffset5(input_bytes, 0, shift) - var cur_hash uint32 = hashBytesAtOffset5(input_bytes, 3, shift) - table[prev_hash] = int(ip - base_ip - 3) - prev_hash = hashBytesAtOffset5(input_bytes, 1, shift) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset5(input_bytes, 2, shift) - table[prev_hash] = int(ip - base_ip - 1) - - candidate = base_ip + table[cur_hash] - table[cur_hash] = int(ip - base_ip) - } - } - - for isMatch5(in[ip:], in[candidate:]) { - var base int = ip - /* We have a 5-byte match at ip, and no need to emit any literal bytes - prior to ip. 
*/ - - var matched uint = 5 + findMatchLengthWithLimit(in[candidate+5:], in[ip+5:], uint(ip_end-ip)-5) - if ip-candidate > maxDistance_compress_fragment { - break - } - ip += int(matched) - last_distance = int(base - candidate) /* > 0 */ - emitCopyLen1(matched, cmd_depth, cmd_bits, cmd_histo[:], bw) - emitDistance1(uint(last_distance), cmd_depth, cmd_bits, cmd_histo[:], bw) - - next_emit = ip - if ip >= ip_limit { - goto emit_remainder - } - - /* We could immediately start working at ip now, but to improve - compression we first update "table" with the hashes of some positions - within the last copy. */ - { - var input_bytes uint64 = binary.LittleEndian.Uint64(in[ip-3:]) - var prev_hash uint32 = hashBytesAtOffset5(input_bytes, 0, shift) - var cur_hash uint32 = hashBytesAtOffset5(input_bytes, 3, shift) - table[prev_hash] = int(ip - base_ip - 3) - prev_hash = hashBytesAtOffset5(input_bytes, 1, shift) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset5(input_bytes, 2, shift) - table[prev_hash] = int(ip - base_ip - 1) - - candidate = base_ip + table[cur_hash] - table[cur_hash] = int(ip - base_ip) - } - } - - ip++ - next_hash = hash5(in[ip:], shift) - } - } - -emit_remainder: - assert(next_emit <= ip_end) - input += int(block_size) - input_size -= block_size - block_size = brotli_min_size_t(input_size, compressFragmentFastImpl_kMergeBlockSize) - - /* Decide if we want to continue this meta-block instead of emitting the - last insert-only command. */ - if input_size > 0 && total_block_size+block_size <= 1<<20 && shouldMergeBlock(in[input:], block_size, lit_depth[:]) { - assert(total_block_size > 1<<16) - - /* Update the size of the current meta-block and continue emitting commands. - We can do this because the current size and the new size both have 5 - nibbles. */ - total_block_size += block_size - - bw.updateBits(20, uint32(total_block_size-1), mlen_storage_ix) - goto emit_commands - } - - /* Emit the remaining bytes as literals. 
*/ - if next_emit < ip_end { - var insert uint = uint(ip_end - next_emit) - if insert < 6210 { - emitInsertLen1(insert, cmd_depth, cmd_bits, cmd_histo[:], bw) - emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], bw) - } else if shouldUseUncompressedMode(in[metablock_start:], in[next_emit:], insert, literal_ratio) { - emitUncompressedMetaBlock1(in[metablock_start:ip_end], mlen_storage_ix-3, bw) - } else { - emitLongInsertLen(insert, cmd_depth, cmd_bits, cmd_histo[:], bw) - emitLiterals(in[next_emit:], insert, lit_depth[:], lit_bits[:], bw) - } - } - - next_emit = ip_end - - /* If we have more data, write a new meta-block header and prefix codes and - then continue emitting commands. */ -next_block: - if input_size > 0 { - metablock_start = input - block_size = brotli_min_size_t(input_size, compressFragmentFastImpl_kFirstBlockSize) - total_block_size = block_size - - /* Save the bit position of the MLEN field of the meta-block header, so that - we can update it later if we decide to extend this meta-block. */ - mlen_storage_ix = bw.getPos() + 3 - - storeMetaBlockHeader1(block_size, false, bw) - - /* No block splits, no contexts. */ - bw.writeBits(13, 0) - - literal_ratio = buildAndStoreLiteralPrefixCode(in[input:], block_size, lit_depth[:], lit_bits[:], bw) - buildAndStoreCommandPrefixCode1(cmd_histo[:], cmd_depth, cmd_bits, bw) - goto emit_commands - } - - if !is_last { - /* If this is not the last block, update the command and distance prefix - codes for the next block and store the compressed forms. */ - var bw bitWriter - bw.dst = cmd_code - buildAndStoreCommandPrefixCode1(cmd_histo[:], cmd_depth, cmd_bits, &bw) - *cmd_code_numbits = bw.getPos() - } -} - -/* Compresses "input" string to bw as one or more complete meta-blocks. - - If "is_last" is 1, emits an additional empty last meta-block. - - "cmd_depth" and "cmd_bits" contain the command and distance prefix codes - (see comment in encode.h) used for the encoding of this input fragment. 
- If "is_last" is 0, they are updated to reflect the statistics - of this input fragment, to be used for the encoding of the next fragment. - - "*cmd_code_numbits" is the number of bits of the compressed representation - of the command and distance prefix codes, and "cmd_code" is an array of - at least "(*cmd_code_numbits + 7) >> 3" size that contains the compressed - command and distance prefix codes. If "is_last" is 0, these are also - updated to represent the updated "cmd_depth" and "cmd_bits". - - REQUIRES: "input_size" is greater than zero, or "is_last" is 1. - REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24). - REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero. - REQUIRES: "table_size" is an odd (9, 11, 13, 15) power of two - OUTPUT: maximal copy distance <= |input_size| - OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */ -func compressFragmentFast(input []byte, input_size uint, is_last bool, table []int, table_size uint, cmd_depth []byte, cmd_bits []uint16, cmd_code_numbits *uint, cmd_code []byte, bw *bitWriter) { - var initial_storage_ix uint = bw.getPos() - var table_bits uint = uint(log2FloorNonZero(table_size)) - - if input_size == 0 { - assert(is_last) - bw.writeBits(1, 1) /* islast */ - bw.writeBits(1, 1) /* isempty */ - bw.jumpToByteBoundary() - return - } - - compressFragmentFastImpl(input, input_size, is_last, table, table_bits, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code, bw) - - /* If output is larger than single uncompressed block, rewrite it. 
*/ - if bw.getPos()-initial_storage_ix > 31+(input_size<<3) { - emitUncompressedMetaBlock1(input[:input_size], initial_storage_ix, bw) - } - - if is_last { - bw.writeBits(1, 1) /* islast */ - bw.writeBits(1, 1) /* isempty */ - bw.jumpToByteBoundary() - } -} diff --git a/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go b/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go deleted file mode 100644 index 2473aca3fe..0000000000 --- a/vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go +++ /dev/null @@ -1,595 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Function for fast encoding of an input fragment, independently from the input - history. This function uses two-pass processing: in the first pass we save - the found backward matches and literal bytes into a buffer, and in the - second pass we emit them into the bit stream using prefix codes built based - on the actual command and literal byte histograms. 
*/ - -const kCompressFragmentTwoPassBlockSize uint = 1 << 17 - -func hash1(p []byte, shift uint, length uint) uint32 { - var h uint64 = (binary.LittleEndian.Uint64(p) << ((8 - length) * 8)) * uint64(kHashMul32) - return uint32(h >> shift) -} - -func hashBytesAtOffset(v uint64, offset uint, shift uint, length uint) uint32 { - assert(offset <= 8-length) - { - var h uint64 = ((v >> (8 * offset)) << ((8 - length) * 8)) * uint64(kHashMul32) - return uint32(h >> shift) - } -} - -func isMatch1(p1 []byte, p2 []byte, length uint) bool { - if binary.LittleEndian.Uint32(p1) != binary.LittleEndian.Uint32(p2) { - return false - } - if length == 4 { - return true - } - return p1[4] == p2[4] && p1[5] == p2[5] -} - -/* Builds a command and distance prefix code (each 64 symbols) into "depth" and - "bits" based on "histogram" and stores it into the bit stream. */ -func buildAndStoreCommandPrefixCode(histogram []uint32, depth []byte, bits []uint16, bw *bitWriter) { - var tree [129]huffmanTree - var cmd_depth = [numCommandSymbols]byte{0} - /* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */ - - var cmd_bits [64]uint16 - createHuffmanTree(histogram, 64, 15, tree[:], depth) - createHuffmanTree(histogram[64:], 64, 14, tree[:], depth[64:]) - - /* We have to jump through a few hoops here in order to compute - the command bits because the symbols are in a different order than in - the full alphabet. This looks complicated, but having the symbols - in this order in the command bits saves a few branches in the Emit* - functions. 
*/ - copy(cmd_depth[:], depth[24:][:24]) - - copy(cmd_depth[24:][:], depth[:8]) - copy(cmd_depth[32:][:], depth[48:][:8]) - copy(cmd_depth[40:][:], depth[8:][:8]) - copy(cmd_depth[48:][:], depth[56:][:8]) - copy(cmd_depth[56:][:], depth[16:][:8]) - convertBitDepthsToSymbols(cmd_depth[:], 64, cmd_bits[:]) - copy(bits, cmd_bits[24:][:8]) - copy(bits[8:], cmd_bits[40:][:8]) - copy(bits[16:], cmd_bits[56:][:8]) - copy(bits[24:], cmd_bits[:24]) - copy(bits[48:], cmd_bits[32:][:8]) - copy(bits[56:], cmd_bits[48:][:8]) - convertBitDepthsToSymbols(depth[64:], 64, bits[64:]) - { - /* Create the bit length array for the full command alphabet. */ - var i uint - for i := 0; i < int(64); i++ { - cmd_depth[i] = 0 - } /* only 64 first values were used */ - copy(cmd_depth[:], depth[24:][:8]) - copy(cmd_depth[64:][:], depth[32:][:8]) - copy(cmd_depth[128:][:], depth[40:][:8]) - copy(cmd_depth[192:][:], depth[48:][:8]) - copy(cmd_depth[384:][:], depth[56:][:8]) - for i = 0; i < 8; i++ { - cmd_depth[128+8*i] = depth[i] - cmd_depth[256+8*i] = depth[8+i] - cmd_depth[448+8*i] = depth[16+i] - } - - storeHuffmanTree(cmd_depth[:], numCommandSymbols, tree[:], bw) - } - - storeHuffmanTree(depth[64:], 64, tree[:], bw) -} - -func emitInsertLen(insertlen uint32, commands *[]uint32) { - if insertlen < 6 { - (*commands)[0] = insertlen - } else if insertlen < 130 { - var tail uint32 = insertlen - 2 - var nbits uint32 = log2FloorNonZero(uint(tail)) - 1 - var prefix uint32 = tail >> nbits - var inscode uint32 = (nbits << 1) + prefix + 2 - var extra uint32 = tail - (prefix << nbits) - (*commands)[0] = inscode | extra<<8 - } else if insertlen < 2114 { - var tail uint32 = insertlen - 66 - var nbits uint32 = log2FloorNonZero(uint(tail)) - var code uint32 = nbits + 10 - var extra uint32 = tail - (1 << nbits) - (*commands)[0] = code | extra<<8 - } else if insertlen < 6210 { - var extra uint32 = insertlen - 2114 - (*commands)[0] = 21 | extra<<8 - } else if insertlen < 22594 { - var extra uint32 = insertlen 
- 6210 - (*commands)[0] = 22 | extra<<8 - } else { - var extra uint32 = insertlen - 22594 - (*commands)[0] = 23 | extra<<8 - } - - *commands = (*commands)[1:] -} - -func emitCopyLen(copylen uint, commands *[]uint32) { - if copylen < 10 { - (*commands)[0] = uint32(copylen + 38) - } else if copylen < 134 { - var tail uint = copylen - 6 - var nbits uint = uint(log2FloorNonZero(tail) - 1) - var prefix uint = tail >> nbits - var code uint = (nbits << 1) + prefix + 44 - var extra uint = tail - (prefix << nbits) - (*commands)[0] = uint32(code | extra<<8) - } else if copylen < 2118 { - var tail uint = copylen - 70 - var nbits uint = uint(log2FloorNonZero(tail)) - var code uint = nbits + 52 - var extra uint = tail - (uint(1) << nbits) - (*commands)[0] = uint32(code | extra<<8) - } else { - var extra uint = copylen - 2118 - (*commands)[0] = uint32(63 | extra<<8) - } - - *commands = (*commands)[1:] -} - -func emitCopyLenLastDistance(copylen uint, commands *[]uint32) { - if copylen < 12 { - (*commands)[0] = uint32(copylen + 20) - *commands = (*commands)[1:] - } else if copylen < 72 { - var tail uint = copylen - 8 - var nbits uint = uint(log2FloorNonZero(tail) - 1) - var prefix uint = tail >> nbits - var code uint = (nbits << 1) + prefix + 28 - var extra uint = tail - (prefix << nbits) - (*commands)[0] = uint32(code | extra<<8) - *commands = (*commands)[1:] - } else if copylen < 136 { - var tail uint = copylen - 8 - var code uint = (tail >> 5) + 54 - var extra uint = tail & 31 - (*commands)[0] = uint32(code | extra<<8) - *commands = (*commands)[1:] - (*commands)[0] = 64 - *commands = (*commands)[1:] - } else if copylen < 2120 { - var tail uint = copylen - 72 - var nbits uint = uint(log2FloorNonZero(tail)) - var code uint = nbits + 52 - var extra uint = tail - (uint(1) << nbits) - (*commands)[0] = uint32(code | extra<<8) - *commands = (*commands)[1:] - (*commands)[0] = 64 - *commands = (*commands)[1:] - } else { - var extra uint = copylen - 2120 - (*commands)[0] = uint32(63 | 
extra<<8) - *commands = (*commands)[1:] - (*commands)[0] = 64 - *commands = (*commands)[1:] - } -} - -func emitDistance(distance uint32, commands *[]uint32) { - var d uint32 = distance + 3 - var nbits uint32 = log2FloorNonZero(uint(d)) - 1 - var prefix uint32 = (d >> nbits) & 1 - var offset uint32 = (2 + prefix) << nbits - var distcode uint32 = 2*(nbits-1) + prefix + 80 - var extra uint32 = d - offset - (*commands)[0] = distcode | extra<<8 - *commands = (*commands)[1:] -} - -/* REQUIRES: len <= 1 << 24. */ -func storeMetaBlockHeader(len uint, is_uncompressed bool, bw *bitWriter) { - var nibbles uint = 6 - - /* ISLAST */ - bw.writeBits(1, 0) - - if len <= 1<<16 { - nibbles = 4 - } else if len <= 1<<20 { - nibbles = 5 - } - - bw.writeBits(2, uint64(nibbles)-4) - bw.writeBits(nibbles*4, uint64(len)-1) - - /* ISUNCOMPRESSED */ - bw.writeSingleBit(is_uncompressed) -} - -func createCommands(input []byte, block_size uint, input_size uint, base_ip_ptr []byte, table []int, table_bits uint, min_match uint, literals *[]byte, commands *[]uint32) { - var ip int = 0 - var shift uint = 64 - table_bits - var ip_end int = int(block_size) - var base_ip int = -cap(base_ip_ptr) + cap(input) - var next_emit int = 0 - var last_distance int = -1 - /* "ip" is the input pointer. */ - - const kInputMarginBytes uint = windowGap - - /* "next_emit" is a pointer to the first byte that is not covered by a - previous copy. Bytes between "next_emit" and the start of the next copy or - the end of the input will be emitted as literal bytes. */ - if block_size >= kInputMarginBytes { - var len_limit uint = brotli_min_size_t(block_size-min_match, input_size-kInputMarginBytes) - var ip_limit int = int(len_limit) - /* For the last block, we need to keep a 16 bytes margin so that we can be - sure that all distances are at most window size - 16. - For all other blocks, we only need to keep a margin of 5 bytes so that - we don't go over the block size with a copy. 
*/ - - var next_hash uint32 - ip++ - for next_hash = hash1(input[ip:], shift, min_match); ; { - var skip uint32 = 32 - var next_ip int = ip - /* Step 1: Scan forward in the input looking for a 6-byte-long match. - If we get close to exhausting the input then goto emit_remainder. - - Heuristic match skipping: If 32 bytes are scanned with no matches - found, start looking only at every other byte. If 32 more bytes are - scanned, look at every third byte, etc.. When a match is found, - immediately go back to looking at every byte. This is a small loss - (~5% performance, ~0.1% density) for compressible data due to more - bookkeeping, but for non-compressible data (such as JPEG) it's a huge - win since the compressor quickly "realizes" the data is incompressible - and doesn't bother looking for matches everywhere. - - The "skip" variable keeps track of how many bytes there are since the - last match; dividing it by 32 (ie. right-shifting by five) gives the - number of bytes to move ahead for each iteration. */ - - var candidate int - - assert(next_emit < ip) - - trawl: - for { - var hash uint32 = next_hash - var bytes_between_hash_lookups uint32 = skip >> 5 - skip++ - ip = next_ip - assert(hash == hash1(input[ip:], shift, min_match)) - next_ip = int(uint32(ip) + bytes_between_hash_lookups) - if next_ip > ip_limit { - goto emit_remainder - } - - next_hash = hash1(input[next_ip:], shift, min_match) - candidate = ip - last_distance - if isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) { - if candidate < ip { - table[hash] = int(ip - base_ip) - break - } - } - - candidate = base_ip + table[hash] - assert(candidate >= base_ip) - assert(candidate < ip) - - table[hash] = int(ip - base_ip) - if isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) { - break - } - } - - /* Check copy distance. If candidate is not feasible, continue search. - Checking is done outside of hot loop to reduce overhead. 
*/ - if ip-candidate > maxDistance_compress_fragment { - goto trawl - } - - /* Step 2: Emit the found match together with the literal bytes from - "next_emit", and then see if we can find a next match immediately - afterwards. Repeat until we find no match for the input - without emitting some literal bytes. */ - { - var base int = ip - /* > 0 */ - var matched uint = min_match + findMatchLengthWithLimit(base_ip_ptr[uint(candidate-base_ip)+min_match:], input[uint(ip)+min_match:], uint(ip_end-ip)-min_match) - var distance int = int(base - candidate) - /* We have a 6-byte match at ip, and we need to emit bytes in - [next_emit, ip). */ - - var insert int = int(base - next_emit) - ip += int(matched) - emitInsertLen(uint32(insert), commands) - copy(*literals, input[next_emit:][:uint(insert)]) - *literals = (*literals)[insert:] - if distance == last_distance { - (*commands)[0] = 64 - *commands = (*commands)[1:] - } else { - emitDistance(uint32(distance), commands) - last_distance = distance - } - - emitCopyLenLastDistance(matched, commands) - - next_emit = ip - if ip >= ip_limit { - goto emit_remainder - } - { - var input_bytes uint64 - var cur_hash uint32 - /* We could immediately start working at ip now, but to improve - compression we first update "table" with the hashes of some - positions within the last copy. 
*/ - - var prev_hash uint32 - if min_match == 4 { - input_bytes = binary.LittleEndian.Uint64(input[ip-3:]) - cur_hash = hashBytesAtOffset(input_bytes, 3, shift, min_match) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 3) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 1) - } else { - input_bytes = binary.LittleEndian.Uint64(input[ip-5:]) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 5) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 4) - prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - table[prev_hash] = int(ip - base_ip - 3) - input_bytes = binary.LittleEndian.Uint64(input[ip-2:]) - cur_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 1) - } - - candidate = base_ip + table[cur_hash] - table[cur_hash] = int(ip - base_ip) - } - } - - for ip-candidate <= maxDistance_compress_fragment && isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) { - var base int = ip - /* We have a 6-byte match at ip, and no need to emit any - literal bytes prior to ip. 
*/ - - var matched uint = min_match + findMatchLengthWithLimit(base_ip_ptr[uint(candidate-base_ip)+min_match:], input[uint(ip)+min_match:], uint(ip_end-ip)-min_match) - ip += int(matched) - last_distance = int(base - candidate) /* > 0 */ - emitCopyLen(matched, commands) - emitDistance(uint32(last_distance), commands) - - next_emit = ip - if ip >= ip_limit { - goto emit_remainder - } - { - var input_bytes uint64 - var cur_hash uint32 - /* We could immediately start working at ip now, but to improve - compression we first update "table" with the hashes of some - positions within the last copy. */ - - var prev_hash uint32 - if min_match == 4 { - input_bytes = binary.LittleEndian.Uint64(input[ip-3:]) - cur_hash = hashBytesAtOffset(input_bytes, 3, shift, min_match) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 3) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - table[prev_hash] = int(ip - base_ip - 1) - } else { - input_bytes = binary.LittleEndian.Uint64(input[ip-5:]) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 5) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 4) - prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - table[prev_hash] = int(ip - base_ip - 3) - input_bytes = binary.LittleEndian.Uint64(input[ip-2:]) - cur_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match) - prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match) - table[prev_hash] = int(ip - base_ip - 2) - prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match) - table[prev_hash] = int(ip - base_ip - 1) - } - - candidate = base_ip + table[cur_hash] - table[cur_hash] = int(ip - base_ip) - } - } - - ip++ - next_hash = hash1(input[ip:], shift, min_match) - } - } - 
-emit_remainder: - assert(next_emit <= ip_end) - - /* Emit the remaining bytes as literals. */ - if next_emit < ip_end { - var insert uint32 = uint32(ip_end - next_emit) - emitInsertLen(insert, commands) - copy(*literals, input[next_emit:][:insert]) - *literals = (*literals)[insert:] - } -} - -var storeCommands_kNumExtraBits = [128]uint32{ - 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 12, 14, 24, - 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, - 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 24, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, - 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, - 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24, -} -var storeCommands_kInsertOffset = [24]uint32{ - 0, 1, 2, 3, 4, 5, 6, 8, 10, 14, 18, 26, 34, 50, 66, 98, 130, 194, 322, 578, - 1090, 2114, 6210, 22594, -} - -func storeCommands(literals []byte, num_literals uint, commands []uint32, num_commands uint, bw *bitWriter) { - var lit_depths [256]byte - var lit_bits [256]uint16 - var lit_histo = [256]uint32{0} - var cmd_depths = [128]byte{0} - var cmd_bits = [128]uint16{0} - var cmd_histo = [128]uint32{0} - var i uint - for i = 0; i < num_literals; i++ { - lit_histo[literals[i]]++ - } - - buildAndStoreHuffmanTreeFast(lit_histo[:], num_literals, /* max_bits = */ - 8, lit_depths[:], lit_bits[:], bw) - - for i = 0; i < num_commands; i++ { - var code uint32 = commands[i] & 0xFF - assert(code < 128) - cmd_histo[code]++ - } - - cmd_histo[1] += 1 - cmd_histo[2] += 1 - cmd_histo[64] += 1 - cmd_histo[84] += 1 - buildAndStoreCommandPrefixCode(cmd_histo[:], cmd_depths[:], cmd_bits[:], bw) - - for i = 0; i < num_commands; i++ { - var cmd uint32 = commands[i] - var code uint32 = cmd & 0xFF - var extra uint32 = cmd >> 8 - assert(code < 128) - bw.writeBits(uint(cmd_depths[code]), uint64(cmd_bits[code])) - bw.writeBits(uint(storeCommands_kNumExtraBits[code]), uint64(extra)) 
- if code < 24 { - var insert uint32 = storeCommands_kInsertOffset[code] + extra - var j uint32 - for j = 0; j < insert; j++ { - var lit byte = literals[0] - bw.writeBits(uint(lit_depths[lit]), uint64(lit_bits[lit])) - literals = literals[1:] - } - } - } -} - -/* Acceptable loss for uncompressible speedup is 2% */ -const minRatio = 0.98 - -const sampleRate = 43 - -func shouldCompress(input []byte, input_size uint, num_literals uint) bool { - var corpus_size float64 = float64(input_size) - if float64(num_literals) < minRatio*corpus_size { - return true - } else { - var literal_histo = [256]uint32{0} - var max_total_bit_cost float64 = corpus_size * 8 * minRatio / sampleRate - var i uint - for i = 0; i < input_size; i += sampleRate { - literal_histo[input[i]]++ - } - - return bitsEntropy(literal_histo[:], 256) < max_total_bit_cost - } -} - -func emitUncompressedMetaBlock(input []byte, input_size uint, bw *bitWriter) { - storeMetaBlockHeader(input_size, true, bw) - bw.jumpToByteBoundary() - bw.writeBytes(input[:input_size]) -} - -func compressFragmentTwoPassImpl(input []byte, input_size uint, is_last bool, command_buf []uint32, literal_buf []byte, table []int, table_bits uint, min_match uint, bw *bitWriter) { - /* Save the start of the first block for position and distance computations. - */ - var base_ip []byte = input - - for input_size > 0 { - var block_size uint = brotli_min_size_t(input_size, kCompressFragmentTwoPassBlockSize) - var commands []uint32 = command_buf - var literals []byte = literal_buf - var num_literals uint - createCommands(input, block_size, input_size, base_ip, table, table_bits, min_match, &literals, &commands) - num_literals = uint(-cap(literals) + cap(literal_buf)) - if shouldCompress(input, block_size, num_literals) { - var num_commands uint = uint(-cap(commands) + cap(command_buf)) - storeMetaBlockHeader(block_size, false, bw) - - /* No block splits, no contexts. 
*/ - bw.writeBits(13, 0) - - storeCommands(literal_buf, num_literals, command_buf, num_commands, bw) - } else { - /* Since we did not find many backward references and the entropy of - the data is close to 8 bits, we can simply emit an uncompressed block. - This makes compression speed of uncompressible data about 3x faster. */ - emitUncompressedMetaBlock(input, block_size, bw) - } - - input = input[block_size:] - input_size -= block_size - } -} - -/* Compresses "input" string to bw as one or more complete meta-blocks. - - If "is_last" is 1, emits an additional empty last meta-block. - - REQUIRES: "input_size" is greater than zero, or "is_last" is 1. - REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24). - REQUIRES: "command_buf" and "literal_buf" point to at least - kCompressFragmentTwoPassBlockSize long arrays. - REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero. - REQUIRES: "table_size" is a power of two - OUTPUT: maximal copy distance <= |input_size| - OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */ -func compressFragmentTwoPass(input []byte, input_size uint, is_last bool, command_buf []uint32, literal_buf []byte, table []int, table_size uint, bw *bitWriter) { - var initial_storage_ix uint = bw.getPos() - var table_bits uint = uint(log2FloorNonZero(table_size)) - var min_match uint - if table_bits <= 15 { - min_match = 4 - } else { - min_match = 6 - } - compressFragmentTwoPassImpl(input, input_size, is_last, command_buf, literal_buf, table, table_bits, min_match, bw) - - /* If output is larger than single uncompressed block, rewrite it. 
*/ - if bw.getPos()-initial_storage_ix > 31+(input_size<<3) { - bw.rewind(initial_storage_ix) - emitUncompressedMetaBlock(input, input_size, bw) - } - - if is_last { - bw.writeBits(1, 1) /* islast */ - bw.writeBits(1, 1) /* isempty */ - bw.jumpToByteBoundary() - } -} diff --git a/vendor/github.com/andybalholm/brotli/constants.go b/vendor/github.com/andybalholm/brotli/constants.go deleted file mode 100644 index a880dff789..0000000000 --- a/vendor/github.com/andybalholm/brotli/constants.go +++ /dev/null @@ -1,77 +0,0 @@ -package brotli - -/* Copyright 2016 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Specification: 7.3. Encoding of the context map */ -const contextMapMaxRle = 16 - -/* Specification: 2. Compressed representation overview */ -const maxNumberOfBlockTypes = 256 - -/* Specification: 3.3. Alphabet sizes: insert-and-copy length */ -const numLiteralSymbols = 256 - -const numCommandSymbols = 704 - -const numBlockLenSymbols = 26 - -const maxContextMapSymbols = (maxNumberOfBlockTypes + contextMapMaxRle) - -const maxBlockTypeSymbols = (maxNumberOfBlockTypes + 2) - -/* Specification: 3.5. Complex prefix codes */ -const repeatPreviousCodeLength = 16 - -const repeatZeroCodeLength = 17 - -const codeLengthCodes = (repeatZeroCodeLength + 1) - -/* "code length of 8 is repeated" */ -const initialRepeatedCodeLength = 8 - -/* "Large Window Brotli" */ -const largeMaxDistanceBits = 62 - -const largeMinWbits = 10 - -const largeMaxWbits = 30 - -/* Specification: 4. 
Encoding of distances */ -const numDistanceShortCodes = 16 - -const maxNpostfix = 3 - -const maxNdirect = 120 - -const maxDistanceBits = 24 - -func distanceAlphabetSize(NPOSTFIX uint, NDIRECT uint, MAXNBITS uint) uint { - return numDistanceShortCodes + NDIRECT + uint(MAXNBITS<<(NPOSTFIX+1)) -} - -/* numDistanceSymbols == 1128 */ -const numDistanceSymbols = 1128 - -const maxDistance = 0x3FFFFFC - -const maxAllowedDistance = 0x7FFFFFFC - -/* 7.1. Context modes and context ID lookup for literals */ -/* "context IDs for literals are in the range of 0..63" */ -const literalContextBits = 6 - -/* 7.2. Context ID for distances */ -const distanceContextBits = 2 - -/* 9.1. Format of the Stream Header */ -/* Number of slack bytes for window size. Don't confuse - with BROTLI_NUM_DISTANCE_SHORT_CODES. */ -const windowGap = 16 - -func maxBackwardLimit(W uint) uint { - return (uint(1) << W) - windowGap -} diff --git a/vendor/github.com/andybalholm/brotli/context.go b/vendor/github.com/andybalholm/brotli/context.go deleted file mode 100644 index 884ff8a2d6..0000000000 --- a/vendor/github.com/andybalholm/brotli/context.go +++ /dev/null @@ -1,2176 +0,0 @@ -package brotli - -/* Lookup table to map the previous two bytes to a context id. - -There are four different context modeling modes defined here: - contextLSB6: context id is the least significant 6 bits of the last byte, - contextMSB6: context id is the most significant 6 bits of the last byte, - contextUTF8: second-order context model tuned for UTF8-encoded text, - contextSigned: second-order context model tuned for signed integers. - -If |p1| and |p2| are the previous two bytes, and |mode| is current context -mode, we calculate the context as: - - context = ContextLut(mode)[p1] | ContextLut(mode)[p2 + 256]. - -For contextUTF8 mode, if the previous two bytes are ASCII characters -(i.e. 
< 128), this will be equivalent to - - context = 4 * context1(p1) + context2(p2), - -where context1 is based on the previous byte in the following way: - - 0 : non-ASCII control - 1 : \t, \n, \r - 2 : space - 3 : other punctuation - 4 : " ' - 5 : % - 6 : ( < [ { - 7 : ) > ] } - 8 : , ; : - 9 : . - 10 : = - 11 : number - 12 : upper-case vowel - 13 : upper-case consonant - 14 : lower-case vowel - 15 : lower-case consonant - -and context2 is based on the second last byte: - - 0 : control, space - 1 : punctuation - 2 : upper-case letter, number - 3 : lower-case letter - -If the last byte is ASCII, and the second last byte is not (in a valid UTF8 -stream it will be a continuation byte, value between 128 and 191), the -context is the same as if the second last byte was an ASCII control or space. - -If the last byte is a UTF8 lead byte (value >= 192), then the next byte will -be a continuation byte and the context id is 2 or 3 depending on the LSB of -the last byte and to a lesser extent on the second last byte if it is ASCII. - -If the last byte is a UTF8 continuation byte, the second last byte can be: - - continuation byte: the next byte is probably ASCII or lead byte (assuming - 4-byte UTF8 characters are rare) and the context id is 0 or 1. - - lead byte (192 - 207): next byte is ASCII or lead byte, context is 0 or 1 - - lead byte (208 - 255): next byte is continuation byte, context is 2 or 3 - -The possible value combinations of the previous two bytes, the range of -context ids and the type of the next byte is summarized in the table below: - -|--------\-----------------------------------------------------------------| -| \ Last byte | -| Second \---------------------------------------------------------------| -| last byte \ ASCII | cont. byte | lead byte | -| \ (0-127) | (128-191) | (192-) | -|=============|===================|=====================|==================| -| ASCII | next: ASCII/lead | not valid | next: cont. 
| -| (0-127) | context: 4 - 63 | | context: 2 - 3 | -|-------------|-------------------|---------------------|------------------| -| cont. byte | next: ASCII/lead | next: ASCII/lead | next: cont. | -| (128-191) | context: 4 - 63 | context: 0 - 1 | context: 2 - 3 | -|-------------|-------------------|---------------------|------------------| -| lead byte | not valid | next: ASCII/lead | not valid | -| (192-207) | | context: 0 - 1 | | -|-------------|-------------------|---------------------|------------------| -| lead byte | not valid | next: cont. | not valid | -| (208-) | | context: 2 - 3 | | -|-------------|-------------------|---------------------|------------------| -*/ - -const ( - contextLSB6 = 0 - contextMSB6 = 1 - contextUTF8 = 2 - contextSigned = 3 -) - -/* Common context lookup table for all context modes. */ -var kContextLookup = [2048]byte{ - /* CONTEXT_LSB6, last byte. */ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 
60, - 61, - 62, - 63, - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - - /* CONTEXT_LSB6, second last byte, */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - - /* CONTEXT_MSB6, last byte. 
*/ - 0, - 0, - 0, - 0, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 3, - 3, - 3, - 3, - 4, - 4, - 4, - 4, - 5, - 5, - 5, - 5, - 6, - 6, - 6, - 6, - 7, - 7, - 7, - 7, - 8, - 8, - 8, - 8, - 9, - 9, - 9, - 9, - 10, - 10, - 10, - 10, - 11, - 11, - 11, - 11, - 12, - 12, - 12, - 12, - 13, - 13, - 13, - 13, - 14, - 14, - 14, - 14, - 15, - 15, - 15, - 15, - 16, - 16, - 16, - 16, - 17, - 17, - 17, - 17, - 18, - 18, - 18, - 18, - 19, - 19, - 19, - 19, - 20, - 20, - 20, - 20, - 21, - 21, - 21, - 21, - 22, - 22, - 22, - 22, - 23, - 23, - 23, - 23, - 24, - 24, - 24, - 24, - 25, - 25, - 25, - 25, - 26, - 26, - 26, - 26, - 27, - 27, - 27, - 27, - 28, - 28, - 28, - 28, - 29, - 29, - 29, - 29, - 30, - 30, - 30, - 30, - 31, - 31, - 31, - 31, - 32, - 32, - 32, - 32, - 33, - 33, - 33, - 33, - 34, - 34, - 34, - 34, - 35, - 35, - 35, - 35, - 36, - 36, - 36, - 36, - 37, - 37, - 37, - 37, - 38, - 38, - 38, - 38, - 39, - 39, - 39, - 39, - 40, - 40, - 40, - 40, - 41, - 41, - 41, - 41, - 42, - 42, - 42, - 42, - 43, - 43, - 43, - 43, - 44, - 44, - 44, - 44, - 45, - 45, - 45, - 45, - 46, - 46, - 46, - 46, - 47, - 47, - 47, - 47, - 48, - 48, - 48, - 48, - 49, - 49, - 49, - 49, - 50, - 50, - 50, - 50, - 51, - 51, - 51, - 51, - 52, - 52, - 52, - 52, - 53, - 53, - 53, - 53, - 54, - 54, - 54, - 54, - 55, - 55, - 55, - 55, - 56, - 56, - 56, - 56, - 57, - 57, - 57, - 57, - 58, - 58, - 58, - 58, - 59, - 59, - 59, - 59, - 60, - 60, - 60, - 60, - 61, - 61, - 61, - 61, - 62, - 62, - 62, - 62, - 63, - 63, - 63, - 63, - - /* CONTEXT_MSB6, second last byte, */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - - /* CONTEXT_UTF8, last byte. */ - /* ASCII range. */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4, - 4, - 0, - 0, - 4, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8, - 12, - 16, - 12, - 12, - 20, - 12, - 16, - 24, - 28, - 12, - 12, - 32, - 12, - 36, - 12, - 44, - 44, - 44, - 44, - 44, - 44, - 44, - 44, - 44, - 44, - 32, - 32, - 24, - 40, - 28, - 12, - 12, - 48, - 52, - 52, - 52, - 48, - 52, - 52, - 52, - 48, - 52, - 52, - 52, - 52, - 52, - 48, - 52, - 52, - 52, - 52, - 52, - 48, - 52, - 52, - 52, - 52, - 52, - 24, - 12, - 28, - 12, - 12, - 12, - 56, - 60, - 60, - 60, - 56, - 60, - 60, - 60, - 56, - 60, - 60, - 60, - 60, - 60, - 56, - 60, - 60, - 60, - 60, - 60, - 56, - 60, - 60, - 60, - 60, - 60, - 24, - 12, - 28, - 12, - 0, - - /* UTF8 continuation byte range. 
*/ - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - 0, - 1, - - /* UTF8 lead byte range. */ - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - 2, - 3, - - /* CONTEXT_UTF8 second last byte. */ - /* ASCII range. */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 1, - 1, - 1, - 1, - 1, - 1, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 1, - 1, - 1, - 1, - 0, - - /* UTF8 continuation byte range. */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - - /* UTF8 lead byte range. 
*/ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - - /* CONTEXT_SIGNED, last byte, same as the above values shifted by 3 bits. */ - 0, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 8, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 16, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 40, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 48, - 56, - - /* CONTEXT_SIGNED, second last byte. 
*/ - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 3, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 4, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 5, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 7, -} - -type contextLUT []byte - -func getContextLUT(mode int) contextLUT { - return kContextLookup[mode<<9:] -} - -func getContext(p1 byte, p2 byte, lut contextLUT) byte { - return lut[p1] | lut[256+int(p2)] -} diff --git a/vendor/github.com/andybalholm/brotli/decode.go b/vendor/github.com/andybalholm/brotli/decode.go deleted file mode 100644 index d2f39a051c..0000000000 --- a/vendor/github.com/andybalholm/brotli/decode.go +++ /dev/null @@ -1,2632 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -const ( - decoderResultError = 0 - decoderResultSuccess = 1 - decoderResultNeedsMoreInput = 2 - decoderResultNeedsMoreOutput = 3 -) - -/** - * Error code for detailed logging / production debugging. - * - * See ::BrotliDecoderGetErrorCode and ::BROTLI_LAST_ERROR_CODE. - */ -const ( - decoderNoError = 0 - decoderSuccess = 1 - decoderNeedsMoreInput = 2 - decoderNeedsMoreOutput = 3 - decoderErrorFormatExuberantNibble = -1 - decoderErrorFormatReserved = -2 - decoderErrorFormatExuberantMetaNibble = -3 - decoderErrorFormatSimpleHuffmanAlphabet = -4 - decoderErrorFormatSimpleHuffmanSame = -5 - decoderErrorFormatClSpace = -6 - decoderErrorFormatHuffmanSpace = -7 - decoderErrorFormatContextMapRepeat = -8 - decoderErrorFormatBlockLength1 = -9 - decoderErrorFormatBlockLength2 = -10 - decoderErrorFormatTransform = -11 - decoderErrorFormatDictionary = -12 - decoderErrorFormatWindowBits = -13 - decoderErrorFormatPadding1 = -14 - decoderErrorFormatPadding2 = -15 - decoderErrorFormatDistance = -16 - decoderErrorDictionaryNotSet = -19 - decoderErrorInvalidArguments = -20 - decoderErrorAllocContextModes = -21 - decoderErrorAllocTreeGroups = -22 - decoderErrorAllocContextMap = -25 - decoderErrorAllocRingBuffer1 = -26 - decoderErrorAllocRingBuffer2 = -27 - decoderErrorAllocBlockTypeTrees = -30 - decoderErrorUnreachable = -31 -) - -/** - * The value of the last error code, negative integer. - * - * All other error code values are in the range from ::lastErrorCode - * to @c -1. There are also 4 other possible non-error codes @c 0 .. @c 3 in - * ::BrotliDecoderErrorCode enumeration. - */ -const lastErrorCode = decoderErrorUnreachable - -/** Options to be used with ::BrotliDecoderSetParameter. 
*/ -const ( - decoderParamDisableRingBufferReallocation = 0 - decoderParamLargeWindow = 1 -) - -const huffmanTableBits = 8 - -const huffmanTableMask = 0xFF - -/* We need the slack region for the following reasons: - - doing up to two 16-byte copies for fast backward copying - - inserting transformed dictionary word (5 prefix + 24 base + 8 suffix) */ -const kRingBufferWriteAheadSlack uint32 = 42 - -var kCodeLengthCodeOrder = [codeLengthCodes]byte{1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15} - -/* Static prefix code for the complex code length code lengths. */ -var kCodeLengthPrefixLength = [16]byte{2, 2, 2, 3, 2, 2, 2, 4, 2, 2, 2, 3, 2, 2, 2, 4} - -var kCodeLengthPrefixValue = [16]byte{0, 4, 3, 2, 0, 4, 3, 1, 0, 4, 3, 2, 0, 4, 3, 5} - -func decoderSetParameter(state *Reader, p int, value uint32) bool { - if state.state != stateUninited { - return false - } - switch p { - case decoderParamDisableRingBufferReallocation: - if !(value == 0) { - state.canny_ringbuffer_allocation = 0 - } else { - state.canny_ringbuffer_allocation = 1 - } - return true - - case decoderParamLargeWindow: - state.large_window = (!(value == 0)) - return true - - default: - return false - } -} - -/* Saves error code and converts it to BrotliDecoderResult. */ -func saveErrorCode(s *Reader, e int) int { - s.error_code = int(e) - switch e { - case decoderSuccess: - return decoderResultSuccess - - case decoderNeedsMoreInput: - return decoderResultNeedsMoreInput - - case decoderNeedsMoreOutput: - return decoderResultNeedsMoreOutput - - default: - return decoderResultError - } -} - -/* Decodes WBITS by reading 1 - 7 bits, or 0x11 for "Large Window Brotli". - Precondition: bit-reader accumulator has at least 8 bits. 
*/ -func decodeWindowBits(s *Reader, br *bitReader) int { - var n uint32 - var large_window bool = s.large_window - s.large_window = false - takeBits(br, 1, &n) - if n == 0 { - s.window_bits = 16 - return decoderSuccess - } - - takeBits(br, 3, &n) - if n != 0 { - s.window_bits = 17 + n - return decoderSuccess - } - - takeBits(br, 3, &n) - if n == 1 { - if large_window { - takeBits(br, 1, &n) - if n == 1 { - return decoderErrorFormatWindowBits - } - - s.large_window = true - return decoderSuccess - } else { - return decoderErrorFormatWindowBits - } - } - - if n != 0 { - s.window_bits = 8 + n - return decoderSuccess - } - - s.window_bits = 17 - return decoderSuccess -} - -/* Decodes a number in the range [0..255], by reading 1 - 11 bits. */ -func decodeVarLenUint8(s *Reader, br *bitReader, value *uint32) int { - var bits uint32 - switch s.substate_decode_uint8 { - case stateDecodeUint8None: - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits == 0 { - *value = 0 - return decoderSuccess - } - fallthrough - - /* Fall through. */ - case stateDecodeUint8Short: - if !safeReadBits(br, 3, &bits) { - s.substate_decode_uint8 = stateDecodeUint8Short - return decoderNeedsMoreInput - } - - if bits == 0 { - *value = 1 - s.substate_decode_uint8 = stateDecodeUint8None - return decoderSuccess - } - - /* Use output value as a temporary storage. It MUST be persisted. */ - *value = bits - fallthrough - - /* Fall through. */ - case stateDecodeUint8Long: - if !safeReadBits(br, *value, &bits) { - s.substate_decode_uint8 = stateDecodeUint8Long - return decoderNeedsMoreInput - } - - *value = (1 << *value) + bits - s.substate_decode_uint8 = stateDecodeUint8None - return decoderSuccess - - default: - return decoderErrorUnreachable - } -} - -/* Decodes a metablock length and flags by reading 2 - 31 bits. 
*/ -func decodeMetaBlockLength(s *Reader, br *bitReader) int { - var bits uint32 - var i int - for { - switch s.substate_metablock_header { - case stateMetablockHeaderNone: - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits != 0 { - s.is_last_metablock = 1 - } else { - s.is_last_metablock = 0 - } - s.meta_block_remaining_len = 0 - s.is_uncompressed = 0 - s.is_metadata = 0 - if s.is_last_metablock == 0 { - s.substate_metablock_header = stateMetablockHeaderNibbles - break - } - - s.substate_metablock_header = stateMetablockHeaderEmpty - fallthrough - - /* Fall through. */ - case stateMetablockHeaderEmpty: - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits != 0 { - s.substate_metablock_header = stateMetablockHeaderNone - return decoderSuccess - } - - s.substate_metablock_header = stateMetablockHeaderNibbles - fallthrough - - /* Fall through. */ - case stateMetablockHeaderNibbles: - if !safeReadBits(br, 2, &bits) { - return decoderNeedsMoreInput - } - - s.size_nibbles = uint(byte(bits + 4)) - s.loop_counter = 0 - if bits == 3 { - s.is_metadata = 1 - s.substate_metablock_header = stateMetablockHeaderReserved - break - } - - s.substate_metablock_header = stateMetablockHeaderSize - fallthrough - - /* Fall through. */ - case stateMetablockHeaderSize: - i = s.loop_counter - - for ; i < int(s.size_nibbles); i++ { - if !safeReadBits(br, 4, &bits) { - s.loop_counter = i - return decoderNeedsMoreInput - } - - if uint(i+1) == s.size_nibbles && s.size_nibbles > 4 && bits == 0 { - return decoderErrorFormatExuberantNibble - } - - s.meta_block_remaining_len |= int(bits << uint(i*4)) - } - - s.substate_metablock_header = stateMetablockHeaderUncompressed - fallthrough - - /* Fall through. 
*/ - case stateMetablockHeaderUncompressed: - if s.is_last_metablock == 0 { - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits != 0 { - s.is_uncompressed = 1 - } else { - s.is_uncompressed = 0 - } - } - - s.meta_block_remaining_len++ - s.substate_metablock_header = stateMetablockHeaderNone - return decoderSuccess - - case stateMetablockHeaderReserved: - if !safeReadBits(br, 1, &bits) { - return decoderNeedsMoreInput - } - - if bits != 0 { - return decoderErrorFormatReserved - } - - s.substate_metablock_header = stateMetablockHeaderBytes - fallthrough - - /* Fall through. */ - case stateMetablockHeaderBytes: - if !safeReadBits(br, 2, &bits) { - return decoderNeedsMoreInput - } - - if bits == 0 { - s.substate_metablock_header = stateMetablockHeaderNone - return decoderSuccess - } - - s.size_nibbles = uint(byte(bits)) - s.substate_metablock_header = stateMetablockHeaderMetadata - fallthrough - - /* Fall through. */ - case stateMetablockHeaderMetadata: - i = s.loop_counter - - for ; i < int(s.size_nibbles); i++ { - if !safeReadBits(br, 8, &bits) { - s.loop_counter = i - return decoderNeedsMoreInput - } - - if uint(i+1) == s.size_nibbles && s.size_nibbles > 1 && bits == 0 { - return decoderErrorFormatExuberantMetaNibble - } - - s.meta_block_remaining_len |= int(bits << uint(i*8)) - } - - s.meta_block_remaining_len++ - s.substate_metablock_header = stateMetablockHeaderNone - return decoderSuccess - - default: - return decoderErrorUnreachable - } - } -} - -/* Decodes the Huffman code. - This method doesn't read data from the bit reader, BUT drops the amount of - bits that correspond to the decoded symbol. - bits MUST contain at least 15 (BROTLI_HUFFMAN_MAX_CODE_LENGTH) valid bits. 
*/ -func decodeSymbol(bits uint32, table []huffmanCode, br *bitReader) uint32 { - table = table[bits&huffmanTableMask:] - if table[0].bits > huffmanTableBits { - var nbits uint32 = uint32(table[0].bits) - huffmanTableBits - dropBits(br, huffmanTableBits) - table = table[uint32(table[0].value)+((bits>>huffmanTableBits)&bitMask(nbits)):] - } - - dropBits(br, uint32(table[0].bits)) - return uint32(table[0].value) -} - -/* Reads and decodes the next Huffman code from bit-stream. - This method peeks 16 bits of input and drops 0 - 15 of them. */ -func readSymbol(table []huffmanCode, br *bitReader) uint32 { - return decodeSymbol(get16BitsUnmasked(br), table, br) -} - -/* Same as DecodeSymbol, but it is known that there is less than 15 bits of - input are currently available. */ -func safeDecodeSymbol(table []huffmanCode, br *bitReader, result *uint32) bool { - var val uint32 - var available_bits uint32 = getAvailableBits(br) - if available_bits == 0 { - if table[0].bits == 0 { - *result = uint32(table[0].value) - return true - } - - return false /* No valid bits at all. */ - } - - val = uint32(getBitsUnmasked(br)) - table = table[val&huffmanTableMask:] - if table[0].bits <= huffmanTableBits { - if uint32(table[0].bits) <= available_bits { - dropBits(br, uint32(table[0].bits)) - *result = uint32(table[0].value) - return true - } else { - return false /* Not enough bits for the first level. */ - } - } - - if available_bits <= huffmanTableBits { - return false /* Not enough bits to move to the second level. */ - } - - /* Speculatively drop HUFFMAN_TABLE_BITS. */ - val = (val & bitMask(uint32(table[0].bits))) >> huffmanTableBits - - available_bits -= huffmanTableBits - table = table[uint32(table[0].value)+val:] - if available_bits < uint32(table[0].bits) { - return false /* Not enough bits for the second level. 
*/ - } - - dropBits(br, huffmanTableBits+uint32(table[0].bits)) - *result = uint32(table[0].value) - return true -} - -func safeReadSymbol(table []huffmanCode, br *bitReader, result *uint32) bool { - var val uint32 - if safeGetBits(br, 15, &val) { - *result = decodeSymbol(val, table, br) - return true - } - - return safeDecodeSymbol(table, br, result) -} - -/* Makes a look-up in first level Huffman table. Peeks 8 bits. */ -func preloadSymbol(safe int, table []huffmanCode, br *bitReader, bits *uint32, value *uint32) { - if safe != 0 { - return - } - - table = table[getBits(br, huffmanTableBits):] - *bits = uint32(table[0].bits) - *value = uint32(table[0].value) -} - -/* Decodes the next Huffman code using data prepared by PreloadSymbol. - Reads 0 - 15 bits. Also peeks 8 following bits. */ -func readPreloadedSymbol(table []huffmanCode, br *bitReader, bits *uint32, value *uint32) uint32 { - var result uint32 = *value - var ext []huffmanCode - if *bits > huffmanTableBits { - var val uint32 = get16BitsUnmasked(br) - ext = table[val&huffmanTableMask:][*value:] - var mask uint32 = bitMask((*bits - huffmanTableBits)) - dropBits(br, huffmanTableBits) - ext = ext[(val>>huffmanTableBits)&mask:] - dropBits(br, uint32(ext[0].bits)) - result = uint32(ext[0].value) - } else { - dropBits(br, *bits) - } - - preloadSymbol(0, table, br, bits, value) - return result -} - -func log2Floor(x uint32) uint32 { - var result uint32 = 0 - for x != 0 { - x >>= 1 - result++ - } - - return result -} - -/* Reads (s->symbol + 1) symbols. - Totally 1..4 symbols are read, 1..11 bits each. - The list of symbols MUST NOT contain duplicates. */ -func readSimpleHuffmanSymbols(alphabet_size uint32, max_symbol uint32, s *Reader) int { - var br *bitReader = &s.br - var max_bits uint32 = log2Floor(alphabet_size - 1) - var i uint32 = s.sub_loop_counter - /* max_bits == 1..11; symbol == 0..3; 1..44 bits will be read. 
*/ - - var num_symbols uint32 = s.symbol - for i <= num_symbols { - var v uint32 - if !safeReadBits(br, max_bits, &v) { - s.sub_loop_counter = i - s.substate_huffman = stateHuffmanSimpleRead - return decoderNeedsMoreInput - } - - if v >= max_symbol { - return decoderErrorFormatSimpleHuffmanAlphabet - } - - s.symbols_lists_array[i] = uint16(v) - i++ - } - - for i = 0; i < num_symbols; i++ { - var k uint32 = i + 1 - for ; k <= num_symbols; k++ { - if s.symbols_lists_array[i] == s.symbols_lists_array[k] { - return decoderErrorFormatSimpleHuffmanSame - } - } - } - - return decoderSuccess -} - -/* Process single decoded symbol code length: - A) reset the repeat variable - B) remember code length (if it is not 0) - C) extend corresponding index-chain - D) reduce the Huffman space - E) update the histogram */ -func processSingleCodeLength(code_len uint32, symbol *uint32, repeat *uint32, space *uint32, prev_code_len *uint32, symbol_lists symbolList, code_length_histo []uint16, next_symbol []int) { - *repeat = 0 - if code_len != 0 { /* code_len == 1..15 */ - symbolListPut(symbol_lists, next_symbol[code_len], uint16(*symbol)) - next_symbol[code_len] = int(*symbol) - *prev_code_len = code_len - *space -= 32768 >> code_len - code_length_histo[code_len]++ - } - - (*symbol)++ -} - -/* Process repeated symbol code length. 
- A) Check if it is the extension of previous repeat sequence; if the decoded - value is not BROTLI_REPEAT_PREVIOUS_CODE_LENGTH, then it is a new - symbol-skip - B) Update repeat variable - C) Check if operation is feasible (fits alphabet) - D) For each symbol do the same operations as in ProcessSingleCodeLength - - PRECONDITION: code_len == BROTLI_REPEAT_PREVIOUS_CODE_LENGTH or - code_len == BROTLI_REPEAT_ZERO_CODE_LENGTH */ -func processRepeatedCodeLength(code_len uint32, repeat_delta uint32, alphabet_size uint32, symbol *uint32, repeat *uint32, space *uint32, prev_code_len *uint32, repeat_code_len *uint32, symbol_lists symbolList, code_length_histo []uint16, next_symbol []int) { - var old_repeat uint32 /* for BROTLI_REPEAT_ZERO_CODE_LENGTH */ /* for BROTLI_REPEAT_ZERO_CODE_LENGTH */ - var extra_bits uint32 = 3 - var new_len uint32 = 0 - if code_len == repeatPreviousCodeLength { - new_len = *prev_code_len - extra_bits = 2 - } - - if *repeat_code_len != new_len { - *repeat = 0 - *repeat_code_len = new_len - } - - old_repeat = *repeat - if *repeat > 0 { - *repeat -= 2 - *repeat <<= extra_bits - } - - *repeat += repeat_delta + 3 - repeat_delta = *repeat - old_repeat - if *symbol+repeat_delta > alphabet_size { - *symbol = alphabet_size - *space = 0xFFFFF - return - } - - if *repeat_code_len != 0 { - var last uint = uint(*symbol + repeat_delta) - var next int = next_symbol[*repeat_code_len] - for { - symbolListPut(symbol_lists, next, uint16(*symbol)) - next = int(*symbol) - (*symbol)++ - if (*symbol) == uint32(last) { - break - } - } - - next_symbol[*repeat_code_len] = next - *space -= repeat_delta << (15 - *repeat_code_len) - code_length_histo[*repeat_code_len] = uint16(uint32(code_length_histo[*repeat_code_len]) + repeat_delta) - } else { - *symbol += repeat_delta - } -} - -/* Reads and decodes symbol codelengths. 
*/ -func readSymbolCodeLengths(alphabet_size uint32, s *Reader) int { - var br *bitReader = &s.br - var symbol uint32 = s.symbol - var repeat uint32 = s.repeat - var space uint32 = s.space - var prev_code_len uint32 = s.prev_code_len - var repeat_code_len uint32 = s.repeat_code_len - var symbol_lists symbolList = s.symbol_lists - var code_length_histo []uint16 = s.code_length_histo[:] - var next_symbol []int = s.next_symbol[:] - if !warmupBitReader(br) { - return decoderNeedsMoreInput - } - var p []huffmanCode - for symbol < alphabet_size && space > 0 { - p = s.table[:] - var code_len uint32 - if !checkInputAmount(br, shortFillBitWindowRead) { - s.symbol = symbol - s.repeat = repeat - s.prev_code_len = prev_code_len - s.repeat_code_len = repeat_code_len - s.space = space - return decoderNeedsMoreInput - } - - fillBitWindow16(br) - p = p[getBitsUnmasked(br)&uint64(bitMask(huffmanMaxCodeLengthCodeLength)):] - dropBits(br, uint32(p[0].bits)) /* Use 1..5 bits. */ - code_len = uint32(p[0].value) /* code_len == 0..17 */ - if code_len < repeatPreviousCodeLength { - processSingleCodeLength(code_len, &symbol, &repeat, &space, &prev_code_len, symbol_lists, code_length_histo, next_symbol) /* code_len == 16..17, extra_bits == 2..3 */ - } else { - var extra_bits uint32 - if code_len == repeatPreviousCodeLength { - extra_bits = 2 - } else { - extra_bits = 3 - } - var repeat_delta uint32 = uint32(getBitsUnmasked(br)) & bitMask(extra_bits) - dropBits(br, extra_bits) - processRepeatedCodeLength(code_len, repeat_delta, alphabet_size, &symbol, &repeat, &space, &prev_code_len, &repeat_code_len, symbol_lists, code_length_histo, next_symbol) - } - } - - s.space = space - return decoderSuccess -} - -func safeReadSymbolCodeLengths(alphabet_size uint32, s *Reader) int { - var br *bitReader = &s.br - var get_byte bool = false - var p []huffmanCode - for s.symbol < alphabet_size && s.space > 0 { - p = s.table[:] - var code_len uint32 - var available_bits uint32 - var bits uint32 = 0 - if 
get_byte && !pullByte(br) { - return decoderNeedsMoreInput - } - get_byte = false - available_bits = getAvailableBits(br) - if available_bits != 0 { - bits = uint32(getBitsUnmasked(br)) - } - - p = p[bits&bitMask(huffmanMaxCodeLengthCodeLength):] - if uint32(p[0].bits) > available_bits { - get_byte = true - continue - } - - code_len = uint32(p[0].value) /* code_len == 0..17 */ - if code_len < repeatPreviousCodeLength { - dropBits(br, uint32(p[0].bits)) - processSingleCodeLength(code_len, &s.symbol, &s.repeat, &s.space, &s.prev_code_len, s.symbol_lists, s.code_length_histo[:], s.next_symbol[:]) /* code_len == 16..17, extra_bits == 2..3 */ - } else { - var extra_bits uint32 = code_len - 14 - var repeat_delta uint32 = (bits >> p[0].bits) & bitMask(extra_bits) - if available_bits < uint32(p[0].bits)+extra_bits { - get_byte = true - continue - } - - dropBits(br, uint32(p[0].bits)+extra_bits) - processRepeatedCodeLength(code_len, repeat_delta, alphabet_size, &s.symbol, &s.repeat, &s.space, &s.prev_code_len, &s.repeat_code_len, s.symbol_lists, s.code_length_histo[:], s.next_symbol[:]) - } - } - - return decoderSuccess -} - -/* Reads and decodes 15..18 codes using static prefix code. - Each code is 2..4 bits long. In total 30..72 bits are used. 
*/ -func readCodeLengthCodeLengths(s *Reader) int { - var br *bitReader = &s.br - var num_codes uint32 = s.repeat - var space uint32 = s.space - var i uint32 = s.sub_loop_counter - for ; i < codeLengthCodes; i++ { - var code_len_idx byte = kCodeLengthCodeOrder[i] - var ix uint32 - var v uint32 - if !safeGetBits(br, 4, &ix) { - var available_bits uint32 = getAvailableBits(br) - if available_bits != 0 { - ix = uint32(getBitsUnmasked(br) & 0xF) - } else { - ix = 0 - } - - if uint32(kCodeLengthPrefixLength[ix]) > available_bits { - s.sub_loop_counter = i - s.repeat = num_codes - s.space = space - s.substate_huffman = stateHuffmanComplex - return decoderNeedsMoreInput - } - } - - v = uint32(kCodeLengthPrefixValue[ix]) - dropBits(br, uint32(kCodeLengthPrefixLength[ix])) - s.code_length_code_lengths[code_len_idx] = byte(v) - if v != 0 { - space = space - (32 >> v) - num_codes++ - s.code_length_histo[v]++ - if space-1 >= 32 { - /* space is 0 or wrapped around. */ - break - } - } - } - - if num_codes != 1 && space != 0 { - return decoderErrorFormatClSpace - } - - return decoderSuccess -} - -/* Decodes the Huffman tables. - There are 2 scenarios: - A) Huffman code contains only few symbols (1..4). Those symbols are read - directly; their code lengths are defined by the number of symbols. - For this scenario 4 - 49 bits will be read. - - B) 2-phase decoding: - B.1) Small Huffman table is decoded; it is specified with code lengths - encoded with predefined entropy code. 32 - 74 bits are used. - B.2) Decoded table is used to decode code lengths of symbols in resulting - Huffman table. In worst case 3520 bits are read. */ -func readHuffmanCode(alphabet_size uint32, max_symbol uint32, table []huffmanCode, opt_table_size *uint32, s *Reader) int { - var br *bitReader = &s.br - - /* Unnecessary masking, but might be good for safety. */ - alphabet_size &= 0x7FF - - /* State machine. 
*/ - for { - switch s.substate_huffman { - case stateHuffmanNone: - if !safeReadBits(br, 2, &s.sub_loop_counter) { - return decoderNeedsMoreInput - } - - /* The value is used as follows: - 1 for simple code; - 0 for no skipping, 2 skips 2 code lengths, 3 skips 3 code lengths */ - if s.sub_loop_counter != 1 { - s.space = 32 - s.repeat = 0 /* num_codes */ - var i int - for i = 0; i <= huffmanMaxCodeLengthCodeLength; i++ { - s.code_length_histo[i] = 0 - } - - for i = 0; i < codeLengthCodes; i++ { - s.code_length_code_lengths[i] = 0 - } - - s.substate_huffman = stateHuffmanComplex - continue - } - fallthrough - - /* Read symbols, codes & code lengths directly. */ - case stateHuffmanSimpleSize: - if !safeReadBits(br, 2, &s.symbol) { /* num_symbols */ - s.substate_huffman = stateHuffmanSimpleSize - return decoderNeedsMoreInput - } - - s.sub_loop_counter = 0 - fallthrough - - case stateHuffmanSimpleRead: - { - var result int = readSimpleHuffmanSymbols(alphabet_size, max_symbol, s) - if result != decoderSuccess { - return result - } - } - fallthrough - - case stateHuffmanSimpleBuild: - var table_size uint32 - if s.symbol == 3 { - var bits uint32 - if !safeReadBits(br, 1, &bits) { - s.substate_huffman = stateHuffmanSimpleBuild - return decoderNeedsMoreInput - } - - s.symbol += bits - } - - table_size = buildSimpleHuffmanTable(table, huffmanTableBits, s.symbols_lists_array[:], s.symbol) - if opt_table_size != nil { - *opt_table_size = table_size - } - - s.substate_huffman = stateHuffmanNone - return decoderSuccess - - /* Decode Huffman-coded code lengths. 
*/ - case stateHuffmanComplex: - { - var i uint32 - var result int = readCodeLengthCodeLengths(s) - if result != decoderSuccess { - return result - } - - buildCodeLengthsHuffmanTable(s.table[:], s.code_length_code_lengths[:], s.code_length_histo[:]) - for i = 0; i < 16; i++ { - s.code_length_histo[i] = 0 - } - - for i = 0; i <= huffmanMaxCodeLength; i++ { - s.next_symbol[i] = int(i) - (huffmanMaxCodeLength + 1) - symbolListPut(s.symbol_lists, s.next_symbol[i], 0xFFFF) - } - - s.symbol = 0 - s.prev_code_len = initialRepeatedCodeLength - s.repeat = 0 - s.repeat_code_len = 0 - s.space = 32768 - s.substate_huffman = stateHuffmanLengthSymbols - } - fallthrough - - case stateHuffmanLengthSymbols: - var table_size uint32 - var result int = readSymbolCodeLengths(max_symbol, s) - if result == decoderNeedsMoreInput { - result = safeReadSymbolCodeLengths(max_symbol, s) - } - - if result != decoderSuccess { - return result - } - - if s.space != 0 { - return decoderErrorFormatHuffmanSpace - } - - table_size = buildHuffmanTable(table, huffmanTableBits, s.symbol_lists, s.code_length_histo[:]) - if opt_table_size != nil { - *opt_table_size = table_size - } - - s.substate_huffman = stateHuffmanNone - return decoderSuccess - - default: - return decoderErrorUnreachable - } - } -} - -/* Decodes a block length by reading 3..39 bits. */ -func readBlockLength(table []huffmanCode, br *bitReader) uint32 { - var code uint32 - var nbits uint32 - code = readSymbol(table, br) - nbits = kBlockLengthPrefixCode[code].nbits /* nbits == 2..24 */ - return kBlockLengthPrefixCode[code].offset + readBits(br, nbits) -} - -/* WARNING: if state is not BROTLI_STATE_READ_BLOCK_LENGTH_NONE, then - reading can't be continued with ReadBlockLength. 
*/ -func safeReadBlockLength(s *Reader, result *uint32, table []huffmanCode, br *bitReader) bool { - var index uint32 - if s.substate_read_block_length == stateReadBlockLengthNone { - if !safeReadSymbol(table, br, &index) { - return false - } - } else { - index = s.block_length_index - } - { - var bits uint32 /* nbits == 2..24 */ - var nbits uint32 = kBlockLengthPrefixCode[index].nbits - if !safeReadBits(br, nbits, &bits) { - s.block_length_index = index - s.substate_read_block_length = stateReadBlockLengthSuffix - return false - } - - *result = kBlockLengthPrefixCode[index].offset + bits - s.substate_read_block_length = stateReadBlockLengthNone - return true - } -} - -/* Transform: - 1) initialize list L with values 0, 1,... 255 - 2) For each input element X: - 2.1) let Y = L[X] - 2.2) remove X-th element from L - 2.3) prepend Y to L - 2.4) append Y to output - - In most cases max(Y) <= 7, so most of L remains intact. - To reduce the cost of initialization, we reuse L, remember the upper bound - of Y values, and reinitialize only first elements in L. - - Most of input values are 0 and 1. To reduce number of branches, we replace - inner for loop with do-while. */ -func inverseMoveToFrontTransform(v []byte, v_len uint32, state *Reader) { - var mtf [256]byte - var i int - for i = 1; i < 256; i++ { - mtf[i] = byte(i) - } - var mtf_1 byte - - /* Transform the input. */ - for i = 0; uint32(i) < v_len; i++ { - var index int = int(v[i]) - var value byte = mtf[index] - v[i] = value - mtf_1 = value - for index >= 1 { - index-- - mtf[index+1] = mtf[index] - } - - mtf[0] = mtf_1 - } -} - -/* Decodes a series of Huffman table using ReadHuffmanCode function. 
*/ -func huffmanTreeGroupDecode(group *huffmanTreeGroup, s *Reader) int { - if s.substate_tree_group != stateTreeGroupLoop { - s.next = group.codes - s.htree_index = 0 - s.substate_tree_group = stateTreeGroupLoop - } - - for s.htree_index < int(group.num_htrees) { - var table_size uint32 - var result int = readHuffmanCode(uint32(group.alphabet_size), uint32(group.max_symbol), s.next, &table_size, s) - if result != decoderSuccess { - return result - } - group.htrees[s.htree_index] = s.next - s.next = s.next[table_size:] - s.htree_index++ - } - - s.substate_tree_group = stateTreeGroupNone - return decoderSuccess -} - -/* Decodes a context map. - Decoding is done in 4 phases: - 1) Read auxiliary information (6..16 bits) and allocate memory. - In case of trivial context map, decoding is finished at this phase. - 2) Decode Huffman table using ReadHuffmanCode function. - This table will be used for reading context map items. - 3) Read context map items; "0" values could be run-length encoded. - 4) Optionally, apply InverseMoveToFront transform to the resulting map. */ -func decodeContextMap(context_map_size uint32, num_htrees *uint32, context_map_arg *[]byte, s *Reader) int { - var br *bitReader = &s.br - var result int = decoderSuccess - - switch int(s.substate_context_map) { - case stateContextMapNone: - result = decodeVarLenUint8(s, br, num_htrees) - if result != decoderSuccess { - return result - } - - (*num_htrees)++ - s.context_index = 0 - *context_map_arg = make([]byte, uint(context_map_size)) - if *context_map_arg == nil { - return decoderErrorAllocContextMap - } - - if *num_htrees <= 1 { - for i := 0; i < int(context_map_size); i++ { - (*context_map_arg)[i] = 0 - } - return decoderSuccess - } - - s.substate_context_map = stateContextMapReadPrefix - fallthrough - /* Fall through. */ - case stateContextMapReadPrefix: - { - var bits uint32 - - /* In next stage ReadHuffmanCode uses at least 4 bits, so it is safe - to peek 4 bits ahead. 
*/ - if !safeGetBits(br, 5, &bits) { - return decoderNeedsMoreInput - } - - if bits&1 != 0 { /* Use RLE for zeros. */ - s.max_run_length_prefix = (bits >> 1) + 1 - dropBits(br, 5) - } else { - s.max_run_length_prefix = 0 - dropBits(br, 1) - } - - s.substate_context_map = stateContextMapHuffman - } - fallthrough - - /* Fall through. */ - case stateContextMapHuffman: - { - var alphabet_size uint32 = *num_htrees + s.max_run_length_prefix - result = readHuffmanCode(alphabet_size, alphabet_size, s.context_map_table[:], nil, s) - if result != decoderSuccess { - return result - } - s.code = 0xFFFF - s.substate_context_map = stateContextMapDecode - } - fallthrough - - /* Fall through. */ - case stateContextMapDecode: - { - var context_index uint32 = s.context_index - var max_run_length_prefix uint32 = s.max_run_length_prefix - var context_map []byte = *context_map_arg - var code uint32 = s.code - var skip_preamble bool = (code != 0xFFFF) - for context_index < context_map_size || skip_preamble { - if !skip_preamble { - if !safeReadSymbol(s.context_map_table[:], br, &code) { - s.code = 0xFFFF - s.context_index = context_index - return decoderNeedsMoreInput - } - - if code == 0 { - context_map[context_index] = 0 - context_index++ - continue - } - - if code > max_run_length_prefix { - context_map[context_index] = byte(code - max_run_length_prefix) - context_index++ - continue - } - } else { - skip_preamble = false - } - - /* RLE sub-stage. 
*/ - { - var reps uint32 - if !safeReadBits(br, code, &reps) { - s.code = code - s.context_index = context_index - return decoderNeedsMoreInput - } - - reps += 1 << code - if context_index+reps > context_map_size { - return decoderErrorFormatContextMapRepeat - } - - for { - context_map[context_index] = 0 - context_index++ - reps-- - if reps == 0 { - break - } - } - } - } - } - fallthrough - - case stateContextMapTransform: - var bits uint32 - if !safeReadBits(br, 1, &bits) { - s.substate_context_map = stateContextMapTransform - return decoderNeedsMoreInput - } - - if bits != 0 { - inverseMoveToFrontTransform(*context_map_arg, context_map_size, s) - } - - s.substate_context_map = stateContextMapNone - return decoderSuccess - - default: - return decoderErrorUnreachable - } -} - -/* Decodes a command or literal and updates block type ring-buffer. - Reads 3..54 bits. */ -func decodeBlockTypeAndLength(safe int, s *Reader, tree_type int) bool { - var max_block_type uint32 = s.num_block_types[tree_type] - var type_tree []huffmanCode - type_tree = s.block_type_trees[tree_type*huffmanMaxSize258:] - var len_tree []huffmanCode - len_tree = s.block_len_trees[tree_type*huffmanMaxSize26:] - var br *bitReader = &s.br - var ringbuffer []uint32 = s.block_type_rb[tree_type*2:] - var block_type uint32 - if max_block_type <= 1 { - return false - } - - /* Read 0..15 + 3..39 bits. 
*/ - if safe == 0 { - block_type = readSymbol(type_tree, br) - s.block_length[tree_type] = readBlockLength(len_tree, br) - } else { - var memento bitReaderState - bitReaderSaveState(br, &memento) - if !safeReadSymbol(type_tree, br, &block_type) { - return false - } - if !safeReadBlockLength(s, &s.block_length[tree_type], len_tree, br) { - s.substate_read_block_length = stateReadBlockLengthNone - bitReaderRestoreState(br, &memento) - return false - } - } - - if block_type == 1 { - block_type = ringbuffer[1] + 1 - } else if block_type == 0 { - block_type = ringbuffer[0] - } else { - block_type -= 2 - } - - if block_type >= max_block_type { - block_type -= max_block_type - } - - ringbuffer[0] = ringbuffer[1] - ringbuffer[1] = block_type - return true -} - -func detectTrivialLiteralBlockTypes(s *Reader) { - var i uint - for i = 0; i < 8; i++ { - s.trivial_literal_contexts[i] = 0 - } - for i = 0; uint32(i) < s.num_block_types[0]; i++ { - var offset uint = i << literalContextBits - var error uint = 0 - var sample uint = uint(s.context_map[offset]) - var j uint - for j = 0; j < 1<>5] |= 1 << (i & 31) - } - } -} - -func prepareLiteralDecoding(s *Reader) { - var context_mode byte - var trivial uint - var block_type uint32 = s.block_type_rb[1] - var context_offset uint32 = block_type << literalContextBits - s.context_map_slice = s.context_map[context_offset:] - trivial = uint(s.trivial_literal_contexts[block_type>>5]) - s.trivial_literal_context = int((trivial >> (block_type & 31)) & 1) - s.literal_htree = []huffmanCode(s.literal_hgroup.htrees[s.context_map_slice[0]]) - context_mode = s.context_modes[block_type] & 3 - s.context_lookup = getContextLUT(int(context_mode)) -} - -/* Decodes the block type and updates the state for literal context. - Reads 3..54 bits. 
*/ -func decodeLiteralBlockSwitchInternal(safe int, s *Reader) bool { - if !decodeBlockTypeAndLength(safe, s, 0) { - return false - } - - prepareLiteralDecoding(s) - return true -} - -func decodeLiteralBlockSwitch(s *Reader) { - decodeLiteralBlockSwitchInternal(0, s) -} - -func safeDecodeLiteralBlockSwitch(s *Reader) bool { - return decodeLiteralBlockSwitchInternal(1, s) -} - -/* Block switch for insert/copy length. - Reads 3..54 bits. */ -func decodeCommandBlockSwitchInternal(safe int, s *Reader) bool { - if !decodeBlockTypeAndLength(safe, s, 1) { - return false - } - - s.htree_command = []huffmanCode(s.insert_copy_hgroup.htrees[s.block_type_rb[3]]) - return true -} - -func decodeCommandBlockSwitch(s *Reader) { - decodeCommandBlockSwitchInternal(0, s) -} - -func safeDecodeCommandBlockSwitch(s *Reader) bool { - return decodeCommandBlockSwitchInternal(1, s) -} - -/* Block switch for distance codes. - Reads 3..54 bits. */ -func decodeDistanceBlockSwitchInternal(safe int, s *Reader) bool { - if !decodeBlockTypeAndLength(safe, s, 2) { - return false - } - - s.dist_context_map_slice = s.dist_context_map[s.block_type_rb[5]< s.ringbuffer_size { - pos = uint(s.ringbuffer_size) - } else { - pos = uint(s.pos) - } - var partial_pos_rb uint = (s.rb_roundtrips * uint(s.ringbuffer_size)) + pos - return partial_pos_rb - s.partial_pos_out -} - -/* Dumps output. - Returns BROTLI_DECODER_NEEDS_MORE_OUTPUT only if there is more output to push - and either ring-buffer is as big as window size, or |force| is true. 
*/ -func writeRingBuffer(s *Reader, available_out *uint, next_out *[]byte, total_out *uint, force bool) int { - var start []byte - start = s.ringbuffer[s.partial_pos_out&uint(s.ringbuffer_mask):] - var to_write uint = unwrittenBytes(s, true) - var num_written uint = *available_out - if num_written > to_write { - num_written = to_write - } - - if s.meta_block_remaining_len < 0 { - return decoderErrorFormatBlockLength1 - } - - if next_out != nil && *next_out == nil { - *next_out = start - } else { - if next_out != nil { - copy(*next_out, start[:num_written]) - *next_out = (*next_out)[num_written:] - } - } - - *available_out -= num_written - s.partial_pos_out += num_written - if total_out != nil { - *total_out = s.partial_pos_out - } - - if num_written < to_write { - if s.ringbuffer_size == 1<= s.ringbuffer_size { - s.pos -= s.ringbuffer_size - s.rb_roundtrips++ - if uint(s.pos) != 0 { - s.should_wrap_ringbuffer = 1 - } else { - s.should_wrap_ringbuffer = 0 - } - } - - return decoderSuccess -} - -func wrapRingBuffer(s *Reader) { - if s.should_wrap_ringbuffer != 0 { - copy(s.ringbuffer, s.ringbuffer_end[:uint(s.pos)]) - s.should_wrap_ringbuffer = 0 - } -} - -/* Allocates ring-buffer. - - s->ringbuffer_size MUST be updated by BrotliCalculateRingBufferSize before - this function is called. - - Last two bytes of ring-buffer are initialized to 0, so context calculation - could be done uniformly for the first two and all other positions. */ -func ensureRingBuffer(s *Reader) bool { - var old_ringbuffer []byte = s.ringbuffer - if s.ringbuffer_size == s.new_ringbuffer_size { - return true - } - - s.ringbuffer = make([]byte, uint(s.new_ringbuffer_size)+uint(kRingBufferWriteAheadSlack)) - if s.ringbuffer == nil { - /* Restore previous value. 
*/ - s.ringbuffer = old_ringbuffer - - return false - } - - s.ringbuffer[s.new_ringbuffer_size-2] = 0 - s.ringbuffer[s.new_ringbuffer_size-1] = 0 - - if !(old_ringbuffer == nil) { - copy(s.ringbuffer, old_ringbuffer[:uint(s.pos)]) - - old_ringbuffer = nil - } - - s.ringbuffer_size = s.new_ringbuffer_size - s.ringbuffer_mask = s.new_ringbuffer_size - 1 - s.ringbuffer_end = s.ringbuffer[s.ringbuffer_size:] - - return true -} - -func copyUncompressedBlockToOutput(available_out *uint, next_out *[]byte, total_out *uint, s *Reader) int { - /* TODO: avoid allocation for single uncompressed block. */ - if !ensureRingBuffer(s) { - return decoderErrorAllocRingBuffer1 - } - - /* State machine */ - for { - switch s.substate_uncompressed { - case stateUncompressedNone: - { - var nbytes int = int(getRemainingBytes(&s.br)) - if nbytes > s.meta_block_remaining_len { - nbytes = s.meta_block_remaining_len - } - - if s.pos+nbytes > s.ringbuffer_size { - nbytes = s.ringbuffer_size - s.pos - } - - /* Copy remaining bytes from s->br.buf_ to ring-buffer. */ - copyBytes(s.ringbuffer[s.pos:], &s.br, uint(nbytes)) - - s.pos += nbytes - s.meta_block_remaining_len -= nbytes - if s.pos < 1<>1 >= min_size { - new_ringbuffer_size >>= 1 - } - } - - s.new_ringbuffer_size = new_ringbuffer_size -} - -/* Reads 1..256 2-bit context modes. */ -func readContextModes(s *Reader) int { - var br *bitReader = &s.br - var i int = s.loop_counter - - for i < int(s.num_block_types[0]) { - var bits uint32 - if !safeReadBits(br, 2, &bits) { - s.loop_counter = i - return decoderNeedsMoreInput - } - - s.context_modes[i] = byte(bits) - i++ - } - - return decoderSuccess -} - -func takeDistanceFromRingBuffer(s *Reader) { - if s.distance_code == 0 { - s.dist_rb_idx-- - s.distance_code = s.dist_rb[s.dist_rb_idx&3] - - /* Compensate double distance-ring-buffer roll for dictionary items. 
*/ - s.distance_context = 1 - } else { - var distance_code int = s.distance_code << 1 - const kDistanceShortCodeIndexOffset uint32 = 0xAAAFFF1B - const kDistanceShortCodeValueOffset uint32 = 0xFA5FA500 - var v int = (s.dist_rb_idx + int(kDistanceShortCodeIndexOffset>>uint(distance_code))) & 0x3 - /* kDistanceShortCodeIndexOffset has 2-bit values from LSB: - 3, 2, 1, 0, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2 */ - - /* kDistanceShortCodeValueOffset has 2-bit values from LSB: - -0, 0,-0, 0,-1, 1,-2, 2,-3, 3,-1, 1,-2, 2,-3, 3 */ - s.distance_code = s.dist_rb[v] - - v = int(kDistanceShortCodeValueOffset>>uint(distance_code)) & 0x3 - if distance_code&0x3 != 0 { - s.distance_code += v - } else { - s.distance_code -= v - if s.distance_code <= 0 { - /* A huge distance will cause a () soon. - This is a little faster than failing here. */ - s.distance_code = 0x7FFFFFFF - } - } - } -} - -func safeReadBitsMaybeZero(br *bitReader, n_bits uint32, val *uint32) bool { - if n_bits != 0 { - return safeReadBits(br, n_bits, val) - } else { - *val = 0 - return true - } -} - -/* Precondition: s->distance_code < 0. */ -func readDistanceInternal(safe int, s *Reader, br *bitReader) bool { - var distval int - var memento bitReaderState - var distance_tree []huffmanCode = []huffmanCode(s.distance_hgroup.htrees[s.dist_htree_index]) - if safe == 0 { - s.distance_code = int(readSymbol(distance_tree, br)) - } else { - var code uint32 - bitReaderSaveState(br, &memento) - if !safeReadSymbol(distance_tree, br, &code) { - return false - } - - s.distance_code = int(code) - } - - /* Convert the distance code to the actual distance by possibly - looking up past distances from the s->ringbuffer. 
*/ - s.distance_context = 0 - - if s.distance_code&^0xF == 0 { - takeDistanceFromRingBuffer(s) - s.block_length[2]-- - return true - } - - distval = s.distance_code - int(s.num_direct_distance_codes) - if distval >= 0 { - var nbits uint32 - var postfix int - var offset int - if safe == 0 && (s.distance_postfix_bits == 0) { - nbits = (uint32(distval) >> 1) + 1 - offset = ((2 + (distval & 1)) << nbits) - 4 - s.distance_code = int(s.num_direct_distance_codes) + offset + int(readBits(br, nbits)) - } else { - /* This branch also works well when s->distance_postfix_bits == 0. */ - var bits uint32 - postfix = distval & s.distance_postfix_mask - distval >>= s.distance_postfix_bits - nbits = (uint32(distval) >> 1) + 1 - if safe != 0 { - if !safeReadBitsMaybeZero(br, nbits, &bits) { - s.distance_code = -1 /* Restore precondition. */ - bitReaderRestoreState(br, &memento) - return false - } - } else { - bits = readBits(br, nbits) - } - - offset = ((2 + (distval & 1)) << nbits) - 4 - s.distance_code = int(s.num_direct_distance_codes) + ((offset + int(bits)) << s.distance_postfix_bits) + postfix - } - } - - s.distance_code = s.distance_code - numDistanceShortCodes + 1 - s.block_length[2]-- - return true -} - -func readDistance(s *Reader, br *bitReader) { - readDistanceInternal(0, s, br) -} - -func safeReadDistance(s *Reader, br *bitReader) bool { - return readDistanceInternal(1, s, br) -} - -func readCommandInternal(safe int, s *Reader, br *bitReader, insert_length *int) bool { - var cmd_code uint32 - var insert_len_extra uint32 = 0 - var copy_length uint32 - var v cmdLutElement - var memento bitReaderState - if safe == 0 { - cmd_code = readSymbol(s.htree_command, br) - } else { - bitReaderSaveState(br, &memento) - if !safeReadSymbol(s.htree_command, br, &cmd_code) { - return false - } - } - - v = kCmdLut[cmd_code] - s.distance_code = int(v.distance_code) - s.distance_context = int(v.context) - s.dist_htree_index = s.dist_context_map_slice[s.distance_context] - *insert_length = 
int(v.insert_len_offset) - if safe == 0 { - if v.insert_len_extra_bits != 0 { - insert_len_extra = readBits(br, uint32(v.insert_len_extra_bits)) - } - - copy_length = readBits(br, uint32(v.copy_len_extra_bits)) - } else { - if !safeReadBitsMaybeZero(br, uint32(v.insert_len_extra_bits), &insert_len_extra) || !safeReadBitsMaybeZero(br, uint32(v.copy_len_extra_bits), ©_length) { - bitReaderRestoreState(br, &memento) - return false - } - } - - s.copy_length = int(copy_length) + int(v.copy_len_offset) - s.block_length[1]-- - *insert_length += int(insert_len_extra) - return true -} - -func readCommand(s *Reader, br *bitReader, insert_length *int) { - readCommandInternal(0, s, br, insert_length) -} - -func safeReadCommand(s *Reader, br *bitReader, insert_length *int) bool { - return readCommandInternal(1, s, br, insert_length) -} - -func checkInputAmountMaybeSafe(safe int, br *bitReader, num uint) bool { - if safe != 0 { - return true - } - - return checkInputAmount(br, num) -} - -func processCommandsInternal(safe int, s *Reader) int { - var pos int = s.pos - var i int = s.loop_counter - var result int = decoderSuccess - var br *bitReader = &s.br - var hc []huffmanCode - - if !checkInputAmountMaybeSafe(safe, br, 28) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - if safe == 0 { - warmupBitReader(br) - } - - /* Jump into state machine. 
*/ - if s.state == stateCommandBegin { - goto CommandBegin - } else if s.state == stateCommandInner { - goto CommandInner - } else if s.state == stateCommandPostDecodeLiterals { - goto CommandPostDecodeLiterals - } else if s.state == stateCommandPostWrapCopy { - goto CommandPostWrapCopy - } else { - return decoderErrorUnreachable - } - -CommandBegin: - if safe != 0 { - s.state = stateCommandBegin - } - - if !checkInputAmountMaybeSafe(safe, br, 28) { /* 156 bits + 7 bytes */ - s.state = stateCommandBegin - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - if s.block_length[1] == 0 { - if safe != 0 { - if !safeDecodeCommandBlockSwitch(s) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - decodeCommandBlockSwitch(s) - } - - goto CommandBegin - } - - /* Read the insert/copy length in the command. */ - if safe != 0 { - if !safeReadCommand(s, br, &i) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - readCommand(s, br, &i) - } - - if i == 0 { - goto CommandPostDecodeLiterals - } - - s.meta_block_remaining_len -= i - -CommandInner: - if safe != 0 { - s.state = stateCommandInner - } - - /* Read the literals in the command. 
*/ - if s.trivial_literal_context != 0 { - var bits uint32 - var value uint32 - preloadSymbol(safe, s.literal_htree, br, &bits, &value) - for { - if !checkInputAmountMaybeSafe(safe, br, 28) { /* 162 bits + 7 bytes */ - s.state = stateCommandInner - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - if s.block_length[0] == 0 { - if safe != 0 { - if !safeDecodeLiteralBlockSwitch(s) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - decodeLiteralBlockSwitch(s) - } - - preloadSymbol(safe, s.literal_htree, br, &bits, &value) - if s.trivial_literal_context == 0 { - goto CommandInner - } - } - - if safe == 0 { - s.ringbuffer[pos] = byte(readPreloadedSymbol(s.literal_htree, br, &bits, &value)) - } else { - var literal uint32 - if !safeReadSymbol(s.literal_htree, br, &literal) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - s.ringbuffer[pos] = byte(literal) - } - - s.block_length[0]-- - pos++ - if pos == s.ringbuffer_size { - s.state = stateCommandInnerWrite - i-- - goto saveStateAndReturn - } - i-- - if i == 0 { - break - } - } - } else { - var p1 byte = s.ringbuffer[(pos-1)&s.ringbuffer_mask] - var p2 byte = s.ringbuffer[(pos-2)&s.ringbuffer_mask] - for { - var context byte - if !checkInputAmountMaybeSafe(safe, br, 28) { /* 162 bits + 7 bytes */ - s.state = stateCommandInner - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - - if s.block_length[0] == 0 { - if safe != 0 { - if !safeDecodeLiteralBlockSwitch(s) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - decodeLiteralBlockSwitch(s) - } - - if s.trivial_literal_context != 0 { - goto CommandInner - } - } - - context = getContext(p1, p2, s.context_lookup) - hc = []huffmanCode(s.literal_hgroup.htrees[s.context_map_slice[context]]) - p2 = p1 - if safe == 0 { - p1 = byte(readSymbol(hc, br)) - } else { - var literal uint32 - if !safeReadSymbol(hc, br, &literal) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - 
} - - p1 = byte(literal) - } - - s.ringbuffer[pos] = p1 - s.block_length[0]-- - pos++ - if pos == s.ringbuffer_size { - s.state = stateCommandInnerWrite - i-- - goto saveStateAndReturn - } - i-- - if i == 0 { - break - } - } - } - - if s.meta_block_remaining_len <= 0 { - s.state = stateMetablockDone - goto saveStateAndReturn - } - -CommandPostDecodeLiterals: - if safe != 0 { - s.state = stateCommandPostDecodeLiterals - } - - if s.distance_code >= 0 { - /* Implicit distance case. */ - if s.distance_code != 0 { - s.distance_context = 0 - } else { - s.distance_context = 1 - } - - s.dist_rb_idx-- - s.distance_code = s.dist_rb[s.dist_rb_idx&3] - } else { - /* Read distance code in the command, unless it was implicitly zero. */ - if s.block_length[2] == 0 { - if safe != 0 { - if !safeDecodeDistanceBlockSwitch(s) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - decodeDistanceBlockSwitch(s) - } - } - - if safe != 0 { - if !safeReadDistance(s, br) { - result = decoderNeedsMoreInput - goto saveStateAndReturn - } - } else { - readDistance(s, br) - } - } - - if s.max_distance != s.max_backward_distance { - if pos < s.max_backward_distance { - s.max_distance = pos - } else { - s.max_distance = s.max_backward_distance - } - } - - i = s.copy_length - - /* Apply copy of LZ77 back-reference, or static dictionary reference if - the distance is larger than the max LZ77 distance */ - if s.distance_code > s.max_distance { - /* The maximum allowed distance is BROTLI_MAX_ALLOWED_DISTANCE = 0x7FFFFFFC. - With this choice, no signed overflow can occur after decoding - a special distance code (e.g., after adding 3 to the last distance). 
*/ - if s.distance_code > maxAllowedDistance { - return decoderErrorFormatDistance - } - - if i >= minDictionaryWordLength && i <= maxDictionaryWordLength { - var address int = s.distance_code - s.max_distance - 1 - var words *dictionary = s.dictionary - var trans *transforms = s.transforms - var offset int = int(s.dictionary.offsets_by_length[i]) - var shift uint32 = uint32(s.dictionary.size_bits_by_length[i]) - var mask int = int(bitMask(shift)) - var word_idx int = address & mask - var transform_idx int = address >> shift - - /* Compensate double distance-ring-buffer roll. */ - s.dist_rb_idx += s.distance_context - - offset += word_idx * i - if words.data == nil { - return decoderErrorDictionaryNotSet - } - - if transform_idx < int(trans.num_transforms) { - var word []byte - word = words.data[offset:] - var len int = i - if transform_idx == int(trans.cutOffTransforms[0]) { - copy(s.ringbuffer[pos:], word[:uint(len)]) - } else { - len = transformDictionaryWord(s.ringbuffer[pos:], word, int(len), trans, transform_idx) - } - - pos += int(len) - s.meta_block_remaining_len -= int(len) - if pos >= s.ringbuffer_size { - s.state = stateCommandPostWrite1 - goto saveStateAndReturn - } - } else { - return decoderErrorFormatTransform - } - } else { - return decoderErrorFormatDictionary - } - } else { - var src_start int = (pos - s.distance_code) & s.ringbuffer_mask - var copy_dst []byte - copy_dst = s.ringbuffer[pos:] - var copy_src []byte - copy_src = s.ringbuffer[src_start:] - var dst_end int = pos + i - var src_end int = src_start + i - - /* Update the recent distances cache. */ - s.dist_rb[s.dist_rb_idx&3] = s.distance_code - - s.dist_rb_idx++ - s.meta_block_remaining_len -= i - - /* There are 32+ bytes of slack in the ring-buffer allocation. - Also, we have 16 short codes, that make these 16 bytes irrelevant - in the ring-buffer. Let's copy over them as a first guess. 
*/ - copy(copy_dst, copy_src[:16]) - - if src_end > pos && dst_end > src_start { - /* Regions intersect. */ - goto CommandPostWrapCopy - } - - if dst_end >= s.ringbuffer_size || src_end >= s.ringbuffer_size { - /* At least one region wraps. */ - goto CommandPostWrapCopy - } - - pos += i - if i > 16 { - if i > 32 { - copy(copy_dst[16:], copy_src[16:][:uint(i-16)]) - } else { - /* This branch covers about 45% cases. - Fixed size short copy allows more compiler optimizations. */ - copy(copy_dst[16:], copy_src[16:][:16]) - } - } - } - - if s.meta_block_remaining_len <= 0 { - /* Next metablock, if any. */ - s.state = stateMetablockDone - - goto saveStateAndReturn - } else { - goto CommandBegin - } -CommandPostWrapCopy: - { - var wrap_guard int = s.ringbuffer_size - pos - for { - i-- - if i < 0 { - break - } - s.ringbuffer[pos] = s.ringbuffer[(pos-s.distance_code)&s.ringbuffer_mask] - pos++ - wrap_guard-- - if wrap_guard == 0 { - s.state = stateCommandPostWrite2 - goto saveStateAndReturn - } - } - } - - if s.meta_block_remaining_len <= 0 { - /* Next metablock, if any. */ - s.state = stateMetablockDone - - goto saveStateAndReturn - } else { - goto CommandBegin - } - -saveStateAndReturn: - s.pos = pos - s.loop_counter = i - return result -} - -func processCommands(s *Reader) int { - return processCommandsInternal(0, s) -} - -func safeProcessCommands(s *Reader) int { - return processCommandsInternal(1, s) -} - -/* Returns the maximum number of distance symbols which can only represent - distances not exceeding BROTLI_MAX_ALLOWED_DISTANCE. 
*/ - -var maxDistanceSymbol_bound = [maxNpostfix + 1]uint32{0, 4, 12, 28} -var maxDistanceSymbol_diff = [maxNpostfix + 1]uint32{73, 126, 228, 424} - -func maxDistanceSymbol(ndirect uint32, npostfix uint32) uint32 { - var postfix uint32 = 1 << npostfix - if ndirect < maxDistanceSymbol_bound[npostfix] { - return ndirect + maxDistanceSymbol_diff[npostfix] + postfix - } else if ndirect > maxDistanceSymbol_bound[npostfix]+postfix { - return ndirect + maxDistanceSymbol_diff[npostfix] - } else { - return maxDistanceSymbol_bound[npostfix] + maxDistanceSymbol_diff[npostfix] + postfix - } -} - -/* Invariant: input stream is never overconsumed: - - invalid input implies that the whole stream is invalid -> any amount of - input could be read and discarded - - when result is "needs more input", then at least one more byte is REQUIRED - to complete decoding; all input data MUST be consumed by decoder, so - client could swap the input buffer - - when result is "needs more output" decoder MUST ensure that it doesn't - hold more than 7 bits in bit reader; this saves client from swapping input - buffer ahead of time - - when result is "success" decoder MUST return all unused data back to input - buffer; this is possible because the invariant is held on enter */ -func decoderDecompressStream(s *Reader, available_in *uint, next_in *[]byte, available_out *uint, next_out *[]byte) int { - var result int = decoderSuccess - var br *bitReader = &s.br - - /* Do not try to process further in a case of unrecoverable error. */ - if int(s.error_code) < 0 { - return decoderResultError - } - - if *available_out != 0 && (next_out == nil || *next_out == nil) { - return saveErrorCode(s, decoderErrorInvalidArguments) - } - - if *available_out == 0 { - next_out = nil - } - if s.buffer_length == 0 { /* Just connect bit reader to input stream. */ - br.input_len = *available_in - br.input = *next_in - br.byte_pos = 0 - } else { - /* At least one byte of input is required. 
More than one byte of input may - be required to complete the transaction -> reading more data must be - done in a loop -> do it in a main loop. */ - result = decoderNeedsMoreInput - - br.input = s.buffer.u8[:] - br.byte_pos = 0 - } - - /* State machine */ - for { - if result != decoderSuccess { - /* Error, needs more input/output. */ - if result == decoderNeedsMoreInput { - if s.ringbuffer != nil { /* Pro-actively push output. */ - var intermediate_result int = writeRingBuffer(s, available_out, next_out, nil, true) - - /* WriteRingBuffer checks s->meta_block_remaining_len validity. */ - if int(intermediate_result) < 0 { - result = intermediate_result - break - } - } - - if s.buffer_length != 0 { /* Used with internal buffer. */ - if br.byte_pos == br.input_len { - /* Successfully finished read transaction. - Accumulator contains less than 8 bits, because internal buffer - is expanded byte-by-byte until it is enough to complete read. */ - s.buffer_length = 0 - - /* Switch to input stream and restart. */ - result = decoderSuccess - - br.input_len = *available_in - br.input = *next_in - br.byte_pos = 0 - continue - } else if *available_in != 0 { - /* Not enough data in buffer, but can take one more byte from - input stream. */ - result = decoderSuccess - - s.buffer.u8[s.buffer_length] = (*next_in)[0] - s.buffer_length++ - br.input_len = uint(s.buffer_length) - *next_in = (*next_in)[1:] - (*available_in)-- - - /* Retry with more data in buffer. */ - continue - } - - /* Can't finish reading and no more input. */ - break - /* Input stream doesn't contain enough input. */ - } else { - /* Copy tail to internal buffer and return. */ - *next_in = br.input[br.byte_pos:] - - *available_in = br.input_len - br.byte_pos - for *available_in != 0 { - s.buffer.u8[s.buffer_length] = (*next_in)[0] - s.buffer_length++ - *next_in = (*next_in)[1:] - (*available_in)-- - } - - break - } - } - - /* Unreachable. */ - - /* Fail or needs more output. 
*/ - if s.buffer_length != 0 { - /* Just consumed the buffered input and produced some output. Otherwise - it would result in "needs more input". Reset internal buffer. */ - s.buffer_length = 0 - } else { - /* Using input stream in last iteration. When decoder switches to input - stream it has less than 8 bits in accumulator, so it is safe to - return unused accumulator bits there. */ - bitReaderUnload(br) - - *available_in = br.input_len - br.byte_pos - *next_in = br.input[br.byte_pos:] - } - - break - } - - switch s.state { - /* Prepare to the first read. */ - case stateUninited: - if !warmupBitReader(br) { - result = decoderNeedsMoreInput - break - } - - /* Decode window size. */ - result = decodeWindowBits(s, br) /* Reads 1..8 bits. */ - if result != decoderSuccess { - break - } - - if s.large_window { - s.state = stateLargeWindowBits - break - } - - s.state = stateInitialize - - case stateLargeWindowBits: - if !safeReadBits(br, 6, &s.window_bits) { - result = decoderNeedsMoreInput - break - } - - if s.window_bits < largeMinWbits || s.window_bits > largeMaxWbits { - result = decoderErrorFormatWindowBits - break - } - - s.state = stateInitialize - fallthrough - - /* Maximum distance, see section 9.1. of the spec. */ - /* Fall through. */ - case stateInitialize: - s.max_backward_distance = (1 << s.window_bits) - windowGap - - /* Allocate memory for both block_type_trees and block_len_trees. */ - s.block_type_trees = make([]huffmanCode, (3 * (huffmanMaxSize258 + huffmanMaxSize26))) - - if s.block_type_trees == nil { - result = decoderErrorAllocBlockTypeTrees - break - } - - s.block_len_trees = s.block_type_trees[3*huffmanMaxSize258:] - - s.state = stateMetablockBegin - fallthrough - - /* Fall through. */ - case stateMetablockBegin: - decoderStateMetablockBegin(s) - - s.state = stateMetablockHeader - fallthrough - - /* Fall through. */ - case stateMetablockHeader: - result = decodeMetaBlockLength(s, br) - /* Reads 2 - 31 bits. 
*/ - if result != decoderSuccess { - break - } - - if s.is_metadata != 0 || s.is_uncompressed != 0 { - if !bitReaderJumpToByteBoundary(br) { - result = decoderErrorFormatPadding1 - break - } - } - - if s.is_metadata != 0 { - s.state = stateMetadata - break - } - - if s.meta_block_remaining_len == 0 { - s.state = stateMetablockDone - break - } - - calculateRingBufferSize(s) - if s.is_uncompressed != 0 { - s.state = stateUncompressed - break - } - - s.loop_counter = 0 - s.state = stateHuffmanCode0 - - case stateUncompressed: - result = copyUncompressedBlockToOutput(available_out, next_out, nil, s) - if result == decoderSuccess { - s.state = stateMetablockDone - } - - case stateMetadata: - for ; s.meta_block_remaining_len > 0; s.meta_block_remaining_len-- { - var bits uint32 - - /* Read one byte and ignore it. */ - if !safeReadBits(br, 8, &bits) { - result = decoderNeedsMoreInput - break - } - } - - if result == decoderSuccess { - s.state = stateMetablockDone - } - - case stateHuffmanCode0: - if s.loop_counter >= 3 { - s.state = stateMetablockHeader2 - break - } - - /* Reads 1..11 bits. 
*/ - result = decodeVarLenUint8(s, br, &s.num_block_types[s.loop_counter]) - - if result != decoderSuccess { - break - } - - s.num_block_types[s.loop_counter]++ - if s.num_block_types[s.loop_counter] < 2 { - s.loop_counter++ - break - } - - s.state = stateHuffmanCode1 - fallthrough - - case stateHuffmanCode1: - { - var alphabet_size uint32 = s.num_block_types[s.loop_counter] + 2 - var tree_offset int = s.loop_counter * huffmanMaxSize258 - result = readHuffmanCode(alphabet_size, alphabet_size, s.block_type_trees[tree_offset:], nil, s) - if result != decoderSuccess { - break - } - s.state = stateHuffmanCode2 - } - fallthrough - - case stateHuffmanCode2: - { - var alphabet_size uint32 = numBlockLenSymbols - var tree_offset int = s.loop_counter * huffmanMaxSize26 - result = readHuffmanCode(alphabet_size, alphabet_size, s.block_len_trees[tree_offset:], nil, s) - if result != decoderSuccess { - break - } - s.state = stateHuffmanCode3 - } - fallthrough - - case stateHuffmanCode3: - var tree_offset int = s.loop_counter * huffmanMaxSize26 - if !safeReadBlockLength(s, &s.block_length[s.loop_counter], s.block_len_trees[tree_offset:], br) { - result = decoderNeedsMoreInput - break - } - - s.loop_counter++ - s.state = stateHuffmanCode0 - - case stateMetablockHeader2: - { - var bits uint32 - if !safeReadBits(br, 6, &bits) { - result = decoderNeedsMoreInput - break - } - - s.distance_postfix_bits = bits & bitMask(2) - bits >>= 2 - s.num_direct_distance_codes = numDistanceShortCodes + (bits << s.distance_postfix_bits) - s.distance_postfix_mask = int(bitMask(s.distance_postfix_bits)) - s.context_modes = make([]byte, uint(s.num_block_types[0])) - if s.context_modes == nil { - result = decoderErrorAllocContextModes - break - } - - s.loop_counter = 0 - s.state = stateContextModes - } - fallthrough - - case stateContextModes: - result = readContextModes(s) - - if result != decoderSuccess { - break - } - - s.state = stateContextMap1 - fallthrough - - case stateContextMap1: - result = 
decodeContextMap(s.num_block_types[0]<= 3 { - prepareLiteralDecoding(s) - s.dist_context_map_slice = s.dist_context_map - s.htree_command = []huffmanCode(s.insert_copy_hgroup.htrees[0]) - if !ensureRingBuffer(s) { - result = decoderErrorAllocRingBuffer2 - break - } - - s.state = stateCommandBegin - } - - case stateCommandBegin, stateCommandInner, stateCommandPostDecodeLiterals, stateCommandPostWrapCopy: - result = processCommands(s) - - if result == decoderNeedsMoreInput { - result = safeProcessCommands(s) - } - - case stateCommandInnerWrite, stateCommandPostWrite1, stateCommandPostWrite2: - result = writeRingBuffer(s, available_out, next_out, nil, false) - - if result != decoderSuccess { - break - } - - wrapRingBuffer(s) - if s.ringbuffer_size == 1<= uint64(block_size) { - return 0 - } - return block_size - uint(delta) -} - -/* Wraps 64-bit input position to 32-bit ring-buffer position preserving - "not-a-first-lap" feature. */ -func wrapPosition(position uint64) uint32 { - var result uint32 = uint32(position) - var gb uint64 = position >> 30 - if gb > 2 { - /* Wrap every 2GiB; The first 3GB are continuous. */ - result = result&((1<<30)-1) | (uint32((gb-1)&1)+1)<<30 - } - - return result -} - -func hashTableSize(max_table_size uint, input_size uint) uint { - var htsize uint = 256 - for htsize < max_table_size && htsize < input_size { - htsize <<= 1 - } - - return htsize -} - -func getHashTable(s *Writer, quality int, input_size uint, table_size *uint) []int { - var max_table_size uint = maxHashTableSize(quality) - var htsize uint = hashTableSize(max_table_size, input_size) - /* Use smaller hash table when input.size() is smaller, since we - fill the table, incurring O(hash table size) overhead for - compression, and if the input is short, we won't need that - many hash table entries anyway. */ - - var table []int - assert(max_table_size >= 256) - if quality == fastOnePassCompressionQuality { - /* Only odd shifts are supported by fast-one-pass. 
*/ - if htsize&0xAAAAA == 0 { - htsize <<= 1 - } - } - - if htsize <= uint(len(s.small_table_)) { - table = s.small_table_[:] - } else { - if htsize > s.large_table_size_ { - s.large_table_size_ = htsize - s.large_table_ = nil - s.large_table_ = make([]int, htsize) - } - - table = s.large_table_ - } - - *table_size = htsize - for i := 0; i < int(htsize); i++ { - table[i] = 0 - } - return table -} - -func encodeWindowBits(lgwin int, large_window bool, bw *bitWriter) { - if large_window { - bw.writeBits(14, uint64((lgwin&0x3F)<<8|0x11)) - } else { - if lgwin == 16 { - bw.writeBits(1, 0) - } else if lgwin == 17 { - bw.writeBits(7, 1) - } else if lgwin > 17 { - bw.writeBits(4, uint64((lgwin-17)<<1|0x01)) - } else { - bw.writeBits(7, uint64((lgwin-8)<<4|0x01)) - } - } -} - -/* Decide about the context map based on the ability of the prediction - ability of the previous byte UTF8-prefix on the next byte. The - prediction ability is calculated as Shannon entropy. Here we need - Shannon entropy instead of 'BitsEntropy' since the prefix will be - encoded with the remaining 6 bits of the following byte, and - BitsEntropy will assume that symbol to be stored alone using Huffman - coding. 
*/ - -var kStaticContextMapContinuation = [64]uint32{ - 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -} -var kStaticContextMapSimpleUTF8 = [64]uint32{ - 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -} - -func chooseContextMap(quality int, bigram_histo []uint32, num_literal_contexts *uint, literal_context_map *[]uint32) { - var monogram_histo = [3]uint32{0} - var two_prefix_histo = [6]uint32{0} - var total uint - var i uint - var dummy uint - var entropy [4]float64 - for i = 0; i < 9; i++ { - monogram_histo[i%3] += bigram_histo[i] - two_prefix_histo[i%6] += bigram_histo[i] - } - - entropy[1] = shannonEntropy(monogram_histo[:], 3, &dummy) - entropy[2] = (shannonEntropy(two_prefix_histo[:], 3, &dummy) + shannonEntropy(two_prefix_histo[3:], 3, &dummy)) - entropy[3] = 0 - for i = 0; i < 3; i++ { - entropy[3] += shannonEntropy(bigram_histo[3*i:], 3, &dummy) - } - - total = uint(monogram_histo[0] + monogram_histo[1] + monogram_histo[2]) - assert(total != 0) - entropy[0] = 1.0 / float64(total) - entropy[1] *= entropy[0] - entropy[2] *= entropy[0] - entropy[3] *= entropy[0] - - if quality < minQualityForHqContextModeling { - /* 3 context models is a bit slower, don't use it at lower qualities. */ - entropy[3] = entropy[1] * 10 - } - - /* If expected savings by symbol are less than 0.2 bits, skip the - context modeling -- in exchange for faster decoding speed. 
*/ - if entropy[1]-entropy[2] < 0.2 && entropy[1]-entropy[3] < 0.2 { - *num_literal_contexts = 1 - } else if entropy[2]-entropy[3] < 0.02 { - *num_literal_contexts = 2 - *literal_context_map = kStaticContextMapSimpleUTF8[:] - } else { - *num_literal_contexts = 3 - *literal_context_map = kStaticContextMapContinuation[:] - } -} - -/* Decide if we want to use a more complex static context map containing 13 - context values, based on the entropy reduction of histograms over the - first 5 bits of literals. */ - -var kStaticContextMapComplexUTF8 = [64]uint32{ - 11, 11, 12, 12, /* 0 special */ - 0, 0, 0, 0, /* 4 lf */ - 1, 1, 9, 9, /* 8 space */ - 2, 2, 2, 2, /* !, first after space/lf and after something else. */ - 1, 1, 1, 1, /* " */ - 8, 3, 3, 3, /* % */ - 1, 1, 1, 1, /* ({[ */ - 2, 2, 2, 2, /* }]) */ - 8, 4, 4, 4, /* :; */ - 8, 7, 4, 4, /* . */ - 8, 0, 0, 0, /* > */ - 3, 3, 3, 3, /* [0..9] */ - 5, 5, 10, 5, /* [A-Z] */ - 5, 5, 10, 5, - 6, 6, 6, 6, /* [a-z] */ - 6, 6, 6, 6, -} - -func shouldUseComplexStaticContextMap(input []byte, start_pos uint, length uint, mask uint, quality int, size_hint uint, num_literal_contexts *uint, literal_context_map *[]uint32) bool { - /* Try the more complex static context map only for long data. */ - if size_hint < 1<<20 { - return false - } else { - var end_pos uint = start_pos + length - var combined_histo = [32]uint32{0} - var context_histo = [13][32]uint32{[32]uint32{0}} - var total uint32 = 0 - var entropy [3]float64 - var dummy uint - var i uint - var utf8_lut contextLUT = getContextLUT(contextUTF8) - /* To make entropy calculations faster and to fit on the stack, we collect - histograms over the 5 most significant bits of literals. One histogram - without context and 13 additional histograms for each context value. 
*/ - for ; start_pos+64 <= end_pos; start_pos += 4096 { - var stride_end_pos uint = start_pos + 64 - var prev2 byte = input[start_pos&mask] - var prev1 byte = input[(start_pos+1)&mask] - var pos uint - - /* To make the analysis of the data faster we only examine 64 byte long - strides at every 4kB intervals. */ - for pos = start_pos + 2; pos < stride_end_pos; pos++ { - var literal byte = input[pos&mask] - var context byte = byte(kStaticContextMapComplexUTF8[getContext(prev1, prev2, utf8_lut)]) - total++ - combined_histo[literal>>3]++ - context_histo[context][literal>>3]++ - prev2 = prev1 - prev1 = literal - } - } - - entropy[1] = shannonEntropy(combined_histo[:], 32, &dummy) - entropy[2] = 0 - for i = 0; i < 13; i++ { - entropy[2] += shannonEntropy(context_histo[i][0:], 32, &dummy) - } - - entropy[0] = 1.0 / float64(total) - entropy[1] *= entropy[0] - entropy[2] *= entropy[0] - - /* The triggering heuristics below were tuned by compressing the individual - files of the silesia corpus. If we skip this kind of context modeling - for not very well compressible input (i.e. entropy using context modeling - is 60% of maximal entropy) or if expected savings by symbol are less - than 0.2 bits, then in every case when it triggers, the final compression - ratio is improved. Note however that this heuristics might be too strict - for some cases and could be tuned further. 
*/ - if entropy[2] > 3.0 || entropy[1]-entropy[2] < 0.2 { - return false - } else { - *num_literal_contexts = 13 - *literal_context_map = kStaticContextMapComplexUTF8[:] - return true - } - } -} - -func decideOverLiteralContextModeling(input []byte, start_pos uint, length uint, mask uint, quality int, size_hint uint, num_literal_contexts *uint, literal_context_map *[]uint32) { - if quality < minQualityForContextModeling || length < 64 { - return - } else if shouldUseComplexStaticContextMap(input, start_pos, length, mask, quality, size_hint, num_literal_contexts, literal_context_map) { - } else /* Context map was already set, nothing else to do. */ - { - var end_pos uint = start_pos + length - /* Gather bi-gram data of the UTF8 byte prefixes. To make the analysis of - UTF8 data faster we only examine 64 byte long strides at every 4kB - intervals. */ - - var bigram_prefix_histo = [9]uint32{0} - for ; start_pos+64 <= end_pos; start_pos += 4096 { - var lut = [4]int{0, 0, 1, 2} - var stride_end_pos uint = start_pos + 64 - var prev int = lut[input[start_pos&mask]>>6] * 3 - var pos uint - for pos = start_pos + 1; pos < stride_end_pos; pos++ { - var literal byte = input[pos&mask] - bigram_prefix_histo[prev+lut[literal>>6]]++ - prev = lut[literal>>6] * 3 - } - } - - chooseContextMap(quality, bigram_prefix_histo[0:], num_literal_contexts, literal_context_map) - } -} - -func shouldCompress_encode(data []byte, mask uint, last_flush_pos uint64, bytes uint, num_literals uint, num_commands uint) bool { - /* TODO: find more precise minimal block overhead. 
*/ - if bytes <= 2 { - return false - } - if num_commands < (bytes>>8)+2 { - if float64(num_literals) > 0.99*float64(bytes) { - var literal_histo = [256]uint32{0} - const kSampleRate uint32 = 13 - const kMinEntropy float64 = 7.92 - var bit_cost_threshold float64 = float64(bytes) * kMinEntropy / float64(kSampleRate) - var t uint = uint((uint32(bytes) + kSampleRate - 1) / kSampleRate) - var pos uint32 = uint32(last_flush_pos) - var i uint - for i = 0; i < t; i++ { - literal_histo[data[pos&uint32(mask)]]++ - pos += kSampleRate - } - - if bitsEntropy(literal_histo[:], 256) > bit_cost_threshold { - return false - } - } - } - - return true -} - -/* Chooses the literal context mode for a metablock */ -func chooseContextMode(params *encoderParams, data []byte, pos uint, mask uint, length uint) int { - /* We only do the computation for the option of something else than - CONTEXT_UTF8 for the highest qualities */ - if params.quality >= minQualityForHqBlockSplitting && !isMostlyUTF8(data, pos, mask, length, kMinUTF8Ratio) { - return contextSigned - } - - return contextUTF8 -} - -func writeMetaBlockInternal(data []byte, mask uint, last_flush_pos uint64, bytes uint, is_last bool, literal_context_mode int, params *encoderParams, prev_byte byte, prev_byte2 byte, num_literals uint, commands []command, saved_dist_cache []int, dist_cache []int, bw *bitWriter) { - var wrapped_last_flush_pos uint32 = wrapPosition(last_flush_pos) - var literal_context_lut contextLUT = getContextLUT(literal_context_mode) - var block_params encoderParams = *params - - if bytes == 0 { - /* Write the ISLAST and ISEMPTY bits. */ - bw.writeBits(2, 3) - bw.jumpToByteBoundary() - return - } - - if !shouldCompress_encode(data, mask, last_flush_pos, bytes, num_literals, uint(len(commands))) { - /* Restore the distance cache, as its last update by - CreateBackwardReferences is now unused. 
*/ - copy(dist_cache, saved_dist_cache[:4]) - - storeUncompressedMetaBlock(is_last, data, uint(wrapped_last_flush_pos), mask, bytes, bw) - return - } - - savedPos := bw.getPos() - if params.quality <= maxQualityForStaticEntropyCodes { - storeMetaBlockFast(data, uint(wrapped_last_flush_pos), bytes, mask, is_last, params, commands, bw) - } else if params.quality < minQualityForBlockSplit { - storeMetaBlockTrivial(data, uint(wrapped_last_flush_pos), bytes, mask, is_last, params, commands, bw) - } else { - mb := getMetaBlockSplit() - if params.quality < minQualityForHqBlockSplitting { - var num_literal_contexts uint = 1 - var literal_context_map []uint32 = nil - if !params.disable_literal_context_modeling { - decideOverLiteralContextModeling(data, uint(wrapped_last_flush_pos), bytes, mask, params.quality, params.size_hint, &num_literal_contexts, &literal_context_map) - } - - buildMetaBlockGreedy(data, uint(wrapped_last_flush_pos), mask, prev_byte, prev_byte2, literal_context_lut, num_literal_contexts, literal_context_map, commands, mb) - } else { - buildMetaBlock(data, uint(wrapped_last_flush_pos), mask, &block_params, prev_byte, prev_byte2, commands, literal_context_mode, mb) - } - - if params.quality >= minQualityForOptimizeHistograms { - /* The number of distance symbols effectively used for distance - histograms. It might be less than distance alphabet size - for "Large Window Brotli" (32-bit). */ - var num_effective_dist_codes uint32 = block_params.dist.alphabet_size - if num_effective_dist_codes > numHistogramDistanceSymbols { - num_effective_dist_codes = numHistogramDistanceSymbols - } - - optimizeHistograms(num_effective_dist_codes, mb) - } - - storeMetaBlock(data, uint(wrapped_last_flush_pos), bytes, mask, prev_byte, prev_byte2, is_last, &block_params, literal_context_mode, commands, mb, bw) - freeMetaBlockSplit(mb) - } - - if bytes+4 < bw.getPos()>>3 { - /* Restore the distance cache and last byte. 
*/ - copy(dist_cache, saved_dist_cache[:4]) - bw.rewind(savedPos) - storeUncompressedMetaBlock(is_last, data, uint(wrapped_last_flush_pos), mask, bytes, bw) - } -} - -func chooseDistanceParams(params *encoderParams) { - var distance_postfix_bits uint32 = 0 - var num_direct_distance_codes uint32 = 0 - - if params.quality >= minQualityForNonzeroDistanceParams { - var ndirect_msb uint32 - if params.mode == modeFont { - distance_postfix_bits = 1 - num_direct_distance_codes = 12 - } else { - distance_postfix_bits = params.dist.distance_postfix_bits - num_direct_distance_codes = params.dist.num_direct_distance_codes - } - - ndirect_msb = (num_direct_distance_codes >> distance_postfix_bits) & 0x0F - if distance_postfix_bits > maxNpostfix || num_direct_distance_codes > maxNdirect || ndirect_msb<>25)), (last_command.dist_prefix_&0x3FF == 0), &last_command.cmd_prefix_) - } -} - -/* - Processes the accumulated input data and writes - the new output meta-block to s.dest, if one has been - created (otherwise the processed input data is buffered internally). - If |is_last| or |force_flush| is true, an output meta-block is - always created. However, until |is_last| is true encoder may retain up - to 7 bits of the last byte of output. To force encoder to dump the remaining - bits use WriteMetadata() to append an empty meta-data block. - Returns false if the size of the input data is larger than - input_block_size(). -*/ -func encodeData(s *Writer, is_last bool, force_flush bool) bool { - var delta uint64 = unprocessedInputSize(s) - var bytes uint32 = uint32(delta) - var wrapped_last_processed_pos uint32 = wrapPosition(s.last_processed_pos_) - var data []byte - var mask uint32 - var literal_context_mode int - - data = s.ringbuffer_.buffer_ - mask = s.ringbuffer_.mask_ - - /* Adding more blocks after "last" block is forbidden. 
*/ - if s.is_last_block_emitted_ { - return false - } - if is_last { - s.is_last_block_emitted_ = true - } - - if delta > uint64(inputBlockSize(s)) { - return false - } - - if s.params.quality == fastTwoPassCompressionQuality { - if s.command_buf_ == nil || cap(s.command_buf_) < int(kCompressFragmentTwoPassBlockSize) { - s.command_buf_ = make([]uint32, kCompressFragmentTwoPassBlockSize) - s.literal_buf_ = make([]byte, kCompressFragmentTwoPassBlockSize) - } else { - s.command_buf_ = s.command_buf_[:kCompressFragmentTwoPassBlockSize] - s.literal_buf_ = s.literal_buf_[:kCompressFragmentTwoPassBlockSize] - } - } - - if s.params.quality == fastOnePassCompressionQuality || s.params.quality == fastTwoPassCompressionQuality { - var table_size uint - var table []int - - if delta == 0 && !is_last { - /* We have no new input data and we don't have to finish the stream, so - nothing to do. */ - return true - } - - table = getHashTable(s, s.params.quality, uint(bytes), &table_size) - if s.params.quality == fastOnePassCompressionQuality { - compressFragmentFast(data[wrapped_last_processed_pos&mask:], uint(bytes), is_last, table, table_size, s.cmd_depths_[:], s.cmd_bits_[:], &s.cmd_code_numbits_, s.cmd_code_[:], &s.bw) - } else { - compressFragmentTwoPass(data[wrapped_last_processed_pos&mask:], uint(bytes), is_last, s.command_buf_, s.literal_buf_, table, table_size, &s.bw) - } - - updateLastProcessedPos(s) - s.writeOutput(s.bw.dst) - s.bw.dst = s.bw.dst[:0] - return true - } - { - /* Theoretical max number of commands is 1 per 2 bytes. */ - newsize := len(s.commands) + int(bytes)/2 + 1 - if newsize > cap(s.commands) { - /* Reserve a bit more memory to allow merging with a next block - without reallocation: that would impact speed. 
*/ - newsize += int(bytes/4) + 16 - - new_commands := make([]command, len(s.commands), newsize) - if s.commands != nil { - copy(new_commands, s.commands) - } - - s.commands = new_commands - } - } - - initOrStitchToPreviousBlock(&s.hasher_, data, uint(mask), &s.params, uint(wrapped_last_processed_pos), uint(bytes), is_last) - - literal_context_mode = chooseContextMode(&s.params, data, uint(wrapPosition(s.last_flush_pos_)), uint(mask), uint(s.input_pos_-s.last_flush_pos_)) - - if len(s.commands) != 0 && s.last_insert_len_ == 0 { - extendLastCommand(s, &bytes, &wrapped_last_processed_pos) - } - - if s.params.quality == zopflificationQuality { - assert(s.params.hasher.type_ == 10) - createZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_.(*h10), s.dist_cache_[:], &s.last_insert_len_, &s.commands, &s.num_literals_) - } else if s.params.quality == hqZopflificationQuality { - assert(s.params.hasher.type_ == 10) - createHqZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_, s.dist_cache_[:], &s.last_insert_len_, &s.commands, &s.num_literals_) - } else { - createBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_, s.dist_cache_[:], &s.last_insert_len_, &s.commands, &s.num_literals_) - } - { - var max_length uint = maxMetablockSize(&s.params) - var max_literals uint = max_length / 8 - max_commands := int(max_length / 8) - var processed_bytes uint = uint(s.input_pos_ - s.last_flush_pos_) - var next_input_fits_metablock bool = (processed_bytes+inputBlockSize(s) <= max_length) - var should_flush bool = (s.params.quality < minQualityForBlockSplit && s.num_literals_+uint(len(s.commands)) >= maxNumDelayedSymbols) - /* If maximal possible additional block doesn't fit metablock, flush now. */ - /* TODO: Postpone decision until next block arrives? 
*/ - - /* If block splitting is not used, then flush as soon as there is some - amount of commands / literals produced. */ - if !is_last && !force_flush && !should_flush && next_input_fits_metablock && s.num_literals_ < max_literals && len(s.commands) < max_commands { - /* Merge with next input block. Everything will happen later. */ - if updateLastProcessedPos(s) { - hasherReset(s.hasher_) - } - - return true - } - } - - /* Create the last insert-only command. */ - if s.last_insert_len_ > 0 { - s.commands = append(s.commands, makeInsertCommand(s.last_insert_len_)) - s.num_literals_ += s.last_insert_len_ - s.last_insert_len_ = 0 - } - - if !is_last && s.input_pos_ == s.last_flush_pos_ { - /* We have no new input data and we don't have to finish the stream, so - nothing to do. */ - return true - } - - assert(s.input_pos_ >= s.last_flush_pos_) - assert(s.input_pos_ > s.last_flush_pos_ || is_last) - assert(s.input_pos_-s.last_flush_pos_ <= 1<<24) - { - var metablock_size uint32 = uint32(s.input_pos_ - s.last_flush_pos_) - writeMetaBlockInternal(data, uint(mask), s.last_flush_pos_, uint(metablock_size), is_last, literal_context_mode, &s.params, s.prev_byte_, s.prev_byte2_, s.num_literals_, s.commands, s.saved_dist_cache_[:], s.dist_cache_[:], &s.bw) - s.last_flush_pos_ = s.input_pos_ - if updateLastProcessedPos(s) { - hasherReset(s.hasher_) - } - - if s.last_flush_pos_ > 0 { - s.prev_byte_ = data[(uint32(s.last_flush_pos_)-1)&mask] - } - - if s.last_flush_pos_ > 1 { - s.prev_byte2_ = data[uint32(s.last_flush_pos_-2)&mask] - } - - s.commands = s.commands[:0] - s.num_literals_ = 0 - - /* Save the state of the distance cache in case we need to restore it for - emitting an uncompressed block. */ - copy(s.saved_dist_cache_[:], s.dist_cache_[:]) - - s.writeOutput(s.bw.dst) - s.bw.dst = s.bw.dst[:0] - return true - } -} - -/* Dumps remaining output bits and metadata header to s.bw. - REQUIRED: |block_size| <= (1 << 24). 
*/ -func writeMetadataHeader(s *Writer, block_size uint) { - bw := &s.bw - - bw.writeBits(1, 0) - bw.writeBits(2, 3) - bw.writeBits(1, 0) - if block_size == 0 { - bw.writeBits(2, 0) - } else { - var nbits uint32 - if block_size == 1 { - nbits = 0 - } else { - nbits = log2FloorNonZero(uint(uint32(block_size)-1)) + 1 - } - var nbytes uint32 = (nbits + 7) / 8 - bw.writeBits(2, uint64(nbytes)) - bw.writeBits(uint(8*nbytes), uint64(block_size)-1) - } - - bw.jumpToByteBoundary() -} - -func injectBytePaddingBlock(s *Writer) { - /* is_last = 0, data_nibbles = 11, reserved = 0, meta_nibbles = 00 */ - s.bw.writeBits(6, 0x6) - s.bw.jumpToByteBoundary() - s.writeOutput(s.bw.dst) - s.bw.dst = s.bw.dst[:0] -} - -func checkFlushComplete(s *Writer) { - if s.stream_state_ == streamFlushRequested && s.err == nil { - s.stream_state_ = streamProcessing - } -} - -func encoderCompressStreamFast(s *Writer, op int, available_in *uint, next_in *[]byte) bool { - var block_size_limit uint = uint(1) << s.params.lgwin - var buf_size uint = brotli_min_size_t(kCompressFragmentTwoPassBlockSize, brotli_min_size_t(*available_in, block_size_limit)) - var command_buf []uint32 = nil - var literal_buf []byte = nil - if s.params.quality != fastOnePassCompressionQuality && s.params.quality != fastTwoPassCompressionQuality { - return false - } - - if s.params.quality == fastTwoPassCompressionQuality { - if s.command_buf_ == nil || cap(s.command_buf_) < int(buf_size) { - s.command_buf_ = make([]uint32, buf_size) - s.literal_buf_ = make([]byte, buf_size) - } else { - s.command_buf_ = s.command_buf_[:buf_size] - s.literal_buf_ = s.literal_buf_[:buf_size] - } - - command_buf = s.command_buf_ - literal_buf = s.literal_buf_ - } - - for { - if s.stream_state_ == streamFlushRequested && s.bw.nbits&7 != 0 { - injectBytePaddingBlock(s) - continue - } - - /* Compress block only when stream is not - finished, there is no pending flush request, and there is either - additional input or pending operation. 
*/ - if s.stream_state_ == streamProcessing && (*available_in != 0 || op != int(operationProcess)) { - var block_size uint = brotli_min_size_t(block_size_limit, *available_in) - var is_last bool = (*available_in == block_size) && (op == int(operationFinish)) - var force_flush bool = (*available_in == block_size) && (op == int(operationFlush)) - var table_size uint - var table []int - - if force_flush && block_size == 0 { - s.stream_state_ = streamFlushRequested - continue - } - - table = getHashTable(s, s.params.quality, block_size, &table_size) - - if s.params.quality == fastOnePassCompressionQuality { - compressFragmentFast(*next_in, block_size, is_last, table, table_size, s.cmd_depths_[:], s.cmd_bits_[:], &s.cmd_code_numbits_, s.cmd_code_[:], &s.bw) - } else { - compressFragmentTwoPass(*next_in, block_size, is_last, command_buf, literal_buf, table, table_size, &s.bw) - } - - *next_in = (*next_in)[block_size:] - *available_in -= block_size - s.writeOutput(s.bw.dst) - s.bw.dst = s.bw.dst[:0] - - if force_flush { - s.stream_state_ = streamFlushRequested - } - if is_last { - s.stream_state_ = streamFinished - } - continue - } - - break - } - - checkFlushComplete(s) - return true -} - -func processMetadata(s *Writer, available_in *uint, next_in *[]byte) bool { - if *available_in > 1<<24 { - return false - } - - /* Switch to metadata block workflow, if required. 
*/ - if s.stream_state_ == streamProcessing { - s.remaining_metadata_bytes_ = uint32(*available_in) - s.stream_state_ = streamMetadataHead - } - - if s.stream_state_ != streamMetadataHead && s.stream_state_ != streamMetadataBody { - return false - } - - for { - if s.stream_state_ == streamFlushRequested && s.bw.nbits&7 != 0 { - injectBytePaddingBlock(s) - continue - } - - if s.input_pos_ != s.last_flush_pos_ { - var result bool = encodeData(s, false, true) - if !result { - return false - } - continue - } - - if s.stream_state_ == streamMetadataHead { - writeMetadataHeader(s, uint(s.remaining_metadata_bytes_)) - s.writeOutput(s.bw.dst) - s.bw.dst = s.bw.dst[:0] - s.stream_state_ = streamMetadataBody - continue - } else { - /* Exit workflow only when there is no more input and no more output. - Otherwise client may continue producing empty metadata blocks. */ - if s.remaining_metadata_bytes_ == 0 { - s.remaining_metadata_bytes_ = math.MaxUint32 - s.stream_state_ = streamProcessing - break - } - - /* This guarantees progress in "TakeOutput" workflow. */ - var c uint32 = brotli_min_uint32_t(s.remaining_metadata_bytes_, 16) - copy(s.tiny_buf_.u8[:], (*next_in)[:c]) - *next_in = (*next_in)[c:] - *available_in -= uint(c) - s.remaining_metadata_bytes_ -= c - s.writeOutput(s.tiny_buf_.u8[:c]) - - continue - } - } - - return true -} - -func updateSizeHint(s *Writer, available_in uint) { - if s.params.size_hint == 0 { - var delta uint64 = unprocessedInputSize(s) - var tail uint64 = uint64(available_in) - var limit uint32 = 1 << 30 - var total uint32 - if (delta >= uint64(limit)) || (tail >= uint64(limit)) || ((delta + tail) >= uint64(limit)) { - total = limit - } else { - total = uint32(delta + tail) - } - - s.params.size_hint = uint(total) - } -} - -func encoderCompressStream(s *Writer, op int, available_in *uint, next_in *[]byte) bool { - if !ensureInitialized(s) { - return false - } - - /* Unfinished metadata block; check requirements. 
*/ - if s.remaining_metadata_bytes_ != math.MaxUint32 { - if uint32(*available_in) != s.remaining_metadata_bytes_ { - return false - } - if op != int(operationEmitMetadata) { - return false - } - } - - if op == int(operationEmitMetadata) { - updateSizeHint(s, 0) /* First data metablock might be emitted here. */ - return processMetadata(s, available_in, next_in) - } - - if s.stream_state_ == streamMetadataHead || s.stream_state_ == streamMetadataBody { - return false - } - - if s.stream_state_ != streamProcessing && *available_in != 0 { - return false - } - - if s.params.quality == fastOnePassCompressionQuality || s.params.quality == fastTwoPassCompressionQuality { - return encoderCompressStreamFast(s, op, available_in, next_in) - } - - for { - var remaining_block_size uint = remainingInputBlockSize(s) - - if remaining_block_size != 0 && *available_in != 0 { - var copy_input_size uint = brotli_min_size_t(remaining_block_size, *available_in) - copyInputToRingBuffer(s, copy_input_size, *next_in) - *next_in = (*next_in)[copy_input_size:] - *available_in -= copy_input_size - continue - } - - if s.stream_state_ == streamFlushRequested && s.bw.nbits&7 != 0 { - injectBytePaddingBlock(s) - continue - } - - /* Compress data only when stream is not - finished and there is no pending flush request. 
*/ - if s.stream_state_ == streamProcessing { - if remaining_block_size == 0 || op != int(operationProcess) { - var is_last bool = ((*available_in == 0) && op == int(operationFinish)) - var force_flush bool = ((*available_in == 0) && op == int(operationFlush)) - var result bool - updateSizeHint(s, *available_in) - result = encodeData(s, is_last, force_flush) - if !result { - return false - } - if force_flush { - s.stream_state_ = streamFlushRequested - } - if is_last { - s.stream_state_ = streamFinished - } - continue - } - } - - break - } - - checkFlushComplete(s) - return true -} - -func (w *Writer) writeOutput(data []byte) { - if w.err != nil { - return - } - - _, w.err = w.dst.Write(data) - if w.err == nil { - checkFlushComplete(w) - } -} diff --git a/vendor/github.com/andybalholm/brotli/encoder_dict.go b/vendor/github.com/andybalholm/brotli/encoder_dict.go deleted file mode 100644 index 55c051c623..0000000000 --- a/vendor/github.com/andybalholm/brotli/encoder_dict.go +++ /dev/null @@ -1,22 +0,0 @@ -package brotli - -/* Dictionary data (words and transforms) for 1 possible context */ -type encoderDictionary struct { - words *dictionary - cutoffTransformsCount uint32 - cutoffTransforms uint64 - hash_table []uint16 - buckets []uint16 - dict_words []dictWord -} - -func initEncoderDictionary(dict *encoderDictionary) { - dict.words = getDictionary() - - dict.hash_table = kStaticDictionaryHash[:] - dict.buckets = kStaticDictionaryBuckets[:] - dict.dict_words = kStaticDictionaryWords[:] - - dict.cutoffTransformsCount = kCutoffTransformsCount - dict.cutoffTransforms = kCutoffTransforms -} diff --git a/vendor/github.com/andybalholm/brotli/entropy_encode.go b/vendor/github.com/andybalholm/brotli/entropy_encode.go deleted file mode 100644 index 3f469a3dd9..0000000000 --- a/vendor/github.com/andybalholm/brotli/entropy_encode.go +++ /dev/null @@ -1,592 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2010 Google Inc. 
All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Entropy encoding (Huffman) utilities. */ - -/* A node of a Huffman tree. */ -type huffmanTree struct { - total_count_ uint32 - index_left_ int16 - index_right_or_value_ int16 -} - -func initHuffmanTree(self *huffmanTree, count uint32, left int16, right int16) { - self.total_count_ = count - self.index_left_ = left - self.index_right_or_value_ = right -} - -/* Input size optimized Shell sort. */ -type huffmanTreeComparator func(huffmanTree, huffmanTree) bool - -var sortHuffmanTreeItems_gaps = []uint{132, 57, 23, 10, 4, 1} - -func sortHuffmanTreeItems(items []huffmanTree, n uint, comparator huffmanTreeComparator) { - if n < 13 { - /* Insertion sort. */ - var i uint - for i = 1; i < n; i++ { - var tmp huffmanTree = items[i] - var k uint = i - var j uint = i - 1 - for comparator(tmp, items[j]) { - items[k] = items[j] - k = j - if j == 0 { - break - } - j-- - } - - items[k] = tmp - } - - return - } else { - var g int - if n < 57 { - g = 2 - } else { - g = 0 - } - for ; g < 6; g++ { - var gap uint = sortHuffmanTreeItems_gaps[g] - var i uint - for i = gap; i < n; i++ { - var j uint = i - var tmp huffmanTree = items[i] - for ; j >= gap && comparator(tmp, items[j-gap]); j -= gap { - items[j] = items[j-gap] - } - - items[j] = tmp - } - } - } -} - -/* Returns 1 if assignment of depths succeeded, otherwise 0. 
*/ -func setDepth(p0 int, pool []huffmanTree, depth []byte, max_depth int) bool { - var stack [16]int - var level int = 0 - var p int = p0 - assert(max_depth <= 15) - stack[0] = -1 - for { - if pool[p].index_left_ >= 0 { - level++ - if level > max_depth { - return false - } - stack[level] = int(pool[p].index_right_or_value_) - p = int(pool[p].index_left_) - continue - } else { - depth[pool[p].index_right_or_value_] = byte(level) - } - - for level >= 0 && stack[level] == -1 { - level-- - } - if level < 0 { - return true - } - p = stack[level] - stack[level] = -1 - } -} - -/* Sort the root nodes, least popular first. */ -func sortHuffmanTree(v0 huffmanTree, v1 huffmanTree) bool { - if v0.total_count_ != v1.total_count_ { - return v0.total_count_ < v1.total_count_ - } - - return v0.index_right_or_value_ > v1.index_right_or_value_ -} - -/* This function will create a Huffman tree. - - The catch here is that the tree cannot be arbitrarily deep. - Brotli specifies a maximum depth of 15 bits for "code trees" - and 7 bits for "code length code trees." - - count_limit is the value that is to be faked as the minimum value - and this minimum value is raised until the tree matches the - maximum length requirement. - - This algorithm is not of excellent performance for very long data blocks, - especially when population counts are longer than 2**tree_limit, but - we are not planning to use this with extremely long blocks. - - See http://en.wikipedia.org/wiki/Huffman_coding */ -func createHuffmanTree(data []uint32, length uint, tree_limit int, tree []huffmanTree, depth []byte) { - var count_limit uint32 - var sentinel huffmanTree - initHuffmanTree(&sentinel, math.MaxUint32, -1, -1) - - /* For block sizes below 64 kB, we never need to do a second iteration - of this loop. Probably all of our block sizes will be smaller than - that, so this loop is mostly of academic interest. If we actually - would need this, we would be better off with the Katajainen algorithm. 
*/ - for count_limit = 1; ; count_limit *= 2 { - var n uint = 0 - var i uint - var j uint - var k uint - for i = length; i != 0; { - i-- - if data[i] != 0 { - var count uint32 = brotli_max_uint32_t(data[i], count_limit) - initHuffmanTree(&tree[n], count, -1, int16(i)) - n++ - } - } - - if n == 1 { - depth[tree[0].index_right_or_value_] = 1 /* Only one element. */ - break - } - - sortHuffmanTreeItems(tree, n, huffmanTreeComparator(sortHuffmanTree)) - - /* The nodes are: - [0, n): the sorted leaf nodes that we start with. - [n]: we add a sentinel here. - [n + 1, 2n): new parent nodes are added here, starting from - (n+1). These are naturally in ascending order. - [2n]: we add a sentinel at the end as well. - There will be (2n+1) elements at the end. */ - tree[n] = sentinel - - tree[n+1] = sentinel - - i = 0 /* Points to the next leaf node. */ - j = n + 1 /* Points to the next non-leaf node. */ - for k = n - 1; k != 0; k-- { - var left uint - var right uint - if tree[i].total_count_ <= tree[j].total_count_ { - left = i - i++ - } else { - left = j - j++ - } - - if tree[i].total_count_ <= tree[j].total_count_ { - right = i - i++ - } else { - right = j - j++ - } - { - /* The sentinel node becomes the parent node. */ - var j_end uint = 2*n - k - tree[j_end].total_count_ = tree[left].total_count_ + tree[right].total_count_ - tree[j_end].index_left_ = int16(left) - tree[j_end].index_right_or_value_ = int16(right) - - /* Add back the last sentinel node. */ - tree[j_end+1] = sentinel - } - } - - if setDepth(int(2*n-1), tree[0:], depth, tree_limit) { - /* We need to pack the Huffman tree in tree_limit bits. If this was not - successful, add fake entities to the lowest values and retry. 
*/ - break - } - } -} - -func reverse(v []byte, start uint, end uint) { - end-- - for start < end { - var tmp byte = v[start] - v[start] = v[end] - v[end] = tmp - start++ - end-- - } -} - -func writeHuffmanTreeRepetitions(previous_value byte, value byte, repetitions uint, tree_size *uint, tree []byte, extra_bits_data []byte) { - assert(repetitions > 0) - if previous_value != value { - tree[*tree_size] = value - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - repetitions-- - } - - if repetitions == 7 { - tree[*tree_size] = value - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - repetitions-- - } - - if repetitions < 3 { - var i uint - for i = 0; i < repetitions; i++ { - tree[*tree_size] = value - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - } - } else { - var start uint = *tree_size - repetitions -= 3 - for { - tree[*tree_size] = repeatPreviousCodeLength - extra_bits_data[*tree_size] = byte(repetitions & 0x3) - (*tree_size)++ - repetitions >>= 2 - if repetitions == 0 { - break - } - - repetitions-- - } - - reverse(tree, start, *tree_size) - reverse(extra_bits_data, start, *tree_size) - } -} - -func writeHuffmanTreeRepetitionsZeros(repetitions uint, tree_size *uint, tree []byte, extra_bits_data []byte) { - if repetitions == 11 { - tree[*tree_size] = 0 - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - repetitions-- - } - - if repetitions < 3 { - var i uint - for i = 0; i < repetitions; i++ { - tree[*tree_size] = 0 - extra_bits_data[*tree_size] = 0 - (*tree_size)++ - } - } else { - var start uint = *tree_size - repetitions -= 3 - for { - tree[*tree_size] = repeatZeroCodeLength - extra_bits_data[*tree_size] = byte(repetitions & 0x7) - (*tree_size)++ - repetitions >>= 3 - if repetitions == 0 { - break - } - - repetitions-- - } - - reverse(tree, start, *tree_size) - reverse(extra_bits_data, start, *tree_size) - } -} - -/* Change the population counts in a way that the consequent - Huffman tree compression, especially its RLE-part will be more - likely to 
compress this data more efficiently. - - length contains the size of the histogram. - counts contains the population counts. - good_for_rle is a buffer of at least length size */ -func optimizeHuffmanCountsForRLE(length uint, counts []uint32, good_for_rle []byte) { - var nonzero_count uint = 0 - var stride uint - var limit uint - var sum uint - var streak_limit uint = 1240 - var i uint - /* Let's make the Huffman code more compatible with RLE encoding. */ - for i = 0; i < length; i++ { - if counts[i] != 0 { - nonzero_count++ - } - } - - if nonzero_count < 16 { - return - } - - for length != 0 && counts[length-1] == 0 { - length-- - } - - if length == 0 { - return /* All zeros. */ - } - - /* Now counts[0..length - 1] does not have trailing zeros. */ - { - var nonzeros uint = 0 - var smallest_nonzero uint32 = 1 << 30 - for i = 0; i < length; i++ { - if counts[i] != 0 { - nonzeros++ - if smallest_nonzero > counts[i] { - smallest_nonzero = counts[i] - } - } - } - - if nonzeros < 5 { - /* Small histogram will model it well. */ - return - } - - if smallest_nonzero < 4 { - var zeros uint = length - nonzeros - if zeros < 6 { - for i = 1; i < length-1; i++ { - if counts[i-1] != 0 && counts[i] == 0 && counts[i+1] != 0 { - counts[i] = 1 - } - } - } - } - - if nonzeros < 28 { - return - } - } - - /* 2) Let's mark all population counts that already can be encoded - with an RLE code. */ - for i := 0; i < int(length); i++ { - good_for_rle[i] = 0 - } - { - var symbol uint32 = counts[0] - /* Let's not spoil any of the existing good RLE codes. - Mark any seq of 0's that is longer as 5 as a good_for_rle. - Mark any seq of non-0's that is longer as 7 as a good_for_rle. 
*/ - - var step uint = 0 - for i = 0; i <= length; i++ { - if i == length || counts[i] != symbol { - if (symbol == 0 && step >= 5) || (symbol != 0 && step >= 7) { - var k uint - for k = 0; k < step; k++ { - good_for_rle[i-k-1] = 1 - } - } - - step = 1 - if i != length { - symbol = counts[i] - } - } else { - step++ - } - } - } - - /* 3) Let's replace those population counts that lead to more RLE codes. - Math here is in 24.8 fixed point representation. */ - stride = 0 - - limit = uint(256*(counts[0]+counts[1]+counts[2])/3 + 420) - sum = 0 - for i = 0; i <= length; i++ { - if i == length || good_for_rle[i] != 0 || (i != 0 && good_for_rle[i-1] != 0) || (256*counts[i]-uint32(limit)+uint32(streak_limit)) >= uint32(2*streak_limit) { - if stride >= 4 || (stride >= 3 && sum == 0) { - var k uint - var count uint = (sum + stride/2) / stride - /* The stride must end, collapse what we have, if we have enough (4). */ - if count == 0 { - count = 1 - } - - if sum == 0 { - /* Don't make an all zeros stride to be upgraded to ones. */ - count = 0 - } - - for k = 0; k < stride; k++ { - /* We don't want to change value at counts[i], - that is already belonging to the next stride. Thus - 1. */ - counts[i-k-1] = uint32(count) - } - } - - stride = 0 - sum = 0 - if i < length-2 { - /* All interesting strides have a count of at least 4, */ - /* at least when non-zeros. 
*/ - limit = uint(256*(counts[i]+counts[i+1]+counts[i+2])/3 + 420) - } else if i < length { - limit = uint(256 * counts[i]) - } else { - limit = 0 - } - } - - stride++ - if i != length { - sum += uint(counts[i]) - if stride >= 4 { - limit = (256*sum + stride/2) / stride - } - - if stride == 4 { - limit += 120 - } - } - } -} - -func decideOverRLEUse(depth []byte, length uint, use_rle_for_non_zero *bool, use_rle_for_zero *bool) { - var total_reps_zero uint = 0 - var total_reps_non_zero uint = 0 - var count_reps_zero uint = 1 - var count_reps_non_zero uint = 1 - var i uint - for i = 0; i < length; { - var value byte = depth[i] - var reps uint = 1 - var k uint - for k = i + 1; k < length && depth[k] == value; k++ { - reps++ - } - - if reps >= 3 && value == 0 { - total_reps_zero += reps - count_reps_zero++ - } - - if reps >= 4 && value != 0 { - total_reps_non_zero += reps - count_reps_non_zero++ - } - - i += reps - } - - *use_rle_for_non_zero = total_reps_non_zero > count_reps_non_zero*2 - *use_rle_for_zero = total_reps_zero > count_reps_zero*2 -} - -/* Write a Huffman tree from bit depths into the bit-stream representation - of a Huffman tree. The generated Huffman tree is to be compressed once - more using a Huffman tree */ -func writeHuffmanTree(depth []byte, length uint, tree_size *uint, tree []byte, extra_bits_data []byte) { - var previous_value byte = initialRepeatedCodeLength - var i uint - var use_rle_for_non_zero bool = false - var use_rle_for_zero bool = false - var new_length uint = length - /* Throw away trailing zeros. */ - for i = 0; i < length; i++ { - if depth[length-i-1] == 0 { - new_length-- - } else { - break - } - } - - /* First gather statistics on if it is a good idea to do RLE. */ - if length > 50 { - /* Find RLE coding for longer codes. - Shorter codes seem not to benefit from RLE. */ - decideOverRLEUse(depth, new_length, &use_rle_for_non_zero, &use_rle_for_zero) - } - - /* Actual RLE coding. 
*/ - for i = 0; i < new_length; { - var value byte = depth[i] - var reps uint = 1 - if (value != 0 && use_rle_for_non_zero) || (value == 0 && use_rle_for_zero) { - var k uint - for k = i + 1; k < new_length && depth[k] == value; k++ { - reps++ - } - } - - if value == 0 { - writeHuffmanTreeRepetitionsZeros(reps, tree_size, tree, extra_bits_data) - } else { - writeHuffmanTreeRepetitions(previous_value, value, reps, tree_size, tree, extra_bits_data) - previous_value = value - } - - i += reps - } -} - -var reverseBits_kLut = [16]uint{ - 0x00, - 0x08, - 0x04, - 0x0C, - 0x02, - 0x0A, - 0x06, - 0x0E, - 0x01, - 0x09, - 0x05, - 0x0D, - 0x03, - 0x0B, - 0x07, - 0x0F, -} - -func reverseBits(num_bits uint, bits uint16) uint16 { - var retval uint = reverseBits_kLut[bits&0x0F] - var i uint - for i = 4; i < num_bits; i += 4 { - retval <<= 4 - bits = uint16(bits >> 4) - retval |= reverseBits_kLut[bits&0x0F] - } - - retval >>= ((0 - num_bits) & 0x03) - return uint16(retval) -} - -/* 0..15 are values for bits */ -const maxHuffmanBits = 16 - -/* Get the actual bit values for a tree of bit depths. */ -func convertBitDepthsToSymbols(depth []byte, len uint, bits []uint16) { - var bl_count = [maxHuffmanBits]uint16{0} - var next_code [maxHuffmanBits]uint16 - var i uint - /* In Brotli, all bit depths are [1..15] - 0 bit depth means that the symbol does not exist. 
*/ - - var code int = 0 - for i = 0; i < len; i++ { - bl_count[depth[i]]++ - } - - bl_count[0] = 0 - next_code[0] = 0 - for i = 1; i < maxHuffmanBits; i++ { - code = (code + int(bl_count[i-1])) << 1 - next_code[i] = uint16(code) - } - - for i = 0; i < len; i++ { - if depth[i] != 0 { - bits[i] = reverseBits(uint(depth[i]), next_code[depth[i]]) - next_code[depth[i]]++ - } - } -} diff --git a/vendor/github.com/andybalholm/brotli/entropy_encode_static.go b/vendor/github.com/andybalholm/brotli/entropy_encode_static.go deleted file mode 100644 index 2543f8f07d..0000000000 --- a/vendor/github.com/andybalholm/brotli/entropy_encode_static.go +++ /dev/null @@ -1,4396 +0,0 @@ -package brotli - -var kCodeLengthDepth = [18]byte{4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 0, 4, 4} - -var kStaticCommandCodeDepth = [numCommandSymbols]byte{ - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 
9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 9, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 
11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, - 11, -} - -var kStaticDistanceCodeDepth = [64]byte{ - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, - 6, -} - -var kCodeLengthBits = [18]uint32{0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 15, 31, 0, 11, 7} - -func storeStaticCodeLengthCode(bw *bitWriter) { - bw.writeBits(32, 0x55555554) - bw.writeBits(8, 0xFF) -} - -var kZeroRepsBits = [numCommandSymbols]uint64{ - 0x00000000, - 0x00000000, - 0x00000000, - 0x00000007, - 0x00000017, - 0x00000027, - 0x00000037, - 0x00000047, - 0x00000057, - 0x00000067, - 0x00000077, - 0x00000770, - 0x00000b87, - 0x00001387, - 0x00001b87, - 0x00002387, - 0x00002b87, - 0x00003387, - 0x00003b87, - 0x00000397, - 0x00000b97, - 0x00001397, - 0x00001b97, - 0x00002397, - 0x00002b97, - 0x00003397, - 0x00003b97, - 0x000003a7, - 0x00000ba7, - 0x000013a7, - 0x00001ba7, - 0x000023a7, - 0x00002ba7, - 0x000033a7, - 0x00003ba7, - 0x000003b7, - 0x00000bb7, - 0x000013b7, - 0x00001bb7, - 0x000023b7, - 0x00002bb7, - 0x000033b7, - 0x00003bb7, - 0x000003c7, - 0x00000bc7, - 0x000013c7, - 0x00001bc7, - 0x000023c7, - 0x00002bc7, - 0x000033c7, - 0x00003bc7, - 0x000003d7, - 
0x00000bd7, - 0x000013d7, - 0x00001bd7, - 0x000023d7, - 0x00002bd7, - 0x000033d7, - 0x00003bd7, - 0x000003e7, - 0x00000be7, - 0x000013e7, - 0x00001be7, - 0x000023e7, - 0x00002be7, - 0x000033e7, - 0x00003be7, - 0x000003f7, - 0x00000bf7, - 0x000013f7, - 0x00001bf7, - 0x000023f7, - 0x00002bf7, - 0x000033f7, - 0x00003bf7, - 0x0001c387, - 0x0005c387, - 0x0009c387, - 0x000dc387, - 0x0011c387, - 0x0015c387, - 0x0019c387, - 0x001dc387, - 0x0001cb87, - 0x0005cb87, - 0x0009cb87, - 0x000dcb87, - 0x0011cb87, - 0x0015cb87, - 0x0019cb87, - 0x001dcb87, - 0x0001d387, - 0x0005d387, - 0x0009d387, - 0x000dd387, - 0x0011d387, - 0x0015d387, - 0x0019d387, - 0x001dd387, - 0x0001db87, - 0x0005db87, - 0x0009db87, - 0x000ddb87, - 0x0011db87, - 0x0015db87, - 0x0019db87, - 0x001ddb87, - 0x0001e387, - 0x0005e387, - 0x0009e387, - 0x000de387, - 0x0011e387, - 0x0015e387, - 0x0019e387, - 0x001de387, - 0x0001eb87, - 0x0005eb87, - 0x0009eb87, - 0x000deb87, - 0x0011eb87, - 0x0015eb87, - 0x0019eb87, - 0x001deb87, - 0x0001f387, - 0x0005f387, - 0x0009f387, - 0x000df387, - 0x0011f387, - 0x0015f387, - 0x0019f387, - 0x001df387, - 0x0001fb87, - 0x0005fb87, - 0x0009fb87, - 0x000dfb87, - 0x0011fb87, - 0x0015fb87, - 0x0019fb87, - 0x001dfb87, - 0x0001c397, - 0x0005c397, - 0x0009c397, - 0x000dc397, - 0x0011c397, - 0x0015c397, - 0x0019c397, - 0x001dc397, - 0x0001cb97, - 0x0005cb97, - 0x0009cb97, - 0x000dcb97, - 0x0011cb97, - 0x0015cb97, - 0x0019cb97, - 0x001dcb97, - 0x0001d397, - 0x0005d397, - 0x0009d397, - 0x000dd397, - 0x0011d397, - 0x0015d397, - 0x0019d397, - 0x001dd397, - 0x0001db97, - 0x0005db97, - 0x0009db97, - 0x000ddb97, - 0x0011db97, - 0x0015db97, - 0x0019db97, - 0x001ddb97, - 0x0001e397, - 0x0005e397, - 0x0009e397, - 0x000de397, - 0x0011e397, - 0x0015e397, - 0x0019e397, - 0x001de397, - 0x0001eb97, - 0x0005eb97, - 0x0009eb97, - 0x000deb97, - 0x0011eb97, - 0x0015eb97, - 0x0019eb97, - 0x001deb97, - 0x0001f397, - 0x0005f397, - 0x0009f397, - 0x000df397, - 0x0011f397, - 0x0015f397, - 0x0019f397, - 0x001df397, 
- 0x0001fb97, - 0x0005fb97, - 0x0009fb97, - 0x000dfb97, - 0x0011fb97, - 0x0015fb97, - 0x0019fb97, - 0x001dfb97, - 0x0001c3a7, - 0x0005c3a7, - 0x0009c3a7, - 0x000dc3a7, - 0x0011c3a7, - 0x0015c3a7, - 0x0019c3a7, - 0x001dc3a7, - 0x0001cba7, - 0x0005cba7, - 0x0009cba7, - 0x000dcba7, - 0x0011cba7, - 0x0015cba7, - 0x0019cba7, - 0x001dcba7, - 0x0001d3a7, - 0x0005d3a7, - 0x0009d3a7, - 0x000dd3a7, - 0x0011d3a7, - 0x0015d3a7, - 0x0019d3a7, - 0x001dd3a7, - 0x0001dba7, - 0x0005dba7, - 0x0009dba7, - 0x000ddba7, - 0x0011dba7, - 0x0015dba7, - 0x0019dba7, - 0x001ddba7, - 0x0001e3a7, - 0x0005e3a7, - 0x0009e3a7, - 0x000de3a7, - 0x0011e3a7, - 0x0015e3a7, - 0x0019e3a7, - 0x001de3a7, - 0x0001eba7, - 0x0005eba7, - 0x0009eba7, - 0x000deba7, - 0x0011eba7, - 0x0015eba7, - 0x0019eba7, - 0x001deba7, - 0x0001f3a7, - 0x0005f3a7, - 0x0009f3a7, - 0x000df3a7, - 0x0011f3a7, - 0x0015f3a7, - 0x0019f3a7, - 0x001df3a7, - 0x0001fba7, - 0x0005fba7, - 0x0009fba7, - 0x000dfba7, - 0x0011fba7, - 0x0015fba7, - 0x0019fba7, - 0x001dfba7, - 0x0001c3b7, - 0x0005c3b7, - 0x0009c3b7, - 0x000dc3b7, - 0x0011c3b7, - 0x0015c3b7, - 0x0019c3b7, - 0x001dc3b7, - 0x0001cbb7, - 0x0005cbb7, - 0x0009cbb7, - 0x000dcbb7, - 0x0011cbb7, - 0x0015cbb7, - 0x0019cbb7, - 0x001dcbb7, - 0x0001d3b7, - 0x0005d3b7, - 0x0009d3b7, - 0x000dd3b7, - 0x0011d3b7, - 0x0015d3b7, - 0x0019d3b7, - 0x001dd3b7, - 0x0001dbb7, - 0x0005dbb7, - 0x0009dbb7, - 0x000ddbb7, - 0x0011dbb7, - 0x0015dbb7, - 0x0019dbb7, - 0x001ddbb7, - 0x0001e3b7, - 0x0005e3b7, - 0x0009e3b7, - 0x000de3b7, - 0x0011e3b7, - 0x0015e3b7, - 0x0019e3b7, - 0x001de3b7, - 0x0001ebb7, - 0x0005ebb7, - 0x0009ebb7, - 0x000debb7, - 0x0011ebb7, - 0x0015ebb7, - 0x0019ebb7, - 0x001debb7, - 0x0001f3b7, - 0x0005f3b7, - 0x0009f3b7, - 0x000df3b7, - 0x0011f3b7, - 0x0015f3b7, - 0x0019f3b7, - 0x001df3b7, - 0x0001fbb7, - 0x0005fbb7, - 0x0009fbb7, - 0x000dfbb7, - 0x0011fbb7, - 0x0015fbb7, - 0x0019fbb7, - 0x001dfbb7, - 0x0001c3c7, - 0x0005c3c7, - 0x0009c3c7, - 0x000dc3c7, - 0x0011c3c7, - 0x0015c3c7, - 
0x0019c3c7, - 0x001dc3c7, - 0x0001cbc7, - 0x0005cbc7, - 0x0009cbc7, - 0x000dcbc7, - 0x0011cbc7, - 0x0015cbc7, - 0x0019cbc7, - 0x001dcbc7, - 0x0001d3c7, - 0x0005d3c7, - 0x0009d3c7, - 0x000dd3c7, - 0x0011d3c7, - 0x0015d3c7, - 0x0019d3c7, - 0x001dd3c7, - 0x0001dbc7, - 0x0005dbc7, - 0x0009dbc7, - 0x000ddbc7, - 0x0011dbc7, - 0x0015dbc7, - 0x0019dbc7, - 0x001ddbc7, - 0x0001e3c7, - 0x0005e3c7, - 0x0009e3c7, - 0x000de3c7, - 0x0011e3c7, - 0x0015e3c7, - 0x0019e3c7, - 0x001de3c7, - 0x0001ebc7, - 0x0005ebc7, - 0x0009ebc7, - 0x000debc7, - 0x0011ebc7, - 0x0015ebc7, - 0x0019ebc7, - 0x001debc7, - 0x0001f3c7, - 0x0005f3c7, - 0x0009f3c7, - 0x000df3c7, - 0x0011f3c7, - 0x0015f3c7, - 0x0019f3c7, - 0x001df3c7, - 0x0001fbc7, - 0x0005fbc7, - 0x0009fbc7, - 0x000dfbc7, - 0x0011fbc7, - 0x0015fbc7, - 0x0019fbc7, - 0x001dfbc7, - 0x0001c3d7, - 0x0005c3d7, - 0x0009c3d7, - 0x000dc3d7, - 0x0011c3d7, - 0x0015c3d7, - 0x0019c3d7, - 0x001dc3d7, - 0x0001cbd7, - 0x0005cbd7, - 0x0009cbd7, - 0x000dcbd7, - 0x0011cbd7, - 0x0015cbd7, - 0x0019cbd7, - 0x001dcbd7, - 0x0001d3d7, - 0x0005d3d7, - 0x0009d3d7, - 0x000dd3d7, - 0x0011d3d7, - 0x0015d3d7, - 0x0019d3d7, - 0x001dd3d7, - 0x0001dbd7, - 0x0005dbd7, - 0x0009dbd7, - 0x000ddbd7, - 0x0011dbd7, - 0x0015dbd7, - 0x0019dbd7, - 0x001ddbd7, - 0x0001e3d7, - 0x0005e3d7, - 0x0009e3d7, - 0x000de3d7, - 0x0011e3d7, - 0x0015e3d7, - 0x0019e3d7, - 0x001de3d7, - 0x0001ebd7, - 0x0005ebd7, - 0x0009ebd7, - 0x000debd7, - 0x0011ebd7, - 0x0015ebd7, - 0x0019ebd7, - 0x001debd7, - 0x0001f3d7, - 0x0005f3d7, - 0x0009f3d7, - 0x000df3d7, - 0x0011f3d7, - 0x0015f3d7, - 0x0019f3d7, - 0x001df3d7, - 0x0001fbd7, - 0x0005fbd7, - 0x0009fbd7, - 0x000dfbd7, - 0x0011fbd7, - 0x0015fbd7, - 0x0019fbd7, - 0x001dfbd7, - 0x0001c3e7, - 0x0005c3e7, - 0x0009c3e7, - 0x000dc3e7, - 0x0011c3e7, - 0x0015c3e7, - 0x0019c3e7, - 0x001dc3e7, - 0x0001cbe7, - 0x0005cbe7, - 0x0009cbe7, - 0x000dcbe7, - 0x0011cbe7, - 0x0015cbe7, - 0x0019cbe7, - 0x001dcbe7, - 0x0001d3e7, - 0x0005d3e7, - 0x0009d3e7, - 0x000dd3e7, - 0x0011d3e7, 
- 0x0015d3e7, - 0x0019d3e7, - 0x001dd3e7, - 0x0001dbe7, - 0x0005dbe7, - 0x0009dbe7, - 0x000ddbe7, - 0x0011dbe7, - 0x0015dbe7, - 0x0019dbe7, - 0x001ddbe7, - 0x0001e3e7, - 0x0005e3e7, - 0x0009e3e7, - 0x000de3e7, - 0x0011e3e7, - 0x0015e3e7, - 0x0019e3e7, - 0x001de3e7, - 0x0001ebe7, - 0x0005ebe7, - 0x0009ebe7, - 0x000debe7, - 0x0011ebe7, - 0x0015ebe7, - 0x0019ebe7, - 0x001debe7, - 0x0001f3e7, - 0x0005f3e7, - 0x0009f3e7, - 0x000df3e7, - 0x0011f3e7, - 0x0015f3e7, - 0x0019f3e7, - 0x001df3e7, - 0x0001fbe7, - 0x0005fbe7, - 0x0009fbe7, - 0x000dfbe7, - 0x0011fbe7, - 0x0015fbe7, - 0x0019fbe7, - 0x001dfbe7, - 0x0001c3f7, - 0x0005c3f7, - 0x0009c3f7, - 0x000dc3f7, - 0x0011c3f7, - 0x0015c3f7, - 0x0019c3f7, - 0x001dc3f7, - 0x0001cbf7, - 0x0005cbf7, - 0x0009cbf7, - 0x000dcbf7, - 0x0011cbf7, - 0x0015cbf7, - 0x0019cbf7, - 0x001dcbf7, - 0x0001d3f7, - 0x0005d3f7, - 0x0009d3f7, - 0x000dd3f7, - 0x0011d3f7, - 0x0015d3f7, - 0x0019d3f7, - 0x001dd3f7, - 0x0001dbf7, - 0x0005dbf7, - 0x0009dbf7, - 0x000ddbf7, - 0x0011dbf7, - 0x0015dbf7, - 0x0019dbf7, - 0x001ddbf7, - 0x0001e3f7, - 0x0005e3f7, - 0x0009e3f7, - 0x000de3f7, - 0x0011e3f7, - 0x0015e3f7, - 0x0019e3f7, - 0x001de3f7, - 0x0001ebf7, - 0x0005ebf7, - 0x0009ebf7, - 0x000debf7, - 0x0011ebf7, - 0x0015ebf7, - 0x0019ebf7, - 0x001debf7, - 0x0001f3f7, - 0x0005f3f7, - 0x0009f3f7, - 0x000df3f7, - 0x0011f3f7, - 0x0015f3f7, - 0x0019f3f7, - 0x001df3f7, - 0x0001fbf7, - 0x0005fbf7, - 0x0009fbf7, - 0x000dfbf7, - 0x0011fbf7, - 0x0015fbf7, - 0x0019fbf7, - 0x001dfbf7, - 0x00e1c387, - 0x02e1c387, - 0x04e1c387, - 0x06e1c387, - 0x08e1c387, - 0x0ae1c387, - 0x0ce1c387, - 0x0ee1c387, - 0x00e5c387, - 0x02e5c387, - 0x04e5c387, - 0x06e5c387, - 0x08e5c387, - 0x0ae5c387, - 0x0ce5c387, - 0x0ee5c387, - 0x00e9c387, - 0x02e9c387, - 0x04e9c387, - 0x06e9c387, - 0x08e9c387, - 0x0ae9c387, - 0x0ce9c387, - 0x0ee9c387, - 0x00edc387, - 0x02edc387, - 0x04edc387, - 0x06edc387, - 0x08edc387, - 0x0aedc387, - 0x0cedc387, - 0x0eedc387, - 0x00f1c387, - 0x02f1c387, - 0x04f1c387, - 
0x06f1c387, - 0x08f1c387, - 0x0af1c387, - 0x0cf1c387, - 0x0ef1c387, - 0x00f5c387, - 0x02f5c387, - 0x04f5c387, - 0x06f5c387, - 0x08f5c387, - 0x0af5c387, - 0x0cf5c387, - 0x0ef5c387, - 0x00f9c387, - 0x02f9c387, - 0x04f9c387, - 0x06f9c387, - 0x08f9c387, - 0x0af9c387, - 0x0cf9c387, - 0x0ef9c387, - 0x00fdc387, - 0x02fdc387, - 0x04fdc387, - 0x06fdc387, - 0x08fdc387, - 0x0afdc387, - 0x0cfdc387, - 0x0efdc387, - 0x00e1cb87, - 0x02e1cb87, - 0x04e1cb87, - 0x06e1cb87, - 0x08e1cb87, - 0x0ae1cb87, - 0x0ce1cb87, - 0x0ee1cb87, - 0x00e5cb87, - 0x02e5cb87, - 0x04e5cb87, - 0x06e5cb87, - 0x08e5cb87, - 0x0ae5cb87, - 0x0ce5cb87, - 0x0ee5cb87, - 0x00e9cb87, - 0x02e9cb87, - 0x04e9cb87, - 0x06e9cb87, - 0x08e9cb87, - 0x0ae9cb87, - 0x0ce9cb87, - 0x0ee9cb87, - 0x00edcb87, - 0x02edcb87, - 0x04edcb87, - 0x06edcb87, - 0x08edcb87, - 0x0aedcb87, - 0x0cedcb87, - 0x0eedcb87, - 0x00f1cb87, - 0x02f1cb87, - 0x04f1cb87, - 0x06f1cb87, - 0x08f1cb87, - 0x0af1cb87, - 0x0cf1cb87, - 0x0ef1cb87, - 0x00f5cb87, - 0x02f5cb87, - 0x04f5cb87, - 0x06f5cb87, - 0x08f5cb87, - 0x0af5cb87, - 0x0cf5cb87, - 0x0ef5cb87, - 0x00f9cb87, - 0x02f9cb87, - 0x04f9cb87, - 0x06f9cb87, - 0x08f9cb87, -} - -var kZeroRepsDepth = [numCommandSymbols]uint32{ - 0, - 4, - 8, - 7, - 7, - 7, - 7, - 7, - 7, - 7, - 7, - 11, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 14, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, 
- 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 
21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 21, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, - 28, -} - -var kNonZeroRepsBits = [numCommandSymbols]uint64{ - 0x0000000b, - 0x0000001b, - 0x0000002b, - 0x0000003b, - 0x000002cb, - 0x000006cb, - 0x00000acb, - 0x00000ecb, - 0x000002db, - 0x000006db, - 0x00000adb, - 0x00000edb, - 0x000002eb, - 0x000006eb, - 0x00000aeb, - 0x00000eeb, - 0x000002fb, - 0x000006fb, - 0x00000afb, - 0x00000efb, - 0x0000b2cb, - 0x0001b2cb, - 0x0002b2cb, - 0x0003b2cb, - 0x0000b6cb, - 0x0001b6cb, - 0x0002b6cb, - 0x0003b6cb, - 0x0000bacb, - 0x0001bacb, - 0x0002bacb, - 0x0003bacb, - 0x0000becb, - 0x0001becb, - 0x0002becb, - 0x0003becb, - 0x0000b2db, - 
0x0001b2db, - 0x0002b2db, - 0x0003b2db, - 0x0000b6db, - 0x0001b6db, - 0x0002b6db, - 0x0003b6db, - 0x0000badb, - 0x0001badb, - 0x0002badb, - 0x0003badb, - 0x0000bedb, - 0x0001bedb, - 0x0002bedb, - 0x0003bedb, - 0x0000b2eb, - 0x0001b2eb, - 0x0002b2eb, - 0x0003b2eb, - 0x0000b6eb, - 0x0001b6eb, - 0x0002b6eb, - 0x0003b6eb, - 0x0000baeb, - 0x0001baeb, - 0x0002baeb, - 0x0003baeb, - 0x0000beeb, - 0x0001beeb, - 0x0002beeb, - 0x0003beeb, - 0x0000b2fb, - 0x0001b2fb, - 0x0002b2fb, - 0x0003b2fb, - 0x0000b6fb, - 0x0001b6fb, - 0x0002b6fb, - 0x0003b6fb, - 0x0000bafb, - 0x0001bafb, - 0x0002bafb, - 0x0003bafb, - 0x0000befb, - 0x0001befb, - 0x0002befb, - 0x0003befb, - 0x002cb2cb, - 0x006cb2cb, - 0x00acb2cb, - 0x00ecb2cb, - 0x002db2cb, - 0x006db2cb, - 0x00adb2cb, - 0x00edb2cb, - 0x002eb2cb, - 0x006eb2cb, - 0x00aeb2cb, - 0x00eeb2cb, - 0x002fb2cb, - 0x006fb2cb, - 0x00afb2cb, - 0x00efb2cb, - 0x002cb6cb, - 0x006cb6cb, - 0x00acb6cb, - 0x00ecb6cb, - 0x002db6cb, - 0x006db6cb, - 0x00adb6cb, - 0x00edb6cb, - 0x002eb6cb, - 0x006eb6cb, - 0x00aeb6cb, - 0x00eeb6cb, - 0x002fb6cb, - 0x006fb6cb, - 0x00afb6cb, - 0x00efb6cb, - 0x002cbacb, - 0x006cbacb, - 0x00acbacb, - 0x00ecbacb, - 0x002dbacb, - 0x006dbacb, - 0x00adbacb, - 0x00edbacb, - 0x002ebacb, - 0x006ebacb, - 0x00aebacb, - 0x00eebacb, - 0x002fbacb, - 0x006fbacb, - 0x00afbacb, - 0x00efbacb, - 0x002cbecb, - 0x006cbecb, - 0x00acbecb, - 0x00ecbecb, - 0x002dbecb, - 0x006dbecb, - 0x00adbecb, - 0x00edbecb, - 0x002ebecb, - 0x006ebecb, - 0x00aebecb, - 0x00eebecb, - 0x002fbecb, - 0x006fbecb, - 0x00afbecb, - 0x00efbecb, - 0x002cb2db, - 0x006cb2db, - 0x00acb2db, - 0x00ecb2db, - 0x002db2db, - 0x006db2db, - 0x00adb2db, - 0x00edb2db, - 0x002eb2db, - 0x006eb2db, - 0x00aeb2db, - 0x00eeb2db, - 0x002fb2db, - 0x006fb2db, - 0x00afb2db, - 0x00efb2db, - 0x002cb6db, - 0x006cb6db, - 0x00acb6db, - 0x00ecb6db, - 0x002db6db, - 0x006db6db, - 0x00adb6db, - 0x00edb6db, - 0x002eb6db, - 0x006eb6db, - 0x00aeb6db, - 0x00eeb6db, - 0x002fb6db, - 0x006fb6db, - 0x00afb6db, - 0x00efb6db, 
- 0x002cbadb, - 0x006cbadb, - 0x00acbadb, - 0x00ecbadb, - 0x002dbadb, - 0x006dbadb, - 0x00adbadb, - 0x00edbadb, - 0x002ebadb, - 0x006ebadb, - 0x00aebadb, - 0x00eebadb, - 0x002fbadb, - 0x006fbadb, - 0x00afbadb, - 0x00efbadb, - 0x002cbedb, - 0x006cbedb, - 0x00acbedb, - 0x00ecbedb, - 0x002dbedb, - 0x006dbedb, - 0x00adbedb, - 0x00edbedb, - 0x002ebedb, - 0x006ebedb, - 0x00aebedb, - 0x00eebedb, - 0x002fbedb, - 0x006fbedb, - 0x00afbedb, - 0x00efbedb, - 0x002cb2eb, - 0x006cb2eb, - 0x00acb2eb, - 0x00ecb2eb, - 0x002db2eb, - 0x006db2eb, - 0x00adb2eb, - 0x00edb2eb, - 0x002eb2eb, - 0x006eb2eb, - 0x00aeb2eb, - 0x00eeb2eb, - 0x002fb2eb, - 0x006fb2eb, - 0x00afb2eb, - 0x00efb2eb, - 0x002cb6eb, - 0x006cb6eb, - 0x00acb6eb, - 0x00ecb6eb, - 0x002db6eb, - 0x006db6eb, - 0x00adb6eb, - 0x00edb6eb, - 0x002eb6eb, - 0x006eb6eb, - 0x00aeb6eb, - 0x00eeb6eb, - 0x002fb6eb, - 0x006fb6eb, - 0x00afb6eb, - 0x00efb6eb, - 0x002cbaeb, - 0x006cbaeb, - 0x00acbaeb, - 0x00ecbaeb, - 0x002dbaeb, - 0x006dbaeb, - 0x00adbaeb, - 0x00edbaeb, - 0x002ebaeb, - 0x006ebaeb, - 0x00aebaeb, - 0x00eebaeb, - 0x002fbaeb, - 0x006fbaeb, - 0x00afbaeb, - 0x00efbaeb, - 0x002cbeeb, - 0x006cbeeb, - 0x00acbeeb, - 0x00ecbeeb, - 0x002dbeeb, - 0x006dbeeb, - 0x00adbeeb, - 0x00edbeeb, - 0x002ebeeb, - 0x006ebeeb, - 0x00aebeeb, - 0x00eebeeb, - 0x002fbeeb, - 0x006fbeeb, - 0x00afbeeb, - 0x00efbeeb, - 0x002cb2fb, - 0x006cb2fb, - 0x00acb2fb, - 0x00ecb2fb, - 0x002db2fb, - 0x006db2fb, - 0x00adb2fb, - 0x00edb2fb, - 0x002eb2fb, - 0x006eb2fb, - 0x00aeb2fb, - 0x00eeb2fb, - 0x002fb2fb, - 0x006fb2fb, - 0x00afb2fb, - 0x00efb2fb, - 0x002cb6fb, - 0x006cb6fb, - 0x00acb6fb, - 0x00ecb6fb, - 0x002db6fb, - 0x006db6fb, - 0x00adb6fb, - 0x00edb6fb, - 0x002eb6fb, - 0x006eb6fb, - 0x00aeb6fb, - 0x00eeb6fb, - 0x002fb6fb, - 0x006fb6fb, - 0x00afb6fb, - 0x00efb6fb, - 0x002cbafb, - 0x006cbafb, - 0x00acbafb, - 0x00ecbafb, - 0x002dbafb, - 0x006dbafb, - 0x00adbafb, - 0x00edbafb, - 0x002ebafb, - 0x006ebafb, - 0x00aebafb, - 0x00eebafb, - 0x002fbafb, - 0x006fbafb, - 
0x00afbafb, - 0x00efbafb, - 0x002cbefb, - 0x006cbefb, - 0x00acbefb, - 0x00ecbefb, - 0x002dbefb, - 0x006dbefb, - 0x00adbefb, - 0x00edbefb, - 0x002ebefb, - 0x006ebefb, - 0x00aebefb, - 0x00eebefb, - 0x002fbefb, - 0x006fbefb, - 0x00afbefb, - 0x00efbefb, - 0x0b2cb2cb, - 0x1b2cb2cb, - 0x2b2cb2cb, - 0x3b2cb2cb, - 0x0b6cb2cb, - 0x1b6cb2cb, - 0x2b6cb2cb, - 0x3b6cb2cb, - 0x0bacb2cb, - 0x1bacb2cb, - 0x2bacb2cb, - 0x3bacb2cb, - 0x0becb2cb, - 0x1becb2cb, - 0x2becb2cb, - 0x3becb2cb, - 0x0b2db2cb, - 0x1b2db2cb, - 0x2b2db2cb, - 0x3b2db2cb, - 0x0b6db2cb, - 0x1b6db2cb, - 0x2b6db2cb, - 0x3b6db2cb, - 0x0badb2cb, - 0x1badb2cb, - 0x2badb2cb, - 0x3badb2cb, - 0x0bedb2cb, - 0x1bedb2cb, - 0x2bedb2cb, - 0x3bedb2cb, - 0x0b2eb2cb, - 0x1b2eb2cb, - 0x2b2eb2cb, - 0x3b2eb2cb, - 0x0b6eb2cb, - 0x1b6eb2cb, - 0x2b6eb2cb, - 0x3b6eb2cb, - 0x0baeb2cb, - 0x1baeb2cb, - 0x2baeb2cb, - 0x3baeb2cb, - 0x0beeb2cb, - 0x1beeb2cb, - 0x2beeb2cb, - 0x3beeb2cb, - 0x0b2fb2cb, - 0x1b2fb2cb, - 0x2b2fb2cb, - 0x3b2fb2cb, - 0x0b6fb2cb, - 0x1b6fb2cb, - 0x2b6fb2cb, - 0x3b6fb2cb, - 0x0bafb2cb, - 0x1bafb2cb, - 0x2bafb2cb, - 0x3bafb2cb, - 0x0befb2cb, - 0x1befb2cb, - 0x2befb2cb, - 0x3befb2cb, - 0x0b2cb6cb, - 0x1b2cb6cb, - 0x2b2cb6cb, - 0x3b2cb6cb, - 0x0b6cb6cb, - 0x1b6cb6cb, - 0x2b6cb6cb, - 0x3b6cb6cb, - 0x0bacb6cb, - 0x1bacb6cb, - 0x2bacb6cb, - 0x3bacb6cb, - 0x0becb6cb, - 0x1becb6cb, - 0x2becb6cb, - 0x3becb6cb, - 0x0b2db6cb, - 0x1b2db6cb, - 0x2b2db6cb, - 0x3b2db6cb, - 0x0b6db6cb, - 0x1b6db6cb, - 0x2b6db6cb, - 0x3b6db6cb, - 0x0badb6cb, - 0x1badb6cb, - 0x2badb6cb, - 0x3badb6cb, - 0x0bedb6cb, - 0x1bedb6cb, - 0x2bedb6cb, - 0x3bedb6cb, - 0x0b2eb6cb, - 0x1b2eb6cb, - 0x2b2eb6cb, - 0x3b2eb6cb, - 0x0b6eb6cb, - 0x1b6eb6cb, - 0x2b6eb6cb, - 0x3b6eb6cb, - 0x0baeb6cb, - 0x1baeb6cb, - 0x2baeb6cb, - 0x3baeb6cb, - 0x0beeb6cb, - 0x1beeb6cb, - 0x2beeb6cb, - 0x3beeb6cb, - 0x0b2fb6cb, - 0x1b2fb6cb, - 0x2b2fb6cb, - 0x3b2fb6cb, - 0x0b6fb6cb, - 0x1b6fb6cb, - 0x2b6fb6cb, - 0x3b6fb6cb, - 0x0bafb6cb, - 0x1bafb6cb, - 0x2bafb6cb, - 0x3bafb6cb, - 0x0befb6cb, 
- 0x1befb6cb, - 0x2befb6cb, - 0x3befb6cb, - 0x0b2cbacb, - 0x1b2cbacb, - 0x2b2cbacb, - 0x3b2cbacb, - 0x0b6cbacb, - 0x1b6cbacb, - 0x2b6cbacb, - 0x3b6cbacb, - 0x0bacbacb, - 0x1bacbacb, - 0x2bacbacb, - 0x3bacbacb, - 0x0becbacb, - 0x1becbacb, - 0x2becbacb, - 0x3becbacb, - 0x0b2dbacb, - 0x1b2dbacb, - 0x2b2dbacb, - 0x3b2dbacb, - 0x0b6dbacb, - 0x1b6dbacb, - 0x2b6dbacb, - 0x3b6dbacb, - 0x0badbacb, - 0x1badbacb, - 0x2badbacb, - 0x3badbacb, - 0x0bedbacb, - 0x1bedbacb, - 0x2bedbacb, - 0x3bedbacb, - 0x0b2ebacb, - 0x1b2ebacb, - 0x2b2ebacb, - 0x3b2ebacb, - 0x0b6ebacb, - 0x1b6ebacb, - 0x2b6ebacb, - 0x3b6ebacb, - 0x0baebacb, - 0x1baebacb, - 0x2baebacb, - 0x3baebacb, - 0x0beebacb, - 0x1beebacb, - 0x2beebacb, - 0x3beebacb, - 0x0b2fbacb, - 0x1b2fbacb, - 0x2b2fbacb, - 0x3b2fbacb, - 0x0b6fbacb, - 0x1b6fbacb, - 0x2b6fbacb, - 0x3b6fbacb, - 0x0bafbacb, - 0x1bafbacb, - 0x2bafbacb, - 0x3bafbacb, - 0x0befbacb, - 0x1befbacb, - 0x2befbacb, - 0x3befbacb, - 0x0b2cbecb, - 0x1b2cbecb, - 0x2b2cbecb, - 0x3b2cbecb, - 0x0b6cbecb, - 0x1b6cbecb, - 0x2b6cbecb, - 0x3b6cbecb, - 0x0bacbecb, - 0x1bacbecb, - 0x2bacbecb, - 0x3bacbecb, - 0x0becbecb, - 0x1becbecb, - 0x2becbecb, - 0x3becbecb, - 0x0b2dbecb, - 0x1b2dbecb, - 0x2b2dbecb, - 0x3b2dbecb, - 0x0b6dbecb, - 0x1b6dbecb, - 0x2b6dbecb, - 0x3b6dbecb, - 0x0badbecb, - 0x1badbecb, - 0x2badbecb, - 0x3badbecb, - 0x0bedbecb, - 0x1bedbecb, - 0x2bedbecb, - 0x3bedbecb, - 0x0b2ebecb, - 0x1b2ebecb, - 0x2b2ebecb, - 0x3b2ebecb, - 0x0b6ebecb, - 0x1b6ebecb, - 0x2b6ebecb, - 0x3b6ebecb, - 0x0baebecb, - 0x1baebecb, - 0x2baebecb, - 0x3baebecb, - 0x0beebecb, - 0x1beebecb, - 0x2beebecb, - 0x3beebecb, - 0x0b2fbecb, - 0x1b2fbecb, - 0x2b2fbecb, - 0x3b2fbecb, - 0x0b6fbecb, - 0x1b6fbecb, - 0x2b6fbecb, - 0x3b6fbecb, - 0x0bafbecb, - 0x1bafbecb, - 0x2bafbecb, - 0x3bafbecb, - 0x0befbecb, - 0x1befbecb, - 0x2befbecb, - 0x3befbecb, - 0x0b2cb2db, - 0x1b2cb2db, - 0x2b2cb2db, - 0x3b2cb2db, - 0x0b6cb2db, - 0x1b6cb2db, - 0x2b6cb2db, - 0x3b6cb2db, - 0x0bacb2db, - 0x1bacb2db, - 0x2bacb2db, - 
0x3bacb2db, - 0x0becb2db, - 0x1becb2db, - 0x2becb2db, - 0x3becb2db, - 0x0b2db2db, - 0x1b2db2db, - 0x2b2db2db, - 0x3b2db2db, - 0x0b6db2db, - 0x1b6db2db, - 0x2b6db2db, - 0x3b6db2db, - 0x0badb2db, - 0x1badb2db, - 0x2badb2db, - 0x3badb2db, - 0x0bedb2db, - 0x1bedb2db, - 0x2bedb2db, - 0x3bedb2db, - 0x0b2eb2db, - 0x1b2eb2db, - 0x2b2eb2db, - 0x3b2eb2db, - 0x0b6eb2db, - 0x1b6eb2db, - 0x2b6eb2db, - 0x3b6eb2db, - 0x0baeb2db, - 0x1baeb2db, - 0x2baeb2db, - 0x3baeb2db, - 0x0beeb2db, - 0x1beeb2db, - 0x2beeb2db, - 0x3beeb2db, - 0x0b2fb2db, - 0x1b2fb2db, - 0x2b2fb2db, - 0x3b2fb2db, - 0x0b6fb2db, - 0x1b6fb2db, - 0x2b6fb2db, - 0x3b6fb2db, - 0x0bafb2db, - 0x1bafb2db, - 0x2bafb2db, - 0x3bafb2db, - 0x0befb2db, - 0x1befb2db, - 0x2befb2db, - 0x3befb2db, - 0x0b2cb6db, - 0x1b2cb6db, - 0x2b2cb6db, - 0x3b2cb6db, - 0x0b6cb6db, - 0x1b6cb6db, - 0x2b6cb6db, - 0x3b6cb6db, - 0x0bacb6db, - 0x1bacb6db, - 0x2bacb6db, - 0x3bacb6db, - 0x0becb6db, - 0x1becb6db, - 0x2becb6db, - 0x3becb6db, - 0x0b2db6db, - 0x1b2db6db, - 0x2b2db6db, - 0x3b2db6db, - 0x0b6db6db, - 0x1b6db6db, - 0x2b6db6db, - 0x3b6db6db, - 0x0badb6db, - 0x1badb6db, - 0x2badb6db, - 0x3badb6db, - 0x0bedb6db, - 0x1bedb6db, - 0x2bedb6db, - 0x3bedb6db, - 0x0b2eb6db, - 0x1b2eb6db, - 0x2b2eb6db, - 0x3b2eb6db, - 0x0b6eb6db, - 0x1b6eb6db, - 0x2b6eb6db, - 0x3b6eb6db, - 0x0baeb6db, - 0x1baeb6db, - 0x2baeb6db, - 0x3baeb6db, -} - -var kNonZeroRepsDepth = [numCommandSymbols]uint32{ - 6, - 6, - 6, - 6, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 12, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 18, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 
24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 24, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 
30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, - 30, -} - -var kStaticCommandCodeBits = [numCommandSymbols]uint16{ - 0, - 256, - 128, - 384, - 64, - 320, - 192, - 448, - 32, - 288, - 160, - 416, - 96, - 352, - 224, - 480, - 16, - 272, - 144, - 400, - 80, - 336, - 208, - 464, - 48, - 304, - 176, - 432, - 112, - 368, - 240, - 496, - 8, - 264, - 136, - 392, - 72, - 328, - 200, - 456, - 40, - 296, - 168, - 424, - 
104, - 360, - 232, - 488, - 24, - 280, - 152, - 408, - 88, - 344, - 216, - 472, - 56, - 312, - 184, - 440, - 120, - 376, - 248, - 504, - 4, - 260, - 132, - 388, - 68, - 324, - 196, - 452, - 36, - 292, - 164, - 420, - 100, - 356, - 228, - 484, - 20, - 276, - 148, - 404, - 84, - 340, - 212, - 468, - 52, - 308, - 180, - 436, - 116, - 372, - 244, - 500, - 12, - 268, - 140, - 396, - 76, - 332, - 204, - 460, - 44, - 300, - 172, - 428, - 108, - 364, - 236, - 492, - 28, - 284, - 156, - 412, - 92, - 348, - 220, - 476, - 60, - 316, - 188, - 444, - 124, - 380, - 252, - 508, - 2, - 258, - 130, - 386, - 66, - 322, - 194, - 450, - 34, - 290, - 162, - 418, - 98, - 354, - 226, - 482, - 18, - 274, - 146, - 402, - 82, - 338, - 210, - 466, - 50, - 306, - 178, - 434, - 114, - 370, - 242, - 498, - 10, - 266, - 138, - 394, - 74, - 330, - 202, - 458, - 42, - 298, - 170, - 426, - 106, - 362, - 234, - 490, - 26, - 282, - 154, - 410, - 90, - 346, - 218, - 474, - 58, - 314, - 186, - 442, - 122, - 378, - 250, - 506, - 6, - 262, - 134, - 390, - 70, - 326, - 198, - 454, - 38, - 294, - 166, - 422, - 102, - 358, - 230, - 486, - 22, - 278, - 150, - 406, - 86, - 342, - 214, - 470, - 54, - 310, - 182, - 438, - 118, - 374, - 246, - 502, - 14, - 270, - 142, - 398, - 78, - 334, - 206, - 462, - 46, - 302, - 174, - 430, - 110, - 366, - 238, - 494, - 30, - 286, - 158, - 414, - 94, - 350, - 222, - 478, - 62, - 318, - 190, - 446, - 126, - 382, - 254, - 510, - 1, - 257, - 129, - 385, - 65, - 321, - 193, - 449, - 33, - 289, - 161, - 417, - 97, - 353, - 225, - 481, - 17, - 273, - 145, - 401, - 81, - 337, - 209, - 465, - 49, - 305, - 177, - 433, - 113, - 369, - 241, - 497, - 9, - 265, - 137, - 393, - 73, - 329, - 201, - 457, - 41, - 297, - 169, - 425, - 105, - 361, - 233, - 489, - 25, - 281, - 153, - 409, - 89, - 345, - 217, - 473, - 57, - 313, - 185, - 441, - 121, - 377, - 249, - 505, - 5, - 261, - 133, - 389, - 69, - 325, - 197, - 453, - 37, - 293, - 165, - 421, - 101, - 357, - 229, - 485, - 21, - 277, - 149, 
- 405, - 85, - 341, - 213, - 469, - 53, - 309, - 181, - 437, - 117, - 373, - 245, - 501, - 13, - 269, - 141, - 397, - 77, - 333, - 205, - 461, - 45, - 301, - 173, - 429, - 109, - 365, - 237, - 493, - 29, - 285, - 157, - 413, - 93, - 349, - 221, - 477, - 61, - 317, - 189, - 445, - 125, - 381, - 253, - 509, - 3, - 259, - 131, - 387, - 67, - 323, - 195, - 451, - 35, - 291, - 163, - 419, - 99, - 355, - 227, - 483, - 19, - 275, - 147, - 403, - 83, - 339, - 211, - 467, - 51, - 307, - 179, - 435, - 115, - 371, - 243, - 499, - 11, - 267, - 139, - 395, - 75, - 331, - 203, - 459, - 43, - 299, - 171, - 427, - 107, - 363, - 235, - 491, - 27, - 283, - 155, - 411, - 91, - 347, - 219, - 475, - 59, - 315, - 187, - 443, - 123, - 379, - 251, - 507, - 7, - 1031, - 519, - 1543, - 263, - 1287, - 775, - 1799, - 135, - 1159, - 647, - 1671, - 391, - 1415, - 903, - 1927, - 71, - 1095, - 583, - 1607, - 327, - 1351, - 839, - 1863, - 199, - 1223, - 711, - 1735, - 455, - 1479, - 967, - 1991, - 39, - 1063, - 551, - 1575, - 295, - 1319, - 807, - 1831, - 167, - 1191, - 679, - 1703, - 423, - 1447, - 935, - 1959, - 103, - 1127, - 615, - 1639, - 359, - 1383, - 871, - 1895, - 231, - 1255, - 743, - 1767, - 487, - 1511, - 999, - 2023, - 23, - 1047, - 535, - 1559, - 279, - 1303, - 791, - 1815, - 151, - 1175, - 663, - 1687, - 407, - 1431, - 919, - 1943, - 87, - 1111, - 599, - 1623, - 343, - 1367, - 855, - 1879, - 215, - 1239, - 727, - 1751, - 471, - 1495, - 983, - 2007, - 55, - 1079, - 567, - 1591, - 311, - 1335, - 823, - 1847, - 183, - 1207, - 695, - 1719, - 439, - 1463, - 951, - 1975, - 119, - 1143, - 631, - 1655, - 375, - 1399, - 887, - 1911, - 247, - 1271, - 759, - 1783, - 503, - 1527, - 1015, - 2039, - 15, - 1039, - 527, - 1551, - 271, - 1295, - 783, - 1807, - 143, - 1167, - 655, - 1679, - 399, - 1423, - 911, - 1935, - 79, - 1103, - 591, - 1615, - 335, - 1359, - 847, - 1871, - 207, - 1231, - 719, - 1743, - 463, - 1487, - 975, - 1999, - 47, - 1071, - 559, - 1583, - 303, - 1327, - 815, - 1839, - 175, 
- 1199, - 687, - 1711, - 431, - 1455, - 943, - 1967, - 111, - 1135, - 623, - 1647, - 367, - 1391, - 879, - 1903, - 239, - 1263, - 751, - 1775, - 495, - 1519, - 1007, - 2031, - 31, - 1055, - 543, - 1567, - 287, - 1311, - 799, - 1823, - 159, - 1183, - 671, - 1695, - 415, - 1439, - 927, - 1951, - 95, - 1119, - 607, - 1631, - 351, - 1375, - 863, - 1887, - 223, - 1247, - 735, - 1759, - 479, - 1503, - 991, - 2015, - 63, - 1087, - 575, - 1599, - 319, - 1343, - 831, - 1855, - 191, - 1215, - 703, - 1727, - 447, - 1471, - 959, - 1983, - 127, - 1151, - 639, - 1663, - 383, - 1407, - 895, - 1919, - 255, - 1279, - 767, - 1791, - 511, - 1535, - 1023, - 2047, -} - -func storeStaticCommandHuffmanTree(bw *bitWriter) { - bw.writeBits(32, 0x16307003) - bw.writeBits(24, 0x926244) - bw.writeBits(3, 0x00000000) -} - -var kStaticDistanceCodeBits = [64]uint16{ - 0, - 32, - 16, - 48, - 8, - 40, - 24, - 56, - 4, - 36, - 20, - 52, - 12, - 44, - 28, - 60, - 2, - 34, - 18, - 50, - 10, - 42, - 26, - 58, - 6, - 38, - 22, - 54, - 14, - 46, - 30, - 62, - 1, - 33, - 17, - 49, - 9, - 41, - 25, - 57, - 5, - 37, - 21, - 53, - 13, - 45, - 29, - 61, - 3, - 35, - 19, - 51, - 11, - 43, - 27, - 59, - 7, - 39, - 23, - 55, - 15, - 47, - 31, - 63, -} - -func storeStaticDistanceHuffmanTree(bw *bitWriter) { - bw.writeBits(28, 0x0369DC03) -} diff --git a/vendor/github.com/andybalholm/brotli/fast_log.go b/vendor/github.com/andybalholm/brotli/fast_log.go deleted file mode 100644 index bbae3009be..0000000000 --- a/vendor/github.com/andybalholm/brotli/fast_log.go +++ /dev/null @@ -1,296 +0,0 @@ -package brotli - -import "math" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Utilities for fast computation of logarithms. 
*/ - -func log2FloorNonZero(n uint) uint32 { - /* TODO: generalize and move to platform.h */ - var result uint32 = 0 - for { - n >>= 1 - if n == 0 { - break - } - result++ - } - return result -} - -/* A lookup table for small values of log2(int) to be used in entropy - computation. - - ", ".join(["%.16ff" % x for x in [0.0]+[log2(x) for x in range(1, 256)]]) */ -var kLog2Table = []float32{ - 0.0000000000000000, - 0.0000000000000000, - 1.0000000000000000, - 1.5849625007211563, - 2.0000000000000000, - 2.3219280948873622, - 2.5849625007211561, - 2.8073549220576042, - 3.0000000000000000, - 3.1699250014423126, - 3.3219280948873626, - 3.4594316186372978, - 3.5849625007211565, - 3.7004397181410922, - 3.8073549220576037, - 3.9068905956085187, - 4.0000000000000000, - 4.0874628412503400, - 4.1699250014423122, - 4.2479275134435852, - 4.3219280948873626, - 4.3923174227787607, - 4.4594316186372973, - 4.5235619560570131, - 4.5849625007211570, - 4.6438561897747244, - 4.7004397181410926, - 4.7548875021634691, - 4.8073549220576037, - 4.8579809951275728, - 4.9068905956085187, - 4.9541963103868758, - 5.0000000000000000, - 5.0443941193584534, - 5.0874628412503400, - 5.1292830169449664, - 5.1699250014423122, - 5.2094533656289501, - 5.2479275134435852, - 5.2854022188622487, - 5.3219280948873626, - 5.3575520046180838, - 5.3923174227787607, - 5.4262647547020979, - 5.4594316186372973, - 5.4918530963296748, - 5.5235619560570131, - 5.5545888516776376, - 5.5849625007211570, - 5.6147098441152083, - 5.6438561897747244, - 5.6724253419714961, - 5.7004397181410926, - 5.7279204545631996, - 5.7548875021634691, - 5.7813597135246599, - 5.8073549220576046, - 5.8328900141647422, - 5.8579809951275719, - 5.8826430493618416, - 5.9068905956085187, - 5.9307373375628867, - 5.9541963103868758, - 5.9772799234999168, - 6.0000000000000000, - 6.0223678130284544, - 6.0443941193584534, - 6.0660891904577721, - 6.0874628412503400, - 6.1085244567781700, - 6.1292830169449672, - 6.1497471195046822, - 6.1699250014423122, 
- 6.1898245588800176, - 6.2094533656289510, - 6.2288186904958804, - 6.2479275134435861, - 6.2667865406949019, - 6.2854022188622487, - 6.3037807481771031, - 6.3219280948873617, - 6.3398500028846252, - 6.3575520046180847, - 6.3750394313469254, - 6.3923174227787598, - 6.4093909361377026, - 6.4262647547020979, - 6.4429434958487288, - 6.4594316186372982, - 6.4757334309663976, - 6.4918530963296748, - 6.5077946401986964, - 6.5235619560570131, - 6.5391588111080319, - 6.5545888516776376, - 6.5698556083309478, - 6.5849625007211561, - 6.5999128421871278, - 6.6147098441152092, - 6.6293566200796095, - 6.6438561897747253, - 6.6582114827517955, - 6.6724253419714952, - 6.6865005271832185, - 6.7004397181410917, - 6.7142455176661224, - 6.7279204545631988, - 6.7414669864011465, - 6.7548875021634691, - 6.7681843247769260, - 6.7813597135246599, - 6.7944158663501062, - 6.8073549220576037, - 6.8201789624151887, - 6.8328900141647422, - 6.8454900509443757, - 6.8579809951275719, - 6.8703647195834048, - 6.8826430493618416, - 6.8948177633079437, - 6.9068905956085187, - 6.9188632372745955, - 6.9307373375628867, - 6.9425145053392399, - 6.9541963103868758, - 6.9657842846620879, - 6.9772799234999168, - 6.9886846867721664, - 7.0000000000000000, - 7.0112272554232540, - 7.0223678130284544, - 7.0334230015374501, - 7.0443941193584534, - 7.0552824355011898, - 7.0660891904577721, - 7.0768155970508317, - 7.0874628412503400, - 7.0980320829605272, - 7.1085244567781700, - 7.1189410727235076, - 7.1292830169449664, - 7.1395513523987937, - 7.1497471195046822, - 7.1598713367783891, - 7.1699250014423130, - 7.1799090900149345, - 7.1898245588800176, - 7.1996723448363644, - 7.2094533656289492, - 7.2191685204621621, - 7.2288186904958804, - 7.2384047393250794, - 7.2479275134435861, - 7.2573878426926521, - 7.2667865406949019, - 7.2761244052742384, - 7.2854022188622487, - 7.2946207488916270, - 7.3037807481771031, - 7.3128829552843557, - 7.3219280948873617, - 7.3309168781146177, - 7.3398500028846243, - 
7.3487281542310781, - 7.3575520046180847, - 7.3663222142458151, - 7.3750394313469254, - 7.3837042924740528, - 7.3923174227787607, - 7.4008794362821844, - 7.4093909361377026, - 7.4178525148858991, - 7.4262647547020979, - 7.4346282276367255, - 7.4429434958487288, - 7.4512111118323299, - 7.4594316186372973, - 7.4676055500829976, - 7.4757334309663976, - 7.4838157772642564, - 7.4918530963296748, - 7.4998458870832057, - 7.5077946401986964, - 7.5156998382840436, - 7.5235619560570131, - 7.5313814605163119, - 7.5391588111080319, - 7.5468944598876373, - 7.5545888516776376, - 7.5622424242210728, - 7.5698556083309478, - 7.5774288280357487, - 7.5849625007211561, - 7.5924570372680806, - 7.5999128421871278, - 7.6073303137496113, - 7.6147098441152075, - 7.6220518194563764, - 7.6293566200796095, - 7.6366246205436488, - 7.6438561897747244, - 7.6510516911789290, - 7.6582114827517955, - 7.6653359171851765, - 7.6724253419714952, - 7.6794800995054464, - 7.6865005271832185, - 7.6934869574993252, - 7.7004397181410926, - 7.7073591320808825, - 7.7142455176661224, - 7.7210991887071856, - 7.7279204545631996, - 7.7347096202258392, - 7.7414669864011465, - 7.7481928495894596, - 7.7548875021634691, - 7.7615512324444795, - 7.7681843247769260, - 7.7747870596011737, - 7.7813597135246608, - 7.7879025593914317, - 7.7944158663501062, - 7.8008998999203047, - 7.8073549220576037, - 7.8137811912170374, - 7.8201789624151887, - 7.8265484872909159, - 7.8328900141647422, - 7.8392037880969445, - 7.8454900509443757, - 7.8517490414160571, - 7.8579809951275719, - 7.8641861446542798, - 7.8703647195834048, - 7.8765169465650002, - 7.8826430493618425, - 7.8887432488982601, - 7.8948177633079446, - 7.9008668079807496, - 7.9068905956085187, - 7.9128893362299619, - 7.9188632372745955, - 7.9248125036057813, - 7.9307373375628867, - 7.9366379390025719, - 7.9425145053392399, - 7.9483672315846778, - 7.9541963103868758, - 7.9600019320680806, - 7.9657842846620870, - 7.9715435539507720, - 7.9772799234999168, - 7.9829935746943104, 
- 7.9886846867721664, - 7.9943534368588578, -} - -/* Faster logarithm for small integers, with the property of log2(0) == 0. */ -func fastLog2(v uint) float64 { - if v < uint(len(kLog2Table)) { - return float64(kLog2Table[v]) - } - - return math.Log2(float64(v)) -} diff --git a/vendor/github.com/andybalholm/brotli/find_match_length.go b/vendor/github.com/andybalholm/brotli/find_match_length.go deleted file mode 100644 index 09d2ae6726..0000000000 --- a/vendor/github.com/andybalholm/brotli/find_match_length.go +++ /dev/null @@ -1,45 +0,0 @@ -package brotli - -import ( - "encoding/binary" - "math/bits" - "runtime" -) - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Function to find maximal matching prefixes of strings. */ -func findMatchLengthWithLimit(s1 []byte, s2 []byte, limit uint) uint { - var matched uint = 0 - _, _ = s1[limit-1], s2[limit-1] // bounds check - switch runtime.GOARCH { - case "amd64": - // Compare 8 bytes at at time. - for matched+8 <= limit { - w1 := binary.LittleEndian.Uint64(s1[matched:]) - w2 := binary.LittleEndian.Uint64(s2[matched:]) - if w1 != w2 { - return matched + uint(bits.TrailingZeros64(w1^w2)>>3) - } - matched += 8 - } - case "386": - // Compare 4 bytes at at time. 
- for matched+4 <= limit { - w1 := binary.LittleEndian.Uint32(s1[matched:]) - w2 := binary.LittleEndian.Uint32(s2[matched:]) - if w1 != w2 { - return matched + uint(bits.TrailingZeros32(w1^w2)>>3) - } - matched += 4 - } - } - for matched < limit && s1[matched] == s2[matched] { - matched++ - } - return matched -} diff --git a/vendor/github.com/andybalholm/brotli/h10.go b/vendor/github.com/andybalholm/brotli/h10.go deleted file mode 100644 index 5662fbbbb5..0000000000 --- a/vendor/github.com/andybalholm/brotli/h10.go +++ /dev/null @@ -1,287 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2016 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func (*h10) HashTypeLength() uint { - return 4 -} - -func (*h10) StoreLookahead() uint { - return 128 -} - -func hashBytesH10(data []byte) uint32 { - var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return h >> (32 - 17) -} - -/* A (forgetful) hash table where each hash bucket contains a binary tree of - sequences whose first 4 bytes share the same hash code. - Each sequence is 128 long and is identified by its starting - position in the input data. The binary tree is sorted by the lexicographic - order of the sequences, and it is also a max-heap with respect to the - starting positions. 
*/ -type h10 struct { - hasherCommon - window_mask_ uint - buckets_ [1 << 17]uint32 - invalid_pos_ uint32 - forest []uint32 -} - -func (h *h10) Initialize(params *encoderParams) { - h.window_mask_ = (1 << params.lgwin) - 1 - h.invalid_pos_ = uint32(0 - h.window_mask_) - var num_nodes uint = uint(1) << params.lgwin - h.forest = make([]uint32, 2*num_nodes) -} - -func (h *h10) Prepare(one_shot bool, input_size uint, data []byte) { - var invalid_pos uint32 = h.invalid_pos_ - var i uint32 - for i = 0; i < 1<<17; i++ { - h.buckets_[i] = invalid_pos - } -} - -func leftChildIndexH10(self *h10, pos uint) uint { - return 2 * (pos & self.window_mask_) -} - -func rightChildIndexH10(self *h10, pos uint) uint { - return 2*(pos&self.window_mask_) + 1 -} - -/* Stores the hash of the next 4 bytes and in a single tree-traversal, the - hash bucket's binary tree is searched for matches and is re-rooted at the - current position. - - If less than 128 data is available, the hash bucket of the - current position is searched for matches, but the state of the hash table - is not changed, since we can not know the final sorting order of the - current (incomplete) sequence. - - This function must be called with increasing cur_ix positions. 
*/ -func storeAndFindMatchesH10(self *h10, data []byte, cur_ix uint, ring_buffer_mask uint, max_length uint, max_backward uint, best_len *uint, matches []backwardMatch) []backwardMatch { - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var max_comp_len uint = brotli_min_size_t(max_length, 128) - var should_reroot_tree bool = (max_length >= 128) - var key uint32 = hashBytesH10(data[cur_ix_masked:]) - var forest []uint32 = self.forest - var prev_ix uint = uint(self.buckets_[key]) - var node_left uint = leftChildIndexH10(self, cur_ix) - var node_right uint = rightChildIndexH10(self, cur_ix) - var best_len_left uint = 0 - var best_len_right uint = 0 - var depth_remaining uint - /* The forest index of the rightmost node of the left subtree of the new - root, updated as we traverse and re-root the tree of the hash bucket. */ - - /* The forest index of the leftmost node of the right subtree of the new - root, updated as we traverse and re-root the tree of the hash bucket. */ - - /* The match length of the rightmost node of the left subtree of the new - root, updated as we traverse and re-root the tree of the hash bucket. */ - - /* The match length of the leftmost node of the right subtree of the new - root, updated as we traverse and re-root the tree of the hash bucket. 
*/ - if should_reroot_tree { - self.buckets_[key] = uint32(cur_ix) - } - - for depth_remaining = 64; ; depth_remaining-- { - var backward uint = cur_ix - prev_ix - var prev_ix_masked uint = prev_ix & ring_buffer_mask - if backward == 0 || backward > max_backward || depth_remaining == 0 { - if should_reroot_tree { - forest[node_left] = self.invalid_pos_ - forest[node_right] = self.invalid_pos_ - } - - break - } - { - var cur_len uint = brotli_min_size_t(best_len_left, best_len_right) - var len uint - assert(cur_len <= 128) - len = cur_len + findMatchLengthWithLimit(data[cur_ix_masked+cur_len:], data[prev_ix_masked+cur_len:], max_length-cur_len) - if matches != nil && len > *best_len { - *best_len = uint(len) - initBackwardMatch(&matches[0], backward, uint(len)) - matches = matches[1:] - } - - if len >= max_comp_len { - if should_reroot_tree { - forest[node_left] = forest[leftChildIndexH10(self, prev_ix)] - forest[node_right] = forest[rightChildIndexH10(self, prev_ix)] - } - - break - } - - if data[cur_ix_masked+len] > data[prev_ix_masked+len] { - best_len_left = uint(len) - if should_reroot_tree { - forest[node_left] = uint32(prev_ix) - } - - node_left = rightChildIndexH10(self, prev_ix) - prev_ix = uint(forest[node_left]) - } else { - best_len_right = uint(len) - if should_reroot_tree { - forest[node_right] = uint32(prev_ix) - } - - node_right = leftChildIndexH10(self, prev_ix) - prev_ix = uint(forest[node_right]) - } - } - } - - return matches -} - -/* Finds all backward matches of &data[cur_ix & ring_buffer_mask] up to the - length of max_length and stores the position cur_ix in the hash table. - - Sets *num_matches to the number of matches found, and stores the found - matches in matches[0] to matches[*num_matches - 1]. The matches will be - sorted by strictly increasing length and (non-strictly) increasing - distance. 
*/ -func findAllMatchesH10(handle *h10, dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, cur_ix uint, max_length uint, max_backward uint, gap uint, params *encoderParams, matches []backwardMatch) uint { - var orig_matches []backwardMatch = matches - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var best_len uint = 1 - var short_match_max_backward uint - if params.quality != hqZopflificationQuality { - short_match_max_backward = 16 - } else { - short_match_max_backward = 64 - } - var stop uint = cur_ix - short_match_max_backward - var dict_matches [maxStaticDictionaryMatchLen + 1]uint32 - var i uint - if cur_ix < short_match_max_backward { - stop = 0 - } - for i = cur_ix - 1; i > stop && best_len <= 2; i-- { - var prev_ix uint = i - var backward uint = cur_ix - prev_ix - if backward > max_backward { - break - } - - prev_ix &= ring_buffer_mask - if data[cur_ix_masked] != data[prev_ix] || data[cur_ix_masked+1] != data[prev_ix+1] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len > best_len { - best_len = uint(len) - initBackwardMatch(&matches[0], backward, uint(len)) - matches = matches[1:] - } - } - } - - if best_len < max_length { - matches = storeAndFindMatchesH10(handle, data, cur_ix, ring_buffer_mask, max_length, max_backward, &best_len, matches) - } - - for i = 0; i <= maxStaticDictionaryMatchLen; i++ { - dict_matches[i] = kInvalidMatch - } - { - var minlen uint = brotli_max_size_t(4, best_len+1) - if findAllStaticDictionaryMatches(dictionary, data[cur_ix_masked:], minlen, max_length, dict_matches[0:]) { - var maxlen uint = brotli_min_size_t(maxStaticDictionaryMatchLen, max_length) - var l uint - for l = minlen; l <= maxlen; l++ { - var dict_id uint32 = dict_matches[l] - if dict_id < kInvalidMatch { - var distance uint = max_backward + gap + uint(dict_id>>5) + 1 - if distance <= params.dist.max_distance { - initDictionaryBackwardMatch(&matches[0], distance, l, 
uint(dict_id&31)) - matches = matches[1:] - } - } - } - } - } - - return uint(-cap(matches) + cap(orig_matches)) -} - -/* Stores the hash of the next 4 bytes and re-roots the binary tree at the - current sequence, without returning any matches. - REQUIRES: ix + 128 <= end-of-current-block */ -func (h *h10) Store(data []byte, mask uint, ix uint) { - var max_backward uint = h.window_mask_ - windowGap + 1 - /* Maximum distance is window size - 16, see section 9.1. of the spec. */ - storeAndFindMatchesH10(h, data, ix, mask, 128, max_backward, nil, nil) -} - -func (h *h10) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { - var i uint = ix_start - var j uint = ix_start - if ix_start+63 <= ix_end { - i = ix_end - 63 - } - - if ix_start+512 <= i { - for ; j < i; j += 8 { - h.Store(data, mask, j) - } - } - - for ; i < ix_end; i++ { - h.Store(data, mask, i) - } -} - -func (h *h10) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) { - if num_bytes >= h.HashTypeLength()-1 && position >= 128 { - var i_start uint = position - 128 + 1 - var i_end uint = brotli_min_size_t(position, i_start+num_bytes) - /* Store the last `128 - 1` positions in the hasher. - These could not be calculated before, since they require knowledge - of both the previous and the current block. */ - - var i uint - for i = i_start; i < i_end; i++ { - /* Maximum distance is window size - 16, see section 9.1. of the spec. - Furthermore, we have to make sure that we don't look further back - from the start of the next block than the window size, otherwise we - could access already overwritten areas of the ring-buffer. */ - var max_backward uint = h.window_mask_ - brotli_max_size_t(windowGap-1, position-i) - - /* We know that i + 128 <= position + num_bytes, i.e. the - end of the current block and that we have at least - 128 tail in the ring-buffer. 
*/ - storeAndFindMatchesH10(h, ringbuffer, i, ringbuffer_mask, 128, max_backward, nil, nil) - } - } -} - -/* MAX_NUM_MATCHES == 64 + MAX_TREE_SEARCH_DEPTH */ -const maxNumMatchesH10 = 128 - -func (*h10) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - panic("unimplemented") -} - -func (*h10) PrepareDistanceCache(distance_cache []int) { - panic("unimplemented") -} diff --git a/vendor/github.com/andybalholm/brotli/h5.go b/vendor/github.com/andybalholm/brotli/h5.go deleted file mode 100644 index f391b73fdd..0000000000 --- a/vendor/github.com/andybalholm/brotli/h5.go +++ /dev/null @@ -1,214 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* A (forgetful) hash table to the data seen by the compressor, to - help create backward references to previous data. - - This is a hash map of fixed size (bucket_size_) to a ring buffer of - fixed size (block_size_). The ring buffer contains the last block_size_ - index positions of the given hash key in the compressed data. */ -func (*h5) HashTypeLength() uint { - return 4 -} - -func (*h5) StoreLookahead() uint { - return 4 -} - -/* HashBytes is the function that chooses the bucket to place the address in. */ -func hashBytesH5(data []byte, shift int) uint32 { - var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. 
*/ - return uint32(h >> uint(shift)) -} - -type h5 struct { - hasherCommon - bucket_size_ uint - block_size_ uint - hash_shift_ int - block_mask_ uint32 - num []uint16 - buckets []uint32 -} - -func (h *h5) Initialize(params *encoderParams) { - h.hash_shift_ = 32 - h.params.bucket_bits - h.bucket_size_ = uint(1) << uint(h.params.bucket_bits) - h.block_size_ = uint(1) << uint(h.params.block_bits) - h.block_mask_ = uint32(h.block_size_ - 1) - h.num = make([]uint16, h.bucket_size_) - h.buckets = make([]uint32, h.block_size_*h.bucket_size_) -} - -func (h *h5) Prepare(one_shot bool, input_size uint, data []byte) { - var num []uint16 = h.num - var partial_prepare_threshold uint = h.bucket_size_ >> 6 - /* Partial preparation is 100 times slower (per socket). */ - if one_shot && input_size <= partial_prepare_threshold { - var i uint - for i = 0; i < input_size; i++ { - var key uint32 = hashBytesH5(data[i:], h.hash_shift_) - num[key] = 0 - } - } else { - for i := 0; i < int(h.bucket_size_); i++ { - num[i] = 0 - } - } -} - -/* Look at 4 bytes at &data[ix & mask]. - Compute a hash from these, and store the value of ix at that position. */ -func (h *h5) Store(data []byte, mask uint, ix uint) { - var num []uint16 = h.num - var key uint32 = hashBytesH5(data[ix&mask:], h.hash_shift_) - var minor_ix uint = uint(num[key]) & uint(h.block_mask_) - var offset uint = minor_ix + uint(key<= h.HashTypeLength()-1 && position >= 3 { - /* Prepare the hashes for three last bytes of the last write. - These could not be calculated before, since they require knowledge - of both the previous and the current block. 
*/ - h.Store(ringbuffer, ringbuffer_mask, position-3) - h.Store(ringbuffer, ringbuffer_mask, position-2) - h.Store(ringbuffer, ringbuffer_mask, position-1) - } -} - -func (h *h5) PrepareDistanceCache(distance_cache []int) { - prepareDistanceCache(distance_cache, h.params.num_last_distances_to_check) -} - -/* Find a longest backward match of &data[cur_ix] up to the length of - max_length and stores the position cur_ix in the hash table. - - REQUIRES: PrepareDistanceCacheH5 must be invoked for current distance cache - values; if this method is invoked repeatedly with the same distance - cache values, it is enough to invoke PrepareDistanceCacheH5 once. - - Does not look for matches longer than max_length. - Does not look for matches further away than max_backward. - Writes the best match into |out|. - |out|->score is updated only if a better match is found. */ -func (h *h5) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var num []uint16 = h.num - var buckets []uint32 = h.buckets - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var min_score uint = out.score - var best_score uint = out.score - var best_len uint = out.len - var i uint - var bucket []uint32 - /* Don't accept a short copy from far away. */ - out.len = 0 - - out.len_code_delta = 0 - - /* Try last distance first. 
*/ - for i = 0; i < uint(h.params.num_last_distances_to_check); i++ { - var backward uint = uint(distance_cache[i]) - var prev_ix uint = uint(cur_ix - backward) - if prev_ix >= cur_ix { - continue - } - - if backward > max_backward { - continue - } - - prev_ix &= ring_buffer_mask - - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 3 || (len == 2 && i < 2) { - /* Comparing for >= 2 does not change the semantics, but just saves for - a few unnecessary binary logarithms in backward reference score, - since we are not interested in such short matches. */ - var score uint = backwardReferenceScoreUsingLastDistance(uint(len)) - if best_score < score { - if i != 0 { - score -= backwardReferencePenaltyUsingLastDistance(i) - } - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - } - { - var key uint32 = hashBytesH5(data[cur_ix_masked:], h.hash_shift_) - bucket = buckets[key< h.block_size_ { - down = uint(num[key]) - h.block_size_ - } else { - down = 0 - } - for i = uint(num[key]); i > down; { - var prev_ix uint - i-- - prev_ix = uint(bucket[uint32(i)&h.block_mask_]) - var backward uint = cur_ix - prev_ix - if backward > max_backward { - break - } - - prev_ix &= ring_buffer_mask - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - /* Comparing for >= 3 does not change the semantics, but just saves - for a few unnecessary binary logarithms in backward reference - score, since we are not interested in such short matches. 
*/ - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - - bucket[uint32(num[key])&h.block_mask_] = uint32(cur_ix) - num[key]++ - } - - if min_score == out.score { - searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) - } -} diff --git a/vendor/github.com/andybalholm/brotli/h6.go b/vendor/github.com/andybalholm/brotli/h6.go deleted file mode 100644 index 80bb224aa8..0000000000 --- a/vendor/github.com/andybalholm/brotli/h6.go +++ /dev/null @@ -1,216 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* A (forgetful) hash table to the data seen by the compressor, to - help create backward references to previous data. - - This is a hash map of fixed size (bucket_size_) to a ring buffer of - fixed size (block_size_). The ring buffer contains the last block_size_ - index positions of the given hash key in the compressed data. */ -func (*h6) HashTypeLength() uint { - return 8 -} - -func (*h6) StoreLookahead() uint { - return 8 -} - -/* HashBytes is the function that chooses the bucket to place the address in. */ -func hashBytesH6(data []byte, mask uint64, shift int) uint32 { - var h uint64 = (binary.LittleEndian.Uint64(data) & mask) * kHashMul64Long - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. 
*/ - return uint32(h >> uint(shift)) -} - -type h6 struct { - hasherCommon - bucket_size_ uint - block_size_ uint - hash_shift_ int - hash_mask_ uint64 - block_mask_ uint32 - num []uint16 - buckets []uint32 -} - -func (h *h6) Initialize(params *encoderParams) { - h.hash_shift_ = 64 - h.params.bucket_bits - h.hash_mask_ = (^(uint64(0))) >> uint(64-8*h.params.hash_len) - h.bucket_size_ = uint(1) << uint(h.params.bucket_bits) - h.block_size_ = uint(1) << uint(h.params.block_bits) - h.block_mask_ = uint32(h.block_size_ - 1) - h.num = make([]uint16, h.bucket_size_) - h.buckets = make([]uint32, h.block_size_*h.bucket_size_) -} - -func (h *h6) Prepare(one_shot bool, input_size uint, data []byte) { - var num []uint16 = h.num - var partial_prepare_threshold uint = h.bucket_size_ >> 6 - /* Partial preparation is 100 times slower (per socket). */ - if one_shot && input_size <= partial_prepare_threshold { - var i uint - for i = 0; i < input_size; i++ { - var key uint32 = hashBytesH6(data[i:], h.hash_mask_, h.hash_shift_) - num[key] = 0 - } - } else { - for i := 0; i < int(h.bucket_size_); i++ { - num[i] = 0 - } - } -} - -/* Look at 4 bytes at &data[ix & mask]. - Compute a hash from these, and store the value of ix at that position. */ -func (h *h6) Store(data []byte, mask uint, ix uint) { - var num []uint16 = h.num - var key uint32 = hashBytesH6(data[ix&mask:], h.hash_mask_, h.hash_shift_) - var minor_ix uint = uint(num[key]) & uint(h.block_mask_) - var offset uint = minor_ix + uint(key<= h.HashTypeLength()-1 && position >= 3 { - /* Prepare the hashes for three last bytes of the last write. - These could not be calculated before, since they require knowledge - of both the previous and the current block. 
*/ - h.Store(ringbuffer, ringbuffer_mask, position-3) - h.Store(ringbuffer, ringbuffer_mask, position-2) - h.Store(ringbuffer, ringbuffer_mask, position-1) - } -} - -func (h *h6) PrepareDistanceCache(distance_cache []int) { - prepareDistanceCache(distance_cache, h.params.num_last_distances_to_check) -} - -/* Find a longest backward match of &data[cur_ix] up to the length of - max_length and stores the position cur_ix in the hash table. - - REQUIRES: PrepareDistanceCacheH6 must be invoked for current distance cache - values; if this method is invoked repeatedly with the same distance - cache values, it is enough to invoke PrepareDistanceCacheH6 once. - - Does not look for matches longer than max_length. - Does not look for matches further away than max_backward. - Writes the best match into |out|. - |out|->score is updated only if a better match is found. */ -func (h *h6) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var num []uint16 = h.num - var buckets []uint32 = h.buckets - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var min_score uint = out.score - var best_score uint = out.score - var best_len uint = out.len - var i uint - var bucket []uint32 - /* Don't accept a short copy from far away. */ - out.len = 0 - - out.len_code_delta = 0 - - /* Try last distance first. 
*/ - for i = 0; i < uint(h.params.num_last_distances_to_check); i++ { - var backward uint = uint(distance_cache[i]) - var prev_ix uint = uint(cur_ix - backward) - if prev_ix >= cur_ix { - continue - } - - if backward > max_backward { - continue - } - - prev_ix &= ring_buffer_mask - - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 3 || (len == 2 && i < 2) { - /* Comparing for >= 2 does not change the semantics, but just saves for - a few unnecessary binary logarithms in backward reference score, - since we are not interested in such short matches. */ - var score uint = backwardReferenceScoreUsingLastDistance(uint(len)) - if best_score < score { - if i != 0 { - score -= backwardReferencePenaltyUsingLastDistance(i) - } - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - } - { - var key uint32 = hashBytesH6(data[cur_ix_masked:], h.hash_mask_, h.hash_shift_) - bucket = buckets[key< h.block_size_ { - down = uint(num[key]) - h.block_size_ - } else { - down = 0 - } - for i = uint(num[key]); i > down; { - var prev_ix uint - i-- - prev_ix = uint(bucket[uint32(i)&h.block_mask_]) - var backward uint = cur_ix - prev_ix - if backward > max_backward { - break - } - - prev_ix &= ring_buffer_mask - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - /* Comparing for >= 3 does not change the semantics, but just saves - for a few unnecessary binary logarithms in backward reference - score, since we are not interested in such short matches. 
*/ - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - - bucket[uint32(num[key])&h.block_mask_] = uint32(cur_ix) - num[key]++ - } - - if min_score == out.score { - searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) - } -} diff --git a/vendor/github.com/andybalholm/brotli/hash.go b/vendor/github.com/andybalholm/brotli/hash.go deleted file mode 100644 index 003b433ea6..0000000000 --- a/vendor/github.com/andybalholm/brotli/hash.go +++ /dev/null @@ -1,344 +0,0 @@ -package brotli - -import ( - "encoding/binary" - "fmt" -) - -type hasherCommon struct { - params hasherParams - is_prepared_ bool - dict_num_lookups uint - dict_num_matches uint -} - -func (h *hasherCommon) Common() *hasherCommon { - return h -} - -type hasherHandle interface { - Common() *hasherCommon - Initialize(params *encoderParams) - Prepare(one_shot bool, input_size uint, data []byte) - StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) - HashTypeLength() uint - StoreLookahead() uint - PrepareDistanceCache(distance_cache []int) - FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) - StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) - Store(data []byte, mask uint, ix uint) -} - -type score_t uint - -const kCutoffTransformsCount uint32 = 10 - -/* 0, 12, 27, 23, 42, 63, 56, 48, 59, 64 */ -/* 0+0, 4+8, 8+19, 12+11, 16+26, 20+43, 24+32, 28+20, 32+27, 36+28 */ -const kCutoffTransforms uint64 = 0x071B520ADA2D3200 - -type hasherSearchResult struct { - len uint - distance uint - score uint - len_code_delta int -} - -/* kHashMul32 multiplier has 
these properties: - * The multiplier must be odd. Otherwise we may lose the highest bit. - * No long streaks of ones or zeros. - * There is no effort to ensure that it is a prime, the oddity is enough - for this use. - * The number has been tuned heuristically against compression benchmarks. */ -const kHashMul32 uint32 = 0x1E35A7BD - -const kHashMul64 uint64 = 0x1E35A7BD1E35A7BD - -const kHashMul64Long uint64 = 0x1FE35A7BD3579BD3 - -func hash14(data []byte) uint32 { - var h uint32 = binary.LittleEndian.Uint32(data) * kHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return h >> (32 - 14) -} - -func prepareDistanceCache(distance_cache []int, num_distances int) { - if num_distances > 4 { - var last_distance int = distance_cache[0] - distance_cache[4] = last_distance - 1 - distance_cache[5] = last_distance + 1 - distance_cache[6] = last_distance - 2 - distance_cache[7] = last_distance + 2 - distance_cache[8] = last_distance - 3 - distance_cache[9] = last_distance + 3 - if num_distances > 10 { - var next_last_distance int = distance_cache[1] - distance_cache[10] = next_last_distance - 1 - distance_cache[11] = next_last_distance + 1 - distance_cache[12] = next_last_distance - 2 - distance_cache[13] = next_last_distance + 2 - distance_cache[14] = next_last_distance - 3 - distance_cache[15] = next_last_distance + 3 - } - } -} - -const literalByteScore = 135 - -const distanceBitPenalty = 30 - -/* Score must be positive after applying maximal penalty. */ -const scoreBase = (distanceBitPenalty * 8 * 8) - -/* Usually, we always choose the longest backward reference. This function - allows for the exception of that rule. - - If we choose a backward reference that is further away, it will - usually be coded with more bits. We approximate this by assuming - log2(distance). If the distance can be expressed in terms of the - last four distances, we use some heuristic constants to estimate - the bits cost. 
For the first up to four literals we use the bit - cost of the literals from the literal cost model, after that we - use the average bit cost of the cost model. - - This function is used to sometimes discard a longer backward reference - when it is not much longer and the bit cost for encoding it is more - than the saved literals. - - backward_reference_offset MUST be positive. */ -func backwardReferenceScore(copy_length uint, backward_reference_offset uint) uint { - return scoreBase + literalByteScore*uint(copy_length) - distanceBitPenalty*uint(log2FloorNonZero(backward_reference_offset)) -} - -func backwardReferenceScoreUsingLastDistance(copy_length uint) uint { - return literalByteScore*uint(copy_length) + scoreBase + 15 -} - -func backwardReferencePenaltyUsingLastDistance(distance_short_code uint) uint { - return uint(39) + ((0x1CA10 >> (distance_short_code & 0xE)) & 0xE) -} - -func testStaticDictionaryItem(dictionary *encoderDictionary, item uint, data []byte, max_length uint, max_backward uint, max_distance uint, out *hasherSearchResult) bool { - var len uint - var word_idx uint - var offset uint - var matchlen uint - var backward uint - var score uint - len = item & 0x1F - word_idx = item >> 5 - offset = uint(dictionary.words.offsets_by_length[len]) + len*word_idx - if len > max_length { - return false - } - - matchlen = findMatchLengthWithLimit(data, dictionary.words.data[offset:], uint(len)) - if matchlen+uint(dictionary.cutoffTransformsCount) <= len || matchlen == 0 { - return false - } - { - var cut uint = len - matchlen - var transform_id uint = (cut << 2) + uint((dictionary.cutoffTransforms>>(cut*6))&0x3F) - backward = max_backward + 1 + word_idx + (transform_id << dictionary.words.size_bits_by_length[len]) - } - - if backward > max_distance { - return false - } - - score = backwardReferenceScore(matchlen, backward) - if score < out.score { - return false - } - - out.len = matchlen - out.len_code_delta = int(len) - int(matchlen) - out.distance = 
backward - out.score = score - return true -} - -func searchInStaticDictionary(dictionary *encoderDictionary, handle hasherHandle, data []byte, max_length uint, max_backward uint, max_distance uint, out *hasherSearchResult, shallow bool) { - var key uint - var i uint - var self *hasherCommon = handle.Common() - if self.dict_num_matches < self.dict_num_lookups>>7 { - return - } - - key = uint(hash14(data) << 1) - for i = 0; ; (func() { i++; key++ })() { - var tmp uint - if shallow { - tmp = 1 - } else { - tmp = 2 - } - if i >= tmp { - break - } - var item uint = uint(dictionary.hash_table[key]) - self.dict_num_lookups++ - if item != 0 { - var item_matches bool = testStaticDictionaryItem(dictionary, item, data, max_length, max_backward, max_distance, out) - if item_matches { - self.dict_num_matches++ - } - } - } -} - -type backwardMatch struct { - distance uint32 - length_and_code uint32 -} - -func initBackwardMatch(self *backwardMatch, dist uint, len uint) { - self.distance = uint32(dist) - self.length_and_code = uint32(len << 5) -} - -func initDictionaryBackwardMatch(self *backwardMatch, dist uint, len uint, len_code uint) { - self.distance = uint32(dist) - var tmp uint - if len == len_code { - tmp = 0 - } else { - tmp = len_code - } - self.length_and_code = uint32(len<<5 | tmp) -} - -func backwardMatchLength(self *backwardMatch) uint { - return uint(self.length_and_code >> 5) -} - -func backwardMatchLengthCode(self *backwardMatch) uint { - var code uint = uint(self.length_and_code) & 31 - if code != 0 { - return code - } else { - return backwardMatchLength(self) - } -} - -func hasherReset(handle hasherHandle) { - if handle == nil { - return - } - handle.Common().is_prepared_ = false -} - -func newHasher(typ int) hasherHandle { - switch typ { - case 2: - return &hashLongestMatchQuickly{ - bucketBits: 16, - bucketSweep: 1, - hashLen: 5, - useDictionary: true, - } - case 3: - return &hashLongestMatchQuickly{ - bucketBits: 16, - bucketSweep: 2, - hashLen: 5, - 
useDictionary: false, - } - case 4: - return &hashLongestMatchQuickly{ - bucketBits: 17, - bucketSweep: 4, - hashLen: 5, - useDictionary: true, - } - case 5: - return new(h5) - case 6: - return new(h6) - case 10: - return new(h10) - case 35: - return &hashComposite{ - ha: newHasher(3), - hb: &hashRolling{jump: 4}, - } - case 40: - return &hashForgetfulChain{ - bucketBits: 15, - numBanks: 1, - bankBits: 16, - numLastDistancesToCheck: 4, - } - case 41: - return &hashForgetfulChain{ - bucketBits: 15, - numBanks: 1, - bankBits: 16, - numLastDistancesToCheck: 10, - } - case 42: - return &hashForgetfulChain{ - bucketBits: 15, - numBanks: 512, - bankBits: 9, - numLastDistancesToCheck: 16, - } - case 54: - return &hashLongestMatchQuickly{ - bucketBits: 20, - bucketSweep: 4, - hashLen: 7, - useDictionary: false, - } - case 55: - return &hashComposite{ - ha: newHasher(54), - hb: &hashRolling{jump: 4}, - } - case 65: - return &hashComposite{ - ha: newHasher(6), - hb: &hashRolling{jump: 1}, - } - } - - panic(fmt.Sprintf("unknown hasher type: %d", typ)) -} - -func hasherSetup(handle *hasherHandle, params *encoderParams, data []byte, position uint, input_size uint, is_last bool) { - var self hasherHandle = nil - var common *hasherCommon = nil - var one_shot bool = (position == 0 && is_last) - if *handle == nil { - chooseHasher(params, ¶ms.hasher) - self = newHasher(params.hasher.type_) - - *handle = self - common = self.Common() - common.params = params.hasher - self.Initialize(params) - } - - self = *handle - common = self.Common() - if !common.is_prepared_ { - self.Prepare(one_shot, input_size, data) - - if position == 0 { - common.dict_num_lookups = 0 - common.dict_num_matches = 0 - } - - common.is_prepared_ = true - } -} - -func initOrStitchToPreviousBlock(handle *hasherHandle, data []byte, mask uint, params *encoderParams, position uint, input_size uint, is_last bool) { - var self hasherHandle - hasherSetup(handle, params, data, position, input_size, is_last) - self = 
*handle - self.StitchToPreviousBlock(input_size, position, data, mask) -} diff --git a/vendor/github.com/andybalholm/brotli/hash_composite.go b/vendor/github.com/andybalholm/brotli/hash_composite.go deleted file mode 100644 index a65fe2e6a9..0000000000 --- a/vendor/github.com/andybalholm/brotli/hash_composite.go +++ /dev/null @@ -1,93 +0,0 @@ -package brotli - -/* Copyright 2018 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func (h *hashComposite) HashTypeLength() uint { - var a uint = h.ha.HashTypeLength() - var b uint = h.hb.HashTypeLength() - if a > b { - return a - } else { - return b - } -} - -func (h *hashComposite) StoreLookahead() uint { - var a uint = h.ha.StoreLookahead() - var b uint = h.hb.StoreLookahead() - if a > b { - return a - } else { - return b - } -} - -/* Composite hasher: This hasher allows to combine two other hashers, HASHER_A - and HASHER_B. */ -type hashComposite struct { - hasherCommon - ha hasherHandle - hb hasherHandle - params *encoderParams -} - -func (h *hashComposite) Initialize(params *encoderParams) { - h.params = params -} - -/* TODO: Initialize of the hashers is defered to Prepare (and params - remembered here) because we don't get the one_shot and input_size params - here that are needed to know the memory size of them. 
Instead provide - those params to all hashers InitializehashComposite */ -func (h *hashComposite) Prepare(one_shot bool, input_size uint, data []byte) { - if h.ha == nil { - var common_a *hasherCommon - var common_b *hasherCommon - - common_a = h.ha.Common() - common_a.params = h.params.hasher - common_a.is_prepared_ = false - common_a.dict_num_lookups = 0 - common_a.dict_num_matches = 0 - h.ha.Initialize(h.params) - - common_b = h.hb.Common() - common_b.params = h.params.hasher - common_b.is_prepared_ = false - common_b.dict_num_lookups = 0 - common_b.dict_num_matches = 0 - h.hb.Initialize(h.params) - } - - h.ha.Prepare(one_shot, input_size, data) - h.hb.Prepare(one_shot, input_size, data) -} - -func (h *hashComposite) Store(data []byte, mask uint, ix uint) { - h.ha.Store(data, mask, ix) - h.hb.Store(data, mask, ix) -} - -func (h *hashComposite) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { - h.ha.StoreRange(data, mask, ix_start, ix_end) - h.hb.StoreRange(data, mask, ix_start, ix_end) -} - -func (h *hashComposite) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { - h.ha.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask) - h.hb.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask) -} - -func (h *hashComposite) PrepareDistanceCache(distance_cache []int) { - h.ha.PrepareDistanceCache(distance_cache) - h.hb.PrepareDistanceCache(distance_cache) -} - -func (h *hashComposite) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - h.ha.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out) - h.hb.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out) -} diff --git 
a/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go b/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go deleted file mode 100644 index 3364c44bd5..0000000000 --- a/vendor/github.com/andybalholm/brotli/hash_forgetful_chain.go +++ /dev/null @@ -1,253 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2016 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func (*hashForgetfulChain) HashTypeLength() uint { - return 4 -} - -func (*hashForgetfulChain) StoreLookahead() uint { - return 4 -} - -/* HashBytes is the function that chooses the bucket to place the address in.*/ -func (h *hashForgetfulChain) HashBytes(data []byte) uint { - var hash uint32 = binary.LittleEndian.Uint32(data) * kHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return uint(hash >> (32 - h.bucketBits)) -} - -type slot struct { - delta uint16 - next uint16 -} - -/* A (forgetful) hash table to the data seen by the compressor, to - help create backward references to previous data. - - Hashes are stored in chains which are bucketed to groups. Group of chains - share a storage "bank". When more than "bank size" chain nodes are added, - oldest nodes are replaced; this way several chains may share a tail. 
*/ -type hashForgetfulChain struct { - hasherCommon - - bucketBits uint - numBanks uint - bankBits uint - numLastDistancesToCheck int - - addr []uint32 - head []uint16 - tiny_hash [65536]byte - banks [][]slot - free_slot_idx []uint16 - max_hops uint -} - -func (h *hashForgetfulChain) Initialize(params *encoderParams) { - var q uint - if params.quality > 6 { - q = 7 - } else { - q = 8 - } - h.max_hops = q << uint(params.quality-4) - - bankSize := 1 << h.bankBits - bucketSize := 1 << h.bucketBits - - h.addr = make([]uint32, bucketSize) - h.head = make([]uint16, bucketSize) - h.banks = make([][]slot, h.numBanks) - for i := range h.banks { - h.banks[i] = make([]slot, bankSize) - } - h.free_slot_idx = make([]uint16, h.numBanks) -} - -func (h *hashForgetfulChain) Prepare(one_shot bool, input_size uint, data []byte) { - var partial_prepare_threshold uint = (1 << h.bucketBits) >> 6 - /* Partial preparation is 100 times slower (per socket). */ - if one_shot && input_size <= partial_prepare_threshold { - var i uint - for i = 0; i < input_size; i++ { - var bucket uint = h.HashBytes(data[i:]) - - /* See InitEmpty comment. */ - h.addr[bucket] = 0xCCCCCCCC - - h.head[bucket] = 0xCCCC - } - } else { - /* Fill |addr| array with 0xCCCCCCCC value. Because of wrapping, position - processed by hasher never reaches 3GB + 64M; this makes all new chains - to be terminated after the first node. */ - for i := range h.addr { - h.addr[i] = 0xCCCCCCCC - } - - for i := range h.head { - h.head[i] = 0 - } - } - - h.tiny_hash = [65536]byte{} - for i := range h.free_slot_idx { - h.free_slot_idx[i] = 0 - } -} - -/* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend - node to corresponding chain; also update tiny_hash for current position. 
*/ -func (h *hashForgetfulChain) Store(data []byte, mask uint, ix uint) { - var key uint = h.HashBytes(data[ix&mask:]) - var bank uint = key & (h.numBanks - 1) - var idx uint - idx = uint(h.free_slot_idx[bank]) & ((1 << h.bankBits) - 1) - h.free_slot_idx[bank]++ - var delta uint = ix - uint(h.addr[key]) - h.tiny_hash[uint16(ix)] = byte(key) - if delta > 0xFFFF { - delta = 0xFFFF - } - h.banks[bank][idx].delta = uint16(delta) - h.banks[bank][idx].next = h.head[key] - h.addr[key] = uint32(ix) - h.head[key] = uint16(idx) -} - -func (h *hashForgetfulChain) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { - var i uint - for i = ix_start; i < ix_end; i++ { - h.Store(data, mask, i) - } -} - -func (h *hashForgetfulChain) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { - if num_bytes >= h.HashTypeLength()-1 && position >= 3 { - /* Prepare the hashes for three last bytes of the last write. - These could not be calculated before, since they require knowledge - of both the previous and the current block. */ - h.Store(ringbuffer, ring_buffer_mask, position-3) - h.Store(ringbuffer, ring_buffer_mask, position-2) - h.Store(ringbuffer, ring_buffer_mask, position-1) - } -} - -func (h *hashForgetfulChain) PrepareDistanceCache(distance_cache []int) { - prepareDistanceCache(distance_cache, h.numLastDistancesToCheck) -} - -/* Find a longest backward match of &data[cur_ix] up to the length of - max_length and stores the position cur_ix in the hash table. - - REQUIRES: PrepareDistanceCachehashForgetfulChain must be invoked for current distance cache - values; if this method is invoked repeatedly with the same distance - cache values, it is enough to invoke PrepareDistanceCachehashForgetfulChain once. - - Does not look for matches longer than max_length. - Does not look for matches further away than max_backward. - Writes the best match into |out|. - |out|->score is updated only if a better match is found. 
*/ -func (h *hashForgetfulChain) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var min_score uint = out.score - var best_score uint = out.score - var best_len uint = out.len - var key uint = h.HashBytes(data[cur_ix_masked:]) - var tiny_hash byte = byte(key) - /* Don't accept a short copy from far away. */ - out.len = 0 - - out.len_code_delta = 0 - - /* Try last distance first. */ - for i := 0; i < h.numLastDistancesToCheck; i++ { - var backward uint = uint(distance_cache[i]) - var prev_ix uint = (cur_ix - backward) - - /* For distance code 0 we want to consider 2-byte matches. */ - if i > 0 && h.tiny_hash[uint16(prev_ix)] != tiny_hash { - continue - } - if prev_ix >= cur_ix || backward > max_backward { - continue - } - - prev_ix &= ring_buffer_mask - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 2 { - var score uint = backwardReferenceScoreUsingLastDistance(uint(len)) - if best_score < score { - if i != 0 { - score -= backwardReferencePenaltyUsingLastDistance(uint(i)) - } - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - } - { - var bank uint = key & (h.numBanks - 1) - var backward uint = 0 - var hops uint = h.max_hops - var delta uint = cur_ix - uint(h.addr[key]) - var slot uint = uint(h.head[key]) - for { - tmp6 := hops - hops-- - if tmp6 == 0 { - break - } - var prev_ix uint - var last uint = slot - backward += delta - if backward > max_backward { - break - } - prev_ix = (cur_ix - backward) & ring_buffer_mask - slot = uint(h.banks[bank][last].next) - delta = uint(h.banks[bank][last].delta) - if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || 
data[cur_ix_masked+best_len] != data[prev_ix+best_len] { - continue - } - { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - /* Comparing for >= 3 does not change the semantics, but just saves - for a few unnecessary binary logarithms in backward reference - score, since we are not interested in such short matches. */ - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = best_score - } - } - } - } - - h.Store(data, ring_buffer_mask, cur_ix) - } - - if out.score == min_score { - searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) - } -} diff --git a/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go b/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go deleted file mode 100644 index 9375dc1553..0000000000 --- a/vendor/github.com/andybalholm/brotli/hash_longest_match_quickly.go +++ /dev/null @@ -1,214 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* For BUCKET_SWEEP == 1, enabling the dictionary lookup makes compression - a little faster (0.5% - 1%) and it compresses 0.15% better on small text - and HTML inputs. */ - -func (*hashLongestMatchQuickly) HashTypeLength() uint { - return 8 -} - -func (*hashLongestMatchQuickly) StoreLookahead() uint { - return 8 -} - -/* HashBytes is the function that chooses the bucket to place - the address in. The HashLongestMatch and hashLongestMatchQuickly - classes have separate, different implementations of hashing. 
*/ -func (h *hashLongestMatchQuickly) HashBytes(data []byte) uint32 { - var hash uint64 = ((binary.LittleEndian.Uint64(data) << (64 - 8*h.hashLen)) * kHashMul64) - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. */ - return uint32(hash >> (64 - h.bucketBits)) -} - -/* A (forgetful) hash table to the data seen by the compressor, to - help create backward references to previous data. - - This is a hash map of fixed size (1 << 16). Starting from the - given index, 1 buckets are used to store values of a key. */ -type hashLongestMatchQuickly struct { - hasherCommon - - bucketBits uint - bucketSweep int - hashLen uint - useDictionary bool - - buckets []uint32 -} - -func (h *hashLongestMatchQuickly) Initialize(params *encoderParams) { - h.buckets = make([]uint32, 1<> 7 - /* Partial preparation is 100 times slower (per socket). */ - if one_shot && input_size <= partial_prepare_threshold { - var i uint - for i = 0; i < input_size; i++ { - var key uint32 = h.HashBytes(data[i:]) - for j := 0; j < h.bucketSweep; j++ { - h.buckets[key+uint32(j)] = 0 - } - } - } else { - /* It is not strictly necessary to fill this buffer here, but - not filling will make the results of the compression stochastic - (but correct). This is because random data would cause the - system to find accidentally good backward references here and there. */ - for i := range h.buckets { - h.buckets[i] = 0 - } - } -} - -/* Look at 5 bytes at &data[ix & mask]. - Compute a hash from these, and store the value somewhere within - [ix .. ix+3]. */ -func (h *hashLongestMatchQuickly) Store(data []byte, mask uint, ix uint) { - var key uint32 = h.HashBytes(data[ix&mask:]) - var off uint32 = uint32(ix>>3) % uint32(h.bucketSweep) - /* Wiggle the value with the bucket sweep range. 
*/ - h.buckets[key+off] = uint32(ix) -} - -func (h *hashLongestMatchQuickly) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { - var i uint - for i = ix_start; i < ix_end; i++ { - h.Store(data, mask, i) - } -} - -func (h *hashLongestMatchQuickly) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) { - if num_bytes >= h.HashTypeLength()-1 && position >= 3 { - /* Prepare the hashes for three last bytes of the last write. - These could not be calculated before, since they require knowledge - of both the previous and the current block. */ - h.Store(ringbuffer, ringbuffer_mask, position-3) - h.Store(ringbuffer, ringbuffer_mask, position-2) - h.Store(ringbuffer, ringbuffer_mask, position-1) - } -} - -func (*hashLongestMatchQuickly) PrepareDistanceCache(distance_cache []int) { -} - -/* Find a longest backward match of &data[cur_ix & ring_buffer_mask] - up to the length of max_length and stores the position cur_ix in the - hash table. - - Does not look for matches longer than max_length. - Does not look for matches further away than max_backward. - Writes the best match into |out|. - |out|->score is updated only if a better match is found. 
*/ -func (h *hashLongestMatchQuickly) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var best_len_in uint = out.len - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var key uint32 = h.HashBytes(data[cur_ix_masked:]) - var compare_char int = int(data[cur_ix_masked+best_len_in]) - var min_score uint = out.score - var best_score uint = out.score - var best_len uint = best_len_in - var cached_backward uint = uint(distance_cache[0]) - var prev_ix uint = cur_ix - cached_backward - var bucket []uint32 - out.len_code_delta = 0 - if prev_ix < cur_ix { - prev_ix &= uint(uint32(ring_buffer_mask)) - if compare_char == int(data[prev_ix+best_len]) { - var len uint = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - var score uint = backwardReferenceScoreUsingLastDistance(uint(len)) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = uint(len) - out.distance = cached_backward - out.score = best_score - compare_char = int(data[cur_ix_masked+best_len]) - if h.bucketSweep == 1 { - h.buckets[key] = uint32(cur_ix) - return - } - } - } - } - } - - if h.bucketSweep == 1 { - var backward uint - var len uint - - /* Only one to look for, don't bother to prepare for a loop. 
*/ - prev_ix = uint(h.buckets[key]) - - h.buckets[key] = uint32(cur_ix) - backward = cur_ix - prev_ix - prev_ix &= uint(uint32(ring_buffer_mask)) - if compare_char != int(data[prev_ix+best_len_in]) { - return - } - - if backward == 0 || backward > max_backward { - return - } - - len = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - out.len = uint(len) - out.distance = backward - out.score = score - return - } - } - } else { - bucket = h.buckets[key:] - var i int - prev_ix = uint(bucket[0]) - bucket = bucket[1:] - for i = 0; i < h.bucketSweep; (func() { i++; tmp3 := bucket; bucket = bucket[1:]; prev_ix = uint(tmp3[0]) })() { - var backward uint = cur_ix - prev_ix - var len uint - prev_ix &= uint(uint32(ring_buffer_mask)) - if compare_char != int(data[prev_ix+best_len]) { - continue - } - - if backward == 0 || backward > max_backward { - continue - } - - len = findMatchLengthWithLimit(data[prev_ix:], data[cur_ix_masked:], max_length) - if len >= 4 { - var score uint = backwardReferenceScore(uint(len), backward) - if best_score < score { - best_score = score - best_len = uint(len) - out.len = best_len - out.distance = backward - out.score = score - compare_char = int(data[cur_ix_masked+best_len]) - } - } - } - } - - if h.useDictionary && min_score == out.score { - searchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, true) - } - - h.buckets[key+uint32((cur_ix>>3)%uint(h.bucketSweep))] = uint32(cur_ix) -} diff --git a/vendor/github.com/andybalholm/brotli/hash_rolling.go b/vendor/github.com/andybalholm/brotli/hash_rolling.go deleted file mode 100644 index ad655a0a5b..0000000000 --- a/vendor/github.com/andybalholm/brotli/hash_rolling.go +++ /dev/null @@ -1,169 +0,0 @@ -package brotli - -/* Copyright 2018 Google Inc. All Rights Reserved. 
- - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* NOTE: this hasher does not search in the dictionary. It is used as - backup-hasher, the main hasher already searches in it. */ - -const kRollingHashMul32 uint32 = 69069 - -const kInvalidPosHashRolling uint32 = 0xffffffff - -/* This hasher uses a longer forward length, but returning a higher value here - will hurt compression by the main hasher when combined with a composite - hasher. The hasher tests for forward itself instead. */ -func (*hashRolling) HashTypeLength() uint { - return 4 -} - -func (*hashRolling) StoreLookahead() uint { - return 4 -} - -/* Computes a code from a single byte. A lookup table of 256 values could be - used, but simply adding 1 works about as good. */ -func (*hashRolling) HashByte(b byte) uint32 { - return uint32(b) + 1 -} - -func (h *hashRolling) HashRollingFunctionInitial(state uint32, add byte, factor uint32) uint32 { - return uint32(factor*state + h.HashByte(add)) -} - -func (h *hashRolling) HashRollingFunction(state uint32, add byte, rem byte, factor uint32, factor_remove uint32) uint32 { - return uint32(factor*state + h.HashByte(add) - factor_remove*h.HashByte(rem)) -} - -/* Rolling hash for long distance long string matches. Stores one position - per bucket, bucket key is computed over a long region. 
*/ -type hashRolling struct { - hasherCommon - - jump int - - state uint32 - table []uint32 - next_ix uint - chunk_len uint32 - factor uint32 - factor_remove uint32 -} - -func (h *hashRolling) Initialize(params *encoderParams) { - h.state = 0 - h.next_ix = 0 - - h.factor = kRollingHashMul32 - - /* Compute the factor of the oldest byte to remove: factor**steps modulo - 0xffffffff (the multiplications rely on 32-bit overflow) */ - h.factor_remove = 1 - - for i := 0; i < 32; i += h.jump { - h.factor_remove *= h.factor - } - - h.table = make([]uint32, 16777216) - for i := 0; i < 16777216; i++ { - h.table[i] = kInvalidPosHashRolling - } -} - -func (h *hashRolling) Prepare(one_shot bool, input_size uint, data []byte) { - /* Too small size, cannot use this hasher. */ - if input_size < 32 { - return - } - h.state = 0 - for i := 0; i < 32; i += h.jump { - h.state = h.HashRollingFunctionInitial(h.state, data[i], h.factor) - } -} - -func (*hashRolling) Store(data []byte, mask uint, ix uint) { -} - -func (*hashRolling) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) { -} - -func (h *hashRolling) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { - var position_masked uint - /* In this case we must re-initialize the hasher from scratch from the - current position. */ - - var available uint = num_bytes - if position&uint(h.jump-1) != 0 { - var diff uint = uint(h.jump) - (position & uint(h.jump-1)) - if diff > available { - available = 0 - } else { - available = available - diff - } - position += diff - } - - position_masked = position & ring_buffer_mask - - /* wrapping around ringbuffer not handled. 
*/ - if available > ring_buffer_mask-position_masked { - available = ring_buffer_mask - position_masked - } - - h.Prepare(false, available, ringbuffer[position&ring_buffer_mask:]) - h.next_ix = position -} - -func (*hashRolling) PrepareDistanceCache(distance_cache []int) { -} - -func (h *hashRolling) FindLongestMatch(dictionary *encoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *hasherSearchResult) { - var cur_ix_masked uint = cur_ix & ring_buffer_mask - var pos uint = h.next_ix - - if cur_ix&uint(h.jump-1) != 0 { - return - } - - /* Not enough lookahead */ - if max_length < 32 { - return - } - - for pos = h.next_ix; pos <= cur_ix; pos += uint(h.jump) { - var code uint32 = h.state & ((16777216 * 64) - 1) - var rem byte = data[pos&ring_buffer_mask] - var add byte = data[(pos+32)&ring_buffer_mask] - var found_ix uint = uint(kInvalidPosHashRolling) - - h.state = h.HashRollingFunction(h.state, add, rem, h.factor, h.factor_remove) - - if code < 16777216 { - found_ix = uint(h.table[code]) - h.table[code] = uint32(pos) - if pos == cur_ix && uint32(found_ix) != kInvalidPosHashRolling { - /* The cast to 32-bit makes backward distances up to 4GB work even - if cur_ix is above 4GB, despite using 32-bit values in the table. 
*/ - var backward uint = uint(uint32(cur_ix - found_ix)) - if backward <= max_backward { - var found_ix_masked uint = found_ix & ring_buffer_mask - var len uint = findMatchLengthWithLimit(data[found_ix_masked:], data[cur_ix_masked:], max_length) - if len >= 4 && len > out.len { - var score uint = backwardReferenceScore(uint(len), backward) - if score > out.score { - out.len = uint(len) - out.distance = backward - out.score = score - out.len_code_delta = 0 - } - } - } - } - } - } - - h.next_ix = cur_ix + uint(h.jump) -} diff --git a/vendor/github.com/andybalholm/brotli/histogram.go b/vendor/github.com/andybalholm/brotli/histogram.go deleted file mode 100644 index 0346622beb..0000000000 --- a/vendor/github.com/andybalholm/brotli/histogram.go +++ /dev/null @@ -1,226 +0,0 @@ -package brotli - -import "math" - -/* The distance symbols effectively used by "Large Window Brotli" (32-bit). */ -const numHistogramDistanceSymbols = 544 - -type histogramLiteral struct { - data_ [numLiteralSymbols]uint32 - total_count_ uint - bit_cost_ float64 -} - -func histogramClearLiteral(self *histogramLiteral) { - self.data_ = [numLiteralSymbols]uint32{} - self.total_count_ = 0 - self.bit_cost_ = math.MaxFloat64 -} - -func clearHistogramsLiteral(array []histogramLiteral, length uint) { - var i uint - for i = 0; i < length; i++ { - histogramClearLiteral(&array[i:][0]) - } -} - -func histogramAddLiteral(self *histogramLiteral, val uint) { - self.data_[val]++ - self.total_count_++ -} - -func histogramAddVectorLiteral(self *histogramLiteral, p []byte, n uint) { - self.total_count_ += n - n += 1 - for { - n-- - if n == 0 { - break - } - self.data_[p[0]]++ - p = p[1:] - } -} - -func histogramAddHistogramLiteral(self *histogramLiteral, v *histogramLiteral) { - var i uint - self.total_count_ += v.total_count_ - for i = 0; i < numLiteralSymbols; i++ { - self.data_[i] += v.data_[i] - } -} - -func histogramDataSizeLiteral() uint { - return numLiteralSymbols -} - -type 
histogramCommand struct { - data_ [numCommandSymbols]uint32 - total_count_ uint - bit_cost_ float64 -} - -func histogramClearCommand(self *histogramCommand) { - self.data_ = [numCommandSymbols]uint32{} - self.total_count_ = 0 - self.bit_cost_ = math.MaxFloat64 -} - -func clearHistogramsCommand(array []histogramCommand, length uint) { - var i uint - for i = 0; i < length; i++ { - histogramClearCommand(&array[i:][0]) - } -} - -func histogramAddCommand(self *histogramCommand, val uint) { - self.data_[val]++ - self.total_count_++ -} - -func histogramAddVectorCommand(self *histogramCommand, p []uint16, n uint) { - self.total_count_ += n - n += 1 - for { - n-- - if n == 0 { - break - } - self.data_[p[0]]++ - p = p[1:] - } -} - -func histogramAddHistogramCommand(self *histogramCommand, v *histogramCommand) { - var i uint - self.total_count_ += v.total_count_ - for i = 0; i < numCommandSymbols; i++ { - self.data_[i] += v.data_[i] - } -} - -func histogramDataSizeCommand() uint { - return numCommandSymbols -} - -type histogramDistance struct { - data_ [numDistanceSymbols]uint32 - total_count_ uint - bit_cost_ float64 -} - -func histogramClearDistance(self *histogramDistance) { - self.data_ = [numDistanceSymbols]uint32{} - self.total_count_ = 0 - self.bit_cost_ = math.MaxFloat64 -} - -func clearHistogramsDistance(array []histogramDistance, length uint) { - var i uint - for i = 0; i < length; i++ { - histogramClearDistance(&array[i:][0]) - } -} - -func histogramAddDistance(self *histogramDistance, val uint) { - self.data_[val]++ - self.total_count_++ -} - -func histogramAddVectorDistance(self *histogramDistance, p []uint16, n uint) { - self.total_count_ += n - n += 1 - for { - n-- - if n == 0 { - break - } - self.data_[p[0]]++ - p = p[1:] - } -} - -func histogramAddHistogramDistance(self *histogramDistance, v *histogramDistance) { - var i uint - self.total_count_ += v.total_count_ - for i = 0; i < numDistanceSymbols; i++ { - self.data_[i] += v.data_[i] - } -} - -func 
histogramDataSizeDistance() uint { - return numDistanceSymbols -} - -type blockSplitIterator struct { - split_ *blockSplit - idx_ uint - type_ uint - length_ uint -} - -func initBlockSplitIterator(self *blockSplitIterator, split *blockSplit) { - self.split_ = split - self.idx_ = 0 - self.type_ = 0 - if len(split.lengths) > 0 { - self.length_ = uint(split.lengths[0]) - } else { - self.length_ = 0 - } -} - -func blockSplitIteratorNext(self *blockSplitIterator) { - if self.length_ == 0 { - self.idx_++ - self.type_ = uint(self.split_.types[self.idx_]) - self.length_ = uint(self.split_.lengths[self.idx_]) - } - - self.length_-- -} - -func buildHistogramsWithContext(cmds []command, literal_split *blockSplit, insert_and_copy_split *blockSplit, dist_split *blockSplit, ringbuffer []byte, start_pos uint, mask uint, prev_byte byte, prev_byte2 byte, context_modes []int, literal_histograms []histogramLiteral, insert_and_copy_histograms []histogramCommand, copy_dist_histograms []histogramDistance) { - var pos uint = start_pos - var literal_it blockSplitIterator - var insert_and_copy_it blockSplitIterator - var dist_it blockSplitIterator - - initBlockSplitIterator(&literal_it, literal_split) - initBlockSplitIterator(&insert_and_copy_it, insert_and_copy_split) - initBlockSplitIterator(&dist_it, dist_split) - for i := range cmds { - var cmd *command = &cmds[i] - var j uint - blockSplitIteratorNext(&insert_and_copy_it) - histogramAddCommand(&insert_and_copy_histograms[insert_and_copy_it.type_], uint(cmd.cmd_prefix_)) - - /* TODO: unwrap iterator blocks. 
*/ - for j = uint(cmd.insert_len_); j != 0; j-- { - var context uint - blockSplitIteratorNext(&literal_it) - context = literal_it.type_ - if context_modes != nil { - var lut contextLUT = getContextLUT(context_modes[context]) - context = (context << literalContextBits) + uint(getContext(prev_byte, prev_byte2, lut)) - } - - histogramAddLiteral(&literal_histograms[context], uint(ringbuffer[pos&mask])) - prev_byte2 = prev_byte - prev_byte = ringbuffer[pos&mask] - pos++ - } - - pos += uint(commandCopyLen(cmd)) - if commandCopyLen(cmd) != 0 { - prev_byte2 = ringbuffer[(pos-2)&mask] - prev_byte = ringbuffer[(pos-1)&mask] - if cmd.cmd_prefix_ >= 128 { - var context uint - blockSplitIteratorNext(&dist_it) - context = uint(uint32(dist_it.type_< bestQ && - (spec.Value == "*" || spec.Value == offer) { - bestQ = spec.Q - bestOffer = offer - } - } - } - if bestQ == 0 { - bestOffer = "" - } - return bestOffer -} - -// acceptSpec describes an Accept* header. -type acceptSpec struct { - Value string - Q float64 -} - -// parseAccept parses Accept* headers. 
-func parseAccept(header http.Header, key string) (specs []acceptSpec) { -loop: - for _, s := range header[key] { - for { - var spec acceptSpec - spec.Value, s = expectTokenSlash(s) - if spec.Value == "" { - continue loop - } - spec.Q = 1.0 - s = skipSpace(s) - if strings.HasPrefix(s, ";") { - s = skipSpace(s[1:]) - if !strings.HasPrefix(s, "q=") { - continue loop - } - spec.Q, s = expectQuality(s[2:]) - if spec.Q < 0.0 { - continue loop - } - } - specs = append(specs, spec) - s = skipSpace(s) - if !strings.HasPrefix(s, ",") { - continue loop - } - s = skipSpace(s[1:]) - } - } - return -} - -func skipSpace(s string) (rest string) { - i := 0 - for ; i < len(s); i++ { - if octetTypes[s[i]]&isSpace == 0 { - break - } - } - return s[i:] -} - -func expectTokenSlash(s string) (token, rest string) { - i := 0 - for ; i < len(s); i++ { - b := s[i] - if (octetTypes[b]&isToken == 0) && b != '/' { - break - } - } - return s[:i], s[i:] -} - -func expectQuality(s string) (q float64, rest string) { - switch { - case len(s) == 0: - return -1, "" - case s[0] == '0': - q = 0 - case s[0] == '1': - q = 1 - default: - return -1, "" - } - s = s[1:] - if !strings.HasPrefix(s, ".") { - return q, s - } - s = s[1:] - i := 0 - n := 0 - d := 1 - for ; i < len(s); i++ { - b := s[i] - if b < '0' || b > '9' { - break - } - n = n*10 + int(b) - '0' - d *= 10 - } - return q + float64(n)/float64(d), s[i:] -} - -// Octet types from RFC 2616. -var octetTypes [256]octetType - -type octetType byte - -const ( - isToken octetType = 1 << iota - isSpace -) - -func init() { - // OCTET = - // CHAR = - // CTL = - // CR = - // LF = - // SP = - // HT = - // <"> = - // CRLF = CR LF - // LWS = [CRLF] 1*( SP | HT ) - // TEXT = - // separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <"> - // | "/" | "[" | "]" | "?" 
| "=" | "{" | "}" | SP | HT - // token = 1* - // qdtext = > - - for c := 0; c < 256; c++ { - var t octetType - isCtl := c <= 31 || c == 127 - isChar := 0 <= c && c <= 127 - isSeparator := strings.IndexRune(" \t\"(),/:;<=>?@[]\\{}", rune(c)) >= 0 - if strings.IndexRune(" \t\r\n", rune(c)) >= 0 { - t |= isSpace - } - if isChar && !isCtl && !isSeparator { - t |= isToken - } - octetTypes[c] = t - } -} diff --git a/vendor/github.com/andybalholm/brotli/huffman.go b/vendor/github.com/andybalholm/brotli/huffman.go deleted file mode 100644 index 182f3d2a55..0000000000 --- a/vendor/github.com/andybalholm/brotli/huffman.go +++ /dev/null @@ -1,653 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Utilities for building Huffman decoding tables. */ - -const huffmanMaxCodeLength = 15 - -/* Maximum possible Huffman table size for an alphabet size of (index * 32), - max code length 15 and root table bits 8. */ -var kMaxHuffmanTableSize = []uint16{ - 256, - 402, - 436, - 468, - 500, - 534, - 566, - 598, - 630, - 662, - 694, - 726, - 758, - 790, - 822, - 854, - 886, - 920, - 952, - 984, - 1016, - 1048, - 1080, - 1112, - 1144, - 1176, - 1208, - 1240, - 1272, - 1304, - 1336, - 1368, - 1400, - 1432, - 1464, - 1496, - 1528, -} - -/* BROTLI_NUM_BLOCK_LEN_SYMBOLS == 26 */ -const huffmanMaxSize26 = 396 - -/* BROTLI_MAX_BLOCK_TYPE_SYMBOLS == 258 */ -const huffmanMaxSize258 = 632 - -/* BROTLI_MAX_CONTEXT_MAP_SYMBOLS == 272 */ -const huffmanMaxSize272 = 646 - -const huffmanMaxCodeLengthCodeLength = 5 - -/* Do not create this struct directly - use the ConstructHuffmanCode - * constructor below! 
*/ -type huffmanCode struct { - bits byte - value uint16 -} - -func constructHuffmanCode(bits byte, value uint16) huffmanCode { - var h huffmanCode - h.bits = bits - h.value = value - return h -} - -/* Builds Huffman lookup table assuming code lengths are in symbol order. */ - -/* Builds Huffman lookup table assuming code lengths are in symbol order. - Returns size of resulting table. */ - -/* Builds a simple Huffman table. The |num_symbols| parameter is to be - interpreted as follows: 0 means 1 symbol, 1 means 2 symbols, - 2 means 3 symbols, 3 means 4 symbols with lengths [2, 2, 2, 2], - 4 means 4 symbols with lengths [1, 2, 3, 3]. */ - -/* Contains a collection of Huffman trees with the same alphabet size. */ -/* max_symbol is needed due to simple codes since log2(alphabet_size) could be - greater than log2(max_symbol). */ -type huffmanTreeGroup struct { - htrees [][]huffmanCode - codes []huffmanCode - alphabet_size uint16 - max_symbol uint16 - num_htrees uint16 -} - -const reverseBitsMax = 8 - -const reverseBitsBase = 0 - -var kReverseBits = [1 << reverseBitsMax]byte{ - 0x00, - 0x80, - 0x40, - 0xC0, - 0x20, - 0xA0, - 0x60, - 0xE0, - 0x10, - 0x90, - 0x50, - 0xD0, - 0x30, - 0xB0, - 0x70, - 0xF0, - 0x08, - 0x88, - 0x48, - 0xC8, - 0x28, - 0xA8, - 0x68, - 0xE8, - 0x18, - 0x98, - 0x58, - 0xD8, - 0x38, - 0xB8, - 0x78, - 0xF8, - 0x04, - 0x84, - 0x44, - 0xC4, - 0x24, - 0xA4, - 0x64, - 0xE4, - 0x14, - 0x94, - 0x54, - 0xD4, - 0x34, - 0xB4, - 0x74, - 0xF4, - 0x0C, - 0x8C, - 0x4C, - 0xCC, - 0x2C, - 0xAC, - 0x6C, - 0xEC, - 0x1C, - 0x9C, - 0x5C, - 0xDC, - 0x3C, - 0xBC, - 0x7C, - 0xFC, - 0x02, - 0x82, - 0x42, - 0xC2, - 0x22, - 0xA2, - 0x62, - 0xE2, - 0x12, - 0x92, - 0x52, - 0xD2, - 0x32, - 0xB2, - 0x72, - 0xF2, - 0x0A, - 0x8A, - 0x4A, - 0xCA, - 0x2A, - 0xAA, - 0x6A, - 0xEA, - 0x1A, - 0x9A, - 0x5A, - 0xDA, - 0x3A, - 0xBA, - 0x7A, - 0xFA, - 0x06, - 0x86, - 0x46, - 0xC6, - 0x26, - 0xA6, - 0x66, - 0xE6, - 0x16, - 0x96, - 0x56, - 0xD6, - 0x36, - 0xB6, - 0x76, - 0xF6, - 0x0E, - 0x8E, 
- 0x4E, - 0xCE, - 0x2E, - 0xAE, - 0x6E, - 0xEE, - 0x1E, - 0x9E, - 0x5E, - 0xDE, - 0x3E, - 0xBE, - 0x7E, - 0xFE, - 0x01, - 0x81, - 0x41, - 0xC1, - 0x21, - 0xA1, - 0x61, - 0xE1, - 0x11, - 0x91, - 0x51, - 0xD1, - 0x31, - 0xB1, - 0x71, - 0xF1, - 0x09, - 0x89, - 0x49, - 0xC9, - 0x29, - 0xA9, - 0x69, - 0xE9, - 0x19, - 0x99, - 0x59, - 0xD9, - 0x39, - 0xB9, - 0x79, - 0xF9, - 0x05, - 0x85, - 0x45, - 0xC5, - 0x25, - 0xA5, - 0x65, - 0xE5, - 0x15, - 0x95, - 0x55, - 0xD5, - 0x35, - 0xB5, - 0x75, - 0xF5, - 0x0D, - 0x8D, - 0x4D, - 0xCD, - 0x2D, - 0xAD, - 0x6D, - 0xED, - 0x1D, - 0x9D, - 0x5D, - 0xDD, - 0x3D, - 0xBD, - 0x7D, - 0xFD, - 0x03, - 0x83, - 0x43, - 0xC3, - 0x23, - 0xA3, - 0x63, - 0xE3, - 0x13, - 0x93, - 0x53, - 0xD3, - 0x33, - 0xB3, - 0x73, - 0xF3, - 0x0B, - 0x8B, - 0x4B, - 0xCB, - 0x2B, - 0xAB, - 0x6B, - 0xEB, - 0x1B, - 0x9B, - 0x5B, - 0xDB, - 0x3B, - 0xBB, - 0x7B, - 0xFB, - 0x07, - 0x87, - 0x47, - 0xC7, - 0x27, - 0xA7, - 0x67, - 0xE7, - 0x17, - 0x97, - 0x57, - 0xD7, - 0x37, - 0xB7, - 0x77, - 0xF7, - 0x0F, - 0x8F, - 0x4F, - 0xCF, - 0x2F, - 0xAF, - 0x6F, - 0xEF, - 0x1F, - 0x9F, - 0x5F, - 0xDF, - 0x3F, - 0xBF, - 0x7F, - 0xFF, -} - -const reverseBitsLowest = (uint64(1) << (reverseBitsMax - 1 + reverseBitsBase)) - -/* Returns reverse(num >> BROTLI_REVERSE_BITS_BASE, BROTLI_REVERSE_BITS_MAX), - where reverse(value, len) is the bit-wise reversal of the len least - significant bits of value. */ -func reverseBits8(num uint64) uint64 { - return uint64(kReverseBits[num]) -} - -/* Stores code in table[0], table[step], table[2*step], ..., table[end] */ -/* Assumes that end is an integer multiple of step */ -func replicateValue(table []huffmanCode, step int, end int, code huffmanCode) { - for { - end -= step - table[end] = code - if end <= 0 { - break - } - } -} - -/* Returns the table width of the next 2nd level table. |count| is the histogram - of bit lengths for the remaining symbols, |len| is the code length of the - next processed symbol. 
*/ -func nextTableBitSize(count []uint16, len int, root_bits int) int { - var left int = 1 << uint(len-root_bits) - for len < huffmanMaxCodeLength { - left -= int(count[len]) - if left <= 0 { - break - } - len++ - left <<= 1 - } - - return len - root_bits -} - -func buildCodeLengthsHuffmanTable(table []huffmanCode, code_lengths []byte, count []uint16) { - var code huffmanCode /* current table entry */ /* symbol index in original or sorted table */ /* prefix code */ /* prefix code addend */ /* step size to replicate values in current table */ /* size of current table */ /* symbols sorted by code length */ - var symbol int - var key uint64 - var key_step uint64 - var step int - var table_size int - var sorted [codeLengthCodes]int - var offset [huffmanMaxCodeLengthCodeLength + 1]int - var bits int - var bits_count int - /* offsets in sorted table for each length */ - assert(huffmanMaxCodeLengthCodeLength <= reverseBitsMax) - - /* Generate offsets into sorted symbol table by code length. */ - symbol = -1 - - bits = 1 - var i int - for i = 0; i < huffmanMaxCodeLengthCodeLength; i++ { - symbol += int(count[bits]) - offset[bits] = symbol - bits++ - } - - /* Symbols with code length 0 are placed after all other symbols. */ - offset[0] = codeLengthCodes - 1 - - /* Sort symbols by length, by symbol order within each length. */ - symbol = codeLengthCodes - - for { - var i int - for i = 0; i < 6; i++ { - symbol-- - sorted[offset[code_lengths[symbol]]] = symbol - offset[code_lengths[symbol]]-- - } - if symbol == 0 { - break - } - } - - table_size = 1 << huffmanMaxCodeLengthCodeLength - - /* Special case: all symbols but one have 0 code length. */ - if offset[0] == 0 { - code = constructHuffmanCode(0, uint16(sorted[0])) - for key = 0; key < uint64(table_size); key++ { - table[key] = code - } - - return - } - - /* Fill in table. 
*/ - key = 0 - - key_step = reverseBitsLowest - symbol = 0 - bits = 1 - step = 2 - for { - for bits_count = int(count[bits]); bits_count != 0; bits_count-- { - code = constructHuffmanCode(byte(bits), uint16(sorted[symbol])) - symbol++ - replicateValue(table[reverseBits8(key):], step, table_size, code) - key += key_step - } - - step <<= 1 - key_step >>= 1 - bits++ - if bits > huffmanMaxCodeLengthCodeLength { - break - } - } -} - -func buildHuffmanTable(root_table []huffmanCode, root_bits int, symbol_lists symbolList, count []uint16) uint32 { - var code huffmanCode /* current table entry */ /* next available space in table */ /* current code length */ /* symbol index in original or sorted table */ /* prefix code */ /* prefix code addend */ /* 2nd level table prefix code */ /* 2nd level table prefix code addend */ /* step size to replicate values in current table */ /* key length of current table */ /* size of current table */ /* sum of root table size and 2nd level table sizes */ - var table []huffmanCode - var len int - var symbol int - var key uint64 - var key_step uint64 - var sub_key uint64 - var sub_key_step uint64 - var step int - var table_bits int - var table_size int - var total_size int - var max_length int = -1 - var bits int - var bits_count int - - assert(root_bits <= reverseBitsMax) - assert(huffmanMaxCodeLength-root_bits <= reverseBitsMax) - - for symbolListGet(symbol_lists, max_length) == 0xFFFF { - max_length-- - } - max_length += huffmanMaxCodeLength + 1 - - table = root_table - table_bits = root_bits - table_size = 1 << uint(table_bits) - total_size = table_size - - /* Fill in the root table. Reduce the table size to if possible, - and create the repetitions by memcpy. 
*/ - if table_bits > max_length { - table_bits = max_length - table_size = 1 << uint(table_bits) - } - - key = 0 - key_step = reverseBitsLowest - bits = 1 - step = 2 - for { - symbol = bits - (huffmanMaxCodeLength + 1) - for bits_count = int(count[bits]); bits_count != 0; bits_count-- { - symbol = int(symbolListGet(symbol_lists, symbol)) - code = constructHuffmanCode(byte(bits), uint16(symbol)) - replicateValue(table[reverseBits8(key):], step, table_size, code) - key += key_step - } - - step <<= 1 - key_step >>= 1 - bits++ - if bits > table_bits { - break - } - } - - /* If root_bits != table_bits then replicate to fill the remaining slots. */ - for total_size != table_size { - copy(table[table_size:], table[:uint(table_size)]) - table_size <<= 1 - } - - /* Fill in 2nd level tables and add pointers to root table. */ - key_step = reverseBitsLowest >> uint(root_bits-1) - - sub_key = reverseBitsLowest << 1 - sub_key_step = reverseBitsLowest - len = root_bits + 1 - step = 2 - for ; len <= max_length; len++ { - symbol = len - (huffmanMaxCodeLength + 1) - for ; count[len] != 0; count[len]-- { - if sub_key == reverseBitsLowest<<1 { - table = table[table_size:] - table_bits = nextTableBitSize(count, int(len), root_bits) - table_size = 1 << uint(table_bits) - total_size += table_size - sub_key = reverseBits8(key) - key += key_step - root_table[sub_key] = constructHuffmanCode(byte(table_bits+root_bits), uint16(uint64(uint(-cap(table)+cap(root_table)))-sub_key)) - sub_key = 0 - } - - symbol = int(symbolListGet(symbol_lists, symbol)) - code = constructHuffmanCode(byte(len-root_bits), uint16(symbol)) - replicateValue(table[reverseBits8(sub_key):], step, table_size, code) - sub_key += sub_key_step - } - - step <<= 1 - sub_key_step >>= 1 - } - - return uint32(total_size) -} - -func buildSimpleHuffmanTable(table []huffmanCode, root_bits int, val []uint16, num_symbols uint32) uint32 { - var table_size uint32 = 1 - var goal_size uint32 = 1 << uint(root_bits) - switch num_symbols { - 
case 0: - table[0] = constructHuffmanCode(0, val[0]) - - case 1: - if val[1] > val[0] { - table[0] = constructHuffmanCode(1, val[0]) - table[1] = constructHuffmanCode(1, val[1]) - } else { - table[0] = constructHuffmanCode(1, val[1]) - table[1] = constructHuffmanCode(1, val[0]) - } - - table_size = 2 - - case 2: - table[0] = constructHuffmanCode(1, val[0]) - table[2] = constructHuffmanCode(1, val[0]) - if val[2] > val[1] { - table[1] = constructHuffmanCode(2, val[1]) - table[3] = constructHuffmanCode(2, val[2]) - } else { - table[1] = constructHuffmanCode(2, val[2]) - table[3] = constructHuffmanCode(2, val[1]) - } - - table_size = 4 - - case 3: - var i int - var k int - for i = 0; i < 3; i++ { - for k = i + 1; k < 4; k++ { - if val[k] < val[i] { - var t uint16 = val[k] - val[k] = val[i] - val[i] = t - } - } - } - - table[0] = constructHuffmanCode(2, val[0]) - table[2] = constructHuffmanCode(2, val[1]) - table[1] = constructHuffmanCode(2, val[2]) - table[3] = constructHuffmanCode(2, val[3]) - table_size = 4 - - case 4: - if val[3] < val[2] { - var t uint16 = val[3] - val[3] = val[2] - val[2] = t - } - - table[0] = constructHuffmanCode(1, val[0]) - table[1] = constructHuffmanCode(2, val[1]) - table[2] = constructHuffmanCode(1, val[0]) - table[3] = constructHuffmanCode(3, val[2]) - table[4] = constructHuffmanCode(1, val[0]) - table[5] = constructHuffmanCode(2, val[1]) - table[6] = constructHuffmanCode(1, val[0]) - table[7] = constructHuffmanCode(3, val[3]) - table_size = 8 - } - - for table_size != goal_size { - copy(table[table_size:], table[:uint(table_size)]) - table_size <<= 1 - } - - return goal_size -} diff --git a/vendor/github.com/andybalholm/brotli/literal_cost.go b/vendor/github.com/andybalholm/brotli/literal_cost.go deleted file mode 100644 index 5a9ace94ee..0000000000 --- a/vendor/github.com/andybalholm/brotli/literal_cost.go +++ /dev/null @@ -1,182 +0,0 @@ -package brotli - -func utf8Position(last uint, c uint, clamp uint) uint { - 
if c < 128 { - return 0 /* Next one is the 'Byte 1' again. */ - } else if c >= 192 { /* Next one is the 'Byte 2' of utf-8 encoding. */ - return brotli_min_size_t(1, clamp) - } else { - /* Let's decide over the last byte if this ends the sequence. */ - if last < 0xE0 { - return 0 /* Completed two or three byte coding. */ /* Next one is the 'Byte 3' of utf-8 encoding. */ - } else { - return brotli_min_size_t(2, clamp) - } - } -} - -func decideMultiByteStatsLevel(pos uint, len uint, mask uint, data []byte) uint { - var counts = [3]uint{0} /* should be 2, but 1 compresses better. */ - var max_utf8 uint = 1 - var last_c uint = 0 - var i uint - for i = 0; i < len; i++ { - var c uint = uint(data[(pos+i)&mask]) - counts[utf8Position(last_c, c, 2)]++ - last_c = c - } - - if counts[2] < 500 { - max_utf8 = 1 - } - - if counts[1]+counts[2] < 25 { - max_utf8 = 0 - } - - return max_utf8 -} - -func estimateBitCostsForLiteralsUTF8(pos uint, len uint, mask uint, data []byte, cost []float32) { - var max_utf8 uint = decideMultiByteStatsLevel(pos, uint(len), mask, data) - /* Bootstrap histograms. */ - var histogram = [3][256]uint{[256]uint{0}} - var window_half uint = 495 - var in_window uint = brotli_min_size_t(window_half, uint(len)) - var in_window_utf8 = [3]uint{0} - /* max_utf8 is 0 (normal ASCII single byte modeling), - 1 (for 2-byte UTF-8 modeling), or 2 (for 3-byte UTF-8 modeling). */ - - var i uint - { - var last_c uint = 0 - var utf8_pos uint = 0 - for i = 0; i < in_window; i++ { - var c uint = uint(data[(pos+i)&mask]) - histogram[utf8_pos][c]++ - in_window_utf8[utf8_pos]++ - utf8_pos = utf8Position(last_c, c, max_utf8) - last_c = c - } - } - - /* Compute bit costs with sliding window. 
*/ - for i = 0; i < len; i++ { - if i >= window_half { - var c uint - var last_c uint - if i < window_half+1 { - c = 0 - } else { - c = uint(data[(pos+i-window_half-1)&mask]) - } - if i < window_half+2 { - last_c = 0 - } else { - last_c = uint(data[(pos+i-window_half-2)&mask]) - } - /* Remove a byte in the past. */ - - var utf8_pos2 uint = utf8Position(last_c, c, max_utf8) - histogram[utf8_pos2][data[(pos+i-window_half)&mask]]-- - in_window_utf8[utf8_pos2]-- - } - - if i+window_half < len { - var c uint = uint(data[(pos+i+window_half-1)&mask]) - var last_c uint = uint(data[(pos+i+window_half-2)&mask]) - /* Add a byte in the future. */ - - var utf8_pos2 uint = utf8Position(last_c, c, max_utf8) - histogram[utf8_pos2][data[(pos+i+window_half)&mask]]++ - in_window_utf8[utf8_pos2]++ - } - { - var c uint - var last_c uint - if i < 1 { - c = 0 - } else { - c = uint(data[(pos+i-1)&mask]) - } - if i < 2 { - last_c = 0 - } else { - last_c = uint(data[(pos+i-2)&mask]) - } - var utf8_pos uint = utf8Position(last_c, c, max_utf8) - var masked_pos uint = (pos + i) & mask - var histo uint = histogram[utf8_pos][data[masked_pos]] - var lit_cost float64 - if histo == 0 { - histo = 1 - } - - lit_cost = fastLog2(in_window_utf8[utf8_pos]) - fastLog2(histo) - lit_cost += 0.02905 - if lit_cost < 1.0 { - lit_cost *= 0.5 - lit_cost += 0.5 - } - - /* Make the first bytes more expensive -- seems to help, not sure why. - Perhaps because the entropy source is changing its properties - rapidly in the beginning of the file, perhaps because the beginning - of the data is a statistical "anomaly". 
*/ - if i < 2000 { - lit_cost += 0.7 - (float64(2000-i) / 2000.0 * 0.35) - } - - cost[i] = float32(lit_cost) - } - } -} - -func estimateBitCostsForLiterals(pos uint, len uint, mask uint, data []byte, cost []float32) { - if isMostlyUTF8(data, pos, mask, uint(len), kMinUTF8Ratio) { - estimateBitCostsForLiteralsUTF8(pos, uint(len), mask, data, cost) - return - } else { - var histogram = [256]uint{0} - var window_half uint = 2000 - var in_window uint = brotli_min_size_t(window_half, uint(len)) - var i uint - /* Bootstrap histogram. */ - for i = 0; i < in_window; i++ { - histogram[data[(pos+i)&mask]]++ - } - - /* Compute bit costs with sliding window. */ - for i = 0; i < len; i++ { - var histo uint - if i >= window_half { - /* Remove a byte in the past. */ - histogram[data[(pos+i-window_half)&mask]]-- - - in_window-- - } - - if i+window_half < len { - /* Add a byte in the future. */ - histogram[data[(pos+i+window_half)&mask]]++ - - in_window++ - } - - histo = histogram[data[(pos+i)&mask]] - if histo == 0 { - histo = 1 - } - { - var lit_cost float64 = fastLog2(in_window) - fastLog2(histo) - lit_cost += 0.029 - if lit_cost < 1.0 { - lit_cost *= 0.5 - lit_cost += 0.5 - } - - cost[i] = float32(lit_cost) - } - } - } -} diff --git a/vendor/github.com/andybalholm/brotli/memory.go b/vendor/github.com/andybalholm/brotli/memory.go deleted file mode 100644 index a07c7050a0..0000000000 --- a/vendor/github.com/andybalholm/brotli/memory.go +++ /dev/null @@ -1,66 +0,0 @@ -package brotli - -/* Copyright 2016 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* -Dynamically grows array capacity to at least the requested size -T: data type -A: array -C: capacity -R: requested size -*/ -func brotli_ensure_capacity_uint8_t(a *[]byte, c *uint, r uint) { - if *c < r { - var new_size uint = *c - if new_size == 0 { - new_size = r - } - - for new_size < r { - new_size *= 2 - } - - if cap(*a) < int(new_size) { - var new_array []byte = make([]byte, new_size) - if *c != 0 { - copy(new_array, (*a)[:*c]) - } - - *a = new_array - } else { - *a = (*a)[:new_size] - } - - *c = new_size - } -} - -func brotli_ensure_capacity_uint32_t(a *[]uint32, c *uint, r uint) { - var new_array []uint32 - if *c < r { - var new_size uint = *c - if new_size == 0 { - new_size = r - } - - for new_size < r { - new_size *= 2 - } - - if cap(*a) < int(new_size) { - new_array = make([]uint32, new_size) - if *c != 0 { - copy(new_array, (*a)[:*c]) - } - - *a = new_array - } else { - *a = (*a)[:new_size] - } - *c = new_size - } -} diff --git a/vendor/github.com/andybalholm/brotli/metablock.go b/vendor/github.com/andybalholm/brotli/metablock.go deleted file mode 100644 index 3014df8cdf..0000000000 --- a/vendor/github.com/andybalholm/brotli/metablock.go +++ /dev/null @@ -1,574 +0,0 @@ -package brotli - -import ( - "sync" -) - -/* Copyright 2014 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Algorithms for distributing the literals and commands of a metablock between - block types and contexts. 
*/ - -type metaBlockSplit struct { - literal_split blockSplit - command_split blockSplit - distance_split blockSplit - literal_context_map []uint32 - literal_context_map_size uint - distance_context_map []uint32 - distance_context_map_size uint - literal_histograms []histogramLiteral - literal_histograms_size uint - command_histograms []histogramCommand - command_histograms_size uint - distance_histograms []histogramDistance - distance_histograms_size uint -} - -var metaBlockPool sync.Pool - -func getMetaBlockSplit() *metaBlockSplit { - mb, _ := metaBlockPool.Get().(*metaBlockSplit) - - if mb == nil { - mb = &metaBlockSplit{} - } else { - initBlockSplit(&mb.literal_split) - initBlockSplit(&mb.command_split) - initBlockSplit(&mb.distance_split) - mb.literal_context_map = mb.literal_context_map[:0] - mb.literal_context_map_size = 0 - mb.distance_context_map = mb.distance_context_map[:0] - mb.distance_context_map_size = 0 - mb.literal_histograms = mb.literal_histograms[:0] - mb.command_histograms = mb.command_histograms[:0] - mb.distance_histograms = mb.distance_histograms[:0] - } - return mb -} - -func freeMetaBlockSplit(mb *metaBlockSplit) { - metaBlockPool.Put(mb) -} - -func initDistanceParams(params *encoderParams, npostfix uint32, ndirect uint32) { - var dist_params *distanceParams = ¶ms.dist - var alphabet_size uint32 - var max_distance uint32 - - dist_params.distance_postfix_bits = npostfix - dist_params.num_direct_distance_codes = ndirect - - alphabet_size = uint32(distanceAlphabetSize(uint(npostfix), uint(ndirect), maxDistanceBits)) - max_distance = ndirect + (1 << (maxDistanceBits + npostfix + 2)) - (1 << (npostfix + 2)) - - if params.large_window { - var bound = [maxNpostfix + 1]uint32{0, 4, 12, 28} - var postfix uint32 = 1 << npostfix - alphabet_size = uint32(distanceAlphabetSize(uint(npostfix), uint(ndirect), largeMaxDistanceBits)) - - /* The maximum distance is set so that no distance symbol used can encode - a distance larger than 
BROTLI_MAX_ALLOWED_DISTANCE with all - its extra bits set. */ - if ndirect < bound[npostfix] { - max_distance = maxAllowedDistance - (bound[npostfix] - ndirect) - } else if ndirect >= bound[npostfix]+postfix { - max_distance = (3 << 29) - 4 + (ndirect - bound[npostfix]) - } else { - max_distance = maxAllowedDistance - } - } - - dist_params.alphabet_size = alphabet_size - dist_params.max_distance = uint(max_distance) -} - -func recomputeDistancePrefixes(cmds []command, orig_params *distanceParams, new_params *distanceParams) { - if orig_params.distance_postfix_bits == new_params.distance_postfix_bits && orig_params.num_direct_distance_codes == new_params.num_direct_distance_codes { - return - } - - for i := range cmds { - var cmd *command = &cmds[i] - if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 { - prefixEncodeCopyDistance(uint(commandRestoreDistanceCode(cmd, orig_params)), uint(new_params.num_direct_distance_codes), uint(new_params.distance_postfix_bits), &cmd.dist_prefix_, &cmd.dist_extra_) - } - } -} - -func computeDistanceCost(cmds []command, orig_params *distanceParams, new_params *distanceParams, cost *float64) bool { - var equal_params bool = false - var dist_prefix uint16 - var dist_extra uint32 - var extra_bits float64 = 0.0 - var histo histogramDistance - histogramClearDistance(&histo) - - if orig_params.distance_postfix_bits == new_params.distance_postfix_bits && orig_params.num_direct_distance_codes == new_params.num_direct_distance_codes { - equal_params = true - } - - for i := range cmds { - cmd := &cmds[i] - if commandCopyLen(cmd) != 0 && cmd.cmd_prefix_ >= 128 { - if equal_params { - dist_prefix = cmd.dist_prefix_ - } else { - var distance uint32 = commandRestoreDistanceCode(cmd, orig_params) - if distance > uint32(new_params.max_distance) { - return false - } - - prefixEncodeCopyDistance(uint(distance), uint(new_params.num_direct_distance_codes), uint(new_params.distance_postfix_bits), &dist_prefix, &dist_extra) - } - - 
histogramAddDistance(&histo, uint(dist_prefix)&0x3FF) - extra_bits += float64(dist_prefix >> 10) - } - } - - *cost = populationCostDistance(&histo) + extra_bits - return true -} - -var buildMetaBlock_kMaxNumberOfHistograms uint = 256 - -func buildMetaBlock(ringbuffer []byte, pos uint, mask uint, params *encoderParams, prev_byte byte, prev_byte2 byte, cmds []command, literal_context_mode int, mb *metaBlockSplit) { - var distance_histograms []histogramDistance - var literal_histograms []histogramLiteral - var literal_context_modes []int = nil - var literal_histograms_size uint - var distance_histograms_size uint - var i uint - var literal_context_multiplier uint = 1 - var npostfix uint32 - var ndirect_msb uint32 = 0 - var check_orig bool = true - var best_dist_cost float64 = 1e99 - var orig_params encoderParams = *params - /* Histogram ids need to fit in one byte. */ - - var new_params encoderParams = *params - - for npostfix = 0; npostfix <= maxNpostfix; npostfix++ { - for ; ndirect_msb < 16; ndirect_msb++ { - var ndirect uint32 = ndirect_msb << npostfix - var skip bool - var dist_cost float64 - initDistanceParams(&new_params, npostfix, ndirect) - if npostfix == orig_params.dist.distance_postfix_bits && ndirect == orig_params.dist.num_direct_distance_codes { - check_orig = false - } - - skip = !computeDistanceCost(cmds, &orig_params.dist, &new_params.dist, &dist_cost) - if skip || (dist_cost > best_dist_cost) { - break - } - - best_dist_cost = dist_cost - params.dist = new_params.dist - } - - if ndirect_msb > 0 { - ndirect_msb-- - } - ndirect_msb /= 2 - } - - if check_orig { - var dist_cost float64 - computeDistanceCost(cmds, &orig_params.dist, &orig_params.dist, &dist_cost) - if dist_cost < best_dist_cost { - /* NB: currently unused; uncomment when more param tuning is added. 
*/ - /* best_dist_cost = dist_cost; */ - params.dist = orig_params.dist - } - } - - recomputeDistancePrefixes(cmds, &orig_params.dist, ¶ms.dist) - - splitBlock(cmds, ringbuffer, pos, mask, params, &mb.literal_split, &mb.command_split, &mb.distance_split) - - if !params.disable_literal_context_modeling { - literal_context_multiplier = 1 << literalContextBits - literal_context_modes = make([]int, (mb.literal_split.num_types)) - for i = 0; i < mb.literal_split.num_types; i++ { - literal_context_modes[i] = literal_context_mode - } - } - - literal_histograms_size = mb.literal_split.num_types * literal_context_multiplier - literal_histograms = make([]histogramLiteral, literal_histograms_size) - clearHistogramsLiteral(literal_histograms, literal_histograms_size) - - distance_histograms_size = mb.distance_split.num_types << distanceContextBits - distance_histograms = make([]histogramDistance, distance_histograms_size) - clearHistogramsDistance(distance_histograms, distance_histograms_size) - - mb.command_histograms_size = mb.command_split.num_types - if cap(mb.command_histograms) < int(mb.command_histograms_size) { - mb.command_histograms = make([]histogramCommand, (mb.command_histograms_size)) - } else { - mb.command_histograms = mb.command_histograms[:mb.command_histograms_size] - } - clearHistogramsCommand(mb.command_histograms, mb.command_histograms_size) - - buildHistogramsWithContext(cmds, &mb.literal_split, &mb.command_split, &mb.distance_split, ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_modes, literal_histograms, mb.command_histograms, distance_histograms) - literal_context_modes = nil - - mb.literal_context_map_size = mb.literal_split.num_types << literalContextBits - if cap(mb.literal_context_map) < int(mb.literal_context_map_size) { - mb.literal_context_map = make([]uint32, (mb.literal_context_map_size)) - } else { - mb.literal_context_map = mb.literal_context_map[:mb.literal_context_map_size] - } - - mb.literal_histograms_size = 
mb.literal_context_map_size - if cap(mb.literal_histograms) < int(mb.literal_histograms_size) { - mb.literal_histograms = make([]histogramLiteral, (mb.literal_histograms_size)) - } else { - mb.literal_histograms = mb.literal_histograms[:mb.literal_histograms_size] - } - - clusterHistogramsLiteral(literal_histograms, literal_histograms_size, buildMetaBlock_kMaxNumberOfHistograms, mb.literal_histograms, &mb.literal_histograms_size, mb.literal_context_map) - literal_histograms = nil - - if params.disable_literal_context_modeling { - /* Distribute assignment to all contexts. */ - for i = mb.literal_split.num_types; i != 0; { - var j uint = 0 - i-- - for ; j < 1< 0 { - var entropy [maxStaticContexts]float64 - var combined_histo []histogramLiteral = make([]histogramLiteral, (2 * num_contexts)) - var combined_entropy [2 * maxStaticContexts]float64 - var diff = [2]float64{0.0} - /* Try merging the set of histograms for the current block type with the - respective set of histograms for the last and second last block types. - Decide over the split based on the total reduction of entropy across - all contexts. */ - - var i uint - for i = 0; i < num_contexts; i++ { - var curr_histo_ix uint = self.curr_histogram_ix_ + i - var j uint - entropy[i] = bitsEntropy(histograms[curr_histo_ix].data_[:], self.alphabet_size_) - for j = 0; j < 2; j++ { - var jx uint = j*num_contexts + i - var last_histogram_ix uint = self.last_histogram_ix_[j] + i - combined_histo[jx] = histograms[curr_histo_ix] - histogramAddHistogramLiteral(&combined_histo[jx], &histograms[last_histogram_ix]) - combined_entropy[jx] = bitsEntropy(combined_histo[jx].data_[0:], self.alphabet_size_) - diff[j] += combined_entropy[jx] - entropy[i] - last_entropy[jx] - } - } - - if split.num_types < self.max_block_types_ && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ { - /* Create new block. 
*/ - split.lengths[self.num_blocks_] = uint32(self.block_size_) - - split.types[self.num_blocks_] = byte(split.num_types) - self.last_histogram_ix_[1] = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = split.num_types * num_contexts - for i = 0; i < num_contexts; i++ { - last_entropy[num_contexts+i] = last_entropy[i] - last_entropy[i] = entropy[i] - } - - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_ += num_contexts - if self.curr_histogram_ix_ < *self.histograms_size_ { - clearHistogramsLiteral(self.histograms_[self.curr_histogram_ix_:], self.num_contexts_) - } - - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else if diff[1] < diff[0]-20.0 { - split.lengths[self.num_blocks_] = uint32(self.block_size_) - split.types[self.num_blocks_] = split.types[self.num_blocks_-2] - /* Combine this block with second last block. */ - - var tmp uint = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] - self.last_histogram_ix_[1] = tmp - for i = 0; i < num_contexts; i++ { - histograms[self.last_histogram_ix_[0]+i] = combined_histo[num_contexts+i] - last_entropy[num_contexts+i] = last_entropy[i] - last_entropy[i] = combined_entropy[num_contexts+i] - histogramClearLiteral(&histograms[self.curr_histogram_ix_+i]) - } - - self.num_blocks_++ - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else { - /* Combine this block with last block. 
*/ - split.lengths[self.num_blocks_-1] += uint32(self.block_size_) - - for i = 0; i < num_contexts; i++ { - histograms[self.last_histogram_ix_[0]+i] = combined_histo[i] - last_entropy[i] = combined_entropy[i] - if split.num_types == 1 { - last_entropy[num_contexts+i] = last_entropy[i] - } - - histogramClearLiteral(&histograms[self.curr_histogram_ix_+i]) - } - - self.block_size_ = 0 - self.merge_last_count_++ - if self.merge_last_count_ > 1 { - self.target_block_size_ += self.min_block_size_ - } - } - - combined_histo = nil - } - - if is_final { - *self.histograms_size_ = split.num_types * num_contexts - split.num_blocks = self.num_blocks_ - } -} - -/* Adds the next symbol to the current block type and context. When the - current block reaches the target size, decides on merging the block. */ -func contextBlockSplitterAddSymbol(self *contextBlockSplitter, symbol uint, context uint) { - histogramAddLiteral(&self.histograms_[self.curr_histogram_ix_+context], symbol) - self.block_size_++ - if self.block_size_ == self.target_block_size_ { - contextBlockSplitterFinishBlock(self, false) /* is_final = */ - } -} - -func mapStaticContexts(num_contexts uint, static_context_map []uint32, mb *metaBlockSplit) { - var i uint - mb.literal_context_map_size = mb.literal_split.num_types << literalContextBits - if cap(mb.literal_context_map) < int(mb.literal_context_map_size) { - mb.literal_context_map = make([]uint32, (mb.literal_context_map_size)) - } else { - mb.literal_context_map = mb.literal_context_map[:mb.literal_context_map_size] - } - - for i = 0; i < mb.literal_split.num_types; i++ { - var offset uint32 = uint32(i * num_contexts) - var j uint - for j = 0; j < 1<= 128 { - blockSplitterAddSymbolDistance(&dist_blocks, uint(cmd.dist_prefix_)&0x3FF) - } - } - } - - if num_contexts == 1 { - blockSplitterFinishBlockLiteral(&lit_blocks.plain, true) /* is_final = */ - } else { - contextBlockSplitterFinishBlock(&lit_blocks.ctx, true) /* is_final = */ - } - - 
blockSplitterFinishBlockCommand(&cmd_blocks, true) /* is_final = */ - blockSplitterFinishBlockDistance(&dist_blocks, true) /* is_final = */ - - if num_contexts > 1 { - mapStaticContexts(num_contexts, static_context_map, mb) - } -} - -func buildMetaBlockGreedy(ringbuffer []byte, pos uint, mask uint, prev_byte byte, prev_byte2 byte, literal_context_lut contextLUT, num_contexts uint, static_context_map []uint32, commands []command, mb *metaBlockSplit) { - if num_contexts == 1 { - buildMetaBlockGreedyInternal(ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_lut, 1, nil, commands, mb) - } else { - buildMetaBlockGreedyInternal(ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_lut, num_contexts, static_context_map, commands, mb) - } -} - -func optimizeHistograms(num_distance_codes uint32, mb *metaBlockSplit) { - var good_for_rle [numCommandSymbols]byte - var i uint - for i = 0; i < mb.literal_histograms_size; i++ { - optimizeHuffmanCountsForRLE(256, mb.literal_histograms[i].data_[:], good_for_rle[:]) - } - - for i = 0; i < mb.command_histograms_size; i++ { - optimizeHuffmanCountsForRLE(numCommandSymbols, mb.command_histograms[i].data_[:], good_for_rle[:]) - } - - for i = 0; i < mb.distance_histograms_size; i++ { - optimizeHuffmanCountsForRLE(uint(num_distance_codes), mb.distance_histograms[i].data_[:], good_for_rle[:]) - } -} diff --git a/vendor/github.com/andybalholm/brotli/metablock_command.go b/vendor/github.com/andybalholm/brotli/metablock_command.go deleted file mode 100644 index 14c7b77135..0000000000 --- a/vendor/github.com/andybalholm/brotli/metablock_command.go +++ /dev/null @@ -1,165 +0,0 @@ -package brotli - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Greedy block splitter for one block category (literal, command or distance). 
- */ -type blockSplitterCommand struct { - alphabet_size_ uint - min_block_size_ uint - split_threshold_ float64 - num_blocks_ uint - split_ *blockSplit - histograms_ []histogramCommand - histograms_size_ *uint - target_block_size_ uint - block_size_ uint - curr_histogram_ix_ uint - last_histogram_ix_ [2]uint - last_entropy_ [2]float64 - merge_last_count_ uint -} - -func initBlockSplitterCommand(self *blockSplitterCommand, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramCommand, histograms_size *uint) { - var max_num_blocks uint = num_symbols/min_block_size + 1 - var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1) - /* We have to allocate one more histogram than the maximum number of block - types for the current histogram when the meta-block is too big. */ - self.alphabet_size_ = alphabet_size - - self.min_block_size_ = min_block_size - self.split_threshold_ = split_threshold - self.num_blocks_ = 0 - self.split_ = split - self.histograms_size_ = histograms_size - self.target_block_size_ = min_block_size - self.block_size_ = 0 - self.curr_histogram_ix_ = 0 - self.merge_last_count_ = 0 - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks) - self.split_.num_blocks = max_num_blocks - *histograms_size = max_num_types - if histograms == nil || cap(*histograms) < int(*histograms_size) { - *histograms = make([]histogramCommand, (*histograms_size)) - } else { - *histograms = (*histograms)[:*histograms_size] - } - self.histograms_ = *histograms - - /* Clear only current histogram. 
*/ - histogramClearCommand(&self.histograms_[0]) - - self.last_histogram_ix_[1] = 0 - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] -} - -/* Does either of three things: - (1) emits the current block with a new block type; - (2) emits the current block with the type of the second last block; - (3) merges the current block with the last block. */ -func blockSplitterFinishBlockCommand(self *blockSplitterCommand, is_final bool) { - var split *blockSplit = self.split_ - var last_entropy []float64 = self.last_entropy_[:] - var histograms []histogramCommand = self.histograms_ - self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_) - if self.num_blocks_ == 0 { - /* Create first block. */ - split.lengths[0] = uint32(self.block_size_) - - split.types[0] = 0 - last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_) - last_entropy[1] = last_entropy[0] - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearCommand(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - } else if self.block_size_ > 0 { - var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_) - var combined_histo [2]histogramCommand - var combined_entropy [2]float64 - var diff [2]float64 - var j uint - for j = 0; j < 2; j++ { - var last_histogram_ix uint = self.last_histogram_ix_[j] - combined_histo[j] = histograms[self.curr_histogram_ix_] - histogramAddHistogramCommand(&combined_histo[j], &histograms[last_histogram_ix]) - combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_) - diff[j] = combined_entropy[j] - entropy - last_entropy[j] - } - - if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ { - /* Create new block. 
*/ - split.lengths[self.num_blocks_] = uint32(self.block_size_) - - split.types[self.num_blocks_] = byte(split.num_types) - self.last_histogram_ix_[1] = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = uint(byte(split.num_types)) - last_entropy[1] = last_entropy[0] - last_entropy[0] = entropy - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearCommand(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else if diff[1] < diff[0]-20.0 { - split.lengths[self.num_blocks_] = uint32(self.block_size_) - split.types[self.num_blocks_] = split.types[self.num_blocks_-2] - /* Combine this block with second last block. */ - - var tmp uint = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] - self.last_histogram_ix_[1] = tmp - histograms[self.last_histogram_ix_[0]] = combined_histo[1] - last_entropy[1] = last_entropy[0] - last_entropy[0] = combined_entropy[1] - self.num_blocks_++ - self.block_size_ = 0 - histogramClearCommand(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else { - /* Combine this block with last block. */ - split.lengths[self.num_blocks_-1] += uint32(self.block_size_) - - histograms[self.last_histogram_ix_[0]] = combined_histo[0] - last_entropy[0] = combined_entropy[0] - if split.num_types == 1 { - last_entropy[1] = last_entropy[0] - } - - self.block_size_ = 0 - histogramClearCommand(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_++ - if self.merge_last_count_ > 1 { - self.target_block_size_ += self.min_block_size_ - } - } - } - - if is_final { - *self.histograms_size_ = split.num_types - split.num_blocks = self.num_blocks_ - } -} - -/* Adds the next symbol to the current histogram. 
When the current histogram - reaches the target size, decides on merging the block. */ -func blockSplitterAddSymbolCommand(self *blockSplitterCommand, symbol uint) { - histogramAddCommand(&self.histograms_[self.curr_histogram_ix_], symbol) - self.block_size_++ - if self.block_size_ == self.target_block_size_ { - blockSplitterFinishBlockCommand(self, false) /* is_final = */ - } -} diff --git a/vendor/github.com/andybalholm/brotli/metablock_distance.go b/vendor/github.com/andybalholm/brotli/metablock_distance.go deleted file mode 100644 index 5110a810e9..0000000000 --- a/vendor/github.com/andybalholm/brotli/metablock_distance.go +++ /dev/null @@ -1,165 +0,0 @@ -package brotli - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Greedy block splitter for one block category (literal, command or distance). - */ -type blockSplitterDistance struct { - alphabet_size_ uint - min_block_size_ uint - split_threshold_ float64 - num_blocks_ uint - split_ *blockSplit - histograms_ []histogramDistance - histograms_size_ *uint - target_block_size_ uint - block_size_ uint - curr_histogram_ix_ uint - last_histogram_ix_ [2]uint - last_entropy_ [2]float64 - merge_last_count_ uint -} - -func initBlockSplitterDistance(self *blockSplitterDistance, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramDistance, histograms_size *uint) { - var max_num_blocks uint = num_symbols/min_block_size + 1 - var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1) - /* We have to allocate one more histogram than the maximum number of block - types for the current histogram when the meta-block is too big. 
*/ - self.alphabet_size_ = alphabet_size - - self.min_block_size_ = min_block_size - self.split_threshold_ = split_threshold - self.num_blocks_ = 0 - self.split_ = split - self.histograms_size_ = histograms_size - self.target_block_size_ = min_block_size - self.block_size_ = 0 - self.curr_histogram_ix_ = 0 - self.merge_last_count_ = 0 - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks) - self.split_.num_blocks = max_num_blocks - *histograms_size = max_num_types - if histograms == nil || cap(*histograms) < int(*histograms_size) { - *histograms = make([]histogramDistance, *histograms_size) - } else { - *histograms = (*histograms)[:*histograms_size] - } - self.histograms_ = *histograms - - /* Clear only current histogram. */ - histogramClearDistance(&self.histograms_[0]) - - self.last_histogram_ix_[1] = 0 - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] -} - -/* Does either of three things: - (1) emits the current block with a new block type; - (2) emits the current block with the type of the second last block; - (3) merges the current block with the last block. */ -func blockSplitterFinishBlockDistance(self *blockSplitterDistance, is_final bool) { - var split *blockSplit = self.split_ - var last_entropy []float64 = self.last_entropy_[:] - var histograms []histogramDistance = self.histograms_ - self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_) - if self.num_blocks_ == 0 { - /* Create first block. 
*/ - split.lengths[0] = uint32(self.block_size_) - - split.types[0] = 0 - last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_) - last_entropy[1] = last_entropy[0] - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearDistance(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - } else if self.block_size_ > 0 { - var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_) - var combined_histo [2]histogramDistance - var combined_entropy [2]float64 - var diff [2]float64 - var j uint - for j = 0; j < 2; j++ { - var last_histogram_ix uint = self.last_histogram_ix_[j] - combined_histo[j] = histograms[self.curr_histogram_ix_] - histogramAddHistogramDistance(&combined_histo[j], &histograms[last_histogram_ix]) - combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_) - diff[j] = combined_entropy[j] - entropy - last_entropy[j] - } - - if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ { - /* Create new block. */ - split.lengths[self.num_blocks_] = uint32(self.block_size_) - - split.types[self.num_blocks_] = byte(split.num_types) - self.last_histogram_ix_[1] = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = uint(byte(split.num_types)) - last_entropy[1] = last_entropy[0] - last_entropy[0] = entropy - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearDistance(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else if diff[1] < diff[0]-20.0 { - split.lengths[self.num_blocks_] = uint32(self.block_size_) - split.types[self.num_blocks_] = split.types[self.num_blocks_-2] - /* Combine this block with second last block. 
*/ - - var tmp uint = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] - self.last_histogram_ix_[1] = tmp - histograms[self.last_histogram_ix_[0]] = combined_histo[1] - last_entropy[1] = last_entropy[0] - last_entropy[0] = combined_entropy[1] - self.num_blocks_++ - self.block_size_ = 0 - histogramClearDistance(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else { - /* Combine this block with last block. */ - split.lengths[self.num_blocks_-1] += uint32(self.block_size_) - - histograms[self.last_histogram_ix_[0]] = combined_histo[0] - last_entropy[0] = combined_entropy[0] - if split.num_types == 1 { - last_entropy[1] = last_entropy[0] - } - - self.block_size_ = 0 - histogramClearDistance(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_++ - if self.merge_last_count_ > 1 { - self.target_block_size_ += self.min_block_size_ - } - } - } - - if is_final { - *self.histograms_size_ = split.num_types - split.num_blocks = self.num_blocks_ - } -} - -/* Adds the next symbol to the current histogram. When the current histogram - reaches the target size, decides on merging the block. */ -func blockSplitterAddSymbolDistance(self *blockSplitterDistance, symbol uint) { - histogramAddDistance(&self.histograms_[self.curr_histogram_ix_], symbol) - self.block_size_++ - if self.block_size_ == self.target_block_size_ { - blockSplitterFinishBlockDistance(self, false) /* is_final = */ - } -} diff --git a/vendor/github.com/andybalholm/brotli/metablock_literal.go b/vendor/github.com/andybalholm/brotli/metablock_literal.go deleted file mode 100644 index 307f8da88f..0000000000 --- a/vendor/github.com/andybalholm/brotli/metablock_literal.go +++ /dev/null @@ -1,165 +0,0 @@ -package brotli - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Greedy block splitter for one block category (literal, command or distance). - */ -type blockSplitterLiteral struct { - alphabet_size_ uint - min_block_size_ uint - split_threshold_ float64 - num_blocks_ uint - split_ *blockSplit - histograms_ []histogramLiteral - histograms_size_ *uint - target_block_size_ uint - block_size_ uint - curr_histogram_ix_ uint - last_histogram_ix_ [2]uint - last_entropy_ [2]float64 - merge_last_count_ uint -} - -func initBlockSplitterLiteral(self *blockSplitterLiteral, alphabet_size uint, min_block_size uint, split_threshold float64, num_symbols uint, split *blockSplit, histograms *[]histogramLiteral, histograms_size *uint) { - var max_num_blocks uint = num_symbols/min_block_size + 1 - var max_num_types uint = brotli_min_size_t(max_num_blocks, maxNumberOfBlockTypes+1) - /* We have to allocate one more histogram than the maximum number of block - types for the current histogram when the meta-block is too big. */ - self.alphabet_size_ = alphabet_size - - self.min_block_size_ = min_block_size - self.split_threshold_ = split_threshold - self.num_blocks_ = 0 - self.split_ = split - self.histograms_size_ = histograms_size - self.target_block_size_ = min_block_size - self.block_size_ = 0 - self.curr_histogram_ix_ = 0 - self.merge_last_count_ = 0 - brotli_ensure_capacity_uint8_t(&split.types, &split.types_alloc_size, max_num_blocks) - brotli_ensure_capacity_uint32_t(&split.lengths, &split.lengths_alloc_size, max_num_blocks) - self.split_.num_blocks = max_num_blocks - *histograms_size = max_num_types - if histograms == nil || cap(*histograms) < int(*histograms_size) { - *histograms = make([]histogramLiteral, *histograms_size) - } else { - *histograms = (*histograms)[:*histograms_size] - } - self.histograms_ = *histograms - - /* Clear only current histogram. 
*/ - histogramClearLiteral(&self.histograms_[0]) - - self.last_histogram_ix_[1] = 0 - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] -} - -/* Does either of three things: - (1) emits the current block with a new block type; - (2) emits the current block with the type of the second last block; - (3) merges the current block with the last block. */ -func blockSplitterFinishBlockLiteral(self *blockSplitterLiteral, is_final bool) { - var split *blockSplit = self.split_ - var last_entropy []float64 = self.last_entropy_[:] - var histograms []histogramLiteral = self.histograms_ - self.block_size_ = brotli_max_size_t(self.block_size_, self.min_block_size_) - if self.num_blocks_ == 0 { - /* Create first block. */ - split.lengths[0] = uint32(self.block_size_) - - split.types[0] = 0 - last_entropy[0] = bitsEntropy(histograms[0].data_[:], self.alphabet_size_) - last_entropy[1] = last_entropy[0] - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearLiteral(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - } else if self.block_size_ > 0 { - var entropy float64 = bitsEntropy(histograms[self.curr_histogram_ix_].data_[:], self.alphabet_size_) - var combined_histo [2]histogramLiteral - var combined_entropy [2]float64 - var diff [2]float64 - var j uint - for j = 0; j < 2; j++ { - var last_histogram_ix uint = self.last_histogram_ix_[j] - combined_histo[j] = histograms[self.curr_histogram_ix_] - histogramAddHistogramLiteral(&combined_histo[j], &histograms[last_histogram_ix]) - combined_entropy[j] = bitsEntropy(combined_histo[j].data_[0:], self.alphabet_size_) - diff[j] = combined_entropy[j] - entropy - last_entropy[j] - } - - if split.num_types < maxNumberOfBlockTypes && diff[0] > self.split_threshold_ && diff[1] > self.split_threshold_ { - /* Create new block. 
*/ - split.lengths[self.num_blocks_] = uint32(self.block_size_) - - split.types[self.num_blocks_] = byte(split.num_types) - self.last_histogram_ix_[1] = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = uint(byte(split.num_types)) - last_entropy[1] = last_entropy[0] - last_entropy[0] = entropy - self.num_blocks_++ - split.num_types++ - self.curr_histogram_ix_++ - if self.curr_histogram_ix_ < *self.histograms_size_ { - histogramClearLiteral(&histograms[self.curr_histogram_ix_]) - } - self.block_size_ = 0 - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else if diff[1] < diff[0]-20.0 { - split.lengths[self.num_blocks_] = uint32(self.block_size_) - split.types[self.num_blocks_] = split.types[self.num_blocks_-2] - /* Combine this block with second last block. */ - - var tmp uint = self.last_histogram_ix_[0] - self.last_histogram_ix_[0] = self.last_histogram_ix_[1] - self.last_histogram_ix_[1] = tmp - histograms[self.last_histogram_ix_[0]] = combined_histo[1] - last_entropy[1] = last_entropy[0] - last_entropy[0] = combined_entropy[1] - self.num_blocks_++ - self.block_size_ = 0 - histogramClearLiteral(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_ = 0 - self.target_block_size_ = self.min_block_size_ - } else { - /* Combine this block with last block. */ - split.lengths[self.num_blocks_-1] += uint32(self.block_size_) - - histograms[self.last_histogram_ix_[0]] = combined_histo[0] - last_entropy[0] = combined_entropy[0] - if split.num_types == 1 { - last_entropy[1] = last_entropy[0] - } - - self.block_size_ = 0 - histogramClearLiteral(&histograms[self.curr_histogram_ix_]) - self.merge_last_count_++ - if self.merge_last_count_ > 1 { - self.target_block_size_ += self.min_block_size_ - } - } - } - - if is_final { - *self.histograms_size_ = split.num_types - split.num_blocks = self.num_blocks_ - } -} - -/* Adds the next symbol to the current histogram. 
When the current histogram - reaches the target size, decides on merging the block. */ -func blockSplitterAddSymbolLiteral(self *blockSplitterLiteral, symbol uint) { - histogramAddLiteral(&self.histograms_[self.curr_histogram_ix_], symbol) - self.block_size_++ - if self.block_size_ == self.target_block_size_ { - blockSplitterFinishBlockLiteral(self, false) /* is_final = */ - } -} diff --git a/vendor/github.com/andybalholm/brotli/params.go b/vendor/github.com/andybalholm/brotli/params.go deleted file mode 100644 index 0a4c687521..0000000000 --- a/vendor/github.com/andybalholm/brotli/params.go +++ /dev/null @@ -1,37 +0,0 @@ -package brotli - -/* Copyright 2017 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Parameters for the Brotli encoder with chosen quality levels. */ -type hasherParams struct { - type_ int - bucket_bits int - block_bits int - hash_len int - num_last_distances_to_check int -} - -type distanceParams struct { - distance_postfix_bits uint32 - num_direct_distance_codes uint32 - alphabet_size uint32 - max_distance uint -} - -/* Encoding parameters */ -type encoderParams struct { - mode int - quality int - lgwin uint - lgblock int - size_hint uint - disable_literal_context_modeling bool - large_window bool - hasher hasherParams - dist distanceParams - dictionary encoderDictionary -} diff --git a/vendor/github.com/andybalholm/brotli/platform.go b/vendor/github.com/andybalholm/brotli/platform.go deleted file mode 100644 index 4ebfb1528b..0000000000 --- a/vendor/github.com/andybalholm/brotli/platform.go +++ /dev/null @@ -1,103 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -func brotli_min_double(a float64, b float64) float64 { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_double(a float64, b float64) float64 { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_float(a float32, b float32) float32 { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_float(a float32, b float32) float32 { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_int(a int, b int) int { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_int(a int, b int) int { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_size_t(a uint, b uint) uint { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_size_t(a uint, b uint) uint { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_uint32_t(a uint32, b uint32) uint32 { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_uint32_t(a uint32, b uint32) uint32 { - if a > b { - return a - } else { - return b - } -} - -func brotli_min_uint8_t(a byte, b byte) byte { - if a < b { - return a - } else { - return b - } -} - -func brotli_max_uint8_t(a byte, b byte) byte { - if a > b { - return a - } else { - return b - } -} diff --git a/vendor/github.com/andybalholm/brotli/prefix.go b/vendor/github.com/andybalholm/brotli/prefix.go deleted file mode 100644 index 484df0d61e..0000000000 --- a/vendor/github.com/andybalholm/brotli/prefix.go +++ /dev/null @@ -1,30 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Functions for encoding of integers into prefix codes the amount of extra - bits, and the actual values of the extra bits. */ - -/* Here distance_code is an intermediate code, i.e. 
one of the special codes or - the actual distance increased by BROTLI_NUM_DISTANCE_SHORT_CODES - 1. */ -func prefixEncodeCopyDistance(distance_code uint, num_direct_codes uint, postfix_bits uint, code *uint16, extra_bits *uint32) { - if distance_code < numDistanceShortCodes+num_direct_codes { - *code = uint16(distance_code) - *extra_bits = 0 - return - } else { - var dist uint = (uint(1) << (postfix_bits + 2)) + (distance_code - numDistanceShortCodes - num_direct_codes) - var bucket uint = uint(log2FloorNonZero(dist) - 1) - var postfix_mask uint = (1 << postfix_bits) - 1 - var postfix uint = dist & postfix_mask - var prefix uint = (dist >> bucket) & 1 - var offset uint = (2 + prefix) << bucket - var nbits uint = bucket - postfix_bits - *code = uint16(nbits<<10 | (numDistanceShortCodes + num_direct_codes + ((2*(nbits-1) + prefix) << postfix_bits) + postfix)) - *extra_bits = uint32((dist - offset) >> postfix_bits) - } -} diff --git a/vendor/github.com/andybalholm/brotli/prefix_dec.go b/vendor/github.com/andybalholm/brotli/prefix_dec.go deleted file mode 100644 index 183f0d53fe..0000000000 --- a/vendor/github.com/andybalholm/brotli/prefix_dec.go +++ /dev/null @@ -1,723 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -type cmdLutElement struct { - insert_len_extra_bits byte - copy_len_extra_bits byte - distance_code int8 - context byte - insert_len_offset uint16 - copy_len_offset uint16 -} - -var kCmdLut = [numCommandSymbols]cmdLutElement{ - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0000, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0000, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0000, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0000, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0001, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0001, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0001, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0001, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0002, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0002, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0002, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0002, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0003, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0003, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0003, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0008}, - 
cmdLutElement{0x00, 0x00, 0, 0x03, 0x0003, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0004, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0004, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0004, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0004, 0x0009}, - cmdLutElement{0x00, 0x00, 0, 0x00, 0x0005, 0x0002}, - cmdLutElement{0x00, 0x00, 0, 0x01, 0x0005, 0x0003}, - cmdLutElement{0x00, 0x00, 0, 0x02, 0x0005, 0x0004}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0005}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0006}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0007}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0008}, - cmdLutElement{0x00, 0x00, 0, 0x03, 0x0005, 0x0009}, - cmdLutElement{0x01, 0x00, 0, 0x00, 0x0006, 0x0002}, - cmdLutElement{0x01, 0x00, 0, 0x01, 0x0006, 0x0003}, - cmdLutElement{0x01, 0x00, 0, 0x02, 0x0006, 0x0004}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0005}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0006}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0007}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0008}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0006, 0x0009}, - cmdLutElement{0x01, 0x00, 0, 0x00, 0x0008, 0x0002}, - cmdLutElement{0x01, 0x00, 0, 0x01, 0x0008, 0x0003}, - cmdLutElement{0x01, 0x00, 0, 0x02, 0x0008, 0x0004}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0005}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0006}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0007}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0008}, - cmdLutElement{0x01, 0x00, 0, 0x03, 0x0008, 0x0009}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0000, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0000, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0000, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0000, 0x0012}, - 
cmdLutElement{0x00, 0x03, 0, 0x03, 0x0000, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0000, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0000, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0000, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0001, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0001, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0001, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0001, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0001, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0001, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0001, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0001, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0002, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0002, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0002, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0002, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0002, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0002, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0002, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0002, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0003, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0003, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0003, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0003, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0003, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0003, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0003, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0003, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0004, 0x000a}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0004, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0004, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0004, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0004, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0004, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0004, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0004, 0x0036}, - cmdLutElement{0x00, 0x01, 0, 0x03, 0x0005, 0x000a}, - 
cmdLutElement{0x00, 0x01, 0, 0x03, 0x0005, 0x000c}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0005, 0x000e}, - cmdLutElement{0x00, 0x02, 0, 0x03, 0x0005, 0x0012}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0005, 0x0016}, - cmdLutElement{0x00, 0x03, 0, 0x03, 0x0005, 0x001e}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0005, 0x0026}, - cmdLutElement{0x00, 0x04, 0, 0x03, 0x0005, 0x0036}, - cmdLutElement{0x01, 0x01, 0, 0x03, 0x0006, 0x000a}, - cmdLutElement{0x01, 0x01, 0, 0x03, 0x0006, 0x000c}, - cmdLutElement{0x01, 0x02, 0, 0x03, 0x0006, 0x000e}, - cmdLutElement{0x01, 0x02, 0, 0x03, 0x0006, 0x0012}, - cmdLutElement{0x01, 0x03, 0, 0x03, 0x0006, 0x0016}, - cmdLutElement{0x01, 0x03, 0, 0x03, 0x0006, 0x001e}, - cmdLutElement{0x01, 0x04, 0, 0x03, 0x0006, 0x0026}, - cmdLutElement{0x01, 0x04, 0, 0x03, 0x0006, 0x0036}, - cmdLutElement{0x01, 0x01, 0, 0x03, 0x0008, 0x000a}, - cmdLutElement{0x01, 0x01, 0, 0x03, 0x0008, 0x000c}, - cmdLutElement{0x01, 0x02, 0, 0x03, 0x0008, 0x000e}, - cmdLutElement{0x01, 0x02, 0, 0x03, 0x0008, 0x0012}, - cmdLutElement{0x01, 0x03, 0, 0x03, 0x0008, 0x0016}, - cmdLutElement{0x01, 0x03, 0, 0x03, 0x0008, 0x001e}, - cmdLutElement{0x01, 0x04, 0, 0x03, 0x0008, 0x0026}, - cmdLutElement{0x01, 0x04, 0, 0x03, 0x0008, 0x0036}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0000, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0000, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0000, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0000, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0001, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0001, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0001, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 
0x0001, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0001, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0002, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0002, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0002, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0002, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0003, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0003, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0003, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0003, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0004, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0004, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0004, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0004, 0x0009}, - cmdLutElement{0x00, 0x00, -1, 0x00, 0x0005, 0x0002}, - cmdLutElement{0x00, 0x00, -1, 0x01, 0x0005, 0x0003}, - cmdLutElement{0x00, 0x00, -1, 0x02, 0x0005, 0x0004}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0005}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0006}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0007}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0008}, - cmdLutElement{0x00, 0x00, -1, 0x03, 0x0005, 0x0009}, - cmdLutElement{0x01, 0x00, -1, 0x00, 0x0006, 0x0002}, - cmdLutElement{0x01, 0x00, -1, 0x01, 0x0006, 0x0003}, - 
cmdLutElement{0x01, 0x00, -1, 0x02, 0x0006, 0x0004}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0005}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0006}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0007}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0008}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0006, 0x0009}, - cmdLutElement{0x01, 0x00, -1, 0x00, 0x0008, 0x0002}, - cmdLutElement{0x01, 0x00, -1, 0x01, 0x0008, 0x0003}, - cmdLutElement{0x01, 0x00, -1, 0x02, 0x0008, 0x0004}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0005}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0006}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0007}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0008}, - cmdLutElement{0x01, 0x00, -1, 0x03, 0x0008, 0x0009}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0000, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0000, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0000, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0000, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0000, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0000, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0000, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0000, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0001, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0001, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0001, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0001, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0001, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0001, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0001, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0001, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0002, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0002, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0002, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0002, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0002, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0002, 0x001e}, - cmdLutElement{0x00, 
0x04, -1, 0x03, 0x0002, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0002, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0003, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0003, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0003, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0003, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0003, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0003, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0003, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0003, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0004, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0004, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0004, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0004, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0004, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0004, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0004, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0004, 0x0036}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0005, 0x000a}, - cmdLutElement{0x00, 0x01, -1, 0x03, 0x0005, 0x000c}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0005, 0x000e}, - cmdLutElement{0x00, 0x02, -1, 0x03, 0x0005, 0x0012}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0005, 0x0016}, - cmdLutElement{0x00, 0x03, -1, 0x03, 0x0005, 0x001e}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0005, 0x0026}, - cmdLutElement{0x00, 0x04, -1, 0x03, 0x0005, 0x0036}, - cmdLutElement{0x01, 0x01, -1, 0x03, 0x0006, 0x000a}, - cmdLutElement{0x01, 0x01, -1, 0x03, 0x0006, 0x000c}, - cmdLutElement{0x01, 0x02, -1, 0x03, 0x0006, 0x000e}, - cmdLutElement{0x01, 0x02, -1, 0x03, 0x0006, 0x0012}, - cmdLutElement{0x01, 0x03, -1, 0x03, 0x0006, 0x0016}, - cmdLutElement{0x01, 0x03, -1, 0x03, 0x0006, 0x001e}, - cmdLutElement{0x01, 0x04, -1, 0x03, 0x0006, 0x0026}, - cmdLutElement{0x01, 0x04, -1, 0x03, 0x0006, 0x0036}, - cmdLutElement{0x01, 0x01, -1, 0x03, 0x0008, 0x000a}, - cmdLutElement{0x01, 0x01, -1, 0x03, 0x0008, 0x000c}, - cmdLutElement{0x01, 0x02, -1, 0x03, 
0x0008, 0x000e}, - cmdLutElement{0x01, 0x02, -1, 0x03, 0x0008, 0x0012}, - cmdLutElement{0x01, 0x03, -1, 0x03, 0x0008, 0x0016}, - cmdLutElement{0x01, 0x03, -1, 0x03, 0x0008, 0x001e}, - cmdLutElement{0x01, 0x04, -1, 0x03, 0x0008, 0x0026}, - cmdLutElement{0x01, 0x04, -1, 0x03, 0x0008, 0x0036}, - cmdLutElement{0x02, 0x00, -1, 0x00, 0x000a, 0x0002}, - cmdLutElement{0x02, 0x00, -1, 0x01, 0x000a, 0x0003}, - cmdLutElement{0x02, 0x00, -1, 0x02, 0x000a, 0x0004}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0005}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0006}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0007}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0008}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000a, 0x0009}, - cmdLutElement{0x02, 0x00, -1, 0x00, 0x000e, 0x0002}, - cmdLutElement{0x02, 0x00, -1, 0x01, 0x000e, 0x0003}, - cmdLutElement{0x02, 0x00, -1, 0x02, 0x000e, 0x0004}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0005}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0006}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0007}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0008}, - cmdLutElement{0x02, 0x00, -1, 0x03, 0x000e, 0x0009}, - cmdLutElement{0x03, 0x00, -1, 0x00, 0x0012, 0x0002}, - cmdLutElement{0x03, 0x00, -1, 0x01, 0x0012, 0x0003}, - cmdLutElement{0x03, 0x00, -1, 0x02, 0x0012, 0x0004}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0005}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0006}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0007}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0008}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x0012, 0x0009}, - cmdLutElement{0x03, 0x00, -1, 0x00, 0x001a, 0x0002}, - cmdLutElement{0x03, 0x00, -1, 0x01, 0x001a, 0x0003}, - cmdLutElement{0x03, 0x00, -1, 0x02, 0x001a, 0x0004}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0005}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0006}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0007}, - cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0008}, - 
cmdLutElement{0x03, 0x00, -1, 0x03, 0x001a, 0x0009}, - cmdLutElement{0x04, 0x00, -1, 0x00, 0x0022, 0x0002}, - cmdLutElement{0x04, 0x00, -1, 0x01, 0x0022, 0x0003}, - cmdLutElement{0x04, 0x00, -1, 0x02, 0x0022, 0x0004}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0005}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0006}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0007}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0008}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0022, 0x0009}, - cmdLutElement{0x04, 0x00, -1, 0x00, 0x0032, 0x0002}, - cmdLutElement{0x04, 0x00, -1, 0x01, 0x0032, 0x0003}, - cmdLutElement{0x04, 0x00, -1, 0x02, 0x0032, 0x0004}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0005}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0006}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0007}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0008}, - cmdLutElement{0x04, 0x00, -1, 0x03, 0x0032, 0x0009}, - cmdLutElement{0x05, 0x00, -1, 0x00, 0x0042, 0x0002}, - cmdLutElement{0x05, 0x00, -1, 0x01, 0x0042, 0x0003}, - cmdLutElement{0x05, 0x00, -1, 0x02, 0x0042, 0x0004}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0005}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0006}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0007}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0008}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0042, 0x0009}, - cmdLutElement{0x05, 0x00, -1, 0x00, 0x0062, 0x0002}, - cmdLutElement{0x05, 0x00, -1, 0x01, 0x0062, 0x0003}, - cmdLutElement{0x05, 0x00, -1, 0x02, 0x0062, 0x0004}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0005}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0006}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0007}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0008}, - cmdLutElement{0x05, 0x00, -1, 0x03, 0x0062, 0x0009}, - cmdLutElement{0x02, 0x01, -1, 0x03, 0x000a, 0x000a}, - cmdLutElement{0x02, 0x01, -1, 0x03, 0x000a, 0x000c}, - cmdLutElement{0x02, 0x02, -1, 0x03, 0x000a, 0x000e}, - cmdLutElement{0x02, 
0x02, -1, 0x03, 0x000a, 0x0012}, - cmdLutElement{0x02, 0x03, -1, 0x03, 0x000a, 0x0016}, - cmdLutElement{0x02, 0x03, -1, 0x03, 0x000a, 0x001e}, - cmdLutElement{0x02, 0x04, -1, 0x03, 0x000a, 0x0026}, - cmdLutElement{0x02, 0x04, -1, 0x03, 0x000a, 0x0036}, - cmdLutElement{0x02, 0x01, -1, 0x03, 0x000e, 0x000a}, - cmdLutElement{0x02, 0x01, -1, 0x03, 0x000e, 0x000c}, - cmdLutElement{0x02, 0x02, -1, 0x03, 0x000e, 0x000e}, - cmdLutElement{0x02, 0x02, -1, 0x03, 0x000e, 0x0012}, - cmdLutElement{0x02, 0x03, -1, 0x03, 0x000e, 0x0016}, - cmdLutElement{0x02, 0x03, -1, 0x03, 0x000e, 0x001e}, - cmdLutElement{0x02, 0x04, -1, 0x03, 0x000e, 0x0026}, - cmdLutElement{0x02, 0x04, -1, 0x03, 0x000e, 0x0036}, - cmdLutElement{0x03, 0x01, -1, 0x03, 0x0012, 0x000a}, - cmdLutElement{0x03, 0x01, -1, 0x03, 0x0012, 0x000c}, - cmdLutElement{0x03, 0x02, -1, 0x03, 0x0012, 0x000e}, - cmdLutElement{0x03, 0x02, -1, 0x03, 0x0012, 0x0012}, - cmdLutElement{0x03, 0x03, -1, 0x03, 0x0012, 0x0016}, - cmdLutElement{0x03, 0x03, -1, 0x03, 0x0012, 0x001e}, - cmdLutElement{0x03, 0x04, -1, 0x03, 0x0012, 0x0026}, - cmdLutElement{0x03, 0x04, -1, 0x03, 0x0012, 0x0036}, - cmdLutElement{0x03, 0x01, -1, 0x03, 0x001a, 0x000a}, - cmdLutElement{0x03, 0x01, -1, 0x03, 0x001a, 0x000c}, - cmdLutElement{0x03, 0x02, -1, 0x03, 0x001a, 0x000e}, - cmdLutElement{0x03, 0x02, -1, 0x03, 0x001a, 0x0012}, - cmdLutElement{0x03, 0x03, -1, 0x03, 0x001a, 0x0016}, - cmdLutElement{0x03, 0x03, -1, 0x03, 0x001a, 0x001e}, - cmdLutElement{0x03, 0x04, -1, 0x03, 0x001a, 0x0026}, - cmdLutElement{0x03, 0x04, -1, 0x03, 0x001a, 0x0036}, - cmdLutElement{0x04, 0x01, -1, 0x03, 0x0022, 0x000a}, - cmdLutElement{0x04, 0x01, -1, 0x03, 0x0022, 0x000c}, - cmdLutElement{0x04, 0x02, -1, 0x03, 0x0022, 0x000e}, - cmdLutElement{0x04, 0x02, -1, 0x03, 0x0022, 0x0012}, - cmdLutElement{0x04, 0x03, -1, 0x03, 0x0022, 0x0016}, - cmdLutElement{0x04, 0x03, -1, 0x03, 0x0022, 0x001e}, - cmdLutElement{0x04, 0x04, -1, 0x03, 0x0022, 0x0026}, - cmdLutElement{0x04, 0x04, -1, 0x03, 
0x0022, 0x0036}, - cmdLutElement{0x04, 0x01, -1, 0x03, 0x0032, 0x000a}, - cmdLutElement{0x04, 0x01, -1, 0x03, 0x0032, 0x000c}, - cmdLutElement{0x04, 0x02, -1, 0x03, 0x0032, 0x000e}, - cmdLutElement{0x04, 0x02, -1, 0x03, 0x0032, 0x0012}, - cmdLutElement{0x04, 0x03, -1, 0x03, 0x0032, 0x0016}, - cmdLutElement{0x04, 0x03, -1, 0x03, 0x0032, 0x001e}, - cmdLutElement{0x04, 0x04, -1, 0x03, 0x0032, 0x0026}, - cmdLutElement{0x04, 0x04, -1, 0x03, 0x0032, 0x0036}, - cmdLutElement{0x05, 0x01, -1, 0x03, 0x0042, 0x000a}, - cmdLutElement{0x05, 0x01, -1, 0x03, 0x0042, 0x000c}, - cmdLutElement{0x05, 0x02, -1, 0x03, 0x0042, 0x000e}, - cmdLutElement{0x05, 0x02, -1, 0x03, 0x0042, 0x0012}, - cmdLutElement{0x05, 0x03, -1, 0x03, 0x0042, 0x0016}, - cmdLutElement{0x05, 0x03, -1, 0x03, 0x0042, 0x001e}, - cmdLutElement{0x05, 0x04, -1, 0x03, 0x0042, 0x0026}, - cmdLutElement{0x05, 0x04, -1, 0x03, 0x0042, 0x0036}, - cmdLutElement{0x05, 0x01, -1, 0x03, 0x0062, 0x000a}, - cmdLutElement{0x05, 0x01, -1, 0x03, 0x0062, 0x000c}, - cmdLutElement{0x05, 0x02, -1, 0x03, 0x0062, 0x000e}, - cmdLutElement{0x05, 0x02, -1, 0x03, 0x0062, 0x0012}, - cmdLutElement{0x05, 0x03, -1, 0x03, 0x0062, 0x0016}, - cmdLutElement{0x05, 0x03, -1, 0x03, 0x0062, 0x001e}, - cmdLutElement{0x05, 0x04, -1, 0x03, 0x0062, 0x0026}, - cmdLutElement{0x05, 0x04, -1, 0x03, 0x0062, 0x0036}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0000, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0000, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0000, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0000, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0000, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0000, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0000, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0000, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0001, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0001, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0001, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0001, 0x00c6}, - 
cmdLutElement{0x00, 0x08, -1, 0x03, 0x0001, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0001, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0001, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0001, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0002, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0002, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0002, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0002, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0002, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0002, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0002, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0002, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0003, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0003, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0003, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0003, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0003, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0003, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0003, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0003, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0004, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0004, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0004, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0004, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0004, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0004, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0004, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0004, 0x0846}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0005, 0x0046}, - cmdLutElement{0x00, 0x05, -1, 0x03, 0x0005, 0x0066}, - cmdLutElement{0x00, 0x06, -1, 0x03, 0x0005, 0x0086}, - cmdLutElement{0x00, 0x07, -1, 0x03, 0x0005, 0x00c6}, - cmdLutElement{0x00, 0x08, -1, 0x03, 0x0005, 0x0146}, - cmdLutElement{0x00, 0x09, -1, 0x03, 0x0005, 0x0246}, - cmdLutElement{0x00, 0x0a, -1, 0x03, 0x0005, 0x0446}, - cmdLutElement{0x00, 0x18, -1, 0x03, 0x0005, 0x0846}, - cmdLutElement{0x01, 
0x05, -1, 0x03, 0x0006, 0x0046}, - cmdLutElement{0x01, 0x05, -1, 0x03, 0x0006, 0x0066}, - cmdLutElement{0x01, 0x06, -1, 0x03, 0x0006, 0x0086}, - cmdLutElement{0x01, 0x07, -1, 0x03, 0x0006, 0x00c6}, - cmdLutElement{0x01, 0x08, -1, 0x03, 0x0006, 0x0146}, - cmdLutElement{0x01, 0x09, -1, 0x03, 0x0006, 0x0246}, - cmdLutElement{0x01, 0x0a, -1, 0x03, 0x0006, 0x0446}, - cmdLutElement{0x01, 0x18, -1, 0x03, 0x0006, 0x0846}, - cmdLutElement{0x01, 0x05, -1, 0x03, 0x0008, 0x0046}, - cmdLutElement{0x01, 0x05, -1, 0x03, 0x0008, 0x0066}, - cmdLutElement{0x01, 0x06, -1, 0x03, 0x0008, 0x0086}, - cmdLutElement{0x01, 0x07, -1, 0x03, 0x0008, 0x00c6}, - cmdLutElement{0x01, 0x08, -1, 0x03, 0x0008, 0x0146}, - cmdLutElement{0x01, 0x09, -1, 0x03, 0x0008, 0x0246}, - cmdLutElement{0x01, 0x0a, -1, 0x03, 0x0008, 0x0446}, - cmdLutElement{0x01, 0x18, -1, 0x03, 0x0008, 0x0846}, - cmdLutElement{0x06, 0x00, -1, 0x00, 0x0082, 0x0002}, - cmdLutElement{0x06, 0x00, -1, 0x01, 0x0082, 0x0003}, - cmdLutElement{0x06, 0x00, -1, 0x02, 0x0082, 0x0004}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0005}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0006}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0007}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0008}, - cmdLutElement{0x06, 0x00, -1, 0x03, 0x0082, 0x0009}, - cmdLutElement{0x07, 0x00, -1, 0x00, 0x00c2, 0x0002}, - cmdLutElement{0x07, 0x00, -1, 0x01, 0x00c2, 0x0003}, - cmdLutElement{0x07, 0x00, -1, 0x02, 0x00c2, 0x0004}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0005}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0006}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0007}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0008}, - cmdLutElement{0x07, 0x00, -1, 0x03, 0x00c2, 0x0009}, - cmdLutElement{0x08, 0x00, -1, 0x00, 0x0142, 0x0002}, - cmdLutElement{0x08, 0x00, -1, 0x01, 0x0142, 0x0003}, - cmdLutElement{0x08, 0x00, -1, 0x02, 0x0142, 0x0004}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0005}, - cmdLutElement{0x08, 0x00, -1, 0x03, 
0x0142, 0x0006}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0007}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0008}, - cmdLutElement{0x08, 0x00, -1, 0x03, 0x0142, 0x0009}, - cmdLutElement{0x09, 0x00, -1, 0x00, 0x0242, 0x0002}, - cmdLutElement{0x09, 0x00, -1, 0x01, 0x0242, 0x0003}, - cmdLutElement{0x09, 0x00, -1, 0x02, 0x0242, 0x0004}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0005}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0006}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0007}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0008}, - cmdLutElement{0x09, 0x00, -1, 0x03, 0x0242, 0x0009}, - cmdLutElement{0x0a, 0x00, -1, 0x00, 0x0442, 0x0002}, - cmdLutElement{0x0a, 0x00, -1, 0x01, 0x0442, 0x0003}, - cmdLutElement{0x0a, 0x00, -1, 0x02, 0x0442, 0x0004}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0005}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0006}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0007}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0008}, - cmdLutElement{0x0a, 0x00, -1, 0x03, 0x0442, 0x0009}, - cmdLutElement{0x0c, 0x00, -1, 0x00, 0x0842, 0x0002}, - cmdLutElement{0x0c, 0x00, -1, 0x01, 0x0842, 0x0003}, - cmdLutElement{0x0c, 0x00, -1, 0x02, 0x0842, 0x0004}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0005}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0006}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0007}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0008}, - cmdLutElement{0x0c, 0x00, -1, 0x03, 0x0842, 0x0009}, - cmdLutElement{0x0e, 0x00, -1, 0x00, 0x1842, 0x0002}, - cmdLutElement{0x0e, 0x00, -1, 0x01, 0x1842, 0x0003}, - cmdLutElement{0x0e, 0x00, -1, 0x02, 0x1842, 0x0004}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0005}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0006}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0007}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0008}, - cmdLutElement{0x0e, 0x00, -1, 0x03, 0x1842, 0x0009}, - cmdLutElement{0x18, 0x00, -1, 0x00, 0x5842, 0x0002}, - 
cmdLutElement{0x18, 0x00, -1, 0x01, 0x5842, 0x0003}, - cmdLutElement{0x18, 0x00, -1, 0x02, 0x5842, 0x0004}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0005}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0006}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0007}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0008}, - cmdLutElement{0x18, 0x00, -1, 0x03, 0x5842, 0x0009}, - cmdLutElement{0x02, 0x05, -1, 0x03, 0x000a, 0x0046}, - cmdLutElement{0x02, 0x05, -1, 0x03, 0x000a, 0x0066}, - cmdLutElement{0x02, 0x06, -1, 0x03, 0x000a, 0x0086}, - cmdLutElement{0x02, 0x07, -1, 0x03, 0x000a, 0x00c6}, - cmdLutElement{0x02, 0x08, -1, 0x03, 0x000a, 0x0146}, - cmdLutElement{0x02, 0x09, -1, 0x03, 0x000a, 0x0246}, - cmdLutElement{0x02, 0x0a, -1, 0x03, 0x000a, 0x0446}, - cmdLutElement{0x02, 0x18, -1, 0x03, 0x000a, 0x0846}, - cmdLutElement{0x02, 0x05, -1, 0x03, 0x000e, 0x0046}, - cmdLutElement{0x02, 0x05, -1, 0x03, 0x000e, 0x0066}, - cmdLutElement{0x02, 0x06, -1, 0x03, 0x000e, 0x0086}, - cmdLutElement{0x02, 0x07, -1, 0x03, 0x000e, 0x00c6}, - cmdLutElement{0x02, 0x08, -1, 0x03, 0x000e, 0x0146}, - cmdLutElement{0x02, 0x09, -1, 0x03, 0x000e, 0x0246}, - cmdLutElement{0x02, 0x0a, -1, 0x03, 0x000e, 0x0446}, - cmdLutElement{0x02, 0x18, -1, 0x03, 0x000e, 0x0846}, - cmdLutElement{0x03, 0x05, -1, 0x03, 0x0012, 0x0046}, - cmdLutElement{0x03, 0x05, -1, 0x03, 0x0012, 0x0066}, - cmdLutElement{0x03, 0x06, -1, 0x03, 0x0012, 0x0086}, - cmdLutElement{0x03, 0x07, -1, 0x03, 0x0012, 0x00c6}, - cmdLutElement{0x03, 0x08, -1, 0x03, 0x0012, 0x0146}, - cmdLutElement{0x03, 0x09, -1, 0x03, 0x0012, 0x0246}, - cmdLutElement{0x03, 0x0a, -1, 0x03, 0x0012, 0x0446}, - cmdLutElement{0x03, 0x18, -1, 0x03, 0x0012, 0x0846}, - cmdLutElement{0x03, 0x05, -1, 0x03, 0x001a, 0x0046}, - cmdLutElement{0x03, 0x05, -1, 0x03, 0x001a, 0x0066}, - cmdLutElement{0x03, 0x06, -1, 0x03, 0x001a, 0x0086}, - cmdLutElement{0x03, 0x07, -1, 0x03, 0x001a, 0x00c6}, - cmdLutElement{0x03, 0x08, -1, 0x03, 0x001a, 0x0146}, - cmdLutElement{0x03, 
0x09, -1, 0x03, 0x001a, 0x0246}, - cmdLutElement{0x03, 0x0a, -1, 0x03, 0x001a, 0x0446}, - cmdLutElement{0x03, 0x18, -1, 0x03, 0x001a, 0x0846}, - cmdLutElement{0x04, 0x05, -1, 0x03, 0x0022, 0x0046}, - cmdLutElement{0x04, 0x05, -1, 0x03, 0x0022, 0x0066}, - cmdLutElement{0x04, 0x06, -1, 0x03, 0x0022, 0x0086}, - cmdLutElement{0x04, 0x07, -1, 0x03, 0x0022, 0x00c6}, - cmdLutElement{0x04, 0x08, -1, 0x03, 0x0022, 0x0146}, - cmdLutElement{0x04, 0x09, -1, 0x03, 0x0022, 0x0246}, - cmdLutElement{0x04, 0x0a, -1, 0x03, 0x0022, 0x0446}, - cmdLutElement{0x04, 0x18, -1, 0x03, 0x0022, 0x0846}, - cmdLutElement{0x04, 0x05, -1, 0x03, 0x0032, 0x0046}, - cmdLutElement{0x04, 0x05, -1, 0x03, 0x0032, 0x0066}, - cmdLutElement{0x04, 0x06, -1, 0x03, 0x0032, 0x0086}, - cmdLutElement{0x04, 0x07, -1, 0x03, 0x0032, 0x00c6}, - cmdLutElement{0x04, 0x08, -1, 0x03, 0x0032, 0x0146}, - cmdLutElement{0x04, 0x09, -1, 0x03, 0x0032, 0x0246}, - cmdLutElement{0x04, 0x0a, -1, 0x03, 0x0032, 0x0446}, - cmdLutElement{0x04, 0x18, -1, 0x03, 0x0032, 0x0846}, - cmdLutElement{0x05, 0x05, -1, 0x03, 0x0042, 0x0046}, - cmdLutElement{0x05, 0x05, -1, 0x03, 0x0042, 0x0066}, - cmdLutElement{0x05, 0x06, -1, 0x03, 0x0042, 0x0086}, - cmdLutElement{0x05, 0x07, -1, 0x03, 0x0042, 0x00c6}, - cmdLutElement{0x05, 0x08, -1, 0x03, 0x0042, 0x0146}, - cmdLutElement{0x05, 0x09, -1, 0x03, 0x0042, 0x0246}, - cmdLutElement{0x05, 0x0a, -1, 0x03, 0x0042, 0x0446}, - cmdLutElement{0x05, 0x18, -1, 0x03, 0x0042, 0x0846}, - cmdLutElement{0x05, 0x05, -1, 0x03, 0x0062, 0x0046}, - cmdLutElement{0x05, 0x05, -1, 0x03, 0x0062, 0x0066}, - cmdLutElement{0x05, 0x06, -1, 0x03, 0x0062, 0x0086}, - cmdLutElement{0x05, 0x07, -1, 0x03, 0x0062, 0x00c6}, - cmdLutElement{0x05, 0x08, -1, 0x03, 0x0062, 0x0146}, - cmdLutElement{0x05, 0x09, -1, 0x03, 0x0062, 0x0246}, - cmdLutElement{0x05, 0x0a, -1, 0x03, 0x0062, 0x0446}, - cmdLutElement{0x05, 0x18, -1, 0x03, 0x0062, 0x0846}, - cmdLutElement{0x06, 0x01, -1, 0x03, 0x0082, 0x000a}, - cmdLutElement{0x06, 0x01, -1, 0x03, 
0x0082, 0x000c}, - cmdLutElement{0x06, 0x02, -1, 0x03, 0x0082, 0x000e}, - cmdLutElement{0x06, 0x02, -1, 0x03, 0x0082, 0x0012}, - cmdLutElement{0x06, 0x03, -1, 0x03, 0x0082, 0x0016}, - cmdLutElement{0x06, 0x03, -1, 0x03, 0x0082, 0x001e}, - cmdLutElement{0x06, 0x04, -1, 0x03, 0x0082, 0x0026}, - cmdLutElement{0x06, 0x04, -1, 0x03, 0x0082, 0x0036}, - cmdLutElement{0x07, 0x01, -1, 0x03, 0x00c2, 0x000a}, - cmdLutElement{0x07, 0x01, -1, 0x03, 0x00c2, 0x000c}, - cmdLutElement{0x07, 0x02, -1, 0x03, 0x00c2, 0x000e}, - cmdLutElement{0x07, 0x02, -1, 0x03, 0x00c2, 0x0012}, - cmdLutElement{0x07, 0x03, -1, 0x03, 0x00c2, 0x0016}, - cmdLutElement{0x07, 0x03, -1, 0x03, 0x00c2, 0x001e}, - cmdLutElement{0x07, 0x04, -1, 0x03, 0x00c2, 0x0026}, - cmdLutElement{0x07, 0x04, -1, 0x03, 0x00c2, 0x0036}, - cmdLutElement{0x08, 0x01, -1, 0x03, 0x0142, 0x000a}, - cmdLutElement{0x08, 0x01, -1, 0x03, 0x0142, 0x000c}, - cmdLutElement{0x08, 0x02, -1, 0x03, 0x0142, 0x000e}, - cmdLutElement{0x08, 0x02, -1, 0x03, 0x0142, 0x0012}, - cmdLutElement{0x08, 0x03, -1, 0x03, 0x0142, 0x0016}, - cmdLutElement{0x08, 0x03, -1, 0x03, 0x0142, 0x001e}, - cmdLutElement{0x08, 0x04, -1, 0x03, 0x0142, 0x0026}, - cmdLutElement{0x08, 0x04, -1, 0x03, 0x0142, 0x0036}, - cmdLutElement{0x09, 0x01, -1, 0x03, 0x0242, 0x000a}, - cmdLutElement{0x09, 0x01, -1, 0x03, 0x0242, 0x000c}, - cmdLutElement{0x09, 0x02, -1, 0x03, 0x0242, 0x000e}, - cmdLutElement{0x09, 0x02, -1, 0x03, 0x0242, 0x0012}, - cmdLutElement{0x09, 0x03, -1, 0x03, 0x0242, 0x0016}, - cmdLutElement{0x09, 0x03, -1, 0x03, 0x0242, 0x001e}, - cmdLutElement{0x09, 0x04, -1, 0x03, 0x0242, 0x0026}, - cmdLutElement{0x09, 0x04, -1, 0x03, 0x0242, 0x0036}, - cmdLutElement{0x0a, 0x01, -1, 0x03, 0x0442, 0x000a}, - cmdLutElement{0x0a, 0x01, -1, 0x03, 0x0442, 0x000c}, - cmdLutElement{0x0a, 0x02, -1, 0x03, 0x0442, 0x000e}, - cmdLutElement{0x0a, 0x02, -1, 0x03, 0x0442, 0x0012}, - cmdLutElement{0x0a, 0x03, -1, 0x03, 0x0442, 0x0016}, - cmdLutElement{0x0a, 0x03, -1, 0x03, 0x0442, 0x001e}, - 
cmdLutElement{0x0a, 0x04, -1, 0x03, 0x0442, 0x0026}, - cmdLutElement{0x0a, 0x04, -1, 0x03, 0x0442, 0x0036}, - cmdLutElement{0x0c, 0x01, -1, 0x03, 0x0842, 0x000a}, - cmdLutElement{0x0c, 0x01, -1, 0x03, 0x0842, 0x000c}, - cmdLutElement{0x0c, 0x02, -1, 0x03, 0x0842, 0x000e}, - cmdLutElement{0x0c, 0x02, -1, 0x03, 0x0842, 0x0012}, - cmdLutElement{0x0c, 0x03, -1, 0x03, 0x0842, 0x0016}, - cmdLutElement{0x0c, 0x03, -1, 0x03, 0x0842, 0x001e}, - cmdLutElement{0x0c, 0x04, -1, 0x03, 0x0842, 0x0026}, - cmdLutElement{0x0c, 0x04, -1, 0x03, 0x0842, 0x0036}, - cmdLutElement{0x0e, 0x01, -1, 0x03, 0x1842, 0x000a}, - cmdLutElement{0x0e, 0x01, -1, 0x03, 0x1842, 0x000c}, - cmdLutElement{0x0e, 0x02, -1, 0x03, 0x1842, 0x000e}, - cmdLutElement{0x0e, 0x02, -1, 0x03, 0x1842, 0x0012}, - cmdLutElement{0x0e, 0x03, -1, 0x03, 0x1842, 0x0016}, - cmdLutElement{0x0e, 0x03, -1, 0x03, 0x1842, 0x001e}, - cmdLutElement{0x0e, 0x04, -1, 0x03, 0x1842, 0x0026}, - cmdLutElement{0x0e, 0x04, -1, 0x03, 0x1842, 0x0036}, - cmdLutElement{0x18, 0x01, -1, 0x03, 0x5842, 0x000a}, - cmdLutElement{0x18, 0x01, -1, 0x03, 0x5842, 0x000c}, - cmdLutElement{0x18, 0x02, -1, 0x03, 0x5842, 0x000e}, - cmdLutElement{0x18, 0x02, -1, 0x03, 0x5842, 0x0012}, - cmdLutElement{0x18, 0x03, -1, 0x03, 0x5842, 0x0016}, - cmdLutElement{0x18, 0x03, -1, 0x03, 0x5842, 0x001e}, - cmdLutElement{0x18, 0x04, -1, 0x03, 0x5842, 0x0026}, - cmdLutElement{0x18, 0x04, -1, 0x03, 0x5842, 0x0036}, - cmdLutElement{0x06, 0x05, -1, 0x03, 0x0082, 0x0046}, - cmdLutElement{0x06, 0x05, -1, 0x03, 0x0082, 0x0066}, - cmdLutElement{0x06, 0x06, -1, 0x03, 0x0082, 0x0086}, - cmdLutElement{0x06, 0x07, -1, 0x03, 0x0082, 0x00c6}, - cmdLutElement{0x06, 0x08, -1, 0x03, 0x0082, 0x0146}, - cmdLutElement{0x06, 0x09, -1, 0x03, 0x0082, 0x0246}, - cmdLutElement{0x06, 0x0a, -1, 0x03, 0x0082, 0x0446}, - cmdLutElement{0x06, 0x18, -1, 0x03, 0x0082, 0x0846}, - cmdLutElement{0x07, 0x05, -1, 0x03, 0x00c2, 0x0046}, - cmdLutElement{0x07, 0x05, -1, 0x03, 0x00c2, 0x0066}, - cmdLutElement{0x07, 
0x06, -1, 0x03, 0x00c2, 0x0086}, - cmdLutElement{0x07, 0x07, -1, 0x03, 0x00c2, 0x00c6}, - cmdLutElement{0x07, 0x08, -1, 0x03, 0x00c2, 0x0146}, - cmdLutElement{0x07, 0x09, -1, 0x03, 0x00c2, 0x0246}, - cmdLutElement{0x07, 0x0a, -1, 0x03, 0x00c2, 0x0446}, - cmdLutElement{0x07, 0x18, -1, 0x03, 0x00c2, 0x0846}, - cmdLutElement{0x08, 0x05, -1, 0x03, 0x0142, 0x0046}, - cmdLutElement{0x08, 0x05, -1, 0x03, 0x0142, 0x0066}, - cmdLutElement{0x08, 0x06, -1, 0x03, 0x0142, 0x0086}, - cmdLutElement{0x08, 0x07, -1, 0x03, 0x0142, 0x00c6}, - cmdLutElement{0x08, 0x08, -1, 0x03, 0x0142, 0x0146}, - cmdLutElement{0x08, 0x09, -1, 0x03, 0x0142, 0x0246}, - cmdLutElement{0x08, 0x0a, -1, 0x03, 0x0142, 0x0446}, - cmdLutElement{0x08, 0x18, -1, 0x03, 0x0142, 0x0846}, - cmdLutElement{0x09, 0x05, -1, 0x03, 0x0242, 0x0046}, - cmdLutElement{0x09, 0x05, -1, 0x03, 0x0242, 0x0066}, - cmdLutElement{0x09, 0x06, -1, 0x03, 0x0242, 0x0086}, - cmdLutElement{0x09, 0x07, -1, 0x03, 0x0242, 0x00c6}, - cmdLutElement{0x09, 0x08, -1, 0x03, 0x0242, 0x0146}, - cmdLutElement{0x09, 0x09, -1, 0x03, 0x0242, 0x0246}, - cmdLutElement{0x09, 0x0a, -1, 0x03, 0x0242, 0x0446}, - cmdLutElement{0x09, 0x18, -1, 0x03, 0x0242, 0x0846}, - cmdLutElement{0x0a, 0x05, -1, 0x03, 0x0442, 0x0046}, - cmdLutElement{0x0a, 0x05, -1, 0x03, 0x0442, 0x0066}, - cmdLutElement{0x0a, 0x06, -1, 0x03, 0x0442, 0x0086}, - cmdLutElement{0x0a, 0x07, -1, 0x03, 0x0442, 0x00c6}, - cmdLutElement{0x0a, 0x08, -1, 0x03, 0x0442, 0x0146}, - cmdLutElement{0x0a, 0x09, -1, 0x03, 0x0442, 0x0246}, - cmdLutElement{0x0a, 0x0a, -1, 0x03, 0x0442, 0x0446}, - cmdLutElement{0x0a, 0x18, -1, 0x03, 0x0442, 0x0846}, - cmdLutElement{0x0c, 0x05, -1, 0x03, 0x0842, 0x0046}, - cmdLutElement{0x0c, 0x05, -1, 0x03, 0x0842, 0x0066}, - cmdLutElement{0x0c, 0x06, -1, 0x03, 0x0842, 0x0086}, - cmdLutElement{0x0c, 0x07, -1, 0x03, 0x0842, 0x00c6}, - cmdLutElement{0x0c, 0x08, -1, 0x03, 0x0842, 0x0146}, - cmdLutElement{0x0c, 0x09, -1, 0x03, 0x0842, 0x0246}, - cmdLutElement{0x0c, 0x0a, -1, 0x03, 
0x0842, 0x0446}, - cmdLutElement{0x0c, 0x18, -1, 0x03, 0x0842, 0x0846}, - cmdLutElement{0x0e, 0x05, -1, 0x03, 0x1842, 0x0046}, - cmdLutElement{0x0e, 0x05, -1, 0x03, 0x1842, 0x0066}, - cmdLutElement{0x0e, 0x06, -1, 0x03, 0x1842, 0x0086}, - cmdLutElement{0x0e, 0x07, -1, 0x03, 0x1842, 0x00c6}, - cmdLutElement{0x0e, 0x08, -1, 0x03, 0x1842, 0x0146}, - cmdLutElement{0x0e, 0x09, -1, 0x03, 0x1842, 0x0246}, - cmdLutElement{0x0e, 0x0a, -1, 0x03, 0x1842, 0x0446}, - cmdLutElement{0x0e, 0x18, -1, 0x03, 0x1842, 0x0846}, - cmdLutElement{0x18, 0x05, -1, 0x03, 0x5842, 0x0046}, - cmdLutElement{0x18, 0x05, -1, 0x03, 0x5842, 0x0066}, - cmdLutElement{0x18, 0x06, -1, 0x03, 0x5842, 0x0086}, - cmdLutElement{0x18, 0x07, -1, 0x03, 0x5842, 0x00c6}, - cmdLutElement{0x18, 0x08, -1, 0x03, 0x5842, 0x0146}, - cmdLutElement{0x18, 0x09, -1, 0x03, 0x5842, 0x0246}, - cmdLutElement{0x18, 0x0a, -1, 0x03, 0x5842, 0x0446}, - cmdLutElement{0x18, 0x18, -1, 0x03, 0x5842, 0x0846}, -} diff --git a/vendor/github.com/andybalholm/brotli/quality.go b/vendor/github.com/andybalholm/brotli/quality.go deleted file mode 100644 index 49709a3823..0000000000 --- a/vendor/github.com/andybalholm/brotli/quality.go +++ /dev/null @@ -1,196 +0,0 @@ -package brotli - -const fastOnePassCompressionQuality = 0 - -const fastTwoPassCompressionQuality = 1 - -const zopflificationQuality = 10 - -const hqZopflificationQuality = 11 - -const maxQualityForStaticEntropyCodes = 2 - -const minQualityForBlockSplit = 4 - -const minQualityForNonzeroDistanceParams = 4 - -const minQualityForOptimizeHistograms = 4 - -const minQualityForExtensiveReferenceSearch = 5 - -const minQualityForContextModeling = 5 - -const minQualityForHqContextModeling = 7 - -const minQualityForHqBlockSplitting = 10 - -/* For quality below MIN_QUALITY_FOR_BLOCK_SPLIT there is no block splitting, - so we buffer at most this much literals and commands. */ -const maxNumDelayedSymbols = 0x2FFF - -/* Returns hash-table size for quality levels 0 and 1. 
*/ -func maxHashTableSize(quality int) uint { - if quality == fastOnePassCompressionQuality { - return 1 << 15 - } else { - return 1 << 17 - } -} - -/* The maximum length for which the zopflification uses distinct distances. */ -const maxZopfliLenQuality10 = 150 - -const maxZopfliLenQuality11 = 325 - -/* Do not thoroughly search when a long copy is found. */ -const longCopyQuickStep = 16384 - -func maxZopfliLen(params *encoderParams) uint { - if params.quality <= 10 { - return maxZopfliLenQuality10 - } else { - return maxZopfliLenQuality11 - } -} - -/* Number of best candidates to evaluate to expand Zopfli chain. */ -func maxZopfliCandidates(params *encoderParams) uint { - if params.quality <= 10 { - return 1 - } else { - return 5 - } -} - -func sanitizeParams(params *encoderParams) { - params.quality = brotli_min_int(maxQuality, brotli_max_int(minQuality, params.quality)) - if params.quality <= maxQualityForStaticEntropyCodes { - params.large_window = false - } - - if params.lgwin < minWindowBits { - params.lgwin = minWindowBits - } else { - var max_lgwin int - if params.large_window { - max_lgwin = largeMaxWindowBits - } else { - max_lgwin = maxWindowBits - } - if params.lgwin > uint(max_lgwin) { - params.lgwin = uint(max_lgwin) - } - } -} - -/* Returns optimized lg_block value. */ -func computeLgBlock(params *encoderParams) int { - var lgblock int = params.lgblock - if params.quality == fastOnePassCompressionQuality || params.quality == fastTwoPassCompressionQuality { - lgblock = int(params.lgwin) - } else if params.quality < minQualityForBlockSplit { - lgblock = 14 - } else if lgblock == 0 { - lgblock = 16 - if params.quality >= 9 && params.lgwin > uint(lgblock) { - lgblock = brotli_min_int(18, int(params.lgwin)) - } - } else { - lgblock = brotli_min_int(maxInputBlockBits, brotli_max_int(minInputBlockBits, lgblock)) - } - - return lgblock -} - -/* Returns log2 of the size of main ring buffer area. 
- Allocate at least lgwin + 1 bits for the ring buffer so that the newly - added block fits there completely and we still get lgwin bits and at least - read_block_size_bits + 1 bits because the copy tail length needs to be - smaller than ring-buffer size. */ -func computeRbBits(params *encoderParams) int { - return 1 + brotli_max_int(int(params.lgwin), params.lgblock) -} - -func maxMetablockSize(params *encoderParams) uint { - var bits int = brotli_min_int(computeRbBits(params), maxInputBlockBits) - return uint(1) << uint(bits) -} - -/* When searching for backward references and have not seen matches for a long - time, we can skip some match lookups. Unsuccessful match lookups are very - expensive and this kind of a heuristic speeds up compression quite a lot. - At first 8 byte strides are taken and every second byte is put to hasher. - After 4x more literals stride by 16 bytes, every put 4-th byte to hasher. - Applied only to qualities 2 to 9. */ -func literalSpreeLengthForSparseSearch(params *encoderParams) uint { - if params.quality < 9 { - return 64 - } else { - return 512 - } -} - -func chooseHasher(params *encoderParams, hparams *hasherParams) { - if params.quality > 9 { - hparams.type_ = 10 - } else if params.quality == 4 && params.size_hint >= 1<<20 { - hparams.type_ = 54 - } else if params.quality < 5 { - hparams.type_ = params.quality - } else if params.lgwin <= 16 { - if params.quality < 7 { - hparams.type_ = 40 - } else if params.quality < 9 { - hparams.type_ = 41 - } else { - hparams.type_ = 42 - } - } else if params.size_hint >= 1<<20 && params.lgwin >= 19 { - hparams.type_ = 6 - hparams.block_bits = params.quality - 1 - hparams.bucket_bits = 15 - hparams.hash_len = 5 - if params.quality < 7 { - hparams.num_last_distances_to_check = 4 - } else if params.quality < 9 { - hparams.num_last_distances_to_check = 10 - } else { - hparams.num_last_distances_to_check = 16 - } - } else { - hparams.type_ = 5 - hparams.block_bits = params.quality - 1 - if 
params.quality < 7 { - hparams.bucket_bits = 14 - } else { - hparams.bucket_bits = 15 - } - if params.quality < 7 { - hparams.num_last_distances_to_check = 4 - } else if params.quality < 9 { - hparams.num_last_distances_to_check = 10 - } else { - hparams.num_last_distances_to_check = 16 - } - } - - if params.lgwin > 24 { - /* Different hashers for large window brotli: not for qualities <= 2, - these are too fast for large window. Not for qualities >= 10: their - hasher already works well with large window. So the changes are: - H3 --> H35: for quality 3. - H54 --> H55: for quality 4 with size hint > 1MB - H6 --> H65: for qualities 5, 6, 7, 8, 9. */ - if hparams.type_ == 3 { - hparams.type_ = 35 - } - - if hparams.type_ == 54 { - hparams.type_ = 55 - } - - if hparams.type_ == 6 { - hparams.type_ = 65 - } - } -} diff --git a/vendor/github.com/andybalholm/brotli/reader.go b/vendor/github.com/andybalholm/brotli/reader.go deleted file mode 100644 index 5c795e6e9e..0000000000 --- a/vendor/github.com/andybalholm/brotli/reader.go +++ /dev/null @@ -1,100 +0,0 @@ -package brotli - -import ( - "errors" - "io" -) - -type decodeError int - -func (err decodeError) Error() string { - return "brotli: " + string(decoderErrorString(int(err))) -} - -var errExcessiveInput = errors.New("brotli: excessive input") -var errInvalidState = errors.New("brotli: invalid state") - -// readBufSize is a "good" buffer size that avoids excessive round-trips -// between C and Go but doesn't waste too much memory on buffering. -// It is arbitrarily chosen to be equal to the constant used in io.Copy. -const readBufSize = 32 * 1024 - -// NewReader creates a new Reader reading the given reader. -func NewReader(src io.Reader) *Reader { - r := new(Reader) - r.Reset(src) - return r -} - -// Reset discards the Reader's state and makes it equivalent to the result of -// its original state from NewReader, but writing to src instead. 
-// This permits reusing a Reader rather than allocating a new one. -// Error is always nil -func (r *Reader) Reset(src io.Reader) error { - decoderStateInit(r) - r.src = src - r.buf = make([]byte, readBufSize) - return nil -} - -func (r *Reader) Read(p []byte) (n int, err error) { - if !decoderHasMoreOutput(r) && len(r.in) == 0 { - m, readErr := r.src.Read(r.buf) - if m == 0 { - // If readErr is `nil`, we just proxy underlying stream behavior. - return 0, readErr - } - r.in = r.buf[:m] - } - - if len(p) == 0 { - return 0, nil - } - - for { - var written uint - in_len := uint(len(r.in)) - out_len := uint(len(p)) - in_remaining := in_len - out_remaining := out_len - result := decoderDecompressStream(r, &in_remaining, &r.in, &out_remaining, &p) - written = out_len - out_remaining - n = int(written) - - switch result { - case decoderResultSuccess: - if len(r.in) > 0 { - return n, errExcessiveInput - } - return n, nil - case decoderResultError: - return n, decodeError(decoderGetErrorCode(r)) - case decoderResultNeedsMoreOutput: - if n == 0 { - return 0, io.ErrShortBuffer - } - return n, nil - case decoderNeedsMoreInput: - } - - if len(r.in) != 0 { - return 0, errInvalidState - } - - // Calling r.src.Read may block. Don't block if we have data to return. - if n > 0 { - return n, nil - } - - // Top off the buffer. - encN, err := r.src.Read(r.buf) - if encN == 0 { - // Not enough data to complete decoding. - if err == io.EOF { - return 0, io.ErrUnexpectedEOF - } - return 0, err - } - r.in = r.buf[:encN] - } -} diff --git a/vendor/github.com/andybalholm/brotli/ringbuffer.go b/vendor/github.com/andybalholm/brotli/ringbuffer.go deleted file mode 100644 index 1c8f86feec..0000000000 --- a/vendor/github.com/andybalholm/brotli/ringbuffer.go +++ /dev/null @@ -1,134 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* A ringBuffer(window_bits, tail_bits) contains `1 << window_bits' bytes of - data in a circular manner: writing a byte writes it to: - `position() % (1 << window_bits)'. - For convenience, the ringBuffer array contains another copy of the - first `1 << tail_bits' bytes: - buffer_[i] == buffer_[i + (1 << window_bits)], if i < (1 << tail_bits), - and another copy of the last two bytes: - buffer_[-1] == buffer_[(1 << window_bits) - 1] and - buffer_[-2] == buffer_[(1 << window_bits) - 2]. */ -type ringBuffer struct { - size_ uint32 - mask_ uint32 - tail_size_ uint32 - total_size_ uint32 - cur_size_ uint32 - pos_ uint32 - data_ []byte - buffer_ []byte -} - -func ringBufferInit(rb *ringBuffer) { - rb.pos_ = 0 -} - -func ringBufferSetup(params *encoderParams, rb *ringBuffer) { - var window_bits int = computeRbBits(params) - var tail_bits int = params.lgblock - *(*uint32)(&rb.size_) = 1 << uint(window_bits) - *(*uint32)(&rb.mask_) = (1 << uint(window_bits)) - 1 - *(*uint32)(&rb.tail_size_) = 1 << uint(tail_bits) - *(*uint32)(&rb.total_size_) = rb.size_ + rb.tail_size_ -} - -const kSlackForEightByteHashingEverywhere uint = 7 - -/* Allocates or re-allocates data_ to the given length + plus some slack - region before and after. Fills the slack regions with zeros. 
*/ -func ringBufferInitBuffer(buflen uint32, rb *ringBuffer) { - var new_data []byte - var i uint - size := 2 + int(buflen) + int(kSlackForEightByteHashingEverywhere) - if cap(rb.data_) < size { - new_data = make([]byte, size) - } else { - new_data = rb.data_[:size] - } - if rb.data_ != nil { - copy(new_data, rb.data_[:2+rb.cur_size_+uint32(kSlackForEightByteHashingEverywhere)]) - } - - rb.data_ = new_data - rb.cur_size_ = buflen - rb.buffer_ = rb.data_[2:] - rb.data_[1] = 0 - rb.data_[0] = rb.data_[1] - for i = 0; i < kSlackForEightByteHashingEverywhere; i++ { - rb.buffer_[rb.cur_size_+uint32(i)] = 0 - } -} - -func ringBufferWriteTail(bytes []byte, n uint, rb *ringBuffer) { - var masked_pos uint = uint(rb.pos_ & rb.mask_) - if uint32(masked_pos) < rb.tail_size_ { - /* Just fill the tail buffer with the beginning data. */ - var p uint = uint(rb.size_ + uint32(masked_pos)) - copy(rb.buffer_[p:], bytes[:brotli_min_size_t(n, uint(rb.tail_size_-uint32(masked_pos)))]) - } -} - -/* Push bytes into the ring buffer. */ -func ringBufferWrite(bytes []byte, n uint, rb *ringBuffer) { - if rb.pos_ == 0 && uint32(n) < rb.tail_size_ { - /* Special case for the first write: to process the first block, we don't - need to allocate the whole ring-buffer and we don't need the tail - either. However, we do this memory usage optimization only if the - first write is less than the tail size, which is also the input block - size, otherwise it is likely that other blocks will follow and we - will need to reallocate to the full size anyway. */ - rb.pos_ = uint32(n) - - ringBufferInitBuffer(rb.pos_, rb) - copy(rb.buffer_, bytes[:n]) - return - } - - if rb.cur_size_ < rb.total_size_ { - /* Lazily allocate the full buffer. */ - ringBufferInitBuffer(rb.total_size_, rb) - - /* Initialize the last two bytes to zero, so that we don't have to worry - later when we copy the last two bytes to the first two positions. 
*/ - rb.buffer_[rb.size_-2] = 0 - - rb.buffer_[rb.size_-1] = 0 - } - { - var masked_pos uint = uint(rb.pos_ & rb.mask_) - - /* The length of the writes is limited so that we do not need to worry - about a write */ - ringBufferWriteTail(bytes, n, rb) - - if uint32(masked_pos+n) <= rb.size_ { - /* A single write fits. */ - copy(rb.buffer_[masked_pos:], bytes[:n]) - } else { - /* Split into two writes. - Copy into the end of the buffer, including the tail buffer. */ - copy(rb.buffer_[masked_pos:], bytes[:brotli_min_size_t(n, uint(rb.total_size_-uint32(masked_pos)))]) - - /* Copy into the beginning of the buffer */ - copy(rb.buffer_, bytes[rb.size_-uint32(masked_pos):][:uint32(n)-(rb.size_-uint32(masked_pos))]) - } - } - { - var not_first_lap bool = rb.pos_&(1<<31) != 0 - var rb_pos_mask uint32 = (1 << 31) - 1 - rb.data_[0] = rb.buffer_[rb.size_-2] - rb.data_[1] = rb.buffer_[rb.size_-1] - rb.pos_ = (rb.pos_ & rb_pos_mask) + uint32(uint32(n)&rb_pos_mask) - if not_first_lap { - /* Wrap, but preserve not-a-first-lap feature. */ - rb.pos_ |= 1 << 31 - } - } -} diff --git a/vendor/github.com/andybalholm/brotli/state.go b/vendor/github.com/andybalholm/brotli/state.go deleted file mode 100644 index d03348fe80..0000000000 --- a/vendor/github.com/andybalholm/brotli/state.go +++ /dev/null @@ -1,295 +0,0 @@ -package brotli - -import "io" - -/* Copyright 2015 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Brotli state for partial streaming decoding. 
*/ -const ( - stateUninited = iota - stateLargeWindowBits - stateInitialize - stateMetablockBegin - stateMetablockHeader - stateMetablockHeader2 - stateContextModes - stateCommandBegin - stateCommandInner - stateCommandPostDecodeLiterals - stateCommandPostWrapCopy - stateUncompressed - stateMetadata - stateCommandInnerWrite - stateMetablockDone - stateCommandPostWrite1 - stateCommandPostWrite2 - stateHuffmanCode0 - stateHuffmanCode1 - stateHuffmanCode2 - stateHuffmanCode3 - stateContextMap1 - stateContextMap2 - stateTreeGroup - stateDone -) - -const ( - stateMetablockHeaderNone = iota - stateMetablockHeaderEmpty - stateMetablockHeaderNibbles - stateMetablockHeaderSize - stateMetablockHeaderUncompressed - stateMetablockHeaderReserved - stateMetablockHeaderBytes - stateMetablockHeaderMetadata -) - -const ( - stateUncompressedNone = iota - stateUncompressedWrite -) - -const ( - stateTreeGroupNone = iota - stateTreeGroupLoop -) - -const ( - stateContextMapNone = iota - stateContextMapReadPrefix - stateContextMapHuffman - stateContextMapDecode - stateContextMapTransform -) - -const ( - stateHuffmanNone = iota - stateHuffmanSimpleSize - stateHuffmanSimpleRead - stateHuffmanSimpleBuild - stateHuffmanComplex - stateHuffmanLengthSymbols -) - -const ( - stateDecodeUint8None = iota - stateDecodeUint8Short - stateDecodeUint8Long -) - -const ( - stateReadBlockLengthNone = iota - stateReadBlockLengthSuffix -) - -type Reader struct { - src io.Reader - buf []byte // scratch space for reading from src - in []byte // current chunk to decode; usually aliases buf - - state int - loop_counter int - br bitReader - buffer struct { - u64 uint64 - u8 [8]byte - } - buffer_length uint32 - pos int - max_backward_distance int - max_distance int - ringbuffer_size int - ringbuffer_mask int - dist_rb_idx int - dist_rb [4]int - error_code int - sub_loop_counter uint32 - ringbuffer []byte - ringbuffer_end []byte - htree_command []huffmanCode - context_lookup []byte - context_map_slice []byte - 
dist_context_map_slice []byte - literal_hgroup huffmanTreeGroup - insert_copy_hgroup huffmanTreeGroup - distance_hgroup huffmanTreeGroup - block_type_trees []huffmanCode - block_len_trees []huffmanCode - trivial_literal_context int - distance_context int - meta_block_remaining_len int - block_length_index uint32 - block_length [3]uint32 - num_block_types [3]uint32 - block_type_rb [6]uint32 - distance_postfix_bits uint32 - num_direct_distance_codes uint32 - distance_postfix_mask int - num_dist_htrees uint32 - dist_context_map []byte - literal_htree []huffmanCode - dist_htree_index byte - repeat_code_len uint32 - prev_code_len uint32 - copy_length int - distance_code int - rb_roundtrips uint - partial_pos_out uint - symbol uint32 - repeat uint32 - space uint32 - table [32]huffmanCode - symbol_lists symbolList - symbols_lists_array [huffmanMaxCodeLength + 1 + numCommandSymbols]uint16 - next_symbol [32]int - code_length_code_lengths [codeLengthCodes]byte - code_length_histo [16]uint16 - htree_index int - next []huffmanCode - context_index uint32 - max_run_length_prefix uint32 - code uint32 - context_map_table [huffmanMaxSize272]huffmanCode - substate_metablock_header int - substate_tree_group int - substate_context_map int - substate_uncompressed int - substate_huffman int - substate_decode_uint8 int - substate_read_block_length int - is_last_metablock uint - is_uncompressed uint - is_metadata uint - should_wrap_ringbuffer uint - canny_ringbuffer_allocation uint - large_window bool - size_nibbles uint - window_bits uint32 - new_ringbuffer_size int - num_literal_htrees uint32 - context_map []byte - context_modes []byte - dictionary *dictionary - transforms *transforms - trivial_literal_contexts [8]uint32 -} - -func decoderStateInit(s *Reader) bool { - s.error_code = 0 /* BROTLI_DECODER_NO_ERROR */ - - initBitReader(&s.br) - s.state = stateUninited - s.large_window = false - s.substate_metablock_header = stateMetablockHeaderNone - s.substate_tree_group = 
stateTreeGroupNone - s.substate_context_map = stateContextMapNone - s.substate_uncompressed = stateUncompressedNone - s.substate_huffman = stateHuffmanNone - s.substate_decode_uint8 = stateDecodeUint8None - s.substate_read_block_length = stateReadBlockLengthNone - - s.buffer_length = 0 - s.loop_counter = 0 - s.pos = 0 - s.rb_roundtrips = 0 - s.partial_pos_out = 0 - - s.block_type_trees = nil - s.block_len_trees = nil - s.ringbuffer = nil - s.ringbuffer_size = 0 - s.new_ringbuffer_size = 0 - s.ringbuffer_mask = 0 - - s.context_map = nil - s.context_modes = nil - s.dist_context_map = nil - s.context_map_slice = nil - s.dist_context_map_slice = nil - - s.sub_loop_counter = 0 - - s.literal_hgroup.codes = nil - s.literal_hgroup.htrees = nil - s.insert_copy_hgroup.codes = nil - s.insert_copy_hgroup.htrees = nil - s.distance_hgroup.codes = nil - s.distance_hgroup.htrees = nil - - s.is_last_metablock = 0 - s.is_uncompressed = 0 - s.is_metadata = 0 - s.should_wrap_ringbuffer = 0 - s.canny_ringbuffer_allocation = 1 - - s.window_bits = 0 - s.max_distance = 0 - s.dist_rb[0] = 16 - s.dist_rb[1] = 15 - s.dist_rb[2] = 11 - s.dist_rb[3] = 4 - s.dist_rb_idx = 0 - s.block_type_trees = nil - s.block_len_trees = nil - - s.symbol_lists.storage = s.symbols_lists_array[:] - s.symbol_lists.offset = huffmanMaxCodeLength + 1 - - s.dictionary = getDictionary() - s.transforms = getTransforms() - - return true -} - -func decoderStateMetablockBegin(s *Reader) { - s.meta_block_remaining_len = 0 - s.block_length[0] = 1 << 24 - s.block_length[1] = 1 << 24 - s.block_length[2] = 1 << 24 - s.num_block_types[0] = 1 - s.num_block_types[1] = 1 - s.num_block_types[2] = 1 - s.block_type_rb[0] = 1 - s.block_type_rb[1] = 0 - s.block_type_rb[2] = 1 - s.block_type_rb[3] = 0 - s.block_type_rb[4] = 1 - s.block_type_rb[5] = 0 - s.context_map = nil - s.context_modes = nil - s.dist_context_map = nil - s.context_map_slice = nil - s.literal_htree = nil - s.dist_context_map_slice = nil - s.dist_htree_index = 0 - 
s.context_lookup = nil - s.literal_hgroup.codes = nil - s.literal_hgroup.htrees = nil - s.insert_copy_hgroup.codes = nil - s.insert_copy_hgroup.htrees = nil - s.distance_hgroup.codes = nil - s.distance_hgroup.htrees = nil -} - -func decoderStateCleanupAfterMetablock(s *Reader) { - s.context_modes = nil - s.context_map = nil - s.dist_context_map = nil - s.literal_hgroup.htrees = nil - s.insert_copy_hgroup.htrees = nil - s.distance_hgroup.htrees = nil -} - -func decoderHuffmanTreeGroupInit(s *Reader, group *huffmanTreeGroup, alphabet_size uint32, max_symbol uint32, ntrees uint32) bool { - var max_table_size uint = uint(kMaxHuffmanTableSize[(alphabet_size+31)>>5]) - group.alphabet_size = uint16(alphabet_size) - group.max_symbol = uint16(max_symbol) - group.num_htrees = uint16(ntrees) - group.htrees = make([][]huffmanCode, ntrees) - group.codes = make([]huffmanCode, (uint(ntrees) * max_table_size)) - return !(group.codes == nil) -} diff --git a/vendor/github.com/andybalholm/brotli/static_dict.go b/vendor/github.com/andybalholm/brotli/static_dict.go deleted file mode 100644 index 8e7492d7ae..0000000000 --- a/vendor/github.com/andybalholm/brotli/static_dict.go +++ /dev/null @@ -1,666 +0,0 @@ -package brotli - -import "encoding/binary" - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Class to model the static dictionary. */ - -const maxStaticDictionaryMatchLen = 37 - -const kInvalidMatch uint32 = 0xFFFFFFF - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ -func hash(data []byte) uint32 { - var h uint32 = binary.LittleEndian.Uint32(data) * kDictHashMul32 - - /* The higher bits contain more mixture from the multiplication, - so we take our results from there. 
*/ - return h >> uint(32-kDictNumBits) -} - -func addMatch(distance uint, len uint, len_code uint, matches []uint32) { - var match uint32 = uint32((distance << 5) + len_code) - matches[len] = brotli_min_uint32_t(matches[len], match) -} - -func dictMatchLength(dict *dictionary, data []byte, id uint, len uint, maxlen uint) uint { - var offset uint = uint(dict.offsets_by_length[len]) + len*id - return findMatchLengthWithLimit(dict.data[offset:], data, brotli_min_size_t(uint(len), maxlen)) -} - -func isMatch(d *dictionary, w dictWord, data []byte, max_length uint) bool { - if uint(w.len) > max_length { - return false - } else { - var offset uint = uint(d.offsets_by_length[w.len]) + uint(w.len)*uint(w.idx) - var dict []byte = d.data[offset:] - if w.transform == 0 { - /* Match against base dictionary word. */ - return findMatchLengthWithLimit(dict, data, uint(w.len)) == uint(w.len) - } else if w.transform == 10 { - /* Match against uppercase first transform. - Note that there are only ASCII uppercase words in the lookup table. */ - return dict[0] >= 'a' && dict[0] <= 'z' && (dict[0]^32) == data[0] && findMatchLengthWithLimit(dict[1:], data[1:], uint(w.len)-1) == uint(w.len-1) - } else { - /* Match against uppercase all transform. - Note that there are only ASCII uppercase words in the lookup table. 
*/ - var i uint - for i = 0; i < uint(w.len); i++ { - if dict[i] >= 'a' && dict[i] <= 'z' { - if (dict[i] ^ 32) != data[i] { - return false - } - } else { - if dict[i] != data[i] { - return false - } - } - } - - return true - } - } -} - -func findAllStaticDictionaryMatches(dict *encoderDictionary, data []byte, min_length uint, max_length uint, matches []uint32) bool { - var has_found_match bool = false - { - var offset uint = uint(dict.buckets[hash(data)]) - var end bool = offset == 0 - for !end { - var w dictWord - w = dict.dict_words[offset] - offset++ - var l uint = uint(w.len) & 0x1F - var n uint = uint(1) << dict.words.size_bits_by_length[l] - var id uint = uint(w.idx) - end = !(w.len&0x80 == 0) - w.len = byte(l) - if w.transform == 0 { - var matchlen uint = dictMatchLength(dict.words, data, id, l, max_length) - var s []byte - var minlen uint - var maxlen uint - var len uint - - /* Transform "" + BROTLI_TRANSFORM_IDENTITY + "" */ - if matchlen == l { - addMatch(id, l, l, matches) - has_found_match = true - } - - /* Transforms "" + BROTLI_TRANSFORM_OMIT_LAST_1 + "" and - "" + BROTLI_TRANSFORM_OMIT_LAST_1 + "ing " */ - if matchlen >= l-1 { - addMatch(id+12*n, l-1, l, matches) - if l+2 < max_length && data[l-1] == 'i' && data[l] == 'n' && data[l+1] == 'g' && data[l+2] == ' ' { - addMatch(id+49*n, l+3, l, matches) - } - - has_found_match = true - } - - /* Transform "" + BROTLI_TRANSFORM_OMIT_LAST_# + "" (# = 2 .. 
9) */ - minlen = min_length - - if l > 9 { - minlen = brotli_max_size_t(minlen, l-9) - } - maxlen = brotli_min_size_t(matchlen, l-2) - for len = minlen; len <= maxlen; len++ { - var cut uint = l - len - var transform_id uint = (cut << 2) + uint((dict.cutoffTransforms>>(cut*6))&0x3F) - addMatch(id+transform_id*n, uint(len), l, matches) - has_found_match = true - } - - if matchlen < l || l+6 >= max_length { - continue - } - - s = data[l:] - - /* Transforms "" + BROTLI_TRANSFORM_IDENTITY + */ - if s[0] == ' ' { - addMatch(id+n, l+1, l, matches) - if s[1] == 'a' { - if s[2] == ' ' { - addMatch(id+28*n, l+3, l, matches) - } else if s[2] == 's' { - if s[3] == ' ' { - addMatch(id+46*n, l+4, l, matches) - } - } else if s[2] == 't' { - if s[3] == ' ' { - addMatch(id+60*n, l+4, l, matches) - } - } else if s[2] == 'n' { - if s[3] == 'd' && s[4] == ' ' { - addMatch(id+10*n, l+5, l, matches) - } - } - } else if s[1] == 'b' { - if s[2] == 'y' && s[3] == ' ' { - addMatch(id+38*n, l+4, l, matches) - } - } else if s[1] == 'i' { - if s[2] == 'n' { - if s[3] == ' ' { - addMatch(id+16*n, l+4, l, matches) - } - } else if s[2] == 's' { - if s[3] == ' ' { - addMatch(id+47*n, l+4, l, matches) - } - } - } else if s[1] == 'f' { - if s[2] == 'o' { - if s[3] == 'r' && s[4] == ' ' { - addMatch(id+25*n, l+5, l, matches) - } - } else if s[2] == 'r' { - if s[3] == 'o' && s[4] == 'm' && s[5] == ' ' { - addMatch(id+37*n, l+6, l, matches) - } - } - } else if s[1] == 'o' { - if s[2] == 'f' { - if s[3] == ' ' { - addMatch(id+8*n, l+4, l, matches) - } - } else if s[2] == 'n' { - if s[3] == ' ' { - addMatch(id+45*n, l+4, l, matches) - } - } - } else if s[1] == 'n' { - if s[2] == 'o' && s[3] == 't' && s[4] == ' ' { - addMatch(id+80*n, l+5, l, matches) - } - } else if s[1] == 't' { - if s[2] == 'h' { - if s[3] == 'e' { - if s[4] == ' ' { - addMatch(id+5*n, l+5, l, matches) - } - } else if s[3] == 'a' { - if s[4] == 't' && s[5] == ' ' { - addMatch(id+29*n, l+6, l, matches) - } - } - } else if s[2] == 'o' { 
- if s[3] == ' ' { - addMatch(id+17*n, l+4, l, matches) - } - } - } else if s[1] == 'w' { - if s[2] == 'i' && s[3] == 't' && s[4] == 'h' && s[5] == ' ' { - addMatch(id+35*n, l+6, l, matches) - } - } - } else if s[0] == '"' { - addMatch(id+19*n, l+1, l, matches) - if s[1] == '>' { - addMatch(id+21*n, l+2, l, matches) - } - } else if s[0] == '.' { - addMatch(id+20*n, l+1, l, matches) - if s[1] == ' ' { - addMatch(id+31*n, l+2, l, matches) - if s[2] == 'T' && s[3] == 'h' { - if s[4] == 'e' { - if s[5] == ' ' { - addMatch(id+43*n, l+6, l, matches) - } - } else if s[4] == 'i' { - if s[5] == 's' && s[6] == ' ' { - addMatch(id+75*n, l+7, l, matches) - } - } - } - } - } else if s[0] == ',' { - addMatch(id+76*n, l+1, l, matches) - if s[1] == ' ' { - addMatch(id+14*n, l+2, l, matches) - } - } else if s[0] == '\n' { - addMatch(id+22*n, l+1, l, matches) - if s[1] == '\t' { - addMatch(id+50*n, l+2, l, matches) - } - } else if s[0] == ']' { - addMatch(id+24*n, l+1, l, matches) - } else if s[0] == '\'' { - addMatch(id+36*n, l+1, l, matches) - } else if s[0] == ':' { - addMatch(id+51*n, l+1, l, matches) - } else if s[0] == '(' { - addMatch(id+57*n, l+1, l, matches) - } else if s[0] == '=' { - if s[1] == '"' { - addMatch(id+70*n, l+2, l, matches) - } else if s[1] == '\'' { - addMatch(id+86*n, l+2, l, matches) - } - } else if s[0] == 'a' { - if s[1] == 'l' && s[2] == ' ' { - addMatch(id+84*n, l+3, l, matches) - } - } else if s[0] == 'e' { - if s[1] == 'd' { - if s[2] == ' ' { - addMatch(id+53*n, l+3, l, matches) - } - } else if s[1] == 'r' { - if s[2] == ' ' { - addMatch(id+82*n, l+3, l, matches) - } - } else if s[1] == 's' { - if s[2] == 't' && s[3] == ' ' { - addMatch(id+95*n, l+4, l, matches) - } - } - } else if s[0] == 'f' { - if s[1] == 'u' && s[2] == 'l' && s[3] == ' ' { - addMatch(id+90*n, l+4, l, matches) - } - } else if s[0] == 'i' { - if s[1] == 'v' { - if s[2] == 'e' && s[3] == ' ' { - addMatch(id+92*n, l+4, l, matches) - } - } else if s[1] == 'z' { - if s[2] == 'e' && 
s[3] == ' ' { - addMatch(id+100*n, l+4, l, matches) - } - } - } else if s[0] == 'l' { - if s[1] == 'e' { - if s[2] == 's' && s[3] == 's' && s[4] == ' ' { - addMatch(id+93*n, l+5, l, matches) - } - } else if s[1] == 'y' { - if s[2] == ' ' { - addMatch(id+61*n, l+3, l, matches) - } - } - } else if s[0] == 'o' { - if s[1] == 'u' && s[2] == 's' && s[3] == ' ' { - addMatch(id+106*n, l+4, l, matches) - } - } - } else { - var is_all_caps bool = (w.transform != transformUppercaseFirst) - /* Set is_all_caps=0 for BROTLI_TRANSFORM_UPPERCASE_FIRST and - is_all_caps=1 otherwise (BROTLI_TRANSFORM_UPPERCASE_ALL) - transform. */ - - var s []byte - if !isMatch(dict.words, w, data, max_length) { - continue - } - - /* Transform "" + kUppercase{First,All} + "" */ - var tmp int - if is_all_caps { - tmp = 44 - } else { - tmp = 9 - } - addMatch(id+uint(tmp)*n, l, l, matches) - - has_found_match = true - if l+1 >= max_length { - continue - } - - /* Transforms "" + kUppercase{First,All} + */ - s = data[l:] - - if s[0] == ' ' { - var tmp int - if is_all_caps { - tmp = 68 - } else { - tmp = 4 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - } else if s[0] == '"' { - var tmp int - if is_all_caps { - tmp = 87 - } else { - tmp = 66 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - if s[1] == '>' { - var tmp int - if is_all_caps { - tmp = 97 - } else { - tmp = 69 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } - } else if s[0] == '.' 
{ - var tmp int - if is_all_caps { - tmp = 101 - } else { - tmp = 79 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - if s[1] == ' ' { - var tmp int - if is_all_caps { - tmp = 114 - } else { - tmp = 88 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } - } else if s[0] == ',' { - var tmp int - if is_all_caps { - tmp = 112 - } else { - tmp = 99 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - if s[1] == ' ' { - var tmp int - if is_all_caps { - tmp = 107 - } else { - tmp = 58 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } - } else if s[0] == '\'' { - var tmp int - if is_all_caps { - tmp = 94 - } else { - tmp = 74 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - } else if s[0] == '(' { - var tmp int - if is_all_caps { - tmp = 113 - } else { - tmp = 78 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - } else if s[0] == '=' { - if s[1] == '"' { - var tmp int - if is_all_caps { - tmp = 105 - } else { - tmp = 104 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } else if s[1] == '\'' { - var tmp int - if is_all_caps { - tmp = 116 - } else { - tmp = 108 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } - } - } - } - } - - /* Transforms with prefixes " " and "." */ - if max_length >= 5 && (data[0] == ' ' || data[0] == '.') { - var is_space bool = (data[0] == ' ') - var offset uint = uint(dict.buckets[hash(data[1:])]) - var end bool = offset == 0 - for !end { - var w dictWord - w = dict.dict_words[offset] - offset++ - var l uint = uint(w.len) & 0x1F - var n uint = uint(1) << dict.words.size_bits_by_length[l] - var id uint = uint(w.idx) - end = !(w.len&0x80 == 0) - w.len = byte(l) - if w.transform == 0 { - var s []byte - if !isMatch(dict.words, w, data[1:], max_length-1) { - continue - } - - /* Transforms " " + BROTLI_TRANSFORM_IDENTITY + "" and - "." 
+ BROTLI_TRANSFORM_IDENTITY + "" */ - var tmp int - if is_space { - tmp = 6 - } else { - tmp = 32 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - - has_found_match = true - if l+2 >= max_length { - continue - } - - /* Transforms " " + BROTLI_TRANSFORM_IDENTITY + and - "." + BROTLI_TRANSFORM_IDENTITY + - */ - s = data[l+1:] - - if s[0] == ' ' { - var tmp int - if is_space { - tmp = 2 - } else { - tmp = 77 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } else if s[0] == '(' { - var tmp int - if is_space { - tmp = 89 - } else { - tmp = 67 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } else if is_space { - if s[0] == ',' { - addMatch(id+103*n, l+2, l, matches) - if s[1] == ' ' { - addMatch(id+33*n, l+3, l, matches) - } - } else if s[0] == '.' { - addMatch(id+71*n, l+2, l, matches) - if s[1] == ' ' { - addMatch(id+52*n, l+3, l, matches) - } - } else if s[0] == '=' { - if s[1] == '"' { - addMatch(id+81*n, l+3, l, matches) - } else if s[1] == '\'' { - addMatch(id+98*n, l+3, l, matches) - } - } - } - } else if is_space { - var is_all_caps bool = (w.transform != transformUppercaseFirst) - /* Set is_all_caps=0 for BROTLI_TRANSFORM_UPPERCASE_FIRST and - is_all_caps=1 otherwise (BROTLI_TRANSFORM_UPPERCASE_ALL) - transform. 
*/ - - var s []byte - if !isMatch(dict.words, w, data[1:], max_length-1) { - continue - } - - /* Transforms " " + kUppercase{First,All} + "" */ - var tmp int - if is_all_caps { - tmp = 85 - } else { - tmp = 30 - } - addMatch(id+uint(tmp)*n, l+1, l, matches) - - has_found_match = true - if l+2 >= max_length { - continue - } - - /* Transforms " " + kUppercase{First,All} + */ - s = data[l+1:] - - if s[0] == ' ' { - var tmp int - if is_all_caps { - tmp = 83 - } else { - tmp = 15 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - } else if s[0] == ',' { - if !is_all_caps { - addMatch(id+109*n, l+2, l, matches) - } - - if s[1] == ' ' { - var tmp int - if is_all_caps { - tmp = 111 - } else { - tmp = 65 - } - addMatch(id+uint(tmp)*n, l+3, l, matches) - } - } else if s[0] == '.' { - var tmp int - if is_all_caps { - tmp = 115 - } else { - tmp = 96 - } - addMatch(id+uint(tmp)*n, l+2, l, matches) - if s[1] == ' ' { - var tmp int - if is_all_caps { - tmp = 117 - } else { - tmp = 91 - } - addMatch(id+uint(tmp)*n, l+3, l, matches) - } - } else if s[0] == '=' { - if s[1] == '"' { - var tmp int - if is_all_caps { - tmp = 110 - } else { - tmp = 118 - } - addMatch(id+uint(tmp)*n, l+3, l, matches) - } else if s[1] == '\'' { - var tmp int - if is_all_caps { - tmp = 119 - } else { - tmp = 120 - } - addMatch(id+uint(tmp)*n, l+3, l, matches) - } - } - } - } - } - - if max_length >= 6 { - /* Transforms with prefixes "e ", "s ", ", " and "\xC2\xA0" */ - if (data[1] == ' ' && (data[0] == 'e' || data[0] == 's' || data[0] == ',')) || (data[0] == 0xC2 && data[1] == 0xA0) { - var offset uint = uint(dict.buckets[hash(data[2:])]) - var end bool = offset == 0 - for !end { - var w dictWord - w = dict.dict_words[offset] - offset++ - var l uint = uint(w.len) & 0x1F - var n uint = uint(1) << dict.words.size_bits_by_length[l] - var id uint = uint(w.idx) - end = !(w.len&0x80 == 0) - w.len = byte(l) - if w.transform == 0 && isMatch(dict.words, w, data[2:], max_length-2) { - if data[0] == 0xC2 { - 
addMatch(id+102*n, l+2, l, matches) - has_found_match = true - } else if l+2 < max_length && data[l+2] == ' ' { - var t uint = 13 - if data[0] == 'e' { - t = 18 - } else if data[0] == 's' { - t = 7 - } - addMatch(id+t*n, l+3, l, matches) - has_found_match = true - } - } - } - } - } - - if max_length >= 9 { - /* Transforms with prefixes " the " and ".com/" */ - if (data[0] == ' ' && data[1] == 't' && data[2] == 'h' && data[3] == 'e' && data[4] == ' ') || (data[0] == '.' && data[1] == 'c' && data[2] == 'o' && data[3] == 'm' && data[4] == '/') { - var offset uint = uint(dict.buckets[hash(data[5:])]) - var end bool = offset == 0 - for !end { - var w dictWord - w = dict.dict_words[offset] - offset++ - var l uint = uint(w.len) & 0x1F - var n uint = uint(1) << dict.words.size_bits_by_length[l] - var id uint = uint(w.idx) - end = !(w.len&0x80 == 0) - w.len = byte(l) - if w.transform == 0 && isMatch(dict.words, w, data[5:], max_length-5) { - var tmp int - if data[0] == ' ' { - tmp = 41 - } else { - tmp = 72 - } - addMatch(id+uint(tmp)*n, l+5, l, matches) - has_found_match = true - if l+5 < max_length { - var s []byte = data[l+5:] - if data[0] == ' ' { - if l+8 < max_length && s[0] == ' ' && s[1] == 'o' && s[2] == 'f' && s[3] == ' ' { - addMatch(id+62*n, l+9, l, matches) - if l+12 < max_length && s[4] == 't' && s[5] == 'h' && s[6] == 'e' && s[7] == ' ' { - addMatch(id+73*n, l+13, l, matches) - } - } - } - } - } - } - } - } - - return has_found_match -} diff --git a/vendor/github.com/andybalholm/brotli/static_dict_lut.go b/vendor/github.com/andybalholm/brotli/static_dict_lut.go deleted file mode 100644 index b33963e967..0000000000 --- a/vendor/github.com/andybalholm/brotli/static_dict_lut.go +++ /dev/null @@ -1,75094 +0,0 @@ -package brotli - -/* Copyright 2017 Google Inc. All Rights Reserved. - - Distributed under MIT license. 
- See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Lookup table for static dictionary and transforms. */ - -type dictWord struct { - len byte - transform byte - idx uint16 -} - -const kDictNumBits int = 15 - -const kDictHashMul32 uint32 = 0x1E35A7BD - -var kStaticDictionaryBuckets = [32768]uint16{ - 1, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3, - 6, - 0, - 0, - 0, - 0, - 0, - 20, - 0, - 0, - 0, - 21, - 0, - 22, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23, - 0, - 0, - 25, - 0, - 29, - 0, - 53, - 0, - 0, - 0, - 0, - 0, - 0, - 55, - 0, - 0, - 0, - 0, - 0, - 0, - 61, - 76, - 0, - 0, - 0, - 94, - 0, - 0, - 0, - 0, - 0, - 0, - 96, - 0, - 97, - 0, - 98, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 99, - 101, - 106, - 108, - 0, - 0, - 0, - 0, - 0, - 110, - 0, - 111, - 112, - 0, - 113, - 118, - 124, - 0, - 0, - 0, - 0, - 0, - 125, - 128, - 0, - 0, - 0, - 0, - 129, - 0, - 0, - 131, - 0, - 0, - 0, - 0, - 0, - 0, - 132, - 0, - 0, - 135, - 0, - 0, - 0, - 137, - 0, - 0, - 0, - 0, - 0, - 138, - 139, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 142, - 143, - 144, - 0, - 0, - 0, - 0, - 0, - 145, - 0, - 0, - 0, - 146, - 149, - 151, - 152, - 0, - 0, - 153, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 154, - 0, - 0, - 0, - 0, - 0, - 0, - 155, - 0, - 0, - 0, - 0, - 160, - 182, - 0, - 0, - 0, - 0, - 0, - 0, - 183, - 0, - 0, - 0, - 188, - 189, - 0, - 0, - 192, - 0, - 0, - 0, - 0, - 0, - 0, - 194, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 197, - 202, - 209, - 0, - 0, - 210, - 0, - 224, - 0, - 0, - 0, - 225, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 231, - 0, - 0, - 0, - 232, - 0, - 240, - 0, - 0, - 242, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 244, - 0, - 0, - 0, - 246, - 0, - 0, - 249, - 251, - 253, - 0, - 0, - 0, - 0, - 0, - 258, - 0, - 0, - 261, - 263, - 0, - 0, - 0, - 267, - 0, - 0, - 268, - 0, - 269, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 271, - 0, - 0, - 0, - 0, - 0, - 0, - 272, - 0, - 273, - 0, 
- 277, - 0, - 278, - 286, - 0, - 0, - 0, - 0, - 287, - 0, - 289, - 290, - 291, - 0, - 0, - 0, - 295, - 0, - 0, - 296, - 297, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 298, - 0, - 0, - 0, - 299, - 0, - 0, - 305, - 0, - 324, - 0, - 0, - 0, - 0, - 0, - 327, - 0, - 328, - 329, - 0, - 0, - 0, - 0, - 336, - 0, - 0, - 340, - 0, - 341, - 342, - 343, - 0, - 0, - 346, - 0, - 348, - 0, - 0, - 0, - 0, - 0, - 0, - 349, - 351, - 0, - 0, - 355, - 0, - 363, - 0, - 364, - 0, - 368, - 369, - 0, - 370, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 372, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 373, - 0, - 375, - 0, - 0, - 0, - 0, - 376, - 377, - 0, - 0, - 394, - 395, - 396, - 0, - 0, - 398, - 0, - 0, - 0, - 0, - 400, - 0, - 0, - 408, - 0, - 0, - 0, - 0, - 420, - 0, - 0, - 0, - 0, - 0, - 0, - 421, - 0, - 0, - 422, - 423, - 0, - 0, - 429, - 435, - 436, - 442, - 0, - 0, - 443, - 0, - 444, - 445, - 453, - 456, - 0, - 457, - 0, - 0, - 0, - 0, - 0, - 458, - 0, - 0, - 0, - 459, - 0, - 0, - 0, - 460, - 0, - 462, - 463, - 465, - 0, - 0, - 0, - 0, - 0, - 0, - 466, - 469, - 0, - 0, - 0, - 0, - 0, - 0, - 470, - 0, - 0, - 0, - 474, - 0, - 476, - 0, - 0, - 0, - 0, - 483, - 0, - 485, - 0, - 0, - 0, - 486, - 0, - 0, - 488, - 491, - 492, - 0, - 0, - 497, - 499, - 500, - 0, - 501, - 0, - 0, - 0, - 505, - 0, - 0, - 506, - 0, - 0, - 0, - 507, - 0, - 0, - 0, - 509, - 0, - 0, - 0, - 0, - 511, - 512, - 519, - 0, - 0, - 0, - 0, - 0, - 0, - 529, - 530, - 0, - 0, - 0, - 534, - 0, - 0, - 0, - 0, - 543, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 553, - 0, - 0, - 0, - 0, - 557, - 560, - 0, - 0, - 0, - 0, - 0, - 0, - 561, - 0, - 564, - 0, - 0, - 0, - 0, - 0, - 0, - 565, - 566, - 0, - 575, - 0, - 619, - 0, - 620, - 0, - 0, - 623, - 624, - 0, - 0, - 0, - 625, - 0, - 0, - 626, - 627, - 0, - 0, - 628, - 0, - 0, - 0, - 0, - 630, - 0, - 631, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 641, - 0, - 0, - 0, - 0, - 643, - 656, - 668, - 0, - 0, - 0, - 673, - 0, - 0, - 0, - 674, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 682, - 0, - 687, - 0, - 690, - 0, - 693, - 699, - 700, - 0, - 0, - 0, - 0, - 0, - 0, - 704, - 705, - 0, - 0, - 0, - 0, - 707, - 710, - 0, - 711, - 0, - 0, - 0, - 0, - 726, - 0, - 0, - 729, - 0, - 0, - 0, - 730, - 731, - 0, - 0, - 0, - 0, - 0, - 752, - 0, - 0, - 0, - 762, - 0, - 763, - 0, - 0, - 767, - 0, - 0, - 0, - 770, - 774, - 0, - 0, - 775, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 776, - 0, - 0, - 0, - 777, - 783, - 0, - 0, - 0, - 785, - 788, - 0, - 0, - 0, - 0, - 790, - 0, - 0, - 0, - 793, - 0, - 0, - 0, - 0, - 794, - 0, - 0, - 804, - 819, - 821, - 0, - 827, - 0, - 0, - 0, - 834, - 0, - 0, - 835, - 0, - 0, - 0, - 841, - 0, - 844, - 0, - 850, - 851, - 859, - 0, - 860, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 874, - 0, - 876, - 0, - 877, - 890, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 893, - 894, - 898, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 899, - 0, - 0, - 0, - 900, - 904, - 906, - 0, - 0, - 0, - 907, - 0, - 908, - 909, - 0, - 910, - 0, - 0, - 0, - 0, - 911, - 0, - 0, - 0, - 0, - 0, - 916, - 0, - 0, - 0, - 922, - 925, - 0, - 930, - 0, - 934, - 0, - 0, - 0, - 0, - 0, - 943, - 0, - 0, - 944, - 0, - 953, - 954, - 0, - 0, - 0, - 0, - 0, - 0, - 955, - 0, - 962, - 963, - 0, - 0, - 976, - 0, - 0, - 977, - 978, - 979, - 980, - 0, - 981, - 0, - 0, - 0, - 0, - 984, - 0, - 0, - 985, - 0, - 0, - 987, - 989, - 991, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 992, - 0, - 0, - 0, - 993, - 0, - 0, - 0, - 0, - 0, - 0, - 996, - 0, - 0, - 0, - 1000, - 0, - 0, - 0, - 0, - 0, - 1002, - 0, - 0, - 0, - 0, - 1005, - 1007, - 0, - 0, - 0, - 1009, - 0, - 0, - 0, - 1010, - 0, - 0, - 0, - 0, - 0, - 0, - 1011, - 0, - 1012, - 0, - 0, - 0, - 0, - 1014, - 1016, - 0, - 0, - 0, - 1020, - 0, - 1021, - 0, - 0, - 0, - 0, - 1022, - 0, - 0, - 0, - 1024, - 0, - 0, - 0, - 0, - 0, - 0, - 1025, - 0, - 0, - 1026, - 1027, - 0, - 0, - 0, - 0, - 0, - 1031, - 0, - 1033, - 0, - 0, - 0, - 0, - 1034, - 0, - 0, - 0, - 1037, - 1040, - 0, - 0, - 
0, - 1042, - 1043, - 0, - 0, - 1053, - 0, - 1054, - 0, - 0, - 1057, - 0, - 0, - 0, - 1058, - 0, - 0, - 1060, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1061, - 0, - 0, - 1062, - 0, - 0, - 0, - 0, - 1063, - 0, - 0, - 0, - 0, - 1064, - 0, - 0, - 0, - 0, - 0, - 1065, - 0, - 0, - 0, - 0, - 1066, - 1067, - 0, - 0, - 0, - 1069, - 1070, - 1072, - 0, - 0, - 0, - 0, - 0, - 0, - 1073, - 0, - 1075, - 0, - 0, - 0, - 0, - 0, - 0, - 1080, - 1084, - 0, - 0, - 0, - 0, - 1088, - 0, - 0, - 0, - 0, - 0, - 0, - 1094, - 0, - 1095, - 0, - 1107, - 0, - 0, - 0, - 1112, - 1114, - 0, - 1119, - 0, - 1122, - 0, - 0, - 1126, - 0, - 1129, - 0, - 1130, - 0, - 0, - 0, - 0, - 0, - 1132, - 0, - 0, - 0, - 0, - 0, - 0, - 1144, - 0, - 0, - 1145, - 1146, - 0, - 1148, - 1149, - 0, - 0, - 1150, - 1151, - 0, - 0, - 0, - 0, - 1152, - 0, - 1153, - 0, - 0, - 0, - 0, - 0, - 1154, - 0, - 1163, - 0, - 0, - 0, - 1164, - 0, - 0, - 0, - 0, - 0, - 1165, - 0, - 1167, - 0, - 1170, - 0, - 0, - 0, - 0, - 0, - 1171, - 1172, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1173, - 1175, - 1177, - 0, - 1186, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1195, - 0, - 0, - 1221, - 0, - 0, - 1224, - 0, - 0, - 1227, - 0, - 0, - 0, - 0, - 0, - 1228, - 1229, - 0, - 0, - 1230, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1231, - 0, - 0, - 0, - 1233, - 0, - 0, - 1243, - 1244, - 1246, - 1248, - 0, - 0, - 0, - 0, - 1254, - 1255, - 1258, - 1259, - 0, - 0, - 0, - 1260, - 0, - 0, - 1261, - 0, - 0, - 0, - 1262, - 1264, - 0, - 0, - 1265, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1266, - 0, - 1267, - 0, - 0, - 0, - 0, - 1273, - 1274, - 1276, - 1289, - 0, - 0, - 1291, - 1292, - 1293, - 0, - 0, - 1294, - 1295, - 1296, - 0, - 0, - 0, - 0, - 1302, - 0, - 1304, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1311, - 1312, - 0, - 1314, - 0, - 1316, - 1320, - 1321, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1322, - 1323, - 1324, - 0, - 1335, - 0, - 1336, - 0, - 0, - 0, - 0, - 1341, - 1342, - 0, - 1346, - 0, - 1357, - 0, - 0, - 0, - 
1358, - 1360, - 0, - 0, - 0, - 0, - 0, - 0, - 1361, - 0, - 0, - 0, - 1362, - 1365, - 0, - 1366, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1379, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1386, - 0, - 1388, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1395, - 0, - 0, - 0, - 0, - 1403, - 0, - 1405, - 0, - 0, - 1407, - 0, - 0, - 0, - 0, - 0, - 1408, - 1409, - 0, - 1410, - 0, - 0, - 0, - 1412, - 1413, - 1416, - 0, - 0, - 1429, - 1451, - 0, - 0, - 1454, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1455, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1456, - 0, - 0, - 0, - 0, - 1459, - 1460, - 1461, - 1475, - 0, - 0, - 0, - 0, - 0, - 0, - 1477, - 0, - 1480, - 0, - 1481, - 0, - 0, - 1486, - 0, - 0, - 1495, - 0, - 0, - 0, - 1496, - 0, - 0, - 1498, - 1499, - 1501, - 1520, - 1521, - 0, - 0, - 0, - 1526, - 0, - 0, - 0, - 0, - 1528, - 1529, - 0, - 1533, - 1536, - 0, - 0, - 0, - 1537, - 1538, - 1549, - 0, - 1550, - 1558, - 1559, - 1572, - 0, - 1573, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1575, - 0, - 0, - 0, - 0, - 0, - 1579, - 0, - 1599, - 0, - 1603, - 0, - 1604, - 0, - 1605, - 0, - 0, - 0, - 0, - 0, - 1608, - 1610, - 0, - 0, - 0, - 0, - 1611, - 0, - 1615, - 0, - 1616, - 1618, - 0, - 1619, - 0, - 0, - 1622, - 0, - 0, - 0, - 0, - 1634, - 0, - 0, - 0, - 1635, - 0, - 0, - 0, - 1641, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1643, - 0, - 0, - 0, - 1650, - 0, - 0, - 1652, - 0, - 0, - 0, - 0, - 0, - 1653, - 0, - 0, - 0, - 1654, - 0, - 0, - 0, - 0, - 1655, - 0, - 1662, - 0, - 0, - 1663, - 1664, - 0, - 0, - 1668, - 0, - 0, - 1669, - 1670, - 0, - 1672, - 1673, - 0, - 0, - 0, - 0, - 0, - 1674, - 0, - 0, - 0, - 1675, - 1676, - 1680, - 0, - 1682, - 0, - 0, - 1687, - 0, - 0, - 0, - 0, - 0, - 1704, - 0, - 0, - 1705, - 0, - 0, - 1721, - 0, - 0, - 0, - 0, - 1734, - 1735, - 0, - 0, - 0, - 0, - 1737, - 0, - 0, - 0, - 0, - 1739, - 0, - 0, - 1740, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1741, - 1743, - 0, - 0, - 0, - 0, - 1745, - 0, - 0, - 0, - 
1749, - 0, - 0, - 0, - 1751, - 0, - 0, - 0, - 0, - 0, - 0, - 1760, - 0, - 0, - 0, - 0, - 1765, - 0, - 0, - 0, - 0, - 0, - 1784, - 0, - 1785, - 1787, - 0, - 0, - 0, - 0, - 1788, - 1789, - 0, - 0, - 0, - 0, - 1790, - 1791, - 1793, - 0, - 1798, - 1799, - 0, - 0, - 0, - 0, - 1801, - 0, - 1803, - 1805, - 0, - 0, - 0, - 1806, - 1811, - 0, - 1812, - 1814, - 0, - 1821, - 0, - 0, - 0, - 0, - 0, - 1822, - 1833, - 0, - 0, - 0, - 0, - 0, - 0, - 1848, - 0, - 0, - 0, - 0, - 0, - 0, - 1857, - 0, - 0, - 0, - 1859, - 0, - 0, - 0, - 0, - 1861, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1866, - 0, - 1921, - 1925, - 0, - 0, - 0, - 1929, - 1930, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1931, - 0, - 0, - 0, - 0, - 1932, - 0, - 0, - 0, - 1934, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1946, - 0, - 0, - 1948, - 0, - 0, - 0, - 0, - 1950, - 0, - 1957, - 0, - 1958, - 0, - 0, - 0, - 0, - 0, - 1965, - 1967, - 0, - 0, - 0, - 0, - 1968, - 0, - 1969, - 0, - 1971, - 1972, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1973, - 0, - 0, - 0, - 0, - 1975, - 0, - 0, - 0, - 0, - 1976, - 1979, - 0, - 1982, - 0, - 0, - 0, - 0, - 1984, - 1988, - 0, - 0, - 0, - 0, - 1990, - 2004, - 2008, - 0, - 0, - 0, - 2012, - 2013, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2015, - 0, - 2016, - 2017, - 0, - 0, - 0, - 0, - 2021, - 0, - 0, - 2025, - 0, - 0, - 0, - 0, - 0, - 2029, - 2036, - 2040, - 0, - 2042, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2043, - 0, - 0, - 0, - 0, - 0, - 2045, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2046, - 2047, - 0, - 2048, - 2049, - 0, - 2059, - 0, - 0, - 2063, - 0, - 2064, - 2065, - 0, - 0, - 2066, - 0, - 0, - 0, - 0, - 0, - 0, - 2069, - 0, - 0, - 0, - 0, - 2070, - 0, - 2071, - 0, - 2072, - 0, - 0, - 0, - 0, - 2080, - 2082, - 2083, - 0, - 0, - 0, - 0, - 0, - 2085, - 0, - 2086, - 2088, - 2089, - 2105, - 0, - 0, - 0, - 0, - 2107, - 0, - 0, - 2116, - 2117, - 0, - 2120, - 0, - 0, - 2122, - 0, - 0, - 0, - 0, - 0, - 2123, - 0, - 0, 
- 2125, - 2127, - 2128, - 0, - 0, - 0, - 2130, - 0, - 0, - 0, - 2137, - 2139, - 2140, - 2141, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2144, - 2145, - 0, - 0, - 2146, - 2149, - 0, - 0, - 0, - 0, - 2150, - 0, - 0, - 2151, - 2158, - 0, - 2159, - 0, - 2160, - 0, - 0, - 0, - 0, - 0, - 0, - 2161, - 2162, - 0, - 0, - 2194, - 2202, - 0, - 0, - 0, - 0, - 0, - 0, - 2205, - 2217, - 0, - 2220, - 0, - 2221, - 0, - 2222, - 2224, - 0, - 0, - 0, - 0, - 2237, - 0, - 0, - 0, - 0, - 0, - 2238, - 0, - 2239, - 2241, - 0, - 0, - 2242, - 0, - 0, - 0, - 0, - 0, - 2243, - 0, - 0, - 0, - 0, - 0, - 0, - 2252, - 0, - 0, - 2253, - 0, - 0, - 0, - 2257, - 2258, - 0, - 0, - 0, - 2260, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2262, - 0, - 2264, - 0, - 0, - 0, - 0, - 0, - 2269, - 2270, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2271, - 0, - 2273, - 0, - 0, - 0, - 0, - 2277, - 0, - 0, - 0, - 0, - 2278, - 0, - 0, - 0, - 0, - 2279, - 0, - 2280, - 0, - 2283, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2287, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2289, - 2290, - 0, - 0, - 0, - 0, - 2291, - 0, - 2292, - 0, - 0, - 0, - 2293, - 2295, - 2296, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2298, - 0, - 0, - 0, - 0, - 0, - 2303, - 0, - 2305, - 0, - 0, - 2306, - 0, - 2307, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2313, - 2314, - 2315, - 2316, - 0, - 0, - 2318, - 0, - 2319, - 0, - 2322, - 0, - 0, - 2323, - 0, - 2324, - 0, - 2326, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2335, - 0, - 2336, - 2338, - 2339, - 0, - 2340, - 0, - 0, - 0, - 2355, - 0, - 2375, - 0, - 2382, - 2386, - 0, - 2387, - 0, - 0, - 2394, - 0, - 0, - 0, - 0, - 2395, - 0, - 2397, - 0, - 0, - 0, - 0, - 0, - 2398, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2399, - 2402, - 2404, - 2408, - 2411, - 0, - 0, - 0, - 2413, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2415, - 0, - 0, - 2416, - 2417, - 2419, - 0, - 2420, - 0, - 0, - 0, - 0, - 0, - 2425, - 0, - 0, - 0, - 2426, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2427, - 2428, - 0, - 2429, - 0, - 0, - 2430, - 2434, - 0, - 2436, - 0, - 0, - 0, - 0, - 0, - 0, - 2441, - 2442, - 0, - 2445, - 0, - 0, - 2446, - 2457, - 0, - 2459, - 0, - 0, - 2462, - 0, - 2464, - 0, - 2477, - 0, - 2478, - 2486, - 0, - 0, - 0, - 2491, - 0, - 0, - 2493, - 0, - 0, - 2494, - 0, - 2495, - 0, - 2513, - 2523, - 0, - 0, - 0, - 0, - 2524, - 0, - 0, - 0, - 0, - 0, - 0, - 2528, - 2529, - 2530, - 0, - 0, - 2531, - 0, - 2533, - 0, - 0, - 2534, - 2535, - 0, - 2536, - 2537, - 0, - 2538, - 0, - 2539, - 2540, - 0, - 0, - 0, - 2545, - 2546, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2548, - 0, - 0, - 2549, - 0, - 2550, - 2555, - 0, - 0, - 0, - 0, - 0, - 2557, - 0, - 2560, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2561, - 0, - 2576, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2577, - 2578, - 0, - 0, - 0, - 2579, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2580, - 0, - 0, - 0, - 0, - 2581, - 0, - 0, - 0, - 0, - 2583, - 0, - 2584, - 0, - 2588, - 2590, - 0, - 0, - 0, - 2591, - 0, - 0, - 0, - 0, - 2593, - 2594, - 0, - 2595, - 0, - 2601, - 2602, - 0, - 0, - 2603, - 0, - 2605, - 0, - 0, - 0, - 2606, - 2607, - 2611, - 0, - 2615, - 0, - 0, - 0, - 2617, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2619, - 0, - 0, - 2620, - 0, - 0, - 0, - 2621, - 0, - 2623, - 0, - 2625, - 0, - 0, - 2628, - 2629, - 0, - 0, - 2635, - 2636, - 2637, - 0, - 0, - 2639, - 0, - 0, - 0, - 2642, - 0, - 0, - 0, - 0, - 2643, - 0, - 2644, - 0, - 2649, - 0, - 0, - 0, - 0, - 0, - 0, - 2655, - 2656, - 0, - 0, - 2657, - 0, - 0, - 0, - 0, - 0, - 2658, - 0, - 0, - 0, - 0, - 0, - 2659, - 0, - 0, - 0, - 0, - 2664, - 2685, - 0, - 2687, - 0, - 2688, - 0, - 0, - 2689, - 0, - 0, - 2694, - 0, - 2695, - 0, - 0, - 2698, - 0, - 2701, - 2706, - 0, - 0, - 0, - 2707, - 0, - 2709, - 2710, - 2711, - 0, - 0, - 0, - 2720, - 2730, - 2735, - 0, - 0, - 0, - 0, - 2738, - 2740, - 0, - 0, - 0, - 0, - 2747, - 0, - 0, - 0, - 0, - 0, - 0, - 2748, - 0, 
- 0, - 2749, - 0, - 0, - 0, - 0, - 0, - 2750, - 0, - 0, - 2752, - 2754, - 0, - 0, - 0, - 0, - 0, - 2758, - 0, - 0, - 0, - 0, - 2762, - 0, - 0, - 0, - 0, - 2763, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2764, - 2767, - 0, - 0, - 0, - 0, - 2768, - 0, - 0, - 2770, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2771, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2772, - 0, - 0, - 0, - 0, - 0, - 2773, - 2776, - 0, - 0, - 2783, - 0, - 0, - 2784, - 0, - 2789, - 0, - 2790, - 0, - 0, - 0, - 2792, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2793, - 2795, - 0, - 0, - 0, - 0, - 0, - 0, - 2796, - 0, - 0, - 0, - 0, - 0, - 0, - 2797, - 2799, - 0, - 0, - 0, - 0, - 2803, - 0, - 0, - 0, - 0, - 2806, - 0, - 2807, - 2808, - 2817, - 2819, - 0, - 0, - 0, - 0, - 0, - 2821, - 0, - 0, - 0, - 0, - 2822, - 2823, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2824, - 0, - 0, - 2828, - 0, - 2834, - 0, - 0, - 0, - 0, - 0, - 0, - 2836, - 0, - 2838, - 0, - 0, - 2839, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2841, - 0, - 0, - 0, - 2842, - 0, - 0, - 0, - 0, - 0, - 2843, - 2844, - 0, - 0, - 0, - 0, - 2846, - 0, - 0, - 2847, - 0, - 2849, - 0, - 2853, - 0, - 0, - 0, - 0, - 0, - 2857, - 0, - 0, - 0, - 0, - 2858, - 0, - 2859, - 0, - 0, - 2860, - 0, - 2862, - 2868, - 0, - 0, - 0, - 0, - 2875, - 0, - 2876, - 0, - 0, - 2877, - 2878, - 2884, - 2889, - 2890, - 0, - 0, - 2891, - 0, - 0, - 2892, - 0, - 0, - 0, - 2906, - 2912, - 0, - 2913, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2916, - 0, - 2934, - 0, - 0, - 0, - 0, - 0, - 2935, - 0, - 0, - 0, - 0, - 2939, - 0, - 2940, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2941, - 0, - 0, - 0, - 2946, - 0, - 2949, - 0, - 0, - 2950, - 2954, - 2955, - 0, - 0, - 0, - 2959, - 2961, - 0, - 0, - 2962, - 0, - 2963, - 0, - 0, - 0, - 0, - 0, - 0, - 2964, - 2965, - 2966, - 2967, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2969, - 0, - 0, - 0, - 0, - 0, - 2970, - 2975, - 0, - 2982, - 2983, - 2984, - 0, - 0, - 0, - 0, - 0, - 2989, - 0, - 0, - 2990, - 0, - 0, - 0, 
- 0, - 0, - 0, - 0, - 2991, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 2998, - 0, - 3000, - 3001, - 0, - 0, - 3002, - 0, - 0, - 0, - 3003, - 0, - 0, - 3012, - 0, - 0, - 3022, - 0, - 0, - 3024, - 0, - 0, - 3025, - 3027, - 0, - 0, - 0, - 3030, - 0, - 0, - 0, - 0, - 3034, - 3035, - 0, - 0, - 3036, - 0, - 3039, - 0, - 3049, - 0, - 0, - 3050, - 0, - 0, - 0, - 0, - 0, - 0, - 3051, - 0, - 3053, - 0, - 0, - 0, - 0, - 3057, - 0, - 3058, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3063, - 0, - 0, - 3073, - 3074, - 3078, - 3079, - 0, - 3080, - 3086, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3087, - 0, - 3092, - 0, - 3095, - 0, - 3099, - 0, - 0, - 0, - 3100, - 0, - 3101, - 3102, - 0, - 3122, - 0, - 0, - 0, - 3124, - 0, - 3125, - 0, - 0, - 0, - 0, - 0, - 0, - 3132, - 3134, - 0, - 0, - 3136, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3147, - 0, - 0, - 3149, - 0, - 0, - 0, - 0, - 0, - 3150, - 3151, - 3152, - 0, - 0, - 0, - 0, - 3158, - 0, - 0, - 3160, - 0, - 0, - 3161, - 0, - 0, - 3162, - 0, - 3163, - 3166, - 3168, - 0, - 0, - 3169, - 3170, - 0, - 0, - 3171, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3182, - 0, - 3184, - 0, - 0, - 3188, - 0, - 0, - 3194, - 0, - 0, - 0, - 0, - 0, - 0, - 3204, - 0, - 0, - 0, - 0, - 3209, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3216, - 3217, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3219, - 0, - 0, - 3220, - 3222, - 0, - 3223, - 0, - 0, - 0, - 0, - 3224, - 0, - 3225, - 3226, - 0, - 3228, - 3233, - 0, - 3239, - 3241, - 3242, - 0, - 0, - 3251, - 3252, - 3253, - 3255, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3260, - 0, - 0, - 3261, - 0, - 0, - 0, - 3267, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3271, - 0, - 0, - 0, - 3278, - 0, - 3282, - 0, - 0, - 0, - 3284, - 0, - 0, - 0, - 3285, - 3286, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3287, - 3292, - 0, - 0, - 0, - 0, - 3294, - 3296, - 0, - 0, - 3299, - 3300, - 3301, - 0, - 3302, - 0, - 0, - 0, - 0, - 0, - 3304, - 3306, - 0, - 0, - 0, - 0, - 0, - 0, - 3308, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3311, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3312, - 3314, - 3315, - 0, - 3318, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3319, - 0, - 0, - 0, - 0, - 0, - 3321, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3322, - 0, - 0, - 3324, - 3325, - 0, - 0, - 3326, - 0, - 0, - 3328, - 3329, - 3331, - 0, - 0, - 3335, - 0, - 0, - 3337, - 0, - 3338, - 0, - 0, - 0, - 0, - 3343, - 3347, - 0, - 0, - 0, - 3348, - 0, - 0, - 3351, - 0, - 0, - 0, - 0, - 0, - 0, - 3354, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3355, - 0, - 0, - 3365, - 3366, - 3367, - 0, - 0, - 0, - 0, - 0, - 0, - 3368, - 3369, - 0, - 3370, - 0, - 0, - 3373, - 0, - 0, - 3376, - 0, - 0, - 3377, - 0, - 3379, - 3387, - 0, - 0, - 0, - 0, - 0, - 3390, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3402, - 0, - 3403, - 3436, - 3437, - 3439, - 0, - 0, - 3441, - 0, - 0, - 0, - 3442, - 0, - 0, - 3449, - 0, - 0, - 0, - 3450, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3451, - 0, - 0, - 3452, - 0, - 3453, - 3456, - 0, - 3457, - 0, - 0, - 3458, - 0, - 3459, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3460, - 0, - 0, - 3469, - 3470, - 0, - 0, - 3475, - 0, - 0, - 0, - 3480, - 3487, - 3489, - 0, - 3490, - 0, - 0, - 3491, - 3499, - 0, - 3500, - 0, - 0, - 3501, - 0, - 0, - 0, - 3502, - 0, - 3514, - 0, - 0, - 0, - 3516, - 3517, - 0, - 0, - 0, - 3518, - 0, - 0, - 0, - 0, - 3520, - 3521, - 3522, - 0, - 0, - 3526, - 3530, - 0, - 0, - 0, - 0, - 3531, - 0, - 0, - 0, - 0, - 3536, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3539, - 3541, - 0, - 0, - 3542, - 3544, - 0, - 3547, - 3548, - 0, - 0, - 3550, - 0, - 3553, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3554, - 0, - 3555, - 0, - 3558, - 0, - 3559, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3563, - 0, - 3581, - 0, - 0, - 0, - 3599, - 0, - 0, - 0, - 3600, - 0, - 3601, - 0, - 3602, - 3603, - 0, - 0, - 3606, - 3608, - 0, - 3610, - 3611, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3612, - 3616, - 3619, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3624, - 3628, - 0, - 3629, - 3634, - 3635, - 
0, - 0, - 0, - 0, - 0, - 0, - 3636, - 0, - 3637, - 0, - 0, - 3638, - 3651, - 0, - 0, - 0, - 0, - 0, - 0, - 3652, - 3653, - 0, - 0, - 0, - 0, - 3656, - 3657, - 0, - 0, - 0, - 0, - 0, - 3658, - 0, - 0, - 0, - 0, - 3659, - 0, - 3661, - 3663, - 3664, - 0, - 3665, - 0, - 3692, - 0, - 0, - 0, - 3694, - 3696, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3698, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3700, - 0, - 0, - 3701, - 0, - 0, - 0, - 3708, - 3709, - 0, - 0, - 0, - 3711, - 3712, - 0, - 0, - 0, - 0, - 0, - 3723, - 0, - 3724, - 3725, - 0, - 0, - 3726, - 0, - 0, - 0, - 0, - 0, - 0, - 3728, - 3729, - 0, - 3734, - 3735, - 3737, - 0, - 0, - 0, - 3743, - 0, - 3745, - 0, - 0, - 3746, - 0, - 0, - 3747, - 3748, - 0, - 3757, - 0, - 3759, - 3766, - 3767, - 0, - 3768, - 0, - 0, - 0, - 0, - 3769, - 0, - 0, - 3771, - 0, - 3774, - 0, - 0, - 0, - 0, - 0, - 0, - 3775, - 0, - 0, - 0, - 0, - 0, - 0, - 3776, - 0, - 3777, - 3786, - 0, - 3788, - 3789, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3791, - 0, - 3811, - 0, - 0, - 0, - 0, - 0, - 3814, - 3815, - 3816, - 3820, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3821, - 0, - 0, - 3825, - 0, - 0, - 0, - 0, - 3835, - 0, - 0, - 3848, - 3849, - 0, - 0, - 0, - 0, - 3850, - 3851, - 3853, - 0, - 0, - 0, - 0, - 3859, - 0, - 3860, - 3862, - 0, - 0, - 0, - 0, - 0, - 3863, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3873, - 0, - 3874, - 0, - 3875, - 3886, - 0, - 3887, - 0, - 0, - 0, - 0, - 3892, - 3913, - 0, - 3914, - 0, - 0, - 0, - 3925, - 3931, - 0, - 0, - 0, - 0, - 3934, - 3941, - 3942, - 0, - 0, - 0, - 0, - 3943, - 0, - 0, - 0, - 3944, - 0, - 0, - 0, - 0, - 0, - 3945, - 0, - 3947, - 0, - 0, - 0, - 3956, - 3957, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3958, - 0, - 3959, - 3965, - 0, - 0, - 0, - 0, - 3966, - 0, - 0, - 0, - 3967, - 0, - 0, - 0, - 3968, - 3974, - 0, - 0, - 0, - 0, - 0, - 3975, - 3977, - 3978, - 0, - 0, - 0, - 0, - 3980, - 0, - 3985, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 3986, 
- 4011, - 0, - 0, - 4017, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4018, - 0, - 0, - 0, - 0, - 4019, - 0, - 4023, - 0, - 0, - 0, - 4027, - 4028, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4031, - 4034, - 0, - 0, - 4035, - 4037, - 4039, - 4040, - 0, - 0, - 0, - 0, - 0, - 4059, - 0, - 4060, - 4061, - 0, - 4062, - 4063, - 4066, - 0, - 0, - 4072, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4088, - 0, - 0, - 0, - 0, - 0, - 4091, - 0, - 0, - 0, - 0, - 4094, - 4095, - 0, - 0, - 4096, - 0, - 0, - 0, - 0, - 0, - 4098, - 4099, - 0, - 0, - 0, - 4101, - 0, - 4104, - 0, - 0, - 0, - 4105, - 4108, - 0, - 4113, - 0, - 0, - 4115, - 4116, - 0, - 4126, - 0, - 0, - 4127, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4128, - 4132, - 4133, - 0, - 4134, - 0, - 0, - 0, - 4137, - 0, - 0, - 4141, - 0, - 0, - 0, - 0, - 4144, - 4146, - 4147, - 0, - 0, - 0, - 0, - 4148, - 0, - 0, - 4311, - 0, - 0, - 0, - 4314, - 4329, - 0, - 4331, - 4332, - 0, - 4333, - 0, - 4334, - 0, - 0, - 0, - 4335, - 0, - 4336, - 0, - 0, - 0, - 4337, - 0, - 0, - 0, - 4342, - 4345, - 4346, - 4350, - 0, - 4351, - 4352, - 0, - 4354, - 4355, - 0, - 0, - 4364, - 0, - 0, - 0, - 0, - 4369, - 0, - 0, - 0, - 4373, - 0, - 4374, - 0, - 0, - 0, - 0, - 4377, - 0, - 0, - 0, - 0, - 4378, - 0, - 0, - 0, - 4380, - 0, - 0, - 0, - 4381, - 4382, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4384, - 0, - 0, - 0, - 0, - 4385, - 0, - 0, - 0, - 4386, - 0, - 0, - 0, - 4391, - 4398, - 0, - 0, - 0, - 0, - 4407, - 4409, - 0, - 0, - 0, - 0, - 4410, - 0, - 0, - 4411, - 0, - 4414, - 4415, - 4418, - 0, - 4427, - 4428, - 4430, - 0, - 4431, - 0, - 4448, - 0, - 0, - 0, - 0, - 0, - 4449, - 0, - 0, - 0, - 4451, - 4452, - 0, - 4453, - 4454, - 0, - 4456, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4459, - 0, - 4463, - 0, - 0, - 0, - 0, - 0, - 4466, - 0, - 4467, - 0, - 4469, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4470, - 4471, - 0, - 4473, - 0, - 0, - 4475, - 0, - 0, - 0, - 0, - 4477, - 4478, - 0, - 0, - 0, - 4479, - 
4481, - 0, - 4482, - 0, - 4484, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4486, - 0, - 0, - 4488, - 0, - 0, - 4497, - 0, - 4508, - 0, - 0, - 4510, - 4511, - 0, - 4520, - 4523, - 0, - 4524, - 0, - 4525, - 0, - 4527, - 0, - 0, - 4528, - 0, - 0, - 0, - 0, - 4530, - 0, - 4531, - 0, - 0, - 4532, - 0, - 0, - 0, - 4533, - 0, - 0, - 0, - 0, - 0, - 4535, - 0, - 0, - 0, - 4536, - 0, - 0, - 0, - 0, - 0, - 4541, - 4543, - 4544, - 4545, - 4547, - 0, - 4548, - 0, - 0, - 0, - 0, - 4550, - 4551, - 0, - 4553, - 0, - 0, - 0, - 0, - 4562, - 0, - 0, - 4571, - 0, - 0, - 0, - 4574, - 0, - 0, - 0, - 4575, - 0, - 4576, - 0, - 4577, - 0, - 0, - 0, - 4581, - 0, - 0, - 0, - 0, - 0, - 4582, - 0, - 0, - 4586, - 0, - 0, - 0, - 4588, - 0, - 0, - 4597, - 0, - 4598, - 0, - 0, - 0, - 0, - 4616, - 4617, - 0, - 4618, - 0, - 0, - 0, - 0, - 4619, - 0, - 4620, - 0, - 0, - 4621, - 0, - 4624, - 0, - 0, - 0, - 0, - 0, - 4625, - 0, - 0, - 0, - 0, - 4657, - 0, - 4659, - 0, - 4667, - 0, - 0, - 0, - 4668, - 4670, - 0, - 4672, - 0, - 0, - 0, - 0, - 0, - 4673, - 4676, - 0, - 0, - 0, - 0, - 4687, - 0, - 0, - 0, - 0, - 4697, - 0, - 0, - 0, - 0, - 4699, - 0, - 4701, - 0, - 0, - 0, - 0, - 4702, - 0, - 0, - 4706, - 0, - 0, - 4713, - 0, - 0, - 0, - 4714, - 4715, - 4716, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4717, - 0, - 0, - 4720, - 0, - 4721, - 4729, - 4735, - 0, - 0, - 0, - 4737, - 0, - 0, - 0, - 4739, - 0, - 0, - 0, - 4740, - 0, - 0, - 0, - 4741, - 0, - 0, - 0, - 0, - 0, - 4742, - 0, - 4745, - 4746, - 4747, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4748, - 0, - 0, - 0, - 4749, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4751, - 4786, - 0, - 4787, - 0, - 4788, - 4796, - 0, - 0, - 4797, - 4798, - 0, - 4799, - 4806, - 4807, - 0, - 0, - 0, - 0, - 4809, - 4810, - 0, - 0, - 0, - 0, - 0, - 0, - 4811, - 0, - 0, - 0, - 0, - 0, - 4812, - 0, - 4813, - 0, - 0, - 4815, - 0, - 4821, - 4822, - 0, - 0, - 0, - 0, - 4823, - 0, - 0, - 0, - 0, - 0, - 0, 
- 0, - 0, - 0, - 0, - 4824, - 0, - 0, - 0, - 0, - 4826, - 0, - 0, - 0, - 4828, - 0, - 4829, - 0, - 0, - 0, - 4843, - 0, - 0, - 4847, - 0, - 4853, - 4855, - 4858, - 0, - 0, - 0, - 0, - 0, - 4859, - 0, - 4864, - 0, - 0, - 4879, - 0, - 0, - 0, - 0, - 4880, - 0, - 0, - 0, - 0, - 4881, - 0, - 4882, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4883, - 0, - 0, - 0, - 0, - 4884, - 0, - 0, - 0, - 0, - 0, - 4886, - 4887, - 4888, - 4894, - 4896, - 0, - 4902, - 0, - 0, - 4905, - 0, - 0, - 4915, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4916, - 4917, - 4919, - 4921, - 0, - 0, - 0, - 0, - 0, - 4926, - 0, - 0, - 0, - 0, - 4927, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 4929, - 0, - 4930, - 4931, - 0, - 4938, - 0, - 4952, - 0, - 4953, - 4957, - 4960, - 4964, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5019, - 5020, - 5022, - 0, - 0, - 0, - 0, - 0, - 5023, - 0, - 0, - 0, - 5024, - 0, - 0, - 0, - 5025, - 0, - 0, - 0, - 0, - 5028, - 0, - 0, - 0, - 0, - 5029, - 5030, - 5031, - 0, - 5033, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5034, - 5035, - 0, - 5036, - 0, - 0, - 5037, - 0, - 0, - 0, - 0, - 5038, - 0, - 0, - 5039, - 0, - 0, - 0, - 5041, - 5042, - 0, - 0, - 0, - 0, - 5044, - 5049, - 5054, - 0, - 5055, - 0, - 5057, - 0, - 0, - 0, - 5060, - 0, - 0, - 0, - 0, - 0, - 5063, - 0, - 5064, - 5065, - 0, - 5067, - 0, - 0, - 0, - 5068, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5076, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5077, - 0, - 0, - 5078, - 5080, - 0, - 0, - 5083, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5085, - 0, - 0, - 0, - 0, - 0, - 0, - 5098, - 5099, - 5101, - 5105, - 5107, - 0, - 5108, - 0, - 5109, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5110, - 0, - 0, - 0, - 0, - 0, - 5117, - 5118, - 0, - 5121, - 0, - 5122, - 0, - 0, - 5130, - 0, - 0, - 0, - 5137, - 0, - 0, - 0, - 5148, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5151, - 5154, - 0, - 0, - 0, - 5155, - 0, - 0, - 5156, - 5159, - 5161, - 0, - 0, - 0, - 0, - 5162, - 0, - 0, - 0, - 0, - 
5163, - 5164, - 0, - 5166, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5167, - 0, - 0, - 0, - 5172, - 0, - 0, - 0, - 0, - 0, - 0, - 5178, - 5179, - 0, - 0, - 5190, - 0, - 0, - 5191, - 5192, - 5194, - 0, - 0, - 5198, - 5201, - 0, - 0, - 0, - 0, - 0, - 5203, - 0, - 5206, - 5209, - 0, - 0, - 0, - 0, - 0, - 0, - 5213, - 0, - 5214, - 5216, - 0, - 0, - 0, - 0, - 0, - 5217, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5218, - 5219, - 0, - 5231, - 0, - 0, - 5244, - 5249, - 0, - 5254, - 0, - 5255, - 0, - 0, - 5257, - 0, - 0, - 0, - 0, - 0, - 5258, - 0, - 5260, - 5270, - 0, - 5277, - 0, - 0, - 0, - 0, - 0, - 0, - 5280, - 5281, - 5282, - 5283, - 0, - 0, - 0, - 0, - 0, - 5284, - 0, - 5285, - 0, - 0, - 0, - 0, - 0, - 5287, - 5288, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5289, - 5291, - 0, - 0, - 5294, - 0, - 0, - 5295, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5304, - 0, - 0, - 5306, - 5307, - 5308, - 0, - 5309, - 0, - 0, - 5310, - 0, - 0, - 0, - 0, - 5311, - 5312, - 0, - 5313, - 0, - 0, - 0, - 0, - 0, - 5316, - 0, - 0, - 0, - 5317, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5325, - 0, - 0, - 0, - 0, - 0, - 0, - 5326, - 0, - 5327, - 5329, - 0, - 5332, - 0, - 0, - 0, - 0, - 5338, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5340, - 0, - 0, - 5341, - 0, - 0, - 0, - 5342, - 0, - 5343, - 5344, - 0, - 0, - 5345, - 0, - 0, - 0, - 0, - 0, - 0, - 5347, - 5348, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5349, - 0, - 5350, - 0, - 5354, - 0, - 0, - 0, - 0, - 5358, - 0, - 0, - 5359, - 0, - 0, - 5361, - 0, - 0, - 5365, - 0, - 5367, - 0, - 5373, - 0, - 0, - 0, - 5379, - 0, - 0, - 0, - 5380, - 0, - 0, - 0, - 5382, - 0, - 5384, - 0, - 0, - 0, - 0, - 0, - 0, - 5385, - 0, - 0, - 0, - 0, - 5387, - 0, - 0, - 0, - 0, - 0, - 0, - 5388, - 5390, - 5393, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5396, - 0, - 0, - 0, - 0, - 5397, - 5402, - 0, - 0, - 0, - 0, - 0, - 5403, - 0, - 0, - 0, - 5404, - 5405, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5406, - 0, 
- 0, - 0, - 0, - 5410, - 0, - 0, - 5411, - 0, - 5415, - 0, - 0, - 0, - 0, - 5416, - 5434, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5438, - 0, - 5440, - 0, - 0, - 0, - 0, - 0, - 0, - 5441, - 5442, - 0, - 0, - 0, - 5443, - 5444, - 5447, - 0, - 0, - 5448, - 5449, - 5451, - 0, - 0, - 0, - 5456, - 5457, - 0, - 0, - 0, - 5459, - 0, - 0, - 0, - 5461, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5464, - 0, - 5466, - 0, - 0, - 5467, - 0, - 5470, - 0, - 0, - 5473, - 0, - 0, - 5474, - 0, - 0, - 5476, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5477, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5484, - 0, - 0, - 5485, - 5486, - 0, - 0, - 0, - 0, - 0, - 5488, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5489, - 0, - 0, - 0, - 0, - 0, - 5507, - 0, - 0, - 0, - 5510, - 0, - 5511, - 0, - 0, - 5512, - 0, - 0, - 0, - 5513, - 0, - 5515, - 0, - 0, - 5516, - 5517, - 0, - 5518, - 0, - 0, - 5522, - 0, - 0, - 0, - 0, - 0, - 5534, - 5535, - 0, - 0, - 5536, - 0, - 5538, - 0, - 0, - 5543, - 0, - 5544, - 0, - 0, - 5545, - 0, - 5547, - 0, - 5557, - 0, - 0, - 5558, - 0, - 5560, - 5567, - 0, - 0, - 0, - 0, - 5568, - 0, - 0, - 0, - 5571, - 5573, - 0, - 5574, - 0, - 5575, - 0, - 0, - 0, - 0, - 5577, - 0, - 0, - 5598, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5600, - 5609, - 0, - 0, - 0, - 0, - 5610, - 0, - 0, - 5612, - 0, - 5624, - 0, - 5625, - 0, - 0, - 0, - 5629, - 0, - 5641, - 0, - 5642, - 5643, - 0, - 0, - 0, - 0, - 0, - 0, - 5651, - 0, - 0, - 0, - 5652, - 5653, - 0, - 5661, - 5662, - 5678, - 0, - 5679, - 0, - 0, - 0, - 0, - 5685, - 5686, - 0, - 0, - 0, - 0, - 0, - 5690, - 5692, - 0, - 5703, - 0, - 0, - 0, - 0, - 0, - 5706, - 0, - 0, - 0, - 0, - 5707, - 0, - 0, - 0, - 0, - 0, - 0, - 5708, - 0, - 0, - 5709, - 0, - 5710, - 0, - 0, - 0, - 5712, - 0, - 5733, - 0, - 5734, - 5735, - 0, - 0, - 5744, - 5751, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5752, - 0, - 5754, - 0, - 0, - 0, - 0, - 0, - 0, - 5757, - 5758, - 0, - 5760, - 5761, - 0, 
- 0, - 0, - 0, - 5763, - 5764, - 5765, - 0, - 5766, - 0, - 5767, - 5768, - 0, - 5770, - 0, - 0, - 0, - 0, - 5776, - 5780, - 0, - 0, - 0, - 0, - 5782, - 0, - 0, - 0, - 0, - 5784, - 0, - 0, - 5788, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5797, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5799, - 0, - 0, - 5801, - 0, - 0, - 0, - 5811, - 0, - 0, - 0, - 0, - 0, - 0, - 5816, - 0, - 0, - 5827, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5830, - 5831, - 0, - 0, - 5832, - 0, - 0, - 5833, - 0, - 5835, - 5844, - 5845, - 0, - 5846, - 0, - 0, - 0, - 0, - 0, - 5850, - 0, - 0, - 0, - 0, - 0, - 5852, - 0, - 5855, - 5857, - 0, - 0, - 5859, - 0, - 5861, - 0, - 0, - 5863, - 0, - 5865, - 0, - 0, - 0, - 5873, - 5875, - 0, - 0, - 0, - 5877, - 0, - 5879, - 0, - 0, - 0, - 5888, - 0, - 0, - 5889, - 5891, - 0, - 5894, - 0, - 0, - 0, - 0, - 0, - 0, - 5895, - 0, - 5897, - 0, - 0, - 0, - 0, - 0, - 0, - 5907, - 0, - 5911, - 0, - 0, - 5912, - 0, - 5913, - 5922, - 5924, - 0, - 5927, - 5928, - 0, - 0, - 0, - 0, - 5929, - 5930, - 0, - 5933, - 0, - 0, - 0, - 0, - 5949, - 0, - 0, - 5951, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 5953, - 0, - 0, - 5954, - 0, - 5959, - 5960, - 5961, - 0, - 5964, - 0, - 0, - 0, - 5976, - 5978, - 5987, - 5990, - 0, - 0, - 0, - 0, - 0, - 5991, - 0, - 5992, - 0, - 0, - 0, - 5994, - 5995, - 0, - 0, - 5996, - 0, - 0, - 6001, - 6003, - 0, - 0, - 0, - 0, - 6007, - 0, - 0, - 0, - 0, - 0, - 6008, - 0, - 0, - 6009, - 0, - 6010, - 0, - 0, - 0, - 6011, - 6015, - 0, - 6017, - 0, - 6019, - 0, - 6023, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6025, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6026, - 0, - 6030, - 0, - 0, - 6032, - 0, - 0, - 0, - 6033, - 6038, - 6040, - 0, - 0, - 0, - 6041, - 6045, - 0, - 0, - 6046, - 0, - 0, - 6053, - 0, - 0, - 6054, - 0, - 6055, - 0, - 0, - 0, - 0, - 0, - 0, - 6057, - 0, - 6063, - 0, - 0, - 0, - 6064, - 0, - 6066, - 6071, - 6072, - 0, - 0, - 0, - 0, - 0, - 0, - 6075, - 6076, - 0, - 0, - 6077, - 0, - 0, 
- 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6078, - 6079, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6080, - 0, - 6083, - 0, - 0, - 0, - 0, - 0, - 6084, - 0, - 0, - 6088, - 0, - 6089, - 0, - 0, - 6093, - 6105, - 0, - 0, - 6107, - 0, - 6110, - 0, - 0, - 0, - 6111, - 6125, - 6126, - 0, - 0, - 0, - 6129, - 0, - 0, - 0, - 0, - 6130, - 0, - 0, - 0, - 6131, - 6134, - 0, - 0, - 0, - 0, - 0, - 0, - 6142, - 0, - 0, - 0, - 0, - 0, - 6144, - 0, - 0, - 6146, - 6151, - 6153, - 0, - 6156, - 0, - 6163, - 0, - 6180, - 6181, - 0, - 0, - 0, - 0, - 0, - 6182, - 0, - 0, - 0, - 0, - 6184, - 6195, - 0, - 0, - 6206, - 0, - 6208, - 0, - 0, - 6212, - 6213, - 6214, - 0, - 6215, - 0, - 0, - 0, - 6228, - 0, - 0, - 0, - 6234, - 0, - 0, - 0, - 0, - 0, - 0, - 6235, - 6240, - 0, - 6242, - 6243, - 6244, - 0, - 6250, - 6255, - 0, - 0, - 0, - 0, - 0, - 6257, - 0, - 0, - 0, - 6258, - 6278, - 0, - 6284, - 0, - 0, - 0, - 6285, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6286, - 0, - 0, - 0, - 6320, - 0, - 0, - 6322, - 6332, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6334, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6335, - 0, - 0, - 6337, - 0, - 6338, - 0, - 6339, - 6340, - 0, - 0, - 6356, - 6357, - 6369, - 0, - 0, - 0, - 6370, - 6371, - 6372, - 0, - 6373, - 0, - 0, - 0, - 0, - 0, - 6376, - 0, - 0, - 0, - 0, - 0, - 6382, - 6383, - 6384, - 0, - 0, - 0, - 0, - 6386, - 0, - 6389, - 6397, - 6400, - 6411, - 0, - 6414, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6415, - 6416, - 0, - 0, - 0, - 0, - 0, - 0, - 6417, - 0, - 0, - 0, - 0, - 6418, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6420, - 0, - 6421, - 6423, - 6425, - 0, - 6429, - 6430, - 0, - 6433, - 6438, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6439, - 6440, - 0, - 0, - 6441, - 0, - 0, - 6444, - 0, - 0, - 0, - 0, - 6446, - 0, - 0, - 0, - 0, - 6447, - 6448, - 0, - 0, - 6450, - 0, - 0, - 0, - 6454, - 0, - 0, - 6455, - 0, - 6461, - 0, - 0, - 0, - 0, - 0, - 0, - 6462, - 0, - 0, - 6463, - 0, - 6464, - 0, - 6465, - 6467, - 0, - 0, - 0, - 6468, - 0, - 6479, - 6480, - 0, - 0, - 0, 
- 0, - 0, - 0, - 0, - 6481, - 0, - 0, - 6485, - 6487, - 0, - 0, - 0, - 0, - 0, - 0, - 6493, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6494, - 6495, - 6496, - 0, - 0, - 0, - 0, - 0, - 6498, - 0, - 0, - 0, - 6507, - 6508, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6511, - 6512, - 0, - 0, - 0, - 0, - 6513, - 0, - 0, - 0, - 6514, - 0, - 0, - 0, - 0, - 0, - 6516, - 0, - 0, - 6517, - 6518, - 0, - 0, - 0, - 6519, - 6520, - 6521, - 0, - 6523, - 0, - 0, - 0, - 0, - 6524, - 6528, - 0, - 6530, - 0, - 0, - 6532, - 0, - 6578, - 0, - 0, - 0, - 6583, - 0, - 6584, - 0, - 0, - 0, - 6587, - 0, - 0, - 0, - 6590, - 0, - 6591, - 0, - 0, - 0, - 0, - 0, - 6592, - 0, - 0, - 0, - 0, - 6593, - 6594, - 0, - 0, - 0, - 0, - 0, - 6599, - 6600, - 0, - 0, - 6601, - 6602, - 6604, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6608, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6610, - 6611, - 0, - 6615, - 0, - 6616, - 6618, - 6620, - 0, - 6637, - 0, - 0, - 0, - 0, - 6639, - 0, - 0, - 0, - 0, - 6641, - 0, - 6642, - 0, - 0, - 0, - 6647, - 0, - 6660, - 6663, - 0, - 6664, - 0, - 6666, - 6669, - 0, - 6675, - 6676, - 6677, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6678, - 0, - 0, - 0, - 6679, - 0, - 6680, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6693, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6704, - 6705, - 6706, - 0, - 0, - 6711, - 6713, - 0, - 0, - 0, - 0, - 0, - 6716, - 0, - 0, - 0, - 6717, - 0, - 6719, - 6724, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6725, - 6726, - 0, - 0, - 0, - 0, - 0, - 6728, - 6729, - 6735, - 0, - 6737, - 6742, - 0, - 0, - 6743, - 6750, - 0, - 6751, - 0, - 0, - 6752, - 6753, - 0, - 0, - 0, - 0, - 0, - 0, - 6754, - 0, - 0, - 0, - 0, - 0, - 6756, - 0, - 0, - 0, - 0, - 0, - 0, - 6763, - 0, - 0, - 6764, - 6765, - 0, - 0, - 0, - 6770, - 0, - 0, - 0, - 6776, - 6780, - 0, - 6781, - 0, - 0, - 0, - 6783, - 0, - 6784, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6785, - 0, - 0, - 0, - 6792, - 0, - 
0, - 0, - 6793, - 0, - 0, - 6802, - 0, - 0, - 0, - 0, - 0, - 6803, - 0, - 0, - 0, - 6804, - 0, - 0, - 0, - 6812, - 0, - 0, - 6823, - 0, - 6824, - 6839, - 0, - 0, - 0, - 0, - 6852, - 0, - 0, - 6854, - 0, - 6856, - 6857, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6867, - 0, - 6868, - 6870, - 6872, - 0, - 0, - 0, - 6873, - 6874, - 0, - 0, - 0, - 0, - 0, - 6875, - 0, - 0, - 6877, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6878, - 0, - 0, - 0, - 6879, - 0, - 6880, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6887, - 0, - 6888, - 6891, - 6893, - 0, - 6895, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6899, - 0, - 0, - 0, - 0, - 6901, - 0, - 0, - 0, - 0, - 6910, - 0, - 6911, - 0, - 0, - 6912, - 0, - 0, - 6913, - 6914, - 0, - 0, - 0, - 6915, - 0, - 0, - 0, - 6916, - 6919, - 0, - 0, - 0, - 0, - 0, - 0, - 6924, - 0, - 6925, - 0, - 0, - 0, - 6926, - 6927, - 6928, - 0, - 6929, - 0, - 6930, - 0, - 0, - 6931, - 6935, - 0, - 6936, - 0, - 0, - 0, - 0, - 6939, - 6940, - 6941, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6942, - 6948, - 6949, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6952, - 6954, - 6963, - 6965, - 6966, - 0, - 0, - 6967, - 6968, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6969, - 0, - 0, - 6970, - 6979, - 0, - 0, - 6980, - 0, - 0, - 6983, - 0, - 0, - 0, - 0, - 0, - 6984, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6988, - 6990, - 6992, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 6995, - 0, - 0, - 0, - 7012, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7019, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7021, - 0, - 0, - 7022, - 7023, - 7028, - 0, - 7030, - 7033, - 0, - 0, - 0, - 0, - 0, - 0, - 7038, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7039, - 0, - 0, - 0, - 0, - 0, - 7046, - 0, - 7047, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7048, - 7052, - 0, - 0, - 0, - 0, - 0, - 7054, - 0, - 7060, - 0, - 0, - 0, - 0, - 7061, - 0, - 7065, - 0, - 0, - 0, - 0, - 7067, - 7069, - 0, - 7070, - 7071, - 7072, - 0, - 0, - 7078, - 0, - 
7080, - 7081, - 0, - 7083, - 0, - 0, - 0, - 7084, - 7087, - 7088, - 0, - 0, - 7090, - 0, - 7093, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7107, - 0, - 0, - 7108, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7110, - 0, - 7114, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7115, - 0, - 7116, - 0, - 0, - 0, - 0, - 0, - 7117, - 0, - 0, - 7118, - 0, - 0, - 7124, - 0, - 7125, - 0, - 0, - 7126, - 0, - 0, - 0, - 0, - 7128, - 0, - 0, - 0, - 0, - 0, - 7129, - 0, - 7130, - 0, - 7132, - 7133, - 0, - 0, - 7134, - 0, - 0, - 7139, - 0, - 7148, - 7150, - 0, - 0, - 0, - 0, - 7152, - 0, - 0, - 0, - 7153, - 7156, - 7157, - 0, - 0, - 0, - 0, - 0, - 7158, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7163, - 7165, - 7169, - 0, - 7171, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7172, - 0, - 7173, - 7181, - 0, - 0, - 0, - 0, - 0, - 7182, - 7185, - 0, - 0, - 0, - 0, - 7187, - 0, - 7201, - 7204, - 0, - 0, - 0, - 0, - 0, - 7206, - 7207, - 0, - 0, - 0, - 0, - 7211, - 7216, - 0, - 7218, - 0, - 0, - 0, - 0, - 7226, - 7228, - 7230, - 7232, - 7233, - 7235, - 7237, - 0, - 0, - 0, - 0, - 7238, - 7241, - 0, - 7242, - 0, - 0, - 7247, - 0, - 0, - 0, - 7266, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7289, - 0, - 0, - 7290, - 7291, - 0, - 0, - 7292, - 0, - 7297, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7300, - 0, - 7301, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7302, - 0, - 0, - 0, - 0, - 7305, - 0, - 0, - 0, - 0, - 7307, - 0, - 7308, - 0, - 7310, - 0, - 7335, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7337, - 0, - 7343, - 7347, - 0, - 0, - 0, - 0, - 0, - 7348, - 0, - 7349, - 7350, - 7352, - 7354, - 0, - 0, - 0, - 0, - 7357, - 0, - 7358, - 7366, - 0, - 7367, - 7368, - 0, - 0, - 7373, - 0, - 0, - 0, - 7374, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7376, - 0, - 0, - 0, - 7377, - 0, - 0, - 0, - 0, - 0, - 7378, - 0, - 7379, - 7380, - 0, - 0, - 0, - 0, - 0, - 7383, - 0, - 0, - 7386, - 0, - 0, - 0, - 0, - 7398, - 0, - 0, - 0, - 7399, - 7400, - 0, - 7401, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 7402, - 0, - 0, - 0, - 0, - 0, - 7405, - 0, - 0, - 0, - 0, - 0, - 7406, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7421, - 7427, - 7429, - 0, - 0, - 0, - 7435, - 0, - 0, - 7436, - 0, - 0, - 0, - 7437, - 0, - 0, - 0, - 0, - 0, - 0, - 7438, - 7443, - 0, - 7446, - 0, - 7448, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7456, - 0, - 0, - 0, - 0, - 0, - 7457, - 0, - 0, - 7461, - 0, - 0, - 0, - 0, - 0, - 7462, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7463, - 7466, - 7472, - 0, - 7476, - 0, - 0, - 7490, - 0, - 7491, - 0, - 0, - 7493, - 0, - 0, - 0, - 7498, - 7499, - 0, - 0, - 7508, - 0, - 0, - 0, - 0, - 0, - 7512, - 0, - 0, - 0, - 7513, - 7514, - 7516, - 0, - 0, - 0, - 0, - 7518, - 0, - 0, - 7519, - 7521, - 7522, - 0, - 0, - 0, - 7526, - 0, - 0, - 7529, - 0, - 0, - 7531, - 0, - 7536, - 0, - 7538, - 0, - 7539, - 0, - 0, - 7541, - 7542, - 7546, - 0, - 0, - 0, - 0, - 0, - 7547, - 0, - 7548, - 0, - 0, - 0, - 0, - 0, - 7550, - 0, - 0, - 7552, - 7553, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7554, - 7563, - 0, - 7573, - 0, - 0, - 0, - 0, - 0, - 0, - 7574, - 7576, - 0, - 7578, - 7581, - 7583, - 0, - 0, - 0, - 7584, - 0, - 7587, - 0, - 0, - 0, - 0, - 0, - 7589, - 0, - 0, - 0, - 7594, - 0, - 0, - 7595, - 0, - 0, - 7600, - 7602, - 7610, - 0, - 0, - 0, - 0, - 0, - 7612, - 0, - 7613, - 7614, - 0, - 0, - 7615, - 0, - 0, - 7616, - 0, - 7620, - 0, - 7621, - 7622, - 0, - 7623, - 0, - 0, - 0, - 0, - 7626, - 0, - 0, - 0, - 0, - 7627, - 7629, - 7631, - 0, - 0, - 7633, - 0, - 0, - 0, - 0, - 0, - 7639, - 0, - 7640, - 7642, - 0, - 0, - 7643, - 0, - 0, - 0, - 0, - 7644, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7645, - 0, - 0, - 0, - 0, - 0, - 7661, - 7662, - 7663, - 7665, - 0, - 7666, - 0, - 7667, - 0, - 7684, - 7688, - 7690, - 0, - 7691, - 0, - 0, - 0, - 0, - 0, - 0, - 7692, - 0, - 0, - 7700, - 0, - 7707, - 0, - 7708, - 0, - 7709, - 0, - 7721, - 0, - 0, - 0, - 7722, - 0, - 7724, - 0, - 0, - 0, - 0, - 0, - 0, - 7729, 
- 7731, - 0, - 7732, - 0, - 7733, - 7735, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7739, - 0, - 0, - 7741, - 7745, - 0, - 7748, - 0, - 0, - 0, - 7751, - 0, - 0, - 0, - 7752, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7753, - 0, - 0, - 7756, - 0, - 7757, - 0, - 7759, - 0, - 7760, - 0, - 0, - 0, - 0, - 7761, - 7768, - 0, - 0, - 7769, - 0, - 0, - 7770, - 0, - 0, - 7771, - 0, - 0, - 7772, - 0, - 0, - 7773, - 0, - 0, - 0, - 0, - 0, - 7778, - 7783, - 0, - 0, - 0, - 0, - 0, - 7784, - 7785, - 0, - 7790, - 0, - 0, - 0, - 0, - 7792, - 0, - 7798, - 0, - 0, - 0, - 0, - 0, - 7799, - 0, - 7810, - 0, - 0, - 7813, - 0, - 7814, - 0, - 7816, - 0, - 7818, - 7824, - 7825, - 7826, - 0, - 7828, - 7830, - 0, - 0, - 0, - 7840, - 0, - 7842, - 0, - 7843, - 0, - 0, - 0, - 0, - 7844, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7846, - 0, - 0, - 0, - 0, - 0, - 7856, - 7857, - 7858, - 7862, - 0, - 7865, - 0, - 0, - 7866, - 0, - 0, - 7913, - 0, - 0, - 0, - 0, - 7914, - 0, - 0, - 7915, - 7917, - 7918, - 7919, - 0, - 7920, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7921, - 7922, - 0, - 7924, - 0, - 0, - 7925, - 0, - 0, - 7927, - 0, - 7930, - 7935, - 0, - 0, - 7937, - 0, - 0, - 0, - 0, - 0, - 0, - 7939, - 0, - 7940, - 0, - 0, - 0, - 0, - 0, - 7941, - 0, - 0, - 0, - 0, - 7945, - 0, - 0, - 0, - 0, - 7949, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7950, - 0, - 7953, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7968, - 0, - 0, - 0, - 0, - 7969, - 7972, - 7992, - 0, - 7993, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 7994, - 0, - 0, - 0, - 0, - 8007, - 8008, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8010, - 0, - 0, - 0, - 8012, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8018, - 0, - 8028, - 8029, - 0, - 0, - 8030, - 0, - 0, - 8032, - 8033, - 0, - 0, - 8034, - 8036, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8037, - 0, - 0, - 0, - 8043, - 8052, - 8059, - 8060, - 0, - 0, - 8061, - 0, - 0, - 0, - 8062, - 0, - 8063, - 0, - 8064, - 0, - 8066, - 8068, - 0, - 
0, - 0, - 8080, - 8081, - 0, - 8089, - 0, - 0, - 0, - 0, - 0, - 8092, - 0, - 0, - 0, - 0, - 0, - 0, - 8093, - 8110, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8111, - 0, - 0, - 0, - 0, - 0, - 8112, - 8115, - 0, - 8117, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8120, - 8121, - 8122, - 8128, - 8129, - 8130, - 8131, - 0, - 0, - 8139, - 0, - 0, - 8144, - 0, - 0, - 0, - 0, - 8145, - 8146, - 8153, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8154, - 0, - 8157, - 8160, - 8162, - 0, - 8164, - 8165, - 0, - 0, - 0, - 0, - 8166, - 8167, - 0, - 0, - 8179, - 0, - 0, - 0, - 8185, - 0, - 0, - 0, - 8186, - 0, - 0, - 8187, - 0, - 0, - 0, - 8188, - 0, - 0, - 0, - 0, - 0, - 8204, - 0, - 0, - 0, - 0, - 8210, - 0, - 0, - 0, - 0, - 0, - 8213, - 0, - 8214, - 0, - 0, - 8215, - 0, - 0, - 0, - 0, - 0, - 0, - 8218, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8219, - 0, - 8221, - 0, - 0, - 8222, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8225, - 0, - 0, - 0, - 8233, - 0, - 0, - 8242, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8247, - 0, - 8248, - 8252, - 0, - 8256, - 8257, - 0, - 0, - 8261, - 0, - 8264, - 8265, - 0, - 0, - 0, - 0, - 8267, - 0, - 0, - 0, - 8269, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8270, - 0, - 0, - 0, - 8278, - 0, - 8279, - 8283, - 0, - 0, - 8285, - 8286, - 8289, - 8292, - 0, - 0, - 0, - 0, - 8293, - 8295, - 8299, - 8300, - 8301, - 0, - 0, - 0, - 0, - 0, - 0, - 8304, - 8307, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8321, - 0, - 0, - 0, - 8322, - 8323, - 8325, - 8326, - 8327, - 0, - 0, - 8332, - 8338, - 0, - 0, - 8340, - 0, - 0, - 0, - 0, - 0, - 8350, - 0, - 0, - 8351, - 0, - 8354, - 8355, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8360, - 8372, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8377, - 0, - 0, - 0, - 0, - 8380, - 0, - 0, - 0, - 8383, - 0, - 8384, - 0, - 0, - 0, - 0, - 8386, - 8392, - 0, - 0, - 8394, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8396, - 
8397, - 0, - 8398, - 0, - 8399, - 0, - 0, - 0, - 0, - 0, - 8400, - 0, - 8401, - 8410, - 8411, - 0, - 8412, - 8413, - 8422, - 0, - 0, - 0, - 0, - 8423, - 0, - 0, - 0, - 0, - 8424, - 0, - 0, - 8425, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8441, - 8442, - 0, - 0, - 0, - 0, - 0, - 0, - 8443, - 0, - 0, - 8444, - 0, - 8447, - 0, - 0, - 0, - 0, - 8451, - 0, - 8458, - 0, - 8462, - 0, - 0, - 8468, - 0, - 8469, - 0, - 0, - 0, - 8470, - 0, - 8473, - 8479, - 8480, - 0, - 0, - 0, - 0, - 8481, - 8483, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8484, - 0, - 0, - 8490, - 0, - 0, - 0, - 0, - 0, - 0, - 8491, - 8493, - 8494, - 0, - 8528, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8530, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8534, - 8538, - 8540, - 0, - 0, - 8541, - 0, - 0, - 8545, - 0, - 8557, - 0, - 0, - 8569, - 8570, - 0, - 0, - 8571, - 8574, - 8575, - 8579, - 0, - 8583, - 0, - 0, - 0, - 0, - 8591, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8606, - 0, - 8607, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8608, - 0, - 0, - 8609, - 0, - 0, - 0, - 8610, - 0, - 0, - 0, - 8611, - 0, - 0, - 8613, - 8617, - 8621, - 0, - 0, - 8622, - 0, - 8623, - 0, - 8624, - 8625, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8637, - 8638, - 8639, - 8650, - 0, - 0, - 0, - 0, - 8652, - 8654, - 8655, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8656, - 0, - 0, - 0, - 0, - 0, - 8657, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8658, - 0, - 0, - 8659, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8660, - 0, - 0, - 0, - 0, - 0, - 0, - 8661, - 8663, - 8664, - 0, - 0, - 0, - 0, - 8665, - 0, - 8669, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8671, - 8674, - 0, - 8684, - 0, - 8686, - 0, - 0, - 0, - 8689, - 0, - 0, - 0, - 8690, - 0, - 8706, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8710, - 0, - 8711, - 8713, - 8714, - 8724, - 8727, - 8728, - 8733, - 8736, - 0, - 8737, - 8739, - 0, - 0, - 0, - 0, - 8742, - 8743, - 8745, - 8754, - 0, - 0, - 0, 
- 0, - 8756, - 0, - 0, - 0, - 0, - 0, - 0, - 8757, - 8760, - 0, - 0, - 0, - 0, - 0, - 8762, - 8763, - 8764, - 0, - 8766, - 8769, - 8770, - 8773, - 0, - 8774, - 0, - 8779, - 0, - 0, - 0, - 0, - 8780, - 0, - 0, - 8781, - 0, - 0, - 8783, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8784, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8785, - 0, - 0, - 0, - 0, - 8786, - 0, - 0, - 0, - 0, - 8788, - 8790, - 0, - 0, - 0, - 8803, - 0, - 8813, - 8814, - 0, - 0, - 0, - 0, - 0, - 8815, - 8816, - 0, - 0, - 0, - 0, - 8818, - 0, - 0, - 0, - 0, - 8822, - 8828, - 8829, - 0, - 8831, - 0, - 0, - 0, - 0, - 8833, - 0, - 0, - 0, - 8834, - 0, - 0, - 0, - 8835, - 0, - 8836, - 0, - 0, - 0, - 8837, - 0, - 0, - 0, - 0, - 0, - 0, - 8838, - 8839, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8840, - 0, - 0, - 0, - 8841, - 0, - 8842, - 0, - 0, - 0, - 8846, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8847, - 0, - 8848, - 0, - 0, - 8864, - 0, - 0, - 8866, - 0, - 0, - 8870, - 8872, - 0, - 0, - 8873, - 8874, - 0, - 0, - 0, - 0, - 0, - 0, - 8875, - 0, - 8876, - 0, - 0, - 0, - 0, - 8896, - 8900, - 0, - 0, - 0, - 0, - 8901, - 0, - 0, - 0, - 0, - 0, - 8904, - 0, - 8907, - 0, - 0, - 0, - 0, - 8911, - 8912, - 8913, - 0, - 0, - 0, - 8914, - 0, - 8915, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8916, - 0, - 0, - 0, - 8929, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 8930, - 0, - 8932, - 0, - 8943, - 0, - 0, - 0, - 8945, - 8947, - 0, - 0, - 0, - 0, - 8949, - 0, - 8950, - 0, - 8954, - 8957, - 0, - 0, - 8970, - 0, - 0, - 0, - 0, - 8971, - 0, - 8996, - 0, - 0, - 0, - 0, - 8997, - 9000, - 0, - 0, - 0, - 0, - 9001, - 9002, - 0, - 9004, - 9009, - 9024, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9027, - 9082, - 0, - 0, - 9083, - 9089, - 0, - 0, - 0, - 0, - 0, - 0, - 9090, - 0, - 0, - 0, - 9092, - 0, - 0, - 9093, - 0, - 9095, - 0, - 0, - 9096, - 9097, - 9101, - 9102, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
0, - 9112, - 0, - 0, - 0, - 0, - 0, - 0, - 9114, - 0, - 0, - 9120, - 0, - 9121, - 9122, - 0, - 0, - 0, - 9123, - 9124, - 0, - 0, - 9125, - 0, - 0, - 9126, - 0, - 9127, - 0, - 0, - 9129, - 9131, - 0, - 0, - 0, - 9132, - 0, - 0, - 9136, - 0, - 9144, - 0, - 0, - 9148, - 0, - 0, - 0, - 0, - 0, - 0, - 9149, - 0, - 9152, - 9163, - 0, - 0, - 9165, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9166, - 0, - 9169, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9170, - 0, - 0, - 0, - 0, - 9172, - 0, - 9174, - 9175, - 9176, - 0, - 9177, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9186, - 0, - 9187, - 0, - 0, - 0, - 9188, - 9189, - 0, - 0, - 9190, - 0, - 0, - 0, - 0, - 9191, - 0, - 0, - 0, - 9193, - 0, - 0, - 0, - 0, - 9197, - 9198, - 0, - 0, - 0, - 9208, - 9211, - 0, - 0, - 0, - 0, - 9216, - 9217, - 0, - 9220, - 0, - 0, - 0, - 0, - 9221, - 9222, - 9223, - 0, - 9224, - 9225, - 0, - 0, - 9227, - 0, - 9228, - 9229, - 0, - 0, - 9230, - 0, - 9232, - 0, - 9233, - 0, - 0, - 0, - 0, - 0, - 9234, - 9235, - 0, - 0, - 9237, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9238, - 9240, - 0, - 0, - 9241, - 0, - 0, - 0, - 0, - 9244, - 0, - 0, - 0, - 0, - 9247, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9248, - 0, - 0, - 0, - 9249, - 0, - 0, - 0, - 0, - 0, - 9250, - 0, - 0, - 0, - 0, - 9251, - 0, - 0, - 9252, - 9255, - 0, - 0, - 0, - 9256, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9257, - 0, - 0, - 9258, - 0, - 0, - 0, - 0, - 0, - 0, - 9259, - 0, - 0, - 0, - 0, - 0, - 9262, - 9263, - 0, - 0, - 9265, - 9266, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9268, - 9271, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9273, - 0, - 0, - 0, - 9276, - 9277, - 9279, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9280, - 0, - 0, - 9293, - 0, - 0, - 0, - 0, - 0, - 9297, - 9301, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9308, - 9309, - 9313, - 9321, - 9322, - 0, - 9326, - 9327, - 0, - 0, - 9477, - 0, - 9479, - 0, - 0, - 0, - 0, - 9482, - 0, - 0, - 0, - 9483, - 0, - 9484, - 0, - 0, - 0, - 0, - 0, 
- 0, - 0, - 0, - 0, - 9485, - 0, - 0, - 9486, - 0, - 0, - 0, - 9489, - 0, - 0, - 0, - 0, - 9490, - 9491, - 0, - 0, - 0, - 0, - 9493, - 0, - 9495, - 9496, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9500, - 0, - 9502, - 0, - 0, - 0, - 0, - 0, - 9504, - 9507, - 0, - 9509, - 0, - 9511, - 0, - 0, - 9513, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9515, - 0, - 0, - 0, - 0, - 0, - 0, - 9516, - 9517, - 0, - 0, - 0, - 0, - 9532, - 0, - 0, - 9533, - 0, - 0, - 9538, - 0, - 9539, - 9540, - 0, - 0, - 0, - 0, - 9541, - 0, - 0, - 0, - 9542, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9544, - 9545, - 0, - 9546, - 0, - 0, - 0, - 0, - 0, - 0, - 9547, - 9548, - 0, - 0, - 0, - 9550, - 0, - 9557, - 0, - 9558, - 0, - 9561, - 0, - 9563, - 9570, - 0, - 9572, - 9574, - 9575, - 0, - 0, - 0, - 9577, - 9592, - 0, - 0, - 9596, - 0, - 0, - 0, - 9598, - 0, - 9600, - 0, - 9601, - 0, - 0, - 0, - 0, - 0, - 0, - 9608, - 0, - 9638, - 9639, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9641, - 0, - 0, - 9643, - 9644, - 9645, - 9646, - 0, - 0, - 0, - 9648, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9650, - 9654, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9655, - 0, - 0, - 0, - 0, - 0, - 9656, - 0, - 9657, - 0, - 0, - 0, - 0, - 9658, - 0, - 0, - 9659, - 0, - 0, - 9664, - 0, - 0, - 9665, - 0, - 9667, - 9669, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9671, - 0, - 9673, - 9681, - 0, - 0, - 0, - 0, - 9682, - 9683, - 9684, - 0, - 0, - 0, - 0, - 9686, - 9698, - 0, - 0, - 9700, - 9701, - 9702, - 0, - 9703, - 9717, - 0, - 0, - 0, - 0, - 9718, - 0, - 9726, - 0, - 0, - 0, - 0, - 9727, - 0, - 0, - 0, - 9728, - 0, - 9742, - 0, - 9744, - 0, - 0, - 0, - 9750, - 0, - 9754, - 9755, - 0, - 0, - 0, - 0, - 0, - 9756, - 0, - 9757, - 9768, - 0, - 9769, - 0, - 0, - 0, - 9770, - 9771, - 0, - 9773, - 0, - 9774, - 0, - 9775, - 0, - 0, - 0, - 9776, - 9777, - 9784, - 0, - 0, - 0, - 9786, - 0, - 9789, - 0, - 0, - 0, - 0, - 9793, - 9794, - 0, - 0, - 0, - 9808, - 0, - 0, - 0, - 0, - 0, - 9811, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, 
- 0, - 0, - 0, - 0, - 9812, - 0, - 9820, - 0, - 9823, - 0, - 9828, - 0, - 0, - 0, - 0, - 9830, - 0, - 0, - 9833, - 9836, - 0, - 0, - 0, - 9840, - 0, - 0, - 0, - 9841, - 0, - 0, - 9842, - 0, - 9845, - 0, - 0, - 0, - 9847, - 9848, - 0, - 0, - 9855, - 0, - 0, - 0, - 0, - 0, - 0, - 9856, - 9863, - 9865, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9866, - 9867, - 9868, - 9873, - 9875, - 0, - 0, - 0, - 0, - 0, - 0, - 9880, - 0, - 9886, - 0, - 0, - 0, - 9887, - 0, - 0, - 9891, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9906, - 9907, - 9908, - 0, - 0, - 0, - 9909, - 0, - 0, - 0, - 0, - 0, - 0, - 9910, - 0, - 0, - 0, - 0, - 9913, - 0, - 0, - 0, - 0, - 9914, - 0, - 0, - 0, - 0, - 0, - 9922, - 0, - 0, - 0, - 0, - 9923, - 9925, - 0, - 0, - 0, - 0, - 0, - 0, - 9930, - 0, - 0, - 0, - 9931, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9932, - 0, - 9939, - 0, - 0, - 9940, - 9962, - 9966, - 0, - 9969, - 9970, - 0, - 0, - 9974, - 0, - 9979, - 9981, - 9982, - 0, - 0, - 0, - 9985, - 0, - 0, - 0, - 0, - 0, - 0, - 9987, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 9988, - 9993, - 0, - 0, - 9994, - 0, - 0, - 0, - 9997, - 0, - 10004, - 0, - 0, - 0, - 0, - 0, - 10007, - 10019, - 10020, - 10022, - 0, - 0, - 0, - 10031, - 0, - 0, - 0, - 0, - 0, - 10032, - 0, - 0, - 10034, - 0, - 10036, - 0, - 0, - 0, - 0, - 10038, - 0, - 10039, - 10040, - 10041, - 10042, - 0, - 0, - 0, - 0, - 0, - 10043, - 0, - 0, - 0, - 0, - 0, - 10045, - 10054, - 0, - 0, - 0, - 0, - 10055, - 0, - 0, - 10057, - 10058, - 0, - 0, - 0, - 0, - 0, - 0, - 10059, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10060, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10063, - 0, - 10066, - 0, - 0, - 0, - 10070, - 0, - 10072, - 0, - 0, - 10076, - 10077, - 0, - 0, - 10084, - 0, - 10087, - 10090, - 10091, - 0, - 0, - 0, - 10094, - 10097, - 0, - 0, - 0, - 0, - 0, - 0, - 10098, - 0, - 0, - 0, - 0, - 0, - 0, - 10103, - 0, - 10104, - 0, - 10108, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10120, - 0, - 0, - 0, - 10122, - 0, - 0, - 10125, - 0, - 0, 
- 0, - 0, - 10127, - 10128, - 0, - 0, - 10134, - 0, - 10135, - 10136, - 0, - 10137, - 0, - 0, - 10147, - 0, - 10149, - 10150, - 0, - 0, - 10156, - 0, - 10158, - 10159, - 10160, - 10168, - 0, - 0, - 10171, - 0, - 10173, - 0, - 0, - 0, - 10176, - 0, - 0, - 0, - 0, - 10177, - 0, - 0, - 0, - 0, - 10178, - 0, - 0, - 0, - 0, - 10194, - 0, - 10202, - 0, - 0, - 10203, - 10204, - 0, - 10205, - 10206, - 0, - 10207, - 0, - 0, - 0, - 0, - 10209, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10213, - 0, - 0, - 0, - 0, - 0, - 0, - 10217, - 0, - 10229, - 0, - 10230, - 10231, - 0, - 0, - 10232, - 0, - 0, - 10237, - 10238, - 10244, - 0, - 0, - 0, - 0, - 0, - 10250, - 0, - 10252, - 0, - 0, - 0, - 0, - 0, - 0, - 10255, - 0, - 0, - 10257, - 0, - 0, - 0, - 0, - 0, - 0, - 10258, - 0, - 10259, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10260, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10284, - 10288, - 10289, - 0, - 0, - 0, - 10290, - 0, - 10296, - 0, - 0, - 0, - 0, - 0, - 10297, - 0, - 0, - 0, - 0, - 0, - 0, - 10298, - 0, - 0, - 0, - 0, - 10299, - 10303, - 0, - 0, - 0, - 0, - 0, - 10306, - 0, - 0, - 0, - 10307, - 0, - 10308, - 0, - 0, - 0, - 0, - 10311, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10315, - 10317, - 0, - 0, - 0, - 10318, - 10319, - 0, - 10321, - 0, - 10326, - 0, - 10328, - 0, - 0, - 0, - 0, - 10329, - 0, - 0, - 10331, - 0, - 10332, - 0, - 0, - 0, - 0, - 0, - 0, - 10334, - 0, - 0, - 10335, - 10338, - 0, - 0, - 0, - 0, - 0, - 10339, - 10349, - 0, - 0, - 0, - 0, - 0, - 0, - 10351, - 0, - 10353, - 0, - 0, - 0, - 0, - 0, - 0, - 10362, - 0, - 10368, - 0, - 10369, - 0, - 0, - 0, - 10372, - 10373, - 0, - 0, - 0, - 0, - 0, - 10374, - 0, - 0, - 0, - 10375, - 0, - 10376, - 0, - 0, - 10386, - 10388, - 10390, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10391, - 0, - 0, - 10392, - 10394, - 0, - 0, - 10396, - 0, - 10397, - 0, - 10403, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10404, - 0, - 10405, - 10410, - 0, - 0, - 10411, - 0, - 10412, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10421, - 10422, - 10423, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 10425, - 0, - 0, - 10427, - 0, - 0, - 10430, - 0, - 0, - 0, - 0, - 0, - 10432, - 0, - 10433, - 10434, - 0, - 0, - 0, - 0, - 10436, - 10437, - 0, - 10438, - 0, - 10439, - 0, - 10444, - 10446, - 0, - 0, - 0, - 0, - 0, - 10448, - 0, - 0, - 0, - 0, - 0, - 10449, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10451, - 0, - 10453, - 0, - 0, - 0, - 10454, - 10457, - 0, - 0, - 10459, - 0, - 10469, - 0, - 0, - 0, - 0, - 0, - 10472, - 10481, - 0, - 0, - 0, - 0, - 0, - 10482, - 10483, - 0, - 10492, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10499, - 0, - 0, - 0, - 10502, - 0, - 0, - 10510, - 0, - 10521, - 10524, - 0, - 0, - 10525, - 10526, - 10528, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10530, - 0, - 0, - 0, - 0, - 10533, - 0, - 10534, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10535, - 10536, - 0, - 0, - 10544, - 0, - 10553, - 10556, - 0, - 10557, - 10559, - 0, - 0, - 0, - 0, - 0, - 10562, - 10563, - 10564, - 0, - 10565, - 0, - 0, - 0, - 10566, - 0, - 10567, - 0, - 0, - 0, - 0, - 10575, - 0, - 0, - 10576, - 0, - 10578, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10585, - 10586, - 10587, - 10589, - 0, - 10590, - 0, - 0, - 10594, - 0, - 0, - 0, - 0, - 0, - 10598, - 0, - 0, - 10601, - 0, - 0, - 0, - 10602, - 0, - 10603, - 0, - 10604, - 0, - 10605, - 0, - 0, - 10607, - 0, - 10626, - 0, - 10627, - 0, - 0, - 0, - 0, - 0, - 10629, - 10630, - 10631, - 0, - 0, - 0, - 10646, - 0, - 0, - 0, - 10647, - 0, - 10650, - 0, - 10651, - 0, - 0, - 0, - 10652, - 10653, - 10655, - 0, - 10658, - 0, - 0, - 10659, - 0, - 10667, - 0, - 0, - 0, - 0, - 10669, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10670, - 0, - 0, - 0, - 10671, - 0, - 0, - 0, - 0, - 10672, - 10673, - 0, - 10674, - 0, - 0, - 0, - 10676, - 0, - 0, - 0, - 0, - 0, - 0, - 10678, - 0, - 10682, - 0, - 0, - 10692, - 0, - 10697, - 0, - 0, - 0, - 0, - 10698, - 0, - 0, - 0, - 10700, - 0, - 0, - 0, - 0, - 0, - 10703, - 0, - 10704, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10705, - 0, - 10715, - 
10718, - 10720, - 0, - 0, - 10722, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10723, - 0, - 0, - 0, - 0, - 10726, - 0, - 0, - 0, - 0, - 0, - 10727, - 10730, - 10743, - 0, - 0, - 0, - 0, - 0, - 0, - 10744, - 0, - 0, - 10745, - 0, - 0, - 0, - 0, - 0, - 0, - 10748, - 0, - 0, - 0, - 0, - 10750, - 0, - 0, - 10752, - 10753, - 0, - 0, - 0, - 10756, - 0, - 0, - 0, - 0, - 0, - 0, - 10758, - 0, - 0, - 0, - 10759, - 0, - 10769, - 0, - 0, - 10772, - 0, - 0, - 0, - 0, - 0, - 0, - 10773, - 0, - 0, - 0, - 10777, - 0, - 0, - 10779, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10780, - 10784, - 0, - 0, - 0, - 10789, - 0, - 0, - 0, - 10791, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10795, - 0, - 0, - 10796, - 0, - 10808, - 0, - 10809, - 0, - 0, - 0, - 10810, - 0, - 0, - 0, - 10812, - 0, - 0, - 10814, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10815, - 0, - 0, - 0, - 0, - 10816, - 10817, - 0, - 0, - 0, - 0, - 10819, - 0, - 10820, - 0, - 0, - 0, - 0, - 10821, - 10822, - 10823, - 0, - 10826, - 10849, - 0, - 0, - 0, - 0, - 10850, - 0, - 0, - 10852, - 0, - 10853, - 0, - 0, - 10856, - 0, - 0, - 10857, - 10858, - 10859, - 10860, - 0, - 0, - 0, - 0, - 0, - 0, - 10863, - 0, - 10866, - 10867, - 10872, - 10890, - 0, - 0, - 10891, - 10892, - 0, - 0, - 0, - 0, - 0, - 10893, - 0, - 0, - 0, - 10896, - 10899, - 0, - 0, - 10900, - 10902, - 0, - 0, - 0, - 0, - 0, - 10903, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10905, - 0, - 10906, - 0, - 0, - 0, - 0, - 10908, - 10911, - 0, - 10912, - 0, - 0, - 10916, - 0, - 0, - 0, - 0, - 0, - 10917, - 0, - 10918, - 0, - 0, - 0, - 10923, - 0, - 0, - 0, - 0, - 0, - 10924, - 0, - 0, - 10928, - 10929, - 0, - 0, - 10930, - 0, - 0, - 0, - 10932, - 0, - 0, - 0, - 0, - 10939, - 0, - 0, - 10945, - 0, - 0, - 0, - 10947, - 0, - 0, - 10948, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10958, - 0, - 10960, - 10962, - 0, - 0, - 10964, - 0, - 0, - 0, - 10966, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 10967, - 0, - 0, - 0, - 
10968, - 0, - 0, - 0, - 10973, - 0, - 0, - 0, - 0, - 0, - 10975, - 0, - 0, - 0, - 10976, - 10978, - 0, - 0, - 10982, - 10984, - 10987, - 0, - 0, - 10988, - 0, - 10989, - 0, - 0, - 10991, - 0, - 0, - 0, - 0, - 10992, - 0, - 0, - 0, - 10993, - 0, - 10995, - 0, - 0, - 0, - 10996, - 10997, - 0, - 0, - 0, - 10998, - 0, - 10999, - 0, - 11001, - 0, - 0, - 0, - 0, - 0, - 0, - 11010, - 11012, - 0, - 11013, - 11016, - 11017, - 0, - 0, - 11019, - 11020, - 11021, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11022, - 0, - 0, - 11023, - 11029, - 0, - 0, - 0, - 0, - 11031, - 0, - 0, - 0, - 11034, - 0, - 0, - 0, - 0, - 11055, - 0, - 0, - 0, - 0, - 0, - 11056, - 11060, - 0, - 0, - 0, - 0, - 0, - 0, - 11061, - 0, - 0, - 11064, - 11065, - 0, - 11066, - 0, - 11069, - 0, - 11085, - 0, - 0, - 0, - 0, - 0, - 11086, - 0, - 0, - 0, - 11088, - 0, - 0, - 0, - 11094, - 0, - 0, - 0, - 11095, - 11096, - 0, - 0, - 0, - 0, - 0, - 0, - 11097, - 11098, - 0, - 0, - 0, - 0, - 0, - 0, - 11099, - 0, - 0, - 11102, - 11108, - 0, - 0, - 0, - 11109, - 0, - 11114, - 11119, - 0, - 11131, - 0, - 0, - 0, - 11142, - 0, - 0, - 11143, - 0, - 11146, - 0, - 11147, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11148, - 0, - 11149, - 11152, - 11153, - 11154, - 0, - 11156, - 0, - 11157, - 0, - 0, - 0, - 11158, - 0, - 0, - 11159, - 11160, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11163, - 0, - 0, - 11164, - 11166, - 0, - 0, - 0, - 11172, - 11174, - 0, - 0, - 0, - 11176, - 0, - 0, - 0, - 0, - 0, - 11182, - 11183, - 0, - 0, - 0, - 11184, - 11187, - 0, - 0, - 11188, - 11189, - 0, - 0, - 0, - 0, - 0, - 0, - 11194, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11200, - 11202, - 0, - 0, - 0, - 0, - 0, - 0, - 11203, - 0, - 11204, - 0, - 0, - 0, - 0, - 0, - 11205, - 0, - 0, - 0, - 11206, - 0, - 11207, - 0, - 0, - 11209, - 0, - 11211, - 0, - 11214, - 0, - 0, - 11231, - 0, - 0, - 0, - 11293, - 11295, - 0, - 0, - 11296, - 11297, - 11302, - 0, - 0, - 0, - 11307, - 0, - 0, 
- 0, - 0, - 11309, - 11310, - 0, - 11311, - 0, - 0, - 0, - 11313, - 0, - 11314, - 0, - 0, - 0, - 0, - 11334, - 0, - 11338, - 0, - 0, - 0, - 11339, - 0, - 0, - 0, - 0, - 0, - 11340, - 0, - 11341, - 11342, - 0, - 11344, - 0, - 11345, - 0, - 0, - 0, - 11348, - 11349, - 0, - 0, - 11350, - 0, - 0, - 0, - 11355, - 0, - 0, - 0, - 0, - 0, - 0, - 11356, - 0, - 11357, - 11370, - 0, - 0, - 11371, - 0, - 11374, - 11376, - 0, - 0, - 0, - 11377, - 0, - 0, - 11378, - 11383, - 0, - 11386, - 11399, - 0, - 11400, - 11406, - 0, - 0, - 0, - 11408, - 0, - 0, - 11409, - 11412, - 0, - 0, - 0, - 0, - 11417, - 0, - 0, - 0, - 11418, - 0, - 11421, - 0, - 11426, - 11429, - 0, - 0, - 0, - 0, - 0, - 11430, - 0, - 11437, - 0, - 11438, - 0, - 0, - 0, - 0, - 0, - 11440, - 11453, - 0, - 0, - 0, - 0, - 0, - 0, - 11454, - 0, - 0, - 0, - 0, - 11455, - 0, - 0, - 11456, - 11460, - 11461, - 11463, - 0, - 11469, - 0, - 11473, - 0, - 0, - 0, - 0, - 11474, - 0, - 0, - 0, - 11475, - 0, - 11476, - 11477, - 11480, - 0, - 0, - 0, - 0, - 11481, - 0, - 0, - 11484, - 0, - 0, - 11487, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11497, - 0, - 0, - 11502, - 0, - 11509, - 0, - 0, - 11510, - 11511, - 11513, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11515, - 0, - 0, - 0, - 0, - 11516, - 0, - 11520, - 11521, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11529, - 11530, - 11531, - 11534, - 0, - 0, - 11543, - 0, - 0, - 0, - 0, - 0, - 11547, - 0, - 11548, - 0, - 0, - 0, - 0, - 0, - 11552, - 11556, - 0, - 11557, - 0, - 0, - 11559, - 0, - 11560, - 0, - 0, - 0, - 0, - 0, - 0, - 11561, - 0, - 0, - 11563, - 11564, - 0, - 11565, - 0, - 0, - 0, - 0, - 11567, - 0, - 0, - 0, - 11569, - 0, - 11574, - 0, - 11575, - 0, - 0, - 0, - 11577, - 0, - 11578, - 0, - 0, - 0, - 11580, - 11581, - 0, - 0, - 0, - 11582, - 11584, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11587, - 0, - 11588, - 11591, - 0, - 11595, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11596, - 0, - 11597, - 0, - 0, - 0, - 0, - 11598, - 11601, - 0, - 0, - 
0, - 11602, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11603, - 11604, - 0, - 11606, - 0, - 0, - 11608, - 0, - 0, - 0, - 0, - 11610, - 0, - 0, - 11611, - 0, - 0, - 0, - 0, - 11613, - 0, - 11622, - 0, - 0, - 0, - 11623, - 0, - 0, - 0, - 0, - 11625, - 0, - 0, - 11626, - 11627, - 11628, - 11630, - 0, - 0, - 0, - 0, - 0, - 0, - 11639, - 0, - 0, - 11646, - 0, - 11648, - 11649, - 0, - 11650, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11651, - 0, - 0, - 11652, - 11653, - 11656, - 0, - 0, - 11677, - 11679, - 0, - 0, - 0, - 0, - 11680, - 0, - 0, - 11681, - 0, - 11685, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11688, - 0, - 0, - 0, - 11716, - 0, - 11719, - 0, - 0, - 0, - 0, - 0, - 11721, - 0, - 0, - 11724, - 11743, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11745, - 11748, - 11750, - 0, - 0, - 0, - 0, - 0, - 11751, - 0, - 0, - 0, - 11752, - 11754, - 0, - 11755, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11759, - 0, - 0, - 0, - 0, - 0, - 0, - 11760, - 0, - 0, - 0, - 11761, - 0, - 0, - 0, - 0, - 0, - 0, - 11766, - 11767, - 0, - 11772, - 11773, - 0, - 11774, - 0, - 0, - 11775, - 0, - 11777, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11778, - 11780, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11783, - 0, - 11784, - 0, - 0, - 0, - 11785, - 0, - 0, - 0, - 11786, - 0, - 0, - 0, - 0, - 11788, - 0, - 0, - 11789, - 11791, - 11792, - 0, - 0, - 0, - 0, - 11795, - 11834, - 11835, - 11836, - 0, - 0, - 11837, - 0, - 0, - 0, - 11838, - 0, - 0, - 11846, - 11851, - 0, - 11852, - 0, - 11869, - 0, - 0, - 0, - 11871, - 0, - 0, - 0, - 11872, - 11874, - 0, - 0, - 0, - 0, - 0, - 0, - 11875, - 0, - 11876, - 11877, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11883, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11884, - 0, - 11885, - 0, - 11886, - 0, - 0, - 11887, - 0, - 11894, - 11895, - 11897, - 11909, - 11910, - 0, - 11912, - 11918, - 0, - 0, - 11920, - 0, - 11922, - 11924, - 11927, - 11928, - 0, - 0, - 0, - 
0, - 11929, - 0, - 11934, - 0, - 0, - 0, - 0, - 0, - 11941, - 11943, - 11944, - 0, - 11945, - 0, - 0, - 0, - 0, - 11948, - 11949, - 0, - 0, - 0, - 0, - 11953, - 0, - 11954, - 0, - 11955, - 0, - 11956, - 0, - 0, - 0, - 0, - 0, - 11957, - 0, - 0, - 11959, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 11961, - 0, - 0, - 0, - 0, - 0, - 11978, - 0, - 0, - 0, - 11979, - 11980, - 11986, - 11987, - 0, - 11992, - 0, - 0, - 0, - 0, - 0, - 11993, - 0, - 0, - 0, - 11994, - 0, - 11999, - 12004, - 12005, - 12006, - 0, - 0, - 0, - 0, - 0, - 12011, - 0, - 0, - 12012, - 12014, - 0, - 0, - 12015, - 0, - 0, - 12019, - 12028, - 0, - 0, - 12029, - 0, - 0, - 12032, - 12033, - 0, - 0, - 0, - 0, - 12034, - 0, - 12041, - 12043, - 0, - 0, - 12044, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12046, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12054, - 12055, - 0, - 12056, - 0, - 0, - 0, - 12060, - 12064, - 0, - 0, - 0, - 0, - 0, - 12065, - 12067, - 12068, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12074, - 0, - 0, - 0, - 12075, - 12076, - 0, - 0, - 0, - 12079, - 0, - 12081, - 12086, - 12087, - 0, - 0, - 12088, - 0, - 0, - 0, - 0, - 12089, - 0, - 12092, - 0, - 0, - 0, - 0, - 12097, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12098, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12102, - 12103, - 12104, - 12111, - 0, - 0, - 12114, - 12116, - 0, - 0, - 0, - 12118, - 0, - 0, - 0, - 12119, - 12120, - 12128, - 0, - 0, - 0, - 0, - 12130, - 0, - 0, - 0, - 0, - 0, - 0, - 12131, - 0, - 0, - 0, - 12132, - 12134, - 0, - 0, - 0, - 0, - 12137, - 0, - 12139, - 0, - 12141, - 0, - 0, - 12142, - 0, - 0, - 0, - 12144, - 0, - 0, - 0, - 0, - 0, - 12145, - 0, - 12148, - 0, - 12153, - 0, - 0, - 0, - 0, - 12154, - 12171, - 12173, - 0, - 0, - 0, - 12175, - 0, - 0, - 0, - 0, - 12178, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12183, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12184, - 0, - 0, - 0, - 12186, - 0, - 0, - 0, - 0, - 0, - 12187, - 12188, - 0, - 0, - 12189, - 0, - 12196, - 0, - 12197, - 0, - 0, - 12198, - 0, - 12201, 
- 0, - 0, - 0, - 0, - 12203, - 0, - 12209, - 0, - 0, - 0, - 0, - 12210, - 12211, - 12212, - 12213, - 0, - 12217, - 12218, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12222, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12223, - 0, - 0, - 12229, - 0, - 0, - 0, - 0, - 12233, - 0, - 0, - 0, - 0, - 12234, - 0, - 0, - 12236, - 12242, - 0, - 0, - 0, - 12243, - 0, - 0, - 0, - 12244, - 12253, - 0, - 12254, - 12256, - 0, - 12257, - 0, - 0, - 12275, - 0, - 0, - 0, - 0, - 0, - 12277, - 0, - 0, - 0, - 0, - 0, - 12278, - 0, - 12289, - 0, - 0, - 12290, - 0, - 12292, - 12293, - 0, - 0, - 12294, - 0, - 12295, - 0, - 0, - 12296, - 0, - 12297, - 0, - 12298, - 0, - 0, - 0, - 0, - 12301, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12309, - 0, - 12338, - 12340, - 0, - 0, - 0, - 0, - 12341, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12342, - 12343, - 0, - 12344, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12345, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12346, - 0, - 0, - 0, - 0, - 12348, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12350, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12351, - 0, - 12355, - 12356, - 12357, - 0, - 0, - 12367, - 12370, - 12371, - 0, - 0, - 0, - 0, - 0, - 12372, - 12376, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12379, - 0, - 12382, - 0, - 12383, - 0, - 0, - 12384, - 0, - 0, - 0, - 0, - 12393, - 0, - 0, - 12394, - 0, - 0, - 0, - 0, - 12398, - 12403, - 0, - 0, - 12404, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12410, - 0, - 0, - 0, - 12411, - 0, - 0, - 0, - 12412, - 0, - 0, - 0, - 0, - 12420, - 0, - 12421, - 0, - 0, - 0, - 0, - 0, - 12423, - 0, - 12425, - 12429, - 0, - 0, - 0, - 12431, - 12432, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12434, - 0, - 0, - 0, - 0, - 0, - 12435, - 12436, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12437, - 0, - 0, - 0, - 0, - 0, - 12438, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12445, - 0, - 0, - 0, - 12450, - 12451, 
- 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12452, - 12475, - 0, - 0, - 12493, - 12494, - 0, - 0, - 0, - 12495, - 0, - 0, - 0, - 0, - 12496, - 12502, - 12509, - 0, - 0, - 0, - 0, - 12510, - 0, - 12512, - 12513, - 0, - 0, - 0, - 0, - 12514, - 0, - 0, - 0, - 12515, - 0, - 12520, - 0, - 0, - 0, - 12524, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12527, - 0, - 0, - 0, - 12528, - 0, - 0, - 0, - 12529, - 0, - 0, - 0, - 0, - 0, - 12530, - 0, - 12535, - 0, - 0, - 12536, - 0, - 12538, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12540, - 0, - 12548, - 0, - 0, - 0, - 0, - 0, - 12550, - 0, - 0, - 0, - 12551, - 12552, - 0, - 0, - 0, - 12554, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12555, - 0, - 0, - 12562, - 0, - 12565, - 0, - 12566, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12569, - 0, - 0, - 0, - 12571, - 12574, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12577, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12578, - 12579, - 12603, - 0, - 12608, - 0, - 0, - 12611, - 0, - 12612, - 0, - 12615, - 0, - 12625, - 0, - 0, - 0, - 0, - 12627, - 12646, - 0, - 12648, - 0, - 0, - 12657, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12670, - 0, - 0, - 12671, - 0, - 12673, - 12677, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12679, - 0, - 12681, - 0, - 12682, - 12693, - 0, - 12694, - 0, - 12697, - 0, - 12701, - 0, - 0, - 0, - 12703, - 12704, - 0, - 0, - 0, - 0, - 12707, - 12737, - 0, - 0, - 12739, - 0, - 0, - 12740, - 0, - 0, - 12742, - 12743, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12745, - 0, - 12746, - 12747, - 0, - 12748, - 0, - 0, - 12759, - 12767, - 0, - 0, - 0, - 0, - 12773, - 0, - 12774, - 12778, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12779, - 0, - 0, - 0, - 0, - 0, - 12780, - 12793, - 0, - 12824, - 0, - 12825, - 0, - 12836, - 0, - 0, - 0, - 0, - 12839, - 0, - 12842, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12843, - 12845, - 0, - 12846, - 0, - 0, - 0, - 0, - 
12847, - 0, - 0, - 12850, - 12852, - 12853, - 0, - 0, - 0, - 12854, - 0, - 0, - 0, - 12855, - 0, - 12856, - 0, - 12858, - 0, - 0, - 12859, - 0, - 12862, - 0, - 12863, - 0, - 0, - 12866, - 0, - 12869, - 12872, - 12873, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12875, - 0, - 12877, - 0, - 0, - 12878, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12884, - 12885, - 12888, - 0, - 12889, - 0, - 0, - 0, - 0, - 12893, - 0, - 0, - 0, - 12895, - 12896, - 12898, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12902, - 0, - 12909, - 12910, - 0, - 12926, - 0, - 12928, - 0, - 0, - 0, - 12929, - 0, - 12930, - 0, - 0, - 0, - 0, - 12931, - 0, - 12932, - 12933, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12934, - 0, - 12942, - 0, - 0, - 0, - 0, - 12944, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12946, - 0, - 0, - 12948, - 0, - 0, - 12949, - 0, - 0, - 0, - 0, - 12950, - 0, - 0, - 0, - 0, - 12951, - 0, - 12952, - 0, - 12953, - 0, - 0, - 0, - 12954, - 12958, - 12959, - 0, - 0, - 0, - 0, - 0, - 12960, - 12964, - 0, - 0, - 0, - 0, - 0, - 12966, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 12970, - 0, - 12971, - 0, - 0, - 0, - 0, - 0, - 0, - 12972, - 0, - 0, - 12982, - 0, - 0, - 0, - 12984, - 12985, - 0, - 12986, - 12996, - 12997, - 13001, - 13002, - 0, - 0, - 0, - 0, - 13004, - 0, - 0, - 13005, - 0, - 0, - 13007, - 13009, - 0, - 13017, - 0, - 0, - 0, - 13020, - 0, - 13021, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13022, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13024, - 13027, - 0, - 0, - 0, - 0, - 0, - 13028, - 0, - 0, - 13029, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13032, - 0, - 13037, - 0, - 0, - 0, - 0, - 0, - 0, - 13040, - 0, - 0, - 13041, - 0, - 0, - 0, - 13043, - 13044, - 13046, - 0, - 0, - 0, - 0, - 13047, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13049, - 13054, - 0, - 13056, - 0, - 0, - 13060, - 13061, - 0, - 0, - 0, - 0, - 0, - 13067, - 0, - 0, - 13068, - 0, - 13071, - 0, - 0, - 0, - 0, - 0, - 13077, - 13078, - 0, - 0, - 0, - 0, - 0, - 13079, 
- 13080, - 13081, - 0, - 13082, - 0, - 0, - 0, - 13085, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13086, - 0, - 13087, - 13088, - 0, - 0, - 0, - 0, - 0, - 13094, - 0, - 13099, - 0, - 13100, - 0, - 0, - 0, - 13101, - 0, - 13125, - 13126, - 13128, - 13129, - 0, - 0, - 13130, - 0, - 13131, - 0, - 0, - 0, - 0, - 0, - 0, - 13134, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13150, - 0, - 13168, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13169, - 0, - 0, - 13170, - 0, - 0, - 0, - 0, - 13174, - 0, - 0, - 0, - 13176, - 0, - 0, - 0, - 0, - 0, - 13177, - 0, - 13178, - 13183, - 13187, - 0, - 0, - 0, - 13189, - 0, - 0, - 13190, - 0, - 0, - 13191, - 0, - 0, - 13206, - 0, - 0, - 0, - 13207, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13212, - 0, - 0, - 13219, - 13232, - 0, - 0, - 0, - 13241, - 0, - 13249, - 13253, - 0, - 0, - 0, - 0, - 0, - 13255, - 13259, - 0, - 13260, - 13261, - 0, - 13262, - 0, - 13272, - 0, - 0, - 0, - 0, - 13276, - 0, - 0, - 0, - 0, - 13277, - 13299, - 0, - 0, - 13301, - 13302, - 0, - 0, - 13303, - 0, - 0, - 13305, - 0, - 13310, - 0, - 0, - 0, - 13311, - 0, - 0, - 0, - 0, - 13325, - 0, - 13328, - 0, - 0, - 0, - 13329, - 0, - 0, - 0, - 0, - 0, - 0, - 13330, - 0, - 0, - 13331, - 0, - 13335, - 0, - 0, - 13342, - 0, - 0, - 0, - 0, - 0, - 13343, - 0, - 13354, - 0, - 13362, - 0, - 13366, - 13367, - 13369, - 0, - 0, - 13371, - 13372, - 0, - 13373, - 13374, - 0, - 13376, - 0, - 13380, - 13381, - 13386, - 0, - 13387, - 13388, - 0, - 13389, - 13391, - 13395, - 0, - 0, - 0, - 0, - 0, - 13401, - 13409, - 0, - 13410, - 0, - 0, - 0, - 0, - 13420, - 0, - 0, - 0, - 0, - 0, - 13422, - 0, - 0, - 0, - 0, - 13423, - 0, - 0, - 0, - 0, - 13425, - 0, - 0, - 0, - 0, - 0, - 13427, - 0, - 0, - 0, - 13428, - 0, - 0, - 13430, - 13438, - 0, - 13439, - 0, - 13445, - 0, - 13448, - 13449, - 0, - 0, - 0, - 0, - 0, - 0, - 13451, - 0, - 13457, - 0, - 0, - 0, - 0, - 13458, - 13459, - 0, - 13460, - 0, - 0, - 0, - 0, - 13464, - 13465, - 13466, - 13470, - 0, - 13471, - 13472, 
- 13474, - 13475, - 0, - 13476, - 0, - 0, - 13478, - 13479, - 0, - 13481, - 0, - 0, - 0, - 0, - 13487, - 0, - 13490, - 0, - 13493, - 0, - 0, - 13494, - 0, - 0, - 13495, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13496, - 13497, - 0, - 13500, - 0, - 0, - 13516, - 13522, - 0, - 0, - 13525, - 13528, - 0, - 0, - 0, - 13530, - 13535, - 0, - 13537, - 13539, - 0, - 13540, - 0, - 13543, - 0, - 13544, - 0, - 0, - 0, - 0, - 0, - 0, - 13545, - 0, - 0, - 0, - 0, - 0, - 0, - 13547, - 0, - 0, - 0, - 13549, - 13555, - 0, - 0, - 0, - 13556, - 13557, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13558, - 0, - 13563, - 0, - 0, - 0, - 0, - 13564, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13566, - 0, - 0, - 0, - 0, - 0, - 0, - 13569, - 0, - 0, - 13571, - 0, - 0, - 0, - 0, - 13573, - 0, - 0, - 0, - 0, - 0, - 0, - 13578, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13581, - 0, - 13586, - 0, - 13595, - 0, - 13600, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13601, - 13603, - 0, - 13604, - 13605, - 13606, - 13607, - 0, - 0, - 13617, - 13618, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13623, - 0, - 13625, - 13627, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13629, - 0, - 0, - 0, - 13634, - 0, - 0, - 0, - 13638, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13654, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13656, - 0, - 13659, - 0, - 0, - 13660, - 0, - 0, - 13662, - 0, - 0, - 0, - 13663, - 0, - 13664, - 0, - 0, - 0, - 0, - 0, - 13668, - 0, - 13669, - 13671, - 0, - 0, - 13672, - 0, - 0, - 0, - 0, - 0, - 0, - 13675, - 13685, - 0, - 13686, - 0, - 0, - 0, - 13687, - 0, - 0, - 0, - 13692, - 13694, - 13697, - 0, - 0, - 0, - 13702, - 0, - 0, - 0, - 0, - 0, - 13705, - 0, - 0, - 0, - 0, - 13707, - 0, - 0, - 0, - 13714, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13715, - 0, - 13716, - 13717, - 0, - 0, - 13719, - 13724, - 13730, - 13731, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13732, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13734, - 0, - 13736, - 0, - 0, - 13737, - 13738, - 
13747, - 0, - 13751, - 0, - 0, - 13752, - 0, - 0, - 0, - 13753, - 0, - 13757, - 0, - 0, - 13762, - 13763, - 0, - 13764, - 13765, - 0, - 13766, - 0, - 0, - 13767, - 0, - 0, - 0, - 13768, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13769, - 0, - 0, - 13772, - 0, - 13775, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13776, - 13778, - 13787, - 0, - 0, - 0, - 13797, - 0, - 13798, - 0, - 13801, - 0, - 13804, - 13806, - 0, - 0, - 0, - 0, - 13816, - 13817, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13834, - 0, - 13836, - 0, - 0, - 13838, - 0, - 0, - 13839, - 0, - 13840, - 0, - 0, - 0, - 0, - 13842, - 0, - 0, - 0, - 0, - 0, - 0, - 13843, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13845, - 0, - 0, - 0, - 0, - 0, - 13858, - 0, - 0, - 13860, - 0, - 0, - 13861, - 0, - 0, - 13862, - 13863, - 0, - 13868, - 0, - 13869, - 13870, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13872, - 0, - 0, - 0, - 0, - 13873, - 13878, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13886, - 0, - 13888, - 13889, - 13890, - 0, - 0, - 13891, - 13894, - 0, - 13897, - 13899, - 13900, - 13904, - 0, - 0, - 13906, - 0, - 0, - 0, - 13909, - 0, - 0, - 0, - 13910, - 0, - 0, - 0, - 13911, - 0, - 0, - 0, - 0, - 0, - 13912, - 13917, - 0, - 0, - 0, - 0, - 13918, - 0, - 13919, - 0, - 0, - 13920, - 0, - 0, - 0, - 13921, - 0, - 0, - 13922, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13924, - 0, - 13927, - 0, - 0, - 0, - 0, - 0, - 13932, - 0, - 13933, - 0, - 13934, - 0, - 0, - 13935, - 0, - 13944, - 0, - 0, - 0, - 13954, - 0, - 0, - 13955, - 0, - 0, - 0, - 0, - 13956, - 0, - 13957, - 0, - 13967, - 13969, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 13970, - 13990, - 0, - 13991, - 13994, - 0, - 13995, - 0, - 0, - 0, - 0, - 13996, - 0, - 0, - 13999, - 0, - 0, - 0, - 14018, - 0, - 14019, - 0, - 14021, - 0, - 0, - 0, - 0, - 0, - 0, - 14041, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14043, - 0, - 0, - 0, - 0, - 14046, - 0, - 0, - 0, - 
14048, - 14049, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14051, - 0, - 0, - 14052, - 14056, - 0, - 14063, - 0, - 14064, - 14066, - 0, - 0, - 14067, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14068, - 0, - 0, - 0, - 14072, - 0, - 14074, - 14075, - 0, - 14076, - 14079, - 14085, - 14086, - 14087, - 14093, - 0, - 0, - 0, - 0, - 14095, - 0, - 0, - 0, - 0, - 0, - 0, - 14096, - 14097, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14098, - 0, - 14102, - 0, - 0, - 0, - 0, - 0, - 14103, - 0, - 0, - 0, - 14104, - 0, - 0, - 14105, - 0, - 0, - 0, - 14107, - 14108, - 0, - 0, - 14109, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14117, - 0, - 0, - 0, - 0, - 14118, - 0, - 0, - 0, - 0, - 14119, - 0, - 0, - 14120, - 0, - 0, - 14121, - 0, - 14122, - 14127, - 0, - 14128, - 14136, - 0, - 0, - 14138, - 0, - 14140, - 0, - 0, - 0, - 14141, - 14142, - 0, - 0, - 0, - 0, - 14146, - 0, - 0, - 14149, - 0, - 14151, - 0, - 0, - 0, - 14152, - 0, - 0, - 14153, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14154, - 0, - 14156, - 14157, - 0, - 0, - 14159, - 0, - 14161, - 0, - 0, - 0, - 0, - 14162, - 0, - 0, - 0, - 0, - 0, - 0, - 14163, - 0, - 0, - 14173, - 0, - 0, - 0, - 0, - 0, - 0, - 14174, - 0, - 0, - 14176, - 0, - 0, - 14178, - 0, - 0, - 14179, - 14181, - 0, - 0, - 14182, - 14185, - 14187, - 0, - 14190, - 0, - 0, - 14197, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14198, - 0, - 0, - 0, - 0, - 0, - 0, - 14199, - 14200, - 0, - 0, - 0, - 14204, - 0, - 0, - 14208, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14231, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14234, - 0, - 0, - 14235, - 0, - 0, - 0, - 14240, - 14241, - 0, - 0, - 0, - 14246, - 0, - 0, - 0, - 14247, - 0, - 14250, - 0, - 0, - 14251, - 0, - 0, - 14254, - 0, - 0, - 14256, - 0, - 0, - 0, - 14260, - 0, - 14261, - 0, - 0, - 0, - 0, - 14262, - 14267, - 14269, - 0, - 0, - 14277, - 0, - 0, - 14278, - 0, - 14279, - 14282, - 0, - 0, - 0, - 14283, - 0, - 0, - 0, - 14284, - 14285, - 0, - 0, - 0, - 0, - 14286, - 
0, - 0, - 0, - 14288, - 0, - 0, - 0, - 14289, - 0, - 14290, - 0, - 14293, - 14301, - 14302, - 14304, - 14305, - 0, - 14307, - 0, - 14308, - 14309, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14311, - 14312, - 0, - 0, - 14317, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14318, - 0, - 0, - 0, - 0, - 14320, - 0, - 0, - 0, - 0, - 14321, - 14322, - 0, - 0, - 0, - 0, - 0, - 14326, - 14329, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14330, - 14331, - 0, - 0, - 0, - 0, - 14332, - 0, - 0, - 0, - 14333, - 0, - 0, - 14337, - 14340, - 0, - 14341, - 0, - 0, - 14342, - 0, - 14345, - 14346, - 0, - 0, - 14347, - 0, - 14362, - 0, - 0, - 0, - 0, - 0, - 14364, - 14365, - 14371, - 0, - 14373, - 0, - 0, - 14374, - 0, - 14379, - 0, - 14400, - 0, - 0, - 0, - 0, - 0, - 14401, - 0, - 0, - 14405, - 0, - 14406, - 0, - 14408, - 14409, - 0, - 0, - 0, - 14417, - 0, - 0, - 14424, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14430, - 0, - 0, - 0, - 14431, - 0, - 0, - 14435, - 0, - 14440, - 0, - 0, - 0, - 0, - 0, - 0, - 14442, - 0, - 0, - 14443, - 0, - 0, - 0, - 0, - 0, - 14446, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14454, - 0, - 14457, - 0, - 14460, - 0, - 0, - 14466, - 0, - 0, - 0, - 0, - 0, - 14467, - 0, - 0, - 0, - 0, - 0, - 0, - 14469, - 0, - 14477, - 0, - 0, - 0, - 0, - 0, - 0, - 14478, - 14482, - 0, - 0, - 0, - 14483, - 0, - 0, - 0, - 14485, - 14486, - 0, - 0, - 0, - 14487, - 14488, - 14489, - 14492, - 14493, - 14494, - 14495, - 14496, - 14497, - 0, - 14499, - 0, - 14501, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14502, - 0, - 14507, - 14512, - 14513, - 14514, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14515, - 14526, - 14530, - 0, - 14537, - 0, - 14544, - 0, - 14547, - 0, - 0, - 14548, - 14550, - 14551, - 0, - 0, - 14552, - 0, - 0, - 0, - 14553, - 0, - 14554, - 0, - 0, - 0, - 0, - 14556, - 14564, - 0, - 0, - 14565, - 14566, - 0, - 0, - 0, - 0, - 0, - 0, - 14568, - 0, - 0, - 14569, - 0, - 0, - 0, - 14571, - 14576, - 0, - 0, - 
14577, - 14578, - 14579, - 0, - 0, - 14580, - 0, - 0, - 0, - 0, - 14582, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14583, - 0, - 0, - 0, - 0, - 0, - 14587, - 0, - 14588, - 0, - 0, - 14600, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14601, - 0, - 0, - 14604, - 14605, - 14611, - 0, - 14613, - 0, - 0, - 0, - 0, - 14615, - 0, - 0, - 0, - 0, - 0, - 0, - 14627, - 0, - 14628, - 0, - 0, - 0, - 0, - 14631, - 0, - 14633, - 14634, - 0, - 0, - 0, - 0, - 14635, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14636, - 0, - 0, - 14639, - 14642, - 0, - 0, - 0, - 0, - 14644, - 0, - 0, - 0, - 0, - 14645, - 14646, - 0, - 14653, - 0, - 0, - 14654, - 0, - 14658, - 0, - 14661, - 0, - 0, - 0, - 14665, - 0, - 0, - 0, - 14668, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14669, - 0, - 0, - 14670, - 0, - 0, - 0, - 14680, - 0, - 0, - 14681, - 0, - 0, - 0, - 0, - 0, - 14682, - 14683, - 0, - 0, - 0, - 0, - 14686, - 0, - 0, - 0, - 0, - 14687, - 14697, - 0, - 0, - 0, - 0, - 14699, - 14705, - 14711, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14712, - 0, - 0, - 0, - 14713, - 0, - 0, - 0, - 0, - 14719, - 0, - 14720, - 14721, - 14726, - 0, - 0, - 0, - 14728, - 14729, - 0, - 0, - 0, - 0, - 14731, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14733, - 14736, - 14737, - 0, - 0, - 14740, - 14742, - 0, - 0, - 0, - 14744, - 14753, - 0, - 0, - 0, - 0, - 14755, - 14758, - 14760, - 0, - 0, - 0, - 0, - 0, - 14761, - 14762, - 14765, - 14771, - 0, - 14772, - 0, - 14773, - 14774, - 0, - 0, - 14775, - 0, - 0, - 14776, - 0, - 0, - 0, - 0, - 14777, - 0, - 14779, - 0, - 0, - 14782, - 0, - 0, - 14785, - 14786, - 14788, - 0, - 0, - 0, - 0, - 0, - 14795, - 0, - 0, - 0, - 0, - 0, - 0, - 14798, - 0, - 14803, - 14804, - 14806, - 0, - 0, - 0, - 14809, - 0, - 0, - 0, - 0, - 0, - 0, - 14810, - 0, - 0, - 0, - 0, - 14811, - 0, - 14812, - 0, - 0, - 0, - 0, - 0, - 14815, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14816, - 0, - 14818, - 0, - 0, - 0, - 0, - 
0, - 0, - 14819, - 0, - 14820, - 0, - 14823, - 0, - 0, - 0, - 14824, - 0, - 0, - 14826, - 14827, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14830, - 0, - 0, - 0, - 0, - 0, - 14833, - 0, - 14845, - 0, - 0, - 0, - 0, - 0, - 14846, - 0, - 0, - 14847, - 14871, - 0, - 14873, - 0, - 14876, - 0, - 14877, - 14878, - 14880, - 0, - 0, - 0, - 0, - 0, - 14881, - 0, - 14882, - 14894, - 0, - 0, - 0, - 0, - 14895, - 0, - 14907, - 0, - 14908, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14911, - 0, - 0, - 0, - 0, - 14920, - 0, - 0, - 14931, - 0, - 14932, - 14934, - 14935, - 0, - 0, - 14936, - 0, - 14945, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14947, - 0, - 0, - 14948, - 14949, - 14951, - 0, - 0, - 14952, - 0, - 0, - 0, - 14964, - 14973, - 0, - 0, - 14990, - 0, - 0, - 0, - 0, - 14995, - 0, - 0, - 14998, - 15001, - 0, - 0, - 15002, - 15020, - 0, - 0, - 0, - 0, - 0, - 0, - 15021, - 0, - 15022, - 0, - 0, - 0, - 0, - 15023, - 0, - 0, - 15025, - 15029, - 15033, - 0, - 0, - 0, - 15034, - 0, - 0, - 0, - 15035, - 0, - 0, - 0, - 0, - 0, - 15043, - 15044, - 0, - 0, - 0, - 15045, - 15046, - 15048, - 15050, - 0, - 15065, - 0, - 0, - 0, - 0, - 15066, - 0, - 0, - 15075, - 15082, - 15084, - 0, - 0, - 15085, - 15086, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15088, - 0, - 0, - 0, - 15089, - 0, - 0, - 0, - 0, - 15094, - 0, - 15096, - 0, - 15097, - 0, - 15100, - 0, - 0, - 15102, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15105, - 0, - 0, - 15106, - 0, - 15109, - 15113, - 0, - 0, - 0, - 15115, - 0, - 15118, - 0, - 0, - 0, - 0, - 0, - 0, - 15119, - 0, - 0, - 15120, - 0, - 0, - 0, - 0, - 0, - 15123, - 15129, - 0, - 0, - 0, - 15130, - 0, - 15131, - 0, - 0, - 15134, - 0, - 15135, - 0, - 0, - 0, - 15137, - 15138, - 0, - 0, - 0, - 0, - 0, - 0, - 15139, - 0, - 0, - 0, - 0, - 0, - 15140, - 0, - 0, - 15154, - 15162, - 0, - 15169, - 15170, - 0, - 15175, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15177, - 0, - 15178, - 15179, - 0, - 0, - 0, - 0, - 0, - 15183, - 0, - 0, 
- 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15185, - 15187, - 0, - 15194, - 15195, - 15196, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15204, - 0, - 0, - 0, - 0, - 15206, - 0, - 0, - 0, - 0, - 0, - 15207, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15213, - 0, - 15214, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15232, - 0, - 0, - 0, - 0, - 15234, - 0, - 15238, - 15240, - 0, - 15248, - 0, - 0, - 0, - 0, - 15250, - 15251, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15252, - 0, - 0, - 0, - 15255, - 15262, - 15266, - 0, - 0, - 0, - 15267, - 0, - 0, - 0, - 15277, - 15279, - 0, - 0, - 0, - 15280, - 15281, - 15282, - 0, - 0, - 0, - 0, - 0, - 15285, - 0, - 0, - 0, - 0, - 15289, - 0, - 0, - 15291, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15296, - 15297, - 0, - 0, - 15304, - 0, - 0, - 0, - 0, - 15306, - 0, - 0, - 0, - 0, - 0, - 0, - 15307, - 15308, - 0, - 15309, - 0, - 0, - 15311, - 0, - 0, - 15312, - 15313, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15314, - 15317, - 0, - 0, - 0, - 15318, - 15319, - 0, - 0, - 0, - 0, - 15320, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15321, - 0, - 0, - 0, - 0, - 0, - 15324, - 0, - 15325, - 15326, - 0, - 15330, - 0, - 0, - 0, - 0, - 15334, - 0, - 15335, - 0, - 15341, - 0, - 0, - 15342, - 0, - 0, - 15343, - 15344, - 0, - 0, - 0, - 0, - 15345, - 0, - 0, - 0, - 0, - 15347, - 0, - 0, - 15348, - 15349, - 15350, - 0, - 15356, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15357, - 0, - 15358, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15359, - 15360, - 15364, - 0, - 15380, - 0, - 0, - 0, - 0, - 0, - 15392, - 0, - 0, - 15393, - 0, - 15395, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15396, - 0, - 0, - 15397, - 15398, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15399, - 0, - 15400, - 0, - 0, - 0, - 15402, - 0, - 15405, - 15410, - 0, - 0, - 0, - 0, - 15411, - 0, - 0, - 0, - 15412, - 0, - 15416, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15428, - 0, - 15435, - 0, - 0, - 15438, - 0, 
- 0, - 0, - 0, - 15439, - 0, - 0, - 0, - 15440, - 0, - 0, - 0, - 15441, - 15449, - 15451, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15452, - 0, - 0, - 15455, - 0, - 0, - 0, - 15456, - 0, - 0, - 15458, - 0, - 15460, - 15461, - 0, - 0, - 0, - 0, - 0, - 15462, - 15464, - 0, - 15465, - 0, - 0, - 15466, - 0, - 0, - 15467, - 0, - 0, - 0, - 0, - 0, - 15468, - 0, - 0, - 0, - 0, - 15481, - 0, - 0, - 15484, - 0, - 15485, - 15486, - 0, - 0, - 0, - 15487, - 0, - 0, - 0, - 0, - 0, - 15488, - 0, - 15492, - 15498, - 0, - 0, - 0, - 15499, - 0, - 0, - 0, - 15500, - 0, - 15501, - 0, - 0, - 15512, - 0, - 15522, - 0, - 0, - 0, - 15524, - 0, - 15525, - 15526, - 0, - 0, - 15527, - 0, - 0, - 15545, - 15546, - 0, - 15548, - 15552, - 0, - 15553, - 0, - 0, - 0, - 15554, - 0, - 15555, - 0, - 15557, - 15565, - 15573, - 15577, - 15578, - 0, - 15582, - 0, - 15583, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15586, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15588, - 0, - 0, - 0, - 0, - 0, - 15589, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15593, - 15594, - 0, - 0, - 0, - 0, - 15595, - 0, - 0, - 0, - 0, - 0, - 0, - 15596, - 0, - 0, - 0, - 15597, - 0, - 0, - 0, - 0, - 15600, - 0, - 0, - 15601, - 0, - 0, - 0, - 0, - 15602, - 15603, - 0, - 0, - 0, - 0, - 0, - 0, - 15604, - 0, - 15609, - 0, - 0, - 15612, - 0, - 0, - 15613, - 0, - 0, - 15615, - 15617, - 15618, - 0, - 0, - 15620, - 0, - 15636, - 15637, - 0, - 0, - 15649, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15650, - 0, - 0, - 15651, - 0, - 0, - 0, - 15656, - 0, - 15658, - 0, - 0, - 0, - 15664, - 0, - 0, - 15665, - 0, - 0, - 15668, - 0, - 0, - 0, - 0, - 0, - 15669, - 0, - 0, - 15674, - 0, - 0, - 15675, - 0, - 0, - 0, - 0, - 15676, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15677, - 0, - 0, - 0, - 0, - 15678, - 0, - 0, - 0, - 0, - 0, - 15679, - 0, - 0, - 15681, - 0, - 15686, - 0, - 0, - 0, - 0, - 15687, - 0, - 15688, - 0, - 0, - 15690, - 0, - 0, - 0, - 15697, - 0, - 15699, - 15700, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 15701, - 0, - 15702, - 15703, - 0, - 15704, - 0, - 15705, - 0, - 15707, - 0, - 15709, - 0, - 15712, - 15716, - 0, - 15717, - 0, - 15718, - 15720, - 0, - 0, - 0, - 0, - 0, - 15724, - 0, - 0, - 0, - 15725, - 0, - 15726, - 0, - 0, - 0, - 15740, - 0, - 15745, - 15746, - 0, - 0, - 15747, - 0, - 15748, - 0, - 0, - 0, - 0, - 0, - 15749, - 0, - 0, - 0, - 15752, - 0, - 15753, - 0, - 0, - 0, - 0, - 0, - 0, - 15759, - 0, - 0, - 0, - 15765, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15767, - 0, - 0, - 0, - 15771, - 0, - 0, - 15784, - 0, - 0, - 0, - 0, - 15785, - 15790, - 15791, - 0, - 0, - 15792, - 0, - 0, - 0, - 15807, - 0, - 15811, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15818, - 0, - 0, - 0, - 15819, - 0, - 0, - 0, - 0, - 15821, - 0, - 0, - 0, - 0, - 0, - 15822, - 15824, - 0, - 0, - 15827, - 0, - 0, - 15829, - 15831, - 0, - 15832, - 0, - 0, - 15833, - 0, - 15835, - 15838, - 15839, - 15843, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15844, - 0, - 0, - 0, - 0, - 15845, - 15851, - 15856, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15858, - 15860, - 0, - 15861, - 0, - 0, - 0, - 15864, - 0, - 0, - 0, - 0, - 15865, - 0, - 0, - 0, - 0, - 0, - 0, - 15866, - 0, - 15872, - 0, - 0, - 15876, - 0, - 0, - 0, - 0, - 15877, - 15878, - 15883, - 15885, - 0, - 0, - 15888, - 0, - 0, - 0, - 0, - 0, - 15889, - 15890, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15892, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15893, - 0, - 0, - 15894, - 0, - 0, - 0, - 15895, - 0, - 15896, - 15897, - 0, - 15898, - 15901, - 15902, - 0, - 15911, - 15915, - 0, - 15916, - 0, - 15924, - 15935, - 0, - 15937, - 0, - 0, - 0, - 0, - 0, - 15950, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15958, - 0, - 0, - 0, - 15961, - 0, - 0, - 15966, - 0, - 15967, - 0, - 0, - 15977, - 0, - 0, - 15978, - 0, - 0, - 15981, - 15982, - 15983, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 15986, - 0, - 0, - 0, - 15990, - 0, - 15991, - 15995, - 15998, - 0, - 15999, - 0, - 16000, - 0, - 0, - 0, - 0, - 
16008, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16009, - 16011, - 0, - 16013, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16014, - 0, - 0, - 16015, - 16023, - 16024, - 16025, - 0, - 0, - 16026, - 0, - 16030, - 0, - 16032, - 0, - 16033, - 0, - 0, - 0, - 0, - 0, - 0, - 16035, - 16036, - 16037, - 0, - 0, - 0, - 0, - 0, - 16039, - 0, - 0, - 0, - 0, - 16041, - 0, - 0, - 0, - 0, - 0, - 16043, - 16044, - 0, - 0, - 16047, - 0, - 0, - 0, - 16048, - 0, - 0, - 16049, - 16050, - 16052, - 0, - 0, - 0, - 0, - 0, - 16055, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16056, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16058, - 16060, - 16061, - 0, - 0, - 16063, - 0, - 0, - 16064, - 0, - 0, - 0, - 16067, - 16068, - 0, - 0, - 16069, - 16078, - 0, - 0, - 0, - 16079, - 0, - 0, - 0, - 16080, - 0, - 16081, - 0, - 0, - 0, - 16088, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16089, - 16093, - 0, - 16097, - 0, - 16103, - 0, - 16104, - 16105, - 0, - 0, - 16256, - 0, - 0, - 16259, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16260, - 16261, - 0, - 0, - 16262, - 0, - 0, - 16263, - 0, - 16268, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16269, - 0, - 0, - 16270, - 16273, - 0, - 16274, - 0, - 0, - 0, - 0, - 16275, - 16276, - 16277, - 16280, - 0, - 0, - 0, - 16281, - 16284, - 0, - 0, - 0, - 16286, - 0, - 16289, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16290, - 0, - 0, - 0, - 0, - 16291, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16292, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16293, - 16295, - 16297, - 0, - 16302, - 0, - 16304, - 0, - 16305, - 0, - 16306, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16307, - 16308, - 16312, - 0, - 0, - 0, - 0, - 0, - 0, - 16313, - 16315, - 0, - 16318, - 0, - 0, - 0, - 16321, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16326, - 16333, - 16336, - 0, - 0, - 0, - 0, - 16337, - 16340, - 0, - 0, - 0, - 0, - 0, - 16345, - 0, - 0, - 16346, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16347, - 0, - 0, - 16348, - 0, - 
0, - 0, - 0, - 16349, - 0, - 0, - 0, - 16350, - 0, - 16357, - 0, - 0, - 0, - 0, - 16359, - 16360, - 0, - 0, - 0, - 0, - 16362, - 16363, - 16364, - 16365, - 0, - 0, - 16366, - 0, - 0, - 0, - 0, - 16367, - 16368, - 0, - 16369, - 16374, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16376, - 0, - 0, - 0, - 0, - 16378, - 16379, - 0, - 16380, - 0, - 0, - 0, - 16381, - 16383, - 0, - 0, - 0, - 0, - 0, - 16390, - 0, - 0, - 0, - 16399, - 0, - 16402, - 16404, - 16406, - 16407, - 0, - 0, - 0, - 16409, - 16411, - 0, - 0, - 0, - 0, - 16412, - 0, - 16413, - 16415, - 16423, - 0, - 0, - 0, - 0, - 0, - 16424, - 0, - 0, - 0, - 16428, - 16434, - 16435, - 16449, - 0, - 16450, - 16451, - 0, - 0, - 0, - 16453, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16454, - 0, - 0, - 16456, - 16458, - 0, - 0, - 16459, - 0, - 0, - 16460, - 0, - 0, - 0, - 0, - 16462, - 0, - 16463, - 0, - 0, - 16466, - 0, - 0, - 0, - 0, - 0, - 16479, - 0, - 0, - 16480, - 0, - 16481, - 16484, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16485, - 0, - 0, - 0, - 0, - 0, - 0, - 16489, - 0, - 0, - 0, - 0, - 0, - 16491, - 0, - 0, - 16498, - 0, - 0, - 16503, - 0, - 16505, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16506, - 0, - 0, - 0, - 16508, - 16509, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16511, - 16513, - 0, - 0, - 0, - 16516, - 0, - 16517, - 0, - 16519, - 0, - 16529, - 0, - 0, - 16531, - 0, - 0, - 0, - 0, - 0, - 0, - 16534, - 0, - 0, - 16541, - 16542, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16543, - 16547, - 16548, - 0, - 0, - 0, - 16551, - 0, - 16552, - 0, - 0, - 0, - 16553, - 0, - 0, - 16558, - 0, - 0, - 16562, - 16565, - 0, - 0, - 0, - 16570, - 0, - 0, - 0, - 16573, - 16585, - 0, - 0, - 0, - 16586, - 16587, - 16595, - 0, - 16596, - 0, - 16598, - 0, - 0, - 0, - 16600, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16601, - 0, - 0, - 0, - 0, - 16603, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16604, - 16612, - 0, - 0, - 0, - 0, - 16613, - 0, - 16618, - 0, - 0, - 0, - 
16640, - 0, - 0, - 16641, - 0, - 0, - 0, - 0, - 0, - 0, - 16645, - 0, - 0, - 0, - 0, - 16646, - 0, - 0, - 0, - 0, - 0, - 0, - 16651, - 0, - 0, - 0, - 0, - 16653, - 16654, - 0, - 0, - 0, - 16655, - 0, - 0, - 16656, - 16667, - 0, - 0, - 0, - 0, - 16671, - 0, - 16672, - 0, - 0, - 0, - 16673, - 0, - 0, - 0, - 0, - 0, - 16676, - 0, - 16686, - 0, - 0, - 0, - 0, - 16689, - 0, - 16690, - 0, - 16692, - 0, - 16693, - 0, - 16694, - 0, - 16696, - 0, - 0, - 0, - 16705, - 0, - 0, - 0, - 0, - 0, - 0, - 16707, - 0, - 0, - 0, - 16709, - 0, - 0, - 0, - 0, - 16711, - 0, - 16712, - 16713, - 0, - 0, - 0, - 16715, - 0, - 0, - 0, - 0, - 16716, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16718, - 16724, - 0, - 0, - 16726, - 16727, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16728, - 0, - 16729, - 0, - 0, - 16730, - 0, - 0, - 0, - 0, - 0, - 16731, - 0, - 0, - 0, - 16732, - 0, - 0, - 0, - 0, - 16734, - 16738, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16743, - 0, - 0, - 16745, - 0, - 0, - 0, - 0, - 0, - 16749, - 0, - 16752, - 0, - 0, - 0, - 0, - 16756, - 0, - 0, - 16758, - 0, - 16759, - 0, - 0, - 0, - 0, - 0, - 16760, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16762, - 0, - 16769, - 0, - 16770, - 0, - 16772, - 0, - 0, - 0, - 16777, - 16780, - 0, - 0, - 0, - 0, - 0, - 0, - 16781, - 0, - 0, - 16782, - 0, - 16784, - 0, - 0, - 16785, - 16787, - 16792, - 0, - 0, - 16794, - 0, - 0, - 0, - 16798, - 0, - 0, - 16809, - 0, - 0, - 16814, - 16816, - 16817, - 0, - 16819, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16820, - 0, - 0, - 16836, - 16839, - 0, - 0, - 16841, - 16851, - 16857, - 0, - 0, - 16858, - 16859, - 0, - 0, - 16860, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16862, - 0, - 16863, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16864, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16876, - 0, - 16881, - 16882, - 0, - 16885, - 16886, - 0, - 16887, - 0, - 0, - 0, - 16889, - 16891, - 0, - 0, - 0, - 0, - 0, - 16894, - 16895, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 16897, - 0, - 16898, - 0, - 0, - 0, - 
0, - 0, - 16913, - 0, - 0, - 16924, - 16925, - 16926, - 0, - 0, - 16927, - 0, - 0, - 0, - 16937, - 16938, - 0, - 0, - 0, - 16940, - 16941, - 0, - 0, - 0, - 16942, - 16945, - 0, - 16946, - 16949, - 16950, - 0, - 0, - 0, - 16952, - 16955, - 0, - 0, - 0, - 16965, - 0, - 16969, - 0, - 0, - 16975, - 0, - 0, - 16976, - 0, - 0, - 0, - 0, - 16978, - 0, - 0, - 16981, - 0, - 16983, - 16989, - 0, - 0, - 0, - 0, - 16990, - 0, - 0, - 16991, - 0, - 0, - 0, - 16993, - 0, - 16994, - 16996, - 17000, - 0, - 0, - 0, - 0, - 0, - 17002, - 17004, - 0, - 17006, - 0, - 0, - 17007, - 0, - 0, - 0, - 0, - 17008, - 17013, - 17014, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17021, - 0, - 17031, - 0, - 0, - 0, - 0, - 0, - 17033, - 17036, - 0, - 17038, - 0, - 0, - 17039, - 0, - 17045, - 0, - 0, - 17046, - 17047, - 0, - 0, - 0, - 0, - 17048, - 0, - 17049, - 17050, - 0, - 17051, - 17053, - 0, - 17054, - 0, - 17055, - 0, - 0, - 0, - 0, - 0, - 17063, - 0, - 0, - 17064, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17065, - 0, - 0, - 17068, - 0, - 0, - 0, - 0, - 0, - 17072, - 0, - 0, - 0, - 0, - 0, - 0, - 17073, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17074, - 0, - 17080, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17081, - 17083, - 17084, - 0, - 0, - 0, - 17085, - 0, - 0, - 0, - 0, - 17092, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17093, - 0, - 17095, - 17102, - 0, - 0, - 0, - 0, - 0, - 0, - 17103, - 0, - 0, - 17105, - 0, - 17107, - 0, - 0, - 0, - 0, - 17114, - 0, - 0, - 0, - 0, - 0, - 17115, - 17125, - 17127, - 0, - 0, - 17128, - 0, - 0, - 0, - 17129, - 17130, - 0, - 17131, - 0, - 0, - 0, - 0, - 0, - 17132, - 17135, - 17145, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17146, - 0, - 17147, - 0, - 17148, - 0, - 0, - 0, - 0, - 0, - 0, - 17149, - 17150, - 0, - 17151, - 17153, - 0, - 17155, - 0, - 0, - 0, - 0, - 17163, - 17171, - 0, - 17174, - 0, - 0, - 0, - 0, - 17179, - 0, - 0, - 17182, - 
17185, - 0, - 0, - 0, - 0, - 0, - 17186, - 0, - 0, - 17188, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17189, - 17191, - 0, - 17194, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17195, - 17196, - 17203, - 17204, - 0, - 0, - 17205, - 17217, - 0, - 0, - 0, - 0, - 0, - 17218, - 0, - 0, - 0, - 0, - 17219, - 0, - 17220, - 0, - 17221, - 0, - 0, - 17230, - 0, - 0, - 0, - 0, - 0, - 17236, - 0, - 17238, - 17239, - 0, - 0, - 0, - 17241, - 17244, - 0, - 0, - 17245, - 0, - 17248, - 0, - 0, - 17251, - 0, - 17252, - 0, - 0, - 17264, - 0, - 17266, - 0, - 0, - 0, - 17268, - 0, - 0, - 0, - 0, - 17271, - 17272, - 0, - 17273, - 0, - 17295, - 0, - 17302, - 0, - 17305, - 0, - 0, - 0, - 17306, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17308, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17309, - 0, - 17310, - 17313, - 0, - 0, - 0, - 0, - 17314, - 17315, - 0, - 17317, - 0, - 0, - 0, - 0, - 17318, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17320, - 0, - 0, - 0, - 0, - 0, - 0, - 17334, - 0, - 17344, - 17348, - 0, - 0, - 0, - 17350, - 17351, - 0, - 0, - 17353, - 0, - 0, - 17354, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17355, - 0, - 0, - 0, - 0, - 0, - 0, - 17356, - 17357, - 0, - 0, - 17359, - 0, - 0, - 0, - 17371, - 0, - 17372, - 0, - 0, - 0, - 17393, - 0, - 0, - 0, - 0, - 17394, - 0, - 0, - 0, - 0, - 0, - 17395, - 0, - 0, - 17399, - 0, - 0, - 0, - 17401, - 17417, - 0, - 17418, - 0, - 17419, - 0, - 0, - 0, - 0, - 0, - 17422, - 17423, - 0, - 0, - 0, - 0, - 0, - 17424, - 0, - 0, - 0, - 0, - 0, - 17428, - 17429, - 17433, - 0, - 0, - 0, - 17437, - 0, - 0, - 17441, - 0, - 0, - 17442, - 0, - 0, - 17453, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17454, - 17456, - 17462, - 0, - 0, - 17466, - 0, - 0, - 17468, - 0, - 0, - 17469, - 0, - 0, - 0, - 0, - 17470, - 0, - 17475, - 0, - 0, - 0, - 0, - 0, - 17479, - 0, - 0, - 0, - 17483, - 17484, - 0, - 17485, - 0, - 17486, - 0, - 17491, - 17492, - 0, - 0, - 17493, - 0, - 17494, - 17495, - 0, - 0, - 0, - 17496, - 0, - 0, - 0, - 
17497, - 0, - 0, - 0, - 17502, - 0, - 0, - 0, - 0, - 0, - 17503, - 0, - 17505, - 0, - 17507, - 0, - 0, - 0, - 17512, - 17513, - 17514, - 0, - 0, - 17515, - 0, - 0, - 0, - 17519, - 0, - 0, - 0, - 17522, - 0, - 0, - 17523, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17527, - 0, - 0, - 0, - 17528, - 0, - 0, - 0, - 17534, - 0, - 0, - 0, - 0, - 17536, - 0, - 0, - 0, - 17539, - 0, - 17540, - 17543, - 17549, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17556, - 0, - 0, - 17558, - 0, - 17559, - 0, - 0, - 17560, - 0, - 0, - 0, - 17563, - 0, - 0, - 0, - 0, - 0, - 0, - 17564, - 0, - 0, - 17565, - 17566, - 0, - 17567, - 0, - 0, - 0, - 0, - 0, - 0, - 17569, - 17570, - 0, - 17575, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17581, - 0, - 0, - 0, - 17582, - 17583, - 0, - 17586, - 0, - 0, - 17587, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17588, - 0, - 0, - 0, - 0, - 17596, - 17597, - 0, - 0, - 17598, - 17600, - 0, - 0, - 0, - 0, - 0, - 0, - 17601, - 0, - 0, - 0, - 17604, - 0, - 0, - 17605, - 0, - 0, - 17607, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17612, - 0, - 0, - 17618, - 0, - 17621, - 17622, - 0, - 0, - 0, - 0, - 17623, - 0, - 0, - 17624, - 0, - 0, - 17630, - 0, - 0, - 17631, - 17633, - 17634, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17635, - 0, - 0, - 17636, - 0, - 0, - 17637, - 0, - 17638, - 0, - 17640, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17641, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17643, - 0, - 0, - 0, - 0, - 17645, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17646, - 17662, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17663, - 17664, - 0, - 17665, - 17666, - 0, - 0, - 0, - 17669, - 17671, - 17673, - 0, - 17679, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17684, - 0, - 0, - 0, - 17686, - 0, - 17714, - 0, - 0, - 17720, - 17722, - 17726, - 0, - 0, - 17728, - 0, - 0, - 17729, - 0, - 0, - 0, - 17732, - 0, - 17733, - 0, - 17734, - 0, - 0, - 0, - 17735, - 
0, - 0, - 0, - 0, - 17737, - 0, - 0, - 0, - 0, - 17739, - 0, - 0, - 0, - 17741, - 17742, - 0, - 0, - 0, - 0, - 17743, - 17744, - 17745, - 0, - 0, - 0, - 17749, - 0, - 17750, - 17751, - 17752, - 17754, - 17761, - 17762, - 0, - 17763, - 0, - 17766, - 0, - 17772, - 0, - 0, - 0, - 0, - 0, - 17775, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17776, - 0, - 0, - 17777, - 0, - 0, - 17778, - 17779, - 0, - 17782, - 17783, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17784, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17821, - 0, - 0, - 0, - 17822, - 0, - 0, - 0, - 17823, - 17825, - 0, - 0, - 0, - 0, - 0, - 17826, - 17831, - 17832, - 17833, - 0, - 0, - 17845, - 0, - 0, - 0, - 17846, - 0, - 0, - 0, - 17848, - 17850, - 17854, - 0, - 17855, - 0, - 0, - 17859, - 0, - 0, - 0, - 0, - 0, - 0, - 17860, - 17861, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 17870, - 17871, - 0, - 0, - 0, - 0, - 0, - 0, - 17872, - 0, - 0, - 0, - 17879, - 0, - 0, - 0, - 17881, - 17883, - 0, - 17884, - 0, - 17885, - 0, - 0, - 17886, - 0, - 0, - 17887, - 17891, - 17953, - 0, - 0, - 0, - 0, - 17954, - 0, - 0, - 17955, - 0, - 17968, - 0, - 0, - 17972, - 0, - 0, - 0, - 0, - 0, - 17974, - 0, - 0, - 0, - 0, - 17976, - 17978, - 0, - 0, - 17983, - 0, - 0, - 0, - 0, - 18003, - 0, - 0, - 0, - 0, - 0, - 18007, - 0, - 0, - 0, - 0, - 0, - 18009, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18010, - 0, - 0, - 0, - 0, - 0, - 0, - 18012, - 0, - 0, - 18014, - 0, - 0, - 0, - 18015, - 0, - 0, - 0, - 18016, - 0, - 18017, - 0, - 0, - 0, - 18030, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18031, - 0, - 0, - 18036, - 18037, - 18038, - 0, - 0, - 18049, - 18056, - 0, - 18057, - 18058, - 0, - 18059, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18062, - 0, - 0, - 0, - 0, - 18064, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18067, - 0, - 0, - 0, - 18068, - 0, - 0, - 18075, - 0, - 0, - 18078, - 18093, - 18094, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18097, - 0, - 0, - 0, - 0, - 0, - 18098, - 18100, - 0, - 
0, - 0, - 18108, - 0, - 18111, - 0, - 0, - 18112, - 0, - 18113, - 0, - 0, - 18115, - 18116, - 0, - 18118, - 0, - 0, - 0, - 0, - 18121, - 0, - 0, - 0, - 0, - 18123, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18124, - 0, - 0, - 0, - 0, - 18125, - 18126, - 0, - 18127, - 0, - 0, - 18128, - 18135, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18150, - 0, - 0, - 0, - 0, - 0, - 18151, - 18152, - 0, - 0, - 18156, - 18164, - 0, - 18166, - 18171, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18172, - 18183, - 0, - 18184, - 0, - 0, - 0, - 0, - 18185, - 0, - 18187, - 0, - 0, - 0, - 0, - 0, - 18188, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18189, - 0, - 0, - 18190, - 0, - 0, - 18191, - 18192, - 0, - 0, - 18194, - 18195, - 18196, - 0, - 0, - 0, - 18197, - 0, - 18203, - 0, - 18204, - 0, - 0, - 0, - 0, - 18205, - 0, - 0, - 0, - 18207, - 18208, - 0, - 0, - 18214, - 0, - 0, - 0, - 18215, - 18216, - 0, - 0, - 0, - 18220, - 0, - 0, - 18222, - 0, - 0, - 0, - 0, - 0, - 18223, - 0, - 18225, - 18231, - 0, - 18234, - 0, - 18235, - 0, - 0, - 0, - 0, - 18240, - 0, - 0, - 18241, - 18242, - 0, - 0, - 0, - 0, - 0, - 18243, - 18251, - 0, - 18253, - 0, - 18254, - 0, - 0, - 0, - 18266, - 0, - 0, - 0, - 0, - 0, - 0, - 18269, - 18270, - 18271, - 18273, - 18281, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18282, - 0, - 18283, - 0, - 18284, - 0, - 0, - 0, - 0, - 0, - 0, - 18285, - 0, - 18287, - 18289, - 0, - 0, - 18290, - 0, - 0, - 0, - 0, - 18308, - 0, - 0, - 0, - 18310, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18311, - 0, - 18312, - 18313, - 0, - 18315, - 0, - 0, - 18316, - 18320, - 0, - 18331, - 0, - 18332, - 0, - 18336, - 0, - 0, - 0, - 0, - 18337, - 0, - 18340, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18341, - 0, - 18344, - 18345, - 0, - 18346, - 0, - 0, - 0, - 0, - 0, - 18348, - 0, - 18351, - 0, - 0, - 18356, - 0, - 0, - 0, - 0, - 0, - 0, - 18357, - 0, - 0, - 0, - 0, - 0, - 18367, - 0, - 0, - 0, - 18368, - 0, - 18369, - 0, - 18370, - 18371, - 
0, - 0, - 0, - 18437, - 18444, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18445, - 18450, - 0, - 0, - 0, - 0, - 18451, - 0, - 18452, - 0, - 0, - 0, - 18453, - 0, - 0, - 0, - 0, - 0, - 18455, - 0, - 0, - 0, - 18456, - 0, - 18457, - 0, - 18460, - 0, - 0, - 18461, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18466, - 0, - 0, - 18467, - 0, - 0, - 0, - 0, - 18473, - 0, - 0, - 0, - 18476, - 0, - 18477, - 0, - 0, - 0, - 18478, - 18479, - 18480, - 0, - 0, - 0, - 18485, - 0, - 0, - 0, - 18486, - 0, - 0, - 0, - 0, - 0, - 0, - 18488, - 18490, - 0, - 0, - 0, - 0, - 0, - 0, - 18491, - 0, - 0, - 0, - 0, - 0, - 18495, - 0, - 0, - 18496, - 0, - 0, - 0, - 0, - 0, - 0, - 18505, - 0, - 18521, - 0, - 18522, - 18523, - 0, - 0, - 0, - 18525, - 18526, - 0, - 0, - 0, - 0, - 0, - 18527, - 0, - 0, - 0, - 0, - 18532, - 18533, - 0, - 18534, - 0, - 0, - 0, - 0, - 0, - 0, - 18535, - 18537, - 0, - 18538, - 0, - 0, - 0, - 0, - 0, - 0, - 18540, - 18541, - 18542, - 18543, - 0, - 18546, - 0, - 0, - 0, - 0, - 18553, - 18556, - 0, - 0, - 18558, - 0, - 0, - 18569, - 18571, - 0, - 0, - 0, - 18572, - 0, - 18574, - 0, - 0, - 0, - 0, - 18586, - 0, - 0, - 0, - 0, - 0, - 18588, - 0, - 0, - 18589, - 0, - 0, - 0, - 0, - 0, - 0, - 18590, - 0, - 18592, - 0, - 0, - 0, - 0, - 18594, - 0, - 0, - 0, - 18596, - 0, - 0, - 18597, - 18598, - 0, - 0, - 18601, - 0, - 0, - 0, - 0, - 18602, - 0, - 0, - 0, - 18603, - 18604, - 0, - 18605, - 0, - 0, - 0, - 0, - 18608, - 0, - 0, - 18611, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18612, - 0, - 18616, - 0, - 0, - 18617, - 18619, - 0, - 0, - 0, - 18628, - 0, - 0, - 0, - 18629, - 0, - 0, - 18630, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18631, - 0, - 18632, - 0, - 0, - 18635, - 18637, - 0, - 0, - 0, - 0, - 0, - 0, - 18641, - 18643, - 18648, - 0, - 18652, - 0, - 0, - 18653, - 0, - 18655, - 18656, - 0, - 0, - 0, - 18657, - 0, - 0, - 18666, - 18674, - 0, - 0, - 0, - 0, - 18677, - 18684, - 18685, - 0, - 0, - 18686, - 0, - 0, - 18690, - 0, - 0, - 0, - 0, - 0, 
- 0, - 0, - 18695, - 18696, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18697, - 0, - 0, - 18700, - 0, - 0, - 0, - 0, - 0, - 0, - 18702, - 0, - 18708, - 0, - 0, - 18709, - 0, - 18710, - 0, - 0, - 18711, - 0, - 18714, - 0, - 0, - 18718, - 0, - 0, - 0, - 0, - 0, - 0, - 18719, - 0, - 0, - 18722, - 0, - 18726, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18731, - 0, - 0, - 0, - 0, - 0, - 18739, - 18741, - 0, - 0, - 18742, - 0, - 18743, - 18744, - 18746, - 18748, - 0, - 18752, - 18753, - 0, - 0, - 18754, - 18763, - 0, - 18765, - 0, - 0, - 0, - 18766, - 0, - 0, - 0, - 18769, - 0, - 0, - 0, - 0, - 0, - 18773, - 18778, - 18779, - 18781, - 0, - 0, - 18784, - 18787, - 0, - 18788, - 0, - 18793, - 0, - 0, - 0, - 0, - 0, - 0, - 18795, - 0, - 0, - 18800, - 0, - 0, - 0, - 0, - 0, - 18801, - 18804, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18806, - 0, - 0, - 0, - 18811, - 18815, - 18816, - 0, - 0, - 0, - 0, - 18825, - 0, - 0, - 18827, - 18829, - 0, - 0, - 18830, - 0, - 0, - 0, - 0, - 18831, - 0, - 0, - 18832, - 0, - 0, - 0, - 0, - 18833, - 0, - 18840, - 0, - 18841, - 0, - 18842, - 0, - 0, - 0, - 0, - 18843, - 0, - 18844, - 0, - 0, - 0, - 0, - 0, - 0, - 18845, - 18846, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18848, - 0, - 0, - 0, - 18853, - 18860, - 0, - 0, - 18862, - 18866, - 0, - 0, - 18867, - 18869, - 0, - 0, - 18874, - 18881, - 18891, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18892, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18895, - 0, - 18896, - 0, - 0, - 0, - 18900, - 0, - 0, - 0, - 18901, - 0, - 18902, - 18915, - 18916, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18919, - 0, - 0, - 0, - 0, - 0, - 18920, - 0, - 0, - 0, - 18921, - 18929, - 0, - 0, - 0, - 0, - 18930, - 0, - 0, - 0, - 0, - 0, - 0, - 18932, - 0, - 0, - 0, - 0, - 18934, - 18942, - 0, - 0, - 0, - 18951, - 18957, - 0, - 0, - 0, - 0, - 18958, - 0, - 0, - 0, - 0, - 18959, - 18960, - 0, - 0, - 18961, - 0, - 0, - 18962, - 0, - 0, - 
0, - 0, - 18963, - 18964, - 0, - 0, - 0, - 18965, - 0, - 18967, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 18968, - 0, - 18969, - 0, - 18970, - 18973, - 18976, - 0, - 0, - 0, - 0, - 0, - 0, - 18977, - 0, - 0, - 0, - 18981, - 0, - 0, - 0, - 18990, - 0, - 18998, - 0, - 0, - 0, - 0, - 0, - 18999, - 19003, - 0, - 0, - 19005, - 0, - 0, - 0, - 19006, - 0, - 0, - 0, - 0, - 0, - 0, - 19008, - 19011, - 0, - 0, - 19018, - 0, - 0, - 19019, - 0, - 19024, - 0, - 19031, - 19032, - 0, - 19039, - 0, - 19041, - 19050, - 0, - 0, - 0, - 19051, - 19055, - 19056, - 0, - 19059, - 19063, - 19064, - 0, - 0, - 19088, - 0, - 0, - 0, - 19093, - 19094, - 0, - 0, - 0, - 0, - 19095, - 0, - 19096, - 0, - 0, - 0, - 19097, - 0, - 0, - 19098, - 0, - 19099, - 19100, - 0, - 0, - 19103, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19111, - 0, - 0, - 0, - 0, - 0, - 0, - 19112, - 0, - 0, - 0, - 19116, - 19117, - 0, - 19121, - 19122, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19123, - 19124, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19125, - 19126, - 0, - 19128, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19129, - 19130, - 19131, - 19132, - 0, - 0, - 19146, - 0, - 0, - 19147, - 19156, - 19158, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19182, - 19185, - 0, - 0, - 19187, - 0, - 0, - 0, - 19193, - 0, - 0, - 0, - 0, - 0, - 19194, - 0, - 19197, - 0, - 0, - 0, - 0, - 19198, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19202, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19203, - 0, - 19205, - 19210, - 0, - 0, - 0, - 19213, - 0, - 19218, - 0, - 0, - 0, - 19223, - 19229, - 0, - 0, - 19230, - 0, - 0, - 19231, - 19232, - 19233, - 19239, - 0, - 0, - 0, - 0, - 0, - 19240, - 0, - 19248, - 19249, - 0, - 0, - 0, - 0, - 19254, - 0, - 19256, - 19258, - 19259, - 0, - 0, - 19261, - 0, - 19266, - 0, - 0, - 0, - 19272, - 0, - 19278, - 19281, - 19282, - 0, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 19283, - 0, - 0, - 19284, - 0, - 0, - 19285, - 19287, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19288, - 19291, - 0, - 19292, - 0, - 0, - 0, - 0, - 19297, - 0, - 19298, - 0, - 0, - 0, - 0, - 19302, - 19303, - 0, - 0, - 0, - 0, - 19304, - 19305, - 0, - 0, - 0, - 0, - 19314, - 0, - 0, - 19315, - 0, - 0, - 19321, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19322, - 0, - 19333, - 0, - 19334, - 19335, - 0, - 19336, - 19337, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19346, - 0, - 0, - 19353, - 0, - 19354, - 19362, - 0, - 19366, - 19367, - 0, - 0, - 19369, - 0, - 19375, - 0, - 19377, - 19380, - 19388, - 0, - 0, - 0, - 0, - 0, - 19389, - 19390, - 0, - 0, - 0, - 0, - 19392, - 0, - 0, - 0, - 0, - 0, - 19402, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19412, - 0, - 0, - 19413, - 19422, - 0, - 19424, - 0, - 0, - 0, - 19425, - 0, - 0, - 0, - 19428, - 0, - 0, - 0, - 0, - 19431, - 0, - 0, - 0, - 0, - 0, - 19432, - 0, - 0, - 0, - 0, - 0, - 19448, - 19459, - 0, - 0, - 19461, - 0, - 19462, - 19463, - 0, - 19467, - 19474, - 19482, - 0, - 0, - 0, - 0, - 19494, - 0, - 0, - 0, - 0, - 19501, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19502, - 19504, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19505, - 0, - 0, - 0, - 0, - 19506, - 19507, - 0, - 0, - 0, - 19508, - 0, - 0, - 19511, - 0, - 0, - 19514, - 0, - 19515, - 0, - 19516, - 0, - 19518, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19530, - 0, - 19537, - 19538, - 0, - 19543, - 19546, - 0, - 19547, - 19551, - 0, - 0, - 0, - 0, - 0, - 0, - 19552, - 19553, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19555, - 0, - 0, - 19556, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19560, - 19561, - 0, - 0, - 19562, - 0, - 0, - 0, - 0, - 0, - 0, - 19565, - 19567, - 0, - 19568, - 0, - 0, - 0, - 19569, - 19570, - 0, - 19578, - 0, - 0, - 0, - 0, - 19580, - 0, - 0, - 0, - 0, - 19581, - 19584, - 0, - 0, 
- 0, - 0, - 0, - 0, - 0, - 19585, - 19586, - 0, - 0, - 0, - 19587, - 19588, - 0, - 19589, - 0, - 0, - 0, - 0, - 0, - 0, - 19592, - 19593, - 19599, - 0, - 19600, - 0, - 0, - 19604, - 0, - 0, - 19605, - 0, - 19606, - 19608, - 19610, - 0, - 19613, - 19614, - 0, - 0, - 0, - 0, - 0, - 0, - 19616, - 19617, - 0, - 0, - 19618, - 0, - 0, - 19619, - 0, - 0, - 0, - 19620, - 19621, - 19631, - 0, - 0, - 19632, - 19634, - 19636, - 0, - 19643, - 0, - 0, - 19644, - 19658, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19659, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19675, - 19677, - 0, - 0, - 0, - 0, - 19679, - 0, - 19683, - 0, - 19684, - 0, - 0, - 0, - 0, - 0, - 0, - 19687, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19688, - 19689, - 19692, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19695, - 19697, - 0, - 0, - 0, - 0, - 0, - 19698, - 19699, - 0, - 0, - 19700, - 0, - 19702, - 0, - 0, - 19703, - 0, - 0, - 0, - 0, - 0, - 0, - 19704, - 19708, - 0, - 19710, - 0, - 19713, - 0, - 0, - 0, - 19715, - 0, - 0, - 0, - 0, - 19718, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19720, - 0, - 19722, - 0, - 0, - 19725, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19730, - 0, - 0, - 0, - 0, - 0, - 19731, - 0, - 19734, - 19735, - 19739, - 0, - 0, - 19740, - 0, - 19741, - 0, - 0, - 0, - 19746, - 0, - 0, - 19747, - 0, - 19771, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19772, - 19775, - 0, - 0, - 0, - 0, - 0, - 0, - 19778, - 0, - 0, - 0, - 0, - 0, - 19779, - 0, - 0, - 19780, - 19790, - 0, - 19791, - 0, - 0, - 19792, - 0, - 0, - 0, - 19793, - 0, - 0, - 19796, - 19797, - 0, - 0, - 0, - 19799, - 0, - 0, - 0, - 19801, - 0, - 0, - 0, - 0, - 19803, - 0, - 19804, - 0, - 19805, - 0, - 0, - 19807, - 0, - 0, - 0, - 19808, - 0, - 0, - 0, - 0, - 0, - 0, - 19809, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19816, - 0, - 19821, - 0, - 19822, - 19830, - 19831, - 0, - 0, - 0, - 19833, - 0, - 0, - 0, 
- 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19838, - 0, - 0, - 0, - 0, - 19839, - 0, - 0, - 19843, - 0, - 0, - 0, - 0, - 19845, - 0, - 0, - 0, - 0, - 19847, - 0, - 0, - 19848, - 0, - 19849, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19851, - 0, - 0, - 0, - 19854, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19864, - 0, - 19865, - 0, - 19866, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19868, - 0, - 0, - 19870, - 0, - 0, - 19871, - 0, - 0, - 19872, - 19873, - 19875, - 0, - 19880, - 19882, - 19884, - 0, - 0, - 19885, - 19886, - 19888, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19890, - 19892, - 19893, - 0, - 0, - 19894, - 0, - 0, - 0, - 19895, - 0, - 19896, - 19902, - 0, - 0, - 19903, - 0, - 0, - 19905, - 0, - 0, - 0, - 19906, - 0, - 19908, - 0, - 19909, - 19911, - 0, - 0, - 0, - 19913, - 19920, - 0, - 19938, - 19939, - 19940, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 19942, - 0, - 19943, - 0, - 19945, - 0, - 0, - 0, - 19951, - 19952, - 19954, - 19960, - 0, - 19965, - 0, - 19971, - 0, - 0, - 0, - 0, - 0, - 19975, - 0, - 19976, - 0, - 19990, - 0, - 0, - 19991, - 0, - 19993, - 0, - 19995, - 0, - 0, - 0, - 19998, - 19999, - 20001, - 0, - 20003, - 20005, - 0, - 20011, - 20012, - 0, - 0, - 0, - 0, - 0, - 0, - 20014, - 0, - 20020, - 0, - 0, - 0, - 0, - 20021, - 0, - 0, - 0, - 0, - 0, - 20023, - 20024, - 0, - 0, - 0, - 0, - 0, - 20025, - 0, - 0, - 20027, - 0, - 0, - 20029, - 0, - 0, - 20032, - 0, - 0, - 0, - 0, - 20044, - 20045, - 0, - 20048, - 20049, - 0, - 0, - 20050, - 0, - 20052, - 0, - 0, - 20054, - 20057, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20059, - 0, - 0, - 20061, - 0, - 20062, - 0, - 20064, - 0, - 0, - 20066, - 0, - 0, - 20067, - 0, - 0, - 0, - 0, - 20069, - 0, - 0, - 0, - 0, - 0, - 0, - 20070, - 20071, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20072, - 0, - 0, - 20073, - 20074, - 0, - 0, - 0, - 0, - 0, - 20075, - 0, - 20078, - 0, - 0, - 0, - 0, - 20080, - 0, - 20081, - 0, - 0, - 0, - 0, - 0, - 0, - 20095, - 0, - 20098, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 20107, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20112, - 0, - 0, - 0, - 20113, - 20114, - 0, - 0, - 0, - 20115, - 20123, - 20124, - 0, - 0, - 0, - 20131, - 20133, - 20134, - 0, - 0, - 0, - 0, - 20136, - 0, - 0, - 20137, - 20138, - 20150, - 0, - 20152, - 0, - 0, - 0, - 20153, - 0, - 0, - 20154, - 0, - 0, - 0, - 20158, - 0, - 20163, - 0, - 0, - 20164, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20166, - 0, - 20168, - 0, - 20170, - 0, - 20175, - 0, - 0, - 20178, - 0, - 0, - 0, - 0, - 20223, - 0, - 0, - 0, - 0, - 20224, - 0, - 20226, - 0, - 0, - 20230, - 0, - 20231, - 0, - 0, - 0, - 0, - 20232, - 0, - 0, - 20233, - 20234, - 0, - 20244, - 0, - 20247, - 0, - 0, - 0, - 0, - 0, - 0, - 20249, - 0, - 0, - 0, - 20250, - 0, - 0, - 0, - 0, - 20251, - 0, - 20253, - 0, - 20254, - 0, - 0, - 0, - 0, - 20256, - 0, - 0, - 20264, - 0, - 0, - 0, - 0, - 20266, - 0, - 0, - 0, - 20278, - 0, - 0, - 20279, - 20282, - 0, - 0, - 0, - 0, - 0, - 20283, - 0, - 20284, - 0, - 20285, - 0, - 20287, - 20290, - 0, - 0, - 0, - 0, - 20292, - 0, - 0, - 0, - 0, - 20293, - 20297, - 0, - 0, - 0, - 0, - 0, - 0, - 20299, - 0, - 20300, - 20303, - 0, - 0, - 0, - 0, - 0, - 0, - 20307, - 0, - 0, - 20308, - 0, - 20309, - 0, - 20310, - 0, - 0, - 0, - 0, - 0, - 0, - 20312, - 0, - 0, - 0, - 20314, - 0, - 0, - 0, - 0, - 20315, - 20316, - 0, - 20322, - 0, - 0, - 0, - 0, - 0, - 0, - 20339, - 0, - 0, - 0, - 20342, - 0, - 0, - 0, - 0, - 20352, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20362, - 0, - 0, - 20365, - 0, - 20375, - 20377, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20378, - 20379, - 0, - 20380, - 0, - 0, - 20381, - 0, - 20382, - 0, - 20383, - 0, - 20388, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20390, - 20392, - 20393, - 0, - 0, - 20395, - 0, - 0, - 0, - 0, - 0, - 20396, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20398, - 20415, - 0, - 0, - 0, - 20417, - 0, - 0, - 20420, - 0, - 0, - 20426, - 20428, - 0, - 20431, - 0, - 0, - 20432, - 0, - 20433, - 
20434, - 20435, - 0, - 0, - 0, - 0, - 20440, - 0, - 0, - 0, - 0, - 0, - 20442, - 0, - 20443, - 0, - 20446, - 0, - 0, - 0, - 0, - 20448, - 0, - 20451, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20452, - 20453, - 0, - 0, - 20454, - 0, - 0, - 0, - 0, - 0, - 0, - 20457, - 0, - 20458, - 0, - 0, - 0, - 20465, - 0, - 0, - 0, - 0, - 0, - 20469, - 0, - 0, - 0, - 20473, - 0, - 20476, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20477, - 0, - 0, - 20485, - 0, - 0, - 20486, - 0, - 0, - 20487, - 0, - 20496, - 0, - 20497, - 0, - 0, - 20498, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20499, - 20500, - 0, - 20501, - 0, - 0, - 0, - 0, - 0, - 20520, - 20527, - 0, - 20529, - 0, - 0, - 0, - 0, - 20539, - 0, - 0, - 20540, - 0, - 0, - 0, - 20543, - 0, - 0, - 0, - 20546, - 0, - 0, - 0, - 0, - 0, - 20548, - 0, - 0, - 20563, - 0, - 0, - 20564, - 0, - 20566, - 0, - 0, - 0, - 0, - 0, - 20589, - 0, - 0, - 0, - 0, - 20590, - 0, - 0, - 20593, - 20594, - 0, - 0, - 0, - 0, - 20595, - 0, - 20597, - 20598, - 0, - 0, - 0, - 20618, - 20620, - 0, - 0, - 0, - 0, - 20621, - 0, - 0, - 0, - 0, - 20627, - 0, - 0, - 0, - 0, - 0, - 20628, - 0, - 0, - 0, - 20629, - 0, - 20630, - 0, - 0, - 20639, - 0, - 0, - 0, - 0, - 0, - 20707, - 0, - 0, - 20709, - 0, - 0, - 0, - 20713, - 20714, - 0, - 0, - 0, - 0, - 0, - 20724, - 20725, - 0, - 0, - 0, - 0, - 20726, - 20728, - 20729, - 0, - 20733, - 0, - 20734, - 0, - 20735, - 20736, - 0, - 20737, - 0, - 0, - 20744, - 0, - 20745, - 0, - 20748, - 0, - 0, - 20749, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20750, - 0, - 0, - 0, - 0, - 20754, - 0, - 0, - 0, - 20761, - 0, - 0, - 20763, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20766, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20767, - 0, - 0, - 0, - 0, - 20768, - 0, - 20769, - 20777, - 0, - 0, - 0, - 0, - 0, - 0, - 20785, - 0, - 0, - 0, - 20786, - 20795, - 20801, - 0, - 20802, - 0, - 20807, - 0, - 0, - 20808, - 0, - 0, - 20810, - 0, - 0, - 20811, - 0, - 20812, - 0, - 0, - 0, - 0, - 0, - 20813, - 
0, - 0, - 20818, - 20820, - 20821, - 0, - 0, - 0, - 20822, - 0, - 20823, - 0, - 0, - 0, - 20826, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20829, - 20830, - 20831, - 0, - 20832, - 20836, - 0, - 0, - 20839, - 0, - 0, - 20840, - 20842, - 0, - 20843, - 0, - 20844, - 0, - 20854, - 0, - 0, - 0, - 20855, - 0, - 0, - 0, - 0, - 20856, - 0, - 0, - 0, - 20869, - 0, - 0, - 20871, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20873, - 0, - 0, - 0, - 0, - 0, - 20876, - 0, - 0, - 0, - 0, - 0, - 20880, - 0, - 0, - 20882, - 0, - 0, - 0, - 0, - 20883, - 20884, - 0, - 0, - 20890, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20891, - 0, - 0, - 0, - 0, - 0, - 20905, - 0, - 20906, - 20910, - 0, - 0, - 20912, - 20915, - 0, - 0, - 0, - 0, - 0, - 20916, - 0, - 20917, - 0, - 20919, - 20920, - 20922, - 0, - 20927, - 0, - 20928, - 20929, - 20930, - 0, - 0, - 20935, - 0, - 0, - 20939, - 0, - 0, - 20941, - 0, - 0, - 0, - 20943, - 0, - 0, - 0, - 20946, - 20947, - 0, - 0, - 0, - 0, - 0, - 20950, - 0, - 20954, - 0, - 0, - 20955, - 20964, - 0, - 0, - 20967, - 0, - 0, - 0, - 0, - 0, - 20973, - 20975, - 0, - 0, - 0, - 20984, - 0, - 20987, - 20988, - 0, - 0, - 0, - 0, - 0, - 20989, - 0, - 0, - 0, - 20995, - 0, - 20998, - 0, - 20999, - 0, - 0, - 0, - 0, - 21000, - 21001, - 0, - 0, - 0, - 0, - 21008, - 0, - 21010, - 0, - 21016, - 0, - 0, - 0, - 21017, - 21018, - 0, - 0, - 0, - 0, - 0, - 21021, - 21026, - 21027, - 21028, - 0, - 0, - 21029, - 0, - 0, - 0, - 0, - 0, - 21030, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21031, - 21032, - 0, - 0, - 0, - 0, - 0, - 21037, - 0, - 0, - 21038, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21039, - 0, - 21041, - 0, - 21046, - 21047, - 0, - 0, - 0, - 21049, - 21053, - 0, - 0, - 21057, - 21064, - 21065, - 0, - 0, - 21066, - 21067, - 0, - 0, - 0, - 21069, - 0, - 0, - 0, - 21071, - 21072, - 0, - 0, - 21073, - 0, - 21074, - 0, - 0, - 21078, - 0, - 0, - 0, - 0, - 21079, - 0, - 0, - 21080, - 21081, - 0, - 0, - 21086, - 21087, - 0, - 21089, - 0, - 0, - 0, - 0, 
- 0, - 0, - 0, - 21091, - 0, - 21093, - 0, - 21094, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21095, - 0, - 0, - 0, - 0, - 0, - 21096, - 0, - 21098, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21099, - 0, - 0, - 21100, - 21101, - 21102, - 0, - 0, - 0, - 0, - 0, - 21103, - 0, - 21104, - 0, - 0, - 0, - 0, - 0, - 21105, - 21108, - 21109, - 0, - 0, - 21112, - 21113, - 0, - 0, - 0, - 0, - 0, - 0, - 21115, - 21122, - 21123, - 0, - 0, - 0, - 0, - 0, - 21125, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21129, - 21131, - 0, - 0, - 21134, - 0, - 0, - 0, - 21137, - 21142, - 0, - 21143, - 0, - 0, - 21144, - 0, - 21145, - 21146, - 0, - 21152, - 21154, - 21155, - 21156, - 0, - 0, - 0, - 21160, - 0, - 0, - 0, - 0, - 0, - 0, - 21161, - 0, - 21164, - 0, - 21166, - 0, - 0, - 0, - 0, - 21170, - 0, - 0, - 0, - 0, - 21171, - 0, - 0, - 21172, - 0, - 21174, - 0, - 21175, - 0, - 0, - 0, - 0, - 0, - 21176, - 21179, - 21188, - 0, - 0, - 0, - 21189, - 0, - 0, - 21190, - 0, - 0, - 0, - 21192, - 0, - 0, - 21193, - 0, - 0, - 0, - 21198, - 0, - 21212, - 0, - 0, - 21213, - 0, - 0, - 0, - 0, - 0, - 0, - 21215, - 21216, - 0, - 0, - 21223, - 21225, - 0, - 21226, - 0, - 0, - 0, - 0, - 21227, - 21228, - 0, - 0, - 21229, - 0, - 0, - 0, - 0, - 21230, - 21236, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21237, - 0, - 0, - 21238, - 21239, - 0, - 0, - 0, - 0, - 21256, - 0, - 0, - 0, - 0, - 0, - 21257, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21259, - 0, - 0, - 0, - 21263, - 0, - 21272, - 0, - 21274, - 0, - 21282, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21283, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21294, - 0, - 0, - 21297, - 0, - 0, - 0, - 0, - 21298, - 0, - 0, - 0, - 21299, - 0, - 21300, - 21302, - 0, - 21316, - 0, - 21318, - 21322, - 21323, - 0, - 21324, - 0, - 21326, - 0, - 0, - 0, - 21327, - 21328, - 0, - 0, - 0, - 21352, - 0, - 0, - 21354, - 21361, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21362, - 0, - 0, - 0, - 21363, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
0, - 21366, - 0, - 0, - 21367, - 21372, - 21374, - 0, - 0, - 0, - 21375, - 21377, - 0, - 21378, - 0, - 0, - 0, - 21380, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21381, - 0, - 0, - 0, - 0, - 0, - 0, - 21382, - 0, - 21383, - 0, - 0, - 21384, - 0, - 0, - 21385, - 0, - 0, - 0, - 0, - 21389, - 21390, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21397, - 21398, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21399, - 0, - 21400, - 0, - 0, - 0, - 0, - 21402, - 0, - 0, - 0, - 21403, - 21404, - 0, - 21405, - 21406, - 0, - 0, - 0, - 21407, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21408, - 0, - 0, - 0, - 0, - 21409, - 0, - 21421, - 0, - 21422, - 0, - 0, - 0, - 21425, - 21428, - 0, - 0, - 0, - 0, - 21429, - 0, - 0, - 0, - 0, - 0, - 21433, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21434, - 0, - 21443, - 0, - 21444, - 21449, - 0, - 21452, - 0, - 21453, - 21454, - 0, - 0, - 0, - 21457, - 0, - 0, - 21458, - 0, - 0, - 0, - 21460, - 21461, - 0, - 0, - 21464, - 0, - 0, - 0, - 21473, - 21478, - 0, - 0, - 21479, - 0, - 0, - 21481, - 21483, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21484, - 0, - 0, - 21485, - 21486, - 0, - 0, - 21488, - 0, - 0, - 0, - 0, - 0, - 0, - 21523, - 0, - 0, - 21525, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21526, - 0, - 0, - 0, - 0, - 0, - 0, - 21529, - 21530, - 0, - 0, - 21531, - 0, - 0, - 21533, - 0, - 0, - 21539, - 21564, - 0, - 21567, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21575, - 0, - 0, - 0, - 0, - 21577, - 0, - 0, - 0, - 0, - 0, - 21591, - 0, - 0, - 21604, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21605, - 0, - 21606, - 0, - 0, - 21617, - 21618, - 21619, - 21620, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21623, - 0, - 0, - 0, - 0, - 21631, - 0, - 21635, - 0, - 0, - 0, - 0, - 21639, - 21646, - 21653, - 21662, - 0, - 0, - 21663, - 21664, - 0, - 21666, - 0, - 0, - 21667, - 0, - 21670, - 21672, - 21673, - 0, - 21674, - 21683, - 0, - 0, - 0, - 0, - 0, - 21684, - 
0, - 21694, - 0, - 0, - 0, - 0, - 21695, - 21700, - 0, - 21703, - 0, - 21704, - 0, - 0, - 21709, - 0, - 0, - 0, - 21710, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21711, - 0, - 0, - 0, - 21712, - 0, - 21717, - 0, - 21730, - 0, - 0, - 0, - 21731, - 21733, - 0, - 0, - 0, - 0, - 21737, - 21741, - 21742, - 0, - 21747, - 0, - 0, - 0, - 21749, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21750, - 0, - 0, - 0, - 0, - 0, - 21752, - 0, - 0, - 0, - 0, - 21753, - 0, - 0, - 0, - 0, - 0, - 0, - 21755, - 21756, - 0, - 21757, - 0, - 0, - 0, - 0, - 0, - 0, - 21760, - 0, - 0, - 21763, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21764, - 0, - 0, - 21766, - 0, - 0, - 21767, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21773, - 0, - 21774, - 0, - 0, - 21775, - 0, - 0, - 0, - 0, - 21776, - 0, - 0, - 21777, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21780, - 21787, - 21788, - 21791, - 0, - 0, - 0, - 21797, - 0, - 0, - 0, - 0, - 0, - 21805, - 0, - 0, - 0, - 0, - 21806, - 0, - 21807, - 21809, - 0, - 21810, - 21811, - 0, - 21817, - 21819, - 21820, - 0, - 21823, - 0, - 21824, - 0, - 0, - 21825, - 0, - 0, - 21826, - 21832, - 0, - 0, - 0, - 0, - 0, - 21833, - 21848, - 21849, - 0, - 0, - 21867, - 21870, - 21871, - 21873, - 0, - 0, - 0, - 21874, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21875, - 0, - 21878, - 0, - 0, - 0, - 21879, - 0, - 21881, - 21886, - 0, - 0, - 0, - 0, - 21887, - 0, - 0, - 21888, - 21894, - 21895, - 21897, - 0, - 21901, - 0, - 21904, - 0, - 0, - 21906, - 0, - 0, - 0, - 21909, - 21910, - 21911, - 0, - 0, - 21912, - 0, - 0, - 21913, - 21914, - 21915, - 0, - 21919, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21921, - 0, - 0, - 21922, - 21933, - 21939, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21944, - 0, - 0, - 0, - 0, - 0, - 21945, - 0, - 21947, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21949, - 0, - 0, - 0, - 21950, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21951, - 0, - 21952, - 0, - 0, - 0, - 0, 
- 0, - 0, - 0, - 0, - 0, - 21954, - 21957, - 0, - 0, - 0, - 0, - 21958, - 0, - 21959, - 0, - 0, - 0, - 0, - 0, - 0, - 21962, - 21963, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 21964, - 21965, - 0, - 0, - 21969, - 21970, - 0, - 0, - 0, - 21974, - 0, - 0, - 21980, - 21981, - 0, - 21982, - 0, - 0, - 0, - 0, - 0, - 21985, - 0, - 21988, - 0, - 21992, - 0, - 21999, - 0, - 0, - 0, - 0, - 0, - 0, - 22001, - 0, - 22002, - 0, - 0, - 0, - 0, - 0, - 0, - 22003, - 0, - 0, - 0, - 0, - 0, - 22004, - 0, - 0, - 0, - 22008, - 0, - 22009, - 22015, - 0, - 0, - 22016, - 0, - 0, - 0, - 22017, - 22019, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22020, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22021, - 22037, - 0, - 22039, - 0, - 0, - 0, - 22040, - 0, - 0, - 0, - 22048, - 22049, - 0, - 0, - 22053, - 22055, - 22056, - 22059, - 0, - 0, - 22060, - 22061, - 0, - 0, - 22064, - 0, - 0, - 0, - 0, - 22066, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22073, - 0, - 0, - 0, - 22074, - 22075, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22076, - 0, - 0, - 0, - 0, - 22077, - 22084, - 22099, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22104, - 0, - 0, - 22107, - 0, - 22108, - 0, - 22109, - 0, - 22110, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22111, - 22119, - 0, - 22120, - 22122, - 0, - 0, - 0, - 0, - 22125, - 0, - 0, - 0, - 22128, - 22129, - 0, - 0, - 0, - 0, - 0, - 0, - 22141, - 0, - 0, - 0, - 22142, - 0, - 0, - 22144, - 22146, - 0, - 22148, - 22149, - 22151, - 22154, - 0, - 0, - 0, - 22162, - 0, - 0, - 0, - 0, - 22164, - 22177, - 0, - 0, - 0, - 0, - 22179, - 0, - 22182, - 22183, - 0, - 0, - 22184, - 22188, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22190, - 0, - 22194, - 22201, - 0, - 0, - 22208, - 0, - 22209, - 0, - 22212, - 0, - 0, - 22215, - 0, - 22223, - 22231, - 0, - 0, - 22232, - 0, - 22234, - 0, - 0, - 22235, - 22236, - 0, - 22237, - 0, - 22240, - 0, - 0, - 0, - 0, - 0, - 22241, - 0, - 0, - 0, - 22242, - 22246, - 22247, - 0, - 0, - 0, - 22259, - 22268, - 0, - 22269, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
22270, - 0, - 0, - 0, - 0, - 22271, - 0, - 22272, - 0, - 22277, - 0, - 0, - 0, - 0, - 0, - 22278, - 22280, - 22283, - 22286, - 0, - 0, - 22287, - 22289, - 0, - 0, - 22290, - 0, - 22293, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22295, - 0, - 22301, - 22302, - 0, - 0, - 0, - 22305, - 0, - 22308, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22315, - 0, - 0, - 0, - 22317, - 0, - 22334, - 0, - 0, - 0, - 22335, - 0, - 0, - 0, - 0, - 0, - 22336, - 0, - 22338, - 22344, - 0, - 22347, - 22349, - 0, - 22350, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22357, - 0, - 0, - 0, - 0, - 0, - 22358, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22359, - 22360, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22361, - 22366, - 0, - 0, - 22369, - 0, - 22370, - 22373, - 0, - 0, - 0, - 0, - 0, - 22375, - 0, - 22377, - 0, - 0, - 0, - 0, - 0, - 22378, - 0, - 0, - 0, - 0, - 22381, - 0, - 0, - 0, - 0, - 22382, - 0, - 22383, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22391, - 0, - 0, - 22392, - 22395, - 22396, - 22402, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22405, - 0, - 0, - 22406, - 0, - 0, - 22408, - 0, - 0, - 22409, - 22410, - 0, - 0, - 0, - 0, - 0, - 0, - 22424, - 0, - 0, - 0, - 0, - 22426, - 0, - 0, - 0, - 22427, - 0, - 22428, - 0, - 22432, - 0, - 22435, - 22442, - 22443, - 0, - 0, - 0, - 0, - 22444, - 0, - 0, - 0, - 0, - 0, - 22446, - 0, - 22454, - 0, - 22455, - 0, - 0, - 0, - 22465, - 0, - 22470, - 0, - 22471, - 0, - 0, - 0, - 0, - 22472, - 22473, - 0, - 22487, - 0, - 0, - 0, - 22488, - 0, - 0, - 0, - 0, - 22489, - 0, - 0, - 22499, - 0, - 0, - 0, - 0, - 0, - 0, - 22514, - 0, - 0, - 22515, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22516, - 0, - 0, - 0, - 22517, - 22520, - 0, - 0, - 0, - 22534, - 0, - 0, - 22535, - 0, - 0, - 22536, - 0, - 22540, - 22553, - 0, - 22555, - 0, - 0, - 0, - 0, - 22561, - 0, - 0, - 22562, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22566, - 0, - 0, - 0, - 0, - 22567, - 22568, - 0, - 0, - 22575, - 0, - 22579, - 
0, - 22582, - 22583, - 22585, - 0, - 0, - 0, - 0, - 0, - 22586, - 0, - 0, - 22587, - 0, - 0, - 22590, - 0, - 0, - 0, - 0, - 0, - 22591, - 0, - 22592, - 0, - 0, - 0, - 0, - 0, - 22593, - 0, - 22602, - 0, - 0, - 22604, - 0, - 0, - 22609, - 0, - 0, - 22618, - 0, - 0, - 0, - 0, - 0, - 0, - 22619, - 0, - 22624, - 22625, - 0, - 0, - 22638, - 0, - 0, - 0, - 0, - 0, - 22639, - 0, - 0, - 22640, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22644, - 0, - 22645, - 22647, - 0, - 0, - 0, - 0, - 22652, - 22653, - 0, - 0, - 0, - 22654, - 0, - 22655, - 0, - 0, - 0, - 22656, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22673, - 22675, - 22676, - 0, - 0, - 22678, - 22679, - 0, - 22691, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22693, - 0, - 0, - 22696, - 0, - 22699, - 22707, - 22708, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22718, - 0, - 22719, - 0, - 0, - 0, - 0, - 22723, - 0, - 0, - 0, - 22724, - 22725, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22726, - 22728, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22729, - 0, - 0, - 22731, - 0, - 0, - 0, - 0, - 22732, - 22735, - 22736, - 0, - 0, - 0, - 0, - 22739, - 0, - 22749, - 0, - 0, - 22751, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22758, - 0, - 0, - 0, - 0, - 0, - 22760, - 0, - 0, - 0, - 0, - 0, - 22764, - 22765, - 22766, - 0, - 22768, - 0, - 0, - 0, - 0, - 0, - 22769, - 22770, - 0, - 0, - 0, - 0, - 0, - 0, - 22771, - 0, - 0, - 22772, - 22775, - 0, - 22776, - 22777, - 22780, - 0, - 0, - 22782, - 22784, - 0, - 22787, - 0, - 22789, - 22796, - 0, - 0, - 0, - 0, - 0, - 22798, - 0, - 0, - 0, - 0, - 0, - 0, - 22802, - 0, - 22803, - 22804, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22805, - 0, - 0, - 22810, - 22811, - 22814, - 22816, - 0, - 22825, - 22826, - 0, - 22831, - 22833, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22834, - 0, - 22836, - 22838, - 0, - 22839, - 0, - 0, - 0, - 0, - 0, - 22840, - 0, - 22847, - 0, - 0, - 0, - 0, - 0, - 22856, - 22857, - 0, - 22858, - 22859, - 0, - 0, - 
22862, - 0, - 0, - 22864, - 0, - 0, - 0, - 0, - 22865, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22866, - 0, - 22867, - 22868, - 0, - 0, - 0, - 0, - 22869, - 0, - 22871, - 0, - 22872, - 0, - 22873, - 22881, - 22882, - 22884, - 22885, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22886, - 22887, - 0, - 22894, - 0, - 22895, - 0, - 0, - 0, - 22900, - 0, - 22901, - 0, - 0, - 0, - 0, - 22904, - 0, - 0, - 0, - 0, - 22905, - 22907, - 0, - 0, - 0, - 22915, - 22917, - 0, - 0, - 22918, - 0, - 0, - 0, - 22920, - 0, - 0, - 0, - 22929, - 22930, - 0, - 0, - 0, - 22941, - 22942, - 0, - 0, - 0, - 22943, - 0, - 0, - 0, - 22944, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22946, - 0, - 22947, - 0, - 0, - 22954, - 0, - 22956, - 0, - 0, - 22962, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22963, - 0, - 0, - 22964, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 22965, - 0, - 22968, - 0, - 0, - 0, - 22969, - 0, - 0, - 0, - 0, - 0, - 22970, - 0, - 22971, - 0, - 0, - 0, - 0, - 0, - 22978, - 0, - 0, - 22979, - 0, - 22987, - 0, - 0, - 22989, - 0, - 0, - 0, - 0, - 0, - 0, - 22990, - 0, - 23005, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23006, - 23007, - 23008, - 0, - 0, - 23023, - 23024, - 23029, - 0, - 0, - 0, - 0, - 23030, - 0, - 0, - 0, - 0, - 0, - 23032, - 0, - 0, - 0, - 0, - 0, - 23035, - 0, - 0, - 0, - 0, - 23038, - 0, - 0, - 0, - 23048, - 0, - 23049, - 23052, - 23053, - 23060, - 23061, - 0, - 23063, - 0, - 0, - 0, - 0, - 23067, - 23068, - 0, - 0, - 0, - 23069, - 23073, - 0, - 0, - 0, - 23127, - 0, - 23128, - 0, - 0, - 0, - 0, - 0, - 23129, - 0, - 23138, - 23141, - 0, - 23149, - 0, - 0, - 23150, - 0, - 0, - 0, - 23152, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23154, - 0, - 0, - 0, - 0, - 23157, - 23159, - 23160, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23180, - 0, - 0, - 0, - 0, - 23181, - 0, - 0, - 23188, - 0, - 23189, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23195, - 0, - 0, - 23196, - 23199, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23202, - 0, - 23204, - 0, 
- 23207, - 0, - 23209, - 23210, - 0, - 0, - 0, - 0, - 0, - 0, - 23227, - 23229, - 0, - 0, - 23230, - 23234, - 23238, - 0, - 0, - 0, - 23245, - 23246, - 23248, - 0, - 0, - 0, - 0, - 23249, - 23254, - 0, - 0, - 0, - 23265, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23268, - 0, - 23276, - 0, - 0, - 0, - 0, - 23277, - 0, - 23297, - 0, - 23298, - 0, - 0, - 0, - 0, - 23299, - 0, - 23302, - 0, - 0, - 23303, - 23312, - 0, - 0, - 23314, - 0, - 23320, - 0, - 0, - 0, - 0, - 23324, - 0, - 23325, - 0, - 23328, - 0, - 23334, - 0, - 0, - 0, - 23337, - 0, - 0, - 0, - 0, - 23343, - 23344, - 23346, - 0, - 23348, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23353, - 0, - 0, - 0, - 0, - 23355, - 0, - 23356, - 23358, - 0, - 0, - 0, - 23359, - 23360, - 0, - 23361, - 0, - 23367, - 0, - 23369, - 0, - 0, - 23373, - 0, - 23378, - 23379, - 0, - 23382, - 23383, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23387, - 0, - 0, - 0, - 0, - 0, - 0, - 23388, - 23390, - 0, - 0, - 23393, - 23398, - 0, - 0, - 0, - 23399, - 0, - 0, - 0, - 23400, - 0, - 0, - 0, - 0, - 23401, - 0, - 0, - 0, - 23415, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23416, - 0, - 23422, - 0, - 23443, - 23444, - 0, - 0, - 0, - 0, - 23448, - 0, - 23454, - 0, - 0, - 0, - 0, - 0, - 0, - 23456, - 0, - 0, - 23458, - 23464, - 0, - 0, - 0, - 0, - 0, - 0, - 23465, - 0, - 0, - 0, - 23470, - 23471, - 0, - 0, - 23472, - 0, - 0, - 0, - 23473, - 23496, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23497, - 0, - 23499, - 0, - 0, - 23502, - 0, - 0, - 23503, - 0, - 0, - 23513, - 0, - 0, - 23515, - 0, - 0, - 0, - 23517, - 0, - 0, - 0, - 0, - 23518, - 23519, - 23521, - 23524, - 0, - 23525, - 23528, - 23539, - 0, - 0, - 0, - 0, - 0, - 23541, - 0, - 0, - 23544, - 0, - 0, - 23556, - 0, - 0, - 23557, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23559, - 0, - 23560, - 0, - 0, - 23561, - 0, - 0, - 23566, - 0, - 0, - 0, - 0, - 0, - 23568, - 23569, - 23570, - 0, - 0, - 0, - 0, - 23571, - 0, - 23574, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 23575, - 0, - 23579, - 0, - 0, - 23581, - 0, - 0, - 0, - 0, - 0, - 0, - 23587, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23596, - 23598, - 0, - 0, - 0, - 0, - 23602, - 23606, - 0, - 0, - 23607, - 0, - 23608, - 0, - 0, - 0, - 23614, - 23616, - 0, - 0, - 0, - 0, - 0, - 23618, - 0, - 0, - 23619, - 0, - 0, - 0, - 0, - 23621, - 23626, - 0, - 23627, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23629, - 0, - 23630, - 0, - 0, - 0, - 0, - 23634, - 0, - 23636, - 0, - 0, - 0, - 0, - 0, - 0, - 23638, - 0, - 0, - 0, - 0, - 23640, - 23667, - 0, - 23669, - 0, - 0, - 0, - 23681, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23682, - 0, - 23683, - 0, - 0, - 0, - 0, - 0, - 23684, - 0, - 0, - 0, - 23685, - 23689, - 0, - 23693, - 23694, - 23700, - 0, - 23702, - 0, - 23709, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23712, - 0, - 0, - 0, - 0, - 0, - 23714, - 0, - 0, - 23715, - 0, - 0, - 0, - 0, - 23718, - 0, - 0, - 23720, - 0, - 0, - 0, - 0, - 23722, - 0, - 0, - 0, - 23726, - 23729, - 0, - 23741, - 23746, - 0, - 23748, - 0, - 0, - 0, - 0, - 23749, - 0, - 0, - 0, - 0, - 0, - 23750, - 0, - 0, - 0, - 0, - 23751, - 0, - 23753, - 0, - 0, - 0, - 0, - 23757, - 23765, - 0, - 0, - 0, - 23770, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23771, - 0, - 23772, - 23781, - 0, - 0, - 23796, - 0, - 0, - 0, - 0, - 23798, - 0, - 23799, - 0, - 0, - 0, - 23802, - 0, - 0, - 23806, - 0, - 23807, - 0, - 0, - 23808, - 0, - 23809, - 0, - 23819, - 0, - 0, - 0, - 23821, - 0, - 23827, - 0, - 0, - 0, - 23829, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23830, - 0, - 0, - 0, - 0, - 0, - 0, - 23832, - 23833, - 23834, - 23835, - 0, - 0, - 0, - 0, - 23837, - 23838, - 0, - 0, - 0, - 0, - 0, - 23846, - 0, - 0, - 0, - 0, - 0, - 0, - 23847, - 0, - 0, - 0, - 0, - 0, - 23879, - 23881, - 0, - 0, - 23882, - 23883, - 23895, - 0, - 23899, - 0, - 0, - 0, - 0, - 23901, - 0, - 0, - 0, - 0, - 0, - 0, - 23902, - 0, - 0, - 0, - 0, - 0, - 23903, - 23905, - 0, - 23906, - 0, - 23907, - 23918, - 23919, - 23920, - 0, - 23922, - 0, - 23924, - 0, - 
23927, - 0, - 23934, - 0, - 23937, - 23941, - 0, - 23942, - 23946, - 0, - 0, - 0, - 0, - 0, - 23955, - 23956, - 23958, - 0, - 0, - 0, - 0, - 0, - 0, - 23959, - 0, - 23962, - 23965, - 0, - 23966, - 0, - 0, - 0, - 0, - 23967, - 23968, - 0, - 0, - 23973, - 0, - 0, - 23974, - 0, - 0, - 0, - 0, - 23975, - 0, - 23976, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23977, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23980, - 0, - 0, - 23984, - 0, - 23985, - 0, - 0, - 23987, - 0, - 0, - 23988, - 23990, - 23991, - 0, - 0, - 0, - 0, - 0, - 0, - 23992, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23994, - 0, - 0, - 0, - 23998, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 23999, - 0, - 0, - 24003, - 0, - 24004, - 0, - 24006, - 0, - 0, - 0, - 24007, - 0, - 0, - 24008, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24009, - 0, - 0, - 24010, - 0, - 0, - 24011, - 0, - 0, - 24013, - 24014, - 0, - 0, - 24015, - 24016, - 24027, - 0, - 24028, - 24029, - 0, - 24030, - 0, - 0, - 0, - 0, - 0, - 24033, - 24034, - 0, - 24035, - 0, - 0, - 24036, - 0, - 0, - 24044, - 0, - 24048, - 24049, - 24063, - 24067, - 0, - 24068, - 24070, - 0, - 0, - 24071, - 24078, - 24087, - 0, - 24090, - 0, - 0, - 0, - 24095, - 0, - 24098, - 24101, - 24104, - 24106, - 0, - 24107, - 0, - 0, - 0, - 24108, - 0, - 0, - 0, - 0, - 24110, - 24111, - 0, - 24113, - 0, - 0, - 24115, - 24120, - 0, - 0, - 0, - 0, - 0, - 0, - 24124, - 0, - 24125, - 0, - 24126, - 0, - 24127, - 0, - 0, - 0, - 0, - 0, - 24135, - 0, - 0, - 24136, - 0, - 24137, - 24142, - 0, - 0, - 0, - 24146, - 0, - 0, - 24147, - 24149, - 24154, - 0, - 24163, - 0, - 0, - 0, - 24165, - 24166, - 24167, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24169, - 24170, - 24175, - 0, - 0, - 0, - 24178, - 0, - 0, - 24179, - 0, - 0, - 24181, - 0, - 24184, - 24197, - 0, - 24201, - 24204, - 0, - 0, - 0, - 0, - 0, - 0, - 24206, - 24212, - 24220, - 0, - 0, - 0, - 24224, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24226, - 0, - 24234, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24235, - 0, - 24236, - 0, - 0, - 0, - 0, - 0, - 24239, - 24240, - 24241, - 0, - 0, - 24248, - 0, - 0, - 24249, - 0, - 24251, - 0, - 0, - 0, - 0, - 0, - 0, - 24253, - 0, - 24268, - 0, - 0, - 0, - 24269, - 0, - 24271, - 24272, - 0, - 0, - 0, - 0, - 24273, - 0, - 0, - 24274, - 0, - 0, - 24279, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24280, - 0, - 24293, - 24294, - 0, - 0, - 0, - 0, - 0, - 0, - 24296, - 0, - 0, - 24323, - 0, - 0, - 0, - 24329, - 24330, - 24331, - 24339, - 0, - 24351, - 0, - 0, - 24369, - 24370, - 0, - 0, - 0, - 24371, - 0, - 0, - 0, - 0, - 24372, - 24373, - 24374, - 0, - 0, - 0, - 0, - 0, - 24378, - 0, - 0, - 0, - 0, - 24379, - 0, - 24381, - 0, - 24383, - 24389, - 0, - 24390, - 0, - 0, - 24394, - 24395, - 24400, - 0, - 0, - 0, - 24401, - 24402, - 0, - 24406, - 0, - 0, - 0, - 24411, - 0, - 0, - 0, - 24415, - 0, - 24416, - 0, - 0, - 0, - 0, - 0, - 24417, - 0, - 24419, - 0, - 24422, - 0, - 24423, - 24428, - 0, - 24435, - 0, - 0, - 0, - 24439, - 0, - 0, - 0, - 24440, - 24442, - 24446, - 0, - 0, - 0, - 24447, - 24448, - 24449, - 24452, - 0, - 0, - 0, - 0, - 24453, - 24457, - 0, - 0, - 24458, - 24459, - 24460, - 0, - 24465, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24470, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24471, - 0, - 24473, - 24474, - 24475, - 24476, - 0, - 24478, - 0, - 0, - 0, - 0, - 24480, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24481, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24482, - 24485, - 0, - 0, - 0, - 0, - 24486, - 0, - 0, - 0, - 24488, - 0, - 0, - 0, - 24494, - 0, - 0, - 0, - 0, - 24497, - 0, - 0, - 24498, - 0, - 0, - 0, - 24499, - 24506, - 0, - 0, - 0, - 24507, - 0, - 0, - 24511, - 0, - 0, - 24513, - 24514, - 0, - 0, - 0, - 0, - 0, - 24517, - 0, - 24518, - 0, - 24520, - 0, - 24521, - 24524, - 24525, - 0, - 0, - 0, - 0, - 0, - 24527, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24528, - 0, - 0, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24537, - 24539, - 0, - 24540, - 0, - 0, - 0, - 24548, - 0, - 0, - 0, - 0, - 0, - 24549, - 24550, - 0, - 0, - 0, - 24553, - 24554, - 0, - 24555, - 0, - 24556, - 0, - 24558, - 0, - 0, - 0, - 0, - 0, - 24560, - 0, - 0, - 0, - 24561, - 0, - 0, - 0, - 0, - 0, - 24562, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24567, - 0, - 0, - 0, - 0, - 0, - 24569, - 0, - 0, - 0, - 24574, - 0, - 24575, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24577, - 24581, - 0, - 24584, - 0, - 0, - 0, - 0, - 0, - 24585, - 0, - 0, - 0, - 0, - 0, - 24586, - 0, - 0, - 24587, - 0, - 24588, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24590, - 24591, - 0, - 0, - 0, - 0, - 24592, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24594, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24596, - 24597, - 0, - 0, - 0, - 0, - 24602, - 24603, - 0, - 0, - 0, - 0, - 24604, - 0, - 0, - 24605, - 0, - 24610, - 0, - 0, - 24611, - 0, - 0, - 0, - 0, - 24612, - 24615, - 24616, - 24624, - 0, - 0, - 0, - 24627, - 0, - 24638, - 24639, - 0, - 0, - 0, - 0, - 24640, - 0, - 0, - 0, - 24655, - 24656, - 24657, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24662, - 0, - 24663, - 24664, - 0, - 0, - 0, - 0, - 0, - 24665, - 0, - 0, - 0, - 0, - 24667, - 0, - 0, - 0, - 0, - 0, - 0, - 24668, - 24669, - 0, - 24670, - 24674, - 0, - 0, - 0, - 24675, - 0, - 24678, - 0, - 0, - 24679, - 0, - 0, - 0, - 24681, - 0, - 24683, - 0, - 0, - 0, - 0, - 24684, - 0, - 24685, - 0, - 0, - 24686, - 0, - 0, - 24688, - 24689, - 0, - 0, - 0, - 0, - 24690, - 24691, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24697, - 0, - 24698, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24709, - 0, - 0, - 0, - 0, - 0, - 24710, - 0, - 24712, - 0, - 0, - 0, - 0, - 0, - 0, - 24713, - 24714, - 0, - 24715, - 0, - 24716, - 24718, - 0, - 24719, - 0, - 0, - 0, - 0, - 24720, - 0, - 0, - 24725, - 0, - 0, - 24738, - 0, - 24749, - 24750, - 0, - 0, - 0, - 24752, - 0, - 0, - 0, - 24753, - 0, - 0, - 0, - 24758, - 0, 
- 0, - 0, - 0, - 0, - 24762, - 0, - 24763, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24764, - 0, - 0, - 0, - 0, - 0, - 24765, - 24767, - 24768, - 0, - 24772, - 0, - 0, - 0, - 0, - 24773, - 0, - 0, - 0, - 0, - 24777, - 0, - 0, - 0, - 0, - 0, - 24785, - 0, - 24786, - 24788, - 0, - 0, - 0, - 24789, - 0, - 0, - 0, - 0, - 24794, - 24798, - 0, - 24799, - 24800, - 0, - 0, - 0, - 24803, - 0, - 24804, - 24806, - 0, - 24807, - 0, - 0, - 0, - 24810, - 0, - 0, - 0, - 0, - 0, - 0, - 24827, - 24828, - 0, - 24835, - 0, - 0, - 0, - 0, - 0, - 0, - 24836, - 0, - 0, - 0, - 0, - 0, - 24839, - 0, - 24843, - 24844, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24847, - 0, - 0, - 24848, - 0, - 0, - 0, - 0, - 0, - 0, - 24849, - 0, - 24850, - 24851, - 0, - 0, - 0, - 24852, - 0, - 24853, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24854, - 0, - 24855, - 0, - 0, - 24868, - 0, - 0, - 0, - 24883, - 0, - 0, - 0, - 24884, - 0, - 24895, - 24897, - 0, - 0, - 0, - 0, - 0, - 24899, - 0, - 0, - 0, - 0, - 0, - 24900, - 0, - 24913, - 0, - 0, - 0, - 0, - 0, - 0, - 24914, - 0, - 0, - 24917, - 24930, - 24931, - 0, - 0, - 0, - 24932, - 0, - 0, - 24939, - 0, - 0, - 24942, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 24945, - 24950, - 0, - 24951, - 0, - 0, - 24953, - 0, - 0, - 0, - 24954, - 0, - 24959, - 0, - 0, - 0, - 24961, - 0, - 0, - 24962, - 0, - 24964, - 24968, - 24970, - 24972, - 0, - 0, - 0, - 0, - 0, - 24976, - 0, - 0, - 0, - 24977, - 0, - 24982, - 0, - 0, - 24983, - 0, - 0, - 24984, - 0, - 0, - 0, - 24993, - 0, - 0, - 0, - 24994, - 0, - 0, - 25001, - 0, - 0, - 0, - 25003, - 0, - 0, - 25018, - 0, - 0, - 25023, - 0, - 0, - 0, - 25034, - 0, - 0, - 25035, - 25036, - 0, - 25037, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25039, - 0, - 0, - 0, - 0, - 0, - 25040, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25042, - 0, - 0, - 25043, - 25045, - 0, - 0, - 0, - 0, - 0, - 0, - 25049, - 0, - 0, - 25051, - 0, - 25052, - 25053, - 0, - 0, - 25054, - 0, - 0, - 0, - 25055, - 0, - 0, - 0, - 0, - 25057, - 25059, - 0, - 0, - 
25060, - 25064, - 0, - 25065, - 25069, - 25070, - 0, - 0, - 0, - 0, - 25072, - 0, - 25073, - 0, - 25090, - 0, - 0, - 25092, - 25093, - 25101, - 0, - 0, - 0, - 0, - 0, - 0, - 25105, - 25108, - 0, - 0, - 25113, - 0, - 0, - 25115, - 25116, - 0, - 0, - 0, - 0, - 0, - 0, - 25117, - 0, - 0, - 0, - 25120, - 25121, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25125, - 0, - 0, - 0, - 25126, - 0, - 25130, - 25134, - 0, - 25139, - 0, - 25143, - 0, - 0, - 0, - 25151, - 0, - 25161, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25163, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25174, - 0, - 25175, - 0, - 25207, - 0, - 0, - 0, - 25209, - 0, - 0, - 0, - 0, - 25213, - 0, - 25219, - 0, - 25223, - 0, - 25225, - 0, - 0, - 0, - 25227, - 0, - 0, - 0, - 25228, - 0, - 0, - 0, - 25229, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25231, - 25233, - 0, - 0, - 0, - 0, - 25237, - 25239, - 0, - 0, - 0, - 25243, - 0, - 0, - 0, - 25252, - 0, - 25257, - 25258, - 0, - 0, - 0, - 0, - 25260, - 25265, - 0, - 25268, - 0, - 0, - 25273, - 25324, - 0, - 25325, - 0, - 25326, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25327, - 0, - 0, - 0, - 0, - 0, - 25328, - 0, - 0, - 0, - 0, - 0, - 0, - 25332, - 0, - 0, - 0, - 25333, - 0, - 0, - 0, - 25336, - 25337, - 25338, - 0, - 0, - 25343, - 0, - 25350, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25352, - 0, - 25354, - 0, - 25375, - 0, - 25379, - 0, - 0, - 0, - 0, - 25384, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25386, - 0, - 25388, - 0, - 25390, - 0, - 0, - 25399, - 0, - 0, - 25401, - 0, - 0, - 0, - 25402, - 0, - 0, - 0, - 25407, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25413, - 25415, - 0, - 0, - 25417, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25419, - 0, - 0, - 0, - 25421, - 0, - 0, - 0, - 25424, - 0, - 0, - 0, - 0, - 25433, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25435, - 0, - 0, - 0, - 0, - 0, - 0, - 25436, - 0, - 0, - 0, - 25437, - 0, - 0, - 25440, - 0, - 0, - 0, - 0, - 0, - 0, - 25442, - 0, - 0, - 25443, - 0, - 
25446, - 0, - 0, - 25449, - 0, - 0, - 0, - 25450, - 0, - 0, - 0, - 0, - 25452, - 0, - 25453, - 25454, - 25455, - 0, - 0, - 0, - 25456, - 0, - 25457, - 0, - 0, - 0, - 25459, - 0, - 25461, - 0, - 25468, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25469, - 0, - 0, - 0, - 0, - 0, - 25471, - 0, - 0, - 0, - 0, - 0, - 25474, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25475, - 0, - 0, - 0, - 0, - 25477, - 0, - 0, - 0, - 0, - 25483, - 0, - 0, - 0, - 0, - 0, - 25484, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25485, - 0, - 25497, - 0, - 0, - 25498, - 0, - 25504, - 0, - 25510, - 0, - 25512, - 0, - 0, - 25513, - 25514, - 0, - 0, - 0, - 0, - 0, - 0, - 25517, - 25518, - 25519, - 0, - 25520, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25521, - 0, - 25522, - 25527, - 25534, - 0, - 25536, - 0, - 25537, - 0, - 0, - 25548, - 25550, - 0, - 0, - 25551, - 0, - 25552, - 0, - 0, - 0, - 0, - 0, - 25554, - 0, - 25555, - 0, - 25556, - 25557, - 25568, - 0, - 0, - 0, - 25570, - 25571, - 0, - 0, - 0, - 0, - 0, - 0, - 25574, - 0, - 0, - 0, - 0, - 25579, - 0, - 0, - 0, - 25581, - 0, - 0, - 0, - 25582, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25588, - 0, - 0, - 0, - 0, - 25589, - 0, - 0, - 0, - 0, - 25590, - 0, - 25591, - 25592, - 25593, - 0, - 25594, - 0, - 0, - 0, - 25596, - 0, - 25597, - 25615, - 0, - 0, - 0, - 0, - 0, - 25618, - 0, - 0, - 0, - 0, - 25619, - 25623, - 0, - 0, - 25629, - 0, - 0, - 25631, - 0, - 0, - 0, - 25635, - 25636, - 0, - 0, - 25649, - 0, - 0, - 0, - 0, - 25654, - 0, - 0, - 0, - 25661, - 25663, - 0, - 0, - 25671, - 0, - 0, - 25678, - 25698, - 0, - 25699, - 25702, - 25703, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25704, - 0, - 0, - 0, - 0, - 0, - 25706, - 0, - 0, - 25710, - 0, - 25711, - 0, - 25712, - 0, - 25715, - 25716, - 25717, - 0, - 0, - 25718, - 25728, - 25732, - 0, - 0, - 0, - 25734, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25737, - 0, - 0, - 25739, - 0, - 0, - 0, - 25740, - 0, - 25741, - 25745, - 0, - 25746, - 0, - 25748, - 25772, - 25778, - 0, - 
0, - 0, - 0, - 0, - 25780, - 0, - 0, - 0, - 0, - 25781, - 0, - 25782, - 25784, - 25785, - 0, - 0, - 0, - 25789, - 0, - 0, - 0, - 0, - 0, - 0, - 25797, - 25801, - 0, - 0, - 0, - 25808, - 25809, - 0, - 0, - 25811, - 25814, - 25815, - 0, - 0, - 25817, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25820, - 0, - 0, - 0, - 0, - 25832, - 25833, - 0, - 0, - 0, - 25846, - 0, - 0, - 0, - 25847, - 25848, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25849, - 25850, - 0, - 0, - 25851, - 0, - 0, - 25852, - 0, - 25862, - 0, - 0, - 0, - 25863, - 25865, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25867, - 25868, - 0, - 25869, - 25874, - 0, - 25875, - 0, - 25876, - 25877, - 0, - 0, - 0, - 0, - 25878, - 25902, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25903, - 25904, - 25905, - 0, - 0, - 0, - 25908, - 25909, - 0, - 0, - 0, - 0, - 25910, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25912, - 0, - 25913, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25914, - 0, - 0, - 25916, - 0, - 0, - 0, - 0, - 0, - 25917, - 25927, - 0, - 0, - 0, - 0, - 25928, - 0, - 0, - 25930, - 0, - 0, - 0, - 25933, - 0, - 0, - 25938, - 25942, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25945, - 0, - 25950, - 0, - 25956, - 0, - 0, - 25961, - 25962, - 0, - 0, - 25963, - 0, - 25964, - 25965, - 25966, - 0, - 0, - 0, - 0, - 0, - 25967, - 0, - 0, - 0, - 0, - 25968, - 0, - 0, - 0, - 25969, - 25971, - 0, - 0, - 0, - 0, - 0, - 25973, - 25975, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25978, - 0, - 25981, - 0, - 0, - 0, - 25982, - 0, - 0, - 0, - 25984, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 25993, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26002, - 0, - 0, - 0, - 26005, - 0, - 0, - 0, - 26006, - 26007, - 0, - 0, - 26014, - 26015, - 26016, - 0, - 0, - 0, - 0, - 0, - 0, - 26017, - 26018, - 26020, - 0, - 26022, - 26023, - 0, - 0, - 0, - 26024, - 26028, - 0, - 26029, - 26033, - 26034, - 26044, - 0, - 0, - 0, - 0, - 0, - 26046, - 0, - 0, - 26047, - 0, - 0, - 26049, - 0, - 26050, - 0, - 26051, - 0, - 0, - 0, - 0, - 0, - 26053, - 0, - 0, - 0, - 
0, - 26054, - 26059, - 0, - 0, - 0, - 0, - 0, - 0, - 26060, - 0, - 26066, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26067, - 0, - 26069, - 0, - 0, - 26071, - 0, - 0, - 0, - 26073, - 0, - 26074, - 26077, - 0, - 0, - 0, - 0, - 26078, - 0, - 0, - 0, - 26079, - 0, - 26090, - 0, - 0, - 26094, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26095, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26096, - 26101, - 0, - 26107, - 26122, - 0, - 26124, - 0, - 0, - 26125, - 0, - 0, - 0, - 0, - 0, - 0, - 26136, - 26141, - 26155, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26164, - 26166, - 0, - 0, - 0, - 26167, - 0, - 26170, - 26171, - 0, - 0, - 26172, - 0, - 0, - 26174, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26175, - 0, - 0, - 0, - 26176, - 26177, - 0, - 26321, - 26322, - 0, - 26323, - 0, - 0, - 26324, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26325, - 0, - 26331, - 0, - 0, - 0, - 0, - 0, - 0, - 26335, - 0, - 0, - 0, - 26350, - 0, - 0, - 0, - 26379, - 0, - 0, - 26382, - 26383, - 26385, - 0, - 0, - 26392, - 26406, - 0, - 0, - 0, - 0, - 26411, - 0, - 0, - 0, - 0, - 0, - 26412, - 0, - 0, - 26420, - 0, - 0, - 26423, - 0, - 26424, - 26426, - 26432, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26435, - 0, - 26436, - 0, - 0, - 0, - 0, - 0, - 26441, - 0, - 26444, - 0, - 0, - 0, - 26446, - 0, - 0, - 0, - 0, - 26447, - 0, - 0, - 0, - 0, - 26449, - 0, - 26450, - 26452, - 0, - 26453, - 26454, - 0, - 0, - 0, - 26455, - 0, - 0, - 0, - 26456, - 0, - 0, - 26458, - 0, - 0, - 26460, - 0, - 26463, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26464, - 26470, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26473, - 0, - 0, - 26474, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26475, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26477, - 0, - 26485, - 0, - 0, - 26486, - 0, - 26487, - 0, - 0, - 26488, - 26493, - 26494, - 0, - 0, - 26495, - 0, - 26497, - 26504, - 26506, - 0, - 0, - 0, - 0, - 0, - 26507, - 0, - 0, - 0, - 0, - 0, - 26509, 
- 0, - 0, - 26510, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26512, - 0, - 26513, - 26515, - 0, - 0, - 0, - 26518, - 0, - 0, - 0, - 26519, - 0, - 26524, - 26526, - 0, - 0, - 0, - 26527, - 0, - 26532, - 0, - 26533, - 26537, - 26558, - 0, - 0, - 0, - 26559, - 0, - 0, - 0, - 26571, - 0, - 0, - 26573, - 0, - 26588, - 0, - 26593, - 0, - 0, - 0, - 0, - 0, - 0, - 26603, - 0, - 26604, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26606, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26607, - 26609, - 26611, - 26614, - 0, - 0, - 0, - 26616, - 26620, - 0, - 26621, - 0, - 0, - 0, - 0, - 0, - 26627, - 0, - 26629, - 0, - 0, - 26630, - 0, - 0, - 26632, - 26643, - 0, - 0, - 0, - 26644, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26646, - 26647, - 0, - 0, - 0, - 26650, - 0, - 0, - 26656, - 0, - 0, - 0, - 0, - 26663, - 26670, - 26671, - 0, - 0, - 0, - 26685, - 26686, - 26687, - 0, - 26689, - 0, - 0, - 0, - 0, - 26744, - 0, - 26745, - 0, - 26747, - 26748, - 0, - 26749, - 26750, - 26751, - 0, - 0, - 0, - 0, - 26752, - 26755, - 0, - 0, - 0, - 26756, - 26769, - 0, - 0, - 0, - 26774, - 0, - 0, - 0, - 0, - 0, - 26775, - 0, - 26777, - 26778, - 0, - 26786, - 0, - 0, - 0, - 26787, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26788, - 0, - 0, - 26789, - 0, - 0, - 0, - 0, - 0, - 26791, - 0, - 26792, - 26793, - 0, - 0, - 0, - 26794, - 0, - 26797, - 26798, - 0, - 0, - 0, - 26800, - 0, - 0, - 26803, - 0, - 26804, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26805, - 0, - 0, - 26808, - 0, - 0, - 26809, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26812, - 0, - 26825, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26826, - 0, - 0, - 26827, - 26829, - 26834, - 0, - 0, - 0, - 0, - 26835, - 0, - 0, - 26849, - 0, - 26851, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26852, - 0, - 26853, - 26857, - 0, - 26858, - 0, - 26859, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26876, - 0, - 26878, - 26882, - 26883, - 0, - 0, - 0, - 0, - 26890, - 26894, - 0, - 0, - 0, - 0, - 26895, - 
26896, - 0, - 0, - 0, - 0, - 0, - 26900, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26911, - 26913, - 26914, - 26915, - 26916, - 26919, - 0, - 0, - 0, - 26921, - 26922, - 0, - 0, - 26925, - 0, - 0, - 0, - 26928, - 0, - 0, - 26929, - 26930, - 0, - 0, - 0, - 26931, - 0, - 26932, - 0, - 0, - 0, - 0, - 0, - 26933, - 0, - 0, - 0, - 0, - 0, - 0, - 26937, - 0, - 0, - 26943, - 0, - 0, - 26944, - 0, - 0, - 0, - 26946, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26956, - 0, - 26958, - 0, - 0, - 26963, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 26965, - 0, - 26969, - 26970, - 26972, - 0, - 0, - 0, - 0, - 0, - 26973, - 0, - 26974, - 0, - 26978, - 0, - 26980, - 0, - 0, - 0, - 0, - 0, - 0, - 26982, - 0, - 26986, - 26987, - 0, - 26990, - 0, - 0, - 0, - 0, - 27003, - 27006, - 0, - 0, - 27007, - 27010, - 27012, - 27013, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27014, - 27015, - 27018, - 0, - 27019, - 0, - 0, - 0, - 0, - 0, - 27025, - 0, - 0, - 0, - 27026, - 0, - 0, - 0, - 0, - 27029, - 27030, - 27031, - 27034, - 0, - 0, - 27036, - 27037, - 0, - 0, - 0, - 27038, - 27042, - 0, - 0, - 0, - 27044, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27045, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27046, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27047, - 27049, - 0, - 27050, - 0, - 0, - 0, - 27051, - 27052, - 0, - 27055, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27056, - 27058, - 27059, - 0, - 27061, - 0, - 27064, - 0, - 0, - 0, - 0, - 0, - 27069, - 0, - 0, - 27070, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27072, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27076, - 0, - 0, - 0, - 0, - 0, - 27078, - 0, - 27079, - 0, - 0, - 0, - 27081, - 0, - 0, - 0, - 0, - 0, - 0, - 27082, - 0, - 27083, - 27086, - 0, - 0, - 0, - 0, - 27087, - 0, - 0, - 0, - 0, - 0, - 27088, - 27090, - 0, - 27094, - 0, - 0, - 27095, - 0, - 27099, - 27102, - 0, - 0, - 0, - 27103, - 0, - 0, - 0, - 0, - 27105, - 0, - 0, - 0, - 27106, - 0, - 0, - 0, - 0, - 0, - 0, - 27107, - 0, - 0, - 
0, - 0, - 27108, - 27117, - 0, - 0, - 0, - 0, - 27118, - 0, - 0, - 27124, - 0, - 27126, - 0, - 0, - 27130, - 27131, - 0, - 0, - 0, - 0, - 0, - 0, - 27147, - 0, - 0, - 0, - 0, - 27148, - 27149, - 0, - 0, - 0, - 0, - 27150, - 27151, - 0, - 27152, - 0, - 27159, - 0, - 0, - 0, - 27164, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27175, - 0, - 27189, - 0, - 0, - 27191, - 0, - 27193, - 0, - 27195, - 0, - 27198, - 0, - 0, - 0, - 0, - 0, - 27200, - 0, - 0, - 0, - 0, - 27202, - 0, - 0, - 0, - 0, - 27203, - 0, - 0, - 27204, - 0, - 0, - 27206, - 0, - 27207, - 0, - 0, - 0, - 0, - 27209, - 0, - 0, - 0, - 27213, - 0, - 0, - 27216, - 27219, - 27220, - 27222, - 27223, - 0, - 27224, - 0, - 27225, - 27226, - 0, - 0, - 27233, - 0, - 0, - 0, - 0, - 27235, - 0, - 27237, - 0, - 27238, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27239, - 0, - 27242, - 27243, - 0, - 27250, - 0, - 0, - 0, - 27251, - 0, - 27253, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27254, - 27255, - 27258, - 0, - 0, - 0, - 27259, - 0, - 0, - 0, - 0, - 0, - 0, - 27267, - 0, - 27276, - 27278, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27296, - 27297, - 27301, - 0, - 0, - 0, - 0, - 0, - 0, - 27302, - 0, - 0, - 0, - 0, - 0, - 0, - 27312, - 27313, - 0, - 0, - 0, - 0, - 0, - 27318, - 0, - 27320, - 0, - 27329, - 0, - 27330, - 27331, - 0, - 27332, - 0, - 0, - 0, - 0, - 27340, - 0, - 0, - 0, - 27348, - 0, - 0, - 0, - 0, - 0, - 0, - 27350, - 0, - 27351, - 0, - 0, - 0, - 0, - 27355, - 0, - 0, - 27358, - 27359, - 27361, - 0, - 0, - 0, - 27365, - 0, - 27367, - 0, - 27376, - 27378, - 0, - 0, - 27379, - 0, - 0, - 0, - 0, - 0, - 0, - 27396, - 0, - 27397, - 27404, - 0, - 0, - 0, - 0, - 0, - 27408, - 0, - 0, - 0, - 0, - 27453, - 0, - 0, - 0, - 27456, - 0, - 0, - 0, - 27458, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27459, - 0, - 0, - 0, - 27460, - 0, - 0, - 27461, - 0, - 27465, - 27467, - 0, - 0, - 27469, - 0, - 27470, - 0, - 27471, - 0, - 27477, - 
27482, - 0, - 0, - 0, - 0, - 0, - 0, - 27484, - 0, - 0, - 0, - 0, - 0, - 0, - 27485, - 0, - 0, - 0, - 0, - 0, - 27493, - 0, - 27494, - 27502, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27511, - 27532, - 0, - 0, - 0, - 27533, - 27545, - 0, - 0, - 0, - 27546, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27547, - 0, - 0, - 27549, - 27550, - 0, - 27551, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27555, - 0, - 0, - 27571, - 0, - 27573, - 27574, - 27575, - 27577, - 0, - 27578, - 0, - 0, - 27579, - 27585, - 0, - 0, - 0, - 0, - 0, - 27586, - 0, - 0, - 27588, - 27589, - 0, - 0, - 0, - 0, - 27596, - 0, - 0, - 27600, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27608, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27610, - 0, - 0, - 0, - 27618, - 0, - 0, - 27620, - 0, - 0, - 0, - 27631, - 0, - 0, - 27632, - 27634, - 0, - 27636, - 27638, - 0, - 0, - 0, - 27643, - 0, - 27644, - 27649, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27651, - 27660, - 0, - 27661, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27662, - 0, - 0, - 27664, - 0, - 27665, - 0, - 0, - 0, - 27669, - 0, - 27671, - 0, - 0, - 0, - 27673, - 27674, - 0, - 0, - 0, - 27682, - 0, - 0, - 0, - 27711, - 0, - 27712, - 27713, - 27719, - 27720, - 0, - 0, - 27728, - 0, - 27729, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27731, - 0, - 0, - 27732, - 0, - 27733, - 0, - 27738, - 0, - 0, - 0, - 27742, - 0, - 0, - 0, - 27743, - 27744, - 0, - 0, - 0, - 0, - 0, - 0, - 27745, - 27746, - 0, - 0, - 0, - 27747, - 27748, - 27751, - 27752, - 0, - 0, - 0, - 27768, - 27770, - 0, - 0, - 0, - 27774, - 27775, - 0, - 27776, - 27777, - 0, - 0, - 27781, - 0, - 27784, - 0, - 27786, - 0, - 0, - 27791, - 0, - 27792, - 27793, - 27804, - 0, - 27812, - 27813, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27814, - 0, - 27825, - 0, - 27827, - 0, - 0, - 0, - 0, - 27828, - 27861, - 27862, - 0, - 0, - 0, - 27864, - 0, - 0, - 0, - 27865, - 27884, - 0, - 27889, - 0, - 0, - 0, - 0, - 0, - 27890, - 0, - 
27891, - 0, - 0, - 0, - 27892, - 0, - 0, - 0, - 0, - 0, - 27897, - 27898, - 0, - 0, - 27899, - 0, - 0, - 0, - 27901, - 27905, - 0, - 0, - 27920, - 0, - 0, - 27921, - 0, - 27922, - 0, - 0, - 0, - 27931, - 27934, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27941, - 0, - 27942, - 0, - 27945, - 0, - 27947, - 27954, - 0, - 0, - 0, - 0, - 27960, - 27963, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 27964, - 27965, - 0, - 0, - 0, - 27967, - 0, - 27969, - 27975, - 0, - 27976, - 27977, - 0, - 27981, - 0, - 27983, - 28051, - 28052, - 0, - 0, - 0, - 0, - 0, - 28056, - 0, - 0, - 0, - 0, - 0, - 0, - 28058, - 28059, - 0, - 0, - 28061, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28063, - 0, - 0, - 0, - 0, - 0, - 0, - 28066, - 0, - 0, - 0, - 0, - 0, - 0, - 28069, - 28070, - 28072, - 0, - 28073, - 0, - 0, - 28074, - 0, - 0, - 0, - 0, - 28075, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28078, - 0, - 0, - 0, - 0, - 28085, - 0, - 0, - 0, - 0, - 28086, - 0, - 0, - 0, - 0, - 0, - 0, - 28088, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28090, - 0, - 28097, - 28114, - 28115, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28116, - 0, - 0, - 0, - 0, - 0, - 28118, - 0, - 28129, - 0, - 28131, - 0, - 0, - 28135, - 0, - 0, - 0, - 28140, - 28141, - 0, - 0, - 0, - 28146, - 0, - 0, - 0, - 0, - 28152, - 0, - 0, - 0, - 0, - 28155, - 28157, - 28161, - 0, - 0, - 0, - 0, - 28166, - 0, - 28167, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28172, - 0, - 0, - 0, - 0, - 0, - 0, - 28173, - 0, - 0, - 28175, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28178, - 28188, - 0, - 28190, - 0, - 0, - 0, - 0, - 0, - 28191, - 0, - 28193, - 28206, - 0, - 0, - 28207, - 28209, - 0, - 28211, - 0, - 28213, - 0, - 0, - 0, - 28215, - 28216, - 28217, - 0, - 28222, - 0, - 28223, - 28225, - 0, - 0, - 0, - 28226, - 0, - 28227, - 28229, - 28232, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28235, - 0, - 28241, - 0, - 0, - 28242, - 0, - 0, - 0, - 0, - 28243, - 0, - 0, - 0, - 28245, - 0, - 0, - 0, - 28248, - 28250, - 0, - 28251, - 
28252, - 0, - 0, - 0, - 0, - 0, - 0, - 28253, - 0, - 0, - 28254, - 28255, - 0, - 0, - 28256, - 0, - 0, - 28258, - 0, - 0, - 0, - 0, - 0, - 28259, - 0, - 0, - 28260, - 0, - 0, - 28261, - 0, - 0, - 0, - 0, - 28262, - 28263, - 0, - 0, - 28264, - 0, - 0, - 0, - 28266, - 0, - 28268, - 28269, - 0, - 28270, - 28272, - 28274, - 0, - 28277, - 28278, - 0, - 0, - 0, - 28279, - 0, - 28280, - 28281, - 28283, - 0, - 28292, - 0, - 28294, - 0, - 28297, - 0, - 0, - 0, - 0, - 28299, - 0, - 0, - 0, - 0, - 0, - 28300, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28301, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28302, - 28303, - 0, - 0, - 0, - 0, - 28304, - 0, - 0, - 28305, - 0, - 28312, - 0, - 28313, - 28314, - 0, - 0, - 0, - 0, - 0, - 0, - 28315, - 0, - 0, - 0, - 28320, - 28321, - 0, - 0, - 28328, - 0, - 0, - 0, - 28329, - 28338, - 0, - 28339, - 0, - 0, - 28344, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28347, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28348, - 0, - 0, - 0, - 0, - 0, - 28411, - 0, - 28412, - 28413, - 0, - 28416, - 0, - 0, - 0, - 28420, - 0, - 0, - 0, - 0, - 0, - 28421, - 0, - 0, - 0, - 0, - 28423, - 0, - 0, - 0, - 28424, - 0, - 0, - 28428, - 0, - 0, - 0, - 0, - 0, - 28429, - 0, - 0, - 0, - 28431, - 28434, - 0, - 28458, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28464, - 0, - 0, - 0, - 0, - 28465, - 0, - 28467, - 0, - 0, - 0, - 0, - 0, - 0, - 28471, - 0, - 0, - 0, - 0, - 28474, - 0, - 28480, - 0, - 28481, - 0, - 0, - 28485, - 0, - 0, - 0, - 0, - 28486, - 28488, - 0, - 0, - 28489, - 0, - 0, - 0, - 0, - 28492, - 0, - 0, - 0, - 28495, - 0, - 28497, - 0, - 28499, - 0, - 0, - 0, - 0, - 28500, - 0, - 0, - 28502, - 28503, - 0, - 0, - 0, - 28508, - 0, - 0, - 0, - 28510, - 0, - 0, - 28512, - 28513, - 28514, - 28521, - 0, - 28526, - 0, - 28527, - 28528, - 0, - 0, - 0, - 0, - 28529, - 0, - 0, - 28532, - 0, - 0, - 28537, - 28538, - 0, - 0, - 0, - 28539, - 0, - 28548, - 0, - 28553, - 28554, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 
28560, - 28563, - 0, - 0, - 28564, - 0, - 0, - 0, - 0, - 28565, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28566, - 28568, - 0, - 0, - 0, - 0, - 0, - 0, - 28569, - 0, - 0, - 0, - 28570, - 0, - 28572, - 28573, - 0, - 0, - 0, - 0, - 28575, - 0, - 0, - 0, - 0, - 28576, - 28581, - 28588, - 0, - 0, - 28589, - 0, - 0, - 0, - 28590, - 28595, - 0, - 28598, - 0, - 0, - 28601, - 0, - 0, - 28605, - 0, - 0, - 0, - 0, - 28614, - 28615, - 28619, - 0, - 0, - 0, - 0, - 0, - 0, - 28620, - 0, - 28626, - 0, - 0, - 28628, - 0, - 28631, - 0, - 28632, - 0, - 0, - 0, - 0, - 0, - 0, - 28635, - 0, - 0, - 0, - 28637, - 28638, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28639, - 0, - 28643, - 0, - 0, - 28652, - 0, - 0, - 0, - 28662, - 0, - 28670, - 28671, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28672, - 28673, - 28675, - 28676, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28691, - 0, - 0, - 0, - 28695, - 0, - 0, - 0, - 28696, - 0, - 28697, - 28698, - 0, - 28705, - 0, - 28707, - 28708, - 28710, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28711, - 28728, - 0, - 0, - 0, - 28736, - 0, - 0, - 0, - 28737, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28738, - 0, - 28739, - 0, - 28741, - 0, - 0, - 28742, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28745, - 0, - 0, - 0, - 0, - 0, - 0, - 28749, - 28750, - 28752, - 28754, - 28756, - 0, - 28757, - 0, - 0, - 0, - 0, - 28759, - 28760, - 0, - 0, - 0, - 0, - 0, - 0, - 28762, - 0, - 0, - 0, - 28764, - 0, - 0, - 0, - 0, - 0, - 0, - 28766, - 0, - 28767, - 28768, - 0, - 0, - 0, - 0, - 28769, - 28770, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28771, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28772, - 0, - 28773, - 0, - 28782, - 0, - 0, - 0, - 0, - 0, - 0, - 28784, - 0, - 28785, - 0, - 28786, - 0, - 0, - 0, - 28787, - 0, - 0, - 0, - 28797, - 0, - 0, - 0, - 0, - 0, - 0, - 28799, - 0, - 0, - 28801, - 0, - 0, - 0, - 0, - 28802, - 0, - 28805, - 
0, - 0, - 28806, - 0, - 0, - 28807, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28808, - 0, - 0, - 0, - 0, - 0, - 28810, - 28812, - 0, - 0, - 28816, - 28819, - 0, - 0, - 28821, - 0, - 28826, - 0, - 0, - 0, - 28842, - 28852, - 0, - 0, - 28853, - 0, - 28854, - 28855, - 0, - 0, - 0, - 28857, - 0, - 0, - 0, - 28858, - 0, - 28867, - 28868, - 28869, - 0, - 0, - 0, - 28874, - 28880, - 28882, - 28890, - 28892, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28895, - 0, - 0, - 0, - 28898, - 28899, - 0, - 0, - 0, - 28900, - 0, - 0, - 28904, - 0, - 28906, - 0, - 0, - 0, - 0, - 28907, - 0, - 0, - 0, - 0, - 0, - 0, - 28908, - 0, - 0, - 0, - 28910, - 0, - 28914, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28915, - 28916, - 28919, - 0, - 0, - 28920, - 0, - 28921, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28924, - 0, - 0, - 0, - 0, - 28926, - 28929, - 0, - 0, - 0, - 28930, - 0, - 28936, - 0, - 28939, - 0, - 0, - 0, - 0, - 28942, - 0, - 0, - 0, - 0, - 0, - 0, - 28956, - 0, - 0, - 0, - 28966, - 0, - 0, - 0, - 0, - 28967, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 28968, - 0, - 28971, - 0, - 28975, - 28976, - 0, - 28982, - 28983, - 0, - 0, - 28984, - 28989, - 28996, - 28997, - 28998, - 0, - 0, - 0, - 0, - 0, - 0, - 28999, - 0, - 0, - 0, - 0, - 0, - 29000, - 0, - 29001, - 0, - 0, - 0, - 29009, - 0, - 0, - 29011, - 0, - 0, - 29021, - 0, - 0, - 0, - 0, - 29024, - 0, - 29025, - 0, - 0, - 0, - 0, - 0, - 29026, - 0, - 0, - 0, - 29036, - 0, - 0, - 0, - 29037, - 0, - 0, - 0, - 0, - 29038, - 0, - 29045, - 0, - 29047, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29051, - 0, - 0, - 0, - 29054, - 29056, - 29062, - 0, - 29070, - 29082, - 0, - 0, - 0, - 29083, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29084, - 0, - 0, - 0, - 0, - 29085, - 29088, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29090, - 29097, - 0, - 0, - 0, - 29103, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29105, - 0, - 0, - 0, - 0, - 0, - 29107, - 0, - 29109, - 0, - 0, - 0, - 29115, - 0, - 0, - 29120, - 0, - 0, - 29138, 
- 29140, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29152, - 0, - 29160, - 29174, - 0, - 29176, - 0, - 0, - 29180, - 0, - 29181, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29228, - 0, - 0, - 29229, - 0, - 0, - 29230, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29234, - 0, - 0, - 0, - 29241, - 0, - 29245, - 0, - 29248, - 0, - 29250, - 29256, - 29280, - 0, - 29282, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29285, - 0, - 0, - 29286, - 29291, - 29292, - 0, - 0, - 0, - 0, - 29294, - 0, - 29295, - 0, - 0, - 0, - 0, - 0, - 29296, - 29297, - 29298, - 29300, - 0, - 29302, - 0, - 0, - 29304, - 29307, - 0, - 29312, - 0, - 0, - 0, - 29322, - 0, - 0, - 29323, - 0, - 0, - 29324, - 29326, - 29328, - 0, - 29335, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29338, - 29339, - 0, - 0, - 0, - 0, - 0, - 29341, - 29343, - 0, - 0, - 0, - 0, - 29344, - 0, - 0, - 0, - 0, - 0, - 29345, - 0, - 0, - 0, - 0, - 29346, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29347, - 29348, - 29349, - 0, - 0, - 29354, - 0, - 0, - 29355, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29357, - 0, - 0, - 0, - 0, - 29364, - 0, - 29365, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29366, - 0, - 0, - 29368, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29378, - 0, - 29381, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29386, - 0, - 0, - 0, - 0, - 0, - 0, - 29389, - 0, - 0, - 0, - 29390, - 0, - 0, - 29391, - 29397, - 0, - 29398, - 29412, - 29414, - 29418, - 29419, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29420, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29423, - 0, - 0, - 0, - 29435, - 0, - 0, - 0, - 29437, - 0, - 0, - 29439, - 0, - 29441, - 0, - 0, - 0, - 0, - 29443, - 0, - 29446, - 29450, - 29452, - 0, - 0, - 0, - 0, - 0, - 29456, - 0, - 0, - 0, - 0, - 0, - 29461, - 0, - 0, - 0, - 29464, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29468, - 0, - 29473, - 0, - 0, - 0, - 29486, - 0, - 0, - 0, - 29490, - 0, - 0, - 0, - 29491, - 29492, - 0, - 0, - 29497, - 0, - 0, - 0, - 
29498, - 0, - 29499, - 0, - 29502, - 29505, - 0, - 29509, - 0, - 0, - 0, - 29510, - 0, - 0, - 0, - 29512, - 0, - 0, - 0, - 29516, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29518, - 0, - 29519, - 0, - 0, - 0, - 0, - 0, - 29520, - 29521, - 29529, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29530, - 0, - 0, - 29531, - 29538, - 0, - 29540, - 0, - 0, - 0, - 29542, - 0, - 29543, - 29544, - 29547, - 0, - 0, - 29548, - 0, - 0, - 0, - 29549, - 0, - 0, - 0, - 29550, - 0, - 0, - 29552, - 0, - 0, - 0, - 0, - 29558, - 29561, - 0, - 29562, - 29564, - 0, - 0, - 29565, - 0, - 0, - 29566, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29578, - 29584, - 29586, - 29591, - 0, - 0, - 0, - 0, - 29593, - 29594, - 0, - 0, - 29597, - 0, - 0, - 29613, - 0, - 29614, - 0, - 29615, - 0, - 0, - 0, - 0, - 29616, - 29617, - 0, - 0, - 29625, - 0, - 0, - 0, - 29632, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29633, - 0, - 0, - 0, - 0, - 0, - 29634, - 29635, - 29637, - 0, - 29638, - 0, - 29641, - 29643, - 0, - 0, - 0, - 0, - 0, - 0, - 29644, - 0, - 29645, - 0, - 29649, - 0, - 0, - 0, - 29650, - 0, - 29653, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29656, - 29659, - 0, - 0, - 29660, - 0, - 0, - 0, - 29661, - 0, - 0, - 0, - 0, - 0, - 29664, - 0, - 0, - 0, - 29671, - 29673, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29675, - 0, - 29677, - 29679, - 0, - 0, - 29684, - 0, - 0, - 0, - 0, - 0, - 29685, - 0, - 0, - 0, - 29687, - 0, - 0, - 0, - 29688, - 0, - 29689, - 29690, - 29700, - 0, - 29701, - 0, - 0, - 0, - 29702, - 0, - 29706, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29720, - 0, - 29721, - 0, - 29727, - 0, - 29733, - 29734, - 0, - 29750, - 29761, - 0, - 29763, - 0, - 0, - 0, - 0, - 0, - 29764, - 0, - 0, - 29765, - 0, - 0, - 0, - 29771, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29772, - 0, - 0, - 0, - 29773, - 29774, - 29775, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29822, - 0, - 0, - 0, - 29824, - 0, - 29825, - 0, - 0, - 0, - 0, - 0, - 29827, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 29829, - 0, - 29832, - 29834, - 0, - 0, - 29835, - 0, - 0, - 29837, - 29838, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29843, - 0, - 0, - 0, - 0, - 29844, - 29845, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29849, - 0, - 0, - 29869, - 29872, - 29890, - 29905, - 0, - 0, - 0, - 0, - 0, - 29907, - 29921, - 0, - 29922, - 0, - 0, - 29923, - 29926, - 29944, - 29946, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 29947, - 29948, - 0, - 0, - 0, - 29951, - 0, - 0, - 0, - 0, - 0, - 29953, - 0, - 0, - 29956, - 0, - 29957, - 0, - 0, - 29962, - 0, - 0, - 0, - 0, - 29971, - 0, - 0, - 0, - 29972, - 0, - 0, - 0, - 0, - 0, - 29978, - 0, - 29979, - 29992, - 30007, - 30008, - 30010, - 0, - 0, - 0, - 30013, - 0, - 0, - 0, - 0, - 30014, - 30016, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30017, - 0, - 0, - 0, - 0, - 0, - 30023, - 30031, - 0, - 0, - 30033, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30034, - 0, - 30038, - 0, - 30039, - 0, - 30040, - 0, - 0, - 0, - 0, - 0, - 0, - 30067, - 30068, - 0, - 0, - 0, - 30069, - 0, - 30072, - 0, - 0, - 0, - 30073, - 0, - 0, - 0, - 0, - 30075, - 0, - 0, - 0, - 0, - 0, - 0, - 30079, - 0, - 0, - 30080, - 0, - 0, - 0, - 0, - 0, - 30082, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30084, - 30090, - 0, - 0, - 30091, - 0, - 0, - 0, - 0, - 30098, - 30118, - 0, - 30119, - 0, - 30121, - 30130, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30131, - 30132, - 30133, - 0, - 0, - 0, - 0, - 0, - 0, - 30135, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30136, - 0, - 0, - 30137, - 30138, - 0, - 0, - 0, - 30139, - 30146, - 0, - 0, - 0, - 0, - 0, - 30147, - 0, - 0, - 30148, - 30151, - 0, - 0, - 0, - 30168, - 0, - 30172, - 30173, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30180, - 30181, - 0, - 30192, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30194, - 30196, - 0, - 0, - 30199, - 0, - 0, - 30202, - 0, - 0, - 0, - 0, - 30203, - 
0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30213, - 0, - 0, - 0, - 30216, - 0, - 0, - 30217, - 0, - 0, - 0, - 30218, - 0, - 0, - 0, - 0, - 30219, - 0, - 30220, - 0, - 30222, - 30227, - 0, - 0, - 0, - 0, - 0, - 30231, - 0, - 0, - 30233, - 30235, - 0, - 0, - 0, - 0, - 30238, - 0, - 30240, - 30243, - 30245, - 0, - 30250, - 30252, - 0, - 0, - 0, - 30269, - 0, - 0, - 30271, - 30272, - 0, - 0, - 0, - 30278, - 30280, - 0, - 0, - 30282, - 0, - 30284, - 0, - 30294, - 0, - 0, - 0, - 0, - 30295, - 30296, - 0, - 0, - 0, - 0, - 0, - 30298, - 30299, - 30302, - 30304, - 30306, - 0, - 0, - 0, - 0, - 0, - 0, - 30316, - 30317, - 0, - 0, - 0, - 30318, - 0, - 0, - 0, - 30319, - 0, - 30320, - 30322, - 30326, - 0, - 0, - 0, - 0, - 0, - 30327, - 0, - 30332, - 30348, - 30349, - 0, - 0, - 30356, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30357, - 0, - 30358, - 0, - 30359, - 30360, - 0, - 0, - 30365, - 30366, - 30378, - 0, - 0, - 0, - 0, - 30379, - 0, - 0, - 30381, - 0, - 30385, - 0, - 30388, - 30397, - 0, - 0, - 0, - 30401, - 0, - 0, - 0, - 0, - 30403, - 0, - 0, - 0, - 0, - 0, - 30404, - 0, - 0, - 30405, - 0, - 30406, - 30408, - 0, - 30409, - 0, - 30410, - 0, - 0, - 0, - 30417, - 0, - 0, - 30418, - 30419, - 0, - 30420, - 0, - 30424, - 0, - 0, - 0, - 30427, - 30430, - 30432, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30433, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30436, - 0, - 30437, - 30438, - 0, - 30441, - 30442, - 0, - 0, - 0, - 30445, - 0, - 0, - 0, - 0, - 30452, - 30456, - 30457, - 0, - 0, - 0, - 30458, - 0, - 30464, - 0, - 0, - 0, - 0, - 0, - 0, - 30467, - 0, - 30469, - 0, - 0, - 0, - 0, - 0, - 30477, - 0, - 0, - 30484, - 0, - 0, - 0, - 0, - 0, - 30485, - 0, - 0, - 0, - 0, - 0, - 30486, - 30487, - 30497, - 30498, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30505, - 0, - 30508, - 0, - 0, - 0, - 30509, - 30510, - 0, - 30514, - 30516, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30523, - 0, - 30524, - 0, - 
30525, - 0, - 0, - 0, - 0, - 30537, - 0, - 0, - 30538, - 0, - 0, - 0, - 0, - 0, - 30553, - 0, - 0, - 30555, - 30556, - 30558, - 30559, - 30560, - 0, - 0, - 30561, - 0, - 30562, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30563, - 30570, - 30571, - 0, - 30586, - 30587, - 0, - 0, - 30590, - 0, - 0, - 30594, - 0, - 0, - 0, - 0, - 30611, - 30612, - 30623, - 30634, - 0, - 0, - 30636, - 30640, - 30655, - 30656, - 0, - 30657, - 0, - 0, - 30658, - 30669, - 0, - 30670, - 0, - 30676, - 30678, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30679, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30695, - 0, - 0, - 30698, - 0, - 0, - 0, - 0, - 30700, - 0, - 0, - 0, - 0, - 30701, - 0, - 30702, - 30703, - 0, - 0, - 0, - 0, - 30707, - 0, - 0, - 0, - 30709, - 0, - 0, - 30710, - 30719, - 30729, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30731, - 0, - 0, - 30733, - 0, - 0, - 0, - 30734, - 0, - 0, - 0, - 0, - 0, - 30736, - 30737, - 0, - 0, - 0, - 30740, - 0, - 0, - 0, - 30743, - 0, - 30746, - 0, - 30747, - 30748, - 0, - 0, - 30751, - 30752, - 30753, - 0, - 0, - 0, - 30754, - 0, - 0, - 30760, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30763, - 0, - 30764, - 0, - 0, - 30766, - 0, - 30769, - 30770, - 30771, - 30774, - 30777, - 0, - 0, - 30779, - 30780, - 30781, - 0, - 0, - 0, - 0, - 30790, - 0, - 0, - 0, - 30792, - 0, - 0, - 0, - 0, - 30810, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30812, - 30819, - 0, - 0, - 30823, - 30824, - 0, - 30825, - 0, - 30827, - 0, - 0, - 0, - 0, - 0, - 0, - 30828, - 0, - 0, - 30830, - 0, - 0, - 0, - 30834, - 0, - 30835, - 0, - 30837, - 30838, - 0, - 30845, - 0, - 0, - 0, - 0, - 0, - 30846, - 30847, - 0, - 0, - 30849, - 0, - 30851, - 0, - 0, - 0, - 0, - 0, - 30852, - 30858, - 0, - 0, - 30859, - 0, - 30865, - 0, - 0, - 30866, - 0, - 0, - 30868, - 0, - 0, - 30869, - 0, - 0, - 0, - 30881, - 30883, - 0, - 0, - 0, - 0, - 0, - 30889, - 0, - 30891, - 0, - 0, - 0, - 0, - 30894, - 0, - 30895, - 0, - 30897, - 0, - 30898, - 0, - 0, - 0, - 30904, - 30906, - 0, - 30909, - 0, - 0, - 0, 
- 0, - 0, - 0, - 30910, - 0, - 0, - 0, - 30915, - 30933, - 30942, - 0, - 0, - 0, - 0, - 30943, - 0, - 0, - 30945, - 0, - 0, - 0, - 0, - 0, - 0, - 30946, - 0, - 0, - 30947, - 0, - 0, - 30955, - 30956, - 0, - 0, - 30960, - 0, - 0, - 30961, - 30962, - 30966, - 0, - 0, - 30969, - 30974, - 0, - 0, - 0, - 30976, - 0, - 0, - 30977, - 0, - 30978, - 30982, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 30994, - 30995, - 30998, - 0, - 31000, - 0, - 0, - 31001, - 0, - 0, - 31003, - 31005, - 0, - 0, - 31006, - 31011, - 0, - 0, - 31014, - 0, - 31016, - 0, - 0, - 0, - 0, - 31018, - 0, - 0, - 31020, - 31023, - 31024, - 31025, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31027, - 31028, - 31029, - 0, - 0, - 0, - 0, - 0, - 0, - 31032, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31036, - 31037, - 31038, - 0, - 0, - 0, - 31041, - 31043, - 31045, - 0, - 31047, - 0, - 0, - 0, - 31048, - 0, - 31049, - 0, - 0, - 0, - 31053, - 31054, - 31055, - 0, - 0, - 31063, - 0, - 0, - 0, - 0, - 0, - 31066, - 0, - 31068, - 31071, - 0, - 0, - 0, - 31072, - 31073, - 0, - 0, - 0, - 0, - 31075, - 0, - 0, - 31076, - 0, - 0, - 0, - 31077, - 31079, - 0, - 31080, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31087, - 0, - 31142, - 0, - 31144, - 0, - 0, - 31145, - 31146, - 31147, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31149, - 0, - 31151, - 31152, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31162, - 31171, - 31174, - 31175, - 0, - 0, - 0, - 31176, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31179, - 0, - 0, - 0, - 31186, - 0, - 0, - 0, - 31192, - 31195, - 0, - 0, - 31196, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31198, - 0, - 0, - 0, - 0, - 0, - 31199, - 0, - 0, - 0, - 31205, - 0, - 0, - 0, - 0, - 31211, - 31215, - 0, - 0, - 0, - 0, - 31231, - 0, - 31232, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31233, - 31236, - 31253, - 0, - 31254, - 0, - 0, - 0, - 0, - 0, - 0, - 31255, - 0, - 0, - 31257, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31258, 
- 31259, - 0, - 0, - 31260, - 0, - 31261, - 0, - 0, - 0, - 0, - 0, - 31262, - 31263, - 0, - 0, - 31264, - 0, - 31266, - 0, - 31267, - 0, - 0, - 0, - 0, - 0, - 31281, - 0, - 31282, - 0, - 31284, - 0, - 0, - 31285, - 31287, - 31288, - 0, - 0, - 31290, - 0, - 0, - 0, - 31292, - 31295, - 0, - 31299, - 0, - 31300, - 0, - 0, - 0, - 0, - 0, - 31302, - 0, - 0, - 0, - 0, - 31303, - 0, - 0, - 0, - 0, - 0, - 0, - 31304, - 0, - 0, - 0, - 0, - 0, - 31305, - 31308, - 31309, - 31315, - 0, - 31317, - 0, - 0, - 0, - 0, - 0, - 31323, - 0, - 31324, - 0, - 0, - 0, - 0, - 0, - 31325, - 31327, - 0, - 0, - 31331, - 0, - 0, - 0, - 0, - 0, - 31333, - 0, - 0, - 0, - 0, - 0, - 31336, - 0, - 0, - 31337, - 0, - 0, - 0, - 0, - 0, - 0, - 31338, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31339, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31342, - 0, - 0, - 0, - 0, - 31345, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31347, - 0, - 0, - 0, - 0, - 0, - 0, - 31348, - 0, - 0, - 31350, - 31351, - 0, - 31352, - 0, - 0, - 31354, - 0, - 0, - 0, - 0, - 31355, - 0, - 0, - 31356, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31363, - 0, - 31372, - 0, - 0, - 31373, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31376, - 0, - 31388, - 0, - 31389, - 0, - 31392, - 0, - 31401, - 0, - 31405, - 31407, - 31408, - 0, - 31409, - 0, - 0, - 0, - 0, - 0, - 0, - 31413, - 31415, - 0, - 0, - 0, - 31416, - 31418, - 0, - 0, - 0, - 0, - 0, - 0, - 31422, - 31423, - 0, - 0, - 31424, - 0, - 31425, - 31432, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31433, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31434, - 0, - 0, - 0, - 0, - 0, - 0, - 31435, - 0, - 0, - 0, - 0, - 31438, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31442, - 0, - 31444, - 0, - 31448, - 0, - 0, - 31451, - 0, - 0, - 0, - 0, - 31452, - 0, - 31461, - 31465, - 0, - 0, - 31466, - 0, - 0, - 31467, - 0, - 0, - 31468, - 0, - 0, - 0, - 31469, - 31473, - 0, - 31476, - 0, - 0, - 0, - 0, - 31489, - 31490, - 0, - 
0, - 0, - 0, - 0, - 0, - 0, - 31492, - 31493, - 31494, - 0, - 0, - 0, - 0, - 31501, - 31504, - 31505, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31509, - 0, - 0, - 0, - 0, - 31510, - 0, - 0, - 31511, - 0, - 0, - 31513, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31514, - 0, - 31522, - 31536, - 31539, - 31540, - 0, - 31541, - 0, - 0, - 0, - 0, - 0, - 0, - 31546, - 31553, - 31559, - 0, - 0, - 0, - 31560, - 31561, - 31562, - 0, - 0, - 31564, - 31567, - 0, - 31569, - 0, - 0, - 0, - 31570, - 0, - 0, - 0, - 0, - 31571, - 0, - 0, - 0, - 0, - 0, - 0, - 31572, - 31574, - 31580, - 31581, - 0, - 0, - 31582, - 31584, - 31585, - 31586, - 31595, - 0, - 31596, - 0, - 0, - 0, - 0, - 31597, - 0, - 31599, - 0, - 31600, - 31601, - 0, - 0, - 31603, - 31604, - 0, - 0, - 31608, - 31610, - 0, - 0, - 0, - 31611, - 0, - 31615, - 0, - 0, - 0, - 0, - 31616, - 0, - 0, - 0, - 0, - 0, - 0, - 31617, - 0, - 0, - 0, - 0, - 0, - 31618, - 0, - 0, - 0, - 0, - 0, - 0, - 31621, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31622, - 31625, - 0, - 0, - 0, - 0, - 31627, - 0, - 31641, - 0, - 0, - 31642, - 0, - 0, - 31643, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31644, - 0, - 31646, - 0, - 0, - 0, - 0, - 31648, - 0, - 0, - 0, - 31652, - 0, - 0, - 0, - 31657, - 0, - 0, - 31676, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 31689, - 31691, - 31692, - 0, - 31694, - 0, - 0, - 0, - 31696, - 0, - 31702, - 0, - 31703, - 0, -} - -var kStaticDictionaryWords = [31705]dictWord{ - dictWord{0, 0, 0}, - dictWord{8, 0, 1002}, - dictWord{136, 0, 1015}, - dictWord{4, 0, 683}, - dictWord{4, 10, 325}, - dictWord{138, 10, 125}, - dictWord{7, 11, 572}, - dictWord{ - 9, - 11, - 592, - }, - dictWord{11, 11, 680}, - dictWord{11, 11, 842}, - dictWord{11, 11, 924}, - dictWord{12, 11, 356}, - dictWord{12, 11, 550}, - dictWord{13, 11, 317}, - dictWord{13, 11, 370}, - dictWord{13, 11, 469}, - dictWord{13, 11, 471}, - dictWord{14, 11, 397}, - dictWord{18, 11, 69}, - dictWord{146, 11, 145}, - dictWord{ - 134, - 0, - 1265, - }, - 
dictWord{136, 11, 534}, - dictWord{134, 0, 1431}, - dictWord{11, 0, 138}, - dictWord{140, 0, 40}, - dictWord{4, 0, 155}, - dictWord{7, 0, 1689}, - dictWord{ - 4, - 10, - 718, - }, - dictWord{135, 10, 1216}, - dictWord{4, 0, 245}, - dictWord{5, 0, 151}, - dictWord{5, 0, 741}, - dictWord{6, 0, 1147}, - dictWord{7, 0, 498}, - dictWord{7, 0, 870}, - dictWord{7, 0, 1542}, - dictWord{12, 0, 213}, - dictWord{14, 0, 36}, - dictWord{14, 0, 391}, - dictWord{17, 0, 111}, - dictWord{18, 0, 6}, - dictWord{18, 0, 46}, - dictWord{ - 18, - 0, - 151, - }, - dictWord{19, 0, 36}, - dictWord{20, 0, 32}, - dictWord{20, 0, 56}, - dictWord{20, 0, 69}, - dictWord{20, 0, 102}, - dictWord{21, 0, 4}, - dictWord{22, 0, 8}, - dictWord{ - 22, - 0, - 10, - }, - dictWord{22, 0, 14}, - dictWord{150, 0, 31}, - dictWord{4, 0, 624}, - dictWord{135, 0, 1752}, - dictWord{5, 10, 124}, - dictWord{5, 10, 144}, - dictWord{6, 10, 548}, - dictWord{7, 10, 15}, - dictWord{7, 10, 153}, - dictWord{137, 10, 629}, - dictWord{6, 0, 503}, - dictWord{9, 0, 586}, - dictWord{13, 0, 468}, - dictWord{14, 0, 66}, - dictWord{ - 16, - 0, - 58, - }, - dictWord{7, 10, 1531}, - dictWord{8, 10, 416}, - dictWord{9, 10, 275}, - dictWord{10, 10, 100}, - dictWord{11, 10, 658}, - dictWord{11, 10, 979}, - dictWord{ - 12, - 10, - 86, - }, - dictWord{14, 10, 207}, - dictWord{15, 10, 20}, - dictWord{143, 10, 25}, - dictWord{5, 0, 603}, - dictWord{7, 0, 1212}, - dictWord{9, 0, 565}, - dictWord{ - 14, - 0, - 301, - }, - dictWord{5, 10, 915}, - dictWord{6, 10, 1783}, - dictWord{7, 10, 211}, - dictWord{7, 10, 1353}, - dictWord{9, 10, 83}, - dictWord{10, 10, 376}, - dictWord{ - 10, - 10, - 431, - }, - dictWord{11, 10, 543}, - dictWord{12, 10, 664}, - dictWord{13, 10, 280}, - dictWord{13, 10, 428}, - dictWord{14, 10, 128}, - dictWord{17, 10, 52}, - dictWord{ - 145, - 10, - 81, - }, - dictWord{4, 0, 492}, - dictWord{133, 0, 451}, - dictWord{135, 0, 835}, - dictWord{141, 0, 70}, - dictWord{132, 0, 539}, - dictWord{7, 11, 748}, - dictWord{ - 
139, - 11, - 700, - }, - dictWord{7, 11, 1517}, - dictWord{11, 11, 597}, - dictWord{14, 11, 76}, - dictWord{14, 11, 335}, - dictWord{148, 11, 33}, - dictWord{6, 0, 113}, - dictWord{135, 0, 436}, - dictWord{4, 10, 338}, - dictWord{133, 10, 400}, - dictWord{136, 0, 718}, - dictWord{133, 11, 127}, - dictWord{133, 11, 418}, - dictWord{ - 6, - 0, - 1505, - }, - dictWord{7, 0, 520}, - dictWord{6, 11, 198}, - dictWord{11, 10, 892}, - dictWord{140, 11, 83}, - dictWord{4, 10, 221}, - dictWord{5, 10, 659}, - dictWord{ - 5, - 10, - 989, - }, - dictWord{7, 10, 697}, - dictWord{7, 10, 1211}, - dictWord{138, 10, 284}, - dictWord{135, 0, 1070}, - dictWord{5, 11, 276}, - dictWord{6, 11, 55}, - dictWord{ - 135, - 11, - 1369, - }, - dictWord{134, 0, 1515}, - dictWord{6, 11, 1752}, - dictWord{136, 11, 726}, - dictWord{138, 10, 507}, - dictWord{15, 0, 78}, - dictWord{4, 10, 188}, - dictWord{135, 10, 805}, - dictWord{5, 10, 884}, - dictWord{139, 10, 991}, - dictWord{133, 11, 764}, - dictWord{134, 10, 1653}, - dictWord{6, 11, 309}, - dictWord{ - 7, - 11, - 331, - }, - dictWord{138, 11, 550}, - dictWord{135, 11, 1861}, - dictWord{132, 11, 348}, - dictWord{135, 11, 986}, - dictWord{135, 11, 1573}, - dictWord{ - 12, - 0, - 610, - }, - dictWord{13, 0, 431}, - dictWord{144, 0, 59}, - dictWord{9, 11, 799}, - dictWord{140, 10, 166}, - dictWord{134, 0, 1530}, - dictWord{132, 0, 750}, - dictWord{132, 0, 307}, - dictWord{133, 0, 964}, - dictWord{6, 11, 194}, - dictWord{7, 11, 133}, - dictWord{10, 11, 493}, - dictWord{10, 11, 570}, - dictWord{139, 11, 664}, - dictWord{5, 11, 24}, - dictWord{5, 11, 569}, - dictWord{6, 11, 3}, - dictWord{6, 11, 119}, - dictWord{6, 11, 143}, - dictWord{6, 11, 440}, - dictWord{7, 11, 295}, - dictWord{ - 7, - 11, - 599, - }, - dictWord{7, 11, 1686}, - dictWord{7, 11, 1854}, - dictWord{8, 11, 424}, - dictWord{9, 11, 43}, - dictWord{9, 11, 584}, - dictWord{9, 11, 760}, - dictWord{ - 10, - 11, - 148, - }, - dictWord{10, 11, 328}, - dictWord{11, 11, 159}, - dictWord{11, 
11, 253}, - dictWord{11, 11, 506}, - dictWord{12, 11, 487}, - dictWord{12, 11, 531}, - dictWord{144, 11, 33}, - dictWord{136, 10, 760}, - dictWord{5, 11, 14}, - dictWord{5, 11, 892}, - dictWord{6, 11, 283}, - dictWord{7, 11, 234}, - dictWord{136, 11, 537}, - dictWord{135, 11, 1251}, - dictWord{4, 11, 126}, - dictWord{8, 11, 635}, - dictWord{147, 11, 34}, - dictWord{4, 11, 316}, - dictWord{135, 11, 1561}, - dictWord{ - 6, - 0, - 999, - }, - dictWord{6, 0, 1310}, - dictWord{137, 11, 861}, - dictWord{4, 11, 64}, - dictWord{5, 11, 352}, - dictWord{5, 11, 720}, - dictWord{6, 11, 368}, - dictWord{ - 139, - 11, - 359, - }, - dictWord{4, 0, 75}, - dictWord{5, 0, 180}, - dictWord{6, 0, 500}, - dictWord{7, 0, 58}, - dictWord{7, 0, 710}, - dictWord{10, 0, 645}, - dictWord{136, 10, 770}, - dictWord{133, 0, 649}, - dictWord{6, 0, 276}, - dictWord{7, 0, 282}, - dictWord{7, 0, 879}, - dictWord{7, 0, 924}, - dictWord{8, 0, 459}, - dictWord{9, 0, 599}, - dictWord{9, 0, 754}, - dictWord{11, 0, 574}, - dictWord{12, 0, 128}, - dictWord{12, 0, 494}, - dictWord{13, 0, 52}, - dictWord{13, 0, 301}, - dictWord{15, 0, 30}, - dictWord{143, 0, 132}, - dictWord{132, 0, 200}, - dictWord{4, 10, 89}, - dictWord{5, 10, 489}, - dictWord{6, 10, 315}, - dictWord{7, 10, 553}, - dictWord{7, 10, 1745}, - dictWord{138, 10, 243}, - dictWord{135, 11, 1050}, - dictWord{7, 0, 1621}, - dictWord{6, 10, 1658}, - dictWord{9, 10, 3}, - dictWord{10, 10, 154}, - dictWord{11, 10, 641}, - dictWord{13, 10, 85}, - dictWord{13, 10, 201}, - dictWord{141, 10, 346}, - dictWord{6, 11, 175}, - dictWord{137, 11, 289}, - dictWord{5, 11, 432}, - dictWord{133, 11, 913}, - dictWord{ - 6, - 0, - 225, - }, - dictWord{137, 0, 211}, - dictWord{7, 0, 718}, - dictWord{8, 0, 687}, - dictWord{139, 0, 374}, - dictWord{4, 10, 166}, - dictWord{133, 10, 505}, - dictWord{ - 9, - 0, - 110, - }, - dictWord{134, 10, 1670}, - dictWord{8, 0, 58}, - dictWord{9, 0, 724}, - dictWord{11, 0, 809}, - dictWord{13, 0, 113}, - dictWord{145, 0, 72}, - 
dictWord{6, 0, 345}, - dictWord{7, 0, 1247}, - dictWord{144, 11, 82}, - dictWord{5, 11, 931}, - dictWord{134, 11, 1698}, - dictWord{8, 0, 767}, - dictWord{8, 0, 803}, - dictWord{9, 0, 301}, - dictWord{137, 0, 903}, - dictWord{139, 0, 203}, - dictWord{134, 0, 1154}, - dictWord{7, 0, 1949}, - dictWord{136, 0, 674}, - dictWord{134, 0, 259}, - dictWord{ - 135, - 0, - 1275, - }, - dictWord{5, 11, 774}, - dictWord{6, 11, 1637}, - dictWord{6, 11, 1686}, - dictWord{134, 11, 1751}, - dictWord{134, 0, 1231}, - dictWord{7, 10, 445}, - dictWord{8, 10, 307}, - dictWord{8, 10, 704}, - dictWord{10, 10, 41}, - dictWord{10, 10, 439}, - dictWord{11, 10, 237}, - dictWord{11, 10, 622}, - dictWord{140, 10, 201}, - dictWord{136, 0, 254}, - dictWord{6, 11, 260}, - dictWord{135, 11, 1484}, - dictWord{139, 0, 277}, - dictWord{135, 10, 1977}, - dictWord{4, 10, 189}, - dictWord{ - 5, - 10, - 713, - }, - dictWord{6, 11, 573}, - dictWord{136, 10, 57}, - dictWord{138, 10, 371}, - dictWord{132, 10, 552}, - dictWord{134, 11, 344}, - dictWord{133, 0, 248}, - dictWord{9, 0, 800}, - dictWord{10, 0, 693}, - dictWord{11, 0, 482}, - dictWord{11, 0, 734}, - dictWord{11, 0, 789}, - dictWord{134, 11, 240}, - dictWord{4, 0, 116}, - dictWord{ - 5, - 0, - 95, - }, - dictWord{5, 0, 445}, - dictWord{7, 0, 1688}, - dictWord{8, 0, 29}, - dictWord{9, 0, 272}, - dictWord{11, 0, 509}, - dictWord{11, 0, 915}, - dictWord{4, 11, 292}, - dictWord{4, 11, 736}, - dictWord{5, 11, 871}, - dictWord{6, 11, 171}, - dictWord{6, 11, 1689}, - dictWord{7, 11, 1324}, - dictWord{7, 11, 1944}, - dictWord{9, 11, 415}, - dictWord{9, 11, 580}, - dictWord{14, 11, 230}, - dictWord{146, 11, 68}, - dictWord{7, 0, 490}, - dictWord{13, 0, 100}, - dictWord{143, 0, 75}, - dictWord{135, 0, 1641}, - dictWord{133, 0, 543}, - dictWord{7, 11, 209}, - dictWord{8, 11, 661}, - dictWord{10, 11, 42}, - dictWord{11, 11, 58}, - dictWord{12, 11, 58}, - dictWord{12, 11, 118}, - dictWord{141, 11, 32}, - dictWord{5, 0, 181}, - dictWord{8, 0, 41}, - 
dictWord{6, 11, 63}, - dictWord{135, 11, 920}, - dictWord{133, 0, 657}, - dictWord{133, 11, 793}, - dictWord{138, 0, 709}, - dictWord{7, 0, 25}, - dictWord{8, 0, 202}, - dictWord{138, 0, 536}, - dictWord{5, 11, 665}, - dictWord{135, 10, 1788}, - dictWord{145, 10, 49}, - dictWord{9, 0, 423}, - dictWord{140, 0, 89}, - dictWord{5, 11, 67}, - dictWord{6, 11, 62}, - dictWord{6, 11, 374}, - dictWord{135, 11, 1391}, - dictWord{8, 0, 113}, - dictWord{ - 9, - 0, - 877, - }, - dictWord{10, 0, 554}, - dictWord{11, 0, 83}, - dictWord{12, 0, 136}, - dictWord{19, 0, 109}, - dictWord{9, 11, 790}, - dictWord{140, 11, 47}, - dictWord{ - 138, - 10, - 661, - }, - dictWord{4, 0, 963}, - dictWord{10, 0, 927}, - dictWord{14, 0, 442}, - dictWord{135, 10, 1945}, - dictWord{133, 0, 976}, - dictWord{132, 0, 206}, - dictWord{ - 4, - 11, - 391, - }, - dictWord{135, 11, 1169}, - dictWord{134, 0, 2002}, - dictWord{6, 0, 696}, - dictWord{134, 0, 1008}, - dictWord{134, 0, 1170}, - dictWord{132, 11, 271}, - dictWord{7, 0, 13}, - dictWord{8, 0, 226}, - dictWord{10, 0, 537}, - dictWord{11, 0, 570}, - dictWord{11, 0, 605}, - dictWord{11, 0, 799}, - dictWord{11, 0, 804}, - dictWord{ - 12, - 0, - 85, - }, - dictWord{12, 0, 516}, - dictWord{12, 0, 623}, - dictWord{13, 0, 112}, - dictWord{13, 0, 361}, - dictWord{14, 0, 77}, - dictWord{14, 0, 78}, - dictWord{17, 0, 28}, - dictWord{19, 0, 110}, - dictWord{140, 11, 314}, - dictWord{132, 0, 769}, - dictWord{134, 0, 1544}, - dictWord{4, 0, 551}, - dictWord{137, 0, 678}, - dictWord{5, 10, 84}, - dictWord{134, 10, 163}, - dictWord{9, 0, 57}, - dictWord{9, 0, 459}, - dictWord{10, 0, 425}, - dictWord{11, 0, 119}, - dictWord{12, 0, 184}, - dictWord{12, 0, 371}, - dictWord{ - 13, - 0, - 358, - }, - dictWord{145, 0, 51}, - dictWord{5, 0, 188}, - dictWord{5, 0, 814}, - dictWord{8, 0, 10}, - dictWord{9, 0, 421}, - dictWord{9, 0, 729}, - dictWord{10, 0, 609}, - dictWord{11, 0, 689}, - dictWord{4, 11, 253}, - dictWord{5, 10, 410}, - dictWord{5, 11, 544}, - dictWord{7, 
11, 300}, - dictWord{137, 11, 340}, - dictWord{134, 0, 624}, - dictWord{138, 11, 321}, - dictWord{135, 0, 1941}, - dictWord{18, 0, 130}, - dictWord{5, 10, 322}, - dictWord{8, 10, 186}, - dictWord{9, 10, 262}, - dictWord{10, 10, 187}, - dictWord{142, 10, 208}, - dictWord{5, 11, 53}, - dictWord{5, 11, 541}, - dictWord{6, 11, 94}, - dictWord{6, 11, 499}, - dictWord{7, 11, 230}, - dictWord{139, 11, 321}, - dictWord{133, 10, 227}, - dictWord{4, 0, 378}, - dictWord{4, 11, 920}, - dictWord{5, 11, 25}, - dictWord{5, 11, 790}, - dictWord{6, 11, 457}, - dictWord{135, 11, 853}, - dictWord{137, 0, 269}, - dictWord{132, 0, 528}, - dictWord{134, 0, 1146}, - dictWord{7, 10, 1395}, - dictWord{8, 10, 486}, - dictWord{9, 10, 236}, - dictWord{9, 10, 878}, - dictWord{10, 10, 218}, - dictWord{11, 10, 95}, - dictWord{19, 10, 17}, - dictWord{147, 10, 31}, - dictWord{7, 10, 2043}, - dictWord{8, 10, 672}, - dictWord{ - 141, - 10, - 448, - }, - dictWord{134, 0, 1105}, - dictWord{134, 0, 1616}, - dictWord{134, 11, 1765}, - dictWord{140, 11, 163}, - dictWord{5, 10, 412}, - dictWord{133, 11, 822}, - dictWord{132, 11, 634}, - dictWord{6, 0, 656}, - dictWord{134, 11, 1730}, - dictWord{134, 0, 1940}, - dictWord{5, 0, 104}, - dictWord{6, 0, 173}, - dictWord{ - 135, - 0, - 1631, - }, - dictWord{136, 10, 562}, - dictWord{6, 11, 36}, - dictWord{7, 11, 658}, - dictWord{8, 11, 454}, - dictWord{147, 11, 86}, - dictWord{5, 0, 457}, - dictWord{ - 134, - 10, - 1771, - }, - dictWord{7, 0, 810}, - dictWord{8, 0, 138}, - dictWord{8, 0, 342}, - dictWord{9, 0, 84}, - dictWord{10, 0, 193}, - dictWord{11, 0, 883}, - dictWord{140, 0, 359}, - dictWord{9, 0, 620}, - dictWord{135, 10, 1190}, - dictWord{137, 10, 132}, - dictWord{7, 11, 975}, - dictWord{137, 11, 789}, - dictWord{6, 0, 95}, - dictWord{6, 0, 1934}, - dictWord{136, 0, 967}, - dictWord{141, 11, 335}, - dictWord{6, 0, 406}, - dictWord{10, 0, 409}, - dictWord{10, 0, 447}, - dictWord{11, 0, 44}, - dictWord{140, 0, 100}, - dictWord{4, 10, 317}, - dictWord{135, 
10, 1279}, - dictWord{132, 0, 477}, - dictWord{134, 0, 1268}, - dictWord{6, 0, 1941}, - dictWord{8, 0, 944}, - dictWord{5, 10, 63}, - dictWord{133, 10, 509}, - dictWord{132, 0, 629}, - dictWord{132, 11, 104}, - dictWord{4, 0, 246}, - dictWord{133, 0, 375}, - dictWord{6, 0, 1636}, - dictWord{ - 132, - 10, - 288, - }, - dictWord{135, 11, 1614}, - dictWord{9, 0, 49}, - dictWord{10, 0, 774}, - dictWord{8, 10, 89}, - dictWord{8, 10, 620}, - dictWord{11, 10, 628}, - dictWord{ - 12, - 10, - 322, - }, - dictWord{143, 10, 124}, - dictWord{4, 0, 282}, - dictWord{7, 0, 1034}, - dictWord{11, 0, 398}, - dictWord{11, 0, 634}, - dictWord{12, 0, 1}, - dictWord{12, 0, 79}, - dictWord{12, 0, 544}, - dictWord{14, 0, 237}, - dictWord{17, 0, 10}, - dictWord{146, 0, 20}, - dictWord{132, 0, 824}, - dictWord{7, 11, 45}, - dictWord{9, 11, 542}, - dictWord{ - 9, - 11, - 566, - }, - dictWord{138, 11, 728}, - dictWord{5, 0, 118}, - dictWord{5, 0, 499}, - dictWord{6, 0, 476}, - dictWord{6, 0, 665}, - dictWord{6, 0, 1176}, - dictWord{ - 6, - 0, - 1196, - }, - dictWord{7, 0, 600}, - dictWord{7, 0, 888}, - dictWord{135, 0, 1096}, - dictWord{7, 0, 296}, - dictWord{7, 0, 596}, - dictWord{8, 0, 560}, - dictWord{8, 0, 586}, - dictWord{9, 0, 612}, - dictWord{11, 0, 304}, - dictWord{12, 0, 46}, - dictWord{13, 0, 89}, - dictWord{14, 0, 112}, - dictWord{145, 0, 122}, - dictWord{5, 0, 894}, - dictWord{ - 6, - 0, - 1772, - }, - dictWord{9, 0, 1009}, - dictWord{138, 10, 120}, - dictWord{5, 11, 533}, - dictWord{7, 11, 755}, - dictWord{138, 11, 780}, - dictWord{151, 10, 1}, - dictWord{ - 6, - 0, - 1474, - }, - dictWord{7, 11, 87}, - dictWord{142, 11, 288}, - dictWord{139, 0, 366}, - dictWord{137, 10, 461}, - dictWord{7, 11, 988}, - dictWord{7, 11, 1939}, - dictWord{ - 9, - 11, - 64, - }, - dictWord{9, 11, 502}, - dictWord{12, 11, 7}, - dictWord{12, 11, 34}, - dictWord{13, 11, 12}, - dictWord{13, 11, 234}, - dictWord{147, 11, 77}, - dictWord{ - 7, - 0, - 1599, - }, - dictWord{7, 0, 1723}, - dictWord{8, 0, 79}, 
- dictWord{8, 0, 106}, - dictWord{8, 0, 190}, - dictWord{8, 0, 302}, - dictWord{8, 0, 383}, - dictWord{8, 0, 713}, - dictWord{ - 9, - 0, - 119, - }, - dictWord{9, 0, 233}, - dictWord{9, 0, 419}, - dictWord{9, 0, 471}, - dictWord{10, 0, 181}, - dictWord{10, 0, 406}, - dictWord{11, 0, 57}, - dictWord{11, 0, 85}, - dictWord{11, 0, 120}, - dictWord{11, 0, 177}, - dictWord{11, 0, 296}, - dictWord{11, 0, 382}, - dictWord{11, 0, 454}, - dictWord{11, 0, 758}, - dictWord{11, 0, 999}, - dictWord{ - 12, - 0, - 27, - }, - dictWord{12, 0, 98}, - dictWord{12, 0, 131}, - dictWord{12, 0, 245}, - dictWord{12, 0, 312}, - dictWord{12, 0, 446}, - dictWord{12, 0, 454}, - dictWord{13, 0, 25}, - dictWord{13, 0, 98}, - dictWord{13, 0, 426}, - dictWord{13, 0, 508}, - dictWord{14, 0, 70}, - dictWord{14, 0, 163}, - dictWord{14, 0, 272}, - dictWord{14, 0, 277}, - dictWord{ - 14, - 0, - 370, - }, - dictWord{15, 0, 95}, - dictWord{15, 0, 138}, - dictWord{15, 0, 167}, - dictWord{17, 0, 38}, - dictWord{148, 0, 96}, - dictWord{135, 10, 1346}, - dictWord{ - 10, - 0, - 200, - }, - dictWord{19, 0, 2}, - dictWord{151, 0, 22}, - dictWord{135, 11, 141}, - dictWord{134, 10, 85}, - dictWord{134, 0, 1759}, - dictWord{138, 0, 372}, - dictWord{ - 145, - 0, - 16, - }, - dictWord{8, 0, 943}, - dictWord{132, 11, 619}, - dictWord{139, 11, 88}, - dictWord{5, 11, 246}, - dictWord{8, 11, 189}, - dictWord{9, 11, 355}, - dictWord{ - 9, - 11, - 512, - }, - dictWord{10, 11, 124}, - dictWord{10, 11, 453}, - dictWord{11, 11, 143}, - dictWord{11, 11, 416}, - dictWord{11, 11, 859}, - dictWord{141, 11, 341}, - dictWord{ - 5, - 0, - 258, - }, - dictWord{134, 0, 719}, - dictWord{6, 0, 1798}, - dictWord{6, 0, 1839}, - dictWord{8, 0, 900}, - dictWord{10, 0, 874}, - dictWord{10, 0, 886}, - dictWord{ - 12, - 0, - 698, - }, - dictWord{12, 0, 732}, - dictWord{12, 0, 770}, - dictWord{16, 0, 106}, - dictWord{18, 0, 163}, - dictWord{18, 0, 170}, - dictWord{18, 0, 171}, - dictWord{152, 0, 20}, - dictWord{9, 0, 707}, - dictWord{11, 0, 
326}, - dictWord{11, 0, 339}, - dictWord{12, 0, 423}, - dictWord{12, 0, 502}, - dictWord{20, 0, 62}, - dictWord{9, 11, 707}, - dictWord{ - 11, - 11, - 326, - }, - dictWord{11, 11, 339}, - dictWord{12, 11, 423}, - dictWord{12, 11, 502}, - dictWord{148, 11, 62}, - dictWord{5, 0, 30}, - dictWord{7, 0, 495}, - dictWord{ - 8, - 0, - 134, - }, - dictWord{9, 0, 788}, - dictWord{140, 0, 438}, - dictWord{133, 11, 678}, - dictWord{5, 10, 279}, - dictWord{6, 10, 235}, - dictWord{7, 10, 468}, - dictWord{ - 8, - 10, - 446, - }, - dictWord{9, 10, 637}, - dictWord{10, 10, 717}, - dictWord{11, 10, 738}, - dictWord{140, 10, 514}, - dictWord{5, 11, 35}, - dictWord{6, 11, 287}, - dictWord{ - 7, - 11, - 862, - }, - dictWord{7, 11, 1886}, - dictWord{138, 11, 179}, - dictWord{7, 0, 1948}, - dictWord{7, 0, 2004}, - dictWord{132, 11, 517}, - dictWord{5, 10, 17}, - dictWord{ - 6, - 10, - 371, - }, - dictWord{137, 10, 528}, - dictWord{4, 0, 115}, - dictWord{5, 0, 669}, - dictWord{6, 0, 407}, - dictWord{8, 0, 311}, - dictWord{11, 0, 10}, - dictWord{141, 0, 5}, - dictWord{137, 0, 381}, - dictWord{5, 0, 50}, - dictWord{6, 0, 439}, - dictWord{7, 0, 780}, - dictWord{135, 0, 1040}, - dictWord{136, 11, 667}, - dictWord{11, 11, 403}, - dictWord{146, 11, 83}, - dictWord{5, 0, 1}, - dictWord{6, 0, 81}, - dictWord{138, 0, 520}, - dictWord{134, 0, 738}, - dictWord{5, 0, 482}, - dictWord{8, 0, 98}, - dictWord{9, 0, 172}, - dictWord{10, 0, 360}, - dictWord{10, 0, 700}, - dictWord{10, 0, 822}, - dictWord{11, 0, 302}, - dictWord{11, 0, 778}, - dictWord{12, 0, 50}, - dictWord{12, 0, 127}, - dictWord{ - 12, - 0, - 396, - }, - dictWord{13, 0, 62}, - dictWord{13, 0, 328}, - dictWord{14, 0, 122}, - dictWord{147, 0, 72}, - dictWord{9, 11, 157}, - dictWord{10, 11, 131}, - dictWord{ - 140, - 11, - 72, - }, - dictWord{135, 11, 714}, - dictWord{135, 11, 539}, - dictWord{5, 0, 2}, - dictWord{6, 0, 512}, - dictWord{7, 0, 797}, - dictWord{7, 0, 1494}, - dictWord{8, 0, 253}, - dictWord{8, 0, 589}, - dictWord{9, 0, 77}, 
- dictWord{10, 0, 1}, - dictWord{10, 0, 129}, - dictWord{10, 0, 225}, - dictWord{11, 0, 118}, - dictWord{11, 0, 226}, - dictWord{ - 11, - 0, - 251, - }, - dictWord{11, 0, 430}, - dictWord{11, 0, 701}, - dictWord{11, 0, 974}, - dictWord{11, 0, 982}, - dictWord{12, 0, 64}, - dictWord{12, 0, 260}, - dictWord{12, 0, 488}, - dictWord{140, 0, 690}, - dictWord{5, 11, 394}, - dictWord{7, 11, 367}, - dictWord{7, 11, 487}, - dictWord{7, 11, 857}, - dictWord{7, 11, 1713}, - dictWord{8, 11, 246}, - dictWord{9, 11, 537}, - dictWord{10, 11, 165}, - dictWord{12, 11, 219}, - dictWord{140, 11, 561}, - dictWord{136, 0, 557}, - dictWord{5, 10, 779}, - dictWord{5, 10, 807}, - dictWord{6, 10, 1655}, - dictWord{134, 10, 1676}, - dictWord{4, 10, 196}, - dictWord{5, 10, 558}, - dictWord{133, 10, 949}, - dictWord{11, 11, 827}, - dictWord{ - 12, - 11, - 56, - }, - dictWord{14, 11, 34}, - dictWord{143, 11, 148}, - dictWord{137, 0, 347}, - dictWord{133, 0, 572}, - dictWord{134, 0, 832}, - dictWord{4, 0, 12}, - dictWord{ - 7, - 0, - 504, - }, - dictWord{7, 0, 522}, - dictWord{7, 0, 809}, - dictWord{8, 0, 797}, - dictWord{141, 0, 88}, - dictWord{4, 10, 752}, - dictWord{133, 11, 449}, - dictWord{7, 11, 86}, - dictWord{8, 11, 103}, - dictWord{145, 11, 69}, - dictWord{7, 11, 2028}, - dictWord{138, 11, 641}, - dictWord{5, 0, 528}, - dictWord{6, 11, 1}, - dictWord{142, 11, 2}, - dictWord{134, 0, 861}, - dictWord{10, 0, 294}, - dictWord{4, 10, 227}, - dictWord{5, 10, 159}, - dictWord{5, 10, 409}, - dictWord{7, 10, 80}, - dictWord{10, 10, 479}, - dictWord{ - 12, - 10, - 418, - }, - dictWord{14, 10, 50}, - dictWord{14, 10, 249}, - dictWord{142, 10, 295}, - dictWord{7, 10, 1470}, - dictWord{8, 10, 66}, - dictWord{8, 10, 137}, - dictWord{ - 8, - 10, - 761, - }, - dictWord{9, 10, 638}, - dictWord{11, 10, 80}, - dictWord{11, 10, 212}, - dictWord{11, 10, 368}, - dictWord{11, 10, 418}, - dictWord{12, 10, 8}, - dictWord{ - 13, - 10, - 15, - }, - dictWord{16, 10, 61}, - dictWord{17, 10, 59}, - dictWord{19, 10, 
28}, - dictWord{148, 10, 84}, - dictWord{20, 0, 109}, - dictWord{135, 11, 1148}, - dictWord{ - 6, - 11, - 277, - }, - dictWord{7, 11, 1274}, - dictWord{7, 11, 1386}, - dictWord{7, 11, 1392}, - dictWord{12, 11, 129}, - dictWord{146, 11, 87}, - dictWord{6, 11, 187}, - dictWord{7, 11, 39}, - dictWord{7, 11, 1203}, - dictWord{8, 11, 380}, - dictWord{8, 11, 542}, - dictWord{14, 11, 117}, - dictWord{149, 11, 28}, - dictWord{134, 0, 1187}, - dictWord{5, 0, 266}, - dictWord{9, 0, 290}, - dictWord{9, 0, 364}, - dictWord{10, 0, 293}, - dictWord{11, 0, 606}, - dictWord{142, 0, 45}, - dictWord{6, 11, 297}, - dictWord{ - 7, - 11, - 793, - }, - dictWord{139, 11, 938}, - dictWord{4, 0, 50}, - dictWord{6, 0, 594}, - dictWord{9, 0, 121}, - dictWord{10, 0, 49}, - dictWord{10, 0, 412}, - dictWord{139, 0, 834}, - dictWord{136, 0, 748}, - dictWord{7, 11, 464}, - dictWord{8, 11, 438}, - dictWord{11, 11, 105}, - dictWord{11, 11, 363}, - dictWord{12, 11, 231}, - dictWord{ - 14, - 11, - 386, - }, - dictWord{15, 11, 102}, - dictWord{148, 11, 75}, - dictWord{132, 0, 466}, - dictWord{13, 0, 399}, - dictWord{14, 0, 337}, - dictWord{6, 10, 38}, - dictWord{ - 7, - 10, - 1220, - }, - dictWord{8, 10, 185}, - dictWord{8, 10, 256}, - dictWord{9, 10, 22}, - dictWord{9, 10, 331}, - dictWord{10, 10, 738}, - dictWord{11, 10, 205}, - dictWord{ - 11, - 10, - 540, - }, - dictWord{11, 10, 746}, - dictWord{13, 10, 465}, - dictWord{142, 10, 194}, - dictWord{9, 0, 378}, - dictWord{141, 0, 162}, - dictWord{137, 0, 519}, - dictWord{ - 4, - 10, - 159, - }, - dictWord{6, 10, 115}, - dictWord{7, 10, 252}, - dictWord{7, 10, 257}, - dictWord{7, 10, 1928}, - dictWord{8, 10, 69}, - dictWord{9, 10, 384}, - dictWord{ - 10, - 10, - 91, - }, - dictWord{10, 10, 615}, - dictWord{12, 10, 375}, - dictWord{14, 10, 235}, - dictWord{18, 10, 117}, - dictWord{147, 10, 123}, - dictWord{5, 11, 604}, - dictWord{ - 5, - 10, - 911, - }, - dictWord{136, 10, 278}, - dictWord{132, 0, 667}, - dictWord{8, 0, 351}, - dictWord{9, 0, 322}, - 
dictWord{4, 10, 151}, - dictWord{135, 10, 1567}, - dictWord{134, 0, 902}, - dictWord{133, 10, 990}, - dictWord{12, 0, 180}, - dictWord{5, 10, 194}, - dictWord{7, 10, 1662}, - dictWord{137, 10, 90}, - dictWord{4, 0, 869}, - dictWord{134, 0, 1996}, - dictWord{134, 0, 813}, - dictWord{133, 10, 425}, - dictWord{137, 11, 761}, - dictWord{132, 0, 260}, - dictWord{133, 10, 971}, - dictWord{ - 5, - 11, - 20, - }, - dictWord{6, 11, 298}, - dictWord{7, 11, 659}, - dictWord{7, 11, 1366}, - dictWord{137, 11, 219}, - dictWord{4, 0, 39}, - dictWord{5, 0, 36}, - dictWord{ - 7, - 0, - 1843, - }, - dictWord{8, 0, 407}, - dictWord{11, 0, 144}, - dictWord{140, 0, 523}, - dictWord{4, 0, 510}, - dictWord{10, 0, 587}, - dictWord{139, 10, 752}, - dictWord{7, 0, 29}, - dictWord{7, 0, 66}, - dictWord{7, 0, 1980}, - dictWord{10, 0, 487}, - dictWord{138, 0, 809}, - dictWord{13, 0, 260}, - dictWord{14, 0, 82}, - dictWord{18, 0, 63}, - dictWord{ - 137, - 10, - 662, - }, - dictWord{5, 10, 72}, - dictWord{6, 10, 264}, - dictWord{7, 10, 21}, - dictWord{7, 10, 46}, - dictWord{7, 10, 2013}, - dictWord{8, 10, 215}, - dictWord{ - 8, - 10, - 513, - }, - dictWord{10, 10, 266}, - dictWord{139, 10, 22}, - dictWord{134, 0, 570}, - dictWord{6, 0, 565}, - dictWord{7, 0, 1667}, - dictWord{4, 11, 439}, - dictWord{ - 10, - 10, - 95, - }, - dictWord{11, 10, 603}, - dictWord{12, 11, 242}, - dictWord{13, 10, 443}, - dictWord{14, 10, 160}, - dictWord{143, 10, 4}, - dictWord{134, 0, 1464}, - dictWord{ - 134, - 10, - 431, - }, - dictWord{9, 0, 372}, - dictWord{15, 0, 2}, - dictWord{19, 0, 10}, - dictWord{19, 0, 18}, - dictWord{5, 10, 874}, - dictWord{6, 10, 1677}, - dictWord{143, 10, 0}, - dictWord{132, 0, 787}, - dictWord{6, 0, 380}, - dictWord{12, 0, 399}, - dictWord{21, 0, 19}, - dictWord{7, 10, 939}, - dictWord{7, 10, 1172}, - dictWord{7, 10, 1671}, - dictWord{9, 10, 540}, - dictWord{10, 10, 696}, - dictWord{11, 10, 265}, - dictWord{11, 10, 732}, - dictWord{11, 10, 928}, - dictWord{11, 10, 937}, - dictWord{ - 
141, - 10, - 438, - }, - dictWord{137, 0, 200}, - dictWord{132, 11, 233}, - dictWord{132, 0, 516}, - dictWord{134, 11, 577}, - dictWord{132, 0, 844}, - dictWord{11, 0, 887}, - dictWord{14, 0, 365}, - dictWord{142, 0, 375}, - dictWord{132, 11, 482}, - dictWord{8, 0, 821}, - dictWord{140, 0, 44}, - dictWord{7, 0, 1655}, - dictWord{136, 0, 305}, - dictWord{5, 10, 682}, - dictWord{135, 10, 1887}, - dictWord{135, 11, 346}, - dictWord{132, 10, 696}, - dictWord{4, 0, 10}, - dictWord{7, 0, 917}, - dictWord{139, 0, 786}, - dictWord{5, 11, 795}, - dictWord{6, 11, 1741}, - dictWord{8, 11, 417}, - dictWord{137, 11, 782}, - dictWord{4, 0, 1016}, - dictWord{134, 0, 2031}, - dictWord{5, 0, 684}, - dictWord{4, 10, 726}, - dictWord{133, 10, 630}, - dictWord{6, 0, 1021}, - dictWord{134, 0, 1480}, - dictWord{8, 10, 802}, - dictWord{136, 10, 838}, - dictWord{ - 134, - 0, - 27, - }, - dictWord{134, 0, 395}, - dictWord{135, 11, 622}, - dictWord{7, 11, 625}, - dictWord{135, 11, 1750}, - dictWord{4, 11, 203}, - dictWord{135, 11, 1936}, - dictWord{6, 10, 118}, - dictWord{7, 10, 215}, - dictWord{7, 10, 1521}, - dictWord{140, 10, 11}, - dictWord{132, 0, 813}, - dictWord{136, 0, 511}, - dictWord{7, 10, 615}, - dictWord{138, 10, 251}, - dictWord{135, 10, 1044}, - dictWord{145, 0, 56}, - dictWord{133, 10, 225}, - dictWord{6, 0, 342}, - dictWord{6, 0, 496}, - dictWord{8, 0, 275}, - dictWord{137, 0, 206}, - dictWord{4, 0, 909}, - dictWord{133, 0, 940}, - dictWord{132, 0, 891}, - dictWord{7, 11, 311}, - dictWord{9, 11, 308}, - dictWord{ - 140, - 11, - 255, - }, - dictWord{4, 10, 370}, - dictWord{5, 10, 756}, - dictWord{135, 10, 1326}, - dictWord{4, 0, 687}, - dictWord{134, 0, 1596}, - dictWord{134, 0, 1342}, - dictWord{ - 6, - 10, - 1662, - }, - dictWord{7, 10, 48}, - dictWord{8, 10, 771}, - dictWord{10, 10, 116}, - dictWord{13, 10, 104}, - dictWord{14, 10, 105}, - dictWord{14, 10, 184}, - dictWord{15, 10, 168}, - dictWord{19, 10, 92}, - dictWord{148, 10, 68}, - dictWord{138, 10, 209}, - 
dictWord{4, 11, 400}, - dictWord{5, 11, 267}, - dictWord{135, 11, 232}, - dictWord{151, 11, 12}, - dictWord{6, 0, 41}, - dictWord{141, 0, 160}, - dictWord{141, 11, 314}, - dictWord{134, 0, 1718}, - dictWord{136, 0, 778}, - dictWord{ - 142, - 11, - 261, - }, - dictWord{134, 0, 1610}, - dictWord{133, 0, 115}, - dictWord{132, 0, 294}, - dictWord{14, 0, 314}, - dictWord{132, 10, 120}, - dictWord{132, 0, 983}, - dictWord{5, 0, 193}, - dictWord{140, 0, 178}, - dictWord{138, 10, 429}, - dictWord{5, 10, 820}, - dictWord{135, 10, 931}, - dictWord{6, 0, 994}, - dictWord{6, 0, 1051}, - dictWord{6, 0, 1439}, - dictWord{7, 0, 174}, - dictWord{133, 11, 732}, - dictWord{4, 11, 100}, - dictWord{7, 11, 679}, - dictWord{8, 11, 313}, - dictWord{138, 10, 199}, - dictWord{6, 10, 151}, - dictWord{6, 10, 1675}, - dictWord{7, 10, 383}, - dictWord{151, 10, 10}, - dictWord{6, 0, 1796}, - dictWord{8, 0, 848}, - dictWord{8, 0, 867}, - dictWord{ - 8, - 0, - 907, - }, - dictWord{10, 0, 855}, - dictWord{140, 0, 703}, - dictWord{140, 0, 221}, - dictWord{4, 0, 122}, - dictWord{5, 0, 796}, - dictWord{5, 0, 952}, - dictWord{6, 0, 1660}, - dictWord{6, 0, 1671}, - dictWord{8, 0, 567}, - dictWord{9, 0, 687}, - dictWord{9, 0, 742}, - dictWord{10, 0, 686}, - dictWord{11, 0, 682}, - dictWord{11, 0, 909}, - dictWord{ - 140, - 0, - 281, - }, - dictWord{5, 11, 362}, - dictWord{5, 11, 443}, - dictWord{6, 11, 318}, - dictWord{7, 11, 1019}, - dictWord{139, 11, 623}, - dictWord{5, 11, 463}, - dictWord{136, 11, 296}, - dictWord{11, 0, 583}, - dictWord{13, 0, 262}, - dictWord{6, 10, 1624}, - dictWord{12, 10, 422}, - dictWord{142, 10, 360}, - dictWord{5, 0, 179}, - dictWord{7, 0, 1095}, - dictWord{135, 0, 1213}, - dictWord{4, 10, 43}, - dictWord{4, 11, 454}, - dictWord{5, 10, 344}, - dictWord{133, 10, 357}, - dictWord{4, 0, 66}, - dictWord{7, 0, 722}, - dictWord{135, 0, 904}, - dictWord{134, 0, 773}, - dictWord{7, 0, 352}, - dictWord{133, 10, 888}, - dictWord{5, 11, 48}, - dictWord{5, 11, 404}, - dictWord{ - 6, - 
11, - 557, - }, - dictWord{7, 11, 458}, - dictWord{8, 11, 597}, - dictWord{10, 11, 455}, - dictWord{10, 11, 606}, - dictWord{11, 11, 49}, - dictWord{11, 11, 548}, - dictWord{ - 12, - 11, - 476, - }, - dictWord{13, 11, 18}, - dictWord{141, 11, 450}, - dictWord{134, 11, 418}, - dictWord{132, 10, 711}, - dictWord{5, 11, 442}, - dictWord{ - 135, - 11, - 1984, - }, - dictWord{141, 0, 35}, - dictWord{137, 0, 152}, - dictWord{134, 0, 1197}, - dictWord{135, 11, 1093}, - dictWord{137, 11, 203}, - dictWord{137, 10, 440}, - dictWord{10, 0, 592}, - dictWord{10, 0, 753}, - dictWord{12, 0, 317}, - dictWord{12, 0, 355}, - dictWord{12, 0, 465}, - dictWord{12, 0, 469}, - dictWord{12, 0, 560}, - dictWord{12, 0, 578}, - dictWord{141, 0, 243}, - dictWord{133, 0, 564}, - dictWord{134, 0, 797}, - dictWord{5, 10, 958}, - dictWord{133, 10, 987}, - dictWord{5, 11, 55}, - dictWord{7, 11, 376}, - dictWord{140, 11, 161}, - dictWord{133, 11, 450}, - dictWord{134, 0, 556}, - dictWord{134, 0, 819}, - dictWord{11, 10, 276}, - dictWord{ - 142, - 10, - 293, - }, - dictWord{7, 0, 544}, - dictWord{138, 0, 61}, - dictWord{8, 0, 719}, - dictWord{4, 10, 65}, - dictWord{5, 10, 479}, - dictWord{5, 10, 1004}, - dictWord{7, 10, 1913}, - dictWord{8, 10, 317}, - dictWord{9, 10, 302}, - dictWord{10, 10, 612}, - dictWord{141, 10, 22}, - dictWord{4, 0, 5}, - dictWord{5, 0, 498}, - dictWord{8, 0, 637}, - dictWord{ - 9, - 0, - 521, - }, - dictWord{4, 11, 213}, - dictWord{4, 10, 261}, - dictWord{7, 11, 223}, - dictWord{7, 10, 510}, - dictWord{136, 11, 80}, - dictWord{5, 0, 927}, - dictWord{7, 0, 101}, - dictWord{4, 10, 291}, - dictWord{7, 11, 381}, - dictWord{7, 11, 806}, - dictWord{7, 11, 820}, - dictWord{8, 11, 354}, - dictWord{8, 11, 437}, - dictWord{8, 11, 787}, - dictWord{9, 10, 515}, - dictWord{9, 11, 657}, - dictWord{10, 11, 58}, - dictWord{10, 11, 339}, - dictWord{10, 11, 749}, - dictWord{11, 11, 914}, - dictWord{12, 10, 152}, - dictWord{12, 11, 162}, - dictWord{12, 10, 443}, - dictWord{13, 11, 75}, - 
dictWord{13, 10, 392}, - dictWord{14, 11, 106}, - dictWord{14, 11, 198}, - dictWord{ - 14, - 11, - 320, - }, - dictWord{14, 10, 357}, - dictWord{14, 11, 413}, - dictWord{146, 11, 43}, - dictWord{6, 0, 1153}, - dictWord{7, 0, 1441}, - dictWord{136, 11, 747}, - dictWord{ - 4, - 0, - 893, - }, - dictWord{5, 0, 780}, - dictWord{133, 0, 893}, - dictWord{138, 11, 654}, - dictWord{133, 11, 692}, - dictWord{133, 0, 238}, - dictWord{134, 11, 191}, - dictWord{4, 10, 130}, - dictWord{135, 10, 843}, - dictWord{6, 0, 1296}, - dictWord{5, 10, 42}, - dictWord{5, 10, 879}, - dictWord{7, 10, 245}, - dictWord{7, 10, 324}, - dictWord{ - 7, - 10, - 1532, - }, - dictWord{11, 10, 463}, - dictWord{11, 10, 472}, - dictWord{13, 10, 363}, - dictWord{144, 10, 52}, - dictWord{134, 0, 1729}, - dictWord{6, 0, 1999}, - dictWord{136, 0, 969}, - dictWord{4, 10, 134}, - dictWord{133, 10, 372}, - dictWord{4, 0, 60}, - dictWord{7, 0, 941}, - dictWord{7, 0, 1800}, - dictWord{8, 0, 314}, - dictWord{ - 9, - 0, - 700, - }, - dictWord{139, 0, 487}, - dictWord{134, 0, 1144}, - dictWord{6, 11, 162}, - dictWord{7, 11, 1960}, - dictWord{136, 11, 831}, - dictWord{132, 11, 706}, - dictWord{135, 0, 1147}, - dictWord{138, 11, 426}, - dictWord{138, 11, 89}, - dictWord{7, 0, 1853}, - dictWord{138, 0, 437}, - dictWord{136, 0, 419}, - dictWord{ - 135, - 10, - 1634, - }, - dictWord{133, 0, 828}, - dictWord{5, 0, 806}, - dictWord{7, 0, 176}, - dictWord{7, 0, 178}, - dictWord{7, 0, 1240}, - dictWord{7, 0, 1976}, - dictWord{ - 132, - 10, - 644, - }, - dictWord{135, 11, 1877}, - dictWord{5, 11, 420}, - dictWord{135, 11, 1449}, - dictWord{4, 0, 51}, - dictWord{5, 0, 39}, - dictWord{6, 0, 4}, - dictWord{7, 0, 591}, - dictWord{7, 0, 849}, - dictWord{7, 0, 951}, - dictWord{7, 0, 1613}, - dictWord{7, 0, 1760}, - dictWord{7, 0, 1988}, - dictWord{9, 0, 434}, - dictWord{10, 0, 754}, - dictWord{ - 11, - 0, - 25, - }, - dictWord{139, 0, 37}, - dictWord{10, 11, 57}, - dictWord{138, 11, 277}, - dictWord{135, 10, 540}, - dictWord{132, 
11, 204}, - dictWord{135, 0, 159}, - dictWord{139, 11, 231}, - dictWord{133, 0, 902}, - dictWord{7, 0, 928}, - dictWord{7, 11, 366}, - dictWord{9, 11, 287}, - dictWord{12, 11, 199}, - dictWord{12, 11, 556}, - dictWord{140, 11, 577}, - dictWord{6, 10, 623}, - dictWord{136, 10, 789}, - dictWord{4, 10, 908}, - dictWord{5, 10, 359}, - dictWord{5, 10, 508}, - dictWord{6, 10, 1723}, - dictWord{7, 10, 343}, - dictWord{7, 10, 1996}, - dictWord{135, 10, 2026}, - dictWord{134, 0, 270}, - dictWord{4, 10, 341}, - dictWord{135, 10, 480}, - dictWord{ - 5, - 11, - 356, - }, - dictWord{135, 11, 224}, - dictWord{11, 11, 588}, - dictWord{11, 11, 864}, - dictWord{11, 11, 968}, - dictWord{143, 11, 160}, - dictWord{132, 0, 556}, - dictWord{137, 0, 801}, - dictWord{132, 0, 416}, - dictWord{142, 0, 372}, - dictWord{5, 0, 152}, - dictWord{5, 0, 197}, - dictWord{7, 0, 340}, - dictWord{7, 0, 867}, - dictWord{ - 10, - 0, - 548, - }, - dictWord{10, 0, 581}, - dictWord{11, 0, 6}, - dictWord{12, 0, 3}, - dictWord{12, 0, 19}, - dictWord{14, 0, 110}, - dictWord{142, 0, 289}, - dictWord{139, 0, 369}, - dictWord{7, 11, 630}, - dictWord{9, 11, 567}, - dictWord{11, 11, 150}, - dictWord{11, 11, 444}, - dictWord{141, 11, 119}, - dictWord{134, 11, 539}, - dictWord{ - 7, - 10, - 1995, - }, - dictWord{8, 10, 299}, - dictWord{11, 10, 890}, - dictWord{140, 10, 674}, - dictWord{7, 0, 34}, - dictWord{7, 0, 190}, - dictWord{8, 0, 28}, - dictWord{8, 0, 141}, - dictWord{8, 0, 444}, - dictWord{8, 0, 811}, - dictWord{9, 0, 468}, - dictWord{11, 0, 334}, - dictWord{12, 0, 24}, - dictWord{12, 0, 386}, - dictWord{140, 0, 576}, - dictWord{ - 133, - 0, - 757, - }, - dictWord{7, 0, 1553}, - dictWord{136, 0, 898}, - dictWord{133, 0, 721}, - dictWord{136, 0, 1012}, - dictWord{4, 0, 789}, - dictWord{5, 0, 647}, - dictWord{ - 135, - 0, - 1102, - }, - dictWord{132, 0, 898}, - dictWord{10, 0, 183}, - dictWord{4, 10, 238}, - dictWord{5, 10, 503}, - dictWord{6, 10, 179}, - dictWord{7, 10, 2003}, - dictWord{ - 8, - 10, - 381, - 
}, - dictWord{8, 10, 473}, - dictWord{9, 10, 149}, - dictWord{10, 10, 788}, - dictWord{15, 10, 45}, - dictWord{15, 10, 86}, - dictWord{20, 10, 110}, - dictWord{ - 150, - 10, - 57, - }, - dictWord{9, 0, 136}, - dictWord{19, 0, 107}, - dictWord{4, 10, 121}, - dictWord{5, 10, 156}, - dictWord{5, 10, 349}, - dictWord{10, 10, 605}, - dictWord{ - 142, - 10, - 342, - }, - dictWord{4, 11, 235}, - dictWord{135, 11, 255}, - dictWord{4, 11, 194}, - dictWord{5, 11, 584}, - dictWord{6, 11, 384}, - dictWord{7, 11, 583}, - dictWord{ - 10, - 11, - 761, - }, - dictWord{11, 11, 760}, - dictWord{139, 11, 851}, - dictWord{6, 10, 80}, - dictWord{6, 10, 1694}, - dictWord{7, 10, 173}, - dictWord{7, 10, 1974}, - dictWord{ - 9, - 10, - 547, - }, - dictWord{10, 10, 730}, - dictWord{14, 10, 18}, - dictWord{150, 10, 39}, - dictWord{4, 10, 923}, - dictWord{134, 10, 1711}, - dictWord{5, 0, 277}, - dictWord{141, 0, 247}, - dictWord{132, 0, 435}, - dictWord{133, 11, 562}, - dictWord{134, 0, 1311}, - dictWord{5, 11, 191}, - dictWord{137, 11, 271}, - dictWord{ - 132, - 10, - 595, - }, - dictWord{7, 11, 1537}, - dictWord{14, 11, 96}, - dictWord{143, 11, 73}, - dictWord{5, 0, 437}, - dictWord{7, 0, 502}, - dictWord{7, 0, 519}, - dictWord{7, 0, 1122}, - dictWord{7, 0, 1751}, - dictWord{14, 0, 211}, - dictWord{6, 10, 459}, - dictWord{7, 10, 1753}, - dictWord{7, 10, 1805}, - dictWord{8, 10, 658}, - dictWord{9, 10, 1}, - dictWord{11, 10, 959}, - dictWord{141, 10, 446}, - dictWord{6, 0, 814}, - dictWord{4, 11, 470}, - dictWord{5, 11, 473}, - dictWord{6, 11, 153}, - dictWord{7, 11, 1503}, - dictWord{7, 11, 1923}, - dictWord{10, 11, 701}, - dictWord{11, 11, 132}, - dictWord{11, 11, 168}, - dictWord{11, 11, 227}, - dictWord{11, 11, 320}, - dictWord{ - 11, - 11, - 436, - }, - dictWord{11, 11, 525}, - dictWord{11, 11, 855}, - dictWord{12, 11, 41}, - dictWord{12, 11, 286}, - dictWord{13, 11, 103}, - dictWord{13, 11, 284}, - dictWord{ - 14, - 11, - 255, - }, - dictWord{14, 11, 262}, - dictWord{15, 11, 117}, - 
dictWord{143, 11, 127}, - dictWord{5, 0, 265}, - dictWord{6, 0, 212}, - dictWord{135, 0, 28}, - dictWord{ - 138, - 0, - 750, - }, - dictWord{133, 11, 327}, - dictWord{6, 11, 552}, - dictWord{7, 11, 1754}, - dictWord{137, 11, 604}, - dictWord{134, 0, 2012}, - dictWord{132, 0, 702}, - dictWord{5, 11, 80}, - dictWord{6, 11, 405}, - dictWord{7, 11, 403}, - dictWord{7, 11, 1502}, - dictWord{7, 11, 1626}, - dictWord{8, 11, 456}, - dictWord{9, 11, 487}, - dictWord{9, 11, 853}, - dictWord{9, 11, 889}, - dictWord{10, 11, 309}, - dictWord{11, 11, 721}, - dictWord{11, 11, 994}, - dictWord{12, 11, 430}, - dictWord{ - 141, - 11, - 165, - }, - dictWord{5, 0, 808}, - dictWord{135, 0, 2045}, - dictWord{5, 0, 166}, - dictWord{8, 0, 739}, - dictWord{140, 0, 511}, - dictWord{134, 10, 490}, - dictWord{ - 4, - 11, - 453, - }, - dictWord{5, 11, 887}, - dictWord{6, 11, 535}, - dictWord{8, 11, 6}, - dictWord{136, 11, 543}, - dictWord{4, 0, 119}, - dictWord{5, 0, 170}, - dictWord{5, 0, 447}, - dictWord{7, 0, 1708}, - dictWord{7, 0, 1889}, - dictWord{9, 0, 357}, - dictWord{9, 0, 719}, - dictWord{12, 0, 486}, - dictWord{140, 0, 596}, - dictWord{137, 0, 500}, - dictWord{ - 7, - 10, - 250, - }, - dictWord{136, 10, 507}, - dictWord{132, 10, 158}, - dictWord{6, 0, 809}, - dictWord{134, 0, 1500}, - dictWord{9, 0, 327}, - dictWord{11, 0, 350}, - dictWord{11, 0, 831}, - dictWord{13, 0, 352}, - dictWord{4, 10, 140}, - dictWord{7, 10, 362}, - dictWord{8, 10, 209}, - dictWord{9, 10, 10}, - dictWord{9, 10, 503}, - dictWord{ - 9, - 10, - 614, - }, - dictWord{10, 10, 689}, - dictWord{11, 10, 327}, - dictWord{11, 10, 725}, - dictWord{12, 10, 252}, - dictWord{12, 10, 583}, - dictWord{13, 10, 192}, - dictWord{14, 10, 269}, - dictWord{14, 10, 356}, - dictWord{148, 10, 50}, - dictWord{135, 11, 741}, - dictWord{4, 0, 450}, - dictWord{7, 0, 1158}, - dictWord{19, 10, 1}, - dictWord{19, 10, 26}, - dictWord{150, 10, 9}, - dictWord{6, 0, 597}, - dictWord{135, 0, 1318}, - dictWord{134, 0, 1602}, - dictWord{6, 10, 
228}, - dictWord{7, 10, 1341}, - dictWord{9, 10, 408}, - dictWord{138, 10, 343}, - dictWord{7, 0, 1375}, - dictWord{7, 0, 1466}, - dictWord{138, 0, 331}, - dictWord{132, 0, 754}, - dictWord{ - 132, - 10, - 557, - }, - dictWord{5, 11, 101}, - dictWord{6, 11, 88}, - dictWord{6, 11, 543}, - dictWord{7, 11, 1677}, - dictWord{9, 11, 100}, - dictWord{10, 11, 677}, - dictWord{ - 14, - 11, - 169, - }, - dictWord{14, 11, 302}, - dictWord{14, 11, 313}, - dictWord{15, 11, 48}, - dictWord{143, 11, 84}, - dictWord{134, 0, 1368}, - dictWord{4, 11, 310}, - dictWord{ - 9, - 11, - 795, - }, - dictWord{10, 11, 733}, - dictWord{11, 11, 451}, - dictWord{12, 11, 249}, - dictWord{14, 11, 115}, - dictWord{14, 11, 286}, - dictWord{143, 11, 100}, - dictWord{132, 10, 548}, - dictWord{10, 0, 557}, - dictWord{7, 10, 197}, - dictWord{8, 10, 142}, - dictWord{8, 10, 325}, - dictWord{9, 10, 150}, - dictWord{9, 10, 596}, - dictWord{10, 10, 353}, - dictWord{11, 10, 74}, - dictWord{11, 10, 315}, - dictWord{12, 10, 662}, - dictWord{12, 10, 681}, - dictWord{14, 10, 423}, - dictWord{ - 143, - 10, - 141, - }, - dictWord{133, 11, 587}, - dictWord{5, 0, 850}, - dictWord{136, 0, 799}, - dictWord{10, 0, 908}, - dictWord{12, 0, 701}, - dictWord{12, 0, 757}, - dictWord{ - 142, - 0, - 466, - }, - dictWord{4, 0, 62}, - dictWord{5, 0, 275}, - dictWord{18, 0, 19}, - dictWord{6, 10, 399}, - dictWord{6, 10, 579}, - dictWord{7, 10, 692}, - dictWord{7, 10, 846}, - dictWord{ - 7, - 10, - 1015, - }, - dictWord{7, 10, 1799}, - dictWord{8, 10, 403}, - dictWord{9, 10, 394}, - dictWord{10, 10, 133}, - dictWord{12, 10, 4}, - dictWord{12, 10, 297}, - dictWord{12, 10, 452}, - dictWord{16, 10, 81}, - dictWord{18, 10, 25}, - dictWord{21, 10, 14}, - dictWord{22, 10, 12}, - dictWord{151, 10, 18}, - dictWord{12, 0, 459}, - dictWord{ - 7, - 10, - 1546, - }, - dictWord{11, 10, 299}, - dictWord{142, 10, 407}, - dictWord{132, 10, 177}, - dictWord{132, 11, 498}, - dictWord{7, 11, 217}, - dictWord{ - 8, - 11, - 140, - }, - dictWord{138, 
11, 610}, - dictWord{5, 10, 411}, - dictWord{135, 10, 653}, - dictWord{134, 0, 1802}, - dictWord{7, 10, 439}, - dictWord{10, 10, 727}, - dictWord{11, 10, 260}, - dictWord{139, 10, 684}, - dictWord{133, 11, 905}, - dictWord{11, 11, 580}, - dictWord{142, 11, 201}, - dictWord{134, 0, 1397}, - dictWord{ - 5, - 10, - 208, - }, - dictWord{7, 10, 753}, - dictWord{135, 10, 1528}, - dictWord{7, 0, 238}, - dictWord{7, 0, 2033}, - dictWord{8, 0, 120}, - dictWord{8, 0, 188}, - dictWord{8, 0, 659}, - dictWord{9, 0, 598}, - dictWord{10, 0, 466}, - dictWord{12, 0, 342}, - dictWord{12, 0, 588}, - dictWord{13, 0, 503}, - dictWord{14, 0, 246}, - dictWord{143, 0, 92}, - dictWord{135, 11, 1041}, - dictWord{4, 11, 456}, - dictWord{7, 11, 105}, - dictWord{7, 11, 358}, - dictWord{7, 11, 1637}, - dictWord{8, 11, 643}, - dictWord{139, 11, 483}, - dictWord{6, 0, 1318}, - dictWord{134, 0, 1324}, - dictWord{4, 0, 201}, - dictWord{7, 0, 1744}, - dictWord{8, 0, 602}, - dictWord{11, 0, 247}, - dictWord{11, 0, 826}, - dictWord{17, 0, 65}, - dictWord{133, 10, 242}, - dictWord{8, 0, 164}, - dictWord{146, 0, 62}, - dictWord{133, 10, 953}, - dictWord{139, 10, 802}, - dictWord{133, 0, 615}, - dictWord{7, 11, 1566}, - dictWord{8, 11, 269}, - dictWord{9, 11, 212}, - dictWord{9, 11, 718}, - dictWord{14, 11, 15}, - dictWord{14, 11, 132}, - dictWord{142, 11, 227}, - dictWord{133, 10, 290}, - dictWord{132, 10, 380}, - dictWord{5, 10, 52}, - dictWord{7, 10, 277}, - dictWord{9, 10, 368}, - dictWord{139, 10, 791}, - dictWord{ - 135, - 0, - 1243, - }, - dictWord{133, 11, 539}, - dictWord{11, 11, 919}, - dictWord{141, 11, 409}, - dictWord{136, 0, 968}, - dictWord{133, 11, 470}, - dictWord{134, 0, 882}, - dictWord{132, 0, 907}, - dictWord{5, 0, 100}, - dictWord{10, 0, 329}, - dictWord{12, 0, 416}, - dictWord{149, 0, 29}, - dictWord{10, 10, 138}, - dictWord{139, 10, 476}, - dictWord{5, 10, 725}, - dictWord{5, 10, 727}, - dictWord{6, 11, 91}, - dictWord{7, 11, 435}, - dictWord{135, 10, 1811}, - dictWord{4, 11, 16}, 
- dictWord{5, 11, 316}, - dictWord{5, 11, 842}, - dictWord{6, 11, 370}, - dictWord{6, 11, 1778}, - dictWord{8, 11, 166}, - dictWord{11, 11, 812}, - dictWord{12, 11, 206}, - dictWord{12, 11, 351}, - dictWord{14, 11, 418}, - dictWord{16, 11, 15}, - dictWord{16, 11, 34}, - dictWord{18, 11, 3}, - dictWord{19, 11, 3}, - dictWord{19, 11, 7}, - dictWord{20, 11, 4}, - dictWord{ - 149, - 11, - 21, - }, - dictWord{132, 0, 176}, - dictWord{5, 0, 636}, - dictWord{5, 0, 998}, - dictWord{7, 0, 9}, - dictWord{7, 0, 1508}, - dictWord{8, 0, 26}, - dictWord{9, 0, 317}, - dictWord{ - 9, - 0, - 358, - }, - dictWord{10, 0, 210}, - dictWord{10, 0, 292}, - dictWord{10, 0, 533}, - dictWord{11, 0, 555}, - dictWord{12, 0, 526}, - dictWord{12, 0, 607}, - dictWord{ - 13, - 0, - 263, - }, - dictWord{13, 0, 459}, - dictWord{142, 0, 271}, - dictWord{6, 0, 256}, - dictWord{8, 0, 265}, - dictWord{4, 10, 38}, - dictWord{7, 10, 307}, - dictWord{7, 10, 999}, - dictWord{7, 10, 1481}, - dictWord{7, 10, 1732}, - dictWord{7, 10, 1738}, - dictWord{9, 10, 414}, - dictWord{11, 10, 316}, - dictWord{12, 10, 52}, - dictWord{13, 10, 420}, - dictWord{147, 10, 100}, - dictWord{135, 10, 1296}, - dictWord{4, 11, 611}, - dictWord{133, 11, 606}, - dictWord{4, 0, 643}, - dictWord{142, 11, 21}, - dictWord{ - 133, - 11, - 715, - }, - dictWord{133, 10, 723}, - dictWord{6, 0, 610}, - dictWord{135, 11, 597}, - dictWord{10, 0, 127}, - dictWord{141, 0, 27}, - dictWord{6, 0, 1995}, - dictWord{ - 6, - 0, - 2001, - }, - dictWord{8, 0, 119}, - dictWord{136, 0, 973}, - dictWord{4, 11, 149}, - dictWord{138, 11, 368}, - dictWord{12, 0, 522}, - dictWord{4, 11, 154}, - dictWord{ - 5, - 10, - 109, - }, - dictWord{6, 10, 1784}, - dictWord{7, 11, 1134}, - dictWord{7, 10, 1895}, - dictWord{8, 11, 105}, - dictWord{12, 10, 296}, - dictWord{140, 10, 302}, - dictWord{4, 11, 31}, - dictWord{6, 11, 429}, - dictWord{7, 11, 962}, - dictWord{9, 11, 458}, - dictWord{139, 11, 691}, - dictWord{10, 0, 553}, - dictWord{11, 0, 876}, - dictWord{13, 0, 
193}, - dictWord{13, 0, 423}, - dictWord{14, 0, 166}, - dictWord{19, 0, 84}, - dictWord{4, 11, 312}, - dictWord{5, 10, 216}, - dictWord{7, 10, 1879}, - dictWord{ - 9, - 10, - 141, - }, - dictWord{9, 10, 270}, - dictWord{9, 10, 679}, - dictWord{10, 10, 159}, - dictWord{11, 10, 197}, - dictWord{12, 10, 538}, - dictWord{12, 10, 559}, - dictWord{14, 10, 144}, - dictWord{14, 10, 167}, - dictWord{143, 10, 67}, - dictWord{134, 0, 1582}, - dictWord{7, 0, 1578}, - dictWord{135, 11, 1578}, - dictWord{ - 137, - 10, - 81, - }, - dictWord{132, 11, 236}, - dictWord{134, 10, 391}, - dictWord{134, 0, 795}, - dictWord{7, 10, 322}, - dictWord{136, 10, 249}, - dictWord{5, 11, 836}, - dictWord{ - 5, - 11, - 857, - }, - dictWord{6, 11, 1680}, - dictWord{7, 11, 59}, - dictWord{147, 11, 53}, - dictWord{135, 0, 432}, - dictWord{10, 11, 68}, - dictWord{139, 11, 494}, - dictWord{4, 11, 81}, - dictWord{139, 11, 867}, - dictWord{7, 0, 126}, - dictWord{136, 0, 84}, - dictWord{142, 11, 280}, - dictWord{5, 11, 282}, - dictWord{8, 11, 650}, - dictWord{ - 9, - 11, - 295, - }, - dictWord{9, 11, 907}, - dictWord{138, 11, 443}, - dictWord{136, 0, 790}, - dictWord{5, 10, 632}, - dictWord{138, 10, 526}, - dictWord{6, 0, 64}, - dictWord{12, 0, 377}, - dictWord{13, 0, 309}, - dictWord{14, 0, 141}, - dictWord{14, 0, 429}, - dictWord{14, 11, 141}, - dictWord{142, 11, 429}, - dictWord{134, 0, 1529}, - dictWord{6, 0, 321}, - dictWord{7, 0, 1857}, - dictWord{9, 0, 530}, - dictWord{19, 0, 99}, - dictWord{7, 10, 948}, - dictWord{7, 10, 1042}, - dictWord{8, 10, 235}, - dictWord{ - 8, - 10, - 461, - }, - dictWord{9, 10, 453}, - dictWord{10, 10, 354}, - dictWord{145, 10, 77}, - dictWord{7, 0, 1104}, - dictWord{11, 0, 269}, - dictWord{11, 0, 539}, - dictWord{ - 11, - 0, - 627, - }, - dictWord{11, 0, 706}, - dictWord{11, 0, 975}, - dictWord{12, 0, 248}, - dictWord{12, 0, 434}, - dictWord{12, 0, 600}, - dictWord{12, 0, 622}, - dictWord{ - 13, - 0, - 297, - }, - dictWord{13, 0, 485}, - dictWord{14, 0, 69}, - 
dictWord{14, 0, 409}, - dictWord{143, 0, 108}, - dictWord{4, 10, 362}, - dictWord{7, 10, 52}, - dictWord{7, 10, 303}, - dictWord{10, 11, 70}, - dictWord{12, 11, 26}, - dictWord{14, 11, 17}, - dictWord{14, 11, 178}, - dictWord{15, 11, 34}, - dictWord{149, 11, 12}, - dictWord{11, 0, 977}, - dictWord{141, 0, 507}, - dictWord{9, 0, 34}, - dictWord{139, 0, 484}, - dictWord{5, 10, 196}, - dictWord{6, 10, 486}, - dictWord{7, 10, 212}, - dictWord{8, 10, 309}, - dictWord{136, 10, 346}, - dictWord{6, 0, 1700}, - dictWord{7, 0, 26}, - dictWord{7, 0, 293}, - dictWord{7, 0, 382}, - dictWord{7, 0, 1026}, - dictWord{7, 0, 1087}, - dictWord{ - 7, - 0, - 2027, - }, - dictWord{8, 0, 24}, - dictWord{8, 0, 114}, - dictWord{8, 0, 252}, - dictWord{8, 0, 727}, - dictWord{8, 0, 729}, - dictWord{9, 0, 30}, - dictWord{9, 0, 199}, - dictWord{ - 9, - 0, - 231, - }, - dictWord{9, 0, 251}, - dictWord{9, 0, 334}, - dictWord{9, 0, 361}, - dictWord{9, 0, 712}, - dictWord{10, 0, 55}, - dictWord{10, 0, 60}, - dictWord{10, 0, 232}, - dictWord{ - 10, - 0, - 332, - }, - dictWord{10, 0, 384}, - dictWord{10, 0, 396}, - dictWord{10, 0, 504}, - dictWord{10, 0, 542}, - dictWord{10, 0, 652}, - dictWord{11, 0, 20}, - dictWord{11, 0, 48}, - dictWord{11, 0, 207}, - dictWord{11, 0, 291}, - dictWord{11, 0, 298}, - dictWord{11, 0, 342}, - dictWord{11, 0, 365}, - dictWord{11, 0, 394}, - dictWord{11, 0, 620}, - dictWord{11, 0, 705}, - dictWord{11, 0, 1017}, - dictWord{12, 0, 123}, - dictWord{12, 0, 340}, - dictWord{12, 0, 406}, - dictWord{12, 0, 643}, - dictWord{13, 0, 61}, - dictWord{ - 13, - 0, - 269, - }, - dictWord{13, 0, 311}, - dictWord{13, 0, 319}, - dictWord{13, 0, 486}, - dictWord{14, 0, 234}, - dictWord{15, 0, 62}, - dictWord{15, 0, 85}, - dictWord{16, 0, 71}, - dictWord{18, 0, 119}, - dictWord{20, 0, 105}, - dictWord{135, 10, 1912}, - dictWord{4, 11, 71}, - dictWord{5, 11, 376}, - dictWord{7, 11, 119}, - dictWord{138, 11, 665}, - dictWord{10, 0, 918}, - dictWord{10, 0, 926}, - dictWord{4, 10, 686}, - 
dictWord{136, 11, 55}, - dictWord{138, 10, 625}, - dictWord{136, 10, 706}, - dictWord{ - 132, - 11, - 479, - }, - dictWord{4, 10, 30}, - dictWord{133, 10, 43}, - dictWord{6, 0, 379}, - dictWord{7, 0, 270}, - dictWord{8, 0, 176}, - dictWord{8, 0, 183}, - dictWord{9, 0, 432}, - dictWord{ - 9, - 0, - 661, - }, - dictWord{12, 0, 247}, - dictWord{12, 0, 617}, - dictWord{18, 0, 125}, - dictWord{7, 11, 607}, - dictWord{8, 11, 99}, - dictWord{152, 11, 4}, - dictWord{ - 5, - 0, - 792, - }, - dictWord{133, 0, 900}, - dictWord{4, 11, 612}, - dictWord{133, 11, 561}, - dictWord{4, 11, 41}, - dictWord{4, 10, 220}, - dictWord{5, 11, 74}, - dictWord{ - 7, - 10, - 1535, - }, - dictWord{7, 11, 1627}, - dictWord{11, 11, 871}, - dictWord{140, 11, 619}, - dictWord{135, 0, 1920}, - dictWord{7, 11, 94}, - dictWord{11, 11, 329}, - dictWord{11, 11, 965}, - dictWord{12, 11, 241}, - dictWord{14, 11, 354}, - dictWord{15, 11, 22}, - dictWord{148, 11, 63}, - dictWord{9, 11, 209}, - dictWord{137, 11, 300}, - dictWord{134, 0, 771}, - dictWord{135, 0, 1979}, - dictWord{4, 0, 901}, - dictWord{133, 0, 776}, - dictWord{142, 0, 254}, - dictWord{133, 11, 98}, - dictWord{ - 9, - 11, - 16, - }, - dictWord{141, 11, 386}, - dictWord{133, 11, 984}, - dictWord{4, 11, 182}, - dictWord{6, 11, 205}, - dictWord{135, 11, 220}, - dictWord{7, 10, 1725}, - dictWord{ - 7, - 10, - 1774, - }, - dictWord{138, 10, 393}, - dictWord{5, 10, 263}, - dictWord{134, 10, 414}, - dictWord{4, 11, 42}, - dictWord{9, 11, 205}, - dictWord{9, 11, 786}, - dictWord{138, 11, 659}, - dictWord{14, 0, 140}, - dictWord{148, 0, 41}, - dictWord{8, 0, 440}, - dictWord{10, 0, 359}, - dictWord{6, 10, 178}, - dictWord{6, 11, 289}, - dictWord{ - 6, - 10, - 1750, - }, - dictWord{7, 11, 1670}, - dictWord{9, 10, 690}, - dictWord{10, 10, 155}, - dictWord{10, 10, 373}, - dictWord{11, 10, 698}, - dictWord{12, 11, 57}, - dictWord{13, 10, 155}, - dictWord{20, 10, 93}, - dictWord{151, 11, 4}, - dictWord{4, 0, 37}, - dictWord{5, 0, 334}, - dictWord{7, 0, 
1253}, - dictWord{151, 11, 25}, - dictWord{ - 4, - 0, - 508, - }, - dictWord{4, 11, 635}, - dictWord{5, 10, 97}, - dictWord{137, 10, 393}, - dictWord{139, 11, 533}, - dictWord{4, 0, 640}, - dictWord{133, 0, 513}, - dictWord{ - 134, - 10, - 1639, - }, - dictWord{132, 11, 371}, - dictWord{4, 11, 272}, - dictWord{7, 11, 836}, - dictWord{7, 11, 1651}, - dictWord{145, 11, 89}, - dictWord{5, 11, 825}, - dictWord{6, 11, 444}, - dictWord{6, 11, 1640}, - dictWord{136, 11, 308}, - dictWord{4, 10, 191}, - dictWord{7, 10, 934}, - dictWord{8, 10, 647}, - dictWord{145, 10, 97}, - dictWord{12, 0, 246}, - dictWord{15, 0, 162}, - dictWord{19, 0, 64}, - dictWord{20, 0, 8}, - dictWord{20, 0, 95}, - dictWord{22, 0, 24}, - dictWord{152, 0, 17}, - dictWord{4, 0, 533}, - dictWord{5, 10, 165}, - dictWord{9, 10, 346}, - dictWord{138, 10, 655}, - dictWord{5, 11, 737}, - dictWord{139, 10, 885}, - dictWord{133, 10, 877}, - dictWord{ - 8, - 10, - 128, - }, - dictWord{139, 10, 179}, - dictWord{137, 11, 307}, - dictWord{140, 0, 752}, - dictWord{133, 0, 920}, - dictWord{135, 0, 1048}, - dictWord{5, 0, 153}, - dictWord{ - 6, - 0, - 580, - }, - dictWord{6, 10, 1663}, - dictWord{7, 10, 132}, - dictWord{7, 10, 1154}, - dictWord{7, 10, 1415}, - dictWord{7, 10, 1507}, - dictWord{12, 10, 493}, - dictWord{15, 10, 105}, - dictWord{151, 10, 15}, - dictWord{5, 10, 459}, - dictWord{7, 10, 1073}, - dictWord{8, 10, 241}, - dictWord{136, 10, 334}, - dictWord{138, 0, 391}, - dictWord{135, 0, 1952}, - dictWord{133, 11, 525}, - dictWord{8, 11, 641}, - dictWord{11, 11, 388}, - dictWord{140, 11, 580}, - dictWord{142, 0, 126}, - dictWord{ - 134, - 0, - 640, - }, - dictWord{132, 0, 483}, - dictWord{7, 0, 1616}, - dictWord{9, 0, 69}, - dictWord{6, 10, 324}, - dictWord{6, 10, 520}, - dictWord{7, 10, 338}, - dictWord{ - 7, - 10, - 1729, - }, - dictWord{8, 10, 228}, - dictWord{139, 10, 750}, - dictWord{5, 11, 493}, - dictWord{134, 11, 528}, - dictWord{135, 0, 734}, - dictWord{4, 11, 174}, - dictWord{135, 11, 911}, - 
dictWord{138, 0, 480}, - dictWord{9, 0, 495}, - dictWord{146, 0, 104}, - dictWord{135, 10, 705}, - dictWord{9, 0, 472}, - dictWord{4, 10, 73}, - dictWord{6, 10, 612}, - dictWord{7, 10, 927}, - dictWord{7, 10, 1330}, - dictWord{7, 10, 1822}, - dictWord{8, 10, 217}, - dictWord{9, 10, 765}, - dictWord{9, 10, 766}, - dictWord{10, 10, 408}, - dictWord{11, 10, 51}, - dictWord{11, 10, 793}, - dictWord{12, 10, 266}, - dictWord{15, 10, 158}, - dictWord{20, 10, 89}, - dictWord{150, 10, 32}, - dictWord{7, 11, 548}, - dictWord{137, 11, 58}, - dictWord{4, 11, 32}, - dictWord{5, 11, 215}, - dictWord{6, 11, 269}, - dictWord{7, 11, 1782}, - dictWord{7, 11, 1892}, - dictWord{10, 11, 16}, - dictWord{11, 11, 822}, - dictWord{11, 11, 954}, - dictWord{141, 11, 481}, - dictWord{132, 0, 874}, - dictWord{9, 0, 229}, - dictWord{5, 10, 389}, - dictWord{136, 10, 636}, - dictWord{7, 11, 1749}, - dictWord{136, 11, 477}, - dictWord{134, 0, 948}, - dictWord{5, 11, 308}, - dictWord{135, 11, 1088}, - dictWord{ - 4, - 0, - 748, - }, - dictWord{139, 0, 1009}, - dictWord{136, 10, 21}, - dictWord{6, 0, 555}, - dictWord{135, 0, 485}, - dictWord{5, 11, 126}, - dictWord{8, 11, 297}, - dictWord{ - 9, - 11, - 366, - }, - dictWord{9, 11, 445}, - dictWord{12, 11, 53}, - dictWord{12, 11, 374}, - dictWord{141, 11, 492}, - dictWord{7, 11, 1551}, - dictWord{139, 11, 361}, - dictWord{136, 0, 193}, - dictWord{136, 0, 472}, - dictWord{8, 0, 653}, - dictWord{13, 0, 93}, - dictWord{147, 0, 14}, - dictWord{132, 0, 984}, - dictWord{132, 11, 175}, - dictWord{5, 0, 172}, - dictWord{6, 0, 1971}, - dictWord{132, 11, 685}, - dictWord{149, 11, 8}, - dictWord{133, 11, 797}, - dictWord{13, 0, 83}, - dictWord{5, 10, 189}, - dictWord{ - 7, - 10, - 442, - }, - dictWord{7, 10, 443}, - dictWord{8, 10, 281}, - dictWord{12, 10, 174}, - dictWord{141, 10, 261}, - dictWord{134, 0, 1568}, - dictWord{133, 11, 565}, - dictWord{139, 0, 384}, - dictWord{133, 0, 260}, - dictWord{7, 0, 758}, - dictWord{7, 0, 880}, - dictWord{7, 0, 1359}, - 
dictWord{9, 0, 164}, - dictWord{9, 0, 167}, - dictWord{ - 10, - 0, - 156, - }, - dictWord{10, 0, 588}, - dictWord{12, 0, 101}, - dictWord{14, 0, 48}, - dictWord{15, 0, 70}, - dictWord{6, 10, 2}, - dictWord{7, 10, 1262}, - dictWord{ - 7, - 10, - 1737, - }, - dictWord{8, 10, 22}, - dictWord{8, 10, 270}, - dictWord{8, 10, 612}, - dictWord{9, 10, 312}, - dictWord{9, 10, 436}, - dictWord{10, 10, 311}, - dictWord{ - 10, - 10, - 623, - }, - dictWord{11, 10, 72}, - dictWord{11, 10, 330}, - dictWord{11, 10, 455}, - dictWord{12, 10, 321}, - dictWord{12, 10, 504}, - dictWord{12, 10, 530}, - dictWord{ - 12, - 10, - 543, - }, - dictWord{13, 10, 17}, - dictWord{13, 10, 156}, - dictWord{13, 10, 334}, - dictWord{17, 10, 60}, - dictWord{148, 10, 64}, - dictWord{4, 11, 252}, - dictWord{ - 7, - 11, - 1068, - }, - dictWord{10, 11, 434}, - dictWord{11, 11, 228}, - dictWord{11, 11, 426}, - dictWord{13, 11, 231}, - dictWord{18, 11, 106}, - dictWord{148, 11, 87}, - dictWord{7, 10, 354}, - dictWord{10, 10, 410}, - dictWord{139, 10, 815}, - dictWord{6, 0, 367}, - dictWord{7, 10, 670}, - dictWord{7, 10, 1327}, - dictWord{8, 10, 411}, - dictWord{8, 10, 435}, - dictWord{9, 10, 653}, - dictWord{9, 10, 740}, - dictWord{10, 10, 385}, - dictWord{11, 10, 222}, - dictWord{11, 10, 324}, - dictWord{11, 10, 829}, - dictWord{140, 10, 611}, - dictWord{7, 0, 1174}, - dictWord{6, 10, 166}, - dictWord{135, 10, 374}, - dictWord{146, 0, 121}, - dictWord{132, 0, 828}, - dictWord{ - 5, - 11, - 231, - }, - dictWord{138, 11, 509}, - dictWord{7, 11, 601}, - dictWord{9, 11, 277}, - dictWord{9, 11, 674}, - dictWord{10, 11, 178}, - dictWord{10, 11, 257}, - dictWord{ - 10, - 11, - 418, - }, - dictWord{11, 11, 531}, - dictWord{11, 11, 544}, - dictWord{11, 11, 585}, - dictWord{12, 11, 113}, - dictWord{12, 11, 475}, - dictWord{13, 11, 99}, - dictWord{142, 11, 428}, - dictWord{134, 0, 1541}, - dictWord{135, 11, 1779}, - dictWord{5, 0, 343}, - dictWord{134, 10, 398}, - dictWord{135, 10, 50}, - dictWord{ - 135, - 11, - 
1683, - }, - dictWord{4, 0, 440}, - dictWord{7, 0, 57}, - dictWord{8, 0, 167}, - dictWord{8, 0, 375}, - dictWord{9, 0, 82}, - dictWord{9, 0, 561}, - dictWord{9, 0, 744}, - dictWord{ - 10, - 0, - 620, - }, - dictWord{137, 11, 744}, - dictWord{134, 0, 926}, - dictWord{6, 10, 517}, - dictWord{7, 10, 1159}, - dictWord{10, 10, 621}, - dictWord{139, 10, 192}, - dictWord{137, 0, 827}, - dictWord{8, 0, 194}, - dictWord{136, 0, 756}, - dictWord{10, 10, 223}, - dictWord{139, 10, 645}, - dictWord{7, 10, 64}, - dictWord{ - 136, - 10, - 245, - }, - dictWord{4, 11, 399}, - dictWord{5, 11, 119}, - dictWord{5, 11, 494}, - dictWord{7, 11, 751}, - dictWord{137, 11, 556}, - dictWord{132, 0, 808}, - dictWord{ - 135, - 0, - 22, - }, - dictWord{7, 10, 1763}, - dictWord{140, 10, 310}, - dictWord{5, 0, 639}, - dictWord{7, 0, 1249}, - dictWord{11, 0, 896}, - dictWord{134, 11, 584}, - dictWord{ - 134, - 0, - 1614, - }, - dictWord{135, 0, 860}, - dictWord{135, 11, 1121}, - dictWord{5, 10, 129}, - dictWord{6, 10, 61}, - dictWord{135, 10, 947}, - dictWord{4, 0, 102}, - dictWord{ - 7, - 0, - 815, - }, - dictWord{7, 0, 1699}, - dictWord{139, 0, 964}, - dictWord{13, 10, 505}, - dictWord{141, 10, 506}, - dictWord{139, 10, 1000}, - dictWord{ - 132, - 11, - 679, - }, - dictWord{132, 0, 899}, - dictWord{132, 0, 569}, - dictWord{5, 11, 694}, - dictWord{137, 11, 714}, - dictWord{136, 0, 795}, - dictWord{6, 0, 2045}, - dictWord{ - 139, - 11, - 7, - }, - dictWord{6, 0, 52}, - dictWord{9, 0, 104}, - dictWord{9, 0, 559}, - dictWord{12, 0, 308}, - dictWord{147, 0, 87}, - dictWord{4, 0, 301}, - dictWord{132, 0, 604}, - dictWord{133, 10, 637}, - dictWord{136, 0, 779}, - dictWord{5, 11, 143}, - dictWord{5, 11, 769}, - dictWord{6, 11, 1760}, - dictWord{7, 11, 682}, - dictWord{7, 11, 1992}, - dictWord{136, 11, 736}, - dictWord{137, 10, 590}, - dictWord{147, 0, 32}, - dictWord{137, 11, 527}, - dictWord{5, 10, 280}, - dictWord{135, 10, 1226}, - dictWord{134, 0, 494}, - dictWord{6, 0, 677}, - dictWord{6, 0, 682}, - 
dictWord{134, 0, 1044}, - dictWord{133, 10, 281}, - dictWord{135, 10, 1064}, - dictWord{7, 0, 508}, - dictWord{133, 11, 860}, - dictWord{6, 11, 422}, - dictWord{7, 11, 0}, - dictWord{7, 11, 1544}, - dictWord{9, 11, 577}, - dictWord{11, 11, 990}, - dictWord{12, 11, 141}, - dictWord{12, 11, 453}, - dictWord{13, 11, 47}, - dictWord{141, 11, 266}, - dictWord{134, 0, 1014}, - dictWord{5, 11, 515}, - dictWord{137, 11, 131}, - dictWord{ - 134, - 0, - 957, - }, - dictWord{132, 11, 646}, - dictWord{6, 0, 310}, - dictWord{7, 0, 1849}, - dictWord{8, 0, 72}, - dictWord{8, 0, 272}, - dictWord{8, 0, 431}, - dictWord{9, 0, 12}, - dictWord{ - 9, - 0, - 376, - }, - dictWord{10, 0, 563}, - dictWord{10, 0, 630}, - dictWord{10, 0, 796}, - dictWord{10, 0, 810}, - dictWord{11, 0, 367}, - dictWord{11, 0, 599}, - dictWord{ - 11, - 0, - 686, - }, - dictWord{140, 0, 672}, - dictWord{7, 0, 570}, - dictWord{4, 11, 396}, - dictWord{7, 10, 120}, - dictWord{7, 11, 728}, - dictWord{8, 10, 489}, - dictWord{9, 11, 117}, - dictWord{9, 10, 319}, - dictWord{10, 10, 820}, - dictWord{11, 10, 1004}, - dictWord{12, 10, 379}, - dictWord{12, 10, 679}, - dictWord{13, 10, 117}, - dictWord{ - 13, - 11, - 202, - }, - dictWord{13, 10, 412}, - dictWord{14, 10, 25}, - dictWord{15, 10, 52}, - dictWord{15, 10, 161}, - dictWord{16, 10, 47}, - dictWord{20, 11, 51}, - dictWord{ - 149, - 10, - 2, - }, - dictWord{6, 11, 121}, - dictWord{6, 11, 124}, - dictWord{6, 11, 357}, - dictWord{7, 11, 1138}, - dictWord{7, 11, 1295}, - dictWord{8, 11, 162}, - dictWord{ - 139, - 11, - 655, - }, - dictWord{8, 0, 449}, - dictWord{4, 10, 937}, - dictWord{5, 10, 801}, - dictWord{136, 11, 449}, - dictWord{139, 11, 958}, - dictWord{6, 0, 181}, - dictWord{ - 7, - 0, - 537, - }, - dictWord{8, 0, 64}, - dictWord{9, 0, 127}, - dictWord{10, 0, 496}, - dictWord{12, 0, 510}, - dictWord{141, 0, 384}, - dictWord{138, 11, 253}, - dictWord{4, 0, 244}, - dictWord{135, 0, 233}, - dictWord{133, 11, 237}, - dictWord{132, 10, 365}, - dictWord{6, 0, 1650}, 
- dictWord{10, 0, 702}, - dictWord{139, 0, 245}, - dictWord{ - 5, - 10, - 7, - }, - dictWord{139, 10, 774}, - dictWord{13, 0, 463}, - dictWord{20, 0, 49}, - dictWord{13, 11, 463}, - dictWord{148, 11, 49}, - dictWord{4, 10, 734}, - dictWord{ - 5, - 10, - 662, - }, - dictWord{134, 10, 430}, - dictWord{4, 10, 746}, - dictWord{135, 10, 1090}, - dictWord{5, 10, 360}, - dictWord{136, 10, 237}, - dictWord{137, 0, 338}, - dictWord{143, 11, 10}, - dictWord{7, 11, 571}, - dictWord{138, 11, 366}, - dictWord{134, 0, 1279}, - dictWord{9, 11, 513}, - dictWord{10, 11, 22}, - dictWord{10, 11, 39}, - dictWord{12, 11, 122}, - dictWord{140, 11, 187}, - dictWord{133, 0, 896}, - dictWord{146, 0, 178}, - dictWord{134, 0, 695}, - dictWord{137, 0, 808}, - dictWord{ - 134, - 11, - 587, - }, - dictWord{7, 11, 107}, - dictWord{7, 11, 838}, - dictWord{8, 11, 550}, - dictWord{138, 11, 401}, - dictWord{7, 0, 1117}, - dictWord{136, 0, 539}, - dictWord{ - 4, - 10, - 277, - }, - dictWord{5, 10, 608}, - dictWord{6, 10, 493}, - dictWord{7, 10, 457}, - dictWord{140, 10, 384}, - dictWord{133, 11, 768}, - dictWord{12, 0, 257}, - dictWord{ - 7, - 10, - 27, - }, - dictWord{135, 10, 316}, - dictWord{140, 0, 1003}, - dictWord{4, 0, 207}, - dictWord{5, 0, 586}, - dictWord{5, 0, 676}, - dictWord{6, 0, 448}, - dictWord{ - 8, - 0, - 244, - }, - dictWord{11, 0, 1}, - dictWord{13, 0, 3}, - dictWord{16, 0, 54}, - dictWord{17, 0, 4}, - dictWord{18, 0, 13}, - dictWord{133, 10, 552}, - dictWord{4, 10, 401}, - dictWord{ - 137, - 10, - 264, - }, - dictWord{5, 0, 516}, - dictWord{7, 0, 1883}, - dictWord{135, 11, 1883}, - dictWord{12, 0, 960}, - dictWord{132, 11, 894}, - dictWord{5, 0, 4}, - dictWord{ - 5, - 0, - 810, - }, - dictWord{6, 0, 13}, - dictWord{6, 0, 538}, - dictWord{6, 0, 1690}, - dictWord{6, 0, 1726}, - dictWord{7, 0, 499}, - dictWord{7, 0, 1819}, - dictWord{8, 0, 148}, - dictWord{ - 8, - 0, - 696, - }, - dictWord{8, 0, 791}, - dictWord{12, 0, 125}, - dictWord{143, 0, 9}, - dictWord{135, 0, 1268}, - 
dictWord{11, 0, 30}, - dictWord{14, 0, 315}, - dictWord{ - 9, - 10, - 543, - }, - dictWord{10, 10, 524}, - dictWord{12, 10, 524}, - dictWord{16, 10, 18}, - dictWord{20, 10, 26}, - dictWord{148, 10, 65}, - dictWord{6, 0, 748}, - dictWord{ - 4, - 10, - 205, - }, - dictWord{5, 10, 623}, - dictWord{7, 10, 104}, - dictWord{136, 10, 519}, - dictWord{11, 0, 542}, - dictWord{139, 0, 852}, - dictWord{140, 0, 6}, - dictWord{ - 132, - 0, - 848, - }, - dictWord{7, 0, 1385}, - dictWord{11, 0, 582}, - dictWord{11, 0, 650}, - dictWord{11, 0, 901}, - dictWord{11, 0, 949}, - dictWord{12, 0, 232}, - dictWord{12, 0, 236}, - dictWord{13, 0, 413}, - dictWord{13, 0, 501}, - dictWord{18, 0, 116}, - dictWord{7, 10, 579}, - dictWord{9, 10, 41}, - dictWord{9, 10, 244}, - dictWord{9, 10, 669}, - dictWord{10, 10, 5}, - dictWord{11, 10, 861}, - dictWord{11, 10, 951}, - dictWord{139, 10, 980}, - dictWord{4, 0, 945}, - dictWord{6, 0, 1811}, - dictWord{6, 0, 1845}, - dictWord{ - 6, - 0, - 1853, - }, - dictWord{6, 0, 1858}, - dictWord{8, 0, 862}, - dictWord{12, 0, 782}, - dictWord{12, 0, 788}, - dictWord{18, 0, 160}, - dictWord{148, 0, 117}, - dictWord{ - 132, - 10, - 717, - }, - dictWord{4, 0, 925}, - dictWord{5, 0, 803}, - dictWord{8, 0, 698}, - dictWord{138, 0, 828}, - dictWord{134, 0, 1416}, - dictWord{132, 0, 610}, - dictWord{ - 139, - 0, - 992, - }, - dictWord{6, 0, 878}, - dictWord{134, 0, 1477}, - dictWord{135, 0, 1847}, - dictWord{138, 11, 531}, - dictWord{137, 11, 539}, - dictWord{134, 11, 272}, - dictWord{133, 0, 383}, - dictWord{134, 0, 1404}, - dictWord{132, 10, 489}, - dictWord{4, 11, 9}, - dictWord{5, 11, 128}, - dictWord{7, 11, 368}, - dictWord{ - 11, - 11, - 480, - }, - dictWord{148, 11, 3}, - dictWord{136, 0, 986}, - dictWord{9, 0, 660}, - dictWord{138, 0, 347}, - dictWord{135, 10, 892}, - dictWord{136, 11, 682}, - dictWord{ - 7, - 0, - 572, - }, - dictWord{9, 0, 592}, - dictWord{11, 0, 680}, - dictWord{12, 0, 356}, - dictWord{140, 0, 550}, - dictWord{7, 0, 1411}, - dictWord{138, 
11, 527}, - dictWord{ - 4, - 11, - 2, - }, - dictWord{7, 11, 545}, - dictWord{135, 11, 894}, - dictWord{137, 10, 473}, - dictWord{11, 0, 64}, - dictWord{7, 11, 481}, - dictWord{7, 10, 819}, - dictWord{9, 10, 26}, - dictWord{9, 10, 392}, - dictWord{9, 11, 792}, - dictWord{10, 10, 152}, - dictWord{10, 10, 226}, - dictWord{12, 10, 276}, - dictWord{12, 10, 426}, - dictWord{ - 12, - 10, - 589, - }, - dictWord{13, 10, 460}, - dictWord{15, 10, 97}, - dictWord{19, 10, 48}, - dictWord{148, 10, 104}, - dictWord{135, 10, 51}, - dictWord{136, 11, 445}, - dictWord{136, 11, 646}, - dictWord{135, 0, 606}, - dictWord{132, 10, 674}, - dictWord{6, 0, 1829}, - dictWord{134, 0, 1830}, - dictWord{132, 10, 770}, - dictWord{ - 5, - 10, - 79, - }, - dictWord{7, 10, 1027}, - dictWord{7, 10, 1477}, - dictWord{139, 10, 52}, - dictWord{5, 11, 530}, - dictWord{142, 11, 113}, - dictWord{134, 10, 1666}, - dictWord{ - 7, - 0, - 748, - }, - dictWord{139, 0, 700}, - dictWord{134, 10, 195}, - dictWord{133, 10, 789}, - dictWord{9, 0, 87}, - dictWord{10, 0, 365}, - dictWord{4, 10, 251}, - dictWord{ - 4, - 10, - 688, - }, - dictWord{7, 10, 513}, - dictWord{135, 10, 1284}, - dictWord{136, 11, 111}, - dictWord{133, 0, 127}, - dictWord{6, 0, 198}, - dictWord{140, 0, 83}, - dictWord{133, 11, 556}, - dictWord{133, 10, 889}, - dictWord{4, 10, 160}, - dictWord{5, 10, 330}, - dictWord{7, 10, 1434}, - dictWord{136, 10, 174}, - dictWord{5, 0, 276}, - dictWord{6, 0, 55}, - dictWord{7, 0, 1369}, - dictWord{138, 0, 864}, - dictWord{8, 11, 16}, - dictWord{140, 11, 568}, - dictWord{6, 0, 1752}, - dictWord{136, 0, 726}, - dictWord{135, 0, 1066}, - dictWord{133, 0, 764}, - dictWord{6, 11, 186}, - dictWord{137, 11, 426}, - dictWord{11, 0, 683}, - dictWord{139, 11, 683}, - dictWord{ - 6, - 0, - 309, - }, - dictWord{7, 0, 331}, - dictWord{138, 0, 550}, - dictWord{133, 10, 374}, - dictWord{6, 0, 1212}, - dictWord{6, 0, 1852}, - dictWord{7, 0, 1062}, - dictWord{ - 8, - 0, - 874, - }, - dictWord{8, 0, 882}, - dictWord{138, 
0, 936}, - dictWord{132, 11, 585}, - dictWord{134, 0, 1364}, - dictWord{7, 0, 986}, - dictWord{133, 10, 731}, - dictWord{ - 6, - 0, - 723, - }, - dictWord{6, 0, 1408}, - dictWord{138, 0, 381}, - dictWord{135, 0, 1573}, - dictWord{134, 0, 1025}, - dictWord{4, 10, 626}, - dictWord{5, 10, 642}, - dictWord{ - 6, - 10, - 425, - }, - dictWord{10, 10, 202}, - dictWord{139, 10, 141}, - dictWord{4, 11, 93}, - dictWord{5, 11, 252}, - dictWord{6, 11, 229}, - dictWord{7, 11, 291}, - dictWord{ - 9, - 11, - 550, - }, - dictWord{139, 11, 644}, - dictWord{137, 11, 749}, - dictWord{137, 11, 162}, - dictWord{132, 11, 381}, - dictWord{135, 0, 1559}, - dictWord{ - 6, - 0, - 194, - }, - dictWord{7, 0, 133}, - dictWord{10, 0, 493}, - dictWord{10, 0, 570}, - dictWord{139, 0, 664}, - dictWord{5, 0, 24}, - dictWord{5, 0, 569}, - dictWord{6, 0, 3}, - dictWord{ - 6, - 0, - 119, - }, - dictWord{6, 0, 143}, - dictWord{6, 0, 440}, - dictWord{7, 0, 295}, - dictWord{7, 0, 599}, - dictWord{7, 0, 1686}, - dictWord{7, 0, 1854}, - dictWord{8, 0, 424}, - dictWord{ - 9, - 0, - 43, - }, - dictWord{9, 0, 584}, - dictWord{9, 0, 760}, - dictWord{10, 0, 148}, - dictWord{10, 0, 328}, - dictWord{11, 0, 159}, - dictWord{11, 0, 253}, - dictWord{11, 0, 506}, - dictWord{12, 0, 487}, - dictWord{140, 0, 531}, - dictWord{6, 0, 661}, - dictWord{134, 0, 1517}, - dictWord{136, 10, 835}, - dictWord{151, 10, 17}, - dictWord{5, 0, 14}, - dictWord{5, 0, 892}, - dictWord{6, 0, 283}, - dictWord{7, 0, 234}, - dictWord{136, 0, 537}, - dictWord{139, 0, 541}, - dictWord{4, 0, 126}, - dictWord{8, 0, 635}, - dictWord{ - 147, - 0, - 34, - }, - dictWord{4, 0, 316}, - dictWord{4, 0, 495}, - dictWord{135, 0, 1561}, - dictWord{4, 11, 187}, - dictWord{5, 11, 184}, - dictWord{5, 11, 690}, - dictWord{ - 7, - 11, - 1869, - }, - dictWord{138, 11, 756}, - dictWord{139, 11, 783}, - dictWord{4, 0, 998}, - dictWord{137, 0, 861}, - dictWord{136, 0, 1009}, - dictWord{139, 11, 292}, - dictWord{5, 11, 21}, - dictWord{6, 11, 77}, - dictWord{6, 11, 
157}, - dictWord{7, 11, 974}, - dictWord{7, 11, 1301}, - dictWord{7, 11, 1339}, - dictWord{7, 11, 1490}, - dictWord{ - 7, - 11, - 1873, - }, - dictWord{137, 11, 628}, - dictWord{7, 11, 1283}, - dictWord{9, 11, 227}, - dictWord{9, 11, 499}, - dictWord{10, 11, 341}, - dictWord{11, 11, 325}, - dictWord{11, 11, 408}, - dictWord{14, 11, 180}, - dictWord{15, 11, 144}, - dictWord{18, 11, 47}, - dictWord{147, 11, 49}, - dictWord{4, 0, 64}, - dictWord{5, 0, 352}, - dictWord{5, 0, 720}, - dictWord{6, 0, 368}, - dictWord{139, 0, 359}, - dictWord{5, 10, 384}, - dictWord{8, 10, 455}, - dictWord{140, 10, 48}, - dictWord{5, 10, 264}, - dictWord{ - 134, - 10, - 184, - }, - dictWord{7, 0, 1577}, - dictWord{10, 0, 304}, - dictWord{10, 0, 549}, - dictWord{12, 0, 365}, - dictWord{13, 0, 220}, - dictWord{13, 0, 240}, - dictWord{ - 142, - 0, - 33, - }, - dictWord{134, 0, 1107}, - dictWord{134, 0, 929}, - dictWord{135, 0, 1142}, - dictWord{6, 0, 175}, - dictWord{137, 0, 289}, - dictWord{5, 0, 432}, - dictWord{ - 133, - 0, - 913, - }, - dictWord{6, 0, 279}, - dictWord{7, 0, 219}, - dictWord{5, 10, 633}, - dictWord{135, 10, 1323}, - dictWord{7, 0, 785}, - dictWord{7, 10, 359}, - dictWord{ - 8, - 10, - 243, - }, - dictWord{140, 10, 175}, - dictWord{139, 0, 595}, - dictWord{132, 10, 105}, - dictWord{8, 11, 398}, - dictWord{9, 11, 681}, - dictWord{139, 11, 632}, - dictWord{140, 0, 80}, - dictWord{5, 0, 931}, - dictWord{134, 0, 1698}, - dictWord{142, 11, 241}, - dictWord{134, 11, 20}, - dictWord{134, 0, 1323}, - dictWord{11, 0, 526}, - dictWord{11, 0, 939}, - dictWord{141, 0, 290}, - dictWord{5, 0, 774}, - dictWord{6, 0, 780}, - dictWord{6, 0, 1637}, - dictWord{6, 0, 1686}, - dictWord{6, 0, 1751}, - dictWord{ - 8, - 0, - 559, - }, - dictWord{141, 0, 109}, - dictWord{141, 0, 127}, - dictWord{7, 0, 1167}, - dictWord{11, 0, 934}, - dictWord{13, 0, 391}, - dictWord{17, 0, 76}, - dictWord{ - 135, - 11, - 709, - }, - dictWord{135, 0, 963}, - dictWord{6, 0, 260}, - dictWord{135, 0, 1484}, - 
dictWord{134, 0, 573}, - dictWord{4, 10, 758}, - dictWord{139, 11, 941}, - dictWord{135, 10, 1649}, - dictWord{145, 11, 36}, - dictWord{4, 0, 292}, - dictWord{137, 0, 580}, - dictWord{4, 0, 736}, - dictWord{5, 0, 871}, - dictWord{6, 0, 1689}, - dictWord{135, 0, 1944}, - dictWord{7, 11, 945}, - dictWord{11, 11, 713}, - dictWord{139, 11, 744}, - dictWord{134, 0, 1164}, - dictWord{135, 11, 937}, - dictWord{ - 6, - 0, - 1922, - }, - dictWord{9, 0, 982}, - dictWord{15, 0, 173}, - dictWord{15, 0, 178}, - dictWord{15, 0, 200}, - dictWord{18, 0, 189}, - dictWord{18, 0, 207}, - dictWord{21, 0, 47}, - dictWord{135, 11, 1652}, - dictWord{7, 0, 1695}, - dictWord{139, 10, 128}, - dictWord{6, 0, 63}, - dictWord{135, 0, 920}, - dictWord{133, 0, 793}, - dictWord{ - 143, - 11, - 134, - }, - dictWord{133, 10, 918}, - dictWord{5, 0, 67}, - dictWord{6, 0, 62}, - dictWord{6, 0, 374}, - dictWord{135, 0, 1391}, - dictWord{9, 0, 790}, - dictWord{12, 0, 47}, - dictWord{4, 11, 579}, - dictWord{5, 11, 226}, - dictWord{5, 11, 323}, - dictWord{135, 11, 960}, - dictWord{10, 11, 784}, - dictWord{141, 11, 191}, - dictWord{4, 0, 391}, - dictWord{135, 0, 1169}, - dictWord{137, 0, 443}, - dictWord{13, 11, 232}, - dictWord{146, 11, 35}, - dictWord{132, 10, 340}, - dictWord{132, 0, 271}, - dictWord{ - 137, - 11, - 313, - }, - dictWord{5, 11, 973}, - dictWord{137, 11, 659}, - dictWord{134, 0, 1140}, - dictWord{6, 11, 135}, - dictWord{135, 11, 1176}, - dictWord{4, 0, 253}, - dictWord{5, 0, 544}, - dictWord{7, 0, 300}, - dictWord{137, 0, 340}, - dictWord{7, 0, 897}, - dictWord{5, 10, 985}, - dictWord{7, 10, 509}, - dictWord{145, 10, 96}, - dictWord{ - 138, - 11, - 735, - }, - dictWord{135, 10, 1919}, - dictWord{138, 0, 890}, - dictWord{5, 0, 818}, - dictWord{134, 0, 1122}, - dictWord{5, 0, 53}, - dictWord{5, 0, 541}, - dictWord{ - 6, - 0, - 94, - }, - dictWord{6, 0, 499}, - dictWord{7, 0, 230}, - dictWord{139, 0, 321}, - dictWord{4, 0, 920}, - dictWord{5, 0, 25}, - dictWord{5, 0, 790}, - dictWord{6, 0, 
457}, - dictWord{ - 7, - 0, - 853, - }, - dictWord{8, 0, 788}, - dictWord{142, 11, 31}, - dictWord{132, 10, 247}, - dictWord{135, 11, 314}, - dictWord{132, 0, 468}, - dictWord{7, 0, 243}, - dictWord{ - 6, - 10, - 337, - }, - dictWord{7, 10, 494}, - dictWord{8, 10, 27}, - dictWord{8, 10, 599}, - dictWord{138, 10, 153}, - dictWord{4, 10, 184}, - dictWord{5, 10, 390}, - dictWord{ - 7, - 10, - 618, - }, - dictWord{7, 10, 1456}, - dictWord{139, 10, 710}, - dictWord{134, 0, 870}, - dictWord{134, 0, 1238}, - dictWord{134, 0, 1765}, - dictWord{10, 0, 853}, - dictWord{10, 0, 943}, - dictWord{14, 0, 437}, - dictWord{14, 0, 439}, - dictWord{14, 0, 443}, - dictWord{14, 0, 446}, - dictWord{14, 0, 452}, - dictWord{14, 0, 469}, - dictWord{ - 14, - 0, - 471, - }, - dictWord{14, 0, 473}, - dictWord{16, 0, 93}, - dictWord{16, 0, 102}, - dictWord{16, 0, 110}, - dictWord{148, 0, 121}, - dictWord{4, 0, 605}, - dictWord{ - 7, - 0, - 518, - }, - dictWord{7, 0, 1282}, - dictWord{7, 0, 1918}, - dictWord{10, 0, 180}, - dictWord{139, 0, 218}, - dictWord{133, 0, 822}, - dictWord{4, 0, 634}, - dictWord{ - 11, - 0, - 916, - }, - dictWord{142, 0, 419}, - dictWord{6, 11, 281}, - dictWord{7, 11, 6}, - dictWord{8, 11, 282}, - dictWord{8, 11, 480}, - dictWord{8, 11, 499}, - dictWord{9, 11, 198}, - dictWord{10, 11, 143}, - dictWord{10, 11, 169}, - dictWord{10, 11, 211}, - dictWord{10, 11, 417}, - dictWord{10, 11, 574}, - dictWord{11, 11, 147}, - dictWord{ - 11, - 11, - 395, - }, - dictWord{12, 11, 75}, - dictWord{12, 11, 407}, - dictWord{12, 11, 608}, - dictWord{13, 11, 500}, - dictWord{142, 11, 251}, - dictWord{134, 0, 898}, - dictWord{ - 6, - 0, - 36, - }, - dictWord{7, 0, 658}, - dictWord{8, 0, 454}, - dictWord{150, 11, 48}, - dictWord{133, 11, 674}, - dictWord{135, 11, 1776}, - dictWord{4, 11, 419}, - dictWord{ - 10, - 10, - 227, - }, - dictWord{11, 10, 497}, - dictWord{11, 10, 709}, - dictWord{140, 10, 415}, - dictWord{6, 10, 360}, - dictWord{7, 10, 1664}, - dictWord{136, 10, 478}, - 
dictWord{137, 0, 806}, - dictWord{12, 11, 508}, - dictWord{14, 11, 102}, - dictWord{14, 11, 226}, - dictWord{144, 11, 57}, - dictWord{135, 11, 1123}, - dictWord{ - 4, - 11, - 138, - }, - dictWord{7, 11, 1012}, - dictWord{7, 11, 1280}, - dictWord{137, 11, 76}, - dictWord{5, 11, 29}, - dictWord{140, 11, 638}, - dictWord{136, 10, 699}, - dictWord{134, 0, 1326}, - dictWord{132, 0, 104}, - dictWord{135, 11, 735}, - dictWord{132, 10, 739}, - dictWord{134, 0, 1331}, - dictWord{7, 0, 260}, - dictWord{ - 135, - 11, - 260, - }, - dictWord{135, 11, 1063}, - dictWord{7, 0, 45}, - dictWord{9, 0, 542}, - dictWord{9, 0, 566}, - dictWord{10, 0, 728}, - dictWord{137, 10, 869}, - dictWord{ - 4, - 10, - 67, - }, - dictWord{5, 10, 422}, - dictWord{7, 10, 1037}, - dictWord{7, 10, 1289}, - dictWord{7, 10, 1555}, - dictWord{9, 10, 741}, - dictWord{145, 10, 108}, - dictWord{ - 139, - 0, - 263, - }, - dictWord{134, 0, 1516}, - dictWord{14, 0, 146}, - dictWord{15, 0, 42}, - dictWord{16, 0, 23}, - dictWord{17, 0, 86}, - dictWord{146, 0, 17}, - dictWord{ - 138, - 0, - 468, - }, - dictWord{136, 0, 1005}, - dictWord{4, 11, 17}, - dictWord{5, 11, 23}, - dictWord{7, 11, 995}, - dictWord{11, 11, 383}, - dictWord{11, 11, 437}, - dictWord{ - 12, - 11, - 460, - }, - dictWord{140, 11, 532}, - dictWord{7, 0, 87}, - dictWord{142, 0, 288}, - dictWord{138, 10, 96}, - dictWord{135, 11, 626}, - dictWord{144, 10, 26}, - dictWord{ - 7, - 0, - 988, - }, - dictWord{7, 0, 1939}, - dictWord{9, 0, 64}, - dictWord{9, 0, 502}, - dictWord{12, 0, 22}, - dictWord{12, 0, 34}, - dictWord{13, 0, 12}, - dictWord{13, 0, 234}, - dictWord{147, 0, 77}, - dictWord{13, 0, 133}, - dictWord{8, 10, 203}, - dictWord{11, 10, 823}, - dictWord{11, 10, 846}, - dictWord{12, 10, 482}, - dictWord{13, 10, 277}, - dictWord{13, 10, 302}, - dictWord{13, 10, 464}, - dictWord{14, 10, 205}, - dictWord{142, 10, 221}, - dictWord{4, 10, 449}, - dictWord{133, 10, 718}, - dictWord{ - 135, - 0, - 141, - }, - dictWord{6, 0, 1842}, - dictWord{136, 0, 
872}, - dictWord{8, 11, 70}, - dictWord{12, 11, 171}, - dictWord{141, 11, 272}, - dictWord{4, 10, 355}, - dictWord{ - 6, - 10, - 311, - }, - dictWord{9, 10, 256}, - dictWord{138, 10, 404}, - dictWord{132, 0, 619}, - dictWord{137, 0, 261}, - dictWord{10, 11, 233}, - dictWord{10, 10, 758}, - dictWord{139, 11, 76}, - dictWord{5, 0, 246}, - dictWord{8, 0, 189}, - dictWord{9, 0, 355}, - dictWord{9, 0, 512}, - dictWord{10, 0, 124}, - dictWord{10, 0, 453}, - dictWord{ - 11, - 0, - 143, - }, - dictWord{11, 0, 416}, - dictWord{11, 0, 859}, - dictWord{141, 0, 341}, - dictWord{134, 11, 442}, - dictWord{133, 10, 827}, - dictWord{5, 10, 64}, - dictWord{ - 140, - 10, - 581, - }, - dictWord{4, 10, 442}, - dictWord{7, 10, 1047}, - dictWord{7, 10, 1352}, - dictWord{135, 10, 1643}, - dictWord{134, 11, 1709}, - dictWord{5, 0, 678}, - dictWord{6, 0, 305}, - dictWord{7, 0, 775}, - dictWord{7, 0, 1065}, - dictWord{133, 10, 977}, - dictWord{11, 11, 69}, - dictWord{12, 11, 105}, - dictWord{12, 11, 117}, - dictWord{13, 11, 213}, - dictWord{14, 11, 13}, - dictWord{14, 11, 62}, - dictWord{14, 11, 177}, - dictWord{14, 11, 421}, - dictWord{15, 11, 19}, - dictWord{146, 11, 141}, - dictWord{137, 11, 309}, - dictWord{5, 0, 35}, - dictWord{7, 0, 862}, - dictWord{7, 0, 1886}, - dictWord{138, 0, 179}, - dictWord{136, 0, 285}, - dictWord{132, 0, 517}, - dictWord{7, 11, 976}, - dictWord{9, 11, 146}, - dictWord{10, 11, 206}, - dictWord{10, 11, 596}, - dictWord{13, 11, 218}, - dictWord{142, 11, 153}, - dictWord{ - 132, - 10, - 254, - }, - dictWord{6, 0, 214}, - dictWord{12, 0, 540}, - dictWord{4, 10, 275}, - dictWord{7, 10, 1219}, - dictWord{140, 10, 376}, - dictWord{8, 0, 667}, - dictWord{ - 11, - 0, - 403, - }, - dictWord{146, 0, 83}, - dictWord{12, 0, 74}, - dictWord{10, 11, 648}, - dictWord{11, 11, 671}, - dictWord{143, 11, 46}, - dictWord{135, 0, 125}, - dictWord{ - 134, - 10, - 1753, - }, - dictWord{133, 0, 761}, - dictWord{6, 0, 912}, - dictWord{4, 11, 518}, - dictWord{6, 10, 369}, - dictWord{6, 
10, 502}, - dictWord{7, 10, 1036}, - dictWord{ - 7, - 11, - 1136, - }, - dictWord{8, 10, 348}, - dictWord{9, 10, 452}, - dictWord{10, 10, 26}, - dictWord{11, 10, 224}, - dictWord{11, 10, 387}, - dictWord{11, 10, 772}, - dictWord{12, 10, 95}, - dictWord{12, 10, 629}, - dictWord{13, 10, 195}, - dictWord{13, 10, 207}, - dictWord{13, 10, 241}, - dictWord{14, 10, 260}, - dictWord{14, 10, 270}, - dictWord{143, 10, 140}, - dictWord{10, 0, 131}, - dictWord{140, 0, 72}, - dictWord{132, 10, 269}, - dictWord{5, 10, 480}, - dictWord{7, 10, 532}, - dictWord{ - 7, - 10, - 1197, - }, - dictWord{7, 10, 1358}, - dictWord{8, 10, 291}, - dictWord{11, 10, 349}, - dictWord{142, 10, 396}, - dictWord{8, 11, 689}, - dictWord{137, 11, 863}, - dictWord{ - 8, - 0, - 333, - }, - dictWord{138, 0, 182}, - dictWord{4, 11, 18}, - dictWord{7, 11, 145}, - dictWord{7, 11, 444}, - dictWord{7, 11, 1278}, - dictWord{8, 11, 49}, - dictWord{ - 8, - 11, - 400, - }, - dictWord{9, 11, 71}, - dictWord{9, 11, 250}, - dictWord{10, 11, 459}, - dictWord{12, 11, 160}, - dictWord{144, 11, 24}, - dictWord{14, 11, 35}, - dictWord{ - 142, - 11, - 191, - }, - dictWord{135, 11, 1864}, - dictWord{135, 0, 1338}, - dictWord{148, 10, 15}, - dictWord{14, 0, 94}, - dictWord{15, 0, 65}, - dictWord{16, 0, 4}, - dictWord{ - 16, - 0, - 77, - }, - dictWord{16, 0, 80}, - dictWord{145, 0, 5}, - dictWord{12, 11, 82}, - dictWord{143, 11, 36}, - dictWord{133, 11, 1010}, - dictWord{133, 0, 449}, - dictWord{ - 133, - 0, - 646, - }, - dictWord{7, 0, 86}, - dictWord{8, 0, 103}, - dictWord{135, 10, 657}, - dictWord{7, 0, 2028}, - dictWord{138, 0, 641}, - dictWord{136, 10, 533}, - dictWord{ - 134, - 0, - 1, - }, - dictWord{139, 11, 970}, - dictWord{5, 11, 87}, - dictWord{7, 11, 313}, - dictWord{7, 11, 1103}, - dictWord{10, 11, 112}, - dictWord{10, 11, 582}, - dictWord{ - 11, - 11, - 389, - }, - dictWord{11, 11, 813}, - dictWord{12, 11, 385}, - dictWord{13, 11, 286}, - dictWord{14, 11, 124}, - dictWord{146, 11, 108}, - dictWord{6, 0, 869}, - 
dictWord{ - 132, - 11, - 267, - }, - dictWord{6, 0, 277}, - dictWord{7, 0, 1274}, - dictWord{7, 0, 1386}, - dictWord{146, 0, 87}, - dictWord{6, 0, 187}, - dictWord{7, 0, 39}, - dictWord{7, 0, 1203}, - dictWord{8, 0, 380}, - dictWord{14, 0, 117}, - dictWord{149, 0, 28}, - dictWord{4, 10, 211}, - dictWord{4, 10, 332}, - dictWord{5, 10, 335}, - dictWord{6, 10, 238}, - dictWord{ - 7, - 10, - 269, - }, - dictWord{7, 10, 811}, - dictWord{7, 10, 1797}, - dictWord{8, 10, 836}, - dictWord{9, 10, 507}, - dictWord{141, 10, 242}, - dictWord{4, 0, 785}, - dictWord{ - 5, - 0, - 368, - }, - dictWord{6, 0, 297}, - dictWord{7, 0, 793}, - dictWord{139, 0, 938}, - dictWord{7, 0, 464}, - dictWord{8, 0, 558}, - dictWord{11, 0, 105}, - dictWord{12, 0, 231}, - dictWord{14, 0, 386}, - dictWord{15, 0, 102}, - dictWord{148, 0, 75}, - dictWord{133, 10, 1009}, - dictWord{8, 0, 877}, - dictWord{140, 0, 731}, - dictWord{ - 139, - 11, - 289, - }, - dictWord{10, 11, 249}, - dictWord{139, 11, 209}, - dictWord{132, 11, 561}, - dictWord{134, 0, 1608}, - dictWord{132, 11, 760}, - dictWord{134, 0, 1429}, - dictWord{9, 11, 154}, - dictWord{140, 11, 485}, - dictWord{5, 10, 228}, - dictWord{6, 10, 203}, - dictWord{7, 10, 156}, - dictWord{8, 10, 347}, - dictWord{ - 137, - 10, - 265, - }, - dictWord{7, 0, 1010}, - dictWord{11, 0, 733}, - dictWord{11, 0, 759}, - dictWord{13, 0, 34}, - dictWord{14, 0, 427}, - dictWord{146, 0, 45}, - dictWord{7, 10, 1131}, - dictWord{135, 10, 1468}, - dictWord{136, 11, 255}, - dictWord{7, 0, 1656}, - dictWord{9, 0, 369}, - dictWord{10, 0, 338}, - dictWord{10, 0, 490}, - dictWord{ - 11, - 0, - 154, - }, - dictWord{11, 0, 545}, - dictWord{11, 0, 775}, - dictWord{13, 0, 77}, - dictWord{141, 0, 274}, - dictWord{133, 11, 621}, - dictWord{134, 0, 1038}, - dictWord{ - 4, - 11, - 368, - }, - dictWord{135, 11, 641}, - dictWord{6, 0, 2010}, - dictWord{8, 0, 979}, - dictWord{8, 0, 985}, - dictWord{10, 0, 951}, - dictWord{138, 0, 1011}, - dictWord{ - 134, - 0, - 1005, - }, - dictWord{19, 
0, 121}, - dictWord{5, 10, 291}, - dictWord{5, 10, 318}, - dictWord{7, 10, 765}, - dictWord{9, 10, 389}, - dictWord{140, 10, 548}, - dictWord{ - 5, - 0, - 20, - }, - dictWord{6, 0, 298}, - dictWord{7, 0, 659}, - dictWord{137, 0, 219}, - dictWord{7, 0, 1440}, - dictWord{11, 0, 854}, - dictWord{11, 0, 872}, - dictWord{11, 0, 921}, - dictWord{12, 0, 551}, - dictWord{13, 0, 472}, - dictWord{142, 0, 367}, - dictWord{5, 0, 490}, - dictWord{6, 0, 615}, - dictWord{6, 0, 620}, - dictWord{135, 0, 683}, - dictWord{ - 6, - 0, - 1070, - }, - dictWord{134, 0, 1597}, - dictWord{139, 0, 522}, - dictWord{132, 0, 439}, - dictWord{136, 0, 669}, - dictWord{6, 0, 766}, - dictWord{6, 0, 1143}, - dictWord{ - 6, - 0, - 1245, - }, - dictWord{10, 10, 525}, - dictWord{139, 10, 82}, - dictWord{9, 11, 92}, - dictWord{147, 11, 91}, - dictWord{6, 0, 668}, - dictWord{134, 0, 1218}, - dictWord{ - 6, - 11, - 525, - }, - dictWord{9, 11, 876}, - dictWord{140, 11, 284}, - dictWord{132, 0, 233}, - dictWord{136, 0, 547}, - dictWord{132, 10, 422}, - dictWord{5, 10, 355}, - dictWord{145, 10, 0}, - dictWord{6, 11, 300}, - dictWord{135, 11, 1515}, - dictWord{4, 0, 482}, - dictWord{137, 10, 905}, - dictWord{4, 0, 886}, - dictWord{7, 0, 346}, - dictWord{133, 11, 594}, - dictWord{133, 10, 865}, - dictWord{5, 10, 914}, - dictWord{134, 10, 1625}, - dictWord{135, 0, 334}, - dictWord{5, 0, 795}, - dictWord{ - 6, - 0, - 1741, - }, - dictWord{133, 10, 234}, - dictWord{135, 10, 1383}, - dictWord{6, 11, 1641}, - dictWord{136, 11, 820}, - dictWord{135, 0, 371}, - dictWord{7, 11, 1313}, - dictWord{138, 11, 660}, - dictWord{135, 10, 1312}, - dictWord{135, 0, 622}, - dictWord{7, 0, 625}, - dictWord{135, 0, 1750}, - dictWord{135, 0, 339}, - dictWord{ - 4, - 0, - 203, - }, - dictWord{135, 0, 1936}, - dictWord{15, 0, 29}, - dictWord{16, 0, 38}, - dictWord{15, 11, 29}, - dictWord{144, 11, 38}, - dictWord{5, 0, 338}, - dictWord{ - 135, - 0, - 1256, - }, - dictWord{135, 10, 1493}, - dictWord{10, 0, 130}, - dictWord{6, 10, 421}, 
- dictWord{7, 10, 61}, - dictWord{7, 10, 1540}, - dictWord{138, 10, 501}, - dictWord{ - 6, - 11, - 389, - }, - dictWord{7, 11, 149}, - dictWord{9, 11, 142}, - dictWord{138, 11, 94}, - dictWord{137, 10, 341}, - dictWord{11, 0, 678}, - dictWord{12, 0, 307}, - dictWord{142, 10, 98}, - dictWord{6, 11, 8}, - dictWord{7, 11, 1881}, - dictWord{136, 11, 91}, - dictWord{135, 0, 2044}, - dictWord{6, 0, 770}, - dictWord{6, 0, 802}, - dictWord{ - 6, - 0, - 812, - }, - dictWord{7, 0, 311}, - dictWord{9, 0, 308}, - dictWord{12, 0, 255}, - dictWord{6, 10, 102}, - dictWord{7, 10, 72}, - dictWord{15, 10, 142}, - dictWord{ - 147, - 10, - 67, - }, - dictWord{151, 10, 30}, - dictWord{135, 10, 823}, - dictWord{135, 0, 1266}, - dictWord{135, 11, 1746}, - dictWord{135, 10, 1870}, - dictWord{4, 0, 400}, - dictWord{5, 0, 267}, - dictWord{135, 0, 232}, - dictWord{7, 11, 24}, - dictWord{11, 11, 542}, - dictWord{139, 11, 852}, - dictWord{135, 11, 1739}, - dictWord{4, 11, 503}, - dictWord{135, 11, 1661}, - dictWord{5, 11, 130}, - dictWord{7, 11, 1314}, - dictWord{9, 11, 610}, - dictWord{10, 11, 718}, - dictWord{11, 11, 601}, - dictWord{ - 11, - 11, - 819, - }, - dictWord{11, 11, 946}, - dictWord{140, 11, 536}, - dictWord{10, 11, 149}, - dictWord{11, 11, 280}, - dictWord{142, 11, 336}, - dictWord{7, 0, 739}, - dictWord{11, 0, 690}, - dictWord{7, 11, 1946}, - dictWord{8, 10, 48}, - dictWord{8, 10, 88}, - dictWord{8, 10, 582}, - dictWord{8, 10, 681}, - dictWord{9, 10, 373}, - dictWord{ - 9, - 10, - 864, - }, - dictWord{11, 10, 157}, - dictWord{11, 10, 843}, - dictWord{148, 10, 27}, - dictWord{134, 0, 990}, - dictWord{4, 10, 88}, - dictWord{5, 10, 137}, - dictWord{ - 5, - 10, - 174, - }, - dictWord{5, 10, 777}, - dictWord{6, 10, 1664}, - dictWord{6, 10, 1725}, - dictWord{7, 10, 77}, - dictWord{7, 10, 426}, - dictWord{7, 10, 1317}, - dictWord{ - 7, - 10, - 1355, - }, - dictWord{8, 10, 126}, - dictWord{8, 10, 563}, - dictWord{9, 10, 523}, - dictWord{9, 10, 750}, - dictWord{10, 10, 310}, - 
dictWord{10, 10, 836}, - dictWord{ - 11, - 10, - 42, - }, - dictWord{11, 10, 318}, - dictWord{11, 10, 731}, - dictWord{12, 10, 68}, - dictWord{12, 10, 92}, - dictWord{12, 10, 507}, - dictWord{12, 10, 692}, - dictWord{ - 13, - 10, - 81, - }, - dictWord{13, 10, 238}, - dictWord{13, 10, 374}, - dictWord{14, 10, 436}, - dictWord{18, 10, 138}, - dictWord{19, 10, 78}, - dictWord{19, 10, 111}, - dictWord{20, 10, 55}, - dictWord{20, 10, 77}, - dictWord{148, 10, 92}, - dictWord{141, 10, 418}, - dictWord{7, 0, 1831}, - dictWord{132, 10, 938}, - dictWord{6, 0, 776}, - dictWord{134, 0, 915}, - dictWord{138, 10, 351}, - dictWord{5, 11, 348}, - dictWord{6, 11, 522}, - dictWord{6, 10, 1668}, - dictWord{7, 10, 1499}, - dictWord{8, 10, 117}, - dictWord{9, 10, 314}, - dictWord{138, 10, 174}, - dictWord{135, 10, 707}, - dictWord{132, 0, 613}, - dictWord{133, 10, 403}, - dictWord{132, 11, 392}, - dictWord{ - 5, - 11, - 433, - }, - dictWord{9, 11, 633}, - dictWord{139, 11, 629}, - dictWord{133, 0, 763}, - dictWord{132, 0, 878}, - dictWord{132, 0, 977}, - dictWord{132, 0, 100}, - dictWord{6, 0, 463}, - dictWord{4, 10, 44}, - dictWord{5, 10, 311}, - dictWord{7, 10, 639}, - dictWord{7, 10, 762}, - dictWord{7, 10, 1827}, - dictWord{9, 10, 8}, - dictWord{ - 9, - 10, - 462, - }, - dictWord{148, 10, 83}, - dictWord{134, 11, 234}, - dictWord{4, 10, 346}, - dictWord{7, 10, 115}, - dictWord{9, 10, 180}, - dictWord{9, 10, 456}, - dictWord{ - 138, - 10, - 363, - }, - dictWord{5, 0, 362}, - dictWord{5, 0, 443}, - dictWord{6, 0, 318}, - dictWord{7, 0, 1019}, - dictWord{139, 0, 623}, - dictWord{5, 0, 463}, - dictWord{8, 0, 296}, - dictWord{7, 11, 140}, - dictWord{7, 11, 1950}, - dictWord{8, 11, 680}, - dictWord{11, 11, 817}, - dictWord{147, 11, 88}, - dictWord{7, 11, 1222}, - dictWord{ - 138, - 11, - 386, - }, - dictWord{142, 0, 137}, - dictWord{132, 0, 454}, - dictWord{7, 0, 1914}, - dictWord{6, 11, 5}, - dictWord{7, 10, 1051}, - dictWord{9, 10, 545}, - dictWord{ - 11, - 11, - 249, - }, - 
dictWord{12, 11, 313}, - dictWord{16, 11, 66}, - dictWord{145, 11, 26}, - dictWord{135, 0, 1527}, - dictWord{145, 0, 58}, - dictWord{148, 11, 59}, - dictWord{ - 5, - 0, - 48, - }, - dictWord{5, 0, 404}, - dictWord{6, 0, 557}, - dictWord{7, 0, 458}, - dictWord{8, 0, 597}, - dictWord{10, 0, 455}, - dictWord{10, 0, 606}, - dictWord{11, 0, 49}, - dictWord{ - 11, - 0, - 548, - }, - dictWord{12, 0, 476}, - dictWord{13, 0, 18}, - dictWord{141, 0, 450}, - dictWord{5, 11, 963}, - dictWord{134, 11, 1773}, - dictWord{133, 0, 729}, - dictWord{138, 11, 586}, - dictWord{5, 0, 442}, - dictWord{135, 0, 1984}, - dictWord{134, 0, 449}, - dictWord{144, 0, 40}, - dictWord{4, 0, 853}, - dictWord{7, 11, 180}, - dictWord{8, 11, 509}, - dictWord{136, 11, 792}, - dictWord{6, 10, 185}, - dictWord{7, 10, 1899}, - dictWord{9, 10, 875}, - dictWord{139, 10, 673}, - dictWord{ - 134, - 11, - 524, - }, - dictWord{12, 0, 227}, - dictWord{4, 10, 327}, - dictWord{5, 10, 478}, - dictWord{7, 10, 1332}, - dictWord{136, 10, 753}, - dictWord{6, 0, 1491}, - dictWord{ - 5, - 10, - 1020, - }, - dictWord{133, 10, 1022}, - dictWord{4, 10, 103}, - dictWord{133, 10, 401}, - dictWord{132, 11, 931}, - dictWord{4, 10, 499}, - dictWord{135, 10, 1421}, - dictWord{5, 0, 55}, - dictWord{7, 0, 376}, - dictWord{140, 0, 161}, - dictWord{133, 0, 450}, - dictWord{6, 0, 1174}, - dictWord{134, 0, 1562}, - dictWord{10, 0, 62}, - dictWord{13, 0, 400}, - dictWord{135, 11, 1837}, - dictWord{140, 0, 207}, - dictWord{135, 0, 869}, - dictWord{4, 11, 773}, - dictWord{5, 11, 618}, - dictWord{ - 137, - 11, - 756, - }, - dictWord{132, 10, 96}, - dictWord{4, 0, 213}, - dictWord{7, 0, 223}, - dictWord{8, 0, 80}, - dictWord{135, 10, 968}, - dictWord{4, 11, 90}, - dictWord{5, 11, 337}, - dictWord{5, 11, 545}, - dictWord{7, 11, 754}, - dictWord{9, 11, 186}, - dictWord{10, 11, 72}, - dictWord{10, 11, 782}, - dictWord{11, 11, 513}, - dictWord{11, 11, 577}, - dictWord{11, 11, 610}, - dictWord{11, 11, 889}, - dictWord{11, 11, 961}, - 
dictWord{12, 11, 354}, - dictWord{12, 11, 362}, - dictWord{12, 11, 461}, - dictWord{ - 12, - 11, - 595, - }, - dictWord{13, 11, 79}, - dictWord{143, 11, 121}, - dictWord{7, 0, 381}, - dictWord{7, 0, 806}, - dictWord{7, 0, 820}, - dictWord{8, 0, 354}, - dictWord{8, 0, 437}, - dictWord{8, 0, 787}, - dictWord{9, 0, 657}, - dictWord{10, 0, 58}, - dictWord{10, 0, 339}, - dictWord{10, 0, 749}, - dictWord{11, 0, 914}, - dictWord{12, 0, 162}, - dictWord{ - 13, - 0, - 75, - }, - dictWord{14, 0, 106}, - dictWord{14, 0, 198}, - dictWord{14, 0, 320}, - dictWord{14, 0, 413}, - dictWord{146, 0, 43}, - dictWord{136, 0, 747}, - dictWord{ - 136, - 0, - 954, - }, - dictWord{134, 0, 1073}, - dictWord{135, 0, 556}, - dictWord{7, 11, 151}, - dictWord{9, 11, 329}, - dictWord{139, 11, 254}, - dictWord{5, 0, 692}, - dictWord{ - 134, - 0, - 1395, - }, - dictWord{6, 10, 563}, - dictWord{137, 10, 224}, - dictWord{134, 0, 191}, - dictWord{132, 0, 804}, - dictWord{9, 11, 187}, - dictWord{10, 11, 36}, - dictWord{17, 11, 44}, - dictWord{146, 11, 64}, - dictWord{7, 11, 165}, - dictWord{7, 11, 919}, - dictWord{136, 11, 517}, - dictWord{4, 11, 506}, - dictWord{5, 11, 295}, - dictWord{7, 11, 1680}, - dictWord{15, 11, 14}, - dictWord{144, 11, 5}, - dictWord{4, 0, 706}, - dictWord{6, 0, 162}, - dictWord{7, 0, 1960}, - dictWord{136, 0, 831}, - dictWord{ - 135, - 11, - 1376, - }, - dictWord{7, 11, 987}, - dictWord{9, 11, 688}, - dictWord{10, 11, 522}, - dictWord{11, 11, 788}, - dictWord{140, 11, 566}, - dictWord{150, 0, 35}, - dictWord{138, 0, 426}, - dictWord{135, 0, 1235}, - dictWord{135, 11, 1741}, - dictWord{7, 11, 389}, - dictWord{7, 11, 700}, - dictWord{7, 11, 940}, - dictWord{ - 8, - 11, - 514, - }, - dictWord{9, 11, 116}, - dictWord{9, 11, 535}, - dictWord{10, 11, 118}, - dictWord{11, 11, 107}, - dictWord{11, 11, 148}, - dictWord{11, 11, 922}, - dictWord{ - 12, - 11, - 254, - }, - dictWord{12, 11, 421}, - dictWord{142, 11, 238}, - dictWord{134, 0, 1234}, - dictWord{132, 11, 743}, - dictWord{4, 
10, 910}, - dictWord{5, 10, 832}, - dictWord{135, 11, 1335}, - dictWord{141, 0, 96}, - dictWord{135, 11, 185}, - dictWord{146, 0, 149}, - dictWord{4, 0, 204}, - dictWord{137, 0, 902}, - dictWord{ - 4, - 11, - 784, - }, - dictWord{133, 11, 745}, - dictWord{136, 0, 833}, - dictWord{136, 0, 949}, - dictWord{7, 0, 366}, - dictWord{9, 0, 287}, - dictWord{12, 0, 199}, - dictWord{ - 12, - 0, - 556, - }, - dictWord{12, 0, 577}, - dictWord{5, 11, 81}, - dictWord{7, 11, 146}, - dictWord{7, 11, 1342}, - dictWord{7, 11, 1446}, - dictWord{8, 11, 53}, - dictWord{8, 11, 561}, - dictWord{8, 11, 694}, - dictWord{8, 11, 754}, - dictWord{9, 11, 97}, - dictWord{9, 11, 115}, - dictWord{9, 11, 894}, - dictWord{10, 11, 462}, - dictWord{10, 11, 813}, - dictWord{11, 11, 230}, - dictWord{11, 11, 657}, - dictWord{11, 11, 699}, - dictWord{11, 11, 748}, - dictWord{12, 11, 119}, - dictWord{12, 11, 200}, - dictWord{ - 12, - 11, - 283, - }, - dictWord{14, 11, 273}, - dictWord{145, 11, 15}, - dictWord{5, 11, 408}, - dictWord{137, 11, 747}, - dictWord{9, 11, 498}, - dictWord{140, 11, 181}, - dictWord{ - 6, - 0, - 2020, - }, - dictWord{136, 0, 992}, - dictWord{5, 0, 356}, - dictWord{135, 0, 224}, - dictWord{134, 0, 784}, - dictWord{7, 0, 630}, - dictWord{9, 0, 567}, - dictWord{ - 11, - 0, - 150, - }, - dictWord{11, 0, 444}, - dictWord{13, 0, 119}, - dictWord{8, 10, 528}, - dictWord{137, 10, 348}, - dictWord{134, 0, 539}, - dictWord{4, 10, 20}, - dictWord{ - 133, - 10, - 616, - }, - dictWord{142, 0, 27}, - dictWord{7, 11, 30}, - dictWord{8, 11, 86}, - dictWord{8, 11, 315}, - dictWord{8, 11, 700}, - dictWord{9, 11, 576}, - dictWord{9, 11, 858}, - dictWord{11, 11, 310}, - dictWord{11, 11, 888}, - dictWord{11, 11, 904}, - dictWord{12, 11, 361}, - dictWord{141, 11, 248}, - dictWord{138, 11, 839}, - dictWord{ - 134, - 0, - 755, - }, - dictWord{134, 0, 1063}, - dictWord{7, 10, 1091}, - dictWord{135, 10, 1765}, - dictWord{134, 11, 428}, - dictWord{7, 11, 524}, - dictWord{8, 11, 169}, - dictWord{8, 11, 234}, 
- dictWord{9, 11, 480}, - dictWord{138, 11, 646}, - dictWord{139, 0, 814}, - dictWord{7, 11, 1462}, - dictWord{139, 11, 659}, - dictWord{ - 4, - 10, - 26, - }, - dictWord{5, 10, 429}, - dictWord{6, 10, 245}, - dictWord{7, 10, 704}, - dictWord{7, 10, 1379}, - dictWord{135, 10, 1474}, - dictWord{7, 11, 1205}, - dictWord{ - 138, - 11, - 637, - }, - dictWord{139, 11, 803}, - dictWord{132, 10, 621}, - dictWord{136, 0, 987}, - dictWord{4, 11, 266}, - dictWord{8, 11, 4}, - dictWord{9, 11, 39}, - dictWord{ - 10, - 11, - 166, - }, - dictWord{11, 11, 918}, - dictWord{12, 11, 635}, - dictWord{20, 11, 10}, - dictWord{22, 11, 27}, - dictWord{150, 11, 43}, - dictWord{4, 0, 235}, - dictWord{ - 135, - 0, - 255, - }, - dictWord{4, 0, 194}, - dictWord{5, 0, 584}, - dictWord{6, 0, 384}, - dictWord{7, 0, 583}, - dictWord{10, 0, 761}, - dictWord{11, 0, 760}, - dictWord{139, 0, 851}, - dictWord{133, 10, 542}, - dictWord{134, 0, 1086}, - dictWord{133, 10, 868}, - dictWord{8, 0, 1016}, - dictWord{136, 0, 1018}, - dictWord{7, 0, 1396}, - dictWord{ - 7, - 11, - 1396, - }, - dictWord{136, 10, 433}, - dictWord{135, 10, 1495}, - dictWord{138, 10, 215}, - dictWord{141, 10, 124}, - dictWord{7, 11, 157}, - dictWord{ - 8, - 11, - 279, - }, - dictWord{9, 11, 759}, - dictWord{16, 11, 31}, - dictWord{16, 11, 39}, - dictWord{16, 11, 75}, - dictWord{18, 11, 24}, - dictWord{20, 11, 42}, - dictWord{152, 11, 1}, - dictWord{5, 0, 562}, - dictWord{134, 11, 604}, - dictWord{134, 0, 913}, - dictWord{5, 0, 191}, - dictWord{137, 0, 271}, - dictWord{4, 0, 470}, - dictWord{6, 0, 153}, - dictWord{7, 0, 1503}, - dictWord{7, 0, 1923}, - dictWord{10, 0, 701}, - dictWord{11, 0, 132}, - dictWord{11, 0, 227}, - dictWord{11, 0, 320}, - dictWord{11, 0, 436}, - dictWord{ - 11, - 0, - 525, - }, - dictWord{11, 0, 855}, - dictWord{11, 0, 873}, - dictWord{12, 0, 41}, - dictWord{12, 0, 286}, - dictWord{13, 0, 103}, - dictWord{13, 0, 284}, - dictWord{ - 14, - 0, - 255, - }, - dictWord{14, 0, 262}, - dictWord{15, 0, 117}, - 
dictWord{143, 0, 127}, - dictWord{7, 0, 475}, - dictWord{12, 0, 45}, - dictWord{147, 10, 112}, - dictWord{ - 132, - 11, - 567, - }, - dictWord{137, 11, 859}, - dictWord{6, 0, 713}, - dictWord{6, 0, 969}, - dictWord{6, 0, 1290}, - dictWord{134, 0, 1551}, - dictWord{133, 0, 327}, - dictWord{ - 6, - 0, - 552, - }, - dictWord{6, 0, 1292}, - dictWord{7, 0, 1754}, - dictWord{137, 0, 604}, - dictWord{4, 0, 223}, - dictWord{6, 0, 359}, - dictWord{11, 0, 3}, - dictWord{13, 0, 108}, - dictWord{14, 0, 89}, - dictWord{16, 0, 22}, - dictWord{5, 11, 762}, - dictWord{7, 11, 1880}, - dictWord{9, 11, 680}, - dictWord{139, 11, 798}, - dictWord{5, 0, 80}, - dictWord{ - 6, - 0, - 405, - }, - dictWord{7, 0, 403}, - dictWord{7, 0, 1502}, - dictWord{8, 0, 456}, - dictWord{9, 0, 487}, - dictWord{9, 0, 853}, - dictWord{9, 0, 889}, - dictWord{10, 0, 309}, - dictWord{ - 11, - 0, - 721, - }, - dictWord{11, 0, 994}, - dictWord{12, 0, 430}, - dictWord{141, 0, 165}, - dictWord{133, 11, 298}, - dictWord{132, 10, 647}, - dictWord{134, 0, 2016}, - dictWord{18, 10, 10}, - dictWord{146, 11, 10}, - dictWord{4, 0, 453}, - dictWord{5, 0, 887}, - dictWord{6, 0, 535}, - dictWord{8, 0, 6}, - dictWord{8, 0, 543}, - dictWord{ - 136, - 0, - 826, - }, - dictWord{136, 0, 975}, - dictWord{10, 0, 961}, - dictWord{138, 0, 962}, - dictWord{138, 10, 220}, - dictWord{6, 0, 1891}, - dictWord{6, 0, 1893}, - dictWord{ - 9, - 0, - 916, - }, - dictWord{9, 0, 965}, - dictWord{9, 0, 972}, - dictWord{12, 0, 801}, - dictWord{12, 0, 859}, - dictWord{12, 0, 883}, - dictWord{15, 0, 226}, - dictWord{149, 0, 51}, - dictWord{132, 10, 109}, - dictWord{135, 11, 267}, - dictWord{7, 11, 92}, - dictWord{7, 11, 182}, - dictWord{8, 11, 453}, - dictWord{9, 11, 204}, - dictWord{11, 11, 950}, - dictWord{12, 11, 94}, - dictWord{12, 11, 644}, - dictWord{16, 11, 20}, - dictWord{16, 11, 70}, - dictWord{16, 11, 90}, - dictWord{147, 11, 55}, - dictWord{ - 134, - 10, - 1746, - }, - dictWord{6, 11, 71}, - dictWord{7, 11, 845}, - dictWord{7, 11, 
1308}, - dictWord{8, 11, 160}, - dictWord{137, 11, 318}, - dictWord{5, 0, 101}, - dictWord{6, 0, 88}, - dictWord{7, 0, 263}, - dictWord{7, 0, 628}, - dictWord{7, 0, 1677}, - dictWord{8, 0, 349}, - dictWord{9, 0, 100}, - dictWord{10, 0, 677}, - dictWord{14, 0, 169}, - dictWord{ - 14, - 0, - 302, - }, - dictWord{14, 0, 313}, - dictWord{15, 0, 48}, - dictWord{15, 0, 84}, - dictWord{7, 11, 237}, - dictWord{8, 11, 664}, - dictWord{9, 11, 42}, - dictWord{9, 11, 266}, - dictWord{9, 11, 380}, - dictWord{9, 11, 645}, - dictWord{10, 11, 177}, - dictWord{138, 11, 276}, - dictWord{138, 11, 69}, - dictWord{4, 0, 310}, - dictWord{7, 0, 708}, - dictWord{7, 0, 996}, - dictWord{9, 0, 795}, - dictWord{10, 0, 390}, - dictWord{10, 0, 733}, - dictWord{11, 0, 451}, - dictWord{12, 0, 249}, - dictWord{14, 0, 115}, - dictWord{ - 14, - 0, - 286, - }, - dictWord{143, 0, 100}, - dictWord{5, 0, 587}, - dictWord{4, 10, 40}, - dictWord{10, 10, 67}, - dictWord{11, 10, 117}, - dictWord{11, 10, 768}, - dictWord{ - 139, - 10, - 935, - }, - dictWord{6, 0, 1942}, - dictWord{7, 0, 512}, - dictWord{136, 0, 983}, - dictWord{7, 10, 992}, - dictWord{8, 10, 301}, - dictWord{9, 10, 722}, - dictWord{12, 10, 63}, - dictWord{13, 10, 29}, - dictWord{14, 10, 161}, - dictWord{143, 10, 18}, - dictWord{136, 11, 76}, - dictWord{139, 10, 923}, - dictWord{134, 0, 645}, - dictWord{ - 134, - 0, - 851, - }, - dictWord{4, 0, 498}, - dictWord{132, 11, 293}, - dictWord{7, 0, 217}, - dictWord{8, 0, 140}, - dictWord{10, 0, 610}, - dictWord{14, 11, 352}, - dictWord{ - 17, - 11, - 53, - }, - dictWord{18, 11, 146}, - dictWord{18, 11, 152}, - dictWord{19, 11, 11}, - dictWord{150, 11, 54}, - dictWord{134, 0, 1448}, - dictWord{138, 11, 841}, - dictWord{133, 0, 905}, - dictWord{4, 11, 605}, - dictWord{7, 11, 518}, - dictWord{7, 11, 1282}, - dictWord{7, 11, 1918}, - dictWord{10, 11, 180}, - dictWord{139, 11, 218}, - dictWord{139, 11, 917}, - dictWord{135, 10, 825}, - dictWord{140, 10, 328}, - dictWord{4, 0, 456}, - dictWord{7, 0, 
105}, - dictWord{7, 0, 358}, - dictWord{7, 0, 1637}, - dictWord{8, 0, 643}, - dictWord{139, 0, 483}, - dictWord{134, 0, 792}, - dictWord{6, 11, 96}, - dictWord{135, 11, 1426}, - dictWord{137, 11, 691}, - dictWord{ - 4, - 11, - 651, - }, - dictWord{133, 11, 289}, - dictWord{7, 11, 688}, - dictWord{8, 11, 35}, - dictWord{9, 11, 511}, - dictWord{10, 11, 767}, - dictWord{147, 11, 118}, - dictWord{ - 150, - 0, - 56, - }, - dictWord{5, 0, 243}, - dictWord{5, 0, 535}, - dictWord{6, 10, 204}, - dictWord{10, 10, 320}, - dictWord{10, 10, 583}, - dictWord{13, 10, 502}, - dictWord{ - 14, - 10, - 72, - }, - dictWord{14, 10, 274}, - dictWord{14, 10, 312}, - dictWord{14, 10, 344}, - dictWord{15, 10, 159}, - dictWord{16, 10, 62}, - dictWord{16, 10, 69}, - dictWord{ - 17, - 10, - 30, - }, - dictWord{18, 10, 42}, - dictWord{18, 10, 53}, - dictWord{18, 10, 84}, - dictWord{18, 10, 140}, - dictWord{19, 10, 68}, - dictWord{19, 10, 85}, - dictWord{20, 10, 5}, - dictWord{20, 10, 45}, - dictWord{20, 10, 101}, - dictWord{22, 10, 7}, - dictWord{150, 10, 20}, - dictWord{4, 10, 558}, - dictWord{6, 10, 390}, - dictWord{7, 10, 162}, - dictWord{7, 10, 689}, - dictWord{9, 10, 360}, - dictWord{138, 10, 653}, - dictWord{146, 11, 23}, - dictWord{135, 0, 1748}, - dictWord{5, 10, 856}, - dictWord{ - 6, - 10, - 1672, - }, - dictWord{6, 10, 1757}, - dictWord{134, 10, 1781}, - dictWord{5, 0, 539}, - dictWord{5, 0, 754}, - dictWord{6, 0, 876}, - dictWord{132, 11, 704}, - dictWord{ - 135, - 11, - 1078, - }, - dictWord{5, 10, 92}, - dictWord{10, 10, 736}, - dictWord{140, 10, 102}, - dictWord{17, 0, 91}, - dictWord{5, 10, 590}, - dictWord{137, 10, 213}, - dictWord{134, 0, 1565}, - dictWord{6, 0, 91}, - dictWord{135, 0, 435}, - dictWord{4, 0, 939}, - dictWord{140, 0, 792}, - dictWord{134, 0, 1399}, - dictWord{4, 0, 16}, - dictWord{ - 5, - 0, - 316, - }, - dictWord{5, 0, 842}, - dictWord{6, 0, 370}, - dictWord{6, 0, 1778}, - dictWord{8, 0, 166}, - dictWord{11, 0, 812}, - dictWord{12, 0, 206}, - dictWord{12, 0, 
351}, - dictWord{14, 0, 418}, - dictWord{16, 0, 15}, - dictWord{16, 0, 34}, - dictWord{18, 0, 3}, - dictWord{19, 0, 3}, - dictWord{19, 0, 7}, - dictWord{20, 0, 4}, - dictWord{21, 0, 21}, - dictWord{ - 4, - 11, - 720, - }, - dictWord{133, 11, 306}, - dictWord{144, 0, 95}, - dictWord{133, 11, 431}, - dictWord{132, 11, 234}, - dictWord{135, 0, 551}, - dictWord{4, 0, 999}, - dictWord{6, 0, 1966}, - dictWord{134, 0, 2042}, - dictWord{7, 0, 619}, - dictWord{10, 0, 547}, - dictWord{11, 0, 122}, - dictWord{12, 0, 601}, - dictWord{15, 0, 7}, - dictWord{148, 0, 20}, - dictWord{5, 11, 464}, - dictWord{6, 11, 236}, - dictWord{7, 11, 276}, - dictWord{7, 11, 696}, - dictWord{7, 11, 914}, - dictWord{7, 11, 1108}, - dictWord{ - 7, - 11, - 1448, - }, - dictWord{9, 11, 15}, - dictWord{9, 11, 564}, - dictWord{10, 11, 14}, - dictWord{12, 11, 565}, - dictWord{13, 11, 449}, - dictWord{14, 11, 53}, - dictWord{ - 15, - 11, - 13, - }, - dictWord{16, 11, 64}, - dictWord{145, 11, 41}, - dictWord{6, 0, 884}, - dictWord{6, 0, 1019}, - dictWord{134, 0, 1150}, - dictWord{6, 11, 1767}, - dictWord{ - 12, - 11, - 194, - }, - dictWord{145, 11, 107}, - dictWord{136, 10, 503}, - dictWord{133, 11, 840}, - dictWord{7, 0, 671}, - dictWord{134, 10, 466}, - dictWord{132, 0, 888}, - dictWord{4, 0, 149}, - dictWord{138, 0, 368}, - dictWord{4, 0, 154}, - dictWord{7, 0, 1134}, - dictWord{136, 0, 105}, - dictWord{135, 0, 983}, - dictWord{9, 11, 642}, - dictWord{11, 11, 236}, - dictWord{142, 11, 193}, - dictWord{4, 0, 31}, - dictWord{6, 0, 429}, - dictWord{7, 0, 962}, - dictWord{9, 0, 458}, - dictWord{139, 0, 691}, - dictWord{ - 6, - 0, - 643, - }, - dictWord{134, 0, 1102}, - dictWord{132, 0, 312}, - dictWord{4, 11, 68}, - dictWord{5, 11, 634}, - dictWord{6, 11, 386}, - dictWord{7, 11, 794}, - dictWord{ - 8, - 11, - 273, - }, - dictWord{9, 11, 563}, - dictWord{10, 11, 105}, - dictWord{10, 11, 171}, - dictWord{11, 11, 94}, - dictWord{139, 11, 354}, - dictWord{133, 0, 740}, - dictWord{ - 135, - 0, - 1642, - }, - 
dictWord{4, 11, 95}, - dictWord{7, 11, 416}, - dictWord{8, 11, 211}, - dictWord{139, 11, 830}, - dictWord{132, 0, 236}, - dictWord{138, 10, 241}, - dictWord{7, 11, 731}, - dictWord{13, 11, 20}, - dictWord{143, 11, 11}, - dictWord{5, 0, 836}, - dictWord{5, 0, 857}, - dictWord{6, 0, 1680}, - dictWord{135, 0, 59}, - dictWord{ - 10, - 0, - 68, - }, - dictWord{11, 0, 494}, - dictWord{152, 11, 6}, - dictWord{4, 0, 81}, - dictWord{139, 0, 867}, - dictWord{135, 0, 795}, - dictWord{133, 11, 689}, - dictWord{ - 4, - 0, - 1001, - }, - dictWord{5, 0, 282}, - dictWord{6, 0, 1932}, - dictWord{6, 0, 1977}, - dictWord{6, 0, 1987}, - dictWord{6, 0, 1992}, - dictWord{8, 0, 650}, - dictWord{8, 0, 919}, - dictWord{8, 0, 920}, - dictWord{8, 0, 923}, - dictWord{8, 0, 926}, - dictWord{8, 0, 927}, - dictWord{8, 0, 931}, - dictWord{8, 0, 939}, - dictWord{8, 0, 947}, - dictWord{8, 0, 956}, - dictWord{8, 0, 997}, - dictWord{9, 0, 907}, - dictWord{10, 0, 950}, - dictWord{10, 0, 953}, - dictWord{10, 0, 954}, - dictWord{10, 0, 956}, - dictWord{10, 0, 958}, - dictWord{ - 10, - 0, - 959, - }, - dictWord{10, 0, 964}, - dictWord{10, 0, 970}, - dictWord{10, 0, 972}, - dictWord{10, 0, 973}, - dictWord{10, 0, 975}, - dictWord{10, 0, 976}, - dictWord{ - 10, - 0, - 980, - }, - dictWord{10, 0, 981}, - dictWord{10, 0, 984}, - dictWord{10, 0, 988}, - dictWord{10, 0, 990}, - dictWord{10, 0, 995}, - dictWord{10, 0, 999}, - dictWord{ - 10, - 0, - 1002, - }, - dictWord{10, 0, 1003}, - dictWord{10, 0, 1005}, - dictWord{10, 0, 1006}, - dictWord{10, 0, 1008}, - dictWord{10, 0, 1009}, - dictWord{10, 0, 1012}, - dictWord{10, 0, 1014}, - dictWord{10, 0, 1015}, - dictWord{10, 0, 1019}, - dictWord{10, 0, 1020}, - dictWord{10, 0, 1022}, - dictWord{12, 0, 959}, - dictWord{12, 0, 961}, - dictWord{12, 0, 962}, - dictWord{12, 0, 963}, - dictWord{12, 0, 964}, - dictWord{12, 0, 965}, - dictWord{12, 0, 967}, - dictWord{12, 0, 968}, - dictWord{12, 0, 969}, - dictWord{12, 0, 970}, - dictWord{12, 0, 971}, - dictWord{12, 0, 972}, 
- dictWord{12, 0, 973}, - dictWord{12, 0, 974}, - dictWord{12, 0, 975}, - dictWord{12, 0, 976}, - dictWord{ - 12, - 0, - 977, - }, - dictWord{12, 0, 979}, - dictWord{12, 0, 981}, - dictWord{12, 0, 982}, - dictWord{12, 0, 983}, - dictWord{12, 0, 984}, - dictWord{12, 0, 985}, - dictWord{ - 12, - 0, - 986, - }, - dictWord{12, 0, 987}, - dictWord{12, 0, 989}, - dictWord{12, 0, 990}, - dictWord{12, 0, 992}, - dictWord{12, 0, 993}, - dictWord{12, 0, 995}, - dictWord{12, 0, 998}, - dictWord{12, 0, 999}, - dictWord{12, 0, 1000}, - dictWord{12, 0, 1001}, - dictWord{12, 0, 1002}, - dictWord{12, 0, 1004}, - dictWord{12, 0, 1005}, - dictWord{ - 12, - 0, - 1006, - }, - dictWord{12, 0, 1007}, - dictWord{12, 0, 1008}, - dictWord{12, 0, 1009}, - dictWord{12, 0, 1010}, - dictWord{12, 0, 1011}, - dictWord{12, 0, 1012}, - dictWord{12, 0, 1014}, - dictWord{12, 0, 1015}, - dictWord{12, 0, 1016}, - dictWord{12, 0, 1017}, - dictWord{12, 0, 1018}, - dictWord{12, 0, 1019}, - dictWord{ - 12, - 0, - 1022, - }, - dictWord{12, 0, 1023}, - dictWord{14, 0, 475}, - dictWord{14, 0, 477}, - dictWord{14, 0, 478}, - dictWord{14, 0, 479}, - dictWord{14, 0, 480}, - dictWord{ - 14, - 0, - 482, - }, - dictWord{14, 0, 483}, - dictWord{14, 0, 484}, - dictWord{14, 0, 485}, - dictWord{14, 0, 486}, - dictWord{14, 0, 487}, - dictWord{14, 0, 488}, - dictWord{14, 0, 489}, - dictWord{14, 0, 490}, - dictWord{14, 0, 491}, - dictWord{14, 0, 492}, - dictWord{14, 0, 493}, - dictWord{14, 0, 494}, - dictWord{14, 0, 495}, - dictWord{14, 0, 496}, - dictWord{14, 0, 497}, - dictWord{14, 0, 498}, - dictWord{14, 0, 499}, - dictWord{14, 0, 500}, - dictWord{14, 0, 501}, - dictWord{14, 0, 502}, - dictWord{14, 0, 503}, - dictWord{ - 14, - 0, - 504, - }, - dictWord{14, 0, 506}, - dictWord{14, 0, 507}, - dictWord{14, 0, 508}, - dictWord{14, 0, 509}, - dictWord{14, 0, 510}, - dictWord{14, 0, 511}, - dictWord{ - 16, - 0, - 113, - }, - dictWord{16, 0, 114}, - dictWord{16, 0, 115}, - dictWord{16, 0, 117}, - dictWord{16, 0, 118}, - 
dictWord{16, 0, 119}, - dictWord{16, 0, 121}, - dictWord{16, 0, 122}, - dictWord{16, 0, 123}, - dictWord{16, 0, 124}, - dictWord{16, 0, 125}, - dictWord{16, 0, 126}, - dictWord{16, 0, 127}, - dictWord{18, 0, 242}, - dictWord{18, 0, 243}, - dictWord{18, 0, 244}, - dictWord{18, 0, 245}, - dictWord{18, 0, 248}, - dictWord{18, 0, 249}, - dictWord{18, 0, 250}, - dictWord{18, 0, 251}, - dictWord{18, 0, 252}, - dictWord{ - 18, - 0, - 253, - }, - dictWord{18, 0, 254}, - dictWord{18, 0, 255}, - dictWord{20, 0, 125}, - dictWord{20, 0, 126}, - dictWord{148, 0, 127}, - dictWord{7, 11, 1717}, - dictWord{ - 7, - 11, - 1769, - }, - dictWord{138, 11, 546}, - dictWord{7, 11, 1127}, - dictWord{7, 11, 1572}, - dictWord{10, 11, 297}, - dictWord{10, 11, 422}, - dictWord{11, 11, 764}, - dictWord{11, 11, 810}, - dictWord{12, 11, 264}, - dictWord{13, 11, 102}, - dictWord{13, 11, 300}, - dictWord{13, 11, 484}, - dictWord{14, 11, 147}, - dictWord{ - 14, - 11, - 229, - }, - dictWord{17, 11, 71}, - dictWord{18, 11, 118}, - dictWord{147, 11, 120}, - dictWord{6, 0, 1148}, - dictWord{134, 0, 1586}, - dictWord{132, 0, 775}, - dictWord{135, 10, 954}, - dictWord{133, 11, 864}, - dictWord{133, 11, 928}, - dictWord{138, 11, 189}, - dictWord{135, 10, 1958}, - dictWord{6, 10, 549}, - dictWord{ - 8, - 10, - 34, - }, - dictWord{8, 10, 283}, - dictWord{9, 10, 165}, - dictWord{138, 10, 475}, - dictWord{5, 10, 652}, - dictWord{5, 10, 701}, - dictWord{135, 10, 449}, - dictWord{135, 11, 695}, - dictWord{4, 10, 655}, - dictWord{7, 10, 850}, - dictWord{17, 10, 75}, - dictWord{146, 10, 137}, - dictWord{140, 11, 682}, - dictWord{ - 133, - 11, - 523, - }, - dictWord{8, 0, 970}, - dictWord{136, 10, 670}, - dictWord{136, 11, 555}, - dictWord{7, 11, 76}, - dictWord{8, 11, 44}, - dictWord{9, 11, 884}, - dictWord{ - 10, - 11, - 580, - }, - dictWord{11, 11, 399}, - dictWord{11, 11, 894}, - dictWord{15, 11, 122}, - dictWord{18, 11, 144}, - dictWord{147, 11, 61}, - dictWord{6, 10, 159}, - dictWord{ - 6, - 10, - 364, - }, 
- dictWord{7, 10, 516}, - dictWord{7, 10, 1439}, - dictWord{137, 10, 518}, - dictWord{4, 0, 71}, - dictWord{5, 0, 376}, - dictWord{7, 0, 119}, - dictWord{ - 138, - 0, - 665, - }, - dictWord{141, 10, 151}, - dictWord{11, 0, 827}, - dictWord{14, 0, 34}, - dictWord{143, 0, 148}, - dictWord{133, 11, 518}, - dictWord{4, 0, 479}, - dictWord{ - 135, - 11, - 1787, - }, - dictWord{135, 11, 1852}, - dictWord{135, 10, 993}, - dictWord{7, 0, 607}, - dictWord{136, 0, 99}, - dictWord{134, 0, 1960}, - dictWord{132, 0, 793}, - dictWord{4, 0, 41}, - dictWord{5, 0, 74}, - dictWord{7, 0, 1627}, - dictWord{11, 0, 871}, - dictWord{140, 0, 619}, - dictWord{7, 0, 94}, - dictWord{11, 0, 329}, - dictWord{ - 11, - 0, - 965, - }, - dictWord{12, 0, 241}, - dictWord{14, 0, 354}, - dictWord{15, 0, 22}, - dictWord{148, 0, 63}, - dictWord{7, 10, 501}, - dictWord{9, 10, 111}, - dictWord{10, 10, 141}, - dictWord{11, 10, 332}, - dictWord{13, 10, 43}, - dictWord{13, 10, 429}, - dictWord{14, 10, 130}, - dictWord{14, 10, 415}, - dictWord{145, 10, 102}, - dictWord{ - 9, - 0, - 209, - }, - dictWord{137, 0, 300}, - dictWord{134, 0, 1497}, - dictWord{138, 11, 255}, - dictWord{4, 11, 934}, - dictWord{5, 11, 138}, - dictWord{136, 11, 610}, - dictWord{133, 0, 98}, - dictWord{6, 0, 1316}, - dictWord{10, 11, 804}, - dictWord{138, 11, 832}, - dictWord{8, 11, 96}, - dictWord{9, 11, 36}, - dictWord{10, 11, 607}, - dictWord{11, 11, 423}, - dictWord{11, 11, 442}, - dictWord{12, 11, 309}, - dictWord{14, 11, 199}, - dictWord{15, 11, 90}, - dictWord{145, 11, 110}, - dictWord{ - 132, - 0, - 463, - }, - dictWord{5, 10, 149}, - dictWord{136, 10, 233}, - dictWord{133, 10, 935}, - dictWord{4, 11, 652}, - dictWord{8, 11, 320}, - dictWord{9, 11, 13}, - dictWord{ - 9, - 11, - 398, - }, - dictWord{9, 11, 727}, - dictWord{10, 11, 75}, - dictWord{10, 11, 184}, - dictWord{10, 11, 230}, - dictWord{10, 11, 564}, - dictWord{10, 11, 569}, - dictWord{ - 11, - 11, - 973, - }, - dictWord{12, 11, 70}, - dictWord{12, 11, 189}, - 
dictWord{13, 11, 57}, - dictWord{13, 11, 257}, - dictWord{22, 11, 6}, - dictWord{150, 11, 16}, - dictWord{ - 142, - 0, - 291, - }, - dictWord{12, 10, 582}, - dictWord{146, 10, 131}, - dictWord{136, 10, 801}, - dictWord{133, 0, 984}, - dictWord{145, 11, 116}, - dictWord{4, 11, 692}, - dictWord{133, 11, 321}, - dictWord{4, 0, 182}, - dictWord{6, 0, 205}, - dictWord{135, 0, 220}, - dictWord{4, 0, 42}, - dictWord{9, 0, 205}, - dictWord{9, 0, 786}, - dictWord{ - 138, - 0, - 659, - }, - dictWord{6, 0, 801}, - dictWord{11, 11, 130}, - dictWord{140, 11, 609}, - dictWord{132, 0, 635}, - dictWord{5, 11, 345}, - dictWord{135, 11, 1016}, - dictWord{139, 0, 533}, - dictWord{132, 0, 371}, - dictWord{4, 0, 272}, - dictWord{135, 0, 836}, - dictWord{6, 0, 1282}, - dictWord{135, 11, 1100}, - dictWord{5, 0, 825}, - dictWord{134, 0, 1640}, - dictWord{135, 11, 1325}, - dictWord{133, 11, 673}, - dictWord{4, 11, 287}, - dictWord{133, 11, 1018}, - dictWord{135, 0, 357}, - dictWord{ - 6, - 0, - 467, - }, - dictWord{137, 0, 879}, - dictWord{7, 0, 317}, - dictWord{135, 0, 569}, - dictWord{6, 0, 924}, - dictWord{134, 0, 1588}, - dictWord{5, 11, 34}, - dictWord{ - 5, - 10, - 406, - }, - dictWord{10, 11, 724}, - dictWord{12, 11, 444}, - dictWord{13, 11, 354}, - dictWord{18, 11, 32}, - dictWord{23, 11, 24}, - dictWord{23, 11, 31}, - dictWord{ - 152, - 11, - 5, - }, - dictWord{6, 0, 1795}, - dictWord{6, 0, 1835}, - dictWord{6, 0, 1836}, - dictWord{6, 0, 1856}, - dictWord{8, 0, 844}, - dictWord{8, 0, 849}, - dictWord{8, 0, 854}, - dictWord{8, 0, 870}, - dictWord{8, 0, 887}, - dictWord{10, 0, 852}, - dictWord{138, 0, 942}, - dictWord{6, 10, 69}, - dictWord{135, 10, 117}, - dictWord{137, 0, 307}, - dictWord{ - 4, - 0, - 944, - }, - dictWord{6, 0, 1799}, - dictWord{6, 0, 1825}, - dictWord{10, 0, 848}, - dictWord{10, 0, 875}, - dictWord{10, 0, 895}, - dictWord{10, 0, 899}, - dictWord{ - 10, - 0, - 902, - }, - dictWord{140, 0, 773}, - dictWord{11, 0, 43}, - dictWord{13, 0, 72}, - dictWord{141, 0, 142}, 
- dictWord{135, 10, 1830}, - dictWord{134, 11, 382}, - dictWord{ - 4, - 10, - 432, - }, - dictWord{135, 10, 824}, - dictWord{132, 11, 329}, - dictWord{7, 0, 1820}, - dictWord{139, 11, 124}, - dictWord{133, 10, 826}, - dictWord{ - 133, - 0, - 525, - }, - dictWord{132, 11, 906}, - dictWord{7, 11, 1940}, - dictWord{136, 11, 366}, - dictWord{138, 11, 10}, - dictWord{4, 11, 123}, - dictWord{4, 11, 649}, - dictWord{ - 5, - 11, - 605, - }, - dictWord{7, 11, 1509}, - dictWord{136, 11, 36}, - dictWord{6, 0, 110}, - dictWord{135, 0, 1681}, - dictWord{133, 0, 493}, - dictWord{133, 11, 767}, - dictWord{4, 0, 174}, - dictWord{135, 0, 911}, - dictWord{138, 11, 786}, - dictWord{8, 0, 417}, - dictWord{137, 0, 782}, - dictWord{133, 10, 1000}, - dictWord{7, 0, 733}, - dictWord{137, 0, 583}, - dictWord{4, 10, 297}, - dictWord{6, 10, 529}, - dictWord{7, 10, 152}, - dictWord{7, 10, 713}, - dictWord{7, 10, 1845}, - dictWord{8, 10, 710}, - dictWord{8, 10, 717}, - dictWord{12, 10, 639}, - dictWord{140, 10, 685}, - dictWord{4, 0, 32}, - dictWord{5, 0, 215}, - dictWord{6, 0, 269}, - dictWord{7, 0, 1782}, - dictWord{ - 7, - 0, - 1892, - }, - dictWord{10, 0, 16}, - dictWord{11, 0, 822}, - dictWord{11, 0, 954}, - dictWord{141, 0, 481}, - dictWord{4, 11, 273}, - dictWord{5, 11, 658}, - dictWord{ - 133, - 11, - 995, - }, - dictWord{136, 0, 477}, - dictWord{134, 11, 72}, - dictWord{135, 11, 1345}, - dictWord{5, 0, 308}, - dictWord{7, 0, 1088}, - dictWord{4, 10, 520}, - dictWord{ - 135, - 10, - 575, - }, - dictWord{133, 11, 589}, - dictWord{5, 0, 126}, - dictWord{8, 0, 297}, - dictWord{9, 0, 366}, - dictWord{140, 0, 374}, - dictWord{7, 0, 1551}, - dictWord{ - 139, - 0, - 361, - }, - dictWord{5, 11, 117}, - dictWord{6, 11, 514}, - dictWord{6, 11, 541}, - dictWord{7, 11, 1164}, - dictWord{7, 11, 1436}, - dictWord{8, 11, 220}, - dictWord{ - 8, - 11, - 648, - }, - dictWord{10, 11, 688}, - dictWord{139, 11, 560}, - dictWord{133, 11, 686}, - dictWord{4, 0, 946}, - dictWord{6, 0, 1807}, - dictWord{8, 0, 
871}, - dictWord{ - 10, - 0, - 854, - }, - dictWord{10, 0, 870}, - dictWord{10, 0, 888}, - dictWord{10, 0, 897}, - dictWord{10, 0, 920}, - dictWord{12, 0, 722}, - dictWord{12, 0, 761}, - dictWord{ - 12, - 0, - 763, - }, - dictWord{12, 0, 764}, - dictWord{14, 0, 454}, - dictWord{14, 0, 465}, - dictWord{16, 0, 107}, - dictWord{18, 0, 167}, - dictWord{18, 0, 168}, - dictWord{ - 146, - 0, - 172, - }, - dictWord{132, 0, 175}, - dictWord{135, 0, 1307}, - dictWord{132, 0, 685}, - dictWord{135, 11, 1834}, - dictWord{133, 0, 797}, - dictWord{6, 0, 745}, - dictWord{ - 6, - 0, - 858, - }, - dictWord{134, 0, 963}, - dictWord{133, 0, 565}, - dictWord{5, 10, 397}, - dictWord{6, 10, 154}, - dictWord{7, 11, 196}, - dictWord{7, 10, 676}, - dictWord{ - 8, - 10, - 443, - }, - dictWord{8, 10, 609}, - dictWord{9, 10, 24}, - dictWord{9, 10, 325}, - dictWord{10, 10, 35}, - dictWord{10, 11, 765}, - dictWord{11, 11, 347}, - dictWord{ - 11, - 10, - 535, - }, - dictWord{11, 11, 552}, - dictWord{11, 11, 576}, - dictWord{11, 10, 672}, - dictWord{11, 11, 790}, - dictWord{11, 10, 1018}, - dictWord{12, 11, 263}, - dictWord{12, 10, 637}, - dictWord{13, 11, 246}, - dictWord{13, 11, 270}, - dictWord{13, 11, 395}, - dictWord{14, 11, 74}, - dictWord{14, 11, 176}, - dictWord{ - 14, - 11, - 190, - }, - dictWord{14, 11, 398}, - dictWord{14, 11, 412}, - dictWord{15, 11, 32}, - dictWord{15, 11, 63}, - dictWord{16, 10, 30}, - dictWord{16, 11, 88}, - dictWord{ - 147, - 11, - 105, - }, - dictWord{13, 11, 84}, - dictWord{141, 11, 122}, - dictWord{4, 0, 252}, - dictWord{7, 0, 1068}, - dictWord{10, 0, 434}, - dictWord{11, 0, 228}, - dictWord{ - 11, - 0, - 426, - }, - dictWord{13, 0, 231}, - dictWord{18, 0, 106}, - dictWord{148, 0, 87}, - dictWord{137, 0, 826}, - dictWord{4, 11, 589}, - dictWord{139, 11, 282}, - dictWord{ - 5, - 11, - 381, - }, - dictWord{135, 11, 1792}, - dictWord{132, 0, 791}, - dictWord{5, 0, 231}, - dictWord{10, 0, 509}, - dictWord{133, 10, 981}, - dictWord{7, 0, 601}, - dictWord{ - 9, - 0, - 
277, - }, - dictWord{9, 0, 674}, - dictWord{10, 0, 178}, - dictWord{10, 0, 418}, - dictWord{10, 0, 571}, - dictWord{11, 0, 531}, - dictWord{12, 0, 113}, - dictWord{12, 0, 475}, - dictWord{13, 0, 99}, - dictWord{142, 0, 428}, - dictWord{4, 10, 56}, - dictWord{7, 11, 616}, - dictWord{7, 10, 1791}, - dictWord{8, 10, 607}, - dictWord{8, 10, 651}, - dictWord{10, 11, 413}, - dictWord{11, 10, 465}, - dictWord{11, 10, 835}, - dictWord{12, 10, 337}, - dictWord{141, 10, 480}, - dictWord{7, 0, 1591}, - dictWord{144, 0, 43}, - dictWord{9, 10, 158}, - dictWord{138, 10, 411}, - dictWord{135, 0, 1683}, - dictWord{8, 0, 289}, - dictWord{11, 0, 45}, - dictWord{12, 0, 278}, - dictWord{140, 0, 537}, - dictWord{6, 11, 120}, - dictWord{7, 11, 1188}, - dictWord{7, 11, 1710}, - dictWord{8, 11, 286}, - dictWord{9, 11, 667}, - dictWord{11, 11, 592}, - dictWord{ - 139, - 11, - 730, - }, - dictWord{136, 10, 617}, - dictWord{135, 0, 1120}, - dictWord{135, 11, 1146}, - dictWord{139, 10, 563}, - dictWord{4, 11, 352}, - dictWord{4, 10, 369}, - dictWord{135, 11, 687}, - dictWord{143, 11, 38}, - dictWord{4, 0, 399}, - dictWord{5, 0, 119}, - dictWord{5, 0, 494}, - dictWord{7, 0, 751}, - dictWord{9, 0, 556}, - dictWord{ - 14, - 11, - 179, - }, - dictWord{15, 11, 151}, - dictWord{150, 11, 11}, - dictWord{4, 11, 192}, - dictWord{5, 11, 49}, - dictWord{6, 11, 200}, - dictWord{6, 11, 293}, - dictWord{ - 6, - 11, - 1696, - }, - dictWord{135, 11, 488}, - dictWord{4, 0, 398}, - dictWord{133, 0, 660}, - dictWord{7, 0, 1030}, - dictWord{134, 10, 622}, - dictWord{135, 11, 595}, - dictWord{141, 0, 168}, - dictWord{132, 11, 147}, - dictWord{7, 0, 973}, - dictWord{10, 10, 624}, - dictWord{142, 10, 279}, - dictWord{132, 10, 363}, - dictWord{ - 132, - 0, - 642, - }, - dictWord{133, 11, 934}, - dictWord{134, 0, 1615}, - dictWord{7, 11, 505}, - dictWord{135, 11, 523}, - dictWord{7, 0, 594}, - dictWord{7, 0, 851}, - dictWord{ - 7, - 0, - 1858, - }, - dictWord{9, 0, 411}, - dictWord{9, 0, 574}, - dictWord{9, 0, 666}, 
- dictWord{9, 0, 737}, - dictWord{10, 0, 346}, - dictWord{10, 0, 712}, - dictWord{11, 0, 246}, - dictWord{11, 0, 432}, - dictWord{11, 0, 517}, - dictWord{11, 0, 647}, - dictWord{11, 0, 679}, - dictWord{11, 0, 727}, - dictWord{12, 0, 304}, - dictWord{12, 0, 305}, - dictWord{ - 12, - 0, - 323, - }, - dictWord{12, 0, 483}, - dictWord{12, 0, 572}, - dictWord{12, 0, 593}, - dictWord{12, 0, 602}, - dictWord{13, 0, 95}, - dictWord{13, 0, 101}, - dictWord{ - 13, - 0, - 171, - }, - dictWord{13, 0, 315}, - dictWord{13, 0, 378}, - dictWord{13, 0, 425}, - dictWord{13, 0, 475}, - dictWord{14, 0, 63}, - dictWord{14, 0, 380}, - dictWord{14, 0, 384}, - dictWord{15, 0, 133}, - dictWord{18, 0, 112}, - dictWord{148, 0, 72}, - dictWord{135, 0, 1093}, - dictWord{132, 0, 679}, - dictWord{8, 0, 913}, - dictWord{10, 0, 903}, - dictWord{10, 0, 915}, - dictWord{12, 0, 648}, - dictWord{12, 0, 649}, - dictWord{14, 0, 455}, - dictWord{16, 0, 112}, - dictWord{138, 11, 438}, - dictWord{137, 0, 203}, - dictWord{134, 10, 292}, - dictWord{134, 0, 1492}, - dictWord{7, 0, 1374}, - dictWord{8, 0, 540}, - dictWord{5, 10, 177}, - dictWord{6, 10, 616}, - dictWord{7, 10, 827}, - dictWord{9, 10, 525}, - dictWord{138, 10, 656}, - dictWord{135, 0, 1486}, - dictWord{9, 0, 714}, - dictWord{138, 10, 31}, - dictWord{136, 0, 825}, - dictWord{ - 134, - 0, - 1511, - }, - dictWord{132, 11, 637}, - dictWord{134, 0, 952}, - dictWord{4, 10, 161}, - dictWord{133, 10, 631}, - dictWord{5, 0, 143}, - dictWord{5, 0, 769}, - dictWord{ - 6, - 0, - 1760, - }, - dictWord{7, 0, 682}, - dictWord{7, 0, 1992}, - dictWord{136, 0, 736}, - dictWord{132, 0, 700}, - dictWord{134, 0, 1540}, - dictWord{132, 11, 777}, - dictWord{ - 9, - 11, - 867, - }, - dictWord{138, 11, 837}, - dictWord{7, 0, 1557}, - dictWord{135, 10, 1684}, - dictWord{133, 0, 860}, - dictWord{6, 0, 422}, - dictWord{7, 0, 0}, - dictWord{ - 7, - 0, - 1544, - }, - dictWord{9, 0, 605}, - dictWord{11, 0, 990}, - dictWord{12, 0, 235}, - dictWord{12, 0, 453}, - dictWord{13, 
0, 47}, - dictWord{13, 0, 266}, - dictWord{9, 10, 469}, - dictWord{9, 10, 709}, - dictWord{12, 10, 512}, - dictWord{14, 10, 65}, - dictWord{145, 10, 12}, - dictWord{11, 0, 807}, - dictWord{10, 10, 229}, - dictWord{11, 10, 73}, - dictWord{139, 10, 376}, - dictWord{6, 11, 170}, - dictWord{7, 11, 1080}, - dictWord{8, 11, 395}, - dictWord{8, 11, 487}, - dictWord{11, 11, 125}, - dictWord{ - 141, - 11, - 147, - }, - dictWord{5, 0, 515}, - dictWord{137, 0, 131}, - dictWord{7, 0, 1605}, - dictWord{11, 0, 962}, - dictWord{146, 0, 139}, - dictWord{132, 0, 646}, - dictWord{ - 4, - 0, - 396, - }, - dictWord{7, 0, 728}, - dictWord{9, 0, 117}, - dictWord{13, 0, 202}, - dictWord{148, 0, 51}, - dictWord{6, 0, 121}, - dictWord{6, 0, 124}, - dictWord{6, 0, 357}, - dictWord{ - 7, - 0, - 1138, - }, - dictWord{7, 0, 1295}, - dictWord{8, 0, 162}, - dictWord{8, 0, 508}, - dictWord{11, 0, 655}, - dictWord{4, 11, 535}, - dictWord{6, 10, 558}, - dictWord{ - 7, - 10, - 651, - }, - dictWord{8, 11, 618}, - dictWord{9, 10, 0}, - dictWord{10, 10, 34}, - dictWord{139, 10, 1008}, - dictWord{135, 11, 1245}, - dictWord{138, 0, 357}, - dictWord{ - 150, - 11, - 23, - }, - dictWord{133, 0, 237}, - dictWord{135, 0, 1784}, - dictWord{7, 10, 1832}, - dictWord{138, 10, 374}, - dictWord{132, 0, 713}, - dictWord{132, 11, 46}, - dictWord{6, 0, 1536}, - dictWord{10, 0, 348}, - dictWord{5, 11, 811}, - dictWord{6, 11, 1679}, - dictWord{6, 11, 1714}, - dictWord{135, 11, 2032}, - dictWord{ - 11, - 11, - 182, - }, - dictWord{142, 11, 195}, - dictWord{6, 0, 523}, - dictWord{7, 0, 738}, - dictWord{7, 10, 771}, - dictWord{7, 10, 1731}, - dictWord{9, 10, 405}, - dictWord{ - 138, - 10, - 421, - }, - dictWord{7, 11, 1458}, - dictWord{9, 11, 407}, - dictWord{139, 11, 15}, - dictWord{6, 11, 34}, - dictWord{7, 11, 69}, - dictWord{7, 11, 640}, - dictWord{ - 7, - 11, - 1089, - }, - dictWord{8, 11, 708}, - dictWord{8, 11, 721}, - dictWord{9, 11, 363}, - dictWord{9, 11, 643}, - dictWord{10, 11, 628}, - dictWord{148, 11, 98}, - 
dictWord{ - 133, - 0, - 434, - }, - dictWord{135, 0, 1877}, - dictWord{7, 0, 571}, - dictWord{138, 0, 366}, - dictWord{5, 10, 881}, - dictWord{133, 10, 885}, - dictWord{9, 0, 513}, - dictWord{ - 10, - 0, - 25, - }, - dictWord{10, 0, 39}, - dictWord{12, 0, 122}, - dictWord{140, 0, 187}, - dictWord{132, 0, 580}, - dictWord{5, 10, 142}, - dictWord{134, 10, 546}, - dictWord{ - 132, - 11, - 462, - }, - dictWord{137, 0, 873}, - dictWord{5, 10, 466}, - dictWord{11, 10, 571}, - dictWord{12, 10, 198}, - dictWord{13, 10, 283}, - dictWord{14, 10, 186}, - dictWord{15, 10, 21}, - dictWord{143, 10, 103}, - dictWord{7, 0, 171}, - dictWord{4, 10, 185}, - dictWord{5, 10, 257}, - dictWord{5, 10, 839}, - dictWord{5, 10, 936}, - dictWord{ - 9, - 10, - 399, - }, - dictWord{10, 10, 258}, - dictWord{10, 10, 395}, - dictWord{10, 10, 734}, - dictWord{11, 10, 1014}, - dictWord{12, 10, 23}, - dictWord{13, 10, 350}, - dictWord{14, 10, 150}, - dictWord{147, 10, 6}, - dictWord{134, 0, 625}, - dictWord{7, 0, 107}, - dictWord{7, 0, 838}, - dictWord{8, 0, 550}, - dictWord{138, 0, 401}, - dictWord{ - 5, - 11, - 73, - }, - dictWord{6, 11, 23}, - dictWord{134, 11, 338}, - dictWord{4, 0, 943}, - dictWord{6, 0, 1850}, - dictWord{12, 0, 713}, - dictWord{142, 0, 434}, - dictWord{ - 11, - 0, - 588, - }, - dictWord{11, 0, 864}, - dictWord{11, 0, 936}, - dictWord{11, 0, 968}, - dictWord{12, 0, 73}, - dictWord{12, 0, 343}, - dictWord{12, 0, 394}, - dictWord{13, 0, 275}, - dictWord{14, 0, 257}, - dictWord{15, 0, 160}, - dictWord{7, 10, 404}, - dictWord{7, 10, 1377}, - dictWord{7, 10, 1430}, - dictWord{7, 10, 2017}, - dictWord{8, 10, 149}, - dictWord{8, 10, 239}, - dictWord{8, 10, 512}, - dictWord{8, 10, 793}, - dictWord{8, 10, 818}, - dictWord{9, 10, 474}, - dictWord{9, 10, 595}, - dictWord{10, 10, 122}, - dictWord{10, 10, 565}, - dictWord{10, 10, 649}, - dictWord{10, 10, 783}, - dictWord{11, 10, 239}, - dictWord{11, 10, 295}, - dictWord{11, 10, 447}, - dictWord{ - 11, - 10, - 528, - }, - dictWord{11, 10, 
639}, - dictWord{11, 10, 800}, - dictWord{12, 10, 25}, - dictWord{12, 10, 157}, - dictWord{12, 10, 316}, - dictWord{12, 10, 390}, - dictWord{ - 12, - 10, - 391, - }, - dictWord{12, 10, 395}, - dictWord{12, 10, 478}, - dictWord{12, 10, 503}, - dictWord{12, 10, 592}, - dictWord{12, 10, 680}, - dictWord{13, 10, 50}, - dictWord{13, 10, 53}, - dictWord{13, 10, 132}, - dictWord{13, 10, 198}, - dictWord{13, 10, 322}, - dictWord{13, 10, 415}, - dictWord{13, 10, 511}, - dictWord{14, 10, 71}, - dictWord{14, 10, 395}, - dictWord{15, 10, 71}, - dictWord{15, 10, 136}, - dictWord{17, 10, 123}, - dictWord{18, 10, 93}, - dictWord{147, 10, 58}, - dictWord{ - 133, - 0, - 768, - }, - dictWord{11, 0, 103}, - dictWord{142, 0, 0}, - dictWord{136, 10, 712}, - dictWord{132, 0, 799}, - dictWord{132, 0, 894}, - dictWord{7, 11, 725}, - dictWord{ - 8, - 11, - 498, - }, - dictWord{139, 11, 268}, - dictWord{135, 11, 1798}, - dictWord{135, 11, 773}, - dictWord{141, 11, 360}, - dictWord{4, 10, 377}, - dictWord{152, 10, 13}, - dictWord{135, 0, 1673}, - dictWord{132, 11, 583}, - dictWord{134, 0, 1052}, - dictWord{133, 11, 220}, - dictWord{140, 11, 69}, - dictWord{132, 11, 544}, - dictWord{ - 4, - 10, - 180, - }, - dictWord{135, 10, 1906}, - dictWord{134, 0, 272}, - dictWord{4, 0, 441}, - dictWord{134, 0, 1421}, - dictWord{4, 0, 9}, - dictWord{5, 0, 128}, - dictWord{ - 7, - 0, - 368, - }, - dictWord{11, 0, 480}, - dictWord{148, 0, 3}, - dictWord{5, 11, 176}, - dictWord{6, 11, 437}, - dictWord{6, 11, 564}, - dictWord{11, 11, 181}, - dictWord{ - 141, - 11, - 183, - }, - dictWord{132, 10, 491}, - dictWord{7, 0, 1182}, - dictWord{141, 11, 67}, - dictWord{6, 0, 1346}, - dictWord{4, 10, 171}, - dictWord{138, 10, 234}, - dictWord{ - 4, - 10, - 586, - }, - dictWord{7, 10, 1186}, - dictWord{138, 10, 631}, - dictWord{136, 0, 682}, - dictWord{134, 0, 1004}, - dictWord{15, 0, 24}, - dictWord{143, 11, 24}, - dictWord{134, 0, 968}, - dictWord{4, 0, 2}, - dictWord{6, 0, 742}, - dictWord{6, 0, 793}, - dictWord{7, 
0, 545}, - dictWord{7, 0, 894}, - dictWord{9, 10, 931}, - dictWord{ - 10, - 10, - 334, - }, - dictWord{148, 10, 71}, - dictWord{136, 11, 600}, - dictWord{133, 10, 765}, - dictWord{9, 0, 769}, - dictWord{140, 0, 185}, - dictWord{4, 11, 790}, - dictWord{ - 5, - 11, - 273, - }, - dictWord{134, 11, 394}, - dictWord{7, 0, 474}, - dictWord{137, 0, 578}, - dictWord{4, 11, 135}, - dictWord{6, 11, 127}, - dictWord{7, 11, 1185}, - dictWord{ - 7, - 11, - 1511, - }, - dictWord{8, 11, 613}, - dictWord{11, 11, 5}, - dictWord{12, 11, 133}, - dictWord{12, 11, 495}, - dictWord{12, 11, 586}, - dictWord{14, 11, 385}, - dictWord{15, 11, 118}, - dictWord{17, 11, 20}, - dictWord{146, 11, 98}, - dictWord{133, 10, 424}, - dictWord{5, 0, 530}, - dictWord{142, 0, 113}, - dictWord{6, 11, 230}, - dictWord{7, 11, 961}, - dictWord{7, 11, 1085}, - dictWord{136, 11, 462}, - dictWord{7, 11, 1954}, - dictWord{137, 11, 636}, - dictWord{136, 10, 714}, - dictWord{ - 149, - 11, - 6, - }, - dictWord{135, 10, 685}, - dictWord{9, 10, 420}, - dictWord{10, 10, 269}, - dictWord{10, 10, 285}, - dictWord{10, 10, 576}, - dictWord{11, 10, 397}, - dictWord{13, 10, 175}, - dictWord{145, 10, 90}, - dictWord{132, 10, 429}, - dictWord{5, 0, 556}, - dictWord{5, 11, 162}, - dictWord{136, 11, 68}, - dictWord{132, 11, 654}, - dictWord{4, 11, 156}, - dictWord{7, 11, 998}, - dictWord{7, 11, 1045}, - dictWord{7, 11, 1860}, - dictWord{9, 11, 48}, - dictWord{9, 11, 692}, - dictWord{11, 11, 419}, - dictWord{139, 11, 602}, - dictWord{6, 0, 1317}, - dictWord{8, 0, 16}, - dictWord{9, 0, 825}, - dictWord{12, 0, 568}, - dictWord{7, 11, 1276}, - dictWord{8, 11, 474}, - dictWord{137, 11, 652}, - dictWord{18, 0, 97}, - dictWord{7, 10, 18}, - dictWord{7, 10, 699}, - dictWord{7, 10, 1966}, - dictWord{8, 10, 752}, - dictWord{9, 10, 273}, - dictWord{ - 9, - 10, - 412, - }, - dictWord{9, 10, 703}, - dictWord{10, 10, 71}, - dictWord{10, 10, 427}, - dictWord{138, 10, 508}, - dictWord{10, 0, 703}, - dictWord{7, 11, 1454}, - dictWord{138, 11, 
703}, - dictWord{4, 10, 53}, - dictWord{5, 10, 186}, - dictWord{135, 10, 752}, - dictWord{134, 0, 892}, - dictWord{134, 0, 1571}, - dictWord{8, 10, 575}, - dictWord{10, 10, 289}, - dictWord{139, 10, 319}, - dictWord{6, 0, 186}, - dictWord{137, 0, 426}, - dictWord{134, 0, 1101}, - dictWord{132, 10, 675}, - dictWord{ - 132, - 0, - 585, - }, - dictWord{6, 0, 1870}, - dictWord{137, 0, 937}, - dictWord{152, 11, 10}, - dictWord{9, 11, 197}, - dictWord{10, 11, 300}, - dictWord{12, 11, 473}, - dictWord{ - 13, - 11, - 90, - }, - dictWord{141, 11, 405}, - dictWord{4, 0, 93}, - dictWord{5, 0, 252}, - dictWord{6, 0, 229}, - dictWord{7, 0, 291}, - dictWord{9, 0, 550}, - dictWord{139, 0, 644}, - dictWord{137, 0, 749}, - dictWord{9, 0, 162}, - dictWord{6, 10, 209}, - dictWord{8, 10, 468}, - dictWord{9, 10, 210}, - dictWord{11, 10, 36}, - dictWord{12, 10, 28}, - dictWord{12, 10, 630}, - dictWord{13, 10, 21}, - dictWord{13, 10, 349}, - dictWord{14, 10, 7}, - dictWord{145, 10, 13}, - dictWord{132, 0, 381}, - dictWord{132, 11, 606}, - dictWord{4, 10, 342}, - dictWord{135, 10, 1179}, - dictWord{7, 11, 1587}, - dictWord{7, 11, 1707}, - dictWord{10, 11, 528}, - dictWord{139, 11, 504}, - dictWord{ - 12, - 11, - 39, - }, - dictWord{13, 11, 265}, - dictWord{141, 11, 439}, - dictWord{4, 10, 928}, - dictWord{133, 10, 910}, - dictWord{7, 10, 1838}, - dictWord{7, 11, 1978}, - dictWord{136, 11, 676}, - dictWord{6, 0, 762}, - dictWord{6, 0, 796}, - dictWord{134, 0, 956}, - dictWord{4, 10, 318}, - dictWord{4, 10, 496}, - dictWord{7, 10, 856}, - dictWord{139, 10, 654}, - dictWord{137, 11, 242}, - dictWord{4, 11, 361}, - dictWord{133, 11, 315}, - dictWord{132, 11, 461}, - dictWord{132, 11, 472}, - dictWord{ - 132, - 0, - 857, - }, - dictWord{5, 0, 21}, - dictWord{6, 0, 77}, - dictWord{6, 0, 157}, - dictWord{7, 0, 974}, - dictWord{7, 0, 1301}, - dictWord{7, 0, 1339}, - dictWord{7, 0, 1490}, - dictWord{ - 7, - 0, - 1873, - }, - dictWord{9, 0, 628}, - dictWord{7, 10, 915}, - dictWord{8, 10, 247}, - 
dictWord{147, 10, 0}, - dictWord{4, 10, 202}, - dictWord{5, 10, 382}, - dictWord{ - 6, - 10, - 454, - }, - dictWord{7, 10, 936}, - dictWord{7, 10, 1803}, - dictWord{8, 10, 758}, - dictWord{9, 10, 375}, - dictWord{9, 10, 895}, - dictWord{10, 10, 743}, - dictWord{ - 10, - 10, - 792, - }, - dictWord{11, 10, 978}, - dictWord{11, 10, 1012}, - dictWord{142, 10, 109}, - dictWord{7, 11, 617}, - dictWord{10, 11, 498}, - dictWord{11, 11, 501}, - dictWord{12, 11, 16}, - dictWord{140, 11, 150}, - dictWord{7, 10, 1150}, - dictWord{7, 10, 1425}, - dictWord{7, 10, 1453}, - dictWord{10, 11, 747}, - dictWord{ - 140, - 10, - 513, - }, - dictWord{133, 11, 155}, - dictWord{11, 0, 919}, - dictWord{141, 0, 409}, - dictWord{138, 10, 791}, - dictWord{10, 0, 633}, - dictWord{139, 11, 729}, - dictWord{ - 7, - 11, - 163, - }, - dictWord{8, 11, 319}, - dictWord{9, 11, 402}, - dictWord{10, 11, 24}, - dictWord{10, 11, 681}, - dictWord{11, 11, 200}, - dictWord{11, 11, 567}, - dictWord{12, 11, 253}, - dictWord{12, 11, 410}, - dictWord{142, 11, 219}, - dictWord{5, 11, 475}, - dictWord{7, 11, 1780}, - dictWord{9, 11, 230}, - dictWord{11, 11, 297}, - dictWord{11, 11, 558}, - dictWord{14, 11, 322}, - dictWord{147, 11, 76}, - dictWord{7, 0, 332}, - dictWord{6, 10, 445}, - dictWord{137, 10, 909}, - dictWord{ - 135, - 11, - 1956, - }, - dictWord{136, 11, 274}, - dictWord{134, 10, 578}, - dictWord{135, 0, 1489}, - dictWord{135, 11, 1848}, - dictWord{5, 11, 944}, - dictWord{ - 134, - 11, - 1769, - }, - dictWord{132, 11, 144}, - dictWord{136, 10, 766}, - dictWord{4, 0, 832}, - dictWord{135, 10, 541}, - dictWord{8, 0, 398}, - dictWord{9, 0, 681}, - dictWord{ - 139, - 0, - 632, - }, - dictWord{136, 0, 645}, - dictWord{9, 0, 791}, - dictWord{10, 0, 93}, - dictWord{16, 0, 13}, - dictWord{17, 0, 23}, - dictWord{18, 0, 135}, - dictWord{19, 0, 12}, - dictWord{20, 0, 1}, - dictWord{20, 0, 12}, - dictWord{148, 0, 14}, - dictWord{6, 11, 247}, - dictWord{137, 11, 555}, - dictWord{134, 0, 20}, - dictWord{132, 0, 800}, 
- dictWord{135, 0, 1841}, - dictWord{139, 10, 983}, - dictWord{137, 10, 768}, - dictWord{132, 10, 584}, - dictWord{141, 11, 51}, - dictWord{6, 0, 1993}, - dictWord{ - 4, - 11, - 620, - }, - dictWord{138, 11, 280}, - dictWord{136, 0, 769}, - dictWord{11, 0, 290}, - dictWord{11, 0, 665}, - dictWord{7, 11, 1810}, - dictWord{11, 11, 866}, - dictWord{ - 12, - 11, - 103, - }, - dictWord{13, 11, 495}, - dictWord{17, 11, 67}, - dictWord{147, 11, 74}, - dictWord{134, 0, 1426}, - dictWord{139, 0, 60}, - dictWord{4, 10, 326}, - dictWord{135, 10, 1770}, - dictWord{7, 0, 1874}, - dictWord{9, 0, 641}, - dictWord{132, 10, 226}, - dictWord{6, 0, 644}, - dictWord{5, 10, 426}, - dictWord{8, 10, 30}, - dictWord{ - 9, - 10, - 2, - }, - dictWord{11, 10, 549}, - dictWord{147, 10, 122}, - dictWord{5, 11, 428}, - dictWord{138, 11, 442}, - dictWord{135, 11, 1871}, - dictWord{ - 135, - 0, - 1757, - }, - dictWord{147, 10, 117}, - dictWord{135, 0, 937}, - dictWord{135, 0, 1652}, - dictWord{6, 0, 654}, - dictWord{134, 0, 1476}, - dictWord{133, 11, 99}, - dictWord{135, 0, 527}, - dictWord{132, 10, 345}, - dictWord{4, 10, 385}, - dictWord{4, 11, 397}, - dictWord{7, 10, 265}, - dictWord{135, 10, 587}, - dictWord{4, 0, 579}, - dictWord{5, 0, 226}, - dictWord{5, 0, 323}, - dictWord{135, 0, 960}, - dictWord{134, 0, 1486}, - dictWord{8, 11, 502}, - dictWord{144, 11, 9}, - dictWord{4, 10, 347}, - dictWord{ - 5, - 10, - 423, - }, - dictWord{5, 10, 996}, - dictWord{135, 10, 1329}, - dictWord{7, 11, 727}, - dictWord{146, 11, 73}, - dictWord{4, 11, 485}, - dictWord{7, 11, 353}, - dictWord{7, 10, 1259}, - dictWord{7, 11, 1523}, - dictWord{9, 10, 125}, - dictWord{139, 10, 65}, - dictWord{6, 0, 325}, - dictWord{5, 10, 136}, - dictWord{6, 11, 366}, - dictWord{ - 7, - 11, - 1384, - }, - dictWord{7, 11, 1601}, - dictWord{136, 10, 644}, - dictWord{138, 11, 160}, - dictWord{6, 0, 1345}, - dictWord{137, 11, 282}, - dictWord{18, 0, 91}, - dictWord{147, 0, 70}, - dictWord{136, 0, 404}, - dictWord{4, 11, 157}, - 
dictWord{133, 11, 471}, - dictWord{133, 0, 973}, - dictWord{6, 0, 135}, - dictWord{ - 135, - 0, - 1176, - }, - dictWord{8, 11, 116}, - dictWord{11, 11, 551}, - dictWord{142, 11, 159}, - dictWord{4, 0, 549}, - dictWord{4, 10, 433}, - dictWord{133, 10, 719}, - dictWord{ - 136, - 0, - 976, - }, - dictWord{5, 11, 160}, - dictWord{7, 11, 363}, - dictWord{7, 11, 589}, - dictWord{10, 11, 170}, - dictWord{141, 11, 55}, - dictWord{144, 0, 21}, - dictWord{ - 144, - 0, - 51, - }, - dictWord{135, 0, 314}, - dictWord{135, 10, 1363}, - dictWord{4, 11, 108}, - dictWord{7, 11, 405}, - dictWord{10, 11, 491}, - dictWord{139, 11, 498}, - dictWord{146, 0, 4}, - dictWord{4, 10, 555}, - dictWord{8, 10, 536}, - dictWord{10, 10, 288}, - dictWord{139, 10, 1005}, - dictWord{135, 11, 1005}, - dictWord{6, 0, 281}, - dictWord{7, 0, 6}, - dictWord{8, 0, 282}, - dictWord{8, 0, 480}, - dictWord{8, 0, 499}, - dictWord{9, 0, 198}, - dictWord{10, 0, 143}, - dictWord{10, 0, 169}, - dictWord{ - 10, - 0, - 211, - }, - dictWord{10, 0, 417}, - dictWord{10, 0, 574}, - dictWord{11, 0, 147}, - dictWord{11, 0, 395}, - dictWord{12, 0, 75}, - dictWord{12, 0, 407}, - dictWord{12, 0, 608}, - dictWord{13, 0, 500}, - dictWord{142, 0, 251}, - dictWord{6, 0, 1093}, - dictWord{6, 0, 1405}, - dictWord{9, 10, 370}, - dictWord{138, 10, 90}, - dictWord{4, 11, 926}, - dictWord{133, 11, 983}, - dictWord{135, 0, 1776}, - dictWord{134, 0, 1528}, - dictWord{132, 0, 419}, - dictWord{132, 11, 538}, - dictWord{6, 11, 294}, - dictWord{ - 7, - 11, - 1267, - }, - dictWord{136, 11, 624}, - dictWord{135, 11, 1772}, - dictWord{138, 11, 301}, - dictWord{4, 10, 257}, - dictWord{135, 10, 2031}, - dictWord{4, 0, 138}, - dictWord{7, 0, 1012}, - dictWord{7, 0, 1280}, - dictWord{9, 0, 76}, - dictWord{135, 10, 1768}, - dictWord{132, 11, 757}, - dictWord{5, 0, 29}, - dictWord{140, 0, 638}, - dictWord{7, 11, 655}, - dictWord{135, 11, 1844}, - dictWord{7, 0, 1418}, - dictWord{6, 11, 257}, - dictWord{135, 11, 1522}, - dictWord{8, 11, 469}, - 
dictWord{ - 138, - 11, - 47, - }, - dictWord{142, 11, 278}, - dictWord{6, 10, 83}, - dictWord{6, 10, 1733}, - dictWord{135, 10, 1389}, - dictWord{11, 11, 204}, - dictWord{11, 11, 243}, - dictWord{140, 11, 293}, - dictWord{135, 11, 1875}, - dictWord{6, 0, 1710}, - dictWord{135, 0, 2038}, - dictWord{137, 11, 299}, - dictWord{4, 0, 17}, - dictWord{5, 0, 23}, - dictWord{7, 0, 995}, - dictWord{11, 0, 383}, - dictWord{11, 0, 437}, - dictWord{12, 0, 460}, - dictWord{140, 0, 532}, - dictWord{133, 0, 862}, - dictWord{137, 10, 696}, - dictWord{6, 0, 592}, - dictWord{138, 0, 946}, - dictWord{138, 11, 599}, - dictWord{7, 10, 1718}, - dictWord{9, 10, 95}, - dictWord{9, 10, 274}, - dictWord{10, 10, 279}, - dictWord{10, 10, 317}, - dictWord{10, 10, 420}, - dictWord{11, 10, 303}, - dictWord{11, 10, 808}, - dictWord{12, 10, 134}, - dictWord{12, 10, 367}, - dictWord{ - 13, - 10, - 149, - }, - dictWord{13, 10, 347}, - dictWord{14, 10, 349}, - dictWord{14, 10, 406}, - dictWord{18, 10, 22}, - dictWord{18, 10, 89}, - dictWord{18, 10, 122}, - dictWord{ - 147, - 10, - 47, - }, - dictWord{8, 0, 70}, - dictWord{12, 0, 171}, - dictWord{141, 0, 272}, - dictWord{133, 10, 26}, - dictWord{132, 10, 550}, - dictWord{137, 0, 812}, - dictWord{ - 10, - 0, - 233, - }, - dictWord{139, 0, 76}, - dictWord{134, 0, 988}, - dictWord{134, 0, 442}, - dictWord{136, 10, 822}, - dictWord{7, 0, 896}, - dictWord{4, 10, 902}, - dictWord{ - 5, - 10, - 809, - }, - dictWord{134, 10, 122}, - dictWord{5, 11, 150}, - dictWord{7, 11, 106}, - dictWord{8, 11, 603}, - dictWord{9, 11, 593}, - dictWord{9, 11, 634}, - dictWord{ - 10, - 11, - 44, - }, - dictWord{10, 11, 173}, - dictWord{11, 11, 462}, - dictWord{11, 11, 515}, - dictWord{13, 11, 216}, - dictWord{13, 11, 288}, - dictWord{142, 11, 400}, - dictWord{136, 0, 483}, - dictWord{135, 10, 262}, - dictWord{6, 0, 1709}, - dictWord{133, 10, 620}, - dictWord{4, 10, 34}, - dictWord{5, 10, 574}, - dictWord{7, 10, 279}, - dictWord{7, 10, 1624}, - dictWord{136, 10, 601}, - 
dictWord{137, 10, 170}, - dictWord{147, 0, 119}, - dictWord{12, 11, 108}, - dictWord{141, 11, 291}, - dictWord{ - 11, - 0, - 69, - }, - dictWord{12, 0, 105}, - dictWord{12, 0, 117}, - dictWord{13, 0, 213}, - dictWord{14, 0, 13}, - dictWord{14, 0, 62}, - dictWord{14, 0, 177}, - dictWord{14, 0, 421}, - dictWord{15, 0, 19}, - dictWord{146, 0, 141}, - dictWord{137, 0, 309}, - dictWord{11, 11, 278}, - dictWord{142, 11, 73}, - dictWord{7, 0, 608}, - dictWord{7, 0, 976}, - dictWord{9, 0, 146}, - dictWord{10, 0, 206}, - dictWord{10, 0, 596}, - dictWord{13, 0, 218}, - dictWord{142, 0, 153}, - dictWord{133, 10, 332}, - dictWord{6, 10, 261}, - dictWord{ - 8, - 10, - 182, - }, - dictWord{139, 10, 943}, - dictWord{4, 11, 493}, - dictWord{144, 11, 55}, - dictWord{134, 10, 1721}, - dictWord{132, 0, 768}, - dictWord{4, 10, 933}, - dictWord{133, 10, 880}, - dictWord{7, 11, 555}, - dictWord{7, 11, 1316}, - dictWord{7, 11, 1412}, - dictWord{7, 11, 1839}, - dictWord{9, 11, 192}, - dictWord{ - 9, - 11, - 589, - }, - dictWord{11, 11, 241}, - dictWord{11, 11, 676}, - dictWord{11, 11, 811}, - dictWord{11, 11, 891}, - dictWord{12, 11, 140}, - dictWord{12, 11, 346}, - dictWord{ - 12, - 11, - 479, - }, - dictWord{13, 11, 30}, - dictWord{13, 11, 49}, - dictWord{13, 11, 381}, - dictWord{14, 11, 188}, - dictWord{15, 11, 150}, - dictWord{16, 11, 76}, - dictWord{18, 11, 30}, - dictWord{148, 11, 52}, - dictWord{4, 0, 518}, - dictWord{135, 0, 1136}, - dictWord{6, 11, 568}, - dictWord{7, 11, 112}, - dictWord{7, 11, 1804}, - dictWord{8, 11, 362}, - dictWord{8, 11, 410}, - dictWord{8, 11, 830}, - dictWord{9, 11, 514}, - dictWord{11, 11, 649}, - dictWord{142, 11, 157}, - dictWord{135, 11, 673}, - dictWord{8, 0, 689}, - dictWord{137, 0, 863}, - dictWord{4, 0, 18}, - dictWord{7, 0, 145}, - dictWord{7, 0, 444}, - dictWord{7, 0, 1278}, - dictWord{8, 0, 49}, - dictWord{8, 0, 400}, - dictWord{9, 0, 71}, - dictWord{9, 0, 250}, - dictWord{10, 0, 459}, - dictWord{12, 0, 160}, - dictWord{16, 0, 24}, - 
dictWord{132, 11, 625}, - dictWord{140, 0, 1020}, - dictWord{4, 0, 997}, - dictWord{6, 0, 1946}, - dictWord{6, 0, 1984}, - dictWord{134, 0, 1998}, - dictWord{6, 11, 16}, - dictWord{6, 11, 158}, - dictWord{7, 11, 43}, - dictWord{ - 7, - 11, - 129, - }, - dictWord{7, 11, 181}, - dictWord{8, 11, 276}, - dictWord{8, 11, 377}, - dictWord{10, 11, 523}, - dictWord{11, 11, 816}, - dictWord{12, 11, 455}, - dictWord{ - 13, - 11, - 303, - }, - dictWord{142, 11, 135}, - dictWord{133, 10, 812}, - dictWord{134, 0, 658}, - dictWord{4, 11, 1}, - dictWord{7, 11, 1143}, - dictWord{7, 11, 1463}, - dictWord{8, 11, 61}, - dictWord{9, 11, 207}, - dictWord{9, 11, 390}, - dictWord{9, 11, 467}, - dictWord{139, 11, 836}, - dictWord{150, 11, 26}, - dictWord{140, 0, 106}, - dictWord{6, 0, 1827}, - dictWord{10, 0, 931}, - dictWord{18, 0, 166}, - dictWord{20, 0, 114}, - dictWord{4, 10, 137}, - dictWord{7, 10, 1178}, - dictWord{7, 11, 1319}, - dictWord{135, 10, 1520}, - dictWord{133, 0, 1010}, - dictWord{4, 11, 723}, - dictWord{5, 11, 895}, - dictWord{7, 11, 1031}, - dictWord{8, 11, 199}, - dictWord{8, 11, 340}, - dictWord{9, 11, 153}, - dictWord{9, 11, 215}, - dictWord{10, 11, 21}, - dictWord{10, 11, 59}, - dictWord{10, 11, 80}, - dictWord{10, 11, 224}, - dictWord{11, 11, 229}, - dictWord{11, 11, 652}, - dictWord{12, 11, 192}, - dictWord{13, 11, 146}, - dictWord{142, 11, 91}, - dictWord{132, 11, 295}, - dictWord{6, 11, 619}, - dictWord{ - 7, - 11, - 898, - }, - dictWord{7, 11, 1092}, - dictWord{8, 11, 485}, - dictWord{18, 11, 28}, - dictWord{147, 11, 116}, - dictWord{137, 11, 51}, - dictWord{6, 10, 1661}, - dictWord{ - 7, - 10, - 1975, - }, - dictWord{7, 10, 2009}, - dictWord{135, 10, 2011}, - dictWord{5, 11, 309}, - dictWord{140, 11, 211}, - dictWord{5, 0, 87}, - dictWord{7, 0, 313}, - dictWord{ - 7, - 0, - 1103, - }, - dictWord{10, 0, 208}, - dictWord{10, 0, 582}, - dictWord{11, 0, 389}, - dictWord{11, 0, 813}, - dictWord{12, 0, 385}, - dictWord{13, 0, 286}, - dictWord{ - 14, - 0, - 124, - }, 
- dictWord{146, 0, 108}, - dictWord{5, 11, 125}, - dictWord{8, 11, 77}, - dictWord{138, 11, 15}, - dictWord{132, 0, 267}, - dictWord{133, 0, 703}, - dictWord{ - 137, - 11, - 155, - }, - dictWord{133, 11, 439}, - dictWord{11, 11, 164}, - dictWord{140, 11, 76}, - dictWord{9, 0, 496}, - dictWord{5, 10, 89}, - dictWord{7, 10, 1915}, - dictWord{ - 9, - 10, - 185, - }, - dictWord{9, 10, 235}, - dictWord{10, 10, 64}, - dictWord{10, 10, 270}, - dictWord{10, 10, 403}, - dictWord{10, 10, 469}, - dictWord{10, 10, 529}, - dictWord{10, 10, 590}, - dictWord{11, 10, 140}, - dictWord{11, 10, 860}, - dictWord{13, 10, 1}, - dictWord{13, 10, 422}, - dictWord{14, 10, 341}, - dictWord{14, 10, 364}, - dictWord{17, 10, 93}, - dictWord{18, 10, 113}, - dictWord{19, 10, 97}, - dictWord{147, 10, 113}, - dictWord{133, 10, 695}, - dictWord{135, 0, 1121}, - dictWord{ - 5, - 10, - 6, - }, - dictWord{6, 10, 183}, - dictWord{7, 10, 680}, - dictWord{7, 10, 978}, - dictWord{7, 10, 1013}, - dictWord{7, 10, 1055}, - dictWord{12, 10, 230}, - dictWord{ - 13, - 10, - 172, - }, - dictWord{146, 10, 29}, - dictWord{4, 11, 8}, - dictWord{7, 11, 1152}, - dictWord{7, 11, 1153}, - dictWord{7, 11, 1715}, - dictWord{9, 11, 374}, - dictWord{ - 10, - 11, - 478, - }, - dictWord{139, 11, 648}, - dictWord{135, 11, 1099}, - dictWord{6, 10, 29}, - dictWord{139, 10, 63}, - dictWord{4, 0, 561}, - dictWord{10, 0, 249}, - dictWord{ - 139, - 0, - 209, - }, - dictWord{132, 0, 760}, - dictWord{7, 11, 799}, - dictWord{138, 11, 511}, - dictWord{136, 11, 87}, - dictWord{9, 0, 154}, - dictWord{140, 0, 485}, - dictWord{136, 0, 255}, - dictWord{132, 0, 323}, - dictWord{140, 0, 419}, - dictWord{132, 10, 311}, - dictWord{134, 10, 1740}, - dictWord{4, 0, 368}, - dictWord{ - 135, - 0, - 641, - }, - dictWord{7, 10, 170}, - dictWord{8, 10, 90}, - dictWord{8, 10, 177}, - dictWord{8, 10, 415}, - dictWord{11, 10, 714}, - dictWord{142, 10, 281}, - dictWord{ - 4, - 11, - 69, - }, - dictWord{5, 11, 122}, - dictWord{9, 11, 656}, - dictWord{138, 
11, 464}, - dictWord{5, 11, 849}, - dictWord{134, 11, 1633}, - dictWord{8, 0, 522}, - dictWord{ - 142, - 0, - 328, - }, - dictWord{11, 10, 91}, - dictWord{13, 10, 129}, - dictWord{15, 10, 101}, - dictWord{145, 10, 125}, - dictWord{7, 0, 562}, - dictWord{8, 0, 551}, - dictWord{ - 4, - 10, - 494, - }, - dictWord{6, 10, 74}, - dictWord{7, 10, 44}, - dictWord{11, 11, 499}, - dictWord{12, 10, 17}, - dictWord{15, 10, 5}, - dictWord{148, 10, 11}, - dictWord{4, 10, 276}, - dictWord{133, 10, 296}, - dictWord{9, 0, 92}, - dictWord{147, 0, 91}, - dictWord{4, 10, 7}, - dictWord{5, 10, 90}, - dictWord{5, 10, 158}, - dictWord{6, 10, 542}, - dictWord{ - 7, - 10, - 221, - }, - dictWord{7, 10, 1574}, - dictWord{9, 10, 490}, - dictWord{10, 10, 540}, - dictWord{11, 10, 443}, - dictWord{139, 10, 757}, - dictWord{6, 0, 525}, - dictWord{ - 6, - 0, - 1976, - }, - dictWord{8, 0, 806}, - dictWord{9, 0, 876}, - dictWord{140, 0, 284}, - dictWord{5, 11, 859}, - dictWord{7, 10, 588}, - dictWord{7, 11, 1160}, - dictWord{ - 8, - 11, - 107, - }, - dictWord{9, 10, 175}, - dictWord{9, 11, 291}, - dictWord{9, 11, 439}, - dictWord{10, 10, 530}, - dictWord{10, 11, 663}, - dictWord{11, 11, 609}, - dictWord{ - 140, - 11, - 197, - }, - dictWord{7, 11, 168}, - dictWord{13, 11, 196}, - dictWord{141, 11, 237}, - dictWord{139, 0, 958}, - dictWord{133, 0, 594}, - dictWord{135, 10, 580}, - dictWord{7, 10, 88}, - dictWord{136, 10, 627}, - dictWord{6, 0, 479}, - dictWord{6, 0, 562}, - dictWord{7, 0, 1060}, - dictWord{13, 0, 6}, - dictWord{5, 10, 872}, - dictWord{ - 6, - 10, - 57, - }, - dictWord{7, 10, 471}, - dictWord{9, 10, 447}, - dictWord{137, 10, 454}, - dictWord{136, 11, 413}, - dictWord{145, 11, 19}, - dictWord{4, 11, 117}, - dictWord{ - 6, - 11, - 372, - }, - dictWord{7, 11, 1905}, - dictWord{142, 11, 323}, - dictWord{4, 11, 722}, - dictWord{139, 11, 471}, - dictWord{17, 0, 61}, - dictWord{5, 10, 31}, - dictWord{134, 10, 614}, - dictWord{8, 10, 330}, - dictWord{140, 10, 477}, - dictWord{7, 10, 1200}, - 
dictWord{138, 10, 460}, - dictWord{6, 10, 424}, - dictWord{ - 135, - 10, - 1866, - }, - dictWord{6, 0, 1641}, - dictWord{136, 0, 820}, - dictWord{6, 0, 1556}, - dictWord{134, 0, 1618}, - dictWord{9, 11, 5}, - dictWord{12, 11, 216}, - dictWord{ - 12, - 11, - 294, - }, - dictWord{12, 11, 298}, - dictWord{12, 11, 400}, - dictWord{12, 11, 518}, - dictWord{13, 11, 229}, - dictWord{143, 11, 139}, - dictWord{15, 11, 155}, - dictWord{144, 11, 79}, - dictWord{4, 0, 302}, - dictWord{135, 0, 1766}, - dictWord{5, 10, 13}, - dictWord{134, 10, 142}, - dictWord{6, 0, 148}, - dictWord{7, 0, 1313}, - dictWord{ - 7, - 10, - 116, - }, - dictWord{8, 10, 322}, - dictWord{8, 10, 755}, - dictWord{9, 10, 548}, - dictWord{10, 10, 714}, - dictWord{11, 10, 884}, - dictWord{141, 10, 324}, - dictWord{137, 0, 676}, - dictWord{9, 11, 88}, - dictWord{139, 11, 270}, - dictWord{5, 11, 12}, - dictWord{7, 11, 375}, - dictWord{137, 11, 438}, - dictWord{134, 0, 1674}, - dictWord{7, 10, 1472}, - dictWord{135, 10, 1554}, - dictWord{11, 0, 178}, - dictWord{7, 10, 1071}, - dictWord{7, 10, 1541}, - dictWord{7, 10, 1767}, - dictWord{ - 7, - 10, - 1806, - }, - dictWord{11, 10, 162}, - dictWord{11, 10, 242}, - dictWord{12, 10, 605}, - dictWord{15, 10, 26}, - dictWord{144, 10, 44}, - dictWord{6, 0, 389}, - dictWord{ - 7, - 0, - 149, - }, - dictWord{9, 0, 142}, - dictWord{138, 0, 94}, - dictWord{140, 11, 71}, - dictWord{145, 10, 115}, - dictWord{6, 0, 8}, - dictWord{7, 0, 1881}, - dictWord{8, 0, 91}, - dictWord{11, 11, 966}, - dictWord{12, 11, 287}, - dictWord{13, 11, 342}, - dictWord{13, 11, 402}, - dictWord{15, 11, 110}, - dictWord{143, 11, 163}, - dictWord{ - 4, - 11, - 258, - }, - dictWord{136, 11, 639}, - dictWord{6, 11, 22}, - dictWord{7, 11, 903}, - dictWord{138, 11, 577}, - dictWord{133, 11, 681}, - dictWord{135, 10, 1111}, - dictWord{135, 11, 1286}, - dictWord{9, 0, 112}, - dictWord{8, 10, 1}, - dictWord{138, 10, 326}, - dictWord{5, 10, 488}, - dictWord{6, 10, 527}, - dictWord{7, 10, 489}, - dictWord{ - 
7, - 10, - 1636, - }, - dictWord{8, 10, 121}, - dictWord{8, 10, 144}, - dictWord{8, 10, 359}, - dictWord{9, 10, 193}, - dictWord{9, 10, 241}, - dictWord{9, 10, 336}, - dictWord{ - 9, - 10, - 882, - }, - dictWord{11, 10, 266}, - dictWord{11, 10, 372}, - dictWord{11, 10, 944}, - dictWord{12, 10, 401}, - dictWord{140, 10, 641}, - dictWord{4, 11, 664}, - dictWord{133, 11, 804}, - dictWord{6, 0, 747}, - dictWord{134, 0, 1015}, - dictWord{135, 0, 1746}, - dictWord{9, 10, 31}, - dictWord{10, 10, 244}, - dictWord{ - 10, - 10, - 699, - }, - dictWord{12, 10, 149}, - dictWord{141, 10, 497}, - dictWord{133, 10, 377}, - dictWord{135, 0, 24}, - dictWord{6, 0, 1352}, - dictWord{5, 11, 32}, - dictWord{ - 145, - 10, - 101, - }, - dictWord{7, 0, 1530}, - dictWord{10, 0, 158}, - dictWord{13, 0, 13}, - dictWord{13, 0, 137}, - dictWord{13, 0, 258}, - dictWord{14, 0, 111}, - dictWord{ - 14, - 0, - 225, - }, - dictWord{14, 0, 253}, - dictWord{14, 0, 304}, - dictWord{14, 0, 339}, - dictWord{14, 0, 417}, - dictWord{146, 0, 33}, - dictWord{4, 0, 503}, - dictWord{ - 135, - 0, - 1661, - }, - dictWord{5, 0, 130}, - dictWord{6, 0, 845}, - dictWord{7, 0, 1314}, - dictWord{9, 0, 610}, - dictWord{10, 0, 718}, - dictWord{11, 0, 601}, - dictWord{11, 0, 819}, - dictWord{11, 0, 946}, - dictWord{140, 0, 536}, - dictWord{10, 0, 149}, - dictWord{11, 0, 280}, - dictWord{142, 0, 336}, - dictWord{134, 0, 1401}, - dictWord{ - 135, - 0, - 1946, - }, - dictWord{8, 0, 663}, - dictWord{144, 0, 8}, - dictWord{134, 0, 1607}, - dictWord{135, 10, 2023}, - dictWord{4, 11, 289}, - dictWord{7, 11, 629}, - dictWord{ - 7, - 11, - 1698, - }, - dictWord{7, 11, 1711}, - dictWord{140, 11, 215}, - dictWord{6, 11, 450}, - dictWord{136, 11, 109}, - dictWord{10, 0, 882}, - dictWord{10, 0, 883}, - dictWord{10, 0, 914}, - dictWord{138, 0, 928}, - dictWord{133, 10, 843}, - dictWord{136, 11, 705}, - dictWord{132, 10, 554}, - dictWord{133, 10, 536}, - dictWord{ - 5, - 0, - 417, - }, - dictWord{9, 10, 79}, - dictWord{11, 10, 625}, - 
dictWord{145, 10, 7}, - dictWord{7, 11, 1238}, - dictWord{142, 11, 37}, - dictWord{4, 0, 392}, - dictWord{ - 135, - 0, - 1597, - }, - dictWord{5, 0, 433}, - dictWord{9, 0, 633}, - dictWord{11, 0, 629}, - dictWord{132, 10, 424}, - dictWord{7, 10, 336}, - dictWord{136, 10, 785}, - dictWord{ - 134, - 11, - 355, - }, - dictWord{6, 0, 234}, - dictWord{7, 0, 769}, - dictWord{9, 0, 18}, - dictWord{138, 0, 358}, - dictWord{4, 10, 896}, - dictWord{134, 10, 1777}, - dictWord{ - 138, - 11, - 323, - }, - dictWord{7, 0, 140}, - dictWord{7, 0, 1950}, - dictWord{8, 0, 680}, - dictWord{11, 0, 817}, - dictWord{147, 0, 88}, - dictWord{7, 0, 1222}, - dictWord{ - 138, - 0, - 386, - }, - dictWord{139, 11, 908}, - dictWord{11, 0, 249}, - dictWord{12, 0, 313}, - dictWord{16, 0, 66}, - dictWord{145, 0, 26}, - dictWord{134, 0, 5}, - dictWord{7, 10, 750}, - dictWord{9, 10, 223}, - dictWord{11, 10, 27}, - dictWord{11, 10, 466}, - dictWord{12, 10, 624}, - dictWord{14, 10, 265}, - dictWord{146, 10, 61}, - dictWord{ - 134, - 11, - 26, - }, - dictWord{134, 0, 1216}, - dictWord{5, 0, 963}, - dictWord{134, 0, 1773}, - dictWord{4, 11, 414}, - dictWord{5, 11, 467}, - dictWord{9, 11, 654}, - dictWord{ - 10, - 11, - 451, - }, - dictWord{12, 11, 59}, - dictWord{141, 11, 375}, - dictWord{135, 11, 17}, - dictWord{4, 10, 603}, - dictWord{133, 10, 661}, - dictWord{4, 10, 11}, - dictWord{ - 6, - 10, - 128, - }, - dictWord{7, 10, 231}, - dictWord{7, 10, 1533}, - dictWord{138, 10, 725}, - dictWord{135, 11, 955}, - dictWord{7, 0, 180}, - dictWord{8, 0, 509}, - dictWord{ - 136, - 0, - 792, - }, - dictWord{132, 10, 476}, - dictWord{132, 0, 1002}, - dictWord{133, 11, 538}, - dictWord{135, 10, 1807}, - dictWord{132, 0, 931}, - dictWord{7, 0, 943}, - dictWord{11, 0, 614}, - dictWord{140, 0, 747}, - dictWord{135, 0, 1837}, - dictWord{9, 10, 20}, - dictWord{10, 10, 324}, - dictWord{10, 10, 807}, - dictWord{ - 139, - 10, - 488, - }, - dictWord{134, 0, 641}, - dictWord{6, 11, 280}, - dictWord{10, 11, 502}, - 
dictWord{11, 11, 344}, - dictWord{140, 11, 38}, - dictWord{5, 11, 45}, - dictWord{ - 7, - 11, - 1161, - }, - dictWord{11, 11, 448}, - dictWord{11, 11, 880}, - dictWord{13, 11, 139}, - dictWord{13, 11, 407}, - dictWord{15, 11, 16}, - dictWord{17, 11, 95}, - dictWord{ - 18, - 11, - 66, - }, - dictWord{18, 11, 88}, - dictWord{18, 11, 123}, - dictWord{149, 11, 7}, - dictWord{9, 0, 280}, - dictWord{138, 0, 134}, - dictWord{22, 0, 22}, - dictWord{23, 0, 5}, - dictWord{151, 0, 29}, - dictWord{136, 11, 777}, - dictWord{4, 0, 90}, - dictWord{5, 0, 545}, - dictWord{7, 0, 754}, - dictWord{9, 0, 186}, - dictWord{10, 0, 72}, - dictWord{ - 10, - 0, - 782, - }, - dictWord{11, 0, 577}, - dictWord{11, 0, 610}, - dictWord{11, 0, 960}, - dictWord{12, 0, 354}, - dictWord{12, 0, 362}, - dictWord{12, 0, 595}, - dictWord{ - 4, - 11, - 410, - }, - dictWord{135, 11, 521}, - dictWord{135, 11, 1778}, - dictWord{5, 10, 112}, - dictWord{6, 10, 103}, - dictWord{134, 10, 150}, - dictWord{138, 10, 356}, - dictWord{132, 0, 742}, - dictWord{7, 0, 151}, - dictWord{9, 0, 329}, - dictWord{139, 0, 254}, - dictWord{8, 0, 853}, - dictWord{8, 0, 881}, - dictWord{8, 0, 911}, - dictWord{ - 8, - 0, - 912, - }, - dictWord{10, 0, 872}, - dictWord{12, 0, 741}, - dictWord{12, 0, 742}, - dictWord{152, 0, 18}, - dictWord{4, 11, 573}, - dictWord{136, 11, 655}, - dictWord{ - 6, - 0, - 921, - }, - dictWord{134, 0, 934}, - dictWord{9, 0, 187}, - dictWord{10, 0, 36}, - dictWord{11, 0, 1016}, - dictWord{17, 0, 44}, - dictWord{146, 0, 64}, - dictWord{7, 0, 833}, - dictWord{136, 0, 517}, - dictWord{4, 0, 506}, - dictWord{5, 0, 295}, - dictWord{135, 0, 1680}, - dictWord{4, 10, 708}, - dictWord{8, 10, 15}, - dictWord{9, 10, 50}, - dictWord{ - 9, - 10, - 386, - }, - dictWord{11, 10, 18}, - dictWord{11, 10, 529}, - dictWord{140, 10, 228}, - dictWord{7, 0, 251}, - dictWord{7, 0, 1701}, - dictWord{8, 0, 436}, - dictWord{ - 4, - 10, - 563, - }, - dictWord{7, 10, 592}, - dictWord{7, 10, 637}, - dictWord{7, 10, 770}, - dictWord{8, 
10, 463}, - dictWord{9, 10, 60}, - dictWord{9, 10, 335}, - dictWord{9, 10, 904}, - dictWord{10, 10, 73}, - dictWord{11, 10, 434}, - dictWord{12, 10, 585}, - dictWord{13, 10, 331}, - dictWord{18, 10, 110}, - dictWord{148, 10, 60}, - dictWord{ - 132, - 10, - 502, - }, - dictWord{136, 0, 584}, - dictWord{6, 10, 347}, - dictWord{138, 10, 161}, - dictWord{7, 0, 987}, - dictWord{9, 0, 688}, - dictWord{10, 0, 522}, - dictWord{ - 11, - 0, - 788, - }, - dictWord{12, 0, 137}, - dictWord{12, 0, 566}, - dictWord{14, 0, 9}, - dictWord{14, 0, 24}, - dictWord{14, 0, 64}, - dictWord{7, 11, 899}, - dictWord{142, 11, 325}, - dictWord{4, 0, 214}, - dictWord{5, 0, 500}, - dictWord{5, 10, 102}, - dictWord{6, 10, 284}, - dictWord{7, 10, 1079}, - dictWord{7, 10, 1423}, - dictWord{7, 10, 1702}, - dictWord{ - 8, - 10, - 470, - }, - dictWord{9, 10, 554}, - dictWord{9, 10, 723}, - dictWord{139, 10, 333}, - dictWord{7, 10, 246}, - dictWord{135, 10, 840}, - dictWord{6, 10, 10}, - dictWord{ - 8, - 10, - 571, - }, - dictWord{9, 10, 739}, - dictWord{143, 10, 91}, - dictWord{133, 10, 626}, - dictWord{146, 0, 195}, - dictWord{134, 0, 1775}, - dictWord{7, 0, 389}, - dictWord{7, 0, 700}, - dictWord{7, 0, 940}, - dictWord{8, 0, 514}, - dictWord{9, 0, 116}, - dictWord{9, 0, 535}, - dictWord{10, 0, 118}, - dictWord{11, 0, 107}, - dictWord{ - 11, - 0, - 148, - }, - dictWord{11, 0, 922}, - dictWord{12, 0, 254}, - dictWord{12, 0, 421}, - dictWord{142, 0, 238}, - dictWord{5, 10, 18}, - dictWord{6, 10, 526}, - dictWord{13, 10, 24}, - dictWord{13, 10, 110}, - dictWord{19, 10, 5}, - dictWord{147, 10, 44}, - dictWord{132, 0, 743}, - dictWord{11, 0, 292}, - dictWord{4, 10, 309}, - dictWord{5, 10, 462}, - dictWord{7, 10, 970}, - dictWord{135, 10, 1097}, - dictWord{22, 10, 30}, - dictWord{150, 10, 33}, - dictWord{139, 11, 338}, - dictWord{135, 11, 1598}, - dictWord{ - 7, - 0, - 1283, - }, - dictWord{9, 0, 227}, - dictWord{11, 0, 325}, - dictWord{11, 0, 408}, - dictWord{14, 0, 180}, - dictWord{146, 0, 47}, - 
dictWord{4, 0, 953}, - dictWord{6, 0, 1805}, - dictWord{6, 0, 1814}, - dictWord{6, 0, 1862}, - dictWord{140, 0, 774}, - dictWord{6, 11, 611}, - dictWord{135, 11, 1733}, - dictWord{135, 11, 1464}, - dictWord{ - 5, - 0, - 81, - }, - dictWord{7, 0, 146}, - dictWord{7, 0, 1342}, - dictWord{8, 0, 53}, - dictWord{8, 0, 561}, - dictWord{8, 0, 694}, - dictWord{8, 0, 754}, - dictWord{9, 0, 115}, - dictWord{ - 9, - 0, - 179, - }, - dictWord{9, 0, 894}, - dictWord{10, 0, 462}, - dictWord{10, 0, 813}, - dictWord{11, 0, 230}, - dictWord{11, 0, 657}, - dictWord{11, 0, 699}, - dictWord{11, 0, 748}, - dictWord{12, 0, 119}, - dictWord{12, 0, 200}, - dictWord{12, 0, 283}, - dictWord{142, 0, 273}, - dictWord{5, 0, 408}, - dictWord{6, 0, 789}, - dictWord{6, 0, 877}, - dictWord{ - 6, - 0, - 1253, - }, - dictWord{6, 0, 1413}, - dictWord{137, 0, 747}, - dictWord{134, 10, 1704}, - dictWord{135, 11, 663}, - dictWord{6, 0, 1910}, - dictWord{6, 0, 1915}, - dictWord{6, 0, 1923}, - dictWord{9, 0, 913}, - dictWord{9, 0, 928}, - dictWord{9, 0, 950}, - dictWord{9, 0, 954}, - dictWord{9, 0, 978}, - dictWord{9, 0, 993}, - dictWord{12, 0, 812}, - dictWord{12, 0, 819}, - dictWord{12, 0, 831}, - dictWord{12, 0, 833}, - dictWord{12, 0, 838}, - dictWord{12, 0, 909}, - dictWord{12, 0, 928}, - dictWord{12, 0, 931}, - dictWord{12, 0, 950}, - dictWord{15, 0, 186}, - dictWord{15, 0, 187}, - dictWord{15, 0, 195}, - dictWord{15, 0, 196}, - dictWord{15, 0, 209}, - dictWord{15, 0, 215}, - dictWord{ - 15, - 0, - 236, - }, - dictWord{15, 0, 241}, - dictWord{15, 0, 249}, - dictWord{15, 0, 253}, - dictWord{18, 0, 180}, - dictWord{18, 0, 221}, - dictWord{18, 0, 224}, - dictWord{ - 18, - 0, - 227, - }, - dictWord{18, 0, 229}, - dictWord{149, 0, 60}, - dictWord{7, 0, 1826}, - dictWord{135, 0, 1938}, - dictWord{11, 0, 490}, - dictWord{18, 0, 143}, - dictWord{ - 5, - 10, - 86, - }, - dictWord{7, 10, 743}, - dictWord{9, 10, 85}, - dictWord{10, 10, 281}, - dictWord{10, 10, 432}, - dictWord{12, 10, 251}, - dictWord{13, 10, 
118}, - dictWord{ - 142, - 10, - 378, - }, - dictWord{5, 10, 524}, - dictWord{133, 10, 744}, - dictWord{141, 11, 442}, - dictWord{10, 10, 107}, - dictWord{140, 10, 436}, - dictWord{135, 11, 503}, - dictWord{134, 0, 1162}, - dictWord{132, 10, 927}, - dictWord{7, 0, 30}, - dictWord{8, 0, 86}, - dictWord{8, 0, 315}, - dictWord{8, 0, 700}, - dictWord{9, 0, 576}, - dictWord{ - 9, - 0, - 858, - }, - dictWord{10, 0, 414}, - dictWord{11, 0, 310}, - dictWord{11, 0, 888}, - dictWord{11, 0, 904}, - dictWord{12, 0, 361}, - dictWord{13, 0, 248}, - dictWord{13, 0, 371}, - dictWord{14, 0, 142}, - dictWord{12, 10, 670}, - dictWord{146, 10, 94}, - dictWord{134, 0, 721}, - dictWord{4, 11, 113}, - dictWord{5, 11, 163}, - dictWord{5, 11, 735}, - dictWord{7, 11, 1009}, - dictWord{7, 10, 1149}, - dictWord{9, 11, 9}, - dictWord{9, 10, 156}, - dictWord{9, 11, 771}, - dictWord{12, 11, 90}, - dictWord{13, 11, 138}, - dictWord{13, 11, 410}, - dictWord{143, 11, 128}, - dictWord{138, 0, 839}, - dictWord{133, 10, 778}, - dictWord{137, 0, 617}, - dictWord{133, 10, 502}, - dictWord{ - 8, - 10, - 196, - }, - dictWord{10, 10, 283}, - dictWord{139, 10, 406}, - dictWord{6, 0, 428}, - dictWord{7, 0, 524}, - dictWord{8, 0, 169}, - dictWord{8, 0, 234}, - dictWord{9, 0, 480}, - dictWord{138, 0, 646}, - dictWord{133, 10, 855}, - dictWord{134, 0, 1648}, - dictWord{7, 0, 1205}, - dictWord{138, 0, 637}, - dictWord{7, 0, 1596}, - dictWord{ - 4, - 11, - 935, - }, - dictWord{133, 11, 823}, - dictWord{5, 11, 269}, - dictWord{7, 11, 434}, - dictWord{7, 11, 891}, - dictWord{8, 11, 339}, - dictWord{9, 11, 702}, - dictWord{ - 11, - 11, - 594, - }, - dictWord{11, 11, 718}, - dictWord{145, 11, 100}, - dictWord{7, 11, 878}, - dictWord{9, 11, 485}, - dictWord{141, 11, 264}, - dictWord{4, 0, 266}, - dictWord{ - 8, - 0, - 4, - }, - dictWord{9, 0, 39}, - dictWord{10, 0, 166}, - dictWord{11, 0, 918}, - dictWord{12, 0, 635}, - dictWord{20, 0, 10}, - dictWord{22, 0, 27}, - dictWord{22, 0, 43}, - dictWord{ - 22, - 0, - 52, - 
}, - dictWord{134, 11, 1713}, - dictWord{7, 10, 1400}, - dictWord{9, 10, 446}, - dictWord{138, 10, 45}, - dictWord{135, 11, 900}, - dictWord{132, 0, 862}, - dictWord{134, 0, 1554}, - dictWord{135, 11, 1033}, - dictWord{19, 0, 16}, - dictWord{147, 11, 16}, - dictWord{135, 11, 1208}, - dictWord{7, 0, 157}, - dictWord{ - 136, - 0, - 279, - }, - dictWord{6, 0, 604}, - dictWord{136, 0, 391}, - dictWord{13, 10, 455}, - dictWord{15, 10, 99}, - dictWord{15, 10, 129}, - dictWord{144, 10, 68}, - dictWord{ - 135, - 10, - 172, - }, - dictWord{7, 0, 945}, - dictWord{11, 0, 713}, - dictWord{139, 0, 744}, - dictWord{4, 0, 973}, - dictWord{10, 0, 877}, - dictWord{10, 0, 937}, - dictWord{ - 10, - 0, - 938, - }, - dictWord{140, 0, 711}, - dictWord{139, 0, 1022}, - dictWord{132, 10, 568}, - dictWord{142, 11, 143}, - dictWord{4, 0, 567}, - dictWord{9, 0, 859}, - dictWord{ - 132, - 10, - 732, - }, - dictWord{7, 0, 1846}, - dictWord{136, 0, 628}, - dictWord{136, 10, 733}, - dictWord{133, 0, 762}, - dictWord{4, 10, 428}, - dictWord{135, 10, 1789}, - dictWord{10, 0, 784}, - dictWord{13, 0, 191}, - dictWord{7, 10, 2015}, - dictWord{140, 10, 665}, - dictWord{133, 0, 298}, - dictWord{7, 0, 633}, - dictWord{7, 0, 905}, - dictWord{7, 0, 909}, - dictWord{7, 0, 1538}, - dictWord{9, 0, 767}, - dictWord{140, 0, 636}, - dictWord{138, 10, 806}, - dictWord{132, 0, 795}, - dictWord{139, 0, 301}, - dictWord{135, 0, 1970}, - dictWord{5, 11, 625}, - dictWord{135, 11, 1617}, - dictWord{135, 11, 275}, - dictWord{7, 11, 37}, - dictWord{8, 11, 425}, - dictWord{ - 8, - 11, - 693, - }, - dictWord{9, 11, 720}, - dictWord{10, 11, 380}, - dictWord{10, 11, 638}, - dictWord{11, 11, 273}, - dictWord{11, 11, 307}, - dictWord{11, 11, 473}, - dictWord{ - 12, - 11, - 61, - }, - dictWord{143, 11, 43}, - dictWord{135, 11, 198}, - dictWord{134, 0, 1236}, - dictWord{7, 0, 369}, - dictWord{12, 0, 644}, - dictWord{12, 0, 645}, - dictWord{144, 0, 90}, - dictWord{19, 0, 15}, - dictWord{149, 0, 27}, - dictWord{6, 0, 71}, - 
dictWord{7, 0, 845}, - dictWord{8, 0, 160}, - dictWord{9, 0, 318}, - dictWord{6, 10, 1623}, - dictWord{134, 10, 1681}, - dictWord{134, 0, 1447}, - dictWord{134, 0, 1255}, - dictWord{138, 0, 735}, - dictWord{8, 0, 76}, - dictWord{132, 11, 168}, - dictWord{ - 6, - 10, - 1748, - }, - dictWord{8, 10, 715}, - dictWord{9, 10, 802}, - dictWord{10, 10, 46}, - dictWord{10, 10, 819}, - dictWord{13, 10, 308}, - dictWord{14, 10, 351}, - dictWord{14, 10, 363}, - dictWord{146, 10, 67}, - dictWord{135, 11, 91}, - dictWord{6, 0, 474}, - dictWord{4, 10, 63}, - dictWord{133, 10, 347}, - dictWord{133, 10, 749}, - dictWord{138, 0, 841}, - dictWord{133, 10, 366}, - dictWord{6, 0, 836}, - dictWord{132, 11, 225}, - dictWord{135, 0, 1622}, - dictWord{135, 10, 89}, - dictWord{ - 140, - 0, - 735, - }, - dictWord{134, 0, 1601}, - dictWord{138, 11, 145}, - dictWord{6, 0, 1390}, - dictWord{137, 0, 804}, - dictWord{142, 0, 394}, - dictWord{6, 11, 15}, - dictWord{ - 7, - 11, - 70, - }, - dictWord{10, 11, 240}, - dictWord{147, 11, 93}, - dictWord{6, 0, 96}, - dictWord{135, 0, 1426}, - dictWord{4, 0, 651}, - dictWord{133, 0, 289}, - dictWord{ - 7, - 11, - 956, - }, - dictWord{7, 10, 977}, - dictWord{7, 11, 1157}, - dictWord{7, 11, 1506}, - dictWord{7, 11, 1606}, - dictWord{7, 11, 1615}, - dictWord{7, 11, 1619}, - dictWord{ - 7, - 11, - 1736, - }, - dictWord{7, 11, 1775}, - dictWord{8, 11, 590}, - dictWord{9, 11, 324}, - dictWord{9, 11, 736}, - dictWord{9, 11, 774}, - dictWord{9, 11, 776}, - dictWord{ - 9, - 11, - 784, - }, - dictWord{10, 11, 567}, - dictWord{10, 11, 708}, - dictWord{11, 11, 518}, - dictWord{11, 11, 613}, - dictWord{11, 11, 695}, - dictWord{11, 11, 716}, - dictWord{11, 11, 739}, - dictWord{11, 11, 770}, - dictWord{11, 11, 771}, - dictWord{11, 11, 848}, - dictWord{11, 11, 857}, - dictWord{11, 11, 931}, - dictWord{ - 11, - 11, - 947, - }, - dictWord{12, 11, 326}, - dictWord{12, 11, 387}, - dictWord{12, 11, 484}, - dictWord{12, 11, 528}, - dictWord{12, 11, 552}, - dictWord{12, 11, 
613}, - dictWord{ - 13, - 11, - 189, - }, - dictWord{13, 11, 256}, - dictWord{13, 11, 340}, - dictWord{13, 11, 432}, - dictWord{13, 11, 436}, - dictWord{13, 11, 440}, - dictWord{13, 11, 454}, - dictWord{14, 11, 174}, - dictWord{14, 11, 220}, - dictWord{14, 11, 284}, - dictWord{14, 11, 390}, - dictWord{145, 11, 121}, - dictWord{7, 0, 688}, - dictWord{8, 0, 35}, - dictWord{9, 0, 511}, - dictWord{10, 0, 767}, - dictWord{147, 0, 118}, - dictWord{134, 0, 667}, - dictWord{4, 0, 513}, - dictWord{5, 10, 824}, - dictWord{133, 10, 941}, - dictWord{7, 10, 440}, - dictWord{8, 10, 230}, - dictWord{139, 10, 106}, - dictWord{134, 0, 2034}, - dictWord{135, 11, 1399}, - dictWord{143, 11, 66}, - dictWord{ - 135, - 11, - 1529, - }, - dictWord{4, 11, 145}, - dictWord{6, 11, 176}, - dictWord{7, 11, 395}, - dictWord{9, 11, 562}, - dictWord{144, 11, 28}, - dictWord{132, 11, 501}, - dictWord{132, 0, 704}, - dictWord{134, 0, 1524}, - dictWord{7, 0, 1078}, - dictWord{134, 11, 464}, - dictWord{6, 11, 509}, - dictWord{10, 11, 82}, - dictWord{20, 11, 91}, - dictWord{151, 11, 13}, - dictWord{4, 0, 720}, - dictWord{133, 0, 306}, - dictWord{133, 0, 431}, - dictWord{7, 0, 1196}, - dictWord{4, 10, 914}, - dictWord{5, 10, 800}, - dictWord{133, 10, 852}, - dictWord{135, 11, 1189}, - dictWord{10, 0, 54}, - dictWord{141, 10, 115}, - dictWord{7, 10, 564}, - dictWord{142, 10, 168}, - dictWord{ - 5, - 0, - 464, - }, - dictWord{6, 0, 236}, - dictWord{7, 0, 696}, - dictWord{7, 0, 914}, - dictWord{7, 0, 1108}, - dictWord{7, 0, 1448}, - dictWord{9, 0, 15}, - dictWord{9, 0, 564}, - dictWord{ - 10, - 0, - 14, - }, - dictWord{12, 0, 565}, - dictWord{13, 0, 449}, - dictWord{14, 0, 53}, - dictWord{15, 0, 13}, - dictWord{16, 0, 64}, - dictWord{17, 0, 41}, - dictWord{4, 10, 918}, - dictWord{133, 10, 876}, - dictWord{6, 0, 1418}, - dictWord{134, 10, 1764}, - dictWord{4, 10, 92}, - dictWord{133, 10, 274}, - dictWord{134, 0, 907}, - dictWord{ - 4, - 11, - 114, - }, - dictWord{8, 10, 501}, - dictWord{9, 11, 492}, - 
dictWord{13, 11, 462}, - dictWord{142, 11, 215}, - dictWord{4, 11, 77}, - dictWord{5, 11, 361}, - dictWord{ - 6, - 11, - 139, - }, - dictWord{6, 11, 401}, - dictWord{6, 11, 404}, - dictWord{7, 11, 413}, - dictWord{7, 11, 715}, - dictWord{7, 11, 1716}, - dictWord{11, 11, 279}, - dictWord{ - 12, - 11, - 179, - }, - dictWord{12, 11, 258}, - dictWord{13, 11, 244}, - dictWord{142, 11, 358}, - dictWord{6, 0, 1767}, - dictWord{12, 0, 194}, - dictWord{145, 0, 107}, - dictWord{ - 134, - 11, - 1717, - }, - dictWord{5, 10, 743}, - dictWord{142, 11, 329}, - dictWord{4, 10, 49}, - dictWord{7, 10, 280}, - dictWord{135, 10, 1633}, - dictWord{5, 0, 840}, - dictWord{7, 11, 1061}, - dictWord{8, 11, 82}, - dictWord{11, 11, 250}, - dictWord{12, 11, 420}, - dictWord{141, 11, 184}, - dictWord{135, 11, 724}, - dictWord{ - 134, - 0, - 900, - }, - dictWord{136, 10, 47}, - dictWord{134, 0, 1436}, - dictWord{144, 11, 0}, - dictWord{6, 0, 675}, - dictWord{7, 0, 1008}, - dictWord{7, 0, 1560}, - dictWord{ - 9, - 0, - 642, - }, - dictWord{11, 0, 236}, - dictWord{14, 0, 193}, - dictWord{5, 10, 272}, - dictWord{5, 10, 908}, - dictWord{5, 10, 942}, - dictWord{8, 10, 197}, - dictWord{9, 10, 47}, - dictWord{11, 10, 538}, - dictWord{139, 10, 742}, - dictWord{4, 0, 68}, - dictWord{5, 0, 628}, - dictWord{5, 0, 634}, - dictWord{6, 0, 386}, - dictWord{7, 0, 794}, - dictWord{ - 8, - 0, - 273, - }, - dictWord{9, 0, 563}, - dictWord{10, 0, 105}, - dictWord{10, 0, 171}, - dictWord{11, 0, 94}, - dictWord{139, 0, 354}, - dictWord{135, 10, 1911}, - dictWord{ - 137, - 10, - 891, - }, - dictWord{4, 0, 95}, - dictWord{6, 0, 1297}, - dictWord{6, 0, 1604}, - dictWord{7, 0, 416}, - dictWord{139, 0, 830}, - dictWord{6, 11, 513}, - dictWord{ - 135, - 11, - 1052, - }, - dictWord{7, 0, 731}, - dictWord{13, 0, 20}, - dictWord{143, 0, 11}, - dictWord{137, 11, 899}, - dictWord{10, 0, 850}, - dictWord{140, 0, 697}, - dictWord{ - 4, - 0, - 662, - }, - dictWord{7, 11, 1417}, - dictWord{12, 11, 382}, - dictWord{17, 11, 48}, - 
dictWord{152, 11, 12}, - dictWord{133, 0, 736}, - dictWord{132, 0, 861}, - dictWord{ - 4, - 10, - 407, - }, - dictWord{132, 10, 560}, - dictWord{141, 10, 490}, - dictWord{6, 11, 545}, - dictWord{7, 11, 565}, - dictWord{7, 11, 1669}, - dictWord{10, 11, 114}, - dictWord{11, 11, 642}, - dictWord{140, 11, 618}, - dictWord{6, 0, 871}, - dictWord{134, 0, 1000}, - dictWord{5, 0, 864}, - dictWord{10, 0, 648}, - dictWord{11, 0, 671}, - dictWord{15, 0, 46}, - dictWord{133, 11, 5}, - dictWord{133, 0, 928}, - dictWord{11, 0, 90}, - dictWord{13, 0, 7}, - dictWord{4, 10, 475}, - dictWord{11, 10, 35}, - dictWord{ - 13, - 10, - 71, - }, - dictWord{13, 10, 177}, - dictWord{142, 10, 422}, - dictWord{136, 0, 332}, - dictWord{135, 11, 192}, - dictWord{134, 0, 1055}, - dictWord{136, 11, 763}, - dictWord{11, 0, 986}, - dictWord{140, 0, 682}, - dictWord{7, 0, 76}, - dictWord{8, 0, 44}, - dictWord{9, 0, 884}, - dictWord{10, 0, 580}, - dictWord{11, 0, 399}, - dictWord{ - 11, - 0, - 894, - }, - dictWord{143, 0, 122}, - dictWord{135, 11, 1237}, - dictWord{135, 10, 636}, - dictWord{11, 0, 300}, - dictWord{6, 10, 222}, - dictWord{7, 10, 1620}, - dictWord{ - 8, - 10, - 409, - }, - dictWord{137, 10, 693}, - dictWord{4, 11, 87}, - dictWord{5, 11, 250}, - dictWord{10, 11, 601}, - dictWord{13, 11, 298}, - dictWord{13, 11, 353}, - dictWord{141, 11, 376}, - dictWord{5, 0, 518}, - dictWord{10, 0, 340}, - dictWord{11, 0, 175}, - dictWord{149, 0, 16}, - dictWord{140, 0, 771}, - dictWord{6, 0, 1108}, - dictWord{137, 0, 831}, - dictWord{132, 0, 836}, - dictWord{135, 0, 1852}, - dictWord{4, 0, 957}, - dictWord{6, 0, 1804}, - dictWord{8, 0, 842}, - dictWord{8, 0, 843}, - dictWord{ - 8, - 0, - 851, - }, - dictWord{8, 0, 855}, - dictWord{140, 0, 767}, - dictWord{135, 11, 814}, - dictWord{4, 11, 57}, - dictWord{7, 11, 1195}, - dictWord{7, 11, 1438}, - dictWord{ - 7, - 11, - 1548, - }, - dictWord{7, 11, 1835}, - dictWord{7, 11, 1904}, - dictWord{9, 11, 757}, - dictWord{10, 11, 604}, - dictWord{139, 11, 519}, - 
dictWord{133, 10, 882}, - dictWord{138, 0, 246}, - dictWord{4, 0, 934}, - dictWord{5, 0, 202}, - dictWord{8, 0, 610}, - dictWord{7, 11, 1897}, - dictWord{12, 11, 290}, - dictWord{13, 11, 80}, - dictWord{13, 11, 437}, - dictWord{145, 11, 74}, - dictWord{8, 0, 96}, - dictWord{9, 0, 36}, - dictWord{10, 0, 607}, - dictWord{10, 0, 804}, - dictWord{10, 0, 832}, - dictWord{ - 11, - 0, - 423, - }, - dictWord{11, 0, 442}, - dictWord{12, 0, 309}, - dictWord{14, 0, 199}, - dictWord{15, 0, 90}, - dictWord{145, 0, 110}, - dictWord{132, 10, 426}, - dictWord{ - 7, - 0, - 654, - }, - dictWord{8, 0, 240}, - dictWord{6, 10, 58}, - dictWord{7, 10, 745}, - dictWord{7, 10, 1969}, - dictWord{8, 10, 675}, - dictWord{9, 10, 479}, - dictWord{9, 10, 731}, - dictWord{10, 10, 330}, - dictWord{10, 10, 593}, - dictWord{10, 10, 817}, - dictWord{11, 10, 32}, - dictWord{11, 10, 133}, - dictWord{11, 10, 221}, - dictWord{ - 145, - 10, - 68, - }, - dictWord{9, 0, 13}, - dictWord{9, 0, 398}, - dictWord{9, 0, 727}, - dictWord{10, 0, 75}, - dictWord{10, 0, 184}, - dictWord{10, 0, 230}, - dictWord{10, 0, 564}, - dictWord{ - 10, - 0, - 569, - }, - dictWord{11, 0, 973}, - dictWord{12, 0, 70}, - dictWord{12, 0, 189}, - dictWord{13, 0, 57}, - dictWord{141, 0, 257}, - dictWord{4, 11, 209}, - dictWord{ - 135, - 11, - 902, - }, - dictWord{7, 0, 391}, - dictWord{137, 10, 538}, - dictWord{134, 0, 403}, - dictWord{6, 11, 303}, - dictWord{7, 11, 335}, - dictWord{7, 11, 1437}, - dictWord{ - 7, - 11, - 1668, - }, - dictWord{8, 11, 553}, - dictWord{8, 11, 652}, - dictWord{8, 11, 656}, - dictWord{9, 11, 558}, - dictWord{11, 11, 743}, - dictWord{149, 11, 18}, - dictWord{ - 132, - 11, - 559, - }, - dictWord{11, 0, 75}, - dictWord{142, 0, 267}, - dictWord{6, 0, 815}, - dictWord{141, 11, 2}, - dictWord{141, 0, 366}, - dictWord{137, 0, 631}, - dictWord{ - 133, - 11, - 1017, - }, - dictWord{5, 0, 345}, - dictWord{135, 0, 1016}, - dictWord{133, 11, 709}, - dictWord{134, 11, 1745}, - dictWord{133, 10, 566}, - dictWord{7, 0, 
952}, - dictWord{6, 10, 48}, - dictWord{9, 10, 139}, - dictWord{10, 10, 399}, - dictWord{11, 10, 469}, - dictWord{12, 10, 634}, - dictWord{141, 10, 223}, - dictWord{ - 133, - 0, - 673, - }, - dictWord{9, 0, 850}, - dictWord{7, 11, 8}, - dictWord{136, 11, 206}, - dictWord{6, 0, 662}, - dictWord{149, 0, 35}, - dictWord{4, 0, 287}, - dictWord{133, 0, 1018}, - dictWord{6, 10, 114}, - dictWord{7, 10, 1224}, - dictWord{7, 10, 1556}, - dictWord{136, 10, 3}, - dictWord{8, 10, 576}, - dictWord{137, 10, 267}, - dictWord{4, 0, 884}, - dictWord{5, 0, 34}, - dictWord{10, 0, 724}, - dictWord{12, 0, 444}, - dictWord{13, 0, 354}, - dictWord{18, 0, 32}, - dictWord{23, 0, 24}, - dictWord{23, 0, 31}, - dictWord{ - 152, - 0, - 5, - }, - dictWord{133, 10, 933}, - dictWord{132, 11, 776}, - dictWord{138, 0, 151}, - dictWord{136, 0, 427}, - dictWord{134, 0, 382}, - dictWord{132, 0, 329}, - dictWord{ - 9, - 0, - 846, - }, - dictWord{10, 0, 827}, - dictWord{138, 11, 33}, - dictWord{9, 0, 279}, - dictWord{10, 0, 407}, - dictWord{14, 0, 84}, - dictWord{22, 0, 18}, - dictWord{ - 135, - 11, - 1297, - }, - dictWord{136, 11, 406}, - dictWord{132, 0, 906}, - dictWord{136, 0, 366}, - dictWord{134, 0, 843}, - dictWord{134, 0, 1443}, - dictWord{135, 0, 1372}, - dictWord{138, 0, 992}, - dictWord{4, 0, 123}, - dictWord{5, 0, 605}, - dictWord{7, 0, 1509}, - dictWord{136, 0, 36}, - dictWord{132, 0, 649}, - dictWord{8, 11, 175}, - dictWord{10, 11, 168}, - dictWord{138, 11, 573}, - dictWord{133, 0, 767}, - dictWord{134, 0, 1018}, - dictWord{135, 11, 1305}, - dictWord{12, 10, 30}, - dictWord{ - 13, - 10, - 148, - }, - dictWord{14, 10, 87}, - dictWord{14, 10, 182}, - dictWord{16, 10, 42}, - dictWord{148, 10, 70}, - dictWord{134, 11, 607}, - dictWord{4, 0, 273}, - dictWord{ - 5, - 0, - 658, - }, - dictWord{133, 0, 995}, - dictWord{6, 0, 72}, - dictWord{139, 11, 174}, - dictWord{10, 0, 483}, - dictWord{12, 0, 368}, - dictWord{7, 10, 56}, - dictWord{ - 7, - 10, - 1989, - }, - dictWord{8, 10, 337}, - dictWord{8, 
10, 738}, - dictWord{9, 10, 600}, - dictWord{13, 10, 447}, - dictWord{142, 10, 92}, - dictWord{5, 11, 784}, - dictWord{ - 138, - 10, - 666, - }, - dictWord{135, 0, 1345}, - dictWord{139, 11, 882}, - dictWord{134, 0, 1293}, - dictWord{133, 0, 589}, - dictWord{134, 0, 1988}, - dictWord{5, 0, 117}, - dictWord{6, 0, 514}, - dictWord{6, 0, 541}, - dictWord{7, 0, 1164}, - dictWord{7, 0, 1436}, - dictWord{8, 0, 220}, - dictWord{8, 0, 648}, - dictWord{10, 0, 688}, - dictWord{ - 139, - 0, - 560, - }, - dictWord{136, 0, 379}, - dictWord{5, 0, 686}, - dictWord{7, 10, 866}, - dictWord{135, 10, 1163}, - dictWord{132, 10, 328}, - dictWord{9, 11, 14}, - dictWord{ - 9, - 11, - 441, - }, - dictWord{10, 11, 306}, - dictWord{139, 11, 9}, - dictWord{4, 10, 101}, - dictWord{135, 10, 1171}, - dictWord{5, 10, 833}, - dictWord{136, 10, 744}, - dictWord{5, 11, 161}, - dictWord{7, 11, 839}, - dictWord{135, 11, 887}, - dictWord{7, 0, 196}, - dictWord{10, 0, 765}, - dictWord{11, 0, 347}, - dictWord{11, 0, 552}, - dictWord{11, 0, 790}, - dictWord{12, 0, 263}, - dictWord{13, 0, 246}, - dictWord{13, 0, 270}, - dictWord{13, 0, 395}, - dictWord{14, 0, 176}, - dictWord{14, 0, 190}, - dictWord{ - 14, - 0, - 398, - }, - dictWord{14, 0, 412}, - dictWord{15, 0, 32}, - dictWord{15, 0, 63}, - dictWord{16, 0, 88}, - dictWord{147, 0, 105}, - dictWord{6, 10, 9}, - dictWord{6, 10, 397}, - dictWord{7, 10, 53}, - dictWord{7, 10, 1742}, - dictWord{10, 10, 632}, - dictWord{11, 10, 828}, - dictWord{140, 10, 146}, - dictWord{5, 0, 381}, - dictWord{135, 0, 1792}, - dictWord{134, 0, 1452}, - dictWord{135, 11, 429}, - dictWord{8, 0, 367}, - dictWord{10, 0, 760}, - dictWord{14, 0, 79}, - dictWord{20, 0, 17}, - dictWord{152, 0, 0}, - dictWord{7, 0, 616}, - dictWord{138, 0, 413}, - dictWord{11, 10, 417}, - dictWord{12, 10, 223}, - dictWord{140, 10, 265}, - dictWord{7, 11, 1611}, - dictWord{13, 11, 14}, - dictWord{15, 11, 44}, - dictWord{19, 11, 13}, - dictWord{148, 11, 76}, - dictWord{135, 0, 1229}, - dictWord{6, 0, 
120}, - dictWord{7, 0, 1188}, - dictWord{7, 0, 1710}, - dictWord{8, 0, 286}, - dictWord{9, 0, 667}, - dictWord{11, 0, 592}, - dictWord{139, 0, 730}, - dictWord{135, 11, 1814}, - dictWord{135, 0, 1146}, - dictWord{4, 10, 186}, - dictWord{5, 10, 157}, - dictWord{8, 10, 168}, - dictWord{138, 10, 6}, - dictWord{4, 0, 352}, - dictWord{135, 0, 687}, - dictWord{4, 0, 192}, - dictWord{5, 0, 49}, - dictWord{ - 6, - 0, - 200, - }, - dictWord{6, 0, 293}, - dictWord{6, 0, 1696}, - dictWord{135, 0, 1151}, - dictWord{133, 10, 875}, - dictWord{5, 10, 773}, - dictWord{5, 10, 991}, - dictWord{ - 6, - 10, - 1635, - }, - dictWord{134, 10, 1788}, - dictWord{7, 10, 111}, - dictWord{136, 10, 581}, - dictWord{6, 0, 935}, - dictWord{134, 0, 1151}, - dictWord{134, 0, 1050}, - dictWord{132, 0, 650}, - dictWord{132, 0, 147}, - dictWord{11, 0, 194}, - dictWord{12, 0, 62}, - dictWord{12, 0, 88}, - dictWord{11, 11, 194}, - dictWord{12, 11, 62}, - dictWord{140, 11, 88}, - dictWord{6, 0, 339}, - dictWord{135, 0, 923}, - dictWord{134, 10, 1747}, - dictWord{7, 11, 643}, - dictWord{136, 11, 236}, - dictWord{ - 133, - 0, - 934, - }, - dictWord{7, 10, 1364}, - dictWord{7, 10, 1907}, - dictWord{141, 10, 158}, - dictWord{132, 10, 659}, - dictWord{4, 10, 404}, - dictWord{135, 10, 675}, - dictWord{7, 11, 581}, - dictWord{9, 11, 644}, - dictWord{137, 11, 699}, - dictWord{13, 0, 211}, - dictWord{14, 0, 133}, - dictWord{14, 0, 204}, - dictWord{15, 0, 64}, - dictWord{ - 15, - 0, - 69, - }, - dictWord{15, 0, 114}, - dictWord{16, 0, 10}, - dictWord{19, 0, 23}, - dictWord{19, 0, 35}, - dictWord{19, 0, 39}, - dictWord{19, 0, 51}, - dictWord{19, 0, 71}, - dictWord{19, 0, 75}, - dictWord{152, 0, 15}, - dictWord{133, 10, 391}, - dictWord{5, 11, 54}, - dictWord{135, 11, 1513}, - dictWord{7, 0, 222}, - dictWord{8, 0, 341}, - dictWord{ - 5, - 10, - 540, - }, - dictWord{134, 10, 1697}, - dictWord{134, 10, 78}, - dictWord{132, 11, 744}, - dictWord{136, 0, 293}, - dictWord{137, 11, 701}, - dictWord{ - 7, - 11, - 930, - }, 
- dictWord{10, 11, 402}, - dictWord{10, 11, 476}, - dictWord{13, 11, 452}, - dictWord{18, 11, 55}, - dictWord{147, 11, 104}, - dictWord{132, 0, 637}, - dictWord{133, 10, 460}, - dictWord{8, 11, 50}, - dictWord{137, 11, 624}, - dictWord{132, 11, 572}, - dictWord{134, 0, 1159}, - dictWord{4, 10, 199}, - dictWord{ - 139, - 10, - 34, - }, - dictWord{134, 0, 847}, - dictWord{134, 10, 388}, - dictWord{6, 11, 43}, - dictWord{7, 11, 38}, - dictWord{8, 11, 248}, - dictWord{9, 11, 504}, - dictWord{ - 138, - 11, - 513, - }, - dictWord{9, 0, 683}, - dictWord{4, 10, 511}, - dictWord{6, 10, 608}, - dictWord{9, 10, 333}, - dictWord{10, 10, 602}, - dictWord{11, 10, 441}, - dictWord{ - 11, - 10, - 723, - }, - dictWord{11, 10, 976}, - dictWord{140, 10, 357}, - dictWord{9, 0, 867}, - dictWord{138, 0, 837}, - dictWord{6, 0, 944}, - dictWord{135, 11, 326}, - dictWord{ - 135, - 0, - 1809, - }, - dictWord{5, 10, 938}, - dictWord{7, 11, 783}, - dictWord{136, 10, 707}, - dictWord{133, 11, 766}, - dictWord{133, 11, 363}, - dictWord{6, 0, 170}, - dictWord{7, 0, 1080}, - dictWord{8, 0, 395}, - dictWord{8, 0, 487}, - dictWord{141, 0, 147}, - dictWord{6, 11, 258}, - dictWord{140, 11, 409}, - dictWord{4, 0, 535}, - dictWord{ - 8, - 0, - 618, - }, - dictWord{5, 11, 249}, - dictWord{148, 11, 82}, - dictWord{6, 0, 1379}, - dictWord{149, 11, 15}, - dictWord{135, 0, 1625}, - dictWord{150, 0, 23}, - dictWord{ - 5, - 11, - 393, - }, - dictWord{6, 11, 378}, - dictWord{7, 11, 1981}, - dictWord{9, 11, 32}, - dictWord{9, 11, 591}, - dictWord{10, 11, 685}, - dictWord{10, 11, 741}, - dictWord{ - 142, - 11, - 382, - }, - dictWord{133, 11, 788}, - dictWord{7, 11, 1968}, - dictWord{10, 11, 19}, - dictWord{139, 11, 911}, - dictWord{7, 11, 1401}, - dictWord{ - 135, - 11, - 1476, - }, - dictWord{4, 11, 61}, - dictWord{5, 11, 58}, - dictWord{5, 11, 171}, - dictWord{5, 11, 635}, - dictWord{5, 11, 683}, - dictWord{5, 11, 700}, - dictWord{6, 11, 291}, - dictWord{6, 11, 566}, - dictWord{7, 11, 1650}, - dictWord{11, 11, 
523}, - dictWord{12, 11, 273}, - dictWord{12, 11, 303}, - dictWord{15, 11, 39}, - dictWord{ - 143, - 11, - 111, - }, - dictWord{6, 10, 469}, - dictWord{7, 10, 1709}, - dictWord{138, 10, 515}, - dictWord{4, 0, 778}, - dictWord{134, 11, 589}, - dictWord{132, 0, 46}, - dictWord{ - 5, - 0, - 811, - }, - dictWord{6, 0, 1679}, - dictWord{6, 0, 1714}, - dictWord{135, 0, 2032}, - dictWord{7, 0, 1458}, - dictWord{9, 0, 407}, - dictWord{11, 0, 15}, - dictWord{12, 0, 651}, - dictWord{149, 0, 37}, - dictWord{7, 0, 938}, - dictWord{132, 10, 500}, - dictWord{6, 0, 34}, - dictWord{7, 0, 69}, - dictWord{7, 0, 1089}, - dictWord{7, 0, 1281}, - dictWord{ - 8, - 0, - 708, - }, - dictWord{8, 0, 721}, - dictWord{9, 0, 363}, - dictWord{148, 0, 98}, - dictWord{10, 11, 231}, - dictWord{147, 11, 124}, - dictWord{7, 11, 726}, - dictWord{ - 152, - 11, - 9, - }, - dictWord{5, 10, 68}, - dictWord{134, 10, 383}, - dictWord{136, 11, 583}, - dictWord{4, 11, 917}, - dictWord{133, 11, 1005}, - dictWord{11, 10, 216}, - dictWord{139, 10, 340}, - dictWord{135, 11, 1675}, - dictWord{8, 0, 441}, - dictWord{10, 0, 314}, - dictWord{143, 0, 3}, - dictWord{132, 11, 919}, - dictWord{4, 10, 337}, - dictWord{6, 10, 353}, - dictWord{7, 10, 1934}, - dictWord{8, 10, 488}, - dictWord{137, 10, 429}, - dictWord{7, 0, 889}, - dictWord{7, 10, 1795}, - dictWord{8, 10, 259}, - dictWord{9, 10, 135}, - dictWord{9, 10, 177}, - dictWord{9, 10, 860}, - dictWord{10, 10, 825}, - dictWord{11, 10, 115}, - dictWord{11, 10, 370}, - dictWord{11, 10, 405}, - dictWord{11, 10, 604}, - dictWord{12, 10, 10}, - dictWord{12, 10, 667}, - dictWord{12, 10, 669}, - dictWord{13, 10, 76}, - dictWord{14, 10, 310}, - dictWord{ - 15, - 10, - 76, - }, - dictWord{15, 10, 147}, - dictWord{148, 10, 23}, - dictWord{4, 10, 15}, - dictWord{4, 11, 255}, - dictWord{5, 10, 22}, - dictWord{5, 11, 302}, - dictWord{6, 11, 132}, - dictWord{6, 10, 244}, - dictWord{7, 10, 40}, - dictWord{7, 11, 128}, - dictWord{7, 10, 200}, - dictWord{7, 11, 283}, - dictWord{7, 
10, 906}, - dictWord{7, 10, 1199}, - dictWord{ - 7, - 11, - 1299, - }, - dictWord{9, 10, 616}, - dictWord{10, 11, 52}, - dictWord{10, 11, 514}, - dictWord{10, 10, 716}, - dictWord{11, 10, 635}, - dictWord{11, 10, 801}, - dictWord{11, 11, 925}, - dictWord{12, 10, 458}, - dictWord{13, 11, 92}, - dictWord{142, 11, 309}, - dictWord{132, 0, 462}, - dictWord{137, 11, 173}, - dictWord{ - 135, - 10, - 1735, - }, - dictWord{8, 0, 525}, - dictWord{5, 10, 598}, - dictWord{7, 10, 791}, - dictWord{8, 10, 108}, - dictWord{137, 10, 123}, - dictWord{5, 0, 73}, - dictWord{6, 0, 23}, - dictWord{134, 0, 338}, - dictWord{132, 0, 676}, - dictWord{132, 10, 683}, - dictWord{7, 0, 725}, - dictWord{8, 0, 498}, - dictWord{139, 0, 268}, - dictWord{12, 0, 21}, - dictWord{151, 0, 7}, - dictWord{135, 0, 773}, - dictWord{4, 10, 155}, - dictWord{135, 10, 1689}, - dictWord{4, 0, 164}, - dictWord{5, 0, 730}, - dictWord{5, 10, 151}, - dictWord{ - 5, - 10, - 741, - }, - dictWord{6, 11, 210}, - dictWord{7, 10, 498}, - dictWord{7, 10, 870}, - dictWord{7, 10, 1542}, - dictWord{12, 10, 213}, - dictWord{14, 10, 36}, - dictWord{ - 14, - 10, - 391, - }, - dictWord{17, 10, 111}, - dictWord{18, 10, 6}, - dictWord{18, 10, 46}, - dictWord{18, 10, 151}, - dictWord{19, 10, 36}, - dictWord{20, 10, 32}, - dictWord{ - 20, - 10, - 56, - }, - dictWord{20, 10, 69}, - dictWord{20, 10, 102}, - dictWord{21, 10, 4}, - dictWord{22, 10, 8}, - dictWord{22, 10, 10}, - dictWord{22, 10, 14}, - dictWord{ - 150, - 10, - 31, - }, - dictWord{4, 10, 624}, - dictWord{135, 10, 1752}, - dictWord{4, 0, 583}, - dictWord{9, 0, 936}, - dictWord{15, 0, 214}, - dictWord{18, 0, 199}, - dictWord{24, 0, 26}, - dictWord{134, 11, 588}, - dictWord{7, 0, 1462}, - dictWord{11, 0, 659}, - dictWord{4, 11, 284}, - dictWord{134, 11, 223}, - dictWord{133, 0, 220}, - dictWord{ - 139, - 0, - 803, - }, - dictWord{132, 0, 544}, - dictWord{4, 10, 492}, - dictWord{133, 10, 451}, - dictWord{16, 0, 98}, - dictWord{148, 0, 119}, - dictWord{4, 11, 218}, - dictWord{ 
- 7, - 11, - 526, - }, - dictWord{143, 11, 137}, - dictWord{135, 10, 835}, - dictWord{4, 11, 270}, - dictWord{5, 11, 192}, - dictWord{6, 11, 332}, - dictWord{7, 11, 1322}, - dictWord{ - 13, - 11, - 9, - }, - dictWord{13, 10, 70}, - dictWord{14, 11, 104}, - dictWord{142, 11, 311}, - dictWord{132, 10, 539}, - dictWord{140, 11, 661}, - dictWord{5, 0, 176}, - dictWord{ - 6, - 0, - 437, - }, - dictWord{6, 0, 564}, - dictWord{11, 0, 181}, - dictWord{141, 0, 183}, - dictWord{135, 0, 1192}, - dictWord{6, 10, 113}, - dictWord{135, 10, 436}, - dictWord{136, 10, 718}, - dictWord{135, 10, 520}, - dictWord{135, 0, 1878}, - dictWord{140, 11, 196}, - dictWord{7, 11, 379}, - dictWord{8, 11, 481}, - dictWord{ - 137, - 11, - 377, - }, - dictWord{5, 11, 1003}, - dictWord{6, 11, 149}, - dictWord{137, 11, 746}, - dictWord{8, 11, 262}, - dictWord{9, 11, 627}, - dictWord{10, 11, 18}, - dictWord{ - 11, - 11, - 214, - }, - dictWord{11, 11, 404}, - dictWord{11, 11, 457}, - dictWord{11, 11, 780}, - dictWord{11, 11, 849}, - dictWord{11, 11, 913}, - dictWord{13, 11, 330}, - dictWord{13, 11, 401}, - dictWord{142, 11, 200}, - dictWord{149, 0, 26}, - dictWord{136, 11, 304}, - dictWord{132, 11, 142}, - dictWord{135, 0, 944}, - dictWord{ - 4, - 0, - 790, - }, - dictWord{5, 0, 273}, - dictWord{134, 0, 394}, - dictWord{134, 0, 855}, - dictWord{4, 0, 135}, - dictWord{6, 0, 127}, - dictWord{7, 0, 1185}, - dictWord{7, 0, 1511}, - dictWord{8, 0, 613}, - dictWord{11, 0, 5}, - dictWord{12, 0, 336}, - dictWord{12, 0, 495}, - dictWord{12, 0, 586}, - dictWord{12, 0, 660}, - dictWord{12, 0, 668}, - dictWord{ - 14, - 0, - 385, - }, - dictWord{15, 0, 118}, - dictWord{17, 0, 20}, - dictWord{146, 0, 98}, - dictWord{6, 0, 230}, - dictWord{9, 0, 752}, - dictWord{18, 0, 109}, - dictWord{12, 10, 610}, - dictWord{13, 10, 431}, - dictWord{144, 10, 59}, - dictWord{7, 0, 1954}, - dictWord{135, 11, 925}, - dictWord{4, 11, 471}, - dictWord{5, 11, 51}, - dictWord{6, 11, 602}, - dictWord{8, 11, 484}, - dictWord{10, 11, 195}, 
- dictWord{140, 11, 159}, - dictWord{132, 10, 307}, - dictWord{136, 11, 688}, - dictWord{132, 11, 697}, - dictWord{ - 7, - 11, - 812, - }, - dictWord{7, 11, 1261}, - dictWord{7, 11, 1360}, - dictWord{9, 11, 632}, - dictWord{140, 11, 352}, - dictWord{5, 0, 162}, - dictWord{8, 0, 68}, - dictWord{ - 133, - 10, - 964, - }, - dictWord{4, 0, 654}, - dictWord{136, 11, 212}, - dictWord{4, 0, 156}, - dictWord{7, 0, 998}, - dictWord{7, 0, 1045}, - dictWord{7, 0, 1860}, - dictWord{9, 0, 48}, - dictWord{9, 0, 692}, - dictWord{11, 0, 419}, - dictWord{139, 0, 602}, - dictWord{133, 11, 221}, - dictWord{4, 11, 373}, - dictWord{5, 11, 283}, - dictWord{6, 11, 480}, - dictWord{135, 11, 609}, - dictWord{142, 11, 216}, - dictWord{132, 0, 240}, - dictWord{6, 11, 192}, - dictWord{9, 11, 793}, - dictWord{145, 11, 55}, - dictWord{ - 4, - 10, - 75, - }, - dictWord{5, 10, 180}, - dictWord{6, 10, 500}, - dictWord{7, 10, 58}, - dictWord{7, 10, 710}, - dictWord{138, 10, 645}, - dictWord{4, 11, 132}, - dictWord{5, 11, 69}, - dictWord{5, 10, 649}, - dictWord{135, 11, 1242}, - dictWord{6, 10, 276}, - dictWord{7, 10, 282}, - dictWord{7, 10, 879}, - dictWord{7, 10, 924}, - dictWord{8, 10, 459}, - dictWord{9, 10, 599}, - dictWord{9, 10, 754}, - dictWord{11, 10, 574}, - dictWord{12, 10, 128}, - dictWord{12, 10, 494}, - dictWord{13, 10, 52}, - dictWord{13, 10, 301}, - dictWord{15, 10, 30}, - dictWord{143, 10, 132}, - dictWord{132, 10, 200}, - dictWord{4, 11, 111}, - dictWord{135, 11, 302}, - dictWord{9, 0, 197}, - dictWord{ - 10, - 0, - 300, - }, - dictWord{12, 0, 473}, - dictWord{13, 0, 90}, - dictWord{141, 0, 405}, - dictWord{132, 11, 767}, - dictWord{6, 11, 42}, - dictWord{7, 11, 1416}, - dictWord{ - 7, - 11, - 1590, - }, - dictWord{7, 11, 2005}, - dictWord{8, 11, 131}, - dictWord{8, 11, 466}, - dictWord{9, 11, 672}, - dictWord{13, 11, 252}, - dictWord{148, 11, 103}, - dictWord{ - 8, - 0, - 958, - }, - dictWord{8, 0, 999}, - dictWord{10, 0, 963}, - dictWord{138, 0, 1001}, - dictWord{135, 10, 1621}, 
- dictWord{135, 0, 858}, - dictWord{4, 0, 606}, - dictWord{ - 137, - 11, - 444, - }, - dictWord{6, 11, 44}, - dictWord{136, 11, 368}, - dictWord{139, 11, 172}, - dictWord{4, 11, 570}, - dictWord{133, 11, 120}, - dictWord{139, 11, 624}, - dictWord{7, 0, 1978}, - dictWord{8, 0, 676}, - dictWord{6, 10, 225}, - dictWord{137, 10, 211}, - dictWord{7, 0, 972}, - dictWord{11, 0, 102}, - dictWord{136, 10, 687}, - dictWord{6, 11, 227}, - dictWord{135, 11, 1589}, - dictWord{8, 10, 58}, - dictWord{9, 10, 724}, - dictWord{11, 10, 809}, - dictWord{13, 10, 113}, - dictWord{ - 145, - 10, - 72, - }, - dictWord{4, 0, 361}, - dictWord{133, 0, 315}, - dictWord{132, 0, 461}, - dictWord{6, 10, 345}, - dictWord{135, 10, 1247}, - dictWord{132, 0, 472}, - dictWord{ - 8, - 10, - 767, - }, - dictWord{8, 10, 803}, - dictWord{9, 10, 301}, - dictWord{137, 10, 903}, - dictWord{135, 11, 1333}, - dictWord{135, 11, 477}, - dictWord{7, 10, 1949}, - dictWord{136, 10, 674}, - dictWord{6, 0, 905}, - dictWord{138, 0, 747}, - dictWord{133, 0, 155}, - dictWord{134, 10, 259}, - dictWord{7, 0, 163}, - dictWord{8, 0, 319}, - dictWord{9, 0, 402}, - dictWord{10, 0, 24}, - dictWord{10, 0, 681}, - dictWord{11, 0, 200}, - dictWord{12, 0, 253}, - dictWord{12, 0, 410}, - dictWord{142, 0, 219}, - dictWord{ - 5, - 0, - 475, - }, - dictWord{7, 0, 1780}, - dictWord{9, 0, 230}, - dictWord{11, 0, 297}, - dictWord{11, 0, 558}, - dictWord{14, 0, 322}, - dictWord{19, 0, 76}, - dictWord{6, 11, 1667}, - dictWord{7, 11, 2036}, - dictWord{138, 11, 600}, - dictWord{136, 10, 254}, - dictWord{6, 0, 848}, - dictWord{135, 0, 1956}, - dictWord{6, 11, 511}, - dictWord{ - 140, - 11, - 132, - }, - dictWord{5, 11, 568}, - dictWord{6, 11, 138}, - dictWord{135, 11, 1293}, - dictWord{6, 0, 631}, - dictWord{137, 0, 838}, - dictWord{149, 0, 36}, - dictWord{ - 4, - 11, - 565, - }, - dictWord{8, 11, 23}, - dictWord{136, 11, 827}, - dictWord{5, 0, 944}, - dictWord{134, 0, 1769}, - dictWord{4, 0, 144}, - dictWord{6, 0, 842}, - dictWord{ - 6, - 0, 
- 1400, - }, - dictWord{4, 11, 922}, - dictWord{133, 11, 1023}, - dictWord{133, 10, 248}, - dictWord{9, 10, 800}, - dictWord{10, 10, 693}, - dictWord{11, 10, 482}, - dictWord{11, 10, 734}, - dictWord{139, 10, 789}, - dictWord{7, 11, 1002}, - dictWord{139, 11, 145}, - dictWord{4, 10, 116}, - dictWord{5, 10, 95}, - dictWord{5, 10, 445}, - dictWord{7, 10, 1688}, - dictWord{8, 10, 29}, - dictWord{9, 10, 272}, - dictWord{11, 10, 509}, - dictWord{139, 10, 915}, - dictWord{14, 0, 369}, - dictWord{146, 0, 72}, - dictWord{135, 10, 1641}, - dictWord{132, 11, 740}, - dictWord{133, 10, 543}, - dictWord{140, 11, 116}, - dictWord{6, 0, 247}, - dictWord{9, 0, 555}, - dictWord{ - 5, - 10, - 181, - }, - dictWord{136, 10, 41}, - dictWord{133, 10, 657}, - dictWord{136, 0, 996}, - dictWord{138, 10, 709}, - dictWord{7, 0, 189}, - dictWord{8, 10, 202}, - dictWord{ - 138, - 10, - 536, - }, - dictWord{136, 11, 402}, - dictWord{4, 11, 716}, - dictWord{141, 11, 31}, - dictWord{10, 0, 280}, - dictWord{138, 0, 797}, - dictWord{9, 10, 423}, - dictWord{140, 10, 89}, - dictWord{8, 10, 113}, - dictWord{9, 10, 877}, - dictWord{10, 10, 554}, - dictWord{11, 10, 83}, - dictWord{12, 10, 136}, - dictWord{147, 10, 109}, - dictWord{133, 10, 976}, - dictWord{7, 0, 746}, - dictWord{132, 10, 206}, - dictWord{136, 0, 526}, - dictWord{139, 0, 345}, - dictWord{136, 0, 1017}, - dictWord{ - 8, - 11, - 152, - }, - dictWord{9, 11, 53}, - dictWord{9, 11, 268}, - dictWord{9, 11, 901}, - dictWord{10, 11, 518}, - dictWord{10, 11, 829}, - dictWord{11, 11, 188}, - dictWord{ - 13, - 11, - 74, - }, - dictWord{14, 11, 46}, - dictWord{15, 11, 17}, - dictWord{15, 11, 33}, - dictWord{17, 11, 40}, - dictWord{18, 11, 36}, - dictWord{19, 11, 20}, - dictWord{22, 11, 1}, - dictWord{152, 11, 2}, - dictWord{133, 11, 736}, - dictWord{136, 11, 532}, - dictWord{5, 0, 428}, - dictWord{138, 0, 651}, - dictWord{135, 11, 681}, - dictWord{ - 135, - 0, - 1162, - }, - dictWord{7, 0, 327}, - dictWord{13, 0, 230}, - dictWord{17, 0, 113}, - 
dictWord{8, 10, 226}, - dictWord{10, 10, 537}, - dictWord{11, 10, 570}, - dictWord{ - 11, - 10, - 605, - }, - dictWord{11, 10, 799}, - dictWord{11, 10, 804}, - dictWord{12, 10, 85}, - dictWord{12, 10, 516}, - dictWord{12, 10, 623}, - dictWord{12, 11, 677}, - dictWord{ - 13, - 10, - 361, - }, - dictWord{14, 10, 77}, - dictWord{14, 10, 78}, - dictWord{147, 10, 110}, - dictWord{4, 0, 792}, - dictWord{7, 0, 1717}, - dictWord{10, 0, 546}, - dictWord{ - 132, - 10, - 769, - }, - dictWord{4, 11, 684}, - dictWord{136, 11, 384}, - dictWord{132, 10, 551}, - dictWord{134, 0, 1203}, - dictWord{9, 10, 57}, - dictWord{9, 10, 459}, - dictWord{10, 10, 425}, - dictWord{11, 10, 119}, - dictWord{12, 10, 184}, - dictWord{12, 10, 371}, - dictWord{13, 10, 358}, - dictWord{145, 10, 51}, - dictWord{5, 0, 672}, - dictWord{5, 10, 814}, - dictWord{8, 10, 10}, - dictWord{9, 10, 421}, - dictWord{9, 10, 729}, - dictWord{10, 10, 609}, - dictWord{139, 10, 689}, - dictWord{138, 0, 189}, - dictWord{134, 10, 624}, - dictWord{7, 11, 110}, - dictWord{7, 11, 188}, - dictWord{8, 11, 290}, - dictWord{8, 11, 591}, - dictWord{9, 11, 382}, - dictWord{9, 11, 649}, - dictWord{11, 11, 71}, - dictWord{11, 11, 155}, - dictWord{11, 11, 313}, - dictWord{12, 11, 5}, - dictWord{13, 11, 325}, - dictWord{142, 11, 287}, - dictWord{133, 0, 99}, - dictWord{6, 0, 1053}, - dictWord{135, 0, 298}, - dictWord{7, 11, 360}, - dictWord{7, 11, 425}, - dictWord{9, 11, 66}, - dictWord{9, 11, 278}, - dictWord{138, 11, 644}, - dictWord{4, 0, 397}, - dictWord{136, 0, 555}, - dictWord{137, 10, 269}, - dictWord{132, 10, 528}, - dictWord{4, 11, 900}, - dictWord{133, 11, 861}, - dictWord{ - 6, - 0, - 1157, - }, - dictWord{5, 11, 254}, - dictWord{7, 11, 985}, - dictWord{136, 11, 73}, - dictWord{7, 11, 1959}, - dictWord{136, 11, 683}, - dictWord{12, 0, 398}, - dictWord{ - 20, - 0, - 39, - }, - dictWord{21, 0, 11}, - dictWord{150, 0, 41}, - dictWord{4, 0, 485}, - dictWord{7, 0, 353}, - dictWord{135, 0, 1523}, - dictWord{6, 0, 366}, - 
dictWord{7, 0, 1384}, - dictWord{135, 0, 1601}, - dictWord{138, 0, 787}, - dictWord{137, 0, 282}, - dictWord{5, 10, 104}, - dictWord{6, 10, 173}, - dictWord{135, 10, 1631}, - dictWord{ - 139, - 11, - 146, - }, - dictWord{4, 0, 157}, - dictWord{133, 0, 471}, - dictWord{134, 0, 941}, - dictWord{132, 11, 725}, - dictWord{7, 0, 1336}, - dictWord{8, 10, 138}, - dictWord{ - 8, - 10, - 342, - }, - dictWord{9, 10, 84}, - dictWord{10, 10, 193}, - dictWord{11, 10, 883}, - dictWord{140, 10, 359}, - dictWord{134, 11, 196}, - dictWord{136, 0, 116}, - dictWord{133, 11, 831}, - dictWord{134, 0, 787}, - dictWord{134, 10, 95}, - dictWord{6, 10, 406}, - dictWord{10, 10, 409}, - dictWord{10, 10, 447}, - dictWord{ - 11, - 10, - 44, - }, - dictWord{140, 10, 100}, - dictWord{5, 0, 160}, - dictWord{7, 0, 363}, - dictWord{7, 0, 589}, - dictWord{10, 0, 170}, - dictWord{141, 0, 55}, - dictWord{134, 0, 1815}, - dictWord{132, 0, 866}, - dictWord{6, 0, 889}, - dictWord{6, 0, 1067}, - dictWord{6, 0, 1183}, - dictWord{4, 11, 321}, - dictWord{134, 11, 569}, - dictWord{5, 11, 848}, - dictWord{134, 11, 66}, - dictWord{4, 11, 36}, - dictWord{6, 10, 1636}, - dictWord{7, 11, 1387}, - dictWord{10, 11, 205}, - dictWord{11, 11, 755}, - dictWord{ - 141, - 11, - 271, - }, - dictWord{132, 0, 689}, - dictWord{9, 0, 820}, - dictWord{4, 10, 282}, - dictWord{7, 10, 1034}, - dictWord{11, 10, 398}, - dictWord{11, 10, 634}, - dictWord{ - 12, - 10, - 1, - }, - dictWord{12, 10, 79}, - dictWord{12, 10, 544}, - dictWord{14, 10, 237}, - dictWord{17, 10, 10}, - dictWord{146, 10, 20}, - dictWord{4, 0, 108}, - dictWord{7, 0, 804}, - dictWord{139, 0, 498}, - dictWord{132, 11, 887}, - dictWord{6, 0, 1119}, - dictWord{135, 11, 620}, - dictWord{6, 11, 165}, - dictWord{138, 11, 388}, - dictWord{ - 5, - 0, - 244, - }, - dictWord{5, 10, 499}, - dictWord{6, 10, 476}, - dictWord{7, 10, 600}, - dictWord{7, 10, 888}, - dictWord{135, 10, 1096}, - dictWord{140, 0, 609}, - dictWord{ - 135, - 0, - 1005, - }, - dictWord{4, 0, 412}, - 
dictWord{133, 0, 581}, - dictWord{4, 11, 719}, - dictWord{135, 11, 155}, - dictWord{7, 10, 296}, - dictWord{7, 10, 596}, - dictWord{ - 8, - 10, - 560, - }, - dictWord{8, 10, 586}, - dictWord{9, 10, 612}, - dictWord{11, 10, 304}, - dictWord{12, 10, 46}, - dictWord{13, 10, 89}, - dictWord{14, 10, 112}, - dictWord{ - 145, - 10, - 122, - }, - dictWord{4, 0, 895}, - dictWord{133, 0, 772}, - dictWord{142, 11, 307}, - dictWord{135, 0, 1898}, - dictWord{4, 0, 926}, - dictWord{133, 0, 983}, - dictWord{4, 11, 353}, - dictWord{6, 11, 146}, - dictWord{6, 11, 1789}, - dictWord{7, 11, 288}, - dictWord{7, 11, 990}, - dictWord{7, 11, 1348}, - dictWord{9, 11, 665}, - dictWord{ - 9, - 11, - 898, - }, - dictWord{11, 11, 893}, - dictWord{142, 11, 212}, - dictWord{132, 0, 538}, - dictWord{133, 11, 532}, - dictWord{6, 0, 294}, - dictWord{7, 0, 1267}, - dictWord{8, 0, 624}, - dictWord{141, 0, 496}, - dictWord{7, 0, 1325}, - dictWord{4, 11, 45}, - dictWord{135, 11, 1257}, - dictWord{138, 0, 301}, - dictWord{9, 0, 298}, - dictWord{12, 0, 291}, - dictWord{13, 0, 276}, - dictWord{14, 0, 6}, - dictWord{17, 0, 18}, - dictWord{21, 0, 32}, - dictWord{7, 10, 1599}, - dictWord{7, 10, 1723}, - dictWord{ - 8, - 10, - 79, - }, - dictWord{8, 10, 106}, - dictWord{8, 10, 190}, - dictWord{8, 10, 302}, - dictWord{8, 10, 383}, - dictWord{8, 10, 713}, - dictWord{9, 10, 119}, - dictWord{9, 10, 233}, - dictWord{9, 10, 419}, - dictWord{9, 10, 471}, - dictWord{10, 10, 181}, - dictWord{10, 10, 406}, - dictWord{11, 10, 57}, - dictWord{11, 10, 85}, - dictWord{11, 10, 120}, - dictWord{11, 10, 177}, - dictWord{11, 10, 296}, - dictWord{11, 10, 382}, - dictWord{11, 10, 454}, - dictWord{11, 10, 758}, - dictWord{11, 10, 999}, - dictWord{ - 12, - 10, - 27, - }, - dictWord{12, 10, 131}, - dictWord{12, 10, 245}, - dictWord{12, 10, 312}, - dictWord{12, 10, 446}, - dictWord{12, 10, 454}, - dictWord{13, 10, 98}, - dictWord{ - 13, - 10, - 426, - }, - dictWord{13, 10, 508}, - dictWord{14, 10, 163}, - dictWord{14, 10, 272}, - 
dictWord{14, 10, 277}, - dictWord{14, 10, 370}, - dictWord{15, 10, 95}, - dictWord{15, 10, 138}, - dictWord{15, 10, 167}, - dictWord{17, 10, 38}, - dictWord{148, 10, 96}, - dictWord{132, 0, 757}, - dictWord{134, 0, 1263}, - dictWord{4, 0, 820}, - dictWord{134, 10, 1759}, - dictWord{133, 0, 722}, - dictWord{136, 11, 816}, - dictWord{138, 10, 372}, - dictWord{145, 10, 16}, - dictWord{134, 0, 1039}, - dictWord{ - 4, - 0, - 991, - }, - dictWord{134, 0, 2028}, - dictWord{133, 10, 258}, - dictWord{7, 0, 1875}, - dictWord{139, 0, 124}, - dictWord{6, 11, 559}, - dictWord{6, 11, 1691}, - dictWord{135, 11, 586}, - dictWord{5, 0, 324}, - dictWord{7, 0, 881}, - dictWord{8, 10, 134}, - dictWord{9, 10, 788}, - dictWord{140, 10, 438}, - dictWord{7, 11, 1823}, - dictWord{139, 11, 693}, - dictWord{6, 0, 1348}, - dictWord{134, 0, 1545}, - dictWord{134, 0, 911}, - dictWord{132, 0, 954}, - dictWord{8, 0, 329}, - dictWord{8, 0, 414}, - dictWord{7, 10, 1948}, - dictWord{135, 10, 2004}, - dictWord{5, 0, 517}, - dictWord{6, 10, 439}, - dictWord{7, 10, 780}, - dictWord{135, 10, 1040}, - dictWord{ - 132, - 0, - 816, - }, - dictWord{5, 10, 1}, - dictWord{6, 10, 81}, - dictWord{138, 10, 520}, - dictWord{9, 0, 713}, - dictWord{10, 0, 222}, - dictWord{5, 10, 482}, - dictWord{8, 10, 98}, - dictWord{10, 10, 700}, - dictWord{10, 10, 822}, - dictWord{11, 10, 302}, - dictWord{11, 10, 778}, - dictWord{12, 10, 50}, - dictWord{12, 10, 127}, - dictWord{12, 10, 396}, - dictWord{13, 10, 62}, - dictWord{13, 10, 328}, - dictWord{14, 10, 122}, - dictWord{147, 10, 72}, - dictWord{137, 0, 33}, - dictWord{5, 10, 2}, - dictWord{7, 10, 1494}, - dictWord{136, 10, 589}, - dictWord{6, 10, 512}, - dictWord{7, 10, 797}, - dictWord{8, 10, 253}, - dictWord{9, 10, 77}, - dictWord{10, 10, 1}, - dictWord{10, 11, 108}, - dictWord{10, 10, 129}, - dictWord{10, 10, 225}, - dictWord{11, 11, 116}, - dictWord{11, 10, 118}, - dictWord{11, 10, 226}, - dictWord{11, 10, 251}, - dictWord{ - 11, - 10, - 430, - }, - dictWord{11, 10, 
701}, - dictWord{11, 10, 974}, - dictWord{11, 10, 982}, - dictWord{12, 10, 64}, - dictWord{12, 10, 260}, - dictWord{12, 10, 488}, - dictWord{ - 140, - 10, - 690, - }, - dictWord{134, 11, 456}, - dictWord{133, 11, 925}, - dictWord{5, 0, 150}, - dictWord{7, 0, 106}, - dictWord{7, 0, 774}, - dictWord{8, 0, 603}, - dictWord{ - 9, - 0, - 593, - }, - dictWord{9, 0, 634}, - dictWord{10, 0, 44}, - dictWord{10, 0, 173}, - dictWord{11, 0, 462}, - dictWord{11, 0, 515}, - dictWord{13, 0, 216}, - dictWord{13, 0, 288}, - dictWord{142, 0, 400}, - dictWord{137, 10, 347}, - dictWord{5, 0, 748}, - dictWord{134, 0, 553}, - dictWord{12, 0, 108}, - dictWord{141, 0, 291}, - dictWord{7, 0, 420}, - dictWord{4, 10, 12}, - dictWord{7, 10, 522}, - dictWord{7, 10, 809}, - dictWord{8, 10, 797}, - dictWord{141, 10, 88}, - dictWord{6, 11, 193}, - dictWord{7, 11, 240}, - dictWord{ - 7, - 11, - 1682, - }, - dictWord{10, 11, 51}, - dictWord{10, 11, 640}, - dictWord{11, 11, 410}, - dictWord{13, 11, 82}, - dictWord{14, 11, 247}, - dictWord{14, 11, 331}, - dictWord{142, 11, 377}, - dictWord{133, 10, 528}, - dictWord{135, 0, 1777}, - dictWord{4, 0, 493}, - dictWord{144, 0, 55}, - dictWord{136, 11, 633}, - dictWord{ - 139, - 0, - 81, - }, - dictWord{6, 0, 980}, - dictWord{136, 0, 321}, - dictWord{148, 10, 109}, - dictWord{5, 10, 266}, - dictWord{9, 10, 290}, - dictWord{9, 10, 364}, - dictWord{ - 10, - 10, - 293, - }, - dictWord{11, 10, 606}, - dictWord{142, 10, 45}, - dictWord{6, 0, 568}, - dictWord{7, 0, 112}, - dictWord{7, 0, 1804}, - dictWord{8, 0, 362}, - dictWord{8, 0, 410}, - dictWord{8, 0, 830}, - dictWord{9, 0, 514}, - dictWord{11, 0, 649}, - dictWord{142, 0, 157}, - dictWord{4, 0, 74}, - dictWord{6, 0, 510}, - dictWord{6, 10, 594}, - dictWord{ - 9, - 10, - 121, - }, - dictWord{10, 10, 49}, - dictWord{10, 10, 412}, - dictWord{139, 10, 834}, - dictWord{134, 0, 838}, - dictWord{136, 10, 748}, - dictWord{132, 10, 466}, - dictWord{132, 0, 625}, - dictWord{135, 11, 1443}, - dictWord{4, 11, 237}, - 
dictWord{135, 11, 514}, - dictWord{9, 10, 378}, - dictWord{141, 10, 162}, - dictWord{6, 0, 16}, - dictWord{6, 0, 158}, - dictWord{7, 0, 43}, - dictWord{7, 0, 129}, - dictWord{7, 0, 181}, - dictWord{8, 0, 276}, - dictWord{8, 0, 377}, - dictWord{10, 0, 523}, - dictWord{ - 11, - 0, - 816, - }, - dictWord{12, 0, 455}, - dictWord{13, 0, 303}, - dictWord{142, 0, 135}, - dictWord{135, 0, 281}, - dictWord{4, 0, 1}, - dictWord{7, 0, 1143}, - dictWord{7, 0, 1463}, - dictWord{8, 0, 61}, - dictWord{9, 0, 207}, - dictWord{9, 0, 390}, - dictWord{9, 0, 467}, - dictWord{139, 0, 836}, - dictWord{6, 11, 392}, - dictWord{7, 11, 65}, - dictWord{ - 135, - 11, - 2019, - }, - dictWord{132, 10, 667}, - dictWord{4, 0, 723}, - dictWord{5, 0, 895}, - dictWord{7, 0, 1031}, - dictWord{8, 0, 199}, - dictWord{8, 0, 340}, - dictWord{9, 0, 153}, - dictWord{9, 0, 215}, - dictWord{10, 0, 21}, - dictWord{10, 0, 59}, - dictWord{10, 0, 80}, - dictWord{10, 0, 224}, - dictWord{10, 0, 838}, - dictWord{11, 0, 229}, - dictWord{ - 11, - 0, - 652, - }, - dictWord{12, 0, 192}, - dictWord{13, 0, 146}, - dictWord{142, 0, 91}, - dictWord{132, 0, 295}, - dictWord{137, 0, 51}, - dictWord{9, 11, 222}, - dictWord{ - 10, - 11, - 43, - }, - dictWord{139, 11, 900}, - dictWord{5, 0, 309}, - dictWord{140, 0, 211}, - dictWord{5, 0, 125}, - dictWord{8, 0, 77}, - dictWord{138, 0, 15}, - dictWord{136, 11, 604}, - dictWord{138, 0, 789}, - dictWord{5, 0, 173}, - dictWord{4, 10, 39}, - dictWord{7, 10, 1843}, - dictWord{8, 10, 407}, - dictWord{11, 10, 144}, - dictWord{140, 10, 523}, - dictWord{138, 11, 265}, - dictWord{133, 0, 439}, - dictWord{132, 10, 510}, - dictWord{7, 0, 648}, - dictWord{7, 0, 874}, - dictWord{11, 0, 164}, - dictWord{12, 0, 76}, - dictWord{18, 0, 9}, - dictWord{7, 10, 1980}, - dictWord{10, 10, 487}, - dictWord{138, 10, 809}, - dictWord{12, 0, 111}, - dictWord{14, 0, 294}, - dictWord{19, 0, 45}, - dictWord{13, 10, 260}, - dictWord{146, 10, 63}, - dictWord{133, 11, 549}, - dictWord{134, 10, 570}, - dictWord{4, 
0, 8}, - dictWord{7, 0, 1152}, - dictWord{7, 0, 1153}, - dictWord{7, 0, 1715}, - dictWord{9, 0, 374}, - dictWord{10, 0, 478}, - dictWord{139, 0, 648}, - dictWord{135, 0, 1099}, - dictWord{5, 0, 575}, - dictWord{6, 0, 354}, - dictWord{ - 135, - 0, - 701, - }, - dictWord{7, 11, 36}, - dictWord{8, 11, 201}, - dictWord{136, 11, 605}, - dictWord{4, 10, 787}, - dictWord{136, 11, 156}, - dictWord{6, 0, 518}, - dictWord{ - 149, - 11, - 13, - }, - dictWord{140, 11, 224}, - dictWord{134, 0, 702}, - dictWord{132, 10, 516}, - dictWord{5, 11, 724}, - dictWord{10, 11, 305}, - dictWord{11, 11, 151}, - dictWord{12, 11, 33}, - dictWord{12, 11, 121}, - dictWord{12, 11, 381}, - dictWord{17, 11, 3}, - dictWord{17, 11, 27}, - dictWord{17, 11, 78}, - dictWord{18, 11, 18}, - dictWord{19, 11, 54}, - dictWord{149, 11, 5}, - dictWord{8, 0, 87}, - dictWord{4, 11, 523}, - dictWord{5, 11, 638}, - dictWord{11, 10, 887}, - dictWord{14, 10, 365}, - dictWord{ - 142, - 10, - 375, - }, - dictWord{138, 0, 438}, - dictWord{136, 10, 821}, - dictWord{135, 11, 1908}, - dictWord{6, 11, 242}, - dictWord{7, 11, 227}, - dictWord{7, 11, 1581}, - dictWord{8, 11, 104}, - dictWord{9, 11, 113}, - dictWord{9, 11, 220}, - dictWord{9, 11, 427}, - dictWord{10, 11, 74}, - dictWord{10, 11, 239}, - dictWord{11, 11, 579}, - dictWord{11, 11, 1023}, - dictWord{13, 11, 4}, - dictWord{13, 11, 204}, - dictWord{13, 11, 316}, - dictWord{18, 11, 95}, - dictWord{148, 11, 86}, - dictWord{4, 0, 69}, - dictWord{5, 0, 122}, - dictWord{5, 0, 849}, - dictWord{6, 0, 1633}, - dictWord{9, 0, 656}, - dictWord{138, 0, 464}, - dictWord{7, 0, 1802}, - dictWord{4, 10, 10}, - dictWord{ - 139, - 10, - 786, - }, - dictWord{135, 11, 861}, - dictWord{139, 0, 499}, - dictWord{7, 0, 476}, - dictWord{7, 0, 1592}, - dictWord{138, 0, 87}, - dictWord{133, 10, 684}, - dictWord{ - 4, - 0, - 840, - }, - dictWord{134, 10, 27}, - dictWord{142, 0, 283}, - dictWord{6, 0, 1620}, - dictWord{7, 11, 1328}, - dictWord{136, 11, 494}, - dictWord{5, 0, 859}, - 
dictWord{ - 7, - 0, - 1160, - }, - dictWord{8, 0, 107}, - dictWord{9, 0, 291}, - dictWord{9, 0, 439}, - dictWord{10, 0, 663}, - dictWord{11, 0, 609}, - dictWord{140, 0, 197}, - dictWord{ - 7, - 11, - 1306, - }, - dictWord{8, 11, 505}, - dictWord{9, 11, 482}, - dictWord{10, 11, 126}, - dictWord{11, 11, 225}, - dictWord{12, 11, 347}, - dictWord{12, 11, 449}, - dictWord{ - 13, - 11, - 19, - }, - dictWord{142, 11, 218}, - dictWord{5, 11, 268}, - dictWord{10, 11, 764}, - dictWord{12, 11, 120}, - dictWord{13, 11, 39}, - dictWord{145, 11, 127}, - dictWord{145, 10, 56}, - dictWord{7, 11, 1672}, - dictWord{10, 11, 472}, - dictWord{11, 11, 189}, - dictWord{143, 11, 51}, - dictWord{6, 10, 342}, - dictWord{6, 10, 496}, - dictWord{8, 10, 275}, - dictWord{137, 10, 206}, - dictWord{133, 0, 600}, - dictWord{4, 0, 117}, - dictWord{6, 0, 372}, - dictWord{7, 0, 1905}, - dictWord{142, 0, 323}, - dictWord{4, 10, 909}, - dictWord{5, 10, 940}, - dictWord{135, 11, 1471}, - dictWord{132, 10, 891}, - dictWord{4, 0, 722}, - dictWord{139, 0, 471}, - dictWord{4, 11, 384}, - dictWord{135, 11, 1022}, - dictWord{132, 10, 687}, - dictWord{9, 0, 5}, - dictWord{12, 0, 216}, - dictWord{12, 0, 294}, - dictWord{12, 0, 298}, - dictWord{12, 0, 400}, - dictWord{12, 0, 518}, - dictWord{13, 0, 229}, - dictWord{143, 0, 139}, - dictWord{135, 11, 1703}, - dictWord{7, 11, 1602}, - dictWord{10, 11, 698}, - dictWord{ - 12, - 11, - 212, - }, - dictWord{141, 11, 307}, - dictWord{6, 10, 41}, - dictWord{141, 10, 160}, - dictWord{135, 11, 1077}, - dictWord{9, 11, 159}, - dictWord{11, 11, 28}, - dictWord{140, 11, 603}, - dictWord{4, 0, 514}, - dictWord{7, 0, 1304}, - dictWord{138, 0, 477}, - dictWord{134, 0, 1774}, - dictWord{9, 0, 88}, - dictWord{139, 0, 270}, - dictWord{5, 0, 12}, - dictWord{7, 0, 375}, - dictWord{9, 0, 438}, - dictWord{134, 10, 1718}, - dictWord{132, 11, 515}, - dictWord{136, 10, 778}, - dictWord{8, 11, 632}, - dictWord{8, 11, 697}, - dictWord{137, 11, 854}, - dictWord{6, 0, 362}, - dictWord{6, 0, 
997}, - dictWord{146, 0, 51}, - dictWord{7, 0, 816}, - dictWord{7, 0, 1241}, - dictWord{ - 9, - 0, - 283, - }, - dictWord{9, 0, 520}, - dictWord{10, 0, 213}, - dictWord{10, 0, 307}, - dictWord{10, 0, 463}, - dictWord{10, 0, 671}, - dictWord{10, 0, 746}, - dictWord{11, 0, 401}, - dictWord{11, 0, 794}, - dictWord{12, 0, 517}, - dictWord{18, 0, 107}, - dictWord{147, 0, 115}, - dictWord{133, 10, 115}, - dictWord{150, 11, 28}, - dictWord{4, 11, 136}, - dictWord{133, 11, 551}, - dictWord{142, 10, 314}, - dictWord{132, 0, 258}, - dictWord{6, 0, 22}, - dictWord{7, 0, 903}, - dictWord{7, 0, 1963}, - dictWord{8, 0, 639}, - dictWord{138, 0, 577}, - dictWord{5, 0, 681}, - dictWord{8, 0, 782}, - dictWord{13, 0, 130}, - dictWord{17, 0, 84}, - dictWord{5, 10, 193}, - dictWord{140, 10, 178}, - dictWord{ - 9, - 11, - 17, - }, - dictWord{138, 11, 291}, - dictWord{7, 11, 1287}, - dictWord{9, 11, 44}, - dictWord{10, 11, 552}, - dictWord{10, 11, 642}, - dictWord{11, 11, 839}, - dictWord{12, 11, 274}, - dictWord{12, 11, 275}, - dictWord{12, 11, 372}, - dictWord{13, 11, 91}, - dictWord{142, 11, 125}, - dictWord{135, 10, 174}, - dictWord{4, 0, 664}, - dictWord{5, 0, 804}, - dictWord{139, 0, 1013}, - dictWord{134, 0, 942}, - dictWord{6, 0, 1349}, - dictWord{6, 0, 1353}, - dictWord{6, 0, 1450}, - dictWord{7, 11, 1518}, - dictWord{139, 11, 694}, - dictWord{11, 0, 356}, - dictWord{4, 10, 122}, - dictWord{5, 10, 796}, - dictWord{5, 10, 952}, - dictWord{6, 10, 1660}, - dictWord{ - 6, - 10, - 1671, - }, - dictWord{8, 10, 567}, - dictWord{9, 10, 687}, - dictWord{9, 10, 742}, - dictWord{10, 10, 686}, - dictWord{11, 10, 682}, - dictWord{140, 10, 281}, - dictWord{ - 5, - 0, - 32, - }, - dictWord{6, 11, 147}, - dictWord{7, 11, 886}, - dictWord{9, 11, 753}, - dictWord{138, 11, 268}, - dictWord{5, 10, 179}, - dictWord{7, 10, 1095}, - dictWord{ - 135, - 10, - 1213, - }, - dictWord{4, 10, 66}, - dictWord{7, 10, 722}, - dictWord{135, 10, 904}, - dictWord{135, 10, 352}, - dictWord{9, 11, 245}, - 
dictWord{138, 11, 137}, - dictWord{4, 0, 289}, - dictWord{7, 0, 629}, - dictWord{7, 0, 1698}, - dictWord{7, 0, 1711}, - dictWord{12, 0, 215}, - dictWord{133, 11, 414}, - dictWord{6, 0, 1975}, - dictWord{135, 11, 1762}, - dictWord{6, 0, 450}, - dictWord{136, 0, 109}, - dictWord{141, 10, 35}, - dictWord{134, 11, 599}, - dictWord{136, 0, 705}, - dictWord{ - 133, - 0, - 664, - }, - dictWord{134, 11, 1749}, - dictWord{11, 11, 402}, - dictWord{12, 11, 109}, - dictWord{12, 11, 431}, - dictWord{13, 11, 179}, - dictWord{13, 11, 206}, - dictWord{14, 11, 175}, - dictWord{14, 11, 217}, - dictWord{16, 11, 3}, - dictWord{148, 11, 53}, - dictWord{135, 0, 1238}, - dictWord{134, 11, 1627}, - dictWord{ - 132, - 11, - 488, - }, - dictWord{13, 0, 318}, - dictWord{10, 10, 592}, - dictWord{10, 10, 753}, - dictWord{12, 10, 317}, - dictWord{12, 10, 355}, - dictWord{12, 10, 465}, - dictWord{ - 12, - 10, - 469, - }, - dictWord{12, 10, 560}, - dictWord{140, 10, 578}, - dictWord{133, 10, 564}, - dictWord{132, 11, 83}, - dictWord{140, 11, 676}, - dictWord{6, 0, 1872}, - dictWord{6, 0, 1906}, - dictWord{6, 0, 1907}, - dictWord{9, 0, 934}, - dictWord{9, 0, 956}, - dictWord{9, 0, 960}, - dictWord{9, 0, 996}, - dictWord{12, 0, 794}, - dictWord{ - 12, - 0, - 876, - }, - dictWord{12, 0, 880}, - dictWord{12, 0, 918}, - dictWord{15, 0, 230}, - dictWord{18, 0, 234}, - dictWord{18, 0, 238}, - dictWord{21, 0, 38}, - dictWord{149, 0, 62}, - dictWord{134, 10, 556}, - dictWord{134, 11, 278}, - dictWord{137, 0, 103}, - dictWord{7, 10, 544}, - dictWord{8, 10, 719}, - dictWord{138, 10, 61}, - dictWord{ - 4, - 10, - 5, - }, - dictWord{5, 10, 498}, - dictWord{8, 10, 637}, - dictWord{137, 10, 521}, - dictWord{7, 0, 777}, - dictWord{12, 0, 229}, - dictWord{12, 0, 239}, - dictWord{15, 0, 12}, - dictWord{12, 11, 229}, - dictWord{12, 11, 239}, - dictWord{143, 11, 12}, - dictWord{6, 0, 26}, - dictWord{7, 11, 388}, - dictWord{7, 11, 644}, - dictWord{139, 11, 781}, - dictWord{7, 11, 229}, - dictWord{8, 11, 59}, - 
dictWord{9, 11, 190}, - dictWord{9, 11, 257}, - dictWord{10, 11, 378}, - dictWord{140, 11, 191}, - dictWord{133, 10, 927}, - dictWord{135, 10, 1441}, - dictWord{4, 10, 893}, - dictWord{5, 10, 780}, - dictWord{133, 10, 893}, - dictWord{4, 0, 414}, - dictWord{5, 0, 467}, - dictWord{9, 0, 654}, - dictWord{10, 0, 451}, - dictWord{12, 0, 59}, - dictWord{141, 0, 375}, - dictWord{142, 0, 173}, - dictWord{135, 0, 17}, - dictWord{7, 0, 1350}, - dictWord{133, 10, 238}, - dictWord{135, 0, 955}, - dictWord{4, 0, 960}, - dictWord{10, 0, 887}, - dictWord{12, 0, 753}, - dictWord{18, 0, 161}, - dictWord{18, 0, 162}, - dictWord{152, 0, 19}, - dictWord{136, 11, 344}, - dictWord{6, 10, 1729}, - dictWord{137, 11, 288}, - dictWord{132, 11, 660}, - dictWord{4, 0, 217}, - dictWord{5, 0, 710}, - dictWord{7, 0, 760}, - dictWord{7, 0, 1926}, - dictWord{9, 0, 428}, - dictWord{9, 0, 708}, - dictWord{10, 0, 254}, - dictWord{10, 0, 296}, - dictWord{10, 0, 720}, - dictWord{11, 0, 109}, - dictWord{ - 11, - 0, - 255, - }, - dictWord{12, 0, 165}, - dictWord{12, 0, 315}, - dictWord{13, 0, 107}, - dictWord{13, 0, 203}, - dictWord{14, 0, 54}, - dictWord{14, 0, 99}, - dictWord{14, 0, 114}, - dictWord{14, 0, 388}, - dictWord{16, 0, 85}, - dictWord{17, 0, 9}, - dictWord{17, 0, 33}, - dictWord{20, 0, 25}, - dictWord{20, 0, 28}, - dictWord{20, 0, 29}, - dictWord{21, 0, 9}, - dictWord{21, 0, 10}, - dictWord{21, 0, 34}, - dictWord{22, 0, 17}, - dictWord{4, 10, 60}, - dictWord{7, 10, 1800}, - dictWord{8, 10, 314}, - dictWord{9, 10, 700}, - dictWord{ - 139, - 10, - 487, - }, - dictWord{7, 11, 1035}, - dictWord{138, 11, 737}, - dictWord{7, 11, 690}, - dictWord{9, 11, 217}, - dictWord{9, 11, 587}, - dictWord{140, 11, 521}, - dictWord{6, 0, 919}, - dictWord{7, 11, 706}, - dictWord{7, 11, 1058}, - dictWord{138, 11, 538}, - dictWord{7, 10, 1853}, - dictWord{138, 10, 437}, - dictWord{ - 136, - 10, - 419, - }, - dictWord{6, 0, 280}, - dictWord{10, 0, 502}, - dictWord{11, 0, 344}, - dictWord{140, 0, 38}, - dictWord{5, 
0, 45}, - dictWord{7, 0, 1161}, - dictWord{11, 0, 448}, - dictWord{11, 0, 880}, - dictWord{13, 0, 139}, - dictWord{13, 0, 407}, - dictWord{15, 0, 16}, - dictWord{17, 0, 95}, - dictWord{18, 0, 66}, - dictWord{18, 0, 88}, - dictWord{ - 18, - 0, - 123, - }, - dictWord{149, 0, 7}, - dictWord{11, 11, 92}, - dictWord{11, 11, 196}, - dictWord{11, 11, 409}, - dictWord{11, 11, 450}, - dictWord{11, 11, 666}, - dictWord{ - 11, - 11, - 777, - }, - dictWord{12, 11, 262}, - dictWord{13, 11, 385}, - dictWord{13, 11, 393}, - dictWord{15, 11, 115}, - dictWord{16, 11, 45}, - dictWord{145, 11, 82}, - dictWord{136, 0, 777}, - dictWord{134, 11, 1744}, - dictWord{4, 0, 410}, - dictWord{7, 0, 521}, - dictWord{133, 10, 828}, - dictWord{134, 0, 673}, - dictWord{7, 0, 1110}, - dictWord{7, 0, 1778}, - dictWord{7, 10, 176}, - dictWord{135, 10, 178}, - dictWord{5, 10, 806}, - dictWord{7, 11, 268}, - dictWord{7, 10, 1976}, - dictWord{ - 136, - 11, - 569, - }, - dictWord{4, 11, 733}, - dictWord{9, 11, 194}, - dictWord{10, 11, 92}, - dictWord{11, 11, 198}, - dictWord{12, 11, 84}, - dictWord{12, 11, 87}, - dictWord{ - 13, - 11, - 128, - }, - dictWord{144, 11, 74}, - dictWord{5, 0, 341}, - dictWord{7, 0, 1129}, - dictWord{11, 0, 414}, - dictWord{4, 10, 51}, - dictWord{6, 10, 4}, - dictWord{7, 10, 591}, - dictWord{7, 10, 849}, - dictWord{7, 10, 951}, - dictWord{7, 10, 1613}, - dictWord{7, 10, 1760}, - dictWord{7, 10, 1988}, - dictWord{9, 10, 434}, - dictWord{10, 10, 754}, - dictWord{11, 10, 25}, - dictWord{139, 10, 37}, - dictWord{133, 10, 902}, - dictWord{135, 10, 928}, - dictWord{135, 0, 787}, - dictWord{132, 0, 436}, - dictWord{ - 134, - 10, - 270, - }, - dictWord{7, 0, 1587}, - dictWord{135, 0, 1707}, - dictWord{6, 0, 377}, - dictWord{7, 0, 1025}, - dictWord{9, 0, 613}, - dictWord{145, 0, 104}, - dictWord{ - 7, - 11, - 982, - }, - dictWord{7, 11, 1361}, - dictWord{10, 11, 32}, - dictWord{143, 11, 56}, - dictWord{139, 0, 96}, - dictWord{132, 0, 451}, - dictWord{132, 10, 416}, - dictWord{ - 142, - 
10, - 372, - }, - dictWord{5, 10, 152}, - dictWord{5, 10, 197}, - dictWord{7, 11, 306}, - dictWord{7, 10, 340}, - dictWord{7, 10, 867}, - dictWord{10, 10, 548}, - dictWord{ - 10, - 10, - 581, - }, - dictWord{11, 10, 6}, - dictWord{12, 10, 3}, - dictWord{12, 10, 19}, - dictWord{14, 10, 110}, - dictWord{142, 10, 289}, - dictWord{134, 0, 680}, - dictWord{ - 134, - 11, - 609, - }, - dictWord{7, 0, 483}, - dictWord{7, 10, 190}, - dictWord{8, 10, 28}, - dictWord{8, 10, 141}, - dictWord{8, 10, 444}, - dictWord{8, 10, 811}, - dictWord{ - 9, - 10, - 468, - }, - dictWord{11, 10, 334}, - dictWord{12, 10, 24}, - dictWord{12, 10, 386}, - dictWord{140, 10, 576}, - dictWord{10, 0, 916}, - dictWord{133, 10, 757}, - dictWord{ - 5, - 10, - 721, - }, - dictWord{135, 10, 1553}, - dictWord{133, 11, 178}, - dictWord{134, 0, 937}, - dictWord{132, 10, 898}, - dictWord{133, 0, 739}, - dictWord{ - 147, - 0, - 82, - }, - dictWord{135, 0, 663}, - dictWord{146, 0, 128}, - dictWord{5, 10, 277}, - dictWord{141, 10, 247}, - dictWord{134, 0, 1087}, - dictWord{132, 10, 435}, - dictWord{ - 6, - 11, - 381, - }, - dictWord{7, 11, 645}, - dictWord{7, 11, 694}, - dictWord{136, 11, 546}, - dictWord{7, 0, 503}, - dictWord{135, 0, 1885}, - dictWord{6, 0, 1965}, - dictWord{ - 8, - 0, - 925, - }, - dictWord{138, 0, 955}, - dictWord{4, 0, 113}, - dictWord{5, 0, 163}, - dictWord{5, 0, 735}, - dictWord{7, 0, 1009}, - dictWord{9, 0, 9}, - dictWord{9, 0, 771}, - dictWord{12, 0, 90}, - dictWord{13, 0, 138}, - dictWord{13, 0, 410}, - dictWord{143, 0, 128}, - dictWord{4, 0, 324}, - dictWord{138, 0, 104}, - dictWord{7, 0, 460}, - dictWord{ - 5, - 10, - 265, - }, - dictWord{134, 10, 212}, - dictWord{133, 11, 105}, - dictWord{7, 11, 261}, - dictWord{7, 11, 1107}, - dictWord{7, 11, 1115}, - dictWord{7, 11, 1354}, - dictWord{7, 11, 1588}, - dictWord{7, 11, 1705}, - dictWord{7, 11, 1902}, - dictWord{9, 11, 465}, - dictWord{10, 11, 248}, - dictWord{10, 11, 349}, - dictWord{10, 11, 647}, - dictWord{11, 11, 527}, - 
dictWord{11, 11, 660}, - dictWord{11, 11, 669}, - dictWord{12, 11, 529}, - dictWord{141, 11, 305}, - dictWord{5, 11, 438}, - dictWord{ - 9, - 11, - 694, - }, - dictWord{12, 11, 627}, - dictWord{141, 11, 210}, - dictWord{152, 11, 11}, - dictWord{4, 0, 935}, - dictWord{133, 0, 823}, - dictWord{132, 10, 702}, - dictWord{ - 5, - 0, - 269, - }, - dictWord{7, 0, 434}, - dictWord{7, 0, 891}, - dictWord{8, 0, 339}, - dictWord{9, 0, 702}, - dictWord{11, 0, 594}, - dictWord{11, 0, 718}, - dictWord{17, 0, 100}, - dictWord{5, 10, 808}, - dictWord{135, 10, 2045}, - dictWord{7, 0, 1014}, - dictWord{9, 0, 485}, - dictWord{141, 0, 264}, - dictWord{134, 0, 1713}, - dictWord{7, 0, 1810}, - dictWord{11, 0, 866}, - dictWord{12, 0, 103}, - dictWord{13, 0, 495}, - dictWord{140, 11, 233}, - dictWord{4, 0, 423}, - dictWord{10, 0, 949}, - dictWord{138, 0, 1013}, - dictWord{135, 0, 900}, - dictWord{8, 11, 25}, - dictWord{138, 11, 826}, - dictWord{5, 10, 166}, - dictWord{8, 10, 739}, - dictWord{140, 10, 511}, - dictWord{ - 134, - 0, - 2018, - }, - dictWord{7, 11, 1270}, - dictWord{139, 11, 612}, - dictWord{4, 10, 119}, - dictWord{5, 10, 170}, - dictWord{5, 10, 447}, - dictWord{7, 10, 1708}, - dictWord{ - 7, - 10, - 1889, - }, - dictWord{9, 10, 357}, - dictWord{9, 10, 719}, - dictWord{12, 10, 486}, - dictWord{140, 10, 596}, - dictWord{12, 0, 574}, - dictWord{140, 11, 574}, - dictWord{132, 11, 308}, - dictWord{6, 0, 964}, - dictWord{6, 0, 1206}, - dictWord{134, 0, 1302}, - dictWord{4, 10, 450}, - dictWord{135, 10, 1158}, - dictWord{ - 135, - 11, - 150, - }, - dictWord{136, 11, 649}, - dictWord{14, 0, 213}, - dictWord{148, 0, 38}, - dictWord{9, 11, 45}, - dictWord{9, 11, 311}, - dictWord{141, 11, 42}, - dictWord{ - 134, - 11, - 521, - }, - dictWord{7, 10, 1375}, - dictWord{7, 10, 1466}, - dictWord{138, 10, 331}, - dictWord{132, 10, 754}, - dictWord{5, 11, 339}, - dictWord{7, 11, 1442}, - dictWord{14, 11, 3}, - dictWord{15, 11, 41}, - dictWord{147, 11, 66}, - dictWord{136, 11, 378}, - 
dictWord{134, 0, 1022}, - dictWord{5, 10, 850}, - dictWord{136, 10, 799}, - dictWord{142, 0, 143}, - dictWord{135, 0, 2029}, - dictWord{134, 11, 1628}, - dictWord{8, 0, 523}, - dictWord{150, 0, 34}, - dictWord{5, 0, 625}, - dictWord{ - 135, - 0, - 1617, - }, - dictWord{7, 0, 275}, - dictWord{7, 10, 238}, - dictWord{7, 10, 2033}, - dictWord{8, 10, 120}, - dictWord{8, 10, 188}, - dictWord{8, 10, 659}, - dictWord{ - 9, - 10, - 598, - }, - dictWord{10, 10, 466}, - dictWord{12, 10, 342}, - dictWord{12, 10, 588}, - dictWord{13, 10, 503}, - dictWord{14, 10, 246}, - dictWord{143, 10, 92}, - dictWord{ - 7, - 0, - 37, - }, - dictWord{8, 0, 425}, - dictWord{8, 0, 693}, - dictWord{9, 0, 720}, - dictWord{10, 0, 380}, - dictWord{10, 0, 638}, - dictWord{11, 0, 273}, - dictWord{11, 0, 473}, - dictWord{12, 0, 61}, - dictWord{143, 0, 43}, - dictWord{135, 11, 829}, - dictWord{135, 0, 1943}, - dictWord{132, 0, 765}, - dictWord{5, 11, 486}, - dictWord{ - 135, - 11, - 1349, - }, - dictWord{7, 11, 1635}, - dictWord{8, 11, 17}, - dictWord{10, 11, 217}, - dictWord{138, 11, 295}, - dictWord{4, 10, 201}, - dictWord{7, 10, 1744}, - dictWord{ - 8, - 10, - 602, - }, - dictWord{11, 10, 247}, - dictWord{11, 10, 826}, - dictWord{145, 10, 65}, - dictWord{138, 11, 558}, - dictWord{11, 0, 551}, - dictWord{142, 0, 159}, - dictWord{8, 10, 164}, - dictWord{146, 10, 62}, - dictWord{139, 11, 176}, - dictWord{132, 0, 168}, - dictWord{136, 0, 1010}, - dictWord{134, 0, 1994}, - dictWord{ - 135, - 0, - 91, - }, - dictWord{138, 0, 532}, - dictWord{135, 10, 1243}, - dictWord{135, 0, 1884}, - dictWord{132, 10, 907}, - dictWord{5, 10, 100}, - dictWord{10, 10, 329}, - dictWord{12, 10, 416}, - dictWord{149, 10, 29}, - dictWord{134, 11, 447}, - dictWord{132, 10, 176}, - dictWord{5, 10, 636}, - dictWord{5, 10, 998}, - dictWord{7, 10, 9}, - dictWord{7, 10, 1508}, - dictWord{8, 10, 26}, - dictWord{9, 10, 317}, - dictWord{9, 10, 358}, - dictWord{10, 10, 210}, - dictWord{10, 10, 292}, - dictWord{10, 10, 533}, - 
dictWord{11, 10, 555}, - dictWord{12, 10, 526}, - dictWord{12, 10, 607}, - dictWord{13, 10, 263}, - dictWord{13, 10, 459}, - dictWord{142, 10, 271}, - dictWord{ - 4, - 11, - 609, - }, - dictWord{135, 11, 756}, - dictWord{6, 0, 15}, - dictWord{7, 0, 70}, - dictWord{10, 0, 240}, - dictWord{147, 0, 93}, - dictWord{4, 11, 930}, - dictWord{133, 11, 947}, - dictWord{134, 0, 1227}, - dictWord{134, 0, 1534}, - dictWord{133, 11, 939}, - dictWord{133, 11, 962}, - dictWord{5, 11, 651}, - dictWord{8, 11, 170}, - dictWord{ - 9, - 11, - 61, - }, - dictWord{9, 11, 63}, - dictWord{10, 11, 23}, - dictWord{10, 11, 37}, - dictWord{10, 11, 834}, - dictWord{11, 11, 4}, - dictWord{11, 11, 187}, - dictWord{ - 11, - 11, - 281, - }, - dictWord{11, 11, 503}, - dictWord{11, 11, 677}, - dictWord{12, 11, 96}, - dictWord{12, 11, 130}, - dictWord{12, 11, 244}, - dictWord{14, 11, 5}, - dictWord{ - 14, - 11, - 40, - }, - dictWord{14, 11, 162}, - dictWord{14, 11, 202}, - dictWord{146, 11, 133}, - dictWord{4, 11, 406}, - dictWord{5, 11, 579}, - dictWord{12, 11, 492}, - dictWord{ - 150, - 11, - 15, - }, - dictWord{139, 0, 392}, - dictWord{6, 10, 610}, - dictWord{10, 10, 127}, - dictWord{141, 10, 27}, - dictWord{7, 0, 655}, - dictWord{7, 0, 1844}, - dictWord{ - 136, - 10, - 119, - }, - dictWord{4, 0, 145}, - dictWord{6, 0, 176}, - dictWord{7, 0, 395}, - dictWord{137, 0, 562}, - dictWord{132, 0, 501}, - dictWord{140, 11, 145}, - dictWord{ - 136, - 0, - 1019, - }, - dictWord{134, 0, 509}, - dictWord{139, 0, 267}, - dictWord{6, 11, 17}, - dictWord{7, 11, 16}, - dictWord{7, 11, 1001}, - dictWord{7, 11, 1982}, - dictWord{ - 9, - 11, - 886, - }, - dictWord{10, 11, 489}, - dictWord{10, 11, 800}, - dictWord{11, 11, 782}, - dictWord{12, 11, 320}, - dictWord{13, 11, 467}, - dictWord{14, 11, 145}, - dictWord{14, 11, 387}, - dictWord{143, 11, 119}, - dictWord{145, 11, 17}, - dictWord{6, 0, 1099}, - dictWord{133, 11, 458}, - dictWord{7, 11, 1983}, - dictWord{8, 11, 0}, - dictWord{8, 11, 171}, - dictWord{9, 11, 
120}, - dictWord{9, 11, 732}, - dictWord{10, 11, 473}, - dictWord{11, 11, 656}, - dictWord{11, 11, 998}, - dictWord{18, 11, 0}, - dictWord{18, 11, 2}, - dictWord{147, 11, 21}, - dictWord{12, 11, 427}, - dictWord{146, 11, 38}, - dictWord{10, 0, 948}, - dictWord{138, 0, 968}, - dictWord{7, 10, 126}, - dictWord{136, 10, 84}, - dictWord{136, 10, 790}, - dictWord{4, 0, 114}, - dictWord{9, 0, 492}, - dictWord{13, 0, 462}, - dictWord{142, 0, 215}, - dictWord{6, 10, 64}, - dictWord{12, 10, 377}, - dictWord{141, 10, 309}, - dictWord{4, 0, 77}, - dictWord{5, 0, 361}, - dictWord{6, 0, 139}, - dictWord{6, 0, 401}, - dictWord{6, 0, 404}, - dictWord{ - 7, - 0, - 413, - }, - dictWord{7, 0, 715}, - dictWord{7, 0, 1716}, - dictWord{11, 0, 279}, - dictWord{12, 0, 179}, - dictWord{12, 0, 258}, - dictWord{13, 0, 244}, - dictWord{142, 0, 358}, - dictWord{134, 0, 1717}, - dictWord{7, 0, 772}, - dictWord{7, 0, 1061}, - dictWord{7, 0, 1647}, - dictWord{8, 0, 82}, - dictWord{11, 0, 250}, - dictWord{11, 0, 607}, - dictWord{12, 0, 311}, - dictWord{12, 0, 420}, - dictWord{13, 0, 184}, - dictWord{13, 0, 367}, - dictWord{7, 10, 1104}, - dictWord{11, 10, 269}, - dictWord{11, 10, 539}, - dictWord{11, 10, 627}, - dictWord{11, 10, 706}, - dictWord{11, 10, 975}, - dictWord{12, 10, 248}, - dictWord{12, 10, 434}, - dictWord{12, 10, 600}, - dictWord{ - 12, - 10, - 622, - }, - dictWord{13, 10, 297}, - dictWord{13, 10, 485}, - dictWord{14, 10, 69}, - dictWord{14, 10, 409}, - dictWord{143, 10, 108}, - dictWord{135, 0, 724}, - dictWord{ - 4, - 11, - 512, - }, - dictWord{4, 11, 519}, - dictWord{133, 11, 342}, - dictWord{134, 0, 1133}, - dictWord{145, 11, 29}, - dictWord{11, 10, 977}, - dictWord{141, 10, 507}, - dictWord{6, 0, 841}, - dictWord{6, 0, 1042}, - dictWord{6, 0, 1194}, - dictWord{10, 0, 993}, - dictWord{140, 0, 1021}, - dictWord{6, 11, 31}, - dictWord{7, 11, 491}, - dictWord{7, 11, 530}, - dictWord{8, 11, 592}, - dictWord{9, 10, 34}, - dictWord{11, 11, 53}, - dictWord{11, 10, 484}, - dictWord{11, 
11, 779}, - dictWord{12, 11, 167}, - dictWord{12, 11, 411}, - dictWord{14, 11, 14}, - dictWord{14, 11, 136}, - dictWord{15, 11, 72}, - dictWord{16, 11, 17}, - dictWord{144, 11, 72}, - dictWord{4, 0, 1021}, - dictWord{6, 0, 2037}, - dictWord{133, 11, 907}, - dictWord{7, 0, 373}, - dictWord{8, 0, 335}, - dictWord{8, 0, 596}, - dictWord{9, 0, 488}, - dictWord{6, 10, 1700}, - dictWord{ - 7, - 10, - 293, - }, - dictWord{7, 10, 382}, - dictWord{7, 10, 1026}, - dictWord{7, 10, 1087}, - dictWord{7, 10, 2027}, - dictWord{8, 10, 252}, - dictWord{8, 10, 727}, - dictWord{ - 8, - 10, - 729, - }, - dictWord{9, 10, 30}, - dictWord{9, 10, 199}, - dictWord{9, 10, 231}, - dictWord{9, 10, 251}, - dictWord{9, 10, 334}, - dictWord{9, 10, 361}, - dictWord{9, 10, 712}, - dictWord{10, 10, 55}, - dictWord{10, 10, 60}, - dictWord{10, 10, 232}, - dictWord{10, 10, 332}, - dictWord{10, 10, 384}, - dictWord{10, 10, 396}, - dictWord{ - 10, - 10, - 504, - }, - dictWord{10, 10, 542}, - dictWord{10, 10, 652}, - dictWord{11, 10, 20}, - dictWord{11, 10, 48}, - dictWord{11, 10, 207}, - dictWord{11, 10, 291}, - dictWord{ - 11, - 10, - 298, - }, - dictWord{11, 10, 342}, - dictWord{11, 10, 365}, - dictWord{11, 10, 394}, - dictWord{11, 10, 620}, - dictWord{11, 10, 705}, - dictWord{11, 10, 1017}, - dictWord{12, 10, 123}, - dictWord{12, 10, 340}, - dictWord{12, 10, 406}, - dictWord{12, 10, 643}, - dictWord{13, 10, 61}, - dictWord{13, 10, 269}, - dictWord{ - 13, - 10, - 311, - }, - dictWord{13, 10, 319}, - dictWord{13, 10, 486}, - dictWord{14, 10, 234}, - dictWord{15, 10, 62}, - dictWord{15, 10, 85}, - dictWord{16, 10, 71}, - dictWord{ - 18, - 10, - 119, - }, - dictWord{148, 10, 105}, - dictWord{150, 0, 37}, - dictWord{4, 11, 208}, - dictWord{5, 11, 106}, - dictWord{6, 11, 531}, - dictWord{8, 11, 408}, - dictWord{ - 9, - 11, - 188, - }, - dictWord{138, 11, 572}, - dictWord{132, 0, 564}, - dictWord{6, 0, 513}, - dictWord{135, 0, 1052}, - dictWord{132, 0, 825}, - dictWord{9, 0, 899}, - dictWord{ - 140, - 11, - 
441, - }, - dictWord{134, 0, 778}, - dictWord{133, 11, 379}, - dictWord{7, 0, 1417}, - dictWord{12, 0, 382}, - dictWord{17, 0, 48}, - dictWord{152, 0, 12}, - dictWord{ - 132, - 11, - 241, - }, - dictWord{7, 0, 1116}, - dictWord{6, 10, 379}, - dictWord{7, 10, 270}, - dictWord{8, 10, 176}, - dictWord{8, 10, 183}, - dictWord{9, 10, 432}, - dictWord{ - 9, - 10, - 661, - }, - dictWord{12, 10, 247}, - dictWord{12, 10, 617}, - dictWord{146, 10, 125}, - dictWord{5, 10, 792}, - dictWord{133, 10, 900}, - dictWord{6, 0, 545}, - dictWord{ - 7, - 0, - 565, - }, - dictWord{7, 0, 1669}, - dictWord{10, 0, 114}, - dictWord{11, 0, 642}, - dictWord{140, 0, 618}, - dictWord{133, 0, 5}, - dictWord{138, 11, 7}, - dictWord{ - 132, - 11, - 259, - }, - dictWord{135, 0, 192}, - dictWord{134, 0, 701}, - dictWord{136, 0, 763}, - dictWord{135, 10, 1979}, - dictWord{4, 10, 901}, - dictWord{133, 10, 776}, - dictWord{10, 0, 755}, - dictWord{147, 0, 29}, - dictWord{133, 0, 759}, - dictWord{4, 11, 173}, - dictWord{5, 11, 312}, - dictWord{5, 11, 512}, - dictWord{135, 11, 1285}, - dictWord{7, 11, 1603}, - dictWord{7, 11, 1691}, - dictWord{9, 11, 464}, - dictWord{11, 11, 195}, - dictWord{12, 11, 279}, - dictWord{12, 11, 448}, - dictWord{ - 14, - 11, - 11, - }, - dictWord{147, 11, 102}, - dictWord{7, 0, 370}, - dictWord{7, 0, 1007}, - dictWord{7, 0, 1177}, - dictWord{135, 0, 1565}, - dictWord{135, 0, 1237}, - dictWord{ - 4, - 0, - 87, - }, - dictWord{5, 0, 250}, - dictWord{141, 0, 298}, - dictWord{4, 11, 452}, - dictWord{5, 11, 583}, - dictWord{5, 11, 817}, - dictWord{6, 11, 433}, - dictWord{7, 11, 593}, - dictWord{7, 11, 720}, - dictWord{7, 11, 1378}, - dictWord{8, 11, 161}, - dictWord{9, 11, 284}, - dictWord{10, 11, 313}, - dictWord{139, 11, 886}, - dictWord{4, 11, 547}, - dictWord{135, 11, 1409}, - dictWord{136, 11, 722}, - dictWord{4, 10, 37}, - dictWord{5, 10, 334}, - dictWord{135, 10, 1253}, - dictWord{132, 10, 508}, - dictWord{ - 12, - 0, - 107, - }, - dictWord{146, 0, 31}, - dictWord{8, 11, 
420}, - dictWord{139, 11, 193}, - dictWord{135, 0, 814}, - dictWord{135, 11, 409}, - dictWord{140, 0, 991}, - dictWord{4, 0, 57}, - dictWord{7, 0, 1195}, - dictWord{7, 0, 1438}, - dictWord{7, 0, 1548}, - dictWord{7, 0, 1835}, - dictWord{7, 0, 1904}, - dictWord{9, 0, 757}, - dictWord{ - 10, - 0, - 604, - }, - dictWord{139, 0, 519}, - dictWord{132, 0, 540}, - dictWord{138, 11, 308}, - dictWord{132, 10, 533}, - dictWord{136, 0, 608}, - dictWord{144, 11, 65}, - dictWord{4, 0, 1014}, - dictWord{134, 0, 2029}, - dictWord{4, 0, 209}, - dictWord{7, 0, 902}, - dictWord{5, 11, 1002}, - dictWord{136, 11, 745}, - dictWord{134, 0, 2030}, - dictWord{6, 0, 303}, - dictWord{7, 0, 335}, - dictWord{7, 0, 1437}, - dictWord{7, 0, 1668}, - dictWord{8, 0, 553}, - dictWord{8, 0, 652}, - dictWord{8, 0, 656}, - dictWord{ - 9, - 0, - 558, - }, - dictWord{11, 0, 743}, - dictWord{149, 0, 18}, - dictWord{5, 11, 575}, - dictWord{6, 11, 354}, - dictWord{135, 11, 701}, - dictWord{4, 11, 239}, - dictWord{ - 6, - 11, - 477, - }, - dictWord{7, 11, 1607}, - dictWord{11, 11, 68}, - dictWord{139, 11, 617}, - dictWord{132, 0, 559}, - dictWord{8, 0, 527}, - dictWord{18, 0, 60}, - dictWord{ - 147, - 0, - 24, - }, - dictWord{133, 10, 920}, - dictWord{138, 0, 511}, - dictWord{133, 0, 1017}, - dictWord{133, 0, 675}, - dictWord{138, 10, 391}, - dictWord{11, 0, 156}, - dictWord{135, 10, 1952}, - dictWord{138, 11, 369}, - dictWord{132, 11, 367}, - dictWord{133, 0, 709}, - dictWord{6, 0, 698}, - dictWord{134, 0, 887}, - dictWord{ - 142, - 10, - 126, - }, - dictWord{134, 0, 1745}, - dictWord{132, 10, 483}, - dictWord{13, 11, 299}, - dictWord{142, 11, 75}, - dictWord{133, 0, 714}, - dictWord{7, 0, 8}, - dictWord{ - 136, - 0, - 206, - }, - dictWord{138, 10, 480}, - dictWord{4, 11, 694}, - dictWord{9, 10, 495}, - dictWord{146, 10, 104}, - dictWord{7, 11, 1248}, - dictWord{11, 11, 621}, - dictWord{139, 11, 702}, - dictWord{140, 11, 687}, - dictWord{132, 0, 776}, - dictWord{139, 10, 1009}, - dictWord{135, 0, 1272}, - 
dictWord{134, 0, 1059}, - dictWord{ - 8, - 10, - 653, - }, - dictWord{13, 10, 93}, - dictWord{147, 10, 14}, - dictWord{135, 11, 213}, - dictWord{136, 0, 406}, - dictWord{133, 10, 172}, - dictWord{132, 0, 947}, - dictWord{8, 0, 175}, - dictWord{10, 0, 168}, - dictWord{138, 0, 573}, - dictWord{132, 0, 870}, - dictWord{6, 0, 1567}, - dictWord{151, 11, 28}, - dictWord{ - 134, - 11, - 472, - }, - dictWord{5, 10, 260}, - dictWord{136, 11, 132}, - dictWord{4, 11, 751}, - dictWord{11, 11, 390}, - dictWord{140, 11, 32}, - dictWord{4, 11, 409}, - dictWord{ - 133, - 11, - 78, - }, - dictWord{12, 0, 554}, - dictWord{6, 11, 473}, - dictWord{145, 11, 105}, - dictWord{133, 0, 784}, - dictWord{8, 0, 908}, - dictWord{136, 11, 306}, - dictWord{139, 0, 882}, - dictWord{6, 0, 358}, - dictWord{7, 0, 1393}, - dictWord{8, 0, 396}, - dictWord{10, 0, 263}, - dictWord{14, 0, 154}, - dictWord{16, 0, 48}, - dictWord{ - 17, - 0, - 8, - }, - dictWord{7, 11, 1759}, - dictWord{8, 11, 396}, - dictWord{10, 11, 263}, - dictWord{14, 11, 154}, - dictWord{16, 11, 48}, - dictWord{145, 11, 8}, - dictWord{ - 13, - 11, - 163, - }, - dictWord{13, 11, 180}, - dictWord{18, 11, 78}, - dictWord{148, 11, 35}, - dictWord{14, 0, 32}, - dictWord{18, 0, 85}, - dictWord{20, 0, 2}, - dictWord{152, 0, 16}, - dictWord{7, 0, 228}, - dictWord{10, 0, 770}, - dictWord{8, 10, 167}, - dictWord{8, 10, 375}, - dictWord{9, 10, 82}, - dictWord{9, 10, 561}, - dictWord{138, 10, 620}, - dictWord{132, 0, 845}, - dictWord{9, 0, 14}, - dictWord{9, 0, 441}, - dictWord{10, 0, 306}, - dictWord{139, 0, 9}, - dictWord{11, 0, 966}, - dictWord{12, 0, 287}, - dictWord{ - 13, - 0, - 342, - }, - dictWord{13, 0, 402}, - dictWord{15, 0, 110}, - dictWord{15, 0, 163}, - dictWord{8, 10, 194}, - dictWord{136, 10, 756}, - dictWord{134, 0, 1578}, - dictWord{ - 4, - 0, - 967, - }, - dictWord{6, 0, 1820}, - dictWord{6, 0, 1847}, - dictWord{140, 0, 716}, - dictWord{136, 0, 594}, - dictWord{7, 0, 1428}, - dictWord{7, 0, 1640}, - dictWord{ - 7, - 0, - 1867, 
- }, - dictWord{9, 0, 169}, - dictWord{9, 0, 182}, - dictWord{9, 0, 367}, - dictWord{9, 0, 478}, - dictWord{9, 0, 506}, - dictWord{9, 0, 551}, - dictWord{9, 0, 557}, - dictWord{ - 9, - 0, - 648, - }, - dictWord{9, 0, 697}, - dictWord{9, 0, 705}, - dictWord{9, 0, 725}, - dictWord{9, 0, 787}, - dictWord{9, 0, 794}, - dictWord{10, 0, 198}, - dictWord{10, 0, 214}, - dictWord{10, 0, 267}, - dictWord{10, 0, 275}, - dictWord{10, 0, 456}, - dictWord{10, 0, 551}, - dictWord{10, 0, 561}, - dictWord{10, 0, 613}, - dictWord{10, 0, 627}, - dictWord{ - 10, - 0, - 668, - }, - dictWord{10, 0, 675}, - dictWord{10, 0, 691}, - dictWord{10, 0, 695}, - dictWord{10, 0, 707}, - dictWord{10, 0, 715}, - dictWord{11, 0, 183}, - dictWord{ - 11, - 0, - 201, - }, - dictWord{11, 0, 244}, - dictWord{11, 0, 262}, - dictWord{11, 0, 352}, - dictWord{11, 0, 439}, - dictWord{11, 0, 493}, - dictWord{11, 0, 572}, - dictWord{11, 0, 591}, - dictWord{11, 0, 608}, - dictWord{11, 0, 611}, - dictWord{11, 0, 646}, - dictWord{11, 0, 674}, - dictWord{11, 0, 711}, - dictWord{11, 0, 751}, - dictWord{11, 0, 761}, - dictWord{11, 0, 776}, - dictWord{11, 0, 785}, - dictWord{11, 0, 850}, - dictWord{11, 0, 853}, - dictWord{11, 0, 862}, - dictWord{11, 0, 865}, - dictWord{11, 0, 868}, - dictWord{ - 11, - 0, - 875, - }, - dictWord{11, 0, 898}, - dictWord{11, 0, 902}, - dictWord{11, 0, 903}, - dictWord{11, 0, 910}, - dictWord{11, 0, 932}, - dictWord{11, 0, 942}, - dictWord{ - 11, - 0, - 957, - }, - dictWord{11, 0, 967}, - dictWord{11, 0, 972}, - dictWord{12, 0, 148}, - dictWord{12, 0, 195}, - dictWord{12, 0, 220}, - dictWord{12, 0, 237}, - dictWord{12, 0, 318}, - dictWord{12, 0, 339}, - dictWord{12, 0, 393}, - dictWord{12, 0, 445}, - dictWord{12, 0, 450}, - dictWord{12, 0, 474}, - dictWord{12, 0, 505}, - dictWord{12, 0, 509}, - dictWord{12, 0, 533}, - dictWord{12, 0, 591}, - dictWord{12, 0, 594}, - dictWord{12, 0, 597}, - dictWord{12, 0, 621}, - dictWord{12, 0, 633}, - dictWord{12, 0, 642}, - dictWord{ - 13, - 0, - 59, - 
}, - dictWord{13, 0, 60}, - dictWord{13, 0, 145}, - dictWord{13, 0, 239}, - dictWord{13, 0, 250}, - dictWord{13, 0, 329}, - dictWord{13, 0, 344}, - dictWord{13, 0, 365}, - dictWord{13, 0, 372}, - dictWord{13, 0, 387}, - dictWord{13, 0, 403}, - dictWord{13, 0, 414}, - dictWord{13, 0, 456}, - dictWord{13, 0, 470}, - dictWord{13, 0, 478}, - dictWord{13, 0, 483}, - dictWord{13, 0, 489}, - dictWord{14, 0, 55}, - dictWord{14, 0, 57}, - dictWord{14, 0, 81}, - dictWord{14, 0, 90}, - dictWord{14, 0, 148}, - dictWord{ - 14, - 0, - 239, - }, - dictWord{14, 0, 266}, - dictWord{14, 0, 321}, - dictWord{14, 0, 326}, - dictWord{14, 0, 327}, - dictWord{14, 0, 330}, - dictWord{14, 0, 347}, - dictWord{14, 0, 355}, - dictWord{14, 0, 401}, - dictWord{14, 0, 404}, - dictWord{14, 0, 411}, - dictWord{14, 0, 414}, - dictWord{14, 0, 416}, - dictWord{14, 0, 420}, - dictWord{15, 0, 61}, - dictWord{15, 0, 74}, - dictWord{15, 0, 87}, - dictWord{15, 0, 88}, - dictWord{15, 0, 94}, - dictWord{15, 0, 96}, - dictWord{15, 0, 116}, - dictWord{15, 0, 149}, - dictWord{15, 0, 154}, - dictWord{16, 0, 50}, - dictWord{16, 0, 63}, - dictWord{16, 0, 73}, - dictWord{17, 0, 2}, - dictWord{17, 0, 66}, - dictWord{17, 0, 92}, - dictWord{17, 0, 103}, - dictWord{ - 17, - 0, - 112, - }, - dictWord{17, 0, 120}, - dictWord{18, 0, 50}, - dictWord{18, 0, 54}, - dictWord{18, 0, 82}, - dictWord{18, 0, 86}, - dictWord{18, 0, 90}, - dictWord{18, 0, 111}, - dictWord{ - 18, - 0, - 115, - }, - dictWord{18, 0, 156}, - dictWord{19, 0, 40}, - dictWord{19, 0, 79}, - dictWord{20, 0, 78}, - dictWord{21, 0, 22}, - dictWord{135, 11, 883}, - dictWord{5, 0, 161}, - dictWord{135, 0, 839}, - dictWord{4, 0, 782}, - dictWord{13, 11, 293}, - dictWord{142, 11, 56}, - dictWord{133, 11, 617}, - dictWord{139, 11, 50}, - dictWord{ - 135, - 10, - 22, - }, - dictWord{145, 0, 64}, - dictWord{5, 10, 639}, - dictWord{7, 10, 1249}, - dictWord{139, 10, 896}, - dictWord{138, 0, 998}, - dictWord{135, 11, 2042}, - dictWord{ - 4, - 11, - 546, - }, - 
dictWord{142, 11, 233}, - dictWord{6, 0, 1043}, - dictWord{134, 0, 1574}, - dictWord{134, 0, 1496}, - dictWord{4, 10, 102}, - dictWord{7, 10, 815}, - dictWord{7, 10, 1699}, - dictWord{139, 10, 964}, - dictWord{12, 0, 781}, - dictWord{142, 0, 461}, - dictWord{4, 11, 313}, - dictWord{133, 11, 577}, - dictWord{ - 6, - 0, - 639, - }, - dictWord{6, 0, 1114}, - dictWord{137, 0, 817}, - dictWord{8, 11, 184}, - dictWord{141, 11, 433}, - dictWord{7, 0, 1814}, - dictWord{135, 11, 935}, - dictWord{ - 10, - 0, - 997, - }, - dictWord{140, 0, 958}, - dictWord{4, 0, 812}, - dictWord{137, 11, 625}, - dictWord{132, 10, 899}, - dictWord{136, 10, 795}, - dictWord{5, 11, 886}, - dictWord{6, 11, 46}, - dictWord{6, 11, 1790}, - dictWord{7, 11, 14}, - dictWord{7, 11, 732}, - dictWord{7, 11, 1654}, - dictWord{8, 11, 95}, - dictWord{8, 11, 327}, - dictWord{ - 8, - 11, - 616, - }, - dictWord{10, 11, 598}, - dictWord{10, 11, 769}, - dictWord{11, 11, 134}, - dictWord{11, 11, 747}, - dictWord{12, 11, 378}, - dictWord{142, 11, 97}, - dictWord{136, 0, 139}, - dictWord{6, 10, 52}, - dictWord{9, 10, 104}, - dictWord{9, 10, 559}, - dictWord{12, 10, 308}, - dictWord{147, 10, 87}, - dictWord{133, 11, 1021}, - dictWord{132, 10, 604}, - dictWord{132, 10, 301}, - dictWord{136, 10, 779}, - dictWord{7, 0, 643}, - dictWord{136, 0, 236}, - dictWord{132, 11, 153}, - dictWord{ - 134, - 0, - 1172, - }, - dictWord{147, 10, 32}, - dictWord{133, 11, 798}, - dictWord{6, 0, 1338}, - dictWord{132, 11, 587}, - dictWord{6, 11, 598}, - dictWord{7, 11, 42}, - dictWord{ - 8, - 11, - 695, - }, - dictWord{10, 11, 212}, - dictWord{11, 11, 158}, - dictWord{14, 11, 196}, - dictWord{145, 11, 85}, - dictWord{135, 10, 508}, - dictWord{5, 11, 957}, - dictWord{5, 11, 1008}, - dictWord{135, 11, 249}, - dictWord{4, 11, 129}, - dictWord{135, 11, 465}, - dictWord{5, 0, 54}, - dictWord{7, 11, 470}, - dictWord{7, 11, 1057}, - dictWord{7, 11, 1201}, - dictWord{9, 11, 755}, - dictWord{11, 11, 906}, - dictWord{140, 11, 527}, - dictWord{7, 
11, 908}, - dictWord{146, 11, 7}, - dictWord{ - 5, - 11, - 148, - }, - dictWord{136, 11, 450}, - dictWord{144, 11, 1}, - dictWord{4, 0, 256}, - dictWord{135, 0, 1488}, - dictWord{9, 0, 351}, - dictWord{6, 10, 310}, - dictWord{ - 7, - 10, - 1849, - }, - dictWord{8, 10, 72}, - dictWord{8, 10, 272}, - dictWord{8, 10, 431}, - dictWord{9, 10, 12}, - dictWord{10, 10, 563}, - dictWord{10, 10, 630}, - dictWord{ - 10, - 10, - 796, - }, - dictWord{10, 10, 810}, - dictWord{11, 10, 367}, - dictWord{11, 10, 599}, - dictWord{11, 10, 686}, - dictWord{140, 10, 672}, - dictWord{6, 0, 1885}, - dictWord{ - 6, - 0, - 1898, - }, - dictWord{6, 0, 1899}, - dictWord{140, 0, 955}, - dictWord{4, 0, 714}, - dictWord{133, 0, 469}, - dictWord{6, 0, 1270}, - dictWord{134, 0, 1456}, - dictWord{132, 0, 744}, - dictWord{6, 0, 313}, - dictWord{7, 10, 537}, - dictWord{8, 10, 64}, - dictWord{9, 10, 127}, - dictWord{10, 10, 496}, - dictWord{12, 10, 510}, - dictWord{141, 10, 384}, - dictWord{4, 11, 217}, - dictWord{4, 10, 244}, - dictWord{5, 11, 710}, - dictWord{7, 10, 233}, - dictWord{7, 11, 1926}, - dictWord{9, 11, 428}, - dictWord{9, 11, 708}, - dictWord{10, 11, 254}, - dictWord{10, 11, 296}, - dictWord{10, 11, 720}, - dictWord{11, 11, 109}, - dictWord{11, 11, 255}, - dictWord{12, 11, 165}, - dictWord{12, 11, 315}, - dictWord{13, 11, 107}, - dictWord{13, 11, 203}, - dictWord{14, 11, 54}, - dictWord{14, 11, 99}, - dictWord{14, 11, 114}, - dictWord{ - 14, - 11, - 388, - }, - dictWord{16, 11, 85}, - dictWord{17, 11, 9}, - dictWord{17, 11, 33}, - dictWord{20, 11, 25}, - dictWord{20, 11, 28}, - dictWord{20, 11, 29}, - dictWord{21, 11, 9}, - dictWord{21, 11, 10}, - dictWord{21, 11, 34}, - dictWord{150, 11, 17}, - dictWord{138, 0, 402}, - dictWord{7, 0, 969}, - dictWord{146, 0, 55}, - dictWord{8, 0, 50}, - dictWord{ - 137, - 0, - 624, - }, - dictWord{134, 0, 1355}, - dictWord{132, 0, 572}, - dictWord{134, 10, 1650}, - dictWord{10, 10, 702}, - dictWord{139, 10, 245}, - dictWord{ - 10, - 0, - 847, - }, - 
dictWord{142, 0, 445}, - dictWord{6, 0, 43}, - dictWord{7, 0, 38}, - dictWord{8, 0, 248}, - dictWord{138, 0, 513}, - dictWord{133, 0, 369}, - dictWord{137, 10, 338}, - dictWord{133, 0, 766}, - dictWord{133, 0, 363}, - dictWord{133, 10, 896}, - dictWord{8, 11, 392}, - dictWord{11, 11, 54}, - dictWord{13, 11, 173}, - dictWord{ - 13, - 11, - 294, - }, - dictWord{148, 11, 7}, - dictWord{134, 0, 678}, - dictWord{7, 11, 1230}, - dictWord{136, 11, 531}, - dictWord{6, 0, 258}, - dictWord{140, 0, 409}, - dictWord{ - 5, - 0, - 249, - }, - dictWord{148, 0, 82}, - dictWord{7, 10, 1117}, - dictWord{136, 10, 539}, - dictWord{5, 0, 393}, - dictWord{6, 0, 378}, - dictWord{7, 0, 1981}, - dictWord{9, 0, 32}, - dictWord{9, 0, 591}, - dictWord{10, 0, 685}, - dictWord{10, 0, 741}, - dictWord{142, 0, 382}, - dictWord{133, 0, 788}, - dictWord{134, 0, 1281}, - dictWord{ - 134, - 0, - 1295, - }, - dictWord{7, 0, 1968}, - dictWord{141, 0, 509}, - dictWord{4, 0, 61}, - dictWord{5, 0, 58}, - dictWord{5, 0, 171}, - dictWord{5, 0, 683}, - dictWord{6, 0, 291}, - dictWord{ - 6, - 0, - 566, - }, - dictWord{7, 0, 1650}, - dictWord{11, 0, 523}, - dictWord{12, 0, 273}, - dictWord{12, 0, 303}, - dictWord{15, 0, 39}, - dictWord{143, 0, 111}, - dictWord{ - 6, - 0, - 706, - }, - dictWord{134, 0, 1283}, - dictWord{134, 0, 589}, - dictWord{135, 11, 1433}, - dictWord{133, 11, 435}, - dictWord{7, 0, 1059}, - dictWord{13, 0, 54}, - dictWord{ - 5, - 10, - 4, - }, - dictWord{5, 10, 810}, - dictWord{6, 10, 13}, - dictWord{6, 10, 538}, - dictWord{6, 10, 1690}, - dictWord{6, 10, 1726}, - dictWord{7, 10, 1819}, - dictWord{ - 8, - 10, - 148, - }, - dictWord{8, 10, 696}, - dictWord{8, 10, 791}, - dictWord{12, 10, 125}, - dictWord{143, 10, 9}, - dictWord{135, 10, 1268}, - dictWord{5, 11, 85}, - dictWord{ - 6, - 11, - 419, - }, - dictWord{7, 11, 134}, - dictWord{7, 11, 305}, - dictWord{7, 11, 361}, - dictWord{7, 11, 1337}, - dictWord{8, 11, 71}, - dictWord{140, 11, 519}, - dictWord{ - 137, - 0, - 824, - }, - 
dictWord{140, 11, 688}, - dictWord{5, 11, 691}, - dictWord{7, 11, 345}, - dictWord{7, 10, 1385}, - dictWord{9, 11, 94}, - dictWord{11, 10, 582}, - dictWord{ - 11, - 10, - 650, - }, - dictWord{11, 10, 901}, - dictWord{11, 10, 949}, - dictWord{12, 11, 169}, - dictWord{12, 10, 232}, - dictWord{12, 10, 236}, - dictWord{13, 10, 413}, - dictWord{13, 10, 501}, - dictWord{146, 10, 116}, - dictWord{4, 0, 917}, - dictWord{133, 0, 1005}, - dictWord{7, 0, 1598}, - dictWord{5, 11, 183}, - dictWord{6, 11, 582}, - dictWord{9, 11, 344}, - dictWord{10, 11, 679}, - dictWord{140, 11, 435}, - dictWord{4, 10, 925}, - dictWord{5, 10, 803}, - dictWord{8, 10, 698}, - dictWord{ - 138, - 10, - 828, - }, - dictWord{132, 0, 919}, - dictWord{135, 11, 511}, - dictWord{139, 10, 992}, - dictWord{4, 0, 255}, - dictWord{5, 0, 302}, - dictWord{6, 0, 132}, - dictWord{ - 7, - 0, - 128, - }, - dictWord{7, 0, 283}, - dictWord{7, 0, 1299}, - dictWord{10, 0, 52}, - dictWord{10, 0, 514}, - dictWord{11, 0, 925}, - dictWord{13, 0, 92}, - dictWord{142, 0, 309}, - dictWord{134, 0, 1369}, - dictWord{135, 10, 1847}, - dictWord{134, 0, 328}, - dictWord{7, 11, 1993}, - dictWord{136, 11, 684}, - dictWord{133, 10, 383}, - dictWord{137, 0, 173}, - dictWord{134, 11, 583}, - dictWord{134, 0, 1411}, - dictWord{19, 0, 65}, - dictWord{5, 11, 704}, - dictWord{8, 11, 357}, - dictWord{10, 11, 745}, - dictWord{14, 11, 426}, - dictWord{17, 11, 94}, - dictWord{147, 11, 57}, - dictWord{9, 10, 660}, - dictWord{138, 10, 347}, - dictWord{4, 11, 179}, - dictWord{5, 11, 198}, - dictWord{133, 11, 697}, - dictWord{7, 11, 347}, - dictWord{7, 11, 971}, - dictWord{8, 11, 181}, - dictWord{138, 11, 711}, - dictWord{141, 0, 442}, - dictWord{ - 11, - 0, - 842, - }, - dictWord{11, 0, 924}, - dictWord{13, 0, 317}, - dictWord{13, 0, 370}, - dictWord{13, 0, 469}, - dictWord{13, 0, 471}, - dictWord{14, 0, 397}, - dictWord{18, 0, 69}, - dictWord{18, 0, 145}, - dictWord{7, 10, 572}, - dictWord{9, 10, 592}, - dictWord{11, 10, 680}, - dictWord{12, 10, 
356}, - dictWord{140, 10, 550}, - dictWord{14, 11, 19}, - dictWord{14, 11, 28}, - dictWord{144, 11, 29}, - dictWord{136, 0, 534}, - dictWord{4, 11, 243}, - dictWord{5, 11, 203}, - dictWord{7, 11, 19}, - dictWord{7, 11, 71}, - dictWord{7, 11, 113}, - dictWord{10, 11, 405}, - dictWord{11, 11, 357}, - dictWord{142, 11, 240}, - dictWord{6, 0, 210}, - dictWord{10, 0, 845}, - dictWord{138, 0, 862}, - dictWord{7, 11, 1351}, - dictWord{9, 11, 581}, - dictWord{10, 11, 639}, - dictWord{11, 11, 453}, - dictWord{140, 11, 584}, - dictWord{7, 11, 1450}, - dictWord{ - 139, - 11, - 99, - }, - dictWord{10, 0, 892}, - dictWord{12, 0, 719}, - dictWord{144, 0, 105}, - dictWord{4, 0, 284}, - dictWord{6, 0, 223}, - dictWord{134, 11, 492}, - dictWord{5, 11, 134}, - dictWord{6, 11, 408}, - dictWord{6, 11, 495}, - dictWord{135, 11, 1593}, - dictWord{136, 0, 529}, - dictWord{137, 0, 807}, - dictWord{4, 0, 218}, - dictWord{7, 0, 526}, - dictWord{143, 0, 137}, - dictWord{6, 0, 1444}, - dictWord{142, 11, 4}, - dictWord{132, 11, 665}, - dictWord{4, 0, 270}, - dictWord{5, 0, 192}, - dictWord{6, 0, 332}, - dictWord{7, 0, 1322}, - dictWord{4, 11, 248}, - dictWord{7, 11, 137}, - dictWord{137, 11, 349}, - dictWord{140, 0, 661}, - dictWord{7, 0, 1517}, - dictWord{11, 0, 597}, - dictWord{14, 0, 76}, - dictWord{14, 0, 335}, - dictWord{20, 0, 33}, - dictWord{7, 10, 748}, - dictWord{139, 10, 700}, - dictWord{5, 11, 371}, - dictWord{135, 11, 563}, - dictWord{146, 11, 57}, - dictWord{133, 10, 127}, - dictWord{133, 0, 418}, - dictWord{4, 11, 374}, - dictWord{7, 11, 547}, - dictWord{7, 11, 1700}, - dictWord{7, 11, 1833}, - dictWord{139, 11, 858}, - dictWord{6, 10, 198}, - dictWord{140, 10, 83}, - dictWord{7, 11, 1812}, - dictWord{13, 11, 259}, - dictWord{13, 11, 356}, - dictWord{ - 14, - 11, - 242, - }, - dictWord{147, 11, 114}, - dictWord{7, 0, 379}, - dictWord{8, 0, 481}, - dictWord{9, 0, 377}, - dictWord{5, 10, 276}, - dictWord{6, 10, 55}, - dictWord{ - 135, - 10, - 1369, - }, - dictWord{138, 11, 286}, - 
dictWord{5, 0, 1003}, - dictWord{6, 0, 149}, - dictWord{6, 10, 1752}, - dictWord{136, 10, 726}, - dictWord{8, 0, 262}, - dictWord{ - 9, - 0, - 627, - }, - dictWord{10, 0, 18}, - dictWord{11, 0, 214}, - dictWord{11, 0, 404}, - dictWord{11, 0, 457}, - dictWord{11, 0, 780}, - dictWord{11, 0, 913}, - dictWord{13, 0, 401}, - dictWord{14, 0, 200}, - dictWord{6, 11, 1647}, - dictWord{7, 11, 1552}, - dictWord{7, 11, 2010}, - dictWord{9, 11, 494}, - dictWord{137, 11, 509}, - dictWord{ - 135, - 0, - 742, - }, - dictWord{136, 0, 304}, - dictWord{132, 0, 142}, - dictWord{133, 10, 764}, - dictWord{6, 10, 309}, - dictWord{7, 10, 331}, - dictWord{138, 10, 550}, - dictWord{135, 10, 1062}, - dictWord{6, 11, 123}, - dictWord{7, 11, 214}, - dictWord{7, 10, 986}, - dictWord{9, 11, 728}, - dictWord{10, 11, 157}, - dictWord{11, 11, 346}, - dictWord{11, 11, 662}, - dictWord{143, 11, 106}, - dictWord{135, 10, 1573}, - dictWord{7, 0, 925}, - dictWord{137, 0, 799}, - dictWord{4, 0, 471}, - dictWord{5, 0, 51}, - dictWord{6, 0, 602}, - dictWord{8, 0, 484}, - dictWord{138, 0, 195}, - dictWord{136, 0, 688}, - dictWord{132, 0, 697}, - dictWord{6, 0, 1169}, - dictWord{6, 0, 1241}, - dictWord{6, 10, 194}, - dictWord{7, 10, 133}, - dictWord{10, 10, 493}, - dictWord{10, 10, 570}, - dictWord{139, 10, 664}, - dictWord{140, 0, 751}, - dictWord{7, 0, 929}, - dictWord{10, 0, 452}, - dictWord{11, 0, 878}, - dictWord{16, 0, 33}, - dictWord{5, 10, 24}, - dictWord{5, 10, 569}, - dictWord{6, 10, 3}, - dictWord{6, 10, 119}, - dictWord{ - 6, - 10, - 143, - }, - dictWord{6, 10, 440}, - dictWord{7, 10, 599}, - dictWord{7, 10, 1686}, - dictWord{7, 10, 1854}, - dictWord{8, 10, 424}, - dictWord{9, 10, 43}, - dictWord{ - 9, - 10, - 584, - }, - dictWord{9, 10, 760}, - dictWord{10, 10, 328}, - dictWord{11, 10, 159}, - dictWord{11, 10, 253}, - dictWord{12, 10, 487}, - dictWord{140, 10, 531}, - dictWord{ - 4, - 11, - 707, - }, - dictWord{13, 11, 106}, - dictWord{18, 11, 49}, - dictWord{147, 11, 41}, - dictWord{5, 0, 
221}, - dictWord{5, 11, 588}, - dictWord{134, 11, 393}, - dictWord{134, 0, 1437}, - dictWord{6, 11, 211}, - dictWord{7, 11, 1690}, - dictWord{11, 11, 486}, - dictWord{140, 11, 369}, - dictWord{5, 10, 14}, - dictWord{5, 10, 892}, - dictWord{6, 10, 283}, - dictWord{7, 10, 234}, - dictWord{136, 10, 537}, - dictWord{4, 0, 988}, - dictWord{136, 0, 955}, - dictWord{135, 0, 1251}, - dictWord{4, 10, 126}, - dictWord{8, 10, 635}, - dictWord{147, 10, 34}, - dictWord{4, 10, 316}, - dictWord{135, 10, 1561}, - dictWord{137, 10, 861}, - dictWord{4, 10, 64}, - dictWord{ - 5, - 10, - 352, - }, - dictWord{5, 10, 720}, - dictWord{6, 10, 368}, - dictWord{139, 10, 359}, - dictWord{134, 0, 192}, - dictWord{4, 0, 132}, - dictWord{5, 0, 69}, - dictWord{ - 135, - 0, - 1242, - }, - dictWord{7, 10, 1577}, - dictWord{10, 10, 304}, - dictWord{10, 10, 549}, - dictWord{12, 10, 365}, - dictWord{13, 10, 220}, - dictWord{13, 10, 240}, - dictWord{142, 10, 33}, - dictWord{4, 0, 111}, - dictWord{7, 0, 865}, - dictWord{134, 11, 219}, - dictWord{5, 11, 582}, - dictWord{6, 11, 1646}, - dictWord{7, 11, 99}, - dictWord{ - 7, - 11, - 1962, - }, - dictWord{7, 11, 1986}, - dictWord{8, 11, 515}, - dictWord{8, 11, 773}, - dictWord{9, 11, 23}, - dictWord{9, 11, 491}, - dictWord{12, 11, 620}, - dictWord{ - 14, - 11, - 52, - }, - dictWord{145, 11, 50}, - dictWord{132, 0, 767}, - dictWord{7, 11, 568}, - dictWord{148, 11, 21}, - dictWord{6, 0, 42}, - dictWord{7, 0, 1416}, - dictWord{ - 7, - 0, - 2005, - }, - dictWord{8, 0, 131}, - dictWord{8, 0, 466}, - dictWord{9, 0, 672}, - dictWord{13, 0, 252}, - dictWord{20, 0, 103}, - dictWord{133, 11, 851}, - dictWord{ - 135, - 0, - 1050, - }, - dictWord{6, 10, 175}, - dictWord{137, 10, 289}, - dictWord{5, 10, 432}, - dictWord{133, 10, 913}, - dictWord{6, 0, 44}, - dictWord{136, 0, 368}, - dictWord{ - 135, - 11, - 784, - }, - dictWord{132, 0, 570}, - dictWord{133, 0, 120}, - dictWord{139, 10, 595}, - dictWord{140, 0, 29}, - dictWord{6, 0, 227}, - dictWord{135, 0, 1589}, - 
dictWord{4, 11, 98}, - dictWord{7, 11, 1365}, - dictWord{9, 11, 422}, - dictWord{9, 11, 670}, - dictWord{10, 11, 775}, - dictWord{11, 11, 210}, - dictWord{13, 11, 26}, - dictWord{13, 11, 457}, - dictWord{141, 11, 476}, - dictWord{140, 10, 80}, - dictWord{5, 10, 931}, - dictWord{134, 10, 1698}, - dictWord{133, 0, 522}, - dictWord{ - 134, - 0, - 1120, - }, - dictWord{135, 0, 1529}, - dictWord{12, 0, 739}, - dictWord{14, 0, 448}, - dictWord{142, 0, 467}, - dictWord{11, 10, 526}, - dictWord{11, 10, 939}, - dictWord{141, 10, 290}, - dictWord{5, 10, 774}, - dictWord{6, 10, 1637}, - dictWord{6, 10, 1686}, - dictWord{134, 10, 1751}, - dictWord{6, 0, 1667}, - dictWord{ - 135, - 0, - 2036, - }, - dictWord{7, 10, 1167}, - dictWord{11, 10, 934}, - dictWord{13, 10, 391}, - dictWord{145, 10, 76}, - dictWord{137, 11, 147}, - dictWord{6, 10, 260}, - dictWord{ - 7, - 10, - 1484, - }, - dictWord{11, 11, 821}, - dictWord{12, 11, 110}, - dictWord{12, 11, 153}, - dictWord{18, 11, 41}, - dictWord{150, 11, 19}, - dictWord{6, 0, 511}, - dictWord{12, 0, 132}, - dictWord{134, 10, 573}, - dictWord{5, 0, 568}, - dictWord{6, 0, 138}, - dictWord{135, 0, 1293}, - dictWord{132, 0, 1020}, - dictWord{8, 0, 258}, - dictWord{9, 0, 208}, - dictWord{137, 0, 359}, - dictWord{4, 0, 565}, - dictWord{8, 0, 23}, - dictWord{136, 0, 827}, - dictWord{134, 0, 344}, - dictWord{4, 0, 922}, - dictWord{ - 5, - 0, - 1023, - }, - dictWord{13, 11, 477}, - dictWord{14, 11, 120}, - dictWord{148, 11, 61}, - dictWord{134, 0, 240}, - dictWord{5, 11, 209}, - dictWord{6, 11, 30}, - dictWord{ - 11, - 11, - 56, - }, - dictWord{139, 11, 305}, - dictWord{6, 0, 171}, - dictWord{7, 0, 1002}, - dictWord{7, 0, 1324}, - dictWord{9, 0, 415}, - dictWord{14, 0, 230}, - dictWord{ - 18, - 0, - 68, - }, - dictWord{4, 10, 292}, - dictWord{4, 10, 736}, - dictWord{5, 10, 871}, - dictWord{6, 10, 1689}, - dictWord{7, 10, 1944}, - dictWord{137, 10, 580}, - dictWord{ - 9, - 11, - 635, - }, - dictWord{139, 11, 559}, - dictWord{4, 11, 150}, - 
dictWord{5, 11, 303}, - dictWord{134, 11, 327}, - dictWord{6, 10, 63}, - dictWord{135, 10, 920}, - dictWord{ - 133, - 10, - 793, - }, - dictWord{8, 11, 192}, - dictWord{10, 11, 78}, - dictWord{10, 11, 555}, - dictWord{11, 11, 308}, - dictWord{13, 11, 359}, - dictWord{147, 11, 95}, - dictWord{135, 11, 786}, - dictWord{135, 11, 1712}, - dictWord{136, 0, 402}, - dictWord{6, 0, 754}, - dictWord{6, 11, 1638}, - dictWord{7, 11, 79}, - dictWord{7, 11, 496}, - dictWord{9, 11, 138}, - dictWord{10, 11, 336}, - dictWord{11, 11, 12}, - dictWord{12, 11, 412}, - dictWord{12, 11, 440}, - dictWord{142, 11, 305}, - dictWord{4, 0, 716}, - dictWord{141, 0, 31}, - dictWord{133, 0, 982}, - dictWord{8, 0, 691}, - dictWord{8, 0, 731}, - dictWord{5, 10, 67}, - dictWord{6, 10, 62}, - dictWord{6, 10, 374}, - dictWord{ - 135, - 10, - 1391, - }, - dictWord{9, 10, 790}, - dictWord{140, 10, 47}, - dictWord{139, 11, 556}, - dictWord{151, 11, 1}, - dictWord{7, 11, 204}, - dictWord{7, 11, 415}, - dictWord{8, 11, 42}, - dictWord{10, 11, 85}, - dictWord{11, 11, 33}, - dictWord{11, 11, 564}, - dictWord{12, 11, 571}, - dictWord{149, 11, 1}, - dictWord{8, 0, 888}, - dictWord{ - 7, - 11, - 610, - }, - dictWord{135, 11, 1501}, - dictWord{4, 10, 391}, - dictWord{135, 10, 1169}, - dictWord{5, 0, 847}, - dictWord{9, 0, 840}, - dictWord{138, 0, 803}, - dictWord{137, 0, 823}, - dictWord{134, 0, 785}, - dictWord{8, 0, 152}, - dictWord{9, 0, 53}, - dictWord{9, 0, 268}, - dictWord{9, 0, 901}, - dictWord{10, 0, 518}, - dictWord{ - 10, - 0, - 829, - }, - dictWord{11, 0, 188}, - dictWord{13, 0, 74}, - dictWord{14, 0, 46}, - dictWord{15, 0, 17}, - dictWord{15, 0, 33}, - dictWord{17, 0, 40}, - dictWord{18, 0, 36}, - dictWord{ - 19, - 0, - 20, - }, - dictWord{22, 0, 1}, - dictWord{152, 0, 2}, - dictWord{4, 11, 3}, - dictWord{5, 11, 247}, - dictWord{5, 11, 644}, - dictWord{7, 11, 744}, - dictWord{7, 11, 1207}, - dictWord{7, 11, 1225}, - dictWord{7, 11, 1909}, - dictWord{146, 11, 147}, - dictWord{136, 0, 532}, - 
dictWord{135, 0, 681}, - dictWord{132, 10, 271}, - dictWord{ - 140, - 0, - 314, - }, - dictWord{140, 0, 677}, - dictWord{4, 0, 684}, - dictWord{136, 0, 384}, - dictWord{5, 11, 285}, - dictWord{9, 11, 67}, - dictWord{13, 11, 473}, - dictWord{ - 143, - 11, - 82, - }, - dictWord{4, 10, 253}, - dictWord{5, 10, 544}, - dictWord{7, 10, 300}, - dictWord{137, 10, 340}, - dictWord{7, 0, 110}, - dictWord{7, 0, 447}, - dictWord{8, 0, 290}, - dictWord{8, 0, 591}, - dictWord{9, 0, 382}, - dictWord{9, 0, 649}, - dictWord{11, 0, 71}, - dictWord{11, 0, 155}, - dictWord{11, 0, 313}, - dictWord{12, 0, 5}, - dictWord{13, 0, 325}, - dictWord{142, 0, 287}, - dictWord{134, 0, 1818}, - dictWord{136, 0, 1007}, - dictWord{138, 0, 321}, - dictWord{7, 0, 360}, - dictWord{7, 0, 425}, - dictWord{9, 0, 66}, - dictWord{9, 0, 278}, - dictWord{138, 0, 644}, - dictWord{133, 10, 818}, - dictWord{5, 0, 385}, - dictWord{5, 10, 541}, - dictWord{6, 10, 94}, - dictWord{6, 10, 499}, - dictWord{ - 7, - 10, - 230, - }, - dictWord{139, 10, 321}, - dictWord{4, 10, 920}, - dictWord{5, 10, 25}, - dictWord{5, 10, 790}, - dictWord{6, 10, 457}, - dictWord{7, 10, 853}, - dictWord{ - 136, - 10, - 788, - }, - dictWord{4, 0, 900}, - dictWord{133, 0, 861}, - dictWord{5, 0, 254}, - dictWord{7, 0, 985}, - dictWord{136, 0, 73}, - dictWord{7, 0, 1959}, - dictWord{ - 136, - 0, - 683, - }, - dictWord{134, 10, 1765}, - dictWord{133, 10, 822}, - dictWord{132, 10, 634}, - dictWord{4, 11, 29}, - dictWord{6, 11, 532}, - dictWord{7, 11, 1628}, - dictWord{ - 7, - 11, - 1648, - }, - dictWord{9, 11, 303}, - dictWord{9, 11, 350}, - dictWord{10, 11, 433}, - dictWord{11, 11, 97}, - dictWord{11, 11, 557}, - dictWord{11, 11, 745}, - dictWord{12, 11, 289}, - dictWord{12, 11, 335}, - dictWord{12, 11, 348}, - dictWord{12, 11, 606}, - dictWord{13, 11, 116}, - dictWord{13, 11, 233}, - dictWord{ - 13, - 11, - 466, - }, - dictWord{14, 11, 181}, - dictWord{14, 11, 209}, - dictWord{14, 11, 232}, - dictWord{14, 11, 236}, - dictWord{14, 11, 300}, - 
dictWord{16, 11, 41}, - dictWord{ - 148, - 11, - 97, - }, - dictWord{19, 0, 86}, - dictWord{6, 10, 36}, - dictWord{7, 10, 658}, - dictWord{136, 10, 454}, - dictWord{135, 11, 1692}, - dictWord{132, 0, 725}, - dictWord{ - 5, - 11, - 501, - }, - dictWord{7, 11, 1704}, - dictWord{9, 11, 553}, - dictWord{11, 11, 520}, - dictWord{12, 11, 557}, - dictWord{141, 11, 249}, - dictWord{134, 0, 196}, - dictWord{133, 0, 831}, - dictWord{136, 0, 723}, - dictWord{7, 0, 1897}, - dictWord{13, 0, 80}, - dictWord{13, 0, 437}, - dictWord{145, 0, 74}, - dictWord{4, 0, 992}, - dictWord{ - 6, - 0, - 627, - }, - dictWord{136, 0, 994}, - dictWord{135, 11, 1294}, - dictWord{132, 10, 104}, - dictWord{5, 0, 848}, - dictWord{6, 0, 66}, - dictWord{136, 0, 764}, - dictWord{ - 4, - 0, - 36, - }, - dictWord{7, 0, 1387}, - dictWord{10, 0, 205}, - dictWord{139, 0, 755}, - dictWord{6, 0, 1046}, - dictWord{134, 0, 1485}, - dictWord{134, 0, 950}, - dictWord{132, 0, 887}, - dictWord{14, 0, 450}, - dictWord{148, 0, 111}, - dictWord{7, 0, 620}, - dictWord{7, 0, 831}, - dictWord{9, 10, 542}, - dictWord{9, 10, 566}, - dictWord{ - 138, - 10, - 728, - }, - dictWord{6, 0, 165}, - dictWord{138, 0, 388}, - dictWord{139, 10, 263}, - dictWord{4, 0, 719}, - dictWord{135, 0, 155}, - dictWord{138, 10, 468}, - dictWord{6, 11, 453}, - dictWord{144, 11, 36}, - dictWord{134, 11, 129}, - dictWord{5, 0, 533}, - dictWord{7, 0, 755}, - dictWord{138, 0, 780}, - dictWord{134, 0, 1465}, - dictWord{4, 0, 353}, - dictWord{6, 0, 146}, - dictWord{6, 0, 1789}, - dictWord{7, 0, 427}, - dictWord{7, 0, 990}, - dictWord{7, 0, 1348}, - dictWord{9, 0, 665}, - dictWord{9, 0, 898}, - dictWord{11, 0, 893}, - dictWord{142, 0, 212}, - dictWord{7, 10, 87}, - dictWord{142, 10, 288}, - dictWord{4, 0, 45}, - dictWord{135, 0, 1257}, - dictWord{12, 0, 7}, - dictWord{7, 10, 988}, - dictWord{7, 10, 1939}, - dictWord{9, 10, 64}, - dictWord{9, 10, 502}, - dictWord{12, 10, 34}, - dictWord{13, 10, 12}, - dictWord{13, 10, 234}, - dictWord{147, 10, 77}, - 
dictWord{4, 0, 607}, - dictWord{5, 11, 60}, - dictWord{6, 11, 504}, - dictWord{7, 11, 614}, - dictWord{7, 11, 1155}, - dictWord{140, 11, 0}, - dictWord{ - 135, - 10, - 141, - }, - dictWord{8, 11, 198}, - dictWord{11, 11, 29}, - dictWord{140, 11, 534}, - dictWord{140, 0, 65}, - dictWord{136, 0, 816}, - dictWord{132, 10, 619}, - dictWord{139, 0, 88}, - dictWord{5, 10, 246}, - dictWord{8, 10, 189}, - dictWord{9, 10, 355}, - dictWord{9, 10, 512}, - dictWord{10, 10, 124}, - dictWord{10, 10, 453}, - dictWord{11, 10, 143}, - dictWord{11, 10, 416}, - dictWord{11, 10, 859}, - dictWord{141, 10, 341}, - dictWord{4, 11, 379}, - dictWord{135, 11, 1397}, - dictWord{ - 4, - 0, - 600, - }, - dictWord{137, 0, 621}, - dictWord{133, 0, 367}, - dictWord{134, 0, 561}, - dictWord{6, 0, 559}, - dictWord{134, 0, 1691}, - dictWord{6, 0, 585}, - dictWord{ - 134, - 11, - 585, - }, - dictWord{135, 11, 1228}, - dictWord{4, 11, 118}, - dictWord{5, 10, 678}, - dictWord{6, 11, 274}, - dictWord{6, 11, 361}, - dictWord{7, 11, 75}, - dictWord{ - 141, - 11, - 441, - }, - dictWord{135, 11, 1818}, - dictWord{137, 11, 841}, - dictWord{5, 0, 573}, - dictWord{6, 0, 287}, - dictWord{7, 10, 862}, - dictWord{7, 10, 1886}, - dictWord{138, 10, 179}, - dictWord{132, 10, 517}, - dictWord{140, 11, 693}, - dictWord{5, 11, 314}, - dictWord{6, 11, 221}, - dictWord{7, 11, 419}, - dictWord{ - 10, - 11, - 650, - }, - dictWord{11, 11, 396}, - dictWord{12, 11, 156}, - dictWord{13, 11, 369}, - dictWord{14, 11, 333}, - dictWord{145, 11, 47}, - dictWord{140, 10, 540}, - dictWord{136, 10, 667}, - dictWord{11, 10, 403}, - dictWord{146, 10, 83}, - dictWord{6, 0, 672}, - dictWord{133, 10, 761}, - dictWord{9, 0, 157}, - dictWord{10, 10, 131}, - dictWord{140, 10, 72}, - dictWord{7, 0, 714}, - dictWord{134, 11, 460}, - dictWord{134, 0, 456}, - dictWord{133, 0, 925}, - dictWord{5, 11, 682}, - dictWord{ - 135, - 11, - 1887, - }, - dictWord{136, 11, 510}, - dictWord{136, 11, 475}, - dictWord{133, 11, 1016}, - dictWord{9, 0, 19}, - 
dictWord{7, 11, 602}, - dictWord{8, 11, 179}, - dictWord{ - 10, - 11, - 781, - }, - dictWord{140, 11, 126}, - dictWord{6, 11, 329}, - dictWord{138, 11, 111}, - dictWord{6, 0, 822}, - dictWord{134, 0, 1473}, - dictWord{144, 11, 86}, - dictWord{11, 0, 113}, - dictWord{139, 11, 113}, - dictWord{5, 11, 821}, - dictWord{134, 11, 1687}, - dictWord{133, 10, 449}, - dictWord{7, 0, 463}, - dictWord{ - 17, - 0, - 69, - }, - dictWord{136, 10, 103}, - dictWord{7, 10, 2028}, - dictWord{138, 10, 641}, - dictWord{6, 0, 193}, - dictWord{7, 0, 240}, - dictWord{7, 0, 1682}, - dictWord{ - 10, - 0, - 51, - }, - dictWord{10, 0, 640}, - dictWord{11, 0, 410}, - dictWord{13, 0, 82}, - dictWord{14, 0, 247}, - dictWord{14, 0, 331}, - dictWord{142, 0, 377}, - dictWord{6, 0, 471}, - dictWord{11, 0, 411}, - dictWord{142, 0, 2}, - dictWord{5, 11, 71}, - dictWord{7, 11, 1407}, - dictWord{9, 11, 388}, - dictWord{9, 11, 704}, - dictWord{10, 11, 261}, - dictWord{ - 10, - 11, - 619, - }, - dictWord{11, 11, 547}, - dictWord{11, 11, 619}, - dictWord{143, 11, 157}, - dictWord{136, 0, 633}, - dictWord{135, 0, 1148}, - dictWord{6, 0, 554}, - dictWord{7, 0, 1392}, - dictWord{12, 0, 129}, - dictWord{7, 10, 1274}, - dictWord{7, 10, 1386}, - dictWord{7, 11, 2008}, - dictWord{9, 11, 337}, - dictWord{10, 11, 517}, - dictWord{146, 10, 87}, - dictWord{7, 0, 803}, - dictWord{8, 0, 542}, - dictWord{6, 10, 187}, - dictWord{7, 10, 1203}, - dictWord{8, 10, 380}, - dictWord{14, 10, 117}, - dictWord{149, 10, 28}, - dictWord{6, 10, 297}, - dictWord{7, 10, 793}, - dictWord{139, 10, 938}, - dictWord{8, 0, 438}, - dictWord{11, 0, 363}, - dictWord{7, 10, 464}, - dictWord{11, 10, 105}, - dictWord{12, 10, 231}, - dictWord{14, 10, 386}, - dictWord{15, 10, 102}, - dictWord{148, 10, 75}, - dictWord{5, 11, 16}, - dictWord{6, 11, 86}, - dictWord{6, 11, 603}, - dictWord{7, 11, 292}, - dictWord{7, 11, 561}, - dictWord{8, 11, 257}, - dictWord{8, 11, 382}, - dictWord{9, 11, 721}, - dictWord{9, 11, 778}, - dictWord{ - 11, - 11, - 581, 
- }, - dictWord{140, 11, 466}, - dictWord{6, 0, 717}, - dictWord{4, 11, 486}, - dictWord{133, 11, 491}, - dictWord{132, 0, 875}, - dictWord{132, 11, 72}, - dictWord{6, 11, 265}, - dictWord{135, 11, 847}, - dictWord{4, 0, 237}, - dictWord{135, 0, 514}, - dictWord{6, 0, 392}, - dictWord{7, 0, 65}, - dictWord{135, 0, 2019}, - dictWord{140, 11, 261}, - dictWord{135, 11, 922}, - dictWord{137, 11, 404}, - dictWord{12, 0, 563}, - dictWord{14, 0, 101}, - dictWord{18, 0, 129}, - dictWord{ - 7, - 10, - 1010, - }, - dictWord{11, 10, 733}, - dictWord{11, 10, 759}, - dictWord{13, 10, 34}, - dictWord{146, 10, 45}, - dictWord{7, 10, 1656}, - dictWord{9, 10, 369}, - dictWord{ - 10, - 10, - 338, - }, - dictWord{10, 10, 490}, - dictWord{11, 10, 154}, - dictWord{11, 10, 545}, - dictWord{11, 10, 775}, - dictWord{13, 10, 77}, - dictWord{141, 10, 274}, - dictWord{4, 0, 444}, - dictWord{10, 0, 146}, - dictWord{140, 0, 9}, - dictWord{139, 11, 163}, - dictWord{7, 0, 1260}, - dictWord{135, 0, 1790}, - dictWord{9, 0, 222}, - dictWord{10, 0, 43}, - dictWord{139, 0, 900}, - dictWord{137, 11, 234}, - dictWord{138, 0, 971}, - dictWord{137, 0, 761}, - dictWord{134, 0, 699}, - dictWord{ - 136, - 11, - 434, - }, - dictWord{6, 0, 1116}, - dictWord{7, 0, 1366}, - dictWord{5, 10, 20}, - dictWord{6, 11, 197}, - dictWord{6, 10, 298}, - dictWord{7, 10, 659}, - dictWord{8, 11, 205}, - dictWord{137, 10, 219}, - dictWord{132, 11, 490}, - dictWord{11, 11, 820}, - dictWord{150, 11, 51}, - dictWord{7, 10, 1440}, - dictWord{11, 10, 854}, - dictWord{ - 11, - 10, - 872, - }, - dictWord{11, 10, 921}, - dictWord{12, 10, 551}, - dictWord{13, 10, 472}, - dictWord{142, 10, 367}, - dictWord{140, 11, 13}, - dictWord{132, 0, 829}, - dictWord{12, 0, 242}, - dictWord{132, 10, 439}, - dictWord{136, 10, 669}, - dictWord{6, 0, 593}, - dictWord{6, 11, 452}, - dictWord{7, 11, 312}, - dictWord{ - 138, - 11, - 219, - }, - dictWord{4, 11, 333}, - dictWord{9, 11, 176}, - dictWord{12, 11, 353}, - dictWord{141, 11, 187}, - 
dictWord{7, 0, 36}, - dictWord{8, 0, 201}, - dictWord{ - 136, - 0, - 605, - }, - dictWord{140, 0, 224}, - dictWord{132, 10, 233}, - dictWord{134, 0, 1430}, - dictWord{134, 0, 1806}, - dictWord{4, 0, 523}, - dictWord{133, 0, 638}, - dictWord{ - 6, - 0, - 1889, - }, - dictWord{9, 0, 958}, - dictWord{9, 0, 971}, - dictWord{9, 0, 976}, - dictWord{12, 0, 796}, - dictWord{12, 0, 799}, - dictWord{12, 0, 808}, - dictWord{ - 12, - 0, - 835, - }, - dictWord{12, 0, 836}, - dictWord{12, 0, 914}, - dictWord{12, 0, 946}, - dictWord{15, 0, 216}, - dictWord{15, 0, 232}, - dictWord{18, 0, 183}, - dictWord{18, 0, 187}, - dictWord{18, 0, 194}, - dictWord{18, 0, 212}, - dictWord{18, 0, 232}, - dictWord{149, 0, 49}, - dictWord{132, 10, 482}, - dictWord{6, 0, 827}, - dictWord{134, 0, 1434}, - dictWord{135, 10, 346}, - dictWord{134, 0, 2043}, - dictWord{6, 0, 242}, - dictWord{7, 0, 227}, - dictWord{7, 0, 1581}, - dictWord{8, 0, 104}, - dictWord{9, 0, 113}, - dictWord{9, 0, 220}, - dictWord{9, 0, 427}, - dictWord{10, 0, 136}, - dictWord{10, 0, 239}, - dictWord{11, 0, 579}, - dictWord{11, 0, 1023}, - dictWord{13, 0, 4}, - dictWord{ - 13, - 0, - 204, - }, - dictWord{13, 0, 316}, - dictWord{148, 0, 86}, - dictWord{134, 11, 1685}, - dictWord{7, 0, 148}, - dictWord{8, 0, 284}, - dictWord{141, 0, 63}, - dictWord{ - 142, - 0, - 10, - }, - dictWord{135, 11, 584}, - dictWord{134, 0, 1249}, - dictWord{7, 0, 861}, - dictWord{135, 10, 334}, - dictWord{5, 10, 795}, - dictWord{6, 10, 1741}, - dictWord{ - 137, - 11, - 70, - }, - dictWord{132, 0, 807}, - dictWord{7, 11, 135}, - dictWord{8, 11, 7}, - dictWord{8, 11, 62}, - dictWord{9, 11, 243}, - dictWord{10, 11, 658}, - dictWord{ - 10, - 11, - 697, - }, - dictWord{11, 11, 456}, - dictWord{139, 11, 756}, - dictWord{9, 11, 395}, - dictWord{138, 11, 79}, - dictWord{137, 11, 108}, - dictWord{147, 0, 94}, - dictWord{136, 0, 494}, - dictWord{135, 11, 631}, - dictWord{135, 10, 622}, - dictWord{7, 0, 1510}, - dictWord{135, 10, 1750}, - dictWord{4, 10, 203}, - 
dictWord{ - 135, - 10, - 1936, - }, - dictWord{7, 11, 406}, - dictWord{7, 11, 459}, - dictWord{8, 11, 606}, - dictWord{139, 11, 726}, - dictWord{7, 0, 1306}, - dictWord{8, 0, 505}, - dictWord{ - 9, - 0, - 482, - }, - dictWord{10, 0, 126}, - dictWord{11, 0, 225}, - dictWord{12, 0, 347}, - dictWord{12, 0, 449}, - dictWord{13, 0, 19}, - dictWord{14, 0, 218}, - dictWord{142, 0, 435}, - dictWord{5, 0, 268}, - dictWord{10, 0, 764}, - dictWord{12, 0, 120}, - dictWord{13, 0, 39}, - dictWord{145, 0, 127}, - dictWord{142, 11, 68}, - dictWord{11, 10, 678}, - dictWord{140, 10, 307}, - dictWord{12, 11, 268}, - dictWord{12, 11, 640}, - dictWord{142, 11, 119}, - dictWord{135, 10, 2044}, - dictWord{133, 11, 612}, - dictWord{ - 4, - 11, - 372, - }, - dictWord{7, 11, 482}, - dictWord{8, 11, 158}, - dictWord{9, 11, 602}, - dictWord{9, 11, 615}, - dictWord{10, 11, 245}, - dictWord{10, 11, 678}, - dictWord{ - 10, - 11, - 744, - }, - dictWord{11, 11, 248}, - dictWord{139, 11, 806}, - dictWord{7, 10, 311}, - dictWord{9, 10, 308}, - dictWord{140, 10, 255}, - dictWord{4, 0, 384}, - dictWord{135, 0, 1022}, - dictWord{5, 11, 854}, - dictWord{135, 11, 1991}, - dictWord{135, 10, 1266}, - dictWord{4, 10, 400}, - dictWord{5, 10, 267}, - dictWord{ - 135, - 10, - 232, - }, - dictWord{135, 0, 1703}, - dictWord{9, 0, 159}, - dictWord{11, 0, 661}, - dictWord{140, 0, 603}, - dictWord{4, 0, 964}, - dictWord{14, 0, 438}, - dictWord{ - 14, - 0, - 444, - }, - dictWord{14, 0, 456}, - dictWord{22, 0, 60}, - dictWord{22, 0, 63}, - dictWord{9, 11, 106}, - dictWord{9, 11, 163}, - dictWord{9, 11, 296}, - dictWord{10, 11, 167}, - dictWord{10, 11, 172}, - dictWord{10, 11, 777}, - dictWord{139, 11, 16}, - dictWord{136, 0, 583}, - dictWord{132, 0, 515}, - dictWord{8, 0, 632}, - dictWord{8, 0, 697}, - dictWord{137, 0, 854}, - dictWord{5, 11, 195}, - dictWord{135, 11, 1685}, - dictWord{6, 0, 1123}, - dictWord{134, 0, 1365}, - dictWord{134, 11, 328}, - dictWord{ - 7, - 11, - 1997, - }, - dictWord{8, 11, 730}, - 
dictWord{139, 11, 1006}, - dictWord{4, 0, 136}, - dictWord{133, 0, 551}, - dictWord{134, 0, 1782}, - dictWord{7, 0, 1287}, - dictWord{ - 9, - 0, - 44, - }, - dictWord{10, 0, 552}, - dictWord{10, 0, 642}, - dictWord{11, 0, 839}, - dictWord{12, 0, 274}, - dictWord{12, 0, 275}, - dictWord{12, 0, 372}, - dictWord{ - 13, - 0, - 91, - }, - dictWord{142, 0, 125}, - dictWord{5, 11, 751}, - dictWord{11, 11, 797}, - dictWord{140, 11, 203}, - dictWord{133, 0, 732}, - dictWord{7, 0, 679}, - dictWord{ - 8, - 0, - 313, - }, - dictWord{4, 10, 100}, - dictWord{135, 11, 821}, - dictWord{10, 0, 361}, - dictWord{142, 0, 316}, - dictWord{134, 0, 595}, - dictWord{6, 0, 147}, - dictWord{ - 7, - 0, - 886, - }, - dictWord{9, 0, 753}, - dictWord{138, 0, 268}, - dictWord{5, 10, 362}, - dictWord{5, 10, 443}, - dictWord{6, 10, 318}, - dictWord{7, 10, 1019}, - dictWord{ - 139, - 10, - 623, - }, - dictWord{5, 10, 463}, - dictWord{136, 10, 296}, - dictWord{4, 10, 454}, - dictWord{5, 11, 950}, - dictWord{5, 11, 994}, - dictWord{134, 11, 351}, - dictWord{ - 138, - 0, - 137, - }, - dictWord{5, 10, 48}, - dictWord{5, 10, 404}, - dictWord{6, 10, 557}, - dictWord{7, 10, 458}, - dictWord{8, 10, 597}, - dictWord{10, 10, 455}, - dictWord{ - 10, - 10, - 606, - }, - dictWord{11, 10, 49}, - dictWord{11, 10, 548}, - dictWord{12, 10, 476}, - dictWord{13, 10, 18}, - dictWord{141, 10, 450}, - dictWord{133, 0, 414}, - dictWord{ - 135, - 0, - 1762, - }, - dictWord{5, 11, 421}, - dictWord{135, 11, 47}, - dictWord{5, 10, 442}, - dictWord{135, 10, 1984}, - dictWord{134, 0, 599}, - dictWord{134, 0, 1749}, - dictWord{134, 0, 1627}, - dictWord{4, 0, 488}, - dictWord{132, 11, 350}, - dictWord{137, 11, 751}, - dictWord{132, 0, 83}, - dictWord{140, 0, 676}, - dictWord{ - 133, - 11, - 967, - }, - dictWord{7, 0, 1639}, - dictWord{5, 10, 55}, - dictWord{140, 10, 161}, - dictWord{4, 11, 473}, - dictWord{7, 11, 623}, - dictWord{8, 11, 808}, - dictWord{ - 9, - 11, - 871, - }, - dictWord{9, 11, 893}, - dictWord{11, 11, 38}, - 
dictWord{11, 11, 431}, - dictWord{12, 11, 112}, - dictWord{12, 11, 217}, - dictWord{12, 11, 243}, - dictWord{ - 12, - 11, - 562, - }, - dictWord{12, 11, 683}, - dictWord{13, 11, 141}, - dictWord{13, 11, 197}, - dictWord{13, 11, 227}, - dictWord{13, 11, 406}, - dictWord{13, 11, 487}, - dictWord{14, 11, 156}, - dictWord{14, 11, 203}, - dictWord{14, 11, 224}, - dictWord{14, 11, 256}, - dictWord{18, 11, 58}, - dictWord{150, 11, 0}, - dictWord{ - 133, - 10, - 450, - }, - dictWord{7, 11, 736}, - dictWord{139, 11, 264}, - dictWord{134, 0, 278}, - dictWord{4, 11, 222}, - dictWord{7, 11, 286}, - dictWord{136, 11, 629}, - dictWord{ - 135, - 10, - 869, - }, - dictWord{140, 0, 97}, - dictWord{144, 0, 14}, - dictWord{134, 0, 1085}, - dictWord{4, 10, 213}, - dictWord{7, 10, 223}, - dictWord{136, 10, 80}, - dictWord{ - 7, - 0, - 388, - }, - dictWord{7, 0, 644}, - dictWord{139, 0, 781}, - dictWord{132, 0, 849}, - dictWord{7, 0, 229}, - dictWord{8, 0, 59}, - dictWord{9, 0, 190}, - dictWord{10, 0, 378}, - dictWord{140, 0, 191}, - dictWord{7, 10, 381}, - dictWord{7, 10, 806}, - dictWord{7, 10, 820}, - dictWord{8, 10, 354}, - dictWord{8, 10, 437}, - dictWord{8, 10, 787}, - dictWord{9, 10, 657}, - dictWord{10, 10, 58}, - dictWord{10, 10, 339}, - dictWord{10, 10, 749}, - dictWord{11, 10, 914}, - dictWord{12, 10, 162}, - dictWord{13, 10, 75}, - dictWord{14, 10, 106}, - dictWord{14, 10, 198}, - dictWord{14, 10, 320}, - dictWord{14, 10, 413}, - dictWord{146, 10, 43}, - dictWord{141, 11, 306}, - dictWord{ - 136, - 10, - 747, - }, - dictWord{134, 0, 1115}, - dictWord{16, 0, 94}, - dictWord{16, 0, 108}, - dictWord{136, 11, 146}, - dictWord{6, 0, 700}, - dictWord{6, 0, 817}, - dictWord{ - 134, - 0, - 1002, - }, - dictWord{133, 10, 692}, - dictWord{4, 11, 465}, - dictWord{135, 11, 1663}, - dictWord{134, 10, 191}, - dictWord{6, 0, 1414}, - dictWord{ - 135, - 11, - 913, - }, - dictWord{132, 0, 660}, - dictWord{7, 0, 1035}, - dictWord{138, 0, 737}, - dictWord{6, 10, 162}, - dictWord{7, 10, 1960}, 
- dictWord{136, 10, 831}, - dictWord{ - 132, - 10, - 706, - }, - dictWord{7, 0, 690}, - dictWord{9, 0, 217}, - dictWord{9, 0, 587}, - dictWord{140, 0, 521}, - dictWord{138, 10, 426}, - dictWord{135, 10, 1235}, - dictWord{ - 6, - 11, - 82, - }, - dictWord{7, 11, 138}, - dictWord{7, 11, 517}, - dictWord{9, 11, 673}, - dictWord{139, 11, 238}, - dictWord{138, 0, 272}, - dictWord{5, 11, 495}, - dictWord{ - 7, - 11, - 834, - }, - dictWord{9, 11, 733}, - dictWord{139, 11, 378}, - dictWord{134, 0, 1744}, - dictWord{132, 0, 1011}, - dictWord{7, 11, 828}, - dictWord{142, 11, 116}, - dictWord{4, 0, 733}, - dictWord{9, 0, 194}, - dictWord{10, 0, 92}, - dictWord{11, 0, 198}, - dictWord{12, 0, 84}, - dictWord{13, 0, 128}, - dictWord{133, 11, 559}, - dictWord{ - 10, - 0, - 57, - }, - dictWord{10, 0, 277}, - dictWord{6, 11, 21}, - dictWord{6, 11, 1737}, - dictWord{7, 11, 1444}, - dictWord{136, 11, 224}, - dictWord{4, 10, 204}, - dictWord{ - 137, - 10, - 902, - }, - dictWord{136, 10, 833}, - dictWord{11, 0, 348}, - dictWord{12, 0, 99}, - dictWord{18, 0, 1}, - dictWord{18, 0, 11}, - dictWord{19, 0, 4}, - dictWord{7, 10, 366}, - dictWord{9, 10, 287}, - dictWord{12, 10, 199}, - dictWord{12, 10, 556}, - dictWord{140, 10, 577}, - dictWord{6, 0, 1981}, - dictWord{136, 0, 936}, - dictWord{ - 21, - 0, - 33, - }, - dictWord{150, 0, 40}, - dictWord{5, 11, 519}, - dictWord{138, 11, 204}, - dictWord{5, 10, 356}, - dictWord{135, 10, 224}, - dictWord{134, 0, 775}, - dictWord{ - 135, - 0, - 306, - }, - dictWord{7, 10, 630}, - dictWord{9, 10, 567}, - dictWord{11, 10, 150}, - dictWord{11, 10, 444}, - dictWord{141, 10, 119}, - dictWord{5, 0, 979}, - dictWord{ - 134, - 10, - 539, - }, - dictWord{133, 0, 611}, - dictWord{4, 11, 402}, - dictWord{135, 11, 1679}, - dictWord{5, 0, 178}, - dictWord{7, 11, 2}, - dictWord{8, 11, 323}, - dictWord{ - 136, - 11, - 479, - }, - dictWord{5, 11, 59}, - dictWord{135, 11, 672}, - dictWord{4, 0, 1010}, - dictWord{6, 0, 1969}, - dictWord{138, 11, 237}, - dictWord{133, 
11, 412}, - dictWord{146, 11, 34}, - dictWord{7, 11, 1740}, - dictWord{146, 11, 48}, - dictWord{134, 0, 664}, - dictWord{139, 10, 814}, - dictWord{4, 11, 85}, - dictWord{ - 135, - 11, - 549, - }, - dictWord{133, 11, 94}, - dictWord{133, 11, 457}, - dictWord{132, 0, 390}, - dictWord{134, 0, 1510}, - dictWord{4, 10, 235}, - dictWord{135, 10, 255}, - dictWord{4, 10, 194}, - dictWord{5, 10, 584}, - dictWord{6, 11, 11}, - dictWord{6, 10, 384}, - dictWord{7, 11, 187}, - dictWord{7, 10, 583}, - dictWord{10, 10, 761}, - dictWord{ - 11, - 10, - 760, - }, - dictWord{139, 10, 851}, - dictWord{4, 11, 522}, - dictWord{139, 11, 802}, - dictWord{135, 0, 493}, - dictWord{10, 11, 776}, - dictWord{13, 11, 345}, - dictWord{142, 11, 425}, - dictWord{146, 0, 37}, - dictWord{4, 11, 52}, - dictWord{135, 11, 661}, - dictWord{134, 0, 724}, - dictWord{134, 0, 829}, - dictWord{ - 133, - 11, - 520, - }, - dictWord{133, 10, 562}, - dictWord{4, 11, 281}, - dictWord{5, 11, 38}, - dictWord{7, 11, 194}, - dictWord{7, 11, 668}, - dictWord{7, 11, 1893}, - dictWord{ - 137, - 11, - 397, - }, - dictWord{5, 10, 191}, - dictWord{137, 10, 271}, - dictWord{7, 0, 1537}, - dictWord{14, 0, 96}, - dictWord{143, 0, 73}, - dictWord{5, 0, 473}, - dictWord{ - 11, - 0, - 168, - }, - dictWord{4, 10, 470}, - dictWord{6, 10, 153}, - dictWord{7, 10, 1503}, - dictWord{7, 10, 1923}, - dictWord{10, 10, 701}, - dictWord{11, 10, 132}, - dictWord{ - 11, - 10, - 227, - }, - dictWord{11, 10, 320}, - dictWord{11, 10, 436}, - dictWord{11, 10, 525}, - dictWord{11, 10, 855}, - dictWord{12, 10, 41}, - dictWord{12, 10, 286}, - dictWord{13, 10, 103}, - dictWord{13, 10, 284}, - dictWord{14, 10, 255}, - dictWord{14, 10, 262}, - dictWord{15, 10, 117}, - dictWord{143, 10, 127}, - dictWord{ - 133, - 0, - 105, - }, - dictWord{5, 0, 438}, - dictWord{9, 0, 694}, - dictWord{12, 0, 627}, - dictWord{141, 0, 210}, - dictWord{133, 10, 327}, - dictWord{6, 10, 552}, - dictWord{ - 7, - 10, - 1754, - }, - dictWord{137, 10, 604}, - dictWord{134, 0, 
1256}, - dictWord{152, 0, 11}, - dictWord{5, 11, 448}, - dictWord{11, 11, 98}, - dictWord{139, 11, 524}, - dictWord{ - 7, - 0, - 1626, - }, - dictWord{5, 10, 80}, - dictWord{6, 10, 405}, - dictWord{7, 10, 403}, - dictWord{7, 10, 1502}, - dictWord{8, 10, 456}, - dictWord{9, 10, 487}, - dictWord{ - 9, - 10, - 853, - }, - dictWord{9, 10, 889}, - dictWord{10, 10, 309}, - dictWord{11, 10, 721}, - dictWord{11, 10, 994}, - dictWord{12, 10, 430}, - dictWord{13, 10, 165}, - dictWord{ - 14, - 11, - 16, - }, - dictWord{146, 11, 44}, - dictWord{132, 0, 779}, - dictWord{8, 0, 25}, - dictWord{138, 0, 826}, - dictWord{4, 10, 453}, - dictWord{5, 10, 887}, - dictWord{ - 6, - 10, - 535, - }, - dictWord{8, 10, 6}, - dictWord{8, 10, 543}, - dictWord{136, 10, 826}, - dictWord{137, 11, 461}, - dictWord{140, 11, 632}, - dictWord{132, 0, 308}, - dictWord{135, 0, 741}, - dictWord{132, 0, 671}, - dictWord{7, 0, 150}, - dictWord{8, 0, 649}, - dictWord{136, 0, 1020}, - dictWord{9, 0, 99}, - dictWord{6, 11, 336}, - dictWord{ - 8, - 11, - 552, - }, - dictWord{9, 11, 285}, - dictWord{10, 11, 99}, - dictWord{139, 11, 568}, - dictWord{134, 0, 521}, - dictWord{5, 0, 339}, - dictWord{14, 0, 3}, - dictWord{ - 15, - 0, - 41, - }, - dictWord{15, 0, 166}, - dictWord{147, 0, 66}, - dictWord{6, 11, 423}, - dictWord{7, 11, 665}, - dictWord{7, 11, 1210}, - dictWord{9, 11, 218}, - dictWord{ - 141, - 11, - 222, - }, - dictWord{6, 0, 543}, - dictWord{5, 10, 101}, - dictWord{5, 11, 256}, - dictWord{6, 10, 88}, - dictWord{7, 10, 1677}, - dictWord{9, 10, 100}, - dictWord{10, 10, 677}, - dictWord{14, 10, 169}, - dictWord{14, 10, 302}, - dictWord{14, 10, 313}, - dictWord{15, 10, 48}, - dictWord{143, 10, 84}, - dictWord{4, 10, 310}, - dictWord{ - 7, - 10, - 708, - }, - dictWord{7, 10, 996}, - dictWord{9, 10, 795}, - dictWord{10, 10, 390}, - dictWord{10, 10, 733}, - dictWord{11, 10, 451}, - dictWord{12, 10, 249}, - dictWord{ - 14, - 10, - 115, - }, - dictWord{14, 10, 286}, - dictWord{143, 10, 100}, - dictWord{133, 
10, 587}, - dictWord{13, 11, 417}, - dictWord{14, 11, 129}, - dictWord{143, 11, 15}, - dictWord{134, 0, 1358}, - dictWord{136, 11, 554}, - dictWord{132, 10, 498}, - dictWord{7, 10, 217}, - dictWord{8, 10, 140}, - dictWord{138, 10, 610}, - dictWord{ - 135, - 11, - 989, - }, - dictWord{135, 11, 634}, - dictWord{6, 0, 155}, - dictWord{140, 0, 234}, - dictWord{135, 11, 462}, - dictWord{132, 11, 618}, - dictWord{ - 134, - 0, - 1628, - }, - dictWord{132, 0, 766}, - dictWord{4, 11, 339}, - dictWord{5, 10, 905}, - dictWord{135, 11, 259}, - dictWord{135, 0, 829}, - dictWord{4, 11, 759}, - dictWord{ - 141, - 11, - 169, - }, - dictWord{7, 0, 1445}, - dictWord{4, 10, 456}, - dictWord{7, 10, 358}, - dictWord{7, 10, 1637}, - dictWord{8, 10, 643}, - dictWord{139, 10, 483}, - dictWord{ - 5, - 0, - 486, - }, - dictWord{135, 0, 1349}, - dictWord{5, 11, 688}, - dictWord{135, 11, 712}, - dictWord{7, 0, 1635}, - dictWord{8, 0, 17}, - dictWord{10, 0, 217}, - dictWord{ - 10, - 0, - 295, - }, - dictWord{12, 0, 2}, - dictWord{140, 11, 2}, - dictWord{138, 0, 558}, - dictWord{150, 10, 56}, - dictWord{4, 11, 278}, - dictWord{5, 11, 465}, - dictWord{ - 135, - 11, - 1367, - }, - dictWord{136, 11, 482}, - dictWord{133, 10, 535}, - dictWord{6, 0, 1362}, - dictWord{6, 0, 1461}, - dictWord{10, 11, 274}, - dictWord{10, 11, 625}, - dictWord{139, 11, 530}, - dictWord{5, 0, 599}, - dictWord{5, 11, 336}, - dictWord{6, 11, 341}, - dictWord{6, 11, 478}, - dictWord{6, 11, 1763}, - dictWord{136, 11, 386}, - dictWord{7, 10, 1748}, - dictWord{137, 11, 151}, - dictWord{134, 0, 1376}, - dictWord{133, 10, 539}, - dictWord{135, 11, 73}, - dictWord{135, 11, 1971}, - dictWord{139, 11, 283}, - dictWord{9, 0, 93}, - dictWord{139, 0, 474}, - dictWord{6, 10, 91}, - dictWord{135, 10, 435}, - dictWord{6, 0, 447}, - dictWord{5, 11, 396}, - dictWord{134, 11, 501}, - dictWord{4, 10, 16}, - dictWord{5, 10, 316}, - dictWord{5, 10, 842}, - dictWord{6, 10, 370}, - dictWord{6, 10, 1778}, - dictWord{8, 10, 166}, - dictWord{11, 
10, 812}, - dictWord{12, 10, 206}, - dictWord{12, 10, 351}, - dictWord{14, 10, 418}, - dictWord{16, 10, 15}, - dictWord{16, 10, 34}, - dictWord{18, 10, 3}, - dictWord{19, 10, 3}, - dictWord{19, 10, 7}, - dictWord{20, 10, 4}, - dictWord{149, 10, 21}, - dictWord{7, 0, 577}, - dictWord{7, 0, 1432}, - dictWord{9, 0, 475}, - dictWord{9, 0, 505}, - dictWord{9, 0, 526}, - dictWord{9, 0, 609}, - dictWord{9, 0, 689}, - dictWord{9, 0, 726}, - dictWord{9, 0, 735}, - dictWord{9, 0, 738}, - dictWord{10, 0, 556}, - dictWord{ - 10, - 0, - 674, - }, - dictWord{10, 0, 684}, - dictWord{11, 0, 89}, - dictWord{11, 0, 202}, - dictWord{11, 0, 272}, - dictWord{11, 0, 380}, - dictWord{11, 0, 415}, - dictWord{11, 0, 505}, - dictWord{11, 0, 537}, - dictWord{11, 0, 550}, - dictWord{11, 0, 562}, - dictWord{11, 0, 640}, - dictWord{11, 0, 667}, - dictWord{11, 0, 688}, - dictWord{11, 0, 847}, - dictWord{11, 0, 927}, - dictWord{11, 0, 930}, - dictWord{11, 0, 940}, - dictWord{12, 0, 144}, - dictWord{12, 0, 325}, - dictWord{12, 0, 329}, - dictWord{12, 0, 389}, - dictWord{ - 12, - 0, - 403, - }, - dictWord{12, 0, 451}, - dictWord{12, 0, 515}, - dictWord{12, 0, 604}, - dictWord{12, 0, 616}, - dictWord{12, 0, 626}, - dictWord{13, 0, 66}, - dictWord{ - 13, - 0, - 131, - }, - dictWord{13, 0, 167}, - dictWord{13, 0, 236}, - dictWord{13, 0, 368}, - dictWord{13, 0, 411}, - dictWord{13, 0, 434}, - dictWord{13, 0, 453}, - dictWord{13, 0, 461}, - dictWord{13, 0, 474}, - dictWord{14, 0, 59}, - dictWord{14, 0, 60}, - dictWord{14, 0, 139}, - dictWord{14, 0, 152}, - dictWord{14, 0, 276}, - dictWord{14, 0, 353}, - dictWord{ - 14, - 0, - 402, - }, - dictWord{15, 0, 28}, - dictWord{15, 0, 81}, - dictWord{15, 0, 123}, - dictWord{15, 0, 152}, - dictWord{18, 0, 136}, - dictWord{148, 0, 88}, - dictWord{ - 4, - 11, - 929, - }, - dictWord{133, 11, 799}, - dictWord{136, 11, 46}, - dictWord{142, 0, 307}, - dictWord{4, 0, 609}, - dictWord{7, 0, 756}, - dictWord{9, 0, 544}, - dictWord{ - 11, - 0, - 413, - }, - dictWord{144, 
0, 25}, - dictWord{10, 0, 687}, - dictWord{7, 10, 619}, - dictWord{10, 10, 547}, - dictWord{11, 10, 122}, - dictWord{140, 10, 601}, - dictWord{ - 4, - 0, - 930, - }, - dictWord{133, 0, 947}, - dictWord{133, 0, 939}, - dictWord{142, 0, 21}, - dictWord{4, 11, 892}, - dictWord{133, 11, 770}, - dictWord{133, 0, 962}, - dictWord{ - 5, - 0, - 651, - }, - dictWord{8, 0, 170}, - dictWord{9, 0, 61}, - dictWord{9, 0, 63}, - dictWord{10, 0, 23}, - dictWord{10, 0, 37}, - dictWord{10, 0, 834}, - dictWord{11, 0, 4}, - dictWord{ - 11, - 0, - 187, - }, - dictWord{11, 0, 281}, - dictWord{11, 0, 503}, - dictWord{11, 0, 677}, - dictWord{12, 0, 96}, - dictWord{12, 0, 130}, - dictWord{12, 0, 244}, - dictWord{14, 0, 5}, - dictWord{14, 0, 40}, - dictWord{14, 0, 162}, - dictWord{14, 0, 202}, - dictWord{146, 0, 133}, - dictWord{4, 0, 406}, - dictWord{5, 0, 579}, - dictWord{12, 0, 492}, - dictWord{ - 150, - 0, - 15, - }, - dictWord{135, 11, 158}, - dictWord{135, 0, 597}, - dictWord{132, 0, 981}, - dictWord{132, 10, 888}, - dictWord{4, 10, 149}, - dictWord{138, 10, 368}, - dictWord{132, 0, 545}, - dictWord{4, 10, 154}, - dictWord{7, 10, 1134}, - dictWord{136, 10, 105}, - dictWord{135, 11, 2001}, - dictWord{134, 0, 1558}, - dictWord{ - 4, - 10, - 31, - }, - dictWord{6, 10, 429}, - dictWord{7, 10, 962}, - dictWord{9, 10, 458}, - dictWord{139, 10, 691}, - dictWord{132, 10, 312}, - dictWord{135, 10, 1642}, - dictWord{ - 6, - 0, - 17, - }, - dictWord{6, 0, 1304}, - dictWord{7, 0, 16}, - dictWord{7, 0, 1001}, - dictWord{9, 0, 886}, - dictWord{10, 0, 489}, - dictWord{10, 0, 800}, - dictWord{11, 0, 782}, - dictWord{12, 0, 320}, - dictWord{13, 0, 467}, - dictWord{14, 0, 145}, - dictWord{14, 0, 387}, - dictWord{143, 0, 119}, - dictWord{135, 0, 1982}, - dictWord{17, 0, 17}, - dictWord{7, 11, 1461}, - dictWord{140, 11, 91}, - dictWord{4, 10, 236}, - dictWord{132, 11, 602}, - dictWord{138, 0, 907}, - dictWord{136, 0, 110}, - dictWord{7, 0, 272}, - dictWord{19, 0, 53}, - dictWord{5, 10, 836}, - 
dictWord{5, 10, 857}, - dictWord{134, 10, 1680}, - dictWord{5, 0, 458}, - dictWord{7, 11, 1218}, - dictWord{136, 11, 303}, - dictWord{7, 0, 1983}, - dictWord{8, 0, 0}, - dictWord{8, 0, 171}, - dictWord{9, 0, 120}, - dictWord{9, 0, 732}, - dictWord{10, 0, 473}, - dictWord{11, 0, 656}, - dictWord{ - 11, - 0, - 998, - }, - dictWord{18, 0, 0}, - dictWord{18, 0, 2}, - dictWord{19, 0, 21}, - dictWord{10, 10, 68}, - dictWord{139, 10, 494}, - dictWord{137, 11, 662}, - dictWord{4, 11, 13}, - dictWord{5, 11, 567}, - dictWord{7, 11, 1498}, - dictWord{9, 11, 124}, - dictWord{11, 11, 521}, - dictWord{140, 11, 405}, - dictWord{4, 10, 81}, - dictWord{139, 10, 867}, - dictWord{135, 11, 1006}, - dictWord{7, 11, 800}, - dictWord{7, 11, 1783}, - dictWord{138, 11, 12}, - dictWord{9, 0, 295}, - dictWord{10, 0, 443}, - dictWord{ - 5, - 10, - 282, - }, - dictWord{8, 10, 650}, - dictWord{137, 10, 907}, - dictWord{132, 11, 735}, - dictWord{4, 11, 170}, - dictWord{4, 10, 775}, - dictWord{135, 11, 323}, - dictWord{ - 6, - 0, - 1844, - }, - dictWord{10, 0, 924}, - dictWord{11, 11, 844}, - dictWord{12, 11, 104}, - dictWord{140, 11, 625}, - dictWord{5, 11, 304}, - dictWord{7, 11, 1403}, - dictWord{140, 11, 498}, - dictWord{134, 0, 1232}, - dictWord{4, 0, 519}, - dictWord{10, 0, 70}, - dictWord{12, 0, 26}, - dictWord{14, 0, 17}, - dictWord{14, 0, 178}, - dictWord{ - 15, - 0, - 34, - }, - dictWord{149, 0, 12}, - dictWord{132, 0, 993}, - dictWord{4, 11, 148}, - dictWord{133, 11, 742}, - dictWord{6, 0, 31}, - dictWord{7, 0, 491}, - dictWord{7, 0, 530}, - dictWord{8, 0, 592}, - dictWord{11, 0, 53}, - dictWord{11, 0, 779}, - dictWord{12, 0, 167}, - dictWord{12, 0, 411}, - dictWord{14, 0, 14}, - dictWord{14, 0, 136}, - dictWord{ - 15, - 0, - 72, - }, - dictWord{16, 0, 17}, - dictWord{144, 0, 72}, - dictWord{133, 0, 907}, - dictWord{134, 0, 733}, - dictWord{133, 11, 111}, - dictWord{4, 10, 71}, - dictWord{ - 5, - 10, - 376, - }, - dictWord{7, 10, 119}, - dictWord{138, 10, 665}, - dictWord{136, 0, 55}, 
- dictWord{8, 0, 430}, - dictWord{136, 11, 430}, - dictWord{4, 0, 208}, - dictWord{ - 5, - 0, - 106, - }, - dictWord{6, 0, 531}, - dictWord{8, 0, 408}, - dictWord{9, 0, 188}, - dictWord{138, 0, 572}, - dictWord{12, 0, 56}, - dictWord{11, 10, 827}, - dictWord{14, 10, 34}, - dictWord{143, 10, 148}, - dictWord{134, 0, 1693}, - dictWord{133, 11, 444}, - dictWord{132, 10, 479}, - dictWord{140, 0, 441}, - dictWord{9, 0, 449}, - dictWord{ - 10, - 0, - 192, - }, - dictWord{138, 0, 740}, - dictWord{134, 0, 928}, - dictWord{4, 0, 241}, - dictWord{7, 10, 607}, - dictWord{136, 10, 99}, - dictWord{8, 11, 123}, - dictWord{ - 15, - 11, - 6, - }, - dictWord{144, 11, 7}, - dictWord{6, 11, 285}, - dictWord{8, 11, 654}, - dictWord{11, 11, 749}, - dictWord{12, 11, 190}, - dictWord{12, 11, 327}, - dictWord{ - 13, - 11, - 120, - }, - dictWord{13, 11, 121}, - dictWord{13, 11, 327}, - dictWord{15, 11, 47}, - dictWord{146, 11, 40}, - dictWord{4, 10, 41}, - dictWord{5, 10, 74}, - dictWord{ - 7, - 10, - 1627, - }, - dictWord{11, 10, 871}, - dictWord{140, 10, 619}, - dictWord{7, 0, 1525}, - dictWord{11, 10, 329}, - dictWord{11, 10, 965}, - dictWord{12, 10, 241}, - dictWord{14, 10, 354}, - dictWord{15, 10, 22}, - dictWord{148, 10, 63}, - dictWord{132, 0, 259}, - dictWord{135, 11, 183}, - dictWord{9, 10, 209}, - dictWord{ - 137, - 10, - 300, - }, - dictWord{5, 11, 937}, - dictWord{135, 11, 100}, - dictWord{133, 10, 98}, - dictWord{4, 0, 173}, - dictWord{5, 0, 312}, - dictWord{5, 0, 512}, - dictWord{ - 135, - 0, - 1285, - }, - dictWord{141, 0, 185}, - dictWord{7, 0, 1603}, - dictWord{7, 0, 1691}, - dictWord{9, 0, 464}, - dictWord{11, 0, 195}, - dictWord{12, 0, 279}, - dictWord{ - 12, - 0, - 448, - }, - dictWord{14, 0, 11}, - dictWord{147, 0, 102}, - dictWord{135, 0, 1113}, - dictWord{133, 10, 984}, - dictWord{4, 0, 452}, - dictWord{5, 0, 583}, - dictWord{ - 135, - 0, - 720, - }, - dictWord{4, 0, 547}, - dictWord{5, 0, 817}, - dictWord{6, 0, 433}, - dictWord{7, 0, 593}, - dictWord{7, 0, 1378}, - 
dictWord{8, 0, 161}, - dictWord{9, 0, 284}, - dictWord{ - 10, - 0, - 313, - }, - dictWord{139, 0, 886}, - dictWord{8, 0, 722}, - dictWord{4, 10, 182}, - dictWord{6, 10, 205}, - dictWord{135, 10, 220}, - dictWord{150, 0, 13}, - dictWord{ - 4, - 10, - 42, - }, - dictWord{9, 10, 205}, - dictWord{9, 10, 786}, - dictWord{138, 10, 659}, - dictWord{6, 0, 289}, - dictWord{7, 0, 1670}, - dictWord{12, 0, 57}, - dictWord{151, 0, 4}, - dictWord{132, 10, 635}, - dictWord{14, 0, 43}, - dictWord{146, 0, 21}, - dictWord{139, 10, 533}, - dictWord{135, 0, 1694}, - dictWord{8, 0, 420}, - dictWord{ - 139, - 0, - 193, - }, - dictWord{135, 0, 409}, - dictWord{132, 10, 371}, - dictWord{4, 10, 272}, - dictWord{135, 10, 836}, - dictWord{5, 10, 825}, - dictWord{134, 10, 1640}, - dictWord{5, 11, 251}, - dictWord{5, 11, 956}, - dictWord{8, 11, 268}, - dictWord{9, 11, 214}, - dictWord{146, 11, 142}, - dictWord{138, 0, 308}, - dictWord{6, 0, 1863}, - dictWord{141, 11, 37}, - dictWord{137, 10, 879}, - dictWord{7, 10, 317}, - dictWord{135, 10, 569}, - dictWord{132, 11, 294}, - dictWord{134, 0, 790}, - dictWord{ - 5, - 0, - 1002, - }, - dictWord{136, 0, 745}, - dictWord{5, 11, 346}, - dictWord{5, 11, 711}, - dictWord{136, 11, 390}, - dictWord{135, 0, 289}, - dictWord{5, 0, 504}, - dictWord{ - 11, - 0, - 68, - }, - dictWord{137, 10, 307}, - dictWord{4, 0, 239}, - dictWord{6, 0, 477}, - dictWord{7, 0, 1607}, - dictWord{139, 0, 617}, - dictWord{149, 0, 13}, - dictWord{ - 133, - 0, - 609, - }, - dictWord{133, 11, 624}, - dictWord{5, 11, 783}, - dictWord{7, 11, 1998}, - dictWord{135, 11, 2047}, - dictWord{133, 10, 525}, - dictWord{132, 0, 367}, - dictWord{132, 11, 594}, - dictWord{6, 0, 528}, - dictWord{133, 10, 493}, - dictWord{4, 10, 174}, - dictWord{135, 10, 911}, - dictWord{8, 10, 417}, - dictWord{ - 137, - 10, - 782, - }, - dictWord{132, 0, 694}, - dictWord{7, 0, 548}, - dictWord{137, 0, 58}, - dictWord{4, 10, 32}, - dictWord{5, 10, 215}, - dictWord{6, 10, 269}, - dictWord{7, 10, 1782}, - 
dictWord{7, 10, 1892}, - dictWord{10, 10, 16}, - dictWord{11, 10, 822}, - dictWord{11, 10, 954}, - dictWord{141, 10, 481}, - dictWord{140, 0, 687}, - dictWord{ - 7, - 0, - 1749, - }, - dictWord{136, 10, 477}, - dictWord{132, 11, 569}, - dictWord{133, 10, 308}, - dictWord{135, 10, 1088}, - dictWord{4, 0, 661}, - dictWord{138, 0, 1004}, - dictWord{5, 11, 37}, - dictWord{6, 11, 39}, - dictWord{6, 11, 451}, - dictWord{7, 11, 218}, - dictWord{7, 11, 667}, - dictWord{7, 11, 1166}, - dictWord{7, 11, 1687}, - dictWord{8, 11, 662}, - dictWord{144, 11, 2}, - dictWord{9, 0, 445}, - dictWord{12, 0, 53}, - dictWord{13, 0, 492}, - dictWord{5, 10, 126}, - dictWord{8, 10, 297}, - dictWord{ - 9, - 10, - 366, - }, - dictWord{140, 10, 374}, - dictWord{7, 10, 1551}, - dictWord{139, 10, 361}, - dictWord{148, 0, 74}, - dictWord{134, 11, 508}, - dictWord{135, 0, 213}, - dictWord{132, 10, 175}, - dictWord{132, 10, 685}, - dictWord{6, 0, 760}, - dictWord{6, 0, 834}, - dictWord{134, 0, 1248}, - dictWord{7, 11, 453}, - dictWord{7, 11, 635}, - dictWord{7, 11, 796}, - dictWord{8, 11, 331}, - dictWord{9, 11, 328}, - dictWord{9, 11, 330}, - dictWord{9, 11, 865}, - dictWord{10, 11, 119}, - dictWord{10, 11, 235}, - dictWord{11, 11, 111}, - dictWord{11, 11, 129}, - dictWord{11, 11, 240}, - dictWord{12, 11, 31}, - dictWord{12, 11, 66}, - dictWord{12, 11, 222}, - dictWord{12, 11, 269}, - dictWord{12, 11, 599}, - dictWord{12, 11, 689}, - dictWord{13, 11, 186}, - dictWord{13, 11, 364}, - dictWord{142, 11, 345}, - dictWord{7, 0, 1672}, - dictWord{ - 139, - 0, - 189, - }, - dictWord{133, 10, 797}, - dictWord{133, 10, 565}, - dictWord{6, 0, 1548}, - dictWord{6, 11, 98}, - dictWord{7, 11, 585}, - dictWord{135, 11, 702}, - dictWord{ - 9, - 0, - 968, - }, - dictWord{15, 0, 192}, - dictWord{149, 0, 56}, - dictWord{4, 10, 252}, - dictWord{6, 11, 37}, - dictWord{7, 11, 299}, - dictWord{7, 10, 1068}, - dictWord{ - 7, - 11, - 1666, - }, - dictWord{8, 11, 195}, - dictWord{8, 11, 316}, - dictWord{9, 11, 178}, - 
dictWord{9, 11, 276}, - dictWord{9, 11, 339}, - dictWord{9, 11, 536}, - dictWord{ - 10, - 11, - 102, - }, - dictWord{10, 11, 362}, - dictWord{10, 10, 434}, - dictWord{10, 11, 785}, - dictWord{11, 11, 55}, - dictWord{11, 11, 149}, - dictWord{11, 10, 228}, - dictWord{ - 11, - 10, - 426, - }, - dictWord{11, 11, 773}, - dictWord{13, 10, 231}, - dictWord{13, 11, 416}, - dictWord{13, 11, 419}, - dictWord{14, 11, 38}, - dictWord{14, 11, 41}, - dictWord{14, 11, 210}, - dictWord{18, 10, 106}, - dictWord{148, 10, 87}, - dictWord{4, 0, 751}, - dictWord{11, 0, 390}, - dictWord{140, 0, 32}, - dictWord{4, 0, 409}, - dictWord{133, 0, 78}, - dictWord{11, 11, 458}, - dictWord{12, 11, 15}, - dictWord{140, 11, 432}, - dictWord{7, 0, 1602}, - dictWord{10, 0, 257}, - dictWord{10, 0, 698}, - dictWord{11, 0, 544}, - dictWord{11, 0, 585}, - dictWord{12, 0, 212}, - dictWord{13, 0, 307}, - dictWord{5, 10, 231}, - dictWord{7, 10, 601}, - dictWord{9, 10, 277}, - dictWord{ - 9, - 10, - 674, - }, - dictWord{10, 10, 178}, - dictWord{10, 10, 418}, - dictWord{10, 10, 509}, - dictWord{11, 10, 531}, - dictWord{12, 10, 113}, - dictWord{12, 10, 475}, - dictWord{13, 10, 99}, - dictWord{142, 10, 428}, - dictWord{6, 0, 473}, - dictWord{145, 0, 105}, - dictWord{6, 0, 1949}, - dictWord{15, 0, 156}, - dictWord{133, 11, 645}, - dictWord{7, 10, 1591}, - dictWord{144, 10, 43}, - dictWord{135, 0, 1779}, - dictWord{135, 10, 1683}, - dictWord{4, 11, 290}, - dictWord{135, 11, 1356}, - dictWord{134, 0, 763}, - dictWord{6, 11, 70}, - dictWord{7, 11, 1292}, - dictWord{10, 11, 762}, - dictWord{139, 11, 288}, - dictWord{142, 0, 29}, - dictWord{140, 11, 428}, - dictWord{7, 0, 883}, - dictWord{7, 11, 131}, - dictWord{7, 11, 422}, - dictWord{8, 11, 210}, - dictWord{140, 11, 573}, - dictWord{134, 0, 488}, - dictWord{4, 10, 399}, - dictWord{5, 10, 119}, - dictWord{5, 10, 494}, - dictWord{7, 10, 751}, - dictWord{137, 10, 556}, - dictWord{133, 0, 617}, - dictWord{132, 11, 936}, - dictWord{ - 139, - 0, - 50, - }, - dictWord{7, 
0, 1518}, - dictWord{139, 0, 694}, - dictWord{137, 0, 785}, - dictWord{4, 0, 546}, - dictWord{135, 0, 2042}, - dictWord{7, 11, 716}, - dictWord{ - 13, - 11, - 97, - }, - dictWord{141, 11, 251}, - dictWord{132, 11, 653}, - dictWord{145, 0, 22}, - dictWord{134, 0, 1016}, - dictWord{4, 0, 313}, - dictWord{133, 0, 577}, - dictWord{ - 136, - 11, - 657, - }, - dictWord{8, 0, 184}, - dictWord{141, 0, 433}, - dictWord{135, 0, 935}, - dictWord{6, 0, 720}, - dictWord{9, 0, 114}, - dictWord{146, 11, 80}, - dictWord{ - 12, - 0, - 186, - }, - dictWord{12, 0, 292}, - dictWord{14, 0, 100}, - dictWord{18, 0, 70}, - dictWord{7, 10, 594}, - dictWord{7, 10, 851}, - dictWord{7, 10, 1858}, - dictWord{ - 9, - 10, - 411, - }, - dictWord{9, 10, 574}, - dictWord{9, 10, 666}, - dictWord{9, 10, 737}, - dictWord{10, 10, 346}, - dictWord{10, 10, 712}, - dictWord{11, 10, 246}, - dictWord{ - 11, - 10, - 432, - }, - dictWord{11, 10, 517}, - dictWord{11, 10, 647}, - dictWord{11, 10, 679}, - dictWord{11, 10, 727}, - dictWord{12, 10, 304}, - dictWord{12, 10, 305}, - dictWord{12, 10, 323}, - dictWord{12, 10, 483}, - dictWord{12, 10, 572}, - dictWord{12, 10, 593}, - dictWord{12, 10, 602}, - dictWord{13, 10, 95}, - dictWord{13, 10, 101}, - dictWord{13, 10, 171}, - dictWord{13, 10, 315}, - dictWord{13, 10, 378}, - dictWord{13, 10, 425}, - dictWord{13, 10, 475}, - dictWord{14, 10, 63}, - dictWord{ - 14, - 10, - 380, - }, - dictWord{14, 10, 384}, - dictWord{15, 10, 133}, - dictWord{18, 10, 112}, - dictWord{148, 10, 72}, - dictWord{135, 10, 1093}, - dictWord{135, 11, 1836}, - dictWord{132, 10, 679}, - dictWord{137, 10, 203}, - dictWord{11, 0, 402}, - dictWord{12, 0, 109}, - dictWord{12, 0, 431}, - dictWord{13, 0, 179}, - dictWord{13, 0, 206}, - dictWord{14, 0, 217}, - dictWord{16, 0, 3}, - dictWord{148, 0, 53}, - dictWord{7, 11, 1368}, - dictWord{8, 11, 232}, - dictWord{8, 11, 361}, - dictWord{10, 11, 682}, - dictWord{138, 11, 742}, - dictWord{137, 10, 714}, - dictWord{5, 0, 886}, - dictWord{6, 0, 46}, - 
dictWord{6, 0, 1790}, - dictWord{7, 0, 14}, - dictWord{7, 0, 732}, - dictWord{ - 7, - 0, - 1654, - }, - dictWord{8, 0, 95}, - dictWord{8, 0, 327}, - dictWord{8, 0, 616}, - dictWord{9, 0, 892}, - dictWord{10, 0, 598}, - dictWord{10, 0, 769}, - dictWord{11, 0, 134}, - dictWord{11, 0, 747}, - dictWord{12, 0, 378}, - dictWord{14, 0, 97}, - dictWord{137, 11, 534}, - dictWord{4, 0, 969}, - dictWord{136, 10, 825}, - dictWord{137, 11, 27}, - dictWord{6, 0, 727}, - dictWord{142, 11, 12}, - dictWord{133, 0, 1021}, - dictWord{134, 0, 1190}, - dictWord{134, 11, 1657}, - dictWord{5, 10, 143}, - dictWord{ - 5, - 10, - 769, - }, - dictWord{6, 10, 1760}, - dictWord{7, 10, 682}, - dictWord{7, 10, 1992}, - dictWord{136, 10, 736}, - dictWord{132, 0, 153}, - dictWord{135, 11, 127}, - dictWord{133, 0, 798}, - dictWord{132, 0, 587}, - dictWord{6, 0, 598}, - dictWord{7, 0, 42}, - dictWord{8, 0, 695}, - dictWord{10, 0, 212}, - dictWord{11, 0, 158}, - dictWord{ - 14, - 0, - 196, - }, - dictWord{145, 0, 85}, - dictWord{133, 10, 860}, - dictWord{6, 0, 1929}, - dictWord{134, 0, 1933}, - dictWord{5, 0, 957}, - dictWord{5, 0, 1008}, - dictWord{ - 9, - 0, - 577, - }, - dictWord{12, 0, 141}, - dictWord{6, 10, 422}, - dictWord{7, 10, 0}, - dictWord{7, 10, 1544}, - dictWord{8, 11, 364}, - dictWord{11, 10, 990}, - dictWord{ - 12, - 10, - 453, - }, - dictWord{13, 10, 47}, - dictWord{141, 10, 266}, - dictWord{134, 0, 1319}, - dictWord{4, 0, 129}, - dictWord{135, 0, 465}, - dictWord{7, 0, 470}, - dictWord{ - 7, - 0, - 1057, - }, - dictWord{7, 0, 1201}, - dictWord{9, 0, 755}, - dictWord{11, 0, 906}, - dictWord{140, 0, 527}, - dictWord{7, 0, 908}, - dictWord{146, 0, 7}, - dictWord{5, 0, 148}, - dictWord{136, 0, 450}, - dictWord{5, 10, 515}, - dictWord{137, 10, 131}, - dictWord{7, 10, 1605}, - dictWord{11, 10, 962}, - dictWord{146, 10, 139}, - dictWord{ - 132, - 10, - 646, - }, - dictWord{134, 0, 1166}, - dictWord{4, 10, 396}, - dictWord{7, 10, 728}, - dictWord{9, 10, 117}, - dictWord{13, 10, 202}, - 
dictWord{148, 10, 51}, - dictWord{ - 6, - 10, - 121, - }, - dictWord{6, 10, 124}, - dictWord{6, 10, 357}, - dictWord{7, 10, 1138}, - dictWord{7, 10, 1295}, - dictWord{8, 10, 162}, - dictWord{139, 10, 655}, - dictWord{14, 0, 374}, - dictWord{142, 11, 374}, - dictWord{138, 0, 253}, - dictWord{139, 0, 1003}, - dictWord{5, 11, 909}, - dictWord{9, 11, 849}, - dictWord{ - 138, - 11, - 805, - }, - dictWord{133, 10, 237}, - dictWord{7, 11, 525}, - dictWord{7, 11, 1579}, - dictWord{8, 11, 497}, - dictWord{136, 11, 573}, - dictWord{137, 0, 46}, - dictWord{ - 132, - 0, - 879, - }, - dictWord{134, 0, 806}, - dictWord{135, 0, 1868}, - dictWord{6, 0, 1837}, - dictWord{134, 0, 1846}, - dictWord{6, 0, 730}, - dictWord{134, 0, 881}, - dictWord{7, 0, 965}, - dictWord{7, 0, 1460}, - dictWord{7, 0, 1604}, - dictWord{7, 11, 193}, - dictWord{7, 11, 397}, - dictWord{7, 11, 1105}, - dictWord{8, 11, 124}, - dictWord{ - 8, - 11, - 619, - }, - dictWord{9, 11, 305}, - dictWord{10, 11, 264}, - dictWord{11, 11, 40}, - dictWord{12, 11, 349}, - dictWord{13, 11, 134}, - dictWord{13, 11, 295}, - dictWord{14, 11, 155}, - dictWord{15, 11, 120}, - dictWord{146, 11, 105}, - dictWord{136, 0, 506}, - dictWord{143, 0, 10}, - dictWord{4, 11, 262}, - dictWord{7, 11, 342}, - dictWord{7, 10, 571}, - dictWord{7, 10, 1877}, - dictWord{10, 10, 366}, - dictWord{141, 11, 23}, - dictWord{133, 11, 641}, - dictWord{10, 0, 22}, - dictWord{9, 10, 513}, - dictWord{10, 10, 39}, - dictWord{12, 10, 122}, - dictWord{140, 10, 187}, - dictWord{135, 11, 1431}, - dictWord{150, 11, 49}, - dictWord{4, 11, 99}, - dictWord{ - 6, - 11, - 250, - }, - dictWord{6, 11, 346}, - dictWord{8, 11, 127}, - dictWord{138, 11, 81}, - dictWord{6, 0, 2014}, - dictWord{8, 0, 928}, - dictWord{10, 0, 960}, - dictWord{10, 0, 979}, - dictWord{140, 0, 996}, - dictWord{134, 0, 296}, - dictWord{132, 11, 915}, - dictWord{5, 11, 75}, - dictWord{9, 11, 517}, - dictWord{10, 11, 470}, - dictWord{ - 12, - 11, - 155, - }, - dictWord{141, 11, 224}, - 
dictWord{137, 10, 873}, - dictWord{4, 0, 854}, - dictWord{140, 11, 18}, - dictWord{134, 0, 587}, - dictWord{7, 10, 107}, - dictWord{ - 7, - 10, - 838, - }, - dictWord{8, 10, 550}, - dictWord{138, 10, 401}, - dictWord{11, 0, 636}, - dictWord{15, 0, 145}, - dictWord{17, 0, 34}, - dictWord{19, 0, 50}, - dictWord{ - 23, - 0, - 20, - }, - dictWord{11, 10, 588}, - dictWord{11, 10, 864}, - dictWord{11, 10, 968}, - dictWord{143, 10, 160}, - dictWord{135, 11, 216}, - dictWord{7, 0, 982}, - dictWord{ - 10, - 0, - 32, - }, - dictWord{143, 0, 56}, - dictWord{133, 10, 768}, - dictWord{133, 11, 954}, - dictWord{6, 11, 304}, - dictWord{7, 11, 1114}, - dictWord{8, 11, 418}, - dictWord{ - 10, - 11, - 345, - }, - dictWord{11, 11, 341}, - dictWord{11, 11, 675}, - dictWord{141, 11, 40}, - dictWord{9, 11, 410}, - dictWord{139, 11, 425}, - dictWord{136, 0, 941}, - dictWord{5, 0, 435}, - dictWord{132, 10, 894}, - dictWord{5, 0, 85}, - dictWord{6, 0, 419}, - dictWord{7, 0, 134}, - dictWord{7, 0, 305}, - dictWord{7, 0, 361}, - dictWord{ - 7, - 0, - 1337, - }, - dictWord{8, 0, 71}, - dictWord{140, 0, 519}, - dictWord{140, 0, 688}, - dictWord{135, 0, 740}, - dictWord{5, 0, 691}, - dictWord{7, 0, 345}, - dictWord{9, 0, 94}, - dictWord{140, 0, 169}, - dictWord{5, 0, 183}, - dictWord{6, 0, 582}, - dictWord{10, 0, 679}, - dictWord{140, 0, 435}, - dictWord{134, 11, 14}, - dictWord{6, 0, 945}, - dictWord{135, 0, 511}, - dictWord{134, 11, 1708}, - dictWord{5, 11, 113}, - dictWord{6, 11, 243}, - dictWord{7, 11, 1865}, - dictWord{11, 11, 161}, - dictWord{16, 11, 37}, - dictWord{145, 11, 99}, - dictWord{132, 11, 274}, - dictWord{137, 0, 539}, - dictWord{7, 0, 1993}, - dictWord{8, 0, 684}, - dictWord{134, 10, 272}, - dictWord{ - 6, - 0, - 659, - }, - dictWord{134, 0, 982}, - dictWord{4, 10, 9}, - dictWord{5, 10, 128}, - dictWord{7, 10, 368}, - dictWord{11, 10, 480}, - dictWord{148, 10, 3}, - dictWord{ - 134, - 0, - 583, - }, - dictWord{132, 0, 803}, - dictWord{133, 0, 704}, - dictWord{4, 0, 179}, - 
dictWord{5, 0, 198}, - dictWord{133, 0, 697}, - dictWord{7, 0, 347}, - dictWord{7, 0, 971}, - dictWord{8, 0, 181}, - dictWord{10, 0, 711}, - dictWord{135, 11, 166}, - dictWord{136, 10, 682}, - dictWord{4, 10, 2}, - dictWord{7, 10, 545}, - dictWord{7, 10, 894}, - dictWord{136, 11, 521}, - dictWord{135, 0, 481}, - dictWord{132, 0, 243}, - dictWord{5, 0, 203}, - dictWord{7, 0, 19}, - dictWord{7, 0, 71}, - dictWord{7, 0, 113}, - dictWord{ - 10, - 0, - 405, - }, - dictWord{11, 0, 357}, - dictWord{142, 0, 240}, - dictWord{5, 11, 725}, - dictWord{5, 11, 727}, - dictWord{135, 11, 1811}, - dictWord{6, 0, 826}, - dictWord{ - 137, - 11, - 304, - }, - dictWord{7, 0, 1450}, - dictWord{139, 0, 99}, - dictWord{133, 11, 654}, - dictWord{134, 0, 492}, - dictWord{5, 0, 134}, - dictWord{6, 0, 408}, - dictWord{ - 6, - 0, - 495, - }, - dictWord{7, 0, 1593}, - dictWord{6, 11, 273}, - dictWord{10, 11, 188}, - dictWord{13, 11, 377}, - dictWord{146, 11, 77}, - dictWord{9, 10, 769}, - dictWord{ - 140, - 10, - 185, - }, - dictWord{135, 11, 410}, - dictWord{142, 0, 4}, - dictWord{4, 0, 665}, - dictWord{134, 11, 1785}, - dictWord{4, 0, 248}, - dictWord{7, 0, 137}, - dictWord{ - 137, - 0, - 349, - }, - dictWord{5, 10, 530}, - dictWord{142, 10, 113}, - dictWord{7, 0, 1270}, - dictWord{139, 0, 612}, - dictWord{132, 11, 780}, - dictWord{5, 0, 371}, - dictWord{135, 0, 563}, - dictWord{135, 0, 826}, - dictWord{6, 0, 1535}, - dictWord{23, 0, 21}, - dictWord{151, 0, 23}, - dictWord{4, 0, 374}, - dictWord{7, 0, 547}, - dictWord{ - 7, - 0, - 1700, - }, - dictWord{7, 0, 1833}, - dictWord{139, 0, 858}, - dictWord{133, 10, 556}, - dictWord{7, 11, 612}, - dictWord{8, 11, 545}, - dictWord{8, 11, 568}, - dictWord{ - 8, - 11, - 642, - }, - dictWord{9, 11, 717}, - dictWord{10, 11, 541}, - dictWord{10, 11, 763}, - dictWord{11, 11, 449}, - dictWord{12, 11, 489}, - dictWord{13, 11, 153}, - dictWord{ - 13, - 11, - 296, - }, - dictWord{14, 11, 138}, - dictWord{14, 11, 392}, - dictWord{15, 11, 50}, - dictWord{16, 11, 
6}, - dictWord{16, 11, 12}, - dictWord{148, 11, 9}, - dictWord{ - 9, - 0, - 311, - }, - dictWord{141, 0, 42}, - dictWord{8, 10, 16}, - dictWord{140, 10, 568}, - dictWord{6, 0, 1968}, - dictWord{6, 0, 2027}, - dictWord{138, 0, 991}, - dictWord{ - 6, - 0, - 1647, - }, - dictWord{7, 0, 1552}, - dictWord{7, 0, 2010}, - dictWord{9, 0, 494}, - dictWord{137, 0, 509}, - dictWord{133, 11, 948}, - dictWord{6, 10, 186}, - dictWord{ - 137, - 10, - 426, - }, - dictWord{134, 0, 769}, - dictWord{134, 0, 642}, - dictWord{132, 10, 585}, - dictWord{6, 0, 123}, - dictWord{7, 0, 214}, - dictWord{9, 0, 728}, - dictWord{ - 10, - 0, - 157, - }, - dictWord{11, 0, 346}, - dictWord{11, 0, 662}, - dictWord{143, 0, 106}, - dictWord{142, 11, 381}, - dictWord{135, 0, 1435}, - dictWord{4, 11, 532}, - dictWord{ - 5, - 11, - 706, - }, - dictWord{135, 11, 662}, - dictWord{5, 11, 837}, - dictWord{134, 11, 1651}, - dictWord{4, 10, 93}, - dictWord{5, 10, 252}, - dictWord{6, 10, 229}, - dictWord{ - 7, - 10, - 291, - }, - dictWord{9, 10, 550}, - dictWord{139, 10, 644}, - dictWord{148, 0, 79}, - dictWord{137, 10, 749}, - dictWord{134, 0, 1425}, - dictWord{ - 137, - 10, - 162, - }, - dictWord{4, 11, 362}, - dictWord{7, 11, 52}, - dictWord{7, 11, 303}, - dictWord{140, 11, 166}, - dictWord{132, 10, 381}, - dictWord{4, 11, 330}, - dictWord{ - 7, - 11, - 933, - }, - dictWord{7, 11, 2012}, - dictWord{136, 11, 292}, - dictWord{135, 11, 767}, - dictWord{4, 0, 707}, - dictWord{5, 0, 588}, - dictWord{6, 0, 393}, - dictWord{ - 13, - 0, - 106, - }, - dictWord{18, 0, 49}, - dictWord{147, 0, 41}, - dictWord{6, 0, 211}, - dictWord{7, 0, 1690}, - dictWord{11, 0, 486}, - dictWord{140, 0, 369}, - dictWord{ - 137, - 11, - 883, - }, - dictWord{4, 11, 703}, - dictWord{135, 11, 207}, - dictWord{4, 0, 187}, - dictWord{5, 0, 184}, - dictWord{5, 0, 690}, - dictWord{7, 0, 1869}, - dictWord{10, 0, 756}, - dictWord{139, 0, 783}, - dictWord{132, 11, 571}, - dictWord{134, 0, 1382}, - dictWord{5, 0, 175}, - dictWord{6, 10, 77}, - 
dictWord{6, 10, 157}, - dictWord{7, 10, 974}, - dictWord{7, 10, 1301}, - dictWord{7, 10, 1339}, - dictWord{7, 10, 1490}, - dictWord{7, 10, 1873}, - dictWord{137, 10, 628}, - dictWord{134, 0, 1493}, - dictWord{ - 5, - 11, - 873, - }, - dictWord{133, 11, 960}, - dictWord{134, 0, 1007}, - dictWord{12, 11, 93}, - dictWord{12, 11, 501}, - dictWord{13, 11, 362}, - dictWord{14, 11, 151}, - dictWord{15, 11, 40}, - dictWord{15, 11, 59}, - dictWord{16, 11, 46}, - dictWord{17, 11, 25}, - dictWord{18, 11, 14}, - dictWord{18, 11, 134}, - dictWord{19, 11, 25}, - dictWord{ - 19, - 11, - 69, - }, - dictWord{20, 11, 16}, - dictWord{20, 11, 19}, - dictWord{20, 11, 66}, - dictWord{21, 11, 23}, - dictWord{21, 11, 25}, - dictWord{150, 11, 42}, - dictWord{ - 11, - 10, - 919, - }, - dictWord{141, 10, 409}, - dictWord{134, 0, 219}, - dictWord{5, 0, 582}, - dictWord{6, 0, 1646}, - dictWord{7, 0, 99}, - dictWord{7, 0, 1962}, - dictWord{ - 7, - 0, - 1986, - }, - dictWord{8, 0, 515}, - dictWord{8, 0, 773}, - dictWord{9, 0, 23}, - dictWord{9, 0, 491}, - dictWord{12, 0, 620}, - dictWord{142, 0, 93}, - dictWord{133, 0, 851}, - dictWord{5, 11, 33}, - dictWord{134, 11, 470}, - dictWord{135, 11, 1291}, - dictWord{134, 0, 1278}, - dictWord{135, 11, 1882}, - dictWord{135, 10, 1489}, - dictWord{132, 0, 1000}, - dictWord{138, 0, 982}, - dictWord{8, 0, 762}, - dictWord{8, 0, 812}, - dictWord{137, 0, 910}, - dictWord{6, 11, 47}, - dictWord{7, 11, 90}, - dictWord{ - 7, - 11, - 664, - }, - dictWord{7, 11, 830}, - dictWord{7, 11, 1380}, - dictWord{7, 11, 2025}, - dictWord{8, 11, 448}, - dictWord{136, 11, 828}, - dictWord{4, 0, 98}, - dictWord{ - 4, - 0, - 940, - }, - dictWord{6, 0, 1819}, - dictWord{6, 0, 1834}, - dictWord{6, 0, 1841}, - dictWord{7, 0, 1365}, - dictWord{8, 0, 859}, - dictWord{8, 0, 897}, - dictWord{8, 0, 918}, - dictWord{9, 0, 422}, - dictWord{9, 0, 670}, - dictWord{10, 0, 775}, - dictWord{10, 0, 894}, - dictWord{10, 0, 909}, - dictWord{10, 0, 910}, - dictWord{10, 0, 935}, - dictWord{ - 11, 
- 0, - 210, - }, - dictWord{12, 0, 750}, - dictWord{12, 0, 755}, - dictWord{13, 0, 26}, - dictWord{13, 0, 457}, - dictWord{13, 0, 476}, - dictWord{16, 0, 100}, - dictWord{16, 0, 109}, - dictWord{18, 0, 173}, - dictWord{18, 0, 175}, - dictWord{8, 10, 398}, - dictWord{9, 10, 681}, - dictWord{139, 10, 632}, - dictWord{9, 11, 417}, - dictWord{ - 137, - 11, - 493, - }, - dictWord{136, 10, 645}, - dictWord{138, 0, 906}, - dictWord{134, 0, 1730}, - dictWord{134, 10, 20}, - dictWord{133, 11, 1019}, - dictWord{134, 0, 1185}, - dictWord{10, 0, 40}, - dictWord{136, 10, 769}, - dictWord{9, 0, 147}, - dictWord{134, 11, 208}, - dictWord{140, 0, 650}, - dictWord{5, 0, 209}, - dictWord{6, 0, 30}, - dictWord{11, 0, 56}, - dictWord{139, 0, 305}, - dictWord{132, 0, 553}, - dictWord{138, 11, 344}, - dictWord{6, 11, 68}, - dictWord{7, 11, 398}, - dictWord{7, 11, 448}, - dictWord{ - 7, - 11, - 1629, - }, - dictWord{7, 11, 1813}, - dictWord{8, 11, 387}, - dictWord{8, 11, 442}, - dictWord{9, 11, 710}, - dictWord{10, 11, 282}, - dictWord{138, 11, 722}, - dictWord{5, 0, 597}, - dictWord{14, 0, 20}, - dictWord{142, 11, 20}, - dictWord{135, 0, 1614}, - dictWord{135, 10, 1757}, - dictWord{4, 0, 150}, - dictWord{5, 0, 303}, - dictWord{6, 0, 327}, - dictWord{135, 10, 937}, - dictWord{16, 0, 49}, - dictWord{7, 10, 1652}, - dictWord{144, 11, 49}, - dictWord{8, 0, 192}, - dictWord{10, 0, 78}, - dictWord{ - 141, - 0, - 359, - }, - dictWord{135, 0, 786}, - dictWord{143, 0, 134}, - dictWord{6, 0, 1638}, - dictWord{7, 0, 79}, - dictWord{7, 0, 496}, - dictWord{9, 0, 138}, - dictWord{ - 10, - 0, - 336, - }, - dictWord{11, 0, 12}, - dictWord{12, 0, 412}, - dictWord{12, 0, 440}, - dictWord{142, 0, 305}, - dictWord{136, 11, 491}, - dictWord{4, 10, 579}, - dictWord{ - 5, - 10, - 226, - }, - dictWord{5, 10, 323}, - dictWord{135, 10, 960}, - dictWord{7, 0, 204}, - dictWord{7, 0, 415}, - dictWord{8, 0, 42}, - dictWord{10, 0, 85}, - dictWord{139, 0, 564}, - dictWord{132, 0, 614}, - dictWord{4, 11, 403}, - 
dictWord{5, 11, 441}, - dictWord{7, 11, 450}, - dictWord{11, 11, 101}, - dictWord{12, 11, 193}, - dictWord{141, 11, 430}, - dictWord{135, 11, 1927}, - dictWord{135, 11, 1330}, - dictWord{4, 0, 3}, - dictWord{5, 0, 247}, - dictWord{5, 0, 644}, - dictWord{7, 0, 744}, - dictWord{7, 0, 1207}, - dictWord{7, 0, 1225}, - dictWord{7, 0, 1909}, - dictWord{146, 0, 147}, - dictWord{136, 0, 942}, - dictWord{4, 0, 1019}, - dictWord{134, 0, 2023}, - dictWord{5, 11, 679}, - dictWord{133, 10, 973}, - dictWord{5, 0, 285}, - dictWord{9, 0, 67}, - dictWord{13, 0, 473}, - dictWord{143, 0, 82}, - dictWord{7, 11, 328}, - dictWord{137, 11, 326}, - dictWord{151, 0, 8}, - dictWord{6, 10, 135}, - dictWord{135, 10, 1176}, - dictWord{135, 11, 1128}, - dictWord{134, 0, 1309}, - dictWord{135, 11, 1796}, - dictWord{ - 135, - 10, - 314, - }, - dictWord{4, 11, 574}, - dictWord{7, 11, 350}, - dictWord{7, 11, 1024}, - dictWord{8, 11, 338}, - dictWord{9, 11, 677}, - dictWord{10, 11, 808}, - dictWord{ - 139, - 11, - 508, - }, - dictWord{7, 11, 818}, - dictWord{17, 11, 14}, - dictWord{17, 11, 45}, - dictWord{18, 11, 75}, - dictWord{148, 11, 18}, - dictWord{146, 10, 4}, - dictWord{ - 135, - 11, - 1081, - }, - dictWord{4, 0, 29}, - dictWord{6, 0, 532}, - dictWord{7, 0, 1628}, - dictWord{7, 0, 1648}, - dictWord{9, 0, 350}, - dictWord{10, 0, 433}, - dictWord{11, 0, 97}, - dictWord{11, 0, 557}, - dictWord{11, 0, 745}, - dictWord{12, 0, 289}, - dictWord{12, 0, 335}, - dictWord{12, 0, 348}, - dictWord{12, 0, 606}, - dictWord{13, 0, 116}, - dictWord{13, 0, 233}, - dictWord{13, 0, 466}, - dictWord{14, 0, 181}, - dictWord{14, 0, 209}, - dictWord{14, 0, 232}, - dictWord{14, 0, 236}, - dictWord{14, 0, 300}, - dictWord{ - 16, - 0, - 41, - }, - dictWord{148, 0, 97}, - dictWord{7, 0, 318}, - dictWord{6, 10, 281}, - dictWord{8, 10, 282}, - dictWord{8, 10, 480}, - dictWord{8, 10, 499}, - dictWord{9, 10, 198}, - dictWord{10, 10, 143}, - dictWord{10, 10, 169}, - dictWord{10, 10, 211}, - dictWord{10, 10, 417}, - 
dictWord{10, 10, 574}, - dictWord{11, 10, 147}, - dictWord{ - 11, - 10, - 395, - }, - dictWord{12, 10, 75}, - dictWord{12, 10, 407}, - dictWord{12, 10, 608}, - dictWord{13, 10, 500}, - dictWord{142, 10, 251}, - dictWord{135, 11, 1676}, - dictWord{135, 11, 2037}, - dictWord{135, 0, 1692}, - dictWord{5, 0, 501}, - dictWord{7, 0, 1704}, - dictWord{9, 0, 553}, - dictWord{11, 0, 520}, - dictWord{12, 0, 557}, - dictWord{141, 0, 249}, - dictWord{6, 0, 1527}, - dictWord{14, 0, 324}, - dictWord{15, 0, 55}, - dictWord{15, 0, 80}, - dictWord{14, 11, 324}, - dictWord{15, 11, 55}, - dictWord{143, 11, 80}, - dictWord{135, 10, 1776}, - dictWord{8, 0, 988}, - dictWord{137, 11, 297}, - dictWord{132, 10, 419}, - dictWord{142, 0, 223}, - dictWord{ - 139, - 11, - 234, - }, - dictWord{7, 0, 1123}, - dictWord{12, 0, 508}, - dictWord{14, 0, 102}, - dictWord{14, 0, 226}, - dictWord{144, 0, 57}, - dictWord{4, 10, 138}, - dictWord{ - 7, - 10, - 1012, - }, - dictWord{7, 10, 1280}, - dictWord{137, 10, 76}, - dictWord{7, 0, 1764}, - dictWord{5, 10, 29}, - dictWord{140, 10, 638}, - dictWord{134, 0, 2015}, - dictWord{134, 0, 1599}, - dictWord{138, 11, 56}, - dictWord{6, 11, 306}, - dictWord{7, 11, 1140}, - dictWord{7, 11, 1340}, - dictWord{8, 11, 133}, - dictWord{ - 138, - 11, - 449, - }, - dictWord{139, 11, 1011}, - dictWord{6, 10, 1710}, - dictWord{135, 10, 2038}, - dictWord{7, 11, 1763}, - dictWord{140, 11, 310}, - dictWord{6, 0, 129}, - dictWord{4, 10, 17}, - dictWord{5, 10, 23}, - dictWord{7, 10, 995}, - dictWord{11, 10, 383}, - dictWord{11, 10, 437}, - dictWord{12, 10, 460}, - dictWord{140, 10, 532}, - dictWord{5, 11, 329}, - dictWord{136, 11, 260}, - dictWord{133, 10, 862}, - dictWord{132, 0, 534}, - dictWord{6, 0, 811}, - dictWord{135, 0, 626}, - dictWord{ - 132, - 11, - 657, - }, - dictWord{4, 0, 25}, - dictWord{5, 0, 60}, - dictWord{6, 0, 504}, - dictWord{7, 0, 614}, - dictWord{7, 0, 1155}, - dictWord{12, 0, 0}, - dictWord{152, 11, 7}, - dictWord{ - 7, - 0, - 1248, - }, - dictWord{11, 
0, 621}, - dictWord{139, 0, 702}, - dictWord{137, 0, 321}, - dictWord{8, 10, 70}, - dictWord{12, 10, 171}, - dictWord{141, 10, 272}, - dictWord{ - 10, - 10, - 233, - }, - dictWord{139, 10, 76}, - dictWord{4, 0, 379}, - dictWord{7, 0, 1397}, - dictWord{134, 10, 442}, - dictWord{5, 11, 66}, - dictWord{7, 11, 1896}, - dictWord{ - 136, - 11, - 288, - }, - dictWord{134, 11, 1643}, - dictWord{134, 10, 1709}, - dictWord{4, 11, 21}, - dictWord{5, 11, 91}, - dictWord{5, 11, 570}, - dictWord{5, 11, 648}, - dictWord{5, 11, 750}, - dictWord{5, 11, 781}, - dictWord{6, 11, 54}, - dictWord{6, 11, 112}, - dictWord{6, 11, 402}, - dictWord{6, 11, 1732}, - dictWord{7, 11, 315}, - dictWord{ - 7, - 11, - 749, - }, - dictWord{7, 11, 1347}, - dictWord{7, 11, 1900}, - dictWord{9, 11, 78}, - dictWord{9, 11, 508}, - dictWord{10, 11, 611}, - dictWord{11, 11, 510}, - dictWord{ - 11, - 11, - 728, - }, - dictWord{13, 11, 36}, - dictWord{14, 11, 39}, - dictWord{16, 11, 83}, - dictWord{17, 11, 124}, - dictWord{148, 11, 30}, - dictWord{4, 0, 118}, - dictWord{ - 6, - 0, - 274, - }, - dictWord{6, 0, 361}, - dictWord{7, 0, 75}, - dictWord{141, 0, 441}, - dictWord{10, 11, 322}, - dictWord{10, 11, 719}, - dictWord{139, 11, 407}, - dictWord{ - 147, - 10, - 119, - }, - dictWord{12, 11, 549}, - dictWord{14, 11, 67}, - dictWord{147, 11, 60}, - dictWord{11, 10, 69}, - dictWord{12, 10, 105}, - dictWord{12, 10, 117}, - dictWord{13, 10, 213}, - dictWord{14, 10, 13}, - dictWord{14, 10, 62}, - dictWord{14, 10, 177}, - dictWord{14, 10, 421}, - dictWord{15, 10, 19}, - dictWord{146, 10, 141}, - dictWord{9, 0, 841}, - dictWord{137, 10, 309}, - dictWord{7, 10, 608}, - dictWord{7, 10, 976}, - dictWord{8, 11, 125}, - dictWord{8, 11, 369}, - dictWord{8, 11, 524}, - dictWord{9, 10, 146}, - dictWord{10, 10, 206}, - dictWord{10, 11, 486}, - dictWord{10, 10, 596}, - dictWord{11, 11, 13}, - dictWord{11, 11, 381}, - dictWord{11, 11, 736}, - dictWord{11, 11, 766}, - dictWord{11, 11, 845}, - dictWord{13, 11, 114}, - 
dictWord{13, 10, 218}, - dictWord{13, 11, 292}, - dictWord{14, 11, 47}, - dictWord{ - 142, - 10, - 153, - }, - dictWord{12, 0, 693}, - dictWord{135, 11, 759}, - dictWord{5, 0, 314}, - dictWord{6, 0, 221}, - dictWord{7, 0, 419}, - dictWord{10, 0, 650}, - dictWord{11, 0, 396}, - dictWord{12, 0, 156}, - dictWord{13, 0, 369}, - dictWord{14, 0, 333}, - dictWord{145, 0, 47}, - dictWord{6, 11, 1684}, - dictWord{6, 11, 1731}, - dictWord{7, 11, 356}, - dictWord{7, 11, 1932}, - dictWord{8, 11, 54}, - dictWord{8, 11, 221}, - dictWord{9, 11, 225}, - dictWord{9, 11, 356}, - dictWord{10, 11, 77}, - dictWord{10, 11, 446}, - dictWord{10, 11, 731}, - dictWord{12, 11, 404}, - dictWord{141, 11, 491}, - dictWord{132, 11, 375}, - dictWord{4, 10, 518}, - dictWord{135, 10, 1136}, - dictWord{ - 4, - 0, - 913, - }, - dictWord{4, 11, 411}, - dictWord{11, 11, 643}, - dictWord{140, 11, 115}, - dictWord{4, 11, 80}, - dictWord{133, 11, 44}, - dictWord{8, 10, 689}, - dictWord{ - 137, - 10, - 863, - }, - dictWord{138, 0, 880}, - dictWord{4, 10, 18}, - dictWord{7, 10, 145}, - dictWord{7, 10, 444}, - dictWord{7, 10, 1278}, - dictWord{8, 10, 49}, - dictWord{ - 8, - 10, - 400, - }, - dictWord{9, 10, 71}, - dictWord{9, 10, 250}, - dictWord{10, 10, 459}, - dictWord{12, 10, 160}, - dictWord{144, 10, 24}, - dictWord{136, 0, 475}, - dictWord{ - 5, - 0, - 1016, - }, - dictWord{5, 11, 299}, - dictWord{135, 11, 1083}, - dictWord{7, 0, 602}, - dictWord{8, 0, 179}, - dictWord{10, 0, 781}, - dictWord{140, 0, 126}, - dictWord{ - 6, - 0, - 329, - }, - dictWord{138, 0, 111}, - dictWord{135, 0, 1864}, - dictWord{4, 11, 219}, - dictWord{7, 11, 1761}, - dictWord{137, 11, 86}, - dictWord{6, 0, 1888}, - dictWord{ - 6, - 0, - 1892, - }, - dictWord{6, 0, 1901}, - dictWord{6, 0, 1904}, - dictWord{9, 0, 953}, - dictWord{9, 0, 985}, - dictWord{9, 0, 991}, - dictWord{9, 0, 1001}, - dictWord{12, 0, 818}, - dictWord{12, 0, 846}, - dictWord{12, 0, 847}, - dictWord{12, 0, 861}, - dictWord{12, 0, 862}, - dictWord{12, 0, 873}, - 
dictWord{12, 0, 875}, - dictWord{12, 0, 877}, - dictWord{12, 0, 879}, - dictWord{12, 0, 881}, - dictWord{12, 0, 884}, - dictWord{12, 0, 903}, - dictWord{12, 0, 915}, - dictWord{12, 0, 926}, - dictWord{12, 0, 939}, - dictWord{ - 15, - 0, - 182, - }, - dictWord{15, 0, 219}, - dictWord{15, 0, 255}, - dictWord{18, 0, 191}, - dictWord{18, 0, 209}, - dictWord{18, 0, 211}, - dictWord{149, 0, 41}, - dictWord{ - 5, - 11, - 328, - }, - dictWord{135, 11, 918}, - dictWord{137, 0, 780}, - dictWord{12, 0, 82}, - dictWord{143, 0, 36}, - dictWord{133, 10, 1010}, - dictWord{5, 0, 821}, - dictWord{ - 134, - 0, - 1687, - }, - dictWord{133, 11, 514}, - dictWord{132, 0, 956}, - dictWord{134, 0, 1180}, - dictWord{10, 0, 112}, - dictWord{5, 10, 87}, - dictWord{7, 10, 313}, - dictWord{ - 7, - 10, - 1103, - }, - dictWord{10, 10, 582}, - dictWord{11, 10, 389}, - dictWord{11, 10, 813}, - dictWord{12, 10, 385}, - dictWord{13, 10, 286}, - dictWord{14, 10, 124}, - dictWord{146, 10, 108}, - dictWord{5, 0, 71}, - dictWord{7, 0, 1407}, - dictWord{9, 0, 704}, - dictWord{10, 0, 261}, - dictWord{10, 0, 619}, - dictWord{11, 0, 547}, - dictWord{11, 0, 619}, - dictWord{143, 0, 157}, - dictWord{4, 0, 531}, - dictWord{5, 0, 455}, - dictWord{5, 11, 301}, - dictWord{6, 11, 571}, - dictWord{14, 11, 49}, - dictWord{ - 146, - 11, - 102, - }, - dictWord{132, 10, 267}, - dictWord{6, 0, 385}, - dictWord{7, 0, 2008}, - dictWord{9, 0, 337}, - dictWord{138, 0, 517}, - dictWord{133, 11, 726}, - dictWord{133, 11, 364}, - dictWord{4, 11, 76}, - dictWord{7, 11, 1550}, - dictWord{9, 11, 306}, - dictWord{9, 11, 430}, - dictWord{9, 11, 663}, - dictWord{10, 11, 683}, - dictWord{11, 11, 427}, - dictWord{11, 11, 753}, - dictWord{12, 11, 334}, - dictWord{12, 11, 442}, - dictWord{14, 11, 258}, - dictWord{14, 11, 366}, - dictWord{ - 143, - 11, - 131, - }, - dictWord{6, 0, 1865}, - dictWord{6, 0, 1879}, - dictWord{6, 0, 1881}, - dictWord{6, 0, 1894}, - dictWord{6, 0, 1908}, - dictWord{9, 0, 915}, - dictWord{9, 0, 926}, - 
dictWord{9, 0, 940}, - dictWord{9, 0, 943}, - dictWord{9, 0, 966}, - dictWord{9, 0, 980}, - dictWord{9, 0, 989}, - dictWord{9, 0, 1005}, - dictWord{9, 0, 1010}, - dictWord{ - 12, - 0, - 813, - }, - dictWord{12, 0, 817}, - dictWord{12, 0, 840}, - dictWord{12, 0, 843}, - dictWord{12, 0, 855}, - dictWord{12, 0, 864}, - dictWord{12, 0, 871}, - dictWord{12, 0, 872}, - dictWord{12, 0, 899}, - dictWord{12, 0, 905}, - dictWord{12, 0, 924}, - dictWord{15, 0, 171}, - dictWord{15, 0, 181}, - dictWord{15, 0, 224}, - dictWord{15, 0, 235}, - dictWord{15, 0, 251}, - dictWord{146, 0, 184}, - dictWord{137, 11, 52}, - dictWord{5, 0, 16}, - dictWord{6, 0, 86}, - dictWord{6, 0, 603}, - dictWord{7, 0, 292}, - dictWord{7, 0, 561}, - dictWord{8, 0, 257}, - dictWord{8, 0, 382}, - dictWord{9, 0, 721}, - dictWord{9, 0, 778}, - dictWord{11, 0, 581}, - dictWord{140, 0, 466}, - dictWord{4, 0, 486}, - dictWord{ - 5, - 0, - 491, - }, - dictWord{135, 10, 1121}, - dictWord{4, 0, 72}, - dictWord{6, 0, 265}, - dictWord{135, 0, 1300}, - dictWord{135, 11, 1183}, - dictWord{10, 10, 249}, - dictWord{139, 10, 209}, - dictWord{132, 10, 561}, - dictWord{137, 11, 519}, - dictWord{4, 11, 656}, - dictWord{4, 10, 760}, - dictWord{135, 11, 779}, - dictWord{ - 9, - 10, - 154, - }, - dictWord{140, 10, 485}, - dictWord{135, 11, 1793}, - dictWord{135, 11, 144}, - dictWord{136, 10, 255}, - dictWord{133, 0, 621}, - dictWord{4, 10, 368}, - dictWord{135, 10, 641}, - dictWord{135, 11, 1373}, - dictWord{7, 11, 554}, - dictWord{7, 11, 605}, - dictWord{141, 11, 10}, - dictWord{137, 0, 234}, - dictWord{ - 5, - 0, - 815, - }, - dictWord{6, 0, 1688}, - dictWord{134, 0, 1755}, - dictWord{5, 11, 838}, - dictWord{5, 11, 841}, - dictWord{134, 11, 1649}, - dictWord{7, 0, 1987}, - dictWord{ - 7, - 0, - 2040, - }, - dictWord{136, 0, 743}, - dictWord{133, 11, 1012}, - dictWord{6, 0, 197}, - dictWord{136, 0, 205}, - dictWord{6, 0, 314}, - dictWord{134, 11, 314}, - dictWord{144, 11, 53}, - dictWord{6, 11, 251}, - dictWord{7, 11, 365}, 
- dictWord{7, 11, 1357}, - dictWord{7, 11, 1497}, - dictWord{8, 11, 154}, - dictWord{141, 11, 281}, - dictWord{133, 11, 340}, - dictWord{6, 0, 452}, - dictWord{7, 0, 312}, - dictWord{138, 0, 219}, - dictWord{138, 0, 589}, - dictWord{4, 0, 333}, - dictWord{9, 0, 176}, - dictWord{12, 0, 353}, - dictWord{141, 0, 187}, - dictWord{9, 10, 92}, - dictWord{147, 10, 91}, - dictWord{134, 0, 1110}, - dictWord{11, 0, 47}, - dictWord{139, 11, 495}, - dictWord{6, 10, 525}, - dictWord{8, 10, 806}, - dictWord{9, 10, 876}, - dictWord{140, 10, 284}, - dictWord{8, 11, 261}, - dictWord{9, 11, 144}, - dictWord{9, 11, 466}, - dictWord{10, 11, 370}, - dictWord{12, 11, 470}, - dictWord{13, 11, 144}, - dictWord{142, 11, 348}, - dictWord{137, 11, 897}, - dictWord{8, 0, 863}, - dictWord{8, 0, 864}, - dictWord{8, 0, 868}, - dictWord{8, 0, 884}, - dictWord{10, 0, 866}, - dictWord{10, 0, 868}, - dictWord{10, 0, 873}, - dictWord{10, 0, 911}, - dictWord{10, 0, 912}, - dictWord{ - 10, - 0, - 944, - }, - dictWord{12, 0, 727}, - dictWord{6, 11, 248}, - dictWord{9, 11, 546}, - dictWord{10, 11, 535}, - dictWord{11, 11, 681}, - dictWord{141, 11, 135}, - dictWord{ - 6, - 0, - 300, - }, - dictWord{135, 0, 1515}, - dictWord{134, 0, 1237}, - dictWord{139, 10, 958}, - dictWord{133, 10, 594}, - dictWord{140, 11, 250}, - dictWord{ - 134, - 0, - 1685, - }, - dictWord{134, 11, 567}, - dictWord{7, 0, 135}, - dictWord{8, 0, 7}, - dictWord{8, 0, 62}, - dictWord{9, 0, 243}, - dictWord{10, 0, 658}, - dictWord{10, 0, 697}, - dictWord{11, 0, 456}, - dictWord{139, 0, 756}, - dictWord{9, 0, 395}, - dictWord{138, 0, 79}, - dictWord{6, 10, 1641}, - dictWord{136, 10, 820}, - dictWord{4, 10, 302}, - dictWord{135, 10, 1766}, - dictWord{134, 11, 174}, - dictWord{135, 10, 1313}, - dictWord{135, 0, 631}, - dictWord{134, 10, 1674}, - dictWord{134, 11, 395}, - dictWord{138, 0, 835}, - dictWord{7, 0, 406}, - dictWord{7, 0, 459}, - dictWord{8, 0, 606}, - dictWord{139, 0, 726}, - dictWord{134, 11, 617}, - dictWord{134, 0, 979}, - 
dictWord{ - 6, - 10, - 389, - }, - dictWord{7, 10, 149}, - dictWord{9, 10, 142}, - dictWord{138, 10, 94}, - dictWord{5, 11, 878}, - dictWord{133, 11, 972}, - dictWord{6, 10, 8}, - dictWord{ - 7, - 10, - 1881, - }, - dictWord{8, 10, 91}, - dictWord{136, 11, 511}, - dictWord{133, 0, 612}, - dictWord{132, 11, 351}, - dictWord{4, 0, 372}, - dictWord{7, 0, 482}, - dictWord{ - 8, - 0, - 158, - }, - dictWord{9, 0, 602}, - dictWord{9, 0, 615}, - dictWord{10, 0, 245}, - dictWord{10, 0, 678}, - dictWord{10, 0, 744}, - dictWord{11, 0, 248}, - dictWord{ - 139, - 0, - 806, - }, - dictWord{5, 0, 854}, - dictWord{135, 0, 1991}, - dictWord{132, 11, 286}, - dictWord{135, 11, 344}, - dictWord{7, 11, 438}, - dictWord{7, 11, 627}, - dictWord{ - 7, - 11, - 1516, - }, - dictWord{8, 11, 40}, - dictWord{9, 11, 56}, - dictWord{9, 11, 294}, - dictWord{10, 11, 30}, - dictWord{10, 11, 259}, - dictWord{11, 11, 969}, - dictWord{ - 146, - 11, - 148, - }, - dictWord{135, 0, 1492}, - dictWord{5, 11, 259}, - dictWord{7, 11, 414}, - dictWord{7, 11, 854}, - dictWord{142, 11, 107}, - dictWord{135, 10, 1746}, - dictWord{6, 0, 833}, - dictWord{134, 0, 998}, - dictWord{135, 10, 24}, - dictWord{6, 0, 750}, - dictWord{135, 0, 1739}, - dictWord{4, 10, 503}, - dictWord{ - 135, - 10, - 1661, - }, - dictWord{5, 10, 130}, - dictWord{7, 10, 1314}, - dictWord{9, 10, 610}, - dictWord{10, 10, 718}, - dictWord{11, 10, 601}, - dictWord{11, 10, 819}, - dictWord{ - 11, - 10, - 946, - }, - dictWord{140, 10, 536}, - dictWord{10, 10, 149}, - dictWord{11, 10, 280}, - dictWord{142, 10, 336}, - dictWord{132, 11, 738}, - dictWord{ - 135, - 10, - 1946, - }, - dictWord{5, 0, 195}, - dictWord{135, 0, 1685}, - dictWord{7, 0, 1997}, - dictWord{8, 0, 730}, - dictWord{139, 0, 1006}, - dictWord{151, 11, 17}, - dictWord{ - 133, - 11, - 866, - }, - dictWord{14, 0, 463}, - dictWord{14, 0, 470}, - dictWord{150, 0, 61}, - dictWord{5, 0, 751}, - dictWord{8, 0, 266}, - dictWord{11, 0, 578}, - dictWord{ - 4, - 10, - 392, - }, - dictWord{135, 
10, 1597}, - dictWord{5, 10, 433}, - dictWord{9, 10, 633}, - dictWord{139, 10, 629}, - dictWord{135, 0, 821}, - dictWord{6, 0, 715}, - dictWord{ - 134, - 0, - 1325, - }, - dictWord{133, 11, 116}, - dictWord{6, 0, 868}, - dictWord{132, 11, 457}, - dictWord{134, 0, 959}, - dictWord{6, 10, 234}, - dictWord{138, 11, 199}, - dictWord{7, 0, 1053}, - dictWord{7, 10, 1950}, - dictWord{8, 10, 680}, - dictWord{11, 10, 817}, - dictWord{147, 10, 88}, - dictWord{7, 10, 1222}, - dictWord{ - 138, - 10, - 386, - }, - dictWord{5, 0, 950}, - dictWord{5, 0, 994}, - dictWord{6, 0, 351}, - dictWord{134, 0, 1124}, - dictWord{134, 0, 1081}, - dictWord{7, 0, 1595}, - dictWord{6, 10, 5}, - dictWord{11, 10, 249}, - dictWord{12, 10, 313}, - dictWord{16, 10, 66}, - dictWord{145, 10, 26}, - dictWord{148, 0, 59}, - dictWord{5, 11, 527}, - dictWord{6, 11, 189}, - dictWord{135, 11, 859}, - dictWord{5, 10, 963}, - dictWord{6, 10, 1773}, - dictWord{11, 11, 104}, - dictWord{11, 11, 554}, - dictWord{15, 11, 60}, - dictWord{ - 143, - 11, - 125, - }, - dictWord{135, 0, 47}, - dictWord{137, 0, 684}, - dictWord{134, 11, 116}, - dictWord{134, 0, 1606}, - dictWord{134, 0, 777}, - dictWord{7, 0, 1020}, - dictWord{ - 8, - 10, - 509, - }, - dictWord{136, 10, 792}, - dictWord{135, 0, 1094}, - dictWord{132, 0, 350}, - dictWord{133, 11, 487}, - dictWord{4, 11, 86}, - dictWord{5, 11, 667}, - dictWord{5, 11, 753}, - dictWord{6, 11, 316}, - dictWord{6, 11, 455}, - dictWord{135, 11, 946}, - dictWord{7, 0, 1812}, - dictWord{13, 0, 259}, - dictWord{13, 0, 356}, - dictWord{14, 0, 242}, - dictWord{147, 0, 114}, - dictWord{132, 10, 931}, - dictWord{133, 0, 967}, - dictWord{4, 0, 473}, - dictWord{7, 0, 623}, - dictWord{8, 0, 808}, - dictWord{ - 9, - 0, - 871, - }, - dictWord{9, 0, 893}, - dictWord{11, 0, 38}, - dictWord{11, 0, 431}, - dictWord{12, 0, 112}, - dictWord{12, 0, 217}, - dictWord{12, 0, 243}, - dictWord{12, 0, 562}, - dictWord{12, 0, 663}, - dictWord{12, 0, 683}, - dictWord{13, 0, 141}, - dictWord{13, 0, 197}, 
- dictWord{13, 0, 227}, - dictWord{13, 0, 406}, - dictWord{13, 0, 487}, - dictWord{14, 0, 156}, - dictWord{14, 0, 203}, - dictWord{14, 0, 224}, - dictWord{14, 0, 256}, - dictWord{18, 0, 58}, - dictWord{150, 0, 0}, - dictWord{138, 0, 286}, - dictWord{ - 7, - 10, - 943, - }, - dictWord{139, 10, 614}, - dictWord{135, 10, 1837}, - dictWord{150, 11, 45}, - dictWord{132, 0, 798}, - dictWord{4, 0, 222}, - dictWord{7, 0, 286}, - dictWord{136, 0, 629}, - dictWord{4, 11, 79}, - dictWord{7, 11, 1773}, - dictWord{10, 11, 450}, - dictWord{11, 11, 589}, - dictWord{13, 11, 332}, - dictWord{13, 11, 493}, - dictWord{14, 11, 183}, - dictWord{14, 11, 334}, - dictWord{14, 11, 362}, - dictWord{14, 11, 368}, - dictWord{14, 11, 376}, - dictWord{14, 11, 379}, - dictWord{ - 19, - 11, - 90, - }, - dictWord{19, 11, 103}, - dictWord{19, 11, 127}, - dictWord{148, 11, 90}, - dictWord{5, 0, 337}, - dictWord{11, 0, 513}, - dictWord{11, 0, 889}, - dictWord{ - 11, - 0, - 961, - }, - dictWord{12, 0, 461}, - dictWord{13, 0, 79}, - dictWord{15, 0, 121}, - dictWord{4, 10, 90}, - dictWord{5, 10, 545}, - dictWord{7, 10, 754}, - dictWord{9, 10, 186}, - dictWord{10, 10, 72}, - dictWord{10, 10, 782}, - dictWord{11, 10, 577}, - dictWord{11, 10, 610}, - dictWord{12, 10, 354}, - dictWord{12, 10, 362}, - dictWord{ - 140, - 10, - 595, - }, - dictWord{141, 0, 306}, - dictWord{136, 0, 146}, - dictWord{7, 0, 1646}, - dictWord{9, 10, 329}, - dictWord{11, 10, 254}, - dictWord{141, 11, 124}, - dictWord{ - 4, - 0, - 465, - }, - dictWord{135, 0, 1663}, - dictWord{132, 0, 525}, - dictWord{133, 11, 663}, - dictWord{10, 0, 299}, - dictWord{18, 0, 74}, - dictWord{9, 10, 187}, - dictWord{ - 11, - 10, - 1016, - }, - dictWord{145, 10, 44}, - dictWord{7, 0, 165}, - dictWord{7, 0, 919}, - dictWord{4, 10, 506}, - dictWord{136, 10, 517}, - dictWord{5, 10, 295}, - dictWord{ - 135, - 10, - 1680, - }, - dictWord{133, 11, 846}, - dictWord{134, 0, 1064}, - dictWord{5, 11, 378}, - dictWord{7, 11, 1402}, - dictWord{7, 11, 1414}, - 
dictWord{8, 11, 465}, - dictWord{9, 11, 286}, - dictWord{10, 11, 185}, - dictWord{10, 11, 562}, - dictWord{10, 11, 635}, - dictWord{11, 11, 31}, - dictWord{11, 11, 393}, - dictWord{ - 12, - 11, - 456, - }, - dictWord{13, 11, 312}, - dictWord{18, 11, 65}, - dictWord{18, 11, 96}, - dictWord{147, 11, 89}, - dictWord{132, 0, 596}, - dictWord{7, 10, 987}, - dictWord{ - 9, - 10, - 688, - }, - dictWord{10, 10, 522}, - dictWord{11, 10, 788}, - dictWord{140, 10, 566}, - dictWord{6, 0, 82}, - dictWord{7, 0, 138}, - dictWord{7, 0, 517}, - dictWord{7, 0, 1741}, - dictWord{11, 0, 238}, - dictWord{4, 11, 648}, - dictWord{134, 10, 1775}, - dictWord{7, 0, 1233}, - dictWord{7, 10, 700}, - dictWord{7, 10, 940}, - dictWord{8, 10, 514}, - dictWord{9, 10, 116}, - dictWord{9, 10, 535}, - dictWord{10, 10, 118}, - dictWord{11, 10, 107}, - dictWord{11, 10, 148}, - dictWord{11, 10, 922}, - dictWord{ - 12, - 10, - 254, - }, - dictWord{12, 10, 421}, - dictWord{142, 10, 238}, - dictWord{4, 0, 962}, - dictWord{6, 0, 1824}, - dictWord{8, 0, 894}, - dictWord{12, 0, 708}, - dictWord{ - 12, - 0, - 725, - }, - dictWord{14, 0, 451}, - dictWord{20, 0, 94}, - dictWord{22, 0, 59}, - dictWord{150, 0, 62}, - dictWord{5, 11, 945}, - dictWord{6, 11, 1656}, - dictWord{6, 11, 1787}, - dictWord{7, 11, 167}, - dictWord{8, 11, 824}, - dictWord{9, 11, 391}, - dictWord{10, 11, 375}, - dictWord{139, 11, 185}, - dictWord{5, 0, 495}, - dictWord{7, 0, 834}, - dictWord{9, 0, 733}, - dictWord{139, 0, 378}, - dictWord{4, 10, 743}, - dictWord{135, 11, 1273}, - dictWord{6, 0, 1204}, - dictWord{7, 11, 1645}, - dictWord{8, 11, 352}, - dictWord{137, 11, 249}, - dictWord{139, 10, 292}, - dictWord{133, 0, 559}, - dictWord{132, 11, 152}, - dictWord{9, 0, 499}, - dictWord{10, 0, 341}, - dictWord{ - 15, - 0, - 144, - }, - dictWord{19, 0, 49}, - dictWord{7, 10, 1283}, - dictWord{9, 10, 227}, - dictWord{11, 10, 325}, - dictWord{11, 10, 408}, - dictWord{14, 10, 180}, - dictWord{ - 146, - 10, - 47, - }, - dictWord{6, 0, 21}, - 
dictWord{6, 0, 1737}, - dictWord{7, 0, 1444}, - dictWord{136, 0, 224}, - dictWord{133, 11, 1006}, - dictWord{7, 0, 1446}, - dictWord{ - 9, - 0, - 97, - }, - dictWord{17, 0, 15}, - dictWord{5, 10, 81}, - dictWord{7, 10, 146}, - dictWord{7, 10, 1342}, - dictWord{8, 10, 53}, - dictWord{8, 10, 561}, - dictWord{8, 10, 694}, - dictWord{8, 10, 754}, - dictWord{9, 10, 115}, - dictWord{9, 10, 894}, - dictWord{10, 10, 462}, - dictWord{10, 10, 813}, - dictWord{11, 10, 230}, - dictWord{11, 10, 657}, - dictWord{11, 10, 699}, - dictWord{11, 10, 748}, - dictWord{12, 10, 119}, - dictWord{12, 10, 200}, - dictWord{12, 10, 283}, - dictWord{142, 10, 273}, - dictWord{ - 5, - 10, - 408, - }, - dictWord{137, 10, 747}, - dictWord{135, 11, 431}, - dictWord{135, 11, 832}, - dictWord{6, 0, 729}, - dictWord{134, 0, 953}, - dictWord{4, 0, 727}, - dictWord{ - 8, - 0, - 565, - }, - dictWord{5, 11, 351}, - dictWord{7, 11, 264}, - dictWord{136, 11, 565}, - dictWord{134, 0, 1948}, - dictWord{5, 0, 519}, - dictWord{5, 11, 40}, - dictWord{ - 7, - 11, - 598, - }, - dictWord{7, 11, 1638}, - dictWord{8, 11, 78}, - dictWord{9, 11, 166}, - dictWord{9, 11, 640}, - dictWord{9, 11, 685}, - dictWord{9, 11, 773}, - dictWord{ - 11, - 11, - 215, - }, - dictWord{13, 11, 65}, - dictWord{14, 11, 172}, - dictWord{14, 11, 317}, - dictWord{145, 11, 6}, - dictWord{8, 11, 60}, - dictWord{9, 11, 343}, - dictWord{ - 139, - 11, - 769, - }, - dictWord{137, 11, 455}, - dictWord{134, 0, 1193}, - dictWord{140, 0, 790}, - dictWord{7, 11, 1951}, - dictWord{8, 11, 765}, - dictWord{8, 11, 772}, - dictWord{140, 11, 671}, - dictWord{7, 11, 108}, - dictWord{8, 11, 219}, - dictWord{8, 11, 388}, - dictWord{9, 11, 639}, - dictWord{9, 11, 775}, - dictWord{11, 11, 275}, - dictWord{140, 11, 464}, - dictWord{132, 11, 468}, - dictWord{7, 10, 30}, - dictWord{8, 10, 86}, - dictWord{8, 10, 315}, - dictWord{8, 10, 700}, - dictWord{9, 10, 576}, - dictWord{ - 9, - 10, - 858, - }, - dictWord{11, 10, 310}, - dictWord{11, 10, 888}, - dictWord{11, 10, 
904}, - dictWord{12, 10, 361}, - dictWord{141, 10, 248}, - dictWord{5, 11, 15}, - dictWord{6, 11, 56}, - dictWord{7, 11, 1758}, - dictWord{8, 11, 500}, - dictWord{9, 11, 730}, - dictWord{11, 11, 331}, - dictWord{13, 11, 150}, - dictWord{142, 11, 282}, - dictWord{4, 0, 402}, - dictWord{7, 0, 2}, - dictWord{8, 0, 323}, - dictWord{136, 0, 479}, - dictWord{138, 10, 839}, - dictWord{11, 0, 580}, - dictWord{142, 0, 201}, - dictWord{ - 5, - 0, - 59, - }, - dictWord{135, 0, 672}, - dictWord{137, 10, 617}, - dictWord{146, 0, 34}, - dictWord{134, 11, 1886}, - dictWord{4, 0, 961}, - dictWord{136, 0, 896}, - dictWord{ - 6, - 0, - 1285, - }, - dictWord{5, 11, 205}, - dictWord{6, 11, 438}, - dictWord{137, 11, 711}, - dictWord{134, 10, 428}, - dictWord{7, 10, 524}, - dictWord{8, 10, 169}, - dictWord{8, 10, 234}, - dictWord{9, 10, 480}, - dictWord{138, 10, 646}, - dictWord{148, 0, 46}, - dictWord{141, 0, 479}, - dictWord{133, 11, 534}, - dictWord{6, 0, 2019}, - dictWord{134, 10, 1648}, - dictWord{4, 0, 85}, - dictWord{7, 0, 549}, - dictWord{7, 10, 1205}, - dictWord{138, 10, 637}, - dictWord{4, 0, 663}, - dictWord{5, 0, 94}, - dictWord{ - 7, - 11, - 235, - }, - dictWord{7, 11, 1475}, - dictWord{15, 11, 68}, - dictWord{146, 11, 120}, - dictWord{6, 11, 443}, - dictWord{9, 11, 237}, - dictWord{9, 11, 571}, - dictWord{ - 9, - 11, - 695, - }, - dictWord{10, 11, 139}, - dictWord{11, 11, 715}, - dictWord{12, 11, 417}, - dictWord{141, 11, 421}, - dictWord{132, 0, 783}, - dictWord{4, 0, 682}, - dictWord{8, 0, 65}, - dictWord{9, 10, 39}, - dictWord{10, 10, 166}, - dictWord{11, 10, 918}, - dictWord{12, 10, 635}, - dictWord{20, 10, 10}, - dictWord{22, 10, 27}, - dictWord{ - 22, - 10, - 43, - }, - dictWord{150, 10, 52}, - dictWord{6, 0, 11}, - dictWord{135, 0, 187}, - dictWord{132, 0, 522}, - dictWord{4, 0, 52}, - dictWord{135, 0, 661}, - dictWord{ - 4, - 0, - 383, - }, - dictWord{133, 0, 520}, - dictWord{135, 11, 546}, - dictWord{11, 0, 343}, - dictWord{142, 0, 127}, - dictWord{4, 11, 578}, - 
dictWord{7, 10, 157}, - dictWord{ - 7, - 11, - 624, - }, - dictWord{7, 11, 916}, - dictWord{8, 10, 279}, - dictWord{10, 11, 256}, - dictWord{11, 11, 87}, - dictWord{139, 11, 703}, - dictWord{134, 10, 604}, - dictWord{ - 4, - 0, - 281, - }, - dictWord{5, 0, 38}, - dictWord{7, 0, 194}, - dictWord{7, 0, 668}, - dictWord{7, 0, 1893}, - dictWord{137, 0, 397}, - dictWord{7, 10, 945}, - dictWord{11, 10, 713}, - dictWord{139, 10, 744}, - dictWord{139, 10, 1022}, - dictWord{9, 0, 635}, - dictWord{139, 0, 559}, - dictWord{5, 11, 923}, - dictWord{7, 11, 490}, - dictWord{ - 12, - 11, - 553, - }, - dictWord{13, 11, 100}, - dictWord{14, 11, 118}, - dictWord{143, 11, 75}, - dictWord{132, 0, 975}, - dictWord{132, 10, 567}, - dictWord{137, 10, 859}, - dictWord{7, 10, 1846}, - dictWord{7, 11, 1846}, - dictWord{8, 10, 628}, - dictWord{136, 11, 628}, - dictWord{148, 0, 116}, - dictWord{138, 11, 750}, - dictWord{14, 0, 51}, - dictWord{14, 11, 51}, - dictWord{15, 11, 7}, - dictWord{148, 11, 20}, - dictWord{132, 0, 858}, - dictWord{134, 0, 1075}, - dictWord{4, 11, 924}, - dictWord{ - 133, - 10, - 762, - }, - dictWord{136, 0, 535}, - dictWord{133, 0, 448}, - dictWord{10, 10, 784}, - dictWord{141, 10, 191}, - dictWord{133, 10, 298}, - dictWord{7, 0, 610}, - dictWord{135, 0, 1501}, - dictWord{7, 10, 633}, - dictWord{7, 10, 905}, - dictWord{7, 10, 909}, - dictWord{7, 10, 1538}, - dictWord{9, 10, 767}, - dictWord{140, 10, 636}, - dictWord{4, 11, 265}, - dictWord{7, 11, 807}, - dictWord{135, 11, 950}, - dictWord{5, 11, 93}, - dictWord{12, 11, 267}, - dictWord{144, 11, 26}, - dictWord{136, 0, 191}, - dictWord{139, 10, 301}, - dictWord{135, 10, 1970}, - dictWord{135, 0, 267}, - dictWord{4, 0, 319}, - dictWord{5, 0, 699}, - dictWord{138, 0, 673}, - dictWord{ - 6, - 0, - 336, - }, - dictWord{7, 0, 92}, - dictWord{7, 0, 182}, - dictWord{8, 0, 453}, - dictWord{8, 0, 552}, - dictWord{9, 0, 204}, - dictWord{9, 0, 285}, - dictWord{10, 0, 99}, - dictWord{ - 11, - 0, - 568, - }, - dictWord{11, 0, 950}, - 
dictWord{12, 0, 94}, - dictWord{16, 0, 20}, - dictWord{16, 0, 70}, - dictWord{19, 0, 55}, - dictWord{12, 10, 644}, - dictWord{144, 10, 90}, - dictWord{6, 0, 551}, - dictWord{7, 0, 1308}, - dictWord{7, 10, 845}, - dictWord{7, 11, 994}, - dictWord{8, 10, 160}, - dictWord{137, 10, 318}, - dictWord{19, 11, 1}, - dictWord{ - 19, - 11, - 26, - }, - dictWord{150, 11, 9}, - dictWord{7, 0, 1406}, - dictWord{9, 0, 218}, - dictWord{141, 0, 222}, - dictWord{5, 0, 256}, - dictWord{138, 0, 69}, - dictWord{ - 5, - 11, - 233, - }, - dictWord{5, 11, 320}, - dictWord{6, 11, 140}, - dictWord{7, 11, 330}, - dictWord{136, 11, 295}, - dictWord{6, 0, 1980}, - dictWord{136, 0, 952}, - dictWord{ - 4, - 0, - 833, - }, - dictWord{137, 11, 678}, - dictWord{133, 11, 978}, - dictWord{4, 11, 905}, - dictWord{6, 11, 1701}, - dictWord{137, 11, 843}, - dictWord{138, 10, 735}, - dictWord{136, 10, 76}, - dictWord{17, 0, 39}, - dictWord{148, 0, 36}, - dictWord{18, 0, 81}, - dictWord{146, 11, 81}, - dictWord{14, 0, 352}, - dictWord{17, 0, 53}, - dictWord{ - 18, - 0, - 146, - }, - dictWord{18, 0, 152}, - dictWord{19, 0, 11}, - dictWord{150, 0, 54}, - dictWord{135, 0, 634}, - dictWord{138, 10, 841}, - dictWord{132, 0, 618}, - dictWord{ - 4, - 0, - 339, - }, - dictWord{7, 0, 259}, - dictWord{17, 0, 73}, - dictWord{4, 11, 275}, - dictWord{140, 11, 376}, - dictWord{132, 11, 509}, - dictWord{7, 11, 273}, - dictWord{ - 139, - 11, - 377, - }, - dictWord{4, 0, 759}, - dictWord{13, 0, 169}, - dictWord{137, 10, 804}, - dictWord{6, 10, 96}, - dictWord{135, 10, 1426}, - dictWord{4, 10, 651}, - dictWord{133, 10, 289}, - dictWord{7, 0, 1075}, - dictWord{8, 10, 35}, - dictWord{9, 10, 511}, - dictWord{10, 10, 767}, - dictWord{147, 10, 118}, - dictWord{6, 0, 649}, - dictWord{6, 0, 670}, - dictWord{136, 0, 482}, - dictWord{5, 0, 336}, - dictWord{6, 0, 341}, - dictWord{6, 0, 478}, - dictWord{6, 0, 1763}, - dictWord{136, 0, 386}, - dictWord{ - 5, - 11, - 802, - }, - dictWord{7, 11, 2021}, - dictWord{8, 11, 805}, - 
dictWord{14, 11, 94}, - dictWord{15, 11, 65}, - dictWord{16, 11, 4}, - dictWord{16, 11, 77}, - dictWord{16, 11, 80}, - dictWord{145, 11, 5}, - dictWord{6, 0, 1035}, - dictWord{5, 11, 167}, - dictWord{5, 11, 899}, - dictWord{6, 11, 410}, - dictWord{137, 11, 777}, - dictWord{ - 134, - 11, - 1705, - }, - dictWord{5, 0, 924}, - dictWord{133, 0, 969}, - dictWord{132, 10, 704}, - dictWord{135, 0, 73}, - dictWord{135, 11, 10}, - dictWord{135, 10, 1078}, - dictWord{ - 5, - 11, - 11, - }, - dictWord{6, 11, 117}, - dictWord{6, 11, 485}, - dictWord{7, 11, 1133}, - dictWord{9, 11, 582}, - dictWord{9, 11, 594}, - dictWord{11, 11, 21}, - dictWord{ - 11, - 11, - 818, - }, - dictWord{12, 11, 535}, - dictWord{141, 11, 86}, - dictWord{135, 0, 1971}, - dictWord{4, 11, 264}, - dictWord{7, 11, 1067}, - dictWord{8, 11, 204}, - dictWord{8, 11, 385}, - dictWord{139, 11, 953}, - dictWord{6, 0, 1458}, - dictWord{135, 0, 1344}, - dictWord{5, 0, 396}, - dictWord{134, 0, 501}, - dictWord{4, 10, 720}, - dictWord{133, 10, 306}, - dictWord{4, 0, 929}, - dictWord{5, 0, 799}, - dictWord{8, 0, 46}, - dictWord{8, 0, 740}, - dictWord{133, 10, 431}, - dictWord{7, 11, 646}, - dictWord{ - 7, - 11, - 1730, - }, - dictWord{11, 11, 446}, - dictWord{141, 11, 178}, - dictWord{7, 0, 276}, - dictWord{5, 10, 464}, - dictWord{6, 10, 236}, - dictWord{7, 10, 696}, - dictWord{ - 7, - 10, - 914, - }, - dictWord{7, 10, 1108}, - dictWord{7, 10, 1448}, - dictWord{9, 10, 15}, - dictWord{9, 10, 564}, - dictWord{10, 10, 14}, - dictWord{12, 10, 565}, - dictWord{ - 13, - 10, - 449, - }, - dictWord{14, 10, 53}, - dictWord{15, 10, 13}, - dictWord{16, 10, 64}, - dictWord{145, 10, 41}, - dictWord{4, 0, 892}, - dictWord{133, 0, 770}, - dictWord{ - 6, - 10, - 1767, - }, - dictWord{12, 10, 194}, - dictWord{145, 10, 107}, - dictWord{135, 0, 158}, - dictWord{5, 10, 840}, - dictWord{138, 11, 608}, - dictWord{134, 0, 1432}, - dictWord{138, 11, 250}, - dictWord{8, 11, 794}, - dictWord{9, 11, 400}, - dictWord{10, 11, 298}, - 
dictWord{142, 11, 228}, - dictWord{151, 0, 25}, - dictWord{ - 7, - 11, - 1131, - }, - dictWord{135, 11, 1468}, - dictWord{135, 0, 2001}, - dictWord{9, 10, 642}, - dictWord{11, 10, 236}, - dictWord{142, 10, 193}, - dictWord{4, 10, 68}, - dictWord{5, 10, 634}, - dictWord{6, 10, 386}, - dictWord{7, 10, 794}, - dictWord{8, 10, 273}, - dictWord{9, 10, 563}, - dictWord{10, 10, 105}, - dictWord{10, 10, 171}, - dictWord{11, 10, 94}, - dictWord{139, 10, 354}, - dictWord{136, 11, 724}, - dictWord{132, 0, 478}, - dictWord{11, 11, 512}, - dictWord{13, 11, 205}, - dictWord{ - 19, - 11, - 30, - }, - dictWord{22, 11, 36}, - dictWord{151, 11, 19}, - dictWord{7, 0, 1461}, - dictWord{140, 0, 91}, - dictWord{6, 11, 190}, - dictWord{7, 11, 768}, - dictWord{ - 135, - 11, - 1170, - }, - dictWord{4, 0, 602}, - dictWord{8, 0, 211}, - dictWord{4, 10, 95}, - dictWord{7, 10, 416}, - dictWord{139, 10, 830}, - dictWord{7, 10, 731}, - dictWord{13, 10, 20}, - dictWord{143, 10, 11}, - dictWord{6, 0, 1068}, - dictWord{135, 0, 1872}, - dictWord{4, 0, 13}, - dictWord{5, 0, 567}, - dictWord{7, 0, 1498}, - dictWord{9, 0, 124}, - dictWord{11, 0, 521}, - dictWord{12, 0, 405}, - dictWord{135, 11, 1023}, - dictWord{135, 0, 1006}, - dictWord{132, 0, 735}, - dictWord{138, 0, 812}, - dictWord{4, 0, 170}, - dictWord{135, 0, 323}, - dictWord{6, 11, 137}, - dictWord{9, 11, 75}, - dictWord{9, 11, 253}, - dictWord{10, 11, 194}, - dictWord{138, 11, 444}, - dictWord{5, 0, 304}, - dictWord{7, 0, 1403}, - dictWord{5, 10, 864}, - dictWord{10, 10, 648}, - dictWord{11, 10, 671}, - dictWord{143, 10, 46}, - dictWord{135, 11, 1180}, - dictWord{ - 133, - 10, - 928, - }, - dictWord{4, 0, 148}, - dictWord{133, 0, 742}, - dictWord{11, 10, 986}, - dictWord{140, 10, 682}, - dictWord{133, 0, 523}, - dictWord{135, 11, 1743}, - dictWord{7, 0, 730}, - dictWord{18, 0, 144}, - dictWord{19, 0, 61}, - dictWord{8, 10, 44}, - dictWord{9, 10, 884}, - dictWord{10, 10, 580}, - dictWord{11, 10, 399}, - dictWord{ - 11, - 10, - 894, - }, - 
dictWord{143, 10, 122}, - dictWord{5, 11, 760}, - dictWord{7, 11, 542}, - dictWord{8, 11, 135}, - dictWord{136, 11, 496}, - dictWord{136, 0, 981}, - dictWord{133, 0, 111}, - dictWord{10, 0, 132}, - dictWord{11, 0, 191}, - dictWord{11, 0, 358}, - dictWord{139, 0, 460}, - dictWord{7, 11, 319}, - dictWord{7, 11, 355}, - dictWord{ - 7, - 11, - 763, - }, - dictWord{10, 11, 389}, - dictWord{145, 11, 43}, - dictWord{134, 0, 890}, - dictWord{134, 0, 1420}, - dictWord{136, 11, 557}, - dictWord{ - 133, - 10, - 518, - }, - dictWord{133, 0, 444}, - dictWord{135, 0, 1787}, - dictWord{135, 10, 1852}, - dictWord{8, 0, 123}, - dictWord{15, 0, 6}, - dictWord{144, 0, 7}, - dictWord{ - 6, - 0, - 2041, - }, - dictWord{10, 11, 38}, - dictWord{139, 11, 784}, - dictWord{136, 0, 932}, - dictWord{5, 0, 937}, - dictWord{135, 0, 100}, - dictWord{6, 0, 995}, - dictWord{ - 4, - 11, - 58, - }, - dictWord{5, 11, 286}, - dictWord{6, 11, 319}, - dictWord{7, 11, 402}, - dictWord{7, 11, 1254}, - dictWord{7, 11, 1903}, - dictWord{8, 11, 356}, - dictWord{ - 140, - 11, - 408, - }, - dictWord{4, 11, 389}, - dictWord{9, 11, 181}, - dictWord{9, 11, 255}, - dictWord{10, 11, 8}, - dictWord{10, 11, 29}, - dictWord{10, 11, 816}, - dictWord{ - 11, - 11, - 311, - }, - dictWord{11, 11, 561}, - dictWord{12, 11, 67}, - dictWord{141, 11, 181}, - dictWord{138, 0, 255}, - dictWord{5, 0, 138}, - dictWord{4, 10, 934}, - dictWord{ - 136, - 10, - 610, - }, - dictWord{4, 0, 965}, - dictWord{10, 0, 863}, - dictWord{138, 0, 898}, - dictWord{10, 10, 804}, - dictWord{138, 10, 832}, - dictWord{12, 0, 631}, - dictWord{ - 8, - 10, - 96, - }, - dictWord{9, 10, 36}, - dictWord{10, 10, 607}, - dictWord{11, 10, 423}, - dictWord{11, 10, 442}, - dictWord{12, 10, 309}, - dictWord{14, 10, 199}, - dictWord{ - 15, - 10, - 90, - }, - dictWord{145, 10, 110}, - dictWord{134, 0, 1394}, - dictWord{4, 0, 652}, - dictWord{8, 0, 320}, - dictWord{22, 0, 6}, - dictWord{22, 0, 16}, - dictWord{ - 9, - 10, - 13, - }, - dictWord{9, 10, 398}, - 
dictWord{9, 10, 727}, - dictWord{10, 10, 75}, - dictWord{10, 10, 184}, - dictWord{10, 10, 230}, - dictWord{10, 10, 564}, - dictWord{ - 10, - 10, - 569, - }, - dictWord{11, 10, 973}, - dictWord{12, 10, 70}, - dictWord{12, 10, 189}, - dictWord{13, 10, 57}, - dictWord{141, 10, 257}, - dictWord{6, 0, 897}, - dictWord{ - 134, - 0, - 1333, - }, - dictWord{4, 0, 692}, - dictWord{133, 0, 321}, - dictWord{133, 11, 373}, - dictWord{135, 0, 922}, - dictWord{5, 0, 619}, - dictWord{133, 0, 698}, - dictWord{ - 137, - 10, - 631, - }, - dictWord{5, 10, 345}, - dictWord{135, 10, 1016}, - dictWord{9, 0, 957}, - dictWord{9, 0, 1018}, - dictWord{12, 0, 828}, - dictWord{12, 0, 844}, - dictWord{ - 12, - 0, - 897, - }, - dictWord{12, 0, 901}, - dictWord{12, 0, 943}, - dictWord{15, 0, 180}, - dictWord{18, 0, 197}, - dictWord{18, 0, 200}, - dictWord{18, 0, 213}, - dictWord{ - 18, - 0, - 214, - }, - dictWord{146, 0, 226}, - dictWord{5, 0, 917}, - dictWord{134, 0, 1659}, - dictWord{135, 0, 1100}, - dictWord{134, 0, 1173}, - dictWord{134, 0, 1930}, - dictWord{5, 0, 251}, - dictWord{5, 0, 956}, - dictWord{8, 0, 268}, - dictWord{9, 0, 214}, - dictWord{146, 0, 142}, - dictWord{133, 10, 673}, - dictWord{137, 10, 850}, - dictWord{ - 4, - 10, - 287, - }, - dictWord{133, 10, 1018}, - dictWord{132, 11, 672}, - dictWord{5, 0, 346}, - dictWord{5, 0, 711}, - dictWord{8, 0, 390}, - dictWord{11, 11, 752}, - dictWord{139, 11, 885}, - dictWord{5, 10, 34}, - dictWord{10, 10, 724}, - dictWord{12, 10, 444}, - dictWord{13, 10, 354}, - dictWord{18, 10, 32}, - dictWord{23, 10, 24}, - dictWord{23, 10, 31}, - dictWord{152, 10, 5}, - dictWord{4, 11, 710}, - dictWord{134, 11, 606}, - dictWord{134, 0, 744}, - dictWord{134, 10, 382}, - dictWord{ - 133, - 11, - 145, - }, - dictWord{4, 10, 329}, - dictWord{7, 11, 884}, - dictWord{140, 11, 124}, - dictWord{4, 11, 467}, - dictWord{5, 11, 405}, - dictWord{134, 11, 544}, - dictWord{ - 9, - 10, - 846, - }, - dictWord{138, 10, 827}, - dictWord{133, 0, 624}, - dictWord{9, 11, 
372}, - dictWord{15, 11, 2}, - dictWord{19, 11, 10}, - dictWord{147, 11, 18}, - dictWord{ - 4, - 11, - 387, - }, - dictWord{135, 11, 1288}, - dictWord{5, 0, 783}, - dictWord{7, 0, 1998}, - dictWord{135, 0, 2047}, - dictWord{132, 10, 906}, - dictWord{136, 10, 366}, - dictWord{135, 11, 550}, - dictWord{4, 10, 123}, - dictWord{4, 10, 649}, - dictWord{5, 10, 605}, - dictWord{7, 10, 1509}, - dictWord{136, 10, 36}, - dictWord{ - 134, - 0, - 1125, - }, - dictWord{132, 0, 594}, - dictWord{133, 10, 767}, - dictWord{135, 11, 1227}, - dictWord{136, 11, 467}, - dictWord{4, 11, 576}, - dictWord{ - 135, - 11, - 1263, - }, - dictWord{4, 0, 268}, - dictWord{7, 0, 1534}, - dictWord{135, 11, 1534}, - dictWord{4, 10, 273}, - dictWord{5, 10, 658}, - dictWord{5, 11, 919}, - dictWord{ - 5, - 10, - 995, - }, - dictWord{134, 11, 1673}, - dictWord{133, 0, 563}, - dictWord{134, 10, 72}, - dictWord{135, 10, 1345}, - dictWord{4, 11, 82}, - dictWord{5, 11, 333}, - dictWord{ - 5, - 11, - 904, - }, - dictWord{6, 11, 207}, - dictWord{7, 11, 325}, - dictWord{7, 11, 1726}, - dictWord{8, 11, 101}, - dictWord{10, 11, 778}, - dictWord{139, 11, 220}, - dictWord{5, 0, 37}, - dictWord{6, 0, 39}, - dictWord{6, 0, 451}, - dictWord{7, 0, 218}, - dictWord{7, 0, 667}, - dictWord{7, 0, 1166}, - dictWord{7, 0, 1687}, - dictWord{8, 0, 662}, - dictWord{16, 0, 2}, - dictWord{133, 10, 589}, - dictWord{134, 0, 1332}, - dictWord{133, 11, 903}, - dictWord{134, 0, 508}, - dictWord{5, 10, 117}, - dictWord{6, 10, 514}, - dictWord{6, 10, 541}, - dictWord{7, 10, 1164}, - dictWord{7, 10, 1436}, - dictWord{8, 10, 220}, - dictWord{8, 10, 648}, - dictWord{10, 10, 688}, - dictWord{11, 10, 560}, - dictWord{140, 11, 147}, - dictWord{6, 11, 555}, - dictWord{135, 11, 485}, - dictWord{133, 10, 686}, - dictWord{7, 0, 453}, - dictWord{7, 0, 635}, - dictWord{7, 0, 796}, - dictWord{8, 0, 331}, - dictWord{9, 0, 330}, - dictWord{9, 0, 865}, - dictWord{10, 0, 119}, - dictWord{10, 0, 235}, - dictWord{11, 0, 111}, - dictWord{11, 0, 129}, - 
dictWord{ - 11, - 0, - 240, - }, - dictWord{12, 0, 31}, - dictWord{12, 0, 66}, - dictWord{12, 0, 222}, - dictWord{12, 0, 269}, - dictWord{12, 0, 599}, - dictWord{12, 0, 684}, - dictWord{12, 0, 689}, - dictWord{12, 0, 691}, - dictWord{142, 0, 345}, - dictWord{135, 0, 1834}, - dictWord{4, 11, 705}, - dictWord{7, 11, 615}, - dictWord{138, 11, 251}, - dictWord{ - 136, - 11, - 345, - }, - dictWord{137, 0, 527}, - dictWord{6, 0, 98}, - dictWord{7, 0, 702}, - dictWord{135, 0, 991}, - dictWord{11, 0, 576}, - dictWord{14, 0, 74}, - dictWord{7, 10, 196}, - dictWord{10, 10, 765}, - dictWord{11, 10, 347}, - dictWord{11, 10, 552}, - dictWord{11, 10, 790}, - dictWord{12, 10, 263}, - dictWord{13, 10, 246}, - dictWord{ - 13, - 10, - 270, - }, - dictWord{13, 10, 395}, - dictWord{14, 10, 176}, - dictWord{14, 10, 190}, - dictWord{14, 10, 398}, - dictWord{14, 10, 412}, - dictWord{15, 10, 32}, - dictWord{ - 15, - 10, - 63, - }, - dictWord{16, 10, 88}, - dictWord{147, 10, 105}, - dictWord{134, 11, 90}, - dictWord{13, 0, 84}, - dictWord{141, 0, 122}, - dictWord{6, 0, 37}, - dictWord{ - 7, - 0, - 299, - }, - dictWord{7, 0, 1666}, - dictWord{8, 0, 195}, - dictWord{8, 0, 316}, - dictWord{9, 0, 178}, - dictWord{9, 0, 276}, - dictWord{9, 0, 339}, - dictWord{9, 0, 536}, - dictWord{ - 10, - 0, - 102, - }, - dictWord{10, 0, 362}, - dictWord{10, 0, 785}, - dictWord{11, 0, 55}, - dictWord{11, 0, 149}, - dictWord{11, 0, 773}, - dictWord{13, 0, 416}, - dictWord{ - 13, - 0, - 419, - }, - dictWord{14, 0, 38}, - dictWord{14, 0, 41}, - dictWord{142, 0, 210}, - dictWord{5, 10, 381}, - dictWord{135, 10, 1792}, - dictWord{7, 11, 813}, - dictWord{ - 12, - 11, - 497, - }, - dictWord{141, 11, 56}, - dictWord{7, 10, 616}, - dictWord{138, 10, 413}, - dictWord{133, 0, 645}, - dictWord{6, 11, 125}, - dictWord{135, 11, 1277}, - dictWord{132, 0, 290}, - dictWord{6, 0, 70}, - dictWord{7, 0, 1292}, - dictWord{10, 0, 762}, - dictWord{139, 0, 288}, - dictWord{6, 10, 120}, - dictWord{7, 10, 1188}, - dictWord{ - 7, - 10, 
- 1710, - }, - dictWord{8, 10, 286}, - dictWord{9, 10, 667}, - dictWord{11, 10, 592}, - dictWord{139, 10, 730}, - dictWord{135, 11, 1784}, - dictWord{7, 0, 1315}, - dictWord{135, 11, 1315}, - dictWord{134, 0, 1955}, - dictWord{135, 10, 1146}, - dictWord{7, 0, 131}, - dictWord{7, 0, 422}, - dictWord{8, 0, 210}, - dictWord{ - 140, - 0, - 573, - }, - dictWord{4, 10, 352}, - dictWord{135, 10, 687}, - dictWord{139, 0, 797}, - dictWord{143, 0, 38}, - dictWord{14, 0, 179}, - dictWord{15, 0, 151}, - dictWord{ - 150, - 0, - 11, - }, - dictWord{7, 0, 488}, - dictWord{4, 10, 192}, - dictWord{5, 10, 49}, - dictWord{6, 10, 200}, - dictWord{6, 10, 293}, - dictWord{134, 10, 1696}, - dictWord{ - 132, - 0, - 936, - }, - dictWord{135, 11, 703}, - dictWord{6, 11, 160}, - dictWord{7, 11, 1106}, - dictWord{9, 11, 770}, - dictWord{10, 11, 618}, - dictWord{11, 11, 112}, - dictWord{ - 140, - 11, - 413, - }, - dictWord{5, 0, 453}, - dictWord{134, 0, 441}, - dictWord{135, 0, 595}, - dictWord{132, 10, 650}, - dictWord{132, 10, 147}, - dictWord{6, 0, 991}, - dictWord{6, 0, 1182}, - dictWord{12, 11, 271}, - dictWord{145, 11, 109}, - dictWord{133, 10, 934}, - dictWord{140, 11, 221}, - dictWord{132, 0, 653}, - dictWord{ - 7, - 0, - 505, - }, - dictWord{135, 0, 523}, - dictWord{134, 0, 903}, - dictWord{135, 11, 479}, - dictWord{7, 11, 304}, - dictWord{9, 11, 646}, - dictWord{9, 11, 862}, - dictWord{ - 10, - 11, - 262, - }, - dictWord{11, 11, 696}, - dictWord{12, 11, 208}, - dictWord{15, 11, 79}, - dictWord{147, 11, 108}, - dictWord{146, 0, 80}, - dictWord{135, 11, 981}, - dictWord{142, 0, 432}, - dictWord{132, 0, 314}, - dictWord{137, 11, 152}, - dictWord{7, 0, 1368}, - dictWord{8, 0, 232}, - dictWord{8, 0, 361}, - dictWord{10, 0, 682}, - dictWord{138, 0, 742}, - dictWord{135, 11, 1586}, - dictWord{9, 0, 534}, - dictWord{4, 11, 434}, - dictWord{11, 11, 663}, - dictWord{12, 11, 210}, - dictWord{13, 11, 166}, - dictWord{13, 11, 310}, - dictWord{14, 11, 373}, - dictWord{147, 11, 43}, - dictWord{7, 
11, 1091}, - dictWord{135, 11, 1765}, - dictWord{6, 11, 550}, - dictWord{ - 135, - 11, - 652, - }, - dictWord{137, 0, 27}, - dictWord{142, 0, 12}, - dictWord{4, 10, 637}, - dictWord{5, 11, 553}, - dictWord{7, 11, 766}, - dictWord{138, 11, 824}, - dictWord{ - 7, - 11, - 737, - }, - dictWord{8, 11, 298}, - dictWord{136, 11, 452}, - dictWord{7, 0, 736}, - dictWord{139, 0, 264}, - dictWord{134, 0, 1657}, - dictWord{133, 11, 292}, - dictWord{138, 11, 135}, - dictWord{6, 0, 844}, - dictWord{134, 0, 1117}, - dictWord{135, 0, 127}, - dictWord{9, 10, 867}, - dictWord{138, 10, 837}, - dictWord{ - 6, - 0, - 1184, - }, - dictWord{134, 0, 1208}, - dictWord{134, 0, 1294}, - dictWord{136, 0, 364}, - dictWord{6, 0, 1415}, - dictWord{7, 0, 1334}, - dictWord{11, 0, 125}, - dictWord{ - 6, - 10, - 170, - }, - dictWord{7, 11, 393}, - dictWord{8, 10, 395}, - dictWord{8, 10, 487}, - dictWord{10, 11, 603}, - dictWord{11, 11, 206}, - dictWord{141, 10, 147}, - dictWord{137, 11, 748}, - dictWord{4, 11, 912}, - dictWord{137, 11, 232}, - dictWord{4, 10, 535}, - dictWord{136, 10, 618}, - dictWord{137, 0, 792}, - dictWord{ - 7, - 11, - 1973, - }, - dictWord{136, 11, 716}, - dictWord{135, 11, 98}, - dictWord{5, 0, 909}, - dictWord{9, 0, 849}, - dictWord{138, 0, 805}, - dictWord{4, 0, 630}, - dictWord{ - 132, - 0, - 699, - }, - dictWord{5, 11, 733}, - dictWord{14, 11, 103}, - dictWord{150, 10, 23}, - dictWord{12, 11, 158}, - dictWord{18, 11, 8}, - dictWord{19, 11, 62}, - dictWord{ - 20, - 11, - 6, - }, - dictWord{22, 11, 4}, - dictWord{23, 11, 2}, - dictWord{151, 11, 9}, - dictWord{132, 0, 968}, - dictWord{132, 10, 778}, - dictWord{132, 10, 46}, - dictWord{5, 10, 811}, - dictWord{6, 10, 1679}, - dictWord{6, 10, 1714}, - dictWord{135, 10, 2032}, - dictWord{6, 0, 1446}, - dictWord{7, 10, 1458}, - dictWord{9, 10, 407}, - dictWord{ - 139, - 10, - 15, - }, - dictWord{7, 0, 206}, - dictWord{7, 0, 397}, - dictWord{7, 0, 621}, - dictWord{7, 0, 640}, - dictWord{8, 0, 124}, - dictWord{8, 0, 619}, - 
dictWord{9, 0, 305}, - dictWord{ - 9, - 0, - 643, - }, - dictWord{10, 0, 264}, - dictWord{10, 0, 628}, - dictWord{11, 0, 40}, - dictWord{12, 0, 349}, - dictWord{13, 0, 134}, - dictWord{13, 0, 295}, - dictWord{ - 14, - 0, - 155, - }, - dictWord{15, 0, 120}, - dictWord{18, 0, 105}, - dictWord{6, 10, 34}, - dictWord{7, 10, 1089}, - dictWord{8, 10, 708}, - dictWord{8, 10, 721}, - dictWord{9, 10, 363}, - dictWord{148, 10, 98}, - dictWord{4, 0, 262}, - dictWord{5, 0, 641}, - dictWord{135, 0, 342}, - dictWord{137, 11, 72}, - dictWord{4, 0, 99}, - dictWord{6, 0, 250}, - dictWord{ - 6, - 0, - 346, - }, - dictWord{8, 0, 127}, - dictWord{138, 0, 81}, - dictWord{132, 0, 915}, - dictWord{5, 0, 75}, - dictWord{9, 0, 517}, - dictWord{10, 0, 470}, - dictWord{12, 0, 155}, - dictWord{141, 0, 224}, - dictWord{132, 10, 462}, - dictWord{11, 11, 600}, - dictWord{11, 11, 670}, - dictWord{141, 11, 245}, - dictWord{142, 0, 83}, - dictWord{ - 5, - 10, - 73, - }, - dictWord{6, 10, 23}, - dictWord{134, 10, 338}, - dictWord{6, 0, 1031}, - dictWord{139, 11, 923}, - dictWord{7, 11, 164}, - dictWord{7, 11, 1571}, - dictWord{ - 9, - 11, - 107, - }, - dictWord{140, 11, 225}, - dictWord{134, 0, 1470}, - dictWord{133, 0, 954}, - dictWord{6, 0, 304}, - dictWord{8, 0, 418}, - dictWord{10, 0, 345}, - dictWord{ - 11, - 0, - 341, - }, - dictWord{139, 0, 675}, - dictWord{9, 0, 410}, - dictWord{139, 0, 425}, - dictWord{4, 11, 27}, - dictWord{5, 11, 484}, - dictWord{5, 11, 510}, - dictWord{6, 11, 434}, - dictWord{7, 11, 1000}, - dictWord{7, 11, 1098}, - dictWord{8, 11, 2}, - dictWord{136, 11, 200}, - dictWord{134, 0, 734}, - dictWord{140, 11, 257}, - dictWord{ - 7, - 10, - 725, - }, - dictWord{8, 10, 498}, - dictWord{139, 10, 268}, - dictWord{134, 0, 1822}, - dictWord{135, 0, 1798}, - dictWord{135, 10, 773}, - dictWord{132, 11, 460}, - dictWord{4, 11, 932}, - dictWord{133, 11, 891}, - dictWord{134, 0, 14}, - dictWord{132, 10, 583}, - dictWord{7, 10, 1462}, - dictWord{8, 11, 625}, - dictWord{ - 139, - 10, - 
659, - }, - dictWord{5, 0, 113}, - dictWord{6, 0, 243}, - dictWord{6, 0, 1708}, - dictWord{7, 0, 1865}, - dictWord{11, 0, 161}, - dictWord{16, 0, 37}, - dictWord{17, 0, 99}, - dictWord{133, 10, 220}, - dictWord{134, 11, 76}, - dictWord{5, 11, 461}, - dictWord{135, 11, 1925}, - dictWord{140, 0, 69}, - dictWord{8, 11, 92}, - dictWord{ - 137, - 11, - 221, - }, - dictWord{139, 10, 803}, - dictWord{132, 10, 544}, - dictWord{4, 0, 274}, - dictWord{134, 0, 922}, - dictWord{132, 0, 541}, - dictWord{5, 0, 627}, - dictWord{ - 6, - 10, - 437, - }, - dictWord{6, 10, 564}, - dictWord{11, 10, 181}, - dictWord{141, 10, 183}, - dictWord{135, 10, 1192}, - dictWord{7, 0, 166}, - dictWord{132, 11, 763}, - dictWord{133, 11, 253}, - dictWord{134, 0, 849}, - dictWord{9, 11, 73}, - dictWord{10, 11, 110}, - dictWord{14, 11, 185}, - dictWord{145, 11, 119}, - dictWord{5, 11, 212}, - dictWord{12, 11, 35}, - dictWord{141, 11, 382}, - dictWord{133, 0, 717}, - dictWord{137, 0, 304}, - dictWord{136, 0, 600}, - dictWord{133, 0, 654}, - dictWord{ - 6, - 0, - 273, - }, - dictWord{10, 0, 188}, - dictWord{13, 0, 377}, - dictWord{146, 0, 77}, - dictWord{4, 10, 790}, - dictWord{5, 10, 273}, - dictWord{134, 10, 394}, - dictWord{ - 132, - 0, - 543, - }, - dictWord{135, 0, 410}, - dictWord{11, 0, 98}, - dictWord{11, 0, 524}, - dictWord{141, 0, 87}, - dictWord{132, 0, 941}, - dictWord{135, 11, 1175}, - dictWord{ - 4, - 0, - 250, - }, - dictWord{7, 0, 1612}, - dictWord{11, 0, 186}, - dictWord{12, 0, 133}, - dictWord{6, 10, 127}, - dictWord{7, 10, 1511}, - dictWord{8, 10, 613}, - dictWord{ - 12, - 10, - 495, - }, - dictWord{12, 10, 586}, - dictWord{12, 10, 660}, - dictWord{12, 10, 668}, - dictWord{14, 10, 385}, - dictWord{15, 10, 118}, - dictWord{17, 10, 20}, - dictWord{ - 146, - 10, - 98, - }, - dictWord{6, 0, 1785}, - dictWord{133, 11, 816}, - dictWord{134, 0, 1339}, - dictWord{7, 0, 961}, - dictWord{7, 0, 1085}, - dictWord{7, 0, 1727}, - dictWord{ - 8, - 0, - 462, - }, - dictWord{6, 10, 230}, - 
dictWord{135, 11, 1727}, - dictWord{9, 0, 636}, - dictWord{135, 10, 1954}, - dictWord{132, 0, 780}, - dictWord{5, 11, 869}, - dictWord{5, 11, 968}, - dictWord{6, 11, 1626}, - dictWord{8, 11, 734}, - dictWord{136, 11, 784}, - dictWord{4, 11, 542}, - dictWord{6, 11, 1716}, - dictWord{6, 11, 1727}, - dictWord{7, 11, 1082}, - dictWord{7, 11, 1545}, - dictWord{8, 11, 56}, - dictWord{8, 11, 118}, - dictWord{8, 11, 412}, - dictWord{8, 11, 564}, - dictWord{9, 11, 888}, - dictWord{9, 11, 908}, - dictWord{10, 11, 50}, - dictWord{10, 11, 423}, - dictWord{11, 11, 685}, - dictWord{11, 11, 697}, - dictWord{11, 11, 933}, - dictWord{12, 11, 299}, - dictWord{13, 11, 126}, - dictWord{13, 11, 136}, - dictWord{13, 11, 170}, - dictWord{141, 11, 190}, - dictWord{134, 11, 226}, - dictWord{4, 11, 232}, - dictWord{ - 9, - 11, - 202, - }, - dictWord{10, 11, 474}, - dictWord{140, 11, 433}, - dictWord{137, 11, 500}, - dictWord{5, 0, 529}, - dictWord{136, 10, 68}, - dictWord{132, 10, 654}, - dictWord{ - 4, - 10, - 156, - }, - dictWord{7, 10, 998}, - dictWord{7, 10, 1045}, - dictWord{7, 10, 1860}, - dictWord{9, 10, 48}, - dictWord{9, 10, 692}, - dictWord{11, 10, 419}, - dictWord{139, 10, 602}, - dictWord{7, 0, 1276}, - dictWord{8, 0, 474}, - dictWord{9, 0, 652}, - dictWord{6, 11, 108}, - dictWord{7, 11, 1003}, - dictWord{7, 11, 1181}, - dictWord{136, 11, 343}, - dictWord{7, 11, 1264}, - dictWord{7, 11, 1678}, - dictWord{11, 11, 945}, - dictWord{12, 11, 341}, - dictWord{12, 11, 471}, - dictWord{ - 140, - 11, - 569, - }, - dictWord{134, 11, 1712}, - dictWord{5, 0, 948}, - dictWord{12, 0, 468}, - dictWord{19, 0, 96}, - dictWord{148, 0, 24}, - dictWord{4, 11, 133}, - dictWord{ - 7, - 11, - 711, - }, - dictWord{7, 11, 1298}, - dictWord{7, 11, 1585}, - dictWord{135, 11, 1929}, - dictWord{6, 0, 753}, - dictWord{140, 0, 657}, - dictWord{139, 0, 941}, - dictWord{ - 6, - 11, - 99, - }, - dictWord{7, 11, 1808}, - dictWord{145, 11, 57}, - dictWord{6, 11, 574}, - dictWord{7, 11, 428}, - dictWord{7, 11, 
1250}, - dictWord{10, 11, 669}, - dictWord{ - 11, - 11, - 485, - }, - dictWord{11, 11, 840}, - dictWord{12, 11, 300}, - dictWord{142, 11, 250}, - dictWord{4, 0, 532}, - dictWord{5, 0, 706}, - dictWord{135, 0, 662}, - dictWord{ - 5, - 0, - 837, - }, - dictWord{6, 0, 1651}, - dictWord{139, 0, 985}, - dictWord{7, 0, 1861}, - dictWord{9, 10, 197}, - dictWord{10, 10, 300}, - dictWord{12, 10, 473}, - dictWord{ - 13, - 10, - 90, - }, - dictWord{141, 10, 405}, - dictWord{137, 11, 252}, - dictWord{6, 11, 323}, - dictWord{135, 11, 1564}, - dictWord{4, 0, 330}, - dictWord{4, 0, 863}, - dictWord{7, 0, 933}, - dictWord{7, 0, 2012}, - dictWord{8, 0, 292}, - dictWord{7, 11, 461}, - dictWord{8, 11, 775}, - dictWord{138, 11, 435}, - dictWord{132, 10, 606}, - dictWord{ - 4, - 11, - 655, - }, - dictWord{7, 11, 850}, - dictWord{17, 11, 75}, - dictWord{146, 11, 137}, - dictWord{135, 0, 767}, - dictWord{7, 10, 1978}, - dictWord{136, 10, 676}, - dictWord{132, 0, 641}, - dictWord{135, 11, 1559}, - dictWord{134, 0, 1233}, - dictWord{137, 0, 242}, - dictWord{17, 0, 114}, - dictWord{4, 10, 361}, - dictWord{ - 133, - 10, - 315, - }, - dictWord{137, 0, 883}, - dictWord{132, 10, 461}, - dictWord{138, 0, 274}, - dictWord{134, 0, 2008}, - dictWord{134, 0, 1794}, - dictWord{4, 0, 703}, - dictWord{135, 0, 207}, - dictWord{12, 0, 285}, - dictWord{132, 10, 472}, - dictWord{132, 0, 571}, - dictWord{5, 0, 873}, - dictWord{5, 0, 960}, - dictWord{8, 0, 823}, - dictWord{9, 0, 881}, - dictWord{136, 11, 577}, - dictWord{7, 0, 617}, - dictWord{10, 0, 498}, - dictWord{11, 0, 501}, - dictWord{12, 0, 16}, - dictWord{140, 0, 150}, - dictWord{ - 138, - 10, - 747, - }, - dictWord{132, 0, 431}, - dictWord{133, 10, 155}, - dictWord{11, 0, 283}, - dictWord{11, 0, 567}, - dictWord{7, 10, 163}, - dictWord{8, 10, 319}, - dictWord{ - 9, - 10, - 402, - }, - dictWord{10, 10, 24}, - dictWord{10, 10, 681}, - dictWord{11, 10, 200}, - dictWord{12, 10, 253}, - dictWord{12, 10, 410}, - dictWord{142, 10, 219}, - dictWord{4, 11, 
413}, - dictWord{5, 11, 677}, - dictWord{8, 11, 432}, - dictWord{140, 11, 280}, - dictWord{9, 0, 401}, - dictWord{5, 10, 475}, - dictWord{7, 10, 1780}, - dictWord{11, 10, 297}, - dictWord{11, 10, 558}, - dictWord{14, 10, 322}, - dictWord{147, 10, 76}, - dictWord{6, 0, 781}, - dictWord{9, 0, 134}, - dictWord{10, 0, 2}, - dictWord{ - 10, - 0, - 27, - }, - dictWord{10, 0, 333}, - dictWord{11, 0, 722}, - dictWord{143, 0, 1}, - dictWord{5, 0, 33}, - dictWord{6, 0, 470}, - dictWord{139, 0, 424}, - dictWord{ - 135, - 0, - 2006, - }, - dictWord{12, 0, 783}, - dictWord{135, 10, 1956}, - dictWord{136, 0, 274}, - dictWord{135, 0, 1882}, - dictWord{132, 0, 794}, - dictWord{135, 0, 1848}, - dictWord{5, 10, 944}, - dictWord{134, 10, 1769}, - dictWord{6, 0, 47}, - dictWord{7, 0, 90}, - dictWord{7, 0, 664}, - dictWord{7, 0, 830}, - dictWord{7, 0, 1380}, - dictWord{ - 7, - 0, - 2025, - }, - dictWord{8, 0, 448}, - dictWord{136, 0, 828}, - dictWord{132, 10, 144}, - dictWord{134, 0, 1199}, - dictWord{4, 11, 395}, - dictWord{139, 11, 762}, - dictWord{135, 11, 1504}, - dictWord{9, 0, 417}, - dictWord{137, 0, 493}, - dictWord{9, 11, 174}, - dictWord{10, 11, 164}, - dictWord{11, 11, 440}, - dictWord{11, 11, 841}, - dictWord{143, 11, 98}, - dictWord{134, 11, 426}, - dictWord{139, 11, 1002}, - dictWord{134, 0, 295}, - dictWord{134, 0, 816}, - dictWord{6, 10, 247}, - dictWord{ - 137, - 10, - 555, - }, - dictWord{133, 0, 1019}, - dictWord{4, 0, 620}, - dictWord{5, 11, 476}, - dictWord{10, 10, 280}, - dictWord{138, 10, 797}, - dictWord{139, 0, 464}, - dictWord{5, 11, 76}, - dictWord{6, 11, 458}, - dictWord{6, 11, 497}, - dictWord{7, 11, 764}, - dictWord{7, 11, 868}, - dictWord{9, 11, 658}, - dictWord{10, 11, 594}, - dictWord{ - 11, - 11, - 173, - }, - dictWord{11, 11, 566}, - dictWord{12, 11, 20}, - dictWord{12, 11, 338}, - dictWord{141, 11, 200}, - dictWord{134, 0, 208}, - dictWord{4, 11, 526}, - dictWord{7, 11, 1029}, - dictWord{135, 11, 1054}, - dictWord{132, 11, 636}, - dictWord{6, 11, 
233}, - dictWord{7, 11, 660}, - dictWord{7, 11, 1124}, - dictWord{ - 17, - 11, - 31, - }, - dictWord{19, 11, 22}, - dictWord{151, 11, 14}, - dictWord{10, 0, 442}, - dictWord{133, 10, 428}, - dictWord{10, 0, 930}, - dictWord{140, 0, 778}, - dictWord{ - 6, - 0, - 68, - }, - dictWord{7, 0, 448}, - dictWord{7, 0, 1629}, - dictWord{7, 0, 1769}, - dictWord{7, 0, 1813}, - dictWord{8, 0, 442}, - dictWord{8, 0, 516}, - dictWord{9, 0, 710}, - dictWord{ - 10, - 0, - 282, - }, - dictWord{10, 0, 722}, - dictWord{7, 10, 1717}, - dictWord{138, 10, 546}, - dictWord{134, 0, 1128}, - dictWord{11, 0, 844}, - dictWord{12, 0, 104}, - dictWord{140, 0, 625}, - dictWord{4, 11, 432}, - dictWord{135, 11, 824}, - dictWord{138, 10, 189}, - dictWord{133, 0, 787}, - dictWord{133, 10, 99}, - dictWord{ - 4, - 11, - 279, - }, - dictWord{7, 11, 301}, - dictWord{137, 11, 362}, - dictWord{8, 0, 491}, - dictWord{4, 10, 397}, - dictWord{136, 10, 555}, - dictWord{4, 11, 178}, - dictWord{ - 133, - 11, - 399, - }, - dictWord{134, 0, 711}, - dictWord{144, 0, 9}, - dictWord{4, 0, 403}, - dictWord{5, 0, 441}, - dictWord{7, 0, 450}, - dictWord{10, 0, 840}, - dictWord{11, 0, 101}, - dictWord{12, 0, 193}, - dictWord{141, 0, 430}, - dictWord{135, 11, 1246}, - dictWord{12, 10, 398}, - dictWord{20, 10, 39}, - dictWord{21, 10, 11}, - dictWord{ - 150, - 10, - 41, - }, - dictWord{4, 10, 485}, - dictWord{7, 10, 353}, - dictWord{135, 10, 1523}, - dictWord{6, 10, 366}, - dictWord{7, 10, 1384}, - dictWord{7, 10, 1601}, - dictWord{ - 135, - 11, - 1912, - }, - dictWord{7, 0, 396}, - dictWord{10, 0, 160}, - dictWord{135, 11, 396}, - dictWord{137, 10, 282}, - dictWord{134, 11, 1692}, - dictWord{4, 10, 157}, - dictWord{5, 10, 471}, - dictWord{6, 11, 202}, - dictWord{10, 11, 448}, - dictWord{11, 11, 208}, - dictWord{12, 11, 360}, - dictWord{17, 11, 117}, - dictWord{ - 17, - 11, - 118, - }, - dictWord{18, 11, 27}, - dictWord{148, 11, 67}, - dictWord{133, 0, 679}, - dictWord{137, 0, 326}, - dictWord{136, 10, 116}, - dictWord{7, 
11, 872}, - dictWord{ - 10, - 11, - 516, - }, - dictWord{139, 11, 167}, - dictWord{132, 11, 224}, - dictWord{5, 11, 546}, - dictWord{7, 11, 35}, - dictWord{8, 11, 11}, - dictWord{8, 11, 12}, - dictWord{ - 9, - 11, - 315, - }, - dictWord{9, 11, 533}, - dictWord{10, 11, 802}, - dictWord{11, 11, 166}, - dictWord{12, 11, 525}, - dictWord{142, 11, 243}, - dictWord{7, 0, 1128}, - dictWord{135, 11, 1920}, - dictWord{5, 11, 241}, - dictWord{8, 11, 242}, - dictWord{9, 11, 451}, - dictWord{10, 11, 667}, - dictWord{11, 11, 598}, - dictWord{ - 140, - 11, - 429, - }, - dictWord{6, 0, 737}, - dictWord{5, 10, 160}, - dictWord{7, 10, 363}, - dictWord{7, 10, 589}, - dictWord{10, 10, 170}, - dictWord{141, 10, 55}, - dictWord{ - 135, - 0, - 1796, - }, - dictWord{142, 11, 254}, - dictWord{4, 0, 574}, - dictWord{7, 0, 350}, - dictWord{7, 0, 1024}, - dictWord{8, 0, 338}, - dictWord{9, 0, 677}, - dictWord{138, 0, 808}, - dictWord{134, 0, 1096}, - dictWord{137, 11, 516}, - dictWord{7, 0, 405}, - dictWord{10, 0, 491}, - dictWord{4, 10, 108}, - dictWord{4, 11, 366}, - dictWord{ - 139, - 10, - 498, - }, - dictWord{11, 11, 337}, - dictWord{142, 11, 303}, - dictWord{134, 11, 1736}, - dictWord{7, 0, 1081}, - dictWord{140, 11, 364}, - dictWord{7, 10, 1005}, - dictWord{140, 10, 609}, - dictWord{7, 0, 1676}, - dictWord{4, 10, 895}, - dictWord{133, 10, 772}, - dictWord{135, 0, 2037}, - dictWord{6, 0, 1207}, - dictWord{ - 11, - 11, - 916, - }, - dictWord{142, 11, 419}, - dictWord{14, 11, 140}, - dictWord{148, 11, 41}, - dictWord{6, 11, 331}, - dictWord{136, 11, 623}, - dictWord{9, 0, 944}, - dictWord{ - 9, - 0, - 969, - }, - dictWord{9, 0, 1022}, - dictWord{12, 0, 913}, - dictWord{12, 0, 936}, - dictWord{15, 0, 177}, - dictWord{15, 0, 193}, - dictWord{4, 10, 926}, - dictWord{ - 133, - 10, - 983, - }, - dictWord{5, 0, 354}, - dictWord{135, 11, 506}, - dictWord{8, 0, 598}, - dictWord{9, 0, 664}, - dictWord{138, 0, 441}, - dictWord{4, 11, 640}, - dictWord{ - 133, - 11, - 513, - }, - dictWord{137, 0, 
297}, - dictWord{132, 10, 538}, - dictWord{6, 10, 294}, - dictWord{7, 10, 1267}, - dictWord{136, 10, 624}, - dictWord{7, 0, 1772}, - dictWord{ - 7, - 11, - 1888, - }, - dictWord{8, 11, 289}, - dictWord{11, 11, 45}, - dictWord{12, 11, 278}, - dictWord{140, 11, 537}, - dictWord{135, 10, 1325}, - dictWord{138, 0, 751}, - dictWord{141, 0, 37}, - dictWord{134, 0, 1828}, - dictWord{132, 10, 757}, - dictWord{132, 11, 394}, - dictWord{6, 0, 257}, - dictWord{135, 0, 1522}, - dictWord{ - 4, - 0, - 582, - }, - dictWord{9, 0, 191}, - dictWord{135, 11, 1931}, - dictWord{7, 11, 574}, - dictWord{7, 11, 1719}, - dictWord{137, 11, 145}, - dictWord{132, 11, 658}, - dictWord{10, 0, 790}, - dictWord{132, 11, 369}, - dictWord{9, 11, 781}, - dictWord{10, 11, 144}, - dictWord{11, 11, 385}, - dictWord{13, 11, 161}, - dictWord{13, 11, 228}, - dictWord{13, 11, 268}, - dictWord{148, 11, 107}, - dictWord{8, 0, 469}, - dictWord{10, 0, 47}, - dictWord{136, 11, 374}, - dictWord{6, 0, 306}, - dictWord{7, 0, 1140}, - dictWord{7, 0, 1340}, - dictWord{8, 0, 133}, - dictWord{138, 0, 449}, - dictWord{139, 0, 1011}, - dictWord{7, 10, 1875}, - dictWord{139, 10, 124}, - dictWord{ - 4, - 11, - 344, - }, - dictWord{6, 11, 498}, - dictWord{139, 11, 323}, - dictWord{137, 0, 299}, - dictWord{132, 0, 837}, - dictWord{133, 11, 906}, - dictWord{5, 0, 329}, - dictWord{ - 8, - 0, - 260, - }, - dictWord{138, 0, 10}, - dictWord{134, 0, 1320}, - dictWord{4, 0, 657}, - dictWord{146, 0, 158}, - dictWord{135, 0, 1191}, - dictWord{152, 0, 7}, - dictWord{ - 6, - 0, - 1939, - }, - dictWord{8, 0, 974}, - dictWord{138, 0, 996}, - dictWord{135, 0, 1665}, - dictWord{11, 11, 126}, - dictWord{139, 11, 287}, - dictWord{143, 0, 8}, - dictWord{ - 14, - 11, - 149, - }, - dictWord{14, 11, 399}, - dictWord{143, 11, 57}, - dictWord{5, 0, 66}, - dictWord{7, 0, 1896}, - dictWord{136, 0, 288}, - dictWord{7, 0, 175}, - dictWord{ - 10, - 0, - 494, - }, - dictWord{5, 10, 150}, - dictWord{8, 10, 603}, - dictWord{9, 10, 593}, - dictWord{9, 10, 
634}, - dictWord{10, 10, 173}, - dictWord{11, 10, 462}, - dictWord{ - 11, - 10, - 515, - }, - dictWord{13, 10, 216}, - dictWord{13, 10, 288}, - dictWord{142, 10, 400}, - dictWord{134, 0, 1643}, - dictWord{136, 11, 21}, - dictWord{4, 0, 21}, - dictWord{ - 5, - 0, - 91, - }, - dictWord{5, 0, 648}, - dictWord{5, 0, 750}, - dictWord{5, 0, 781}, - dictWord{6, 0, 54}, - dictWord{6, 0, 112}, - dictWord{6, 0, 402}, - dictWord{6, 0, 1732}, - dictWord{ - 7, - 0, - 315, - }, - dictWord{7, 0, 749}, - dictWord{7, 0, 1427}, - dictWord{7, 0, 1900}, - dictWord{9, 0, 78}, - dictWord{9, 0, 508}, - dictWord{10, 0, 611}, - dictWord{10, 0, 811}, - dictWord{11, 0, 510}, - dictWord{11, 0, 728}, - dictWord{13, 0, 36}, - dictWord{14, 0, 39}, - dictWord{16, 0, 83}, - dictWord{17, 0, 124}, - dictWord{148, 0, 30}, - dictWord{ - 4, - 0, - 668, - }, - dictWord{136, 0, 570}, - dictWord{10, 0, 322}, - dictWord{10, 0, 719}, - dictWord{139, 0, 407}, - dictWord{135, 11, 1381}, - dictWord{136, 11, 193}, - dictWord{12, 10, 108}, - dictWord{141, 10, 291}, - dictWord{132, 11, 616}, - dictWord{136, 11, 692}, - dictWord{8, 0, 125}, - dictWord{8, 0, 369}, - dictWord{8, 0, 524}, - dictWord{10, 0, 486}, - dictWord{11, 0, 13}, - dictWord{11, 0, 381}, - dictWord{11, 0, 736}, - dictWord{11, 0, 766}, - dictWord{11, 0, 845}, - dictWord{13, 0, 114}, - dictWord{ - 13, - 0, - 292, - }, - dictWord{142, 0, 47}, - dictWord{134, 0, 1247}, - dictWord{6, 0, 1684}, - dictWord{6, 0, 1731}, - dictWord{7, 0, 356}, - dictWord{8, 0, 54}, - dictWord{8, 0, 221}, - dictWord{9, 0, 225}, - dictWord{9, 0, 356}, - dictWord{10, 0, 77}, - dictWord{10, 0, 446}, - dictWord{10, 0, 731}, - dictWord{12, 0, 404}, - dictWord{141, 0, 491}, - dictWord{135, 10, 1777}, - dictWord{4, 11, 305}, - dictWord{4, 10, 493}, - dictWord{144, 10, 55}, - dictWord{4, 0, 951}, - dictWord{6, 0, 1809}, - dictWord{6, 0, 1849}, - dictWord{8, 0, 846}, - dictWord{8, 0, 866}, - dictWord{8, 0, 899}, - dictWord{10, 0, 896}, - dictWord{12, 0, 694}, - dictWord{142, 0, 
468}, - dictWord{5, 11, 214}, - dictWord{ - 7, - 11, - 603, - }, - dictWord{8, 11, 611}, - dictWord{9, 11, 686}, - dictWord{10, 11, 88}, - dictWord{11, 11, 459}, - dictWord{11, 11, 496}, - dictWord{12, 11, 463}, - dictWord{ - 12, - 11, - 590, - }, - dictWord{13, 11, 0}, - dictWord{142, 11, 214}, - dictWord{132, 0, 411}, - dictWord{4, 0, 80}, - dictWord{133, 0, 44}, - dictWord{140, 11, 74}, - dictWord{ - 143, - 0, - 31, - }, - dictWord{7, 0, 669}, - dictWord{6, 10, 568}, - dictWord{7, 10, 1804}, - dictWord{8, 10, 362}, - dictWord{8, 10, 410}, - dictWord{8, 10, 830}, - dictWord{9, 10, 514}, - dictWord{11, 10, 649}, - dictWord{142, 10, 157}, - dictWord{7, 0, 673}, - dictWord{134, 11, 1703}, - dictWord{132, 10, 625}, - dictWord{134, 0, 1303}, - dictWord{ - 5, - 0, - 299, - }, - dictWord{135, 0, 1083}, - dictWord{138, 0, 704}, - dictWord{6, 0, 275}, - dictWord{7, 0, 408}, - dictWord{6, 10, 158}, - dictWord{7, 10, 129}, - dictWord{ - 7, - 10, - 181, - }, - dictWord{8, 10, 276}, - dictWord{8, 10, 377}, - dictWord{10, 10, 523}, - dictWord{11, 10, 816}, - dictWord{12, 10, 455}, - dictWord{13, 10, 303}, - dictWord{ - 142, - 10, - 135, - }, - dictWord{4, 0, 219}, - dictWord{7, 0, 367}, - dictWord{7, 0, 1713}, - dictWord{7, 0, 1761}, - dictWord{9, 0, 86}, - dictWord{9, 0, 537}, - dictWord{10, 0, 165}, - dictWord{12, 0, 219}, - dictWord{140, 0, 561}, - dictWord{8, 0, 216}, - dictWord{4, 10, 1}, - dictWord{4, 11, 737}, - dictWord{6, 11, 317}, - dictWord{7, 10, 1143}, - dictWord{ - 7, - 10, - 1463, - }, - dictWord{9, 10, 207}, - dictWord{9, 10, 390}, - dictWord{9, 10, 467}, - dictWord{10, 11, 98}, - dictWord{11, 11, 294}, - dictWord{11, 10, 836}, - dictWord{ - 12, - 11, - 60, - }, - dictWord{12, 11, 437}, - dictWord{13, 11, 64}, - dictWord{13, 11, 380}, - dictWord{142, 11, 430}, - dictWord{6, 11, 1758}, - dictWord{8, 11, 520}, - dictWord{9, 11, 345}, - dictWord{9, 11, 403}, - dictWord{142, 11, 350}, - dictWord{5, 11, 47}, - dictWord{10, 11, 242}, - dictWord{138, 11, 579}, - 
dictWord{5, 11, 139}, - dictWord{7, 11, 1168}, - dictWord{138, 11, 539}, - dictWord{135, 0, 1319}, - dictWord{4, 10, 295}, - dictWord{4, 10, 723}, - dictWord{5, 10, 895}, - dictWord{ - 7, - 10, - 1031, - }, - dictWord{8, 10, 199}, - dictWord{8, 10, 340}, - dictWord{9, 10, 153}, - dictWord{9, 10, 215}, - dictWord{10, 10, 21}, - dictWord{10, 10, 59}, - dictWord{ - 10, - 10, - 80, - }, - dictWord{10, 10, 224}, - dictWord{10, 10, 838}, - dictWord{11, 10, 229}, - dictWord{11, 10, 652}, - dictWord{12, 10, 192}, - dictWord{13, 10, 146}, - dictWord{ - 142, - 10, - 91, - }, - dictWord{140, 0, 428}, - dictWord{137, 10, 51}, - dictWord{133, 0, 514}, - dictWord{5, 10, 309}, - dictWord{140, 10, 211}, - dictWord{6, 0, 1010}, - dictWord{5, 10, 125}, - dictWord{8, 10, 77}, - dictWord{138, 10, 15}, - dictWord{4, 0, 55}, - dictWord{5, 0, 301}, - dictWord{6, 0, 571}, - dictWord{142, 0, 49}, - dictWord{ - 146, - 0, - 102, - }, - dictWord{136, 11, 370}, - dictWord{4, 11, 107}, - dictWord{7, 11, 613}, - dictWord{8, 11, 358}, - dictWord{8, 11, 439}, - dictWord{8, 11, 504}, - dictWord{ - 9, - 11, - 501, - }, - dictWord{10, 11, 383}, - dictWord{139, 11, 477}, - dictWord{132, 11, 229}, - dictWord{133, 0, 364}, - dictWord{133, 10, 439}, - dictWord{4, 11, 903}, - dictWord{135, 11, 1816}, - dictWord{11, 0, 379}, - dictWord{140, 10, 76}, - dictWord{4, 0, 76}, - dictWord{4, 0, 971}, - dictWord{7, 0, 1550}, - dictWord{9, 0, 306}, - dictWord{ - 9, - 0, - 430, - }, - dictWord{9, 0, 663}, - dictWord{10, 0, 683}, - dictWord{10, 0, 921}, - dictWord{11, 0, 427}, - dictWord{11, 0, 753}, - dictWord{12, 0, 334}, - dictWord{12, 0, 442}, - dictWord{14, 0, 258}, - dictWord{14, 0, 366}, - dictWord{143, 0, 131}, - dictWord{137, 0, 52}, - dictWord{4, 11, 47}, - dictWord{6, 11, 373}, - dictWord{7, 11, 452}, - dictWord{7, 11, 543}, - dictWord{7, 11, 1714}, - dictWord{7, 11, 1856}, - dictWord{9, 11, 6}, - dictWord{11, 11, 257}, - dictWord{139, 11, 391}, - dictWord{4, 10, 8}, - dictWord{ - 7, - 10, - 1152, - }, - 
dictWord{7, 10, 1153}, - dictWord{7, 10, 1715}, - dictWord{9, 10, 374}, - dictWord{10, 10, 478}, - dictWord{139, 10, 648}, - dictWord{4, 11, 785}, - dictWord{133, 11, 368}, - dictWord{135, 10, 1099}, - dictWord{135, 11, 860}, - dictWord{5, 11, 980}, - dictWord{134, 11, 1754}, - dictWord{134, 0, 1258}, - dictWord{ - 6, - 0, - 1058, - }, - dictWord{6, 0, 1359}, - dictWord{7, 11, 536}, - dictWord{7, 11, 1331}, - dictWord{136, 11, 143}, - dictWord{4, 0, 656}, - dictWord{135, 0, 779}, - dictWord{136, 10, 87}, - dictWord{5, 11, 19}, - dictWord{6, 11, 533}, - dictWord{146, 11, 126}, - dictWord{7, 0, 144}, - dictWord{138, 10, 438}, - dictWord{5, 11, 395}, - dictWord{5, 11, 951}, - dictWord{134, 11, 1776}, - dictWord{135, 0, 1373}, - dictWord{7, 0, 554}, - dictWord{7, 0, 605}, - dictWord{141, 0, 10}, - dictWord{4, 10, 69}, - dictWord{ - 5, - 10, - 122, - }, - dictWord{9, 10, 656}, - dictWord{138, 10, 464}, - dictWord{5, 10, 849}, - dictWord{134, 10, 1633}, - dictWord{5, 0, 838}, - dictWord{5, 0, 841}, - dictWord{134, 0, 1649}, - dictWord{133, 0, 1012}, - dictWord{139, 10, 499}, - dictWord{7, 10, 476}, - dictWord{7, 10, 1592}, - dictWord{138, 10, 87}, - dictWord{ - 6, - 0, - 251, - }, - dictWord{7, 0, 365}, - dictWord{7, 0, 1357}, - dictWord{7, 0, 1497}, - dictWord{8, 0, 154}, - dictWord{141, 0, 281}, - dictWord{132, 11, 441}, - dictWord{ - 132, - 11, - 695, - }, - dictWord{7, 11, 497}, - dictWord{9, 11, 387}, - dictWord{147, 11, 81}, - dictWord{133, 0, 340}, - dictWord{14, 10, 283}, - dictWord{142, 11, 283}, - dictWord{ - 134, - 0, - 810, - }, - dictWord{135, 11, 1894}, - dictWord{139, 0, 495}, - dictWord{5, 11, 284}, - dictWord{6, 11, 49}, - dictWord{6, 11, 350}, - dictWord{7, 11, 1}, - dictWord{ - 7, - 11, - 377, - }, - dictWord{7, 11, 1693}, - dictWord{8, 11, 18}, - dictWord{8, 11, 678}, - dictWord{9, 11, 161}, - dictWord{9, 11, 585}, - dictWord{9, 11, 671}, - dictWord{ - 9, - 11, - 839, - }, - dictWord{11, 11, 912}, - dictWord{141, 11, 427}, - dictWord{5, 10, 859}, - 
dictWord{7, 10, 1160}, - dictWord{8, 10, 107}, - dictWord{9, 10, 291}, - dictWord{ - 9, - 10, - 439, - }, - dictWord{10, 10, 663}, - dictWord{11, 10, 609}, - dictWord{140, 10, 197}, - dictWord{8, 0, 261}, - dictWord{9, 0, 144}, - dictWord{9, 0, 466}, - dictWord{ - 10, - 0, - 370, - }, - dictWord{12, 0, 470}, - dictWord{13, 0, 144}, - dictWord{142, 0, 348}, - dictWord{137, 0, 897}, - dictWord{6, 0, 248}, - dictWord{9, 0, 546}, - dictWord{10, 0, 535}, - dictWord{11, 0, 681}, - dictWord{141, 0, 135}, - dictWord{4, 0, 358}, - dictWord{135, 0, 1496}, - dictWord{134, 0, 567}, - dictWord{136, 0, 445}, - dictWord{ - 4, - 10, - 117, - }, - dictWord{6, 10, 372}, - dictWord{7, 10, 1905}, - dictWord{142, 10, 323}, - dictWord{4, 10, 722}, - dictWord{139, 10, 471}, - dictWord{6, 0, 697}, - dictWord{ - 134, - 0, - 996, - }, - dictWord{7, 11, 2007}, - dictWord{9, 11, 101}, - dictWord{9, 11, 450}, - dictWord{10, 11, 66}, - dictWord{10, 11, 842}, - dictWord{11, 11, 536}, - dictWord{ - 140, - 11, - 587, - }, - dictWord{132, 0, 577}, - dictWord{134, 0, 1336}, - dictWord{9, 10, 5}, - dictWord{12, 10, 216}, - dictWord{12, 10, 294}, - dictWord{12, 10, 298}, - dictWord{12, 10, 400}, - dictWord{12, 10, 518}, - dictWord{13, 10, 229}, - dictWord{143, 10, 139}, - dictWord{6, 0, 174}, - dictWord{138, 0, 917}, - dictWord{ - 134, - 10, - 1774, - }, - dictWord{5, 10, 12}, - dictWord{7, 10, 375}, - dictWord{9, 10, 88}, - dictWord{9, 10, 438}, - dictWord{11, 11, 62}, - dictWord{139, 10, 270}, - dictWord{ - 134, - 11, - 1766, - }, - dictWord{6, 11, 0}, - dictWord{7, 11, 84}, - dictWord{7, 10, 816}, - dictWord{7, 10, 1241}, - dictWord{9, 10, 283}, - dictWord{9, 10, 520}, - dictWord{10, 10, 213}, - dictWord{10, 10, 307}, - dictWord{10, 10, 463}, - dictWord{10, 10, 671}, - dictWord{10, 10, 746}, - dictWord{11, 10, 401}, - dictWord{11, 10, 794}, - dictWord{ - 11, - 11, - 895, - }, - dictWord{12, 10, 517}, - dictWord{17, 11, 11}, - dictWord{18, 10, 107}, - dictWord{147, 10, 115}, - dictWord{5, 0, 878}, - 
dictWord{133, 0, 972}, - dictWord{ - 6, - 11, - 1665, - }, - dictWord{7, 11, 256}, - dictWord{7, 11, 1388}, - dictWord{138, 11, 499}, - dictWord{4, 10, 258}, - dictWord{136, 10, 639}, - dictWord{4, 11, 22}, - dictWord{5, 11, 10}, - dictWord{6, 10, 22}, - dictWord{7, 11, 848}, - dictWord{7, 10, 903}, - dictWord{7, 10, 1963}, - dictWord{8, 11, 97}, - dictWord{138, 10, 577}, - dictWord{ - 5, - 10, - 681, - }, - dictWord{136, 10, 782}, - dictWord{133, 11, 481}, - dictWord{132, 0, 351}, - dictWord{4, 10, 664}, - dictWord{5, 10, 804}, - dictWord{139, 10, 1013}, - dictWord{6, 11, 134}, - dictWord{7, 11, 437}, - dictWord{7, 11, 959}, - dictWord{9, 11, 37}, - dictWord{14, 11, 285}, - dictWord{14, 11, 371}, - dictWord{144, 11, 60}, - dictWord{7, 11, 486}, - dictWord{8, 11, 155}, - dictWord{11, 11, 93}, - dictWord{140, 11, 164}, - dictWord{132, 0, 286}, - dictWord{7, 0, 438}, - dictWord{7, 0, 627}, - dictWord{7, 0, 1516}, - dictWord{8, 0, 40}, - dictWord{9, 0, 56}, - dictWord{9, 0, 294}, - dictWord{10, 0, 30}, - dictWord{11, 0, 969}, - dictWord{11, 0, 995}, - dictWord{146, 0, 148}, - dictWord{5, 11, 591}, - dictWord{135, 11, 337}, - dictWord{134, 0, 1950}, - dictWord{133, 10, 32}, - dictWord{138, 11, 500}, - dictWord{5, 11, 380}, - dictWord{ - 5, - 11, - 650, - }, - dictWord{136, 11, 310}, - dictWord{4, 11, 364}, - dictWord{7, 11, 1156}, - dictWord{7, 11, 1187}, - dictWord{137, 11, 409}, - dictWord{4, 0, 738}, - dictWord{134, 11, 482}, - dictWord{4, 11, 781}, - dictWord{6, 11, 487}, - dictWord{7, 11, 926}, - dictWord{8, 11, 263}, - dictWord{139, 11, 500}, - dictWord{135, 11, 418}, - dictWord{6, 0, 2047}, - dictWord{10, 0, 969}, - dictWord{4, 10, 289}, - dictWord{7, 10, 629}, - dictWord{7, 10, 1698}, - dictWord{7, 10, 1711}, - dictWord{ - 140, - 10, - 215, - }, - dictWord{6, 10, 450}, - dictWord{136, 10, 109}, - dictWord{134, 0, 818}, - dictWord{136, 10, 705}, - dictWord{133, 0, 866}, - dictWord{4, 11, 94}, - dictWord{ - 135, - 11, - 1265, - }, - dictWord{132, 11, 417}, - 
dictWord{134, 0, 1467}, - dictWord{135, 10, 1238}, - dictWord{4, 0, 972}, - dictWord{6, 0, 1851}, - dictWord{ - 134, - 0, - 1857, - }, - dictWord{134, 0, 355}, - dictWord{133, 0, 116}, - dictWord{132, 0, 457}, - dictWord{135, 11, 1411}, - dictWord{4, 11, 408}, - dictWord{4, 11, 741}, - dictWord{135, 11, 500}, - dictWord{134, 10, 26}, - dictWord{142, 11, 137}, - dictWord{5, 0, 527}, - dictWord{6, 0, 189}, - dictWord{7, 0, 859}, - dictWord{136, 0, 267}, - dictWord{11, 0, 104}, - dictWord{11, 0, 554}, - dictWord{15, 0, 60}, - dictWord{143, 0, 125}, - dictWord{134, 0, 1613}, - dictWord{4, 10, 414}, - dictWord{5, 10, 467}, - dictWord{ - 9, - 10, - 654, - }, - dictWord{10, 10, 451}, - dictWord{12, 10, 59}, - dictWord{141, 10, 375}, - dictWord{135, 10, 17}, - dictWord{134, 0, 116}, - dictWord{135, 11, 541}, - dictWord{135, 10, 955}, - dictWord{6, 11, 73}, - dictWord{135, 11, 177}, - dictWord{133, 11, 576}, - dictWord{134, 0, 886}, - dictWord{133, 0, 487}, - dictWord{ - 4, - 0, - 86, - }, - dictWord{5, 0, 667}, - dictWord{5, 0, 753}, - dictWord{6, 0, 316}, - dictWord{6, 0, 455}, - dictWord{135, 0, 946}, - dictWord{142, 11, 231}, - dictWord{150, 0, 45}, - dictWord{134, 0, 863}, - dictWord{134, 0, 1953}, - dictWord{6, 10, 280}, - dictWord{10, 10, 502}, - dictWord{11, 10, 344}, - dictWord{140, 10, 38}, - dictWord{4, 0, 79}, - dictWord{7, 0, 1773}, - dictWord{10, 0, 450}, - dictWord{11, 0, 589}, - dictWord{13, 0, 332}, - dictWord{13, 0, 493}, - dictWord{14, 0, 183}, - dictWord{14, 0, 334}, - dictWord{14, 0, 362}, - dictWord{14, 0, 368}, - dictWord{14, 0, 376}, - dictWord{14, 0, 379}, - dictWord{19, 0, 90}, - dictWord{19, 0, 103}, - dictWord{19, 0, 127}, - dictWord{ - 148, - 0, - 90, - }, - dictWord{5, 10, 45}, - dictWord{7, 10, 1161}, - dictWord{11, 10, 448}, - dictWord{11, 10, 880}, - dictWord{13, 10, 139}, - dictWord{13, 10, 407}, - dictWord{ - 15, - 10, - 16, - }, - dictWord{17, 10, 95}, - dictWord{18, 10, 66}, - dictWord{18, 10, 88}, - dictWord{18, 10, 123}, - 
dictWord{149, 10, 7}, - dictWord{136, 10, 777}, - dictWord{ - 4, - 10, - 410, - }, - dictWord{135, 10, 521}, - dictWord{135, 10, 1778}, - dictWord{135, 11, 538}, - dictWord{142, 0, 381}, - dictWord{133, 11, 413}, - dictWord{ - 134, - 0, - 1142, - }, - dictWord{6, 0, 1189}, - dictWord{136, 11, 495}, - dictWord{5, 0, 663}, - dictWord{6, 0, 1962}, - dictWord{134, 0, 2003}, - dictWord{7, 11, 54}, - dictWord{ - 8, - 11, - 312, - }, - dictWord{10, 11, 191}, - dictWord{10, 11, 614}, - dictWord{140, 11, 567}, - dictWord{132, 10, 436}, - dictWord{133, 0, 846}, - dictWord{10, 0, 528}, - dictWord{11, 0, 504}, - dictWord{7, 10, 1587}, - dictWord{135, 10, 1707}, - dictWord{5, 0, 378}, - dictWord{8, 0, 465}, - dictWord{9, 0, 286}, - dictWord{10, 0, 185}, - dictWord{ - 10, - 0, - 562, - }, - dictWord{10, 0, 635}, - dictWord{11, 0, 31}, - dictWord{11, 0, 393}, - dictWord{13, 0, 312}, - dictWord{18, 0, 65}, - dictWord{18, 0, 96}, - dictWord{147, 0, 89}, - dictWord{7, 0, 899}, - dictWord{14, 0, 325}, - dictWord{6, 11, 468}, - dictWord{7, 11, 567}, - dictWord{7, 11, 1478}, - dictWord{8, 11, 530}, - dictWord{142, 11, 290}, - dictWord{7, 0, 1880}, - dictWord{9, 0, 680}, - dictWord{139, 0, 798}, - dictWord{134, 0, 1770}, - dictWord{132, 0, 648}, - dictWord{150, 11, 35}, - dictWord{5, 0, 945}, - dictWord{6, 0, 1656}, - dictWord{6, 0, 1787}, - dictWord{7, 0, 167}, - dictWord{8, 0, 824}, - dictWord{9, 0, 391}, - dictWord{10, 0, 375}, - dictWord{139, 0, 185}, - dictWord{ - 6, - 11, - 484, - }, - dictWord{135, 11, 822}, - dictWord{134, 0, 2046}, - dictWord{7, 0, 1645}, - dictWord{8, 0, 352}, - dictWord{137, 0, 249}, - dictWord{132, 0, 152}, - dictWord{6, 0, 611}, - dictWord{135, 0, 1733}, - dictWord{6, 11, 1724}, - dictWord{135, 11, 2022}, - dictWord{133, 0, 1006}, - dictWord{141, 11, 96}, - dictWord{ - 5, - 0, - 420, - }, - dictWord{135, 0, 1449}, - dictWord{146, 11, 149}, - dictWord{135, 0, 832}, - dictWord{135, 10, 663}, - dictWord{133, 0, 351}, - dictWord{5, 0, 40}, - dictWord{ - 7, - 0, 
- 598, - }, - dictWord{7, 0, 1638}, - dictWord{8, 0, 78}, - dictWord{9, 0, 166}, - dictWord{9, 0, 640}, - dictWord{9, 0, 685}, - dictWord{9, 0, 773}, - dictWord{11, 0, 215}, - dictWord{13, 0, 65}, - dictWord{14, 0, 172}, - dictWord{14, 0, 317}, - dictWord{145, 0, 6}, - dictWord{8, 0, 60}, - dictWord{9, 0, 343}, - dictWord{139, 0, 769}, - dictWord{ - 134, - 0, - 1354, - }, - dictWord{132, 0, 724}, - dictWord{137, 0, 745}, - dictWord{132, 11, 474}, - dictWord{7, 0, 1951}, - dictWord{8, 0, 765}, - dictWord{8, 0, 772}, - dictWord{ - 140, - 0, - 671, - }, - dictWord{7, 0, 108}, - dictWord{8, 0, 219}, - dictWord{8, 0, 388}, - dictWord{9, 0, 775}, - dictWord{11, 0, 275}, - dictWord{140, 0, 464}, - dictWord{137, 0, 639}, - dictWord{135, 10, 503}, - dictWord{133, 11, 366}, - dictWord{5, 0, 15}, - dictWord{6, 0, 56}, - dictWord{7, 0, 1758}, - dictWord{8, 0, 500}, - dictWord{9, 0, 730}, - dictWord{ - 11, - 0, - 331, - }, - dictWord{13, 0, 150}, - dictWord{14, 0, 282}, - dictWord{5, 11, 305}, - dictWord{9, 11, 560}, - dictWord{141, 11, 208}, - dictWord{4, 10, 113}, - dictWord{ - 5, - 10, - 163, - }, - dictWord{5, 10, 735}, - dictWord{7, 10, 1009}, - dictWord{9, 10, 9}, - dictWord{9, 10, 771}, - dictWord{12, 10, 90}, - dictWord{13, 10, 138}, - dictWord{ - 13, - 10, - 410, - }, - dictWord{143, 10, 128}, - dictWord{4, 10, 324}, - dictWord{138, 10, 104}, - dictWord{135, 11, 466}, - dictWord{142, 11, 27}, - dictWord{134, 0, 1886}, - dictWord{5, 0, 205}, - dictWord{6, 0, 438}, - dictWord{9, 0, 711}, - dictWord{4, 11, 480}, - dictWord{6, 11, 167}, - dictWord{6, 11, 302}, - dictWord{6, 11, 1642}, - dictWord{ - 7, - 11, - 130, - }, - dictWord{7, 11, 656}, - dictWord{7, 11, 837}, - dictWord{7, 11, 1547}, - dictWord{7, 11, 1657}, - dictWord{8, 11, 429}, - dictWord{9, 11, 228}, - dictWord{ - 10, - 11, - 643, - }, - dictWord{13, 11, 289}, - dictWord{13, 11, 343}, - dictWord{147, 11, 101}, - dictWord{134, 0, 865}, - dictWord{6, 0, 2025}, - dictWord{136, 0, 965}, - dictWord{ - 7, - 11, - 
278, - }, - dictWord{10, 11, 739}, - dictWord{11, 11, 708}, - dictWord{141, 11, 348}, - dictWord{133, 0, 534}, - dictWord{135, 11, 1922}, - dictWord{ - 137, - 0, - 691, - }, - dictWord{4, 10, 935}, - dictWord{133, 10, 823}, - dictWord{6, 0, 443}, - dictWord{9, 0, 237}, - dictWord{9, 0, 571}, - dictWord{9, 0, 695}, - dictWord{10, 0, 139}, - dictWord{11, 0, 715}, - dictWord{12, 0, 417}, - dictWord{141, 0, 421}, - dictWord{5, 10, 269}, - dictWord{7, 10, 434}, - dictWord{7, 10, 891}, - dictWord{8, 10, 339}, - dictWord{ - 9, - 10, - 702, - }, - dictWord{11, 10, 594}, - dictWord{11, 10, 718}, - dictWord{145, 10, 100}, - dictWord{6, 0, 1555}, - dictWord{7, 0, 878}, - dictWord{9, 10, 485}, - dictWord{141, 10, 264}, - dictWord{134, 10, 1713}, - dictWord{7, 10, 1810}, - dictWord{11, 10, 866}, - dictWord{12, 10, 103}, - dictWord{141, 10, 495}, - dictWord{ - 135, - 10, - 900, - }, - dictWord{6, 0, 1410}, - dictWord{9, 11, 316}, - dictWord{139, 11, 256}, - dictWord{4, 0, 995}, - dictWord{135, 0, 1033}, - dictWord{132, 0, 578}, - dictWord{10, 0, 881}, - dictWord{12, 0, 740}, - dictWord{12, 0, 743}, - dictWord{140, 0, 759}, - dictWord{132, 0, 822}, - dictWord{133, 0, 923}, - dictWord{142, 10, 143}, - dictWord{135, 11, 1696}, - dictWord{6, 11, 363}, - dictWord{7, 11, 1955}, - dictWord{136, 11, 725}, - dictWord{132, 0, 924}, - dictWord{133, 0, 665}, - dictWord{ - 135, - 10, - 2029, - }, - dictWord{135, 0, 1901}, - dictWord{4, 0, 265}, - dictWord{6, 0, 1092}, - dictWord{6, 0, 1417}, - dictWord{7, 0, 807}, - dictWord{135, 0, 950}, - dictWord{ - 5, - 0, - 93, - }, - dictWord{12, 0, 267}, - dictWord{141, 0, 498}, - dictWord{135, 0, 1451}, - dictWord{5, 11, 813}, - dictWord{135, 11, 2046}, - dictWord{5, 10, 625}, - dictWord{135, 10, 1617}, - dictWord{135, 0, 747}, - dictWord{6, 0, 788}, - dictWord{137, 0, 828}, - dictWord{7, 0, 184}, - dictWord{11, 0, 307}, - dictWord{11, 0, 400}, - dictWord{15, 0, 130}, - dictWord{5, 11, 712}, - dictWord{7, 11, 1855}, - dictWord{8, 10, 425}, - 
dictWord{8, 10, 693}, - dictWord{9, 10, 720}, - dictWord{10, 10, 380}, - dictWord{10, 10, 638}, - dictWord{11, 11, 17}, - dictWord{11, 10, 473}, - dictWord{12, 10, 61}, - dictWord{13, 11, 321}, - dictWord{144, 11, 67}, - dictWord{135, 0, 198}, - dictWord{6, 11, 320}, - dictWord{7, 11, 781}, - dictWord{7, 11, 1921}, - dictWord{9, 11, 55}, - dictWord{10, 11, 186}, - dictWord{10, 11, 273}, - dictWord{10, 11, 664}, - dictWord{10, 11, 801}, - dictWord{11, 11, 996}, - dictWord{11, 11, 997}, - dictWord{13, 11, 157}, - dictWord{142, 11, 170}, - dictWord{136, 11, 271}, - dictWord{ - 135, - 0, - 994, - }, - dictWord{7, 11, 103}, - dictWord{7, 11, 863}, - dictWord{11, 11, 184}, - dictWord{14, 11, 299}, - dictWord{145, 11, 62}, - dictWord{11, 10, 551}, - dictWord{142, 10, 159}, - dictWord{5, 0, 233}, - dictWord{5, 0, 320}, - dictWord{6, 0, 140}, - dictWord{8, 0, 295}, - dictWord{8, 0, 615}, - dictWord{136, 11, 615}, - dictWord{ - 133, - 0, - 978, - }, - dictWord{4, 0, 905}, - dictWord{6, 0, 1701}, - dictWord{137, 0, 843}, - dictWord{132, 10, 168}, - dictWord{4, 0, 974}, - dictWord{8, 0, 850}, - dictWord{ - 12, - 0, - 709, - }, - dictWord{12, 0, 768}, - dictWord{140, 0, 786}, - dictWord{135, 10, 91}, - dictWord{152, 0, 6}, - dictWord{138, 10, 532}, - dictWord{135, 10, 1884}, - dictWord{132, 0, 509}, - dictWord{6, 0, 1307}, - dictWord{135, 0, 273}, - dictWord{5, 11, 77}, - dictWord{7, 11, 1455}, - dictWord{10, 11, 843}, - dictWord{19, 11, 73}, - dictWord{150, 11, 5}, - dictWord{132, 11, 458}, - dictWord{135, 11, 1420}, - dictWord{6, 11, 109}, - dictWord{138, 11, 382}, - dictWord{6, 0, 201}, - dictWord{6, 11, 330}, - dictWord{7, 10, 70}, - dictWord{7, 11, 1084}, - dictWord{10, 10, 240}, - dictWord{11, 11, 142}, - dictWord{147, 10, 93}, - dictWord{7, 0, 1041}, - dictWord{ - 140, - 11, - 328, - }, - dictWord{133, 11, 354}, - dictWord{134, 0, 1040}, - dictWord{133, 0, 693}, - dictWord{134, 0, 774}, - dictWord{139, 0, 234}, - dictWord{132, 0, 336}, - dictWord{7, 0, 1399}, - 
dictWord{139, 10, 392}, - dictWord{20, 0, 22}, - dictWord{148, 11, 22}, - dictWord{5, 0, 802}, - dictWord{7, 0, 2021}, - dictWord{136, 0, 805}, - dictWord{ - 5, - 0, - 167, - }, - dictWord{5, 0, 899}, - dictWord{6, 0, 410}, - dictWord{137, 0, 777}, - dictWord{137, 0, 789}, - dictWord{134, 0, 1705}, - dictWord{7, 10, 655}, - dictWord{ - 135, - 10, - 1844, - }, - dictWord{4, 10, 145}, - dictWord{6, 10, 176}, - dictWord{7, 10, 395}, - dictWord{137, 10, 562}, - dictWord{132, 10, 501}, - dictWord{135, 0, 10}, - dictWord{5, 0, 11}, - dictWord{6, 0, 117}, - dictWord{6, 0, 485}, - dictWord{7, 0, 1133}, - dictWord{9, 0, 582}, - dictWord{9, 0, 594}, - dictWord{10, 0, 82}, - dictWord{11, 0, 21}, - dictWord{11, 0, 818}, - dictWord{12, 0, 535}, - dictWord{13, 0, 86}, - dictWord{20, 0, 91}, - dictWord{23, 0, 13}, - dictWord{134, 10, 509}, - dictWord{4, 0, 264}, - dictWord{ - 7, - 0, - 1067, - }, - dictWord{8, 0, 204}, - dictWord{8, 0, 385}, - dictWord{139, 0, 953}, - dictWord{139, 11, 737}, - dictWord{138, 0, 56}, - dictWord{134, 0, 1917}, - dictWord{ - 133, - 0, - 470, - }, - dictWord{10, 11, 657}, - dictWord{14, 11, 297}, - dictWord{142, 11, 361}, - dictWord{135, 11, 412}, - dictWord{7, 0, 1198}, - dictWord{7, 11, 1198}, - dictWord{8, 11, 556}, - dictWord{14, 11, 123}, - dictWord{14, 11, 192}, - dictWord{143, 11, 27}, - dictWord{7, 11, 1985}, - dictWord{14, 11, 146}, - dictWord{15, 11, 42}, - dictWord{16, 11, 23}, - dictWord{17, 11, 86}, - dictWord{146, 11, 17}, - dictWord{11, 0, 1015}, - dictWord{136, 11, 122}, - dictWord{4, 10, 114}, - dictWord{ - 9, - 10, - 492, - }, - dictWord{13, 10, 462}, - dictWord{142, 10, 215}, - dictWord{4, 10, 77}, - dictWord{5, 10, 361}, - dictWord{6, 10, 139}, - dictWord{6, 10, 401}, - dictWord{ - 6, - 10, - 404, - }, - dictWord{7, 10, 413}, - dictWord{7, 10, 715}, - dictWord{7, 10, 1716}, - dictWord{11, 10, 279}, - dictWord{12, 10, 179}, - dictWord{12, 10, 258}, - dictWord{ - 13, - 10, - 244, - }, - dictWord{142, 10, 358}, - dictWord{134, 10, 
1717}, - dictWord{7, 10, 1061}, - dictWord{8, 10, 82}, - dictWord{11, 10, 250}, - dictWord{12, 10, 420}, - dictWord{141, 10, 184}, - dictWord{133, 0, 715}, - dictWord{135, 10, 724}, - dictWord{9, 0, 919}, - dictWord{9, 0, 922}, - dictWord{9, 0, 927}, - dictWord{9, 0, 933}, - dictWord{9, 0, 962}, - dictWord{9, 0, 1000}, - dictWord{9, 0, 1002}, - dictWord{9, 0, 1021}, - dictWord{12, 0, 890}, - dictWord{12, 0, 907}, - dictWord{12, 0, 930}, - dictWord{ - 15, - 0, - 207, - }, - dictWord{15, 0, 228}, - dictWord{15, 0, 238}, - dictWord{149, 0, 61}, - dictWord{8, 0, 794}, - dictWord{9, 0, 400}, - dictWord{10, 0, 298}, - dictWord{142, 0, 228}, - dictWord{5, 11, 430}, - dictWord{5, 11, 932}, - dictWord{6, 11, 131}, - dictWord{7, 11, 417}, - dictWord{9, 11, 522}, - dictWord{11, 11, 314}, - dictWord{141, 11, 390}, - dictWord{132, 0, 867}, - dictWord{8, 0, 724}, - dictWord{132, 11, 507}, - dictWord{137, 11, 261}, - dictWord{4, 11, 343}, - dictWord{133, 11, 511}, - dictWord{ - 6, - 0, - 190, - }, - dictWord{7, 0, 768}, - dictWord{135, 0, 1170}, - dictWord{6, 10, 513}, - dictWord{135, 10, 1052}, - dictWord{7, 11, 455}, - dictWord{138, 11, 591}, - dictWord{134, 0, 1066}, - dictWord{137, 10, 899}, - dictWord{14, 0, 67}, - dictWord{147, 0, 60}, - dictWord{4, 0, 948}, - dictWord{18, 0, 174}, - dictWord{146, 0, 176}, - dictWord{135, 0, 1023}, - dictWord{7, 10, 1417}, - dictWord{12, 10, 382}, - dictWord{17, 10, 48}, - dictWord{152, 10, 12}, - dictWord{134, 11, 575}, - dictWord{ - 132, - 0, - 764, - }, - dictWord{6, 10, 545}, - dictWord{7, 10, 565}, - dictWord{7, 10, 1669}, - dictWord{10, 10, 114}, - dictWord{11, 10, 642}, - dictWord{140, 10, 618}, - dictWord{ - 6, - 0, - 137, - }, - dictWord{9, 0, 75}, - dictWord{9, 0, 253}, - dictWord{10, 0, 194}, - dictWord{138, 0, 444}, - dictWord{4, 0, 756}, - dictWord{133, 10, 5}, - dictWord{8, 0, 1008}, - dictWord{135, 10, 192}, - dictWord{132, 0, 842}, - dictWord{11, 0, 643}, - dictWord{12, 0, 115}, - dictWord{136, 10, 763}, - dictWord{139, 0, 
67}, - dictWord{ - 133, - 10, - 759, - }, - dictWord{4, 0, 821}, - dictWord{5, 0, 760}, - dictWord{7, 0, 542}, - dictWord{8, 0, 135}, - dictWord{8, 0, 496}, - dictWord{135, 11, 580}, - dictWord{7, 10, 370}, - dictWord{7, 10, 1007}, - dictWord{7, 10, 1177}, - dictWord{135, 10, 1565}, - dictWord{135, 10, 1237}, - dictWord{140, 0, 736}, - dictWord{7, 0, 319}, - dictWord{ - 7, - 0, - 355, - }, - dictWord{7, 0, 763}, - dictWord{10, 0, 389}, - dictWord{145, 0, 43}, - dictWord{8, 11, 333}, - dictWord{138, 11, 182}, - dictWord{4, 10, 87}, - dictWord{5, 10, 250}, - dictWord{141, 10, 298}, - dictWord{138, 0, 786}, - dictWord{134, 0, 2044}, - dictWord{8, 11, 330}, - dictWord{140, 11, 477}, - dictWord{135, 11, 1338}, - dictWord{132, 11, 125}, - dictWord{134, 0, 1030}, - dictWord{134, 0, 1083}, - dictWord{132, 11, 721}, - dictWord{135, 10, 814}, - dictWord{7, 11, 776}, - dictWord{ - 8, - 11, - 145, - }, - dictWord{147, 11, 56}, - dictWord{134, 0, 1226}, - dictWord{4, 10, 57}, - dictWord{7, 10, 1195}, - dictWord{7, 10, 1438}, - dictWord{7, 10, 1548}, - dictWord{ - 7, - 10, - 1835, - }, - dictWord{7, 10, 1904}, - dictWord{9, 10, 757}, - dictWord{10, 10, 604}, - dictWord{139, 10, 519}, - dictWord{7, 11, 792}, - dictWord{8, 11, 147}, - dictWord{10, 11, 821}, - dictWord{139, 11, 1021}, - dictWord{137, 11, 797}, - dictWord{4, 0, 58}, - dictWord{5, 0, 286}, - dictWord{6, 0, 319}, - dictWord{7, 0, 402}, - dictWord{ - 7, - 0, - 1254, - }, - dictWord{7, 0, 1903}, - dictWord{8, 0, 356}, - dictWord{140, 0, 408}, - dictWord{4, 0, 389}, - dictWord{4, 0, 815}, - dictWord{9, 0, 181}, - dictWord{9, 0, 255}, - dictWord{10, 0, 8}, - dictWord{10, 0, 29}, - dictWord{10, 0, 816}, - dictWord{11, 0, 311}, - dictWord{11, 0, 561}, - dictWord{12, 0, 67}, - dictWord{141, 0, 181}, - dictWord{ - 7, - 11, - 1472, - }, - dictWord{135, 11, 1554}, - dictWord{7, 11, 1071}, - dictWord{7, 11, 1541}, - dictWord{7, 11, 1767}, - dictWord{7, 11, 1806}, - dictWord{7, 11, 1999}, - dictWord{9, 11, 248}, - dictWord{10, 
11, 400}, - dictWord{11, 11, 162}, - dictWord{11, 11, 178}, - dictWord{11, 11, 242}, - dictWord{12, 11, 605}, - dictWord{ - 15, - 11, - 26, - }, - dictWord{144, 11, 44}, - dictWord{5, 11, 168}, - dictWord{5, 11, 930}, - dictWord{8, 11, 74}, - dictWord{9, 11, 623}, - dictWord{12, 11, 500}, - dictWord{ - 12, - 11, - 579, - }, - dictWord{13, 11, 41}, - dictWord{143, 11, 93}, - dictWord{6, 11, 220}, - dictWord{7, 11, 1101}, - dictWord{141, 11, 105}, - dictWord{5, 0, 474}, - dictWord{ - 7, - 0, - 507, - }, - dictWord{4, 10, 209}, - dictWord{7, 11, 507}, - dictWord{135, 10, 902}, - dictWord{132, 0, 427}, - dictWord{6, 0, 413}, - dictWord{7, 10, 335}, - dictWord{ - 7, - 10, - 1437, - }, - dictWord{7, 10, 1668}, - dictWord{8, 10, 553}, - dictWord{8, 10, 652}, - dictWord{8, 10, 656}, - dictWord{9, 10, 558}, - dictWord{11, 10, 743}, - dictWord{ - 149, - 10, - 18, - }, - dictWord{132, 0, 730}, - dictWord{6, 11, 19}, - dictWord{7, 11, 1413}, - dictWord{139, 11, 428}, - dictWord{133, 0, 373}, - dictWord{132, 10, 559}, - dictWord{7, 11, 96}, - dictWord{8, 11, 401}, - dictWord{137, 11, 896}, - dictWord{7, 0, 799}, - dictWord{7, 0, 1972}, - dictWord{5, 10, 1017}, - dictWord{138, 10, 511}, - dictWord{135, 0, 1793}, - dictWord{7, 11, 1961}, - dictWord{7, 11, 1965}, - dictWord{8, 11, 702}, - dictWord{136, 11, 750}, - dictWord{8, 11, 150}, - dictWord{8, 11, 737}, - dictWord{140, 11, 366}, - dictWord{132, 0, 322}, - dictWord{133, 10, 709}, - dictWord{8, 11, 800}, - dictWord{9, 11, 148}, - dictWord{9, 11, 872}, - dictWord{ - 9, - 11, - 890, - }, - dictWord{11, 11, 309}, - dictWord{11, 11, 1001}, - dictWord{13, 11, 267}, - dictWord{141, 11, 323}, - dictWord{134, 10, 1745}, - dictWord{7, 0, 290}, - dictWord{136, 10, 206}, - dictWord{7, 0, 1651}, - dictWord{145, 0, 89}, - dictWord{139, 0, 2}, - dictWord{132, 0, 672}, - dictWord{6, 0, 1860}, - dictWord{8, 0, 905}, - dictWord{ - 10, - 0, - 844, - }, - dictWord{10, 0, 846}, - dictWord{10, 0, 858}, - dictWord{12, 0, 699}, - dictWord{12, 0, 
746}, - dictWord{140, 0, 772}, - dictWord{135, 11, 424}, - dictWord{133, 11, 547}, - dictWord{133, 0, 737}, - dictWord{5, 11, 490}, - dictWord{6, 11, 615}, - dictWord{6, 11, 620}, - dictWord{135, 11, 683}, - dictWord{6, 0, 746}, - dictWord{134, 0, 1612}, - dictWord{132, 10, 776}, - dictWord{9, 11, 385}, - dictWord{149, 11, 17}, - dictWord{133, 0, 145}, - dictWord{135, 10, 1272}, - dictWord{ - 7, - 0, - 884, - }, - dictWord{140, 0, 124}, - dictWord{4, 0, 387}, - dictWord{135, 0, 1288}, - dictWord{5, 11, 133}, - dictWord{136, 10, 406}, - dictWord{136, 11, 187}, - dictWord{ - 6, - 0, - 679, - }, - dictWord{8, 11, 8}, - dictWord{138, 11, 0}, - dictWord{135, 0, 550}, - dictWord{135, 11, 798}, - dictWord{136, 11, 685}, - dictWord{7, 11, 1086}, - dictWord{145, 11, 46}, - dictWord{8, 10, 175}, - dictWord{10, 10, 168}, - dictWord{138, 10, 573}, - dictWord{135, 0, 1305}, - dictWord{4, 0, 576}, - dictWord{ - 135, - 0, - 1263, - }, - dictWord{6, 0, 686}, - dictWord{134, 0, 1563}, - dictWord{134, 0, 607}, - dictWord{5, 0, 919}, - dictWord{134, 0, 1673}, - dictWord{148, 0, 37}, - dictWord{ - 8, - 11, - 774, - }, - dictWord{10, 11, 670}, - dictWord{140, 11, 51}, - dictWord{133, 10, 784}, - dictWord{139, 10, 882}, - dictWord{4, 0, 82}, - dictWord{5, 0, 333}, - dictWord{ - 5, - 0, - 904, - }, - dictWord{6, 0, 207}, - dictWord{7, 0, 325}, - dictWord{7, 0, 1726}, - dictWord{8, 0, 101}, - dictWord{10, 0, 778}, - dictWord{139, 0, 220}, - dictWord{135, 11, 371}, - dictWord{132, 0, 958}, - dictWord{133, 0, 903}, - dictWord{4, 11, 127}, - dictWord{5, 11, 350}, - dictWord{6, 11, 356}, - dictWord{8, 11, 426}, - dictWord{9, 11, 572}, - dictWord{10, 11, 247}, - dictWord{139, 11, 312}, - dictWord{140, 0, 147}, - dictWord{6, 11, 59}, - dictWord{7, 11, 885}, - dictWord{9, 11, 603}, - dictWord{ - 141, - 11, - 397, - }, - dictWord{10, 0, 367}, - dictWord{9, 10, 14}, - dictWord{9, 10, 441}, - dictWord{139, 10, 9}, - dictWord{11, 10, 966}, - dictWord{12, 10, 287}, - dictWord{ - 13, - 10, - 342, - }, 
- dictWord{13, 10, 402}, - dictWord{15, 10, 110}, - dictWord{143, 10, 163}, - dictWord{134, 0, 690}, - dictWord{132, 0, 705}, - dictWord{9, 0, 651}, - dictWord{ - 11, - 0, - 971, - }, - dictWord{13, 0, 273}, - dictWord{7, 10, 1428}, - dictWord{7, 10, 1640}, - dictWord{7, 10, 1867}, - dictWord{9, 10, 169}, - dictWord{9, 10, 182}, - dictWord{ - 9, - 10, - 367, - }, - dictWord{9, 10, 478}, - dictWord{9, 10, 506}, - dictWord{9, 10, 551}, - dictWord{9, 10, 557}, - dictWord{9, 10, 648}, - dictWord{9, 10, 697}, - dictWord{ - 9, - 10, - 705, - }, - dictWord{9, 10, 725}, - dictWord{9, 10, 787}, - dictWord{9, 10, 794}, - dictWord{10, 10, 198}, - dictWord{10, 10, 214}, - dictWord{10, 10, 267}, - dictWord{ - 10, - 10, - 275, - }, - dictWord{10, 10, 456}, - dictWord{10, 10, 551}, - dictWord{10, 10, 561}, - dictWord{10, 10, 613}, - dictWord{10, 10, 627}, - dictWord{10, 10, 668}, - dictWord{10, 10, 675}, - dictWord{10, 10, 691}, - dictWord{10, 10, 695}, - dictWord{10, 10, 707}, - dictWord{10, 10, 715}, - dictWord{11, 10, 183}, - dictWord{ - 11, - 10, - 201, - }, - dictWord{11, 10, 262}, - dictWord{11, 10, 352}, - dictWord{11, 10, 439}, - dictWord{11, 10, 493}, - dictWord{11, 10, 572}, - dictWord{11, 10, 591}, - dictWord{ - 11, - 10, - 608, - }, - dictWord{11, 10, 611}, - dictWord{11, 10, 646}, - dictWord{11, 10, 674}, - dictWord{11, 10, 711}, - dictWord{11, 10, 751}, - dictWord{11, 10, 761}, - dictWord{11, 10, 776}, - dictWord{11, 10, 785}, - dictWord{11, 10, 850}, - dictWord{11, 10, 853}, - dictWord{11, 10, 862}, - dictWord{11, 10, 865}, - dictWord{ - 11, - 10, - 868, - }, - dictWord{11, 10, 875}, - dictWord{11, 10, 898}, - dictWord{11, 10, 902}, - dictWord{11, 10, 903}, - dictWord{11, 10, 910}, - dictWord{11, 10, 932}, - dictWord{ - 11, - 10, - 942, - }, - dictWord{11, 10, 957}, - dictWord{11, 10, 967}, - dictWord{11, 10, 972}, - dictWord{12, 10, 148}, - dictWord{12, 10, 195}, - dictWord{12, 10, 220}, - dictWord{12, 10, 237}, - dictWord{12, 10, 318}, - dictWord{12, 10, 339}, - 
dictWord{12, 10, 393}, - dictWord{12, 10, 445}, - dictWord{12, 10, 450}, - dictWord{ - 12, - 10, - 474, - }, - dictWord{12, 10, 505}, - dictWord{12, 10, 509}, - dictWord{12, 10, 533}, - dictWord{12, 10, 591}, - dictWord{12, 10, 594}, - dictWord{12, 10, 597}, - dictWord{ - 12, - 10, - 621, - }, - dictWord{12, 10, 633}, - dictWord{12, 10, 642}, - dictWord{13, 10, 59}, - dictWord{13, 10, 60}, - dictWord{13, 10, 145}, - dictWord{13, 10, 239}, - dictWord{13, 10, 250}, - dictWord{13, 10, 329}, - dictWord{13, 10, 344}, - dictWord{13, 10, 365}, - dictWord{13, 10, 372}, - dictWord{13, 10, 387}, - dictWord{ - 13, - 10, - 403, - }, - dictWord{13, 10, 414}, - dictWord{13, 10, 456}, - dictWord{13, 10, 470}, - dictWord{13, 10, 478}, - dictWord{13, 10, 483}, - dictWord{13, 10, 489}, - dictWord{ - 14, - 10, - 55, - }, - dictWord{14, 10, 57}, - dictWord{14, 10, 81}, - dictWord{14, 10, 90}, - dictWord{14, 10, 148}, - dictWord{14, 10, 239}, - dictWord{14, 10, 266}, - dictWord{ - 14, - 10, - 321, - }, - dictWord{14, 10, 326}, - dictWord{14, 10, 327}, - dictWord{14, 10, 330}, - dictWord{14, 10, 347}, - dictWord{14, 10, 355}, - dictWord{14, 10, 401}, - dictWord{14, 10, 404}, - dictWord{14, 10, 411}, - dictWord{14, 10, 414}, - dictWord{14, 10, 416}, - dictWord{14, 10, 420}, - dictWord{15, 10, 61}, - dictWord{ - 15, - 10, - 74, - }, - dictWord{15, 10, 87}, - dictWord{15, 10, 88}, - dictWord{15, 10, 94}, - dictWord{15, 10, 96}, - dictWord{15, 10, 116}, - dictWord{15, 10, 149}, - dictWord{ - 15, - 10, - 154, - }, - dictWord{16, 10, 50}, - dictWord{16, 10, 63}, - dictWord{16, 10, 73}, - dictWord{17, 10, 2}, - dictWord{17, 10, 66}, - dictWord{17, 10, 92}, - dictWord{17, 10, 103}, - dictWord{17, 10, 112}, - dictWord{17, 10, 120}, - dictWord{18, 10, 50}, - dictWord{18, 10, 54}, - dictWord{18, 10, 82}, - dictWord{18, 10, 86}, - dictWord{18, 10, 90}, - dictWord{18, 10, 111}, - dictWord{18, 10, 115}, - dictWord{18, 10, 156}, - dictWord{19, 10, 40}, - dictWord{19, 10, 79}, - dictWord{20, 10, 78}, - 
dictWord{149, 10, 22}, - dictWord{7, 0, 887}, - dictWord{5, 10, 161}, - dictWord{135, 10, 839}, - dictWord{142, 11, 98}, - dictWord{134, 0, 90}, - dictWord{138, 11, 356}, - dictWord{ - 135, - 11, - 441, - }, - dictWord{6, 11, 111}, - dictWord{7, 11, 4}, - dictWord{8, 11, 163}, - dictWord{8, 11, 776}, - dictWord{138, 11, 566}, - dictWord{134, 0, 908}, - dictWord{ - 134, - 0, - 1261, - }, - dictWord{7, 0, 813}, - dictWord{12, 0, 497}, - dictWord{141, 0, 56}, - dictWord{134, 0, 1235}, - dictWord{135, 0, 429}, - dictWord{135, 11, 1994}, - dictWord{138, 0, 904}, - dictWord{6, 0, 125}, - dictWord{7, 0, 1277}, - dictWord{137, 0, 772}, - dictWord{151, 0, 12}, - dictWord{4, 0, 841}, - dictWord{5, 0, 386}, - dictWord{ - 133, - 11, - 386, - }, - dictWord{5, 11, 297}, - dictWord{135, 11, 1038}, - dictWord{6, 0, 860}, - dictWord{6, 0, 1069}, - dictWord{135, 11, 309}, - dictWord{136, 0, 946}, - dictWord{135, 10, 1814}, - dictWord{141, 11, 418}, - dictWord{136, 11, 363}, - dictWord{10, 0, 768}, - dictWord{139, 0, 787}, - dictWord{22, 11, 30}, - dictWord{ - 150, - 11, - 33, - }, - dictWord{6, 0, 160}, - dictWord{7, 0, 1106}, - dictWord{9, 0, 770}, - dictWord{11, 0, 112}, - dictWord{140, 0, 413}, - dictWord{11, 11, 216}, - dictWord{ - 139, - 11, - 340, - }, - dictWord{136, 10, 139}, - dictWord{135, 11, 1390}, - dictWord{135, 11, 808}, - dictWord{132, 11, 280}, - dictWord{12, 0, 271}, - dictWord{17, 0, 109}, - dictWord{7, 10, 643}, - dictWord{136, 10, 236}, - dictWord{140, 11, 54}, - dictWord{4, 11, 421}, - dictWord{133, 11, 548}, - dictWord{11, 0, 719}, - dictWord{12, 0, 36}, - dictWord{141, 0, 337}, - dictWord{7, 0, 581}, - dictWord{9, 0, 644}, - dictWord{137, 0, 699}, - dictWord{11, 11, 511}, - dictWord{13, 11, 394}, - dictWord{14, 11, 298}, - dictWord{14, 11, 318}, - dictWord{146, 11, 103}, - dictWord{7, 0, 304}, - dictWord{9, 0, 646}, - dictWord{9, 0, 862}, - dictWord{11, 0, 696}, - dictWord{12, 0, 208}, - dictWord{15, 0, 79}, - dictWord{147, 0, 108}, - dictWord{4, 0, 631}, - 
dictWord{7, 0, 1126}, - dictWord{135, 0, 1536}, - dictWord{135, 11, 1527}, - dictWord{8, 0, 880}, - dictWord{10, 0, 869}, - dictWord{138, 0, 913}, - dictWord{7, 0, 1513}, - dictWord{5, 10, 54}, - dictWord{6, 11, 254}, - dictWord{9, 11, 109}, - dictWord{138, 11, 103}, - dictWord{135, 0, 981}, - dictWord{133, 11, 729}, - dictWord{132, 10, 744}, - dictWord{132, 0, 434}, - dictWord{134, 0, 550}, - dictWord{7, 0, 930}, - dictWord{10, 0, 476}, - dictWord{13, 0, 452}, - dictWord{19, 0, 104}, - dictWord{6, 11, 1630}, - dictWord{10, 10, 402}, - dictWord{146, 10, 55}, - dictWord{5, 0, 553}, - dictWord{138, 0, 824}, - dictWord{136, 0, 452}, - dictWord{8, 0, 151}, - dictWord{137, 10, 624}, - dictWord{132, 10, 572}, - dictWord{132, 0, 772}, - dictWord{133, 11, 671}, - dictWord{ - 133, - 0, - 292, - }, - dictWord{138, 0, 135}, - dictWord{132, 11, 889}, - dictWord{140, 11, 207}, - dictWord{9, 0, 504}, - dictWord{6, 10, 43}, - dictWord{7, 10, 38}, - dictWord{ - 8, - 10, - 248, - }, - dictWord{138, 10, 513}, - dictWord{6, 0, 1089}, - dictWord{135, 11, 1910}, - dictWord{4, 11, 627}, - dictWord{133, 11, 775}, - dictWord{135, 0, 783}, - dictWord{133, 10, 766}, - dictWord{133, 10, 363}, - dictWord{7, 0, 387}, - dictWord{135, 11, 387}, - dictWord{7, 0, 393}, - dictWord{10, 0, 603}, - dictWord{11, 0, 206}, - dictWord{7, 11, 202}, - dictWord{11, 11, 362}, - dictWord{11, 11, 948}, - dictWord{140, 11, 388}, - dictWord{6, 11, 507}, - dictWord{7, 11, 451}, - dictWord{8, 11, 389}, - dictWord{12, 11, 490}, - dictWord{13, 11, 16}, - dictWord{13, 11, 215}, - dictWord{13, 11, 351}, - dictWord{18, 11, 132}, - dictWord{147, 11, 125}, - dictWord{ - 4, - 0, - 912, - }, - dictWord{9, 0, 232}, - dictWord{135, 11, 841}, - dictWord{6, 10, 258}, - dictWord{140, 10, 409}, - dictWord{5, 10, 249}, - dictWord{148, 10, 82}, - dictWord{ - 136, - 11, - 566, - }, - dictWord{6, 0, 977}, - dictWord{135, 11, 1214}, - dictWord{7, 0, 1973}, - dictWord{136, 0, 716}, - dictWord{135, 0, 98}, - dictWord{133, 0, 733}, - 
dictWord{ - 5, - 11, - 912, - }, - dictWord{134, 11, 1695}, - dictWord{5, 10, 393}, - dictWord{6, 10, 378}, - dictWord{7, 10, 1981}, - dictWord{9, 10, 32}, - dictWord{9, 10, 591}, - dictWord{10, 10, 685}, - dictWord{10, 10, 741}, - dictWord{142, 10, 382}, - dictWord{133, 10, 788}, - dictWord{10, 0, 19}, - dictWord{11, 0, 911}, - dictWord{7, 10, 1968}, - dictWord{141, 10, 509}, - dictWord{5, 0, 668}, - dictWord{5, 11, 236}, - dictWord{6, 11, 572}, - dictWord{8, 11, 492}, - dictWord{11, 11, 618}, - dictWord{144, 11, 56}, - dictWord{135, 11, 1789}, - dictWord{4, 0, 360}, - dictWord{5, 0, 635}, - dictWord{5, 0, 700}, - dictWord{5, 10, 58}, - dictWord{5, 10, 171}, - dictWord{5, 10, 683}, - dictWord{ - 6, - 10, - 291, - }, - dictWord{6, 10, 566}, - dictWord{7, 10, 1650}, - dictWord{11, 10, 523}, - dictWord{12, 10, 273}, - dictWord{12, 10, 303}, - dictWord{15, 10, 39}, - dictWord{143, 10, 111}, - dictWord{133, 0, 901}, - dictWord{134, 10, 589}, - dictWord{5, 11, 190}, - dictWord{136, 11, 318}, - dictWord{140, 0, 656}, - dictWord{ - 7, - 0, - 726, - }, - dictWord{152, 0, 9}, - dictWord{4, 10, 917}, - dictWord{133, 10, 1005}, - dictWord{135, 10, 1598}, - dictWord{134, 11, 491}, - dictWord{4, 10, 919}, - dictWord{133, 11, 434}, - dictWord{137, 0, 72}, - dictWord{6, 0, 1269}, - dictWord{6, 0, 1566}, - dictWord{134, 0, 1621}, - dictWord{9, 0, 463}, - dictWord{10, 0, 595}, - dictWord{4, 10, 255}, - dictWord{5, 10, 302}, - dictWord{6, 10, 132}, - dictWord{7, 10, 128}, - dictWord{7, 10, 283}, - dictWord{7, 10, 1299}, - dictWord{10, 10, 52}, - dictWord{ - 10, - 10, - 514, - }, - dictWord{11, 10, 925}, - dictWord{13, 10, 92}, - dictWord{142, 10, 309}, - dictWord{135, 0, 1454}, - dictWord{134, 0, 1287}, - dictWord{11, 0, 600}, - dictWord{13, 0, 245}, - dictWord{137, 10, 173}, - dictWord{136, 0, 989}, - dictWord{7, 0, 164}, - dictWord{7, 0, 1571}, - dictWord{9, 0, 107}, - dictWord{140, 0, 225}, - dictWord{6, 0, 1061}, - dictWord{141, 10, 442}, - dictWord{4, 0, 27}, - dictWord{5, 0, 
484}, - dictWord{5, 0, 510}, - dictWord{6, 0, 434}, - dictWord{7, 0, 1000}, - dictWord{ - 7, - 0, - 1098, - }, - dictWord{136, 0, 2}, - dictWord{7, 11, 85}, - dictWord{7, 11, 247}, - dictWord{8, 11, 585}, - dictWord{10, 11, 163}, - dictWord{138, 11, 316}, - dictWord{ - 11, - 11, - 103, - }, - dictWord{142, 11, 0}, - dictWord{134, 0, 1127}, - dictWord{4, 0, 460}, - dictWord{134, 0, 852}, - dictWord{134, 10, 210}, - dictWord{4, 0, 932}, - dictWord{ - 133, - 0, - 891, - }, - dictWord{6, 0, 588}, - dictWord{147, 11, 83}, - dictWord{8, 0, 625}, - dictWord{4, 10, 284}, - dictWord{134, 10, 223}, - dictWord{134, 0, 76}, - dictWord{8, 0, 92}, - dictWord{137, 0, 221}, - dictWord{4, 11, 124}, - dictWord{10, 11, 457}, - dictWord{11, 11, 121}, - dictWord{11, 11, 169}, - dictWord{11, 11, 422}, - dictWord{ - 11, - 11, - 870, - }, - dictWord{12, 11, 214}, - dictWord{13, 11, 389}, - dictWord{14, 11, 187}, - dictWord{143, 11, 77}, - dictWord{9, 11, 618}, - dictWord{138, 11, 482}, - dictWord{ - 4, - 10, - 218, - }, - dictWord{7, 10, 526}, - dictWord{143, 10, 137}, - dictWord{13, 0, 9}, - dictWord{14, 0, 104}, - dictWord{14, 0, 311}, - dictWord{4, 10, 270}, - dictWord{ - 5, - 10, - 192, - }, - dictWord{6, 10, 332}, - dictWord{135, 10, 1322}, - dictWord{140, 10, 661}, - dictWord{135, 11, 1193}, - dictWord{6, 11, 107}, - dictWord{7, 11, 638}, - dictWord{7, 11, 1632}, - dictWord{137, 11, 396}, - dictWord{132, 0, 763}, - dictWord{4, 0, 622}, - dictWord{5, 11, 370}, - dictWord{134, 11, 1756}, - dictWord{ - 133, - 0, - 253, - }, - dictWord{135, 0, 546}, - dictWord{9, 0, 73}, - dictWord{10, 0, 110}, - dictWord{14, 0, 185}, - dictWord{17, 0, 119}, - dictWord{133, 11, 204}, - dictWord{7, 0, 624}, - dictWord{7, 0, 916}, - dictWord{10, 0, 256}, - dictWord{139, 0, 87}, - dictWord{7, 10, 379}, - dictWord{8, 10, 481}, - dictWord{137, 10, 377}, - dictWord{5, 0, 212}, - dictWord{12, 0, 35}, - dictWord{13, 0, 382}, - dictWord{5, 11, 970}, - dictWord{134, 11, 1706}, - dictWord{9, 0, 746}, - dictWord{5, 
10, 1003}, - dictWord{134, 10, 149}, - dictWord{10, 0, 150}, - dictWord{11, 0, 849}, - dictWord{13, 0, 330}, - dictWord{8, 10, 262}, - dictWord{9, 10, 627}, - dictWord{11, 10, 214}, - dictWord{11, 10, 404}, - dictWord{11, 10, 457}, - dictWord{11, 10, 780}, - dictWord{11, 10, 913}, - dictWord{13, 10, 401}, - dictWord{142, 10, 200}, - dictWord{134, 0, 1466}, - dictWord{ - 135, - 11, - 3, - }, - dictWord{6, 0, 1299}, - dictWord{4, 11, 35}, - dictWord{5, 11, 121}, - dictWord{5, 11, 483}, - dictWord{5, 11, 685}, - dictWord{6, 11, 489}, - dictWord{7, 11, 1204}, - dictWord{136, 11, 394}, - dictWord{135, 10, 742}, - dictWord{4, 10, 142}, - dictWord{136, 10, 304}, - dictWord{4, 11, 921}, - dictWord{133, 11, 1007}, - dictWord{ - 134, - 0, - 1518, - }, - dictWord{6, 0, 1229}, - dictWord{135, 0, 1175}, - dictWord{133, 0, 816}, - dictWord{12, 0, 159}, - dictWord{4, 10, 471}, - dictWord{4, 11, 712}, - dictWord{ - 5, - 10, - 51, - }, - dictWord{6, 10, 602}, - dictWord{7, 10, 925}, - dictWord{8, 10, 484}, - dictWord{138, 10, 195}, - dictWord{134, 11, 1629}, - dictWord{5, 0, 869}, - dictWord{ - 5, - 0, - 968, - }, - dictWord{6, 0, 1626}, - dictWord{8, 0, 734}, - dictWord{136, 0, 784}, - dictWord{4, 0, 542}, - dictWord{6, 0, 1716}, - dictWord{6, 0, 1727}, - dictWord{ - 7, - 0, - 1082, - }, - dictWord{7, 0, 1545}, - dictWord{8, 0, 56}, - dictWord{8, 0, 118}, - dictWord{8, 0, 412}, - dictWord{8, 0, 564}, - dictWord{9, 0, 888}, - dictWord{9, 0, 908}, - dictWord{ - 10, - 0, - 50, - }, - dictWord{10, 0, 423}, - dictWord{11, 0, 685}, - dictWord{11, 0, 697}, - dictWord{11, 0, 933}, - dictWord{12, 0, 299}, - dictWord{13, 0, 126}, - dictWord{ - 13, - 0, - 136, - }, - dictWord{13, 0, 170}, - dictWord{13, 0, 190}, - dictWord{136, 10, 688}, - dictWord{132, 10, 697}, - dictWord{4, 0, 232}, - dictWord{9, 0, 202}, - dictWord{ - 10, - 0, - 474, - }, - dictWord{140, 0, 433}, - dictWord{136, 0, 212}, - dictWord{6, 0, 108}, - dictWord{7, 0, 1003}, - dictWord{7, 0, 1181}, - dictWord{8, 0, 111}, - 
dictWord{ - 136, - 0, - 343, - }, - dictWord{5, 10, 221}, - dictWord{135, 11, 1255}, - dictWord{133, 11, 485}, - dictWord{134, 0, 1712}, - dictWord{142, 0, 216}, - dictWord{5, 0, 643}, - dictWord{ - 6, - 0, - 516, - }, - dictWord{4, 11, 285}, - dictWord{5, 11, 317}, - dictWord{6, 11, 301}, - dictWord{7, 11, 7}, - dictWord{8, 11, 153}, - dictWord{10, 11, 766}, - dictWord{ - 11, - 11, - 468, - }, - dictWord{12, 11, 467}, - dictWord{141, 11, 143}, - dictWord{4, 0, 133}, - dictWord{7, 0, 711}, - dictWord{7, 0, 1298}, - dictWord{135, 0, 1585}, - dictWord{ - 134, - 0, - 650, - }, - dictWord{135, 11, 512}, - dictWord{6, 0, 99}, - dictWord{7, 0, 1808}, - dictWord{145, 0, 57}, - dictWord{6, 0, 246}, - dictWord{6, 0, 574}, - dictWord{7, 0, 428}, - dictWord{9, 0, 793}, - dictWord{10, 0, 669}, - dictWord{11, 0, 485}, - dictWord{11, 0, 840}, - dictWord{12, 0, 300}, - dictWord{14, 0, 250}, - dictWord{145, 0, 55}, - dictWord{ - 4, - 10, - 132, - }, - dictWord{5, 10, 69}, - dictWord{135, 10, 1242}, - dictWord{136, 0, 1023}, - dictWord{7, 0, 302}, - dictWord{132, 10, 111}, - dictWord{135, 0, 1871}, - dictWord{132, 0, 728}, - dictWord{9, 0, 252}, - dictWord{132, 10, 767}, - dictWord{6, 0, 461}, - dictWord{7, 0, 1590}, - dictWord{7, 10, 1416}, - dictWord{7, 10, 2005}, - dictWord{8, 10, 131}, - dictWord{8, 10, 466}, - dictWord{9, 10, 672}, - dictWord{13, 10, 252}, - dictWord{148, 10, 103}, - dictWord{6, 0, 323}, - dictWord{135, 0, 1564}, - dictWord{7, 0, 461}, - dictWord{136, 0, 775}, - dictWord{6, 10, 44}, - dictWord{136, 10, 368}, - dictWord{139, 0, 172}, - dictWord{132, 0, 464}, - dictWord{4, 10, 570}, - dictWord{133, 10, 120}, - dictWord{137, 11, 269}, - dictWord{6, 10, 227}, - dictWord{135, 10, 1589}, - dictWord{6, 11, 1719}, - dictWord{6, 11, 1735}, - dictWord{ - 7, - 11, - 2016, - }, - dictWord{7, 11, 2020}, - dictWord{8, 11, 837}, - dictWord{137, 11, 852}, - dictWord{7, 0, 727}, - dictWord{146, 0, 73}, - dictWord{132, 0, 1023}, - dictWord{135, 11, 852}, - dictWord{135, 10, 
1529}, - dictWord{136, 0, 577}, - dictWord{138, 11, 568}, - dictWord{134, 0, 1037}, - dictWord{8, 11, 67}, - dictWord{ - 138, - 11, - 419, - }, - dictWord{4, 0, 413}, - dictWord{5, 0, 677}, - dictWord{8, 0, 432}, - dictWord{140, 0, 280}, - dictWord{10, 0, 600}, - dictWord{6, 10, 1667}, - dictWord{ - 7, - 11, - 967, - }, - dictWord{7, 10, 2036}, - dictWord{141, 11, 11}, - dictWord{6, 10, 511}, - dictWord{140, 10, 132}, - dictWord{6, 0, 799}, - dictWord{5, 10, 568}, - dictWord{ - 6, - 10, - 138, - }, - dictWord{135, 10, 1293}, - dictWord{8, 0, 159}, - dictWord{4, 10, 565}, - dictWord{136, 10, 827}, - dictWord{7, 0, 646}, - dictWord{7, 0, 1730}, - dictWord{ - 11, - 0, - 446, - }, - dictWord{141, 0, 178}, - dictWord{4, 10, 922}, - dictWord{133, 10, 1023}, - dictWord{135, 11, 11}, - dictWord{132, 0, 395}, - dictWord{11, 0, 145}, - dictWord{135, 10, 1002}, - dictWord{9, 0, 174}, - dictWord{10, 0, 164}, - dictWord{11, 0, 440}, - dictWord{11, 0, 514}, - dictWord{11, 0, 841}, - dictWord{15, 0, 98}, - dictWord{149, 0, 20}, - dictWord{134, 0, 426}, - dictWord{10, 0, 608}, - dictWord{139, 0, 1002}, - dictWord{7, 11, 320}, - dictWord{8, 11, 51}, - dictWord{12, 11, 481}, - dictWord{12, 11, 570}, - dictWord{148, 11, 106}, - dictWord{9, 0, 977}, - dictWord{9, 0, 983}, - dictWord{132, 11, 445}, - dictWord{138, 0, 250}, - dictWord{139, 0, 100}, - dictWord{6, 0, 1982}, - dictWord{136, 10, 402}, - dictWord{133, 11, 239}, - dictWord{4, 10, 716}, - dictWord{141, 10, 31}, - dictWord{5, 0, 476}, - dictWord{7, 11, 83}, - dictWord{7, 11, 1990}, - dictWord{8, 11, 130}, - dictWord{139, 11, 720}, - dictWord{8, 10, 691}, - dictWord{136, 10, 731}, - dictWord{5, 11, 123}, - dictWord{ - 6, - 11, - 530, - }, - dictWord{7, 11, 348}, - dictWord{135, 11, 1419}, - dictWord{5, 0, 76}, - dictWord{6, 0, 458}, - dictWord{6, 0, 497}, - dictWord{7, 0, 868}, - dictWord{9, 0, 658}, - dictWord{10, 0, 594}, - dictWord{11, 0, 173}, - dictWord{11, 0, 566}, - dictWord{12, 0, 20}, - dictWord{12, 0, 338}, - 
dictWord{141, 0, 200}, - dictWord{9, 11, 139}, - dictWord{ - 10, - 11, - 399, - }, - dictWord{11, 11, 469}, - dictWord{12, 11, 634}, - dictWord{141, 11, 223}, - dictWord{9, 10, 840}, - dictWord{138, 10, 803}, - dictWord{133, 10, 847}, - dictWord{11, 11, 223}, - dictWord{140, 11, 168}, - dictWord{132, 11, 210}, - dictWord{8, 0, 447}, - dictWord{9, 10, 53}, - dictWord{9, 10, 268}, - dictWord{9, 10, 901}, - dictWord{10, 10, 518}, - dictWord{10, 10, 829}, - dictWord{11, 10, 188}, - dictWord{13, 10, 74}, - dictWord{14, 10, 46}, - dictWord{15, 10, 17}, - dictWord{15, 10, 33}, - dictWord{17, 10, 40}, - dictWord{18, 10, 36}, - dictWord{19, 10, 20}, - dictWord{22, 10, 1}, - dictWord{152, 10, 2}, - dictWord{4, 0, 526}, - dictWord{7, 0, 1029}, - dictWord{135, 0, 1054}, - dictWord{19, 11, 59}, - dictWord{150, 11, 2}, - dictWord{4, 0, 636}, - dictWord{6, 0, 1875}, - dictWord{6, 0, 1920}, - dictWord{9, 0, 999}, - dictWord{ - 12, - 0, - 807, - }, - dictWord{12, 0, 825}, - dictWord{15, 0, 179}, - dictWord{15, 0, 190}, - dictWord{18, 0, 182}, - dictWord{136, 10, 532}, - dictWord{6, 0, 1699}, - dictWord{ - 7, - 0, - 660, - }, - dictWord{7, 0, 1124}, - dictWord{17, 0, 31}, - dictWord{19, 0, 22}, - dictWord{151, 0, 14}, - dictWord{135, 10, 681}, - dictWord{132, 11, 430}, - dictWord{ - 140, - 10, - 677, - }, - dictWord{4, 10, 684}, - dictWord{136, 10, 384}, - dictWord{132, 11, 756}, - dictWord{133, 11, 213}, - dictWord{7, 0, 188}, - dictWord{7, 10, 110}, - dictWord{ - 8, - 10, - 290, - }, - dictWord{8, 10, 591}, - dictWord{9, 10, 382}, - dictWord{9, 10, 649}, - dictWord{11, 10, 71}, - dictWord{11, 10, 155}, - dictWord{11, 10, 313}, - dictWord{ - 12, - 10, - 5, - }, - dictWord{13, 10, 325}, - dictWord{142, 10, 287}, - dictWord{7, 10, 360}, - dictWord{7, 10, 425}, - dictWord{9, 10, 66}, - dictWord{9, 10, 278}, - dictWord{ - 138, - 10, - 644, - }, - dictWord{142, 11, 164}, - dictWord{4, 0, 279}, - dictWord{7, 0, 301}, - dictWord{137, 0, 362}, - dictWord{134, 11, 586}, - dictWord{135, 0, 
1743}, - dictWord{4, 0, 178}, - dictWord{133, 0, 399}, - dictWord{4, 10, 900}, - dictWord{133, 10, 861}, - dictWord{5, 10, 254}, - dictWord{7, 10, 985}, - dictWord{136, 10, 73}, - dictWord{133, 11, 108}, - dictWord{7, 10, 1959}, - dictWord{136, 10, 683}, - dictWord{133, 11, 219}, - dictWord{4, 11, 193}, - dictWord{5, 11, 916}, - dictWord{ - 7, - 11, - 364, - }, - dictWord{10, 11, 398}, - dictWord{10, 11, 726}, - dictWord{11, 11, 317}, - dictWord{11, 11, 626}, - dictWord{12, 11, 142}, - dictWord{12, 11, 288}, - dictWord{ - 12, - 11, - 678, - }, - dictWord{13, 11, 313}, - dictWord{15, 11, 113}, - dictWord{18, 11, 114}, - dictWord{21, 11, 30}, - dictWord{150, 11, 53}, - dictWord{6, 11, 241}, - dictWord{7, 11, 907}, - dictWord{8, 11, 832}, - dictWord{9, 11, 342}, - dictWord{10, 11, 729}, - dictWord{11, 11, 284}, - dictWord{11, 11, 445}, - dictWord{11, 11, 651}, - dictWord{11, 11, 863}, - dictWord{13, 11, 398}, - dictWord{146, 11, 99}, - dictWord{132, 0, 872}, - dictWord{134, 0, 831}, - dictWord{134, 0, 1692}, - dictWord{ - 6, - 0, - 202, - }, - dictWord{6, 0, 1006}, - dictWord{9, 0, 832}, - dictWord{10, 0, 636}, - dictWord{11, 0, 208}, - dictWord{12, 0, 360}, - dictWord{17, 0, 118}, - dictWord{18, 0, 27}, - dictWord{20, 0, 67}, - dictWord{137, 11, 734}, - dictWord{132, 10, 725}, - dictWord{7, 11, 993}, - dictWord{138, 11, 666}, - dictWord{134, 0, 1954}, - dictWord{ - 134, - 10, - 196, - }, - dictWord{7, 0, 872}, - dictWord{10, 0, 516}, - dictWord{139, 0, 167}, - dictWord{133, 10, 831}, - dictWord{4, 11, 562}, - dictWord{9, 11, 254}, - dictWord{ - 139, - 11, - 879, - }, - dictWord{137, 0, 313}, - dictWord{4, 0, 224}, - dictWord{132, 11, 786}, - dictWord{11, 0, 24}, - dictWord{12, 0, 170}, - dictWord{136, 10, 723}, - dictWord{ - 5, - 0, - 546, - }, - dictWord{7, 0, 35}, - dictWord{8, 0, 11}, - dictWord{8, 0, 12}, - dictWord{9, 0, 315}, - dictWord{9, 0, 533}, - dictWord{10, 0, 802}, - dictWord{11, 0, 166}, - dictWord{ - 12, - 0, - 525, - }, - dictWord{142, 0, 243}, - 
dictWord{7, 0, 1937}, - dictWord{13, 10, 80}, - dictWord{13, 10, 437}, - dictWord{145, 10, 74}, - dictWord{5, 0, 241}, - dictWord{ - 8, - 0, - 242, - }, - dictWord{9, 0, 451}, - dictWord{10, 0, 667}, - dictWord{11, 0, 598}, - dictWord{140, 0, 429}, - dictWord{150, 0, 46}, - dictWord{6, 0, 1273}, - dictWord{ - 137, - 0, - 830, - }, - dictWord{5, 10, 848}, - dictWord{6, 10, 66}, - dictWord{136, 10, 764}, - dictWord{6, 0, 825}, - dictWord{134, 0, 993}, - dictWord{4, 0, 1006}, - dictWord{ - 10, - 0, - 327, - }, - dictWord{13, 0, 271}, - dictWord{4, 10, 36}, - dictWord{7, 10, 1387}, - dictWord{139, 10, 755}, - dictWord{134, 0, 1023}, - dictWord{135, 0, 1580}, - dictWord{ - 4, - 0, - 366, - }, - dictWord{137, 0, 516}, - dictWord{132, 10, 887}, - dictWord{6, 0, 1736}, - dictWord{135, 0, 1891}, - dictWord{6, 11, 216}, - dictWord{7, 11, 901}, - dictWord{ - 7, - 11, - 1343, - }, - dictWord{136, 11, 493}, - dictWord{6, 10, 165}, - dictWord{138, 10, 388}, - dictWord{7, 11, 341}, - dictWord{139, 11, 219}, - dictWord{4, 10, 719}, - dictWord{135, 10, 155}, - dictWord{134, 0, 1935}, - dictWord{132, 0, 826}, - dictWord{6, 0, 331}, - dictWord{6, 0, 1605}, - dictWord{8, 0, 623}, - dictWord{11, 0, 139}, - dictWord{139, 0, 171}, - dictWord{135, 11, 1734}, - dictWord{10, 11, 115}, - dictWord{11, 11, 420}, - dictWord{12, 11, 154}, - dictWord{13, 11, 404}, - dictWord{ - 14, - 11, - 346, - }, - dictWord{15, 11, 54}, - dictWord{143, 11, 112}, - dictWord{7, 0, 288}, - dictWord{4, 10, 353}, - dictWord{6, 10, 146}, - dictWord{6, 10, 1789}, - dictWord{ - 7, - 10, - 990, - }, - dictWord{7, 10, 1348}, - dictWord{9, 10, 665}, - dictWord{9, 10, 898}, - dictWord{11, 10, 893}, - dictWord{142, 10, 212}, - dictWord{6, 0, 916}, - dictWord{134, 0, 1592}, - dictWord{7, 0, 1888}, - dictWord{4, 10, 45}, - dictWord{135, 10, 1257}, - dictWord{5, 11, 1011}, - dictWord{136, 11, 701}, - dictWord{ - 139, - 11, - 596, - }, - dictWord{4, 11, 54}, - dictWord{5, 11, 666}, - dictWord{7, 11, 1039}, - dictWord{7, 11, 
1130}, - dictWord{9, 11, 195}, - dictWord{138, 11, 302}, - dictWord{ - 134, - 0, - 1471, - }, - dictWord{134, 0, 1570}, - dictWord{132, 0, 394}, - dictWord{140, 10, 65}, - dictWord{136, 10, 816}, - dictWord{135, 0, 1931}, - dictWord{7, 0, 574}, - dictWord{135, 0, 1719}, - dictWord{134, 11, 467}, - dictWord{132, 0, 658}, - dictWord{9, 0, 781}, - dictWord{10, 0, 144}, - dictWord{11, 0, 385}, - dictWord{13, 0, 161}, - dictWord{13, 0, 228}, - dictWord{13, 0, 268}, - dictWord{20, 0, 107}, - dictWord{134, 11, 1669}, - dictWord{136, 0, 374}, - dictWord{135, 0, 735}, - dictWord{4, 0, 344}, - dictWord{6, 0, 498}, - dictWord{139, 0, 323}, - dictWord{7, 0, 586}, - dictWord{7, 0, 1063}, - dictWord{6, 10, 559}, - dictWord{134, 10, 1691}, - dictWord{137, 0, 155}, - dictWord{133, 0, 906}, - dictWord{7, 11, 122}, - dictWord{9, 11, 259}, - dictWord{10, 11, 84}, - dictWord{11, 11, 470}, - dictWord{12, 11, 541}, - dictWord{ - 141, - 11, - 379, - }, - dictWord{134, 0, 1139}, - dictWord{10, 0, 108}, - dictWord{139, 0, 116}, - dictWord{134, 10, 456}, - dictWord{133, 10, 925}, - dictWord{5, 11, 82}, - dictWord{ - 5, - 11, - 131, - }, - dictWord{7, 11, 1755}, - dictWord{8, 11, 31}, - dictWord{9, 11, 168}, - dictWord{9, 11, 764}, - dictWord{139, 11, 869}, - dictWord{134, 11, 605}, - dictWord{ - 5, - 11, - 278, - }, - dictWord{137, 11, 68}, - dictWord{4, 11, 163}, - dictWord{5, 11, 201}, - dictWord{5, 11, 307}, - dictWord{5, 11, 310}, - dictWord{6, 11, 335}, - dictWord{ - 7, - 11, - 284, - }, - dictWord{136, 11, 165}, - dictWord{135, 11, 1660}, - dictWord{6, 11, 33}, - dictWord{135, 11, 1244}, - dictWord{4, 0, 616}, - dictWord{136, 11, 483}, - dictWord{8, 0, 857}, - dictWord{8, 0, 902}, - dictWord{8, 0, 910}, - dictWord{10, 0, 879}, - dictWord{12, 0, 726}, - dictWord{4, 11, 199}, - dictWord{139, 11, 34}, - dictWord{136, 0, 692}, - dictWord{6, 10, 193}, - dictWord{7, 10, 240}, - dictWord{7, 10, 1682}, - dictWord{10, 10, 51}, - dictWord{10, 10, 640}, - dictWord{11, 10, 410}, - dictWord{13, 
10, 82}, - dictWord{14, 10, 247}, - dictWord{14, 10, 331}, - dictWord{142, 10, 377}, - dictWord{6, 0, 823}, - dictWord{134, 0, 983}, - dictWord{ - 139, - 10, - 411, - }, - dictWord{132, 0, 305}, - dictWord{136, 10, 633}, - dictWord{138, 11, 203}, - dictWord{134, 0, 681}, - dictWord{6, 11, 326}, - dictWord{7, 11, 677}, - dictWord{137, 11, 425}, - dictWord{5, 0, 214}, - dictWord{7, 0, 603}, - dictWord{8, 0, 611}, - dictWord{9, 0, 686}, - dictWord{10, 0, 88}, - dictWord{11, 0, 459}, - dictWord{ - 11, - 0, - 496, - }, - dictWord{12, 0, 463}, - dictWord{12, 0, 590}, - dictWord{141, 0, 0}, - dictWord{136, 0, 1004}, - dictWord{142, 0, 23}, - dictWord{134, 0, 1703}, - dictWord{ - 147, - 11, - 8, - }, - dictWord{145, 11, 56}, - dictWord{135, 0, 1443}, - dictWord{4, 10, 237}, - dictWord{135, 10, 514}, - dictWord{6, 0, 714}, - dictWord{145, 0, 19}, - dictWord{ - 5, - 11, - 358, - }, - dictWord{7, 11, 473}, - dictWord{7, 11, 1184}, - dictWord{10, 11, 662}, - dictWord{13, 11, 212}, - dictWord{13, 11, 304}, - dictWord{13, 11, 333}, - dictWord{145, 11, 98}, - dictWord{4, 0, 737}, - dictWord{10, 0, 98}, - dictWord{11, 0, 294}, - dictWord{12, 0, 60}, - dictWord{12, 0, 437}, - dictWord{13, 0, 64}, - dictWord{ - 13, - 0, - 380, - }, - dictWord{142, 0, 430}, - dictWord{6, 10, 392}, - dictWord{7, 10, 65}, - dictWord{135, 10, 2019}, - dictWord{6, 0, 1758}, - dictWord{8, 0, 520}, - dictWord{ - 9, - 0, - 345, - }, - dictWord{9, 0, 403}, - dictWord{142, 0, 350}, - dictWord{5, 0, 47}, - dictWord{10, 0, 242}, - dictWord{138, 0, 579}, - dictWord{5, 0, 139}, - dictWord{7, 0, 1168}, - dictWord{138, 0, 539}, - dictWord{134, 0, 1459}, - dictWord{13, 0, 388}, - dictWord{141, 11, 388}, - dictWord{134, 0, 253}, - dictWord{7, 10, 1260}, - dictWord{ - 135, - 10, - 1790, - }, - dictWord{10, 0, 252}, - dictWord{9, 10, 222}, - dictWord{139, 10, 900}, - dictWord{140, 0, 745}, - dictWord{133, 11, 946}, - dictWord{4, 0, 107}, - dictWord{ - 7, - 0, - 613, - }, - dictWord{8, 0, 439}, - dictWord{8, 0, 504}, - 
dictWord{9, 0, 501}, - dictWord{10, 0, 383}, - dictWord{139, 0, 477}, - dictWord{135, 11, 1485}, - dictWord{ - 132, - 0, - 871, - }, - dictWord{7, 11, 411}, - dictWord{7, 11, 590}, - dictWord{8, 11, 631}, - dictWord{9, 11, 323}, - dictWord{10, 11, 355}, - dictWord{11, 11, 491}, - dictWord{ - 12, - 11, - 143, - }, - dictWord{12, 11, 402}, - dictWord{13, 11, 73}, - dictWord{14, 11, 408}, - dictWord{15, 11, 107}, - dictWord{146, 11, 71}, - dictWord{132, 0, 229}, - dictWord{132, 0, 903}, - dictWord{140, 0, 71}, - dictWord{133, 0, 549}, - dictWord{4, 0, 47}, - dictWord{6, 0, 373}, - dictWord{7, 0, 452}, - dictWord{7, 0, 543}, - dictWord{ - 7, - 0, - 1828, - }, - dictWord{7, 0, 1856}, - dictWord{9, 0, 6}, - dictWord{11, 0, 257}, - dictWord{139, 0, 391}, - dictWord{7, 11, 1467}, - dictWord{8, 11, 328}, - dictWord{ - 10, - 11, - 544, - }, - dictWord{11, 11, 955}, - dictWord{13, 11, 320}, - dictWord{145, 11, 83}, - dictWord{5, 0, 980}, - dictWord{134, 0, 1754}, - dictWord{136, 0, 865}, - dictWord{ - 5, - 0, - 705, - }, - dictWord{137, 0, 606}, - dictWord{7, 0, 161}, - dictWord{8, 10, 201}, - dictWord{136, 10, 605}, - dictWord{143, 11, 35}, - dictWord{5, 11, 835}, - dictWord{ - 6, - 11, - 483, - }, - dictWord{140, 10, 224}, - dictWord{7, 0, 536}, - dictWord{7, 0, 1331}, - dictWord{136, 0, 143}, - dictWord{134, 0, 1388}, - dictWord{5, 0, 724}, - dictWord{ - 10, - 0, - 305, - }, - dictWord{11, 0, 151}, - dictWord{12, 0, 33}, - dictWord{12, 0, 121}, - dictWord{12, 0, 381}, - dictWord{17, 0, 3}, - dictWord{17, 0, 27}, - dictWord{17, 0, 78}, - dictWord{18, 0, 18}, - dictWord{19, 0, 54}, - dictWord{149, 0, 5}, - dictWord{4, 10, 523}, - dictWord{133, 10, 638}, - dictWord{5, 0, 19}, - dictWord{134, 0, 533}, - dictWord{ - 5, - 0, - 395, - }, - dictWord{5, 0, 951}, - dictWord{134, 0, 1776}, - dictWord{135, 0, 1908}, - dictWord{132, 0, 846}, - dictWord{10, 0, 74}, - dictWord{11, 0, 663}, - dictWord{ - 12, - 0, - 210, - }, - dictWord{13, 0, 166}, - dictWord{13, 0, 310}, - dictWord{14, 
0, 373}, - dictWord{18, 0, 95}, - dictWord{19, 0, 43}, - dictWord{6, 10, 242}, - dictWord{7, 10, 227}, - dictWord{7, 10, 1581}, - dictWord{8, 10, 104}, - dictWord{9, 10, 113}, - dictWord{9, 10, 220}, - dictWord{9, 10, 427}, - dictWord{10, 10, 239}, - dictWord{11, 10, 579}, - dictWord{11, 10, 1023}, - dictWord{13, 10, 4}, - dictWord{13, 10, 204}, - dictWord{13, 10, 316}, - dictWord{148, 10, 86}, - dictWord{9, 11, 716}, - dictWord{11, 11, 108}, - dictWord{13, 11, 123}, - dictWord{14, 11, 252}, - dictWord{19, 11, 38}, - dictWord{21, 11, 3}, - dictWord{151, 11, 11}, - dictWord{8, 0, 372}, - dictWord{9, 0, 122}, - dictWord{138, 0, 175}, - dictWord{132, 11, 677}, - dictWord{7, 11, 1374}, - dictWord{136, 11, 540}, - dictWord{135, 10, 861}, - dictWord{132, 0, 695}, - dictWord{ - 7, - 0, - 497, - }, - dictWord{9, 0, 387}, - dictWord{147, 0, 81}, - dictWord{136, 0, 937}, - dictWord{134, 0, 718}, - dictWord{7, 0, 1328}, - dictWord{136, 10, 494}, - dictWord{ - 132, - 11, - 331, - }, - dictWord{6, 0, 1581}, - dictWord{133, 11, 747}, - dictWord{5, 0, 284}, - dictWord{6, 0, 49}, - dictWord{6, 0, 350}, - dictWord{7, 0, 1}, - dictWord{7, 0, 377}, - dictWord{7, 0, 1693}, - dictWord{8, 0, 18}, - dictWord{8, 0, 678}, - dictWord{9, 0, 161}, - dictWord{9, 0, 585}, - dictWord{9, 0, 671}, - dictWord{9, 0, 839}, - dictWord{11, 0, 912}, - dictWord{141, 0, 427}, - dictWord{7, 10, 1306}, - dictWord{8, 10, 505}, - dictWord{9, 10, 482}, - dictWord{10, 10, 126}, - dictWord{11, 10, 225}, - dictWord{12, 10, 347}, - dictWord{12, 10, 449}, - dictWord{13, 10, 19}, - dictWord{14, 10, 218}, - dictWord{142, 10, 435}, - dictWord{10, 10, 764}, - dictWord{12, 10, 120}, - dictWord{ - 13, - 10, - 39, - }, - dictWord{145, 10, 127}, - dictWord{4, 0, 597}, - dictWord{133, 10, 268}, - dictWord{134, 0, 1094}, - dictWord{4, 0, 1008}, - dictWord{134, 0, 1973}, - dictWord{132, 0, 811}, - dictWord{139, 0, 908}, - dictWord{135, 0, 1471}, - dictWord{133, 11, 326}, - dictWord{4, 10, 384}, - dictWord{135, 10, 1022}, - 
dictWord{ - 7, - 0, - 1935, - }, - dictWord{8, 0, 324}, - dictWord{12, 0, 42}, - dictWord{4, 11, 691}, - dictWord{7, 11, 1935}, - dictWord{8, 11, 324}, - dictWord{9, 11, 35}, - dictWord{10, 11, 680}, - dictWord{11, 11, 364}, - dictWord{12, 11, 42}, - dictWord{13, 11, 357}, - dictWord{146, 11, 16}, - dictWord{135, 0, 2014}, - dictWord{7, 0, 2007}, - dictWord{ - 9, - 0, - 101, - }, - dictWord{9, 0, 450}, - dictWord{10, 0, 66}, - dictWord{10, 0, 842}, - dictWord{11, 0, 536}, - dictWord{12, 0, 587}, - dictWord{6, 11, 32}, - dictWord{7, 11, 385}, - dictWord{7, 11, 757}, - dictWord{7, 11, 1916}, - dictWord{8, 11, 37}, - dictWord{8, 11, 94}, - dictWord{8, 11, 711}, - dictWord{9, 11, 541}, - dictWord{10, 11, 162}, - dictWord{ - 10, - 11, - 795, - }, - dictWord{11, 11, 989}, - dictWord{11, 11, 1010}, - dictWord{12, 11, 14}, - dictWord{142, 11, 308}, - dictWord{139, 0, 586}, - dictWord{ - 135, - 10, - 1703, - }, - dictWord{7, 0, 1077}, - dictWord{11, 0, 28}, - dictWord{9, 10, 159}, - dictWord{140, 10, 603}, - dictWord{6, 0, 1221}, - dictWord{136, 10, 583}, - dictWord{ - 6, - 11, - 152, - }, - dictWord{6, 11, 349}, - dictWord{6, 11, 1682}, - dictWord{7, 11, 1252}, - dictWord{8, 11, 112}, - dictWord{9, 11, 435}, - dictWord{9, 11, 668}, - dictWord{ - 10, - 11, - 290, - }, - dictWord{10, 11, 319}, - dictWord{10, 11, 815}, - dictWord{11, 11, 180}, - dictWord{11, 11, 837}, - dictWord{12, 11, 240}, - dictWord{13, 11, 152}, - dictWord{13, 11, 219}, - dictWord{142, 11, 158}, - dictWord{139, 0, 62}, - dictWord{132, 10, 515}, - dictWord{8, 10, 632}, - dictWord{8, 10, 697}, - dictWord{ - 137, - 10, - 854, - }, - dictWord{134, 0, 1766}, - dictWord{132, 11, 581}, - dictWord{6, 11, 126}, - dictWord{7, 11, 573}, - dictWord{8, 11, 397}, - dictWord{142, 11, 44}, - dictWord{ - 150, - 0, - 28, - }, - dictWord{11, 0, 670}, - dictWord{22, 0, 25}, - dictWord{4, 10, 136}, - dictWord{133, 10, 551}, - dictWord{6, 0, 1665}, - dictWord{7, 0, 256}, - dictWord{ - 7, - 0, - 1388, - }, - dictWord{138, 0, 
499}, - dictWord{4, 0, 22}, - dictWord{5, 0, 10}, - dictWord{7, 0, 1576}, - dictWord{136, 0, 97}, - dictWord{134, 10, 1782}, - dictWord{5, 0, 481}, - dictWord{7, 10, 1287}, - dictWord{9, 10, 44}, - dictWord{10, 10, 552}, - dictWord{10, 10, 642}, - dictWord{11, 10, 839}, - dictWord{12, 10, 274}, - dictWord{ - 12, - 10, - 275, - }, - dictWord{12, 10, 372}, - dictWord{13, 10, 91}, - dictWord{142, 10, 125}, - dictWord{133, 11, 926}, - dictWord{7, 11, 1232}, - dictWord{137, 11, 531}, - dictWord{6, 0, 134}, - dictWord{7, 0, 437}, - dictWord{7, 0, 1824}, - dictWord{9, 0, 37}, - dictWord{14, 0, 285}, - dictWord{142, 0, 371}, - dictWord{7, 0, 486}, - dictWord{8, 0, 155}, - dictWord{11, 0, 93}, - dictWord{140, 0, 164}, - dictWord{6, 0, 1391}, - dictWord{134, 0, 1442}, - dictWord{133, 11, 670}, - dictWord{133, 0, 591}, - dictWord{ - 6, - 10, - 147, - }, - dictWord{7, 10, 886}, - dictWord{7, 11, 1957}, - dictWord{9, 10, 753}, - dictWord{138, 10, 268}, - dictWord{5, 0, 380}, - dictWord{5, 0, 650}, - dictWord{ - 7, - 0, - 1173, - }, - dictWord{136, 0, 310}, - dictWord{4, 0, 364}, - dictWord{7, 0, 1156}, - dictWord{7, 0, 1187}, - dictWord{137, 0, 409}, - dictWord{135, 11, 1621}, - dictWord{ - 134, - 0, - 482, - }, - dictWord{133, 11, 506}, - dictWord{4, 0, 781}, - dictWord{6, 0, 487}, - dictWord{7, 0, 926}, - dictWord{8, 0, 263}, - dictWord{139, 0, 500}, - dictWord{ - 138, - 10, - 137, - }, - dictWord{135, 11, 242}, - dictWord{139, 11, 96}, - dictWord{133, 10, 414}, - dictWord{135, 10, 1762}, - dictWord{134, 0, 804}, - dictWord{5, 11, 834}, - dictWord{7, 11, 1202}, - dictWord{8, 11, 14}, - dictWord{9, 11, 481}, - dictWord{137, 11, 880}, - dictWord{134, 10, 599}, - dictWord{4, 0, 94}, - dictWord{135, 0, 1265}, - dictWord{4, 0, 415}, - dictWord{132, 0, 417}, - dictWord{5, 0, 348}, - dictWord{6, 0, 522}, - dictWord{6, 10, 1749}, - dictWord{7, 11, 1526}, - dictWord{138, 11, 465}, - dictWord{134, 10, 1627}, - dictWord{132, 0, 1012}, - dictWord{132, 10, 488}, - dictWord{4, 11, 357}, - 
dictWord{6, 11, 172}, - dictWord{7, 11, 143}, - dictWord{ - 137, - 11, - 413, - }, - dictWord{4, 10, 83}, - dictWord{4, 11, 590}, - dictWord{146, 11, 76}, - dictWord{140, 10, 676}, - dictWord{7, 11, 287}, - dictWord{8, 11, 355}, - dictWord{ - 9, - 11, - 293, - }, - dictWord{137, 11, 743}, - dictWord{134, 10, 278}, - dictWord{6, 0, 1803}, - dictWord{18, 0, 165}, - dictWord{24, 0, 21}, - dictWord{5, 11, 169}, - dictWord{ - 7, - 11, - 333, - }, - dictWord{136, 11, 45}, - dictWord{12, 10, 97}, - dictWord{140, 11, 97}, - dictWord{4, 0, 408}, - dictWord{4, 0, 741}, - dictWord{135, 0, 500}, - dictWord{ - 132, - 11, - 198, - }, - dictWord{7, 10, 388}, - dictWord{7, 10, 644}, - dictWord{139, 10, 781}, - dictWord{4, 11, 24}, - dictWord{5, 11, 140}, - dictWord{5, 11, 185}, - dictWord{ - 7, - 11, - 1500, - }, - dictWord{11, 11, 565}, - dictWord{139, 11, 838}, - dictWord{6, 0, 1321}, - dictWord{9, 0, 257}, - dictWord{7, 10, 229}, - dictWord{8, 10, 59}, - dictWord{ - 9, - 10, - 190, - }, - dictWord{10, 10, 378}, - dictWord{140, 10, 191}, - dictWord{4, 11, 334}, - dictWord{133, 11, 593}, - dictWord{135, 11, 1885}, - dictWord{134, 0, 1138}, - dictWord{4, 0, 249}, - dictWord{6, 0, 73}, - dictWord{135, 0, 177}, - dictWord{133, 0, 576}, - dictWord{142, 0, 231}, - dictWord{137, 0, 288}, - dictWord{132, 10, 660}, - dictWord{7, 10, 1035}, - dictWord{138, 10, 737}, - dictWord{135, 0, 1487}, - dictWord{6, 0, 989}, - dictWord{9, 0, 433}, - dictWord{7, 10, 690}, - dictWord{9, 10, 587}, - dictWord{140, 10, 521}, - dictWord{7, 0, 1264}, - dictWord{7, 0, 1678}, - dictWord{11, 0, 945}, - dictWord{12, 0, 341}, - dictWord{12, 0, 471}, - dictWord{140, 0, 569}, - dictWord{132, 11, 709}, - dictWord{133, 11, 897}, - dictWord{5, 11, 224}, - dictWord{13, 11, 174}, - dictWord{146, 11, 52}, - dictWord{135, 11, 1840}, - dictWord{ - 134, - 10, - 1744, - }, - dictWord{12, 0, 87}, - dictWord{16, 0, 74}, - dictWord{4, 10, 733}, - dictWord{9, 10, 194}, - dictWord{10, 10, 92}, - dictWord{11, 10, 198}, - 
dictWord{ - 12, - 10, - 84, - }, - dictWord{141, 10, 128}, - dictWord{140, 0, 779}, - dictWord{135, 0, 538}, - dictWord{4, 11, 608}, - dictWord{133, 11, 497}, - dictWord{133, 0, 413}, - dictWord{7, 11, 1375}, - dictWord{7, 11, 1466}, - dictWord{138, 11, 331}, - dictWord{136, 0, 495}, - dictWord{6, 11, 540}, - dictWord{136, 11, 136}, - dictWord{7, 0, 54}, - dictWord{8, 0, 312}, - dictWord{10, 0, 191}, - dictWord{10, 0, 614}, - dictWord{140, 0, 567}, - dictWord{6, 0, 468}, - dictWord{7, 0, 567}, - dictWord{7, 0, 1478}, - dictWord{ - 8, - 0, - 530, - }, - dictWord{14, 0, 290}, - dictWord{133, 11, 999}, - dictWord{4, 11, 299}, - dictWord{7, 10, 306}, - dictWord{135, 11, 1004}, - dictWord{142, 11, 296}, - dictWord{134, 0, 1484}, - dictWord{133, 10, 979}, - dictWord{6, 0, 609}, - dictWord{9, 0, 815}, - dictWord{12, 11, 137}, - dictWord{14, 11, 9}, - dictWord{14, 11, 24}, - dictWord{142, 11, 64}, - dictWord{133, 11, 456}, - dictWord{6, 0, 484}, - dictWord{135, 0, 822}, - dictWord{133, 10, 178}, - dictWord{136, 11, 180}, - dictWord{ - 132, - 11, - 755, - }, - dictWord{137, 0, 900}, - dictWord{135, 0, 1335}, - dictWord{6, 0, 1724}, - dictWord{135, 0, 2022}, - dictWord{135, 11, 1139}, - dictWord{5, 0, 640}, - dictWord{132, 10, 390}, - dictWord{6, 0, 1831}, - dictWord{138, 11, 633}, - dictWord{135, 11, 566}, - dictWord{4, 11, 890}, - dictWord{5, 11, 805}, - dictWord{5, 11, 819}, - dictWord{5, 11, 961}, - dictWord{6, 11, 396}, - dictWord{6, 11, 1631}, - dictWord{6, 11, 1678}, - dictWord{7, 11, 1967}, - dictWord{7, 11, 2041}, - dictWord{ - 9, - 11, - 630, - }, - dictWord{11, 11, 8}, - dictWord{11, 11, 1019}, - dictWord{12, 11, 176}, - dictWord{13, 11, 225}, - dictWord{14, 11, 292}, - dictWord{149, 11, 24}, - dictWord{ - 132, - 0, - 474, - }, - dictWord{134, 0, 1103}, - dictWord{135, 0, 1504}, - dictWord{134, 0, 1576}, - dictWord{6, 0, 961}, - dictWord{6, 0, 1034}, - dictWord{140, 0, 655}, - dictWord{11, 11, 514}, - dictWord{149, 11, 20}, - dictWord{5, 0, 305}, - dictWord{135, 
11, 1815}, - dictWord{7, 11, 1505}, - dictWord{10, 11, 190}, - dictWord{ - 10, - 11, - 634, - }, - dictWord{11, 11, 792}, - dictWord{12, 11, 358}, - dictWord{140, 11, 447}, - dictWord{5, 11, 0}, - dictWord{6, 11, 536}, - dictWord{7, 11, 604}, - dictWord{ - 13, - 11, - 445, - }, - dictWord{145, 11, 126}, - dictWord{7, 0, 1236}, - dictWord{133, 10, 105}, - dictWord{4, 0, 480}, - dictWord{6, 0, 217}, - dictWord{6, 0, 302}, - dictWord{ - 6, - 0, - 1642, - }, - dictWord{7, 0, 130}, - dictWord{7, 0, 837}, - dictWord{7, 0, 1321}, - dictWord{7, 0, 1547}, - dictWord{7, 0, 1657}, - dictWord{8, 0, 429}, - dictWord{9, 0, 228}, - dictWord{13, 0, 289}, - dictWord{13, 0, 343}, - dictWord{19, 0, 101}, - dictWord{6, 11, 232}, - dictWord{6, 11, 412}, - dictWord{7, 11, 1074}, - dictWord{8, 11, 9}, - dictWord{ - 8, - 11, - 157, - }, - dictWord{8, 11, 786}, - dictWord{9, 11, 196}, - dictWord{9, 11, 352}, - dictWord{9, 11, 457}, - dictWord{10, 11, 337}, - dictWord{11, 11, 232}, - dictWord{ - 11, - 11, - 877, - }, - dictWord{12, 11, 480}, - dictWord{140, 11, 546}, - dictWord{5, 10, 438}, - dictWord{7, 11, 958}, - dictWord{9, 10, 694}, - dictWord{12, 10, 627}, - dictWord{ - 13, - 11, - 38, - }, - dictWord{141, 10, 210}, - dictWord{4, 11, 382}, - dictWord{136, 11, 579}, - dictWord{7, 0, 278}, - dictWord{10, 0, 739}, - dictWord{11, 0, 708}, - dictWord{ - 141, - 0, - 348, - }, - dictWord{4, 11, 212}, - dictWord{135, 11, 1206}, - dictWord{135, 11, 1898}, - dictWord{6, 0, 708}, - dictWord{6, 0, 1344}, - dictWord{152, 10, 11}, - dictWord{137, 11, 768}, - dictWord{134, 0, 1840}, - dictWord{140, 0, 233}, - dictWord{8, 10, 25}, - dictWord{138, 10, 826}, - dictWord{6, 0, 2017}, - dictWord{ - 133, - 11, - 655, - }, - dictWord{6, 0, 1488}, - dictWord{139, 11, 290}, - dictWord{132, 10, 308}, - dictWord{134, 0, 1590}, - dictWord{134, 0, 1800}, - dictWord{134, 0, 1259}, - dictWord{16, 0, 28}, - dictWord{6, 11, 231}, - dictWord{7, 11, 95}, - dictWord{136, 11, 423}, - dictWord{133, 11, 300}, - 
dictWord{135, 10, 150}, - dictWord{ - 136, - 10, - 649, - }, - dictWord{7, 11, 1874}, - dictWord{137, 11, 641}, - dictWord{6, 11, 237}, - dictWord{7, 11, 611}, - dictWord{8, 11, 100}, - dictWord{9, 11, 416}, - dictWord{ - 11, - 11, - 335, - }, - dictWord{12, 11, 173}, - dictWord{146, 11, 101}, - dictWord{137, 0, 45}, - dictWord{134, 10, 521}, - dictWord{17, 0, 36}, - dictWord{14, 11, 26}, - dictWord{ - 146, - 11, - 150, - }, - dictWord{7, 0, 1442}, - dictWord{14, 0, 22}, - dictWord{5, 10, 339}, - dictWord{15, 10, 41}, - dictWord{15, 10, 166}, - dictWord{147, 10, 66}, - dictWord{ - 8, - 0, - 378, - }, - dictWord{6, 11, 581}, - dictWord{135, 11, 1119}, - dictWord{134, 0, 1507}, - dictWord{147, 11, 117}, - dictWord{139, 0, 39}, - dictWord{134, 0, 1054}, - dictWord{6, 0, 363}, - dictWord{7, 0, 1955}, - dictWord{136, 0, 725}, - dictWord{134, 0, 2036}, - dictWord{133, 11, 199}, - dictWord{6, 0, 1871}, - dictWord{9, 0, 935}, - dictWord{9, 0, 961}, - dictWord{9, 0, 1004}, - dictWord{9, 0, 1016}, - dictWord{12, 0, 805}, - dictWord{12, 0, 852}, - dictWord{12, 0, 853}, - dictWord{12, 0, 869}, - dictWord{ - 12, - 0, - 882, - }, - dictWord{12, 0, 896}, - dictWord{12, 0, 906}, - dictWord{12, 0, 917}, - dictWord{12, 0, 940}, - dictWord{15, 0, 170}, - dictWord{15, 0, 176}, - dictWord{ - 15, - 0, - 188, - }, - dictWord{15, 0, 201}, - dictWord{15, 0, 205}, - dictWord{15, 0, 212}, - dictWord{15, 0, 234}, - dictWord{15, 0, 244}, - dictWord{18, 0, 181}, - dictWord{18, 0, 193}, - dictWord{18, 0, 196}, - dictWord{18, 0, 201}, - dictWord{18, 0, 202}, - dictWord{18, 0, 210}, - dictWord{18, 0, 217}, - dictWord{18, 0, 235}, - dictWord{18, 0, 236}, - dictWord{18, 0, 237}, - dictWord{21, 0, 54}, - dictWord{21, 0, 55}, - dictWord{21, 0, 58}, - dictWord{21, 0, 59}, - dictWord{152, 0, 22}, - dictWord{134, 10, 1628}, - dictWord{ - 137, - 0, - 805, - }, - dictWord{5, 0, 813}, - dictWord{135, 0, 2046}, - dictWord{142, 11, 42}, - dictWord{5, 0, 712}, - dictWord{6, 0, 1240}, - dictWord{11, 0, 17}, - 
dictWord{ - 13, - 0, - 321, - }, - dictWord{144, 0, 67}, - dictWord{132, 0, 617}, - dictWord{135, 10, 829}, - dictWord{6, 0, 320}, - dictWord{7, 0, 781}, - dictWord{7, 0, 1921}, - dictWord{9, 0, 55}, - dictWord{10, 0, 186}, - dictWord{10, 0, 273}, - dictWord{10, 0, 664}, - dictWord{10, 0, 801}, - dictWord{11, 0, 996}, - dictWord{11, 0, 997}, - dictWord{13, 0, 157}, - dictWord{142, 0, 170}, - dictWord{136, 0, 271}, - dictWord{5, 10, 486}, - dictWord{135, 10, 1349}, - dictWord{18, 11, 91}, - dictWord{147, 11, 70}, - dictWord{10, 0, 445}, - dictWord{7, 10, 1635}, - dictWord{8, 10, 17}, - dictWord{138, 10, 295}, - dictWord{136, 11, 404}, - dictWord{7, 0, 103}, - dictWord{7, 0, 863}, - dictWord{11, 0, 184}, - dictWord{145, 0, 62}, - dictWord{138, 10, 558}, - dictWord{137, 0, 659}, - dictWord{6, 11, 312}, - dictWord{6, 11, 1715}, - dictWord{10, 11, 584}, - dictWord{ - 11, - 11, - 546, - }, - dictWord{11, 11, 692}, - dictWord{12, 11, 259}, - dictWord{12, 11, 295}, - dictWord{13, 11, 46}, - dictWord{141, 11, 154}, - dictWord{134, 0, 676}, - dictWord{132, 11, 588}, - dictWord{4, 11, 231}, - dictWord{5, 11, 61}, - dictWord{6, 11, 104}, - dictWord{7, 11, 729}, - dictWord{7, 11, 964}, - dictWord{7, 11, 1658}, - dictWord{140, 11, 414}, - dictWord{6, 11, 263}, - dictWord{138, 11, 757}, - dictWord{11, 0, 337}, - dictWord{142, 0, 303}, - dictWord{135, 11, 1363}, - dictWord{ - 132, - 11, - 320, - }, - dictWord{140, 0, 506}, - dictWord{134, 10, 447}, - dictWord{5, 0, 77}, - dictWord{7, 0, 1455}, - dictWord{10, 0, 843}, - dictWord{147, 0, 73}, - dictWord{ - 7, - 10, - 577, - }, - dictWord{7, 10, 1432}, - dictWord{9, 10, 475}, - dictWord{9, 10, 505}, - dictWord{9, 10, 526}, - dictWord{9, 10, 609}, - dictWord{9, 10, 689}, - dictWord{ - 9, - 10, - 726, - }, - dictWord{9, 10, 735}, - dictWord{9, 10, 738}, - dictWord{10, 10, 556}, - dictWord{10, 10, 674}, - dictWord{10, 10, 684}, - dictWord{11, 10, 89}, - dictWord{ - 11, - 10, - 202, - }, - dictWord{11, 10, 272}, - dictWord{11, 10, 380}, 
- dictWord{11, 10, 415}, - dictWord{11, 10, 505}, - dictWord{11, 10, 537}, - dictWord{11, 10, 550}, - dictWord{11, 10, 562}, - dictWord{11, 10, 640}, - dictWord{11, 10, 667}, - dictWord{11, 10, 688}, - dictWord{11, 10, 847}, - dictWord{11, 10, 927}, - dictWord{ - 11, - 10, - 930, - }, - dictWord{11, 10, 940}, - dictWord{12, 10, 144}, - dictWord{12, 10, 325}, - dictWord{12, 10, 329}, - dictWord{12, 10, 389}, - dictWord{12, 10, 403}, - dictWord{ - 12, - 10, - 451, - }, - dictWord{12, 10, 515}, - dictWord{12, 10, 604}, - dictWord{12, 10, 616}, - dictWord{12, 10, 626}, - dictWord{13, 10, 66}, - dictWord{13, 10, 131}, - dictWord{13, 10, 167}, - dictWord{13, 10, 236}, - dictWord{13, 10, 368}, - dictWord{13, 10, 411}, - dictWord{13, 10, 434}, - dictWord{13, 10, 453}, - dictWord{ - 13, - 10, - 461, - }, - dictWord{13, 10, 474}, - dictWord{14, 10, 59}, - dictWord{14, 10, 60}, - dictWord{14, 10, 139}, - dictWord{14, 10, 152}, - dictWord{14, 10, 276}, - dictWord{ - 14, - 10, - 353, - }, - dictWord{14, 10, 402}, - dictWord{15, 10, 28}, - dictWord{15, 10, 81}, - dictWord{15, 10, 123}, - dictWord{15, 10, 152}, - dictWord{18, 10, 136}, - dictWord{148, 10, 88}, - dictWord{132, 0, 458}, - dictWord{135, 0, 1420}, - dictWord{6, 0, 109}, - dictWord{10, 0, 382}, - dictWord{4, 11, 405}, - dictWord{4, 10, 609}, - dictWord{7, 10, 756}, - dictWord{7, 11, 817}, - dictWord{9, 10, 544}, - dictWord{11, 10, 413}, - dictWord{14, 11, 58}, - dictWord{14, 10, 307}, - dictWord{16, 10, 25}, - dictWord{17, 11, 37}, - dictWord{146, 11, 124}, - dictWord{6, 0, 330}, - dictWord{7, 0, 1084}, - dictWord{11, 0, 142}, - dictWord{133, 11, 974}, - dictWord{4, 10, 930}, - dictWord{133, 10, 947}, - dictWord{5, 10, 939}, - dictWord{142, 11, 394}, - dictWord{16, 0, 91}, - dictWord{145, 0, 87}, - dictWord{5, 11, 235}, - dictWord{5, 10, 962}, - dictWord{7, 11, 1239}, - dictWord{11, 11, 131}, - dictWord{140, 11, 370}, - dictWord{11, 0, 492}, - dictWord{5, 10, 651}, - dictWord{8, 10, 170}, - dictWord{9, 10, 61}, - 
dictWord{9, 10, 63}, - dictWord{10, 10, 23}, - dictWord{10, 10, 37}, - dictWord{10, 10, 834}, - dictWord{11, 10, 4}, - dictWord{11, 10, 281}, - dictWord{11, 10, 503}, - dictWord{ - 11, - 10, - 677, - }, - dictWord{12, 10, 96}, - dictWord{12, 10, 130}, - dictWord{12, 10, 244}, - dictWord{14, 10, 5}, - dictWord{14, 10, 40}, - dictWord{14, 10, 162}, - dictWord{ - 14, - 10, - 202, - }, - dictWord{146, 10, 133}, - dictWord{4, 10, 406}, - dictWord{5, 10, 579}, - dictWord{12, 10, 492}, - dictWord{150, 10, 15}, - dictWord{9, 11, 137}, - dictWord{138, 11, 221}, - dictWord{134, 0, 1239}, - dictWord{11, 0, 211}, - dictWord{140, 0, 145}, - dictWord{7, 11, 390}, - dictWord{138, 11, 140}, - dictWord{ - 135, - 11, - 1418, - }, - dictWord{135, 11, 1144}, - dictWord{134, 0, 1049}, - dictWord{7, 0, 321}, - dictWord{6, 10, 17}, - dictWord{7, 10, 1001}, - dictWord{7, 10, 1982}, - dictWord{ - 9, - 10, - 886, - }, - dictWord{10, 10, 489}, - dictWord{10, 10, 800}, - dictWord{11, 10, 782}, - dictWord{12, 10, 320}, - dictWord{13, 10, 467}, - dictWord{14, 10, 145}, - dictWord{14, 10, 387}, - dictWord{143, 10, 119}, - dictWord{145, 10, 17}, - dictWord{5, 11, 407}, - dictWord{11, 11, 489}, - dictWord{19, 11, 37}, - dictWord{20, 11, 73}, - dictWord{150, 11, 38}, - dictWord{133, 10, 458}, - dictWord{135, 0, 1985}, - dictWord{7, 10, 1983}, - dictWord{8, 10, 0}, - dictWord{8, 10, 171}, - dictWord{ - 9, - 10, - 120, - }, - dictWord{9, 10, 732}, - dictWord{10, 10, 473}, - dictWord{11, 10, 656}, - dictWord{11, 10, 998}, - dictWord{18, 10, 0}, - dictWord{18, 10, 2}, - dictWord{ - 147, - 10, - 21, - }, - dictWord{5, 11, 325}, - dictWord{7, 11, 1483}, - dictWord{8, 11, 5}, - dictWord{8, 11, 227}, - dictWord{9, 11, 105}, - dictWord{10, 11, 585}, - dictWord{ - 140, - 11, - 614, - }, - dictWord{136, 0, 122}, - dictWord{132, 0, 234}, - dictWord{135, 11, 1196}, - dictWord{6, 0, 976}, - dictWord{6, 0, 1098}, - dictWord{134, 0, 1441}, - dictWord{ - 7, - 0, - 253, - }, - dictWord{136, 0, 549}, - dictWord{6, 
11, 621}, - dictWord{13, 11, 504}, - dictWord{144, 11, 19}, - dictWord{132, 10, 519}, - dictWord{5, 0, 430}, - dictWord{ - 5, - 0, - 932, - }, - dictWord{6, 0, 131}, - dictWord{7, 0, 417}, - dictWord{9, 0, 522}, - dictWord{11, 0, 314}, - dictWord{141, 0, 390}, - dictWord{14, 0, 149}, - dictWord{14, 0, 399}, - dictWord{143, 0, 57}, - dictWord{5, 10, 907}, - dictWord{6, 10, 31}, - dictWord{6, 11, 218}, - dictWord{7, 10, 491}, - dictWord{7, 10, 530}, - dictWord{8, 10, 592}, - dictWord{11, 10, 53}, - dictWord{11, 10, 779}, - dictWord{12, 10, 167}, - dictWord{12, 10, 411}, - dictWord{14, 10, 14}, - dictWord{14, 10, 136}, - dictWord{15, 10, 72}, - dictWord{16, 10, 17}, - dictWord{144, 10, 72}, - dictWord{140, 11, 330}, - dictWord{7, 11, 454}, - dictWord{7, 11, 782}, - dictWord{136, 11, 768}, - dictWord{ - 132, - 0, - 507, - }, - dictWord{10, 11, 676}, - dictWord{140, 11, 462}, - dictWord{6, 0, 630}, - dictWord{9, 0, 811}, - dictWord{4, 10, 208}, - dictWord{5, 10, 106}, - dictWord{ - 6, - 10, - 531, - }, - dictWord{8, 10, 408}, - dictWord{9, 10, 188}, - dictWord{138, 10, 572}, - dictWord{4, 0, 343}, - dictWord{5, 0, 511}, - dictWord{134, 10, 1693}, - dictWord{ - 134, - 11, - 164, - }, - dictWord{132, 0, 448}, - dictWord{7, 0, 455}, - dictWord{138, 0, 591}, - dictWord{135, 0, 1381}, - dictWord{12, 10, 441}, - dictWord{150, 11, 50}, - dictWord{9, 10, 449}, - dictWord{10, 10, 192}, - dictWord{138, 10, 740}, - dictWord{6, 0, 575}, - dictWord{132, 10, 241}, - dictWord{134, 0, 1175}, - dictWord{ - 134, - 0, - 653, - }, - dictWord{134, 0, 1761}, - dictWord{134, 0, 1198}, - dictWord{132, 10, 259}, - dictWord{6, 11, 343}, - dictWord{7, 11, 195}, - dictWord{9, 11, 226}, - dictWord{ - 10, - 11, - 197, - }, - dictWord{10, 11, 575}, - dictWord{11, 11, 502}, - dictWord{139, 11, 899}, - dictWord{7, 0, 1127}, - dictWord{7, 0, 1572}, - dictWord{10, 0, 297}, - dictWord{10, 0, 422}, - dictWord{11, 0, 764}, - dictWord{11, 0, 810}, - dictWord{12, 0, 264}, - dictWord{13, 0, 102}, - 
dictWord{13, 0, 300}, - dictWord{13, 0, 484}, - dictWord{ - 14, - 0, - 147, - }, - dictWord{14, 0, 229}, - dictWord{17, 0, 71}, - dictWord{18, 0, 118}, - dictWord{147, 0, 120}, - dictWord{135, 11, 666}, - dictWord{132, 0, 678}, - dictWord{ - 4, - 10, - 173, - }, - dictWord{5, 10, 312}, - dictWord{5, 10, 512}, - dictWord{135, 10, 1285}, - dictWord{7, 10, 1603}, - dictWord{7, 10, 1691}, - dictWord{9, 10, 464}, - dictWord{11, 10, 195}, - dictWord{12, 10, 279}, - dictWord{12, 10, 448}, - dictWord{14, 10, 11}, - dictWord{147, 10, 102}, - dictWord{16, 0, 99}, - dictWord{146, 0, 164}, - dictWord{7, 11, 1125}, - dictWord{9, 11, 143}, - dictWord{11, 11, 61}, - dictWord{14, 11, 405}, - dictWord{150, 11, 21}, - dictWord{137, 11, 260}, - dictWord{ - 4, - 10, - 452, - }, - dictWord{5, 10, 583}, - dictWord{5, 10, 817}, - dictWord{6, 10, 433}, - dictWord{7, 10, 593}, - dictWord{7, 10, 720}, - dictWord{7, 10, 1378}, - dictWord{ - 8, - 10, - 161, - }, - dictWord{9, 10, 284}, - dictWord{10, 10, 313}, - dictWord{139, 10, 886}, - dictWord{132, 10, 547}, - dictWord{136, 10, 722}, - dictWord{14, 0, 35}, - dictWord{142, 0, 191}, - dictWord{141, 0, 45}, - dictWord{138, 0, 121}, - dictWord{132, 0, 125}, - dictWord{134, 0, 1622}, - dictWord{133, 11, 959}, - dictWord{ - 8, - 10, - 420, - }, - dictWord{139, 10, 193}, - dictWord{132, 0, 721}, - dictWord{135, 10, 409}, - dictWord{136, 0, 145}, - dictWord{7, 0, 792}, - dictWord{8, 0, 147}, - dictWord{ - 10, - 0, - 821, - }, - dictWord{11, 0, 970}, - dictWord{11, 0, 1021}, - dictWord{136, 11, 173}, - dictWord{134, 11, 266}, - dictWord{132, 0, 715}, - dictWord{7, 0, 1999}, - dictWord{138, 10, 308}, - dictWord{133, 0, 531}, - dictWord{5, 0, 168}, - dictWord{5, 0, 930}, - dictWord{8, 0, 74}, - dictWord{9, 0, 623}, - dictWord{12, 0, 500}, - dictWord{ - 140, - 0, - 579, - }, - dictWord{144, 0, 65}, - dictWord{138, 11, 246}, - dictWord{6, 0, 220}, - dictWord{7, 0, 1101}, - dictWord{13, 0, 105}, - dictWord{142, 11, 314}, - dictWord{ - 5, - 10, - 1002, - 
}, - dictWord{136, 10, 745}, - dictWord{134, 0, 960}, - dictWord{20, 0, 0}, - dictWord{148, 11, 0}, - dictWord{4, 0, 1005}, - dictWord{4, 10, 239}, - dictWord{ - 6, - 10, - 477, - }, - dictWord{7, 10, 1607}, - dictWord{11, 10, 68}, - dictWord{139, 10, 617}, - dictWord{6, 0, 19}, - dictWord{7, 0, 1413}, - dictWord{139, 0, 428}, - dictWord{ - 149, - 10, - 13, - }, - dictWord{7, 0, 96}, - dictWord{8, 0, 401}, - dictWord{8, 0, 703}, - dictWord{9, 0, 896}, - dictWord{136, 11, 300}, - dictWord{134, 0, 1595}, - dictWord{145, 0, 116}, - dictWord{136, 0, 1021}, - dictWord{7, 0, 1961}, - dictWord{7, 0, 1965}, - dictWord{7, 0, 2030}, - dictWord{8, 0, 150}, - dictWord{8, 0, 702}, - dictWord{8, 0, 737}, - dictWord{ - 8, - 0, - 750, - }, - dictWord{140, 0, 366}, - dictWord{11, 11, 75}, - dictWord{142, 11, 267}, - dictWord{132, 10, 367}, - dictWord{8, 0, 800}, - dictWord{9, 0, 148}, - dictWord{ - 9, - 0, - 872, - }, - dictWord{9, 0, 890}, - dictWord{11, 0, 309}, - dictWord{11, 0, 1001}, - dictWord{13, 0, 267}, - dictWord{13, 0, 323}, - dictWord{5, 11, 427}, - dictWord{ - 5, - 11, - 734, - }, - dictWord{7, 11, 478}, - dictWord{136, 11, 52}, - dictWord{7, 11, 239}, - dictWord{11, 11, 217}, - dictWord{142, 11, 165}, - dictWord{132, 11, 323}, - dictWord{140, 11, 419}, - dictWord{13, 0, 299}, - dictWord{142, 0, 75}, - dictWord{6, 11, 87}, - dictWord{6, 11, 1734}, - dictWord{7, 11, 20}, - dictWord{7, 11, 1056}, - dictWord{ - 8, - 11, - 732, - }, - dictWord{9, 11, 406}, - dictWord{9, 11, 911}, - dictWord{138, 11, 694}, - dictWord{134, 0, 1383}, - dictWord{132, 10, 694}, - dictWord{ - 133, - 11, - 613, - }, - dictWord{137, 0, 779}, - dictWord{4, 0, 598}, - dictWord{140, 10, 687}, - dictWord{6, 0, 970}, - dictWord{135, 0, 424}, - dictWord{133, 0, 547}, - dictWord{ - 7, - 11, - 32, - }, - dictWord{7, 11, 984}, - dictWord{8, 11, 85}, - dictWord{8, 11, 709}, - dictWord{9, 11, 579}, - dictWord{9, 11, 847}, - dictWord{9, 11, 856}, - dictWord{10, 11, 799}, - dictWord{11, 11, 258}, - 
dictWord{11, 11, 1007}, - dictWord{12, 11, 331}, - dictWord{12, 11, 615}, - dictWord{13, 11, 188}, - dictWord{13, 11, 435}, - dictWord{ - 14, - 11, - 8, - }, - dictWord{15, 11, 165}, - dictWord{16, 11, 27}, - dictWord{148, 11, 40}, - dictWord{6, 0, 1222}, - dictWord{134, 0, 1385}, - dictWord{132, 0, 876}, - dictWord{ - 138, - 11, - 151, - }, - dictWord{135, 10, 213}, - dictWord{4, 11, 167}, - dictWord{135, 11, 82}, - dictWord{133, 0, 133}, - dictWord{6, 11, 24}, - dictWord{7, 11, 74}, - dictWord{ - 7, - 11, - 678, - }, - dictWord{137, 11, 258}, - dictWord{5, 11, 62}, - dictWord{6, 11, 534}, - dictWord{7, 11, 684}, - dictWord{7, 11, 1043}, - dictWord{7, 11, 1072}, - dictWord{ - 8, - 11, - 280, - }, - dictWord{8, 11, 541}, - dictWord{8, 11, 686}, - dictWord{10, 11, 519}, - dictWord{11, 11, 252}, - dictWord{140, 11, 282}, - dictWord{136, 0, 187}, - dictWord{8, 0, 8}, - dictWord{10, 0, 0}, - dictWord{10, 0, 818}, - dictWord{139, 0, 988}, - dictWord{132, 11, 359}, - dictWord{11, 0, 429}, - dictWord{15, 0, 51}, - dictWord{ - 135, - 10, - 1672, - }, - dictWord{136, 0, 685}, - dictWord{5, 11, 211}, - dictWord{7, 11, 88}, - dictWord{136, 11, 627}, - dictWord{134, 0, 472}, - dictWord{136, 0, 132}, - dictWord{ - 6, - 11, - 145, - }, - dictWord{141, 11, 336}, - dictWord{4, 10, 751}, - dictWord{11, 10, 390}, - dictWord{140, 10, 32}, - dictWord{6, 0, 938}, - dictWord{6, 0, 1060}, - dictWord{ - 4, - 11, - 263, - }, - dictWord{4, 10, 409}, - dictWord{133, 10, 78}, - dictWord{137, 0, 874}, - dictWord{8, 0, 774}, - dictWord{10, 0, 670}, - dictWord{12, 0, 51}, - dictWord{ - 4, - 11, - 916, - }, - dictWord{6, 10, 473}, - dictWord{7, 10, 1602}, - dictWord{10, 10, 698}, - dictWord{12, 10, 212}, - dictWord{13, 10, 307}, - dictWord{145, 10, 105}, - dictWord{146, 0, 92}, - dictWord{143, 10, 156}, - dictWord{132, 0, 830}, - dictWord{137, 0, 701}, - dictWord{4, 11, 599}, - dictWord{6, 11, 1634}, - dictWord{7, 11, 5}, - dictWord{7, 11, 55}, - dictWord{7, 11, 67}, - dictWord{7, 11, 97}, - 
dictWord{7, 11, 691}, - dictWord{7, 11, 979}, - dictWord{7, 11, 1697}, - dictWord{8, 11, 207}, - dictWord{ - 8, - 11, - 214, - }, - dictWord{8, 11, 231}, - dictWord{8, 11, 294}, - dictWord{8, 11, 336}, - dictWord{8, 11, 428}, - dictWord{8, 11, 451}, - dictWord{8, 11, 460}, - dictWord{8, 11, 471}, - dictWord{8, 11, 622}, - dictWord{8, 11, 626}, - dictWord{8, 11, 679}, - dictWord{8, 11, 759}, - dictWord{8, 11, 829}, - dictWord{9, 11, 11}, - dictWord{9, 11, 246}, - dictWord{ - 9, - 11, - 484, - }, - dictWord{9, 11, 573}, - dictWord{9, 11, 706}, - dictWord{9, 11, 762}, - dictWord{9, 11, 798}, - dictWord{9, 11, 855}, - dictWord{9, 11, 870}, - dictWord{ - 9, - 11, - 912, - }, - dictWord{10, 11, 303}, - dictWord{10, 11, 335}, - dictWord{10, 11, 424}, - dictWord{10, 11, 461}, - dictWord{10, 11, 543}, - dictWord{10, 11, 759}, - dictWord{10, 11, 814}, - dictWord{11, 11, 59}, - dictWord{11, 11, 199}, - dictWord{11, 11, 235}, - dictWord{11, 11, 475}, - dictWord{11, 11, 590}, - dictWord{11, 11, 929}, - dictWord{11, 11, 963}, - dictWord{12, 11, 114}, - dictWord{12, 11, 182}, - dictWord{12, 11, 226}, - dictWord{12, 11, 332}, - dictWord{12, 11, 439}, - dictWord{ - 12, - 11, - 575, - }, - dictWord{12, 11, 598}, - dictWord{13, 11, 8}, - dictWord{13, 11, 125}, - dictWord{13, 11, 194}, - dictWord{13, 11, 287}, - dictWord{14, 11, 197}, - dictWord{ - 14, - 11, - 383, - }, - dictWord{15, 11, 53}, - dictWord{17, 11, 63}, - dictWord{19, 11, 46}, - dictWord{19, 11, 98}, - dictWord{19, 11, 106}, - dictWord{148, 11, 85}, - dictWord{ - 4, - 0, - 127, - }, - dictWord{5, 0, 350}, - dictWord{6, 0, 356}, - dictWord{8, 0, 426}, - dictWord{9, 0, 572}, - dictWord{10, 0, 247}, - dictWord{139, 0, 312}, - dictWord{134, 0, 1215}, - dictWord{6, 0, 59}, - dictWord{9, 0, 603}, - dictWord{13, 0, 397}, - dictWord{7, 11, 1853}, - dictWord{138, 11, 437}, - dictWord{134, 0, 1762}, - dictWord{ - 147, - 11, - 126, - }, - dictWord{135, 10, 883}, - dictWord{13, 0, 293}, - dictWord{142, 0, 56}, - dictWord{133, 10, 
617}, - dictWord{139, 10, 50}, - dictWord{5, 11, 187}, - dictWord{ - 7, - 10, - 1518, - }, - dictWord{139, 10, 694}, - dictWord{135, 0, 441}, - dictWord{6, 0, 111}, - dictWord{7, 0, 4}, - dictWord{8, 0, 163}, - dictWord{8, 0, 776}, - dictWord{ - 138, - 0, - 566, - }, - dictWord{132, 0, 806}, - dictWord{4, 11, 215}, - dictWord{9, 11, 38}, - dictWord{10, 11, 3}, - dictWord{11, 11, 23}, - dictWord{11, 11, 127}, - dictWord{ - 139, - 11, - 796, - }, - dictWord{14, 0, 233}, - dictWord{4, 10, 546}, - dictWord{135, 10, 2042}, - dictWord{135, 0, 1994}, - dictWord{134, 0, 1739}, - dictWord{135, 11, 1530}, - dictWord{136, 0, 393}, - dictWord{5, 0, 297}, - dictWord{7, 0, 1038}, - dictWord{14, 0, 359}, - dictWord{19, 0, 52}, - dictWord{148, 0, 47}, - dictWord{135, 0, 309}, - dictWord{ - 4, - 10, - 313, - }, - dictWord{133, 10, 577}, - dictWord{8, 10, 184}, - dictWord{141, 10, 433}, - dictWord{135, 10, 935}, - dictWord{12, 10, 186}, - dictWord{ - 12, - 10, - 292, - }, - dictWord{14, 10, 100}, - dictWord{146, 10, 70}, - dictWord{136, 0, 363}, - dictWord{14, 0, 175}, - dictWord{11, 10, 402}, - dictWord{12, 10, 109}, - dictWord{ - 12, - 10, - 431, - }, - dictWord{13, 10, 179}, - dictWord{13, 10, 206}, - dictWord{14, 10, 217}, - dictWord{16, 10, 3}, - dictWord{148, 10, 53}, - dictWord{5, 10, 886}, - dictWord{ - 6, - 10, - 46, - }, - dictWord{6, 10, 1790}, - dictWord{7, 10, 14}, - dictWord{7, 10, 732}, - dictWord{7, 10, 1654}, - dictWord{8, 10, 95}, - dictWord{8, 10, 327}, - dictWord{ - 8, - 10, - 616, - }, - dictWord{9, 10, 892}, - dictWord{10, 10, 598}, - dictWord{10, 10, 769}, - dictWord{11, 10, 134}, - dictWord{11, 10, 747}, - dictWord{12, 10, 378}, - dictWord{ - 142, - 10, - 97, - }, - dictWord{136, 0, 666}, - dictWord{135, 0, 1675}, - dictWord{6, 0, 655}, - dictWord{134, 0, 1600}, - dictWord{135, 0, 808}, - dictWord{133, 10, 1021}, - dictWord{4, 11, 28}, - dictWord{5, 11, 440}, - dictWord{7, 11, 248}, - dictWord{11, 11, 833}, - dictWord{140, 11, 344}, - dictWord{134, 11, 1654}, 
- dictWord{ - 132, - 0, - 280, - }, - dictWord{140, 0, 54}, - dictWord{4, 0, 421}, - dictWord{133, 0, 548}, - dictWord{132, 10, 153}, - dictWord{6, 11, 339}, - dictWord{135, 11, 923}, - dictWord{ - 133, - 11, - 853, - }, - dictWord{133, 10, 798}, - dictWord{132, 10, 587}, - dictWord{6, 11, 249}, - dictWord{7, 11, 1234}, - dictWord{139, 11, 573}, - dictWord{6, 10, 598}, - dictWord{7, 10, 42}, - dictWord{8, 10, 695}, - dictWord{10, 10, 212}, - dictWord{11, 10, 158}, - dictWord{14, 10, 196}, - dictWord{145, 10, 85}, - dictWord{7, 0, 249}, - dictWord{5, 10, 957}, - dictWord{133, 10, 1008}, - dictWord{4, 10, 129}, - dictWord{135, 10, 465}, - dictWord{6, 0, 254}, - dictWord{7, 0, 842}, - dictWord{7, 0, 1659}, - dictWord{9, 0, 109}, - dictWord{10, 0, 103}, - dictWord{7, 10, 908}, - dictWord{7, 10, 1201}, - dictWord{9, 10, 755}, - dictWord{11, 10, 906}, - dictWord{12, 10, 527}, - dictWord{146, 10, 7}, - dictWord{5, 0, 262}, - dictWord{136, 10, 450}, - dictWord{144, 0, 1}, - dictWord{10, 11, 201}, - dictWord{142, 11, 319}, - dictWord{7, 11, 49}, - dictWord{ - 7, - 11, - 392, - }, - dictWord{8, 11, 20}, - dictWord{8, 11, 172}, - dictWord{8, 11, 690}, - dictWord{9, 11, 383}, - dictWord{9, 11, 845}, - dictWord{10, 11, 48}, - dictWord{ - 11, - 11, - 293, - }, - dictWord{11, 11, 832}, - dictWord{11, 11, 920}, - dictWord{141, 11, 221}, - dictWord{5, 11, 858}, - dictWord{133, 11, 992}, - dictWord{134, 0, 805}, - dictWord{139, 10, 1003}, - dictWord{6, 0, 1630}, - dictWord{134, 11, 307}, - dictWord{7, 11, 1512}, - dictWord{135, 11, 1794}, - dictWord{6, 11, 268}, - dictWord{ - 137, - 11, - 62, - }, - dictWord{135, 10, 1868}, - dictWord{133, 0, 671}, - dictWord{4, 0, 989}, - dictWord{8, 0, 972}, - dictWord{136, 0, 998}, - dictWord{132, 11, 423}, - dictWord{132, 0, 889}, - dictWord{135, 0, 1382}, - dictWord{135, 0, 1910}, - dictWord{7, 10, 965}, - dictWord{7, 10, 1460}, - dictWord{135, 10, 1604}, - dictWord{ - 4, - 0, - 627, - }, - dictWord{5, 0, 775}, - dictWord{138, 11, 106}, - 
dictWord{134, 11, 348}, - dictWord{7, 0, 202}, - dictWord{11, 0, 362}, - dictWord{11, 0, 948}, - dictWord{ - 140, - 0, - 388, - }, - dictWord{138, 11, 771}, - dictWord{6, 11, 613}, - dictWord{136, 11, 223}, - dictWord{6, 0, 560}, - dictWord{7, 0, 451}, - dictWord{8, 0, 389}, - dictWord{ - 12, - 0, - 490, - }, - dictWord{13, 0, 16}, - dictWord{13, 0, 215}, - dictWord{13, 0, 351}, - dictWord{18, 0, 132}, - dictWord{147, 0, 125}, - dictWord{135, 0, 841}, - dictWord{ - 136, - 0, - 566, - }, - dictWord{136, 0, 938}, - dictWord{132, 11, 670}, - dictWord{5, 0, 912}, - dictWord{6, 0, 1695}, - dictWord{140, 11, 55}, - dictWord{9, 11, 40}, - dictWord{ - 139, - 11, - 136, - }, - dictWord{7, 0, 1361}, - dictWord{7, 10, 982}, - dictWord{10, 10, 32}, - dictWord{143, 10, 56}, - dictWord{11, 11, 259}, - dictWord{140, 11, 270}, - dictWord{ - 5, - 0, - 236, - }, - dictWord{6, 0, 572}, - dictWord{8, 0, 492}, - dictWord{11, 0, 618}, - dictWord{144, 0, 56}, - dictWord{8, 11, 572}, - dictWord{9, 11, 310}, - dictWord{9, 11, 682}, - dictWord{137, 11, 698}, - dictWord{134, 0, 1854}, - dictWord{5, 0, 190}, - dictWord{136, 0, 318}, - dictWord{133, 10, 435}, - dictWord{135, 0, 1376}, - dictWord{ - 4, - 11, - 296, - }, - dictWord{6, 11, 352}, - dictWord{7, 11, 401}, - dictWord{7, 11, 1410}, - dictWord{7, 11, 1594}, - dictWord{7, 11, 1674}, - dictWord{8, 11, 63}, - dictWord{ - 8, - 11, - 660, - }, - dictWord{137, 11, 74}, - dictWord{7, 0, 349}, - dictWord{5, 10, 85}, - dictWord{6, 10, 419}, - dictWord{7, 10, 305}, - dictWord{7, 10, 361}, - dictWord{7, 10, 1337}, - dictWord{8, 10, 71}, - dictWord{140, 10, 519}, - dictWord{4, 11, 139}, - dictWord{4, 11, 388}, - dictWord{140, 11, 188}, - dictWord{6, 0, 1972}, - dictWord{6, 0, 2013}, - dictWord{8, 0, 951}, - dictWord{10, 0, 947}, - dictWord{10, 0, 974}, - dictWord{10, 0, 1018}, - dictWord{142, 0, 476}, - dictWord{140, 10, 688}, - dictWord{ - 135, - 10, - 740, - }, - dictWord{5, 10, 691}, - dictWord{7, 10, 345}, - dictWord{9, 10, 94}, - 
dictWord{140, 10, 169}, - dictWord{9, 0, 344}, - dictWord{5, 10, 183}, - dictWord{6, 10, 582}, - dictWord{10, 10, 679}, - dictWord{140, 10, 435}, - dictWord{135, 10, 511}, - dictWord{132, 0, 850}, - dictWord{8, 11, 441}, - dictWord{10, 11, 314}, - dictWord{ - 143, - 11, - 3, - }, - dictWord{7, 10, 1993}, - dictWord{136, 10, 684}, - dictWord{4, 11, 747}, - dictWord{6, 11, 290}, - dictWord{6, 10, 583}, - dictWord{7, 11, 649}, - dictWord{ - 7, - 11, - 1479, - }, - dictWord{135, 11, 1583}, - dictWord{133, 11, 232}, - dictWord{133, 10, 704}, - dictWord{134, 0, 910}, - dictWord{4, 10, 179}, - dictWord{5, 10, 198}, - dictWord{133, 10, 697}, - dictWord{7, 10, 347}, - dictWord{7, 10, 971}, - dictWord{8, 10, 181}, - dictWord{138, 10, 711}, - dictWord{136, 11, 525}, - dictWord{ - 14, - 0, - 19, - }, - dictWord{14, 0, 28}, - dictWord{144, 0, 29}, - dictWord{7, 0, 85}, - dictWord{7, 0, 247}, - dictWord{8, 0, 585}, - dictWord{138, 0, 163}, - dictWord{4, 0, 487}, - dictWord{ - 7, - 11, - 472, - }, - dictWord{7, 11, 1801}, - dictWord{10, 11, 748}, - dictWord{141, 11, 458}, - dictWord{4, 10, 243}, - dictWord{5, 10, 203}, - dictWord{7, 10, 19}, - dictWord{ - 7, - 10, - 71, - }, - dictWord{7, 10, 113}, - dictWord{10, 10, 405}, - dictWord{11, 10, 357}, - dictWord{142, 10, 240}, - dictWord{7, 10, 1450}, - dictWord{139, 10, 99}, - dictWord{132, 11, 425}, - dictWord{138, 0, 145}, - dictWord{147, 0, 83}, - dictWord{6, 10, 492}, - dictWord{137, 11, 247}, - dictWord{4, 0, 1013}, - dictWord{ - 134, - 0, - 2033, - }, - dictWord{5, 10, 134}, - dictWord{6, 10, 408}, - dictWord{6, 10, 495}, - dictWord{135, 10, 1593}, - dictWord{135, 0, 1922}, - dictWord{134, 11, 1768}, - dictWord{4, 0, 124}, - dictWord{10, 0, 457}, - dictWord{11, 0, 121}, - dictWord{11, 0, 169}, - dictWord{11, 0, 870}, - dictWord{11, 0, 874}, - dictWord{12, 0, 214}, - dictWord{ - 14, - 0, - 187, - }, - dictWord{143, 0, 77}, - dictWord{5, 0, 557}, - dictWord{135, 0, 1457}, - dictWord{139, 0, 66}, - dictWord{5, 11, 943}, - 
dictWord{6, 11, 1779}, - dictWord{ - 142, - 10, - 4, - }, - dictWord{4, 10, 248}, - dictWord{4, 10, 665}, - dictWord{7, 10, 137}, - dictWord{137, 10, 349}, - dictWord{7, 0, 1193}, - dictWord{5, 11, 245}, - dictWord{ - 6, - 11, - 576, - }, - dictWord{7, 11, 582}, - dictWord{136, 11, 225}, - dictWord{144, 0, 82}, - dictWord{7, 10, 1270}, - dictWord{139, 10, 612}, - dictWord{5, 0, 454}, - dictWord{ - 10, - 0, - 352, - }, - dictWord{138, 11, 352}, - dictWord{18, 0, 57}, - dictWord{5, 10, 371}, - dictWord{135, 10, 563}, - dictWord{135, 0, 1333}, - dictWord{6, 0, 107}, - dictWord{ - 7, - 0, - 638, - }, - dictWord{7, 0, 1632}, - dictWord{9, 0, 396}, - dictWord{134, 11, 610}, - dictWord{5, 0, 370}, - dictWord{134, 0, 1756}, - dictWord{4, 10, 374}, - dictWord{ - 7, - 10, - 547, - }, - dictWord{7, 10, 1700}, - dictWord{7, 10, 1833}, - dictWord{139, 10, 858}, - dictWord{133, 0, 204}, - dictWord{6, 0, 1305}, - dictWord{9, 10, 311}, - dictWord{ - 141, - 10, - 42, - }, - dictWord{5, 0, 970}, - dictWord{134, 0, 1706}, - dictWord{6, 10, 1647}, - dictWord{7, 10, 1552}, - dictWord{7, 10, 2010}, - dictWord{9, 10, 494}, - dictWord{137, 10, 509}, - dictWord{13, 11, 455}, - dictWord{15, 11, 99}, - dictWord{15, 11, 129}, - dictWord{144, 11, 68}, - dictWord{135, 0, 3}, - dictWord{4, 0, 35}, - dictWord{ - 5, - 0, - 121, - }, - dictWord{5, 0, 483}, - dictWord{5, 0, 685}, - dictWord{6, 0, 489}, - dictWord{6, 0, 782}, - dictWord{6, 0, 1032}, - dictWord{7, 0, 1204}, - dictWord{136, 0, 394}, - dictWord{4, 0, 921}, - dictWord{133, 0, 1007}, - dictWord{8, 11, 360}, - dictWord{138, 11, 63}, - dictWord{135, 0, 1696}, - dictWord{134, 0, 1519}, - dictWord{ - 132, - 11, - 443, - }, - dictWord{135, 11, 944}, - dictWord{6, 10, 123}, - dictWord{7, 10, 214}, - dictWord{9, 10, 728}, - dictWord{10, 10, 157}, - dictWord{11, 10, 346}, - dictWord{11, 10, 662}, - dictWord{143, 10, 106}, - dictWord{137, 0, 981}, - dictWord{135, 10, 1435}, - dictWord{134, 0, 1072}, - dictWord{132, 0, 712}, - dictWord{ - 134, - 0, 
- 1629, - }, - dictWord{134, 0, 728}, - dictWord{4, 11, 298}, - dictWord{137, 11, 483}, - dictWord{6, 0, 1177}, - dictWord{6, 0, 1271}, - dictWord{5, 11, 164}, - dictWord{ - 7, - 11, - 121, - }, - dictWord{142, 11, 189}, - dictWord{7, 0, 1608}, - dictWord{4, 10, 707}, - dictWord{5, 10, 588}, - dictWord{6, 10, 393}, - dictWord{13, 10, 106}, - dictWord{ - 18, - 10, - 49, - }, - dictWord{147, 10, 41}, - dictWord{23, 0, 16}, - dictWord{151, 11, 16}, - dictWord{6, 10, 211}, - dictWord{7, 10, 1690}, - dictWord{11, 10, 486}, - dictWord{140, 10, 369}, - dictWord{133, 0, 485}, - dictWord{19, 11, 15}, - dictWord{149, 11, 27}, - dictWord{4, 11, 172}, - dictWord{9, 11, 611}, - dictWord{10, 11, 436}, - dictWord{12, 11, 673}, - dictWord{141, 11, 255}, - dictWord{5, 11, 844}, - dictWord{10, 11, 484}, - dictWord{11, 11, 754}, - dictWord{12, 11, 457}, - dictWord{ - 14, - 11, - 171, - }, - dictWord{14, 11, 389}, - dictWord{146, 11, 153}, - dictWord{4, 0, 285}, - dictWord{5, 0, 27}, - dictWord{5, 0, 317}, - dictWord{6, 0, 301}, - dictWord{7, 0, 7}, - dictWord{ - 8, - 0, - 153, - }, - dictWord{10, 0, 766}, - dictWord{11, 0, 468}, - dictWord{12, 0, 467}, - dictWord{141, 0, 143}, - dictWord{134, 0, 1462}, - dictWord{9, 11, 263}, - dictWord{ - 10, - 11, - 147, - }, - dictWord{138, 11, 492}, - dictWord{133, 11, 537}, - dictWord{6, 0, 1945}, - dictWord{6, 0, 1986}, - dictWord{6, 0, 1991}, - dictWord{134, 0, 2038}, - dictWord{134, 10, 219}, - dictWord{137, 11, 842}, - dictWord{14, 0, 52}, - dictWord{17, 0, 50}, - dictWord{5, 10, 582}, - dictWord{6, 10, 1646}, - dictWord{7, 10, 99}, - dictWord{7, 10, 1962}, - dictWord{7, 10, 1986}, - dictWord{8, 10, 515}, - dictWord{8, 10, 773}, - dictWord{9, 10, 23}, - dictWord{9, 10, 491}, - dictWord{12, 10, 620}, - dictWord{142, 10, 93}, - dictWord{138, 11, 97}, - dictWord{20, 0, 21}, - dictWord{20, 0, 44}, - dictWord{133, 10, 851}, - dictWord{136, 0, 819}, - dictWord{139, 0, 917}, - dictWord{5, 11, 230}, - dictWord{5, 11, 392}, - dictWord{6, 11, 420}, - 
dictWord{8, 10, 762}, - dictWord{8, 10, 812}, - dictWord{9, 11, 568}, - dictWord{9, 10, 910}, - dictWord{140, 11, 612}, - dictWord{135, 0, 784}, - dictWord{15, 0, 135}, - dictWord{143, 11, 135}, - dictWord{10, 0, 454}, - dictWord{140, 0, 324}, - dictWord{4, 11, 0}, - dictWord{5, 11, 41}, - dictWord{7, 11, 1459}, - dictWord{7, 11, 1469}, - dictWord{7, 11, 1618}, - dictWord{7, 11, 1859}, - dictWord{9, 11, 549}, - dictWord{139, 11, 905}, - dictWord{4, 10, 98}, - dictWord{7, 10, 1365}, - dictWord{9, 10, 422}, - dictWord{9, 10, 670}, - dictWord{10, 10, 775}, - dictWord{11, 10, 210}, - dictWord{13, 10, 26}, - dictWord{13, 10, 457}, - dictWord{141, 10, 476}, - dictWord{6, 0, 1719}, - dictWord{6, 0, 1735}, - dictWord{7, 0, 2016}, - dictWord{7, 0, 2020}, - dictWord{8, 0, 837}, - dictWord{137, 0, 852}, - dictWord{133, 11, 696}, - dictWord{135, 0, 852}, - dictWord{132, 0, 952}, - dictWord{134, 10, 1730}, - dictWord{132, 11, 771}, - dictWord{ - 138, - 0, - 568, - }, - dictWord{137, 0, 448}, - dictWord{139, 0, 146}, - dictWord{8, 0, 67}, - dictWord{138, 0, 419}, - dictWord{133, 11, 921}, - dictWord{137, 10, 147}, - dictWord{134, 0, 1826}, - dictWord{10, 0, 657}, - dictWord{14, 0, 297}, - dictWord{142, 0, 361}, - dictWord{6, 0, 666}, - dictWord{6, 0, 767}, - dictWord{134, 0, 1542}, - dictWord{139, 0, 729}, - dictWord{6, 11, 180}, - dictWord{7, 11, 1137}, - dictWord{8, 11, 751}, - dictWord{139, 11, 805}, - dictWord{4, 11, 183}, - dictWord{7, 11, 271}, - dictWord{11, 11, 824}, - dictWord{11, 11, 952}, - dictWord{13, 11, 278}, - dictWord{13, 11, 339}, - dictWord{13, 11, 482}, - dictWord{14, 11, 424}, - dictWord{ - 148, - 11, - 99, - }, - dictWord{4, 0, 669}, - dictWord{5, 11, 477}, - dictWord{5, 11, 596}, - dictWord{6, 11, 505}, - dictWord{7, 11, 1221}, - dictWord{11, 11, 907}, - dictWord{ - 12, - 11, - 209, - }, - dictWord{141, 11, 214}, - dictWord{135, 11, 1215}, - dictWord{5, 0, 402}, - dictWord{6, 10, 30}, - dictWord{11, 10, 56}, - dictWord{139, 10, 305}, - dictWord{ - 7, - 11, 
- 564, - }, - dictWord{142, 11, 168}, - dictWord{139, 0, 152}, - dictWord{7, 0, 912}, - dictWord{135, 10, 1614}, - dictWord{4, 10, 150}, - dictWord{5, 10, 303}, - dictWord{134, 10, 327}, - dictWord{7, 0, 320}, - dictWord{8, 0, 51}, - dictWord{9, 0, 868}, - dictWord{10, 0, 833}, - dictWord{12, 0, 481}, - dictWord{12, 0, 570}, - dictWord{ - 148, - 0, - 106, - }, - dictWord{132, 0, 445}, - dictWord{7, 11, 274}, - dictWord{11, 11, 263}, - dictWord{11, 11, 479}, - dictWord{11, 11, 507}, - dictWord{140, 11, 277}, - dictWord{10, 0, 555}, - dictWord{11, 0, 308}, - dictWord{19, 0, 95}, - dictWord{6, 11, 1645}, - dictWord{8, 10, 192}, - dictWord{10, 10, 78}, - dictWord{141, 10, 359}, - dictWord{135, 10, 786}, - dictWord{6, 11, 92}, - dictWord{6, 11, 188}, - dictWord{7, 11, 1269}, - dictWord{7, 11, 1524}, - dictWord{7, 11, 1876}, - dictWord{10, 11, 228}, - dictWord{139, 11, 1020}, - dictWord{4, 11, 459}, - dictWord{133, 11, 966}, - dictWord{11, 0, 386}, - dictWord{6, 10, 1638}, - dictWord{7, 10, 79}, - dictWord{ - 7, - 10, - 496, - }, - dictWord{9, 10, 138}, - dictWord{10, 10, 336}, - dictWord{12, 10, 412}, - dictWord{12, 10, 440}, - dictWord{142, 10, 305}, - dictWord{133, 0, 239}, - dictWord{ - 7, - 0, - 83, - }, - dictWord{7, 0, 1990}, - dictWord{8, 0, 130}, - dictWord{139, 0, 720}, - dictWord{138, 11, 709}, - dictWord{4, 0, 143}, - dictWord{5, 0, 550}, - dictWord{ - 133, - 0, - 752, - }, - dictWord{5, 0, 123}, - dictWord{6, 0, 530}, - dictWord{7, 0, 348}, - dictWord{135, 0, 1419}, - dictWord{135, 0, 2024}, - dictWord{6, 11, 18}, - dictWord{7, 11, 179}, - dictWord{7, 11, 721}, - dictWord{7, 11, 932}, - dictWord{8, 11, 548}, - dictWord{8, 11, 757}, - dictWord{9, 11, 54}, - dictWord{9, 11, 65}, - dictWord{9, 11, 532}, - dictWord{ - 9, - 11, - 844, - }, - dictWord{10, 11, 113}, - dictWord{10, 11, 117}, - dictWord{10, 11, 236}, - dictWord{10, 11, 315}, - dictWord{10, 11, 430}, - dictWord{10, 11, 798}, - dictWord{11, 11, 153}, - dictWord{11, 11, 351}, - dictWord{11, 11, 375}, - 
dictWord{12, 11, 78}, - dictWord{12, 11, 151}, - dictWord{12, 11, 392}, - dictWord{ - 14, - 11, - 248, - }, - dictWord{143, 11, 23}, - dictWord{7, 10, 204}, - dictWord{7, 10, 415}, - dictWord{8, 10, 42}, - dictWord{10, 10, 85}, - dictWord{139, 10, 564}, - dictWord{ - 134, - 0, - 958, - }, - dictWord{133, 11, 965}, - dictWord{132, 0, 210}, - dictWord{135, 11, 1429}, - dictWord{138, 11, 480}, - dictWord{134, 11, 182}, - dictWord{ - 139, - 11, - 345, - }, - dictWord{10, 11, 65}, - dictWord{10, 11, 488}, - dictWord{138, 11, 497}, - dictWord{4, 10, 3}, - dictWord{5, 10, 247}, - dictWord{5, 10, 644}, - dictWord{ - 7, - 10, - 744, - }, - dictWord{7, 10, 1207}, - dictWord{7, 10, 1225}, - dictWord{7, 10, 1909}, - dictWord{146, 10, 147}, - dictWord{132, 0, 430}, - dictWord{5, 10, 285}, - dictWord{ - 9, - 10, - 67, - }, - dictWord{13, 10, 473}, - dictWord{143, 10, 82}, - dictWord{144, 11, 16}, - dictWord{7, 11, 1162}, - dictWord{9, 11, 588}, - dictWord{10, 11, 260}, - dictWord{151, 10, 8}, - dictWord{133, 0, 213}, - dictWord{138, 0, 7}, - dictWord{135, 0, 801}, - dictWord{134, 11, 1786}, - dictWord{135, 11, 308}, - dictWord{6, 0, 936}, - dictWord{134, 0, 1289}, - dictWord{133, 0, 108}, - dictWord{132, 0, 885}, - dictWord{133, 0, 219}, - dictWord{139, 0, 587}, - dictWord{4, 0, 193}, - dictWord{5, 0, 916}, - dictWord{6, 0, 1041}, - dictWord{7, 0, 364}, - dictWord{10, 0, 398}, - dictWord{10, 0, 726}, - dictWord{11, 0, 317}, - dictWord{11, 0, 626}, - dictWord{12, 0, 142}, - dictWord{12, 0, 288}, - dictWord{12, 0, 678}, - dictWord{13, 0, 313}, - dictWord{15, 0, 113}, - dictWord{146, 0, 114}, - dictWord{135, 0, 1165}, - dictWord{6, 0, 241}, - dictWord{ - 9, - 0, - 342, - }, - dictWord{10, 0, 729}, - dictWord{11, 0, 284}, - dictWord{11, 0, 445}, - dictWord{11, 0, 651}, - dictWord{11, 0, 863}, - dictWord{13, 0, 398}, - dictWord{ - 146, - 0, - 99, - }, - dictWord{7, 0, 907}, - dictWord{136, 0, 832}, - dictWord{9, 0, 303}, - dictWord{4, 10, 29}, - dictWord{6, 10, 532}, - dictWord{7, 
10, 1628}, - dictWord{7, 10, 1648}, - dictWord{9, 10, 350}, - dictWord{10, 10, 433}, - dictWord{11, 10, 97}, - dictWord{11, 10, 557}, - dictWord{11, 10, 745}, - dictWord{12, 10, 289}, - dictWord{ - 12, - 10, - 335, - }, - dictWord{12, 10, 348}, - dictWord{12, 10, 606}, - dictWord{13, 10, 116}, - dictWord{13, 10, 233}, - dictWord{13, 10, 466}, - dictWord{14, 10, 181}, - dictWord{ - 14, - 10, - 209, - }, - dictWord{14, 10, 232}, - dictWord{14, 10, 236}, - dictWord{14, 10, 300}, - dictWord{16, 10, 41}, - dictWord{148, 10, 97}, - dictWord{7, 11, 423}, - dictWord{7, 10, 1692}, - dictWord{136, 11, 588}, - dictWord{6, 0, 931}, - dictWord{134, 0, 1454}, - dictWord{5, 10, 501}, - dictWord{7, 10, 1704}, - dictWord{9, 10, 553}, - dictWord{11, 10, 520}, - dictWord{12, 10, 557}, - dictWord{141, 10, 249}, - dictWord{136, 11, 287}, - dictWord{4, 0, 562}, - dictWord{9, 0, 254}, - dictWord{ - 139, - 0, - 879, - }, - dictWord{132, 0, 786}, - dictWord{14, 11, 32}, - dictWord{18, 11, 85}, - dictWord{20, 11, 2}, - dictWord{152, 11, 16}, - dictWord{135, 0, 1294}, - dictWord{ - 7, - 11, - 723, - }, - dictWord{135, 11, 1135}, - dictWord{6, 0, 216}, - dictWord{7, 0, 901}, - dictWord{7, 0, 1343}, - dictWord{8, 0, 493}, - dictWord{134, 11, 403}, - dictWord{ - 7, - 11, - 719, - }, - dictWord{8, 11, 809}, - dictWord{136, 11, 834}, - dictWord{5, 11, 210}, - dictWord{6, 11, 213}, - dictWord{7, 11, 60}, - dictWord{10, 11, 364}, - dictWord{ - 139, - 11, - 135, - }, - dictWord{7, 0, 341}, - dictWord{11, 0, 219}, - dictWord{5, 11, 607}, - dictWord{8, 11, 326}, - dictWord{136, 11, 490}, - dictWord{4, 11, 701}, - dictWord{ - 5, - 11, - 472, - }, - dictWord{5, 11, 639}, - dictWord{7, 11, 1249}, - dictWord{9, 11, 758}, - dictWord{139, 11, 896}, - dictWord{135, 11, 380}, - dictWord{135, 11, 1947}, - dictWord{139, 0, 130}, - dictWord{135, 0, 1734}, - dictWord{10, 0, 115}, - dictWord{11, 0, 420}, - dictWord{12, 0, 154}, - dictWord{13, 0, 404}, - dictWord{14, 0, 346}, - dictWord{143, 0, 54}, - dictWord{134, 
10, 129}, - dictWord{4, 11, 386}, - dictWord{7, 11, 41}, - dictWord{8, 11, 405}, - dictWord{9, 11, 497}, - dictWord{11, 11, 110}, - dictWord{11, 11, 360}, - dictWord{15, 11, 37}, - dictWord{144, 11, 84}, - dictWord{141, 11, 282}, - dictWord{5, 11, 46}, - dictWord{7, 11, 1452}, - dictWord{7, 11, 1480}, - dictWord{8, 11, 634}, - dictWord{140, 11, 472}, - dictWord{4, 11, 524}, - dictWord{136, 11, 810}, - dictWord{10, 11, 238}, - dictWord{141, 11, 33}, - dictWord{ - 133, - 0, - 604, - }, - dictWord{5, 0, 1011}, - dictWord{136, 0, 701}, - dictWord{8, 0, 856}, - dictWord{8, 0, 858}, - dictWord{8, 0, 879}, - dictWord{12, 0, 702}, - dictWord{142, 0, 447}, - dictWord{4, 0, 54}, - dictWord{5, 0, 666}, - dictWord{7, 0, 1039}, - dictWord{7, 0, 1130}, - dictWord{9, 0, 195}, - dictWord{138, 0, 302}, - dictWord{4, 10, 25}, - dictWord{ - 5, - 10, - 60, - }, - dictWord{6, 10, 504}, - dictWord{7, 10, 614}, - dictWord{7, 10, 1155}, - dictWord{140, 10, 0}, - dictWord{7, 10, 1248}, - dictWord{11, 10, 621}, - dictWord{ - 139, - 10, - 702, - }, - dictWord{133, 11, 997}, - dictWord{137, 10, 321}, - dictWord{134, 0, 1669}, - dictWord{134, 0, 1791}, - dictWord{4, 10, 379}, - dictWord{ - 135, - 10, - 1397, - }, - dictWord{138, 11, 372}, - dictWord{5, 11, 782}, - dictWord{5, 11, 829}, - dictWord{134, 11, 1738}, - dictWord{135, 0, 1228}, - dictWord{4, 10, 118}, - dictWord{6, 10, 274}, - dictWord{6, 10, 361}, - dictWord{7, 10, 75}, - dictWord{141, 10, 441}, - dictWord{132, 0, 623}, - dictWord{9, 11, 279}, - dictWord{10, 11, 407}, - dictWord{14, 11, 84}, - dictWord{150, 11, 18}, - dictWord{137, 10, 841}, - dictWord{135, 0, 798}, - dictWord{140, 10, 693}, - dictWord{5, 10, 314}, - dictWord{6, 10, 221}, - dictWord{7, 10, 419}, - dictWord{10, 10, 650}, - dictWord{11, 10, 396}, - dictWord{12, 10, 156}, - dictWord{13, 10, 369}, - dictWord{14, 10, 333}, - dictWord{ - 145, - 10, - 47, - }, - dictWord{135, 11, 1372}, - dictWord{7, 0, 122}, - dictWord{9, 0, 259}, - dictWord{10, 0, 84}, - dictWord{11, 0, 
470}, - dictWord{12, 0, 541}, - dictWord{ - 141, - 0, - 379, - }, - dictWord{134, 0, 837}, - dictWord{8, 0, 1013}, - dictWord{4, 11, 78}, - dictWord{5, 11, 96}, - dictWord{5, 11, 182}, - dictWord{7, 11, 1724}, - dictWord{ - 7, - 11, - 1825, - }, - dictWord{10, 11, 394}, - dictWord{10, 11, 471}, - dictWord{11, 11, 532}, - dictWord{14, 11, 340}, - dictWord{145, 11, 88}, - dictWord{134, 0, 577}, - dictWord{135, 11, 1964}, - dictWord{132, 10, 913}, - dictWord{134, 0, 460}, - dictWord{8, 0, 891}, - dictWord{10, 0, 901}, - dictWord{10, 0, 919}, - dictWord{10, 0, 932}, - dictWord{12, 0, 715}, - dictWord{12, 0, 728}, - dictWord{12, 0, 777}, - dictWord{14, 0, 457}, - dictWord{144, 0, 103}, - dictWord{5, 0, 82}, - dictWord{5, 0, 131}, - dictWord{ - 7, - 0, - 1755, - }, - dictWord{8, 0, 31}, - dictWord{9, 0, 168}, - dictWord{9, 0, 764}, - dictWord{139, 0, 869}, - dictWord{136, 10, 475}, - dictWord{6, 0, 605}, - dictWord{ - 5, - 10, - 1016, - }, - dictWord{9, 11, 601}, - dictWord{9, 11, 619}, - dictWord{10, 11, 505}, - dictWord{10, 11, 732}, - dictWord{11, 11, 355}, - dictWord{140, 11, 139}, - dictWord{ - 7, - 10, - 602, - }, - dictWord{8, 10, 179}, - dictWord{10, 10, 781}, - dictWord{140, 10, 126}, - dictWord{134, 0, 1246}, - dictWord{6, 10, 329}, - dictWord{138, 10, 111}, - dictWord{6, 11, 215}, - dictWord{7, 11, 1028}, - dictWord{7, 11, 1473}, - dictWord{7, 11, 1721}, - dictWord{9, 11, 424}, - dictWord{138, 11, 779}, - dictWord{5, 0, 278}, - dictWord{137, 0, 68}, - dictWord{6, 0, 932}, - dictWord{6, 0, 1084}, - dictWord{144, 0, 86}, - dictWord{4, 0, 163}, - dictWord{5, 0, 201}, - dictWord{5, 0, 307}, - dictWord{ - 5, - 0, - 310, - }, - dictWord{6, 0, 335}, - dictWord{7, 0, 284}, - dictWord{7, 0, 1660}, - dictWord{136, 0, 165}, - dictWord{136, 0, 781}, - dictWord{134, 0, 707}, - dictWord{6, 0, 33}, - dictWord{135, 0, 1244}, - dictWord{5, 10, 821}, - dictWord{6, 11, 67}, - dictWord{6, 10, 1687}, - dictWord{7, 11, 258}, - dictWord{7, 11, 1630}, - dictWord{9, 11, 354}, - 
dictWord{9, 11, 675}, - dictWord{10, 11, 830}, - dictWord{14, 11, 80}, - dictWord{145, 11, 80}, - dictWord{6, 11, 141}, - dictWord{7, 11, 225}, - dictWord{9, 11, 59}, - dictWord{9, 11, 607}, - dictWord{10, 11, 312}, - dictWord{11, 11, 687}, - dictWord{12, 11, 555}, - dictWord{13, 11, 373}, - dictWord{13, 11, 494}, - dictWord{148, 11, 58}, - dictWord{134, 0, 1113}, - dictWord{9, 0, 388}, - dictWord{5, 10, 71}, - dictWord{7, 10, 1407}, - dictWord{9, 10, 704}, - dictWord{10, 10, 261}, - dictWord{10, 10, 619}, - dictWord{11, 10, 547}, - dictWord{11, 10, 619}, - dictWord{143, 10, 157}, - dictWord{7, 0, 1953}, - dictWord{136, 0, 720}, - dictWord{138, 0, 203}, - dictWord{ - 7, - 10, - 2008, - }, - dictWord{9, 10, 337}, - dictWord{138, 10, 517}, - dictWord{6, 0, 326}, - dictWord{7, 0, 677}, - dictWord{137, 0, 425}, - dictWord{139, 11, 81}, - dictWord{ - 7, - 0, - 1316, - }, - dictWord{7, 0, 1412}, - dictWord{7, 0, 1839}, - dictWord{9, 0, 589}, - dictWord{11, 0, 241}, - dictWord{11, 0, 676}, - dictWord{11, 0, 811}, - dictWord{11, 0, 891}, - dictWord{12, 0, 140}, - dictWord{12, 0, 346}, - dictWord{12, 0, 479}, - dictWord{13, 0, 140}, - dictWord{13, 0, 381}, - dictWord{14, 0, 188}, - dictWord{18, 0, 30}, - dictWord{148, 0, 108}, - dictWord{5, 0, 416}, - dictWord{6, 10, 86}, - dictWord{6, 10, 603}, - dictWord{7, 10, 292}, - dictWord{7, 10, 561}, - dictWord{8, 10, 257}, - dictWord{ - 8, - 10, - 382, - }, - dictWord{9, 10, 721}, - dictWord{9, 10, 778}, - dictWord{11, 10, 581}, - dictWord{140, 10, 466}, - dictWord{4, 10, 486}, - dictWord{133, 10, 491}, - dictWord{134, 0, 1300}, - dictWord{132, 10, 72}, - dictWord{7, 0, 847}, - dictWord{6, 10, 265}, - dictWord{7, 11, 430}, - dictWord{139, 11, 46}, - dictWord{5, 11, 602}, - dictWord{6, 11, 106}, - dictWord{7, 11, 1786}, - dictWord{7, 11, 1821}, - dictWord{7, 11, 2018}, - dictWord{9, 11, 418}, - dictWord{137, 11, 763}, - dictWord{5, 0, 358}, - dictWord{7, 0, 535}, - dictWord{7, 0, 1184}, - dictWord{10, 0, 662}, - dictWord{13, 0, 
212}, - dictWord{13, 0, 304}, - dictWord{13, 0, 333}, - dictWord{145, 0, 98}, - dictWord{ - 5, - 11, - 65, - }, - dictWord{6, 11, 416}, - dictWord{7, 11, 1720}, - dictWord{7, 11, 1924}, - dictWord{8, 11, 677}, - dictWord{10, 11, 109}, - dictWord{11, 11, 14}, - dictWord{ - 11, - 11, - 70, - }, - dictWord{11, 11, 569}, - dictWord{11, 11, 735}, - dictWord{15, 11, 153}, - dictWord{148, 11, 80}, - dictWord{6, 0, 1823}, - dictWord{8, 0, 839}, - dictWord{ - 8, - 0, - 852, - }, - dictWord{8, 0, 903}, - dictWord{10, 0, 940}, - dictWord{12, 0, 707}, - dictWord{140, 0, 775}, - dictWord{135, 11, 1229}, - dictWord{6, 0, 1522}, - dictWord{ - 140, - 0, - 654, - }, - dictWord{136, 11, 595}, - dictWord{139, 0, 163}, - dictWord{141, 0, 314}, - dictWord{132, 0, 978}, - dictWord{4, 0, 601}, - dictWord{6, 0, 2035}, - dictWord{137, 10, 234}, - dictWord{5, 10, 815}, - dictWord{6, 10, 1688}, - dictWord{134, 10, 1755}, - dictWord{133, 0, 946}, - dictWord{136, 0, 434}, - dictWord{ - 6, - 10, - 197, - }, - dictWord{136, 10, 205}, - dictWord{7, 0, 411}, - dictWord{7, 0, 590}, - dictWord{8, 0, 631}, - dictWord{9, 0, 323}, - dictWord{10, 0, 355}, - dictWord{11, 0, 491}, - dictWord{12, 0, 143}, - dictWord{12, 0, 402}, - dictWord{13, 0, 73}, - dictWord{14, 0, 408}, - dictWord{15, 0, 107}, - dictWord{146, 0, 71}, - dictWord{7, 0, 1467}, - dictWord{ - 8, - 0, - 328, - }, - dictWord{10, 0, 544}, - dictWord{11, 0, 955}, - dictWord{12, 0, 13}, - dictWord{13, 0, 320}, - dictWord{145, 0, 83}, - dictWord{142, 0, 410}, - dictWord{ - 11, - 0, - 511, - }, - dictWord{13, 0, 394}, - dictWord{14, 0, 298}, - dictWord{14, 0, 318}, - dictWord{146, 0, 103}, - dictWord{6, 10, 452}, - dictWord{7, 10, 312}, - dictWord{ - 138, - 10, - 219, - }, - dictWord{138, 10, 589}, - dictWord{4, 10, 333}, - dictWord{9, 10, 176}, - dictWord{12, 10, 353}, - dictWord{141, 10, 187}, - dictWord{135, 11, 329}, - dictWord{132, 11, 469}, - dictWord{5, 0, 835}, - dictWord{134, 0, 483}, - dictWord{134, 11, 1743}, - dictWord{5, 11, 929}, - 
dictWord{6, 11, 340}, - dictWord{8, 11, 376}, - dictWord{136, 11, 807}, - dictWord{134, 10, 1685}, - dictWord{132, 0, 677}, - dictWord{5, 11, 218}, - dictWord{7, 11, 1610}, - dictWord{138, 11, 83}, - dictWord{ - 5, - 11, - 571, - }, - dictWord{135, 11, 1842}, - dictWord{132, 11, 455}, - dictWord{137, 0, 70}, - dictWord{135, 0, 1405}, - dictWord{7, 10, 135}, - dictWord{8, 10, 7}, - dictWord{ - 8, - 10, - 62, - }, - dictWord{9, 10, 243}, - dictWord{10, 10, 658}, - dictWord{10, 10, 697}, - dictWord{11, 10, 456}, - dictWord{139, 10, 756}, - dictWord{9, 10, 395}, - dictWord{138, 10, 79}, - dictWord{137, 0, 108}, - dictWord{6, 11, 161}, - dictWord{7, 11, 372}, - dictWord{137, 11, 597}, - dictWord{132, 11, 349}, - dictWord{ - 132, - 0, - 777, - }, - dictWord{132, 0, 331}, - dictWord{135, 10, 631}, - dictWord{133, 0, 747}, - dictWord{6, 11, 432}, - dictWord{6, 11, 608}, - dictWord{139, 11, 322}, - dictWord{138, 10, 835}, - dictWord{5, 11, 468}, - dictWord{7, 11, 1809}, - dictWord{10, 11, 325}, - dictWord{11, 11, 856}, - dictWord{12, 11, 345}, - dictWord{ - 143, - 11, - 104, - }, - dictWord{133, 11, 223}, - dictWord{7, 10, 406}, - dictWord{7, 10, 459}, - dictWord{8, 10, 606}, - dictWord{139, 10, 726}, - dictWord{132, 11, 566}, - dictWord{142, 0, 68}, - dictWord{4, 11, 59}, - dictWord{135, 11, 1394}, - dictWord{6, 11, 436}, - dictWord{139, 11, 481}, - dictWord{4, 11, 48}, - dictWord{5, 11, 271}, - dictWord{135, 11, 953}, - dictWord{139, 11, 170}, - dictWord{5, 11, 610}, - dictWord{136, 11, 457}, - dictWord{133, 11, 755}, - dictWord{135, 11, 1217}, - dictWord{ - 133, - 10, - 612, - }, - dictWord{132, 11, 197}, - dictWord{132, 0, 505}, - dictWord{4, 10, 372}, - dictWord{7, 10, 482}, - dictWord{8, 10, 158}, - dictWord{9, 10, 602}, - dictWord{ - 9, - 10, - 615, - }, - dictWord{10, 10, 245}, - dictWord{10, 10, 678}, - dictWord{10, 10, 744}, - dictWord{11, 10, 248}, - dictWord{139, 10, 806}, - dictWord{133, 0, 326}, - dictWord{5, 10, 854}, - dictWord{135, 10, 1991}, - dictWord{4, 
0, 691}, - dictWord{146, 0, 16}, - dictWord{6, 0, 628}, - dictWord{9, 0, 35}, - dictWord{10, 0, 680}, - dictWord{10, 0, 793}, - dictWord{11, 0, 364}, - dictWord{13, 0, 357}, - dictWord{143, 0, 164}, - dictWord{138, 0, 654}, - dictWord{6, 0, 32}, - dictWord{7, 0, 385}, - dictWord{ - 7, - 0, - 757, - }, - dictWord{7, 0, 1916}, - dictWord{8, 0, 37}, - dictWord{8, 0, 94}, - dictWord{8, 0, 711}, - dictWord{9, 0, 541}, - dictWord{10, 0, 162}, - dictWord{10, 0, 795}, - dictWord{ - 11, - 0, - 989, - }, - dictWord{11, 0, 1010}, - dictWord{12, 0, 14}, - dictWord{142, 0, 308}, - dictWord{133, 11, 217}, - dictWord{6, 0, 152}, - dictWord{6, 0, 349}, - dictWord{ - 6, - 0, - 1682, - }, - dictWord{7, 0, 1252}, - dictWord{8, 0, 112}, - dictWord{9, 0, 435}, - dictWord{9, 0, 668}, - dictWord{10, 0, 290}, - dictWord{10, 0, 319}, - dictWord{10, 0, 815}, - dictWord{11, 0, 180}, - dictWord{11, 0, 837}, - dictWord{12, 0, 240}, - dictWord{13, 0, 152}, - dictWord{13, 0, 219}, - dictWord{142, 0, 158}, - dictWord{4, 0, 581}, - dictWord{134, 0, 726}, - dictWord{5, 10, 195}, - dictWord{135, 10, 1685}, - dictWord{6, 0, 126}, - dictWord{7, 0, 573}, - dictWord{8, 0, 397}, - dictWord{142, 0, 44}, - dictWord{138, 0, 89}, - dictWord{7, 10, 1997}, - dictWord{8, 10, 730}, - dictWord{139, 10, 1006}, - dictWord{134, 0, 1531}, - dictWord{134, 0, 1167}, - dictWord{ - 5, - 0, - 926, - }, - dictWord{12, 0, 203}, - dictWord{133, 10, 751}, - dictWord{4, 11, 165}, - dictWord{7, 11, 1398}, - dictWord{135, 11, 1829}, - dictWord{7, 0, 1232}, - dictWord{137, 0, 531}, - dictWord{135, 10, 821}, - dictWord{134, 0, 943}, - dictWord{133, 0, 670}, - dictWord{4, 0, 880}, - dictWord{139, 0, 231}, - dictWord{ - 134, - 0, - 1617, - }, - dictWord{135, 0, 1957}, - dictWord{5, 11, 9}, - dictWord{7, 11, 297}, - dictWord{7, 11, 966}, - dictWord{140, 11, 306}, - dictWord{6, 0, 975}, - dictWord{ - 134, - 0, - 985, - }, - dictWord{5, 10, 950}, - dictWord{5, 10, 994}, - dictWord{134, 10, 351}, - dictWord{12, 11, 21}, - dictWord{151, 
11, 7}, - dictWord{5, 11, 146}, - dictWord{ - 6, - 11, - 411, - }, - dictWord{138, 11, 721}, - dictWord{7, 0, 242}, - dictWord{135, 0, 1942}, - dictWord{6, 11, 177}, - dictWord{135, 11, 467}, - dictWord{5, 0, 421}, - dictWord{ - 7, - 10, - 47, - }, - dictWord{137, 10, 684}, - dictWord{5, 0, 834}, - dictWord{7, 0, 1202}, - dictWord{8, 0, 14}, - dictWord{9, 0, 481}, - dictWord{137, 0, 880}, - dictWord{138, 0, 465}, - dictWord{6, 0, 688}, - dictWord{9, 0, 834}, - dictWord{132, 10, 350}, - dictWord{132, 0, 855}, - dictWord{4, 0, 357}, - dictWord{6, 0, 172}, - dictWord{7, 0, 143}, - dictWord{137, 0, 413}, - dictWord{133, 11, 200}, - dictWord{132, 0, 590}, - dictWord{7, 10, 1812}, - dictWord{13, 10, 259}, - dictWord{13, 10, 356}, - dictWord{ - 14, - 10, - 242, - }, - dictWord{147, 10, 114}, - dictWord{133, 10, 967}, - dictWord{11, 0, 114}, - dictWord{4, 10, 473}, - dictWord{7, 10, 623}, - dictWord{8, 10, 808}, - dictWord{ - 9, - 10, - 871, - }, - dictWord{9, 10, 893}, - dictWord{11, 10, 431}, - dictWord{12, 10, 112}, - dictWord{12, 10, 217}, - dictWord{12, 10, 243}, - dictWord{12, 10, 562}, - dictWord{ - 12, - 10, - 663, - }, - dictWord{12, 10, 683}, - dictWord{13, 10, 141}, - dictWord{13, 10, 197}, - dictWord{13, 10, 227}, - dictWord{13, 10, 406}, - dictWord{13, 10, 487}, - dictWord{14, 10, 156}, - dictWord{14, 10, 203}, - dictWord{14, 10, 224}, - dictWord{14, 10, 256}, - dictWord{18, 10, 58}, - dictWord{150, 10, 0}, - dictWord{ - 138, - 10, - 286, - }, - dictWord{4, 10, 222}, - dictWord{7, 10, 286}, - dictWord{136, 10, 629}, - dictWord{5, 0, 169}, - dictWord{7, 0, 333}, - dictWord{136, 0, 45}, - dictWord{ - 134, - 11, - 481, - }, - dictWord{132, 0, 198}, - dictWord{4, 0, 24}, - dictWord{5, 0, 140}, - dictWord{5, 0, 185}, - dictWord{7, 0, 1500}, - dictWord{11, 0, 565}, - dictWord{11, 0, 838}, - dictWord{4, 11, 84}, - dictWord{7, 11, 1482}, - dictWord{10, 11, 76}, - dictWord{138, 11, 142}, - dictWord{133, 0, 585}, - dictWord{141, 10, 306}, - dictWord{ - 133, - 11, - 
1015, - }, - dictWord{4, 11, 315}, - dictWord{5, 11, 507}, - dictWord{135, 11, 1370}, - dictWord{136, 10, 146}, - dictWord{6, 0, 691}, - dictWord{134, 0, 1503}, - dictWord{ - 4, - 0, - 334, - }, - dictWord{133, 0, 593}, - dictWord{4, 10, 465}, - dictWord{135, 10, 1663}, - dictWord{142, 11, 173}, - dictWord{135, 0, 913}, - dictWord{12, 0, 116}, - dictWord{134, 11, 1722}, - dictWord{134, 0, 1360}, - dictWord{132, 0, 802}, - dictWord{8, 11, 222}, - dictWord{8, 11, 476}, - dictWord{9, 11, 238}, - dictWord{ - 11, - 11, - 516, - }, - dictWord{11, 11, 575}, - dictWord{15, 11, 109}, - dictWord{146, 11, 100}, - dictWord{6, 0, 308}, - dictWord{9, 0, 673}, - dictWord{7, 10, 138}, - dictWord{ - 7, - 10, - 517, - }, - dictWord{139, 10, 238}, - dictWord{132, 0, 709}, - dictWord{6, 0, 1876}, - dictWord{6, 0, 1895}, - dictWord{9, 0, 994}, - dictWord{9, 0, 1006}, - dictWord{ - 12, - 0, - 829, - }, - dictWord{12, 0, 888}, - dictWord{12, 0, 891}, - dictWord{146, 0, 185}, - dictWord{148, 10, 94}, - dictWord{4, 0, 228}, - dictWord{133, 0, 897}, - dictWord{ - 7, - 0, - 1840, - }, - dictWord{5, 10, 495}, - dictWord{7, 10, 834}, - dictWord{9, 10, 733}, - dictWord{139, 10, 378}, - dictWord{133, 10, 559}, - dictWord{6, 10, 21}, - dictWord{ - 6, - 10, - 1737, - }, - dictWord{7, 10, 1444}, - dictWord{136, 10, 224}, - dictWord{4, 0, 608}, - dictWord{133, 0, 497}, - dictWord{6, 11, 40}, - dictWord{135, 11, 1781}, - dictWord{134, 0, 1573}, - dictWord{135, 0, 2039}, - dictWord{6, 0, 540}, - dictWord{136, 0, 136}, - dictWord{4, 0, 897}, - dictWord{5, 0, 786}, - dictWord{133, 10, 519}, - dictWord{6, 0, 1878}, - dictWord{6, 0, 1884}, - dictWord{9, 0, 938}, - dictWord{9, 0, 948}, - dictWord{9, 0, 955}, - dictWord{9, 0, 973}, - dictWord{9, 0, 1012}, - dictWord{ - 12, - 0, - 895, - }, - dictWord{12, 0, 927}, - dictWord{143, 0, 254}, - dictWord{134, 0, 1469}, - dictWord{133, 0, 999}, - dictWord{4, 0, 299}, - dictWord{135, 0, 1004}, - dictWord{ - 4, - 0, - 745, - }, - dictWord{133, 0, 578}, - 
dictWord{136, 11, 574}, - dictWord{133, 0, 456}, - dictWord{134, 0, 1457}, - dictWord{7, 0, 1679}, - dictWord{132, 10, 402}, - dictWord{7, 0, 693}, - dictWord{8, 0, 180}, - dictWord{12, 0, 163}, - dictWord{8, 10, 323}, - dictWord{136, 10, 479}, - dictWord{11, 10, 580}, - dictWord{142, 10, 201}, - dictWord{5, 10, 59}, - dictWord{135, 10, 672}, - dictWord{132, 11, 354}, - dictWord{146, 10, 34}, - dictWord{4, 0, 755}, - dictWord{135, 11, 1558}, - dictWord{ - 7, - 0, - 1740, - }, - dictWord{146, 0, 48}, - dictWord{4, 10, 85}, - dictWord{135, 10, 549}, - dictWord{139, 0, 338}, - dictWord{133, 10, 94}, - dictWord{134, 0, 1091}, - dictWord{135, 11, 469}, - dictWord{12, 0, 695}, - dictWord{12, 0, 704}, - dictWord{20, 0, 113}, - dictWord{5, 11, 830}, - dictWord{14, 11, 338}, - dictWord{148, 11, 81}, - dictWord{135, 0, 1464}, - dictWord{6, 10, 11}, - dictWord{135, 10, 187}, - dictWord{135, 0, 975}, - dictWord{13, 0, 335}, - dictWord{132, 10, 522}, - dictWord{ - 134, - 0, - 1979, - }, - dictWord{5, 11, 496}, - dictWord{135, 11, 203}, - dictWord{4, 10, 52}, - dictWord{135, 10, 661}, - dictWord{7, 0, 1566}, - dictWord{8, 0, 269}, - dictWord{ - 9, - 0, - 212, - }, - dictWord{9, 0, 718}, - dictWord{14, 0, 15}, - dictWord{14, 0, 132}, - dictWord{142, 0, 227}, - dictWord{4, 0, 890}, - dictWord{5, 0, 805}, - dictWord{5, 0, 819}, - dictWord{ - 5, - 0, - 961, - }, - dictWord{6, 0, 396}, - dictWord{6, 0, 1631}, - dictWord{6, 0, 1678}, - dictWord{7, 0, 1967}, - dictWord{7, 0, 2041}, - dictWord{9, 0, 630}, - dictWord{11, 0, 8}, - dictWord{11, 0, 1019}, - dictWord{12, 0, 176}, - dictWord{13, 0, 225}, - dictWord{14, 0, 292}, - dictWord{21, 0, 24}, - dictWord{4, 10, 383}, - dictWord{133, 10, 520}, - dictWord{134, 11, 547}, - dictWord{135, 11, 1748}, - dictWord{5, 11, 88}, - dictWord{137, 11, 239}, - dictWord{146, 11, 128}, - dictWord{7, 11, 650}, - dictWord{ - 135, - 11, - 1310, - }, - dictWord{4, 10, 281}, - dictWord{5, 10, 38}, - dictWord{7, 10, 194}, - dictWord{7, 10, 668}, - dictWord{7, 
10, 1893}, - dictWord{137, 10, 397}, - dictWord{135, 0, 1815}, - dictWord{9, 10, 635}, - dictWord{139, 10, 559}, - dictWord{7, 0, 1505}, - dictWord{10, 0, 190}, - dictWord{10, 0, 634}, - dictWord{11, 0, 792}, - dictWord{12, 0, 358}, - dictWord{140, 0, 447}, - dictWord{5, 0, 0}, - dictWord{6, 0, 536}, - dictWord{7, 0, 604}, - dictWord{13, 0, 445}, - dictWord{145, 0, 126}, - dictWord{ - 7, - 11, - 1076, - }, - dictWord{9, 11, 80}, - dictWord{11, 11, 78}, - dictWord{11, 11, 421}, - dictWord{11, 11, 534}, - dictWord{140, 11, 545}, - dictWord{8, 0, 966}, - dictWord{ - 10, - 0, - 1023, - }, - dictWord{14, 11, 369}, - dictWord{146, 11, 72}, - dictWord{135, 11, 1641}, - dictWord{6, 0, 232}, - dictWord{6, 0, 412}, - dictWord{7, 0, 1074}, - dictWord{ - 8, - 0, - 9, - }, - dictWord{8, 0, 157}, - dictWord{8, 0, 786}, - dictWord{9, 0, 196}, - dictWord{9, 0, 352}, - dictWord{9, 0, 457}, - dictWord{10, 0, 337}, - dictWord{11, 0, 232}, - dictWord{ - 11, - 0, - 877, - }, - dictWord{12, 0, 480}, - dictWord{140, 0, 546}, - dictWord{135, 0, 958}, - dictWord{4, 0, 382}, - dictWord{136, 0, 579}, - dictWord{4, 0, 212}, - dictWord{ - 135, - 0, - 1206, - }, - dictWord{4, 11, 497}, - dictWord{5, 11, 657}, - dictWord{135, 11, 1584}, - dictWord{132, 0, 681}, - dictWord{8, 0, 971}, - dictWord{138, 0, 965}, - dictWord{ - 5, - 10, - 448, - }, - dictWord{136, 10, 535}, - dictWord{14, 0, 16}, - dictWord{146, 0, 44}, - dictWord{11, 0, 584}, - dictWord{11, 0, 616}, - dictWord{14, 0, 275}, - dictWord{ - 11, - 11, - 584, - }, - dictWord{11, 11, 616}, - dictWord{142, 11, 275}, - dictWord{136, 11, 13}, - dictWord{7, 10, 610}, - dictWord{135, 10, 1501}, - dictWord{7, 11, 642}, - dictWord{8, 11, 250}, - dictWord{11, 11, 123}, - dictWord{11, 11, 137}, - dictWord{13, 11, 48}, - dictWord{142, 11, 95}, - dictWord{133, 0, 655}, - dictWord{17, 0, 67}, - dictWord{147, 0, 74}, - dictWord{134, 0, 751}, - dictWord{134, 0, 1967}, - dictWord{6, 0, 231}, - dictWord{136, 0, 423}, - dictWord{5, 0, 300}, - dictWord{138, 
0, 1016}, - dictWord{4, 10, 319}, - dictWord{5, 10, 699}, - dictWord{138, 10, 673}, - dictWord{6, 0, 237}, - dictWord{7, 0, 611}, - dictWord{8, 0, 100}, - dictWord{9, 0, 416}, - dictWord{ - 11, - 0, - 335, - }, - dictWord{12, 0, 173}, - dictWord{18, 0, 101}, - dictWord{6, 10, 336}, - dictWord{8, 10, 552}, - dictWord{9, 10, 285}, - dictWord{10, 10, 99}, - dictWord{ - 139, - 10, - 568, - }, - dictWord{134, 0, 1370}, - dictWord{7, 10, 1406}, - dictWord{9, 10, 218}, - dictWord{141, 10, 222}, - dictWord{133, 10, 256}, - dictWord{ - 135, - 0, - 1208, - }, - dictWord{14, 11, 213}, - dictWord{148, 11, 38}, - dictWord{6, 0, 1219}, - dictWord{135, 11, 1642}, - dictWord{13, 0, 417}, - dictWord{14, 0, 129}, - dictWord{143, 0, 15}, - dictWord{10, 11, 545}, - dictWord{140, 11, 301}, - dictWord{17, 10, 39}, - dictWord{148, 10, 36}, - dictWord{133, 0, 199}, - dictWord{4, 11, 904}, - dictWord{133, 11, 794}, - dictWord{12, 0, 427}, - dictWord{146, 0, 38}, - dictWord{134, 0, 949}, - dictWord{8, 0, 665}, - dictWord{135, 10, 634}, - dictWord{ - 132, - 10, - 618, - }, - dictWord{135, 10, 259}, - dictWord{132, 10, 339}, - dictWord{133, 11, 761}, - dictWord{141, 10, 169}, - dictWord{132, 10, 759}, - dictWord{5, 0, 688}, - dictWord{7, 0, 539}, - dictWord{135, 0, 712}, - dictWord{7, 11, 386}, - dictWord{138, 11, 713}, - dictWord{134, 0, 1186}, - dictWord{6, 11, 7}, - dictWord{6, 11, 35}, - dictWord{ - 7, - 11, - 147, - }, - dictWord{7, 11, 1069}, - dictWord{7, 11, 1568}, - dictWord{7, 11, 1575}, - dictWord{7, 11, 1917}, - dictWord{8, 11, 43}, - dictWord{8, 11, 208}, - dictWord{ - 9, - 11, - 128, - }, - dictWord{9, 11, 866}, - dictWord{10, 11, 20}, - dictWord{11, 11, 981}, - dictWord{147, 11, 33}, - dictWord{7, 11, 893}, - dictWord{8, 10, 482}, - dictWord{141, 11, 424}, - dictWord{6, 0, 312}, - dictWord{6, 0, 1715}, - dictWord{10, 0, 584}, - dictWord{11, 0, 546}, - dictWord{11, 0, 692}, - dictWord{12, 0, 259}, - dictWord{ - 12, - 0, - 295, - }, - dictWord{13, 0, 46}, - dictWord{141, 0, 154}, 
- dictWord{5, 10, 336}, - dictWord{6, 10, 341}, - dictWord{6, 10, 478}, - dictWord{6, 10, 1763}, - dictWord{ - 136, - 10, - 386, - }, - dictWord{137, 0, 151}, - dictWord{132, 0, 588}, - dictWord{152, 0, 4}, - dictWord{6, 11, 322}, - dictWord{9, 11, 552}, - dictWord{11, 11, 274}, - dictWord{ - 13, - 11, - 209, - }, - dictWord{13, 11, 499}, - dictWord{14, 11, 85}, - dictWord{15, 11, 126}, - dictWord{145, 11, 70}, - dictWord{135, 10, 73}, - dictWord{4, 0, 231}, - dictWord{ - 5, - 0, - 61, - }, - dictWord{6, 0, 104}, - dictWord{7, 0, 729}, - dictWord{7, 0, 964}, - dictWord{7, 0, 1658}, - dictWord{140, 0, 414}, - dictWord{6, 0, 263}, - dictWord{138, 0, 757}, - dictWord{135, 10, 1971}, - dictWord{4, 0, 612}, - dictWord{133, 0, 561}, - dictWord{132, 0, 320}, - dictWord{135, 10, 1344}, - dictWord{8, 11, 83}, - dictWord{ - 8, - 11, - 817, - }, - dictWord{9, 11, 28}, - dictWord{9, 11, 29}, - dictWord{9, 11, 885}, - dictWord{10, 11, 387}, - dictWord{11, 11, 633}, - dictWord{11, 11, 740}, - dictWord{ - 13, - 11, - 235, - }, - dictWord{13, 11, 254}, - dictWord{15, 11, 143}, - dictWord{143, 11, 146}, - dictWord{5, 10, 396}, - dictWord{134, 10, 501}, - dictWord{140, 11, 49}, - dictWord{132, 0, 225}, - dictWord{4, 10, 929}, - dictWord{5, 10, 799}, - dictWord{8, 10, 46}, - dictWord{136, 10, 740}, - dictWord{4, 0, 405}, - dictWord{7, 0, 817}, - dictWord{ - 14, - 0, - 58, - }, - dictWord{17, 0, 37}, - dictWord{146, 0, 124}, - dictWord{133, 0, 974}, - dictWord{4, 11, 412}, - dictWord{133, 11, 581}, - dictWord{4, 10, 892}, - dictWord{ - 133, - 10, - 770, - }, - dictWord{4, 0, 996}, - dictWord{134, 0, 2026}, - dictWord{4, 0, 527}, - dictWord{5, 0, 235}, - dictWord{7, 0, 1239}, - dictWord{11, 0, 131}, - dictWord{ - 140, - 0, - 370, - }, - dictWord{9, 0, 16}, - dictWord{13, 0, 386}, - dictWord{135, 11, 421}, - dictWord{7, 0, 956}, - dictWord{7, 0, 1157}, - dictWord{7, 0, 1506}, - dictWord{7, 0, 1606}, - dictWord{7, 0, 1615}, - dictWord{7, 0, 1619}, - dictWord{7, 0, 1736}, - dictWord{7, 0, 
1775}, - dictWord{8, 0, 590}, - dictWord{9, 0, 324}, - dictWord{9, 0, 736}, - dictWord{ - 9, - 0, - 774, - }, - dictWord{9, 0, 776}, - dictWord{9, 0, 784}, - dictWord{10, 0, 567}, - dictWord{10, 0, 708}, - dictWord{11, 0, 518}, - dictWord{11, 0, 613}, - dictWord{11, 0, 695}, - dictWord{11, 0, 716}, - dictWord{11, 0, 739}, - dictWord{11, 0, 770}, - dictWord{11, 0, 771}, - dictWord{11, 0, 848}, - dictWord{11, 0, 857}, - dictWord{11, 0, 931}, - dictWord{ - 11, - 0, - 947, - }, - dictWord{12, 0, 326}, - dictWord{12, 0, 387}, - dictWord{12, 0, 484}, - dictWord{12, 0, 528}, - dictWord{12, 0, 552}, - dictWord{12, 0, 613}, - dictWord{ - 13, - 0, - 189, - }, - dictWord{13, 0, 256}, - dictWord{13, 0, 340}, - dictWord{13, 0, 432}, - dictWord{13, 0, 436}, - dictWord{13, 0, 440}, - dictWord{13, 0, 454}, - dictWord{14, 0, 174}, - dictWord{14, 0, 220}, - dictWord{14, 0, 284}, - dictWord{14, 0, 390}, - dictWord{145, 0, 121}, - dictWord{135, 10, 158}, - dictWord{9, 0, 137}, - dictWord{138, 0, 221}, - dictWord{4, 11, 110}, - dictWord{10, 11, 415}, - dictWord{10, 11, 597}, - dictWord{142, 11, 206}, - dictWord{141, 11, 496}, - dictWord{135, 11, 205}, - dictWord{ - 151, - 10, - 25, - }, - dictWord{135, 11, 778}, - dictWord{7, 11, 1656}, - dictWord{7, 10, 2001}, - dictWord{9, 11, 369}, - dictWord{10, 11, 338}, - dictWord{10, 11, 490}, - dictWord{11, 11, 154}, - dictWord{11, 11, 545}, - dictWord{11, 11, 775}, - dictWord{13, 11, 77}, - dictWord{141, 11, 274}, - dictWord{4, 11, 444}, - dictWord{ - 10, - 11, - 146, - }, - dictWord{140, 11, 9}, - dictWord{7, 0, 390}, - dictWord{138, 0, 140}, - dictWord{135, 0, 1144}, - dictWord{134, 0, 464}, - dictWord{7, 10, 1461}, - dictWord{ - 140, - 10, - 91, - }, - dictWord{132, 10, 602}, - dictWord{4, 11, 283}, - dictWord{135, 11, 1194}, - dictWord{5, 0, 407}, - dictWord{11, 0, 204}, - dictWord{11, 0, 243}, - dictWord{ - 11, - 0, - 489, - }, - dictWord{12, 0, 293}, - dictWord{19, 0, 37}, - dictWord{20, 0, 73}, - dictWord{150, 0, 38}, - dictWord{7, 0, 
1218}, - dictWord{136, 0, 303}, - dictWord{ - 5, - 0, - 325, - }, - dictWord{8, 0, 5}, - dictWord{8, 0, 227}, - dictWord{9, 0, 105}, - dictWord{10, 0, 585}, - dictWord{12, 0, 614}, - dictWord{4, 10, 13}, - dictWord{5, 10, 567}, - dictWord{ - 7, - 10, - 1498, - }, - dictWord{9, 10, 124}, - dictWord{11, 10, 521}, - dictWord{140, 10, 405}, - dictWord{135, 10, 1006}, - dictWord{7, 0, 800}, - dictWord{10, 0, 12}, - dictWord{134, 11, 1720}, - dictWord{135, 0, 1783}, - dictWord{132, 10, 735}, - dictWord{138, 10, 812}, - dictWord{4, 10, 170}, - dictWord{135, 10, 323}, - dictWord{ - 6, - 0, - 621, - }, - dictWord{13, 0, 504}, - dictWord{144, 0, 89}, - dictWord{5, 10, 304}, - dictWord{135, 10, 1403}, - dictWord{137, 11, 216}, - dictWord{6, 0, 920}, - dictWord{ - 6, - 0, - 1104, - }, - dictWord{9, 11, 183}, - dictWord{139, 11, 286}, - dictWord{4, 0, 376}, - dictWord{133, 10, 742}, - dictWord{134, 0, 218}, - dictWord{8, 0, 641}, - dictWord{ - 11, - 0, - 388, - }, - dictWord{140, 0, 580}, - dictWord{7, 0, 454}, - dictWord{7, 0, 782}, - dictWord{8, 0, 768}, - dictWord{140, 0, 686}, - dictWord{137, 11, 33}, - dictWord{ - 133, - 10, - 111, - }, - dictWord{144, 0, 0}, - dictWord{10, 0, 676}, - dictWord{140, 0, 462}, - dictWord{6, 0, 164}, - dictWord{136, 11, 735}, - dictWord{133, 10, 444}, - dictWord{ - 150, - 0, - 50, - }, - dictWord{7, 11, 1862}, - dictWord{12, 11, 491}, - dictWord{12, 11, 520}, - dictWord{13, 11, 383}, - dictWord{14, 11, 244}, - dictWord{146, 11, 12}, - dictWord{ - 5, - 11, - 132, - }, - dictWord{9, 11, 486}, - dictWord{9, 11, 715}, - dictWord{10, 11, 458}, - dictWord{11, 11, 373}, - dictWord{11, 11, 668}, - dictWord{11, 11, 795}, - dictWord{11, 11, 897}, - dictWord{12, 11, 272}, - dictWord{12, 11, 424}, - dictWord{12, 11, 539}, - dictWord{12, 11, 558}, - dictWord{14, 11, 245}, - dictWord{ - 14, - 11, - 263, - }, - dictWord{14, 11, 264}, - dictWord{14, 11, 393}, - dictWord{142, 11, 403}, - dictWord{8, 10, 123}, - dictWord{15, 10, 6}, - dictWord{144, 10, 7}, - 
dictWord{ - 6, - 0, - 285, - }, - dictWord{8, 0, 654}, - dictWord{11, 0, 749}, - dictWord{12, 0, 190}, - dictWord{12, 0, 327}, - dictWord{13, 0, 120}, - dictWord{13, 0, 121}, - dictWord{13, 0, 327}, - dictWord{15, 0, 47}, - dictWord{146, 0, 40}, - dictWord{5, 11, 8}, - dictWord{6, 11, 89}, - dictWord{6, 11, 400}, - dictWord{7, 11, 1569}, - dictWord{7, 11, 1623}, - dictWord{ - 7, - 11, - 1850, - }, - dictWord{8, 11, 218}, - dictWord{8, 11, 422}, - dictWord{9, 11, 570}, - dictWord{138, 11, 626}, - dictWord{6, 11, 387}, - dictWord{7, 11, 882}, - dictWord{141, 11, 111}, - dictWord{6, 0, 343}, - dictWord{7, 0, 195}, - dictWord{9, 0, 226}, - dictWord{10, 0, 197}, - dictWord{10, 0, 575}, - dictWord{11, 0, 502}, - dictWord{ - 11, - 0, - 899, - }, - dictWord{6, 11, 224}, - dictWord{7, 11, 877}, - dictWord{137, 11, 647}, - dictWord{5, 10, 937}, - dictWord{135, 10, 100}, - dictWord{135, 11, 790}, - dictWord{150, 0, 29}, - dictWord{147, 0, 8}, - dictWord{134, 0, 1812}, - dictWord{149, 0, 8}, - dictWord{135, 11, 394}, - dictWord{7, 0, 1125}, - dictWord{9, 0, 143}, - dictWord{ - 11, - 0, - 61, - }, - dictWord{14, 0, 405}, - dictWord{150, 0, 21}, - dictWord{10, 11, 755}, - dictWord{147, 11, 29}, - dictWord{9, 11, 378}, - dictWord{141, 11, 162}, - dictWord{135, 10, 922}, - dictWord{5, 10, 619}, - dictWord{133, 10, 698}, - dictWord{134, 0, 1327}, - dictWord{6, 0, 1598}, - dictWord{137, 0, 575}, - dictWord{ - 9, - 11, - 569, - }, - dictWord{12, 11, 12}, - dictWord{12, 11, 81}, - dictWord{12, 11, 319}, - dictWord{13, 11, 69}, - dictWord{14, 11, 259}, - dictWord{16, 11, 87}, - dictWord{ - 17, - 11, - 1, - }, - dictWord{17, 11, 21}, - dictWord{17, 11, 24}, - dictWord{18, 11, 15}, - dictWord{18, 11, 56}, - dictWord{18, 11, 59}, - dictWord{18, 11, 127}, - dictWord{18, 11, 154}, - dictWord{19, 11, 19}, - dictWord{148, 11, 31}, - dictWord{6, 0, 895}, - dictWord{135, 11, 1231}, - dictWord{5, 0, 959}, - dictWord{7, 11, 124}, - dictWord{136, 11, 38}, - dictWord{5, 11, 261}, - dictWord{7, 11, 
78}, - dictWord{7, 11, 199}, - dictWord{8, 11, 815}, - dictWord{9, 11, 126}, - dictWord{138, 11, 342}, - dictWord{5, 10, 917}, - dictWord{134, 10, 1659}, - dictWord{7, 0, 1759}, - dictWord{5, 11, 595}, - dictWord{135, 11, 1863}, - dictWord{136, 0, 173}, - dictWord{134, 0, 266}, - dictWord{ - 142, - 0, - 261, - }, - dictWord{132, 11, 628}, - dictWord{5, 10, 251}, - dictWord{5, 10, 956}, - dictWord{8, 10, 268}, - dictWord{9, 10, 214}, - dictWord{146, 10, 142}, - dictWord{ - 7, - 11, - 266, - }, - dictWord{136, 11, 804}, - dictWord{135, 11, 208}, - dictWord{6, 11, 79}, - dictWord{7, 11, 1021}, - dictWord{135, 11, 1519}, - dictWord{11, 11, 704}, - dictWord{141, 11, 396}, - dictWord{5, 10, 346}, - dictWord{5, 10, 711}, - dictWord{136, 10, 390}, - dictWord{136, 11, 741}, - dictWord{134, 11, 376}, - dictWord{ - 134, - 0, - 1427, - }, - dictWord{6, 0, 1033}, - dictWord{6, 0, 1217}, - dictWord{136, 0, 300}, - dictWord{133, 10, 624}, - dictWord{6, 11, 100}, - dictWord{7, 11, 244}, - dictWord{ - 7, - 11, - 632, - }, - dictWord{7, 11, 1609}, - dictWord{8, 11, 178}, - dictWord{8, 11, 638}, - dictWord{141, 11, 58}, - dictWord{6, 0, 584}, - dictWord{5, 10, 783}, - dictWord{ - 7, - 10, - 1998, - }, - dictWord{135, 10, 2047}, - dictWord{5, 0, 427}, - dictWord{5, 0, 734}, - dictWord{7, 0, 478}, - dictWord{136, 0, 52}, - dictWord{7, 0, 239}, - dictWord{ - 11, - 0, - 217, - }, - dictWord{142, 0, 165}, - dictWord{134, 0, 1129}, - dictWord{6, 0, 168}, - dictWord{6, 0, 1734}, - dictWord{7, 0, 20}, - dictWord{7, 0, 1056}, - dictWord{8, 0, 732}, - dictWord{9, 0, 406}, - dictWord{9, 0, 911}, - dictWord{138, 0, 694}, - dictWord{132, 10, 594}, - dictWord{133, 11, 791}, - dictWord{7, 11, 686}, - dictWord{8, 11, 33}, - dictWord{8, 11, 238}, - dictWord{10, 11, 616}, - dictWord{11, 11, 467}, - dictWord{11, 11, 881}, - dictWord{13, 11, 217}, - dictWord{13, 11, 253}, - dictWord{ - 142, - 11, - 268, - }, - dictWord{137, 11, 476}, - dictWord{134, 0, 418}, - dictWord{133, 0, 613}, - dictWord{132, 0, 
632}, - dictWord{132, 11, 447}, - dictWord{7, 0, 32}, - dictWord{ - 7, - 0, - 984, - }, - dictWord{8, 0, 85}, - dictWord{8, 0, 709}, - dictWord{9, 0, 579}, - dictWord{9, 0, 847}, - dictWord{9, 0, 856}, - dictWord{10, 0, 799}, - dictWord{11, 0, 258}, - dictWord{ - 11, - 0, - 1007, - }, - dictWord{12, 0, 331}, - dictWord{12, 0, 615}, - dictWord{13, 0, 188}, - dictWord{13, 0, 435}, - dictWord{14, 0, 8}, - dictWord{15, 0, 165}, - dictWord{ - 16, - 0, - 27, - }, - dictWord{20, 0, 40}, - dictWord{144, 11, 35}, - dictWord{4, 11, 128}, - dictWord{5, 11, 415}, - dictWord{6, 11, 462}, - dictWord{7, 11, 294}, - dictWord{7, 11, 578}, - dictWord{10, 11, 710}, - dictWord{139, 11, 86}, - dictWord{5, 0, 694}, - dictWord{136, 0, 909}, - dictWord{7, 0, 1109}, - dictWord{11, 0, 7}, - dictWord{5, 10, 37}, - dictWord{ - 6, - 10, - 39, - }, - dictWord{6, 10, 451}, - dictWord{7, 10, 218}, - dictWord{7, 10, 1166}, - dictWord{7, 10, 1687}, - dictWord{8, 10, 662}, - dictWord{144, 10, 2}, - dictWord{ - 136, - 11, - 587, - }, - dictWord{6, 11, 427}, - dictWord{7, 11, 1018}, - dictWord{138, 11, 692}, - dictWord{4, 11, 195}, - dictWord{6, 10, 508}, - dictWord{135, 11, 802}, - dictWord{4, 0, 167}, - dictWord{135, 0, 82}, - dictWord{5, 0, 62}, - dictWord{6, 0, 24}, - dictWord{6, 0, 534}, - dictWord{7, 0, 74}, - dictWord{7, 0, 678}, - dictWord{7, 0, 684}, - dictWord{ - 7, - 0, - 1043, - }, - dictWord{7, 0, 1072}, - dictWord{8, 0, 280}, - dictWord{8, 0, 541}, - dictWord{8, 0, 686}, - dictWord{9, 0, 258}, - dictWord{10, 0, 519}, - dictWord{11, 0, 252}, - dictWord{140, 0, 282}, - dictWord{138, 0, 33}, - dictWord{4, 0, 359}, - dictWord{133, 11, 738}, - dictWord{7, 0, 980}, - dictWord{9, 0, 328}, - dictWord{13, 0, 186}, - dictWord{13, 0, 364}, - dictWord{7, 10, 635}, - dictWord{7, 10, 796}, - dictWord{8, 10, 331}, - dictWord{9, 10, 330}, - dictWord{9, 10, 865}, - dictWord{10, 10, 119}, - dictWord{ - 10, - 10, - 235, - }, - dictWord{11, 10, 111}, - dictWord{11, 10, 129}, - dictWord{11, 10, 240}, - 
dictWord{12, 10, 31}, - dictWord{12, 10, 66}, - dictWord{12, 10, 222}, - dictWord{12, 10, 269}, - dictWord{12, 10, 599}, - dictWord{12, 10, 684}, - dictWord{12, 10, 689}, - dictWord{12, 10, 691}, - dictWord{142, 10, 345}, - dictWord{ - 137, - 10, - 527, - }, - dictWord{6, 0, 596}, - dictWord{7, 0, 585}, - dictWord{135, 10, 702}, - dictWord{134, 11, 1683}, - dictWord{133, 0, 211}, - dictWord{6, 0, 145}, - dictWord{ - 141, - 0, - 336, - }, - dictWord{134, 0, 1130}, - dictWord{7, 0, 873}, - dictWord{6, 10, 37}, - dictWord{7, 10, 1666}, - dictWord{8, 10, 195}, - dictWord{8, 10, 316}, - dictWord{ - 9, - 10, - 178, - }, - dictWord{9, 10, 276}, - dictWord{9, 10, 339}, - dictWord{9, 10, 536}, - dictWord{10, 10, 102}, - dictWord{10, 10, 362}, - dictWord{10, 10, 785}, - dictWord{ - 11, - 10, - 55, - }, - dictWord{11, 10, 149}, - dictWord{11, 10, 773}, - dictWord{13, 10, 416}, - dictWord{13, 10, 419}, - dictWord{14, 10, 38}, - dictWord{14, 10, 41}, - dictWord{ - 142, - 10, - 210, - }, - dictWord{8, 0, 840}, - dictWord{136, 0, 841}, - dictWord{132, 0, 263}, - dictWord{5, 11, 3}, - dictWord{8, 11, 578}, - dictWord{9, 11, 118}, - dictWord{ - 10, - 11, - 705, - }, - dictWord{12, 11, 383}, - dictWord{141, 11, 279}, - dictWord{132, 0, 916}, - dictWord{133, 11, 229}, - dictWord{133, 10, 645}, - dictWord{15, 0, 155}, - dictWord{16, 0, 79}, - dictWord{8, 11, 102}, - dictWord{10, 11, 578}, - dictWord{10, 11, 672}, - dictWord{12, 11, 496}, - dictWord{13, 11, 408}, - dictWord{14, 11, 121}, - dictWord{145, 11, 106}, - dictWord{4, 0, 599}, - dictWord{5, 0, 592}, - dictWord{6, 0, 1634}, - dictWord{7, 0, 5}, - dictWord{7, 0, 55}, - dictWord{7, 0, 67}, - dictWord{7, 0, 97}, - dictWord{7, 0, 691}, - dictWord{7, 0, 979}, - dictWord{7, 0, 1600}, - dictWord{7, 0, 1697}, - dictWord{8, 0, 207}, - dictWord{8, 0, 214}, - dictWord{8, 0, 231}, - dictWord{8, 0, 294}, - dictWord{8, 0, 336}, - dictWord{8, 0, 428}, - dictWord{8, 0, 471}, - dictWord{8, 0, 622}, - dictWord{8, 0, 626}, - dictWord{8, 0, 679}, 
- dictWord{8, 0, 759}, - dictWord{8, 0, 829}, - dictWord{9, 0, 11}, - dictWord{9, 0, 246}, - dictWord{9, 0, 484}, - dictWord{9, 0, 573}, - dictWord{9, 0, 706}, - dictWord{9, 0, 762}, - dictWord{9, 0, 798}, - dictWord{9, 0, 855}, - dictWord{9, 0, 870}, - dictWord{9, 0, 912}, - dictWord{10, 0, 303}, - dictWord{10, 0, 335}, - dictWord{10, 0, 424}, - dictWord{10, 0, 461}, - dictWord{10, 0, 543}, - dictWord{ - 10, - 0, - 759, - }, - dictWord{10, 0, 814}, - dictWord{11, 0, 59}, - dictWord{11, 0, 199}, - dictWord{11, 0, 235}, - dictWord{11, 0, 590}, - dictWord{11, 0, 631}, - dictWord{11, 0, 929}, - dictWord{11, 0, 963}, - dictWord{11, 0, 987}, - dictWord{12, 0, 114}, - dictWord{12, 0, 182}, - dictWord{12, 0, 226}, - dictWord{12, 0, 332}, - dictWord{12, 0, 439}, - dictWord{12, 0, 575}, - dictWord{12, 0, 598}, - dictWord{12, 0, 675}, - dictWord{13, 0, 8}, - dictWord{13, 0, 125}, - dictWord{13, 0, 194}, - dictWord{13, 0, 287}, - dictWord{ - 14, - 0, - 197, - }, - dictWord{14, 0, 383}, - dictWord{15, 0, 53}, - dictWord{17, 0, 63}, - dictWord{19, 0, 46}, - dictWord{19, 0, 98}, - dictWord{19, 0, 106}, - dictWord{148, 0, 85}, - dictWord{ - 7, - 0, - 1356, - }, - dictWord{132, 10, 290}, - dictWord{6, 10, 70}, - dictWord{7, 10, 1292}, - dictWord{10, 10, 762}, - dictWord{139, 10, 288}, - dictWord{150, 11, 55}, - dictWord{4, 0, 593}, - dictWord{8, 11, 115}, - dictWord{8, 11, 350}, - dictWord{9, 11, 489}, - dictWord{10, 11, 128}, - dictWord{11, 11, 306}, - dictWord{12, 11, 373}, - dictWord{14, 11, 30}, - dictWord{17, 11, 79}, - dictWord{147, 11, 80}, - dictWord{135, 11, 1235}, - dictWord{134, 0, 1392}, - dictWord{4, 11, 230}, - dictWord{ - 133, - 11, - 702, - }, - dictWord{147, 0, 126}, - dictWord{7, 10, 131}, - dictWord{7, 10, 422}, - dictWord{8, 10, 210}, - dictWord{140, 10, 573}, - dictWord{134, 0, 1179}, - dictWord{ - 139, - 11, - 435, - }, - dictWord{139, 10, 797}, - dictWord{134, 11, 1728}, - dictWord{4, 0, 162}, - dictWord{18, 11, 26}, - dictWord{19, 11, 42}, - dictWord{20, 
11, 43}, - dictWord{21, 11, 0}, - dictWord{23, 11, 27}, - dictWord{152, 11, 14}, - dictWord{132, 10, 936}, - dictWord{6, 0, 765}, - dictWord{5, 10, 453}, - dictWord{134, 10, 441}, - dictWord{133, 0, 187}, - dictWord{135, 0, 1286}, - dictWord{6, 0, 635}, - dictWord{6, 0, 904}, - dictWord{6, 0, 1210}, - dictWord{134, 0, 1489}, - dictWord{4, 0, 215}, - dictWord{ - 8, - 0, - 890, - }, - dictWord{9, 0, 38}, - dictWord{10, 0, 923}, - dictWord{11, 0, 23}, - dictWord{11, 0, 127}, - dictWord{139, 0, 796}, - dictWord{6, 0, 1165}, - dictWord{ - 134, - 0, - 1306, - }, - dictWord{7, 0, 716}, - dictWord{13, 0, 97}, - dictWord{141, 0, 251}, - dictWord{132, 10, 653}, - dictWord{136, 0, 657}, - dictWord{146, 10, 80}, - dictWord{ - 5, - 11, - 622, - }, - dictWord{7, 11, 1032}, - dictWord{11, 11, 26}, - dictWord{11, 11, 213}, - dictWord{11, 11, 707}, - dictWord{12, 11, 380}, - dictWord{13, 11, 226}, - dictWord{141, 11, 355}, - dictWord{6, 0, 299}, - dictWord{5, 11, 70}, - dictWord{6, 11, 334}, - dictWord{9, 11, 171}, - dictWord{11, 11, 637}, - dictWord{12, 11, 202}, - dictWord{14, 11, 222}, - dictWord{145, 11, 42}, - dictWord{142, 0, 134}, - dictWord{4, 11, 23}, - dictWord{5, 11, 313}, - dictWord{5, 11, 1014}, - dictWord{6, 11, 50}, - dictWord{ - 6, - 11, - 51, - }, - dictWord{7, 11, 142}, - dictWord{7, 11, 384}, - dictWord{9, 11, 783}, - dictWord{139, 11, 741}, - dictWord{4, 11, 141}, - dictWord{7, 11, 559}, - dictWord{ - 8, - 11, - 640, - }, - dictWord{9, 11, 460}, - dictWord{12, 11, 183}, - dictWord{141, 11, 488}, - dictWord{136, 11, 614}, - dictWord{7, 10, 1368}, - dictWord{8, 10, 232}, - dictWord{8, 10, 361}, - dictWord{10, 10, 682}, - dictWord{138, 10, 742}, - dictWord{137, 10, 534}, - dictWord{6, 0, 1082}, - dictWord{140, 0, 658}, - dictWord{ - 137, - 10, - 27, - }, - dictWord{135, 0, 2002}, - dictWord{142, 10, 12}, - dictWord{4, 0, 28}, - dictWord{5, 0, 440}, - dictWord{7, 0, 248}, - dictWord{11, 0, 833}, - dictWord{140, 0, 344}, - dictWord{7, 10, 736}, - dictWord{139, 10, 
264}, - dictWord{134, 10, 1657}, - dictWord{134, 0, 1654}, - dictWord{138, 0, 531}, - dictWord{5, 11, 222}, - dictWord{ - 9, - 11, - 140, - }, - dictWord{138, 11, 534}, - dictWord{6, 0, 634}, - dictWord{6, 0, 798}, - dictWord{134, 0, 840}, - dictWord{138, 11, 503}, - dictWord{135, 10, 127}, - dictWord{133, 0, 853}, - dictWord{5, 11, 154}, - dictWord{7, 11, 1491}, - dictWord{10, 11, 379}, - dictWord{138, 11, 485}, - dictWord{6, 0, 249}, - dictWord{7, 0, 1234}, - dictWord{139, 0, 573}, - dictWord{133, 11, 716}, - dictWord{7, 11, 1570}, - dictWord{140, 11, 542}, - dictWord{136, 10, 364}, - dictWord{138, 0, 527}, - dictWord{ - 4, - 11, - 91, - }, - dictWord{5, 11, 388}, - dictWord{5, 11, 845}, - dictWord{6, 11, 206}, - dictWord{6, 11, 252}, - dictWord{6, 11, 365}, - dictWord{7, 11, 136}, - dictWord{7, 11, 531}, - dictWord{8, 11, 264}, - dictWord{136, 11, 621}, - dictWord{134, 0, 1419}, - dictWord{135, 11, 1441}, - dictWord{7, 0, 49}, - dictWord{7, 0, 392}, - dictWord{8, 0, 20}, - dictWord{8, 0, 172}, - dictWord{8, 0, 690}, - dictWord{9, 0, 383}, - dictWord{9, 0, 845}, - dictWord{10, 0, 48}, - dictWord{11, 0, 293}, - dictWord{11, 0, 832}, - dictWord{ - 11, - 0, - 920, - }, - dictWord{11, 0, 984}, - dictWord{141, 0, 221}, - dictWord{5, 0, 858}, - dictWord{133, 0, 992}, - dictWord{5, 0, 728}, - dictWord{137, 10, 792}, - dictWord{ - 5, - 10, - 909, - }, - dictWord{9, 10, 849}, - dictWord{138, 10, 805}, - dictWord{7, 0, 525}, - dictWord{7, 0, 1579}, - dictWord{8, 0, 497}, - dictWord{136, 0, 573}, - dictWord{6, 0, 268}, - dictWord{137, 0, 62}, - dictWord{135, 11, 576}, - dictWord{134, 0, 1201}, - dictWord{5, 11, 771}, - dictWord{5, 11, 863}, - dictWord{5, 11, 898}, - dictWord{ - 6, - 11, - 1632, - }, - dictWord{6, 11, 1644}, - dictWord{134, 11, 1780}, - dictWord{133, 11, 331}, - dictWord{7, 0, 193}, - dictWord{7, 0, 1105}, - dictWord{10, 0, 495}, - dictWord{ - 7, - 10, - 397, - }, - dictWord{8, 10, 124}, - dictWord{8, 10, 619}, - dictWord{9, 10, 305}, - dictWord{11, 10, 40}, 
- dictWord{12, 10, 349}, - dictWord{13, 10, 134}, - dictWord{ - 13, - 10, - 295, - }, - dictWord{14, 10, 155}, - dictWord{15, 10, 120}, - dictWord{146, 10, 105}, - dictWord{138, 0, 106}, - dictWord{6, 0, 859}, - dictWord{5, 11, 107}, - dictWord{ - 7, - 11, - 201, - }, - dictWord{136, 11, 518}, - dictWord{6, 11, 446}, - dictWord{135, 11, 1817}, - dictWord{13, 0, 23}, - dictWord{4, 10, 262}, - dictWord{135, 10, 342}, - dictWord{133, 10, 641}, - dictWord{137, 11, 851}, - dictWord{6, 0, 925}, - dictWord{137, 0, 813}, - dictWord{132, 11, 504}, - dictWord{6, 0, 613}, - dictWord{ - 136, - 0, - 223, - }, - dictWord{4, 10, 99}, - dictWord{6, 10, 250}, - dictWord{6, 10, 346}, - dictWord{8, 10, 127}, - dictWord{138, 10, 81}, - dictWord{136, 0, 953}, - dictWord{ - 132, - 10, - 915, - }, - dictWord{139, 11, 892}, - dictWord{5, 10, 75}, - dictWord{9, 10, 517}, - dictWord{10, 10, 470}, - dictWord{12, 10, 155}, - dictWord{141, 10, 224}, - dictWord{ - 4, - 0, - 666, - }, - dictWord{7, 0, 1017}, - dictWord{7, 11, 996}, - dictWord{138, 11, 390}, - dictWord{5, 11, 883}, - dictWord{133, 11, 975}, - dictWord{14, 10, 83}, - dictWord{ - 142, - 11, - 83, - }, - dictWord{4, 0, 670}, - dictWord{5, 11, 922}, - dictWord{134, 11, 1707}, - dictWord{135, 0, 216}, - dictWord{9, 0, 40}, - dictWord{11, 0, 136}, - dictWord{ - 135, - 11, - 787, - }, - dictWord{5, 10, 954}, - dictWord{5, 11, 993}, - dictWord{7, 11, 515}, - dictWord{137, 11, 91}, - dictWord{139, 0, 259}, - dictWord{7, 0, 1114}, - dictWord{ - 9, - 0, - 310, - }, - dictWord{9, 0, 682}, - dictWord{10, 0, 440}, - dictWord{13, 0, 40}, - dictWord{6, 10, 304}, - dictWord{8, 10, 418}, - dictWord{11, 10, 341}, - dictWord{ - 139, - 10, - 675, - }, - dictWord{14, 0, 296}, - dictWord{9, 10, 410}, - dictWord{139, 10, 425}, - dictWord{10, 11, 377}, - dictWord{12, 11, 363}, - dictWord{13, 11, 68}, - dictWord{ - 13, - 11, - 94, - }, - dictWord{14, 11, 108}, - dictWord{142, 11, 306}, - dictWord{7, 0, 1401}, - dictWord{135, 0, 1476}, - dictWord{4, 0, 
296}, - dictWord{6, 0, 475}, - dictWord{ - 7, - 0, - 401, - }, - dictWord{7, 0, 1410}, - dictWord{7, 0, 1594}, - dictWord{7, 0, 1674}, - dictWord{8, 0, 63}, - dictWord{8, 0, 660}, - dictWord{137, 0, 74}, - dictWord{4, 0, 139}, - dictWord{4, 0, 388}, - dictWord{140, 0, 188}, - dictWord{132, 0, 797}, - dictWord{132, 11, 766}, - dictWord{5, 11, 103}, - dictWord{7, 11, 921}, - dictWord{8, 11, 580}, - dictWord{8, 11, 593}, - dictWord{8, 11, 630}, - dictWord{138, 11, 28}, - dictWord{4, 11, 911}, - dictWord{5, 11, 867}, - dictWord{133, 11, 1013}, - dictWord{134, 10, 14}, - dictWord{134, 0, 1572}, - dictWord{134, 10, 1708}, - dictWord{21, 0, 39}, - dictWord{5, 10, 113}, - dictWord{6, 10, 243}, - dictWord{7, 10, 1865}, - dictWord{ - 11, - 10, - 161, - }, - dictWord{16, 10, 37}, - dictWord{145, 10, 99}, - dictWord{7, 11, 1563}, - dictWord{141, 11, 182}, - dictWord{5, 11, 135}, - dictWord{6, 11, 519}, - dictWord{ - 7, - 11, - 1722, - }, - dictWord{10, 11, 271}, - dictWord{11, 11, 261}, - dictWord{145, 11, 54}, - dictWord{132, 10, 274}, - dictWord{134, 0, 1594}, - dictWord{4, 11, 300}, - dictWord{5, 11, 436}, - dictWord{135, 11, 484}, - dictWord{4, 0, 747}, - dictWord{6, 0, 290}, - dictWord{7, 0, 649}, - dictWord{7, 0, 1479}, - dictWord{135, 0, 1583}, - dictWord{133, 11, 535}, - dictWord{147, 11, 82}, - dictWord{133, 0, 232}, - dictWord{137, 0, 887}, - dictWord{135, 10, 166}, - dictWord{136, 0, 521}, - dictWord{4, 0, 14}, - dictWord{7, 0, 472}, - dictWord{7, 0, 1801}, - dictWord{10, 0, 748}, - dictWord{141, 0, 458}, - dictWord{134, 0, 741}, - dictWord{134, 0, 992}, - dictWord{16, 0, 111}, - dictWord{137, 10, 304}, - dictWord{4, 0, 425}, - dictWord{5, 11, 387}, - dictWord{7, 11, 557}, - dictWord{12, 11, 547}, - dictWord{142, 11, 86}, - dictWord{ - 135, - 11, - 1747, - }, - dictWord{5, 10, 654}, - dictWord{135, 11, 1489}, - dictWord{7, 0, 789}, - dictWord{4, 11, 6}, - dictWord{5, 11, 708}, - dictWord{136, 11, 75}, - dictWord{ - 6, - 10, - 273, - }, - dictWord{10, 10, 188}, - 
dictWord{13, 10, 377}, - dictWord{146, 10, 77}, - dictWord{6, 0, 1593}, - dictWord{4, 11, 303}, - dictWord{7, 11, 619}, - dictWord{ - 10, - 11, - 547, - }, - dictWord{10, 11, 687}, - dictWord{11, 11, 122}, - dictWord{140, 11, 601}, - dictWord{134, 0, 1768}, - dictWord{135, 10, 410}, - dictWord{138, 11, 772}, - dictWord{11, 0, 233}, - dictWord{139, 10, 524}, - dictWord{5, 0, 943}, - dictWord{134, 0, 1779}, - dictWord{134, 10, 1785}, - dictWord{136, 11, 529}, - dictWord{ - 132, - 0, - 955, - }, - dictWord{5, 0, 245}, - dictWord{6, 0, 576}, - dictWord{7, 0, 582}, - dictWord{136, 0, 225}, - dictWord{132, 10, 780}, - dictWord{142, 0, 241}, - dictWord{ - 134, - 0, - 1943, - }, - dictWord{4, 11, 106}, - dictWord{7, 11, 310}, - dictWord{7, 11, 1785}, - dictWord{10, 11, 690}, - dictWord{139, 11, 717}, - dictWord{134, 0, 1284}, - dictWord{5, 11, 890}, - dictWord{133, 11, 988}, - dictWord{6, 11, 626}, - dictWord{142, 11, 431}, - dictWord{10, 11, 706}, - dictWord{145, 11, 32}, - dictWord{ - 137, - 11, - 332, - }, - dictWord{132, 11, 698}, - dictWord{135, 0, 709}, - dictWord{5, 10, 948}, - dictWord{138, 11, 17}, - dictWord{136, 0, 554}, - dictWord{134, 0, 1564}, - dictWord{139, 10, 941}, - dictWord{132, 0, 443}, - dictWord{134, 0, 909}, - dictWord{134, 11, 84}, - dictWord{142, 0, 280}, - dictWord{4, 10, 532}, - dictWord{5, 10, 706}, - dictWord{135, 10, 662}, - dictWord{132, 0, 729}, - dictWord{5, 10, 837}, - dictWord{6, 10, 1651}, - dictWord{139, 10, 985}, - dictWord{135, 10, 1861}, - dictWord{ - 4, - 0, - 348, - }, - dictWord{152, 11, 3}, - dictWord{5, 11, 986}, - dictWord{6, 11, 130}, - dictWord{7, 11, 1582}, - dictWord{8, 11, 458}, - dictWord{10, 11, 101}, - dictWord{ - 10, - 11, - 318, - }, - dictWord{138, 11, 823}, - dictWord{134, 0, 758}, - dictWord{4, 0, 298}, - dictWord{137, 0, 848}, - dictWord{4, 10, 330}, - dictWord{7, 10, 933}, - dictWord{ - 7, - 10, - 2012, - }, - dictWord{136, 10, 292}, - dictWord{7, 11, 1644}, - dictWord{137, 11, 129}, - dictWord{6, 0, 1422}, - 
dictWord{9, 0, 829}, - dictWord{135, 10, 767}, - dictWord{5, 0, 164}, - dictWord{7, 0, 121}, - dictWord{142, 0, 189}, - dictWord{7, 0, 812}, - dictWord{7, 0, 1261}, - dictWord{7, 0, 1360}, - dictWord{9, 0, 632}, - dictWord{ - 140, - 0, - 352, - }, - dictWord{135, 11, 1788}, - dictWord{139, 0, 556}, - dictWord{135, 11, 997}, - dictWord{145, 10, 114}, - dictWord{4, 0, 172}, - dictWord{9, 0, 611}, - dictWord{10, 0, 436}, - dictWord{12, 0, 673}, - dictWord{13, 0, 255}, - dictWord{137, 10, 883}, - dictWord{11, 0, 530}, - dictWord{138, 10, 274}, - dictWord{133, 0, 844}, - dictWord{134, 0, 984}, - dictWord{13, 0, 232}, - dictWord{18, 0, 35}, - dictWord{4, 10, 703}, - dictWord{135, 10, 207}, - dictWord{132, 10, 571}, - dictWord{9, 0, 263}, - dictWord{10, 0, 147}, - dictWord{138, 0, 492}, - dictWord{7, 11, 1756}, - dictWord{137, 11, 98}, - dictWord{5, 10, 873}, - dictWord{5, 10, 960}, - dictWord{8, 10, 823}, - dictWord{137, 10, 881}, - dictWord{133, 0, 537}, - dictWord{132, 0, 859}, - dictWord{7, 11, 1046}, - dictWord{139, 11, 160}, - dictWord{137, 0, 842}, - dictWord{ - 139, - 10, - 283, - }, - dictWord{5, 10, 33}, - dictWord{6, 10, 470}, - dictWord{139, 10, 424}, - dictWord{6, 11, 45}, - dictWord{7, 11, 433}, - dictWord{8, 11, 129}, - dictWord{ - 9, - 11, - 21, - }, - dictWord{10, 11, 392}, - dictWord{11, 11, 79}, - dictWord{12, 11, 499}, - dictWord{13, 11, 199}, - dictWord{141, 11, 451}, - dictWord{135, 0, 1291}, - dictWord{135, 10, 1882}, - dictWord{7, 11, 558}, - dictWord{136, 11, 353}, - dictWord{134, 0, 1482}, - dictWord{5, 0, 230}, - dictWord{5, 0, 392}, - dictWord{6, 0, 420}, - dictWord{9, 0, 568}, - dictWord{140, 0, 612}, - dictWord{6, 0, 262}, - dictWord{7, 10, 90}, - dictWord{7, 10, 664}, - dictWord{7, 10, 830}, - dictWord{7, 10, 1380}, - dictWord{ - 7, - 10, - 2025, - }, - dictWord{8, 11, 81}, - dictWord{8, 10, 448}, - dictWord{8, 10, 828}, - dictWord{9, 11, 189}, - dictWord{9, 11, 201}, - dictWord{11, 11, 478}, - dictWord{ - 11, - 11, - 712, - }, - 
dictWord{141, 11, 338}, - dictWord{142, 0, 31}, - dictWord{5, 11, 353}, - dictWord{151, 11, 26}, - dictWord{132, 0, 753}, - dictWord{4, 0, 0}, - dictWord{ - 5, - 0, - 41, - }, - dictWord{7, 0, 1459}, - dictWord{7, 0, 1469}, - dictWord{7, 0, 1859}, - dictWord{9, 0, 549}, - dictWord{139, 0, 905}, - dictWord{9, 10, 417}, - dictWord{ - 137, - 10, - 493, - }, - dictWord{135, 11, 1113}, - dictWord{133, 0, 696}, - dictWord{141, 11, 448}, - dictWord{134, 10, 295}, - dictWord{132, 0, 834}, - dictWord{4, 0, 771}, - dictWord{5, 10, 1019}, - dictWord{6, 11, 25}, - dictWord{7, 11, 855}, - dictWord{7, 11, 1258}, - dictWord{144, 11, 32}, - dictWord{134, 0, 1076}, - dictWord{133, 0, 921}, - dictWord{133, 0, 674}, - dictWord{4, 11, 4}, - dictWord{7, 11, 1118}, - dictWord{7, 11, 1320}, - dictWord{7, 11, 1706}, - dictWord{8, 11, 277}, - dictWord{9, 11, 622}, - dictWord{10, 11, 9}, - dictWord{11, 11, 724}, - dictWord{12, 11, 350}, - dictWord{12, 11, 397}, - dictWord{13, 11, 28}, - dictWord{13, 11, 159}, - dictWord{15, 11, 89}, - dictWord{18, 11, 5}, - dictWord{19, 11, 9}, - dictWord{20, 11, 34}, - dictWord{150, 11, 47}, - dictWord{134, 10, 208}, - dictWord{6, 0, 444}, - dictWord{136, 0, 308}, - dictWord{ - 6, - 0, - 180, - }, - dictWord{7, 0, 1137}, - dictWord{8, 0, 751}, - dictWord{139, 0, 805}, - dictWord{4, 0, 183}, - dictWord{7, 0, 271}, - dictWord{11, 0, 824}, - dictWord{ - 11, - 0, - 952, - }, - dictWord{13, 0, 278}, - dictWord{13, 0, 339}, - dictWord{13, 0, 482}, - dictWord{14, 0, 424}, - dictWord{148, 0, 99}, - dictWord{7, 11, 317}, - dictWord{ - 135, - 11, - 569, - }, - dictWord{4, 0, 19}, - dictWord{5, 0, 477}, - dictWord{5, 0, 596}, - dictWord{6, 0, 505}, - dictWord{7, 0, 1221}, - dictWord{11, 0, 907}, - dictWord{12, 0, 209}, - dictWord{141, 0, 214}, - dictWord{135, 0, 1215}, - dictWord{6, 0, 271}, - dictWord{7, 0, 398}, - dictWord{8, 0, 387}, - dictWord{10, 0, 344}, - dictWord{7, 10, 448}, - dictWord{ - 7, - 10, - 1629, - }, - dictWord{7, 10, 1813}, - dictWord{8, 10, 442}, 
- dictWord{9, 10, 710}, - dictWord{10, 10, 282}, - dictWord{138, 10, 722}, - dictWord{11, 10, 844}, - dictWord{12, 10, 104}, - dictWord{140, 10, 625}, - dictWord{134, 11, 255}, - dictWord{133, 10, 787}, - dictWord{134, 0, 1645}, - dictWord{11, 11, 956}, - dictWord{ - 151, - 11, - 3, - }, - dictWord{6, 0, 92}, - dictWord{6, 0, 188}, - dictWord{7, 0, 209}, - dictWord{7, 0, 1269}, - dictWord{7, 0, 1524}, - dictWord{7, 0, 1876}, - dictWord{8, 0, 661}, - dictWord{10, 0, 42}, - dictWord{10, 0, 228}, - dictWord{11, 0, 58}, - dictWord{11, 0, 1020}, - dictWord{12, 0, 58}, - dictWord{12, 0, 118}, - dictWord{141, 0, 32}, - dictWord{ - 4, - 0, - 459, - }, - dictWord{133, 0, 966}, - dictWord{4, 11, 536}, - dictWord{7, 11, 1141}, - dictWord{10, 11, 723}, - dictWord{139, 11, 371}, - dictWord{140, 0, 330}, - dictWord{134, 0, 1557}, - dictWord{7, 11, 285}, - dictWord{135, 11, 876}, - dictWord{136, 10, 491}, - dictWord{135, 11, 560}, - dictWord{6, 0, 18}, - dictWord{7, 0, 179}, - dictWord{7, 0, 932}, - dictWord{8, 0, 548}, - dictWord{8, 0, 757}, - dictWord{9, 0, 54}, - dictWord{9, 0, 65}, - dictWord{9, 0, 532}, - dictWord{9, 0, 844}, - dictWord{10, 0, 113}, - dictWord{10, 0, 117}, - dictWord{10, 0, 315}, - dictWord{10, 0, 560}, - dictWord{10, 0, 622}, - dictWord{10, 0, 798}, - dictWord{11, 0, 153}, - dictWord{11, 0, 351}, - dictWord{ - 11, - 0, - 375, - }, - dictWord{12, 0, 78}, - dictWord{12, 0, 151}, - dictWord{12, 0, 392}, - dictWord{12, 0, 666}, - dictWord{14, 0, 248}, - dictWord{143, 0, 23}, - dictWord{ - 6, - 0, - 1742, - }, - dictWord{132, 11, 690}, - dictWord{4, 10, 403}, - dictWord{5, 10, 441}, - dictWord{7, 10, 450}, - dictWord{10, 10, 840}, - dictWord{11, 10, 101}, - dictWord{ - 12, - 10, - 193, - }, - dictWord{141, 10, 430}, - dictWord{133, 0, 965}, - dictWord{134, 0, 182}, - dictWord{10, 0, 65}, - dictWord{10, 0, 488}, - dictWord{138, 0, 497}, - dictWord{135, 11, 1346}, - dictWord{6, 0, 973}, - dictWord{6, 0, 1158}, - dictWord{10, 11, 200}, - dictWord{19, 11, 2}, - 
dictWord{151, 11, 22}, - dictWord{4, 11, 190}, - dictWord{133, 11, 554}, - dictWord{133, 10, 679}, - dictWord{7, 0, 328}, - dictWord{137, 10, 326}, - dictWord{133, 11, 1001}, - dictWord{9, 0, 588}, - dictWord{ - 138, - 0, - 260, - }, - dictWord{133, 11, 446}, - dictWord{135, 10, 1128}, - dictWord{135, 10, 1796}, - dictWord{147, 11, 119}, - dictWord{134, 0, 1786}, - dictWord{ - 6, - 0, - 1328, - }, - dictWord{6, 0, 1985}, - dictWord{8, 0, 962}, - dictWord{138, 0, 1017}, - dictWord{135, 0, 308}, - dictWord{11, 0, 508}, - dictWord{4, 10, 574}, - dictWord{ - 7, - 10, - 350, - }, - dictWord{7, 10, 1024}, - dictWord{8, 10, 338}, - dictWord{9, 10, 677}, - dictWord{138, 10, 808}, - dictWord{138, 11, 752}, - dictWord{135, 10, 1081}, - dictWord{137, 11, 96}, - dictWord{7, 10, 1676}, - dictWord{135, 10, 2037}, - dictWord{136, 0, 588}, - dictWord{132, 11, 304}, - dictWord{133, 0, 614}, - dictWord{ - 140, - 0, - 793, - }, - dictWord{136, 0, 287}, - dictWord{137, 10, 297}, - dictWord{141, 10, 37}, - dictWord{6, 11, 53}, - dictWord{6, 11, 199}, - dictWord{7, 11, 1408}, - dictWord{ - 8, - 11, - 32, - }, - dictWord{8, 11, 93}, - dictWord{9, 11, 437}, - dictWord{10, 11, 397}, - dictWord{10, 11, 629}, - dictWord{11, 11, 593}, - dictWord{11, 11, 763}, - dictWord{ - 13, - 11, - 326, - }, - dictWord{145, 11, 35}, - dictWord{134, 11, 105}, - dictWord{9, 11, 320}, - dictWord{10, 11, 506}, - dictWord{138, 11, 794}, - dictWord{5, 11, 114}, - dictWord{5, 11, 255}, - dictWord{141, 11, 285}, - dictWord{140, 0, 290}, - dictWord{7, 11, 2035}, - dictWord{8, 11, 19}, - dictWord{9, 11, 89}, - dictWord{138, 11, 831}, - dictWord{134, 0, 1136}, - dictWord{7, 0, 719}, - dictWord{8, 0, 796}, - dictWord{8, 0, 809}, - dictWord{8, 0, 834}, - dictWord{6, 10, 306}, - dictWord{7, 10, 1140}, - dictWord{ - 7, - 10, - 1340, - }, - dictWord{8, 10, 133}, - dictWord{138, 10, 449}, - dictWord{139, 10, 1011}, - dictWord{5, 0, 210}, - dictWord{6, 0, 213}, - dictWord{7, 0, 60}, - dictWord{ - 10, - 0, - 364, - }, - 
dictWord{139, 0, 135}, - dictWord{5, 0, 607}, - dictWord{8, 0, 326}, - dictWord{136, 0, 490}, - dictWord{138, 11, 176}, - dictWord{132, 0, 701}, - dictWord{ - 5, - 0, - 472, - }, - dictWord{7, 0, 380}, - dictWord{137, 0, 758}, - dictWord{135, 0, 1947}, - dictWord{6, 0, 1079}, - dictWord{138, 0, 278}, - dictWord{138, 11, 391}, - dictWord{ - 5, - 10, - 329, - }, - dictWord{8, 10, 260}, - dictWord{139, 11, 156}, - dictWord{4, 0, 386}, - dictWord{7, 0, 41}, - dictWord{8, 0, 405}, - dictWord{8, 0, 728}, - dictWord{9, 0, 497}, - dictWord{11, 0, 110}, - dictWord{11, 0, 360}, - dictWord{15, 0, 37}, - dictWord{144, 0, 84}, - dictWord{5, 0, 46}, - dictWord{7, 0, 1452}, - dictWord{7, 0, 1480}, - dictWord{ - 8, - 0, - 634, - }, - dictWord{140, 0, 472}, - dictWord{136, 0, 961}, - dictWord{4, 0, 524}, - dictWord{136, 0, 810}, - dictWord{10, 0, 238}, - dictWord{141, 0, 33}, - dictWord{ - 132, - 10, - 657, - }, - dictWord{152, 10, 7}, - dictWord{133, 0, 532}, - dictWord{5, 0, 997}, - dictWord{135, 10, 1665}, - dictWord{7, 11, 594}, - dictWord{7, 11, 851}, - dictWord{ - 7, - 11, - 1858, - }, - dictWord{9, 11, 411}, - dictWord{9, 11, 574}, - dictWord{9, 11, 666}, - dictWord{9, 11, 737}, - dictWord{10, 11, 346}, - dictWord{10, 11, 712}, - dictWord{ - 11, - 11, - 246, - }, - dictWord{11, 11, 432}, - dictWord{11, 11, 517}, - dictWord{11, 11, 647}, - dictWord{11, 11, 679}, - dictWord{11, 11, 727}, - dictWord{12, 11, 304}, - dictWord{12, 11, 305}, - dictWord{12, 11, 323}, - dictWord{12, 11, 483}, - dictWord{12, 11, 572}, - dictWord{12, 11, 593}, - dictWord{12, 11, 602}, - dictWord{ - 13, - 11, - 95, - }, - dictWord{13, 11, 101}, - dictWord{13, 11, 171}, - dictWord{13, 11, 315}, - dictWord{13, 11, 378}, - dictWord{13, 11, 425}, - dictWord{13, 11, 475}, - dictWord{ - 14, - 11, - 63, - }, - dictWord{14, 11, 380}, - dictWord{14, 11, 384}, - dictWord{15, 11, 133}, - dictWord{18, 11, 112}, - dictWord{148, 11, 72}, - dictWord{5, 11, 955}, - dictWord{136, 11, 814}, - dictWord{134, 0, 1301}, - 
dictWord{5, 10, 66}, - dictWord{7, 10, 1896}, - dictWord{136, 10, 288}, - dictWord{133, 11, 56}, - dictWord{ - 134, - 10, - 1643, - }, - dictWord{6, 0, 1298}, - dictWord{148, 11, 100}, - dictWord{5, 0, 782}, - dictWord{5, 0, 829}, - dictWord{6, 0, 671}, - dictWord{6, 0, 1156}, - dictWord{6, 0, 1738}, - dictWord{137, 11, 621}, - dictWord{4, 0, 306}, - dictWord{5, 0, 570}, - dictWord{7, 0, 1347}, - dictWord{5, 10, 91}, - dictWord{5, 10, 648}, - dictWord{5, 10, 750}, - dictWord{ - 5, - 10, - 781, - }, - dictWord{6, 10, 54}, - dictWord{6, 10, 112}, - dictWord{6, 10, 402}, - dictWord{6, 10, 1732}, - dictWord{7, 10, 315}, - dictWord{7, 10, 749}, - dictWord{ - 7, - 10, - 1900, - }, - dictWord{9, 10, 78}, - dictWord{9, 10, 508}, - dictWord{10, 10, 611}, - dictWord{10, 10, 811}, - dictWord{11, 10, 510}, - dictWord{11, 10, 728}, - dictWord{ - 13, - 10, - 36, - }, - dictWord{14, 10, 39}, - dictWord{16, 10, 83}, - dictWord{17, 10, 124}, - dictWord{148, 10, 30}, - dictWord{8, 10, 570}, - dictWord{9, 11, 477}, - dictWord{ - 141, - 11, - 78, - }, - dictWord{4, 11, 639}, - dictWord{10, 11, 4}, - dictWord{10, 10, 322}, - dictWord{10, 10, 719}, - dictWord{11, 10, 407}, - dictWord{11, 11, 638}, - dictWord{ - 12, - 11, - 177, - }, - dictWord{148, 11, 57}, - dictWord{7, 0, 1823}, - dictWord{139, 0, 693}, - dictWord{7, 0, 759}, - dictWord{5, 11, 758}, - dictWord{8, 10, 125}, - dictWord{ - 8, - 10, - 369, - }, - dictWord{8, 10, 524}, - dictWord{10, 10, 486}, - dictWord{11, 10, 13}, - dictWord{11, 10, 381}, - dictWord{11, 10, 736}, - dictWord{11, 10, 766}, - dictWord{ - 11, - 10, - 845, - }, - dictWord{13, 10, 114}, - dictWord{13, 10, 292}, - dictWord{142, 10, 47}, - dictWord{7, 0, 1932}, - dictWord{6, 10, 1684}, - dictWord{6, 10, 1731}, - dictWord{7, 10, 356}, - dictWord{8, 10, 54}, - dictWord{8, 10, 221}, - dictWord{9, 10, 225}, - dictWord{9, 10, 356}, - dictWord{10, 10, 77}, - dictWord{10, 10, 446}, - dictWord{ - 10, - 10, - 731, - }, - dictWord{12, 10, 404}, - dictWord{141, 10, 491}, 
- dictWord{135, 11, 552}, - dictWord{135, 11, 1112}, - dictWord{4, 0, 78}, - dictWord{5, 0, 96}, - dictWord{ - 5, - 0, - 182, - }, - dictWord{6, 0, 1257}, - dictWord{7, 0, 1724}, - dictWord{7, 0, 1825}, - dictWord{10, 0, 394}, - dictWord{10, 0, 471}, - dictWord{11, 0, 532}, - dictWord{ - 14, - 0, - 340, - }, - dictWord{145, 0, 88}, - dictWord{139, 11, 328}, - dictWord{135, 0, 1964}, - dictWord{132, 10, 411}, - dictWord{4, 10, 80}, - dictWord{5, 10, 44}, - dictWord{ - 137, - 11, - 133, - }, - dictWord{5, 11, 110}, - dictWord{6, 11, 169}, - dictWord{6, 11, 1702}, - dictWord{7, 11, 400}, - dictWord{8, 11, 538}, - dictWord{9, 11, 184}, - dictWord{ - 9, - 11, - 524, - }, - dictWord{140, 11, 218}, - dictWord{4, 0, 521}, - dictWord{5, 10, 299}, - dictWord{7, 10, 1083}, - dictWord{140, 11, 554}, - dictWord{6, 11, 133}, - dictWord{ - 9, - 11, - 353, - }, - dictWord{12, 11, 628}, - dictWord{146, 11, 79}, - dictWord{6, 0, 215}, - dictWord{7, 0, 584}, - dictWord{7, 0, 1028}, - dictWord{7, 0, 1473}, - dictWord{ - 7, - 0, - 1721, - }, - dictWord{9, 0, 424}, - dictWord{138, 0, 779}, - dictWord{7, 0, 857}, - dictWord{7, 0, 1209}, - dictWord{7, 10, 1713}, - dictWord{9, 10, 537}, - dictWord{ - 10, - 10, - 165, - }, - dictWord{12, 10, 219}, - dictWord{140, 10, 561}, - dictWord{4, 10, 219}, - dictWord{6, 11, 93}, - dictWord{7, 11, 1422}, - dictWord{7, 10, 1761}, - dictWord{ - 7, - 11, - 1851, - }, - dictWord{8, 11, 673}, - dictWord{9, 10, 86}, - dictWord{9, 11, 529}, - dictWord{140, 11, 43}, - dictWord{137, 11, 371}, - dictWord{136, 0, 671}, - dictWord{ - 5, - 0, - 328, - }, - dictWord{135, 0, 918}, - dictWord{132, 0, 529}, - dictWord{9, 11, 25}, - dictWord{10, 11, 467}, - dictWord{138, 11, 559}, - dictWord{4, 11, 335}, - dictWord{ - 135, - 11, - 942, - }, - dictWord{134, 0, 716}, - dictWord{134, 0, 1509}, - dictWord{6, 0, 67}, - dictWord{7, 0, 258}, - dictWord{7, 0, 1630}, - dictWord{9, 0, 354}, - dictWord{ - 9, - 0, - 675, - }, - dictWord{10, 0, 830}, - dictWord{14, 0, 80}, - 
dictWord{17, 0, 80}, - dictWord{140, 10, 428}, - dictWord{134, 0, 1112}, - dictWord{6, 0, 141}, - dictWord{7, 0, 225}, - dictWord{9, 0, 59}, - dictWord{9, 0, 607}, - dictWord{10, 0, 312}, - dictWord{11, 0, 687}, - dictWord{12, 0, 555}, - dictWord{13, 0, 373}, - dictWord{13, 0, 494}, - dictWord{ - 148, - 0, - 58, - }, - dictWord{133, 10, 514}, - dictWord{8, 11, 39}, - dictWord{10, 11, 773}, - dictWord{11, 11, 84}, - dictWord{12, 11, 205}, - dictWord{142, 11, 1}, - dictWord{ - 8, - 0, - 783, - }, - dictWord{5, 11, 601}, - dictWord{133, 11, 870}, - dictWord{136, 11, 594}, - dictWord{4, 10, 55}, - dictWord{5, 10, 301}, - dictWord{6, 10, 571}, - dictWord{ - 14, - 10, - 49, - }, - dictWord{146, 10, 102}, - dictWord{132, 11, 181}, - dictWord{134, 11, 1652}, - dictWord{133, 10, 364}, - dictWord{4, 11, 97}, - dictWord{5, 11, 147}, - dictWord{6, 11, 286}, - dictWord{7, 11, 1362}, - dictWord{141, 11, 176}, - dictWord{4, 10, 76}, - dictWord{7, 10, 1550}, - dictWord{9, 10, 306}, - dictWord{9, 10, 430}, - dictWord{9, 10, 663}, - dictWord{10, 10, 683}, - dictWord{11, 10, 427}, - dictWord{11, 10, 753}, - dictWord{12, 10, 334}, - dictWord{12, 10, 442}, - dictWord{ - 14, - 10, - 258, - }, - dictWord{14, 10, 366}, - dictWord{143, 10, 131}, - dictWord{137, 10, 52}, - dictWord{6, 0, 955}, - dictWord{134, 0, 1498}, - dictWord{6, 11, 375}, - dictWord{ - 7, - 11, - 169, - }, - dictWord{7, 11, 254}, - dictWord{136, 11, 780}, - dictWord{7, 0, 430}, - dictWord{11, 0, 46}, - dictWord{14, 0, 343}, - dictWord{142, 11, 343}, - dictWord{ - 135, - 0, - 1183, - }, - dictWord{5, 0, 602}, - dictWord{7, 0, 2018}, - dictWord{9, 0, 418}, - dictWord{9, 0, 803}, - dictWord{135, 11, 1447}, - dictWord{8, 0, 677}, - dictWord{ - 135, - 11, - 1044, - }, - dictWord{139, 11, 285}, - dictWord{4, 10, 656}, - dictWord{135, 10, 779}, - dictWord{135, 10, 144}, - dictWord{5, 11, 629}, - dictWord{ - 135, - 11, - 1549, - }, - dictWord{135, 10, 1373}, - dictWord{138, 11, 209}, - dictWord{7, 10, 554}, - dictWord{7, 10, 
605}, - dictWord{141, 10, 10}, - dictWord{5, 10, 838}, - dictWord{ - 5, - 10, - 841, - }, - dictWord{134, 10, 1649}, - dictWord{133, 10, 1012}, - dictWord{6, 0, 1357}, - dictWord{134, 0, 1380}, - dictWord{144, 0, 53}, - dictWord{6, 0, 590}, - dictWord{7, 10, 365}, - dictWord{7, 10, 1357}, - dictWord{7, 10, 1497}, - dictWord{8, 10, 154}, - dictWord{141, 10, 281}, - dictWord{133, 10, 340}, - dictWord{ - 132, - 11, - 420, - }, - dictWord{135, 0, 329}, - dictWord{147, 11, 32}, - dictWord{4, 0, 469}, - dictWord{10, 11, 429}, - dictWord{139, 10, 495}, - dictWord{8, 10, 261}, - dictWord{ - 9, - 10, - 144, - }, - dictWord{9, 10, 466}, - dictWord{10, 10, 370}, - dictWord{12, 10, 470}, - dictWord{13, 10, 144}, - dictWord{142, 10, 348}, - dictWord{142, 0, 460}, - dictWord{4, 11, 325}, - dictWord{9, 10, 897}, - dictWord{138, 11, 125}, - dictWord{6, 0, 1743}, - dictWord{6, 10, 248}, - dictWord{9, 10, 546}, - dictWord{10, 10, 535}, - dictWord{11, 10, 681}, - dictWord{141, 10, 135}, - dictWord{4, 0, 990}, - dictWord{5, 0, 929}, - dictWord{6, 0, 340}, - dictWord{8, 0, 376}, - dictWord{8, 0, 807}, - dictWord{ - 8, - 0, - 963, - }, - dictWord{8, 0, 980}, - dictWord{138, 0, 1007}, - dictWord{134, 0, 1603}, - dictWord{140, 0, 250}, - dictWord{4, 11, 714}, - dictWord{133, 11, 469}, - dictWord{134, 10, 567}, - dictWord{136, 10, 445}, - dictWord{5, 0, 218}, - dictWord{7, 0, 1610}, - dictWord{8, 0, 646}, - dictWord{10, 0, 83}, - dictWord{11, 11, 138}, - dictWord{140, 11, 40}, - dictWord{7, 0, 1512}, - dictWord{135, 0, 1794}, - dictWord{135, 11, 1216}, - dictWord{11, 0, 0}, - dictWord{16, 0, 78}, - dictWord{132, 11, 718}, - dictWord{133, 0, 571}, - dictWord{132, 0, 455}, - dictWord{134, 0, 1012}, - dictWord{5, 11, 124}, - dictWord{5, 11, 144}, - dictWord{6, 11, 548}, - dictWord{7, 11, 15}, - dictWord{7, 11, 153}, - dictWord{137, 11, 629}, - dictWord{142, 11, 10}, - dictWord{6, 11, 75}, - dictWord{7, 11, 1531}, - dictWord{8, 11, 416}, - dictWord{9, 11, 240}, - dictWord{9, 11, 275}, - 
dictWord{10, 11, 100}, - dictWord{11, 11, 658}, - dictWord{11, 11, 979}, - dictWord{12, 11, 86}, - dictWord{13, 11, 468}, - dictWord{14, 11, 66}, - dictWord{14, 11, 207}, - dictWord{15, 11, 20}, - dictWord{15, 11, 25}, - dictWord{144, 11, 58}, - dictWord{132, 10, 577}, - dictWord{5, 11, 141}, - dictWord{ - 5, - 11, - 915, - }, - dictWord{6, 11, 1783}, - dictWord{7, 11, 211}, - dictWord{7, 11, 698}, - dictWord{7, 11, 1353}, - dictWord{9, 11, 83}, - dictWord{9, 11, 281}, - dictWord{ - 10, - 11, - 376, - }, - dictWord{10, 11, 431}, - dictWord{11, 11, 543}, - dictWord{12, 11, 664}, - dictWord{13, 11, 280}, - dictWord{13, 11, 428}, - dictWord{14, 11, 61}, - dictWord{ - 14, - 11, - 128, - }, - dictWord{17, 11, 52}, - dictWord{145, 11, 81}, - dictWord{6, 0, 161}, - dictWord{7, 0, 372}, - dictWord{137, 0, 597}, - dictWord{132, 0, 349}, - dictWord{ - 10, - 11, - 702, - }, - dictWord{139, 11, 245}, - dictWord{134, 0, 524}, - dictWord{134, 10, 174}, - dictWord{6, 0, 432}, - dictWord{9, 0, 751}, - dictWord{139, 0, 322}, - dictWord{147, 11, 94}, - dictWord{4, 11, 338}, - dictWord{133, 11, 400}, - dictWord{5, 0, 468}, - dictWord{10, 0, 325}, - dictWord{11, 0, 856}, - dictWord{12, 0, 345}, - dictWord{143, 0, 104}, - dictWord{133, 0, 223}, - dictWord{132, 0, 566}, - dictWord{4, 11, 221}, - dictWord{5, 11, 659}, - dictWord{5, 11, 989}, - dictWord{7, 11, 697}, - dictWord{7, 11, 1211}, - dictWord{138, 11, 284}, - dictWord{135, 11, 1070}, - dictWord{4, 0, 59}, - dictWord{135, 0, 1394}, - dictWord{6, 0, 436}, - dictWord{11, 0, 481}, - dictWord{5, 10, 878}, - dictWord{133, 10, 972}, - dictWord{4, 0, 48}, - dictWord{5, 0, 271}, - dictWord{135, 0, 953}, - dictWord{5, 0, 610}, - dictWord{136, 0, 457}, - dictWord{ - 4, - 0, - 773, - }, - dictWord{5, 0, 618}, - dictWord{137, 0, 756}, - dictWord{133, 0, 755}, - dictWord{135, 0, 1217}, - dictWord{138, 11, 507}, - dictWord{132, 10, 351}, - dictWord{132, 0, 197}, - dictWord{143, 11, 78}, - dictWord{4, 11, 188}, - dictWord{7, 11, 805}, - 
dictWord{11, 11, 276}, - dictWord{142, 11, 293}, - dictWord{ - 5, - 11, - 884, - }, - dictWord{139, 11, 991}, - dictWord{132, 10, 286}, - dictWord{10, 0, 259}, - dictWord{10, 0, 428}, - dictWord{7, 10, 438}, - dictWord{7, 10, 627}, - dictWord{ - 7, - 10, - 1516, - }, - dictWord{8, 10, 40}, - dictWord{9, 10, 56}, - dictWord{9, 10, 294}, - dictWord{11, 10, 969}, - dictWord{11, 10, 995}, - dictWord{146, 10, 148}, - dictWord{ - 4, - 0, - 356, - }, - dictWord{5, 0, 217}, - dictWord{5, 0, 492}, - dictWord{5, 0, 656}, - dictWord{8, 0, 544}, - dictWord{136, 11, 544}, - dictWord{5, 0, 259}, - dictWord{6, 0, 1230}, - dictWord{7, 0, 414}, - dictWord{7, 0, 854}, - dictWord{142, 0, 107}, - dictWord{132, 0, 1007}, - dictWord{15, 0, 14}, - dictWord{144, 0, 5}, - dictWord{6, 0, 1580}, - dictWord{ - 132, - 10, - 738, - }, - dictWord{132, 11, 596}, - dictWord{132, 0, 673}, - dictWord{133, 10, 866}, - dictWord{6, 0, 1843}, - dictWord{135, 11, 1847}, - dictWord{4, 0, 165}, - dictWord{7, 0, 1398}, - dictWord{135, 0, 1829}, - dictWord{135, 11, 1634}, - dictWord{147, 11, 65}, - dictWord{6, 0, 885}, - dictWord{6, 0, 1009}, - dictWord{ - 137, - 0, - 809, - }, - dictWord{133, 10, 116}, - dictWord{132, 10, 457}, - dictWord{136, 11, 770}, - dictWord{9, 0, 498}, - dictWord{12, 0, 181}, - dictWord{10, 11, 361}, - dictWord{142, 11, 316}, - dictWord{134, 11, 595}, - dictWord{5, 0, 9}, - dictWord{7, 0, 297}, - dictWord{7, 0, 966}, - dictWord{140, 0, 306}, - dictWord{4, 11, 89}, - dictWord{ - 5, - 11, - 489, - }, - dictWord{6, 11, 315}, - dictWord{7, 11, 553}, - dictWord{7, 11, 1745}, - dictWord{138, 11, 243}, - dictWord{134, 0, 1487}, - dictWord{132, 0, 437}, - dictWord{ - 5, - 0, - 146, - }, - dictWord{6, 0, 411}, - dictWord{138, 0, 721}, - dictWord{5, 10, 527}, - dictWord{6, 10, 189}, - dictWord{135, 10, 859}, - dictWord{11, 10, 104}, - dictWord{ - 11, - 10, - 554, - }, - dictWord{15, 10, 60}, - dictWord{143, 10, 125}, - dictWord{6, 11, 1658}, - dictWord{9, 11, 3}, - dictWord{10, 11, 154}, - 
dictWord{11, 11, 641}, - dictWord{13, 11, 85}, - dictWord{13, 11, 201}, - dictWord{141, 11, 346}, - dictWord{6, 0, 177}, - dictWord{135, 0, 467}, - dictWord{134, 0, 1377}, - dictWord{ - 134, - 10, - 116, - }, - dictWord{136, 11, 645}, - dictWord{4, 11, 166}, - dictWord{5, 11, 505}, - dictWord{6, 11, 1670}, - dictWord{137, 11, 110}, - dictWord{133, 10, 487}, - dictWord{ - 4, - 10, - 86, - }, - dictWord{5, 10, 667}, - dictWord{5, 10, 753}, - dictWord{6, 10, 316}, - dictWord{6, 10, 455}, - dictWord{135, 10, 946}, - dictWord{133, 0, 200}, - dictWord{132, 0, 959}, - dictWord{6, 0, 1928}, - dictWord{134, 0, 1957}, - dictWord{139, 11, 203}, - dictWord{150, 10, 45}, - dictWord{4, 10, 79}, - dictWord{7, 10, 1773}, - dictWord{10, 10, 450}, - dictWord{11, 10, 589}, - dictWord{13, 10, 332}, - dictWord{13, 10, 493}, - dictWord{14, 10, 183}, - dictWord{14, 10, 334}, - dictWord{ - 14, - 10, - 362, - }, - dictWord{14, 10, 368}, - dictWord{14, 10, 376}, - dictWord{14, 10, 379}, - dictWord{19, 10, 90}, - dictWord{19, 10, 103}, - dictWord{19, 10, 127}, - dictWord{148, 10, 90}, - dictWord{6, 0, 1435}, - dictWord{135, 11, 1275}, - dictWord{134, 0, 481}, - dictWord{7, 11, 445}, - dictWord{8, 11, 307}, - dictWord{8, 11, 704}, - dictWord{10, 11, 41}, - dictWord{10, 11, 439}, - dictWord{11, 11, 237}, - dictWord{11, 11, 622}, - dictWord{140, 11, 201}, - dictWord{135, 11, 869}, - dictWord{ - 4, - 0, - 84, - }, - dictWord{7, 0, 1482}, - dictWord{10, 0, 76}, - dictWord{138, 0, 142}, - dictWord{11, 11, 277}, - dictWord{144, 11, 14}, - dictWord{135, 11, 1977}, - dictWord{ - 4, - 11, - 189, - }, - dictWord{5, 11, 713}, - dictWord{136, 11, 57}, - dictWord{133, 0, 1015}, - dictWord{138, 11, 371}, - dictWord{4, 0, 315}, - dictWord{5, 0, 507}, - dictWord{ - 135, - 0, - 1370, - }, - dictWord{4, 11, 552}, - dictWord{142, 10, 381}, - dictWord{9, 0, 759}, - dictWord{16, 0, 31}, - dictWord{16, 0, 39}, - dictWord{16, 0, 75}, - dictWord{18, 0, 24}, - dictWord{20, 0, 42}, - dictWord{152, 0, 1}, - 
dictWord{134, 0, 712}, - dictWord{134, 0, 1722}, - dictWord{133, 10, 663}, - dictWord{133, 10, 846}, - dictWord{ - 8, - 0, - 222, - }, - dictWord{8, 0, 476}, - dictWord{9, 0, 238}, - dictWord{11, 0, 516}, - dictWord{11, 0, 575}, - dictWord{15, 0, 109}, - dictWord{146, 0, 100}, - dictWord{7, 0, 1402}, - dictWord{7, 0, 1414}, - dictWord{12, 0, 456}, - dictWord{5, 10, 378}, - dictWord{8, 10, 465}, - dictWord{9, 10, 286}, - dictWord{10, 10, 185}, - dictWord{10, 10, 562}, - dictWord{10, 10, 635}, - dictWord{11, 10, 31}, - dictWord{11, 10, 393}, - dictWord{13, 10, 312}, - dictWord{18, 10, 65}, - dictWord{18, 10, 96}, - dictWord{147, 10, 89}, - dictWord{4, 0, 986}, - dictWord{6, 0, 1958}, - dictWord{6, 0, 2032}, - dictWord{8, 0, 934}, - dictWord{138, 0, 985}, - dictWord{7, 10, 1880}, - dictWord{9, 10, 680}, - dictWord{139, 10, 798}, - dictWord{134, 10, 1770}, - dictWord{145, 11, 49}, - dictWord{132, 11, 614}, - dictWord{132, 10, 648}, - dictWord{5, 10, 945}, - dictWord{ - 6, - 10, - 1656, - }, - dictWord{6, 10, 1787}, - dictWord{7, 10, 167}, - dictWord{8, 10, 824}, - dictWord{9, 10, 391}, - dictWord{10, 10, 375}, - dictWord{139, 10, 185}, - dictWord{138, 11, 661}, - dictWord{7, 0, 1273}, - dictWord{135, 11, 1945}, - dictWord{7, 0, 706}, - dictWord{7, 0, 1058}, - dictWord{138, 0, 538}, - dictWord{7, 10, 1645}, - dictWord{8, 10, 352}, - dictWord{137, 10, 249}, - dictWord{132, 10, 152}, - dictWord{11, 0, 92}, - dictWord{11, 0, 196}, - dictWord{11, 0, 409}, - dictWord{11, 0, 450}, - dictWord{11, 0, 666}, - dictWord{11, 0, 777}, - dictWord{12, 0, 262}, - dictWord{13, 0, 385}, - dictWord{13, 0, 393}, - dictWord{15, 0, 115}, - dictWord{16, 0, 45}, - dictWord{145, 0, 82}, - dictWord{133, 10, 1006}, - dictWord{6, 0, 40}, - dictWord{135, 0, 1781}, - dictWord{9, 11, 614}, - dictWord{139, 11, 327}, - dictWord{5, 10, 420}, - dictWord{135, 10, 1449}, - dictWord{135, 0, 431}, - dictWord{10, 0, 97}, - dictWord{135, 10, 832}, - dictWord{6, 0, 423}, - dictWord{7, 0, 665}, - dictWord{ - 
135, - 0, - 1210, - }, - dictWord{7, 0, 237}, - dictWord{8, 0, 664}, - dictWord{9, 0, 42}, - dictWord{9, 0, 266}, - dictWord{9, 0, 380}, - dictWord{9, 0, 645}, - dictWord{10, 0, 177}, - dictWord{ - 138, - 0, - 276, - }, - dictWord{7, 0, 264}, - dictWord{133, 10, 351}, - dictWord{8, 0, 213}, - dictWord{5, 10, 40}, - dictWord{7, 10, 598}, - dictWord{7, 10, 1638}, - dictWord{ - 9, - 10, - 166, - }, - dictWord{9, 10, 640}, - dictWord{9, 10, 685}, - dictWord{9, 10, 773}, - dictWord{11, 10, 215}, - dictWord{13, 10, 65}, - dictWord{14, 10, 172}, - dictWord{ - 14, - 10, - 317, - }, - dictWord{145, 10, 6}, - dictWord{5, 11, 84}, - dictWord{134, 11, 163}, - dictWord{8, 10, 60}, - dictWord{9, 10, 343}, - dictWord{139, 10, 769}, - dictWord{ - 137, - 0, - 455, - }, - dictWord{133, 11, 410}, - dictWord{8, 0, 906}, - dictWord{12, 0, 700}, - dictWord{12, 0, 706}, - dictWord{140, 0, 729}, - dictWord{21, 11, 33}, - dictWord{ - 150, - 11, - 40, - }, - dictWord{7, 10, 1951}, - dictWord{8, 10, 765}, - dictWord{8, 10, 772}, - dictWord{140, 10, 671}, - dictWord{7, 10, 108}, - dictWord{8, 10, 219}, - dictWord{ - 8, - 10, - 388, - }, - dictWord{9, 10, 639}, - dictWord{9, 10, 775}, - dictWord{11, 10, 275}, - dictWord{140, 10, 464}, - dictWord{5, 11, 322}, - dictWord{7, 11, 1941}, - dictWord{ - 8, - 11, - 186, - }, - dictWord{9, 11, 262}, - dictWord{10, 11, 187}, - dictWord{14, 11, 208}, - dictWord{146, 11, 130}, - dictWord{139, 0, 624}, - dictWord{8, 0, 574}, - dictWord{ - 5, - 11, - 227, - }, - dictWord{140, 11, 29}, - dictWord{7, 11, 1546}, - dictWord{11, 11, 299}, - dictWord{142, 11, 407}, - dictWord{5, 10, 15}, - dictWord{6, 10, 56}, - dictWord{ - 7, - 10, - 1758, - }, - dictWord{8, 10, 500}, - dictWord{9, 10, 730}, - dictWord{11, 10, 331}, - dictWord{13, 10, 150}, - dictWord{142, 10, 282}, - dictWord{7, 11, 1395}, - dictWord{8, 11, 486}, - dictWord{9, 11, 236}, - dictWord{9, 11, 878}, - dictWord{10, 11, 218}, - dictWord{11, 11, 95}, - dictWord{19, 11, 17}, - dictWord{147, 11, 31}, - 
dictWord{135, 11, 2043}, - dictWord{4, 0, 354}, - dictWord{146, 11, 4}, - dictWord{140, 11, 80}, - dictWord{135, 0, 1558}, - dictWord{134, 10, 1886}, - dictWord{ - 5, - 10, - 205, - }, - dictWord{6, 10, 438}, - dictWord{137, 10, 711}, - dictWord{133, 11, 522}, - dictWord{133, 10, 534}, - dictWord{7, 0, 235}, - dictWord{7, 0, 1475}, - dictWord{ - 15, - 0, - 68, - }, - dictWord{146, 0, 120}, - dictWord{137, 10, 691}, - dictWord{4, 0, 942}, - dictWord{6, 0, 1813}, - dictWord{8, 0, 917}, - dictWord{10, 0, 884}, - dictWord{ - 12, - 0, - 696, - }, - dictWord{12, 0, 717}, - dictWord{12, 0, 723}, - dictWord{12, 0, 738}, - dictWord{12, 0, 749}, - dictWord{12, 0, 780}, - dictWord{16, 0, 97}, - dictWord{146, 0, 169}, - dictWord{6, 10, 443}, - dictWord{8, 11, 562}, - dictWord{9, 10, 237}, - dictWord{9, 10, 571}, - dictWord{9, 10, 695}, - dictWord{10, 10, 139}, - dictWord{11, 10, 715}, - dictWord{12, 10, 417}, - dictWord{141, 10, 421}, - dictWord{135, 0, 957}, - dictWord{133, 0, 830}, - dictWord{134, 11, 1771}, - dictWord{146, 0, 23}, - dictWord{ - 5, - 0, - 496, - }, - dictWord{6, 0, 694}, - dictWord{7, 0, 203}, - dictWord{7, 11, 1190}, - dictWord{137, 11, 620}, - dictWord{137, 11, 132}, - dictWord{6, 0, 547}, - dictWord{ - 134, - 0, - 1549, - }, - dictWord{8, 11, 258}, - dictWord{9, 11, 208}, - dictWord{137, 11, 359}, - dictWord{4, 0, 864}, - dictWord{5, 0, 88}, - dictWord{137, 0, 239}, - dictWord{ - 135, - 11, - 493, - }, - dictWord{4, 11, 317}, - dictWord{135, 11, 1279}, - dictWord{132, 11, 477}, - dictWord{4, 10, 578}, - dictWord{5, 11, 63}, - dictWord{133, 11, 509}, - dictWord{ - 7, - 0, - 650, - }, - dictWord{135, 0, 1310}, - dictWord{7, 0, 1076}, - dictWord{9, 0, 80}, - dictWord{11, 0, 78}, - dictWord{11, 0, 421}, - dictWord{11, 0, 534}, - dictWord{ - 140, - 0, - 545, - }, - dictWord{132, 11, 288}, - dictWord{12, 0, 553}, - dictWord{14, 0, 118}, - dictWord{133, 10, 923}, - dictWord{7, 0, 274}, - dictWord{11, 0, 479}, - dictWord{ - 139, - 0, - 507, - }, - dictWord{8, 11, 
89}, - dictWord{8, 11, 620}, - dictWord{9, 11, 49}, - dictWord{10, 11, 774}, - dictWord{11, 11, 628}, - dictWord{12, 11, 322}, - dictWord{ - 143, - 11, - 124, - }, - dictWord{4, 0, 497}, - dictWord{135, 0, 1584}, - dictWord{7, 0, 261}, - dictWord{7, 0, 1115}, - dictWord{7, 0, 1354}, - dictWord{7, 0, 1404}, - dictWord{ - 7, - 0, - 1588, - }, - dictWord{7, 0, 1705}, - dictWord{7, 0, 1902}, - dictWord{9, 0, 465}, - dictWord{10, 0, 248}, - dictWord{10, 0, 349}, - dictWord{10, 0, 647}, - dictWord{11, 0, 527}, - dictWord{11, 0, 660}, - dictWord{11, 0, 669}, - dictWord{12, 0, 529}, - dictWord{13, 0, 305}, - dictWord{132, 10, 924}, - dictWord{133, 10, 665}, - dictWord{ - 136, - 0, - 13, - }, - dictWord{6, 0, 791}, - dictWord{138, 11, 120}, - dictWord{7, 0, 642}, - dictWord{8, 0, 250}, - dictWord{11, 0, 123}, - dictWord{11, 0, 137}, - dictWord{13, 0, 48}, - dictWord{142, 0, 95}, - dictWord{4, 10, 265}, - dictWord{7, 10, 807}, - dictWord{135, 10, 950}, - dictWord{5, 10, 93}, - dictWord{140, 10, 267}, - dictWord{135, 0, 1429}, - dictWord{4, 0, 949}, - dictWord{10, 0, 885}, - dictWord{10, 0, 891}, - dictWord{10, 0, 900}, - dictWord{10, 0, 939}, - dictWord{12, 0, 760}, - dictWord{142, 0, 449}, - dictWord{139, 11, 366}, - dictWord{132, 0, 818}, - dictWord{134, 11, 85}, - dictWord{135, 10, 994}, - dictWord{7, 0, 330}, - dictWord{5, 10, 233}, - dictWord{5, 10, 320}, - dictWord{6, 10, 140}, - dictWord{136, 10, 295}, - dictWord{4, 0, 1004}, - dictWord{8, 0, 982}, - dictWord{136, 0, 993}, - dictWord{133, 10, 978}, - dictWord{4, 10, 905}, - dictWord{6, 10, 1701}, - dictWord{137, 10, 843}, - dictWord{10, 0, 545}, - dictWord{140, 0, 301}, - dictWord{6, 0, 947}, - dictWord{134, 0, 1062}, - dictWord{ - 134, - 0, - 1188, - }, - dictWord{4, 0, 904}, - dictWord{5, 0, 794}, - dictWord{152, 10, 6}, - dictWord{134, 0, 1372}, - dictWord{135, 11, 608}, - dictWord{5, 11, 279}, - dictWord{ - 6, - 11, - 235, - }, - dictWord{7, 11, 468}, - dictWord{8, 11, 446}, - dictWord{9, 11, 637}, - dictWord{10, 
11, 717}, - dictWord{11, 11, 738}, - dictWord{140, 11, 514}, - dictWord{ - 132, - 10, - 509, - }, - dictWord{5, 11, 17}, - dictWord{6, 11, 371}, - dictWord{137, 11, 528}, - dictWord{132, 0, 693}, - dictWord{4, 11, 115}, - dictWord{5, 11, 669}, - dictWord{ - 6, - 11, - 407, - }, - dictWord{8, 11, 311}, - dictWord{11, 11, 10}, - dictWord{141, 11, 5}, - dictWord{11, 0, 377}, - dictWord{7, 10, 273}, - dictWord{137, 11, 381}, - dictWord{ - 135, - 0, - 695, - }, - dictWord{7, 0, 386}, - dictWord{138, 0, 713}, - dictWord{135, 10, 1041}, - dictWord{134, 0, 1291}, - dictWord{6, 0, 7}, - dictWord{6, 0, 35}, - dictWord{ - 7, - 0, - 147, - }, - dictWord{7, 0, 1069}, - dictWord{7, 0, 1568}, - dictWord{7, 0, 1575}, - dictWord{7, 0, 1917}, - dictWord{8, 0, 43}, - dictWord{8, 0, 208}, - dictWord{9, 0, 128}, - dictWord{ - 9, - 0, - 866, - }, - dictWord{10, 0, 20}, - dictWord{11, 0, 981}, - dictWord{147, 0, 33}, - dictWord{7, 0, 893}, - dictWord{141, 0, 424}, - dictWord{139, 10, 234}, - dictWord{ - 150, - 11, - 56, - }, - dictWord{5, 11, 779}, - dictWord{5, 11, 807}, - dictWord{6, 11, 1655}, - dictWord{134, 11, 1676}, - dictWord{5, 10, 802}, - dictWord{7, 10, 2021}, - dictWord{136, 10, 805}, - dictWord{4, 11, 196}, - dictWord{5, 10, 167}, - dictWord{5, 11, 558}, - dictWord{5, 10, 899}, - dictWord{5, 11, 949}, - dictWord{6, 10, 410}, - dictWord{137, 10, 777}, - dictWord{137, 10, 789}, - dictWord{134, 10, 1705}, - dictWord{8, 0, 904}, - dictWord{140, 0, 787}, - dictWord{6, 0, 322}, - dictWord{9, 0, 552}, - dictWord{11, 0, 274}, - dictWord{13, 0, 209}, - dictWord{13, 0, 499}, - dictWord{14, 0, 85}, - dictWord{15, 0, 126}, - dictWord{145, 0, 70}, - dictWord{135, 10, 10}, - dictWord{ - 5, - 10, - 11, - }, - dictWord{6, 10, 117}, - dictWord{6, 10, 485}, - dictWord{7, 10, 1133}, - dictWord{9, 10, 582}, - dictWord{9, 10, 594}, - dictWord{11, 10, 21}, - dictWord{ - 11, - 10, - 818, - }, - dictWord{12, 10, 535}, - dictWord{141, 10, 86}, - dictWord{4, 10, 264}, - dictWord{7, 10, 1067}, - 
dictWord{8, 10, 204}, - dictWord{8, 10, 385}, - dictWord{139, 10, 953}, - dictWord{132, 11, 752}, - dictWord{138, 10, 56}, - dictWord{133, 10, 470}, - dictWord{6, 0, 1808}, - dictWord{8, 0, 83}, - dictWord{8, 0, 742}, - dictWord{8, 0, 817}, - dictWord{9, 0, 28}, - dictWord{9, 0, 29}, - dictWord{9, 0, 885}, - dictWord{10, 0, 387}, - dictWord{11, 0, 633}, - dictWord{11, 0, 740}, - dictWord{13, 0, 235}, - dictWord{13, 0, 254}, - dictWord{15, 0, 143}, - dictWord{143, 0, 146}, - dictWord{140, 0, 49}, - dictWord{134, 0, 1832}, - dictWord{4, 11, 227}, - dictWord{5, 11, 159}, - dictWord{5, 11, 409}, - dictWord{7, 11, 80}, - dictWord{10, 11, 294}, - dictWord{10, 11, 479}, - dictWord{12, 11, 418}, - dictWord{14, 11, 50}, - dictWord{14, 11, 249}, - dictWord{142, 11, 295}, - dictWord{7, 11, 1470}, - dictWord{8, 11, 66}, - dictWord{8, 11, 137}, - dictWord{8, 11, 761}, - dictWord{9, 11, 638}, - dictWord{11, 11, 80}, - dictWord{11, 11, 212}, - dictWord{11, 11, 368}, - dictWord{11, 11, 418}, - dictWord{12, 11, 8}, - dictWord{13, 11, 15}, - dictWord{16, 11, 61}, - dictWord{17, 11, 59}, - dictWord{19, 11, 28}, - dictWord{148, 11, 84}, - dictWord{139, 10, 1015}, - dictWord{138, 11, 468}, - dictWord{135, 0, 421}, - dictWord{6, 0, 415}, - dictWord{ - 7, - 0, - 1049, - }, - dictWord{137, 0, 442}, - dictWord{6, 11, 38}, - dictWord{7, 11, 1220}, - dictWord{8, 11, 185}, - dictWord{8, 11, 256}, - dictWord{9, 11, 22}, - dictWord{ - 9, - 11, - 331, - }, - dictWord{10, 11, 738}, - dictWord{11, 11, 205}, - dictWord{11, 11, 540}, - dictWord{11, 11, 746}, - dictWord{13, 11, 399}, - dictWord{13, 11, 465}, - dictWord{ - 14, - 11, - 88, - }, - dictWord{142, 11, 194}, - dictWord{139, 0, 289}, - dictWord{133, 10, 715}, - dictWord{4, 0, 110}, - dictWord{10, 0, 415}, - dictWord{10, 0, 597}, - dictWord{142, 0, 206}, - dictWord{4, 11, 159}, - dictWord{6, 11, 115}, - dictWord{7, 11, 252}, - dictWord{7, 11, 257}, - dictWord{7, 11, 1928}, - dictWord{8, 11, 69}, - dictWord{ - 9, - 11, - 384, - }, - 
dictWord{10, 11, 91}, - dictWord{10, 11, 615}, - dictWord{12, 11, 375}, - dictWord{14, 11, 235}, - dictWord{18, 11, 117}, - dictWord{147, 11, 123}, - dictWord{5, 11, 911}, - dictWord{136, 11, 278}, - dictWord{7, 0, 205}, - dictWord{7, 0, 2000}, - dictWord{8, 10, 794}, - dictWord{9, 10, 400}, - dictWord{10, 10, 298}, - dictWord{142, 10, 228}, - dictWord{135, 11, 1774}, - dictWord{4, 11, 151}, - dictWord{7, 11, 1567}, - dictWord{8, 11, 351}, - dictWord{137, 11, 322}, - dictWord{ - 136, - 10, - 724, - }, - dictWord{133, 11, 990}, - dictWord{7, 0, 1539}, - dictWord{11, 0, 512}, - dictWord{13, 0, 205}, - dictWord{19, 0, 30}, - dictWord{22, 0, 36}, - dictWord{23, 0, 19}, - dictWord{135, 11, 1539}, - dictWord{5, 11, 194}, - dictWord{7, 11, 1662}, - dictWord{9, 11, 90}, - dictWord{140, 11, 180}, - dictWord{6, 10, 190}, - dictWord{ - 7, - 10, - 768, - }, - dictWord{135, 10, 1170}, - dictWord{134, 0, 1340}, - dictWord{4, 0, 283}, - dictWord{135, 0, 1194}, - dictWord{133, 11, 425}, - dictWord{133, 11, 971}, - dictWord{12, 0, 549}, - dictWord{14, 10, 67}, - dictWord{147, 10, 60}, - dictWord{135, 10, 1023}, - dictWord{134, 0, 1720}, - dictWord{138, 11, 587}, - dictWord{ - 5, - 11, - 72, - }, - dictWord{6, 11, 264}, - dictWord{7, 11, 21}, - dictWord{7, 11, 46}, - dictWord{7, 11, 2013}, - dictWord{8, 11, 215}, - dictWord{8, 11, 513}, - dictWord{10, 11, 266}, - dictWord{139, 11, 22}, - dictWord{5, 0, 319}, - dictWord{135, 0, 534}, - dictWord{6, 10, 137}, - dictWord{9, 10, 75}, - dictWord{9, 10, 253}, - dictWord{10, 10, 194}, - dictWord{138, 10, 444}, - dictWord{7, 0, 1180}, - dictWord{20, 0, 112}, - dictWord{6, 11, 239}, - dictWord{7, 11, 118}, - dictWord{10, 11, 95}, - dictWord{11, 11, 603}, - dictWord{13, 11, 443}, - dictWord{14, 11, 160}, - dictWord{143, 11, 4}, - dictWord{134, 11, 431}, - dictWord{5, 11, 874}, - dictWord{6, 11, 1677}, - dictWord{ - 11, - 10, - 643, - }, - dictWord{12, 10, 115}, - dictWord{143, 11, 0}, - dictWord{134, 0, 967}, - dictWord{6, 11, 65}, - 
dictWord{7, 11, 939}, - dictWord{7, 11, 1172}, - dictWord{ - 7, - 11, - 1671, - }, - dictWord{9, 11, 540}, - dictWord{10, 11, 696}, - dictWord{11, 11, 265}, - dictWord{11, 11, 732}, - dictWord{11, 11, 928}, - dictWord{11, 11, 937}, - dictWord{ - 12, - 11, - 399, - }, - dictWord{13, 11, 438}, - dictWord{149, 11, 19}, - dictWord{137, 11, 200}, - dictWord{135, 0, 1940}, - dictWord{5, 10, 760}, - dictWord{7, 10, 542}, - dictWord{8, 10, 135}, - dictWord{136, 10, 496}, - dictWord{140, 11, 44}, - dictWord{7, 11, 1655}, - dictWord{136, 11, 305}, - dictWord{7, 10, 319}, - dictWord{ - 7, - 10, - 355, - }, - dictWord{7, 10, 763}, - dictWord{10, 10, 389}, - dictWord{145, 10, 43}, - dictWord{136, 0, 735}, - dictWord{138, 10, 786}, - dictWord{137, 11, 19}, - dictWord{132, 11, 696}, - dictWord{5, 0, 132}, - dictWord{9, 0, 486}, - dictWord{9, 0, 715}, - dictWord{10, 0, 458}, - dictWord{11, 0, 373}, - dictWord{11, 0, 668}, - dictWord{ - 11, - 0, - 795, - }, - dictWord{11, 0, 897}, - dictWord{12, 0, 272}, - dictWord{12, 0, 424}, - dictWord{12, 0, 539}, - dictWord{12, 0, 558}, - dictWord{14, 0, 245}, - dictWord{ - 14, - 0, - 263, - }, - dictWord{14, 0, 264}, - dictWord{14, 0, 393}, - dictWord{142, 0, 403}, - dictWord{10, 0, 38}, - dictWord{139, 0, 784}, - dictWord{132, 0, 838}, - dictWord{ - 4, - 11, - 302, - }, - dictWord{135, 11, 1766}, - dictWord{133, 0, 379}, - dictWord{5, 0, 8}, - dictWord{6, 0, 89}, - dictWord{6, 0, 400}, - dictWord{7, 0, 1569}, - dictWord{7, 0, 1623}, - dictWord{7, 0, 1850}, - dictWord{8, 0, 218}, - dictWord{8, 0, 422}, - dictWord{9, 0, 570}, - dictWord{10, 0, 626}, - dictWord{4, 11, 726}, - dictWord{133, 11, 630}, - dictWord{ - 4, - 0, - 1017, - }, - dictWord{138, 0, 660}, - dictWord{6, 0, 387}, - dictWord{7, 0, 882}, - dictWord{141, 0, 111}, - dictWord{6, 0, 224}, - dictWord{7, 0, 877}, - dictWord{ - 137, - 0, - 647, - }, - dictWord{4, 10, 58}, - dictWord{5, 10, 286}, - dictWord{6, 10, 319}, - dictWord{7, 10, 402}, - dictWord{7, 10, 1254}, - dictWord{7, 10, 
1903}, - dictWord{ - 8, - 10, - 356, - }, - dictWord{140, 10, 408}, - dictWord{135, 0, 790}, - dictWord{9, 0, 510}, - dictWord{10, 0, 53}, - dictWord{4, 10, 389}, - dictWord{9, 10, 181}, - dictWord{ - 10, - 10, - 29, - }, - dictWord{10, 10, 816}, - dictWord{11, 10, 311}, - dictWord{11, 10, 561}, - dictWord{12, 10, 67}, - dictWord{141, 10, 181}, - dictWord{142, 0, 458}, - dictWord{ - 6, - 11, - 118, - }, - dictWord{7, 11, 215}, - dictWord{7, 11, 1521}, - dictWord{140, 11, 11}, - dictWord{134, 0, 954}, - dictWord{135, 0, 394}, - dictWord{134, 0, 1367}, - dictWord{5, 11, 225}, - dictWord{133, 10, 373}, - dictWord{132, 0, 882}, - dictWord{7, 0, 1409}, - dictWord{135, 10, 1972}, - dictWord{135, 10, 1793}, - dictWord{ - 4, - 11, - 370, - }, - dictWord{5, 11, 756}, - dictWord{135, 11, 1326}, - dictWord{150, 11, 13}, - dictWord{7, 11, 354}, - dictWord{10, 11, 410}, - dictWord{139, 11, 815}, - dictWord{6, 11, 1662}, - dictWord{7, 11, 48}, - dictWord{8, 11, 771}, - dictWord{10, 11, 116}, - dictWord{13, 11, 104}, - dictWord{14, 11, 105}, - dictWord{14, 11, 184}, - dictWord{15, 11, 168}, - dictWord{19, 11, 92}, - dictWord{148, 11, 68}, - dictWord{7, 0, 124}, - dictWord{136, 0, 38}, - dictWord{5, 0, 261}, - dictWord{7, 0, 78}, - dictWord{ - 7, - 0, - 199, - }, - dictWord{8, 0, 815}, - dictWord{9, 0, 126}, - dictWord{10, 0, 342}, - dictWord{140, 0, 647}, - dictWord{4, 0, 628}, - dictWord{140, 0, 724}, - dictWord{7, 0, 266}, - dictWord{8, 0, 804}, - dictWord{7, 10, 1651}, - dictWord{145, 10, 89}, - dictWord{135, 0, 208}, - dictWord{134, 0, 1178}, - dictWord{6, 0, 79}, - dictWord{135, 0, 1519}, - dictWord{132, 10, 672}, - dictWord{133, 10, 737}, - dictWord{136, 0, 741}, - dictWord{132, 11, 120}, - dictWord{4, 0, 710}, - dictWord{6, 0, 376}, - dictWord{ - 134, - 0, - 606, - }, - dictWord{134, 0, 1347}, - dictWord{134, 0, 1494}, - dictWord{6, 0, 850}, - dictWord{6, 0, 1553}, - dictWord{137, 0, 821}, - dictWord{5, 10, 145}, - dictWord{ - 134, - 11, - 593, - }, - dictWord{7, 0, 1311}, 
- dictWord{140, 0, 135}, - dictWord{4, 0, 467}, - dictWord{5, 0, 405}, - dictWord{134, 0, 544}, - dictWord{5, 11, 820}, - dictWord{ - 135, - 11, - 931, - }, - dictWord{6, 0, 100}, - dictWord{7, 0, 244}, - dictWord{7, 0, 632}, - dictWord{7, 0, 1609}, - dictWord{8, 0, 178}, - dictWord{8, 0, 638}, - dictWord{141, 0, 58}, - dictWord{4, 10, 387}, - dictWord{135, 10, 1288}, - dictWord{6, 11, 151}, - dictWord{6, 11, 1675}, - dictWord{7, 11, 383}, - dictWord{151, 11, 10}, - dictWord{ - 132, - 0, - 481, - }, - dictWord{135, 10, 550}, - dictWord{134, 0, 1378}, - dictWord{6, 11, 1624}, - dictWord{11, 11, 11}, - dictWord{12, 11, 422}, - dictWord{13, 11, 262}, - dictWord{142, 11, 360}, - dictWord{133, 0, 791}, - dictWord{4, 11, 43}, - dictWord{5, 11, 344}, - dictWord{133, 11, 357}, - dictWord{7, 0, 1227}, - dictWord{140, 0, 978}, - dictWord{7, 0, 686}, - dictWord{8, 0, 33}, - dictWord{8, 0, 238}, - dictWord{10, 0, 616}, - dictWord{11, 0, 467}, - dictWord{11, 0, 881}, - dictWord{13, 0, 217}, - dictWord{ - 13, - 0, - 253, - }, - dictWord{142, 0, 268}, - dictWord{137, 0, 857}, - dictWord{8, 0, 467}, - dictWord{8, 0, 1006}, - dictWord{7, 11, 148}, - dictWord{8, 11, 284}, - dictWord{ - 141, - 11, - 63, - }, - dictWord{4, 10, 576}, - dictWord{135, 10, 1263}, - dictWord{133, 11, 888}, - dictWord{5, 10, 919}, - dictWord{134, 10, 1673}, - dictWord{20, 10, 37}, - dictWord{148, 11, 37}, - dictWord{132, 0, 447}, - dictWord{132, 11, 711}, - dictWord{4, 0, 128}, - dictWord{5, 0, 415}, - dictWord{6, 0, 462}, - dictWord{7, 0, 294}, - dictWord{ - 7, - 0, - 578, - }, - dictWord{10, 0, 710}, - dictWord{139, 0, 86}, - dictWord{4, 10, 82}, - dictWord{5, 10, 333}, - dictWord{5, 10, 904}, - dictWord{6, 10, 207}, - dictWord{7, 10, 325}, - dictWord{7, 10, 1726}, - dictWord{8, 10, 101}, - dictWord{10, 10, 778}, - dictWord{139, 10, 220}, - dictWord{136, 0, 587}, - dictWord{137, 11, 440}, - dictWord{ - 133, - 10, - 903, - }, - dictWord{6, 0, 427}, - dictWord{7, 0, 1018}, - dictWord{138, 0, 692}, - 
dictWord{4, 0, 195}, - dictWord{135, 0, 802}, - dictWord{140, 10, 147}, - dictWord{ - 134, - 0, - 1546, - }, - dictWord{134, 0, 684}, - dictWord{132, 10, 705}, - dictWord{136, 0, 345}, - dictWord{11, 11, 678}, - dictWord{140, 11, 307}, - dictWord{ - 133, - 0, - 365, - }, - dictWord{134, 0, 1683}, - dictWord{4, 11, 65}, - dictWord{5, 11, 479}, - dictWord{5, 11, 1004}, - dictWord{7, 11, 1913}, - dictWord{8, 11, 317}, - dictWord{ - 9, - 11, - 302, - }, - dictWord{10, 11, 612}, - dictWord{141, 11, 22}, - dictWord{138, 0, 472}, - dictWord{4, 11, 261}, - dictWord{135, 11, 510}, - dictWord{134, 10, 90}, - dictWord{142, 0, 433}, - dictWord{151, 0, 28}, - dictWord{4, 11, 291}, - dictWord{7, 11, 101}, - dictWord{9, 11, 515}, - dictWord{12, 11, 152}, - dictWord{12, 11, 443}, - dictWord{13, 11, 392}, - dictWord{142, 11, 357}, - dictWord{140, 0, 997}, - dictWord{5, 0, 3}, - dictWord{8, 0, 578}, - dictWord{9, 0, 118}, - dictWord{10, 0, 705}, - dictWord{ - 141, - 0, - 279, - }, - dictWord{135, 11, 1266}, - dictWord{7, 10, 813}, - dictWord{12, 10, 497}, - dictWord{141, 10, 56}, - dictWord{133, 0, 229}, - dictWord{6, 10, 125}, - dictWord{135, 10, 1277}, - dictWord{8, 0, 102}, - dictWord{10, 0, 578}, - dictWord{10, 0, 672}, - dictWord{12, 0, 496}, - dictWord{13, 0, 408}, - dictWord{14, 0, 121}, - dictWord{17, 0, 106}, - dictWord{151, 10, 12}, - dictWord{6, 0, 866}, - dictWord{134, 0, 1080}, - dictWord{136, 0, 1022}, - dictWord{4, 11, 130}, - dictWord{135, 11, 843}, - dictWord{5, 11, 42}, - dictWord{5, 11, 879}, - dictWord{7, 11, 245}, - dictWord{7, 11, 324}, - dictWord{7, 11, 1532}, - dictWord{11, 11, 463}, - dictWord{11, 11, 472}, - dictWord{13, 11, 363}, - dictWord{144, 11, 52}, - dictWord{150, 0, 55}, - dictWord{8, 0, 115}, - dictWord{8, 0, 350}, - dictWord{9, 0, 489}, - dictWord{10, 0, 128}, - dictWord{ - 11, - 0, - 306, - }, - dictWord{12, 0, 373}, - dictWord{14, 0, 30}, - dictWord{17, 0, 79}, - dictWord{19, 0, 80}, - dictWord{4, 11, 134}, - dictWord{133, 11, 372}, - dictWord{ 
- 134, - 0, - 657, - }, - dictWord{134, 0, 933}, - dictWord{135, 11, 1147}, - dictWord{4, 0, 230}, - dictWord{133, 0, 702}, - dictWord{134, 0, 1728}, - dictWord{4, 0, 484}, - dictWord{ - 18, - 0, - 26, - }, - dictWord{19, 0, 42}, - dictWord{20, 0, 43}, - dictWord{21, 0, 0}, - dictWord{23, 0, 27}, - dictWord{152, 0, 14}, - dictWord{7, 0, 185}, - dictWord{135, 0, 703}, - dictWord{ - 6, - 0, - 417, - }, - dictWord{10, 0, 618}, - dictWord{7, 10, 1106}, - dictWord{9, 10, 770}, - dictWord{11, 10, 112}, - dictWord{140, 10, 413}, - dictWord{134, 0, 803}, - dictWord{132, 11, 644}, - dictWord{134, 0, 1262}, - dictWord{7, 11, 540}, - dictWord{12, 10, 271}, - dictWord{145, 10, 109}, - dictWord{135, 11, 123}, - dictWord{ - 132, - 0, - 633, - }, - dictWord{134, 11, 623}, - dictWord{4, 11, 908}, - dictWord{5, 11, 359}, - dictWord{5, 11, 508}, - dictWord{6, 11, 1723}, - dictWord{7, 11, 343}, - dictWord{ - 7, - 11, - 1996, - }, - dictWord{135, 11, 2026}, - dictWord{135, 0, 479}, - dictWord{10, 0, 262}, - dictWord{7, 10, 304}, - dictWord{9, 10, 646}, - dictWord{9, 10, 862}, - dictWord{ - 11, - 10, - 696, - }, - dictWord{12, 10, 208}, - dictWord{15, 10, 79}, - dictWord{147, 10, 108}, - dictWord{4, 11, 341}, - dictWord{135, 11, 480}, - dictWord{134, 0, 830}, - dictWord{5, 0, 70}, - dictWord{5, 0, 622}, - dictWord{6, 0, 334}, - dictWord{7, 0, 1032}, - dictWord{9, 0, 171}, - dictWord{11, 0, 26}, - dictWord{11, 0, 213}, - dictWord{ - 11, - 0, - 637, - }, - dictWord{11, 0, 707}, - dictWord{12, 0, 202}, - dictWord{12, 0, 380}, - dictWord{13, 0, 226}, - dictWord{13, 0, 355}, - dictWord{14, 0, 222}, - dictWord{145, 0, 42}, - dictWord{135, 10, 981}, - dictWord{143, 0, 217}, - dictWord{137, 11, 114}, - dictWord{4, 0, 23}, - dictWord{4, 0, 141}, - dictWord{5, 0, 313}, - dictWord{5, 0, 1014}, - dictWord{6, 0, 50}, - dictWord{6, 0, 51}, - dictWord{7, 0, 142}, - dictWord{7, 0, 384}, - dictWord{7, 0, 559}, - dictWord{8, 0, 640}, - dictWord{9, 0, 460}, - dictWord{9, 0, 783}, - dictWord{11, 0, 741}, 
- dictWord{12, 0, 183}, - dictWord{141, 0, 488}, - dictWord{141, 0, 360}, - dictWord{7, 0, 1586}, - dictWord{7, 11, 1995}, - dictWord{8, 11, 299}, - dictWord{11, 11, 890}, - dictWord{140, 11, 674}, - dictWord{132, 10, 434}, - dictWord{7, 0, 652}, - dictWord{134, 10, 550}, - dictWord{7, 0, 766}, - dictWord{5, 10, 553}, - dictWord{138, 10, 824}, - dictWord{7, 0, 737}, - dictWord{8, 0, 298}, - dictWord{136, 10, 452}, - dictWord{4, 11, 238}, - dictWord{5, 11, 503}, - dictWord{6, 11, 179}, - dictWord{7, 11, 2003}, - dictWord{8, 11, 381}, - dictWord{8, 11, 473}, - dictWord{9, 11, 149}, - dictWord{10, 11, 183}, - dictWord{15, 11, 45}, - dictWord{143, 11, 86}, - dictWord{133, 10, 292}, - dictWord{5, 0, 222}, - dictWord{9, 0, 655}, - dictWord{138, 0, 534}, - dictWord{138, 10, 135}, - dictWord{4, 11, 121}, - dictWord{5, 11, 156}, - dictWord{5, 11, 349}, - dictWord{9, 11, 136}, - dictWord{10, 11, 605}, - dictWord{14, 11, 342}, - dictWord{147, 11, 107}, - dictWord{137, 0, 906}, - dictWord{6, 0, 1013}, - dictWord{134, 0, 1250}, - dictWord{6, 0, 1956}, - dictWord{6, 0, 2009}, - dictWord{8, 0, 991}, - dictWord{144, 0, 120}, - dictWord{135, 11, 1192}, - dictWord{ - 138, - 0, - 503, - }, - dictWord{5, 0, 154}, - dictWord{7, 0, 1491}, - dictWord{10, 0, 379}, - dictWord{138, 0, 485}, - dictWord{6, 0, 1867}, - dictWord{6, 0, 1914}, - dictWord{6, 0, 1925}, - dictWord{9, 0, 917}, - dictWord{9, 0, 925}, - dictWord{9, 0, 932}, - dictWord{9, 0, 951}, - dictWord{9, 0, 1007}, - dictWord{9, 0, 1013}, - dictWord{12, 0, 806}, - dictWord{ - 12, - 0, - 810, - }, - dictWord{12, 0, 814}, - dictWord{12, 0, 816}, - dictWord{12, 0, 824}, - dictWord{12, 0, 832}, - dictWord{12, 0, 837}, - dictWord{12, 0, 863}, - dictWord{ - 12, - 0, - 868, - }, - dictWord{12, 0, 870}, - dictWord{12, 0, 889}, - dictWord{12, 0, 892}, - dictWord{12, 0, 900}, - dictWord{12, 0, 902}, - dictWord{12, 0, 908}, - dictWord{12, 0, 933}, - dictWord{12, 0, 942}, - dictWord{12, 0, 949}, - dictWord{12, 0, 954}, - dictWord{15, 0, 175}, 
- dictWord{15, 0, 203}, - dictWord{15, 0, 213}, - dictWord{15, 0, 218}, - dictWord{15, 0, 225}, - dictWord{15, 0, 231}, - dictWord{15, 0, 239}, - dictWord{15, 0, 248}, - dictWord{15, 0, 252}, - dictWord{18, 0, 190}, - dictWord{18, 0, 204}, - dictWord{ - 18, - 0, - 215, - }, - dictWord{18, 0, 216}, - dictWord{18, 0, 222}, - dictWord{18, 0, 225}, - dictWord{18, 0, 230}, - dictWord{18, 0, 239}, - dictWord{18, 0, 241}, - dictWord{ - 21, - 0, - 42, - }, - dictWord{21, 0, 43}, - dictWord{21, 0, 44}, - dictWord{21, 0, 45}, - dictWord{21, 0, 46}, - dictWord{21, 0, 53}, - dictWord{24, 0, 27}, - dictWord{152, 0, 31}, - dictWord{ - 133, - 0, - 716, - }, - dictWord{135, 0, 844}, - dictWord{4, 0, 91}, - dictWord{5, 0, 388}, - dictWord{5, 0, 845}, - dictWord{6, 0, 206}, - dictWord{6, 0, 252}, - dictWord{6, 0, 365}, - dictWord{ - 7, - 0, - 136, - }, - dictWord{7, 0, 531}, - dictWord{136, 0, 621}, - dictWord{7, 10, 393}, - dictWord{10, 10, 603}, - dictWord{139, 10, 206}, - dictWord{6, 11, 80}, - dictWord{ - 6, - 11, - 1694, - }, - dictWord{7, 11, 173}, - dictWord{7, 11, 1974}, - dictWord{9, 11, 547}, - dictWord{10, 11, 730}, - dictWord{14, 11, 18}, - dictWord{150, 11, 39}, - dictWord{137, 0, 748}, - dictWord{4, 11, 923}, - dictWord{134, 11, 1711}, - dictWord{4, 10, 912}, - dictWord{137, 10, 232}, - dictWord{7, 10, 98}, - dictWord{7, 10, 1973}, - dictWord{136, 10, 716}, - dictWord{14, 0, 103}, - dictWord{133, 10, 733}, - dictWord{132, 11, 595}, - dictWord{12, 0, 158}, - dictWord{18, 0, 8}, - dictWord{19, 0, 62}, - dictWord{20, 0, 6}, - dictWord{22, 0, 4}, - dictWord{23, 0, 2}, - dictWord{23, 0, 9}, - dictWord{5, 11, 240}, - dictWord{6, 11, 459}, - dictWord{7, 11, 12}, - dictWord{7, 11, 114}, - dictWord{7, 11, 502}, - dictWord{7, 11, 1751}, - dictWord{7, 11, 1753}, - dictWord{7, 11, 1805}, - dictWord{8, 11, 658}, - dictWord{9, 11, 1}, - dictWord{11, 11, 959}, - dictWord{13, 11, 446}, - dictWord{142, 11, 211}, - dictWord{135, 0, 576}, - dictWord{5, 0, 771}, - dictWord{5, 0, 863}, - 
dictWord{5, 0, 898}, - dictWord{6, 0, 648}, - dictWord{ - 6, - 0, - 1632, - }, - dictWord{6, 0, 1644}, - dictWord{134, 0, 1780}, - dictWord{133, 0, 331}, - dictWord{7, 11, 633}, - dictWord{7, 11, 905}, - dictWord{7, 11, 909}, - dictWord{ - 7, - 11, - 1538, - }, - dictWord{9, 11, 767}, - dictWord{140, 11, 636}, - dictWord{140, 0, 632}, - dictWord{5, 0, 107}, - dictWord{7, 0, 201}, - dictWord{136, 0, 518}, - dictWord{ - 6, - 0, - 446, - }, - dictWord{7, 0, 1817}, - dictWord{134, 11, 490}, - dictWord{9, 0, 851}, - dictWord{141, 0, 510}, - dictWord{7, 11, 250}, - dictWord{8, 11, 506}, - dictWord{ - 136, - 11, - 507, - }, - dictWord{4, 0, 504}, - dictWord{137, 10, 72}, - dictWord{132, 11, 158}, - dictWord{4, 11, 140}, - dictWord{7, 11, 362}, - dictWord{8, 11, 209}, - dictWord{ - 9, - 11, - 10, - }, - dictWord{9, 11, 160}, - dictWord{9, 11, 503}, - dictWord{10, 11, 689}, - dictWord{11, 11, 350}, - dictWord{11, 11, 553}, - dictWord{11, 11, 725}, - dictWord{ - 12, - 11, - 252, - }, - dictWord{12, 11, 583}, - dictWord{13, 11, 192}, - dictWord{13, 11, 352}, - dictWord{14, 11, 269}, - dictWord{14, 11, 356}, - dictWord{148, 11, 50}, - dictWord{6, 11, 597}, - dictWord{135, 11, 1318}, - dictWord{135, 10, 1454}, - dictWord{5, 0, 883}, - dictWord{5, 0, 975}, - dictWord{8, 0, 392}, - dictWord{148, 0, 7}, - dictWord{6, 11, 228}, - dictWord{7, 11, 1341}, - dictWord{9, 11, 408}, - dictWord{138, 11, 343}, - dictWord{11, 11, 348}, - dictWord{11, 10, 600}, - dictWord{12, 11, 99}, - dictWord{13, 10, 245}, - dictWord{18, 11, 1}, - dictWord{18, 11, 11}, - dictWord{147, 11, 4}, - dictWord{134, 11, 296}, - dictWord{5, 0, 922}, - dictWord{134, 0, 1707}, - dictWord{132, 11, 557}, - dictWord{4, 11, 548}, - dictWord{7, 10, 164}, - dictWord{7, 10, 1571}, - dictWord{9, 10, 107}, - dictWord{140, 10, 225}, - dictWord{ - 7, - 11, - 197, - }, - dictWord{8, 11, 142}, - dictWord{8, 11, 325}, - dictWord{9, 11, 150}, - dictWord{9, 11, 596}, - dictWord{10, 11, 350}, - dictWord{10, 11, 353}, - dictWord{ - 
11, - 11, - 74, - }, - dictWord{11, 11, 315}, - dictWord{14, 11, 423}, - dictWord{143, 11, 141}, - dictWord{5, 0, 993}, - dictWord{7, 0, 515}, - dictWord{137, 0, 91}, - dictWord{4, 0, 131}, - dictWord{8, 0, 200}, - dictWord{5, 10, 484}, - dictWord{5, 10, 510}, - dictWord{6, 10, 434}, - dictWord{7, 10, 1000}, - dictWord{7, 10, 1098}, - dictWord{136, 10, 2}, - dictWord{152, 0, 10}, - dictWord{4, 11, 62}, - dictWord{5, 11, 83}, - dictWord{6, 11, 399}, - dictWord{6, 11, 579}, - dictWord{7, 11, 692}, - dictWord{7, 11, 846}, - dictWord{ - 7, - 11, - 1015, - }, - dictWord{7, 11, 1799}, - dictWord{8, 11, 403}, - dictWord{9, 11, 394}, - dictWord{10, 11, 133}, - dictWord{12, 11, 4}, - dictWord{12, 11, 297}, - dictWord{ - 12, - 11, - 452, - }, - dictWord{16, 11, 81}, - dictWord{18, 11, 19}, - dictWord{18, 11, 25}, - dictWord{21, 11, 14}, - dictWord{22, 11, 12}, - dictWord{151, 11, 18}, - dictWord{ - 140, - 11, - 459, - }, - dictWord{132, 11, 177}, - dictWord{7, 0, 1433}, - dictWord{9, 0, 365}, - dictWord{137, 11, 365}, - dictWord{132, 10, 460}, - dictWord{5, 0, 103}, - dictWord{ - 6, - 0, - 2004, - }, - dictWord{7, 0, 921}, - dictWord{8, 0, 580}, - dictWord{8, 0, 593}, - dictWord{8, 0, 630}, - dictWord{10, 0, 28}, - dictWord{5, 11, 411}, - dictWord{ - 135, - 11, - 653, - }, - dictWord{4, 10, 932}, - dictWord{133, 10, 891}, - dictWord{4, 0, 911}, - dictWord{5, 0, 867}, - dictWord{5, 0, 1013}, - dictWord{7, 0, 2034}, - dictWord{8, 0, 798}, - dictWord{136, 0, 813}, - dictWord{7, 11, 439}, - dictWord{10, 11, 727}, - dictWord{11, 11, 260}, - dictWord{139, 11, 684}, - dictWord{136, 10, 625}, - dictWord{ - 5, - 11, - 208, - }, - dictWord{7, 11, 753}, - dictWord{135, 11, 1528}, - dictWord{5, 0, 461}, - dictWord{7, 0, 1925}, - dictWord{12, 0, 39}, - dictWord{13, 0, 265}, - dictWord{ - 13, - 0, - 439, - }, - dictWord{134, 10, 76}, - dictWord{6, 0, 853}, - dictWord{8, 10, 92}, - dictWord{137, 10, 221}, - dictWord{5, 0, 135}, - dictWord{6, 0, 519}, - dictWord{7, 0, 1722}, - dictWord{10, 
0, 271}, - dictWord{11, 0, 261}, - dictWord{145, 0, 54}, - dictWord{139, 11, 814}, - dictWord{14, 0, 338}, - dictWord{148, 0, 81}, - dictWord{4, 0, 300}, - dictWord{133, 0, 436}, - dictWord{5, 0, 419}, - dictWord{5, 0, 687}, - dictWord{7, 0, 864}, - dictWord{9, 0, 470}, - dictWord{135, 11, 864}, - dictWord{9, 0, 836}, - dictWord{ - 133, - 11, - 242, - }, - dictWord{134, 0, 1937}, - dictWord{4, 10, 763}, - dictWord{133, 11, 953}, - dictWord{132, 10, 622}, - dictWord{132, 0, 393}, - dictWord{ - 133, - 10, - 253, - }, - dictWord{8, 0, 357}, - dictWord{10, 0, 745}, - dictWord{14, 0, 426}, - dictWord{17, 0, 94}, - dictWord{19, 0, 57}, - dictWord{135, 10, 546}, - dictWord{5, 11, 615}, - dictWord{146, 11, 37}, - dictWord{9, 10, 73}, - dictWord{10, 10, 110}, - dictWord{14, 10, 185}, - dictWord{145, 10, 119}, - dictWord{11, 0, 703}, - dictWord{7, 10, 624}, - dictWord{7, 10, 916}, - dictWord{10, 10, 256}, - dictWord{139, 10, 87}, - dictWord{133, 11, 290}, - dictWord{5, 10, 212}, - dictWord{12, 10, 35}, - dictWord{ - 141, - 10, - 382, - }, - dictWord{132, 11, 380}, - dictWord{5, 11, 52}, - dictWord{7, 11, 277}, - dictWord{9, 11, 368}, - dictWord{139, 11, 791}, - dictWord{133, 0, 387}, - dictWord{ - 10, - 11, - 138, - }, - dictWord{139, 11, 476}, - dictWord{4, 0, 6}, - dictWord{5, 0, 708}, - dictWord{136, 0, 75}, - dictWord{7, 0, 1351}, - dictWord{9, 0, 581}, - dictWord{10, 0, 639}, - dictWord{11, 0, 453}, - dictWord{140, 0, 584}, - dictWord{132, 0, 303}, - dictWord{138, 0, 772}, - dictWord{135, 10, 1175}, - dictWord{4, 0, 749}, - dictWord{ - 5, - 10, - 816, - }, - dictWord{6, 11, 256}, - dictWord{7, 11, 307}, - dictWord{7, 11, 999}, - dictWord{7, 11, 1481}, - dictWord{7, 11, 1732}, - dictWord{7, 11, 1738}, - dictWord{ - 8, - 11, - 265, - }, - dictWord{9, 11, 414}, - dictWord{11, 11, 316}, - dictWord{12, 11, 52}, - dictWord{13, 11, 420}, - dictWord{147, 11, 100}, - dictWord{135, 11, 1296}, - dictWord{ - 6, - 0, - 1065, - }, - dictWord{5, 10, 869}, - dictWord{5, 10, 968}, - 
dictWord{6, 10, 1626}, - dictWord{8, 10, 734}, - dictWord{136, 10, 784}, - dictWord{4, 10, 542}, - dictWord{ - 6, - 10, - 1716, - }, - dictWord{6, 10, 1727}, - dictWord{7, 10, 1082}, - dictWord{7, 10, 1545}, - dictWord{8, 10, 56}, - dictWord{8, 10, 118}, - dictWord{8, 10, 412}, - dictWord{ - 8, - 10, - 564, - }, - dictWord{9, 10, 888}, - dictWord{9, 10, 908}, - dictWord{10, 10, 50}, - dictWord{10, 10, 423}, - dictWord{11, 10, 685}, - dictWord{11, 10, 697}, - dictWord{11, 10, 933}, - dictWord{12, 10, 299}, - dictWord{13, 10, 126}, - dictWord{13, 10, 136}, - dictWord{13, 10, 170}, - dictWord{141, 10, 190}, - dictWord{ - 134, - 0, - 226, - }, - dictWord{4, 0, 106}, - dictWord{7, 0, 310}, - dictWord{11, 0, 717}, - dictWord{133, 11, 723}, - dictWord{5, 0, 890}, - dictWord{5, 0, 988}, - dictWord{4, 10, 232}, - dictWord{9, 10, 202}, - dictWord{10, 10, 474}, - dictWord{140, 10, 433}, - dictWord{6, 0, 626}, - dictWord{142, 0, 431}, - dictWord{10, 0, 706}, - dictWord{150, 0, 44}, - dictWord{13, 0, 51}, - dictWord{6, 10, 108}, - dictWord{7, 10, 1003}, - dictWord{7, 10, 1181}, - dictWord{8, 10, 111}, - dictWord{136, 10, 343}, - dictWord{132, 0, 698}, - dictWord{5, 11, 109}, - dictWord{6, 11, 1784}, - dictWord{7, 11, 1895}, - dictWord{12, 11, 296}, - dictWord{140, 11, 302}, - dictWord{134, 0, 828}, - dictWord{ - 134, - 10, - 1712, - }, - dictWord{138, 0, 17}, - dictWord{7, 0, 1929}, - dictWord{4, 10, 133}, - dictWord{5, 11, 216}, - dictWord{7, 10, 711}, - dictWord{7, 10, 1298}, - dictWord{ - 7, - 10, - 1585, - }, - dictWord{7, 11, 1879}, - dictWord{9, 11, 141}, - dictWord{9, 11, 270}, - dictWord{9, 11, 679}, - dictWord{10, 11, 159}, - dictWord{10, 11, 553}, - dictWord{ - 11, - 11, - 197, - }, - dictWord{11, 11, 438}, - dictWord{12, 11, 538}, - dictWord{12, 11, 559}, - dictWord{13, 11, 193}, - dictWord{13, 11, 423}, - dictWord{14, 11, 144}, - dictWord{14, 11, 166}, - dictWord{14, 11, 167}, - dictWord{15, 11, 67}, - dictWord{147, 11, 84}, - dictWord{141, 11, 127}, - dictWord{7, 
11, 1872}, - dictWord{ - 137, - 11, - 81, - }, - dictWord{6, 10, 99}, - dictWord{7, 10, 1808}, - dictWord{145, 10, 57}, - dictWord{134, 11, 391}, - dictWord{5, 0, 689}, - dictWord{6, 0, 84}, - dictWord{7, 0, 1250}, - dictWord{6, 10, 574}, - dictWord{7, 10, 428}, - dictWord{10, 10, 669}, - dictWord{11, 10, 485}, - dictWord{11, 10, 840}, - dictWord{12, 10, 300}, - dictWord{ - 142, - 10, - 250, - }, - dictWord{7, 11, 322}, - dictWord{136, 11, 249}, - dictWord{7, 11, 432}, - dictWord{135, 11, 1649}, - dictWord{135, 10, 1871}, - dictWord{137, 10, 252}, - dictWord{6, 11, 155}, - dictWord{140, 11, 234}, - dictWord{7, 0, 871}, - dictWord{19, 0, 27}, - dictWord{147, 11, 27}, - dictWord{140, 0, 498}, - dictWord{5, 0, 986}, - dictWord{6, 0, 130}, - dictWord{138, 0, 823}, - dictWord{6, 0, 1793}, - dictWord{7, 0, 1582}, - dictWord{8, 0, 458}, - dictWord{10, 0, 101}, - dictWord{10, 0, 318}, - dictWord{ - 10, - 0, - 945, - }, - dictWord{12, 0, 734}, - dictWord{16, 0, 104}, - dictWord{18, 0, 177}, - dictWord{6, 10, 323}, - dictWord{135, 10, 1564}, - dictWord{5, 11, 632}, - dictWord{ - 138, - 11, - 526, - }, - dictWord{10, 0, 435}, - dictWord{7, 10, 461}, - dictWord{136, 10, 775}, - dictWord{6, 11, 144}, - dictWord{7, 11, 948}, - dictWord{7, 11, 1042}, - dictWord{ - 7, - 11, - 1857, - }, - dictWord{8, 11, 235}, - dictWord{8, 11, 461}, - dictWord{9, 11, 453}, - dictWord{9, 11, 530}, - dictWord{10, 11, 354}, - dictWord{17, 11, 77}, - dictWord{ - 19, - 11, - 99, - }, - dictWord{148, 11, 79}, - dictWord{138, 0, 966}, - dictWord{7, 0, 1644}, - dictWord{137, 0, 129}, - dictWord{135, 0, 997}, - dictWord{136, 0, 502}, - dictWord{ - 5, - 11, - 196, - }, - dictWord{6, 11, 486}, - dictWord{7, 11, 212}, - dictWord{8, 11, 309}, - dictWord{136, 11, 346}, - dictWord{7, 10, 727}, - dictWord{146, 10, 73}, - dictWord{132, 0, 823}, - dictWord{132, 11, 686}, - dictWord{135, 0, 1927}, - dictWord{4, 0, 762}, - dictWord{7, 0, 1756}, - dictWord{137, 0, 98}, - dictWord{136, 10, 577}, - dictWord{24, 0, 8}, 
- dictWord{4, 11, 30}, - dictWord{5, 11, 43}, - dictWord{152, 11, 8}, - dictWord{7, 0, 1046}, - dictWord{139, 0, 160}, - dictWord{7, 0, 492}, - dictWord{ - 4, - 10, - 413, - }, - dictWord{5, 10, 677}, - dictWord{7, 11, 492}, - dictWord{8, 10, 432}, - dictWord{140, 10, 280}, - dictWord{6, 0, 45}, - dictWord{7, 0, 433}, - dictWord{8, 0, 129}, - dictWord{9, 0, 21}, - dictWord{10, 0, 392}, - dictWord{11, 0, 79}, - dictWord{12, 0, 499}, - dictWord{13, 0, 199}, - dictWord{141, 0, 451}, - dictWord{7, 0, 558}, - dictWord{ - 136, - 0, - 353, - }, - dictWord{4, 11, 220}, - dictWord{7, 11, 1535}, - dictWord{9, 11, 93}, - dictWord{139, 11, 474}, - dictWord{7, 10, 646}, - dictWord{7, 10, 1730}, - dictWord{ - 11, - 10, - 446, - }, - dictWord{141, 10, 178}, - dictWord{133, 0, 785}, - dictWord{134, 0, 1145}, - dictWord{8, 0, 81}, - dictWord{9, 0, 189}, - dictWord{9, 0, 201}, - dictWord{ - 11, - 0, - 478, - }, - dictWord{11, 0, 712}, - dictWord{141, 0, 338}, - dictWord{5, 0, 353}, - dictWord{151, 0, 26}, - dictWord{11, 0, 762}, - dictWord{132, 10, 395}, - dictWord{ - 134, - 0, - 2024, - }, - dictWord{4, 0, 611}, - dictWord{133, 0, 606}, - dictWord{9, 10, 174}, - dictWord{10, 10, 164}, - dictWord{11, 10, 440}, - dictWord{11, 10, 841}, - dictWord{ - 143, - 10, - 98, - }, - dictWord{134, 10, 426}, - dictWord{10, 10, 608}, - dictWord{139, 10, 1002}, - dictWord{138, 10, 250}, - dictWord{6, 0, 25}, - dictWord{7, 0, 855}, - dictWord{7, 0, 1258}, - dictWord{144, 0, 32}, - dictWord{7, 11, 1725}, - dictWord{138, 11, 393}, - dictWord{5, 11, 263}, - dictWord{134, 11, 414}, - dictWord{6, 0, 2011}, - dictWord{133, 10, 476}, - dictWord{4, 0, 4}, - dictWord{7, 0, 1118}, - dictWord{7, 0, 1320}, - dictWord{7, 0, 1706}, - dictWord{8, 0, 277}, - dictWord{9, 0, 622}, - dictWord{ - 10, - 0, - 9, - }, - dictWord{11, 0, 724}, - dictWord{12, 0, 350}, - dictWord{12, 0, 397}, - dictWord{13, 0, 28}, - dictWord{13, 0, 159}, - dictWord{15, 0, 89}, - dictWord{18, 0, 5}, - dictWord{ - 19, - 0, - 9, - }, - 
dictWord{20, 0, 34}, - dictWord{22, 0, 47}, - dictWord{6, 11, 178}, - dictWord{6, 11, 1750}, - dictWord{8, 11, 251}, - dictWord{9, 11, 690}, - dictWord{ - 10, - 11, - 155, - }, - dictWord{10, 11, 196}, - dictWord{10, 11, 373}, - dictWord{11, 11, 698}, - dictWord{13, 11, 155}, - dictWord{148, 11, 93}, - dictWord{5, 11, 97}, - dictWord{ - 137, - 11, - 393, - }, - dictWord{7, 0, 764}, - dictWord{11, 0, 461}, - dictWord{12, 0, 172}, - dictWord{5, 10, 76}, - dictWord{6, 10, 458}, - dictWord{6, 10, 497}, - dictWord{ - 7, - 10, - 868, - }, - dictWord{9, 10, 658}, - dictWord{10, 10, 594}, - dictWord{11, 10, 566}, - dictWord{12, 10, 338}, - dictWord{141, 10, 200}, - dictWord{134, 0, 1449}, - dictWord{138, 11, 40}, - dictWord{134, 11, 1639}, - dictWord{134, 0, 1445}, - dictWord{6, 0, 1168}, - dictWord{4, 10, 526}, - dictWord{7, 10, 1029}, - dictWord{ - 135, - 10, - 1054, - }, - dictWord{4, 11, 191}, - dictWord{7, 11, 934}, - dictWord{8, 11, 647}, - dictWord{145, 11, 97}, - dictWord{132, 10, 636}, - dictWord{6, 0, 233}, - dictWord{ - 7, - 10, - 660, - }, - dictWord{7, 10, 1124}, - dictWord{17, 10, 31}, - dictWord{19, 10, 22}, - dictWord{151, 10, 14}, - dictWord{6, 10, 1699}, - dictWord{136, 11, 110}, - dictWord{ - 12, - 11, - 246, - }, - dictWord{15, 11, 162}, - dictWord{19, 11, 64}, - dictWord{20, 11, 8}, - dictWord{20, 11, 95}, - dictWord{22, 11, 24}, - dictWord{152, 11, 17}, - dictWord{ - 5, - 11, - 165, - }, - dictWord{9, 11, 346}, - dictWord{138, 11, 655}, - dictWord{5, 11, 319}, - dictWord{135, 11, 534}, - dictWord{134, 0, 255}, - dictWord{9, 0, 216}, - dictWord{ - 8, - 11, - 128, - }, - dictWord{139, 11, 179}, - dictWord{9, 0, 183}, - dictWord{139, 0, 286}, - dictWord{11, 0, 956}, - dictWord{151, 0, 3}, - dictWord{4, 0, 536}, - dictWord{ - 7, - 0, - 1141, - }, - dictWord{10, 0, 723}, - dictWord{139, 0, 371}, - dictWord{4, 10, 279}, - dictWord{7, 10, 301}, - dictWord{137, 10, 362}, - dictWord{7, 0, 285}, - dictWord{ - 5, - 11, - 57, - }, - dictWord{6, 11, 101}, - 
dictWord{6, 11, 1663}, - dictWord{7, 11, 132}, - dictWord{7, 11, 1048}, - dictWord{7, 11, 1154}, - dictWord{7, 11, 1415}, - dictWord{ - 7, - 11, - 1507, - }, - dictWord{12, 11, 493}, - dictWord{15, 11, 105}, - dictWord{151, 11, 15}, - dictWord{5, 11, 459}, - dictWord{7, 11, 1073}, - dictWord{7, 10, 1743}, - dictWord{ - 8, - 11, - 241, - }, - dictWord{136, 11, 334}, - dictWord{4, 10, 178}, - dictWord{133, 10, 399}, - dictWord{135, 0, 560}, - dictWord{132, 0, 690}, - dictWord{135, 0, 1246}, - dictWord{18, 0, 157}, - dictWord{147, 0, 63}, - dictWord{10, 0, 599}, - dictWord{11, 0, 33}, - dictWord{12, 0, 571}, - dictWord{149, 0, 1}, - dictWord{6, 11, 324}, - dictWord{ - 6, - 11, - 520, - }, - dictWord{7, 11, 338}, - dictWord{7, 11, 1616}, - dictWord{7, 11, 1729}, - dictWord{8, 11, 228}, - dictWord{9, 11, 69}, - dictWord{139, 11, 750}, - dictWord{ - 7, - 0, - 1862, - }, - dictWord{12, 0, 491}, - dictWord{12, 0, 520}, - dictWord{13, 0, 383}, - dictWord{142, 0, 244}, - dictWord{135, 11, 734}, - dictWord{134, 10, 1692}, - dictWord{10, 0, 448}, - dictWord{11, 0, 630}, - dictWord{17, 0, 117}, - dictWord{6, 10, 202}, - dictWord{7, 11, 705}, - dictWord{12, 10, 360}, - dictWord{17, 10, 118}, - dictWord{18, 10, 27}, - dictWord{148, 10, 67}, - dictWord{4, 11, 73}, - dictWord{6, 11, 612}, - dictWord{7, 11, 927}, - dictWord{7, 11, 1822}, - dictWord{8, 11, 217}, - dictWord{ - 9, - 11, - 472, - }, - dictWord{9, 11, 765}, - dictWord{9, 11, 766}, - dictWord{10, 11, 408}, - dictWord{11, 11, 51}, - dictWord{11, 11, 793}, - dictWord{12, 11, 266}, - dictWord{ - 15, - 11, - 158, - }, - dictWord{20, 11, 89}, - dictWord{150, 11, 32}, - dictWord{4, 0, 190}, - dictWord{133, 0, 554}, - dictWord{133, 0, 1001}, - dictWord{5, 11, 389}, - dictWord{ - 8, - 11, - 636, - }, - dictWord{137, 11, 229}, - dictWord{5, 0, 446}, - dictWord{7, 10, 872}, - dictWord{10, 10, 516}, - dictWord{139, 10, 167}, - dictWord{137, 10, 313}, - dictWord{132, 10, 224}, - dictWord{134, 0, 1313}, - dictWord{5, 10, 546}, - 
dictWord{7, 10, 35}, - dictWord{8, 10, 11}, - dictWord{8, 10, 12}, - dictWord{9, 10, 315}, - dictWord{9, 10, 533}, - dictWord{10, 10, 802}, - dictWord{11, 10, 166}, - dictWord{12, 10, 525}, - dictWord{142, 10, 243}, - dictWord{6, 0, 636}, - dictWord{137, 0, 837}, - dictWord{5, 10, 241}, - dictWord{8, 10, 242}, - dictWord{9, 10, 451}, - dictWord{10, 10, 667}, - dictWord{11, 10, 598}, - dictWord{140, 10, 429}, - dictWord{22, 10, 46}, - dictWord{150, 11, 46}, - dictWord{136, 11, 472}, - dictWord{11, 0, 278}, - dictWord{142, 0, 73}, - dictWord{141, 11, 185}, - dictWord{132, 0, 868}, - dictWord{ - 134, - 0, - 972, - }, - dictWord{4, 10, 366}, - dictWord{137, 10, 516}, - dictWord{138, 0, 1010}, - dictWord{5, 11, 189}, - dictWord{6, 10, 1736}, - dictWord{7, 11, 442}, - dictWord{ - 7, - 11, - 443, - }, - dictWord{8, 11, 281}, - dictWord{12, 11, 174}, - dictWord{13, 11, 83}, - dictWord{141, 11, 261}, - dictWord{139, 11, 384}, - dictWord{6, 11, 2}, - dictWord{ - 7, - 11, - 191, - }, - dictWord{7, 11, 446}, - dictWord{7, 11, 758}, - dictWord{7, 11, 1262}, - dictWord{7, 11, 1737}, - dictWord{8, 11, 22}, - dictWord{8, 11, 270}, - dictWord{ - 8, - 11, - 612, - }, - dictWord{9, 11, 4}, - dictWord{9, 11, 167}, - dictWord{9, 11, 312}, - dictWord{9, 11, 436}, - dictWord{10, 11, 156}, - dictWord{10, 11, 216}, - dictWord{ - 10, - 11, - 311, - }, - dictWord{10, 11, 623}, - dictWord{11, 11, 72}, - dictWord{11, 11, 330}, - dictWord{11, 11, 455}, - dictWord{12, 11, 101}, - dictWord{12, 11, 321}, - dictWord{ - 12, - 11, - 504, - }, - dictWord{12, 11, 530}, - dictWord{12, 11, 543}, - dictWord{13, 11, 17}, - dictWord{13, 11, 156}, - dictWord{13, 11, 334}, - dictWord{14, 11, 48}, - dictWord{15, 11, 70}, - dictWord{17, 11, 60}, - dictWord{148, 11, 64}, - dictWord{6, 10, 331}, - dictWord{136, 10, 623}, - dictWord{135, 0, 1231}, - dictWord{132, 0, 304}, - dictWord{6, 11, 60}, - dictWord{7, 11, 670}, - dictWord{7, 11, 1327}, - dictWord{8, 11, 411}, - dictWord{8, 11, 435}, - dictWord{9, 11, 653}, 
- dictWord{9, 11, 740}, - dictWord{10, 11, 385}, - dictWord{11, 11, 222}, - dictWord{11, 11, 324}, - dictWord{11, 11, 829}, - dictWord{140, 11, 611}, - dictWord{7, 0, 506}, - dictWord{6, 11, 166}, - dictWord{7, 11, 374}, - dictWord{135, 11, 1174}, - dictWord{14, 11, 43}, - dictWord{146, 11, 21}, - dictWord{135, 11, 1694}, - dictWord{135, 10, 1888}, - dictWord{ - 5, - 11, - 206, - }, - dictWord{134, 11, 398}, - dictWord{135, 11, 50}, - dictWord{150, 0, 26}, - dictWord{6, 0, 53}, - dictWord{6, 0, 199}, - dictWord{7, 0, 1408}, - dictWord{ - 8, - 0, - 32, - }, - dictWord{8, 0, 93}, - dictWord{10, 0, 397}, - dictWord{10, 0, 629}, - dictWord{11, 0, 593}, - dictWord{11, 0, 763}, - dictWord{13, 0, 326}, - dictWord{145, 0, 35}, - dictWord{134, 0, 105}, - dictWord{132, 10, 394}, - dictWord{4, 0, 843}, - dictWord{138, 0, 794}, - dictWord{11, 0, 704}, - dictWord{141, 0, 396}, - dictWord{5, 0, 114}, - dictWord{5, 0, 255}, - dictWord{141, 0, 285}, - dictWord{6, 0, 619}, - dictWord{7, 0, 898}, - dictWord{7, 0, 1092}, - dictWord{8, 0, 485}, - dictWord{18, 0, 28}, - dictWord{ - 19, - 0, - 116, - }, - dictWord{135, 10, 1931}, - dictWord{9, 0, 145}, - dictWord{7, 10, 574}, - dictWord{135, 10, 1719}, - dictWord{7, 0, 2035}, - dictWord{8, 0, 19}, - dictWord{ - 9, - 0, - 89, - }, - dictWord{138, 0, 831}, - dictWord{132, 10, 658}, - dictWord{6, 11, 517}, - dictWord{7, 11, 1159}, - dictWord{10, 11, 621}, - dictWord{139, 11, 192}, - dictWord{ - 7, - 0, - 1933, - }, - dictWord{7, 11, 1933}, - dictWord{9, 10, 781}, - dictWord{10, 10, 144}, - dictWord{11, 10, 385}, - dictWord{13, 10, 161}, - dictWord{13, 10, 228}, - dictWord{13, 10, 268}, - dictWord{148, 10, 107}, - dictWord{136, 10, 374}, - dictWord{10, 11, 223}, - dictWord{139, 11, 645}, - dictWord{135, 0, 1728}, - dictWord{ - 7, - 11, - 64, - }, - dictWord{7, 11, 289}, - dictWord{136, 11, 245}, - dictWord{4, 10, 344}, - dictWord{6, 10, 498}, - dictWord{139, 10, 323}, - dictWord{136, 0, 746}, - dictWord{ - 135, - 10, - 1063, - }, - 
dictWord{137, 10, 155}, - dictWord{4, 0, 987}, - dictWord{6, 0, 1964}, - dictWord{6, 0, 1974}, - dictWord{6, 0, 1990}, - dictWord{136, 0, 995}, - dictWord{133, 11, 609}, - dictWord{133, 10, 906}, - dictWord{134, 0, 1550}, - dictWord{134, 0, 874}, - dictWord{5, 11, 129}, - dictWord{6, 11, 61}, - dictWord{ - 135, - 11, - 947, - }, - dictWord{4, 0, 1018}, - dictWord{6, 0, 1938}, - dictWord{6, 0, 2021}, - dictWord{134, 0, 2039}, - dictWord{132, 0, 814}, - dictWord{11, 0, 126}, - dictWord{ - 139, - 0, - 287, - }, - dictWord{134, 0, 1264}, - dictWord{5, 0, 955}, - dictWord{136, 0, 814}, - dictWord{141, 11, 506}, - dictWord{132, 11, 314}, - dictWord{6, 0, 981}, - dictWord{139, 11, 1000}, - dictWord{5, 0, 56}, - dictWord{8, 0, 892}, - dictWord{8, 0, 915}, - dictWord{140, 0, 776}, - dictWord{148, 0, 100}, - dictWord{10, 0, 4}, - dictWord{ - 10, - 0, - 13, - }, - dictWord{11, 0, 638}, - dictWord{148, 0, 57}, - dictWord{148, 11, 74}, - dictWord{5, 0, 738}, - dictWord{132, 10, 616}, - dictWord{133, 11, 637}, - dictWord{ - 136, - 10, - 692, - }, - dictWord{133, 0, 758}, - dictWord{132, 10, 305}, - dictWord{137, 11, 590}, - dictWord{5, 11, 280}, - dictWord{135, 11, 1226}, - dictWord{ - 134, - 11, - 494, - }, - dictWord{135, 0, 1112}, - dictWord{133, 11, 281}, - dictWord{13, 0, 44}, - dictWord{14, 0, 214}, - dictWord{5, 10, 214}, - dictWord{7, 10, 603}, - dictWord{ - 8, - 10, - 611, - }, - dictWord{9, 10, 686}, - dictWord{10, 10, 88}, - dictWord{11, 10, 459}, - dictWord{11, 10, 496}, - dictWord{12, 10, 463}, - dictWord{140, 10, 590}, - dictWord{ - 139, - 0, - 328, - }, - dictWord{135, 11, 1064}, - dictWord{137, 0, 133}, - dictWord{7, 0, 168}, - dictWord{13, 0, 196}, - dictWord{141, 0, 237}, - dictWord{134, 10, 1703}, - dictWord{134, 0, 1152}, - dictWord{135, 0, 1245}, - dictWord{5, 0, 110}, - dictWord{6, 0, 169}, - dictWord{6, 0, 1702}, - dictWord{7, 0, 400}, - dictWord{8, 0, 538}, - dictWord{ - 9, - 0, - 184, - }, - dictWord{9, 0, 524}, - dictWord{140, 0, 218}, - dictWord{6, 0, 
1816}, - dictWord{10, 0, 871}, - dictWord{12, 0, 769}, - dictWord{140, 0, 785}, - dictWord{ - 132, - 11, - 630, - }, - dictWord{7, 11, 33}, - dictWord{7, 11, 120}, - dictWord{8, 11, 489}, - dictWord{9, 11, 319}, - dictWord{10, 11, 820}, - dictWord{11, 11, 1004}, - dictWord{ - 12, - 11, - 379, - }, - dictWord{13, 11, 117}, - dictWord{13, 11, 412}, - dictWord{14, 11, 25}, - dictWord{15, 11, 52}, - dictWord{15, 11, 161}, - dictWord{16, 11, 47}, - dictWord{149, 11, 2}, - dictWord{6, 0, 133}, - dictWord{8, 0, 413}, - dictWord{9, 0, 353}, - dictWord{139, 0, 993}, - dictWord{145, 10, 19}, - dictWord{4, 11, 937}, - dictWord{ - 133, - 11, - 801, - }, - dictWord{134, 0, 978}, - dictWord{6, 0, 93}, - dictWord{6, 0, 1508}, - dictWord{7, 0, 1422}, - dictWord{7, 0, 1851}, - dictWord{8, 0, 673}, - dictWord{9, 0, 529}, - dictWord{140, 0, 43}, - dictWord{6, 0, 317}, - dictWord{10, 0, 512}, - dictWord{4, 10, 737}, - dictWord{11, 10, 294}, - dictWord{12, 10, 60}, - dictWord{12, 10, 437}, - dictWord{13, 10, 64}, - dictWord{13, 10, 380}, - dictWord{142, 10, 430}, - dictWord{9, 0, 371}, - dictWord{7, 11, 1591}, - dictWord{144, 11, 43}, - dictWord{6, 10, 1758}, - dictWord{8, 10, 520}, - dictWord{9, 10, 345}, - dictWord{9, 10, 403}, - dictWord{142, 10, 350}, - dictWord{5, 0, 526}, - dictWord{10, 10, 242}, - dictWord{ - 138, - 10, - 579, - }, - dictWord{9, 0, 25}, - dictWord{10, 0, 467}, - dictWord{138, 0, 559}, - dictWord{5, 10, 139}, - dictWord{7, 10, 1168}, - dictWord{138, 10, 539}, - dictWord{ - 4, - 0, - 335, - }, - dictWord{135, 0, 942}, - dictWord{140, 0, 754}, - dictWord{132, 11, 365}, - dictWord{11, 0, 182}, - dictWord{142, 0, 195}, - dictWord{142, 11, 29}, - dictWord{ - 5, - 11, - 7, - }, - dictWord{139, 11, 774}, - dictWord{4, 11, 746}, - dictWord{135, 11, 1090}, - dictWord{8, 0, 39}, - dictWord{10, 0, 773}, - dictWord{11, 0, 84}, - dictWord{ - 12, - 0, - 205, - }, - dictWord{142, 0, 1}, - dictWord{5, 0, 601}, - dictWord{5, 0, 870}, - dictWord{5, 11, 360}, - dictWord{136, 11, 
237}, - dictWord{132, 0, 181}, - dictWord{ - 136, - 0, - 370, - }, - dictWord{134, 0, 1652}, - dictWord{8, 0, 358}, - dictWord{4, 10, 107}, - dictWord{7, 10, 613}, - dictWord{8, 10, 439}, - dictWord{8, 10, 504}, - dictWord{ - 9, - 10, - 501, - }, - dictWord{10, 10, 383}, - dictWord{139, 10, 477}, - dictWord{132, 10, 229}, - dictWord{137, 11, 785}, - dictWord{4, 0, 97}, - dictWord{5, 0, 147}, - dictWord{ - 6, - 0, - 286, - }, - dictWord{7, 0, 1362}, - dictWord{141, 0, 176}, - dictWord{6, 0, 537}, - dictWord{7, 0, 788}, - dictWord{7, 0, 1816}, - dictWord{132, 10, 903}, - dictWord{ - 140, - 10, - 71, - }, - dictWord{6, 0, 743}, - dictWord{134, 0, 1223}, - dictWord{6, 0, 375}, - dictWord{7, 0, 169}, - dictWord{7, 0, 254}, - dictWord{8, 0, 780}, - dictWord{135, 11, 1493}, - dictWord{7, 0, 1714}, - dictWord{4, 10, 47}, - dictWord{6, 10, 373}, - dictWord{7, 10, 452}, - dictWord{7, 10, 543}, - dictWord{7, 10, 1856}, - dictWord{9, 10, 6}, - dictWord{ - 11, - 10, - 257, - }, - dictWord{139, 10, 391}, - dictWord{6, 0, 896}, - dictWord{136, 0, 1003}, - dictWord{135, 0, 1447}, - dictWord{137, 11, 341}, - dictWord{5, 10, 980}, - dictWord{134, 10, 1754}, - dictWord{145, 11, 22}, - dictWord{4, 11, 277}, - dictWord{5, 11, 608}, - dictWord{6, 11, 493}, - dictWord{7, 11, 457}, - dictWord{ - 140, - 11, - 384, - }, - dictWord{7, 10, 536}, - dictWord{7, 10, 1331}, - dictWord{136, 10, 143}, - dictWord{140, 0, 744}, - dictWord{7, 11, 27}, - dictWord{135, 11, 316}, - dictWord{ - 18, - 0, - 126, - }, - dictWord{5, 10, 19}, - dictWord{134, 10, 533}, - dictWord{4, 0, 788}, - dictWord{11, 0, 41}, - dictWord{5, 11, 552}, - dictWord{5, 11, 586}, - dictWord{ - 5, - 11, - 676, - }, - dictWord{6, 11, 448}, - dictWord{8, 11, 244}, - dictWord{11, 11, 1}, - dictWord{11, 11, 41}, - dictWord{13, 11, 3}, - dictWord{16, 11, 54}, - dictWord{17, 11, 4}, - dictWord{146, 11, 13}, - dictWord{4, 0, 985}, - dictWord{6, 0, 1801}, - dictWord{4, 11, 401}, - dictWord{137, 11, 264}, - dictWord{5, 10, 395}, - 
dictWord{5, 10, 951}, - dictWord{134, 10, 1776}, - dictWord{5, 0, 629}, - dictWord{135, 0, 1549}, - dictWord{11, 10, 663}, - dictWord{12, 10, 210}, - dictWord{13, 10, 166}, - dictWord{ - 13, - 10, - 310, - }, - dictWord{14, 10, 373}, - dictWord{147, 10, 43}, - dictWord{9, 11, 543}, - dictWord{10, 11, 524}, - dictWord{11, 11, 30}, - dictWord{12, 11, 524}, - dictWord{ - 14, - 11, - 315, - }, - dictWord{16, 11, 18}, - dictWord{20, 11, 26}, - dictWord{148, 11, 65}, - dictWord{4, 11, 205}, - dictWord{5, 11, 623}, - dictWord{7, 11, 104}, - dictWord{ - 136, - 11, - 519, - }, - dictWord{5, 0, 293}, - dictWord{134, 0, 601}, - dictWord{7, 11, 579}, - dictWord{9, 11, 41}, - dictWord{9, 11, 244}, - dictWord{9, 11, 669}, - dictWord{ - 10, - 11, - 5, - }, - dictWord{11, 11, 861}, - dictWord{11, 11, 951}, - dictWord{139, 11, 980}, - dictWord{132, 11, 717}, - dictWord{132, 10, 695}, - dictWord{7, 10, 497}, - dictWord{ - 9, - 10, - 387, - }, - dictWord{147, 10, 81}, - dictWord{132, 0, 420}, - dictWord{142, 0, 37}, - dictWord{6, 0, 1134}, - dictWord{6, 0, 1900}, - dictWord{12, 0, 830}, - dictWord{ - 12, - 0, - 878, - }, - dictWord{12, 0, 894}, - dictWord{15, 0, 221}, - dictWord{143, 0, 245}, - dictWord{132, 11, 489}, - dictWord{7, 0, 1570}, - dictWord{140, 0, 542}, - dictWord{ - 8, - 0, - 933, - }, - dictWord{136, 0, 957}, - dictWord{6, 0, 1371}, - dictWord{7, 0, 31}, - dictWord{8, 0, 373}, - dictWord{5, 10, 284}, - dictWord{6, 10, 49}, - dictWord{6, 10, 350}, - dictWord{7, 10, 377}, - dictWord{7, 10, 1693}, - dictWord{8, 10, 678}, - dictWord{9, 10, 161}, - dictWord{9, 10, 585}, - dictWord{9, 10, 671}, - dictWord{9, 10, 839}, - dictWord{11, 10, 912}, - dictWord{141, 10, 427}, - dictWord{135, 11, 892}, - dictWord{4, 0, 325}, - dictWord{138, 0, 125}, - dictWord{139, 11, 47}, - dictWord{ - 132, - 10, - 597, - }, - dictWord{138, 0, 323}, - dictWord{6, 0, 1547}, - dictWord{7, 11, 1605}, - dictWord{9, 11, 473}, - dictWord{11, 11, 962}, - dictWord{146, 11, 139}, - dictWord{ - 139, - 10, - 
908, - }, - dictWord{7, 11, 819}, - dictWord{9, 11, 26}, - dictWord{9, 11, 392}, - dictWord{10, 11, 152}, - dictWord{10, 11, 226}, - dictWord{11, 11, 19}, - dictWord{ - 12, - 11, - 276, - }, - dictWord{12, 11, 426}, - dictWord{12, 11, 589}, - dictWord{13, 11, 460}, - dictWord{15, 11, 97}, - dictWord{19, 11, 48}, - dictWord{148, 11, 104}, - dictWord{135, 11, 51}, - dictWord{4, 0, 718}, - dictWord{135, 0, 1216}, - dictWord{6, 0, 1896}, - dictWord{6, 0, 1905}, - dictWord{6, 0, 1912}, - dictWord{9, 0, 947}, - dictWord{ - 9, - 0, - 974, - }, - dictWord{12, 0, 809}, - dictWord{12, 0, 850}, - dictWord{12, 0, 858}, - dictWord{12, 0, 874}, - dictWord{12, 0, 887}, - dictWord{12, 0, 904}, - dictWord{ - 12, - 0, - 929, - }, - dictWord{12, 0, 948}, - dictWord{12, 0, 952}, - dictWord{15, 0, 198}, - dictWord{15, 0, 206}, - dictWord{15, 0, 220}, - dictWord{15, 0, 227}, - dictWord{15, 0, 247}, - dictWord{18, 0, 188}, - dictWord{21, 0, 48}, - dictWord{21, 0, 50}, - dictWord{24, 0, 25}, - dictWord{24, 0, 29}, - dictWord{7, 11, 761}, - dictWord{7, 11, 1051}, - dictWord{ - 137, - 11, - 545, - }, - dictWord{5, 0, 124}, - dictWord{5, 0, 144}, - dictWord{6, 0, 548}, - dictWord{7, 0, 15}, - dictWord{7, 0, 153}, - dictWord{137, 0, 629}, - dictWord{ - 135, - 11, - 606, - }, - dictWord{135, 10, 2014}, - dictWord{7, 10, 2007}, - dictWord{9, 11, 46}, - dictWord{9, 10, 101}, - dictWord{9, 10, 450}, - dictWord{10, 10, 66}, - dictWord{ - 10, - 10, - 842, - }, - dictWord{11, 10, 536}, - dictWord{140, 10, 587}, - dictWord{6, 0, 75}, - dictWord{7, 0, 1531}, - dictWord{8, 0, 416}, - dictWord{9, 0, 240}, - dictWord{9, 0, 275}, - dictWord{10, 0, 100}, - dictWord{11, 0, 658}, - dictWord{11, 0, 979}, - dictWord{12, 0, 86}, - dictWord{14, 0, 207}, - dictWord{15, 0, 20}, - dictWord{143, 0, 25}, - dictWord{ - 5, - 0, - 141, - }, - dictWord{5, 0, 915}, - dictWord{6, 0, 1783}, - dictWord{7, 0, 211}, - dictWord{7, 0, 698}, - dictWord{7, 0, 1353}, - dictWord{9, 0, 83}, - dictWord{9, 0, 281}, - dictWord{ - 10, - 
0, - 376, - }, - dictWord{10, 0, 431}, - dictWord{11, 0, 543}, - dictWord{12, 0, 664}, - dictWord{13, 0, 280}, - dictWord{13, 0, 428}, - dictWord{14, 0, 61}, - dictWord{ - 14, - 0, - 128, - }, - dictWord{17, 0, 52}, - dictWord{145, 0, 81}, - dictWord{132, 11, 674}, - dictWord{135, 0, 533}, - dictWord{149, 0, 6}, - dictWord{132, 11, 770}, - dictWord{ - 133, - 0, - 538, - }, - dictWord{5, 11, 79}, - dictWord{7, 11, 1027}, - dictWord{7, 11, 1477}, - dictWord{139, 11, 52}, - dictWord{139, 10, 62}, - dictWord{4, 0, 338}, - dictWord{ - 133, - 0, - 400, - }, - dictWord{5, 11, 789}, - dictWord{134, 11, 195}, - dictWord{4, 11, 251}, - dictWord{4, 11, 688}, - dictWord{7, 11, 513}, - dictWord{7, 11, 1284}, - dictWord{ - 9, - 11, - 87, - }, - dictWord{138, 11, 365}, - dictWord{134, 10, 1766}, - dictWord{6, 0, 0}, - dictWord{7, 0, 84}, - dictWord{11, 0, 895}, - dictWord{145, 0, 11}, - dictWord{ - 139, - 0, - 892, - }, - dictWord{4, 0, 221}, - dictWord{5, 0, 659}, - dictWord{7, 0, 697}, - dictWord{7, 0, 1211}, - dictWord{138, 0, 284}, - dictWord{133, 0, 989}, - dictWord{ - 133, - 11, - 889, - }, - dictWord{4, 11, 160}, - dictWord{5, 11, 330}, - dictWord{7, 11, 1434}, - dictWord{136, 11, 174}, - dictWord{6, 10, 1665}, - dictWord{7, 10, 256}, - dictWord{ - 7, - 10, - 1388, - }, - dictWord{10, 10, 499}, - dictWord{139, 10, 670}, - dictWord{7, 0, 848}, - dictWord{4, 10, 22}, - dictWord{5, 10, 10}, - dictWord{136, 10, 97}, - dictWord{ - 138, - 0, - 507, - }, - dictWord{133, 10, 481}, - dictWord{4, 0, 188}, - dictWord{135, 0, 805}, - dictWord{5, 0, 884}, - dictWord{6, 0, 732}, - dictWord{139, 0, 991}, - dictWord{ - 135, - 11, - 968, - }, - dictWord{11, 11, 636}, - dictWord{15, 11, 145}, - dictWord{17, 11, 34}, - dictWord{19, 11, 50}, - dictWord{151, 11, 20}, - dictWord{7, 0, 959}, - dictWord{ - 16, - 0, - 60, - }, - dictWord{6, 10, 134}, - dictWord{7, 10, 437}, - dictWord{9, 10, 37}, - dictWord{14, 10, 285}, - dictWord{142, 10, 371}, - dictWord{7, 10, 486}, - dictWord{ - 8, - 10, - 
155, - }, - dictWord{11, 10, 93}, - dictWord{140, 10, 164}, - dictWord{134, 0, 1653}, - dictWord{7, 0, 337}, - dictWord{133, 10, 591}, - dictWord{6, 0, 1989}, - dictWord{ - 8, - 0, - 922, - }, - dictWord{8, 0, 978}, - dictWord{133, 11, 374}, - dictWord{132, 0, 638}, - dictWord{138, 0, 500}, - dictWord{133, 11, 731}, - dictWord{5, 10, 380}, - dictWord{ - 5, - 10, - 650, - }, - dictWord{136, 10, 310}, - dictWord{138, 11, 381}, - dictWord{4, 10, 364}, - dictWord{7, 10, 1156}, - dictWord{7, 10, 1187}, - dictWord{137, 10, 409}, - dictWord{137, 11, 224}, - dictWord{140, 0, 166}, - dictWord{134, 10, 482}, - dictWord{4, 11, 626}, - dictWord{5, 11, 642}, - dictWord{6, 11, 425}, - dictWord{ - 10, - 11, - 202, - }, - dictWord{139, 11, 141}, - dictWord{4, 10, 781}, - dictWord{6, 10, 487}, - dictWord{7, 10, 926}, - dictWord{8, 10, 263}, - dictWord{139, 10, 500}, - dictWord{ - 135, - 0, - 418, - }, - dictWord{4, 10, 94}, - dictWord{135, 10, 1265}, - dictWord{136, 0, 760}, - dictWord{132, 10, 417}, - dictWord{136, 11, 835}, - dictWord{5, 10, 348}, - dictWord{134, 10, 522}, - dictWord{6, 0, 1277}, - dictWord{134, 0, 1538}, - dictWord{139, 11, 541}, - dictWord{135, 11, 1597}, - dictWord{5, 11, 384}, - dictWord{ - 8, - 11, - 455, - }, - dictWord{140, 11, 48}, - dictWord{136, 0, 770}, - dictWord{5, 11, 264}, - dictWord{134, 11, 184}, - dictWord{4, 0, 89}, - dictWord{5, 0, 489}, - dictWord{ - 6, - 0, - 315, - }, - dictWord{7, 0, 553}, - dictWord{7, 0, 1745}, - dictWord{138, 0, 243}, - dictWord{4, 10, 408}, - dictWord{4, 10, 741}, - dictWord{135, 10, 500}, - dictWord{ - 134, - 0, - 1396, - }, - dictWord{133, 0, 560}, - dictWord{6, 0, 1658}, - dictWord{9, 0, 3}, - dictWord{10, 0, 154}, - dictWord{11, 0, 641}, - dictWord{13, 0, 85}, - dictWord{13, 0, 201}, - dictWord{141, 0, 346}, - dictWord{135, 11, 1595}, - dictWord{5, 11, 633}, - dictWord{6, 11, 28}, - dictWord{7, 11, 219}, - dictWord{135, 11, 1323}, - dictWord{ - 9, - 11, - 769, - }, - dictWord{140, 11, 185}, - dictWord{135, 11, 
785}, - dictWord{7, 11, 359}, - dictWord{8, 11, 243}, - dictWord{140, 11, 175}, - dictWord{138, 0, 586}, - dictWord{ - 7, - 0, - 1271, - }, - dictWord{134, 10, 73}, - dictWord{132, 11, 105}, - dictWord{4, 0, 166}, - dictWord{5, 0, 505}, - dictWord{134, 0, 1670}, - dictWord{133, 10, 576}, - dictWord{4, 11, 324}, - dictWord{138, 11, 104}, - dictWord{142, 10, 231}, - dictWord{6, 0, 637}, - dictWord{7, 10, 1264}, - dictWord{7, 10, 1678}, - dictWord{ - 11, - 10, - 945, - }, - dictWord{12, 10, 341}, - dictWord{12, 10, 471}, - dictWord{12, 10, 569}, - dictWord{23, 11, 21}, - dictWord{151, 11, 23}, - dictWord{8, 11, 559}, - dictWord{ - 141, - 11, - 109, - }, - dictWord{134, 0, 1947}, - dictWord{7, 0, 445}, - dictWord{8, 0, 307}, - dictWord{8, 0, 704}, - dictWord{10, 0, 41}, - dictWord{10, 0, 439}, - dictWord{ - 11, - 0, - 237, - }, - dictWord{11, 0, 622}, - dictWord{140, 0, 201}, - dictWord{135, 11, 963}, - dictWord{135, 0, 1977}, - dictWord{4, 0, 189}, - dictWord{5, 0, 713}, - dictWord{ - 136, - 0, - 57, - }, - dictWord{138, 0, 371}, - dictWord{135, 10, 538}, - dictWord{132, 0, 552}, - dictWord{6, 0, 883}, - dictWord{133, 10, 413}, - dictWord{6, 0, 923}, - dictWord{ - 132, - 11, - 758, - }, - dictWord{138, 11, 215}, - dictWord{136, 10, 495}, - dictWord{7, 10, 54}, - dictWord{8, 10, 312}, - dictWord{10, 10, 191}, - dictWord{10, 10, 614}, - dictWord{140, 10, 567}, - dictWord{7, 11, 351}, - dictWord{139, 11, 128}, - dictWord{7, 0, 875}, - dictWord{6, 10, 468}, - dictWord{7, 10, 1478}, - dictWord{8, 10, 530}, - dictWord{142, 10, 290}, - dictWord{135, 0, 1788}, - dictWord{17, 0, 49}, - dictWord{133, 11, 918}, - dictWord{12, 11, 398}, - dictWord{20, 11, 39}, - dictWord{ - 21, - 11, - 11, - }, - dictWord{150, 11, 41}, - dictWord{10, 0, 661}, - dictWord{6, 10, 484}, - dictWord{135, 10, 822}, - dictWord{135, 0, 1945}, - dictWord{134, 0, 794}, - dictWord{ - 137, - 10, - 900, - }, - dictWord{135, 10, 1335}, - dictWord{6, 10, 1724}, - dictWord{135, 10, 2022}, - dictWord{132, 11, 
340}, - dictWord{134, 0, 1135}, - dictWord{ - 4, - 0, - 784, - }, - dictWord{133, 0, 745}, - dictWord{5, 0, 84}, - dictWord{134, 0, 163}, - dictWord{133, 0, 410}, - dictWord{4, 0, 976}, - dictWord{5, 11, 985}, - dictWord{7, 11, 509}, - dictWord{7, 11, 529}, - dictWord{145, 11, 96}, - dictWord{132, 10, 474}, - dictWord{134, 0, 703}, - dictWord{135, 11, 1919}, - dictWord{5, 0, 322}, - dictWord{ - 8, - 0, - 186, - }, - dictWord{9, 0, 262}, - dictWord{10, 0, 187}, - dictWord{142, 0, 208}, - dictWord{135, 10, 1504}, - dictWord{133, 0, 227}, - dictWord{9, 0, 560}, - dictWord{ - 13, - 0, - 208, - }, - dictWord{133, 10, 305}, - dictWord{132, 11, 247}, - dictWord{7, 0, 1395}, - dictWord{8, 0, 486}, - dictWord{9, 0, 236}, - dictWord{9, 0, 878}, - dictWord{ - 10, - 0, - 218, - }, - dictWord{11, 0, 95}, - dictWord{19, 0, 17}, - dictWord{147, 0, 31}, - dictWord{7, 0, 2043}, - dictWord{8, 0, 672}, - dictWord{141, 0, 448}, - dictWord{4, 11, 184}, - dictWord{5, 11, 390}, - dictWord{6, 11, 337}, - dictWord{7, 11, 23}, - dictWord{7, 11, 494}, - dictWord{7, 11, 618}, - dictWord{7, 11, 1456}, - dictWord{8, 11, 27}, - dictWord{ - 8, - 11, - 599, - }, - dictWord{10, 11, 153}, - dictWord{139, 11, 710}, - dictWord{135, 0, 466}, - dictWord{135, 10, 1236}, - dictWord{6, 0, 167}, - dictWord{7, 0, 186}, - dictWord{7, 0, 656}, - dictWord{10, 0, 643}, - dictWord{4, 10, 480}, - dictWord{6, 10, 302}, - dictWord{6, 10, 1642}, - dictWord{7, 10, 837}, - dictWord{7, 10, 1547}, - dictWord{ - 7, - 10, - 1657, - }, - dictWord{8, 10, 429}, - dictWord{9, 10, 228}, - dictWord{13, 10, 289}, - dictWord{13, 10, 343}, - dictWord{147, 10, 101}, - dictWord{134, 0, 1428}, - dictWord{134, 0, 1440}, - dictWord{5, 0, 412}, - dictWord{7, 10, 278}, - dictWord{10, 10, 739}, - dictWord{11, 10, 708}, - dictWord{141, 10, 348}, - dictWord{ - 134, - 0, - 1118, - }, - dictWord{136, 0, 562}, - dictWord{148, 11, 46}, - dictWord{9, 0, 316}, - dictWord{139, 0, 256}, - dictWord{134, 0, 1771}, - dictWord{135, 0, 1190}, - 
dictWord{137, 0, 132}, - dictWord{10, 11, 227}, - dictWord{11, 11, 497}, - dictWord{11, 11, 709}, - dictWord{140, 11, 415}, - dictWord{143, 0, 66}, - dictWord{6, 11, 360}, - dictWord{7, 11, 1664}, - dictWord{136, 11, 478}, - dictWord{144, 10, 28}, - dictWord{4, 0, 317}, - dictWord{135, 0, 1279}, - dictWord{5, 0, 63}, - dictWord{ - 133, - 0, - 509, - }, - dictWord{136, 11, 699}, - dictWord{145, 10, 36}, - dictWord{134, 0, 1475}, - dictWord{11, 11, 343}, - dictWord{142, 11, 127}, - dictWord{132, 11, 739}, - dictWord{132, 0, 288}, - dictWord{135, 11, 1757}, - dictWord{8, 0, 89}, - dictWord{8, 0, 620}, - dictWord{9, 0, 608}, - dictWord{11, 0, 628}, - dictWord{12, 0, 322}, - dictWord{143, 0, 124}, - dictWord{134, 0, 1225}, - dictWord{7, 0, 1189}, - dictWord{4, 11, 67}, - dictWord{5, 11, 422}, - dictWord{6, 10, 363}, - dictWord{7, 11, 1037}, - dictWord{7, 11, 1289}, - dictWord{7, 11, 1555}, - dictWord{7, 10, 1955}, - dictWord{8, 10, 725}, - dictWord{9, 11, 741}, - dictWord{145, 11, 108}, - dictWord{ - 134, - 0, - 1468, - }, - dictWord{6, 0, 689}, - dictWord{134, 0, 1451}, - dictWord{138, 0, 120}, - dictWord{151, 0, 1}, - dictWord{137, 10, 805}, - dictWord{142, 0, 329}, - dictWord{ - 5, - 10, - 813, - }, - dictWord{135, 10, 2046}, - dictWord{135, 0, 226}, - dictWord{138, 11, 96}, - dictWord{7, 0, 1855}, - dictWord{5, 10, 712}, - dictWord{11, 10, 17}, - dictWord{13, 10, 321}, - dictWord{144, 10, 67}, - dictWord{9, 0, 461}, - dictWord{6, 10, 320}, - dictWord{7, 10, 781}, - dictWord{7, 10, 1921}, - dictWord{9, 10, 55}, - dictWord{ - 10, - 10, - 186, - }, - dictWord{10, 10, 273}, - dictWord{10, 10, 664}, - dictWord{10, 10, 801}, - dictWord{11, 10, 996}, - dictWord{11, 10, 997}, - dictWord{13, 10, 157}, - dictWord{142, 10, 170}, - dictWord{8, 11, 203}, - dictWord{8, 10, 271}, - dictWord{11, 11, 823}, - dictWord{11, 11, 846}, - dictWord{12, 11, 482}, - dictWord{ - 13, - 11, - 133, - }, - dictWord{13, 11, 277}, - dictWord{13, 11, 302}, - dictWord{13, 11, 464}, - dictWord{14, 11, 
205}, - dictWord{142, 11, 221}, - dictWord{135, 0, 1346}, - dictWord{4, 11, 449}, - dictWord{133, 11, 718}, - dictWord{134, 0, 85}, - dictWord{14, 0, 299}, - dictWord{7, 10, 103}, - dictWord{7, 10, 863}, - dictWord{11, 10, 184}, - dictWord{145, 10, 62}, - dictWord{4, 11, 355}, - dictWord{6, 11, 311}, - dictWord{9, 11, 256}, - dictWord{138, 11, 404}, - dictWord{137, 10, 659}, - dictWord{ - 138, - 11, - 758, - }, - dictWord{133, 11, 827}, - dictWord{5, 11, 64}, - dictWord{140, 11, 581}, - dictWord{134, 0, 1171}, - dictWord{4, 11, 442}, - dictWord{7, 11, 1047}, - dictWord{ - 7, - 11, - 1352, - }, - dictWord{135, 11, 1643}, - dictWord{132, 0, 980}, - dictWord{5, 11, 977}, - dictWord{6, 11, 288}, - dictWord{7, 11, 528}, - dictWord{135, 11, 1065}, - dictWord{5, 0, 279}, - dictWord{6, 0, 235}, - dictWord{7, 0, 468}, - dictWord{8, 0, 446}, - dictWord{9, 0, 637}, - dictWord{10, 0, 717}, - dictWord{11, 0, 738}, - dictWord{ - 140, - 0, - 514, - }, - dictWord{132, 0, 293}, - dictWord{11, 10, 337}, - dictWord{142, 10, 303}, - dictWord{136, 11, 285}, - dictWord{5, 0, 17}, - dictWord{6, 0, 371}, - dictWord{ - 9, - 0, - 528, - }, - dictWord{12, 0, 364}, - dictWord{132, 11, 254}, - dictWord{5, 10, 77}, - dictWord{7, 10, 1455}, - dictWord{10, 10, 843}, - dictWord{147, 10, 73}, - dictWord{ - 150, - 0, - 5, - }, - dictWord{132, 10, 458}, - dictWord{6, 11, 12}, - dictWord{7, 11, 1219}, - dictWord{145, 11, 73}, - dictWord{135, 10, 1420}, - dictWord{6, 10, 109}, - dictWord{138, 10, 382}, - dictWord{135, 11, 125}, - dictWord{6, 10, 330}, - dictWord{7, 10, 1084}, - dictWord{139, 10, 142}, - dictWord{6, 11, 369}, - dictWord{ - 6, - 11, - 502, - }, - dictWord{7, 11, 1036}, - dictWord{8, 11, 348}, - dictWord{9, 11, 452}, - dictWord{10, 11, 26}, - dictWord{11, 11, 224}, - dictWord{11, 11, 387}, - dictWord{ - 11, - 11, - 772, - }, - dictWord{12, 11, 95}, - dictWord{12, 11, 629}, - dictWord{13, 11, 195}, - dictWord{13, 11, 207}, - dictWord{13, 11, 241}, - dictWord{14, 11, 260}, - dictWord{ - 14, 
- 11, - 270, - }, - dictWord{143, 11, 140}, - dictWord{132, 11, 269}, - dictWord{5, 11, 480}, - dictWord{7, 11, 532}, - dictWord{7, 11, 1197}, - dictWord{7, 11, 1358}, - dictWord{8, 11, 291}, - dictWord{11, 11, 349}, - dictWord{142, 11, 396}, - dictWord{150, 0, 48}, - dictWord{10, 0, 601}, - dictWord{13, 0, 353}, - dictWord{141, 0, 376}, - dictWord{5, 0, 779}, - dictWord{5, 0, 807}, - dictWord{6, 0, 1655}, - dictWord{134, 0, 1676}, - dictWord{142, 11, 223}, - dictWord{4, 0, 196}, - dictWord{5, 0, 558}, - dictWord{133, 0, 949}, - dictWord{148, 11, 15}, - dictWord{135, 11, 1764}, - dictWord{134, 0, 1322}, - dictWord{132, 0, 752}, - dictWord{139, 0, 737}, - dictWord{ - 135, - 11, - 657, - }, - dictWord{136, 11, 533}, - dictWord{135, 0, 412}, - dictWord{4, 0, 227}, - dictWord{5, 0, 159}, - dictWord{5, 0, 409}, - dictWord{7, 0, 80}, - dictWord{8, 0, 556}, - dictWord{10, 0, 479}, - dictWord{12, 0, 418}, - dictWord{14, 0, 50}, - dictWord{14, 0, 123}, - dictWord{14, 0, 192}, - dictWord{14, 0, 249}, - dictWord{14, 0, 295}, - dictWord{143, 0, 27}, - dictWord{7, 0, 1470}, - dictWord{8, 0, 66}, - dictWord{8, 0, 137}, - dictWord{8, 0, 761}, - dictWord{9, 0, 638}, - dictWord{11, 0, 80}, - dictWord{11, 0, 212}, - dictWord{11, 0, 368}, - dictWord{11, 0, 418}, - dictWord{12, 0, 8}, - dictWord{13, 0, 15}, - dictWord{16, 0, 61}, - dictWord{17, 0, 59}, - dictWord{19, 0, 28}, - dictWord{ - 148, - 0, - 84, - }, - dictWord{135, 10, 1985}, - dictWord{4, 11, 211}, - dictWord{4, 11, 332}, - dictWord{5, 11, 335}, - dictWord{6, 11, 238}, - dictWord{7, 11, 269}, - dictWord{ - 7, - 11, - 811, - }, - dictWord{7, 11, 1797}, - dictWord{8, 10, 122}, - dictWord{8, 11, 836}, - dictWord{9, 11, 507}, - dictWord{141, 11, 242}, - dictWord{6, 0, 683}, - dictWord{ - 134, - 0, - 1252, - }, - dictWord{4, 0, 873}, - dictWord{132, 10, 234}, - dictWord{134, 0, 835}, - dictWord{6, 0, 38}, - dictWord{7, 0, 1220}, - dictWord{8, 0, 185}, - dictWord{8, 0, 256}, - dictWord{9, 0, 22}, - dictWord{9, 0, 331}, - 
dictWord{10, 0, 738}, - dictWord{11, 0, 205}, - dictWord{11, 0, 540}, - dictWord{11, 0, 746}, - dictWord{13, 0, 465}, - dictWord{ - 14, - 0, - 88, - }, - dictWord{142, 0, 194}, - dictWord{138, 0, 986}, - dictWord{5, 11, 1009}, - dictWord{12, 11, 582}, - dictWord{146, 11, 131}, - dictWord{4, 0, 159}, - dictWord{ - 6, - 0, - 115, - }, - dictWord{7, 0, 252}, - dictWord{7, 0, 257}, - dictWord{7, 0, 1928}, - dictWord{8, 0, 69}, - dictWord{9, 0, 384}, - dictWord{10, 0, 91}, - dictWord{10, 0, 615}, - dictWord{ - 12, - 0, - 375, - }, - dictWord{14, 0, 235}, - dictWord{18, 0, 117}, - dictWord{147, 0, 123}, - dictWord{133, 0, 911}, - dictWord{136, 0, 278}, - dictWord{5, 10, 430}, - dictWord{ - 5, - 10, - 932, - }, - dictWord{6, 10, 131}, - dictWord{7, 10, 417}, - dictWord{9, 10, 522}, - dictWord{11, 10, 314}, - dictWord{141, 10, 390}, - dictWord{14, 10, 149}, - dictWord{14, 10, 399}, - dictWord{143, 10, 57}, - dictWord{4, 0, 151}, - dictWord{7, 0, 1567}, - dictWord{136, 0, 749}, - dictWord{5, 11, 228}, - dictWord{6, 11, 203}, - dictWord{ - 7, - 11, - 156, - }, - dictWord{8, 11, 347}, - dictWord{137, 11, 265}, - dictWord{132, 10, 507}, - dictWord{10, 0, 989}, - dictWord{140, 0, 956}, - dictWord{133, 0, 990}, - dictWord{5, 0, 194}, - dictWord{6, 0, 927}, - dictWord{7, 0, 1662}, - dictWord{9, 0, 90}, - dictWord{140, 0, 564}, - dictWord{4, 10, 343}, - dictWord{133, 10, 511}, - dictWord{133, 0, 425}, - dictWord{7, 10, 455}, - dictWord{138, 10, 591}, - dictWord{4, 0, 774}, - dictWord{7, 11, 476}, - dictWord{7, 11, 1592}, - dictWord{138, 11, 87}, - dictWord{5, 0, 971}, - dictWord{135, 10, 1381}, - dictWord{5, 11, 318}, - dictWord{147, 11, 121}, - dictWord{5, 11, 291}, - dictWord{7, 11, 765}, - dictWord{9, 11, 389}, - dictWord{140, 11, 548}, - dictWord{134, 10, 575}, - dictWord{4, 0, 827}, - dictWord{12, 0, 646}, - dictWord{12, 0, 705}, - dictWord{12, 0, 712}, - dictWord{140, 0, 714}, - dictWord{139, 0, 752}, - dictWord{137, 0, 662}, - dictWord{5, 0, 72}, - dictWord{6, 0, 264}, - 
dictWord{7, 0, 21}, - dictWord{7, 0, 46}, - dictWord{7, 0, 2013}, - dictWord{ - 8, - 0, - 215, - }, - dictWord{8, 0, 513}, - dictWord{10, 0, 266}, - dictWord{139, 0, 22}, - dictWord{139, 11, 522}, - dictWord{6, 0, 239}, - dictWord{7, 0, 118}, - dictWord{10, 0, 95}, - dictWord{11, 0, 603}, - dictWord{13, 0, 443}, - dictWord{14, 0, 160}, - dictWord{143, 0, 4}, - dictWord{6, 0, 431}, - dictWord{134, 0, 669}, - dictWord{7, 10, 1127}, - dictWord{ - 7, - 10, - 1572, - }, - dictWord{10, 10, 297}, - dictWord{10, 10, 422}, - dictWord{11, 10, 764}, - dictWord{11, 10, 810}, - dictWord{12, 10, 264}, - dictWord{13, 10, 102}, - dictWord{13, 10, 300}, - dictWord{13, 10, 484}, - dictWord{14, 10, 147}, - dictWord{14, 10, 229}, - dictWord{17, 10, 71}, - dictWord{18, 10, 118}, - dictWord{ - 147, - 10, - 120, - }, - dictWord{5, 0, 874}, - dictWord{6, 0, 1677}, - dictWord{15, 0, 0}, - dictWord{10, 11, 525}, - dictWord{139, 11, 82}, - dictWord{6, 0, 65}, - dictWord{7, 0, 939}, - dictWord{ - 7, - 0, - 1172, - }, - dictWord{7, 0, 1671}, - dictWord{9, 0, 540}, - dictWord{10, 0, 696}, - dictWord{11, 0, 265}, - dictWord{11, 0, 732}, - dictWord{11, 0, 928}, - dictWord{ - 11, - 0, - 937, - }, - dictWord{141, 0, 438}, - dictWord{134, 0, 1350}, - dictWord{136, 11, 547}, - dictWord{132, 11, 422}, - dictWord{5, 11, 355}, - dictWord{145, 11, 0}, - dictWord{137, 11, 905}, - dictWord{5, 0, 682}, - dictWord{135, 0, 1887}, - dictWord{132, 0, 809}, - dictWord{4, 0, 696}, - dictWord{133, 11, 865}, - dictWord{6, 0, 1074}, - dictWord{6, 0, 1472}, - dictWord{14, 10, 35}, - dictWord{142, 10, 191}, - dictWord{5, 11, 914}, - dictWord{134, 11, 1625}, - dictWord{133, 11, 234}, - dictWord{ - 135, - 11, - 1383, - }, - dictWord{137, 11, 780}, - dictWord{132, 10, 125}, - dictWord{4, 0, 726}, - dictWord{133, 0, 630}, - dictWord{8, 0, 802}, - dictWord{136, 0, 838}, - dictWord{132, 10, 721}, - dictWord{6, 0, 1337}, - dictWord{7, 0, 776}, - dictWord{19, 0, 56}, - dictWord{136, 10, 145}, - dictWord{132, 0, 970}, - 
dictWord{7, 10, 792}, - dictWord{8, 10, 147}, - dictWord{10, 10, 821}, - dictWord{139, 10, 1021}, - dictWord{139, 10, 970}, - dictWord{8, 0, 940}, - dictWord{137, 0, 797}, - dictWord{ - 135, - 11, - 1312, - }, - dictWord{9, 0, 248}, - dictWord{10, 0, 400}, - dictWord{7, 11, 816}, - dictWord{7, 11, 1241}, - dictWord{7, 10, 1999}, - dictWord{9, 11, 283}, - dictWord{ - 9, - 11, - 520, - }, - dictWord{10, 11, 213}, - dictWord{10, 11, 307}, - dictWord{10, 11, 463}, - dictWord{10, 11, 671}, - dictWord{10, 11, 746}, - dictWord{11, 11, 401}, - dictWord{ - 11, - 11, - 794, - }, - dictWord{12, 11, 517}, - dictWord{18, 11, 107}, - dictWord{147, 11, 115}, - dictWord{6, 0, 1951}, - dictWord{134, 0, 2040}, - dictWord{ - 135, - 11, - 339, - }, - dictWord{13, 0, 41}, - dictWord{15, 0, 93}, - dictWord{5, 10, 168}, - dictWord{5, 10, 930}, - dictWord{8, 10, 74}, - dictWord{9, 10, 623}, - dictWord{12, 10, 500}, - dictWord{140, 10, 579}, - dictWord{6, 0, 118}, - dictWord{7, 0, 215}, - dictWord{7, 0, 1521}, - dictWord{140, 0, 11}, - dictWord{6, 10, 220}, - dictWord{7, 10, 1101}, - dictWord{141, 10, 105}, - dictWord{6, 11, 421}, - dictWord{7, 11, 61}, - dictWord{7, 11, 1540}, - dictWord{10, 11, 11}, - dictWord{138, 11, 501}, - dictWord{7, 0, 615}, - dictWord{138, 0, 251}, - dictWord{140, 11, 631}, - dictWord{135, 0, 1044}, - dictWord{6, 10, 19}, - dictWord{7, 10, 1413}, - dictWord{139, 10, 428}, - dictWord{ - 133, - 0, - 225, - }, - dictWord{7, 10, 96}, - dictWord{8, 10, 401}, - dictWord{8, 10, 703}, - dictWord{137, 10, 896}, - dictWord{145, 10, 116}, - dictWord{6, 11, 102}, - dictWord{ - 7, - 11, - 72, - }, - dictWord{15, 11, 142}, - dictWord{147, 11, 67}, - dictWord{7, 10, 1961}, - dictWord{7, 10, 1965}, - dictWord{8, 10, 702}, - dictWord{136, 10, 750}, - dictWord{ - 7, - 10, - 2030, - }, - dictWord{8, 10, 150}, - dictWord{8, 10, 737}, - dictWord{12, 10, 366}, - dictWord{151, 11, 30}, - dictWord{4, 0, 370}, - dictWord{5, 0, 756}, - dictWord{ - 7, - 0, - 1326, - }, - dictWord{135, 11, 
823}, - dictWord{8, 10, 800}, - dictWord{9, 10, 148}, - dictWord{9, 10, 872}, - dictWord{9, 10, 890}, - dictWord{11, 10, 309}, - dictWord{ - 11, - 10, - 1001, - }, - dictWord{13, 10, 267}, - dictWord{141, 10, 323}, - dictWord{6, 0, 1662}, - dictWord{7, 0, 48}, - dictWord{8, 0, 771}, - dictWord{10, 0, 116}, - dictWord{ - 13, - 0, - 104, - }, - dictWord{14, 0, 105}, - dictWord{14, 0, 184}, - dictWord{15, 0, 168}, - dictWord{19, 0, 92}, - dictWord{148, 0, 68}, - dictWord{10, 0, 209}, - dictWord{ - 135, - 11, - 1870, - }, - dictWord{7, 11, 68}, - dictWord{8, 11, 48}, - dictWord{8, 11, 88}, - dictWord{8, 11, 582}, - dictWord{8, 11, 681}, - dictWord{9, 11, 373}, - dictWord{9, 11, 864}, - dictWord{11, 11, 157}, - dictWord{11, 11, 336}, - dictWord{11, 11, 843}, - dictWord{148, 11, 27}, - dictWord{134, 0, 930}, - dictWord{4, 11, 88}, - dictWord{5, 11, 137}, - dictWord{5, 11, 174}, - dictWord{5, 11, 777}, - dictWord{6, 11, 1664}, - dictWord{6, 11, 1725}, - dictWord{7, 11, 77}, - dictWord{7, 11, 426}, - dictWord{7, 11, 1317}, - dictWord{7, 11, 1355}, - dictWord{8, 11, 126}, - dictWord{8, 11, 563}, - dictWord{9, 11, 523}, - dictWord{9, 11, 750}, - dictWord{10, 11, 310}, - dictWord{10, 11, 836}, - dictWord{11, 11, 42}, - dictWord{11, 11, 318}, - dictWord{11, 11, 731}, - dictWord{12, 11, 68}, - dictWord{12, 11, 92}, - dictWord{12, 11, 507}, - dictWord{12, 11, 692}, - dictWord{13, 11, 81}, - dictWord{13, 11, 238}, - dictWord{13, 11, 374}, - dictWord{18, 11, 138}, - dictWord{19, 11, 78}, - dictWord{19, 11, 111}, - dictWord{20, 11, 55}, - dictWord{20, 11, 77}, - dictWord{148, 11, 92}, - dictWord{4, 11, 938}, - dictWord{135, 11, 1831}, - dictWord{5, 10, 547}, - dictWord{7, 10, 424}, - dictWord{ - 8, - 11, - 617, - }, - dictWord{138, 11, 351}, - dictWord{6, 0, 1286}, - dictWord{6, 11, 1668}, - dictWord{7, 11, 1499}, - dictWord{8, 11, 117}, - dictWord{9, 11, 314}, - dictWord{ - 138, - 11, - 174, - }, - dictWord{6, 0, 759}, - dictWord{6, 0, 894}, - dictWord{7, 11, 707}, - dictWord{139, 
11, 563}, - dictWord{4, 0, 120}, - dictWord{135, 0, 1894}, - dictWord{ - 9, - 0, - 385, - }, - dictWord{149, 0, 17}, - dictWord{138, 0, 429}, - dictWord{133, 11, 403}, - dictWord{5, 0, 820}, - dictWord{135, 0, 931}, - dictWord{10, 0, 199}, - dictWord{ - 133, - 10, - 133, - }, - dictWord{6, 0, 151}, - dictWord{6, 0, 1675}, - dictWord{7, 0, 383}, - dictWord{151, 0, 10}, - dictWord{6, 0, 761}, - dictWord{136, 10, 187}, - dictWord{ - 8, - 0, - 365, - }, - dictWord{10, 10, 0}, - dictWord{10, 10, 818}, - dictWord{139, 10, 988}, - dictWord{4, 11, 44}, - dictWord{5, 11, 311}, - dictWord{6, 11, 156}, - dictWord{ - 7, - 11, - 639, - }, - dictWord{7, 11, 762}, - dictWord{7, 11, 1827}, - dictWord{9, 11, 8}, - dictWord{9, 11, 462}, - dictWord{148, 11, 83}, - dictWord{4, 11, 346}, - dictWord{7, 11, 115}, - dictWord{9, 11, 180}, - dictWord{9, 11, 456}, - dictWord{138, 11, 363}, - dictWord{136, 10, 685}, - dictWord{7, 0, 1086}, - dictWord{145, 0, 46}, - dictWord{ - 6, - 0, - 1624, - }, - dictWord{11, 0, 11}, - dictWord{12, 0, 422}, - dictWord{13, 0, 444}, - dictWord{142, 0, 360}, - dictWord{6, 0, 1020}, - dictWord{6, 0, 1260}, - dictWord{ - 134, - 0, - 1589, - }, - dictWord{4, 0, 43}, - dictWord{5, 0, 344}, - dictWord{5, 0, 357}, - dictWord{14, 0, 472}, - dictWord{150, 0, 58}, - dictWord{6, 0, 1864}, - dictWord{6, 0, 1866}, - dictWord{6, 0, 1868}, - dictWord{6, 0, 1869}, - dictWord{6, 0, 1874}, - dictWord{6, 0, 1877}, - dictWord{6, 0, 1903}, - dictWord{6, 0, 1911}, - dictWord{9, 0, 920}, - dictWord{ - 9, - 0, - 921, - }, - dictWord{9, 0, 924}, - dictWord{9, 0, 946}, - dictWord{9, 0, 959}, - dictWord{9, 0, 963}, - dictWord{9, 0, 970}, - dictWord{9, 0, 997}, - dictWord{9, 0, 1008}, - dictWord{ - 9, - 0, - 1017, - }, - dictWord{12, 0, 795}, - dictWord{12, 0, 797}, - dictWord{12, 0, 798}, - dictWord{12, 0, 800}, - dictWord{12, 0, 803}, - dictWord{12, 0, 811}, - dictWord{ - 12, - 0, - 820, - }, - dictWord{12, 0, 821}, - dictWord{12, 0, 839}, - dictWord{12, 0, 841}, - dictWord{12, 0, 
848}, - dictWord{12, 0, 911}, - dictWord{12, 0, 921}, - dictWord{12, 0, 922}, - dictWord{12, 0, 925}, - dictWord{12, 0, 937}, - dictWord{12, 0, 944}, - dictWord{12, 0, 945}, - dictWord{12, 0, 953}, - dictWord{15, 0, 184}, - dictWord{15, 0, 191}, - dictWord{15, 0, 199}, - dictWord{15, 0, 237}, - dictWord{15, 0, 240}, - dictWord{15, 0, 243}, - dictWord{15, 0, 246}, - dictWord{18, 0, 203}, - dictWord{21, 0, 40}, - dictWord{ - 21, - 0, - 52, - }, - dictWord{21, 0, 57}, - dictWord{24, 0, 23}, - dictWord{24, 0, 28}, - dictWord{152, 0, 30}, - dictWord{134, 0, 725}, - dictWord{145, 11, 58}, - dictWord{133, 0, 888}, - dictWord{137, 10, 874}, - dictWord{4, 0, 711}, - dictWord{8, 10, 774}, - dictWord{10, 10, 670}, - dictWord{140, 10, 51}, - dictWord{144, 11, 40}, - dictWord{ - 6, - 11, - 185, - }, - dictWord{7, 11, 1899}, - dictWord{139, 11, 673}, - dictWord{137, 10, 701}, - dictWord{137, 0, 440}, - dictWord{4, 11, 327}, - dictWord{5, 11, 478}, - dictWord{ - 7, - 11, - 1332, - }, - dictWord{8, 11, 753}, - dictWord{140, 11, 227}, - dictWord{4, 10, 127}, - dictWord{5, 10, 350}, - dictWord{6, 10, 356}, - dictWord{8, 10, 426}, - dictWord{ - 9, - 10, - 572, - }, - dictWord{10, 10, 247}, - dictWord{139, 10, 312}, - dictWord{5, 11, 1020}, - dictWord{133, 11, 1022}, - dictWord{4, 11, 103}, - dictWord{ - 133, - 11, - 401, - }, - dictWord{6, 0, 1913}, - dictWord{6, 0, 1926}, - dictWord{6, 0, 1959}, - dictWord{9, 0, 914}, - dictWord{9, 0, 939}, - dictWord{9, 0, 952}, - dictWord{9, 0, 979}, - dictWord{ - 9, - 0, - 990, - }, - dictWord{9, 0, 998}, - dictWord{9, 0, 1003}, - dictWord{9, 0, 1023}, - dictWord{12, 0, 827}, - dictWord{12, 0, 834}, - dictWord{12, 0, 845}, - dictWord{ - 12, - 0, - 912, - }, - dictWord{12, 0, 935}, - dictWord{12, 0, 951}, - dictWord{15, 0, 172}, - dictWord{15, 0, 174}, - dictWord{18, 0, 198}, - dictWord{149, 0, 63}, - dictWord{5, 0, 958}, - dictWord{5, 0, 987}, - dictWord{4, 11, 499}, - dictWord{135, 11, 1421}, - dictWord{7, 0, 885}, - dictWord{6, 10, 59}, - 
dictWord{6, 10, 1762}, - dictWord{9, 10, 603}, - dictWord{141, 10, 397}, - dictWord{10, 11, 62}, - dictWord{141, 11, 164}, - dictWord{4, 0, 847}, - dictWord{135, 0, 326}, - dictWord{11, 0, 276}, - dictWord{142, 0, 293}, - dictWord{4, 0, 65}, - dictWord{5, 0, 479}, - dictWord{5, 0, 1004}, - dictWord{7, 0, 1913}, - dictWord{8, 0, 317}, - dictWord{9, 0, 302}, - dictWord{10, 0, 612}, - dictWord{ - 13, - 0, - 22, - }, - dictWord{132, 11, 96}, - dictWord{4, 0, 261}, - dictWord{135, 0, 510}, - dictWord{135, 0, 1514}, - dictWord{6, 10, 111}, - dictWord{7, 10, 4}, - dictWord{8, 10, 163}, - dictWord{8, 10, 776}, - dictWord{138, 10, 566}, - dictWord{4, 0, 291}, - dictWord{9, 0, 515}, - dictWord{12, 0, 152}, - dictWord{12, 0, 443}, - dictWord{13, 0, 392}, - dictWord{142, 0, 357}, - dictWord{7, 11, 399}, - dictWord{135, 11, 1492}, - dictWord{4, 0, 589}, - dictWord{139, 0, 282}, - dictWord{6, 11, 563}, - dictWord{ - 135, - 10, - 1994, - }, - dictWord{5, 10, 297}, - dictWord{135, 10, 1038}, - dictWord{4, 0, 130}, - dictWord{7, 0, 843}, - dictWord{135, 0, 1562}, - dictWord{5, 0, 42}, - dictWord{ - 5, - 0, - 879, - }, - dictWord{7, 0, 245}, - dictWord{7, 0, 324}, - dictWord{7, 0, 1532}, - dictWord{11, 0, 463}, - dictWord{11, 0, 472}, - dictWord{13, 0, 363}, - dictWord{144, 0, 52}, - dictWord{4, 0, 134}, - dictWord{133, 0, 372}, - dictWord{133, 0, 680}, - dictWord{136, 10, 363}, - dictWord{6, 0, 1997}, - dictWord{8, 0, 935}, - dictWord{136, 0, 977}, - dictWord{4, 0, 810}, - dictWord{135, 0, 1634}, - dictWord{135, 10, 1675}, - dictWord{7, 0, 1390}, - dictWord{4, 11, 910}, - dictWord{133, 11, 832}, - dictWord{ - 7, - 10, - 808, - }, - dictWord{8, 11, 266}, - dictWord{139, 11, 578}, - dictWord{132, 0, 644}, - dictWord{4, 0, 982}, - dictWord{138, 0, 867}, - dictWord{132, 10, 280}, - dictWord{ - 135, - 0, - 540, - }, - dictWord{140, 10, 54}, - dictWord{135, 0, 123}, - dictWord{134, 0, 1978}, - dictWord{4, 10, 421}, - dictWord{133, 10, 548}, - dictWord{6, 0, 623}, - dictWord{136, 0, 789}, 
- dictWord{4, 0, 908}, - dictWord{5, 0, 359}, - dictWord{5, 0, 508}, - dictWord{6, 0, 1723}, - dictWord{7, 0, 343}, - dictWord{7, 0, 1996}, - dictWord{ - 135, - 0, - 2026, - }, - dictWord{134, 0, 1220}, - dictWord{4, 0, 341}, - dictWord{135, 0, 480}, - dictWord{6, 10, 254}, - dictWord{9, 10, 109}, - dictWord{138, 10, 103}, - dictWord{ - 134, - 0, - 888, - }, - dictWord{8, 11, 528}, - dictWord{137, 11, 348}, - dictWord{7, 0, 1995}, - dictWord{8, 0, 299}, - dictWord{11, 0, 890}, - dictWord{12, 0, 674}, - dictWord{ - 4, - 11, - 20, - }, - dictWord{133, 11, 616}, - dictWord{135, 11, 1094}, - dictWord{134, 10, 1630}, - dictWord{4, 0, 238}, - dictWord{5, 0, 503}, - dictWord{6, 0, 179}, - dictWord{ - 7, - 0, - 2003, - }, - dictWord{8, 0, 381}, - dictWord{8, 0, 473}, - dictWord{9, 0, 149}, - dictWord{10, 0, 788}, - dictWord{15, 0, 45}, - dictWord{15, 0, 86}, - dictWord{20, 0, 110}, - dictWord{150, 0, 57}, - dictWord{133, 10, 671}, - dictWord{4, 11, 26}, - dictWord{5, 11, 429}, - dictWord{6, 11, 245}, - dictWord{7, 11, 704}, - dictWord{7, 11, 1379}, - dictWord{135, 11, 1474}, - dictWord{4, 0, 121}, - dictWord{5, 0, 156}, - dictWord{5, 0, 349}, - dictWord{9, 0, 431}, - dictWord{10, 0, 605}, - dictWord{142, 0, 342}, - dictWord{ - 7, - 11, - 943, - }, - dictWord{139, 11, 614}, - dictWord{132, 10, 889}, - dictWord{132, 11, 621}, - dictWord{7, 10, 1382}, - dictWord{7, 11, 1382}, - dictWord{ - 135, - 10, - 1910, - }, - dictWord{132, 10, 627}, - dictWord{133, 10, 775}, - dictWord{133, 11, 542}, - dictWord{133, 11, 868}, - dictWord{136, 11, 433}, - dictWord{6, 0, 1373}, - dictWord{7, 0, 1011}, - dictWord{11, 10, 362}, - dictWord{11, 10, 948}, - dictWord{140, 10, 388}, - dictWord{6, 0, 80}, - dictWord{7, 0, 173}, - dictWord{9, 0, 547}, - dictWord{10, 0, 730}, - dictWord{14, 0, 18}, - dictWord{22, 0, 39}, - dictWord{135, 11, 1495}, - dictWord{6, 0, 1694}, - dictWord{135, 0, 1974}, - dictWord{140, 0, 196}, - dictWord{4, 0, 923}, - dictWord{6, 0, 507}, - dictWord{6, 0, 1711}, - 
dictWord{7, 10, 451}, - dictWord{8, 10, 389}, - dictWord{12, 10, 490}, - dictWord{13, 10, 16}, - dictWord{ - 13, - 10, - 215, - }, - dictWord{13, 10, 351}, - dictWord{18, 10, 132}, - dictWord{147, 10, 125}, - dictWord{6, 0, 646}, - dictWord{134, 0, 1047}, - dictWord{135, 10, 841}, - dictWord{136, 10, 566}, - dictWord{6, 0, 1611}, - dictWord{135, 0, 1214}, - dictWord{139, 0, 926}, - dictWord{132, 11, 525}, - dictWord{132, 0, 595}, - dictWord{ - 5, - 0, - 240, - }, - dictWord{6, 0, 459}, - dictWord{7, 0, 12}, - dictWord{7, 0, 114}, - dictWord{7, 0, 949}, - dictWord{7, 0, 1753}, - dictWord{7, 0, 1805}, - dictWord{8, 0, 658}, - dictWord{ - 9, - 0, - 1, - }, - dictWord{11, 0, 959}, - dictWord{141, 0, 446}, - dictWord{5, 10, 912}, - dictWord{134, 10, 1695}, - dictWord{132, 0, 446}, - dictWord{7, 11, 62}, - dictWord{ - 12, - 11, - 45, - }, - dictWord{147, 11, 112}, - dictWord{5, 10, 236}, - dictWord{6, 10, 572}, - dictWord{8, 10, 492}, - dictWord{11, 10, 618}, - dictWord{144, 10, 56}, - dictWord{ - 5, - 10, - 190, - }, - dictWord{136, 10, 318}, - dictWord{135, 10, 1376}, - dictWord{4, 11, 223}, - dictWord{6, 11, 359}, - dictWord{11, 11, 3}, - dictWord{13, 11, 108}, - dictWord{ - 14, - 11, - 89, - }, - dictWord{144, 11, 22}, - dictWord{132, 11, 647}, - dictWord{134, 0, 490}, - dictWord{134, 0, 491}, - dictWord{134, 0, 1584}, - dictWord{ - 135, - 11, - 685, - }, - dictWord{138, 11, 220}, - dictWord{7, 0, 250}, - dictWord{136, 0, 507}, - dictWord{132, 0, 158}, - dictWord{4, 0, 140}, - dictWord{7, 0, 362}, - dictWord{8, 0, 209}, - dictWord{9, 0, 10}, - dictWord{9, 0, 160}, - dictWord{9, 0, 503}, - dictWord{9, 0, 614}, - dictWord{10, 0, 689}, - dictWord{11, 0, 327}, - dictWord{11, 0, 553}, - dictWord{ - 11, - 0, - 725, - }, - dictWord{11, 0, 767}, - dictWord{12, 0, 252}, - dictWord{12, 0, 583}, - dictWord{13, 0, 192}, - dictWord{14, 0, 269}, - dictWord{14, 0, 356}, - dictWord{148, 0, 50}, - dictWord{19, 0, 1}, - dictWord{19, 0, 26}, - dictWord{150, 0, 9}, - dictWord{132, 11, 
109}, - dictWord{6, 0, 228}, - dictWord{7, 0, 1341}, - dictWord{9, 0, 408}, - dictWord{ - 138, - 0, - 343, - }, - dictWord{4, 0, 373}, - dictWord{5, 0, 283}, - dictWord{6, 0, 480}, - dictWord{7, 0, 609}, - dictWord{10, 0, 860}, - dictWord{138, 0, 878}, - dictWord{6, 0, 779}, - dictWord{134, 0, 1209}, - dictWord{4, 0, 557}, - dictWord{7, 11, 263}, - dictWord{7, 11, 628}, - dictWord{136, 11, 349}, - dictWord{132, 0, 548}, - dictWord{7, 0, 197}, - dictWord{8, 0, 142}, - dictWord{8, 0, 325}, - dictWord{9, 0, 150}, - dictWord{9, 0, 596}, - dictWord{10, 0, 350}, - dictWord{10, 0, 353}, - dictWord{11, 0, 74}, - dictWord{ - 11, - 0, - 315, - }, - dictWord{12, 0, 662}, - dictWord{12, 0, 681}, - dictWord{14, 0, 423}, - dictWord{143, 0, 141}, - dictWord{4, 11, 40}, - dictWord{10, 11, 67}, - dictWord{ - 11, - 11, - 117, - }, - dictWord{11, 11, 768}, - dictWord{139, 11, 935}, - dictWord{7, 11, 992}, - dictWord{8, 11, 301}, - dictWord{9, 11, 722}, - dictWord{12, 11, 63}, - dictWord{ - 13, - 11, - 29, - }, - dictWord{14, 11, 161}, - dictWord{143, 11, 18}, - dictWord{6, 0, 1490}, - dictWord{138, 11, 532}, - dictWord{5, 0, 580}, - dictWord{7, 0, 378}, - dictWord{ - 7, - 0, - 674, - }, - dictWord{7, 0, 1424}, - dictWord{15, 0, 83}, - dictWord{16, 0, 11}, - dictWord{15, 11, 83}, - dictWord{144, 11, 11}, - dictWord{6, 0, 1057}, - dictWord{6, 0, 1335}, - dictWord{10, 0, 316}, - dictWord{7, 10, 85}, - dictWord{7, 10, 247}, - dictWord{8, 10, 585}, - dictWord{138, 10, 163}, - dictWord{4, 0, 169}, - dictWord{5, 0, 83}, - dictWord{ - 6, - 0, - 399, - }, - dictWord{6, 0, 579}, - dictWord{6, 0, 1513}, - dictWord{7, 0, 692}, - dictWord{7, 0, 846}, - dictWord{7, 0, 1015}, - dictWord{7, 0, 1799}, - dictWord{8, 0, 403}, - dictWord{9, 0, 394}, - dictWord{10, 0, 133}, - dictWord{12, 0, 4}, - dictWord{12, 0, 297}, - dictWord{12, 0, 452}, - dictWord{16, 0, 81}, - dictWord{18, 0, 25}, - dictWord{21, 0, 14}, - dictWord{22, 0, 12}, - dictWord{151, 0, 18}, - dictWord{134, 0, 1106}, - dictWord{7, 0, 
1546}, - dictWord{11, 0, 299}, - dictWord{142, 0, 407}, - dictWord{134, 0, 1192}, - dictWord{132, 0, 177}, - dictWord{5, 0, 411}, - dictWord{135, 0, 653}, - dictWord{7, 0, 439}, - dictWord{10, 0, 727}, - dictWord{11, 0, 260}, - dictWord{139, 0, 684}, - dictWord{138, 10, 145}, - dictWord{147, 10, 83}, - dictWord{5, 0, 208}, - dictWord{7, 0, 753}, - dictWord{135, 0, 1528}, - dictWord{137, 11, 617}, - dictWord{ - 135, - 10, - 1922, - }, - dictWord{135, 11, 825}, - dictWord{11, 0, 422}, - dictWord{13, 0, 389}, - dictWord{4, 10, 124}, - dictWord{10, 10, 457}, - dictWord{11, 10, 121}, - dictWord{ - 11, - 10, - 169, - }, - dictWord{11, 10, 870}, - dictWord{12, 10, 214}, - dictWord{14, 10, 187}, - dictWord{143, 10, 77}, - dictWord{11, 0, 615}, - dictWord{15, 0, 58}, - dictWord{ - 11, - 11, - 615, - }, - dictWord{143, 11, 58}, - dictWord{9, 0, 618}, - dictWord{138, 0, 482}, - dictWord{6, 0, 1952}, - dictWord{6, 0, 1970}, - dictWord{142, 0, 505}, - dictWord{ - 7, - 10, - 1193, - }, - dictWord{135, 11, 1838}, - dictWord{133, 0, 242}, - dictWord{135, 10, 1333}, - dictWord{6, 10, 107}, - dictWord{7, 10, 638}, - dictWord{ - 7, - 10, - 1632, - }, - dictWord{137, 10, 396}, - dictWord{133, 0, 953}, - dictWord{5, 10, 370}, - dictWord{134, 10, 1756}, - dictWord{5, 11, 28}, - dictWord{6, 11, 204}, - dictWord{ - 10, - 11, - 320, - }, - dictWord{10, 11, 583}, - dictWord{13, 11, 502}, - dictWord{14, 11, 72}, - dictWord{14, 11, 274}, - dictWord{14, 11, 312}, - dictWord{14, 11, 344}, - dictWord{15, 11, 159}, - dictWord{16, 11, 62}, - dictWord{16, 11, 69}, - dictWord{17, 11, 30}, - dictWord{18, 11, 42}, - dictWord{18, 11, 53}, - dictWord{18, 11, 84}, - dictWord{18, 11, 140}, - dictWord{19, 11, 68}, - dictWord{19, 11, 85}, - dictWord{20, 11, 5}, - dictWord{20, 11, 45}, - dictWord{20, 11, 101}, - dictWord{22, 11, 7}, - dictWord{ - 150, - 11, - 20, - }, - dictWord{4, 11, 558}, - dictWord{6, 11, 390}, - dictWord{7, 11, 162}, - dictWord{7, 11, 689}, - dictWord{9, 11, 360}, - dictWord{138, 11, 
653}, - dictWord{ - 11, - 0, - 802, - }, - dictWord{141, 0, 67}, - dictWord{133, 10, 204}, - dictWord{133, 0, 290}, - dictWord{5, 10, 970}, - dictWord{134, 10, 1706}, - dictWord{132, 0, 380}, - dictWord{5, 0, 52}, - dictWord{7, 0, 277}, - dictWord{9, 0, 368}, - dictWord{139, 0, 791}, - dictWord{5, 11, 856}, - dictWord{6, 11, 1672}, - dictWord{6, 11, 1757}, - dictWord{ - 6, - 11, - 1781, - }, - dictWord{7, 11, 1150}, - dictWord{7, 11, 1425}, - dictWord{7, 11, 1453}, - dictWord{140, 11, 513}, - dictWord{5, 11, 92}, - dictWord{7, 10, 3}, - dictWord{ - 10, - 11, - 736, - }, - dictWord{140, 11, 102}, - dictWord{4, 0, 112}, - dictWord{5, 0, 653}, - dictWord{5, 10, 483}, - dictWord{5, 10, 685}, - dictWord{6, 10, 489}, - dictWord{ - 7, - 10, - 1204, - }, - dictWord{136, 10, 394}, - dictWord{132, 10, 921}, - dictWord{6, 0, 1028}, - dictWord{133, 10, 1007}, - dictWord{5, 11, 590}, - dictWord{9, 11, 213}, - dictWord{145, 11, 91}, - dictWord{135, 10, 1696}, - dictWord{10, 0, 138}, - dictWord{139, 0, 476}, - dictWord{5, 0, 725}, - dictWord{5, 0, 727}, - dictWord{135, 0, 1811}, - dictWord{4, 0, 979}, - dictWord{6, 0, 1821}, - dictWord{6, 0, 1838}, - dictWord{8, 0, 876}, - dictWord{8, 0, 883}, - dictWord{8, 0, 889}, - dictWord{8, 0, 893}, - dictWord{ - 8, - 0, - 895, - }, - dictWord{10, 0, 934}, - dictWord{12, 0, 720}, - dictWord{14, 0, 459}, - dictWord{148, 0, 123}, - dictWord{135, 11, 551}, - dictWord{4, 0, 38}, - dictWord{6, 0, 435}, - dictWord{7, 0, 307}, - dictWord{7, 0, 999}, - dictWord{7, 0, 1481}, - dictWord{7, 0, 1732}, - dictWord{7, 0, 1738}, - dictWord{8, 0, 371}, - dictWord{9, 0, 414}, - dictWord{ - 11, - 0, - 316, - }, - dictWord{12, 0, 52}, - dictWord{13, 0, 420}, - dictWord{147, 0, 100}, - dictWord{135, 0, 1296}, - dictWord{132, 10, 712}, - dictWord{134, 10, 1629}, - dictWord{133, 0, 723}, - dictWord{134, 0, 651}, - dictWord{136, 11, 191}, - dictWord{9, 11, 791}, - dictWord{10, 11, 93}, - dictWord{11, 11, 301}, - dictWord{16, 11, 13}, - dictWord{17, 11, 23}, - 
dictWord{18, 11, 135}, - dictWord{19, 11, 12}, - dictWord{20, 11, 1}, - dictWord{20, 11, 12}, - dictWord{148, 11, 14}, - dictWord{136, 11, 503}, - dictWord{6, 11, 466}, - dictWord{135, 11, 671}, - dictWord{6, 0, 1200}, - dictWord{134, 0, 1330}, - dictWord{135, 0, 1255}, - dictWord{134, 0, 986}, - dictWord{ - 5, - 0, - 109, - }, - dictWord{6, 0, 1784}, - dictWord{7, 0, 1895}, - dictWord{12, 0, 296}, - dictWord{140, 0, 302}, - dictWord{135, 11, 983}, - dictWord{133, 10, 485}, - dictWord{ - 134, - 0, - 660, - }, - dictWord{134, 0, 800}, - dictWord{5, 0, 216}, - dictWord{5, 0, 294}, - dictWord{6, 0, 591}, - dictWord{7, 0, 1879}, - dictWord{9, 0, 141}, - dictWord{9, 0, 270}, - dictWord{9, 0, 679}, - dictWord{10, 0, 159}, - dictWord{11, 0, 197}, - dictWord{11, 0, 438}, - dictWord{12, 0, 538}, - dictWord{12, 0, 559}, - dictWord{14, 0, 144}, - dictWord{ - 14, - 0, - 167, - }, - dictWord{15, 0, 67}, - dictWord{4, 10, 285}, - dictWord{5, 10, 317}, - dictWord{6, 10, 301}, - dictWord{7, 10, 7}, - dictWord{8, 10, 153}, - dictWord{ - 10, - 10, - 766, - }, - dictWord{11, 10, 468}, - dictWord{12, 10, 467}, - dictWord{141, 10, 143}, - dictWord{136, 0, 945}, - dictWord{134, 0, 1090}, - dictWord{137, 0, 81}, - dictWord{12, 11, 468}, - dictWord{19, 11, 96}, - dictWord{148, 11, 24}, - dictWord{134, 0, 391}, - dictWord{138, 11, 241}, - dictWord{7, 0, 322}, - dictWord{136, 0, 249}, - dictWord{134, 0, 1412}, - dictWord{135, 11, 795}, - dictWord{5, 0, 632}, - dictWord{138, 0, 526}, - dictWord{136, 10, 819}, - dictWord{6, 0, 144}, - dictWord{7, 0, 948}, - dictWord{7, 0, 1042}, - dictWord{8, 0, 235}, - dictWord{8, 0, 461}, - dictWord{9, 0, 453}, - dictWord{9, 0, 796}, - dictWord{10, 0, 354}, - dictWord{17, 0, 77}, - dictWord{ - 135, - 11, - 954, - }, - dictWord{139, 10, 917}, - dictWord{6, 0, 940}, - dictWord{134, 0, 1228}, - dictWord{4, 0, 362}, - dictWord{7, 0, 52}, - dictWord{135, 0, 303}, - dictWord{ - 6, - 11, - 549, - }, - dictWord{8, 11, 34}, - dictWord{8, 11, 283}, - dictWord{9, 11, 
165}, - dictWord{138, 11, 475}, - dictWord{7, 11, 370}, - dictWord{7, 11, 1007}, - dictWord{ - 7, - 11, - 1177, - }, - dictWord{135, 11, 1565}, - dictWord{5, 11, 652}, - dictWord{5, 11, 701}, - dictWord{135, 11, 449}, - dictWord{5, 0, 196}, - dictWord{6, 0, 486}, - dictWord{ - 7, - 0, - 212, - }, - dictWord{8, 0, 309}, - dictWord{136, 0, 346}, - dictWord{6, 10, 1719}, - dictWord{6, 10, 1735}, - dictWord{7, 10, 2016}, - dictWord{7, 10, 2020}, - dictWord{ - 8, - 10, - 837, - }, - dictWord{137, 10, 852}, - dictWord{6, 11, 159}, - dictWord{6, 11, 364}, - dictWord{7, 11, 516}, - dictWord{7, 11, 1439}, - dictWord{137, 11, 518}, - dictWord{135, 0, 1912}, - dictWord{135, 0, 1290}, - dictWord{132, 0, 686}, - dictWord{141, 11, 151}, - dictWord{138, 0, 625}, - dictWord{136, 0, 706}, - dictWord{ - 138, - 10, - 568, - }, - dictWord{139, 0, 412}, - dictWord{4, 0, 30}, - dictWord{133, 0, 43}, - dictWord{8, 10, 67}, - dictWord{138, 10, 419}, - dictWord{7, 0, 967}, - dictWord{ - 141, - 0, - 11, - }, - dictWord{12, 0, 758}, - dictWord{14, 0, 441}, - dictWord{142, 0, 462}, - dictWord{10, 10, 657}, - dictWord{14, 10, 297}, - dictWord{142, 10, 361}, - dictWord{ - 139, - 10, - 729, - }, - dictWord{4, 0, 220}, - dictWord{135, 0, 1535}, - dictWord{7, 11, 501}, - dictWord{9, 11, 111}, - dictWord{10, 11, 141}, - dictWord{11, 11, 332}, - dictWord{ - 13, - 11, - 43, - }, - dictWord{13, 11, 429}, - dictWord{14, 11, 130}, - dictWord{14, 11, 415}, - dictWord{145, 11, 102}, - dictWord{4, 0, 950}, - dictWord{6, 0, 1859}, - dictWord{ - 7, - 0, - 11, - }, - dictWord{8, 0, 873}, - dictWord{12, 0, 710}, - dictWord{12, 0, 718}, - dictWord{12, 0, 748}, - dictWord{12, 0, 765}, - dictWord{148, 0, 124}, - dictWord{ - 5, - 11, - 149, - }, - dictWord{5, 11, 935}, - dictWord{136, 11, 233}, - dictWord{142, 11, 291}, - dictWord{134, 0, 1579}, - dictWord{7, 0, 890}, - dictWord{8, 10, 51}, - dictWord{ - 9, - 10, - 868, - }, - dictWord{10, 10, 833}, - dictWord{12, 10, 481}, - dictWord{12, 10, 570}, - dictWord{148, 
10, 106}, - dictWord{141, 0, 2}, - dictWord{132, 10, 445}, - dictWord{136, 11, 801}, - dictWord{135, 0, 1774}, - dictWord{7, 0, 1725}, - dictWord{138, 0, 393}, - dictWord{5, 0, 263}, - dictWord{134, 0, 414}, - dictWord{ - 132, - 11, - 322, - }, - dictWord{133, 10, 239}, - dictWord{7, 0, 456}, - dictWord{7, 10, 1990}, - dictWord{8, 10, 130}, - dictWord{139, 10, 720}, - dictWord{137, 0, 818}, - dictWord{ - 5, - 10, - 123, - }, - dictWord{6, 10, 530}, - dictWord{7, 10, 348}, - dictWord{135, 10, 1419}, - dictWord{135, 10, 2024}, - dictWord{6, 0, 178}, - dictWord{6, 0, 1750}, - dictWord{8, 0, 251}, - dictWord{9, 0, 690}, - dictWord{10, 0, 155}, - dictWord{10, 0, 196}, - dictWord{10, 0, 373}, - dictWord{11, 0, 698}, - dictWord{13, 0, 155}, - dictWord{ - 148, - 0, - 93, - }, - dictWord{5, 0, 97}, - dictWord{137, 0, 393}, - dictWord{134, 0, 674}, - dictWord{11, 0, 223}, - dictWord{140, 0, 168}, - dictWord{132, 10, 210}, - dictWord{ - 139, - 11, - 464, - }, - dictWord{6, 0, 1639}, - dictWord{146, 0, 159}, - dictWord{139, 11, 2}, - dictWord{7, 0, 934}, - dictWord{8, 0, 647}, - dictWord{17, 0, 97}, - dictWord{19, 0, 59}, - dictWord{150, 0, 2}, - dictWord{132, 0, 191}, - dictWord{5, 0, 165}, - dictWord{9, 0, 346}, - dictWord{10, 0, 655}, - dictWord{11, 0, 885}, - dictWord{4, 10, 430}, - dictWord{135, 11, 357}, - dictWord{133, 0, 877}, - dictWord{5, 10, 213}, - dictWord{133, 11, 406}, - dictWord{8, 0, 128}, - dictWord{139, 0, 179}, - dictWord{6, 11, 69}, - dictWord{135, 11, 117}, - dictWord{135, 0, 1297}, - dictWord{11, 11, 43}, - dictWord{13, 11, 72}, - dictWord{141, 11, 142}, - dictWord{135, 11, 1830}, - dictWord{ - 142, - 0, - 164, - }, - dictWord{5, 0, 57}, - dictWord{6, 0, 101}, - dictWord{6, 0, 586}, - dictWord{6, 0, 1663}, - dictWord{7, 0, 132}, - dictWord{7, 0, 1154}, - dictWord{7, 0, 1415}, - dictWord{7, 0, 1507}, - dictWord{12, 0, 493}, - dictWord{15, 0, 105}, - dictWord{151, 0, 15}, - dictWord{5, 0, 459}, - dictWord{7, 0, 1073}, - dictWord{8, 0, 241}, - dictWord{ - 
136, - 0, - 334, - }, - dictWord{133, 11, 826}, - dictWord{133, 10, 108}, - dictWord{5, 10, 219}, - dictWord{10, 11, 132}, - dictWord{11, 11, 191}, - dictWord{11, 11, 358}, - dictWord{139, 11, 460}, - dictWord{6, 0, 324}, - dictWord{6, 0, 520}, - dictWord{7, 0, 338}, - dictWord{7, 0, 1729}, - dictWord{8, 0, 228}, - dictWord{139, 0, 750}, - dictWord{ - 21, - 0, - 30, - }, - dictWord{22, 0, 53}, - dictWord{4, 10, 193}, - dictWord{5, 10, 916}, - dictWord{7, 10, 364}, - dictWord{10, 10, 398}, - dictWord{10, 10, 726}, - dictWord{ - 11, - 10, - 317, - }, - dictWord{11, 10, 626}, - dictWord{12, 10, 142}, - dictWord{12, 10, 288}, - dictWord{12, 10, 678}, - dictWord{13, 10, 313}, - dictWord{15, 10, 113}, - dictWord{146, 10, 114}, - dictWord{6, 11, 110}, - dictWord{135, 11, 1681}, - dictWord{135, 0, 910}, - dictWord{6, 10, 241}, - dictWord{7, 10, 907}, - dictWord{8, 10, 832}, - dictWord{9, 10, 342}, - dictWord{10, 10, 729}, - dictWord{11, 10, 284}, - dictWord{11, 10, 445}, - dictWord{11, 10, 651}, - dictWord{11, 10, 863}, - dictWord{ - 13, - 10, - 398, - }, - dictWord{146, 10, 99}, - dictWord{7, 0, 705}, - dictWord{9, 0, 734}, - dictWord{5, 11, 1000}, - dictWord{7, 11, 733}, - dictWord{137, 11, 583}, - dictWord{4, 0, 73}, - dictWord{6, 0, 612}, - dictWord{7, 0, 927}, - dictWord{7, 0, 1822}, - dictWord{8, 0, 217}, - dictWord{9, 0, 765}, - dictWord{9, 0, 766}, - dictWord{10, 0, 408}, - dictWord{ - 11, - 0, - 51, - }, - dictWord{11, 0, 793}, - dictWord{12, 0, 266}, - dictWord{15, 0, 158}, - dictWord{20, 0, 89}, - dictWord{150, 0, 32}, - dictWord{7, 0, 1330}, - dictWord{4, 11, 297}, - dictWord{6, 11, 529}, - dictWord{7, 11, 152}, - dictWord{7, 11, 713}, - dictWord{7, 11, 1845}, - dictWord{8, 11, 710}, - dictWord{8, 11, 717}, - dictWord{140, 11, 639}, - dictWord{5, 0, 389}, - dictWord{136, 0, 636}, - dictWord{134, 0, 1409}, - dictWord{4, 10, 562}, - dictWord{9, 10, 254}, - dictWord{139, 10, 879}, - dictWord{134, 0, 893}, - dictWord{132, 10, 786}, - dictWord{4, 11, 520}, - 
dictWord{135, 11, 575}, - dictWord{136, 0, 21}, - dictWord{140, 0, 721}, - dictWord{136, 0, 959}, - dictWord{ - 7, - 11, - 1428, - }, - dictWord{7, 11, 1640}, - dictWord{9, 11, 169}, - dictWord{9, 11, 182}, - dictWord{9, 11, 367}, - dictWord{9, 11, 478}, - dictWord{9, 11, 506}, - dictWord{ - 9, - 11, - 551, - }, - dictWord{9, 11, 648}, - dictWord{9, 11, 651}, - dictWord{9, 11, 697}, - dictWord{9, 11, 705}, - dictWord{9, 11, 725}, - dictWord{9, 11, 787}, - dictWord{9, 11, 794}, - dictWord{10, 11, 198}, - dictWord{10, 11, 214}, - dictWord{10, 11, 267}, - dictWord{10, 11, 275}, - dictWord{10, 11, 456}, - dictWord{10, 11, 551}, - dictWord{ - 10, - 11, - 561, - }, - dictWord{10, 11, 613}, - dictWord{10, 11, 627}, - dictWord{10, 11, 668}, - dictWord{10, 11, 675}, - dictWord{10, 11, 691}, - dictWord{10, 11, 695}, - dictWord{10, 11, 707}, - dictWord{10, 11, 715}, - dictWord{11, 11, 183}, - dictWord{11, 11, 201}, - dictWord{11, 11, 244}, - dictWord{11, 11, 262}, - dictWord{ - 11, - 11, - 352, - }, - dictWord{11, 11, 439}, - dictWord{11, 11, 493}, - dictWord{11, 11, 572}, - dictWord{11, 11, 591}, - dictWord{11, 11, 608}, - dictWord{11, 11, 611}, - dictWord{ - 11, - 11, - 646, - }, - dictWord{11, 11, 674}, - dictWord{11, 11, 711}, - dictWord{11, 11, 751}, - dictWord{11, 11, 761}, - dictWord{11, 11, 776}, - dictWord{11, 11, 785}, - dictWord{11, 11, 850}, - dictWord{11, 11, 853}, - dictWord{11, 11, 862}, - dictWord{11, 11, 865}, - dictWord{11, 11, 868}, - dictWord{11, 11, 898}, - dictWord{ - 11, - 11, - 902, - }, - dictWord{11, 11, 903}, - dictWord{11, 11, 910}, - dictWord{11, 11, 932}, - dictWord{11, 11, 942}, - dictWord{11, 11, 957}, - dictWord{11, 11, 967}, - dictWord{ - 11, - 11, - 972, - }, - dictWord{12, 11, 148}, - dictWord{12, 11, 195}, - dictWord{12, 11, 220}, - dictWord{12, 11, 237}, - dictWord{12, 11, 318}, - dictWord{12, 11, 339}, - dictWord{12, 11, 393}, - dictWord{12, 11, 445}, - dictWord{12, 11, 450}, - dictWord{12, 11, 474}, - dictWord{12, 11, 509}, - 
dictWord{12, 11, 533}, - dictWord{ - 12, - 11, - 591, - }, - dictWord{12, 11, 594}, - dictWord{12, 11, 597}, - dictWord{12, 11, 621}, - dictWord{12, 11, 633}, - dictWord{12, 11, 642}, - dictWord{13, 11, 59}, - dictWord{ - 13, - 11, - 60, - }, - dictWord{13, 11, 145}, - dictWord{13, 11, 239}, - dictWord{13, 11, 250}, - dictWord{13, 11, 273}, - dictWord{13, 11, 329}, - dictWord{13, 11, 344}, - dictWord{13, 11, 365}, - dictWord{13, 11, 372}, - dictWord{13, 11, 387}, - dictWord{13, 11, 403}, - dictWord{13, 11, 414}, - dictWord{13, 11, 456}, - dictWord{ - 13, - 11, - 478, - }, - dictWord{13, 11, 483}, - dictWord{13, 11, 489}, - dictWord{14, 11, 55}, - dictWord{14, 11, 57}, - dictWord{14, 11, 81}, - dictWord{14, 11, 90}, - dictWord{ - 14, - 11, - 148, - }, - dictWord{14, 11, 239}, - dictWord{14, 11, 266}, - dictWord{14, 11, 321}, - dictWord{14, 11, 326}, - dictWord{14, 11, 327}, - dictWord{14, 11, 330}, - dictWord{ - 14, - 11, - 347, - }, - dictWord{14, 11, 355}, - dictWord{14, 11, 401}, - dictWord{14, 11, 411}, - dictWord{14, 11, 414}, - dictWord{14, 11, 416}, - dictWord{14, 11, 420}, - dictWord{15, 11, 61}, - dictWord{15, 11, 74}, - dictWord{15, 11, 87}, - dictWord{15, 11, 88}, - dictWord{15, 11, 94}, - dictWord{15, 11, 96}, - dictWord{15, 11, 116}, - dictWord{15, 11, 149}, - dictWord{15, 11, 154}, - dictWord{16, 11, 50}, - dictWord{16, 11, 63}, - dictWord{16, 11, 73}, - dictWord{17, 11, 2}, - dictWord{17, 11, 66}, - dictWord{ - 17, - 11, - 92, - }, - dictWord{17, 11, 103}, - dictWord{17, 11, 112}, - dictWord{18, 11, 50}, - dictWord{18, 11, 54}, - dictWord{18, 11, 82}, - dictWord{18, 11, 86}, - dictWord{ - 18, - 11, - 90, - }, - dictWord{18, 11, 111}, - dictWord{18, 11, 115}, - dictWord{18, 11, 156}, - dictWord{19, 11, 40}, - dictWord{19, 11, 79}, - dictWord{20, 11, 78}, - dictWord{ - 149, - 11, - 22, - }, - dictWord{137, 11, 170}, - dictWord{134, 0, 1433}, - dictWord{135, 11, 1307}, - dictWord{139, 11, 411}, - dictWord{5, 0, 189}, - dictWord{7, 0, 442}, - dictWord{7, 
0, 443}, - dictWord{8, 0, 281}, - dictWord{12, 0, 174}, - dictWord{141, 0, 261}, - dictWord{6, 10, 216}, - dictWord{7, 10, 901}, - dictWord{7, 10, 1343}, - dictWord{136, 10, 493}, - dictWord{5, 11, 397}, - dictWord{6, 11, 154}, - dictWord{7, 10, 341}, - dictWord{7, 11, 676}, - dictWord{8, 11, 443}, - dictWord{8, 11, 609}, - dictWord{ - 9, - 11, - 24, - }, - dictWord{9, 11, 325}, - dictWord{10, 11, 35}, - dictWord{11, 10, 219}, - dictWord{11, 11, 535}, - dictWord{11, 11, 672}, - dictWord{11, 11, 1018}, - dictWord{12, 11, 637}, - dictWord{144, 11, 30}, - dictWord{6, 0, 2}, - dictWord{7, 0, 191}, - dictWord{7, 0, 446}, - dictWord{7, 0, 1262}, - dictWord{7, 0, 1737}, - dictWord{8, 0, 22}, - dictWord{8, 0, 270}, - dictWord{8, 0, 612}, - dictWord{9, 0, 4}, - dictWord{9, 0, 312}, - dictWord{9, 0, 436}, - dictWord{9, 0, 626}, - dictWord{10, 0, 216}, - dictWord{10, 0, 311}, - dictWord{10, 0, 521}, - dictWord{10, 0, 623}, - dictWord{11, 0, 72}, - dictWord{11, 0, 330}, - dictWord{11, 0, 455}, - dictWord{12, 0, 321}, - dictWord{12, 0, 504}, - dictWord{12, 0, 530}, - dictWord{12, 0, 543}, - dictWord{13, 0, 17}, - dictWord{13, 0, 156}, - dictWord{13, 0, 334}, - dictWord{14, 0, 131}, - dictWord{17, 0, 60}, - dictWord{ - 148, - 0, - 64, - }, - dictWord{7, 0, 354}, - dictWord{10, 0, 410}, - dictWord{139, 0, 815}, - dictWord{139, 10, 130}, - dictWord{7, 10, 1734}, - dictWord{137, 11, 631}, - dictWord{ - 12, - 0, - 425, - }, - dictWord{15, 0, 112}, - dictWord{10, 10, 115}, - dictWord{11, 10, 420}, - dictWord{13, 10, 404}, - dictWord{14, 10, 346}, - dictWord{143, 10, 54}, - dictWord{ - 6, - 0, - 60, - }, - dictWord{6, 0, 166}, - dictWord{7, 0, 374}, - dictWord{7, 0, 670}, - dictWord{7, 0, 1327}, - dictWord{8, 0, 411}, - dictWord{8, 0, 435}, - dictWord{9, 0, 653}, - dictWord{ - 9, - 0, - 740, - }, - dictWord{10, 0, 385}, - dictWord{11, 0, 222}, - dictWord{11, 0, 324}, - dictWord{11, 0, 829}, - dictWord{140, 0, 611}, - dictWord{7, 0, 1611}, - dictWord{ - 13, - 0, - 14, - }, - 
dictWord{15, 0, 44}, - dictWord{19, 0, 13}, - dictWord{148, 0, 76}, - dictWord{133, 11, 981}, - dictWord{4, 11, 56}, - dictWord{7, 11, 1791}, - dictWord{8, 11, 607}, - dictWord{8, 11, 651}, - dictWord{11, 11, 465}, - dictWord{11, 11, 835}, - dictWord{12, 11, 337}, - dictWord{141, 11, 480}, - dictWord{6, 0, 1478}, - dictWord{ - 5, - 10, - 1011, - }, - dictWord{136, 10, 701}, - dictWord{139, 0, 596}, - dictWord{5, 0, 206}, - dictWord{134, 0, 398}, - dictWord{4, 10, 54}, - dictWord{5, 10, 666}, - dictWord{ - 7, - 10, - 1039, - }, - dictWord{7, 10, 1130}, - dictWord{9, 10, 195}, - dictWord{138, 10, 302}, - dictWord{7, 0, 50}, - dictWord{9, 11, 158}, - dictWord{138, 11, 411}, - dictWord{ - 135, - 11, - 1120, - }, - dictWord{6, 0, 517}, - dictWord{7, 0, 1159}, - dictWord{10, 0, 621}, - dictWord{11, 0, 192}, - dictWord{134, 10, 1669}, - dictWord{4, 0, 592}, - dictWord{ - 6, - 0, - 600, - }, - dictWord{135, 0, 1653}, - dictWord{10, 0, 223}, - dictWord{139, 0, 645}, - dictWord{136, 11, 139}, - dictWord{7, 0, 64}, - dictWord{136, 0, 245}, - dictWord{ - 142, - 0, - 278, - }, - dictWord{6, 11, 622}, - dictWord{135, 11, 1030}, - dictWord{136, 0, 604}, - dictWord{134, 0, 1502}, - dictWord{138, 0, 265}, - dictWord{ - 141, - 11, - 168, - }, - dictWord{7, 0, 1763}, - dictWord{140, 0, 310}, - dictWord{7, 10, 798}, - dictWord{139, 11, 719}, - dictWord{7, 11, 160}, - dictWord{10, 11, 624}, - dictWord{ - 142, - 11, - 279, - }, - dictWord{132, 11, 363}, - dictWord{7, 10, 122}, - dictWord{9, 10, 259}, - dictWord{10, 10, 84}, - dictWord{11, 10, 470}, - dictWord{12, 10, 541}, - dictWord{141, 10, 379}, - dictWord{5, 0, 129}, - dictWord{6, 0, 61}, - dictWord{135, 0, 947}, - dictWord{134, 0, 1356}, - dictWord{135, 11, 1191}, - dictWord{13, 0, 505}, - dictWord{141, 0, 506}, - dictWord{11, 0, 1000}, - dictWord{5, 10, 82}, - dictWord{5, 10, 131}, - dictWord{7, 10, 1755}, - dictWord{8, 10, 31}, - dictWord{9, 10, 168}, - dictWord{9, 10, 764}, - dictWord{139, 10, 869}, - dictWord{134, 0, 966}, - 
dictWord{134, 10, 605}, - dictWord{134, 11, 292}, - dictWord{5, 11, 177}, - dictWord{ - 6, - 11, - 616, - }, - dictWord{7, 11, 827}, - dictWord{9, 11, 525}, - dictWord{138, 11, 656}, - dictWord{135, 11, 1486}, - dictWord{138, 11, 31}, - dictWord{5, 10, 278}, - dictWord{137, 10, 68}, - dictWord{4, 10, 163}, - dictWord{5, 10, 201}, - dictWord{5, 10, 307}, - dictWord{5, 10, 310}, - dictWord{6, 10, 335}, - dictWord{7, 10, 284}, - dictWord{136, 10, 165}, - dictWord{6, 0, 839}, - dictWord{135, 10, 1660}, - dictWord{136, 10, 781}, - dictWord{6, 10, 33}, - dictWord{135, 10, 1244}, - dictWord{ - 133, - 0, - 637, - }, - dictWord{4, 11, 161}, - dictWord{133, 11, 631}, - dictWord{137, 0, 590}, - dictWord{7, 10, 1953}, - dictWord{136, 10, 720}, - dictWord{5, 0, 280}, - dictWord{ - 7, - 0, - 1226, - }, - dictWord{138, 10, 203}, - dictWord{134, 0, 1386}, - dictWord{5, 0, 281}, - dictWord{6, 0, 1026}, - dictWord{6, 10, 326}, - dictWord{7, 10, 677}, - dictWord{ - 137, - 10, - 425, - }, - dictWord{7, 11, 1557}, - dictWord{135, 11, 1684}, - dictWord{135, 0, 1064}, - dictWord{9, 11, 469}, - dictWord{9, 11, 709}, - dictWord{12, 11, 512}, - dictWord{14, 11, 65}, - dictWord{145, 11, 12}, - dictWord{134, 0, 917}, - dictWord{10, 11, 229}, - dictWord{11, 11, 73}, - dictWord{11, 11, 376}, - dictWord{ - 139, - 11, - 433, - }, - dictWord{7, 0, 555}, - dictWord{9, 0, 192}, - dictWord{13, 0, 30}, - dictWord{13, 0, 49}, - dictWord{15, 0, 150}, - dictWord{16, 0, 76}, - dictWord{20, 0, 52}, - dictWord{ - 7, - 10, - 1316, - }, - dictWord{7, 10, 1412}, - dictWord{7, 10, 1839}, - dictWord{9, 10, 589}, - dictWord{11, 10, 241}, - dictWord{11, 10, 676}, - dictWord{11, 10, 811}, - dictWord{11, 10, 891}, - dictWord{12, 10, 140}, - dictWord{12, 10, 346}, - dictWord{12, 10, 479}, - dictWord{13, 10, 381}, - dictWord{14, 10, 188}, - dictWord{ - 146, - 10, - 30, - }, - dictWord{149, 0, 15}, - dictWord{6, 0, 1882}, - dictWord{6, 0, 1883}, - dictWord{6, 0, 1897}, - dictWord{9, 0, 945}, - dictWord{9, 0, 1014}, - 
dictWord{9, 0, 1020}, - dictWord{12, 0, 823}, - dictWord{12, 0, 842}, - dictWord{12, 0, 866}, - dictWord{12, 0, 934}, - dictWord{15, 0, 242}, - dictWord{146, 0, 208}, - dictWord{6, 0, 965}, - dictWord{134, 0, 1499}, - dictWord{7, 0, 33}, - dictWord{7, 0, 120}, - dictWord{8, 0, 489}, - dictWord{9, 0, 319}, - dictWord{10, 0, 820}, - dictWord{11, 0, 1004}, - dictWord{ - 12, - 0, - 379, - }, - dictWord{12, 0, 679}, - dictWord{13, 0, 117}, - dictWord{13, 0, 412}, - dictWord{14, 0, 25}, - dictWord{15, 0, 52}, - dictWord{15, 0, 161}, - dictWord{16, 0, 47}, - dictWord{149, 0, 2}, - dictWord{6, 11, 558}, - dictWord{7, 11, 651}, - dictWord{8, 11, 421}, - dictWord{9, 11, 0}, - dictWord{138, 11, 34}, - dictWord{4, 0, 937}, - dictWord{ - 5, - 0, - 801, - }, - dictWord{7, 0, 473}, - dictWord{5, 10, 358}, - dictWord{7, 10, 1184}, - dictWord{10, 10, 662}, - dictWord{13, 10, 212}, - dictWord{13, 10, 304}, - dictWord{ - 13, - 10, - 333, - }, - dictWord{145, 10, 98}, - dictWord{132, 0, 877}, - dictWord{6, 0, 693}, - dictWord{134, 0, 824}, - dictWord{132, 0, 365}, - dictWord{7, 11, 1832}, - dictWord{ - 138, - 11, - 374, - }, - dictWord{5, 0, 7}, - dictWord{139, 0, 774}, - dictWord{4, 0, 734}, - dictWord{5, 0, 662}, - dictWord{134, 0, 430}, - dictWord{4, 0, 746}, - dictWord{ - 135, - 0, - 1090, - }, - dictWord{5, 0, 360}, - dictWord{8, 0, 237}, - dictWord{10, 0, 231}, - dictWord{147, 0, 124}, - dictWord{138, 11, 348}, - dictWord{6, 11, 6}, - dictWord{7, 11, 81}, - dictWord{7, 11, 771}, - dictWord{7, 11, 1731}, - dictWord{9, 11, 405}, - dictWord{138, 11, 421}, - dictWord{6, 0, 740}, - dictWord{137, 0, 822}, - dictWord{ - 133, - 10, - 946, - }, - dictWord{7, 0, 1485}, - dictWord{136, 0, 929}, - dictWord{7, 10, 411}, - dictWord{8, 10, 631}, - dictWord{9, 10, 323}, - dictWord{10, 10, 355}, - dictWord{ - 11, - 10, - 491, - }, - dictWord{12, 10, 143}, - dictWord{12, 10, 402}, - dictWord{13, 10, 73}, - dictWord{14, 10, 408}, - dictWord{15, 10, 107}, - dictWord{146, 10, 71}, - dictWord{ - 135, 
- 10, - 590, - }, - dictWord{5, 11, 881}, - dictWord{133, 11, 885}, - dictWord{150, 11, 25}, - dictWord{4, 0, 852}, - dictWord{5, 11, 142}, - dictWord{134, 11, 546}, - dictWord{7, 10, 1467}, - dictWord{8, 10, 328}, - dictWord{10, 10, 544}, - dictWord{11, 10, 955}, - dictWord{13, 10, 320}, - dictWord{145, 10, 83}, - dictWord{9, 0, 17}, - dictWord{10, 0, 291}, - dictWord{11, 10, 511}, - dictWord{13, 10, 394}, - dictWord{14, 10, 298}, - dictWord{14, 10, 318}, - dictWord{146, 10, 103}, - dictWord{5, 11, 466}, - dictWord{11, 11, 571}, - dictWord{12, 11, 198}, - dictWord{13, 11, 283}, - dictWord{14, 11, 186}, - dictWord{15, 11, 21}, - dictWord{143, 11, 103}, - dictWord{ - 134, - 0, - 1001, - }, - dictWord{4, 11, 185}, - dictWord{5, 11, 257}, - dictWord{5, 11, 839}, - dictWord{5, 11, 936}, - dictWord{7, 11, 171}, - dictWord{9, 11, 399}, - dictWord{ - 10, - 11, - 258, - }, - dictWord{10, 11, 395}, - dictWord{10, 11, 734}, - dictWord{11, 11, 1014}, - dictWord{12, 11, 23}, - dictWord{13, 11, 350}, - dictWord{14, 11, 150}, - dictWord{147, 11, 6}, - dictWord{143, 0, 35}, - dictWord{132, 0, 831}, - dictWord{5, 10, 835}, - dictWord{134, 10, 483}, - dictWord{4, 0, 277}, - dictWord{5, 0, 608}, - dictWord{ - 6, - 0, - 493, - }, - dictWord{7, 0, 457}, - dictWord{12, 0, 384}, - dictWord{7, 11, 404}, - dictWord{7, 11, 1377}, - dictWord{7, 11, 1430}, - dictWord{7, 11, 2017}, - dictWord{ - 8, - 11, - 149, - }, - dictWord{8, 11, 239}, - dictWord{8, 11, 512}, - dictWord{8, 11, 793}, - dictWord{8, 11, 818}, - dictWord{9, 11, 474}, - dictWord{9, 11, 595}, - dictWord{ - 10, - 11, - 122, - }, - dictWord{10, 11, 565}, - dictWord{10, 11, 649}, - dictWord{10, 11, 783}, - dictWord{11, 11, 239}, - dictWord{11, 11, 295}, - dictWord{11, 11, 447}, - dictWord{ - 11, - 11, - 528, - }, - dictWord{11, 11, 639}, - dictWord{11, 11, 800}, - dictWord{11, 11, 936}, - dictWord{12, 11, 25}, - dictWord{12, 11, 73}, - dictWord{12, 11, 77}, - dictWord{12, 11, 157}, - dictWord{12, 11, 316}, - dictWord{12, 11, 390}, 
- dictWord{12, 11, 391}, - dictWord{12, 11, 394}, - dictWord{12, 11, 395}, - dictWord{ - 12, - 11, - 478, - }, - dictWord{12, 11, 503}, - dictWord{12, 11, 592}, - dictWord{12, 11, 680}, - dictWord{13, 11, 50}, - dictWord{13, 11, 53}, - dictWord{13, 11, 132}, - dictWord{ - 13, - 11, - 198, - }, - dictWord{13, 11, 275}, - dictWord{13, 11, 322}, - dictWord{13, 11, 415}, - dictWord{14, 11, 71}, - dictWord{14, 11, 257}, - dictWord{14, 11, 395}, - dictWord{15, 11, 71}, - dictWord{15, 11, 136}, - dictWord{17, 11, 123}, - dictWord{18, 11, 93}, - dictWord{147, 11, 58}, - dictWord{134, 0, 1351}, - dictWord{7, 0, 27}, - dictWord{135, 0, 316}, - dictWord{136, 11, 712}, - dictWord{136, 0, 984}, - dictWord{133, 0, 552}, - dictWord{137, 0, 264}, - dictWord{132, 0, 401}, - dictWord{6, 0, 710}, - dictWord{6, 0, 1111}, - dictWord{134, 0, 1343}, - dictWord{134, 0, 1211}, - dictWord{9, 0, 543}, - dictWord{10, 0, 524}, - dictWord{11, 0, 108}, - dictWord{11, 0, 653}, - dictWord{12, 0, 524}, - dictWord{13, 0, 123}, - dictWord{14, 0, 252}, - dictWord{16, 0, 18}, - dictWord{19, 0, 38}, - dictWord{20, 0, 26}, - dictWord{20, 0, 65}, - dictWord{ - 21, - 0, - 3, - }, - dictWord{151, 0, 11}, - dictWord{4, 0, 205}, - dictWord{5, 0, 623}, - dictWord{7, 0, 104}, - dictWord{8, 0, 519}, - dictWord{137, 0, 716}, - dictWord{132, 10, 677}, - dictWord{4, 11, 377}, - dictWord{152, 11, 13}, - dictWord{135, 11, 1673}, - dictWord{7, 0, 579}, - dictWord{9, 0, 41}, - dictWord{9, 0, 244}, - dictWord{9, 0, 669}, - dictWord{ - 10, - 0, - 5, - }, - dictWord{11, 0, 861}, - dictWord{11, 0, 951}, - dictWord{139, 0, 980}, - dictWord{132, 0, 717}, - dictWord{136, 0, 1011}, - dictWord{132, 0, 805}, - dictWord{ - 4, - 11, - 180, - }, - dictWord{135, 11, 1906}, - dictWord{132, 10, 777}, - dictWord{132, 10, 331}, - dictWord{132, 0, 489}, - dictWord{6, 0, 1024}, - dictWord{4, 11, 491}, - dictWord{133, 10, 747}, - dictWord{135, 11, 1182}, - dictWord{4, 11, 171}, - dictWord{138, 11, 234}, - dictWord{4, 11, 586}, - 
dictWord{7, 11, 1186}, - dictWord{ - 138, - 11, - 631, - }, - dictWord{135, 0, 892}, - dictWord{135, 11, 336}, - dictWord{9, 11, 931}, - dictWord{10, 11, 334}, - dictWord{148, 11, 71}, - dictWord{137, 0, 473}, - dictWord{6, 0, 864}, - dictWord{12, 0, 659}, - dictWord{139, 11, 926}, - dictWord{7, 0, 819}, - dictWord{9, 0, 26}, - dictWord{9, 0, 392}, - dictWord{10, 0, 152}, - dictWord{ - 10, - 0, - 226, - }, - dictWord{11, 0, 19}, - dictWord{12, 0, 276}, - dictWord{12, 0, 426}, - dictWord{12, 0, 589}, - dictWord{13, 0, 460}, - dictWord{15, 0, 97}, - dictWord{19, 0, 48}, - dictWord{148, 0, 104}, - dictWord{135, 0, 51}, - dictWord{133, 10, 326}, - dictWord{4, 10, 691}, - dictWord{146, 10, 16}, - dictWord{9, 0, 130}, - dictWord{11, 0, 765}, - dictWord{10, 10, 680}, - dictWord{10, 10, 793}, - dictWord{141, 10, 357}, - dictWord{133, 11, 765}, - dictWord{8, 0, 229}, - dictWord{6, 10, 32}, - dictWord{7, 10, 385}, - dictWord{7, 10, 757}, - dictWord{7, 10, 1916}, - dictWord{8, 10, 94}, - dictWord{8, 10, 711}, - dictWord{9, 10, 541}, - dictWord{10, 10, 162}, - dictWord{10, 10, 795}, - dictWord{11, 10, 989}, - dictWord{11, 10, 1010}, - dictWord{12, 10, 14}, - dictWord{142, 10, 308}, - dictWord{7, 11, 474}, - dictWord{137, 11, 578}, - dictWord{ - 132, - 0, - 674, - }, - dictWord{132, 0, 770}, - dictWord{5, 0, 79}, - dictWord{7, 0, 1027}, - dictWord{7, 0, 1477}, - dictWord{139, 0, 52}, - dictWord{133, 11, 424}, - dictWord{ - 134, - 0, - 1666, - }, - dictWord{6, 0, 409}, - dictWord{6, 10, 349}, - dictWord{6, 10, 1682}, - dictWord{7, 10, 1252}, - dictWord{8, 10, 112}, - dictWord{8, 11, 714}, - dictWord{ - 9, - 10, - 435, - }, - dictWord{9, 10, 668}, - dictWord{10, 10, 290}, - dictWord{10, 10, 319}, - dictWord{10, 10, 815}, - dictWord{11, 10, 180}, - dictWord{11, 10, 837}, - dictWord{ - 12, - 10, - 240, - }, - dictWord{13, 10, 152}, - dictWord{13, 10, 219}, - dictWord{142, 10, 158}, - dictWord{5, 0, 789}, - dictWord{134, 0, 195}, - dictWord{4, 0, 251}, - dictWord{ - 4, - 0, - 688, - 
}, - dictWord{7, 0, 513}, - dictWord{135, 0, 1284}, - dictWord{132, 10, 581}, - dictWord{9, 11, 420}, - dictWord{10, 11, 269}, - dictWord{10, 11, 285}, - dictWord{10, 11, 576}, - dictWord{11, 11, 397}, - dictWord{13, 11, 175}, - dictWord{145, 11, 90}, - dictWord{6, 10, 126}, - dictWord{7, 10, 573}, - dictWord{8, 10, 397}, - dictWord{142, 10, 44}, - dictWord{132, 11, 429}, - dictWord{133, 0, 889}, - dictWord{4, 0, 160}, - dictWord{5, 0, 330}, - dictWord{7, 0, 1434}, - dictWord{136, 0, 174}, - dictWord{7, 11, 18}, - dictWord{7, 11, 699}, - dictWord{7, 11, 1966}, - dictWord{8, 11, 752}, - dictWord{9, 11, 273}, - dictWord{9, 11, 412}, - dictWord{9, 11, 703}, - dictWord{ - 10, - 11, - 71, - }, - dictWord{10, 11, 427}, - dictWord{10, 11, 508}, - dictWord{146, 11, 97}, - dictWord{6, 0, 872}, - dictWord{134, 0, 899}, - dictWord{133, 10, 926}, - dictWord{134, 0, 1126}, - dictWord{134, 0, 918}, - dictWord{4, 11, 53}, - dictWord{5, 11, 186}, - dictWord{135, 11, 752}, - dictWord{7, 0, 268}, - dictWord{136, 0, 569}, - dictWord{134, 0, 1224}, - dictWord{6, 0, 1361}, - dictWord{7, 10, 1232}, - dictWord{137, 10, 531}, - dictWord{8, 11, 575}, - dictWord{10, 11, 289}, - dictWord{ - 139, - 11, - 319, - }, - dictWord{133, 10, 670}, - dictWord{132, 11, 675}, - dictWord{133, 0, 374}, - dictWord{135, 10, 1957}, - dictWord{133, 0, 731}, - dictWord{11, 0, 190}, - dictWord{15, 0, 49}, - dictWord{11, 11, 190}, - dictWord{143, 11, 49}, - dictWord{4, 0, 626}, - dictWord{5, 0, 506}, - dictWord{5, 0, 642}, - dictWord{6, 0, 425}, - dictWord{ - 10, - 0, - 202, - }, - dictWord{139, 0, 141}, - dictWord{137, 0, 444}, - dictWord{7, 10, 242}, - dictWord{135, 10, 1942}, - dictWord{6, 11, 209}, - dictWord{8, 11, 468}, - dictWord{ - 9, - 11, - 210, - }, - dictWord{11, 11, 36}, - dictWord{12, 11, 28}, - dictWord{12, 11, 630}, - dictWord{13, 11, 21}, - dictWord{13, 11, 349}, - dictWord{14, 11, 7}, - dictWord{ - 145, - 11, - 13, - }, - dictWord{4, 11, 342}, - dictWord{135, 11, 1179}, - dictWord{5, 10, 834}, 
- dictWord{7, 10, 1202}, - dictWord{8, 10, 14}, - dictWord{9, 10, 481}, - dictWord{ - 137, - 10, - 880, - }, - dictWord{4, 11, 928}, - dictWord{133, 11, 910}, - dictWord{4, 11, 318}, - dictWord{4, 11, 496}, - dictWord{7, 11, 856}, - dictWord{139, 11, 654}, - dictWord{136, 0, 835}, - dictWord{7, 0, 1526}, - dictWord{138, 10, 465}, - dictWord{151, 0, 17}, - dictWord{135, 0, 477}, - dictWord{4, 10, 357}, - dictWord{6, 10, 172}, - dictWord{7, 10, 143}, - dictWord{137, 10, 413}, - dictWord{6, 0, 1374}, - dictWord{138, 0, 994}, - dictWord{18, 0, 76}, - dictWord{132, 10, 590}, - dictWord{7, 0, 287}, - dictWord{8, 0, 355}, - dictWord{9, 0, 293}, - dictWord{137, 0, 743}, - dictWord{134, 0, 1389}, - dictWord{7, 11, 915}, - dictWord{8, 11, 247}, - dictWord{147, 11, 0}, - dictWord{ - 4, - 11, - 202, - }, - dictWord{5, 11, 382}, - dictWord{6, 11, 454}, - dictWord{7, 11, 936}, - dictWord{7, 11, 1803}, - dictWord{8, 11, 758}, - dictWord{9, 11, 375}, - dictWord{ - 9, - 11, - 895, - }, - dictWord{10, 11, 743}, - dictWord{10, 11, 792}, - dictWord{11, 11, 978}, - dictWord{11, 11, 1012}, - dictWord{142, 11, 109}, - dictWord{5, 0, 384}, - dictWord{8, 0, 455}, - dictWord{140, 0, 48}, - dictWord{132, 11, 390}, - dictWord{5, 10, 169}, - dictWord{7, 10, 333}, - dictWord{136, 10, 45}, - dictWord{5, 0, 264}, - dictWord{134, 0, 184}, - dictWord{138, 11, 791}, - dictWord{133, 11, 717}, - dictWord{132, 10, 198}, - dictWord{6, 11, 445}, - dictWord{7, 11, 332}, - dictWord{ - 137, - 11, - 909, - }, - dictWord{136, 0, 1001}, - dictWord{4, 10, 24}, - dictWord{5, 10, 140}, - dictWord{5, 10, 185}, - dictWord{7, 10, 1500}, - dictWord{11, 10, 565}, - dictWord{ - 139, - 10, - 838, - }, - dictWord{134, 11, 578}, - dictWord{5, 0, 633}, - dictWord{6, 0, 28}, - dictWord{135, 0, 1323}, - dictWord{132, 0, 851}, - dictWord{136, 11, 267}, - dictWord{ - 7, - 0, - 359, - }, - dictWord{8, 0, 243}, - dictWord{140, 0, 175}, - dictWord{4, 10, 334}, - dictWord{133, 10, 593}, - dictWord{141, 11, 87}, - dictWord{136, 11, 
766}, - dictWord{10, 0, 287}, - dictWord{12, 0, 138}, - dictWord{10, 11, 287}, - dictWord{140, 11, 138}, - dictWord{4, 0, 105}, - dictWord{132, 0, 740}, - dictWord{140, 10, 116}, - dictWord{134, 0, 857}, - dictWord{135, 11, 1841}, - dictWord{6, 0, 1402}, - dictWord{137, 0, 819}, - dictWord{132, 11, 584}, - dictWord{132, 10, 709}, - dictWord{ - 133, - 10, - 897, - }, - dictWord{5, 0, 224}, - dictWord{13, 0, 174}, - dictWord{146, 0, 52}, - dictWord{135, 10, 1840}, - dictWord{4, 10, 608}, - dictWord{133, 10, 497}, - dictWord{139, 11, 60}, - dictWord{4, 0, 758}, - dictWord{135, 0, 1649}, - dictWord{4, 11, 226}, - dictWord{4, 11, 326}, - dictWord{135, 11, 1770}, - dictWord{5, 11, 426}, - dictWord{8, 11, 30}, - dictWord{9, 11, 2}, - dictWord{11, 11, 549}, - dictWord{147, 11, 122}, - dictWord{135, 10, 2039}, - dictWord{6, 10, 540}, - dictWord{ - 136, - 10, - 136, - }, - dictWord{4, 0, 573}, - dictWord{8, 0, 655}, - dictWord{4, 10, 897}, - dictWord{133, 10, 786}, - dictWord{7, 0, 351}, - dictWord{139, 0, 128}, - dictWord{ - 133, - 10, - 999, - }, - dictWord{4, 10, 299}, - dictWord{135, 10, 1004}, - dictWord{133, 0, 918}, - dictWord{132, 11, 345}, - dictWord{4, 11, 385}, - dictWord{7, 11, 265}, - dictWord{135, 11, 587}, - dictWord{133, 10, 456}, - dictWord{136, 10, 180}, - dictWord{6, 0, 687}, - dictWord{134, 0, 1537}, - dictWord{4, 11, 347}, - dictWord{ - 5, - 11, - 423, - }, - dictWord{5, 11, 996}, - dictWord{135, 11, 1329}, - dictWord{132, 10, 755}, - dictWord{7, 11, 1259}, - dictWord{9, 11, 125}, - dictWord{11, 11, 65}, - dictWord{140, 11, 285}, - dictWord{5, 11, 136}, - dictWord{6, 11, 136}, - dictWord{136, 11, 644}, - dictWord{134, 0, 1525}, - dictWord{4, 0, 1009}, - dictWord{ - 135, - 0, - 1139, - }, - dictWord{139, 10, 338}, - dictWord{132, 0, 340}, - dictWord{135, 10, 1464}, - dictWord{8, 0, 847}, - dictWord{10, 0, 861}, - dictWord{10, 0, 876}, - dictWord{ - 10, - 0, - 889, - }, - dictWord{10, 0, 922}, - dictWord{10, 0, 929}, - dictWord{10, 0, 933}, - dictWord{12, 
0, 784}, - dictWord{140, 0, 791}, - dictWord{139, 0, 176}, - dictWord{ - 9, - 11, - 134, - }, - dictWord{10, 11, 2}, - dictWord{10, 11, 27}, - dictWord{10, 11, 333}, - dictWord{11, 11, 722}, - dictWord{143, 11, 1}, - dictWord{4, 11, 433}, - dictWord{ - 133, - 11, - 719, - }, - dictWord{5, 0, 985}, - dictWord{7, 0, 509}, - dictWord{7, 0, 529}, - dictWord{145, 0, 96}, - dictWord{132, 0, 615}, - dictWord{4, 10, 890}, - dictWord{ - 5, - 10, - 805, - }, - dictWord{5, 10, 819}, - dictWord{5, 10, 961}, - dictWord{6, 10, 396}, - dictWord{6, 10, 1631}, - dictWord{6, 10, 1678}, - dictWord{7, 10, 1967}, - dictWord{ - 7, - 10, - 2041, - }, - dictWord{9, 10, 630}, - dictWord{11, 10, 8}, - dictWord{11, 10, 1019}, - dictWord{12, 10, 176}, - dictWord{13, 10, 225}, - dictWord{14, 10, 292}, - dictWord{ - 149, - 10, - 24, - }, - dictWord{135, 0, 1919}, - dictWord{134, 0, 1131}, - dictWord{144, 11, 21}, - dictWord{144, 11, 51}, - dictWord{135, 10, 1815}, - dictWord{4, 0, 247}, - dictWord{7, 10, 1505}, - dictWord{10, 10, 190}, - dictWord{10, 10, 634}, - dictWord{11, 10, 792}, - dictWord{12, 10, 358}, - dictWord{140, 10, 447}, - dictWord{ - 5, - 10, - 0, - }, - dictWord{6, 10, 536}, - dictWord{7, 10, 604}, - dictWord{13, 10, 445}, - dictWord{145, 10, 126}, - dictWord{4, 0, 184}, - dictWord{5, 0, 390}, - dictWord{6, 0, 337}, - dictWord{7, 0, 23}, - dictWord{7, 0, 494}, - dictWord{7, 0, 618}, - dictWord{7, 0, 1456}, - dictWord{8, 0, 27}, - dictWord{8, 0, 599}, - dictWord{10, 0, 153}, - dictWord{ - 139, - 0, - 710, - }, - dictWord{6, 10, 232}, - dictWord{6, 10, 412}, - dictWord{7, 10, 1074}, - dictWord{8, 10, 9}, - dictWord{8, 10, 157}, - dictWord{8, 10, 786}, - dictWord{9, 10, 196}, - dictWord{9, 10, 352}, - dictWord{9, 10, 457}, - dictWord{10, 10, 337}, - dictWord{11, 10, 232}, - dictWord{11, 10, 877}, - dictWord{12, 10, 480}, - dictWord{ - 140, - 10, - 546, - }, - dictWord{13, 0, 38}, - dictWord{135, 10, 958}, - dictWord{4, 10, 382}, - dictWord{136, 10, 579}, - dictWord{4, 10, 212}, - 
dictWord{135, 10, 1206}, - dictWord{ - 4, - 11, - 555, - }, - dictWord{8, 11, 536}, - dictWord{138, 11, 288}, - dictWord{11, 11, 139}, - dictWord{139, 11, 171}, - dictWord{9, 11, 370}, - dictWord{138, 11, 90}, - dictWord{132, 0, 1015}, - dictWord{134, 0, 1088}, - dictWord{5, 10, 655}, - dictWord{135, 11, 977}, - dictWord{134, 0, 1585}, - dictWord{17, 10, 67}, - dictWord{ - 147, - 10, - 74, - }, - dictWord{10, 0, 227}, - dictWord{11, 0, 497}, - dictWord{11, 0, 709}, - dictWord{140, 0, 415}, - dictWord{6, 0, 360}, - dictWord{7, 0, 1664}, - dictWord{ - 136, - 0, - 478, - }, - dictWord{7, 0, 95}, - dictWord{6, 10, 231}, - dictWord{136, 10, 423}, - dictWord{140, 11, 65}, - dictWord{4, 11, 257}, - dictWord{135, 11, 2031}, - dictWord{ - 135, - 11, - 1768, - }, - dictWord{133, 10, 300}, - dictWord{139, 11, 211}, - dictWord{136, 0, 699}, - dictWord{6, 10, 237}, - dictWord{7, 10, 611}, - dictWord{8, 10, 100}, - dictWord{9, 10, 416}, - dictWord{11, 10, 335}, - dictWord{12, 10, 173}, - dictWord{146, 10, 101}, - dictWord{14, 0, 26}, - dictWord{146, 0, 150}, - dictWord{6, 0, 581}, - dictWord{135, 0, 1119}, - dictWord{135, 10, 1208}, - dictWord{132, 0, 739}, - dictWord{6, 11, 83}, - dictWord{6, 11, 1733}, - dictWord{135, 11, 1389}, - dictWord{ - 137, - 0, - 869, - }, - dictWord{4, 0, 67}, - dictWord{5, 0, 422}, - dictWord{7, 0, 1037}, - dictWord{7, 0, 1289}, - dictWord{7, 0, 1555}, - dictWord{9, 0, 741}, - dictWord{145, 0, 108}, - dictWord{133, 10, 199}, - dictWord{12, 10, 427}, - dictWord{146, 10, 38}, - dictWord{136, 0, 464}, - dictWord{142, 0, 42}, - dictWord{10, 0, 96}, - dictWord{8, 11, 501}, - dictWord{137, 11, 696}, - dictWord{134, 11, 592}, - dictWord{4, 0, 512}, - dictWord{4, 0, 966}, - dictWord{5, 0, 342}, - dictWord{6, 0, 1855}, - dictWord{8, 0, 869}, - dictWord{8, 0, 875}, - dictWord{8, 0, 901}, - dictWord{144, 0, 26}, - dictWord{8, 0, 203}, - dictWord{11, 0, 823}, - dictWord{11, 0, 846}, - dictWord{12, 0, 482}, - dictWord{ - 13, - 0, - 277, - }, - dictWord{13, 0, 
302}, - dictWord{13, 0, 464}, - dictWord{14, 0, 205}, - dictWord{142, 0, 221}, - dictWord{4, 0, 449}, - dictWord{133, 0, 718}, - dictWord{ - 7, - 11, - 1718, - }, - dictWord{9, 11, 95}, - dictWord{9, 11, 274}, - dictWord{10, 11, 279}, - dictWord{10, 11, 317}, - dictWord{10, 11, 420}, - dictWord{11, 11, 303}, - dictWord{ - 11, - 11, - 808, - }, - dictWord{12, 11, 134}, - dictWord{12, 11, 367}, - dictWord{13, 11, 149}, - dictWord{13, 11, 347}, - dictWord{14, 11, 349}, - dictWord{14, 11, 406}, - dictWord{18, 11, 22}, - dictWord{18, 11, 89}, - dictWord{18, 11, 122}, - dictWord{147, 11, 47}, - dictWord{133, 11, 26}, - dictWord{4, 0, 355}, - dictWord{6, 0, 311}, - dictWord{ - 9, - 0, - 256, - }, - dictWord{138, 0, 404}, - dictWord{132, 11, 550}, - dictWord{10, 0, 758}, - dictWord{6, 10, 312}, - dictWord{6, 10, 1715}, - dictWord{10, 10, 584}, - dictWord{11, 10, 546}, - dictWord{11, 10, 692}, - dictWord{12, 10, 259}, - dictWord{12, 10, 295}, - dictWord{13, 10, 46}, - dictWord{141, 10, 154}, - dictWord{ - 136, - 11, - 822, - }, - dictWord{5, 0, 827}, - dictWord{4, 11, 902}, - dictWord{5, 11, 809}, - dictWord{6, 11, 122}, - dictWord{135, 11, 896}, - dictWord{5, 0, 64}, - dictWord{140, 0, 581}, - dictWord{4, 0, 442}, - dictWord{6, 0, 739}, - dictWord{7, 0, 1047}, - dictWord{7, 0, 1352}, - dictWord{7, 0, 1643}, - dictWord{7, 11, 1911}, - dictWord{9, 11, 449}, - dictWord{10, 11, 192}, - dictWord{138, 11, 740}, - dictWord{135, 11, 262}, - dictWord{132, 10, 588}, - dictWord{133, 11, 620}, - dictWord{5, 0, 977}, - dictWord{ - 6, - 0, - 288, - }, - dictWord{7, 0, 528}, - dictWord{4, 11, 34}, - dictWord{5, 11, 574}, - dictWord{7, 11, 279}, - dictWord{7, 11, 1624}, - dictWord{136, 11, 601}, - dictWord{ - 6, - 0, - 1375, - }, - dictWord{4, 10, 231}, - dictWord{5, 10, 61}, - dictWord{6, 10, 104}, - dictWord{7, 10, 729}, - dictWord{7, 10, 964}, - dictWord{7, 10, 1658}, - dictWord{ - 140, - 10, - 414, - }, - dictWord{6, 10, 263}, - dictWord{138, 10, 757}, - dictWord{132, 10, 320}, - 
dictWord{4, 0, 254}, - dictWord{7, 0, 1309}, - dictWord{5, 11, 332}, - dictWord{ - 135, - 11, - 1309, - }, - dictWord{6, 11, 261}, - dictWord{8, 11, 182}, - dictWord{139, 11, 943}, - dictWord{132, 10, 225}, - dictWord{6, 0, 12}, - dictWord{135, 0, 1219}, - dictWord{4, 0, 275}, - dictWord{12, 0, 376}, - dictWord{6, 11, 1721}, - dictWord{141, 11, 490}, - dictWord{4, 11, 933}, - dictWord{133, 11, 880}, - dictWord{6, 0, 951}, - dictWord{6, 0, 1109}, - dictWord{6, 0, 1181}, - dictWord{7, 0, 154}, - dictWord{4, 10, 405}, - dictWord{7, 10, 817}, - dictWord{14, 10, 58}, - dictWord{17, 10, 37}, - dictWord{ - 146, - 10, - 124, - }, - dictWord{6, 0, 1520}, - dictWord{133, 10, 974}, - dictWord{134, 0, 1753}, - dictWord{6, 0, 369}, - dictWord{6, 0, 502}, - dictWord{7, 0, 1036}, - dictWord{ - 8, - 0, - 348, - }, - dictWord{9, 0, 452}, - dictWord{10, 0, 26}, - dictWord{11, 0, 224}, - dictWord{11, 0, 387}, - dictWord{11, 0, 772}, - dictWord{12, 0, 95}, - dictWord{12, 0, 629}, - dictWord{13, 0, 195}, - dictWord{13, 0, 207}, - dictWord{13, 0, 241}, - dictWord{14, 0, 260}, - dictWord{14, 0, 270}, - dictWord{143, 0, 140}, - dictWord{132, 0, 269}, - dictWord{5, 0, 480}, - dictWord{7, 0, 532}, - dictWord{7, 0, 1197}, - dictWord{7, 0, 1358}, - dictWord{8, 0, 291}, - dictWord{11, 0, 349}, - dictWord{142, 0, 396}, - dictWord{ - 5, - 10, - 235, - }, - dictWord{7, 10, 1239}, - dictWord{11, 10, 131}, - dictWord{140, 10, 370}, - dictWord{7, 10, 956}, - dictWord{7, 10, 1157}, - dictWord{7, 10, 1506}, - dictWord{ - 7, - 10, - 1606, - }, - dictWord{7, 10, 1615}, - dictWord{7, 10, 1619}, - dictWord{7, 10, 1736}, - dictWord{7, 10, 1775}, - dictWord{8, 10, 590}, - dictWord{9, 10, 324}, - dictWord{9, 10, 736}, - dictWord{9, 10, 774}, - dictWord{9, 10, 776}, - dictWord{9, 10, 784}, - dictWord{10, 10, 567}, - dictWord{10, 10, 708}, - dictWord{11, 10, 518}, - dictWord{11, 10, 613}, - dictWord{11, 10, 695}, - dictWord{11, 10, 716}, - dictWord{11, 10, 739}, - dictWord{11, 10, 770}, - dictWord{11, 10, 
771}, - dictWord{ - 11, - 10, - 848, - }, - dictWord{11, 10, 857}, - dictWord{11, 10, 931}, - dictWord{11, 10, 947}, - dictWord{12, 10, 326}, - dictWord{12, 10, 387}, - dictWord{12, 10, 484}, - dictWord{ - 12, - 10, - 528, - }, - dictWord{12, 10, 552}, - dictWord{12, 10, 613}, - dictWord{13, 10, 189}, - dictWord{13, 10, 256}, - dictWord{13, 10, 340}, - dictWord{13, 10, 432}, - dictWord{13, 10, 436}, - dictWord{13, 10, 440}, - dictWord{13, 10, 454}, - dictWord{14, 10, 174}, - dictWord{14, 10, 220}, - dictWord{14, 10, 284}, - dictWord{ - 14, - 10, - 390, - }, - dictWord{145, 10, 121}, - dictWord{8, 11, 598}, - dictWord{9, 11, 664}, - dictWord{138, 11, 441}, - dictWord{9, 10, 137}, - dictWord{138, 10, 221}, - dictWord{133, 11, 812}, - dictWord{148, 0, 15}, - dictWord{134, 0, 1341}, - dictWord{6, 0, 1017}, - dictWord{4, 11, 137}, - dictWord{7, 11, 1178}, - dictWord{ - 135, - 11, - 1520, - }, - dictWord{7, 10, 390}, - dictWord{138, 10, 140}, - dictWord{7, 11, 1260}, - dictWord{135, 11, 1790}, - dictWord{137, 11, 191}, - dictWord{ - 135, - 10, - 1144, - }, - dictWord{6, 0, 1810}, - dictWord{7, 0, 657}, - dictWord{8, 0, 886}, - dictWord{10, 0, 857}, - dictWord{14, 0, 440}, - dictWord{144, 0, 96}, - dictWord{8, 0, 533}, - dictWord{6, 11, 1661}, - dictWord{7, 11, 1975}, - dictWord{7, 11, 2009}, - dictWord{135, 11, 2011}, - dictWord{6, 0, 1453}, - dictWord{134, 10, 464}, - dictWord{ - 132, - 11, - 715, - }, - dictWord{5, 10, 407}, - dictWord{11, 10, 204}, - dictWord{11, 10, 243}, - dictWord{11, 10, 489}, - dictWord{12, 10, 293}, - dictWord{19, 10, 37}, - dictWord{20, 10, 73}, - dictWord{150, 10, 38}, - dictWord{133, 11, 703}, - dictWord{4, 0, 211}, - dictWord{7, 0, 1483}, - dictWord{5, 10, 325}, - dictWord{8, 10, 5}, - dictWord{ - 8, - 10, - 227, - }, - dictWord{9, 10, 105}, - dictWord{10, 10, 585}, - dictWord{140, 10, 614}, - dictWord{4, 0, 332}, - dictWord{5, 0, 335}, - dictWord{6, 0, 238}, - dictWord{ - 7, - 0, - 269, - }, - dictWord{7, 0, 811}, - dictWord{7, 0, 1797}, - 
dictWord{8, 0, 836}, - dictWord{9, 0, 507}, - dictWord{141, 0, 242}, - dictWord{5, 11, 89}, - dictWord{7, 11, 1915}, - dictWord{9, 11, 185}, - dictWord{9, 11, 235}, - dictWord{9, 11, 496}, - dictWord{10, 11, 64}, - dictWord{10, 11, 270}, - dictWord{10, 11, 403}, - dictWord{10, 11, 469}, - dictWord{10, 11, 529}, - dictWord{10, 11, 590}, - dictWord{11, 11, 140}, - dictWord{11, 11, 860}, - dictWord{13, 11, 1}, - dictWord{13, 11, 422}, - dictWord{14, 11, 341}, - dictWord{14, 11, 364}, - dictWord{17, 11, 93}, - dictWord{18, 11, 113}, - dictWord{19, 11, 97}, - dictWord{147, 11, 113}, - dictWord{133, 11, 695}, - dictWord{ - 16, - 0, - 19, - }, - dictWord{5, 11, 6}, - dictWord{6, 11, 183}, - dictWord{6, 10, 621}, - dictWord{7, 11, 680}, - dictWord{7, 11, 978}, - dictWord{7, 11, 1013}, - dictWord{7, 11, 1055}, - dictWord{12, 11, 230}, - dictWord{13, 11, 172}, - dictWord{13, 10, 504}, - dictWord{146, 11, 29}, - dictWord{136, 0, 156}, - dictWord{133, 0, 1009}, - dictWord{ - 6, - 11, - 29, - }, - dictWord{139, 11, 63}, - dictWord{134, 0, 820}, - dictWord{134, 10, 218}, - dictWord{7, 10, 454}, - dictWord{7, 10, 782}, - dictWord{8, 10, 768}, - dictWord{ - 140, - 10, - 686, - }, - dictWord{5, 0, 228}, - dictWord{6, 0, 203}, - dictWord{7, 0, 156}, - dictWord{8, 0, 347}, - dictWord{9, 0, 265}, - dictWord{18, 0, 39}, - dictWord{20, 0, 54}, - dictWord{21, 0, 31}, - dictWord{22, 0, 3}, - dictWord{23, 0, 0}, - dictWord{15, 11, 8}, - dictWord{18, 11, 39}, - dictWord{20, 11, 54}, - dictWord{21, 11, 31}, - dictWord{22, 11, 3}, - dictWord{151, 11, 0}, - dictWord{7, 0, 1131}, - dictWord{135, 0, 1468}, - dictWord{144, 10, 0}, - dictWord{134, 0, 1276}, - dictWord{10, 10, 676}, - dictWord{ - 140, - 10, - 462, - }, - dictWord{132, 11, 311}, - dictWord{134, 11, 1740}, - dictWord{7, 11, 170}, - dictWord{8, 11, 90}, - dictWord{8, 11, 177}, - dictWord{8, 11, 415}, - dictWord{ - 11, - 11, - 714, - }, - dictWord{142, 11, 281}, - dictWord{134, 10, 164}, - dictWord{6, 0, 1792}, - dictWord{138, 0, 849}, 
- dictWord{150, 10, 50}, - dictWord{5, 0, 291}, - dictWord{5, 0, 318}, - dictWord{7, 0, 765}, - dictWord{9, 0, 389}, - dictWord{12, 0, 548}, - dictWord{8, 11, 522}, - dictWord{142, 11, 328}, - dictWord{11, 11, 91}, - dictWord{ - 13, - 11, - 129, - }, - dictWord{15, 11, 101}, - dictWord{145, 11, 125}, - dictWord{4, 11, 494}, - dictWord{6, 11, 74}, - dictWord{7, 11, 44}, - dictWord{7, 11, 407}, - dictWord{ - 8, - 11, - 551, - }, - dictWord{12, 11, 17}, - dictWord{15, 11, 5}, - dictWord{148, 11, 11}, - dictWord{4, 11, 276}, - dictWord{133, 11, 296}, - dictWord{6, 10, 343}, - dictWord{ - 7, - 10, - 195, - }, - dictWord{7, 11, 1777}, - dictWord{9, 10, 226}, - dictWord{10, 10, 197}, - dictWord{10, 10, 575}, - dictWord{11, 10, 502}, - dictWord{139, 10, 899}, - dictWord{ - 10, - 0, - 525, - }, - dictWord{139, 0, 82}, - dictWord{14, 0, 453}, - dictWord{4, 11, 7}, - dictWord{5, 11, 90}, - dictWord{5, 11, 158}, - dictWord{6, 11, 542}, - dictWord{7, 11, 221}, - dictWord{7, 11, 1574}, - dictWord{9, 11, 490}, - dictWord{10, 11, 540}, - dictWord{11, 11, 443}, - dictWord{139, 11, 757}, - dictWord{135, 0, 666}, - dictWord{ - 22, - 10, - 29, - }, - dictWord{150, 11, 29}, - dictWord{4, 0, 422}, - dictWord{147, 10, 8}, - dictWord{5, 0, 355}, - dictWord{145, 0, 0}, - dictWord{6, 0, 1873}, - dictWord{9, 0, 918}, - dictWord{7, 11, 588}, - dictWord{9, 11, 175}, - dictWord{138, 11, 530}, - dictWord{143, 11, 31}, - dictWord{11, 0, 165}, - dictWord{7, 10, 1125}, - dictWord{9, 10, 143}, - dictWord{14, 10, 405}, - dictWord{150, 10, 21}, - dictWord{9, 0, 260}, - dictWord{137, 0, 905}, - dictWord{5, 11, 872}, - dictWord{6, 11, 57}, - dictWord{6, 11, 479}, - dictWord{ - 6, - 11, - 562, - }, - dictWord{7, 11, 471}, - dictWord{7, 11, 1060}, - dictWord{9, 11, 447}, - dictWord{9, 11, 454}, - dictWord{141, 11, 6}, - dictWord{138, 11, 704}, - dictWord{133, 0, 865}, - dictWord{5, 0, 914}, - dictWord{134, 0, 1625}, - dictWord{133, 0, 234}, - dictWord{7, 0, 1383}, - dictWord{5, 11, 31}, - dictWord{6, 11, 
614}, - dictWord{145, 11, 61}, - dictWord{7, 11, 1200}, - dictWord{138, 11, 460}, - dictWord{6, 11, 424}, - dictWord{135, 11, 1866}, - dictWord{136, 0, 306}, - dictWord{ - 5, - 10, - 959, - }, - dictWord{12, 11, 30}, - dictWord{13, 11, 148}, - dictWord{14, 11, 87}, - dictWord{14, 11, 182}, - dictWord{16, 11, 42}, - dictWord{18, 11, 92}, - dictWord{ - 148, - 11, - 70, - }, - dictWord{6, 0, 1919}, - dictWord{6, 0, 1921}, - dictWord{9, 0, 923}, - dictWord{9, 0, 930}, - dictWord{9, 0, 941}, - dictWord{9, 0, 949}, - dictWord{9, 0, 987}, - dictWord{ - 9, - 0, - 988, - }, - dictWord{9, 0, 992}, - dictWord{12, 0, 802}, - dictWord{12, 0, 815}, - dictWord{12, 0, 856}, - dictWord{12, 0, 885}, - dictWord{12, 0, 893}, - dictWord{ - 12, - 0, - 898, - }, - dictWord{12, 0, 919}, - dictWord{12, 0, 920}, - dictWord{12, 0, 941}, - dictWord{12, 0, 947}, - dictWord{15, 0, 183}, - dictWord{15, 0, 185}, - dictWord{15, 0, 189}, - dictWord{15, 0, 197}, - dictWord{15, 0, 202}, - dictWord{15, 0, 233}, - dictWord{18, 0, 218}, - dictWord{18, 0, 219}, - dictWord{18, 0, 233}, - dictWord{143, 11, 156}, - dictWord{135, 10, 1759}, - dictWord{136, 10, 173}, - dictWord{13, 0, 163}, - dictWord{13, 0, 180}, - dictWord{18, 0, 78}, - dictWord{20, 0, 35}, - dictWord{5, 11, 13}, - dictWord{134, 11, 142}, - dictWord{134, 10, 266}, - dictWord{6, 11, 97}, - dictWord{7, 11, 116}, - dictWord{8, 11, 322}, - dictWord{8, 11, 755}, - dictWord{9, 11, 548}, - dictWord{10, 11, 714}, - dictWord{11, 11, 884}, - dictWord{141, 11, 324}, - dictWord{135, 0, 1312}, - dictWord{9, 0, 814}, - dictWord{137, 11, 676}, - dictWord{ - 133, - 0, - 707, - }, - dictWord{135, 0, 1493}, - dictWord{6, 0, 421}, - dictWord{7, 0, 61}, - dictWord{7, 0, 1540}, - dictWord{10, 0, 11}, - dictWord{138, 0, 501}, - dictWord{12, 0, 733}, - dictWord{12, 0, 766}, - dictWord{7, 11, 866}, - dictWord{135, 11, 1163}, - dictWord{137, 0, 341}, - dictWord{142, 0, 98}, - dictWord{145, 11, 115}, - dictWord{ - 135, - 11, - 1111, - }, - dictWord{136, 10, 300}, - 
dictWord{136, 0, 1014}, - dictWord{8, 11, 1}, - dictWord{9, 11, 112}, - dictWord{138, 11, 326}, - dictWord{132, 11, 730}, - dictWord{5, 11, 488}, - dictWord{6, 11, 527}, - dictWord{7, 11, 489}, - dictWord{7, 11, 1636}, - dictWord{8, 11, 121}, - dictWord{8, 11, 144}, - dictWord{8, 11, 359}, - dictWord{ - 9, - 11, - 193, - }, - dictWord{9, 11, 241}, - dictWord{9, 11, 336}, - dictWord{9, 11, 882}, - dictWord{11, 11, 266}, - dictWord{11, 11, 372}, - dictWord{11, 11, 944}, - dictWord{ - 12, - 11, - 401, - }, - dictWord{140, 11, 641}, - dictWord{6, 0, 971}, - dictWord{134, 0, 1121}, - dictWord{6, 0, 102}, - dictWord{7, 0, 72}, - dictWord{15, 0, 142}, - dictWord{ - 147, - 0, - 67, - }, - dictWord{151, 0, 30}, - dictWord{135, 0, 823}, - dictWord{134, 0, 1045}, - dictWord{5, 10, 427}, - dictWord{5, 10, 734}, - dictWord{7, 10, 478}, - dictWord{ - 136, - 10, - 52, - }, - dictWord{7, 0, 1930}, - dictWord{11, 10, 217}, - dictWord{142, 10, 165}, - dictWord{6, 0, 1512}, - dictWord{135, 0, 1870}, - dictWord{9, 11, 31}, - dictWord{ - 10, - 11, - 244, - }, - dictWord{10, 11, 699}, - dictWord{12, 11, 149}, - dictWord{141, 11, 497}, - dictWord{133, 11, 377}, - dictWord{145, 11, 101}, - dictWord{ - 10, - 11, - 158, - }, - dictWord{13, 11, 13}, - dictWord{13, 11, 137}, - dictWord{13, 11, 258}, - dictWord{14, 11, 111}, - dictWord{14, 11, 225}, - dictWord{14, 11, 253}, - dictWord{ - 14, - 11, - 304, - }, - dictWord{14, 11, 339}, - dictWord{14, 11, 417}, - dictWord{146, 11, 33}, - dictWord{6, 0, 87}, - dictWord{6, 10, 1734}, - dictWord{7, 10, 20}, - dictWord{ - 7, - 10, - 1056, - }, - dictWord{8, 10, 732}, - dictWord{9, 10, 406}, - dictWord{9, 10, 911}, - dictWord{138, 10, 694}, - dictWord{134, 0, 1243}, - dictWord{137, 0, 245}, - dictWord{ - 7, - 0, - 68, - }, - dictWord{8, 0, 48}, - dictWord{8, 0, 88}, - dictWord{8, 0, 582}, - dictWord{8, 0, 681}, - dictWord{9, 0, 373}, - dictWord{9, 0, 864}, - dictWord{11, 0, 157}, - dictWord{ - 11, - 0, - 336, - }, - dictWord{11, 0, 843}, - 
dictWord{148, 0, 27}, - dictWord{8, 11, 663}, - dictWord{144, 11, 8}, - dictWord{133, 10, 613}, - dictWord{4, 0, 88}, - dictWord{ - 5, - 0, - 137, - }, - dictWord{5, 0, 174}, - dictWord{5, 0, 777}, - dictWord{6, 0, 1664}, - dictWord{6, 0, 1725}, - dictWord{7, 0, 77}, - dictWord{7, 0, 426}, - dictWord{7, 0, 1317}, - dictWord{ - 7, - 0, - 1355, - }, - dictWord{8, 0, 126}, - dictWord{8, 0, 563}, - dictWord{9, 0, 523}, - dictWord{9, 0, 750}, - dictWord{10, 0, 310}, - dictWord{10, 0, 836}, - dictWord{11, 0, 42}, - dictWord{11, 0, 318}, - dictWord{11, 0, 731}, - dictWord{12, 0, 68}, - dictWord{12, 0, 92}, - dictWord{12, 0, 507}, - dictWord{12, 0, 692}, - dictWord{13, 0, 81}, - dictWord{ - 13, - 0, - 238, - }, - dictWord{13, 0, 374}, - dictWord{14, 0, 436}, - dictWord{18, 0, 138}, - dictWord{19, 0, 78}, - dictWord{19, 0, 111}, - dictWord{20, 0, 55}, - dictWord{20, 0, 77}, - dictWord{148, 0, 92}, - dictWord{141, 0, 418}, - dictWord{4, 0, 938}, - dictWord{137, 0, 625}, - dictWord{138, 0, 351}, - dictWord{5, 11, 843}, - dictWord{7, 10, 32}, - dictWord{ - 7, - 10, - 984, - }, - dictWord{8, 10, 85}, - dictWord{8, 10, 709}, - dictWord{9, 10, 579}, - dictWord{9, 10, 847}, - dictWord{9, 10, 856}, - dictWord{10, 10, 799}, - dictWord{ - 11, - 10, - 258, - }, - dictWord{11, 10, 1007}, - dictWord{12, 10, 331}, - dictWord{12, 10, 615}, - dictWord{13, 10, 188}, - dictWord{13, 10, 435}, - dictWord{14, 10, 8}, - dictWord{ - 15, - 10, - 165, - }, - dictWord{16, 10, 27}, - dictWord{148, 10, 40}, - dictWord{6, 0, 1668}, - dictWord{7, 0, 1499}, - dictWord{8, 0, 117}, - dictWord{9, 0, 314}, - dictWord{ - 138, - 0, - 174, - }, - dictWord{135, 0, 707}, - dictWord{132, 11, 554}, - dictWord{133, 11, 536}, - dictWord{5, 0, 403}, - dictWord{5, 11, 207}, - dictWord{9, 11, 79}, - dictWord{ - 11, - 11, - 625, - }, - dictWord{145, 11, 7}, - dictWord{132, 11, 424}, - dictWord{136, 11, 785}, - dictWord{4, 10, 167}, - dictWord{135, 10, 82}, - dictWord{9, 0, 7}, - dictWord{ - 23, - 0, - 6, - }, - 
dictWord{9, 11, 7}, - dictWord{151, 11, 6}, - dictWord{6, 0, 282}, - dictWord{5, 10, 62}, - dictWord{6, 10, 534}, - dictWord{7, 10, 74}, - dictWord{7, 10, 678}, - dictWord{ - 7, - 10, - 684, - }, - dictWord{7, 10, 1043}, - dictWord{7, 10, 1072}, - dictWord{8, 10, 280}, - dictWord{8, 10, 541}, - dictWord{8, 10, 686}, - dictWord{9, 10, 258}, - dictWord{ - 10, - 10, - 519, - }, - dictWord{11, 10, 252}, - dictWord{140, 10, 282}, - dictWord{138, 10, 33}, - dictWord{132, 10, 359}, - dictWord{4, 0, 44}, - dictWord{5, 0, 311}, - dictWord{ - 6, - 0, - 156, - }, - dictWord{7, 0, 639}, - dictWord{7, 0, 762}, - dictWord{7, 0, 1827}, - dictWord{9, 0, 8}, - dictWord{9, 0, 462}, - dictWord{148, 0, 83}, - dictWord{7, 11, 769}, - dictWord{ - 9, - 11, - 18, - }, - dictWord{138, 11, 358}, - dictWord{4, 0, 346}, - dictWord{7, 0, 115}, - dictWord{9, 0, 180}, - dictWord{9, 0, 456}, - dictWord{10, 0, 363}, - dictWord{ - 4, - 11, - 896, - }, - dictWord{134, 11, 1777}, - dictWord{133, 10, 211}, - dictWord{7, 0, 761}, - dictWord{7, 0, 1051}, - dictWord{137, 0, 545}, - dictWord{6, 10, 145}, - dictWord{ - 141, - 10, - 336, - }, - dictWord{7, 11, 750}, - dictWord{9, 11, 223}, - dictWord{11, 11, 27}, - dictWord{11, 11, 466}, - dictWord{12, 11, 624}, - dictWord{14, 11, 265}, - dictWord{146, 11, 61}, - dictWord{6, 0, 752}, - dictWord{6, 0, 768}, - dictWord{6, 0, 1195}, - dictWord{6, 0, 1254}, - dictWord{6, 0, 1619}, - dictWord{137, 0, 835}, - dictWord{ - 6, - 0, - 1936, - }, - dictWord{8, 0, 930}, - dictWord{136, 0, 960}, - dictWord{132, 10, 263}, - dictWord{132, 11, 249}, - dictWord{12, 0, 653}, - dictWord{132, 10, 916}, - dictWord{4, 11, 603}, - dictWord{133, 11, 661}, - dictWord{8, 0, 344}, - dictWord{4, 11, 11}, - dictWord{6, 11, 128}, - dictWord{7, 11, 231}, - dictWord{7, 11, 1533}, - dictWord{138, 11, 725}, - dictWord{134, 0, 1483}, - dictWord{134, 0, 875}, - dictWord{6, 0, 185}, - dictWord{7, 0, 1899}, - dictWord{9, 0, 875}, - dictWord{139, 0, 673}, - dictWord{15, 10, 155}, - dictWord{144, 
10, 79}, - dictWord{7, 0, 93}, - dictWord{7, 0, 210}, - dictWord{7, 0, 1223}, - dictWord{8, 0, 451}, - dictWord{8, 0, 460}, - dictWord{ - 11, - 0, - 353, - }, - dictWord{11, 0, 475}, - dictWord{4, 10, 599}, - dictWord{6, 10, 1634}, - dictWord{7, 10, 67}, - dictWord{7, 10, 691}, - dictWord{7, 10, 979}, - dictWord{ - 7, - 10, - 1697, - }, - dictWord{8, 10, 207}, - dictWord{8, 10, 214}, - dictWord{8, 10, 231}, - dictWord{8, 10, 294}, - dictWord{8, 10, 336}, - dictWord{8, 10, 428}, - dictWord{ - 8, - 10, - 471, - }, - dictWord{8, 10, 622}, - dictWord{8, 10, 626}, - dictWord{8, 10, 679}, - dictWord{8, 10, 759}, - dictWord{8, 10, 829}, - dictWord{9, 10, 11}, - dictWord{9, 10, 246}, - dictWord{9, 10, 484}, - dictWord{9, 10, 573}, - dictWord{9, 10, 706}, - dictWord{9, 10, 762}, - dictWord{9, 10, 798}, - dictWord{9, 10, 855}, - dictWord{9, 10, 870}, - dictWord{ - 9, - 10, - 912, - }, - dictWord{10, 10, 303}, - dictWord{10, 10, 335}, - dictWord{10, 10, 424}, - dictWord{10, 10, 461}, - dictWord{10, 10, 543}, - dictWord{10, 10, 759}, - dictWord{10, 10, 814}, - dictWord{11, 10, 59}, - dictWord{11, 10, 235}, - dictWord{11, 10, 590}, - dictWord{11, 10, 929}, - dictWord{11, 10, 963}, - dictWord{ - 11, - 10, - 987, - }, - dictWord{12, 10, 114}, - dictWord{12, 10, 182}, - dictWord{12, 10, 226}, - dictWord{12, 10, 332}, - dictWord{12, 10, 439}, - dictWord{12, 10, 575}, - dictWord{ - 12, - 10, - 598, - }, - dictWord{12, 10, 675}, - dictWord{13, 10, 8}, - dictWord{13, 10, 125}, - dictWord{13, 10, 194}, - dictWord{13, 10, 287}, - dictWord{14, 10, 197}, - dictWord{14, 10, 383}, - dictWord{15, 10, 53}, - dictWord{17, 10, 63}, - dictWord{19, 10, 46}, - dictWord{19, 10, 98}, - dictWord{19, 10, 106}, - dictWord{148, 10, 85}, - dictWord{132, 11, 476}, - dictWord{4, 0, 327}, - dictWord{5, 0, 478}, - dictWord{7, 0, 1332}, - dictWord{136, 0, 753}, - dictWord{5, 0, 1020}, - dictWord{133, 0, 1022}, - dictWord{135, 11, 1807}, - dictWord{4, 0, 103}, - dictWord{133, 0, 401}, - dictWord{4, 0, 499}, - 
dictWord{135, 0, 1421}, - dictWord{10, 0, 207}, - dictWord{13, 0, 164}, - dictWord{147, 10, 126}, - dictWord{9, 11, 20}, - dictWord{10, 11, 324}, - dictWord{139, 11, 488}, - dictWord{132, 0, 96}, - dictWord{9, 11, 280}, - dictWord{ - 138, - 11, - 134, - }, - dictWord{135, 0, 968}, - dictWord{133, 10, 187}, - dictWord{135, 10, 1286}, - dictWord{5, 11, 112}, - dictWord{6, 11, 103}, - dictWord{134, 11, 150}, - dictWord{8, 0, 914}, - dictWord{10, 0, 3}, - dictWord{4, 10, 215}, - dictWord{9, 10, 38}, - dictWord{11, 10, 23}, - dictWord{11, 10, 127}, - dictWord{139, 10, 796}, - dictWord{ - 135, - 0, - 399, - }, - dictWord{6, 0, 563}, - dictWord{137, 0, 224}, - dictWord{6, 0, 704}, - dictWord{134, 0, 1214}, - dictWord{4, 11, 708}, - dictWord{8, 11, 15}, - dictWord{ - 9, - 11, - 50, - }, - dictWord{9, 11, 386}, - dictWord{11, 11, 18}, - dictWord{11, 11, 529}, - dictWord{140, 11, 228}, - dictWord{4, 11, 563}, - dictWord{7, 11, 109}, - dictWord{ - 7, - 11, - 592, - }, - dictWord{7, 11, 637}, - dictWord{7, 11, 770}, - dictWord{7, 11, 1701}, - dictWord{8, 11, 436}, - dictWord{8, 11, 463}, - dictWord{9, 11, 60}, - dictWord{9, 11, 335}, - dictWord{9, 11, 904}, - dictWord{10, 11, 73}, - dictWord{11, 11, 434}, - dictWord{12, 11, 585}, - dictWord{13, 11, 331}, - dictWord{18, 11, 110}, - dictWord{ - 148, - 11, - 60, - }, - dictWord{134, 0, 1559}, - dictWord{132, 11, 502}, - dictWord{6, 11, 347}, - dictWord{138, 11, 161}, - dictWord{4, 11, 33}, - dictWord{5, 11, 102}, - dictWord{ - 5, - 11, - 500, - }, - dictWord{6, 11, 284}, - dictWord{7, 11, 1079}, - dictWord{7, 11, 1423}, - dictWord{7, 11, 1702}, - dictWord{8, 11, 470}, - dictWord{9, 11, 554}, - dictWord{ - 9, - 11, - 723, - }, - dictWord{139, 11, 333}, - dictWord{7, 11, 246}, - dictWord{135, 11, 840}, - dictWord{6, 11, 10}, - dictWord{8, 11, 571}, - dictWord{9, 11, 739}, - dictWord{ - 143, - 11, - 91, - }, - dictWord{8, 0, 861}, - dictWord{10, 0, 905}, - dictWord{12, 0, 730}, - dictWord{12, 0, 789}, - dictWord{133, 11, 626}, - 
dictWord{134, 0, 946}, - dictWord{ - 5, - 0, - 746, - }, - dictWord{12, 0, 333}, - dictWord{14, 0, 332}, - dictWord{12, 11, 333}, - dictWord{142, 11, 332}, - dictWord{5, 11, 18}, - dictWord{6, 11, 526}, - dictWord{ - 13, - 11, - 24, - }, - dictWord{13, 11, 110}, - dictWord{19, 11, 5}, - dictWord{147, 11, 44}, - dictWord{4, 0, 910}, - dictWord{5, 0, 832}, - dictWord{135, 10, 2002}, - dictWord{ - 10, - 11, - 768, - }, - dictWord{139, 11, 787}, - dictWord{4, 11, 309}, - dictWord{5, 11, 462}, - dictWord{7, 11, 970}, - dictWord{135, 11, 1097}, - dictWord{4, 10, 28}, - dictWord{ - 5, - 10, - 440, - }, - dictWord{7, 10, 248}, - dictWord{11, 10, 833}, - dictWord{140, 10, 344}, - dictWord{134, 10, 1654}, - dictWord{6, 0, 632}, - dictWord{6, 0, 652}, - dictWord{ - 6, - 0, - 1272, - }, - dictWord{6, 0, 1384}, - dictWord{134, 0, 1560}, - dictWord{134, 11, 1704}, - dictWord{6, 0, 1393}, - dictWord{133, 10, 853}, - dictWord{6, 10, 249}, - dictWord{7, 10, 1234}, - dictWord{139, 10, 573}, - dictWord{5, 11, 86}, - dictWord{7, 11, 743}, - dictWord{9, 11, 85}, - dictWord{10, 11, 281}, - dictWord{10, 11, 432}, - dictWord{11, 11, 490}, - dictWord{12, 11, 251}, - dictWord{13, 11, 118}, - dictWord{14, 11, 378}, - dictWord{146, 11, 143}, - dictWord{5, 11, 524}, - dictWord{ - 133, - 11, - 744, - }, - dictWord{134, 0, 1514}, - dictWord{10, 0, 201}, - dictWord{142, 0, 319}, - dictWord{7, 0, 717}, - dictWord{10, 0, 510}, - dictWord{7, 10, 392}, - dictWord{ - 8, - 10, - 20, - }, - dictWord{8, 10, 172}, - dictWord{8, 10, 690}, - dictWord{9, 10, 383}, - dictWord{9, 10, 845}, - dictWord{11, 10, 293}, - dictWord{11, 10, 832}, - dictWord{ - 11, - 10, - 920, - }, - dictWord{11, 10, 984}, - dictWord{141, 10, 221}, - dictWord{134, 0, 1381}, - dictWord{5, 10, 858}, - dictWord{133, 10, 992}, - dictWord{8, 0, 528}, - dictWord{137, 0, 348}, - dictWord{10, 11, 107}, - dictWord{140, 11, 436}, - dictWord{4, 0, 20}, - dictWord{133, 0, 616}, - dictWord{134, 0, 1251}, - dictWord{ - 132, - 11, - 927, - }, - 
dictWord{10, 11, 123}, - dictWord{12, 11, 670}, - dictWord{13, 11, 371}, - dictWord{14, 11, 142}, - dictWord{146, 11, 94}, - dictWord{134, 0, 1163}, - dictWord{ - 7, - 11, - 1149, - }, - dictWord{137, 11, 156}, - dictWord{134, 0, 307}, - dictWord{133, 11, 778}, - dictWord{7, 0, 1091}, - dictWord{135, 0, 1765}, - dictWord{ - 5, - 11, - 502, - }, - dictWord{6, 10, 268}, - dictWord{137, 10, 62}, - dictWord{8, 11, 196}, - dictWord{10, 11, 283}, - dictWord{139, 11, 406}, - dictWord{4, 0, 26}, - dictWord{ - 5, - 0, - 429, - }, - dictWord{6, 0, 245}, - dictWord{7, 0, 704}, - dictWord{7, 0, 1379}, - dictWord{135, 0, 1474}, - dictWord{133, 11, 855}, - dictWord{132, 0, 881}, - dictWord{ - 4, - 0, - 621, - }, - dictWord{135, 11, 1596}, - dictWord{7, 11, 1400}, - dictWord{9, 11, 446}, - dictWord{138, 11, 45}, - dictWord{6, 0, 736}, - dictWord{138, 10, 106}, - dictWord{133, 0, 542}, - dictWord{134, 0, 348}, - dictWord{133, 0, 868}, - dictWord{136, 0, 433}, - dictWord{135, 0, 1495}, - dictWord{138, 0, 771}, - dictWord{ - 6, - 10, - 613, - }, - dictWord{136, 10, 223}, - dictWord{138, 0, 215}, - dictWord{141, 0, 124}, - dictWord{136, 11, 391}, - dictWord{135, 11, 172}, - dictWord{132, 10, 670}, - dictWord{140, 0, 55}, - dictWord{9, 10, 40}, - dictWord{139, 10, 136}, - dictWord{7, 0, 62}, - dictWord{147, 0, 112}, - dictWord{132, 0, 856}, - dictWord{132, 11, 568}, - dictWord{12, 0, 270}, - dictWord{139, 10, 259}, - dictWord{8, 0, 572}, - dictWord{137, 0, 698}, - dictWord{4, 11, 732}, - dictWord{9, 10, 310}, - dictWord{137, 10, 682}, - dictWord{142, 10, 296}, - dictWord{134, 0, 939}, - dictWord{136, 11, 733}, - dictWord{135, 11, 1435}, - dictWord{7, 10, 1401}, - dictWord{135, 10, 1476}, - dictWord{6, 0, 352}, - dictWord{4, 10, 296}, - dictWord{7, 10, 401}, - dictWord{7, 10, 1410}, - dictWord{7, 10, 1594}, - dictWord{7, 10, 1674}, - dictWord{8, 10, 63}, - dictWord{ - 8, - 10, - 660, - }, - dictWord{137, 10, 74}, - dictWord{4, 11, 428}, - dictWord{133, 11, 668}, - dictWord{4, 10, 139}, 
- dictWord{4, 10, 388}, - dictWord{140, 10, 188}, - dictWord{7, 11, 2015}, - dictWord{140, 11, 665}, - dictWord{132, 0, 647}, - dictWord{146, 0, 10}, - dictWord{138, 0, 220}, - dictWord{142, 0, 464}, - dictWord{ - 132, - 0, - 109, - }, - dictWord{134, 0, 1746}, - dictWord{6, 0, 515}, - dictWord{4, 10, 747}, - dictWord{6, 11, 1623}, - dictWord{6, 11, 1681}, - dictWord{7, 10, 649}, - dictWord{ - 7, - 10, - 1479, - }, - dictWord{135, 10, 1583}, - dictWord{133, 10, 232}, - dictWord{135, 0, 566}, - dictWord{137, 10, 887}, - dictWord{4, 0, 40}, - dictWord{10, 0, 67}, - dictWord{ - 11, - 0, - 117, - }, - dictWord{11, 0, 768}, - dictWord{139, 0, 935}, - dictWord{132, 0, 801}, - dictWord{7, 0, 992}, - dictWord{8, 0, 301}, - dictWord{9, 0, 722}, - dictWord{ - 12, - 0, - 63, - }, - dictWord{13, 0, 29}, - dictWord{14, 0, 161}, - dictWord{143, 0, 18}, - dictWord{139, 0, 923}, - dictWord{6, 11, 1748}, - dictWord{8, 11, 715}, - dictWord{9, 11, 802}, - dictWord{10, 11, 46}, - dictWord{10, 11, 819}, - dictWord{13, 11, 308}, - dictWord{14, 11, 351}, - dictWord{14, 11, 363}, - dictWord{146, 11, 67}, - dictWord{ - 137, - 11, - 745, - }, - dictWord{7, 0, 1145}, - dictWord{4, 10, 14}, - dictWord{7, 10, 1801}, - dictWord{10, 10, 748}, - dictWord{141, 10, 458}, - dictWord{4, 11, 63}, - dictWord{ - 5, - 11, - 347, - }, - dictWord{134, 11, 474}, - dictWord{135, 0, 568}, - dictWord{4, 10, 425}, - dictWord{7, 11, 577}, - dictWord{7, 11, 1432}, - dictWord{9, 11, 475}, - dictWord{ - 9, - 11, - 505, - }, - dictWord{9, 11, 526}, - dictWord{9, 11, 609}, - dictWord{9, 11, 689}, - dictWord{9, 11, 726}, - dictWord{9, 11, 735}, - dictWord{9, 11, 738}, - dictWord{ - 10, - 11, - 556, - }, - dictWord{10, 11, 674}, - dictWord{10, 11, 684}, - dictWord{11, 11, 89}, - dictWord{11, 11, 202}, - dictWord{11, 11, 272}, - dictWord{11, 11, 380}, - dictWord{ - 11, - 11, - 415, - }, - dictWord{11, 11, 505}, - dictWord{11, 11, 537}, - dictWord{11, 11, 550}, - dictWord{11, 11, 562}, - dictWord{11, 11, 640}, - 
dictWord{11, 11, 667}, - dictWord{11, 11, 688}, - dictWord{11, 11, 847}, - dictWord{11, 11, 927}, - dictWord{11, 11, 930}, - dictWord{11, 11, 940}, - dictWord{12, 11, 144}, - dictWord{ - 12, - 11, - 325, - }, - dictWord{12, 11, 329}, - dictWord{12, 11, 389}, - dictWord{12, 11, 403}, - dictWord{12, 11, 451}, - dictWord{12, 11, 515}, - dictWord{12, 11, 604}, - dictWord{ - 12, - 11, - 616, - }, - dictWord{12, 11, 626}, - dictWord{13, 11, 66}, - dictWord{13, 11, 131}, - dictWord{13, 11, 167}, - dictWord{13, 11, 236}, - dictWord{13, 11, 368}, - dictWord{13, 11, 411}, - dictWord{13, 11, 434}, - dictWord{13, 11, 453}, - dictWord{13, 11, 461}, - dictWord{13, 11, 474}, - dictWord{14, 11, 59}, - dictWord{14, 11, 60}, - dictWord{14, 11, 139}, - dictWord{14, 11, 152}, - dictWord{14, 11, 276}, - dictWord{14, 11, 353}, - dictWord{14, 11, 402}, - dictWord{15, 11, 28}, - dictWord{ - 15, - 11, - 81, - }, - dictWord{15, 11, 123}, - dictWord{15, 11, 152}, - dictWord{18, 11, 136}, - dictWord{148, 11, 88}, - dictWord{137, 0, 247}, - dictWord{135, 11, 1622}, - dictWord{ - 9, - 11, - 544, - }, - dictWord{11, 11, 413}, - dictWord{144, 11, 25}, - dictWord{4, 0, 645}, - dictWord{7, 0, 825}, - dictWord{6, 10, 1768}, - dictWord{135, 11, 89}, - dictWord{140, 0, 328}, - dictWord{5, 10, 943}, - dictWord{134, 10, 1779}, - dictWord{134, 0, 1363}, - dictWord{5, 10, 245}, - dictWord{6, 10, 576}, - dictWord{7, 10, 582}, - dictWord{136, 10, 225}, - dictWord{134, 0, 1280}, - dictWord{5, 11, 824}, - dictWord{133, 11, 941}, - dictWord{7, 11, 440}, - dictWord{8, 11, 230}, - dictWord{ - 139, - 11, - 106, - }, - dictWord{5, 0, 28}, - dictWord{6, 0, 204}, - dictWord{10, 0, 320}, - dictWord{10, 0, 583}, - dictWord{13, 0, 502}, - dictWord{14, 0, 72}, - dictWord{14, 0, 274}, - dictWord{14, 0, 312}, - dictWord{14, 0, 344}, - dictWord{15, 0, 159}, - dictWord{16, 0, 62}, - dictWord{16, 0, 69}, - dictWord{17, 0, 30}, - dictWord{18, 0, 42}, - dictWord{ - 18, - 0, - 53, - }, - dictWord{18, 0, 84}, - dictWord{18, 0, 
140}, - dictWord{19, 0, 68}, - dictWord{19, 0, 85}, - dictWord{20, 0, 5}, - dictWord{20, 0, 45}, - dictWord{20, 0, 101}, - dictWord{ - 22, - 0, - 7, - }, - dictWord{150, 0, 20}, - dictWord{4, 0, 558}, - dictWord{6, 0, 390}, - dictWord{7, 0, 162}, - dictWord{7, 0, 689}, - dictWord{9, 0, 360}, - dictWord{138, 0, 653}, - dictWord{134, 0, 764}, - dictWord{6, 0, 862}, - dictWord{137, 0, 833}, - dictWord{5, 0, 856}, - dictWord{6, 0, 1672}, - dictWord{6, 0, 1757}, - dictWord{134, 0, 1781}, - dictWord{ - 5, - 0, - 92, - }, - dictWord{10, 0, 736}, - dictWord{140, 0, 102}, - dictWord{6, 0, 1927}, - dictWord{6, 0, 1944}, - dictWord{8, 0, 924}, - dictWord{8, 0, 948}, - dictWord{ - 10, - 0, - 967, - }, - dictWord{138, 0, 978}, - dictWord{134, 0, 1479}, - dictWord{5, 0, 590}, - dictWord{8, 0, 360}, - dictWord{9, 0, 213}, - dictWord{138, 0, 63}, - dictWord{ - 134, - 0, - 1521, - }, - dictWord{6, 0, 709}, - dictWord{134, 0, 891}, - dictWord{132, 10, 443}, - dictWord{13, 0, 477}, - dictWord{14, 0, 120}, - dictWord{148, 0, 61}, - dictWord{ - 4, - 11, - 914, - }, - dictWord{5, 11, 800}, - dictWord{133, 11, 852}, - dictWord{10, 11, 54}, - dictWord{141, 11, 115}, - dictWord{4, 11, 918}, - dictWord{133, 11, 876}, - dictWord{139, 11, 152}, - dictWord{4, 11, 92}, - dictWord{133, 11, 274}, - dictWord{135, 11, 1901}, - dictWord{9, 11, 800}, - dictWord{10, 11, 693}, - dictWord{ - 11, - 11, - 482, - }, - dictWord{11, 11, 734}, - dictWord{139, 11, 789}, - dictWord{9, 0, 483}, - dictWord{132, 10, 298}, - dictWord{6, 0, 1213}, - dictWord{141, 11, 498}, - dictWord{135, 11, 1451}, - dictWord{133, 11, 743}, - dictWord{4, 0, 1022}, - dictWord{10, 0, 1000}, - dictWord{12, 0, 957}, - dictWord{12, 0, 980}, - dictWord{ - 12, - 0, - 1013, - }, - dictWord{14, 0, 481}, - dictWord{144, 0, 116}, - dictWord{8, 0, 503}, - dictWord{17, 0, 29}, - dictWord{4, 11, 49}, - dictWord{7, 11, 280}, - dictWord{ - 135, - 11, - 1633, - }, - dictWord{135, 0, 1712}, - dictWord{134, 0, 466}, - dictWord{136, 11, 47}, - 
dictWord{5, 10, 164}, - dictWord{7, 10, 121}, - dictWord{142, 10, 189}, - dictWord{ - 7, - 10, - 812, - }, - dictWord{7, 10, 1261}, - dictWord{7, 10, 1360}, - dictWord{9, 10, 632}, - dictWord{140, 10, 352}, - dictWord{139, 10, 556}, - dictWord{132, 0, 731}, - dictWord{5, 11, 272}, - dictWord{5, 11, 908}, - dictWord{5, 11, 942}, - dictWord{7, 11, 1008}, - dictWord{7, 11, 1560}, - dictWord{8, 11, 197}, - dictWord{9, 11, 47}, - dictWord{11, 11, 538}, - dictWord{139, 11, 742}, - dictWord{4, 10, 172}, - dictWord{9, 10, 611}, - dictWord{10, 10, 436}, - dictWord{12, 10, 673}, - dictWord{ - 141, - 10, - 255, - }, - dictWord{133, 10, 844}, - dictWord{10, 0, 484}, - dictWord{11, 0, 754}, - dictWord{12, 0, 457}, - dictWord{14, 0, 171}, - dictWord{14, 0, 389}, - dictWord{ - 146, - 0, - 153, - }, - dictWord{9, 10, 263}, - dictWord{10, 10, 147}, - dictWord{138, 10, 492}, - dictWord{137, 11, 891}, - dictWord{138, 0, 241}, - dictWord{133, 10, 537}, - dictWord{6, 0, 2005}, - dictWord{136, 0, 964}, - dictWord{137, 10, 842}, - dictWord{151, 11, 8}, - dictWord{4, 11, 407}, - dictWord{132, 11, 560}, - dictWord{ - 135, - 11, - 1884, - }, - dictWord{6, 0, 1100}, - dictWord{134, 0, 1242}, - dictWord{135, 0, 954}, - dictWord{5, 10, 230}, - dictWord{5, 10, 392}, - dictWord{6, 10, 420}, - dictWord{ - 9, - 10, - 568, - }, - dictWord{140, 10, 612}, - dictWord{4, 11, 475}, - dictWord{11, 11, 35}, - dictWord{11, 11, 90}, - dictWord{13, 11, 7}, - dictWord{13, 11, 71}, - dictWord{ - 13, - 11, - 177, - }, - dictWord{142, 11, 422}, - dictWord{136, 11, 332}, - dictWord{135, 0, 1958}, - dictWord{6, 0, 549}, - dictWord{8, 0, 34}, - dictWord{8, 0, 283}, - dictWord{ - 9, - 0, - 165, - }, - dictWord{138, 0, 475}, - dictWord{10, 0, 952}, - dictWord{12, 0, 966}, - dictWord{140, 0, 994}, - dictWord{5, 0, 652}, - dictWord{5, 0, 701}, - dictWord{ - 135, - 0, - 449, - }, - dictWord{4, 0, 655}, - dictWord{7, 0, 850}, - dictWord{17, 0, 75}, - dictWord{146, 0, 137}, - dictWord{4, 0, 146}, - dictWord{7, 0, 1618}, - 
dictWord{8, 0, 670}, - dictWord{ - 5, - 10, - 41, - }, - dictWord{7, 10, 1459}, - dictWord{7, 10, 1469}, - dictWord{7, 10, 1859}, - dictWord{9, 10, 549}, - dictWord{139, 10, 905}, - dictWord{133, 10, 696}, - dictWord{6, 0, 159}, - dictWord{6, 0, 364}, - dictWord{7, 0, 516}, - dictWord{137, 0, 518}, - dictWord{135, 0, 1439}, - dictWord{6, 11, 222}, - dictWord{7, 11, 636}, - dictWord{ - 7, - 11, - 1620, - }, - dictWord{8, 11, 409}, - dictWord{9, 11, 693}, - dictWord{139, 11, 77}, - dictWord{13, 0, 151}, - dictWord{141, 11, 45}, - dictWord{6, 0, 1027}, - dictWord{ - 4, - 11, - 336, - }, - dictWord{132, 10, 771}, - dictWord{139, 11, 392}, - dictWord{10, 11, 121}, - dictWord{11, 11, 175}, - dictWord{149, 11, 16}, - dictWord{8, 0, 950}, - dictWord{138, 0, 983}, - dictWord{133, 10, 921}, - dictWord{135, 0, 993}, - dictWord{6, 10, 180}, - dictWord{7, 10, 1137}, - dictWord{8, 10, 751}, - dictWord{ - 139, - 10, - 805, - }, - dictWord{7, 0, 501}, - dictWord{9, 0, 111}, - dictWord{10, 0, 141}, - dictWord{11, 0, 332}, - dictWord{13, 0, 43}, - dictWord{13, 0, 429}, - dictWord{14, 0, 130}, - dictWord{14, 0, 415}, - dictWord{145, 0, 102}, - dictWord{4, 10, 183}, - dictWord{5, 11, 882}, - dictWord{7, 10, 271}, - dictWord{11, 10, 824}, - dictWord{11, 10, 952}, - dictWord{13, 10, 278}, - dictWord{13, 10, 339}, - dictWord{13, 10, 482}, - dictWord{14, 10, 424}, - dictWord{148, 10, 99}, - dictWord{4, 10, 19}, - dictWord{5, 10, 477}, - dictWord{5, 10, 596}, - dictWord{6, 10, 505}, - dictWord{7, 10, 1221}, - dictWord{11, 10, 907}, - dictWord{12, 10, 209}, - dictWord{141, 10, 214}, - dictWord{ - 135, - 10, - 1215, - }, - dictWord{133, 0, 452}, - dictWord{132, 11, 426}, - dictWord{5, 0, 149}, - dictWord{136, 0, 233}, - dictWord{133, 0, 935}, - dictWord{6, 11, 58}, - dictWord{ - 7, - 11, - 654, - }, - dictWord{7, 11, 745}, - dictWord{7, 11, 1969}, - dictWord{8, 11, 240}, - dictWord{8, 11, 675}, - dictWord{9, 11, 479}, - dictWord{9, 11, 731}, - dictWord{ - 10, - 11, - 330, - }, - dictWord{10, 
11, 593}, - dictWord{10, 11, 817}, - dictWord{11, 11, 32}, - dictWord{11, 11, 133}, - dictWord{11, 11, 221}, - dictWord{145, 11, 68}, - dictWord{ - 12, - 0, - 582, - }, - dictWord{18, 0, 131}, - dictWord{7, 11, 102}, - dictWord{137, 11, 538}, - dictWord{136, 0, 801}, - dictWord{134, 10, 1645}, - dictWord{132, 0, 70}, - dictWord{6, 10, 92}, - dictWord{6, 10, 188}, - dictWord{7, 10, 1269}, - dictWord{7, 10, 1524}, - dictWord{7, 10, 1876}, - dictWord{10, 10, 228}, - dictWord{139, 10, 1020}, - dictWord{4, 10, 459}, - dictWord{133, 10, 966}, - dictWord{138, 0, 369}, - dictWord{16, 0, 36}, - dictWord{140, 10, 330}, - dictWord{141, 11, 366}, - dictWord{ - 7, - 0, - 721, - }, - dictWord{10, 0, 236}, - dictWord{12, 0, 204}, - dictWord{6, 10, 18}, - dictWord{7, 10, 932}, - dictWord{8, 10, 757}, - dictWord{9, 10, 54}, - dictWord{9, 10, 65}, - dictWord{9, 10, 844}, - dictWord{10, 10, 113}, - dictWord{10, 10, 315}, - dictWord{10, 10, 798}, - dictWord{11, 10, 153}, - dictWord{12, 10, 151}, - dictWord{12, 10, 392}, - dictWord{12, 10, 666}, - dictWord{142, 10, 248}, - dictWord{7, 0, 241}, - dictWord{10, 0, 430}, - dictWord{8, 10, 548}, - dictWord{9, 10, 532}, - dictWord{10, 10, 117}, - dictWord{11, 10, 351}, - dictWord{11, 10, 375}, - dictWord{143, 10, 23}, - dictWord{134, 10, 1742}, - dictWord{133, 10, 965}, - dictWord{133, 11, 566}, - dictWord{ - 6, - 11, - 48, - }, - dictWord{135, 11, 63}, - dictWord{134, 10, 182}, - dictWord{10, 10, 65}, - dictWord{10, 10, 488}, - dictWord{138, 10, 497}, - dictWord{6, 11, 114}, - dictWord{7, 11, 1224}, - dictWord{7, 11, 1556}, - dictWord{136, 11, 3}, - dictWord{134, 0, 1817}, - dictWord{8, 11, 576}, - dictWord{137, 11, 267}, - dictWord{ - 6, - 0, - 1078, - }, - dictWord{144, 0, 16}, - dictWord{9, 10, 588}, - dictWord{138, 10, 260}, - dictWord{138, 0, 1021}, - dictWord{5, 0, 406}, - dictWord{134, 0, 2022}, - dictWord{133, 11, 933}, - dictWord{6, 0, 69}, - dictWord{135, 0, 117}, - dictWord{7, 0, 1830}, - dictWord{136, 11, 427}, - dictWord{4, 0, 
432}, - dictWord{135, 0, 824}, - dictWord{134, 10, 1786}, - dictWord{133, 0, 826}, - dictWord{139, 11, 67}, - dictWord{133, 11, 759}, - dictWord{135, 10, 308}, - dictWord{137, 0, 816}, - dictWord{ - 133, - 0, - 1000, - }, - dictWord{4, 0, 297}, - dictWord{6, 0, 529}, - dictWord{7, 0, 152}, - dictWord{7, 0, 713}, - dictWord{7, 0, 1845}, - dictWord{8, 0, 710}, - dictWord{8, 0, 717}, - dictWord{12, 0, 639}, - dictWord{140, 0, 685}, - dictWord{7, 0, 423}, - dictWord{136, 10, 588}, - dictWord{136, 10, 287}, - dictWord{136, 0, 510}, - dictWord{ - 134, - 0, - 1048, - }, - dictWord{6, 0, 618}, - dictWord{7, 11, 56}, - dictWord{7, 11, 1989}, - dictWord{8, 11, 337}, - dictWord{8, 11, 738}, - dictWord{9, 11, 600}, - dictWord{ - 10, - 11, - 483, - }, - dictWord{12, 11, 37}, - dictWord{13, 11, 447}, - dictWord{142, 11, 92}, - dictWord{4, 0, 520}, - dictWord{135, 0, 575}, - dictWord{8, 0, 990}, - dictWord{ - 138, - 0, - 977, - }, - dictWord{135, 11, 774}, - dictWord{9, 11, 347}, - dictWord{11, 11, 24}, - dictWord{140, 11, 170}, - dictWord{136, 11, 379}, - dictWord{140, 10, 290}, - dictWord{132, 11, 328}, - dictWord{4, 0, 321}, - dictWord{134, 0, 569}, - dictWord{4, 11, 101}, - dictWord{135, 11, 1171}, - dictWord{7, 0, 723}, - dictWord{7, 0, 1135}, - dictWord{5, 11, 833}, - dictWord{136, 11, 744}, - dictWord{7, 10, 719}, - dictWord{8, 10, 809}, - dictWord{136, 10, 834}, - dictWord{8, 0, 921}, - dictWord{136, 10, 796}, - dictWord{5, 10, 210}, - dictWord{6, 10, 213}, - dictWord{7, 10, 60}, - dictWord{10, 10, 364}, - dictWord{139, 10, 135}, - dictWord{5, 0, 397}, - dictWord{6, 0, 154}, - dictWord{7, 0, 676}, - dictWord{8, 0, 443}, - dictWord{8, 0, 609}, - dictWord{9, 0, 24}, - dictWord{9, 0, 325}, - dictWord{10, 0, 35}, - dictWord{11, 0, 535}, - dictWord{11, 0, 672}, - dictWord{11, 0, 1018}, - dictWord{12, 0, 637}, - dictWord{16, 0, 30}, - dictWord{5, 10, 607}, - dictWord{8, 10, 326}, - dictWord{136, 10, 490}, - dictWord{4, 10, 701}, - dictWord{5, 10, 472}, - dictWord{6, 11, 9}, - 
dictWord{6, 11, 397}, - dictWord{7, 11, 53}, - dictWord{7, 11, 1742}, - dictWord{9, 10, 758}, - dictWord{10, 11, 632}, - dictWord{ - 11, - 11, - 828, - }, - dictWord{140, 11, 146}, - dictWord{135, 10, 380}, - dictWord{135, 10, 1947}, - dictWord{148, 11, 109}, - dictWord{10, 10, 278}, - dictWord{ - 138, - 11, - 278, - }, - dictWord{134, 0, 856}, - dictWord{7, 0, 139}, - dictWord{4, 10, 386}, - dictWord{8, 10, 405}, - dictWord{8, 10, 728}, - dictWord{9, 10, 497}, - dictWord{ - 11, - 10, - 110, - }, - dictWord{11, 10, 360}, - dictWord{15, 10, 37}, - dictWord{144, 10, 84}, - dictWord{141, 0, 282}, - dictWord{133, 0, 981}, - dictWord{5, 0, 288}, - dictWord{ - 7, - 10, - 1452, - }, - dictWord{7, 10, 1480}, - dictWord{8, 10, 634}, - dictWord{140, 10, 472}, - dictWord{7, 0, 1890}, - dictWord{8, 11, 367}, - dictWord{10, 11, 760}, - dictWord{ - 14, - 11, - 79, - }, - dictWord{20, 11, 17}, - dictWord{152, 11, 0}, - dictWord{4, 10, 524}, - dictWord{136, 10, 810}, - dictWord{4, 0, 56}, - dictWord{7, 0, 1791}, - dictWord{ - 8, - 0, - 607, - }, - dictWord{8, 0, 651}, - dictWord{11, 0, 465}, - dictWord{11, 0, 835}, - dictWord{12, 0, 337}, - dictWord{141, 0, 480}, - dictWord{10, 10, 238}, - dictWord{ - 141, - 10, - 33, - }, - dictWord{11, 11, 417}, - dictWord{12, 11, 223}, - dictWord{140, 11, 265}, - dictWord{9, 0, 158}, - dictWord{10, 0, 411}, - dictWord{140, 0, 261}, - dictWord{ - 133, - 10, - 532, - }, - dictWord{133, 10, 997}, - dictWord{12, 11, 186}, - dictWord{12, 11, 292}, - dictWord{14, 11, 100}, - dictWord{146, 11, 70}, - dictWord{6, 0, 1403}, - dictWord{136, 0, 617}, - dictWord{134, 0, 1205}, - dictWord{139, 0, 563}, - dictWord{4, 0, 242}, - dictWord{134, 0, 333}, - dictWord{4, 11, 186}, - dictWord{5, 11, 157}, - dictWord{8, 11, 168}, - dictWord{138, 11, 6}, - dictWord{132, 0, 369}, - dictWord{133, 11, 875}, - dictWord{5, 10, 782}, - dictWord{5, 10, 829}, - dictWord{ - 134, - 10, - 1738, - }, - dictWord{134, 0, 622}, - dictWord{135, 11, 1272}, - dictWord{6, 0, 1407}, - 
dictWord{7, 11, 111}, - dictWord{136, 11, 581}, - dictWord{7, 10, 1823}, - dictWord{139, 10, 693}, - dictWord{7, 0, 160}, - dictWord{10, 0, 624}, - dictWord{142, 0, 279}, - dictWord{132, 0, 363}, - dictWord{10, 11, 589}, - dictWord{12, 11, 111}, - dictWord{13, 11, 260}, - dictWord{14, 11, 82}, - dictWord{18, 11, 63}, - dictWord{147, 11, 45}, - dictWord{7, 11, 1364}, - dictWord{7, 11, 1907}, - dictWord{ - 141, - 11, - 158, - }, - dictWord{4, 11, 404}, - dictWord{4, 11, 659}, - dictWord{135, 11, 675}, - dictWord{13, 11, 211}, - dictWord{14, 11, 133}, - dictWord{14, 11, 204}, - dictWord{ - 15, - 11, - 64, - }, - dictWord{15, 11, 69}, - dictWord{15, 11, 114}, - dictWord{16, 11, 10}, - dictWord{19, 11, 23}, - dictWord{19, 11, 35}, - dictWord{19, 11, 39}, - dictWord{ - 19, - 11, - 51, - }, - dictWord{19, 11, 71}, - dictWord{19, 11, 75}, - dictWord{152, 11, 15}, - dictWord{4, 10, 78}, - dictWord{5, 10, 96}, - dictWord{5, 10, 182}, - dictWord{7, 10, 1724}, - dictWord{7, 10, 1825}, - dictWord{10, 10, 394}, - dictWord{10, 10, 471}, - dictWord{11, 10, 532}, - dictWord{14, 10, 340}, - dictWord{145, 10, 88}, - dictWord{ - 135, - 10, - 1964, - }, - dictWord{133, 11, 391}, - dictWord{11, 11, 887}, - dictWord{14, 11, 365}, - dictWord{142, 11, 375}, - dictWord{5, 11, 540}, - dictWord{6, 11, 1697}, - dictWord{7, 11, 222}, - dictWord{136, 11, 341}, - dictWord{134, 11, 78}, - dictWord{9, 0, 601}, - dictWord{9, 0, 619}, - dictWord{10, 0, 505}, - dictWord{10, 0, 732}, - dictWord{11, 0, 355}, - dictWord{140, 0, 139}, - dictWord{134, 0, 292}, - dictWord{139, 0, 174}, - dictWord{5, 0, 177}, - dictWord{6, 0, 616}, - dictWord{7, 0, 827}, - dictWord{ - 9, - 0, - 525, - }, - dictWord{138, 0, 656}, - dictWord{10, 0, 31}, - dictWord{6, 10, 215}, - dictWord{7, 10, 1028}, - dictWord{7, 10, 1473}, - dictWord{7, 10, 1721}, - dictWord{ - 9, - 10, - 424, - }, - dictWord{138, 10, 779}, - dictWord{135, 10, 584}, - dictWord{136, 11, 293}, - dictWord{134, 0, 685}, - dictWord{135, 11, 1868}, - dictWord{ - 
133, - 11, - 460, - }, - dictWord{7, 0, 647}, - dictWord{6, 10, 67}, - dictWord{7, 10, 1630}, - dictWord{9, 10, 354}, - dictWord{9, 10, 675}, - dictWord{10, 10, 830}, - dictWord{ - 14, - 10, - 80, - }, - dictWord{145, 10, 80}, - dictWord{4, 0, 161}, - dictWord{133, 0, 631}, - dictWord{6, 10, 141}, - dictWord{7, 10, 225}, - dictWord{9, 10, 59}, - dictWord{9, 10, 607}, - dictWord{10, 10, 312}, - dictWord{11, 10, 687}, - dictWord{12, 10, 555}, - dictWord{13, 10, 373}, - dictWord{13, 10, 494}, - dictWord{148, 10, 58}, - dictWord{ - 7, - 11, - 965, - }, - dictWord{7, 11, 1460}, - dictWord{135, 11, 1604}, - dictWord{136, 10, 783}, - dictWord{134, 11, 388}, - dictWord{6, 0, 722}, - dictWord{6, 0, 1267}, - dictWord{ - 4, - 11, - 511, - }, - dictWord{9, 11, 333}, - dictWord{9, 11, 379}, - dictWord{10, 11, 602}, - dictWord{11, 11, 441}, - dictWord{11, 11, 723}, - dictWord{11, 11, 976}, - dictWord{140, 11, 357}, - dictWord{134, 0, 1797}, - dictWord{135, 0, 1684}, - dictWord{9, 0, 469}, - dictWord{9, 0, 709}, - dictWord{12, 0, 512}, - dictWord{14, 0, 65}, - dictWord{17, 0, 12}, - dictWord{5, 11, 938}, - dictWord{136, 11, 707}, - dictWord{7, 0, 1230}, - dictWord{136, 0, 531}, - dictWord{10, 0, 229}, - dictWord{11, 0, 73}, - dictWord{ - 11, - 0, - 376, - }, - dictWord{139, 0, 433}, - dictWord{12, 0, 268}, - dictWord{12, 0, 640}, - dictWord{142, 0, 119}, - dictWord{7, 10, 430}, - dictWord{139, 10, 46}, - dictWord{ - 6, - 0, - 558, - }, - dictWord{7, 0, 651}, - dictWord{8, 0, 421}, - dictWord{9, 0, 0}, - dictWord{10, 0, 34}, - dictWord{139, 0, 1008}, - dictWord{6, 0, 106}, - dictWord{7, 0, 1786}, - dictWord{7, 0, 1821}, - dictWord{9, 0, 102}, - dictWord{9, 0, 763}, - dictWord{5, 10, 602}, - dictWord{7, 10, 2018}, - dictWord{137, 10, 418}, - dictWord{5, 0, 65}, - dictWord{ - 6, - 0, - 416, - }, - dictWord{7, 0, 1720}, - dictWord{7, 0, 1924}, - dictWord{10, 0, 109}, - dictWord{11, 0, 14}, - dictWord{11, 0, 70}, - dictWord{11, 0, 569}, - dictWord{11, 0, 735}, - dictWord{15, 0, 153}, 
- dictWord{20, 0, 80}, - dictWord{136, 10, 677}, - dictWord{135, 11, 1625}, - dictWord{137, 11, 772}, - dictWord{136, 0, 595}, - dictWord{ - 6, - 11, - 469, - }, - dictWord{7, 11, 1709}, - dictWord{138, 11, 515}, - dictWord{7, 0, 1832}, - dictWord{138, 0, 374}, - dictWord{9, 0, 106}, - dictWord{9, 0, 163}, - dictWord{ - 9, - 0, - 296, - }, - dictWord{10, 0, 167}, - dictWord{10, 0, 172}, - dictWord{10, 0, 777}, - dictWord{139, 0, 16}, - dictWord{6, 0, 6}, - dictWord{7, 0, 81}, - dictWord{7, 0, 771}, - dictWord{ - 7, - 0, - 1731, - }, - dictWord{9, 0, 405}, - dictWord{138, 0, 421}, - dictWord{4, 11, 500}, - dictWord{135, 11, 938}, - dictWord{5, 11, 68}, - dictWord{134, 11, 383}, - dictWord{ - 5, - 0, - 881, - }, - dictWord{133, 0, 885}, - dictWord{6, 0, 854}, - dictWord{6, 0, 1132}, - dictWord{6, 0, 1495}, - dictWord{6, 0, 1526}, - dictWord{6, 0, 1533}, - dictWord{ - 134, - 0, - 1577, - }, - dictWord{4, 11, 337}, - dictWord{6, 11, 353}, - dictWord{7, 11, 1934}, - dictWord{8, 11, 488}, - dictWord{137, 11, 429}, - dictWord{7, 11, 236}, - dictWord{ - 7, - 11, - 1795, - }, - dictWord{8, 11, 259}, - dictWord{9, 11, 135}, - dictWord{9, 11, 177}, - dictWord{10, 11, 825}, - dictWord{11, 11, 115}, - dictWord{11, 11, 370}, - dictWord{ - 11, - 11, - 405, - }, - dictWord{11, 11, 604}, - dictWord{12, 11, 10}, - dictWord{12, 11, 667}, - dictWord{12, 11, 669}, - dictWord{13, 11, 76}, - dictWord{14, 11, 310}, - dictWord{15, 11, 76}, - dictWord{15, 11, 147}, - dictWord{148, 11, 23}, - dictWord{5, 0, 142}, - dictWord{134, 0, 546}, - dictWord{4, 11, 15}, - dictWord{5, 11, 22}, - dictWord{ - 6, - 11, - 244, - }, - dictWord{7, 11, 40}, - dictWord{7, 11, 200}, - dictWord{7, 11, 906}, - dictWord{7, 11, 1199}, - dictWord{9, 11, 616}, - dictWord{10, 11, 716}, - dictWord{ - 11, - 11, - 635, - }, - dictWord{11, 11, 801}, - dictWord{140, 11, 458}, - dictWord{5, 0, 466}, - dictWord{11, 0, 571}, - dictWord{12, 0, 198}, - dictWord{13, 0, 283}, - dictWord{ - 14, - 0, - 186, - }, - dictWord{15, 0, 
21}, - dictWord{15, 0, 103}, - dictWord{135, 10, 329}, - dictWord{4, 0, 185}, - dictWord{5, 0, 257}, - dictWord{5, 0, 839}, - dictWord{5, 0, 936}, - dictWord{9, 0, 399}, - dictWord{10, 0, 258}, - dictWord{10, 0, 395}, - dictWord{10, 0, 734}, - dictWord{11, 0, 1014}, - dictWord{12, 0, 23}, - dictWord{13, 0, 350}, - dictWord{ - 14, - 0, - 150, - }, - dictWord{19, 0, 6}, - dictWord{135, 11, 1735}, - dictWord{12, 11, 36}, - dictWord{141, 11, 337}, - dictWord{5, 11, 598}, - dictWord{7, 11, 791}, - dictWord{ - 8, - 11, - 108, - }, - dictWord{137, 11, 123}, - dictWord{132, 10, 469}, - dictWord{7, 0, 404}, - dictWord{7, 0, 1377}, - dictWord{7, 0, 1430}, - dictWord{7, 0, 2017}, - dictWord{ - 8, - 0, - 149, - }, - dictWord{8, 0, 239}, - dictWord{8, 0, 512}, - dictWord{8, 0, 793}, - dictWord{8, 0, 818}, - dictWord{9, 0, 474}, - dictWord{9, 0, 595}, - dictWord{10, 0, 122}, - dictWord{10, 0, 565}, - dictWord{10, 0, 649}, - dictWord{10, 0, 783}, - dictWord{11, 0, 239}, - dictWord{11, 0, 295}, - dictWord{11, 0, 447}, - dictWord{11, 0, 528}, - dictWord{ - 11, - 0, - 639, - }, - dictWord{11, 0, 800}, - dictWord{12, 0, 25}, - dictWord{12, 0, 77}, - dictWord{12, 0, 157}, - dictWord{12, 0, 256}, - dictWord{12, 0, 316}, - dictWord{12, 0, 390}, - dictWord{12, 0, 391}, - dictWord{12, 0, 395}, - dictWord{12, 0, 478}, - dictWord{12, 0, 503}, - dictWord{12, 0, 592}, - dictWord{12, 0, 680}, - dictWord{13, 0, 50}, - dictWord{13, 0, 53}, - dictWord{13, 0, 132}, - dictWord{13, 0, 198}, - dictWord{13, 0, 322}, - dictWord{13, 0, 415}, - dictWord{13, 0, 511}, - dictWord{14, 0, 71}, - dictWord{ - 14, - 0, - 395, - }, - dictWord{15, 0, 71}, - dictWord{15, 0, 136}, - dictWord{17, 0, 123}, - dictWord{18, 0, 93}, - dictWord{147, 0, 58}, - dictWord{136, 0, 712}, - dictWord{ - 134, - 10, - 1743, - }, - dictWord{5, 10, 929}, - dictWord{6, 10, 340}, - dictWord{8, 10, 376}, - dictWord{136, 10, 807}, - dictWord{6, 0, 1848}, - dictWord{8, 0, 860}, - dictWord{ - 10, - 0, - 856, - }, - dictWord{10, 0, 859}, - 
dictWord{10, 0, 925}, - dictWord{10, 0, 941}, - dictWord{140, 0, 762}, - dictWord{6, 0, 629}, - dictWord{6, 0, 906}, - dictWord{9, 0, 810}, - dictWord{140, 0, 652}, - dictWord{5, 10, 218}, - dictWord{7, 10, 1610}, - dictWord{138, 10, 83}, - dictWord{7, 10, 1512}, - dictWord{135, 10, 1794}, - dictWord{ - 4, - 0, - 377, - }, - dictWord{24, 0, 13}, - dictWord{4, 11, 155}, - dictWord{7, 11, 1689}, - dictWord{11, 10, 0}, - dictWord{144, 10, 78}, - dictWord{4, 11, 164}, - dictWord{5, 11, 151}, - dictWord{5, 11, 730}, - dictWord{5, 11, 741}, - dictWord{7, 11, 498}, - dictWord{7, 11, 870}, - dictWord{7, 11, 1542}, - dictWord{12, 11, 213}, - dictWord{14, 11, 36}, - dictWord{14, 11, 391}, - dictWord{17, 11, 111}, - dictWord{18, 11, 6}, - dictWord{18, 11, 46}, - dictWord{18, 11, 151}, - dictWord{19, 11, 36}, - dictWord{20, 11, 32}, - dictWord{20, 11, 56}, - dictWord{20, 11, 69}, - dictWord{20, 11, 102}, - dictWord{21, 11, 4}, - dictWord{22, 11, 8}, - dictWord{22, 11, 10}, - dictWord{22, 11, 14}, - dictWord{ - 150, - 11, - 31, - }, - dictWord{7, 0, 1842}, - dictWord{133, 10, 571}, - dictWord{4, 10, 455}, - dictWord{4, 11, 624}, - dictWord{135, 11, 1752}, - dictWord{134, 0, 1501}, - dictWord{4, 11, 492}, - dictWord{5, 11, 451}, - dictWord{6, 10, 161}, - dictWord{7, 10, 372}, - dictWord{137, 10, 597}, - dictWord{132, 10, 349}, - dictWord{4, 0, 180}, - dictWord{135, 0, 1906}, - dictWord{135, 11, 835}, - dictWord{141, 11, 70}, - dictWord{132, 0, 491}, - dictWord{137, 10, 751}, - dictWord{6, 10, 432}, - dictWord{ - 139, - 10, - 322, - }, - dictWord{4, 0, 171}, - dictWord{138, 0, 234}, - dictWord{6, 11, 113}, - dictWord{135, 11, 436}, - dictWord{4, 0, 586}, - dictWord{7, 0, 1186}, - dictWord{ - 138, - 0, - 631, - }, - dictWord{5, 10, 468}, - dictWord{10, 10, 325}, - dictWord{11, 10, 856}, - dictWord{12, 10, 345}, - dictWord{143, 10, 104}, - dictWord{5, 10, 223}, - dictWord{10, 11, 592}, - dictWord{10, 11, 753}, - dictWord{12, 11, 317}, - dictWord{12, 11, 355}, - dictWord{12, 11, 
465}, - dictWord{12, 11, 469}, - dictWord{ - 12, - 11, - 560, - }, - dictWord{12, 11, 578}, - dictWord{141, 11, 243}, - dictWord{132, 10, 566}, - dictWord{135, 11, 520}, - dictWord{4, 10, 59}, - dictWord{135, 10, 1394}, - dictWord{6, 10, 436}, - dictWord{139, 10, 481}, - dictWord{9, 0, 931}, - dictWord{10, 0, 334}, - dictWord{20, 0, 71}, - dictWord{4, 10, 48}, - dictWord{5, 10, 271}, - dictWord{ - 7, - 10, - 953, - }, - dictWord{135, 11, 1878}, - dictWord{11, 0, 170}, - dictWord{5, 10, 610}, - dictWord{136, 10, 457}, - dictWord{133, 10, 755}, - dictWord{6, 0, 1587}, - dictWord{135, 10, 1217}, - dictWord{4, 10, 197}, - dictWord{149, 11, 26}, - dictWord{133, 11, 585}, - dictWord{137, 11, 521}, - dictWord{133, 0, 765}, - dictWord{ - 133, - 10, - 217, - }, - dictWord{139, 11, 586}, - dictWord{133, 0, 424}, - dictWord{9, 11, 752}, - dictWord{12, 11, 610}, - dictWord{13, 11, 431}, - dictWord{16, 11, 59}, - dictWord{146, 11, 109}, - dictWord{136, 0, 714}, - dictWord{7, 0, 685}, - dictWord{132, 11, 307}, - dictWord{9, 0, 420}, - dictWord{10, 0, 269}, - dictWord{10, 0, 285}, - dictWord{10, 0, 576}, - dictWord{11, 0, 397}, - dictWord{13, 0, 175}, - dictWord{145, 0, 90}, - dictWord{132, 0, 429}, - dictWord{133, 11, 964}, - dictWord{9, 11, 463}, - dictWord{138, 11, 595}, - dictWord{7, 0, 18}, - dictWord{7, 0, 699}, - dictWord{7, 0, 1966}, - dictWord{8, 0, 752}, - dictWord{9, 0, 273}, - dictWord{9, 0, 412}, - dictWord{ - 9, - 0, - 703, - }, - dictWord{10, 0, 71}, - dictWord{10, 0, 427}, - dictWord{138, 0, 508}, - dictWord{4, 10, 165}, - dictWord{7, 10, 1398}, - dictWord{135, 10, 1829}, - dictWord{ - 4, - 0, - 53, - }, - dictWord{5, 0, 186}, - dictWord{7, 0, 752}, - dictWord{7, 0, 828}, - dictWord{142, 0, 116}, - dictWord{8, 0, 575}, - dictWord{10, 0, 289}, - dictWord{139, 0, 319}, - dictWord{132, 0, 675}, - dictWord{134, 0, 1424}, - dictWord{4, 11, 75}, - dictWord{5, 11, 180}, - dictWord{6, 11, 500}, - dictWord{7, 11, 58}, - dictWord{7, 11, 710}, - dictWord{138, 11, 645}, - 
dictWord{133, 11, 649}, - dictWord{6, 11, 276}, - dictWord{7, 11, 282}, - dictWord{7, 11, 879}, - dictWord{7, 11, 924}, - dictWord{8, 11, 459}, - dictWord{9, 11, 599}, - dictWord{9, 11, 754}, - dictWord{11, 11, 574}, - dictWord{12, 11, 128}, - dictWord{12, 11, 494}, - dictWord{13, 11, 52}, - dictWord{13, 11, 301}, - dictWord{15, 11, 30}, - dictWord{143, 11, 132}, - dictWord{6, 0, 647}, - dictWord{134, 0, 1095}, - dictWord{5, 10, 9}, - dictWord{7, 10, 297}, - dictWord{7, 10, 966}, - dictWord{140, 10, 306}, - dictWord{132, 11, 200}, - dictWord{134, 0, 1334}, - dictWord{5, 10, 146}, - dictWord{6, 10, 411}, - dictWord{138, 10, 721}, - dictWord{ - 6, - 0, - 209, - }, - dictWord{6, 0, 1141}, - dictWord{6, 0, 1288}, - dictWord{8, 0, 468}, - dictWord{9, 0, 210}, - dictWord{11, 0, 36}, - dictWord{12, 0, 28}, - dictWord{12, 0, 630}, - dictWord{13, 0, 21}, - dictWord{13, 0, 349}, - dictWord{14, 0, 7}, - dictWord{145, 0, 13}, - dictWord{6, 10, 177}, - dictWord{135, 10, 467}, - dictWord{4, 0, 342}, - dictWord{ - 135, - 0, - 1179, - }, - dictWord{10, 11, 454}, - dictWord{140, 11, 324}, - dictWord{4, 0, 928}, - dictWord{133, 0, 910}, - dictWord{7, 0, 1838}, - dictWord{6, 11, 225}, - dictWord{ - 137, - 11, - 211, - }, - dictWord{16, 0, 101}, - dictWord{20, 0, 115}, - dictWord{20, 0, 118}, - dictWord{148, 0, 122}, - dictWord{4, 0, 496}, - dictWord{135, 0, 856}, - dictWord{ - 4, - 0, - 318, - }, - dictWord{11, 0, 654}, - dictWord{7, 11, 718}, - dictWord{139, 11, 102}, - dictWord{8, 11, 58}, - dictWord{9, 11, 724}, - dictWord{11, 11, 809}, - dictWord{ - 13, - 11, - 113, - }, - dictWord{145, 11, 72}, - dictWord{5, 10, 200}, - dictWord{6, 11, 345}, - dictWord{135, 11, 1247}, - dictWord{8, 11, 767}, - dictWord{8, 11, 803}, - dictWord{ - 9, - 11, - 301, - }, - dictWord{137, 11, 903}, - dictWord{7, 0, 915}, - dictWord{8, 0, 247}, - dictWord{19, 0, 0}, - dictWord{7, 11, 1949}, - dictWord{136, 11, 674}, - dictWord{ - 4, - 0, - 202, - }, - dictWord{5, 0, 382}, - dictWord{6, 0, 454}, - 
dictWord{7, 0, 936}, - dictWord{7, 0, 1803}, - dictWord{8, 0, 758}, - dictWord{9, 0, 375}, - dictWord{9, 0, 895}, - dictWord{ - 10, - 0, - 743, - }, - dictWord{10, 0, 792}, - dictWord{11, 0, 978}, - dictWord{11, 0, 1012}, - dictWord{142, 0, 109}, - dictWord{7, 0, 1150}, - dictWord{7, 0, 1425}, - dictWord{ - 7, - 0, - 1453, - }, - dictWord{140, 0, 513}, - dictWord{134, 11, 259}, - dictWord{138, 0, 791}, - dictWord{11, 0, 821}, - dictWord{12, 0, 110}, - dictWord{12, 0, 153}, - dictWord{ - 18, - 0, - 41, - }, - dictWord{150, 0, 19}, - dictWord{134, 10, 481}, - dictWord{132, 0, 796}, - dictWord{6, 0, 445}, - dictWord{9, 0, 909}, - dictWord{136, 11, 254}, - dictWord{ - 10, - 0, - 776, - }, - dictWord{13, 0, 345}, - dictWord{142, 0, 425}, - dictWord{4, 10, 84}, - dictWord{7, 10, 1482}, - dictWord{10, 10, 76}, - dictWord{138, 10, 142}, - dictWord{ - 135, - 11, - 742, - }, - dictWord{6, 0, 578}, - dictWord{133, 10, 1015}, - dictWord{6, 0, 1387}, - dictWord{4, 10, 315}, - dictWord{5, 10, 507}, - dictWord{135, 10, 1370}, - dictWord{4, 0, 438}, - dictWord{133, 0, 555}, - dictWord{136, 0, 766}, - dictWord{133, 11, 248}, - dictWord{134, 10, 1722}, - dictWord{4, 11, 116}, - dictWord{5, 11, 95}, - dictWord{5, 11, 445}, - dictWord{7, 11, 1688}, - dictWord{8, 11, 29}, - dictWord{9, 11, 272}, - dictWord{11, 11, 509}, - dictWord{139, 11, 915}, - dictWord{135, 0, 541}, - dictWord{133, 11, 543}, - dictWord{8, 10, 222}, - dictWord{8, 10, 476}, - dictWord{9, 10, 238}, - dictWord{11, 10, 516}, - dictWord{11, 10, 575}, - dictWord{ - 15, - 10, - 109, - }, - dictWord{146, 10, 100}, - dictWord{6, 0, 880}, - dictWord{134, 0, 1191}, - dictWord{5, 11, 181}, - dictWord{136, 11, 41}, - dictWord{134, 0, 1506}, - dictWord{132, 11, 681}, - dictWord{7, 11, 25}, - dictWord{8, 11, 202}, - dictWord{138, 11, 536}, - dictWord{139, 0, 983}, - dictWord{137, 0, 768}, - dictWord{132, 0, 584}, - dictWord{9, 11, 423}, - dictWord{140, 11, 89}, - dictWord{8, 11, 113}, - dictWord{9, 11, 877}, - dictWord{10, 11, 
554}, - dictWord{11, 11, 83}, - dictWord{12, 11, 136}, - dictWord{147, 11, 109}, - dictWord{7, 10, 706}, - dictWord{7, 10, 1058}, - dictWord{138, 10, 538}, - dictWord{133, 11, 976}, - dictWord{4, 11, 206}, - dictWord{ - 135, - 11, - 746, - }, - dictWord{136, 11, 526}, - dictWord{140, 0, 737}, - dictWord{11, 10, 92}, - dictWord{11, 10, 196}, - dictWord{11, 10, 409}, - dictWord{11, 10, 450}, - dictWord{11, 10, 666}, - dictWord{11, 10, 777}, - dictWord{12, 10, 262}, - dictWord{13, 10, 385}, - dictWord{13, 10, 393}, - dictWord{15, 10, 115}, - dictWord{ - 16, - 10, - 45, - }, - dictWord{145, 10, 82}, - dictWord{4, 0, 226}, - dictWord{4, 0, 326}, - dictWord{7, 0, 1770}, - dictWord{4, 11, 319}, - dictWord{5, 11, 699}, - dictWord{138, 11, 673}, - dictWord{6, 10, 40}, - dictWord{135, 10, 1781}, - dictWord{5, 0, 426}, - dictWord{8, 0, 30}, - dictWord{9, 0, 2}, - dictWord{11, 0, 549}, - dictWord{147, 0, 122}, - dictWord{ - 6, - 0, - 1161, - }, - dictWord{134, 0, 1329}, - dictWord{138, 10, 97}, - dictWord{6, 10, 423}, - dictWord{7, 10, 665}, - dictWord{135, 10, 1210}, - dictWord{7, 11, 13}, - dictWord{ - 8, - 11, - 226, - }, - dictWord{10, 11, 537}, - dictWord{11, 11, 570}, - dictWord{11, 11, 605}, - dictWord{11, 11, 799}, - dictWord{11, 11, 804}, - dictWord{12, 11, 85}, - dictWord{12, 11, 516}, - dictWord{12, 11, 623}, - dictWord{13, 11, 112}, - dictWord{13, 11, 361}, - dictWord{14, 11, 77}, - dictWord{14, 11, 78}, - dictWord{17, 11, 28}, - dictWord{147, 11, 110}, - dictWord{132, 11, 769}, - dictWord{132, 11, 551}, - dictWord{132, 11, 728}, - dictWord{147, 0, 117}, - dictWord{9, 11, 57}, - dictWord{ - 9, - 11, - 459, - }, - dictWord{10, 11, 425}, - dictWord{11, 11, 119}, - dictWord{12, 11, 184}, - dictWord{12, 11, 371}, - dictWord{13, 11, 358}, - dictWord{145, 11, 51}, - dictWord{ - 5, - 11, - 188, - }, - dictWord{5, 11, 814}, - dictWord{8, 11, 10}, - dictWord{9, 11, 421}, - dictWord{9, 11, 729}, - dictWord{10, 11, 609}, - dictWord{139, 11, 689}, - dictWord{134, 11, 624}, - 
dictWord{135, 11, 298}, - dictWord{135, 0, 462}, - dictWord{4, 0, 345}, - dictWord{139, 10, 624}, - dictWord{136, 10, 574}, - dictWord{ - 4, - 0, - 385, - }, - dictWord{7, 0, 265}, - dictWord{135, 0, 587}, - dictWord{6, 0, 808}, - dictWord{132, 11, 528}, - dictWord{133, 0, 398}, - dictWord{132, 10, 354}, - dictWord{ - 4, - 0, - 347, - }, - dictWord{5, 0, 423}, - dictWord{5, 0, 996}, - dictWord{135, 0, 1329}, - dictWord{135, 10, 1558}, - dictWord{7, 0, 1259}, - dictWord{9, 0, 125}, - dictWord{ - 139, - 0, - 65, - }, - dictWord{5, 0, 136}, - dictWord{6, 0, 136}, - dictWord{136, 0, 644}, - dictWord{5, 11, 104}, - dictWord{6, 11, 173}, - dictWord{135, 11, 1631}, - dictWord{ - 135, - 0, - 469, - }, - dictWord{133, 10, 830}, - dictWord{4, 0, 278}, - dictWord{5, 0, 465}, - dictWord{135, 0, 1367}, - dictWord{7, 11, 810}, - dictWord{8, 11, 138}, - dictWord{ - 8, - 11, - 342, - }, - dictWord{9, 11, 84}, - dictWord{10, 11, 193}, - dictWord{11, 11, 883}, - dictWord{140, 11, 359}, - dictWord{5, 10, 496}, - dictWord{135, 10, 203}, - dictWord{ - 4, - 0, - 433, - }, - dictWord{133, 0, 719}, - dictWord{6, 11, 95}, - dictWord{134, 10, 547}, - dictWord{5, 10, 88}, - dictWord{137, 10, 239}, - dictWord{6, 11, 406}, - dictWord{ - 10, - 11, - 409, - }, - dictWord{10, 11, 447}, - dictWord{11, 11, 44}, - dictWord{140, 11, 100}, - dictWord{134, 0, 1423}, - dictWord{7, 10, 650}, - dictWord{135, 10, 1310}, - dictWord{134, 0, 749}, - dictWord{135, 11, 1243}, - dictWord{135, 0, 1363}, - dictWord{6, 0, 381}, - dictWord{7, 0, 645}, - dictWord{7, 0, 694}, - dictWord{8, 0, 546}, - dictWord{7, 10, 1076}, - dictWord{9, 10, 80}, - dictWord{11, 10, 78}, - dictWord{11, 10, 421}, - dictWord{11, 10, 534}, - dictWord{140, 10, 545}, - dictWord{ - 134, - 11, - 1636, - }, - dictWord{135, 11, 1344}, - dictWord{12, 0, 277}, - dictWord{7, 10, 274}, - dictWord{11, 10, 479}, - dictWord{139, 10, 507}, - dictWord{6, 0, 705}, - dictWord{ - 6, - 0, - 783, - }, - dictWord{6, 0, 1275}, - dictWord{6, 0, 1481}, - 
dictWord{4, 11, 282}, - dictWord{7, 11, 1034}, - dictWord{11, 11, 398}, - dictWord{11, 11, 634}, - dictWord{ - 12, - 11, - 1, - }, - dictWord{12, 11, 79}, - dictWord{12, 11, 544}, - dictWord{14, 11, 237}, - dictWord{17, 11, 10}, - dictWord{146, 11, 20}, - dictWord{134, 0, 453}, - dictWord{ - 4, - 0, - 555, - }, - dictWord{8, 0, 536}, - dictWord{10, 0, 288}, - dictWord{11, 0, 1005}, - dictWord{4, 10, 497}, - dictWord{135, 10, 1584}, - dictWord{5, 11, 118}, - dictWord{ - 5, - 11, - 499, - }, - dictWord{6, 11, 476}, - dictWord{7, 11, 600}, - dictWord{7, 11, 888}, - dictWord{135, 11, 1096}, - dictWord{138, 0, 987}, - dictWord{7, 0, 1107}, - dictWord{ - 7, - 10, - 261, - }, - dictWord{7, 10, 1115}, - dictWord{7, 10, 1354}, - dictWord{7, 10, 1588}, - dictWord{7, 10, 1705}, - dictWord{7, 10, 1902}, - dictWord{9, 10, 465}, - dictWord{10, 10, 248}, - dictWord{10, 10, 349}, - dictWord{10, 10, 647}, - dictWord{11, 10, 527}, - dictWord{11, 10, 660}, - dictWord{11, 10, 669}, - dictWord{ - 12, - 10, - 529, - }, - dictWord{141, 10, 305}, - dictWord{7, 11, 296}, - dictWord{7, 11, 596}, - dictWord{8, 11, 560}, - dictWord{8, 11, 586}, - dictWord{9, 11, 612}, - dictWord{ - 11, - 11, - 100, - }, - dictWord{11, 11, 304}, - dictWord{12, 11, 46}, - dictWord{13, 11, 89}, - dictWord{14, 11, 112}, - dictWord{145, 11, 122}, - dictWord{9, 0, 370}, - dictWord{ - 138, - 0, - 90, - }, - dictWord{136, 10, 13}, - dictWord{132, 0, 860}, - dictWord{7, 10, 642}, - dictWord{8, 10, 250}, - dictWord{11, 10, 123}, - dictWord{11, 10, 137}, - dictWord{ - 13, - 10, - 48, - }, - dictWord{142, 10, 95}, - dictWord{135, 10, 1429}, - dictWord{137, 11, 321}, - dictWord{132, 0, 257}, - dictWord{135, 0, 2031}, - dictWord{7, 0, 1768}, - dictWord{7, 11, 1599}, - dictWord{7, 11, 1723}, - dictWord{8, 11, 79}, - dictWord{8, 11, 106}, - dictWord{8, 11, 190}, - dictWord{8, 11, 302}, - dictWord{8, 11, 383}, - dictWord{9, 11, 119}, - dictWord{9, 11, 233}, - dictWord{9, 11, 298}, - dictWord{9, 11, 419}, - dictWord{9, 11, 
471}, - dictWord{10, 11, 181}, - dictWord{10, 11, 406}, - dictWord{11, 11, 57}, - dictWord{11, 11, 85}, - dictWord{11, 11, 120}, - dictWord{11, 11, 177}, - dictWord{11, 11, 296}, - dictWord{11, 11, 382}, - dictWord{11, 11, 454}, - dictWord{11, 11, 758}, - dictWord{11, 11, 999}, - dictWord{12, 11, 27}, - dictWord{12, 11, 98}, - dictWord{12, 11, 131}, - dictWord{12, 11, 245}, - dictWord{ - 12, - 11, - 312, - }, - dictWord{12, 11, 446}, - dictWord{12, 11, 454}, - dictWord{13, 11, 25}, - dictWord{13, 11, 98}, - dictWord{13, 11, 426}, - dictWord{13, 11, 508}, - dictWord{ - 14, - 11, - 6, - }, - dictWord{14, 11, 163}, - dictWord{14, 11, 272}, - dictWord{14, 11, 277}, - dictWord{14, 11, 370}, - dictWord{15, 11, 95}, - dictWord{15, 11, 138}, - dictWord{ - 15, - 11, - 167, - }, - dictWord{17, 11, 18}, - dictWord{17, 11, 38}, - dictWord{20, 11, 96}, - dictWord{149, 11, 32}, - dictWord{5, 11, 722}, - dictWord{134, 11, 1759}, - dictWord{145, 11, 16}, - dictWord{6, 0, 1071}, - dictWord{134, 0, 1561}, - dictWord{10, 10, 545}, - dictWord{140, 10, 301}, - dictWord{6, 0, 83}, - dictWord{6, 0, 1733}, - dictWord{135, 0, 1389}, - dictWord{4, 0, 835}, - dictWord{135, 0, 1818}, - dictWord{133, 11, 258}, - dictWord{4, 10, 904}, - dictWord{133, 10, 794}, - dictWord{ - 134, - 0, - 2006, - }, - dictWord{5, 11, 30}, - dictWord{7, 11, 495}, - dictWord{8, 11, 134}, - dictWord{9, 11, 788}, - dictWord{140, 11, 438}, - dictWord{135, 11, 2004}, - dictWord{ - 137, - 0, - 696, - }, - dictWord{5, 11, 50}, - dictWord{6, 11, 439}, - dictWord{7, 11, 780}, - dictWord{135, 11, 1040}, - dictWord{7, 11, 772}, - dictWord{7, 11, 1104}, - dictWord{ - 7, - 11, - 1647, - }, - dictWord{11, 11, 269}, - dictWord{11, 11, 539}, - dictWord{11, 11, 607}, - dictWord{11, 11, 627}, - dictWord{11, 11, 706}, - dictWord{11, 11, 975}, - dictWord{12, 11, 248}, - dictWord{12, 11, 311}, - dictWord{12, 11, 434}, - dictWord{12, 11, 600}, - dictWord{12, 11, 622}, - dictWord{13, 11, 297}, - dictWord{ - 13, - 11, - 367, - }, - 
dictWord{13, 11, 485}, - dictWord{14, 11, 69}, - dictWord{14, 11, 409}, - dictWord{143, 11, 108}, - dictWord{5, 11, 1}, - dictWord{6, 11, 81}, - dictWord{ - 138, - 11, - 520, - }, - dictWord{7, 0, 1718}, - dictWord{9, 0, 95}, - dictWord{9, 0, 274}, - dictWord{10, 0, 279}, - dictWord{10, 0, 317}, - dictWord{10, 0, 420}, - dictWord{11, 0, 303}, - dictWord{11, 0, 808}, - dictWord{12, 0, 134}, - dictWord{12, 0, 367}, - dictWord{13, 0, 149}, - dictWord{13, 0, 347}, - dictWord{14, 0, 349}, - dictWord{14, 0, 406}, - dictWord{ - 18, - 0, - 22, - }, - dictWord{18, 0, 89}, - dictWord{18, 0, 122}, - dictWord{147, 0, 47}, - dictWord{5, 11, 482}, - dictWord{8, 11, 98}, - dictWord{9, 11, 172}, - dictWord{10, 11, 222}, - dictWord{10, 11, 700}, - dictWord{10, 11, 822}, - dictWord{11, 11, 302}, - dictWord{11, 11, 778}, - dictWord{12, 11, 50}, - dictWord{12, 11, 127}, - dictWord{ - 12, - 11, - 396, - }, - dictWord{13, 11, 62}, - dictWord{13, 11, 328}, - dictWord{14, 11, 122}, - dictWord{147, 11, 72}, - dictWord{7, 10, 386}, - dictWord{138, 10, 713}, - dictWord{ - 6, - 10, - 7, - }, - dictWord{6, 10, 35}, - dictWord{7, 10, 147}, - dictWord{7, 10, 1069}, - dictWord{7, 10, 1568}, - dictWord{7, 10, 1575}, - dictWord{7, 10, 1917}, - dictWord{ - 8, - 10, - 43, - }, - dictWord{8, 10, 208}, - dictWord{9, 10, 128}, - dictWord{9, 10, 866}, - dictWord{10, 10, 20}, - dictWord{11, 10, 981}, - dictWord{147, 10, 33}, - dictWord{ - 133, - 0, - 26, - }, - dictWord{132, 0, 550}, - dictWord{5, 11, 2}, - dictWord{7, 11, 1494}, - dictWord{136, 11, 589}, - dictWord{6, 11, 512}, - dictWord{7, 11, 797}, - dictWord{ - 8, - 11, - 253, - }, - dictWord{9, 11, 77}, - dictWord{10, 11, 1}, - dictWord{10, 11, 129}, - dictWord{10, 11, 225}, - dictWord{11, 11, 118}, - dictWord{11, 11, 226}, - dictWord{ - 11, - 11, - 251, - }, - dictWord{11, 11, 430}, - dictWord{11, 11, 701}, - dictWord{11, 11, 974}, - dictWord{11, 11, 982}, - dictWord{12, 11, 64}, - dictWord{12, 11, 260}, - dictWord{ - 12, - 11, - 488, - }, - 
dictWord{140, 11, 690}, - dictWord{7, 10, 893}, - dictWord{141, 10, 424}, - dictWord{134, 0, 901}, - dictWord{136, 0, 822}, - dictWord{4, 0, 902}, - dictWord{5, 0, 809}, - dictWord{134, 0, 122}, - dictWord{6, 0, 807}, - dictWord{134, 0, 1366}, - dictWord{7, 0, 262}, - dictWord{5, 11, 748}, - dictWord{134, 11, 553}, - dictWord{133, 0, 620}, - dictWord{4, 0, 34}, - dictWord{5, 0, 574}, - dictWord{7, 0, 279}, - dictWord{7, 0, 1624}, - dictWord{136, 0, 601}, - dictWord{9, 0, 170}, - dictWord{ - 6, - 10, - 322, - }, - dictWord{9, 10, 552}, - dictWord{11, 10, 274}, - dictWord{13, 10, 209}, - dictWord{13, 10, 499}, - dictWord{14, 10, 85}, - dictWord{15, 10, 126}, - dictWord{ - 145, - 10, - 70, - }, - dictWord{132, 0, 537}, - dictWord{4, 11, 12}, - dictWord{7, 11, 420}, - dictWord{7, 11, 522}, - dictWord{7, 11, 809}, - dictWord{8, 11, 797}, - dictWord{ - 141, - 11, - 88, - }, - dictWord{133, 0, 332}, - dictWord{8, 10, 83}, - dictWord{8, 10, 742}, - dictWord{8, 10, 817}, - dictWord{9, 10, 28}, - dictWord{9, 10, 29}, - dictWord{9, 10, 885}, - dictWord{10, 10, 387}, - dictWord{11, 10, 633}, - dictWord{11, 10, 740}, - dictWord{13, 10, 235}, - dictWord{13, 10, 254}, - dictWord{15, 10, 143}, - dictWord{ - 143, - 10, - 146, - }, - dictWord{6, 0, 1909}, - dictWord{9, 0, 964}, - dictWord{12, 0, 822}, - dictWord{12, 0, 854}, - dictWord{12, 0, 865}, - dictWord{12, 0, 910}, - dictWord{12, 0, 938}, - dictWord{15, 0, 169}, - dictWord{15, 0, 208}, - dictWord{15, 0, 211}, - dictWord{18, 0, 205}, - dictWord{18, 0, 206}, - dictWord{18, 0, 220}, - dictWord{18, 0, 223}, - dictWord{152, 0, 24}, - dictWord{140, 10, 49}, - dictWord{5, 11, 528}, - dictWord{135, 11, 1580}, - dictWord{6, 0, 261}, - dictWord{8, 0, 182}, - dictWord{139, 0, 943}, - dictWord{134, 0, 1721}, - dictWord{4, 0, 933}, - dictWord{133, 0, 880}, - dictWord{136, 11, 321}, - dictWord{5, 11, 266}, - dictWord{9, 11, 290}, - dictWord{9, 11, 364}, - dictWord{10, 11, 293}, - dictWord{11, 11, 606}, - dictWord{142, 11, 45}, - 
dictWord{6, 0, 1609}, - dictWord{4, 11, 50}, - dictWord{6, 11, 510}, - dictWord{6, 11, 594}, - dictWord{9, 11, 121}, - dictWord{10, 11, 49}, - dictWord{10, 11, 412}, - dictWord{139, 11, 834}, - dictWord{7, 0, 895}, - dictWord{136, 11, 748}, - dictWord{132, 11, 466}, - dictWord{4, 10, 110}, - dictWord{10, 10, 415}, - dictWord{10, 10, 597}, - dictWord{142, 10, 206}, - dictWord{133, 0, 812}, - dictWord{135, 11, 281}, - dictWord{ - 6, - 0, - 1890, - }, - dictWord{6, 0, 1902}, - dictWord{6, 0, 1916}, - dictWord{9, 0, 929}, - dictWord{9, 0, 942}, - dictWord{9, 0, 975}, - dictWord{9, 0, 984}, - dictWord{9, 0, 986}, - dictWord{ - 9, - 0, - 1011, - }, - dictWord{9, 0, 1019}, - dictWord{12, 0, 804}, - dictWord{12, 0, 851}, - dictWord{12, 0, 867}, - dictWord{12, 0, 916}, - dictWord{12, 0, 923}, - dictWord{ - 15, - 0, - 194, - }, - dictWord{15, 0, 204}, - dictWord{15, 0, 210}, - dictWord{15, 0, 222}, - dictWord{15, 0, 223}, - dictWord{15, 0, 229}, - dictWord{15, 0, 250}, - dictWord{ - 18, - 0, - 179, - }, - dictWord{18, 0, 186}, - dictWord{18, 0, 192}, - dictWord{7, 10, 205}, - dictWord{135, 10, 2000}, - dictWord{132, 11, 667}, - dictWord{135, 0, 778}, - dictWord{ - 4, - 0, - 137, - }, - dictWord{7, 0, 1178}, - dictWord{135, 0, 1520}, - dictWord{134, 0, 1314}, - dictWord{4, 11, 242}, - dictWord{134, 11, 333}, - dictWord{6, 0, 1661}, - dictWord{7, 0, 1975}, - dictWord{7, 0, 2009}, - dictWord{135, 0, 2011}, - dictWord{134, 0, 1591}, - dictWord{4, 10, 283}, - dictWord{135, 10, 1194}, - dictWord{ - 11, - 0, - 820, - }, - dictWord{150, 0, 51}, - dictWord{4, 11, 39}, - dictWord{5, 11, 36}, - dictWord{7, 11, 1843}, - dictWord{8, 11, 407}, - dictWord{11, 11, 144}, - dictWord{ - 140, - 11, - 523, - }, - dictWord{134, 10, 1720}, - dictWord{4, 11, 510}, - dictWord{7, 11, 29}, - dictWord{7, 11, 66}, - dictWord{7, 11, 1980}, - dictWord{10, 11, 487}, - dictWord{ - 10, - 11, - 809, - }, - dictWord{146, 11, 9}, - dictWord{5, 0, 89}, - dictWord{7, 0, 1915}, - dictWord{9, 0, 185}, - dictWord{9, 
0, 235}, - dictWord{10, 0, 64}, - dictWord{10, 0, 270}, - dictWord{10, 0, 403}, - dictWord{10, 0, 469}, - dictWord{10, 0, 529}, - dictWord{10, 0, 590}, - dictWord{11, 0, 140}, - dictWord{11, 0, 860}, - dictWord{13, 0, 1}, - dictWord{ - 13, - 0, - 422, - }, - dictWord{14, 0, 341}, - dictWord{14, 0, 364}, - dictWord{17, 0, 93}, - dictWord{18, 0, 113}, - dictWord{19, 0, 97}, - dictWord{147, 0, 113}, - dictWord{133, 0, 695}, - dictWord{6, 0, 987}, - dictWord{134, 0, 1160}, - dictWord{5, 0, 6}, - dictWord{6, 0, 183}, - dictWord{7, 0, 680}, - dictWord{7, 0, 978}, - dictWord{7, 0, 1013}, - dictWord{ - 7, - 0, - 1055, - }, - dictWord{12, 0, 230}, - dictWord{13, 0, 172}, - dictWord{146, 0, 29}, - dictWord{134, 11, 570}, - dictWord{132, 11, 787}, - dictWord{134, 11, 518}, - dictWord{ - 6, - 0, - 29, - }, - dictWord{139, 0, 63}, - dictWord{132, 11, 516}, - dictWord{136, 11, 821}, - dictWord{132, 0, 311}, - dictWord{134, 0, 1740}, - dictWord{7, 0, 170}, - dictWord{8, 0, 90}, - dictWord{8, 0, 177}, - dictWord{8, 0, 415}, - dictWord{11, 0, 714}, - dictWord{14, 0, 281}, - dictWord{136, 10, 735}, - dictWord{134, 0, 1961}, - dictWord{ - 135, - 11, - 1405, - }, - dictWord{4, 11, 10}, - dictWord{7, 11, 917}, - dictWord{139, 11, 786}, - dictWord{5, 10, 132}, - dictWord{9, 10, 486}, - dictWord{9, 10, 715}, - dictWord{ - 10, - 10, - 458, - }, - dictWord{11, 10, 373}, - dictWord{11, 10, 668}, - dictWord{11, 10, 795}, - dictWord{11, 10, 897}, - dictWord{12, 10, 272}, - dictWord{12, 10, 424}, - dictWord{12, 10, 539}, - dictWord{12, 10, 558}, - dictWord{14, 10, 245}, - dictWord{14, 10, 263}, - dictWord{14, 10, 264}, - dictWord{14, 10, 393}, - dictWord{ - 142, - 10, - 403, - }, - dictWord{11, 0, 91}, - dictWord{13, 0, 129}, - dictWord{15, 0, 101}, - dictWord{145, 0, 125}, - dictWord{135, 0, 1132}, - dictWord{4, 0, 494}, - dictWord{6, 0, 74}, - dictWord{7, 0, 44}, - dictWord{7, 0, 407}, - dictWord{12, 0, 17}, - dictWord{15, 0, 5}, - dictWord{148, 0, 11}, - dictWord{133, 10, 379}, - 
dictWord{5, 0, 270}, - dictWord{ - 5, - 11, - 684, - }, - dictWord{6, 10, 89}, - dictWord{6, 10, 400}, - dictWord{7, 10, 1569}, - dictWord{7, 10, 1623}, - dictWord{7, 10, 1850}, - dictWord{8, 10, 218}, - dictWord{ - 8, - 10, - 422, - }, - dictWord{9, 10, 570}, - dictWord{138, 10, 626}, - dictWord{4, 0, 276}, - dictWord{133, 0, 296}, - dictWord{6, 0, 1523}, - dictWord{134, 11, 27}, - dictWord{ - 6, - 10, - 387, - }, - dictWord{7, 10, 882}, - dictWord{141, 10, 111}, - dictWord{6, 10, 224}, - dictWord{7, 10, 877}, - dictWord{137, 10, 647}, - dictWord{135, 10, 790}, - dictWord{ - 4, - 0, - 7, - }, - dictWord{5, 0, 90}, - dictWord{5, 0, 158}, - dictWord{6, 0, 542}, - dictWord{7, 0, 221}, - dictWord{7, 0, 1574}, - dictWord{9, 0, 490}, - dictWord{10, 0, 540}, - dictWord{ - 11, - 0, - 443, - }, - dictWord{139, 0, 757}, - dictWord{7, 0, 588}, - dictWord{9, 0, 175}, - dictWord{138, 0, 530}, - dictWord{135, 10, 394}, - dictWord{142, 11, 23}, - dictWord{ - 134, - 0, - 786, - }, - dictWord{135, 0, 580}, - dictWord{7, 0, 88}, - dictWord{136, 0, 627}, - dictWord{5, 0, 872}, - dictWord{6, 0, 57}, - dictWord{7, 0, 471}, - dictWord{9, 0, 447}, - dictWord{137, 0, 454}, - dictWord{6, 11, 342}, - dictWord{6, 11, 496}, - dictWord{8, 11, 275}, - dictWord{137, 11, 206}, - dictWord{4, 11, 909}, - dictWord{133, 11, 940}, - dictWord{6, 0, 735}, - dictWord{132, 11, 891}, - dictWord{8, 0, 845}, - dictWord{8, 0, 916}, - dictWord{135, 10, 1409}, - dictWord{5, 0, 31}, - dictWord{134, 0, 614}, - dictWord{11, 0, 458}, - dictWord{12, 0, 15}, - dictWord{140, 0, 432}, - dictWord{8, 0, 330}, - dictWord{140, 0, 477}, - dictWord{4, 0, 530}, - dictWord{5, 0, 521}, - dictWord{ - 7, - 0, - 1200, - }, - dictWord{10, 0, 460}, - dictWord{132, 11, 687}, - dictWord{6, 0, 424}, - dictWord{135, 0, 1866}, - dictWord{9, 0, 569}, - dictWord{12, 0, 12}, - dictWord{ - 12, - 0, - 81, - }, - dictWord{12, 0, 319}, - dictWord{13, 0, 69}, - dictWord{14, 0, 259}, - dictWord{16, 0, 87}, - dictWord{17, 0, 1}, - dictWord{17, 0, 
21}, - dictWord{17, 0, 24}, - dictWord{ - 18, - 0, - 15, - }, - dictWord{18, 0, 56}, - dictWord{18, 0, 59}, - dictWord{18, 0, 127}, - dictWord{18, 0, 154}, - dictWord{19, 0, 19}, - dictWord{148, 0, 31}, - dictWord{7, 0, 1302}, - dictWord{136, 10, 38}, - dictWord{134, 11, 253}, - dictWord{5, 10, 261}, - dictWord{7, 10, 78}, - dictWord{7, 10, 199}, - dictWord{8, 10, 815}, - dictWord{9, 10, 126}, - dictWord{138, 10, 342}, - dictWord{5, 0, 595}, - dictWord{135, 0, 1863}, - dictWord{6, 11, 41}, - dictWord{141, 11, 160}, - dictWord{5, 0, 13}, - dictWord{134, 0, 142}, - dictWord{6, 0, 97}, - dictWord{7, 0, 116}, - dictWord{8, 0, 322}, - dictWord{8, 0, 755}, - dictWord{9, 0, 548}, - dictWord{10, 0, 714}, - dictWord{11, 0, 884}, - dictWord{13, 0, 324}, - dictWord{7, 11, 1304}, - dictWord{138, 11, 477}, - dictWord{132, 10, 628}, - dictWord{134, 11, 1718}, - dictWord{7, 10, 266}, - dictWord{136, 10, 804}, - dictWord{135, 10, 208}, - dictWord{7, 0, 1021}, - dictWord{6, 10, 79}, - dictWord{135, 10, 1519}, - dictWord{7, 0, 1472}, - dictWord{135, 0, 1554}, - dictWord{6, 11, 362}, - dictWord{146, 11, 51}, - dictWord{7, 0, 1071}, - dictWord{7, 0, 1541}, - dictWord{7, 0, 1767}, - dictWord{7, 0, 1806}, - dictWord{11, 0, 162}, - dictWord{11, 0, 242}, - dictWord{11, 0, 452}, - dictWord{12, 0, 605}, - dictWord{15, 0, 26}, - dictWord{144, 0, 44}, - dictWord{136, 10, 741}, - dictWord{133, 11, 115}, - dictWord{145, 0, 115}, - dictWord{134, 10, 376}, - dictWord{6, 0, 1406}, - dictWord{134, 0, 1543}, - dictWord{5, 11, 193}, - dictWord{12, 11, 178}, - dictWord{13, 11, 130}, - dictWord{ - 145, - 11, - 84, - }, - dictWord{135, 0, 1111}, - dictWord{8, 0, 1}, - dictWord{9, 0, 650}, - dictWord{10, 0, 326}, - dictWord{5, 11, 705}, - dictWord{137, 11, 606}, - dictWord{5, 0, 488}, - dictWord{6, 0, 527}, - dictWord{7, 0, 489}, - dictWord{7, 0, 1636}, - dictWord{8, 0, 121}, - dictWord{8, 0, 144}, - dictWord{8, 0, 359}, - dictWord{9, 0, 193}, - dictWord{9, 0, 241}, - dictWord{9, 0, 336}, - dictWord{9, 
0, 882}, - dictWord{11, 0, 266}, - dictWord{11, 0, 372}, - dictWord{11, 0, 944}, - dictWord{12, 0, 401}, - dictWord{140, 0, 641}, - dictWord{135, 11, 174}, - dictWord{6, 0, 267}, - dictWord{7, 10, 244}, - dictWord{7, 10, 632}, - dictWord{7, 10, 1609}, - dictWord{8, 10, 178}, - dictWord{8, 10, 638}, - dictWord{141, 10, 58}, - dictWord{134, 0, 1983}, - dictWord{134, 0, 1155}, - dictWord{134, 0, 1575}, - dictWord{134, 0, 1438}, - dictWord{9, 0, 31}, - dictWord{ - 10, - 0, - 244, - }, - dictWord{10, 0, 699}, - dictWord{12, 0, 149}, - dictWord{141, 0, 497}, - dictWord{133, 0, 377}, - dictWord{4, 11, 122}, - dictWord{5, 11, 796}, - dictWord{ - 5, - 11, - 952, - }, - dictWord{6, 11, 1660}, - dictWord{6, 11, 1671}, - dictWord{8, 11, 567}, - dictWord{9, 11, 687}, - dictWord{9, 11, 742}, - dictWord{10, 11, 686}, - dictWord{ - 11, - 11, - 356, - }, - dictWord{11, 11, 682}, - dictWord{140, 11, 281}, - dictWord{145, 0, 101}, - dictWord{11, 11, 0}, - dictWord{144, 11, 78}, - dictWord{5, 11, 179}, - dictWord{ - 5, - 10, - 791, - }, - dictWord{7, 11, 1095}, - dictWord{135, 11, 1213}, - dictWord{8, 11, 372}, - dictWord{9, 11, 122}, - dictWord{138, 11, 175}, - dictWord{7, 10, 686}, - dictWord{8, 10, 33}, - dictWord{8, 10, 238}, - dictWord{10, 10, 616}, - dictWord{11, 10, 467}, - dictWord{11, 10, 881}, - dictWord{13, 10, 217}, - dictWord{13, 10, 253}, - dictWord{142, 10, 268}, - dictWord{9, 0, 476}, - dictWord{4, 11, 66}, - dictWord{7, 11, 722}, - dictWord{135, 11, 904}, - dictWord{7, 11, 352}, - dictWord{137, 11, 684}, - dictWord{135, 0, 2023}, - dictWord{135, 0, 1836}, - dictWord{132, 10, 447}, - dictWord{5, 0, 843}, - dictWord{144, 0, 35}, - dictWord{137, 11, 779}, - dictWord{ - 141, - 11, - 35, - }, - dictWord{4, 10, 128}, - dictWord{5, 10, 415}, - dictWord{6, 10, 462}, - dictWord{7, 10, 294}, - dictWord{7, 10, 578}, - dictWord{10, 10, 710}, - dictWord{ - 139, - 10, - 86, - }, - dictWord{132, 0, 554}, - dictWord{133, 0, 536}, - dictWord{136, 10, 587}, - dictWord{5, 0, 207}, - 
dictWord{9, 0, 79}, - dictWord{11, 0, 625}, - dictWord{ - 145, - 0, - 7, - }, - dictWord{7, 0, 1371}, - dictWord{6, 10, 427}, - dictWord{138, 10, 692}, - dictWord{4, 0, 424}, - dictWord{4, 10, 195}, - dictWord{135, 10, 802}, - dictWord{ - 8, - 0, - 785, - }, - dictWord{133, 11, 564}, - dictWord{135, 0, 336}, - dictWord{4, 0, 896}, - dictWord{6, 0, 1777}, - dictWord{134, 11, 556}, - dictWord{137, 11, 103}, - dictWord{134, 10, 1683}, - dictWord{7, 11, 544}, - dictWord{8, 11, 719}, - dictWord{138, 11, 61}, - dictWord{138, 10, 472}, - dictWord{4, 11, 5}, - dictWord{5, 11, 498}, - dictWord{136, 11, 637}, - dictWord{7, 0, 750}, - dictWord{9, 0, 223}, - dictWord{11, 0, 27}, - dictWord{11, 0, 466}, - dictWord{12, 0, 624}, - dictWord{14, 0, 265}, - dictWord{ - 146, - 0, - 61, - }, - dictWord{12, 0, 238}, - dictWord{18, 0, 155}, - dictWord{12, 11, 238}, - dictWord{146, 11, 155}, - dictWord{151, 10, 28}, - dictWord{133, 11, 927}, - dictWord{12, 0, 383}, - dictWord{5, 10, 3}, - dictWord{8, 10, 578}, - dictWord{9, 10, 118}, - dictWord{10, 10, 705}, - dictWord{141, 10, 279}, - dictWord{4, 11, 893}, - dictWord{ - 5, - 11, - 780, - }, - dictWord{133, 11, 893}, - dictWord{4, 0, 603}, - dictWord{133, 0, 661}, - dictWord{4, 0, 11}, - dictWord{6, 0, 128}, - dictWord{7, 0, 231}, - dictWord{ - 7, - 0, - 1533, - }, - dictWord{10, 0, 725}, - dictWord{5, 10, 229}, - dictWord{5, 11, 238}, - dictWord{135, 11, 1350}, - dictWord{8, 10, 102}, - dictWord{10, 10, 578}, - dictWord{ - 10, - 10, - 672, - }, - dictWord{12, 10, 496}, - dictWord{13, 10, 408}, - dictWord{14, 10, 121}, - dictWord{145, 10, 106}, - dictWord{132, 0, 476}, - dictWord{134, 0, 1552}, - dictWord{134, 11, 1729}, - dictWord{8, 10, 115}, - dictWord{8, 10, 350}, - dictWord{9, 10, 489}, - dictWord{10, 10, 128}, - dictWord{11, 10, 306}, - dictWord{ - 12, - 10, - 373, - }, - dictWord{14, 10, 30}, - dictWord{17, 10, 79}, - dictWord{19, 10, 80}, - dictWord{150, 10, 55}, - dictWord{135, 0, 1807}, - dictWord{4, 0, 680}, - dictWord{ - 4, - 
11, - 60, - }, - dictWord{7, 11, 760}, - dictWord{7, 11, 1800}, - dictWord{8, 11, 314}, - dictWord{9, 11, 700}, - dictWord{139, 11, 487}, - dictWord{4, 10, 230}, - dictWord{ - 5, - 10, - 702, - }, - dictWord{148, 11, 94}, - dictWord{132, 11, 228}, - dictWord{139, 0, 435}, - dictWord{9, 0, 20}, - dictWord{10, 0, 324}, - dictWord{10, 0, 807}, - dictWord{ - 139, - 0, - 488, - }, - dictWord{6, 10, 1728}, - dictWord{136, 11, 419}, - dictWord{4, 10, 484}, - dictWord{18, 10, 26}, - dictWord{19, 10, 42}, - dictWord{20, 10, 43}, - dictWord{ - 21, - 10, - 0, - }, - dictWord{23, 10, 27}, - dictWord{152, 10, 14}, - dictWord{135, 0, 1431}, - dictWord{133, 11, 828}, - dictWord{5, 0, 112}, - dictWord{6, 0, 103}, - dictWord{ - 6, - 0, - 150, - }, - dictWord{7, 0, 1303}, - dictWord{9, 0, 292}, - dictWord{10, 0, 481}, - dictWord{20, 0, 13}, - dictWord{7, 11, 176}, - dictWord{7, 11, 178}, - dictWord{7, 11, 1110}, - dictWord{10, 11, 481}, - dictWord{148, 11, 13}, - dictWord{138, 0, 356}, - dictWord{4, 11, 51}, - dictWord{5, 11, 39}, - dictWord{6, 11, 4}, - dictWord{7, 11, 591}, - dictWord{ - 7, - 11, - 849, - }, - dictWord{7, 11, 951}, - dictWord{7, 11, 1129}, - dictWord{7, 11, 1613}, - dictWord{7, 11, 1760}, - dictWord{7, 11, 1988}, - dictWord{9, 11, 434}, - dictWord{10, 11, 754}, - dictWord{11, 11, 25}, - dictWord{11, 11, 37}, - dictWord{139, 11, 414}, - dictWord{6, 0, 1963}, - dictWord{134, 0, 2000}, - dictWord{ - 132, - 10, - 633, - }, - dictWord{6, 0, 1244}, - dictWord{133, 11, 902}, - dictWord{135, 11, 928}, - dictWord{140, 0, 18}, - dictWord{138, 0, 204}, - dictWord{135, 11, 1173}, - dictWord{134, 0, 867}, - dictWord{4, 0, 708}, - dictWord{8, 0, 15}, - dictWord{9, 0, 50}, - dictWord{9, 0, 386}, - dictWord{11, 0, 18}, - dictWord{11, 0, 529}, - dictWord{140, 0, 228}, - dictWord{134, 11, 270}, - dictWord{4, 0, 563}, - dictWord{7, 0, 109}, - dictWord{7, 0, 592}, - dictWord{7, 0, 637}, - dictWord{7, 0, 770}, - dictWord{8, 0, 463}, - dictWord{ - 9, - 0, - 60, - }, - dictWord{9, 0, 
335}, - dictWord{9, 0, 904}, - dictWord{10, 0, 73}, - dictWord{11, 0, 434}, - dictWord{12, 0, 585}, - dictWord{13, 0, 331}, - dictWord{18, 0, 110}, - dictWord{148, 0, 60}, - dictWord{132, 0, 502}, - dictWord{14, 11, 359}, - dictWord{19, 11, 52}, - dictWord{148, 11, 47}, - dictWord{6, 11, 377}, - dictWord{7, 11, 1025}, - dictWord{9, 11, 613}, - dictWord{145, 11, 104}, - dictWord{6, 0, 347}, - dictWord{10, 0, 161}, - dictWord{5, 10, 70}, - dictWord{5, 10, 622}, - dictWord{6, 10, 334}, - dictWord{ - 7, - 10, - 1032, - }, - dictWord{9, 10, 171}, - dictWord{11, 10, 26}, - dictWord{11, 10, 213}, - dictWord{11, 10, 637}, - dictWord{11, 10, 707}, - dictWord{12, 10, 202}, - dictWord{12, 10, 380}, - dictWord{13, 10, 226}, - dictWord{13, 10, 355}, - dictWord{14, 10, 222}, - dictWord{145, 10, 42}, - dictWord{132, 11, 416}, - dictWord{4, 0, 33}, - dictWord{5, 0, 102}, - dictWord{6, 0, 284}, - dictWord{7, 0, 1079}, - dictWord{7, 0, 1423}, - dictWord{7, 0, 1702}, - dictWord{8, 0, 470}, - dictWord{9, 0, 554}, - dictWord{ - 9, - 0, - 723, - }, - dictWord{11, 0, 333}, - dictWord{142, 11, 372}, - dictWord{5, 11, 152}, - dictWord{5, 11, 197}, - dictWord{7, 11, 340}, - dictWord{7, 11, 867}, - dictWord{ - 10, - 11, - 548, - }, - dictWord{10, 11, 581}, - dictWord{11, 11, 6}, - dictWord{12, 11, 3}, - dictWord{12, 11, 19}, - dictWord{14, 11, 110}, - dictWord{142, 11, 289}, - dictWord{ - 7, - 0, - 246, - }, - dictWord{135, 0, 840}, - dictWord{6, 0, 10}, - dictWord{8, 0, 571}, - dictWord{9, 0, 739}, - dictWord{143, 0, 91}, - dictWord{6, 0, 465}, - dictWord{7, 0, 1465}, - dictWord{ - 4, - 10, - 23, - }, - dictWord{4, 10, 141}, - dictWord{5, 10, 313}, - dictWord{5, 10, 1014}, - dictWord{6, 10, 50}, - dictWord{7, 10, 142}, - dictWord{7, 10, 559}, - dictWord{ - 8, - 10, - 640, - }, - dictWord{9, 10, 460}, - dictWord{9, 10, 783}, - dictWord{11, 10, 741}, - dictWord{12, 10, 183}, - dictWord{141, 10, 488}, - dictWord{133, 0, 626}, - dictWord{ - 136, - 0, - 614, - }, - dictWord{138, 0, 237}, - 
dictWord{7, 11, 34}, - dictWord{7, 11, 190}, - dictWord{8, 11, 28}, - dictWord{8, 11, 141}, - dictWord{8, 11, 444}, - dictWord{ - 8, - 11, - 811, - }, - dictWord{9, 11, 468}, - dictWord{11, 11, 334}, - dictWord{12, 11, 24}, - dictWord{12, 11, 386}, - dictWord{140, 11, 576}, - dictWord{133, 11, 757}, - dictWord{ - 5, - 0, - 18, - }, - dictWord{6, 0, 526}, - dictWord{13, 0, 24}, - dictWord{13, 0, 110}, - dictWord{19, 0, 5}, - dictWord{147, 0, 44}, - dictWord{6, 0, 506}, - dictWord{134, 11, 506}, - dictWord{135, 11, 1553}, - dictWord{4, 0, 309}, - dictWord{5, 0, 462}, - dictWord{7, 0, 970}, - dictWord{7, 0, 1097}, - dictWord{22, 0, 30}, - dictWord{22, 0, 33}, - dictWord{ - 7, - 11, - 1385, - }, - dictWord{11, 11, 582}, - dictWord{11, 11, 650}, - dictWord{11, 11, 901}, - dictWord{11, 11, 949}, - dictWord{12, 11, 232}, - dictWord{12, 11, 236}, - dictWord{13, 11, 413}, - dictWord{13, 11, 501}, - dictWord{146, 11, 116}, - dictWord{9, 0, 140}, - dictWord{5, 10, 222}, - dictWord{138, 10, 534}, - dictWord{6, 0, 1056}, - dictWord{137, 10, 906}, - dictWord{134, 0, 1704}, - dictWord{138, 10, 503}, - dictWord{134, 0, 1036}, - dictWord{5, 10, 154}, - dictWord{7, 10, 1491}, - dictWord{ - 10, - 10, - 379, - }, - dictWord{138, 10, 485}, - dictWord{4, 11, 383}, - dictWord{133, 10, 716}, - dictWord{134, 0, 1315}, - dictWord{5, 0, 86}, - dictWord{7, 0, 743}, - dictWord{ - 9, - 0, - 85, - }, - dictWord{10, 0, 281}, - dictWord{10, 0, 432}, - dictWord{11, 0, 825}, - dictWord{12, 0, 251}, - dictWord{13, 0, 118}, - dictWord{142, 0, 378}, - dictWord{ - 8, - 0, - 264, - }, - dictWord{4, 10, 91}, - dictWord{5, 10, 388}, - dictWord{5, 10, 845}, - dictWord{6, 10, 206}, - dictWord{6, 10, 252}, - dictWord{6, 10, 365}, - dictWord{7, 10, 136}, - dictWord{7, 10, 531}, - dictWord{136, 10, 621}, - dictWord{5, 0, 524}, - dictWord{133, 0, 744}, - dictWord{5, 11, 277}, - dictWord{141, 11, 247}, - dictWord{ - 132, - 11, - 435, - }, - dictWord{10, 0, 107}, - dictWord{140, 0, 436}, - dictWord{132, 0, 927}, - 
dictWord{10, 0, 123}, - dictWord{12, 0, 670}, - dictWord{146, 0, 94}, - dictWord{ - 7, - 0, - 1149, - }, - dictWord{9, 0, 156}, - dictWord{138, 0, 957}, - dictWord{5, 11, 265}, - dictWord{6, 11, 212}, - dictWord{135, 11, 28}, - dictWord{133, 0, 778}, - dictWord{ - 133, - 0, - 502, - }, - dictWord{8, 0, 196}, - dictWord{10, 0, 283}, - dictWord{139, 0, 406}, - dictWord{135, 10, 576}, - dictWord{136, 11, 535}, - dictWord{134, 0, 1312}, - dictWord{ - 5, - 10, - 771, - }, - dictWord{5, 10, 863}, - dictWord{5, 10, 898}, - dictWord{6, 10, 1632}, - dictWord{6, 10, 1644}, - dictWord{134, 10, 1780}, - dictWord{5, 0, 855}, - dictWord{5, 10, 331}, - dictWord{135, 11, 1487}, - dictWord{132, 11, 702}, - dictWord{5, 11, 808}, - dictWord{135, 11, 2045}, - dictWord{7, 0, 1400}, - dictWord{ - 9, - 0, - 446, - }, - dictWord{138, 0, 45}, - dictWord{140, 10, 632}, - dictWord{132, 0, 1003}, - dictWord{5, 11, 166}, - dictWord{8, 11, 739}, - dictWord{140, 11, 511}, - dictWord{ - 5, - 10, - 107, - }, - dictWord{7, 10, 201}, - dictWord{136, 10, 518}, - dictWord{6, 10, 446}, - dictWord{135, 10, 1817}, - dictWord{134, 0, 1532}, - dictWord{ - 134, - 0, - 1097, - }, - dictWord{4, 11, 119}, - dictWord{5, 11, 170}, - dictWord{5, 11, 447}, - dictWord{7, 11, 1708}, - dictWord{7, 11, 1889}, - dictWord{9, 11, 357}, - dictWord{ - 9, - 11, - 719, - }, - dictWord{12, 11, 486}, - dictWord{140, 11, 596}, - dictWord{9, 10, 851}, - dictWord{141, 10, 510}, - dictWord{7, 0, 612}, - dictWord{8, 0, 545}, - dictWord{ - 8, - 0, - 568, - }, - dictWord{8, 0, 642}, - dictWord{9, 0, 717}, - dictWord{10, 0, 541}, - dictWord{10, 0, 763}, - dictWord{11, 0, 449}, - dictWord{12, 0, 489}, - dictWord{13, 0, 153}, - dictWord{13, 0, 296}, - dictWord{14, 0, 138}, - dictWord{14, 0, 392}, - dictWord{15, 0, 50}, - dictWord{16, 0, 6}, - dictWord{16, 0, 12}, - dictWord{20, 0, 9}, - dictWord{ - 132, - 10, - 504, - }, - dictWord{4, 11, 450}, - dictWord{135, 11, 1158}, - dictWord{11, 0, 54}, - dictWord{13, 0, 173}, - dictWord{13, 0, 
294}, - dictWord{5, 10, 883}, - dictWord{ - 5, - 10, - 975, - }, - dictWord{8, 10, 392}, - dictWord{148, 10, 7}, - dictWord{13, 0, 455}, - dictWord{15, 0, 99}, - dictWord{15, 0, 129}, - dictWord{144, 0, 68}, - dictWord{135, 0, 172}, - dictWord{132, 11, 754}, - dictWord{5, 10, 922}, - dictWord{134, 10, 1707}, - dictWord{134, 0, 1029}, - dictWord{17, 11, 39}, - dictWord{148, 11, 36}, - dictWord{ - 4, - 0, - 568, - }, - dictWord{5, 10, 993}, - dictWord{7, 10, 515}, - dictWord{137, 10, 91}, - dictWord{132, 0, 732}, - dictWord{10, 0, 617}, - dictWord{138, 11, 617}, - dictWord{ - 134, - 0, - 974, - }, - dictWord{7, 0, 989}, - dictWord{10, 0, 377}, - dictWord{12, 0, 363}, - dictWord{13, 0, 68}, - dictWord{13, 0, 94}, - dictWord{14, 0, 108}, - dictWord{ - 142, - 0, - 306, - }, - dictWord{136, 0, 733}, - dictWord{132, 0, 428}, - dictWord{7, 0, 1789}, - dictWord{135, 11, 1062}, - dictWord{7, 0, 2015}, - dictWord{140, 0, 665}, - dictWord{135, 10, 1433}, - dictWord{5, 0, 287}, - dictWord{7, 10, 921}, - dictWord{8, 10, 580}, - dictWord{8, 10, 593}, - dictWord{8, 10, 630}, - dictWord{138, 10, 28}, - dictWord{138, 0, 806}, - dictWord{4, 10, 911}, - dictWord{5, 10, 867}, - dictWord{5, 10, 1013}, - dictWord{7, 10, 2034}, - dictWord{8, 10, 798}, - dictWord{136, 10, 813}, - dictWord{134, 0, 1539}, - dictWord{8, 11, 523}, - dictWord{150, 11, 34}, - dictWord{135, 11, 740}, - dictWord{7, 11, 238}, - dictWord{7, 11, 2033}, - dictWord{ - 8, - 11, - 120, - }, - dictWord{8, 11, 188}, - dictWord{8, 11, 659}, - dictWord{9, 11, 598}, - dictWord{10, 11, 466}, - dictWord{12, 11, 342}, - dictWord{12, 11, 588}, - dictWord{ - 13, - 11, - 503, - }, - dictWord{14, 11, 246}, - dictWord{143, 11, 92}, - dictWord{7, 0, 1563}, - dictWord{141, 0, 182}, - dictWord{5, 10, 135}, - dictWord{6, 10, 519}, - dictWord{ - 7, - 10, - 1722, - }, - dictWord{10, 10, 271}, - dictWord{11, 10, 261}, - dictWord{145, 10, 54}, - dictWord{14, 10, 338}, - dictWord{148, 10, 81}, - dictWord{7, 0, 484}, - dictWord{ - 4, - 10, - 
300, - }, - dictWord{133, 10, 436}, - dictWord{145, 11, 114}, - dictWord{6, 0, 1623}, - dictWord{134, 0, 1681}, - dictWord{133, 11, 640}, - dictWord{4, 11, 201}, - dictWord{7, 11, 1744}, - dictWord{8, 11, 602}, - dictWord{11, 11, 247}, - dictWord{11, 11, 826}, - dictWord{145, 11, 65}, - dictWord{8, 11, 164}, - dictWord{ - 146, - 11, - 62, - }, - dictWord{6, 0, 1833}, - dictWord{6, 0, 1861}, - dictWord{136, 0, 878}, - dictWord{134, 0, 1569}, - dictWord{8, 10, 357}, - dictWord{10, 10, 745}, - dictWord{ - 14, - 10, - 426, - }, - dictWord{17, 10, 94}, - dictWord{147, 10, 57}, - dictWord{12, 0, 93}, - dictWord{12, 0, 501}, - dictWord{13, 0, 362}, - dictWord{14, 0, 151}, - dictWord{15, 0, 40}, - dictWord{15, 0, 59}, - dictWord{16, 0, 46}, - dictWord{17, 0, 25}, - dictWord{18, 0, 14}, - dictWord{18, 0, 134}, - dictWord{19, 0, 25}, - dictWord{19, 0, 69}, - dictWord{ - 20, - 0, - 16, - }, - dictWord{20, 0, 19}, - dictWord{20, 0, 66}, - dictWord{21, 0, 23}, - dictWord{21, 0, 25}, - dictWord{150, 0, 42}, - dictWord{6, 0, 1748}, - dictWord{8, 0, 715}, - dictWord{ - 9, - 0, - 802, - }, - dictWord{10, 0, 46}, - dictWord{10, 0, 819}, - dictWord{13, 0, 308}, - dictWord{14, 0, 351}, - dictWord{14, 0, 363}, - dictWord{146, 0, 67}, - dictWord{ - 132, - 0, - 994, - }, - dictWord{4, 0, 63}, - dictWord{133, 0, 347}, - dictWord{132, 0, 591}, - dictWord{133, 0, 749}, - dictWord{7, 11, 1577}, - dictWord{10, 11, 304}, - dictWord{ - 10, - 11, - 549, - }, - dictWord{11, 11, 424}, - dictWord{12, 11, 365}, - dictWord{13, 11, 220}, - dictWord{13, 11, 240}, - dictWord{142, 11, 33}, - dictWord{133, 0, 366}, - dictWord{ - 7, - 0, - 557, - }, - dictWord{12, 0, 547}, - dictWord{14, 0, 86}, - dictWord{133, 10, 387}, - dictWord{135, 0, 1747}, - dictWord{132, 11, 907}, - dictWord{5, 11, 100}, - dictWord{10, 11, 329}, - dictWord{12, 11, 416}, - dictWord{149, 11, 29}, - dictWord{4, 10, 6}, - dictWord{5, 10, 708}, - dictWord{136, 10, 75}, - dictWord{7, 10, 1351}, - dictWord{9, 10, 581}, - dictWord{10, 10, 
639}, - dictWord{11, 10, 453}, - dictWord{140, 10, 584}, - dictWord{7, 0, 89}, - dictWord{132, 10, 303}, - dictWord{138, 10, 772}, - dictWord{132, 11, 176}, - dictWord{5, 11, 636}, - dictWord{5, 11, 998}, - dictWord{8, 11, 26}, - dictWord{137, 11, 358}, - dictWord{7, 11, 9}, - dictWord{7, 11, 1508}, - dictWord{9, 11, 317}, - dictWord{10, 11, 210}, - dictWord{10, 11, 292}, - dictWord{10, 11, 533}, - dictWord{11, 11, 555}, - dictWord{12, 11, 526}, - dictWord{ - 12, - 11, - 607, - }, - dictWord{13, 11, 263}, - dictWord{13, 11, 459}, - dictWord{142, 11, 271}, - dictWord{134, 0, 1463}, - dictWord{6, 0, 772}, - dictWord{6, 0, 1137}, - dictWord{ - 139, - 11, - 595, - }, - dictWord{7, 0, 977}, - dictWord{139, 11, 66}, - dictWord{138, 0, 893}, - dictWord{20, 0, 48}, - dictWord{148, 11, 48}, - dictWord{5, 0, 824}, - dictWord{ - 133, - 0, - 941, - }, - dictWord{134, 11, 295}, - dictWord{7, 0, 1543}, - dictWord{7, 0, 1785}, - dictWord{10, 0, 690}, - dictWord{4, 10, 106}, - dictWord{139, 10, 717}, - dictWord{ - 7, - 0, - 440, - }, - dictWord{8, 0, 230}, - dictWord{139, 0, 106}, - dictWord{5, 10, 890}, - dictWord{133, 10, 988}, - dictWord{6, 10, 626}, - dictWord{142, 10, 431}, - dictWord{ - 10, - 11, - 127, - }, - dictWord{141, 11, 27}, - dictWord{17, 0, 32}, - dictWord{10, 10, 706}, - dictWord{150, 10, 44}, - dictWord{132, 0, 216}, - dictWord{137, 0, 332}, - dictWord{4, 10, 698}, - dictWord{136, 11, 119}, - dictWord{139, 11, 267}, - dictWord{138, 10, 17}, - dictWord{11, 11, 526}, - dictWord{11, 11, 939}, - dictWord{ - 141, - 11, - 290, - }, - dictWord{7, 11, 1167}, - dictWord{11, 11, 934}, - dictWord{13, 11, 391}, - dictWord{145, 11, 76}, - dictWord{139, 11, 39}, - dictWord{134, 10, 84}, - dictWord{ - 4, - 0, - 914, - }, - dictWord{5, 0, 800}, - dictWord{133, 0, 852}, - dictWord{10, 0, 416}, - dictWord{141, 0, 115}, - dictWord{7, 0, 564}, - dictWord{142, 0, 168}, - dictWord{ - 4, - 0, - 918, - }, - dictWord{133, 0, 876}, - dictWord{134, 0, 1764}, - dictWord{152, 0, 3}, - 
dictWord{4, 0, 92}, - dictWord{5, 0, 274}, - dictWord{7, 11, 126}, - dictWord{136, 11, 84}, - dictWord{140, 10, 498}, - dictWord{136, 11, 790}, - dictWord{8, 0, 501}, - dictWord{5, 10, 986}, - dictWord{6, 10, 130}, - dictWord{7, 10, 1582}, - dictWord{ - 8, - 10, - 458, - }, - dictWord{10, 10, 101}, - dictWord{10, 10, 318}, - dictWord{138, 10, 823}, - dictWord{6, 11, 64}, - dictWord{12, 11, 377}, - dictWord{141, 11, 309}, - dictWord{ - 5, - 0, - 743, - }, - dictWord{138, 0, 851}, - dictWord{4, 0, 49}, - dictWord{7, 0, 280}, - dictWord{135, 0, 1633}, - dictWord{134, 0, 879}, - dictWord{136, 0, 47}, - dictWord{ - 7, - 10, - 1644, - }, - dictWord{137, 10, 129}, - dictWord{132, 0, 865}, - dictWord{134, 0, 1202}, - dictWord{9, 11, 34}, - dictWord{139, 11, 484}, - dictWord{135, 10, 997}, - dictWord{5, 0, 272}, - dictWord{5, 0, 908}, - dictWord{5, 0, 942}, - dictWord{8, 0, 197}, - dictWord{9, 0, 47}, - dictWord{11, 0, 538}, - dictWord{139, 0, 742}, - dictWord{ - 6, - 11, - 1700, - }, - dictWord{7, 11, 26}, - dictWord{7, 11, 293}, - dictWord{7, 11, 382}, - dictWord{7, 11, 1026}, - dictWord{7, 11, 1087}, - dictWord{7, 11, 2027}, - dictWord{ - 8, - 11, - 24, - }, - dictWord{8, 11, 114}, - dictWord{8, 11, 252}, - dictWord{8, 11, 727}, - dictWord{8, 11, 729}, - dictWord{9, 11, 30}, - dictWord{9, 11, 199}, - dictWord{9, 11, 231}, - dictWord{9, 11, 251}, - dictWord{9, 11, 334}, - dictWord{9, 11, 361}, - dictWord{9, 11, 488}, - dictWord{9, 11, 712}, - dictWord{10, 11, 55}, - dictWord{10, 11, 60}, - dictWord{ - 10, - 11, - 232, - }, - dictWord{10, 11, 332}, - dictWord{10, 11, 384}, - dictWord{10, 11, 396}, - dictWord{10, 11, 504}, - dictWord{10, 11, 542}, - dictWord{10, 11, 652}, - dictWord{11, 11, 20}, - dictWord{11, 11, 48}, - dictWord{11, 11, 207}, - dictWord{11, 11, 291}, - dictWord{11, 11, 298}, - dictWord{11, 11, 342}, - dictWord{ - 11, - 11, - 365, - }, - dictWord{11, 11, 394}, - dictWord{11, 11, 620}, - dictWord{11, 11, 705}, - dictWord{11, 11, 1017}, - dictWord{12, 11, 
123}, - dictWord{12, 11, 340}, - dictWord{12, 11, 406}, - dictWord{12, 11, 643}, - dictWord{13, 11, 61}, - dictWord{13, 11, 269}, - dictWord{13, 11, 311}, - dictWord{13, 11, 319}, - dictWord{13, 11, 486}, - dictWord{14, 11, 234}, - dictWord{15, 11, 62}, - dictWord{15, 11, 85}, - dictWord{16, 11, 71}, - dictWord{18, 11, 119}, - dictWord{148, 11, 105}, - dictWord{ - 6, - 0, - 1455, - }, - dictWord{150, 11, 37}, - dictWord{135, 10, 1927}, - dictWord{135, 0, 1911}, - dictWord{137, 0, 891}, - dictWord{7, 10, 1756}, - dictWord{137, 10, 98}, - dictWord{7, 10, 1046}, - dictWord{139, 10, 160}, - dictWord{132, 0, 761}, - dictWord{6, 11, 379}, - dictWord{7, 11, 270}, - dictWord{7, 11, 1116}, - dictWord{ - 8, - 11, - 176, - }, - dictWord{8, 11, 183}, - dictWord{9, 11, 432}, - dictWord{9, 11, 661}, - dictWord{12, 11, 247}, - dictWord{12, 11, 617}, - dictWord{146, 11, 125}, - dictWord{ - 6, - 10, - 45, - }, - dictWord{7, 10, 433}, - dictWord{8, 10, 129}, - dictWord{9, 10, 21}, - dictWord{10, 10, 392}, - dictWord{11, 10, 79}, - dictWord{12, 10, 499}, - dictWord{ - 13, - 10, - 199, - }, - dictWord{141, 10, 451}, - dictWord{4, 0, 407}, - dictWord{5, 11, 792}, - dictWord{133, 11, 900}, - dictWord{132, 0, 560}, - dictWord{135, 0, 183}, - dictWord{ - 13, - 0, - 490, - }, - dictWord{7, 10, 558}, - dictWord{136, 10, 353}, - dictWord{4, 0, 475}, - dictWord{6, 0, 731}, - dictWord{11, 0, 35}, - dictWord{13, 0, 71}, - dictWord{13, 0, 177}, - dictWord{14, 0, 422}, - dictWord{133, 10, 785}, - dictWord{8, 10, 81}, - dictWord{9, 10, 189}, - dictWord{9, 10, 201}, - dictWord{11, 10, 478}, - dictWord{11, 10, 712}, - dictWord{141, 10, 338}, - dictWord{4, 0, 418}, - dictWord{4, 0, 819}, - dictWord{133, 10, 353}, - dictWord{151, 10, 26}, - dictWord{4, 11, 901}, - dictWord{ - 133, - 11, - 776, - }, - dictWord{132, 0, 575}, - dictWord{7, 0, 818}, - dictWord{16, 0, 92}, - dictWord{17, 0, 14}, - dictWord{17, 0, 45}, - dictWord{18, 0, 75}, - dictWord{148, 0, 18}, - dictWord{ - 6, - 0, - 222, - }, - 
dictWord{7, 0, 636}, - dictWord{7, 0, 1620}, - dictWord{8, 0, 409}, - dictWord{9, 0, 693}, - dictWord{139, 0, 77}, - dictWord{6, 10, 25}, - dictWord{7, 10, 855}, - dictWord{7, 10, 1258}, - dictWord{144, 10, 32}, - dictWord{6, 0, 1880}, - dictWord{6, 0, 1887}, - dictWord{6, 0, 1918}, - dictWord{6, 0, 1924}, - dictWord{9, 0, 967}, - dictWord{9, 0, 995}, - dictWord{9, 0, 1015}, - dictWord{12, 0, 826}, - dictWord{12, 0, 849}, - dictWord{12, 0, 857}, - dictWord{12, 0, 860}, - dictWord{12, 0, 886}, - dictWord{ - 12, - 0, - 932, - }, - dictWord{18, 0, 228}, - dictWord{18, 0, 231}, - dictWord{146, 0, 240}, - dictWord{134, 0, 633}, - dictWord{134, 0, 1308}, - dictWord{4, 11, 37}, - dictWord{ - 5, - 11, - 334, - }, - dictWord{135, 11, 1253}, - dictWord{10, 0, 86}, - dictWord{4, 10, 4}, - dictWord{7, 10, 1118}, - dictWord{7, 10, 1320}, - dictWord{7, 10, 1706}, - dictWord{ - 8, - 10, - 277, - }, - dictWord{9, 10, 622}, - dictWord{11, 10, 724}, - dictWord{12, 10, 350}, - dictWord{12, 10, 397}, - dictWord{13, 10, 28}, - dictWord{13, 10, 159}, - dictWord{ - 15, - 10, - 89, - }, - dictWord{18, 10, 5}, - dictWord{19, 10, 9}, - dictWord{20, 10, 34}, - dictWord{150, 10, 47}, - dictWord{132, 11, 508}, - dictWord{137, 11, 448}, - dictWord{ - 12, - 11, - 107, - }, - dictWord{146, 11, 31}, - dictWord{132, 0, 817}, - dictWord{134, 0, 663}, - dictWord{133, 0, 882}, - dictWord{134, 0, 914}, - dictWord{132, 11, 540}, - dictWord{132, 11, 533}, - dictWord{136, 11, 608}, - dictWord{8, 0, 885}, - dictWord{138, 0, 865}, - dictWord{132, 0, 426}, - dictWord{6, 0, 58}, - dictWord{7, 0, 745}, - dictWord{7, 0, 1969}, - dictWord{8, 0, 399}, - dictWord{8, 0, 675}, - dictWord{9, 0, 479}, - dictWord{9, 0, 731}, - dictWord{10, 0, 330}, - dictWord{10, 0, 593}, - dictWord{ - 10, - 0, - 817, - }, - dictWord{11, 0, 32}, - dictWord{11, 0, 133}, - dictWord{11, 0, 221}, - dictWord{145, 0, 68}, - dictWord{134, 10, 255}, - dictWord{7, 0, 102}, - dictWord{ - 137, - 0, - 538, - }, - dictWord{137, 10, 216}, - 
dictWord{7, 11, 253}, - dictWord{136, 11, 549}, - dictWord{135, 11, 912}, - dictWord{9, 10, 183}, - dictWord{139, 10, 286}, - dictWord{11, 10, 956}, - dictWord{151, 10, 3}, - dictWord{8, 11, 527}, - dictWord{18, 11, 60}, - dictWord{147, 11, 24}, - dictWord{4, 10, 536}, - dictWord{7, 10, 1141}, - dictWord{10, 10, 723}, - dictWord{139, 10, 371}, - dictWord{133, 11, 920}, - dictWord{7, 0, 876}, - dictWord{135, 10, 285}, - dictWord{135, 10, 560}, - dictWord{ - 132, - 10, - 690, - }, - dictWord{142, 11, 126}, - dictWord{11, 10, 33}, - dictWord{12, 10, 571}, - dictWord{149, 10, 1}, - dictWord{133, 0, 566}, - dictWord{9, 0, 139}, - dictWord{ - 10, - 0, - 399, - }, - dictWord{11, 0, 469}, - dictWord{12, 0, 634}, - dictWord{13, 0, 223}, - dictWord{132, 11, 483}, - dictWord{6, 0, 48}, - dictWord{135, 0, 63}, - dictWord{18, 0, 12}, - dictWord{7, 10, 1862}, - dictWord{12, 10, 491}, - dictWord{12, 10, 520}, - dictWord{13, 10, 383}, - dictWord{142, 10, 244}, - dictWord{135, 11, 1665}, - dictWord{132, 11, 448}, - dictWord{9, 11, 495}, - dictWord{146, 11, 104}, - dictWord{6, 0, 114}, - dictWord{7, 0, 1224}, - dictWord{7, 0, 1556}, - dictWord{136, 0, 3}, - dictWord{ - 4, - 10, - 190, - }, - dictWord{133, 10, 554}, - dictWord{8, 0, 576}, - dictWord{9, 0, 267}, - dictWord{133, 10, 1001}, - dictWord{133, 10, 446}, - dictWord{133, 0, 933}, - dictWord{139, 11, 1009}, - dictWord{8, 11, 653}, - dictWord{13, 11, 93}, - dictWord{147, 11, 14}, - dictWord{6, 0, 692}, - dictWord{6, 0, 821}, - dictWord{134, 0, 1077}, - dictWord{5, 11, 172}, - dictWord{135, 11, 801}, - dictWord{138, 0, 752}, - dictWord{4, 0, 375}, - dictWord{134, 0, 638}, - dictWord{134, 0, 1011}, - dictWord{ - 140, - 11, - 540, - }, - dictWord{9, 0, 96}, - dictWord{133, 11, 260}, - dictWord{139, 11, 587}, - dictWord{135, 10, 1231}, - dictWord{12, 0, 30}, - dictWord{13, 0, 148}, - dictWord{ - 14, - 0, - 87, - }, - dictWord{14, 0, 182}, - dictWord{16, 0, 42}, - dictWord{20, 0, 70}, - dictWord{132, 10, 304}, - dictWord{6, 0, 
1398}, - dictWord{7, 0, 56}, - dictWord{7, 0, 1989}, - dictWord{8, 0, 337}, - dictWord{8, 0, 738}, - dictWord{9, 0, 600}, - dictWord{12, 0, 37}, - dictWord{13, 0, 447}, - dictWord{142, 0, 92}, - dictWord{138, 0, 666}, - dictWord{ - 5, - 0, - 394, - }, - dictWord{7, 0, 487}, - dictWord{136, 0, 246}, - dictWord{9, 0, 437}, - dictWord{6, 10, 53}, - dictWord{6, 10, 199}, - dictWord{7, 10, 1408}, - dictWord{8, 10, 32}, - dictWord{8, 10, 93}, - dictWord{10, 10, 397}, - dictWord{10, 10, 629}, - dictWord{11, 10, 593}, - dictWord{11, 10, 763}, - dictWord{13, 10, 326}, - dictWord{145, 10, 35}, - dictWord{134, 10, 105}, - dictWord{9, 0, 320}, - dictWord{10, 0, 506}, - dictWord{138, 10, 794}, - dictWord{7, 11, 57}, - dictWord{8, 11, 167}, - dictWord{8, 11, 375}, - dictWord{9, 11, 82}, - dictWord{9, 11, 561}, - dictWord{10, 11, 620}, - dictWord{10, 11, 770}, - dictWord{11, 10, 704}, - dictWord{141, 10, 396}, - dictWord{6, 0, 1003}, - dictWord{5, 10, 114}, - dictWord{5, 10, 255}, - dictWord{141, 10, 285}, - dictWord{7, 0, 866}, - dictWord{135, 0, 1163}, - dictWord{133, 11, 531}, - dictWord{ - 132, - 0, - 328, - }, - dictWord{7, 10, 2035}, - dictWord{8, 10, 19}, - dictWord{9, 10, 89}, - dictWord{138, 10, 831}, - dictWord{8, 11, 194}, - dictWord{136, 11, 756}, - dictWord{ - 136, - 0, - 1000, - }, - dictWord{5, 11, 453}, - dictWord{134, 11, 441}, - dictWord{4, 0, 101}, - dictWord{5, 0, 833}, - dictWord{7, 0, 1171}, - dictWord{136, 0, 744}, - dictWord{ - 133, - 0, - 726, - }, - dictWord{136, 10, 746}, - dictWord{138, 0, 176}, - dictWord{6, 0, 9}, - dictWord{6, 0, 397}, - dictWord{7, 0, 53}, - dictWord{7, 0, 1742}, - dictWord{10, 0, 632}, - dictWord{11, 0, 828}, - dictWord{140, 0, 146}, - dictWord{135, 11, 22}, - dictWord{145, 11, 64}, - dictWord{132, 0, 839}, - dictWord{11, 0, 417}, - dictWord{12, 0, 223}, - dictWord{140, 0, 265}, - dictWord{4, 11, 102}, - dictWord{7, 11, 815}, - dictWord{7, 11, 1699}, - dictWord{139, 11, 964}, - dictWord{5, 10, 955}, - dictWord{ - 136, - 10, - 814, 
- }, - dictWord{6, 0, 1931}, - dictWord{6, 0, 2007}, - dictWord{18, 0, 246}, - dictWord{146, 0, 247}, - dictWord{8, 0, 198}, - dictWord{11, 0, 29}, - dictWord{140, 0, 534}, - dictWord{135, 0, 1771}, - dictWord{6, 0, 846}, - dictWord{7, 11, 1010}, - dictWord{11, 11, 733}, - dictWord{11, 11, 759}, - dictWord{12, 11, 563}, - dictWord{ - 13, - 11, - 34, - }, - dictWord{14, 11, 101}, - dictWord{18, 11, 45}, - dictWord{146, 11, 129}, - dictWord{4, 0, 186}, - dictWord{5, 0, 157}, - dictWord{8, 0, 168}, - dictWord{138, 0, 6}, - dictWord{132, 11, 899}, - dictWord{133, 10, 56}, - dictWord{148, 10, 100}, - dictWord{133, 0, 875}, - dictWord{5, 0, 773}, - dictWord{5, 0, 991}, - dictWord{6, 0, 1635}, - dictWord{134, 0, 1788}, - dictWord{6, 0, 1274}, - dictWord{9, 0, 477}, - dictWord{141, 0, 78}, - dictWord{4, 0, 639}, - dictWord{7, 0, 111}, - dictWord{8, 0, 581}, - dictWord{ - 12, - 0, - 177, - }, - dictWord{6, 11, 52}, - dictWord{9, 11, 104}, - dictWord{9, 11, 559}, - dictWord{10, 10, 4}, - dictWord{10, 10, 13}, - dictWord{11, 10, 638}, - dictWord{ - 12, - 11, - 308, - }, - dictWord{19, 11, 87}, - dictWord{148, 10, 57}, - dictWord{132, 11, 604}, - dictWord{4, 11, 301}, - dictWord{133, 10, 738}, - dictWord{133, 10, 758}, - dictWord{134, 0, 1747}, - dictWord{7, 11, 1440}, - dictWord{11, 11, 854}, - dictWord{11, 11, 872}, - dictWord{11, 11, 921}, - dictWord{12, 11, 551}, - dictWord{ - 13, - 11, - 472, - }, - dictWord{142, 11, 367}, - dictWord{7, 0, 1364}, - dictWord{7, 0, 1907}, - dictWord{141, 0, 158}, - dictWord{134, 0, 873}, - dictWord{4, 0, 404}, - dictWord{ - 4, - 0, - 659, - }, - dictWord{7, 0, 552}, - dictWord{135, 0, 675}, - dictWord{135, 10, 1112}, - dictWord{139, 10, 328}, - dictWord{7, 11, 508}, - dictWord{137, 10, 133}, - dictWord{133, 0, 391}, - dictWord{5, 10, 110}, - dictWord{6, 10, 169}, - dictWord{6, 10, 1702}, - dictWord{7, 10, 400}, - dictWord{8, 10, 538}, - dictWord{9, 10, 184}, - dictWord{ - 9, - 10, - 524, - }, - dictWord{140, 10, 218}, - dictWord{6, 11, 
310}, - dictWord{7, 11, 1849}, - dictWord{8, 11, 72}, - dictWord{8, 11, 272}, - dictWord{8, 11, 431}, - dictWord{ - 9, - 11, - 12, - }, - dictWord{9, 11, 351}, - dictWord{10, 11, 563}, - dictWord{10, 11, 630}, - dictWord{10, 11, 810}, - dictWord{11, 11, 367}, - dictWord{11, 11, 599}, - dictWord{11, 11, 686}, - dictWord{140, 11, 672}, - dictWord{5, 0, 540}, - dictWord{6, 0, 1697}, - dictWord{136, 0, 668}, - dictWord{132, 0, 883}, - dictWord{134, 0, 78}, - dictWord{12, 0, 628}, - dictWord{18, 0, 79}, - dictWord{6, 10, 133}, - dictWord{9, 10, 353}, - dictWord{139, 10, 993}, - dictWord{6, 11, 181}, - dictWord{7, 11, 537}, - dictWord{ - 8, - 11, - 64, - }, - dictWord{9, 11, 127}, - dictWord{10, 11, 496}, - dictWord{12, 11, 510}, - dictWord{141, 11, 384}, - dictWord{6, 10, 93}, - dictWord{7, 10, 1422}, - dictWord{ - 7, - 10, - 1851, - }, - dictWord{8, 10, 673}, - dictWord{9, 10, 529}, - dictWord{140, 10, 43}, - dictWord{137, 10, 371}, - dictWord{134, 0, 1460}, - dictWord{134, 0, 962}, - dictWord{4, 11, 244}, - dictWord{135, 11, 233}, - dictWord{9, 10, 25}, - dictWord{10, 10, 467}, - dictWord{138, 10, 559}, - dictWord{4, 10, 335}, - dictWord{ - 135, - 10, - 942, - }, - dictWord{133, 0, 460}, - dictWord{135, 11, 334}, - dictWord{134, 11, 1650}, - dictWord{4, 0, 199}, - dictWord{139, 0, 34}, - dictWord{5, 10, 601}, - dictWord{ - 8, - 10, - 39, - }, - dictWord{10, 10, 773}, - dictWord{11, 10, 84}, - dictWord{12, 10, 205}, - dictWord{142, 10, 1}, - dictWord{133, 10, 870}, - dictWord{134, 0, 388}, - dictWord{14, 0, 474}, - dictWord{148, 0, 120}, - dictWord{133, 11, 369}, - dictWord{139, 0, 271}, - dictWord{4, 0, 511}, - dictWord{9, 0, 333}, - dictWord{9, 0, 379}, - dictWord{ - 10, - 0, - 602, - }, - dictWord{11, 0, 441}, - dictWord{11, 0, 723}, - dictWord{11, 0, 976}, - dictWord{12, 0, 357}, - dictWord{132, 10, 181}, - dictWord{134, 0, 608}, - dictWord{134, 10, 1652}, - dictWord{22, 0, 49}, - dictWord{137, 11, 338}, - dictWord{140, 0, 988}, - dictWord{134, 0, 617}, - 
dictWord{5, 0, 938}, - dictWord{136, 0, 707}, - dictWord{132, 10, 97}, - dictWord{5, 10, 147}, - dictWord{6, 10, 286}, - dictWord{7, 10, 1362}, - dictWord{141, 10, 176}, - dictWord{6, 0, 756}, - dictWord{ - 134, - 0, - 1149, - }, - dictWord{133, 11, 896}, - dictWord{6, 10, 375}, - dictWord{7, 10, 169}, - dictWord{7, 10, 254}, - dictWord{136, 10, 780}, - dictWord{134, 0, 1583}, - dictWord{135, 10, 1447}, - dictWord{139, 0, 285}, - dictWord{7, 11, 1117}, - dictWord{8, 11, 393}, - dictWord{136, 11, 539}, - dictWord{135, 0, 344}, - dictWord{ - 6, - 0, - 469, - }, - dictWord{7, 0, 1709}, - dictWord{138, 0, 515}, - dictWord{5, 10, 629}, - dictWord{135, 10, 1549}, - dictWord{5, 11, 4}, - dictWord{5, 11, 810}, - dictWord{ - 6, - 11, - 13, - }, - dictWord{6, 11, 538}, - dictWord{6, 11, 1690}, - dictWord{6, 11, 1726}, - dictWord{7, 11, 499}, - dictWord{7, 11, 1819}, - dictWord{8, 11, 148}, - dictWord{ - 8, - 11, - 696, - }, - dictWord{8, 11, 791}, - dictWord{12, 11, 125}, - dictWord{13, 11, 54}, - dictWord{143, 11, 9}, - dictWord{135, 11, 1268}, - dictWord{137, 0, 404}, - dictWord{ - 132, - 0, - 500, - }, - dictWord{5, 0, 68}, - dictWord{134, 0, 383}, - dictWord{11, 0, 216}, - dictWord{139, 0, 340}, - dictWord{4, 11, 925}, - dictWord{5, 11, 803}, - dictWord{ - 8, - 11, - 698, - }, - dictWord{138, 11, 828}, - dictWord{4, 0, 337}, - dictWord{6, 0, 353}, - dictWord{7, 0, 1934}, - dictWord{8, 0, 488}, - dictWord{137, 0, 429}, - dictWord{7, 0, 236}, - dictWord{7, 0, 1795}, - dictWord{8, 0, 259}, - dictWord{9, 0, 135}, - dictWord{9, 0, 177}, - dictWord{9, 0, 860}, - dictWord{10, 0, 825}, - dictWord{11, 0, 115}, - dictWord{ - 11, - 0, - 370, - }, - dictWord{11, 0, 405}, - dictWord{11, 0, 604}, - dictWord{12, 0, 10}, - dictWord{12, 0, 667}, - dictWord{12, 0, 669}, - dictWord{13, 0, 76}, - dictWord{14, 0, 310}, - dictWord{15, 0, 76}, - dictWord{15, 0, 147}, - dictWord{148, 0, 23}, - dictWord{4, 0, 15}, - dictWord{4, 0, 490}, - dictWord{5, 0, 22}, - dictWord{6, 0, 244}, - dictWord{7, 
0, 40}, - dictWord{7, 0, 200}, - dictWord{7, 0, 906}, - dictWord{7, 0, 1199}, - dictWord{9, 0, 616}, - dictWord{10, 0, 716}, - dictWord{11, 0, 635}, - dictWord{11, 0, 801}, - dictWord{ - 140, - 0, - 458, - }, - dictWord{12, 0, 756}, - dictWord{132, 10, 420}, - dictWord{134, 0, 1504}, - dictWord{6, 0, 757}, - dictWord{133, 11, 383}, - dictWord{6, 0, 1266}, - dictWord{ - 135, - 0, - 1735, - }, - dictWord{5, 0, 598}, - dictWord{7, 0, 791}, - dictWord{8, 0, 108}, - dictWord{9, 0, 123}, - dictWord{7, 10, 1570}, - dictWord{140, 10, 542}, - dictWord{ - 142, - 11, - 410, - }, - dictWord{9, 11, 660}, - dictWord{138, 11, 347}, -} diff --git a/vendor/github.com/andybalholm/brotli/symbol_list.go b/vendor/github.com/andybalholm/brotli/symbol_list.go deleted file mode 100644 index c5cb49e5a9..0000000000 --- a/vendor/github.com/andybalholm/brotli/symbol_list.go +++ /dev/null @@ -1,22 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Utilities for building Huffman decoding tables. 
*/ - -type symbolList struct { - storage []uint16 - offset int -} - -func symbolListGet(sl symbolList, i int) uint16 { - return sl.storage[i+sl.offset] -} - -func symbolListPut(sl symbolList, i int, val uint16) { - sl.storage[i+sl.offset] = val -} diff --git a/vendor/github.com/andybalholm/brotli/transform.go b/vendor/github.com/andybalholm/brotli/transform.go deleted file mode 100644 index d2c043a622..0000000000 --- a/vendor/github.com/andybalholm/brotli/transform.go +++ /dev/null @@ -1,641 +0,0 @@ -package brotli - -const ( - transformIdentity = 0 - transformOmitLast1 = 1 - transformOmitLast2 = 2 - transformOmitLast3 = 3 - transformOmitLast4 = 4 - transformOmitLast5 = 5 - transformOmitLast6 = 6 - transformOmitLast7 = 7 - transformOmitLast8 = 8 - transformOmitLast9 = 9 - transformUppercaseFirst = 10 - transformUppercaseAll = 11 - transformOmitFirst1 = 12 - transformOmitFirst2 = 13 - transformOmitFirst3 = 14 - transformOmitFirst4 = 15 - transformOmitFirst5 = 16 - transformOmitFirst6 = 17 - transformOmitFirst7 = 18 - transformOmitFirst8 = 19 - transformOmitFirst9 = 20 - transformShiftFirst = 21 - transformShiftAll = 22 + iota - 22 - numTransformTypes -) - -const transformsMaxCutOff = transformOmitLast9 - -type transforms struct { - prefix_suffix_size uint16 - prefix_suffix []byte - prefix_suffix_map []uint16 - num_transforms uint32 - transforms []byte - params []byte - cutOffTransforms [transformsMaxCutOff + 1]int16 -} - -func transformPrefixId(t *transforms, I int) byte { - return t.transforms[(I*3)+0] -} - -func transformType(t *transforms, I int) byte { - return t.transforms[(I*3)+1] -} - -func transformSuffixId(t *transforms, I int) byte { - return t.transforms[(I*3)+2] -} - -func transformPrefix(t *transforms, I int) []byte { - return t.prefix_suffix[t.prefix_suffix_map[transformPrefixId(t, I)]:] -} - -func transformSuffix(t *transforms, I int) []byte { - return t.prefix_suffix[t.prefix_suffix_map[transformSuffixId(t, I)]:] -} - -/* RFC 
7932 transforms string data */ -const kPrefixSuffix string = "\001 \002, \010 of the \004 of \002s \001.\005 and \004 " + "in \001\"\004 to \002\">\001\n\002. \001]\005 for \003 a \006 " + "that \001'\006 with \006 from \004 by \001(\006. T" + "he \004 on \004 as \004 is \004ing \002\n\t\001:\003ed " + "\002=\"\004 at \003ly \001,\002='\005.com/\007. This \005" + " not \003er \003al \004ful \004ive \005less \004es" + "t \004ize \002\xc2\xa0\004ous \005 the \002e \000" - -var kPrefixSuffixMap = [50]uint16{ - 0x00, - 0x02, - 0x05, - 0x0E, - 0x13, - 0x16, - 0x18, - 0x1E, - 0x23, - 0x25, - 0x2A, - 0x2D, - 0x2F, - 0x32, - 0x34, - 0x3A, - 0x3E, - 0x45, - 0x47, - 0x4E, - 0x55, - 0x5A, - 0x5C, - 0x63, - 0x68, - 0x6D, - 0x72, - 0x77, - 0x7A, - 0x7C, - 0x80, - 0x83, - 0x88, - 0x8C, - 0x8E, - 0x91, - 0x97, - 0x9F, - 0xA5, - 0xA9, - 0xAD, - 0xB2, - 0xB7, - 0xBD, - 0xC2, - 0xC7, - 0xCA, - 0xCF, - 0xD5, - 0xD8, -} - -/* RFC 7932 transforms */ -var kTransformsData = []byte{ - 49, - transformIdentity, - 49, - 49, - transformIdentity, - 0, - 0, - transformIdentity, - 0, - 49, - transformOmitFirst1, - 49, - 49, - transformUppercaseFirst, - 0, - 49, - transformIdentity, - 47, - 0, - transformIdentity, - 49, - 4, - transformIdentity, - 0, - 49, - transformIdentity, - 3, - 49, - transformUppercaseFirst, - 49, - 49, - transformIdentity, - 6, - 49, - transformOmitFirst2, - 49, - 49, - transformOmitLast1, - 49, - 1, - transformIdentity, - 0, - 49, - transformIdentity, - 1, - 0, - transformUppercaseFirst, - 0, - 49, - transformIdentity, - 7, - 49, - transformIdentity, - 9, - 48, - transformIdentity, - 0, - 49, - transformIdentity, - 8, - 49, - transformIdentity, - 5, - 49, - transformIdentity, - 10, - 49, - transformIdentity, - 11, - 49, - transformOmitLast3, - 49, - 49, - transformIdentity, - 13, - 49, - transformIdentity, - 14, - 49, - transformOmitFirst3, - 49, - 49, - transformOmitLast2, - 49, - 49, - transformIdentity, - 15, - 49, - transformIdentity, - 16, - 0, - 
transformUppercaseFirst, - 49, - 49, - transformIdentity, - 12, - 5, - transformIdentity, - 49, - 0, - transformIdentity, - 1, - 49, - transformOmitFirst4, - 49, - 49, - transformIdentity, - 18, - 49, - transformIdentity, - 17, - 49, - transformIdentity, - 19, - 49, - transformIdentity, - 20, - 49, - transformOmitFirst5, - 49, - 49, - transformOmitFirst6, - 49, - 47, - transformIdentity, - 49, - 49, - transformOmitLast4, - 49, - 49, - transformIdentity, - 22, - 49, - transformUppercaseAll, - 49, - 49, - transformIdentity, - 23, - 49, - transformIdentity, - 24, - 49, - transformIdentity, - 25, - 49, - transformOmitLast7, - 49, - 49, - transformOmitLast1, - 26, - 49, - transformIdentity, - 27, - 49, - transformIdentity, - 28, - 0, - transformIdentity, - 12, - 49, - transformIdentity, - 29, - 49, - transformOmitFirst9, - 49, - 49, - transformOmitFirst7, - 49, - 49, - transformOmitLast6, - 49, - 49, - transformIdentity, - 21, - 49, - transformUppercaseFirst, - 1, - 49, - transformOmitLast8, - 49, - 49, - transformIdentity, - 31, - 49, - transformIdentity, - 32, - 47, - transformIdentity, - 3, - 49, - transformOmitLast5, - 49, - 49, - transformOmitLast9, - 49, - 0, - transformUppercaseFirst, - 1, - 49, - transformUppercaseFirst, - 8, - 5, - transformIdentity, - 21, - 49, - transformUppercaseAll, - 0, - 49, - transformUppercaseFirst, - 10, - 49, - transformIdentity, - 30, - 0, - transformIdentity, - 5, - 35, - transformIdentity, - 49, - 47, - transformIdentity, - 2, - 49, - transformUppercaseFirst, - 17, - 49, - transformIdentity, - 36, - 49, - transformIdentity, - 33, - 5, - transformIdentity, - 0, - 49, - transformUppercaseFirst, - 21, - 49, - transformUppercaseFirst, - 5, - 49, - transformIdentity, - 37, - 0, - transformIdentity, - 30, - 49, - transformIdentity, - 38, - 0, - transformUppercaseAll, - 0, - 49, - transformIdentity, - 39, - 0, - transformUppercaseAll, - 49, - 49, - transformIdentity, - 34, - 49, - transformUppercaseAll, - 8, - 49, - 
transformUppercaseFirst, - 12, - 0, - transformIdentity, - 21, - 49, - transformIdentity, - 40, - 0, - transformUppercaseFirst, - 12, - 49, - transformIdentity, - 41, - 49, - transformIdentity, - 42, - 49, - transformUppercaseAll, - 17, - 49, - transformIdentity, - 43, - 0, - transformUppercaseFirst, - 5, - 49, - transformUppercaseAll, - 10, - 0, - transformIdentity, - 34, - 49, - transformUppercaseFirst, - 33, - 49, - transformIdentity, - 44, - 49, - transformUppercaseAll, - 5, - 45, - transformIdentity, - 49, - 0, - transformIdentity, - 33, - 49, - transformUppercaseFirst, - 30, - 49, - transformUppercaseAll, - 30, - 49, - transformIdentity, - 46, - 49, - transformUppercaseAll, - 1, - 49, - transformUppercaseFirst, - 34, - 0, - transformUppercaseFirst, - 33, - 0, - transformUppercaseAll, - 30, - 0, - transformUppercaseAll, - 1, - 49, - transformUppercaseAll, - 33, - 49, - transformUppercaseAll, - 21, - 49, - transformUppercaseAll, - 12, - 0, - transformUppercaseAll, - 5, - 49, - transformUppercaseAll, - 34, - 0, - transformUppercaseAll, - 12, - 0, - transformUppercaseFirst, - 30, - 0, - transformUppercaseAll, - 34, - 0, - transformUppercaseFirst, - 34, -} - -var kBrotliTransforms = transforms{ - 217, - []byte(kPrefixSuffix), - kPrefixSuffixMap[:], - 121, - kTransformsData, - nil, /* no extra parameters */ - [transformsMaxCutOff + 1]int16{0, 12, 27, 23, 42, 63, 56, 48, 59, 64}, -} - -func getTransforms() *transforms { - return &kBrotliTransforms -} - -func toUpperCase(p []byte) int { - if p[0] < 0xC0 { - if p[0] >= 'a' && p[0] <= 'z' { - p[0] ^= 32 - } - - return 1 - } - - /* An overly simplified uppercasing model for UTF-8. */ - if p[0] < 0xE0 { - p[1] ^= 32 - return 2 - } - - /* An arbitrary transform for three byte characters. */ - p[2] ^= 5 - - return 3 -} - -func shiftTransform(word []byte, word_len int, parameter uint16) int { - /* Limited sign extension: scalar < (1 << 24). 
*/ - var scalar uint32 = (uint32(parameter) & 0x7FFF) + (0x1000000 - (uint32(parameter) & 0x8000)) - if word[0] < 0x80 { - /* 1-byte rune / 0sssssss / 7 bit scalar (ASCII). */ - scalar += uint32(word[0]) - - word[0] = byte(scalar & 0x7F) - return 1 - } else if word[0] < 0xC0 { - /* Continuation / 10AAAAAA. */ - return 1 - } else if word[0] < 0xE0 { - /* 2-byte rune / 110sssss AAssssss / 11 bit scalar. */ - if word_len < 2 { - return 1 - } - scalar += uint32(word[1]&0x3F | (word[0]&0x1F)<<6) - word[0] = byte(0xC0 | (scalar>>6)&0x1F) - word[1] = byte(uint32(word[1]&0xC0) | scalar&0x3F) - return 2 - } else if word[0] < 0xF0 { - /* 3-byte rune / 1110ssss AAssssss BBssssss / 16 bit scalar. */ - if word_len < 3 { - return word_len - } - scalar += uint32(word[2])&0x3F | uint32(word[1]&0x3F)<<6 | uint32(word[0]&0x0F)<<12 - word[0] = byte(0xE0 | (scalar>>12)&0x0F) - word[1] = byte(uint32(word[1]&0xC0) | (scalar>>6)&0x3F) - word[2] = byte(uint32(word[2]&0xC0) | scalar&0x3F) - return 3 - } else if word[0] < 0xF8 { - /* 4-byte rune / 11110sss AAssssss BBssssss CCssssss / 21 bit scalar. 
*/ - if word_len < 4 { - return word_len - } - scalar += uint32(word[3])&0x3F | uint32(word[2]&0x3F)<<6 | uint32(word[1]&0x3F)<<12 | uint32(word[0]&0x07)<<18 - word[0] = byte(0xF0 | (scalar>>18)&0x07) - word[1] = byte(uint32(word[1]&0xC0) | (scalar>>12)&0x3F) - word[2] = byte(uint32(word[2]&0xC0) | (scalar>>6)&0x3F) - word[3] = byte(uint32(word[3]&0xC0) | scalar&0x3F) - return 4 - } - - return 1 -} - -func transformDictionaryWord(dst []byte, word []byte, len int, trans *transforms, transform_idx int) int { - var idx int = 0 - var prefix []byte = transformPrefix(trans, transform_idx) - var type_ byte = transformType(trans, transform_idx) - var suffix []byte = transformSuffix(trans, transform_idx) - { - var prefix_len int = int(prefix[0]) - prefix = prefix[1:] - for { - tmp1 := prefix_len - prefix_len-- - if tmp1 == 0 { - break - } - dst[idx] = prefix[0] - idx++ - prefix = prefix[1:] - } - } - { - var t int = int(type_) - var i int = 0 - if t <= transformOmitLast9 { - len -= t - } else if t >= transformOmitFirst1 && t <= transformOmitFirst9 { - var skip int = t - (transformOmitFirst1 - 1) - word = word[skip:] - len -= skip - } - - for i < len { - dst[idx] = word[i] - idx++ - i++ - } - if t == transformUppercaseFirst { - toUpperCase(dst[idx-len:]) - } else if t == transformUppercaseAll { - var uppercase []byte = dst - uppercase = uppercase[idx-len:] - for len > 0 { - var step int = toUpperCase(uppercase) - uppercase = uppercase[step:] - len -= step - } - } else if t == transformShiftFirst { - var param uint16 = uint16(trans.params[transform_idx*2]) + uint16(trans.params[transform_idx*2+1])<<8 - shiftTransform(dst[idx-len:], int(len), param) - } else if t == transformShiftAll { - var param uint16 = uint16(trans.params[transform_idx*2]) + uint16(trans.params[transform_idx*2+1])<<8 - var shift []byte = dst - shift = shift[idx-len:] - for len > 0 { - var step int = shiftTransform(shift, int(len), param) - shift = shift[step:] - len -= step - } - } - } - { - var suffix_len 
int = int(suffix[0]) - suffix = suffix[1:] - for { - tmp2 := suffix_len - suffix_len-- - if tmp2 == 0 { - break - } - dst[idx] = suffix[0] - idx++ - suffix = suffix[1:] - } - return idx - } -} diff --git a/vendor/github.com/andybalholm/brotli/utf8_util.go b/vendor/github.com/andybalholm/brotli/utf8_util.go deleted file mode 100644 index f86de3d209..0000000000 --- a/vendor/github.com/andybalholm/brotli/utf8_util.go +++ /dev/null @@ -1,71 +0,0 @@ -package brotli - -/* Copyright 2013 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Heuristics for deciding about the UTF8-ness of strings. */ - -const kMinUTF8Ratio float64 = 0.75 - -/* Returns 1 if at least min_fraction of the bytes between pos and - pos + length in the (data, mask) ring-buffer is UTF8-encoded, otherwise - returns 0. */ -func parseAsUTF8(symbol *int, input []byte, size uint) uint { - /* ASCII */ - if input[0]&0x80 == 0 { - *symbol = int(input[0]) - if *symbol > 0 { - return 1 - } - } - - /* 2-byte UTF8 */ - if size > 1 && input[0]&0xE0 == 0xC0 && input[1]&0xC0 == 0x80 { - *symbol = (int(input[0])&0x1F)<<6 | int(input[1])&0x3F - if *symbol > 0x7F { - return 2 - } - } - - /* 3-byte UFT8 */ - if size > 2 && input[0]&0xF0 == 0xE0 && input[1]&0xC0 == 0x80 && input[2]&0xC0 == 0x80 { - *symbol = (int(input[0])&0x0F)<<12 | (int(input[1])&0x3F)<<6 | int(input[2])&0x3F - if *symbol > 0x7FF { - return 3 - } - } - - /* 4-byte UFT8 */ - if size > 3 && input[0]&0xF8 == 0xF0 && input[1]&0xC0 == 0x80 && input[2]&0xC0 == 0x80 && input[3]&0xC0 == 0x80 { - *symbol = (int(input[0])&0x07)<<18 | (int(input[1])&0x3F)<<12 | (int(input[2])&0x3F)<<6 | int(input[3])&0x3F - if *symbol > 0xFFFF && *symbol <= 0x10FFFF { - return 4 - } - } - - /* Not UTF8, emit a special symbol above the UTF8-code space */ - *symbol = 0x110000 | int(input[0]) - - return 1 -} - -/* Returns 1 if at least min_fraction of the 
data is UTF8-encoded.*/ -func isMostlyUTF8(data []byte, pos uint, mask uint, length uint, min_fraction float64) bool { - var size_utf8 uint = 0 - var i uint = 0 - for i < length { - var symbol int - var current_data []byte - current_data = data[(pos+i)&mask:] - var bytes_read uint = parseAsUTF8(&symbol, current_data, length-i) - i += bytes_read - if symbol < 0x110000 { - size_utf8 += bytes_read - } - } - - return float64(size_utf8) > min_fraction*float64(length) -} diff --git a/vendor/github.com/andybalholm/brotli/util.go b/vendor/github.com/andybalholm/brotli/util.go deleted file mode 100644 index a84553a639..0000000000 --- a/vendor/github.com/andybalholm/brotli/util.go +++ /dev/null @@ -1,7 +0,0 @@ -package brotli - -func assert(cond bool) { - if !cond { - panic("assertion failure") - } -} diff --git a/vendor/github.com/andybalholm/brotli/write_bits.go b/vendor/github.com/andybalholm/brotli/write_bits.go deleted file mode 100644 index 2d216d7ccd..0000000000 --- a/vendor/github.com/andybalholm/brotli/write_bits.go +++ /dev/null @@ -1,94 +0,0 @@ -package brotli - -/* Copyright 2010 Google Inc. All Rights Reserved. - - Distributed under MIT license. - See file LICENSE for detail or copy at https://opensource.org/licenses/MIT -*/ - -/* Write bits into a byte array. */ - -type bitWriter struct { - dst []byte - - // Data waiting to be written is the low nbits of bits. 
- bits uint64 - nbits uint -} - -func (w *bitWriter) writeBits(nb uint, b uint64) { - w.bits |= b << w.nbits - w.nbits += nb - if w.nbits >= 32 { - bits := w.bits - w.bits >>= 32 - w.nbits -= 32 - w.dst = append(w.dst, - byte(bits), - byte(bits>>8), - byte(bits>>16), - byte(bits>>24), - ) - } -} - -func (w *bitWriter) writeSingleBit(bit bool) { - if bit { - w.writeBits(1, 1) - } else { - w.writeBits(1, 0) - } -} - -func (w *bitWriter) jumpToByteBoundary() { - dst := w.dst - for w.nbits != 0 { - dst = append(dst, byte(w.bits)) - w.bits >>= 8 - if w.nbits > 8 { // Avoid underflow - w.nbits -= 8 - } else { - w.nbits = 0 - } - } - w.bits = 0 - w.dst = dst -} - -func (w *bitWriter) writeBytes(b []byte) { - if w.nbits&7 != 0 { - panic("writeBytes with unfinished bits") - } - for w.nbits != 0 { - w.dst = append(w.dst, byte(w.bits)) - w.bits >>= 8 - w.nbits -= 8 - } - w.dst = append(w.dst, b...) -} - -func (w *bitWriter) getPos() uint { - return uint(len(w.dst)<<3) + w.nbits -} - -func (w *bitWriter) rewind(p uint) { - w.bits = uint64(w.dst[p>>3] & byte((1<<(p&7))-1)) - w.nbits = p & 7 - w.dst = w.dst[:p>>3] -} - -func (w *bitWriter) updateBits(n_bits uint, bits uint32, pos uint) { - for n_bits > 0 { - var byte_pos uint = pos >> 3 - var n_unchanged_bits uint = pos & 7 - var n_changed_bits uint = brotli_min_size_t(n_bits, 8-n_unchanged_bits) - var total_bits uint = n_unchanged_bits + n_changed_bits - var mask uint32 = (^((1 << total_bits) - 1)) | ((1 << n_unchanged_bits) - 1) - var unchanged_bits uint32 = uint32(w.dst[byte_pos]) & mask - var changed_bits uint32 = bits & ((1 << n_changed_bits) - 1) - w.dst[byte_pos] = byte(changed_bits<>= n_changed_bits - pos += n_changed_bits - } -} diff --git a/vendor/github.com/andybalholm/brotli/writer.go b/vendor/github.com/andybalholm/brotli/writer.go deleted file mode 100644 index 63676b4673..0000000000 --- a/vendor/github.com/andybalholm/brotli/writer.go +++ /dev/null @@ -1,118 +0,0 @@ -package brotli - 
-import ( - "errors" - "io" -) - -const ( - BestSpeed = 0 - BestCompression = 11 - DefaultCompression = 6 -) - -// WriterOptions configures Writer. -type WriterOptions struct { - // Quality controls the compression-speed vs compression-density trade-offs. - // The higher the quality, the slower the compression. Range is 0 to 11. - Quality int - // LGWin is the base 2 logarithm of the sliding window size. - // Range is 10 to 24. 0 indicates automatic configuration based on Quality. - LGWin int -} - -var ( - errEncode = errors.New("brotli: encode error") - errWriterClosed = errors.New("brotli: Writer is closed") -) - -// Writes to the returned writer are compressed and written to dst. -// It is the caller's responsibility to call Close on the Writer when done. -// Writes may be buffered and not flushed until Close. -func NewWriter(dst io.Writer) *Writer { - return NewWriterLevel(dst, DefaultCompression) -} - -// NewWriterLevel is like NewWriter but specifies the compression level instead -// of assuming DefaultCompression. -// The compression level can be DefaultCompression or any integer value between -// BestSpeed and BestCompression inclusive. -func NewWriterLevel(dst io.Writer, level int) *Writer { - return NewWriterOptions(dst, WriterOptions{ - Quality: level, - }) -} - -// NewWriterOptions is like NewWriter but specifies WriterOptions -func NewWriterOptions(dst io.Writer, options WriterOptions) *Writer { - w := new(Writer) - w.options = options - w.Reset(dst) - return w -} - -// Reset discards the Writer's state and makes it equivalent to the result of -// its original state from NewWriter or NewWriterLevel, but writing to dst -// instead. This permits reusing a Writer rather than allocating a new one. 
-func (w *Writer) Reset(dst io.Writer) { - encoderInitState(w) - w.params.quality = w.options.Quality - if w.options.LGWin > 0 { - w.params.lgwin = uint(w.options.LGWin) - } - w.dst = dst -} - -func (w *Writer) writeChunk(p []byte, op int) (n int, err error) { - if w.dst == nil { - return 0, errWriterClosed - } - if w.err != nil { - return 0, w.err - } - - for { - availableIn := uint(len(p)) - nextIn := p - success := encoderCompressStream(w, op, &availableIn, &nextIn) - bytesConsumed := len(p) - int(availableIn) - p = p[bytesConsumed:] - n += bytesConsumed - if !success { - return n, errEncode - } - - if len(p) == 0 || w.err != nil { - return n, w.err - } - } -} - -// Flush outputs encoded data for all input provided to Write. The resulting -// output can be decoded to match all input before Flush, but the stream is -// not yet complete until after Close. -// Flush has a negative impact on compression. -func (w *Writer) Flush() error { - _, err := w.writeChunk(nil, operationFlush) - return err -} - -// Close flushes remaining data to the decorated writer. -func (w *Writer) Close() error { - // If stream is already closed, it is reported by `writeChunk`. - _, err := w.writeChunk(nil, operationFinish) - w.dst = nil - return err -} - -// Write implements io.Writer. Flush or Close must be called to ensure that the -// encoded bytes are actually flushed to the underlying Writer. 
-func (w *Writer) Write(p []byte) (n int, err error) { - return w.writeChunk(p, operationProcess) -} - -type nopCloser struct { - io.Writer -} - -func (nopCloser) Close() error { return nil } diff --git a/vendor/github.com/dsnet/compress/.travis.yml b/vendor/github.com/dsnet/compress/.travis.yml deleted file mode 100644 index 7e79820edc..0000000000 --- a/vendor/github.com/dsnet/compress/.travis.yml +++ /dev/null @@ -1,36 +0,0 @@ -sudo: false -language: go -before_install: - - curl -L https://github.com/google/brotli/archive/v1.0.2.tar.gz | tar -zxv - - (cd brotli-1.0.2 && mkdir out && cd out && ../configure-cmake && make && sudo make install) - - rm -rf brotli-1.0.2 - - curl -L https://github.com/facebook/zstd/archive/v1.3.2.tar.gz | tar -zxv - - (cd zstd-1.3.2 && sudo make install) - - rm -rf zstd-1.3.2 - - sudo ldconfig - - mkdir /tmp/go1.12 - - curl -L -s https://dl.google.com/go/go1.12.linux-amd64.tar.gz | tar -zxf - -C /tmp/go1.12 --strip-components 1 - - unset GOROOT - - (GO111MODULE=on /tmp/go1.12/bin/go mod vendor) - - (cd /tmp && GO111MODULE=on /tmp/go1.12/bin/go get golang.org/x/lint/golint@8f45f776aaf18cebc8d65861cc70c33c60471952) - - (cd /tmp && GO111MODULE=on /tmp/go1.12/bin/go get honnef.co/go/tools/cmd/staticcheck@2019.1) -matrix: - include: - - go: 1.9.x - script: - - go test -v -race ./... - - go: 1.10.x - script: - - go test -v -race ./... - - go: 1.11.x - script: - - go test -v -race ./... - - go: 1.12.x - script: - - ./ztest.sh - - go: master - script: - - go test -v -race ./... - allow_failures: - - go: master - fast_finish: true diff --git a/vendor/github.com/dsnet/compress/LICENSE.md b/vendor/github.com/dsnet/compress/LICENSE.md deleted file mode 100644 index 945b396cfd..0000000000 --- a/vendor/github.com/dsnet/compress/LICENSE.md +++ /dev/null @@ -1,24 +0,0 @@ -Copyright © 2015, Joe Tsai and The Go Authors. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this -list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright notice, -this list of conditions and the following disclaimer in the documentation and/or -other materials provided with the distribution. -* Neither the copyright holder nor the names of its contributors may be used to -endorse or promote products derived from this software without specific prior -written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/dsnet/compress/README.md b/vendor/github.com/dsnet/compress/README.md deleted file mode 100644 index 63afb01c4c..0000000000 --- a/vendor/github.com/dsnet/compress/README.md +++ /dev/null @@ -1,75 +0,0 @@ -# Collection of compression libraries for Go # - -[![GoDoc](https://godoc.org/github.com/dsnet/compress/cmp?status.svg)](https://godoc.org/github.com/dsnet/compress) -[![Build Status](https://travis-ci.org/dsnet/compress.svg?branch=master)](https://travis-ci.org/dsnet/compress) -[![Report Card](https://goreportcard.com/badge/github.com/dsnet/compress)](https://goreportcard.com/report/github.com/dsnet/compress) - -## Introduction ## - -**NOTE: This library is in active development. As such, there are no guarantees about the stability of the API. The author reserves the right to arbitrarily break the API for any reason.** - -This repository hosts a collection of compression related libraries. The goal of this project is to provide pure Go implementations for popular compression algorithms beyond what the Go standard library provides. The goals for these packages are as follows: -* Maintainable: That the code remains well documented, well tested, readable, easy to maintain, and easy to verify that it conforms to the specification for the format being implemented. -* Performant: To be able to compress and decompress within at least 80% of the rates that the C implementations are able to achieve. -* Flexible: That the code provides low-level and fine granularity control over the compression streams similar to what the C APIs would provide. - -Of these three, the first objective is often at odds with the other two objectives and provides interesting challenges. Higher performance can often be achieved by muddling abstraction layers or using non-intuitive low-level primitives. Also, more features and functionality, while useful in some situations, often complicates the API. 
Thus, this package will attempt to satisfy all the goals, but will defer to favoring maintainability when the performance or flexibility benefits are not significant enough. - - -## Library Status ## - -For the packages available, only some features are currently implemented: - -| Package | Reader | Writer | -| ------- | :----: | :----: | -| brotli | :white_check_mark: | | -| bzip2 | :white_check_mark: | :white_check_mark: | -| flate | :white_check_mark: | | -| xflate | :white_check_mark: | :white_check_mark: | - -This library is in active development. As such, there are no guarantees about the stability of the API. The author reserves the right to arbitrarily break the API for any reason. When the library becomes more mature, it is planned to eventually conform to some strict versioning scheme like [Semantic Versioning](http://semver.org/). - -However, in the meanwhile, this library does provide some basic API guarantees. For the types defined below, the method signatures are guaranteed to not change. Note that the author still reserves the right to change the fields within each ```Reader``` and ```Writer``` structs. -```go -type ReaderConfig struct { ... } -type Reader struct { ... } - func NewReader(io.Reader, *ReaderConfig) (*Reader, error) { ... } - func (*Reader) Read([]byte) (int, error) { ... } - func (*Reader) Close() error { ... } - -type WriterConfig struct { ... } -type Writer struct { ... } - func NewWriter(io.Writer, *WriterConfig) (*Writer, error) { ... } - func (*Writer) Write([]byte) (int, error) { ... } - func (*Writer) Close() error { ... } -``` - -To see what work still remains, see the [Task List](https://github.com/dsnet/compress/wiki/Task-List). - -## Performance ## - -See [Performance Metrics](https://github.com/dsnet/compress/wiki/Performance-Metrics). - - -## Frequently Asked Questions ## - -See [Frequently Asked Questions](https://github.com/dsnet/compress/wiki/Frequently-Asked-Questions). 
- - -## Installation ## - -Run the command: - -```go get -u github.com/dsnet/compress``` - -This library requires `Go1.9` or higher in order to build. - - -## Packages ## - -| Package | Description | -| :------ | :---------- | -| [brotli](http://godoc.org/github.com/dsnet/compress/brotli) | Package brotli implements the Brotli format, described in RFC 7932. | -| [bzip2](http://godoc.org/github.com/dsnet/compress/bzip2) | Package bzip2 implements the BZip2 compressed data format. | -| [flate](http://godoc.org/github.com/dsnet/compress/flate) | Package flate implements the DEFLATE format, described in RFC 1951. | -| [xflate](http://godoc.org/github.com/dsnet/compress/xflate) | Package xflate implements the XFLATE format, an random-access extension to DEFLATE. | diff --git a/vendor/github.com/dsnet/compress/api.go b/vendor/github.com/dsnet/compress/api.go deleted file mode 100644 index f80a9232aa..0000000000 --- a/vendor/github.com/dsnet/compress/api.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// Package compress is a collection of compression libraries. -package compress - -import ( - "bufio" - "io" - - "github.com/dsnet/compress/internal/errors" -) - -// The Error interface identifies all compression related errors. -type Error interface { - error - CompressError() - - // IsDeprecated reports the use of a deprecated and unsupported feature. - IsDeprecated() bool - - // IsCorrupted reports whether the input stream was corrupted. - IsCorrupted() bool -} - -var _ Error = errors.Error{} - -// ByteReader is an interface accepted by all decompression Readers. -// It guarantees that the decompressor never reads more data than is necessary -// from the underlying io.Reader. 
-type ByteReader interface { - io.Reader - io.ByteReader -} - -var _ ByteReader = (*bufio.Reader)(nil) - -// BufferedReader is an interface accepted by all decompression Readers. -// It guarantees that the decompressor never reads more data than is necessary -// from the underlying io.Reader. Since BufferedReader allows a decompressor -// to peek at bytes further along in the stream without advancing the read -// pointer, decompression can experience a significant performance gain when -// provided a reader that satisfies this interface. Thus, a decompressor will -// prefer this interface over ByteReader for performance reasons. -// -// The bufio.Reader satisfies this interface. -type BufferedReader interface { - io.Reader - - // Buffered returns the number of bytes currently buffered. - // - // This value becomes invalid following the next Read/Discard operation. - Buffered() int - - // Peek returns the next n bytes without advancing the reader. - // - // If Peek returns fewer than n bytes, it also returns an error explaining - // why the peek is short. Peek must support peeking of at least 8 bytes. - // If 0 <= n <= Buffered(), Peek is guaranteed to succeed without reading - // from the underlying io.Reader. - // - // This result becomes invalid following the next Read/Discard operation. - Peek(n int) ([]byte, error) - - // Discard skips the next n bytes, returning the number of bytes discarded. - // - // If Discard skips fewer than n bytes, it also returns an error. - // If 0 <= n <= Buffered(), Discard is guaranteed to succeed without reading - // from the underlying io.Reader. - Discard(n int) (int, error) -} - -var _ BufferedReader = (*bufio.Reader)(nil) diff --git a/vendor/github.com/dsnet/compress/bzip2/bwt.go b/vendor/github.com/dsnet/compress/bzip2/bwt.go deleted file mode 100644 index 44a2541fe4..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/bwt.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2015, Joe Tsai. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package bzip2 - -import "github.com/dsnet/compress/bzip2/internal/sais" - -// The Burrows-Wheeler Transform implementation used here is based on the -// Suffix Array by Induced Sorting (SA-IS) methodology by Nong, Zhang, and Chan. -// This implementation uses the sais algorithm originally written by Yuta Mori. -// -// The SA-IS algorithm runs in O(n) and outputs a Suffix Array. There is a -// mathematical relationship between Suffix Arrays and the Burrows-Wheeler -// Transform, such that a SA can be converted to a BWT in O(n) time. -// -// References: -// http://www.hpl.hp.com/techreports/Compaq-DEC/SRC-RR-124.pdf -// https://github.com/cscott/compressjs/blob/master/lib/BWT.js -// https://www.quora.com/How-can-I-optimize-burrows-wheeler-transform-and-inverse-transform-to-work-in-O-n-time-O-n-space -type burrowsWheelerTransform struct { - buf []byte - sa []int - perm []uint32 -} - -func (bwt *burrowsWheelerTransform) Encode(buf []byte) (ptr int) { - if len(buf) == 0 { - return -1 - } - - // TODO(dsnet): Find a way to avoid the duplicate input string method. - // We only need to do this because suffix arrays (by definition) only - // operate non-wrapped suffixes of a string. On the other hand, - // the BWT specifically used in bzip2 operate on a strings that wrap-around - // when being sorted. - - // Step 1: Concatenate the input string to itself so that we can use the - // suffix array algorithm for bzip2's variant of BWT. - n := len(buf) - bwt.buf = append(append(bwt.buf[:0], buf...), buf...) - if cap(bwt.sa) < 2*n { - bwt.sa = make([]int, 2*n) - } - t := bwt.buf[:2*n] - sa := bwt.sa[:2*n] - - // Step 2: Compute the suffix array (SA). The input string, t, will not be - // modified, while the results will be written to the output, sa. - sais.ComputeSA(t, sa) - - // Step 3: Convert the SA to a BWT. 
Since ComputeSA does not mutate the - // input, we have two copies of the input; in buf and buf2. Thus, we write - // the transformation to buf, while using buf2. - var j int - buf2 := t[n:] - for _, i := range sa { - if i < n { - if i == 0 { - ptr = j - i = n - } - buf[j] = buf2[i-1] - j++ - } - } - return ptr -} - -func (bwt *burrowsWheelerTransform) Decode(buf []byte, ptr int) { - if len(buf) == 0 { - return - } - - // Step 1: Compute cumm, where cumm[ch] reports the total number of - // characters that precede the character ch in the alphabet. - var cumm [256]int - for _, v := range buf { - cumm[v]++ - } - var sum int - for i, v := range cumm { - cumm[i] = sum - sum += v - } - - // Step 2: Compute perm, where perm[ptr] contains a pointer to the next - // byte in buf and the next pointer in perm itself. - if cap(bwt.perm) < len(buf) { - bwt.perm = make([]uint32, len(buf)) - } - perm := bwt.perm[:len(buf)] - for i, b := range buf { - perm[cumm[b]] = uint32(i) - cumm[b]++ - } - - // Step 3: Follow each pointer in perm to the next byte, starting with the - // origin pointer. - if cap(bwt.buf) < len(buf) { - bwt.buf = make([]byte, len(buf)) - } - buf2 := bwt.buf[:len(buf)] - i := perm[ptr] - for j := range buf2 { - buf2[j] = buf[i] - i = perm[i] - } - copy(buf, buf2) -} diff --git a/vendor/github.com/dsnet/compress/bzip2/common.go b/vendor/github.com/dsnet/compress/bzip2/common.go deleted file mode 100644 index c633981526..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/common.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// Package bzip2 implements the BZip2 compressed data format. 
-// -// Canonical C implementation: -// http://bzip.org -// -// Unofficial format specification: -// https://github.com/dsnet/compress/blob/master/doc/bzip2-format.pdf -package bzip2 - -import ( - "fmt" - "hash/crc32" - - "github.com/dsnet/compress/internal" - "github.com/dsnet/compress/internal/errors" -) - -// There does not exist a formal specification of the BZip2 format. As such, -// much of this work is derived by either reverse engineering the original C -// source code or using secondary sources. -// -// Significant amounts of fuzz testing is done to ensure that outputs from -// this package is properly decoded by the C library. Furthermore, we test that -// both this package and the C library agree about what inputs are invalid. -// -// Compression stack: -// Run-length encoding 1 (RLE1) -// Burrows-Wheeler transform (BWT) -// Move-to-front transform (MTF) -// Run-length encoding 2 (RLE2) -// Prefix encoding (PE) -// -// References: -// http://bzip.org/ -// https://en.wikipedia.org/wiki/Bzip2 -// https://code.google.com/p/jbzip2/ - -const ( - BestSpeed = 1 - BestCompression = 9 - DefaultCompression = 6 -) - -const ( - hdrMagic = 0x425a // Hex of "BZ" - blkMagic = 0x314159265359 // BCD of PI - endMagic = 0x177245385090 // BCD of sqrt(PI) - - blockSize = 100000 -) - -func errorf(c int, f string, a ...interface{}) error { - return errors.Error{Code: c, Pkg: "bzip2", Msg: fmt.Sprintf(f, a...)} -} - -func panicf(c int, f string, a ...interface{}) { - errors.Panic(errorf(c, f, a...)) -} - -// errWrap converts a lower-level errors.Error to be one from this package. -// The replaceCode passed in will be used to replace the code for any errors -// with the errors.Invalid code. -// -// For the Reader, set this to errors.Corrupted. -// For the Writer, set this to errors.Internal. 
-func errWrap(err error, replaceCode int) error { - if cerr, ok := err.(errors.Error); ok { - if errors.IsInvalid(cerr) { - cerr.Code = replaceCode - } - err = errorf(cerr.Code, "%s", cerr.Msg) - } - return err -} - -var errClosed = errorf(errors.Closed, "") - -// crc computes the CRC-32 used by BZip2. -// -// The CRC-32 computation in bzip2 treats bytes as having bits in big-endian -// order. That is, the MSB is read before the LSB. Thus, we can use the -// standard library version of CRC-32 IEEE with some minor adjustments. -// -// The byte array is used as an intermediate buffer to swap the bits of every -// byte of the input. -type crc struct { - val uint32 - buf [256]byte -} - -// update computes the CRC-32 of appending buf to c. -func (c *crc) update(buf []byte) { - cval := internal.ReverseUint32(c.val) - for len(buf) > 0 { - n := len(buf) - if n > len(c.buf) { - n = len(c.buf) - } - for i, b := range buf[:n] { - c.buf[i] = internal.ReverseLUT[b] - } - cval = crc32.Update(cval, crc32.IEEETable, c.buf[:n]) - buf = buf[n:] - } - c.val = internal.ReverseUint32(cval) -} diff --git a/vendor/github.com/dsnet/compress/bzip2/fuzz_off.go b/vendor/github.com/dsnet/compress/bzip2/fuzz_off.go deleted file mode 100644 index ddd32f5065..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/fuzz_off.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2016, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// +build !gofuzz - -// This file exists to suppress fuzzing details from release builds. 
- -package bzip2 - -type fuzzReader struct{} - -func (*fuzzReader) updateChecksum(int64, uint32) {} diff --git a/vendor/github.com/dsnet/compress/bzip2/fuzz_on.go b/vendor/github.com/dsnet/compress/bzip2/fuzz_on.go deleted file mode 100644 index 54122351c5..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/fuzz_on.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2016, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// +build gofuzz - -// This file exists to export internal implementation details for fuzz testing. - -package bzip2 - -func ForwardBWT(buf []byte) (ptr int) { - var bwt burrowsWheelerTransform - return bwt.Encode(buf) -} - -func ReverseBWT(buf []byte, ptr int) { - var bwt burrowsWheelerTransform - bwt.Decode(buf, ptr) -} - -type fuzzReader struct { - Checksums Checksums -} - -// updateChecksum updates Checksums. -// -// If a valid pos is provided, it appends the (pos, val) pair to the slice. -// Otherwise, it will update the last record with the new value. -func (fr *fuzzReader) updateChecksum(pos int64, val uint32) { - if pos >= 0 { - fr.Checksums = append(fr.Checksums, Checksum{pos, val}) - } else { - fr.Checksums[len(fr.Checksums)-1].Value = val - } -} - -type Checksum struct { - Offset int64 // Bit offset of the checksum - Value uint32 // Checksum value -} - -type Checksums []Checksum - -// Apply overwrites all checksum fields in d with the ones in cs. -func (cs Checksums) Apply(d []byte) []byte { - d = append([]byte(nil), d...) - for _, c := range cs { - setU32(d, c.Offset, c.Value) - } - return d -} - -func setU32(d []byte, pos int64, val uint32) { - for i := uint(0); i < 32; i++ { - bpos := uint64(pos) + uint64(i) - d[bpos/8] &= ^byte(1 << (7 - bpos%8)) - d[bpos/8] |= byte(val>>(31-i)) << (7 - bpos%8) - } -} - -// Verify checks that all checksum fields in d matches those in cs. 
-func (cs Checksums) Verify(d []byte) bool { - for _, c := range cs { - if getU32(d, c.Offset) != c.Value { - return false - } - } - return true -} - -func getU32(d []byte, pos int64) (val uint32) { - for i := uint(0); i < 32; i++ { - bpos := uint64(pos) + uint64(i) - val |= (uint32(d[bpos/8] >> (7 - bpos%8))) << (31 - i) - } - return val -} diff --git a/vendor/github.com/dsnet/compress/bzip2/internal/sais/common.go b/vendor/github.com/dsnet/compress/bzip2/internal/sais/common.go deleted file mode 100644 index cd4eee82db..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/internal/sais/common.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// Package sais implements a linear time suffix array algorithm. -package sais - -//go:generate go run sais_gen.go byte sais_byte.go -//go:generate go run sais_gen.go int sais_int.go - -// This package ports the C sais implementation by Yuta Mori. The ports are -// located in sais_byte.go and sais_int.go, which are identical to each other -// except for the types. Since Go does not support generics, we use generators to -// create the two files. -// -// References: -// https://sites.google.com/site/yuta256/sais -// https://www.researchgate.net/publication/221313676_Linear_Time_Suffix_Array_Construction_Using_D-Critical_Substrings -// https://www.researchgate.net/publication/224176324_Two_Efficient_Algorithms_for_Linear_Time_Suffix_Array_Construction - -// ComputeSA computes the suffix array of t and places the result in sa. -// Both t and sa must be the same length. 
-func ComputeSA(t []byte, sa []int) { - if len(sa) != len(t) { - panic("mismatching sizes") - } - computeSA_byte(t, sa, 0, len(t), 256) -} diff --git a/vendor/github.com/dsnet/compress/bzip2/internal/sais/sais_byte.go b/vendor/github.com/dsnet/compress/bzip2/internal/sais/sais_byte.go deleted file mode 100644 index 01b8529b4e..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/internal/sais/sais_byte.go +++ /dev/null @@ -1,661 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// Code generated by sais_gen.go. DO NOT EDIT. - -// ==================================================== -// Copyright (c) 2008-2010 Yuta Mori All Rights Reserved. -// -// Permission is hereby granted, free of charge, to any person -// obtaining a copy of this software and associated documentation -// files (the "Software"), to deal in the Software without -// restriction, including without limitation the rights to use, -// copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the -// Software is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice shall be -// included in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -// OTHER DEALINGS IN THE SOFTWARE. 
-// ==================================================== - -package sais - -func getCounts_byte(T []byte, C []int, n, k int) { - var i int - for i = 0; i < k; i++ { - C[i] = 0 - } - for i = 0; i < n; i++ { - C[T[i]]++ - } -} - -func getBuckets_byte(C, B []int, k int, end bool) { - var i, sum int - if end { - for i = 0; i < k; i++ { - sum += C[i] - B[i] = sum - } - } else { - for i = 0; i < k; i++ { - sum += C[i] - B[i] = sum - C[i] - } - } -} - -func sortLMS1_byte(T []byte, SA, C, B []int, n, k int) { - var b, i, j int - var c0, c1 int - - // Compute SAl. - if &C[0] == &B[0] { - getCounts_byte(T, C, n, k) - } - getBuckets_byte(C, B, k, false) // Find starts of buckets - j = n - 1 - c1 = int(T[j]) - b = B[c1] - j-- - if int(T[j]) < c1 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - for i = 0; i < n; i++ { - if j = SA[i]; j > 0 { - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - j-- - if int(T[j]) < c1 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - SA[i] = 0 - } else if j < 0 { - SA[i] = ^j - } - } - - // Compute SAs. - if &C[0] == &B[0] { - getCounts_byte(T, C, n, k) - } - getBuckets_byte(C, B, k, true) // Find ends of buckets - c1 = 0 - b = B[c1] - for i = n - 1; i >= 0; i-- { - if j = SA[i]; j > 0 { - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - j-- - b-- - if int(T[j]) > c1 { - SA[b] = ^(j + 1) - } else { - SA[b] = j - } - SA[i] = 0 - } - } -} - -func postProcLMS1_byte(T []byte, SA []int, n, m int) int { - var i, j, p, q, plen, qlen, name int - var c0, c1 int - var diff bool - - // Compact all the sorted substrings into the first m items of SA. - // 2*m must be not larger than n (provable). - for i = 0; SA[i] < 0; i++ { - SA[i] = ^SA[i] - } - if i < m { - for j, i = i, i+1; ; i++ { - if p = SA[i]; p < 0 { - SA[j] = ^p - j++ - SA[i] = 0 - if j == m { - break - } - } - } - } - - // Store the length of all substrings. 
- i = n - 1 - j = n - 1 - c0 = int(T[n-1]) - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - for i >= 0 { - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 > c1 { - break - } - } - if i >= 0 { - SA[m+((i+1)>>1)] = j - i - j = i + 1 - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - } - } - - // Find the lexicographic names of all substrings. - name = 0 - qlen = 0 - for i, q = 0, n; i < m; i++ { - p = SA[i] - plen = SA[m+(p>>1)] - diff = true - if (plen == qlen) && ((q + plen) < n) { - for j = 0; (j < plen) && (T[p+j] == T[q+j]); j++ { - } - if j == plen { - diff = false - } - } - if diff { - name++ - q = p - qlen = plen - } - SA[m+(p>>1)] = name - } - return name -} - -func sortLMS2_byte(T []byte, SA, C, B, D []int, n, k int) { - var b, i, j, t, d int - var c0, c1 int - - // Compute SAl. - getBuckets_byte(C, B, k, false) // Find starts of buckets - j = n - 1 - c1 = int(T[j]) - b = B[c1] - j-- - if int(T[j]) < c1 { - t = 1 - } else { - t = 0 - } - j += n - if t&1 > 0 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - for i, d = 0, 0; i < n; i++ { - if j = SA[i]; j > 0 { - if n <= j { - d += 1 - j -= n - } - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - j-- - t = int(c0) << 1 - if int(T[j]) < c1 { - t |= 1 - } - if D[t] != d { - j += n - D[t] = d - } - if t&1 > 0 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - SA[i] = 0 - } else if j < 0 { - SA[i] = ^j - } - } - for i = n - 1; 0 <= i; i-- { - if SA[i] > 0 { - if SA[i] < n { - SA[i] += n - for j = i - 1; SA[j] < n; j-- { - } - SA[j] -= n - i = j - } - } - } - - // Compute SAs. 
- getBuckets_byte(C, B, k, true) // Find ends of buckets - c1 = 0 - b = B[c1] - for i, d = n-1, d+1; i >= 0; i-- { - if j = SA[i]; j > 0 { - if n <= j { - d += 1 - j -= n - } - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - j-- - t = int(c0) << 1 - if int(T[j]) > c1 { - t |= 1 - } - if D[t] != d { - j += n - D[t] = d - } - b-- - if t&1 > 0 { - SA[b] = ^(j + 1) - } else { - SA[b] = j - } - SA[i] = 0 - } - } -} - -func postProcLMS2_byte(SA []int, n, m int) int { - var i, j, d, name int - - // Compact all the sorted LMS substrings into the first m items of SA. - name = 0 - for i = 0; SA[i] < 0; i++ { - j = ^SA[i] - if n <= j { - name += 1 - } - SA[i] = j - } - if i < m { - for d, i = i, i+1; ; i++ { - if j = SA[i]; j < 0 { - j = ^j - if n <= j { - name += 1 - } - SA[d] = j - d++ - SA[i] = 0 - if d == m { - break - } - } - } - } - if name < m { - // Store the lexicographic names. - for i, d = m-1, name+1; 0 <= i; i-- { - if j = SA[i]; n <= j { - j -= n - d-- - } - SA[m+(j>>1)] = d - } - } else { - // Unset flags. - for i = 0; i < m; i++ { - if j = SA[i]; n <= j { - j -= n - SA[i] = j - } - } - } - return name -} - -func induceSA_byte(T []byte, SA, C, B []int, n, k int) { - var b, i, j int - var c0, c1 int - - // Compute SAl. - if &C[0] == &B[0] { - getCounts_byte(T, C, n, k) - } - getBuckets_byte(C, B, k, false) // Find starts of buckets - j = n - 1 - c1 = int(T[j]) - b = B[c1] - if j > 0 && int(T[j-1]) < c1 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - for i = 0; i < n; i++ { - j = SA[i] - SA[i] = ^j - if j > 0 { - j-- - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - if j > 0 && int(T[j-1]) < c1 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - } - } - - // Compute SAs. 
- if &C[0] == &B[0] { - getCounts_byte(T, C, n, k) - } - getBuckets_byte(C, B, k, true) // Find ends of buckets - c1 = 0 - b = B[c1] - for i = n - 1; i >= 0; i-- { - if j = SA[i]; j > 0 { - j-- - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - b-- - if (j == 0) || (int(T[j-1]) > c1) { - SA[b] = ^j - } else { - SA[b] = j - } - } else { - SA[i] = ^j - } - } -} - -func computeSA_byte(T []byte, SA []int, fs, n, k int) { - const ( - minBucketSize = 512 - sortLMS2Limit = 0x3fffffff - ) - - var C, B, D, RA []int - var bo int // Offset of B relative to SA - var b, i, j, m, p, q, name, newfs int - var c0, c1 int - var flags uint - - if k <= minBucketSize { - C = make([]int, k) - if k <= fs { - bo = n + fs - k - B = SA[bo:] - flags = 1 - } else { - B = make([]int, k) - flags = 3 - } - } else if k <= fs { - C = SA[n+fs-k:] - if k <= fs-k { - bo = n + fs - 2*k - B = SA[bo:] - flags = 0 - } else if k <= 4*minBucketSize { - B = make([]int, k) - flags = 2 - } else { - B = C - flags = 8 - } - } else { - C = make([]int, k) - B = C - flags = 4 | 8 - } - if n <= sortLMS2Limit && 2 <= (n/k) { - if flags&1 > 0 { - if 2*k <= fs-k { - flags |= 32 - } else { - flags |= 16 - } - } else if flags == 0 && 2*k <= (fs-2*k) { - flags |= 32 - } - } - - // Stage 1: Reduce the problem by at least 1/2. - // Sort all the LMS-substrings. 
- getCounts_byte(T, C, n, k) - getBuckets_byte(C, B, k, true) // Find ends of buckets - for i = 0; i < n; i++ { - SA[i] = 0 - } - b = -1 - i = n - 1 - j = n - m = 0 - c0 = int(T[n-1]) - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - for i >= 0 { - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 > c1 { - break - } - } - if i >= 0 { - if b >= 0 { - SA[b] = j - } - B[c1]-- - b = B[c1] - j = i - m++ - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - } - } - - if m > 1 { - if flags&(16|32) > 0 { - if flags&16 > 0 { - D = make([]int, 2*k) - } else { - D = SA[bo-2*k:] - } - B[T[j+1]]++ - for i, j = 0, 0; i < k; i++ { - j += C[i] - if B[i] != j { - SA[B[i]] += n - } - D[i] = 0 - D[i+k] = 0 - } - sortLMS2_byte(T, SA, C, B, D, n, k) - name = postProcLMS2_byte(SA, n, m) - } else { - sortLMS1_byte(T, SA, C, B, n, k) - name = postProcLMS1_byte(T, SA, n, m) - } - } else if m == 1 { - SA[b] = j + 1 - name = 1 - } else { - name = 0 - } - - // Stage 2: Solve the reduced problem. - // Recurse if names are not yet unique. 
- if name < m { - newfs = n + fs - 2*m - if flags&(1|4|8) == 0 { - if k+name <= newfs { - newfs -= k - } else { - flags |= 8 - } - } - RA = SA[m+newfs:] - for i, j = m+(n>>1)-1, m-1; m <= i; i-- { - if SA[i] != 0 { - RA[j] = SA[i] - 1 - j-- - } - } - computeSA_int(RA, SA, newfs, m, name) - - i = n - 1 - j = m - 1 - c0 = int(T[n-1]) - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - for i >= 0 { - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 > c1 { - break - } - } - if i >= 0 { - RA[j] = i + 1 - j-- - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - } - } - for i = 0; i < m; i++ { - SA[i] = RA[SA[i]] - } - if flags&4 > 0 { - B = make([]int, k) - C = B - } - if flags&2 > 0 { - B = make([]int, k) - } - } - - // Stage 3: Induce the result for the original problem. - if flags&8 > 0 { - getCounts_byte(T, C, n, k) - } - // Put all left-most S characters into their buckets. - if m > 1 { - getBuckets_byte(C, B, k, true) // Find ends of buckets - i = m - 1 - j = n - p = SA[m-1] - c1 = int(T[p]) - for { - c0 = c1 - q = B[c0] - for q < j { - j-- - SA[j] = 0 - } - for { - j-- - SA[j] = p - if i--; i < 0 { - break - } - p = SA[i] - if c1 = int(T[p]); c1 != c0 { - break - } - } - if i < 0 { - break - } - } - for j > 0 { - j-- - SA[j] = 0 - } - } - induceSA_byte(T, SA, C, B, n, k) -} diff --git a/vendor/github.com/dsnet/compress/bzip2/internal/sais/sais_int.go b/vendor/github.com/dsnet/compress/bzip2/internal/sais/sais_int.go deleted file mode 100644 index 280682f0d7..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/internal/sais/sais_int.go +++ /dev/null @@ -1,661 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// Code generated by sais_gen.go. DO NOT EDIT. 
- -// ==================================================== -// Copyright (c) 2008-2010 Yuta Mori All Rights Reserved. -// -// Permission is hereby granted, free of charge, to any person -// obtaining a copy of this software and associated documentation -// files (the "Software"), to deal in the Software without -// restriction, including without limitation the rights to use, -// copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the -// Software is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice shall be -// included in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -// OTHER DEALINGS IN THE SOFTWARE. -// ==================================================== - -package sais - -func getCounts_int(T []int, C []int, n, k int) { - var i int - for i = 0; i < k; i++ { - C[i] = 0 - } - for i = 0; i < n; i++ { - C[T[i]]++ - } -} - -func getBuckets_int(C, B []int, k int, end bool) { - var i, sum int - if end { - for i = 0; i < k; i++ { - sum += C[i] - B[i] = sum - } - } else { - for i = 0; i < k; i++ { - sum += C[i] - B[i] = sum - C[i] - } - } -} - -func sortLMS1_int(T []int, SA, C, B []int, n, k int) { - var b, i, j int - var c0, c1 int - - // Compute SAl. 
- if &C[0] == &B[0] { - getCounts_int(T, C, n, k) - } - getBuckets_int(C, B, k, false) // Find starts of buckets - j = n - 1 - c1 = int(T[j]) - b = B[c1] - j-- - if int(T[j]) < c1 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - for i = 0; i < n; i++ { - if j = SA[i]; j > 0 { - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - j-- - if int(T[j]) < c1 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - SA[i] = 0 - } else if j < 0 { - SA[i] = ^j - } - } - - // Compute SAs. - if &C[0] == &B[0] { - getCounts_int(T, C, n, k) - } - getBuckets_int(C, B, k, true) // Find ends of buckets - c1 = 0 - b = B[c1] - for i = n - 1; i >= 0; i-- { - if j = SA[i]; j > 0 { - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - j-- - b-- - if int(T[j]) > c1 { - SA[b] = ^(j + 1) - } else { - SA[b] = j - } - SA[i] = 0 - } - } -} - -func postProcLMS1_int(T []int, SA []int, n, m int) int { - var i, j, p, q, plen, qlen, name int - var c0, c1 int - var diff bool - - // Compact all the sorted substrings into the first m items of SA. - // 2*m must be not larger than n (provable). - for i = 0; SA[i] < 0; i++ { - SA[i] = ^SA[i] - } - if i < m { - for j, i = i, i+1; ; i++ { - if p = SA[i]; p < 0 { - SA[j] = ^p - j++ - SA[i] = 0 - if j == m { - break - } - } - } - } - - // Store the length of all substrings. - i = n - 1 - j = n - 1 - c0 = int(T[n-1]) - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - for i >= 0 { - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 > c1 { - break - } - } - if i >= 0 { - SA[m+((i+1)>>1)] = j - i - j = i + 1 - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - } - } - - // Find the lexicographic names of all substrings. 
- name = 0 - qlen = 0 - for i, q = 0, n; i < m; i++ { - p = SA[i] - plen = SA[m+(p>>1)] - diff = true - if (plen == qlen) && ((q + plen) < n) { - for j = 0; (j < plen) && (T[p+j] == T[q+j]); j++ { - } - if j == plen { - diff = false - } - } - if diff { - name++ - q = p - qlen = plen - } - SA[m+(p>>1)] = name - } - return name -} - -func sortLMS2_int(T []int, SA, C, B, D []int, n, k int) { - var b, i, j, t, d int - var c0, c1 int - - // Compute SAl. - getBuckets_int(C, B, k, false) // Find starts of buckets - j = n - 1 - c1 = int(T[j]) - b = B[c1] - j-- - if int(T[j]) < c1 { - t = 1 - } else { - t = 0 - } - j += n - if t&1 > 0 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - for i, d = 0, 0; i < n; i++ { - if j = SA[i]; j > 0 { - if n <= j { - d += 1 - j -= n - } - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - j-- - t = int(c0) << 1 - if int(T[j]) < c1 { - t |= 1 - } - if D[t] != d { - j += n - D[t] = d - } - if t&1 > 0 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - SA[i] = 0 - } else if j < 0 { - SA[i] = ^j - } - } - for i = n - 1; 0 <= i; i-- { - if SA[i] > 0 { - if SA[i] < n { - SA[i] += n - for j = i - 1; SA[j] < n; j-- { - } - SA[j] -= n - i = j - } - } - } - - // Compute SAs. - getBuckets_int(C, B, k, true) // Find ends of buckets - c1 = 0 - b = B[c1] - for i, d = n-1, d+1; i >= 0; i-- { - if j = SA[i]; j > 0 { - if n <= j { - d += 1 - j -= n - } - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - j-- - t = int(c0) << 1 - if int(T[j]) > c1 { - t |= 1 - } - if D[t] != d { - j += n - D[t] = d - } - b-- - if t&1 > 0 { - SA[b] = ^(j + 1) - } else { - SA[b] = j - } - SA[i] = 0 - } - } -} - -func postProcLMS2_int(SA []int, n, m int) int { - var i, j, d, name int - - // Compact all the sorted LMS substrings into the first m items of SA. 
- name = 0 - for i = 0; SA[i] < 0; i++ { - j = ^SA[i] - if n <= j { - name += 1 - } - SA[i] = j - } - if i < m { - for d, i = i, i+1; ; i++ { - if j = SA[i]; j < 0 { - j = ^j - if n <= j { - name += 1 - } - SA[d] = j - d++ - SA[i] = 0 - if d == m { - break - } - } - } - } - if name < m { - // Store the lexicographic names. - for i, d = m-1, name+1; 0 <= i; i-- { - if j = SA[i]; n <= j { - j -= n - d-- - } - SA[m+(j>>1)] = d - } - } else { - // Unset flags. - for i = 0; i < m; i++ { - if j = SA[i]; n <= j { - j -= n - SA[i] = j - } - } - } - return name -} - -func induceSA_int(T []int, SA, C, B []int, n, k int) { - var b, i, j int - var c0, c1 int - - // Compute SAl. - if &C[0] == &B[0] { - getCounts_int(T, C, n, k) - } - getBuckets_int(C, B, k, false) // Find starts of buckets - j = n - 1 - c1 = int(T[j]) - b = B[c1] - if j > 0 && int(T[j-1]) < c1 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - for i = 0; i < n; i++ { - j = SA[i] - SA[i] = ^j - if j > 0 { - j-- - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - if j > 0 && int(T[j-1]) < c1 { - SA[b] = ^j - } else { - SA[b] = j - } - b++ - } - } - - // Compute SAs. 
- if &C[0] == &B[0] { - getCounts_int(T, C, n, k) - } - getBuckets_int(C, B, k, true) // Find ends of buckets - c1 = 0 - b = B[c1] - for i = n - 1; i >= 0; i-- { - if j = SA[i]; j > 0 { - j-- - if c0 = int(T[j]); c0 != c1 { - B[c1] = b - c1 = c0 - b = B[c1] - } - b-- - if (j == 0) || (int(T[j-1]) > c1) { - SA[b] = ^j - } else { - SA[b] = j - } - } else { - SA[i] = ^j - } - } -} - -func computeSA_int(T []int, SA []int, fs, n, k int) { - const ( - minBucketSize = 512 - sortLMS2Limit = 0x3fffffff - ) - - var C, B, D, RA []int - var bo int // Offset of B relative to SA - var b, i, j, m, p, q, name, newfs int - var c0, c1 int - var flags uint - - if k <= minBucketSize { - C = make([]int, k) - if k <= fs { - bo = n + fs - k - B = SA[bo:] - flags = 1 - } else { - B = make([]int, k) - flags = 3 - } - } else if k <= fs { - C = SA[n+fs-k:] - if k <= fs-k { - bo = n + fs - 2*k - B = SA[bo:] - flags = 0 - } else if k <= 4*minBucketSize { - B = make([]int, k) - flags = 2 - } else { - B = C - flags = 8 - } - } else { - C = make([]int, k) - B = C - flags = 4 | 8 - } - if n <= sortLMS2Limit && 2 <= (n/k) { - if flags&1 > 0 { - if 2*k <= fs-k { - flags |= 32 - } else { - flags |= 16 - } - } else if flags == 0 && 2*k <= (fs-2*k) { - flags |= 32 - } - } - - // Stage 1: Reduce the problem by at least 1/2. - // Sort all the LMS-substrings. 
- getCounts_int(T, C, n, k) - getBuckets_int(C, B, k, true) // Find ends of buckets - for i = 0; i < n; i++ { - SA[i] = 0 - } - b = -1 - i = n - 1 - j = n - m = 0 - c0 = int(T[n-1]) - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - for i >= 0 { - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 > c1 { - break - } - } - if i >= 0 { - if b >= 0 { - SA[b] = j - } - B[c1]-- - b = B[c1] - j = i - m++ - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - } - } - - if m > 1 { - if flags&(16|32) > 0 { - if flags&16 > 0 { - D = make([]int, 2*k) - } else { - D = SA[bo-2*k:] - } - B[T[j+1]]++ - for i, j = 0, 0; i < k; i++ { - j += C[i] - if B[i] != j { - SA[B[i]] += n - } - D[i] = 0 - D[i+k] = 0 - } - sortLMS2_int(T, SA, C, B, D, n, k) - name = postProcLMS2_int(SA, n, m) - } else { - sortLMS1_int(T, SA, C, B, n, k) - name = postProcLMS1_int(T, SA, n, m) - } - } else if m == 1 { - SA[b] = j + 1 - name = 1 - } else { - name = 0 - } - - // Stage 2: Solve the reduced problem. - // Recurse if names are not yet unique. 
- if name < m { - newfs = n + fs - 2*m - if flags&(1|4|8) == 0 { - if k+name <= newfs { - newfs -= k - } else { - flags |= 8 - } - } - RA = SA[m+newfs:] - for i, j = m+(n>>1)-1, m-1; m <= i; i-- { - if SA[i] != 0 { - RA[j] = SA[i] - 1 - j-- - } - } - computeSA_int(RA, SA, newfs, m, name) - - i = n - 1 - j = m - 1 - c0 = int(T[n-1]) - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - for i >= 0 { - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 > c1 { - break - } - } - if i >= 0 { - RA[j] = i + 1 - j-- - for { - c1 = c0 - if i--; i < 0 { - break - } - if c0 = int(T[i]); c0 < c1 { - break - } - } - } - } - for i = 0; i < m; i++ { - SA[i] = RA[SA[i]] - } - if flags&4 > 0 { - B = make([]int, k) - C = B - } - if flags&2 > 0 { - B = make([]int, k) - } - } - - // Stage 3: Induce the result for the original problem. - if flags&8 > 0 { - getCounts_int(T, C, n, k) - } - // Put all left-most S characters into their buckets. - if m > 1 { - getBuckets_int(C, B, k, true) // Find ends of buckets - i = m - 1 - j = n - p = SA[m-1] - c1 = int(T[p]) - for { - c0 = c1 - q = B[c0] - for q < j { - j-- - SA[j] = 0 - } - for { - j-- - SA[j] = p - if i--; i < 0 { - break - } - p = SA[i] - if c1 = int(T[p]); c1 != c0 { - break - } - } - if i < 0 { - break - } - } - for j > 0 { - j-- - SA[j] = 0 - } - } - induceSA_int(T, SA, C, B, n, k) -} diff --git a/vendor/github.com/dsnet/compress/bzip2/mtf_rle2.go b/vendor/github.com/dsnet/compress/bzip2/mtf_rle2.go deleted file mode 100644 index 5c71b3431d..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/mtf_rle2.go +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. 
- -package bzip2 - -import "github.com/dsnet/compress/internal/errors" - -// moveToFront implements both the MTF and RLE stages of bzip2 at the same time. -// Any runs of zeros in the encoded output will be replaced by a sequence of -// RUNA and RUNB symbols are encode the length of the run. -// -// The RLE encoding used can actually be encoded to and decoded from using -// normal two's complement arithmetic. The methodology for doing so is below. -// -// Assuming the following: -// num: The value being encoded by RLE encoding. -// run: A sequence of RUNA and RUNB symbols represented as a binary integer, -// where RUNA is the 0 bit, RUNB is the 1 bit, and least-significant RUN -// symbols are at the least-significant bit positions. -// cnt: The number of RUNA and RUNB symbols. -// -// Then the RLE encoding used by bzip2 has this mathematical property: -// num+1 == (1< len(mtf.dictBuf) { - panicf(errors.Internal, "alphabet too large") - } - copy(mtf.dictBuf[:], dict) - mtf.dictLen = len(dict) - mtf.blkSize = blkSize -} - -func (mtf *moveToFront) Encode(vals []byte) (syms []uint16) { - dict := mtf.dictBuf[:mtf.dictLen] - syms = mtf.syms[:0] - - if len(vals) > mtf.blkSize { - panicf(errors.Internal, "exceeded block size") - } - - var lastNum uint32 - for _, val := range vals { - // Normal move-to-front transform. - var idx uint8 // Reverse lookup idx in dict - for di, dv := range dict { - if dv == val { - idx = uint8(di) - break - } - } - copy(dict[1:], dict[:idx]) - dict[0] = val - - // Run-length encoding augmentation. 
- if idx == 0 { - lastNum++ - continue - } - if lastNum > 0 { - for rc := lastNum + 1; rc != 1; rc >>= 1 { - syms = append(syms, uint16(rc&1)) - } - lastNum = 0 - } - syms = append(syms, uint16(idx)+1) - } - if lastNum > 0 { - for rc := lastNum + 1; rc != 1; rc >>= 1 { - syms = append(syms, uint16(rc&1)) - } - } - mtf.syms = syms - return syms -} - -func (mtf *moveToFront) Decode(syms []uint16) (vals []byte) { - dict := mtf.dictBuf[:mtf.dictLen] - vals = mtf.vals[:0] - - var lastCnt uint - var lastRun uint32 - for _, sym := range syms { - // Run-length encoding augmentation. - if sym < 2 { - lastRun |= uint32(sym) << lastCnt - lastCnt++ - continue - } - if lastCnt > 0 { - cnt := int((1< mtf.blkSize || lastCnt > 24 { - panicf(errors.Corrupted, "run-length decoding exceeded block size") - } - for i := cnt; i > 0; i-- { - vals = append(vals, dict[0]) - } - lastCnt, lastRun = 0, 0 - } - - // Normal move-to-front transform. - val := dict[sym-1] // Forward lookup val in dict - copy(dict[1:], dict[:sym-1]) - dict[0] = val - - if len(vals) >= mtf.blkSize { - panicf(errors.Corrupted, "run-length decoding exceeded block size") - } - vals = append(vals, val) - } - if lastCnt > 0 { - cnt := int((1< mtf.blkSize || lastCnt > 24 { - panicf(errors.Corrupted, "run-length decoding exceeded block size") - } - for i := cnt; i > 0; i-- { - vals = append(vals, dict[0]) - } - } - mtf.vals = vals - return vals -} diff --git a/vendor/github.com/dsnet/compress/bzip2/prefix.go b/vendor/github.com/dsnet/compress/bzip2/prefix.go deleted file mode 100644 index 4847d80965..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/prefix.go +++ /dev/null @@ -1,374 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. 
- -package bzip2 - -import ( - "io" - - "github.com/dsnet/compress/internal" - "github.com/dsnet/compress/internal/errors" - "github.com/dsnet/compress/internal/prefix" -) - -const ( - minNumTrees = 2 - maxNumTrees = 6 - - maxPrefixBits = 20 // Maximum bit-width of a prefix code - maxNumSyms = 256 + 2 // Maximum number of symbols in the alphabet - numBlockSyms = 50 // Number of bytes in a block -) - -// encSel and decSel are used to handle the prefix encoding for tree selectors. -// The prefix encoding is as follows: -// -// Code TreeIdx -// 0 <=> 0 -// 10 <=> 1 -// 110 <=> 2 -// 1110 <=> 3 -// 11110 <=> 4 -// 111110 <=> 5 -// 111111 <=> 6 Invalid tree index, so should fail -// -var encSel, decSel = func() (e prefix.Encoder, d prefix.Decoder) { - var selCodes [maxNumTrees + 1]prefix.PrefixCode - for i := range selCodes { - selCodes[i] = prefix.PrefixCode{Sym: uint32(i), Len: uint32(i + 1)} - } - selCodes[maxNumTrees] = prefix.PrefixCode{Sym: maxNumTrees, Len: maxNumTrees} - prefix.GeneratePrefixes(selCodes[:]) - e.Init(selCodes[:]) - d.Init(selCodes[:]) - return -}() - -type prefixReader struct{ prefix.Reader } - -func (pr *prefixReader) Init(r io.Reader) { - pr.Reader.Init(r, true) -} - -func (pr *prefixReader) ReadBitsBE64(nb uint) uint64 { - if nb <= 32 { - v := uint32(pr.ReadBits(nb)) - return uint64(internal.ReverseUint32N(v, nb)) - } - v0 := internal.ReverseUint32(uint32(pr.ReadBits(32))) - v1 := internal.ReverseUint32(uint32(pr.ReadBits(nb - 32))) - v := uint64(v0)<<32 | uint64(v1) - return v >> (64 - nb) -} - -func (pr *prefixReader) ReadPrefixCodes(codes []prefix.PrefixCodes, trees []prefix.Decoder) { - for i, pc := range codes { - clen := int(pr.ReadBitsBE64(5)) - sum := 1 << maxPrefixBits - for sym := range pc { - for { - if clen < 1 || clen > maxPrefixBits { - panicf(errors.Corrupted, "invalid prefix bit-length: %d", clen) - } - - b, ok := pr.TryReadBits(1) - if !ok { - b = pr.ReadBits(1) - } - if b == 0 { - break - } - - b, ok = 
pr.TryReadBits(1) - if !ok { - b = pr.ReadBits(1) - } - clen -= int(b*2) - 1 // +1 or -1 - } - pc[sym] = prefix.PrefixCode{Sym: uint32(sym), Len: uint32(clen)} - sum -= (1 << maxPrefixBits) >> uint(clen) - } - - if sum == 0 { - // Fast path, but only handles complete trees. - if err := prefix.GeneratePrefixes(pc); err != nil { - errors.Panic(err) // Using complete trees; should never fail - } - } else { - // Slow path, but handles anything. - pc = handleDegenerateCodes(pc) // Never fails, but may fail later - codes[i] = pc - } - trees[i].Init(pc) - } -} - -type prefixWriter struct{ prefix.Writer } - -func (pw *prefixWriter) Init(w io.Writer) { - pw.Writer.Init(w, true) -} - -func (pw *prefixWriter) WriteBitsBE64(v uint64, nb uint) { - if nb <= 32 { - v := internal.ReverseUint32N(uint32(v), nb) - pw.WriteBits(uint(v), nb) - return - } - v <<= (64 - nb) - v0 := internal.ReverseUint32(uint32(v >> 32)) - v1 := internal.ReverseUint32(uint32(v)) - pw.WriteBits(uint(v0), 32) - pw.WriteBits(uint(v1), nb-32) - return -} - -func (pw *prefixWriter) WritePrefixCodes(codes []prefix.PrefixCodes, trees []prefix.Encoder) { - for i, pc := range codes { - if err := prefix.GeneratePrefixes(pc); err != nil { - errors.Panic(err) // Using complete trees; should never fail - } - trees[i].Init(pc) - - clen := int(pc[0].Len) - pw.WriteBitsBE64(uint64(clen), 5) - for _, c := range pc { - for int(c.Len) < clen { - pw.WriteBits(3, 2) // 11 - clen-- - } - for int(c.Len) > clen { - pw.WriteBits(1, 2) // 10 - clen++ - } - pw.WriteBits(0, 1) - } - } -} - -// handleDegenerateCodes converts a degenerate tree into a canonical tree. 
-// -// For example, when the input is an under-subscribed tree: -// input: []PrefixCode{ -// {Sym: 0, Len: 3}, -// {Sym: 1, Len: 4}, -// {Sym: 2, Len: 3}, -// } -// output: []PrefixCode{ -// {Sym: 0, Len: 3, Val: 0}, // 000 -// {Sym: 1, Len: 4, Val: 2}, // 0010 -// {Sym: 2, Len: 3, Val: 4}, // 100 -// {Sym: 258, Len: 4, Val: 10}, // 1010 -// {Sym: 259, Len: 3, Val: 6}, // 110 -// {Sym: 260, Len: 1, Val: 1}, // 1 -// } -// -// For example, when the input is an over-subscribed tree: -// input: []PrefixCode{ -// {Sym: 0, Len: 1}, -// {Sym: 1, Len: 3}, -// {Sym: 2, Len: 4}, -// {Sym: 3, Len: 3}, -// {Sym: 4, Len: 2}, -// } -// output: []PrefixCode{ -// {Sym: 0, Len: 1, Val: 0}, // 0 -// {Sym: 1, Len: 3, Val: 3}, // 011 -// {Sym: 3, Len: 3, Val: 7}, // 111 -// {Sym: 4, Len: 2, Val: 1}, // 01 -// } -func handleDegenerateCodes(codes prefix.PrefixCodes) prefix.PrefixCodes { - // Since there is no formal definition for the BZip2 format, there is no - // specification that says that the code lengths must form a complete - // prefix tree (IE: it is neither over-subscribed nor under-subscribed). - // Thus, the original C implementation becomes the reference for how prefix - // decoding is done in these edge cases. Unfortunately, the C version does - // not error when an invalid tree is used, but rather allows decoding to - // continue and only errors if some bit pattern happens to cause an error. - // Thus, it is possible for an invalid tree to end up decoding an input - // "properly" so long as invalid bit patterns are not present. In order to - // replicate this non-specified behavior, we use a ported version of the - // C code to generate the codes as a valid canonical tree by substituting - // invalid nodes with invalid symbols. - // - // ==================================================== - // This program, "bzip2", the associated library "libbzip2", and all - // documentation, are copyright (C) 1996-2010 Julian R Seward. All - // rights reserved. 
- // - // Redistribution and use in source and binary forms, with or without - // modification, are permitted provided that the following conditions - // are met: - // - // 1. Redistributions of source code must retain the above copyright - // notice, this list of conditions and the following disclaimer. - // - // 2. The origin of this software must not be misrepresented; you must - // not claim that you wrote the original software. If you use this - // software in a product, an acknowledgment in the product - // documentation would be appreciated but is not required. - // - // 3. Altered source versions must be plainly marked as such, and must - // not be misrepresented as being the original software. - // - // 4. The name of the author may not be used to endorse or promote - // products derived from this software without specific prior written - // permission. - // - // THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS - // OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - // ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY - // DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE - // GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- // - // Julian Seward, jseward@bzip.org - // bzip2/libbzip2 version 1.0.6 of 6 September 2010 - // ==================================================== - var ( - limits [maxPrefixBits + 2]int32 - bases [maxPrefixBits + 2]int32 - perms [maxNumSyms]int32 - - minLen = uint32(maxPrefixBits) - maxLen = uint32(0) - ) - - const ( - statusOkay = iota - statusInvalid - statusNeedBits - statusMaxBits - ) - - // createTables is the BZ2_hbCreateDecodeTables function from the C code. - createTables := func(codes []prefix.PrefixCode) { - for _, c := range codes { - if c.Len > maxLen { - maxLen = c.Len - } - if c.Len < minLen { - minLen = c.Len - } - } - - var pp int - for i := minLen; i <= maxLen; i++ { - for j, c := range codes { - if c.Len == i { - perms[pp] = int32(j) - pp++ - } - } - } - - var vec int32 - for _, c := range codes { - bases[c.Len+1]++ - } - for i := 1; i < len(bases); i++ { - bases[i] += bases[i-1] - } - for i := minLen; i <= maxLen; i++ { - vec += bases[i+1] - bases[i] - limits[i] = vec - 1 - vec <<= 1 - } - for i := minLen + 1; i <= maxLen; i++ { - bases[i] = ((limits[i-1] + 1) << 1) - bases[i] - } - } - - // getSymbol is the GET_MTF_VAL macro from the C code. - getSymbol := func(c prefix.PrefixCode) (uint32, int) { - v := internal.ReverseUint32(c.Val) - n := c.Len - - zn := minLen - if zn > n { - return 0, statusNeedBits - } - zvec := int32(v >> (32 - zn)) - v <<= zn - for { - if zn > maxLen { - return 0, statusMaxBits - } - if zvec <= limits[zn] { - break - } - zn++ - if zn > n { - return 0, statusNeedBits - } - zvec = (zvec << 1) | int32(v>>31) - v <<= 1 - } - if zvec-bases[zn] < 0 || zvec-bases[zn] >= maxNumSyms { - return 0, statusInvalid - } - return uint32(perms[zvec-bases[zn]]), statusOkay - } - - // Step 1: Create the prefix trees using the C algorithm. - createTables(codes) - - // Step 2: Starting with the shortest bit pattern, explore the whole tree. 
- // If tree is under-subscribed, the worst-case runtime is O(1< 0 { - codes = append(codes, c) - } - } - return codes -} diff --git a/vendor/github.com/dsnet/compress/bzip2/reader.go b/vendor/github.com/dsnet/compress/bzip2/reader.go deleted file mode 100644 index 86d3f7186e..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/reader.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package bzip2 - -import ( - "io" - - "github.com/dsnet/compress/internal" - "github.com/dsnet/compress/internal/errors" - "github.com/dsnet/compress/internal/prefix" -) - -type Reader struct { - InputOffset int64 // Total number of bytes read from underlying io.Reader - OutputOffset int64 // Total number of bytes emitted from Read - - rd prefixReader - err error - level int // The current compression level - rdHdrFtr int // Number of times we read the stream header and footer - blkCRC uint32 // CRC-32 IEEE of each block (as stored) - endCRC uint32 // Checksum of all blocks using bzip2's custom method - - crc crc - mtf moveToFront - bwt burrowsWheelerTransform - rle runLengthEncoding - - // These fields are allocated with Reader and re-used later. 
- treeSels []uint8 - codes2D [maxNumTrees][maxNumSyms]prefix.PrefixCode - codes1D [maxNumTrees]prefix.PrefixCodes - trees1D [maxNumTrees]prefix.Decoder - syms []uint16 - - fuzzReader // Exported functionality when fuzz testing -} - -type ReaderConfig struct { - _ struct{} // Blank field to prevent unkeyed struct literals -} - -func NewReader(r io.Reader, conf *ReaderConfig) (*Reader, error) { - zr := new(Reader) - zr.Reset(r) - return zr, nil -} - -func (zr *Reader) Reset(r io.Reader) error { - *zr = Reader{ - rd: zr.rd, - - mtf: zr.mtf, - bwt: zr.bwt, - rle: zr.rle, - - treeSels: zr.treeSels, - trees1D: zr.trees1D, - syms: zr.syms, - } - zr.rd.Init(r) - return nil -} - -func (zr *Reader) Read(buf []byte) (int, error) { - for { - cnt, err := zr.rle.Read(buf) - if err != rleDone && zr.err == nil { - zr.err = err - } - if cnt > 0 { - zr.crc.update(buf[:cnt]) - zr.OutputOffset += int64(cnt) - return cnt, nil - } - if zr.err != nil || len(buf) == 0 { - return 0, zr.err - } - - // Read the next chunk. - zr.rd.Offset = zr.InputOffset - func() { - defer errors.Recover(&zr.err) - if zr.rdHdrFtr%2 == 0 { - // Check if we are already at EOF. - if err := zr.rd.PullBits(1); err != nil { - if err == io.ErrUnexpectedEOF && zr.rdHdrFtr > 0 { - err = io.EOF // EOF is okay if we read at least one stream - } - errors.Panic(err) - } - - // Read stream header. - if zr.rd.ReadBitsBE64(16) != hdrMagic { - panicf(errors.Corrupted, "invalid stream magic") - } - if ver := zr.rd.ReadBitsBE64(8); ver != 'h' { - if ver == '0' { - panicf(errors.Deprecated, "bzip1 format is not supported") - } - panicf(errors.Corrupted, "invalid version: %q", ver) - } - lvl := int(zr.rd.ReadBitsBE64(8)) - '0' - if lvl < BestSpeed || lvl > BestCompression { - panicf(errors.Corrupted, "invalid block size: %d", lvl*blockSize) - } - zr.level = lvl - zr.rdHdrFtr++ - } else { - // Check and update the CRC. 
- if internal.GoFuzz { - zr.updateChecksum(-1, zr.crc.val) // Update with value - zr.blkCRC = zr.crc.val // Suppress CRC failures - } - if zr.blkCRC != zr.crc.val { - panicf(errors.Corrupted, "mismatching block checksum") - } - zr.endCRC = (zr.endCRC<<1 | zr.endCRC>>31) ^ zr.blkCRC - } - buf := zr.decodeBlock() - zr.rle.Init(buf) - }() - if zr.InputOffset, err = zr.rd.Flush(); zr.err == nil { - zr.err = err - } - if zr.err != nil { - zr.err = errWrap(zr.err, errors.Corrupted) - return 0, zr.err - } - } -} - -func (zr *Reader) Close() error { - if zr.err == io.EOF || zr.err == errClosed { - zr.rle.Init(nil) // Make sure future reads fail - zr.err = errClosed - return nil - } - return zr.err // Return the persistent error -} - -func (zr *Reader) decodeBlock() []byte { - if magic := zr.rd.ReadBitsBE64(48); magic != blkMagic { - if magic == endMagic { - endCRC := uint32(zr.rd.ReadBitsBE64(32)) - if internal.GoFuzz { - zr.updateChecksum(zr.rd.BitsRead()-32, zr.endCRC) - endCRC = zr.endCRC // Suppress CRC failures - } - if zr.endCRC != endCRC { - panicf(errors.Corrupted, "mismatching stream checksum") - } - zr.endCRC = 0 - zr.rd.ReadPads() - zr.rdHdrFtr++ - return nil - } - panicf(errors.Corrupted, "invalid block or footer magic") - } - - zr.crc.val = 0 - zr.blkCRC = uint32(zr.rd.ReadBitsBE64(32)) - if internal.GoFuzz { - zr.updateChecksum(zr.rd.BitsRead()-32, 0) // Record offset only - } - if zr.rd.ReadBitsBE64(1) != 0 { - panicf(errors.Deprecated, "block randomization is not supported") - } - - // Read BWT related fields. - ptr := int(zr.rd.ReadBitsBE64(24)) // BWT origin pointer - - // Read MTF related fields. - var dictArr [256]uint8 - dict := dictArr[:0] - bmapHi := uint16(zr.rd.ReadBits(16)) - for i := 0; i < 256; i, bmapHi = i+16, bmapHi>>1 { - if bmapHi&1 > 0 { - bmapLo := uint16(zr.rd.ReadBits(16)) - for j := 0; j < 16; j, bmapLo = j+1, bmapLo>>1 { - if bmapLo&1 > 0 { - dict = append(dict, uint8(i+j)) - } - } - } - } - - // Step 1: Prefix encoding. 
- syms := zr.decodePrefix(len(dict)) - - // Step 2: Move-to-front transform and run-length encoding. - zr.mtf.Init(dict, zr.level*blockSize) - buf := zr.mtf.Decode(syms) - - // Step 3: Burrows-Wheeler transformation. - if ptr >= len(buf) { - panicf(errors.Corrupted, "origin pointer (0x%06x) exceeds block size: %d", ptr, len(buf)) - } - zr.bwt.Decode(buf, ptr) - - return buf -} - -func (zr *Reader) decodePrefix(numSyms int) (syms []uint16) { - numSyms += 2 // Remove 0 symbol, add RUNA, RUNB, and EOF symbols - if numSyms < 3 { - panicf(errors.Corrupted, "not enough prefix symbols: %d", numSyms) - } - - // Read information about the trees and tree selectors. - var mtf internal.MoveToFront - numTrees := int(zr.rd.ReadBitsBE64(3)) - if numTrees < minNumTrees || numTrees > maxNumTrees { - panicf(errors.Corrupted, "invalid number of prefix trees: %d", numTrees) - } - numSels := int(zr.rd.ReadBitsBE64(15)) - if cap(zr.treeSels) < numSels { - zr.treeSels = make([]uint8, numSels) - } - treeSels := zr.treeSels[:numSels] - for i := range treeSels { - sym, ok := zr.rd.TryReadSymbol(&decSel) - if !ok { - sym = zr.rd.ReadSymbol(&decSel) - } - if int(sym) >= numTrees { - panicf(errors.Corrupted, "invalid prefix tree selector: %d", sym) - } - treeSels[i] = uint8(sym) - } - mtf.Decode(treeSels) - zr.treeSels = treeSels - - // Initialize prefix codes. - for i := range zr.codes2D[:numTrees] { - zr.codes1D[i] = zr.codes2D[i][:numSyms] - } - zr.rd.ReadPrefixCodes(zr.codes1D[:numTrees], zr.trees1D[:numTrees]) - - // Read prefix encoded symbols of compressed data. 
- var tree *prefix.Decoder - var blkLen, selIdx int - syms = zr.syms[:0] - for { - if blkLen == 0 { - blkLen = numBlockSyms - if selIdx >= len(treeSels) { - panicf(errors.Corrupted, "not enough prefix tree selectors") - } - tree = &zr.trees1D[treeSels[selIdx]] - selIdx++ - } - blkLen-- - sym, ok := zr.rd.TryReadSymbol(tree) - if !ok { - sym = zr.rd.ReadSymbol(tree) - } - - if int(sym) == numSyms-1 { - break // EOF marker - } - if int(sym) >= numSyms { - panicf(errors.Corrupted, "invalid prefix symbol: %d", sym) - } - if len(syms) >= zr.level*blockSize { - panicf(errors.Corrupted, "number of prefix symbols exceeds block size") - } - syms = append(syms, uint16(sym)) - } - zr.syms = syms - return syms -} diff --git a/vendor/github.com/dsnet/compress/bzip2/rle1.go b/vendor/github.com/dsnet/compress/bzip2/rle1.go deleted file mode 100644 index 1d789f65f2..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/rle1.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package bzip2 - -import "github.com/dsnet/compress/internal/errors" - -// rleDone is a special "error" to indicate that the RLE stage is done. -var rleDone = errorf(errors.Unknown, "RLE1 stage is completed") - -// runLengthEncoding implements the first RLE stage of bzip2. Every sequence -// of 4..255 duplicated bytes is replaced by only the first 4 bytes, and a -// single byte representing the repeat length. Similar to the C bzip2 -// implementation, the encoder will always terminate repeat sequences with a -// count (even if it is the end of the buffer), and it will also never produce -// run lengths of 256..259. The decoder can handle the latter case. 
-// -// For example, if the input was: -// input: "AAAAAAABBBBCCCD" -// -// Then the output will be: -// output: "AAAA\x03BBBB\x00CCCD" -type runLengthEncoding struct { - buf []byte - idx int - lastVal byte - lastCnt int -} - -func (rle *runLengthEncoding) Init(buf []byte) { - *rle = runLengthEncoding{buf: buf} -} - -func (rle *runLengthEncoding) Write(buf []byte) (int, error) { - for i, b := range buf { - if rle.lastVal != b { - rle.lastCnt = 0 - } - rle.lastCnt++ - switch { - case rle.lastCnt < 4: - if rle.idx >= len(rle.buf) { - return i, rleDone - } - rle.buf[rle.idx] = b - rle.idx++ - case rle.lastCnt == 4: - if rle.idx+1 >= len(rle.buf) { - return i, rleDone - } - rle.buf[rle.idx] = b - rle.idx++ - rle.buf[rle.idx] = 0 - rle.idx++ - case rle.lastCnt < 256: - rle.buf[rle.idx-1]++ - default: - if rle.idx >= len(rle.buf) { - return i, rleDone - } - rle.lastCnt = 1 - rle.buf[rle.idx] = b - rle.idx++ - } - rle.lastVal = b - } - return len(buf), nil -} - -func (rle *runLengthEncoding) Read(buf []byte) (int, error) { - for i := range buf { - switch { - case rle.lastCnt == -4: - if rle.idx >= len(rle.buf) { - return i, errorf(errors.Corrupted, "missing terminating run-length repeater") - } - rle.lastCnt = int(rle.buf[rle.idx]) - rle.idx++ - if rle.lastCnt > 0 { - break // Break the switch - } - fallthrough // Count was zero, continue the work - case rle.lastCnt <= 0: - if rle.idx >= len(rle.buf) { - return i, rleDone - } - b := rle.buf[rle.idx] - rle.idx++ - if b != rle.lastVal { - rle.lastCnt = 0 - rle.lastVal = b - } - } - buf[i] = rle.lastVal - rle.lastCnt-- - } - return len(buf), nil -} - -func (rle *runLengthEncoding) Bytes() []byte { return rle.buf[:rle.idx] } diff --git a/vendor/github.com/dsnet/compress/bzip2/writer.go b/vendor/github.com/dsnet/compress/bzip2/writer.go deleted file mode 100644 index 5c1a4c664a..0000000000 --- a/vendor/github.com/dsnet/compress/bzip2/writer.go +++ /dev/null @@ -1,307 +0,0 @@ -// Copyright 2015, Joe 
Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package bzip2 - -import ( - "io" - - "github.com/dsnet/compress/internal" - "github.com/dsnet/compress/internal/errors" - "github.com/dsnet/compress/internal/prefix" -) - -type Writer struct { - InputOffset int64 // Total number of bytes issued to Write - OutputOffset int64 // Total number of bytes written to underlying io.Writer - - wr prefixWriter - err error - level int // The current compression level - wrHdr bool // Have we written the stream header? - blkCRC uint32 // CRC-32 IEEE of each block - endCRC uint32 // Checksum of all blocks using bzip2's custom method - - crc crc - rle runLengthEncoding - bwt burrowsWheelerTransform - mtf moveToFront - - // These fields are allocated with Writer and re-used later. - buf []byte - treeSels []uint8 - treeSelsMTF []uint8 - codes2D [maxNumTrees][maxNumSyms]prefix.PrefixCode - codes1D [maxNumTrees]prefix.PrefixCodes - trees1D [maxNumTrees]prefix.Encoder -} - -type WriterConfig struct { - Level int - - _ struct{} // Blank field to prevent unkeyed struct literals -} - -func NewWriter(w io.Writer, conf *WriterConfig) (*Writer, error) { - var lvl int - if conf != nil { - lvl = conf.Level - } - if lvl == 0 { - lvl = DefaultCompression - } - if lvl < BestSpeed || lvl > BestCompression { - return nil, errorf(errors.Invalid, "compression level: %d", lvl) - } - zw := new(Writer) - zw.level = lvl - zw.Reset(w) - return zw, nil -} - -func (zw *Writer) Reset(w io.Writer) error { - *zw = Writer{ - wr: zw.wr, - level: zw.level, - - rle: zw.rle, - bwt: zw.bwt, - mtf: zw.mtf, - - buf: zw.buf, - treeSels: zw.treeSels, - treeSelsMTF: zw.treeSelsMTF, - trees1D: zw.trees1D, - } - zw.wr.Init(w) - if len(zw.buf) != zw.level*blockSize { - zw.buf = make([]byte, zw.level*blockSize) - } - zw.rle.Init(zw.buf) - return nil -} - -func (zw *Writer) Write(buf []byte) (int, error) { - if 
zw.err != nil { - return 0, zw.err - } - - cnt := len(buf) - for { - wrCnt, err := zw.rle.Write(buf) - if err != rleDone && zw.err == nil { - zw.err = err - } - zw.crc.update(buf[:wrCnt]) - buf = buf[wrCnt:] - if len(buf) == 0 { - zw.InputOffset += int64(cnt) - return cnt, nil - } - if zw.err = zw.flush(); zw.err != nil { - return 0, zw.err - } - } -} - -func (zw *Writer) flush() error { - vals := zw.rle.Bytes() - if len(vals) == 0 { - return nil - } - zw.wr.Offset = zw.OutputOffset - func() { - defer errors.Recover(&zw.err) - if !zw.wrHdr { - // Write stream header. - zw.wr.WriteBitsBE64(hdrMagic, 16) - zw.wr.WriteBitsBE64('h', 8) - zw.wr.WriteBitsBE64(uint64('0'+zw.level), 8) - zw.wrHdr = true - } - zw.encodeBlock(vals) - }() - var err error - if zw.OutputOffset, err = zw.wr.Flush(); zw.err == nil { - zw.err = err - } - if zw.err != nil { - zw.err = errWrap(zw.err, errors.Internal) - return zw.err - } - zw.endCRC = (zw.endCRC<<1 | zw.endCRC>>31) ^ zw.blkCRC - zw.blkCRC = 0 - zw.rle.Init(zw.buf) - return nil -} - -func (zw *Writer) Close() error { - if zw.err == errClosed { - return nil - } - - // Flush RLE buffer if there is left-over data. - if zw.err = zw.flush(); zw.err != nil { - return zw.err - } - - // Write stream footer. - zw.wr.Offset = zw.OutputOffset - func() { - defer errors.Recover(&zw.err) - if !zw.wrHdr { - // Write stream header. 
- zw.wr.WriteBitsBE64(hdrMagic, 16) - zw.wr.WriteBitsBE64('h', 8) - zw.wr.WriteBitsBE64(uint64('0'+zw.level), 8) - zw.wrHdr = true - } - zw.wr.WriteBitsBE64(endMagic, 48) - zw.wr.WriteBitsBE64(uint64(zw.endCRC), 32) - zw.wr.WritePads(0) - }() - var err error - if zw.OutputOffset, err = zw.wr.Flush(); zw.err == nil { - zw.err = err - } - if zw.err != nil { - zw.err = errWrap(zw.err, errors.Internal) - return zw.err - } - - zw.err = errClosed - return nil -} - -func (zw *Writer) encodeBlock(buf []byte) { - zw.blkCRC = zw.crc.val - zw.wr.WriteBitsBE64(blkMagic, 48) - zw.wr.WriteBitsBE64(uint64(zw.blkCRC), 32) - zw.wr.WriteBitsBE64(0, 1) - zw.crc.val = 0 - - // Step 1: Burrows-Wheeler transformation. - ptr := zw.bwt.Encode(buf) - zw.wr.WriteBitsBE64(uint64(ptr), 24) - - // Step 2: Move-to-front transform and run-length encoding. - var dictMap [256]bool - for _, c := range buf { - dictMap[c] = true - } - - var dictArr [256]uint8 - var bmapLo [16]uint16 - dict := dictArr[:0] - bmapHi := uint16(0) - for i, b := range dictMap { - if b { - c := uint8(i) - dict = append(dict, c) - bmapHi |= 1 << (c >> 4) - bmapLo[c>>4] |= 1 << (c & 0xf) - } - } - - zw.wr.WriteBits(uint(bmapHi), 16) - for _, m := range bmapLo { - if m > 0 { - zw.wr.WriteBits(uint(m), 16) - } - } - - zw.mtf.Init(dict, len(buf)) - syms := zw.mtf.Encode(buf) - - // Step 3: Prefix encoding. - zw.encodePrefix(syms, len(dict)) -} - -func (zw *Writer) encodePrefix(syms []uint16, numSyms int) { - numSyms += 2 // Remove 0 symbol, add RUNA, RUNB, and EOB symbols - if numSyms < 3 { - panicf(errors.Internal, "unable to encode EOB marker") - } - syms = append(syms, uint16(numSyms-1)) // EOB marker - - // Compute number of prefix trees needed. - numTrees := maxNumTrees - for i, lim := range []int{200, 600, 1200, 2400} { - if len(syms) < lim { - numTrees = minNumTrees + i - break - } - } - - // Compute number of block selectors. 
- numSels := (len(syms) + numBlockSyms - 1) / numBlockSyms - if cap(zw.treeSels) < numSels { - zw.treeSels = make([]uint8, numSels) - } - treeSels := zw.treeSels[:numSels] - for i := range treeSels { - treeSels[i] = uint8(i % numTrees) - } - - // Initialize prefix codes. - for i := range zw.codes2D[:numTrees] { - pc := zw.codes2D[i][:numSyms] - for j := range pc { - pc[j] = prefix.PrefixCode{Sym: uint32(j)} - } - zw.codes1D[i] = pc - } - - // First cut at assigning prefix trees to each group. - var codes prefix.PrefixCodes - var blkLen, selIdx int - for _, sym := range syms { - if blkLen == 0 { - blkLen = numBlockSyms - codes = zw.codes2D[treeSels[selIdx]][:numSyms] - selIdx++ - } - blkLen-- - codes[sym].Cnt++ - } - - // TODO(dsnet): Use K-means to cluster groups to each prefix tree. - - // Generate lengths and prefixes based on symbol frequencies. - for i := range zw.trees1D[:numTrees] { - pc := prefix.PrefixCodes(zw.codes2D[i][:numSyms]) - pc.SortByCount() - if err := prefix.GenerateLengths(pc, maxPrefixBits); err != nil { - errors.Panic(err) - } - pc.SortBySymbol() - } - - // Write out information about the trees and tree selectors. - var mtf internal.MoveToFront - zw.wr.WriteBitsBE64(uint64(numTrees), 3) - zw.wr.WriteBitsBE64(uint64(numSels), 15) - zw.treeSelsMTF = append(zw.treeSelsMTF[:0], treeSels...) - mtf.Encode(zw.treeSelsMTF) - for _, sym := range zw.treeSelsMTF { - zw.wr.WriteSymbol(uint(sym), &encSel) - } - zw.wr.WritePrefixCodes(zw.codes1D[:numTrees], zw.trees1D[:numTrees]) - - // Write out prefix encoded symbols of compressed data. 
- var tree *prefix.Encoder - blkLen, selIdx = 0, 0 - for _, sym := range syms { - if blkLen == 0 { - blkLen = numBlockSyms - tree = &zw.trees1D[treeSels[selIdx]] - selIdx++ - } - blkLen-- - ok := zw.wr.TryWriteSymbol(uint(sym), tree) - if !ok { - zw.wr.WriteSymbol(uint(sym), tree) - } - } -} diff --git a/vendor/github.com/dsnet/compress/internal/common.go b/vendor/github.com/dsnet/compress/internal/common.go deleted file mode 100644 index da4e703434..0000000000 --- a/vendor/github.com/dsnet/compress/internal/common.go +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// Package internal is a collection of common compression algorithms. -// -// For performance reasons, these packages lack strong error checking and -// require that the caller to ensure that strict invariants are kept. -package internal - -var ( - // IdentityLUT returns the input key itself. - IdentityLUT = func() (lut [256]byte) { - for i := range lut { - lut[i] = uint8(i) - } - return lut - }() - - // ReverseLUT returns the input key with its bits reversed. - ReverseLUT = func() (lut [256]byte) { - for i := range lut { - b := uint8(i) - b = (b&0xaa)>>1 | (b&0x55)<<1 - b = (b&0xcc)>>2 | (b&0x33)<<2 - b = (b&0xf0)>>4 | (b&0x0f)<<4 - lut[i] = b - } - return lut - }() -) - -// ReverseUint32 reverses all bits of v. -func ReverseUint32(v uint32) (x uint32) { - x |= uint32(ReverseLUT[byte(v>>0)]) << 24 - x |= uint32(ReverseLUT[byte(v>>8)]) << 16 - x |= uint32(ReverseLUT[byte(v>>16)]) << 8 - x |= uint32(ReverseLUT[byte(v>>24)]) << 0 - return x -} - -// ReverseUint32N reverses the lower n bits of v. -func ReverseUint32N(v uint32, n uint) (x uint32) { - return ReverseUint32(v << (32 - n)) -} - -// ReverseUint64 reverses all bits of v. 
-func ReverseUint64(v uint64) (x uint64) { - x |= uint64(ReverseLUT[byte(v>>0)]) << 56 - x |= uint64(ReverseLUT[byte(v>>8)]) << 48 - x |= uint64(ReverseLUT[byte(v>>16)]) << 40 - x |= uint64(ReverseLUT[byte(v>>24)]) << 32 - x |= uint64(ReverseLUT[byte(v>>32)]) << 24 - x |= uint64(ReverseLUT[byte(v>>40)]) << 16 - x |= uint64(ReverseLUT[byte(v>>48)]) << 8 - x |= uint64(ReverseLUT[byte(v>>56)]) << 0 - return x -} - -// ReverseUint64N reverses the lower n bits of v. -func ReverseUint64N(v uint64, n uint) (x uint64) { - return ReverseUint64(v << (64 - n)) -} - -// MoveToFront is a data structure that allows for more efficient move-to-front -// transformations. This specific implementation assumes that the alphabet is -// densely packed within 0..255. -type MoveToFront struct { - dict [256]uint8 // Mapping from indexes to values - tail int // Number of tail bytes that are already ordered -} - -func (m *MoveToFront) Encode(vals []uint8) { - copy(m.dict[:], IdentityLUT[:256-m.tail]) // Reset dict to be identity - - var max int - for i, val := range vals { - var idx uint8 // Reverse lookup idx in dict - for di, dv := range m.dict { - if dv == val { - idx = uint8(di) - break - } - } - vals[i] = idx - - max |= int(idx) - copy(m.dict[1:], m.dict[:idx]) - m.dict[0] = val - } - m.tail = 256 - max - 1 -} - -func (m *MoveToFront) Decode(idxs []uint8) { - copy(m.dict[:], IdentityLUT[:256-m.tail]) // Reset dict to be identity - - var max int - for i, idx := range idxs { - val := m.dict[idx] // Forward lookup val in dict - idxs[i] = val - - max |= int(idx) - copy(m.dict[1:], m.dict[:idx]) - m.dict[0] = val - } - m.tail = 256 - max - 1 -} diff --git a/vendor/github.com/dsnet/compress/internal/debug.go b/vendor/github.com/dsnet/compress/internal/debug.go deleted file mode 100644 index 01df1f8953..0000000000 --- a/vendor/github.com/dsnet/compress/internal/debug.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// +build debug,!gofuzz - -package internal - -const ( - Debug = true - GoFuzz = false -) diff --git a/vendor/github.com/dsnet/compress/internal/errors/errors.go b/vendor/github.com/dsnet/compress/internal/errors/errors.go deleted file mode 100644 index c631afbd62..0000000000 --- a/vendor/github.com/dsnet/compress/internal/errors/errors.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2016, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// Package errors implements functions to manipulate compression errors. -// -// In idiomatic Go, it is an anti-pattern to use panics as a form of error -// reporting in the API. Instead, the expected way to transmit errors is by -// returning an error value. Unfortunately, the checking of "err != nil" in -// tight loops commonly found in compression causes non-negligible performance -// degradation. While this may not be idiomatic, the internal packages of this -// repository rely on panics as a normal means to convey errors. In order to -// ensure that these panics do not leak across the public API, the public -// packages must recover from these panics and present an error value. -// -// The Panic and Recover functions in this package provide a safe way to -// recover from errors only generated from within this repository. -// -// Example usage: -// func Foo() (err error) { -// defer errors.Recover(&err) -// -// if rand.Intn(2) == 0 { -// // Unexpected panics will not be caught by Recover. -// io.Closer(nil).Close() -// } else { -// // Errors thrown by Panic will be caught by Recover. -// errors.Panic(errors.New("whoopsie")) -// } -// } -// -package errors - -import "strings" - -const ( - // Unknown indicates that there is no classification for this error. 
- Unknown = iota - - // Internal indicates that this error is due to an internal bug. - // Users should file a issue report if this type of error is encountered. - Internal - - // Invalid indicates that this error is due to the user misusing the API - // and is indicative of a bug on the user's part. - Invalid - - // Deprecated indicates the use of a deprecated and unsupported feature. - Deprecated - - // Corrupted indicates that the input stream is corrupted. - Corrupted - - // Closed indicates that the handlers are closed. - Closed -) - -var codeMap = map[int]string{ - Unknown: "unknown error", - Internal: "internal error", - Invalid: "invalid argument", - Deprecated: "deprecated format", - Corrupted: "corrupted input", - Closed: "closed handler", -} - -type Error struct { - Code int // The error type - Pkg string // Name of the package where the error originated - Msg string // Descriptive message about the error (optional) -} - -func (e Error) Error() string { - var ss []string - for _, s := range []string{e.Pkg, codeMap[e.Code], e.Msg} { - if s != "" { - ss = append(ss, s) - } - } - return strings.Join(ss, ": ") -} - -func (e Error) CompressError() {} -func (e Error) IsInternal() bool { return e.Code == Internal } -func (e Error) IsInvalid() bool { return e.Code == Invalid } -func (e Error) IsDeprecated() bool { return e.Code == Deprecated } -func (e Error) IsCorrupted() bool { return e.Code == Corrupted } -func (e Error) IsClosed() bool { return e.Code == Closed } - -func IsInternal(err error) bool { return isCode(err, Internal) } -func IsInvalid(err error) bool { return isCode(err, Invalid) } -func IsDeprecated(err error) bool { return isCode(err, Deprecated) } -func IsCorrupted(err error) bool { return isCode(err, Corrupted) } -func IsClosed(err error) bool { return isCode(err, Closed) } - -func isCode(err error, code int) bool { - if cerr, ok := err.(Error); ok && cerr.Code == code { - return true - } - return false -} - -// errWrap is used by Panic and 
Recover to ensure that only errors raised by -// Panic are recovered by Recover. -type errWrap struct{ e *error } - -func Recover(err *error) { - switch ex := recover().(type) { - case nil: - // Do nothing. - case errWrap: - *err = *ex.e - default: - panic(ex) - } -} - -func Panic(err error) { - panic(errWrap{&err}) -} diff --git a/vendor/github.com/dsnet/compress/internal/gofuzz.go b/vendor/github.com/dsnet/compress/internal/gofuzz.go deleted file mode 100644 index 5035c9d63f..0000000000 --- a/vendor/github.com/dsnet/compress/internal/gofuzz.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2016, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// +build gofuzz - -package internal - -const ( - Debug = true - GoFuzz = true -) diff --git a/vendor/github.com/dsnet/compress/internal/prefix/debug.go b/vendor/github.com/dsnet/compress/internal/prefix/debug.go deleted file mode 100644 index 04fce70bbb..0000000000 --- a/vendor/github.com/dsnet/compress/internal/prefix/debug.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. 
- -// +build debug - -package prefix - -import ( - "fmt" - "math" - "strings" -) - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -func lenBase2(n uint) int { - return int(math.Ceil(math.Log2(float64(n + 1)))) -} -func padBase2(v, n uint, m int) string { - s := fmt.Sprintf("%b", 1< 0 { - return strings.Repeat(" ", pad) + s - } - return s -} - -func lenBase10(n int) int { - return int(math.Ceil(math.Log10(float64(n + 1)))) -} -func padBase10(n, m int) string { - s := fmt.Sprintf("%d", n) - if pad := m - len(s); pad > 0 { - return strings.Repeat(" ", pad) + s - } - return s -} - -func (rc RangeCodes) String() string { - var maxLen, maxBase int - for _, c := range rc { - maxLen = max(maxLen, int(c.Len)) - maxBase = max(maxBase, int(c.Base)) - } - - var ss []string - ss = append(ss, "{") - for i, c := range rc { - base := padBase10(int(c.Base), lenBase10(maxBase)) - if c.Len > 0 { - base += fmt.Sprintf("-%d", c.End()-1) - } - ss = append(ss, fmt.Sprintf("\t%s: {len: %s, range: %s},", - padBase10(int(i), lenBase10(len(rc)-1)), - padBase10(int(c.Len), lenBase10(maxLen)), - base, - )) - } - ss = append(ss, "}") - return strings.Join(ss, "\n") -} - -func (pc PrefixCodes) String() string { - var maxSym, maxLen, maxCnt int - for _, c := range pc { - maxSym = max(maxSym, int(c.Sym)) - maxLen = max(maxLen, int(c.Len)) - maxCnt = max(maxCnt, int(c.Cnt)) - } - - var ss []string - ss = append(ss, "{") - for _, c := range pc { - var cntStr string - if maxCnt > 0 { - cnt := int(32*float32(c.Cnt)/float32(maxCnt) + 0.5) - cntStr = fmt.Sprintf("%s |%s", - padBase10(int(c.Cnt), lenBase10(maxCnt)), - strings.Repeat("#", cnt), - ) - } - ss = append(ss, fmt.Sprintf("\t%s: %s, %s", - padBase10(int(c.Sym), lenBase10(maxSym)), - padBase2(uint(c.Val), uint(c.Len), maxLen), - cntStr, - )) - } - ss = append(ss, "}") - return strings.Join(ss, "\n") -} - -func (pd Decoder) String() string { - var ss []string - ss = append(ss, "{") - if len(pd.chunks) > 0 { - ss = append(ss, 
"\tchunks: {") - for i, c := range pd.chunks { - label := "sym" - if uint(c&countMask) > uint(pd.chunkBits) { - label = "idx" - } - ss = append(ss, fmt.Sprintf("\t\t%s: {%s: %s, len: %s}", - padBase2(uint(i), uint(pd.chunkBits), int(pd.chunkBits)), - label, padBase10(int(c>>countBits), 3), - padBase10(int(c&countMask), 2), - )) - } - ss = append(ss, "\t},") - - for j, links := range pd.links { - ss = append(ss, fmt.Sprintf("\tlinks[%d]: {", j)) - linkBits := lenBase2(uint(pd.linkMask)) - for i, c := range links { - ss = append(ss, fmt.Sprintf("\t\t%s: {sym: %s, len: %s},", - padBase2(uint(i), uint(linkBits), int(linkBits)), - padBase10(int(c>>countBits), 3), - padBase10(int(c&countMask), 2), - )) - } - ss = append(ss, "\t},") - } - } - ss = append(ss, fmt.Sprintf("\tchunkMask: %b,", pd.chunkMask)) - ss = append(ss, fmt.Sprintf("\tlinkMask: %b,", pd.linkMask)) - ss = append(ss, fmt.Sprintf("\tchunkBits: %d,", pd.chunkBits)) - ss = append(ss, fmt.Sprintf("\tMinBits: %d,", pd.MinBits)) - ss = append(ss, fmt.Sprintf("\tNumSyms: %d,", pd.NumSyms)) - ss = append(ss, "}") - return strings.Join(ss, "\n") -} - -func (pe Encoder) String() string { - var maxLen int - for _, c := range pe.chunks { - maxLen = max(maxLen, int(c&countMask)) - } - - var ss []string - ss = append(ss, "{") - if len(pe.chunks) > 0 { - ss = append(ss, "\tchunks: {") - for i, c := range pe.chunks { - ss = append(ss, fmt.Sprintf("\t\t%s: %s,", - padBase10(i, 3), - padBase2(uint(c>>countBits), uint(c&countMask), maxLen), - )) - } - ss = append(ss, "\t},") - } - ss = append(ss, fmt.Sprintf("\tchunkMask: %b,", pe.chunkMask)) - ss = append(ss, fmt.Sprintf("\tNumSyms: %d,", pe.NumSyms)) - ss = append(ss, "}") - return strings.Join(ss, "\n") -} diff --git a/vendor/github.com/dsnet/compress/internal/prefix/decoder.go b/vendor/github.com/dsnet/compress/internal/prefix/decoder.go deleted file mode 100644 index a9bc2dcb9b..0000000000 --- 
a/vendor/github.com/dsnet/compress/internal/prefix/decoder.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package prefix - -import ( - "sort" - - "github.com/dsnet/compress/internal" -) - -// The algorithm used to decode variable length codes is based on the lookup -// method in zlib. If the code is less-than-or-equal to maxChunkBits, -// then the symbol can be decoded using a single lookup into the chunks table. -// Otherwise, the links table will be used for a second level lookup. -// -// The chunks slice is keyed by the contents of the bit buffer ANDed with -// the chunkMask to avoid a out-of-bounds lookup. The value of chunks is a tuple -// that is decoded as follow: -// -// var length = chunks[bitBuffer&chunkMask] & countMask -// var symbol = chunks[bitBuffer&chunkMask] >> countBits -// -// If the decoded length is larger than chunkBits, then an overflow link table -// must be used for further decoding. In this case, the symbol is actually the -// index into the links tables. The second-level links table returned is -// processed in the same way as the chunks table. 
-// -// if length > chunkBits { -// var index = symbol // Previous symbol is index into links tables -// length = links[index][bitBuffer>>chunkBits & linkMask] & countMask -// symbol = links[index][bitBuffer>>chunkBits & linkMask] >> countBits -// } -// -// See the following: -// http://www.gzip.org/algorithm.txt - -type Decoder struct { - chunks []uint32 // First-level lookup map - links [][]uint32 // Second-level lookup map - chunkMask uint32 // Mask the length of the chunks table - linkMask uint32 // Mask the length of the link table - chunkBits uint32 // Bit-length of the chunks table - - MinBits uint32 // The minimum number of bits to safely make progress - NumSyms uint32 // Number of symbols -} - -// Init initializes Decoder according to the codes provided. -func (pd *Decoder) Init(codes PrefixCodes) { - // Handle special case trees. - if len(codes) <= 1 { - switch { - case len(codes) == 0: // Empty tree (should error if used later) - *pd = Decoder{chunks: pd.chunks[:0], links: pd.links[:0], NumSyms: 0} - case len(codes) == 1 && codes[0].Len == 0: // Single code tree (bit-length of zero) - pd.chunks = append(pd.chunks[:0], codes[0].Sym< c.Len { - minBits = c.Len - } - if maxBits < c.Len { - maxBits = c.Len - } - } - - // Allocate chunks table as needed. - const maxChunkBits = 9 // This can be tuned for better performance - pd.NumSyms = uint32(len(codes)) - pd.MinBits = minBits - pd.chunkBits = maxBits - if pd.chunkBits > maxChunkBits { - pd.chunkBits = maxChunkBits - } - numChunks := 1 << pd.chunkBits - pd.chunks = allocUint32s(pd.chunks, numChunks) - pd.chunkMask = uint32(numChunks - 1) - - // Allocate links tables as needed. 
- pd.links = pd.links[:0] - pd.linkMask = 0 - if pd.chunkBits < maxBits { - numLinks := 1 << (maxBits - pd.chunkBits) - pd.linkMask = uint32(numLinks - 1) - - var linkIdx uint32 - for i := range pd.chunks { - pd.chunks[i] = 0 // Logic below relies on zero value as uninitialized - } - for _, c := range codes { - if c.Len > pd.chunkBits && pd.chunks[c.Val&pd.chunkMask] == 0 { - pd.chunks[c.Val&pd.chunkMask] = (linkIdx << countBits) | (pd.chunkBits + 1) - linkIdx++ - } - } - - pd.links = extendSliceUint32s(pd.links, int(linkIdx)) - linksFlat := allocUint32s(pd.links[0], numLinks*int(linkIdx)) - for i, j := 0, 0; i < len(pd.links); i, j = i+1, j+numLinks { - pd.links[i] = linksFlat[j : j+numLinks] - } - } - - // Fill out chunks and links tables with values. - for _, c := range codes { - chunk := c.Sym<> countBits - links := pd.links[linkIdx] - skip := 1 << uint(c.Len-pd.chunkBits) - for j := int(c.Val >> pd.chunkBits); j < len(links); j += skip { - links[j] = chunk - } - } - } -} diff --git a/vendor/github.com/dsnet/compress/internal/prefix/encoder.go b/vendor/github.com/dsnet/compress/internal/prefix/encoder.go deleted file mode 100644 index 4424a01195..0000000000 --- a/vendor/github.com/dsnet/compress/internal/prefix/encoder.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package prefix - -import ( - "sort" - - "github.com/dsnet/compress/internal" -) - -type Encoder struct { - chunks []uint32 // First-level lookup map - chunkMask uint32 // Mask the length of the chunks table - - NumSyms uint32 // Number of symbols -} - -// Init initializes Encoder according to the codes provided. -func (pe *Encoder) Init(codes PrefixCodes) { - // Handle special case trees. 
- if len(codes) <= 1 { - switch { - case len(codes) == 0: // Empty tree (should error if used later) - *pe = Encoder{chunks: pe.chunks[:0], NumSyms: 0} - case len(codes) == 1 && codes[0].Len == 0: // Single code tree (bit-length of zero) - pe.chunks = append(pe.chunks[:0], codes[0].Val< 0; n >>= 1 { - numChunks <<= 1 - } - pe.NumSyms = uint32(len(codes)) - -retry: - // Allocate and reset chunks. - pe.chunks = allocUint32s(pe.chunks, numChunks) - pe.chunkMask = uint32(numChunks - 1) - for i := range pe.chunks { - pe.chunks[i] = 0 // Logic below relies on zero value as uninitialized - } - - // Insert each symbol, checking that there are no conflicts. - for _, c := range codes { - if pe.chunks[c.Sym&pe.chunkMask] > 0 { - // Collision found our "hash" table, so grow and try again. - numChunks <<= 1 - goto retry - } - pe.chunks[c.Sym&pe.chunkMask] = c.Val<> uint(c.Len) - } - return sum == 0 || len(pc) == 0 -} - -// checkPrefixes reports whether all codes have non-overlapping prefixes. -func (pc PrefixCodes) checkPrefixes() bool { - for i, c1 := range pc { - for j, c2 := range pc { - mask := uint32(1)< 0 { - c.Val = internal.ReverseUint32N(c.Val, uint(c.Len)) - if vals[c.Len].Cnt > 0 && vals[c.Len].Val+1 != c.Val { - return false - } - vals[c.Len].Val = c.Val - vals[c.Len].Cnt++ - } - } - - // Rule 2. - var last PrefixCode - for _, v := range vals { - if v.Cnt > 0 { - curVal := v.Val - v.Cnt + 1 - if last.Cnt != 0 && last.Val >= curVal { - return false - } - last = v - } - } - return true -} - -// GenerateLengths assigns non-zero bit-lengths to all codes. Codes with high -// frequency counts will be assigned shorter codes to reduce bit entropy. -// This function is used primarily by compressors. -// -// The input codes must have the Cnt field populated, be sorted by count. -// Even if a code has a count of 0, a non-zero bit-length will be assigned. -// -// The result will have the Len field populated. 
The algorithm used guarantees -// that Len <= maxBits and that it is a complete prefix tree. The resulting -// codes will remain sorted by count. -func GenerateLengths(codes PrefixCodes, maxBits uint) error { - if len(codes) <= 1 { - if len(codes) == 1 { - codes[0].Len = 0 - } - return nil - } - - // Verify that the codes are in ascending order by count. - cntLast := codes[0].Cnt - for _, c := range codes[1:] { - if c.Cnt < cntLast { - return errorf(errors.Invalid, "non-monotonically increasing symbol counts") - } - cntLast = c.Cnt - } - - // Construct a Huffman tree used to generate the bit-lengths. - // - // The Huffman tree is a binary tree where each symbol lies as a leaf node - // on this tree. The length of the prefix code to assign is the depth of - // that leaf from the root. The Huffman algorithm, which runs in O(n), - // is used to generate the tree. It assumes that codes are sorted in - // increasing order of frequency. - // - // The algorithm is as follows: - // 1. Start with two queues, F and Q, where F contains all of the starting - // symbols sorted such that symbols with lowest counts come first. - // 2. While len(F)+len(Q) > 1: - // 2a. Dequeue the node from F or Q that has the lowest weight as N0. - // 2b. Dequeue the node from F or Q that has the lowest weight as N1. - // 2c. Create a new node N that has N0 and N1 as its children. - // 2d. Enqueue N into the back of Q. - // 3. The tree's root node is Q[0]. - type node struct { - cnt uint32 - - // n0 or c0 represent the left child of this node. - // Since Go does not have unions, only one of these will be set. - // Similarly, n1 or c1 represent the right child of this node. - // - // If n0 or n1 is set, then it represents a "pointer" to another - // node in the Huffman tree. Since Go's pointer analysis cannot reason - // that these node pointers do not escape (golang.org/issue/13493), - // we use an index to a node in the nodes slice as a pseudo-pointer. 
- // - // If c0 or c1 is set, then it represents a leaf "node" in the - // Huffman tree. The leaves are the PrefixCode values themselves. - n0, n1 int // Index to child nodes - c0, c1 *PrefixCode - } - var nodeIdx int - var nodeArr [1024]node // Large enough to handle most cases on the stack - nodes := nodeArr[:] - if len(nodes) < len(codes) { - nodes = make([]node, len(codes)) // Number of internal nodes < number of leaves - } - freqs, queue := codes, nodes[:0] - for len(freqs)+len(queue) > 1 { - // These are the two smallest nodes at the front of freqs and queue. - var n node - if len(queue) == 0 || (len(freqs) > 0 && freqs[0].Cnt <= queue[0].cnt) { - n.c0, freqs = &freqs[0], freqs[1:] - n.cnt += n.c0.Cnt - } else { - n.cnt += queue[0].cnt - n.n0 = nodeIdx // nodeIdx is same as &queue[0] - &nodes[0] - nodeIdx++ - queue = queue[1:] - } - if len(queue) == 0 || (len(freqs) > 0 && freqs[0].Cnt <= queue[0].cnt) { - n.c1, freqs = &freqs[0], freqs[1:] - n.cnt += n.c1.Cnt - } else { - n.cnt += queue[0].cnt - n.n1 = nodeIdx // nodeIdx is same as &queue[0] - &nodes[0] - nodeIdx++ - queue = queue[1:] - } - queue = append(queue, n) - } - rootIdx := nodeIdx - - // Search the whole binary tree, noting when we hit each leaf node. - // We do not care about the exact Huffman tree structure, but rather we only - // care about depth of each of the leaf nodes. That is, the depth determines - // how long each symbol is in bits. - // - // Since the number of leaves is n, there is at most n internal nodes. - // Thus, this algorithm runs in O(n). - var fixBits bool - var explore func(int, uint) - explore = func(rootIdx int, level uint) { - root := &nodes[rootIdx] - - // Explore left branch. - if root.c0 == nil { - explore(root.n0, level+1) - } else { - fixBits = fixBits || (level > maxBits) - root.c0.Len = uint32(level) - } - - // Explore right branch. 
- if root.c1 == nil { - explore(root.n1, level+1) - } else { - fixBits = fixBits || (level > maxBits) - root.c1.Len = uint32(level) - } - } - explore(rootIdx, 1) - - // Fix the bit-lengths if we violate the maxBits requirement. - if fixBits { - // Create histogram for number of symbols with each bit-length. - var symBitsArr [valueBits + 1]uint32 - symBits := symBitsArr[:] // symBits[nb] indicates number of symbols using nb bits - for _, c := range codes { - for int(c.Len) >= len(symBits) { - symBits = append(symBits, 0) - } - symBits[c.Len]++ - } - - // Fudge the tree such that the largest bit-length is <= maxBits. - // This is accomplish by effectively doing a tree rotation. That is, we - // increase the bit-length of some higher frequency code, so that the - // bit-lengths of lower frequency codes can be decreased. - // - // Visually, this looks like the following transform: - // - // Level Before After - // __ ___ - // / \ / \ - // n-1 X / \ /\ /\ - // n X /\ X X X X - // n+1 X X - // - var treeRotate func(uint) - treeRotate = func(nb uint) { - if symBits[nb-1] == 0 { - treeRotate(nb - 1) - } - symBits[nb-1] -= 1 // Push this node to the level below - symBits[nb] += 3 // This level gets one node from above, two from below - symBits[nb+1] -= 2 // Push two nodes to the level above - } - for i := uint(len(symBits)) - 1; i > maxBits; i-- { - for symBits[i] > 0 { - treeRotate(i - 1) - } - } - - // Assign bit-lengths to each code. Since codes is sorted in increasing - // order of frequency, that means that the most frequently used symbols - // should have the shortest bit-lengths. Thus, we copy symbols to codes - // from the back of codes first. 
- cs := codes - for nb, cnt := range symBits { - if cnt > 0 { - pos := len(cs) - int(cnt) - cs2 := cs[pos:] - for i := range cs2 { - cs2[i].Len = uint32(nb) - } - cs = cs[:pos] - } - } - if len(cs) != 0 { - panic("not all codes were used up") - } - } - - if internal.Debug && !codes.checkLengths() { - panic("incomplete prefix tree detected") - } - return nil -} - -// GeneratePrefixes assigns a prefix value to all codes according to the -// bit-lengths. This function is used by both compressors and decompressors. -// -// The input codes must have the Sym and Len fields populated and be -// sorted by symbol. The bit-lengths of each code must be properly allocated, -// such that it forms a complete tree. -// -// The result will have the Val field populated and will produce a canonical -// prefix tree. The resulting codes will remain sorted by symbol. -func GeneratePrefixes(codes PrefixCodes) error { - if len(codes) <= 1 { - if len(codes) == 1 { - if codes[0].Len != 0 { - return errorf(errors.Invalid, "degenerate prefix tree with one node") - } - codes[0].Val = 0 - } - return nil - } - - // Compute basic statistics on the symbols. - var bitCnts [valueBits + 1]uint - c0 := codes[0] - bitCnts[c0.Len]++ - minBits, maxBits, symLast := c0.Len, c0.Len, c0.Sym - for _, c := range codes[1:] { - if c.Sym <= symLast { - return errorf(errors.Invalid, "non-unique or non-monotonically increasing symbols") - } - if minBits > c.Len { - minBits = c.Len - } - if maxBits < c.Len { - maxBits = c.Len - } - bitCnts[c.Len]++ // Histogram of bit counts - symLast = c.Sym // Keep track of last symbol - } - if minBits == 0 { - return errorf(errors.Invalid, "invalid prefix bit-length") - } - - // Compute the next code for a symbol of a given bit length. 
- var nextCodes [valueBits + 1]uint - var code uint - for i := minBits; i <= maxBits; i++ { - code <<= 1 - nextCodes[i] = code - code += bitCnts[i] - } - if code != 1<= n { - return s[:n] - } - return make([]uint32, n, n*3/2) -} - -func extendSliceUint32s(s [][]uint32, n int) [][]uint32 { - if cap(s) >= n { - return s[:n] - } - ss := make([][]uint32, n, n*3/2) - copy(ss, s[:cap(s)]) - return ss -} diff --git a/vendor/github.com/dsnet/compress/internal/prefix/range.go b/vendor/github.com/dsnet/compress/internal/prefix/range.go deleted file mode 100644 index b7eddad537..0000000000 --- a/vendor/github.com/dsnet/compress/internal/prefix/range.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package prefix - -type RangeCode struct { - Base uint32 // Starting base offset of the range - Len uint32 // Bit-length of a subsequent integer to add to base offset -} -type RangeCodes []RangeCode - -type RangeEncoder struct { - rcs RangeCodes - lut [1024]uint32 - minBase uint -} - -// End reports the non-inclusive ending range. -func (rc RangeCode) End() uint32 { return rc.Base + (1 << rc.Len) } - -// MakeRangeCodes creates a RangeCodes, where each region is assumed to be -// contiguously stacked, without any gaps, with bit-lengths taken from bits. -func MakeRangeCodes(minBase uint, bits []uint) (rc RangeCodes) { - for _, nb := range bits { - rc = append(rc, RangeCode{Base: uint32(minBase), Len: uint32(nb)}) - minBase += 1 << nb - } - return rc -} - -// Base reports the inclusive starting range for all ranges. -func (rcs RangeCodes) Base() uint32 { return rcs[0].Base } - -// End reports the non-inclusive ending range for all ranges. -func (rcs RangeCodes) End() uint32 { return rcs[len(rcs)-1].End() } - -// checkValid reports whether the RangeCodes is valid. 
In order to be valid, -// the following must hold true: -// rcs[i-1].Base <= rcs[i].Base -// rcs[i-1].End <= rcs[i].End -// rcs[i-1].End >= rcs[i].Base -// -// Practically speaking, each range must be increasing and must not have any -// gaps in between. It is okay for ranges to overlap. -func (rcs RangeCodes) checkValid() bool { - if len(rcs) == 0 { - return false - } - pre := rcs[0] - for _, cur := range rcs[1:] { - preBase, preEnd := pre.Base, pre.End() - curBase, curEnd := cur.Base, cur.End() - if preBase > curBase || preEnd > curEnd || preEnd < curBase { - return false - } - pre = cur - } - return true -} - -func (re *RangeEncoder) Init(rcs RangeCodes) { - if !rcs.checkValid() { - panic("invalid range codes") - } - *re = RangeEncoder{rcs: rcs, minBase: uint(rcs.Base())} - for sym, rc := range rcs { - base := int(rc.Base) - int(re.minBase) - end := int(rc.End()) - int(re.minBase) - if base >= len(re.lut) { - break - } - if end > len(re.lut) { - end = len(re.lut) - } - for i := base; i < end; i++ { - re.lut[i] = uint32(sym) - } - } -} - -func (re *RangeEncoder) Encode(offset uint) (sym uint) { - if idx := int(offset - re.minBase); idx < len(re.lut) { - return uint(re.lut[idx]) - } - sym = uint(re.lut[len(re.lut)-1]) -retry: - if int(sym) >= len(re.rcs) || re.rcs[sym].Base > uint32(offset) { - return sym - 1 - } - sym++ - goto retry // Avoid for-loop so that this function can be inlined -} diff --git a/vendor/github.com/dsnet/compress/internal/prefix/reader.go b/vendor/github.com/dsnet/compress/internal/prefix/reader.go deleted file mode 100644 index e6252c957c..0000000000 --- a/vendor/github.com/dsnet/compress/internal/prefix/reader.go +++ /dev/null @@ -1,335 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. 
- -package prefix - -import ( - "bufio" - "bytes" - "encoding/binary" - "io" - "strings" - - "github.com/dsnet/compress" - "github.com/dsnet/compress/internal" - "github.com/dsnet/compress/internal/errors" -) - -// Reader implements a prefix decoder. If the input io.Reader satisfies the -// compress.ByteReader or compress.BufferedReader interface, then it also -// guarantees that it will never read more bytes than is necessary. -// -// For high performance, provide an io.Reader that satisfies the -// compress.BufferedReader interface. If the input does not satisfy either -// compress.ByteReader or compress.BufferedReader, then it will be internally -// wrapped with a bufio.Reader. -type Reader struct { - Offset int64 // Number of bytes read from the underlying io.Reader - - rd io.Reader - byteRd compress.ByteReader // Set if rd is a ByteReader - bufRd compress.BufferedReader // Set if rd is a BufferedReader - - bufBits uint64 // Buffer to hold some bits - numBits uint // Number of valid bits in bufBits - bigEndian bool // Do we treat input bytes as big endian? - - // These fields are only used if rd is a compress.BufferedReader. - bufPeek []byte // Buffer for the Peek data - discardBits int // Number of bits to discard from reader - fedBits uint // Number of bits fed in last call to PullBits - - // These fields are used to reduce allocations. - bb *buffer - br *bytesReader - sr *stringReader - bu *bufio.Reader -} - -// Init initializes the bit Reader to read from r. If bigEndian is true, then -// bits will be read starting from the most-significant bits of a byte -// (as done in bzip2), otherwise it will read starting from the -// least-significant bits of a byte (such as for deflate and brotli). 
-func (pr *Reader) Init(r io.Reader, bigEndian bool) { - *pr = Reader{ - rd: r, - bigEndian: bigEndian, - - bb: pr.bb, - br: pr.br, - sr: pr.sr, - bu: pr.bu, - } - switch rr := r.(type) { - case *bytes.Buffer: - if pr.bb == nil { - pr.bb = new(buffer) - } - *pr.bb = buffer{Buffer: rr} - pr.bufRd = pr.bb - case *bytes.Reader: - if pr.br == nil { - pr.br = new(bytesReader) - } - *pr.br = bytesReader{Reader: rr} - pr.bufRd = pr.br - case *strings.Reader: - if pr.sr == nil { - pr.sr = new(stringReader) - } - *pr.sr = stringReader{Reader: rr} - pr.bufRd = pr.sr - case compress.BufferedReader: - pr.bufRd = rr - case compress.ByteReader: - pr.byteRd = rr - default: - if pr.bu == nil { - pr.bu = bufio.NewReader(nil) - } - pr.bu.Reset(r) - pr.rd, pr.bufRd = pr.bu, pr.bu - } -} - -// BitsRead reports the total number of bits emitted from any Read method. -func (pr *Reader) BitsRead() int64 { - offset := 8*pr.Offset - int64(pr.numBits) - if pr.bufRd != nil { - discardBits := pr.discardBits + int(pr.fedBits-pr.numBits) - offset = 8*pr.Offset + int64(discardBits) - } - return offset -} - -// IsBufferedReader reports whether the underlying io.Reader is also a -// compress.BufferedReader. -func (pr *Reader) IsBufferedReader() bool { - return pr.bufRd != nil -} - -// ReadPads reads 0-7 bits from the bit buffer to achieve byte-alignment. -func (pr *Reader) ReadPads() uint { - nb := pr.numBits % 8 - val := uint(pr.bufBits & uint64(1<>= nb - pr.numBits -= nb - return val -} - -// Read reads bytes into buf. -// The bit-ordering mode does not affect this method. 
-func (pr *Reader) Read(buf []byte) (cnt int, err error) { - if pr.numBits > 0 { - if pr.numBits%8 != 0 { - return 0, errorf(errors.Invalid, "non-aligned bit buffer") - } - for cnt = 0; len(buf) > cnt && pr.numBits > 0; cnt++ { - if pr.bigEndian { - buf[cnt] = internal.ReverseLUT[byte(pr.bufBits)] - } else { - buf[cnt] = byte(pr.bufBits) - } - pr.bufBits >>= 8 - pr.numBits -= 8 - } - return cnt, nil - } - if _, err := pr.Flush(); err != nil { - return 0, err - } - cnt, err = pr.rd.Read(buf) - pr.Offset += int64(cnt) - return cnt, err -} - -// ReadOffset reads an offset value using the provided RangeCodes indexed by -// the symbol read. -func (pr *Reader) ReadOffset(pd *Decoder, rcs RangeCodes) uint { - rc := rcs[pr.ReadSymbol(pd)] - return uint(rc.Base) + pr.ReadBits(uint(rc.Len)) -} - -// TryReadBits attempts to read nb bits using the contents of the bit buffer -// alone. It returns the value and whether it succeeded. -// -// This method is designed to be inlined for performance reasons. -func (pr *Reader) TryReadBits(nb uint) (uint, bool) { - if pr.numBits < nb { - return 0, false - } - val := uint(pr.bufBits & uint64(1<>= nb - pr.numBits -= nb - return val, true -} - -// ReadBits reads nb bits in from the underlying reader. -func (pr *Reader) ReadBits(nb uint) uint { - if err := pr.PullBits(nb); err != nil { - errors.Panic(err) - } - val := uint(pr.bufBits & uint64(1<>= nb - pr.numBits -= nb - return val -} - -// TryReadSymbol attempts to decode the next symbol using the contents of the -// bit buffer alone. It returns the decoded symbol and whether it succeeded. -// -// This method is designed to be inlined for performance reasons. 
-func (pr *Reader) TryReadSymbol(pd *Decoder) (uint, bool) { - if pr.numBits < uint(pd.MinBits) || len(pd.chunks) == 0 { - return 0, false - } - chunk := pd.chunks[uint32(pr.bufBits)&pd.chunkMask] - nb := uint(chunk & countMask) - if nb > pr.numBits || nb > uint(pd.chunkBits) { - return 0, false - } - pr.bufBits >>= nb - pr.numBits -= nb - return uint(chunk >> countBits), true -} - -// ReadSymbol reads the next symbol using the provided prefix Decoder. -func (pr *Reader) ReadSymbol(pd *Decoder) uint { - if len(pd.chunks) == 0 { - panicf(errors.Invalid, "decode with empty prefix tree") - } - - nb := uint(pd.MinBits) - for { - if err := pr.PullBits(nb); err != nil { - errors.Panic(err) - } - chunk := pd.chunks[uint32(pr.bufBits)&pd.chunkMask] - nb = uint(chunk & countMask) - if nb > uint(pd.chunkBits) { - linkIdx := chunk >> countBits - chunk = pd.links[linkIdx][uint32(pr.bufBits>>pd.chunkBits)&pd.linkMask] - nb = uint(chunk & countMask) - } - if nb <= pr.numBits { - pr.bufBits >>= nb - pr.numBits -= nb - return uint(chunk >> countBits) - } - } -} - -// Flush updates the read offset of the underlying ByteReader. -// If reader is a compress.BufferedReader, then this calls Discard to update -// the read offset. -func (pr *Reader) Flush() (int64, error) { - if pr.bufRd == nil { - return pr.Offset, nil - } - - // Update the number of total bits to discard. - pr.discardBits += int(pr.fedBits - pr.numBits) - pr.fedBits = pr.numBits - - // Discard some bytes to update read offset. - var err error - nd := (pr.discardBits + 7) / 8 // Round up to nearest byte - nd, err = pr.bufRd.Discard(nd) - pr.discardBits -= nd * 8 // -7..0 - pr.Offset += int64(nd) - - // These are invalid after Discard. - pr.bufPeek = nil - return pr.Offset, err -} - -// PullBits ensures that at least nb bits exist in the bit buffer. 
-// If the underlying reader is a compress.BufferedReader, then this will fill -// the bit buffer with as many bits as possible, relying on Peek and Discard to -// properly advance the read offset. Otherwise, it will use ReadByte to fill the -// buffer with just the right number of bits. -func (pr *Reader) PullBits(nb uint) error { - if pr.bufRd != nil { - pr.discardBits += int(pr.fedBits - pr.numBits) - for { - if len(pr.bufPeek) == 0 { - pr.fedBits = pr.numBits // Don't discard bits just added - if _, err := pr.Flush(); err != nil { - return err - } - - // Peek no more bytes than necessary. - // The computation for cntPeek computes the minimum number of - // bytes to Peek to fill nb bits. - var err error - cntPeek := int(nb+(-nb&7)) / 8 - if cntPeek < pr.bufRd.Buffered() { - cntPeek = pr.bufRd.Buffered() - } - pr.bufPeek, err = pr.bufRd.Peek(cntPeek) - pr.bufPeek = pr.bufPeek[int(pr.numBits/8):] // Skip buffered bits - if len(pr.bufPeek) == 0 { - if pr.numBits >= nb { - break - } - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - return err - } - } - - n := int(64-pr.numBits) / 8 // Number of bytes to copy to bit buffer - if len(pr.bufPeek) >= 8 { - // Starting with Go 1.7, the compiler should use a wide integer - // load here if the architecture supports it. - u := binary.LittleEndian.Uint64(pr.bufPeek) - if pr.bigEndian { - // Swap all the bits within each byte. 
- u = (u&0xaaaaaaaaaaaaaaaa)>>1 | (u&0x5555555555555555)<<1 - u = (u&0xcccccccccccccccc)>>2 | (u&0x3333333333333333)<<2 - u = (u&0xf0f0f0f0f0f0f0f0)>>4 | (u&0x0f0f0f0f0f0f0f0f)<<4 - } - - pr.bufBits |= u << pr.numBits - pr.numBits += uint(n * 8) - pr.bufPeek = pr.bufPeek[n:] - break - } else { - if n > len(pr.bufPeek) { - n = len(pr.bufPeek) - } - for _, c := range pr.bufPeek[:n] { - if pr.bigEndian { - c = internal.ReverseLUT[c] - } - pr.bufBits |= uint64(c) << pr.numBits - pr.numBits += 8 - } - pr.bufPeek = pr.bufPeek[n:] - if pr.numBits > 56 { - break - } - } - } - pr.fedBits = pr.numBits - } else { - for pr.numBits < nb { - c, err := pr.byteRd.ReadByte() - if err != nil { - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - return err - } - if pr.bigEndian { - c = internal.ReverseLUT[c] - } - pr.bufBits |= uint64(c) << pr.numBits - pr.numBits += 8 - pr.Offset++ - } - } - return nil -} diff --git a/vendor/github.com/dsnet/compress/internal/prefix/wrap.go b/vendor/github.com/dsnet/compress/internal/prefix/wrap.go deleted file mode 100644 index 49906d4af1..0000000000 --- a/vendor/github.com/dsnet/compress/internal/prefix/wrap.go +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package prefix - -import ( - "bytes" - "io" - "strings" -) - -// For some of the common Readers, we wrap and extend them to satisfy the -// compress.BufferedReader interface to improve performance. 
- -type buffer struct { - *bytes.Buffer -} - -type bytesReader struct { - *bytes.Reader - pos int64 - buf []byte - arr [512]byte -} - -type stringReader struct { - *strings.Reader - pos int64 - buf []byte - arr [512]byte -} - -func (r *buffer) Buffered() int { - return r.Len() -} - -func (r *buffer) Peek(n int) ([]byte, error) { - b := r.Bytes() - if len(b) < n { - return b, io.EOF - } - return b[:n], nil -} - -func (r *buffer) Discard(n int) (int, error) { - b := r.Next(n) - if len(b) < n { - return len(b), io.EOF - } - return n, nil -} - -func (r *bytesReader) Buffered() int { - r.update() - if r.Len() > len(r.buf) { - return len(r.buf) - } - return r.Len() -} - -func (r *bytesReader) Peek(n int) ([]byte, error) { - if n > len(r.arr) { - return nil, io.ErrShortBuffer - } - - // Return sub-slice of local buffer if possible. - r.update() - if len(r.buf) >= n { - return r.buf[:n], nil - } - - // Fill entire local buffer, and return appropriate sub-slice. - cnt, err := r.ReadAt(r.arr[:], r.pos) - r.buf = r.arr[:cnt] - if cnt < n { - return r.arr[:cnt], err - } - return r.arr[:n], nil -} - -func (r *bytesReader) Discard(n int) (int, error) { - var err error - if n > r.Len() { - n, err = r.Len(), io.EOF - } - r.Seek(int64(n), io.SeekCurrent) - return n, err -} - -// update reslices the internal buffer to be consistent with the read offset. -func (r *bytesReader) update() { - pos, _ := r.Seek(0, io.SeekCurrent) - if off := pos - r.pos; off >= 0 && off < int64(len(r.buf)) { - r.buf, r.pos = r.buf[off:], pos - } else { - r.buf, r.pos = nil, pos - } -} - -func (r *stringReader) Buffered() int { - r.update() - if r.Len() > len(r.buf) { - return len(r.buf) - } - return r.Len() -} - -func (r *stringReader) Peek(n int) ([]byte, error) { - if n > len(r.arr) { - return nil, io.ErrShortBuffer - } - - // Return sub-slice of local buffer if possible. - r.update() - if len(r.buf) >= n { - return r.buf[:n], nil - } - - // Fill entire local buffer, and return appropriate sub-slice. 
- cnt, err := r.ReadAt(r.arr[:], r.pos) - r.buf = r.arr[:cnt] - if cnt < n { - return r.arr[:cnt], err - } - return r.arr[:n], nil -} - -func (r *stringReader) Discard(n int) (int, error) { - var err error - if n > r.Len() { - n, err = r.Len(), io.EOF - } - r.Seek(int64(n), io.SeekCurrent) - return n, err -} - -// update reslices the internal buffer to be consistent with the read offset. -func (r *stringReader) update() { - pos, _ := r.Seek(0, io.SeekCurrent) - if off := pos - r.pos; off >= 0 && off < int64(len(r.buf)) { - r.buf, r.pos = r.buf[off:], pos - } else { - r.buf, r.pos = nil, pos - } -} diff --git a/vendor/github.com/dsnet/compress/internal/prefix/writer.go b/vendor/github.com/dsnet/compress/internal/prefix/writer.go deleted file mode 100644 index c9783905a7..0000000000 --- a/vendor/github.com/dsnet/compress/internal/prefix/writer.go +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -package prefix - -import ( - "encoding/binary" - "io" - - "github.com/dsnet/compress/internal/errors" -) - -// Writer implements a prefix encoder. For performance reasons, Writer will not -// write bytes immediately to the underlying stream. -type Writer struct { - Offset int64 // Number of bytes written to the underlying io.Writer - - wr io.Writer - bufBits uint64 // Buffer to hold some bits - numBits uint // Number of valid bits in bufBits - bigEndian bool // Are bits written in big-endian order? - - buf [512]byte - cntBuf int -} - -// Init initializes the bit Writer to write to w. If bigEndian is true, then -// bits will be written starting from the most-significant bits of a byte -// (as done in bzip2), otherwise it will write starting from the -// least-significant bits of a byte (such as for deflate and brotli). 
-func (pw *Writer) Init(w io.Writer, bigEndian bool) { - *pw = Writer{wr: w, bigEndian: bigEndian} - return -} - -// BitsWritten reports the total number of bits issued to any Write method. -func (pw *Writer) BitsWritten() int64 { - return 8*pw.Offset + 8*int64(pw.cntBuf) + int64(pw.numBits) -} - -// WritePads writes 0-7 bits to the bit buffer to achieve byte-alignment. -func (pw *Writer) WritePads(v uint) { - nb := -pw.numBits & 7 - pw.bufBits |= uint64(v) << pw.numBits - pw.numBits += nb -} - -// Write writes bytes from buf. -// The bit-ordering mode does not affect this method. -func (pw *Writer) Write(buf []byte) (cnt int, err error) { - if pw.numBits > 0 || pw.cntBuf > 0 { - if pw.numBits%8 != 0 { - return 0, errorf(errors.Invalid, "non-aligned bit buffer") - } - if _, err := pw.Flush(); err != nil { - return 0, err - } - } - cnt, err = pw.wr.Write(buf) - pw.Offset += int64(cnt) - return cnt, err -} - -// WriteOffset writes ofs in a (sym, extra) fashion using the provided prefix -// Encoder and RangeEncoder. -func (pw *Writer) WriteOffset(ofs uint, pe *Encoder, re *RangeEncoder) { - sym := re.Encode(ofs) - pw.WriteSymbol(sym, pe) - rc := re.rcs[sym] - pw.WriteBits(ofs-uint(rc.Base), uint(rc.Len)) -} - -// TryWriteBits attempts to write nb bits using the contents of the bit buffer -// alone. It reports whether it succeeded. -// -// This method is designed to be inlined for performance reasons. -func (pw *Writer) TryWriteBits(v, nb uint) bool { - if 64-pw.numBits < nb { - return false - } - pw.bufBits |= uint64(v) << pw.numBits - pw.numBits += nb - return true -} - -// WriteBits writes nb bits of v to the underlying writer. -func (pw *Writer) WriteBits(v, nb uint) { - if _, err := pw.PushBits(); err != nil { - errors.Panic(err) - } - pw.bufBits |= uint64(v) << pw.numBits - pw.numBits += nb -} - -// TryWriteSymbol attempts to encode the next symbol using the contents of the -// bit buffer alone. It reports whether it succeeded. 
-// -// This method is designed to be inlined for performance reasons. -func (pw *Writer) TryWriteSymbol(sym uint, pe *Encoder) bool { - chunk := pe.chunks[uint32(sym)&pe.chunkMask] - nb := uint(chunk & countMask) - if 64-pw.numBits < nb { - return false - } - pw.bufBits |= uint64(chunk>>countBits) << pw.numBits - pw.numBits += nb - return true -} - -// WriteSymbol writes the symbol using the provided prefix Encoder. -func (pw *Writer) WriteSymbol(sym uint, pe *Encoder) { - if _, err := pw.PushBits(); err != nil { - errors.Panic(err) - } - chunk := pe.chunks[uint32(sym)&pe.chunkMask] - nb := uint(chunk & countMask) - pw.bufBits |= uint64(chunk>>countBits) << pw.numBits - pw.numBits += nb -} - -// Flush flushes all complete bytes from the bit buffer to the byte buffer, and -// then flushes all bytes in the byte buffer to the underlying writer. -// After this call, the bit Writer is will only withhold 7 bits at most. -func (pw *Writer) Flush() (int64, error) { - if pw.numBits < 8 && pw.cntBuf == 0 { - return pw.Offset, nil - } - if _, err := pw.PushBits(); err != nil { - return pw.Offset, err - } - cnt, err := pw.wr.Write(pw.buf[:pw.cntBuf]) - pw.cntBuf -= cnt - pw.Offset += int64(cnt) - return pw.Offset, err -} - -// PushBits pushes as many bytes as possible from the bit buffer to the byte -// buffer, reporting the number of bits pushed. -func (pw *Writer) PushBits() (uint, error) { - if pw.cntBuf >= len(pw.buf)-8 { - cnt, err := pw.wr.Write(pw.buf[:pw.cntBuf]) - pw.cntBuf -= cnt - pw.Offset += int64(cnt) - if err != nil { - return 0, err - } - } - - u := pw.bufBits - if pw.bigEndian { - // Swap all the bits within each byte. - u = (u&0xaaaaaaaaaaaaaaaa)>>1 | (u&0x5555555555555555)<<1 - u = (u&0xcccccccccccccccc)>>2 | (u&0x3333333333333333)<<2 - u = (u&0xf0f0f0f0f0f0f0f0)>>4 | (u&0x0f0f0f0f0f0f0f0f)<<4 - } - // Starting with Go 1.7, the compiler should use a wide integer - // store here if the architecture supports it. 
- binary.LittleEndian.PutUint64(pw.buf[pw.cntBuf:], u) - - nb := pw.numBits / 8 // Number of bytes to copy from bit buffer - pw.cntBuf += int(nb) - pw.bufBits >>= 8 * nb - pw.numBits -= 8 * nb - return 8 * nb, nil -} diff --git a/vendor/github.com/dsnet/compress/internal/release.go b/vendor/github.com/dsnet/compress/internal/release.go deleted file mode 100644 index 0990be1c5a..0000000000 --- a/vendor/github.com/dsnet/compress/internal/release.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2015, Joe Tsai. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. - -// +build !debug,!gofuzz - -package internal - -// Debug indicates whether the debug build tag was set. -// -// If set, programs may choose to print with more human-readable -// debug information and also perform sanity checks that would otherwise be too -// expensive to run in a release build. -const Debug = false - -// GoFuzz indicates whether the gofuzz build tag was set. -// -// If set, programs may choose to disable certain checks (like checksums) that -// would be nearly impossible for gofuzz to properly get right. -// If GoFuzz is set, it implies that Debug is set as well. -const GoFuzz = false diff --git a/vendor/github.com/dsnet/compress/zbench.sh b/vendor/github.com/dsnet/compress/zbench.sh deleted file mode 100644 index 0205920dae..0000000000 --- a/vendor/github.com/dsnet/compress/zbench.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -# -# Copyright 2017, Joe Tsai. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE.md file. - -# zbench wraps internal/tool/bench and is useful for comparing benchmarks from -# the implementations in this repository relative to other implementations. -# -# See internal/tool/bench/main.go for more details. 
-cd $(dirname "${BASH_SOURCE[0]}")/internal/tool/bench -go run $(go list -f '{{ join .GoFiles "\n" }}') "$@" diff --git a/vendor/github.com/dsnet/compress/zfuzz.sh b/vendor/github.com/dsnet/compress/zfuzz.sh deleted file mode 100644 index 42958ed442..0000000000 --- a/vendor/github.com/dsnet/compress/zfuzz.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -# -# Copyright 2017, Joe Tsai. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE.md file. - -# zfuzz wraps internal/tool/fuzz and is useful for fuzz testing each of -# the implementations in this repository. -cd $(dirname "${BASH_SOURCE[0]}")/internal/tool/fuzz -./fuzz.sh "$@" diff --git a/vendor/github.com/dsnet/compress/zprof.sh b/vendor/github.com/dsnet/compress/zprof.sh deleted file mode 100644 index 3cd535beba..0000000000 --- a/vendor/github.com/dsnet/compress/zprof.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash -# -# Copyright 2017, Joe Tsai. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE.md file. - -if [ $# == 0 ]; then - echo "Usage: $0 PKG_PATH TEST_ARGS..." - echo "" - echo "Runs coverage and performance benchmarks for a given package." - echo "The results are stored in the _zprof_ directory." - echo "" - echo "Example:" - echo " $0 flate -test.bench=Decode/Twain/Default" - exit 1 -fi - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -PKG_PATH=$1 -PKG_NAME=$(basename $PKG_PATH) -shift - -TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR $PKG_PATH/$PKG_NAME.test" SIGINT SIGTERM EXIT - -( - cd $DIR/$PKG_PATH - - # Print the go version. - go version - - # Perform coverage profiling. - go test github.com/dsnet/compress/$PKG_PATH -coverprofile $TMPDIR/cover.profile - if [ $? != 0 ]; then exit 1; fi - go tool cover -html $TMPDIR/cover.profile -o cover.html - - # Perform performance profiling. 
- if [ $# != 0 ]; then - go test -c github.com/dsnet/compress/$PKG_PATH - if [ $? != 0 ]; then exit 1; fi - ./$PKG_NAME.test -test.cpuprofile $TMPDIR/cpu.profile -test.memprofile $TMPDIR/mem.profile -test.run - "$@" - PPROF="go tool pprof" - $PPROF -output=cpu.svg -web $PKG_NAME.test $TMPDIR/cpu.profile 2> /dev/null - $PPROF -output=cpu.html -weblist=. $PKG_NAME.test $TMPDIR/cpu.profile 2> /dev/null - $PPROF -output=mem_objects.svg -alloc_objects -web $PKG_NAME.test $TMPDIR/mem.profile 2> /dev/null - $PPROF -output=mem_objects.html -alloc_objects -weblist=. $PKG_NAME.test $TMPDIR/mem.profile 2> /dev/null - $PPROF -output=mem_space.svg -alloc_space -web $PKG_NAME.test $TMPDIR/mem.profile 2> /dev/null - $PPROF -output=mem_space.html -alloc_space -weblist=. $PKG_NAME.test $TMPDIR/mem.profile 2> /dev/null - fi - - rm -rf $DIR/_zprof_/$PKG_NAME - mkdir -p $DIR/_zprof_/$PKG_NAME - mv *.html *.svg $DIR/_zprof_/$PKG_NAME 2> /dev/null -) diff --git a/vendor/github.com/dsnet/compress/ztest.sh b/vendor/github.com/dsnet/compress/ztest.sh deleted file mode 100644 index 15c4c00b9f..0000000000 --- a/vendor/github.com/dsnet/compress/ztest.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash -# -# Copyright 2017, Joe Tsai. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE.md file. - -cd $(go list -f '{{ .Dir }}' github.com/dsnet/compress) - -BOLD="\x1b[1mRunning: " -PASS="\x1b[32mPASS" -FAIL="\x1b[31mFAIL" -RESET="\x1b[0m" - -echo -e "${BOLD}fmt${RESET}" -RET_FMT=$(find . -name "*.go" | egrep -v "/(_.*_|\..*|testdata)/" | xargs gofmt -d) -if [[ ! -z "$RET_FMT" ]]; then echo "$RET_FMT"; echo; fi - -echo -e "${BOLD}test${RESET}" -RET_TEST=$(go test -race ./... | egrep -v "^(ok|[?])\s+") -if [[ ! 
-z "$RET_TEST" ]]; then echo "$RET_TEST"; echo; fi - -echo -e "${BOLD}staticcheck${RESET}" -RET_SCHK=$(staticcheck \ - -ignore " - github.com/dsnet/compress/brotli/*.go:SA4016 - github.com/dsnet/compress/brotli/*.go:S1023 - github.com/dsnet/compress/brotli/*.go:U1000 - github.com/dsnet/compress/bzip2/*.go:S1023 - github.com/dsnet/compress/flate/*.go:U1000 - github.com/dsnet/compress/internal/cgo/lzma/*.go:SA4000 - github.com/dsnet/compress/internal/prefix/*.go:S1004 - github.com/dsnet/compress/internal/prefix/*.go:S1023 - github.com/dsnet/compress/internal/prefix/*.go:SA4016 - github.com/dsnet/compress/internal/tool/bench/*.go:S1007 - github.com/dsnet/compress/xflate/internal/meta/*.go:S1023 - " ./... 2>&1) -if [[ ! -z "$RET_SCHK" ]]; then echo "$RET_SCHK"; echo; fi - -echo -e "${BOLD}lint${RESET}" -RET_LINT=$(golint ./... 2>&1 | - egrep -v "^vendor/" | - egrep -v "should have comment(.*)or be unexported" | - egrep -v "^(.*)type name will be used as(.*)by other packages" | - egrep -v "^brotli/transform.go:(.*)replace i [+]= 1 with i[+]{2}" | - egrep -v "^internal/prefix/prefix.go:(.*)replace symBits(.*) [-]= 1 with symBits(.*)[-]{2}" | - egrep -v "^xflate/common.go:(.*)NoCompression should be of the form" | - egrep -v "^exit status") -if [[ ! -z "$RET_LINT" ]]; then echo "$RET_LINT"; echo; fi - -if [[ ! -z "$RET_FMT" ]] || [ ! -z "$RET_TEST" ] || [[ ! -z "$RET_SCHK" ]] || [[ ! -z "$RET_LINT" ]]; then - echo -e "${FAIL}${RESET}"; exit 1 -else - echo -e "${PASS}${RESET}"; exit 0 -fi diff --git a/vendor/github.com/golang/snappy/.gitignore b/vendor/github.com/golang/snappy/.gitignore deleted file mode 100644 index 042091d9b3..0000000000 --- a/vendor/github.com/golang/snappy/.gitignore +++ /dev/null @@ -1,16 +0,0 @@ -cmd/snappytool/snappytool -testdata/bench - -# These explicitly listed benchmark data files are for an obsolete version of -# snappy_test.go. 
-testdata/alice29.txt -testdata/asyoulik.txt -testdata/fireworks.jpeg -testdata/geo.protodata -testdata/html -testdata/html_x_4 -testdata/kppkn.gtb -testdata/lcet10.txt -testdata/paper-100k.pdf -testdata/plrabn12.txt -testdata/urls.10K diff --git a/vendor/github.com/golang/snappy/AUTHORS b/vendor/github.com/golang/snappy/AUTHORS deleted file mode 100644 index 52ccb5a934..0000000000 --- a/vendor/github.com/golang/snappy/AUTHORS +++ /dev/null @@ -1,18 +0,0 @@ -# This is the official list of Snappy-Go authors for copyright purposes. -# This file is distinct from the CONTRIBUTORS files. -# See the latter for an explanation. - -# Names should be added to this file as -# Name or Organization -# The email address is not required for organizations. - -# Please keep the list sorted. - -Amazon.com, Inc -Damian Gryski -Eric Buth -Google Inc. -Jan Mercl <0xjnml@gmail.com> -Klaus Post -Rodolfo Carvalho -Sebastien Binet diff --git a/vendor/github.com/golang/snappy/CONTRIBUTORS b/vendor/github.com/golang/snappy/CONTRIBUTORS deleted file mode 100644 index ea6524ddd0..0000000000 --- a/vendor/github.com/golang/snappy/CONTRIBUTORS +++ /dev/null @@ -1,41 +0,0 @@ -# This is the official list of people who can contribute -# (and typically have contributed) code to the Snappy-Go repository. -# The AUTHORS file lists the copyright holders; this file -# lists people. For example, Google employees are listed here -# but not in AUTHORS, because Google holds the copyright. -# -# The submission process automatically checks to make sure -# that people submitting code are listed in this file (by email address). 
-# -# Names should be added to this file only after verifying that -# the individual or the individual's organization has agreed to -# the appropriate Contributor License Agreement, found here: -# -# http://code.google.com/legal/individual-cla-v1.0.html -# http://code.google.com/legal/corporate-cla-v1.0.html -# -# The agreement for individuals can be filled out on the web. -# -# When adding J Random Contributor's name to this file, -# either J's name or J's organization's name should be -# added to the AUTHORS file, depending on whether the -# individual or corporate CLA was used. - -# Names should be added to this file like so: -# Name - -# Please keep the list sorted. - -Alex Legg -Damian Gryski -Eric Buth -Jan Mercl <0xjnml@gmail.com> -Jonathan Swinney -Kai Backman -Klaus Post -Marc-Antoine Ruel -Nigel Tao -Rob Pike -Rodolfo Carvalho -Russ Cox -Sebastien Binet diff --git a/vendor/github.com/golang/snappy/LICENSE b/vendor/github.com/golang/snappy/LICENSE deleted file mode 100644 index 6050c10f4c..0000000000 --- a/vendor/github.com/golang/snappy/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golang/snappy/README b/vendor/github.com/golang/snappy/README deleted file mode 100644 index cea12879a0..0000000000 --- a/vendor/github.com/golang/snappy/README +++ /dev/null @@ -1,107 +0,0 @@ -The Snappy compression format in the Go programming language. - -To download and install from source: -$ go get github.com/golang/snappy - -Unless otherwise noted, the Snappy-Go source files are distributed -under the BSD-style license found in the LICENSE file. - - - -Benchmarks. - -The golang/snappy benchmarks include compressing (Z) and decompressing (U) ten -or so files, the same set used by the C++ Snappy code (github.com/google/snappy -and note the "google", not "golang"). On an "Intel(R) Core(TM) i7-3770 CPU @ -3.40GHz", Go's GOARCH=amd64 numbers as of 2016-05-29: - -"go test -test.bench=." 
- -_UFlat0-8 2.19GB/s ± 0% html -_UFlat1-8 1.41GB/s ± 0% urls -_UFlat2-8 23.5GB/s ± 2% jpg -_UFlat3-8 1.91GB/s ± 0% jpg_200 -_UFlat4-8 14.0GB/s ± 1% pdf -_UFlat5-8 1.97GB/s ± 0% html4 -_UFlat6-8 814MB/s ± 0% txt1 -_UFlat7-8 785MB/s ± 0% txt2 -_UFlat8-8 857MB/s ± 0% txt3 -_UFlat9-8 719MB/s ± 1% txt4 -_UFlat10-8 2.84GB/s ± 0% pb -_UFlat11-8 1.05GB/s ± 0% gaviota - -_ZFlat0-8 1.04GB/s ± 0% html -_ZFlat1-8 534MB/s ± 0% urls -_ZFlat2-8 15.7GB/s ± 1% jpg -_ZFlat3-8 740MB/s ± 3% jpg_200 -_ZFlat4-8 9.20GB/s ± 1% pdf -_ZFlat5-8 991MB/s ± 0% html4 -_ZFlat6-8 379MB/s ± 0% txt1 -_ZFlat7-8 352MB/s ± 0% txt2 -_ZFlat8-8 396MB/s ± 1% txt3 -_ZFlat9-8 327MB/s ± 1% txt4 -_ZFlat10-8 1.33GB/s ± 1% pb -_ZFlat11-8 605MB/s ± 1% gaviota - - - -"go test -test.bench=. -tags=noasm" - -_UFlat0-8 621MB/s ± 2% html -_UFlat1-8 494MB/s ± 1% urls -_UFlat2-8 23.2GB/s ± 1% jpg -_UFlat3-8 1.12GB/s ± 1% jpg_200 -_UFlat4-8 4.35GB/s ± 1% pdf -_UFlat5-8 609MB/s ± 0% html4 -_UFlat6-8 296MB/s ± 0% txt1 -_UFlat7-8 288MB/s ± 0% txt2 -_UFlat8-8 309MB/s ± 1% txt3 -_UFlat9-8 280MB/s ± 1% txt4 -_UFlat10-8 753MB/s ± 0% pb -_UFlat11-8 400MB/s ± 0% gaviota - -_ZFlat0-8 409MB/s ± 1% html -_ZFlat1-8 250MB/s ± 1% urls -_ZFlat2-8 12.3GB/s ± 1% jpg -_ZFlat3-8 132MB/s ± 0% jpg_200 -_ZFlat4-8 2.92GB/s ± 0% pdf -_ZFlat5-8 405MB/s ± 1% html4 -_ZFlat6-8 179MB/s ± 1% txt1 -_ZFlat7-8 170MB/s ± 1% txt2 -_ZFlat8-8 189MB/s ± 1% txt3 -_ZFlat9-8 164MB/s ± 1% txt4 -_ZFlat10-8 479MB/s ± 1% pb -_ZFlat11-8 270MB/s ± 1% gaviota - - - -For comparison (Go's encoded output is byte-for-byte identical to C++'s), here -are the numbers from C++ Snappy's - -make CXXFLAGS="-O2 -DNDEBUG -g" clean snappy_unittest.log && cat snappy_unittest.log - -BM_UFlat/0 2.4GB/s html -BM_UFlat/1 1.4GB/s urls -BM_UFlat/2 21.8GB/s jpg -BM_UFlat/3 1.5GB/s jpg_200 -BM_UFlat/4 13.3GB/s pdf -BM_UFlat/5 2.1GB/s html4 -BM_UFlat/6 1.0GB/s txt1 -BM_UFlat/7 959.4MB/s txt2 -BM_UFlat/8 1.0GB/s txt3 -BM_UFlat/9 864.5MB/s txt4 -BM_UFlat/10 2.9GB/s pb -BM_UFlat/11 1.2GB/s 
gaviota - -BM_ZFlat/0 944.3MB/s html (22.31 %) -BM_ZFlat/1 501.6MB/s urls (47.78 %) -BM_ZFlat/2 14.3GB/s jpg (99.95 %) -BM_ZFlat/3 538.3MB/s jpg_200 (73.00 %) -BM_ZFlat/4 8.3GB/s pdf (83.30 %) -BM_ZFlat/5 903.5MB/s html4 (22.52 %) -BM_ZFlat/6 336.0MB/s txt1 (57.88 %) -BM_ZFlat/7 312.3MB/s txt2 (61.91 %) -BM_ZFlat/8 353.1MB/s txt3 (54.99 %) -BM_ZFlat/9 289.9MB/s txt4 (66.26 %) -BM_ZFlat/10 1.2GB/s pb (19.68 %) -BM_ZFlat/11 527.4MB/s gaviota (37.72 %) diff --git a/vendor/github.com/golang/snappy/decode.go b/vendor/github.com/golang/snappy/decode.go deleted file mode 100644 index 23c6e26c6b..0000000000 --- a/vendor/github.com/golang/snappy/decode.go +++ /dev/null @@ -1,264 +0,0 @@ -// Copyright 2011 The Snappy-Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package snappy - -import ( - "encoding/binary" - "errors" - "io" -) - -var ( - // ErrCorrupt reports that the input is invalid. - ErrCorrupt = errors.New("snappy: corrupt input") - // ErrTooLarge reports that the uncompressed length is too large. - ErrTooLarge = errors.New("snappy: decoded block is too large") - // ErrUnsupported reports that the input isn't supported. - ErrUnsupported = errors.New("snappy: unsupported input") - - errUnsupportedLiteralLength = errors.New("snappy: unsupported literal length") -) - -// DecodedLen returns the length of the decoded block. -func DecodedLen(src []byte) (int, error) { - v, _, err := decodedLen(src) - return v, err -} - -// decodedLen returns the length of the decoded block and the number of bytes -// that the length header occupied. 
-func decodedLen(src []byte) (blockLen, headerLen int, err error) { - v, n := binary.Uvarint(src) - if n <= 0 || v > 0xffffffff { - return 0, 0, ErrCorrupt - } - - const wordSize = 32 << (^uint(0) >> 32 & 1) - if wordSize == 32 && v > 0x7fffffff { - return 0, 0, ErrTooLarge - } - return int(v), n, nil -} - -const ( - decodeErrCodeCorrupt = 1 - decodeErrCodeUnsupportedLiteralLength = 2 -) - -// Decode returns the decoded form of src. The returned slice may be a sub- -// slice of dst if dst was large enough to hold the entire decoded block. -// Otherwise, a newly allocated slice will be returned. -// -// The dst and src must not overlap. It is valid to pass a nil dst. -// -// Decode handles the Snappy block format, not the Snappy stream format. -func Decode(dst, src []byte) ([]byte, error) { - dLen, s, err := decodedLen(src) - if err != nil { - return nil, err - } - if dLen <= len(dst) { - dst = dst[:dLen] - } else { - dst = make([]byte, dLen) - } - switch decode(dst, src[s:]) { - case 0: - return dst, nil - case decodeErrCodeUnsupportedLiteralLength: - return nil, errUnsupportedLiteralLength - } - return nil, ErrCorrupt -} - -// NewReader returns a new Reader that decompresses from r, using the framing -// format described at -// https://github.com/google/snappy/blob/master/framing_format.txt -func NewReader(r io.Reader) *Reader { - return &Reader{ - r: r, - decoded: make([]byte, maxBlockSize), - buf: make([]byte, maxEncodedLenOfMaxBlockSize+checksumSize), - } -} - -// Reader is an io.Reader that can read Snappy-compressed bytes. -// -// Reader handles the Snappy stream format, not the Snappy block format. -type Reader struct { - r io.Reader - err error - decoded []byte - buf []byte - // decoded[i:j] contains decoded bytes that have not yet been passed on. - i, j int - readHeader bool -} - -// Reset discards any buffered data, resets all state, and switches the Snappy -// reader to read from r. 
This permits reusing a Reader rather than allocating -// a new one. -func (r *Reader) Reset(reader io.Reader) { - r.r = reader - r.err = nil - r.i = 0 - r.j = 0 - r.readHeader = false -} - -func (r *Reader) readFull(p []byte, allowEOF bool) (ok bool) { - if _, r.err = io.ReadFull(r.r, p); r.err != nil { - if r.err == io.ErrUnexpectedEOF || (r.err == io.EOF && !allowEOF) { - r.err = ErrCorrupt - } - return false - } - return true -} - -func (r *Reader) fill() error { - for r.i >= r.j { - if !r.readFull(r.buf[:4], true) { - return r.err - } - chunkType := r.buf[0] - if !r.readHeader { - if chunkType != chunkTypeStreamIdentifier { - r.err = ErrCorrupt - return r.err - } - r.readHeader = true - } - chunkLen := int(r.buf[1]) | int(r.buf[2])<<8 | int(r.buf[3])<<16 - if chunkLen > len(r.buf) { - r.err = ErrUnsupported - return r.err - } - - // The chunk types are specified at - // https://github.com/google/snappy/blob/master/framing_format.txt - switch chunkType { - case chunkTypeCompressedData: - // Section 4.2. Compressed data (chunk type 0x00). - if chunkLen < checksumSize { - r.err = ErrCorrupt - return r.err - } - buf := r.buf[:chunkLen] - if !r.readFull(buf, false) { - return r.err - } - checksum := uint32(buf[0]) | uint32(buf[1])<<8 | uint32(buf[2])<<16 | uint32(buf[3])<<24 - buf = buf[checksumSize:] - - n, err := DecodedLen(buf) - if err != nil { - r.err = err - return r.err - } - if n > len(r.decoded) { - r.err = ErrCorrupt - return r.err - } - if _, err := Decode(r.decoded, buf); err != nil { - r.err = err - return r.err - } - if crc(r.decoded[:n]) != checksum { - r.err = ErrCorrupt - return r.err - } - r.i, r.j = 0, n - continue - - case chunkTypeUncompressedData: - // Section 4.3. Uncompressed data (chunk type 0x01). 
- if chunkLen < checksumSize { - r.err = ErrCorrupt - return r.err - } - buf := r.buf[:checksumSize] - if !r.readFull(buf, false) { - return r.err - } - checksum := uint32(buf[0]) | uint32(buf[1])<<8 | uint32(buf[2])<<16 | uint32(buf[3])<<24 - // Read directly into r.decoded instead of via r.buf. - n := chunkLen - checksumSize - if n > len(r.decoded) { - r.err = ErrCorrupt - return r.err - } - if !r.readFull(r.decoded[:n], false) { - return r.err - } - if crc(r.decoded[:n]) != checksum { - r.err = ErrCorrupt - return r.err - } - r.i, r.j = 0, n - continue - - case chunkTypeStreamIdentifier: - // Section 4.1. Stream identifier (chunk type 0xff). - if chunkLen != len(magicBody) { - r.err = ErrCorrupt - return r.err - } - if !r.readFull(r.buf[:len(magicBody)], false) { - return r.err - } - for i := 0; i < len(magicBody); i++ { - if r.buf[i] != magicBody[i] { - r.err = ErrCorrupt - return r.err - } - } - continue - } - - if chunkType <= 0x7f { - // Section 4.5. Reserved unskippable chunks (chunk types 0x02-0x7f). - r.err = ErrUnsupported - return r.err - } - // Section 4.4 Padding (chunk type 0xfe). - // Section 4.6. Reserved skippable chunks (chunk types 0x80-0xfd). - if !r.readFull(r.buf[:chunkLen], false) { - return r.err - } - } - - return nil -} - -// Read satisfies the io.Reader interface. -func (r *Reader) Read(p []byte) (int, error) { - if r.err != nil { - return 0, r.err - } - - if err := r.fill(); err != nil { - return 0, err - } - - n := copy(p, r.decoded[r.i:r.j]) - r.i += n - return n, nil -} - -// ReadByte satisfies the io.ByteReader interface. 
-func (r *Reader) ReadByte() (byte, error) { - if r.err != nil { - return 0, r.err - } - - if err := r.fill(); err != nil { - return 0, err - } - - c := r.decoded[r.i] - r.i++ - return c, nil -} diff --git a/vendor/github.com/golang/snappy/decode_amd64.s b/vendor/github.com/golang/snappy/decode_amd64.s deleted file mode 100644 index e6179f65e3..0000000000 --- a/vendor/github.com/golang/snappy/decode_amd64.s +++ /dev/null @@ -1,490 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !appengine -// +build gc -// +build !noasm - -#include "textflag.h" - -// The asm code generally follows the pure Go code in decode_other.go, except -// where marked with a "!!!". - -// func decode(dst, src []byte) int -// -// All local variables fit into registers. The non-zero stack size is only to -// spill registers and push args when issuing a CALL. The register allocation: -// - AX scratch -// - BX scratch -// - CX length or x -// - DX offset -// - SI &src[s] -// - DI &dst[d] -// + R8 dst_base -// + R9 dst_len -// + R10 dst_base + dst_len -// + R11 src_base -// + R12 src_len -// + R13 src_base + src_len -// - R14 used by doCopy -// - R15 used by doCopy -// -// The registers R8-R13 (marked with a "+") are set at the start of the -// function, and after a CALL returns, and are not otherwise modified. -// -// The d variable is implicitly DI - R8, and len(dst)-d is R10 - DI. -// The s variable is implicitly SI - R11, and len(src)-s is R13 - SI. -TEXT ·decode(SB), NOSPLIT, $48-56 - // Initialize SI, DI and R8-R13. 
- MOVQ dst_base+0(FP), R8 - MOVQ dst_len+8(FP), R9 - MOVQ R8, DI - MOVQ R8, R10 - ADDQ R9, R10 - MOVQ src_base+24(FP), R11 - MOVQ src_len+32(FP), R12 - MOVQ R11, SI - MOVQ R11, R13 - ADDQ R12, R13 - -loop: - // for s < len(src) - CMPQ SI, R13 - JEQ end - - // CX = uint32(src[s]) - // - // switch src[s] & 0x03 - MOVBLZX (SI), CX - MOVL CX, BX - ANDL $3, BX - CMPL BX, $1 - JAE tagCopy - - // ---------------------------------------- - // The code below handles literal tags. - - // case tagLiteral: - // x := uint32(src[s] >> 2) - // switch - SHRL $2, CX - CMPL CX, $60 - JAE tagLit60Plus - - // case x < 60: - // s++ - INCQ SI - -doLit: - // This is the end of the inner "switch", when we have a literal tag. - // - // We assume that CX == x and x fits in a uint32, where x is the variable - // used in the pure Go decode_other.go code. - - // length = int(x) + 1 - // - // Unlike the pure Go code, we don't need to check if length <= 0 because - // CX can hold 64 bits, so the increment cannot overflow. - INCQ CX - - // Prepare to check if copying length bytes will run past the end of dst or - // src. - // - // AX = len(dst) - d - // BX = len(src) - s - MOVQ R10, AX - SUBQ DI, AX - MOVQ R13, BX - SUBQ SI, BX - - // !!! Try a faster technique for short (16 or fewer bytes) copies. - // - // if length > 16 || len(dst)-d < 16 || len(src)-s < 16 { - // goto callMemmove // Fall back on calling runtime·memmove. - // } - // - // The C++ snappy code calls this TryFastAppend. It also checks len(src)-s - // against 21 instead of 16, because it cannot assume that all of its input - // is contiguous in memory and so it needs to leave enough source bytes to - // read the next tag without refilling buffers, but Go's Decode assumes - // contiguousness (the src argument is a []byte). - CMPQ CX, $16 - JGT callMemmove - CMPQ AX, $16 - JLT callMemmove - CMPQ BX, $16 - JLT callMemmove - - // !!! Implement the copy from src to dst as a 16-byte load and store. 
- // (Decode's documentation says that dst and src must not overlap.) - // - // This always copies 16 bytes, instead of only length bytes, but that's - // OK. If the input is a valid Snappy encoding then subsequent iterations - // will fix up the overrun. Otherwise, Decode returns a nil []byte (and a - // non-nil error), so the overrun will be ignored. - // - // Note that on amd64, it is legal and cheap to issue unaligned 8-byte or - // 16-byte loads and stores. This technique probably wouldn't be as - // effective on architectures that are fussier about alignment. - MOVOU 0(SI), X0 - MOVOU X0, 0(DI) - - // d += length - // s += length - ADDQ CX, DI - ADDQ CX, SI - JMP loop - -callMemmove: - // if length > len(dst)-d || length > len(src)-s { etc } - CMPQ CX, AX - JGT errCorrupt - CMPQ CX, BX - JGT errCorrupt - - // copy(dst[d:], src[s:s+length]) - // - // This means calling runtime·memmove(&dst[d], &src[s], length), so we push - // DI, SI and CX as arguments. Coincidentally, we also need to spill those - // three registers to the stack, to save local variables across the CALL. - MOVQ DI, 0(SP) - MOVQ SI, 8(SP) - MOVQ CX, 16(SP) - MOVQ DI, 24(SP) - MOVQ SI, 32(SP) - MOVQ CX, 40(SP) - CALL runtime·memmove(SB) - - // Restore local variables: unspill registers from the stack and - // re-calculate R8-R13. - MOVQ 24(SP), DI - MOVQ 32(SP), SI - MOVQ 40(SP), CX - MOVQ dst_base+0(FP), R8 - MOVQ dst_len+8(FP), R9 - MOVQ R8, R10 - ADDQ R9, R10 - MOVQ src_base+24(FP), R11 - MOVQ src_len+32(FP), R12 - MOVQ R11, R13 - ADDQ R12, R13 - - // d += length - // s += length - ADDQ CX, DI - ADDQ CX, SI - JMP loop - -tagLit60Plus: - // !!! This fragment does the - // - // s += x - 58; if uint(s) > uint(len(src)) { etc } - // - // checks. In the asm version, we code it once instead of once per switch case. 
- ADDQ CX, SI - SUBQ $58, SI - MOVQ SI, BX - SUBQ R11, BX - CMPQ BX, R12 - JA errCorrupt - - // case x == 60: - CMPL CX, $61 - JEQ tagLit61 - JA tagLit62Plus - - // x = uint32(src[s-1]) - MOVBLZX -1(SI), CX - JMP doLit - -tagLit61: - // case x == 61: - // x = uint32(src[s-2]) | uint32(src[s-1])<<8 - MOVWLZX -2(SI), CX - JMP doLit - -tagLit62Plus: - CMPL CX, $62 - JA tagLit63 - - // case x == 62: - // x = uint32(src[s-3]) | uint32(src[s-2])<<8 | uint32(src[s-1])<<16 - MOVWLZX -3(SI), CX - MOVBLZX -1(SI), BX - SHLL $16, BX - ORL BX, CX - JMP doLit - -tagLit63: - // case x == 63: - // x = uint32(src[s-4]) | uint32(src[s-3])<<8 | uint32(src[s-2])<<16 | uint32(src[s-1])<<24 - MOVL -4(SI), CX - JMP doLit - -// The code above handles literal tags. -// ---------------------------------------- -// The code below handles copy tags. - -tagCopy4: - // case tagCopy4: - // s += 5 - ADDQ $5, SI - - // if uint(s) > uint(len(src)) { etc } - MOVQ SI, BX - SUBQ R11, BX - CMPQ BX, R12 - JA errCorrupt - - // length = 1 + int(src[s-5])>>2 - SHRQ $2, CX - INCQ CX - - // offset = int(uint32(src[s-4]) | uint32(src[s-3])<<8 | uint32(src[s-2])<<16 | uint32(src[s-1])<<24) - MOVLQZX -4(SI), DX - JMP doCopy - -tagCopy2: - // case tagCopy2: - // s += 3 - ADDQ $3, SI - - // if uint(s) > uint(len(src)) { etc } - MOVQ SI, BX - SUBQ R11, BX - CMPQ BX, R12 - JA errCorrupt - - // length = 1 + int(src[s-3])>>2 - SHRQ $2, CX - INCQ CX - - // offset = int(uint32(src[s-2]) | uint32(src[s-1])<<8) - MOVWQZX -2(SI), DX - JMP doCopy - -tagCopy: - // We have a copy tag. 
We assume that: - // - BX == src[s] & 0x03 - // - CX == src[s] - CMPQ BX, $2 - JEQ tagCopy2 - JA tagCopy4 - - // case tagCopy1: - // s += 2 - ADDQ $2, SI - - // if uint(s) > uint(len(src)) { etc } - MOVQ SI, BX - SUBQ R11, BX - CMPQ BX, R12 - JA errCorrupt - - // offset = int(uint32(src[s-2])&0xe0<<3 | uint32(src[s-1])) - MOVQ CX, DX - ANDQ $0xe0, DX - SHLQ $3, DX - MOVBQZX -1(SI), BX - ORQ BX, DX - - // length = 4 + int(src[s-2])>>2&0x7 - SHRQ $2, CX - ANDQ $7, CX - ADDQ $4, CX - -doCopy: - // This is the end of the outer "switch", when we have a copy tag. - // - // We assume that: - // - CX == length && CX > 0 - // - DX == offset - - // if offset <= 0 { etc } - CMPQ DX, $0 - JLE errCorrupt - - // if d < offset { etc } - MOVQ DI, BX - SUBQ R8, BX - CMPQ BX, DX - JLT errCorrupt - - // if length > len(dst)-d { etc } - MOVQ R10, BX - SUBQ DI, BX - CMPQ CX, BX - JGT errCorrupt - - // forwardCopy(dst[d:d+length], dst[d-offset:]); d += length - // - // Set: - // - R14 = len(dst)-d - // - R15 = &dst[d-offset] - MOVQ R10, R14 - SUBQ DI, R14 - MOVQ DI, R15 - SUBQ DX, R15 - - // !!! Try a faster technique for short (16 or fewer bytes) forward copies. - // - // First, try using two 8-byte load/stores, similar to the doLit technique - // above. Even if dst[d:d+length] and dst[d-offset:] can overlap, this is - // still OK if offset >= 8. Note that this has to be two 8-byte load/stores - // and not one 16-byte load/store, and the first store has to be before the - // second load, due to the overlap if offset is in the range [8, 16). - // - // if length > 16 || offset < 8 || len(dst)-d < 16 { - // goto slowForwardCopy - // } - // copy 16 bytes - // d += length - CMPQ CX, $16 - JGT slowForwardCopy - CMPQ DX, $8 - JLT slowForwardCopy - CMPQ R14, $16 - JLT slowForwardCopy - MOVQ 0(R15), AX - MOVQ AX, 0(DI) - MOVQ 8(R15), BX - MOVQ BX, 8(DI) - ADDQ CX, DI - JMP loop - -slowForwardCopy: - // !!! 
If the forward copy is longer than 16 bytes, or if offset < 8, we - // can still try 8-byte load stores, provided we can overrun up to 10 extra - // bytes. As above, the overrun will be fixed up by subsequent iterations - // of the outermost loop. - // - // The C++ snappy code calls this technique IncrementalCopyFastPath. Its - // commentary says: - // - // ---- - // - // The main part of this loop is a simple copy of eight bytes at a time - // until we've copied (at least) the requested amount of bytes. However, - // if d and d-offset are less than eight bytes apart (indicating a - // repeating pattern of length < 8), we first need to expand the pattern in - // order to get the correct results. For instance, if the buffer looks like - // this, with the eight-byte and patterns marked as - // intervals: - // - // abxxxxxxxxxxxx - // [------] d-offset - // [------] d - // - // a single eight-byte copy from to will repeat the pattern - // once, after which we can move two bytes without moving : - // - // ababxxxxxxxxxx - // [------] d-offset - // [------] d - // - // and repeat the exercise until the two no longer overlap. - // - // This allows us to do very well in the special case of one single byte - // repeated many times, without taking a big hit for more general cases. - // - // The worst case of extra writing past the end of the match occurs when - // offset == 1 and length == 1; the last copy will read from byte positions - // [0..7] and write to [4..11], whereas it was only supposed to write to - // position 1. Thus, ten excess bytes. - // - // ---- - // - // That "10 byte overrun" worst case is confirmed by Go's - // TestSlowForwardCopyOverrun, which also tests the fixUpSlowForwardCopy - // and finishSlowForwardCopy algorithm. - // - // if length > len(dst)-d-10 { - // goto verySlowForwardCopy - // } - SUBQ $10, R14 - CMPQ CX, R14 - JGT verySlowForwardCopy - -makeOffsetAtLeast8: - // !!! 
As above, expand the pattern so that offset >= 8 and we can use - // 8-byte load/stores. - // - // for offset < 8 { - // copy 8 bytes from dst[d-offset:] to dst[d:] - // length -= offset - // d += offset - // offset += offset - // // The two previous lines together means that d-offset, and therefore - // // R15, is unchanged. - // } - CMPQ DX, $8 - JGE fixUpSlowForwardCopy - MOVQ (R15), BX - MOVQ BX, (DI) - SUBQ DX, CX - ADDQ DX, DI - ADDQ DX, DX - JMP makeOffsetAtLeast8 - -fixUpSlowForwardCopy: - // !!! Add length (which might be negative now) to d (implied by DI being - // &dst[d]) so that d ends up at the right place when we jump back to the - // top of the loop. Before we do that, though, we save DI to AX so that, if - // length is positive, copying the remaining length bytes will write to the - // right place. - MOVQ DI, AX - ADDQ CX, DI - -finishSlowForwardCopy: - // !!! Repeat 8-byte load/stores until length <= 0. Ending with a negative - // length means that we overrun, but as above, that will be fixed up by - // subsequent iterations of the outermost loop. - CMPQ CX, $0 - JLE loop - MOVQ (R15), BX - MOVQ BX, (AX) - ADDQ $8, R15 - ADDQ $8, AX - SUBQ $8, CX - JMP finishSlowForwardCopy - -verySlowForwardCopy: - // verySlowForwardCopy is a simple implementation of forward copy. In C - // parlance, this is a do/while loop instead of a while loop, since we know - // that length > 0. In Go syntax: - // - // for { - // dst[d] = dst[d - offset] - // d++ - // length-- - // if length == 0 { - // break - // } - // } - MOVB (R15), BX - MOVB BX, (DI) - INCQ R15 - INCQ DI - DECQ CX - JNZ verySlowForwardCopy - JMP loop - -// The code above handles copy tags. -// ---------------------------------------- - -end: - // This is the end of the "for s < len(src)". 
- // - // if d != len(dst) { etc } - CMPQ DI, R10 - JNE errCorrupt - - // return 0 - MOVQ $0, ret+48(FP) - RET - -errCorrupt: - // return decodeErrCodeCorrupt - MOVQ $1, ret+48(FP) - RET diff --git a/vendor/github.com/golang/snappy/decode_arm64.s b/vendor/github.com/golang/snappy/decode_arm64.s deleted file mode 100644 index 7a3ead17ea..0000000000 --- a/vendor/github.com/golang/snappy/decode_arm64.s +++ /dev/null @@ -1,494 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !appengine -// +build gc -// +build !noasm - -#include "textflag.h" - -// The asm code generally follows the pure Go code in decode_other.go, except -// where marked with a "!!!". - -// func decode(dst, src []byte) int -// -// All local variables fit into registers. The non-zero stack size is only to -// spill registers and push args when issuing a CALL. The register allocation: -// - R2 scratch -// - R3 scratch -// - R4 length or x -// - R5 offset -// - R6 &src[s] -// - R7 &dst[d] -// + R8 dst_base -// + R9 dst_len -// + R10 dst_base + dst_len -// + R11 src_base -// + R12 src_len -// + R13 src_base + src_len -// - R14 used by doCopy -// - R15 used by doCopy -// -// The registers R8-R13 (marked with a "+") are set at the start of the -// function, and after a CALL returns, and are not otherwise modified. -// -// The d variable is implicitly R7 - R8, and len(dst)-d is R10 - R7. -// The s variable is implicitly R6 - R11, and len(src)-s is R13 - R6. -TEXT ·decode(SB), NOSPLIT, $56-56 - // Initialize R6, R7 and R8-R13. 
- MOVD dst_base+0(FP), R8 - MOVD dst_len+8(FP), R9 - MOVD R8, R7 - MOVD R8, R10 - ADD R9, R10, R10 - MOVD src_base+24(FP), R11 - MOVD src_len+32(FP), R12 - MOVD R11, R6 - MOVD R11, R13 - ADD R12, R13, R13 - -loop: - // for s < len(src) - CMP R13, R6 - BEQ end - - // R4 = uint32(src[s]) - // - // switch src[s] & 0x03 - MOVBU (R6), R4 - MOVW R4, R3 - ANDW $3, R3 - MOVW $1, R1 - CMPW R1, R3 - BGE tagCopy - - // ---------------------------------------- - // The code below handles literal tags. - - // case tagLiteral: - // x := uint32(src[s] >> 2) - // switch - MOVW $60, R1 - LSRW $2, R4, R4 - CMPW R4, R1 - BLS tagLit60Plus - - // case x < 60: - // s++ - ADD $1, R6, R6 - -doLit: - // This is the end of the inner "switch", when we have a literal tag. - // - // We assume that R4 == x and x fits in a uint32, where x is the variable - // used in the pure Go decode_other.go code. - - // length = int(x) + 1 - // - // Unlike the pure Go code, we don't need to check if length <= 0 because - // R4 can hold 64 bits, so the increment cannot overflow. - ADD $1, R4, R4 - - // Prepare to check if copying length bytes will run past the end of dst or - // src. - // - // R2 = len(dst) - d - // R3 = len(src) - s - MOVD R10, R2 - SUB R7, R2, R2 - MOVD R13, R3 - SUB R6, R3, R3 - - // !!! Try a faster technique for short (16 or fewer bytes) copies. - // - // if length > 16 || len(dst)-d < 16 || len(src)-s < 16 { - // goto callMemmove // Fall back on calling runtime·memmove. - // } - // - // The C++ snappy code calls this TryFastAppend. It also checks len(src)-s - // against 21 instead of 16, because it cannot assume that all of its input - // is contiguous in memory and so it needs to leave enough source bytes to - // read the next tag without refilling buffers, but Go's Decode assumes - // contiguousness (the src argument is a []byte). - CMP $16, R4 - BGT callMemmove - CMP $16, R2 - BLT callMemmove - CMP $16, R3 - BLT callMemmove - - // !!! 
Implement the copy from src to dst as a 16-byte load and store. - // (Decode's documentation says that dst and src must not overlap.) - // - // This always copies 16 bytes, instead of only length bytes, but that's - // OK. If the input is a valid Snappy encoding then subsequent iterations - // will fix up the overrun. Otherwise, Decode returns a nil []byte (and a - // non-nil error), so the overrun will be ignored. - // - // Note that on arm64, it is legal and cheap to issue unaligned 8-byte or - // 16-byte loads and stores. This technique probably wouldn't be as - // effective on architectures that are fussier about alignment. - LDP 0(R6), (R14, R15) - STP (R14, R15), 0(R7) - - // d += length - // s += length - ADD R4, R7, R7 - ADD R4, R6, R6 - B loop - -callMemmove: - // if length > len(dst)-d || length > len(src)-s { etc } - CMP R2, R4 - BGT errCorrupt - CMP R3, R4 - BGT errCorrupt - - // copy(dst[d:], src[s:s+length]) - // - // This means calling runtime·memmove(&dst[d], &src[s], length), so we push - // R7, R6 and R4 as arguments. Coincidentally, we also need to spill those - // three registers to the stack, to save local variables across the CALL. - MOVD R7, 8(RSP) - MOVD R6, 16(RSP) - MOVD R4, 24(RSP) - MOVD R7, 32(RSP) - MOVD R6, 40(RSP) - MOVD R4, 48(RSP) - CALL runtime·memmove(SB) - - // Restore local variables: unspill registers from the stack and - // re-calculate R8-R13. - MOVD 32(RSP), R7 - MOVD 40(RSP), R6 - MOVD 48(RSP), R4 - MOVD dst_base+0(FP), R8 - MOVD dst_len+8(FP), R9 - MOVD R8, R10 - ADD R9, R10, R10 - MOVD src_base+24(FP), R11 - MOVD src_len+32(FP), R12 - MOVD R11, R13 - ADD R12, R13, R13 - - // d += length - // s += length - ADD R4, R7, R7 - ADD R4, R6, R6 - B loop - -tagLit60Plus: - // !!! This fragment does the - // - // s += x - 58; if uint(s) > uint(len(src)) { etc } - // - // checks. In the asm version, we code it once instead of once per switch case. 
- ADD R4, R6, R6 - SUB $58, R6, R6 - MOVD R6, R3 - SUB R11, R3, R3 - CMP R12, R3 - BGT errCorrupt - - // case x == 60: - MOVW $61, R1 - CMPW R1, R4 - BEQ tagLit61 - BGT tagLit62Plus - - // x = uint32(src[s-1]) - MOVBU -1(R6), R4 - B doLit - -tagLit61: - // case x == 61: - // x = uint32(src[s-2]) | uint32(src[s-1])<<8 - MOVHU -2(R6), R4 - B doLit - -tagLit62Plus: - CMPW $62, R4 - BHI tagLit63 - - // case x == 62: - // x = uint32(src[s-3]) | uint32(src[s-2])<<8 | uint32(src[s-1])<<16 - MOVHU -3(R6), R4 - MOVBU -1(R6), R3 - ORR R3<<16, R4 - B doLit - -tagLit63: - // case x == 63: - // x = uint32(src[s-4]) | uint32(src[s-3])<<8 | uint32(src[s-2])<<16 | uint32(src[s-1])<<24 - MOVWU -4(R6), R4 - B doLit - - // The code above handles literal tags. - // ---------------------------------------- - // The code below handles copy tags. - -tagCopy4: - // case tagCopy4: - // s += 5 - ADD $5, R6, R6 - - // if uint(s) > uint(len(src)) { etc } - MOVD R6, R3 - SUB R11, R3, R3 - CMP R12, R3 - BGT errCorrupt - - // length = 1 + int(src[s-5])>>2 - MOVD $1, R1 - ADD R4>>2, R1, R4 - - // offset = int(uint32(src[s-4]) | uint32(src[s-3])<<8 | uint32(src[s-2])<<16 | uint32(src[s-1])<<24) - MOVWU -4(R6), R5 - B doCopy - -tagCopy2: - // case tagCopy2: - // s += 3 - ADD $3, R6, R6 - - // if uint(s) > uint(len(src)) { etc } - MOVD R6, R3 - SUB R11, R3, R3 - CMP R12, R3 - BGT errCorrupt - - // length = 1 + int(src[s-3])>>2 - MOVD $1, R1 - ADD R4>>2, R1, R4 - - // offset = int(uint32(src[s-2]) | uint32(src[s-1])<<8) - MOVHU -2(R6), R5 - B doCopy - -tagCopy: - // We have a copy tag. 
We assume that: - // - R3 == src[s] & 0x03 - // - R4 == src[s] - CMP $2, R3 - BEQ tagCopy2 - BGT tagCopy4 - - // case tagCopy1: - // s += 2 - ADD $2, R6, R6 - - // if uint(s) > uint(len(src)) { etc } - MOVD R6, R3 - SUB R11, R3, R3 - CMP R12, R3 - BGT errCorrupt - - // offset = int(uint32(src[s-2])&0xe0<<3 | uint32(src[s-1])) - MOVD R4, R5 - AND $0xe0, R5 - MOVBU -1(R6), R3 - ORR R5<<3, R3, R5 - - // length = 4 + int(src[s-2])>>2&0x7 - MOVD $7, R1 - AND R4>>2, R1, R4 - ADD $4, R4, R4 - -doCopy: - // This is the end of the outer "switch", when we have a copy tag. - // - // We assume that: - // - R4 == length && R4 > 0 - // - R5 == offset - - // if offset <= 0 { etc } - MOVD $0, R1 - CMP R1, R5 - BLE errCorrupt - - // if d < offset { etc } - MOVD R7, R3 - SUB R8, R3, R3 - CMP R5, R3 - BLT errCorrupt - - // if length > len(dst)-d { etc } - MOVD R10, R3 - SUB R7, R3, R3 - CMP R3, R4 - BGT errCorrupt - - // forwardCopy(dst[d:d+length], dst[d-offset:]); d += length - // - // Set: - // - R14 = len(dst)-d - // - R15 = &dst[d-offset] - MOVD R10, R14 - SUB R7, R14, R14 - MOVD R7, R15 - SUB R5, R15, R15 - - // !!! Try a faster technique for short (16 or fewer bytes) forward copies. - // - // First, try using two 8-byte load/stores, similar to the doLit technique - // above. Even if dst[d:d+length] and dst[d-offset:] can overlap, this is - // still OK if offset >= 8. Note that this has to be two 8-byte load/stores - // and not one 16-byte load/store, and the first store has to be before the - // second load, due to the overlap if offset is in the range [8, 16). - // - // if length > 16 || offset < 8 || len(dst)-d < 16 { - // goto slowForwardCopy - // } - // copy 16 bytes - // d += length - CMP $16, R4 - BGT slowForwardCopy - CMP $8, R5 - BLT slowForwardCopy - CMP $16, R14 - BLT slowForwardCopy - MOVD 0(R15), R2 - MOVD R2, 0(R7) - MOVD 8(R15), R3 - MOVD R3, 8(R7) - ADD R4, R7, R7 - B loop - -slowForwardCopy: - // !!! 
If the forward copy is longer than 16 bytes, or if offset < 8, we - // can still try 8-byte load stores, provided we can overrun up to 10 extra - // bytes. As above, the overrun will be fixed up by subsequent iterations - // of the outermost loop. - // - // The C++ snappy code calls this technique IncrementalCopyFastPath. Its - // commentary says: - // - // ---- - // - // The main part of this loop is a simple copy of eight bytes at a time - // until we've copied (at least) the requested amount of bytes. However, - // if d and d-offset are less than eight bytes apart (indicating a - // repeating pattern of length < 8), we first need to expand the pattern in - // order to get the correct results. For instance, if the buffer looks like - // this, with the eight-byte and patterns marked as - // intervals: - // - // abxxxxxxxxxxxx - // [------] d-offset - // [------] d - // - // a single eight-byte copy from to will repeat the pattern - // once, after which we can move two bytes without moving : - // - // ababxxxxxxxxxx - // [------] d-offset - // [------] d - // - // and repeat the exercise until the two no longer overlap. - // - // This allows us to do very well in the special case of one single byte - // repeated many times, without taking a big hit for more general cases. - // - // The worst case of extra writing past the end of the match occurs when - // offset == 1 and length == 1; the last copy will read from byte positions - // [0..7] and write to [4..11], whereas it was only supposed to write to - // position 1. Thus, ten excess bytes. - // - // ---- - // - // That "10 byte overrun" worst case is confirmed by Go's - // TestSlowForwardCopyOverrun, which also tests the fixUpSlowForwardCopy - // and finishSlowForwardCopy algorithm. - // - // if length > len(dst)-d-10 { - // goto verySlowForwardCopy - // } - SUB $10, R14, R14 - CMP R14, R4 - BGT verySlowForwardCopy - -makeOffsetAtLeast8: - // !!! 
As above, expand the pattern so that offset >= 8 and we can use - // 8-byte load/stores. - // - // for offset < 8 { - // copy 8 bytes from dst[d-offset:] to dst[d:] - // length -= offset - // d += offset - // offset += offset - // // The two previous lines together means that d-offset, and therefore - // // R15, is unchanged. - // } - CMP $8, R5 - BGE fixUpSlowForwardCopy - MOVD (R15), R3 - MOVD R3, (R7) - SUB R5, R4, R4 - ADD R5, R7, R7 - ADD R5, R5, R5 - B makeOffsetAtLeast8 - -fixUpSlowForwardCopy: - // !!! Add length (which might be negative now) to d (implied by R7 being - // &dst[d]) so that d ends up at the right place when we jump back to the - // top of the loop. Before we do that, though, we save R7 to R2 so that, if - // length is positive, copying the remaining length bytes will write to the - // right place. - MOVD R7, R2 - ADD R4, R7, R7 - -finishSlowForwardCopy: - // !!! Repeat 8-byte load/stores until length <= 0. Ending with a negative - // length means that we overrun, but as above, that will be fixed up by - // subsequent iterations of the outermost loop. - MOVD $0, R1 - CMP R1, R4 - BLE loop - MOVD (R15), R3 - MOVD R3, (R2) - ADD $8, R15, R15 - ADD $8, R2, R2 - SUB $8, R4, R4 - B finishSlowForwardCopy - -verySlowForwardCopy: - // verySlowForwardCopy is a simple implementation of forward copy. In C - // parlance, this is a do/while loop instead of a while loop, since we know - // that length > 0. In Go syntax: - // - // for { - // dst[d] = dst[d - offset] - // d++ - // length-- - // if length == 0 { - // break - // } - // } - MOVB (R15), R3 - MOVB R3, (R7) - ADD $1, R15, R15 - ADD $1, R7, R7 - SUB $1, R4, R4 - CBNZ R4, verySlowForwardCopy - B loop - - // The code above handles copy tags. - // ---------------------------------------- - -end: - // This is the end of the "for s < len(src)". 
- // - // if d != len(dst) { etc } - CMP R10, R7 - BNE errCorrupt - - // return 0 - MOVD $0, ret+48(FP) - RET - -errCorrupt: - // return decodeErrCodeCorrupt - MOVD $1, R2 - MOVD R2, ret+48(FP) - RET diff --git a/vendor/github.com/golang/snappy/decode_asm.go b/vendor/github.com/golang/snappy/decode_asm.go deleted file mode 100644 index 7082b34919..0000000000 --- a/vendor/github.com/golang/snappy/decode_asm.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2016 The Snappy-Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !appengine -// +build gc -// +build !noasm -// +build amd64 arm64 - -package snappy - -// decode has the same semantics as in decode_other.go. -// -//go:noescape -func decode(dst, src []byte) int diff --git a/vendor/github.com/golang/snappy/decode_other.go b/vendor/github.com/golang/snappy/decode_other.go deleted file mode 100644 index 2f672be557..0000000000 --- a/vendor/github.com/golang/snappy/decode_other.go +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright 2016 The Snappy-Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !amd64,!arm64 appengine !gc noasm - -package snappy - -// decode writes the decoding of src to dst. It assumes that the varint-encoded -// length of the decompressed bytes has already been read, and that len(dst) -// equals that length. -// -// It returns 0 on success or a decodeErrCodeXxx error code on failure. -func decode(dst, src []byte) int { - var d, s, offset, length int - for s < len(src) { - switch src[s] & 0x03 { - case tagLiteral: - x := uint32(src[s] >> 2) - switch { - case x < 60: - s++ - case x == 60: - s += 2 - if uint(s) > uint(len(src)) { // The uint conversions catch overflow from the previous line. 
- return decodeErrCodeCorrupt - } - x = uint32(src[s-1]) - case x == 61: - s += 3 - if uint(s) > uint(len(src)) { // The uint conversions catch overflow from the previous line. - return decodeErrCodeCorrupt - } - x = uint32(src[s-2]) | uint32(src[s-1])<<8 - case x == 62: - s += 4 - if uint(s) > uint(len(src)) { // The uint conversions catch overflow from the previous line. - return decodeErrCodeCorrupt - } - x = uint32(src[s-3]) | uint32(src[s-2])<<8 | uint32(src[s-1])<<16 - case x == 63: - s += 5 - if uint(s) > uint(len(src)) { // The uint conversions catch overflow from the previous line. - return decodeErrCodeCorrupt - } - x = uint32(src[s-4]) | uint32(src[s-3])<<8 | uint32(src[s-2])<<16 | uint32(src[s-1])<<24 - } - length = int(x) + 1 - if length <= 0 { - return decodeErrCodeUnsupportedLiteralLength - } - if length > len(dst)-d || length > len(src)-s { - return decodeErrCodeCorrupt - } - copy(dst[d:], src[s:s+length]) - d += length - s += length - continue - - case tagCopy1: - s += 2 - if uint(s) > uint(len(src)) { // The uint conversions catch overflow from the previous line. - return decodeErrCodeCorrupt - } - length = 4 + int(src[s-2])>>2&0x7 - offset = int(uint32(src[s-2])&0xe0<<3 | uint32(src[s-1])) - - case tagCopy2: - s += 3 - if uint(s) > uint(len(src)) { // The uint conversions catch overflow from the previous line. - return decodeErrCodeCorrupt - } - length = 1 + int(src[s-3])>>2 - offset = int(uint32(src[s-2]) | uint32(src[s-1])<<8) - - case tagCopy4: - s += 5 - if uint(s) > uint(len(src)) { // The uint conversions catch overflow from the previous line. - return decodeErrCodeCorrupt - } - length = 1 + int(src[s-5])>>2 - offset = int(uint32(src[s-4]) | uint32(src[s-3])<<8 | uint32(src[s-2])<<16 | uint32(src[s-1])<<24) - } - - if offset <= 0 || d < offset || length > len(dst)-d { - return decodeErrCodeCorrupt - } - // Copy from an earlier sub-slice of dst to a later sub-slice. 
- // If no overlap, use the built-in copy: - if offset >= length { - copy(dst[d:d+length], dst[d-offset:]) - d += length - continue - } - - // Unlike the built-in copy function, this byte-by-byte copy always runs - // forwards, even if the slices overlap. Conceptually, this is: - // - // d += forwardCopy(dst[d:d+length], dst[d-offset:]) - // - // We align the slices into a and b and show the compiler they are the same size. - // This allows the loop to run without bounds checks. - a := dst[d : d+length] - b := dst[d-offset:] - b = b[:len(a)] - for i := range a { - a[i] = b[i] - } - d += length - } - if d != len(dst) { - return decodeErrCodeCorrupt - } - return 0 -} diff --git a/vendor/github.com/golang/snappy/encode.go b/vendor/github.com/golang/snappy/encode.go deleted file mode 100644 index 7f23657076..0000000000 --- a/vendor/github.com/golang/snappy/encode.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright 2011 The Snappy-Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package snappy - -import ( - "encoding/binary" - "errors" - "io" -) - -// Encode returns the encoded form of src. The returned slice may be a sub- -// slice of dst if dst was large enough to hold the entire encoded block. -// Otherwise, a newly allocated slice will be returned. -// -// The dst and src must not overlap. It is valid to pass a nil dst. -// -// Encode handles the Snappy block format, not the Snappy stream format. -func Encode(dst, src []byte) []byte { - if n := MaxEncodedLen(len(src)); n < 0 { - panic(ErrTooLarge) - } else if len(dst) < n { - dst = make([]byte, n) - } - - // The block starts with the varint-encoded length of the decompressed bytes. 
- d := binary.PutUvarint(dst, uint64(len(src))) - - for len(src) > 0 { - p := src - src = nil - if len(p) > maxBlockSize { - p, src = p[:maxBlockSize], p[maxBlockSize:] - } - if len(p) < minNonLiteralBlockSize { - d += emitLiteral(dst[d:], p) - } else { - d += encodeBlock(dst[d:], p) - } - } - return dst[:d] -} - -// inputMargin is the minimum number of extra input bytes to keep, inside -// encodeBlock's inner loop. On some architectures, this margin lets us -// implement a fast path for emitLiteral, where the copy of short (<= 16 byte) -// literals can be implemented as a single load to and store from a 16-byte -// register. That literal's actual length can be as short as 1 byte, so this -// can copy up to 15 bytes too much, but that's OK as subsequent iterations of -// the encoding loop will fix up the copy overrun, and this inputMargin ensures -// that we don't overrun the dst and src buffers. -const inputMargin = 16 - 1 - -// minNonLiteralBlockSize is the minimum size of the input to encodeBlock that -// could be encoded with a copy tag. This is the minimum with respect to the -// algorithm used by encodeBlock, not a minimum enforced by the file format. -// -// The encoded output must start with at least a 1 byte literal, as there are -// no previous bytes to copy. A minimal (1 byte) copy after that, generated -// from an emitCopy call in encodeBlock's main loop, would require at least -// another inputMargin bytes, for the reason above: we want any emitLiteral -// calls inside encodeBlock's main loop to use the fast path if possible, which -// requires being able to overrun by inputMargin bytes. Thus, -// minNonLiteralBlockSize equals 1 + 1 + inputMargin. -// -// The C++ code doesn't use this exact threshold, but it could, as discussed at -// https://groups.google.com/d/topic/snappy-compression/oGbhsdIJSJ8/discussion -// The difference between Go (2+inputMargin) and C++ (inputMargin) is purely an -// optimization. It should not affect the encoded form. 
This is tested by -// TestSameEncodingAsCppShortCopies. -const minNonLiteralBlockSize = 1 + 1 + inputMargin - -// MaxEncodedLen returns the maximum length of a snappy block, given its -// uncompressed length. -// -// It will return a negative value if srcLen is too large to encode. -func MaxEncodedLen(srcLen int) int { - n := uint64(srcLen) - if n > 0xffffffff { - return -1 - } - // Compressed data can be defined as: - // compressed := item* literal* - // item := literal* copy - // - // The trailing literal sequence has a space blowup of at most 62/60 - // since a literal of length 60 needs one tag byte + one extra byte - // for length information. - // - // Item blowup is trickier to measure. Suppose the "copy" op copies - // 4 bytes of data. Because of a special check in the encoding code, - // we produce a 4-byte copy only if the offset is < 65536. Therefore - // the copy op takes 3 bytes to encode, and this type of item leads - // to at most the 62/60 blowup for representing literals. - // - // Suppose the "copy" op copies 5 bytes of data. If the offset is big - // enough, it will take 5 bytes to encode the copy op. Therefore the - // worst case here is a one-byte literal followed by a five-byte copy. - // That is, 6 bytes of input turn into 7 bytes of "compressed" data. - // - // This last factor dominates the blowup, so the final estimate is: - n = 32 + n + n/6 - if n > 0xffffffff { - return -1 - } - return int(n) -} - -var errClosed = errors.New("snappy: Writer is closed") - -// NewWriter returns a new Writer that compresses to w. -// -// The Writer returned does not buffer writes. There is no need to Flush or -// Close such a Writer. -// -// Deprecated: the Writer returned is not suitable for many small writes, only -// for few large writes. Use NewBufferedWriter instead, which is efficient -// regardless of the frequency and shape of the writes, and remember to Close -// that Writer when done. 
-func NewWriter(w io.Writer) *Writer { - return &Writer{ - w: w, - obuf: make([]byte, obufLen), - } -} - -// NewBufferedWriter returns a new Writer that compresses to w, using the -// framing format described at -// https://github.com/google/snappy/blob/master/framing_format.txt -// -// The Writer returned buffers writes. Users must call Close to guarantee all -// data has been forwarded to the underlying io.Writer. They may also call -// Flush zero or more times before calling Close. -func NewBufferedWriter(w io.Writer) *Writer { - return &Writer{ - w: w, - ibuf: make([]byte, 0, maxBlockSize), - obuf: make([]byte, obufLen), - } -} - -// Writer is an io.Writer that can write Snappy-compressed bytes. -// -// Writer handles the Snappy stream format, not the Snappy block format. -type Writer struct { - w io.Writer - err error - - // ibuf is a buffer for the incoming (uncompressed) bytes. - // - // Its use is optional. For backwards compatibility, Writers created by the - // NewWriter function have ibuf == nil, do not buffer incoming bytes, and - // therefore do not need to be Flush'ed or Close'd. - ibuf []byte - - // obuf is a buffer for the outgoing (compressed) bytes. - obuf []byte - - // wroteStreamHeader is whether we have written the stream header. - wroteStreamHeader bool -} - -// Reset discards the writer's state and switches the Snappy writer to write to -// w. This permits reusing a Writer rather than allocating a new one. -func (w *Writer) Reset(writer io.Writer) { - w.w = writer - w.err = nil - if w.ibuf != nil { - w.ibuf = w.ibuf[:0] - } - w.wroteStreamHeader = false -} - -// Write satisfies the io.Writer interface. -func (w *Writer) Write(p []byte) (nRet int, errRet error) { - if w.ibuf == nil { - // Do not buffer incoming bytes. This does not perform or compress well - // if the caller of Writer.Write writes many small slices. 
This - // behavior is therefore deprecated, but still supported for backwards - // compatibility with code that doesn't explicitly Flush or Close. - return w.write(p) - } - - // The remainder of this method is based on bufio.Writer.Write from the - // standard library. - - for len(p) > (cap(w.ibuf)-len(w.ibuf)) && w.err == nil { - var n int - if len(w.ibuf) == 0 { - // Large write, empty buffer. - // Write directly from p to avoid copy. - n, _ = w.write(p) - } else { - n = copy(w.ibuf[len(w.ibuf):cap(w.ibuf)], p) - w.ibuf = w.ibuf[:len(w.ibuf)+n] - w.Flush() - } - nRet += n - p = p[n:] - } - if w.err != nil { - return nRet, w.err - } - n := copy(w.ibuf[len(w.ibuf):cap(w.ibuf)], p) - w.ibuf = w.ibuf[:len(w.ibuf)+n] - nRet += n - return nRet, nil -} - -func (w *Writer) write(p []byte) (nRet int, errRet error) { - if w.err != nil { - return 0, w.err - } - for len(p) > 0 { - obufStart := len(magicChunk) - if !w.wroteStreamHeader { - w.wroteStreamHeader = true - copy(w.obuf, magicChunk) - obufStart = 0 - } - - var uncompressed []byte - if len(p) > maxBlockSize { - uncompressed, p = p[:maxBlockSize], p[maxBlockSize:] - } else { - uncompressed, p = p, nil - } - checksum := crc(uncompressed) - - // Compress the buffer, discarding the result if the improvement - // isn't at least 12.5%. - compressed := Encode(w.obuf[obufHeaderLen:], uncompressed) - chunkType := uint8(chunkTypeCompressedData) - chunkLen := 4 + len(compressed) - obufEnd := obufHeaderLen + len(compressed) - if len(compressed) >= len(uncompressed)-len(uncompressed)/8 { - chunkType = chunkTypeUncompressedData - chunkLen = 4 + len(uncompressed) - obufEnd = obufHeaderLen - } - - // Fill in the per-chunk header that comes before the body. 
- w.obuf[len(magicChunk)+0] = chunkType - w.obuf[len(magicChunk)+1] = uint8(chunkLen >> 0) - w.obuf[len(magicChunk)+2] = uint8(chunkLen >> 8) - w.obuf[len(magicChunk)+3] = uint8(chunkLen >> 16) - w.obuf[len(magicChunk)+4] = uint8(checksum >> 0) - w.obuf[len(magicChunk)+5] = uint8(checksum >> 8) - w.obuf[len(magicChunk)+6] = uint8(checksum >> 16) - w.obuf[len(magicChunk)+7] = uint8(checksum >> 24) - - if _, err := w.w.Write(w.obuf[obufStart:obufEnd]); err != nil { - w.err = err - return nRet, err - } - if chunkType == chunkTypeUncompressedData { - if _, err := w.w.Write(uncompressed); err != nil { - w.err = err - return nRet, err - } - } - nRet += len(uncompressed) - } - return nRet, nil -} - -// Flush flushes the Writer to its underlying io.Writer. -func (w *Writer) Flush() error { - if w.err != nil { - return w.err - } - if len(w.ibuf) == 0 { - return nil - } - w.write(w.ibuf) - w.ibuf = w.ibuf[:0] - return w.err -} - -// Close calls Flush and then closes the Writer. -func (w *Writer) Close() error { - w.Flush() - ret := w.err - if w.err == nil { - w.err = errClosed - } - return ret -} diff --git a/vendor/github.com/golang/snappy/encode_amd64.s b/vendor/github.com/golang/snappy/encode_amd64.s deleted file mode 100644 index adfd979fe2..0000000000 --- a/vendor/github.com/golang/snappy/encode_amd64.s +++ /dev/null @@ -1,730 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !appengine -// +build gc -// +build !noasm - -#include "textflag.h" - -// The XXX lines assemble on Go 1.4, 1.5 and 1.7, but not 1.6, due to a -// Go toolchain regression. 
See https://github.com/golang/go/issues/15426 and -// https://github.com/golang/snappy/issues/29 -// -// As a workaround, the package was built with a known good assembler, and -// those instructions were disassembled by "objdump -d" to yield the -// 4e 0f b7 7c 5c 78 movzwq 0x78(%rsp,%r11,2),%r15 -// style comments, in AT&T asm syntax. Note that rsp here is a physical -// register, not Go/asm's SP pseudo-register (see https://golang.org/doc/asm). -// The instructions were then encoded as "BYTE $0x.." sequences, which assemble -// fine on Go 1.6. - -// The asm code generally follows the pure Go code in encode_other.go, except -// where marked with a "!!!". - -// ---------------------------------------------------------------------------- - -// func emitLiteral(dst, lit []byte) int -// -// All local variables fit into registers. The register allocation: -// - AX len(lit) -// - BX n -// - DX return value -// - DI &dst[i] -// - R10 &lit[0] -// -// The 24 bytes of stack space is to call runtime·memmove. -// -// The unusual register allocation of local variables, such as R10 for the -// source pointer, matches the allocation used at the call site in encodeBlock, -// which makes it easier to manually inline this function. -TEXT ·emitLiteral(SB), NOSPLIT, $24-56 - MOVQ dst_base+0(FP), DI - MOVQ lit_base+24(FP), R10 - MOVQ lit_len+32(FP), AX - MOVQ AX, DX - MOVL AX, BX - SUBL $1, BX - - CMPL BX, $60 - JLT oneByte - CMPL BX, $256 - JLT twoBytes - -threeBytes: - MOVB $0xf4, 0(DI) - MOVW BX, 1(DI) - ADDQ $3, DI - ADDQ $3, DX - JMP memmove - -twoBytes: - MOVB $0xf0, 0(DI) - MOVB BX, 1(DI) - ADDQ $2, DI - ADDQ $2, DX - JMP memmove - -oneByte: - SHLB $2, BX - MOVB BX, 0(DI) - ADDQ $1, DI - ADDQ $1, DX - -memmove: - MOVQ DX, ret+48(FP) - - // copy(dst[i:], lit) - // - // This means calling runtime·memmove(&dst[i], &lit[0], len(lit)), so we push - // DI, R10 and AX as arguments. 
- MOVQ DI, 0(SP) - MOVQ R10, 8(SP) - MOVQ AX, 16(SP) - CALL runtime·memmove(SB) - RET - -// ---------------------------------------------------------------------------- - -// func emitCopy(dst []byte, offset, length int) int -// -// All local variables fit into registers. The register allocation: -// - AX length -// - SI &dst[0] -// - DI &dst[i] -// - R11 offset -// -// The unusual register allocation of local variables, such as R11 for the -// offset, matches the allocation used at the call site in encodeBlock, which -// makes it easier to manually inline this function. -TEXT ·emitCopy(SB), NOSPLIT, $0-48 - MOVQ dst_base+0(FP), DI - MOVQ DI, SI - MOVQ offset+24(FP), R11 - MOVQ length+32(FP), AX - -loop0: - // for length >= 68 { etc } - CMPL AX, $68 - JLT step1 - - // Emit a length 64 copy, encoded as 3 bytes. - MOVB $0xfe, 0(DI) - MOVW R11, 1(DI) - ADDQ $3, DI - SUBL $64, AX - JMP loop0 - -step1: - // if length > 64 { etc } - CMPL AX, $64 - JLE step2 - - // Emit a length 60 copy, encoded as 3 bytes. - MOVB $0xee, 0(DI) - MOVW R11, 1(DI) - ADDQ $3, DI - SUBL $60, AX - -step2: - // if length >= 12 || offset >= 2048 { goto step3 } - CMPL AX, $12 - JGE step3 - CMPL R11, $2048 - JGE step3 - - // Emit the remaining copy, encoded as 2 bytes. - MOVB R11, 1(DI) - SHRL $8, R11 - SHLB $5, R11 - SUBB $4, AX - SHLB $2, AX - ORB AX, R11 - ORB $1, R11 - MOVB R11, 0(DI) - ADDQ $2, DI - - // Return the number of bytes written. - SUBQ SI, DI - MOVQ DI, ret+40(FP) - RET - -step3: - // Emit the remaining copy, encoded as 3 bytes. - SUBL $1, AX - SHLB $2, AX - ORB $2, AX - MOVB AX, 0(DI) - MOVW R11, 1(DI) - ADDQ $3, DI - - // Return the number of bytes written. - SUBQ SI, DI - MOVQ DI, ret+40(FP) - RET - -// ---------------------------------------------------------------------------- - -// func extendMatch(src []byte, i, j int) int -// -// All local variables fit into registers. 
The register allocation: -// - DX &src[0] -// - SI &src[j] -// - R13 &src[len(src) - 8] -// - R14 &src[len(src)] -// - R15 &src[i] -// -// The unusual register allocation of local variables, such as R15 for a source -// pointer, matches the allocation used at the call site in encodeBlock, which -// makes it easier to manually inline this function. -TEXT ·extendMatch(SB), NOSPLIT, $0-48 - MOVQ src_base+0(FP), DX - MOVQ src_len+8(FP), R14 - MOVQ i+24(FP), R15 - MOVQ j+32(FP), SI - ADDQ DX, R14 - ADDQ DX, R15 - ADDQ DX, SI - MOVQ R14, R13 - SUBQ $8, R13 - -cmp8: - // As long as we are 8 or more bytes before the end of src, we can load and - // compare 8 bytes at a time. If those 8 bytes are equal, repeat. - CMPQ SI, R13 - JA cmp1 - MOVQ (R15), AX - MOVQ (SI), BX - CMPQ AX, BX - JNE bsf - ADDQ $8, R15 - ADDQ $8, SI - JMP cmp8 - -bsf: - // If those 8 bytes were not equal, XOR the two 8 byte values, and return - // the index of the first byte that differs. The BSF instruction finds the - // least significant 1 bit, the amd64 architecture is little-endian, and - // the shift by 3 converts a bit index to a byte index. - XORQ AX, BX - BSFQ BX, BX - SHRQ $3, BX - ADDQ BX, SI - - // Convert from &src[ret] to ret. - SUBQ DX, SI - MOVQ SI, ret+40(FP) - RET - -cmp1: - // In src's tail, compare 1 byte at a time. - CMPQ SI, R14 - JAE extendMatchEnd - MOVB (R15), AX - MOVB (SI), BX - CMPB AX, BX - JNE extendMatchEnd - ADDQ $1, R15 - ADDQ $1, SI - JMP cmp1 - -extendMatchEnd: - // Convert from &src[ret] to ret. - SUBQ DX, SI - MOVQ SI, ret+40(FP) - RET - -// ---------------------------------------------------------------------------- - -// func encodeBlock(dst, src []byte) (d int) -// -// All local variables fit into registers, other than "var table". The register -// allocation: -// - AX . . -// - BX . . -// - CX 56 shift (note that amd64 shifts by non-immediates must use CX). -// - DX 64 &src[0], tableSize -// - SI 72 &src[s] -// - DI 80 &dst[d] -// - R9 88 sLimit -// - R10 . 
&src[nextEmit] -// - R11 96 prevHash, currHash, nextHash, offset -// - R12 104 &src[base], skip -// - R13 . &src[nextS], &src[len(src) - 8] -// - R14 . len(src), bytesBetweenHashLookups, &src[len(src)], x -// - R15 112 candidate -// -// The second column (56, 64, etc) is the stack offset to spill the registers -// when calling other functions. We could pack this slightly tighter, but it's -// simpler to have a dedicated spill map independent of the function called. -// -// "var table [maxTableSize]uint16" takes up 32768 bytes of stack space. An -// extra 56 bytes, to call other functions, and an extra 64 bytes, to spill -// local variables (registers) during calls gives 32768 + 56 + 64 = 32888. -TEXT ·encodeBlock(SB), 0, $32888-56 - MOVQ dst_base+0(FP), DI - MOVQ src_base+24(FP), SI - MOVQ src_len+32(FP), R14 - - // shift, tableSize := uint32(32-8), 1<<8 - MOVQ $24, CX - MOVQ $256, DX - -calcShift: - // for ; tableSize < maxTableSize && tableSize < len(src); tableSize *= 2 { - // shift-- - // } - CMPQ DX, $16384 - JGE varTable - CMPQ DX, R14 - JGE varTable - SUBQ $1, CX - SHLQ $1, DX - JMP calcShift - -varTable: - // var table [maxTableSize]uint16 - // - // In the asm code, unlike the Go code, we can zero-initialize only the - // first tableSize elements. Each uint16 element is 2 bytes and each MOVOU - // writes 16 bytes, so we can do only tableSize/8 writes instead of the - // 2048 writes that would zero-initialize all of table's 32768 bytes. - SHRQ $3, DX - LEAQ table-32768(SP), BX - PXOR X0, X0 - -memclr: - MOVOU X0, 0(BX) - ADDQ $16, BX - SUBQ $1, DX - JNZ memclr - - // !!! DX = &src[0] - MOVQ SI, DX - - // sLimit := len(src) - inputMargin - MOVQ R14, R9 - SUBQ $15, R9 - - // !!! Pre-emptively spill CX, DX and R9 to the stack. Their values don't - // change for the rest of the function. 
- MOVQ CX, 56(SP) - MOVQ DX, 64(SP) - MOVQ R9, 88(SP) - - // nextEmit := 0 - MOVQ DX, R10 - - // s := 1 - ADDQ $1, SI - - // nextHash := hash(load32(src, s), shift) - MOVL 0(SI), R11 - IMULL $0x1e35a7bd, R11 - SHRL CX, R11 - -outer: - // for { etc } - - // skip := 32 - MOVQ $32, R12 - - // nextS := s - MOVQ SI, R13 - - // candidate := 0 - MOVQ $0, R15 - -inner0: - // for { etc } - - // s := nextS - MOVQ R13, SI - - // bytesBetweenHashLookups := skip >> 5 - MOVQ R12, R14 - SHRQ $5, R14 - - // nextS = s + bytesBetweenHashLookups - ADDQ R14, R13 - - // skip += bytesBetweenHashLookups - ADDQ R14, R12 - - // if nextS > sLimit { goto emitRemainder } - MOVQ R13, AX - SUBQ DX, AX - CMPQ AX, R9 - JA emitRemainder - - // candidate = int(table[nextHash]) - // XXX: MOVWQZX table-32768(SP)(R11*2), R15 - // XXX: 4e 0f b7 7c 5c 78 movzwq 0x78(%rsp,%r11,2),%r15 - BYTE $0x4e - BYTE $0x0f - BYTE $0xb7 - BYTE $0x7c - BYTE $0x5c - BYTE $0x78 - - // table[nextHash] = uint16(s) - MOVQ SI, AX - SUBQ DX, AX - - // XXX: MOVW AX, table-32768(SP)(R11*2) - // XXX: 66 42 89 44 5c 78 mov %ax,0x78(%rsp,%r11,2) - BYTE $0x66 - BYTE $0x42 - BYTE $0x89 - BYTE $0x44 - BYTE $0x5c - BYTE $0x78 - - // nextHash = hash(load32(src, nextS), shift) - MOVL 0(R13), R11 - IMULL $0x1e35a7bd, R11 - SHRL CX, R11 - - // if load32(src, s) != load32(src, candidate) { continue } break - MOVL 0(SI), AX - MOVL (DX)(R15*1), BX - CMPL AX, BX - JNE inner0 - -fourByteMatch: - // As per the encode_other.go code: - // - // A 4-byte match has been found. We'll later see etc. - - // !!! Jump to a fast path for short (<= 16 byte) literals. See the comment - // on inputMargin in encode.go. - MOVQ SI, AX - SUBQ R10, AX - CMPQ AX, $16 - JLE emitLiteralFastPath - - // ---------------------------------------- - // Begin inline of the emitLiteral call. 
- // - // d += emitLiteral(dst[d:], src[nextEmit:s]) - - MOVL AX, BX - SUBL $1, BX - - CMPL BX, $60 - JLT inlineEmitLiteralOneByte - CMPL BX, $256 - JLT inlineEmitLiteralTwoBytes - -inlineEmitLiteralThreeBytes: - MOVB $0xf4, 0(DI) - MOVW BX, 1(DI) - ADDQ $3, DI - JMP inlineEmitLiteralMemmove - -inlineEmitLiteralTwoBytes: - MOVB $0xf0, 0(DI) - MOVB BX, 1(DI) - ADDQ $2, DI - JMP inlineEmitLiteralMemmove - -inlineEmitLiteralOneByte: - SHLB $2, BX - MOVB BX, 0(DI) - ADDQ $1, DI - -inlineEmitLiteralMemmove: - // Spill local variables (registers) onto the stack; call; unspill. - // - // copy(dst[i:], lit) - // - // This means calling runtime·memmove(&dst[i], &lit[0], len(lit)), so we push - // DI, R10 and AX as arguments. - MOVQ DI, 0(SP) - MOVQ R10, 8(SP) - MOVQ AX, 16(SP) - ADDQ AX, DI // Finish the "d +=" part of "d += emitLiteral(etc)". - MOVQ SI, 72(SP) - MOVQ DI, 80(SP) - MOVQ R15, 112(SP) - CALL runtime·memmove(SB) - MOVQ 56(SP), CX - MOVQ 64(SP), DX - MOVQ 72(SP), SI - MOVQ 80(SP), DI - MOVQ 88(SP), R9 - MOVQ 112(SP), R15 - JMP inner1 - -inlineEmitLiteralEnd: - // End inline of the emitLiteral call. - // ---------------------------------------- - -emitLiteralFastPath: - // !!! Emit the 1-byte encoding "uint8(len(lit)-1)<<2". - MOVB AX, BX - SUBB $1, BX - SHLB $2, BX - MOVB BX, (DI) - ADDQ $1, DI - - // !!! Implement the copy from lit to dst as a 16-byte load and store. - // (Encode's documentation says that dst and src must not overlap.) - // - // This always copies 16 bytes, instead of only len(lit) bytes, but that's - // OK. Subsequent iterations will fix up the overrun. - // - // Note that on amd64, it is legal and cheap to issue unaligned 8-byte or - // 16-byte loads and stores. This technique probably wouldn't be as - // effective on architectures that are fussier about alignment. - MOVOU 0(R10), X0 - MOVOU X0, 0(DI) - ADDQ AX, DI - -inner1: - // for { etc } - - // base := s - MOVQ SI, R12 - - // !!! 
offset := base - candidate - MOVQ R12, R11 - SUBQ R15, R11 - SUBQ DX, R11 - - // ---------------------------------------- - // Begin inline of the extendMatch call. - // - // s = extendMatch(src, candidate+4, s+4) - - // !!! R14 = &src[len(src)] - MOVQ src_len+32(FP), R14 - ADDQ DX, R14 - - // !!! R13 = &src[len(src) - 8] - MOVQ R14, R13 - SUBQ $8, R13 - - // !!! R15 = &src[candidate + 4] - ADDQ $4, R15 - ADDQ DX, R15 - - // !!! s += 4 - ADDQ $4, SI - -inlineExtendMatchCmp8: - // As long as we are 8 or more bytes before the end of src, we can load and - // compare 8 bytes at a time. If those 8 bytes are equal, repeat. - CMPQ SI, R13 - JA inlineExtendMatchCmp1 - MOVQ (R15), AX - MOVQ (SI), BX - CMPQ AX, BX - JNE inlineExtendMatchBSF - ADDQ $8, R15 - ADDQ $8, SI - JMP inlineExtendMatchCmp8 - -inlineExtendMatchBSF: - // If those 8 bytes were not equal, XOR the two 8 byte values, and return - // the index of the first byte that differs. The BSF instruction finds the - // least significant 1 bit, the amd64 architecture is little-endian, and - // the shift by 3 converts a bit index to a byte index. - XORQ AX, BX - BSFQ BX, BX - SHRQ $3, BX - ADDQ BX, SI - JMP inlineExtendMatchEnd - -inlineExtendMatchCmp1: - // In src's tail, compare 1 byte at a time. - CMPQ SI, R14 - JAE inlineExtendMatchEnd - MOVB (R15), AX - MOVB (SI), BX - CMPB AX, BX - JNE inlineExtendMatchEnd - ADDQ $1, R15 - ADDQ $1, SI - JMP inlineExtendMatchCmp1 - -inlineExtendMatchEnd: - // End inline of the extendMatch call. - // ---------------------------------------- - - // ---------------------------------------- - // Begin inline of the emitCopy call. - // - // d += emitCopy(dst[d:], base-candidate, s-base) - - // !!! length := s - base - MOVQ SI, AX - SUBQ R12, AX - -inlineEmitCopyLoop0: - // for length >= 68 { etc } - CMPL AX, $68 - JLT inlineEmitCopyStep1 - - // Emit a length 64 copy, encoded as 3 bytes. 
- MOVB $0xfe, 0(DI) - MOVW R11, 1(DI) - ADDQ $3, DI - SUBL $64, AX - JMP inlineEmitCopyLoop0 - -inlineEmitCopyStep1: - // if length > 64 { etc } - CMPL AX, $64 - JLE inlineEmitCopyStep2 - - // Emit a length 60 copy, encoded as 3 bytes. - MOVB $0xee, 0(DI) - MOVW R11, 1(DI) - ADDQ $3, DI - SUBL $60, AX - -inlineEmitCopyStep2: - // if length >= 12 || offset >= 2048 { goto inlineEmitCopyStep3 } - CMPL AX, $12 - JGE inlineEmitCopyStep3 - CMPL R11, $2048 - JGE inlineEmitCopyStep3 - - // Emit the remaining copy, encoded as 2 bytes. - MOVB R11, 1(DI) - SHRL $8, R11 - SHLB $5, R11 - SUBB $4, AX - SHLB $2, AX - ORB AX, R11 - ORB $1, R11 - MOVB R11, 0(DI) - ADDQ $2, DI - JMP inlineEmitCopyEnd - -inlineEmitCopyStep3: - // Emit the remaining copy, encoded as 3 bytes. - SUBL $1, AX - SHLB $2, AX - ORB $2, AX - MOVB AX, 0(DI) - MOVW R11, 1(DI) - ADDQ $3, DI - -inlineEmitCopyEnd: - // End inline of the emitCopy call. - // ---------------------------------------- - - // nextEmit = s - MOVQ SI, R10 - - // if s >= sLimit { goto emitRemainder } - MOVQ SI, AX - SUBQ DX, AX - CMPQ AX, R9 - JAE emitRemainder - - // As per the encode_other.go code: - // - // We could immediately etc. 
- - // x := load64(src, s-1) - MOVQ -1(SI), R14 - - // prevHash := hash(uint32(x>>0), shift) - MOVL R14, R11 - IMULL $0x1e35a7bd, R11 - SHRL CX, R11 - - // table[prevHash] = uint16(s-1) - MOVQ SI, AX - SUBQ DX, AX - SUBQ $1, AX - - // XXX: MOVW AX, table-32768(SP)(R11*2) - // XXX: 66 42 89 44 5c 78 mov %ax,0x78(%rsp,%r11,2) - BYTE $0x66 - BYTE $0x42 - BYTE $0x89 - BYTE $0x44 - BYTE $0x5c - BYTE $0x78 - - // currHash := hash(uint32(x>>8), shift) - SHRQ $8, R14 - MOVL R14, R11 - IMULL $0x1e35a7bd, R11 - SHRL CX, R11 - - // candidate = int(table[currHash]) - // XXX: MOVWQZX table-32768(SP)(R11*2), R15 - // XXX: 4e 0f b7 7c 5c 78 movzwq 0x78(%rsp,%r11,2),%r15 - BYTE $0x4e - BYTE $0x0f - BYTE $0xb7 - BYTE $0x7c - BYTE $0x5c - BYTE $0x78 - - // table[currHash] = uint16(s) - ADDQ $1, AX - - // XXX: MOVW AX, table-32768(SP)(R11*2) - // XXX: 66 42 89 44 5c 78 mov %ax,0x78(%rsp,%r11,2) - BYTE $0x66 - BYTE $0x42 - BYTE $0x89 - BYTE $0x44 - BYTE $0x5c - BYTE $0x78 - - // if uint32(x>>8) == load32(src, candidate) { continue } - MOVL (DX)(R15*1), BX - CMPL R14, BX - JEQ inner1 - - // nextHash = hash(uint32(x>>16), shift) - SHRQ $8, R14 - MOVL R14, R11 - IMULL $0x1e35a7bd, R11 - SHRL CX, R11 - - // s++ - ADDQ $1, SI - - // break out of the inner1 for loop, i.e. continue the outer loop. - JMP outer - -emitRemainder: - // if nextEmit < len(src) { etc } - MOVQ src_len+32(FP), AX - ADDQ DX, AX - CMPQ R10, AX - JEQ encodeBlockEnd - - // d += emitLiteral(dst[d:], src[nextEmit:]) - // - // Push args. - MOVQ DI, 0(SP) - MOVQ $0, 8(SP) // Unnecessary, as the callee ignores it, but conservative. - MOVQ $0, 16(SP) // Unnecessary, as the callee ignores it, but conservative. - MOVQ R10, 24(SP) - SUBQ R10, AX - MOVQ AX, 32(SP) - MOVQ AX, 40(SP) // Unnecessary, as the callee ignores it, but conservative. - - // Spill local variables (registers) onto the stack; call; unspill. - MOVQ DI, 80(SP) - CALL ·emitLiteral(SB) - MOVQ 80(SP), DI - - // Finish the "d +=" part of "d += emitLiteral(etc)". 
- ADDQ 48(SP), DI - -encodeBlockEnd: - MOVQ dst_base+0(FP), AX - SUBQ AX, DI - MOVQ DI, d+48(FP) - RET diff --git a/vendor/github.com/golang/snappy/encode_arm64.s b/vendor/github.com/golang/snappy/encode_arm64.s deleted file mode 100644 index f8d54adfc5..0000000000 --- a/vendor/github.com/golang/snappy/encode_arm64.s +++ /dev/null @@ -1,722 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !appengine -// +build gc -// +build !noasm - -#include "textflag.h" - -// The asm code generally follows the pure Go code in encode_other.go, except -// where marked with a "!!!". - -// ---------------------------------------------------------------------------- - -// func emitLiteral(dst, lit []byte) int -// -// All local variables fit into registers. The register allocation: -// - R3 len(lit) -// - R4 n -// - R6 return value -// - R8 &dst[i] -// - R10 &lit[0] -// -// The 32 bytes of stack space is to call runtime·memmove. -// -// The unusual register allocation of local variables, such as R10 for the -// source pointer, matches the allocation used at the call site in encodeBlock, -// which makes it easier to manually inline this function. 
-TEXT ·emitLiteral(SB), NOSPLIT, $32-56 - MOVD dst_base+0(FP), R8 - MOVD lit_base+24(FP), R10 - MOVD lit_len+32(FP), R3 - MOVD R3, R6 - MOVW R3, R4 - SUBW $1, R4, R4 - - CMPW $60, R4 - BLT oneByte - CMPW $256, R4 - BLT twoBytes - -threeBytes: - MOVD $0xf4, R2 - MOVB R2, 0(R8) - MOVW R4, 1(R8) - ADD $3, R8, R8 - ADD $3, R6, R6 - B memmove - -twoBytes: - MOVD $0xf0, R2 - MOVB R2, 0(R8) - MOVB R4, 1(R8) - ADD $2, R8, R8 - ADD $2, R6, R6 - B memmove - -oneByte: - LSLW $2, R4, R4 - MOVB R4, 0(R8) - ADD $1, R8, R8 - ADD $1, R6, R6 - -memmove: - MOVD R6, ret+48(FP) - - // copy(dst[i:], lit) - // - // This means calling runtime·memmove(&dst[i], &lit[0], len(lit)), so we push - // R8, R10 and R3 as arguments. - MOVD R8, 8(RSP) - MOVD R10, 16(RSP) - MOVD R3, 24(RSP) - CALL runtime·memmove(SB) - RET - -// ---------------------------------------------------------------------------- - -// func emitCopy(dst []byte, offset, length int) int -// -// All local variables fit into registers. The register allocation: -// - R3 length -// - R7 &dst[0] -// - R8 &dst[i] -// - R11 offset -// -// The unusual register allocation of local variables, such as R11 for the -// offset, matches the allocation used at the call site in encodeBlock, which -// makes it easier to manually inline this function. -TEXT ·emitCopy(SB), NOSPLIT, $0-48 - MOVD dst_base+0(FP), R8 - MOVD R8, R7 - MOVD offset+24(FP), R11 - MOVD length+32(FP), R3 - -loop0: - // for length >= 68 { etc } - CMPW $68, R3 - BLT step1 - - // Emit a length 64 copy, encoded as 3 bytes. - MOVD $0xfe, R2 - MOVB R2, 0(R8) - MOVW R11, 1(R8) - ADD $3, R8, R8 - SUB $64, R3, R3 - B loop0 - -step1: - // if length > 64 { etc } - CMP $64, R3 - BLE step2 - - // Emit a length 60 copy, encoded as 3 bytes. 
- MOVD $0xee, R2 - MOVB R2, 0(R8) - MOVW R11, 1(R8) - ADD $3, R8, R8 - SUB $60, R3, R3 - -step2: - // if length >= 12 || offset >= 2048 { goto step3 } - CMP $12, R3 - BGE step3 - CMPW $2048, R11 - BGE step3 - - // Emit the remaining copy, encoded as 2 bytes. - MOVB R11, 1(R8) - LSRW $3, R11, R11 - AND $0xe0, R11, R11 - SUB $4, R3, R3 - LSLW $2, R3 - AND $0xff, R3, R3 - ORRW R3, R11, R11 - ORRW $1, R11, R11 - MOVB R11, 0(R8) - ADD $2, R8, R8 - - // Return the number of bytes written. - SUB R7, R8, R8 - MOVD R8, ret+40(FP) - RET - -step3: - // Emit the remaining copy, encoded as 3 bytes. - SUB $1, R3, R3 - AND $0xff, R3, R3 - LSLW $2, R3, R3 - ORRW $2, R3, R3 - MOVB R3, 0(R8) - MOVW R11, 1(R8) - ADD $3, R8, R8 - - // Return the number of bytes written. - SUB R7, R8, R8 - MOVD R8, ret+40(FP) - RET - -// ---------------------------------------------------------------------------- - -// func extendMatch(src []byte, i, j int) int -// -// All local variables fit into registers. The register allocation: -// - R6 &src[0] -// - R7 &src[j] -// - R13 &src[len(src) - 8] -// - R14 &src[len(src)] -// - R15 &src[i] -// -// The unusual register allocation of local variables, such as R15 for a source -// pointer, matches the allocation used at the call site in encodeBlock, which -// makes it easier to manually inline this function. -TEXT ·extendMatch(SB), NOSPLIT, $0-48 - MOVD src_base+0(FP), R6 - MOVD src_len+8(FP), R14 - MOVD i+24(FP), R15 - MOVD j+32(FP), R7 - ADD R6, R14, R14 - ADD R6, R15, R15 - ADD R6, R7, R7 - MOVD R14, R13 - SUB $8, R13, R13 - -cmp8: - // As long as we are 8 or more bytes before the end of src, we can load and - // compare 8 bytes at a time. If those 8 bytes are equal, repeat. - CMP R13, R7 - BHI cmp1 - MOVD (R15), R3 - MOVD (R7), R4 - CMP R4, R3 - BNE bsf - ADD $8, R15, R15 - ADD $8, R7, R7 - B cmp8 - -bsf: - // If those 8 bytes were not equal, XOR the two 8 byte values, and return - // the index of the first byte that differs. 
- // RBIT reverses the bit order, then CLZ counts the leading zeros, the - // combination of which finds the least significant bit which is set. - // The arm64 architecture is little-endian, and the shift by 3 converts - // a bit index to a byte index. - EOR R3, R4, R4 - RBIT R4, R4 - CLZ R4, R4 - ADD R4>>3, R7, R7 - - // Convert from &src[ret] to ret. - SUB R6, R7, R7 - MOVD R7, ret+40(FP) - RET - -cmp1: - // In src's tail, compare 1 byte at a time. - CMP R7, R14 - BLS extendMatchEnd - MOVB (R15), R3 - MOVB (R7), R4 - CMP R4, R3 - BNE extendMatchEnd - ADD $1, R15, R15 - ADD $1, R7, R7 - B cmp1 - -extendMatchEnd: - // Convert from &src[ret] to ret. - SUB R6, R7, R7 - MOVD R7, ret+40(FP) - RET - -// ---------------------------------------------------------------------------- - -// func encodeBlock(dst, src []byte) (d int) -// -// All local variables fit into registers, other than "var table". The register -// allocation: -// - R3 . . -// - R4 . . -// - R5 64 shift -// - R6 72 &src[0], tableSize -// - R7 80 &src[s] -// - R8 88 &dst[d] -// - R9 96 sLimit -// - R10 . &src[nextEmit] -// - R11 104 prevHash, currHash, nextHash, offset -// - R12 112 &src[base], skip -// - R13 . &src[nextS], &src[len(src) - 8] -// - R14 . len(src), bytesBetweenHashLookups, &src[len(src)], x -// - R15 120 candidate -// - R16 . hash constant, 0x1e35a7bd -// - R17 . &table -// - . 128 table -// -// The second column (64, 72, etc) is the stack offset to spill the registers -// when calling other functions. We could pack this slightly tighter, but it's -// simpler to have a dedicated spill map independent of the function called. -// -// "var table [maxTableSize]uint16" takes up 32768 bytes of stack space. An -// extra 64 bytes, to call other functions, and an extra 64 bytes, to spill -// local variables (registers) during calls gives 32768 + 64 + 64 = 32896. 
-TEXT ·encodeBlock(SB), 0, $32896-56 - MOVD dst_base+0(FP), R8 - MOVD src_base+24(FP), R7 - MOVD src_len+32(FP), R14 - - // shift, tableSize := uint32(32-8), 1<<8 - MOVD $24, R5 - MOVD $256, R6 - MOVW $0xa7bd, R16 - MOVKW $(0x1e35<<16), R16 - -calcShift: - // for ; tableSize < maxTableSize && tableSize < len(src); tableSize *= 2 { - // shift-- - // } - MOVD $16384, R2 - CMP R2, R6 - BGE varTable - CMP R14, R6 - BGE varTable - SUB $1, R5, R5 - LSL $1, R6, R6 - B calcShift - -varTable: - // var table [maxTableSize]uint16 - // - // In the asm code, unlike the Go code, we can zero-initialize only the - // first tableSize elements. Each uint16 element is 2 bytes and each - // iterations writes 64 bytes, so we can do only tableSize/32 writes - // instead of the 2048 writes that would zero-initialize all of table's - // 32768 bytes. This clear could overrun the first tableSize elements, but - // it won't overrun the allocated stack size. - ADD $128, RSP, R17 - MOVD R17, R4 - - // !!! R6 = &src[tableSize] - ADD R6<<1, R17, R6 - -memclr: - STP.P (ZR, ZR), 64(R4) - STP (ZR, ZR), -48(R4) - STP (ZR, ZR), -32(R4) - STP (ZR, ZR), -16(R4) - CMP R4, R6 - BHI memclr - - // !!! R6 = &src[0] - MOVD R7, R6 - - // sLimit := len(src) - inputMargin - MOVD R14, R9 - SUB $15, R9, R9 - - // !!! Pre-emptively spill R5, R6 and R9 to the stack. Their values don't - // change for the rest of the function. 
- MOVD R5, 64(RSP) - MOVD R6, 72(RSP) - MOVD R9, 96(RSP) - - // nextEmit := 0 - MOVD R6, R10 - - // s := 1 - ADD $1, R7, R7 - - // nextHash := hash(load32(src, s), shift) - MOVW 0(R7), R11 - MULW R16, R11, R11 - LSRW R5, R11, R11 - -outer: - // for { etc } - - // skip := 32 - MOVD $32, R12 - - // nextS := s - MOVD R7, R13 - - // candidate := 0 - MOVD $0, R15 - -inner0: - // for { etc } - - // s := nextS - MOVD R13, R7 - - // bytesBetweenHashLookups := skip >> 5 - MOVD R12, R14 - LSR $5, R14, R14 - - // nextS = s + bytesBetweenHashLookups - ADD R14, R13, R13 - - // skip += bytesBetweenHashLookups - ADD R14, R12, R12 - - // if nextS > sLimit { goto emitRemainder } - MOVD R13, R3 - SUB R6, R3, R3 - CMP R9, R3 - BHI emitRemainder - - // candidate = int(table[nextHash]) - MOVHU 0(R17)(R11<<1), R15 - - // table[nextHash] = uint16(s) - MOVD R7, R3 - SUB R6, R3, R3 - - MOVH R3, 0(R17)(R11<<1) - - // nextHash = hash(load32(src, nextS), shift) - MOVW 0(R13), R11 - MULW R16, R11 - LSRW R5, R11, R11 - - // if load32(src, s) != load32(src, candidate) { continue } break - MOVW 0(R7), R3 - MOVW (R6)(R15), R4 - CMPW R4, R3 - BNE inner0 - -fourByteMatch: - // As per the encode_other.go code: - // - // A 4-byte match has been found. We'll later see etc. - - // !!! Jump to a fast path for short (<= 16 byte) literals. See the comment - // on inputMargin in encode.go. - MOVD R7, R3 - SUB R10, R3, R3 - CMP $16, R3 - BLE emitLiteralFastPath - - // ---------------------------------------- - // Begin inline of the emitLiteral call. 
- // - // d += emitLiteral(dst[d:], src[nextEmit:s]) - - MOVW R3, R4 - SUBW $1, R4, R4 - - MOVW $60, R2 - CMPW R2, R4 - BLT inlineEmitLiteralOneByte - MOVW $256, R2 - CMPW R2, R4 - BLT inlineEmitLiteralTwoBytes - -inlineEmitLiteralThreeBytes: - MOVD $0xf4, R1 - MOVB R1, 0(R8) - MOVW R4, 1(R8) - ADD $3, R8, R8 - B inlineEmitLiteralMemmove - -inlineEmitLiteralTwoBytes: - MOVD $0xf0, R1 - MOVB R1, 0(R8) - MOVB R4, 1(R8) - ADD $2, R8, R8 - B inlineEmitLiteralMemmove - -inlineEmitLiteralOneByte: - LSLW $2, R4, R4 - MOVB R4, 0(R8) - ADD $1, R8, R8 - -inlineEmitLiteralMemmove: - // Spill local variables (registers) onto the stack; call; unspill. - // - // copy(dst[i:], lit) - // - // This means calling runtime·memmove(&dst[i], &lit[0], len(lit)), so we push - // R8, R10 and R3 as arguments. - MOVD R8, 8(RSP) - MOVD R10, 16(RSP) - MOVD R3, 24(RSP) - - // Finish the "d +=" part of "d += emitLiteral(etc)". - ADD R3, R8, R8 - MOVD R7, 80(RSP) - MOVD R8, 88(RSP) - MOVD R15, 120(RSP) - CALL runtime·memmove(SB) - MOVD 64(RSP), R5 - MOVD 72(RSP), R6 - MOVD 80(RSP), R7 - MOVD 88(RSP), R8 - MOVD 96(RSP), R9 - MOVD 120(RSP), R15 - ADD $128, RSP, R17 - MOVW $0xa7bd, R16 - MOVKW $(0x1e35<<16), R16 - B inner1 - -inlineEmitLiteralEnd: - // End inline of the emitLiteral call. - // ---------------------------------------- - -emitLiteralFastPath: - // !!! Emit the 1-byte encoding "uint8(len(lit)-1)<<2". - MOVB R3, R4 - SUBW $1, R4, R4 - AND $0xff, R4, R4 - LSLW $2, R4, R4 - MOVB R4, (R8) - ADD $1, R8, R8 - - // !!! Implement the copy from lit to dst as a 16-byte load and store. - // (Encode's documentation says that dst and src must not overlap.) - // - // This always copies 16 bytes, instead of only len(lit) bytes, but that's - // OK. Subsequent iterations will fix up the overrun. - // - // Note that on arm64, it is legal and cheap to issue unaligned 8-byte or - // 16-byte loads and stores. 
This technique probably wouldn't be as - // effective on architectures that are fussier about alignment. - LDP 0(R10), (R0, R1) - STP (R0, R1), 0(R8) - ADD R3, R8, R8 - -inner1: - // for { etc } - - // base := s - MOVD R7, R12 - - // !!! offset := base - candidate - MOVD R12, R11 - SUB R15, R11, R11 - SUB R6, R11, R11 - - // ---------------------------------------- - // Begin inline of the extendMatch call. - // - // s = extendMatch(src, candidate+4, s+4) - - // !!! R14 = &src[len(src)] - MOVD src_len+32(FP), R14 - ADD R6, R14, R14 - - // !!! R13 = &src[len(src) - 8] - MOVD R14, R13 - SUB $8, R13, R13 - - // !!! R15 = &src[candidate + 4] - ADD $4, R15, R15 - ADD R6, R15, R15 - - // !!! s += 4 - ADD $4, R7, R7 - -inlineExtendMatchCmp8: - // As long as we are 8 or more bytes before the end of src, we can load and - // compare 8 bytes at a time. If those 8 bytes are equal, repeat. - CMP R13, R7 - BHI inlineExtendMatchCmp1 - MOVD (R15), R3 - MOVD (R7), R4 - CMP R4, R3 - BNE inlineExtendMatchBSF - ADD $8, R15, R15 - ADD $8, R7, R7 - B inlineExtendMatchCmp8 - -inlineExtendMatchBSF: - // If those 8 bytes were not equal, XOR the two 8 byte values, and return - // the index of the first byte that differs. - // RBIT reverses the bit order, then CLZ counts the leading zeros, the - // combination of which finds the least significant bit which is set. - // The arm64 architecture is little-endian, and the shift by 3 converts - // a bit index to a byte index. - EOR R3, R4, R4 - RBIT R4, R4 - CLZ R4, R4 - ADD R4>>3, R7, R7 - B inlineExtendMatchEnd - -inlineExtendMatchCmp1: - // In src's tail, compare 1 byte at a time. - CMP R7, R14 - BLS inlineExtendMatchEnd - MOVB (R15), R3 - MOVB (R7), R4 - CMP R4, R3 - BNE inlineExtendMatchEnd - ADD $1, R15, R15 - ADD $1, R7, R7 - B inlineExtendMatchCmp1 - -inlineExtendMatchEnd: - // End inline of the extendMatch call. 
- // ---------------------------------------- - - // ---------------------------------------- - // Begin inline of the emitCopy call. - // - // d += emitCopy(dst[d:], base-candidate, s-base) - - // !!! length := s - base - MOVD R7, R3 - SUB R12, R3, R3 - -inlineEmitCopyLoop0: - // for length >= 68 { etc } - MOVW $68, R2 - CMPW R2, R3 - BLT inlineEmitCopyStep1 - - // Emit a length 64 copy, encoded as 3 bytes. - MOVD $0xfe, R1 - MOVB R1, 0(R8) - MOVW R11, 1(R8) - ADD $3, R8, R8 - SUBW $64, R3, R3 - B inlineEmitCopyLoop0 - -inlineEmitCopyStep1: - // if length > 64 { etc } - MOVW $64, R2 - CMPW R2, R3 - BLE inlineEmitCopyStep2 - - // Emit a length 60 copy, encoded as 3 bytes. - MOVD $0xee, R1 - MOVB R1, 0(R8) - MOVW R11, 1(R8) - ADD $3, R8, R8 - SUBW $60, R3, R3 - -inlineEmitCopyStep2: - // if length >= 12 || offset >= 2048 { goto inlineEmitCopyStep3 } - MOVW $12, R2 - CMPW R2, R3 - BGE inlineEmitCopyStep3 - MOVW $2048, R2 - CMPW R2, R11 - BGE inlineEmitCopyStep3 - - // Emit the remaining copy, encoded as 2 bytes. - MOVB R11, 1(R8) - LSRW $8, R11, R11 - LSLW $5, R11, R11 - SUBW $4, R3, R3 - AND $0xff, R3, R3 - LSLW $2, R3, R3 - ORRW R3, R11, R11 - ORRW $1, R11, R11 - MOVB R11, 0(R8) - ADD $2, R8, R8 - B inlineEmitCopyEnd - -inlineEmitCopyStep3: - // Emit the remaining copy, encoded as 3 bytes. - SUBW $1, R3, R3 - LSLW $2, R3, R3 - ORRW $2, R3, R3 - MOVB R3, 0(R8) - MOVW R11, 1(R8) - ADD $3, R8, R8 - -inlineEmitCopyEnd: - // End inline of the emitCopy call. - // ---------------------------------------- - - // nextEmit = s - MOVD R7, R10 - - // if s >= sLimit { goto emitRemainder } - MOVD R7, R3 - SUB R6, R3, R3 - CMP R3, R9 - BLS emitRemainder - - // As per the encode_other.go code: - // - // We could immediately etc. 
- - // x := load64(src, s-1) - MOVD -1(R7), R14 - - // prevHash := hash(uint32(x>>0), shift) - MOVW R14, R11 - MULW R16, R11, R11 - LSRW R5, R11, R11 - - // table[prevHash] = uint16(s-1) - MOVD R7, R3 - SUB R6, R3, R3 - SUB $1, R3, R3 - - MOVHU R3, 0(R17)(R11<<1) - - // currHash := hash(uint32(x>>8), shift) - LSR $8, R14, R14 - MOVW R14, R11 - MULW R16, R11, R11 - LSRW R5, R11, R11 - - // candidate = int(table[currHash]) - MOVHU 0(R17)(R11<<1), R15 - - // table[currHash] = uint16(s) - ADD $1, R3, R3 - MOVHU R3, 0(R17)(R11<<1) - - // if uint32(x>>8) == load32(src, candidate) { continue } - MOVW (R6)(R15), R4 - CMPW R4, R14 - BEQ inner1 - - // nextHash = hash(uint32(x>>16), shift) - LSR $8, R14, R14 - MOVW R14, R11 - MULW R16, R11, R11 - LSRW R5, R11, R11 - - // s++ - ADD $1, R7, R7 - - // break out of the inner1 for loop, i.e. continue the outer loop. - B outer - -emitRemainder: - // if nextEmit < len(src) { etc } - MOVD src_len+32(FP), R3 - ADD R6, R3, R3 - CMP R3, R10 - BEQ encodeBlockEnd - - // d += emitLiteral(dst[d:], src[nextEmit:]) - // - // Push args. - MOVD R8, 8(RSP) - MOVD $0, 16(RSP) // Unnecessary, as the callee ignores it, but conservative. - MOVD $0, 24(RSP) // Unnecessary, as the callee ignores it, but conservative. - MOVD R10, 32(RSP) - SUB R10, R3, R3 - MOVD R3, 40(RSP) - MOVD R3, 48(RSP) // Unnecessary, as the callee ignores it, but conservative. - - // Spill local variables (registers) onto the stack; call; unspill. - MOVD R8, 88(RSP) - CALL ·emitLiteral(SB) - MOVD 88(RSP), R8 - - // Finish the "d +=" part of "d += emitLiteral(etc)". 
- MOVD 56(RSP), R1 - ADD R1, R8, R8 - -encodeBlockEnd: - MOVD dst_base+0(FP), R3 - SUB R3, R8, R8 - MOVD R8, d+48(FP) - RET diff --git a/vendor/github.com/golang/snappy/encode_asm.go b/vendor/github.com/golang/snappy/encode_asm.go deleted file mode 100644 index 107c1e7141..0000000000 --- a/vendor/github.com/golang/snappy/encode_asm.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2016 The Snappy-Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !appengine -// +build gc -// +build !noasm -// +build amd64 arm64 - -package snappy - -// emitLiteral has the same semantics as in encode_other.go. -// -//go:noescape -func emitLiteral(dst, lit []byte) int - -// emitCopy has the same semantics as in encode_other.go. -// -//go:noescape -func emitCopy(dst []byte, offset, length int) int - -// extendMatch has the same semantics as in encode_other.go. -// -//go:noescape -func extendMatch(src []byte, i, j int) int - -// encodeBlock has the same semantics as in encode_other.go. -// -//go:noescape -func encodeBlock(dst, src []byte) (d int) diff --git a/vendor/github.com/golang/snappy/encode_other.go b/vendor/github.com/golang/snappy/encode_other.go deleted file mode 100644 index 296d7f0beb..0000000000 --- a/vendor/github.com/golang/snappy/encode_other.go +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright 2016 The Snappy-Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !amd64,!arm64 appengine !gc noasm - -package snappy - -func load32(b []byte, i int) uint32 { - b = b[i : i+4 : len(b)] // Help the compiler eliminate bounds checks on the next line. 
- return uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 -} - -func load64(b []byte, i int) uint64 { - b = b[i : i+8 : len(b)] // Help the compiler eliminate bounds checks on the next line. - return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | - uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 -} - -// emitLiteral writes a literal chunk and returns the number of bytes written. -// -// It assumes that: -// dst is long enough to hold the encoded bytes -// 1 <= len(lit) && len(lit) <= 65536 -func emitLiteral(dst, lit []byte) int { - i, n := 0, uint(len(lit)-1) - switch { - case n < 60: - dst[0] = uint8(n)<<2 | tagLiteral - i = 1 - case n < 1<<8: - dst[0] = 60<<2 | tagLiteral - dst[1] = uint8(n) - i = 2 - default: - dst[0] = 61<<2 | tagLiteral - dst[1] = uint8(n) - dst[2] = uint8(n >> 8) - i = 3 - } - return i + copy(dst[i:], lit) -} - -// emitCopy writes a copy chunk and returns the number of bytes written. -// -// It assumes that: -// dst is long enough to hold the encoded bytes -// 1 <= offset && offset <= 65535 -// 4 <= length && length <= 65535 -func emitCopy(dst []byte, offset, length int) int { - i := 0 - // The maximum length for a single tagCopy1 or tagCopy2 op is 64 bytes. The - // threshold for this loop is a little higher (at 68 = 64 + 4), and the - // length emitted down below is is a little lower (at 60 = 64 - 4), because - // it's shorter to encode a length 67 copy as a length 60 tagCopy2 followed - // by a length 7 tagCopy1 (which encodes as 3+2 bytes) than to encode it as - // a length 64 tagCopy2 followed by a length 3 tagCopy2 (which encodes as - // 3+3 bytes). The magic 4 in the 64±4 is because the minimum length for a - // tagCopy1 op is 4 bytes, which is why a length 3 copy has to be an - // encodes-as-3-bytes tagCopy2 instead of an encodes-as-2-bytes tagCopy1. - for length >= 68 { - // Emit a length 64 copy, encoded as 3 bytes. 
- dst[i+0] = 63<<2 | tagCopy2 - dst[i+1] = uint8(offset) - dst[i+2] = uint8(offset >> 8) - i += 3 - length -= 64 - } - if length > 64 { - // Emit a length 60 copy, encoded as 3 bytes. - dst[i+0] = 59<<2 | tagCopy2 - dst[i+1] = uint8(offset) - dst[i+2] = uint8(offset >> 8) - i += 3 - length -= 60 - } - if length >= 12 || offset >= 2048 { - // Emit the remaining copy, encoded as 3 bytes. - dst[i+0] = uint8(length-1)<<2 | tagCopy2 - dst[i+1] = uint8(offset) - dst[i+2] = uint8(offset >> 8) - return i + 3 - } - // Emit the remaining copy, encoded as 2 bytes. - dst[i+0] = uint8(offset>>8)<<5 | uint8(length-4)<<2 | tagCopy1 - dst[i+1] = uint8(offset) - return i + 2 -} - -// extendMatch returns the largest k such that k <= len(src) and that -// src[i:i+k-j] and src[j:k] have the same contents. -// -// It assumes that: -// 0 <= i && i < j && j <= len(src) -func extendMatch(src []byte, i, j int) int { - for ; j < len(src) && src[i] == src[j]; i, j = i+1, j+1 { - } - return j -} - -func hash(u, shift uint32) uint32 { - return (u * 0x1e35a7bd) >> shift -} - -// encodeBlock encodes a non-empty src to a guaranteed-large-enough dst. It -// assumes that the varint-encoded length of the decompressed bytes has already -// been written. -// -// It also assumes that: -// len(dst) >= MaxEncodedLen(len(src)) && -// minNonLiteralBlockSize <= len(src) && len(src) <= maxBlockSize -func encodeBlock(dst, src []byte) (d int) { - // Initialize the hash table. Its size ranges from 1<<8 to 1<<14 inclusive. - // The table element type is uint16, as s < sLimit and sLimit < len(src) - // and len(src) <= maxBlockSize and maxBlockSize == 65536. - const ( - maxTableSize = 1 << 14 - // tableMask is redundant, but helps the compiler eliminate bounds - // checks. 
- tableMask = maxTableSize - 1 - ) - shift := uint32(32 - 8) - for tableSize := 1 << 8; tableSize < maxTableSize && tableSize < len(src); tableSize *= 2 { - shift-- - } - // In Go, all array elements are zero-initialized, so there is no advantage - // to a smaller tableSize per se. However, it matches the C++ algorithm, - // and in the asm versions of this code, we can get away with zeroing only - // the first tableSize elements. - var table [maxTableSize]uint16 - - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := len(src) - inputMargin - - // nextEmit is where in src the next emitLiteral should start from. - nextEmit := 0 - - // The encoded form must start with a literal, as there are no previous - // bytes to copy, so we start looking for hash matches at s == 1. - s := 1 - nextHash := hash(load32(src, s), shift) - - for { - // Copied from the C++ snappy implementation: - // - // Heuristic match skipping: If 32 bytes are scanned with no matches - // found, start looking only at every other byte. If 32 more bytes are - // scanned (or skipped), look at every third byte, etc.. When a match - // is found, immediately go back to looking at every byte. This is a - // small loss (~5% performance, ~0.1% density) for compressible data - // due to more bookkeeping, but for non-compressible data (such as - // JPEG) it's a huge win since the compressor quickly "realizes" the - // data is incompressible and doesn't bother looking for matches - // everywhere. - // - // The "skip" variable keeps track of how many bytes there are since - // the last match; dividing it by 32 (ie. right-shifting by five) gives - // the number of bytes to move ahead for each iteration. 
- skip := 32 - - nextS := s - candidate := 0 - for { - s = nextS - bytesBetweenHashLookups := skip >> 5 - nextS = s + bytesBetweenHashLookups - skip += bytesBetweenHashLookups - if nextS > sLimit { - goto emitRemainder - } - candidate = int(table[nextHash&tableMask]) - table[nextHash&tableMask] = uint16(s) - nextHash = hash(load32(src, nextS), shift) - if load32(src, s) == load32(src, candidate) { - break - } - } - - // A 4-byte match has been found. We'll later see if more than 4 bytes - // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit - // them as literal bytes. - d += emitLiteral(dst[d:], src[nextEmit:s]) - - // Call emitCopy, and then see if another emitCopy could be our next - // move. Repeat until we find no match for the input immediately after - // what was consumed by the last emitCopy call. - // - // If we exit this loop normally then we need to call emitLiteral next, - // though we don't yet know how big the literal will be. We handle that - // by proceeding to the next iteration of the main loop. We also can - // exit this loop via goto if we get close to exhausting the input. - for { - // Invariant: we have a 4-byte match at s, and no need to emit any - // literal bytes prior to s. - base := s - - // Extend the 4-byte match as long as possible. - // - // This is an inlined version of: - // s = extendMatch(src, candidate+4, s+4) - s += 4 - for i := candidate + 4; s < len(src) && src[i] == src[s]; i, s = i+1, s+1 { - } - - d += emitCopy(dst[d:], base-candidate, s-base) - nextEmit = s - if s >= sLimit { - goto emitRemainder - } - - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-1 and at s. If - // another emitCopy is not our next move, also calculate nextHash - // at s+1. At least on GOARCH=amd64, these three hash calculations - // are faster as one load64 call (with some shifts) instead of - // three load32 calls. 
- x := load64(src, s-1) - prevHash := hash(uint32(x>>0), shift) - table[prevHash&tableMask] = uint16(s - 1) - currHash := hash(uint32(x>>8), shift) - candidate = int(table[currHash&tableMask]) - table[currHash&tableMask] = uint16(s) - if uint32(x>>8) != load32(src, candidate) { - nextHash = hash(uint32(x>>16), shift) - s++ - break - } - } - } - -emitRemainder: - if nextEmit < len(src) { - d += emitLiteral(dst[d:], src[nextEmit:]) - } - return d -} diff --git a/vendor/github.com/golang/snappy/snappy.go b/vendor/github.com/golang/snappy/snappy.go deleted file mode 100644 index ece692ea46..0000000000 --- a/vendor/github.com/golang/snappy/snappy.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2011 The Snappy-Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package snappy implements the Snappy compression format. It aims for very -// high speeds and reasonable compression. -// -// There are actually two Snappy formats: block and stream. They are related, -// but different: trying to decompress block-compressed data as a Snappy stream -// will fail, and vice versa. The block format is the Decode and Encode -// functions and the stream format is the Reader and Writer types. -// -// The block format, the more common case, is used when the complete size (the -// number of bytes) of the original data is known upfront, at the time -// compression starts. The stream format, also known as the framing format, is -// for when that isn't always true. -// -// The canonical, C++ implementation is at https://github.com/google/snappy and -// it only implements the block format. -package snappy // import "github.com/golang/snappy" - -import ( - "hash/crc32" -) - -/* -Each encoded block begins with the varint-encoded length of the decoded data, -followed by a sequence of chunks. Chunks begin and end on byte boundaries. 
The -first byte of each chunk is broken into its 2 least and 6 most significant bits -called l and m: l ranges in [0, 4) and m ranges in [0, 64). l is the chunk tag. -Zero means a literal tag. All other values mean a copy tag. - -For literal tags: - - If m < 60, the next 1 + m bytes are literal bytes. - - Otherwise, let n be the little-endian unsigned integer denoted by the next - m - 59 bytes. The next 1 + n bytes after that are literal bytes. - -For copy tags, length bytes are copied from offset bytes ago, in the style of -Lempel-Ziv compression algorithms. In particular: - - For l == 1, the offset ranges in [0, 1<<11) and the length in [4, 12). - The length is 4 + the low 3 bits of m. The high 3 bits of m form bits 8-10 - of the offset. The next byte is bits 0-7 of the offset. - - For l == 2, the offset ranges in [0, 1<<16) and the length in [1, 65). - The length is 1 + m. The offset is the little-endian unsigned integer - denoted by the next 2 bytes. - - For l == 3, this tag is a legacy format that is no longer issued by most - encoders. Nonetheless, the offset ranges in [0, 1<<32) and the length in - [1, 65). The length is 1 + m. The offset is the little-endian unsigned - integer denoted by the next 4 bytes. -*/ -const ( - tagLiteral = 0x00 - tagCopy1 = 0x01 - tagCopy2 = 0x02 - tagCopy4 = 0x03 -) - -const ( - checksumSize = 4 - chunkHeaderSize = 4 - magicChunk = "\xff\x06\x00\x00" + magicBody - magicBody = "sNaPpY" - - // maxBlockSize is the maximum size of the input to encodeBlock. It is not - // part of the wire format per se, but some parts of the encoder assume - // that an offset fits into a uint16. - // - // Also, for the framing format (Writer type instead of Encode function), - // https://github.com/google/snappy/blob/master/framing_format.txt says - // that "the uncompressed data in a chunk must be no longer than 65536 - // bytes". 
- maxBlockSize = 65536 - - // maxEncodedLenOfMaxBlockSize equals MaxEncodedLen(maxBlockSize), but is - // hard coded to be a const instead of a variable, so that obufLen can also - // be a const. Their equivalence is confirmed by - // TestMaxEncodedLenOfMaxBlockSize. - maxEncodedLenOfMaxBlockSize = 76490 - - obufHeaderLen = len(magicChunk) + checksumSize + chunkHeaderSize - obufLen = obufHeaderLen + maxEncodedLenOfMaxBlockSize -) - -const ( - chunkTypeCompressedData = 0x00 - chunkTypeUncompressedData = 0x01 - chunkTypePadding = 0xfe - chunkTypeStreamIdentifier = 0xff -) - -var crcTable = crc32.MakeTable(crc32.Castagnoli) - -// crc implements the checksum specified in section 3 of -// https://github.com/google/snappy/blob/master/framing_format.txt -func crc(b []byte) uint32 { - c := crc32.Update(0, crcTable, b) - return uint32(c>>15|c<<17) + 0xa282ead8 -} diff --git a/vendor/github.com/klauspost/compress/flate/deflate.go b/vendor/github.com/klauspost/compress/flate/deflate.go deleted file mode 100644 index de912e187c..0000000000 --- a/vendor/github.com/klauspost/compress/flate/deflate.go +++ /dev/null @@ -1,1017 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Copyright (c) 2015 Klaus Post -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package flate - -import ( - "encoding/binary" - "errors" - "fmt" - "io" - "math" -) - -const ( - NoCompression = 0 - BestSpeed = 1 - BestCompression = 9 - DefaultCompression = -1 - - // HuffmanOnly disables Lempel-Ziv match searching and only performs Huffman - // entropy encoding. This mode is useful in compressing data that has - // already been compressed with an LZ style algorithm (e.g. Snappy or LZ4) - // that lacks an entropy encoder. Compression gains are achieved when - // certain bytes in the input stream occur more frequently than others. 
- // - // Note that HuffmanOnly produces a compressed output that is - // RFC 1951 compliant. That is, any valid DEFLATE decompressor will - // continue to be able to decompress this output. - HuffmanOnly = -2 - ConstantCompression = HuffmanOnly // compatibility alias. - - logWindowSize = 15 - windowSize = 1 << logWindowSize - windowMask = windowSize - 1 - logMaxOffsetSize = 15 // Standard DEFLATE - minMatchLength = 4 // The smallest match that the compressor looks for - maxMatchLength = 258 // The longest match for the compressor - minOffsetSize = 1 // The shortest offset that makes any sense - - // The maximum number of tokens we will encode at the time. - // Smaller sizes usually creates less optimal blocks. - // Bigger can make context switching slow. - // We use this for levels 7-9, so we make it big. - maxFlateBlockTokens = 1 << 15 - maxStoreBlockSize = 65535 - hashBits = 17 // After 17 performance degrades - hashSize = 1 << hashBits - hashMask = (1 << hashBits) - 1 - hashShift = (hashBits + minMatchLength - 1) / minMatchLength - maxHashOffset = 1 << 28 - - skipNever = math.MaxInt32 - - debugDeflate = false -) - -type compressionLevel struct { - good, lazy, nice, chain, fastSkipHashing, level int -} - -// Compression levels have been rebalanced from zlib deflate defaults -// to give a bigger spread in speed and compression. -// See https://blog.klauspost.com/rebalancing-deflate-compression-levels/ -var levels = []compressionLevel{ - {}, // 0 - // Level 1-6 uses specialized algorithm - values not used - {0, 0, 0, 0, 0, 1}, - {0, 0, 0, 0, 0, 2}, - {0, 0, 0, 0, 0, 3}, - {0, 0, 0, 0, 0, 4}, - {0, 0, 0, 0, 0, 5}, - {0, 0, 0, 0, 0, 6}, - // Levels 7-9 use increasingly more lazy matching - // and increasingly stringent conditions for "good enough". - {8, 12, 16, 24, skipNever, 7}, - {16, 30, 40, 64, skipNever, 8}, - {32, 258, 258, 1024, skipNever, 9}, -} - -// advancedState contains state for the advanced levels, with bigger hash tables, etc. 
-type advancedState struct { - // deflate state - length int - offset int - maxInsertIndex int - chainHead int - hashOffset int - - ii uint16 // position of last match, intended to overflow to reset. - - // input window: unprocessed data is window[index:windowEnd] - index int - hashMatch [maxMatchLength + minMatchLength]uint32 - - // Input hash chains - // hashHead[hashValue] contains the largest inputIndex with the specified hash value - // If hashHead[hashValue] is within the current window, then - // hashPrev[hashHead[hashValue] & windowMask] contains the previous index - // with the same hash value. - hashHead [hashSize]uint32 - hashPrev [windowSize]uint32 -} - -type compressor struct { - compressionLevel - - h *huffmanEncoder - w *huffmanBitWriter - - // compression algorithm - fill func(*compressor, []byte) int // copy data to window - step func(*compressor) // process window - - window []byte - windowEnd int - blockStart int // window index where current tokens start - err error - - // queued output tokens - tokens tokens - fast fastEnc - state *advancedState - - sync bool // requesting flush - byteAvailable bool // if true, still need to process window[index-1]. -} - -func (d *compressor) fillDeflate(b []byte) int { - s := d.state - if s.index >= 2*windowSize-(minMatchLength+maxMatchLength) { - // shift the window by windowSize - //copy(d.window[:], d.window[windowSize:2*windowSize]) - *(*[windowSize]byte)(d.window) = *(*[windowSize]byte)(d.window[windowSize:]) - s.index -= windowSize - d.windowEnd -= windowSize - if d.blockStart >= windowSize { - d.blockStart -= windowSize - } else { - d.blockStart = math.MaxInt32 - } - s.hashOffset += windowSize - if s.hashOffset > maxHashOffset { - delta := s.hashOffset - 1 - s.hashOffset -= delta - s.chainHead -= delta - // Iterate over slices instead of arrays to avoid copying - // the entire table onto the stack (Issue #18625). 
- for i, v := range s.hashPrev[:] { - if int(v) > delta { - s.hashPrev[i] = uint32(int(v) - delta) - } else { - s.hashPrev[i] = 0 - } - } - for i, v := range s.hashHead[:] { - if int(v) > delta { - s.hashHead[i] = uint32(int(v) - delta) - } else { - s.hashHead[i] = 0 - } - } - } - } - n := copy(d.window[d.windowEnd:], b) - d.windowEnd += n - return n -} - -func (d *compressor) writeBlock(tok *tokens, index int, eof bool) error { - if index > 0 || eof { - var window []byte - if d.blockStart <= index { - window = d.window[d.blockStart:index] - } - d.blockStart = index - //d.w.writeBlock(tok, eof, window) - d.w.writeBlockDynamic(tok, eof, window, d.sync) - return d.w.err - } - return nil -} - -// writeBlockSkip writes the current block and uses the number of tokens -// to determine if the block should be stored on no matches, or -// only huffman encoded. -func (d *compressor) writeBlockSkip(tok *tokens, index int, eof bool) error { - if index > 0 || eof { - if d.blockStart <= index { - window := d.window[d.blockStart:index] - // If we removed less than a 64th of all literals - // we huffman compress the block. - if int(tok.n) > len(window)-int(tok.n>>6) { - d.w.writeBlockHuff(eof, window, d.sync) - } else { - // Write a dynamic huffman block. - d.w.writeBlockDynamic(tok, eof, window, d.sync) - } - } else { - d.w.writeBlock(tok, eof, nil) - } - d.blockStart = index - return d.w.err - } - return nil -} - -// fillWindow will fill the current window with the supplied -// dictionary and calculate all hashes. -// This is much faster than doing a full encode. -// Should only be used after a start/reset. -func (d *compressor) fillWindow(b []byte) { - // Do not fill window if we are in store-only or huffman mode. 
- if d.level <= 0 { - return - } - if d.fast != nil { - // encode the last data, but discard the result - if len(b) > maxMatchOffset { - b = b[len(b)-maxMatchOffset:] - } - d.fast.Encode(&d.tokens, b) - d.tokens.Reset() - return - } - s := d.state - // If we are given too much, cut it. - if len(b) > windowSize { - b = b[len(b)-windowSize:] - } - // Add all to window. - n := copy(d.window[d.windowEnd:], b) - - // Calculate 256 hashes at the time (more L1 cache hits) - loops := (n + 256 - minMatchLength) / 256 - for j := 0; j < loops; j++ { - startindex := j * 256 - end := startindex + 256 + minMatchLength - 1 - if end > n { - end = n - } - tocheck := d.window[startindex:end] - dstSize := len(tocheck) - minMatchLength + 1 - - if dstSize <= 0 { - continue - } - - dst := s.hashMatch[:dstSize] - bulkHash4(tocheck, dst) - var newH uint32 - for i, val := range dst { - di := i + startindex - newH = val & hashMask - // Get previous value with the same hash. - // Our chain should point to the previous value. - s.hashPrev[di&windowMask] = s.hashHead[newH] - // Set the head of the hash chain to us. - s.hashHead[newH] = uint32(di + s.hashOffset) - } - } - // Update window information. - d.windowEnd += n - s.index = n -} - -// Try to find a match starting at index whose length is greater than prevSize. -// We only look at chainCount possibilities before giving up. -// pos = s.index, prevHead = s.chainHead-s.hashOffset, prevLength=minMatchLength-1, lookahead -func (d *compressor) findMatch(pos int, prevHead int, lookahead int) (length, offset int, ok bool) { - minMatchLook := maxMatchLength - if lookahead < minMatchLook { - minMatchLook = lookahead - } - - win := d.window[0 : pos+minMatchLook] - - // We quit when we get a match that's at least nice long - nice := len(win) - pos - if d.nice < nice { - nice = d.nice - } - - // If we've got a match that's good enough, only look in 1/4 the chain. 
- tries := d.chain - length = minMatchLength - 1 - - wEnd := win[pos+length] - wPos := win[pos:] - minIndex := pos - windowSize - if minIndex < 0 { - minIndex = 0 - } - offset = 0 - - if d.chain < 100 { - for i := prevHead; tries > 0; tries-- { - if wEnd == win[i+length] { - n := matchLen(win[i:i+minMatchLook], wPos) - if n > length { - length = n - offset = pos - i - ok = true - if n >= nice { - // The match is good enough that we don't try to find a better one. - break - } - wEnd = win[pos+n] - } - } - if i <= minIndex { - // hashPrev[i & windowMask] has already been overwritten, so stop now. - break - } - i = int(d.state.hashPrev[i&windowMask]) - d.state.hashOffset - if i < minIndex { - break - } - } - return - } - - // Minimum gain to accept a match. - cGain := 4 - - // Some like it higher (CSV), some like it lower (JSON) - const baseCost = 3 - // Base is 4 bytes at with an additional cost. - // Matches must be better than this. - - for i := prevHead; tries > 0; tries-- { - if wEnd == win[i+length] { - n := matchLen(win[i:i+minMatchLook], wPos) - if n > length { - // Calculate gain. Estimate - newGain := d.h.bitLengthRaw(wPos[:n]) - int(offsetExtraBits[offsetCode(uint32(pos-i))]) - baseCost - int(lengthExtraBits[lengthCodes[(n-3)&255]]) - - //fmt.Println("gain:", newGain, "prev:", cGain, "raw:", d.h.bitLengthRaw(wPos[:n]), "this-len:", n, "prev-len:", length) - if newGain > cGain { - length = n - offset = pos - i - cGain = newGain - ok = true - if n >= nice { - // The match is good enough that we don't try to find a better one. - break - } - wEnd = win[pos+n] - } - } - } - if i <= minIndex { - // hashPrev[i & windowMask] has already been overwritten, so stop now. 
- break - } - i = int(d.state.hashPrev[i&windowMask]) - d.state.hashOffset - if i < minIndex { - break - } - } - return -} - -func (d *compressor) writeStoredBlock(buf []byte) error { - if d.w.writeStoredHeader(len(buf), false); d.w.err != nil { - return d.w.err - } - d.w.writeBytes(buf) - return d.w.err -} - -// hash4 returns a hash representation of the first 4 bytes -// of the supplied slice. -// The caller must ensure that len(b) >= 4. -func hash4(b []byte) uint32 { - return hash4u(binary.LittleEndian.Uint32(b), hashBits) -} - -// hash4 returns the hash of u to fit in a hash table with h bits. -// Preferably h should be a constant and should always be <32. -func hash4u(u uint32, h uint8) uint32 { - return (u * prime4bytes) >> (32 - h) -} - -// bulkHash4 will compute hashes using the same -// algorithm as hash4 -func bulkHash4(b []byte, dst []uint32) { - if len(b) < 4 { - return - } - hb := binary.LittleEndian.Uint32(b) - - dst[0] = hash4u(hb, hashBits) - end := len(b) - 4 + 1 - for i := 1; i < end; i++ { - hb = (hb >> 8) | uint32(b[i+3])<<24 - dst[i] = hash4u(hb, hashBits) - } -} - -func (d *compressor) initDeflate() { - d.window = make([]byte, 2*windowSize) - d.byteAvailable = false - d.err = nil - if d.state == nil { - return - } - s := d.state - s.index = 0 - s.hashOffset = 1 - s.length = minMatchLength - 1 - s.offset = 0 - s.chainHead = -1 -} - -// deflateLazy is the same as deflate, but with d.fastSkipHashing == skipNever, -// meaning it always has lazy matching on. -func (d *compressor) deflateLazy() { - s := d.state - // Sanity enables additional runtime tests. - // It's intended to be used during development - // to supplement the currently ad-hoc unit tests. - const sanity = debugDeflate - - if d.windowEnd-s.index < minMatchLength+maxMatchLength && !d.sync { - return - } - if d.windowEnd != s.index && d.chain > 100 { - // Get literal huffman coder. 
- if d.h == nil { - d.h = newHuffmanEncoder(maxFlateBlockTokens) - } - var tmp [256]uint16 - for _, v := range d.window[s.index:d.windowEnd] { - tmp[v]++ - } - d.h.generate(tmp[:], 15) - } - - s.maxInsertIndex = d.windowEnd - (minMatchLength - 1) - - for { - if sanity && s.index > d.windowEnd { - panic("index > windowEnd") - } - lookahead := d.windowEnd - s.index - if lookahead < minMatchLength+maxMatchLength { - if !d.sync { - return - } - if sanity && s.index > d.windowEnd { - panic("index > windowEnd") - } - if lookahead == 0 { - // Flush current output block if any. - if d.byteAvailable { - // There is still one pending token that needs to be flushed - d.tokens.AddLiteral(d.window[s.index-1]) - d.byteAvailable = false - } - if d.tokens.n > 0 { - if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil { - return - } - d.tokens.Reset() - } - return - } - } - if s.index < s.maxInsertIndex { - // Update the hash - hash := hash4(d.window[s.index:]) - ch := s.hashHead[hash] - s.chainHead = int(ch) - s.hashPrev[s.index&windowMask] = ch - s.hashHead[hash] = uint32(s.index + s.hashOffset) - } - prevLength := s.length - prevOffset := s.offset - s.length = minMatchLength - 1 - s.offset = 0 - minIndex := s.index - windowSize - if minIndex < 0 { - minIndex = 0 - } - - if s.chainHead-s.hashOffset >= minIndex && lookahead > prevLength && prevLength < d.lazy { - if newLength, newOffset, ok := d.findMatch(s.index, s.chainHead-s.hashOffset, lookahead); ok { - s.length = newLength - s.offset = newOffset - } - } - - if prevLength >= minMatchLength && s.length <= prevLength { - // No better match, but check for better match at end... - // - // Skip forward a number of bytes. - // Offset of 2 seems to yield best results. 3 is sometimes better. 
- const checkOff = 2 - - // Check all, except full length - if prevLength < maxMatchLength-checkOff { - prevIndex := s.index - 1 - if prevIndex+prevLength < s.maxInsertIndex { - end := lookahead - if lookahead > maxMatchLength+checkOff { - end = maxMatchLength + checkOff - } - end += prevIndex - - // Hash at match end. - h := hash4(d.window[prevIndex+prevLength:]) - ch2 := int(s.hashHead[h]) - s.hashOffset - prevLength - if prevIndex-ch2 != prevOffset && ch2 > minIndex+checkOff { - length := matchLen(d.window[prevIndex+checkOff:end], d.window[ch2+checkOff:]) - // It seems like a pure length metric is best. - if length > prevLength { - prevLength = length - prevOffset = prevIndex - ch2 - - // Extend back... - for i := checkOff - 1; i >= 0; i-- { - if prevLength >= maxMatchLength || d.window[prevIndex+i] != d.window[ch2+i] { - // Emit tokens we "owe" - for j := 0; j <= i; j++ { - d.tokens.AddLiteral(d.window[prevIndex+j]) - if d.tokens.n == maxFlateBlockTokens { - // The block includes the current character - if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil { - return - } - d.tokens.Reset() - } - s.index++ - if s.index < s.maxInsertIndex { - h := hash4(d.window[s.index:]) - ch := s.hashHead[h] - s.chainHead = int(ch) - s.hashPrev[s.index&windowMask] = ch - s.hashHead[h] = uint32(s.index + s.hashOffset) - } - } - break - } else { - prevLength++ - } - } - } else if false { - // Check one further ahead. - // Only rarely better, disabled for now. - prevIndex++ - h := hash4(d.window[prevIndex+prevLength:]) - ch2 := int(s.hashHead[h]) - s.hashOffset - prevLength - if prevIndex-ch2 != prevOffset && ch2 > minIndex+checkOff { - length := matchLen(d.window[prevIndex+checkOff:end], d.window[ch2+checkOff:]) - // It seems like a pure length metric is best. - if length > prevLength+checkOff { - prevLength = length - prevOffset = prevIndex - ch2 - prevIndex-- - - // Extend back... 
- for i := checkOff; i >= 0; i-- { - if prevLength >= maxMatchLength || d.window[prevIndex+i] != d.window[ch2+i-1] { - // Emit tokens we "owe" - for j := 0; j <= i; j++ { - d.tokens.AddLiteral(d.window[prevIndex+j]) - if d.tokens.n == maxFlateBlockTokens { - // The block includes the current character - if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil { - return - } - d.tokens.Reset() - } - s.index++ - if s.index < s.maxInsertIndex { - h := hash4(d.window[s.index:]) - ch := s.hashHead[h] - s.chainHead = int(ch) - s.hashPrev[s.index&windowMask] = ch - s.hashHead[h] = uint32(s.index + s.hashOffset) - } - } - break - } else { - prevLength++ - } - } - } - } - } - } - } - } - // There was a match at the previous step, and the current match is - // not better. Output the previous match. - d.tokens.AddMatch(uint32(prevLength-3), uint32(prevOffset-minOffsetSize)) - - // Insert in the hash table all strings up to the end of the match. - // index and index-1 are already inserted. If there is not enough - // lookahead, the last two strings are not inserted into the hash - // table. - newIndex := s.index + prevLength - 1 - // Calculate missing hashes - end := newIndex - if end > s.maxInsertIndex { - end = s.maxInsertIndex - } - end += minMatchLength - 1 - startindex := s.index + 1 - if startindex > s.maxInsertIndex { - startindex = s.maxInsertIndex - } - tocheck := d.window[startindex:end] - dstSize := len(tocheck) - minMatchLength + 1 - if dstSize > 0 { - dst := s.hashMatch[:dstSize] - bulkHash4(tocheck, dst) - var newH uint32 - for i, val := range dst { - di := i + startindex - newH = val & hashMask - // Get previous value with the same hash. - // Our chain should point to the previous value. - s.hashPrev[di&windowMask] = s.hashHead[newH] - // Set the head of the hash chain to us. 
- s.hashHead[newH] = uint32(di + s.hashOffset) - } - } - - s.index = newIndex - d.byteAvailable = false - s.length = minMatchLength - 1 - if d.tokens.n == maxFlateBlockTokens { - // The block includes the current character - if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil { - return - } - d.tokens.Reset() - } - s.ii = 0 - } else { - // Reset, if we got a match this run. - if s.length >= minMatchLength { - s.ii = 0 - } - // We have a byte waiting. Emit it. - if d.byteAvailable { - s.ii++ - d.tokens.AddLiteral(d.window[s.index-1]) - if d.tokens.n == maxFlateBlockTokens { - if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil { - return - } - d.tokens.Reset() - } - s.index++ - - // If we have a long run of no matches, skip additional bytes - // Resets when s.ii overflows after 64KB. - if n := int(s.ii) - d.chain; n > 0 { - n = 1 + int(n>>6) - for j := 0; j < n; j++ { - if s.index >= d.windowEnd-1 { - break - } - d.tokens.AddLiteral(d.window[s.index-1]) - if d.tokens.n == maxFlateBlockTokens { - if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil { - return - } - d.tokens.Reset() - } - // Index... 
- if s.index < s.maxInsertIndex { - h := hash4(d.window[s.index:]) - ch := s.hashHead[h] - s.chainHead = int(ch) - s.hashPrev[s.index&windowMask] = ch - s.hashHead[h] = uint32(s.index + s.hashOffset) - } - s.index++ - } - // Flush last byte - d.tokens.AddLiteral(d.window[s.index-1]) - d.byteAvailable = false - // s.length = minMatchLength - 1 // not needed, since s.ii is reset above, so it should never be > minMatchLength - if d.tokens.n == maxFlateBlockTokens { - if d.err = d.writeBlock(&d.tokens, s.index, false); d.err != nil { - return - } - d.tokens.Reset() - } - } - } else { - s.index++ - d.byteAvailable = true - } - } - } -} - -func (d *compressor) store() { - if d.windowEnd > 0 && (d.windowEnd == maxStoreBlockSize || d.sync) { - d.err = d.writeStoredBlock(d.window[:d.windowEnd]) - d.windowEnd = 0 - } -} - -// fillWindow will fill the buffer with data for huffman-only compression. -// The number of bytes copied is returned. -func (d *compressor) fillBlock(b []byte) int { - n := copy(d.window[d.windowEnd:], b) - d.windowEnd += n - return n -} - -// storeHuff will compress and store the currently added data, -// if enough has been accumulated or we at the end of the stream. -// Any error that occurred will be in d.err -func (d *compressor) storeHuff() { - if d.windowEnd < len(d.window) && !d.sync || d.windowEnd == 0 { - return - } - d.w.writeBlockHuff(false, d.window[:d.windowEnd], d.sync) - d.err = d.w.err - d.windowEnd = 0 -} - -// storeFast will compress and store the currently added data, -// if enough has been accumulated or we at the end of the stream. -// Any error that occurred will be in d.err -func (d *compressor) storeFast() { - // We only compress if we have maxStoreBlockSize. - if d.windowEnd < len(d.window) { - if !d.sync { - return - } - // Handle extremely small sizes. 
- if d.windowEnd < 128 { - if d.windowEnd == 0 { - return - } - if d.windowEnd <= 32 { - d.err = d.writeStoredBlock(d.window[:d.windowEnd]) - } else { - d.w.writeBlockHuff(false, d.window[:d.windowEnd], true) - d.err = d.w.err - } - d.tokens.Reset() - d.windowEnd = 0 - d.fast.Reset() - return - } - } - - d.fast.Encode(&d.tokens, d.window[:d.windowEnd]) - // If we made zero matches, store the block as is. - if d.tokens.n == 0 { - d.err = d.writeStoredBlock(d.window[:d.windowEnd]) - // If we removed less than 1/16th, huffman compress the block. - } else if int(d.tokens.n) > d.windowEnd-(d.windowEnd>>4) { - d.w.writeBlockHuff(false, d.window[:d.windowEnd], d.sync) - d.err = d.w.err - } else { - d.w.writeBlockDynamic(&d.tokens, false, d.window[:d.windowEnd], d.sync) - d.err = d.w.err - } - d.tokens.Reset() - d.windowEnd = 0 -} - -// write will add input byte to the stream. -// Unless an error occurs all bytes will be consumed. -func (d *compressor) write(b []byte) (n int, err error) { - if d.err != nil { - return 0, d.err - } - n = len(b) - for len(b) > 0 { - if d.windowEnd == len(d.window) || d.sync { - d.step(d) - } - b = b[d.fill(d, b):] - if d.err != nil { - return 0, d.err - } - } - return n, d.err -} - -func (d *compressor) syncFlush() error { - d.sync = true - if d.err != nil { - return d.err - } - d.step(d) - if d.err == nil { - d.w.writeStoredHeader(0, false) - d.w.flush() - d.err = d.w.err - } - d.sync = false - return d.err -} - -func (d *compressor) init(w io.Writer, level int) (err error) { - d.w = newHuffmanBitWriter(w) - - switch { - case level == NoCompression: - d.window = make([]byte, maxStoreBlockSize) - d.fill = (*compressor).fillBlock - d.step = (*compressor).store - case level == ConstantCompression: - d.w.logNewTablePenalty = 10 - d.window = make([]byte, 32<<10) - d.fill = (*compressor).fillBlock - d.step = (*compressor).storeHuff - case level == DefaultCompression: - level = 5 - fallthrough - case level >= 1 && level <= 6: - 
d.w.logNewTablePenalty = 7 - d.fast = newFastEnc(level) - d.window = make([]byte, maxStoreBlockSize) - d.fill = (*compressor).fillBlock - d.step = (*compressor).storeFast - case 7 <= level && level <= 9: - d.w.logNewTablePenalty = 8 - d.state = &advancedState{} - d.compressionLevel = levels[level] - d.initDeflate() - d.fill = (*compressor).fillDeflate - d.step = (*compressor).deflateLazy - case -level >= MinCustomWindowSize && -level <= MaxCustomWindowSize: - d.w.logNewTablePenalty = 7 - d.fast = &fastEncL5Window{maxOffset: int32(-level), cur: maxStoreBlockSize} - d.window = make([]byte, maxStoreBlockSize) - d.fill = (*compressor).fillBlock - d.step = (*compressor).storeFast - default: - return fmt.Errorf("flate: invalid compression level %d: want value in range [-2, 9]", level) - } - d.level = level - return nil -} - -// reset the state of the compressor. -func (d *compressor) reset(w io.Writer) { - d.w.reset(w) - d.sync = false - d.err = nil - // We only need to reset a few things for Snappy. - if d.fast != nil { - d.fast.Reset() - d.windowEnd = 0 - d.tokens.Reset() - return - } - switch d.compressionLevel.chain { - case 0: - // level was NoCompression or ConstantCompresssion. - d.windowEnd = 0 - default: - s := d.state - s.chainHead = -1 - for i := range s.hashHead { - s.hashHead[i] = 0 - } - for i := range s.hashPrev { - s.hashPrev[i] = 0 - } - s.hashOffset = 1 - s.index, d.windowEnd = 0, 0 - d.blockStart, d.byteAvailable = 0, false - d.tokens.Reset() - s.length = minMatchLength - 1 - s.offset = 0 - s.ii = 0 - s.maxInsertIndex = 0 - } -} - -func (d *compressor) close() error { - if d.err != nil { - return d.err - } - d.sync = true - d.step(d) - if d.err != nil { - return d.err - } - if d.w.writeStoredHeader(0, true); d.w.err != nil { - return d.w.err - } - d.w.flush() - d.w.reset(nil) - return d.w.err -} - -// NewWriter returns a new Writer compressing data at the given level. 
-// Following zlib, levels range from 1 (BestSpeed) to 9 (BestCompression); -// higher levels typically run slower but compress more. -// Level 0 (NoCompression) does not attempt any compression; it only adds the -// necessary DEFLATE framing. -// Level -1 (DefaultCompression) uses the default compression level. -// Level -2 (ConstantCompression) will use Huffman compression only, giving -// a very fast compression for all types of input, but sacrificing considerable -// compression efficiency. -// -// If level is in the range [-2, 9] then the error returned will be nil. -// Otherwise the error returned will be non-nil. -func NewWriter(w io.Writer, level int) (*Writer, error) { - var dw Writer - if err := dw.d.init(w, level); err != nil { - return nil, err - } - return &dw, nil -} - -// NewWriterDict is like NewWriter but initializes the new -// Writer with a preset dictionary. The returned Writer behaves -// as if the dictionary had been written to it without producing -// any compressed output. The compressed data written to w -// can only be decompressed by a Reader initialized with the -// same dictionary. -func NewWriterDict(w io.Writer, level int, dict []byte) (*Writer, error) { - zw, err := NewWriter(w, level) - if err != nil { - return nil, err - } - zw.d.fillWindow(dict) - zw.dict = append(zw.dict, dict...) // duplicate dictionary for Reset method. - return zw, err -} - -// MinCustomWindowSize is the minimum window size that can be sent to NewWriterWindow. -const MinCustomWindowSize = 32 - -// MaxCustomWindowSize is the maximum custom window that can be sent to NewWriterWindow. -const MaxCustomWindowSize = windowSize - -// NewWriterWindow returns a new Writer compressing data with a custom window size. -// windowSize must be from MinCustomWindowSize to MaxCustomWindowSize. 
-func NewWriterWindow(w io.Writer, windowSize int) (*Writer, error) { - if windowSize < MinCustomWindowSize { - return nil, errors.New("flate: requested window size less than MinWindowSize") - } - if windowSize > MaxCustomWindowSize { - return nil, errors.New("flate: requested window size bigger than MaxCustomWindowSize") - } - var dw Writer - if err := dw.d.init(w, -windowSize); err != nil { - return nil, err - } - return &dw, nil -} - -// A Writer takes data written to it and writes the compressed -// form of that data to an underlying writer (see NewWriter). -type Writer struct { - d compressor - dict []byte -} - -// Write writes data to w, which will eventually write the -// compressed form of data to its underlying writer. -func (w *Writer) Write(data []byte) (n int, err error) { - return w.d.write(data) -} - -// Flush flushes any pending data to the underlying writer. -// It is useful mainly in compressed network protocols, to ensure that -// a remote reader has enough data to reconstruct a packet. -// Flush does not return until the data has been written. -// Calling Flush when there is no pending data still causes the Writer -// to emit a sync marker of at least 4 bytes. -// If the underlying writer returns an error, Flush returns that error. -// -// In the terminology of the zlib library, Flush is equivalent to Z_SYNC_FLUSH. -func (w *Writer) Flush() error { - // For more about flushing: - // http://www.bolet.org/~pornin/deflate-flush.html - return w.d.syncFlush() -} - -// Close flushes and closes the writer. -func (w *Writer) Close() error { - return w.d.close() -} - -// Reset discards the writer's state and makes it equivalent to -// the result of NewWriter or NewWriterDict called with dst -// and w's level and dictionary. 
-func (w *Writer) Reset(dst io.Writer) { - if len(w.dict) > 0 { - // w was created with NewWriterDict - w.d.reset(dst) - if dst != nil { - w.d.fillWindow(w.dict) - } - } else { - // w was created with NewWriter - w.d.reset(dst) - } -} - -// ResetDict discards the writer's state and makes it equivalent to -// the result of NewWriter or NewWriterDict called with dst -// and w's level, but sets a specific dictionary. -func (w *Writer) ResetDict(dst io.Writer, dict []byte) { - w.dict = dict - w.d.reset(dst) - w.d.fillWindow(w.dict) -} diff --git a/vendor/github.com/klauspost/compress/flate/dict_decoder.go b/vendor/github.com/klauspost/compress/flate/dict_decoder.go deleted file mode 100644 index bb36351a5a..0000000000 --- a/vendor/github.com/klauspost/compress/flate/dict_decoder.go +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package flate - -// dictDecoder implements the LZ77 sliding dictionary as used in decompression. -// LZ77 decompresses data through sequences of two forms of commands: -// -// - Literal insertions: Runs of one or more symbols are inserted into the data -// stream as is. This is accomplished through the writeByte method for a -// single symbol, or combinations of writeSlice/writeMark for multiple symbols. -// Any valid stream must start with a literal insertion if no preset dictionary -// is used. -// -// - Backward copies: Runs of one or more symbols are copied from previously -// emitted data. Backward copies come as the tuple (dist, length) where dist -// determines how far back in the stream to copy from and length determines how -// many bytes to copy. Note that it is valid for the length to be greater than -// the distance. Since LZ77 uses forward copies, that situation is used to -// perform a form of run-length encoding on repeated runs of symbols. 
-// The writeCopy and tryWriteCopy are used to implement this command. -// -// For performance reasons, this implementation performs little to no sanity -// checks about the arguments. As such, the invariants documented for each -// method call must be respected. -type dictDecoder struct { - hist []byte // Sliding window history - - // Invariant: 0 <= rdPos <= wrPos <= len(hist) - wrPos int // Current output position in buffer - rdPos int // Have emitted hist[:rdPos] already - full bool // Has a full window length been written yet? -} - -// init initializes dictDecoder to have a sliding window dictionary of the given -// size. If a preset dict is provided, it will initialize the dictionary with -// the contents of dict. -func (dd *dictDecoder) init(size int, dict []byte) { - *dd = dictDecoder{hist: dd.hist} - - if cap(dd.hist) < size { - dd.hist = make([]byte, size) - } - dd.hist = dd.hist[:size] - - if len(dict) > len(dd.hist) { - dict = dict[len(dict)-len(dd.hist):] - } - dd.wrPos = copy(dd.hist, dict) - if dd.wrPos == len(dd.hist) { - dd.wrPos = 0 - dd.full = true - } - dd.rdPos = dd.wrPos -} - -// histSize reports the total amount of historical data in the dictionary. -func (dd *dictDecoder) histSize() int { - if dd.full { - return len(dd.hist) - } - return dd.wrPos -} - -// availRead reports the number of bytes that can be flushed by readFlush. -func (dd *dictDecoder) availRead() int { - return dd.wrPos - dd.rdPos -} - -// availWrite reports the available amount of output buffer space. -func (dd *dictDecoder) availWrite() int { - return len(dd.hist) - dd.wrPos -} - -// writeSlice returns a slice of the available buffer to write data to. -// -// This invariant will be kept: len(s) <= availWrite() -func (dd *dictDecoder) writeSlice() []byte { - return dd.hist[dd.wrPos:] -} - -// writeMark advances the writer pointer by cnt. 
-// -// This invariant must be kept: 0 <= cnt <= availWrite() -func (dd *dictDecoder) writeMark(cnt int) { - dd.wrPos += cnt -} - -// writeByte writes a single byte to the dictionary. -// -// This invariant must be kept: 0 < availWrite() -func (dd *dictDecoder) writeByte(c byte) { - dd.hist[dd.wrPos] = c - dd.wrPos++ -} - -// writeCopy copies a string at a given (dist, length) to the output. -// This returns the number of bytes copied and may be less than the requested -// length if the available space in the output buffer is too small. -// -// This invariant must be kept: 0 < dist <= histSize() -func (dd *dictDecoder) writeCopy(dist, length int) int { - dstBase := dd.wrPos - dstPos := dstBase - srcPos := dstPos - dist - endPos := dstPos + length - if endPos > len(dd.hist) { - endPos = len(dd.hist) - } - - // Copy non-overlapping section after destination position. - // - // This section is non-overlapping in that the copy length for this section - // is always less than or equal to the backwards distance. This can occur - // if a distance refers to data that wraps-around in the buffer. - // Thus, a backwards copy is performed here; that is, the exact bytes in - // the source prior to the copy is placed in the destination. - if srcPos < 0 { - srcPos += len(dd.hist) - dstPos += copy(dd.hist[dstPos:endPos], dd.hist[srcPos:]) - srcPos = 0 - } - - // Copy possibly overlapping section before destination position. - // - // This section can overlap if the copy length for this section is larger - // than the backwards distance. This is allowed by LZ77 so that repeated - // strings can be succinctly represented using (dist, length) pairs. - // Thus, a forwards copy is performed here; that is, the bytes copied is - // possibly dependent on the resulting bytes in the destination as the copy - // progresses along. 
This is functionally equivalent to the following: - // - // for i := 0; i < endPos-dstPos; i++ { - // dd.hist[dstPos+i] = dd.hist[srcPos+i] - // } - // dstPos = endPos - // - for dstPos < endPos { - dstPos += copy(dd.hist[dstPos:endPos], dd.hist[srcPos:dstPos]) - } - - dd.wrPos = dstPos - return dstPos - dstBase -} - -// tryWriteCopy tries to copy a string at a given (distance, length) to the -// output. This specialized version is optimized for short distances. -// -// This method is designed to be inlined for performance reasons. -// -// This invariant must be kept: 0 < dist <= histSize() -func (dd *dictDecoder) tryWriteCopy(dist, length int) int { - dstPos := dd.wrPos - endPos := dstPos + length - if dstPos < dist || endPos > len(dd.hist) { - return 0 - } - dstBase := dstPos - srcPos := dstPos - dist - - // Copy possibly overlapping section before destination position. -loop: - dstPos += copy(dd.hist[dstPos:endPos], dd.hist[srcPos:dstPos]) - if dstPos < endPos { - goto loop // Avoid for-loop so that this function can be inlined - } - - dd.wrPos = dstPos - return dstPos - dstBase -} - -// readFlush returns a slice of the historical buffer that is ready to be -// emitted to the user. The data returned by readFlush must be fully consumed -// before calling any other dictDecoder methods. -func (dd *dictDecoder) readFlush() []byte { - toRead := dd.hist[dd.rdPos:dd.wrPos] - dd.rdPos = dd.wrPos - if dd.wrPos == len(dd.hist) { - dd.wrPos, dd.rdPos = 0, 0 - dd.full = true - } - return toRead -} diff --git a/vendor/github.com/klauspost/compress/flate/fast_encoder.go b/vendor/github.com/klauspost/compress/flate/fast_encoder.go deleted file mode 100644 index c8124b5c49..0000000000 --- a/vendor/github.com/klauspost/compress/flate/fast_encoder.go +++ /dev/null @@ -1,193 +0,0 @@ -// Copyright 2011 The Snappy-Go Authors. All rights reserved. -// Modified for deflate by Klaus Post (c) 2015. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package flate - -import ( - "encoding/binary" - "fmt" -) - -type fastEnc interface { - Encode(dst *tokens, src []byte) - Reset() -} - -func newFastEnc(level int) fastEnc { - switch level { - case 1: - return &fastEncL1{fastGen: fastGen{cur: maxStoreBlockSize}} - case 2: - return &fastEncL2{fastGen: fastGen{cur: maxStoreBlockSize}} - case 3: - return &fastEncL3{fastGen: fastGen{cur: maxStoreBlockSize}} - case 4: - return &fastEncL4{fastGen: fastGen{cur: maxStoreBlockSize}} - case 5: - return &fastEncL5{fastGen: fastGen{cur: maxStoreBlockSize}} - case 6: - return &fastEncL6{fastGen: fastGen{cur: maxStoreBlockSize}} - default: - panic("invalid level specified") - } -} - -const ( - tableBits = 15 // Bits used in the table - tableSize = 1 << tableBits // Size of the table - tableShift = 32 - tableBits // Right-shift to get the tableBits most significant bits of a uint32. - baseMatchOffset = 1 // The smallest match offset - baseMatchLength = 3 // The smallest match length per the RFC section 3.2.5 - maxMatchOffset = 1 << 15 // The largest match offset - - bTableBits = 17 // Bits used in the big tables - bTableSize = 1 << bTableBits // Size of the table - allocHistory = maxStoreBlockSize * 5 // Size to preallocate for history. - bufferReset = (1 << 31) - allocHistory - maxStoreBlockSize - 1 // Reset the buffer offset when reaching this. -) - -const ( - prime3bytes = 506832829 - prime4bytes = 2654435761 - prime5bytes = 889523592379 - prime6bytes = 227718039650203 - prime7bytes = 58295818150454627 - prime8bytes = 0xcf1bbcdcb7a56463 -) - -func load3232(b []byte, i int32) uint32 { - return binary.LittleEndian.Uint32(b[i:]) -} - -func load6432(b []byte, i int32) uint64 { - return binary.LittleEndian.Uint64(b[i:]) -} - -type tableEntry struct { - offset int32 -} - -// fastGen maintains the table for matches, -// and the previous byte block for level 2. 
-// This is the generic implementation. -type fastGen struct { - hist []byte - cur int32 -} - -func (e *fastGen) addBlock(src []byte) int32 { - // check if we have space already - if len(e.hist)+len(src) > cap(e.hist) { - if cap(e.hist) == 0 { - e.hist = make([]byte, 0, allocHistory) - } else { - if cap(e.hist) < maxMatchOffset*2 { - panic("unexpected buffer size") - } - // Move down - offset := int32(len(e.hist)) - maxMatchOffset - // copy(e.hist[0:maxMatchOffset], e.hist[offset:]) - *(*[maxMatchOffset]byte)(e.hist) = *(*[maxMatchOffset]byte)(e.hist[offset:]) - e.cur += offset - e.hist = e.hist[:maxMatchOffset] - } - } - s := int32(len(e.hist)) - e.hist = append(e.hist, src...) - return s -} - -type tableEntryPrev struct { - Cur tableEntry - Prev tableEntry -} - -// hash7 returns the hash of the lowest 7 bytes of u to fit in a hash table with h bits. -// Preferably h should be a constant and should always be <64. -func hash7(u uint64, h uint8) uint32 { - return uint32(((u << (64 - 56)) * prime7bytes) >> ((64 - h) & reg8SizeMask64)) -} - -// hashLen returns a hash of the lowest mls bytes of with length output bits. -// mls must be >=3 and <=8. Any other value will return hash for 4 bytes. -// length should always be < 32. -// Preferably length and mls should be a constant for inlining. -func hashLen(u uint64, length, mls uint8) uint32 { - switch mls { - case 3: - return (uint32(u<<8) * prime3bytes) >> (32 - length) - case 5: - return uint32(((u << (64 - 40)) * prime5bytes) >> (64 - length)) - case 6: - return uint32(((u << (64 - 48)) * prime6bytes) >> (64 - length)) - case 7: - return uint32(((u << (64 - 56)) * prime7bytes) >> (64 - length)) - case 8: - return uint32((u * prime8bytes) >> (64 - length)) - default: - return (uint32(u) * prime4bytes) >> (32 - length) - } -} - -// matchlen will return the match length between offsets and t in src. -// The maximum length returned is maxMatchLength - 4. -// It is assumed that s > t, that t >=0 and s < len(src). 
-func (e *fastGen) matchlen(s, t int32, src []byte) int32 { - if debugDecode { - if t >= s { - panic(fmt.Sprint("t >=s:", t, s)) - } - if int(s) >= len(src) { - panic(fmt.Sprint("s >= len(src):", s, len(src))) - } - if t < 0 { - panic(fmt.Sprint("t < 0:", t)) - } - if s-t > maxMatchOffset { - panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchLength (", maxMatchOffset, ")")) - } - } - s1 := int(s) + maxMatchLength - 4 - if s1 > len(src) { - s1 = len(src) - } - - // Extend the match to be as long as possible. - return int32(matchLen(src[s:s1], src[t:])) -} - -// matchlenLong will return the match length between offsets and t in src. -// It is assumed that s > t, that t >=0 and s < len(src). -func (e *fastGen) matchlenLong(s, t int32, src []byte) int32 { - if debugDeflate { - if t >= s { - panic(fmt.Sprint("t >=s:", t, s)) - } - if int(s) >= len(src) { - panic(fmt.Sprint("s >= len(src):", s, len(src))) - } - if t < 0 { - panic(fmt.Sprint("t < 0:", t)) - } - if s-t > maxMatchOffset { - panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchLength (", maxMatchOffset, ")")) - } - } - // Extend the match to be as long as possible. - return int32(matchLen(src[s:], src[t:])) -} - -// Reset the encoding table. -func (e *fastGen) Reset() { - if cap(e.hist) < allocHistory { - e.hist = make([]byte, 0, allocHistory) - } - // We offset current position so everything will be out of reach. - // If we are above the buffer reset it will be cleared anyway since len(hist) == 0. - if e.cur <= bufferReset { - e.cur += maxMatchOffset + int32(len(e.hist)) - } - e.hist = e.hist[:0] -} diff --git a/vendor/github.com/klauspost/compress/flate/huffman_bit_writer.go b/vendor/github.com/klauspost/compress/flate/huffman_bit_writer.go deleted file mode 100644 index f70594c34e..0000000000 --- a/vendor/github.com/klauspost/compress/flate/huffman_bit_writer.go +++ /dev/null @@ -1,1182 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package flate - -import ( - "encoding/binary" - "fmt" - "io" - "math" -) - -const ( - // The largest offset code. - offsetCodeCount = 30 - - // The special code used to mark the end of a block. - endBlockMarker = 256 - - // The first length code. - lengthCodesStart = 257 - - // The number of codegen codes. - codegenCodeCount = 19 - badCode = 255 - - // maxPredefinedTokens is the maximum number of tokens - // where we check if fixed size is smaller. - maxPredefinedTokens = 250 - - // bufferFlushSize indicates the buffer size - // after which bytes are flushed to the writer. - // Should preferably be a multiple of 6, since - // we accumulate 6 bytes between writes to the buffer. - bufferFlushSize = 246 -) - -// Minimum length code that emits bits. -const lengthExtraBitsMinCode = 8 - -// The number of extra bits needed by length code X - LENGTH_CODES_START. -var lengthExtraBits = [32]uint8{ - /* 257 */ 0, 0, 0, - /* 260 */ 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, - /* 270 */ 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, - /* 280 */ 4, 5, 5, 5, 5, 0, -} - -// The length indicated by length code X - LENGTH_CODES_START. -var lengthBase = [32]uint8{ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, - 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, - 64, 80, 96, 112, 128, 160, 192, 224, 255, -} - -// Minimum offset code that emits bits. -const offsetExtraBitsMinCode = 4 - -// offset code word extra bits. 
-var offsetExtraBits = [32]int8{ - 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, - 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, - 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, - /* extended window */ - 14, 14, -} - -var offsetCombined = [32]uint32{} - -func init() { - var offsetBase = [32]uint32{ - /* normal deflate */ - 0x000000, 0x000001, 0x000002, 0x000003, 0x000004, - 0x000006, 0x000008, 0x00000c, 0x000010, 0x000018, - 0x000020, 0x000030, 0x000040, 0x000060, 0x000080, - 0x0000c0, 0x000100, 0x000180, 0x000200, 0x000300, - 0x000400, 0x000600, 0x000800, 0x000c00, 0x001000, - 0x001800, 0x002000, 0x003000, 0x004000, 0x006000, - - /* extended window */ - 0x008000, 0x00c000, - } - - for i := range offsetCombined[:] { - // Don't use extended window values... - if offsetExtraBits[i] == 0 || offsetBase[i] > 0x006000 { - continue - } - offsetCombined[i] = uint32(offsetExtraBits[i]) | (offsetBase[i] << 8) - } -} - -// The odd order in which the codegen code sizes are written. -var codegenOrder = []uint32{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15} - -type huffmanBitWriter struct { - // writer is the underlying writer. - // Do not use it directly; use the write method, which ensures - // that Write errors are sticky. - writer io.Writer - - // Data waiting to be written is bytes[0:nbytes] - // and then the low nbits of bits. - bits uint64 - nbits uint8 - nbytes uint8 - lastHuffMan bool - literalEncoding *huffmanEncoder - tmpLitEncoding *huffmanEncoder - offsetEncoding *huffmanEncoder - codegenEncoding *huffmanEncoder - err error - lastHeader int - // Set between 0 (reused block can be up to 2x the size) - logNewTablePenalty uint - bytes [256 + 8]byte - literalFreq [lengthCodesStart + 32]uint16 - offsetFreq [32]uint16 - codegenFreq [codegenCodeCount]uint16 - - // codegen must have an extra space for the final symbol. - codegen [literalCount + offsetCodeCount + 1]uint8 -} - -// Huffman reuse. -// -// The huffmanBitWriter supports reusing huffman tables and thereby combining block sections. 
-// -// This is controlled by several variables: -// -// If lastHeader is non-zero the Huffman table can be reused. -// This also indicates that a Huffman table has been generated that can output all -// possible symbols. -// It also indicates that an EOB has not yet been emitted, so if a new tabel is generated -// an EOB with the previous table must be written. -// -// If lastHuffMan is set, a table for outputting literals has been generated and offsets are invalid. -// -// An incoming block estimates the output size of a new table using a 'fresh' by calculating the -// optimal size and adding a penalty in 'logNewTablePenalty'. -// A Huffman table is not optimal, which is why we add a penalty, and generating a new table -// is slower both for compression and decompression. - -func newHuffmanBitWriter(w io.Writer) *huffmanBitWriter { - return &huffmanBitWriter{ - writer: w, - literalEncoding: newHuffmanEncoder(literalCount), - tmpLitEncoding: newHuffmanEncoder(literalCount), - codegenEncoding: newHuffmanEncoder(codegenCodeCount), - offsetEncoding: newHuffmanEncoder(offsetCodeCount), - } -} - -func (w *huffmanBitWriter) reset(writer io.Writer) { - w.writer = writer - w.bits, w.nbits, w.nbytes, w.err = 0, 0, 0, nil - w.lastHeader = 0 - w.lastHuffMan = false -} - -func (w *huffmanBitWriter) canReuse(t *tokens) (ok bool) { - a := t.offHist[:offsetCodeCount] - b := w.offsetEncoding.codes - b = b[:len(a)] - for i, v := range a { - if v != 0 && b[i].zero() { - return false - } - } - - a = t.extraHist[:literalCount-256] - b = w.literalEncoding.codes[256:literalCount] - b = b[:len(a)] - for i, v := range a { - if v != 0 && b[i].zero() { - return false - } - } - - a = t.litHist[:256] - b = w.literalEncoding.codes[:len(a)] - for i, v := range a { - if v != 0 && b[i].zero() { - return false - } - } - return true -} - -func (w *huffmanBitWriter) flush() { - if w.err != nil { - w.nbits = 0 - return - } - if w.lastHeader > 0 { - // We owe an EOB - 
w.writeCode(w.literalEncoding.codes[endBlockMarker]) - w.lastHeader = 0 - } - n := w.nbytes - for w.nbits != 0 { - w.bytes[n] = byte(w.bits) - w.bits >>= 8 - if w.nbits > 8 { // Avoid underflow - w.nbits -= 8 - } else { - w.nbits = 0 - } - n++ - } - w.bits = 0 - w.write(w.bytes[:n]) - w.nbytes = 0 -} - -func (w *huffmanBitWriter) write(b []byte) { - if w.err != nil { - return - } - _, w.err = w.writer.Write(b) -} - -func (w *huffmanBitWriter) writeBits(b int32, nb uint8) { - w.bits |= uint64(b) << (w.nbits & 63) - w.nbits += nb - if w.nbits >= 48 { - w.writeOutBits() - } -} - -func (w *huffmanBitWriter) writeBytes(bytes []byte) { - if w.err != nil { - return - } - n := w.nbytes - if w.nbits&7 != 0 { - w.err = InternalError("writeBytes with unfinished bits") - return - } - for w.nbits != 0 { - w.bytes[n] = byte(w.bits) - w.bits >>= 8 - w.nbits -= 8 - n++ - } - if n != 0 { - w.write(w.bytes[:n]) - } - w.nbytes = 0 - w.write(bytes) -} - -// RFC 1951 3.2.7 specifies a special run-length encoding for specifying -// the literal and offset lengths arrays (which are concatenated into a single -// array). This method generates that run-length encoding. -// -// The result is written into the codegen array, and the frequencies -// of each code is written into the codegenFreq array. -// Codes 0-15 are single byte codes. Codes 16-18 are followed by additional -// information. Code badCode is an end marker -// -// numLiterals The number of literals in literalEncoding -// numOffsets The number of offsets in offsetEncoding -// litenc, offenc The literal and offset encoder to use -func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int, litEnc, offEnc *huffmanEncoder) { - for i := range w.codegenFreq { - w.codegenFreq[i] = 0 - } - // Note that we are using codegen both as a temporary variable for holding - // a copy of the frequencies, and as the place where we put the result. - // This is fine because the output is always shorter than the input used - // so far. 
- codegen := w.codegen[:] // cache - // Copy the concatenated code sizes to codegen. Put a marker at the end. - cgnl := codegen[:numLiterals] - for i := range cgnl { - cgnl[i] = litEnc.codes[i].len() - } - - cgnl = codegen[numLiterals : numLiterals+numOffsets] - for i := range cgnl { - cgnl[i] = offEnc.codes[i].len() - } - codegen[numLiterals+numOffsets] = badCode - - size := codegen[0] - count := 1 - outIndex := 0 - for inIndex := 1; size != badCode; inIndex++ { - // INVARIANT: We have seen "count" copies of size that have not yet - // had output generated for them. - nextSize := codegen[inIndex] - if nextSize == size { - count++ - continue - } - // We need to generate codegen indicating "count" of size. - if size != 0 { - codegen[outIndex] = size - outIndex++ - w.codegenFreq[size]++ - count-- - for count >= 3 { - n := 6 - if n > count { - n = count - } - codegen[outIndex] = 16 - outIndex++ - codegen[outIndex] = uint8(n - 3) - outIndex++ - w.codegenFreq[16]++ - count -= n - } - } else { - for count >= 11 { - n := 138 - if n > count { - n = count - } - codegen[outIndex] = 18 - outIndex++ - codegen[outIndex] = uint8(n - 11) - outIndex++ - w.codegenFreq[18]++ - count -= n - } - if count >= 3 { - // count >= 3 && count <= 10 - codegen[outIndex] = 17 - outIndex++ - codegen[outIndex] = uint8(count - 3) - outIndex++ - w.codegenFreq[17]++ - count = 0 - } - } - count-- - for ; count >= 0; count-- { - codegen[outIndex] = size - outIndex++ - w.codegenFreq[size]++ - } - // Set up invariant for next time through the loop. - size = nextSize - count = 1 - } - // Marker indicating the end of the codegen. 
- codegen[outIndex] = badCode -} - -func (w *huffmanBitWriter) codegens() int { - numCodegens := len(w.codegenFreq) - for numCodegens > 4 && w.codegenFreq[codegenOrder[numCodegens-1]] == 0 { - numCodegens-- - } - return numCodegens -} - -func (w *huffmanBitWriter) headerSize() (size, numCodegens int) { - numCodegens = len(w.codegenFreq) - for numCodegens > 4 && w.codegenFreq[codegenOrder[numCodegens-1]] == 0 { - numCodegens-- - } - return 3 + 5 + 5 + 4 + (3 * numCodegens) + - w.codegenEncoding.bitLength(w.codegenFreq[:]) + - int(w.codegenFreq[16])*2 + - int(w.codegenFreq[17])*3 + - int(w.codegenFreq[18])*7, numCodegens -} - -// dynamicSize returns the size of dynamically encoded data in bits. -func (w *huffmanBitWriter) dynamicReuseSize(litEnc, offEnc *huffmanEncoder) (size int) { - size = litEnc.bitLength(w.literalFreq[:]) + - offEnc.bitLength(w.offsetFreq[:]) - return size -} - -// dynamicSize returns the size of dynamically encoded data in bits. -func (w *huffmanBitWriter) dynamicSize(litEnc, offEnc *huffmanEncoder, extraBits int) (size, numCodegens int) { - header, numCodegens := w.headerSize() - size = header + - litEnc.bitLength(w.literalFreq[:]) + - offEnc.bitLength(w.offsetFreq[:]) + - extraBits - return size, numCodegens -} - -// extraBitSize will return the number of bits that will be written -// as "extra" bits on matches. -func (w *huffmanBitWriter) extraBitSize() int { - total := 0 - for i, n := range w.literalFreq[257:literalCount] { - total += int(n) * int(lengthExtraBits[i&31]) - } - for i, n := range w.offsetFreq[:offsetCodeCount] { - total += int(n) * int(offsetExtraBits[i&31]) - } - return total -} - -// fixedSize returns the size of dynamically encoded data in bits. -func (w *huffmanBitWriter) fixedSize(extraBits int) int { - return 3 + - fixedLiteralEncoding.bitLength(w.literalFreq[:]) + - fixedOffsetEncoding.bitLength(w.offsetFreq[:]) + - extraBits -} - -// storedSize calculates the stored size, including header. 
-// The function returns the size in bits and whether the block -// fits inside a single block. -func (w *huffmanBitWriter) storedSize(in []byte) (int, bool) { - if in == nil { - return 0, false - } - if len(in) <= maxStoreBlockSize { - return (len(in) + 5) * 8, true - } - return 0, false -} - -func (w *huffmanBitWriter) writeCode(c hcode) { - // The function does not get inlined if we "& 63" the shift. - w.bits |= c.code64() << (w.nbits & 63) - w.nbits += c.len() - if w.nbits >= 48 { - w.writeOutBits() - } -} - -// writeOutBits will write bits to the buffer. -func (w *huffmanBitWriter) writeOutBits() { - bits := w.bits - w.bits >>= 48 - w.nbits -= 48 - n := w.nbytes - - // We over-write, but faster... - binary.LittleEndian.PutUint64(w.bytes[n:], bits) - n += 6 - - if n >= bufferFlushSize { - if w.err != nil { - n = 0 - return - } - w.write(w.bytes[:n]) - n = 0 - } - - w.nbytes = n -} - -// Write the header of a dynamic Huffman block to the output stream. -// -// numLiterals The number of literals specified in codegen -// numOffsets The number of offsets specified in codegen -// numCodegens The number of codegens used in codegen -func (w *huffmanBitWriter) writeDynamicHeader(numLiterals int, numOffsets int, numCodegens int, isEof bool) { - if w.err != nil { - return - } - var firstBits int32 = 4 - if isEof { - firstBits = 5 - } - w.writeBits(firstBits, 3) - w.writeBits(int32(numLiterals-257), 5) - w.writeBits(int32(numOffsets-1), 5) - w.writeBits(int32(numCodegens-4), 4) - - for i := 0; i < numCodegens; i++ { - value := uint(w.codegenEncoding.codes[codegenOrder[i]].len()) - w.writeBits(int32(value), 3) - } - - i := 0 - for { - var codeWord = uint32(w.codegen[i]) - i++ - if codeWord == badCode { - break - } - w.writeCode(w.codegenEncoding.codes[codeWord]) - - switch codeWord { - case 16: - w.writeBits(int32(w.codegen[i]), 2) - i++ - case 17: - w.writeBits(int32(w.codegen[i]), 3) - i++ - case 18: - w.writeBits(int32(w.codegen[i]), 7) - i++ - } - } -} - -// 
writeStoredHeader will write a stored header. -// If the stored block is only used for EOF, -// it is replaced with a fixed huffman block. -func (w *huffmanBitWriter) writeStoredHeader(length int, isEof bool) { - if w.err != nil { - return - } - if w.lastHeader > 0 { - // We owe an EOB - w.writeCode(w.literalEncoding.codes[endBlockMarker]) - w.lastHeader = 0 - } - - // To write EOF, use a fixed encoding block. 10 bits instead of 5 bytes. - if length == 0 && isEof { - w.writeFixedHeader(isEof) - // EOB: 7 bits, value: 0 - w.writeBits(0, 7) - w.flush() - return - } - - var flag int32 - if isEof { - flag = 1 - } - w.writeBits(flag, 3) - w.flush() - w.writeBits(int32(length), 16) - w.writeBits(int32(^uint16(length)), 16) -} - -func (w *huffmanBitWriter) writeFixedHeader(isEof bool) { - if w.err != nil { - return - } - if w.lastHeader > 0 { - // We owe an EOB - w.writeCode(w.literalEncoding.codes[endBlockMarker]) - w.lastHeader = 0 - } - - // Indicate that we are a fixed Huffman block - var value int32 = 2 - if isEof { - value = 3 - } - w.writeBits(value, 3) -} - -// writeBlock will write a block of tokens with the smallest encoding. -// The original input can be supplied, and if the huffman encoded data -// is larger than the original bytes, the data will be written as a -// stored block. -// If the input is nil, the tokens will always be Huffman encoded. -func (w *huffmanBitWriter) writeBlock(tokens *tokens, eof bool, input []byte) { - if w.err != nil { - return - } - - tokens.AddEOB() - if w.lastHeader > 0 { - // We owe an EOB - w.writeCode(w.literalEncoding.codes[endBlockMarker]) - w.lastHeader = 0 - } - numLiterals, numOffsets := w.indexTokens(tokens, false) - w.generate() - var extraBits int - storedSize, storable := w.storedSize(input) - if storable { - extraBits = w.extraBitSize() - } - - // Figure out smallest code. - // Fixed Huffman baseline. 
- var literalEncoding = fixedLiteralEncoding - var offsetEncoding = fixedOffsetEncoding - var size = math.MaxInt32 - if tokens.n < maxPredefinedTokens { - size = w.fixedSize(extraBits) - } - - // Dynamic Huffman? - var numCodegens int - - // Generate codegen and codegenFrequencies, which indicates how to encode - // the literalEncoding and the offsetEncoding. - w.generateCodegen(numLiterals, numOffsets, w.literalEncoding, w.offsetEncoding) - w.codegenEncoding.generate(w.codegenFreq[:], 7) - dynamicSize, numCodegens := w.dynamicSize(w.literalEncoding, w.offsetEncoding, extraBits) - - if dynamicSize < size { - size = dynamicSize - literalEncoding = w.literalEncoding - offsetEncoding = w.offsetEncoding - } - - // Stored bytes? - if storable && storedSize <= size { - w.writeStoredHeader(len(input), eof) - w.writeBytes(input) - return - } - - // Huffman. - if literalEncoding == fixedLiteralEncoding { - w.writeFixedHeader(eof) - } else { - w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof) - } - - // Write the tokens. - w.writeTokens(tokens.Slice(), literalEncoding.codes, offsetEncoding.codes) -} - -// writeBlockDynamic encodes a block using a dynamic Huffman table. -// This should be used if the symbols used have a disproportionate -// histogram distribution. -// If input is supplied and the compression savings are below 1/16th of the -// input size the block is stored. -func (w *huffmanBitWriter) writeBlockDynamic(tokens *tokens, eof bool, input []byte, sync bool) { - if w.err != nil { - return - } - - sync = sync || eof - if sync { - tokens.AddEOB() - } - - // We cannot reuse pure huffman table, and must mark as EOF. - if (w.lastHuffMan || eof) && w.lastHeader > 0 { - // We will not try to reuse. - w.writeCode(w.literalEncoding.codes[endBlockMarker]) - w.lastHeader = 0 - w.lastHuffMan = false - } - - // fillReuse enables filling of empty values. - // This will make encodings always reusable without testing. 
- // However, this does not appear to benefit on most cases. - const fillReuse = false - - // Check if we can reuse... - if !fillReuse && w.lastHeader > 0 && !w.canReuse(tokens) { - w.writeCode(w.literalEncoding.codes[endBlockMarker]) - w.lastHeader = 0 - } - - numLiterals, numOffsets := w.indexTokens(tokens, !sync) - extraBits := 0 - ssize, storable := w.storedSize(input) - - const usePrefs = true - if storable || w.lastHeader > 0 { - extraBits = w.extraBitSize() - } - - var size int - - // Check if we should reuse. - if w.lastHeader > 0 { - // Estimate size for using a new table. - // Use the previous header size as the best estimate. - newSize := w.lastHeader + tokens.EstimatedBits() - newSize += int(w.literalEncoding.codes[endBlockMarker].len()) + newSize>>w.logNewTablePenalty - - // The estimated size is calculated as an optimal table. - // We add a penalty to make it more realistic and re-use a bit more. - reuseSize := w.dynamicReuseSize(w.literalEncoding, w.offsetEncoding) + extraBits - - // Check if a new table is better. - if newSize < reuseSize { - // Write the EOB we owe. - w.writeCode(w.literalEncoding.codes[endBlockMarker]) - size = newSize - w.lastHeader = 0 - } else { - size = reuseSize - } - - if tokens.n < maxPredefinedTokens { - if preSize := w.fixedSize(extraBits) + 7; usePrefs && preSize < size { - // Check if we get a reasonable size decrease. - if storable && ssize <= size { - w.writeStoredHeader(len(input), eof) - w.writeBytes(input) - return - } - w.writeFixedHeader(eof) - if !sync { - tokens.AddEOB() - } - w.writeTokens(tokens.Slice(), fixedLiteralEncoding.codes, fixedOffsetEncoding.codes) - return - } - } - // Check if we get a reasonable size decrease. 
- if storable && ssize <= size { - w.writeStoredHeader(len(input), eof) - w.writeBytes(input) - return - } - } - - // We want a new block/table - if w.lastHeader == 0 { - if fillReuse && !sync { - w.fillTokens() - numLiterals, numOffsets = maxNumLit, maxNumDist - } else { - w.literalFreq[endBlockMarker] = 1 - } - - w.generate() - // Generate codegen and codegenFrequencies, which indicates how to encode - // the literalEncoding and the offsetEncoding. - w.generateCodegen(numLiterals, numOffsets, w.literalEncoding, w.offsetEncoding) - w.codegenEncoding.generate(w.codegenFreq[:], 7) - - var numCodegens int - if fillReuse && !sync { - // Reindex for accurate size... - w.indexTokens(tokens, true) - } - size, numCodegens = w.dynamicSize(w.literalEncoding, w.offsetEncoding, extraBits) - - // Store predefined, if we don't get a reasonable improvement. - if tokens.n < maxPredefinedTokens { - if preSize := w.fixedSize(extraBits); usePrefs && preSize <= size { - // Store bytes, if we don't get an improvement. - if storable && ssize <= preSize { - w.writeStoredHeader(len(input), eof) - w.writeBytes(input) - return - } - w.writeFixedHeader(eof) - if !sync { - tokens.AddEOB() - } - w.writeTokens(tokens.Slice(), fixedLiteralEncoding.codes, fixedOffsetEncoding.codes) - return - } - } - - if storable && ssize <= size { - // Store bytes, if we don't get an improvement. - w.writeStoredHeader(len(input), eof) - w.writeBytes(input) - return - } - - // Write Huffman table. - w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof) - if !sync { - w.lastHeader, _ = w.headerSize() - } - w.lastHuffMan = false - } - - if sync { - w.lastHeader = 0 - } - // Write the tokens. 
- w.writeTokens(tokens.Slice(), w.literalEncoding.codes, w.offsetEncoding.codes) -} - -func (w *huffmanBitWriter) fillTokens() { - for i, v := range w.literalFreq[:literalCount] { - if v == 0 { - w.literalFreq[i] = 1 - } - } - for i, v := range w.offsetFreq[:offsetCodeCount] { - if v == 0 { - w.offsetFreq[i] = 1 - } - } -} - -// indexTokens indexes a slice of tokens, and updates -// literalFreq and offsetFreq, and generates literalEncoding -// and offsetEncoding. -// The number of literal and offset tokens is returned. -func (w *huffmanBitWriter) indexTokens(t *tokens, filled bool) (numLiterals, numOffsets int) { - //copy(w.literalFreq[:], t.litHist[:]) - *(*[256]uint16)(w.literalFreq[:]) = t.litHist - //copy(w.literalFreq[256:], t.extraHist[:]) - *(*[32]uint16)(w.literalFreq[256:]) = t.extraHist - w.offsetFreq = t.offHist - - if t.n == 0 { - return - } - if filled { - return maxNumLit, maxNumDist - } - // get the number of literals - numLiterals = len(w.literalFreq) - for w.literalFreq[numLiterals-1] == 0 { - numLiterals-- - } - // get the number of offsets - numOffsets = len(w.offsetFreq) - for numOffsets > 0 && w.offsetFreq[numOffsets-1] == 0 { - numOffsets-- - } - if numOffsets == 0 { - // We haven't found a single match. If we want to go with the dynamic encoding, - // we should count at least one offset to be sure that the offset huffman tree could be encoded. - w.offsetFreq[0] = 1 - numOffsets = 1 - } - return -} - -func (w *huffmanBitWriter) generate() { - w.literalEncoding.generate(w.literalFreq[:literalCount], 15) - w.offsetEncoding.generate(w.offsetFreq[:offsetCodeCount], 15) -} - -// writeTokens writes a slice of tokens to the output. -// codes for literal and offset encoding must be supplied. -func (w *huffmanBitWriter) writeTokens(tokens []token, leCodes, oeCodes []hcode) { - if w.err != nil { - return - } - if len(tokens) == 0 { - return - } - - // Only last token should be endBlockMarker. 
- var deferEOB bool - if tokens[len(tokens)-1] == endBlockMarker { - tokens = tokens[:len(tokens)-1] - deferEOB = true - } - - // Create slices up to the next power of two to avoid bounds checks. - lits := leCodes[:256] - offs := oeCodes[:32] - lengths := leCodes[lengthCodesStart:] - lengths = lengths[:32] - - // Go 1.16 LOVES having these on stack. - bits, nbits, nbytes := w.bits, w.nbits, w.nbytes - - for _, t := range tokens { - if t < 256 { - //w.writeCode(lits[t.literal()]) - c := lits[t] - bits |= c.code64() << (nbits & 63) - nbits += c.len() - if nbits >= 48 { - binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits) - //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits - bits >>= 48 - nbits -= 48 - nbytes += 6 - if nbytes >= bufferFlushSize { - if w.err != nil { - nbytes = 0 - return - } - _, w.err = w.writer.Write(w.bytes[:nbytes]) - nbytes = 0 - } - } - continue - } - - // Write the length - length := t.length() - lengthCode := lengthCode(length) & 31 - if false { - w.writeCode(lengths[lengthCode]) - } else { - // inlined - c := lengths[lengthCode] - bits |= c.code64() << (nbits & 63) - nbits += c.len() - if nbits >= 48 { - binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits) - //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits - bits >>= 48 - nbits -= 48 - nbytes += 6 - if nbytes >= bufferFlushSize { - if w.err != nil { - nbytes = 0 - return - } - _, w.err = w.writer.Write(w.bytes[:nbytes]) - nbytes = 0 - } - } - } - - if lengthCode >= lengthExtraBitsMinCode { - extraLengthBits := lengthExtraBits[lengthCode] - //w.writeBits(extraLength, extraLengthBits) - extraLength := int32(length - lengthBase[lengthCode]) - bits |= uint64(extraLength) << (nbits & 63) - nbits += extraLengthBits - if nbits >= 48 { - binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits) - //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits - bits >>= 48 - nbits -= 48 - nbytes += 6 - if nbytes >= bufferFlushSize { - if w.err != nil { - nbytes = 0 - return - } - _, w.err = 
w.writer.Write(w.bytes[:nbytes]) - nbytes = 0 - } - } - } - // Write the offset - offset := t.offset() - offsetCode := (offset >> 16) & 31 - if false { - w.writeCode(offs[offsetCode]) - } else { - // inlined - c := offs[offsetCode] - bits |= c.code64() << (nbits & 63) - nbits += c.len() - if nbits >= 48 { - binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits) - //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits - bits >>= 48 - nbits -= 48 - nbytes += 6 - if nbytes >= bufferFlushSize { - if w.err != nil { - nbytes = 0 - return - } - _, w.err = w.writer.Write(w.bytes[:nbytes]) - nbytes = 0 - } - } - } - - if offsetCode >= offsetExtraBitsMinCode { - offsetComb := offsetCombined[offsetCode] - //w.writeBits(extraOffset, extraOffsetBits) - bits |= uint64((offset-(offsetComb>>8))&matchOffsetOnlyMask) << (nbits & 63) - nbits += uint8(offsetComb) - if nbits >= 48 { - binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits) - //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits - bits >>= 48 - nbits -= 48 - nbytes += 6 - if nbytes >= bufferFlushSize { - if w.err != nil { - nbytes = 0 - return - } - _, w.err = w.writer.Write(w.bytes[:nbytes]) - nbytes = 0 - } - } - } - } - // Restore... - w.bits, w.nbits, w.nbytes = bits, nbits, nbytes - - if deferEOB { - w.writeCode(leCodes[endBlockMarker]) - } -} - -// huffOffset is a static offset encoder used for huffman only encoding. -// It can be reused since we will not be encoding offset values. -var huffOffset *huffmanEncoder - -func init() { - w := newHuffmanBitWriter(nil) - w.offsetFreq[0] = 1 - huffOffset = newHuffmanEncoder(offsetCodeCount) - huffOffset.generate(w.offsetFreq[:offsetCodeCount], 15) -} - -// writeBlockHuff encodes a block of bytes as either -// Huffman encoded literals or uncompressed bytes if the -// results only gains very little from compression. 
-func (w *huffmanBitWriter) writeBlockHuff(eof bool, input []byte, sync bool) { - if w.err != nil { - return - } - - // Clear histogram - for i := range w.literalFreq[:] { - w.literalFreq[i] = 0 - } - if !w.lastHuffMan { - for i := range w.offsetFreq[:] { - w.offsetFreq[i] = 0 - } - } - - const numLiterals = endBlockMarker + 1 - const numOffsets = 1 - - // Add everything as literals - // We have to estimate the header size. - // Assume header is around 70 bytes: - // https://stackoverflow.com/a/25454430 - const guessHeaderSizeBits = 70 * 8 - histogram(input, w.literalFreq[:numLiterals]) - ssize, storable := w.storedSize(input) - if storable && len(input) > 1024 { - // Quick check for incompressible content. - abs := float64(0) - avg := float64(len(input)) / 256 - max := float64(len(input) * 2) - for _, v := range w.literalFreq[:256] { - diff := float64(v) - avg - abs += diff * diff - if abs > max { - break - } - } - if abs < max { - if debugDeflate { - fmt.Println("stored", abs, "<", max) - } - // No chance we can compress this... - w.writeStoredHeader(len(input), eof) - w.writeBytes(input) - return - } - } - w.literalFreq[endBlockMarker] = 1 - w.tmpLitEncoding.generate(w.literalFreq[:numLiterals], 15) - estBits := w.tmpLitEncoding.canReuseBits(w.literalFreq[:numLiterals]) - if estBits < math.MaxInt32 { - estBits += w.lastHeader - if w.lastHeader == 0 { - estBits += guessHeaderSizeBits - } - estBits += estBits >> w.logNewTablePenalty - } - - // Store bytes, if we don't get a reasonable improvement. 
- if storable && ssize <= estBits { - if debugDeflate { - fmt.Println("stored,", ssize, "<=", estBits) - } - w.writeStoredHeader(len(input), eof) - w.writeBytes(input) - return - } - - if w.lastHeader > 0 { - reuseSize := w.literalEncoding.canReuseBits(w.literalFreq[:256]) - - if estBits < reuseSize { - if debugDeflate { - fmt.Println("NOT reusing, reuse:", reuseSize/8, "> new:", estBits/8, "header est:", w.lastHeader/8, "bytes") - } - // We owe an EOB - w.writeCode(w.literalEncoding.codes[endBlockMarker]) - w.lastHeader = 0 - } else if debugDeflate { - fmt.Println("reusing, reuse:", reuseSize/8, "> new:", estBits/8, "- header est:", w.lastHeader/8) - } - } - - count := 0 - if w.lastHeader == 0 { - // Use the temp encoding, so swap. - w.literalEncoding, w.tmpLitEncoding = w.tmpLitEncoding, w.literalEncoding - // Generate codegen and codegenFrequencies, which indicates how to encode - // the literalEncoding and the offsetEncoding. - w.generateCodegen(numLiterals, numOffsets, w.literalEncoding, huffOffset) - w.codegenEncoding.generate(w.codegenFreq[:], 7) - numCodegens := w.codegens() - - // Huffman. - w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof) - w.lastHuffMan = true - w.lastHeader, _ = w.headerSize() - if debugDeflate { - count += w.lastHeader - fmt.Println("header:", count/8) - } - } - - encoding := w.literalEncoding.codes[:256] - // Go 1.16 LOVES having these on stack. At least 1.5x the speed. - bits, nbits, nbytes := w.bits, w.nbits, w.nbytes - - if debugDeflate { - count -= int(nbytes)*8 + int(nbits) - } - // Unroll, write 3 codes/loop. - // Fastest number of unrolls. - for len(input) > 3 { - // We must have at least 48 bits free. 
- if nbits >= 8 { - n := nbits >> 3 - binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits) - bits >>= (n * 8) & 63 - nbits -= n * 8 - nbytes += n - } - if nbytes >= bufferFlushSize { - if w.err != nil { - nbytes = 0 - return - } - if debugDeflate { - count += int(nbytes) * 8 - } - _, w.err = w.writer.Write(w.bytes[:nbytes]) - nbytes = 0 - } - a, b := encoding[input[0]], encoding[input[1]] - bits |= a.code64() << (nbits & 63) - bits |= b.code64() << ((nbits + a.len()) & 63) - c := encoding[input[2]] - nbits += b.len() + a.len() - bits |= c.code64() << (nbits & 63) - nbits += c.len() - input = input[3:] - } - - // Remaining... - for _, t := range input { - if nbits >= 48 { - binary.LittleEndian.PutUint64(w.bytes[nbytes:], bits) - //*(*uint64)(unsafe.Pointer(&w.bytes[nbytes])) = bits - bits >>= 48 - nbits -= 48 - nbytes += 6 - if nbytes >= bufferFlushSize { - if w.err != nil { - nbytes = 0 - return - } - if debugDeflate { - count += int(nbytes) * 8 - } - _, w.err = w.writer.Write(w.bytes[:nbytes]) - nbytes = 0 - } - } - // Bitwriting inlined, ~30% speedup - c := encoding[t] - bits |= c.code64() << (nbits & 63) - - nbits += c.len() - if debugDeflate { - count += int(c.len()) - } - } - // Restore... - w.bits, w.nbits, w.nbytes = bits, nbits, nbytes - - if debugDeflate { - nb := count + int(nbytes)*8 + int(nbits) - fmt.Println("wrote", nb, "bits,", nb/8, "bytes.") - } - // Flush if needed to have space. - if w.nbits >= 48 { - w.writeOutBits() - } - - if eof || sync { - w.writeCode(w.literalEncoding.codes[endBlockMarker]) - w.lastHeader = 0 - w.lastHuffMan = false - } -} diff --git a/vendor/github.com/klauspost/compress/flate/huffman_code.go b/vendor/github.com/klauspost/compress/flate/huffman_code.go deleted file mode 100644 index be7b58b473..0000000000 --- a/vendor/github.com/klauspost/compress/flate/huffman_code.go +++ /dev/null @@ -1,417 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package flate - -import ( - "math" - "math/bits" -) - -const ( - maxBitsLimit = 16 - // number of valid literals - literalCount = 286 -) - -// hcode is a huffman code with a bit code and bit length. -type hcode uint32 - -func (h hcode) len() uint8 { - return uint8(h) -} - -func (h hcode) code64() uint64 { - return uint64(h >> 8) -} - -func (h hcode) zero() bool { - return h == 0 -} - -type huffmanEncoder struct { - codes []hcode - bitCount [17]int32 - - // Allocate a reusable buffer with the longest possible frequency table. - // Possible lengths are codegenCodeCount, offsetCodeCount and literalCount. - // The largest of these is literalCount, so we allocate for that case. - freqcache [literalCount + 1]literalNode -} - -type literalNode struct { - literal uint16 - freq uint16 -} - -// A levelInfo describes the state of the constructed tree for a given depth. -type levelInfo struct { - // Our level. for better printing - level int32 - - // The frequency of the last node at this level - lastFreq int32 - - // The frequency of the next character to add to this level - nextCharFreq int32 - - // The frequency of the next pair (from level below) to add to this level. - // Only valid if the "needed" value of the next lower level is 0. - nextPairFreq int32 - - // The number of chains remaining to generate for this level before moving - // up to the next level - needed int32 -} - -// set sets the code and length of an hcode. 
-func (h *hcode) set(code uint16, length uint8) { - *h = hcode(length) | (hcode(code) << 8) -} - -func newhcode(code uint16, length uint8) hcode { - return hcode(length) | (hcode(code) << 8) -} - -func reverseBits(number uint16, bitLength byte) uint16 { - return bits.Reverse16(number << ((16 - bitLength) & 15)) -} - -func maxNode() literalNode { return literalNode{math.MaxUint16, math.MaxUint16} } - -func newHuffmanEncoder(size int) *huffmanEncoder { - // Make capacity to next power of two. - c := uint(bits.Len32(uint32(size - 1))) - return &huffmanEncoder{codes: make([]hcode, size, 1<= 3 -// The cases of 0, 1, and 2 literals are handled by special case code. -// -// list An array of the literals with non-zero frequencies -// -// and their associated frequencies. The array is in order of increasing -// frequency, and has as its last element a special element with frequency -// MaxInt32 -// -// maxBits The maximum number of bits that should be used to encode any literal. -// -// Must be less than 16. -// -// return An integer array in which array[i] indicates the number of literals -// -// that should be encoded in i bits. -func (h *huffmanEncoder) bitCounts(list []literalNode, maxBits int32) []int32 { - if maxBits >= maxBitsLimit { - panic("flate: maxBits too large") - } - n := int32(len(list)) - list = list[0 : n+1] - list[n] = maxNode() - - // The tree can't have greater depth than n - 1, no matter what. This - // saves a little bit of work in some small cases - if maxBits > n-1 { - maxBits = n - 1 - } - - // Create information about each of the levels. - // A bogus "Level 0" whose sole purpose is so that - // level1.prev.needed==0. This makes level1.nextPairFreq - // be a legitimate value that never gets chosen. - var levels [maxBitsLimit]levelInfo - // leafCounts[i] counts the number of literals at the left - // of ancestors of the rightmost node at level i. - // leafCounts[i][j] is the number of literals at the left - // of the level j ancestor. 
- var leafCounts [maxBitsLimit][maxBitsLimit]int32 - - // Descending to only have 1 bounds check. - l2f := int32(list[2].freq) - l1f := int32(list[1].freq) - l0f := int32(list[0].freq) + int32(list[1].freq) - - for level := int32(1); level <= maxBits; level++ { - // For every level, the first two items are the first two characters. - // We initialize the levels as if we had already figured this out. - levels[level] = levelInfo{ - level: level, - lastFreq: l1f, - nextCharFreq: l2f, - nextPairFreq: l0f, - } - leafCounts[level][level] = 2 - if level == 1 { - levels[level].nextPairFreq = math.MaxInt32 - } - } - - // We need a total of 2*n - 2 items at top level and have already generated 2. - levels[maxBits].needed = 2*n - 4 - - level := uint32(maxBits) - for level < 16 { - l := &levels[level] - if l.nextPairFreq == math.MaxInt32 && l.nextCharFreq == math.MaxInt32 { - // We've run out of both leafs and pairs. - // End all calculations for this level. - // To make sure we never come back to this level or any lower level, - // set nextPairFreq impossibly large. - l.needed = 0 - levels[level+1].nextPairFreq = math.MaxInt32 - level++ - continue - } - - prevFreq := l.lastFreq - if l.nextCharFreq < l.nextPairFreq { - // The next item on this row is a leaf node. - n := leafCounts[level][level] + 1 - l.lastFreq = l.nextCharFreq - // Lower leafCounts are the same of the previous node. - leafCounts[level][level] = n - e := list[n] - if e.literal < math.MaxUint16 { - l.nextCharFreq = int32(e.freq) - } else { - l.nextCharFreq = math.MaxInt32 - } - } else { - // The next item on this row is a pair from the previous row. - // nextPairFreq isn't valid until we generate two - // more values in the level below - l.lastFreq = l.nextPairFreq - // Take leaf counts from the lower level, except counts[level] remains the same. 
- if true { - save := leafCounts[level][level] - leafCounts[level] = leafCounts[level-1] - leafCounts[level][level] = save - } else { - copy(leafCounts[level][:level], leafCounts[level-1][:level]) - } - levels[l.level-1].needed = 2 - } - - if l.needed--; l.needed == 0 { - // We've done everything we need to do for this level. - // Continue calculating one level up. Fill in nextPairFreq - // of that level with the sum of the two nodes we've just calculated on - // this level. - if l.level == maxBits { - // All done! - break - } - levels[l.level+1].nextPairFreq = prevFreq + l.lastFreq - level++ - } else { - // If we stole from below, move down temporarily to replenish it. - for levels[level-1].needed > 0 { - level-- - } - } - } - - // Somethings is wrong if at the end, the top level is null or hasn't used - // all of the leaves. - if leafCounts[maxBits][maxBits] != n { - panic("leafCounts[maxBits][maxBits] != n") - } - - bitCount := h.bitCount[:maxBits+1] - bits := 1 - counts := &leafCounts[maxBits] - for level := maxBits; level > 0; level-- { - // chain.leafCount gives the number of literals requiring at least "bits" - // bits to encode. - bitCount[bits] = counts[level] - counts[level-1] - bits++ - } - return bitCount -} - -// Look at the leaves and assign them a bit count and an encoding as specified -// in RFC 1951 3.2.2 -func (h *huffmanEncoder) assignEncodingAndSize(bitCount []int32, list []literalNode) { - code := uint16(0) - for n, bits := range bitCount { - code <<= 1 - if n == 0 || bits == 0 { - continue - } - // The literals list[len(list)-bits] .. list[len(list)-bits] - // are encoded using "bits" bits, and get the values - // code, code + 1, .... The code values are - // assigned in literal order (not frequency order). 
- chunk := list[len(list)-int(bits):] - - sortByLiteral(chunk) - for _, node := range chunk { - h.codes[node.literal] = newhcode(reverseBits(code, uint8(n)), uint8(n)) - code++ - } - list = list[0 : len(list)-int(bits)] - } -} - -// Update this Huffman Code object to be the minimum code for the specified frequency count. -// -// freq An array of frequencies, in which frequency[i] gives the frequency of literal i. -// maxBits The maximum number of bits to use for any literal. -func (h *huffmanEncoder) generate(freq []uint16, maxBits int32) { - list := h.freqcache[:len(freq)+1] - codes := h.codes[:len(freq)] - // Number of non-zero literals - count := 0 - // Set list to be the set of all non-zero literals and their frequencies - for i, f := range freq { - if f != 0 { - list[count] = literalNode{uint16(i), f} - count++ - } else { - codes[i] = 0 - } - } - list[count] = literalNode{} - - list = list[:count] - if count <= 2 { - // Handle the small cases here, because they are awkward for the general case code. With - // two or fewer literals, everything has bit length 1. - for i, node := range list { - // "list" is in order of increasing literal value. - h.codes[node.literal].set(uint16(i), 1) - } - return - } - sortByFreq(list) - - // Get the number of literals for each bit count - bitCount := h.bitCounts(list, maxBits) - // And do the assignment - h.assignEncodingAndSize(bitCount, list) -} - -// atLeastOne clamps the result between 1 and 15. -func atLeastOne(v float32) float32 { - if v < 1 { - return 1 - } - if v > 15 { - return 15 - } - return v -} - -func histogram(b []byte, h []uint16) { - if true && len(b) >= 8<<10 { - // Split for bigger inputs - histogramSplit(b, h) - } else { - h = h[:256] - for _, t := range b { - h[t]++ - } - } -} - -func histogramSplit(b []byte, h []uint16) { - // Tested, and slightly faster than 2-way. - // Writing to separate arrays and combining is also slightly slower. 
- h = h[:256] - for len(b)&3 != 0 { - h[b[0]]++ - b = b[1:] - } - n := len(b) / 4 - x, y, z, w := b[:n], b[n:], b[n+n:], b[n+n+n:] - y, z, w = y[:len(x)], z[:len(x)], w[:len(x)] - for i, t := range x { - v0 := &h[t] - v1 := &h[y[i]] - v3 := &h[w[i]] - v2 := &h[z[i]] - *v0++ - *v1++ - *v2++ - *v3++ - } -} diff --git a/vendor/github.com/klauspost/compress/flate/huffman_sortByFreq.go b/vendor/github.com/klauspost/compress/flate/huffman_sortByFreq.go deleted file mode 100644 index 6c05ba8c1c..0000000000 --- a/vendor/github.com/klauspost/compress/flate/huffman_sortByFreq.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package flate - -// Sort sorts data. -// It makes one call to data.Len to determine n, and O(n*log(n)) calls to -// data.Less and data.Swap. The sort is not guaranteed to be stable. -func sortByFreq(data []literalNode) { - n := len(data) - quickSortByFreq(data, 0, n, maxDepth(n)) -} - -func quickSortByFreq(data []literalNode, a, b, maxDepth int) { - for b-a > 12 { // Use ShellSort for slices <= 12 elements - if maxDepth == 0 { - heapSort(data, a, b) - return - } - maxDepth-- - mlo, mhi := doPivotByFreq(data, a, b) - // Avoiding recursion on the larger subproblem guarantees - // a stack depth of at most lg(b-a). 
- if mlo-a < b-mhi { - quickSortByFreq(data, a, mlo, maxDepth) - a = mhi // i.e., quickSortByFreq(data, mhi, b) - } else { - quickSortByFreq(data, mhi, b, maxDepth) - b = mlo // i.e., quickSortByFreq(data, a, mlo) - } - } - if b-a > 1 { - // Do ShellSort pass with gap 6 - // It could be written in this simplified form cause b-a <= 12 - for i := a + 6; i < b; i++ { - if data[i].freq == data[i-6].freq && data[i].literal < data[i-6].literal || data[i].freq < data[i-6].freq { - data[i], data[i-6] = data[i-6], data[i] - } - } - insertionSortByFreq(data, a, b) - } -} - -func doPivotByFreq(data []literalNode, lo, hi int) (midlo, midhi int) { - m := int(uint(lo+hi) >> 1) // Written like this to avoid integer overflow. - if hi-lo > 40 { - // Tukey's ``Ninther,'' median of three medians of three. - s := (hi - lo) / 8 - medianOfThreeSortByFreq(data, lo, lo+s, lo+2*s) - medianOfThreeSortByFreq(data, m, m-s, m+s) - medianOfThreeSortByFreq(data, hi-1, hi-1-s, hi-1-2*s) - } - medianOfThreeSortByFreq(data, lo, m, hi-1) - - // Invariants are: - // data[lo] = pivot (set up by ChoosePivot) - // data[lo < i < a] < pivot - // data[a <= i < b] <= pivot - // data[b <= i < c] unexamined - // data[c <= i < hi-1] > pivot - // data[hi-1] >= pivot - pivot := lo - a, c := lo+1, hi-1 - - for ; a < c && (data[a].freq == data[pivot].freq && data[a].literal < data[pivot].literal || data[a].freq < data[pivot].freq); a++ { - } - b := a - for { - for ; b < c && (data[pivot].freq == data[b].freq && data[pivot].literal > data[b].literal || data[pivot].freq > data[b].freq); b++ { // data[b] <= pivot - } - for ; b < c && (data[pivot].freq == data[c-1].freq && data[pivot].literal < data[c-1].literal || data[pivot].freq < data[c-1].freq); c-- { // data[c-1] > pivot - } - if b >= c { - break - } - // data[b] > pivot; data[c-1] <= pivot - data[b], data[c-1] = data[c-1], data[b] - b++ - c-- - } - // If hi-c<3 then there are duplicates (by property of median of nine). 
- // Let's be a bit more conservative, and set border to 5. - protect := hi-c < 5 - if !protect && hi-c < (hi-lo)/4 { - // Lets test some points for equality to pivot - dups := 0 - if data[pivot].freq == data[hi-1].freq && data[pivot].literal > data[hi-1].literal || data[pivot].freq > data[hi-1].freq { // data[hi-1] = pivot - data[c], data[hi-1] = data[hi-1], data[c] - c++ - dups++ - } - if data[b-1].freq == data[pivot].freq && data[b-1].literal > data[pivot].literal || data[b-1].freq > data[pivot].freq { // data[b-1] = pivot - b-- - dups++ - } - // m-lo = (hi-lo)/2 > 6 - // b-lo > (hi-lo)*3/4-1 > 8 - // ==> m < b ==> data[m] <= pivot - if data[m].freq == data[pivot].freq && data[m].literal > data[pivot].literal || data[m].freq > data[pivot].freq { // data[m] = pivot - data[m], data[b-1] = data[b-1], data[m] - b-- - dups++ - } - // if at least 2 points are equal to pivot, assume skewed distribution - protect = dups > 1 - } - if protect { - // Protect against a lot of duplicates - // Add invariant: - // data[a <= i < b] unexamined - // data[b <= i < c] = pivot - for { - for ; a < b && (data[b-1].freq == data[pivot].freq && data[b-1].literal > data[pivot].literal || data[b-1].freq > data[pivot].freq); b-- { // data[b] == pivot - } - for ; a < b && (data[a].freq == data[pivot].freq && data[a].literal < data[pivot].literal || data[a].freq < data[pivot].freq); a++ { // data[a] < pivot - } - if a >= b { - break - } - // data[a] == pivot; data[b-1] < pivot - data[a], data[b-1] = data[b-1], data[a] - a++ - b-- - } - } - // Swap pivot into middle - data[pivot], data[b-1] = data[b-1], data[pivot] - return b - 1, c -} - -// Insertion sort -func insertionSortByFreq(data []literalNode, a, b int) { - for i := a + 1; i < b; i++ { - for j := i; j > a && (data[j].freq == data[j-1].freq && data[j].literal < data[j-1].literal || data[j].freq < data[j-1].freq); j-- { - data[j], data[j-1] = data[j-1], data[j] - } - } -} - -// quickSortByFreq, loosely following Bentley and McIlroy, -// 
``Engineering a Sort Function,'' SP&E November 1993. - -// medianOfThreeSortByFreq moves the median of the three values data[m0], data[m1], data[m2] into data[m1]. -func medianOfThreeSortByFreq(data []literalNode, m1, m0, m2 int) { - // sort 3 elements - if data[m1].freq == data[m0].freq && data[m1].literal < data[m0].literal || data[m1].freq < data[m0].freq { - data[m1], data[m0] = data[m0], data[m1] - } - // data[m0] <= data[m1] - if data[m2].freq == data[m1].freq && data[m2].literal < data[m1].literal || data[m2].freq < data[m1].freq { - data[m2], data[m1] = data[m1], data[m2] - // data[m0] <= data[m2] && data[m1] < data[m2] - if data[m1].freq == data[m0].freq && data[m1].literal < data[m0].literal || data[m1].freq < data[m0].freq { - data[m1], data[m0] = data[m0], data[m1] - } - } - // now data[m0] <= data[m1] <= data[m2] -} diff --git a/vendor/github.com/klauspost/compress/flate/huffman_sortByLiteral.go b/vendor/github.com/klauspost/compress/flate/huffman_sortByLiteral.go deleted file mode 100644 index 93f1aea109..0000000000 --- a/vendor/github.com/klauspost/compress/flate/huffman_sortByLiteral.go +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package flate - -// Sort sorts data. -// It makes one call to data.Len to determine n, and O(n*log(n)) calls to -// data.Less and data.Swap. The sort is not guaranteed to be stable. -func sortByLiteral(data []literalNode) { - n := len(data) - quickSort(data, 0, n, maxDepth(n)) -} - -func quickSort(data []literalNode, a, b, maxDepth int) { - for b-a > 12 { // Use ShellSort for slices <= 12 elements - if maxDepth == 0 { - heapSort(data, a, b) - return - } - maxDepth-- - mlo, mhi := doPivot(data, a, b) - // Avoiding recursion on the larger subproblem guarantees - // a stack depth of at most lg(b-a). 
- if mlo-a < b-mhi { - quickSort(data, a, mlo, maxDepth) - a = mhi // i.e., quickSort(data, mhi, b) - } else { - quickSort(data, mhi, b, maxDepth) - b = mlo // i.e., quickSort(data, a, mlo) - } - } - if b-a > 1 { - // Do ShellSort pass with gap 6 - // It could be written in this simplified form cause b-a <= 12 - for i := a + 6; i < b; i++ { - if data[i].literal < data[i-6].literal { - data[i], data[i-6] = data[i-6], data[i] - } - } - insertionSort(data, a, b) - } -} -func heapSort(data []literalNode, a, b int) { - first := a - lo := 0 - hi := b - a - - // Build heap with greatest element at top. - for i := (hi - 1) / 2; i >= 0; i-- { - siftDown(data, i, hi, first) - } - - // Pop elements, largest first, into end of data. - for i := hi - 1; i >= 0; i-- { - data[first], data[first+i] = data[first+i], data[first] - siftDown(data, lo, i, first) - } -} - -// siftDown implements the heap property on data[lo, hi). -// first is an offset into the array where the root of the heap lies. -func siftDown(data []literalNode, lo, hi, first int) { - root := lo - for { - child := 2*root + 1 - if child >= hi { - break - } - if child+1 < hi && data[first+child].literal < data[first+child+1].literal { - child++ - } - if data[first+root].literal > data[first+child].literal { - return - } - data[first+root], data[first+child] = data[first+child], data[first+root] - root = child - } -} -func doPivot(data []literalNode, lo, hi int) (midlo, midhi int) { - m := int(uint(lo+hi) >> 1) // Written like this to avoid integer overflow. - if hi-lo > 40 { - // Tukey's ``Ninther,'' median of three medians of three. 
- s := (hi - lo) / 8 - medianOfThree(data, lo, lo+s, lo+2*s) - medianOfThree(data, m, m-s, m+s) - medianOfThree(data, hi-1, hi-1-s, hi-1-2*s) - } - medianOfThree(data, lo, m, hi-1) - - // Invariants are: - // data[lo] = pivot (set up by ChoosePivot) - // data[lo < i < a] < pivot - // data[a <= i < b] <= pivot - // data[b <= i < c] unexamined - // data[c <= i < hi-1] > pivot - // data[hi-1] >= pivot - pivot := lo - a, c := lo+1, hi-1 - - for ; a < c && data[a].literal < data[pivot].literal; a++ { - } - b := a - for { - for ; b < c && data[pivot].literal > data[b].literal; b++ { // data[b] <= pivot - } - for ; b < c && data[pivot].literal < data[c-1].literal; c-- { // data[c-1] > pivot - } - if b >= c { - break - } - // data[b] > pivot; data[c-1] <= pivot - data[b], data[c-1] = data[c-1], data[b] - b++ - c-- - } - // If hi-c<3 then there are duplicates (by property of median of nine). - // Let's be a bit more conservative, and set border to 5. - protect := hi-c < 5 - if !protect && hi-c < (hi-lo)/4 { - // Lets test some points for equality to pivot - dups := 0 - if data[pivot].literal > data[hi-1].literal { // data[hi-1] = pivot - data[c], data[hi-1] = data[hi-1], data[c] - c++ - dups++ - } - if data[b-1].literal > data[pivot].literal { // data[b-1] = pivot - b-- - dups++ - } - // m-lo = (hi-lo)/2 > 6 - // b-lo > (hi-lo)*3/4-1 > 8 - // ==> m < b ==> data[m] <= pivot - if data[m].literal > data[pivot].literal { // data[m] = pivot - data[m], data[b-1] = data[b-1], data[m] - b-- - dups++ - } - // if at least 2 points are equal to pivot, assume skewed distribution - protect = dups > 1 - } - if protect { - // Protect against a lot of duplicates - // Add invariant: - // data[a <= i < b] unexamined - // data[b <= i < c] = pivot - for { - for ; a < b && data[b-1].literal > data[pivot].literal; b-- { // data[b] == pivot - } - for ; a < b && data[a].literal < data[pivot].literal; a++ { // data[a] < pivot - } - if a >= b { - break - } - // data[a] == pivot; data[b-1] < pivot - 
data[a], data[b-1] = data[b-1], data[a] - a++ - b-- - } - } - // Swap pivot into middle - data[pivot], data[b-1] = data[b-1], data[pivot] - return b - 1, c -} - -// Insertion sort -func insertionSort(data []literalNode, a, b int) { - for i := a + 1; i < b; i++ { - for j := i; j > a && data[j].literal < data[j-1].literal; j-- { - data[j], data[j-1] = data[j-1], data[j] - } - } -} - -// maxDepth returns a threshold at which quicksort should switch -// to heapsort. It returns 2*ceil(lg(n+1)). -func maxDepth(n int) int { - var depth int - for i := n; i > 0; i >>= 1 { - depth++ - } - return depth * 2 -} - -// medianOfThree moves the median of the three values data[m0], data[m1], data[m2] into data[m1]. -func medianOfThree(data []literalNode, m1, m0, m2 int) { - // sort 3 elements - if data[m1].literal < data[m0].literal { - data[m1], data[m0] = data[m0], data[m1] - } - // data[m0] <= data[m1] - if data[m2].literal < data[m1].literal { - data[m2], data[m1] = data[m1], data[m2] - // data[m0] <= data[m2] && data[m1] < data[m2] - if data[m1].literal < data[m0].literal { - data[m1], data[m0] = data[m0], data[m1] - } - } - // now data[m0] <= data[m1] <= data[m2] -} diff --git a/vendor/github.com/klauspost/compress/flate/inflate.go b/vendor/github.com/klauspost/compress/flate/inflate.go deleted file mode 100644 index 414c0bea9f..0000000000 --- a/vendor/github.com/klauspost/compress/flate/inflate.go +++ /dev/null @@ -1,793 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package flate implements the DEFLATE compressed data format, described in -// RFC 1951. The gzip and zlib packages implement access to DEFLATE-based file -// formats. 
-package flate - -import ( - "bufio" - "compress/flate" - "fmt" - "io" - "math/bits" - "sync" -) - -const ( - maxCodeLen = 16 // max length of Huffman code - maxCodeLenMask = 15 // mask for max length of Huffman code - // The next three numbers come from the RFC section 3.2.7, with the - // additional proviso in section 3.2.5 which implies that distance codes - // 30 and 31 should never occur in compressed data. - maxNumLit = 286 - maxNumDist = 30 - numCodes = 19 // number of codes in Huffman meta-code - - debugDecode = false -) - -// Value of length - 3 and extra bits. -type lengthExtra struct { - length, extra uint8 -} - -var decCodeToLen = [32]lengthExtra{{length: 0x0, extra: 0x0}, {length: 0x1, extra: 0x0}, {length: 0x2, extra: 0x0}, {length: 0x3, extra: 0x0}, {length: 0x4, extra: 0x0}, {length: 0x5, extra: 0x0}, {length: 0x6, extra: 0x0}, {length: 0x7, extra: 0x0}, {length: 0x8, extra: 0x1}, {length: 0xa, extra: 0x1}, {length: 0xc, extra: 0x1}, {length: 0xe, extra: 0x1}, {length: 0x10, extra: 0x2}, {length: 0x14, extra: 0x2}, {length: 0x18, extra: 0x2}, {length: 0x1c, extra: 0x2}, {length: 0x20, extra: 0x3}, {length: 0x28, extra: 0x3}, {length: 0x30, extra: 0x3}, {length: 0x38, extra: 0x3}, {length: 0x40, extra: 0x4}, {length: 0x50, extra: 0x4}, {length: 0x60, extra: 0x4}, {length: 0x70, extra: 0x4}, {length: 0x80, extra: 0x5}, {length: 0xa0, extra: 0x5}, {length: 0xc0, extra: 0x5}, {length: 0xe0, extra: 0x5}, {length: 0xff, extra: 0x0}, {length: 0x0, extra: 0x0}, {length: 0x0, extra: 0x0}, {length: 0x0, extra: 0x0}} - -var bitMask32 = [32]uint32{ - 0, 1, 3, 7, 0xF, 0x1F, 0x3F, 0x7F, 0xFF, - 0x1FF, 0x3FF, 0x7FF, 0xFFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF, - 0x1ffff, 0x3ffff, 0x7FFFF, 0xfFFFF, 0x1fFFFF, 0x3fFFFF, 0x7fFFFF, 0xffFFFF, - 0x1ffFFFF, 0x3ffFFFF, 0x7ffFFFF, 0xfffFFFF, 0x1fffFFFF, 0x3fffFFFF, 0x7fffFFFF, -} // up to 32 bits - -// Initialize the fixedHuffmanDecoder only once upon first use. 
-var fixedOnce sync.Once -var fixedHuffmanDecoder huffmanDecoder - -// A CorruptInputError reports the presence of corrupt input at a given offset. -type CorruptInputError = flate.CorruptInputError - -// An InternalError reports an error in the flate code itself. -type InternalError string - -func (e InternalError) Error() string { return "flate: internal error: " + string(e) } - -// A ReadError reports an error encountered while reading input. -// -// Deprecated: No longer returned. -type ReadError = flate.ReadError - -// A WriteError reports an error encountered while writing output. -// -// Deprecated: No longer returned. -type WriteError = flate.WriteError - -// Resetter resets a ReadCloser returned by NewReader or NewReaderDict to -// to switch to a new underlying Reader. This permits reusing a ReadCloser -// instead of allocating a new one. -type Resetter interface { - // Reset discards any buffered data and resets the Resetter as if it was - // newly initialized with the given reader. - Reset(r io.Reader, dict []byte) error -} - -// The data structure for decoding Huffman tables is based on that of -// zlib. There is a lookup table of a fixed bit width (huffmanChunkBits), -// For codes smaller than the table width, there are multiple entries -// (each combination of trailing bits has the same value). For codes -// larger than the table width, the table contains a link to an overflow -// table. The width of each entry in the link table is the maximum code -// size minus the chunk width. -// -// Note that you can do a lookup in the table even without all bits -// filled. Since the extra bits are zero, and the DEFLATE Huffman codes -// have the property that shorter codes come before longer ones, the -// bit length estimate in the result is a lower bound on the actual -// number of bits. 
-// -// See the following: -// http://www.gzip.org/algorithm.txt - -// chunk & 15 is number of bits -// chunk >> 4 is value, including table link - -const ( - huffmanChunkBits = 9 - huffmanNumChunks = 1 << huffmanChunkBits - huffmanCountMask = 15 - huffmanValueShift = 4 -) - -type huffmanDecoder struct { - maxRead int // the maximum number of bits we can read and not overread - chunks *[huffmanNumChunks]uint16 // chunks as described above - links [][]uint16 // overflow links - linkMask uint32 // mask the width of the link table -} - -// Initialize Huffman decoding tables from array of code lengths. -// Following this function, h is guaranteed to be initialized into a complete -// tree (i.e., neither over-subscribed nor under-subscribed). The exception is a -// degenerate case where the tree has only a single symbol with length 1. Empty -// trees are permitted. -func (h *huffmanDecoder) init(lengths []int) bool { - // Sanity enables additional runtime tests during Huffman - // table construction. It's intended to be used during - // development to supplement the currently ad-hoc unit tests. - const sanity = false - - if h.chunks == nil { - h.chunks = &[huffmanNumChunks]uint16{} - } - if h.maxRead != 0 { - *h = huffmanDecoder{chunks: h.chunks, links: h.links} - } - - // Count number of codes of each length, - // compute maxRead and max length. - var count [maxCodeLen]int - var min, max int - for _, n := range lengths { - if n == 0 { - continue - } - if min == 0 || n < min { - min = n - } - if n > max { - max = n - } - count[n&maxCodeLenMask]++ - } - - // Empty tree. The decompressor.huffSym function will fail later if the tree - // is used. Technically, an empty tree is only valid for the HDIST tree and - // not the HCLEN and HLIT tree. However, a stream with an empty HCLEN tree - // is guaranteed to fail since it will attempt to use the tree to decode the - // codes for the HLIT and HDIST trees. 
Similarly, an empty HLIT tree is - // guaranteed to fail later since the compressed data section must be - // composed of at least one symbol (the end-of-block marker). - if max == 0 { - return true - } - - code := 0 - var nextcode [maxCodeLen]int - for i := min; i <= max; i++ { - code <<= 1 - nextcode[i&maxCodeLenMask] = code - code += count[i&maxCodeLenMask] - } - - // Check that the coding is complete (i.e., that we've - // assigned all 2-to-the-max possible bit sequences). - // Exception: To be compatible with zlib, we also need to - // accept degenerate single-code codings. See also - // TestDegenerateHuffmanCoding. - if code != 1< huffmanChunkBits { - numLinks := 1 << (uint(max) - huffmanChunkBits) - h.linkMask = uint32(numLinks - 1) - - // create link tables - link := nextcode[huffmanChunkBits+1] >> 1 - if cap(h.links) < huffmanNumChunks-link { - h.links = make([][]uint16, huffmanNumChunks-link) - } else { - h.links = h.links[:huffmanNumChunks-link] - } - for j := uint(link); j < huffmanNumChunks; j++ { - reverse := int(bits.Reverse16(uint16(j))) - reverse >>= uint(16 - huffmanChunkBits) - off := j - uint(link) - if sanity && h.chunks[reverse] != 0 { - panic("impossible: overwriting existing chunk") - } - h.chunks[reverse] = uint16(off<>= uint(16 - n) - if n <= huffmanChunkBits { - for off := reverse; off < len(h.chunks); off += 1 << uint(n) { - // We should never need to overwrite - // an existing chunk. Also, 0 is - // never a valid chunk, because the - // lower 4 "count" bits should be - // between 1 and 15. - if sanity && h.chunks[off] != 0 { - panic("impossible: overwriting existing chunk") - } - h.chunks[off] = chunk - } - } else { - j := reverse & (huffmanNumChunks - 1) - if sanity && h.chunks[j]&huffmanCountMask != huffmanChunkBits+1 { - // Longer codes should have been - // associated with a link table above. 
- panic("impossible: not an indirect chunk") - } - value := h.chunks[j] >> huffmanValueShift - linktab := h.links[value] - reverse >>= huffmanChunkBits - for off := reverse; off < len(linktab); off += 1 << uint(n-huffmanChunkBits) { - if sanity && linktab[off] != 0 { - panic("impossible: overwriting existing chunk") - } - linktab[off] = chunk - } - } - } - - if sanity { - // Above we've sanity checked that we never overwrote - // an existing entry. Here we additionally check that - // we filled the tables completely. - for i, chunk := range h.chunks { - if chunk == 0 { - // As an exception, in the degenerate - // single-code case, we allow odd - // chunks to be missing. - if code == 1 && i%2 == 1 { - continue - } - panic("impossible: missing chunk") - } - } - for _, linktab := range h.links { - for _, chunk := range linktab { - if chunk == 0 { - panic("impossible: missing chunk") - } - } - } - } - - return true -} - -// The actual read interface needed by NewReader. -// If the passed in io.Reader does not also have ReadByte, -// the NewReader will introduce its own buffering. -type Reader interface { - io.Reader - io.ByteReader -} - -// Decompress state. -type decompressor struct { - // Input source. - r Reader - roffset int64 - - // Huffman decoders for literal/length, distance. - h1, h2 huffmanDecoder - - // Length arrays used to define Huffman codes. - bits *[maxNumLit + maxNumDist]int - codebits *[numCodes]int - - // Output history, buffer. - dict dictDecoder - - // Next step in the decompression, - // and decompression state. - step func(*decompressor) - stepState int - err error - toRead []byte - hl, hd *huffmanDecoder - copyLen int - copyDist int - - // Temporary buffer (avoids repeated allocation). - buf [4]byte - - // Input bits, in top of b. 
- b uint32 - - nb uint - final bool -} - -func (f *decompressor) nextBlock() { - for f.nb < 1+2 { - if f.err = f.moreBits(); f.err != nil { - return - } - } - f.final = f.b&1 == 1 - f.b >>= 1 - typ := f.b & 3 - f.b >>= 2 - f.nb -= 1 + 2 - switch typ { - case 0: - f.dataBlock() - if debugDecode { - fmt.Println("stored block") - } - case 1: - // compressed, fixed Huffman tables - f.hl = &fixedHuffmanDecoder - f.hd = nil - f.huffmanBlockDecoder()() - if debugDecode { - fmt.Println("predefinied huffman block") - } - case 2: - // compressed, dynamic Huffman tables - if f.err = f.readHuffman(); f.err != nil { - break - } - f.hl = &f.h1 - f.hd = &f.h2 - f.huffmanBlockDecoder()() - if debugDecode { - fmt.Println("dynamic huffman block") - } - default: - // 3 is reserved. - if debugDecode { - fmt.Println("reserved data block encountered") - } - f.err = CorruptInputError(f.roffset) - } -} - -func (f *decompressor) Read(b []byte) (int, error) { - for { - if len(f.toRead) > 0 { - n := copy(b, f.toRead) - f.toRead = f.toRead[n:] - if len(f.toRead) == 0 { - return n, f.err - } - return n, nil - } - if f.err != nil { - return 0, f.err - } - f.step(f) - if f.err != nil && len(f.toRead) == 0 { - f.toRead = f.dict.readFlush() // Flush what's left in case of error - } - } -} - -// Support the io.WriteTo interface for io.Copy and friends. 
-func (f *decompressor) WriteTo(w io.Writer) (int64, error) { - total := int64(0) - flushed := false - for { - if len(f.toRead) > 0 { - n, err := w.Write(f.toRead) - total += int64(n) - if err != nil { - f.err = err - return total, err - } - if n != len(f.toRead) { - return total, io.ErrShortWrite - } - f.toRead = f.toRead[:0] - } - if f.err != nil && flushed { - if f.err == io.EOF { - return total, nil - } - return total, f.err - } - if f.err == nil { - f.step(f) - } - if len(f.toRead) == 0 && f.err != nil && !flushed { - f.toRead = f.dict.readFlush() // Flush what's left in case of error - flushed = true - } - } -} - -func (f *decompressor) Close() error { - if f.err == io.EOF { - return nil - } - return f.err -} - -// RFC 1951 section 3.2.7. -// Compression with dynamic Huffman codes - -var codeOrder = [...]int{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15} - -func (f *decompressor) readHuffman() error { - // HLIT[5], HDIST[5], HCLEN[4]. - for f.nb < 5+5+4 { - if err := f.moreBits(); err != nil { - return err - } - } - nlit := int(f.b&0x1F) + 257 - if nlit > maxNumLit { - if debugDecode { - fmt.Println("nlit > maxNumLit", nlit) - } - return CorruptInputError(f.roffset) - } - f.b >>= 5 - ndist := int(f.b&0x1F) + 1 - if ndist > maxNumDist { - if debugDecode { - fmt.Println("ndist > maxNumDist", ndist) - } - return CorruptInputError(f.roffset) - } - f.b >>= 5 - nclen := int(f.b&0xF) + 4 - // numCodes is 19, so nclen is always valid. - f.b >>= 4 - f.nb -= 5 + 5 + 4 - - // (HCLEN+4)*3 bits: code lengths in the magic codeOrder order. 
- for i := 0; i < nclen; i++ { - for f.nb < 3 { - if err := f.moreBits(); err != nil { - return err - } - } - f.codebits[codeOrder[i]] = int(f.b & 0x7) - f.b >>= 3 - f.nb -= 3 - } - for i := nclen; i < len(codeOrder); i++ { - f.codebits[codeOrder[i]] = 0 - } - if !f.h1.init(f.codebits[0:]) { - if debugDecode { - fmt.Println("init codebits failed") - } - return CorruptInputError(f.roffset) - } - - // HLIT + 257 code lengths, HDIST + 1 code lengths, - // using the code length Huffman code. - for i, n := 0, nlit+ndist; i < n; { - x, err := f.huffSym(&f.h1) - if err != nil { - return err - } - if x < 16 { - // Actual length. - f.bits[i] = x - i++ - continue - } - // Repeat previous length or zero. - var rep int - var nb uint - var b int - switch x { - default: - return InternalError("unexpected length code") - case 16: - rep = 3 - nb = 2 - if i == 0 { - if debugDecode { - fmt.Println("i==0") - } - return CorruptInputError(f.roffset) - } - b = f.bits[i-1] - case 17: - rep = 3 - nb = 3 - b = 0 - case 18: - rep = 11 - nb = 7 - b = 0 - } - for f.nb < nb { - if err := f.moreBits(); err != nil { - if debugDecode { - fmt.Println("morebits:", err) - } - return err - } - } - rep += int(f.b & uint32(1<<(nb®SizeMaskUint32)-1)) - f.b >>= nb & regSizeMaskUint32 - f.nb -= nb - if i+rep > n { - if debugDecode { - fmt.Println("i+rep > n", i, rep, n) - } - return CorruptInputError(f.roffset) - } - for j := 0; j < rep; j++ { - f.bits[i] = b - i++ - } - } - - if !f.h1.init(f.bits[0:nlit]) || !f.h2.init(f.bits[nlit:nlit+ndist]) { - if debugDecode { - fmt.Println("init2 failed") - } - return CorruptInputError(f.roffset) - } - - // As an optimization, we can initialize the maxRead bits to read at a time - // for the HLIT tree to the length of the EOB marker since we know that - // every block must terminate with one. This preserves the property that - // we never read any extra bytes after the end of the DEFLATE stream. 
- if f.h1.maxRead < f.bits[endBlockMarker] { - f.h1.maxRead = f.bits[endBlockMarker] - } - if !f.final { - // If not the final block, the smallest block possible is - // a predefined table, BTYPE=01, with a single EOB marker. - // This will take up 3 + 7 bits. - f.h1.maxRead += 10 - } - - return nil -} - -// Copy a single uncompressed data block from input to output. -func (f *decompressor) dataBlock() { - // Uncompressed. - // Discard current half-byte. - left := (f.nb) & 7 - f.nb -= left - f.b >>= left - - offBytes := f.nb >> 3 - // Unfilled values will be overwritten. - f.buf[0] = uint8(f.b) - f.buf[1] = uint8(f.b >> 8) - f.buf[2] = uint8(f.b >> 16) - f.buf[3] = uint8(f.b >> 24) - - f.roffset += int64(offBytes) - f.nb, f.b = 0, 0 - - // Length then ones-complement of length. - nr, err := io.ReadFull(f.r, f.buf[offBytes:4]) - f.roffset += int64(nr) - if err != nil { - f.err = noEOF(err) - return - } - n := uint16(f.buf[0]) | uint16(f.buf[1])<<8 - nn := uint16(f.buf[2]) | uint16(f.buf[3])<<8 - if nn != ^n { - if debugDecode { - ncomp := ^n - fmt.Println("uint16(nn) != uint16(^n)", nn, ncomp) - } - f.err = CorruptInputError(f.roffset) - return - } - - if n == 0 { - f.toRead = f.dict.readFlush() - f.finishBlock() - return - } - - f.copyLen = int(n) - f.copyData() -} - -// copyData copies f.copyLen bytes from the underlying reader into f.hist. -// It pauses for reads when f.hist is full. 
-func (f *decompressor) copyData() { - buf := f.dict.writeSlice() - if len(buf) > f.copyLen { - buf = buf[:f.copyLen] - } - - cnt, err := io.ReadFull(f.r, buf) - f.roffset += int64(cnt) - f.copyLen -= cnt - f.dict.writeMark(cnt) - if err != nil { - f.err = noEOF(err) - return - } - - if f.dict.availWrite() == 0 || f.copyLen > 0 { - f.toRead = f.dict.readFlush() - f.step = (*decompressor).copyData - return - } - f.finishBlock() -} - -func (f *decompressor) finishBlock() { - if f.final { - if f.dict.availRead() > 0 { - f.toRead = f.dict.readFlush() - } - f.err = io.EOF - } - f.step = (*decompressor).nextBlock -} - -// noEOF returns err, unless err == io.EOF, in which case it returns io.ErrUnexpectedEOF. -func noEOF(e error) error { - if e == io.EOF { - return io.ErrUnexpectedEOF - } - return e -} - -func (f *decompressor) moreBits() error { - c, err := f.r.ReadByte() - if err != nil { - return noEOF(err) - } - f.roffset++ - f.b |= uint32(c) << (f.nb & regSizeMaskUint32) - f.nb += 8 - return nil -} - -// Read the next Huffman-encoded symbol from f according to h. -func (f *decompressor) huffSym(h *huffmanDecoder) (int, error) { - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. - n := uint(h.maxRead) - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. 
- nb, b := f.nb, f.b - for { - for nb < n { - c, err := f.r.ReadByte() - if err != nil { - f.b = b - f.nb = nb - return 0, noEOF(err) - } - f.roffset++ - b |= uint32(c) << (nb & regSizeMaskUint32) - nb += 8 - } - chunk := h.chunks[b&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = h.links[chunk>>huffmanValueShift][(b>>huffmanChunkBits)&h.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= nb { - if n == 0 { - f.b = b - f.nb = nb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return 0, f.err - } - f.b = b >> (n & regSizeMaskUint32) - f.nb = nb - n - return int(chunk >> huffmanValueShift), nil - } - } -} - -func makeReader(r io.Reader) Reader { - if rr, ok := r.(Reader); ok { - return rr - } - return bufio.NewReader(r) -} - -func fixedHuffmanDecoderInit() { - fixedOnce.Do(func() { - // These come from the RFC section 3.2.6. - var bits [288]int - for i := 0; i < 144; i++ { - bits[i] = 8 - } - for i := 144; i < 256; i++ { - bits[i] = 9 - } - for i := 256; i < 280; i++ { - bits[i] = 7 - } - for i := 280; i < 288; i++ { - bits[i] = 8 - } - fixedHuffmanDecoder.init(bits[:]) - }) -} - -func (f *decompressor) Reset(r io.Reader, dict []byte) error { - *f = decompressor{ - r: makeReader(r), - bits: f.bits, - codebits: f.codebits, - h1: f.h1, - h2: f.h2, - dict: f.dict, - step: (*decompressor).nextBlock, - } - f.dict.init(maxMatchOffset, dict) - return nil -} - -// NewReader returns a new ReadCloser that can be used -// to read the uncompressed version of r. -// If r does not also implement io.ByteReader, -// the decompressor may read more data than necessary from r. -// It is the caller's responsibility to call Close on the ReadCloser -// when finished reading. -// -// The ReadCloser returned by NewReader also implements Resetter. 
-func NewReader(r io.Reader) io.ReadCloser { - fixedHuffmanDecoderInit() - - var f decompressor - f.r = makeReader(r) - f.bits = new([maxNumLit + maxNumDist]int) - f.codebits = new([numCodes]int) - f.step = (*decompressor).nextBlock - f.dict.init(maxMatchOffset, nil) - return &f -} - -// NewReaderDict is like NewReader but initializes the reader -// with a preset dictionary. The returned Reader behaves as if -// the uncompressed data stream started with the given dictionary, -// which has already been read. NewReaderDict is typically used -// to read data compressed by NewWriterDict. -// -// The ReadCloser returned by NewReader also implements Resetter. -func NewReaderDict(r io.Reader, dict []byte) io.ReadCloser { - fixedHuffmanDecoderInit() - - var f decompressor - f.r = makeReader(r) - f.bits = new([maxNumLit + maxNumDist]int) - f.codebits = new([numCodes]int) - f.step = (*decompressor).nextBlock - f.dict.init(maxMatchOffset, dict) - return &f -} diff --git a/vendor/github.com/klauspost/compress/flate/inflate_gen.go b/vendor/github.com/klauspost/compress/flate/inflate_gen.go deleted file mode 100644 index 61342b6b88..0000000000 --- a/vendor/github.com/klauspost/compress/flate/inflate_gen.go +++ /dev/null @@ -1,1283 +0,0 @@ -// Code generated by go generate gen_inflate.go. DO NOT EDIT. - -package flate - -import ( - "bufio" - "bytes" - "fmt" - "math/bits" - "strings" -) - -// Decode a single Huffman block from f. -// hl and hd are the Huffman states for the lit/length values -// and the distance values, respectively. If hd == nil, using the -// fixed distance encoding associated with fixed Huffman blocks. -func (f *decompressor) huffmanBytesBuffer() { - const ( - stateInit = iota // Zero value must be stateInit - stateDict - ) - fr := f.r.(*bytes.Buffer) - - // Optimization. 
Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - fnb, fb, dict := f.nb, f.b, &f.dict - - switch f.stepState { - case stateInit: - goto readLiteral - case stateDict: - goto copyHistory - } - -readLiteral: - // Read literal and/or (length, distance) according to RFC section 3.2.3. - { - var v int - { - // Inlined v, err := f.huffSym(f.hl) - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. - n := uint(f.hl.maxRead) - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hl.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - v = int(chunk >> huffmanValueShift) - break - } - } - } - - var length int - switch { - case v < 256: - dict.writeByte(byte(v)) - if dict.availWrite() == 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanBytesBuffer - f.stepState = stateInit - f.b, f.nb = fb, fnb - return - } - goto readLiteral - case v == 256: - f.b, f.nb = fb, fnb - f.finishBlock() - return - // otherwise, reference to older data - case v < 265: - length = v - (257 - 3) - case v < maxNumLit: - val := decCodeToLen[(v - 257)] - length = int(val.length) + 3 - n := 
uint(val.extra) - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits n>0:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - length += int(fb & bitMask32[n]) - fb >>= n & regSizeMaskUint32 - fnb -= n - default: - if debugDecode { - fmt.Println(v, ">= maxNumLit") - } - f.err = CorruptInputError(f.roffset) - f.b, f.nb = fb, fnb - return - } - - var dist uint32 - if f.hd == nil { - for fnb < 5 { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb<5:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3))) - fb >>= 5 - fnb -= 5 - } else { - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. - n := uint(f.hd.maxRead) - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. 
- for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hd.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - dist = uint32(chunk >> huffmanValueShift) - break - } - } - } - - switch { - case dist < 4: - dist++ - case dist < maxNumDist: - nb := uint(dist-2) >> 1 - // have 1 bit in bottom of dist, need nb more. - extra := (dist & 1) << (nb & regSizeMaskUint32) - for fnb < nb { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb>= nb & regSizeMaskUint32 - fnb -= nb - dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra - // slower: dist = bitMask32[nb+1] + 2 + extra - default: - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist too big:", dist, maxNumDist) - } - f.err = CorruptInputError(f.roffset) - return - } - - // No check on length; encoding can be prescient. - if dist > uint32(dict.histSize()) { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist > dict.histSize():", dist, dict.histSize()) - } - f.err = CorruptInputError(f.roffset) - return - } - - f.copyLen, f.copyDist = length, int(dist) - goto copyHistory - } - -copyHistory: - // Perform a backwards copy according to RFC section 3.2.3. 
- { - cnt := dict.tryWriteCopy(f.copyDist, f.copyLen) - if cnt == 0 { - cnt = dict.writeCopy(f.copyDist, f.copyLen) - } - f.copyLen -= cnt - - if dict.availWrite() == 0 || f.copyLen > 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanBytesBuffer // We need to continue this work - f.stepState = stateDict - f.b, f.nb = fb, fnb - return - } - goto readLiteral - } - // Not reached -} - -// Decode a single Huffman block from f. -// hl and hd are the Huffman states for the lit/length values -// and the distance values, respectively. If hd == nil, using the -// fixed distance encoding associated with fixed Huffman blocks. -func (f *decompressor) huffmanBytesReader() { - const ( - stateInit = iota // Zero value must be stateInit - stateDict - ) - fr := f.r.(*bytes.Reader) - - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - fnb, fb, dict := f.nb, f.b, &f.dict - - switch f.stepState { - case stateInit: - goto readLiteral - case stateDict: - goto copyHistory - } - -readLiteral: - // Read literal and/or (length, distance) according to RFC section 3.2.3. - { - var v int - { - // Inlined v, err := f.huffSym(f.hl) - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. 
- n := uint(f.hl.maxRead) - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hl.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - v = int(chunk >> huffmanValueShift) - break - } - } - } - - var length int - switch { - case v < 256: - dict.writeByte(byte(v)) - if dict.availWrite() == 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanBytesReader - f.stepState = stateInit - f.b, f.nb = fb, fnb - return - } - goto readLiteral - case v == 256: - f.b, f.nb = fb, fnb - f.finishBlock() - return - // otherwise, reference to older data - case v < 265: - length = v - (257 - 3) - case v < maxNumLit: - val := decCodeToLen[(v - 257)] - length = int(val.length) + 3 - n := uint(val.extra) - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits n>0:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - length += int(fb & bitMask32[n]) - fb >>= n & regSizeMaskUint32 - fnb -= n - default: - if debugDecode { - fmt.Println(v, ">= maxNumLit") - } - f.err = CorruptInputError(f.roffset) - f.b, f.nb = fb, fnb - return - } - - var dist uint32 - if f.hd == nil { - for fnb < 5 { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb<5:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - 
dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3))) - fb >>= 5 - fnb -= 5 - } else { - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. - n := uint(f.hd.maxRead) - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hd.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - dist = uint32(chunk >> huffmanValueShift) - break - } - } - } - - switch { - case dist < 4: - dist++ - case dist < maxNumDist: - nb := uint(dist-2) >> 1 - // have 1 bit in bottom of dist, need nb more. - extra := (dist & 1) << (nb & regSizeMaskUint32) - for fnb < nb { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb>= nb & regSizeMaskUint32 - fnb -= nb - dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra - // slower: dist = bitMask32[nb+1] + 2 + extra - default: - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist too big:", dist, maxNumDist) - } - f.err = CorruptInputError(f.roffset) - return - } - - // No check on length; encoding can be prescient. 
- if dist > uint32(dict.histSize()) { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist > dict.histSize():", dist, dict.histSize()) - } - f.err = CorruptInputError(f.roffset) - return - } - - f.copyLen, f.copyDist = length, int(dist) - goto copyHistory - } - -copyHistory: - // Perform a backwards copy according to RFC section 3.2.3. - { - cnt := dict.tryWriteCopy(f.copyDist, f.copyLen) - if cnt == 0 { - cnt = dict.writeCopy(f.copyDist, f.copyLen) - } - f.copyLen -= cnt - - if dict.availWrite() == 0 || f.copyLen > 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanBytesReader // We need to continue this work - f.stepState = stateDict - f.b, f.nb = fb, fnb - return - } - goto readLiteral - } - // Not reached -} - -// Decode a single Huffman block from f. -// hl and hd are the Huffman states for the lit/length values -// and the distance values, respectively. If hd == nil, using the -// fixed distance encoding associated with fixed Huffman blocks. -func (f *decompressor) huffmanBufioReader() { - const ( - stateInit = iota // Zero value must be stateInit - stateDict - ) - fr := f.r.(*bufio.Reader) - - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - fnb, fb, dict := f.nb, f.b, &f.dict - - switch f.stepState { - case stateInit: - goto readLiteral - case stateDict: - goto copyHistory - } - -readLiteral: - // Read literal and/or (length, distance) according to RFC section 3.2.3. - { - var v int - { - // Inlined v, err := f.huffSym(f.hl) - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. 
- n := uint(f.hl.maxRead) - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hl.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - v = int(chunk >> huffmanValueShift) - break - } - } - } - - var length int - switch { - case v < 256: - dict.writeByte(byte(v)) - if dict.availWrite() == 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanBufioReader - f.stepState = stateInit - f.b, f.nb = fb, fnb - return - } - goto readLiteral - case v == 256: - f.b, f.nb = fb, fnb - f.finishBlock() - return - // otherwise, reference to older data - case v < 265: - length = v - (257 - 3) - case v < maxNumLit: - val := decCodeToLen[(v - 257)] - length = int(val.length) + 3 - n := uint(val.extra) - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits n>0:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - length += int(fb & bitMask32[n]) - fb >>= n & regSizeMaskUint32 - fnb -= n - default: - if debugDecode { - fmt.Println(v, ">= maxNumLit") - } - f.err = CorruptInputError(f.roffset) - f.b, f.nb = fb, fnb - return - } - - var dist uint32 - if f.hd == nil { - for fnb < 5 { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb<5:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - 
dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3))) - fb >>= 5 - fnb -= 5 - } else { - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. - n := uint(f.hd.maxRead) - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hd.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - dist = uint32(chunk >> huffmanValueShift) - break - } - } - } - - switch { - case dist < 4: - dist++ - case dist < maxNumDist: - nb := uint(dist-2) >> 1 - // have 1 bit in bottom of dist, need nb more. - extra := (dist & 1) << (nb & regSizeMaskUint32) - for fnb < nb { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb>= nb & regSizeMaskUint32 - fnb -= nb - dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra - // slower: dist = bitMask32[nb+1] + 2 + extra - default: - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist too big:", dist, maxNumDist) - } - f.err = CorruptInputError(f.roffset) - return - } - - // No check on length; encoding can be prescient. 
- if dist > uint32(dict.histSize()) { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist > dict.histSize():", dist, dict.histSize()) - } - f.err = CorruptInputError(f.roffset) - return - } - - f.copyLen, f.copyDist = length, int(dist) - goto copyHistory - } - -copyHistory: - // Perform a backwards copy according to RFC section 3.2.3. - { - cnt := dict.tryWriteCopy(f.copyDist, f.copyLen) - if cnt == 0 { - cnt = dict.writeCopy(f.copyDist, f.copyLen) - } - f.copyLen -= cnt - - if dict.availWrite() == 0 || f.copyLen > 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanBufioReader // We need to continue this work - f.stepState = stateDict - f.b, f.nb = fb, fnb - return - } - goto readLiteral - } - // Not reached -} - -// Decode a single Huffman block from f. -// hl and hd are the Huffman states for the lit/length values -// and the distance values, respectively. If hd == nil, using the -// fixed distance encoding associated with fixed Huffman blocks. -func (f *decompressor) huffmanStringsReader() { - const ( - stateInit = iota // Zero value must be stateInit - stateDict - ) - fr := f.r.(*strings.Reader) - - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - fnb, fb, dict := f.nb, f.b, &f.dict - - switch f.stepState { - case stateInit: - goto readLiteral - case stateDict: - goto copyHistory - } - -readLiteral: - // Read literal and/or (length, distance) according to RFC section 3.2.3. - { - var v int - { - // Inlined v, err := f.huffSym(f.hl) - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. 
- n := uint(f.hl.maxRead) - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hl.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - v = int(chunk >> huffmanValueShift) - break - } - } - } - - var length int - switch { - case v < 256: - dict.writeByte(byte(v)) - if dict.availWrite() == 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanStringsReader - f.stepState = stateInit - f.b, f.nb = fb, fnb - return - } - goto readLiteral - case v == 256: - f.b, f.nb = fb, fnb - f.finishBlock() - return - // otherwise, reference to older data - case v < 265: - length = v - (257 - 3) - case v < maxNumLit: - val := decCodeToLen[(v - 257)] - length = int(val.length) + 3 - n := uint(val.extra) - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits n>0:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - length += int(fb & bitMask32[n]) - fb >>= n & regSizeMaskUint32 - fnb -= n - default: - if debugDecode { - fmt.Println(v, ">= maxNumLit") - } - f.err = CorruptInputError(f.roffset) - f.b, f.nb = fb, fnb - return - } - - var dist uint32 - if f.hd == nil { - for fnb < 5 { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb<5:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - 
dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3))) - fb >>= 5 - fnb -= 5 - } else { - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. - n := uint(f.hd.maxRead) - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hd.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - dist = uint32(chunk >> huffmanValueShift) - break - } - } - } - - switch { - case dist < 4: - dist++ - case dist < maxNumDist: - nb := uint(dist-2) >> 1 - // have 1 bit in bottom of dist, need nb more. - extra := (dist & 1) << (nb & regSizeMaskUint32) - for fnb < nb { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb>= nb & regSizeMaskUint32 - fnb -= nb - dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra - // slower: dist = bitMask32[nb+1] + 2 + extra - default: - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist too big:", dist, maxNumDist) - } - f.err = CorruptInputError(f.roffset) - return - } - - // No check on length; encoding can be prescient. 
- if dist > uint32(dict.histSize()) { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist > dict.histSize():", dist, dict.histSize()) - } - f.err = CorruptInputError(f.roffset) - return - } - - f.copyLen, f.copyDist = length, int(dist) - goto copyHistory - } - -copyHistory: - // Perform a backwards copy according to RFC section 3.2.3. - { - cnt := dict.tryWriteCopy(f.copyDist, f.copyLen) - if cnt == 0 { - cnt = dict.writeCopy(f.copyDist, f.copyLen) - } - f.copyLen -= cnt - - if dict.availWrite() == 0 || f.copyLen > 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanStringsReader // We need to continue this work - f.stepState = stateDict - f.b, f.nb = fb, fnb - return - } - goto readLiteral - } - // Not reached -} - -// Decode a single Huffman block from f. -// hl and hd are the Huffman states for the lit/length values -// and the distance values, respectively. If hd == nil, using the -// fixed distance encoding associated with fixed Huffman blocks. -func (f *decompressor) huffmanGenericReader() { - const ( - stateInit = iota // Zero value must be stateInit - stateDict - ) - fr := f.r.(Reader) - - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - fnb, fb, dict := f.nb, f.b, &f.dict - - switch f.stepState { - case stateInit: - goto readLiteral - case stateDict: - goto copyHistory - } - -readLiteral: - // Read literal and/or (length, distance) according to RFC section 3.2.3. - { - var v int - { - // Inlined v, err := f.huffSym(f.hl) - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. 
- n := uint(f.hl.maxRead) - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hl.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hl.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hl.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - v = int(chunk >> huffmanValueShift) - break - } - } - } - - var length int - switch { - case v < 256: - dict.writeByte(byte(v)) - if dict.availWrite() == 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanGenericReader - f.stepState = stateInit - f.b, f.nb = fb, fnb - return - } - goto readLiteral - case v == 256: - f.b, f.nb = fb, fnb - f.finishBlock() - return - // otherwise, reference to older data - case v < 265: - length = v - (257 - 3) - case v < maxNumLit: - val := decCodeToLen[(v - 257)] - length = int(val.length) + 3 - n := uint(val.extra) - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits n>0:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - length += int(fb & bitMask32[n]) - fb >>= n & regSizeMaskUint32 - fnb -= n - default: - if debugDecode { - fmt.Println(v, ">= maxNumLit") - } - f.err = CorruptInputError(f.roffset) - f.b, f.nb = fb, fnb - return - } - - var dist uint32 - if f.hd == nil { - for fnb < 5 { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb<5:", err) - } - f.err = err - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - 
dist = uint32(bits.Reverse8(uint8(fb & 0x1F << 3))) - fb >>= 5 - fnb -= 5 - } else { - // Since a huffmanDecoder can be empty or be composed of a degenerate tree - // with single element, huffSym must error on these two edge cases. In both - // cases, the chunks slice will be 0 for the invalid sequence, leading it - // satisfy the n == 0 check below. - n := uint(f.hd.maxRead) - // Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers, - // but is smart enough to keep local variables in registers, so use nb and b, - // inline call to moreBits and reassign b,nb back to f on return. - for { - for fnb < n { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - f.err = noEOF(err) - return - } - f.roffset++ - fb |= uint32(c) << (fnb & regSizeMaskUint32) - fnb += 8 - } - chunk := f.hd.chunks[fb&(huffmanNumChunks-1)] - n = uint(chunk & huffmanCountMask) - if n > huffmanChunkBits { - chunk = f.hd.links[chunk>>huffmanValueShift][(fb>>huffmanChunkBits)&f.hd.linkMask] - n = uint(chunk & huffmanCountMask) - } - if n <= fnb { - if n == 0 { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("huffsym: n==0") - } - f.err = CorruptInputError(f.roffset) - return - } - fb = fb >> (n & regSizeMaskUint32) - fnb = fnb - n - dist = uint32(chunk >> huffmanValueShift) - break - } - } - } - - switch { - case dist < 4: - dist++ - case dist < maxNumDist: - nb := uint(dist-2) >> 1 - // have 1 bit in bottom of dist, need nb more. - extra := (dist & 1) << (nb & regSizeMaskUint32) - for fnb < nb { - c, err := fr.ReadByte() - if err != nil { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("morebits f.nb>= nb & regSizeMaskUint32 - fnb -= nb - dist = 1<<((nb+1)®SizeMaskUint32) + 1 + extra - // slower: dist = bitMask32[nb+1] + 2 + extra - default: - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist too big:", dist, maxNumDist) - } - f.err = CorruptInputError(f.roffset) - return - } - - // No check on length; encoding can be prescient. 
- if dist > uint32(dict.histSize()) { - f.b, f.nb = fb, fnb - if debugDecode { - fmt.Println("dist > dict.histSize():", dist, dict.histSize()) - } - f.err = CorruptInputError(f.roffset) - return - } - - f.copyLen, f.copyDist = length, int(dist) - goto copyHistory - } - -copyHistory: - // Perform a backwards copy according to RFC section 3.2.3. - { - cnt := dict.tryWriteCopy(f.copyDist, f.copyLen) - if cnt == 0 { - cnt = dict.writeCopy(f.copyDist, f.copyLen) - } - f.copyLen -= cnt - - if dict.availWrite() == 0 || f.copyLen > 0 { - f.toRead = dict.readFlush() - f.step = (*decompressor).huffmanGenericReader // We need to continue this work - f.stepState = stateDict - f.b, f.nb = fb, fnb - return - } - goto readLiteral - } - // Not reached -} - -func (f *decompressor) huffmanBlockDecoder() func() { - switch f.r.(type) { - case *bytes.Buffer: - return f.huffmanBytesBuffer - case *bytes.Reader: - return f.huffmanBytesReader - case *bufio.Reader: - return f.huffmanBufioReader - case *strings.Reader: - return f.huffmanStringsReader - case Reader: - return f.huffmanGenericReader - default: - return f.huffmanGenericReader - } -} diff --git a/vendor/github.com/klauspost/compress/flate/level1.go b/vendor/github.com/klauspost/compress/flate/level1.go deleted file mode 100644 index 703b9a89aa..0000000000 --- a/vendor/github.com/klauspost/compress/flate/level1.go +++ /dev/null @@ -1,241 +0,0 @@ -package flate - -import ( - "encoding/binary" - "fmt" - "math/bits" -) - -// fastGen maintains the table for matches, -// and the previous byte block for level 2. -// This is the generic implementation. 
-type fastEncL1 struct { - fastGen - table [tableSize]tableEntry -} - -// EncodeL1 uses a similar algorithm to level 1 -func (e *fastEncL1) Encode(dst *tokens, src []byte) { - const ( - inputMargin = 12 - 1 - minNonLiteralBlockSize = 1 + 1 + inputMargin - hashBytes = 5 - ) - if debugDeflate && e.cur < 0 { - panic(fmt.Sprint("e.cur < 0: ", e.cur)) - } - - // Protect against e.cur wraparound. - for e.cur >= bufferReset { - if len(e.hist) == 0 { - for i := range e.table[:] { - e.table[i] = tableEntry{} - } - e.cur = maxMatchOffset - break - } - // Shift down everything in the table that isn't already too far away. - minOff := e.cur + int32(len(e.hist)) - maxMatchOffset - for i := range e.table[:] { - v := e.table[i].offset - if v <= minOff { - v = 0 - } else { - v = v - e.cur + maxMatchOffset - } - e.table[i].offset = v - } - e.cur = maxMatchOffset - } - - s := e.addBlock(src) - - // This check isn't in the Snappy implementation, but there, the caller - // instead of the callee handles this case. - if len(src) < minNonLiteralBlockSize { - // We do not fill the token table. - // This will be picked up by caller. - dst.n = uint16(len(src)) - return - } - - // Override src - src = e.hist - nextEmit := s - - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := int32(len(src) - inputMargin) - - // nextEmit is where in src the next emitLiteral should start from. 
- cv := load6432(src, s) - - for { - const skipLog = 5 - const doEvery = 2 - - nextS := s - var candidate tableEntry - for { - nextHash := hashLen(cv, tableBits, hashBytes) - candidate = e.table[nextHash] - nextS = s + doEvery + (s-nextEmit)>>skipLog - if nextS > sLimit { - goto emitRemainder - } - - now := load6432(src, nextS) - e.table[nextHash] = tableEntry{offset: s + e.cur} - nextHash = hashLen(now, tableBits, hashBytes) - - offset := s - (candidate.offset - e.cur) - if offset < maxMatchOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) { - e.table[nextHash] = tableEntry{offset: nextS + e.cur} - break - } - - // Do one right away... - cv = now - s = nextS - nextS++ - candidate = e.table[nextHash] - now >>= 8 - e.table[nextHash] = tableEntry{offset: s + e.cur} - - offset = s - (candidate.offset - e.cur) - if offset < maxMatchOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) { - e.table[nextHash] = tableEntry{offset: nextS + e.cur} - break - } - cv = now - s = nextS - } - - // A 4-byte match has been found. We'll later see if more than 4 bytes - // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit - // them as literal bytes. - for { - // Invariant: we have a 4-byte match at s, and no need to emit any - // literal bytes prior to s. - - // Extend the 4-byte match as long as possible. 
- t := candidate.offset - e.cur - var l = int32(4) - if false { - l = e.matchlenLong(s+4, t+4, src) + 4 - } else { - // inlined: - a := src[s+4:] - b := src[t+4:] - for len(a) >= 8 { - if diff := binary.LittleEndian.Uint64(a) ^ binary.LittleEndian.Uint64(b); diff != 0 { - l += int32(bits.TrailingZeros64(diff) >> 3) - break - } - l += 8 - a = a[8:] - b = b[8:] - } - if len(a) < 8 { - b = b[:len(a)] - for i := range a { - if a[i] != b[i] { - break - } - l++ - } - } - } - - // Extend backwards - for t > 0 && s > nextEmit && src[t-1] == src[s-1] { - s-- - t-- - l++ - } - if nextEmit < s { - if false { - emitLiteral(dst, src[nextEmit:s]) - } else { - for _, v := range src[nextEmit:s] { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } - } - } - - // Save the match found - if false { - dst.AddMatchLong(l, uint32(s-t-baseMatchOffset)) - } else { - // Inlined... - xoffset := uint32(s - t - baseMatchOffset) - xlength := l - oc := offsetCode(xoffset) - xoffset |= oc << 16 - for xlength > 0 { - xl := xlength - if xl > 258 { - if xl > 258+baseMatchLength { - xl = 258 - } else { - xl = 258 - baseMatchLength - } - } - xlength -= xl - xl -= baseMatchLength - dst.extraHist[lengthCodes1[uint8(xl)]]++ - dst.offHist[oc]++ - dst.tokens[dst.n] = token(matchType | uint32(xl)<= s { - s = nextS + 1 - } - if s >= sLimit { - // Index first pair after match end. - if int(s+l+8) < len(src) { - cv := load6432(src, s) - e.table[hashLen(cv, tableBits, hashBytes)] = tableEntry{offset: s + e.cur} - } - goto emitRemainder - } - - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-2 and at s. If - // another emitCopy is not our next move, also calculate nextHash - // at s+1. At least on GOARCH=amd64, these three hash calculations - // are faster as one load64 call (with some shifts) instead of - // three load32 calls. 
- x := load6432(src, s-2) - o := e.cur + s - 2 - prevHash := hashLen(x, tableBits, hashBytes) - e.table[prevHash] = tableEntry{offset: o} - x >>= 16 - currHash := hashLen(x, tableBits, hashBytes) - candidate = e.table[currHash] - e.table[currHash] = tableEntry{offset: o + 2} - - offset := s - (candidate.offset - e.cur) - if offset > maxMatchOffset || uint32(x) != load3232(src, candidate.offset-e.cur) { - cv = x >> 8 - s++ - break - } - } - } - -emitRemainder: - if int(nextEmit) < len(src) { - // If nothing was added, don't encode literals. - if dst.n == 0 { - return - } - emitLiteral(dst, src[nextEmit:]) - } -} diff --git a/vendor/github.com/klauspost/compress/flate/level2.go b/vendor/github.com/klauspost/compress/flate/level2.go deleted file mode 100644 index 876dfbe305..0000000000 --- a/vendor/github.com/klauspost/compress/flate/level2.go +++ /dev/null @@ -1,214 +0,0 @@ -package flate - -import "fmt" - -// fastGen maintains the table for matches, -// and the previous byte block for level 2. -// This is the generic implementation. -type fastEncL2 struct { - fastGen - table [bTableSize]tableEntry -} - -// EncodeL2 uses a similar algorithm to level 1, but is capable -// of matching across blocks giving better compression at a small slowdown. -func (e *fastEncL2) Encode(dst *tokens, src []byte) { - const ( - inputMargin = 12 - 1 - minNonLiteralBlockSize = 1 + 1 + inputMargin - hashBytes = 5 - ) - - if debugDeflate && e.cur < 0 { - panic(fmt.Sprint("e.cur < 0: ", e.cur)) - } - - // Protect against e.cur wraparound. - for e.cur >= bufferReset { - if len(e.hist) == 0 { - for i := range e.table[:] { - e.table[i] = tableEntry{} - } - e.cur = maxMatchOffset - break - } - // Shift down everything in the table that isn't already too far away. 
- minOff := e.cur + int32(len(e.hist)) - maxMatchOffset - for i := range e.table[:] { - v := e.table[i].offset - if v <= minOff { - v = 0 - } else { - v = v - e.cur + maxMatchOffset - } - e.table[i].offset = v - } - e.cur = maxMatchOffset - } - - s := e.addBlock(src) - - // This check isn't in the Snappy implementation, but there, the caller - // instead of the callee handles this case. - if len(src) < minNonLiteralBlockSize { - // We do not fill the token table. - // This will be picked up by caller. - dst.n = uint16(len(src)) - return - } - - // Override src - src = e.hist - nextEmit := s - - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := int32(len(src) - inputMargin) - - // nextEmit is where in src the next emitLiteral should start from. - cv := load6432(src, s) - for { - // When should we start skipping if we haven't found matches in a long while. - const skipLog = 5 - const doEvery = 2 - - nextS := s - var candidate tableEntry - for { - nextHash := hashLen(cv, bTableBits, hashBytes) - s = nextS - nextS = s + doEvery + (s-nextEmit)>>skipLog - if nextS > sLimit { - goto emitRemainder - } - candidate = e.table[nextHash] - now := load6432(src, nextS) - e.table[nextHash] = tableEntry{offset: s + e.cur} - nextHash = hashLen(now, bTableBits, hashBytes) - - offset := s - (candidate.offset - e.cur) - if offset < maxMatchOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) { - e.table[nextHash] = tableEntry{offset: nextS + e.cur} - break - } - - // Do one right away... - cv = now - s = nextS - nextS++ - candidate = e.table[nextHash] - now >>= 8 - e.table[nextHash] = tableEntry{offset: s + e.cur} - - offset = s - (candidate.offset - e.cur) - if offset < maxMatchOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) { - break - } - cv = now - } - - // A 4-byte match has been found. 
We'll later see if more than 4 bytes - // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit - // them as literal bytes. - - // Call emitCopy, and then see if another emitCopy could be our next - // move. Repeat until we find no match for the input immediately after - // what was consumed by the last emitCopy call. - // - // If we exit this loop normally then we need to call emitLiteral next, - // though we don't yet know how big the literal will be. We handle that - // by proceeding to the next iteration of the main loop. We also can - // exit this loop via goto if we get close to exhausting the input. - for { - // Invariant: we have a 4-byte match at s, and no need to emit any - // literal bytes prior to s. - - // Extend the 4-byte match as long as possible. - t := candidate.offset - e.cur - l := e.matchlenLong(s+4, t+4, src) + 4 - - // Extend backwards - for t > 0 && s > nextEmit && src[t-1] == src[s-1] { - s-- - t-- - l++ - } - if nextEmit < s { - if false { - emitLiteral(dst, src[nextEmit:s]) - } else { - for _, v := range src[nextEmit:s] { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } - } - } - - dst.AddMatchLong(l, uint32(s-t-baseMatchOffset)) - s += l - nextEmit = s - if nextS >= s { - s = nextS + 1 - } - - if s >= sLimit { - // Index first pair after match end. - if int(s+l+8) < len(src) { - cv := load6432(src, s) - e.table[hashLen(cv, bTableBits, hashBytes)] = tableEntry{offset: s + e.cur} - } - goto emitRemainder - } - - // Store every second hash in-between, but offset by 1. 
- for i := s - l + 2; i < s-5; i += 7 { - x := load6432(src, i) - nextHash := hashLen(x, bTableBits, hashBytes) - e.table[nextHash] = tableEntry{offset: e.cur + i} - // Skip one - x >>= 16 - nextHash = hashLen(x, bTableBits, hashBytes) - e.table[nextHash] = tableEntry{offset: e.cur + i + 2} - // Skip one - x >>= 16 - nextHash = hashLen(x, bTableBits, hashBytes) - e.table[nextHash] = tableEntry{offset: e.cur + i + 4} - } - - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-2 to s. If - // another emitCopy is not our next move, also calculate nextHash - // at s+1. At least on GOARCH=amd64, these three hash calculations - // are faster as one load64 call (with some shifts) instead of - // three load32 calls. - x := load6432(src, s-2) - o := e.cur + s - 2 - prevHash := hashLen(x, bTableBits, hashBytes) - prevHash2 := hashLen(x>>8, bTableBits, hashBytes) - e.table[prevHash] = tableEntry{offset: o} - e.table[prevHash2] = tableEntry{offset: o + 1} - currHash := hashLen(x>>16, bTableBits, hashBytes) - candidate = e.table[currHash] - e.table[currHash] = tableEntry{offset: o + 2} - - offset := s - (candidate.offset - e.cur) - if offset > maxMatchOffset || uint32(x>>16) != load3232(src, candidate.offset-e.cur) { - cv = x >> 24 - s++ - break - } - } - } - -emitRemainder: - if int(nextEmit) < len(src) { - // If nothing was added, don't encode literals. - if dst.n == 0 { - return - } - - emitLiteral(dst, src[nextEmit:]) - } -} diff --git a/vendor/github.com/klauspost/compress/flate/level3.go b/vendor/github.com/klauspost/compress/flate/level3.go deleted file mode 100644 index 7aa2b72a12..0000000000 --- a/vendor/github.com/klauspost/compress/flate/level3.go +++ /dev/null @@ -1,241 +0,0 @@ -package flate - -import "fmt" - -// fastEncL3 -type fastEncL3 struct { - fastGen - table [1 << 16]tableEntryPrev -} - -// Encode uses a similar algorithm to level 2, will check up to two candidates. 
-func (e *fastEncL3) Encode(dst *tokens, src []byte) { - const ( - inputMargin = 12 - 1 - minNonLiteralBlockSize = 1 + 1 + inputMargin - tableBits = 16 - tableSize = 1 << tableBits - hashBytes = 5 - ) - - if debugDeflate && e.cur < 0 { - panic(fmt.Sprint("e.cur < 0: ", e.cur)) - } - - // Protect against e.cur wraparound. - for e.cur >= bufferReset { - if len(e.hist) == 0 { - for i := range e.table[:] { - e.table[i] = tableEntryPrev{} - } - e.cur = maxMatchOffset - break - } - // Shift down everything in the table that isn't already too far away. - minOff := e.cur + int32(len(e.hist)) - maxMatchOffset - for i := range e.table[:] { - v := e.table[i] - if v.Cur.offset <= minOff { - v.Cur.offset = 0 - } else { - v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset - } - if v.Prev.offset <= minOff { - v.Prev.offset = 0 - } else { - v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset - } - e.table[i] = v - } - e.cur = maxMatchOffset - } - - s := e.addBlock(src) - - // Skip if too small. - if len(src) < minNonLiteralBlockSize { - // We do not fill the token table. - // This will be picked up by caller. - dst.n = uint16(len(src)) - return - } - - // Override src - src = e.hist - nextEmit := s - - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := int32(len(src) - inputMargin) - - // nextEmit is where in src the next emitLiteral should start from. - cv := load6432(src, s) - for { - const skipLog = 7 - nextS := s - var candidate tableEntry - for { - nextHash := hashLen(cv, tableBits, hashBytes) - s = nextS - nextS = s + 1 + (s-nextEmit)>>skipLog - if nextS > sLimit { - goto emitRemainder - } - candidates := e.table[nextHash] - now := load6432(src, nextS) - - // Safe offset distance until s + 4... 
- minOffset := e.cur + s - (maxMatchOffset - 4) - e.table[nextHash] = tableEntryPrev{Prev: candidates.Cur, Cur: tableEntry{offset: s + e.cur}} - - // Check both candidates - candidate = candidates.Cur - if candidate.offset < minOffset { - cv = now - // Previous will also be invalid, we have nothing. - continue - } - - if uint32(cv) == load3232(src, candidate.offset-e.cur) { - if candidates.Prev.offset < minOffset || uint32(cv) != load3232(src, candidates.Prev.offset-e.cur) { - break - } - // Both match and are valid, pick longest. - offset := s - (candidate.offset - e.cur) - o2 := s - (candidates.Prev.offset - e.cur) - l1, l2 := matchLen(src[s+4:], src[s-offset+4:]), matchLen(src[s+4:], src[s-o2+4:]) - if l2 > l1 { - candidate = candidates.Prev - } - break - } else { - // We only check if value mismatches. - // Offset will always be invalid in other cases. - candidate = candidates.Prev - if candidate.offset > minOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) { - break - } - } - cv = now - } - - // Call emitCopy, and then see if another emitCopy could be our next - // move. Repeat until we find no match for the input immediately after - // what was consumed by the last emitCopy call. - // - // If we exit this loop normally then we need to call emitLiteral next, - // though we don't yet know how big the literal will be. We handle that - // by proceeding to the next iteration of the main loop. We also can - // exit this loop via goto if we get close to exhausting the input. - for { - // Invariant: we have a 4-byte match at s, and no need to emit any - // literal bytes prior to s. - - // Extend the 4-byte match as long as possible. 
- // - t := candidate.offset - e.cur - l := e.matchlenLong(s+4, t+4, src) + 4 - - // Extend backwards - for t > 0 && s > nextEmit && src[t-1] == src[s-1] { - s-- - t-- - l++ - } - if nextEmit < s { - if false { - emitLiteral(dst, src[nextEmit:s]) - } else { - for _, v := range src[nextEmit:s] { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } - } - } - - dst.AddMatchLong(l, uint32(s-t-baseMatchOffset)) - s += l - nextEmit = s - if nextS >= s { - s = nextS + 1 - } - - if s >= sLimit { - t += l - // Index first pair after match end. - if int(t+8) < len(src) && t > 0 { - cv = load6432(src, t) - nextHash := hashLen(cv, tableBits, hashBytes) - e.table[nextHash] = tableEntryPrev{ - Prev: e.table[nextHash].Cur, - Cur: tableEntry{offset: e.cur + t}, - } - } - goto emitRemainder - } - - // Store every 5th hash in-between. - for i := s - l + 2; i < s-5; i += 6 { - nextHash := hashLen(load6432(src, i), tableBits, hashBytes) - e.table[nextHash] = tableEntryPrev{ - Prev: e.table[nextHash].Cur, - Cur: tableEntry{offset: e.cur + i}} - } - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-2 to s. - x := load6432(src, s-2) - prevHash := hashLen(x, tableBits, hashBytes) - - e.table[prevHash] = tableEntryPrev{ - Prev: e.table[prevHash].Cur, - Cur: tableEntry{offset: e.cur + s - 2}, - } - x >>= 8 - prevHash = hashLen(x, tableBits, hashBytes) - - e.table[prevHash] = tableEntryPrev{ - Prev: e.table[prevHash].Cur, - Cur: tableEntry{offset: e.cur + s - 1}, - } - x >>= 8 - currHash := hashLen(x, tableBits, hashBytes) - candidates := e.table[currHash] - cv = x - e.table[currHash] = tableEntryPrev{ - Prev: candidates.Cur, - Cur: tableEntry{offset: s + e.cur}, - } - - // Check both candidates - candidate = candidates.Cur - minOffset := e.cur + s - (maxMatchOffset - 4) - - if candidate.offset > minOffset { - if uint32(cv) == load3232(src, candidate.offset-e.cur) { - // Found a match... 
- continue - } - candidate = candidates.Prev - if candidate.offset > minOffset && uint32(cv) == load3232(src, candidate.offset-e.cur) { - // Match at prev... - continue - } - } - cv = x >> 8 - s++ - break - } - } - -emitRemainder: - if int(nextEmit) < len(src) { - // If nothing was added, don't encode literals. - if dst.n == 0 { - return - } - - emitLiteral(dst, src[nextEmit:]) - } -} diff --git a/vendor/github.com/klauspost/compress/flate/level4.go b/vendor/github.com/klauspost/compress/flate/level4.go deleted file mode 100644 index 23c08b325c..0000000000 --- a/vendor/github.com/klauspost/compress/flate/level4.go +++ /dev/null @@ -1,221 +0,0 @@ -package flate - -import "fmt" - -type fastEncL4 struct { - fastGen - table [tableSize]tableEntry - bTable [tableSize]tableEntry -} - -func (e *fastEncL4) Encode(dst *tokens, src []byte) { - const ( - inputMargin = 12 - 1 - minNonLiteralBlockSize = 1 + 1 + inputMargin - hashShortBytes = 4 - ) - if debugDeflate && e.cur < 0 { - panic(fmt.Sprint("e.cur < 0: ", e.cur)) - } - // Protect against e.cur wraparound. - for e.cur >= bufferReset { - if len(e.hist) == 0 { - for i := range e.table[:] { - e.table[i] = tableEntry{} - } - for i := range e.bTable[:] { - e.bTable[i] = tableEntry{} - } - e.cur = maxMatchOffset - break - } - // Shift down everything in the table that isn't already too far away. - minOff := e.cur + int32(len(e.hist)) - maxMatchOffset - for i := range e.table[:] { - v := e.table[i].offset - if v <= minOff { - v = 0 - } else { - v = v - e.cur + maxMatchOffset - } - e.table[i].offset = v - } - for i := range e.bTable[:] { - v := e.bTable[i].offset - if v <= minOff { - v = 0 - } else { - v = v - e.cur + maxMatchOffset - } - e.bTable[i].offset = v - } - e.cur = maxMatchOffset - } - - s := e.addBlock(src) - - // This check isn't in the Snappy implementation, but there, the caller - // instead of the callee handles this case. 
- if len(src) < minNonLiteralBlockSize { - // We do not fill the token table. - // This will be picked up by caller. - dst.n = uint16(len(src)) - return - } - - // Override src - src = e.hist - nextEmit := s - - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := int32(len(src) - inputMargin) - - // nextEmit is where in src the next emitLiteral should start from. - cv := load6432(src, s) - for { - const skipLog = 6 - const doEvery = 1 - - nextS := s - var t int32 - for { - nextHashS := hashLen(cv, tableBits, hashShortBytes) - nextHashL := hash7(cv, tableBits) - - s = nextS - nextS = s + doEvery + (s-nextEmit)>>skipLog - if nextS > sLimit { - goto emitRemainder - } - // Fetch a short+long candidate - sCandidate := e.table[nextHashS] - lCandidate := e.bTable[nextHashL] - next := load6432(src, nextS) - entry := tableEntry{offset: s + e.cur} - e.table[nextHashS] = entry - e.bTable[nextHashL] = entry - - t = lCandidate.offset - e.cur - if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.offset-e.cur) { - // We got a long match. Use that. - break - } - - t = sCandidate.offset - e.cur - if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) { - // Found a 4 match... - lCandidate = e.bTable[hash7(next, tableBits)] - - // If the next long is a candidate, check if we should use that instead... - lOff := nextS - (lCandidate.offset - e.cur) - if lOff < maxMatchOffset && load3232(src, lCandidate.offset-e.cur) == uint32(next) { - l1, l2 := matchLen(src[s+4:], src[t+4:]), matchLen(src[nextS+4:], src[nextS-lOff+4:]) - if l2 > l1 { - s = nextS - t = lCandidate.offset - e.cur - } - } - break - } - cv = next - } - - // A 4-byte match has been found. We'll later see if more than 4 bytes - // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit - // them as literal bytes. 
- - // Extend the 4-byte match as long as possible. - l := e.matchlenLong(s+4, t+4, src) + 4 - - // Extend backwards - for t > 0 && s > nextEmit && src[t-1] == src[s-1] { - s-- - t-- - l++ - } - if nextEmit < s { - if false { - emitLiteral(dst, src[nextEmit:s]) - } else { - for _, v := range src[nextEmit:s] { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } - } - } - if debugDeflate { - if t >= s { - panic("s-t") - } - if (s - t) > maxMatchOffset { - panic(fmt.Sprintln("mmo", t)) - } - if l < baseMatchLength { - panic("bml") - } - } - - dst.AddMatchLong(l, uint32(s-t-baseMatchOffset)) - s += l - nextEmit = s - if nextS >= s { - s = nextS + 1 - } - - if s >= sLimit { - // Index first pair after match end. - if int(s+8) < len(src) { - cv := load6432(src, s) - e.table[hashLen(cv, tableBits, hashShortBytes)] = tableEntry{offset: s + e.cur} - e.bTable[hash7(cv, tableBits)] = tableEntry{offset: s + e.cur} - } - goto emitRemainder - } - - // Store every 3rd hash in-between - if true { - i := nextS - if i < s-1 { - cv := load6432(src, i) - t := tableEntry{offset: i + e.cur} - t2 := tableEntry{offset: t.offset + 1} - e.bTable[hash7(cv, tableBits)] = t - e.bTable[hash7(cv>>8, tableBits)] = t2 - e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2 - - i += 3 - for ; i < s-1; i += 3 { - cv := load6432(src, i) - t := tableEntry{offset: i + e.cur} - t2 := tableEntry{offset: t.offset + 1} - e.bTable[hash7(cv, tableBits)] = t - e.bTable[hash7(cv>>8, tableBits)] = t2 - e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2 - } - } - } - - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-1 and at s. 
- x := load6432(src, s-1) - o := e.cur + s - 1 - prevHashS := hashLen(x, tableBits, hashShortBytes) - prevHashL := hash7(x, tableBits) - e.table[prevHashS] = tableEntry{offset: o} - e.bTable[prevHashL] = tableEntry{offset: o} - cv = x >> 8 - } - -emitRemainder: - if int(nextEmit) < len(src) { - // If nothing was added, don't encode literals. - if dst.n == 0 { - return - } - - emitLiteral(dst, src[nextEmit:]) - } -} diff --git a/vendor/github.com/klauspost/compress/flate/level5.go b/vendor/github.com/klauspost/compress/flate/level5.go deleted file mode 100644 index 1f61ec1829..0000000000 --- a/vendor/github.com/klauspost/compress/flate/level5.go +++ /dev/null @@ -1,708 +0,0 @@ -package flate - -import "fmt" - -type fastEncL5 struct { - fastGen - table [tableSize]tableEntry - bTable [tableSize]tableEntryPrev -} - -func (e *fastEncL5) Encode(dst *tokens, src []byte) { - const ( - inputMargin = 12 - 1 - minNonLiteralBlockSize = 1 + 1 + inputMargin - hashShortBytes = 4 - ) - if debugDeflate && e.cur < 0 { - panic(fmt.Sprint("e.cur < 0: ", e.cur)) - } - - // Protect against e.cur wraparound. - for e.cur >= bufferReset { - if len(e.hist) == 0 { - for i := range e.table[:] { - e.table[i] = tableEntry{} - } - for i := range e.bTable[:] { - e.bTable[i] = tableEntryPrev{} - } - e.cur = maxMatchOffset - break - } - // Shift down everything in the table that isn't already too far away. 
- minOff := e.cur + int32(len(e.hist)) - maxMatchOffset - for i := range e.table[:] { - v := e.table[i].offset - if v <= minOff { - v = 0 - } else { - v = v - e.cur + maxMatchOffset - } - e.table[i].offset = v - } - for i := range e.bTable[:] { - v := e.bTable[i] - if v.Cur.offset <= minOff { - v.Cur.offset = 0 - v.Prev.offset = 0 - } else { - v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset - if v.Prev.offset <= minOff { - v.Prev.offset = 0 - } else { - v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset - } - } - e.bTable[i] = v - } - e.cur = maxMatchOffset - } - - s := e.addBlock(src) - - // This check isn't in the Snappy implementation, but there, the caller - // instead of the callee handles this case. - if len(src) < minNonLiteralBlockSize { - // We do not fill the token table. - // This will be picked up by caller. - dst.n = uint16(len(src)) - return - } - - // Override src - src = e.hist - nextEmit := s - - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := int32(len(src) - inputMargin) - - // nextEmit is where in src the next emitLiteral should start from. 
- cv := load6432(src, s) - for { - const skipLog = 6 - const doEvery = 1 - - nextS := s - var l int32 - var t int32 - for { - nextHashS := hashLen(cv, tableBits, hashShortBytes) - nextHashL := hash7(cv, tableBits) - - s = nextS - nextS = s + doEvery + (s-nextEmit)>>skipLog - if nextS > sLimit { - goto emitRemainder - } - // Fetch a short+long candidate - sCandidate := e.table[nextHashS] - lCandidate := e.bTable[nextHashL] - next := load6432(src, nextS) - entry := tableEntry{offset: s + e.cur} - e.table[nextHashS] = entry - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = entry, eLong.Cur - - nextHashS = hashLen(next, tableBits, hashShortBytes) - nextHashL = hash7(next, tableBits) - - t = lCandidate.Cur.offset - e.cur - if s-t < maxMatchOffset { - if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) { - // Store the next match - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - - t2 := lCandidate.Prev.offset - e.cur - if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) { - l = e.matchlen(s+4, t+4, src) + 4 - ml1 := e.matchlen(s+4, t2+4, src) + 4 - if ml1 > l { - t = t2 - l = ml1 - break - } - } - break - } - t = lCandidate.Prev.offset - e.cur - if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) { - // Store the next match - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - break - } - } - - t = sCandidate.offset - e.cur - if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) { - // Found a 4 match... 
- l = e.matchlen(s+4, t+4, src) + 4 - lCandidate = e.bTable[nextHashL] - // Store the next match - - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - - // If the next long is a candidate, use that... - t2 := lCandidate.Cur.offset - e.cur - if nextS-t2 < maxMatchOffset { - if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) { - ml := e.matchlen(nextS+4, t2+4, src) + 4 - if ml > l { - t = t2 - s = nextS - l = ml - break - } - } - // If the previous long is a candidate, use that... - t2 = lCandidate.Prev.offset - e.cur - if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) { - ml := e.matchlen(nextS+4, t2+4, src) + 4 - if ml > l { - t = t2 - s = nextS - l = ml - break - } - } - } - break - } - cv = next - } - - // A 4-byte match has been found. We'll later see if more than 4 bytes - // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit - // them as literal bytes. - - if l == 0 { - // Extend the 4-byte match as long as possible. - l = e.matchlenLong(s+4, t+4, src) + 4 - } else if l == maxMatchLength { - l += e.matchlenLong(s+l, t+l, src) - } - - // Try to locate a better match by checking the end of best match... - if sAt := s + l; l < 30 && sAt < sLimit { - // Allow some bytes at the beginning to mismatch. - // Sweet spot is 2/3 bytes depending on input. - // 3 is only a little better when it is but sometimes a lot worse. - // The skipped bytes are tested in Extend backwards, - // and still picked up as part of the match if they do. 
- const skipBeginning = 2 - eLong := e.bTable[hash7(load6432(src, sAt), tableBits)].Cur.offset - t2 := eLong - e.cur - l + skipBeginning - s2 := s + skipBeginning - off := s2 - t2 - if t2 >= 0 && off < maxMatchOffset && off > 0 { - if l2 := e.matchlenLong(s2, t2, src); l2 > l { - t = t2 - l = l2 - s = s2 - } - } - } - - // Extend backwards - for t > 0 && s > nextEmit && src[t-1] == src[s-1] { - s-- - t-- - l++ - } - if nextEmit < s { - if false { - emitLiteral(dst, src[nextEmit:s]) - } else { - for _, v := range src[nextEmit:s] { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } - } - } - if debugDeflate { - if t >= s { - panic(fmt.Sprintln("s-t", s, t)) - } - if (s - t) > maxMatchOffset { - panic(fmt.Sprintln("mmo", s-t)) - } - if l < baseMatchLength { - panic("bml") - } - } - - dst.AddMatchLong(l, uint32(s-t-baseMatchOffset)) - s += l - nextEmit = s - if nextS >= s { - s = nextS + 1 - } - - if s >= sLimit { - goto emitRemainder - } - - // Store every 3rd hash in-between. 
- if true { - const hashEvery = 3 - i := s - l + 1 - if i < s-1 { - cv := load6432(src, i) - t := tableEntry{offset: i + e.cur} - e.table[hashLen(cv, tableBits, hashShortBytes)] = t - eLong := &e.bTable[hash7(cv, tableBits)] - eLong.Cur, eLong.Prev = t, eLong.Cur - - // Do an long at i+1 - cv >>= 8 - t = tableEntry{offset: t.offset + 1} - eLong = &e.bTable[hash7(cv, tableBits)] - eLong.Cur, eLong.Prev = t, eLong.Cur - - // We only have enough bits for a short entry at i+2 - cv >>= 8 - t = tableEntry{offset: t.offset + 1} - e.table[hashLen(cv, tableBits, hashShortBytes)] = t - - // Skip one - otherwise we risk hitting 's' - i += 4 - for ; i < s-1; i += hashEvery { - cv := load6432(src, i) - t := tableEntry{offset: i + e.cur} - t2 := tableEntry{offset: t.offset + 1} - eLong := &e.bTable[hash7(cv, tableBits)] - eLong.Cur, eLong.Prev = t, eLong.Cur - e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2 - } - } - } - - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-1 and at s. - x := load6432(src, s-1) - o := e.cur + s - 1 - prevHashS := hashLen(x, tableBits, hashShortBytes) - prevHashL := hash7(x, tableBits) - e.table[prevHashS] = tableEntry{offset: o} - eLong := &e.bTable[prevHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: o}, eLong.Cur - cv = x >> 8 - } - -emitRemainder: - if int(nextEmit) < len(src) { - // If nothing was added, don't encode literals. - if dst.n == 0 { - return - } - - emitLiteral(dst, src[nextEmit:]) - } -} - -// fastEncL5Window is a level 5 encoder, -// but with a custom window size. 
-type fastEncL5Window struct { - hist []byte - cur int32 - maxOffset int32 - table [tableSize]tableEntry - bTable [tableSize]tableEntryPrev -} - -func (e *fastEncL5Window) Encode(dst *tokens, src []byte) { - const ( - inputMargin = 12 - 1 - minNonLiteralBlockSize = 1 + 1 + inputMargin - hashShortBytes = 4 - ) - maxMatchOffset := e.maxOffset - if debugDeflate && e.cur < 0 { - panic(fmt.Sprint("e.cur < 0: ", e.cur)) - } - - // Protect against e.cur wraparound. - for e.cur >= bufferReset { - if len(e.hist) == 0 { - for i := range e.table[:] { - e.table[i] = tableEntry{} - } - for i := range e.bTable[:] { - e.bTable[i] = tableEntryPrev{} - } - e.cur = maxMatchOffset - break - } - // Shift down everything in the table that isn't already too far away. - minOff := e.cur + int32(len(e.hist)) - maxMatchOffset - for i := range e.table[:] { - v := e.table[i].offset - if v <= minOff { - v = 0 - } else { - v = v - e.cur + maxMatchOffset - } - e.table[i].offset = v - } - for i := range e.bTable[:] { - v := e.bTable[i] - if v.Cur.offset <= minOff { - v.Cur.offset = 0 - v.Prev.offset = 0 - } else { - v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset - if v.Prev.offset <= minOff { - v.Prev.offset = 0 - } else { - v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset - } - } - e.bTable[i] = v - } - e.cur = maxMatchOffset - } - - s := e.addBlock(src) - - // This check isn't in the Snappy implementation, but there, the caller - // instead of the callee handles this case. - if len(src) < minNonLiteralBlockSize { - // We do not fill the token table. - // This will be picked up by caller. - dst.n = uint16(len(src)) - return - } - - // Override src - src = e.hist - nextEmit := s - - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := int32(len(src) - inputMargin) - - // nextEmit is where in src the next emitLiteral should start from. 
- cv := load6432(src, s) - for { - const skipLog = 6 - const doEvery = 1 - - nextS := s - var l int32 - var t int32 - for { - nextHashS := hashLen(cv, tableBits, hashShortBytes) - nextHashL := hash7(cv, tableBits) - - s = nextS - nextS = s + doEvery + (s-nextEmit)>>skipLog - if nextS > sLimit { - goto emitRemainder - } - // Fetch a short+long candidate - sCandidate := e.table[nextHashS] - lCandidate := e.bTable[nextHashL] - next := load6432(src, nextS) - entry := tableEntry{offset: s + e.cur} - e.table[nextHashS] = entry - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = entry, eLong.Cur - - nextHashS = hashLen(next, tableBits, hashShortBytes) - nextHashL = hash7(next, tableBits) - - t = lCandidate.Cur.offset - e.cur - if s-t < maxMatchOffset { - if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) { - // Store the next match - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - - t2 := lCandidate.Prev.offset - e.cur - if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) { - l = e.matchlen(s+4, t+4, src) + 4 - ml1 := e.matchlen(s+4, t2+4, src) + 4 - if ml1 > l { - t = t2 - l = ml1 - break - } - } - break - } - t = lCandidate.Prev.offset - e.cur - if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) { - // Store the next match - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - break - } - } - - t = sCandidate.offset - e.cur - if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) { - // Found a 4 match... 
- l = e.matchlen(s+4, t+4, src) + 4 - lCandidate = e.bTable[nextHashL] - // Store the next match - - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - - // If the next long is a candidate, use that... - t2 := lCandidate.Cur.offset - e.cur - if nextS-t2 < maxMatchOffset { - if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) { - ml := e.matchlen(nextS+4, t2+4, src) + 4 - if ml > l { - t = t2 - s = nextS - l = ml - break - } - } - // If the previous long is a candidate, use that... - t2 = lCandidate.Prev.offset - e.cur - if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) { - ml := e.matchlen(nextS+4, t2+4, src) + 4 - if ml > l { - t = t2 - s = nextS - l = ml - break - } - } - } - break - } - cv = next - } - - // A 4-byte match has been found. We'll later see if more than 4 bytes - // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit - // them as literal bytes. - - if l == 0 { - // Extend the 4-byte match as long as possible. - l = e.matchlenLong(s+4, t+4, src) + 4 - } else if l == maxMatchLength { - l += e.matchlenLong(s+l, t+l, src) - } - - // Try to locate a better match by checking the end of best match... - if sAt := s + l; l < 30 && sAt < sLimit { - // Allow some bytes at the beginning to mismatch. - // Sweet spot is 2/3 bytes depending on input. - // 3 is only a little better when it is but sometimes a lot worse. - // The skipped bytes are tested in Extend backwards, - // and still picked up as part of the match if they do. 
- const skipBeginning = 2 - eLong := e.bTable[hash7(load6432(src, sAt), tableBits)].Cur.offset - t2 := eLong - e.cur - l + skipBeginning - s2 := s + skipBeginning - off := s2 - t2 - if t2 >= 0 && off < maxMatchOffset && off > 0 { - if l2 := e.matchlenLong(s2, t2, src); l2 > l { - t = t2 - l = l2 - s = s2 - } - } - } - - // Extend backwards - for t > 0 && s > nextEmit && src[t-1] == src[s-1] { - s-- - t-- - l++ - } - if nextEmit < s { - if false { - emitLiteral(dst, src[nextEmit:s]) - } else { - for _, v := range src[nextEmit:s] { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } - } - } - if debugDeflate { - if t >= s { - panic(fmt.Sprintln("s-t", s, t)) - } - if (s - t) > maxMatchOffset { - panic(fmt.Sprintln("mmo", s-t)) - } - if l < baseMatchLength { - panic("bml") - } - } - - dst.AddMatchLong(l, uint32(s-t-baseMatchOffset)) - s += l - nextEmit = s - if nextS >= s { - s = nextS + 1 - } - - if s >= sLimit { - goto emitRemainder - } - - // Store every 3rd hash in-between. 
- if true { - const hashEvery = 3 - i := s - l + 1 - if i < s-1 { - cv := load6432(src, i) - t := tableEntry{offset: i + e.cur} - e.table[hashLen(cv, tableBits, hashShortBytes)] = t - eLong := &e.bTable[hash7(cv, tableBits)] - eLong.Cur, eLong.Prev = t, eLong.Cur - - // Do an long at i+1 - cv >>= 8 - t = tableEntry{offset: t.offset + 1} - eLong = &e.bTable[hash7(cv, tableBits)] - eLong.Cur, eLong.Prev = t, eLong.Cur - - // We only have enough bits for a short entry at i+2 - cv >>= 8 - t = tableEntry{offset: t.offset + 1} - e.table[hashLen(cv, tableBits, hashShortBytes)] = t - - // Skip one - otherwise we risk hitting 's' - i += 4 - for ; i < s-1; i += hashEvery { - cv := load6432(src, i) - t := tableEntry{offset: i + e.cur} - t2 := tableEntry{offset: t.offset + 1} - eLong := &e.bTable[hash7(cv, tableBits)] - eLong.Cur, eLong.Prev = t, eLong.Cur - e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2 - } - } - } - - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-1 and at s. - x := load6432(src, s-1) - o := e.cur + s - 1 - prevHashS := hashLen(x, tableBits, hashShortBytes) - prevHashL := hash7(x, tableBits) - e.table[prevHashS] = tableEntry{offset: o} - eLong := &e.bTable[prevHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: o}, eLong.Cur - cv = x >> 8 - } - -emitRemainder: - if int(nextEmit) < len(src) { - // If nothing was added, don't encode literals. - if dst.n == 0 { - return - } - - emitLiteral(dst, src[nextEmit:]) - } -} - -// Reset the encoding table. -func (e *fastEncL5Window) Reset() { - // We keep the same allocs, since we are compressing the same block sizes. - if cap(e.hist) < allocHistory { - e.hist = make([]byte, 0, allocHistory) - } - - // We offset current position so everything will be out of reach. - // If we are above the buffer reset it will be cleared anyway since len(hist) == 0. 
- if e.cur <= int32(bufferReset) { - e.cur += e.maxOffset + int32(len(e.hist)) - } - e.hist = e.hist[:0] -} - -func (e *fastEncL5Window) addBlock(src []byte) int32 { - // check if we have space already - maxMatchOffset := e.maxOffset - - if len(e.hist)+len(src) > cap(e.hist) { - if cap(e.hist) == 0 { - e.hist = make([]byte, 0, allocHistory) - } else { - if cap(e.hist) < int(maxMatchOffset*2) { - panic("unexpected buffer size") - } - // Move down - offset := int32(len(e.hist)) - maxMatchOffset - copy(e.hist[0:maxMatchOffset], e.hist[offset:]) - e.cur += offset - e.hist = e.hist[:maxMatchOffset] - } - } - s := int32(len(e.hist)) - e.hist = append(e.hist, src...) - return s -} - -// matchlen will return the match length between offsets and t in src. -// The maximum length returned is maxMatchLength - 4. -// It is assumed that s > t, that t >=0 and s < len(src). -func (e *fastEncL5Window) matchlen(s, t int32, src []byte) int32 { - if debugDecode { - if t >= s { - panic(fmt.Sprint("t >=s:", t, s)) - } - if int(s) >= len(src) { - panic(fmt.Sprint("s >= len(src):", s, len(src))) - } - if t < 0 { - panic(fmt.Sprint("t < 0:", t)) - } - if s-t > e.maxOffset { - panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchLength (", maxMatchOffset, ")")) - } - } - s1 := int(s) + maxMatchLength - 4 - if s1 > len(src) { - s1 = len(src) - } - - // Extend the match to be as long as possible. - return int32(matchLen(src[s:s1], src[t:])) -} - -// matchlenLong will return the match length between offsets and t in src. -// It is assumed that s > t, that t >=0 and s < len(src). 
-func (e *fastEncL5Window) matchlenLong(s, t int32, src []byte) int32 { - if debugDeflate { - if t >= s { - panic(fmt.Sprint("t >=s:", t, s)) - } - if int(s) >= len(src) { - panic(fmt.Sprint("s >= len(src):", s, len(src))) - } - if t < 0 { - panic(fmt.Sprint("t < 0:", t)) - } - if s-t > e.maxOffset { - panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchLength (", maxMatchOffset, ")")) - } - } - // Extend the match to be as long as possible. - return int32(matchLen(src[s:], src[t:])) -} diff --git a/vendor/github.com/klauspost/compress/flate/level6.go b/vendor/github.com/klauspost/compress/flate/level6.go deleted file mode 100644 index f1e9d98fa5..0000000000 --- a/vendor/github.com/klauspost/compress/flate/level6.go +++ /dev/null @@ -1,325 +0,0 @@ -package flate - -import "fmt" - -type fastEncL6 struct { - fastGen - table [tableSize]tableEntry - bTable [tableSize]tableEntryPrev -} - -func (e *fastEncL6) Encode(dst *tokens, src []byte) { - const ( - inputMargin = 12 - 1 - minNonLiteralBlockSize = 1 + 1 + inputMargin - hashShortBytes = 4 - ) - if debugDeflate && e.cur < 0 { - panic(fmt.Sprint("e.cur < 0: ", e.cur)) - } - - // Protect against e.cur wraparound. - for e.cur >= bufferReset { - if len(e.hist) == 0 { - for i := range e.table[:] { - e.table[i] = tableEntry{} - } - for i := range e.bTable[:] { - e.bTable[i] = tableEntryPrev{} - } - e.cur = maxMatchOffset - break - } - // Shift down everything in the table that isn't already too far away. 
- minOff := e.cur + int32(len(e.hist)) - maxMatchOffset - for i := range e.table[:] { - v := e.table[i].offset - if v <= minOff { - v = 0 - } else { - v = v - e.cur + maxMatchOffset - } - e.table[i].offset = v - } - for i := range e.bTable[:] { - v := e.bTable[i] - if v.Cur.offset <= minOff { - v.Cur.offset = 0 - v.Prev.offset = 0 - } else { - v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset - if v.Prev.offset <= minOff { - v.Prev.offset = 0 - } else { - v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset - } - } - e.bTable[i] = v - } - e.cur = maxMatchOffset - } - - s := e.addBlock(src) - - // This check isn't in the Snappy implementation, but there, the caller - // instead of the callee handles this case. - if len(src) < minNonLiteralBlockSize { - // We do not fill the token table. - // This will be picked up by caller. - dst.n = uint16(len(src)) - return - } - - // Override src - src = e.hist - nextEmit := s - - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := int32(len(src) - inputMargin) - - // nextEmit is where in src the next emitLiteral should start from. 
- cv := load6432(src, s) - // Repeat MUST be > 1 and within range - repeat := int32(1) - for { - const skipLog = 7 - const doEvery = 1 - - nextS := s - var l int32 - var t int32 - for { - nextHashS := hashLen(cv, tableBits, hashShortBytes) - nextHashL := hash7(cv, tableBits) - s = nextS - nextS = s + doEvery + (s-nextEmit)>>skipLog - if nextS > sLimit { - goto emitRemainder - } - // Fetch a short+long candidate - sCandidate := e.table[nextHashS] - lCandidate := e.bTable[nextHashL] - next := load6432(src, nextS) - entry := tableEntry{offset: s + e.cur} - e.table[nextHashS] = entry - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = entry, eLong.Cur - - // Calculate hashes of 'next' - nextHashS = hashLen(next, tableBits, hashShortBytes) - nextHashL = hash7(next, tableBits) - - t = lCandidate.Cur.offset - e.cur - if s-t < maxMatchOffset { - if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) { - // Long candidate matches at least 4 bytes. - - // Store the next match - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - - // Check the previous long candidate as well. - t2 := lCandidate.Prev.offset - e.cur - if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) { - l = e.matchlen(s+4, t+4, src) + 4 - ml1 := e.matchlen(s+4, t2+4, src) + 4 - if ml1 > l { - t = t2 - l = ml1 - break - } - } - break - } - // Current value did not match, but check if previous long value does. 
- t = lCandidate.Prev.offset - e.cur - if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) { - // Store the next match - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - break - } - } - - t = sCandidate.offset - e.cur - if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) { - // Found a 4 match... - l = e.matchlen(s+4, t+4, src) + 4 - - // Look up next long candidate (at nextS) - lCandidate = e.bTable[nextHashL] - - // Store the next match - e.table[nextHashS] = tableEntry{offset: nextS + e.cur} - eLong := &e.bTable[nextHashL] - eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur - - // Check repeat at s + repOff - const repOff = 1 - t2 := s - repeat + repOff - if load3232(src, t2) == uint32(cv>>(8*repOff)) { - ml := e.matchlen(s+4+repOff, t2+4, src) + 4 - if ml > l { - t = t2 - l = ml - s += repOff - // Not worth checking more. - break - } - } - - // If the next long is a candidate, use that... - t2 = lCandidate.Cur.offset - e.cur - if nextS-t2 < maxMatchOffset { - if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) { - ml := e.matchlen(nextS+4, t2+4, src) + 4 - if ml > l { - t = t2 - s = nextS - l = ml - // This is ok, but check previous as well. - } - } - // If the previous long is a candidate, use that... - t2 = lCandidate.Prev.offset - e.cur - if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) { - ml := e.matchlen(nextS+4, t2+4, src) + 4 - if ml > l { - t = t2 - s = nextS - l = ml - break - } - } - } - break - } - cv = next - } - - // A 4-byte match has been found. We'll later see if more than 4 bytes - // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit - // them as literal bytes. - - // Extend the 4-byte match as long as possible. 
- if l == 0 { - l = e.matchlenLong(s+4, t+4, src) + 4 - } else if l == maxMatchLength { - l += e.matchlenLong(s+l, t+l, src) - } - - // Try to locate a better match by checking the end-of-match... - if sAt := s + l; sAt < sLimit { - // Allow some bytes at the beginning to mismatch. - // Sweet spot is 2/3 bytes depending on input. - // 3 is only a little better when it is but sometimes a lot worse. - // The skipped bytes are tested in Extend backwards, - // and still picked up as part of the match if they do. - const skipBeginning = 2 - eLong := &e.bTable[hash7(load6432(src, sAt), tableBits)] - // Test current - t2 := eLong.Cur.offset - e.cur - l + skipBeginning - s2 := s + skipBeginning - off := s2 - t2 - if off < maxMatchOffset { - if off > 0 && t2 >= 0 { - if l2 := e.matchlenLong(s2, t2, src); l2 > l { - t = t2 - l = l2 - s = s2 - } - } - // Test next: - t2 = eLong.Prev.offset - e.cur - l + skipBeginning - off := s2 - t2 - if off > 0 && off < maxMatchOffset && t2 >= 0 { - if l2 := e.matchlenLong(s2, t2, src); l2 > l { - t = t2 - l = l2 - s = s2 - } - } - } - } - - // Extend backwards - for t > 0 && s > nextEmit && src[t-1] == src[s-1] { - s-- - t-- - l++ - } - if nextEmit < s { - if false { - emitLiteral(dst, src[nextEmit:s]) - } else { - for _, v := range src[nextEmit:s] { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } - } - } - if false { - if t >= s { - panic(fmt.Sprintln("s-t", s, t)) - } - if (s - t) > maxMatchOffset { - panic(fmt.Sprintln("mmo", s-t)) - } - if l < baseMatchLength { - panic("bml") - } - } - - dst.AddMatchLong(l, uint32(s-t-baseMatchOffset)) - repeat = s - t - s += l - nextEmit = s - if nextS >= s { - s = nextS + 1 - } - - if s >= sLimit { - // Index after match end. 
- for i := nextS + 1; i < int32(len(src))-8; i += 2 { - cv := load6432(src, i) - e.table[hashLen(cv, tableBits, hashShortBytes)] = tableEntry{offset: i + e.cur} - eLong := &e.bTable[hash7(cv, tableBits)] - eLong.Cur, eLong.Prev = tableEntry{offset: i + e.cur}, eLong.Cur - } - goto emitRemainder - } - - // Store every long hash in-between and every second short. - if true { - for i := nextS + 1; i < s-1; i += 2 { - cv := load6432(src, i) - t := tableEntry{offset: i + e.cur} - t2 := tableEntry{offset: t.offset + 1} - eLong := &e.bTable[hash7(cv, tableBits)] - eLong2 := &e.bTable[hash7(cv>>8, tableBits)] - e.table[hashLen(cv, tableBits, hashShortBytes)] = t - eLong.Cur, eLong.Prev = t, eLong.Cur - eLong2.Cur, eLong2.Prev = t2, eLong2.Cur - } - } - - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-1 and at s. - cv = load6432(src, s) - } - -emitRemainder: - if int(nextEmit) < len(src) { - // If nothing was added, don't encode literals. - if dst.n == 0 { - return - } - - emitLiteral(dst, src[nextEmit:]) - } -} diff --git a/vendor/github.com/klauspost/compress/flate/matchlen_amd64.go b/vendor/github.com/klauspost/compress/flate/matchlen_amd64.go deleted file mode 100644 index 4bd3885841..0000000000 --- a/vendor/github.com/klauspost/compress/flate/matchlen_amd64.go +++ /dev/null @@ -1,16 +0,0 @@ -//go:build amd64 && !appengine && !noasm && gc -// +build amd64,!appengine,!noasm,gc - -// Copyright 2019+ Klaus Post. All rights reserved. -// License information can be found in the LICENSE file. 
- -package flate - -// matchLen returns how many bytes match in a and b -// -// It assumes that: -// -// len(a) <= len(b) and len(a) > 0 -// -//go:noescape -func matchLen(a []byte, b []byte) int diff --git a/vendor/github.com/klauspost/compress/flate/matchlen_amd64.s b/vendor/github.com/klauspost/compress/flate/matchlen_amd64.s deleted file mode 100644 index 9a7655c0f7..0000000000 --- a/vendor/github.com/klauspost/compress/flate/matchlen_amd64.s +++ /dev/null @@ -1,68 +0,0 @@ -// Copied from S2 implementation. - -//go:build !appengine && !noasm && gc && !noasm - -#include "textflag.h" - -// func matchLen(a []byte, b []byte) int -// Requires: BMI -TEXT ·matchLen(SB), NOSPLIT, $0-56 - MOVQ a_base+0(FP), AX - MOVQ b_base+24(FP), CX - MOVQ a_len+8(FP), DX - - // matchLen - XORL SI, SI - CMPL DX, $0x08 - JB matchlen_match4_standalone - -matchlen_loopback_standalone: - MOVQ (AX)(SI*1), BX - XORQ (CX)(SI*1), BX - TESTQ BX, BX - JZ matchlen_loop_standalone - -#ifdef GOAMD64_v3 - TZCNTQ BX, BX -#else - BSFQ BX, BX -#endif - SARQ $0x03, BX - LEAL (SI)(BX*1), SI - JMP gen_match_len_end - -matchlen_loop_standalone: - LEAL -8(DX), DX - LEAL 8(SI), SI - CMPL DX, $0x08 - JAE matchlen_loopback_standalone - -matchlen_match4_standalone: - CMPL DX, $0x04 - JB matchlen_match2_standalone - MOVL (AX)(SI*1), BX - CMPL (CX)(SI*1), BX - JNE matchlen_match2_standalone - LEAL -4(DX), DX - LEAL 4(SI), SI - -matchlen_match2_standalone: - CMPL DX, $0x02 - JB matchlen_match1_standalone - MOVW (AX)(SI*1), BX - CMPW (CX)(SI*1), BX - JNE matchlen_match1_standalone - LEAL -2(DX), DX - LEAL 2(SI), SI - -matchlen_match1_standalone: - CMPL DX, $0x01 - JB gen_match_len_end - MOVB (AX)(SI*1), BL - CMPB (CX)(SI*1), BL - JNE gen_match_len_end - INCL SI - -gen_match_len_end: - MOVQ SI, ret+48(FP) - RET diff --git a/vendor/github.com/klauspost/compress/flate/matchlen_generic.go b/vendor/github.com/klauspost/compress/flate/matchlen_generic.go deleted file mode 100644 
index ad5cd814b9..0000000000 --- a/vendor/github.com/klauspost/compress/flate/matchlen_generic.go +++ /dev/null @@ -1,33 +0,0 @@ -//go:build !amd64 || appengine || !gc || noasm -// +build !amd64 appengine !gc noasm - -// Copyright 2019+ Klaus Post. All rights reserved. -// License information can be found in the LICENSE file. - -package flate - -import ( - "encoding/binary" - "math/bits" -) - -// matchLen returns the maximum common prefix length of a and b. -// a must be the shortest of the two. -func matchLen(a, b []byte) (n int) { - for ; len(a) >= 8 && len(b) >= 8; a, b = a[8:], b[8:] { - diff := binary.LittleEndian.Uint64(a) ^ binary.LittleEndian.Uint64(b) - if diff != 0 { - return n + bits.TrailingZeros64(diff)>>3 - } - n += 8 - } - - for i := range a { - if a[i] != b[i] { - break - } - n++ - } - return n - -} diff --git a/vendor/github.com/klauspost/compress/flate/regmask_amd64.go b/vendor/github.com/klauspost/compress/flate/regmask_amd64.go deleted file mode 100644 index 6ed28061b2..0000000000 --- a/vendor/github.com/klauspost/compress/flate/regmask_amd64.go +++ /dev/null @@ -1,37 +0,0 @@ -package flate - -const ( - // Masks for shifts with register sizes of the shift value. - // This can be used to work around the x86 design of shifting by mod register size. - // It can be used when a variable shift is always smaller than the register size. 
- - // reg8SizeMaskX - shift value is 8 bits, shifted is X - reg8SizeMask8 = 7 - reg8SizeMask16 = 15 - reg8SizeMask32 = 31 - reg8SizeMask64 = 63 - - // reg16SizeMaskX - shift value is 16 bits, shifted is X - reg16SizeMask8 = reg8SizeMask8 - reg16SizeMask16 = reg8SizeMask16 - reg16SizeMask32 = reg8SizeMask32 - reg16SizeMask64 = reg8SizeMask64 - - // reg32SizeMaskX - shift value is 32 bits, shifted is X - reg32SizeMask8 = reg8SizeMask8 - reg32SizeMask16 = reg8SizeMask16 - reg32SizeMask32 = reg8SizeMask32 - reg32SizeMask64 = reg8SizeMask64 - - // reg64SizeMaskX - shift value is 64 bits, shifted is X - reg64SizeMask8 = reg8SizeMask8 - reg64SizeMask16 = reg8SizeMask16 - reg64SizeMask32 = reg8SizeMask32 - reg64SizeMask64 = reg8SizeMask64 - - // regSizeMaskUintX - shift value is uint, shifted is X - regSizeMaskUint8 = reg8SizeMask8 - regSizeMaskUint16 = reg8SizeMask16 - regSizeMaskUint32 = reg8SizeMask32 - regSizeMaskUint64 = reg8SizeMask64 -) diff --git a/vendor/github.com/klauspost/compress/flate/regmask_other.go b/vendor/github.com/klauspost/compress/flate/regmask_other.go deleted file mode 100644 index 1b7a2cbd79..0000000000 --- a/vendor/github.com/klauspost/compress/flate/regmask_other.go +++ /dev/null @@ -1,40 +0,0 @@ -//go:build !amd64 -// +build !amd64 - -package flate - -const ( - // Masks for shifts with register sizes of the shift value. - // This can be used to work around the x86 design of shifting by mod register size. - // It can be used when a variable shift is always smaller than the register size. 
- - // reg8SizeMaskX - shift value is 8 bits, shifted is X - reg8SizeMask8 = 0xff - reg8SizeMask16 = 0xff - reg8SizeMask32 = 0xff - reg8SizeMask64 = 0xff - - // reg16SizeMaskX - shift value is 16 bits, shifted is X - reg16SizeMask8 = 0xffff - reg16SizeMask16 = 0xffff - reg16SizeMask32 = 0xffff - reg16SizeMask64 = 0xffff - - // reg32SizeMaskX - shift value is 32 bits, shifted is X - reg32SizeMask8 = 0xffffffff - reg32SizeMask16 = 0xffffffff - reg32SizeMask32 = 0xffffffff - reg32SizeMask64 = 0xffffffff - - // reg64SizeMaskX - shift value is 64 bits, shifted is X - reg64SizeMask8 = 0xffffffffffffffff - reg64SizeMask16 = 0xffffffffffffffff - reg64SizeMask32 = 0xffffffffffffffff - reg64SizeMask64 = 0xffffffffffffffff - - // regSizeMaskUintX - shift value is uint, shifted is X - regSizeMaskUint8 = ^uint(0) - regSizeMaskUint16 = ^uint(0) - regSizeMaskUint32 = ^uint(0) - regSizeMaskUint64 = ^uint(0) -) diff --git a/vendor/github.com/klauspost/compress/flate/stateless.go b/vendor/github.com/klauspost/compress/flate/stateless.go deleted file mode 100644 index f3d4139ef3..0000000000 --- a/vendor/github.com/klauspost/compress/flate/stateless.go +++ /dev/null @@ -1,318 +0,0 @@ -package flate - -import ( - "io" - "math" - "sync" -) - -const ( - maxStatelessBlock = math.MaxInt16 - // dictionary will be taken from maxStatelessBlock, so limit it. 
- maxStatelessDict = 8 << 10 - - slTableBits = 13 - slTableSize = 1 << slTableBits - slTableShift = 32 - slTableBits -) - -type statelessWriter struct { - dst io.Writer - closed bool -} - -func (s *statelessWriter) Close() error { - if s.closed { - return nil - } - s.closed = true - // Emit EOF block - return StatelessDeflate(s.dst, nil, true, nil) -} - -func (s *statelessWriter) Write(p []byte) (n int, err error) { - err = StatelessDeflate(s.dst, p, false, nil) - if err != nil { - return 0, err - } - return len(p), nil -} - -func (s *statelessWriter) Reset(w io.Writer) { - s.dst = w - s.closed = false -} - -// NewStatelessWriter will do compression but without maintaining any state -// between Write calls. -// There will be no memory kept between Write calls, -// but compression and speed will be suboptimal. -// Because of this, the size of actual Write calls will affect output size. -func NewStatelessWriter(dst io.Writer) io.WriteCloser { - return &statelessWriter{dst: dst} -} - -// bitWriterPool contains bit writers that can be reused. -var bitWriterPool = sync.Pool{ - New: func() interface{} { - return newHuffmanBitWriter(nil) - }, -} - -// StatelessDeflate allows compressing directly to a Writer without retaining state. -// When returning everything will be flushed. -// Up to 8KB of an optional dictionary can be given which is presumed to precede the block. -// Longer dictionaries will be truncated and will still produce valid output. -// Sending nil dictionary is perfectly fine. -func StatelessDeflate(out io.Writer, in []byte, eof bool, dict []byte) error { - var dst tokens - bw := bitWriterPool.Get().(*huffmanBitWriter) - bw.reset(out) - defer func() { - // don't keep a reference to our output - bw.reset(nil) - bitWriterPool.Put(bw) - }() - if eof && len(in) == 0 { - // Just write an EOF block. - // Could be faster... 
- bw.writeStoredHeader(0, true) - bw.flush() - return bw.err - } - - // Truncate dict - if len(dict) > maxStatelessDict { - dict = dict[len(dict)-maxStatelessDict:] - } - - // For subsequent loops, keep shallow dict reference to avoid alloc+copy. - var inDict []byte - - for len(in) > 0 { - todo := in - if len(inDict) > 0 { - if len(todo) > maxStatelessBlock-maxStatelessDict { - todo = todo[:maxStatelessBlock-maxStatelessDict] - } - } else if len(todo) > maxStatelessBlock-len(dict) { - todo = todo[:maxStatelessBlock-len(dict)] - } - inOrg := in - in = in[len(todo):] - uncompressed := todo - if len(dict) > 0 { - // combine dict and source - bufLen := len(todo) + len(dict) - combined := make([]byte, bufLen) - copy(combined, dict) - copy(combined[len(dict):], todo) - todo = combined - } - // Compress - if len(inDict) == 0 { - statelessEnc(&dst, todo, int16(len(dict))) - } else { - statelessEnc(&dst, inDict[:maxStatelessDict+len(todo)], maxStatelessDict) - } - isEof := eof && len(in) == 0 - - if dst.n == 0 { - bw.writeStoredHeader(len(uncompressed), isEof) - if bw.err != nil { - return bw.err - } - bw.writeBytes(uncompressed) - } else if int(dst.n) > len(uncompressed)-len(uncompressed)>>4 { - // If we removed less than 1/16th, huffman compress the block. - bw.writeBlockHuff(isEof, uncompressed, len(in) == 0) - } else { - bw.writeBlockDynamic(&dst, isEof, uncompressed, len(in) == 0) - } - if len(in) > 0 { - // Retain a dict if we have more - inDict = inOrg[len(uncompressed)-maxStatelessDict:] - dict = nil - dst.Reset() - } - if bw.err != nil { - return bw.err - } - } - if !eof { - // Align, only a stored block can do that. - bw.writeStoredHeader(0, false) - } - bw.flush() - return bw.err -} - -func hashSL(u uint32) uint32 { - return (u * 0x1e35a7bd) >> slTableShift -} - -func load3216(b []byte, i int16) uint32 { - // Help the compiler eliminate bounds checks on the read so it can be done in a single read. 
- b = b[i:] - b = b[:4] - return uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 -} - -func load6416(b []byte, i int16) uint64 { - // Help the compiler eliminate bounds checks on the read so it can be done in a single read. - b = b[i:] - b = b[:8] - return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | - uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 -} - -func statelessEnc(dst *tokens, src []byte, startAt int16) { - const ( - inputMargin = 12 - 1 - minNonLiteralBlockSize = 1 + 1 + inputMargin - ) - - type tableEntry struct { - offset int16 - } - - var table [slTableSize]tableEntry - - // This check isn't in the Snappy implementation, but there, the caller - // instead of the callee handles this case. - if len(src)-int(startAt) < minNonLiteralBlockSize { - // We do not fill the token table. - // This will be picked up by caller. - dst.n = 0 - return - } - // Index until startAt - if startAt > 0 { - cv := load3232(src, 0) - for i := int16(0); i < startAt; i++ { - table[hashSL(cv)] = tableEntry{offset: i} - cv = (cv >> 8) | (uint32(src[i+4]) << 24) - } - } - - s := startAt + 1 - nextEmit := startAt - // sLimit is when to stop looking for offset/length copies. The inputMargin - // lets us use a fast path for emitLiteral in the main loop, while we are - // looking for copies. - sLimit := int16(len(src) - inputMargin) - - // nextEmit is where in src the next emitLiteral should start from. 
- cv := load3216(src, s) - - for { - const skipLog = 5 - const doEvery = 2 - - nextS := s - var candidate tableEntry - for { - nextHash := hashSL(cv) - candidate = table[nextHash] - nextS = s + doEvery + (s-nextEmit)>>skipLog - if nextS > sLimit || nextS <= 0 { - goto emitRemainder - } - - now := load6416(src, nextS) - table[nextHash] = tableEntry{offset: s} - nextHash = hashSL(uint32(now)) - - if cv == load3216(src, candidate.offset) { - table[nextHash] = tableEntry{offset: nextS} - break - } - - // Do one right away... - cv = uint32(now) - s = nextS - nextS++ - candidate = table[nextHash] - now >>= 8 - table[nextHash] = tableEntry{offset: s} - - if cv == load3216(src, candidate.offset) { - table[nextHash] = tableEntry{offset: nextS} - break - } - cv = uint32(now) - s = nextS - } - - // A 4-byte match has been found. We'll later see if more than 4 bytes - // match. But, prior to the match, src[nextEmit:s] are unmatched. Emit - // them as literal bytes. - for { - // Invariant: we have a 4-byte match at s, and no need to emit any - // literal bytes prior to s. - - // Extend the 4-byte match as long as possible. - t := candidate.offset - l := int16(matchLen(src[s+4:], src[t+4:]) + 4) - - // Extend backwards - for t > 0 && s > nextEmit && src[t-1] == src[s-1] { - s-- - t-- - l++ - } - if nextEmit < s { - if false { - emitLiteral(dst, src[nextEmit:s]) - } else { - for _, v := range src[nextEmit:s] { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } - } - } - - // Save the match found - dst.AddMatchLong(int32(l), uint32(s-t-baseMatchOffset)) - s += l - nextEmit = s - if nextS >= s { - s = nextS + 1 - } - if s >= sLimit { - goto emitRemainder - } - - // We could immediately start working at s now, but to improve - // compression we first update the hash table at s-2 and at s. If - // another emitCopy is not our next move, also calculate nextHash - // at s+1. 
At least on GOARCH=amd64, these three hash calculations - // are faster as one load64 call (with some shifts) instead of - // three load32 calls. - x := load6416(src, s-2) - o := s - 2 - prevHash := hashSL(uint32(x)) - table[prevHash] = tableEntry{offset: o} - x >>= 16 - currHash := hashSL(uint32(x)) - candidate = table[currHash] - table[currHash] = tableEntry{offset: o + 2} - - if uint32(x) != load3216(src, candidate.offset) { - cv = uint32(x >> 8) - s++ - break - } - } - } - -emitRemainder: - if int(nextEmit) < len(src) { - // If nothing was added, don't encode literals. - if dst.n == 0 { - return - } - emitLiteral(dst, src[nextEmit:]) - } -} diff --git a/vendor/github.com/klauspost/compress/flate/token.go b/vendor/github.com/klauspost/compress/flate/token.go deleted file mode 100644 index d818790c13..0000000000 --- a/vendor/github.com/klauspost/compress/flate/token.go +++ /dev/null @@ -1,379 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package flate - -import ( - "bytes" - "encoding/binary" - "fmt" - "io" - "math" -) - -const ( - // bits 0-16 xoffset = offset - MIN_OFFSET_SIZE, or literal - 16 bits - // bits 16-22 offsetcode - 5 bits - // bits 22-30 xlength = length - MIN_MATCH_LENGTH - 8 bits - // bits 30-32 type 0 = literal 1=EOF 2=Match 3=Unused - 2 bits - lengthShift = 22 - offsetMask = 1<maxnumlit - offHist [32]uint16 // offset codes - litHist [256]uint16 // codes 0->255 - nFilled int - n uint16 // Must be able to contain maxStoreBlockSize - tokens [maxStoreBlockSize + 1]token -} - -func (t *tokens) Reset() { - if t.n == 0 { - return - } - t.n = 0 - t.nFilled = 0 - for i := range t.litHist[:] { - t.litHist[i] = 0 - } - for i := range t.extraHist[:] { - t.extraHist[i] = 0 - } - for i := range t.offHist[:] { - t.offHist[i] = 0 - } -} - -func (t *tokens) Fill() { - if t.n == 0 { - return - } - for i, v := range t.litHist[:] { - if v == 0 { - t.litHist[i] = 1 - t.nFilled++ - } - } - for i, v := range t.extraHist[:literalCount-256] { - if v == 0 { - t.nFilled++ - t.extraHist[i] = 1 - } - } - for i, v := range t.offHist[:offsetCodeCount] { - if v == 0 { - t.offHist[i] = 1 - } - } -} - -func indexTokens(in []token) tokens { - var t tokens - t.indexTokens(in) - return t -} - -func (t *tokens) indexTokens(in []token) { - t.Reset() - for _, tok := range in { - if tok < matchType { - t.AddLiteral(tok.literal()) - continue - } - t.AddMatch(uint32(tok.length()), tok.offset()&matchOffsetOnlyMask) - } -} - -// emitLiteral writes a literal chunk and returns the number of bytes written. 
-func emitLiteral(dst *tokens, lit []byte) { - for _, v := range lit { - dst.tokens[dst.n] = token(v) - dst.litHist[v]++ - dst.n++ - } -} - -func (t *tokens) AddLiteral(lit byte) { - t.tokens[t.n] = token(lit) - t.litHist[lit]++ - t.n++ -} - -// from https://stackoverflow.com/a/28730362 -func mFastLog2(val float32) float32 { - ux := int32(math.Float32bits(val)) - log2 := (float32)(((ux >> 23) & 255) - 128) - ux &= -0x7f800001 - ux += 127 << 23 - uval := math.Float32frombits(uint32(ux)) - log2 += ((-0.34484843)*uval+2.02466578)*uval - 0.67487759 - return log2 -} - -// EstimatedBits will return an minimum size estimated by an *optimal* -// compression of the block. -// The size of the block -func (t *tokens) EstimatedBits() int { - shannon := float32(0) - bits := int(0) - nMatches := 0 - total := int(t.n) + t.nFilled - if total > 0 { - invTotal := 1.0 / float32(total) - for _, v := range t.litHist[:] { - if v > 0 { - n := float32(v) - shannon += atLeastOne(-mFastLog2(n*invTotal)) * n - } - } - // Just add 15 for EOB - shannon += 15 - for i, v := range t.extraHist[1 : literalCount-256] { - if v > 0 { - n := float32(v) - shannon += atLeastOne(-mFastLog2(n*invTotal)) * n - bits += int(lengthExtraBits[i&31]) * int(v) - nMatches += int(v) - } - } - } - if nMatches > 0 { - invTotal := 1.0 / float32(nMatches) - for i, v := range t.offHist[:offsetCodeCount] { - if v > 0 { - n := float32(v) - shannon += atLeastOne(-mFastLog2(n*invTotal)) * n - bits += int(offsetExtraBits[i&31]) * int(v) - } - } - } - return int(shannon) + bits -} - -// AddMatch adds a match to the tokens. -// This function is very sensitive to inlining and right on the border. 
-func (t *tokens) AddMatch(xlength uint32, xoffset uint32) { - if debugDeflate { - if xlength >= maxMatchLength+baseMatchLength { - panic(fmt.Errorf("invalid length: %v", xlength)) - } - if xoffset >= maxMatchOffset+baseMatchOffset { - panic(fmt.Errorf("invalid offset: %v", xoffset)) - } - } - oCode := offsetCode(xoffset) - xoffset |= oCode << 16 - - t.extraHist[lengthCodes1[uint8(xlength)]]++ - t.offHist[oCode&31]++ - t.tokens[t.n] = token(matchType | xlength<= maxMatchOffset+baseMatchOffset { - panic(fmt.Errorf("invalid offset: %v", xoffset)) - } - } - oc := offsetCode(xoffset) - xoffset |= oc << 16 - for xlength > 0 { - xl := xlength - if xl > 258 { - // We need to have at least baseMatchLength left over for next loop. - if xl > 258+baseMatchLength { - xl = 258 - } else { - xl = 258 - baseMatchLength - } - } - xlength -= xl - xl -= baseMatchLength - t.extraHist[lengthCodes1[uint8(xl)]]++ - t.offHist[oc&31]++ - t.tokens[t.n] = token(matchType | uint32(xl)<> lengthShift) } - -// Convert length to code. -func lengthCode(len uint8) uint8 { return lengthCodes[len] } - -// Returns the offset code corresponding to a specific offset -func offsetCode(off uint32) uint32 { - if false { - if off < uint32(len(offsetCodes)) { - return offsetCodes[off&255] - } else if off>>7 < uint32(len(offsetCodes)) { - return offsetCodes[(off>>7)&255] + 14 - } else { - return offsetCodes[(off>>14)&255] + 28 - } - } - if off < uint32(len(offsetCodes)) { - return offsetCodes[uint8(off)] - } - return offsetCodes14[uint8(off>>7)] -} diff --git a/vendor/github.com/klauspost/compress/gzip/gunzip.go b/vendor/github.com/klauspost/compress/gzip/gunzip.go deleted file mode 100644 index dc2362a63b..0000000000 --- a/vendor/github.com/klauspost/compress/gzip/gunzip.go +++ /dev/null @@ -1,375 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package gzip implements reading and writing of gzip format compressed files, -// as specified in RFC 1952. -package gzip - -import ( - "bufio" - "compress/gzip" - "encoding/binary" - "hash/crc32" - "io" - "time" - - "github.com/klauspost/compress/flate" -) - -const ( - gzipID1 = 0x1f - gzipID2 = 0x8b - gzipDeflate = 8 - flagText = 1 << 0 - flagHdrCrc = 1 << 1 - flagExtra = 1 << 2 - flagName = 1 << 3 - flagComment = 1 << 4 -) - -var ( - // ErrChecksum is returned when reading GZIP data that has an invalid checksum. - ErrChecksum = gzip.ErrChecksum - // ErrHeader is returned when reading GZIP data that has an invalid header. - ErrHeader = gzip.ErrHeader -) - -var le = binary.LittleEndian - -// noEOF converts io.EOF to io.ErrUnexpectedEOF. -func noEOF(err error) error { - if err == io.EOF { - return io.ErrUnexpectedEOF - } - return err -} - -// The gzip file stores a header giving metadata about the compressed file. -// That header is exposed as the fields of the Writer and Reader structs. -// -// Strings must be UTF-8 encoded and may only contain Unicode code points -// U+0001 through U+00FF, due to limitations of the GZIP file format. -type Header struct { - Comment string // comment - Extra []byte // "extra data" - ModTime time.Time // modification time - Name string // file name - OS byte // operating system type -} - -// A Reader is an io.Reader that can be read to retrieve -// uncompressed data from a gzip-format compressed file. -// -// In general, a gzip file can be a concatenation of gzip files, -// each with its own header. Reads from the Reader -// return the concatenation of the uncompressed data of each. -// Only the first header is recorded in the Reader fields. -// -// Gzip files store a length and checksum of the uncompressed data. -// The Reader will return a ErrChecksum when Read -// reaches the end of the uncompressed data if it does not -// have the expected length or checksum. 
Clients should treat data -// returned by Read as tentative until they receive the io.EOF -// marking the end of the data. -type Reader struct { - Header // valid after NewReader or Reader.Reset - r flate.Reader - br *bufio.Reader - decompressor io.ReadCloser - digest uint32 // CRC-32, IEEE polynomial (section 8) - size uint32 // Uncompressed size (section 2.3.1) - buf [512]byte - err error - multistream bool -} - -// NewReader creates a new Reader reading the given reader. -// If r does not also implement io.ByteReader, -// the decompressor may read more data than necessary from r. -// -// It is the caller's responsibility to call Close on the Reader when done. -// -// The Reader.Header fields will be valid in the Reader returned. -func NewReader(r io.Reader) (*Reader, error) { - z := new(Reader) - if err := z.Reset(r); err != nil { - return nil, err - } - return z, nil -} - -// Reset discards the Reader z's state and makes it equivalent to the -// result of its original state from NewReader, but reading from r instead. -// This permits reusing a Reader rather than allocating a new one. -func (z *Reader) Reset(r io.Reader) error { - *z = Reader{ - decompressor: z.decompressor, - multistream: true, - br: z.br, - } - if rr, ok := r.(flate.Reader); ok { - z.r = rr - } else { - // Reuse if we can. - if z.br != nil { - z.br.Reset(r) - } else { - z.br = bufio.NewReader(r) - } - z.r = z.br - } - z.Header, z.err = z.readHeader() - return z.err -} - -// Multistream controls whether the reader supports multistream files. -// -// If enabled (the default), the Reader expects the input to be a sequence -// of individually gzipped data streams, each with its own header and -// trailer, ending at EOF. The effect is that the concatenation of a sequence -// of gzipped files is treated as equivalent to the gzip of the concatenation -// of the sequence. This is standard behavior for gzip readers. 
-// -// Calling Multistream(false) disables this behavior; disabling the behavior -// can be useful when reading file formats that distinguish individual gzip -// data streams or mix gzip data streams with other data streams. -// In this mode, when the Reader reaches the end of the data stream, -// Read returns io.EOF. If the underlying reader implements io.ByteReader, -// it will be left positioned just after the gzip stream. -// To start the next stream, call z.Reset(r) followed by z.Multistream(false). -// If there is no next stream, z.Reset(r) will return io.EOF. -func (z *Reader) Multistream(ok bool) { - z.multistream = ok -} - -// readString reads a NUL-terminated string from z.r. -// It treats the bytes read as being encoded as ISO 8859-1 (Latin-1) and -// will output a string encoded using UTF-8. -// This method always updates z.digest with the data read. -func (z *Reader) readString() (string, error) { - var err error - needConv := false - for i := 0; ; i++ { - if i >= len(z.buf) { - return "", ErrHeader - } - z.buf[i], err = z.r.ReadByte() - if err != nil { - return "", err - } - if z.buf[i] > 0x7f { - needConv = true - } - if z.buf[i] == 0 { - // Digest covers the NUL terminator. - z.digest = crc32.Update(z.digest, crc32.IEEETable, z.buf[:i+1]) - - // Strings are ISO 8859-1, Latin-1 (RFC 1952, section 2.3.1). - if needConv { - s := make([]rune, 0, i) - for _, v := range z.buf[:i] { - s = append(s, rune(v)) - } - return string(s), nil - } - return string(z.buf[:i]), nil - } - } -} - -// readHeader reads the GZIP header according to section 2.3.1. -// This method does not set z.err. -func (z *Reader) readHeader() (hdr Header, err error) { - if _, err = io.ReadFull(z.r, z.buf[:10]); err != nil { - // RFC 1952, section 2.2, says the following: - // A gzip file consists of a series of "members" (compressed data sets). - // - // Other than this, the specification does not clarify whether a - // "series" is defined as "one or more" or "zero or more". 
To err on the - // side of caution, Go interprets this to mean "zero or more". - // Thus, it is okay to return io.EOF here. - return hdr, err - } - if z.buf[0] != gzipID1 || z.buf[1] != gzipID2 || z.buf[2] != gzipDeflate { - return hdr, ErrHeader - } - flg := z.buf[3] - hdr.ModTime = time.Unix(int64(le.Uint32(z.buf[4:8])), 0) - // z.buf[8] is XFL and is currently ignored. - hdr.OS = z.buf[9] - z.digest = crc32.ChecksumIEEE(z.buf[:10]) - - if flg&flagExtra != 0 { - if _, err = io.ReadFull(z.r, z.buf[:2]); err != nil { - return hdr, noEOF(err) - } - z.digest = crc32.Update(z.digest, crc32.IEEETable, z.buf[:2]) - data := make([]byte, le.Uint16(z.buf[:2])) - if _, err = io.ReadFull(z.r, data); err != nil { - return hdr, noEOF(err) - } - z.digest = crc32.Update(z.digest, crc32.IEEETable, data) - hdr.Extra = data - } - - var s string - if flg&flagName != 0 { - if s, err = z.readString(); err != nil { - return hdr, err - } - hdr.Name = s - } - - if flg&flagComment != 0 { - if s, err = z.readString(); err != nil { - return hdr, err - } - hdr.Comment = s - } - - if flg&flagHdrCrc != 0 { - if _, err = io.ReadFull(z.r, z.buf[:2]); err != nil { - return hdr, noEOF(err) - } - digest := le.Uint16(z.buf[:2]) - if digest != uint16(z.digest) { - return hdr, ErrHeader - } - } - - z.digest = 0 - if z.decompressor == nil { - z.decompressor = flate.NewReader(z.r) - } else { - z.decompressor.(flate.Resetter).Reset(z.r, nil) - } - return hdr, nil -} - -// Read implements io.Reader, reading uncompressed bytes from its underlying Reader. -func (z *Reader) Read(p []byte) (n int, err error) { - if z.err != nil { - return 0, z.err - } - - for n == 0 { - n, z.err = z.decompressor.Read(p) - z.digest = crc32.Update(z.digest, crc32.IEEETable, p[:n]) - z.size += uint32(n) - if z.err != io.EOF { - // In the normal case we return here. - return n, z.err - } - - // Finished file; check checksum and size. 
- if _, err := io.ReadFull(z.r, z.buf[:8]); err != nil { - z.err = noEOF(err) - return n, z.err - } - digest := le.Uint32(z.buf[:4]) - size := le.Uint32(z.buf[4:8]) - if digest != z.digest || size != z.size { - z.err = ErrChecksum - return n, z.err - } - z.digest, z.size = 0, 0 - - // File is ok; check if there is another. - if !z.multistream { - return n, io.EOF - } - z.err = nil // Remove io.EOF - - if _, z.err = z.readHeader(); z.err != nil { - return n, z.err - } - } - - return n, nil -} - -type crcer interface { - io.Writer - Sum32() uint32 - Reset() -} -type crcUpdater struct { - z *Reader -} - -func (c *crcUpdater) Write(p []byte) (int, error) { - c.z.digest = crc32.Update(c.z.digest, crc32.IEEETable, p) - return len(p), nil -} - -func (c *crcUpdater) Sum32() uint32 { - return c.z.digest -} - -func (c *crcUpdater) Reset() { - c.z.digest = 0 -} - -// WriteTo support the io.WriteTo interface for io.Copy and friends. -func (z *Reader) WriteTo(w io.Writer) (int64, error) { - total := int64(0) - crcWriter := crcer(crc32.NewIEEE()) - if z.digest != 0 { - crcWriter = &crcUpdater{z: z} - } - for { - if z.err != nil { - if z.err == io.EOF { - return total, nil - } - return total, z.err - } - - // We write both to output and digest. - mw := io.MultiWriter(w, crcWriter) - n, err := z.decompressor.(io.WriterTo).WriteTo(mw) - total += n - z.size += uint32(n) - if err != nil { - z.err = err - return total, z.err - } - - // Finished file; check checksum + size. - if _, err := io.ReadFull(z.r, z.buf[0:8]); err != nil { - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - z.err = err - return total, err - } - z.digest = crcWriter.Sum32() - digest := le.Uint32(z.buf[:4]) - size := le.Uint32(z.buf[4:8]) - if digest != z.digest || size != z.size { - z.err = ErrChecksum - return total, z.err - } - z.digest, z.size = 0, 0 - - // File is ok; check if there is another. 
- if !z.multistream { - return total, nil - } - crcWriter.Reset() - z.err = nil // Remove io.EOF - - if _, z.err = z.readHeader(); z.err != nil { - if z.err == io.EOF { - return total, nil - } - return total, z.err - } - } -} - -// Close closes the Reader. It does not close the underlying io.Reader. -// In order for the GZIP checksum to be verified, the reader must be -// fully consumed until the io.EOF. -func (z *Reader) Close() error { return z.decompressor.Close() } diff --git a/vendor/github.com/klauspost/compress/gzip/gzip.go b/vendor/github.com/klauspost/compress/gzip/gzip.go deleted file mode 100644 index 5bc720593e..0000000000 --- a/vendor/github.com/klauspost/compress/gzip/gzip.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gzip - -import ( - "errors" - "fmt" - "hash/crc32" - "io" - - "github.com/klauspost/compress/flate" -) - -// These constants are copied from the flate package, so that code that imports -// "compress/gzip" does not also have to import "compress/flate". -const ( - NoCompression = flate.NoCompression - BestSpeed = flate.BestSpeed - BestCompression = flate.BestCompression - DefaultCompression = flate.DefaultCompression - ConstantCompression = flate.ConstantCompression - HuffmanOnly = flate.HuffmanOnly - - // StatelessCompression will do compression but without maintaining any state - // between Write calls. - // There will be no memory kept between Write calls, - // but compression and speed will be suboptimal. - // Because of this, the size of actual Write calls will affect output size. - StatelessCompression = -3 -) - -// A Writer is an io.WriteCloser. -// Writes to a Writer are compressed and written to w. 
-type Writer struct { - Header // written at first call to Write, Flush, or Close - w io.Writer - level int - err error - compressor *flate.Writer - digest uint32 // CRC-32, IEEE polynomial (section 8) - size uint32 // Uncompressed size (section 2.3.1) - wroteHeader bool - closed bool - buf [10]byte -} - -// NewWriter returns a new Writer. -// Writes to the returned writer are compressed and written to w. -// -// It is the caller's responsibility to call Close on the WriteCloser when done. -// Writes may be buffered and not flushed until Close. -// -// Callers that wish to set the fields in Writer.Header must do so before -// the first call to Write, Flush, or Close. -func NewWriter(w io.Writer) *Writer { - z, _ := NewWriterLevel(w, DefaultCompression) - return z -} - -// NewWriterLevel is like NewWriter but specifies the compression level instead -// of assuming DefaultCompression. -// -// The compression level can be DefaultCompression, NoCompression, or any -// integer value between BestSpeed and BestCompression inclusive. The error -// returned will be nil if the level is valid. -func NewWriterLevel(w io.Writer, level int) (*Writer, error) { - if level < StatelessCompression || level > BestCompression { - return nil, fmt.Errorf("gzip: invalid compression level: %d", level) - } - z := new(Writer) - z.init(w, level) - return z, nil -} - -// MinCustomWindowSize is the minimum window size that can be sent to NewWriterWindow. -const MinCustomWindowSize = flate.MinCustomWindowSize - -// MaxCustomWindowSize is the maximum custom window that can be sent to NewWriterWindow. -const MaxCustomWindowSize = flate.MaxCustomWindowSize - -// NewWriterWindow returns a new Writer compressing data with a custom window size. -// windowSize must be from MinCustomWindowSize to MaxCustomWindowSize. 
-func NewWriterWindow(w io.Writer, windowSize int) (*Writer, error) { - if windowSize < MinCustomWindowSize { - return nil, errors.New("gzip: requested window size less than MinWindowSize") - } - if windowSize > MaxCustomWindowSize { - return nil, errors.New("gzip: requested window size bigger than MaxCustomWindowSize") - } - - z := new(Writer) - z.init(w, -windowSize) - return z, nil -} - -func (z *Writer) init(w io.Writer, level int) { - compressor := z.compressor - if level != StatelessCompression { - if compressor != nil { - compressor.Reset(w) - } - } - - *z = Writer{ - Header: Header{ - OS: 255, // unknown - }, - w: w, - level: level, - compressor: compressor, - } -} - -// Reset discards the Writer z's state and makes it equivalent to the -// result of its original state from NewWriter or NewWriterLevel, but -// writing to w instead. This permits reusing a Writer rather than -// allocating a new one. -func (z *Writer) Reset(w io.Writer) { - z.init(w, z.level) -} - -// writeBytes writes a length-prefixed byte slice to z.w. -func (z *Writer) writeBytes(b []byte) error { - if len(b) > 0xffff { - return errors.New("gzip.Write: Extra data is too large") - } - le.PutUint16(z.buf[:2], uint16(len(b))) - _, err := z.w.Write(z.buf[:2]) - if err != nil { - return err - } - _, err = z.w.Write(b) - return err -} - -// writeString writes a UTF-8 string s in GZIP's format to z.w. -// GZIP (RFC 1952) specifies that strings are NUL-terminated ISO 8859-1 (Latin-1). -func (z *Writer) writeString(s string) (err error) { - // GZIP stores Latin-1 strings; error if non-Latin-1; convert if non-ASCII. 
- needconv := false - for _, v := range s { - if v == 0 || v > 0xff { - return errors.New("gzip.Write: non-Latin-1 header string") - } - if v > 0x7f { - needconv = true - } - } - if needconv { - b := make([]byte, 0, len(s)) - for _, v := range s { - b = append(b, byte(v)) - } - _, err = z.w.Write(b) - } else { - _, err = io.WriteString(z.w, s) - } - if err != nil { - return err - } - // GZIP strings are NUL-terminated. - z.buf[0] = 0 - _, err = z.w.Write(z.buf[:1]) - return err -} - -// Write writes a compressed form of p to the underlying io.Writer. The -// compressed bytes are not necessarily flushed until the Writer is closed. -func (z *Writer) Write(p []byte) (int, error) { - if z.err != nil { - return 0, z.err - } - var n int - // Write the GZIP header lazily. - if !z.wroteHeader { - z.wroteHeader = true - z.buf[0] = gzipID1 - z.buf[1] = gzipID2 - z.buf[2] = gzipDeflate - z.buf[3] = 0 - if z.Extra != nil { - z.buf[3] |= 0x04 - } - if z.Name != "" { - z.buf[3] |= 0x08 - } - if z.Comment != "" { - z.buf[3] |= 0x10 - } - le.PutUint32(z.buf[4:8], uint32(z.ModTime.Unix())) - if z.level == BestCompression { - z.buf[8] = 2 - } else if z.level == BestSpeed { - z.buf[8] = 4 - } else { - z.buf[8] = 0 - } - z.buf[9] = z.OS - n, z.err = z.w.Write(z.buf[:10]) - if z.err != nil { - return n, z.err - } - if z.Extra != nil { - z.err = z.writeBytes(z.Extra) - if z.err != nil { - return n, z.err - } - } - if z.Name != "" { - z.err = z.writeString(z.Name) - if z.err != nil { - return n, z.err - } - } - if z.Comment != "" { - z.err = z.writeString(z.Comment) - if z.err != nil { - return n, z.err - } - } - - if z.compressor == nil && z.level != StatelessCompression { - z.compressor, _ = flate.NewWriter(z.w, z.level) - } - } - z.size += uint32(len(p)) - z.digest = crc32.Update(z.digest, crc32.IEEETable, p) - if z.level == StatelessCompression { - return len(p), flate.StatelessDeflate(z.w, p, false, nil) - } - n, z.err = z.compressor.Write(p) - return n, z.err -} - -// Flush flushes 
any pending compressed data to the underlying writer. -// -// It is useful mainly in compressed network protocols, to ensure that -// a remote reader has enough data to reconstruct a packet. Flush does -// not return until the data has been written. If the underlying -// writer returns an error, Flush returns that error. -// -// In the terminology of the zlib library, Flush is equivalent to Z_SYNC_FLUSH. -func (z *Writer) Flush() error { - if z.err != nil { - return z.err - } - if z.closed || z.level == StatelessCompression { - return nil - } - if !z.wroteHeader { - z.Write(nil) - if z.err != nil { - return z.err - } - } - z.err = z.compressor.Flush() - return z.err -} - -// Close closes the Writer, flushing any unwritten data to the underlying -// io.Writer, but does not close the underlying io.Writer. -func (z *Writer) Close() error { - if z.err != nil { - return z.err - } - if z.closed { - return nil - } - z.closed = true - if !z.wroteHeader { - z.Write(nil) - if z.err != nil { - return z.err - } - } - if z.level == StatelessCompression { - z.err = flate.StatelessDeflate(z.w, nil, true, nil) - } else { - z.err = z.compressor.Close() - } - if z.err != nil { - return z.err - } - le.PutUint32(z.buf[:4], z.digest) - le.PutUint32(z.buf[4:8], z.size) - _, z.err = z.w.Write(z.buf[:8]) - return z.err -} diff --git a/vendor/github.com/klauspost/compress/zip/reader.go b/vendor/github.com/klauspost/compress/zip/reader.go deleted file mode 100644 index 460394ca1f..0000000000 --- a/vendor/github.com/klauspost/compress/zip/reader.go +++ /dev/null @@ -1,901 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package zip - -import ( - "bufio" - "encoding/binary" - "errors" - "hash" - "hash/crc32" - "io" - "io/fs" - "os" - "path" - "sort" - "strings" - "sync" - "time" -) - -var ( - ErrFormat = errors.New("zip: not a valid zip file") - ErrAlgorithm = errors.New("zip: unsupported compression algorithm") - ErrChecksum = errors.New("zip: checksum error") -) - -// A Reader serves content from a ZIP archive. -type Reader struct { - r io.ReaderAt - File []*File - Comment string - decompressors map[uint16]Decompressor - - // Some JAR files are zip files with a prefix that is a bash script. - // The baseOffset field is the start of the zip file proper. - baseOffset int64 - - // fileList is a list of files sorted by ename, - // for use by the Open method. - fileListOnce sync.Once - fileList []fileListEntry -} - -// A ReadCloser is a Reader that must be closed when no longer needed. -type ReadCloser struct { - f *os.File - Reader -} - -// A File is a single file in a ZIP archive. -// The file information is in the embedded FileHeader. -// The file content can be accessed by calling Open. -type File struct { - FileHeader - zip *Reader - zipr io.ReaderAt - headerOffset int64 // includes overall ZIP archive baseOffset - zip64 bool // zip64 extended information extra field presence -} - -// OpenReader will open the Zip file specified by name and return a ReadCloser. -func OpenReader(name string) (*ReadCloser, error) { - f, err := os.Open(name) - if err != nil { - return nil, err - } - fi, err := f.Stat() - if err != nil { - f.Close() - return nil, err - } - r := new(ReadCloser) - if err := r.init(f, fi.Size()); err != nil { - f.Close() - return nil, err - } - r.f = f - return r, nil -} - -// NewReader returns a new Reader reading from r, which is assumed to -// have the given size in bytes. 
-func NewReader(r io.ReaderAt, size int64) (*Reader, error) { - if size < 0 { - return nil, errors.New("zip: size cannot be negative") - } - zr := new(Reader) - if err := zr.init(r, size); err != nil { - return nil, err - } - return zr, nil -} - -func (z *Reader) init(r io.ReaderAt, size int64) error { - end, baseOffset, err := readDirectoryEnd(r, size) - if err != nil { - return err - } - z.r = r - z.baseOffset = baseOffset - // Since the number of directory records is not validated, it is not - // safe to preallocate z.File without first checking that the specified - // number of files is reasonable, since a malformed archive may - // indicate it contains up to 1 << 128 - 1 files. Since each file has a - // header which will be _at least_ 30 bytes we can safely preallocate - // if (data size / 30) >= end.directoryRecords. - if end.directorySize < uint64(size) && (uint64(size)-end.directorySize)/30 >= end.directoryRecords { - z.File = make([]*File, 0, end.directoryRecords) - } - z.Comment = end.comment - rs := io.NewSectionReader(r, 0, size) - if _, err = rs.Seek(z.baseOffset+int64(end.directoryOffset), io.SeekStart); err != nil { - return err - } - buf := bufio.NewReader(rs) - - // The count of files inside a zip is truncated to fit in a uint16. - // Gloss over this by reading headers until we encounter - // a bad one, and then only report an ErrFormat or UnexpectedEOF if - // the file count modulo 65536 is incorrect. - for { - f := &File{zip: z, zipr: r} - err = readDirectoryHeader(f, buf) - - // For compatibility with other zip programs, - // if we have a non-zero base offset and can't read - // the first directory header, try again with a zero - // base offset. 
- if err == ErrFormat && z.baseOffset != 0 && len(z.File) == 0 { - z.baseOffset = 0 - if _, err = rs.Seek(int64(end.directoryOffset), io.SeekStart); err != nil { - return err - } - buf.Reset(rs) - continue - } - - if err == ErrFormat || err == io.ErrUnexpectedEOF { - break - } - if err != nil { - return err - } - f.headerOffset += z.baseOffset - z.File = append(z.File, f) - } - if uint16(len(z.File)) != uint16(end.directoryRecords) { // only compare 16 bits here - // Return the readDirectoryHeader error if we read - // the wrong number of directory entries. - return err - } - return nil -} - -// RegisterDecompressor registers or overrides a custom decompressor for a -// specific method ID. If a decompressor for a given method is not found, -// Reader will default to looking up the decompressor at the package level. -func (z *Reader) RegisterDecompressor(method uint16, dcomp Decompressor) { - if z.decompressors == nil { - z.decompressors = make(map[uint16]Decompressor) - } - z.decompressors[method] = dcomp -} - -func (z *Reader) decompressor(method uint16) Decompressor { - dcomp := z.decompressors[method] - if dcomp == nil { - dcomp = decompressor(method) - } - return dcomp -} - -// Close closes the Zip file, rendering it unusable for I/O. -func (rc *ReadCloser) Close() error { - return rc.f.Close() -} - -// DataOffset returns the offset of the file's possibly-compressed -// data, relative to the beginning of the zip file. -// -// Most callers should instead use Open, which transparently -// decompresses data and verifies checksums. -func (f *File) DataOffset() (offset int64, err error) { - bodyOffset, err := f.findBodyOffset() - if err != nil { - return - } - return f.headerOffset + bodyOffset, nil -} - -// Open returns a ReadCloser that provides access to the File's contents. -// Multiple files may be read concurrently. 
-func (f *File) Open() (io.ReadCloser, error) { - bodyOffset, err := f.findBodyOffset() - if err != nil { - return nil, err - } - size := int64(f.CompressedSize64) - r := io.NewSectionReader(f.zipr, f.headerOffset+bodyOffset, size) - dcomp := f.zip.decompressor(f.Method) - if dcomp == nil { - return nil, ErrAlgorithm - } - var rc io.ReadCloser = dcomp(r) - var desr io.Reader - if f.hasDataDescriptor() { - desr = io.NewSectionReader(f.zipr, f.headerOffset+bodyOffset+size, dataDescriptorLen) - } - rc = &checksumReader{ - rc: rc, - hash: crc32.NewIEEE(), - f: f, - desr: desr, - } - return rc, nil -} - -// OpenRaw returns a Reader that provides access to the File's contents without -// decompression. -func (f *File) OpenRaw() (io.Reader, error) { - bodyOffset, err := f.findBodyOffset() - if err != nil { - return nil, err - } - r := io.NewSectionReader(f.zipr, f.headerOffset+bodyOffset, int64(f.CompressedSize64)) - return r, nil -} - -type checksumReader struct { - rc io.ReadCloser - hash hash.Hash32 - nread uint64 // number of bytes read so far - f *File - desr io.Reader // if non-nil, where to read the data descriptor - err error // sticky error -} - -func (r *checksumReader) Stat() (fs.FileInfo, error) { - return headerFileInfo{&r.f.FileHeader}, nil -} - -func (r *checksumReader) Read(b []byte) (n int, err error) { - if r.err != nil { - return 0, r.err - } - n, err = r.rc.Read(b) - r.hash.Write(b[:n]) - r.nread += uint64(n) - if r.nread > r.f.UncompressedSize64 { - return 0, ErrFormat - } - if err == nil { - return - } - if err == io.EOF { - if r.nread != r.f.UncompressedSize64 { - return 0, io.ErrUnexpectedEOF - } - if r.desr != nil { - if err1 := readDataDescriptor(r.desr, r.f); err1 != nil { - if err1 == io.EOF { - err = io.ErrUnexpectedEOF - } else { - err = err1 - } - } else if r.hash.Sum32() != r.f.CRC32 { - err = ErrChecksum - } - } else { - // If there's not a data descriptor, we still compare - // the CRC32 of what we've read against the file header - // or 
TOC's CRC32, if it seems like it was set. - if r.f.CRC32 != 0 && r.hash.Sum32() != r.f.CRC32 { - err = ErrChecksum - } - } - } - r.err = err - return -} - -func (r *checksumReader) Close() error { return r.rc.Close() } - -// findBodyOffset does the minimum work to verify the file has a header -// and returns the file body offset. -func (f *File) findBodyOffset() (int64, error) { - var buf [fileHeaderLen]byte - if _, err := f.zipr.ReadAt(buf[:], f.headerOffset); err != nil { - return 0, err - } - b := readBuf(buf[:]) - if sig := b.uint32(); sig != fileHeaderSignature { - return 0, ErrFormat - } - b = b[22:] // skip over most of the header - filenameLen := int(b.uint16()) - extraLen := int(b.uint16()) - return int64(fileHeaderLen + filenameLen + extraLen), nil -} - -// readDirectoryHeader attempts to read a directory header from r. -// It returns io.ErrUnexpectedEOF if it cannot read a complete header, -// and ErrFormat if it doesn't find a valid header signature. -func readDirectoryHeader(f *File, r io.Reader) error { - var buf [directoryHeaderLen]byte - if _, err := io.ReadFull(r, buf[:]); err != nil { - return err - } - b := readBuf(buf[:]) - if sig := b.uint32(); sig != directoryHeaderSignature { - return ErrFormat - } - f.CreatorVersion = b.uint16() - f.ReaderVersion = b.uint16() - f.Flags = b.uint16() - f.Method = b.uint16() - f.ModifiedTime = b.uint16() - f.ModifiedDate = b.uint16() - f.CRC32 = b.uint32() - f.CompressedSize = b.uint32() - f.UncompressedSize = b.uint32() - f.CompressedSize64 = uint64(f.CompressedSize) - f.UncompressedSize64 = uint64(f.UncompressedSize) - filenameLen := int(b.uint16()) - extraLen := int(b.uint16()) - commentLen := int(b.uint16()) - b = b[4:] // skipped start disk number and internal attributes (2x uint16) - f.ExternalAttrs = b.uint32() - f.headerOffset = int64(b.uint32()) - d := make([]byte, filenameLen+extraLen+commentLen) - if _, err := io.ReadFull(r, d); err != nil { - return err - } - f.Name = string(d[:filenameLen]) - 
f.Extra = d[filenameLen : filenameLen+extraLen] - f.Comment = string(d[filenameLen+extraLen:]) - - // Determine the character encoding. - utf8Valid1, utf8Require1 := detectUTF8(f.Name) - utf8Valid2, utf8Require2 := detectUTF8(f.Comment) - switch { - case !utf8Valid1 || !utf8Valid2: - // Name and Comment definitely not UTF-8. - f.NonUTF8 = true - case !utf8Require1 && !utf8Require2: - // Name and Comment use only single-byte runes that overlap with UTF-8. - f.NonUTF8 = false - default: - // Might be UTF-8, might be some other encoding; preserve existing flag. - // Some ZIP writers use UTF-8 encoding without setting the UTF-8 flag. - // Since it is impossible to always distinguish valid UTF-8 from some - // other encoding (e.g., GBK or Shift-JIS), we trust the flag. - f.NonUTF8 = f.Flags&0x800 == 0 - } - - needUSize := f.UncompressedSize == ^uint32(0) - needCSize := f.CompressedSize == ^uint32(0) - needHeaderOffset := f.headerOffset == int64(^uint32(0)) - - // Best effort to find what we need. - // Other zip authors might not even follow the basic format, - // and we'll just ignore the Extra content in that case. - var modified time.Time -parseExtras: - for extra := readBuf(f.Extra); len(extra) >= 4; { // need at least tag and size - fieldTag := extra.uint16() - fieldSize := int(extra.uint16()) - if len(extra) < fieldSize { - break - } - fieldBuf := extra.sub(fieldSize) - - switch fieldTag { - case zip64ExtraID: - f.zip64 = true - - // update directory values from the zip64 extra block. - // They should only be consulted if the sizes read earlier - // are maxed out. - // See golang.org/issue/13367. 
- if needUSize { - needUSize = false - if len(fieldBuf) < 8 { - return ErrFormat - } - f.UncompressedSize64 = fieldBuf.uint64() - } - if needCSize { - needCSize = false - if len(fieldBuf) < 8 { - return ErrFormat - } - f.CompressedSize64 = fieldBuf.uint64() - } - if needHeaderOffset { - needHeaderOffset = false - if len(fieldBuf) < 8 { - return ErrFormat - } - f.headerOffset = int64(fieldBuf.uint64()) - } - case ntfsExtraID: - if len(fieldBuf) < 4 { - continue parseExtras - } - fieldBuf.uint32() // reserved (ignored) - for len(fieldBuf) >= 4 { // need at least tag and size - attrTag := fieldBuf.uint16() - attrSize := int(fieldBuf.uint16()) - if len(fieldBuf) < attrSize { - continue parseExtras - } - attrBuf := fieldBuf.sub(attrSize) - if attrTag != 1 || attrSize != 24 { - continue // Ignore irrelevant attributes - } - - const ticksPerSecond = 1e7 // Windows timestamp resolution - ts := int64(attrBuf.uint64()) // ModTime since Windows epoch - secs := int64(ts / ticksPerSecond) - nsecs := (1e9 / ticksPerSecond) * int64(ts%ticksPerSecond) - epoch := time.Date(1601, time.January, 1, 0, 0, 0, 0, time.UTC) - modified = time.Unix(epoch.Unix()+secs, nsecs) - } - case unixExtraID, infoZipUnixExtraID: - if len(fieldBuf) < 8 { - continue parseExtras - } - fieldBuf.uint32() // AcTime (ignored) - ts := int64(fieldBuf.uint32()) // ModTime since Unix epoch - modified = time.Unix(ts, 0) - case extTimeExtraID: - if len(fieldBuf) < 5 || fieldBuf.uint8()&1 == 0 { - continue parseExtras - } - ts := int64(fieldBuf.uint32()) // ModTime since Unix epoch - modified = time.Unix(ts, 0) - } - } - - msdosModified := msDosTimeToTime(f.ModifiedDate, f.ModifiedTime) - f.Modified = msdosModified - if !modified.IsZero() { - f.Modified = modified.UTC() - - // If legacy MS-DOS timestamps are set, we can use the delta between - // the legacy and extended versions to estimate timezone offset. - // - // A non-UTC timezone is always used (even if offset is zero). 
- // Thus, FileHeader.Modified.Location() == time.UTC is useful for - // determining whether extended timestamps are present. - // This is necessary for users that need to do additional time - // calculations when dealing with legacy ZIP formats. - if f.ModifiedTime != 0 || f.ModifiedDate != 0 { - f.Modified = modified.In(timeZone(msdosModified.Sub(modified))) - } - } - - // Assume that uncompressed size 2³²-1 could plausibly happen in - // an old zip32 file that was sharding inputs into the largest chunks - // possible (or is just malicious; search the web for 42.zip). - // If needUSize is true still, it means we didn't see a zip64 extension. - // As long as the compressed size is not also 2³²-1 (implausible) - // and the header is not also 2³²-1 (equally implausible), - // accept the uncompressed size 2³²-1 as valid. - // If nothing else, this keeps archive/zip working with 42.zip. - _ = needUSize - - if needCSize || needHeaderOffset { - return ErrFormat - } - - return nil -} - -func readDataDescriptor(r io.Reader, f *File) error { - var buf [dataDescriptorLen]byte - // The spec says: "Although not originally assigned a - // signature, the value 0x08074b50 has commonly been adopted - // as a signature value for the data descriptor record. - // Implementers should be aware that ZIP files may be - // encountered with or without this signature marking data - // descriptors and should account for either case when reading - // ZIP files to ensure compatibility." - // - // dataDescriptorLen includes the size of the signature but - // first read just those 4 bytes to see if it exists. - if _, err := io.ReadFull(r, buf[:4]); err != nil { - return err - } - off := 0 - maybeSig := readBuf(buf[:4]) - if maybeSig.uint32() != dataDescriptorSignature { - // No data descriptor signature. Keep these four - // bytes. 
- off += 4 - } - if _, err := io.ReadFull(r, buf[off:12]); err != nil { - return err - } - b := readBuf(buf[:12]) - if b.uint32() != f.CRC32 { - return ErrChecksum - } - - // The two sizes that follow here can be either 32 bits or 64 bits - // but the spec is not very clear on this and different - // interpretations has been made causing incompatibilities. We - // already have the sizes from the central directory so we can - // just ignore these. - - return nil -} - -func readDirectoryEnd(r io.ReaderAt, size int64) (dir *directoryEnd, baseOffset int64, err error) { - // look for directoryEndSignature in the last 1k, then in the last 65k - var buf []byte - var directoryEndOffset int64 - for i, bLen := range []int64{1024, 65 * 1024} { - if bLen > size { - bLen = size - } - buf = make([]byte, int(bLen)) - if _, err := r.ReadAt(buf, size-bLen); err != nil && err != io.EOF { - return nil, 0, err - } - if p := findSignatureInBlock(buf); p >= 0 { - buf = buf[p:] - directoryEndOffset = size - bLen + int64(p) - break - } - if i == 1 || bLen == size { - return nil, 0, ErrFormat - } - } - - // read header into struct - b := readBuf(buf[4:]) // skip signature - d := &directoryEnd{ - diskNbr: uint32(b.uint16()), - dirDiskNbr: uint32(b.uint16()), - dirRecordsThisDisk: uint64(b.uint16()), - directoryRecords: uint64(b.uint16()), - directorySize: uint64(b.uint32()), - directoryOffset: uint64(b.uint32()), - commentLen: b.uint16(), - } - l := int(d.commentLen) - if l > len(b) { - return nil, 0, errors.New("zip: invalid comment length") - } - d.comment = string(b[:l]) - - // These values mean that the file can be a zip64 file - if d.directoryRecords == 0xffff || d.directorySize == 0xffff || d.directoryOffset == 0xffffffff { - p, err := findDirectory64End(r, directoryEndOffset) - if err == nil && p >= 0 { - directoryEndOffset = p - err = readDirectory64End(r, p, d) - } - if err != nil { - return nil, 0, err - } - } - - baseOffset = directoryEndOffset - int64(d.directorySize) - 
int64(d.directoryOffset) - - // Make sure directoryOffset points to somewhere in our file. - if o := baseOffset + int64(d.directoryOffset); o < 0 || o >= size { - return nil, 0, ErrFormat - } - return d, baseOffset, nil -} - -// findDirectory64End tries to read the zip64 locator just before the -// directory end and returns the offset of the zip64 directory end if -// found. -func findDirectory64End(r io.ReaderAt, directoryEndOffset int64) (int64, error) { - locOffset := directoryEndOffset - directory64LocLen - if locOffset < 0 { - return -1, nil // no need to look for a header outside the file - } - buf := make([]byte, directory64LocLen) - if _, err := r.ReadAt(buf, locOffset); err != nil { - return -1, err - } - b := readBuf(buf) - if sig := b.uint32(); sig != directory64LocSignature { - return -1, nil - } - if b.uint32() != 0 { // number of the disk with the start of the zip64 end of central directory - return -1, nil // the file is not a valid zip64-file - } - p := b.uint64() // relative offset of the zip64 end of central directory record - if b.uint32() != 1 { // total number of disks - return -1, nil // the file is not a valid zip64-file - } - return int64(p), nil -} - -// readDirectory64End reads the zip64 directory end and updates the -// directory end with the zip64 directory end values. 
-func readDirectory64End(r io.ReaderAt, offset int64, d *directoryEnd) (err error) { - buf := make([]byte, directory64EndLen) - if _, err := r.ReadAt(buf, offset); err != nil { - return err - } - - b := readBuf(buf) - if sig := b.uint32(); sig != directory64EndSignature { - return ErrFormat - } - - b = b[12:] // skip dir size, version and version needed (uint64 + 2x uint16) - d.diskNbr = b.uint32() // number of this disk - d.dirDiskNbr = b.uint32() // number of the disk with the start of the central directory - d.dirRecordsThisDisk = b.uint64() // total number of entries in the central directory on this disk - d.directoryRecords = b.uint64() // total number of entries in the central directory - d.directorySize = b.uint64() // size of the central directory - d.directoryOffset = b.uint64() // offset of start of central directory with respect to the starting disk number - - return nil -} - -func findSignatureInBlock(b []byte) int { - for i := len(b) - directoryEndLen; i >= 0; i-- { - // defined from directoryEndSignature in struct.go - if b[i] == 'P' && b[i+1] == 'K' && b[i+2] == 0x05 && b[i+3] == 0x06 { - // n is length of comment - n := int(b[i+directoryEndLen-2]) | int(b[i+directoryEndLen-1])<<8 - if n+directoryEndLen+i <= len(b) { - return i - } - } - } - return -1 -} - -type readBuf []byte - -func (b *readBuf) uint8() uint8 { - v := (*b)[0] - *b = (*b)[1:] - return v -} - -func (b *readBuf) uint16() uint16 { - v := binary.LittleEndian.Uint16(*b) - *b = (*b)[2:] - return v -} - -func (b *readBuf) uint32() uint32 { - v := binary.LittleEndian.Uint32(*b) - *b = (*b)[4:] - return v -} - -func (b *readBuf) uint64() uint64 { - v := binary.LittleEndian.Uint64(*b) - *b = (*b)[8:] - return v -} - -func (b *readBuf) sub(n int) readBuf { - b2 := (*b)[:n] - *b = (*b)[n:] - return b2 -} - -// A fileListEntry is a File and its ename. -// If file == nil, the fileListEntry describes a directory without metadata. 
-type fileListEntry struct { - name string - file *File - isDir bool - isDup bool -} - -type fileInfoDirEntry interface { - fs.FileInfo - fs.DirEntry -} - -func (e *fileListEntry) stat() (fileInfoDirEntry, error) { - if e.isDup { - return nil, errors.New(e.name + ": duplicate entries in zip file") - } - if !e.isDir { - return headerFileInfo{&e.file.FileHeader}, nil - } - return e, nil -} - -// Only used for directories. -func (f *fileListEntry) Name() string { _, elem, _ := split(f.name); return elem } -func (f *fileListEntry) Size() int64 { return 0 } -func (f *fileListEntry) Mode() fs.FileMode { return fs.ModeDir | 0555 } -func (f *fileListEntry) Type() fs.FileMode { return fs.ModeDir } -func (f *fileListEntry) IsDir() bool { return true } -func (f *fileListEntry) Sys() interface{} { return nil } - -func (f *fileListEntry) ModTime() time.Time { - if f.file == nil { - return time.Time{} - } - return f.file.FileHeader.Modified.UTC() -} - -func (f *fileListEntry) Info() (fs.FileInfo, error) { return f, nil } - -// toValidName coerces name to be a valid name for fs.FS.Open. -func toValidName(name string) string { - name = strings.ReplaceAll(name, `\`, `/`) - p := path.Clean(name) - p = strings.TrimPrefix(p, "/") - p = strings.TrimPrefix(p, "../") - return p -} - -func (r *Reader) initFileList() { - r.fileListOnce.Do(func() { - // files and knownDirs map from a file/directory name - // to an index into the r.fileList entry that we are - // building. They are used to mark duplicate entries. - files := make(map[string]int) - knownDirs := make(map[string]int) - - // dirs[name] is true if name is known to be a directory, - // because it appears as a prefix in a path. 
- dirs := make(map[string]bool) - - for _, file := range r.File { - isDir := len(file.Name) > 0 && file.Name[len(file.Name)-1] == '/' - name := toValidName(file.Name) - if name == "" { - continue - } - - if idx, ok := files[name]; ok { - r.fileList[idx].isDup = true - continue - } - if idx, ok := knownDirs[name]; ok { - r.fileList[idx].isDup = true - continue - } - - for dir := path.Dir(name); dir != "."; dir = path.Dir(dir) { - dirs[dir] = true - } - - idx := len(r.fileList) - entry := fileListEntry{ - name: name, - file: file, - isDir: isDir, - } - r.fileList = append(r.fileList, entry) - if isDir { - knownDirs[name] = idx - } else { - files[name] = idx - } - } - for dir := range dirs { - if _, ok := knownDirs[dir]; !ok { - if idx, ok := files[dir]; ok { - r.fileList[idx].isDup = true - } else { - entry := fileListEntry{ - name: dir, - file: nil, - isDir: true, - } - r.fileList = append(r.fileList, entry) - } - } - } - - sort.Slice(r.fileList, func(i, j int) bool { return fileEntryLess(r.fileList[i].name, r.fileList[j].name) }) - }) -} - -func fileEntryLess(x, y string) bool { - xdir, xelem, _ := split(x) - ydir, yelem, _ := split(y) - return xdir < ydir || xdir == ydir && xelem < yelem -} - -// Open opens the named file in the ZIP archive, -// using the semantics of fs.FS.Open: -// paths are always slash separated, with no -// leading / or ../ elements. 
-func (r *Reader) Open(name string) (fs.File, error) { - r.initFileList() - - if !fs.ValidPath(name) { - return nil, &fs.PathError{Op: "open", Path: name, Err: fs.ErrInvalid} - } - e := r.openLookup(name) - if e == nil { - return nil, &fs.PathError{Op: "open", Path: name, Err: fs.ErrNotExist} - } - if e.isDir { - return &openDir{e, r.openReadDir(name), 0}, nil - } - rc, err := e.file.Open() - if err != nil { - return nil, err - } - return rc.(fs.File), nil -} - -func split(name string) (dir, elem string, isDir bool) { - if len(name) > 0 && name[len(name)-1] == '/' { - isDir = true - name = name[:len(name)-1] - } - i := len(name) - 1 - for i >= 0 && name[i] != '/' { - i-- - } - if i < 0 { - return ".", name, isDir - } - return name[:i], name[i+1:], isDir -} - -var dotFile = &fileListEntry{name: "./", isDir: true} - -func (r *Reader) openLookup(name string) *fileListEntry { - if name == "." { - return dotFile - } - - dir, elem, _ := split(name) - files := r.fileList - i := sort.Search(len(files), func(i int) bool { - idir, ielem, _ := split(files[i].name) - return idir > dir || idir == dir && ielem >= elem - }) - if i < len(files) { - fname := files[i].name - if fname == name || len(fname) == len(name)+1 && fname[len(name)] == '/' && fname[:len(name)] == name { - return &files[i] - } - } - return nil -} - -func (r *Reader) openReadDir(dir string) []fileListEntry { - files := r.fileList - i := sort.Search(len(files), func(i int) bool { - idir, _, _ := split(files[i].name) - return idir >= dir - }) - j := sort.Search(len(files), func(j int) bool { - jdir, _, _ := split(files[j].name) - return jdir > dir - }) - return files[i:j] -} - -type openDir struct { - e *fileListEntry - files []fileListEntry - offset int -} - -func (d *openDir) Close() error { return nil } -func (d *openDir) Stat() (fs.FileInfo, error) { return d.e.stat() } - -func (d *openDir) Read([]byte) (int, error) { - return 0, &fs.PathError{Op: "read", Path: d.e.name, Err: errors.New("is a directory")} -} 
- -func (d *openDir) ReadDir(count int) ([]fs.DirEntry, error) { - n := len(d.files) - d.offset - if count > 0 && n > count { - n = count - } - if n == 0 { - if count <= 0 { - return nil, nil - } - return nil, io.EOF - } - list := make([]fs.DirEntry, n) - for i := range list { - s, err := d.files[d.offset+i].stat() - if err != nil { - return nil, err - } - list[i] = s - } - d.offset += n - return list, nil -} diff --git a/vendor/github.com/klauspost/compress/zip/register.go b/vendor/github.com/klauspost/compress/zip/register.go deleted file mode 100644 index ca8c13ce92..0000000000 --- a/vendor/github.com/klauspost/compress/zip/register.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package zip - -import ( - "errors" - "io" - "sync" - - "github.com/klauspost/compress/flate" -) - -// A Compressor returns a new compressing writer, writing to w. -// The WriteCloser's Close method must be used to flush pending data to w. -// The Compressor itself must be safe to invoke from multiple goroutines -// simultaneously, but each returned writer will be used only by -// one goroutine at a time. -type Compressor func(w io.Writer) (io.WriteCloser, error) - -// A Decompressor returns a new decompressing reader, reading from r. -// The ReadCloser's Close method must be used to release associated resources. -// The Decompressor itself must be safe to invoke from multiple goroutines -// simultaneously, but each returned reader will be used only by -// one goroutine at a time. 
-type Decompressor func(r io.Reader) io.ReadCloser - -var flateWriterPool sync.Pool - -func newFlateWriter(w io.Writer) io.WriteCloser { - fw, ok := flateWriterPool.Get().(*flate.Writer) - if ok { - fw.Reset(w) - } else { - fw, _ = flate.NewWriter(w, 5) - } - return &pooledFlateWriter{fw: fw} -} - -type pooledFlateWriter struct { - mu sync.Mutex // guards Close and Write - fw *flate.Writer -} - -func (w *pooledFlateWriter) Write(p []byte) (n int, err error) { - w.mu.Lock() - defer w.mu.Unlock() - if w.fw == nil { - return 0, errors.New("Write after Close") - } - return w.fw.Write(p) -} - -func (w *pooledFlateWriter) Close() error { - w.mu.Lock() - defer w.mu.Unlock() - var err error - if w.fw != nil { - err = w.fw.Close() - flateWriterPool.Put(w.fw) - w.fw = nil - } - return err -} - -var flateReaderPool sync.Pool - -func newFlateReader(r io.Reader) io.ReadCloser { - fr, ok := flateReaderPool.Get().(io.ReadCloser) - if ok { - fr.(flate.Resetter).Reset(r, nil) - } else { - fr = flate.NewReader(r) - } - return &pooledFlateReader{fr: fr} -} - -type pooledFlateReader struct { - mu sync.Mutex // guards Close and Read - fr io.ReadCloser -} - -func (r *pooledFlateReader) Read(p []byte) (n int, err error) { - r.mu.Lock() - defer r.mu.Unlock() - if r.fr == nil { - return 0, errors.New("Read after Close") - } - return r.fr.Read(p) -} - -func (r *pooledFlateReader) Close() error { - r.mu.Lock() - defer r.mu.Unlock() - var err error - if r.fr != nil { - err = r.fr.Close() - flateReaderPool.Put(r.fr) - r.fr = nil - } - return err -} - -var ( - compressors sync.Map // map[uint16]Compressor - decompressors sync.Map // map[uint16]Decompressor -) - -func init() { - compressors.Store(Store, Compressor(func(w io.Writer) (io.WriteCloser, error) { return &nopCloser{w}, nil })) - compressors.Store(Deflate, Compressor(func(w io.Writer) (io.WriteCloser, error) { return newFlateWriter(w), nil })) - - decompressors.Store(Store, Decompressor(io.NopCloser)) - decompressors.Store(Deflate, 
Decompressor(newFlateReader)) -} - -// RegisterDecompressor allows custom decompressors for a specified method ID. -// The common methods Store and Deflate are built in. -func RegisterDecompressor(method uint16, dcomp Decompressor) { - if _, dup := decompressors.LoadOrStore(method, dcomp); dup { - panic("decompressor already registered") - } -} - -// RegisterCompressor registers custom compressors for a specified method ID. -// The common methods Store and Deflate are built in. -func RegisterCompressor(method uint16, comp Compressor) { - if _, dup := compressors.LoadOrStore(method, comp); dup { - panic("compressor already registered") - } -} - -func compressor(method uint16) Compressor { - ci, ok := compressors.Load(method) - if !ok { - return nil - } - return ci.(Compressor) -} - -func decompressor(method uint16) Decompressor { - di, ok := decompressors.Load(method) - if !ok { - return nil - } - return di.(Decompressor) -} diff --git a/vendor/github.com/klauspost/compress/zip/struct.go b/vendor/github.com/klauspost/compress/zip/struct.go deleted file mode 100644 index 88effedc0f..0000000000 --- a/vendor/github.com/klauspost/compress/zip/struct.go +++ /dev/null @@ -1,392 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package zip provides support for reading and writing ZIP archives. - -See: https://www.pkware.com/appnote - -This package does not support disk spanning. - -A note about ZIP64: - -To be backwards compatible the FileHeader has both 32 and 64 bit Size -fields. The 64 bit fields will always contain the correct value and -for normal archives both fields will be the same. For files requiring -the ZIP64 format the 32 bit fields will be 0xffffffff and the 64 bit -fields must be used instead. -*/ -package zip - -import ( - "io/fs" - "path" - "time" -) - -// Compression methods. 
-const ( - Store uint16 = 0 // no compression - Deflate uint16 = 8 // DEFLATE compressed -) - -const ( - fileHeaderSignature = 0x04034b50 - directoryHeaderSignature = 0x02014b50 - directoryEndSignature = 0x06054b50 - directory64LocSignature = 0x07064b50 - directory64EndSignature = 0x06064b50 - dataDescriptorSignature = 0x08074b50 // de-facto standard; required by OS X Finder - fileHeaderLen = 30 // + filename + extra - directoryHeaderLen = 46 // + filename + extra + comment - directoryEndLen = 22 // + comment - dataDescriptorLen = 16 // four uint32: descriptor signature, crc32, compressed size, size - dataDescriptor64Len = 24 // two uint32: signature, crc32 | two uint64: compressed size, size - directory64LocLen = 20 // - directory64EndLen = 56 // + extra - - // Constants for the first byte in CreatorVersion. - creatorFAT = 0 - creatorUnix = 3 - creatorNTFS = 11 - creatorVFAT = 14 - creatorMacOSX = 19 - - // Version numbers. - zipVersion20 = 20 // 2.0 - zipVersion45 = 45 // 4.5 (reads and writes zip64 archives) - - // Limits for non zip64 files. - uint16max = (1 << 16) - 1 - uint32max = (1 << 32) - 1 - - // Extra header IDs. - // - // IDs 0..31 are reserved for official use by PKWARE. - // IDs above that range are defined by third-party vendors. - // Since ZIP lacked high precision timestamps (nor a official specification - // of the timezone used for the date fields), many competing extra fields - // have been invented. Pervasive use effectively makes them "official". - // - // See http://mdfs.net/Docs/Comp/Archiving/Zip/ExtraField - zip64ExtraID = 0x0001 // Zip64 extended information - ntfsExtraID = 0x000a // NTFS - unixExtraID = 0x000d // UNIX - extTimeExtraID = 0x5455 // Extended timestamp - infoZipUnixExtraID = 0x5855 // Info-ZIP Unix extension -) - -// FileHeader describes a file within a zip file. -// See the zip spec for details. -type FileHeader struct { - // Name is the name of the file. 
- // - // It must be a relative path, not start with a drive letter (such as "C:"), - // and must use forward slashes instead of back slashes. A trailing slash - // indicates that this file is a directory and should have no data. - // - // When reading zip files, the Name field is populated from - // the zip file directly and is not validated for correctness. - // It is the caller's responsibility to sanitize it as - // appropriate, including canonicalizing slash directions, - // validating that paths are relative, and preventing path - // traversal through filenames ("../../../"). - Name string - - // Comment is any arbitrary user-defined string shorter than 64KiB. - Comment string - - // NonUTF8 indicates that Name and Comment are not encoded in UTF-8. - // - // By specification, the only other encoding permitted should be CP-437, - // but historically many ZIP readers interpret Name and Comment as whatever - // the system's local character encoding happens to be. - // - // This flag should only be set if the user intends to encode a non-portable - // ZIP file for a specific localized region. Otherwise, the Writer - // automatically sets the ZIP format's UTF-8 flag for valid UTF-8 strings. - NonUTF8 bool - - CreatorVersion uint16 - ReaderVersion uint16 - Flags uint16 - - // Method is the compression method. If zero, Store is used. - Method uint16 - - // Modified is the modified time of the file. - // - // When reading, an extended timestamp is preferred over the legacy MS-DOS - // date field, and the offset between the times is used as the timezone. - // If only the MS-DOS date is present, the timezone is assumed to be UTC. - // - // When writing, an extended timestamp (which is timezone-agnostic) is - // always emitted. The legacy MS-DOS date field is encoded according to the - // location of the Modified time. - Modified time.Time - ModifiedTime uint16 // Deprecated: Legacy MS-DOS date; use Modified instead. 
- ModifiedDate uint16 // Deprecated: Legacy MS-DOS time; use Modified instead. - - CRC32 uint32 - CompressedSize uint32 // Deprecated: Use CompressedSize64 instead. - UncompressedSize uint32 // Deprecated: Use UncompressedSize64 instead. - CompressedSize64 uint64 - UncompressedSize64 uint64 - Extra []byte - ExternalAttrs uint32 // Meaning depends on CreatorVersion -} - -// FileInfo returns an fs.FileInfo for the FileHeader. -func (h *FileHeader) FileInfo() fs.FileInfo { - return headerFileInfo{h} -} - -// headerFileInfo implements fs.FileInfo. -type headerFileInfo struct { - fh *FileHeader -} - -func (fi headerFileInfo) Name() string { return path.Base(fi.fh.Name) } -func (fi headerFileInfo) Size() int64 { - if fi.fh.UncompressedSize64 > 0 { - return int64(fi.fh.UncompressedSize64) - } - return int64(fi.fh.UncompressedSize) -} -func (fi headerFileInfo) IsDir() bool { return fi.Mode().IsDir() } -func (fi headerFileInfo) ModTime() time.Time { - if fi.fh.Modified.IsZero() { - return fi.fh.ModTime() - } - return fi.fh.Modified.UTC() -} -func (fi headerFileInfo) Mode() fs.FileMode { return fi.fh.Mode() } -func (fi headerFileInfo) Type() fs.FileMode { return fi.fh.Mode().Type() } -func (fi headerFileInfo) Sys() interface{} { return fi.fh } - -func (fi headerFileInfo) Info() (fs.FileInfo, error) { return fi, nil } - -// FileInfoHeader creates a partially-populated FileHeader from an -// fs.FileInfo. -// Because fs.FileInfo's Name method returns only the base name of -// the file it describes, it may be necessary to modify the Name field -// of the returned header to provide the full path name of the file. -// If compression is desired, callers should set the FileHeader.Method -// field; it is unset by default. 
-func FileInfoHeader(fi fs.FileInfo) (*FileHeader, error) { - size := fi.Size() - fh := &FileHeader{ - Name: fi.Name(), - UncompressedSize64: uint64(size), - } - fh.SetModTime(fi.ModTime()) - fh.SetMode(fi.Mode()) - if fh.UncompressedSize64 > uint32max { - fh.UncompressedSize = uint32max - } else { - fh.UncompressedSize = uint32(fh.UncompressedSize64) - } - return fh, nil -} - -type directoryEnd struct { - diskNbr uint32 // unused - dirDiskNbr uint32 // unused - dirRecordsThisDisk uint64 // unused - directoryRecords uint64 - directorySize uint64 - directoryOffset uint64 // relative to file - commentLen uint16 - comment string -} - -// timeZone returns a *time.Location based on the provided offset. -// If the offset is non-sensible, then this uses an offset of zero. -func timeZone(offset time.Duration) *time.Location { - const ( - minOffset = -12 * time.Hour // E.g., Baker island at -12:00 - maxOffset = +14 * time.Hour // E.g., Line island at +14:00 - offsetAlias = 15 * time.Minute // E.g., Nepal at +5:45 - ) - offset = offset.Round(offsetAlias) - if offset < minOffset || maxOffset < offset { - offset = 0 - } - return time.FixedZone("", int(offset/time.Second)) -} - -// msDosTimeToTime converts an MS-DOS date and time into a time.Time. -// The resolution is 2s. -// See: https://msdn.microsoft.com/en-us/library/ms724247(v=VS.85).aspx -func msDosTimeToTime(dosDate, dosTime uint16) time.Time { - return time.Date( - // date bits 0-4: day of month; 5-8: month; 9-15: years since 1980 - int(dosDate>>9+1980), - time.Month(dosDate>>5&0xf), - int(dosDate&0x1f), - - // time bits 0-4: second/2; 5-10: minute; 11-15: hour - int(dosTime>>11), - int(dosTime>>5&0x3f), - int(dosTime&0x1f*2), - 0, // nanoseconds - - time.UTC, - ) -} - -// timeToMsDosTime converts a time.Time to an MS-DOS date and time. -// The resolution is 2s. 
-// See: https://msdn.microsoft.com/en-us/library/ms724274(v=VS.85).aspx -func timeToMsDosTime(t time.Time) (fDate uint16, fTime uint16) { - fDate = uint16(t.Day() + int(t.Month())<<5 + (t.Year()-1980)<<9) - fTime = uint16(t.Second()/2 + t.Minute()<<5 + t.Hour()<<11) - return -} - -// ModTime returns the modification time in UTC using the legacy -// ModifiedDate and ModifiedTime fields. -// -// Deprecated: Use Modified instead. -func (h *FileHeader) ModTime() time.Time { - return msDosTimeToTime(h.ModifiedDate, h.ModifiedTime) -} - -// SetModTime sets the Modified, ModifiedTime, and ModifiedDate fields -// to the given time in UTC. -// -// Deprecated: Use Modified instead. -func (h *FileHeader) SetModTime(t time.Time) { - t = t.UTC() // Convert to UTC for compatibility - h.Modified = t - h.ModifiedDate, h.ModifiedTime = timeToMsDosTime(t) -} - -const ( - // Unix constants. The specification doesn't mention them, - // but these seem to be the values agreed on by tools. - s_IFMT = 0xf000 - s_IFSOCK = 0xc000 - s_IFLNK = 0xa000 - s_IFREG = 0x8000 - s_IFBLK = 0x6000 - s_IFDIR = 0x4000 - s_IFCHR = 0x2000 - s_IFIFO = 0x1000 - s_ISUID = 0x800 - s_ISGID = 0x400 - s_ISVTX = 0x200 - - msdosDir = 0x10 - msdosReadOnly = 0x01 -) - -// Mode returns the permission and mode bits for the FileHeader. -func (h *FileHeader) Mode() (mode fs.FileMode) { - switch h.CreatorVersion >> 8 { - case creatorUnix, creatorMacOSX: - mode = unixModeToFileMode(h.ExternalAttrs >> 16) - case creatorNTFS, creatorVFAT, creatorFAT: - mode = msdosModeToFileMode(h.ExternalAttrs) - } - if len(h.Name) > 0 && h.Name[len(h.Name)-1] == '/' { - mode |= fs.ModeDir - } - return mode -} - -// SetMode changes the permission and mode bits for the FileHeader. -func (h *FileHeader) SetMode(mode fs.FileMode) { - h.CreatorVersion = h.CreatorVersion&0xff | creatorUnix<<8 - h.ExternalAttrs = fileModeToUnixMode(mode) << 16 - - // set MSDOS attributes too, as the original zip does. 
- if mode&fs.ModeDir != 0 { - h.ExternalAttrs |= msdosDir - } - if mode&0200 == 0 { - h.ExternalAttrs |= msdosReadOnly - } -} - -// isZip64 reports whether the file size exceeds the 32 bit limit -func (h *FileHeader) isZip64() bool { - return h.CompressedSize64 >= uint32max || h.UncompressedSize64 >= uint32max -} - -func (f *FileHeader) hasDataDescriptor() bool { - return f.Flags&0x8 != 0 -} - -func msdosModeToFileMode(m uint32) (mode fs.FileMode) { - if m&msdosDir != 0 { - mode = fs.ModeDir | 0777 - } else { - mode = 0666 - } - if m&msdosReadOnly != 0 { - mode &^= 0222 - } - return mode -} - -func fileModeToUnixMode(mode fs.FileMode) uint32 { - var m uint32 - switch mode & fs.ModeType { - default: - m = s_IFREG - case fs.ModeDir: - m = s_IFDIR - case fs.ModeSymlink: - m = s_IFLNK - case fs.ModeNamedPipe: - m = s_IFIFO - case fs.ModeSocket: - m = s_IFSOCK - case fs.ModeDevice: - m = s_IFBLK - case fs.ModeDevice | fs.ModeCharDevice: - m = s_IFCHR - } - if mode&fs.ModeSetuid != 0 { - m |= s_ISUID - } - if mode&fs.ModeSetgid != 0 { - m |= s_ISGID - } - if mode&fs.ModeSticky != 0 { - m |= s_ISVTX - } - return m | uint32(mode&0777) -} - -func unixModeToFileMode(m uint32) fs.FileMode { - mode := fs.FileMode(m & 0777) - switch m & s_IFMT { - case s_IFBLK: - mode |= fs.ModeDevice - case s_IFCHR: - mode |= fs.ModeDevice | fs.ModeCharDevice - case s_IFDIR: - mode |= fs.ModeDir - case s_IFIFO: - mode |= fs.ModeNamedPipe - case s_IFLNK: - mode |= fs.ModeSymlink - case s_IFREG: - // nothing to do - case s_IFSOCK: - mode |= fs.ModeSocket - } - if m&s_ISGID != 0 { - mode |= fs.ModeSetgid - } - if m&s_ISUID != 0 { - mode |= fs.ModeSetuid - } - if m&s_ISVTX != 0 { - mode |= fs.ModeSticky - } - return mode -} diff --git a/vendor/github.com/klauspost/compress/zip/writer.go b/vendor/github.com/klauspost/compress/zip/writer.go deleted file mode 100644 index d3ef0eb8da..0000000000 --- a/vendor/github.com/klauspost/compress/zip/writer.go +++ /dev/null @@ -1,641 
+0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package zip - -import ( - "bufio" - "encoding/binary" - "errors" - "hash" - "hash/crc32" - "io" - "strings" - "unicode/utf8" -) - -var ( - errLongName = errors.New("zip: FileHeader.Name too long") - errLongExtra = errors.New("zip: FileHeader.Extra too long") -) - -// Writer implements a zip file writer. -type Writer struct { - cw *countWriter - dir []*header - last *fileWriter - closed bool - compressors map[uint16]Compressor - comment string - - // testHookCloseSizeOffset if non-nil is called with the size - // of offset of the central directory at Close. - testHookCloseSizeOffset func(size, offset uint64) -} - -type header struct { - *FileHeader - offset uint64 - raw bool -} - -// NewWriter returns a new Writer writing a zip file to w. -func NewWriter(w io.Writer) *Writer { - return &Writer{cw: &countWriter{w: bufio.NewWriter(w)}} -} - -// SetOffset sets the offset of the beginning of the zip data within the -// underlying writer. It should be used when the zip data is appended to an -// existing file, such as a binary executable. -// It must be called before any data is written. -func (w *Writer) SetOffset(n int64) { - if w.cw.count != 0 { - panic("zip: SetOffset called after data was written") - } - w.cw.count = n -} - -// Flush flushes any buffered data to the underlying writer. -// Calling Flush is not normally necessary; calling Close is sufficient. -func (w *Writer) Flush() error { - return w.cw.w.(*bufio.Writer).Flush() -} - -// SetComment sets the end-of-central-directory comment field. -// It can only be called before Close. -func (w *Writer) SetComment(comment string) error { - if len(comment) > uint16max { - return errors.New("zip: Writer.Comment too long") - } - w.comment = comment - return nil -} - -// Close finishes writing the zip file by writing the central directory. 
-// It does not close the underlying writer. -func (w *Writer) Close() error { - if w.last != nil && !w.last.closed { - if err := w.last.close(); err != nil { - return err - } - w.last = nil - } - if w.closed { - return errors.New("zip: writer closed twice") - } - w.closed = true - - // write central directory - start := w.cw.count - for _, h := range w.dir { - var buf [directoryHeaderLen]byte - b := writeBuf(buf[:]) - b.uint32(uint32(directoryHeaderSignature)) - b.uint16(h.CreatorVersion) - b.uint16(h.ReaderVersion) - b.uint16(h.Flags) - b.uint16(h.Method) - b.uint16(h.ModifiedTime) - b.uint16(h.ModifiedDate) - b.uint32(h.CRC32) - if h.isZip64() || h.offset >= uint32max { - // the file needs a zip64 header. store maxint in both - // 32 bit size fields (and offset later) to signal that the - // zip64 extra header should be used. - b.uint32(uint32max) // compressed size - b.uint32(uint32max) // uncompressed size - - // append a zip64 extra block to Extra - var buf [28]byte // 2x uint16 + 3x uint64 - eb := writeBuf(buf[:]) - eb.uint16(zip64ExtraID) - eb.uint16(24) // size = 3x uint64 - eb.uint64(h.UncompressedSize64) - eb.uint64(h.CompressedSize64) - eb.uint64(h.offset) - h.Extra = append(h.Extra, buf[:]...) 
- } else { - b.uint32(h.CompressedSize) - b.uint32(h.UncompressedSize) - } - - b.uint16(uint16(len(h.Name))) - b.uint16(uint16(len(h.Extra))) - b.uint16(uint16(len(h.Comment))) - b = b[4:] // skip disk number start and internal file attr (2x uint16) - b.uint32(h.ExternalAttrs) - if h.isZip64() || h.offset > uint32max { - b.uint32(uint32max) - } else { - b.uint32(uint32(h.offset)) - } - if _, err := w.cw.Write(buf[:]); err != nil { - return err - } - if _, err := io.WriteString(w.cw, h.Name); err != nil { - return err - } - if _, err := w.cw.Write(h.Extra); err != nil { - return err - } - if _, err := io.WriteString(w.cw, h.Comment); err != nil { - return err - } - } - end := w.cw.count - - records := uint64(len(w.dir)) - size := uint64(end - start) - offset := uint64(start) - - if f := w.testHookCloseSizeOffset; f != nil { - f(size, offset) - } - - if records >= uint16max || size >= uint32max || offset >= uint32max { - var buf [directory64EndLen + directory64LocLen]byte - b := writeBuf(buf[:]) - - // zip64 end of central directory record - b.uint32(directory64EndSignature) - b.uint64(directory64EndLen - 12) // length minus signature (uint32) and length fields (uint64) - b.uint16(zipVersion45) // version made by - b.uint16(zipVersion45) // version needed to extract - b.uint32(0) // number of this disk - b.uint32(0) // number of the disk with the start of the central directory - b.uint64(records) // total number of entries in the central directory on this disk - b.uint64(records) // total number of entries in the central directory - b.uint64(size) // size of the central directory - b.uint64(offset) // offset of start of central directory with respect to the starting disk number - - // zip64 end of central directory locator - b.uint32(directory64LocSignature) - b.uint32(0) // number of the disk with the start of the zip64 end of central directory - b.uint64(uint64(end)) // relative offset of the zip64 end of central directory record - b.uint32(1) // total number of 
disks - - if _, err := w.cw.Write(buf[:]); err != nil { - return err - } - - // store max values in the regular end record to signal - // that the zip64 values should be used instead - records = uint16max - size = uint32max - offset = uint32max - } - - // write end record - var buf [directoryEndLen]byte - b := writeBuf(buf[:]) - b.uint32(uint32(directoryEndSignature)) - b = b[4:] // skip over disk number and first disk number (2x uint16) - b.uint16(uint16(records)) // number of entries this disk - b.uint16(uint16(records)) // number of entries total - b.uint32(uint32(size)) // size of directory - b.uint32(uint32(offset)) // start of directory - b.uint16(uint16(len(w.comment))) // byte size of EOCD comment - if _, err := w.cw.Write(buf[:]); err != nil { - return err - } - if _, err := io.WriteString(w.cw, w.comment); err != nil { - return err - } - - return w.cw.w.(*bufio.Writer).Flush() -} - -// Create adds a file to the zip file using the provided name. -// It returns a Writer to which the file contents should be written. -// The file contents will be compressed using the Deflate method. -// The name must be a relative path: it must not start with a drive -// letter (e.g. C:) or leading slash, and only forward slashes are -// allowed. To create a directory instead of a file, add a trailing -// slash to the name. -// The file's contents must be written to the io.Writer before the next -// call to Create, CreateHeader, or Close. -func (w *Writer) Create(name string) (io.Writer, error) { - header := &FileHeader{ - Name: name, - Method: Deflate, - } - return w.CreateHeader(header) -} - -// detectUTF8 reports whether s is a valid UTF-8 string, and whether the string -// must be considered UTF-8 encoding (i.e., not compatible with CP-437, ASCII, -// or any other common encoding). 
-func detectUTF8(s string) (valid, require bool) { - for i := 0; i < len(s); { - r, size := utf8.DecodeRuneInString(s[i:]) - i += size - // Officially, ZIP uses CP-437, but many readers use the system's - // local character encoding. Most encoding are compatible with a large - // subset of CP-437, which itself is ASCII-like. - // - // Forbid 0x7e and 0x5c since EUC-KR and Shift-JIS replace those - // characters with localized currency and overline characters. - if r < 0x20 || r > 0x7d || r == 0x5c { - if !utf8.ValidRune(r) || (r == utf8.RuneError && size == 1) { - return false, false - } - require = true - } - } - return true, require -} - -// prepare performs the bookkeeping operations required at the start of -// CreateHeader and CreateRaw. -func (w *Writer) prepare(fh *FileHeader) error { - if w.last != nil && !w.last.closed { - if err := w.last.close(); err != nil { - return err - } - } - if len(w.dir) > 0 && w.dir[len(w.dir)-1].FileHeader == fh { - // See https://golang.org/issue/11144 confusion. - return errors.New("archive/zip: invalid duplicate FileHeader") - } - return nil -} - -// CreateHeader adds a file to the zip archive using the provided FileHeader -// for the file metadata. Writer takes ownership of fh and may mutate -// its fields. The caller must not modify fh after calling CreateHeader. -// -// This returns a Writer to which the file contents should be written. -// The file's contents must be written to the io.Writer before the next -// call to Create, CreateHeader, CreateRaw, or Close. -func (w *Writer) CreateHeader(fh *FileHeader) (io.Writer, error) { - if err := w.prepare(fh); err != nil { - return nil, err - } - - // The ZIP format has a sad state of affairs regarding character encoding. - // Officially, the name and comment fields are supposed to be encoded - // in CP-437 (which is mostly compatible with ASCII), unless the UTF-8 - // flag bit is set. 
However, there are several problems: - // - // * Many ZIP readers still do not support UTF-8. - // * If the UTF-8 flag is cleared, several readers simply interpret the - // name and comment fields as whatever the local system encoding is. - // - // In order to avoid breaking readers without UTF-8 support, - // we avoid setting the UTF-8 flag if the strings are CP-437 compatible. - // However, if the strings require multibyte UTF-8 encoding and is a - // valid UTF-8 string, then we set the UTF-8 bit. - // - // For the case, where the user explicitly wants to specify the encoding - // as UTF-8, they will need to set the flag bit themselves. - utf8Valid1, utf8Require1 := detectUTF8(fh.Name) - utf8Valid2, utf8Require2 := detectUTF8(fh.Comment) - switch { - case fh.NonUTF8: - fh.Flags &^= 0x800 - case (utf8Require1 || utf8Require2) && (utf8Valid1 && utf8Valid2): - fh.Flags |= 0x800 - } - - fh.CreatorVersion = fh.CreatorVersion&0xff00 | zipVersion20 // preserve compatibility byte - fh.ReaderVersion = zipVersion20 - - // If Modified is set, this takes precedence over MS-DOS timestamp fields. - if !fh.Modified.IsZero() { - // Contrary to the FileHeader.SetModTime method, we intentionally - // do not convert to UTC, because we assume the user intends to encode - // the date using the specified timezone. A user may want this control - // because many legacy ZIP readers interpret the timestamp according - // to the local timezone. - // - // The timezone is only non-UTC if a user directly sets the Modified - // field directly themselves. All other approaches sets UTC. - fh.ModifiedDate, fh.ModifiedTime = timeToMsDosTime(fh.Modified) - - // Use "extended timestamp" format since this is what Info-ZIP uses. - // Nearly every major ZIP implementation uses a different format, - // but at least most seem to be able to understand the other formats. 
- // - // This format happens to be identical for both local and central header - // if modification time is the only timestamp being encoded. - var mbuf [9]byte // 2*SizeOf(uint16) + SizeOf(uint8) + SizeOf(uint32) - mt := uint32(fh.Modified.Unix()) - eb := writeBuf(mbuf[:]) - eb.uint16(extTimeExtraID) - eb.uint16(5) // Size: SizeOf(uint8) + SizeOf(uint32) - eb.uint8(1) // Flags: ModTime - eb.uint32(mt) // ModTime - fh.Extra = append(fh.Extra, mbuf[:]...) - } - - var ( - ow io.Writer - fw *fileWriter - ) - h := &header{ - FileHeader: fh, - offset: uint64(w.cw.count), - } - - if strings.HasSuffix(fh.Name, "/") { - // Set the compression method to Store to ensure data length is truly zero, - // which the writeHeader method always encodes for the size fields. - // This is necessary as most compression formats have non-zero lengths - // even when compressing an empty string. - fh.Method = Store - fh.Flags &^= 0x8 // we will not write a data descriptor - - // Explicitly clear sizes as they have no meaning for directories. - fh.CompressedSize = 0 - fh.CompressedSize64 = 0 - fh.UncompressedSize = 0 - fh.UncompressedSize64 = 0 - - ow = dirWriter{} - } else { - fh.Flags |= 0x8 // we will write a data descriptor - - fw = &fileWriter{ - zipw: w.cw, - compCount: &countWriter{w: w.cw}, - crc32: crc32.NewIEEE(), - } - comp := w.compressor(fh.Method) - if comp == nil { - return nil, ErrAlgorithm - } - var err error - fw.comp, err = comp(fw.compCount) - if err != nil { - return nil, err - } - fw.rawCount = &countWriter{w: fw.comp} - fw.header = h - ow = fw - } - w.dir = append(w.dir, h) - if err := writeHeader(w.cw, h); err != nil { - return nil, err - } - // If we're creating a directory, fw is nil. 
- w.last = fw - return ow, nil -} - -func writeHeader(w io.Writer, h *header) error { - const maxUint16 = 1<<16 - 1 - if len(h.Name) > maxUint16 { - return errLongName - } - if len(h.Extra) > maxUint16 { - return errLongExtra - } - - var buf [fileHeaderLen]byte - b := writeBuf(buf[:]) - b.uint32(uint32(fileHeaderSignature)) - b.uint16(h.ReaderVersion) - b.uint16(h.Flags) - b.uint16(h.Method) - b.uint16(h.ModifiedTime) - b.uint16(h.ModifiedDate) - // In raw mode (caller does the compression), the values are either - // written here or in the trailing data descriptor based on the header - // flags. - if h.raw && !h.hasDataDescriptor() { - b.uint32(h.CRC32) - b.uint32(uint32(min64(h.CompressedSize64, uint32max))) - b.uint32(uint32(min64(h.UncompressedSize64, uint32max))) - } else { - // When this package handle the compression, these values are - // always written to the trailing data descriptor. - b.uint32(0) // crc32 - b.uint32(0) // compressed size - b.uint32(0) // uncompressed size - } - b.uint16(uint16(len(h.Name))) - b.uint16(uint16(len(h.Extra))) - if _, err := w.Write(buf[:]); err != nil { - return err - } - if _, err := io.WriteString(w, h.Name); err != nil { - return err - } - _, err := w.Write(h.Extra) - return err -} - -func min64(x, y uint64) uint64 { - if x < y { - return x - } - return y -} - -// CreateHeaderRaw is replaced by CreateRaw. -// Deprecated: CreateHeaderRaw is replaced by CreateRaw (stdlib name). -func (w *Writer) CreateHeaderRaw(fh *FileHeader) (io.Writer, error) { - return w.CreateRaw(fh) -} - -// CreateRaw adds a file to the zip archive using the provided FileHeader and -// returns a Writer to which the file contents should be written. The file's -// contents must be written to the io.Writer before the next call to Create, -// CreateHeader, CreateRaw, or Close. -// -// In contrast to CreateHeader, the bytes passed to Writer are not compressed. 
-func (w *Writer) CreateRaw(fh *FileHeader) (io.Writer, error) { - if err := w.prepare(fh); err != nil { - return nil, err - } - - fh.CompressedSize = uint32(min64(fh.CompressedSize64, uint32max)) - fh.UncompressedSize = uint32(min64(fh.UncompressedSize64, uint32max)) - - h := &header{ - FileHeader: fh, - offset: uint64(w.cw.count), - raw: true, - } - w.dir = append(w.dir, h) - if err := writeHeader(w.cw, h); err != nil { - return nil, err - } - - if strings.HasSuffix(fh.Name, "/") { - w.last = nil - return dirWriter{}, nil - } - - fw := &fileWriter{ - header: h, - zipw: w.cw, - } - w.last = fw - return fw, nil -} - -// Copy copies the file f (obtained from a Reader) into w. It copies the raw -// form directly bypassing decompression, compression, and validation. -// CHANGE: Optional file name cannot be specified any more due to stdlib api. -func (w *Writer) Copy(f *File) error { - r, err := f.OpenRaw() - if err != nil { - return err - } - fw, err := w.CreateRaw(&f.FileHeader) - if err != nil { - return err - } - _, err = io.Copy(fw, r) - return err -} - -// RegisterCompressor registers or overrides a custom compressor for a specific -// method ID. If a compressor for a given method is not found, Writer will -// default to looking up the compressor at the package level. 
-func (w *Writer) RegisterCompressor(method uint16, comp Compressor) { - if w.compressors == nil { - w.compressors = make(map[uint16]Compressor) - } - w.compressors[method] = comp -} - -func (w *Writer) compressor(method uint16) Compressor { - comp := w.compressors[method] - if comp == nil { - comp = compressor(method) - } - return comp -} - -type dirWriter struct{} - -func (dirWriter) Write(b []byte) (int, error) { - if len(b) == 0 { - return 0, nil - } - return 0, errors.New("zip: write to directory") -} - -type fileWriter struct { - *header - zipw io.Writer - rawCount *countWriter - comp io.WriteCloser - compCount *countWriter - crc32 hash.Hash32 - closed bool -} - -func (w *fileWriter) Write(p []byte) (int, error) { - if w.closed { - return 0, errors.New("zip: write to closed file") - } - if w.raw { - return w.zipw.Write(p) - } - w.crc32.Write(p) - return w.rawCount.Write(p) -} - -func (w *fileWriter) close() error { - if w.closed { - return errors.New("zip: file closed twice") - } - w.closed = true - if w.raw { - return w.writeDataDescriptor() - } - if err := w.comp.Close(); err != nil { - return err - } - - // update FileHeader - fh := w.header.FileHeader - fh.CRC32 = w.crc32.Sum32() - fh.CompressedSize64 = uint64(w.compCount.count) - fh.UncompressedSize64 = uint64(w.rawCount.count) - - if fh.isZip64() { - fh.CompressedSize = uint32max - fh.UncompressedSize = uint32max - fh.ReaderVersion = zipVersion45 // requires 4.5 - File uses ZIP64 format extensions - } else { - fh.CompressedSize = uint32(fh.CompressedSize64) - fh.UncompressedSize = uint32(fh.UncompressedSize64) - } - - return w.writeDataDescriptor() -} - -func (w *fileWriter) writeDataDescriptor() error { - if !w.hasDataDescriptor() { - return nil - } - // Write data descriptor. This is more complicated than one would - // think, see e.g. comments in zipfile.c:putextended() and - // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7073588. 
- // The approach here is to write 8 byte sizes if needed without - // adding a zip64 extra in the local header (too late anyway). - var buf []byte - if w.isZip64() { - buf = make([]byte, dataDescriptor64Len) - } else { - buf = make([]byte, dataDescriptorLen) - } - b := writeBuf(buf) - b.uint32(dataDescriptorSignature) // de-facto standard, required by OS X - b.uint32(w.CRC32) - if w.isZip64() { - b.uint64(w.CompressedSize64) - b.uint64(w.UncompressedSize64) - } else { - b.uint32(w.CompressedSize) - b.uint32(w.UncompressedSize) - } - _, err := w.zipw.Write(buf) - return err -} - -type countWriter struct { - w io.Writer - count int64 -} - -func (w *countWriter) Write(p []byte) (int, error) { - n, err := w.w.Write(p) - w.count += int64(n) - return n, err -} - -type nopCloser struct { - io.Writer -} - -func (w nopCloser) Close() error { - return nil -} - -type writeBuf []byte - -func (b *writeBuf) uint8(v uint8) { - (*b)[0] = v - *b = (*b)[1:] -} - -func (b *writeBuf) uint16(v uint16) { - binary.LittleEndian.PutUint16(*b, v) - *b = (*b)[2:] -} - -func (b *writeBuf) uint32(v uint32) { - binary.LittleEndian.PutUint32(*b, v) - *b = (*b)[4:] -} - -func (b *writeBuf) uint64(v uint64) { - binary.LittleEndian.PutUint64(*b, v) - *b = (*b)[8:] -} diff --git a/vendor/github.com/klauspost/pgzip/.gitignore b/vendor/github.com/klauspost/pgzip/.gitignore deleted file mode 100644 index daf913b1b3..0000000000 --- a/vendor/github.com/klauspost/pgzip/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof diff --git a/vendor/github.com/klauspost/pgzip/.travis.yml b/vendor/github.com/klauspost/pgzip/.travis.yml deleted file mode 100644 index acfec4bb09..0000000000 --- 
a/vendor/github.com/klauspost/pgzip/.travis.yml +++ /dev/null @@ -1,24 +0,0 @@ -language: go - -os: - - linux - - osx - -go: - - 1.13.x - - 1.14.x - - 1.15.x - - master - -env: - - GO111MODULE=off - -script: - - diff <(gofmt -d .) <(printf "") - - go test -v -cpu=1,2,4 . - - go test -v -cpu=2 -race -short . - -matrix: - allow_failures: - - go: 'master' - fast_finish: true diff --git a/vendor/github.com/klauspost/pgzip/GO_LICENSE b/vendor/github.com/klauspost/pgzip/GO_LICENSE deleted file mode 100644 index 7448756763..0000000000 --- a/vendor/github.com/klauspost/pgzip/GO_LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/klauspost/pgzip/LICENSE b/vendor/github.com/klauspost/pgzip/LICENSE deleted file mode 100644 index 3909da4103..0000000000 --- a/vendor/github.com/klauspost/pgzip/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2014 Klaus Post - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/klauspost/pgzip/README.md b/vendor/github.com/klauspost/pgzip/README.md deleted file mode 100644 index 171b978fdc..0000000000 --- a/vendor/github.com/klauspost/pgzip/README.md +++ /dev/null @@ -1,135 +0,0 @@ -pgzip -===== - -Go parallel gzip compression/decompression. This is a fully gzip compatible drop in replacement for "compress/gzip". - -This will split compression into blocks that are compressed in parallel. -This can be useful for compressing big amounts of data. The output is a standard gzip file. - -The gzip decompression is modified so it decompresses ahead of the current reader. -This means that reads will be non-blocking if the decompressor can keep ahead of your code reading from it. -CRC calculation also takes place in a separate goroutine. - -You should only use this if you are (de)compressing big amounts of data, -say **more than 1MB** at the time, otherwise you will not see any benefit, -and it will likely be faster to use the internal gzip library -or [this package](https://github.com/klauspost/compress). - -It is important to note that this library creates and reads *standard gzip files*. -You do not have to match the compressor/decompressor to get the described speedups, -and the gzip files are fully compatible with other gzip readers/writers. - -A golang variant of this is [bgzf](https://godoc.org/github.com/biogo/hts/bgzf), -which has the same feature, as well as seeking in the resulting file. -The only drawback is a slightly bigger overhead compared to this and pure gzip. -See a comparison below. 
- -[![GoDoc][1]][2] [![Build Status][3]][4] - -[1]: https://godoc.org/github.com/klauspost/pgzip?status.svg -[2]: https://godoc.org/github.com/klauspost/pgzip -[3]: https://travis-ci.org/klauspost/pgzip.svg -[4]: https://travis-ci.org/klauspost/pgzip - -Installation -==== -```go get github.com/klauspost/pgzip/...``` - -You might need to get/update the dependencies: - -``` -go get -u github.com/klauspost/compress -``` - -Usage -==== -[Godoc Doumentation](https://godoc.org/github.com/klauspost/pgzip) - -To use as a replacement for gzip, exchange - -```import "compress/gzip"``` -with -```import gzip "github.com/klauspost/pgzip"```. - -# Changes - -* Oct 6, 2016: Fixed an issue if the destination writer returned an error. -* Oct 6, 2016: Better buffer reuse, should now generate less garbage. -* Oct 6, 2016: Output does not change based on write sizes. -* Dec 8, 2015: Decoder now supports the io.WriterTo interface, giving a speedup and less GC pressure. -* Oct 9, 2015: Reduced allocations by ~35 by using sync.Pool. ~15% overall speedup. - -Changes in [github.com/klauspost/compress](https://github.com/klauspost/compress#changelog) are also carried over, so see that for more changes. - -## Compression -The simplest way to use this is to simply do the same as you would when using [compress/gzip](http://golang.org/pkg/compress/gzip). - -To change the block size, use the added (*pgzip.Writer).SetConcurrency(blockSize, blocks int) function. With this you can control the approximate size of your blocks, as well as how many you want to be processing in parallel. Default values for this is SetConcurrency(1MB, runtime.GOMAXPROCS(0)), meaning blocks are split at 1 MB and up to the number of CPU threads blocks can be processing at once before the writer blocks. 
- - -Example: -``` -var b bytes.Buffer -w := gzip.NewWriter(&b) -w.SetConcurrency(100000, 10) -w.Write([]byte("hello, world\n")) -w.Close() -``` - -To get any performance gains, you should at least be compressing more than 1 megabyte of data at the time. - -You should at least have a block size of 100k and at least a number of blocks that match the number of cores your would like to utilize, but about twice the number of blocks would be the best. - -Another side effect of this is, that it is likely to speed up your other code, since writes to the compressor only blocks if the compressor is already compressing the number of blocks you have specified. This also means you don't have worry about buffering input to the compressor. - -## Decompression - -Decompression works similar to compression. That means that you simply call pgzip the same way as you would call [compress/gzip](http://golang.org/pkg/compress/gzip). - -The only difference is that if you want to specify your own readahead, you have to use `pgzip.NewReaderN(r io.Reader, blockSize, blocks int)` to get a reader with your custom blocksizes. The `blockSize` is the size of each block decoded, and `blocks` is the maximum number of blocks that is decoded ahead. - -See [Example on playground](http://play.golang.org/p/uHv1B5NbDh) - -Performance -==== -## Compression - -See my blog post in [Benchmarks of Golang Gzip](https://blog.klauspost.com/go-gzipdeflate-benchmarks/). - -Compression cost is usually about 0.2% with default settings with a block size of 250k. - -Example with GOMAXPROC set to 32 (16 core CPU) - -Content is [Matt Mahoneys 10GB corpus](http://mattmahoney.net/dc/10gb.html). Compression level 6. 
- -Compressor | MB/sec | speedup | size | size overhead (lower=better) -------------|----------|---------|------|--------- -[gzip](http://golang.org/pkg/compress/gzip) (golang) | 15.44MB/s (1 thread) | 1.0x | 4781329307 | 0% -[gzip](http://github.com/klauspost/compress/gzip) (klauspost) | 135.04MB/s (1 thread) | 8.74x | 4894858258 | +2.37% -[pgzip](https://github.com/klauspost/pgzip) (klauspost) | 1573.23MB/s| 101.9x | 4902285651 | +2.53% -[bgzf](https://godoc.org/github.com/biogo/hts/bgzf) (biogo) | 361.40MB/s | 23.4x | 4869686090 | +1.85% -[pargzip](https://godoc.org/github.com/golang/build/pargzip) (builder) | 306.01MB/s | 19.8x | 4786890417 | +0.12% - -pgzip also contains a [linear time compression](https://github.com/klauspost/compress#linear-time-compression-huffman-only) mode, that will allow compression at ~250MB per core per second, independent of the content. - -See the [complete sheet](https://docs.google.com/spreadsheets/d/1nuNE2nPfuINCZJRMt6wFWhKpToF95I47XjSsc-1rbPQ/edit?usp=sharing) for different content types and compression settings. - -## Decompression - -The decompression speedup is there because it allows you to do other work while the decompression is taking place. - -In the example above, the numbers are as follows on a 4 CPU machine: - -Decompressor | Time | Speedup --------------|------|-------- -[gzip](http://golang.org/pkg/compress/gzip) (golang) | 1m28.85s | 0% -[pgzip](https://github.com/klauspost/pgzip) (golang) | 43.48s | 104% - -But wait, since gzip decompression is inherently singlethreaded (aside from CRC calculation) how can it be more than 100% faster? Because pgzip due to its design also acts as a buffer. When using unbuffered gzip, you are also waiting for io when you are decompressing. If the gzip decoder can keep up, it will always have data ready for your reader, and you will not be waiting for input to the gzip decompressor to complete. 
- -This is pretty much an optimal situation for pgzip, but it reflects most common usecases for CPU intensive gzip usage. - -I haven't included [bgzf](https://godoc.org/github.com/biogo/hts/bgzf) in this comparison, since it only can decompress files created by a compatible encoder, and therefore cannot be considered a generic gzip decompressor. But if you are able to compress your files with a bgzf compatible program, you can expect it to scale beyond 100%. - -# License -This contains large portions of code from the go repository - see GO_LICENSE for more information. The changes are released under MIT License. See LICENSE for more information. diff --git a/vendor/github.com/klauspost/pgzip/gunzip.go b/vendor/github.com/klauspost/pgzip/gunzip.go deleted file mode 100644 index d1ae730b25..0000000000 --- a/vendor/github.com/klauspost/pgzip/gunzip.go +++ /dev/null @@ -1,584 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package pgzip implements reading and writing of gzip format compressed files, -// as specified in RFC 1952. -// -// This is a drop in replacement for "compress/gzip". -// This will split compression into blocks that are compressed in parallel. -// This can be useful for compressing big amounts of data. -// The gzip decompression has not been modified, but remains in the package, -// so you can use it as a complete replacement for "compress/gzip". 
-// -// See more at https://github.com/klauspost/pgzip -package pgzip - -import ( - "bufio" - "errors" - "hash" - "hash/crc32" - "io" - "sync" - "time" - - "github.com/klauspost/compress/flate" -) - -const ( - gzipID1 = 0x1f - gzipID2 = 0x8b - gzipDeflate = 8 - flagText = 1 << 0 - flagHdrCrc = 1 << 1 - flagExtra = 1 << 2 - flagName = 1 << 3 - flagComment = 1 << 4 -) - -func makeReader(r io.Reader) flate.Reader { - if rr, ok := r.(flate.Reader); ok { - return rr - } - return bufio.NewReader(r) -} - -var ( - // ErrChecksum is returned when reading GZIP data that has an invalid checksum. - ErrChecksum = errors.New("gzip: invalid checksum") - // ErrHeader is returned when reading GZIP data that has an invalid header. - ErrHeader = errors.New("gzip: invalid header") -) - -// The gzip file stores a header giving metadata about the compressed file. -// That header is exposed as the fields of the Writer and Reader structs. -type Header struct { - Comment string // comment - Extra []byte // "extra data" - ModTime time.Time // modification time - Name string // file name - OS byte // operating system type -} - -// A Reader is an io.Reader that can be read to retrieve -// uncompressed data from a gzip-format compressed file. -// -// In general, a gzip file can be a concatenation of gzip files, -// each with its own header. Reads from the Reader -// return the concatenation of the uncompressed data of each. -// Only the first header is recorded in the Reader fields. -// -// Gzip files store a length and checksum of the uncompressed data. -// The Reader will return a ErrChecksum when Read -// reaches the end of the uncompressed data if it does not -// have the expected length or checksum. Clients should treat data -// returned by Read as tentative until they receive the io.EOF -// marking the end of the data. 
-type Reader struct { - Header - r flate.Reader - decompressor io.ReadCloser - digest hash.Hash32 - size uint32 - flg byte - buf [512]byte - err error - closeErr chan error - multistream bool - - readAhead chan read - roff int // read offset - current []byte - closeReader chan struct{} - lastBlock bool - blockSize int - blocks int - - activeRA bool // Indication if readahead is active - mu sync.Mutex // Lock for above - - blockPool chan []byte -} - -type read struct { - b []byte - err error -} - -// NewReader creates a new Reader reading the given reader. -// The implementation buffers input and may read more data than necessary from r. -// It is the caller's responsibility to call Close on the Reader when done. -func NewReader(r io.Reader) (*Reader, error) { - z := new(Reader) - z.blocks = defaultBlocks - z.blockSize = defaultBlockSize - z.r = makeReader(r) - z.digest = crc32.NewIEEE() - z.multistream = true - z.blockPool = make(chan []byte, z.blocks) - for i := 0; i < z.blocks; i++ { - z.blockPool <- make([]byte, z.blockSize) - } - if err := z.readHeader(true); err != nil { - return nil, err - } - return z, nil -} - -// NewReaderN creates a new Reader reading the given reader. -// The implementation buffers input and may read more data than necessary from r. -// It is the caller's responsibility to call Close on the Reader when done. -// -// With this you can control the approximate size of your blocks, -// as well as how many blocks you want to have prefetched. -// -// Default values for this is blockSize = 250000, blocks = 16, -// meaning up to 16 blocks of maximum 250000 bytes will be -// prefetched. 
-func NewReaderN(r io.Reader, blockSize, blocks int) (*Reader, error) { - z := new(Reader) - z.blocks = blocks - z.blockSize = blockSize - z.r = makeReader(r) - z.digest = crc32.NewIEEE() - z.multistream = true - - // Account for too small values - if z.blocks <= 0 { - z.blocks = defaultBlocks - } - if z.blockSize <= 512 { - z.blockSize = defaultBlockSize - } - z.blockPool = make(chan []byte, z.blocks) - for i := 0; i < z.blocks; i++ { - z.blockPool <- make([]byte, z.blockSize) - } - if err := z.readHeader(true); err != nil { - return nil, err - } - return z, nil -} - -// Reset discards the Reader z's state and makes it equivalent to the -// result of its original state from NewReader, but reading from r instead. -// This permits reusing a Reader rather than allocating a new one. -func (z *Reader) Reset(r io.Reader) error { - z.killReadAhead() - z.r = makeReader(r) - z.digest = crc32.NewIEEE() - z.size = 0 - z.err = nil - z.multistream = true - - // Account for uninitialized values - if z.blocks <= 0 { - z.blocks = defaultBlocks - } - if z.blockSize <= 512 { - z.blockSize = defaultBlockSize - } - - if z.blockPool == nil { - z.blockPool = make(chan []byte, z.blocks) - for i := 0; i < z.blocks; i++ { - z.blockPool <- make([]byte, z.blockSize) - } - } - - return z.readHeader(true) -} - -// Multistream controls whether the reader supports multistream files. -// -// If enabled (the default), the Reader expects the input to be a sequence -// of individually gzipped data streams, each with its own header and -// trailer, ending at EOF. The effect is that the concatenation of a sequence -// of gzipped files is treated as equivalent to the gzip of the concatenation -// of the sequence. This is standard behavior for gzip readers. -// -// Calling Multistream(false) disables this behavior; disabling the behavior -// can be useful when reading file formats that distinguish individual gzip -// data streams or mix gzip data streams with other data streams. 
-// In this mode, when the Reader reaches the end of the data stream, -// Read returns io.EOF. If the underlying reader implements io.ByteReader, -// it will be left positioned just after the gzip stream. -// To start the next stream, call z.Reset(r) followed by z.Multistream(false). -// If there is no next stream, z.Reset(r) will return io.EOF. -func (z *Reader) Multistream(ok bool) { - z.multistream = ok -} - -// GZIP (RFC 1952) is little-endian, unlike ZLIB (RFC 1950). -func get4(p []byte) uint32 { - return uint32(p[0]) | uint32(p[1])<<8 | uint32(p[2])<<16 | uint32(p[3])<<24 -} - -func (z *Reader) readString() (string, error) { - var err error - needconv := false - for i := 0; ; i++ { - if i >= len(z.buf) { - return "", ErrHeader - } - z.buf[i], err = z.r.ReadByte() - if err != nil { - return "", err - } - if z.buf[i] > 0x7f { - needconv = true - } - if z.buf[i] == 0 { - // GZIP (RFC 1952) specifies that strings are NUL-terminated ISO 8859-1 (Latin-1). - if needconv { - s := make([]rune, 0, i) - for _, v := range z.buf[0:i] { - s = append(s, rune(v)) - } - return string(s), nil - } - return string(z.buf[0:i]), nil - } - } -} - -func (z *Reader) read2() (uint32, error) { - _, err := io.ReadFull(z.r, z.buf[0:2]) - if err != nil { - return 0, err - } - return uint32(z.buf[0]) | uint32(z.buf[1])<<8, nil -} - -func (z *Reader) readHeader(save bool) error { - z.killReadAhead() - - _, err := io.ReadFull(z.r, z.buf[0:10]) - if err != nil { - return err - } - if z.buf[0] != gzipID1 || z.buf[1] != gzipID2 || z.buf[2] != gzipDeflate { - return ErrHeader - } - z.flg = z.buf[3] - if save { - z.ModTime = time.Unix(int64(get4(z.buf[4:8])), 0) - // z.buf[8] is xfl, ignored - z.OS = z.buf[9] - } - z.digest.Reset() - z.digest.Write(z.buf[0:10]) - - if z.flg&flagExtra != 0 { - n, err := z.read2() - if err != nil { - return err - } - data := make([]byte, n) - if _, err = io.ReadFull(z.r, data); err != nil { - return err - } - if save { - z.Extra = data - } - } - - var s string - if 
z.flg&flagName != 0 { - if s, err = z.readString(); err != nil { - return err - } - if save { - z.Name = s - } - } - - if z.flg&flagComment != 0 { - if s, err = z.readString(); err != nil { - return err - } - if save { - z.Comment = s - } - } - - if z.flg&flagHdrCrc != 0 { - n, err := z.read2() - if err != nil { - return err - } - sum := z.digest.Sum32() & 0xFFFF - if n != sum { - return ErrHeader - } - } - - z.digest.Reset() - z.decompressor = flate.NewReader(z.r) - z.doReadAhead() - return nil -} - -func (z *Reader) killReadAhead() error { - z.mu.Lock() - defer z.mu.Unlock() - if z.activeRA { - if z.closeReader != nil { - close(z.closeReader) - } - - // Wait for decompressor to be closed and return error, if any. - e, ok := <-z.closeErr - z.activeRA = false - - for blk := range z.readAhead { - if blk.b != nil { - z.blockPool <- blk.b - } - } - if cap(z.current) > 0 { - z.blockPool <- z.current - z.current = nil - } - if !ok { - // Channel is closed, so if there was any error it has already been returned. - return nil - } - return e - } - return nil -} - -// Starts readahead. -// Will return on error (including io.EOF) -// or when z.closeReader is closed. -func (z *Reader) doReadAhead() { - z.mu.Lock() - defer z.mu.Unlock() - z.activeRA = true - - if z.blocks <= 0 { - z.blocks = defaultBlocks - } - if z.blockSize <= 512 { - z.blockSize = defaultBlockSize - } - ra := make(chan read, z.blocks) - z.readAhead = ra - closeReader := make(chan struct{}, 0) - z.closeReader = closeReader - z.lastBlock = false - closeErr := make(chan error, 1) - z.closeErr = closeErr - z.size = 0 - z.roff = 0 - z.current = nil - decomp := z.decompressor - - go func() { - defer func() { - closeErr <- decomp.Close() - close(closeErr) - close(ra) - }() - - // We hold a local reference to digest, since - // it way be changed by reset. 
- digest := z.digest - var wg sync.WaitGroup - for { - var buf []byte - select { - case buf = <-z.blockPool: - case <-closeReader: - return - } - buf = buf[0:z.blockSize] - // Try to fill the buffer - n, err := io.ReadFull(decomp, buf) - if err == io.ErrUnexpectedEOF { - if n > 0 { - err = nil - } else { - // If we got zero bytes, we need to establish if - // we reached end of stream or truncated stream. - _, err = decomp.Read([]byte{}) - if err == io.EOF { - err = nil - } - } - } - if n < len(buf) { - buf = buf[0:n] - } - wg.Wait() - wg.Add(1) - go func() { - digest.Write(buf) - wg.Done() - }() - z.size += uint32(n) - - // If we return any error, out digest must be ready - if err != nil { - wg.Wait() - } - select { - case z.readAhead <- read{b: buf, err: err}: - case <-closeReader: - // Sent on close, we don't care about the next results - z.blockPool <- buf - return - } - if err != nil { - return - } - } - }() -} - -func (z *Reader) Read(p []byte) (n int, err error) { - if z.err != nil { - return 0, z.err - } - if len(p) == 0 { - return 0, nil - } - - for { - if len(z.current) == 0 && !z.lastBlock { - read := <-z.readAhead - - if read.err != nil { - // If not nil, the reader will have exited - z.closeReader = nil - - if read.err != io.EOF { - z.err = read.err - return - } - if read.err == io.EOF { - z.lastBlock = true - err = nil - } - } - z.current = read.b - z.roff = 0 - } - avail := z.current[z.roff:] - if len(p) >= len(avail) { - // If len(p) >= len(current), return all content of current - n = copy(p, avail) - z.blockPool <- z.current - z.current = nil - if z.lastBlock { - err = io.EOF - break - } - } else { - // We copy as much as there is space for - n = copy(p, avail) - z.roff += n - } - return - } - - // Finished file; check checksum + size. 
- if _, err := io.ReadFull(z.r, z.buf[0:8]); err != nil { - z.err = err - return 0, err - } - crc32, isize := get4(z.buf[0:4]), get4(z.buf[4:8]) - sum := z.digest.Sum32() - if sum != crc32 || isize != z.size { - z.err = ErrChecksum - return 0, z.err - } - - // File is ok; should we attempt reading one more? - if !z.multistream { - return 0, io.EOF - } - - // Is there another? - if err = z.readHeader(false); err != nil { - z.err = err - return - } - - // Yes. Reset and read from it. - return z.Read(p) -} - -func (z *Reader) WriteTo(w io.Writer) (n int64, err error) { - total := int64(0) - for { - if z.err != nil { - return total, z.err - } - // We write both to output and digest. - for { - // Read from input - read := <-z.readAhead - if read.err != nil { - // If not nil, the reader will have exited - z.closeReader = nil - - if read.err != io.EOF { - z.err = read.err - return total, z.err - } - if read.err == io.EOF { - z.lastBlock = true - err = nil - } - } - // Write what we got - n, err := w.Write(read.b) - if n != len(read.b) { - return total, io.ErrShortWrite - } - total += int64(n) - if err != nil { - return total, err - } - // Put block back - z.blockPool <- read.b - if z.lastBlock { - break - } - } - - // Finished file; check checksum + size. - if _, err := io.ReadFull(z.r, z.buf[0:8]); err != nil { - z.err = err - return total, err - } - crc32, isize := get4(z.buf[0:4]), get4(z.buf[4:8]) - sum := z.digest.Sum32() - if sum != crc32 || isize != z.size { - z.err = ErrChecksum - return total, z.err - } - // File is ok; should we attempt reading one more? - if !z.multistream { - return total, nil - } - - // Is there another? - err = z.readHeader(false) - if err == io.EOF { - return total, nil - } - if err != nil { - z.err = err - return total, err - } - } -} - -// Close closes the Reader. It does not close the underlying io.Reader. 
-func (z *Reader) Close() error { - return z.killReadAhead() -} diff --git a/vendor/github.com/klauspost/pgzip/gzip.go b/vendor/github.com/klauspost/pgzip/gzip.go deleted file mode 100644 index 257c4d299f..0000000000 --- a/vendor/github.com/klauspost/pgzip/gzip.go +++ /dev/null @@ -1,519 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pgzip - -import ( - "bytes" - "errors" - "fmt" - "hash" - "hash/crc32" - "io" - "runtime" - "sync" - "time" - - "github.com/klauspost/compress/flate" -) - -const ( - defaultBlockSize = 1 << 20 - tailSize = 16384 - defaultBlocks = 4 -) - -// These constants are copied from the flate package, so that code that imports -// "compress/gzip" does not also have to import "compress/flate". -const ( - NoCompression = flate.NoCompression - BestSpeed = flate.BestSpeed - BestCompression = flate.BestCompression - DefaultCompression = flate.DefaultCompression - ConstantCompression = flate.ConstantCompression - HuffmanOnly = flate.HuffmanOnly -) - -// A Writer is an io.WriteCloser. -// Writes to a Writer are compressed and written to w. -type Writer struct { - Header - w io.Writer - level int - wroteHeader bool - blockSize int - blocks int - currentBuffer []byte - prevTail []byte - digest hash.Hash32 - size int - closed bool - buf [10]byte - errMu sync.RWMutex - err error - pushedErr chan struct{} - results chan result - dictFlatePool sync.Pool - dstPool sync.Pool - wg sync.WaitGroup -} - -type result struct { - result chan []byte - notifyWritten chan struct{} -} - -// Use SetConcurrency to finetune the concurrency level if needed. -// -// With this you can control the approximate size of your blocks, -// as well as how many you want to be processing in parallel. 
-// -// Default values for this is SetConcurrency(defaultBlockSize, runtime.GOMAXPROCS(0)), -// meaning blocks are split at 1 MB and up to the number of CPU threads -// can be processing at once before the writer blocks. -func (z *Writer) SetConcurrency(blockSize, blocks int) error { - if blockSize <= tailSize { - return fmt.Errorf("gzip: block size cannot be less than or equal to %d", tailSize) - } - if blocks <= 0 { - return errors.New("gzip: blocks cannot be zero or less") - } - if blockSize == z.blockSize && blocks == z.blocks { - return nil - } - z.blockSize = blockSize - z.results = make(chan result, blocks) - z.blocks = blocks - z.dstPool.New = func() interface{} { return make([]byte, 0, blockSize+(blockSize)>>4) } - return nil -} - -// NewWriter returns a new Writer. -// Writes to the returned writer are compressed and written to w. -// -// It is the caller's responsibility to call Close on the WriteCloser when done. -// Writes may be buffered and not flushed until Close. -// -// Callers that wish to set the fields in Writer.Header must do so before -// the first call to Write or Close. The Comment and Name header fields are -// UTF-8 strings in Go, but the underlying format requires NUL-terminated ISO -// 8859-1 (Latin-1). NUL or non-Latin-1 runes in those strings will lead to an -// error on Write. -func NewWriter(w io.Writer) *Writer { - z, _ := NewWriterLevel(w, DefaultCompression) - return z -} - -// NewWriterLevel is like NewWriter but specifies the compression level instead -// of assuming DefaultCompression. -// -// The compression level can be DefaultCompression, NoCompression, or any -// integer value between BestSpeed and BestCompression inclusive. The error -// returned will be nil if the level is valid. 
-func NewWriterLevel(w io.Writer, level int) (*Writer, error) { - if level < ConstantCompression || level > BestCompression { - return nil, fmt.Errorf("gzip: invalid compression level: %d", level) - } - z := new(Writer) - z.SetConcurrency(defaultBlockSize, runtime.GOMAXPROCS(0)) - z.init(w, level) - return z, nil -} - -// This function must be used by goroutines to set an -// error condition, since z.err access is restricted -// to the callers goruotine. -func (z *Writer) pushError(err error) { - z.errMu.Lock() - if z.err != nil { - z.errMu.Unlock() - return - } - z.err = err - close(z.pushedErr) - z.errMu.Unlock() -} - -func (z *Writer) init(w io.Writer, level int) { - z.wg.Wait() - digest := z.digest - if digest != nil { - digest.Reset() - } else { - digest = crc32.NewIEEE() - } - z.Header = Header{OS: 255} - z.w = w - z.level = level - z.digest = digest - z.pushedErr = make(chan struct{}, 0) - z.results = make(chan result, z.blocks) - z.err = nil - z.closed = false - z.Comment = "" - z.Extra = nil - z.ModTime = time.Time{} - z.wroteHeader = false - z.currentBuffer = nil - z.buf = [10]byte{} - z.prevTail = nil - z.size = 0 - if z.dictFlatePool.New == nil { - z.dictFlatePool.New = func() interface{} { - f, _ := flate.NewWriterDict(w, level, nil) - return f - } - } -} - -// Reset discards the Writer z's state and makes it equivalent to the -// result of its original state from NewWriter or NewWriterLevel, but -// writing to w instead. This permits reusing a Writer rather than -// allocating a new one. -func (z *Writer) Reset(w io.Writer) { - if z.results != nil && !z.closed { - close(z.results) - } - z.SetConcurrency(defaultBlockSize, runtime.GOMAXPROCS(0)) - z.init(w, z.level) -} - -// GZIP (RFC 1952) is little-endian, unlike ZLIB (RFC 1950). 
-func put2(p []byte, v uint16) { - p[0] = uint8(v >> 0) - p[1] = uint8(v >> 8) -} - -func put4(p []byte, v uint32) { - p[0] = uint8(v >> 0) - p[1] = uint8(v >> 8) - p[2] = uint8(v >> 16) - p[3] = uint8(v >> 24) -} - -// writeBytes writes a length-prefixed byte slice to z.w. -func (z *Writer) writeBytes(b []byte) error { - if len(b) > 0xffff { - return errors.New("gzip.Write: Extra data is too large") - } - put2(z.buf[0:2], uint16(len(b))) - _, err := z.w.Write(z.buf[0:2]) - if err != nil { - return err - } - _, err = z.w.Write(b) - return err -} - -// writeString writes a UTF-8 string s in GZIP's format to z.w. -// GZIP (RFC 1952) specifies that strings are NUL-terminated ISO 8859-1 (Latin-1). -func (z *Writer) writeString(s string) (err error) { - // GZIP stores Latin-1 strings; error if non-Latin-1; convert if non-ASCII. - needconv := false - for _, v := range s { - if v == 0 || v > 0xff { - return errors.New("gzip.Write: non-Latin-1 header string") - } - if v > 0x7f { - needconv = true - } - } - if needconv { - b := make([]byte, 0, len(s)) - for _, v := range s { - b = append(b, byte(v)) - } - _, err = z.w.Write(b) - } else { - _, err = io.WriteString(z.w, s) - } - if err != nil { - return err - } - // GZIP strings are NUL-terminated. - z.buf[0] = 0 - _, err = z.w.Write(z.buf[0:1]) - return err -} - -// compressCurrent will compress the data currently buffered -// This should only be called from the main writer/flush/closer -func (z *Writer) compressCurrent(flush bool) { - c := z.currentBuffer - if len(c) > z.blockSize { - // This can never happen through the public interface. 
- panic("len(z.currentBuffer) > z.blockSize (most likely due to concurrent Write race)") - } - - r := result{} - r.result = make(chan []byte, 1) - r.notifyWritten = make(chan struct{}, 0) - // Reserve a result slot - select { - case z.results <- r: - case <-z.pushedErr: - return - } - - z.wg.Add(1) - tail := z.prevTail - if len(c) > tailSize { - buf := z.dstPool.Get().([]byte) // Put in .compressBlock - // Copy tail from current buffer before handing the buffer over to the - // compressBlock goroutine. - buf = append(buf[:0], c[len(c)-tailSize:]...) - z.prevTail = buf - } else { - z.prevTail = nil - } - go z.compressBlock(c, tail, r, z.closed) - - z.currentBuffer = z.dstPool.Get().([]byte) // Put in .compressBlock - z.currentBuffer = z.currentBuffer[:0] - - // Wait if flushing - if flush { - <-r.notifyWritten - } -} - -// Returns an error if it has been set. -// Cannot be used by functions that are from internal goroutines. -func (z *Writer) checkError() error { - z.errMu.RLock() - err := z.err - z.errMu.RUnlock() - return err -} - -// Write writes a compressed form of p to the underlying io.Writer. The -// compressed bytes are not necessarily flushed to output until -// the Writer is closed or Flush() is called. -// -// The function will return quickly, if there are unused buffers. -// The sent slice (p) is copied, and the caller is free to re-use the buffer -// when the function returns. -// -// Errors that occur during compression will be reported later, and a nil error -// does not signify that the compression succeeded (since it is most likely still running) -// That means that the call that returns an error may not be the call that caused it. -// Only Flush and Close functions are guaranteed to return any errors up to that point. -func (z *Writer) Write(p []byte) (int, error) { - if err := z.checkError(); err != nil { - return 0, err - } - // Write the GZIP header lazily. 
- if !z.wroteHeader { - z.wroteHeader = true - z.buf[0] = gzipID1 - z.buf[1] = gzipID2 - z.buf[2] = gzipDeflate - z.buf[3] = 0 - if z.Extra != nil { - z.buf[3] |= 0x04 - } - if z.Name != "" { - z.buf[3] |= 0x08 - } - if z.Comment != "" { - z.buf[3] |= 0x10 - } - put4(z.buf[4:8], uint32(z.ModTime.Unix())) - if z.level == BestCompression { - z.buf[8] = 2 - } else if z.level == BestSpeed { - z.buf[8] = 4 - } else { - z.buf[8] = 0 - } - z.buf[9] = z.OS - var n int - var err error - n, err = z.w.Write(z.buf[0:10]) - if err != nil { - z.pushError(err) - return n, err - } - if z.Extra != nil { - err = z.writeBytes(z.Extra) - if err != nil { - z.pushError(err) - return n, err - } - } - if z.Name != "" { - err = z.writeString(z.Name) - if err != nil { - z.pushError(err) - return n, err - } - } - if z.Comment != "" { - err = z.writeString(z.Comment) - if err != nil { - z.pushError(err) - return n, err - } - } - // Start receiving data from compressors - go func() { - listen := z.results - var failed bool - for { - r, ok := <-listen - // If closed, we are finished. - if !ok { - return - } - if failed { - close(r.notifyWritten) - continue - } - buf := <-r.result - n, err := z.w.Write(buf) - if err != nil { - z.pushError(err) - close(r.notifyWritten) - failed = true - continue - } - if n != len(buf) { - z.pushError(fmt.Errorf("gzip: short write %d should be %d", n, len(buf))) - failed = true - close(r.notifyWritten) - continue - } - z.dstPool.Put(buf) - close(r.notifyWritten) - } - }() - z.currentBuffer = z.dstPool.Get().([]byte) - z.currentBuffer = z.currentBuffer[:0] - } - q := p - for len(q) > 0 { - length := len(q) - if length+len(z.currentBuffer) > z.blockSize { - length = z.blockSize - len(z.currentBuffer) - } - z.digest.Write(q[:length]) - z.currentBuffer = append(z.currentBuffer, q[:length]...) 
- if len(z.currentBuffer) > z.blockSize { - panic("z.currentBuffer too large (most likely due to concurrent Write race)") - } - if len(z.currentBuffer) == z.blockSize { - z.compressCurrent(false) - if err := z.checkError(); err != nil { - return len(p) - len(q), err - } - } - z.size += length - q = q[length:] - } - return len(p), z.checkError() -} - -// Step 1: compresses buffer to buffer -// Step 2: send writer to channel -// Step 3: Close result channel to indicate we are done -func (z *Writer) compressBlock(p, prevTail []byte, r result, closed bool) { - defer func() { - close(r.result) - z.wg.Done() - }() - buf := z.dstPool.Get().([]byte) // Corresponding Put in .Write's result writer - dest := bytes.NewBuffer(buf[:0]) - - compressor := z.dictFlatePool.Get().(*flate.Writer) // Put below - compressor.ResetDict(dest, prevTail) - compressor.Write(p) - z.dstPool.Put(p) // Corresponding Get in .Write and .compressCurrent - - err := compressor.Flush() - if err != nil { - z.pushError(err) - return - } - if closed { - err = compressor.Close() - if err != nil { - z.pushError(err) - return - } - } - z.dictFlatePool.Put(compressor) // Get above - - if prevTail != nil { - z.dstPool.Put(prevTail) // Get in .compressCurrent - } - - // Read back buffer - buf = dest.Bytes() - r.result <- buf -} - -// Flush flushes any pending compressed data to the underlying writer. -// -// It is useful mainly in compressed network protocols, to ensure that -// a remote reader has enough data to reconstruct a packet. Flush does -// not return until the data has been written. If the underlying -// writer returns an error, Flush returns that error. -// -// In the terminology of the zlib library, Flush is equivalent to Z_SYNC_FLUSH. 
-func (z *Writer) Flush() error { - if err := z.checkError(); err != nil { - return err - } - if z.closed { - return nil - } - if !z.wroteHeader { - _, err := z.Write(nil) - if err != nil { - return err - } - } - // We send current block to compression - z.compressCurrent(true) - - return z.checkError() -} - -// UncompressedSize will return the number of bytes written. -// pgzip only, not a function in the official gzip package. -func (z *Writer) UncompressedSize() int { - return z.size -} - -// Close closes the Writer, flushing any unwritten data to the underlying -// io.Writer, but does not close the underlying io.Writer. -func (z *Writer) Close() error { - if err := z.checkError(); err != nil { - return err - } - if z.closed { - return nil - } - - z.closed = true - if !z.wroteHeader { - z.Write(nil) - if err := z.checkError(); err != nil { - return err - } - } - z.compressCurrent(true) - if err := z.checkError(); err != nil { - return err - } - close(z.results) - put4(z.buf[0:4], z.digest.Sum32()) - put4(z.buf[4:8], uint32(z.size)) - _, err := z.w.Write(z.buf[0:8]) - if err != nil { - z.pushError(err) - return err - } - return nil -} diff --git a/vendor/github.com/mholt/archiver/v3/.gitignore b/vendor/github.com/mholt/archiver/v3/.gitignore deleted file mode 100644 index 4a87fc1aaf..0000000000 --- a/vendor/github.com/mholt/archiver/v3/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -/arc -/cmd/arc/arc -/dist/ -/vendor/ - -.DS_Store -_gitignore -builds/ -*.test -.*.sw* diff --git a/vendor/github.com/mholt/archiver/v3/.goreleaser.yml b/vendor/github.com/mholt/archiver/v3/.goreleaser.yml deleted file mode 100644 index 13cc2a679b..0000000000 --- a/vendor/github.com/mholt/archiver/v3/.goreleaser.yml +++ /dev/null @@ -1,41 +0,0 @@ -# This is an example goreleaser.yaml file with some sane defaults. 
-# Make sure to check the documentation at http://goreleaser.com -project_name: arc -before: - hooks: - # You may remove this if you don't use go modules. - - go mod download - # you may remove this if you don't need go generate - - go generate ./... -builds: - - - env: - - CGO_ENABLED=0 - main: ./cmd/arc - goos: - - linux - - windows - - darwin - goarch: - - 386 - - amd64 - - arm - - arm64 - goarm: - - 6 - - 7 -archives: - - - format: binary - replacements: - darwin: mac -checksum: - name_template: 'checksums.txt' -snapshot: - name_template: "{{ .Tag }}-next" -changelog: - sort: asc - filters: - exclude: - - '^docs:' - - '^test:' diff --git a/vendor/github.com/mholt/archiver/v3/.prettierrc b/vendor/github.com/mholt/archiver/v3/.prettierrc deleted file mode 100644 index f9f5139c57..0000000000 --- a/vendor/github.com/mholt/archiver/v3/.prettierrc +++ /dev/null @@ -1,4 +0,0 @@ -{ - "bracketSpacing": true, - "printWidth": 120, -} diff --git a/vendor/github.com/mholt/archiver/v3/LICENSE b/vendor/github.com/mholt/archiver/v3/LICENSE deleted file mode 100644 index 315d04f280..0000000000 --- a/vendor/github.com/mholt/archiver/v3/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2016 Matthew Holt - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/mholt/archiver/v3/README.md b/vendor/github.com/mholt/archiver/v3/README.md deleted file mode 100644 index c8de5e7e55..0000000000 --- a/vendor/github.com/mholt/archiver/v3/README.md +++ /dev/null @@ -1,324 +0,0 @@ -# archiver [![archiver GoDoc](https://img.shields.io/badge/reference-godoc-blue.svg?style=flat-square)](https://pkg.go.dev/github.com/mholt/archiver?tab=doc) [![Ubuntu-latest](https://github.com/mholt/archiver/actions/workflows/ubuntu-latest.yml/badge.svg)](https://github.com/mholt/archiver/actions/workflows/ubuntu-latest.yml) [![Macos-latest](https://github.com/mholt/archiver/actions/workflows/macos-latest.yml/badge.svg)](https://github.com/mholt/archiver/actions/workflows/macos-latest.yml) [![Windows-latest](https://github.com/mholt/archiver/actions/workflows/windows-latest.yml/badge.svg)](https://github.com/mholt/archiver/actions/workflows/windows-latest.yml) - -Introducing **Archiver 3.1** - a cross-platform, multi-format archive utility and Go library. A powerful and flexible library meets an elegant CLI in this generic replacement for several platform-specific or format-specific archive utilities. - -## Features - -Package archiver makes it trivially easy to make and extract common archive formats such as tarball (and its compressed variants) and zip. Simply name the input and output file(s). 
The `arc` command runs the same on all platforms and has no external dependencies (not even libc). It is powered by the Go standard library and several third-party, pure-Go libraries. - -Files are put into the root of the archive; directories are recursively added, preserving structure. - -- Make whole archives from a list of files -- Open whole archives to a folder -- Extract specific files/folders from archives -- Stream files in and out of archives without needing actual files on disk -- Traverse archive contents without loading them -- Compress files -- Decompress files -- Streaming compression and decompression -- Several archive and compression formats supported - -### Format-dependent features - -- Gzip is multithreaded -- Optionally create a top-level folder to avoid littering a directory or archive root with files -- Toggle overwrite existing files -- Adjust compression level -- Zip: store (not compress) already-compressed files -- Make all necessary directories -- Open password-protected RAR archives -- Optionally continue with other files after an error - -### Supported compression formats - -- brotli (br) -- bzip2 (bz2) -- flate (zip) -- gzip (gz) -- lz4 -- snappy (sz) -- xz -- zstandard (zstd) - -### Supported archive formats - -- .zip -- .tar (including any compressed variants like .tar.gz) -- .rar (read-only) - -Tar files can optionally be compressed using any of the above compression formats. - -## GoDoc - -See - -## Install - -### With webi - -[`webi`](https://webinstall.dev/arc) will install `webi` and `arc` to `~/.local/bin/` and update your `PATH`. - -#### Mac, Linux, Raspberry Pi - -```bash -curl -fsS https://webinstall.dev/arc | bash -``` - -#### Windows 10 - -```pwsh -curl.exe -fsS -A MS https://webinstall.dev/arc | powershell -``` - -### With Go - -To install the runnable binary to your \$GOPATH/bin: - -```bash -go install github.com/mholt/archiver/v3/cmd/arc@latest -``` - -### Manually - -To install manually - -1. 
Download the binary for your platform from the [Github Releases](https://github.com/mholt/archiver/releases) page. -2. Move the binary to a location in your path, for example: - - without `sudo`: - ```bash - chmod a+x ~/Downloads/arc_* - mkdir -p ~/.local/bin - mv ~/Downloads/arc_* ~/.local/bin/arc - ``` - - as `root`: - ```bash - chmod a+x ~/Downloads/arc_* - sudo mkdir -p /usr/local/bin - sudo mv ~/Downloads/arc_* /usr/local/bin/arc - ``` -3. If needed, update `~/.bashrc` or `~/.profile` to include add `arc` in your `PATH`, for example: - ``` - echo 'PATH="$HOME:/.local/bin:$PATH"' >> ~/.bashrc - ``` - -## Build from Source - -You can successfully build `arc` with just the go tooling, or with `goreleaser`. - -### With `go` - -```bash -go build cmd/arc/*.go -``` - -### Multi-platform with `goreleaser` - -Builds with `goreleaser` will also include version info. - -```bash -goreleaser --snapshot --skip-publish --rm-dist -``` - -## Command Use - -### Make new archive - -```bash -# Syntax: arc archive [archive name] [input files...] - -arc archive test.tar.gz file1.txt images/file2.jpg folder/subfolder -``` - -(At least one input file is required.) - -### Extract entire archive - -```bash -# Syntax: arc unarchive [archive name] [destination] - -arc unarchive test.tar.gz -``` - -(The destination path is optional; default is current directory.) - -The archive name must end with a supported file extension—this is how it knows what kind of archive to make. Run `arc help` for more help. 
- -### List archive contents - -```bash -# Syntax: arc ls [archive name] - -arc ls caddy_dist.tar.gz -``` - -```txt -drwxr-xr-x matt staff 0 2018-09-19 15:47:18 -0600 MDT dist/ --rw-r--r-- matt staff 6148 2017-08-07 18:34:22 -0600 MDT dist/.DS_Store --rw-r--r-- matt staff 22481 2018-09-19 15:47:18 -0600 MDT dist/CHANGES.txt --rw-r--r-- matt staff 17189 2018-09-19 15:47:18 -0600 MDT dist/EULA.txt --rw-r--r-- matt staff 25261 2016-03-07 16:32:00 -0700 MST dist/LICENSES.txt --rw-r--r-- matt staff 1017 2018-09-19 15:47:18 -0600 MDT dist/README.txt --rw-r--r-- matt staff 288 2016-03-21 11:52:38 -0600 MDT dist/gitcookie.sh.enc -... -``` - -### Extract a specific file or folder from an archive - -```bash -# Syntax: arc extract [archive name] [path in archive] [destination on disk] - -arc extract test.tar.gz foo/hello.txt extracted/hello.txt -``` - -### Compress a single file - -```bash -# Syntax: arc compress [input file] [output file] - -arc compress test.txt compressed_test.txt.gz -arc compress test.txt gz -``` - -For convenience, the output file (second argument) may simply be a compression format (without leading dot), in which case the output filename will be the same as the input filename but with the format extension appended, and the input file will be deleted if successful. - -### Decompress a single file - -```bash -# Syntax: arc decompress [input file] [output file] - -arc decompress test.txt.gz original_test.txt -arc decompress test.txt.gz -``` - -For convenience, the output file (second argument) may be omitted. In that case, the output filename will have the same name as the input filename, but with the compression extension stripped from the end; and the input file will be deleted if successful. - -### Flags - -Flags are specified before the subcommand. Use `arc help` or `arc -h` to get usage help and a description of flags with their default values. 
- -## Library Use - -The archiver package allows you to easily create and open archives, walk their contents, extract specific files, compress and decompress files, and even stream archives in and out using pure io.Reader and io.Writer interfaces, without ever needing to touch the disk. - -To use as a dependency in your project: - -```bash -go get github.com/mholt/archiver/v3 -``` - -```go -import "github.com/mholt/archiver/v3" -``` - -[See the package's GoDoc](https://pkg.go.dev/github.com/mholt/archiver?tab=doc) for full API documentation. - -For example, creating or unpacking an archive file: - -```go -err := archiver.Archive([]string{"testdata", "other/file.txt"}, "test.zip") -// ... -err = archiver.Unarchive("test.tar.gz", "test") -``` - -The archive format is determined by file extension. (There are [several functions in this package](https://pkg.go.dev/github.com/mholt/archiver?tab=doc) which perform a task by inferring the format from file extension or file header, including `Archive()`, `Unarchive()`, `CompressFile()`, and `DecompressFile()`.) 
- -To configure the archiver used or perform, create an instance of the format's type: - -```go -z := archiver.Zip{ - CompressionLevel: flate.DefaultCompression, - MkdirAll: true, - SelectiveCompression: true, - ContinueOnError: false, - OverwriteExisting: false, - ImplicitTopLevelFolder: false, -} - -err := z.Archive([]string{"testdata", "other/file.txt"}, "/Users/matt/Desktop/test.zip") -``` - -Inspecting an archive: - -```go -err = z.Walk("/Users/matt/Desktop/test.zip", func(f archiver.File) error { - zfh, ok := f.Header.(zip.FileHeader) - if ok { - fmt.Println("Filename:", zfh.Name) - } - return nil -}) -``` - -Streaming files into an archive that is being written to the HTTP response: - -```go -err = z.Create(responseWriter) -if err != nil { - return err -} -defer z.Close() - -for _, fname := range filenames { - info, err := os.Stat(fname) - if err != nil { - return err - } - - // get file's name for the inside of the archive - internalName, err := archiver.NameInArchive(info, fname, fname) - if err != nil { - return err - } - - // open the file - file, err := os.Open(f) - if err != nil { - return err - } - - // write it to the archive - err = z.Write(archiver.File{ - FileInfo: archiver.FileInfo{ - FileInfo: info, - CustomName: internalName, - }, - ReadCloser: file, - }) - file.Close() - if err != nil { - return err - } -} -``` - -The `archiver.File` type allows you to use actual files with archives, or to mimic files when you only have streams. - -There's a lot more that can be done, too. [See the GoDoc](https://pkg.go.dev/github.com/mholt/archiver?tab=doc) for full API documentation. - -**Security note: This package does NOT attempt to mitigate zip-slip attacks.** It is [extremely difficult](https://github.com/rubyzip/rubyzip/pull/376) [to do properly](https://github.com/mholt/archiver/pull/65#issuecomment-395988244) and [seemingly impossible to mitigate effectively across platforms](https://github.com/golang/go/issues/20126). 
[Attempted fixes have broken processing of legitimate files in production](https://github.com/mholt/archiver/pull/70#issuecomment-423267320), rendering the program unusable. Our recommendation instead is to inspect the contents of an untrusted archive before extracting it (this package provides `Walkers`) and decide if you want to proceed with extraction. - -## Project Values - -This project has a few principle-based goals that guide its development: - -- **Do our thing really well.** Our thing is creating, opening, inspecting, compressing, and streaming archive files. It is not meant to be a replacement for specific archive format tools like tar, zip, etc. that have lots of features and customizability. (Some customizability is OK, but not to the extent that it becomes overly complicated or error-prone.) - -- **Have good tests.** Changes should be covered by tests. - -- **Limit dependencies.** Keep the package lightweight. - -- **Pure Go.** This means no cgo or other external/system dependencies. This package should be able to stand on its own and cross-compile easily to any platform -- and that includes its library dependencies. - -- **Idiomatic Go.** Keep interfaces small, variable names semantic, vet shows no errors, the linter is generally quiet, etc. - -- **Be elegant.** This package should be elegant to use and its code should be elegant when reading and testing. If it doesn't feel good, fix it up. - -- **Well-documented.** Use comments prudently; explain why non-obvious code is necessary (and use tests to enforce it). Keep the docs updated, and have examples where helpful. - -- **Keep it efficient.** This often means keep it simple. Fast code is valuable. - -- **Consensus.** Contributions should ideally be approved by multiple reviewers before being merged. Generally, avoid merging multi-chunk changes that do not go through at least one or two iterations/reviews. Except for trivial changes, PRs are seldom ready to merge right away. 
- -- **Have fun contributing.** Coding is awesome! - -We welcome contributions and appreciate your efforts! However, please open issues to discuss any changes before spending the time preparing a pull request. This will save time, reduce frustration, and help coordinate the work. Thank you! diff --git a/vendor/github.com/mholt/archiver/v3/SECURITY.md b/vendor/github.com/mholt/archiver/v3/SECURITY.md deleted file mode 100644 index f915712462..0000000000 --- a/vendor/github.com/mholt/archiver/v3/SECURITY.md +++ /dev/null @@ -1,15 +0,0 @@ -# Security Policy - -## Supported Versions - -| Version | Supported | -| ------- | ------------------ | -| >= 3.x | :white_check_mark: | -| < 3.0 | :x: | - -## Reporting a Vulnerability - -Please send the details to both of us: - -- AJ ONeal -- Matthew Holt diff --git a/vendor/github.com/mholt/archiver/v3/archiver.go b/vendor/github.com/mholt/archiver/v3/archiver.go deleted file mode 100644 index 6fdadadc4c..0000000000 --- a/vendor/github.com/mholt/archiver/v3/archiver.go +++ /dev/null @@ -1,540 +0,0 @@ -// Package archiver facilitates convenient, cross-platform, high-level archival -// and compression operations for a variety of formats and compression algorithms. -// -// This package and its dependencies are written in pure Go (not cgo) and -// have no external dependencies, so they should run on all major platforms. -// (It also comes with a command for CLI use in the cmd/arc folder.) -// -// Each supported format or algorithm has a unique type definition that -// implements the interfaces corresponding to the tasks they perform. For -// example, the Tar type implements Reader, Writer, Archiver, Unarchiver, -// Walker, and several other interfaces. -// -// The most common functions are implemented at the package level for -// convenience: Archive, Unarchive, Walk, Extract, CompressFile, and -// DecompressFile. 
With these, the format type is chosen implicitly, -// and a sane default configuration is used. -// -// To customize a format's configuration, create an instance of its struct -// with its fields set to the desired values. You can also use and customize -// the handy Default* (replace the wildcard with the format's type name) -// for a quick, one-off instance of the format's type. -// -// To obtain a new instance of a format's struct with the default config, use -// the provided New*() functions. This is not required, however. An empty -// struct of any type, for example &Zip{} is perfectly valid, so you may -// create the structs manually, too. The examples on this page show how -// either may be done. -// -// See the examples in this package for an idea of how to wield this package -// for common tasks. Most of the examples which are specific to a certain -// format type, for example Zip, can be applied to other types that implement -// the same interfaces. For example, using Zip is very similar to using Tar -// or TarGz (etc), and using Gz is very similar to using Sz or Xz (etc). -// -// When creating archives or compressing files using a specific instance of -// the format's type, the name of the output file MUST match that of the -// format, to prevent confusion later on. If you absolutely need a different -// file extension, you may rename the file afterward. -// -// Values in this package are NOT safe for concurrent use. There is no -// performance benefit of reusing them, and since they may contain important -// state (especially while walking, reading, or writing), it is NOT -// recommended to reuse values from this package or change their configuration -// after they are in use. -package archiver - -import ( - "fmt" - "io" - "os" - "path" - "path/filepath" - "runtime" - "strings" -) - -// Archiver is a type that can create an archive file -// from a list of source file names. 
-type Archiver interface { - ExtensionChecker - - // Archive adds all the files or folders in sources - // to an archive to be created at destination. Files - // are added to the root of the archive, and directories - // are walked and recursively added, preserving folder - // structure. - Archive(sources []string, destination string) error -} - -// ExtensionChecker validates file extensions -type ExtensionChecker interface { - CheckExt(name string) error -} - -// FilenameChecker validates filenames to prevent path traversal attacks -type FilenameChecker interface { - CheckPath(to, filename string) error -} - -// Unarchiver is a type that can extract archive files -// into a folder. -type Unarchiver interface { - Unarchive(source, destination string) error -} - -// Writer can write discrete byte streams of files to -// an output stream. -type Writer interface { - Create(out io.Writer) error - Write(f File) error - Close() error -} - -// Reader can read discrete byte streams of files from -// an input stream. -type Reader interface { - Open(in io.Reader, size int64) error - Read() (File, error) - Close() error -} - -// Extractor can extract a specific file from a source -// archive to a specific destination folder on disk. -type Extractor interface { - Extract(source, target, destination string) error -} - -// File provides methods for accessing information about -// or contents of a file within an archive. -type File struct { - os.FileInfo - - // The original header info; depends on - // type of archive -- could be nil, too. - Header interface{} - - // Allow the file contents to be read (and closed) - io.ReadCloser -} - -// FileInfo is an os.FileInfo but optionally with -// a custom name, useful if dealing with files that -// are not actual files on disk, or which have a -// different name in an archive than on disk. -type FileInfo struct { - os.FileInfo - CustomName string - // Stores path to the source. - // Used when reading a symlink. 
- SourcePath string -} - -// Name returns fi.CustomName if not empty; -// otherwise it returns fi.FileInfo.Name(). -func (fi FileInfo) Name() string { - if fi.CustomName != "" { - return fi.CustomName - } - return fi.FileInfo.Name() -} - -// ReadFakeCloser is an io.Reader that has -// a no-op close method to satisfy the -// io.ReadCloser interface. -type ReadFakeCloser struct { - io.Reader -} - -// Close implements io.Closer. -func (rfc ReadFakeCloser) Close() error { return nil } - -// Walker can walk an archive file and return information -// about each item in the archive. -type Walker interface { - Walk(archive string, walkFn WalkFunc) error -} - -// WalkFunc is called at each item visited by Walk. -// If an error is returned, the walk may continue -// if the Walker is configured to continue on error. -// The sole exception is the error value ErrStopWalk, -// which stops the walk without an actual error. -type WalkFunc func(f File) error - -// ErrStopWalk signals Walk to break without error. -var ErrStopWalk = fmt.Errorf("walk stopped") - -// ErrFormatNotRecognized is an error that will be -// returned if the file is not a valid archive format. -var ErrFormatNotRecognized = fmt.Errorf("format not recognized") - -// Compressor compresses to out what it reads from in. -// It also ensures a compatible or matching file extension. -type Compressor interface { - ExtensionChecker - Compress(in io.Reader, out io.Writer) error -} - -// Decompressor decompresses to out what it reads from in. -type Decompressor interface { - Decompress(in io.Reader, out io.Writer) error -} - -// Matcher is a type that can return whether the given -// file appears to match the implementation's format. -// Implementations should return the file's read position -// to where it was when the method was called. -type Matcher interface { - Match(io.ReadSeeker) (bool, error) -} - -// Archive creates an archive of the source files to a new file at destination. 
-// The archive format is chosen implicitly by file extension. -func Archive(sources []string, destination string) error { - aIface, err := ByExtension(destination) - if err != nil { - return err - } - a, ok := aIface.(Archiver) - if !ok { - return fmt.Errorf("format specified by destination filename is not an archive format: %s (%T)", destination, aIface) - } - return a.Archive(sources, destination) -} - -// Unarchive unarchives the given archive file into the destination folder. -// The archive format is selected implicitly. -func Unarchive(source, destination string) error { - uaIface, err := ByExtension(source) - if err != nil { - return err - } - u, ok := uaIface.(Unarchiver) - if !ok { - return fmt.Errorf("format specified by source filename is not an archive format: %s (%T)", source, uaIface) - } - return u.Unarchive(source, destination) -} - -// Walk calls walkFn for each file within the given archive file. -// The archive format is chosen implicitly. -func Walk(archive string, walkFn WalkFunc) error { - wIface, err := ByExtension(archive) - if err != nil { - return err - } - w, ok := wIface.(Walker) - if !ok { - return fmt.Errorf("format specified by archive filename is not a walker format: %s (%T)", archive, wIface) - } - return w.Walk(archive, walkFn) -} - -// Extract extracts a single file from the given source archive. If the target -// is a directory, the entire folder will be extracted into destination. The -// archive format is chosen implicitly. -func Extract(source, target, destination string) error { - eIface, err := ByExtension(source) - if err != nil { - return err - } - e, ok := eIface.(Extractor) - if !ok { - return fmt.Errorf("format specified by source filename is not an extractor format: %s (%T)", source, eIface) - } - return e.Extract(source, target, destination) -} - -// CompressFile is a convenience function to simply compress a file. -// The compression algorithm is selected implicitly based on the -// destination's extension. 
-func CompressFile(source, destination string) error { - cIface, err := ByExtension(destination) - if err != nil { - return err - } - c, ok := cIface.(Compressor) - if !ok { - return fmt.Errorf("format specified by destination filename is not a recognized compression algorithm: %s", destination) - } - return FileCompressor{Compressor: c}.CompressFile(source, destination) -} - -// DecompressFile is a convenience function to simply decompress a file. -// The decompression algorithm is selected implicitly based on the -// source's extension. -func DecompressFile(source, destination string) error { - cIface, err := ByExtension(source) - if err != nil { - return err - } - c, ok := cIface.(Decompressor) - if !ok { - return fmt.Errorf("format specified by source filename is not a recognized compression algorithm: %s", source) - } - return FileCompressor{Decompressor: c}.DecompressFile(source, destination) -} - -func fileExists(name string) bool { - _, err := os.Stat(name) - return !os.IsNotExist(err) -} - -func mkdir(dirPath string, dirMode os.FileMode) error { - err := os.MkdirAll(dirPath, dirMode) - if err != nil { - return fmt.Errorf("%s: making directory: %v", dirPath, err) - } - return nil -} - -func writeNewFile(fpath string, in io.Reader, fm os.FileMode) error { - err := os.MkdirAll(filepath.Dir(fpath), 0755) - if err != nil { - return fmt.Errorf("%s: making directory for file: %v", fpath, err) - } - - out, err := os.Create(fpath) - if err != nil { - return fmt.Errorf("%s: creating new file: %v", fpath, err) - } - defer out.Close() - - err = out.Chmod(fm) - if err != nil && runtime.GOOS != "windows" { - return fmt.Errorf("%s: changing file mode: %v", fpath, err) - } - - _, err = io.Copy(out, in) - if err != nil { - return fmt.Errorf("%s: writing file: %v", fpath, err) - } - return nil -} - -func writeNewSymbolicLink(fpath string, target string) error { - err := os.MkdirAll(filepath.Dir(fpath), 0755) - if err != nil { - return fmt.Errorf("%s: making directory for 
file: %v", fpath, err) - } - - _, err = os.Lstat(fpath) - if err == nil { - err = os.Remove(fpath) - if err != nil { - return fmt.Errorf("%s: failed to unlink: %+v", fpath, err) - } - } - - err = os.Symlink(target, fpath) - if err != nil { - return fmt.Errorf("%s: making symbolic link for: %v", fpath, err) - } - return nil -} - -func writeNewHardLink(fpath string, target string) error { - err := os.MkdirAll(filepath.Dir(fpath), 0755) - if err != nil { - return fmt.Errorf("%s: making directory for file: %v", fpath, err) - } - - _, err = os.Lstat(fpath) - if err == nil { - err = os.Remove(fpath) - if err != nil { - return fmt.Errorf("%s: failed to unlink: %+v", fpath, err) - } - } - - err = os.Link(target, fpath) - if err != nil { - return fmt.Errorf("%s: making hard link for: %v", fpath, err) - } - return nil -} - -func isSymlink(fi os.FileInfo) bool { - return fi.Mode()&os.ModeSymlink != 0 -} - -// within returns true if sub is within or equal to parent. -func within(parent, sub string) bool { - rel, err := filepath.Rel(parent, sub) - if err != nil { - return false - } - return !strings.Contains(rel, "..") -} - -// multipleTopLevels returns true if the paths do not -// share a common top-level folder. -func multipleTopLevels(paths []string) bool { - if len(paths) < 2 { - return false - } - var lastTop string - for _, p := range paths { - p = strings.TrimPrefix(strings.Replace(p, `\`, "/", -1), "/") - for { - next := path.Dir(p) - if next == "." { - break - } - p = next - } - if lastTop == "" { - lastTop = p - } - if p != lastTop { - return true - } - } - return false -} - -// folderNameFromFileName returns a name for a folder -// that is suitable based on the filename, which will -// be stripped of its extensions. 
-func folderNameFromFileName(filename string) string { - base := filepath.Base(filename) - firstDot := strings.Index(base, ".") - if firstDot > -1 { - return base[:firstDot] - } - return base -} - -// makeNameInArchive returns the filename for the file given by fpath to be used within -// the archive. sourceInfo is the FileInfo obtained by calling os.Stat on source, and baseDir -// is an optional base directory that becomes the root of the archive. fpath should be the -// unaltered file path of the file given to a filepath.WalkFunc. -func makeNameInArchive(sourceInfo os.FileInfo, source, baseDir, fpath string) (string, error) { - name := filepath.Base(fpath) // start with the file or dir name - if sourceInfo.IsDir() { - // preserve internal directory structure; that's the path components - // between the source directory's leaf and this file's leaf - dir, err := filepath.Rel(filepath.Dir(source), filepath.Dir(fpath)) - if err != nil { - return "", err - } - // prepend the internal directory structure to the leaf name, - // and convert path separators to forward slashes as per spec - name = path.Join(filepath.ToSlash(dir), name) - } - return path.Join(baseDir, name), nil // prepend the base directory -} - -// NameInArchive returns a name for the file at fpath suitable for -// the inside of an archive. The source and its associated sourceInfo -// is the path where walking a directory started, and if no directory -// was walked, source may == fpath. The returned name is essentially -// the components of the path between source and fpath, preserving -// the internal directory structure. -func NameInArchive(sourceInfo os.FileInfo, source, fpath string) (string, error) { - return makeNameInArchive(sourceInfo, source, "", fpath) -} - -// ByExtension returns an archiver and unarchiver, or compressor -// and decompressor, based on the extension of the filename. 
-func ByExtension(filename string) (interface{}, error) { - var ec interface{} - for _, c := range extCheckers { - if err := c.CheckExt(filename); err == nil { - ec = c - break - } - } - switch ec.(type) { - case *Rar: - return NewRar(), nil - case *Tar: - return NewTar(), nil - case *TarBrotli: - return NewTarBrotli(), nil - case *TarBz2: - return NewTarBz2(), nil - case *TarGz: - return NewTarGz(), nil - case *TarLz4: - return NewTarLz4(), nil - case *TarSz: - return NewTarSz(), nil - case *TarXz: - return NewTarXz(), nil - case *TarZstd: - return NewTarZstd(), nil - case *Zip: - return NewZip(), nil - case *Gz: - return NewGz(), nil - case *Bz2: - return NewBz2(), nil - case *Lz4: - return NewLz4(), nil - case *Snappy: - return NewSnappy(), nil - case *Xz: - return NewXz(), nil - case *Zstd: - return NewZstd(), nil - } - return nil, fmt.Errorf("format unrecognized by filename: %s", filename) -} - -// ByHeader returns the unarchiver value that matches the input's -// file header. It does not affect the current read position. -// If the file's header is not a recognized archive format, then -// ErrFormatNotRecognized will be returned. -func ByHeader(input io.ReadSeeker) (Unarchiver, error) { - var matcher Matcher - for _, m := range matchers { - ok, err := m.Match(input) - if err != nil { - return nil, fmt.Errorf("matching on format %s: %v", m, err) - } - if ok { - matcher = m - break - } - } - switch matcher.(type) { - case *Zip: - return NewZip(), nil - case *Tar: - return NewTar(), nil - case *Rar: - return NewRar(), nil - } - return nil, ErrFormatNotRecognized -} - -// extCheckers is a list of the format implementations -// that can check extensions. Only to be used for -// checking extensions - not any archival operations. 
-var extCheckers = []ExtensionChecker{ - &TarBrotli{}, - &TarBz2{}, - &TarGz{}, - &TarLz4{}, - &TarSz{}, - &TarXz{}, - &TarZstd{}, - &Rar{}, - &Tar{}, - &Zip{}, - &Brotli{}, - &Gz{}, - &Bz2{}, - &Lz4{}, - &Snappy{}, - &Xz{}, - &Zstd{}, -} - -var matchers = []Matcher{ - &Rar{}, - &Tar{}, - &Zip{}, -} diff --git a/vendor/github.com/mholt/archiver/v3/brotli.go b/vendor/github.com/mholt/archiver/v3/brotli.go deleted file mode 100644 index d594d66f2d..0000000000 --- a/vendor/github.com/mholt/archiver/v3/brotli.go +++ /dev/null @@ -1,55 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "path/filepath" - - "github.com/andybalholm/brotli" -) - -// Brotli facilitates brotli compression. -type Brotli struct { - Quality int -} - -// Compress reads in, compresses it, and writes it to out. -func (br *Brotli) Compress(in io.Reader, out io.Writer) error { - w := brotli.NewWriterLevel(out, br.Quality) - defer w.Close() - _, err := io.Copy(w, in) - return err -} - -// Decompress reads in, decompresses it, and writes it to out. -func (br *Brotli) Decompress(in io.Reader, out io.Writer) error { - r := brotli.NewReader(in) - _, err := io.Copy(out, r) - return err -} - -// CheckExt ensures the file extension matches the format. -func (br *Brotli) CheckExt(filename string) error { - if filepath.Ext(filename) != ".br" { - return fmt.Errorf("filename must have a .br extension") - } - return nil -} - -func (br *Brotli) String() string { return "brotli" } - -// NewBrotli returns a new, default instance ready to be customized and used. -func NewBrotli() *Brotli { - return &Brotli{ - Quality: brotli.DefaultCompression, - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Compressor(new(Brotli)) - _ = Decompressor(new(Brotli)) -) - -// DefaultBrotli is a default instance that is conveniently ready to use. 
-var DefaultBrotli = NewBrotli() diff --git a/vendor/github.com/mholt/archiver/v3/build.bash b/vendor/github.com/mholt/archiver/v3/build.bash deleted file mode 100644 index 225ffc2da5..0000000000 --- a/vendor/github.com/mholt/archiver/v3/build.bash +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash -set -ex - -# This script builds archiver for most common platforms. - -export CGO_ENABLED=0 - -cd cmd/arc -GOOS=linux GOARCH=amd64 go build -o ../../builds/arc_linux_amd64 -GOOS=linux GOARCH=arm go build -o ../../builds/arc_linux_arm7 -GOOS=darwin GOARCH=amd64 go build -o ../../builds/arc_mac_amd64 -GOOS=windows GOARCH=amd64 go build -o ../../builds/arc_windows_amd64.exe -cd ../.. diff --git a/vendor/github.com/mholt/archiver/v3/bz2.go b/vendor/github.com/mholt/archiver/v3/bz2.go deleted file mode 100644 index 2eb4ac2b88..0000000000 --- a/vendor/github.com/mholt/archiver/v3/bz2.go +++ /dev/null @@ -1,64 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "path/filepath" - - "github.com/dsnet/compress/bzip2" -) - -// Bz2 facilitates bzip2 compression. -type Bz2 struct { - CompressionLevel int -} - -// Compress reads in, compresses it, and writes it to out. -func (bz *Bz2) Compress(in io.Reader, out io.Writer) error { - w, err := bzip2.NewWriter(out, &bzip2.WriterConfig{ - Level: bz.CompressionLevel, - }) - if err != nil { - return err - } - defer w.Close() - _, err = io.Copy(w, in) - return err -} - -// Decompress reads in, decompresses it, and writes it to out. -func (bz *Bz2) Decompress(in io.Reader, out io.Writer) error { - r, err := bzip2.NewReader(in, nil) - if err != nil { - return err - } - defer r.Close() - _, err = io.Copy(out, r) - return err -} - -// CheckExt ensures the file extension matches the format. 
-func (bz *Bz2) CheckExt(filename string) error { - if filepath.Ext(filename) != ".bz2" { - return fmt.Errorf("filename must have a .bz2 extension") - } - return nil -} - -func (bz *Bz2) String() string { return "bz2" } - -// NewBz2 returns a new, default instance ready to be customized and used. -func NewBz2() *Bz2 { - return &Bz2{ - CompressionLevel: bzip2.DefaultCompression, - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Compressor(new(Bz2)) - _ = Decompressor(new(Bz2)) -) - -// DefaultBz2 is a default instance that is conveniently ready to use. -var DefaultBz2 = NewBz2() diff --git a/vendor/github.com/mholt/archiver/v3/error.go b/vendor/github.com/mholt/archiver/v3/error.go deleted file mode 100644 index a46235c652..0000000000 --- a/vendor/github.com/mholt/archiver/v3/error.go +++ /dev/null @@ -1,27 +0,0 @@ -package archiver - -import ( - "fmt" - "strings" -) - -// IllegalPathError is an error returned when an illegal -// path is detected during the archival process. -// -// By default, only the Filename is showed on error, but you might -// also get the absolute value of the invalid path on the AbsolutePath -// field. -type IllegalPathError struct { - AbsolutePath string - Filename string -} - -func (err *IllegalPathError) Error() string { - return fmt.Sprintf("illegal file path: %s", err.Filename) -} - -// IsIllegalPathError returns true if the provided error is of -// the type IllegalPathError. 
-func IsIllegalPathError(err error) bool { - return err != nil && strings.Contains(err.Error(), "illegal file path: ") -} diff --git a/vendor/github.com/mholt/archiver/v3/filecompressor.go b/vendor/github.com/mholt/archiver/v3/filecompressor.go deleted file mode 100644 index ab1fd3b8c0..0000000000 --- a/vendor/github.com/mholt/archiver/v3/filecompressor.go +++ /dev/null @@ -1,67 +0,0 @@ -package archiver - -import ( - "fmt" - "os" -) - -// FileCompressor can compress and decompress single files. -type FileCompressor struct { - Compressor - Decompressor - - // Whether to overwrite existing files when creating files. - OverwriteExisting bool -} - -// CompressFile reads the source file and compresses it to destination. -// The destination must have a matching extension. -func (fc FileCompressor) CompressFile(source, destination string) error { - if err := fc.CheckExt(destination); err != nil { - return err - } - if fc.Compressor == nil { - return fmt.Errorf("no compressor specified") - } - if !fc.OverwriteExisting && fileExists(destination) { - return fmt.Errorf("file exists: %s", destination) - } - - in, err := os.Open(source) - if err != nil { - return err - } - defer in.Close() - - out, err := os.Create(destination) - if err != nil { - return err - } - defer out.Close() - - return fc.Compress(in, out) -} - -// DecompressFile reads the source file and decompresses it to destination. 
-func (fc FileCompressor) DecompressFile(source, destination string) error { - if fc.Decompressor == nil { - return fmt.Errorf("no decompressor specified") - } - if !fc.OverwriteExisting && fileExists(destination) { - return fmt.Errorf("file exists: %s", destination) - } - - in, err := os.Open(source) - if err != nil { - return err - } - defer in.Close() - - out, err := os.Create(destination) - if err != nil { - return err - } - defer out.Close() - - return fc.Decompress(in, out) -} diff --git a/vendor/github.com/mholt/archiver/v3/gz.go b/vendor/github.com/mholt/archiver/v3/gz.go deleted file mode 100644 index 650718d0f3..0000000000 --- a/vendor/github.com/mholt/archiver/v3/gz.go +++ /dev/null @@ -1,76 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "path/filepath" - - "github.com/klauspost/compress/gzip" - "github.com/klauspost/pgzip" -) - -// Gz facilitates gzip compression. -type Gz struct { - CompressionLevel int - SingleThreaded bool -} - -// Compress reads in, compresses it, and writes it to out. -func (gz *Gz) Compress(in io.Reader, out io.Writer) error { - var w io.WriteCloser - var err error - if gz.SingleThreaded { - w, err = gzip.NewWriterLevel(out, gz.CompressionLevel) - } else { - w, err = pgzip.NewWriterLevel(out, gz.CompressionLevel) - } - if err != nil { - return err - } - defer w.Close() - _, err = io.Copy(w, in) - return err -} - -// Decompress reads in, decompresses it, and writes it to out. -func (gz *Gz) Decompress(in io.Reader, out io.Writer) error { - var r io.ReadCloser - var err error - if gz.SingleThreaded { - r, err = gzip.NewReader(in) - } else { - r, err = pgzip.NewReader(in) - } - if err != nil { - return err - } - defer r.Close() - _, err = io.Copy(out, r) - return err -} - -// CheckExt ensures the file extension matches the format. 
-func (gz *Gz) CheckExt(filename string) error { - if filepath.Ext(filename) != ".gz" { - return fmt.Errorf("filename must have a .gz extension") - } - return nil -} - -func (gz *Gz) String() string { return "gz" } - -// NewGz returns a new, default instance ready to be customized and used. -func NewGz() *Gz { - return &Gz{ - CompressionLevel: gzip.DefaultCompression, - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Compressor(new(Gz)) - _ = Decompressor(new(Gz)) -) - -// DefaultGz is a default instance that is conveniently ready to use. -var DefaultGz = NewGz() diff --git a/vendor/github.com/mholt/archiver/v3/lz4.go b/vendor/github.com/mholt/archiver/v3/lz4.go deleted file mode 100644 index 3d6b0a212d..0000000000 --- a/vendor/github.com/mholt/archiver/v3/lz4.go +++ /dev/null @@ -1,63 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "path/filepath" - - "github.com/pierrec/lz4/v4" -) - -// Lz4 facilitates LZ4 compression. -type Lz4 struct { - CompressionLevel int -} - -// Compress reads in, compresses it, and writes it to out. -func (lz *Lz4) Compress(in io.Reader, out io.Writer) error { - w := lz4.NewWriter(out) - // TODO archiver v4: use proper lz4.Fast - // bitshifting for backwards compatibility with lz4/v3 - options := []lz4.Option{ - lz4.CompressionLevelOption(lz4.CompressionLevel(1 << (8 + lz.CompressionLevel))), - } - if err := w.Apply(options...); err != nil { - return err - } - defer w.Close() - _, err := io.Copy(w, in) - return err -} - -// Decompress reads in, decompresses it, and writes it to out. -func (lz *Lz4) Decompress(in io.Reader, out io.Writer) error { - r := lz4.NewReader(in) - _, err := io.Copy(out, r) - return err -} - -// CheckExt ensures the file extension matches the format. 
-func (lz *Lz4) CheckExt(filename string) error { - if filepath.Ext(filename) != ".lz4" { - return fmt.Errorf("filename must have a .lz4 extension") - } - return nil -} - -func (lz *Lz4) String() string { return "lz4" } - -// NewLz4 returns a new, default instance ready to be customized and used. -func NewLz4() *Lz4 { - return &Lz4{ - CompressionLevel: 9, // https://github.com/lz4/lz4/blob/1b819bfd633ae285df2dfe1b0589e1ec064f2873/lib/lz4hc.h#L48 - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Compressor(new(Lz4)) - _ = Decompressor(new(Lz4)) -) - -// DefaultLz4 is a default instance that is conveniently ready to use. -var DefaultLz4 = NewLz4() diff --git a/vendor/github.com/mholt/archiver/v3/rar.go b/vendor/github.com/mholt/archiver/v3/rar.go deleted file mode 100644 index 35fd60b676..0000000000 --- a/vendor/github.com/mholt/archiver/v3/rar.go +++ /dev/null @@ -1,446 +0,0 @@ -package archiver - -import ( - "bytes" - "fmt" - "io" - "log" - "os" - "path" - "path/filepath" - "strings" - "time" - - "github.com/nwaples/rardecode" -) - -// Rar provides facilities for reading RAR archives. -// See https://www.rarlab.com/technote.htm. -type Rar struct { - // Whether to overwrite existing files; if false, - // an error is returned if the file exists. - OverwriteExisting bool - - // Whether to make all the directories necessary - // to create a rar archive in the desired path. - MkdirAll bool - - // A single top-level folder can be implicitly - // created by the Unarchive method if the files - // to be extracted from the archive do not all - // have a common root. This roughly mimics the - // behavior of archival tools integrated into OS - // file browsers which create a subfolder to - // avoid unexpectedly littering the destination - // folder with potentially many files, causing a - // problematic cleanup/organization situation. 
- // This feature is available for both creation - // and extraction of archives, but may be slightly - // inefficient with lots and lots of files, - // especially on extraction. - ImplicitTopLevelFolder bool - - // Strip number of leading paths. This feature is available - // only during unpacking of the entire archive. - StripComponents int - - // If true, errors encountered during reading - // or writing a single file will be logged and - // the operation will continue on remaining files. - ContinueOnError bool - - // The password to open archives (optional). - Password string - - rr *rardecode.Reader // underlying stream reader - rc *rardecode.ReadCloser // supports multi-volume archives (files only) -} - -// CheckExt ensures the file extension matches the format. -func (*Rar) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".rar") { - return fmt.Errorf("filename must have a .rar extension") - } - return nil -} - -// CheckPath ensures that the filename has not been crafted to perform path traversal attacks -func (*Rar) CheckPath(to, filename string) error { - to, _ = filepath.Abs(to) //explicit the destination folder to prevent that 'string.HasPrefix' check can be 'bypassed' when no destination folder is supplied in input - dest := filepath.Join(to, filename) - //prevent path traversal attacks - if !strings.HasPrefix(dest, to) { - return &IllegalPathError{AbsolutePath: dest, Filename: filename} - } - return nil -} - -// Unarchive unpacks the .rar file at source to destination. -// Destination will be treated as a folder name. It supports -// multi-volume archives. 
-func (r *Rar) Unarchive(source, destination string) error { - if !fileExists(destination) && r.MkdirAll { - err := mkdir(destination, 0755) - if err != nil { - return fmt.Errorf("preparing destination: %v", err) - } - } - - // if the files in the archive do not all share a common - // root, then make sure we extract to a single subfolder - // rather than potentially littering the destination... - if r.ImplicitTopLevelFolder { - var err error - destination, err = r.addTopLevelFolder(source, destination) - if err != nil { - return fmt.Errorf("scanning source archive: %v", err) - } - } - - err := r.OpenFile(source) - if err != nil { - return fmt.Errorf("opening rar archive for reading: %v", err) - } - defer r.Close() - - for { - err := r.unrarNext(destination) - if err == io.EOF { - break - } - if err != nil { - if r.ContinueOnError || IsIllegalPathError(err) { - log.Printf("[ERROR] Reading file in rar archive: %v", err) - continue - } - return fmt.Errorf("reading file in rar archive: %v", err) - } - } - - return nil -} - -// addTopLevelFolder scans the files contained inside -// the tarball named sourceArchive and returns a modified -// destination if all the files do not share the same -// top-level folder. 
-func (r *Rar) addTopLevelFolder(sourceArchive, destination string) (string, error) { - file, err := os.Open(sourceArchive) - if err != nil { - return "", fmt.Errorf("opening source archive: %v", err) - } - defer file.Close() - - rc, err := rardecode.NewReader(file, r.Password) - if err != nil { - return "", fmt.Errorf("creating archive reader: %v", err) - } - - var files []string - for { - hdr, err := rc.Next() - if err == io.EOF { - break - } - if err != nil { - return "", fmt.Errorf("scanning tarball's file listing: %v", err) - } - files = append(files, hdr.Name) - } - - if multipleTopLevels(files) { - destination = filepath.Join(destination, folderNameFromFileName(sourceArchive)) - } - - return destination, nil -} - -func (r *Rar) unrarNext(to string) error { - f, err := r.Read() - if err != nil { - return err // don't wrap error; calling loop must break on io.EOF - } - defer f.Close() - - header, ok := f.Header.(*rardecode.FileHeader) - if !ok { - return fmt.Errorf("expected header to be *rardecode.FileHeader but was %T", f.Header) - } - - errPath := r.CheckPath(to, header.Name) - if errPath != nil { - return fmt.Errorf("checking path traversal attempt: %v", errPath) - } - - if r.StripComponents > 0 { - if strings.Count(header.Name, "/") < r.StripComponents { - return nil // skip path with fewer components - } - - for i := 0; i < r.StripComponents; i++ { - slash := strings.Index(header.Name, "/") - header.Name = header.Name[slash+1:] - } - } - - return r.unrarFile(f, filepath.Join(to, header.Name)) -} - -func (r *Rar) unrarFile(f File, to string) error { - // do not overwrite existing files, if configured - if !f.IsDir() && !r.OverwriteExisting && fileExists(to) { - return fmt.Errorf("file already exists: %s", to) - } - - hdr, ok := f.Header.(*rardecode.FileHeader) - if !ok { - return fmt.Errorf("expected header to be *rardecode.FileHeader but was %T", f.Header) - } - - if f.IsDir() { - if fileExists("testdata") { - err := os.Chmod(to, hdr.Mode()) - if err != 
nil { - return fmt.Errorf("changing dir mode: %v", err) - } - } else { - err := mkdir(to, hdr.Mode()) - if err != nil { - return fmt.Errorf("making directories: %v", err) - } - } - return nil - } - - // if files come before their containing folders, then we must - // create their folders before writing the file - err := mkdir(filepath.Dir(to), 0755) - if err != nil { - return fmt.Errorf("making parent directories: %v", err) - } - - if (hdr.Mode() & os.ModeSymlink) != 0 { - return nil - } - - return writeNewFile(to, r.rr, hdr.Mode()) -} - -// OpenFile opens filename for reading. This method supports -// multi-volume archives, whereas Open does not (but Open -// supports any stream, not just files). -func (r *Rar) OpenFile(filename string) error { - if r.rr != nil { - return fmt.Errorf("rar archive is already open for reading") - } - var err error - r.rc, err = rardecode.OpenReader(filename, r.Password) - if err != nil { - return err - } - r.rr = &r.rc.Reader - return nil -} - -// Open opens t for reading an archive from -// in. The size parameter is not used. -func (r *Rar) Open(in io.Reader, size int64) error { - if r.rr != nil { - return fmt.Errorf("rar archive is already open for reading") - } - var err error - r.rr, err = rardecode.NewReader(in, r.Password) - return err -} - -// Read reads the next file from t, which must have -// already been opened for reading. If there are no -// more files, the error is io.EOF. The File must -// be closed when finished reading from it. -func (r *Rar) Read() (File, error) { - if r.rr == nil { - return File{}, fmt.Errorf("rar archive is not open") - } - - hdr, err := r.rr.Next() - if err != nil { - return File{}, err // don't wrap error; preserve io.EOF - } - - file := File{ - FileInfo: rarFileInfo{hdr}, - Header: hdr, - ReadCloser: ReadFakeCloser{r.rr}, - } - - return file, nil -} - -// Close closes the rar archive(s) opened by Create and Open. 
-func (r *Rar) Close() error { - var err error - if r.rc != nil { - rc := r.rc - r.rc = nil - err = rc.Close() - } - if r.rr != nil { - r.rr = nil - } - return err -} - -// Walk calls walkFn for each visited item in archive. -func (r *Rar) Walk(archive string, walkFn WalkFunc) error { - file, err := os.Open(archive) - if err != nil { - return fmt.Errorf("opening archive file: %v", err) - } - defer file.Close() - - err = r.Open(file, 0) - if err != nil { - return fmt.Errorf("opening archive: %v", err) - } - defer r.Close() - - for { - f, err := r.Read() - if err == io.EOF { - break - } - if err != nil { - if r.ContinueOnError { - log.Printf("[ERROR] Opening next file: %v", err) - continue - } - return fmt.Errorf("opening next file: %v", err) - } - err = walkFn(f) - if err != nil { - if err == ErrStopWalk { - break - } - if r.ContinueOnError { - log.Printf("[ERROR] Walking %s: %v", f.Name(), err) - continue - } - return fmt.Errorf("walking %s: %v", f.Name(), err) - } - } - - return nil -} - -// Extract extracts a single file from the rar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. 
-func (r *Rar) Extract(source, target, destination string) error { - // target refers to a path inside the archive, which should be clean also - target = path.Clean(target) - - // if the target ends up being a directory, then - // we will continue walking and extracting files - // until we are no longer within that directory - var targetDirPath string - - return r.Walk(source, func(f File) error { - th, ok := f.Header.(*rardecode.FileHeader) - if !ok { - return fmt.Errorf("expected header to be *rardecode.FileHeader but was %T", f.Header) - } - - // importantly, cleaning the path strips tailing slash, - // which must be appended to folders within the archive - name := path.Clean(th.Name) - if f.IsDir() && target == name { - targetDirPath = path.Dir(name) - } - - if within(target, th.Name) { - // either this is the exact file we want, or is - // in the directory we want to extract - - // build the filename we will extract to - end, err := filepath.Rel(targetDirPath, th.Name) - if err != nil { - return fmt.Errorf("relativizing paths: %v", err) - } - joined := filepath.Join(destination, end) - - err = r.unrarFile(f, joined) - if err != nil { - return fmt.Errorf("extracting file %s: %v", th.Name, err) - } - - // if our target was not a directory, stop walk - if targetDirPath == "" { - return ErrStopWalk - } - } else if targetDirPath != "" { - // finished walking the entire directory - return ErrStopWalk - } - - return nil - }) -} - -// Match returns true if the format of file matches this -// type's format. It should not affect reader position. 
-func (*Rar) Match(file io.ReadSeeker) (bool, error) { - currentPos, err := file.Seek(0, io.SeekCurrent) - if err != nil { - return false, err - } - _, err = file.Seek(0, 0) - if err != nil { - return false, err - } - defer func() { - _, _ = file.Seek(currentPos, io.SeekStart) - }() - - buf := make([]byte, 8) - if n, err := file.Read(buf); err != nil || n < 8 { - return false, nil - } - hasRarHeader := bytes.Equal(buf[:7], []byte("Rar!\x1a\x07\x00")) || // ver 1.5 - bytes.Equal(buf, []byte("Rar!\x1a\x07\x01\x00")) // ver 5.0 - return hasRarHeader, nil -} - -func (r *Rar) String() string { return "rar" } - -// NewRar returns a new, default instance ready to be customized and used. -func NewRar() *Rar { - return &Rar{ - MkdirAll: true, - } -} - -type rarFileInfo struct { - fh *rardecode.FileHeader -} - -func (rfi rarFileInfo) Name() string { return rfi.fh.Name } -func (rfi rarFileInfo) Size() int64 { return rfi.fh.UnPackedSize } -func (rfi rarFileInfo) Mode() os.FileMode { return rfi.fh.Mode() } -func (rfi rarFileInfo) ModTime() time.Time { return rfi.fh.ModificationTime } -func (rfi rarFileInfo) IsDir() bool { return rfi.fh.IsDir } -func (rfi rarFileInfo) Sys() interface{} { return nil } - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(Rar)) - _ = Unarchiver(new(Rar)) - _ = Walker(new(Rar)) - _ = Extractor(new(Rar)) - _ = Matcher(new(Rar)) - _ = ExtensionChecker(new(Rar)) - _ = FilenameChecker(new(Rar)) - _ = os.FileInfo(rarFileInfo{}) -) - -// DefaultRar is a default instance that is conveniently ready to use. 
-var DefaultRar = NewRar() diff --git a/vendor/github.com/mholt/archiver/v3/sz.go b/vendor/github.com/mholt/archiver/v3/sz.go deleted file mode 100644 index 02009b528f..0000000000 --- a/vendor/github.com/mholt/archiver/v3/sz.go +++ /dev/null @@ -1,51 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "path/filepath" - - "github.com/golang/snappy" -) - -// Snappy facilitates Snappy compression. -type Snappy struct{} - -// Compress reads in, compresses it, and writes it to out. -func (s *Snappy) Compress(in io.Reader, out io.Writer) error { - w := snappy.NewBufferedWriter(out) - defer w.Close() - _, err := io.Copy(w, in) - return err -} - -// Decompress reads in, decompresses it, and writes it to out. -func (s *Snappy) Decompress(in io.Reader, out io.Writer) error { - r := snappy.NewReader(in) - _, err := io.Copy(out, r) - return err -} - -// CheckExt ensures the file extension matches the format. -func (s *Snappy) CheckExt(filename string) error { - if filepath.Ext(filename) != ".sz" { - return fmt.Errorf("filename must have a .sz extension") - } - return nil -} - -func (s *Snappy) String() string { return "sz" } - -// NewSnappy returns a new, default instance ready to be customized and used. -func NewSnappy() *Snappy { - return new(Snappy) -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Compressor(new(Snappy)) - _ = Decompressor(new(Snappy)) -) - -// DefaultSnappy is a default instance that is conveniently ready to use. 
-var DefaultSnappy = NewSnappy() diff --git a/vendor/github.com/mholt/archiver/v3/tar.go b/vendor/github.com/mholt/archiver/v3/tar.go deleted file mode 100644 index be8986657d..0000000000 --- a/vendor/github.com/mholt/archiver/v3/tar.go +++ /dev/null @@ -1,659 +0,0 @@ -package archiver - -import ( - "archive/tar" - "bytes" - "fmt" - "io" - "log" - "os" - "path" - "path/filepath" - "strconv" - "strings" -) - -// Tar provides facilities for operating TAR archives. -// See http://www.gnu.org/software/tar/manual/html_node/Standard.html. -type Tar struct { - // Whether to overwrite existing files; if false, - // an error is returned if the file exists. - OverwriteExisting bool - - // Whether to make all the directories necessary - // to create a tar archive in the desired path. - MkdirAll bool - - // A single top-level folder can be implicitly - // created by the Archive or Unarchive methods - // if the files to be added to the archive - // or the files to be extracted from the archive - // do not all have a common root. This roughly - // mimics the behavior of archival tools integrated - // into OS file browsers which create a subfolder - // to avoid unexpectedly littering the destination - // folder with potentially many files, causing a - // problematic cleanup/organization situation. - // This feature is available for both creation - // and extraction of archives, but may be slightly - // inefficient with lots and lots of files, - // especially on extraction. - ImplicitTopLevelFolder bool - - // Strip number of leading paths. This feature is available - // only during unpacking of the entire archive. - StripComponents int - - // If true, errors encountered during reading - // or writing a single file will be logged and - // the operation will continue on remaining files. 
- ContinueOnError bool - - tw *tar.Writer - tr *tar.Reader - - readerWrapFn func(io.Reader) (io.Reader, error) - writerWrapFn func(io.Writer) (io.Writer, error) - cleanupWrapFn func() -} - -// CheckExt ensures the file extension matches the format. -func (*Tar) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".tar") { - return fmt.Errorf("filename must have a .tar extension") - } - return nil -} - -// CheckPath ensures that the filename has not been crafted to perform path traversal attacks -func (*Tar) CheckPath(to, filename string) error { - to, _ = filepath.Abs(to) //explicit the destination folder to prevent that 'string.HasPrefix' check can be 'bypassed' when no destination folder is supplied in input - dest := filepath.Join(to, filename) - //prevent path traversal attacks - if !strings.HasPrefix(dest, to) { - return &IllegalPathError{AbsolutePath: dest, Filename: filename} - } - return nil -} - -// Archive creates a tarball file at destination containing -// the files listed in sources. The destination must end with -// ".tar". File paths can be those of regular files or -// directories; directories will be recursively added. 
-func (t *Tar) Archive(sources []string, destination string) error { - err := t.CheckExt(destination) - if t.writerWrapFn == nil && err != nil { - return fmt.Errorf("checking extension: %v", err) - } - if !t.OverwriteExisting && fileExists(destination) { - return fmt.Errorf("file already exists: %s", destination) - } - - // make the folder to contain the resulting archive - // if it does not already exist - destDir := filepath.Dir(destination) - if t.MkdirAll && !fileExists(destDir) { - err := mkdir(destDir, 0755) - if err != nil { - return fmt.Errorf("making folder for destination: %v", err) - } - } - - out, err := os.Create(destination) - if err != nil { - return fmt.Errorf("creating %s: %v", destination, err) - } - defer out.Close() - - err = t.Create(out) - if err != nil { - return fmt.Errorf("creating tar: %v", err) - } - defer t.Close() - - var topLevelFolder string - if t.ImplicitTopLevelFolder && multipleTopLevels(sources) { - topLevelFolder = folderNameFromFileName(destination) - } - - for _, source := range sources { - err := t.writeWalk(source, topLevelFolder, destination) - if err != nil { - return fmt.Errorf("walking %s: %v", source, err) - } - } - - return nil -} - -// Unarchive unpacks the .tar file at source to destination. -// Destination will be treated as a folder name. -func (t *Tar) Unarchive(source, destination string) error { - if !fileExists(destination) && t.MkdirAll { - err := mkdir(destination, 0755) - if err != nil { - return fmt.Errorf("preparing destination: %v", err) - } - } - - // if the files in the archive do not all share a common - // root, then make sure we extract to a single subfolder - // rather than potentially littering the destination... 
- if t.ImplicitTopLevelFolder { - var err error - destination, err = t.addTopLevelFolder(source, destination) - if err != nil { - return fmt.Errorf("scanning source archive: %v", err) - } - } - - file, err := os.Open(source) - if err != nil { - return fmt.Errorf("opening source archive: %v", err) - } - defer file.Close() - - err = t.Open(file, 0) - if err != nil { - return fmt.Errorf("opening tar archive for reading: %v", err) - } - defer t.Close() - - for { - err := t.untarNext(destination) - if err == io.EOF { - break - } - if err != nil { - if t.ContinueOnError || IsIllegalPathError(err) { - log.Printf("[ERROR] Reading file in tar archive: %v", err) - continue - } - return fmt.Errorf("reading file in tar archive: %v", err) - } - } - - return nil -} - -// addTopLevelFolder scans the files contained inside -// the tarball named sourceArchive and returns a modified -// destination if all the files do not share the same -// top-level folder. -func (t *Tar) addTopLevelFolder(sourceArchive, destination string) (string, error) { - file, err := os.Open(sourceArchive) - if err != nil { - return "", fmt.Errorf("opening source archive: %v", err) - } - defer file.Close() - - // if the reader is to be wrapped, ensure we do that now - // or we will not be able to read the archive successfully - reader := io.Reader(file) - if t.readerWrapFn != nil { - reader, err = t.readerWrapFn(reader) - if err != nil { - return "", fmt.Errorf("wrapping reader: %v", err) - } - } - if t.cleanupWrapFn != nil { - defer t.cleanupWrapFn() - } - - tr := tar.NewReader(reader) - - var files []string - for { - hdr, err := tr.Next() - if err == io.EOF { - break - } - if err != nil { - return "", fmt.Errorf("scanning tarball's file listing: %v", err) - } - files = append(files, hdr.Name) - } - - if multipleTopLevels(files) { - destination = filepath.Join(destination, folderNameFromFileName(sourceArchive)) - } - - return destination, nil -} - -func (t *Tar) untarNext(destination string) error { - f, err 
:= t.Read() - if err != nil { - return err // don't wrap error; calling loop must break on io.EOF - } - defer f.Close() - - header, ok := f.Header.(*tar.Header) - if !ok { - return fmt.Errorf("expected header to be *tar.Header but was %T", f.Header) - } - - errPath := t.CheckPath(destination, header.Name) - if errPath != nil { - return fmt.Errorf("checking path traversal attempt: %v", errPath) - } - - if t.StripComponents > 0 { - if strings.Count(header.Name, "/") < t.StripComponents { - return nil // skip path with fewer components - } - - for i := 0; i < t.StripComponents; i++ { - slash := strings.Index(header.Name, "/") - header.Name = header.Name[slash+1:] - } - } - return t.untarFile(f, destination, header) -} - -func (t *Tar) untarFile(f File, destination string, hdr *tar.Header) error { - to := filepath.Join(destination, hdr.Name) - - // do not overwrite existing files, if configured - if !f.IsDir() && !t.OverwriteExisting && fileExists(to) { - return fmt.Errorf("file already exists: %s", to) - } - - switch hdr.Typeflag { - case tar.TypeDir: - return mkdir(to, f.Mode()) - case tar.TypeReg, tar.TypeRegA, tar.TypeChar, tar.TypeBlock, tar.TypeFifo, tar.TypeGNUSparse: - return writeNewFile(to, f, f.Mode()) - case tar.TypeSymlink: - return writeNewSymbolicLink(to, hdr.Linkname) - case tar.TypeLink: - return writeNewHardLink(to, filepath.Join(destination, hdr.Linkname)) - case tar.TypeXGlobalHeader: - return nil // ignore the pax global header from git-generated tarballs - default: - return fmt.Errorf("%s: unknown type flag: %c", hdr.Name, hdr.Typeflag) - } -} - -func (t *Tar) writeWalk(source, topLevelFolder, destination string) error { - sourceInfo, err := os.Stat(source) - if err != nil { - return fmt.Errorf("%s: stat: %v", source, err) - } - destAbs, err := filepath.Abs(destination) - if err != nil { - return fmt.Errorf("%s: getting absolute path of destination %s: %v", source, destination, err) - } - - return filepath.Walk(source, func(fpath string, info 
os.FileInfo, err error) error { - handleErr := func(err error) error { - if t.ContinueOnError { - log.Printf("[ERROR] Walking %s: %v", fpath, err) - return nil - } - return err - } - if err != nil { - return handleErr(fmt.Errorf("traversing %s: %v", fpath, err)) - } - if info == nil { - return handleErr(fmt.Errorf("no file info")) - } - - // make sure we do not copy our output file into itself - fpathAbs, err := filepath.Abs(fpath) - if err != nil { - return handleErr(fmt.Errorf("%s: getting absolute path: %v", fpath, err)) - } - if within(fpathAbs, destAbs) { - return nil - } - - // build the name to be used within the archive - nameInArchive, err := makeNameInArchive(sourceInfo, source, topLevelFolder, fpath) - if err != nil { - return handleErr(err) - } - - var file io.ReadCloser - if info.Mode().IsRegular() { - file, err = os.Open(fpath) - if err != nil { - return handleErr(fmt.Errorf("%s: opening: %v", fpath, err)) - } - defer file.Close() - } - err = t.Write(File{ - FileInfo: FileInfo{ - FileInfo: info, - CustomName: nameInArchive, - SourcePath: fpath, - }, - ReadCloser: file, - }) - if err != nil { - return handleErr(fmt.Errorf("%s: writing: %s", fpath, err)) - } - - return nil - }) -} - -// Create opens t for writing a tar archive to out. -func (t *Tar) Create(out io.Writer) error { - if t.tw != nil { - return fmt.Errorf("tar archive is already created for writing") - } - - // wrapping writers allows us to output - // compressed tarballs, for example - if t.writerWrapFn != nil { - var err error - out, err = t.writerWrapFn(out) - if err != nil { - return fmt.Errorf("wrapping writer: %v", err) - } - } - - t.tw = tar.NewWriter(out) - return nil -} - -// Write writes f to t, which must have been opened for writing first. 
-func (t *Tar) Write(f File) error { - if t.tw == nil { - return fmt.Errorf("tar archive was not created for writing first") - } - if f.FileInfo == nil { - return fmt.Errorf("no file info") - } - if f.FileInfo.Name() == "" { - return fmt.Errorf("missing file name") - } - - var linkTarget string - if isSymlink(f) { - fi, ok := f.FileInfo.(FileInfo) - if !ok { - return fmt.Errorf("failed to cast fs.FileInfo to archiver.FileInfo: %v", f) - } - var err error - linkTarget, err = os.Readlink(fi.SourcePath) - if err != nil { - return fmt.Errorf("%s: readlink: %v", fi.SourcePath, err) - } - } - - hdr, err := tar.FileInfoHeader(f, filepath.ToSlash(linkTarget)) - if err != nil { - return fmt.Errorf("%s: making header: %v", f.Name(), err) - } - - err = t.tw.WriteHeader(hdr) - if err != nil { - return fmt.Errorf("%s: writing header: %w", hdr.Name, err) - } - - if f.IsDir() { - return nil // directories have no contents - } - - if hdr.Typeflag == tar.TypeReg { - if f.ReadCloser == nil { - return fmt.Errorf("%s: no way to read file contents", f.Name()) - } - _, err := io.Copy(t.tw, f) - if err != nil { - return fmt.Errorf("%s: copying contents: %w", f.Name(), err) - } - } - - return nil -} - -// Open opens t for reading an archive from -// in. The size parameter is not used. -func (t *Tar) Open(in io.Reader, size int64) error { - if t.tr != nil { - return fmt.Errorf("tar archive is already open for reading") - } - // wrapping readers allows us to open compressed tarballs - if t.readerWrapFn != nil { - var err error - in, err = t.readerWrapFn(in) - if err != nil { - return fmt.Errorf("wrapping file reader: %v", err) - } - } - t.tr = tar.NewReader(in) - return nil -} - -// Read reads the next file from t, which must have -// already been opened for reading. If there are no -// more files, the error is io.EOF. The File must -// be closed when finished reading from it. 
-func (t *Tar) Read() (File, error) { - if t.tr == nil { - return File{}, fmt.Errorf("tar archive is not open") - } - - hdr, err := t.tr.Next() - if err != nil { - return File{}, err // don't wrap error; preserve io.EOF - } - - file := File{ - FileInfo: hdr.FileInfo(), - Header: hdr, - ReadCloser: ReadFakeCloser{t.tr}, - } - - return file, nil -} - -// Close closes the tar archive(s) opened by Create and Open. -func (t *Tar) Close() error { - var err error - if t.tr != nil { - t.tr = nil - } - if t.tw != nil { - tw := t.tw - t.tw = nil - err = tw.Close() - } - // make sure cleanup of "Reader/Writer wrapper" - // (say that ten times fast) happens AFTER the - // underlying stream is closed - if t.cleanupWrapFn != nil { - t.cleanupWrapFn() - } - return err -} - -// Walk calls walkFn for each visited item in archive. -func (t *Tar) Walk(archive string, walkFn WalkFunc) error { - file, err := os.Open(archive) - if err != nil { - return fmt.Errorf("opening archive file: %v", err) - } - defer file.Close() - - err = t.Open(file, 0) - if err != nil { - return fmt.Errorf("opening archive: %v", err) - } - defer t.Close() - - for { - f, err := t.Read() - if err == io.EOF { - break - } - if err != nil { - if t.ContinueOnError { - log.Printf("[ERROR] Opening next file: %v", err) - continue - } - return fmt.Errorf("opening next file: %v", err) - } - err = walkFn(f) - if err != nil { - if err == ErrStopWalk { - break - } - if t.ContinueOnError { - log.Printf("[ERROR] Walking %s: %v", f.Name(), err) - continue - } - return fmt.Errorf("walking %s: %v", f.Name(), err) - } - } - - return nil -} - -// Extract extracts a single file from the tar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. 
-func (t *Tar) Extract(source, target, destination string) error { - // target refers to a path inside the archive, which should be clean also - target = path.Clean(target) - - // if the target ends up being a directory, then - // we will continue walking and extracting files - // until we are no longer within that directory - var targetDirPath string - - return t.Walk(source, func(f File) error { - th, ok := f.Header.(*tar.Header) - if !ok { - return fmt.Errorf("expected header to be *tar.Header but was %T", f.Header) - } - - // importantly, cleaning the path strips tailing slash, - // which must be appended to folders within the archive - name := path.Clean(th.Name) - if f.IsDir() && target == name { - targetDirPath = path.Dir(name) - } - - if within(target, th.Name) { - // either this is the exact file we want, or is - // in the directory we want to extract - - // build the filename we will extract to - end, err := filepath.Rel(targetDirPath, th.Name) - if err != nil { - return fmt.Errorf("relativizing paths: %v", err) - } - th.Name = end - - // relativize any hardlink names - if th.Typeflag == tar.TypeLink { - th.Linkname = filepath.Join(filepath.Base(filepath.Dir(th.Linkname)), filepath.Base(th.Linkname)) - } - - err = t.untarFile(f, destination, th) - if err != nil { - return fmt.Errorf("extracting file %s: %v", th.Name, err) - } - - // if our target was not a directory, stop walk - if targetDirPath == "" { - return ErrStopWalk - } - } else if targetDirPath != "" { - // finished walking the entire directory - return ErrStopWalk - } - - return nil - }) -} - -// Match returns true if the format of file matches this -// type's format. It should not affect reader position. 
-func (*Tar) Match(file io.ReadSeeker) (bool, error) { - currentPos, err := file.Seek(0, io.SeekCurrent) - if err != nil { - return false, err - } - _, err = file.Seek(0, 0) - if err != nil { - return false, err - } - defer func() { - _, _ = file.Seek(currentPos, io.SeekStart) - }() - - buf := make([]byte, tarBlockSize) - if _, err = io.ReadFull(file, buf); err != nil { - return false, nil - } - return hasTarHeader(buf), nil -} - -// hasTarHeader checks passed bytes has a valid tar header or not. buf must -// contain at least 512 bytes and if not, it always returns false. -func hasTarHeader(buf []byte) bool { - if len(buf) < tarBlockSize { - return false - } - - b := buf[148:156] - b = bytes.Trim(b, " \x00") // clean up all spaces and null bytes - if len(b) == 0 { - return false // unknown format - } - hdrSum, err := strconv.ParseUint(string(b), 8, 64) - if err != nil { - return false - } - - // According to the go official archive/tar, Sun tar uses signed byte - // values so this calcs both signed and unsigned - var usum uint64 - var sum int64 - for i, c := range buf { - if 148 <= i && i < 156 { - c = ' ' // checksum field itself is counted as branks - } - usum += uint64(uint8(c)) - sum += int64(int8(c)) - } - - if hdrSum != usum && int64(hdrSum) != sum { - return false // invalid checksum - } - - return true -} - -func (t *Tar) String() string { return "tar" } - -// NewTar returns a new, default instance ready to be customized and used. -func NewTar() *Tar { - return &Tar{ - MkdirAll: true, - } -} - -const tarBlockSize = 512 - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(Tar)) - _ = Writer(new(Tar)) - _ = Archiver(new(Tar)) - _ = Unarchiver(new(Tar)) - _ = Walker(new(Tar)) - _ = Extractor(new(Tar)) - _ = Matcher(new(Tar)) - _ = ExtensionChecker(new(Tar)) - _ = FilenameChecker(new(Tar)) -) - -// DefaultTar is a default instance that is conveniently ready to use. 
-var DefaultTar = NewTar() diff --git a/vendor/github.com/mholt/archiver/v3/tarbrotli.go b/vendor/github.com/mholt/archiver/v3/tarbrotli.go deleted file mode 100644 index 83a455d66a..0000000000 --- a/vendor/github.com/mholt/archiver/v3/tarbrotli.go +++ /dev/null @@ -1,114 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "strings" - - "github.com/andybalholm/brotli" -) - -// TarBrotli facilitates brotli compression of tarball archives. -type TarBrotli struct { - *Tar - Quality int -} - -// CheckExt ensures the file extension matches the format. -func (*TarBrotli) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".tar.br") && - !strings.HasSuffix(filename, ".tbr") { - return fmt.Errorf("filename must have a .tar.br or .tbr extension") - } - return nil -} - -// Archive creates a compressed tar file at destination -// containing the files listed in sources. The destination -// must end with ".tar.br" or ".tbr". File paths can be -// those of regular files or directories; directories will -// be recursively added. -func (tbr *TarBrotli) Archive(sources []string, destination string) error { - err := tbr.CheckExt(destination) - if err != nil { - return fmt.Errorf("output %s", err.Error()) - } - tbr.wrapWriter() - return tbr.Tar.Archive(sources, destination) -} - -// Unarchive unpacks the compressed tarball at -// source to destination. Destination will be -// treated as a folder name. -func (tbr *TarBrotli) Unarchive(source, destination string) error { - tbr.wrapReader() - return tbr.Tar.Unarchive(source, destination) -} - -// Walk calls walkFn for each visited item in archive. -func (tbr *TarBrotli) Walk(archive string, walkFn WalkFunc) error { - tbr.wrapReader() - return tbr.Tar.Walk(archive, walkFn) -} - -// Create opens txz for writing a compressed -// tar archive to out. 
-func (tbr *TarBrotli) Create(out io.Writer) error { - tbr.wrapWriter() - return tbr.Tar.Create(out) -} - -// Open opens t for reading a compressed archive from -// in. The size parameter is not used. -func (tbr *TarBrotli) Open(in io.Reader, size int64) error { - tbr.wrapReader() - return tbr.Tar.Open(in, size) -} - -// Extract extracts a single file from the tar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. -func (tbr *TarBrotli) Extract(source, target, destination string) error { - tbr.wrapReader() - return tbr.Tar.Extract(source, target, destination) -} - -func (tbr *TarBrotli) wrapWriter() { - var brw *brotli.Writer - tbr.Tar.writerWrapFn = func(w io.Writer) (io.Writer, error) { - brw = brotli.NewWriterLevel(w, tbr.Quality) - return brw, nil - } - tbr.Tar.cleanupWrapFn = func() { - brw.Close() - } -} - -func (tbr *TarBrotli) wrapReader() { - tbr.Tar.readerWrapFn = func(r io.Reader) (io.Reader, error) { - return brotli.NewReader(r), nil - } -} - -func (tbr *TarBrotli) String() string { return "tar.br" } - -// NewTarBrotli returns a new, default instance ready to be customized and used. -func NewTarBrotli() *TarBrotli { - return &TarBrotli{ - Tar: NewTar(), - Quality: brotli.DefaultCompression, - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(TarBrotli)) - _ = Writer(new(TarBrotli)) - _ = Archiver(new(TarBrotli)) - _ = Unarchiver(new(TarBrotli)) - _ = Walker(new(TarBrotli)) - _ = Extractor(new(TarBrotli)) -) - -// DefaultTarBrotli is a convenient archiver ready to use. 
-var DefaultTarBrotli = NewTarBrotli() diff --git a/vendor/github.com/mholt/archiver/v3/tarbz2.go b/vendor/github.com/mholt/archiver/v3/tarbz2.go deleted file mode 100644 index e5870a7d29..0000000000 --- a/vendor/github.com/mholt/archiver/v3/tarbz2.go +++ /dev/null @@ -1,126 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "strings" - - "github.com/dsnet/compress/bzip2" -) - -// TarBz2 facilitates bzip2 compression -// (https://github.com/dsnet/compress/blob/master/doc/bzip2-format.pdf) -// of tarball archives. -type TarBz2 struct { - *Tar - - CompressionLevel int -} - -// CheckExt ensures the file extension matches the format. -func (*TarBz2) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".tar.bz2") && - !strings.HasSuffix(filename, ".tbz2") { - return fmt.Errorf("filename must have a .tar.bz2 or .tbz2 extension") - } - return nil -} - -// Archive creates a compressed tar file at destination -// containing the files listed in sources. The destination -// must end with ".tar.bz2" or ".tbz2". File paths can be -// those of regular files or directories; directories will -// be recursively added. -func (tbz2 *TarBz2) Archive(sources []string, destination string) error { - err := tbz2.CheckExt(destination) - if err != nil { - return fmt.Errorf("output %s", err.Error()) - } - tbz2.wrapWriter() - return tbz2.Tar.Archive(sources, destination) -} - -// Unarchive unpacks the compressed tarball at -// source to destination. Destination will be -// treated as a folder name. -func (tbz2 *TarBz2) Unarchive(source, destination string) error { - tbz2.wrapReader() - return tbz2.Tar.Unarchive(source, destination) -} - -// Walk calls walkFn for each visited item in archive. -func (tbz2 *TarBz2) Walk(archive string, walkFn WalkFunc) error { - tbz2.wrapReader() - return tbz2.Tar.Walk(archive, walkFn) -} - -// Create opens tbz2 for writing a compressed -// tar archive to out. 
-func (tbz2 *TarBz2) Create(out io.Writer) error { - tbz2.wrapWriter() - return tbz2.Tar.Create(out) -} - -// Open opens t for reading a compressed archive from -// in. The size parameter is not used. -func (tbz2 *TarBz2) Open(in io.Reader, size int64) error { - tbz2.wrapReader() - return tbz2.Tar.Open(in, size) -} - -// Extract extracts a single file from the tar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. -func (tbz2 *TarBz2) Extract(source, target, destination string) error { - tbz2.wrapReader() - return tbz2.Tar.Extract(source, target, destination) -} - -func (tbz2 *TarBz2) wrapWriter() { - var bz2w *bzip2.Writer - tbz2.Tar.writerWrapFn = func(w io.Writer) (io.Writer, error) { - var err error - bz2w, err = bzip2.NewWriter(w, &bzip2.WriterConfig{ - Level: tbz2.CompressionLevel, - }) - return bz2w, err - } - tbz2.Tar.cleanupWrapFn = func() { - bz2w.Close() - } -} - -func (tbz2 *TarBz2) wrapReader() { - var bz2r *bzip2.Reader - tbz2.Tar.readerWrapFn = func(r io.Reader) (io.Reader, error) { - var err error - bz2r, err = bzip2.NewReader(r, nil) - return bz2r, err - } - tbz2.Tar.cleanupWrapFn = func() { - bz2r.Close() - } -} - -func (tbz2 *TarBz2) String() string { return "tar.bz2" } - -// NewTarBz2 returns a new, default instance ready to be customized and used. -func NewTarBz2() *TarBz2 { - return &TarBz2{ - CompressionLevel: bzip2.DefaultCompression, - Tar: NewTar(), - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(TarBz2)) - _ = Writer(new(TarBz2)) - _ = Archiver(new(TarBz2)) - _ = Unarchiver(new(TarBz2)) - _ = Walker(new(TarBz2)) - _ = Extractor(new(TarBz2)) -) - -// DefaultTarBz2 is a convenient archiver ready to use. 
-var DefaultTarBz2 = NewTarBz2() diff --git a/vendor/github.com/mholt/archiver/v3/targz.go b/vendor/github.com/mholt/archiver/v3/targz.go deleted file mode 100644 index 283fd01b2b..0000000000 --- a/vendor/github.com/mholt/archiver/v3/targz.go +++ /dev/null @@ -1,137 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "strings" - - "github.com/klauspost/compress/gzip" - "github.com/klauspost/pgzip" -) - -// TarGz facilitates gzip compression -// (RFC 1952) of tarball archives. -type TarGz struct { - *Tar - - // The compression level to use, as described - // in the compress/gzip package. - CompressionLevel int - - // Disables parallel gzip. - SingleThreaded bool -} - -// CheckExt ensures the file extension matches the format. -func (*TarGz) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".tar.gz") && - !strings.HasSuffix(filename, ".tgz") { - return fmt.Errorf("filename must have a .tar.gz or .tgz extension") - } - return nil -} - -// Archive creates a compressed tar file at destination -// containing the files listed in sources. The destination -// must end with ".tar.gz" or ".tgz". File paths can be -// those of regular files or directories; directories will -// be recursively added. -func (tgz *TarGz) Archive(sources []string, destination string) error { - err := tgz.CheckExt(destination) - if err != nil { - return fmt.Errorf("output %s", err.Error()) - } - tgz.wrapWriter() - return tgz.Tar.Archive(sources, destination) -} - -// Unarchive unpacks the compressed tarball at -// source to destination. Destination will be -// treated as a folder name. -func (tgz *TarGz) Unarchive(source, destination string) error { - tgz.wrapReader() - return tgz.Tar.Unarchive(source, destination) -} - -// Walk calls walkFn for each visited item in archive. 
-func (tgz *TarGz) Walk(archive string, walkFn WalkFunc) error { - tgz.wrapReader() - return tgz.Tar.Walk(archive, walkFn) -} - -// Create opens txz for writing a compressed -// tar archive to out. -func (tgz *TarGz) Create(out io.Writer) error { - tgz.wrapWriter() - return tgz.Tar.Create(out) -} - -// Open opens t for reading a compressed archive from -// in. The size parameter is not used. -func (tgz *TarGz) Open(in io.Reader, size int64) error { - tgz.wrapReader() - return tgz.Tar.Open(in, size) -} - -// Extract extracts a single file from the tar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. -func (tgz *TarGz) Extract(source, target, destination string) error { - tgz.wrapReader() - return tgz.Tar.Extract(source, target, destination) -} - -func (tgz *TarGz) wrapWriter() { - var gzw io.WriteCloser - tgz.Tar.writerWrapFn = func(w io.Writer) (io.Writer, error) { - var err error - if tgz.SingleThreaded { - gzw, err = gzip.NewWriterLevel(w, tgz.CompressionLevel) - } else { - gzw, err = pgzip.NewWriterLevel(w, tgz.CompressionLevel) - } - return gzw, err - } - tgz.Tar.cleanupWrapFn = func() { - gzw.Close() - } -} - -func (tgz *TarGz) wrapReader() { - var gzr io.ReadCloser - tgz.Tar.readerWrapFn = func(r io.Reader) (io.Reader, error) { - var err error - if tgz.SingleThreaded { - gzr, err = gzip.NewReader(r) - } else { - gzr, err = pgzip.NewReader(r) - } - return gzr, err - } - tgz.Tar.cleanupWrapFn = func() { - gzr.Close() - } -} - -func (tgz *TarGz) String() string { return "tar.gz" } - -// NewTarGz returns a new, default instance ready to be customized and used. -func NewTarGz() *TarGz { - return &TarGz{ - CompressionLevel: gzip.DefaultCompression, - Tar: NewTar(), - } -} - -// Compile-time checks to ensure type implements desired interfaces. 
-var ( - _ = Reader(new(TarGz)) - _ = Writer(new(TarGz)) - _ = Archiver(new(TarGz)) - _ = Unarchiver(new(TarGz)) - _ = Walker(new(TarGz)) - _ = Extractor(new(TarGz)) -) - -// DefaultTarGz is a convenient archiver ready to use. -var DefaultTarGz = NewTarGz() diff --git a/vendor/github.com/mholt/archiver/v3/tarlz4.go b/vendor/github.com/mholt/archiver/v3/tarlz4.go deleted file mode 100644 index 42cbc90bbb..0000000000 --- a/vendor/github.com/mholt/archiver/v3/tarlz4.go +++ /dev/null @@ -1,129 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "strings" - - "github.com/pierrec/lz4/v4" -) - -// TarLz4 facilitates lz4 compression -// (https://github.com/lz4/lz4/tree/master/doc) -// of tarball archives. -type TarLz4 struct { - *Tar - - // The compression level to use when writing. - // Minimum 0 (fast compression), maximum 12 - // (most space savings). - CompressionLevel int -} - -// CheckExt ensures the file extension matches the format. -func (*TarLz4) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".tar.lz4") && - !strings.HasSuffix(filename, ".tlz4") { - - return fmt.Errorf("filename must have a .tar.lz4 or .tlz4 extension") - } - return nil -} - -// Archive creates a compressed tar file at destination -// containing the files listed in sources. The destination -// must end with ".tar.lz4" or ".tlz4". File paths can be -// those of regular files or directories; directories will -// be recursively added. -func (tlz4 *TarLz4) Archive(sources []string, destination string) error { - err := tlz4.CheckExt(destination) - if err != nil { - return fmt.Errorf("output %s", err.Error()) - } - tlz4.wrapWriter() - return tlz4.Tar.Archive(sources, destination) -} - -// Unarchive unpacks the compressed tarball at -// source to destination. Destination will be -// treated as a folder name. 
-func (tlz4 *TarLz4) Unarchive(source, destination string) error { - tlz4.wrapReader() - return tlz4.Tar.Unarchive(source, destination) -} - -// Walk calls walkFn for each visited item in archive. -func (tlz4 *TarLz4) Walk(archive string, walkFn WalkFunc) error { - tlz4.wrapReader() - return tlz4.Tar.Walk(archive, walkFn) -} - -// Create opens tlz4 for writing a compressed -// tar archive to out. -func (tlz4 *TarLz4) Create(out io.Writer) error { - tlz4.wrapWriter() - return tlz4.Tar.Create(out) -} - -// Open opens t for reading a compressed archive from -// in. The size parameter is not used. -func (tlz4 *TarLz4) Open(in io.Reader, size int64) error { - tlz4.wrapReader() - return tlz4.Tar.Open(in, size) -} - -// Extract extracts a single file from the tar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. -func (tlz4 *TarLz4) Extract(source, target, destination string) error { - tlz4.wrapReader() - return tlz4.Tar.Extract(source, target, destination) -} - -func (tlz4 *TarLz4) wrapWriter() { - var lz4w *lz4.Writer - tlz4.Tar.writerWrapFn = func(w io.Writer) (io.Writer, error) { - lz4w = lz4.NewWriter(w) - // TODO archiver v4: use proper lz4.Fast - // bitshifting for backwards compatibility with lz4/v3 - options := []lz4.Option{ - lz4.CompressionLevelOption(lz4.CompressionLevel(1 << (8 + tlz4.CompressionLevel))), - } - if err := lz4w.Apply(options...); err != nil { - return lz4w, err - } - return lz4w, nil - } - tlz4.Tar.cleanupWrapFn = func() { - lz4w.Close() - } -} - -func (tlz4 *TarLz4) wrapReader() { - tlz4.Tar.readerWrapFn = func(r io.Reader) (io.Reader, error) { - return lz4.NewReader(r), nil - } -} - -func (tlz4 *TarLz4) String() string { return "tar.lz4" } - -// NewTarLz4 returns a new, default instance ready to be customized and used. 
-func NewTarLz4() *TarLz4 { - return &TarLz4{ - CompressionLevel: 9, // https://github.com/lz4/lz4/blob/1b819bfd633ae285df2dfe1b0589e1ec064f2873/lib/lz4hc.h#L48 - Tar: NewTar(), - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(TarLz4)) - _ = Writer(new(TarLz4)) - _ = Archiver(new(TarLz4)) - _ = Unarchiver(new(TarLz4)) - _ = Walker(new(TarLz4)) - _ = Extractor(new(TarLz4)) -) - -// DefaultTarLz4 is a convenient archiver ready to use. -var DefaultTarLz4 = NewTarLz4() diff --git a/vendor/github.com/mholt/archiver/v3/tarsz.go b/vendor/github.com/mholt/archiver/v3/tarsz.go deleted file mode 100644 index ee3808e63d..0000000000 --- a/vendor/github.com/mholt/archiver/v3/tarsz.go +++ /dev/null @@ -1,114 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "strings" - - "github.com/golang/snappy" -) - -// TarSz facilitates Snappy compression -// (https://github.com/google/snappy) -// of tarball archives. -type TarSz struct { - *Tar -} - -// CheckExt ensures the file extension matches the format. -func (*TarSz) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".tar.sz") && - !strings.HasSuffix(filename, ".tsz") { - return fmt.Errorf("filename must have a .tar.sz or .tsz extension") - } - return nil -} - -// Archive creates a compressed tar file at destination -// containing the files listed in sources. The destination -// must end with ".tar.sz" or ".tsz". File paths can be -// those of regular files or directories; directories will -// be recursively added. -func (tsz *TarSz) Archive(sources []string, destination string) error { - err := tsz.CheckExt(destination) - if err != nil { - return fmt.Errorf("output %s", err.Error()) - } - tsz.wrapWriter() - return tsz.Tar.Archive(sources, destination) -} - -// Unarchive unpacks the compressed tarball at -// source to destination. Destination will be -// treated as a folder name. 
-func (tsz *TarSz) Unarchive(source, destination string) error { - tsz.wrapReader() - return tsz.Tar.Unarchive(source, destination) -} - -// Walk calls walkFn for each visited item in archive. -func (tsz *TarSz) Walk(archive string, walkFn WalkFunc) error { - tsz.wrapReader() - return tsz.Tar.Walk(archive, walkFn) -} - -// Create opens tsz for writing a compressed -// tar archive to out. -func (tsz *TarSz) Create(out io.Writer) error { - tsz.wrapWriter() - return tsz.Tar.Create(out) -} - -// Open opens t for reading a compressed archive from -// in. The size parameter is not used. -func (tsz *TarSz) Open(in io.Reader, size int64) error { - tsz.wrapReader() - return tsz.Tar.Open(in, size) -} - -// Extract extracts a single file from the tar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. -func (tsz *TarSz) Extract(source, target, destination string) error { - tsz.wrapReader() - return tsz.Tar.Extract(source, target, destination) -} - -func (tsz *TarSz) wrapWriter() { - var sw *snappy.Writer - tsz.Tar.writerWrapFn = func(w io.Writer) (io.Writer, error) { - sw = snappy.NewBufferedWriter(w) - return sw, nil - } - tsz.Tar.cleanupWrapFn = func() { - sw.Close() - } -} - -func (tsz *TarSz) wrapReader() { - tsz.Tar.readerWrapFn = func(r io.Reader) (io.Reader, error) { - return snappy.NewReader(r), nil - } -} - -func (tsz *TarSz) String() string { return "tar.sz" } - -// NewTarSz returns a new, default instance ready to be customized and used. -func NewTarSz() *TarSz { - return &TarSz{ - Tar: NewTar(), - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(TarSz)) - _ = Writer(new(TarSz)) - _ = Archiver(new(TarSz)) - _ = Unarchiver(new(TarSz)) - _ = Walker(new(TarSz)) - _ = Extractor(new(TarSz)) -) - -// DefaultTarSz is a convenient archiver ready to use. 
-var DefaultTarSz = NewTarSz() diff --git a/vendor/github.com/mholt/archiver/v3/tarxz.go b/vendor/github.com/mholt/archiver/v3/tarxz.go deleted file mode 100644 index 5679a067be..0000000000 --- a/vendor/github.com/mholt/archiver/v3/tarxz.go +++ /dev/null @@ -1,119 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "strings" - - "github.com/ulikunitz/xz" - fastxz "github.com/xi2/xz" -) - -// TarXz facilitates xz compression -// (https://tukaani.org/xz/format.html) -// of tarball archives. -type TarXz struct { - *Tar -} - -// CheckExt ensures the file extension matches the format. -func (*TarXz) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".tar.xz") && - !strings.HasSuffix(filename, ".txz") { - return fmt.Errorf("filename must have a .tar.xz or .txz extension") - } - return nil -} - -// Archive creates a compressed tar file at destination -// containing the files listed in sources. The destination -// must end with ".tar.xz" or ".txz". File paths can be -// those of regular files or directories; directories will -// be recursively added. -func (txz *TarXz) Archive(sources []string, destination string) error { - err := txz.CheckExt(destination) - if err != nil { - return fmt.Errorf("output %s", err.Error()) - } - txz.wrapWriter() - return txz.Tar.Archive(sources, destination) -} - -// Unarchive unpacks the compressed tarball at -// source to destination. Destination will be -// treated as a folder name. -func (txz *TarXz) Unarchive(source, destination string) error { - txz.wrapReader() - return txz.Tar.Unarchive(source, destination) -} - -// Walk calls walkFn for each visited item in archive. -func (txz *TarXz) Walk(archive string, walkFn WalkFunc) error { - txz.wrapReader() - return txz.Tar.Walk(archive, walkFn) -} - -// Create opens txz for writing a compressed -// tar archive to out. 
-func (txz *TarXz) Create(out io.Writer) error { - txz.wrapWriter() - return txz.Tar.Create(out) -} - -// Open opens t for reading a compressed archive from -// in. The size parameter is not used. -func (txz *TarXz) Open(in io.Reader, size int64) error { - txz.wrapReader() - return txz.Tar.Open(in, size) -} - -// Extract extracts a single file from the tar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. -func (txz *TarXz) Extract(source, target, destination string) error { - txz.wrapReader() - return txz.Tar.Extract(source, target, destination) -} - -func (txz *TarXz) wrapWriter() { - var xzw *xz.Writer - txz.Tar.writerWrapFn = func(w io.Writer) (io.Writer, error) { - var err error - xzw, err = xz.NewWriter(w) - return xzw, err - } - txz.Tar.cleanupWrapFn = func() { - xzw.Close() - } -} - -func (txz *TarXz) wrapReader() { - var xzr *fastxz.Reader - txz.Tar.readerWrapFn = func(r io.Reader) (io.Reader, error) { - var err error - xzr, err = fastxz.NewReader(r, 0) - return xzr, err - } -} - -func (txz *TarXz) String() string { return "tar.xz" } - -// NewTarXz returns a new, default instance ready to be customized and used. -func NewTarXz() *TarXz { - return &TarXz{ - Tar: NewTar(), - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(TarXz)) - _ = Writer(new(TarXz)) - _ = Archiver(new(TarXz)) - _ = Unarchiver(new(TarXz)) - _ = Walker(new(TarXz)) - _ = Extractor(new(TarXz)) -) - -// DefaultTarXz is a convenient archiver ready to use. 
-var DefaultTarXz = NewTarXz() diff --git a/vendor/github.com/mholt/archiver/v3/tarzst.go b/vendor/github.com/mholt/archiver/v3/tarzst.go deleted file mode 100644 index 3b2fe43189..0000000000 --- a/vendor/github.com/mholt/archiver/v3/tarzst.go +++ /dev/null @@ -1,120 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "strings" - - "github.com/klauspost/compress/zstd" -) - -// TarZstd facilitates Zstandard compression -// (RFC 8478) of tarball archives. -type TarZstd struct { - *Tar -} - -// CheckExt ensures the file extension matches the format. -func (*TarZstd) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".tar.zst") { - return fmt.Errorf("filename must have a .tar.zst extension") - } - return nil -} - -// Archive creates a compressed tar file at destination -// containing the files listed in sources. The destination -// must end with ".tar.zst" or ".tzst". File paths can be -// those of regular files or directories; directories will -// be recursively added. -func (tzst *TarZstd) Archive(sources []string, destination string) error { - err := tzst.CheckExt(destination) - if err != nil { - return fmt.Errorf("output %s", err.Error()) - } - tzst.wrapWriter() - return tzst.Tar.Archive(sources, destination) -} - -// Unarchive unpacks the compressed tarball at -// source to destination. Destination will be -// treated as a folder name. -func (tzst *TarZstd) Unarchive(source, destination string) error { - tzst.wrapReader() - return tzst.Tar.Unarchive(source, destination) -} - -// Walk calls walkFn for each visited item in archive. -func (tzst *TarZstd) Walk(archive string, walkFn WalkFunc) error { - tzst.wrapReader() - return tzst.Tar.Walk(archive, walkFn) -} - -// Create opens txz for writing a compressed -// tar archive to out. -func (tzst *TarZstd) Create(out io.Writer) error { - tzst.wrapWriter() - return tzst.Tar.Create(out) -} - -// Open opens t for reading a compressed archive from -// in. 
The size parameter is not used. -func (tzst *TarZstd) Open(in io.Reader, size int64) error { - tzst.wrapReader() - return tzst.Tar.Open(in, size) -} - -// Extract extracts a single file from the tar archive. -// If the target is a directory, the entire folder will -// be extracted into destination. -func (tzst *TarZstd) Extract(source, target, destination string) error { - tzst.wrapReader() - return tzst.Tar.Extract(source, target, destination) -} - -func (tzst *TarZstd) wrapWriter() { - var zstdw *zstd.Encoder - tzst.Tar.writerWrapFn = func(w io.Writer) (io.Writer, error) { - var err error - zstdw, err = zstd.NewWriter(w) - return zstdw, err - } - tzst.Tar.cleanupWrapFn = func() { - zstdw.Close() - } -} - -func (tzst *TarZstd) wrapReader() { - var zstdr *zstd.Decoder - tzst.Tar.readerWrapFn = func(r io.Reader) (io.Reader, error) { - var err error - zstdr, err = zstd.NewReader(r) - return zstdr, err - } - tzst.Tar.cleanupWrapFn = func() { - zstdr.Close() - } -} - -func (tzst *TarZstd) String() string { return "tar.zst" } - -// NewTarZstd returns a new, default instance ready to be customized and used. -func NewTarZstd() *TarZstd { - return &TarZstd{ - Tar: NewTar(), - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(TarZstd)) - _ = Writer(new(TarZstd)) - _ = Archiver(new(TarZstd)) - _ = Unarchiver(new(TarZstd)) - _ = Walker(new(TarZstd)) - _ = ExtensionChecker(new(TarZstd)) - _ = Extractor(new(TarZstd)) -) - -// DefaultTarZstd is a convenient archiver ready to use. 
-var DefaultTarZstd = NewTarZstd() diff --git a/vendor/github.com/mholt/archiver/v3/xz.go b/vendor/github.com/mholt/archiver/v3/xz.go deleted file mode 100644 index c60d5eaec6..0000000000 --- a/vendor/github.com/mholt/archiver/v3/xz.go +++ /dev/null @@ -1,58 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "path/filepath" - - "github.com/ulikunitz/xz" - fastxz "github.com/xi2/xz" -) - -// Xz facilitates XZ compression. -type Xz struct{} - -// Compress reads in, compresses it, and writes it to out. -func (x *Xz) Compress(in io.Reader, out io.Writer) error { - w, err := xz.NewWriter(out) - if err != nil { - return err - } - defer w.Close() - _, err = io.Copy(w, in) - return err -} - -// Decompress reads in, decompresses it, and writes it to out. -func (x *Xz) Decompress(in io.Reader, out io.Writer) error { - r, err := fastxz.NewReader(in, 0) - if err != nil { - return err - } - _, err = io.Copy(out, r) - return err -} - -// CheckExt ensures the file extension matches the format. -func (x *Xz) CheckExt(filename string) error { - if filepath.Ext(filename) != ".xz" { - return fmt.Errorf("filename must have a .xz extension") - } - return nil -} - -func (x *Xz) String() string { return "xz" } - -// NewXz returns a new, default instance ready to be customized and used. -func NewXz() *Xz { - return new(Xz) -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Compressor(new(Xz)) - _ = Decompressor(new(Xz)) -) - -// DefaultXz is a default instance that is conveniently ready to use. 
-var DefaultXz = NewXz() diff --git a/vendor/github.com/mholt/archiver/v3/zip.go b/vendor/github.com/mholt/archiver/v3/zip.go deleted file mode 100644 index c6af8efbc6..0000000000 --- a/vendor/github.com/mholt/archiver/v3/zip.go +++ /dev/null @@ -1,711 +0,0 @@ -package archiver - -import ( - "bytes" - "compress/flate" - "fmt" - "io" - "io/ioutil" - "log" - "os" - "path" - "path/filepath" - "strings" - - "github.com/dsnet/compress/bzip2" - "github.com/klauspost/compress/zip" - "github.com/klauspost/compress/zstd" - "github.com/ulikunitz/xz" -) - -// ZipCompressionMethod Compression type -type ZipCompressionMethod uint16 - -// Compression methods. -// see https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT. -// Note LZMA: Disabled - because 7z isn't able to unpack ZIP+LZMA ZIP+LZMA2 archives made this way - and vice versa. -const ( - Store ZipCompressionMethod = 0 - Deflate ZipCompressionMethod = 8 - BZIP2 ZipCompressionMethod = 12 - LZMA ZipCompressionMethod = 14 - ZSTD ZipCompressionMethod = 93 - XZ ZipCompressionMethod = 95 -) - -// Zip provides facilities for operating ZIP archives. -// See https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT. -type Zip struct { - // The compression level to use, as described - // in the compress/flate package. - CompressionLevel int - - // Whether to overwrite existing files; if false, - // an error is returned if the file exists. - OverwriteExisting bool - - // Whether to make all the directories necessary - // to create a zip archive in the desired path. - MkdirAll bool - - // If enabled, selective compression will only - // compress files which are not already in a - // compressed format; this is decided based - // simply on file extension. 
- SelectiveCompression bool - - // A single top-level folder can be implicitly - // created by the Archive or Unarchive methods - // if the files to be added to the archive - // or the files to be extracted from the archive - // do not all have a common root. This roughly - // mimics the behavior of archival tools integrated - // into OS file browsers which create a subfolder - // to avoid unexpectedly littering the destination - // folder with potentially many files, causing a - // problematic cleanup/organization situation. - // This feature is available for both creation - // and extraction of archives, but may be slightly - // inefficient with lots and lots of files, - // especially on extraction. - ImplicitTopLevelFolder bool - - // Strip number of leading paths. This feature is available - // only during unpacking of the entire archive. - StripComponents int - - // If true, errors encountered during reading - // or writing a single file will be logged and - // the operation will continue on remaining files. - ContinueOnError bool - - // Compression algorithm - FileMethod ZipCompressionMethod - zw *zip.Writer - zr *zip.Reader - ridx int - //decinitialized bool -} - -// CheckExt ensures the file extension matches the format. 
-func (*Zip) CheckExt(filename string) error { - if !strings.HasSuffix(filename, ".zip") { - return fmt.Errorf("filename must have a .zip extension") - } - return nil -} - -// Registering a global decompressor is not reentrant and may panic -func registerDecompressor(zr *zip.Reader) { - // register zstd decompressor - zr.RegisterDecompressor(uint16(ZSTD), func(r io.Reader) io.ReadCloser { - zr, err := zstd.NewReader(r) - if err != nil { - return nil - } - return zr.IOReadCloser() - }) - zr.RegisterDecompressor(uint16(BZIP2), func(r io.Reader) io.ReadCloser { - bz2r, err := bzip2.NewReader(r, nil) - if err != nil { - return nil - } - return bz2r - }) - zr.RegisterDecompressor(uint16(XZ), func(r io.Reader) io.ReadCloser { - xr, err := xz.NewReader(r) - if err != nil { - return nil - } - return ioutil.NopCloser(xr) - }) -} - -// CheckPath ensures the file extension matches the format. -func (*Zip) CheckPath(to, filename string) error { - to, _ = filepath.Abs(to) //explicit the destination folder to prevent that 'string.HasPrefix' check can be 'bypassed' when no destination folder is supplied in input - dest := filepath.Join(to, filename) - //prevent path traversal attacks - if !strings.HasPrefix(dest, to) { - return &IllegalPathError{AbsolutePath: dest, Filename: filename} - } - return nil -} - -// Archive creates a .zip file at destination containing -// the files listed in sources. The destination must end -// with ".zip". File paths can be those of regular files -// or directories. Regular files are stored at the 'root' -// of the archive, and directories are recursively added. 
-func (z *Zip) Archive(sources []string, destination string) error { - err := z.CheckExt(destination) - if err != nil { - return fmt.Errorf("checking extension: %v", err) - } - if !z.OverwriteExisting && fileExists(destination) { - return fmt.Errorf("file already exists: %s", destination) - } - - // make the folder to contain the resulting archive - // if it does not already exist - destDir := filepath.Dir(destination) - if z.MkdirAll && !fileExists(destDir) { - err := mkdir(destDir, 0755) - if err != nil { - return fmt.Errorf("making folder for destination: %v", err) - } - } - - out, err := os.Create(destination) - if err != nil { - return fmt.Errorf("creating %s: %v", destination, err) - } - defer out.Close() - - err = z.Create(out) - if err != nil { - return fmt.Errorf("creating zip: %v", err) - } - defer z.Close() - - var topLevelFolder string - if z.ImplicitTopLevelFolder && multipleTopLevels(sources) { - topLevelFolder = folderNameFromFileName(destination) - } - - for _, source := range sources { - err := z.writeWalk(source, topLevelFolder, destination) - if err != nil { - return fmt.Errorf("walking %s: %v", source, err) - } - } - - return nil -} - -// Unarchive unpacks the .zip file at source to destination. -// Destination will be treated as a folder name. 
-func (z *Zip) Unarchive(source, destination string) error { - if !fileExists(destination) && z.MkdirAll { - err := mkdir(destination, 0755) - if err != nil { - return fmt.Errorf("preparing destination: %v", err) - } - } - - file, err := os.Open(source) - if err != nil { - return fmt.Errorf("opening source file: %v", err) - } - defer file.Close() - - fileInfo, err := file.Stat() - if err != nil { - return fmt.Errorf("statting source file: %v", err) - } - - err = z.Open(file, fileInfo.Size()) - if err != nil { - return fmt.Errorf("opening zip archive for reading: %v", err) - } - defer z.Close() - - // if the files in the archive do not all share a common - // root, then make sure we extract to a single subfolder - // rather than potentially littering the destination... - if z.ImplicitTopLevelFolder { - files := make([]string, len(z.zr.File)) - for i := range z.zr.File { - files[i] = z.zr.File[i].Name - } - if multipleTopLevels(files) { - destination = filepath.Join(destination, folderNameFromFileName(source)) - } - } - - for { - err := z.extractNext(destination) - if err == io.EOF { - break - } - if err != nil { - if z.ContinueOnError || IsIllegalPathError(err) { - log.Printf("[ERROR] Reading file in zip archive: %v", err) - continue - } - return fmt.Errorf("reading file in zip archive: %v", err) - } - } - - return nil -} - -func (z *Zip) extractNext(to string) error { - f, err := z.Read() - if err != nil { - return err // don't wrap error; calling loop must break on io.EOF - } - defer f.Close() - - header, ok := f.Header.(zip.FileHeader) - if !ok { - return fmt.Errorf("expected header to be zip.FileHeader but was %T", f.Header) - } - - errPath := z.CheckPath(to, header.Name) - if errPath != nil { - return fmt.Errorf("checking path traversal attempt: %v", errPath) - } - - if z.StripComponents > 0 { - if strings.Count(header.Name, "/") < z.StripComponents { - return nil // skip path with fewer components - } - - for i := 0; i < z.StripComponents; i++ { - slash := 
strings.Index(header.Name, "/") - header.Name = header.Name[slash+1:] - } - } - return z.extractFile(f, to, &header) -} - -func (z *Zip) extractFile(f File, to string, header *zip.FileHeader) error { - to = filepath.Join(to, header.Name) - - // if a directory, no content; simply make the directory and return - if f.IsDir() { - return mkdir(to, f.Mode()) - } - - // do not overwrite existing files, if configured - if !z.OverwriteExisting && fileExists(to) { - return fmt.Errorf("file already exists: %s", to) - } - - // extract symbolic links as symbolic links - if isSymlink(header.FileInfo()) { - // symlink target is the contents of the file - buf := new(bytes.Buffer) - _, err := io.Copy(buf, f) - if err != nil { - return fmt.Errorf("%s: reading symlink target: %v", header.Name, err) - } - return writeNewSymbolicLink(to, strings.TrimSpace(buf.String())) - } - - return writeNewFile(to, f, f.Mode()) -} - -func (z *Zip) writeWalk(source, topLevelFolder, destination string) error { - sourceInfo, err := os.Stat(source) - if err != nil { - return fmt.Errorf("%s: stat: %v", source, err) - } - destAbs, err := filepath.Abs(destination) - if err != nil { - return fmt.Errorf("%s: getting absolute path of destination %s: %v", source, destination, err) - } - - return filepath.Walk(source, func(fpath string, info os.FileInfo, err error) error { - handleErr := func(err error) error { - if z.ContinueOnError { - log.Printf("[ERROR] Walking %s: %v", fpath, err) - return nil - } - return err - } - if err != nil { - return handleErr(fmt.Errorf("traversing %s: %v", fpath, err)) - } - if info == nil { - return handleErr(fmt.Errorf("%s: no file info", fpath)) - } - - // make sure we do not copy the output file into the output - // file; that results in an infinite loop and disk exhaustion! 
- fpathAbs, err := filepath.Abs(fpath) - if err != nil { - return handleErr(fmt.Errorf("%s: getting absolute path: %v", fpath, err)) - } - if within(fpathAbs, destAbs) { - return nil - } - - // build the name to be used within the archive - nameInArchive, err := makeNameInArchive(sourceInfo, source, topLevelFolder, fpath) - if err != nil { - return handleErr(err) - } - - var file io.ReadCloser - if info.Mode().IsRegular() { - file, err = os.Open(fpath) - if err != nil { - return handleErr(fmt.Errorf("%s: opening: %v", fpath, err)) - } - defer file.Close() - } - err = z.Write(File{ - FileInfo: FileInfo{ - FileInfo: info, - CustomName: nameInArchive, - SourcePath: fpath, - }, - ReadCloser: file, - }) - if err != nil { - return handleErr(fmt.Errorf("%s: writing: %s", fpath, err)) - } - - return nil - }) -} - -// Create opens z for writing a ZIP archive to out. -func (z *Zip) Create(out io.Writer) error { - if z.zw != nil { - return fmt.Errorf("zip archive is already created for writing") - } - z.zw = zip.NewWriter(out) - if z.CompressionLevel != flate.DefaultCompression { - z.zw.RegisterCompressor(zip.Deflate, func(out io.Writer) (io.WriteCloser, error) { - return flate.NewWriter(out, z.CompressionLevel) - }) - } - switch z.FileMethod { - case BZIP2: - z.zw.RegisterCompressor(uint16(BZIP2), func(out io.Writer) (io.WriteCloser, error) { - return bzip2.NewWriter(out, &bzip2.WriterConfig{Level: z.CompressionLevel}) - }) - case ZSTD: - z.zw.RegisterCompressor(uint16(ZSTD), func(out io.Writer) (io.WriteCloser, error) { - return zstd.NewWriter(out) - }) - case XZ: - z.zw.RegisterCompressor(uint16(XZ), func(out io.Writer) (io.WriteCloser, error) { - return xz.NewWriter(out) - }) - } - return nil -} - -// Write writes f to z, which must have been opened for writing first. 
-func (z *Zip) Write(f File) error { - if z.zw == nil { - return fmt.Errorf("zip archive was not created for writing first") - } - if f.FileInfo == nil { - return fmt.Errorf("no file info") - } - if f.FileInfo.Name() == "" { - return fmt.Errorf("missing file name") - } - - header, err := zip.FileInfoHeader(f) - if err != nil { - return fmt.Errorf("%s: getting header: %v", f.Name(), err) - } - - if f.IsDir() { - header.Name += "/" // required - strangely no mention of this in zip spec? but is in godoc... - header.Method = zip.Store - } else { - ext := strings.ToLower(path.Ext(header.Name)) - if _, ok := compressedFormats[ext]; ok && z.SelectiveCompression { - header.Method = zip.Store - } else { - header.Method = uint16(z.FileMethod) - } - } - - writer, err := z.zw.CreateHeader(header) - if err != nil { - return fmt.Errorf("%s: making header: %w", f.Name(), err) - } - - return z.writeFile(f, writer) -} - -func (z *Zip) writeFile(f File, writer io.Writer) error { - if f.IsDir() { - return nil // directories have no contents - } - if isSymlink(f) { - fi, ok := f.FileInfo.(FileInfo) - if !ok { - return fmt.Errorf("failed to cast fs.FileInfo to archiver.FileInfo: %v", f) - } - // file body for symlinks is the symlink target - linkTarget, err := os.Readlink(fi.SourcePath) - if err != nil { - return fmt.Errorf("%s: readlink: %v", fi.SourcePath, err) - } - _, err = writer.Write([]byte(filepath.ToSlash(linkTarget))) - if err != nil { - return fmt.Errorf("%s: writing symlink target: %v", fi.SourcePath, err) - } - return nil - } - - if f.ReadCloser == nil { - return fmt.Errorf("%s: no way to read file contents", f.Name()) - } - _, err := io.Copy(writer, f) - if err != nil { - return fmt.Errorf("%s: copying contents: %w", f.Name(), err) - } - - return nil -} - -// Open opens z for reading an archive from in, -// which is expected to have the given size and -// which must be an io.ReaderAt. 
-func (z *Zip) Open(in io.Reader, size int64) error { - inRdrAt, ok := in.(io.ReaderAt) - if !ok { - return fmt.Errorf("reader must be io.ReaderAt") - } - if z.zr != nil { - return fmt.Errorf("zip archive is already open for reading") - } - var err error - z.zr, err = zip.NewReader(inRdrAt, size) - if err != nil { - return fmt.Errorf("creating reader: %v", err) - } - registerDecompressor(z.zr) - z.ridx = 0 - return nil -} - -// Read reads the next file from z, which must have -// already been opened for reading. If there are no -// more files, the error is io.EOF. The File must -// be closed when finished reading from it. -func (z *Zip) Read() (File, error) { - if z.zr == nil { - return File{}, fmt.Errorf("zip archive is not open") - } - if z.ridx >= len(z.zr.File) { - return File{}, io.EOF - } - - // access the file and increment counter so that - // if there is an error processing this file, the - // caller can still iterate to the next file - zf := z.zr.File[z.ridx] - z.ridx++ - - file := File{ - FileInfo: zf.FileInfo(), - Header: zf.FileHeader, - } - - rc, err := zf.Open() - if err != nil { - return file, fmt.Errorf("%s: open compressed file: %v", zf.Name, err) - } - file.ReadCloser = rc - - return file, nil -} - -// Close closes the zip archive(s) opened by Create and Open. -func (z *Zip) Close() error { - if z.zr != nil { - z.zr = nil - } - if z.zw != nil { - zw := z.zw - z.zw = nil - return zw.Close() - } - return nil -} - -// Walk calls walkFn for each visited item in archive. 
-func (z *Zip) Walk(archive string, walkFn WalkFunc) error { - zr, err := zip.OpenReader(archive) - if err != nil { - return fmt.Errorf("opening zip reader: %v", err) - } - defer zr.Close() - registerDecompressor(&zr.Reader) - for _, zf := range zr.File { - zfrc, err := zf.Open() - if err != nil { - if zfrc != nil { - zfrc.Close() - } - if z.ContinueOnError { - log.Printf("[ERROR] Opening %s: %v", zf.Name, err) - continue - } - return fmt.Errorf("opening %s: %v", zf.Name, err) - } - - err = walkFn(File{ - FileInfo: zf.FileInfo(), - Header: zf.FileHeader, - ReadCloser: zfrc, - }) - zfrc.Close() - if err != nil { - if err == ErrStopWalk { - break - } - if z.ContinueOnError { - log.Printf("[ERROR] Walking %s: %v", zf.Name, err) - continue - } - return fmt.Errorf("walking %s: %v", zf.Name, err) - } - } - - return nil -} - -// Extract extracts a single file from the zip archive. -// If the target is a directory, the entire folder will -// be extracted into destination. -func (z *Zip) Extract(source, target, destination string) error { - // target refers to a path inside the archive, which should be clean also - target = path.Clean(target) - - // if the target ends up being a directory, then - // we will continue walking and extracting files - // until we are no longer within that directory - var targetDirPath string - - return z.Walk(source, func(f File) error { - zfh, ok := f.Header.(zip.FileHeader) - if !ok { - return fmt.Errorf("expected header to be zip.FileHeader but was %T", f.Header) - } - - // importantly, cleaning the path strips tailing slash, - // which must be appended to folders within the archive - name := path.Clean(zfh.Name) - if f.IsDir() && target == name { - targetDirPath = path.Dir(name) - } - - if within(target, zfh.Name) { - // either this is the exact file we want, or is - // in the directory we want to extract - - // build the filename we will extract to - end, err := filepath.Rel(targetDirPath, zfh.Name) - if err != nil { - return 
fmt.Errorf("relativizing paths: %v", err) - } - joined := filepath.Join(destination, end) - - err = z.extractFile(f, joined, &zfh) - if err != nil { - return fmt.Errorf("extracting file %s: %v", zfh.Name, err) - } - - // if our target was not a directory, stop walk - if targetDirPath == "" { - return ErrStopWalk - } - } else if targetDirPath != "" { - // finished walking the entire directory - return ErrStopWalk - } - - return nil - }) -} - -// Match returns true if the format of file matches this -// type's format. It should not affect reader position. -func (*Zip) Match(file io.ReadSeeker) (bool, error) { - currentPos, err := file.Seek(0, io.SeekCurrent) - if err != nil { - return false, err - } - _, err = file.Seek(0, 0) - if err != nil { - return false, err - } - defer func() { - _, _ = file.Seek(currentPos, io.SeekStart) - }() - - buf := make([]byte, 4) - if n, err := file.Read(buf); err != nil || n < 4 { - return false, nil - } - return bytes.Equal(buf, []byte("PK\x03\x04")), nil -} - -func (z *Zip) String() string { return "zip" } - -// NewZip returns a new, default instance ready to be customized and used. -func NewZip() *Zip { - return &Zip{ - CompressionLevel: flate.DefaultCompression, - MkdirAll: true, - SelectiveCompression: true, - FileMethod: Deflate, - } -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Reader(new(Zip)) - _ = Writer(new(Zip)) - _ = Archiver(new(Zip)) - _ = Unarchiver(new(Zip)) - _ = Walker(new(Zip)) - _ = Extractor(new(Zip)) - _ = Matcher(new(Zip)) - _ = ExtensionChecker(new(Zip)) - _ = FilenameChecker(new(Zip)) -) - -// compressedFormats is a (non-exhaustive) set of lowercased -// file extensions for formats that are typically already -// compressed. Compressing files that are already compressed -// is inefficient, so use this set of extension to avoid that. 
-var compressedFormats = map[string]struct{}{ - ".7z": {}, - ".avi": {}, - ".br": {}, - ".bz2": {}, - ".cab": {}, - ".docx": {}, - ".gif": {}, - ".gz": {}, - ".jar": {}, - ".jpeg": {}, - ".jpg": {}, - ".lz": {}, - ".lz4": {}, - ".lzma": {}, - ".m4v": {}, - ".mov": {}, - ".mp3": {}, - ".mp4": {}, - ".mpeg": {}, - ".mpg": {}, - ".png": {}, - ".pptx": {}, - ".rar": {}, - ".sz": {}, - ".tbz2": {}, - ".tgz": {}, - ".tsz": {}, - ".txz": {}, - ".xlsx": {}, - ".xz": {}, - ".zip": {}, - ".zipx": {}, -} - -// DefaultZip is a default instance that is conveniently ready to use. -var DefaultZip = NewZip() diff --git a/vendor/github.com/mholt/archiver/v3/zstd.go b/vendor/github.com/mholt/archiver/v3/zstd.go deleted file mode 100644 index 60c11efc49..0000000000 --- a/vendor/github.com/mholt/archiver/v3/zstd.go +++ /dev/null @@ -1,61 +0,0 @@ -package archiver - -import ( - "fmt" - "io" - "path/filepath" - - "github.com/klauspost/compress/zstd" -) - -// Zstd facilitates Zstandard compression. -type Zstd struct { - EncoderOptions []zstd.EOption - DecoderOptions []zstd.DOption -} - -// Compress reads in, compresses it, and writes it to out. -func (zs *Zstd) Compress(in io.Reader, out io.Writer) error { - w, err := zstd.NewWriter(out, zs.EncoderOptions...) - if err != nil { - return err - } - defer w.Close() - _, err = io.Copy(w, in) - return err -} - -// Decompress reads in, decompresses it, and writes it to out. -func (zs *Zstd) Decompress(in io.Reader, out io.Writer) error { - r, err := zstd.NewReader(in, zs.DecoderOptions...) - if err != nil { - return err - } - defer r.Close() - _, err = io.Copy(out, r) - return err -} - -// CheckExt ensures the file extension matches the format. 
-func (zs *Zstd) CheckExt(filename string) error { - if filepath.Ext(filename) != ".zst" { - return fmt.Errorf("filename must have a .zst extension") - } - return nil -} - -func (zs *Zstd) String() string { return "zstd" } - -// NewZstd returns a new, default instance ready to be customized and used. -func NewZstd() *Zstd { - return new(Zstd) -} - -// Compile-time checks to ensure type implements desired interfaces. -var ( - _ = Compressor(new(Zstd)) - _ = Decompressor(new(Zstd)) -) - -// DefaultZstd is a default instance that is conveniently ready to use. -var DefaultZstd = NewZstd() diff --git a/vendor/github.com/nwaples/rardecode/LICENSE b/vendor/github.com/nwaples/rardecode/LICENSE deleted file mode 100644 index 0050f92dfc..0000000000 --- a/vendor/github.com/nwaples/rardecode/LICENSE +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) 2015, Nicholas Waples -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/nwaples/rardecode/README.md b/vendor/github.com/nwaples/rardecode/README.md deleted file mode 100644 index 513464c251..0000000000 --- a/vendor/github.com/nwaples/rardecode/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# rardecode -[![GoDoc](https://godoc.org/github.com/nwaples/rardecode?status.svg)](https://godoc.org/github.com/nwaples/rardecode) - -A go package for reading RAR archives. diff --git a/vendor/github.com/nwaples/rardecode/archive.go b/vendor/github.com/nwaples/rardecode/archive.go deleted file mode 100644 index f878751149..0000000000 --- a/vendor/github.com/nwaples/rardecode/archive.go +++ /dev/null @@ -1,309 +0,0 @@ -package rardecode - -import ( - "bufio" - "bytes" - "errors" - "fmt" - "io" - "os" - "path/filepath" - "regexp" - "strconv" - "strings" -) - -const ( - maxSfxSize = 0x100000 // maximum number of bytes to read when searching for RAR signature - sigPrefix = "Rar!\x1A\x07" - - fileFmt15 = iota + 1 // Version 1.5 archive file format - fileFmt50 // Version 5.0 archive file format -) - -var ( - errNoSig = errors.New("rardecode: RAR signature not found") - errVerMismatch = errors.New("rardecode: volume version mistmatch") - errCorruptHeader = errors.New("rardecode: corrupt block header") - errCorruptFileHeader = errors.New("rardecode: corrupt file header") - errBadHeaderCrc = errors.New("rardecode: bad header crc") - errUnknownArc = 
errors.New("rardecode: unknown archive version") - errUnknownDecoder = errors.New("rardecode: unknown decoder version") - errUnsupportedDecoder = errors.New("rardecode: unsupported decoder version") - errArchiveContinues = errors.New("rardecode: archive continues in next volume") - errArchiveEnd = errors.New("rardecode: archive end reached") - errDecoderOutOfData = errors.New("rardecode: decoder expected more data than is in packed file") - - reDigits = regexp.MustCompile(`\d+`) -) - -type readBuf []byte - -func (b *readBuf) byte() byte { - v := (*b)[0] - *b = (*b)[1:] - return v -} - -func (b *readBuf) uint16() uint16 { - v := uint16((*b)[0]) | uint16((*b)[1])<<8 - *b = (*b)[2:] - return v -} - -func (b *readBuf) uint32() uint32 { - v := uint32((*b)[0]) | uint32((*b)[1])<<8 | uint32((*b)[2])<<16 | uint32((*b)[3])<<24 - *b = (*b)[4:] - return v -} - -func (b *readBuf) bytes(n int) []byte { - v := (*b)[:n] - *b = (*b)[n:] - return v -} - -func (b *readBuf) uvarint() uint64 { - var x uint64 - var s uint - for i, n := range *b { - if n < 0x80 { - *b = (*b)[i+1:] - return x | uint64(n)< '9' || v.file[i+3] < '0' || v.file[i+3] > '9') { - v.file = v.file[:i+2] + "00" - return - } - } - // new style volume naming - if !v.old { - // find all numbers in volume name - m := reDigits.FindAllStringIndex(v.file, -1) - if l := len(m); l > 1 { - // More than 1 match so assume name.part###of###.rar style. - // Take the last 2 matches where the first is the volume number. - m = m[l-2 : l] - if strings.Contains(v.file[m[0][1]:m[1][0]], ".") || !strings.Contains(v.file[:m[0][0]], ".") { - // Didn't match above style as volume had '.' between the two numbers or didnt have a '.' - // before the first match. Use the second number as volume number. 
- m = m[1:] - } - } - // extract and increment volume number - lo, hi := m[0][0], m[0][1] - n, err := strconv.Atoi(v.file[lo:hi]) - if err != nil { - n = 0 - } else { - n++ - } - // volume number must use at least the same number of characters as previous volume - vol := fmt.Sprintf("%0"+fmt.Sprint(hi-lo)+"d", n) - v.file = v.file[:lo] + vol + v.file[hi:] - return - } - // old style volume naming - i := strings.LastIndex(v.file, ".") - // get file extension - b := []byte(v.file[i+1:]) - // start incrementing volume number digits from rightmost - for j := 2; j >= 0; j-- { - if b[j] != '9' { - b[j]++ - break - } - // digit overflow - if j == 0 { - // last character before '.' - b[j] = 'A' - } else { - // set to '0' and loop to next character - b[j] = '0' - } - } - v.file = v.file[:i+1] + string(b) -} - -func (v *volume) next() (*fileBlockHeader, error) { - for { - var atEOF bool - - h, err := v.fileBlockReader.next() - switch err { - case errArchiveContinues: - case io.EOF: - // Read all of volume without finding an end block. The only way - // to tell if the archive continues is to try to open the next volume. 
- atEOF = true - default: - return h, err - } - - v.f.Close() - v.nextVolName() - v.f, err = os.Open(v.dir + v.file) // Open next volume file - if err != nil { - if atEOF && os.IsNotExist(err) { - // volume not found so assume that the archive has ended - return nil, io.EOF - } - return nil, err - } - v.num++ - v.br.Reset(v.f) - ver, err := findSig(v.br) - if err != nil { - return nil, err - } - if v.version() != ver { - return nil, errVerMismatch - } - v.files = append(v.files, v.dir+v.file) - v.reset() // reset encryption - } -} - -func (v *volume) Close() error { - // may be nil if os.Open fails in next() - if v.f == nil { - return nil - } - return v.f.Close() -} - -func openVolume(name, password string) (*volume, error) { - var err error - v := new(volume) - v.dir, v.file = filepath.Split(name) - v.f, err = os.Open(name) - if err != nil { - return nil, err - } - v.br = bufio.NewReader(v.f) - v.fileBlockReader, err = newFileBlockReader(v.br, password) - if err != nil { - v.f.Close() - return nil, err - } - v.files = append(v.files, name) - return v, nil -} - -func newFileBlockReader(br *bufio.Reader, pass string) (fileBlockReader, error) { - runes := []rune(pass) - if len(runes) > maxPassword { - pass = string(runes[:maxPassword]) - } - ver, err := findSig(br) - if err != nil { - return nil, err - } - switch ver { - case fileFmt15: - return newArchive15(br, pass), nil - case fileFmt50: - return newArchive50(br, pass), nil - } - return nil, errUnknownArc -} diff --git a/vendor/github.com/nwaples/rardecode/archive15.go b/vendor/github.com/nwaples/rardecode/archive15.go deleted file mode 100644 index 260176c06b..0000000000 --- a/vendor/github.com/nwaples/rardecode/archive15.go +++ /dev/null @@ -1,468 +0,0 @@ -package rardecode - -import ( - "bufio" - "bytes" - "crypto/sha1" - "errors" - "hash" - "hash/crc32" - "io" - "io/ioutil" - "strconv" - "strings" - "time" - "unicode/utf16" -) - -const ( - // block types - blockArc = 0x73 - blockFile = 
0x74 - blockService = 0x7a - blockEnd = 0x7b - - // block flags - blockHasData = 0x8000 - - // archive block flags - arcVolume = 0x0001 - arcSolid = 0x0008 - arcNewNaming = 0x0010 - arcEncrypted = 0x0080 - - // file block flags - fileSplitBefore = 0x0001 - fileSplitAfter = 0x0002 - fileEncrypted = 0x0004 - fileSolid = 0x0010 - fileWindowMask = 0x00e0 - fileLargeData = 0x0100 - fileUnicode = 0x0200 - fileSalt = 0x0400 - fileVersion = 0x0800 - fileExtTime = 0x1000 - - // end block flags - endArcNotLast = 0x0001 - - saltSize = 8 // size of salt for calculating AES keys - cacheSize30 = 4 // number of AES keys to cache - hashRounds = 0x40000 -) - -var ( - errMultipleDecoders = errors.New("rardecode: multiple decoders in a single archive not supported") -) - -type blockHeader15 struct { - htype byte // block header type - flags uint16 - data readBuf // header data - dataSize int64 // size of extra block data -} - -// fileHash32 implements fileChecksum for 32-bit hashes -type fileHash32 struct { - hash.Hash32 // hash to write file contents to - sum uint32 // 32bit checksum for file -} - -func (h *fileHash32) valid() bool { - return h.sum == h.Sum32() -} - -// archive15 implements fileBlockReader for RAR 1.5 file format archives -type archive15 struct { - byteReader // reader for current block data - v *bufio.Reader // reader for current archive volume - dec decoder // current decoder - decVer byte // current decoder version - multi bool // archive is multi-volume - old bool // archive uses old naming scheme - solid bool // archive is a solid archive - encrypted bool - pass []uint16 // password in UTF-16 - checksum fileHash32 // file checksum - buf readBuf // temporary buffer - keyCache [cacheSize30]struct { // cache of previously calculated decryption keys - salt []byte - key []byte - iv []byte - } -} - -// Calculates the key and iv for AES decryption given a password and salt. 
-func calcAes30Params(pass []uint16, salt []byte) (key, iv []byte) { - p := make([]byte, 0, len(pass)*2+len(salt)) - for _, v := range pass { - p = append(p, byte(v), byte(v>>8)) - } - p = append(p, salt...) - - hash := sha1.New() - iv = make([]byte, 16) - s := make([]byte, 0, hash.Size()) - for i := 0; i < hashRounds; i++ { - hash.Write(p) - hash.Write([]byte{byte(i), byte(i >> 8), byte(i >> 16)}) - if i%(hashRounds/16) == 0 { - s = hash.Sum(s[:0]) - iv[i/(hashRounds/16)] = s[4*4+3] - } - } - key = hash.Sum(s[:0]) - key = key[:16] - - for k := key; len(k) >= 4; k = k[4:] { - k[0], k[1], k[2], k[3] = k[3], k[2], k[1], k[0] - } - return key, iv -} - -// parseDosTime converts a 32bit DOS time value to time.Time -func parseDosTime(t uint32) time.Time { - n := int(t) - sec := n & 0x1f << 1 - min := n >> 5 & 0x3f - hr := n >> 11 & 0x1f - day := n >> 16 & 0x1f - mon := time.Month(n >> 21 & 0x0f) - yr := n>>25&0x7f + 1980 - return time.Date(yr, mon, day, hr, min, sec, 0, time.Local) -} - -// decodeName decodes a non-unicode filename from a file header. 
-func decodeName(buf []byte) string { - i := bytes.IndexByte(buf, 0) - if i < 0 { - return string(buf) // filename is UTF-8 - } - - name := buf[:i] - encName := readBuf(buf[i+1:]) - if len(encName) < 2 { - return "" // invalid encoding - } - highByte := uint16(encName.byte()) << 8 - flags := encName.byte() - flagBits := 8 - var wchars []uint16 // decoded characters are UTF-16 - for len(wchars) < len(name) && len(encName) > 0 { - if flagBits == 0 { - flags = encName.byte() - flagBits = 8 - if len(encName) == 0 { - break - } - } - switch flags >> 6 { - case 0: - wchars = append(wchars, uint16(encName.byte())) - case 1: - wchars = append(wchars, uint16(encName.byte())|highByte) - case 2: - if len(encName) < 2 { - break - } - wchars = append(wchars, encName.uint16()) - case 3: - n := encName.byte() - b := name[len(wchars):] - if l := int(n&0x7f) + 2; l < len(b) { - b = b[:l] - } - if n&0x80 > 0 { - if len(encName) < 1 { - break - } - ec := encName.byte() - for _, c := range b { - wchars = append(wchars, uint16(c+ec)|highByte) - } - } else { - for _, c := range b { - wchars = append(wchars, uint16(c)) - } - } - } - flags <<= 2 - flagBits -= 2 - } - return string(utf16.Decode(wchars)) -} - -// readExtTimes reads and parses the optional extra time field from the file header. 
-func readExtTimes(f *fileBlockHeader, b *readBuf) { - if len(*b) < 2 { - return // invalid, not enough data - } - flags := b.uint16() - - ts := []*time.Time{&f.ModificationTime, &f.CreationTime, &f.AccessTime} - - for i, t := range ts { - n := flags >> uint((3-i)*4) - if n&0x8 == 0 { - continue - } - if i != 0 { // ModificationTime already read so skip - if len(*b) < 4 { - return // invalid, not enough data - } - *t = parseDosTime(b.uint32()) - } - if n&0x4 > 0 { - *t = t.Add(time.Second) - } - n &= 0x3 - if n == 0 { - continue - } - if len(*b) < int(n) { - return // invalid, not enough data - } - // add extra time data in 100's of nanoseconds - d := time.Duration(0) - for j := 3 - n; j < n; j++ { - d |= time.Duration(b.byte()) << (j * 8) - } - d *= 100 - *t = t.Add(d) - } -} - -func (a *archive15) getKeys(salt []byte) (key, iv []byte) { - // check cache of keys - for _, v := range a.keyCache { - if bytes.Equal(v.salt[:], salt) { - return v.key, v.iv - } - } - key, iv = calcAes30Params(a.pass, salt) - - // save a copy in the cache - copy(a.keyCache[1:], a.keyCache[:]) - a.keyCache[0].salt = append([]byte(nil), salt...) 
// copy so byte slice can be reused - a.keyCache[0].key = key - a.keyCache[0].iv = iv - - return key, iv -} - -func (a *archive15) parseFileHeader(h *blockHeader15) (*fileBlockHeader, error) { - f := new(fileBlockHeader) - - f.first = h.flags&fileSplitBefore == 0 - f.last = h.flags&fileSplitAfter == 0 - - f.solid = h.flags&fileSolid > 0 - f.IsDir = h.flags&fileWindowMask == fileWindowMask - if !f.IsDir { - f.winSize = uint(h.flags&fileWindowMask)>>5 + 16 - } - - b := h.data - if len(b) < 21 { - return nil, errCorruptFileHeader - } - - f.PackedSize = h.dataSize - f.UnPackedSize = int64(b.uint32()) - f.HostOS = b.byte() + 1 - if f.HostOS > HostOSBeOS { - f.HostOS = HostOSUnknown - } - a.checksum.sum = b.uint32() - - f.ModificationTime = parseDosTime(b.uint32()) - unpackver := b.byte() // decoder version - method := b.byte() - 0x30 // decryption method - namesize := int(b.uint16()) - f.Attributes = int64(b.uint32()) - if h.flags&fileLargeData > 0 { - if len(b) < 8 { - return nil, errCorruptFileHeader - } - _ = b.uint32() // already read large PackedSize in readBlockHeader - f.UnPackedSize |= int64(b.uint32()) << 32 - f.UnKnownSize = f.UnPackedSize == -1 - } else if int32(f.UnPackedSize) == -1 { - f.UnKnownSize = true - f.UnPackedSize = -1 - } - if len(b) < namesize { - return nil, errCorruptFileHeader - } - name := b.bytes(namesize) - if h.flags&fileUnicode == 0 { - f.Name = string(name) - } else { - f.Name = decodeName(name) - } - // Rar 4.x uses '\' as file separator - f.Name = strings.Replace(f.Name, "\\", "/", -1) - - if h.flags&fileVersion > 0 { - // file version is stored as ';n' appended to file name - i := strings.LastIndex(f.Name, ";") - if i > 0 { - j, err := strconv.Atoi(f.Name[i+1:]) - if err == nil && j >= 0 { - f.Version = j - f.Name = f.Name[:i] - } - } - } - - var salt []byte - if h.flags&fileSalt > 0 { - if len(b) < saltSize { - return nil, errCorruptFileHeader - } - salt = b.bytes(saltSize) - } - if h.flags&fileExtTime > 0 { - readExtTimes(f, &b) - } 
- - if !f.first { - return f, nil - } - // fields only needed for first block in a file - if h.flags&fileEncrypted > 0 && len(salt) == saltSize { - f.key, f.iv = a.getKeys(salt) - } - a.checksum.Reset() - f.cksum = &a.checksum - if method == 0 { - return f, nil - } - if a.dec == nil { - switch unpackver { - case 15, 20, 26: - return nil, errUnsupportedDecoder - case 29: - a.dec = new(decoder29) - default: - return nil, errUnknownDecoder - } - a.decVer = unpackver - } else if a.decVer != unpackver { - return nil, errMultipleDecoders - } - f.decoder = a.dec - return f, nil -} - -// readBlockHeader returns the next block header in the archive. -// It will return io.EOF if there were no bytes read. -func (a *archive15) readBlockHeader() (*blockHeader15, error) { - var err error - b := a.buf[:7] - r := io.Reader(a.v) - if a.encrypted { - salt := a.buf[:saltSize] - _, err = io.ReadFull(r, salt) - if err != nil { - return nil, err - } - key, iv := a.getKeys(salt) - r = newAesDecryptReader(r, key, iv) - err = readFull(r, b) - } else { - _, err = io.ReadFull(r, b) - } - if err != nil { - return nil, err - } - - crc := b.uint16() - hash := crc32.NewIEEE() - hash.Write(b) - h := new(blockHeader15) - h.htype = b.byte() - h.flags = b.uint16() - size := b.uint16() - if size < 7 { - return nil, errCorruptHeader - } - size -= 7 - if int(size) > cap(a.buf) { - a.buf = readBuf(make([]byte, size)) - } - h.data = a.buf[:size] - if err := readFull(r, h.data); err != nil { - return nil, err - } - hash.Write(h.data) - if crc != uint16(hash.Sum32()) { - return nil, errBadHeaderCrc - } - if h.flags&blockHasData > 0 { - if len(h.data) < 4 { - return nil, errCorruptHeader - } - h.dataSize = int64(h.data.uint32()) - } - if (h.htype == blockService || h.htype == blockFile) && h.flags&fileLargeData > 0 { - if len(h.data) < 25 { - return nil, errCorruptHeader - } - b := h.data[21:25] - h.dataSize |= int64(b.uint32()) << 32 - } - return h, nil -} - -// next advances to the next file block in the 
archive -func (a *archive15) next() (*fileBlockHeader, error) { - for { - // could return an io.EOF here as 1.5 archives may not have an end block. - h, err := a.readBlockHeader() - if err != nil { - return nil, err - } - a.byteReader = limitByteReader(a.v, h.dataSize) // reader for block data - - switch h.htype { - case blockFile: - return a.parseFileHeader(h) - case blockArc: - a.encrypted = h.flags&arcEncrypted > 0 - a.multi = h.flags&arcVolume > 0 - a.old = h.flags&arcNewNaming == 0 - a.solid = h.flags&arcSolid > 0 - case blockEnd: - if h.flags&endArcNotLast == 0 || !a.multi { - return nil, errArchiveEnd - } - return nil, errArchiveContinues - default: - _, err = io.Copy(ioutil.Discard, a.byteReader) - } - if err != nil { - return nil, err - } - } -} - -func (a *archive15) version() int { return fileFmt15 } - -func (a *archive15) reset() { - a.encrypted = false // reset encryption when opening new volume file -} - -func (a *archive15) isSolid() bool { - return a.solid -} - -// newArchive15 creates a new fileBlockReader for a Version 1.5 archive -func newArchive15(r *bufio.Reader, password string) fileBlockReader { - a := new(archive15) - a.v = r - a.pass = utf16.Encode([]rune(password)) // convert to UTF-16 - a.checksum.Hash32 = crc32.NewIEEE() - a.buf = readBuf(make([]byte, 100)) - return a -} diff --git a/vendor/github.com/nwaples/rardecode/archive50.go b/vendor/github.com/nwaples/rardecode/archive50.go deleted file mode 100644 index 1d8f850dcd..0000000000 --- a/vendor/github.com/nwaples/rardecode/archive50.go +++ /dev/null @@ -1,475 +0,0 @@ -package rardecode - -import ( - "bufio" - "bytes" - "crypto/hmac" - "crypto/sha256" - "errors" - "hash" - "hash/crc32" - "io" - "io/ioutil" - "time" -) - -const ( - // block types - block5Arc = 1 - block5File = 2 - block5Service = 3 - block5Encrypt = 4 - block5End = 5 - - // block flags - block5HasExtra = 0x0001 - block5HasData = 0x0002 - block5DataNotFirst = 0x0008 - block5DataNotLast = 0x0010 - 
- // end block flags - endArc5NotLast = 0x0001 - - // archive encryption block flags - enc5CheckPresent = 0x0001 // password check data is present - - // main archive block flags - arc5MultiVol = 0x0001 - arc5Solid = 0x0004 - - // file block flags - file5IsDir = 0x0001 - file5HasUnixMtime = 0x0002 - file5HasCRC32 = 0x0004 - file5UnpSizeUnknown = 0x0008 - - // file encryption record flags - file5EncCheckPresent = 0x0001 // password check data is present - file5EncUseMac = 0x0002 // use MAC instead of plain checksum - - cacheSize50 = 4 - maxPbkdf2Salt = 64 - pwCheckSize = 8 - maxKdfCount = 24 - - minHeaderSize = 7 -) - -var ( - errBadPassword = errors.New("rardecode: incorrect password") - errCorruptEncrypt = errors.New("rardecode: corrupt encryption data") - errUnknownEncMethod = errors.New("rardecode: unknown encryption method") -) - -type extra struct { - ftype uint64 // field type - data readBuf // field data -} - -type blockHeader50 struct { - htype uint64 // block type - flags uint64 - data readBuf // block header data - extra []extra // extra fields - dataSize int64 // size of block data -} - -// leHash32 wraps a hash.Hash32 to return the result of Sum in little -// endian format. 
-type leHash32 struct { - hash.Hash32 -} - -func (h leHash32) Sum(b []byte) []byte { - s := h.Sum32() - return append(b, byte(s), byte(s>>8), byte(s>>16), byte(s>>24)) -} - -func newLittleEndianCRC32() hash.Hash32 { - return leHash32{crc32.NewIEEE()} -} - -// hash50 implements fileChecksum for RAR 5 archives -type hash50 struct { - hash.Hash // hash file data is written to - sum []byte // file checksum - key []byte // if present used with hmac in calculating checksum from hash -} - -func (h *hash50) valid() bool { - sum := h.Sum(nil) - if len(h.key) > 0 { - mac := hmac.New(sha256.New, h.key) - mac.Write(sum) - sum = mac.Sum(sum[:0]) - if len(h.sum) == 4 { - // CRC32 - for i, v := range sum[4:] { - sum[i&3] ^= v - } - sum = sum[:4] - } - } - return bytes.Equal(sum, h.sum) -} - -// archive50 implements fileBlockReader for RAR 5 file format archives -type archive50 struct { - byteReader // reader for current block data - v *bufio.Reader // reader for current archive volume - pass []byte - blockKey []byte // key used to encrypt blocks - multi bool // archive is multi-volume - solid bool // is a solid archive - checksum hash50 // file checksum - dec decoder // optional decoder used to unpack file - buf readBuf // temporary buffer - keyCache [cacheSize50]struct { // encryption key cache - kdfCount int - salt []byte - keys [][]byte - } -} - -// calcKeys50 calculates the keys used in RAR 5 archive processing. -// The returned slice of byte slices contains 3 keys. -// Key 0 is used for block or file decryption. -// Key 1 is optionally used for file checksum calculation. -// Key 2 is optionally used for password checking. -func calcKeys50(pass, salt []byte, kdfCount int) [][]byte { - if len(salt) > maxPbkdf2Salt { - salt = salt[:maxPbkdf2Salt] - } - keys := make([][]byte, 3) - if len(keys) == 0 { - return keys - } - - prf := hmac.New(sha256.New, pass) - prf.Write(salt) - prf.Write([]byte{0, 0, 0, 1}) - - t := prf.Sum(nil) - u := append([]byte(nil), t...) 
- - kdfCount-- - - for i, iter := range []int{kdfCount, 16, 16} { - for iter > 0 { - prf.Reset() - prf.Write(u) - u = prf.Sum(u[:0]) - for j := range u { - t[j] ^= u[j] - } - iter-- - } - keys[i] = append([]byte(nil), t...) - } - - pwcheck := keys[2] - for i, v := range pwcheck[pwCheckSize:] { - pwcheck[i&(pwCheckSize-1)] ^= v - } - keys[2] = pwcheck[:pwCheckSize] - - return keys -} - -// getKeys reads kdfcount and salt from b and returns the corresponding encryption keys. -func (a *archive50) getKeys(b *readBuf) (keys [][]byte, err error) { - if len(*b) < 17 { - return nil, errCorruptEncrypt - } - // read kdf count and salt - kdfCount := int(b.byte()) - if kdfCount > maxKdfCount { - return nil, errCorruptEncrypt - } - kdfCount = 1 << uint(kdfCount) - salt := b.bytes(16) - - // check cache of keys for match - for _, v := range a.keyCache { - if kdfCount == v.kdfCount && bytes.Equal(salt, v.salt) { - return v.keys, nil - } - } - // not found, calculate keys - keys = calcKeys50(a.pass, salt, kdfCount) - - // store in cache - copy(a.keyCache[1:], a.keyCache[:]) - a.keyCache[0].kdfCount = kdfCount - a.keyCache[0].salt = append([]byte(nil), salt...) - a.keyCache[0].keys = keys - - return keys, nil -} - -// checkPassword calculates if a password is correct given password check data and keys. -func checkPassword(b *readBuf, keys [][]byte) error { - if len(*b) < 12 { - return nil // not enough bytes, ignore for the moment - } - pwcheck := b.bytes(8) - sum := b.bytes(4) - csum := sha256.Sum256(pwcheck) - if bytes.Equal(sum, csum[:len(sum)]) && !bytes.Equal(pwcheck, keys[2]) { - return errBadPassword - } - return nil -} - -// parseFileEncryptionRecord processes the optional file encryption record from a file header. 
-func (a *archive50) parseFileEncryptionRecord(b readBuf, f *fileBlockHeader) error { - if ver := b.uvarint(); ver != 0 { - return errUnknownEncMethod - } - flags := b.uvarint() - - keys, err := a.getKeys(&b) - if err != nil { - return err - } - - f.key = keys[0] - if len(b) < 16 { - return errCorruptEncrypt - } - f.iv = b.bytes(16) - - if flags&file5EncCheckPresent > 0 { - if err := checkPassword(&b, keys); err != nil { - return err - } - } - if flags&file5EncUseMac > 0 { - a.checksum.key = keys[1] - } - return nil -} - -func (a *archive50) parseFileHeader(h *blockHeader50) (*fileBlockHeader, error) { - a.checksum.sum = nil - a.checksum.key = nil - - f := new(fileBlockHeader) - - f.first = h.flags&block5DataNotFirst == 0 - f.last = h.flags&block5DataNotLast == 0 - - flags := h.data.uvarint() // file flags - f.IsDir = flags&file5IsDir > 0 - f.UnKnownSize = flags&file5UnpSizeUnknown > 0 - f.UnPackedSize = int64(h.data.uvarint()) - f.PackedSize = h.dataSize - f.Attributes = int64(h.data.uvarint()) - if flags&file5HasUnixMtime > 0 { - if len(h.data) < 4 { - return nil, errCorruptFileHeader - } - f.ModificationTime = time.Unix(int64(h.data.uint32()), 0) - } - if flags&file5HasCRC32 > 0 { - if len(h.data) < 4 { - return nil, errCorruptFileHeader - } - a.checksum.sum = append([]byte(nil), h.data.bytes(4)...) 
- if f.first { - a.checksum.Hash = newLittleEndianCRC32() - f.cksum = &a.checksum - } - } - - flags = h.data.uvarint() // compression flags - f.solid = flags&0x0040 > 0 - f.winSize = uint(flags&0x3C00)>>10 + 17 - method := (flags >> 7) & 7 // compression method (0 == none) - if f.first && method != 0 { - unpackver := flags & 0x003f - if unpackver != 0 { - return nil, errUnknownDecoder - } - if a.dec == nil { - a.dec = new(decoder50) - } - f.decoder = a.dec - } - switch h.data.uvarint() { - case 0: - f.HostOS = HostOSWindows - case 1: - f.HostOS = HostOSUnix - default: - f.HostOS = HostOSUnknown - } - nlen := int(h.data.uvarint()) - if len(h.data) < nlen { - return nil, errCorruptFileHeader - } - f.Name = string(h.data.bytes(nlen)) - - // parse optional extra records - for _, e := range h.extra { - var err error - switch e.ftype { - case 1: // encryption - err = a.parseFileEncryptionRecord(e.data, f) - case 2: - // TODO: hash - case 3: - // TODO: time - case 4: // version - _ = e.data.uvarint() // ignore flags field - f.Version = int(e.data.uvarint()) - case 5: - // TODO: redirection - case 6: - // TODO: owner - } - if err != nil { - return nil, err - } - } - return f, nil -} - -// parseEncryptionBlock calculates the key for block encryption. 
-func (a *archive50) parseEncryptionBlock(b readBuf) error { - if ver := b.uvarint(); ver != 0 { - return errUnknownEncMethod - } - flags := b.uvarint() - keys, err := a.getKeys(&b) - if err != nil { - return err - } - if flags&enc5CheckPresent > 0 { - if err := checkPassword(&b, keys); err != nil { - return err - } - } - a.blockKey = keys[0] - return nil -} - -func (a *archive50) readBlockHeader() (*blockHeader50, error) { - r := io.Reader(a.v) - if a.blockKey != nil { - // block is encrypted - iv := a.buf[:16] - if err := readFull(r, iv); err != nil { - return nil, err - } - r = newAesDecryptReader(r, a.blockKey, iv) - } - - b := a.buf[:minHeaderSize] - if err := readFull(r, b); err != nil { - return nil, err - } - crc := b.uint32() - - hash := crc32.NewIEEE() - hash.Write(b) - - size := int(b.uvarint()) // header size - if size > cap(a.buf) { - a.buf = readBuf(make([]byte, size)) - } else { - a.buf = a.buf[:size] - } - n := copy(a.buf, b) // copy left over bytes - if err := readFull(r, a.buf[n:]); err != nil { // read rest of header - return nil, err - } - - // check header crc - hash.Write(a.buf[n:]) - if crc != hash.Sum32() { - return nil, errBadHeaderCrc - } - - b = a.buf - h := new(blockHeader50) - h.htype = b.uvarint() - h.flags = b.uvarint() - - var extraSize int - if h.flags&block5HasExtra > 0 { - extraSize = int(b.uvarint()) - } - if h.flags&block5HasData > 0 { - h.dataSize = int64(b.uvarint()) - } - if len(b) < extraSize { - return nil, errCorruptHeader - } - h.data = b.bytes(len(b) - extraSize) - - // read header extra records - for len(b) > 0 { - size = int(b.uvarint()) - if len(b) < size { - return nil, errCorruptHeader - } - data := readBuf(b.bytes(size)) - ftype := data.uvarint() - h.extra = append(h.extra, extra{ftype, data}) - } - - return h, nil -} - -// next advances to the next file block in the archive -func (a *archive50) next() (*fileBlockHeader, error) { - for { - h, err := a.readBlockHeader() - if err != nil { - return nil, err - } - 
a.byteReader = limitByteReader(a.v, h.dataSize) - switch h.htype { - case block5File: - return a.parseFileHeader(h) - case block5Arc: - flags := h.data.uvarint() - a.multi = flags&arc5MultiVol > 0 - a.solid = flags&arc5Solid > 0 - case block5Encrypt: - err = a.parseEncryptionBlock(h.data) - case block5End: - flags := h.data.uvarint() - if flags&endArc5NotLast == 0 || !a.multi { - return nil, errArchiveEnd - } - return nil, errArchiveContinues - default: - // discard block data - _, err = io.Copy(ioutil.Discard, a.byteReader) - } - if err != nil { - return nil, err - } - } -} - -func (a *archive50) version() int { return fileFmt50 } - -func (a *archive50) reset() { - a.blockKey = nil // reset encryption when opening new volume file -} - -func (a *archive50) isSolid() bool { - return a.solid -} - -// newArchive50 creates a new fileBlockReader for a Version 5 archive. -func newArchive50(r *bufio.Reader, password string) fileBlockReader { - a := new(archive50) - a.v = r - a.pass = []byte(password) - a.buf = make([]byte, 100) - return a -} diff --git a/vendor/github.com/nwaples/rardecode/bit_reader.go b/vendor/github.com/nwaples/rardecode/bit_reader.go deleted file mode 100644 index 9b284efa31..0000000000 --- a/vendor/github.com/nwaples/rardecode/bit_reader.go +++ /dev/null @@ -1,119 +0,0 @@ -package rardecode - -import "io" - -type bitReader interface { - readBits(n uint) (int, error) // read n bits of data - unreadBits(n uint) // revert the reading of the last n bits read -} - -type limitedBitReader struct { - br bitReader - n int - err error // error to return if br returns EOF before all n bits have been read -} - -// limitBitReader returns a bitReader that reads from br and stops with io.EOF after n bits. -// If br returns an io.EOF before reading n bits, err is returned. 
-func limitBitReader(br bitReader, n int, err error) bitReader { - return &limitedBitReader{br, n, err} -} - -func (l *limitedBitReader) readBits(n uint) (int, error) { - if int(n) > l.n { - return 0, io.EOF - } - v, err := l.br.readBits(n) - if err == nil { - l.n -= int(n) - } else if err == io.EOF { - err = l.err - } - return v, err -} - -func (l *limitedBitReader) unreadBits(n uint) { - l.n += int(n) - l.br.unreadBits(n) -} - -// rarBitReader wraps an io.ByteReader to perform various bit and byte -// reading utility functions used in RAR file processing. -type rarBitReader struct { - r io.ByteReader - v int - n uint -} - -func (r *rarBitReader) reset(br io.ByteReader) { - r.r = br - r.n = 0 - r.v = 0 -} - -func (r *rarBitReader) readBits(n uint) (int, error) { - for n > r.n { - c, err := r.r.ReadByte() - if err != nil { - return 0, err - } - r.v = r.v<<8 | int(c) - r.n += 8 - } - r.n -= n - return (r.v >> r.n) & ((1 << n) - 1), nil -} - -func (r *rarBitReader) unreadBits(n uint) { - r.n += n -} - -// alignByte aligns the current bit reading input to the next byte boundary. -func (r *rarBitReader) alignByte() { - r.n -= r.n % 8 -} - -// readUint32 reads a RAR V3 encoded uint32 -func (r *rarBitReader) readUint32() (uint32, error) { - n, err := r.readBits(2) - if err != nil { - return 0, err - } - if n != 1 { - n, err = r.readBits(4 << uint(n)) - return uint32(n), err - } - n, err = r.readBits(4) - if err != nil { - return 0, err - } - if n == 0 { - n, err = r.readBits(8) - n |= -1 << 8 - return uint32(n), err - } - nlow, err := r.readBits(4) - n = n<<4 | nlow - return uint32(n), err -} - -func (r *rarBitReader) ReadByte() (byte, error) { - n, err := r.readBits(8) - return byte(n), err -} - -// readFull reads len(p) bytes into p. If fewer bytes are read an error is returned. 
-func (r *rarBitReader) readFull(p []byte) error { - for i := range p { - c, err := r.ReadByte() - if err != nil { - return err - } - p[i] = c - } - return nil -} - -func newRarBitReader(r io.ByteReader) *rarBitReader { - return &rarBitReader{r: r} -} diff --git a/vendor/github.com/nwaples/rardecode/decode29.go b/vendor/github.com/nwaples/rardecode/decode29.go deleted file mode 100644 index 638645e79b..0000000000 --- a/vendor/github.com/nwaples/rardecode/decode29.go +++ /dev/null @@ -1,264 +0,0 @@ -package rardecode - -import ( - "bytes" - "errors" - "io" -) - -const ( - maxCodeSize = 0x10000 - maxUniqueFilters = 1024 -) - -var ( - // Errors marking the end of the decoding block and/or file - endOfFile = errors.New("rardecode: end of file") - endOfBlock = errors.New("rardecode: end of block") - endOfBlockAndFile = errors.New("rardecode: end of block and file") -) - -// decoder29 implements the decoder interface for RAR 3.0 compression (unpack version 29) -// Decode input is broken up into 1 or more blocks. The start of each block specifies -// the decoding algorithm (ppm or lz) and optional data to initialize with. -// Block length is not stored, it is determined only after decoding an end of file and/or -// block marker in the data. -type decoder29 struct { - br *rarBitReader - eof bool // at file eof - fnum int // current filter number (index into filters) - flen []int // filter block length history - filters []v3Filter // list of current filters used by archive encoding - - // current decode function (lz or ppm). - // When called it should perform a single decode operation, and either apply the - // data to the window or return they raw bytes for a filter. - decode func(w *window) ([]byte, error) - - lz lz29Decoder // lz decoder - ppm ppm29Decoder // ppm decoder -} - -// init intializes the decoder for decoding a new file. 
-func (d *decoder29) init(r io.ByteReader, reset bool) error { - if d.br == nil { - d.br = newRarBitReader(r) - } else { - d.br.reset(r) - } - d.eof = false - if reset { - d.initFilters() - d.lz.reset() - d.ppm.reset() - d.decode = nil - } - if d.decode == nil { - return d.readBlockHeader() - } - return nil -} - -func (d *decoder29) initFilters() { - d.fnum = 0 - d.flen = nil - d.filters = nil -} - -// readVMCode reads the raw bytes for the code/commands used in a vm filter -func readVMCode(br *rarBitReader) ([]byte, error) { - n, err := br.readUint32() - if err != nil { - return nil, err - } - if n > maxCodeSize || n == 0 { - return nil, errInvalidFilter - } - buf := make([]byte, n) - err = br.readFull(buf) - if err != nil { - return nil, err - } - var x byte - for _, c := range buf[1:] { - x ^= c - } - // simple xor checksum on data - if x != buf[0] { - return nil, errInvalidFilter - } - return buf, nil -} - -func (d *decoder29) parseVMFilter(buf []byte) (*filterBlock, error) { - flags := buf[0] - br := newRarBitReader(bytes.NewReader(buf[1:])) - fb := new(filterBlock) - - // Find the filter number which is an index into d.filters. - // If filter number == len(d.filters) it is a new filter to be added. 
- if flags&0x80 > 0 { - n, err := br.readUint32() - if err != nil { - return nil, err - } - if n == 0 { - d.initFilters() - fb.reset = true - } else { - n-- - if n > maxUniqueFilters { - return nil, errInvalidFilter - } - if int(n) > len(d.filters) { - return nil, errInvalidFilter - } - } - d.fnum = int(n) - } - - // filter offset - n, err := br.readUint32() - if err != nil { - return nil, err - } - if flags&0x40 > 0 { - n += 258 - } - fb.offset = int(n) - - // filter length - if d.fnum == len(d.flen) { - d.flen = append(d.flen, 0) - } - if flags&0x20 > 0 { - n, err = br.readUint32() - if err != nil { - return nil, err - } - //fb.length = int(n) - d.flen[d.fnum] = int(n) - } - fb.length = d.flen[d.fnum] - - // initial register values - r := make(map[int]uint32) - if flags&0x10 > 0 { - bits, err := br.readBits(vmRegs - 1) - if err != nil { - return nil, err - } - for i := 0; i < vmRegs-1; i++ { - if bits&1 > 0 { - r[i], err = br.readUint32() - if err != nil { - return nil, err - } - } - bits >>= 1 - } - } - - // filter is new so read the code for it - if d.fnum == len(d.filters) { - code, err := readVMCode(br) - if err != nil { - return nil, err - } - f, err := getV3Filter(code) - if err != nil { - return nil, err - } - d.filters = append(d.filters, f) - d.flen = append(d.flen, fb.length) - } - - // read global data - var g []byte - if flags&0x08 > 0 { - n, err := br.readUint32() - if err != nil { - return nil, err - } - if n > vmGlobalSize-vmFixedGlobalSize { - return nil, errInvalidFilter - } - g = make([]byte, n) - err = br.readFull(g) - if err != nil { - return nil, err - } - } - - // create filter function - f := d.filters[d.fnum] - fb.filter = func(buf []byte, offset int64) ([]byte, error) { - return f(r, g, buf, offset) - } - - return fb, nil -} - -// readBlockHeader determines and initializes the current decoder for a new decode block. 
-func (d *decoder29) readBlockHeader() error { - d.br.alignByte() - n, err := d.br.readBits(1) - if err == nil { - if n > 0 { - d.decode = d.ppm.decode - err = d.ppm.init(d.br) - } else { - d.decode = d.lz.decode - err = d.lz.init(d.br) - } - } - if err == io.EOF { - err = errDecoderOutOfData - } - return err - -} - -func (d *decoder29) fill(w *window) ([]*filterBlock, error) { - if d.eof { - return nil, io.EOF - } - - var fl []*filterBlock - - for w.available() > 0 { - b, err := d.decode(w) // perform a single decode operation - if len(b) > 0 && err == nil { - // parse raw data for filter and add to list of filters - var f *filterBlock - f, err = d.parseVMFilter(b) - if f != nil { - // make offset relative to read index (from write index) - f.offset += w.buffered() - fl = append(fl, f) - } - } - - switch err { - case nil: - continue - case endOfBlock: - err = d.readBlockHeader() - if err == nil { - continue - } - case endOfFile: - d.eof = true - err = io.EOF - case endOfBlockAndFile: - d.eof = true - d.decode = nil // clear decoder, it will be setup by next init() - err = io.EOF - case io.EOF: - err = errDecoderOutOfData - } - return fl, err - } - // return filters - return fl, nil -} diff --git a/vendor/github.com/nwaples/rardecode/decode29_lz.go b/vendor/github.com/nwaples/rardecode/decode29_lz.go deleted file mode 100644 index 94470853dc..0000000000 --- a/vendor/github.com/nwaples/rardecode/decode29_lz.go +++ /dev/null @@ -1,247 +0,0 @@ -package rardecode - -const ( - mainSize = 299 - offsetSize = 60 - lowOffsetSize = 17 - lengthSize = 28 - tableSize = mainSize + offsetSize + lowOffsetSize + lengthSize -) - -var ( - lengthBase = [28]int{0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, - 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224} - lengthExtraBits = [28]uint{0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, - 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5} - - offsetBase = [60]int{0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32, 48, 64, 96, - 128, 
192, 256, 384, 512, 768, 1024, 1536, 2048, 3072, 4096, - 6144, 8192, 12288, 16384, 24576, 32768, 49152, 65536, 98304, - 131072, 196608, 262144, 327680, 393216, 458752, 524288, - 589824, 655360, 720896, 786432, 851968, 917504, 983040, - 1048576, 1310720, 1572864, 1835008, 2097152, 2359296, 2621440, - 2883584, 3145728, 3407872, 3670016, 3932160} - offsetExtraBits = [60]uint{0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, - 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, - 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18} - - shortOffsetBase = [8]int{0, 4, 8, 16, 32, 64, 128, 192} - shortOffsetExtraBits = [8]uint{2, 2, 3, 4, 5, 6, 6, 6} -) - -type lz29Decoder struct { - codeLength [tableSize]byte - - mainDecoder huffmanDecoder - offsetDecoder huffmanDecoder - lowOffsetDecoder huffmanDecoder - lengthDecoder huffmanDecoder - - offset [4]int // history of previous offsets - length int // previous length - lowOffset int - lowOffsetRepeats int - - br *rarBitReader -} - -func (d *lz29Decoder) reset() { - for i := range d.offset { - d.offset[i] = 0 - } - d.length = 0 - for i := range d.codeLength { - d.codeLength[i] = 0 - } -} - -func (d *lz29Decoder) init(br *rarBitReader) error { - d.br = br - d.lowOffset = 0 - d.lowOffsetRepeats = 0 - - n, err := d.br.readBits(1) - if err != nil { - return err - } - addOld := n > 0 - - cl := d.codeLength[:] - if err = readCodeLengthTable(d.br, cl, addOld); err != nil { - return err - } - - d.mainDecoder.init(cl[:mainSize]) - cl = cl[mainSize:] - d.offsetDecoder.init(cl[:offsetSize]) - cl = cl[offsetSize:] - d.lowOffsetDecoder.init(cl[:lowOffsetSize]) - cl = cl[lowOffsetSize:] - d.lengthDecoder.init(cl) - - return nil -} - -func (d *lz29Decoder) readFilterData() (b []byte, err error) { - flags, err := d.br.ReadByte() - if err != nil { - return nil, err - } - - n := (int(flags) & 7) + 1 - switch n { - case 7: - n, err = d.br.readBits(8) - n += 7 - if err != nil { - 
return nil, err - } - case 8: - n, err = d.br.readBits(16) - if err != nil { - return nil, err - } - } - - buf := make([]byte, n+1) - buf[0] = flags - err = d.br.readFull(buf[1:]) - - return buf, err -} - -func (d *lz29Decoder) readEndOfBlock() error { - n, err := d.br.readBits(1) - if err != nil { - return err - } - if n > 0 { - return endOfBlock - } - n, err = d.br.readBits(1) - if err != nil { - return err - } - if n > 0 { - return endOfBlockAndFile - } - return endOfFile -} - -func (d *lz29Decoder) decode(win *window) ([]byte, error) { - sym, err := d.mainDecoder.readSym(d.br) - if err != nil { - return nil, err - } - - switch { - case sym < 256: - // literal - win.writeByte(byte(sym)) - return nil, nil - case sym == 256: - return nil, d.readEndOfBlock() - case sym == 257: - return d.readFilterData() - case sym == 258: - // use previous offset and length - case sym < 263: - i := sym - 259 - offset := d.offset[i] - copy(d.offset[1:i+1], d.offset[:i]) - d.offset[0] = offset - - i, err := d.lengthDecoder.readSym(d.br) - if err != nil { - return nil, err - } - d.length = lengthBase[i] + 2 - bits := lengthExtraBits[i] - if bits > 0 { - n, err := d.br.readBits(bits) - if err != nil { - return nil, err - } - d.length += n - } - case sym < 271: - i := sym - 263 - copy(d.offset[1:], d.offset[:]) - offset := shortOffsetBase[i] + 1 - bits := shortOffsetExtraBits[i] - if bits > 0 { - n, err := d.br.readBits(bits) - if err != nil { - return nil, err - } - offset += n - } - d.offset[0] = offset - - d.length = 2 - default: - i := sym - 271 - d.length = lengthBase[i] + 3 - bits := lengthExtraBits[i] - if bits > 0 { - n, err := d.br.readBits(bits) - if err != nil { - return nil, err - } - d.length += n - } - - i, err = d.offsetDecoder.readSym(d.br) - if err != nil { - return nil, err - } - offset := offsetBase[i] + 1 - bits = offsetExtraBits[i] - - switch { - case bits >= 4: - if bits > 4 { - n, err := d.br.readBits(bits - 4) - if err != nil { - return nil, err - } - offset += 
n << 4 - } - - if d.lowOffsetRepeats > 0 { - d.lowOffsetRepeats-- - offset += d.lowOffset - } else { - n, err := d.lowOffsetDecoder.readSym(d.br) - if err != nil { - return nil, err - } - if n == 16 { - d.lowOffsetRepeats = 15 - offset += d.lowOffset - } else { - offset += n - d.lowOffset = n - } - } - case bits > 0: - n, err := d.br.readBits(bits) - if err != nil { - return nil, err - } - offset += n - } - - if offset >= 0x2000 { - d.length++ - if offset >= 0x40000 { - d.length++ - } - } - copy(d.offset[1:], d.offset[:]) - d.offset[0] = offset - } - win.copyBytes(d.length, d.offset[0]) - return nil, nil -} diff --git a/vendor/github.com/nwaples/rardecode/decode29_ppm.go b/vendor/github.com/nwaples/rardecode/decode29_ppm.go deleted file mode 100644 index 39c3199584..0000000000 --- a/vendor/github.com/nwaples/rardecode/decode29_ppm.go +++ /dev/null @@ -1,132 +0,0 @@ -package rardecode - -import "io" - -type ppm29Decoder struct { - m model // ppm model - esc byte // escape character - br io.ByteReader -} - -func (d *ppm29Decoder) init(br *rarBitReader) error { - maxOrder, err := br.readBits(7) - if err != nil { - return err - } - reset := maxOrder&0x20 > 0 - - // Should have flushed all unread bits from bitReader by now, - // use underlying ByteReader - d.br = br.r - - var maxMB int - if reset { - c, err := d.br.ReadByte() - if err != nil { - return err - } - maxMB = int(c) + 1 - } - - if maxOrder&0x40 > 0 { - d.esc, err = d.br.ReadByte() - if err != nil { - return err - } - } - - maxOrder = (maxOrder & 0x1f) + 1 - if maxOrder > 16 { - maxOrder = 16 + (maxOrder-16)*3 - } - - return d.m.init(d.br, reset, maxOrder, maxMB) -} - -func (d *ppm29Decoder) reset() { - d.esc = 2 -} - -func (d *ppm29Decoder) readFilterData() ([]byte, error) { - c, err := d.m.ReadByte() - if err != nil { - return nil, err - } - n := int(c&7) + 1 - if n == 7 { - b, err := d.m.ReadByte() - if err != nil { - return nil, err - } - n += int(b) - } else if n == 8 { - b, err := 
d.m.ReadByte() - if err != nil { - return nil, err - } - n = int(b) << 8 - b, err = d.m.ReadByte() - if err != nil { - return nil, err - } - n |= int(b) - } - - n++ - buf := make([]byte, n) - buf[0] = byte(c) - for i := 1; i < n; i++ { - buf[i], err = d.m.ReadByte() - if err != nil { - return nil, err - } - } - return buf, nil -} - -func (d *ppm29Decoder) decode(w *window) ([]byte, error) { - c, err := d.m.ReadByte() - if err != nil { - return nil, err - } - if c != d.esc { - w.writeByte(c) - return nil, nil - } - c, err = d.m.ReadByte() - if err != nil { - return nil, err - } - - switch c { - case 0: - return nil, endOfBlock - case 2: - return nil, endOfBlockAndFile - case 3: - return d.readFilterData() - case 4: - offset := 0 - for i := 0; i < 3; i++ { - c, err = d.m.ReadByte() - if err != nil { - return nil, err - } - offset = offset<<8 | int(c) - } - len, err := d.m.ReadByte() - if err != nil { - return nil, err - } - w.copyBytes(int(len)+32, offset+2) - case 5: - len, err := d.m.ReadByte() - if err != nil { - return nil, err - } - w.copyBytes(int(len)+4, 1) - default: - w.writeByte(d.esc) - } - return nil, nil -} diff --git a/vendor/github.com/nwaples/rardecode/decode50.go b/vendor/github.com/nwaples/rardecode/decode50.go deleted file mode 100644 index 1939a444ab..0000000000 --- a/vendor/github.com/nwaples/rardecode/decode50.go +++ /dev/null @@ -1,294 +0,0 @@ -package rardecode - -import ( - "errors" - "io" -) - -const ( - mainSize5 = 306 - offsetSize5 = 64 - lowoffsetSize5 = 16 - lengthSize5 = 44 - tableSize5 = mainSize5 + offsetSize5 + lowoffsetSize5 + lengthSize5 -) - -var ( - errUnknownFilter = errors.New("rardecode: unknown V5 filter") - errCorruptDecodeHeader = errors.New("rardecode: corrupt decode header") -) - -// decoder50 implements the decoder interface for RAR 5 compression. -// Decode input it broken up into 1 or more blocks. 
Each block starts with -// a header containing block length and optional code length tables to initialize -// the huffman decoders with. -type decoder50 struct { - r io.ByteReader - br bitReader // bit reader for current data block - codeLength [tableSize5]byte - - lastBlock bool // current block is last block in compressed file - - mainDecoder huffmanDecoder - offsetDecoder huffmanDecoder - lowoffsetDecoder huffmanDecoder - lengthDecoder huffmanDecoder - - offset [4]int - length int -} - -func (d *decoder50) init(r io.ByteReader, reset bool) error { - d.r = r - d.lastBlock = false - - if reset { - for i := range d.offset { - d.offset[i] = 0 - } - d.length = 0 - for i := range d.codeLength { - d.codeLength[i] = 0 - } - } - err := d.readBlockHeader() - if err == io.EOF { - return errDecoderOutOfData - } - return err -} - -func (d *decoder50) readBlockHeader() error { - flags, err := d.r.ReadByte() - if err != nil { - return err - } - - bytecount := (flags>>3)&3 + 1 - if bytecount == 4 { - return errCorruptDecodeHeader - } - - hsum, err := d.r.ReadByte() - if err != nil { - return err - } - - blockBits := int(flags)&0x07 + 1 - blockBytes := 0 - sum := 0x5a ^ flags - for i := byte(0); i < bytecount; i++ { - n, err := d.r.ReadByte() - if err != nil { - return err - } - sum ^= n - blockBytes |= int(n) << (i * 8) - } - if sum != hsum { // bad header checksum - return errCorruptDecodeHeader - } - blockBits += (blockBytes - 1) * 8 - - // create bit reader for block - d.br = limitBitReader(newRarBitReader(d.r), blockBits, errDecoderOutOfData) - d.lastBlock = flags&0x40 > 0 - - if flags&0x80 > 0 { - // read new code length tables and reinitialize huffman decoders - cl := d.codeLength[:] - err = readCodeLengthTable(d.br, cl, false) - if err != nil { - return err - } - d.mainDecoder.init(cl[:mainSize5]) - cl = cl[mainSize5:] - d.offsetDecoder.init(cl[:offsetSize5]) - cl = cl[offsetSize5:] - d.lowoffsetDecoder.init(cl[:lowoffsetSize5]) - cl = cl[lowoffsetSize5:] - 
d.lengthDecoder.init(cl) - } - return nil -} - -func slotToLength(br bitReader, n int) (int, error) { - if n >= 8 { - bits := uint(n/4 - 1) - n = (4 | (n & 3)) << bits - if bits > 0 { - b, err := br.readBits(bits) - if err != nil { - return 0, err - } - n |= b - } - } - n += 2 - return n, nil -} - -// readFilter5Data reads an encoded integer used in V5 filters. -func readFilter5Data(br bitReader) (int, error) { - // TODO: should data really be uint? (for 32bit ints). - // It will be masked later anyway by decode window mask. - bytes, err := br.readBits(2) - if err != nil { - return 0, err - } - bytes++ - - var data int - for i := 0; i < bytes; i++ { - n, err := br.readBits(8) - if err != nil { - return 0, err - } - data |= n << (uint(i) * 8) - } - return data, nil -} - -func readFilter(br bitReader) (*filterBlock, error) { - fb := new(filterBlock) - var err error - - fb.offset, err = readFilter5Data(br) - if err != nil { - return nil, err - } - fb.length, err = readFilter5Data(br) - if err != nil { - return nil, err - } - ftype, err := br.readBits(3) - if err != nil { - return nil, err - } - switch ftype { - case 0: - n, err := br.readBits(5) - if err != nil { - return nil, err - } - fb.filter = func(buf []byte, offset int64) ([]byte, error) { return filterDelta(n+1, buf) } - case 1: - fb.filter = func(buf []byte, offset int64) ([]byte, error) { return filterE8(0xe8, true, buf, offset) } - case 2: - fb.filter = func(buf []byte, offset int64) ([]byte, error) { return filterE8(0xe9, true, buf, offset) } - case 3: - fb.filter = filterArm - default: - return nil, errUnknownFilter - } - return fb, nil -} - -func (d *decoder50) decodeSym(win *window, sym int) (*filterBlock, error) { - switch { - case sym < 256: - // literal - win.writeByte(byte(sym)) - return nil, nil - case sym == 256: - f, err := readFilter(d.br) - f.offset += win.buffered() - return f, err - case sym == 257: - // use previous offset and length - case sym < 262: - i := sym - 258 - offset := d.offset[i] 
- copy(d.offset[1:i+1], d.offset[:i]) - d.offset[0] = offset - - sl, err := d.lengthDecoder.readSym(d.br) - if err != nil { - return nil, err - } - d.length, err = slotToLength(d.br, sl) - if err != nil { - return nil, err - } - default: - length, err := slotToLength(d.br, sym-262) - if err != nil { - return nil, err - } - - offset := 1 - slot, err := d.offsetDecoder.readSym(d.br) - if err != nil { - return nil, err - } - if slot < 4 { - offset += slot - } else { - bits := uint(slot/2 - 1) - offset += (2 | (slot & 1)) << bits - - if bits >= 4 { - if bits > 4 { - n, err := d.br.readBits(bits - 4) - if err != nil { - return nil, err - } - offset += n << 4 - } - n, err := d.lowoffsetDecoder.readSym(d.br) - if err != nil { - return nil, err - } - offset += n - } else { - n, err := d.br.readBits(bits) - if err != nil { - return nil, err - } - offset += n - } - } - if offset > 0x100 { - length++ - if offset > 0x2000 { - length++ - if offset > 0x40000 { - length++ - } - } - } - copy(d.offset[1:], d.offset[:]) - d.offset[0] = offset - d.length = length - } - win.copyBytes(d.length, d.offset[0]) - return nil, nil -} - -func (d *decoder50) fill(w *window) ([]*filterBlock, error) { - var fl []*filterBlock - - for w.available() > 0 { - sym, err := d.mainDecoder.readSym(d.br) - if err == nil { - var f *filterBlock - f, err = d.decodeSym(w, sym) - if f != nil { - fl = append(fl, f) - } - } else if err == io.EOF { - // reached end of the block - if d.lastBlock { - return fl, io.EOF - } - err = d.readBlockHeader() - } - if err != nil { - if err == io.EOF { - return fl, errDecoderOutOfData - } - return fl, err - } - } - return fl, nil -} diff --git a/vendor/github.com/nwaples/rardecode/decode_reader.go b/vendor/github.com/nwaples/rardecode/decode_reader.go deleted file mode 100644 index b346936ce3..0000000000 --- a/vendor/github.com/nwaples/rardecode/decode_reader.go +++ /dev/null @@ -1,290 +0,0 @@ -package rardecode - -import ( - "errors" - "io" -) - -const 
( - minWindowSize = 0x40000 - maxQueuedFilters = 8192 -) - -var ( - errTooManyFilters = errors.New("rardecode: too many filters") - errInvalidFilter = errors.New("rardecode: invalid filter") -) - -// filter functions take a byte slice, the current output offset and -// returns transformed data. -type filter func(b []byte, offset int64) ([]byte, error) - -// filterBlock is a block of data to be processed by a filter. -type filterBlock struct { - length int // length of block - offset int // bytes to be read before start of block - reset bool // drop all existing queued filters - filter filter // filter function -} - -// decoder is the interface for decoding compressed data -type decoder interface { - init(r io.ByteReader, reset bool) error // initialize decoder for current file - fill(w *window) ([]*filterBlock, error) // fill window with decoded data, returning any filters -} - -// window is a sliding window buffer. -type window struct { - buf []byte - mask int // buf length mask - r int // index in buf for reads (beginning) - w int // index in buf for writes (end) - l int // length of bytes to be processed by copyBytes - o int // offset of bytes to be processed by copyBytes -} - -// buffered returns the number of bytes yet to be read from window -func (w *window) buffered() int { return (w.w - w.r) & w.mask } - -// available returns the number of bytes that can be written before the window is full -func (w *window) available() int { return (w.r - w.w - 1) & w.mask } - -func (w *window) reset(log2size uint, clear bool) { - size := 1 << log2size - if size < minWindowSize { - size = minWindowSize - } - if size > len(w.buf) { - b := make([]byte, size) - if clear { - w.w = 0 - } else if len(w.buf) > 0 { - n := copy(b, w.buf[w.w:]) - n += copy(b[n:], w.buf[:w.w]) - w.w = n - } - w.buf = b - w.mask = size - 1 - } else if clear { - for i := range w.buf { - w.buf[i] = 0 - } - w.w = 0 - } - w.r = w.w -} - -// writeByte writes c to the end of the window -func (w *window) 
writeByte(c byte) { - w.buf[w.w] = c - w.w = (w.w + 1) & w.mask -} - -// copyBytes copies len bytes at off distance from the end -// to the end of the window. -func (w *window) copyBytes(len, off int) { - len &= w.mask - - n := w.available() - if len > n { - // if there is not enough space availaible we copy - // as much as we can and save the offset and length - // of the remaining data to be copied later. - w.l = len - n - w.o = off - len = n - } - - i := (w.w - off) & w.mask - for ; len > 0; len-- { - w.buf[w.w] = w.buf[i] - w.w = (w.w + 1) & w.mask - i = (i + 1) & w.mask - } -} - -// read reads bytes from the beginning of the window into p -func (w *window) read(p []byte) (n int) { - if w.r > w.w { - n = copy(p, w.buf[w.r:]) - w.r = (w.r + n) & w.mask - p = p[n:] - } - if w.r < w.w { - l := copy(p, w.buf[w.r:w.w]) - w.r += l - n += l - } - if w.l > 0 && n > 0 { - // if we have successfully read data, copy any - // leftover data from a previous copyBytes. - l := w.l - w.l = 0 - w.copyBytes(l, w.o) - } - return n -} - -// decodeReader implements io.Reader for decoding compressed data in RAR archives. -type decodeReader struct { - win window // sliding window buffer used as decode dictionary - dec decoder // decoder being used to unpack file - tot int64 // total bytes read - buf []byte // filter input/output buffer - outbuf []byte // filter output not yet read - err error - filters []*filterBlock // list of filterBlock's, each with offset relative to previous in list -} - -func (d *decodeReader) init(r io.ByteReader, dec decoder, winsize uint, reset bool) error { - if reset { - d.filters = nil - } - d.err = nil - d.outbuf = nil - d.tot = 0 - d.win.reset(winsize, reset) - d.dec = dec - return d.dec.init(r, reset) -} - -func (d *decodeReader) readErr() error { - err := d.err - d.err = nil - return err -} - -// queueFilter adds a filterBlock to the end decodeReader's filters. 
-func (d *decodeReader) queueFilter(f *filterBlock) error { - if f.reset { - d.filters = nil - } - if len(d.filters) >= maxQueuedFilters { - return errTooManyFilters - } - // offset & length must be < window size - f.offset &= d.win.mask - f.length &= d.win.mask - // make offset relative to previous filter in list - for _, fb := range d.filters { - if f.offset < fb.offset { - // filter block must not start before previous filter - return errInvalidFilter - } - f.offset -= fb.offset - } - d.filters = append(d.filters, f) - return nil -} - -// processFilters processes any filters valid at the current read index -// and stores the output in outbuf. -func (d *decodeReader) processFilters() (err error) { - f := d.filters[0] - if f.offset > 0 { - return nil - } - d.filters = d.filters[1:] - if d.win.buffered() < f.length { - // fill() didn't return enough bytes - err = d.readErr() - if err == nil || err == io.EOF { - return errInvalidFilter - } - return err - } - - if cap(d.buf) < f.length { - d.buf = make([]byte, f.length) - } - d.outbuf = d.buf[:f.length] - n := d.win.read(d.outbuf) - for { - // run filter passing buffer and total bytes read so far - d.outbuf, err = f.filter(d.outbuf, d.tot) - if err != nil { - return err - } - if cap(d.outbuf) > cap(d.buf) { - // Filter returned a bigger buffer, save it for future filters. - d.buf = d.outbuf - } - if len(d.filters) == 0 { - return nil - } - f = d.filters[0] - - if f.offset != 0 { - // next filter not at current offset - f.offset -= n - return nil - } - if f.length != len(d.outbuf) { - return errInvalidFilter - } - d.filters = d.filters[1:] - - if cap(d.outbuf) < cap(d.buf) { - // Filter returned a smaller buffer. Copy it back to the saved buffer - // so the next filter can make use of the larger buffer if needed. - d.outbuf = append(d.buf[:0], d.outbuf...) 
- } - } -} - -// fill fills the decodeReader's window -func (d *decodeReader) fill() { - if d.err != nil { - return - } - var fl []*filterBlock - fl, d.err = d.dec.fill(&d.win) // fill window using decoder - for _, f := range fl { - err := d.queueFilter(f) - if err != nil { - d.err = err - return - } - } -} - -// Read decodes data and stores it in p. -func (d *decodeReader) Read(p []byte) (n int, err error) { - if len(d.outbuf) == 0 { - // no filter output, see if we need to create more - if d.win.buffered() == 0 { - // fill empty window - d.fill() - if d.win.buffered() == 0 { - return 0, d.readErr() - } - } else if len(d.filters) > 0 { - f := d.filters[0] - if f.offset == 0 && f.length > d.win.buffered() { - d.fill() // filter at current offset needs more data - } - } - if len(d.filters) > 0 { - if err := d.processFilters(); err != nil { - return 0, err - } - } - } - if len(d.outbuf) > 0 { - // copy filter output into p - n = copy(p, d.outbuf) - d.outbuf = d.outbuf[n:] - } else if len(d.filters) > 0 { - f := d.filters[0] - if f.offset < len(p) { - // only read data up to beginning of next filter - p = p[:f.offset] - } - n = d.win.read(p) // read directly from window - f.offset -= n // adjust first filter offset by bytes just read - } else { - n = d.win.read(p) // read directly from window - } - d.tot += int64(n) - return n, nil -} diff --git a/vendor/github.com/nwaples/rardecode/decrypt_reader.go b/vendor/github.com/nwaples/rardecode/decrypt_reader.go deleted file mode 100644 index bb9f279c43..0000000000 --- a/vendor/github.com/nwaples/rardecode/decrypt_reader.go +++ /dev/null @@ -1,126 +0,0 @@ -package rardecode - -import ( - "crypto/aes" - "crypto/cipher" - "io" -) - -// cipherBlockReader implements Block Mode decryption of an io.Reader object. 
-type cipherBlockReader struct { - r io.Reader - mode cipher.BlockMode - inbuf []byte // input buffer for partial data block - outbuf []byte // output buffer used when output slice < block size - n int // bytes read from outbuf - err error -} - -// read reads and decrypts one or more input blocks into p. -// len(p) must be >= cipher block size. -func (cr *cipherBlockReader) read(p []byte) (n int, err error) { - bs := cr.mode.BlockSize() - // round p down to a multiple of the block size - l := len(p) - len(p)%bs - p = p[:l] - - l = len(cr.inbuf) - if l > 0 { - // copy any buffered input into p - copy(p, cr.inbuf) - cr.inbuf = cr.inbuf[:0] - } - // read data for at least one block - n, err = io.ReadAtLeast(cr.r, p[l:], bs-l) - n += l - p = p[:n] - - l = n % bs - // check if p is a multiple of the cipher block size - if l > 0 { - n -= l - // save trailing partial block to process later - cr.inbuf = append(cr.inbuf, p[n:]...) - p = p[:n] - } - - if err != nil { - if err == io.ErrUnexpectedEOF || err == io.ErrShortBuffer { - // ignore trailing bytes < block size length - err = io.EOF - } - return 0, err - } - cr.mode.CryptBlocks(p, p) // decrypt block(s) - return n, nil -} - -// Read reads and decrypts data into p. -// If the input is not a multiple of the cipher block size, -// the trailing bytes will be ignored. -func (cr *cipherBlockReader) Read(p []byte) (n int, err error) { - for { - if cr.n < len(cr.outbuf) { - // return buffered output - n = copy(p, cr.outbuf[cr.n:]) - cr.n += n - return n, nil - } - if cr.err != nil { - err = cr.err - cr.err = nil - return 0, err - } - if len(p) >= cap(cr.outbuf) { - break - } - // p is not large enough to process a block, use outbuf instead - n, cr.err = cr.read(cr.outbuf[:cap(cr.outbuf)]) - cr.outbuf = cr.outbuf[:n] - cr.n = 0 - } - // read blocks into p - return cr.read(p) -} - -// ReadByte returns the next decrypted byte. 
-func (cr *cipherBlockReader) ReadByte() (byte, error) { - for { - if cr.n < len(cr.outbuf) { - c := cr.outbuf[cr.n] - cr.n++ - return c, nil - } - if cr.err != nil { - err := cr.err - cr.err = nil - return 0, err - } - // refill outbuf - var n int - n, cr.err = cr.read(cr.outbuf[:cap(cr.outbuf)]) - cr.outbuf = cr.outbuf[:n] - cr.n = 0 - } -} - -// newCipherBlockReader returns a cipherBlockReader that decrypts the given io.Reader using -// the provided block mode cipher. -func newCipherBlockReader(r io.Reader, mode cipher.BlockMode) *cipherBlockReader { - cr := &cipherBlockReader{r: r, mode: mode} - cr.outbuf = make([]byte, 0, mode.BlockSize()) - cr.inbuf = make([]byte, 0, mode.BlockSize()) - return cr -} - -// newAesDecryptReader returns a cipherBlockReader that decrypts input from a given io.Reader using AES. -// It will panic if the provided key is invalid. -func newAesDecryptReader(r io.Reader, key, iv []byte) *cipherBlockReader { - block, err := aes.NewCipher(key) - if err != nil { - panic(err) - } - mode := cipher.NewCBCDecrypter(block, iv) - - return newCipherBlockReader(r, mode) -} diff --git a/vendor/github.com/nwaples/rardecode/filters.go b/vendor/github.com/nwaples/rardecode/filters.go deleted file mode 100644 index a9eb0407d9..0000000000 --- a/vendor/github.com/nwaples/rardecode/filters.go +++ /dev/null @@ -1,416 +0,0 @@ -package rardecode - -import ( - "bytes" - "encoding/binary" - "hash/crc32" - "io" -) - -const ( - fileSize = 0x1000000 - - vmGlobalAddr = 0x3C000 - vmGlobalSize = 0x02000 - vmFixedGlobalSize = 0x40 - - maxUint32 = 1<<32 - 1 -) - -// v3Filter is the interface type for RAR V3 filters. -// v3Filter performs the same function as the filter type, except that it also takes -// the initial register values r, and global data as input for the RAR V3 VM. -type v3Filter func(r map[int]uint32, global, buf []byte, offset int64) ([]byte, error) - -var ( - // standardV3Filters is a list of known filters. 
We can replace the use of a vm - // filter with a custom filter function. - standardV3Filters = []struct { - crc uint32 // crc of code byte slice for filter - len int // length of code byte slice for filter - f v3Filter // replacement filter function - }{ - {0xad576887, 53, e8FilterV3}, - {0x3cd7e57e, 57, e8e9FilterV3}, - {0x3769893f, 120, itaniumFilterV3}, - {0x0e06077d, 29, deltaFilterV3}, - {0x1c2c5dc8, 149, filterRGBV3}, - {0xbc85e701, 216, filterAudioV3}, - } - - // itanium filter byte masks - byteMask = []int{4, 4, 6, 6, 0, 0, 7, 7, 4, 4, 0, 0, 4, 4, 0, 0} -) - -func filterE8(c byte, v5 bool, buf []byte, offset int64) ([]byte, error) { - off := int32(offset) - for b := buf; len(b) >= 5; { - ch := b[0] - b = b[1:] - off++ - if ch != 0xe8 && ch != c { - continue - } - if v5 { - off %= fileSize - } - addr := int32(binary.LittleEndian.Uint32(b)) - if addr < 0 { - if addr+off >= 0 { - binary.LittleEndian.PutUint32(b, uint32(addr+fileSize)) - } - } else if addr < fileSize { - binary.LittleEndian.PutUint32(b, uint32(addr-off)) - } - off += 4 - b = b[4:] - } - return buf, nil -} - -func e8FilterV3(r map[int]uint32, global, buf []byte, offset int64) ([]byte, error) { - return filterE8(0xe8, false, buf, offset) -} - -func e8e9FilterV3(r map[int]uint32, global, buf []byte, offset int64) ([]byte, error) { - return filterE8(0xe9, false, buf, offset) -} - -func getBits(buf []byte, pos, count uint) uint32 { - n := binary.LittleEndian.Uint32(buf[pos/8:]) - n >>= pos & 7 - mask := uint32(maxUint32) >> (32 - count) - return n & mask -} - -func setBits(buf []byte, pos, count uint, bits uint32) { - mask := uint32(maxUint32) >> (32 - count) - mask <<= pos & 7 - bits <<= pos & 7 - n := binary.LittleEndian.Uint32(buf[pos/8:]) - n = (n & ^mask) | (bits & mask) - binary.LittleEndian.PutUint32(buf[pos/8:], n) -} - -func itaniumFilterV3(r map[int]uint32, global, buf []byte, offset int64) ([]byte, error) { - fileOffset := uint32(offset) >> 4 - - for b := buf; len(b) > 21; b = b[16:] { - 
c := int(b[0]&0x1f) - 0x10 - if c >= 0 { - mask := byteMask[c] - if mask != 0 { - for i := uint(0); i <= 2; i++ { - if mask&(1<= 2*l { - res = buf[l : 2*l] // use unused capacity - } else { - res = make([]byte, l, 2*l) - } - - i := 0 - for j := 0; j < n; j++ { - var c byte - for k := j; k < len(res); k += n { - c -= buf[i] - i++ - res[k] = c - } - } - return res, nil -} - -func deltaFilterV3(r map[int]uint32, global, buf []byte, offset int64) ([]byte, error) { - return filterDelta(int(r[0]), buf) -} - -func abs(n int) int { - if n < 0 { - n = -n - } - return n -} - -func filterRGBV3(r map[int]uint32, global, buf []byte, offset int64) ([]byte, error) { - width := int(r[0] - 3) - posR := int(r[1]) - if posR < 0 || width < 0 { - return buf, nil - } - - var res []byte - l := len(buf) - if cap(buf) >= 2*l { - res = buf[l : 2*l] // use unused capacity - } else { - res = make([]byte, l, 2*l) - } - - for c := 0; c < 3; c++ { - var prevByte int - for i := c; i < len(res); i += 3 { - var predicted int - upperPos := i - width - if upperPos >= 3 { - upperByte := int(res[upperPos]) - upperLeftByte := int(res[upperPos-3]) - predicted = prevByte + upperByte - upperLeftByte - pa := abs(predicted - prevByte) - pb := abs(predicted - upperByte) - pc := abs(predicted - upperLeftByte) - if pa <= pb && pa <= pc { - predicted = prevByte - } else if pb <= pc { - predicted = upperByte - } else { - predicted = upperLeftByte - } - } else { - predicted = prevByte - } - prevByte = (predicted - int(buf[0])) & 0xFF - res[i] = uint8(prevByte) - buf = buf[1:] - } - - } - for i := posR; i < len(res)-2; i += 3 { - c := res[i+1] - res[i] += c - res[i+2] += c - } - return res, nil -} - -func filterAudioV3(r map[int]uint32, global, buf []byte, offset int64) ([]byte, error) { - var res []byte - l := len(buf) - if cap(buf) >= 2*l { - res = buf[l : 2*l] // use unused capacity - } else { - res = make([]byte, l, 2*l) - } - - chans := int(r[0]) - for c := 0; c < chans; c++ { - var prevByte, byteCount int - 
var diff [7]int - var d, k [3]int - - for i := c; i < len(res); i += chans { - predicted := prevByte<<3 + k[0]*d[0] + k[1]*d[1] + k[2]*d[2] - predicted = int(int8(predicted >> 3)) - - curByte := int(int8(buf[0])) - buf = buf[1:] - predicted -= curByte - res[i] = uint8(predicted) - - dd := curByte << 3 - diff[0] += abs(dd) - diff[1] += abs(dd - d[0]) - diff[2] += abs(dd + d[0]) - diff[3] += abs(dd - d[1]) - diff[4] += abs(dd + d[1]) - diff[5] += abs(dd - d[2]) - diff[6] += abs(dd + d[2]) - - prevDelta := int(int8(predicted - prevByte)) - prevByte = predicted - d[2] = d[1] - d[1] = prevDelta - d[0] - d[0] = prevDelta - - if byteCount&0x1f == 0 { - min := diff[0] - diff[0] = 0 - n := 0 - for j := 1; j < len(diff); j++ { - if diff[j] < min { - min = diff[j] - n = j - } - diff[j] = 0 - } - n-- - if n >= 0 { - m := n / 2 - if n%2 == 0 { - if k[m] >= -16 { - k[m]-- - } - } else { - if k[m] < 16 { - k[m]++ - } - } - } - } - byteCount++ - } - - } - return res, nil -} - -func filterArm(buf []byte, offset int64) ([]byte, error) { - for i := 0; len(buf)-i > 3; i += 4 { - if buf[i+3] == 0xeb { - n := uint(buf[i]) - n += uint(buf[i+1]) * 0x100 - n += uint(buf[i+2]) * 0x10000 - n -= (uint(offset) + uint(i)) / 4 - buf[i] = byte(n) - buf[i+1] = byte(n >> 8) - buf[i+2] = byte(n >> 16) - } - } - return buf, nil -} - -type vmFilter struct { - execCount uint32 - global []byte - static []byte - code []command -} - -// execute implements v3filter type for VM based RAR 3 filters. 
-func (f *vmFilter) execute(r map[int]uint32, global, buf []byte, offset int64) ([]byte, error) { - if len(buf) > vmGlobalAddr { - return buf, errInvalidFilter - } - v := newVM(buf) - - // register setup - v.r[3] = vmGlobalAddr - v.r[4] = uint32(len(buf)) - v.r[5] = f.execCount - for i, n := range r { - v.r[i] = n - } - - // vm global data memory block - vg := v.m[vmGlobalAddr : vmGlobalAddr+vmGlobalSize] - - // initialize fixed global memory - for i, n := range v.r[:vmRegs-1] { - binary.LittleEndian.PutUint32(vg[i*4:], n) - } - binary.LittleEndian.PutUint32(vg[0x1c:], uint32(len(buf))) - binary.LittleEndian.PutUint64(vg[0x24:], uint64(offset)) - binary.LittleEndian.PutUint32(vg[0x2c:], f.execCount) - - // registers - v.r[6] = uint32(offset) - - // copy program global memory - var n int - if len(f.global) > 0 { - n = copy(vg[vmFixedGlobalSize:], f.global) // use saved global instead - } else { - n = copy(vg[vmFixedGlobalSize:], global) - } - copy(vg[vmFixedGlobalSize+n:], f.static) - - v.execute(f.code) - - f.execCount++ - - // keep largest global buffer - if cap(global) > cap(f.global) { - f.global = global[:0] - } else if len(f.global) > 0 { - f.global = f.global[:0] - } - - // check for global data to be saved for next program execution - globalSize := binary.LittleEndian.Uint32(vg[0x30:]) - if globalSize > 0 { - if globalSize > vmGlobalSize-vmFixedGlobalSize { - globalSize = vmGlobalSize - vmFixedGlobalSize - } - if cap(f.global) < int(globalSize) { - f.global = make([]byte, globalSize) - } else { - f.global = f.global[:globalSize] - } - copy(f.global, vg[vmFixedGlobalSize:]) - } - - // find program output - length := binary.LittleEndian.Uint32(vg[0x1c:]) & vmMask - start := binary.LittleEndian.Uint32(vg[0x20:]) & vmMask - if start+length > vmSize { - // TODO: error - start = 0 - length = 0 - } - if start != 0 && cap(v.m) > cap(buf) { - // Initial buffer was to small for vm. 
- // Copy output to beginning of vm memory so that decodeReader - // will re-use the newly allocated vm memory and we will not - // have to reallocate again next time. - copy(v.m, v.m[start:start+length]) - start = 0 - } - return v.m[start : start+length], nil -} - -// getV3Filter returns a V3 filter function from a code byte slice. -func getV3Filter(code []byte) (v3Filter, error) { - // check if filter is a known standard filter - c := crc32.ChecksumIEEE(code) - for _, f := range standardV3Filters { - if f.crc == c && f.len == len(code) { - return f.f, nil - } - } - - // create new vm filter - f := new(vmFilter) - r := newRarBitReader(bytes.NewReader(code[1:])) // skip first xor byte check - - // read static data - n, err := r.readBits(1) - if err != nil { - return nil, err - } - if n > 0 { - m, err := r.readUint32() - if err != nil { - return nil, err - } - f.static = make([]byte, m+1) - err = r.readFull(f.static) - if err != nil { - return nil, err - } - } - - f.code, err = readCommands(r) - if err == io.EOF { - err = nil - } - - return f.execute, err -} diff --git a/vendor/github.com/nwaples/rardecode/huffman.go b/vendor/github.com/nwaples/rardecode/huffman.go deleted file mode 100644 index 4acb69d5a9..0000000000 --- a/vendor/github.com/nwaples/rardecode/huffman.go +++ /dev/null @@ -1,208 +0,0 @@ -package rardecode - -import ( - "errors" - "io" -) - -const ( - maxCodeLength = 15 // maximum code length in bits - maxQuickBits = 10 - maxQuickSize = 1 << maxQuickBits -) - -var ( - errHuffDecodeFailed = errors.New("rardecode: huffman decode failed") - errInvalidLengthTable = errors.New("rardecode: invalid huffman code length table") -) - -type huffmanDecoder struct { - limit [maxCodeLength + 1]int - pos [maxCodeLength + 1]int - symbol []int - min uint - quickbits uint - quicklen [maxQuickSize]uint - quicksym [maxQuickSize]int -} - -func (h *huffmanDecoder) init(codeLengths []byte) { - var count [maxCodeLength + 1]int - - for _, n := range 
codeLengths { - if n == 0 { - continue - } - count[n]++ - } - - h.pos[0] = 0 - h.limit[0] = 0 - h.min = 0 - for i := uint(1); i <= maxCodeLength; i++ { - h.limit[i] = h.limit[i-1] + count[i]<<(maxCodeLength-i) - h.pos[i] = h.pos[i-1] + count[i-1] - if h.min == 0 && h.limit[i] > 0 { - h.min = i - } - } - - if cap(h.symbol) >= len(codeLengths) { - h.symbol = h.symbol[:len(codeLengths)] - for i := range h.symbol { - h.symbol[i] = 0 - } - } else { - h.symbol = make([]int, len(codeLengths)) - } - - copy(count[:], h.pos[:]) - for i, n := range codeLengths { - if n != 0 { - h.symbol[count[n]] = i - count[n]++ - } - } - - if len(codeLengths) >= 298 { - h.quickbits = maxQuickBits - } else { - h.quickbits = maxQuickBits - 3 - } - - bits := uint(1) - for i := 0; i < 1<= h.limit[bits] && bits < maxCodeLength { - bits++ - } - h.quicklen[i] = bits - - dist := v - h.limit[bits-1] - dist >>= (maxCodeLength - bits) - - pos := h.pos[bits] + dist - if pos < len(h.symbol) { - h.quicksym[i] = h.symbol[pos] - } else { - h.quicksym[i] = 0 - } - } -} - -func (h *huffmanDecoder) readSym(r bitReader) (int, error) { - bits := uint(maxCodeLength) - v, err := r.readBits(maxCodeLength) - if err != nil { - if err != io.EOF { - return 0, err - } - // fall back to 1 bit at a time if we read past EOF - for i := uint(1); i <= maxCodeLength; i++ { - b, err := r.readBits(1) - if err != nil { - return 0, err // not enough bits return error - } - v |= b << (maxCodeLength - i) - if v < h.limit[i] { - bits = i - break - } - } - } else { - if v < h.limit[h.quickbits] { - i := v >> (maxCodeLength - h.quickbits) - r.unreadBits(maxCodeLength - h.quicklen[i]) - return h.quicksym[i], nil - } - - for i, n := range h.limit[h.min:] { - if v < n { - bits = h.min + uint(i) - r.unreadBits(maxCodeLength - bits) - break - } - } - } - - dist := v - h.limit[bits-1] - dist >>= maxCodeLength - bits - - pos := h.pos[bits] + dist - if pos >= len(h.symbol) { - return 0, errHuffDecodeFailed - } - - return h.symbol[pos], nil -} 
- -// readCodeLengthTable reads a new code length table into codeLength from br. -// If addOld is set the old table is added to the new one. -func readCodeLengthTable(br bitReader, codeLength []byte, addOld bool) error { - var bitlength [20]byte - for i := 0; i < len(bitlength); i++ { - n, err := br.readBits(4) - if err != nil { - return err - } - if n == 0xf { - cnt, err := br.readBits(4) - if err != nil { - return err - } - if cnt > 0 { - // array already zero'd dont need to explicitly set - i += cnt + 1 - continue - } - } - bitlength[i] = byte(n) - } - - var bl huffmanDecoder - bl.init(bitlength[:]) - - for i := 0; i < len(codeLength); i++ { - l, err := bl.readSym(br) - if err != nil { - return err - } - - if l < 16 { - if addOld { - codeLength[i] = (codeLength[i] + byte(l)) & 0xf - } else { - codeLength[i] = byte(l) - } - continue - } - - var count int - var value byte - - switch l { - case 16, 18: - count, err = br.readBits(3) - count += 3 - default: - count, err = br.readBits(7) - count += 11 - } - if err != nil { - return err - } - if l < 18 { - if i == 0 { - return errInvalidLengthTable - } - value = codeLength[i-1] - } - for ; count > 0 && i < len(codeLength); i++ { - codeLength[i] = value - count-- - } - i-- - } - return nil -} diff --git a/vendor/github.com/nwaples/rardecode/ppm_model.go b/vendor/github.com/nwaples/rardecode/ppm_model.go deleted file mode 100644 index 58a545aa92..0000000000 --- a/vendor/github.com/nwaples/rardecode/ppm_model.go +++ /dev/null @@ -1,1096 +0,0 @@ -package rardecode - -import ( - "errors" - "io" -) - -const ( - rangeBottom = 1 << 15 - rangeTop = 1 << 24 - - maxFreq = 124 - - intBits = 7 - periodBits = 7 - binScale = 1 << (intBits + periodBits) - - n0 = 1 - n1 = 4 - n2 = 4 - n3 = 4 - n4 = (128 + 3 - 1*n1 - 2*n2 - 3*n3) / 4 - nIndexes = n0 + n1 + n2 + n3 + n4 - - // memory is allocated in units. A unit contains unitSize number of bytes. - // A unit can store one context or two states. 
- unitSize = 12 - - maxUint16 = 1<<16 - 1 - freeMark = -1 -) - -var ( - errCorruptPPM = errors.New("rardecode: corrupt ppm data") - - expEscape = []byte{25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2} - initBinEsc = []uint16{0x3CDD, 0x1F3F, 0x59BF, 0x48F3, 0x64A1, 0x5ABC, 0x6632, 0x6051} - - ns2Index [256]byte - ns2BSIndex [256]byte - - // units2Index maps the number of units in a block to a freelist index - units2Index [128 + 1]byte - // index2Units maps a freelist index to the size of the block in units - index2Units [nIndexes]int32 -) - -func init() { - ns2BSIndex[0] = 2 * 0 - ns2BSIndex[1] = 2 * 1 - for i := 2; i < 11; i++ { - ns2BSIndex[i] = 2 * 2 - } - for i := 11; i < 256; i++ { - ns2BSIndex[i] = 2 * 3 - } - - var j, n byte - for i := range ns2Index { - ns2Index[i] = n - if j <= 3 { - n++ - j = n - } else { - j-- - } - } - - var ii byte - var iu, units int32 - for i, n := range []int{n0, n1, n2, n3, n4} { - for j := 0; j < n; j++ { - units += int32(i) - index2Units[ii] = units - for iu <= units { - units2Index[iu] = ii - iu++ - } - ii++ - } - } -} - -type rangeCoder struct { - br io.ByteReader - code uint32 - low uint32 - rnge uint32 -} - -func (r *rangeCoder) init(br io.ByteReader) error { - r.br = br - r.low = 0 - r.rnge = ^uint32(0) - for i := 0; i < 4; i++ { - c, err := r.br.ReadByte() - if err != nil { - return err - } - r.code = r.code<<8 | uint32(c) - } - return nil -} - -func (r *rangeCoder) currentCount(scale uint32) uint32 { - r.rnge /= scale - return (r.code - r.low) / r.rnge -} - -func (r *rangeCoder) normalize() error { - for { - if r.low^(r.low+r.rnge) >= rangeTop { - if r.rnge >= rangeBottom { - return nil - } - r.rnge = -r.low & (rangeBottom - 1) - } - c, err := r.br.ReadByte() - if err != nil { - return err - } - r.code = r.code<<8 | uint32(c) - r.rnge <<= 8 - r.low <<= 8 - } -} - -func (r *rangeCoder) decode(lowCount, highCount uint32) error { - r.low += r.rnge * lowCount - r.rnge *= highCount - lowCount - - return r.normalize() -} - 
-type see2Context struct { - summ uint16 - shift byte - count byte -} - -func newSee2Context(i uint16) see2Context { - return see2Context{i << (periodBits - 4), (periodBits - 4), 4} -} - -func (s *see2Context) mean() uint32 { - if s == nil { - return 1 - } - n := s.summ >> s.shift - if n == 0 { - return 1 - } - s.summ -= n - return uint32(n) -} - -func (s *see2Context) update() { - if s == nil || s.shift >= periodBits { - return - } - s.count-- - if s.count == 0 { - s.summ += s.summ - s.count = 3 << s.shift - s.shift++ - } -} - -type state struct { - sym byte - freq byte - - // succ can point to a context or byte in memory. - // A context pointer is a positive integer. It is an index into the states - // array that points to the first of two states which the context is - // marshalled into. - // A byte pointer is a negative integer. The magnitude represents the position - // in bytes from the bottom of the memory. As memory is modelled as an array of - // states, this is used to calculate which state, and where in the state the - // byte is stored. - // A zero value represents a nil pointer. - succ int32 -} - -// uint16 return a uint16 stored in the sym and freq fields of a state -func (s state) uint16() uint16 { return uint16(s.sym) | uint16(s.freq)<<8 } - -// setUint16 stores a uint16 in the sym and freq fields of a state -func (s *state) setUint16(n uint16) { s.sym = byte(n); s.freq = byte(n >> 8) } - -// A context is marshalled into a slice of two states. -// The first state contains the number of states, and the suffix pointer. -// If there is only one state, the second state contains that state. -// If there is more than one state, the second state contains the summFreq -// and the index to the slice of states. 
-type context struct { - i int32 // index into the states array for context - s []state // slice of two states representing context - a *subAllocator -} - -// succPtr returns a pointer value for the context to be stored in a state.succ -func (c *context) succPtr() int32 { return c.i } - -func (c *context) numStates() int { return int(c.s[0].uint16()) } - -func (c *context) setNumStates(n int) { c.s[0].setUint16(uint16(n)) } - -func (c *context) statesIndex() int32 { return c.s[1].succ } - -func (c *context) setStatesIndex(n int32) { c.s[1].succ = n } - -func (c *context) suffix() *context { return c.a.succContext(c.s[0].succ) } - -func (c *context) setSuffix(sc *context) { c.s[0].succ = sc.i } - -func (c *context) summFreq() uint16 { return c.s[1].uint16() } - -func (c *context) setSummFreq(f uint16) { c.s[1].setUint16(f) } - -func (c *context) notEq(ctx *context) bool { return c.i != ctx.i } - -func (c *context) states() []state { - if ns := int32(c.s[0].uint16()); ns != 1 { - i := c.s[1].succ - return c.a.states[i : i+ns] - } - return c.s[1:] -} - -// shrinkStates shrinks the state list down to size states -func (c *context) shrinkStates(states []state, size int) []state { - i1 := units2Index[(len(states)+1)>>1] - i2 := units2Index[(size+1)>>1] - - if size == 1 { - // store state in context, and free states block - n := c.statesIndex() - c.s[1] = states[0] - states = c.s[1:] - c.a.addFreeBlock(n, i1) - } else if i1 != i2 { - if n := c.a.removeFreeBlock(i2); n > 0 { - // allocate new block and copy - copy(c.a.states[n:], states[:size]) - states = c.a.states[n:] - // free old block - c.a.addFreeBlock(c.statesIndex(), i1) - c.setStatesIndex(n) - } else { - // split current block, and free units not needed - n = c.statesIndex() + index2Units[i2]<<1 - u := index2Units[i1] - index2Units[i2] - c.a.freeUnits(n, u) - } - } - c.setNumStates(size) - return states[:size] -} - -// expandStates expands the states list by one -func (c *context) expandStates() []state { - states 
:= c.states() - ns := len(states) - if ns == 1 { - s := states[0] - n := c.a.allocUnits(1) - if n == 0 { - return nil - } - c.setStatesIndex(n) - states = c.a.states[n:] - states[0] = s - } else if ns&0x1 == 0 { - u := ns >> 1 - i1 := units2Index[u] - i2 := units2Index[u+1] - if i1 != i2 { - n := c.a.allocUnits(i2) - if n == 0 { - return nil - } - copy(c.a.states[n:], states) - c.a.addFreeBlock(c.statesIndex(), i1) - c.setStatesIndex(n) - states = c.a.states[n:] - } - } - c.setNumStates(ns + 1) - return states[:ns+1] -} - -type subAllocator struct { - // memory for allocation is split into two heaps - - heap1MaxBytes int32 // maximum bytes available in heap1 - heap1Lo int32 // heap1 bottom in number of bytes - heap1Hi int32 // heap1 top in number of bytes - heap2Lo int32 // heap2 bottom index in states - heap2Hi int32 // heap2 top index in states - glueCount int - - // Each freeList entry contains an index into states for the beginning - // of a free block. The first state in that block may contain an index - // to another free block and so on. The size of the free block in units - // (2 states) for that freeList index can be determined from the - // index2Units array. - freeList [nIndexes]int32 - - // Instead of bytes, memory is represented by a slice of states. - // context's are marshalled to and from a pair of states. - // multiple bytes are stored in a state. - states []state -} - -func (a *subAllocator) init(maxMB int) { - bytes := int32(maxMB) << 20 - heap2Units := bytes / 8 / unitSize * 7 - a.heap1MaxBytes = bytes - heap2Units*unitSize - // Add one for the case when bytes are not a multiple of unitSize - heap1Units := a.heap1MaxBytes/unitSize + 1 - // Calculate total size in state's. Add 1 unit so we can reserve the first unit. - // This will allow us to use the zero index as a nil pointer. 
- n := int(1+heap1Units+heap2Units) * 2 - if cap(a.states) > n { - a.states = a.states[:n] - } else { - a.states = make([]state, n) - } -} - -func (a *subAllocator) restart() { - // Pad heap1 start by 1 unit and enough bytes so that there is no - // gap between heap1 end and heap2 start. - a.heap1Lo = unitSize + (unitSize - a.heap1MaxBytes%unitSize) - a.heap1Hi = unitSize + (a.heap1MaxBytes/unitSize+1)*unitSize - a.heap2Lo = a.heap1Hi / unitSize * 2 - a.heap2Hi = int32(len(a.states)) - a.glueCount = 0 - for i := range a.freeList { - a.freeList[i] = 0 - } - for i := range a.states { - a.states[i] = state{} - } -} - -// pushByte puts a byte on the heap and returns a state.succ index that -// can be used to retrieve it. -func (a *subAllocator) pushByte(c byte) int32 { - si := a.heap1Lo / 6 // state index - oi := a.heap1Lo % 6 // byte position in state - switch oi { - case 0: - a.states[si].sym = c - case 1: - a.states[si].freq = c - default: - n := (uint(oi) - 2) * 8 - mask := ^(uint32(0xFF) << n) - succ := uint32(a.states[si].succ) & mask - succ |= uint32(c) << n - a.states[si].succ = int32(succ) - } - a.heap1Lo++ - if a.heap1Lo >= a.heap1Hi { - return 0 - } - return -a.heap1Lo -} - -// popByte reverses the previous pushByte -func (a *subAllocator) popByte() { a.heap1Lo-- } - -// succByte returns a byte from the heap given a state.succ index -func (a *subAllocator) succByte(i int32) byte { - i = -i - si := i / 6 - oi := i % 6 - switch oi { - case 0: - return a.states[si].sym - case 1: - return a.states[si].freq - default: - n := (uint(oi) - 2) * 8 - succ := uint32(a.states[si].succ) >> n - return byte(succ & 0xff) - } -} - -// succContext returns a context given a state.succ index -func (a *subAllocator) succContext(i int32) *context { - if i <= 0 { - return nil - } - return &context{i: i, s: a.states[i : i+2 : i+2], a: a} -} - -// succIsNil returns whether a state.succ points to nothing -func (a *subAllocator) succIsNil(i int32) bool { return i == 0 } - -// 
nextByteAddr takes a state.succ value representing a pointer -// to a byte, and returns the next bytes address -func (a *subAllocator) nextByteAddr(n int32) int32 { return n - 1 } - -func (a *subAllocator) removeFreeBlock(i byte) int32 { - n := a.freeList[i] - if n != 0 { - a.freeList[i] = a.states[n].succ - a.states[n] = state{} - } - return n -} - -func (a *subAllocator) addFreeBlock(n int32, i byte) { - a.states[n].succ = a.freeList[i] - a.freeList[i] = n -} - -func (a *subAllocator) freeUnits(n, u int32) { - i := units2Index[u] - if u != index2Units[i] { - i-- - a.addFreeBlock(n, i) - u -= index2Units[i] - n += index2Units[i] << 1 - i = units2Index[u] - } - a.addFreeBlock(n, i) -} - -func (a *subAllocator) glueFreeBlocks() { - var freeIndex int32 - - for i, n := range a.freeList { - s := state{succ: freeMark} - s.setUint16(uint16(index2Units[i])) - for n != 0 { - states := a.states[n:] - states[1].succ = freeIndex - freeIndex = n - n = states[0].succ - states[0] = s - } - a.freeList[i] = 0 - } - - for i := freeIndex; i != 0; i = a.states[i+1].succ { - if a.states[i].succ != freeMark { - continue - } - u := int32(a.states[i].uint16()) - states := a.states[i+u<<1:] - for len(states) > 0 && states[0].succ == freeMark { - u += int32(states[0].uint16()) - if u > maxUint16 { - break - } - states[0].succ = 0 - a.states[i].setUint16(uint16(u)) - states = a.states[i+u<<1:] - } - } - - for n := freeIndex; n != 0; n = a.states[n+1].succ { - if a.states[n].succ != freeMark { - continue - } - a.states[n].succ = 0 - u := int32(a.states[n].uint16()) - m := n - for u > 128 { - a.addFreeBlock(m, nIndexes-1) - u -= 128 - m += 256 - } - a.freeUnits(m, u) - } -} - -func (a *subAllocator) allocUnitsRare(index byte) int32 { - if a.glueCount == 0 { - a.glueCount = 255 - a.glueFreeBlocks() - if n := a.removeFreeBlock(index); n > 0 { - return n - } - } - // try to find a larger free block and split it - for i := index + 1; i < nIndexes; i++ { - if n := a.removeFreeBlock(i); n > 0 { - u 
:= index2Units[i] - index2Units[index] - a.freeUnits(n+index2Units[index]<<1, u) - return n - } - } - a.glueCount-- - - // try to allocate units from the top of heap1 - n := a.heap1Hi - index2Units[index]*unitSize - if n > a.heap1Lo { - a.heap1Hi = n - return a.heap1Hi / unitSize * 2 - } - return 0 -} - -func (a *subAllocator) allocUnits(i byte) int32 { - // try to allocate a free block - if n := a.removeFreeBlock(i); n > 0 { - return n - } - // try to allocate from the bottom of heap2 - n := index2Units[i] << 1 - if a.heap2Lo+n <= a.heap2Hi { - lo := a.heap2Lo - a.heap2Lo += n - return lo - } - return a.allocUnitsRare(i) -} - -func (a *subAllocator) newContext(s state, suffix *context) *context { - var n int32 - if a.heap2Lo < a.heap2Hi { - // allocate from top of heap2 - a.heap2Hi -= 2 - n = a.heap2Hi - } else if n = a.removeFreeBlock(1); n == 0 { - if n = a.allocUnitsRare(1); n == 0 { - return nil - } - } - c := &context{i: n, s: a.states[n : n+2 : n+2], a: a} - c.s[0] = state{} - c.setNumStates(1) - c.s[1] = s - if suffix != nil { - c.setSuffix(suffix) - } - return c -} - -func (a *subAllocator) newContextSize(ns int) *context { - c := a.newContext(state{}, nil) - c.setNumStates(ns) - i := units2Index[(ns+1)>>1] - n := a.allocUnits(i) - c.setStatesIndex(n) - return c -} - -type model struct { - maxOrder int - orderFall int - initRL int - runLength int - prevSuccess byte - escCount byte - prevSym byte - initEsc byte - minC *context - maxC *context - rc rangeCoder - a subAllocator - charMask [256]byte - binSumm [128][64]uint16 - see2Cont [25][16]see2Context -} - -func (m *model) restart() { - for i := range m.charMask { - m.charMask[i] = 0 - } - m.escCount = 1 - - if m.maxOrder < 12 { - m.initRL = -m.maxOrder - 1 - } else { - m.initRL = -12 - 1 - } - m.orderFall = m.maxOrder - m.runLength = m.initRL - m.prevSuccess = 0 - - m.a.restart() - - c := m.a.newContextSize(256) - c.setSummFreq(257) - states := c.states() - for i := range states { - states[i] = state{sym: 
byte(i), freq: 1} - } - m.minC = c - m.maxC = c - m.prevSym = 0 - - for i := range m.binSumm { - for j, esc := range initBinEsc { - n := binScale - esc/(uint16(i)+2) - for k := j; k < len(m.binSumm[i]); k += len(initBinEsc) { - m.binSumm[i][k] = n - } - } - } - - for i := range m.see2Cont { - see := newSee2Context(5*uint16(i) + 10) - for j := range m.see2Cont[i] { - m.see2Cont[i][j] = see - } - } -} - -func (m *model) init(br io.ByteReader, reset bool, maxOrder, maxMB int) error { - err := m.rc.init(br) - if err != nil { - return err - } - if !reset { - if m.minC == nil { - return errCorruptPPM - } - return nil - } - - m.a.init(maxMB) - - if maxOrder == 1 { - return errCorruptPPM - } - m.maxOrder = maxOrder - m.restart() - return nil -} - -func (m *model) rescale(s *state) *state { - if s.freq <= maxFreq { - return s - } - c := m.minC - - var summFreq uint16 - - s.freq += 4 - states := c.states() - escFreq := c.summFreq() + 4 - - for i := range states { - f := states[i].freq - escFreq -= uint16(f) - if m.orderFall != 0 { - f++ - } - f >>= 1 - summFreq += uint16(f) - states[i].freq = f - - if i == 0 || f <= states[i-1].freq { - continue - } - j := i - 1 - for j > 0 && f > states[j-1].freq { - j-- - } - t := states[i] - copy(states[j+1:i+1], states[j:i]) - states[j] = t - } - - i := len(states) - 1 - for states[i].freq == 0 { - i-- - escFreq++ - } - if i != len(states)-1 { - states = c.shrinkStates(states, i+1) - } - s = &states[0] - if i == 0 { - for { - s.freq -= s.freq >> 1 - escFreq >>= 1 - if escFreq <= 1 { - return s - } - } - } - summFreq += escFreq - (escFreq >> 1) - c.setSummFreq(summFreq) - return s -} - -func (m *model) decodeBinSymbol() (*state, error) { - c := m.minC - s := &c.states()[0] - - ns := c.suffix().numStates() - i := m.prevSuccess + ns2BSIndex[ns-1] + byte(m.runLength>>26)&0x20 - if m.prevSym >= 64 { - i += 8 - } - if s.sym >= 64 { - i += 2 * 8 - } - bs := &m.binSumm[s.freq-1][i] - mean := (*bs + 1<<(periodBits-2)) >> periodBits - - if 
m.rc.currentCount(binScale) < uint32(*bs) { - err := m.rc.decode(0, uint32(*bs)) - if s.freq < 128 { - s.freq++ - } - *bs += 1<>10] - m.charMask[s.sym] = m.escCount - m.prevSuccess = 0 - return nil, err -} - -func (m *model) decodeSymbol1() (*state, error) { - c := m.minC - states := c.states() - scale := uint32(c.summFreq()) - // protect against divide by zero - // TODO: look at why this happens, may be problem elsewhere - if scale == 0 { - return nil, errCorruptPPM - } - count := m.rc.currentCount(scale) - m.prevSuccess = 0 - - var n uint32 - for i := range states { - s := &states[i] - n += uint32(s.freq) - if n <= count { - continue - } - err := m.rc.decode(n-uint32(s.freq), n) - s.freq += 4 - c.setSummFreq(uint16(scale + 4)) - if i == 0 { - if 2*n > scale { - m.prevSuccess = 1 - m.runLength++ - } - } else { - if s.freq <= states[i-1].freq { - return s, err - } - states[i-1], states[i] = states[i], states[i-1] - s = &states[i-1] - } - return m.rescale(s), err - } - - for _, s := range states { - m.charMask[s.sym] = m.escCount - } - return nil, m.rc.decode(n, scale) -} - -func (m *model) makeEscFreq(c *context, numMasked int) *see2Context { - ns := c.numStates() - if ns == 256 { - return nil - } - diff := ns - numMasked - - var i int - if m.prevSym >= 64 { - i = 8 - } - if diff < c.suffix().numStates()-ns { - i++ - } - if int(c.summFreq()) < 11*ns { - i += 2 - } - if numMasked > diff { - i += 4 - } - return &m.see2Cont[ns2Index[diff-1]][i] -} - -func (m *model) decodeSymbol2(numMasked int) (*state, error) { - c := m.minC - - see := m.makeEscFreq(c, numMasked) - scale := see.mean() - - var i int - var hi uint32 - states := c.states() - sl := make([]*state, len(states)-numMasked) - for j := range sl { - for m.charMask[states[i].sym] == m.escCount { - i++ - } - hi += uint32(states[i].freq) - sl[j] = &states[i] - i++ - } - - scale += hi - count := m.rc.currentCount(scale) - - if count >= scale { - return nil, errCorruptPPM - } - if count >= hi { - err := 
m.rc.decode(hi, scale) - if see != nil { - see.summ += uint16(scale) - } - for _, s := range sl { - m.charMask[s.sym] = m.escCount - } - return nil, err - } - - hi = uint32(sl[0].freq) - for hi <= count { - sl = sl[1:] - hi += uint32(sl[0].freq) - } - s := sl[0] - - err := m.rc.decode(hi-uint32(s.freq), hi) - - see.update() - - m.escCount++ - m.runLength = m.initRL - - s.freq += 4 - c.setSummFreq(c.summFreq() + 4) - return m.rescale(s), err -} - -func (c *context) findState(sym byte) *state { - var i int - states := c.states() - for i = range states { - if states[i].sym == sym { - break - } - } - return &states[i] -} - -func (m *model) createSuccessors(s, ss *state) *context { - var sl []*state - - if m.orderFall != 0 { - sl = append(sl, s) - } - - c := m.minC - for suff := c.suffix(); suff != nil; suff = c.suffix() { - c = suff - - if ss == nil { - ss = c.findState(s.sym) - } - if ss.succ != s.succ { - c = m.a.succContext(ss.succ) - break - } - sl = append(sl, ss) - ss = nil - } - - if len(sl) == 0 { - return c - } - - var up state - up.sym = m.a.succByte(s.succ) - up.succ = m.a.nextByteAddr(s.succ) - - states := c.states() - if len(states) > 1 { - s = c.findState(up.sym) - - cf := uint16(s.freq) - 1 - s0 := c.summFreq() - uint16(len(states)) - cf - - if 2*cf <= s0 { - if 5*cf > s0 { - up.freq = 2 - } else { - up.freq = 1 - } - } else { - up.freq = byte(1 + (2*cf+3*s0-1)/(2*s0)) - } - } else { - up.freq = states[0].freq - } - - for i := len(sl) - 1; i >= 0; i-- { - c = m.a.newContext(up, c) - if c == nil { - return nil - } - sl[i].succ = c.succPtr() - } - return c -} - -func (m *model) update(s *state) { - if m.orderFall == 0 { - if c := m.a.succContext(s.succ); c != nil { - m.minC = c - m.maxC = c - return - } - } - - if m.escCount == 0 { - m.escCount = 1 - for i := range m.charMask { - m.charMask[i] = 0 - } - } - - var ss *state // matching minC.suffix state - - if s.freq < maxFreq/4 && m.minC.suffix() != nil { - c := m.minC.suffix() - states := c.states() - - 
var i int - if len(states) > 1 { - for states[i].sym != s.sym { - i++ - } - if i > 0 && states[i].freq >= states[i-1].freq { - states[i-1], states[i] = states[i], states[i-1] - i-- - } - if states[i].freq < maxFreq-9 { - states[i].freq += 2 - c.setSummFreq(c.summFreq() + 2) - } - } else if states[0].freq < 32 { - states[0].freq++ - } - ss = &states[i] // save later for createSuccessors - } - - if m.orderFall == 0 { - c := m.createSuccessors(s, ss) - if c == nil { - m.restart() - } else { - m.minC = c - m.maxC = c - s.succ = c.succPtr() - } - return - } - - succ := m.a.pushByte(s.sym) - if m.a.succIsNil(succ) { - m.restart() - return - } - - var minC *context - if m.a.succIsNil(s.succ) { - s.succ = succ - minC = m.minC - } else { - minC = m.a.succContext(s.succ) - if minC == nil { - minC = m.createSuccessors(s, ss) - if minC == nil { - m.restart() - return - } - } - m.orderFall-- - if m.orderFall == 0 { - succ = minC.succPtr() - if m.maxC.notEq(m.minC) { - m.a.popByte() - } - } - } - - n := m.minC.numStates() - s0 := int(m.minC.summFreq()) - n - int(s.freq-1) - for c := m.maxC; c.notEq(m.minC); c = c.suffix() { - var summFreq uint16 - - states := c.expandStates() - if states == nil { - m.restart() - return - } - if ns := len(states) - 1; ns != 1 { - summFreq = c.summFreq() - if 4*ns <= n && int(summFreq) <= 8*ns { - summFreq += 2 - } - if 2*ns < n { - summFreq++ - } - } else { - p := &states[0] - if p.freq < maxFreq/4-1 { - p.freq += p.freq - } else { - p.freq = maxFreq - 4 - } - summFreq = uint16(p.freq) + uint16(m.initEsc) - if n > 3 { - summFreq++ - } - } - - cf := 2 * int(s.freq) * int(summFreq+6) - sf := s0 + int(summFreq) - var freq byte - if cf >= 6*sf { - switch { - case cf >= 15*sf: - freq = 7 - case cf >= 12*sf: - freq = 6 - case cf >= 9*sf: - freq = 5 - default: - freq = 4 - } - summFreq += uint16(freq) - } else { - switch { - case cf >= 4*sf: - freq = 3 - case cf > sf: - freq = 2 - default: - freq = 1 - } - summFreq += 3 - } - states[len(states)-1] = 
state{sym: s.sym, freq: freq, succ: succ} - c.setSummFreq(summFreq) - } - m.minC = minC - m.maxC = minC -} - -func (m *model) ReadByte() (byte, error) { - if m.minC == nil { - return 0, errCorruptPPM - } - var s *state - var err error - if m.minC.numStates() == 1 { - s, err = m.decodeBinSymbol() - } else { - s, err = m.decodeSymbol1() - } - for s == nil && err == nil { - n := m.minC.numStates() - for m.minC.numStates() == n { - m.orderFall++ - m.minC = m.minC.suffix() - if m.minC == nil { - return 0, errCorruptPPM - } - } - s, err = m.decodeSymbol2(n) - } - if err != nil { - return 0, err - } - - // save sym so it doesn't get overwritten by a possible restart() - sym := s.sym - m.update(s) - m.prevSym = sym - return sym, nil -} diff --git a/vendor/github.com/nwaples/rardecode/reader.go b/vendor/github.com/nwaples/rardecode/reader.go deleted file mode 100644 index 11adc4fea7..0000000000 --- a/vendor/github.com/nwaples/rardecode/reader.go +++ /dev/null @@ -1,376 +0,0 @@ -package rardecode - -import ( - "bufio" - "bytes" - "errors" - "io" - "io/ioutil" - "os" - "time" -) - -// FileHeader HostOS types -const ( - HostOSUnknown = 0 - HostOSMSDOS = 1 - HostOSOS2 = 2 - HostOSWindows = 3 - HostOSUnix = 4 - HostOSMacOS = 5 - HostOSBeOS = 6 -) - -const ( - maxPassword = 128 -) - -var ( - errShortFile = errors.New("rardecode: decoded file too short") - errInvalidFileBlock = errors.New("rardecode: invalid file block") - errUnexpectedArcEnd = errors.New("rardecode: unexpected end of archive") - errBadFileChecksum = errors.New("rardecode: bad file checksum") -) - -type byteReader interface { - io.Reader - io.ByteReader -} - -type limitedReader struct { - r io.Reader - n int64 // bytes remaining - shortErr error // error returned when r returns io.EOF with n > 0 -} - -func (l *limitedReader) Read(p []byte) (int, error) { - if l.n <= 0 { - return 0, io.EOF - } - if int64(len(p)) > l.n { - p = p[0:l.n] - } - n, err := l.r.Read(p) - l.n -= int64(n) - if err == 
io.EOF && l.n > 0 { - return n, l.shortErr - } - return n, err -} - -type limitedByteReader struct { - limitedReader - br io.ByteReader -} - -func (l *limitedByteReader) ReadByte() (byte, error) { - if l.n <= 0 { - return 0, io.EOF - } - c, err := l.br.ReadByte() - if err == nil { - l.n-- - } else if err == io.EOF && l.n > 0 { - return 0, l.shortErr - } - return c, err -} - -// limitByteReader returns a limitedByteReader that reads from r and stops with -// io.EOF after n bytes. -// If r returns an io.EOF before reading n bytes, io.ErrUnexpectedEOF is returned. -func limitByteReader(r byteReader, n int64) *limitedByteReader { - return &limitedByteReader{limitedReader{r, n, io.ErrUnexpectedEOF}, r} -} - -// fileChecksum allows file checksum validations to be performed. -// File contents must first be written to fileChecksum. Then valid is -// called to perform the file checksum calculation to determine -// if the file contents are valid or not. -type fileChecksum interface { - io.Writer - valid() bool -} - -// FileHeader represents a single file in a RAR archive. -type FileHeader struct { - Name string // file name using '/' as the directory separator - IsDir bool // is a directory - HostOS byte // Host OS the archive was created on - Attributes int64 // Host OS specific file attributes - PackedSize int64 // packed file size (or first block if the file spans volumes) - UnPackedSize int64 // unpacked file size - UnKnownSize bool // unpacked file size is not known - ModificationTime time.Time // modification time (non-zero if set) - CreationTime time.Time // creation time (non-zero if set) - AccessTime time.Time // access time (non-zero if set) - Version int // file version -} - -// Mode returns an os.FileMode for the file, calculated from the Attributes field. 
-func (f *FileHeader) Mode() os.FileMode { - var m os.FileMode - - if f.IsDir { - m = os.ModeDir - } - if f.HostOS == HostOSWindows { - if f.IsDir { - m |= 0777 - } else if f.Attributes&1 > 0 { - m |= 0444 // readonly - } else { - m |= 0666 - } - return m - } - // assume unix perms for all remaining os types - m |= os.FileMode(f.Attributes) & os.ModePerm - - // only check other bits on unix host created archives - if f.HostOS != HostOSUnix { - return m - } - - if f.Attributes&0x200 != 0 { - m |= os.ModeSticky - } - if f.Attributes&0x400 != 0 { - m |= os.ModeSetgid - } - if f.Attributes&0x800 != 0 { - m |= os.ModeSetuid - } - - // Check for additional file types. - if f.Attributes&0xF000 == 0xA000 { - m |= os.ModeSymlink - } - return m -} - -// fileBlockHeader represents a file block in a RAR archive. -// Files may comprise one or more file blocks. -// Solid files retain decode tables and dictionary from previous solid files in the archive. -type fileBlockHeader struct { - first bool // first block in file - last bool // last block in file - solid bool // file is solid - winSize uint // log base 2 of decode window size - cksum fileChecksum // file checksum - decoder decoder // decoder to use for file - key []byte // key for AES, non-empty if file encrypted - iv []byte // iv for AES, non-empty if file encrypted - FileHeader -} - -// fileBlockReader provides sequential access to file blocks in a RAR archive. -type fileBlockReader interface { - io.Reader // Read's read data from the current file block - io.ByteReader // Read bytes from current file block - next() (*fileBlockHeader, error) // reads the next file block header at current position - reset() // resets encryption - isSolid() bool // is archive solid - version() int // returns current archive format version -} - -// packedFileReader provides sequential access to packed files in a RAR archive. 
-type packedFileReader struct { - r fileBlockReader - h *fileBlockHeader // current file header -} - -// nextBlockInFile reads the next file block in the current file at the current -// archive file position, or returns an error if there is a problem. -// It is invalid to call this when already at the last block in the current file. -func (f *packedFileReader) nextBlockInFile() error { - h, err := f.r.next() - if err != nil { - if err == io.EOF { - // archive ended, but file hasn't - return errUnexpectedArcEnd - } - return err - } - if h.first || h.Name != f.h.Name { - return errInvalidFileBlock - } - f.h = h - return nil -} - -// next advances to the next packed file in the RAR archive. -func (f *packedFileReader) next() (*fileBlockHeader, error) { - if f.h != nil { - // skip to last block in current file - for !f.h.last { - // discard remaining block data - if _, err := io.Copy(ioutil.Discard, f.r); err != nil { - return nil, err - } - if err := f.nextBlockInFile(); err != nil { - return nil, err - } - } - // discard last block data - if _, err := io.Copy(ioutil.Discard, f.r); err != nil { - return nil, err - } - } - var err error - f.h, err = f.r.next() // get next file block - if err != nil { - if err == errArchiveEnd { - return nil, io.EOF - } - return nil, err - } - if !f.h.first { - return nil, errInvalidFileBlock - } - return f.h, nil -} - -// Read reads the packed data for the current file into p. 
-func (f *packedFileReader) Read(p []byte) (int, error) { - n, err := f.r.Read(p) // read current block data - for err == io.EOF { // current block empty - if n > 0 { - return n, nil - } - if f.h == nil || f.h.last { - return 0, io.EOF // last block so end of file - } - if err := f.nextBlockInFile(); err != nil { - return 0, err - } - n, err = f.r.Read(p) // read new block data - } - return n, err -} - -func (f *packedFileReader) ReadByte() (byte, error) { - c, err := f.r.ReadByte() // read current block data - for err == io.EOF && f.h != nil && !f.h.last { // current block empty - if err := f.nextBlockInFile(); err != nil { - return 0, err - } - c, err = f.r.ReadByte() // read new block data - } - return c, err -} - -// Reader provides sequential access to files in a RAR archive. -type Reader struct { - r io.Reader // reader for current unpacked file - pr packedFileReader // reader for current packed file - dr decodeReader // reader for decoding and filters if file is compressed - cksum fileChecksum // current file checksum - solidr io.Reader // reader for solid file -} - -// Read reads from the current file in the RAR archive. -func (r *Reader) Read(p []byte) (int, error) { - n, err := r.r.Read(p) - if err == io.EOF && r.cksum != nil && !r.cksum.valid() { - return n, errBadFileChecksum - } - return n, err -} - -// Next advances to the next file in the archive. 
-func (r *Reader) Next() (*FileHeader, error) { - if r.solidr != nil { - // solid files must be read fully to update decoder information - if _, err := io.Copy(ioutil.Discard, r.solidr); err != nil { - return nil, err - } - } - - h, err := r.pr.next() // skip to next file - if err != nil { - return nil, err - } - r.solidr = nil - - br := byteReader(&r.pr) // start with packed file reader - - // check for encryption - if len(h.key) > 0 && len(h.iv) > 0 { - br = newAesDecryptReader(br, h.key, h.iv) // decrypt - } - r.r = br - // check for compression - if h.decoder != nil { - err = r.dr.init(br, h.decoder, h.winSize, !h.solid) - if err != nil { - return nil, err - } - r.r = &r.dr - if r.pr.r.isSolid() { - r.solidr = r.r - } - } - if h.UnPackedSize >= 0 && !h.UnKnownSize { - // Limit reading to UnPackedSize as there may be padding - r.r = &limitedReader{r.r, h.UnPackedSize, errShortFile} - } - r.cksum = h.cksum - if r.cksum != nil { - r.r = io.TeeReader(r.r, h.cksum) // write file data to checksum as it is read - } - fh := new(FileHeader) - *fh = h.FileHeader - return fh, nil -} - -func (r *Reader) init(fbr fileBlockReader) { - r.r = bytes.NewReader(nil) // initial reads will always return EOF - r.pr.r = fbr -} - -// NewReader creates a Reader reading from r. -// NewReader only supports single volume archives. -// Multi-volume archives must use OpenReader. -func NewReader(r io.Reader, password string) (*Reader, error) { - br, ok := r.(*bufio.Reader) - if !ok { - br = bufio.NewReader(r) - } - fbr, err := newFileBlockReader(br, password) - if err != nil { - return nil, err - } - rr := new(Reader) - rr.init(fbr) - return rr, nil -} - -type ReadCloser struct { - v *volume - Reader -} - -// Close closes the rar file. -func (rc *ReadCloser) Close() error { - return rc.v.Close() -} - -// Volumes returns the volume filenames that have been used in decoding the archive -// up to this point. This will include the current open volume if the archive is still -// being processed. 
-func (rc *ReadCloser) Volumes() []string { - return rc.v.files -} - -// OpenReader opens a RAR archive specified by the name and returns a ReadCloser. -func OpenReader(name, password string) (*ReadCloser, error) { - v, err := openVolume(name, password) - if err != nil { - return nil, err - } - rc := new(ReadCloser) - rc.v = v - rc.Reader.init(v) - return rc, nil -} diff --git a/vendor/github.com/nwaples/rardecode/vm.go b/vendor/github.com/nwaples/rardecode/vm.go deleted file mode 100644 index fd26a5a0ae..0000000000 --- a/vendor/github.com/nwaples/rardecode/vm.go +++ /dev/null @@ -1,687 +0,0 @@ -package rardecode - -import ( - "encoding/binary" - "errors" -) - -const ( - // vm flag bits - flagC = 1 // Carry - flagZ = 2 // Zero - flagS = 0x80000000 // Sign - - maxCommands = 25000000 // maximum number of commands that can be run in a program - - vmRegs = 8 // number if registers - vmSize = 0x40000 // memory size - vmMask = vmSize - 1 -) - -var ( - errInvalidVMInstruction = errors.New("rardecode: invalid vm instruction") -) - -type vm struct { - ip uint32 // instruction pointer - ipMod bool // ip was modified - fl uint32 // flag bits - r [vmRegs]uint32 // registers - m []byte // memory -} - -func (v *vm) setIP(ip uint32) { - v.ip = ip - v.ipMod = true -} - -// execute runs a list of commands on the vm. -func (v *vm) execute(cmd []command) { - v.ip = 0 // reset instruction pointer - for n := 0; n < maxCommands; n++ { - ip := v.ip - if ip >= uint32(len(cmd)) { - return - } - ins := cmd[ip] - ins.f(v, ins.bm, ins.op) // run cpu instruction - if v.ipMod { - // command modified ip, don't increment - v.ipMod = false - } else { - v.ip++ // increment ip for next command - } - } -} - -// newVM creates a new RAR virtual machine using the byte slice as memory. 
-func newVM(mem []byte) *vm { - v := new(vm) - - if cap(mem) < vmSize+4 { - v.m = make([]byte, vmSize+4) - copy(v.m, mem) - } else { - v.m = mem[:vmSize+4] - for i := len(mem); i < len(v.m); i++ { - v.m[i] = 0 - } - } - v.r[7] = vmSize - return v -} - -type operand interface { - get(v *vm, byteMode bool) uint32 - set(v *vm, byteMode bool, n uint32) -} - -// Immediate Operand -type opI uint32 - -func (op opI) get(v *vm, bm bool) uint32 { return uint32(op) } -func (op opI) set(v *vm, bm bool, n uint32) {} - -// Direct Operand -type opD uint32 - -func (op opD) get(v *vm, byteMode bool) uint32 { - if byteMode { - return uint32(v.m[op]) - } - return binary.LittleEndian.Uint32(v.m[op:]) -} - -func (op opD) set(v *vm, byteMode bool, n uint32) { - if byteMode { - v.m[op] = byte(n) - } else { - binary.LittleEndian.PutUint32(v.m[op:], n) - } -} - -// Register Operand -type opR uint32 - -func (op opR) get(v *vm, byteMode bool) uint32 { - if byteMode { - return v.r[op] & 0xFF - } - return v.r[op] -} - -func (op opR) set(v *vm, byteMode bool, n uint32) { - if byteMode { - v.r[op] = (v.r[op] & 0xFFFFFF00) | (n & 0xFF) - } else { - v.r[op] = n - } -} - -// Register Indirect Operand -type opRI uint32 - -func (op opRI) get(v *vm, byteMode bool) uint32 { - i := v.r[op] & vmMask - if byteMode { - return uint32(v.m[i]) - } - return binary.LittleEndian.Uint32(v.m[i:]) -} -func (op opRI) set(v *vm, byteMode bool, n uint32) { - i := v.r[op] & vmMask - if byteMode { - v.m[i] = byte(n) - } else { - binary.LittleEndian.PutUint32(v.m[i:], n) - } -} - -// Base Plus Index Indirect Operand -type opBI struct { - r uint32 - i uint32 -} - -func (op opBI) get(v *vm, byteMode bool) uint32 { - i := (v.r[op.r] + op.i) & vmMask - if byteMode { - return uint32(v.m[i]) - } - return binary.LittleEndian.Uint32(v.m[i:]) -} -func (op opBI) set(v *vm, byteMode bool, n uint32) { - i := (v.r[op.r] + op.i) & vmMask - if byteMode { - v.m[i] = byte(n) - } else { - binary.LittleEndian.PutUint32(v.m[i:], n) - } -} - 
-type commandFunc func(v *vm, byteMode bool, op []operand) - -type command struct { - f commandFunc - bm bool // is byte mode - op []operand -} - -var ( - ops = []struct { - f commandFunc - byteMode bool // supports byte mode - nops int // number of operands - jop bool // is a jump op - }{ - {mov, true, 2, false}, - {cmp, true, 2, false}, - {add, true, 2, false}, - {sub, true, 2, false}, - {jz, false, 1, true}, - {jnz, false, 1, true}, - {inc, true, 1, false}, - {dec, true, 1, false}, - {jmp, false, 1, true}, - {xor, true, 2, false}, - {and, true, 2, false}, - {or, true, 2, false}, - {test, true, 2, false}, - {js, false, 1, true}, - {jns, false, 1, true}, - {jb, false, 1, true}, - {jbe, false, 1, true}, - {ja, false, 1, true}, - {jae, false, 1, true}, - {push, false, 1, false}, - {pop, false, 1, false}, - {call, false, 1, true}, - {ret, false, 0, false}, - {not, true, 1, false}, - {shl, true, 2, false}, - {shr, true, 2, false}, - {sar, true, 2, false}, - {neg, true, 1, false}, - {pusha, false, 0, false}, - {popa, false, 0, false}, - {pushf, false, 0, false}, - {popf, false, 0, false}, - {movzx, false, 2, false}, - {movsx, false, 2, false}, - {xchg, true, 2, false}, - {mul, true, 2, false}, - {div, true, 2, false}, - {adc, true, 2, false}, - {sbb, true, 2, false}, - {print, false, 0, false}, - } -) - -func mov(v *vm, bm bool, op []operand) { - op[0].set(v, bm, op[1].get(v, bm)) -} - -func cmp(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - r := v1 - op[1].get(v, bm) - if r == 0 { - v.fl = flagZ - } else { - v.fl = 0 - if r > v1 { - v.fl = flagC - } - v.fl |= r & flagS - } -} - -func add(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - r := v1 + op[1].get(v, bm) - v.fl = 0 - signBit := uint32(flagS) - if bm { - r &= 0xFF - signBit = 0x80 - } - if r < v1 { - v.fl |= flagC - } - if r == 0 { - v.fl |= flagZ - } else if r&signBit > 0 { - v.fl |= flagS - } - op[0].set(v, bm, r) -} - -func sub(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - 
r := v1 - op[1].get(v, bm) - v.fl = 0 - - if r == 0 { - v.fl = flagZ - } else { - v.fl = 0 - if r > v1 { - v.fl = flagC - } - v.fl |= r & flagS - } - op[0].set(v, bm, r) -} - -func jz(v *vm, bm bool, op []operand) { - if v.fl&flagZ > 0 { - v.setIP(op[0].get(v, false)) - } -} - -func jnz(v *vm, bm bool, op []operand) { - if v.fl&flagZ == 0 { - v.setIP(op[0].get(v, false)) - } -} - -func inc(v *vm, bm bool, op []operand) { - r := op[0].get(v, bm) + 1 - if bm { - r &= 0xFF - } - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } -} - -func dec(v *vm, bm bool, op []operand) { - r := op[0].get(v, bm) - 1 - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } -} - -func jmp(v *vm, bm bool, op []operand) { - v.setIP(op[0].get(v, false)) -} - -func xor(v *vm, bm bool, op []operand) { - r := op[0].get(v, bm) ^ op[1].get(v, bm) - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } -} - -func and(v *vm, bm bool, op []operand) { - r := op[0].get(v, bm) & op[1].get(v, bm) - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } -} - -func or(v *vm, bm bool, op []operand) { - r := op[0].get(v, bm) | op[1].get(v, bm) - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } -} - -func test(v *vm, bm bool, op []operand) { - r := op[0].get(v, bm) & op[1].get(v, bm) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } -} - -func js(v *vm, bm bool, op []operand) { - if v.fl&flagS > 0 { - v.setIP(op[0].get(v, false)) - } -} - -func jns(v *vm, bm bool, op []operand) { - if v.fl&flagS == 0 { - v.setIP(op[0].get(v, false)) - } -} - -func jb(v *vm, bm bool, op []operand) { - if v.fl&flagC > 0 { - v.setIP(op[0].get(v, false)) - } -} - -func jbe(v *vm, bm bool, op []operand) { - if v.fl&(flagC|flagZ) > 0 { - v.setIP(op[0].get(v, false)) - } -} - -func ja(v *vm, bm bool, op []operand) { - if v.fl&(flagC|flagZ) == 0 { - 
v.setIP(op[0].get(v, false)) - } -} - -func jae(v *vm, bm bool, op []operand) { - if v.fl&flagC == 0 { - v.setIP(op[0].get(v, false)) - } -} - -func push(v *vm, bm bool, op []operand) { - v.r[7] -= 4 - opRI(7).set(v, false, op[0].get(v, false)) - -} - -func pop(v *vm, bm bool, op []operand) { - op[0].set(v, false, opRI(7).get(v, false)) - v.r[7] += 4 -} - -func call(v *vm, bm bool, op []operand) { - v.r[7] -= 4 - opRI(7).set(v, false, v.ip+1) - v.setIP(op[0].get(v, false)) -} - -func ret(v *vm, bm bool, op []operand) { - r7 := v.r[7] - if r7 >= vmSize { - v.setIP(0xFFFFFFFF) // trigger end of program - } else { - v.setIP(binary.LittleEndian.Uint32(v.m[r7:])) - v.r[7] += 4 - } -} - -func not(v *vm, bm bool, op []operand) { - op[0].set(v, bm, ^op[0].get(v, bm)) -} - -func shl(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - v2 := op[1].get(v, bm) - r := v1 << v2 - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } - if (v1<<(v2-1))&0x80000000 > 0 { - v.fl |= flagC - } -} - -func shr(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - v2 := op[1].get(v, bm) - r := v1 >> v2 - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } - if (v1>>(v2-1))&0x1 > 0 { - v.fl |= flagC - } -} - -func sar(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - v2 := op[1].get(v, bm) - r := uint32(int32(v1) >> v2) - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } - if (v1>>(v2-1))&0x1 > 0 { - v.fl |= flagC - } -} - -func neg(v *vm, bm bool, op []operand) { - r := 0 - op[0].get(v, bm) - op[0].set(v, bm, r) - if r == 0 { - v.fl = flagZ - } else { - v.fl = r&flagS | flagC - } -} - -func pusha(v *vm, bm bool, op []operand) { - sp := opD(v.r[7]) - for _, r := range v.r { - sp = (sp - 4) & vmMask - sp.set(v, false, r) - } - v.r[7] = uint32(sp) -} - -func popa(v *vm, bm bool, op []operand) { - sp := opD(v.r[7]) - for i := 7; i >= 0; i-- { - v.r[i] = sp.get(v, false) - sp = (sp 
+ 4) & vmMask - } -} - -func pushf(v *vm, bm bool, op []operand) { - v.r[7] -= 4 - opRI(7).set(v, false, v.fl) -} - -func popf(v *vm, bm bool, op []operand) { - v.fl = opRI(7).get(v, false) - v.r[7] += 4 -} - -func movzx(v *vm, bm bool, op []operand) { - op[0].set(v, false, op[1].get(v, true)) -} - -func movsx(v *vm, bm bool, op []operand) { - op[0].set(v, false, uint32(int8(op[1].get(v, true)))) -} - -func xchg(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - op[0].set(v, bm, op[1].get(v, bm)) - op[1].set(v, bm, v1) -} - -func mul(v *vm, bm bool, op []operand) { - r := op[0].get(v, bm) * op[1].get(v, bm) - op[0].set(v, bm, r) -} - -func div(v *vm, bm bool, op []operand) { - div := op[1].get(v, bm) - if div != 0 { - r := op[0].get(v, bm) / div - op[0].set(v, bm, r) - } -} - -func adc(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - fc := v.fl & flagC - r := v1 + op[1].get(v, bm) + fc - if bm { - r &= 0xFF - } - op[0].set(v, bm, r) - - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } - if r < v1 || (r == v1 && fc > 0) { - v.fl |= flagC - } -} - -func sbb(v *vm, bm bool, op []operand) { - v1 := op[0].get(v, bm) - fc := v.fl & flagC - r := v1 - op[1].get(v, bm) - fc - if bm { - r &= 0xFF - } - op[0].set(v, bm, r) - - if r == 0 { - v.fl = flagZ - } else { - v.fl = r & flagS - } - if r > v1 || (r == v1 && fc > 0) { - v.fl |= flagC - } -} - -func print(v *vm, bm bool, op []operand) { - // TODO: ignore print for the moment -} - -func decodeArg(br *rarBitReader, byteMode bool) (operand, error) { - n, err := br.readBits(1) - if err != nil { - return nil, err - } - if n > 0 { // Register - n, err = br.readBits(3) - return opR(n), err - } - n, err = br.readBits(1) - if err != nil { - return nil, err - } - if n == 0 { // Immediate - if byteMode { - n, err = br.readBits(8) - } else { - m, err := br.readUint32() - return opI(m), err - } - return opI(n), err - } - n, err = br.readBits(1) - if err != nil { - return nil, err - } - if n == 0 { - // 
Register Indirect - n, err = br.readBits(3) - return opRI(n), err - } - n, err = br.readBits(1) - if err != nil { - return nil, err - } - if n == 0 { - // Base + Index Indirect - n, err = br.readBits(3) - if err != nil { - return nil, err - } - i, err := br.readUint32() - return opBI{r: uint32(n), i: i}, err - } - // Direct addressing - m, err := br.readUint32() - return opD(m & vmMask), err -} - -func fixJumpOp(op operand, off int) operand { - n, ok := op.(opI) - if !ok { - return op - } - if n >= 256 { - return n - 256 - } - if n >= 136 { - n -= 264 - } else if n >= 16 { - n -= 8 - } else if n >= 8 { - n -= 16 - } - return n + opI(off) -} - -func readCommands(br *rarBitReader) ([]command, error) { - var cmds []command - - for { - code, err := br.readBits(4) - if err != nil { - return cmds, err - } - if code&0x08 > 0 { - n, err := br.readBits(2) - if err != nil { - return cmds, err - } - code = (code<<2 | n) - 24 - } - - if code >= len(ops) { - return cmds, errInvalidVMInstruction - } - ins := ops[code] - - var com command - - if ins.byteMode { - n, err := br.readBits(1) - if err != nil { - return cmds, err - } - com.bm = n > 0 - } - com.f = ins.f - - if ins.nops > 0 { - com.op = make([]operand, ins.nops) - com.op[0], err = decodeArg(br, com.bm) - if err != nil { - return cmds, err - } - if ins.nops == 2 { - com.op[1], err = decodeArg(br, com.bm) - if err != nil { - return cmds, err - } - } else if ins.jop { - com.op[0] = fixJumpOp(com.op[0], len(cmds)) - } - } - cmds = append(cmds, com) - } -} diff --git a/vendor/github.com/pierrec/lz4/v4/.gitignore b/vendor/github.com/pierrec/lz4/v4/.gitignore deleted file mode 100644 index 5d7e88de0a..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/.gitignore +++ /dev/null @@ -1,36 +0,0 @@ -# Created by https://www.gitignore.io/api/macos - -### macOS ### -*.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - - -# Thumbnails -._* - -# Files that might appear in the root of a 
volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -# End of https://www.gitignore.io/api/macos - -cmd/*/*exe -.idea - -fuzz/*.zip diff --git a/vendor/github.com/pierrec/lz4/v4/LICENSE b/vendor/github.com/pierrec/lz4/v4/LICENSE deleted file mode 100644 index bd899d8353..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2015, Pierre Curto -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of xxHash nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/vendor/github.com/pierrec/lz4/v4/README.md b/vendor/github.com/pierrec/lz4/v4/README.md deleted file mode 100644 index df027e2c30..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# lz4 : LZ4 compression in pure Go - -[![Go Reference](https://pkg.go.dev/badge/github.com/pierrec/lz4/v4.svg)](https://pkg.go.dev/github.com/pierrec/lz4/v4) -[![CI](https://github.com/pierrec/lz4/workflows/ci/badge.svg)](https://github.com/pierrec/lz4/actions) -[![Go Report Card](https://goreportcard.com/badge/github.com/pierrec/lz4)](https://goreportcard.com/report/github.com/pierrec/lz4) -[![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/pierrec/lz4.svg?style=social)](https://github.com/pierrec/lz4/tags) - -## Overview - -This package provides a streaming interface to [LZ4 data streams](http://fastcompression.blogspot.fr/2013/04/lz4-streaming-format-final.html) as well as low level compress and uncompress functions for LZ4 data blocks. -The implementation is based on the reference C [one](https://github.com/lz4/lz4). - -## Install - -Assuming you have the go toolchain installed: - -``` -go get github.com/pierrec/lz4/v4 -``` - -There is a command line interface tool to compress and decompress LZ4 files. 
- -``` -go install github.com/pierrec/lz4/v4/cmd/lz4c -``` - -Usage - -``` -Usage of lz4c: - -version - print the program version - -Subcommands: -Compress the given files or from stdin to stdout. -compress [arguments] [ ...] - -bc - enable block checksum - -l int - compression level (0=fastest) - -sc - disable stream checksum - -size string - block max size [64K,256K,1M,4M] (default "4M") - -Uncompress the given files or from stdin to stdout. -uncompress [arguments] [ ...] - -``` - - -## Example - -``` -// Compress and uncompress an input string. -s := "hello world" -r := strings.NewReader(s) - -// The pipe will uncompress the data from the writer. -pr, pw := io.Pipe() -zw := lz4.NewWriter(pw) -zr := lz4.NewReader(pr) - -go func() { - // Compress the input string. - _, _ = io.Copy(zw, r) - _ = zw.Close() // Make sure the writer is closed - _ = pw.Close() // Terminate the pipe -}() - -_, _ = io.Copy(os.Stdout, zr) - -// Output: -// hello world -``` - -## Contributing - -Contributions are very welcome for bug fixing, performance improvements...! - -- Open an issue with a proper description -- Send a pull request with appropriate test case(s) - -## Contributors - -Thanks to all [contributors](https://github.com/pierrec/lz4/graphs/contributors) so far! - -Special thanks to [@Zariel](https://github.com/Zariel) for his asm implementation of the decoder. - -Special thanks to [@klauspost](https://github.com/klauspost) for his work on optimizing the code. 
diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/block.go b/vendor/github.com/pierrec/lz4/v4/internal/lz4block/block.go deleted file mode 100644 index f382649430..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/block.go +++ /dev/null @@ -1,469 +0,0 @@ -package lz4block - -import ( - "encoding/binary" - "math/bits" - "sync" - - "github.com/pierrec/lz4/v4/internal/lz4errors" -) - -const ( - // The following constants are used to setup the compression algorithm. - minMatch = 4 // the minimum size of the match sequence size (4 bytes) - winSizeLog = 16 // LZ4 64Kb window size limit - winSize = 1 << winSizeLog - winMask = winSize - 1 // 64Kb window of previous data for dependent blocks - - // hashLog determines the size of the hash table used to quickly find a previous match position. - // Its value influences the compression speed and memory usage, the lower the faster, - // but at the expense of the compression ratio. - // 16 seems to be the best compromise for fast compression. - hashLog = 16 - htSize = 1 << hashLog - - mfLimit = 10 + minMatch // The last match cannot start within the last 14 bytes. -) - -func recoverBlock(e *error) { - if r := recover(); r != nil && *e == nil { - *e = lz4errors.ErrInvalidSourceShortBuffer - } -} - -// blockHash hashes the lower 6 bytes into a value < htSize. -func blockHash(x uint64) uint32 { - const prime6bytes = 227718039650203 - return uint32(((x << (64 - 48)) * prime6bytes) >> (64 - hashLog)) -} - -func CompressBlockBound(n int) int { - return n + n/255 + 16 -} - -func UncompressBlock(src, dst []byte) (int, error) { - if len(src) == 0 { - return 0, nil - } - if di := decodeBlock(dst, src); di >= 0 { - return di, nil - } - return 0, lz4errors.ErrInvalidSourceShortBuffer -} - -type Compressor struct { - // Offsets are at most 64kiB, so we can store only the lower 16 bits of - // match positions: effectively, an offset from some 64kiB block boundary. 
- // - // When we retrieve such an offset, we interpret it as relative to the last - // block boundary si &^ 0xffff, or the one before, (si &^ 0xffff) - 0x10000, - // depending on which of these is inside the current window. If a table - // entry was generated more than 64kiB back in the input, we find out by - // inspecting the input stream. - table [htSize]uint16 - - needsReset bool -} - -// Get returns the position of a presumptive match for the hash h. -// The match may be a false positive due to a hash collision or an old entry. -// If si < winSize, the return value may be negative. -func (c *Compressor) get(h uint32, si int) int { - h &= htSize - 1 - i := int(c.table[h]) - i += si &^ winMask - if i >= si { - // Try previous 64kiB block (negative when in first block). - i -= winSize - } - return i -} - -func (c *Compressor) put(h uint32, si int) { - h &= htSize - 1 - c.table[h] = uint16(si) -} - -var compressorPool = sync.Pool{New: func() interface{} { return new(Compressor) }} - -func CompressBlock(src, dst []byte) (int, error) { - c := compressorPool.Get().(*Compressor) - n, err := c.CompressBlock(src, dst) - compressorPool.Put(c) - return n, err -} - -func (c *Compressor) CompressBlock(src, dst []byte) (int, error) { - if c.needsReset { - // Zero out reused table to avoid non-deterministic output (issue #65). - c.table = [htSize]uint16{} - } - c.needsReset = true // Only false on first call. - - // Return 0, nil only if the destination buffer size is < CompressBlockBound. - isNotCompressible := len(dst) < CompressBlockBound(len(src)) - - // adaptSkipLog sets how quickly the compressor begins skipping blocks when data is incompressible. - // This significantly speeds up incompressible data and usually has very small impact on compression. - // bytes to skip = 1 + (bytes since last match >> adaptSkipLog) - const adaptSkipLog = 7 - - // si: Current position of the search. - // anchor: Position of the current literals. 
- var si, di, anchor int - sn := len(src) - mfLimit - if sn <= 0 { - goto lastLiterals - } - - // Fast scan strategy: the hash table only stores the last 4 bytes sequences. - for si < sn { - // Hash the next 6 bytes (sequence)... - match := binary.LittleEndian.Uint64(src[si:]) - h := blockHash(match) - h2 := blockHash(match >> 8) - - // We check a match at s, s+1 and s+2 and pick the first one we get. - // Checking 3 only requires us to load the source one. - ref := c.get(h, si) - ref2 := c.get(h2, si) - c.put(h, si) - c.put(h2, si+1) - - offset := si - ref - - if offset <= 0 || offset >= winSize || uint32(match) != binary.LittleEndian.Uint32(src[ref:]) { - // No match. Start calculating another hash. - // The processor can usually do this out-of-order. - h = blockHash(match >> 16) - ref3 := c.get(h, si+2) - - // Check the second match at si+1 - si += 1 - offset = si - ref2 - - if offset <= 0 || offset >= winSize || uint32(match>>8) != binary.LittleEndian.Uint32(src[ref2:]) { - // No match. Check the third match at si+2 - si += 1 - offset = si - ref3 - c.put(h, si) - - if offset <= 0 || offset >= winSize || uint32(match>>16) != binary.LittleEndian.Uint32(src[ref3:]) { - // Skip one extra byte (at si+3) before we check 3 matches again. - si += 2 + (si-anchor)>>adaptSkipLog - continue - } - } - } - - // Match found. - lLen := si - anchor // Literal length. - // We already matched 4 bytes. - mLen := 4 - - // Extend backwards if we can, reducing literals. - tOff := si - offset - 1 - for lLen > 0 && tOff >= 0 && src[si-1] == src[tOff] { - si-- - tOff-- - lLen-- - mLen++ - } - - // Add the match length, so we continue search at the end. - // Use mLen to store the offset base. - si, mLen = si+mLen, si+minMatch - - // Find the longest match by looking by batches of 8 bytes. - for si+8 < sn { - x := binary.LittleEndian.Uint64(src[si:]) ^ binary.LittleEndian.Uint64(src[si-offset:]) - if x == 0 { - si += 8 - } else { - // Stop is first non-zero byte. 
- si += bits.TrailingZeros64(x) >> 3 - break - } - } - - mLen = si - mLen - if mLen < 0xF { - dst[di] = byte(mLen) - } else { - dst[di] = 0xF - } - - // Encode literals length. - if lLen < 0xF { - dst[di] |= byte(lLen << 4) - } else { - dst[di] |= 0xF0 - di++ - l := lLen - 0xF - for ; l >= 0xFF; l -= 0xFF { - dst[di] = 0xFF - di++ - } - dst[di] = byte(l) - } - di++ - - // Literals. - if di+lLen > len(dst) { - return 0, lz4errors.ErrInvalidSourceShortBuffer - } - copy(dst[di:di+lLen], src[anchor:anchor+lLen]) - di += lLen + 2 - anchor = si - - // Encode offset. - if di > len(dst) { - return 0, lz4errors.ErrInvalidSourceShortBuffer - } - dst[di-2], dst[di-1] = byte(offset), byte(offset>>8) - - // Encode match length part 2. - if mLen >= 0xF { - for mLen -= 0xF; mLen >= 0xFF && di < len(dst); mLen -= 0xFF { - dst[di] = 0xFF - di++ - } - if di >= len(dst) { - return 0, lz4errors.ErrInvalidSourceShortBuffer - } - dst[di] = byte(mLen) - di++ - } - // Check if we can load next values. - if si >= sn { - break - } - // Hash match end-2 - h = blockHash(binary.LittleEndian.Uint64(src[si-2:])) - c.put(h, si-2) - } - -lastLiterals: - if isNotCompressible && anchor == 0 { - // Incompressible. - return 0, nil - } - - // Last literals. - if di >= len(dst) { - return 0, lz4errors.ErrInvalidSourceShortBuffer - } - lLen := len(src) - anchor - if lLen < 0xF { - dst[di] = byte(lLen << 4) - } else { - dst[di] = 0xF0 - di++ - for lLen -= 0xF; lLen >= 0xFF && di < len(dst); lLen -= 0xFF { - dst[di] = 0xFF - di++ - } - if di >= len(dst) { - return 0, lz4errors.ErrInvalidSourceShortBuffer - } - dst[di] = byte(lLen) - } - di++ - - // Write the last literals. - if isNotCompressible && di >= anchor { - // Incompressible. - return 0, nil - } - if di+len(src)-anchor > len(dst) { - return 0, lz4errors.ErrInvalidSourceShortBuffer - } - di += copy(dst[di:di+len(src)-anchor], src[anchor:]) - return di, nil -} - -// blockHash hashes 4 bytes into a value < winSize. 
-func blockHashHC(x uint32) uint32 { - const hasher uint32 = 2654435761 // Knuth multiplicative hash. - return x * hasher >> (32 - winSizeLog) -} - -type CompressorHC struct { - // hashTable: stores the last position found for a given hash - // chainTable: stores previous positions for a given hash - hashTable, chainTable [htSize]int - needsReset bool -} - -var compressorHCPool = sync.Pool{New: func() interface{} { return new(CompressorHC) }} - -func CompressBlockHC(src, dst []byte, depth CompressionLevel) (int, error) { - c := compressorHCPool.Get().(*CompressorHC) - n, err := c.CompressBlock(src, dst, depth) - compressorHCPool.Put(c) - return n, err -} - -func (c *CompressorHC) CompressBlock(src, dst []byte, depth CompressionLevel) (_ int, err error) { - if c.needsReset { - // Zero out reused table to avoid non-deterministic output (issue #65). - c.hashTable = [htSize]int{} - c.chainTable = [htSize]int{} - } - c.needsReset = true // Only false on first call. - - defer recoverBlock(&err) - - // Return 0, nil only if the destination buffer size is < CompressBlockBound. - isNotCompressible := len(dst) < CompressBlockBound(len(src)) - - // adaptSkipLog sets how quickly the compressor begins skipping blocks when data is incompressible. - // This significantly speeds up incompressible data and usually has very small impact on compression. - // bytes to skip = 1 + (bytes since last match >> adaptSkipLog) - const adaptSkipLog = 7 - - var si, di, anchor int - sn := len(src) - mfLimit - if sn <= 0 { - goto lastLiterals - } - - if depth == 0 { - depth = winSize - } - - for si < sn { - // Hash the next 4 bytes (sequence). - match := binary.LittleEndian.Uint32(src[si:]) - h := blockHashHC(match) - - // Follow the chain until out of window and give the longest match. 
- mLen := 0 - offset := 0 - for next, try := c.hashTable[h], depth; try > 0 && next > 0 && si-next < winSize; next, try = c.chainTable[next&winMask], try-1 { - // The first (mLen==0) or next byte (mLen>=minMatch) at current match length - // must match to improve on the match length. - if src[next+mLen] != src[si+mLen] { - continue - } - ml := 0 - // Compare the current position with a previous with the same hash. - for ml < sn-si { - x := binary.LittleEndian.Uint64(src[next+ml:]) ^ binary.LittleEndian.Uint64(src[si+ml:]) - if x == 0 { - ml += 8 - } else { - // Stop is first non-zero byte. - ml += bits.TrailingZeros64(x) >> 3 - break - } - } - if ml < minMatch || ml <= mLen { - // Match too small (>adaptSkipLog - continue - } - - // Match found. - // Update hash/chain tables with overlapping bytes: - // si already hashed, add everything from si+1 up to the match length. - winStart := si + 1 - if ws := si + mLen - winSize; ws > winStart { - winStart = ws - } - for si, ml := winStart, si+mLen; si < ml; { - match >>= 8 - match |= uint32(src[si+3]) << 24 - h := blockHashHC(match) - c.chainTable[si&winMask] = c.hashTable[h] - c.hashTable[h] = si - si++ - } - - lLen := si - anchor - si += mLen - mLen -= minMatch // Match length does not include minMatch. - - if mLen < 0xF { - dst[di] = byte(mLen) - } else { - dst[di] = 0xF - } - - // Encode literals length. - if lLen < 0xF { - dst[di] |= byte(lLen << 4) - } else { - dst[di] |= 0xF0 - di++ - l := lLen - 0xF - for ; l >= 0xFF; l -= 0xFF { - dst[di] = 0xFF - di++ - } - dst[di] = byte(l) - } - di++ - - // Literals. - copy(dst[di:di+lLen], src[anchor:anchor+lLen]) - di += lLen - anchor = si - - // Encode offset. - di += 2 - dst[di-2], dst[di-1] = byte(offset), byte(offset>>8) - - // Encode match length part 2. - if mLen >= 0xF { - for mLen -= 0xF; mLen >= 0xFF; mLen -= 0xFF { - dst[di] = 0xFF - di++ - } - dst[di] = byte(mLen) - di++ - } - } - - if isNotCompressible && anchor == 0 { - // Incompressible. 
- return 0, nil - } - - // Last literals. -lastLiterals: - lLen := len(src) - anchor - if lLen < 0xF { - dst[di] = byte(lLen << 4) - } else { - dst[di] = 0xF0 - di++ - lLen -= 0xF - for ; lLen >= 0xFF; lLen -= 0xFF { - dst[di] = 0xFF - di++ - } - dst[di] = byte(lLen) - } - di++ - - // Write the last literals. - if isNotCompressible && di >= anchor { - // Incompressible. - return 0, nil - } - di += copy(dst[di:di+len(src)-anchor], src[anchor:]) - return di, nil -} diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/blocks.go b/vendor/github.com/pierrec/lz4/v4/internal/lz4block/blocks.go deleted file mode 100644 index e6cf88d71c..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/blocks.go +++ /dev/null @@ -1,88 +0,0 @@ -// Package lz4block provides LZ4 BlockSize types and pools of buffers. -package lz4block - -import "sync" - -const ( - Block64Kb uint32 = 1 << (16 + iota*2) - Block256Kb - Block1Mb - Block4Mb - Block8Mb = 2 * Block4Mb - legacyBlockSize = Block8Mb + Block8Mb/255 + 16 // CompressBound(Block8Mb) -) - -var ( - BlockPool64K = sync.Pool{New: func() interface{} { return make([]byte, Block64Kb) }} - BlockPool256K = sync.Pool{New: func() interface{} { return make([]byte, Block256Kb) }} - BlockPool1M = sync.Pool{New: func() interface{} { return make([]byte, Block1Mb) }} - BlockPool4M = sync.Pool{New: func() interface{} { return make([]byte, Block4Mb) }} - BlockPool8M = sync.Pool{New: func() interface{} { return make([]byte, legacyBlockSize) }} -) - -func Index(b uint32) BlockSizeIndex { - switch b { - case Block64Kb: - return 4 - case Block256Kb: - return 5 - case Block1Mb: - return 6 - case Block4Mb: - return 7 - case Block8Mb: // only valid in legacy mode - return 3 - } - return 0 -} - -func IsValid(b uint32) bool { - return Index(b) > 0 -} - -type BlockSizeIndex uint8 - -func (b BlockSizeIndex) IsValid() bool { - switch b { - case 4, 5, 6, 7: - return true - } - return false -} - -func (b BlockSizeIndex) 
Get() []byte { - var buf interface{} - switch b { - case 4: - buf = BlockPool64K.Get() - case 5: - buf = BlockPool256K.Get() - case 6: - buf = BlockPool1M.Get() - case 7: - buf = BlockPool4M.Get() - case 3: - buf = BlockPool8M.Get() - } - return buf.([]byte) -} - -func Put(buf []byte) { - // Safeguard: do not allow invalid buffers. - switch c := cap(buf); uint32(c) { - case Block64Kb: - BlockPool64K.Put(buf[:c]) - case Block256Kb: - BlockPool256K.Put(buf[:c]) - case Block1Mb: - BlockPool1M.Put(buf[:c]) - case Block4Mb: - BlockPool4M.Put(buf[:c]) - case legacyBlockSize: - BlockPool8M.Put(buf[:c]) - } -} - -type CompressionLevel uint32 - -const Fast CompressionLevel = 0 diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_amd64.s b/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_amd64.s deleted file mode 100644 index be79faa3fe..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_amd64.s +++ /dev/null @@ -1,369 +0,0 @@ -// +build !appengine -// +build gc -// +build !noasm - -#include "textflag.h" - -// AX scratch -// BX scratch -// CX scratch -// DX token -// -// DI &dst -// SI &src -// R8 &dst + len(dst) -// R9 &src + len(src) -// R11 &dst -// R12 short output end -// R13 short input end -// func decodeBlock(dst, src []byte) int -// using 50 bytes of stack currently -TEXT ·decodeBlock(SB), NOSPLIT, $64-56 - MOVQ dst_base+0(FP), DI - MOVQ DI, R11 - MOVQ dst_len+8(FP), R8 - ADDQ DI, R8 - - MOVQ src_base+24(FP), SI - MOVQ src_len+32(FP), R9 - CMPQ R9, $0 - JE err_corrupt - ADDQ SI, R9 - - // shortcut ends - // short output end - MOVQ R8, R12 - SUBQ $32, R12 - // short input end - MOVQ R9, R13 - SUBQ $16, R13 - -loop: - // for si < len(src) - CMPQ SI, R9 - JGE end - - // token := uint32(src[si]) - MOVBQZX (SI), DX - INCQ SI - - // lit_len = token >> 4 - // if lit_len > 0 - // CX = lit_len - MOVQ DX, CX - SHRQ $4, CX - - // if lit_len != 0xF - CMPQ CX, $0xF - JEQ lit_len_loop_pre - CMPQ DI, R12 - 
JGE lit_len_loop_pre - CMPQ SI, R13 - JGE lit_len_loop_pre - - // copy shortcut - - // A two-stage shortcut for the most common case: - // 1) If the literal length is 0..14, and there is enough space, - // enter the shortcut and copy 16 bytes on behalf of the literals - // (in the fast mode, only 8 bytes can be safely copied this way). - // 2) Further if the match length is 4..18, copy 18 bytes in a similar - // manner; but we ensure that there's enough space in the output for - // those 18 bytes earlier, upon entering the shortcut (in other words, - // there is a combined check for both stages). - - // copy literal - MOVOU (SI), X0 - MOVOU X0, (DI) - ADDQ CX, DI - ADDQ CX, SI - - MOVQ DX, CX - ANDQ $0xF, CX - - // The second stage: prepare for match copying, decode full info. - // If it doesn't work out, the info won't be wasted. - // offset := uint16(data[:2]) - MOVWQZX (SI), DX - ADDQ $2, SI - - MOVQ DI, AX - SUBQ DX, AX - CMPQ AX, DI - JGT err_short_buf - - // if we can't do the second stage then jump straight to read the - // match length, we already have the offset. 
- CMPQ CX, $0xF - JEQ match_len_loop_pre - CMPQ DX, $8 - JLT match_len_loop_pre - CMPQ AX, R11 - JLT err_short_buf - - // memcpy(op + 0, match + 0, 8); - MOVQ (AX), BX - MOVQ BX, (DI) - // memcpy(op + 8, match + 8, 8); - MOVQ 8(AX), BX - MOVQ BX, 8(DI) - // memcpy(op +16, match +16, 2); - MOVW 16(AX), BX - MOVW BX, 16(DI) - - LEAQ 4(DI)(CX*1), DI // minmatch - - // shortcut complete, load next token - JMP loop - -lit_len_loop_pre: - // if lit_len > 0 - CMPQ CX, $0 - JEQ offset - CMPQ CX, $0xF - JNE copy_literal - -lit_len_loop: - // for src[si] == 0xFF - CMPB (SI), $0xFF - JNE lit_len_finalise - - // bounds check src[si+1] - LEAQ 1(SI), AX - CMPQ AX, R9 - JGT err_short_buf - - // lit_len += 0xFF - ADDQ $0xFF, CX - INCQ SI - JMP lit_len_loop - -lit_len_finalise: - // lit_len += int(src[si]) - // si++ - MOVBQZX (SI), AX - ADDQ AX, CX - INCQ SI - -copy_literal: - // bounds check src and dst - LEAQ (SI)(CX*1), AX - CMPQ AX, R9 - JGT err_short_buf - - LEAQ (DI)(CX*1), AX - CMPQ AX, R8 - JGT err_short_buf - - // whats a good cut off to call memmove? 
- CMPQ CX, $16 - JGT memmove_lit - - // if len(dst[di:]) < 16 - MOVQ R8, AX - SUBQ DI, AX - CMPQ AX, $16 - JLT memmove_lit - - // if len(src[si:]) < 16 - MOVQ R9, AX - SUBQ SI, AX - CMPQ AX, $16 - JLT memmove_lit - - MOVOU (SI), X0 - MOVOU X0, (DI) - - JMP finish_lit_copy - -memmove_lit: - // memmove(to, from, len) - MOVQ DI, 0(SP) - MOVQ SI, 8(SP) - MOVQ CX, 16(SP) - // spill - MOVQ DI, 24(SP) - MOVQ SI, 32(SP) - MOVQ CX, 40(SP) // need len to inc SI, DI after - MOVB DX, 48(SP) - CALL runtime·memmove(SB) - - // restore registers - MOVQ 24(SP), DI - MOVQ 32(SP), SI - MOVQ 40(SP), CX - MOVB 48(SP), DX - - // recalc initial values - MOVQ dst_base+0(FP), R8 - MOVQ R8, R11 - ADDQ dst_len+8(FP), R8 - MOVQ src_base+24(FP), R9 - ADDQ src_len+32(FP), R9 - MOVQ R8, R12 - SUBQ $32, R12 - MOVQ R9, R13 - SUBQ $16, R13 - -finish_lit_copy: - ADDQ CX, SI - ADDQ CX, DI - - CMPQ SI, R9 - JGE end - -offset: - // CX := mLen - // free up DX to use for offset - MOVQ DX, CX - - LEAQ 2(SI), AX - CMPQ AX, R9 - JGT err_short_buf - - // offset - // DX := int(src[si]) | int(src[si+1])<<8 - MOVWQZX (SI), DX - ADDQ $2, SI - - // 0 offset is invalid - CMPQ DX, $0 - JEQ err_corrupt - - ANDB $0xF, CX - -match_len_loop_pre: - // if mlen != 0xF - CMPB CX, $0xF - JNE copy_match - -match_len_loop: - // for src[si] == 0xFF - // lit_len += 0xFF - CMPB (SI), $0xFF - JNE match_len_finalise - - // bounds check src[si+1] - LEAQ 1(SI), AX - CMPQ AX, R9 - JGT err_short_buf - - ADDQ $0xFF, CX - INCQ SI - JMP match_len_loop - -match_len_finalise: - // lit_len += int(src[si]) - // si++ - MOVBQZX (SI), AX - ADDQ AX, CX - INCQ SI - -copy_match: - // mLen += minMatch - ADDQ $4, CX - - // check we have match_len bytes left in dst - // di+match_len < len(dst) - LEAQ (DI)(CX*1), AX - CMPQ AX, R8 - JGT err_short_buf - - // DX = offset - // CX = match_len - // BX = &dst + (di - offset) - MOVQ DI, BX - SUBQ DX, BX - - // check BX is within dst - // if BX < &dst - CMPQ BX, R11 - JLT err_short_buf - - // if offset + 
match_len < di - LEAQ (BX)(CX*1), AX - CMPQ DI, AX - JGT copy_interior_match - - // AX := len(dst[:di]) - // MOVQ DI, AX - // SUBQ R11, AX - - // copy 16 bytes at a time - // if di-offset < 16 copy 16-(di-offset) bytes to di - // then do the remaining - -copy_match_loop: - // for match_len >= 0 - // dst[di] = dst[i] - // di++ - // i++ - MOVB (BX), AX - MOVB AX, (DI) - INCQ DI - INCQ BX - DECQ CX - - CMPQ CX, $0 - JGT copy_match_loop - - JMP loop - -copy_interior_match: - CMPQ CX, $16 - JGT memmove_match - - // if len(dst[di:]) < 16 - MOVQ R8, AX - SUBQ DI, AX - CMPQ AX, $16 - JLT memmove_match - - MOVOU (BX), X0 - MOVOU X0, (DI) - - ADDQ CX, DI - JMP loop - -memmove_match: - // memmove(to, from, len) - MOVQ DI, 0(SP) - MOVQ BX, 8(SP) - MOVQ CX, 16(SP) - // spill - MOVQ DI, 24(SP) - MOVQ SI, 32(SP) - MOVQ CX, 40(SP) // need len to inc SI, DI after - CALL runtime·memmove(SB) - - // restore registers - MOVQ 24(SP), DI - MOVQ 32(SP), SI - MOVQ 40(SP), CX - - // recalc initial values - MOVQ dst_base+0(FP), R8 - MOVQ R8, R11 // TODO: make these sensible numbers - ADDQ dst_len+8(FP), R8 - MOVQ src_base+24(FP), R9 - ADDQ src_len+32(FP), R9 - MOVQ R8, R12 - SUBQ $32, R12 - MOVQ R9, R13 - SUBQ $16, R13 - - ADDQ CX, DI - JMP loop - -err_corrupt: - MOVQ $-1, ret+48(FP) - RET - -err_short_buf: - MOVQ $-2, ret+48(FP) - RET - -end: - SUBQ R11, DI - MOVQ DI, ret+48(FP) - RET diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_arm.s b/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_arm.s deleted file mode 100644 index 64be9adcaa..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_arm.s +++ /dev/null @@ -1,197 +0,0 @@ -// +build gc -// +build !noasm - -#include "textflag.h" - -// Register allocation. -#define dst R0 -#define dstorig R1 -#define src R2 -#define dstend R3 -#define srcend R4 -#define match R5 // Match address. -#define token R6 -#define len R7 // Literal and match lengths. 
-#define offset R6 // Match offset; overlaps with token. -#define tmp1 R8 -#define tmp2 R9 -#define tmp3 R12 - -#define minMatch $4 - -// func decodeBlock(dst, src []byte) int -TEXT ·decodeBlock(SB), NOFRAME|NOSPLIT, $-4-28 - MOVW dst_base +0(FP), dst - MOVW dst_len +4(FP), dstend - MOVW src_base+12(FP), src - MOVW src_len +16(FP), srcend - - CMP $0, srcend - BEQ shortSrc - - ADD dst, dstend - ADD src, srcend - - MOVW dst, dstorig - -loop: - // Read token. Extract literal length. - MOVBU.P 1(src), token - MOVW token >> 4, len - CMP $15, len - BNE readLitlenDone - -readLitlenLoop: - CMP src, srcend - BEQ shortSrc - MOVBU.P 1(src), tmp1 - ADD tmp1, len - CMP $255, tmp1 - BEQ readLitlenLoop - -readLitlenDone: - CMP $0, len - BEQ copyLiteralDone - - // Bounds check dst+len and src+len. - ADD dst, len, tmp1 - CMP dstend, tmp1 - //BHI shortDst // Uncomment for distinct error codes. - ADD src, len, tmp2 - CMP.LS srcend, tmp2 - BHI shortSrc - - // Copy literal. - CMP $4, len - BLO copyLiteralFinish - - // Copy 0-3 bytes until src is aligned. - TST $1, src - MOVBU.NE.P 1(src), tmp1 - MOVB.NE.P tmp1, 1(dst) - SUB.NE $1, len - - TST $2, src - MOVHU.NE.P 2(src), tmp2 - MOVB.NE.P tmp2, 1(dst) - MOVW.NE tmp2 >> 8, tmp1 - MOVB.NE.P tmp1, 1(dst) - SUB.NE $2, len - - B copyLiteralLoopCond - -copyLiteralLoop: - // Aligned load, unaligned write. - MOVW.P 4(src), tmp1 - MOVW tmp1 >> 8, tmp2 - MOVB tmp2, 1(dst) - MOVW tmp1 >> 16, tmp3 - MOVB tmp3, 2(dst) - MOVW tmp1 >> 24, tmp2 - MOVB tmp2, 3(dst) - MOVB.P tmp1, 4(dst) -copyLiteralLoopCond: - // Loop until len-4 < 0. - SUB.S $4, len - BPL copyLiteralLoop - - // Restore len, which is now negative. - ADD $4, len - -copyLiteralFinish: - // Copy remaining 0-3 bytes. - TST $2, len - MOVHU.NE.P 2(src), tmp2 - MOVB.NE.P tmp2, 1(dst) - MOVW.NE tmp2 >> 8, tmp1 - MOVB.NE.P tmp1, 1(dst) - TST $1, len - MOVBU.NE.P 1(src), tmp1 - MOVB.NE.P tmp1, 1(dst) - -copyLiteralDone: - CMP src, srcend - BEQ end - - // Initial part of match length. 
- // This frees up the token register for reuse as offset. - AND $15, token, len - - // Read offset. - ADD $2, src - CMP srcend, src - BHI shortSrc - MOVBU -2(src), offset - MOVBU -1(src), tmp1 - ORR tmp1 << 8, offset - CMP $0, offset - BEQ corrupt - - // Read rest of match length. - CMP $15, len - BNE readMatchlenDone - -readMatchlenLoop: - CMP src, srcend - BEQ shortSrc - MOVBU.P 1(src), tmp1 - ADD tmp1, len - CMP $255, tmp1 - BEQ readMatchlenLoop - -readMatchlenDone: - // Bounds check dst+len+minMatch and match = dst-offset. - ADD dst, len, tmp1 - ADD minMatch, tmp1 - CMP dstend, tmp1 - //BHI shortDst // Uncomment for distinct error codes. - SUB offset, dst, match - CMP.LS match, dstorig - BHI corrupt - - // Since len+minMatch is at least four, we can do a 4× unrolled - // byte copy loop. Using MOVW instead of four byte loads is faster, - // but to remain portable we'd have to align match first, which is - // too expensive. By alternating loads and stores, we also handle - // the case offset < 4. -copyMatch4: - SUB.S $4, len - MOVBU.P 4(match), tmp1 - MOVB.P tmp1, 4(dst) - MOVBU -3(match), tmp2 - MOVB tmp2, -3(dst) - MOVBU -2(match), tmp3 - MOVB tmp3, -2(dst) - MOVBU -1(match), tmp1 - MOVB tmp1, -1(dst) - BPL copyMatch4 - - // Restore len, which is now negative. - ADD.S $4, len - BEQ copyMatchDone - -copyMatch: - // Finish with a byte-at-a-time copy. - SUB.S $1, len - MOVBU.P 1(match), tmp2 - MOVB.P tmp2, 1(dst) - BNE copyMatch - -copyMatchDone: - CMP src, srcend - BNE loop - -end: - SUB dstorig, dst, tmp1 - MOVW tmp1, ret+24(FP) - RET - - // The three error cases have distinct labels so we can put different - // return codes here when debugging, or if the error returns need to - // be changed. 
-shortDst: -shortSrc: -corrupt: - MOVW $-1, tmp1 - MOVW tmp1, ret+24(FP) - RET diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_asm.go b/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_asm.go deleted file mode 100644 index e26f8cd613..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_asm.go +++ /dev/null @@ -1,9 +0,0 @@ -// +build amd64 arm -// +build !appengine -// +build gc -// +build !noasm - -package lz4block - -//go:noescape -func decodeBlock(dst, src []byte) int diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_other.go b/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_other.go deleted file mode 100644 index 52df2f2b8e..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4block/decode_other.go +++ /dev/null @@ -1,108 +0,0 @@ -// +build !amd64,!arm appengine !gc noasm - -package lz4block - -import "encoding/binary" - -func decodeBlock(dst, src []byte) (ret int) { - // Restrict capacities so we don't read or write out of bounds. - dst = dst[:len(dst):len(dst)] - src = src[:len(src):len(src)] - - const hasError = -2 - defer func() { - if recover() != nil { - ret = hasError - } - }() - - var si, di uint - for { - // Literals and match lengths (token). - b := uint(src[si]) - si++ - - // Literals. - if lLen := b >> 4; lLen > 0 { - switch { - case lLen < 0xF && si+16 < uint(len(src)): - // Shortcut 1 - // if we have enough room in src and dst, and the literals length - // is small enough (0..14) then copy all 16 bytes, even if not all - // are part of the literals. - copy(dst[di:], src[si:si+16]) - si += lLen - di += lLen - if mLen := b & 0xF; mLen < 0xF { - // Shortcut 2 - // if the match length (4..18) fits within the literals, then copy - // all 18 bytes, even if not all are part of the literals. 
- mLen += 4 - if offset := u16(src[si:]); mLen <= offset { - i := di - offset - end := i + 18 - if end > uint(len(dst)) { - // The remaining buffer may not hold 18 bytes. - // See https://github.com/pierrec/lz4/issues/51. - end = uint(len(dst)) - } - copy(dst[di:], dst[i:end]) - si += 2 - di += mLen - continue - } - } - case lLen == 0xF: - for src[si] == 0xFF { - lLen += 0xFF - si++ - } - lLen += uint(src[si]) - si++ - fallthrough - default: - copy(dst[di:di+lLen], src[si:si+lLen]) - si += lLen - di += lLen - } - } - if si == uint(len(src)) { - return int(di) - } else if si > uint(len(src)) { - return hasError - } - - offset := u16(src[si:]) - if offset == 0 { - return hasError - } - si += 2 - - // Match. - mLen := b & 0xF - if mLen == 0xF { - for src[si] == 0xFF { - mLen += 0xFF - si++ - } - mLen += uint(src[si]) - si++ - } - mLen += minMatch - - // Copy the match. - expanded := dst[di-offset:] - if mLen > offset { - // Efficiently copy the match dst[di-offset:di] into the dst slice. 
- bytesToCopy := offset * (mLen / offset) - for n := offset; n <= bytesToCopy+offset; n *= 2 { - copy(expanded[n:], expanded[:n]) - } - di += bytesToCopy - mLen -= bytesToCopy - } - di += uint(copy(dst[di:di+mLen], expanded[:mLen])) - } -} - -func u16(p []byte) uint { return uint(binary.LittleEndian.Uint16(p)) } diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4errors/errors.go b/vendor/github.com/pierrec/lz4/v4/internal/lz4errors/errors.go deleted file mode 100644 index 710ea42812..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4errors/errors.go +++ /dev/null @@ -1,19 +0,0 @@ -package lz4errors - -type Error string - -func (e Error) Error() string { return string(e) } - -const ( - ErrInvalidSourceShortBuffer Error = "lz4: invalid source or destination buffer too short" - ErrInvalidFrame Error = "lz4: bad magic number" - ErrInternalUnhandledState Error = "lz4: unhandled state" - ErrInvalidHeaderChecksum Error = "lz4: invalid header checksum" - ErrInvalidBlockChecksum Error = "lz4: invalid block checksum" - ErrInvalidFrameChecksum Error = "lz4: invalid frame checksum" - ErrOptionInvalidCompressionLevel Error = "lz4: invalid compression level" - ErrOptionClosedOrError Error = "lz4: cannot apply options on closed or in error object" - ErrOptionInvalidBlockSize Error = "lz4: invalid block size" - ErrOptionNotApplicable Error = "lz4: option not applicable" - ErrWriterNotClosed Error = "lz4: writer not closed" -) diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/block.go b/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/block.go deleted file mode 100644 index 5e0c062ec0..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/block.go +++ /dev/null @@ -1,332 +0,0 @@ -package lz4stream - -import ( - "encoding/binary" - "fmt" - "io" - "sync" - - "github.com/pierrec/lz4/v4/internal/lz4block" - "github.com/pierrec/lz4/v4/internal/lz4errors" - 
"github.com/pierrec/lz4/v4/internal/xxh32" -) - -type Blocks struct { - Block *FrameDataBlock - Blocks chan chan *FrameDataBlock - mu sync.Mutex - err error -} - -func (b *Blocks) initW(f *Frame, dst io.Writer, num int) { - if num == 1 { - b.Blocks = nil - b.Block = NewFrameDataBlock(f) - return - } - b.Block = nil - if cap(b.Blocks) != num { - b.Blocks = make(chan chan *FrameDataBlock, num) - } - // goroutine managing concurrent block compression goroutines. - go func() { - // Process next block compression item. - for c := range b.Blocks { - // Read the next compressed block result. - // Waiting here ensures that the blocks are output in the order they were sent. - // The incoming channel is always closed as it indicates to the caller that - // the block has been processed. - block := <-c - if block == nil { - // Notify the block compression routine that we are done with its result. - // This is used when a sentinel block is sent to terminate the compression. - close(c) - return - } - // Do not attempt to write the block upon any previous failure. - if b.err == nil { - // Write the block. - if err := block.Write(f, dst); err != nil { - // Keep the first error. - b.err = err - // All pending compression goroutines need to shut down, so we need to keep going. - } - } - close(c) - } - }() -} - -func (b *Blocks) close(f *Frame, num int) error { - if num == 1 { - if b.Block != nil { - b.Block.Close(f) - } - err := b.err - b.err = nil - return err - } - if b.Blocks == nil { - err := b.err - b.err = nil - return err - } - c := make(chan *FrameDataBlock) - b.Blocks <- c - c <- nil - <-c - err := b.err - b.err = nil - return err -} - -// ErrorR returns any error set while uncompressing a stream. -func (b *Blocks) ErrorR() error { - b.mu.Lock() - defer b.mu.Unlock() - return b.err -} - -// initR returns a channel that streams the uncompressed blocks if in concurrent -// mode and no error. When the channel is closed, check for any error with b.ErrorR. 
-// -// If not in concurrent mode, the uncompressed block is b.Block and the returned error -// needs to be checked. -func (b *Blocks) initR(f *Frame, num int, src io.Reader) (chan []byte, error) { - size := f.Descriptor.Flags.BlockSizeIndex() - if num == 1 { - b.Blocks = nil - b.Block = NewFrameDataBlock(f) - return nil, nil - } - b.Block = nil - blocks := make(chan chan []byte, num) - // data receives the uncompressed blocks. - data := make(chan []byte) - // Read blocks from the source sequentially - // and uncompress them concurrently. - - // In legacy mode, accrue the uncompress sizes in cum. - var cum uint32 - go func() { - var cumx uint32 - var err error - for b.ErrorR() == nil { - block := NewFrameDataBlock(f) - cumx, err = block.Read(f, src, 0) - if err != nil { - break - } - // Recheck for an error as reading may be slow and uncompressing is expensive. - if b.ErrorR() != nil { - break - } - c := make(chan []byte) - blocks <- c - go func() { - data, err := block.Uncompress(f, size.Get(), false) - if err != nil { - b.closeR(err) - } else { - c <- data - } - }() - } - // End the collection loop and the data channel. - c := make(chan []byte) - blocks <- c - c <- nil // signal the collection loop that we are done - <-c // wait for the collect loop to complete - if f.isLegacy() && cum == cumx { - err = io.EOF - } - b.closeR(err) - close(data) - }() - // Collect the uncompressed blocks and make them available - // on the returned channel. - go func(leg bool) { - defer close(blocks) - for c := range blocks { - buf := <-c - if buf == nil { - // Signal to end the loop. - close(c) - return - } - // Perform checksum now as the blocks are received in order. - if f.Descriptor.Flags.ContentChecksum() { - _, _ = f.checksum.Write(buf) - } - if leg { - cum += uint32(len(buf)) - } - data <- buf - close(c) - } - }(f.isLegacy()) - return data, nil -} - -// closeR safely sets the error on b if not already set. 
-func (b *Blocks) closeR(err error) { - b.mu.Lock() - if b.err == nil { - b.err = err - } - b.mu.Unlock() -} - -func NewFrameDataBlock(f *Frame) *FrameDataBlock { - buf := f.Descriptor.Flags.BlockSizeIndex().Get() - return &FrameDataBlock{Data: buf, data: buf} -} - -type FrameDataBlock struct { - Size DataBlockSize - Data []byte // compressed or uncompressed data (.data or .src) - Checksum uint32 - data []byte // buffer for compressed data - src []byte // uncompressed data - err error // used in concurrent mode -} - -func (b *FrameDataBlock) Close(f *Frame) { - b.Size = 0 - b.Checksum = 0 - b.err = nil - if b.data != nil { - // Block was not already closed. - lz4block.Put(b.data) - b.Data = nil - b.data = nil - b.src = nil - } -} - -// Block compression errors are ignored since the buffer is sized appropriately. -func (b *FrameDataBlock) Compress(f *Frame, src []byte, level lz4block.CompressionLevel) *FrameDataBlock { - data := b.data - if f.isLegacy() { - data = data[:cap(data)] - } else { - data = data[:len(src)] // trigger the incompressible flag in CompressBlock - } - var n int - switch level { - case lz4block.Fast: - n, _ = lz4block.CompressBlock(src, data) - default: - n, _ = lz4block.CompressBlockHC(src, data, level) - } - if n == 0 { - b.Size.UncompressedSet(true) - b.Data = src - } else { - b.Size.UncompressedSet(false) - b.Data = data[:n] - } - b.Size.sizeSet(len(b.Data)) - b.src = src // keep track of the source for content checksum - - if f.Descriptor.Flags.BlockChecksum() { - b.Checksum = xxh32.ChecksumZero(src) - } - return b -} - -func (b *FrameDataBlock) Write(f *Frame, dst io.Writer) error { - // Write is called in the same order as blocks are compressed, - // so content checksum must be done here. 
- if f.Descriptor.Flags.ContentChecksum() { - _, _ = f.checksum.Write(b.src) - } - buf := f.buf[:] - binary.LittleEndian.PutUint32(buf, uint32(b.Size)) - if _, err := dst.Write(buf[:4]); err != nil { - return err - } - - if _, err := dst.Write(b.Data); err != nil { - return err - } - - if b.Checksum == 0 { - return nil - } - binary.LittleEndian.PutUint32(buf, b.Checksum) - _, err := dst.Write(buf[:4]) - return err -} - -// Read updates b with the next block data, size and checksum if available. -func (b *FrameDataBlock) Read(f *Frame, src io.Reader, cum uint32) (uint32, error) { - x, err := f.readUint32(src) - if err != nil { - return 0, err - } - if f.isLegacy() { - switch x { - case frameMagicLegacy: - // Concatenated legacy frame. - return b.Read(f, src, cum) - case cum: - // Only works in non concurrent mode, for concurrent mode - // it is handled separately. - // Linux kernel format appends the total uncompressed size at the end. - return 0, io.EOF - } - } else if x == 0 { - // Marker for end of stream. 
- return 0, io.EOF - } - b.Size = DataBlockSize(x) - - size := b.Size.size() - if size > cap(b.data) { - return x, lz4errors.ErrOptionInvalidBlockSize - } - b.data = b.data[:size] - if _, err := io.ReadFull(src, b.data); err != nil { - return x, err - } - if f.Descriptor.Flags.BlockChecksum() { - sum, err := f.readUint32(src) - if err != nil { - return 0, err - } - b.Checksum = sum - } - return x, nil -} - -func (b *FrameDataBlock) Uncompress(f *Frame, dst []byte, sum bool) ([]byte, error) { - if b.Size.Uncompressed() { - n := copy(dst, b.data) - dst = dst[:n] - } else { - n, err := lz4block.UncompressBlock(b.data, dst) - if err != nil { - return nil, err - } - dst = dst[:n] - } - if f.Descriptor.Flags.BlockChecksum() { - if c := xxh32.ChecksumZero(dst); c != b.Checksum { - err := fmt.Errorf("%w: got %x; expected %x", lz4errors.ErrInvalidBlockChecksum, c, b.Checksum) - return nil, err - } - } - if sum && f.Descriptor.Flags.ContentChecksum() { - _, _ = f.checksum.Write(dst) - } - return dst, nil -} - -func (f *Frame) readUint32(r io.Reader) (x uint32, err error) { - if _, err = io.ReadFull(r, f.buf[:4]); err != nil { - return - } - x = binary.LittleEndian.Uint32(f.buf[:4]) - return -} diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/frame.go b/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/frame.go deleted file mode 100644 index cfbd5674d9..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/frame.go +++ /dev/null @@ -1,200 +0,0 @@ -// Package lz4stream provides the types that support reading and writing LZ4 data streams. 
-package lz4stream - -import ( - "encoding/binary" - "fmt" - "io" - "io/ioutil" - - "github.com/pierrec/lz4/v4/internal/lz4block" - "github.com/pierrec/lz4/v4/internal/lz4errors" - "github.com/pierrec/lz4/v4/internal/xxh32" -) - -//go:generate go run gen.go - -const ( - frameMagic uint32 = 0x184D2204 - frameSkipMagic uint32 = 0x184D2A50 - frameMagicLegacy uint32 = 0x184C2102 -) - -func NewFrame() *Frame { - return &Frame{} -} - -type Frame struct { - buf [15]byte // frame descriptor needs at most 4(magic)+4+8+1=11 bytes - Magic uint32 - Descriptor FrameDescriptor - Blocks Blocks - Checksum uint32 - checksum xxh32.XXHZero -} - -// Reset allows reusing the Frame. -// The Descriptor configuration is not modified. -func (f *Frame) Reset(num int) { - f.Magic = 0 - f.Descriptor.Checksum = 0 - f.Descriptor.ContentSize = 0 - _ = f.Blocks.close(f, num) - f.Checksum = 0 -} - -func (f *Frame) InitW(dst io.Writer, num int, legacy bool) { - if legacy { - f.Magic = frameMagicLegacy - idx := lz4block.Index(lz4block.Block8Mb) - f.Descriptor.Flags.BlockSizeIndexSet(idx) - } else { - f.Magic = frameMagic - f.Descriptor.initW() - } - f.Blocks.initW(f, dst, num) - f.checksum.Reset() -} - -func (f *Frame) CloseW(dst io.Writer, num int) error { - if err := f.Blocks.close(f, num); err != nil { - return err - } - if f.isLegacy() { - return nil - } - buf := f.buf[:0] - // End mark (data block size of uint32(0)). - buf = append(buf, 0, 0, 0, 0) - if f.Descriptor.Flags.ContentChecksum() { - buf = f.checksum.Sum(buf) - } - _, err := dst.Write(buf) - return err -} - -func (f *Frame) isLegacy() bool { - return f.Magic == frameMagicLegacy -} - -func (f *Frame) InitR(src io.Reader, num int) (chan []byte, error) { - if f.Magic > 0 { - // Header already read. 
- return nil, nil - } - -newFrame: - var err error - if f.Magic, err = f.readUint32(src); err != nil { - return nil, err - } - switch m := f.Magic; { - case m == frameMagic || m == frameMagicLegacy: - // All 16 values of frameSkipMagic are valid. - case m>>8 == frameSkipMagic>>8: - skip, err := f.readUint32(src) - if err != nil { - return nil, err - } - if _, err := io.CopyN(ioutil.Discard, src, int64(skip)); err != nil { - return nil, err - } - goto newFrame - default: - return nil, lz4errors.ErrInvalidFrame - } - if err := f.Descriptor.initR(f, src); err != nil { - return nil, err - } - f.checksum.Reset() - return f.Blocks.initR(f, num, src) -} - -func (f *Frame) CloseR(src io.Reader) (err error) { - if f.isLegacy() { - return nil - } - if !f.Descriptor.Flags.ContentChecksum() { - return nil - } - if f.Checksum, err = f.readUint32(src); err != nil { - return err - } - if c := f.checksum.Sum32(); c != f.Checksum { - return fmt.Errorf("%w: got %x; expected %x", lz4errors.ErrInvalidFrameChecksum, c, f.Checksum) - } - return nil -} - -type FrameDescriptor struct { - Flags DescriptorFlags - ContentSize uint64 - Checksum uint8 -} - -func (fd *FrameDescriptor) initW() { - fd.Flags.VersionSet(1) - fd.Flags.BlockIndependenceSet(true) -} - -func (fd *FrameDescriptor) Write(f *Frame, dst io.Writer) error { - if fd.Checksum > 0 { - // Header already written. - return nil - } - - buf := f.buf[:4] - // Write the magic number here even though it belongs to the Frame. 
- binary.LittleEndian.PutUint32(buf, f.Magic) - if !f.isLegacy() { - buf = buf[:4+2] - binary.LittleEndian.PutUint16(buf[4:], uint16(fd.Flags)) - - if fd.Flags.Size() { - buf = buf[:4+2+8] - binary.LittleEndian.PutUint64(buf[4+2:], fd.ContentSize) - } - fd.Checksum = descriptorChecksum(buf[4:]) - buf = append(buf, fd.Checksum) - } - - _, err := dst.Write(buf) - return err -} - -func (fd *FrameDescriptor) initR(f *Frame, src io.Reader) error { - if f.isLegacy() { - idx := lz4block.Index(lz4block.Block8Mb) - f.Descriptor.Flags.BlockSizeIndexSet(idx) - return nil - } - // Read the flags and the checksum, hoping that there is not content size. - buf := f.buf[:3] - if _, err := io.ReadFull(src, buf); err != nil { - return err - } - descr := binary.LittleEndian.Uint16(buf) - fd.Flags = DescriptorFlags(descr) - if fd.Flags.Size() { - // Append the 8 missing bytes. - buf = buf[:3+8] - if _, err := io.ReadFull(src, buf[3:]); err != nil { - return err - } - fd.ContentSize = binary.LittleEndian.Uint64(buf[2:]) - } - fd.Checksum = buf[len(buf)-1] // the checksum is the last byte - buf = buf[:len(buf)-1] // all descriptor fields except checksum - if c := descriptorChecksum(buf); fd.Checksum != c { - return fmt.Errorf("%w: got %x; expected %x", lz4errors.ErrInvalidHeaderChecksum, c, fd.Checksum) - } - // Validate the elements that can be. - if idx := fd.Flags.BlockSizeIndex(); !idx.IsValid() { - return lz4errors.ErrOptionInvalidBlockSize - } - return nil -} - -func descriptorChecksum(buf []byte) byte { - return byte(xxh32.ChecksumZero(buf) >> 8) -} diff --git a/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/frame_gen.go b/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/frame_gen.go deleted file mode 100644 index d33a6be95c..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/lz4stream/frame_gen.go +++ /dev/null @@ -1,103 +0,0 @@ -// Code generated by `gen.exe`. DO NOT EDIT. 
- -package lz4stream - -import "github.com/pierrec/lz4/v4/internal/lz4block" - -// DescriptorFlags is defined as follow: -// field bits -// ----- ---- -// _ 2 -// ContentChecksum 1 -// Size 1 -// BlockChecksum 1 -// BlockIndependence 1 -// Version 2 -// _ 4 -// BlockSizeIndex 3 -// _ 1 -type DescriptorFlags uint16 - -// Getters. -func (x DescriptorFlags) ContentChecksum() bool { return x>>2&1 != 0 } -func (x DescriptorFlags) Size() bool { return x>>3&1 != 0 } -func (x DescriptorFlags) BlockChecksum() bool { return x>>4&1 != 0 } -func (x DescriptorFlags) BlockIndependence() bool { return x>>5&1 != 0 } -func (x DescriptorFlags) Version() uint16 { return uint16(x >> 6 & 0x3) } -func (x DescriptorFlags) BlockSizeIndex() lz4block.BlockSizeIndex { - return lz4block.BlockSizeIndex(x >> 12 & 0x7) -} - -// Setters. -func (x *DescriptorFlags) ContentChecksumSet(v bool) *DescriptorFlags { - const b = 1 << 2 - if v { - *x = *x&^b | b - } else { - *x &^= b - } - return x -} -func (x *DescriptorFlags) SizeSet(v bool) *DescriptorFlags { - const b = 1 << 3 - if v { - *x = *x&^b | b - } else { - *x &^= b - } - return x -} -func (x *DescriptorFlags) BlockChecksumSet(v bool) *DescriptorFlags { - const b = 1 << 4 - if v { - *x = *x&^b | b - } else { - *x &^= b - } - return x -} -func (x *DescriptorFlags) BlockIndependenceSet(v bool) *DescriptorFlags { - const b = 1 << 5 - if v { - *x = *x&^b | b - } else { - *x &^= b - } - return x -} -func (x *DescriptorFlags) VersionSet(v uint16) *DescriptorFlags { - *x = *x&^(0x3<<6) | (DescriptorFlags(v) & 0x3 << 6) - return x -} -func (x *DescriptorFlags) BlockSizeIndexSet(v lz4block.BlockSizeIndex) *DescriptorFlags { - *x = *x&^(0x7<<12) | (DescriptorFlags(v) & 0x7 << 12) - return x -} - -// Code generated by `gen.exe`. DO NOT EDIT. - -// DataBlockSize is defined as follow: -// field bits -// ----- ---- -// size 31 -// Uncompressed 1 -type DataBlockSize uint32 - -// Getters. 
-func (x DataBlockSize) size() int { return int(x & 0x7FFFFFFF) } -func (x DataBlockSize) Uncompressed() bool { return x>>31&1 != 0 } - -// Setters. -func (x *DataBlockSize) sizeSet(v int) *DataBlockSize { - *x = *x&^0x7FFFFFFF | DataBlockSize(v)&0x7FFFFFFF - return x -} -func (x *DataBlockSize) UncompressedSet(v bool) *DataBlockSize { - const b = 1 << 31 - if v { - *x = *x&^b | b - } else { - *x &^= b - } - return x -} diff --git a/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero.go b/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero.go deleted file mode 100644 index 8d3206a87c..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero.go +++ /dev/null @@ -1,212 +0,0 @@ -// Package xxh32 implements the very fast XXH hashing algorithm (32 bits version). -// (https://github.com/Cyan4973/XXH/) -package xxh32 - -import ( - "encoding/binary" -) - -const ( - prime1 uint32 = 2654435761 - prime2 uint32 = 2246822519 - prime3 uint32 = 3266489917 - prime4 uint32 = 668265263 - prime5 uint32 = 374761393 - - primeMask = 0xFFFFFFFF - prime1plus2 = uint32((uint64(prime1) + uint64(prime2)) & primeMask) // 606290984 - prime1minus = uint32((-int64(prime1)) & primeMask) // 1640531535 -) - -// XXHZero represents an xxhash32 object with seed 0. -type XXHZero struct { - v [4]uint32 - totalLen uint64 - buf [16]byte - bufused int -} - -// Sum appends the current hash to b and returns the resulting slice. -// It does not change the underlying hash state. -func (xxh XXHZero) Sum(b []byte) []byte { - h32 := xxh.Sum32() - return append(b, byte(h32), byte(h32>>8), byte(h32>>16), byte(h32>>24)) -} - -// Reset resets the Hash to its initial state. -func (xxh *XXHZero) Reset() { - xxh.v[0] = prime1plus2 - xxh.v[1] = prime2 - xxh.v[2] = 0 - xxh.v[3] = prime1minus - xxh.totalLen = 0 - xxh.bufused = 0 -} - -// Size returns the number of bytes returned by Sum(). 
-func (xxh *XXHZero) Size() int { - return 4 -} - -// BlockSizeIndex gives the minimum number of bytes accepted by Write(). -func (xxh *XXHZero) BlockSize() int { - return 1 -} - -// Write adds input bytes to the Hash. -// It never returns an error. -func (xxh *XXHZero) Write(input []byte) (int, error) { - if xxh.totalLen == 0 { - xxh.Reset() - } - n := len(input) - m := xxh.bufused - - xxh.totalLen += uint64(n) - - r := len(xxh.buf) - m - if n < r { - copy(xxh.buf[m:], input) - xxh.bufused += len(input) - return n, nil - } - - var buf *[16]byte - if m != 0 { - // some data left from previous update - buf = &xxh.buf - c := copy(buf[m:], input) - n -= c - input = input[c:] - } - update(&xxh.v, buf, input) - xxh.bufused = copy(xxh.buf[:], input[n-n%16:]) - - return n, nil -} - -// Portable version of update. This updates v by processing all of buf -// (if not nil) and all full 16-byte blocks of input. -func updateGo(v *[4]uint32, buf *[16]byte, input []byte) { - // Causes compiler to work directly from registers instead of stack: - v1, v2, v3, v4 := v[0], v[1], v[2], v[3] - - if buf != nil { - v1 = rol13(v1+binary.LittleEndian.Uint32(buf[:])*prime2) * prime1 - v2 = rol13(v2+binary.LittleEndian.Uint32(buf[4:])*prime2) * prime1 - v3 = rol13(v3+binary.LittleEndian.Uint32(buf[8:])*prime2) * prime1 - v4 = rol13(v4+binary.LittleEndian.Uint32(buf[12:])*prime2) * prime1 - } - - for ; len(input) >= 16; input = input[16:] { - sub := input[:16] //BCE hint for compiler - v1 = rol13(v1+binary.LittleEndian.Uint32(sub[:])*prime2) * prime1 - v2 = rol13(v2+binary.LittleEndian.Uint32(sub[4:])*prime2) * prime1 - v3 = rol13(v3+binary.LittleEndian.Uint32(sub[8:])*prime2) * prime1 - v4 = rol13(v4+binary.LittleEndian.Uint32(sub[12:])*prime2) * prime1 - } - v[0], v[1], v[2], v[3] = v1, v2, v3, v4 -} - -// Sum32 returns the 32 bits Hash value. 
-func (xxh *XXHZero) Sum32() uint32 { - h32 := uint32(xxh.totalLen) - if h32 >= 16 { - h32 += rol1(xxh.v[0]) + rol7(xxh.v[1]) + rol12(xxh.v[2]) + rol18(xxh.v[3]) - } else { - h32 += prime5 - } - - p := 0 - n := xxh.bufused - buf := xxh.buf - for n := n - 4; p <= n; p += 4 { - h32 += binary.LittleEndian.Uint32(buf[p:p+4]) * prime3 - h32 = rol17(h32) * prime4 - } - for ; p < n; p++ { - h32 += uint32(buf[p]) * prime5 - h32 = rol11(h32) * prime1 - } - - h32 ^= h32 >> 15 - h32 *= prime2 - h32 ^= h32 >> 13 - h32 *= prime3 - h32 ^= h32 >> 16 - - return h32 -} - -// Portable version of ChecksumZero. -func checksumZeroGo(input []byte) uint32 { - n := len(input) - h32 := uint32(n) - - if n < 16 { - h32 += prime5 - } else { - v1 := prime1plus2 - v2 := prime2 - v3 := uint32(0) - v4 := prime1minus - p := 0 - for n := n - 16; p <= n; p += 16 { - sub := input[p:][:16] //BCE hint for compiler - v1 = rol13(v1+binary.LittleEndian.Uint32(sub[:])*prime2) * prime1 - v2 = rol13(v2+binary.LittleEndian.Uint32(sub[4:])*prime2) * prime1 - v3 = rol13(v3+binary.LittleEndian.Uint32(sub[8:])*prime2) * prime1 - v4 = rol13(v4+binary.LittleEndian.Uint32(sub[12:])*prime2) * prime1 - } - input = input[p:] - n -= p - h32 += rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) - } - - p := 0 - for n := n - 4; p <= n; p += 4 { - h32 += binary.LittleEndian.Uint32(input[p:p+4]) * prime3 - h32 = rol17(h32) * prime4 - } - for p < n { - h32 += uint32(input[p]) * prime5 - h32 = rol11(h32) * prime1 - p++ - } - - h32 ^= h32 >> 15 - h32 *= prime2 - h32 ^= h32 >> 13 - h32 *= prime3 - h32 ^= h32 >> 16 - - return h32 -} - -func rol1(u uint32) uint32 { - return u<<1 | u>>31 -} - -func rol7(u uint32) uint32 { - return u<<7 | u>>25 -} - -func rol11(u uint32) uint32 { - return u<<11 | u>>21 -} - -func rol12(u uint32) uint32 { - return u<<12 | u>>20 -} - -func rol13(u uint32) uint32 { - return u<<13 | u>>19 -} - -func rol17(u uint32) uint32 { - return u<<17 | u>>15 -} - -func rol18(u uint32) uint32 { - return u<<18 | u>>14 -} 
diff --git a/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_arm.go b/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_arm.go deleted file mode 100644 index 0978b2665b..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_arm.go +++ /dev/null @@ -1,11 +0,0 @@ -// +build !noasm - -package xxh32 - -// ChecksumZero returns the 32-bit hash of input. -// -//go:noescape -func ChecksumZero(input []byte) uint32 - -//go:noescape -func update(v *[4]uint32, buf *[16]byte, input []byte) diff --git a/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_arm.s b/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_arm.s deleted file mode 100644 index 0e9f146a36..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_arm.s +++ /dev/null @@ -1,259 +0,0 @@ -// +build !noasm - -#include "textflag.h" - -#define prime1 $2654435761 -#define prime2 $2246822519 -#define prime3 $3266489917 -#define prime4 $668265263 -#define prime5 $374761393 - -#define prime1plus2 $606290984 -#define prime1minus $1640531535 - -// Register allocation. -#define p R0 -#define n R1 -#define h R2 -#define v1 R2 // Alias for h. -#define v2 R3 -#define v3 R4 -#define v4 R5 -#define x1 R6 -#define x2 R7 -#define x3 R8 -#define x4 R9 - -// We need the primes in registers. The 16-byte loop only uses prime{1,2}. -#define prime1r R11 -#define prime2r R12 -#define prime3r R3 // The rest can alias v{2-4}. -#define prime4r R4 -#define prime5r R5 - -// Update round macros. These read from and increment p. 
- -#define round16aligned \ - MOVM.IA.W (p), [x1, x2, x3, x4] \ - \ - MULA x1, prime2r, v1, v1 \ - MULA x2, prime2r, v2, v2 \ - MULA x3, prime2r, v3, v3 \ - MULA x4, prime2r, v4, v4 \ - \ - MOVW v1 @> 19, v1 \ - MOVW v2 @> 19, v2 \ - MOVW v3 @> 19, v3 \ - MOVW v4 @> 19, v4 \ - \ - MUL prime1r, v1 \ - MUL prime1r, v2 \ - MUL prime1r, v3 \ - MUL prime1r, v4 \ - -#define round16unaligned \ - MOVBU.P 16(p), x1 \ - MOVBU -15(p), x2 \ - ORR x2 << 8, x1 \ - MOVBU -14(p), x3 \ - MOVBU -13(p), x4 \ - ORR x4 << 8, x3 \ - ORR x3 << 16, x1 \ - \ - MULA x1, prime2r, v1, v1 \ - MOVW v1 @> 19, v1 \ - MUL prime1r, v1 \ - \ - MOVBU -12(p), x1 \ - MOVBU -11(p), x2 \ - ORR x2 << 8, x1 \ - MOVBU -10(p), x3 \ - MOVBU -9(p), x4 \ - ORR x4 << 8, x3 \ - ORR x3 << 16, x1 \ - \ - MULA x1, prime2r, v2, v2 \ - MOVW v2 @> 19, v2 \ - MUL prime1r, v2 \ - \ - MOVBU -8(p), x1 \ - MOVBU -7(p), x2 \ - ORR x2 << 8, x1 \ - MOVBU -6(p), x3 \ - MOVBU -5(p), x4 \ - ORR x4 << 8, x3 \ - ORR x3 << 16, x1 \ - \ - MULA x1, prime2r, v3, v3 \ - MOVW v3 @> 19, v3 \ - MUL prime1r, v3 \ - \ - MOVBU -4(p), x1 \ - MOVBU -3(p), x2 \ - ORR x2 << 8, x1 \ - MOVBU -2(p), x3 \ - MOVBU -1(p), x4 \ - ORR x4 << 8, x3 \ - ORR x3 << 16, x1 \ - \ - MULA x1, prime2r, v4, v4 \ - MOVW v4 @> 19, v4 \ - MUL prime1r, v4 \ - - -// func ChecksumZero([]byte) uint32 -TEXT ·ChecksumZero(SB), NOFRAME|NOSPLIT, $-4-16 - MOVW input_base+0(FP), p - MOVW input_len+4(FP), n - - MOVW prime1, prime1r - MOVW prime2, prime2r - - // Set up h for n < 16. It's tempting to say {ADD prime5, n, h} - // here, but that's a pseudo-op that generates a load through R11. - MOVW prime5, prime5r - ADD prime5r, n, h - CMP $0, n - BEQ end - - // We let n go negative so we can do comparisons with SUB.S - // instead of separate CMP. 
- SUB.S $16, n - BMI loop16done - - MOVW prime1plus2, v1 - MOVW prime2, v2 - MOVW $0, v3 - MOVW prime1minus, v4 - - TST $3, p - BNE loop16unaligned - -loop16aligned: - SUB.S $16, n - round16aligned - BPL loop16aligned - B loop16finish - -loop16unaligned: - SUB.S $16, n - round16unaligned - BPL loop16unaligned - -loop16finish: - MOVW v1 @> 31, h - ADD v2 @> 25, h - ADD v3 @> 20, h - ADD v4 @> 14, h - - // h += len(input) with v2 as temporary. - MOVW input_len+4(FP), v2 - ADD v2, h - -loop16done: - ADD $16, n // Restore number of bytes left. - - SUB.S $4, n - MOVW prime3, prime3r - BMI loop4done - MOVW prime4, prime4r - - TST $3, p - BNE loop4unaligned - -loop4aligned: - SUB.S $4, n - - MOVW.P 4(p), x1 - MULA prime3r, x1, h, h - MOVW h @> 15, h - MUL prime4r, h - - BPL loop4aligned - B loop4done - -loop4unaligned: - SUB.S $4, n - - MOVBU.P 4(p), x1 - MOVBU -3(p), x2 - ORR x2 << 8, x1 - MOVBU -2(p), x3 - ORR x3 << 16, x1 - MOVBU -1(p), x4 - ORR x4 << 24, x1 - - MULA prime3r, x1, h, h - MOVW h @> 15, h - MUL prime4r, h - - BPL loop4unaligned - -loop4done: - ADD.S $4, n // Restore number of bytes left. - BEQ end - - MOVW prime5, prime5r - -loop1: - SUB.S $1, n - - MOVBU.P 1(p), x1 - MULA prime5r, x1, h, h - MOVW h @> 21, h - MUL prime1r, h - - BNE loop1 - -end: - MOVW prime3, prime3r - EOR h >> 15, h - MUL prime2r, h - EOR h >> 13, h - MUL prime3r, h - EOR h >> 16, h - - MOVW h, ret+12(FP) - RET - - -// func update(v *[4]uint64, buf *[16]byte, p []byte) -TEXT ·update(SB), NOFRAME|NOSPLIT, $-4-20 - MOVW v+0(FP), p - MOVM.IA (p), [v1, v2, v3, v4] - - MOVW prime1, prime1r - MOVW prime2, prime2r - - // Process buf, if not nil. 
- MOVW buf+4(FP), p - CMP $0, p - BEQ noBuffered - - round16aligned - -noBuffered: - MOVW input_base +8(FP), p - MOVW input_len +12(FP), n - - SUB.S $16, n - BMI end - - TST $3, p - BNE loop16unaligned - -loop16aligned: - SUB.S $16, n - round16aligned - BPL loop16aligned - B end - -loop16unaligned: - SUB.S $16, n - round16unaligned - BPL loop16unaligned - -end: - MOVW v+0(FP), p - MOVM.IA [v1, v2, v3, v4], (p) - RET diff --git a/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_other.go b/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_other.go deleted file mode 100644 index c96b59b8c3..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/internal/xxh32/xxh32zero_other.go +++ /dev/null @@ -1,10 +0,0 @@ -// +build !arm noasm - -package xxh32 - -// ChecksumZero returns the 32-bit hash of input. -func ChecksumZero(input []byte) uint32 { return checksumZeroGo(input) } - -func update(v *[4]uint32, buf *[16]byte, input []byte) { - updateGo(v, buf, input) -} diff --git a/vendor/github.com/pierrec/lz4/v4/lz4.go b/vendor/github.com/pierrec/lz4/v4/lz4.go deleted file mode 100644 index c585d4064f..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/lz4.go +++ /dev/null @@ -1,147 +0,0 @@ -// Package lz4 implements reading and writing lz4 compressed data. -// -// The package supports both the LZ4 stream format, -// as specified in http://fastcompression.blogspot.fr/2013/04/lz4-streaming-format-final.html, -// and the LZ4 block format, defined at -// http://fastcompression.blogspot.fr/2011/05/lz4-explained.html. -// -// See https://github.com/lz4/lz4 for the reference C implementation. -package lz4 - -import ( - "github.com/pierrec/lz4/v4/internal/lz4block" - "github.com/pierrec/lz4/v4/internal/lz4errors" -) - -func _() { - // Safety checks for duplicated elements. 
- var x [1]struct{} - _ = x[lz4block.CompressionLevel(Fast)-lz4block.Fast] - _ = x[Block64Kb-BlockSize(lz4block.Block64Kb)] - _ = x[Block256Kb-BlockSize(lz4block.Block256Kb)] - _ = x[Block1Mb-BlockSize(lz4block.Block1Mb)] - _ = x[Block4Mb-BlockSize(lz4block.Block4Mb)] -} - -// CompressBlockBound returns the maximum size of a given buffer of size n, when not compressible. -func CompressBlockBound(n int) int { - return lz4block.CompressBlockBound(n) -} - -// UncompressBlock uncompresses the source buffer into the destination one, -// and returns the uncompressed size. -// -// The destination buffer must be sized appropriately. -// -// An error is returned if the source data is invalid or the destination buffer is too small. -func UncompressBlock(src, dst []byte) (int, error) { - return lz4block.UncompressBlock(src, dst) -} - -// A Compressor compresses data into the LZ4 block format. -// It uses a fast compression algorithm. -// -// A Compressor is not safe for concurrent use by multiple goroutines. -// -// Use a Writer to compress into the LZ4 stream format. -type Compressor struct{ c lz4block.Compressor } - -// CompressBlock compresses the source buffer src into the destination dst. -// -// If compression is successful, the first return value is the size of the -// compressed data, which is always >0. -// -// If dst has length at least CompressBlockBound(len(src)), compression always -// succeeds. Otherwise, the first return value is zero. The error return is -// non-nil if the compressed data does not fit in dst, but it might fit in a -// larger buffer that is still smaller than CompressBlockBound(len(src)). The -// return value (0, nil) means the data is likely incompressible and a buffer -// of length CompressBlockBound(len(src)) should be passed in. -func (c *Compressor) CompressBlock(src, dst []byte) (int, error) { - return c.c.CompressBlock(src, dst) -} - -// CompressBlock compresses the source buffer into the destination one. 
-// This is the fast version of LZ4 compression and also the default one. -// -// The argument hashTable is scratch space for a hash table used by the -// compressor. If provided, it should have length at least 1<<16. If it is -// shorter (or nil), CompressBlock allocates its own hash table. -// -// The size of the compressed data is returned. -// -// If the destination buffer size is lower than CompressBlockBound and -// the compressed size is 0 and no error, then the data is incompressible. -// -// An error is returned if the destination buffer is too small. - -// CompressBlock is equivalent to Compressor.CompressBlock. -// The final argument is ignored and should be set to nil. -// -// This function is deprecated. Use a Compressor instead. -func CompressBlock(src, dst []byte, _ []int) (int, error) { - return lz4block.CompressBlock(src, dst) -} - -// A CompressorHC compresses data into the LZ4 block format. -// Its compression ratio is potentially better than that of a Compressor, -// but it is also slower and requires more memory. -// -// A Compressor is not safe for concurrent use by multiple goroutines. -// -// Use a Writer to compress into the LZ4 stream format. -type CompressorHC struct { - // Level is the maximum search depth for compression. - // Values <= 0 mean no maximum. - Level CompressionLevel - c lz4block.CompressorHC -} - -// CompressBlock compresses the source buffer src into the destination dst. -// -// If compression is successful, the first return value is the size of the -// compressed data, which is always >0. -// -// If dst has length at least CompressBlockBound(len(src)), compression always -// succeeds. Otherwise, the first return value is zero. The error return is -// non-nil if the compressed data does not fit in dst, but it might fit in a -// larger buffer that is still smaller than CompressBlockBound(len(src)). 
The -// return value (0, nil) means the data is likely incompressible and a buffer -// of length CompressBlockBound(len(src)) should be passed in. -func (c *CompressorHC) CompressBlock(src, dst []byte) (int, error) { - return c.c.CompressBlock(src, dst, lz4block.CompressionLevel(c.Level)) -} - -// CompressBlockHC is equivalent to CompressorHC.CompressBlock. -// The final two arguments are ignored and should be set to nil. -// -// This function is deprecated. Use a CompressorHC instead. -func CompressBlockHC(src, dst []byte, depth CompressionLevel, _, _ []int) (int, error) { - return lz4block.CompressBlockHC(src, dst, lz4block.CompressionLevel(depth)) -} - -const ( - // ErrInvalidSourceShortBuffer is returned by UncompressBlock or CompressBLock when a compressed - // block is corrupted or the destination buffer is not large enough for the uncompressed data. - ErrInvalidSourceShortBuffer = lz4errors.ErrInvalidSourceShortBuffer - // ErrInvalidFrame is returned when reading an invalid LZ4 archive. - ErrInvalidFrame = lz4errors.ErrInvalidFrame - // ErrInternalUnhandledState is an internal error. - ErrInternalUnhandledState = lz4errors.ErrInternalUnhandledState - // ErrInvalidHeaderChecksum is returned when reading a frame. - ErrInvalidHeaderChecksum = lz4errors.ErrInvalidHeaderChecksum - // ErrInvalidBlockChecksum is returned when reading a frame. - ErrInvalidBlockChecksum = lz4errors.ErrInvalidBlockChecksum - // ErrInvalidFrameChecksum is returned when reading a frame. - ErrInvalidFrameChecksum = lz4errors.ErrInvalidFrameChecksum - // ErrOptionInvalidCompressionLevel is returned when the supplied compression level is invalid. - ErrOptionInvalidCompressionLevel = lz4errors.ErrOptionInvalidCompressionLevel - // ErrOptionClosedOrError is returned when an option is applied to a closed or in error object. 
- ErrOptionClosedOrError = lz4errors.ErrOptionClosedOrError - // ErrOptionInvalidBlockSize is returned when - ErrOptionInvalidBlockSize = lz4errors.ErrOptionInvalidBlockSize - // ErrOptionNotApplicable is returned when trying to apply an option to an object not supporting it. - ErrOptionNotApplicable = lz4errors.ErrOptionNotApplicable - // ErrWriterNotClosed is returned when attempting to reset an unclosed writer. - ErrWriterNotClosed = lz4errors.ErrWriterNotClosed -) diff --git a/vendor/github.com/pierrec/lz4/v4/options.go b/vendor/github.com/pierrec/lz4/v4/options.go deleted file mode 100644 index 4e1b6703b5..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/options.go +++ /dev/null @@ -1,213 +0,0 @@ -package lz4 - -import ( - "fmt" - "github.com/pierrec/lz4/v4/internal/lz4block" - "github.com/pierrec/lz4/v4/internal/lz4errors" - "reflect" - "runtime" -) - -//go:generate go run golang.org/x/tools/cmd/stringer -type=BlockSize,CompressionLevel -output options_gen.go - -type ( - applier interface { - Apply(...Option) error - private() - } - // Option defines the parameters to setup an LZ4 Writer or Reader. - Option func(applier) error -) - -// String returns a string representation of the option with its parameter(s). -func (o Option) String() string { - return o(nil).Error() -} - -// Default options. -var ( - DefaultBlockSizeOption = BlockSizeOption(Block4Mb) - DefaultChecksumOption = ChecksumOption(true) - DefaultConcurrency = ConcurrencyOption(1) - defaultOnBlockDone = OnBlockDoneOption(nil) -) - -const ( - Block64Kb BlockSize = 1 << (16 + iota*2) - Block256Kb - Block1Mb - Block4Mb -) - -// BlockSizeIndex defines the size of the blocks to be compressed. -type BlockSize uint32 - -// BlockSizeOption defines the maximum size of compressed blocks (default=Block4Mb). 
-func BlockSizeOption(size BlockSize) Option { - return func(a applier) error { - switch w := a.(type) { - case nil: - s := fmt.Sprintf("BlockSizeOption(%s)", size) - return lz4errors.Error(s) - case *Writer: - size := uint32(size) - if !lz4block.IsValid(size) { - return fmt.Errorf("%w: %d", lz4errors.ErrOptionInvalidBlockSize, size) - } - w.frame.Descriptor.Flags.BlockSizeIndexSet(lz4block.Index(size)) - return nil - } - return lz4errors.ErrOptionNotApplicable - } -} - -// BlockChecksumOption enables or disables block checksum (default=false). -func BlockChecksumOption(flag bool) Option { - return func(a applier) error { - switch w := a.(type) { - case nil: - s := fmt.Sprintf("BlockChecksumOption(%v)", flag) - return lz4errors.Error(s) - case *Writer: - w.frame.Descriptor.Flags.BlockChecksumSet(flag) - return nil - } - return lz4errors.ErrOptionNotApplicable - } -} - -// ChecksumOption enables/disables all blocks or content checksum (default=true). -func ChecksumOption(flag bool) Option { - return func(a applier) error { - switch w := a.(type) { - case nil: - s := fmt.Sprintf("ChecksumOption(%v)", flag) - return lz4errors.Error(s) - case *Writer: - w.frame.Descriptor.Flags.ContentChecksumSet(flag) - return nil - } - return lz4errors.ErrOptionNotApplicable - } -} - -// SizeOption sets the size of the original uncompressed data (default=0). It is useful to know the size of the -// whole uncompressed data stream. -func SizeOption(size uint64) Option { - return func(a applier) error { - switch w := a.(type) { - case nil: - s := fmt.Sprintf("SizeOption(%d)", size) - return lz4errors.Error(s) - case *Writer: - w.frame.Descriptor.Flags.SizeSet(size > 0) - w.frame.Descriptor.ContentSize = size - return nil - } - return lz4errors.ErrOptionNotApplicable - } -} - -// ConcurrencyOption sets the number of go routines used for compression. -// If n <= 0, then the output of runtime.GOMAXPROCS(0) is used. 
-func ConcurrencyOption(n int) Option { - if n <= 0 { - n = runtime.GOMAXPROCS(0) - } - return func(a applier) error { - switch rw := a.(type) { - case nil: - s := fmt.Sprintf("ConcurrencyOption(%d)", n) - return lz4errors.Error(s) - case *Writer: - rw.num = n - return nil - case *Reader: - rw.num = n - return nil - } - return lz4errors.ErrOptionNotApplicable - } -} - -// CompressionLevel defines the level of compression to use. The higher the better, but slower, compression. -type CompressionLevel uint32 - -const ( - Fast CompressionLevel = 0 - Level1 CompressionLevel = 1 << (8 + iota) - Level2 - Level3 - Level4 - Level5 - Level6 - Level7 - Level8 - Level9 -) - -// CompressionLevelOption defines the compression level (default=Fast). -func CompressionLevelOption(level CompressionLevel) Option { - return func(a applier) error { - switch w := a.(type) { - case nil: - s := fmt.Sprintf("CompressionLevelOption(%s)", level) - return lz4errors.Error(s) - case *Writer: - switch level { - case Fast, Level1, Level2, Level3, Level4, Level5, Level6, Level7, Level8, Level9: - default: - return fmt.Errorf("%w: %d", lz4errors.ErrOptionInvalidCompressionLevel, level) - } - w.level = lz4block.CompressionLevel(level) - return nil - } - return lz4errors.ErrOptionNotApplicable - } -} - -func onBlockDone(int) {} - -// OnBlockDoneOption is triggered when a block has been processed. For a Writer, it is when is has been compressed, -// for a Reader, it is when it has been uncompressed. 
-func OnBlockDoneOption(handler func(size int)) Option { - if handler == nil { - handler = onBlockDone - } - return func(a applier) error { - switch rw := a.(type) { - case nil: - s := fmt.Sprintf("OnBlockDoneOption(%s)", reflect.TypeOf(handler).String()) - return lz4errors.Error(s) - case *Writer: - rw.handler = handler - return nil - case *Reader: - rw.handler = handler - return nil - } - return lz4errors.ErrOptionNotApplicable - } -} - -// LegacyOption provides support for writing LZ4 frames in the legacy format. -// -// See https://github.com/lz4/lz4/blob/dev/doc/lz4_Frame_format.md#legacy-frame. -// -// NB. compressed Linux kernel images use a tweaked LZ4 legacy format where -// the compressed stream is followed by the original (uncompressed) size of -// the kernel (https://events.static.linuxfound.org/sites/events/files/lcjpcojp13_klee.pdf). -// This is also supported as a special case. -func LegacyOption(legacy bool) Option { - return func(a applier) error { - switch rw := a.(type) { - case nil: - s := fmt.Sprintf("LegacyOption(%v)", legacy) - return lz4errors.Error(s) - case *Writer: - rw.legacy = legacy - return nil - } - return lz4errors.ErrOptionNotApplicable - } -} diff --git a/vendor/github.com/pierrec/lz4/v4/options_gen.go b/vendor/github.com/pierrec/lz4/v4/options_gen.go deleted file mode 100644 index 2de814909e..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/options_gen.go +++ /dev/null @@ -1,92 +0,0 @@ -// Code generated by "stringer -type=BlockSize,CompressionLevel -output options_gen.go"; DO NOT EDIT. - -package lz4 - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[Block64Kb-65536] - _ = x[Block256Kb-262144] - _ = x[Block1Mb-1048576] - _ = x[Block4Mb-4194304] -} - -const ( - _BlockSize_name_0 = "Block64Kb" - _BlockSize_name_1 = "Block256Kb" - _BlockSize_name_2 = "Block1Mb" - _BlockSize_name_3 = "Block4Mb" -) - -func (i BlockSize) String() string { - switch { - case i == 65536: - return _BlockSize_name_0 - case i == 262144: - return _BlockSize_name_1 - case i == 1048576: - return _BlockSize_name_2 - case i == 4194304: - return _BlockSize_name_3 - default: - return "BlockSize(" + strconv.FormatInt(int64(i), 10) + ")" - } -} -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[Fast-0] - _ = x[Level1-512] - _ = x[Level2-1024] - _ = x[Level3-2048] - _ = x[Level4-4096] - _ = x[Level5-8192] - _ = x[Level6-16384] - _ = x[Level7-32768] - _ = x[Level8-65536] - _ = x[Level9-131072] -} - -const ( - _CompressionLevel_name_0 = "Fast" - _CompressionLevel_name_1 = "Level1" - _CompressionLevel_name_2 = "Level2" - _CompressionLevel_name_3 = "Level3" - _CompressionLevel_name_4 = "Level4" - _CompressionLevel_name_5 = "Level5" - _CompressionLevel_name_6 = "Level6" - _CompressionLevel_name_7 = "Level7" - _CompressionLevel_name_8 = "Level8" - _CompressionLevel_name_9 = "Level9" -) - -func (i CompressionLevel) String() string { - switch { - case i == 0: - return _CompressionLevel_name_0 - case i == 512: - return _CompressionLevel_name_1 - case i == 1024: - return _CompressionLevel_name_2 - case i == 2048: - return _CompressionLevel_name_3 - case i == 4096: - return _CompressionLevel_name_4 - case i == 8192: - return _CompressionLevel_name_5 - case i == 16384: - return _CompressionLevel_name_6 - case i == 32768: - return _CompressionLevel_name_7 - case i == 65536: - return _CompressionLevel_name_8 - case i == 131072: - return _CompressionLevel_name_9 - default: - return 
"CompressionLevel(" + strconv.FormatInt(int64(i), 10) + ")" - } -} diff --git a/vendor/github.com/pierrec/lz4/v4/reader.go b/vendor/github.com/pierrec/lz4/v4/reader.go deleted file mode 100644 index 403aaf697a..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/reader.go +++ /dev/null @@ -1,243 +0,0 @@ -package lz4 - -import ( - "io" - - "github.com/pierrec/lz4/v4/internal/lz4block" - "github.com/pierrec/lz4/v4/internal/lz4errors" - "github.com/pierrec/lz4/v4/internal/lz4stream" -) - -var readerStates = []aState{ - noState: newState, - errorState: newState, - newState: readState, - readState: closedState, - closedState: newState, -} - -// NewReader returns a new LZ4 frame decoder. -func NewReader(r io.Reader) *Reader { - return newReader(r, false) -} - -func newReader(r io.Reader, legacy bool) *Reader { - zr := &Reader{frame: lz4stream.NewFrame()} - zr.state.init(readerStates) - _ = zr.Apply(DefaultConcurrency, defaultOnBlockDone) - zr.Reset(r) - return zr -} - -// Reader allows reading an LZ4 stream. -type Reader struct { - state _State - src io.Reader // source reader - num int // concurrency level - frame *lz4stream.Frame // frame being read - data []byte // block buffer allocated in non concurrent mode - reads chan []byte // pending data - idx int // size of pending data - handler func(int) - cum uint32 -} - -func (*Reader) private() {} - -func (r *Reader) Apply(options ...Option) (err error) { - defer r.state.check(&err) - switch r.state.state { - case newState: - case errorState: - return r.state.err - default: - return lz4errors.ErrOptionClosedOrError - } - for _, o := range options { - if err = o(r); err != nil { - return - } - } - return -} - -// Size returns the size of the underlying uncompressed data, if set in the stream. 
-func (r *Reader) Size() int { - switch r.state.state { - case readState, closedState: - if r.frame.Descriptor.Flags.Size() { - return int(r.frame.Descriptor.ContentSize) - } - } - return 0 -} - -func (r *Reader) isNotConcurrent() bool { - return r.num == 1 -} - -func (r *Reader) init() error { - data, err := r.frame.InitR(r.src, r.num) - if err != nil { - return err - } - r.reads = data - r.idx = 0 - size := r.frame.Descriptor.Flags.BlockSizeIndex() - r.data = size.Get() - r.cum = 0 - return nil -} - -func (r *Reader) Read(buf []byte) (n int, err error) { - defer r.state.check(&err) - switch r.state.state { - case readState: - case closedState, errorState: - return 0, r.state.err - case newState: - // First initialization. - if err = r.init(); r.state.next(err) { - return - } - default: - return 0, r.state.fail() - } - for len(buf) > 0 { - var bn int - if r.idx == 0 { - if r.isNotConcurrent() { - bn, err = r.read(buf) - } else { - lz4block.Put(r.data) - r.data = <-r.reads - if len(r.data) == 0 { - // No uncompressed data: something went wrong or we are done. - err = r.frame.Blocks.ErrorR() - } - } - switch err { - case nil: - case io.EOF: - if er := r.frame.CloseR(r.src); er != nil { - err = er - } - lz4block.Put(r.data) - r.data = nil - return - default: - return - } - } - if bn == 0 { - // Fill buf with buffered data. - bn = copy(buf, r.data[r.idx:]) - r.idx += bn - if r.idx == len(r.data) { - // All data read, get ready for the next Read. 
- r.idx = 0 - } - } - buf = buf[bn:] - n += bn - r.handler(bn) - } - return -} - -// read uncompresses the next block as follow: -// - if buf has enough room, the block is uncompressed into it directly -// and the lenght of used space is returned -// - else, the uncompress data is stored in r.data and 0 is returned -func (r *Reader) read(buf []byte) (int, error) { - block := r.frame.Blocks.Block - _, err := block.Read(r.frame, r.src, r.cum) - if err != nil { - return 0, err - } - var direct bool - dst := r.data[:cap(r.data)] - if len(buf) >= len(dst) { - // Uncompress directly into buf. - direct = true - dst = buf - } - dst, err = block.Uncompress(r.frame, dst, true) - if err != nil { - return 0, err - } - r.cum += uint32(len(dst)) - if direct { - return len(dst), nil - } - r.data = dst - return 0, nil -} - -// Reset clears the state of the Reader r such that it is equivalent to its -// initial state from NewReader, but instead writing to writer. -// No access to reader is performed. -// -// w.Close must be called before Reset. -func (r *Reader) Reset(reader io.Reader) { - if r.data != nil { - lz4block.Put(r.data) - r.data = nil - } - r.frame.Reset(r.num) - r.state.reset() - r.src = reader - r.reads = nil -} - -// WriteTo efficiently uncompresses the data from the Reader underlying source to w. 
-func (r *Reader) WriteTo(w io.Writer) (n int64, err error) { - switch r.state.state { - case closedState, errorState: - return 0, r.state.err - case newState: - if err = r.init(); r.state.next(err) { - return - } - default: - return 0, r.state.fail() - } - defer r.state.nextd(&err) - - var data []byte - if r.isNotConcurrent() { - size := r.frame.Descriptor.Flags.BlockSizeIndex() - data = size.Get() - defer lz4block.Put(data) - } - for { - var bn int - var dst []byte - if r.isNotConcurrent() { - bn, err = r.read(data) - dst = data[:bn] - } else { - lz4block.Put(dst) - dst = <-r.reads - bn = len(dst) - if bn == 0 { - // No uncompressed data: something went wrong or we are done. - err = r.frame.Blocks.ErrorR() - } - } - switch err { - case nil: - case io.EOF: - err = r.frame.CloseR(r.src) - return - default: - return - } - r.handler(bn) - bn, err = w.Write(dst) - n += int64(bn) - if err != nil { - return - } - } -} diff --git a/vendor/github.com/pierrec/lz4/v4/state.go b/vendor/github.com/pierrec/lz4/v4/state.go deleted file mode 100644 index d94f04d05e..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/state.go +++ /dev/null @@ -1,75 +0,0 @@ -package lz4 - -import ( - "errors" - "fmt" - "io" - - "github.com/pierrec/lz4/v4/internal/lz4errors" -) - -//go:generate go run golang.org/x/tools/cmd/stringer -type=aState -output state_gen.go - -const ( - noState aState = iota // uninitialized reader - errorState // unrecoverable error encountered - newState // instantiated object - readState // reading data - writeState // writing data - closedState // all done -) - -type ( - aState uint8 - _State struct { - states []aState - state aState - err error - } -) - -func (s *_State) init(states []aState) { - s.states = states - s.state = states[0] -} - -func (s *_State) reset() { - s.state = s.states[0] - s.err = nil -} - -// next sets the state to the next one unless it is passed a non nil error. -// It returns whether or not it is in error. 
-func (s *_State) next(err error) bool { - if err != nil { - s.err = fmt.Errorf("%s: %w", s.state, err) - s.state = errorState - return true - } - s.state = s.states[s.state] - return false -} - -// nextd is like next but for defers. -func (s *_State) nextd(errp *error) bool { - return errp != nil && s.next(*errp) -} - -// check sets s in error if not already in error and if the error is not nil or io.EOF, -func (s *_State) check(errp *error) { - if s.state == errorState || errp == nil { - return - } - if err := *errp; err != nil { - s.err = fmt.Errorf("%w[%s]", err, s.state) - if !errors.Is(err, io.EOF) { - s.state = errorState - } - } -} - -func (s *_State) fail() error { - s.state = errorState - s.err = fmt.Errorf("%w[%s]", lz4errors.ErrInternalUnhandledState, s.state) - return s.err -} diff --git a/vendor/github.com/pierrec/lz4/v4/state_gen.go b/vendor/github.com/pierrec/lz4/v4/state_gen.go deleted file mode 100644 index 75fb828924..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/state_gen.go +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated by "stringer -type=aState -output state_gen.go"; DO NOT EDIT. - -package lz4 - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[noState-0] - _ = x[errorState-1] - _ = x[newState-2] - _ = x[readState-3] - _ = x[writeState-4] - _ = x[closedState-5] -} - -const _aState_name = "noStateerrorStatenewStatereadStatewriteStateclosedState" - -var _aState_index = [...]uint8{0, 7, 17, 25, 34, 44, 55} - -func (i aState) String() string { - if i >= aState(len(_aState_index)-1) { - return "aState(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _aState_name[_aState_index[i]:_aState_index[i+1]] -} diff --git a/vendor/github.com/pierrec/lz4/v4/writer.go b/vendor/github.com/pierrec/lz4/v4/writer.go deleted file mode 100644 index 44a43d251b..0000000000 --- a/vendor/github.com/pierrec/lz4/v4/writer.go +++ /dev/null @@ -1,233 +0,0 @@ -package lz4 - -import ( - "io" - - "github.com/pierrec/lz4/v4/internal/lz4block" - "github.com/pierrec/lz4/v4/internal/lz4errors" - "github.com/pierrec/lz4/v4/internal/lz4stream" -) - -var writerStates = []aState{ - noState: newState, - newState: writeState, - writeState: closedState, - closedState: newState, - errorState: newState, -} - -// NewWriter returns a new LZ4 frame encoder. -func NewWriter(w io.Writer) *Writer { - zw := &Writer{frame: lz4stream.NewFrame()} - zw.state.init(writerStates) - _ = zw.Apply(DefaultBlockSizeOption, DefaultChecksumOption, DefaultConcurrency, defaultOnBlockDone) - zw.Reset(w) - return zw -} - -// Writer allows writing an LZ4 stream. 
-type Writer struct { - state _State - src io.Writer // destination writer - level lz4block.CompressionLevel // how hard to try - num int // concurrency level - frame *lz4stream.Frame // frame being built - data []byte // pending data - idx int // size of pending data - handler func(int) - legacy bool -} - -func (*Writer) private() {} - -func (w *Writer) Apply(options ...Option) (err error) { - defer w.state.check(&err) - switch w.state.state { - case newState: - case errorState: - return w.state.err - default: - return lz4errors.ErrOptionClosedOrError - } - for _, o := range options { - if err = o(w); err != nil { - return - } - } - w.Reset(w.src) - return -} - -func (w *Writer) isNotConcurrent() bool { - return w.num == 1 -} - -// init sets up the Writer when in newState. It does not change the Writer state. -func (w *Writer) init() error { - w.frame.InitW(w.src, w.num, w.legacy) - if true || !w.isNotConcurrent() { - size := w.frame.Descriptor.Flags.BlockSizeIndex() - w.data = size.Get() - } - w.idx = 0 - return w.frame.Descriptor.Write(w.frame, w.src) -} - -func (w *Writer) Write(buf []byte) (n int, err error) { - defer w.state.check(&err) - switch w.state.state { - case writeState: - case closedState, errorState: - return 0, w.state.err - case newState: - if err = w.init(); w.state.next(err) { - return - } - default: - return 0, w.state.fail() - } - - zn := len(w.data) - for len(buf) > 0 { - if w.idx == 0 && len(buf) >= zn { - // Avoid a copy as there is enough data for a block. - if err = w.write(buf[:zn], false); err != nil { - return - } - n += zn - buf = buf[zn:] - continue - } - // Accumulate the data to be compressed. - m := copy(w.data[w.idx:], buf) - n += m - w.idx += m - buf = buf[m:] - - if w.idx < len(w.data) { - // Buffer not filled. - return - } - - // Buffer full. 
- if err = w.write(w.data, true); err != nil { - return - } - if !w.isNotConcurrent() { - size := w.frame.Descriptor.Flags.BlockSizeIndex() - w.data = size.Get() - } - w.idx = 0 - } - return -} - -func (w *Writer) write(data []byte, safe bool) error { - if w.isNotConcurrent() { - block := w.frame.Blocks.Block - err := block.Compress(w.frame, data, w.level).Write(w.frame, w.src) - w.handler(len(block.Data)) - return err - } - c := make(chan *lz4stream.FrameDataBlock) - w.frame.Blocks.Blocks <- c - go func(c chan *lz4stream.FrameDataBlock, data []byte, safe bool) { - b := lz4stream.NewFrameDataBlock(w.frame) - c <- b.Compress(w.frame, data, w.level) - <-c - w.handler(len(b.Data)) - b.Close(w.frame) - if safe { - // safe to put it back as the last usage of it was FrameDataBlock.Write() called before c is closed - lz4block.Put(data) - } - }(c, data, safe) - - return nil -} - -// Close closes the Writer, flushing any unwritten data to the underlying io.Writer, -// but does not close the underlying io.Writer. -func (w *Writer) Close() (err error) { - switch w.state.state { - case writeState: - case errorState: - return w.state.err - default: - return nil - } - defer w.state.nextd(&err) - if w.idx > 0 { - // Flush pending data, disable w.data freeing as it is done later on. - if err = w.write(w.data[:w.idx], false); err != nil { - return err - } - w.idx = 0 - } - err = w.frame.CloseW(w.src, w.num) - // It is now safe to free the buffer. - if w.data != nil { - lz4block.Put(w.data) - w.data = nil - } - return -} - -// Reset clears the state of the Writer w such that it is equivalent to its -// initial state from NewWriter, but instead writing to writer. -// Reset keeps the previous options unless overwritten by the supplied ones. -// No access to writer is performed. -// -// w.Close must be called before Reset or pending data may be dropped. 
-func (w *Writer) Reset(writer io.Writer) { - w.frame.Reset(w.num) - w.state.reset() - w.src = writer -} - -// ReadFrom efficiently reads from r and compressed into the Writer destination. -func (w *Writer) ReadFrom(r io.Reader) (n int64, err error) { - switch w.state.state { - case closedState, errorState: - return 0, w.state.err - case newState: - if err = w.init(); w.state.next(err) { - return - } - default: - return 0, w.state.fail() - } - defer w.state.check(&err) - - size := w.frame.Descriptor.Flags.BlockSizeIndex() - var done bool - var rn int - data := size.Get() - if w.isNotConcurrent() { - // Keep the same buffer for the whole process. - defer lz4block.Put(data) - } - for !done { - rn, err = io.ReadFull(r, data) - switch err { - case nil: - case io.EOF, io.ErrUnexpectedEOF: // read may be partial - done = true - default: - return - } - n += int64(rn) - err = w.write(data[:rn], true) - if err != nil { - return - } - w.handler(rn) - if !done && !w.isNotConcurrent() { - // The buffer will be returned automatically by go routines (safe=true) - // so get a new one fo the next round. - data = size.Get() - } - } - err = w.Close() - return -} diff --git a/vendor/github.com/ulikunitz/xz/.gitignore b/vendor/github.com/ulikunitz/xz/.gitignore deleted file mode 100644 index e3c2fc2f1d..0000000000 --- a/vendor/github.com/ulikunitz/xz/.gitignore +++ /dev/null @@ -1,25 +0,0 @@ -# .gitignore - -TODO.html -README.html - -lzma/writer.txt -lzma/reader.txt - -cmd/gxz/gxz -cmd/xb/xb - -# test executables -*.test - -# profile files -*.out - -# vim swap file -.*.swp - -# executables on windows -*.exe - -# default compression test file -enwik8* diff --git a/vendor/github.com/ulikunitz/xz/LICENSE b/vendor/github.com/ulikunitz/xz/LICENSE deleted file mode 100644 index 009b848706..0000000000 --- a/vendor/github.com/ulikunitz/xz/LICENSE +++ /dev/null @@ -1,26 +0,0 @@ -Copyright (c) 2014-2021 Ulrich Kunitz -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* My name, Ulrich Kunitz, may not be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/ulikunitz/xz/README.md b/vendor/github.com/ulikunitz/xz/README.md deleted file mode 100644 index 0a2dc8284f..0000000000 --- a/vendor/github.com/ulikunitz/xz/README.md +++ /dev/null @@ -1,73 +0,0 @@ -# Package xz - -This Go language package supports the reading and writing of xz -compressed streams. It includes also a gxz command for compressing and -decompressing data. The package is completely written in Go and doesn't -have any dependency on any C code. - -The package is currently under development. 
There might be bugs and APIs -are not considered stable. At this time the package cannot compete with -the xz tool regarding compression speed and size. The algorithms there -have been developed over a long time and are highly optimized. However -there are a number of improvements planned and I'm very optimistic about -parallel compression and decompression. Stay tuned! - -## Using the API - -The following example program shows how to use the API. - -```go -package main - -import ( - "bytes" - "io" - "log" - "os" - - "github.com/ulikunitz/xz" -) - -func main() { - const text = "The quick brown fox jumps over the lazy dog.\n" - var buf bytes.Buffer - // compress text - w, err := xz.NewWriter(&buf) - if err != nil { - log.Fatalf("xz.NewWriter error %s", err) - } - if _, err := io.WriteString(w, text); err != nil { - log.Fatalf("WriteString error %s", err) - } - if err := w.Close(); err != nil { - log.Fatalf("w.Close error %s", err) - } - // decompress buffer and write output to stdout - r, err := xz.NewReader(&buf) - if err != nil { - log.Fatalf("NewReader error %s", err) - } - if _, err = io.Copy(os.Stdout, r); err != nil { - log.Fatalf("io.Copy error %s", err) - } -} -``` - -## Using the gxz compression tool - -The package includes a gxz command line utility for compression and -decompression. - -Use following command for installation: - - $ go get github.com/ulikunitz/xz/cmd/gxz - -To test it call the following command. - - $ gxz bigfile - -After some time a much smaller file bigfile.xz will replace bigfile. -To decompress it use the following command. - - $ gxz -d bigfile.xz - diff --git a/vendor/github.com/ulikunitz/xz/SECURITY.md b/vendor/github.com/ulikunitz/xz/SECURITY.md deleted file mode 100644 index 5f7ec01b3b..0000000000 --- a/vendor/github.com/ulikunitz/xz/SECURITY.md +++ /dev/null @@ -1,10 +0,0 @@ -# Security Policy - -## Supported Versions - -Currently the last minor version v0.5.x is supported. 
- -## Reporting a Vulnerability - -Report a vulnerability by creating a Github issue at -. Expect a response in a week. diff --git a/vendor/github.com/ulikunitz/xz/TODO.md b/vendor/github.com/ulikunitz/xz/TODO.md deleted file mode 100644 index 594e0c7fed..0000000000 --- a/vendor/github.com/ulikunitz/xz/TODO.md +++ /dev/null @@ -1,363 +0,0 @@ -# TODO list - -## Release v0.5.x - -1. Support check flag in gxz command. - -## Release v0.6 - -1. Review encoder and check for lzma improvements under xz. -2. Fix binary tree matcher. -3. Compare compression ratio with xz tool using comparable parameters and optimize parameters -4. rename operation action and make it a simple type of size 8 -5. make maxMatches, wordSize parameters -6. stop searching after a certain length is found (parameter sweetLen) - -## Release v0.7 - -1. Optimize code -2. Do statistical analysis to get linear presets. -3. Test sync.Pool compatability for xz and lzma Writer and Reader -4. Fuzz optimized code. - -## Release v0.8 - -1. Support parallel go routines for writing and reading xz files. -2. Support a ReaderAt interface for xz files with small block sizes. -3. Improve compatibility between gxz and xz -4. Provide manual page for gxz - -## Release v0.9 - -1. Improve documentation -2. Fuzz again - -## Release v1.0 - -1. Full functioning gxz -2. Add godoc URL to README.md (godoc.org) -3. Resolve all issues. -4. Define release candidates. -5. Public announcement. - -## Package lzma - -### v0.6 - -* Rewrite Encoder into a simple greedy one-op-at-a-time encoder including - * simple scan at the dictionary head for the same byte - * use the killer byte (requiring matches to get longer, the first test should be the byte that would make the match longer) - -## Optimizations - -* There may be a lot of false sharing in lzma. State; check whether this can be improved by reorganizing the internal structure of it. - -* Check whether batching encoding and decoding improves speed. 
- -### DAG optimizations - -* Use full buffer to create minimal bit-length above range encoder. -* Might be too slow (see v0.4) - -### Different match finders - -* hashes with 2, 3 characters additional to 4 characters -* binary trees with 2-7 characters (uint64 as key, use uint32 as - - pointers into a an array) - -* rb-trees with 2-7 characters (uint64 as key, use uint32 as pointers - - into an array with bit-steeling for the colors) - -## Release Procedure - -* execute goch -l for all packages; probably with lower param like 0.5. -* check orthography with gospell -* Write release notes in doc/relnotes. -* Update README.md -* xb copyright . in xz directory to ensure all new files have Copyright header -* `VERSION= go generate github.com/ulikunitz/xz/...` to update version files -* Execute test for Linux/amd64, Linux/x86 and Windows/amd64. -* Update TODO.md - write short log entry -* `git checkout master && git merge dev` -* `git tag -a ` -* `git push` - -## Log - -### 2021-02-02 - -Mituo Heijo has fuzzed xz and found a bug in the function readIndexBody. The -function allocated a slice of records immediately after reading the value -without further checks. Since the number has been too large the make function -did panic. The fix is to check the number against the expected number of records -before allocating the records. - -### 2020-12-17 - -Release v0.5.9 fixes warnings, a typo and adds SECURITY.md. - -One fix is interesting. - -```go -const ( - a byte = 0x1 - b = 0x2 -) -``` - -The constants a and b don't have the same type. Correct is - -```go -const ( - a byte = 0x1 - b byte = 0x2 -) -``` - -### 2020-08-19 - -Release v0.5.8 fixes issue -[issue #35](https://github.com/ulikunitz/xz/issues/35). - -### 2020-02-24 - -Release v0.5.7 supports the check-ID None and fixes -[issue #27](https://github.com/ulikunitz/xz/issues/27). - -### 2019-02-20 - -Release v0.5.6 supports the go.mod file. 
- -### 2018-10-28 - -Release v0.5.5 fixes issues #19 observing ErrLimit outputs. - -### 2017-06-05 - -Release v0.5.4 fixes issues #15 of another problem with the padding size -check for the xz block header. I removed the check completely. - -### 2017-02-15 - -Release v0.5.3 fixes issue #12 regarding the decompression of an empty -XZ stream. Many thanks to Tomasz KÅ‚ak, who reported the issue. - -### 2016-12-02 - -Release v0.5.2 became necessary to allow the decoding of xz files with -4-byte padding in the block header. Many thanks to Greg, who reported -the issue. - -### 2016-07-23 - -Release v0.5.1 became necessary to fix problems with 32-bit platforms. -Many thanks to Bruno Brigas, who reported the issue. - -### 2016-07-04 - -Release v0.5 provides improvements to the compressor and provides support for -the decompression of xz files with multiple xz streams. - -### 2016-01-31 - -Another compression rate increase by checking the byte at length of the -best match first, before checking the whole prefix. This makes the -compressor even faster. We have now a large time budget to beat the -compression ratio of the xz tool. For enwik8 we have now over 40 seconds -to reduce the compressed file size for another 7 MiB. - -### 2016-01-30 - -I simplified the encoder. Speed and compression rate increased -dramatically. A high compression rate affects also the decompression -speed. The approach with the buffer and optimizing for operation -compression rate has not been successful. Going for the maximum length -appears to be the best approach. - -### 2016-01-28 - -The release v0.4 is ready. It provides a working xz implementation, -which is rather slow, but works and is interoperable with the xz tool. -It is an important milestone. - -### 2016-01-10 - -I have the first working implementation of an xz reader and writer. I'm -happy about reaching this milestone. - -### 2015-12-02 - -I'm now ready to implement xz because, I have a working LZMA2 -implementation. 
I decided today that v0.4 will use the slow encoder -using the operations buffer to be able to go back, if I intend to do so. - -### 2015-10-21 - -I have restarted the work on the library. While trying to implement -LZMA2, I discovered that I need to resimplify the encoder and decoder -functions. The option approach is too complicated. Using a limited byte -writer and not caring for written bytes at all and not to try to handle -uncompressed data simplifies the LZMA encoder and decoder much. -Processing uncompressed data and handling limits is a feature of the -LZMA2 format not of LZMA. - -I learned an interesting method from the LZO format. If the last copy is -too far away they are moving the head one 2 bytes and not 1 byte to -reduce processing times. - -### 2015-08-26 - -I have now reimplemented the lzma package. The code is reasonably fast, -but can still be optimized. The next step is to implement LZMA2 and then -xz. - -### 2015-07-05 - -Created release v0.3. The version is the foundation for a full xz -implementation that is the target of v0.4. - -### 2015-06-11 - -The gflag package has been developed because I couldn't use flag and -pflag for a fully compatible support of gzip's and lzma's options. It -seems to work now quite nicely. - -### 2015-06-05 - -The overflow issue was interesting to research, however Henry S. Warren -Jr. Hacker's Delight book was very helpful as usual and had the issue -explained perfectly. Fefe's information on his website was based on the -C FAQ and quite bad, because it didn't address the issue of -MININT == -MININT. - -### 2015-06-04 - -It has been a productive day. I improved the interface of lzma. Reader -and lzma. Writer and fixed the error handling. - -### 2015-06-01 - -By computing the bit length of the LZMA operations I was able to -improve the greedy algorithm implementation. By using an 8 MByte buffer -the compression rate was not as good as for xz but already better then -gzip default. 
- -Compression is currently slow, but this is something we will be able to -improve over time. - -### 2015-05-26 - -Checked the license of ogier/pflag. The binary lzmago binary should -include the license terms for the pflag library. - -I added the endorsement clause as used by Google for the Go sources the -LICENSE file. - -### 2015-05-22 - -The package lzb contains now the basic implementation for creating or -reading LZMA byte streams. It allows the support for the implementation -of the DAG-shortest-path algorithm for the compression function. - -### 2015-04-23 - -Completed yesterday the lzbase classes. I'm a little bit concerned that -using the components may require too much code, but on the other hand -there is a lot of flexibility. - -### 2015-04-22 - -Implemented Reader and Writer during the Bayern game against Porto. The -second half gave me enough time. - -### 2015-04-21 - -While showering today morning I discovered that the design for OpEncoder -and OpDecoder doesn't work, because encoding/decoding might depend on -the current status of the dictionary. This is not exactly the right way -to start the day. - -Therefore we need to keep the Reader and Writer design. This time around -we simplify it by ignoring size limits. These can be added by wrappers -around the Reader and Writer interfaces. The Parameters type isn't -needed anymore. - -However I will implement a ReaderState and WriterState type to use -static typing to ensure the right State object is combined with the -right lzbase. Reader and lzbase. Writer. - -As a start I have implemented ReaderState and WriterState to ensure -that the state for reading is only used by readers and WriterState only -used by Writers. - -### 2015-04-20 - -Today I implemented the OpDecoder and tested OpEncoder and OpDecoder. - -### 2015-04-08 - -Came up with a new simplified design for lzbase. I implemented already -the type State that replaces OpCodec. 
- -### 2015-04-06 - -The new lzma package is now fully usable and lzmago is using it now. The -old lzma package has been completely removed. - -### 2015-04-05 - -Implemented lzma. Reader and tested it. - -### 2015-04-04 - -Implemented baseReader by adapting code form lzma. Reader. - -### 2015-04-03 - -The opCodec has been copied yesterday to lzma2. opCodec has a high -number of dependencies on other files in lzma2. Therefore I had to copy -almost all files from lzma. - -### 2015-03-31 - -Removed only a TODO item. - -However in Francesco Campoy's presentation "Go for Javaneros -(Javaïstes?)" is the the idea that using an embedded field E, all the -methods of E will be defined on T. If E is an interface T satisfies E. - - - -I have never used this, but it seems to be a cool idea. - -### 2015-03-30 - -Finished the type writerDict and wrote a simple test. - -### 2015-03-25 - -I started to implement the writerDict. - -### 2015-03-24 - -After thinking long about the LZMA2 code and several false starts, I -have now a plan to create a self-sufficient lzma2 package that supports -the classic LZMA format as well as LZMA2. The core idea is to support a -baseReader and baseWriter type that support the basic LZMA stream -without any headers. Both types must support the reuse of dictionaries -and the opCodec. - -### 2015-01-10 - -1. Implemented simple lzmago tool -2. Tested tool against large 4.4G file - * compression worked correctly; tested decompression with lzma - * decompression hits a full buffer condition -3. Fixed a bug in the compressor and wrote a test for it -4. 
Executed full cycle for 4.4 GB file; performance can be improved ;-) - -### 2015-01-11 - -* Release v0.2 because of the working LZMA encoder and decoder diff --git a/vendor/github.com/ulikunitz/xz/bits.go b/vendor/github.com/ulikunitz/xz/bits.go deleted file mode 100644 index e48450c2ca..0000000000 --- a/vendor/github.com/ulikunitz/xz/bits.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xz - -import ( - "errors" - "io" -) - -// putUint32LE puts the little-endian representation of x into the first -// four bytes of p. -func putUint32LE(p []byte, x uint32) { - p[0] = byte(x) - p[1] = byte(x >> 8) - p[2] = byte(x >> 16) - p[3] = byte(x >> 24) -} - -// putUint64LE puts the little-endian representation of x into the first -// eight bytes of p. -func putUint64LE(p []byte, x uint64) { - p[0] = byte(x) - p[1] = byte(x >> 8) - p[2] = byte(x >> 16) - p[3] = byte(x >> 24) - p[4] = byte(x >> 32) - p[5] = byte(x >> 40) - p[6] = byte(x >> 48) - p[7] = byte(x >> 56) -} - -// uint32LE converts a little endian representation to an uint32 value. -func uint32LE(p []byte) uint32 { - return uint32(p[0]) | uint32(p[1])<<8 | uint32(p[2])<<16 | - uint32(p[3])<<24 -} - -// putUvarint puts a uvarint representation of x into the byte slice. -func putUvarint(p []byte, x uint64) int { - i := 0 - for x >= 0x80 { - p[i] = byte(x) | 0x80 - x >>= 7 - i++ - } - p[i] = byte(x) - return i + 1 -} - -// errOverflow indicates an overflow of the 64-bit unsigned integer. -var errOverflowU64 = errors.New("xz: uvarint overflows 64-bit unsigned integer") - -// readUvarint reads a uvarint from the given byte reader. 
-func readUvarint(r io.ByteReader) (x uint64, n int, err error) { - const maxUvarintLen = 10 - - var s uint - i := 0 - for { - b, err := r.ReadByte() - if err != nil { - return x, i, err - } - i++ - if i > maxUvarintLen { - return x, i, errOverflowU64 - } - if b < 0x80 { - if i == maxUvarintLen && b > 1 { - return x, i, errOverflowU64 - } - return x | uint64(b)< 0 { - k = 4 - k - } - return k -} - -/*** Header ***/ - -// headerMagic stores the magic bytes for the header -var headerMagic = []byte{0xfd, '7', 'z', 'X', 'Z', 0x00} - -// HeaderLen provides the length of the xz file header. -const HeaderLen = 12 - -// Constants for the checksum methods supported by xz. -const ( - None byte = 0x0 - CRC32 byte = 0x1 - CRC64 byte = 0x4 - SHA256 byte = 0xa -) - -// errInvalidFlags indicates that flags are invalid. -var errInvalidFlags = errors.New("xz: invalid flags") - -// verifyFlags returns the error errInvalidFlags if the value is -// invalid. -func verifyFlags(flags byte) error { - switch flags { - case None, CRC32, CRC64, SHA256: - return nil - default: - return errInvalidFlags - } -} - -// flagstrings maps flag values to strings. -var flagstrings = map[byte]string{ - None: "None", - CRC32: "CRC-32", - CRC64: "CRC-64", - SHA256: "SHA-256", -} - -// flagString returns the string representation for the given flags. -func flagString(flags byte) string { - s, ok := flagstrings[flags] - if !ok { - return "invalid" - } - return s -} - -// newHashFunc returns a function that creates hash instances for the -// hash method encoded in flags. -func newHashFunc(flags byte) (newHash func() hash.Hash, err error) { - switch flags { - case None: - newHash = newNoneHash - case CRC32: - newHash = newCRC32 - case CRC64: - newHash = newCRC64 - case SHA256: - newHash = sha256.New - default: - err = errInvalidFlags - } - return -} - -// header provides the actual content of the xz file header: the flags. -type header struct { - flags byte -} - -// Errors returned by readHeader. 
-var errHeaderMagic = errors.New("xz: invalid header magic bytes") - -// ValidHeader checks whether data is a correct xz file header. The -// length of data must be HeaderLen. -func ValidHeader(data []byte) bool { - var h header - err := h.UnmarshalBinary(data) - return err == nil -} - -// String returns a string representation of the flags. -func (h header) String() string { - return flagString(h.flags) -} - -// UnmarshalBinary reads header from the provided data slice. -func (h *header) UnmarshalBinary(data []byte) error { - // header length - if len(data) != HeaderLen { - return errors.New("xz: wrong file header length") - } - - // magic header - if !bytes.Equal(headerMagic, data[:6]) { - return errHeaderMagic - } - - // checksum - crc := crc32.NewIEEE() - crc.Write(data[6:8]) - if uint32LE(data[8:]) != crc.Sum32() { - return errors.New("xz: invalid checksum for file header") - } - - // stream flags - if data[6] != 0 { - return errInvalidFlags - } - flags := data[7] - if err := verifyFlags(flags); err != nil { - return err - } - - h.flags = flags - return nil -} - -// MarshalBinary generates the xz file header. -func (h *header) MarshalBinary() (data []byte, err error) { - if err = verifyFlags(h.flags); err != nil { - return nil, err - } - - data = make([]byte, 12) - copy(data, headerMagic) - data[7] = h.flags - - crc := crc32.NewIEEE() - crc.Write(data[6:8]) - putUint32LE(data[8:], crc.Sum32()) - - return data, nil -} - -/*** Footer ***/ - -// footerLen defines the length of the footer. -const footerLen = 12 - -// footerMagic contains the footer magic bytes. -var footerMagic = []byte{'Y', 'Z'} - -// footer represents the content of the xz file footer. -type footer struct { - indexSize int64 - flags byte -} - -// String prints a string representation of the footer structure. -func (f footer) String() string { - return fmt.Sprintf("%s index size %d", flagString(f.flags), f.indexSize) -} - -// Minimum and maximum for the size of the index (backward size). 
-const ( - minIndexSize = 4 - maxIndexSize = (1 << 32) * 4 -) - -// MarshalBinary converts footer values into an xz file footer. Note -// that the footer value is checked for correctness. -func (f *footer) MarshalBinary() (data []byte, err error) { - if err = verifyFlags(f.flags); err != nil { - return nil, err - } - if !(minIndexSize <= f.indexSize && f.indexSize <= maxIndexSize) { - return nil, errors.New("xz: index size out of range") - } - if f.indexSize%4 != 0 { - return nil, errors.New( - "xz: index size not aligned to four bytes") - } - - data = make([]byte, footerLen) - - // backward size (index size) - s := (f.indexSize / 4) - 1 - putUint32LE(data[4:], uint32(s)) - // flags - data[9] = f.flags - // footer magic - copy(data[10:], footerMagic) - - // CRC-32 - crc := crc32.NewIEEE() - crc.Write(data[4:10]) - putUint32LE(data, crc.Sum32()) - - return data, nil -} - -// UnmarshalBinary sets the footer value by unmarshalling an xz file -// footer. -func (f *footer) UnmarshalBinary(data []byte) error { - if len(data) != footerLen { - return errors.New("xz: wrong footer length") - } - - // magic bytes - if !bytes.Equal(data[10:], footerMagic) { - return errors.New("xz: footer magic invalid") - } - - // CRC-32 - crc := crc32.NewIEEE() - crc.Write(data[4:10]) - if uint32LE(data) != crc.Sum32() { - return errors.New("xz: footer checksum error") - } - - var g footer - // backward size (index size) - g.indexSize = (int64(uint32LE(data[4:])) + 1) * 4 - - // flags - if data[8] != 0 { - return errInvalidFlags - } - g.flags = data[9] - if err := verifyFlags(g.flags); err != nil { - return err - } - - *f = g - return nil -} - -/*** Block Header ***/ - -// blockHeader represents the content of an xz block header. -type blockHeader struct { - compressedSize int64 - uncompressedSize int64 - filters []filter -} - -// String converts the block header into a string. 
-func (h blockHeader) String() string { - var buf bytes.Buffer - first := true - if h.compressedSize >= 0 { - fmt.Fprintf(&buf, "compressed size %d", h.compressedSize) - first = false - } - if h.uncompressedSize >= 0 { - if !first { - buf.WriteString(" ") - } - fmt.Fprintf(&buf, "uncompressed size %d", h.uncompressedSize) - first = false - } - for _, f := range h.filters { - if !first { - buf.WriteString(" ") - } - fmt.Fprintf(&buf, "filter %s", f) - first = false - } - return buf.String() -} - -// Masks for the block flags. -const ( - filterCountMask = 0x03 - compressedSizePresent = 0x40 - uncompressedSizePresent = 0x80 - reservedBlockFlags = 0x3C -) - -// errIndexIndicator signals that an index indicator (0x00) has been found -// instead of an expected block header indicator. -var errIndexIndicator = errors.New("xz: found index indicator") - -// readBlockHeader reads the block header. -func readBlockHeader(r io.Reader) (h *blockHeader, n int, err error) { - var buf bytes.Buffer - buf.Grow(20) - - // block header size - z, err := io.CopyN(&buf, r, 1) - n = int(z) - if err != nil { - return nil, n, err - } - s := buf.Bytes()[0] - if s == 0 { - return nil, n, errIndexIndicator - } - - // read complete header - headerLen := (int(s) + 1) * 4 - buf.Grow(headerLen - 1) - z, err = io.CopyN(&buf, r, int64(headerLen-1)) - n += int(z) - if err != nil { - return nil, n, err - } - - // unmarshal block header - h = new(blockHeader) - if err = h.UnmarshalBinary(buf.Bytes()); err != nil { - return nil, n, err - } - - return h, n, nil -} - -// readSizeInBlockHeader reads the uncompressed or compressed size -// fields in the block header. The present value informs the function -// whether the respective field is actually present in the header. 
-func readSizeInBlockHeader(r io.ByteReader, present bool) (n int64, err error) { - if !present { - return -1, nil - } - x, _, err := readUvarint(r) - if err != nil { - return 0, err - } - if x >= 1<<63 { - return 0, errors.New("xz: size overflow in block header") - } - return int64(x), nil -} - -// UnmarshalBinary unmarshals the block header. -func (h *blockHeader) UnmarshalBinary(data []byte) error { - // Check header length - s := data[0] - if data[0] == 0 { - return errIndexIndicator - } - headerLen := (int(s) + 1) * 4 - if len(data) != headerLen { - return fmt.Errorf("xz: data length %d; want %d", len(data), - headerLen) - } - n := headerLen - 4 - - // Check CRC-32 - crc := crc32.NewIEEE() - crc.Write(data[:n]) - if crc.Sum32() != uint32LE(data[n:]) { - return errors.New("xz: checksum error for block header") - } - - // Block header flags - flags := data[1] - if flags&reservedBlockFlags != 0 { - return errors.New("xz: reserved block header flags set") - } - - r := bytes.NewReader(data[2:n]) - - // Compressed size - var err error - h.compressedSize, err = readSizeInBlockHeader( - r, flags&compressedSizePresent != 0) - if err != nil { - return err - } - - // Uncompressed size - h.uncompressedSize, err = readSizeInBlockHeader( - r, flags&uncompressedSizePresent != 0) - if err != nil { - return err - } - - h.filters, err = readFilters(r, int(flags&filterCountMask)+1) - if err != nil { - return err - } - - // Check padding - // Since headerLen is a multiple of 4 we don't need to check - // alignment. - k := r.Len() - // The standard spec says that the padding should have not more - // than 3 bytes. However we found paddings of 4 or 5 in the - // wild. See https://github.com/ulikunitz/xz/pull/11 and - // https://github.com/ulikunitz/xz/issues/15 - // - // The only reasonable approach seems to be to ignore the - // padding size. We still check that all padding bytes are zero. 
- if !allZeros(data[n-k : n]) { - return errPadding - } - return nil -} - -// MarshalBinary marshals the binary header. -func (h *blockHeader) MarshalBinary() (data []byte, err error) { - if !(minFilters <= len(h.filters) && len(h.filters) <= maxFilters) { - return nil, errors.New("xz: filter count wrong") - } - for i, f := range h.filters { - if i < len(h.filters)-1 { - if f.id() == lzmaFilterID { - return nil, errors.New( - "xz: LZMA2 filter is not the last") - } - } else { - // last filter - if f.id() != lzmaFilterID { - return nil, errors.New("xz: " + - "last filter must be the LZMA2 filter") - } - } - } - - var buf bytes.Buffer - // header size must set at the end - buf.WriteByte(0) - - // flags - flags := byte(len(h.filters) - 1) - if h.compressedSize >= 0 { - flags |= compressedSizePresent - } - if h.uncompressedSize >= 0 { - flags |= uncompressedSizePresent - } - buf.WriteByte(flags) - - p := make([]byte, 10) - if h.compressedSize >= 0 { - k := putUvarint(p, uint64(h.compressedSize)) - buf.Write(p[:k]) - } - if h.uncompressedSize >= 0 { - k := putUvarint(p, uint64(h.uncompressedSize)) - buf.Write(p[:k]) - } - - for _, f := range h.filters { - fp, err := f.MarshalBinary() - if err != nil { - return nil, err - } - buf.Write(fp) - } - - // padding - for i := padLen(int64(buf.Len())); i > 0; i-- { - buf.WriteByte(0) - } - - // crc place holder - buf.Write(p[:4]) - - data = buf.Bytes() - if len(data)%4 != 0 { - panic("data length not aligned") - } - s := len(data)/4 - 1 - if !(1 < s && s <= 255) { - panic("wrong block header size") - } - data[0] = byte(s) - - crc := crc32.NewIEEE() - crc.Write(data[:len(data)-4]) - putUint32LE(data[len(data)-4:], crc.Sum32()) - - return data, nil -} - -// Constants used for marshalling and unmarshalling filters in the xz -// block header. -const ( - minFilters = 1 - maxFilters = 4 - minReservedID = 1 << 62 -) - -// filter represents a filter in the block header. 
-type filter interface { - id() uint64 - UnmarshalBinary(data []byte) error - MarshalBinary() (data []byte, err error) - reader(r io.Reader, c *ReaderConfig) (fr io.Reader, err error) - writeCloser(w io.WriteCloser, c *WriterConfig) (fw io.WriteCloser, err error) - // filter must be last filter - last() bool -} - -// readFilter reads a block filter from the block header. At this point -// in time only the LZMA2 filter is supported. -func readFilter(r io.Reader) (f filter, err error) { - br := lzma.ByteReader(r) - - // index - id, _, err := readUvarint(br) - if err != nil { - return nil, err - } - - var data []byte - switch id { - case lzmaFilterID: - data = make([]byte, lzmaFilterLen) - data[0] = lzmaFilterID - if _, err = io.ReadFull(r, data[1:]); err != nil { - return nil, err - } - f = new(lzmaFilter) - default: - if id >= minReservedID { - return nil, errors.New( - "xz: reserved filter id in block stream header") - } - return nil, errors.New("xz: invalid filter id") - } - if err = f.UnmarshalBinary(data); err != nil { - return nil, err - } - return f, err -} - -// readFilters reads count filters. At this point in time only the count -// 1 is supported. -func readFilters(r io.Reader, count int) (filters []filter, err error) { - if count != 1 { - return nil, errors.New("xz: unsupported filter count") - } - f, err := readFilter(r) - if err != nil { - return nil, err - } - return []filter{f}, err -} - -/*** Index ***/ - -// record describes a block in the xz file index. -type record struct { - unpaddedSize int64 - uncompressedSize int64 -} - -// readRecord reads an index record. 
-func readRecord(r io.ByteReader) (rec record, n int, err error) { - u, k, err := readUvarint(r) - n += k - if err != nil { - return rec, n, err - } - rec.unpaddedSize = int64(u) - if rec.unpaddedSize < 0 { - return rec, n, errors.New("xz: unpadded size negative") - } - - u, k, err = readUvarint(r) - n += k - if err != nil { - return rec, n, err - } - rec.uncompressedSize = int64(u) - if rec.uncompressedSize < 0 { - return rec, n, errors.New("xz: uncompressed size negative") - } - - return rec, n, nil -} - -// MarshalBinary converts an index record in its binary encoding. -func (rec *record) MarshalBinary() (data []byte, err error) { - // maximum length of a uvarint is 10 - p := make([]byte, 20) - n := putUvarint(p, uint64(rec.unpaddedSize)) - n += putUvarint(p[n:], uint64(rec.uncompressedSize)) - return p[:n], nil -} - -// writeIndex writes the index, a sequence of records. -func writeIndex(w io.Writer, index []record) (n int64, err error) { - crc := crc32.NewIEEE() - mw := io.MultiWriter(w, crc) - - // index indicator - k, err := mw.Write([]byte{0}) - n += int64(k) - if err != nil { - return n, err - } - - // number of records - p := make([]byte, 10) - k = putUvarint(p, uint64(len(index))) - k, err = mw.Write(p[:k]) - n += int64(k) - if err != nil { - return n, err - } - - // list of records - for _, rec := range index { - p, err := rec.MarshalBinary() - if err != nil { - return n, err - } - k, err = mw.Write(p) - n += int64(k) - if err != nil { - return n, err - } - } - - // index padding - k, err = mw.Write(make([]byte, padLen(int64(n)))) - n += int64(k) - if err != nil { - return n, err - } - - // crc32 checksum - putUint32LE(p, crc.Sum32()) - k, err = w.Write(p[:4]) - n += int64(k) - - return n, err -} - -// readIndexBody reads the index from the reader. It assumes that the -// index indicator has already been read. 
-func readIndexBody(r io.Reader, expectedRecordLen int) (records []record, n int64, err error) { - crc := crc32.NewIEEE() - // index indicator - crc.Write([]byte{0}) - - br := lzma.ByteReader(io.TeeReader(r, crc)) - - // number of records - u, k, err := readUvarint(br) - n += int64(k) - if err != nil { - return nil, n, err - } - recLen := int(u) - if recLen < 0 || uint64(recLen) != u { - return nil, n, errors.New("xz: record number overflow") - } - if recLen != expectedRecordLen { - return nil, n, fmt.Errorf( - "xz: index length is %d; want %d", - recLen, expectedRecordLen) - } - - // list of records - records = make([]record, recLen) - for i := range records { - records[i], k, err = readRecord(br) - n += int64(k) - if err != nil { - return nil, n, err - } - } - - p := make([]byte, padLen(int64(n+1)), 4) - k, err = io.ReadFull(br.(io.Reader), p) - n += int64(k) - if err != nil { - return nil, n, err - } - if !allZeros(p) { - return nil, n, errors.New("xz: non-zero byte in index padding") - } - - // crc32 - s := crc.Sum32() - p = p[:4] - k, err = io.ReadFull(br.(io.Reader), p) - n += int64(k) - if err != nil { - return records, n, err - } - if uint32LE(p) != s { - return nil, n, errors.New("xz: wrong checksum for index") - } - - return records, n, nil -} diff --git a/vendor/github.com/ulikunitz/xz/fox-check-none.xz b/vendor/github.com/ulikunitz/xz/fox-check-none.xz deleted file mode 100644 index 46043f7dc89b610dc3badb9db3426620c4c97462..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 96 zcmexsUKJ6=z`*cd=%ynRgCe6CkX@qxbTK1?PDnLRM*R tL9s%9S!$6&2~avGv8qxbB|lw{3#g5Ofzej?!NQIFY(?{`7{LOOQ2>-O93KDx diff --git a/vendor/github.com/ulikunitz/xz/fox.xz b/vendor/github.com/ulikunitz/xz/fox.xz deleted file mode 100644 index 4b820bd5a16e83fe5db4fb315639a4337f862483..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 104 
zcmexsUKJ6=z`*kC+7>q^21Q0O1_p)_{ill=8FWH2QWXkIGn2Cwl8W-n^AytZD-^Oy za|?dFO$zmVVdxt0+m!4eq- E0K@hlng9R* diff --git a/vendor/github.com/ulikunitz/xz/internal/hash/cyclic_poly.go b/vendor/github.com/ulikunitz/xz/internal/hash/cyclic_poly.go deleted file mode 100644 index f723cf252d..0000000000 --- a/vendor/github.com/ulikunitz/xz/internal/hash/cyclic_poly.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package hash - -// CyclicPoly provides a cyclic polynomial rolling hash. -type CyclicPoly struct { - h uint64 - p []uint64 - i int -} - -// ror rotates the unsigned 64-bit integer to right. The argument s must be -// less than 64. -func ror(x uint64, s uint) uint64 { - return (x >> s) | (x << (64 - s)) -} - -// NewCyclicPoly creates a new instance of the CyclicPoly structure. The -// argument n gives the number of bytes for which a hash will be executed. -// This number must be positive; the method panics if this isn't the case. -func NewCyclicPoly(n int) *CyclicPoly { - if n < 1 { - panic("argument n must be positive") - } - return &CyclicPoly{p: make([]uint64, 0, n)} -} - -// Len returns the length of the byte sequence for which a hash is generated. -func (r *CyclicPoly) Len() int { - return cap(r.p) -} - -// RollByte hashes the next byte and returns a hash value. The complete becomes -// available after at least Len() bytes have been hashed. -func (r *CyclicPoly) RollByte(x byte) uint64 { - y := hash[x] - if len(r.p) < cap(r.p) { - r.h = ror(r.h, 1) ^ y - r.p = append(r.p, y) - } else { - r.h ^= ror(r.p[r.i], uint(cap(r.p)-1)) - r.h = ror(r.h, 1) ^ y - r.p[r.i] = y - r.i = (r.i + 1) % cap(r.p) - } - return r.h -} - -// Stores the hash for the individual bytes. 
-var hash = [256]uint64{ - 0x2e4fc3f904065142, 0xc790984cfbc99527, - 0x879f95eb8c62f187, 0x3b61be86b5021ef2, - 0x65a896a04196f0a5, 0xc5b307b80470b59e, - 0xd3bff376a70df14b, 0xc332f04f0b3f1701, - 0x753b5f0e9abf3e0d, 0xb41538fdfe66ef53, - 0x1906a10c2c1c0208, 0xfb0c712a03421c0d, - 0x38be311a65c9552b, 0xfee7ee4ca6445c7e, - 0x71aadeded184f21e, 0xd73426fccda23b2d, - 0x29773fb5fb9600b5, 0xce410261cd32981a, - 0xfe2848b3c62dbc2d, 0x459eaaff6e43e11c, - 0xc13e35fc9c73a887, 0xf30ed5c201e76dbc, - 0xa5f10b3910482cea, 0x2945d59be02dfaad, - 0x06ee334ff70571b5, 0xbabf9d8070f44380, - 0xee3e2e9912ffd27c, 0x2a7118d1ea6b8ea7, - 0x26183cb9f7b1664c, 0xea71dac7da068f21, - 0xea92eca5bd1d0bb7, 0x415595862defcd75, - 0x248a386023c60648, 0x9cf021ab284b3c8a, - 0xfc9372df02870f6c, 0x2b92d693eeb3b3fc, - 0x73e799d139dc6975, 0x7b15ae312486363c, - 0xb70e5454a2239c80, 0x208e3fb31d3b2263, - 0x01f563cabb930f44, 0x2ac4533d2a3240d8, - 0x84231ed1064f6f7c, 0xa9f020977c2a6d19, - 0x213c227271c20122, 0x09fe8a9a0a03d07a, - 0x4236dc75bcaf910c, 0x460a8b2bead8f17e, - 0xd9b27be1aa07055f, 0xd202d5dc4b11c33e, - 0x70adb010543bea12, 0xcdae938f7ea6f579, - 0x3f3d870208672f4d, 0x8e6ccbce9d349536, - 0xe4c0871a389095ae, 0xf5f2a49152bca080, - 0x9a43f9b97269934e, 0xc17b3753cb6f475c, - 0xd56d941e8e206bd4, 0xac0a4f3e525eda00, - 0xa06d5a011912a550, 0x5537ed19537ad1df, - 0xa32fe713d611449d, 0x2a1d05b47c3b579f, - 0x991d02dbd30a2a52, 0x39e91e7e28f93eb0, - 0x40d06adb3e92c9ac, 0x9b9d3afde1c77c97, - 0x9a3f3f41c02c616f, 0x22ecd4ba00f60c44, - 0x0b63d5d801708420, 0x8f227ca8f37ffaec, - 0x0256278670887c24, 0x107e14877dbf540b, - 0x32c19f2786ac1c05, 0x1df5b12bb4bc9c61, - 0xc0cac129d0d4c4e2, 0x9fdb52ee9800b001, - 0x31f601d5d31c48c4, 0x72ff3c0928bcaec7, - 0xd99264421147eb03, 0x535a2d6d38aefcfe, - 0x6ba8b4454a916237, 0xfa39366eaae4719c, - 0x10f00fd7bbb24b6f, 0x5bd23185c76c84d4, - 0xb22c3d7e1b00d33f, 0x3efc20aa6bc830a8, - 0xd61c2503fe639144, 0x30ce625441eb92d3, - 0xe5d34cf359e93100, 0xa8e5aa13f2b9f7a5, - 0x5c2b8d851ca254a6, 0x68fb6c5e8b0d5fdf, 
- 0xc7ea4872c96b83ae, 0x6dd5d376f4392382, - 0x1be88681aaa9792f, 0xfef465ee1b6c10d9, - 0x1f98b65ed43fcb2e, 0x4d1ca11eb6e9a9c9, - 0x7808e902b3857d0b, 0x171c9c4ea4607972, - 0x58d66274850146df, 0x42b311c10d3981d1, - 0x647fa8c621c41a4c, 0xf472771c66ddfedc, - 0x338d27e3f847b46b, 0x6402ce3da97545ce, - 0x5162db616fc38638, 0x9c83be97bc22a50e, - 0x2d3d7478a78d5e72, 0xe621a9b938fd5397, - 0x9454614eb0f81c45, 0x395fb6e742ed39b6, - 0x77dd9179d06037bf, 0xc478d0fee4d2656d, - 0x35d9d6cb772007af, 0x83a56e92c883f0f6, - 0x27937453250c00a1, 0x27bd6ebc3a46a97d, - 0x9f543bf784342d51, 0xd158f38c48b0ed52, - 0x8dd8537c045f66b4, 0x846a57230226f6d5, - 0x6b13939e0c4e7cdf, 0xfca25425d8176758, - 0x92e5fc6cd52788e6, 0x9992e13d7a739170, - 0x518246f7a199e8ea, 0xf104c2a71b9979c7, - 0x86b3ffaabea4768f, 0x6388061cf3e351ad, - 0x09d9b5295de5bbb5, 0x38bf1638c2599e92, - 0x1d759846499e148d, 0x4c0ff015e5f96ef4, - 0xa41a94cfa270f565, 0x42d76f9cb2326c0b, - 0x0cf385dd3c9c23ba, 0x0508a6c7508d6e7a, - 0x337523aabbe6cf8d, 0x646bb14001d42b12, - 0xc178729d138adc74, 0xf900ef4491f24086, - 0xee1a90d334bb5ac4, 0x9755c92247301a50, - 0xb999bf7c4ff1b610, 0x6aeeb2f3b21e8fc9, - 0x0fa8084cf91ac6ff, 0x10d226cf136e6189, - 0xd302057a07d4fb21, 0x5f03800e20a0fcc3, - 0x80118d4ae46bd210, 0x58ab61a522843733, - 0x51edd575c5432a4b, 0x94ee6ff67f9197f7, - 0x765669e0e5e8157b, 0xa5347830737132f0, - 0x3ba485a69f01510c, 0x0b247d7b957a01c3, - 0x1b3d63449fd807dc, 0x0fdc4721c30ad743, - 0x8b535ed3829b2b14, 0xee41d0cad65d232c, - 0xe6a99ed97a6a982f, 0x65ac6194c202003d, - 0x692accf3a70573eb, 0xcc3c02c3e200d5af, - 0x0d419e8b325914a3, 0x320f160f42c25e40, - 0x00710d647a51fe7a, 0x3c947692330aed60, - 0x9288aa280d355a7a, 0xa1806a9b791d1696, - 0x5d60e38496763da1, 0x6c69e22e613fd0f4, - 0x977fc2a5aadffb17, 0xfb7bd063fc5a94ba, - 0x460c17992cbaece1, 0xf7822c5444d3297f, - 0x344a9790c69b74aa, 0xb80a42e6cae09dce, - 0x1b1361eaf2b1e757, 0xd84c1e758e236f01, - 0x88e0b7be347627cc, 0x45246009b7a99490, - 0x8011c6dd3fe50472, 0xc341d682bffb99d7, - 0x2511be93808e2d15, 
0xd5bc13d7fd739840, - 0x2a3cd030679ae1ec, 0x8ad9898a4b9ee157, - 0x3245fef0a8eaf521, 0x3d6d8dbbb427d2b0, - 0x1ed146d8968b3981, 0x0c6a28bf7d45f3fc, - 0x4a1fd3dbcee3c561, 0x4210ff6a476bf67e, - 0xa559cce0d9199aac, 0xde39d47ef3723380, - 0xe5b69d848ce42e35, 0xefa24296f8e79f52, - 0x70190b59db9a5afc, 0x26f166cdb211e7bf, - 0x4deaf2df3c6b8ef5, 0xf171dbdd670f1017, - 0xb9059b05e9420d90, 0x2f0da855c9388754, - 0x611d5e9ab77949cc, 0x2912038ac01163f4, - 0x0231df50402b2fba, 0x45660fc4f3245f58, - 0xb91cc97c7c8dac50, 0xb72d2aafe4953427, - 0xfa6463f87e813d6b, 0x4515f7ee95d5c6a2, - 0x1310e1c1a48d21c3, 0xad48a7810cdd8544, - 0x4d5bdfefd5c9e631, 0xa43ed43f1fdcb7de, - 0xe70cfc8fe1ee9626, 0xef4711b0d8dda442, - 0xb80dd9bd4dab6c93, 0xa23be08d31ba4d93, - 0x9b37db9d0335a39c, 0x494b6f870f5cfebc, - 0x6d1b3c1149dda943, 0x372c943a518c1093, - 0xad27af45e77c09c4, 0x3b6f92b646044604, - 0xac2917909f5fcf4f, 0x2069a60e977e5557, - 0x353a469e71014de5, 0x24be356281f55c15, - 0x2b6d710ba8e9adea, 0x404ad1751c749c29, - 0xed7311bf23d7f185, 0xba4f6976b4acc43e, - 0x32d7198d2bc39000, 0xee667019014d6e01, - 0x494ef3e128d14c83, 0x1f95a152baecd6be, - 0x201648dff1f483a5, 0x68c28550c8384af6, - 0x5fc834a6824a7f48, 0x7cd06cb7365eaf28, - 0xd82bbd95e9b30909, 0x234f0d1694c53f6d, - 0xd2fb7f4a96d83f4a, 0xff0d5da83acac05e, - 0xf8f6b97f5585080a, 0x74236084be57b95b, - 0xa25e40c03bbc36ad, 0x6b6e5c14ce88465b, - 0x4378ffe93e1528c5, 0x94ca92a17118e2d2, -} diff --git a/vendor/github.com/ulikunitz/xz/internal/hash/doc.go b/vendor/github.com/ulikunitz/xz/internal/hash/doc.go deleted file mode 100644 index cc60a6b5ce..0000000000 --- a/vendor/github.com/ulikunitz/xz/internal/hash/doc.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package hash provides rolling hashes. 
- -Rolling hashes have to be used for maintaining the positions of n-byte -sequences in the dictionary buffer. - -The package provides currently the Rabin-Karp rolling hash and a Cyclic -Polynomial hash. Both support the Hashes method to be used with an interface. -*/ -package hash diff --git a/vendor/github.com/ulikunitz/xz/internal/hash/rabin_karp.go b/vendor/github.com/ulikunitz/xz/internal/hash/rabin_karp.go deleted file mode 100644 index c6432913fd..0000000000 --- a/vendor/github.com/ulikunitz/xz/internal/hash/rabin_karp.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package hash - -// A is the default constant for Robin-Karp rolling hash. This is a random -// prime. -const A = 0x97b548add41d5da1 - -// RabinKarp supports the computation of a rolling hash. -type RabinKarp struct { - A uint64 - // a^n - aOldest uint64 - h uint64 - p []byte - i int -} - -// NewRabinKarp creates a new RabinKarp value. The argument n defines the -// length of the byte sequence to be hashed. The default constant will will be -// used. -func NewRabinKarp(n int) *RabinKarp { - return NewRabinKarpConst(n, A) -} - -// NewRabinKarpConst creates a new RabinKarp value. The argument n defines the -// length of the byte sequence to be hashed. The argument a provides the -// constant used to compute the hash. -func NewRabinKarpConst(n int, a uint64) *RabinKarp { - if n <= 0 { - panic("number of bytes n must be positive") - } - aOldest := uint64(1) - // There are faster methods. For the small n required by the LZMA - // compressor O(n) is sufficient. - for i := 0; i < n; i++ { - aOldest *= a - } - return &RabinKarp{ - A: a, aOldest: aOldest, - p: make([]byte, 0, n), - } -} - -// Len returns the length of the byte sequence. 
-func (r *RabinKarp) Len() int { - return cap(r.p) -} - -// RollByte computes the hash after x has been added. -func (r *RabinKarp) RollByte(x byte) uint64 { - if len(r.p) < cap(r.p) { - r.h += uint64(x) - r.h *= r.A - r.p = append(r.p, x) - } else { - r.h -= uint64(r.p[r.i]) * r.aOldest - r.h += uint64(x) - r.h *= r.A - r.p[r.i] = x - r.i = (r.i + 1) % cap(r.p) - } - return r.h -} diff --git a/vendor/github.com/ulikunitz/xz/internal/hash/roller.go b/vendor/github.com/ulikunitz/xz/internal/hash/roller.go deleted file mode 100644 index f1de88b445..0000000000 --- a/vendor/github.com/ulikunitz/xz/internal/hash/roller.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package hash - -// Roller provides an interface for rolling hashes. The hash value will become -// valid after hash has been called Len times. -type Roller interface { - Len() int - RollByte(x byte) uint64 -} - -// Hashes computes all hash values for the array p. Note that the state of the -// roller is changed. -func Hashes(r Roller, p []byte) []uint64 { - n := r.Len() - if len(p) < n { - return nil - } - h := make([]uint64, len(p)-n+1) - for i := 0; i < n-1; i++ { - r.RollByte(p[i]) - } - for i := range h { - h[i] = r.RollByte(p[i+n-1]) - } - return h -} diff --git a/vendor/github.com/ulikunitz/xz/internal/xlog/xlog.go b/vendor/github.com/ulikunitz/xz/internal/xlog/xlog.go deleted file mode 100644 index 6c20c77ba6..0000000000 --- a/vendor/github.com/ulikunitz/xz/internal/xlog/xlog.go +++ /dev/null @@ -1,457 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package xlog provides a simple logging package that allows to disable -// certain message categories. 
It defines a type, Logger, with multiple -// methods for formatting output. The package has also a predefined -// 'standard' Logger accessible through helper function Print[f|ln], -// Fatal[f|ln], Panic[f|ln], Warn[f|ln], Print[f|ln] and Debug[f|ln] -// that are easier to use then creating a Logger manually. That logger -// writes to standard error and prints the date and time of each logged -// message, which can be configured using the function SetFlags. -// -// The Fatal functions call os.Exit(1) after the message is output -// unless not suppressed by the flags. The Panic functions call panic -// after the writing the log message unless suppressed. -package xlog - -import ( - "fmt" - "io" - "os" - "runtime" - "sync" - "time" -) - -// The flags define what information is prefixed to each log entry -// generated by the Logger. The Lno* versions allow the suppression of -// specific output. The bits are or'ed together to control what will be -// printed. There is no control over the order of the items printed and -// the format. The full format is: -// -// 2009-01-23 01:23:23.123123 /a/b/c/d.go:23: message -// -const ( - Ldate = 1 << iota // the date: 2009-01-23 - Ltime // the time: 01:23:23 - Lmicroseconds // microsecond resolution: 01:23:23.123123 - Llongfile // full file name and line number: /a/b/c/d.go:23 - Lshortfile // final file name element and line number: d.go:23 - Lnopanic // suppresses output from Panic[f|ln] but not the panic call - Lnofatal // suppresses output from Fatal[f|ln] but not the exit - Lnowarn // suppresses output from Warn[f|ln] - Lnoprint // suppresses output from Print[f|ln] - Lnodebug // suppresses output from Debug[f|ln] - // initial values for the standard logger - Lstdflags = Ldate | Ltime | Lnodebug -) - -// A Logger represents an active logging object that generates lines of -// output to an io.Writer. Each logging operation if not suppressed -// makes a single call to the Writer's Write method. 
A Logger can be -// used simultaneously from multiple goroutines; it guarantees to -// serialize access to the Writer. -type Logger struct { - mu sync.Mutex // ensures atomic writes; and protects the following - // fields - prefix string // prefix to write at beginning of each line - flag int // properties - out io.Writer // destination for output - buf []byte // for accumulating text to write -} - -// New creates a new Logger. The out argument sets the destination to -// which the log output will be written. The prefix appears at the -// beginning of each log line. The flag argument defines the logging -// properties. -func New(out io.Writer, prefix string, flag int) *Logger { - return &Logger{out: out, prefix: prefix, flag: flag} -} - -// std is the standard logger used by the package scope functions. -var std = New(os.Stderr, "", Lstdflags) - -// itoa converts the integer to ASCII. A negative widths will avoid -// zero-padding. The function supports only non-negative integers. -func itoa(buf *[]byte, i int, wid int) { - var u = uint(i) - if u == 0 && wid <= 1 { - *buf = append(*buf, '0') - return - } - var b [32]byte - bp := len(b) - for ; u > 0 || wid > 0; u /= 10 { - bp-- - wid-- - b[bp] = byte(u%10) + '0' - } - *buf = append(*buf, b[bp:]...) -} - -// formatHeader puts the header into the buf field of the buffer. -func (l *Logger) formatHeader(t time.Time, file string, line int) { - l.buf = append(l.buf, l.prefix...) 
- if l.flag&(Ldate|Ltime|Lmicroseconds) != 0 { - if l.flag&Ldate != 0 { - year, month, day := t.Date() - itoa(&l.buf, year, 4) - l.buf = append(l.buf, '-') - itoa(&l.buf, int(month), 2) - l.buf = append(l.buf, '-') - itoa(&l.buf, day, 2) - l.buf = append(l.buf, ' ') - } - if l.flag&(Ltime|Lmicroseconds) != 0 { - hour, min, sec := t.Clock() - itoa(&l.buf, hour, 2) - l.buf = append(l.buf, ':') - itoa(&l.buf, min, 2) - l.buf = append(l.buf, ':') - itoa(&l.buf, sec, 2) - if l.flag&Lmicroseconds != 0 { - l.buf = append(l.buf, '.') - itoa(&l.buf, t.Nanosecond()/1e3, 6) - } - l.buf = append(l.buf, ' ') - } - } - if l.flag&(Lshortfile|Llongfile) != 0 { - if l.flag&Lshortfile != 0 { - short := file - for i := len(file) - 1; i > 0; i-- { - if file[i] == '/' { - short = file[i+1:] - break - } - } - file = short - } - l.buf = append(l.buf, file...) - l.buf = append(l.buf, ':') - itoa(&l.buf, line, -1) - l.buf = append(l.buf, ": "...) - } -} - -func (l *Logger) output(calldepth int, now time.Time, s string) error { - var file string - var line int - if l.flag&(Lshortfile|Llongfile) != 0 { - l.mu.Unlock() - var ok bool - _, file, line, ok = runtime.Caller(calldepth) - if !ok { - file = "???" - line = 0 - } - l.mu.Lock() - } - l.buf = l.buf[:0] - l.formatHeader(now, file, line) - l.buf = append(l.buf, s...) - if len(s) == 0 || s[len(s)-1] != '\n' { - l.buf = append(l.buf, '\n') - } - _, err := l.out.Write(l.buf) - return err -} - -// Output writes the string s with the header controlled by the flags to -// the l.out writer. A newline will be appended if s doesn't end in a -// newline. Calldepth is used to recover the PC, although all current -// calls of Output use the call depth 2. Access to the function is serialized. -func (l *Logger) Output(calldepth, noflag int, v ...interface{}) error { - now := time.Now() - l.mu.Lock() - defer l.mu.Unlock() - if l.flag&noflag != 0 { - return nil - } - s := fmt.Sprint(v...) 
- return l.output(calldepth+1, now, s) -} - -// Outputf works like output but formats the output like Printf. -func (l *Logger) Outputf(calldepth int, noflag int, format string, v ...interface{}) error { - now := time.Now() - l.mu.Lock() - defer l.mu.Unlock() - if l.flag&noflag != 0 { - return nil - } - s := fmt.Sprintf(format, v...) - return l.output(calldepth+1, now, s) -} - -// Outputln works like output but formats the output like Println. -func (l *Logger) Outputln(calldepth int, noflag int, v ...interface{}) error { - now := time.Now() - l.mu.Lock() - defer l.mu.Unlock() - if l.flag&noflag != 0 { - return nil - } - s := fmt.Sprintln(v...) - return l.output(calldepth+1, now, s) -} - -// Panic prints the message like Print and calls panic. The printing -// might be suppressed by the flag Lnopanic. -func (l *Logger) Panic(v ...interface{}) { - l.Output(2, Lnopanic, v...) - s := fmt.Sprint(v...) - panic(s) -} - -// Panic prints the message like Print and calls panic. The printing -// might be suppressed by the flag Lnopanic. -func Panic(v ...interface{}) { - std.Output(2, Lnopanic, v...) - s := fmt.Sprint(v...) - panic(s) -} - -// Panicf prints the message like Printf and calls panic. The printing -// might be suppressed by the flag Lnopanic. -func (l *Logger) Panicf(format string, v ...interface{}) { - l.Outputf(2, Lnopanic, format, v...) - s := fmt.Sprintf(format, v...) - panic(s) -} - -// Panicf prints the message like Printf and calls panic. The printing -// might be suppressed by the flag Lnopanic. -func Panicf(format string, v ...interface{}) { - std.Outputf(2, Lnopanic, format, v...) - s := fmt.Sprintf(format, v...) - panic(s) -} - -// Panicln prints the message like Println and calls panic. The printing -// might be suppressed by the flag Lnopanic. -func (l *Logger) Panicln(v ...interface{}) { - l.Outputln(2, Lnopanic, v...) - s := fmt.Sprintln(v...) - panic(s) -} - -// Panicln prints the message like Println and calls panic. 
The printing -// might be suppressed by the flag Lnopanic. -func Panicln(v ...interface{}) { - std.Outputln(2, Lnopanic, v...) - s := fmt.Sprintln(v...) - panic(s) -} - -// Fatal prints the message like Print and calls os.Exit(1). The -// printing might be suppressed by the flag Lnofatal. -func (l *Logger) Fatal(v ...interface{}) { - l.Output(2, Lnofatal, v...) - os.Exit(1) -} - -// Fatal prints the message like Print and calls os.Exit(1). The -// printing might be suppressed by the flag Lnofatal. -func Fatal(v ...interface{}) { - std.Output(2, Lnofatal, v...) - os.Exit(1) -} - -// Fatalf prints the message like Printf and calls os.Exit(1). The -// printing might be suppressed by the flag Lnofatal. -func (l *Logger) Fatalf(format string, v ...interface{}) { - l.Outputf(2, Lnofatal, format, v...) - os.Exit(1) -} - -// Fatalf prints the message like Printf and calls os.Exit(1). The -// printing might be suppressed by the flag Lnofatal. -func Fatalf(format string, v ...interface{}) { - std.Outputf(2, Lnofatal, format, v...) - os.Exit(1) -} - -// Fatalln prints the message like Println and calls os.Exit(1). The -// printing might be suppressed by the flag Lnofatal. -func (l *Logger) Fatalln(format string, v ...interface{}) { - l.Outputln(2, Lnofatal, v...) - os.Exit(1) -} - -// Fatalln prints the message like Println and calls os.Exit(1). The -// printing might be suppressed by the flag Lnofatal. -func Fatalln(format string, v ...interface{}) { - std.Outputln(2, Lnofatal, v...) - os.Exit(1) -} - -// Warn prints the message like Print. The printing might be suppressed -// by the flag Lnowarn. -func (l *Logger) Warn(v ...interface{}) { - l.Output(2, Lnowarn, v...) -} - -// Warn prints the message like Print. The printing might be suppressed -// by the flag Lnowarn. -func Warn(v ...interface{}) { - std.Output(2, Lnowarn, v...) -} - -// Warnf prints the message like Printf. The printing might be suppressed -// by the flag Lnowarn. 
-func (l *Logger) Warnf(format string, v ...interface{}) { - l.Outputf(2, Lnowarn, format, v...) -} - -// Warnf prints the message like Printf. The printing might be suppressed -// by the flag Lnowarn. -func Warnf(format string, v ...interface{}) { - std.Outputf(2, Lnowarn, format, v...) -} - -// Warnln prints the message like Println. The printing might be suppressed -// by the flag Lnowarn. -func (l *Logger) Warnln(v ...interface{}) { - l.Outputln(2, Lnowarn, v...) -} - -// Warnln prints the message like Println. The printing might be suppressed -// by the flag Lnowarn. -func Warnln(v ...interface{}) { - std.Outputln(2, Lnowarn, v...) -} - -// Print prints the message like fmt.Print. The printing might be suppressed -// by the flag Lnoprint. -func (l *Logger) Print(v ...interface{}) { - l.Output(2, Lnoprint, v...) -} - -// Print prints the message like fmt.Print. The printing might be suppressed -// by the flag Lnoprint. -func Print(v ...interface{}) { - std.Output(2, Lnoprint, v...) -} - -// Printf prints the message like fmt.Printf. The printing might be suppressed -// by the flag Lnoprint. -func (l *Logger) Printf(format string, v ...interface{}) { - l.Outputf(2, Lnoprint, format, v...) -} - -// Printf prints the message like fmt.Printf. The printing might be suppressed -// by the flag Lnoprint. -func Printf(format string, v ...interface{}) { - std.Outputf(2, Lnoprint, format, v...) -} - -// Println prints the message like fmt.Println. The printing might be -// suppressed by the flag Lnoprint. -func (l *Logger) Println(v ...interface{}) { - l.Outputln(2, Lnoprint, v...) -} - -// Println prints the message like fmt.Println. The printing might be -// suppressed by the flag Lnoprint. -func Println(v ...interface{}) { - std.Outputln(2, Lnoprint, v...) -} - -// Debug prints the message like Print. The printing might be suppressed -// by the flag Lnodebug. -func (l *Logger) Debug(v ...interface{}) { - l.Output(2, Lnodebug, v...) 
-} - -// Debug prints the message like Print. The printing might be suppressed -// by the flag Lnodebug. -func Debug(v ...interface{}) { - std.Output(2, Lnodebug, v...) -} - -// Debugf prints the message like Printf. The printing might be suppressed -// by the flag Lnodebug. -func (l *Logger) Debugf(format string, v ...interface{}) { - l.Outputf(2, Lnodebug, format, v...) -} - -// Debugf prints the message like Printf. The printing might be suppressed -// by the flag Lnodebug. -func Debugf(format string, v ...interface{}) { - std.Outputf(2, Lnodebug, format, v...) -} - -// Debugln prints the message like Println. The printing might be suppressed -// by the flag Lnodebug. -func (l *Logger) Debugln(v ...interface{}) { - l.Outputln(2, Lnodebug, v...) -} - -// Debugln prints the message like Println. The printing might be suppressed -// by the flag Lnodebug. -func Debugln(v ...interface{}) { - std.Outputln(2, Lnodebug, v...) -} - -// Flags returns the current flags used by the logger. -func (l *Logger) Flags() int { - l.mu.Lock() - defer l.mu.Unlock() - return l.flag -} - -// Flags returns the current flags used by the standard logger. -func Flags() int { - return std.Flags() -} - -// SetFlags sets the flags of the logger. -func (l *Logger) SetFlags(flag int) { - l.mu.Lock() - defer l.mu.Unlock() - l.flag = flag -} - -// SetFlags sets the flags for the standard logger. -func SetFlags(flag int) { - std.SetFlags(flag) -} - -// Prefix returns the prefix used by the logger. -func (l *Logger) Prefix() string { - l.mu.Lock() - defer l.mu.Unlock() - return l.prefix -} - -// Prefix returns the prefix used by the standard logger of the package. -func Prefix() string { - return std.Prefix() -} - -// SetPrefix sets the prefix for the logger. -func (l *Logger) SetPrefix(prefix string) { - l.mu.Lock() - defer l.mu.Unlock() - l.prefix = prefix -} - -// SetPrefix sets the prefix of the standard logger of the package. 
-func SetPrefix(prefix string) { - std.SetPrefix(prefix) -} - -// SetOutput sets the output of the logger. -func (l *Logger) SetOutput(w io.Writer) { - l.mu.Lock() - defer l.mu.Unlock() - l.out = w -} - -// SetOutput sets the output for the standard logger of the package. -func SetOutput(w io.Writer) { - std.SetOutput(w) -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/bintree.go b/vendor/github.com/ulikunitz/xz/lzma/bintree.go deleted file mode 100644 index 2a7bd19ec1..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/bintree.go +++ /dev/null @@ -1,522 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "unicode" -) - -// node represents a node in the binary tree. -type node struct { - // x is the search value - x uint32 - // p parent node - p uint32 - // l left child - l uint32 - // r right child - r uint32 -} - -// wordLen is the number of bytes represented by the v field of a node. -const wordLen = 4 - -// binTree supports the identification of the next operation based on a -// binary tree. -// -// Nodes will be identified by their index into the ring buffer. -type binTree struct { - dict *encoderDict - // ring buffer of nodes - node []node - // absolute offset of the entry for the next node. Position 4 - // byte larger. - hoff int64 - // front position in the node ring buffer - front uint32 - // index of the root node - root uint32 - // current x value - x uint32 - // preallocated array - data []byte -} - -// null represents the nonexistent index. We can't use zero because it -// would always exist or we would need to decrease the index for each -// reference. -const null uint32 = 1<<32 - 1 - -// newBinTree initializes the binTree structure. The capacity defines -// the size of the buffer and defines the maximum distance for which -// matches will be found. 
-func newBinTree(capacity int) (t *binTree, err error) { - if capacity < 1 { - return nil, errors.New( - "newBinTree: capacity must be larger than zero") - } - if int64(capacity) >= int64(null) { - return nil, errors.New( - "newBinTree: capacity must less 2^{32}-1") - } - t = &binTree{ - node: make([]node, capacity), - hoff: -int64(wordLen), - root: null, - data: make([]byte, maxMatchLen), - } - return t, nil -} - -func (t *binTree) SetDict(d *encoderDict) { t.dict = d } - -// WriteByte writes a single byte into the binary tree. -func (t *binTree) WriteByte(c byte) error { - t.x = (t.x << 8) | uint32(c) - t.hoff++ - if t.hoff < 0 { - return nil - } - v := t.front - if int64(v) < t.hoff { - // We are overwriting old nodes stored in the tree. - t.remove(v) - } - t.node[v].x = t.x - t.add(v) - t.front++ - if int64(t.front) >= int64(len(t.node)) { - t.front = 0 - } - return nil -} - -// Writes writes a sequence of bytes into the binTree structure. -func (t *binTree) Write(p []byte) (n int, err error) { - for _, c := range p { - t.WriteByte(c) - } - return len(p), nil -} - -// add puts the node v into the tree. The node must not be part of the -// tree before. -func (t *binTree) add(v uint32) { - vn := &t.node[v] - // Set left and right to null indices. - vn.l, vn.r = null, null - // If the binary tree is empty make v the root. - if t.root == null { - t.root = v - vn.p = null - return - } - x := vn.x - p := t.root - // Search for the right leave link and add the new node. - for { - pn := &t.node[p] - if x <= pn.x { - if pn.l == null { - pn.l = v - vn.p = p - return - } - p = pn.l - } else { - if pn.r == null { - pn.r = v - vn.p = p - return - } - p = pn.r - } - } -} - -// parent returns the parent node index of v and the pointer to v value -// in the parent. 
-func (t *binTree) parent(v uint32) (p uint32, ptr *uint32) { - if t.root == v { - return null, &t.root - } - p = t.node[v].p - if t.node[p].l == v { - ptr = &t.node[p].l - } else { - ptr = &t.node[p].r - } - return -} - -// Remove node v. -func (t *binTree) remove(v uint32) { - vn := &t.node[v] - p, ptr := t.parent(v) - l, r := vn.l, vn.r - if l == null { - // Move the right child up. - *ptr = r - if r != null { - t.node[r].p = p - } - return - } - if r == null { - // Move the left child up. - *ptr = l - t.node[l].p = p - return - } - - // Search the in-order predecessor u. - un := &t.node[l] - ur := un.r - if ur == null { - // In order predecessor is l. Move it up. - un.r = r - t.node[r].p = l - un.p = p - *ptr = l - return - } - var u uint32 - for { - // Look for the max value in the tree where l is root. - u = ur - ur = t.node[u].r - if ur == null { - break - } - } - // replace u with ul - un = &t.node[u] - ul := un.l - up := un.p - t.node[up].r = ul - if ul != null { - t.node[ul].p = up - } - - // replace v by u - un.l, un.r = l, r - t.node[l].p = u - t.node[r].p = u - *ptr = u - un.p = p -} - -// search looks for the node that have the value x or for the nodes that -// brace it. The node highest in the tree with the value x will be -// returned. All other nodes with the same value live in left subtree of -// the returned node. -func (t *binTree) search(v uint32, x uint32) (a, b uint32) { - a, b = null, null - if v == null { - return - } - for { - vn := &t.node[v] - if x <= vn.x { - if x == vn.x { - return v, v - } - b = v - if vn.l == null { - return - } - v = vn.l - } else { - a = v - if vn.r == null { - return - } - v = vn.r - } - } -} - -// max returns the node with maximum value in the subtree with v as -// root. -func (t *binTree) max(v uint32) uint32 { - if v == null { - return null - } - for { - r := t.node[v].r - if r == null { - return v - } - v = r - } -} - -// min returns the node with the minimum value in the subtree with v as -// root. 
-func (t *binTree) min(v uint32) uint32 { - if v == null { - return null - } - for { - l := t.node[v].l - if l == null { - return v - } - v = l - } -} - -// pred returns the in-order predecessor of node v. -func (t *binTree) pred(v uint32) uint32 { - if v == null { - return null - } - u := t.max(t.node[v].l) - if u != null { - return u - } - for { - p := t.node[v].p - if p == null { - return null - } - if t.node[p].r == v { - return p - } - v = p - } -} - -// succ returns the in-order successor of node v. -func (t *binTree) succ(v uint32) uint32 { - if v == null { - return null - } - u := t.min(t.node[v].r) - if u != null { - return u - } - for { - p := t.node[v].p - if p == null { - return null - } - if t.node[p].l == v { - return p - } - v = p - } -} - -// xval converts the first four bytes of a into an 32-bit unsigned -// integer in big-endian order. -func xval(a []byte) uint32 { - var x uint32 - switch len(a) { - default: - x |= uint32(a[3]) - fallthrough - case 3: - x |= uint32(a[2]) << 8 - fallthrough - case 2: - x |= uint32(a[1]) << 16 - fallthrough - case 1: - x |= uint32(a[0]) << 24 - case 0: - } - return x -} - -// dumpX converts value x into a four-letter string. -func dumpX(x uint32) string { - a := make([]byte, 4) - for i := 0; i < 4; i++ { - c := byte(x >> uint((3-i)*8)) - if unicode.IsGraphic(rune(c)) { - a[i] = c - } else { - a[i] = '.' - } - } - return string(a) -} - -/* -// dumpNode writes a representation of the node v into the io.Writer. -func (t *binTree) dumpNode(w io.Writer, v uint32, indent int) { - if v == null { - return - } - - vn := &t.node[v] - - t.dumpNode(w, vn.r, indent+2) - - for i := 0; i < indent; i++ { - fmt.Fprint(w, " ") - } - if vn.p == null { - fmt.Fprintf(w, "node %d %q parent null\n", v, dumpX(vn.x)) - } else { - fmt.Fprintf(w, "node %d %q parent %d\n", v, dumpX(vn.x), vn.p) - } - - t.dumpNode(w, vn.l, indent+2) -} - -// dump prints a representation of the binary tree into the writer. 
-func (t *binTree) dump(w io.Writer) error { - bw := bufio.NewWriter(w) - t.dumpNode(bw, t.root, 0) - return bw.Flush() -} -*/ - -func (t *binTree) distance(v uint32) int { - dist := int(t.front) - int(v) - if dist <= 0 { - dist += len(t.node) - } - return dist -} - -type matchParams struct { - rep [4]uint32 - // length when match will be accepted - nAccept int - // nodes to check - check int - // finish if length get shorter - stopShorter bool -} - -func (t *binTree) match(m match, distIter func() (int, bool), p matchParams, -) (r match, checked int, accepted bool) { - buf := &t.dict.buf - for { - if checked >= p.check { - return m, checked, true - } - dist, ok := distIter() - if !ok { - return m, checked, false - } - checked++ - if m.n > 0 { - i := buf.rear - dist + m.n - 1 - if i < 0 { - i += len(buf.data) - } else if i >= len(buf.data) { - i -= len(buf.data) - } - if buf.data[i] != t.data[m.n-1] { - if p.stopShorter { - return m, checked, false - } - continue - } - } - n := buf.matchLen(dist, t.data) - switch n { - case 0: - if p.stopShorter { - return m, checked, false - } - continue - case 1: - if uint32(dist-minDistance) != p.rep[0] { - continue - } - } - if n < m.n || (n == m.n && int64(dist) >= m.distance) { - continue - } - m = match{int64(dist), n} - if n >= p.nAccept { - return m, checked, true - } - } -} - -func (t *binTree) NextOp(rep [4]uint32) operation { - // retrieve maxMatchLen data - n, _ := t.dict.buf.Peek(t.data[:maxMatchLen]) - if n == 0 { - panic("no data in buffer") - } - t.data = t.data[:n] - - var ( - m match - x, u, v uint32 - iterPred, iterSucc func() (int, bool) - ) - p := matchParams{ - rep: rep, - nAccept: maxMatchLen, - check: 32, - } - i := 4 - iterSmall := func() (dist int, ok bool) { - i-- - if i <= 0 { - return 0, false - } - return i, true - } - m, checked, accepted := t.match(m, iterSmall, p) - if accepted { - goto end - } - p.check -= checked - x = xval(t.data) - u, v = t.search(t.root, x) - if u == v && len(t.data) == 4 { - 
iter := func() (dist int, ok bool) { - if u == null { - return 0, false - } - dist = t.distance(u) - u, v = t.search(t.node[u].l, x) - if u != v { - u = null - } - return dist, true - } - m, _, _ = t.match(m, iter, p) - goto end - } - p.stopShorter = true - iterSucc = func() (dist int, ok bool) { - if v == null { - return 0, false - } - dist = t.distance(v) - v = t.succ(v) - return dist, true - } - m, checked, accepted = t.match(m, iterSucc, p) - if accepted { - goto end - } - p.check -= checked - iterPred = func() (dist int, ok bool) { - if u == null { - return 0, false - } - dist = t.distance(u) - u = t.pred(u) - return dist, true - } - m, _, _ = t.match(m, iterPred, p) -end: - if m.n == 0 { - return lit{t.data[0]} - } - return m -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/bitops.go b/vendor/github.com/ulikunitz/xz/lzma/bitops.go deleted file mode 100644 index d2c07e8c91..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/bitops.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -/* Naming conventions follows the CodeReviewComments in the Go Wiki. */ - -// ntz32Const is used by the functions NTZ and NLZ. -const ntz32Const = 0x04d7651f - -// ntz32Table is a helper table for de Bruijn algorithm by Danny Dubé. -// See Henry S. Warren, Jr. "Hacker's Delight" section 5-1 figure 5-26. -var ntz32Table = [32]int8{ - 0, 1, 2, 24, 3, 19, 6, 25, - 22, 4, 20, 10, 16, 7, 12, 26, - 31, 23, 18, 5, 21, 9, 15, 11, - 30, 17, 8, 14, 29, 13, 28, 27, -} - -/* -// ntz32 computes the number of trailing zeros for an unsigned 32-bit integer. -func ntz32(x uint32) int { - if x == 0 { - return 32 - } - x = (x & -x) * ntz32Const - return int(ntz32Table[x>>27]) -} -*/ - -// nlz32 computes the number of leading zeros for an unsigned 32-bit integer. 
-func nlz32(x uint32) int { - // Smear left most bit to the right - x |= x >> 1 - x |= x >> 2 - x |= x >> 4 - x |= x >> 8 - x |= x >> 16 - // Use ntz mechanism to calculate nlz. - x++ - if x == 0 { - return 0 - } - x *= ntz32Const - return 32 - int(ntz32Table[x>>27]) -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/breader.go b/vendor/github.com/ulikunitz/xz/lzma/breader.go deleted file mode 100644 index 939be8845a..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/breader.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "io" -) - -// breader provides the ReadByte function for a Reader. It doesn't read -// more data from the reader than absolutely necessary. -type breader struct { - io.Reader - // helper slice to save allocations - p []byte -} - -// ByteReader converts an io.Reader into an io.ByteReader. -func ByteReader(r io.Reader) io.ByteReader { - br, ok := r.(io.ByteReader) - if !ok { - return &breader{r, make([]byte, 1)} - } - return br -} - -// ReadByte read byte function. -func (r *breader) ReadByte() (c byte, err error) { - n, err := r.Reader.Read(r.p) - if n < 1 { - if err == nil { - err = errors.New("breader.ReadByte: no data") - } - return 0, err - } - return r.p[0], nil -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/buffer.go b/vendor/github.com/ulikunitz/xz/lzma/buffer.go deleted file mode 100644 index 2761de5f0b..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/buffer.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" -) - -// buffer provides a circular buffer of bytes. 
If the front index equals -// the rear index the buffer is empty. As a consequence front cannot be -// equal rear for a full buffer. So a full buffer has a length that is -// one byte less the the length of the data slice. -type buffer struct { - data []byte - front int - rear int -} - -// newBuffer creates a buffer with the given size. -func newBuffer(size int) *buffer { - return &buffer{data: make([]byte, size+1)} -} - -// Cap returns the capacity of the buffer. -func (b *buffer) Cap() int { - return len(b.data) - 1 -} - -// Resets the buffer. The front and rear index are set to zero. -func (b *buffer) Reset() { - b.front = 0 - b.rear = 0 -} - -// Buffered returns the number of bytes buffered. -func (b *buffer) Buffered() int { - delta := b.front - b.rear - if delta < 0 { - delta += len(b.data) - } - return delta -} - -// Available returns the number of bytes available for writing. -func (b *buffer) Available() int { - delta := b.rear - 1 - b.front - if delta < 0 { - delta += len(b.data) - } - return delta -} - -// addIndex adds a non-negative integer to the index i and returns the -// resulting index. The function takes care of wrapping the index as -// well as potential overflow situations. -func (b *buffer) addIndex(i int, n int) int { - // subtraction of len(b.data) prevents overflow - i += n - len(b.data) - if i < 0 { - i += len(b.data) - } - return i -} - -// Read reads bytes from the buffer into p and returns the number of -// bytes read. The function never returns an error but might return less -// data than requested. -func (b *buffer) Read(p []byte) (n int, err error) { - n, err = b.Peek(p) - b.rear = b.addIndex(b.rear, n) - return n, err -} - -// Peek reads bytes from the buffer into p without changing the buffer. -// Peek will never return an error but might return less data than -// requested. 
-func (b *buffer) Peek(p []byte) (n int, err error) { - m := b.Buffered() - n = len(p) - if m < n { - n = m - p = p[:n] - } - k := copy(p, b.data[b.rear:]) - if k < n { - copy(p[k:], b.data) - } - return n, nil -} - -// Discard skips the n next bytes to read from the buffer, returning the -// bytes discarded. -// -// If Discards skips fewer than n bytes, it returns an error. -func (b *buffer) Discard(n int) (discarded int, err error) { - if n < 0 { - return 0, errors.New("buffer.Discard: negative argument") - } - m := b.Buffered() - if m < n { - n = m - err = errors.New( - "buffer.Discard: discarded less bytes then requested") - } - b.rear = b.addIndex(b.rear, n) - return n, err -} - -// ErrNoSpace indicates that there is insufficient space for the Write -// operation. -var ErrNoSpace = errors.New("insufficient space") - -// Write puts data into the buffer. If less bytes are written than -// requested ErrNoSpace is returned. -func (b *buffer) Write(p []byte) (n int, err error) { - m := b.Available() - n = len(p) - if m < n { - n = m - p = p[:m] - err = ErrNoSpace - } - k := copy(b.data[b.front:], p) - if k < n { - copy(b.data, p[k:]) - } - b.front = b.addIndex(b.front, n) - return n, err -} - -// WriteByte writes a single byte into the buffer. The error ErrNoSpace -// is returned if no single byte is available in the buffer for writing. -func (b *buffer) WriteByte(c byte) error { - if b.Available() < 1 { - return ErrNoSpace - } - b.data[b.front] = c - b.front = b.addIndex(b.front, 1) - return nil -} - -// prefixLen returns the length of the common prefix of a and b. -func prefixLen(a, b []byte) int { - if len(a) > len(b) { - a, b = b, a - } - for i, c := range a { - if b[i] != c { - return i - } - } - return len(a) -} - -// matchLen returns the length of the common prefix for the given -// distance from the rear and the byte slice p. 
-func (b *buffer) matchLen(distance int, p []byte) int { - var n int - i := b.rear - distance - if i < 0 { - if n = prefixLen(p, b.data[len(b.data)+i:]); n < -i { - return n - } - p = p[n:] - i = 0 - } - n += prefixLen(p, b.data[i:]) - return n -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/bytewriter.go b/vendor/github.com/ulikunitz/xz/lzma/bytewriter.go deleted file mode 100644 index 040874c1a4..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/bytewriter.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "io" -) - -// ErrLimit indicates that the limit of the LimitedByteWriter has been -// reached. -var ErrLimit = errors.New("limit reached") - -// LimitedByteWriter provides a byte writer that can be written until a -// limit is reached. The field N provides the number of remaining -// bytes. -type LimitedByteWriter struct { - BW io.ByteWriter - N int64 -} - -// WriteByte writes a single byte to the limited byte writer. It returns -// ErrLimit if the limit has been reached. If the byte is successfully -// written the field N of the LimitedByteWriter will be decremented by -// one. -func (l *LimitedByteWriter) WriteByte(c byte) error { - if l.N <= 0 { - return ErrLimit - } - if err := l.BW.WriteByte(c); err != nil { - return err - } - l.N-- - return nil -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/decoder.go b/vendor/github.com/ulikunitz/xz/lzma/decoder.go deleted file mode 100644 index cbb943a062..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/decoder.go +++ /dev/null @@ -1,277 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lzma - -import ( - "errors" - "fmt" - "io" -) - -// decoder decodes a raw LZMA stream without any header. -type decoder struct { - // dictionary; the rear pointer of the buffer will be used for - // reading the data. - Dict *decoderDict - // decoder state - State *state - // range decoder - rd *rangeDecoder - // start stores the head value of the dictionary for the LZMA - // stream - start int64 - // size of uncompressed data - size int64 - // end-of-stream encountered - eos bool - // EOS marker found - eosMarker bool -} - -// newDecoder creates a new decoder instance. The parameter size provides -// the expected byte size of the decompressed data. If the size is -// unknown use a negative value. In that case the decoder will look for -// a terminating end-of-stream marker. -func newDecoder(br io.ByteReader, state *state, dict *decoderDict, size int64) (d *decoder, err error) { - rd, err := newRangeDecoder(br) - if err != nil { - return nil, err - } - d = &decoder{ - State: state, - Dict: dict, - rd: rd, - size: size, - start: dict.pos(), - } - return d, nil -} - -// Reopen restarts the decoder with a new byte reader and a new size. Reopen -// resets the Decompressed counter to zero. -func (d *decoder) Reopen(br io.ByteReader, size int64) error { - var err error - if d.rd, err = newRangeDecoder(br); err != nil { - return err - } - d.start = d.Dict.pos() - d.size = size - d.eos = false - return nil -} - -// decodeLiteral decodes a single literal from the LZMA stream. -func (d *decoder) decodeLiteral() (op operation, err error) { - litState := d.State.litState(d.Dict.byteAt(1), d.Dict.head) - match := d.Dict.byteAt(int(d.State.rep[0]) + 1) - s, err := d.State.litCodec.Decode(d.rd, d.State.state, match, litState) - if err != nil { - return nil, err - } - return lit{s}, nil -} - -// errEOS indicates that an EOS marker has been found. -var errEOS = errors.New("EOS marker found") - -// readOp decodes the next operation from the compressed stream. 
It -// returns the operation. If an explicit end of stream marker is -// identified the eos error is returned. -func (d *decoder) readOp() (op operation, err error) { - // Value of the end of stream (EOS) marker - const eosDist = 1<<32 - 1 - - state, state2, posState := d.State.states(d.Dict.head) - - b, err := d.State.isMatch[state2].Decode(d.rd) - if err != nil { - return nil, err - } - if b == 0 { - // literal - op, err := d.decodeLiteral() - if err != nil { - return nil, err - } - d.State.updateStateLiteral() - return op, nil - } - b, err = d.State.isRep[state].Decode(d.rd) - if err != nil { - return nil, err - } - if b == 0 { - // simple match - d.State.rep[3], d.State.rep[2], d.State.rep[1] = - d.State.rep[2], d.State.rep[1], d.State.rep[0] - - d.State.updateStateMatch() - // The length decoder returns the length offset. - n, err := d.State.lenCodec.Decode(d.rd, posState) - if err != nil { - return nil, err - } - // The dist decoder returns the distance offset. The actual - // distance is 1 higher. 
- d.State.rep[0], err = d.State.distCodec.Decode(d.rd, n) - if err != nil { - return nil, err - } - if d.State.rep[0] == eosDist { - d.eosMarker = true - return nil, errEOS - } - op = match{n: int(n) + minMatchLen, - distance: int64(d.State.rep[0]) + minDistance} - return op, nil - } - b, err = d.State.isRepG0[state].Decode(d.rd) - if err != nil { - return nil, err - } - dist := d.State.rep[0] - if b == 0 { - // rep match 0 - b, err = d.State.isRepG0Long[state2].Decode(d.rd) - if err != nil { - return nil, err - } - if b == 0 { - d.State.updateStateShortRep() - op = match{n: 1, distance: int64(dist) + minDistance} - return op, nil - } - } else { - b, err = d.State.isRepG1[state].Decode(d.rd) - if err != nil { - return nil, err - } - if b == 0 { - dist = d.State.rep[1] - } else { - b, err = d.State.isRepG2[state].Decode(d.rd) - if err != nil { - return nil, err - } - if b == 0 { - dist = d.State.rep[2] - } else { - dist = d.State.rep[3] - d.State.rep[3] = d.State.rep[2] - } - d.State.rep[2] = d.State.rep[1] - } - d.State.rep[1] = d.State.rep[0] - d.State.rep[0] = dist - } - n, err := d.State.repLenCodec.Decode(d.rd, posState) - if err != nil { - return nil, err - } - d.State.updateStateRep() - op = match{n: int(n) + minMatchLen, distance: int64(dist) + minDistance} - return op, nil -} - -// apply takes the operation and transforms the decoder dictionary accordingly. -func (d *decoder) apply(op operation) error { - var err error - switch x := op.(type) { - case match: - err = d.Dict.writeMatch(x.distance, x.n) - case lit: - err = d.Dict.WriteByte(x.b) - default: - panic("op is neither a match nor a literal") - } - return err -} - -// decompress fills the dictionary unless no space for new data is -// available. If the end of the LZMA stream has been reached io.EOF will -// be returned. 
-func (d *decoder) decompress() error { - if d.eos { - return io.EOF - } - for d.Dict.Available() >= maxMatchLen { - op, err := d.readOp() - switch err { - case nil: - // break - case errEOS: - d.eos = true - if !d.rd.possiblyAtEnd() { - return errDataAfterEOS - } - if d.size >= 0 && d.size != d.Decompressed() { - return errSize - } - return io.EOF - case io.EOF: - d.eos = true - return io.ErrUnexpectedEOF - default: - return err - } - if err = d.apply(op); err != nil { - return err - } - if d.size >= 0 && d.Decompressed() >= d.size { - d.eos = true - if d.Decompressed() > d.size { - return errSize - } - if !d.rd.possiblyAtEnd() { - switch _, err = d.readOp(); err { - case nil: - return errSize - case io.EOF: - return io.ErrUnexpectedEOF - case errEOS: - break - default: - return err - } - } - return io.EOF - } - } - return nil -} - -// Errors that may be returned while decoding data. -var ( - errDataAfterEOS = errors.New("lzma: data after end of stream marker") - errSize = errors.New("lzma: wrong uncompressed data size") -) - -// Read reads data from the buffer. If no more data is available io.EOF is -// returned. -func (d *decoder) Read(p []byte) (n int, err error) { - var k int - for { - // Read of decoder dict never returns an error. - k, err = d.Dict.Read(p[n:]) - if err != nil { - panic(fmt.Errorf("dictionary read error %s", err)) - } - if k == 0 && d.eos { - return n, io.EOF - } - n += k - if n >= len(p) { - return n, nil - } - if err = d.decompress(); err != nil && err != io.EOF { - return n, err - } - } -} - -// Decompressed returns the number of bytes decompressed by the decoder. 
-func (d *decoder) Decompressed() int64 { - return d.Dict.pos() - d.start -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/decoderdict.go b/vendor/github.com/ulikunitz/xz/lzma/decoderdict.go deleted file mode 100644 index 8cd616ef9b..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/decoderdict.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "fmt" -) - -// decoderDict provides the dictionary for the decoder. The whole -// dictionary is used as reader buffer. -type decoderDict struct { - buf buffer - head int64 -} - -// newDecoderDict creates a new decoder dictionary. The whole dictionary -// will be used as reader buffer. -func newDecoderDict(dictCap int) (d *decoderDict, err error) { - // lower limit supports easy test cases - if !(1 <= dictCap && int64(dictCap) <= MaxDictCap) { - return nil, errors.New("lzma: dictCap out of range") - } - d = &decoderDict{buf: *newBuffer(dictCap)} - return d, nil -} - -// Reset clears the dictionary. The read buffer is not changed, so the -// buffered data can still be read. -func (d *decoderDict) Reset() { - d.head = 0 -} - -// WriteByte writes a single byte into the dictionary. It is used to -// write literals into the dictionary. -func (d *decoderDict) WriteByte(c byte) error { - if err := d.buf.WriteByte(c); err != nil { - return err - } - d.head++ - return nil -} - -// pos returns the position of the dictionary head. -func (d *decoderDict) pos() int64 { return d.head } - -// dictLen returns the actual length of the dictionary. -func (d *decoderDict) dictLen() int { - capacity := d.buf.Cap() - if d.head >= int64(capacity) { - return capacity - } - return int(d.head) -} - -// byteAt returns a byte stored in the dictionary. 
If the distance is -// non-positive or exceeds the current length of the dictionary the zero -// byte is returned. -func (d *decoderDict) byteAt(dist int) byte { - if !(0 < dist && dist <= d.dictLen()) { - return 0 - } - i := d.buf.front - dist - if i < 0 { - i += len(d.buf.data) - } - return d.buf.data[i] -} - -// writeMatch writes the match at the top of the dictionary. The given -// distance must point in the current dictionary and the length must not -// exceed the maximum length 273 supported in LZMA. -// -// The error value ErrNoSpace indicates that no space is available in -// the dictionary for writing. You need to read from the dictionary -// first. -func (d *decoderDict) writeMatch(dist int64, length int) error { - if !(0 < dist && dist <= int64(d.dictLen())) { - return errors.New("writeMatch: distance out of range") - } - if !(0 < length && length <= maxMatchLen) { - return errors.New("writeMatch: length out of range") - } - if length > d.buf.Available() { - return ErrNoSpace - } - d.head += int64(length) - - i := d.buf.front - int(dist) - if i < 0 { - i += len(d.buf.data) - } - for length > 0 { - var p []byte - if i >= d.buf.front { - p = d.buf.data[i:] - i = 0 - } else { - p = d.buf.data[i:d.buf.front] - i = d.buf.front - } - if len(p) > length { - p = p[:length] - } - if _, err := d.buf.Write(p); err != nil { - panic(fmt.Errorf("d.buf.Write returned error %s", err)) - } - length -= len(p) - } - return nil -} - -// Write writes the given bytes into the dictionary and advances the -// head. -func (d *decoderDict) Write(p []byte) (n int, err error) { - n, err = d.buf.Write(p) - d.head += int64(n) - return n, err -} - -// Available returns the number of available bytes for writing into the -// decoder dictionary. -func (d *decoderDict) Available() int { return d.buf.Available() } - -// Read reads data from the buffer contained in the decoder dictionary. 
-func (d *decoderDict) Read(p []byte) (n int, err error) { return d.buf.Read(p) } diff --git a/vendor/github.com/ulikunitz/xz/lzma/directcodec.go b/vendor/github.com/ulikunitz/xz/lzma/directcodec.go deleted file mode 100644 index 20b256a9d6..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/directcodec.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -// directCodec allows the encoding and decoding of values with a fixed number -// of bits. The number of bits must be in the range [1,32]. -type directCodec byte - -// Bits returns the number of bits supported by this codec. -func (dc directCodec) Bits() int { - return int(dc) -} - -// Encode uses the range encoder to encode a value with the fixed number of -// bits. The most-significant bit is encoded first. -func (dc directCodec) Encode(e *rangeEncoder, v uint32) error { - for i := int(dc) - 1; i >= 0; i-- { - if err := e.DirectEncodeBit(v >> uint(i)); err != nil { - return err - } - } - return nil -} - -// Decode uses the range decoder to decode a value with the given number of -// given bits. The most-significant bit is decoded first. -func (dc directCodec) Decode(d *rangeDecoder) (v uint32, err error) { - for i := int(dc) - 1; i >= 0; i-- { - x, err := d.DirectDecodeBit() - if err != nil { - return 0, err - } - v = (v << 1) | x - } - return v, nil -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/distcodec.go b/vendor/github.com/ulikunitz/xz/lzma/distcodec.go deleted file mode 100644 index 60ed9aef13..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/distcodec.go +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lzma - -// Constants used by the distance codec. -const ( - // minimum supported distance - minDistance = 1 - // maximum supported distance, value is used for the eos marker. - maxDistance = 1 << 32 - // number of the supported len states - lenStates = 4 - // start for the position models - startPosModel = 4 - // first index with align bits support - endPosModel = 14 - // bits for the position slots - posSlotBits = 6 - // number of align bits - alignBits = 4 -) - -// distCodec provides encoding and decoding of distance values. -type distCodec struct { - posSlotCodecs [lenStates]treeCodec - posModel [endPosModel - startPosModel]treeReverseCodec - alignCodec treeReverseCodec -} - -// deepcopy initializes dc as deep copy of the source. -func (dc *distCodec) deepcopy(src *distCodec) { - if dc == src { - return - } - for i := range dc.posSlotCodecs { - dc.posSlotCodecs[i].deepcopy(&src.posSlotCodecs[i]) - } - for i := range dc.posModel { - dc.posModel[i].deepcopy(&src.posModel[i]) - } - dc.alignCodec.deepcopy(&src.alignCodec) -} - -// newDistCodec creates a new distance codec. -func (dc *distCodec) init() { - for i := range dc.posSlotCodecs { - dc.posSlotCodecs[i] = makeTreeCodec(posSlotBits) - } - for i := range dc.posModel { - posSlot := startPosModel + i - bits := (posSlot >> 1) - 1 - dc.posModel[i] = makeTreeReverseCodec(bits) - } - dc.alignCodec = makeTreeReverseCodec(alignBits) -} - -// lenState converts the value l to a supported lenState value. -func lenState(l uint32) uint32 { - if l >= lenStates { - l = lenStates - 1 - } - return l -} - -// Encode encodes the distance using the parameter l. Dist can have values from -// the full range of uint32 values. To get the distance offset the actual match -// distance has to be decreased by 1. A distance offset of 0xffffffff (eos) -// indicates the end of the stream. 
-func (dc *distCodec) Encode(e *rangeEncoder, dist uint32, l uint32) (err error) { - // Compute the posSlot using nlz32 - var posSlot uint32 - var bits uint32 - if dist < startPosModel { - posSlot = dist - } else { - bits = uint32(30 - nlz32(dist)) - posSlot = startPosModel - 2 + (bits << 1) - posSlot += (dist >> uint(bits)) & 1 - } - - if err = dc.posSlotCodecs[lenState(l)].Encode(e, posSlot); err != nil { - return - } - - switch { - case posSlot < startPosModel: - return nil - case posSlot < endPosModel: - tc := &dc.posModel[posSlot-startPosModel] - return tc.Encode(dist, e) - } - dic := directCodec(bits - alignBits) - if err = dic.Encode(e, dist>>alignBits); err != nil { - return - } - return dc.alignCodec.Encode(dist, e) -} - -// Decode decodes the distance offset using the parameter l. The dist value -// 0xffffffff (eos) indicates the end of the stream. Add one to the distance -// offset to get the actual match distance. -func (dc *distCodec) Decode(d *rangeDecoder, l uint32) (dist uint32, err error) { - posSlot, err := dc.posSlotCodecs[lenState(l)].Decode(d) - if err != nil { - return - } - - // posSlot equals distance - if posSlot < startPosModel { - return posSlot, nil - } - - // posSlot uses the individual models - bits := (posSlot >> 1) - 1 - dist = (2 | (posSlot & 1)) << bits - var u uint32 - if posSlot < endPosModel { - tc := &dc.posModel[posSlot-startPosModel] - if u, err = tc.Decode(d); err != nil { - return 0, err - } - dist += u - return dist, nil - } - - // posSlots use direct encoding and a single model for the four align - // bits. 
- dic := directCodec(bits - alignBits) - if u, err = dic.Decode(d); err != nil { - return 0, err - } - dist += u << alignBits - if u, err = dc.alignCodec.Decode(d); err != nil { - return 0, err - } - dist += u - return dist, nil -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/encoder.go b/vendor/github.com/ulikunitz/xz/lzma/encoder.go deleted file mode 100644 index 5ed057a718..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/encoder.go +++ /dev/null @@ -1,268 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "fmt" - "io" -) - -// opLenMargin provides the upper limit of the number of bytes required -// to encode a single operation. -const opLenMargin = 16 - -// compressFlags control the compression process. -type compressFlags uint32 - -// Values for compressFlags. -const ( - // all data should be compressed, even if compression is not - // optimal. - all compressFlags = 1 << iota -) - -// encoderFlags provide the flags for an encoder. -type encoderFlags uint32 - -// Flags for the encoder. -const ( - // eosMarker requests an EOS marker to be written. - eosMarker encoderFlags = 1 << iota -) - -// Encoder compresses data buffered in the encoder dictionary and writes -// it into a byte writer. -type encoder struct { - dict *encoderDict - state *state - re *rangeEncoder - start int64 - // generate eos marker - marker bool - limit bool - margin int -} - -// newEncoder creates a new encoder. If the byte writer must be -// limited use LimitedByteWriter provided by this package. The flags -// argument supports the eosMarker flag, controlling whether a -// terminating end-of-stream marker must be written. 
-func newEncoder(bw io.ByteWriter, state *state, dict *encoderDict, - flags encoderFlags) (e *encoder, err error) { - - re, err := newRangeEncoder(bw) - if err != nil { - return nil, err - } - e = &encoder{ - dict: dict, - state: state, - re: re, - marker: flags&eosMarker != 0, - start: dict.Pos(), - margin: opLenMargin, - } - if e.marker { - e.margin += 5 - } - return e, nil -} - -// Write writes the bytes from p into the dictionary. If not enough -// space is available the data in the dictionary buffer will be -// compressed to make additional space available. If the limit of the -// underlying writer has been reached ErrLimit will be returned. -func (e *encoder) Write(p []byte) (n int, err error) { - for { - k, err := e.dict.Write(p[n:]) - n += k - if err == ErrNoSpace { - if err = e.compress(0); err != nil { - return n, err - } - continue - } - return n, err - } -} - -// Reopen reopens the encoder with a new byte writer. -func (e *encoder) Reopen(bw io.ByteWriter) error { - var err error - if e.re, err = newRangeEncoder(bw); err != nil { - return err - } - e.start = e.dict.Pos() - e.limit = false - return nil -} - -// writeLiteral writes a literal into the LZMA stream -func (e *encoder) writeLiteral(l lit) error { - var err error - state, state2, _ := e.state.states(e.dict.Pos()) - if err = e.state.isMatch[state2].Encode(e.re, 0); err != nil { - return err - } - litState := e.state.litState(e.dict.ByteAt(1), e.dict.Pos()) - match := e.dict.ByteAt(int(e.state.rep[0]) + 1) - err = e.state.litCodec.Encode(e.re, l.b, state, match, litState) - if err != nil { - return err - } - e.state.updateStateLiteral() - return nil -} - -// iverson implements the Iverson operator as proposed by Donald Knuth in his -// book Concrete Mathematics. 
-func iverson(ok bool) uint32 { - if ok { - return 1 - } - return 0 -} - -// writeMatch writes a repetition operation into the operation stream -func (e *encoder) writeMatch(m match) error { - var err error - if !(minDistance <= m.distance && m.distance <= maxDistance) { - panic(fmt.Errorf("match distance %d out of range", m.distance)) - } - dist := uint32(m.distance - minDistance) - if !(minMatchLen <= m.n && m.n <= maxMatchLen) && - !(dist == e.state.rep[0] && m.n == 1) { - panic(fmt.Errorf( - "match length %d out of range; dist %d rep[0] %d", - m.n, dist, e.state.rep[0])) - } - state, state2, posState := e.state.states(e.dict.Pos()) - if err = e.state.isMatch[state2].Encode(e.re, 1); err != nil { - return err - } - g := 0 - for ; g < 4; g++ { - if e.state.rep[g] == dist { - break - } - } - b := iverson(g < 4) - if err = e.state.isRep[state].Encode(e.re, b); err != nil { - return err - } - n := uint32(m.n - minMatchLen) - if b == 0 { - // simple match - e.state.rep[3], e.state.rep[2], e.state.rep[1], e.state.rep[0] = - e.state.rep[2], e.state.rep[1], e.state.rep[0], dist - e.state.updateStateMatch() - if err = e.state.lenCodec.Encode(e.re, n, posState); err != nil { - return err - } - return e.state.distCodec.Encode(e.re, dist, n) - } - b = iverson(g != 0) - if err = e.state.isRepG0[state].Encode(e.re, b); err != nil { - return err - } - if b == 0 { - // g == 0 - b = iverson(m.n != 1) - if err = e.state.isRepG0Long[state2].Encode(e.re, b); err != nil { - return err - } - if b == 0 { - e.state.updateStateShortRep() - return nil - } - } else { - // g in {1,2,3} - b = iverson(g != 1) - if err = e.state.isRepG1[state].Encode(e.re, b); err != nil { - return err - } - if b == 1 { - // g in {2,3} - b = iverson(g != 2) - err = e.state.isRepG2[state].Encode(e.re, b) - if err != nil { - return err - } - if b == 1 { - e.state.rep[3] = e.state.rep[2] - } - e.state.rep[2] = e.state.rep[1] - } - e.state.rep[1] = e.state.rep[0] - e.state.rep[0] = dist - } - 
e.state.updateStateRep() - return e.state.repLenCodec.Encode(e.re, n, posState) -} - -// writeOp writes a single operation to the range encoder. The function -// checks whether there is enough space available to close the LZMA -// stream. -func (e *encoder) writeOp(op operation) error { - if e.re.Available() < int64(e.margin) { - return ErrLimit - } - switch x := op.(type) { - case lit: - return e.writeLiteral(x) - case match: - return e.writeMatch(x) - default: - panic("unexpected operation") - } -} - -// compress compressed data from the dictionary buffer. If the flag all -// is set, all data in the dictionary buffer will be compressed. The -// function returns ErrLimit if the underlying writer has reached its -// limit. -func (e *encoder) compress(flags compressFlags) error { - n := 0 - if flags&all == 0 { - n = maxMatchLen - 1 - } - d := e.dict - m := d.m - for d.Buffered() > n { - op := m.NextOp(e.state.rep) - if err := e.writeOp(op); err != nil { - return err - } - d.Discard(op.Len()) - } - return nil -} - -// eosMatch is a pseudo operation that indicates the end of the stream. -var eosMatch = match{distance: maxDistance, n: minMatchLen} - -// Close terminates the LZMA stream. If requested the end-of-stream -// marker will be written. If the byte writer limit has been or will be -// reached during compression of the remaining data in the buffer the -// LZMA stream will be closed and data will remain in the buffer. -func (e *encoder) Close() error { - err := e.compress(all) - if err != nil && err != ErrLimit { - return err - } - if e.marker { - if err := e.writeMatch(eosMatch); err != nil { - return err - } - } - err = e.re.Close() - return err -} - -// Compressed returns the number bytes of the input data that been -// compressed. 
-func (e *encoder) Compressed() int64 { - return e.dict.Pos() - e.start -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/encoderdict.go b/vendor/github.com/ulikunitz/xz/lzma/encoderdict.go deleted file mode 100644 index 056f89757c..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/encoderdict.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "fmt" - "io" -) - -// matcher is an interface that supports the identification of the next -// operation. -type matcher interface { - io.Writer - SetDict(d *encoderDict) - NextOp(rep [4]uint32) operation -} - -// encoderDict provides the dictionary of the encoder. It includes an -// additional buffer atop of the actual dictionary. -type encoderDict struct { - buf buffer - m matcher - head int64 - capacity int - // preallocated array - data [maxMatchLen]byte -} - -// newEncoderDict creates the encoder dictionary. The argument bufSize -// defines the size of the additional buffer. -func newEncoderDict(dictCap, bufSize int, m matcher) (d *encoderDict, err error) { - if !(1 <= dictCap && int64(dictCap) <= MaxDictCap) { - return nil, errors.New( - "lzma: dictionary capacity out of range") - } - if bufSize < 1 { - return nil, errors.New( - "lzma: buffer size must be larger than zero") - } - d = &encoderDict{ - buf: *newBuffer(dictCap + bufSize), - capacity: dictCap, - m: m, - } - m.SetDict(d) - return d, nil -} - -// Discard discards n bytes. Note that n must not be larger than -// MaxMatchLen. -func (d *encoderDict) Discard(n int) { - p := d.data[:n] - k, _ := d.buf.Read(p) - if k < n { - panic(fmt.Errorf("lzma: can't discard %d bytes", n)) - } - d.head += int64(n) - d.m.Write(p) -} - -// Len returns the data available in the encoder dictionary. 
-func (d *encoderDict) Len() int { - n := d.buf.Available() - if int64(n) > d.head { - return int(d.head) - } - return n -} - -// DictLen returns the actual length of data in the dictionary. -func (d *encoderDict) DictLen() int { - if d.head < int64(d.capacity) { - return int(d.head) - } - return d.capacity -} - -// Available returns the number of bytes that can be written by a -// following Write call. -func (d *encoderDict) Available() int { - return d.buf.Available() - d.DictLen() -} - -// Write writes data into the dictionary buffer. Note that the position -// of the dictionary head will not be moved. If there is not enough -// space in the buffer ErrNoSpace will be returned. -func (d *encoderDict) Write(p []byte) (n int, err error) { - m := d.Available() - if len(p) > m { - p = p[:m] - err = ErrNoSpace - } - var e error - if n, e = d.buf.Write(p); e != nil { - err = e - } - return n, err -} - -// Pos returns the position of the head. -func (d *encoderDict) Pos() int64 { return d.head } - -// ByteAt returns the byte at the given distance. -func (d *encoderDict) ByteAt(distance int) byte { - if !(0 < distance && distance <= d.Len()) { - return 0 - } - i := d.buf.rear - distance - if i < 0 { - i += len(d.buf.data) - } - return d.buf.data[i] -} - -// CopyN copies the last n bytes from the dictionary into the provided -// writer. This is used for copying uncompressed data into an -// uncompressed segment. -func (d *encoderDict) CopyN(w io.Writer, n int) (written int, err error) { - if n <= 0 { - return 0, nil - } - m := d.Len() - if n > m { - n = m - err = ErrNoSpace - } - i := d.buf.rear - n - var e error - if i < 0 { - i += len(d.buf.data) - if written, e = w.Write(d.buf.data[i:]); e != nil { - return written, e - } - i = 0 - } - var k int - k, e = w.Write(d.buf.data[i:d.buf.rear]) - written += k - if e != nil { - err = e - } - return written, err -} - -// Buffered returns the number of bytes in the buffer. 
-func (d *encoderDict) Buffered() int { return d.buf.Buffered() } diff --git a/vendor/github.com/ulikunitz/xz/lzma/fox.lzma b/vendor/github.com/ulikunitz/xz/lzma/fox.lzma deleted file mode 100644 index 5edad633266eb5173a7c39761dc8b9e71efbfe80..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 67 zcma!LU}#|Y4+RWbQXGqzRntCtR~%i$`d{za%}WYWYfXMUl6~Q5_UjH?=5CuO0w(I5 UuQ#VXelz{mI_3ZW`W7$%0HEw6g#Z8m diff --git a/vendor/github.com/ulikunitz/xz/lzma/hashtable.go b/vendor/github.com/ulikunitz/xz/lzma/hashtable.go deleted file mode 100644 index 0fb7910bc0..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/hashtable.go +++ /dev/null @@ -1,309 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "fmt" - - "github.com/ulikunitz/xz/internal/hash" -) - -/* For compression we need to find byte sequences that match the byte - * sequence at the dictionary head. A hash table is a simple method to - * provide this capability. - */ - -// maxMatches limits the number of matches requested from the Matches -// function. This controls the speed of the overall encoding. -const maxMatches = 16 - -// shortDists defines the number of short distances supported by the -// implementation. -const shortDists = 8 - -// The minimum is somehow arbitrary but the maximum is limited by the -// memory requirements of the hash table. -const ( - minTableExponent = 9 - maxTableExponent = 20 -) - -// newRoller contains the function used to create an instance of the -// hash.Roller. -var newRoller = func(n int) hash.Roller { return hash.NewCyclicPoly(n) } - -// hashTable stores the hash table including the rolling hash method. -// -// We implement chained hashing into a circular buffer. 
Each entry in -// the circular buffer stores the delta distance to the next position with a -// word that has the same hash value. -type hashTable struct { - dict *encoderDict - // actual hash table - t []int64 - // circular list data with the offset to the next word - data []uint32 - front int - // mask for computing the index for the hash table - mask uint64 - // hash offset; initial value is -int64(wordLen) - hoff int64 - // length of the hashed word - wordLen int - // hash roller for computing the hash values for the Write - // method - wr hash.Roller - // hash roller for computing arbitrary hashes - hr hash.Roller - // preallocated slices - p [maxMatches]int64 - distances [maxMatches + shortDists]int -} - -// hashTableExponent derives the hash table exponent from the dictionary -// capacity. -func hashTableExponent(n uint32) int { - e := 30 - nlz32(n) - switch { - case e < minTableExponent: - e = minTableExponent - case e > maxTableExponent: - e = maxTableExponent - } - return e -} - -// newHashTable creates a new hash table for words of length wordLen -func newHashTable(capacity int, wordLen int) (t *hashTable, err error) { - if !(0 < capacity) { - return nil, errors.New( - "newHashTable: capacity must not be negative") - } - exp := hashTableExponent(uint32(capacity)) - if !(1 <= wordLen && wordLen <= 4) { - return nil, errors.New("newHashTable: " + - "argument wordLen out of range") - } - n := 1 << uint(exp) - if n <= 0 { - panic("newHashTable: exponent is too large") - } - t = &hashTable{ - t: make([]int64, n), - data: make([]uint32, capacity), - mask: (uint64(1) << uint(exp)) - 1, - hoff: -int64(wordLen), - wordLen: wordLen, - wr: newRoller(wordLen), - hr: newRoller(wordLen), - } - return t, nil -} - -func (t *hashTable) SetDict(d *encoderDict) { t.dict = d } - -// buffered returns the number of bytes that are currently hashed. 
-func (t *hashTable) buffered() int { - n := t.hoff + 1 - switch { - case n <= 0: - return 0 - case n >= int64(len(t.data)): - return len(t.data) - } - return int(n) -} - -// addIndex adds n to an index ensuring that is stays inside the -// circular buffer for the hash chain. -func (t *hashTable) addIndex(i, n int) int { - i += n - len(t.data) - if i < 0 { - i += len(t.data) - } - return i -} - -// putDelta puts the delta instance at the current front of the circular -// chain buffer. -func (t *hashTable) putDelta(delta uint32) { - t.data[t.front] = delta - t.front = t.addIndex(t.front, 1) -} - -// putEntry puts a new entry into the hash table. If there is already a -// value stored it is moved into the circular chain buffer. -func (t *hashTable) putEntry(h uint64, pos int64) { - if pos < 0 { - return - } - i := h & t.mask - old := t.t[i] - 1 - t.t[i] = pos + 1 - var delta int64 - if old >= 0 { - delta = pos - old - if delta > 1<<32-1 || delta > int64(t.buffered()) { - delta = 0 - } - } - t.putDelta(uint32(delta)) -} - -// WriteByte converts a single byte into a hash and puts them into the hash -// table. -func (t *hashTable) WriteByte(b byte) error { - h := t.wr.RollByte(b) - t.hoff++ - t.putEntry(h, t.hoff) - return nil -} - -// Write converts the bytes provided into hash tables and stores the -// abbreviated offsets into the hash table. The method will never return an -// error. -func (t *hashTable) Write(p []byte) (n int, err error) { - for _, b := range p { - // WriteByte doesn't generate an error. - t.WriteByte(b) - } - return len(p), nil -} - -// getMatches the matches for a specific hash. The functions returns the -// number of positions found. -// -// TODO: Make a getDistances because that we are actually interested in. 
-func (t *hashTable) getMatches(h uint64, positions []int64) (n int) { - if t.hoff < 0 || len(positions) == 0 { - return 0 - } - buffered := t.buffered() - tailPos := t.hoff + 1 - int64(buffered) - rear := t.front - buffered - if rear >= 0 { - rear -= len(t.data) - } - // get the slot for the hash - pos := t.t[h&t.mask] - 1 - delta := pos - tailPos - for { - if delta < 0 { - return n - } - positions[n] = tailPos + delta - n++ - if n >= len(positions) { - return n - } - i := rear + int(delta) - if i < 0 { - i += len(t.data) - } - u := t.data[i] - if u == 0 { - return n - } - delta -= int64(u) - } -} - -// hash computes the rolling hash for the word stored in p. For correct -// results its length must be equal to t.wordLen. -func (t *hashTable) hash(p []byte) uint64 { - var h uint64 - for _, b := range p { - h = t.hr.RollByte(b) - } - return h -} - -// Matches fills the positions slice with potential matches. The -// functions returns the number of positions filled into positions. The -// byte slice p must have word length of the hash table. -func (t *hashTable) Matches(p []byte, positions []int64) int { - if len(p) != t.wordLen { - panic(fmt.Errorf( - "byte slice must have length %d", t.wordLen)) - } - h := t.hash(p) - return t.getMatches(h, positions) -} - -// NextOp identifies the next operation using the hash table. -// -// TODO: Use all repetitions to find matches. 
-func (t *hashTable) NextOp(rep [4]uint32) operation { - // get positions - data := t.dict.data[:maxMatchLen] - n, _ := t.dict.buf.Peek(data) - data = data[:n] - var p []int64 - if n < t.wordLen { - p = t.p[:0] - } else { - p = t.p[:maxMatches] - n = t.Matches(data[:t.wordLen], p) - p = p[:n] - } - - // convert positions in potential distances - head := t.dict.head - dists := append(t.distances[:0], 1, 2, 3, 4, 5, 6, 7, 8) - for _, pos := range p { - dis := int(head - pos) - if dis > shortDists { - dists = append(dists, dis) - } - } - - // check distances - var m match - dictLen := t.dict.DictLen() - for _, dist := range dists { - if dist > dictLen { - continue - } - - // Here comes a trick. We are only interested in matches - // that are longer than the matches we have been found - // before. So before we test the whole byte sequence at - // the given distance, we test the first byte that would - // make the match longer. If it doesn't match the byte - // to match, we don't to care any longer. - i := t.dict.buf.rear - dist + m.n - if i < 0 { - i += len(t.dict.buf.data) - } - if t.dict.buf.data[i] != data[m.n] { - // We can't get a longer match. Jump to the next - // distance. - continue - } - - n := t.dict.buf.matchLen(dist, data) - switch n { - case 0: - continue - case 1: - if uint32(dist-minDistance) != rep[0] { - continue - } - } - if n > m.n { - m = match{int64(dist), n} - if n == len(data) { - // No better match will be found. - break - } - } - } - - if m.n == 0 { - return lit{data[0]} - } - return m -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/header.go b/vendor/github.com/ulikunitz/xz/lzma/header.go deleted file mode 100644 index 04276c8163..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/header.go +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package lzma - -import ( - "errors" - "fmt" -) - -// uint32LE reads an uint32 integer from a byte slice -func uint32LE(b []byte) uint32 { - x := uint32(b[3]) << 24 - x |= uint32(b[2]) << 16 - x |= uint32(b[1]) << 8 - x |= uint32(b[0]) - return x -} - -// uint64LE converts the uint64 value stored as little endian to an uint64 -// value. -func uint64LE(b []byte) uint64 { - x := uint64(b[7]) << 56 - x |= uint64(b[6]) << 48 - x |= uint64(b[5]) << 40 - x |= uint64(b[4]) << 32 - x |= uint64(b[3]) << 24 - x |= uint64(b[2]) << 16 - x |= uint64(b[1]) << 8 - x |= uint64(b[0]) - return x -} - -// putUint32LE puts an uint32 integer into a byte slice that must have at least -// a length of 4 bytes. -func putUint32LE(b []byte, x uint32) { - b[0] = byte(x) - b[1] = byte(x >> 8) - b[2] = byte(x >> 16) - b[3] = byte(x >> 24) -} - -// putUint64LE puts the uint64 value into the byte slice as little endian -// value. The byte slice b must have at least place for 8 bytes. -func putUint64LE(b []byte, x uint64) { - b[0] = byte(x) - b[1] = byte(x >> 8) - b[2] = byte(x >> 16) - b[3] = byte(x >> 24) - b[4] = byte(x >> 32) - b[5] = byte(x >> 40) - b[6] = byte(x >> 48) - b[7] = byte(x >> 56) -} - -// noHeaderSize defines the value of the length field in the LZMA header. -const noHeaderSize uint64 = 1<<64 - 1 - -// HeaderLen provides the length of the LZMA file header. -const HeaderLen = 13 - -// header represents the header of an LZMA file. -type header struct { - properties Properties - dictCap int - // uncompressed size; negative value if no size is given - size int64 -} - -// marshalBinary marshals the header. 
-func (h *header) marshalBinary() (data []byte, err error) { - if err = h.properties.verify(); err != nil { - return nil, err - } - if !(0 <= h.dictCap && int64(h.dictCap) <= MaxDictCap) { - return nil, fmt.Errorf("lzma: DictCap %d out of range", - h.dictCap) - } - - data = make([]byte, 13) - - // property byte - data[0] = h.properties.Code() - - // dictionary capacity - putUint32LE(data[1:5], uint32(h.dictCap)) - - // uncompressed size - var s uint64 - if h.size > 0 { - s = uint64(h.size) - } else { - s = noHeaderSize - } - putUint64LE(data[5:], s) - - return data, nil -} - -// unmarshalBinary unmarshals the header. -func (h *header) unmarshalBinary(data []byte) error { - if len(data) != HeaderLen { - return errors.New("lzma.unmarshalBinary: data has wrong length") - } - - // properties - var err error - if h.properties, err = PropertiesForCode(data[0]); err != nil { - return err - } - - // dictionary capacity - h.dictCap = int(uint32LE(data[1:])) - if h.dictCap < 0 { - return errors.New( - "LZMA header: dictionary capacity exceeds maximum " + - "integer") - } - - // uncompressed size - s := uint64LE(data[5:]) - if s == noHeaderSize { - h.size = -1 - } else { - h.size = int64(s) - if h.size < 0 { - return errors.New( - "LZMA header: uncompressed size " + - "out of int64 range") - } - } - - return nil -} - -// validDictCap checks whether the dictionary capacity is correct. This -// is used to weed out wrong file headers. -func validDictCap(dictcap int) bool { - if int64(dictcap) == MaxDictCap { - return true - } - for n := uint(10); n < 32; n++ { - if dictcap == 1<= 10 or 2^32-1. If -// there is an explicit size it must not exceed 256 GiB. The length of -// the data argument must be HeaderLen. 
-func ValidHeader(data []byte) bool { - var h header - if err := h.unmarshalBinary(data); err != nil { - return false - } - if !validDictCap(h.dictCap) { - return false - } - return h.size < 0 || h.size <= 1<<38 -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/header2.go b/vendor/github.com/ulikunitz/xz/lzma/header2.go deleted file mode 100644 index be54dd85fd..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/header2.go +++ /dev/null @@ -1,398 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "fmt" - "io" -) - -const ( - // maximum size of compressed data in a chunk - maxCompressed = 1 << 16 - // maximum size of uncompressed data in a chunk - maxUncompressed = 1 << 21 -) - -// chunkType represents the type of an LZMA2 chunk. Note that this -// value is an internal representation and no actual encoding of a LZMA2 -// chunk header. -type chunkType byte - -// Possible values for the chunk type. -const ( - // end of stream - cEOS chunkType = iota - // uncompressed; reset dictionary - cUD - // uncompressed; no reset of dictionary - cU - // LZMA compressed; no reset - cL - // LZMA compressed; reset state - cLR - // LZMA compressed; reset state; new property value - cLRN - // LZMA compressed; reset state; new property value; reset dictionary - cLRND -) - -// chunkTypeStrings provide a string representation for the chunk types. -var chunkTypeStrings = [...]string{ - cEOS: "EOS", - cU: "U", - cUD: "UD", - cL: "L", - cLR: "LR", - cLRN: "LRN", - cLRND: "LRND", -} - -// String returns a string representation of the chunk type. -func (c chunkType) String() string { - if !(cEOS <= c && c <= cLRND) { - return "unknown" - } - return chunkTypeStrings[c] -} - -// Actual encodings for the chunk types in the value. 
Note that the high -// uncompressed size bits are stored in the header byte additionally. -const ( - hEOS = 0 - hUD = 1 - hU = 2 - hL = 1 << 7 - hLR = 1<<7 | 1<<5 - hLRN = 1<<7 | 1<<6 - hLRND = 1<<7 | 1<<6 | 1<<5 -) - -// errHeaderByte indicates an unsupported value for the chunk header -// byte. These bytes starts the variable-length chunk header. -var errHeaderByte = errors.New("lzma: unsupported chunk header byte") - -// headerChunkType converts the header byte into a chunk type. It -// ignores the uncompressed size bits in the chunk header byte. -func headerChunkType(h byte) (c chunkType, err error) { - if h&hL == 0 { - // no compression - switch h { - case hEOS: - c = cEOS - case hUD: - c = cUD - case hU: - c = cU - default: - return 0, errHeaderByte - } - return - } - switch h & hLRND { - case hL: - c = cL - case hLR: - c = cLR - case hLRN: - c = cLRN - case hLRND: - c = cLRND - default: - return 0, errHeaderByte - } - return -} - -// uncompressedHeaderLen provides the length of an uncompressed header -const uncompressedHeaderLen = 3 - -// headerLen returns the length of the LZMA2 header for a given chunk -// type. -func headerLen(c chunkType) int { - switch c { - case cEOS: - return 1 - case cU, cUD: - return uncompressedHeaderLen - case cL, cLR: - return 5 - case cLRN, cLRND: - return 6 - } - panic(fmt.Errorf("unsupported chunk type %d", c)) -} - -// chunkHeader represents the contents of a chunk header. -type chunkHeader struct { - ctype chunkType - uncompressed uint32 - compressed uint16 - props Properties -} - -// String returns a string representation of the chunk header. -func (h *chunkHeader) String() string { - return fmt.Sprintf("%s %d %d %s", h.ctype, h.uncompressed, - h.compressed, &h.props) -} - -// UnmarshalBinary reads the content of the chunk header from the data -// slice. The slice must have the correct length. 
-func (h *chunkHeader) UnmarshalBinary(data []byte) error { - if len(data) == 0 { - return errors.New("no data") - } - c, err := headerChunkType(data[0]) - if err != nil { - return err - } - - n := headerLen(c) - if len(data) < n { - return errors.New("incomplete data") - } - if len(data) > n { - return errors.New("invalid data length") - } - - *h = chunkHeader{ctype: c} - if c == cEOS { - return nil - } - - h.uncompressed = uint32(uint16BE(data[1:3])) - if c <= cU { - return nil - } - h.uncompressed |= uint32(data[0]&^hLRND) << 16 - - h.compressed = uint16BE(data[3:5]) - if c <= cLR { - return nil - } - - h.props, err = PropertiesForCode(data[5]) - return err -} - -// MarshalBinary encodes the chunk header value. The function checks -// whether the content of the chunk header is correct. -func (h *chunkHeader) MarshalBinary() (data []byte, err error) { - if h.ctype > cLRND { - return nil, errors.New("invalid chunk type") - } - if err = h.props.verify(); err != nil { - return nil, err - } - - data = make([]byte, headerLen(h.ctype)) - - switch h.ctype { - case cEOS: - return data, nil - case cUD: - data[0] = hUD - case cU: - data[0] = hU - case cL: - data[0] = hL - case cLR: - data[0] = hLR - case cLRN: - data[0] = hLRN - case cLRND: - data[0] = hLRND - } - - putUint16BE(data[1:3], uint16(h.uncompressed)) - if h.ctype <= cU { - return data, nil - } - data[0] |= byte(h.uncompressed>>16) &^ hLRND - - putUint16BE(data[3:5], h.compressed) - if h.ctype <= cLR { - return data, nil - } - - data[5] = h.props.Code() - return data, nil -} - -// readChunkHeader reads the chunk header from the IO reader. 
-func readChunkHeader(r io.Reader) (h *chunkHeader, err error) { - p := make([]byte, 1, 6) - if _, err = io.ReadFull(r, p); err != nil { - return - } - c, err := headerChunkType(p[0]) - if err != nil { - return - } - p = p[:headerLen(c)] - if _, err = io.ReadFull(r, p[1:]); err != nil { - return - } - h = new(chunkHeader) - if err = h.UnmarshalBinary(p); err != nil { - return nil, err - } - return h, nil -} - -// uint16BE converts a big-endian uint16 representation to an uint16 -// value. -func uint16BE(p []byte) uint16 { - return uint16(p[0])<<8 | uint16(p[1]) -} - -// putUint16BE puts the big-endian uint16 presentation into the given -// slice. -func putUint16BE(p []byte, x uint16) { - p[0] = byte(x >> 8) - p[1] = byte(x) -} - -// chunkState is used to manage the state of the chunks -type chunkState byte - -// start and stop define the initial and terminating state of the chunk -// state -const ( - start chunkState = 'S' - stop chunkState = 'T' -) - -// errors for the chunk state handling -var ( - errChunkType = errors.New("lzma: unexpected chunk type") - errState = errors.New("lzma: wrong chunk state") -) - -// next transitions state based on chunk type input -func (c *chunkState) next(ctype chunkType) error { - switch *c { - // start state - case 'S': - switch ctype { - case cEOS: - *c = 'T' - case cUD: - *c = 'R' - case cLRND: - *c = 'L' - default: - return errChunkType - } - // normal LZMA mode - case 'L': - switch ctype { - case cEOS: - *c = 'T' - case cUD: - *c = 'R' - case cU: - *c = 'U' - case cL, cLR, cLRN, cLRND: - break - default: - return errChunkType - } - // reset required - case 'R': - switch ctype { - case cEOS: - *c = 'T' - case cUD, cU: - break - case cLRN, cLRND: - *c = 'L' - default: - return errChunkType - } - // uncompressed - case 'U': - switch ctype { - case cEOS: - *c = 'T' - case cUD: - *c = 'R' - case cU: - break - case cL, cLR, cLRN, cLRND: - *c = 'L' - default: - return errChunkType - } - // terminal state - case 'T': - return 
errChunkType - default: - return errState - } - return nil -} - -// defaultChunkType returns the default chunk type for each chunk state. -func (c chunkState) defaultChunkType() chunkType { - switch c { - case 'S': - return cLRND - case 'L', 'U': - return cL - case 'R': - return cLRN - default: - // no error - return cEOS - } -} - -// maxDictCap defines the maximum dictionary capacity supported by the -// LZMA2 dictionary capacity encoding. -const maxDictCap = 1<<32 - 1 - -// maxDictCapCode defines the maximum dictionary capacity code. -const maxDictCapCode = 40 - -// The function decodes the dictionary capacity byte, but doesn't change -// for the correct range of the given byte. -func decodeDictCap(c byte) int64 { - return (2 | int64(c)&1) << (11 + (c>>1)&0x1f) -} - -// DecodeDictCap decodes the encoded dictionary capacity. The function -// returns an error if the code is out of range. -func DecodeDictCap(c byte) (n int64, err error) { - if c >= maxDictCapCode { - if c == maxDictCapCode { - return maxDictCap, nil - } - return 0, errors.New("lzma: invalid dictionary size code") - } - return decodeDictCap(c), nil -} - -// EncodeDictCap encodes a dictionary capacity. The function returns the -// code for the capacity that is greater or equal n. If n exceeds the -// maximum support dictionary capacity, the maximum value is returned. -func EncodeDictCap(n int64) byte { - a, b := byte(0), byte(40) - for a < b { - c := a + (b-a)>>1 - m := decodeDictCap(c) - if n <= m { - if n == m { - return c - } - b = c - } else { - a = c + 1 - } - } - return a -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/lengthcodec.go b/vendor/github.com/ulikunitz/xz/lzma/lengthcodec.go deleted file mode 100644 index 6e0edfc8c0..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/lengthcodec.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import "errors" - -// maxPosBits defines the number of bits of the position value that are used to -// to compute the posState value. The value is used to select the tree codec -// for length encoding and decoding. -const maxPosBits = 4 - -// minMatchLen and maxMatchLen give the minimum and maximum values for -// encoding and decoding length values. minMatchLen is also used as base -// for the encoded length values. -const ( - minMatchLen = 2 - maxMatchLen = minMatchLen + 16 + 256 - 1 -) - -// lengthCodec support the encoding of the length value. -type lengthCodec struct { - choice [2]prob - low [1 << maxPosBits]treeCodec - mid [1 << maxPosBits]treeCodec - high treeCodec -} - -// deepcopy initializes the lc value as deep copy of the source value. -func (lc *lengthCodec) deepcopy(src *lengthCodec) { - if lc == src { - return - } - lc.choice = src.choice - for i := range lc.low { - lc.low[i].deepcopy(&src.low[i]) - } - for i := range lc.mid { - lc.mid[i].deepcopy(&src.mid[i]) - } - lc.high.deepcopy(&src.high) -} - -// init initializes a new length codec. -func (lc *lengthCodec) init() { - for i := range lc.choice { - lc.choice[i] = probInit - } - for i := range lc.low { - lc.low[i] = makeTreeCodec(3) - } - for i := range lc.mid { - lc.mid[i] = makeTreeCodec(3) - } - lc.high = makeTreeCodec(8) -} - -// Encode encodes the length offset. The length offset l can be compute by -// subtracting minMatchLen (2) from the actual length. 
-// -// l = length - minMatchLen -// -func (lc *lengthCodec) Encode(e *rangeEncoder, l uint32, posState uint32, -) (err error) { - if l > maxMatchLen-minMatchLen { - return errors.New("lengthCodec.Encode: l out of range") - } - if l < 8 { - if err = lc.choice[0].Encode(e, 0); err != nil { - return - } - return lc.low[posState].Encode(e, l) - } - if err = lc.choice[0].Encode(e, 1); err != nil { - return - } - if l < 16 { - if err = lc.choice[1].Encode(e, 0); err != nil { - return - } - return lc.mid[posState].Encode(e, l-8) - } - if err = lc.choice[1].Encode(e, 1); err != nil { - return - } - if err = lc.high.Encode(e, l-16); err != nil { - return - } - return nil -} - -// Decode reads the length offset. Add minMatchLen to compute the actual length -// to the length offset l. -func (lc *lengthCodec) Decode(d *rangeDecoder, posState uint32, -) (l uint32, err error) { - var b uint32 - if b, err = lc.choice[0].Decode(d); err != nil { - return - } - if b == 0 { - l, err = lc.low[posState].Decode(d) - return - } - if b, err = lc.choice[1].Decode(d); err != nil { - return - } - if b == 0 { - l, err = lc.mid[posState].Decode(d) - l += 8 - return - } - l, err = lc.high.Decode(d) - l += 16 - return -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/literalcodec.go b/vendor/github.com/ulikunitz/xz/lzma/literalcodec.go deleted file mode 100644 index 0bfc763cee..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/literalcodec.go +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -// literalCodec supports the encoding of literal. It provides 768 probability -// values per literal state. The upper 512 probabilities are used with the -// context of a match bit. -type literalCodec struct { - probs []prob -} - -// deepcopy initializes literal codec c as a deep copy of the source. 
-func (c *literalCodec) deepcopy(src *literalCodec) { - if c == src { - return - } - c.probs = make([]prob, len(src.probs)) - copy(c.probs, src.probs) -} - -// init initializes the literal codec. -func (c *literalCodec) init(lc, lp int) { - switch { - case !(minLC <= lc && lc <= maxLC): - panic("lc out of range") - case !(minLP <= lp && lp <= maxLP): - panic("lp out of range") - } - c.probs = make([]prob, 0x300<= 7 { - m := uint32(match) - for { - matchBit := (m >> 7) & 1 - m <<= 1 - bit := (r >> 7) & 1 - r <<= 1 - i := ((1 + matchBit) << 8) | symbol - if err = probs[i].Encode(e, bit); err != nil { - return - } - symbol = (symbol << 1) | bit - if matchBit != bit { - break - } - if symbol >= 0x100 { - break - } - } - } - for symbol < 0x100 { - bit := (r >> 7) & 1 - r <<= 1 - if err = probs[symbol].Encode(e, bit); err != nil { - return - } - symbol = (symbol << 1) | bit - } - return nil -} - -// Decode decodes a literal byte using the range decoder as well as the LZMA -// state, a match byte, and the literal state. -func (c *literalCodec) Decode(d *rangeDecoder, - state uint32, match byte, litState uint32, -) (s byte, err error) { - k := litState * 0x300 - probs := c.probs[k : k+0x300] - symbol := uint32(1) - if state >= 7 { - m := uint32(match) - for { - matchBit := (m >> 7) & 1 - m <<= 1 - i := ((1 + matchBit) << 8) | symbol - bit, err := d.DecodeBit(&probs[i]) - if err != nil { - return 0, err - } - symbol = (symbol << 1) | bit - if matchBit != bit { - break - } - if symbol >= 0x100 { - break - } - } - } - for symbol < 0x100 { - bit, err := d.DecodeBit(&probs[symbol]) - if err != nil { - return 0, err - } - symbol = (symbol << 1) | bit - } - s = byte(symbol - 0x100) - return s, nil -} - -// minLC and maxLC define the range for LC values. -const ( - minLC = 0 - maxLC = 8 -) - -// minLC and maxLC define the range for LP values. 
-const ( - minLP = 0 - maxLP = 4 -) diff --git a/vendor/github.com/ulikunitz/xz/lzma/matchalgorithm.go b/vendor/github.com/ulikunitz/xz/lzma/matchalgorithm.go deleted file mode 100644 index 96ebda0fd3..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/matchalgorithm.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import "errors" - -// MatchAlgorithm identifies an algorithm to find matches in the -// dictionary. -type MatchAlgorithm byte - -// Supported matcher algorithms. -const ( - HashTable4 MatchAlgorithm = iota - BinaryTree -) - -// maStrings are used by the String method. -var maStrings = map[MatchAlgorithm]string{ - HashTable4: "HashTable4", - BinaryTree: "BinaryTree", -} - -// String returns a string representation of the Matcher. -func (a MatchAlgorithm) String() string { - if s, ok := maStrings[a]; ok { - return s - } - return "unknown" -} - -var errUnsupportedMatchAlgorithm = errors.New( - "lzma: unsupported match algorithm value") - -// verify checks whether the matcher value is supported. -func (a MatchAlgorithm) verify() error { - if _, ok := maStrings[a]; !ok { - return errUnsupportedMatchAlgorithm - } - return nil -} - -func (a MatchAlgorithm) new(dictCap int) (m matcher, err error) { - switch a { - case HashTable4: - return newHashTable(dictCap, 4) - case BinaryTree: - return newBinTree(dictCap) - } - return nil, errUnsupportedMatchAlgorithm -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/operation.go b/vendor/github.com/ulikunitz/xz/lzma/operation.go deleted file mode 100644 index 026ce48af2..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/operation.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "fmt" - "unicode" -) - -// operation represents an operation on the dictionary during encoding or -// decoding. -type operation interface { - Len() int -} - -// rep represents a repetition at the given distance and the given length -type match struct { - // supports all possible distance values, including the eos marker - distance int64 - // length - n int -} - -// Len returns the number of bytes matched. -func (m match) Len() int { - return m.n -} - -// String returns a string representation for the repetition. -func (m match) String() string { - return fmt.Sprintf("M{%d,%d}", m.distance, m.n) -} - -// lit represents a single byte literal. -type lit struct { - b byte -} - -// Len returns 1 for the single byte literal. -func (l lit) Len() int { - return 1 -} - -// String returns a string representation for the literal. -func (l lit) String() string { - var c byte - if unicode.IsPrint(rune(l.b)) { - c = l.b - } else { - c = '.' - } - return fmt.Sprintf("L{%c/%02x}", c, l.b) -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/prob.go b/vendor/github.com/ulikunitz/xz/lzma/prob.go deleted file mode 100644 index 9a2648e0f7..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/prob.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -// movebits defines the number of bits used for the updates of probability -// values. -const movebits = 5 - -// probbits defines the number of bits of a probability value. -const probbits = 11 - -// probInit defines 0.5 as initial value for prob values. -const probInit prob = 1 << (probbits - 1) - -// Type prob represents probabilities. The type can also be used to encode and -// decode single bits. 
-type prob uint16 - -// Dec decreases the probability. The decrease is proportional to the -// probability value. -func (p *prob) dec() { - *p -= *p >> movebits -} - -// Inc increases the probability. The Increase is proportional to the -// difference of 1 and the probability value. -func (p *prob) inc() { - *p += ((1 << probbits) - *p) >> movebits -} - -// Computes the new bound for a given range using the probability value. -func (p prob) bound(r uint32) uint32 { - return (r >> probbits) * uint32(p) -} - -// Bits returns 1. One is the number of bits that can be encoded or decoded -// with a single prob value. -func (p prob) Bits() int { - return 1 -} - -// Encode encodes the least-significant bit of v. Note that the p value will be -// changed. -func (p *prob) Encode(e *rangeEncoder, v uint32) error { - return e.EncodeBit(v, p) -} - -// Decode decodes a single bit. Note that the p value will change. -func (p *prob) Decode(d *rangeDecoder) (v uint32, err error) { - return d.DecodeBit(p) -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/properties.go b/vendor/github.com/ulikunitz/xz/lzma/properties.go deleted file mode 100644 index f229fc9fe8..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/properties.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "fmt" -) - -// maximum and minimum values for the LZMA properties. -const ( - minPB = 0 - maxPB = 4 -) - -// maxPropertyCode is the possible maximum of a properties code byte. -const maxPropertyCode = (maxPB+1)*(maxLP+1)*(maxLC+1) - 1 - -// Properties contains the parameters LC, LP and PB. The parameter LC -// defines the number of literal context bits; parameter LP the number -// of literal position bits and PB the number of position bits. 
-type Properties struct { - LC int - LP int - PB int -} - -// String returns the properties in a string representation. -func (p *Properties) String() string { - return fmt.Sprintf("LC %d LP %d PB %d", p.LC, p.LP, p.PB) -} - -// PropertiesForCode converts a properties code byte into a Properties value. -func PropertiesForCode(code byte) (p Properties, err error) { - if code > maxPropertyCode { - return p, errors.New("lzma: invalid properties code") - } - p.LC = int(code % 9) - code /= 9 - p.LP = int(code % 5) - code /= 5 - p.PB = int(code % 5) - return p, err -} - -// verify checks the properties for correctness. -func (p *Properties) verify() error { - if p == nil { - return errors.New("lzma: properties are nil") - } - if !(minLC <= p.LC && p.LC <= maxLC) { - return errors.New("lzma: lc out of range") - } - if !(minLP <= p.LP && p.LP <= maxLP) { - return errors.New("lzma: lp out of range") - } - if !(minPB <= p.PB && p.PB <= maxPB) { - return errors.New("lzma: pb out of range") - } - return nil -} - -// Code converts the properties to a byte. The function assumes that -// the properties components are all in range. -func (p Properties) Code() byte { - return byte((p.PB*5+p.LP)*9 + p.LC) -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/rangecodec.go b/vendor/github.com/ulikunitz/xz/lzma/rangecodec.go deleted file mode 100644 index 57f1ab904a..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/rangecodec.go +++ /dev/null @@ -1,222 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "io" -) - -// rangeEncoder implements range encoding of single bits. The low value can -// overflow therefore we need uint64. The cache value is used to handle -// overflows. 
-type rangeEncoder struct { - lbw *LimitedByteWriter - nrange uint32 - low uint64 - cacheLen int64 - cache byte -} - -// maxInt64 provides the maximal value of the int64 type -const maxInt64 = 1<<63 - 1 - -// newRangeEncoder creates a new range encoder. -func newRangeEncoder(bw io.ByteWriter) (re *rangeEncoder, err error) { - lbw, ok := bw.(*LimitedByteWriter) - if !ok { - lbw = &LimitedByteWriter{BW: bw, N: maxInt64} - } - return &rangeEncoder{ - lbw: lbw, - nrange: 0xffffffff, - cacheLen: 1}, nil -} - -// Available returns the number of bytes that still can be written. The -// method takes the bytes that will be currently written by Close into -// account. -func (e *rangeEncoder) Available() int64 { - return e.lbw.N - (e.cacheLen + 4) -} - -// writeByte writes a single byte to the underlying writer. An error is -// returned if the limit is reached. The written byte will be counted if -// the underlying writer doesn't return an error. -func (e *rangeEncoder) writeByte(c byte) error { - if e.Available() < 1 { - return ErrLimit - } - return e.lbw.WriteByte(c) -} - -// DirectEncodeBit encodes the least-significant bit of b with probability 1/2. -func (e *rangeEncoder) DirectEncodeBit(b uint32) error { - e.nrange >>= 1 - e.low += uint64(e.nrange) & (0 - (uint64(b) & 1)) - - // normalize - const top = 1 << 24 - if e.nrange >= top { - return nil - } - e.nrange <<= 8 - return e.shiftLow() -} - -// EncodeBit encodes the least significant bit of b. The p value will be -// updated by the function depending on the bit encoded. -func (e *rangeEncoder) EncodeBit(b uint32, p *prob) error { - bound := p.bound(e.nrange) - if b&1 == 0 { - e.nrange = bound - p.inc() - } else { - e.low += uint64(bound) - e.nrange -= bound - p.dec() - } - - // normalize - const top = 1 << 24 - if e.nrange >= top { - return nil - } - e.nrange <<= 8 - return e.shiftLow() -} - -// Close writes a complete copy of the low value. 
-func (e *rangeEncoder) Close() error { - for i := 0; i < 5; i++ { - if err := e.shiftLow(); err != nil { - return err - } - } - return nil -} - -// shiftLow shifts the low value for 8 bit. The shifted byte is written into -// the byte writer. The cache value is used to handle overflows. -func (e *rangeEncoder) shiftLow() error { - if uint32(e.low) < 0xff000000 || (e.low>>32) != 0 { - tmp := e.cache - for { - err := e.writeByte(tmp + byte(e.low>>32)) - if err != nil { - return err - } - tmp = 0xff - e.cacheLen-- - if e.cacheLen <= 0 { - if e.cacheLen < 0 { - panic("negative cacheLen") - } - break - } - } - e.cache = byte(uint32(e.low) >> 24) - } - e.cacheLen++ - e.low = uint64(uint32(e.low) << 8) - return nil -} - -// rangeDecoder decodes single bits of the range encoding stream. -type rangeDecoder struct { - br io.ByteReader - nrange uint32 - code uint32 -} - -// newRangeDecoder initializes a range decoder. It reads five bytes from the -// reader and therefore may return an error. -func newRangeDecoder(br io.ByteReader) (d *rangeDecoder, err error) { - d = &rangeDecoder{br: br, nrange: 0xffffffff} - - b, err := d.br.ReadByte() - if err != nil { - return nil, err - } - if b != 0 { - return nil, errors.New("newRangeDecoder: first byte not zero") - } - - for i := 0; i < 4; i++ { - if err = d.updateCode(); err != nil { - return nil, err - } - } - - if d.code >= d.nrange { - return nil, errors.New("newRangeDecoder: d.code >= d.nrange") - } - - return d, nil -} - -// possiblyAtEnd checks whether the decoder may be at the end of the stream. -func (d *rangeDecoder) possiblyAtEnd() bool { - return d.code == 0 -} - -// DirectDecodeBit decodes a bit with probability 1/2. The return value b will -// contain the bit at the least-significant position. All other bits will be -// zero. 
-func (d *rangeDecoder) DirectDecodeBit() (b uint32, err error) { - d.nrange >>= 1 - d.code -= d.nrange - t := 0 - (d.code >> 31) - d.code += d.nrange & t - b = (t + 1) & 1 - - // d.code will stay less then d.nrange - - // normalize - // assume d.code < d.nrange - const top = 1 << 24 - if d.nrange >= top { - return b, nil - } - d.nrange <<= 8 - // d.code < d.nrange will be maintained - return b, d.updateCode() -} - -// decodeBit decodes a single bit. The bit will be returned at the -// least-significant position. All other bits will be zero. The probability -// value will be updated. -func (d *rangeDecoder) DecodeBit(p *prob) (b uint32, err error) { - bound := p.bound(d.nrange) - if d.code < bound { - d.nrange = bound - p.inc() - b = 0 - } else { - d.code -= bound - d.nrange -= bound - p.dec() - b = 1 - } - // normalize - // assume d.code < d.nrange - const top = 1 << 24 - if d.nrange >= top { - return b, nil - } - d.nrange <<= 8 - // d.code < d.nrange will be maintained - return b, d.updateCode() -} - -// updateCode reads a new byte into the code. -func (d *rangeDecoder) updateCode() error { - b, err := d.br.ReadByte() - if err != nil { - return err - } - d.code = (d.code << 8) | uint32(b) - return nil -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/reader.go b/vendor/github.com/ulikunitz/xz/lzma/reader.go deleted file mode 100644 index 2ed13c886e..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/reader.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package lzma supports the decoding and encoding of LZMA streams. -// Reader and Writer support the classic LZMA format. Reader2 and -// Writer2 support the decoding and encoding of LZMA2 streams. -// -// The package is written completely in Go and doesn't rely on any external -// library. 
-package lzma - -import ( - "errors" - "io" -) - -// ReaderConfig stores the parameters for the reader of the classic LZMA -// format. -type ReaderConfig struct { - DictCap int -} - -// fill converts the zero values of the configuration to the default values. -func (c *ReaderConfig) fill() { - if c.DictCap == 0 { - c.DictCap = 8 * 1024 * 1024 - } -} - -// Verify checks the reader configuration for errors. Zero values will -// be replaced by default values. -func (c *ReaderConfig) Verify() error { - c.fill() - if !(MinDictCap <= c.DictCap && int64(c.DictCap) <= MaxDictCap) { - return errors.New("lzma: dictionary capacity is out of range") - } - return nil -} - -// Reader provides a reader for LZMA files or streams. -type Reader struct { - lzma io.Reader - h header - d *decoder -} - -// NewReader creates a new reader for an LZMA stream using the classic -// format. NewReader reads and checks the header of the LZMA stream. -func NewReader(lzma io.Reader) (r *Reader, err error) { - return ReaderConfig{}.NewReader(lzma) -} - -// NewReader creates a new reader for an LZMA stream in the classic -// format. The function reads and verifies the the header of the LZMA -// stream. 
-func (c ReaderConfig) NewReader(lzma io.Reader) (r *Reader, err error) { - if err = c.Verify(); err != nil { - return nil, err - } - data := make([]byte, HeaderLen) - if _, err := io.ReadFull(lzma, data); err != nil { - if err == io.EOF { - return nil, errors.New("lzma: unexpected EOF") - } - return nil, err - } - r = &Reader{lzma: lzma} - if err = r.h.unmarshalBinary(data); err != nil { - return nil, err - } - if r.h.dictCap < MinDictCap { - return nil, errors.New("lzma: dictionary capacity too small") - } - dictCap := r.h.dictCap - if c.DictCap > dictCap { - dictCap = c.DictCap - } - - state := newState(r.h.properties) - dict, err := newDecoderDict(dictCap) - if err != nil { - return nil, err - } - r.d, err = newDecoder(ByteReader(lzma), state, dict, r.h.size) - if err != nil { - return nil, err - } - return r, nil -} - -// EOSMarker indicates that an EOS marker has been encountered. -func (r *Reader) EOSMarker() bool { - return r.d.eosMarker -} - -// Read returns uncompressed data. -func (r *Reader) Read(p []byte) (n int, err error) { - return r.d.Read(p) -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/reader2.go b/vendor/github.com/ulikunitz/xz/lzma/reader2.go deleted file mode 100644 index de3da37ee6..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/reader2.go +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "errors" - "io" - - "github.com/ulikunitz/xz/internal/xlog" -) - -// Reader2Config stores the parameters for the LZMA2 reader. -// format. -type Reader2Config struct { - DictCap int -} - -// fill converts the zero values of the configuration to the default values. -func (c *Reader2Config) fill() { - if c.DictCap == 0 { - c.DictCap = 8 * 1024 * 1024 - } -} - -// Verify checks the reader configuration for errors. 
Zero configuration values -// will be replaced by default values. -func (c *Reader2Config) Verify() error { - c.fill() - if !(MinDictCap <= c.DictCap && int64(c.DictCap) <= MaxDictCap) { - return errors.New("lzma: dictionary capacity is out of range") - } - return nil -} - -// Reader2 supports the reading of LZMA2 chunk sequences. Note that the -// first chunk should have a dictionary reset and the first compressed -// chunk a properties reset. The chunk sequence may not be terminated by -// an end-of-stream chunk. -type Reader2 struct { - r io.Reader - err error - - dict *decoderDict - ur *uncompressedReader - decoder *decoder - chunkReader io.Reader - - cstate chunkState -} - -// NewReader2 creates a reader for an LZMA2 chunk sequence. -func NewReader2(lzma2 io.Reader) (r *Reader2, err error) { - return Reader2Config{}.NewReader2(lzma2) -} - -// NewReader2 creates an LZMA2 reader using the given configuration. -func (c Reader2Config) NewReader2(lzma2 io.Reader) (r *Reader2, err error) { - if err = c.Verify(); err != nil { - return nil, err - } - r = &Reader2{r: lzma2, cstate: start} - r.dict, err = newDecoderDict(c.DictCap) - if err != nil { - return nil, err - } - if err = r.startChunk(); err != nil { - r.err = err - } - return r, nil -} - -// uncompressed tests whether the chunk type specifies an uncompressed -// chunk. -func uncompressed(ctype chunkType) bool { - return ctype == cU || ctype == cUD -} - -// startChunk parses a new chunk. 
-func (r *Reader2) startChunk() error { - r.chunkReader = nil - header, err := readChunkHeader(r.r) - if err != nil { - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - return err - } - xlog.Debugf("chunk header %v", header) - if err = r.cstate.next(header.ctype); err != nil { - return err - } - if r.cstate == stop { - return io.EOF - } - if header.ctype == cUD || header.ctype == cLRND { - r.dict.Reset() - } - size := int64(header.uncompressed) + 1 - if uncompressed(header.ctype) { - if r.ur != nil { - r.ur.Reopen(r.r, size) - } else { - r.ur = newUncompressedReader(r.r, r.dict, size) - } - r.chunkReader = r.ur - return nil - } - br := ByteReader(io.LimitReader(r.r, int64(header.compressed)+1)) - if r.decoder == nil { - state := newState(header.props) - r.decoder, err = newDecoder(br, state, r.dict, size) - if err != nil { - return err - } - r.chunkReader = r.decoder - return nil - } - switch header.ctype { - case cLR: - r.decoder.State.Reset() - case cLRN, cLRND: - r.decoder.State = newState(header.props) - } - err = r.decoder.Reopen(br, size) - if err != nil { - return err - } - r.chunkReader = r.decoder - return nil -} - -// Read reads data from the LZMA2 chunk sequence. -func (r *Reader2) Read(p []byte) (n int, err error) { - if r.err != nil { - return 0, r.err - } - for n < len(p) { - var k int - k, err = r.chunkReader.Read(p[n:]) - n += k - if err != nil { - if err == io.EOF { - err = r.startChunk() - if err == nil { - continue - } - } - r.err = err - return n, err - } - if k == 0 { - r.err = errors.New("lzma: Reader2 doesn't get data") - return n, r.err - } - } - return n, nil -} - -// EOS returns whether the LZMA2 stream has been terminated by an -// end-of-stream chunk. -func (r *Reader2) EOS() bool { - return r.cstate == stop -} - -// uncompressedReader is used to read uncompressed chunks. 
-type uncompressedReader struct { - lr io.LimitedReader - Dict *decoderDict - eof bool - err error -} - -// newUncompressedReader initializes a new uncompressedReader. -func newUncompressedReader(r io.Reader, dict *decoderDict, size int64) *uncompressedReader { - ur := &uncompressedReader{ - lr: io.LimitedReader{R: r, N: size}, - Dict: dict, - } - return ur -} - -// Reopen reinitializes an uncompressed reader. -func (ur *uncompressedReader) Reopen(r io.Reader, size int64) { - ur.err = nil - ur.eof = false - ur.lr = io.LimitedReader{R: r, N: size} -} - -// fill reads uncompressed data into the dictionary. -func (ur *uncompressedReader) fill() error { - if !ur.eof { - n, err := io.CopyN(ur.Dict, &ur.lr, int64(ur.Dict.Available())) - if err != io.EOF { - return err - } - ur.eof = true - if n > 0 { - return nil - } - } - if ur.lr.N != 0 { - return io.ErrUnexpectedEOF - } - return io.EOF -} - -// Read reads uncompressed data from the limited reader. -func (ur *uncompressedReader) Read(p []byte) (n int, err error) { - if ur.err != nil { - return 0, ur.err - } - for { - var k int - k, err = ur.Dict.Read(p[n:]) - n += k - if n >= len(p) { - return n, nil - } - if err != nil { - break - } - err = ur.fill() - if err != nil { - break - } - } - ur.err = err - return n, err -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/state.go b/vendor/github.com/ulikunitz/xz/lzma/state.go deleted file mode 100644 index 09d62f7d99..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/state.go +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -// states defines the overall state count -const states = 12 - -// State maintains the full state of the operation encoding or decoding -// process. 
-type state struct { - rep [4]uint32 - isMatch [states << maxPosBits]prob - isRepG0Long [states << maxPosBits]prob - isRep [states]prob - isRepG0 [states]prob - isRepG1 [states]prob - isRepG2 [states]prob - litCodec literalCodec - lenCodec lengthCodec - repLenCodec lengthCodec - distCodec distCodec - state uint32 - posBitMask uint32 - Properties Properties -} - -// initProbSlice initializes a slice of probabilities. -func initProbSlice(p []prob) { - for i := range p { - p[i] = probInit - } -} - -// Reset sets all state information to the original values. -func (s *state) Reset() { - p := s.Properties - *s = state{ - Properties: p, - // dict: s.dict, - posBitMask: (uint32(1) << uint(p.PB)) - 1, - } - initProbSlice(s.isMatch[:]) - initProbSlice(s.isRep[:]) - initProbSlice(s.isRepG0[:]) - initProbSlice(s.isRepG1[:]) - initProbSlice(s.isRepG2[:]) - initProbSlice(s.isRepG0Long[:]) - s.litCodec.init(p.LC, p.LP) - s.lenCodec.init() - s.repLenCodec.init() - s.distCodec.init() -} - -// newState creates a new state from the give Properties. -func newState(p Properties) *state { - s := &state{Properties: p} - s.Reset() - return s -} - -// deepcopy initializes s as a deep copy of the source. -func (s *state) deepcopy(src *state) { - if s == src { - return - } - s.rep = src.rep - s.isMatch = src.isMatch - s.isRepG0Long = src.isRepG0Long - s.isRep = src.isRep - s.isRepG0 = src.isRepG0 - s.isRepG1 = src.isRepG1 - s.isRepG2 = src.isRepG2 - s.litCodec.deepcopy(&src.litCodec) - s.lenCodec.deepcopy(&src.lenCodec) - s.repLenCodec.deepcopy(&src.repLenCodec) - s.distCodec.deepcopy(&src.distCodec) - s.state = src.state - s.posBitMask = src.posBitMask - s.Properties = src.Properties -} - -// cloneState creates a new clone of the give state. -func cloneState(src *state) *state { - s := new(state) - s.deepcopy(src) - return s -} - -// updateStateLiteral updates the state for a literal. 
-func (s *state) updateStateLiteral() { - switch { - case s.state < 4: - s.state = 0 - return - case s.state < 10: - s.state -= 3 - return - } - s.state -= 6 -} - -// updateStateMatch updates the state for a match. -func (s *state) updateStateMatch() { - if s.state < 7 { - s.state = 7 - } else { - s.state = 10 - } -} - -// updateStateRep updates the state for a repetition. -func (s *state) updateStateRep() { - if s.state < 7 { - s.state = 8 - } else { - s.state = 11 - } -} - -// updateStateShortRep updates the state for a short repetition. -func (s *state) updateStateShortRep() { - if s.state < 7 { - s.state = 9 - } else { - s.state = 11 - } -} - -// states computes the states of the operation codec. -func (s *state) states(dictHead int64) (state1, state2, posState uint32) { - state1 = s.state - posState = uint32(dictHead) & s.posBitMask - state2 = (s.state << maxPosBits) | posState - return -} - -// litState computes the literal state. -func (s *state) litState(prev byte, dictHead int64) uint32 { - lp, lc := uint(s.Properties.LP), uint(s.Properties.LC) - litState := ((uint32(dictHead) & ((1 << lp) - 1)) << lc) | - (uint32(prev) >> (8 - lc)) - return litState -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/treecodecs.go b/vendor/github.com/ulikunitz/xz/lzma/treecodecs.go deleted file mode 100644 index 6e927e9359..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/treecodecs.go +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -// treeCodec encodes or decodes values with a fixed bit size. It is using a -// tree of probability value. The root of the tree is the most-significant bit. -type treeCodec struct { - probTree -} - -// makeTreeCodec makes a tree codec. The bits value must be inside the range -// [1,32]. 
-func makeTreeCodec(bits int) treeCodec { - return treeCodec{makeProbTree(bits)} -} - -// deepcopy initializes tc as a deep copy of the source. -func (tc *treeCodec) deepcopy(src *treeCodec) { - tc.probTree.deepcopy(&src.probTree) -} - -// Encode uses the range encoder to encode a fixed-bit-size value. -func (tc *treeCodec) Encode(e *rangeEncoder, v uint32) (err error) { - m := uint32(1) - for i := int(tc.bits) - 1; i >= 0; i-- { - b := (v >> uint(i)) & 1 - if err := e.EncodeBit(b, &tc.probs[m]); err != nil { - return err - } - m = (m << 1) | b - } - return nil -} - -// Decodes uses the range decoder to decode a fixed-bit-size value. Errors may -// be caused by the range decoder. -func (tc *treeCodec) Decode(d *rangeDecoder) (v uint32, err error) { - m := uint32(1) - for j := 0; j < int(tc.bits); j++ { - b, err := d.DecodeBit(&tc.probs[m]) - if err != nil { - return 0, err - } - m = (m << 1) | b - } - return m - (1 << uint(tc.bits)), nil -} - -// treeReverseCodec is another tree codec, where the least-significant bit is -// the start of the probability tree. -type treeReverseCodec struct { - probTree -} - -// deepcopy initializes the treeReverseCodec as a deep copy of the -// source. -func (tc *treeReverseCodec) deepcopy(src *treeReverseCodec) { - tc.probTree.deepcopy(&src.probTree) -} - -// makeTreeReverseCodec creates treeReverseCodec value. The bits argument must -// be in the range [1,32]. -func makeTreeReverseCodec(bits int) treeReverseCodec { - return treeReverseCodec{makeProbTree(bits)} -} - -// Encode uses range encoder to encode a fixed-bit-size value. The range -// encoder may cause errors. -func (tc *treeReverseCodec) Encode(v uint32, e *rangeEncoder) (err error) { - m := uint32(1) - for i := uint(0); i < uint(tc.bits); i++ { - b := (v >> i) & 1 - if err := e.EncodeBit(b, &tc.probs[m]); err != nil { - return err - } - m = (m << 1) | b - } - return nil -} - -// Decodes uses the range decoder to decode a fixed-bit-size value. 
Errors -// returned by the range decoder will be returned. -func (tc *treeReverseCodec) Decode(d *rangeDecoder) (v uint32, err error) { - m := uint32(1) - for j := uint(0); j < uint(tc.bits); j++ { - b, err := d.DecodeBit(&tc.probs[m]) - if err != nil { - return 0, err - } - m = (m << 1) | b - v |= b << j - } - return v, nil -} - -// probTree stores enough probability values to be used by the treeEncode and -// treeDecode methods of the range coder types. -type probTree struct { - probs []prob - bits byte -} - -// deepcopy initializes the probTree value as a deep copy of the source. -func (t *probTree) deepcopy(src *probTree) { - if t == src { - return - } - t.probs = make([]prob, len(src.probs)) - copy(t.probs, src.probs) - t.bits = src.bits -} - -// makeProbTree initializes a probTree structure. -func makeProbTree(bits int) probTree { - if !(1 <= bits && bits <= 32) { - panic("bits outside of range [1,32]") - } - t := probTree{ - bits: byte(bits), - probs: make([]prob, 1< 0 { - c.SizeInHeader = true - } - if !c.SizeInHeader { - c.EOSMarker = true - } -} - -// Verify checks WriterConfig for errors. Verify will replace zero -// values with default values. 
-func (c *WriterConfig) Verify() error { - c.fill() - var err error - if c == nil { - return errors.New("lzma: WriterConfig is nil") - } - if c.Properties == nil { - return errors.New("lzma: WriterConfig has no Properties set") - } - if err = c.Properties.verify(); err != nil { - return err - } - if !(MinDictCap <= c.DictCap && int64(c.DictCap) <= MaxDictCap) { - return errors.New("lzma: dictionary capacity is out of range") - } - if !(maxMatchLen <= c.BufSize) { - return errors.New("lzma: lookahead buffer size too small") - } - if c.SizeInHeader { - if c.Size < 0 { - return errors.New("lzma: negative size not supported") - } - } else if !c.EOSMarker { - return errors.New("lzma: EOS marker is required") - } - if err = c.Matcher.verify(); err != nil { - return err - } - - return nil -} - -// header returns the header structure for this configuration. -func (c *WriterConfig) header() header { - h := header{ - properties: *c.Properties, - dictCap: c.DictCap, - size: -1, - } - if c.SizeInHeader { - h.size = c.Size - } - return h -} - -// Writer writes an LZMA stream in the classic format. -type Writer struct { - h header - bw io.ByteWriter - buf *bufio.Writer - e *encoder -} - -// NewWriter creates a new LZMA writer for the classic format. The -// method will write the header to the underlying stream. 
-func (c WriterConfig) NewWriter(lzma io.Writer) (w *Writer, err error) { - if err = c.Verify(); err != nil { - return nil, err - } - w = &Writer{h: c.header()} - - var ok bool - w.bw, ok = lzma.(io.ByteWriter) - if !ok { - w.buf = bufio.NewWriter(lzma) - w.bw = w.buf - } - state := newState(w.h.properties) - m, err := c.Matcher.new(w.h.dictCap) - if err != nil { - return nil, err - } - dict, err := newEncoderDict(w.h.dictCap, c.BufSize, m) - if err != nil { - return nil, err - } - var flags encoderFlags - if c.EOSMarker { - flags = eosMarker - } - if w.e, err = newEncoder(w.bw, state, dict, flags); err != nil { - return nil, err - } - - if err = w.writeHeader(); err != nil { - return nil, err - } - return w, nil -} - -// NewWriter creates a new LZMA writer using the classic format. The -// function writes the header to the underlying stream. -func NewWriter(lzma io.Writer) (w *Writer, err error) { - return WriterConfig{}.NewWriter(lzma) -} - -// writeHeader writes the LZMA header into the stream. -func (w *Writer) writeHeader() error { - data, err := w.h.marshalBinary() - if err != nil { - return err - } - _, err = w.bw.(io.Writer).Write(data) - return err -} - -// Write puts data into the Writer. -func (w *Writer) Write(p []byte) (n int, err error) { - if w.h.size >= 0 { - m := w.h.size - m -= w.e.Compressed() + int64(w.e.dict.Buffered()) - if m < 0 { - m = 0 - } - if m < int64(len(p)) { - p = p[:m] - err = ErrNoSpace - } - } - var werr error - if n, werr = w.e.Write(p); werr != nil { - err = werr - } - return n, err -} - -// Close closes the writer stream. It ensures that all data from the -// buffer will be compressed and the LZMA stream will be finished. 
-func (w *Writer) Close() error { - if w.h.size >= 0 { - n := w.e.Compressed() + int64(w.e.dict.Buffered()) - if n != w.h.size { - return errSize - } - } - err := w.e.Close() - if w.buf != nil { - ferr := w.buf.Flush() - if err == nil { - err = ferr - } - } - return err -} diff --git a/vendor/github.com/ulikunitz/xz/lzma/writer2.go b/vendor/github.com/ulikunitz/xz/lzma/writer2.go deleted file mode 100644 index dfaaec95b6..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzma/writer2.go +++ /dev/null @@ -1,305 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lzma - -import ( - "bytes" - "errors" - "io" -) - -// Writer2Config is used to create a Writer2 using parameters. -type Writer2Config struct { - // The properties for the encoding. If the it is nil the value - // {LC: 3, LP: 0, PB: 2} will be chosen. - Properties *Properties - // The capacity of the dictionary. If DictCap is zero, the value - // 8 MiB will be chosen. - DictCap int - // Size of the lookahead buffer; value 0 indicates default size - // 4096 - BufSize int - // Match algorithm - Matcher MatchAlgorithm -} - -// fill replaces zero values with default values. -func (c *Writer2Config) fill() { - if c.Properties == nil { - c.Properties = &Properties{LC: 3, LP: 0, PB: 2} - } - if c.DictCap == 0 { - c.DictCap = 8 * 1024 * 1024 - } - if c.BufSize == 0 { - c.BufSize = 4096 - } -} - -// Verify checks the Writer2Config for correctness. Zero values will be -// replaced by default values. 
-func (c *Writer2Config) Verify() error { - c.fill() - var err error - if c == nil { - return errors.New("lzma: WriterConfig is nil") - } - if c.Properties == nil { - return errors.New("lzma: WriterConfig has no Properties set") - } - if err = c.Properties.verify(); err != nil { - return err - } - if !(MinDictCap <= c.DictCap && int64(c.DictCap) <= MaxDictCap) { - return errors.New("lzma: dictionary capacity is out of range") - } - if !(maxMatchLen <= c.BufSize) { - return errors.New("lzma: lookahead buffer size too small") - } - if c.Properties.LC+c.Properties.LP > 4 { - return errors.New("lzma: sum of lc and lp exceeds 4") - } - if err = c.Matcher.verify(); err != nil { - return err - } - return nil -} - -// Writer2 supports the creation of an LZMA2 stream. But note that -// written data is buffered, so call Flush or Close to write data to the -// underlying writer. The Close method writes the end-of-stream marker -// to the stream. So you may be able to concatenate the output of two -// writers as long the output of the first writer has only been flushed -// but not closed. -// -// Any change to the fields Properties, DictCap must be done before the -// first call to Write, Flush or Close. -type Writer2 struct { - w io.Writer - - start *state - encoder *encoder - - cstate chunkState - ctype chunkType - - buf bytes.Buffer - lbw LimitedByteWriter -} - -// NewWriter2 creates an LZMA2 chunk sequence writer with the default -// parameters and options. -func NewWriter2(lzma2 io.Writer) (w *Writer2, err error) { - return Writer2Config{}.NewWriter2(lzma2) -} - -// NewWriter2 creates a new LZMA2 writer using the given configuration. 
-func (c Writer2Config) NewWriter2(lzma2 io.Writer) (w *Writer2, err error) { - if err = c.Verify(); err != nil { - return nil, err - } - w = &Writer2{ - w: lzma2, - start: newState(*c.Properties), - cstate: start, - ctype: start.defaultChunkType(), - } - w.buf.Grow(maxCompressed) - w.lbw = LimitedByteWriter{BW: &w.buf, N: maxCompressed} - m, err := c.Matcher.new(c.DictCap) - if err != nil { - return nil, err - } - d, err := newEncoderDict(c.DictCap, c.BufSize, m) - if err != nil { - return nil, err - } - w.encoder, err = newEncoder(&w.lbw, cloneState(w.start), d, 0) - if err != nil { - return nil, err - } - return w, nil -} - -// written returns the number of bytes written to the current chunk -func (w *Writer2) written() int { - if w.encoder == nil { - return 0 - } - return int(w.encoder.Compressed()) + w.encoder.dict.Buffered() -} - -// errClosed indicates that the writer is closed. -var errClosed = errors.New("lzma: writer closed") - -// Writes data to LZMA2 stream. Note that written data will be buffered. -// Use Flush or Close to ensure that data is written to the underlying -// writer. -func (w *Writer2) Write(p []byte) (n int, err error) { - if w.cstate == stop { - return 0, errClosed - } - for n < len(p) { - m := maxUncompressed - w.written() - if m <= 0 { - panic("lzma: maxUncompressed reached") - } - var q []byte - if n+m < len(p) { - q = p[n : n+m] - } else { - q = p[n:] - } - k, err := w.encoder.Write(q) - n += k - if err != nil && err != ErrLimit { - return n, err - } - if err == ErrLimit || k == m { - if err = w.flushChunk(); err != nil { - return n, err - } - } - } - return n, nil -} - -// writeUncompressedChunk writes an uncompressed chunk to the LZMA2 -// stream. 
-func (w *Writer2) writeUncompressedChunk() error { - u := w.encoder.Compressed() - if u <= 0 { - return errors.New("lzma: can't write empty uncompressed chunk") - } - if u > maxUncompressed { - panic("overrun of uncompressed data limit") - } - switch w.ctype { - case cLRND: - w.ctype = cUD - default: - w.ctype = cU - } - w.encoder.state = w.start - - header := chunkHeader{ - ctype: w.ctype, - uncompressed: uint32(u - 1), - } - hdata, err := header.MarshalBinary() - if err != nil { - return err - } - if _, err = w.w.Write(hdata); err != nil { - return err - } - _, err = w.encoder.dict.CopyN(w.w, int(u)) - return err -} - -// writeCompressedChunk writes a compressed chunk to the underlying -// writer. -func (w *Writer2) writeCompressedChunk() error { - if w.ctype == cU || w.ctype == cUD { - panic("chunk type uncompressed") - } - - u := w.encoder.Compressed() - if u <= 0 { - return errors.New("writeCompressedChunk: empty chunk") - } - if u > maxUncompressed { - panic("overrun of uncompressed data limit") - } - c := w.buf.Len() - if c <= 0 { - panic("no compressed data") - } - if c > maxCompressed { - panic("overrun of compressed data limit") - } - header := chunkHeader{ - ctype: w.ctype, - uncompressed: uint32(u - 1), - compressed: uint16(c - 1), - props: w.encoder.state.Properties, - } - hdata, err := header.MarshalBinary() - if err != nil { - return err - } - if _, err = w.w.Write(hdata); err != nil { - return err - } - _, err = io.Copy(w.w, &w.buf) - return err -} - -// writes a single chunk to the underlying writer. -func (w *Writer2) writeChunk() error { - u := int(uncompressedHeaderLen + w.encoder.Compressed()) - c := headerLen(w.ctype) + w.buf.Len() - if u < c { - return w.writeUncompressedChunk() - } - return w.writeCompressedChunk() -} - -// flushChunk terminates the current chunk. The encoder will be reset -// to support the next chunk. 
-func (w *Writer2) flushChunk() error { - if w.written() == 0 { - return nil - } - var err error - if err = w.encoder.Close(); err != nil { - return err - } - if err = w.writeChunk(); err != nil { - return err - } - w.buf.Reset() - w.lbw.N = maxCompressed - if err = w.encoder.Reopen(&w.lbw); err != nil { - return err - } - if err = w.cstate.next(w.ctype); err != nil { - return err - } - w.ctype = w.cstate.defaultChunkType() - w.start = cloneState(w.encoder.state) - return nil -} - -// Flush writes all buffered data out to the underlying stream. This -// could result in multiple chunks to be created. -func (w *Writer2) Flush() error { - if w.cstate == stop { - return errClosed - } - for w.written() > 0 { - if err := w.flushChunk(); err != nil { - return err - } - } - return nil -} - -// Close terminates the LZMA2 stream with an EOS chunk. -func (w *Writer2) Close() error { - if w.cstate == stop { - return errClosed - } - if err := w.Flush(); err != nil { - return nil - } - // write zero byte EOS chunk - _, err := w.w.Write([]byte{0}) - if err != nil { - return err - } - w.cstate = stop - return nil -} diff --git a/vendor/github.com/ulikunitz/xz/lzmafilter.go b/vendor/github.com/ulikunitz/xz/lzmafilter.go deleted file mode 100644 index 4f1bb33935..0000000000 --- a/vendor/github.com/ulikunitz/xz/lzmafilter.go +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xz - -import ( - "errors" - "fmt" - "io" - - "github.com/ulikunitz/xz/lzma" -) - -// LZMA filter constants. -const ( - lzmaFilterID = 0x21 - lzmaFilterLen = 3 -) - -// lzmaFilter declares the LZMA2 filter information stored in an xz -// block header. -type lzmaFilter struct { - dictCap int64 -} - -// String returns a representation of the LZMA filter. 
-func (f lzmaFilter) String() string { - return fmt.Sprintf("LZMA dict cap %#x", f.dictCap) -} - -// id returns the ID for the LZMA2 filter. -func (f lzmaFilter) id() uint64 { return lzmaFilterID } - -// MarshalBinary converts the lzmaFilter in its encoded representation. -func (f lzmaFilter) MarshalBinary() (data []byte, err error) { - c := lzma.EncodeDictCap(f.dictCap) - return []byte{lzmaFilterID, 1, c}, nil -} - -// UnmarshalBinary unmarshals the given data representation of the LZMA2 -// filter. -func (f *lzmaFilter) UnmarshalBinary(data []byte) error { - if len(data) != lzmaFilterLen { - return errors.New("xz: data for LZMA2 filter has wrong length") - } - if data[0] != lzmaFilterID { - return errors.New("xz: wrong LZMA2 filter id") - } - if data[1] != 1 { - return errors.New("xz: wrong LZMA2 filter size") - } - dc, err := lzma.DecodeDictCap(data[2]) - if err != nil { - return errors.New("xz: wrong LZMA2 dictionary size property") - } - - f.dictCap = dc - return nil -} - -// reader creates a new reader for the LZMA2 filter. -func (f lzmaFilter) reader(r io.Reader, c *ReaderConfig) (fr io.Reader, - err error) { - - config := new(lzma.Reader2Config) - if c != nil { - config.DictCap = c.DictCap - } - dc := int(f.dictCap) - if dc < 1 { - return nil, errors.New("xz: LZMA2 filter parameter " + - "dictionary capacity overflow") - } - if dc > config.DictCap { - config.DictCap = dc - } - - fr, err = config.NewReader2(r) - if err != nil { - return nil, err - } - return fr, nil -} - -// writeCloser creates a io.WriteCloser for the LZMA2 filter. 
-func (f lzmaFilter) writeCloser(w io.WriteCloser, c *WriterConfig, -) (fw io.WriteCloser, err error) { - config := new(lzma.Writer2Config) - if c != nil { - *config = lzma.Writer2Config{ - Properties: c.Properties, - DictCap: c.DictCap, - BufSize: c.BufSize, - Matcher: c.Matcher, - } - } - - dc := int(f.dictCap) - if dc < 1 { - return nil, errors.New("xz: LZMA2 filter parameter " + - "dictionary capacity overflow") - } - if dc > config.DictCap { - config.DictCap = dc - } - - fw, err = config.NewWriter2(w) - if err != nil { - return nil, err - } - return fw, nil -} - -// last returns true, because an LZMA2 filter must be the last filter in -// the filter list. -func (f lzmaFilter) last() bool { return true } diff --git a/vendor/github.com/ulikunitz/xz/make-docs b/vendor/github.com/ulikunitz/xz/make-docs deleted file mode 100644 index a8c612ce17..0000000000 --- a/vendor/github.com/ulikunitz/xz/make-docs +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh - -set -x -pandoc -t html5 -f markdown -s --css=doc/md.css -o README.html README.md -pandoc -t html5 -f markdown -s --css=doc/md.css -o TODO.html TODO.md diff --git a/vendor/github.com/ulikunitz/xz/none-check.go b/vendor/github.com/ulikunitz/xz/none-check.go deleted file mode 100644 index 95240135d5..0000000000 --- a/vendor/github.com/ulikunitz/xz/none-check.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package xz - -import "hash" - -type noneHash struct{} - -func (h noneHash) Write(p []byte) (n int, err error) { return len(p), nil } - -func (h noneHash) Sum(b []byte) []byte { return b } - -func (h noneHash) Reset() {} - -func (h noneHash) Size() int { return 0 } - -func (h noneHash) BlockSize() int { return 0 } - -func newNoneHash() hash.Hash { - return &noneHash{} -} diff --git a/vendor/github.com/ulikunitz/xz/reader.go b/vendor/github.com/ulikunitz/xz/reader.go deleted file mode 100644 index 7f974ffc56..0000000000 --- a/vendor/github.com/ulikunitz/xz/reader.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package xz supports the compression and decompression of xz files. It -// supports version 1.0.4 of the specification without the non-LZMA2 -// filters. See http://tukaani.org/xz/xz-file-format-1.0.4.txt -package xz - -import ( - "bytes" - "errors" - "fmt" - "hash" - "io" - - "github.com/ulikunitz/xz/internal/xlog" - "github.com/ulikunitz/xz/lzma" -) - -// ReaderConfig defines the parameters for the xz reader. The -// SingleStream parameter requests the reader to assume that the -// underlying stream contains only a single stream. -type ReaderConfig struct { - DictCap int - SingleStream bool -} - -// Verify checks the reader parameters for Validity. Zero values will be -// replaced by default values. -func (c *ReaderConfig) Verify() error { - if c == nil { - return errors.New("xz: reader parameters are nil") - } - lc := lzma.Reader2Config{DictCap: c.DictCap} - if err := lc.Verify(); err != nil { - return err - } - return nil -} - -// Reader supports the reading of one or multiple xz streams. 
-type Reader struct { - ReaderConfig - - xz io.Reader - sr *streamReader -} - -// streamReader decodes a single xz stream -type streamReader struct { - ReaderConfig - - xz io.Reader - br *blockReader - newHash func() hash.Hash - h header - index []record -} - -// NewReader creates a new xz reader using the default parameters. -// The function reads and checks the header of the first XZ stream. The -// reader will process multiple streams including padding. -func NewReader(xz io.Reader) (r *Reader, err error) { - return ReaderConfig{}.NewReader(xz) -} - -// NewReader creates an xz stream reader. The created reader will be -// able to process multiple streams and padding unless a SingleStream -// has been set in the reader configuration c. -func (c ReaderConfig) NewReader(xz io.Reader) (r *Reader, err error) { - if err = c.Verify(); err != nil { - return nil, err - } - r = &Reader{ - ReaderConfig: c, - xz: xz, - } - if r.sr, err = c.newStreamReader(xz); err != nil { - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - return nil, err - } - return r, nil -} - -var errUnexpectedData = errors.New("xz: unexpected data after stream") - -// Read reads uncompressed data from the stream. -func (r *Reader) Read(p []byte) (n int, err error) { - for n < len(p) { - if r.sr == nil { - if r.SingleStream { - data := make([]byte, 1) - _, err = io.ReadFull(r.xz, data) - if err != io.EOF { - return n, errUnexpectedData - } - return n, io.EOF - } - for { - r.sr, err = r.ReaderConfig.newStreamReader(r.xz) - if err != errPadding { - break - } - } - if err != nil { - return n, err - } - } - k, err := r.sr.Read(p[n:]) - n += k - if err != nil { - if err == io.EOF { - r.sr = nil - continue - } - return n, err - } - } - return n, nil -} - -var errPadding = errors.New("xz: padding (4 zero bytes) encountered") - -// newStreamReader creates a new xz stream reader using the given configuration -// parameters. NewReader reads and checks the header of the xz stream. 
-func (c ReaderConfig) newStreamReader(xz io.Reader) (r *streamReader, err error) { - if err = c.Verify(); err != nil { - return nil, err - } - data := make([]byte, HeaderLen) - if _, err := io.ReadFull(xz, data[:4]); err != nil { - return nil, err - } - if bytes.Equal(data[:4], []byte{0, 0, 0, 0}) { - return nil, errPadding - } - if _, err = io.ReadFull(xz, data[4:]); err != nil { - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - return nil, err - } - r = &streamReader{ - ReaderConfig: c, - xz: xz, - index: make([]record, 0, 4), - } - if err = r.h.UnmarshalBinary(data); err != nil { - return nil, err - } - xlog.Debugf("xz header %s", r.h) - if r.newHash, err = newHashFunc(r.h.flags); err != nil { - return nil, err - } - return r, nil -} - -// readTail reads the index body and the xz footer. -func (r *streamReader) readTail() error { - index, n, err := readIndexBody(r.xz, len(r.index)) - if err != nil { - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - return err - } - - for i, rec := range r.index { - if rec != index[i] { - return fmt.Errorf("xz: record %d is %v; want %v", - i, rec, index[i]) - } - } - - p := make([]byte, footerLen) - if _, err = io.ReadFull(r.xz, p); err != nil { - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - return err - } - var f footer - if err = f.UnmarshalBinary(p); err != nil { - return err - } - xlog.Debugf("xz footer %s", f) - if f.flags != r.h.flags { - return errors.New("xz: footer flags incorrect") - } - if f.indexSize != int64(n)+1 { - return errors.New("xz: index size in footer wrong") - } - return nil -} - -// Read reads actual data from the xz stream. 
-func (r *streamReader) Read(p []byte) (n int, err error) { - for n < len(p) { - if r.br == nil { - bh, hlen, err := readBlockHeader(r.xz) - if err != nil { - if err == errIndexIndicator { - if err = r.readTail(); err != nil { - return n, err - } - return n, io.EOF - } - return n, err - } - xlog.Debugf("block %v", *bh) - r.br, err = r.ReaderConfig.newBlockReader(r.xz, bh, - hlen, r.newHash()) - if err != nil { - return n, err - } - } - k, err := r.br.Read(p[n:]) - n += k - if err != nil { - if err == io.EOF { - r.index = append(r.index, r.br.record()) - r.br = nil - } else { - return n, err - } - } - } - return n, nil -} - -// countingReader is a reader that counts the bytes read. -type countingReader struct { - r io.Reader - n int64 -} - -// Read reads data from the wrapped reader and adds it to the n field. -func (lr *countingReader) Read(p []byte) (n int, err error) { - n, err = lr.r.Read(p) - lr.n += int64(n) - return n, err -} - -// blockReader supports the reading of a block. -type blockReader struct { - lxz countingReader - header *blockHeader - headerLen int - n int64 - hash hash.Hash - r io.Reader -} - -// newBlockReader creates a new block reader. -func (c *ReaderConfig) newBlockReader(xz io.Reader, h *blockHeader, - hlen int, hash hash.Hash) (br *blockReader, err error) { - - br = &blockReader{ - lxz: countingReader{r: xz}, - header: h, - headerLen: hlen, - hash: hash, - } - - fr, err := c.newFilterReader(&br.lxz, h.filters) - if err != nil { - return nil, err - } - if br.hash.Size() != 0 { - br.r = io.TeeReader(fr, br.hash) - } else { - br.r = fr - } - - return br, nil -} - -// uncompressedSize returns the uncompressed size of the block. -func (br *blockReader) uncompressedSize() int64 { - return br.n -} - -// compressedSize returns the compressed size of the block. -func (br *blockReader) compressedSize() int64 { - return br.lxz.n -} - -// unpaddedSize computes the unpadded size for the block. 
-func (br *blockReader) unpaddedSize() int64 { - n := int64(br.headerLen) - n += br.compressedSize() - n += int64(br.hash.Size()) - return n -} - -// record returns the index record for the current block. -func (br *blockReader) record() record { - return record{br.unpaddedSize(), br.uncompressedSize()} -} - -// Read reads data from the block. -func (br *blockReader) Read(p []byte) (n int, err error) { - n, err = br.r.Read(p) - br.n += int64(n) - - u := br.header.uncompressedSize - if u >= 0 && br.uncompressedSize() > u { - return n, errors.New("xz: wrong uncompressed size for block") - } - c := br.header.compressedSize - if c >= 0 && br.compressedSize() > c { - return n, errors.New("xz: wrong compressed size for block") - } - if err != io.EOF { - return n, err - } - if br.uncompressedSize() < u || br.compressedSize() < c { - return n, io.ErrUnexpectedEOF - } - - s := br.hash.Size() - k := padLen(br.lxz.n) - q := make([]byte, k+s, k+2*s) - if _, err = io.ReadFull(br.lxz.r, q); err != nil { - if err == io.EOF { - err = io.ErrUnexpectedEOF - } - return n, err - } - if !allZeros(q[:k]) { - return n, errors.New("xz: non-zero block padding") - } - checkSum := q[k:] - computedSum := br.hash.Sum(checkSum[s:]) - if !bytes.Equal(checkSum, computedSum) { - return n, errors.New("xz: checksum error for block") - } - return n, io.EOF -} - -func (c *ReaderConfig) newFilterReader(r io.Reader, f []filter) (fr io.Reader, - err error) { - - if err = verifyFilters(f); err != nil { - return nil, err - } - - fr = r - for i := len(f) - 1; i >= 0; i-- { - fr, err = f[i].reader(fr, c) - if err != nil { - return nil, err - } - } - return fr, nil -} diff --git a/vendor/github.com/ulikunitz/xz/writer.go b/vendor/github.com/ulikunitz/xz/writer.go deleted file mode 100644 index 6b3a66620f..0000000000 --- a/vendor/github.com/ulikunitz/xz/writer.go +++ /dev/null @@ -1,399 +0,0 @@ -// Copyright 2014-2021 Ulrich Kunitz. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xz - -import ( - "errors" - "fmt" - "hash" - "io" - - "github.com/ulikunitz/xz/lzma" -) - -// WriterConfig describe the parameters for an xz writer. -type WriterConfig struct { - Properties *lzma.Properties - DictCap int - BufSize int - BlockSize int64 - // checksum method: CRC32, CRC64 or SHA256 (default: CRC64) - CheckSum byte - // Forces NoChecksum (default: false) - NoCheckSum bool - // match algorithm - Matcher lzma.MatchAlgorithm -} - -// fill replaces zero values with default values. -func (c *WriterConfig) fill() { - if c.Properties == nil { - c.Properties = &lzma.Properties{LC: 3, LP: 0, PB: 2} - } - if c.DictCap == 0 { - c.DictCap = 8 * 1024 * 1024 - } - if c.BufSize == 0 { - c.BufSize = 4096 - } - if c.BlockSize == 0 { - c.BlockSize = maxInt64 - } - if c.CheckSum == 0 { - c.CheckSum = CRC64 - } - if c.NoCheckSum { - c.CheckSum = None - } -} - -// Verify checks the configuration for errors. Zero values will be -// replaced by default values. -func (c *WriterConfig) Verify() error { - if c == nil { - return errors.New("xz: writer configuration is nil") - } - c.fill() - lc := lzma.Writer2Config{ - Properties: c.Properties, - DictCap: c.DictCap, - BufSize: c.BufSize, - Matcher: c.Matcher, - } - if err := lc.Verify(); err != nil { - return err - } - if c.BlockSize <= 0 { - return errors.New("xz: block size out of range") - } - if err := verifyFlags(c.CheckSum); err != nil { - return err - } - return nil -} - -// filters creates the filter list for the given parameters. -func (c *WriterConfig) filters() []filter { - return []filter{&lzmaFilter{int64(c.DictCap)}} -} - -// maxInt64 defines the maximum 64-bit signed integer. -const maxInt64 = 1<<63 - 1 - -// verifyFilters checks the filter list for the length and the right -// sequence of filters. 
-func verifyFilters(f []filter) error { - if len(f) == 0 { - return errors.New("xz: no filters") - } - if len(f) > 4 { - return errors.New("xz: more than four filters") - } - for _, g := range f[:len(f)-1] { - if g.last() { - return errors.New("xz: last filter is not last") - } - } - if !f[len(f)-1].last() { - return errors.New("xz: wrong last filter") - } - return nil -} - -// newFilterWriteCloser converts a filter list into a WriteCloser that -// can be used by a blockWriter. -func (c *WriterConfig) newFilterWriteCloser(w io.Writer, f []filter) (fw io.WriteCloser, err error) { - if err = verifyFilters(f); err != nil { - return nil, err - } - fw = nopWriteCloser(w) - for i := len(f) - 1; i >= 0; i-- { - fw, err = f[i].writeCloser(fw, c) - if err != nil { - return nil, err - } - } - return fw, nil -} - -// nopWCloser implements a WriteCloser with a Close method not doing -// anything. -type nopWCloser struct { - io.Writer -} - -// Close returns nil and doesn't do anything else. -func (c nopWCloser) Close() error { - return nil -} - -// nopWriteCloser converts the Writer into a WriteCloser with a Close -// function that does nothing beside returning nil. -func nopWriteCloser(w io.Writer) io.WriteCloser { - return nopWCloser{w} -} - -// Writer compresses data written to it. It is an io.WriteCloser. -type Writer struct { - WriterConfig - - xz io.Writer - bw *blockWriter - newHash func() hash.Hash - h header - index []record - closed bool -} - -// newBlockWriter creates a new block writer writes the header out. -func (w *Writer) newBlockWriter() error { - var err error - w.bw, err = w.WriterConfig.newBlockWriter(w.xz, w.newHash()) - if err != nil { - return err - } - if err = w.bw.writeHeader(w.xz); err != nil { - return err - } - return nil -} - -// closeBlockWriter closes a block writer and records the sizes in the -// index. 
-func (w *Writer) closeBlockWriter() error { - var err error - if err = w.bw.Close(); err != nil { - return err - } - w.index = append(w.index, w.bw.record()) - return nil -} - -// NewWriter creates a new xz writer using default parameters. -func NewWriter(xz io.Writer) (w *Writer, err error) { - return WriterConfig{}.NewWriter(xz) -} - -// NewWriter creates a new Writer using the given configuration parameters. -func (c WriterConfig) NewWriter(xz io.Writer) (w *Writer, err error) { - if err = c.Verify(); err != nil { - return nil, err - } - w = &Writer{ - WriterConfig: c, - xz: xz, - h: header{c.CheckSum}, - index: make([]record, 0, 4), - } - if w.newHash, err = newHashFunc(c.CheckSum); err != nil { - return nil, err - } - data, err := w.h.MarshalBinary() - if err != nil { - return nil, fmt.Errorf("w.h.MarshalBinary(): error %w", err) - } - if _, err = xz.Write(data); err != nil { - return nil, err - } - if err = w.newBlockWriter(); err != nil { - return nil, err - } - return w, nil - -} - -// Write compresses the uncompressed data provided. -func (w *Writer) Write(p []byte) (n int, err error) { - if w.closed { - return 0, errClosed - } - for { - k, err := w.bw.Write(p[n:]) - n += k - if err != errNoSpace { - return n, err - } - if err = w.closeBlockWriter(); err != nil { - return n, err - } - if err = w.newBlockWriter(); err != nil { - return n, err - } - } -} - -// Close closes the writer and adds the footer to the Writer. Close -// doesn't close the underlying writer. -func (w *Writer) Close() error { - if w.closed { - return errClosed - } - w.closed = true - var err error - if err = w.closeBlockWriter(); err != nil { - return err - } - - f := footer{flags: w.h.flags} - if f.indexSize, err = writeIndex(w.xz, w.index); err != nil { - return err - } - data, err := f.MarshalBinary() - if err != nil { - return err - } - if _, err = w.xz.Write(data); err != nil { - return err - } - return nil -} - -// countingWriter is a writer that counts all data written to it. 
-type countingWriter struct { - w io.Writer - n int64 -} - -// Write writes data to the countingWriter. -func (cw *countingWriter) Write(p []byte) (n int, err error) { - n, err = cw.w.Write(p) - cw.n += int64(n) - if err == nil && cw.n < 0 { - return n, errors.New("xz: counter overflow") - } - return -} - -// blockWriter is writes a single block. -type blockWriter struct { - cxz countingWriter - // mw combines io.WriteCloser w and the hash. - mw io.Writer - w io.WriteCloser - n int64 - blockSize int64 - closed bool - headerLen int - - filters []filter - hash hash.Hash -} - -// newBlockWriter creates a new block writer. -func (c *WriterConfig) newBlockWriter(xz io.Writer, hash hash.Hash) (bw *blockWriter, err error) { - bw = &blockWriter{ - cxz: countingWriter{w: xz}, - blockSize: c.BlockSize, - filters: c.filters(), - hash: hash, - } - bw.w, err = c.newFilterWriteCloser(&bw.cxz, bw.filters) - if err != nil { - return nil, err - } - if bw.hash.Size() != 0 { - bw.mw = io.MultiWriter(bw.w, bw.hash) - } else { - bw.mw = bw.w - } - return bw, nil -} - -// writeHeader writes the header. If the function is called after Close -// the commpressedSize and uncompressedSize fields will be filled. -func (bw *blockWriter) writeHeader(w io.Writer) error { - h := blockHeader{ - compressedSize: -1, - uncompressedSize: -1, - filters: bw.filters, - } - if bw.closed { - h.compressedSize = bw.compressedSize() - h.uncompressedSize = bw.uncompressedSize() - } - data, err := h.MarshalBinary() - if err != nil { - return err - } - if _, err = w.Write(data); err != nil { - return err - } - bw.headerLen = len(data) - return nil -} - -// compressed size returns the amount of data written to the underlying -// stream. 
-func (bw *blockWriter) compressedSize() int64 { - return bw.cxz.n -} - -// uncompressedSize returns the number of data written to the -// blockWriter -func (bw *blockWriter) uncompressedSize() int64 { - return bw.n -} - -// unpaddedSize returns the sum of the header length, the uncompressed -// size of the block and the hash size. -func (bw *blockWriter) unpaddedSize() int64 { - if bw.headerLen <= 0 { - panic("xz: block header not written") - } - n := int64(bw.headerLen) - n += bw.compressedSize() - n += int64(bw.hash.Size()) - return n -} - -// record returns the record for the current stream. Call Close before -// calling this method. -func (bw *blockWriter) record() record { - return record{bw.unpaddedSize(), bw.uncompressedSize()} -} - -var errClosed = errors.New("xz: writer already closed") - -var errNoSpace = errors.New("xz: no space") - -// Write writes uncompressed data to the block writer. -func (bw *blockWriter) Write(p []byte) (n int, err error) { - if bw.closed { - return 0, errClosed - } - - t := bw.blockSize - bw.n - if int64(len(p)) > t { - err = errNoSpace - p = p[:t] - } - - var werr error - n, werr = bw.mw.Write(p) - bw.n += int64(n) - if werr != nil { - return n, werr - } - return n, err -} - -// Close closes the writer. 
-func (bw *blockWriter) Close() error { - if bw.closed { - return errClosed - } - bw.closed = true - if err := bw.w.Close(); err != nil { - return err - } - s := bw.hash.Size() - k := padLen(bw.cxz.n) - p := make([]byte, k+s) - bw.hash.Sum(p[k:k]) - if _, err := bw.cxz.w.Write(p); err != nil { - return err - } - return nil -} diff --git a/vendor/github.com/xi2/xz/AUTHORS b/vendor/github.com/xi2/xz/AUTHORS deleted file mode 100644 index 657330e1fa..0000000000 --- a/vendor/github.com/xi2/xz/AUTHORS +++ /dev/null @@ -1,8 +0,0 @@ -# Package xz authors - -Michael Cross - -# XZ Embedded authors - -Lasse Collin -Igor Pavlov diff --git a/vendor/github.com/xi2/xz/LICENSE b/vendor/github.com/xi2/xz/LICENSE deleted file mode 100644 index b56f2e6a2c..0000000000 --- a/vendor/github.com/xi2/xz/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Licensing of github.com/xi2/xz -============================== - - This Go package is a modified version of - - XZ Embedded - - The contents of the testdata directory are modified versions of - the test files from - - XZ Utils - - All the files in this package have been written by Michael Cross, - Lasse Collin and/or Igor PavLov. All these files have been put - into the public domain. You can do whatever you want with these - files. - - This software is provided "as is", without any warranty. diff --git a/vendor/github.com/xi2/xz/README.md b/vendor/github.com/xi2/xz/README.md deleted file mode 100644 index 2190af553d..0000000000 --- a/vendor/github.com/xi2/xz/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Xz - -Package xz implements XZ decompression natively in Go. - -Documentation at . - -Download and install with `go get github.com/xi2/xz`. - -If you need compression as well as decompression, you might want to -look at . 
diff --git a/vendor/github.com/xi2/xz/dec_bcj.go b/vendor/github.com/xi2/xz/dec_bcj.go deleted file mode 100644 index a8a3df920f..0000000000 --- a/vendor/github.com/xi2/xz/dec_bcj.go +++ /dev/null @@ -1,461 +0,0 @@ -/* - * Branch/Call/Jump (BCJ) filter decoders - * - * Authors: Lasse Collin - * Igor Pavlov - * - * Translation to Go: Michael Cross - * - * This file has been put into the public domain. - * You can do whatever you want with this file. - */ - -package xz - -/* from linux/lib/xz/xz_dec_bcj.c *************************************/ - -type xzDecBCJ struct { - /* Type of the BCJ filter being used */ - typ xzFilterID - /* - * Return value of the next filter in the chain. We need to preserve - * this information across calls, because we must not call the next - * filter anymore once it has returned xzStreamEnd - */ - ret xzRet - /* - * Absolute position relative to the beginning of the uncompressed - * data (in a single .xz Block). - */ - pos int - /* x86 filter state */ - x86PrevMask uint32 - /* Temporary space to hold the variables from xzBuf */ - out []byte - outPos int - temp struct { - /* Amount of already filtered data in the beginning of buf */ - filtered int - /* - * Buffer to hold a mix of filtered and unfiltered data. This - * needs to be big enough to hold Alignment + 2 * Look-ahead: - * - * Type Alignment Look-ahead - * x86 1 4 - * PowerPC 4 0 - * IA-64 16 0 - * ARM 4 0 - * ARM-Thumb 2 2 - * SPARC 4 0 - */ - buf []byte // slice buf will be backed by bufArray - bufArray [16]byte - } -} - -/* - * This is used to test the most significant byte of a memory address - * in an x86 instruction. 
- */ -func bcjX86TestMSByte(b byte) bool { - return b == 0x00 || b == 0xff -} - -func bcjX86Filter(s *xzDecBCJ, buf []byte) int { - var maskToAllowedStatus = []bool{ - true, true, true, false, true, false, false, false, - } - var maskToBitNum = []byte{0, 1, 2, 2, 3, 3, 3, 3} - var i int - var prevPos int = -1 - var prevMask uint32 = s.x86PrevMask - var src uint32 - var dest uint32 - var j uint32 - var b byte - if len(buf) <= 4 { - return 0 - } - for i = 0; i < len(buf)-4; i++ { - if buf[i]&0xfe != 0xe8 { - continue - } - prevPos = i - prevPos - if prevPos > 3 { - prevMask = 0 - } else { - prevMask = (prevMask << (uint(prevPos) - 1)) & 7 - if prevMask != 0 { - b = buf[i+4-int(maskToBitNum[prevMask])] - if !maskToAllowedStatus[prevMask] || bcjX86TestMSByte(b) { - prevPos = i - prevMask = prevMask<<1 | 1 - continue - } - } - } - prevPos = i - if bcjX86TestMSByte(buf[i+4]) { - src = getLE32(buf[i+1:]) - for { - dest = src - uint32(s.pos+i+5) - if prevMask == 0 { - break - } - j = uint32(maskToBitNum[prevMask]) * 8 - b = byte(dest >> (24 - j)) - if !bcjX86TestMSByte(b) { - break - } - src = dest ^ (1<<(32-j) - 1) - } - dest &= 0x01FFFFFF - dest |= 0 - dest&0x01000000 - putLE32(dest, buf[i+1:]) - i += 4 - } else { - prevMask = prevMask<<1 | 1 - } - } - prevPos = i - prevPos - if prevPos > 3 { - s.x86PrevMask = 0 - } else { - s.x86PrevMask = prevMask << (uint(prevPos) - 1) - } - return i -} - -func bcjPowerPCFilter(s *xzDecBCJ, buf []byte) int { - var i int - var instr uint32 - for i = 0; i+4 <= len(buf); i += 4 { - instr = getBE32(buf[i:]) - if instr&0xFC000003 == 0x48000001 { - instr &= 0x03FFFFFC - instr -= uint32(s.pos + i) - instr &= 0x03FFFFFC - instr |= 0x48000001 - putBE32(instr, buf[i:]) - } - } - return i -} - -var bcjIA64BranchTable = [...]byte{ - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 4, 4, 6, 6, 0, 0, 7, 7, - 4, 4, 0, 0, 4, 4, 0, 0, -} - -func bcjIA64Filter(s *xzDecBCJ, buf []byte) int { - var branchTable = bcjIA64BranchTable[:] - /* - * The 
local variables take a little bit stack space, but it's less - * than what LZMA2 decoder takes, so it doesn't make sense to reduce - * stack usage here without doing that for the LZMA2 decoder too. - */ - /* Loop counters */ - var i int - var j int - /* Instruction slot (0, 1, or 2) in the 128-bit instruction word */ - var slot uint32 - /* Bitwise offset of the instruction indicated by slot */ - var bitPos uint32 - /* bit_pos split into byte and bit parts */ - var bytePos uint32 - var bitRes uint32 - /* Address part of an instruction */ - var addr uint32 - /* Mask used to detect which instructions to convert */ - var mask uint32 - /* 41-bit instruction stored somewhere in the lowest 48 bits */ - var instr uint64 - /* Instruction normalized with bit_res for easier manipulation */ - var norm uint64 - for i = 0; i+16 <= len(buf); i += 16 { - mask = uint32(branchTable[buf[i]&0x1f]) - for slot, bitPos = 0, 5; slot < 3; slot, bitPos = slot+1, bitPos+41 { - if (mask>>slot)&1 == 0 { - continue - } - bytePos = bitPos >> 3 - bitRes = bitPos & 7 - instr = 0 - for j = 0; j < 6; j++ { - instr |= uint64(buf[i+j+int(bytePos)]) << (8 * uint(j)) - } - norm = instr >> bitRes - if (norm>>37)&0x0f == 0x05 && (norm>>9)&0x07 == 0 { - addr = uint32((norm >> 13) & 0x0fffff) - addr |= (uint32(norm>>36) & 1) << 20 - addr <<= 4 - addr -= uint32(s.pos + i) - addr >>= 4 - norm &= ^(uint64(0x8fffff) << 13) - norm |= uint64(addr&0x0fffff) << 13 - norm |= uint64(addr&0x100000) << (36 - 20) - instr &= 1<> (8 * uint(j))) - } - } - } - } - return i -} - -func bcjARMFilter(s *xzDecBCJ, buf []byte) int { - var i int - var addr uint32 - for i = 0; i+4 <= len(buf); i += 4 { - if buf[i+3] == 0xeb { - addr = uint32(buf[i]) | uint32(buf[i+1])<<8 | - uint32(buf[i+2])<<16 - addr <<= 2 - addr -= uint32(s.pos + i + 8) - addr >>= 2 - buf[i] = byte(addr) - buf[i+1] = byte(addr >> 8) - buf[i+2] = byte(addr >> 16) - } - } - return i -} - -func bcjARMThumbFilter(s *xzDecBCJ, buf []byte) int { - var i int - var addr 
uint32 - for i = 0; i+4 <= len(buf); i += 2 { - if buf[i+1]&0xf8 == 0xf0 && buf[i+3]&0xf8 == 0xf8 { - addr = uint32(buf[i+1]&0x07)<<19 | - uint32(buf[i])<<11 | - uint32(buf[i+3]&0x07)<<8 | - uint32(buf[i+2]) - addr <<= 1 - addr -= uint32(s.pos + i + 4) - addr >>= 1 - buf[i+1] = byte(0xf0 | (addr>>19)&0x07) - buf[i] = byte(addr >> 11) - buf[i+3] = byte(0xf8 | (addr>>8)&0x07) - buf[i+2] = byte(addr) - i += 2 - } - } - return i -} - -func bcjSPARCFilter(s *xzDecBCJ, buf []byte) int { - var i int - var instr uint32 - for i = 0; i+4 <= len(buf); i += 4 { - instr = getBE32(buf[i:]) - if instr>>22 == 0x100 || instr>>22 == 0x1ff { - instr <<= 2 - instr -= uint32(s.pos + i) - instr >>= 2 - instr = (0x40000000 - instr&0x400000) | - 0x40000000 | (instr & 0x3FFFFF) - putBE32(instr, buf[i:]) - } - } - return i -} - -/* - * Apply the selected BCJ filter. Update *pos and s.pos to match the amount - * of data that got filtered. - */ -func bcjApply(s *xzDecBCJ, buf []byte, pos *int) { - var filtered int - buf = buf[*pos:] - switch s.typ { - case idBCJX86: - filtered = bcjX86Filter(s, buf) - case idBCJPowerPC: - filtered = bcjPowerPCFilter(s, buf) - case idBCJIA64: - filtered = bcjIA64Filter(s, buf) - case idBCJARM: - filtered = bcjARMFilter(s, buf) - case idBCJARMThumb: - filtered = bcjARMThumbFilter(s, buf) - case idBCJSPARC: - filtered = bcjSPARCFilter(s, buf) - default: - /* Never reached */ - } - *pos += filtered - s.pos += filtered -} - -/* - * Flush pending filtered data from temp to the output buffer. - * Move the remaining mixture of possibly filtered and unfiltered - * data to the beginning of temp. 
- */ -func bcjFlush(s *xzDecBCJ, b *xzBuf) { - var copySize int - copySize = len(b.out) - b.outPos - if copySize > s.temp.filtered { - copySize = s.temp.filtered - } - copy(b.out[b.outPos:], s.temp.buf[:copySize]) - b.outPos += copySize - s.temp.filtered -= copySize - copy(s.temp.buf, s.temp.buf[copySize:]) - s.temp.buf = s.temp.buf[:len(s.temp.buf)-copySize] -} - -/* - * Decode raw stream which has a BCJ filter as the first filter. - * - * The BCJ filter functions are primitive in sense that they process the - * data in chunks of 1-16 bytes. To hide this issue, this function does - * some buffering. - */ -func xzDecBCJRun(s *xzDecBCJ, b *xzBuf, chain func(*xzBuf) xzRet) xzRet { - var outStart int - /* - * Flush pending already filtered data to the output buffer. Return - * immediately if we couldn't flush everything, or if the next - * filter in the chain had already returned xzStreamEnd. - */ - if s.temp.filtered > 0 { - bcjFlush(s, b) - if s.temp.filtered > 0 { - return xzOK - } - if s.ret == xzStreamEnd { - return xzStreamEnd - } - } - /* - * If we have more output space than what is currently pending in - * temp, copy the unfiltered data from temp to the output buffer - * and try to fill the output buffer by decoding more data from the - * next filter in the chain. Apply the BCJ filter on the new data - * in the output buffer. If everything cannot be filtered, copy it - * to temp and rewind the output buffer position accordingly. - * - * This needs to be always run when len(temp.buf) == 0 to handle a special - * case where the output buffer is full and the next filter has no - * more output coming but hasn't returned xzStreamEnd yet. 
- */ - if len(s.temp.buf) < len(b.out)-b.outPos || len(s.temp.buf) == 0 { - outStart = b.outPos - copy(b.out[b.outPos:], s.temp.buf) - b.outPos += len(s.temp.buf) - s.ret = chain(b) - if s.ret != xzStreamEnd && s.ret != xzOK { - return s.ret - } - bcjApply(s, b.out[:b.outPos], &outStart) - /* - * As an exception, if the next filter returned xzStreamEnd, - * we can do that too, since the last few bytes that remain - * unfiltered are meant to remain unfiltered. - */ - if s.ret == xzStreamEnd { - return xzStreamEnd - } - s.temp.buf = s.temp.bufArray[:b.outPos-outStart] - b.outPos -= len(s.temp.buf) - copy(s.temp.buf, b.out[b.outPos:]) - /* - * If there wasn't enough input to the next filter to fill - * the output buffer with unfiltered data, there's no point - * to try decoding more data to temp. - */ - if b.outPos+len(s.temp.buf) < len(b.out) { - return xzOK - } - } - /* - * We have unfiltered data in temp. If the output buffer isn't full - * yet, try to fill the temp buffer by decoding more data from the - * next filter. Apply the BCJ filter on temp. Then we hopefully can - * fill the actual output buffer by copying filtered data from temp. - * A mix of filtered and unfiltered data may be left in temp; it will - * be taken care on the next call to this function. - */ - if b.outPos < len(b.out) { - /* Make b.out temporarily point to s.temp. */ - s.out = b.out - s.outPos = b.outPos - b.out = s.temp.bufArray[:] - b.outPos = len(s.temp.buf) - s.ret = chain(b) - s.temp.buf = s.temp.bufArray[:b.outPos] - b.out = s.out - b.outPos = s.outPos - if s.ret != xzOK && s.ret != xzStreamEnd { - return s.ret - } - bcjApply(s, s.temp.buf, &s.temp.filtered) - /* - * If the next filter returned xzStreamEnd, we mark that - * everything is filtered, since the last unfiltered bytes - * of the stream are meant to be left as is. 
- */ - if s.ret == xzStreamEnd { - s.temp.filtered = len(s.temp.buf) - } - bcjFlush(s, b) - if s.temp.filtered > 0 { - return xzOK - } - } - return s.ret -} - -/* - * Allocate memory for BCJ decoders. xzDecBCJReset must be used before - * calling xzDecBCJRun. - */ -func xzDecBCJCreate() *xzDecBCJ { - return new(xzDecBCJ) -} - -/* - * Decode the Filter ID of a BCJ filter and check the start offset is - * valid. Returns xzOK if the given Filter ID and offset is - * supported. Otherwise xzOptionsError is returned. - */ -func xzDecBCJReset(s *xzDecBCJ, id xzFilterID, offset int) xzRet { - switch id { - case idBCJX86: - case idBCJPowerPC: - case idBCJIA64: - case idBCJARM: - case idBCJARMThumb: - case idBCJSPARC: - default: - /* Unsupported Filter ID */ - return xzOptionsError - } - // check offset is a multiple of alignment - switch id { - case idBCJPowerPC, idBCJARM, idBCJSPARC: - if offset%4 != 0 { - return xzOptionsError - } - case idBCJIA64: - if offset%16 != 0 { - return xzOptionsError - } - case idBCJARMThumb: - if offset%2 != 0 { - return xzOptionsError - } - } - s.typ = id - s.ret = xzOK - s.pos = offset - s.x86PrevMask = 0 - s.temp.filtered = 0 - s.temp.buf = nil - return xzOK -} diff --git a/vendor/github.com/xi2/xz/dec_delta.go b/vendor/github.com/xi2/xz/dec_delta.go deleted file mode 100644 index 19df590862..0000000000 --- a/vendor/github.com/xi2/xz/dec_delta.go +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Delta decoder - * - * Author: Lasse Collin - * - * Translation to Go: Michael Cross - * - * This file has been put into the public domain. - * You can do whatever you want with this file. - */ - -package xz - -type xzDecDelta struct { - delta [256]byte - pos byte - distance int // in range [1, 256] -} - -/* - * Decode raw stream which has a delta filter as the first filter. 
- */ -func xzDecDeltaRun(s *xzDecDelta, b *xzBuf, chain func(*xzBuf) xzRet) xzRet { - outStart := b.outPos - ret := chain(b) - for i := outStart; i < b.outPos; i++ { - tmp := b.out[i] + s.delta[byte(s.distance+int(s.pos))] - s.delta[s.pos] = tmp - b.out[i] = tmp - s.pos-- - } - return ret -} - -/* - * Allocate memory for a delta decoder. xzDecDeltaReset must be used - * before calling xzDecDeltaRun. - */ -func xzDecDeltaCreate() *xzDecDelta { - return new(xzDecDelta) -} - -/* - * Returns xzOK if the given distance is valid. Otherwise - * xzOptionsError is returned. - */ -func xzDecDeltaReset(s *xzDecDelta, distance int) xzRet { - if distance < 1 || distance > 256 { - return xzOptionsError - } - s.delta = [256]byte{} - s.pos = 0 - s.distance = distance - return xzOK -} diff --git a/vendor/github.com/xi2/xz/dec_lzma2.go b/vendor/github.com/xi2/xz/dec_lzma2.go deleted file mode 100644 index fa42e47157..0000000000 --- a/vendor/github.com/xi2/xz/dec_lzma2.go +++ /dev/null @@ -1,1235 +0,0 @@ -/* - * LZMA2 decoder - * - * Authors: Lasse Collin - * Igor Pavlov - * - * Translation to Go: Michael Cross - * - * This file has been put into the public domain. - * You can do whatever you want with this file. - */ - -package xz - -/* from linux/lib/xz/xz_lzma2.h ***************************************/ - -/* Range coder constants */ -const ( - rcShiftBits = 8 - rcTopBits = 24 - rcTopValue = 1 << rcTopBits - rcBitModelTotalBits = 11 - rcBitModelTotal = 1 << rcBitModelTotalBits - rcMoveBits = 5 -) - -/* - * Maximum number of position states. A position state is the lowest pb - * number of bits of the current uncompressed offset. In some places there - * are different sets of probabilities for different position states. - */ -const posStatesMax = 1 << 4 - -/* - * lzmaState is used to track which LZMA symbols have occurred most recently - * and in which order. This information is used to predict the next symbol. 
- * - * Symbols: - * - Literal: One 8-bit byte - * - Match: Repeat a chunk of data at some distance - * - Long repeat: Multi-byte match at a recently seen distance - * - Short repeat: One-byte repeat at a recently seen distance - * - * The symbol names are in from STATE-oldest-older-previous. REP means - * either short or long repeated match, and NONLIT means any non-literal. - */ -type lzmaState int - -const ( - stateLitLit lzmaState = iota - stateMatchLitLit - stateRepLitLit - stateShortrepLitLit - stateMatchLit - stateRepList - stateShortrepLit - stateLitMatch - stateLitLongrep - stateLitShortrep - stateNonlitMatch - stateNonlitRep -) - -/* Total number of states */ -const states = 12 - -/* The lowest 7 states indicate that the previous state was a literal. */ -const litStates = 7 - -/* Indicate that the latest symbol was a literal. */ -func lzmaStateLiteral(state *lzmaState) { - switch { - case *state <= stateShortrepLitLit: - *state = stateLitLit - case *state <= stateLitShortrep: - *state -= 3 - default: - *state -= 6 - } -} - -/* Indicate that the latest symbol was a match. */ -func lzmaStateMatch(state *lzmaState) { - if *state < litStates { - *state = stateLitMatch - } else { - *state = stateNonlitMatch - } -} - -/* Indicate that the latest state was a long repeated match. */ -func lzmaStateLongRep(state *lzmaState) { - if *state < litStates { - *state = stateLitLongrep - } else { - *state = stateNonlitRep - } -} - -/* Indicate that the latest symbol was a short match. */ -func lzmaStateShortRep(state *lzmaState) { - if *state < litStates { - *state = stateLitShortrep - } else { - *state = stateNonlitRep - } -} - -/* Test if the previous symbol was a literal. 
*/ -func lzmaStateIsLiteral(state lzmaState) bool { - return state < litStates -} - -/* Each literal coder is divided in three sections: - * - 0x001-0x0FF: Without match byte - * - 0x101-0x1FF: With match byte; match bit is 0 - * - 0x201-0x2FF: With match byte; match bit is 1 - * - * Match byte is used when the previous LZMA symbol was something else than - * a literal (that is, it was some kind of match). - */ -const literalCoderSize = 0x300 - -/* Maximum number of literal coders */ -const literalCodersMax = 1 << 4 - -/* Minimum length of a match is two bytes. */ -const matchLenMin = 2 - -/* Match length is encoded with 4, 5, or 10 bits. - * - * Length Bits - * 2-9 4 = Choice=0 + 3 bits - * 10-17 5 = Choice=1 + Choice2=0 + 3 bits - * 18-273 10 = Choice=1 + Choice2=1 + 8 bits - */ -const ( - lenLowBits = 3 - lenLowSymbols = 1 << lenLowBits - lenMidBits = 3 - lenMidSymbols = 1 << lenMidBits - lenHighBits = 8 - lenHighSymbols = 1 << lenHighBits -) - -/* - * Different sets of probabilities are used for match distances that have - * very short match length: Lengths of 2, 3, and 4 bytes have a separate - * set of probabilities for each length. The matches with longer length - * use a shared set of probabilities. - */ -const distStates = 4 - -/* - * Get the index of the appropriate probability array for decoding - * the distance slot. - */ -func lzmaGetDistState(len uint32) uint32 { - if len < distStates+matchLenMin { - return len - matchLenMin - } else { - return distStates - 1 - } -} - -/* - * The highest two bits of a 32-bit match distance are encoded using six bits. - * This six-bit value is called a distance slot. This way encoding a 32-bit - * value takes 6-36 bits, larger values taking more bits. - */ -const ( - distSlotBits = 6 - distSlots = 1 << distSlotBits -) - -/* Match distances up to 127 are fully encoded using probabilities. 
Since - * the highest two bits (distance slot) are always encoded using six bits, - * the distances 0-3 don't need any additional bits to encode, since the - * distance slot itself is the same as the actual distance. distModelStart - * indicates the first distance slot where at least one additional bit is - * needed. - */ -const distModelStart = 4 - -/* - * Match distances greater than 127 are encoded in three pieces: - * - distance slot: the highest two bits - * - direct bits: 2-26 bits below the highest two bits - * - alignment bits: four lowest bits - * - * Direct bits don't use any probabilities. - * - * The distance slot value of 14 is for distances 128-191. - */ -const distModelEnd = 14 - -/* Distance slots that indicate a distance <= 127. */ -const ( - fullDistancesBits = distModelEnd / 2 - fullDistances = 1 << fullDistancesBits -) - -/* - * For match distances greater than 127, only the highest two bits and the - * lowest four bits (alignment) is encoded using probabilities. - */ -const ( - alignBits = 4 - alignSize = 1 << alignBits -) - -/* from linux/lib/xz/xz_dec_lzma2.c ***********************************/ - -/* - * Range decoder initialization eats the first five bytes of each LZMA chunk. - */ -const rcInitBytes = 5 - -/* - * Minimum number of usable input buffer to safely decode one LZMA symbol. - * The worst case is that we decode 22 bits using probabilities and 26 - * direct bits. This may decode at maximum of 20 bytes of input. However, - * lzmaMain does an extra normalization before returning, thus we - * need to put 21 here. - */ -const lzmaInRequired = 21 - -/* - * Dictionary (history buffer) - * - * These are always true: - * start <= pos <= full <= end - * pos <= limit <= end - * end == size - * size <= sizeMax - * len(buf) <= size - */ -type dictionary struct { - /* The history buffer */ - buf []byte - /* Old position in buf (before decoding more data) */ - start uint32 - /* Position in buf */ - pos uint32 - /* - * How full dictionary is. 
This is used to detect corrupt input that - * would read beyond the beginning of the uncompressed stream. - */ - full uint32 - /* Write limit; we don't write to buf[limit] or later bytes. */ - limit uint32 - /* - * End of the dictionary buffer. This is the same as the - * dictionary size. - */ - end uint32 - /* - * Size of the dictionary as specified in Block Header. This is used - * together with "full" to detect corrupt input that would make us - * read beyond the beginning of the uncompressed stream. - */ - size uint32 - /* Maximum allowed dictionary size. */ - sizeMax uint32 -} - -/* Range decoder */ -type rcDec struct { - rnge uint32 - code uint32 - /* - * Number of initializing bytes remaining to be read - * by rcReadInit. - */ - initBytesLeft uint32 - /* - * Buffer from which we read our input. It can be either - * temp.buf or the caller-provided input buffer. - */ - in []byte - inPos int - inLimit int -} - -/* Probabilities for a length decoder. */ -type lzmaLenDec struct { - /* Probability of match length being at least 10 */ - choice uint16 - /* Probability of match length being at least 18 */ - choice2 uint16 - /* Probabilities for match lengths 2-9 */ - low [posStatesMax][lenLowSymbols]uint16 - /* Probabilities for match lengths 10-17 */ - mid [posStatesMax][lenMidSymbols]uint16 - /* Probabilities for match lengths 18-273 */ - high [lenHighSymbols]uint16 -} - -type lzmaDec struct { - /* Distances of latest four matches */ - rep0 uint32 - rep1 uint32 - rep2 uint32 - rep3 uint32 - /* Types of the most recently seen LZMA symbols */ - state lzmaState - /* - * Length of a match. This is updated so that dictRepeat can - * be called again to finish repeating the whole match. 
- */ - len uint32 - /* - * LZMA properties or related bit masks (number of literal - * context bits, a mask derived from the number of literal - * position bits, and a mask derived from the number - * position bits) - */ - lc uint32 - literalPosMask uint32 - posMask uint32 - /* If 1, it's a match. Otherwise it's a single 8-bit literal. */ - isMatch [states][posStatesMax]uint16 - /* If 1, it's a repeated match. The distance is one of rep0 .. rep3. */ - isRep [states]uint16 - /* - * If 0, distance of a repeated match is rep0. - * Otherwise check is_rep1. - */ - isRep0 [states]uint16 - /* - * If 0, distance of a repeated match is rep1. - * Otherwise check is_rep2. - */ - isRep1 [states]uint16 - /* If 0, distance of a repeated match is rep2. Otherwise it is rep3. */ - isRep2 [states]uint16 - /* - * If 1, the repeated match has length of one byte. Otherwise - * the length is decoded from rep_len_decoder. - */ - isRep0Long [states][posStatesMax]uint16 - /* - * Probability tree for the highest two bits of the match - * distance. There is a separate probability tree for match - * lengths of 2 (i.e. MATCH_LEN_MIN), 3, 4, and [5, 273]. - */ - distSlot [distStates][distSlots]uint16 - /* - * Probility trees for additional bits for match distance - * when the distance is in the range [4, 127]. - */ - distSpecial [fullDistances - distModelEnd]uint16 - /* - * Probability tree for the lowest four bits of a match - * distance that is equal to or greater than 128. - */ - distAlign [alignSize]uint16 - /* Length of a normal match */ - matchLenDec lzmaLenDec - /* Length of a repeated match */ - repLenDec lzmaLenDec - /* Probabilities of literals */ - literal [literalCodersMax][literalCoderSize]uint16 -} - -// type of lzma2Dec.sequence -type lzma2Seq int - -const ( - seqControl lzma2Seq = iota - seqUncompressed1 - seqUncompressed2 - seqCompressed0 - seqCompressed1 - seqProperties - seqLZMAPrepare - seqLZMARun - seqCopy -) - -type lzma2Dec struct { - /* Position in xzDecLZMA2Run. 
*/ - sequence lzma2Seq - /* Next position after decoding the compressed size of the chunk. */ - nextSequence lzma2Seq - /* Uncompressed size of LZMA chunk (2 MiB at maximum) */ - uncompressed int - /* - * Compressed size of LZMA chunk or compressed/uncompressed - * size of uncompressed chunk (64 KiB at maximum) - */ - compressed int - /* - * True if dictionary reset is needed. This is false before - * the first chunk (LZMA or uncompressed). - */ - needDictReset bool - /* - * True if new LZMA properties are needed. This is false - * before the first LZMA chunk. - */ - needProps bool -} - -type xzDecLZMA2 struct { - /* - * The order below is important on x86 to reduce code size and - * it shouldn't hurt on other platforms. Everything up to and - * including lzma.pos_mask are in the first 128 bytes on x86-32, - * which allows using smaller instructions to access those - * variables. On x86-64, fewer variables fit into the first 128 - * bytes, but this is still the best order without sacrificing - * the readability by splitting the structures. - */ - rc rcDec - dict dictionary - lzma2 lzma2Dec - lzma lzmaDec - /* - * Temporary buffer which holds small number of input bytes between - * decoder calls. See lzma2LZMA for details. - */ - temp struct { - buf []byte // slice buf will be backed by bufArray - bufArray [3 * lzmaInRequired]byte - } -} - -/************** - * Dictionary * - **************/ - -/* - * Reset the dictionary state. When in single-call mode, set up the beginning - * of the dictionary to point to the actual output buffer. - */ -func dictReset(dict *dictionary, b *xzBuf) { - dict.start = 0 - dict.pos = 0 - dict.limit = 0 - dict.full = 0 -} - -/* Set dictionary write limit */ -func dictLimit(dict *dictionary, outMax int) { - if dict.end-dict.pos <= uint32(outMax) { - dict.limit = dict.end - } else { - dict.limit = dict.pos + uint32(outMax) - } -} - -/* Return true if at least one byte can be written into the dictionary. 
*/ -func dictHasSpace(dict *dictionary) bool { - return dict.pos < dict.limit -} - -/* - * Get a byte from the dictionary at the given distance. The distance is - * assumed to valid, or as a special case, zero when the dictionary is - * still empty. This special case is needed for single-call decoding to - * avoid writing a '\x00' to the end of the destination buffer. - */ -func dictGet(dict *dictionary, dist uint32) uint32 { - var offset uint32 = dict.pos - dist - 1 - if dist >= dict.pos { - offset += dict.end - } - if dict.full > 0 { - return uint32(dict.buf[offset]) - } - return 0 -} - -/* - * Put one byte into the dictionary. It is assumed that there is space for it. - */ -func dictPut(dict *dictionary, byte byte) { - dict.buf[dict.pos] = byte - dict.pos++ - if dict.full < dict.pos { - dict.full = dict.pos - } -} - -/* - * Repeat given number of bytes from the given distance. If the distance is - * invalid, false is returned. On success, true is returned and *len is - * updated to indicate how many bytes were left to be repeated. - */ -func dictRepeat(dict *dictionary, len *uint32, dist uint32) bool { - var back uint32 - var left uint32 - if dist >= dict.full || dist >= dict.size { - return false - } - left = dict.limit - dict.pos - if left > *len { - left = *len - } - *len -= left - back = dict.pos - dist - 1 - if dist >= dict.pos { - back += dict.end - } - for { - dict.buf[dict.pos] = dict.buf[back] - dict.pos++ - back++ - if back == dict.end { - back = 0 - } - left-- - if !(left > 0) { - break - } - } - if dict.full < dict.pos { - dict.full = dict.pos - } - return true -} - -/* Copy uncompressed data as is from input to dictionary and output buffers. 
*/ -func dictUncompressed(dict *dictionary, b *xzBuf, left *int) { - var copySize int - for *left > 0 && b.inPos < len(b.in) && b.outPos < len(b.out) { - copySize = len(b.in) - b.inPos - if copySize > len(b.out)-b.outPos { - copySize = len(b.out) - b.outPos - } - if copySize > int(dict.end-dict.pos) { - copySize = int(dict.end - dict.pos) - } - if copySize > *left { - copySize = *left - } - *left -= copySize - copy(dict.buf[dict.pos:], b.in[b.inPos:b.inPos+copySize]) - dict.pos += uint32(copySize) - if dict.full < dict.pos { - dict.full = dict.pos - } - if dict.pos == dict.end { - dict.pos = 0 - } - copy(b.out[b.outPos:], b.in[b.inPos:b.inPos+copySize]) - dict.start = dict.pos - b.outPos += copySize - b.inPos += copySize - } -} - -/* - * Flush pending data from dictionary to b.out. It is assumed that there is - * enough space in b.out. This is guaranteed because caller uses dictLimit - * before decoding data into the dictionary. - */ -func dictFlush(dict *dictionary, b *xzBuf) int { - var copySize int = int(dict.pos - dict.start) - if dict.pos == dict.end { - dict.pos = 0 - } - copy(b.out[b.outPos:], dict.buf[dict.start:dict.start+uint32(copySize)]) - dict.start = dict.pos - b.outPos += copySize - return copySize -} - -/***************** - * Range decoder * - *****************/ - -/* Reset the range decoder. */ -func rcReset(rc *rcDec) { - rc.rnge = ^uint32(0) - rc.code = 0 - rc.initBytesLeft = rcInitBytes -} - -/* - * Read the first five initial bytes into rc->code if they haven't been - * read already. (Yes, the first byte gets completely ignored.) - */ -func rcReadInit(rc *rcDec, b *xzBuf) bool { - for rc.initBytesLeft > 0 { - if b.inPos == len(b.in) { - return false - } - rc.code = rc.code<<8 + uint32(b.in[b.inPos]) - b.inPos++ - rc.initBytesLeft-- - } - return true -} - -/* Return true if there may not be enough input for the next decoding loop. 
*/ -func rcLimitExceeded(rc *rcDec) bool { - return rc.inPos > rc.inLimit -} - -/* - * Return true if it is possible (from point of view of range decoder) that - * we have reached the end of the LZMA chunk. - */ -func rcIsFinished(rc *rcDec) bool { - return rc.code == 0 -} - -/* Read the next input byte if needed. */ -func rcNormalize(rc *rcDec) { - if rc.rnge < rcTopValue { - rc.rnge <<= rcShiftBits - rc.code = rc.code<> rcBitModelTotalBits) * uint32(*prob) - if rc.code < bound { - rc.rnge = bound - *prob += (rcBitModelTotal - *prob) >> rcMoveBits - bit = false - } else { - rc.rnge -= bound - rc.code -= bound - *prob -= *prob >> rcMoveBits - bit = true - } - return bit -} - -/* Decode a bittree starting from the most significant bit. */ -func rcBittree(rc *rcDec, probs []uint16, limit uint32) uint32 { - var symbol uint32 = 1 - for { - if rcBit(rc, &probs[symbol-1]) { - symbol = symbol<<1 + 1 - } else { - symbol <<= 1 - } - if !(symbol < limit) { - break - } - } - return symbol -} - -/* Decode a bittree starting from the least significant bit. */ -func rcBittreeReverse(rc *rcDec, probs []uint16, dest *uint32, limit uint32) { - var symbol uint32 = 1 - var i uint32 = 0 - for { - if rcBit(rc, &probs[symbol-1]) { - symbol = symbol<<1 + 1 - *dest += 1 << i - } else { - symbol <<= 1 - } - i++ - if !(i < limit) { - break - } - } -} - -/* Decode direct bits (fixed fifty-fifty probability) */ -func rcDirect(rc *rcDec, dest *uint32, limit uint32) { - var mask uint32 - for { - rcNormalize(rc) - rc.rnge >>= 1 - rc.code -= rc.rnge - mask = 0 - rc.code>>31 - rc.code += rc.rnge & mask - *dest = *dest<<1 + mask + 1 - limit-- - if !(limit > 0) { - break - } - } -} - -/******** - * LZMA * - ********/ - -/* Get pointer to literal coder probability array. 
*/ -func lzmaLiteralProbs(s *xzDecLZMA2) []uint16 { - var prevByte uint32 = dictGet(&s.dict, 0) - var low uint32 = prevByte >> (8 - s.lzma.lc) - var high uint32 = (s.dict.pos & s.lzma.literalPosMask) << s.lzma.lc - return s.lzma.literal[low+high][:] -} - -/* Decode a literal (one 8-bit byte) */ -func lzmaLiteral(s *xzDecLZMA2) { - var probs []uint16 - var symbol uint32 - var matchByte uint32 - var matchBit uint32 - var offset uint32 - var i uint32 - probs = lzmaLiteralProbs(s) - if lzmaStateIsLiteral(s.lzma.state) { - symbol = rcBittree(&s.rc, probs[1:], 0x100) - } else { - symbol = 1 - matchByte = dictGet(&s.dict, s.lzma.rep0) << 1 - offset = 0x100 - for { - matchBit = matchByte & offset - matchByte <<= 1 - i = offset + matchBit + symbol - if rcBit(&s.rc, &probs[i]) { - symbol = symbol<<1 + 1 - offset &= matchBit - } else { - symbol <<= 1 - offset &= ^matchBit - } - if !(symbol < 0x100) { - break - } - } - } - dictPut(&s.dict, byte(symbol)) - lzmaStateLiteral(&s.lzma.state) -} - -/* Decode the length of the match into s.lzma.len. */ -func lzmaLen(s *xzDecLZMA2, l *lzmaLenDec, posState uint32) { - var probs []uint16 - var limit uint32 - switch { - case !rcBit(&s.rc, &l.choice): - probs = l.low[posState][:] - limit = lenLowSymbols - s.lzma.len = matchLenMin - case !rcBit(&s.rc, &l.choice2): - probs = l.mid[posState][:] - limit = lenMidSymbols - s.lzma.len = matchLenMin + lenLowSymbols - default: - probs = l.high[:] - limit = lenHighSymbols - s.lzma.len = matchLenMin + lenLowSymbols + lenMidSymbols - } - s.lzma.len += rcBittree(&s.rc, probs[1:], limit) - limit -} - -/* Decode a match. The distance will be stored in s.lzma.rep0. 
*/ -func lzmaMatch(s *xzDecLZMA2, posState uint32) { - var probs []uint16 - var distSlot uint32 - var limit uint32 - lzmaStateMatch(&s.lzma.state) - s.lzma.rep3 = s.lzma.rep2 - s.lzma.rep2 = s.lzma.rep1 - s.lzma.rep1 = s.lzma.rep0 - lzmaLen(s, &s.lzma.matchLenDec, posState) - probs = s.lzma.distSlot[lzmaGetDistState(s.lzma.len)][:] - distSlot = rcBittree(&s.rc, probs[1:], distSlots) - distSlots - if distSlot < distModelStart { - s.lzma.rep0 = distSlot - } else { - limit = distSlot>>1 - 1 - s.lzma.rep0 = 2 + distSlot&1 - if distSlot < distModelEnd { - s.lzma.rep0 <<= limit - probs = s.lzma.distSpecial[s.lzma.rep0-distSlot:] - rcBittreeReverse(&s.rc, probs, &s.lzma.rep0, limit) - } else { - rcDirect(&s.rc, &s.lzma.rep0, limit-alignBits) - s.lzma.rep0 <<= alignBits - rcBittreeReverse( - &s.rc, s.lzma.distAlign[1:], &s.lzma.rep0, alignBits) - } - } -} - -/* - * Decode a repeated match. The distance is one of the four most recently - * seen matches. The distance will be stored in s.lzma.rep0. - */ -func lzmaRepMatch(s *xzDecLZMA2, posState uint32) { - var tmp uint32 - if !rcBit(&s.rc, &s.lzma.isRep0[s.lzma.state]) { - if !rcBit(&s.rc, &s.lzma.isRep0Long[s.lzma.state][posState]) { - lzmaStateShortRep(&s.lzma.state) - s.lzma.len = 1 - return - } - } else { - if !rcBit(&s.rc, &s.lzma.isRep1[s.lzma.state]) { - tmp = s.lzma.rep1 - } else { - if !rcBit(&s.rc, &s.lzma.isRep2[s.lzma.state]) { - tmp = s.lzma.rep2 - } else { - tmp = s.lzma.rep3 - s.lzma.rep3 = s.lzma.rep2 - } - s.lzma.rep2 = s.lzma.rep1 - } - s.lzma.rep1 = s.lzma.rep0 - s.lzma.rep0 = tmp - } - lzmaStateLongRep(&s.lzma.state) - lzmaLen(s, &s.lzma.repLenDec, posState) -} - -/* LZMA decoder core */ -func lzmaMain(s *xzDecLZMA2) bool { - var posState uint32 - /* - * If the dictionary was reached during the previous call, try to - * finish the possibly pending repeat in the dictionary. 
- */ - if dictHasSpace(&s.dict) && s.lzma.len > 0 { - dictRepeat(&s.dict, &s.lzma.len, s.lzma.rep0) - } - /* - * Decode more LZMA symbols. One iteration may consume up to - * lzmaInRequired - 1 bytes. - */ - for dictHasSpace(&s.dict) && !rcLimitExceeded(&s.rc) { - posState = s.dict.pos & s.lzma.posMask - if !rcBit(&s.rc, &s.lzma.isMatch[s.lzma.state][posState]) { - lzmaLiteral(s) - } else { - if rcBit(&s.rc, &s.lzma.isRep[s.lzma.state]) { - lzmaRepMatch(s, posState) - } else { - lzmaMatch(s, posState) - } - if !dictRepeat(&s.dict, &s.lzma.len, s.lzma.rep0) { - return false - } - } - } - /* - * Having the range decoder always normalized when we are outside - * this function makes it easier to correctly handle end of the chunk. - */ - rcNormalize(&s.rc) - return true -} - -/* - * Reset the LZMA decoder and range decoder state. Dictionary is not reset - * here, because LZMA state may be reset without resetting the dictionary. - */ -func lzmaReset(s *xzDecLZMA2) { - s.lzma.state = stateLitLit - s.lzma.rep0 = 0 - s.lzma.rep1 = 0 - s.lzma.rep2 = 0 - s.lzma.rep3 = 0 - /* All probabilities are initialized to the same value, v */ - v := uint16(rcBitModelTotal / 2) - s.lzma.matchLenDec.choice = v - s.lzma.matchLenDec.choice2 = v - s.lzma.repLenDec.choice = v - s.lzma.repLenDec.choice2 = v - for _, m := range [][]uint16{ - s.lzma.isRep[:], s.lzma.isRep0[:], s.lzma.isRep1[:], - s.lzma.isRep2[:], s.lzma.distSpecial[:], s.lzma.distAlign[:], - s.lzma.matchLenDec.high[:], s.lzma.repLenDec.high[:], - } { - for j := range m { - m[j] = v - } - } - for i := range s.lzma.isMatch { - for j := range s.lzma.isMatch[i] { - s.lzma.isMatch[i][j] = v - } - } - for i := range s.lzma.isRep0Long { - for j := range s.lzma.isRep0Long[i] { - s.lzma.isRep0Long[i][j] = v - } - } - for i := range s.lzma.distSlot { - for j := range s.lzma.distSlot[i] { - s.lzma.distSlot[i][j] = v - } - } - for i := range s.lzma.literal { - for j := range s.lzma.literal[i] { - s.lzma.literal[i][j] = v - } - } - for i := 
range s.lzma.matchLenDec.low { - for j := range s.lzma.matchLenDec.low[i] { - s.lzma.matchLenDec.low[i][j] = v - } - } - for i := range s.lzma.matchLenDec.mid { - for j := range s.lzma.matchLenDec.mid[i] { - s.lzma.matchLenDec.mid[i][j] = v - } - } - for i := range s.lzma.repLenDec.low { - for j := range s.lzma.repLenDec.low[i] { - s.lzma.repLenDec.low[i][j] = v - } - } - for i := range s.lzma.repLenDec.mid { - for j := range s.lzma.repLenDec.mid[i] { - s.lzma.repLenDec.mid[i][j] = v - } - } - rcReset(&s.rc) -} - -/* - * Decode and validate LZMA properties (lc/lp/pb) and calculate the bit masks - * from the decoded lp and pb values. On success, the LZMA decoder state is - * reset and true is returned. - */ -func lzmaProps(s *xzDecLZMA2, props byte) bool { - if props > (4*5+4)*9+8 { - return false - } - s.lzma.posMask = 0 - for props >= 9*5 { - props -= 9 * 5 - s.lzma.posMask++ - } - s.lzma.posMask = 1<= 9 { - props -= 9 - s.lzma.literalPosMask++ - } - s.lzma.lc = uint32(props) - if s.lzma.lc+s.lzma.literalPosMask > 4 { - return false - } - s.lzma.literalPosMask = 1< 0 || s.lzma2.compressed == 0 { - tmp = 2*lzmaInRequired - len(s.temp.buf) - if tmp > s.lzma2.compressed-len(s.temp.buf) { - tmp = s.lzma2.compressed - len(s.temp.buf) - } - if tmp > inAvail { - tmp = inAvail - } - copy(s.temp.bufArray[len(s.temp.buf):], b.in[b.inPos:b.inPos+tmp]) - switch { - case len(s.temp.buf)+tmp == s.lzma2.compressed: - for i := len(s.temp.buf) + tmp; i < len(s.temp.bufArray); i++ { - s.temp.bufArray[i] = 0 - } - s.rc.inLimit = len(s.temp.buf) + tmp - case len(s.temp.buf)+tmp < lzmaInRequired: - s.temp.buf = s.temp.bufArray[:len(s.temp.buf)+tmp] - b.inPos += tmp - return true - default: - s.rc.inLimit = len(s.temp.buf) + tmp - lzmaInRequired - } - s.rc.in = s.temp.bufArray[:] - s.rc.inPos = 0 - if !lzmaMain(s) || s.rc.inPos > len(s.temp.buf)+tmp { - return false - } - s.lzma2.compressed -= s.rc.inPos - if s.rc.inPos < len(s.temp.buf) { - copy(s.temp.buf, s.temp.buf[s.rc.inPos:]) - 
s.temp.buf = s.temp.buf[:len(s.temp.buf)-s.rc.inPos] - return true - } - b.inPos += s.rc.inPos - len(s.temp.buf) - s.temp.buf = nil - } - inAvail = len(b.in) - b.inPos - if inAvail >= lzmaInRequired { - s.rc.in = b.in - s.rc.inPos = b.inPos - if inAvail >= s.lzma2.compressed+lzmaInRequired { - s.rc.inLimit = b.inPos + s.lzma2.compressed - } else { - s.rc.inLimit = len(b.in) - lzmaInRequired - } - if !lzmaMain(s) { - return false - } - inAvail = s.rc.inPos - b.inPos - if inAvail > s.lzma2.compressed { - return false - } - s.lzma2.compressed -= inAvail - b.inPos = s.rc.inPos - } - inAvail = len(b.in) - b.inPos - if inAvail < lzmaInRequired { - if inAvail > s.lzma2.compressed { - inAvail = s.lzma2.compressed - } - s.temp.buf = s.temp.bufArray[:inAvail] - copy(s.temp.buf, b.in[b.inPos:]) - b.inPos += inAvail - } - return true -} - -/* - * Take care of the LZMA2 control layer, and forward the job of actual LZMA - * decoding or copying of uncompressed chunks to other functions. - */ -func xzDecLZMA2Run(s *xzDecLZMA2, b *xzBuf) xzRet { - var tmp int - for b.inPos < len(b.in) || s.lzma2.sequence == seqLZMARun { - switch s.lzma2.sequence { - case seqControl: - /* - * LZMA2 control byte - * - * Exact values: - * 0x00 End marker - * 0x01 Dictionary reset followed by - * an uncompressed chunk - * 0x02 Uncompressed chunk (no dictionary reset) - * - * Highest three bits (s.control & 0xE0): - * 0xE0 Dictionary reset, new properties and state - * reset, followed by LZMA compressed chunk - * 0xC0 New properties and state reset, followed - * by LZMA compressed chunk (no dictionary - * reset) - * 0xA0 State reset using old properties, - * followed by LZMA compressed chunk (no - * dictionary reset) - * 0x80 LZMA chunk (no dictionary or state reset) - * - * For LZMA compressed chunks, the lowest five bits - * (s.control & 1F) are the highest bits of the - * uncompressed size (bits 16-20). - * - * A new LZMA2 stream must begin with a dictionary - * reset. 
The first LZMA chunk must set new - * properties and reset the LZMA state. - * - * Values that don't match anything described above - * are invalid and we return xzDataError. - */ - tmp = int(b.in[b.inPos]) - b.inPos++ - if tmp == 0x00 { - return xzStreamEnd - } - switch { - case tmp >= 0xe0 || tmp == 0x01: - s.lzma2.needProps = true - s.lzma2.needDictReset = false - dictReset(&s.dict, b) - case s.lzma2.needDictReset: - return xzDataError - } - if tmp >= 0x80 { - s.lzma2.uncompressed = (tmp & 0x1f) << 16 - s.lzma2.sequence = seqUncompressed1 - switch { - case tmp >= 0xc0: - /* - * When there are new properties, - * state reset is done at - * seqProperties. - */ - s.lzma2.needProps = false - s.lzma2.nextSequence = seqProperties - case s.lzma2.needProps: - return xzDataError - default: - s.lzma2.nextSequence = seqLZMAPrepare - if tmp >= 0xa0 { - lzmaReset(s) - } - } - } else { - if tmp > 0x02 { - return xzDataError - } - s.lzma2.sequence = seqCompressed0 - s.lzma2.nextSequence = seqCopy - } - case seqUncompressed1: - s.lzma2.uncompressed += int(b.in[b.inPos]) << 8 - b.inPos++ - s.lzma2.sequence = seqUncompressed2 - case seqUncompressed2: - s.lzma2.uncompressed += int(b.in[b.inPos]) + 1 - b.inPos++ - s.lzma2.sequence = seqCompressed0 - case seqCompressed0: - s.lzma2.compressed += int(b.in[b.inPos]) << 8 - b.inPos++ - s.lzma2.sequence = seqCompressed1 - case seqCompressed1: - s.lzma2.compressed += int(b.in[b.inPos]) + 1 - b.inPos++ - s.lzma2.sequence = s.lzma2.nextSequence - case seqProperties: - if !lzmaProps(s, b.in[b.inPos]) { - return xzDataError - } - b.inPos++ - s.lzma2.sequence = seqLZMAPrepare - fallthrough - case seqLZMAPrepare: - if s.lzma2.compressed < rcInitBytes { - return xzDataError - } - if !rcReadInit(&s.rc, b) { - return xzOK - } - s.lzma2.compressed -= rcInitBytes - s.lzma2.sequence = seqLZMARun - fallthrough - case seqLZMARun: - /* - * Set dictionary limit to indicate how much we want - * to be encoded at maximum. 
Decode new data into the - * dictionary. Flush the new data from dictionary to - * b.out. Check if we finished decoding this chunk. - * In case the dictionary got full but we didn't fill - * the output buffer yet, we may run this loop - * multiple times without changing s.lzma2.sequence. - */ - outMax := len(b.out) - b.outPos - if outMax > s.lzma2.uncompressed { - outMax = s.lzma2.uncompressed - } - dictLimit(&s.dict, outMax) - if !lzma2LZMA(s, b) { - return xzDataError - } - s.lzma2.uncompressed -= dictFlush(&s.dict, b) - switch { - case s.lzma2.uncompressed == 0: - if s.lzma2.compressed > 0 || s.lzma.len > 0 || - !rcIsFinished(&s.rc) { - return xzDataError - } - rcReset(&s.rc) - s.lzma2.sequence = seqControl - case b.outPos == len(b.out) || - b.inPos == len(b.in) && - len(s.temp.buf) < s.lzma2.compressed: - return xzOK - } - case seqCopy: - dictUncompressed(&s.dict, b, &s.lzma2.compressed) - if s.lzma2.compressed > 0 { - return xzOK - } - s.lzma2.sequence = seqControl - } - } - return xzOK -} - -/* - * Allocate memory for LZMA2 decoder. xzDecLZMA2Reset must be used - * before calling xzDecLZMA2Run. - */ -func xzDecLZMA2Create(dictMax uint32) *xzDecLZMA2 { - s := new(xzDecLZMA2) - s.dict.sizeMax = dictMax - return s -} - -/* - * Decode the LZMA2 properties (one byte) and reset the decoder. Return - * xzOK on success, xzMemlimitError if the preallocated dictionary is not - * big enough, and xzOptionsError if props indicates something that this - * decoder doesn't support. 
- */ -func xzDecLZMA2Reset(s *xzDecLZMA2, props byte) xzRet { - if props > 40 { - return xzOptionsError // Bigger than 4 GiB - } - if props == 40 { - s.dict.size = ^uint32(0) - } else { - s.dict.size = uint32(2 + props&1) - s.dict.size <<= props>>1 + 11 - } - if s.dict.size > s.dict.sizeMax { - return xzMemlimitError - } - s.dict.end = s.dict.size - if len(s.dict.buf) < int(s.dict.size) { - s.dict.buf = make([]byte, s.dict.size) - } - s.lzma.len = 0 - s.lzma2.sequence = seqControl - s.lzma2.compressed = 0 - s.lzma2.uncompressed = 0 - s.lzma2.needDictReset = true - s.temp.buf = nil - return xzOK -} diff --git a/vendor/github.com/xi2/xz/dec_stream.go b/vendor/github.com/xi2/xz/dec_stream.go deleted file mode 100644 index 9381a3c896..0000000000 --- a/vendor/github.com/xi2/xz/dec_stream.go +++ /dev/null @@ -1,932 +0,0 @@ -/* - * .xz Stream decoder - * - * Author: Lasse Collin - * - * Translation to Go: Michael Cross - * - * This file has been put into the public domain. - * You can do whatever you want with this file. - */ - -package xz - -import ( - "bytes" - "crypto/sha256" - "hash" - "hash/crc32" - "hash/crc64" -) - -/* from linux/lib/xz/xz_stream.h **************************************/ - -/* - * See the .xz file format specification at - * http://tukaani.org/xz/xz-file-format.txt - * to understand the container format. - */ -const ( - streamHeaderSize = 12 - headerMagic = "\xfd7zXZ\x00" - footerMagic = "YZ" -) - -/* - * Variable-length integer can hold a 63-bit unsigned integer or a special - * value indicating that the value is unknown. 
- */ -type vliType uint64 - -const ( - vliUnknown vliType = ^vliType(0) - /* Maximum encoded size of a VLI */ - vliBytesMax = 8 * 8 / 7 // (Sizeof(vliType) * 8 / 7) -) - -/* from linux/lib/xz/xz_dec_stream.c **********************************/ - -/* Hash used to validate the Index field */ -type xzDecHash struct { - unpadded vliType - uncompressed vliType - sha256 hash.Hash -} - -// type of xzDec.sequence -type xzDecSeq int - -const ( - seqStreamHeader xzDecSeq = iota - seqBlockStart - seqBlockHeader - seqBlockUncompress - seqBlockPadding - seqBlockCheck - seqIndex - seqIndexPadding - seqIndexCRC32 - seqStreamFooter -) - -// type of xzDec.index.sequence -type xzDecIndexSeq int - -const ( - seqIndexCount xzDecIndexSeq = iota - seqIndexUnpadded - seqIndexUncompressed -) - -/** - * xzDec - Opaque type to hold the XZ decoder state - */ -type xzDec struct { - /* Position in decMain */ - sequence xzDecSeq - /* Position in variable-length integers and Check fields */ - pos int - /* Variable-length integer decoded by decVLI */ - vli vliType - /* Saved inPos and outPos */ - inStart int - outStart int - /* CRC32 checksum hash used in Index */ - crc32 hash.Hash - /* Hashes used in Blocks */ - checkCRC32 hash.Hash - checkCRC64 hash.Hash - checkSHA256 hash.Hash - /* for checkTypes CRC32/CRC64/SHA256, check is one of the above 3 hashes */ - check hash.Hash - /* Embedded stream header struct containing CheckType */ - *Header - /* - * True if the next call to xzDecRun is allowed to return - * xzBufError. - */ - allowBufError bool - /* Information stored in Block Header */ - blockHeader struct { - /* - * Value stored in the Compressed Size field, or - * vliUnknown if Compressed Size is not present. - */ - compressed vliType - /* - * Value stored in the Uncompressed Size field, or - * vliUnknown if Uncompressed Size is not present. 
- */ - uncompressed vliType - /* Size of the Block Header field */ - size int - } - /* Information collected when decoding Blocks */ - block struct { - /* Observed compressed size of the current Block */ - compressed vliType - /* Observed uncompressed size of the current Block */ - uncompressed vliType - /* Number of Blocks decoded so far */ - count vliType - /* - * Hash calculated from the Block sizes. This is used to - * validate the Index field. - */ - hash xzDecHash - } - /* Variables needed when verifying the Index field */ - index struct { - /* Position in decIndex */ - sequence xzDecIndexSeq - /* Size of the Index in bytes */ - size vliType - /* Number of Records (matches block.count in valid files) */ - count vliType - /* - * Hash calculated from the Records (matches block.hash in - * valid files). - */ - hash xzDecHash - } - /* - * Temporary buffer needed to hold Stream Header, Block Header, - * and Stream Footer. The Block Header is the biggest (1 KiB) - * so we reserve space according to that. bufArray has to be aligned - * to a multiple of four bytes; the variables before it - * should guarantee this. - */ - temp struct { - pos int - buf []byte // slice buf will be backed by bufArray - bufArray [1024]byte - } - // chain is the function (or to be more precise, closure) which - // does the decompression and will call into the lzma2 and other - // filter code as needed. It is constructed by decBlockHeader - chain func(b *xzBuf) xzRet - // lzma2 holds the state of the last filter (which must be LZMA2) - lzma2 *xzDecLZMA2 - // pointers to allocated BCJ/Delta filters - bcjs []*xzDecBCJ - deltas []*xzDecDelta - // number of currently in use BCJ/Delta filters from the above - bcjsUsed int - deltasUsed int -} - -/* Sizes of the Check field with different Check IDs */ -var checkSizes = [...]byte{ - 0, - 4, 4, 4, - 8, 8, 8, - 16, 16, 16, - 32, 32, 32, - 64, 64, 64, -} - -/* - * Fill s.temp by copying data starting from b.in[b.inPos]. 
Caller - * must have set s.temp.pos to indicate how much data we are supposed - * to copy into s.temp.buf. Return true once s.temp.pos has reached - * len(s.temp.buf). - */ -func fillTemp(s *xzDec, b *xzBuf) bool { - copySize := len(b.in) - b.inPos - tempRemaining := len(s.temp.buf) - s.temp.pos - if copySize > tempRemaining { - copySize = tempRemaining - } - copy(s.temp.buf[s.temp.pos:], b.in[b.inPos:]) - b.inPos += copySize - s.temp.pos += copySize - if s.temp.pos == len(s.temp.buf) { - s.temp.pos = 0 - return true - } - return false -} - -/* Decode a variable-length integer (little-endian base-128 encoding) */ -func decVLI(s *xzDec, in []byte, inPos *int) xzRet { - var byte byte - if s.pos == 0 { - s.vli = 0 - } - for *inPos < len(in) { - byte = in[*inPos] - *inPos++ - s.vli |= vliType(byte&0x7f) << uint(s.pos) - if byte&0x80 == 0 { - /* Don't allow non-minimal encodings. */ - if byte == 0 && s.pos != 0 { - return xzDataError - } - s.pos = 0 - return xzStreamEnd - } - s.pos += 7 - if s.pos == 7*vliBytesMax { - return xzDataError - } - } - return xzOK -} - -/* - * Decode the Compressed Data field from a Block. Update and validate - * the observed compressed and uncompressed sizes of the Block so that - * they don't exceed the values possibly stored in the Block Header - * (validation assumes that no integer overflow occurs, since vliType - * is uint64). Update s.check if presence of the CRC32/CRC64/SHA256 - * field was indicated in Stream Header. - * - * Once the decoding is finished, validate that the observed sizes match - * the sizes possibly stored in the Block Header. Update the hash and - * Block count, which are later used to validate the Index field. 
- */ -func decBlock(s *xzDec, b *xzBuf) xzRet { - var ret xzRet - s.inStart = b.inPos - s.outStart = b.outPos - ret = s.chain(b) - s.block.compressed += vliType(b.inPos - s.inStart) - s.block.uncompressed += vliType(b.outPos - s.outStart) - /* - * There is no need to separately check for vliUnknown since - * the observed sizes are always smaller than vliUnknown. - */ - if s.block.compressed > s.blockHeader.compressed || - s.block.uncompressed > s.blockHeader.uncompressed { - return xzDataError - } - switch s.CheckType { - case CheckCRC32, CheckCRC64, CheckSHA256: - _, _ = s.check.Write(b.out[s.outStart:b.outPos]) - } - if ret == xzStreamEnd { - if s.blockHeader.compressed != vliUnknown && - s.blockHeader.compressed != s.block.compressed { - return xzDataError - } - if s.blockHeader.uncompressed != vliUnknown && - s.blockHeader.uncompressed != s.block.uncompressed { - return xzDataError - } - s.block.hash.unpadded += - vliType(s.blockHeader.size) + s.block.compressed - s.block.hash.unpadded += vliType(checkSizes[s.CheckType]) - s.block.hash.uncompressed += s.block.uncompressed - var buf [2 * 8]byte // 2*Sizeof(vliType) - putLE64(uint64(s.block.hash.unpadded), buf[:]) - putLE64(uint64(s.block.hash.uncompressed), buf[8:]) - _, _ = s.block.hash.sha256.Write(buf[:]) - s.block.count++ - } - return ret -} - -/* Update the Index size and the CRC32 hash. */ -func indexUpdate(s *xzDec, b *xzBuf) { - inUsed := b.inPos - s.inStart - s.index.size += vliType(inUsed) - _, _ = s.crc32.Write(b.in[s.inStart : s.inStart+inUsed]) -} - -/* - * Decode the Number of Records, Unpadded Size, and Uncompressed Size - * fields from the Index field. That is, Index Padding and CRC32 are not - * decoded by this function. - * - * This can return xzOK (more input needed), xzStreamEnd (everything - * successfully decoded), or xzDataError (input is corrupt). 
- */ -func decIndex(s *xzDec, b *xzBuf) xzRet { - var ret xzRet - for { - ret = decVLI(s, b.in, &b.inPos) - if ret != xzStreamEnd { - indexUpdate(s, b) - return ret - } - switch s.index.sequence { - case seqIndexCount: - s.index.count = s.vli - /* - * Validate that the Number of Records field - * indicates the same number of Records as - * there were Blocks in the Stream. - */ - if s.index.count != s.block.count { - return xzDataError - } - s.index.sequence = seqIndexUnpadded - case seqIndexUnpadded: - s.index.hash.unpadded += s.vli - s.index.sequence = seqIndexUncompressed - case seqIndexUncompressed: - s.index.hash.uncompressed += s.vli - var buf [2 * 8]byte // 2*Sizeof(vliType) - putLE64(uint64(s.index.hash.unpadded), buf[:]) - putLE64(uint64(s.index.hash.uncompressed), buf[8:]) - _, _ = s.index.hash.sha256.Write(buf[:]) - s.index.count-- - s.index.sequence = seqIndexUnpadded - } - if !(s.index.count > 0) { - break - } - } - return xzStreamEnd -} - -/* - * Validate that the next 4 bytes match s.crc32.Sum(nil). s.pos must - * be zero when starting to validate the first byte. - */ -func crcValidate(s *xzDec, b *xzBuf) xzRet { - sum := s.crc32.Sum(nil) - // CRC32 - reverse slice - sum[0], sum[1], sum[2], sum[3] = sum[3], sum[2], sum[1], sum[0] - for { - if b.inPos == len(b.in) { - return xzOK - } - if sum[s.pos] != b.in[b.inPos] { - return xzDataError - } - b.inPos++ - s.pos++ - if !(s.pos < 4) { - break - } - } - s.crc32.Reset() - s.pos = 0 - return xzStreamEnd -} - -/* - * Validate that the next 4/8/32 bytes match s.check.Sum(nil). s.pos - * must be zero when starting to validate the first byte. 
- */ -func checkValidate(s *xzDec, b *xzBuf) xzRet { - sum := s.check.Sum(nil) - if s.CheckType == CheckCRC32 || s.CheckType == CheckCRC64 { - // CRC32/64 - reverse slice - for i, j := 0, len(sum)-1; i < j; i, j = i+1, j-1 { - sum[i], sum[j] = sum[j], sum[i] - } - } - for { - if b.inPos == len(b.in) { - return xzOK - } - if sum[s.pos] != b.in[b.inPos] { - return xzDataError - } - b.inPos++ - s.pos++ - if !(s.pos < len(sum)) { - break - } - } - s.check.Reset() - s.pos = 0 - return xzStreamEnd -} - -/* - * Skip over the Check field when the Check ID is not supported. - * Returns true once the whole Check field has been skipped over. - */ -func checkSkip(s *xzDec, b *xzBuf) bool { - for s.pos < int(checkSizes[s.CheckType]) { - if b.inPos == len(b.in) { - return false - } - b.inPos++ - s.pos++ - } - s.pos = 0 - return true -} - -/* polynomial table used in decStreamHeader below */ -var xzCRC64Table = crc64.MakeTable(crc64.ECMA) - -/* Decode the Stream Header field (the first 12 bytes of the .xz Stream). */ -func decStreamHeader(s *xzDec) xzRet { - if string(s.temp.buf[:len(headerMagic)]) != headerMagic { - return xzFormatError - } - if crc32.ChecksumIEEE(s.temp.buf[len(headerMagic):len(headerMagic)+2]) != - getLE32(s.temp.buf[len(headerMagic)+2:]) { - return xzDataError - } - if s.temp.buf[len(headerMagic)] != 0 { - return xzOptionsError - } - /* - * Of integrity checks, we support none (Check ID = 0), - * CRC32 (Check ID = 1), CRC64 (Check ID = 4) and SHA256 (Check ID = 10) - * However, we will accept other check types too, but then the check - * won't be verified and a warning (xzUnsupportedCheck) will be given. 
- */ - s.CheckType = CheckID(s.temp.buf[len(headerMagic)+1]) - if s.CheckType > checkMax { - return xzOptionsError - } - switch s.CheckType { - case CheckNone: - // CheckNone: no action needed - case CheckCRC32: - if s.checkCRC32 == nil { - s.checkCRC32 = crc32.NewIEEE() - } else { - s.checkCRC32.Reset() - } - s.check = s.checkCRC32 - case CheckCRC64: - if s.checkCRC64 == nil { - s.checkCRC64 = crc64.New(xzCRC64Table) - } else { - s.checkCRC64.Reset() - } - s.check = s.checkCRC64 - case CheckSHA256: - if s.checkSHA256 == nil { - s.checkSHA256 = sha256.New() - } else { - s.checkSHA256.Reset() - } - s.check = s.checkSHA256 - default: - return xzUnsupportedCheck - } - return xzOK -} - -/* Decode the Stream Footer field (the last 12 bytes of the .xz Stream) */ -func decStreamFooter(s *xzDec) xzRet { - if string(s.temp.buf[10:10+len(footerMagic)]) != footerMagic { - return xzDataError - } - if crc32.ChecksumIEEE(s.temp.buf[4:10]) != getLE32(s.temp.buf) { - return xzDataError - } - /* - * Validate Backward Size. Note that we never added the size of the - * Index CRC32 field to s->index.size, thus we use s->index.size / 4 - * instead of s->index.size / 4 - 1. - */ - if s.index.size>>2 != vliType(getLE32(s.temp.buf[4:])) { - return xzDataError - } - if s.temp.buf[8] != 0 || CheckID(s.temp.buf[9]) != s.CheckType { - return xzDataError - } - /* - * Use xzStreamEnd instead of xzOK to be more convenient - * for the caller. - */ - return xzStreamEnd -} - -/* Decode the Block Header and initialize the filter chain. */ -func decBlockHeader(s *xzDec) xzRet { - var ret xzRet - /* - * Validate the CRC32. We know that the temp buffer is at least - * eight bytes so this is safe. - */ - crc := getLE32(s.temp.buf[len(s.temp.buf)-4:]) - s.temp.buf = s.temp.buf[:len(s.temp.buf)-4] - if crc32.ChecksumIEEE(s.temp.buf) != crc { - return xzDataError - } - s.temp.pos = 2 - /* - * Catch unsupported Block Flags. 
- */ - if s.temp.buf[1]&0x3C != 0 { - return xzOptionsError - } - /* Compressed Size */ - if s.temp.buf[1]&0x40 != 0 { - if decVLI(s, s.temp.buf, &s.temp.pos) != xzStreamEnd { - return xzDataError - } - if s.vli >= 1<<63-8 { - // the whole block must stay smaller than 2^63 bytes - // the block header cannot be smaller than 8 bytes - return xzDataError - } - if s.vli == 0 { - // compressed size must be non-zero - return xzDataError - } - s.blockHeader.compressed = s.vli - } else { - s.blockHeader.compressed = vliUnknown - } - /* Uncompressed Size */ - if s.temp.buf[1]&0x80 != 0 { - if decVLI(s, s.temp.buf, &s.temp.pos) != xzStreamEnd { - return xzDataError - } - s.blockHeader.uncompressed = s.vli - } else { - s.blockHeader.uncompressed = vliUnknown - } - // get total number of filters (1-4) - filterTotal := int(s.temp.buf[1]&0x03) + 1 - // slice to hold decoded filters - filterList := make([]struct { - id xzFilterID - props uint32 - }, filterTotal) - // decode the non-last filters which cannot be LZMA2 - for i := 0; i < filterTotal-1; i++ { - /* Valid Filter Flags always take at least two bytes. 
*/ - if len(s.temp.buf)-s.temp.pos < 2 { - return xzDataError - } - s.temp.pos += 2 - switch id := xzFilterID(s.temp.buf[s.temp.pos-2]); id { - case idDelta: - // delta filter - if s.temp.buf[s.temp.pos-1] != 0x01 { - return xzOptionsError - } - /* Filter Properties contains distance - 1 */ - if len(s.temp.buf)-s.temp.pos < 1 { - return xzDataError - } - props := uint32(s.temp.buf[s.temp.pos]) - s.temp.pos++ - filterList[i] = struct { - id xzFilterID - props uint32 - }{id: id, props: props} - case idBCJX86, idBCJPowerPC, idBCJIA64, - idBCJARM, idBCJARMThumb, idBCJSPARC: - // bcj filter - var props uint32 - switch s.temp.buf[s.temp.pos-1] { - case 0x00: - props = 0 - case 0x04: - if len(s.temp.buf)-s.temp.pos < 4 { - return xzDataError - } - props = getLE32(s.temp.buf[s.temp.pos:]) - s.temp.pos += 4 - default: - return xzOptionsError - } - filterList[i] = struct { - id xzFilterID - props uint32 - }{id: id, props: props} - default: - return xzOptionsError - } - } - /* - * decode the last filter which must be LZMA2 - */ - if len(s.temp.buf)-s.temp.pos < 2 { - return xzDataError - } - /* Filter ID = LZMA2 */ - if xzFilterID(s.temp.buf[s.temp.pos]) != idLZMA2 { - return xzOptionsError - } - s.temp.pos++ - /* Size of Properties = 1-byte Filter Properties */ - if s.temp.buf[s.temp.pos] != 0x01 { - return xzOptionsError - } - s.temp.pos++ - /* Filter Properties contains LZMA2 dictionary size. */ - if len(s.temp.buf)-s.temp.pos < 1 { - return xzDataError - } - props := uint32(s.temp.buf[s.temp.pos]) - s.temp.pos++ - filterList[filterTotal-1] = struct { - id xzFilterID - props uint32 - }{id: idLZMA2, props: props} - /* - * Process the filter list and create s.chain, going from last - * filter (LZMA2) to first filter - * - * First, LZMA2. 
- */ - ret = xzDecLZMA2Reset(s.lzma2, byte(filterList[filterTotal-1].props)) - if ret != xzOK { - return ret - } - s.chain = func(b *xzBuf) xzRet { - return xzDecLZMA2Run(s.lzma2, b) - } - /* - * Now the non-last filters - */ - for i := filterTotal - 2; i >= 0; i-- { - switch id := filterList[i].id; id { - case idDelta: - // delta filter - var delta *xzDecDelta - if s.deltasUsed < len(s.deltas) { - delta = s.deltas[s.deltasUsed] - } else { - delta = xzDecDeltaCreate() - s.deltas = append(s.deltas, delta) - } - s.deltasUsed++ - ret = xzDecDeltaReset(delta, int(filterList[i].props)+1) - if ret != xzOK { - return ret - } - chain := s.chain - s.chain = func(b *xzBuf) xzRet { - return xzDecDeltaRun(delta, b, chain) - } - case idBCJX86, idBCJPowerPC, idBCJIA64, - idBCJARM, idBCJARMThumb, idBCJSPARC: - // bcj filter - var bcj *xzDecBCJ - if s.bcjsUsed < len(s.bcjs) { - bcj = s.bcjs[s.bcjsUsed] - } else { - bcj = xzDecBCJCreate() - s.bcjs = append(s.bcjs, bcj) - } - s.bcjsUsed++ - ret = xzDecBCJReset(bcj, id, int(filterList[i].props)) - if ret != xzOK { - return ret - } - chain := s.chain - s.chain = func(b *xzBuf) xzRet { - return xzDecBCJRun(bcj, b, chain) - } - } - } - /* The rest must be Header Padding. */ - for s.temp.pos < len(s.temp.buf) { - if s.temp.buf[s.temp.pos] != 0x00 { - return xzOptionsError - } - s.temp.pos++ - } - s.temp.pos = 0 - s.block.compressed = 0 - s.block.uncompressed = 0 - return xzOK -} - -func decMain(s *xzDec, b *xzBuf) xzRet { - var ret xzRet - /* - * Store the start position for the case when we are in the middle - * of the Index field. - */ - s.inStart = b.inPos - for { - switch s.sequence { - case seqStreamHeader: - /* - * Stream Header is copied to s.temp, and then - * decoded from there. This way if the caller - * gives us only little input at a time, we can - * still keep the Stream Header decoding code - * simple. Similar approach is used in many places - * in this file. 
- */ - if !fillTemp(s, b) { - return xzOK - } - /* - * If decStreamHeader returns - * xzUnsupportedCheck, it is still possible - * to continue decoding. Thus, update s.sequence - * before calling decStreamHeader. - */ - s.sequence = seqBlockStart - ret = decStreamHeader(s) - if ret != xzOK { - return ret - } - fallthrough - case seqBlockStart: - /* We need one byte of input to continue. */ - if b.inPos == len(b.in) { - return xzOK - } - /* See if this is the beginning of the Index field. */ - if b.in[b.inPos] == 0 { - s.inStart = b.inPos - b.inPos++ - s.sequence = seqIndex - break - } - /* - * Calculate the size of the Block Header and - * prepare to decode it. - */ - s.blockHeader.size = (int(b.in[b.inPos]) + 1) * 4 - s.temp.buf = s.temp.bufArray[:s.blockHeader.size] - s.temp.pos = 0 - s.sequence = seqBlockHeader - fallthrough - case seqBlockHeader: - if !fillTemp(s, b) { - return xzOK - } - ret = decBlockHeader(s) - if ret != xzOK { - return ret - } - s.sequence = seqBlockUncompress - fallthrough - case seqBlockUncompress: - ret = decBlock(s, b) - if ret != xzStreamEnd { - return ret - } - s.sequence = seqBlockPadding - fallthrough - case seqBlockPadding: - /* - * Size of Compressed Data + Block Padding - * must be a multiple of four. We don't need - * s->block.compressed for anything else - * anymore, so we use it here to test the size - * of the Block Padding field. 
- */ - for s.block.compressed&3 != 0 { - if b.inPos == len(b.in) { - return xzOK - } - if b.in[b.inPos] != 0 { - return xzDataError - } - b.inPos++ - s.block.compressed++ - } - s.sequence = seqBlockCheck - fallthrough - case seqBlockCheck: - switch s.CheckType { - case CheckCRC32, CheckCRC64, CheckSHA256: - ret = checkValidate(s, b) - if ret != xzStreamEnd { - return ret - } - default: - if !checkSkip(s, b) { - return xzOK - } - } - s.sequence = seqBlockStart - case seqIndex: - ret = decIndex(s, b) - if ret != xzStreamEnd { - return ret - } - s.sequence = seqIndexPadding - fallthrough - case seqIndexPadding: - for (s.index.size+vliType(b.inPos-s.inStart))&3 != 0 { - if b.inPos == len(b.in) { - indexUpdate(s, b) - return xzOK - } - if b.in[b.inPos] != 0 { - return xzDataError - } - b.inPos++ - } - /* Finish the CRC32 value and Index size. */ - indexUpdate(s, b) - /* Compare the hashes to validate the Index field. */ - if !bytes.Equal( - s.block.hash.sha256.Sum(nil), s.index.hash.sha256.Sum(nil)) { - return xzDataError - } - s.sequence = seqIndexCRC32 - fallthrough - case seqIndexCRC32: - ret = crcValidate(s, b) - if ret != xzStreamEnd { - return ret - } - s.temp.buf = s.temp.bufArray[:streamHeaderSize] - s.sequence = seqStreamFooter - fallthrough - case seqStreamFooter: - if !fillTemp(s, b) { - return xzOK - } - return decStreamFooter(s) - } - } - /* Never reached */ -} - -/** - * xzDecRun - Run the XZ decoder - * @s: Decoder state allocated using xzDecInit - * @b: Input and output buffers - * - * See xzRet for details of return values. - * - * xzDecRun is a wrapper for decMain to handle some special cases. - * - * We must return xzBufError when it seems clear that we are not - * going to make any progress anymore. This is to prevent the caller - * from calling us infinitely when the input file is truncated or - * otherwise corrupt. 
Since zlib-style API allows that the caller - * fills the input buffer only when the decoder doesn't produce any - * new output, we have to be careful to avoid returning xzBufError - * too easily: xzBufError is returned only after the second - * consecutive call to xzDecRun that makes no progress. - */ -func xzDecRun(s *xzDec, b *xzBuf) xzRet { - inStart := b.inPos - outStart := b.outPos - ret := decMain(s, b) - if ret == xzOK && inStart == b.inPos && outStart == b.outPos { - if s.allowBufError { - ret = xzBufError - } - s.allowBufError = true - } else { - s.allowBufError = false - } - return ret -} - -/** - * xzDecInit - Allocate and initialize a XZ decoder state - * @dictMax: Maximum size of the LZMA2 dictionary (history buffer) for - * decoding. LZMA2 dictionary is always 2^n bytes - * or 2^n + 2^(n-1) bytes (the latter sizes are less common - * in practice), so other values for dictMax don't make sense. - * - * dictMax specifies the maximum allowed dictionary size that xzDecRun - * may allocate once it has parsed the dictionary size from the stream - * headers. This way excessive allocations can be avoided while still - * limiting the maximum memory usage to a sane value to prevent running the - * system out of memory when decompressing streams from untrusted sources. - * - * xzDecInit returns a pointer to an xzDec, which is ready to be used with - * xzDecRun. - */ -func xzDecInit(dictMax uint32, header *Header) *xzDec { - s := new(xzDec) - s.crc32 = crc32.NewIEEE() - s.Header = header - s.block.hash.sha256 = sha256.New() - s.index.hash.sha256 = sha256.New() - s.lzma2 = xzDecLZMA2Create(dictMax) - xzDecReset(s) - return s -} - -/** - * xzDecReset - Reset an already allocated decoder state - * @s: Decoder state allocated using xzDecInit - * - * This function can be used to reset the decoder state without - * reallocating memory with xzDecInit. 
- */ -func xzDecReset(s *xzDec) { - s.sequence = seqStreamHeader - s.allowBufError = false - s.pos = 0 - s.crc32.Reset() - s.check = nil - s.CheckType = checkUnset - s.block.compressed = 0 - s.block.uncompressed = 0 - s.block.count = 0 - s.block.hash.unpadded = 0 - s.block.hash.uncompressed = 0 - s.block.hash.sha256.Reset() - s.index.sequence = seqIndexCount - s.index.size = 0 - s.index.count = 0 - s.index.hash.unpadded = 0 - s.index.hash.uncompressed = 0 - s.index.hash.sha256.Reset() - s.temp.pos = 0 - s.temp.buf = s.temp.bufArray[:streamHeaderSize] - s.chain = nil - s.bcjsUsed = 0 - s.deltasUsed = 0 -} diff --git a/vendor/github.com/xi2/xz/dec_util.go b/vendor/github.com/xi2/xz/dec_util.go deleted file mode 100644 index c422752219..0000000000 --- a/vendor/github.com/xi2/xz/dec_util.go +++ /dev/null @@ -1,52 +0,0 @@ -/* - * XZ decompressor utility functions - * - * Author: Michael Cross - * - * This file has been put into the public domain. - * You can do whatever you want with this file. 
- */ - -package xz - -func getLE32(buf []byte) uint32 { - return uint32(buf[0]) | - uint32(buf[1])<<8 | - uint32(buf[2])<<16 | - uint32(buf[3])<<24 -} - -func getBE32(buf []byte) uint32 { - return uint32(buf[0])<<24 | - uint32(buf[1])<<16 | - uint32(buf[2])<<8 | - uint32(buf[3]) -} - -func putLE32(val uint32, buf []byte) { - buf[0] = byte(val) - buf[1] = byte(val >> 8) - buf[2] = byte(val >> 16) - buf[3] = byte(val >> 24) - return -} - -func putBE32(val uint32, buf []byte) { - buf[0] = byte(val >> 24) - buf[1] = byte(val >> 16) - buf[2] = byte(val >> 8) - buf[3] = byte(val) - return -} - -func putLE64(val uint64, buf []byte) { - buf[0] = byte(val) - buf[1] = byte(val >> 8) - buf[2] = byte(val >> 16) - buf[3] = byte(val >> 24) - buf[4] = byte(val >> 32) - buf[5] = byte(val >> 40) - buf[6] = byte(val >> 48) - buf[7] = byte(val >> 56) - return -} diff --git a/vendor/github.com/xi2/xz/dec_xz.go b/vendor/github.com/xi2/xz/dec_xz.go deleted file mode 100644 index 1b18a83831..0000000000 --- a/vendor/github.com/xi2/xz/dec_xz.go +++ /dev/null @@ -1,124 +0,0 @@ -/* - * XZ decompressor - * - * Authors: Lasse Collin - * Igor Pavlov - * - * Translation to Go: Michael Cross - * - * This file has been put into the public domain. - * You can do whatever you want with this file. - */ - -package xz - -/* from linux/include/linux/xz.h **************************************/ - -/** - * xzRet - Return codes - * @xzOK: Everything is OK so far. More input or more - * output space is required to continue. - * @xzStreamEnd: Operation finished successfully. - * @xzUnSupportedCheck: Integrity check type is not supported. Decoding - * is still possible by simply calling xzDecRun - * again. - * @xzMemlimitError: A bigger LZMA2 dictionary would be needed than - * allowed by the dictMax argument given to - * xzDecInit. - * @xzFormatError: File format was not recognized (wrong magic - * bytes). 
- * @xzOptionsError: This implementation doesn't support the requested - * compression options. In the decoder this means - * that the header CRC32 matches, but the header - * itself specifies something that we don't support. - * @xzDataError: Compressed data is corrupt. - * @xzBufError: Cannot make any progress. - * - * xzBufError is returned when two consecutive calls to XZ code cannot - * consume any input and cannot produce any new output. This happens - * when there is no new input available, or the output buffer is full - * while at least one output byte is still pending. Assuming your code - * is not buggy, you can get this error only when decoding a - * compressed stream that is truncated or otherwise corrupt. - */ -type xzRet int - -const ( - xzOK xzRet = iota - xzStreamEnd - xzUnsupportedCheck - xzMemlimitError - xzFormatError - xzOptionsError - xzDataError - xzBufError -) - -/** - * xzBuf - Passing input and output buffers to XZ code - * @in: Input buffer. - * @inPos: Current position in the input buffer. This must not exceed - * input buffer size. - * @out: Output buffer. - * @outPos: Current position in the output buffer. This must not exceed - * output buffer size. - * - * Only the contents of the output buffer from out[outPos] onward, and - * the variables inPos and outPos are modified by the XZ code. - */ -type xzBuf struct { - in []byte - inPos int - out []byte - outPos int -} - -/* All XZ filter IDs */ -type xzFilterID int64 - -const ( - idDelta xzFilterID = 0x03 - idBCJX86 xzFilterID = 0x04 - idBCJPowerPC xzFilterID = 0x05 - idBCJIA64 xzFilterID = 0x06 - idBCJARM xzFilterID = 0x07 - idBCJARMThumb xzFilterID = 0x08 - idBCJSPARC xzFilterID = 0x09 - idLZMA2 xzFilterID = 0x21 -) - -// CheckID is the type of the data integrity check in an XZ stream -// calculated from the uncompressed data. 
-type CheckID int - -func (id CheckID) String() string { - switch id { - case CheckNone: - return "None" - case CheckCRC32: - return "CRC32" - case CheckCRC64: - return "CRC64" - case CheckSHA256: - return "SHA256" - default: - return "Unknown" - } -} - -const ( - CheckNone CheckID = 0x00 - CheckCRC32 CheckID = 0x01 - CheckCRC64 CheckID = 0x04 - CheckSHA256 CheckID = 0x0A - checkMax CheckID = 0x0F - checkUnset CheckID = -1 -) - -// An XZ stream contains a stream header which holds information about -// the stream. That information is exposed as fields of the -// Reader. Currently it contains only the stream's data integrity -// check type. -type Header struct { - CheckType CheckID // type of the stream's data integrity check -} diff --git a/vendor/github.com/xi2/xz/doc.go b/vendor/github.com/xi2/xz/doc.go deleted file mode 100644 index f8c62e6248..0000000000 --- a/vendor/github.com/xi2/xz/doc.go +++ /dev/null @@ -1,35 +0,0 @@ -// Package xz implements XZ decompression natively in Go. -// -// Usage -// -// For ease of use, this package is designed to have a similar API to -// compress/gzip. See the examples for further details. -// -// Implementation -// -// This package is a translation from C to Go of XZ Embedded -// (http://tukaani.org/xz/embedded.html) with enhancements made so as -// to implement all mandatory and optional parts of the XZ file format -// specification v1.0.4. It supports all filters and block check -// types, supports multiple streams, and performs index verification -// using SHA-256 as recommended by the specification. -// -// Speed -// -// On the author's Intel Ivybridge i5, decompression speed is about -// half that of the standard XZ Utils (tested with a recent linux -// kernel tarball). -// -// Thanks -// -// Thanks are due to Lasse Collin and Igor Pavlov, the authors of XZ -// Embedded, on whose code package xz is based. 
It would not exist -// without their decision to allow others to modify and reuse their -// code. -// -// Bug reports -// -// For bug reports relating to this package please contact the author -// through https://github.com/xi2/xz/issues, and not the authors of XZ -// Embedded. -package xz diff --git a/vendor/github.com/xi2/xz/reader.go b/vendor/github.com/xi2/xz/reader.go deleted file mode 100644 index e321d755f2..0000000000 --- a/vendor/github.com/xi2/xz/reader.go +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Package xz Go Reader API - * - * Author: Michael Cross - * - * This file has been put into the public domain. - * You can do whatever you want with this file. - */ - -package xz - -import ( - "errors" - "io" -) - -// Package specific errors. -var ( - ErrUnsupportedCheck = errors.New("xz: integrity check type not supported") - ErrMemlimit = errors.New("xz: LZMA2 dictionary size exceeds max") - ErrFormat = errors.New("xz: file format not recognized") - ErrOptions = errors.New("xz: compression options not supported") - ErrData = errors.New("xz: data is corrupt") - ErrBuf = errors.New("xz: data is truncated or corrupt") -) - -// DefaultDictMax is the default maximum dictionary size in bytes used -// by the decoder. This value is sufficient to decompress files -// created with XZ Utils "xz -9". -const DefaultDictMax = 1 << 26 // 64 MiB - -// inBufSize is the input buffer size used by the decoder. -const inBufSize = 1 << 13 // 8 KiB - -// A Reader is an io.Reader that can be used to retrieve uncompressed -// data from an XZ file. -// -// In general, an XZ file can be a concatenation of other XZ -// files. Reads from the Reader return the concatenation of the -// uncompressed data of each. 
-type Reader struct { - Header - r io.Reader // the wrapped io.Reader - multistream bool // true if reader is in multistream mode - rEOF bool // true after io.EOF received on r - dEOF bool // true after decoder has completed - padding int // bytes of stream padding read (or -1) - in [inBufSize]byte // backing array for buf.in - buf *xzBuf // decoder input/output buffers - dec *xzDec // decoder state - err error // the result of the last decoder call -} - -// NewReader creates a new Reader reading from r. The decompressor -// will use an LZMA2 dictionary size up to dictMax bytes in -// size. Passing a value of zero sets dictMax to DefaultDictMax. If -// an individual XZ stream requires a dictionary size greater than -// dictMax in order to decompress, Read will return ErrMemlimit. -// -// If NewReader is passed a value of nil for r then a Reader is -// created such that all read attempts will return io.EOF. This is -// useful if you just want to allocate memory for a Reader which will -// later be initialized with Reset. -// -// Due to internal buffering, the Reader may read more data than -// necessary from r. -func NewReader(r io.Reader, dictMax uint32) (*Reader, error) { - if dictMax == 0 { - dictMax = DefaultDictMax - } - z := &Reader{ - r: r, - multistream: true, - padding: -1, - buf: &xzBuf{}, - } - if r == nil { - z.rEOF, z.dEOF = true, true - } - z.dec = xzDecInit(dictMax, &z.Header) - var err error - if r != nil { - _, err = z.Read(nil) // read stream header - } - return z, err -} - -// decode is a wrapper around xzDecRun that additionally handles -// stream padding. It treats the padding as a kind of stream that -// decodes to nothing. 
-// -// When decoding padding, z.padding >= 0 -// When decoding a real stream, z.padding == -1 -func (z *Reader) decode() (ret xzRet) { - if z.padding >= 0 { - // read all padding in input buffer - for z.buf.inPos < len(z.buf.in) && - z.buf.in[z.buf.inPos] == 0 { - z.buf.inPos++ - z.padding++ - } - switch { - case z.buf.inPos == len(z.buf.in) && z.rEOF: - // case: out of padding. no more input data available - if z.padding%4 != 0 { - ret = xzDataError - } else { - ret = xzStreamEnd - } - case z.buf.inPos == len(z.buf.in): - // case: read more padding next loop iteration - ret = xzOK - default: - // case: out of padding. more input data available - if z.padding%4 != 0 { - ret = xzDataError - } else { - xzDecReset(z.dec) - ret = xzStreamEnd - } - } - } else { - ret = xzDecRun(z.dec, z.buf) - } - return -} - -func (z *Reader) Read(p []byte) (n int, err error) { - // restore err - err = z.err - // set decoder output buffer to p - z.buf.out = p - z.buf.outPos = 0 - for { - // update n - n = z.buf.outPos - // if last call to decoder ended with an error, return that error - if err != nil { - break - } - // if decoder has finished, return with err == io.EOF - if z.dEOF { - err = io.EOF - break - } - // if p full, return with err == nil, unless we have not yet - // read the stream header with Read(nil) - if n == len(p) && z.CheckType != checkUnset { - break - } - // if needed, read more data from z.r - if z.buf.inPos == len(z.buf.in) && !z.rEOF { - rn, e := z.r.Read(z.in[:]) - if e != nil && e != io.EOF { - // read error - err = e - break - } - if e == io.EOF { - z.rEOF = true - } - // set new input buffer in z.buf - z.buf.in = z.in[:rn] - z.buf.inPos = 0 - } - // decode more data - ret := z.decode() - switch ret { - case xzOK: - // no action needed - case xzStreamEnd: - if z.padding >= 0 { - z.padding = -1 - if !z.multistream || z.rEOF { - z.dEOF = true - } - } else { - z.padding = 0 - } - case xzUnsupportedCheck: - err = ErrUnsupportedCheck - case xzMemlimitError: - err = 
ErrMemlimit - case xzFormatError: - err = ErrFormat - case xzOptionsError: - err = ErrOptions - case xzDataError: - err = ErrData - case xzBufError: - err = ErrBuf - } - // save err - z.err = err - } - return -} - -// Multistream controls whether the reader is operating in multistream -// mode. -// -// If enabled (the default), the Reader expects the input to be a -// sequence of XZ streams, possibly interspersed with stream padding, -// which it reads one after another. The effect is that the -// concatenation of a sequence of XZ streams or XZ files is -// treated as equivalent to the compressed result of the concatenation -// of the sequence. This is standard behaviour for XZ readers. -// -// Calling Multistream(false) disables this behaviour; disabling the -// behaviour can be useful when reading file formats that distinguish -// individual XZ streams. In this mode, when the Reader reaches the -// end of the stream, Read returns io.EOF. To start the next stream, -// call z.Reset(nil) followed by z.Multistream(false). If there is no -// next stream, z.Reset(nil) will return io.EOF. -func (z *Reader) Multistream(ok bool) { - z.multistream = ok -} - -// Reset, for non-nil values of io.Reader r, discards the Reader z's -// state and makes it equivalent to the result of its original state -// from NewReader, but reading from r instead. This permits reusing a -// Reader rather than allocating a new one. -// -// If you wish to leave r unchanged use z.Reset(nil). This keeps r -// unchanged and ensures internal buffering is preserved. If the -// Reader was at the end of a stream it is then ready to read any -// follow on streams. If there are no follow on streams z.Reset(nil) -// returns io.EOF. If the Reader was not at the end of a stream then -// z.Reset(nil) does nothing. 
-func (z *Reader) Reset(r io.Reader) error { - switch { - case r == nil: - z.multistream = true - if !z.dEOF { - return nil - } - if z.rEOF { - return io.EOF - } - z.dEOF = false - _, err := z.Read(nil) // read stream header - return err - default: - z.r = r - z.multistream = true - z.rEOF = false - z.dEOF = false - z.padding = -1 - z.buf.in = nil - z.buf.inPos = 0 - xzDecReset(z.dec) - z.err = nil - _, err := z.Read(nil) // read stream header - return err - } -} diff --git a/vendor/modules.txt b/vendor/modules.txt index 53b9ad701a..d63f94954f 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -27,9 +27,6 @@ github.com/Masterminds/squirrel # github.com/Microsoft/hcsshim v0.11.4 ## explicit; go 1.18 github.com/Microsoft/hcsshim/osversion -# github.com/andybalholm/brotli v1.0.1 -## explicit; go 1.12 -github.com/andybalholm/brotli # github.com/armon/go-metrics v0.4.1 ## explicit; go 1.12 github.com/armon/go-metrics @@ -599,14 +596,6 @@ github.com/docker/go-connections/tlsconfig # github.com/docker/go-metrics v0.0.1 ## explicit; go 1.11 github.com/docker/go-metrics -# github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 -## explicit; go 1.9 -github.com/dsnet/compress -github.com/dsnet/compress/bzip2 -github.com/dsnet/compress/bzip2/internal/sais -github.com/dsnet/compress/internal -github.com/dsnet/compress/internal/errors -github.com/dsnet/compress/internal/prefix # github.com/emicklei/go-restful/v3 v3.11.2 ## explicit; go 1.13 github.com/emicklei/go-restful/v3 @@ -715,9 +704,6 @@ github.com/golang/protobuf/ptypes/duration github.com/golang/protobuf/ptypes/struct github.com/golang/protobuf/ptypes/timestamp github.com/golang/protobuf/ptypes/wrappers -# github.com/golang/snappy v0.0.4 -## explicit -github.com/golang/snappy # 
github.com/google/btree v1.1.2 ## explicit; go 1.18 github.com/google/btree @@ -844,18 +830,12 @@ github.com/json-iterator/go # github.com/klauspost/compress v1.17.0 ## explicit; go 1.18 github.com/klauspost/compress -github.com/klauspost/compress/flate github.com/klauspost/compress/fse -github.com/klauspost/compress/gzip github.com/klauspost/compress/huff0 github.com/klauspost/compress/internal/cpuinfo github.com/klauspost/compress/internal/snapref -github.com/klauspost/compress/zip github.com/klauspost/compress/zstd github.com/klauspost/compress/zstd/internal/xxhash -# github.com/klauspost/pgzip v1.2.5 -## explicit -github.com/klauspost/pgzip # github.com/kr/pretty v0.3.1 ## explicit; go 1.12 github.com/kr/pretty @@ -896,9 +876,6 @@ github.com/mattn/go-isatty # github.com/mattn/go-runewidth v0.0.13 ## explicit; go 1.9 github.com/mattn/go-runewidth -# github.com/mholt/archiver/v3 v3.5.1 -## explicit; go 1.13 -github.com/mholt/archiver/v3 # github.com/mitchellh/copystructure v1.2.0 ## explicit; go 1.15 github.com/mitchellh/copystructure @@ -940,9 +917,6 @@ github.com/munnerz/goautoneg # github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f ## explicit github.com/mxk/go-flowrate/flowrate -# github.com/nwaples/rardecode v1.1.0 -## explicit -github.com/nwaples/rardecode # github.com/oklog/ulid v1.3.1 ## explicit github.com/oklog/ulid @@ -974,13 +948,6 @@ github.com/peterbourgon/diskv # github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc ## explicit; go 1.17 github.com/petermattis/goid -# github.com/pierrec/lz4/v4 v4.1.2 -## explicit; go 1.14 -github.com/pierrec/lz4/v4 -github.com/pierrec/lz4/v4/internal/lz4block 
-github.com/pierrec/lz4/v4/internal/lz4errors -github.com/pierrec/lz4/v4/internal/lz4stream -github.com/pierrec/lz4/v4/internal/xxh32 # github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c ## explicit; go 1.14 github.com/pkg/browser @@ -1077,12 +1044,6 @@ github.com/stretchr/testify/require # github.com/subosito/gotenv v1.6.0 ## explicit; go 1.18 github.com/subosito/gotenv -# github.com/ulikunitz/xz v0.5.10 -## explicit; go 1.12 -github.com/ulikunitz/xz -github.com/ulikunitz/xz/internal/hash -github.com/ulikunitz/xz/internal/xlog -github.com/ulikunitz/xz/lzma # github.com/vishvananda/netlink v1.2.1-beta.2.0.20231127184239-0ced8385386a ## explicit; go 1.12 github.com/vishvananda/netlink @@ -1102,9 +1063,6 @@ github.com/xeipuuv/gojsonreference # github.com/xeipuuv/gojsonschema v1.2.0 ## explicit github.com/xeipuuv/gojsonschema -# github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 -## explicit -github.com/xi2/xz # github.com/xlab/treeprint v1.2.0 ## explicit; go 1.13 github.com/xlab/treeprint