Update Files

This commit is contained in:
2025-01-22 16:18:30 +01:00
parent ed4603cf95
commit a36294b518
16718 changed files with 2960346 additions and 0 deletions

View File

@ -0,0 +1,42 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
/**
	Platform stub for zlib deflate compression: on targets without a native
	implementation every real entry point throws `NotImplementedException`.
**/
class Compress {
	// `level` is the zlib compression level — presumably 0-9 as in zlib; confirm per target.
	public function new(level:Int) {
		throw new haxe.exceptions.NotImplementedException("Not implemented for this platform");
	}

	// Compress a slice of `src` into `dst` starting at `dstPos`.
	// Unreachable in this stub: the constructor always throws.
	public function execute(src:haxe.io.Bytes, srcPos:Int, dst:haxe.io.Bytes, dstPos:Int):{done:Bool, read:Int, write:Int} {
		return null;
	}

	// Select the flush behaviour for subsequent execute() calls; no-op here.
	public function setFlushMode(f:FlushMode) {}

	// Release native resources; no-op here.
	public function close() {}

	// One-shot compression helper. Always throws on this platform; the
	// `return null` after the throw only satisfies the declared return type.
	public static function run(s:haxe.io.Bytes, level:Int):haxe.io.Bytes {
		throw new haxe.exceptions.NotImplementedException("Not implemented for this platform");
		return null;
	}
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
import haxe.ds.List;
// Parsed representation of a zip "extra field" record (see readExtraFields in Reader).
enum ExtraField {
	// Unrecognized record, preserved verbatim as (tag, raw payload).
	FUnknown(tag:Int, bytes:haxe.io.Bytes);
	// Info-ZIP Unicode Path record (tag 0x7075): UTF-8 name plus the CRC32
	// of the standard file-name field it overrides.
	FInfoZipUnicodePath(name:String, crc:Int);
	// General-purpose flag bit 11 was set: the file name is UTF-8 encoded.
	FUtf8;
}
// One archive member, as read by Reader or consumed by Writer.
typedef Entry = {
	var fileName:String;
	var fileSize:Int; // uncompressed size in bytes
	var fileTime:Date; // last-modification time (DOS format, 2-second resolution)
	var compressed:Bool; // true when `data` holds deflate-compressed bytes
	var dataSize:Int; // size of `data` as stored (compressed size when compressed)
	var data:Null<haxe.io.Bytes>; // entry payload; may be null (header-only read)
	var crc32:Null<Int>; // CRC32 of the uncompressed data; null until known
	var ?extraFields:List<ExtraField>;
}

View File

@ -0,0 +1,31 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
// Flush behaviour for Compress/Uncompress.setFlushMode; the names correspond
// to zlib's flush constants (Z_NO_FLUSH, Z_SYNC_FLUSH, Z_FULL_FLUSH,
// Z_FINISH, Z_BLOCK).
enum FlushMode {
	NO;
	SYNC;
	FULL;
	FINISH;
	BLOCK;
}

View File

@ -0,0 +1,120 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
// Huffman decoding tree: `Found` leaves carry the decoded symbol, `NeedBit`
// branches on a single input bit, and `NeedBits` consumes `n` bits at once
// to index a flattened lookup table (built by HuffTools.treeCompress).
enum Huffman {
	Found(i:Int);
	NeedBit(left:Huffman, right:Huffman);
	NeedBits(n:Int, table:Array<Huffman>);
}
// Builds Huffman decoding trees from canonical code lengths (as used by
// DEFLATE, RFC 1951 §3.2.2) and flattens the top levels into lookup tables.
class HuffTools {
	public function new() {}

	// Depth of the shallowest leaf reachable from `t` via NeedBit branches.
	// NeedBits must not appear here: compression works bottom-up, so the
	// subtree being measured is still made of plain NeedBit/Found nodes.
	function treeDepth(t) {
		return switch (t) {
			case Found(_): 0;
			case NeedBits(_, _): throw "assert";
			case NeedBit(a, b):
				var da = treeDepth(a);
				var db = treeDepth(b);
				1 + ((da < db) ? da : db);
		}
	}

	// Replace the top `d` levels of `t` (d = shallowest leaf depth) with one
	// NeedBits table of 2^d entries, recursing into the deeper subtrees.
	// Depth 0/1 trees are kept as-is, where a table would gain nothing.
	function treeCompress(t) {
		var d = treeDepth(t);
		if (d == 0)
			return t;
		if (d == 1)
			return switch (t) {
				case NeedBit(a, b): NeedBit(treeCompress(a), treeCompress(b));
				default: throw "assert";
			}
		var size = 1 << d;
		var table = new Array();
		for (i in 0...size)
			table.push(Found(-1));
		treeWalk(table, 0, 0, d, t);
		return NeedBits(d, table);
	}

	// Fill `table` by walking `t`: `p` is the bit pattern accumulated so far
	// (`cd` bits consumed, next bit stored at position `cd`), `d` the
	// remaining table depth. Subtrees deeper than the table are compressed
	// recursively and stored at their prefix slot.
	function treeWalk(table, p, cd, d, t) {
		switch (t) {
			case NeedBit(a, b):
				if (d > 0) {
					treeWalk(table, p, cd + 1, d - 1, a);
					treeWalk(table, p | (1 << cd), cd + 1, d - 1, b);
				} else
					table[p] = treeCompress(t);
			default:
				table[p] = treeCompress(t);
		}
	}

	// Recursively materialize the tree: `bits` maps (code << 5) | length to
	// a symbol index; `v`/`len` is the code prefix explored so far. Throws
	// when a prefix exceeds `maxbits` without matching any code.
	function treeMake(bits:haxe.ds.IntMap<Int>, maxbits:Int, v:Int, len:Int) {
		if (len > maxbits)
			throw "Invalid huffman";
		var idx = (v << 5) | len;
		if (bits.exists(idx))
			return Found(bits.get(idx));
		v <<= 1;
		len += 1;
		return NeedBit(treeMake(bits, maxbits, v, len), treeMake(bits, maxbits, v | 1, len));
	}

	// Build a decoding tree for `nlengths` symbols whose canonical code
	// lengths sit at lengths[pos ... pos + nlengths - 1]; a length of 0
	// means the symbol is unused. `maxbits` bounds the longest code.
	public function make(lengths, pos, nlengths, maxbits) {
		if (nlengths == 1) {
			return NeedBit(Found(0), Found(0));
		}
		var counts = new Array();
		var tmp = new Array();
		if (maxbits > 32)
			throw "Invalid huffman";
		for (i in 0...maxbits) {
			counts.push(0);
			tmp.push(0);
		}
		// Count how many codes exist of each length.
		for (i in 0...nlengths) {
			var p = lengths[i + pos];
			if (p >= maxbits)
				throw "Invalid huffman";
			counts[p]++;
		}
		// Derive the canonical first-code value for each code length.
		var code = 0;
		for (i in 1...maxbits - 1) {
			code = (code + counts[i]) << 1;
			tmp[i] = code;
		}
		// Assign consecutive code values to symbols, keyed by (code << 5) | len.
		var bits = new haxe.ds.IntMap();
		for (i in 0...nlengths) {
			var l = lengths[i + pos];
			if (l != 0) {
				var n = tmp[l - 1];
				tmp[l - 1] = n + 1;
				bits.set((n << 5) | l, i);
			}
		}
		return treeCompress(NeedBit(treeMake(bits, maxbits, 0, 1), treeMake(bits, maxbits, 1, 1)));
	}
}

View File

@ -0,0 +1,400 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
import haxe.zip.Huffman;
import haxe.crypto.Adler32;
// Sliding output window for Inflate: retains the most recent decompressed
// bytes so back-references can be resolved, and optionally folds everything
// it sees into an Adler32 checksum.
private class Window {
	public static inline var SIZE = 1 << 15;
	public static inline var BUFSIZE = 1 << 16;

	public var buffer:haxe.io.Bytes;
	public var pos:Int;

	var crc:Adler32;

	public function new(hasCrc) {
		pos = 0;
		buffer = haxe.io.Bytes.alloc(BUFSIZE);
		crc = hasCrc ? new Adler32() : null;
	}

	// Discard the oldest SIZE bytes (after folding them into the checksum)
	// and move the remainder to the front of a fresh buffer.
	public function slide() {
		if (crc != null)
			crc.update(buffer, 0, SIZE);
		var next = haxe.io.Bytes.alloc(BUFSIZE);
		pos -= SIZE;
		next.blit(0, buffer, SIZE, pos);
		buffer = next;
	}

	// Append `len` bytes taken from b[p...], sliding first when they
	// would not fit.
	public function addBytes(b, p, len) {
		if (pos + len > BUFSIZE)
			slide();
		buffer.blit(pos, b, p, len);
		pos += len;
	}

	// Append a single byte, sliding first when the buffer is full.
	public function addByte(c) {
		if (pos == BUFSIZE)
			slide();
		buffer.set(pos++, c);
	}

	// Most recently written byte (used for distance-1 runs).
	public function getLastChar() {
		return buffer.get(pos - 1);
	}

	// Number of bytes currently held (valid back-reference range).
	public function available() {
		return pos;
	}

	// Fold the remaining bytes into the checksum and return it
	// (null when checksumming was disabled at construction).
	public function checksum() {
		if (crc != null)
			crc.update(buffer, 0, pos);
		return crc;
	}
}
// Decoder state machine driven by InflateImpl.inflateLoop.
private enum State {
	Head; // reading the 2-byte zlib header (CMF/FLG)
	Block; // at a deflate block boundary, reading the block header
	CData; // decoding compressed data through the Huffman trees
	Flat; // copying a stored (uncompressed) block
	Crc; // verifying the trailing Adler32 checksum
	Dist; // copying `len` bytes from `dist` bytes back in the window
	DistOne; // dist == 1 run: repeating the last output byte
	Done; // stream fully decoded
}
/**
A pure Haxe implementation of the ZLIB Inflate algorithm which allows reading compressed data without any platform-specific support.
**/
class InflateImpl {
	// Extra bits to read after each length code (RFC 1951 §3.2.5); -1 marks invalid codes.
	static var LEN_EXTRA_BITS_TBL = [
		0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, -1, -1
	];
	// Base match length for length codes 257-285 (index = code - 257).
	static var LEN_BASE_VAL_TBL = [
		3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258
	];
	// Extra bits to read after each distance code; -1 marks invalid codes.
	static var DIST_EXTRA_BITS_TBL = [
		0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, -1, -1
	];
	// Base back-reference distance for each distance code.
	static var DIST_BASE_VAL_TBL = [
		1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
	];
	// Storage order of the code-length-code lengths (RFC 1951 §3.2.7).
	static var CODE_LENGTHS_POS = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
	var nbits:Int; // number of valid bits buffered in `bits`
	var bits:Int; // bit accumulator, consumed LSB-first
	var state:State; // current step of the decoding state machine
	var isFinal:Bool; // BFINAL flag of the current block
	var huffman:Huffman; // literal/length decoding tree of the current block
	var huffdist:Null<Huffman>; // distance tree; null = fixed 5-bit reversed codes
	var htools:HuffTools;
	var len:Int; // remaining bytes of the current match or stored block
	var dist:Int; // distance of the current back-reference
	var needed:Int; // bytes still to produce for the pending readBytes call
	var output:haxe.io.Bytes; // destination buffer of the pending readBytes call
	var outpos:Int; // write position in `output`
	var input:haxe.io.Input;
	var lengths:Array<Int>; // scratch: the 19 code-length-code lengths
	var window:Window; // 32K sliding window for back-references
	static var FIXED_HUFFMAN = null; // lazily built fixed tree, shared by all instances

	// `header`: expect and validate a zlib header; `crc`: verify the trailing
	// Adler32. Pass header=false, crc=false for raw deflate data such as zip
	// entries (see Reader).
	public function new(i, ?header = true, ?crc = true) {
		isFinal = false;
		htools = new HuffTools();
		huffman = buildFixedHuffman();
		huffdist = null;
		len = 0;
		dist = 0;
		state = header ? Head : Block;
		input = i;
		bits = 0;
		nbits = 0;
		needed = 0;
		output = null;
		outpos = 0;
		lengths = new Array();
		for (i in 0...19)
			lengths.push(-1);
		window = new Window(crc);
	}

	// Fixed literal/length tree of RFC 1951 §3.2.6, built once and cached.
	function buildFixedHuffman() {
		if (FIXED_HUFFMAN != null)
			return FIXED_HUFFMAN;
		var a = new Array();
		for (n in 0...288)
			a.push(if (n <= 143) 8 else if (n <= 255) 9 else if (n <= 279) 7 else 8);
		FIXED_HUFFMAN = htools.make(a, 0, 288, 10);
		return FIXED_HUFFMAN;
	}

	// Decompress up to `len` bytes into b[pos...]; returns how many bytes
	// were actually produced (less than `len` only at end of stream).
	public function readBytes(b, pos, len) {
		needed = len;
		outpos = pos;
		output = b;
		if (len > 0)
			while (inflateLoop()) {}
		return len - needed;
	}

	// Read `n` bits LSB-first, refilling the accumulator from the input.
	function getBits(n) {
		while (nbits < n) {
			bits |= input.readByte() << nbits;
			nbits += 8;
		}
		var b = bits & ((1 << n) - 1);
		nbits -= n;
		bits >>= n;
		return b;
	}

	// Read a single bit as a Bool.
	function getBit() {
		if (nbits == 0) {
			nbits = 8;
			bits = input.readByte();
		}
		var b = bits & 1 == 1;
		nbits--;
		bits >>= 1;
		return b;
	}

	// Read `n` bits in reversed order (used for the fixed distance codes,
	// which are stored most-significant-bit first).
	function getRevBits(n) {
		return if (n == 0)
			0
		else if (getBit())
			(1 << (n - 1)) | getRevBits(n - 1)
		else
			getRevBits(n - 1);
	}

	// Drop any partially-consumed byte (stored blocks are byte-aligned).
	function resetBits() {
		bits = 0;
		nbits = 0;
	}

	// Emit bytes: copy into both the caller's buffer and the sliding window.
	function addBytes(b, p, len) {
		window.addBytes(b, p, len);
		output.blit(outpos, b, p, len);
		needed -= len;
		outpos += len;
	}

	function addByte(b) {
		window.addByte(b);
		output.set(outpos, b);
		needed--;
		outpos++;
	}

	// dist == 1 match: repeat the last output byte `n` times.
	function addDistOne(n) {
		var c = window.getLastChar();
		for (i in 0...n)
			addByte(c);
	}

	// Copy `len` bytes from `d` bytes back in the window (caller guarantees
	// len <= d, so the source range is already fully written).
	function addDist(d, len) {
		addBytes(window.buffer, window.pos - d, len);
	}

	// Walk the Huffman tree bit by bit (or table-jump) to the next symbol.
	function applyHuffman(h) {
		return switch (h) {
			case Found(n): n;
			case NeedBit(a, b): applyHuffman(getBit() ? b : a);
			case NeedBits(n, tbl): applyHuffman(tbl[getBits(n)]);
		}
	}

	// Decode `max` code lengths into `a` using the code-length tree,
	// expanding the repeat codes: 16 = repeat previous length 3-6 times,
	// 17 = 3-10 zeros, 18 = 11-138 zeros (RFC 1951 §3.2.7).
	function inflateLengths(a, max) {
		var i = 0;
		var prev = 0;
		while (i < max) {
			var n = applyHuffman(huffman);
			switch (n) {
				case 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15:
					prev = n;
					a[i] = n;
					i++;
				case 16:
					var end = i + 3 + getBits(2);
					if (end > max)
						throw "Invalid data";
					while (i < end) {
						a[i] = prev;
						i++;
					}
				case 17:
					i += 3 + getBits(3);
					if (i > max)
						throw "Invalid data";
				case 18:
					i += 11 + getBits(7);
					if (i > max)
						throw "Invalid data";
				default:
					throw "Invalid data";
			}
		}
	}

	// One step of the state machine; returns true while more output is both
	// needed and possible. Each case handles exactly one State value.
	function inflateLoop() {
		switch (state) {
			case Head:
				// zlib header: CMF byte (method + window size) then FLG byte.
				var cmf = input.readByte();
				var cm = cmf & 15;
				var cinfo = cmf >> 4;
				if (cm != 8)
					throw "Invalid data";
				var flg = input.readByte();
				// var fcheck = flg & 31;
				var fdict = flg & 32 != 0;
				// var flevel = flg >> 6;
				if (((cmf << 8) + flg) % 31 != 0)
					throw "Invalid data";
				if (fdict)
					throw "Unsupported dictionary";
				state = Block;
				return true;
			case Crc:
				var calc = window.checksum();
				if (calc == null) {
					// checksumming was disabled at construction
					state = Done;
					return true;
				}
				var crc = Adler32.read(input);
				if (!calc.equals(crc))
					throw "Invalid CRC";
				state = Done;
				return true;
			case Done:
				// nothing
				return false;
			case Block:
				// block header: BFINAL bit then 2-bit BTYPE
				isFinal = getBit();
				switch (getBits(2)) {
					case 0: // no compression
						len = input.readUInt16();
						var nlen = input.readUInt16();
						if (nlen != 0xFFFF - len) throw "Invalid data";
						state = Flat;
						var r = inflateLoop();
						resetBits();
						return r;
					case 1: // fixed Huffman
						huffman = buildFixedHuffman();
						huffdist = null;
						state = CData;
						return true;
					case 2: // dynamic Huffman
						var hlit = getBits(5) + 257;
						var hdist = getBits(5) + 1;
						var hclen = getBits(4) + 4;
						// first decode the code-length-code tree ...
						for (i in 0...hclen)
							lengths[CODE_LENGTHS_POS[i]] = getBits(3);
						for (i in hclen...19)
							lengths[CODE_LENGTHS_POS[i]] = 0;
						huffman = htools.make(lengths, 0, 19, 8);
						// ... then use it to decode the literal and distance trees
						var lengths = new Array();
						for (i in 0...hlit + hdist)
							lengths.push(0);
						inflateLengths(lengths, hlit + hdist);
						huffdist = htools.make(lengths, hlit, hdist, 16);
						huffman = htools.make(lengths, 0, hlit, 16);
						state = CData;
						return true;
					default:
						throw "Invalid data";
				}
			case Flat:
				// copy as much of the stored block as the caller wants
				var rlen = (len < needed) ? len : needed;
				var bytes = input.read(rlen);
				len -= rlen;
				addBytes(bytes, 0, rlen);
				if (len == 0)
					state = isFinal ? Crc : Block;
				return needed > 0;
			case DistOne:
				var rlen = (len < needed) ? len : needed;
				addDistOne(rlen);
				len -= rlen;
				if (len == 0)
					state = CData;
				return needed > 0;
			case Dist:
				// copy in chunks of at most `dist` bytes so an overlapping
				// match reads data that has already been written
				while (len > 0 && needed > 0) {
					var rdist = (len < dist) ? len : dist;
					var rlen = (needed < rdist) ? needed : rdist;
					addDist(dist, rlen);
					len -= rlen;
				}
				if (len == 0)
					state = CData;
				return needed > 0;
			case CData:
				var n = applyHuffman(huffman);
				if (n < 256) {
					// literal byte
					addByte(n);
					return needed > 0;
				} else if (n == 256) {
					// end-of-block symbol
					state = isFinal ? Crc : Block;
					return true;
				} else {
					// length/distance pair
					n -= 257;
					var extra_bits = LEN_EXTRA_BITS_TBL[n];
					if (extra_bits == -1)
						throw "Invalid data";
					len = LEN_BASE_VAL_TBL[n] + getBits(extra_bits);
					var dist_code = if (huffdist == null) getRevBits(5) else applyHuffman(huffdist);
					extra_bits = DIST_EXTRA_BITS_TBL[dist_code];
					if (extra_bits == -1)
						throw "Invalid data";
					dist = DIST_BASE_VAL_TBL[dist_code] + getBits(extra_bits);
					if (dist > window.available())
						throw "Invalid data";
					state = (dist == 1) ? DistOne : Dist;
					return true;
				}
		}
	}

	// Decompress the whole zlib stream from `i` into a single Bytes value,
	// reading through a `bufsize`-byte scratch buffer.
	public static function run(i:haxe.io.Input, ?bufsize = 65536) {
		var buf = haxe.io.Bytes.alloc(bufsize);
		var output = new haxe.io.BytesBuffer();
		var inflate = new InflateImpl(i);
		while (true) {
			var len = inflate.readBytes(buf, 0, bufsize);
			output.addBytes(buf, 0, len);
			if (len < bufsize)
				break;
		}
		return output.getBytes();
	}
}

View File

@ -0,0 +1,213 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
import haxe.zip.Entry;
import haxe.ds.List;
// see http://www.pkware.com/documents/casestudies/APPNOTE.TXT
class Reader {
	var i:haxe.io.Input;

	public function new(i) {
		this.i = i;
	}

	// Decode an MS-DOS date/time pair (time word first, as stored in zip
	// headers). Seconds are stored halved (2-second resolution); zip months
	// are 1-based while Haxe Date months are 0-based, hence `month - 1`.
	function readZipDate() {
		var t = i.readUInt16();
		var hour = (t >> 11) & 31;
		var min = (t >> 5) & 63;
		var sec = t & 31;
		var d = i.readUInt16();
		var year = d >> 9;
		var month = (d >> 5) & 15;
		var day = d & 31;
		return new Date(year + 1980, month - 1, day, hour, min, sec << 1);
	}

	// Parse `length` bytes of extra-field data: a sequence of
	// (tag:u16, len:u16, payload) records. Tag 0x7075 is the Info-ZIP
	// Unicode Path record; anything else is preserved raw as FUnknown.
	function readExtraFields(length) {
		var fields = new List();
		while (length > 0) {
			if (length < 4)
				throw "Invalid extra fields data";
			var tag = i.readUInt16();
			var len = i.readUInt16();
			if (length < len)
				throw "Invalid extra fields data";
			switch (tag) {
				case 0x7075:
					var version = i.readByte();
					if (version != 1) {
						// unknown version of the record: keep the payload verbatim
						var data = new haxe.io.BytesBuffer();
						data.addByte(version);
						data.add(i.read(len - 1));
						fields.add(FUnknown(tag, data.getBytes()));
					} else {
						// version(1) + crc(4) already consumed 5 of `len` bytes
						var crc = i.readInt32();
						var name = i.read(len - 5).toString();
						fields.add(FInfoZipUnicodePath(name, crc));
					}
				default:
					fields.add(FUnknown(tag, i.read(len)));
			}
			length -= 4 + len;
		}
		return fields;
	}

	// Read one local file header. Returns null when the central directory
	// (0x02014B50) or end-of-central-directory (0x06054B50) signature is
	// reached, i.e. there are no more entries. The returned entry has `data`
	// unset, and `crc32` null when a data descriptor follows the data.
	public function readEntryHeader():Entry {
		var i = this.i;
		var h = i.readInt32();
		if (h == 0x02014B50 || h == 0x06054B50)
			return null;
		if (h != 0x04034B50)
			throw "Invalid Zip Data";
		var version = i.readUInt16();
		var flags = i.readUInt16();
		// general-purpose bit 11: the file name is UTF-8 encoded
		var utf8 = flags & 0x800 != 0;
		if ((flags & 0xF7F1) != 0)
			throw "Unsupported flags " + flags;
		var compression = i.readUInt16();
		var compressed = (compression != 0);
		// only method 8 (deflate) and method 0 (stored) are supported
		if (compressed && compression != 8)
			throw "Unsupported compression " + compression;
		var mtime = readZipDate();
		var crc32:Null<Int> = i.readInt32();
		var csize = i.readInt32();
		var usize = i.readInt32();
		var fnamelen = i.readInt16();
		var elen = i.readInt16();
		var fname = i.readString(fnamelen);
		var fields = readExtraFields(elen);
		if (utf8)
			fields.push(FUtf8);
		var data = null;
		// we have a data descriptor that store the real crc/sizes
		// after the compressed data, let's wait for it
		if ((flags & 8) != 0)
			crc32 = null;
		return {
			fileName: fname,
			fileSize: usize,
			fileTime: mtime,
			compressed: compressed,
			dataSize: csize,
			data: data,
			crc32: crc32,
			extraFields: fields,
		};
	}

	// Read every entry in the archive. Entries using a trailing data
	// descriptor are inflated on the fly (their sizes are unknown up front)
	// and come back with `compressed == false`.
	public function read():List<Entry> {
		var l = new List();
		var buf = null;
		var tmp = null;
		while (true) {
			var e = readEntryHeader();
			if (e == null)
				break;
			// do we have a data descriptor? (see readEntryHeader)
			if (e.crc32 == null) {
				if (e.compressed) {
					#if neko
					// enter progressive mode : we use a different input which has
					// a temporary buffer, this is necessary since we have to uncompress
					// progressively, and after that we might have pending read data
					// that needs to be processed
					var bufSize = 65536;
					if (buf == null) {
						buf = new haxe.io.BufferInput(i, haxe.io.Bytes.alloc(bufSize));
						tmp = haxe.io.Bytes.alloc(bufSize);
						i = buf;
					}
					var out = new haxe.io.BytesBuffer();
					var z = new neko.zip.Uncompress(-15);
					z.setFlushMode(neko.zip.Flush.SYNC);
					while (true) {
						if (buf.available == 0)
							buf.refill();
						var p = bufSize - buf.available;
						if (p != buf.pos) {
							// because of lack of "srcLen" in zip api, we need to always be stuck to the buffer end
							buf.buf.blit(p, buf.buf, buf.pos, buf.available);
							buf.pos = p;
						}
						var r = z.execute(buf.buf, buf.pos, tmp, 0);
						out.addBytes(tmp, 0, r.write);
						buf.pos += r.read;
						buf.available -= r.read;
						if (r.done)
							break;
					}
					e.data = out.getBytes();
					#else
					var bufSize = 65536;
					if (tmp == null)
						tmp = haxe.io.Bytes.alloc(bufSize);
					var out = new haxe.io.BytesBuffer();
					// raw deflate stream: no zlib header, no Adler32 check
					var z = new InflateImpl(i, false, false);
					while (true) {
						var n = z.readBytes(tmp, 0, bufSize);
						out.addBytes(tmp, 0, n);
						if (n < bufSize)
							break;
					}
					e.data = out.getBytes();
					#end
				} else
					e.data = i.read(e.dataSize);
				// data descriptor: crc32 then compressed/uncompressed sizes,
				// optionally preceded by the 0x08074b50 signature
				e.crc32 = i.readInt32();
				if (e.crc32 == 0x08074b50)
					e.crc32 = i.readInt32();
				e.dataSize = i.readInt32();
				e.fileSize = i.readInt32();
				// set data to uncompressed
				e.dataSize = e.fileSize;
				e.compressed = false;
			} else
				e.data = i.read(e.dataSize);
			l.add(e);
		}
		return l;
	}

	// Convenience wrapper: read all entries from `i`.
	public static function readZip(i:haxe.io.Input) {
		var r = new Reader(i);
		return r.read();
	}

	// Uncompress a single entry in place and return its data
	// (same logic as Tools.uncompress).
	public static function unzip(f:Entry) {
		if (!f.compressed)
			return f.data;
		// -15 window bits: raw deflate stream without a zlib header
		var c = new haxe.zip.Uncompress(-15);
		var s = haxe.io.Bytes.alloc(f.fileSize);
		var r = c.execute(f.data, 0, s, 0);
		c.close();
		if (!r.done || r.read != f.data.length || r.write != f.fileSize)
			throw "Invalid compressed data for " + f.fileName;
		f.compressed = false;
		f.dataSize = f.fileSize;
		f.data = s;
		return f.data;
	}
}

View File

@ -0,0 +1,52 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
class Tools {
	/**
		Deflate-compress the entry's data in place (no-op when already
		compressed). The zlib header (2 bytes) and trailing Adler32
		(4 bytes) are stripped so the payload is raw deflate, as stored
		inside zip archives.
	**/
	public static function compress(f:Entry, level:Int) {
		if (f.compressed)
			return;
		// this should be optimized with a temp buffer
		// that would discard the first two bytes
		// (in order to prevent 2x mem usage for large files)
		var zlibData = haxe.zip.Compress.run(f.data, level);
		f.data = zlibData.sub(2, zlibData.length - 6);
		f.dataSize = f.data.length;
		f.compressed = true;
	}

	/**
		Inflate the entry's raw-deflate data in place (no-op when not
		compressed). Throws when the stream is incomplete or the consumed
		and produced sizes do not match the entry header.
	**/
	public static function uncompress(f:Entry) {
		if (!f.compressed)
			return;
		// -15 window bits: raw deflate stream without a zlib header
		var inflater = new Uncompress(-15);
		var out = haxe.io.Bytes.alloc(f.fileSize);
		var r = inflater.execute(f.data, 0, out, 0);
		inflater.close();
		if (!r.done || r.read != f.data.length || r.write != f.fileSize)
			throw "Invalid compressed data for " + f.fileName;
		f.compressed = false;
		f.dataSize = f.fileSize;
		f.data = out;
	}
}

View File

@ -0,0 +1,41 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
/**
	Platform stub for zlib inflate decompression: the streaming API throws
	`NotImplementedException` on targets without native support, while the
	static `run` helper falls back to the pure-Haxe InflateImpl.
**/
class Uncompress {
	// `windowBits`: presumably the zlib window-size parameter, where a
	// negative value selects a raw deflate stream without a zlib header
	// (callers in this package pass -15) — confirm per native target.
	public function new(?windowBits:Int) {
		throw new haxe.exceptions.NotImplementedException("Not implemented for this platform");
	}

	// Inflate a slice of `src` into `dst` starting at `dstPos`.
	// Unreachable in this stub: the constructor always throws.
	public function execute(src:haxe.io.Bytes, srcPos:Int, dst:haxe.io.Bytes, dstPos:Int):{done:Bool, read:Int, write:Int} {
		return null;
	}

	// Select the flush behaviour for subsequent execute() calls; no-op here.
	public function setFlushMode(f:FlushMode) {}

	// Release native resources; no-op here.
	public function close() {}

	// One-shot inflate of a full zlib stream, delegating to the pure-Haxe
	// implementation, so it works on every platform.
	public static function run(src:haxe.io.Bytes, ?bufsize:Int):haxe.io.Bytes {
		return InflateImpl.run(new haxe.io.BytesInput(src), bufsize);
	}
}

View File

@ -0,0 +1,193 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
import haxe.ds.List;
// Streams a zip archive to an output: one local file header + data per
// entry, then the central directory (see PKWARE APPNOTE.TXT).
class Writer {
	/**
		The next constant is required for computing the Central
		Directory Record(CDR) size. CDR consists of some fields
		of constant size and a filename. Constant represents
		total length of all fields with constant size for each
		file in archive
	**/
	inline static var CENTRAL_DIRECTORY_RECORD_FIELDS_SIZE = 46;

	/**
		The following constant is the total size of all fields
		of Local File Header. It's required for calculating
		offset of start of central directory record
	**/
	inline static var LOCAL_FILE_HEADER_FIELDS_SIZE = 30;

	var o:haxe.io.Output;
	// Metadata of every entry written so far, replayed by writeCDR().
	var files:List<{
		name:String,
		compressed:Bool,
		clen:Int, // stored (possibly compressed) data length
		size:Int, // uncompressed size
		crc:Int,
		date:Date,
		fields:haxe.io.Bytes // pre-serialized extra fields
	}>;

	public function new(o:haxe.io.Output) {
		this.o = o;
		files = new List();
	}

	// Encode `date` as an MS-DOS time word then date word (zip layout).
	// Seconds are halved (2-second resolution); months are written 1-based.
	function writeZipDate(date:Date) {
		var hour = date.getHours();
		var min = date.getMinutes();
		var sec = date.getSeconds() >> 1;
		o.writeUInt16((hour << 11) | (min << 5) | sec);
		var year = date.getFullYear() - 1980;
		var month = date.getMonth() + 1;
		var day = date.getDate();
		o.writeUInt16((year << 9) | (month << 5) | day);
	}

	// Write the local file header for `f` and record its metadata for the
	// central directory. Mutates `f`: fills crc32/fileSize/dataSize and
	// replaces a null `data` with an empty buffer.
	public function writeEntryHeader(f:Entry) {
		var o = this.o;
		var flags = 0;
		if (f.extraFields != null) {
			for (e in f.extraFields)
				switch (e) {
					case FUtf8:
						flags |= 0x800;
					default:
				}
		}
		o.writeInt32(0x04034B50);
		o.writeUInt16(0x0014); // version
		o.writeUInt16(flags); // flags
		if (f.data == null) {
			f.fileSize = 0;
			f.dataSize = 0;
			f.crc32 = 0;
			f.compressed = false;
			f.data = haxe.io.Bytes.alloc(0);
		} else {
			if (f.crc32 == null) {
				// the CRC is always over the uncompressed bytes, which are
				// no longer available once the data is compressed
				if (f.compressed)
					throw "CRC32 must be processed before compression";
				f.crc32 = haxe.crypto.Crc32.make(f.data);
			}
			if (!f.compressed)
				f.fileSize = f.data.length;
			f.dataSize = f.data.length;
		}
		o.writeUInt16(f.compressed ? 8 : 0);
		writeZipDate(f.fileTime);
		o.writeInt32(f.crc32);
		o.writeInt32(f.dataSize);
		o.writeInt32(f.fileSize);
		// NOTE(review): String.length counts characters; for non-ASCII names
		// this may differ from the byte count emitted by writeString below —
		// verify on targets where strings are not byte-based.
		o.writeUInt16(f.fileName.length);
		var e = new haxe.io.BytesOutput();
		if (f.extraFields != null) {
			for (f in f.extraFields)
				switch (f) {
					case FInfoZipUnicodePath(name, crc):
						var namebytes = haxe.io.Bytes.ofString(name);
						e.writeUInt16(0x7075);
						e.writeUInt16(namebytes.length + 5);
						e.writeByte(1); // version
						e.writeInt32(crc);
						e.write(namebytes);
					case FUnknown(tag, bytes):
						e.writeUInt16(tag);
						e.writeUInt16(bytes.length);
						e.write(bytes);
					case FUtf8:
						// nothing
				}
		}
		var ebytes = e.getBytes();
		o.writeUInt16(ebytes.length);
		o.writeString(f.fileName);
		o.write(ebytes);
		files.add({
			name: f.fileName,
			compressed: f.compressed,
			clen: f.data.length,
			size: f.fileSize,
			crc: f.crc32,
			date: f.fileTime,
			fields: ebytes
		});
	}

	// Write every entry (header + data), then the central directory.
	public function write(files:List<Entry>) {
		for (f in files) {
			writeEntryHeader(f);
			o.writeFullBytes(f.data, 0, f.data.length);
		}
		writeCDR();
	}

	// Write one central directory record per entry written so far, followed
	// by the end-of-central-directory record.
	public function writeCDR() {
		var cdr_size = 0;
		var cdr_offset = 0;
		for (f in files) {
			var namelen = f.name.length;
			var extraFieldsLength = f.fields.length;
			o.writeInt32(0x02014B50); // header
			o.writeUInt16(0x0014); // version made-by
			o.writeUInt16(0x0014); // version
			// NOTE(review): always 0 here, even when writeEntryHeader set the
			// UTF-8 flag (0x800) in the local header — confirm intended.
			o.writeUInt16(0); // flags
			o.writeUInt16(f.compressed ? 8 : 0);
			writeZipDate(f.date);
			o.writeInt32(f.crc);
			o.writeInt32(f.clen);
			o.writeInt32(f.size);
			o.writeUInt16(namelen);
			o.writeUInt16(extraFieldsLength);
			o.writeUInt16(0); // comment length always 0
			o.writeUInt16(0); // disk number start
			o.writeUInt16(0); // internal file attributes
			o.writeInt32(0); // external file attributes
			o.writeInt32(cdr_offset); // relative offset of local header
			o.writeString(f.name);
			o.write(f.fields);
			cdr_size += CENTRAL_DIRECTORY_RECORD_FIELDS_SIZE + namelen + extraFieldsLength;
			cdr_offset += LOCAL_FILE_HEADER_FIELDS_SIZE + namelen + extraFieldsLength + f.clen;
		}
		// end of central dir signature
		o.writeInt32(0x06054B50);
		// number of this disk
		o.writeUInt16(0);
		// number of the disk with the start of the central directory
		o.writeUInt16(0);
		// total number of entries in the central directory on this disk
		o.writeUInt16(files.length);
		// total number of entries in the central directory
		o.writeUInt16(files.length);
		// size of the central directory record
		o.writeInt32(cdr_size);
		// offset of start of central directory with respect to the starting disk number
		o.writeInt32(cdr_offset);
		// .ZIP file comment length
		o.writeUInt16(0);
	}
}