Update Files

This commit is contained in:
2025-01-22 16:18:30 +01:00
parent ed4603cf95
commit a36294b518
16718 changed files with 2960346 additions and 0 deletions

View File

@ -0,0 +1,112 @@
package haxe;
import java.NativeArray;
import java.lang.Throwable;
import java.lang.RuntimeException;
import java.lang.StackTraceElement;
import java.io.PrintStream;
import java.io.PrintWriter;
@:coreApi
class Exception extends NativeException {
	/** Exception message (read via the underlying `Throwable.getMessage`). **/
	public var message(get,never):String;
	/** The call stack at the moment this exception was created, converted lazily to Haxe form. **/
	public var stack(get,never):CallStack;
	/** The exception wrapped by this one, if any. **/
	public var previous(get,never):Null<Exception>;
	/** The underlying JVM `Throwable`. **/
	public var native(get,never):Any;

	// Cached Haxe-side call stack; built on first access to `stack`.
	@:noCompletion var __exceptionStack:Null<CallStack>;
	// The Throwable carrying the actual JVM stack trace (may be `this` itself).
	@:noCompletion var __nativeException:Throwable;
	// Chained previous exception, if constructed with one.
	@:noCompletion var __previousException:Null<Exception>;

	/**
		Wraps a caught value into a `haxe.Exception`: values that already are
		an `Exception` pass through unchanged, other `Throwable`s are wrapped,
		and arbitrary non-Throwable values become a `ValueException`.
	**/
	static function caught(value:Any):Exception {
		if(Std.isOfType(value, Exception)) {
			return value;
		} else if(Std.isOfType(value, Throwable)) {
			return new Exception((value:Throwable).getMessage(), null, value);
		} else {
			return new ValueException(value, null, value);
		}
	}

	/**
		Prepares a value for being thrown on the JVM side.
		Returns a `RuntimeException` where possible (so it can be thrown
		unchecked); otherwise wraps the value in a `ValueException`.
	**/
	static function thrown(value:Any):Any {
		if(Std.isOfType(value, Exception)) {
			var native = (value:Exception).__nativeException;
			return Std.isOfType(native, RuntimeException) ? native : value;
		} else if(Std.isOfType(value, RuntimeException)) {
			return value;
		} else if(Std.isOfType(value, Throwable)) {
			return new Exception((value:Throwable).getMessage(), null, value);
		} else {
			var e = new ValueException(value);
			// Drop the topmost frame so the trace starts at the `throw`
			// site rather than inside this helper.
			var stack = e.getStackTrace();
			if(stack.length > 1) {
				e.setStackTrace(java.util.Arrays.copyOfRange(stack, 1, stack.length));
			}
			return e;
		}
	}

	public function new(message:String, ?previous:Exception, ?native:Any) {
		super(message, cast previous);
		__previousException = previous;
		if(native != null && Std.isOfType(native, Throwable)) {
			// Reuse the native Throwable's stack trace for this wrapper.
			__nativeException = native;
			setStackTrace(__nativeException.getStackTrace());
		} else {
			// No native counterpart: this instance is its own Throwable.
			__nativeException = cast this;
		}
	}

	// Value exposed to catch sites that expect the raw native exception.
	function unwrap():Any {
		return __nativeException;
	}

	override public function toString():String {
		return message;
	}

	/** Returns the message together with the stack as one printable string. **/
	public function details():String {
		return inline CallStack.exceptionToString(this);
	}

	function get_message():String {
		return this.getMessage();
	}

	function get_previous():Null<Exception> {
		return __previousException;
	}

	final function get_native():Any {
		return __nativeException;
	}

	function get_stack():CallStack {
		// Convert the native stack trace on first access, then cache it.
		return switch __exceptionStack {
			case null:
				__exceptionStack = NativeStackTrace.toHaxe(__nativeException.getStackTrace());
			case s: s;
		}
	}
}
// Extern mapping of `java.lang.RuntimeException`, used as the base class of
// `haxe.Exception` so that Haxe exceptions are unchecked Throwables on the JVM.
// All members are hidden from completion and reflection; Haxe code is expected
// to use the `haxe.Exception` API instead of calling these directly.
@:dox(hide)
@:noCompletion
@:native('java.lang.RuntimeException')
private extern class NativeException {
	@:noCompletion private function new(?message:String, ?cause:Throwable):Void;
	@:noCompletion @:skipReflection private function addSuppressed (param1:Throwable):Void;
	@:noCompletion @:skipReflection private function fillInStackTrace ():Throwable;
	@:noCompletion @:skipReflection private function getCause ():Throwable;
	@:noCompletion @:skipReflection private function getLocalizedMessage ():String;
	@:noCompletion @:skipReflection private function getMessage ():String;
	@:noCompletion @:skipReflection private function getStackTrace ():NativeArray<StackTraceElement>;
	@:noCompletion @:skipReflection private function getSuppressed ():NativeArray<Throwable>;
	@:noCompletion @:skipReflection private function initCause (param1:Throwable):Throwable;
	@:noCompletion @:skipReflection @:overload private function printStackTrace (param1:PrintWriter):Void;
	@:noCompletion @:skipReflection @:overload private function printStackTrace ():Void;
	@:noCompletion @:skipReflection @:overload private function printStackTrace (param1:PrintStream):Void;
	@:noCompletion @:skipReflection private function setStackTrace (param1:NativeArray<StackTraceElement>):Void;
	@:noCompletion @:skipReflection private function toString ():String;
}

View File

@ -0,0 +1,247 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe;
using haxe.Int64;
import haxe.Int64Helper;
private typedef __Int64 = java.StdTypes.Int64;
@:coreApi
@:transitive
abstract Int64(__Int64) from __Int64 to __Int64 {
	#if jvm
	extern public static function make(high:Int32, low:Int32):Int64;
	#else
	/** Builds an `Int64` from a high and a low 32-bit half. **/
	public static inline function make(high:Int32, low:Int32):Int64
		// Mask the low half with 0xffffffffL so its sign bit does not
		// propagate into the upper 32 bits of the result.
		return new Int64(((cast high : __Int64) << 32) | ((cast low : __Int64) & (untyped __java__('0xffffffffL') : Int64)));
	#end

	private inline function new(x:__Int64)
		this = x;

	// Raw native 64-bit value backing this abstract.
	private var val(get, set):__Int64;

	inline function get_val():__Int64
		return this;

	inline function set_val(x:__Int64):__Int64
		return this = x;

	/** Most significant 32 bits. **/
	public var high(get, never):Int32;

	inline function get_high():Int32
		return cast(this >> 32);

	/** Least significant 32 bits. **/
	public var low(get, never):Int32;

	inline function get_low():Int32
		return cast this;

	public inline function copy():Int64
		return new Int64(this);

	@:from public static inline function ofInt(x:Int):Int64
		return cast x;

	@:deprecated('haxe.Int64.is() is deprecated. Use haxe.Int64.isInt64() instead')
	inline public static function is(val:Dynamic):Bool
		return Std.isOfType(val, java.lang.Long.LongClass);

	inline public static function isInt64(val:Dynamic):Bool
		return Std.isOfType(val, java.lang.Long.LongClass);

	/**
		Truncates to `Int`, throwing "Overflow" when the value does not fit.
		NOTE(review): the comparison appears to rely on the literal 0x80000000
		overflowing to the negative Int32 -2147483648 on this target — confirm
		before touching this condition.
	**/
	public static inline function toInt(x:Int64):Int {
		if (x.val < 0x80000000 || x.val > 0x7FFFFFFF)
			throw "Overflow";
		return cast x.val;
	}

	public static inline function getHigh(x:Int64):Int32
		return cast(x.val >> 32);

	public static inline function getLow(x:Int64):Int32
		return cast(x.val);

	public static inline function isNeg(x:Int64):Bool
		return x.val < 0;

	public static inline function isZero(x:Int64):Bool
		return x.val == 0;

	/** Signed comparison; returns a negative, zero or positive `Int`. **/
	public static inline function compare(a:Int64, b:Int64):Int {
		if (a.val < b.val)
			return -1;
		if (a.val > b.val)
			return 1;
		return 0;
	}

	/** Unsigned comparison: values with the sign bit set sort above those without. **/
	public static inline function ucompare(a:Int64, b:Int64):Int {
		if (a.val < 0)
			return (b.val < 0) ? compare(a, b) : 1;
		return (b.val < 0) ? -1 : compare(a, b);
	}

	public static inline function toStr(x:Int64):String
		return '${x.val}';

	/** Truncated division and remainder computed in one call. **/
	public static inline function divMod(dividend:Int64, divisor:Int64):{quotient:Int64, modulus:Int64}
		return {quotient: dividend / divisor, modulus: dividend % divisor};

	private inline function toString():String
		return '$this';

	public static function parseString(sParam:String):Int64 {
		// can this be done?: return new Int64( java.lang.Long.LongClass.parseLong( sParam ) );
		return Int64Helper.parseString(sParam);
	}

	public static function fromFloat(f:Float):Int64 {
		return Int64Helper.fromFloat(f);
	}

	// The operator overloads below delegate directly to the native 64-bit
	// operators; the `Int` variants exist so mixed Int64/Int expressions
	// avoid an explicit promotion at each call site.
	@:op(-A) public static function neg(x:Int64):Int64
		return -x.val;

	@:op(++A) private inline function preIncrement():Int64
		return ++this;

	@:op(A++) private inline function postIncrement():Int64
		return this++;

	@:op(--A) private inline function preDecrement():Int64
		return --this;

	@:op(A--) private inline function postDecrement():Int64
		return this--;

	@:op(A + B) public static inline function add(a:Int64, b:Int64):Int64
		return a.val + b.val;

	@:op(A + B) @:commutative private static inline function addInt(a:Int64, b:Int):Int64
		return a.val + b;

	@:op(A - B) public static inline function sub(a:Int64, b:Int64):Int64
		return a.val - b.val;

	@:op(A - B) private static inline function subInt(a:Int64, b:Int):Int64
		return a.val - b;

	@:op(A - B) private static inline function intSub(a:Int, b:Int64):Int64
		return a - b.val;

	@:op(A * B) public static inline function mul(a:Int64, b:Int64):Int64
		return a.val * b.val;

	@:op(A * B) @:commutative private static inline function mulInt(a:Int64, b:Int):Int64
		return a.val * b;

	@:op(A / B) public static inline function div(a:Int64, b:Int64):Int64
		return a.val / b.val;

	@:op(A / B) private static inline function divInt(a:Int64, b:Int):Int64
		return a.val / b;

	@:op(A / B) private static inline function intDiv(a:Int, b:Int64):Int64
		return a / b.val;

	@:op(A % B) public static inline function mod(a:Int64, b:Int64):Int64
		return a.val % b.val;

	@:op(A % B) private static inline function modInt(a:Int64, b:Int):Int64
		return a.val % b;

	@:op(A % B) private static inline function intMod(a:Int, b:Int64):Int64
		return a % b.val;

	@:op(A == B) public static inline function eq(a:Int64, b:Int64):Bool
		return a.val == b.val;

	@:op(A == B) @:commutative private static inline function eqInt(a:Int64, b:Int):Bool
		return a.val == b;

	@:op(A != B) public static inline function neq(a:Int64, b:Int64):Bool
		return a.val != b.val;

	@:op(A != B) @:commutative private static inline function neqInt(a:Int64, b:Int):Bool
		return a.val != b;

	@:op(A < B) private static inline function lt(a:Int64, b:Int64):Bool
		return a.val < b.val;

	@:op(A < B) private static inline function ltInt(a:Int64, b:Int):Bool
		return a.val < b;

	@:op(A < B) private static inline function intLt(a:Int, b:Int64):Bool
		return a < b.val;

	@:op(A <= B) private static inline function lte(a:Int64, b:Int64):Bool
		return a.val <= b.val;

	@:op(A <= B) private static inline function lteInt(a:Int64, b:Int):Bool
		return a.val <= b;

	@:op(A <= B) private static inline function intLte(a:Int, b:Int64):Bool
		return a <= b.val;

	@:op(A > B) private static inline function gt(a:Int64, b:Int64):Bool
		return a.val > b.val;

	@:op(A > B) private static inline function gtInt(a:Int64, b:Int):Bool
		return a.val > b;

	@:op(A > B) private static inline function intGt(a:Int, b:Int64):Bool
		return a > b.val;

	@:op(A >= B) private static inline function gte(a:Int64, b:Int64):Bool
		return a.val >= b.val;

	@:op(A >= B) private static inline function gteInt(a:Int64, b:Int):Bool
		return a.val >= b;

	@:op(A >= B) private static inline function intGte(a:Int, b:Int64):Bool
		return a >= b.val;

	@:op(~A) private static inline function complement(x:Int64):Int64
		return ~x.val;

	@:op(A & B) public static inline function and(a:Int64, b:Int64):Int64
		return a.val & b.val;

	@:op(A | B) public static inline function or(a:Int64, b:Int64):Int64
		return a.val | b.val;

	@:op(A ^ B) public static inline function xor(a:Int64, b:Int64):Int64
		return a.val ^ b.val;

	@:op(A << B) public static inline function shl(a:Int64, b:Int):Int64
		return a.val << b;

	@:op(A >> B) public static inline function shr(a:Int64, b:Int):Int64
		return a.val >> b;

	@:op(A >>> B) public static inline function ushr(a:Int64, b:Int):Int64
		return a.val >>> b;
}

View File

@ -0,0 +1,55 @@
package haxe;
import java.NativeArray;
import java.lang.ThreadLocal;
import java.lang.Throwable;
import java.lang.Thread;
import java.lang.StackTraceElement;
import haxe.CallStack.StackItem;
/**
	Internal API the compiler uses to capture and convert JVM stack traces.
	Do not use manually.
**/
@:dox(hide)
@:noCompletion
class NativeStackTrace {
	// Per-thread storage of the most recently caught Throwable.
	static var exception = new ThreadLocal<Throwable>();

	@:ifFeature('haxe.NativeStackTrace.exceptionStack')
	static public inline function saveStack(e:Throwable):Void {
		exception.set(e);
	}

	/**
		Returns the current thread's stack trace, with the top three frames
		(the trace-collection plumbing itself) removed when present.
	**/
	static public function callStack():NativeArray<StackTraceElement> {
		var frames = Thread.currentThread().getStackTrace();
		if (frames.length <= 3) {
			return frames;
		}
		return java.util.Arrays.copyOfRange(frames, 3, frames.length);
	}

	/** Returns the stack trace of the last saved exception, or an empty array. **/
	static public function exceptionStack():NativeArray<StackTraceElement> {
		var saved = exception.get();
		return saved == null ? new NativeArray(0) : saved.getStackTrace();
	}

	/**
		Converts a native stack trace to Haxe `StackItem`s, ignoring the
		first `skip` frames.
	**/
	static public function toHaxe(native:NativeArray<StackTraceElement>, skip:Int = 0):Array<StackItem> {
		var converted = [];
		for (index in 0...native.length) {
			if (index < skip) {
				continue;
			}
			var frame = native[index];
			var item = Method(frame.getClassName(), frame.getMethodName());
			var file = frame.getFileName();
			var line = frame.getLineNumber();
			// A frame with no file and no line carries only method info.
			converted.push(file != null || line >= 0 ? FilePos(item, file, line) : item);
		}
		return converted;
	}
}

View File

@ -0,0 +1,51 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe;
@:coreApi class Resource {
	// Populated by the compiler with the names of all embedded resources.
	@:keep static var content:Array<String>;

	/** Returns the names of all available resources. **/
	public static inline function listNames():Array<String> {
		return content.copy();
	}

	/**
		Returns the content of the resource `name` as a `String`,
		or `null` if it does not exist.
	**/
	@:access(haxe.io.Path.escape)
	public static function getString(name:String):String {
		var escaped = haxe.io.Path.escape(name, true);
		var raw = cast(Resource, java.lang.Class<Dynamic>).getResourceAsStream("/" + escaped);
		if (raw == null)
			return null;
		return new java.io.NativeInput(raw).readAll().toString();
	}

	/**
		Returns the content of the resource `name` as `Bytes`,
		or `null` if it does not exist.
	**/
	@:access(haxe.io.Path.escape)
	public static function getBytes(name:String):haxe.io.Bytes {
		var escaped = haxe.io.Path.escape(name, true);
		var raw = cast(Resource, java.lang.Class<Dynamic>).getResourceAsStream("/" + escaped);
		if (raw == null)
			return null;
		return new java.io.NativeInput(raw).readAll();
	}
}

View File

@ -0,0 +1,64 @@
package haxe;
import haxe.iterators.RestIterator;
import haxe.iterators.RestKeyValueIterator;
import java.NativeArray;
import java.lang.System;
import java.lang.Object;
import java.util.Arrays;
// Underlying storage: a native array of Object (element type is erased).
private typedef NativeRest<T> = NativeArray<Object>;

@:coreApi
abstract Rest<T>(NativeRest<T>) {
	/** Number of captured arguments. **/
	public var length(get,never):Int;

	inline function get_length():Int
		return this.length;

	/**
		Creates a `Rest` from a copy of `array`'s underlying storage, so
		later modifications of `array` are not reflected in the result.
	**/
	@:from static public function of<T>(array:Array<T>):Rest<T> {
		var native = @:privateAccess array.__a;
		var result:NativeRest<T>;
		#if jvm
		// On JVM the backing array can simply be cloned.
		result = (cast native:Object).clone();
		#else
		// Element-wise copy with an unchecked per-element cast to Object.
		result = new NativeRest<T>(native.length);
		for(i in 0...native.length)
			result[i] = cast native[i];
		#end
		return new Rest(result);
	}

	inline function new(a:NativeRest<T>):Void
		this = a;

	@:arrayAccess inline function get(index:Int):T
		return cast this[index];

	@:to public function toArray():Array<T> {
		return [for(i in 0...this.length) cast this[i]];
	}

	public inline function iterator():RestIterator<T>
		return new RestIterator<T>(this);

	public inline function keyValueIterator():RestKeyValueIterator<T>
		return new RestKeyValueIterator<T>(this);

	/** Returns a new `Rest` with `item` appended; this instance is unchanged. **/
	public function append(item:T):Rest<T> {
		var result = new NativeRest<T>(this.length + 1);
		System.arraycopy(this, 0, result, 0, this.length);
		result[this.length] = cast item;
		return new Rest(result);
	}

	/** Returns a new `Rest` with `item` prepended; this instance is unchanged. **/
	public function prepend(item:T):Rest<T> {
		var result = new NativeRest<T>(this.length + 1);
		System.arraycopy(this, 0, result, 1, this.length);
		result[0] = cast item;
		return new Rest(result);
	}

	public function toString():String {
		return toArray().toString();
	}
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.crypto;
import haxe.io.Bytes;
import haxe.io.BytesData;
import java.security.MessageDigest;
import java.nio.charset.StandardCharsets;
@:coreApi
class Md5 {
	/** Returns the hex-encoded MD5 digest of the UTF-8 bytes of `s`. **/
	public static function encode(s:String):String {
		var utf8 = (cast s : java.NativeString).getBytes(StandardCharsets.UTF_8);
		return Bytes.ofData(digest(utf8)).toHex();
	}

	/** Returns the MD5 digest of `b` as bytes. **/
	public static function make(b:haxe.io.Bytes):haxe.io.Bytes {
		var data = b.getData();
		return Bytes.ofData(digest(data));
	}

	// Delegates the hashing itself to the JDK's MessageDigest implementation.
	inline static function digest(b:BytesData):BytesData {
		return MessageDigest.getInstance("MD5").digest(b);
	}
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.crypto;
import haxe.io.Bytes;
import haxe.io.BytesData;
import java.security.MessageDigest;
import java.nio.charset.StandardCharsets;
@:coreApi
class Sha1 {
	/** Returns the hex-encoded SHA-1 digest of the UTF-8 bytes of `s`. **/
	public static function encode(s:String):String {
		var utf8 = (cast s : java.NativeString).getBytes(StandardCharsets.UTF_8);
		return Bytes.ofData(digest(utf8)).toHex();
	}

	/** Returns the SHA-1 digest of `b` as bytes. **/
	public static function make(b:haxe.io.Bytes):haxe.io.Bytes {
		var data = b.getData();
		return Bytes.ofData(digest(data));
	}

	// Delegates the hashing itself to the JDK's MessageDigest implementation.
	inline static function digest(b:BytesData):BytesData {
		return MessageDigest.getInstance("SHA-1").digest(b);
	}
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.crypto;
import haxe.io.Bytes;
import haxe.io.BytesData;
import java.security.MessageDigest;
import java.nio.charset.StandardCharsets;
@:coreApi
class Sha256 {
	/** Returns the hex-encoded SHA-256 digest of the UTF-8 bytes of `s`. **/
	public static function encode(s:String):String {
		var utf8 = (cast s : java.NativeString).getBytes(StandardCharsets.UTF_8);
		return Bytes.ofData(digest(utf8)).toHex();
	}

	/** Returns the SHA-256 digest of `b` as bytes. **/
	public static function make(b:haxe.io.Bytes):haxe.io.Bytes {
		var data = b.getData();
		return Bytes.ofData(digest(data));
	}

	// Delegates the hashing itself to the JDK's MessageDigest implementation.
	inline static function digest(b:BytesData):BytesData {
		return MessageDigest.getInstance("SHA-256").digest(b);
	}
}

View File

@ -0,0 +1,523 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.ds;
import java.NativeArray;
/*
* This IntMap implementation is based on khash (https://github.com/attractivechaos/klib/blob/master/khash.h)
* Copyright goes to Attractive Chaos <attractor@live.co.uk> and his contributors
*
* Thanks also to Jonas Malaco Filho for his Haxe-written IntMap code inspired by Python tables.
* (https://jonasmalaco.com/fossil/test/jonas-haxe/artifact/887b53126e237d6c68951111d594033403889304)
*/
@:coreApi class IntMap<T> implements haxe.Constraints.IMap<Int, T> {
	// Load factor: a rehash is triggered once nOccupied crosses nBuckets * HASH_UPPER.
	private static inline var HASH_UPPER = 0.7;

	// Bit array with two flag bits per bucket (see getFlag): "deleted" and "empty".
	private var flags:NativeArray<Int>;
	// Bucket keys. Stale keys may remain after deletion on purpose (see remove()).
	private var _keys:NativeArray<Int>;
	// Bucket values, parallel to _keys.
	private var vals:NativeArray<T>;

	// Number of buckets; always a power of two so (hash & (nBuckets-1)) indexes.
	private var nBuckets:Int;
	// Number of live entries.
	private var size:Int;
	// Live entries plus deleted-but-not-yet-reclaimed buckets.
	private var nOccupied:Int;
	// Occupancy threshold that triggers resize().
	private var upperBound:Int;

	#if !no_map_cache
	// Single-entry lookup cache: last resolved key and its bucket index (-1 = invalid).
	private var cachedKey:Int;
	private var cachedIndex:Int;
	#end

	public function new():Void {
		#if !no_map_cache
		cachedIndex = -1;
		#end
	}

	/**
		Maps `key` to `value`, overwriting any previous mapping.
		Grows (or compacts) the table first when the occupancy bound is hit.
	**/
	public function set(key:Int, value:T):Void {
		var targetIndex:Int;
		if (nOccupied >= upperBound) {
			if (nBuckets > (size << 1)) {
				resize(nBuckets - 1); // clear "deleted" elements
			} else {
				resize(nBuckets + 1);
			}
		}
		var flags = flags, _keys = _keys;
		{
			var mask = nBuckets - 1,
				hashedKey = hash(key),
				curIndex = hashedKey & mask;
			var delKey = -1, curFlag = 0;
			// to speed things up, don't loop if the first bucket is already free
			if (isEmpty(getFlag(flags, curIndex))) {
				targetIndex = curIndex;
			} else {
				// Probe until we hit the key itself or an empty bucket,
				// remembering the first deleted bucket so it can be reused.
				var inc = getInc(hashedKey, mask), last = curIndex;
				while (!(_keys[curIndex] == key || isEmpty(curFlag = getFlag(flags, curIndex)))) {
					if (delKey == -1 && isDel(curFlag)) {
						delKey = curIndex;
					}
					curIndex = (curIndex + inc) & mask;
					#if debug
					assert(curIndex != last);
					#end
				}
				if (delKey != -1 && isEmpty(getFlag(flags, curIndex))) {
					targetIndex = delKey;
				} else {
					targetIndex = curIndex;
				}
			}
		}
		var flag = getFlag(flags, targetIndex);
		if (isEmpty(flag)) {
			// Brand-new bucket: both counters grow.
			_keys[targetIndex] = key;
			vals[targetIndex] = value;
			setIsBothFalse(flags, targetIndex);
			size++;
			nOccupied++;
		} else if (isDel(flag)) {
			// Reusing a deleted bucket: it was already counted in nOccupied.
			_keys[targetIndex] = key;
			vals[targetIndex] = value;
			setIsBothFalse(flags, targetIndex);
			size++;
		} else {
			// Key already present: overwrite the value only.
			#if debug
			assert(_keys[targetIndex] == key);
			#end
			vals[targetIndex] = value;
		}
	}

	// Returns the bucket index of `key`, or -1 when absent.
	private final function lookup(key:Int):Int {
		if (nBuckets != 0) {
			var flags = flags, _keys = _keys;
			var mask = nBuckets - 1,
				k = hash(key),
				index = k & mask,
				curFlag = -1,
				inc = getInc(k, mask), /* inc == 1 for linear probing */
				last = index;
			do {
				if (_keys[index] == key) {
					if (isEmpty(curFlag = getFlag(flags, index))) {
						// Stale key in an empty bucket: keep probing.
						index = (index + inc) & mask;
						continue;
					} else if (isDel(curFlag)) {
						return -1;
					} else {
						return index;
					}
				} else {
					index = (index + inc) & mask;
				}
			} while (index != last);
		}
		return -1;
	}

	/** Returns the value `key` maps to, or `null` when absent. **/
	public function get(key:Int):Null<T> {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return vals[idx];
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return vals[idx];
		}
		return null;
	}

	// Like get(), but returns `def` instead of null when absent.
	private function getDefault(key:Int, def:T):T {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return vals[idx];
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return vals[idx];
		}
		return def;
	}

	/** Returns true if `key` has a mapping. **/
	public function exists(key:Int):Bool {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return true;
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return true;
		}
		return false;
	}

	/** Removes the mapping of `key`; returns true if one existed. **/
	public function remove(key:Int):Bool {
		var idx = -1;
		#if !no_map_cache
		if (!(cachedKey == key && ((idx = cachedIndex) != -1)))
		#end
		{
			idx = lookup(key);
		}
		if (idx == -1) {
			return false;
		} else {
			#if !no_map_cache
			if (cachedKey == key) {
				cachedIndex = -1;
			}
			#end
			if (!isEither(getFlag(flags, idx))) {
				setIsDelTrue(flags, idx);
				--size;
				vals[idx] = null;
				// we do NOT reset the keys here, as unlike StringMap, we check for keys equality
				// and stop if we find a key that is equal to the one we're looking for
				// setting this to 0 will allow the hash to contain duplicate `0` keys
				// (see #6457)
				// _keys[idx] = 0;
			}
			return true;
		}
	}

	// Rehashes into a table of (roughly) newNBuckets buckets; shrinks, grows,
	// or just clears deleted entries depending on the requested size.
	private final function resize(newNBuckets:Int):Void {
		// This function uses 0.25*n_bucktes bytes of working space instead of [sizeof(key_t+val_t)+.25]*n_buckets.
		var newFlags = null;
		var j = 1;
		{
			newNBuckets = roundUp(newNBuckets);
			if (newNBuckets < 4)
				newNBuckets = 4;
			if (size >= (newNBuckets * HASH_UPPER + 0.5))
				/* requested size is too small */ {
				j = 0;
			} else { /* hash table size to be changed (shrink or expand); rehash */
				var nfSize = flagsSize(newNBuckets);
				newFlags = new NativeArray(nfSize);
				for (i in 0...nfSize) {
					newFlags[i] = 0xaaaaaaaa; // isEmpty = true; isDel = false
				}
				if (nBuckets < newNBuckets) // expand
				{
					var k = new NativeArray(newNBuckets);
					if (_keys != null) {
						arrayCopy(_keys, 0, k, 0, nBuckets);
					}
					_keys = k;
					var v = new NativeArray(newNBuckets);
					if (vals != null) {
						arrayCopy(vals, 0, v, 0, nBuckets);
					}
					vals = v;
				} // otherwise shrink
			}
		}
		if (j != 0) { // rehashing is required
			#if !no_map_cache
			// resetting cache
			cachedKey = 0;
			cachedIndex = -1;
			#end
			j = -1;
			var nBuckets = nBuckets, _keys = _keys, vals = vals, flags = flags;
			var newMask = newNBuckets - 1;
			while (++j < nBuckets) {
				if (!isEither(getFlag(flags, j))) {
					var key = _keys[j];
					var val = vals[j];
					// do not set keys as 0 - see comment about #6457
					// _keys[j] = 0;
					vals[j] = cast null;
					setIsDelTrue(flags, j);
					while (true)
						/* kick-out process; sort of like in Cuckoo hashing */ {
						var k = hash(key);
						var inc = getInc(k, newMask);
						var i = k & newMask;
						while (!isEmpty(getFlag(newFlags, i))) {
							i = (i + inc) & newMask;
						}
						setIsEmptyFalse(newFlags, i);
						if (i < nBuckets && !isEither(getFlag(flags, i)))
							/* kick out the existing element */ {
							{
								var tmp = _keys[i];
								_keys[i] = key;
								key = tmp;
							} {
								var tmp = vals[i];
								vals[i] = val;
								val = tmp;
							}
							setIsDelTrue(flags, i); /* mark it as deleted in the old hash table */
						} else { /* write the element and jump out of the loop */
							_keys[i] = key;
							vals[i] = val;
							break;
						}
					}
				}
			}
			if (nBuckets > newNBuckets)
				/* shrink the hash table */ {
				{
					var k = new NativeArray(newNBuckets);
					arrayCopy(_keys, 0, k, 0, newNBuckets);
					this._keys = k;
				} {
					var v = new NativeArray(newNBuckets);
					arrayCopy(vals, 0, v, 0, newNBuckets);
					this.vals = v;
				}
			}
			this.flags = newFlags;
			this.nBuckets = newNBuckets;
			this.nOccupied = size;
			this.upperBound = Std.int(newNBuckets * HASH_UPPER + .5);
		}
	}

	public inline function keys():Iterator<Int> {
		return new IntMapKeyIterator(this);
	}

	public inline function iterator():Iterator<T> {
		return new IntMapValueIterator(this);
	}

	@:runtime public inline function keyValueIterator():KeyValueIterator<Int, T> {
		return new haxe.iterators.MapKeyValueIterator(this);
	}

	/** Returns a shallow copy of this map. **/
	public function copy():IntMap<T> {
		var copied = new IntMap();
		for (key in keys())
			copied.set(key, get(key));
		return copied;
	}

	public function toString():String {
		var s = new StringBuf();
		s.add("{");
		var it = keys();
		for (i in it) {
			s.add(i);
			s.add(" => ");
			s.add(Std.string(get(i)));
			if (it.hasNext())
				s.add(", ");
		}
		s.add("}");
		return s.toString();
	}

	/** Removes all mappings and releases the backing arrays. **/
	public function clear():Void {
		flags = null;
		_keys = null;
		vals = null;
		nBuckets = 0;
		size = 0;
		nOccupied = 0;
		upperBound = 0;
		#if !no_map_cache
		cachedKey = 0;
		cachedIndex = -1;
		#end
	}

	private static inline function assert(x:Bool):Void {
		#if debug
		if (!x)
			throw "assert failed";
		#end
	}

	private static inline function defaultK():Int
		return 0;

	private static inline function arrayCopy(sourceArray:Dynamic, sourceIndex:Int, destinationArray:Dynamic, destinationIndex:Int, length:Int):Void
		java.lang.System.arraycopy(sourceArray, sourceIndex, destinationArray, destinationIndex, length);

	// Probe step derived from the hash; always odd, so it is coprime with the
	// power-of-two bucket count and visits every bucket.
	private static inline function getInc(k:Int, mask:Int):Int
		return (((k) >> 3 ^ (k) << 3) | 1) & (mask);

	// Identity hash: keys are already well-distributed ints.
	private static inline function hash(i:Int):Int
		return i;

	// flags represents a bit array with 2 significant bits for each index
	// one bit for deleted (1), one for empty (2)
	// so what this function does is:
	// * gets the integer with (flags / 16)
	// * shifts those bits to the right ((flags % 16) * 2) places
	// * masks it with 0b11
	private static inline function getFlag(flags:NativeArray<Int>, i:Int):Int {
		return ((flags[i >> 4] >>> ((i & 0xf) << 1)) & 3);
	}

	private static inline function isDel(flag:Int):Bool {
		return (flag & 1) != 0;
	}

	private static inline function isEmpty(flag:Int):Bool {
		return (flag & 2) != 0;
	}

	// True when the bucket is empty or deleted (i.e. not live).
	private static inline function isEither(flag:Int):Bool {
		return flag != 0;
	}

	private static inline function setIsDelFalse(flags:NativeArray<Int>, i:Int):Void {
		flags[i >> 4] &= ~(1 << ((i & 0xf) << 1));
	}

	private static inline function setIsEmptyFalse(flags:NativeArray<Int>, i:Int):Void {
		flags[i >> 4] &= ~(2 << ((i & 0xf) << 1));
	}

	private static inline function setIsBothFalse(flags:NativeArray<Int>, i:Int):Void {
		flags[i >> 4] &= ~(3 << ((i & 0xf) << 1));
	}

	private static inline function setIsDelTrue(flags:NativeArray<Int>, i:Int):Void {
		flags[i >> 4] |= 1 << ((i & 0xf) << 1);
	}

	// Rounds x up to the next power of two (returns x if already one).
	private static inline function roundUp(x:Int):Int {
		--x;
		x |= (x) >>> 1;
		x |= (x) >>> 2;
		x |= (x) >>> 4;
		x |= (x) >>> 8;
		x |= (x) >>> 16;
		return ++x;
	}

	// Number of Ints needed to hold 2 flag bits for each of m buckets.
	private static inline function flagsSize(m:Int):Int
		return ((m) < 16 ? 1 : (m) >> 4);
}
@:access(haxe.ds.IntMap)
private final class IntMapKeyIterator<T> {
	var m:IntMap<T>;
	var i:Int;
	var len:Int;

	public function new(m:IntMap<T>) {
		this.m = m;
		this.i = 0;
		this.len = m.nBuckets;
	}

	// Scans forward from the current position; parks `i` on the next live bucket.
	public function hasNext():Bool {
		var probe = i;
		while (probe < len) {
			if (!IntMap.isEither(IntMap.getFlag(m.flags, probe))) {
				i = probe;
				return true;
			}
			probe++;
		}
		return false;
	}

	// Returns the key at the current bucket and refreshes the map's lookup cache.
	public function next():Int {
		var key = m._keys[i];
		#if !no_map_cache
		m.cachedIndex = i;
		m.cachedKey = key;
		#end
		i++;
		return key;
	}
}
@:access(haxe.ds.IntMap)
private final class IntMapValueIterator<T> {
	var m:IntMap<T>;
	var i:Int;
	var len:Int;

	public function new(m:IntMap<T>) {
		this.m = m;
		this.i = 0;
		this.len = m.nBuckets;
	}

	// Scans forward from the current position; parks `i` on the next live bucket.
	public function hasNext():Bool {
		var probe = i;
		while (probe < len) {
			if (!IntMap.isEither(IntMap.getFlag(m.flags, probe))) {
				i = probe;
				return true;
			}
			probe++;
		}
		return false;
	}

	public inline function next():T {
		return m.vals[i++];
	}
}

View File

@ -0,0 +1,538 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.ds;
import java.NativeArray;
// Khash-style open-addressing hash table mapping object keys (compared via
// java.lang.Object.equals()) to values, specialized for the Java target.
@:coreApi class ObjectMap<K:{}, V> implements haxe.Constraints.IMap<K, V> {
	// Load factor: a resize is triggered once nOccupied reaches nBuckets * HASH_UPPER.
	extern private static inline var HASH_UPPER = 0.77;
	// Reserved hash value: bucket was never used.
	extern private static inline var FLAG_EMPTY = 0;
	// Reserved hash value: bucket held an entry that was removed.
	extern private static inline var FLAG_DEL = 1;

	/**
	 * This is the most important structure here and the reason why it's so fast.
	 * It's an array of all the hashes contained in the table. These hashes cannot be 0 nor 1,
	 * which stand for "empty" and "deleted" states.
	 *
	 * The lookup algorithm will keep looking until a 0 or the key wanted is found;
	 * The insertion algorithm will do the same but will also break when FLAG_DEL is found;
	 */
	private var hashes:NativeArray<HashType>;
	private var _keys:NativeArray<K>; // keys, parallel to `hashes`
	private var vals:NativeArray<V>; // values, parallel to `hashes`
	private var nBuckets:Int; // current capacity (kept a power of two by resize())
	private var size:Int; // number of live entries
	private var nOccupied:Int; // live + deleted slots (deleted slots still occupy buckets)
	private var upperBound:Int; // occupancy threshold that triggers a resize

	#if !no_map_cache
	// One-entry lookup cache: the last key/index pair touched by set/get/exists.
	private var cachedKey:K;
	private var cachedIndex:Int;
	#end

	#if DEBUG_HASHTBL
	// Probe-count instrumentation, only compiled in with -D DEBUG_HASHTBL.
	private var totalProbes:Int;
	private var probeTimes:Int;
	private var sameHash:Int;
	private var maxProbe:Int;
	#end

	// Creates an empty map; the backing arrays are allocated lazily by resize().
	public function new():Void {
		#if !no_map_cache
		cachedIndex = -1;
		#end
	}

	// Inserts `value` for `key`, replacing any previous value for an equal key.
	public function set(key:K, value:V):Void {
		var x:Int, k:Int;
		if (nOccupied >= upperBound) {
			if (nBuckets > (size << 1))
				resize(nBuckets - 1); // clear "deleted" elements
			else
				resize(nBuckets + 2);
		}
		// NOTE(review): `hashes` is bound twice in this statement; the second
		// binding looks redundant — confirm before cleaning it up.
		var hashes = hashes, keys = _keys, hashes = hashes;
		{
			var mask = (nBuckets == 0) ? 0 : nBuckets - 1;
			var site = x = nBuckets; // NOTE(review): `site` is written but never read
			k = hash(key);
			var i = k & mask, nProbes = 0;
			var delKey = -1; // first deleted slot seen while probing; reused if key is absent
			// for speed up
			if (isEmpty(hashes[i])) {
				x = i;
			} else {
				// var inc = getInc(k, mask);
				var last = i, flag;
				// Probe until an empty bucket or a bucket with the same hash and an equal key.
				while (!(isEmpty(flag = hashes[i]) || (flag == k && (cast keys[i] : java.lang.Object).equals(key)))) {
					if (isDel(flag) && delKey == -1)
						delKey = i;
					i = (i + ++nProbes) & mask; // triangular probing
					#if DEBUG_HASHTBL
					probeTimes++;
					if (i == last)
						throw "assert";
					#end
				}
				if (isEmpty(flag) && delKey != -1)
					x = delKey;
				else
					x = i;
			}
			#if DEBUG_HASHTBL
			if (nProbes > maxProbe)
				maxProbe = nProbes;
			totalProbes++;
			#end
		}
		var flag = hashes[x];
		if (isEmpty(flag)) {
			// brand-new bucket
			keys[x] = key;
			vals[x] = value;
			hashes[x] = k;
			size++;
			nOccupied++;
		} else if (isDel(flag)) {
			// reusing a deleted bucket: it is already counted in nOccupied
			keys[x] = key;
			vals[x] = value;
			hashes[x] = k;
			size++;
		} else {
			// key already present: overwrite the value only
			assert(keys[x] == key); // debug-only check (note: reference comparison)
			vals[x] = value;
		}
		#if !no_map_cache
		cachedIndex = x;
		cachedKey = key;
		#end
	}

	// Returns the bucket index holding `key`, or -1 when the key is absent.
	private final function lookup(key:K):Int {
		if (nBuckets != 0) {
			var hashes = hashes, keys = _keys;
			var mask = nBuckets - 1, hash = hash(key), k = hash, nProbes = 0;
			var i = k & mask;
			var last = i, flag;
			// var inc = getInc(k, mask);
			while (!isEmpty(flag = hashes[i]) && (isDel(flag) || flag != k || !((cast keys[i] : java.lang.Object).equals(key)))) {
				i = (i + ++nProbes) & mask;
				#if DEBUG_HASHTBL
				probeTimes++;
				if (i == last)
					throw "assert";
				#end
			}
			#if DEBUG_HASHTBL
			if (nProbes > maxProbe)
				maxProbe = nProbes;
			totalProbes++;
			#end
			return isEither(flag) ? -1 : i;
		}
		return -1;
	}

	// Grows or shrinks the table to newNBuckets (rounded up to a power of two,
	// minimum 4) and rehashes all live entries via an in-place kick-out scheme.
	@:private final function resize(newNBuckets:Int):Void {
		// This function uses 0.25*n_bucktes bytes of working space instead of [sizeof(key_t+val_t)+.25]*n_buckets.
		var newHash = null;
		var j = 1;
		{
			newNBuckets = roundUp(newNBuckets);
			if (newNBuckets < 4)
				newNBuckets = 4;
			if (size >= (newNBuckets * HASH_UPPER + 0.5))
				/* requested size is too small */ {
				j = 0;
			} else { /* hash table size to be changed (shrink or expand); rehash */
				var nfSize = newNBuckets;
				newHash = new NativeArray(nfSize);
				if (nBuckets < newNBuckets) // expand
				{
					var k = new NativeArray(newNBuckets);
					if (_keys != null)
						arrayCopy(_keys, 0, k, 0, nBuckets);
					_keys = k;
					var v = new NativeArray(newNBuckets);
					if (vals != null)
						arrayCopy(vals, 0, v, 0, nBuckets);
					vals = v;
				} // otherwise shrink
			}
		}
		if (j != 0) { // rehashing is required
			// resetting cache
			#if !no_map_cache
			cachedKey = null;
			cachedIndex = -1;
			#end
			j = -1;
			var nBuckets = nBuckets,
				_keys = _keys,
				vals = vals,
				hashes = hashes;
			var newMask = newNBuckets - 1;
			while (++j < nBuckets) {
				var k;
				if (!isEither(k = hashes[j])) {
					var key = _keys[j];
					var val = vals[j];
					_keys[j] = null;
					vals[j] = cast null;
					hashes[j] = FLAG_DEL;
					while (true)
						/* kick-out process; sort of like in Cuckoo hashing */ {
						var nProbes = 0;
						// var inc = getInc(k, newMask);
						var i = k & newMask;
						while (!isEmpty(newHash[i]))
							i = (i + ++nProbes) & newMask;
						newHash[i] = k;
						if (i < nBuckets && !isEither(k = hashes[i]))
							/* kick out the existing element */ {
							{
								var tmp = _keys[i];
								_keys[i] = key;
								key = tmp;
							} {
								var tmp = vals[i];
								vals[i] = val;
								val = tmp;
							}
							hashes[i] = FLAG_DEL; /* mark it as deleted in the old hash table */
						} else { /* write the element and jump out of the loop */
							_keys[i] = key;
							vals[i] = val;
							break;
						}
					}
				}
			}
			if (nBuckets > newNBuckets)
				/* shrink the hash table */ {
				{
					var k = new NativeArray(newNBuckets);
					arrayCopy(_keys, 0, k, 0, newNBuckets);
					this._keys = k;
				} {
					var v = new NativeArray(newNBuckets);
					arrayCopy(vals, 0, v, 0, newNBuckets);
					this.vals = v;
				}
			}
			this.hashes = newHash;
			this.nBuckets = newNBuckets;
			this.nOccupied = size; // deleted slots were discarded by the rehash
			this.upperBound = Std.int(newNBuckets * HASH_UPPER + .5);
		}
	}

	// Returns the value stored for `key`, or null when the key is absent.
	public function get(key:K):Null<V> {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return vals[idx];
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return vals[idx];
		}
		return null;
	}

	// Like get(), but returns `def` instead of null when the key is absent.
	private function getDefault(key:K, def:V):V {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return vals[idx];
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return vals[idx];
		}
		return def;
	}

	// Returns true when `key` is present in the map.
	public function exists(key:K):Bool {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return true;
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return true;
		}
		return false;
	}

	// Removes `key` from the map; returns true when a mapping was removed.
	public function remove(key:K):Bool {
		var idx = -1;
		#if !no_map_cache
		if (!(cachedKey == key && ((idx = cachedIndex) != -1)))
		#end
		{
			idx = lookup(key);
		}
		if (idx == -1) {
			return false;
		} else {
			#if !no_map_cache
			if (cachedKey == key)
				cachedIndex = -1;
			#end
			// Mark the bucket deleted; nOccupied stays, only size shrinks.
			hashes[idx] = FLAG_DEL;
			_keys[idx] = null;
			vals[idx] = null;
			--size;
			return true;
		}
	}

	// Iterator over the map's keys (order is unspecified).
	public function keys():Iterator<K> {
		return new ObjectMapKeyIterator(this);
	}

	// Iterator over the map's values (order is unspecified).
	public function iterator():Iterator<V> {
		return new ObjectMapValueIterator(this);
	}

	// Iterator over key/value pairs (order is unspecified).
	@:runtime public inline function keyValueIterator():KeyValueIterator<K, V> {
		return new haxe.iterators.MapKeyValueIterator(this);
	}

	// Returns a shallow copy of this map (keys and values are not cloned).
	public function copy():ObjectMap<K, V> {
		var copied = new ObjectMap();
		for (key in keys())
			copied.set(key, get(key));
		return copied;
	}

	// Renders the map as "{k1 => v1, k2 => v2}".
	public function toString():String {
		var s = new StringBuf();
		s.add("{");
		var it = keys();
		for (i in it) {
			s.add(Std.string(i));
			s.add(" => ");
			s.add(Std.string(get(i)));
			if (it.hasNext())
				s.add(", ");
		}
		s.add("}");
		return s.toString();
	}

	// Drops all entries and releases the backing arrays.
	public function clear():Void {
		hashes = null;
		_keys = null;
		vals = null;
		nBuckets = 0;
		size = 0;
		nOccupied = 0;
		upperBound = 0;
		#if !no_map_cache
		cachedKey = null;
		cachedIndex = -1;
		#end
		#if DEBUG_HASHTBL
		totalProbes = 0;
		probeTimes = 0;
		sameHash = 0;
		maxProbe = 0;
		#end
	}

	// Rounds x up to the next power of two (bit-smear trick).
	extern private static inline function roundUp(x:Int):Int {
		--x;
		x |= (x) >>> 1;
		x |= (x) >>> 2;
		x |= (x) >>> 4;
		x |= (x) >>> 8;
		x |= (x) >>> 16;
		return ++x;
	}

	extern private static inline function getInc(k:Int, mask:Int):Int // return 1 for linear probing
		return (((k) >> 3 ^ (k) << 3) | 1) & (mask);

	// True when v is one of the reserved sentinels (FLAG_EMPTY or FLAG_DEL).
	extern private static inline function isEither(v:HashType):Bool
		return (v & 0xFFFFFFFE) == 0;

	extern private static inline function isEmpty(v:HashType):Bool
		return v == FLAG_EMPTY;

	extern private static inline function isDel(v:HashType):Bool
		return v == FLAG_DEL;

	// guarantee: Whatever this function is, it will never return 0 nor 1
	// Mixes Object.hashCode() bits and remaps results that collide with the
	// reserved sentinels (0 -> 2, 1 -> 0xFFFFFFFF).
	extern private static inline function hash(s:Dynamic):HashType {
		var k:Int = (cast s : java.lang.Object).hashCode();
		// k *= 357913941;
		// k ^= k << 24;
		// k += ~357913941;
		// k ^= k >> 31;
		// k ^= k << 31;
		k = (k + 0x7ed55d16) + (k << 12);
		k = (k ^ 0xc761c23c) ^ (k >> 19);
		k = (k + 0x165667b1) + (k << 5);
		k = (k + 0xd3a2646c) ^ (k << 9);
		k = (k + 0xfd7046c5) + (k << 3);
		k = (k ^ 0xb55a4f09) ^ (k >> 16);
		var ret = k;
		if (isEither(ret)) {
			if (ret == 0)
				ret = 2;
			else
				ret = 0xFFFFFFFF;
		}
		return ret;
	}

	// Thin wrapper over System.arraycopy.
	extern private static inline function arrayCopy(sourceArray:Dynamic, sourceIndex:Int, destinationArray:Dynamic, destinationIndex:Int, length:Int):Void
		java.lang.System.arraycopy(sourceArray, sourceIndex, destinationArray, destinationIndex, length);

	// No-op unless compiled with -D DEBUG_HASHTBL.
	extern private static inline function assert(x:Bool):Void {
		#if DEBUG_HASHTBL
		if (!x)
			throw "assert failed";
		#end
	}
}
@:access(haxe.ds.ObjectMap)
private final class ObjectMapKeyIterator<T:{}, V> {
	var map:ObjectMap<T, V>;
	var pos:Int; // current bucket index
	var total:Int; // bucket count snapshot taken at construction

	public function new(m:ObjectMap<T, V>) {
		map = m;
		pos = 0;
		total = m.nBuckets;
	}

	// Advances `pos` to the next occupied bucket, if any remains.
	public function hasNext():Bool {
		while (pos < total) {
			if (!ObjectMap.isEither(map.hashes[pos]))
				return true;
			pos++;
		}
		return false;
	}

	public function next():T {
		var key = map._keys[pos];
		#if !no_map_cache
		// Warm the map's one-entry cache so a follow-up get(key) is O(1).
		map.cachedIndex = pos;
		map.cachedKey = key;
		#end
		pos++;
		return key;
	}
}
@:access(haxe.ds.ObjectMap)
private final class ObjectMapValueIterator<K:{}, T> {
	var map:ObjectMap<K, T>;
	var pos:Int; // current bucket index
	var total:Int; // bucket count snapshot taken at construction

	public function new(m:ObjectMap<K, T>) {
		map = m;
		pos = 0;
		total = m.nBuckets;
	}

	// Advances `pos` to the next occupied bucket, if any remains.
	public function hasNext():Bool {
		while (pos < total) {
			if (!ObjectMap.isEither(map.hashes[pos]))
				return true;
			pos++;
		}
		return false;
	}

	public inline function next():T {
		var value = map.vals[pos];
		pos++;
		return value;
	}
}
// Bucket hash values; 0 (FLAG_EMPTY) and 1 (FLAG_DEL) are reserved sentinels,
// which hash() is guaranteed never to produce.
private typedef HashType = Int;

View File

@ -0,0 +1,533 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.ds;
import java.NativeArray;
// Khash-style open-addressing hash table mapping String keys to values,
// specialized for the Java target (uses the native String hashCode()).
@:coreApi class StringMap<T> implements haxe.Constraints.IMap<String, T> {
	// Load factor: a resize is triggered once nOccupied reaches nBuckets * HASH_UPPER.
	extern private static inline var HASH_UPPER = 0.77;
	// Reserved hash value: bucket was never used.
	extern private static inline var FLAG_EMPTY = 0;
	// Reserved hash value: bucket held an entry that was removed.
	extern private static inline var FLAG_DEL = 1;

	/**
	 * This is the most important structure here and the reason why it's so fast.
	 * It's an array of all the hashes contained in the table. These hashes cannot be 0 nor 1,
	 * which stand for "empty" and "deleted" states.
	 *
	 * The lookup algorithm will keep looking until a 0 or the key wanted is found;
	 * The insertion algorithm will do the same but will also break when FLAG_DEL is found;
	 */
	private var hashes:NativeArray<HashType>;
	private var _keys:NativeArray<String>; // keys, parallel to `hashes`
	private var vals:NativeArray<T>; // values, parallel to `hashes`
	private var nBuckets:Int; // current capacity (kept a power of two by resize())
	private var size:Int; // number of live entries
	private var nOccupied:Int; // live + deleted slots (deleted slots still occupy buckets)
	private var upperBound:Int; // occupancy threshold that triggers a resize

	#if !no_map_cache
	// One-entry lookup cache: the last key/index pair touched by set/get/exists.
	private var cachedKey:String;
	private var cachedIndex:Int;
	#end

	#if DEBUG_HASHTBL
	// Probe-count instrumentation, only compiled in with -D DEBUG_HASHTBL.
	private var totalProbes:Int;
	private var probeTimes:Int;
	private var sameHash:Int;
	private var maxProbe:Int;
	#end

	// Creates an empty map; the backing arrays are allocated lazily by resize().
	public function new():Void {
		#if !no_map_cache
		cachedIndex = -1;
		#end
	}

	// Inserts `value` for `key`, replacing any previous value for an equal key.
	public function set(key:String, value:T):Void {
		var x:Int, k:Int;
		if (nOccupied >= upperBound) {
			if (nBuckets > (size << 1)) {
				resize(nBuckets - 1); // clear "deleted" elements
			} else {
				resize(nBuckets + 2);
			}
		}
		// NOTE(review): `hashes` is bound twice in this statement; the second
		// binding looks redundant — confirm before cleaning it up.
		var hashes = hashes, keys = _keys, hashes = hashes;
		{
			var mask = (nBuckets == 0) ? 0 : nBuckets - 1;
			var site = x = nBuckets; // NOTE(review): `site` is written but never read
			k = hash(key);
			var i = k & mask, nProbes = 0;
			var delKey = -1; // first deleted slot seen while probing; reused if key is absent
			// to speed things up, don't loop if the first bucket is already free
			if (isEmpty(hashes[i])) {
				x = i;
			} else {
				var last = i, flag;
				// NOTE(review): compares against the field `_keys` rather than the
				// local `keys` alias declared above — same array, just inconsistent.
				while (!(isEmpty(flag = hashes[i]) || (flag == k && _keys[i] == key))) {
					if (isDel(flag) && delKey == -1) {
						delKey = i;
					}
					i = (i + ++nProbes) & mask; // triangular probing
					#if DEBUG_HASHTBL
					probeTimes++;
					if (i == last)
						throw "assert";
					#end
				}
				if (isEmpty(flag) && delKey != -1) {
					x = delKey;
				} else {
					x = i;
				}
			}
			#if DEBUG_HASHTBL
			if (nProbes > maxProbe)
				maxProbe = nProbes;
			totalProbes++;
			#end
		}
		var flag = hashes[x];
		if (isEmpty(flag)) {
			// brand-new bucket
			keys[x] = key;
			vals[x] = value;
			hashes[x] = k;
			size++;
			nOccupied++;
		} else if (isDel(flag)) {
			// reusing a deleted bucket: it is already counted in nOccupied
			keys[x] = key;
			vals[x] = value;
			hashes[x] = k;
			size++;
		} else {
			// key already present: overwrite the value only
			assert(_keys[x] == key); // debug-only sanity check
			vals[x] = value;
		}
		#if !no_map_cache
		cachedIndex = x;
		cachedKey = key;
		#end
	}

	// Returns the bucket index holding `key`, or -1 when the key is absent.
	private final function lookup(key:String):Int {
		if (nBuckets != 0) {
			var hashes = hashes, keys = _keys;
			var mask = nBuckets - 1, hash = hash(key), k = hash, nProbes = 0;
			var i = k & mask;
			var last = i, flag;
			// if we hit an empty bucket, it means we're done
			while (!isEmpty(flag = hashes[i]) && (isDel(flag) || flag != k || keys[i] != key)) {
				i = (i + ++nProbes) & mask;
				#if DEBUG_HASHTBL
				probeTimes++;
				if (i == last)
					throw "assert";
				#end
			}
			#if DEBUG_HASHTBL
			if (nProbes > maxProbe)
				maxProbe = nProbes;
			totalProbes++;
			#end
			return isEither(flag) ? -1 : i;
		}
		return -1;
	}

	// Grows or shrinks the table to newNBuckets (rounded up to a power of two,
	// minimum 4) and rehashes all live entries via an in-place kick-out scheme.
	@:private final function resize(newNBuckets:Int):Void {
		// This function uses 0.25*n_bucktes bytes of working space instead of [sizeof(key_t+val_t)+.25]*n_buckets.
		var newHash = null;
		var j = 1;
		{
			newNBuckets = roundUp(newNBuckets);
			if (newNBuckets < 4)
				newNBuckets = 4;
			if (size >= (newNBuckets * HASH_UPPER + 0.5))
				/* requested size is too small */ {
				j = 0;
			} else { /* hash table size to be changed (shrink or expand); rehash */
				var nfSize = newNBuckets;
				newHash = new NativeArray(nfSize);
				if (nBuckets < newNBuckets) // expand
				{
					var k = new NativeArray(newNBuckets);
					if (_keys != null)
						arrayCopy(_keys, 0, k, 0, nBuckets);
					_keys = k;
					var v = new NativeArray(newNBuckets);
					if (vals != null)
						arrayCopy(vals, 0, v, 0, nBuckets);
					vals = v;
				} // otherwise shrink
			}
		}
		if (j != 0) { // rehashing is required
			// resetting cache
			#if !no_map_cache
			cachedKey = null;
			cachedIndex = -1;
			#end
			j = -1;
			var nBuckets = nBuckets,
				_keys = _keys,
				vals = vals,
				hashes = hashes;
			var newMask = newNBuckets - 1;
			while (++j < nBuckets) {
				var k;
				if (!isEither(k = hashes[j])) {
					var key = _keys[j];
					var val = vals[j];
					_keys[j] = null;
					vals[j] = cast null;
					hashes[j] = FLAG_DEL;
					while (true)
						/* kick-out process; sort of like in Cuckoo hashing */ {
						var nProbes = 0;
						var i = k & newMask;
						while (!isEmpty(newHash[i])) {
							i = (i + ++nProbes) & newMask;
						}
						newHash[i] = k;
						if (i < nBuckets && !isEither(k = hashes[i]))
							/* kick out the existing element */ {
							{
								var tmp = _keys[i];
								_keys[i] = key;
								key = tmp;
							} {
								var tmp = vals[i];
								vals[i] = val;
								val = tmp;
							}
							hashes[i] = FLAG_DEL; /* mark it as deleted in the old hash table */
						} else { /* write the element and jump out of the loop */
							_keys[i] = key;
							vals[i] = val;
							break;
						}
					}
				}
			}
			if (nBuckets > newNBuckets)
				/* shrink the hash table */ {
				{
					var k = new NativeArray(newNBuckets);
					arrayCopy(_keys, 0, k, 0, newNBuckets);
					this._keys = k;
				} {
					var v = new NativeArray(newNBuckets);
					arrayCopy(vals, 0, v, 0, newNBuckets);
					this.vals = v;
				}
			}
			this.hashes = newHash;
			this.nBuckets = newNBuckets;
			this.nOccupied = size; // deleted slots were discarded by the rehash
			this.upperBound = Std.int(newNBuckets * HASH_UPPER + .5);
		}
	}

	// Returns the value stored for `key`, or null when the key is absent.
	public function get(key:String):Null<T> {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return vals[idx];
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return vals[idx];
		}
		return null;
	}

	// Like get(), but returns `def` instead of null when the key is absent.
	private function getDefault(key:String, def:T):T {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return vals[idx];
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return vals[idx];
		}
		return def;
	}

	// Returns true when `key` is present in the map.
	public function exists(key:String):Bool {
		var idx = -1;
		#if !no_map_cache
		if (cachedKey == key && ((idx = cachedIndex) != -1)) {
			return true;
		}
		#end
		idx = lookup(key);
		if (idx != -1) {
			#if !no_map_cache
			cachedKey = key;
			cachedIndex = idx;
			#end
			return true;
		}
		return false;
	}

	// Removes `key` from the map; returns true when a mapping was removed.
	public function remove(key:String):Bool {
		var idx = -1;
		#if !no_map_cache
		if (!(cachedKey == key && ((idx = cachedIndex) != -1)))
		#end
		{
			idx = lookup(key);
		}
		if (idx == -1) {
			return false;
		} else {
			#if !no_map_cache
			if (cachedKey == key) {
				cachedIndex = -1;
			}
			#end
			// Mark the bucket deleted; nOccupied stays, only size shrinks.
			hashes[idx] = FLAG_DEL;
			_keys[idx] = null;
			vals[idx] = null;
			--size;
			return true;
		}
	}

	// Iterator over the map's keys (order is unspecified).
	public inline function keys():Iterator<String> {
		return new StringMapKeyIterator(this);
	}

	// Iterator over key/value pairs (order is unspecified).
	@:runtime public inline function keyValueIterator():KeyValueIterator<String, T> {
		return new haxe.iterators.MapKeyValueIterator(this);
	}

	// Iterator over the map's values (order is unspecified).
	public inline function iterator():Iterator<T> {
		return new StringMapValueIterator(this);
	}

	// Returns a shallow copy of this map (values are not cloned).
	public function copy():StringMap<T> {
		var copied = new StringMap();
		for (key in keys())
			copied.set(key, get(key));
		return copied;
	}

	// Renders the map as "{k1 => v1, k2 => v2}".
	public function toString():String {
		var s = new StringBuf();
		s.add("{");
		var it = keys();
		for (i in it) {
			s.add(i);
			s.add(" => ");
			s.add(Std.string(get(i)));
			if (it.hasNext())
				s.add(", ");
		}
		s.add("}");
		return s.toString();
	}

	// Drops all entries and releases the backing arrays.
	public function clear():Void {
		hashes = null;
		_keys = null;
		vals = null;
		nBuckets = 0;
		size = 0;
		nOccupied = 0;
		upperBound = 0;
		#if !no_map_cache
		cachedKey = null;
		cachedIndex = -1;
		#end
		#if DEBUG_HASHTBL
		totalProbes = 0;
		probeTimes = 0;
		sameHash = 0;
		maxProbe = 0;
		#end
	}

	// Rounds x up to the next power of two (bit-smear trick).
	extern private static inline function roundUp(x:Int):Int {
		--x;
		x |= (x) >>> 1;
		x |= (x) >>> 2;
		x |= (x) >>> 4;
		x |= (x) >>> 8;
		x |= (x) >>> 16;
		return ++x;
	}

	extern private static inline function getInc(k:Int, mask:Int):Int // return 1 for linear probing
		return (((k) >> 3 ^ (k) << 3) | 1) & (mask);

	// True when v is one of the reserved sentinels (FLAG_EMPTY or FLAG_DEL).
	extern private static inline function isEither(v:HashType):Bool
		return (v & 0xFFFFFFFE) == 0;

	extern private static inline function isEmpty(v:HashType):Bool
		return v == FLAG_EMPTY;

	extern private static inline function isDel(v:HashType):Bool
		return v == FLAG_DEL;

	// guarantee: Whatever this function is, it will never return 0 nor 1
	// Mixes String.hashCode() bits and remaps results that collide with the
	// reserved sentinels (0 -> 2, 1 -> 0xFFFFFFFF).
	extern private static inline function hash(s:String):HashType {
		var k:Int = (cast s : java.NativeString).hashCode();
		// k *= 357913941;
		// k ^= k << 24;
		// k += ~357913941;
		// k ^= k >> 31;
		// k ^= k << 31;
		k = (k + 0x7ed55d16) + (k << 12);
		k = (k ^ 0xc761c23c) ^ (k >> 19);
		k = (k + 0x165667b1) + (k << 5);
		k = (k + 0xd3a2646c) ^ (k << 9);
		k = (k + 0xfd7046c5) + (k << 3);
		k = (k ^ 0xb55a4f09) ^ (k >> 16);
		var ret = k;
		if (isEither(ret)) {
			if (ret == 0)
				ret = 2;
			else
				ret = 0xFFFFFFFF;
		}
		return ret;
	}

	// Thin wrapper over System.arraycopy.
	extern private static inline function arrayCopy(sourceArray:Dynamic, sourceIndex:Int, destinationArray:Dynamic, destinationIndex:Int, length:Int):Void
		java.lang.System.arraycopy(sourceArray, sourceIndex, destinationArray, destinationIndex, length);

	// No-op unless compiled with -D DEBUG_HASHTBL.
	extern private static inline function assert(x:Bool):Void {
		#if DEBUG_HASHTBL
		if (!x)
			throw "assert failed";
		#end
	}
}
// Bucket hash values; 0 (FLAG_EMPTY) and 1 (FLAG_DEL) are reserved sentinels,
// which hash() is guaranteed never to produce.
private typedef HashType = Int;
@:access(haxe.ds.StringMap)
private final class StringMapKeyIterator<T> {
	var map:StringMap<T>;
	var pos:Int; // current bucket index
	var total:Int; // bucket count snapshot taken at construction

	public function new(m:StringMap<T>) {
		map = m;
		pos = 0;
		total = m.nBuckets;
	}

	// Advances `pos` to the next occupied bucket, if any remains.
	public function hasNext():Bool {
		while (pos < total) {
			if (!StringMap.isEither(map.hashes[pos]))
				return true;
			pos++;
		}
		return false;
	}

	public function next():String {
		var key = map._keys[pos];
		#if !no_map_cache
		// Warm the map's one-entry cache so a follow-up get(key) is O(1).
		map.cachedIndex = pos;
		map.cachedKey = key;
		#end
		pos++;
		return key;
	}
}
@:access(haxe.ds.StringMap)
private final class StringMapValueIterator<T> {
	var map:StringMap<T>;
	var pos:Int; // current bucket index
	var total:Int; // bucket count snapshot taken at construction

	public function new(m:StringMap<T>) {
		map = m;
		pos = 0;
		total = m.nBuckets;
	}

	// Advances `pos` to the next occupied bucket, if any remains.
	public function hasNext():Bool {
		while (pos < total) {
			if (!StringMap.isEither(map.hashes[pos]))
				return true;
			pos++;
		}
		return false;
	}

	public inline function next():T {
		var value = map.vals[pos];
		pos++;
		return value;
	}
}

View File

@ -0,0 +1,582 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.ds;
import java.NativeArray;
import java.lang.ref.WeakReference;
import java.lang.ref.ReferenceQueue;
@:coreApi class WeakMap<K:{}, V> implements haxe.Constraints.IMap<K, V> {
extern private static inline var HASH_UPPER = 0.77;
extern private static inline var FLAG_EMPTY = 0;
extern private static inline var FLAG_DEL = 1;
/**
* This is the most important structure here and the reason why it's so fast.
* It's an array of all the hashes contained in the table. These hashes cannot be 0 nor 1,
* which stand for "empty" and "deleted" states.
*
* The lookup algorithm will keep looking until a 0 or the key wanted is found;
* The insertion algorithm will do the same but will also break when FLAG_DEL is found;
*/
private var hashes:NativeArray<HashType>;
private var entries:NativeArray<Entry<K, V>>;
// weak map specific
private var queue:ReferenceQueue<K>;
private var nBuckets:Int;
private var size:Int;
private var nOccupied:Int;
private var upperBound:Int;
#if !no_map_cache
private var cachedEntry:Entry<K, V>;
private var cachedIndex:Int;
#end
#if DEBUG_HASHTBL
private var totalProbes:Int;
private var probeTimes:Int;
private var sameHash:Int;
private var maxProbe:Int;
#end
public function new():Void {
#if !no_map_cache
cachedIndex = -1;
#end
queue = new ReferenceQueue();
}
@:analyzer(ignore)
private function cleanupRefs():Void {
var x:Dynamic = null, nOccupied = nOccupied;
while ((x = queue.poll()) != null) {
// even if not found on hashtable (already removed), release value
var x:Entry<K, V> = cast x;
x.value = null;
// lookup index
if (nOccupied != 0) {
var mask = nBuckets - 1, hash = x.hash, nProbes = 0;
var i = hash & mask;
var last = i, flag;
while (!isEmpty(flag = hashes[i]) && (isDel(flag) || flag != hash || entries[i] != x)) {
i = (i + ++nProbes) & mask;
}
if (entries[i] == x) {
#if !no_map_cache
if (cachedIndex == i) {
cachedIndex = -1;
cachedEntry = null;
}
#end
entries[i] = null;
hashes[i] = FLAG_DEL;
--size;
}
}
}
}
public function set(key:K, value:V):Void {
cleanupRefs();
var x:Int, k:Int;
if (nOccupied >= upperBound) {
if (nBuckets > (size << 1))
resize(nBuckets - 1); // clear "deleted" elements
else
resize(nBuckets + 2);
}
k = hash(key);
var hashes = hashes, entries = entries;
{
var mask = (nBuckets == 0) ? 0 : nBuckets - 1;
var site = x = nBuckets;
var i = k & mask, nProbes = 0;
var delKey = -1;
// for speed up
if (isEmpty(hashes[i])) {
x = i;
} else {
// var inc = getInc(k, mask);
var last = i, flag;
while (!(isEmpty(flag = hashes[i]) || (flag == k && entries[i].keyEquals(key)))) {
if (delKey == -1 && isDel(flag))
delKey = i;
i = (i + ++nProbes) & mask;
#if DEBUG_HASHTBL
probeTimes++;
if (i == last)
throw "assert";
#end
}
if (isEmpty(flag) && delKey != -1)
x = delKey;
else
x = i;
}
#if DEBUG_HASHTBL
if (nProbes > maxProbe)
maxProbe = nProbes;
totalProbes++;
#end
}
var flag = hashes[x], entry = new Entry(key, value, k, queue);
if (isEmpty(flag)) {
entries[x] = entry;
hashes[x] = k;
size++;
nOccupied++;
} else if (isDel(flag)) {
entries[x] = entry;
hashes[x] = k;
size++;
} else {
assert(entries[x].keyEquals(key));
entries[x] = entry;
}
#if !no_map_cache
cachedIndex = x;
cachedEntry = entry;
#end
}
private final function lookup(key:K):Int {
if (nBuckets != 0) {
var hashes = hashes, entries = entries;
var mask = nBuckets - 1, hash = hash(key), k = hash, nProbes = 0;
var i = k & mask;
var last = i, flag;
// var inc = getInc(k, mask);
while (!isEmpty(flag = hashes[i]) && (isDel(flag) || flag != k || !entries[i].keyEquals(key))) {
i = (i + ++nProbes) & mask;
#if DEBUG_HASHTBL
probeTimes++;
if (i == last)
throw "assert";
#end
}
#if DEBUG_HASHTBL
if (nProbes > maxProbe)
maxProbe = nProbes;
totalProbes++;
#end
return isEither(flag) ? -1 : i;
}
return -1;
}
@:private final function resize(newNBuckets:Int):Void {
// This function uses 0.25*n_bucktes bytes of working space instead of [sizeof(key_t+val_t)+.25]*n_buckets.
var newHash = null;
var j = 1;
{
newNBuckets = roundUp(newNBuckets);
if (newNBuckets < 4)
newNBuckets = 4;
if (size >= (newNBuckets * HASH_UPPER + 0.5))
/* requested size is too small */ {
j = 0;
} else { /* hash table size to be changed (shrink or expand); rehash */
var nfSize = newNBuckets;
newHash = new NativeArray(nfSize);
if (nBuckets < newNBuckets) // expand
{
var e = new NativeArray(newNBuckets);
if (entries != null)
arrayCopy(entries, 0, e, 0, nBuckets);
entries = e;
} // otherwise shrink
}
}
if (j != 0) { // rehashing is required
// resetting cache
#if !no_map_cache
cachedEntry = null;
cachedIndex = -1;
#end
j = -1;
var nBuckets = nBuckets, entries = entries, hashes = hashes;
var newMask = newNBuckets - 1;
while (++j < nBuckets) {
var k;
if (!isEither(k = hashes[j])) {
var entry = entries[j];
entries[j] = null;
hashes[j] = FLAG_DEL;
while (true)
/* kick-out process; sort of like in Cuckoo hashing */ {
var nProbes = 0;
var i = k & newMask;
while (!isEmpty(newHash[i]))
i = (i + ++nProbes) & newMask;
newHash[i] = k;
if (i < nBuckets && !isEither(k = hashes[i]))
/* kick out the existing element */ {
{
var tmp = entries[i];
entries[i] = entry;
entry = tmp;
}
hashes[i] = FLAG_DEL; /* mark it as deleted in the old hash table */
} else { /* write the element and jump out of the loop */
entries[i] = entry;
break;
}
}
}
}
if (nBuckets > newNBuckets)
/* shrink the hash table */ {
{
var e = new NativeArray(newNBuckets);
arrayCopy(entries, 0, e, 0, newNBuckets);
this.entries = e;
}
}
this.hashes = newHash;
this.nBuckets = newNBuckets;
this.nOccupied = size;
this.upperBound = Std.int(newNBuckets * HASH_UPPER + .5);
}
}
public function get(key:K):Null<V> {
cleanupRefs();
var idx = -1;
#if !no_map_cache
if (cachedEntry != null && cachedEntry.keyEquals(key) && ((idx = cachedIndex) != -1)) {
return cachedEntry.value;
}
#end
idx = lookup(key);
if (idx != -1) {
var entry = entries[idx];
#if !no_map_cache
cachedEntry = entry;
cachedIndex = idx;
#end
return entry.value;
}
return null;
}
private function getDefault(key:K, def:V):V {
cleanupRefs();
var idx = -1;
#if !no_map_cache
if (cachedEntry != null && cachedEntry.keyEquals(key) && ((idx = cachedIndex) != -1)) {
return cachedEntry.value;
}
#end
idx = lookup(key);
if (idx != -1) {
var entry = entries[idx];
#if !no_map_cache
cachedEntry = entry;
cachedIndex = idx;
#end
return entry.value;
}
return def;
}
public function exists(key:K):Bool {
cleanupRefs();
var idx = -1;
#if !no_map_cache
if (cachedEntry != null && cachedEntry.keyEquals(key) && ((idx = cachedIndex) != -1)) {
return true;
}
#end
idx = lookup(key);
if (idx != -1) {
var entry = entries[idx];
#if !no_map_cache
cachedEntry = entry;
cachedIndex = idx;
#end
return true;
}
return false;
}
public function remove(key:K):Bool {
cleanupRefs();
var idx = -1;
#if !no_map_cache
if (!(cachedEntry != null && cachedEntry.keyEquals(key) && ((idx = cachedIndex) != -1)))
#end
{
idx = lookup(key);
}
if (idx == -1) {
return false;
} else {
#if !no_map_cache
if (cachedEntry != null && cachedEntry.keyEquals(key)) {
cachedIndex = -1;
cachedEntry = null;
}
#end
hashes[idx] = FLAG_DEL;
entries[idx] = null;
--size;
return true;
}
}
public inline function keys():Iterator<K> {
cleanupRefs();
return new WeakMapKeyIterator(this);
}
public inline function iterator():Iterator<V> {
cleanupRefs();
return new WeakMapValueIterator(this);
}
public inline function keyValueIterator():KeyValueIterator<K, V> {
return new haxe.iterators.MapKeyValueIterator(this);
}
public function copy():WeakMap<K, V> {
var copied = new WeakMap();
for (key in keys())
copied.set(key, get(key));
return copied;
}
public function toString():String {
var s = new StringBuf();
s.add("{");
var it = keys();
for (i in it) {
s.add(Std.string(i));
s.add(" => ");
s.add(Std.string(get(i)));
if (it.hasNext())
s.add(", ");
}
s.add("}");
return s.toString();
}
public function clear():Void {
hashes = null;
entries = null;
queue = new ReferenceQueue();
nBuckets = 0;
size = 0;
nOccupied = 0;
upperBound = 0;
#if !no_map_cache
cachedEntry = null;
cachedIndex = -1;
#end
#if DEBUG_HASHTBL
totalProbes = 0;
probeTimes = 0;
sameHash = 0;
maxProbe = 0;
#end
}
extern private static inline function roundUp(x:Int):Int {
--x;
x |= (x) >>> 1;
x |= (x) >>> 2;
x |= (x) >>> 4;
x |= (x) >>> 8;
x |= (x) >>> 16;
return ++x;
}
extern private static inline function getInc(k:Int, mask:Int):Int // return 1 for linear probing
return (((k) >> 3 ^ (k) << 3) | 1) & (mask);
extern private static inline function isEither(v:HashType):Bool
return (v & 0xFFFFFFFE) == 0;
extern private static inline function isEmpty(v:HashType):Bool
return v == FLAG_EMPTY;
extern private static inline function isDel(v:HashType):Bool
return v == FLAG_DEL;
// guarantee: Whatever this function is, it will never return 0 nor 1
// Mixes the object's Java hashCode() through a Jenkins-style sequence of
// add/xor/shift steps to spread poorly distributed hashCode values across
// the table. The outputs 0 and 1 are reserved as slot flags (see
// isEither/isEmpty/isDel), so they are remapped at the end.
extern private static inline function hash(s:Dynamic):HashType {
var k:Int = untyped s.hashCode();
// k *= 357913941;
// k ^= k << 24;
// k += ~357913941;
// k ^= k >> 31;
// k ^= k << 31;
k = (k + 0x7ed55d16) + (k << 12);
k = (k ^ 0xc761c23c) ^ (k >> 19);
k = (k + 0x165667b1) + (k << 5);
k = (k + 0xd3a2646c) ^ (k << 9);
k = (k + 0xfd7046c5) + (k << 3);
k = (k ^ 0xb55a4f09) ^ (k >> 16);
var ret = k;
// Remap the two reserved flag values onto arbitrary non-flag values.
if (isEither(ret)) {
if (ret == 0)
ret = 2;
else
ret = 0xFFFFFFFF;
}
return ret;
}
// Thin inline wrapper over java.lang.System.arraycopy (native block copy
// between arrays).
extern private static inline function arrayCopy(sourceArray:Dynamic, sourceIndex:Int, destinationArray:Dynamic, destinationIndex:Int, length:Int):Void
java.lang.System.arraycopy(sourceArray, sourceIndex, destinationArray, destinationIndex, length);
// Debug-only sanity check: throws when `x` is false. Compiles to nothing
// unless the DEBUG_HASHTBL define is set.
extern private static inline function assert(x:Bool):Void {
#if DEBUG_HASHTBL
if (!x)
throw "assert failed";
#end
}
}
/**
	A single hash-table slot: a weak reference to the key plus the strongly
	held value and the key's precomputed hash. When the key is collected,
	the reference is enqueued on the map's ReferenceQueue so the slot can be
	purged later by cleanupRefs().
**/
private class Entry<K, V> extends WeakReference<K> {
public var value:V;
// Cached hash of the key, kept so the slot can still be matched/rehashed
// after the key itself has been collected.
public var hash(default, null):Int;
public function new(key:K, value:V, hash:Int, queue:ReferenceQueue<K>) {
super(key, queue);
this.value = value;
this.hash = hash;
}
// True if `k` equals this entry's key (via Java equals). False for null
// `k`; also false once the key has been collected (get() returns null, and
// k.equals(null) is false for well-behaved equals implementations).
final inline public function keyEquals(k:K):Bool {
return k != null && untyped k.equals(get());
}
}
@:access(haxe.ds.WeakMap)
private final class WeakMapKeyIterator<T:{}, V> {
	var m:WeakMap<T, V>;
	// Scan cursor into the bucket arrays.
	var i:Int;
	var len:Int;
	// Strong reference to the key found by hasNext(), so it cannot be
	// collected between hasNext() and next().
	var lastKey:T;

	public function new(m:WeakMap<T, V>) {
		this.i = 0;
		this.m = m;
		this.len = m.nBuckets;
	}

	/**
		Scans forward for the next occupied slot whose key is still alive.
		On success, positions the cursor on that slot and pins the key.
	**/
	public function hasNext():Bool {
		for (j in i...len) {
			if (!WeakMap.isEither(m.hashes[j])) {
				var entry = m.entries[j], last = entry.get();
				if (last != null) {
					#if !no_map_cache
					// Cache the slot we actually found (j), keeping
					// cachedIndex consistent with cachedEntry. The previous
					// code cached the stale cursor `i`, which points at a
					// different slot than `entry` whenever dead or flagged
					// slots were skipped — a cache hit could then operate on
					// the wrong bucket.
					m.cachedIndex = j;
					m.cachedEntry = entry;
					#end
					lastKey = last; // keep a strong reference to the key while iterating, so it doesn't get collected
					i = j;
					return true;
				}
			}
		}
		lastKey = null;
		return false;
	}

	// Assumes hasNext() returned true, per the standard iterator protocol:
	// returns the pinned key and advances past its slot.
	public function next():T {
		i = i + 1;
		return lastKey;
	}
}
@:access(haxe.ds.WeakMap)
private final class WeakMapValueIterator<K:{}, T> {
	var m:WeakMap<K, T>;
	// Scan cursor into the bucket arrays.
	var i:Int;
	var len:Int;

	public function new(m:WeakMap<K, T>) {
		this.m = m;
		this.i = 0;
		this.len = m.nBuckets;
	}

	/**
		Scans forward for the next occupied slot whose key is still alive;
		positions the cursor on it when found.
	**/
	public function hasNext():Bool {
		var j = i;
		while (j < len) {
			if (!WeakMap.isEither(m.hashes[j]) && m.entries[j].get() != null) {
				i = j;
				return true;
			}
			j++;
		}
		return false;
	}

	// Assumes hasNext() returned true, per the standard iterator protocol.
	public inline function next():T {
		var entry = m.entries[i];
		i++;
		return entry.value;
	}
}
// Per-slot state value: either one of the two reserved flags (empty /
// deleted — see isEither) or the mixed hash of the stored key.
private typedef HashType = Int;

View File

@ -0,0 +1,82 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
import java.util.zip.Deflater;
/**
	Streaming deflate compressor built on `java.util.zip.Deflater`.
	NOTE(review): the constructor unconditionally throws, so the instance
	API is currently unusable on this platform; only the static `run`
	helper works.
**/
class Compress {
// Underlying native deflater. Never assigned in practice: the constructor
// throws before reaching the assignment.
var deflater:Deflater;
// Flush mode selected via setFlushMode(). NOTE(review): this field is
// stored but never passed to deflater.deflate(), so it currently has no
// effect — verify intent before enabling this class.
var mode:Int;
// Set by setFlushMode(FINISH); makes the next execute() call finish the stream.
var finish:Bool = false;
public function new(level:Int) {
throw new haxe.exceptions.NotImplementedException("Not implemented for this platform"); // FIXME: Add unit tests for Compress/Uncompress and check current implementation
this.deflater = new Deflater(level); // unreachable until the throw above is removed
this.mode = Deflater.NO_FLUSH;
}
/**
	Compresses bytes from `src` (starting at `srcPos`) into `dst`
	(starting at `dstPos`).
	NOTE(review): `read` is `getTotalIn()`, the cumulative input consumed
	since construction — not the amount consumed by this call alone.
	Confirm against callers' expectations.
**/
public function execute(src:haxe.io.Bytes, srcPos:Int, dst:haxe.io.Bytes, dstPos:Int):{done:Bool, read:Int, write:Int} {
deflater.setInput(src.getData(), srcPos, src.length - srcPos);
if (finish)
deflater.finish();
finish = false; // one-shot flag: FINISH applies to a single execute() call
var written = deflater.deflate(dst.getData(), dstPos, dst.length - dstPos);
var read = deflater.getTotalIn();
return {done: deflater.finished(), read: read, write: written};
}
/**
	Maps the cross-platform FlushMode to native Deflater constants.
	FINISH has no native constant here: it sets the `finish` flag and
	falls back to FULL_FLUSH. BLOCK is not supported.
**/
public function setFlushMode(f:FlushMode) {
this.mode = switch (f) {
case NO:
Deflater.NO_FLUSH;
case SYNC:
Deflater.SYNC_FLUSH;
case FULL:
Deflater.FULL_FLUSH;
case FINISH:
this.finish = true;
Deflater.FULL_FLUSH;
case BLOCK:
throw new haxe.exceptions.NotImplementedException();
}
}
// Releases the native deflater's off-heap resources.
public function close() {
deflater.end();
}
/**
	One-shot helper: compresses all of `s` at the given `level` and
	returns the complete compressed payload. Independent of the instance
	API above.
**/
public static function run(s:haxe.io.Bytes, level:Int):haxe.io.Bytes {
var deflater = new java.util.zip.Deflater(level);
deflater.setInput(s.getData());
var outputStream = new java.io.ByteArrayOutputStream(s.length);
deflater.finish();
var buffer = haxe.io.Bytes.alloc(1024).getData();
while (!deflater.finished()) {
var count = deflater.deflate(buffer);
outputStream.write(buffer, 0, count);
}
outputStream.close();
return haxe.io.Bytes.ofData(outputStream.toByteArray());
}
}

View File

@ -0,0 +1,64 @@
/*
* Copyright (C)2005-2019 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package haxe.zip;
import java.util.zip.Inflater;
/**
	Streaming inflate decompressor built on `java.util.zip.Inflater`.
**/
class Uncompress {
final inflater:Inflater;
/**
	Creates a decompressor. A negative `windowBits` selects the native
	"nowrap" mode — presumably mirroring the zlib convention where negative
	windowBits means raw deflate data without zlib header/checksum; TODO
	confirm against callers.
**/
public function new(?windowBits:Int) {
inflater = new Inflater(windowBits != null && windowBits < 0);
}
/**
	Decompresses bytes from `src` (starting at `srcPos`) into `dst`
	(starting at `dstPos`).
	NOTE(review): `read`/`write` come from getBytesRead()/getBytesWritten(),
	which are cumulative totals since construction — not per-call amounts.
	Confirm against callers' expectations.
**/
public function execute(src:haxe.io.Bytes, srcPos:Int, dst:haxe.io.Bytes, dstPos:Int):{done:Bool, read:Int, write:Int} {
inflater.setInput(src.getData(), srcPos, src.length - srcPos);
inflater.inflate(dst.getData(), dstPos, dst.length - dstPos);
return {
done: inflater.finished(),
read: Int64.toInt(inflater.getBytesRead()),
write: Int64.toInt(inflater.getBytesWritten())
};
}
// No-op: java.util.zip.Inflater exposes no flush-mode control.
public function setFlushMode(f:FlushMode) {}
// Releases the native inflater's off-heap resources.
public function close() {
inflater.end();
}
/**
	One-shot helper: decompresses all of `src` and returns the full
	result. `bufsize` sizes the intermediate buffer (defaults to
	`src.length`).
**/
public static function run(src:haxe.io.Bytes, ?bufsize:Int):haxe.io.Bytes {
var decompresser = new java.util.zip.Inflater();
var buf = haxe.io.Bytes.alloc(bufsize == null ? src.length : bufsize).getData();
var out = new java.io.ByteArrayOutputStream(src.length);
decompresser.setInput(src.getData(), 0, src.length);
while (!decompresser.finished()) {
var count = decompresser.inflate(buf);
out.write(buf, 0, count);
}
out.close();
return haxe.io.Bytes.ofData(out.toByteArray());
}
}