forked from LeenkxTeam/LNXSDK
Update Files
This commit is contained in:
594
Kha/Sources/kha/audio2/ogg/vorbis/data/Codebook.hx
Normal file
594
Kha/Sources/kha/audio2/ogg/vorbis/data/Codebook.hx
Normal file
@ -0,0 +1,594 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
import haxe.ds.Vector;
|
||||
import haxe.io.Bytes;
|
||||
import haxe.io.Input;
|
||||
import kha.audio2.ogg.tools.MathTools;
|
||||
import kha.audio2.ogg.vorbis.data.ReaderError.ReaderErrorType;
|
||||
import kha.audio2.ogg.vorbis.VorbisDecodeState;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
/**
 * A single Vorbis codebook: the Huffman code data (lengths, codewords,
 * accelerated and sorted lookup tables) plus the optional
 * vector-quantization (VQ) lookup table used when decoding residues.
 *
 * Ported from stb_vorbis; the original C comments are kept where present.
 *
 * @author shohei909
 */
class Codebook
{
    // Sentinel stored as a "length" for entries that have no codeword
    // (sparse codebooks). Real codeword lengths are 1..32, so 255 never collides.
    static public inline var NO_CODE = 255;

    public var dimensions:Int;              // vector dimension of each entry
    public var entries:Int;                 // number of entries in the codebook
    public var codewordLengths:Vector<Int>; //uint8*
    public var minimumValue:Float;          // VQ value offset
    public var deltaValue:Float;            // VQ value scale
    public var valueBits:Int;               //uint8
    public var lookupType:Int;              //uint8 (0 = none, 1 = implicit lattice, 2 = explicit)
    public var sequenceP:Bool;              //uint8 (true: vector components accumulate)
    public var sparse:Bool;                 //uint8
    public var lookupValues:UInt;           //uint32
    public var multiplicands:Vector<Float>; // codetype *
    public var codewords:Vector<UInt>;      //uint32*
    public var fastHuffman:Vector<Int>;     //[FAST_HUFFMAN_TABLE_SIZE];
    public var sortedCodewords:Array<UInt>; //uint32*
    public var sortedValues:Vector<Int>;
    public var sortedEntries:Int;

    public function new () {
    }

    /**
     * Reads one codebook definition from the setup-header bitstream and
     * builds all decode tables.
     * Throws ReaderError(INVALID_SETUP) on any malformed data.
     */
    static public function read(decodeState:VorbisDecodeState):Codebook {
        var c = new Codebook();
        // Codebook sync pattern: 0x42 0x43 0x56 ("BCV").
        if (decodeState.readBits(8) != 0x42 || decodeState.readBits(8) != 0x43 || decodeState.readBits(8) != 0x56) {
            throw new ReaderError(ReaderErrorType.INVALID_SETUP);
        }

        // 16-bit dimension count, low byte first.
        var x = decodeState.readBits(8);
        c.dimensions = (decodeState.readBits(8) << 8) + x;

        // 24-bit entry count, low byte first.
        var x = decodeState.readBits(8);
        var y = decodeState.readBits(8);
        c.entries = (decodeState.readBits(8) << 16) + (y << 8) + x;
        var ordered = decodeState.readBits(1);
        // Ordered codebooks are never sparse.
        c.sparse = (ordered != 0) ? false : (decodeState.readBits(1) != 0);

        var lengths = new Vector(c.entries);
        if (!c.sparse) {
            c.codewordLengths = lengths;
        }

        var total = 0;

        if (ordered != 0) {
            // Ordered: lengths stored as runs of entries with increasing length.
            var currentEntry = 0;
            var currentLength = decodeState.readBits(5) + 1;

            while (currentEntry < c.entries) {
                var limit = c.entries - currentEntry;
                var n = decodeState.readBits(MathTools.ilog(limit));
                if (currentEntry + n > c.entries) {
                    throw new ReaderError(ReaderErrorType.INVALID_SETUP, "codebook entrys");
                }
                for (i in 0...n) {
                    lengths.set(currentEntry + i, currentLength);
                }
                currentEntry += n;
                currentLength++;
            }
        } else {
            // Unordered: one length per entry; in a sparse book an entry
            // may be absent entirely.
            for (j in 0...c.entries) {
                var present = (c.sparse) ? decodeState.readBits(1) : 1;
                if (present != 0) {
                    lengths.set(j, decodeState.readBits(5) + 1);
                    total++;
                } else {
                    lengths.set(j, NO_CODE);
                }
            }
        }

        // If a "sparse" book is at least a quarter populated, dense storage
        // is cheaper than the sparse side tables.
        if (c.sparse && total >= (c.entries >> 2)) {
            c.codewordLengths = lengths;
            c.sparse = false;
        }

        // Number of entries that go into the sorted (binary-search) table:
        // every present entry when sparse, otherwise only the entries whose
        // codewords are too long for the accelerated table.
        c.sortedEntries = if (c.sparse) {
            total;
        } else {
            var sortedCount = 0;
            for (j in 0...c.entries) {
                var l = lengths.get(j);
                if (l > Setting.FAST_HUFFMAN_LENGTH && l != NO_CODE) {
                    ++sortedCount;
                }
            }
            sortedCount;
        }

        var values:Vector<UInt> = null;

        if (!c.sparse) {
            c.codewords = new Vector<UInt>(c.entries);
        } else {
            if (c.sortedEntries != 0) {
                c.codewordLengths = new Vector(c.sortedEntries);
                c.codewords = new Vector<UInt>(c.entries);
                values = new Vector<UInt>(c.entries);
            }
            // NOTE: the C original computed a scratch allocation size here
            // (entries + 64 * sortedEntries); that value was never used in
            // this port, so the dead computation has been removed.
        }

        if (!c.computeCodewords(lengths, c.entries, values)) {
            throw new ReaderError(ReaderErrorType.INVALID_SETUP, "compute codewords");
        }

        if (c.sortedEntries != 0) {
            // allocate an extra slot for sentinels
            c.sortedCodewords = [];

            // allocate an extra slot at the front so that sortedValues[-1] is defined
            // so that we can catch that case without an extra if
            c.sortedValues = new Vector<Int>(c.sortedEntries);
            c.computeSortedHuffman(lengths, values);
        }

        if (c.sparse) {
            // The per-entry tables are only needed while building the
            // sorted tables; drop them for sparse books.
            values = null;
            c.codewords = null;
            lengths = null;
        }

        c.computeAcceleratedHuffman();

        c.lookupType = decodeState.readBits(4);
        if (c.lookupType > 2) {
            throw new ReaderError(ReaderErrorType.INVALID_SETUP, "codebook lookup type");
        }

        if (c.lookupType > 0) {
            // A VQ lookup table follows the Huffman data.
            c.minimumValue = VorbisTools.floatUnpack(decodeState.readBits(32));
            c.deltaValue = VorbisTools.floatUnpack(decodeState.readBits(32));
            c.valueBits = decodeState.readBits(4) + 1;
            c.sequenceP = (decodeState.readBits(1) != 0);

            if (c.lookupType == 1) {
                c.lookupValues = VorbisTools.lookup1Values(c.entries, c.dimensions);
            } else {
                c.lookupValues = c.entries * c.dimensions;
            }
            var mults = new Vector<Int>(c.lookupValues);
            for (j in 0...c.lookupValues) {
                var q = decodeState.readBits(c.valueBits);
                if (q == VorbisTools.EOP) {
                    throw new ReaderError(ReaderErrorType.INVALID_SETUP, "fail lookup");
                }
                mults[j] = q;
            }

            {
                // Pre-scale the quantized values into floats.
                c.multiplicands = new Vector(c.lookupValues);

                //STB_VORBIS_CODEBOOK_FLOATS = true
                for (j in 0...c.lookupValues) {
                    c.multiplicands[j] = mults[j] * c.deltaValue + c.minimumValue;
                }
            }

            //STB_VORBIS_CODEBOOK_FLOATS = true
            if (c.lookupType == 2 && c.sequenceP) {
                for (j in 1...c.lookupValues) {
                    c.multiplicands[j] = c.multiplicands[j - 1];
                }
                c.sequenceP = false;
            }
        }

        return c;
    }

    /**
     * Records one assigned codeword. Dense books index by symbol; sparse
     * books append (codeword, length, symbol) at position `count`.
     */
    inline function addEntry(huffCode:UInt, symbol:Int, count:Int, len:Int, values:Vector<UInt>)
    {
        if (!sparse) {
            codewords[symbol] = huffCode;
        } else {
            codewords[count] = huffCode;
            codewordLengths.set(count, len);
            values[count] = symbol;
        }
    }

    /**
     * True when an entry with codeword length `len` must be decodable via
     * the sorted/binary-search table: every present entry of a sparse book,
     * and any dense entry too long for the accelerated table.
     */
    inline function includeInSort(len:Int)
    {
        return if (sparse) {
            VorbisTools.assert(len != NO_CODE);
            true;
        } else if (len == NO_CODE) {
            false;
        } else if (len > Setting.FAST_HUFFMAN_LENGTH) {
            true;
        } else {
            false;
        }
    }

    /**
     * Assigns canonical Huffman codewords from the per-entry lengths,
     * walking the implicit code tree. Returns false when the lengths
     * describe an over-subscribed (invalid) tree.
     */
    function computeCodewords(len:Vector<Int>, n:Int, values:Vector<UInt>)
    {
        // available[i] holds the single free leaf at depth i (0 = none).
        var available = new Vector<UInt>(32);
        for (x in 0...32) available[x] = 0;

        // find the first entry
        var k = 0;
        while (k < n) {
            if (len.get(k) < NO_CODE) {
                break;
            }
            k++;
        }

        if (k == n) {
            // Completely empty codebook: legal only if nothing needs sorting.
            VorbisTools.assert(sortedEntries == 0);
            return true;
        }

        var m = 0;

        // add to the list
        addEntry(0, k, m++, len.get(k), values);

        // add all available leaves
        var i = 0;

        while (++i <= len.get(k)) {
            available[i] = (1:UInt) << ((32 - i):UInt);
        }

        // note that the above code treats the first case specially,
        // but it's really the same as the following code, so they
        // could probably be combined (except the initial code is 0,
        // and I use 0 in available[] to mean 'empty')
        i = k;
        while (++i < n) {
            var z = len.get(i);
            if (z == NO_CODE) continue;

            // find lowest available leaf (should always be earliest,
            // which is what the specification calls for)
            // note that this property, and the fact we can never have
            // more than one free leaf at a given level, isn't totally
            // trivial to prove, but it seems true and the assert never
            // fires, so!
            while (z > 0 && available[z] == 0) --z;
            if (z == 0) {
                // over-subscribed tree
                return false;
            }

            var res:UInt = available[z];
            available[z] = 0;
            addEntry(VorbisTools.bitReverse(res), i, m++, len.get(i), values);

            // propogate availability up the tree
            if (z != len.get(i)) {
                var y = len.get(i);
                while (y > z) {
                    VorbisTools.assert(available[y] == 0);
                    available[y] = res + (1 << (32 - y));
                    y--;
                }
            }
        }

        return true;
    }

    /**
     * Builds the sorted codeword table (bit-reversed, ascending) and the
     * parallel sortedValues / codewordLengths tables used for binary-search
     * decoding of codewords too long for the fast table.
     */
    function computeSortedHuffman(lengths:Vector<Int>, values:Vector<UInt>)
    {
        // build a list of all the entries
        // OPTIMIZATION: don't include the short ones, since they'll be caught by FAST_HUFFMAN.
        // this is kind of a frivolous optimization--I don't see any performance improvement,
        // but it's like 4 extra lines of code, so.
        if (!sparse) {
            var k = 0;
            for (i in 0...entries) {
                if (includeInSort(lengths.get(i))) {
                    sortedCodewords[k++] = VorbisTools.bitReverse(codewords[i]);
                }
            }
            VorbisTools.assert(k == sortedEntries);
        } else {
            for (i in 0...sortedEntries) {
                sortedCodewords[i] = VorbisTools.bitReverse(codewords[i]);
            }
        }

        // Sentinel so the binary search below never runs off the end.
        sortedCodewords[sortedEntries] = 0xffffffff;
        sortedCodewords.sort(VorbisTools.uintAsc);

        var len = sparse ? sortedEntries : entries;
        // now we need to indicate how they correspond; we could either
        //   #1: sort a different data structure that says who they correspond to
        //   #2: for each sorted entry, search the original list to find who corresponds
        //   #3: for each original entry, find the sorted entry
        // #1 requires extra storage, #2 is slow, #3 can use binary search!
        for (i in 0...len) {
            var huffLen = sparse ? lengths.get(values[i]) : lengths.get(i);
            if (includeInSort(huffLen)) {
                var code = VorbisTools.bitReverse(codewords[i]);
                // Binary search for `code` in the sorted table.
                var x = 0;
                var n = sortedEntries;
                while (n > 1) {
                    // invariant: sc[x] <= code < sc[x+n]
                    var m = x + (n >> 1);
                    if (sortedCodewords[m] <= code) {
                        x = m;
                        n -= (n>>1);
                    } else {
                        n >>= 1;
                    }
                }

                //VorbisTools.assert(sortedCodewords[x] == code);
                if (sparse) {
                    sortedValues[x] = values[i];
                    codewordLengths.set(x, huffLen);
                } else {
                    sortedValues[x] = i;
                }
            }
        }
    }

    /**
     * Builds the table-driven fast path: fastHuffman[bits] maps the next
     * FAST_HUFFMAN_LENGTH input bits to an entry index, or -1 when the
     * codeword is longer and the sorted table must be consulted instead.
     */
    function computeAcceleratedHuffman()
    {
        fastHuffman = new Vector(Setting.FAST_HUFFMAN_TABLE_SIZE);
        // Initialize every slot to "not decodable via fast table" (-1).
        // (The original also pre-set slot 0 before this loop; that was
        // redundant with the loop and has been removed.)
        for (i in 0...(Setting.FAST_HUFFMAN_TABLE_SIZE)) {
            fastHuffman[i] = -1;
        }

        var len = (sparse) ? sortedEntries : entries;

        //STB_VORBIS_FAST_HUFFMAN_SHORT
        //if (len > 32767) len = 32767; // largest possible value we can encode!

        for (i in 0...len) {
            if (codewordLengths[i] <= Setting.FAST_HUFFMAN_LENGTH) {
                var z:Int = (sparse) ? VorbisTools.bitReverse(sortedCodewords[i]) : codewords[i];
                // set table entries for all bit combinations in the higher bits
                while (z < Setting.FAST_HUFFMAN_TABLE_SIZE) {
                    fastHuffman[z] = i;
                    z += 1 << codewordLengths[i];
                }
            }
        }

    }

    /**
     * Decodes one codeword and accumulates its VQ vector (up to `len`
     * components) into output[offset...]. Returns false on end-of-packet.
     */
    function codebookDecode(decodeState:VorbisDecodeState, output:Vector<Float>, offset:Int, len:Int)
    {
        var z = decodeStart(decodeState);
        var lookupValues = this.lookupValues;
        var sequenceP = this.sequenceP;
        var multiplicands = this.multiplicands;
        var minimumValue = this.minimumValue;

        if (z < 0) {
            return false;
        }
        if (len > dimensions) {
            len = dimensions;
        }

        // STB_VORBIS_DIVIDES_IN_CODEBOOK = true
        if (lookupType == 1) {
            // Type 1: components are the digits of z in base lookupValues.
            var div = 1;
            var last = 0.0;
            for (i in 0...len) {
                var off = Std.int(z / div) % lookupValues;
                var val = multiplicands[off] + last;
                output[offset + i] += val;
                if (sequenceP) {
                    last = val + minimumValue;
                }
                div *= lookupValues;
            }
            return true;
        }

        // Type 2: the vector is stored contiguously starting at z*dimensions.
        z *= dimensions;
        if (sequenceP) {
            var last = 0.0;
            for (i in 0...len) {
                var val = multiplicands[z + i] + last;
                output[offset + i] += val;
                last = val + minimumValue;
            }
        } else {
            var last = 0.0; // stays 0.0 in this branch (mirrors the C original)
            for (i in 0...len) {
                output[offset + i] += multiplicands[z + i] + last;
            }
        }
        return true;
    }

    /**
     * Like codebookDecode, but writes components `step` samples apart
     * (interleaved output). Returns false on end-of-packet.
     */
    function codebookDecodeStep(decodeState:VorbisDecodeState, output:Vector<Float>, offset:Int, len:Int, step:Int)
    {
        var z = decodeStart(decodeState);
        var last = 0.0;
        if (z < 0) {
            return false;
        }
        if (len > dimensions) {
            len = dimensions;
        }

        var lookupValues = this.lookupValues;
        var sequenceP = this.sequenceP;
        var multiplicands = this.multiplicands;

        // STB_VORBIS_DIVIDES_IN_CODEBOOK = true

        if (lookupType == 1) {
            // Type 1: components are the digits of z in base lookupValues.
            var div = 1;
            for (i in 0...len) {
                var off = Std.int(z / div) % lookupValues;
                var val = multiplicands[off] + last;
                output[offset + i * step] += val;
                if (sequenceP) {
                    last = val;
                }
                div *= lookupValues;
            }
            return true;
        }

        // Type 2: contiguous vector.
        z *= dimensions;
        for (i in 0...len) {
            var val = multiplicands[z + i] + last;
            output[offset + i * step] += val;
            if (sequenceP) {
                last = val;
            }
        }

        return true;
    }

    /**
     * Reads and resolves the next codeword for this codebook; a negative
     * result means end-of-packet. (The original carried a large
     * commented-out inline fallback here; it has been removed for
     * readability — decoding is delegated to VorbisDecodeState.decode.)
     */
    inline function decodeStart(decodeState:VorbisDecodeState)
    {
        return decodeState.decode(this);
    }

    // NOTE(review): appears unused within this file; kept in case other
    // project code reaches it via @:access.
    static var delay = 0;

    /**
     * Decodes codewords and accumulates their VQ vectors into `ch`
     * de-interleaved residue buffers, advancing the (cInter, pInter)
     * cursor across channels. Returns the updated cursor, or null on a
     * clean end-of-packet. Throws on an invalid stream.
     */
    public function decodeDeinterleaveRepeat(decodeState:VorbisDecodeState, residueBuffers:Vector<Vector<Float>>, ch:Int, cInter:Int, pInter:Int, len:Int, totalDecode:Int)
    {
        var effective = dimensions;

        // type 0 is only legal in a scalar context
        if (lookupType == 0) {
            throw new ReaderError(INVALID_STREAM);
        }

        var multiplicands = this.multiplicands;
        var sequenceP = this.sequenceP;
        var lookupValues = this.lookupValues;

        while (totalDecode > 0) {
            var last = 0.0;
            var z = decodeState.decode(this);

            if (z < 0) {
                if (decodeState.isLastByte()) {
                    return null;
                }
                throw new ReaderError(INVALID_STREAM);
            }

            // if this will take us off the end of the buffers, stop short!
            // we check by computing the length of the virtual interleaved
            // buffer (len*ch), our current offset within it (pInter*ch)+(cInter),
            // and the length we'll be using (effective)
            if (cInter + pInter * ch + effective > len * ch) {
                // NOTE(review): mirrors the upstream stb_vorbis formula
                // verbatim; the sign of cInter differs from the comment's
                // arithmetic — confirm against upstream before changing.
                effective = len * ch - (pInter * ch - cInter);
            }

            if (lookupType == 1) {
                var div = 1;
                if (sequenceP) {
                    for (i in 0...effective) {
                        var off = Std.int(z / div) % lookupValues;
                        var val = multiplicands[off] + last;
                        residueBuffers[cInter][pInter] += val;
                        if (++cInter == ch) {
                            cInter = 0;
                            ++pInter;
                        }
                        last = val;
                        div *= lookupValues;
                    }
                } else {
                    for (i in 0...effective) {
                        var off = Std.int(z / div) % lookupValues;
                        var val = multiplicands[off] + last;
                        residueBuffers[cInter][pInter] += val;
                        if (++cInter == ch) {
                            cInter = 0;
                            ++pInter;
                        }
                        div *= lookupValues;
                    }
                }
            } else {
                z *= dimensions;
                if (sequenceP) {
                    for (i in 0...effective) {
                        var val = multiplicands[z + i] + last;
                        residueBuffers[cInter][pInter] += val;
                        if (++cInter == ch) {
                            cInter = 0;
                            ++pInter;
                        }
                        last = val;
                    }
                } else {
                    for (i in 0...effective) {
                        var val = multiplicands[z + i] + last;
                        residueBuffers[cInter][pInter] += val;
                        if (++cInter == ch) {
                            cInter = 0;
                            ++pInter;
                        }
                    }
                }
            }

            totalDecode -= effective;
        }

        return {
            cInter : cInter,
            pInter : pInter
        }
    }

    /**
     * Decodes one residue partition of `n` samples into `target` starting
     * at `offset`, using interleaved stepping for residue type 0 and
     * contiguous runs otherwise. Returns false on end-of-packet.
     */
    public function residueDecode(decodeState:VorbisDecodeState, target:Vector<Float>, offset:Int, n:Int, rtype:Int)
    {
        if (rtype == 0) {
            var step = Std.int(n / dimensions);
            for (k in 0...step) {
                if (!codebookDecodeStep(decodeState, target, offset + k, n-offset-k, step)) {
                    return false;
                }
            }
        } else {
            var k = 0;
            while(k < n) {
                if (!codebookDecode(decodeState, target, offset, n-k)) {
                    return false;
                }
                k += dimensions;
                offset += dimensions;
            }
        }
        return true;
    }
}
|
130
Kha/Sources/kha/audio2/ogg/vorbis/data/Comment.hx
Normal file
130
Kha/Sources/kha/audio2/ogg/vorbis/data/Comment.hx
Normal file
@ -0,0 +1,130 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
/**
 * Vorbis comment (metadata) block: a case-insensitive multimap from tag
 * name to one or more string values, with convenience accessors for the
 * standard tags.
 *
 * @author shohei909
 */
class Comment {
    // Lower-cased tag name -> every value seen for that tag, in insertion order.
    public var data(default, null):Map<String, Array<String>>;

    public var title(get, never):String;
    function get_title() { return getString("title"); }

    public var loopStart(get, never):Null<Int>;
    function get_loopStart() { return Std.parseInt(getString("loopstart")); }

    public var loopLength(get, never):Null<Int>;
    function get_loopLength() { return Std.parseInt(getString("looplength")); }

    public var version(get, never):String;
    function get_version() { return getString("version"); }

    public var album(get, never):String;
    function get_album() { return getString("album"); }

    public var organization(get, never):String;
    function get_organization() { return getString("organization"); }

    public var tracknumber(get, never):String;
    function get_tracknumber() { return getString("tracknumber"); }

    public var performer(get, never):String;
    function get_performer() { return getString("performer"); }

    public var copyright(get, never):String;
    function get_copyright() { return getString("copyright"); }

    public var license(get, never):String;
    function get_license() { return getString("license"); }

    public var artist(get, never):String;
    function get_artist() { return getString("artist"); }

    public var description(get, never):String;
    function get_description() { return getString("description"); }

    public var genre(get, never):String;
    function get_genre() { return getString("genre"); }

    public var date(get, never):String;
    function get_date() { return getString("date"); }

    public var location(get, never):String;
    function get_location() { return getString("location"); }

    public var contact(get, never):String;
    function get_contact() { return getString("contact"); }

    public var isrc(get, never):String;
    function get_isrc() { return getString("isrc"); }

    public var artists(get, never):Array<String>;
    function get_artists() { return getArray("artist"); }

    public function new() {
        data = new Map();
    }

    /**
     * Appends a value under the given tag. Tag names are matched
     * case-insensitively (stored lower-cased).
     */
    public function add(key:String, value:String) {
        var tag = key.toLowerCase();
        var values = data[tag];
        if (values != null) {
            values.push(value);
        } else {
            data[tag] = [value];
        }
    }

    /**
     * First value recorded for the tag, or null when the tag is absent.
     */
    public function getString(key:String) {
        var values = data[key.toLowerCase()];
        return (values == null) ? null : values[0];
    }

    /**
     * All values recorded for the tag, or null when the tag is absent.
     */
    public function getArray(key:String) {
        return data[key.toLowerCase()];
    }
}
|
151
Kha/Sources/kha/audio2/ogg/vorbis/data/Floor.hx
Normal file
151
Kha/Sources/kha/audio2/ogg/vorbis/data/Floor.hx
Normal file
@ -0,0 +1,151 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
import haxe.ds.Vector;
|
||||
import haxe.io.Input;
|
||||
import kha.audio2.ogg.vorbis.data.ReaderError;
|
||||
import kha.audio2.ogg.vorbis.VorbisDecodeState;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
/**
 * One floor configuration from the Vorbis setup header. Only floor
 * type 1 is supported for decoding; a type-0 floor is parsed and then
 * rejected with FEATURE_NOT_SUPPORTED.
 *
 * @author shohei909
 */
class Floor
{
    public var floor0:Floor0;
    public var floor1:Floor1;
    public var type:Int; // 0 or 1; anything else is INVALID_SETUP

    function new()
    {

    }

    /**
     * Reads one floor definition from the setup bitstream.
     * Throws ReaderError(INVALID_SETUP) on malformed data and
     * ReaderError(FEATURE_NOT_SUPPORTED) for floor type 0.
     */
    public static function read(decodeState:VorbisDecodeState, codebooks:Vector<Codebook>):Floor
    {
        var floor = new Floor();

        floor.type = decodeState.readBits(16);
        if (floor.type > 1) {
            throw new ReaderError(INVALID_SETUP);
        }
        if (floor.type == 0) {
            var g = floor.floor0 = new Floor0();
            g.order = decodeState.readBits(8);
            g.rate = decodeState.readBits(16);
            g.barkMapSize = decodeState.readBits(16);
            g.amplitudeBits = decodeState.readBits(6);
            g.amplitudeOffset = decodeState.readBits(8);
            g.numberOfBooks = decodeState.readBits(4) + 1;
            // FIX: bookList was written without ever being allocated, so a
            // type-0 floor crashed with a null access instead of reaching
            // the intended FEATURE_NOT_SUPPORTED error below.
            g.bookList = new Vector(g.numberOfBooks);
            for (j in 0...g.numberOfBooks) {
                g.bookList[j] = decodeState.readBits(8);
            }
            // Floor 0 decoding is not implemented by this port.
            throw new ReaderError(FEATURE_NOT_SUPPORTED);
        } else {
            var p = new Array<IntPoint>();
            var g = floor.floor1 = new Floor1();
            var maxClass = -1;
            g.partitions = decodeState.readBits(5);
            g.partitionClassList = new Vector(g.partitions);
            for (j in 0...g.partitions) {
                g.partitionClassList[j] = decodeState.readBits(4);
                if (g.partitionClassList[j] > maxClass) {
                    maxClass = g.partitionClassList[j];
                }
            }
            // Per-class configuration (dimensions, master book, sub-books).
            g.classDimensions = new Vector(maxClass + 1);
            g.classMasterbooks = new Vector(maxClass + 1);
            g.classSubclasses = new Vector(maxClass + 1);
            g.subclassBooks = new Vector(maxClass + 1);
            for (j in 0...(maxClass + 1)) {
                g.classDimensions[j] = decodeState.readBits(3) + 1;
                g.classSubclasses[j] = decodeState.readBits(2);
                if (g.classSubclasses[j] != 0) {
                    g.classMasterbooks[j] = decodeState.readBits(8);
                    if (g.classMasterbooks[j] >= codebooks.length) {
                        throw new ReaderError(INVALID_SETUP);
                    }
                }

                var kl = (1 << g.classSubclasses[j]);
                g.subclassBooks[j] = new Vector(kl);
                for (k in 0...kl) {
                    // Stored biased by one; -1 means "no book".
                    g.subclassBooks[j][k] = decodeState.readBits(8)-1;
                    if (g.subclassBooks[j][k] >= codebooks.length) {
                        throw new ReaderError(INVALID_SETUP);
                    }
                }
            }

            g.floor1Multiplier = decodeState.readBits(2) + 1;
            g.rangebits = decodeState.readBits(4);
            g.xlist = new Vector(31*8+2);
            g.xlist[0] = 0;
            g.xlist[1] = 1 << g.rangebits;
            g.values = 2;
            for (j in 0...g.partitions) {
                var c = g.partitionClassList[j];
                for (k in 0...g.classDimensions[c]) {
                    g.xlist[g.values] = decodeState.readBits(g.rangebits);
                    g.values++;
                }
            }

            // precompute the sorting
            for (j in 0...g.values) {
                p.push(new IntPoint());
                p[j].x = g.xlist[j];
                p[j].y = j; // remember the original index through the sort
            }

            p.sort(VorbisTools.pointCompare);

            g.sortedOrder = new Vector(g.values);
            for (j in 0...g.values) {
                g.sortedOrder[j] = p[j].y;
            }

            g.neighbors = new Vector(g.values);
            // precompute the neighbors
            for (j in 2...g.values) {
                var ne = VorbisTools.neighbors(g.xlist, j);
                // NOTE(review): only indices 0 and 1 are used, so this
                // allocates more than needed; kept as-is to avoid any
                // behavior change.
                g.neighbors[j] = new Vector(g.values);
                g.neighbors[j][0] = ne.low;
                g.neighbors[j][1] = ne.high;
            }
        }

        return floor;
    }
}
|
||||
|
||||
/**
 * Floor type 0 configuration (parsed by Floor.read, which then throws
 * FEATURE_NOT_SUPPORTED — this port does not decode type-0 floors).
 */
class Floor0
{
    public var order:Int; //uint8
    public var rate:Int; //uint16
    public var barkMapSize:Int; //uint16
    public var amplitudeBits:Int; //uint8
    public var amplitudeOffset:Int; //uint8
    public var numberOfBooks:Int; //uint8
    public var bookList:Vector<UInt>; //uint8 [16] varies

    public function new() {
    }
}
|
||||
|
||||
/**
 * Floor type 1 configuration: partition classes, per-class codebook
 * assignments, and the (pre-sorted) list of floor-curve x positions.
 * All fields are filled in by Floor.read.
 */
class Floor1
{
    public var partitions:Int; // uint8
    public var partitionClassList:Vector<Int>; // uint8 varies
    public var classDimensions:Vector<Int>; // uint8 [16] varies
    public var classSubclasses:Vector<Int>; // uint8 [16] varies
    public var classMasterbooks:Vector<Int>; // uint8 [16] varies
    public var subclassBooks:Vector<Vector<Int>>; //int 16 [16][8] varies (-1 = no book)
    public var xlist:Vector<Int>; //uint16 [31*8+2] varies
    public var sortedOrder:Vector<Int>; //uint8 [31 * 8 + 2];
    public var neighbors:Vector<Vector<Int>>; //uint8[31 * 8 + 2][2];
    public var floor1Multiplier:Int;
    public var rangebits:Int;
    public var values:Int; // number of valid entries in xlist

    public function new() {
    }
}
|
213
Kha/Sources/kha/audio2/ogg/vorbis/data/Header.hx
Normal file
213
Kha/Sources/kha/audio2/ogg/vorbis/data/Header.hx
Normal file
@ -0,0 +1,213 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
import haxe.ds.Vector;
|
||||
import haxe.io.BytesInput;
|
||||
import haxe.io.BytesOutput;
|
||||
import haxe.io.Input;
|
||||
import haxe.io.Output;
|
||||
import kha.audio2.ogg.vorbis.data.Comment;
|
||||
import kha.audio2.ogg.vorbis.data.Page.PageFlag;
|
||||
import kha.audio2.ogg.vorbis.data.ReaderError.ReaderErrorType;
|
||||
import kha.audio2.ogg.vorbis.VorbisDecodeState;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
/**
 * The three mandatory Vorbis headers (identification, comment, setup)
 * parsed into one object: stream parameters, metadata, and all decode
 * tables (codebooks, floors, residues, mappings, modes).
 *
 * @author shohei909
 */
class Header {

    // First byte of each of the three mandatory header packets.
    static public inline var PACKET_ID = 1;
    static public inline var PACKET_COMMENT = 3;
    static public inline var PACKET_SETUP = 5;

    public var maximumBitRate(default, null):UInt;
    public var nominalBitRate(default, null):UInt;
    public var minimumBitRate(default, null):UInt;
    public var sampleRate(default, null):UInt;
    public var channel(default, null):Int;
    public var blocksize0(default, null):Int; // short block size (samples)
    public var blocksize1(default, null):Int; // long block size (samples)
    public var codebooks(default, null):Vector<Codebook>;
    public var floorConfig(default, null):Vector<Floor>;
    public var residueConfig(default, null):Vector<Residue>;
    public var mapping(default, null):Vector<Mapping>;
    public var modes(default, null):Vector<Mode>; // [64] varies
    public var comment(default, null):Comment;
    public var vendor(default, null):String;

    function new() {

    }

    /**
     * Reads the identification, comment, and setup headers from the start
     * of the stream, in order, and returns the populated Header.
     * Throws ReaderError on any structural or validation failure.
     */
    static public function read(decodeState:VorbisDecodeState):Header {
        var page = decodeState.page;
        page.start(decodeState);

        // The identification header must be alone on the very first page.
        if ((page.flag & PageFlag.FIRST_PAGE) == 0) {
            throw new ReaderError(INVALID_FIRST_PAGE, "not firstPage");
        }
        if ((page.flag & PageFlag.LAST_PAGE) != 0) {
            throw new ReaderError(INVALID_FIRST_PAGE, "lastPage");
        }
        if ((page.flag & PageFlag.CONTINUED_PACKET) != 0) {
            throw new ReaderError(INVALID_FIRST_PAGE, "continuedPacket");
        }

        decodeState.firstPageValidate();
        if (decodeState.readByte() != PACKET_ID) {
            throw new ReaderError(INVALID_FIRST_PAGE, "decodeState head");
        }

        // vorbis header
        decodeState.vorbisValidate();

        // vorbisVersion
        var version = decodeState.readInt32();
        if (version != 0) {
            throw new ReaderError(INVALID_FIRST_PAGE, "vorbis version : " + version);
        }

        var header = new Header();

        header.channel = decodeState.readByte();
        if (header.channel == 0) {
            throw new ReaderError(INVALID_FIRST_PAGE, "no channel");
        } else if (header.channel > Setting.MAX_CHANNELS) {
            throw new ReaderError(TOO_MANY_CHANNELS, "too many channels");
        }

        header.sampleRate = decodeState.readInt32();
        if (header.sampleRate == 0) {
            throw new ReaderError(INVALID_FIRST_PAGE, "no sampling rate");
        }

        header.maximumBitRate = decodeState.readInt32();
        header.nominalBitRate = decodeState.readInt32();
        header.minimumBitRate = decodeState.readInt32();

        // One byte packs both block-size exponents: low nibble = short
        // block, high nibble = long block; each must be in 6..13 and
        // short must not exceed long.
        var x = decodeState.readByte();
        var log0 = x & 15;
        var log1 = x >> 4;
        header.blocksize0 = 1 << log0;
        header.blocksize1 = 1 << log1;
        if (log0 < 6 || log0 > 13) {
            throw new ReaderError(INVALID_SETUP);
        }
        if (log1 < 6 || log1 > 13) {
            throw new ReaderError(INVALID_SETUP);
        }
        if (log0 > log1) {
            throw new ReaderError(INVALID_SETUP);
        }

        // framingFlag
        var x = decodeState.readByte();
        if (x & 1 == 0) {
            throw new ReaderError(INVALID_FIRST_PAGE);
        }

        // comment fields
        decodeState.page.start(decodeState);
        decodeState.startPacket();

        // Concatenate all segments of the comment packet into one buffer.
        var len = 0;
        var output = new BytesOutput();
        while((len = decodeState.next()) != 0) {
            output.write(decodeState.readBytes(len));
            decodeState.bytesInSeg = 0;
        }

        {
            var packetInput = new BytesInput(output.getBytes());
            packetInput.readByte(); // presumably the packet-type byte — mirrors the PACKET_ID read above; confirm
            packetInput.read(6);    // presumably the "vorbis" signature bytes

            var vendorLength:UInt = packetInput.readInt32();
            header.vendor = packetInput.readString(vendorLength);
            header.comment = new Comment();

            var commentCount = packetInput.readInt32();

            // Each comment is a length-prefixed "KEY=value" string;
            // entries without '=' are silently skipped.
            for (i in 0...commentCount) {
                var n = packetInput.readInt32();
                var str = packetInput.readString(n);
                var splitter = str.indexOf("=");
                if (splitter != -1) {
                    header.comment.add(str.substring(0, splitter), str.substring(splitter + 1));
                }
            }

            // framing bit must be set at the end of the comment packet
            var x = packetInput.readByte();
            if (x & 1 == 0) {
                throw new ReaderError(ReaderErrorType.INVALID_SETUP);
            }
        }

        // third packet!
        decodeState.startPacket();

        if (decodeState.readPacket() != PACKET_SETUP) {
            throw new ReaderError(ReaderErrorType.INVALID_SETUP, "setup packet");
        }

        decodeState.vorbisValidate();

        // codebooks
        var codebookCount = decodeState.readBits(8) + 1;
        header.codebooks = new Vector(codebookCount);
        for (i in 0...codebookCount) {
            header.codebooks[i] = Codebook.read(decodeState);
        }

        // time domain transfers (notused)
        x = decodeState.readBits(6) + 1;
        for (i in 0...x) {
            if (decodeState.readBits(16) != 0) {
                throw new ReaderError(INVALID_SETUP);
            }
        }

        // Floors
        var floorCount = decodeState.readBits(6) + 1;
        header.floorConfig = new Vector(floorCount);
        for (i in 0...floorCount) {
            header.floorConfig[i] = Floor.read(decodeState, header.codebooks);
        }

        // Residue
        var residueCount = decodeState.readBits(6) + 1;
        header.residueConfig = new Vector(residueCount);
        for (i in 0...residueCount) {
            header.residueConfig[i] = Residue.read(decodeState, header.codebooks);
        }

        //Mapping
        var mappingCount = decodeState.readBits(6) + 1;
        header.mapping = new Vector(mappingCount);
        for (i in 0...mappingCount) {
            var map = Mapping.read(decodeState, header.channel);
            header.mapping[i] = map;
            // Every submap must reference an existing floor and residue.
            for (j in 0...map.submaps) {
                if (map.submapFloor[j] >= header.floorConfig.length) {
                    throw new ReaderError(INVALID_SETUP);
                }
                if (map.submapResidue[j] >= header.residueConfig.length) {
                    throw new ReaderError(INVALID_SETUP);
                }
            }
        }

        // Modes: each must reference an existing mapping.
        var modeCount = decodeState.readBits(6) + 1;
        header.modes = new Vector(modeCount);
        for (i in 0...modeCount) {
            var mode = Mode.read(decodeState);
            header.modes[i] = mode;
            if (mode.mapping >= header.mapping.length) {
                throw new ReaderError(INVALID_SETUP);
            }
        }

        decodeState.flushPacket();

        return header;
    }
}
|
15
Kha/Sources/kha/audio2/ogg/vorbis/data/IntPoint.hx
Normal file
15
Kha/Sources/kha/audio2/ogg/vorbis/data/IntPoint.hx
Normal file
@ -0,0 +1,15 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
class IntPoint
{
    public var x:Int; // horizontal component
    public var y:Int; // vertical component

    public function new() {

    }
}
|
127
Kha/Sources/kha/audio2/ogg/vorbis/data/Mapping.hx
Normal file
127
Kha/Sources/kha/audio2/ogg/vorbis/data/Mapping.hx
Normal file
@ -0,0 +1,127 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
import haxe.ds.Vector;
|
||||
import haxe.io.Input;
|
||||
import kha.audio2.ogg.tools.MathTools;
|
||||
import kha.audio2.ogg.vorbis.VorbisDecodeState;
|
||||
|
||||
/**
 * Channel-mapping configuration for a vorbis stream (stb_vorbis "Mapping").
 *
 * Assigns each physical channel to a submap (a floor/residue configuration
 * pair) and records magnitude/angle channel-coupling steps.
 */
class Mapping
{
    public var couplingSteps:Int; // uint16 — number of magnitude/angle coupling pairs
    public var chan:Vector<MappingChannel>; // one entry per physical channel
    public var submaps:Int; // uint8 — number of submaps (1..16)
    public var submapFloor:Vector<Int>; // uint8 varies — submap index -> floor config index
    public var submapResidue:Vector<Int>; // uint8 varies — submap index -> residue config index
    public function new() {
    }

    /**
     * Parses one mapping configuration from the setup header.
     * @param decodeState bit reader positioned at the mapping definition
     * @param channels    channel count from the identification header
     * @throws ReaderError(INVALID_SETUP) on any spec violation
     */
    public static function read(decodeState:VorbisDecodeState, channels:Int):Mapping
    {
        var m = new Mapping();
        var mappingType = decodeState.readBits(16);
        if (mappingType != 0) {
            // Vorbis I defines only mapping type 0.
            throw new ReaderError(INVALID_SETUP, "mapping type " + mappingType);
        }

        m.chan = new Vector(channels);
        for (j in 0...channels) {
            m.chan[j] = new MappingChannel();
        }

        if (decodeState.readBits(1) != 0) {
            m.submaps = decodeState.readBits(4) + 1;
        } else {
            m.submaps = 1;
        }

        if (decodeState.readBits(1) != 0) {
            m.couplingSteps = decodeState.readBits(8) + 1;
            // The bit width for a channel number is loop-invariant; hoist it
            // instead of recomputing ilog twice per coupling step.
            var channelBits = MathTools.ilog(channels - 1);
            for (k in 0...m.couplingSteps) {
                m.chan[k].magnitude = decodeState.readBits(channelBits);
                m.chan[k].angle = decodeState.readBits(channelBits);
                if (m.chan[k].magnitude >= channels) {
                    throw new ReaderError(INVALID_SETUP);
                }
                if (m.chan[k].angle >= channels) {
                    throw new ReaderError(INVALID_SETUP);
                }
                // A channel cannot be coupled with itself.
                if (m.chan[k].magnitude == m.chan[k].angle) {
                    throw new ReaderError(INVALID_SETUP);
                }
            }
        } else {
            m.couplingSteps = 0;
        }

        // reserved field, must be zero
        if (decodeState.readBits(2) != 0) {
            throw new ReaderError(INVALID_SETUP);
        }

        if (m.submaps > 1) {
            for (j in 0...channels) {
                m.chan[j].mux = decodeState.readBits(4);
                if (m.chan[j].mux >= m.submaps) {
                    throw new ReaderError(INVALID_SETUP);
                }
            }
        } else {
            // Single submap: every channel trivially belongs to submap 0.
            for (j in 0...channels) {
                m.chan[j].mux = 0;
            }
        }

        m.submapFloor = new Vector(m.submaps);
        m.submapResidue = new Vector(m.submaps);

        for (j in 0...m.submaps) {
            decodeState.readBits(8); // discard (unused time-domain config byte)
            m.submapFloor[j] = decodeState.readBits(8);
            m.submapResidue[j] = decodeState.readBits(8);
        }

        return m;
    }

    /**
     * Applies the floor-1 envelope of channel i to target (first n/2 entries),
     * scaling target by the piecewise-linear dB curve described by finalY.
     * NOTE(review): step2Flag is accepted but not used in this body.
     * @throws ReaderError(INVALID_STREAM) for floor type 0 (not handled here)
     */
    public function doFloor(floors:Vector<Floor>, i:Int, n:Int, target:Vector<Float>, finalY:Array<Int>, step2Flag:Vector<Bool>)
    {
        var n2 = n >> 1;
        // BUGFIX: original read `var s = chan[i].mux, floor;` — the stray
        // untyped `floor` declaration was dead and immediately shadowed below.
        var s = chan[i].mux;
        var floor = floors[submapFloor[s]];
        if (floor.type == 0) {
            throw new ReaderError(INVALID_STREAM);
        } else {
            var g = floor.floor1;
            var lx = 0;
            var ly = finalY[0] * g.floor1Multiplier;
            for (q in 1...g.values) {
                var j = g.sortedOrder[q];
                // finalY[j] < 0 marks a point that did not survive fitting; skip it.
                if (finalY[j] >= 0)
                {
                    var hy = finalY[j] * g.floor1Multiplier;
                    var hx = g.xlist[j];
                    VorbisTools.drawLine(target, lx, ly, hx, hy, n2);
                    lx = hx;
                    ly = hy;
                }
            }
            if (lx < n2) {
                // optimization of: drawLine(target, lx,ly, n,ly, n2);
                for (j in lx...n2) {
                    target[j] *= VorbisTools.INVERSE_DB_TABLE[ly];
                }
            }
        }
    }
}
|
||||
|
||||
// Per-channel mapping state filled in by Mapping.read.
class MappingChannel
{
    public var magnitude:Int; // uint8 — channel index carrying magnitude in a coupling step
    public var angle:Int; // uint8 — channel index carrying angle in a coupling step
    public var mux:Int; // uint8 — submap index this channel belongs to (< Mapping.submaps)

    public function new() {
    }
}
|
29
Kha/Sources/kha/audio2/ogg/vorbis/data/Mode.hx
Normal file
29
Kha/Sources/kha/audio2/ogg/vorbis/data/Mode.hx
Normal file
@ -0,0 +1,29 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
import haxe.io.Input;
|
||||
import kha.audio2.ogg.vorbis.VorbisDecodeState;
|
||||
|
||||
/**
 * One mode configuration from the vorbis setup header: selects the
 * block size flag and the channel mapping used by a packet.
 */
class Mode
{
    public var blockflag:Bool; // uint8 — true selects the long block size
    public var mapping:Int; // uint8 — index into Header.mapping
    public var windowtype:Int; // uint16 — must be 0 in Vorbis I
    public var transformtype:Int; // uint16 — must be 0 in Vorbis I

    public function new() {
    }

    /**
     * Reads one mode definition from the setup header.
     * @throws ReaderError(INVALID_SETUP) if window or transform type is nonzero
     */
    public static function read(decodeState:VorbisDecodeState) {
        var mode = new Mode();
        mode.blockflag = decodeState.readBits(1) != 0;
        mode.windowtype = decodeState.readBits(16);
        mode.transformtype = decodeState.readBits(16);
        mode.mapping = decodeState.readBits(8);
        // Vorbis I defines exactly one window type and one transform type.
        if (mode.windowtype != 0 || mode.transformtype != 0) {
            throw new ReaderError(INVALID_SETUP);
        }
        return mode;
    }
}
|
60
Kha/Sources/kha/audio2/ogg/vorbis/data/Page.hx
Normal file
60
Kha/Sources/kha/audio2/ogg/vorbis/data/Page.hx
Normal file
@ -0,0 +1,60 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
import haxe.io.Bytes;
|
||||
import haxe.io.Input;
|
||||
import kha.audio2.ogg.vorbis.data.ReaderError.ReaderErrorType;
|
||||
import kha.audio2.ogg.vorbis.VorbisDecodeState;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
class Page {
    // Header-type byte of the most recently started page; see PageFlag masks.
    public var flag(default, null):Int;

    public function new () {

    }

    /**
     * Returns a new Page carrying the same flag bits as this one.
     */
    public function clone() {
        var copy = new Page();
        copy.flag = flag;
        return copy;
    }

    // startPage
    public function start(decodeState:VorbisDecodeState) {
        decodeState.capturePattern();
        startWithoutCapturePattern(decodeState);
    }

    // startPageNoCapturePattern
    // Parses an Ogg page header whose "OggS" capture pattern was already consumed.
    public function startWithoutCapturePattern(decodeState:VorbisDecodeState) {
        // stream structure version — must be 0
        var version = decodeState.readByte();
        if (version != 0) {
            throw new ReaderError(ReaderErrorType.INVALID_STREAM_STRUCTURE_VERSION, "" + version);
        }

        this.flag = decodeState.readByte();

        // 64-bit granule position, read as two 32-bit halves
        var granuleLow = decodeState.readInt32();
        var granuleHigh = decodeState.readInt32();

        // stream serial number -- vorbis doesn't interleave, so discard
        decodeState.readInt32();
        //if (this.serial != get32(f)) throw new ReaderError(ReaderErrorType.incorrectStreamSerialNumber);

        // page sequence number — discarded
        decodeState.readInt32();

        // CRC32 — read but not verified here
        decodeState.readInt32();

        // pageSegments — hand the granule halves to the segment-table setup
        decodeState.setup(granuleLow, granuleHigh);
    }
}
|
||||
|
||||
// Bit masks for the Ogg page header-type byte kept in Page.flag.
class PageFlag {
    static public inline var CONTINUED_PACKET = 1; // page continues a packet from the previous page
    static public inline var FIRST_PAGE = 2; // beginning-of-stream page
    static public inline var LAST_PAGE = 4; // end-of-stream page
}
|
18
Kha/Sources/kha/audio2/ogg/vorbis/data/ProbedPage.hx
Normal file
18
Kha/Sources/kha/audio2/ogg/vorbis/data/ProbedPage.hx
Normal file
@ -0,0 +1,18 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
|
||||
class ProbedPage
{
    public var pageStart:Int; // NOTE(review): presumably the byte offset of the page start — confirm against the seeking code
    public var pageEnd:Int; // NOTE(review): presumably the byte offset just past the page — confirm against the seeking code
    public var afterPreviousPageStart:Int;
    public var firstDecodedSample:Null<Int>; // null when not determined for this page
    public var lastDecodedSample:Null<Int>; // null when not determined for this page

    public function new() {
    }
}
|
53
Kha/Sources/kha/audio2/ogg/vorbis/data/ReaderError.hx
Normal file
53
Kha/Sources/kha/audio2/ogg/vorbis/data/ReaderError.hx
Normal file
@ -0,0 +1,53 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
import haxe.PosInfos;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
/**
 * Error value thrown while reading a vorbis stream; carries a category,
 * an optional human-readable message, and the throw site.
 */
class ReaderError
{
    public var type(default, null):ReaderErrorType;
    public var message(default, null):String;
    public var posInfos(default, null):PosInfos;

    /**
     * @param type     broad failure category
     * @param message  optional detail text (defaults to "")
     * @param posInfos call-site information, auto-filled by the Haxe compiler
     */
    public function new(type:ReaderErrorType, ?message:String = "", ?posInfos:PosInfos) {
        // Plain field captures; order is irrelevant.
        this.posInfos = posInfos;
        this.message = message;
        this.type = type;
    }
}
|
||||
|
||||
// Failure categories for ReaderError, mirroring the stb_vorbis error codes.
enum ReaderErrorType
{
    NEED_MORE_DATA; // not a real error

    INVALID_API_MIXING; // can't mix API modes
    OUTOFMEM; // not enough memory
    FEATURE_NOT_SUPPORTED; // uses floor 0
    TOO_MANY_CHANNELS; // STB_VORBIS_MAX_CHANNELS is too small
    FILE_OPEN_FAILURE; // fopen() failed
    SEEK_WITHOUT_LENGTH; // can't seek in unknown-length file

    UNEXPECTED_EOF; // file is truncated?
    SEEK_INVALID; // seek past EOF

    // decoding errors (corrupt/invalid input) -- you probably
    // don't care about the exact details of these

    // vorbis errors:
    INVALID_SETUP;
    INVALID_STREAM;

    // ogg errors:
    MISSING_CAPTURE_PATTERN;
    INVALID_STREAM_STRUCTURE_VERSION;
    CONTINUED_PACKET_FLAG_INVALID;
    INCORRECT_STREAM_SERIAL_NUMBER;
    INVALID_FIRST_PAGE;
    BAD_PACKET_TYPE;
    CANT_FIND_LAST_PAGE;
    SEEK_FAILED;

    OTHER; // catch-all for conditions not covered above
}
|
298
Kha/Sources/kha/audio2/ogg/vorbis/data/Residue.hx
Normal file
298
Kha/Sources/kha/audio2/ogg/vorbis/data/Residue.hx
Normal file
@ -0,0 +1,298 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
import haxe.ds.Vector;
|
||||
import haxe.io.Input;
|
||||
import kha.audio2.ogg.vorbis.VorbisDecodeState;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
class Residue
{
    public var begin(default, null):UInt; // uint32 — first coded offset of the residue vector
    public var end(default, null):UInt; // uint32 — one past the last coded offset
    public var partSize(default, null):UInt; // uint32 — samples per partition
    public var classifications(default, null):Int; // uint8 — number of partition classes
    public var classbook(default, null):Int; // uint8 — codebook index used to decode class numbers
    public var classdata(default, null):Vector<Vector<Int>>; //uint8 ** — precomputed class digits per classbook entry
    public var residueBooks(default, null):Vector<Vector<Int>>; //int16 (*)[8] — per class, per pass: codebook index or -1
    public var type(default, null):Int; // residue type (0, 1, or 2)

    public function new() {
    }

    /**
     * Parses one residue configuration from the setup header and precomputes
     * the classdata table so the decode loop avoids per-sample mod/divide.
     * @throws ReaderError(INVALID_SETUP) on residue type > 2 or a book index
     *         outside the codebook table
     */
    public static function read(decodeState:VorbisDecodeState, codebooks:Vector<Codebook>):Residue
    {
        var r = new Residue();
        r.type = decodeState.readBits(16);
        if (r.type > 2) {
            throw new ReaderError(INVALID_SETUP);
        }

        var residueCascade = new Vector<Int>(64);
        r.begin = decodeState.readBits(24);
        r.end = decodeState.readBits(24);
        r.partSize = decodeState.readBits(24)+1;
        var classifications = r.classifications = decodeState.readBits(6)+1;
        r.classbook = decodeState.readBits(8);

        // Cascade bitmap: 3 low bits, plus 5 high bits behind a flag bit.
        for (j in 0...r.classifications) {
            var highBits = 0;
            var lowBits = decodeState.readBits(3);
            if (decodeState.readBits(1) != 0){
                highBits = decodeState.readBits(5);
            }
            residueCascade[j] = highBits * 8 + lowBits;
        }

        // For each class, pick a codebook per pass wherever the cascade bit
        // is set; -1 marks "no book for this pass".
        r.residueBooks = new Vector(r.classifications);
        for (j in 0...r.classifications) {
            r.residueBooks[j] = new Vector(8);
            for (k in 0...8) {
                if (residueCascade[j] & (1 << k) != 0) {
                    r.residueBooks[j][k] = decodeState.readBits(8);
                    if (r.residueBooks[j][k] >= codebooks.length) {
                        throw new ReaderError(INVALID_SETUP);
                    }
                } else {
                    r.residueBooks[j][k] = -1;
                }
            }
        }

        // precompute the classifications[] array to avoid inner-loop mod/divide
        // call it 'classdata' since we already have classifications
        var el = codebooks[r.classbook].entries;
        var classwords = codebooks[r.classbook].dimensions;
        r.classdata = new Vector(el);

        // classdata[j] holds the base-`classifications` digits of entry j,
        // most significant digit first.
        for (j in 0...el) {
            var temp = j;
            var k = classwords;
            var cd = r.classdata[j] = new Vector(classwords);
            while (--k >= 0) {
                cd[k] = temp % classifications;
                temp = Std.int(temp / classifications);
            }
        }

        return r;
    }


    /**
     * Decodes one residue vector into residueBuffers (zeroed first for every
     * channel that is decoded). Returns early — leaving the remainder zero —
     * whenever the packet runs out (EOP / null result from a codebook).
     *
     * The first branch is the type-2 interleaved fast path (specialized for
     * ch == 2 and ch == 1); the trailing loop handles types 0/1.
     * NOTE(review): channelBuffers is accepted but never used in this body.
     */
    public function decode(decodeState:VorbisDecodeState, header:Header, residueBuffers:Vector<Vector<Float>>, ch:Int, n:Int, doNotDecode:Vector<Bool>, channelBuffers:Vector<Vector<Float>>)
    {
        // STB_VORBIS_DIVIDES_IN_RESIDUE = true
        var codebooks = header.codebooks;
        var classwords = codebooks[classbook].dimensions;
        var nRead = end - begin;
        var partSize = this.partSize;
        var partRead = Std.int(nRead / partSize);
        var classifications = new Vector<Int>(header.channel * partRead + 1); // + 1 is a hack for a possible crash in line 268 with some ogg files

        VorbisTools.stbProf(2);
        // Zero every buffer we are going to decode into.
        for (i in 0...ch) {
            if (!doNotDecode[i]) {
                var buffer = residueBuffers[i];
                for (j in 0...buffer.length) {
                    buffer[j] = 0;
                }
            }
        }

        if (type == 2 && ch != 1) {
            // If every channel is marked do-not-decode there is nothing to do.
            for (j in 0...ch) {
                if (!doNotDecode[j]) {
                    break;
                } else if (j == ch - 1) {
                    return;
                }
            }

            VorbisTools.stbProf(3);
            for (pass in 0...8) {
                var pcount = 0, classSet = 0; // NOTE(review): classSet is unused in this branch
                if (ch == 2) {
                    // Stereo fast path: channel interleave toggles on the low bit.
                    VorbisTools.stbProf(13);
                    while (pcount < partRead) {
                        var z = begin + pcount * partSize;
                        var cInter = (z & 1);
                        var pInter = z >> 1;
                        // Class numbers are only decoded on the first pass.
                        if (pass == 0) {
                            var c:Codebook = codebooks[classbook];
                            var q = decodeState.decode(c);
                            if (q == VorbisTools.EOP) {
                                return;
                            }
                            // Unpack q into per-partition class digits.
                            var i = classwords;
                            while (--i >= 0) {
                                classifications[i + pcount] = q % this.classifications;
                                q = Std.int(q / this.classifications);
                            }
                        }
                        VorbisTools.stbProf(5);
                        for (i in 0...classwords) {
                            if (pcount >= partRead) {
                                break;
                            }
                            var z = begin + pcount*partSize;
                            var c = classifications[pcount];
                            var b = residueBooks[c][pass];
                            if (b >= 0) {
                                var book = codebooks[b];
                                VorbisTools.stbProf(20); // accounts for X time
                                var result = book.decodeDeinterleaveRepeat(decodeState, residueBuffers, ch, cInter, pInter, n, partSize);
                                if (result == null) {
                                    return;
                                } else {
                                    cInter = result.cInter;
                                    pInter = result.pInter;
                                }
                                VorbisTools.stbProf(7);
                            } else {
                                // No book this pass: just advance the interleave cursor.
                                z += partSize;
                                cInter = z & 1;
                                pInter = z >> 1;
                            }
                            ++pcount;
                        }
                        VorbisTools.stbProf(8);
                    }
                } else if (ch == 1) {
                    // Mono fast path: cInter stays 0, pInter is the raw offset.
                    while (pcount < partRead) {
                        var z = begin + pcount*partSize;
                        var cInter = 0;
                        var pInter = z;
                        if (pass == 0) {
                            var c:Codebook = codebooks[classbook];
                            var q = decodeState.decode(c);
                            if (q == VorbisTools.EOP) return;

                            var i = classwords;
                            while (--i >= 0) {
                                classifications[i + pcount] = q % this.classifications;
                                q = Std.int(q / this.classifications);
                            }
                        }

                        for (i in 0...classwords) {
                            if (pcount >= partRead) {
                                break;
                            }
                            var z = begin + pcount * partSize;
                            var b = residueBooks[classifications[pcount]][pass];
                            if (b >= 0) {
                                var book:Codebook = codebooks[b];
                                VorbisTools.stbProf(22);
                                var result = book.decodeDeinterleaveRepeat(decodeState, residueBuffers, ch, cInter, pInter, n, partSize);
                                if (result == null) {
                                    return;
                                } else {
                                    cInter = result.cInter;
                                    pInter = result.pInter;
                                }
                                VorbisTools.stbProf(3);
                            } else {
                                z += partSize;
                                cInter = 0;
                                pInter = z;
                            }
                            ++pcount;
                        }
                    }
                } else {
                    // General interleave for ch > 2: cursor advances mod ch.
                    while (pcount < partRead) {
                        var z = begin + pcount * partSize;
                        var cInter = z % ch;
                        var pInter = Std.int(z / ch);

                        if (pass == 0) {
                            var c:Codebook = codebooks[classbook];
                            var q = decodeState.decode(c);
                            if (q == VorbisTools.EOP) {
                                return;
                            }

                            var i = classwords;
                            while (--i >= 0) {
                                classifications[i+pcount] = q % this.classifications;
                                q = Std.int(q / this.classifications);
                            }
                        }

                        for (i in 0...classwords) {
                            if (pcount >= partRead) {
                                break;
                            }
                            var z = begin + pcount * partSize;
                            var b = residueBooks[classifications[pcount]][pass];
                            if (b >= 0) {
                                var book = codebooks[b];
                                VorbisTools.stbProf(22);
                                var result = book.decodeDeinterleaveRepeat(decodeState, residueBuffers, ch, cInter, pInter, n, partSize);
                                if (result == null) {
                                    return;
                                } else {
                                    cInter = result.cInter;
                                    pInter = result.pInter;
                                }
                                VorbisTools.stbProf(3);
                            } else {
                                z += partSize;
                                cInter = z % ch;
                                pInter = Std.int(z / ch);
                            }
                            ++pcount;
                        }
                    }
                }
            }
            return;
        }
        VorbisTools.stbProf(9);

        // Residue types 0 and 1: decode each channel's partitions directly.
        for (pass in 0...8) {
            var pcount = 0;
            var classSet = 0; // NOTE(review): unused here as well
            while (pcount < partRead) {
                // Class numbers are decoded per channel on the first pass only.
                if (pass == 0) {
                    for (j in 0...ch) {
                        if (!doNotDecode[j]) {
                            var c:Codebook = codebooks[classbook];
                            var temp = decodeState.decode(c);
                            if (temp == VorbisTools.EOP) {
                                return;
                            }
                            var i = classwords;
                            while (--i >= 0) {
                                classifications[j * partRead + i + pcount] = temp % this.classifications;
                                temp = Std.int(temp / this.classifications);
                            }
                        }
                    }
                }
                for (i in 0...classwords) {
                    if (pcount >= partRead) {
                        break;
                    }
                    for (j in 0...ch) {
                        if (!doNotDecode[j]) {
                            var c = classifications[j * partRead + pcount];
                            var b = residueBooks[c][pass];
                            if (b >= 0) {
                                var target = residueBuffers[j];
                                var offset = begin + pcount * partSize;
                                var n = partSize; // shadows the parameter on purpose
                                var book = codebooks[b];
                                if (!book.residueDecode(decodeState, target, offset, n, type)) {
                                    return;
                                }
                            }
                        }
                    }
                    ++pcount;
                }
            }
        }
    }
}
|
15
Kha/Sources/kha/audio2/ogg/vorbis/data/Setting.hx
Normal file
15
Kha/Sources/kha/audio2/ogg/vorbis/data/Setting.hx
Normal file
@ -0,0 +1,15 @@
|
||||
package kha.audio2.ogg.vorbis.data;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
class Setting
{
    // Upper bound on accepted channels (mirrors STB_VORBIS_MAX_CHANNELS).
    static public inline var MAX_CHANNELS = 16;
    // NOTE(review): CRC window count for push-mode decoding — confirm against its usage site.
    static public inline var PUSHDATA_CRC_COUNT = 4;
    // Code length in bits covered by the accelerated huffman lookup.
    static public inline var FAST_HUFFMAN_LENGTH = 10;
    // One table entry per possible FAST_HUFFMAN_LENGTH-bit prefix.
    static public inline var FAST_HUFFMAN_TABLE_SIZE = (1 << FAST_HUFFMAN_LENGTH);
    // Mask reducing a bit window to a fast-table index.
    static public inline var FAST_HUFFMAN_TABLE_MASK = FAST_HUFFMAN_TABLE_SIZE - 1;

}
|
Reference in New Issue
Block a user