forked from LeenkxTeam/LNXSDK
Update Files
30	Kha/Sources/kha/audio2/Audio.hx	Normal file
@@ -0,0 +1,30 @@
package kha.audio2;

extern class Audio {
	/**
	 * The samples per second natively used by the target system.
	 */
	public static var samplesPerSecond: Int;

	/**
	 * Requests additional audio data.
	 * Beware: This is called from a separate audio thread on some targets.
	 * See kha.audio2.Audio1 for sample code.
	 */
	public static var audioCallback: kha.internal.IntBox->Buffer->Void;

	/**
	 * Similar to kha.audio1.Audio.stream, but only for hardware-accelerated audio playback.
	 * Expect this to return null and provide a pure software alternative.
	 * @param sound The music we want to play.
	 * @param loop If we want the music to loop, default = false.
	 * @return On success returns a valid AudioChannel object. Otherwise returns null.
	 */
	public static function stream(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel;

	/**
	 * Used in Kinc-based backends to untangle the audio thread from the garbage collector.
	 * Be very careful please.
	 */
	public static var disableGcInteractions: Bool;
}
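The doc comment on audioCallback points at kha.audio2.Audio1 (below) for sample code. As a smaller illustration, here is a minimal sketch of a hand-rolled callback following the same ring-buffer conventions Audio1.mix uses; the SineCallback class and its phase counter are illustrative only, not part of this commit:

class SineCallback {
	static var phase: Float = 0;

	public static function register(): Void {
		kha.audio2.Audio.audioCallback = fill;
	}

	// Writes a 440 Hz sine wave into the ring buffer, wrapping writeLocation
	// exactly the way Audio1.mix does below.
	static function fill(samplesBox: kha.internal.IntBox, buffer: kha.audio2.Buffer): Void {
		for (i in 0...samplesBox.value) {
			// interleaved stereo: even samples are left, odd samples are right
			buffer.data.set(buffer.writeLocation, Math.sin(phase));
			buffer.writeLocation += 1;
			if (buffer.writeLocation >= buffer.size) {
				buffer.writeLocation = 0; // wrap around the ring buffer
			}
			if (i % 2 == 1) {
				phase += 2 * Math.PI * 440 / buffer.samplesPerSecond;
			}
		}
	}
}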
197	Kha/Sources/kha/audio2/Audio1.hx	Normal file
@@ -0,0 +1,197 @@
package kha.audio2;

#if cpp
import sys.thread.Mutex;
#end
import haxe.ds.Vector;

class Audio1 {
	static inline var channelCount: Int = 32;
	static var soundChannels: Vector<AudioChannel>;
	static var streamChannels: Vector<StreamChannel>;

	static var internalSoundChannels: Vector<AudioChannel>;
	static var internalStreamChannels: Vector<StreamChannel>;
	static var sampleCache1: kha.arrays.Float32Array;
	static var sampleCache2: kha.arrays.Float32Array;
	static var lastAllocationCount: Int = 0;

	#if cpp
	static var mutex: Mutex;
	#end

	@:noCompletion
	public static function _init(): Void {
		#if cpp
		mutex = new Mutex();
		#end
		soundChannels = new Vector<AudioChannel>(channelCount);
		streamChannels = new Vector<StreamChannel>(channelCount);
		internalSoundChannels = new Vector<AudioChannel>(channelCount);
		internalStreamChannels = new Vector<StreamChannel>(channelCount);
		sampleCache1 = new kha.arrays.Float32Array(512);
		sampleCache2 = new kha.arrays.Float32Array(512);
		lastAllocationCount = 0;
		Audio.audioCallback = mix;
	}

	static inline function max(a: Float, b: Float): Float {
		return a > b ? a : b;
	}

	static inline function min(a: Float, b: Float): Float {
		return a < b ? a : b;
	}

	public static function mix(samplesBox: kha.internal.IntBox, buffer: Buffer): Void {
		var samples = samplesBox.value;
		if (sampleCache1.length < samples) {
			if (Audio.disableGcInteractions) {
				trace("Unexpected allocation request in audio thread.");
				for (i in 0...samples) {
					buffer.data.set(buffer.writeLocation, 0);
					buffer.writeLocation += 1;
					if (buffer.writeLocation >= buffer.size) {
						buffer.writeLocation = 0;
					}
				}
				lastAllocationCount = 0;
				Audio.disableGcInteractions = false;
				return;
			}
			sampleCache1 = new kha.arrays.Float32Array(samples * 2);
			sampleCache2 = new kha.arrays.Float32Array(samples * 2);
			lastAllocationCount = 0;
		}
		else {
			if (lastAllocationCount > 100) {
				Audio.disableGcInteractions = true;
			}
			else {
				lastAllocationCount += 1;
			}
		}

		for (i in 0...samples) {
			sampleCache2[i] = 0;
		}

		#if cpp
		mutex.acquire();
		#end
		for (i in 0...channelCount) {
			internalSoundChannels[i] = soundChannels[i];
		}
		for (i in 0...channelCount) {
			internalStreamChannels[i] = streamChannels[i];
		}
		#if cpp
		mutex.release();
		#end

		for (channel in internalSoundChannels) {
			if (channel == null || channel.finished)
				continue;
			channel.nextSamples(sampleCache1, samples, buffer.samplesPerSecond);
			for (i in 0...samples) {
				sampleCache2[i] += sampleCache1[i] * channel.volume;
			}
		}
		for (channel in internalStreamChannels) {
			if (channel == null || channel.finished)
				continue;
			channel.nextSamples(sampleCache1, samples, buffer.samplesPerSecond);
			for (i in 0...samples) {
				sampleCache2[i] += sampleCache1[i] * channel.volume;
			}
		}

		for (i in 0...samples) {
			buffer.data.set(buffer.writeLocation, max(min(sampleCache2[i], 1.0), -1.0));
			buffer.writeLocation += 1;
			if (buffer.writeLocation >= buffer.size) {
				buffer.writeLocation = 0;
			}
		}
	}

	public static function play(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel {
		var channel: kha.audio2.AudioChannel = null;
		if (Audio.samplesPerSecond != sound.sampleRate) {
			channel = new ResamplingAudioChannel(loop, sound.sampleRate);
		}
		else {
			#if sys_ios
			channel = new ResamplingAudioChannel(loop, sound.sampleRate);
			#else
			channel = new AudioChannel(loop);
			#end
		}
		channel.data = sound.uncompressedData;
		var foundChannel = false;

		#if cpp
		mutex.acquire();
		#end
		for (i in 0...channelCount) {
			if (soundChannels[i] == null || soundChannels[i].finished) {
				soundChannels[i] = channel;
				foundChannel = true;
				break;
			}
		}
		#if cpp
		mutex.release();
		#end

		return foundChannel ? channel : null;
	}

	public static function _playAgain(channel: kha.audio2.AudioChannel): Void {
		#if cpp
		mutex.acquire();
		#end
		for (i in 0...channelCount) {
			if (soundChannels[i] == channel) {
				soundChannels[i] = null;
			}
		}
		for (i in 0...channelCount) {
			if (soundChannels[i] == null || soundChannels[i].finished || soundChannels[i] == channel) {
				soundChannels[i] = channel;
				break;
			}
		}
		#if cpp
		mutex.release();
		#end
	}

	public static function stream(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel {
		{
			// try to use hardware accelerated audio decoding
			var hardwareChannel = Audio.stream(sound, loop);
			if (hardwareChannel != null)
				return hardwareChannel;
		}

		var channel: StreamChannel = new StreamChannel(sound.compressedData, loop);
		var foundChannel = false;

		#if cpp
		mutex.acquire();
		#end
		for (i in 0...channelCount) {
			if (streamChannels[i] == null || streamChannels[i].finished) {
				streamChannels[i] = channel;
				foundChannel = true;
				break;
			}
		}
		#if cpp
		mutex.release();
		#end

		return foundChannel ? channel : null;
	}
}
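A hypothetical usage sketch of this mixer: play() mixes a Sound's uncompressed data, stream() decodes Ogg on the fly and prefers the hardware path. It assumes `jump` and `music` are loaded kha.Sound instances and that the system has already called _init():

static function onAssetsLoaded(jump: kha.Sound, music: kha.Sound): Void {
	var sfx = kha.audio2.Audio1.play(jump);
	if (sfx != null) {
		sfx.volume = 0.5;
	}
	// null means no free channel was found (all 32 are busy)
	var bgm = kha.audio2.Audio1.stream(music, true);
	if (bgm == null) {
		trace("no free stream channel");
	}
}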
165	Kha/Sources/kha/audio2/AudioChannel.hx	Normal file
@@ -0,0 +1,165 @@
package kha.audio2;

import kha.arrays.Float32Array;

@:headerCode("#include <kinc/threads/atomic.h>")
@:headerClassCode("volatile float kinc_volume; volatile int kinc_position; volatile int kinc_paused; volatile int kinc_stopped; volatile int kinc_looping;")
class AudioChannel implements kha.audio1.AudioChannel {
	public var data: Float32Array = null;

	#if cpp
	var myVolume(get, set): Float;

	inline function get_myVolume(): Float {
		return untyped __cpp__("kinc_volume");
	}

	inline function set_myVolume(value: Float): Float {
		untyped __cpp__("KINC_ATOMIC_EXCHANGE_FLOAT(&kinc_volume, (float){0})", value);
		return value;
	}

	var myPosition(get, set): Int;

	inline function get_myPosition(): Int {
		return untyped __cpp__("kinc_position");
	}

	inline function set_myPosition(value: Int): Int {
		untyped __cpp__("KINC_ATOMIC_EXCHANGE_32(&kinc_position, {0})", value);
		return value;
	}

	var paused(get, set): Bool;

	inline function get_paused(): Bool {
		return untyped __cpp__("kinc_paused != 0");
	}

	inline function set_paused(value: Bool): Bool {
		untyped __cpp__("KINC_ATOMIC_EXCHANGE_32(&kinc_paused, {0} ? 1 : 0)", value);
		return value;
	}

	var stopped(get, set): Bool;

	inline function get_stopped(): Bool {
		return untyped __cpp__("kinc_stopped != 0");
	}

	inline function set_stopped(value: Bool): Bool {
		untyped __cpp__("KINC_ATOMIC_EXCHANGE_32(&kinc_stopped, {0} ? 1 : 0)", value);
		return value;
	}

	var looping(get, set): Bool;

	inline function get_looping(): Bool {
		return untyped __cpp__("kinc_looping != 0");
	}

	inline function set_looping(value: Bool): Bool {
		untyped __cpp__("KINC_ATOMIC_EXCHANGE_32(&kinc_looping, {0} ? 1 : 0)", value);
		return value;
	}
	#else
	var myVolume: Float;
	var myPosition: Int;
	var paused: Bool;
	var stopped: Bool;
	var looping: Bool;
	#end

	public function new(looping: Bool) {
		this.looping = looping;
		stopped = false;
		paused = false;
		myPosition = 0;
		myVolume = 1;
	}

	public function nextSamples(requestedSamples: Float32Array, requestedLength: Int, sampleRate: Int): Void {
		if (paused || stopped) {
			for (i in 0...requestedLength) {
				requestedSamples[i] = 0;
			}
			return;
		}

		var requestedSamplesIndex = 0;
		while (requestedSamplesIndex < requestedLength) {
			for (i in 0...min(data.length - myPosition, requestedLength - requestedSamplesIndex)) {
				requestedSamples[requestedSamplesIndex++] = data[myPosition++];
			}

			if (myPosition >= data.length) {
				myPosition = 0;
				if (!looping) {
					stopped = true;
					break;
				}
			}
		}

		while (requestedSamplesIndex < requestedLength) {
			requestedSamples[requestedSamplesIndex++] = 0;
		}
	}

	public function play(): Void {
		paused = false;
		stopped = false;
		kha.audio1.Audio._playAgain(this);
	}

	public function pause(): Void {
		paused = true;
	}

	public function stop(): Void {
		myPosition = 0;
		stopped = true;
	}

	public var length(get, null): Float; // Seconds

	function get_length(): Float {
		return data.length / kha.audio2.Audio.samplesPerSecond / 2; // 44.1 kHz in stereo
	}

	public var position(get, set): Float; // Seconds

	function get_position(): Float {
		return myPosition / kha.audio2.Audio.samplesPerSecond / 2;
	}

	function set_position(value: Float): Float {
		myPosition = Math.round(value * kha.audio2.Audio.samplesPerSecond * 2);
		myPosition = max(min(myPosition, data.length), 0);
		return value;
	}

	public var volume(get, set): Float;

	function get_volume(): Float {
		return myVolume;
	}

	function set_volume(value: Float): Float {
		return myVolume = value;
	}

	public var finished(get, null): Bool;

	function get_finished(): Bool {
		return stopped;
	}

	static inline function max(a: Int, b: Int) {
		return a > b ? a : b;
	}

	static inline function min(a: Int, b: Int) {
		return a < b ? a : b;
	}
}
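The point of the volatile/atomic property pattern above is that the audio thread reads these fields inside nextSamples while the main thread writes them through the property setters; on the C++ target each write is a single KINC_ATOMIC_EXCHANGE, so the reader never sees a torn value. A small hypothetical sketch of a main-thread caller relying on this:

// Fades a channel out from the main thread; `dt` is the frame delta in
// volume units. Safe without a lock because each `volume` assignment is
// one atomic exchange on the C++ target.
static function fadeOut(channel: kha.audio2.AudioChannel, dt: Float): Void {
	var v = channel.volume - dt; // plain volatile load
	channel.volume = v > 0 ? v : 0;
	if (channel.volume == 0) {
		channel.stop();
	}
}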
20	Kha/Sources/kha/audio2/Buffer.hx	Normal file
@@ -0,0 +1,20 @@
package kha.audio2;

class Buffer {
	public var channels: Int;
	public var samplesPerSecond: Int;

	public var data: kha.arrays.Float32Array;
	public var size: Int;
	public var readLocation: Int;
	public var writeLocation: Int;

	public function new(size: Int, channels: Int, samplesPerSecond: Int) {
		this.size = size;
		this.data = new kha.arrays.Float32Array(size);
		this.channels = channels;
		this.samplesPerSecond = samplesPerSecond;
		readLocation = 0;
		writeLocation = 0;
	}
}
137	Kha/Sources/kha/audio2/ResamplingAudioChannel.hx	Normal file
@@ -0,0 +1,137 @@
package kha.audio2;

import kha.arrays.Float32Array;

class ResamplingAudioChannel extends AudioChannel {
	public var sampleRate: Int;

	public function new(looping: Bool, sampleRate: Int) {
		super(looping);
		this.sampleRate = sampleRate;
	}

	public override function nextSamples(requestedSamples: Float32Array, requestedLength: Int, sampleRate: Int): Void {
		if (paused || stopped) {
			for (i in 0...requestedLength) {
				requestedSamples[i] = 0;
			}
			return;
		}

		var requestedSamplesIndex = 0;
		while (requestedSamplesIndex < requestedLength) {
			for (i in 0...min(sampleLength(sampleRate) - myPosition, requestedLength - requestedSamplesIndex)) {
				requestedSamples[requestedSamplesIndex++] = sample(myPosition++, sampleRate);
			}

			if (myPosition >= sampleLength(sampleRate)) {
				myPosition = 0;
				if (!looping) {
					stopped = true;
					break;
				}
			}
		}

		while (requestedSamplesIndex < requestedLength) {
			requestedSamples[requestedSamplesIndex++] = 0;
		}
	}

	inline function sample(position: Int, sampleRate: Int): Float {
		var even = position % 2 == 0;
		var factor = this.sampleRate / sampleRate;

		if (even) {
			position = Std.int(position / 2);
			var pos = factor * position;
			var pos1 = Math.floor(pos);
			var pos2 = Math.floor(pos + 1);
			pos1 *= 2;
			pos2 *= 2;

			var minimum = 0;
			var maximum = data.length - 1;
			maximum = maximum % 2 == 0 ? maximum : maximum - 1;

			var a = (pos1 < minimum || pos1 > maximum) ? 0 : data[pos1];
			var b = (pos2 < minimum || pos2 > maximum) ? 0 : data[pos2];
			return lerp(a, b, pos - Math.floor(pos));
		}
		else {
			position = Std.int(position / 2);
			var pos = factor * position;
			var pos1 = Math.floor(pos);
			var pos2 = Math.floor(pos + 1);
			pos1 = pos1 * 2 + 1;
			pos2 = pos2 * 2 + 1;

			var minimum = 1;
			var maximum = data.length - 1;
			maximum = maximum % 2 != 0 ? maximum : maximum - 1;

			var a = (pos1 < minimum || pos1 > maximum) ? 0 : data[pos1];
			var b = (pos2 < minimum || pos2 > maximum) ? 0 : data[pos2];
			return lerp(a, b, pos - Math.floor(pos));
		}
	}

	inline function lerp(v0: Float, v1: Float, t: Float) {
		return (1 - t) * v0 + t * v1;
	}

	inline function sampleLength(sampleRate: Int): Int {
		var value = Math.ceil(data.length * (sampleRate / this.sampleRate));
		return value % 2 == 0 ? value : value + 1;
	}

	public override function play(): Void {
		paused = false;
		stopped = false;
		kha.audio1.Audio._playAgain(this);
	}

	public override function pause(): Void {
		paused = true;
	}

	public override function stop(): Void {
		myPosition = 0;
		stopped = true;
	}

	override function get_length(): Float {
		return data.length / this.sampleRate / 2; // 44.1 kHz in stereo
	}

	override function get_position(): Float {
		return myPosition / kha.audio2.Audio.samplesPerSecond / 2;
	}

	override function set_position(value: Float): Float {
		var pos = Math.round(value * kha.audio2.Audio.samplesPerSecond * 2.0);
		pos = pos % 2 == 0 ? pos : pos + 1;
		myPosition = max(min(pos, sampleLength(kha.audio2.Audio.samplesPerSecond)), 0);
		return value;
	}

	override function get_volume(): Float {
		return myVolume;
	}

	override function set_volume(value: Float): Float {
		return myVolume = value;
	}

	override function get_finished(): Bool {
		return stopped;
	}

	static inline function max(a: Int, b: Int) {
		return a > b ? a : b;
	}

	static inline function min(a: Int, b: Int) {
		return a < b ? a : b;
	}
}
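The index math in sample() deserves a worked example: the data is interleaved stereo, so even output positions read left-channel source samples (even indices) and odd positions read right-channel ones; factor maps an output frame to a fractional source frame, which is then linearly interpolated. A standalone illustration (not part of the SDK), resampling 22050 Hz data to 44100 Hz output, so factor = 0.5:

// Output position 6 (left channel, frame 3) reads:
//   pos  = 0.5 * 3 = 1.5
//   pos1 = floor(1.5) * 2 = 2   (left sample of source frame 1)
//   pos2 = floor(2.5) * 2 = 4   (left sample of source frame 2)
//   result = lerp(data[2], data[4], 0.5)
static function lerpDemo(): Float {
	var data = [0.0, 0.0, 0.2, 0.0, 0.6, 0.0]; // 3 stereo frames, left channel ramps up
	return (1 - 0.5) * data[2] + 0.5 * data[4]; // = 0.4
}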
101	Kha/Sources/kha/audio2/StreamChannel.hx	Normal file
@@ -0,0 +1,101 @@
package kha.audio2;

import haxe.io.Bytes;
import haxe.io.BytesOutput;
import kha.audio2.ogg.vorbis.Reader;

#if (!cpp && !hl)
class StreamChannel implements kha.audio1.AudioChannel {
	#if (!kha_no_ogg)
	var reader: Reader;
	#end
	var atend: Bool = false;
	var loop: Bool;
	var myVolume: Float;
	var paused: Bool = false;

	public function new(data: Bytes, loop: Bool) {
		myVolume = 1;
		this.loop = loop;
		#if (!kha_no_ogg)
		reader = Reader.openFromBytes(data);
		#end
	}

	public function nextSamples(samples: kha.arrays.Float32Array, length: Int, sampleRate: Int): Void {
		if (paused) {
			for (i in 0...length) {
				samples[i] = 0;
			}
			return;
		}

		#if (!kha_no_ogg)
		var count = reader.read(samples, Std.int(length / 2), 2, sampleRate, true) * 2;
		if (count < length) {
			if (loop) {
				reader.currentMillisecond = 0;
			}
			else {
				atend = true;
			}
			for (i in count...length) {
				samples[i] = 0;
			}
		}
		#end
	}

	public function play(): Void {
		paused = false;
	}

	public function pause(): Void {
		paused = true;
	}

	public function stop(): Void {
		atend = true;
	}

	public var length(get, null): Float; // Seconds

	function get_length(): Float {
		#if (kha_no_ogg)
		return 0.0;
		#else
		return reader.totalMillisecond / 1000.0;
		#end
	}

	public var position(get, set): Float; // Seconds

	function get_position(): Float {
		#if (kha_no_ogg)
		return 0.0;
		#else
		return reader.currentMillisecond / 1000.0;
		#end
	}

	function set_position(value: Float): Float {
		return value;
	}

	public var volume(get, set): Float;

	function get_volume(): Float {
		return myVolume;
	}

	function set_volume(value: Float): Float {
		return myVolume = value;
	}

	public var finished(get, null): Bool;

	function get_finished(): Bool {
		return atend;
	}
}
#end
32	Kha/Sources/kha/audio2/ogg/tools/Crc32.hx	Normal file
@@ -0,0 +1,32 @@
package kha.audio2.ogg.tools;

import haxe.ds.Vector;

/**
 * ...
 * @author shohei909
 */
class Crc32 {
	static inline var POLY:UInt = 0x04c11db7;
	static var table:Vector<UInt>;

	public static function init() {
		if (table != null) {
			return;
		}

		table = new Vector(256);
		for (i in 0...256) {
			var s:UInt = ((i:UInt) << (24:UInt));
			for (j in 0...8) {
				s = (s << 1) ^ (s >= ((1:UInt) << 31) ? POLY : 0);
			}
			table[i] = s;
		}
	}

	public static inline function update(crc:UInt, byte:UInt):UInt {
		return (crc << 8) ^ table[byte ^ (crc >>> 24)];
	}
}
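This is a table-driven CRC-32 with the Ogg page-checksum polynomial 0x04c11db7: init() lazily builds the 256-entry table, update() consumes one byte. A minimal usage sketch (illustrative only), checksumming a byte range the way an Ogg page checksum is accumulated, with initial value 0 and no final xor:

static function crcOf(bytes: haxe.io.Bytes): UInt {
	Crc32.init(); // safe to call repeatedly; the table is built once
	var crc: UInt = 0;
	for (i in 0...bytes.length) {
		crc = Crc32.update(crc, bytes.get(i));
	}
	return crc;
}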
36	Kha/Sources/kha/audio2/ogg/tools/MathTools.hx	Normal file
@@ -0,0 +1,36 @@
package kha.audio2.ogg.tools;

/**
 * ...
 * @author shohei909
 */
class MathTools {
	public static inline function ilog(n:Int) {
		var log2_4 = [0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4];

		// 2 compares if n < 16, 3 compares otherwise (4 if signed or n > 1<<29)
		return if (n < (1 << 14)) {
			if (n < (1 << 4)) {
				0 + log2_4[n];
			} else if (n < (1 << 9)) {
				5 + log2_4[n >> 5];
			} else {
				10 + log2_4[n >> 10];
			}
		} else if (n < (1 << 24)) {
			if (n < (1 << 19)) {
				15 + log2_4[n >> 15];
			} else {
				20 + log2_4[n >> 20];
			}
		} else if (n < (1 << 29)) {
			25 + log2_4[n >> 25];
		} else if (n < (1 << 31)) {
			30 + log2_4[n >> 30];
		} else {
			0; // signed n returns 0
		}
	}
}
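ilog's contract (inherited from stb_vorbis) is: for n > 0 it returns floor(log2(n)) + 1, i.e. the number of bits needed to represent n, with the sign-bit case falling through to 0. A quick illustrative check:

static function ilogDemo(): Void {
	trace(MathTools.ilog(0));  // 0
	trace(MathTools.ilog(1));  // 1
	trace(MathTools.ilog(7));  // 3
	trace(MathTools.ilog(8));  // 4
	trace(MathTools.ilog(16)); // 5
}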
545	Kha/Sources/kha/audio2/ogg/tools/Mdct.hx	Normal file
@@ -0,0 +1,545 @@
package kha.audio2.ogg.tools;

import haxe.ds.Vector;

/**
 * modified discrete cosine transform
 * @author shohei909
 */
class Mdct {
	static public inline function inverseTransform(buffer:Vector<Float>, n:Int, a:Vector<Float>, b:Vector<Float>, c:Vector<Float>, bitReverse:Vector<Int>) {
		var n2 = n >> 1;
		var n4 = n >> 2;
		var n8 = n >> 3;
		// @OPTIMIZE: reduce register pressure by using fewer variables?
		//int save_point = temp_alloc_save(f);

		var buf2 = new Vector(n2);
		// twiddle factors

		// IMDCT algorithm from "The use of multirate filter banks for coding of high quality digital audio"
		// See notes about bugs in that paper in less-optimal implementation 'inverseMdct_old' after this function.

		// kernel from paper

		// merged:
		// copy and reflect spectral data
		// step 0

		// note that it turns out that the items added together during
		// this step are, in fact, being added to themselves (as reflected
		// by step 0). inexplicable inefficiency! this became obvious
		// once I combined the passes.

		// so there's a missing 'times 2' here (for adding X to itself).
		// this propagates through linearly to the end, where the numbers
		// are 1/2 too small, and need to be compensated for.

		{
			var dOffset = n2 - 2;
			var aaOffset = 0;
			var eOffset = 0;
			var eStopOffset = n2;
			while (eOffset != eStopOffset) {
				buf2[dOffset + 1] = (buffer[eOffset + 0] * a[aaOffset + 0] - buffer[eOffset + 2] * a[aaOffset + 1]);
				buf2[dOffset + 0] = (buffer[eOffset + 0] * a[aaOffset + 1] + buffer[eOffset + 2] * a[aaOffset + 0]);
				dOffset -= 2;
				aaOffset += 2;
				eOffset += 4;
			}

			eOffset = n2 - 3;
			while (dOffset >= 0) {
				buf2[dOffset + 1] = (-buffer[eOffset + 2] * a[aaOffset + 0] - -buffer[eOffset + 0] * a[aaOffset + 1]);
				buf2[dOffset + 0] = (-buffer[eOffset + 2] * a[aaOffset + 1] + -buffer[eOffset + 0] * a[aaOffset + 0]);
				dOffset -= 2;
				aaOffset += 2;
				eOffset -= 4;
			}
		}

		// now we use symbolic names for these, so that we can
		// possibly swap their meaning as we change which operations
		// are in place

		var u = buffer;
		var v = buf2;

		// step 2 (paper output is w, now u)
		// this could be in place, but the data ends up in the wrong
		// place... _somebody_'s got to swap it, so this is nominated
		{
			var aaOffset = n2 - 8;
			var eOffset0 = n4;
			var eOffset1 = 0;

			var dOffset0 = n4;
			var dOffset1 = 0;

			while (aaOffset >= 0) {
				var v41_21:Float = v[eOffset0 + 1] - v[eOffset1 + 1];
				var v40_20:Float = v[eOffset0 + 0] - v[eOffset1 + 0];
				u[dOffset0 + 1] = v[eOffset0 + 1] + v[eOffset1 + 1];
				u[dOffset0 + 0] = v[eOffset0 + 0] + v[eOffset1 + 0];
				u[dOffset1 + 1] = v41_21 * a[aaOffset + 4] - v40_20 * a[aaOffset + 5];
				u[dOffset1 + 0] = v40_20 * a[aaOffset + 4] + v41_21 * a[aaOffset + 5];

				v41_21 = v[eOffset0 + 3] - v[eOffset1 + 3];
				v40_20 = v[eOffset0 + 2] - v[eOffset1 + 2];
				u[dOffset0 + 3] = v[eOffset0 + 3] + v[eOffset1 + 3];
				u[dOffset0 + 2] = v[eOffset0 + 2] + v[eOffset1 + 2];
				u[dOffset1 + 3] = v41_21 * a[aaOffset + 0] - v40_20 * a[aaOffset + 1];
				u[dOffset1 + 2] = v40_20 * a[aaOffset + 0] + v41_21 * a[aaOffset + 1];

				aaOffset -= 8;

				dOffset0 += 4;
				dOffset1 += 4;
				eOffset0 += 4;
				eOffset1 += 4;
			}
		}

		// step 3
		var ld = MathTools.ilog(n) - 1; // ilog is off-by-one from normal definitions

		// optimized step 3:

		// the original step3 loop can be nested r inside s or s inside r;
		// it's written originally as s inside r, but this is dumb when r
		// iterates many times, and s few. So I have two copies of it and
		// switch between them halfway.

		// this is iteration 0 of step 3
		step3Iter0Loop(n >> 4, u, n2 - 1 - n4 * 0, -(n >> 3), a);
		step3Iter0Loop(n >> 4, u, n2 - 1 - n4 * 1, -(n >> 3), a);

		// this is iteration 1 of step 3
		step3InnerRLoop(n >> 5, u, n2 - 1 - n8 * 0, -(n >> 4), a, 16);
		step3InnerRLoop(n >> 5, u, n2 - 1 - n8 * 1, -(n >> 4), a, 16);
		step3InnerRLoop(n >> 5, u, n2 - 1 - n8 * 2, -(n >> 4), a, 16);
		step3InnerRLoop(n >> 5, u, n2 - 1 - n8 * 3, -(n >> 4), a, 16);

		for (l in 2...((ld - 3) >> 1)) {
			var k0 = n >> (l + 2);
			var k0_2 = k0 >> 1;
			var lim = 1 << (l + 1);
			for (i in 0...lim) {
				step3InnerRLoop(n >> (l + 4), u, n2 - 1 - k0 * i, -k0_2, a, 1 << (l + 3));
			}
		}

		for (l in ((ld - 3) >> 1)...(ld - 6)) {
			var k0 = n >> (l + 2);
			var k1 = 1 << (l + 3);
			var k0_2 = k0 >> 1;
			var rlim = n >> (l + 6);
			var lim = 1 << (l + 1);
			var aOffset = 0;
			var i_off = n2 - 1;
			var r = rlim + 1;
			while (--r > 0) {
				step3InnerSLoop(lim, u, i_off, -k0_2, a, aOffset, k1, k0);
				aOffset += k1 * 4;
				i_off -= 8;
			}
		}

		// iterations with count:
		// ld-6,-5,-4 all interleaved together
		// the big win comes from getting rid of needless flops
		// due to the constants on pass 5 & 4 being all 1 and 0;
		// combining them to be simultaneous to improve cache made little difference
		step3InnerSLoopLd654(n >> 5, u, n2 - 1, a, n);

		// output is u

		// step 4, 5, and 6
		// cannot be in-place because of step 5
		{
			// weirdly, I'd have thought reading sequentially and writing
			// erratically would have been better than vice-versa, but in
			// fact that's not what my testing showed. (That is, with
			// j = bitreverse(i), do you read i and write j, or read j and write i.)
			var brOffset = 0;
			var dOffset0 = n4 - 4; // v
			var dOffset1 = n2 - 4; // v

			while (dOffset0 >= 0) {
				var k4 = bitReverse[brOffset + 0];
				v[dOffset1 + 3] = u[k4 + 0];
				v[dOffset1 + 2] = u[k4 + 1];
				v[dOffset0 + 3] = u[k4 + 2];
				v[dOffset0 + 2] = u[k4 + 3];

				k4 = bitReverse[brOffset + 1];
				v[dOffset1 + 1] = u[k4 + 0];
				v[dOffset1 + 0] = u[k4 + 1];
				v[dOffset0 + 1] = u[k4 + 2];
				v[dOffset0 + 0] = u[k4 + 3];

				dOffset0 -= 4;
				dOffset1 -= 4;
				brOffset += 2;
			}
		}

		// (paper output is u, now v)

		// data must be in buf2
		//assert(v == buf2);

		// step 7 (paper output is v, now v)
		// this is now in place
		{
			var cOffset = 0;
			var dOffset = 0; // v
			var eOffset = n2 - 4; // v

			while (dOffset < eOffset) {
				var a02 = v[dOffset + 0] - v[eOffset + 2];
				var a11 = v[dOffset + 1] + v[eOffset + 3];

				var b0 = c[cOffset + 1] * a02 + c[cOffset + 0] * a11;
				var b1 = c[cOffset + 1] * a11 - c[cOffset + 0] * a02;

				var b2 = v[dOffset + 0] + v[eOffset + 2];
				var b3 = v[dOffset + 1] - v[eOffset + 3];

				v[dOffset + 0] = b2 + b0;
				v[dOffset + 1] = b3 + b1;
				v[eOffset + 2] = b2 - b0;
				v[eOffset + 3] = b1 - b3;

				a02 = v[dOffset + 2] - v[eOffset + 0];
				a11 = v[dOffset + 3] + v[eOffset + 1];

				b0 = c[cOffset + 3] * a02 + c[cOffset + 2] * a11;
				b1 = c[cOffset + 3] * a11 - c[cOffset + 2] * a02;

				b2 = v[dOffset + 2] + v[eOffset + 0];
				b3 = v[dOffset + 3] - v[eOffset + 1];

				v[dOffset + 2] = b2 + b0;
				v[dOffset + 3] = b3 + b1;
				v[eOffset + 0] = b2 - b0;
				v[eOffset + 1] = b1 - b3;

				cOffset += 4;
				dOffset += 4;
				eOffset -= 4;
			}
		}

		// data must be in buf2

		// step 8+decode (paper output is X, now buffer)
		// this generates pairs of data a la 8 and pushes them directly through
		// the decode kernel (pushing rather than pulling) to avoid having
		// to make another pass later

		// this cannot POSSIBLY be in place, so we refer to the buffers directly
		{
			var bOffset = n2 - 8; // b
			var eOffset = n2 - 8; // buf2
			var dOffset0 = 0; // buffer
			var dOffset1 = n2 - 4; // buffer
			var dOffset2 = n2; // buffer
			var dOffset3 = n - 4; // buffer

			while (eOffset >= 0) {
				var p3 = buf2[eOffset + 6] * b[bOffset + 7] - buf2[eOffset + 7] * b[bOffset + 6];
				var p2 = -buf2[eOffset + 6] * b[bOffset + 6] - buf2[eOffset + 7] * b[bOffset + 7];

				buffer[dOffset0 + 0] = p3;
				buffer[dOffset1 + 3] = -p3;
				buffer[dOffset2 + 0] = p2;
				buffer[dOffset3 + 3] = p2;

				var p1 = buf2[eOffset + 4] * b[bOffset + 5] - buf2[eOffset + 5] * b[bOffset + 4];
				var p0 = -buf2[eOffset + 4] * b[bOffset + 4] - buf2[eOffset + 5] * b[bOffset + 5];

				buffer[dOffset0 + 1] = p1;
				buffer[dOffset1 + 2] = -p1;
				buffer[dOffset2 + 1] = p0;
				buffer[dOffset3 + 2] = p0;

				p3 = buf2[eOffset + 2] * b[bOffset + 3] - buf2[eOffset + 3] * b[bOffset + 2];
				p2 = -buf2[eOffset + 2] * b[bOffset + 2] - buf2[eOffset + 3] * b[bOffset + 3];

				buffer[dOffset0 + 2] = p3;
				buffer[dOffset1 + 1] = -p3;
				buffer[dOffset2 + 2] = p2;
				buffer[dOffset3 + 1] = p2;

				p1 = buf2[eOffset + 0] * b[bOffset + 1] - buf2[eOffset + 1] * b[bOffset + 0];
				p0 = -buf2[eOffset + 0] * b[bOffset + 0] - buf2[eOffset + 1] * b[bOffset + 1];

				buffer[dOffset0 + 3] = p1;
				buffer[dOffset1 + 0] = -p1;
				buffer[dOffset2 + 3] = p0;
				buffer[dOffset3 + 0] = p0;

				bOffset -= 8;
				eOffset -= 8;
				dOffset0 += 4;
				dOffset2 += 4;
				dOffset1 -= 4;
				dOffset3 -= 4;
			}
		}
	}

	// the following were split out into separate functions while optimizing;
	// they could be pushed back up but eh. __forceinline showed no change;
	// they're probably already being inlined.
	static inline function step3Iter0Loop(n:Int, e:Vector<Float>, i_off:Int, k_off:Int, a:Vector<Float>) {
		var eeOffset0 = i_off; // e
		var eeOffset2 = i_off + k_off; // e
		var aOffset = 0;
		var i = (n >> 2) + 1;

		while (--i > 0) {
			var k00_20 = e[eeOffset0 + 0] - e[eeOffset2 + 0];
			var k01_21 = e[eeOffset0 - 1] - e[eeOffset2 - 1];

			e[eeOffset0 + 0] += e[eeOffset2 + 0];
			e[eeOffset0 - 1] += e[eeOffset2 - 1];
			e[eeOffset2 + 0] = k00_20 * a[aOffset + 0] - k01_21 * a[aOffset + 1];
			e[eeOffset2 - 1] = k01_21 * a[aOffset + 0] + k00_20 * a[aOffset + 1];
			aOffset += 8;

			k00_20 = e[eeOffset0 - 2] - e[eeOffset2 - 2];
			k01_21 = e[eeOffset0 - 3] - e[eeOffset2 - 3];
			e[eeOffset0 - 2] += e[eeOffset2 - 2];
			e[eeOffset0 - 3] += e[eeOffset2 - 3];
			e[eeOffset2 - 2] = k00_20 * a[aOffset + 0] - k01_21 * a[aOffset + 1];
			e[eeOffset2 - 3] = k01_21 * a[aOffset + 0] + k00_20 * a[aOffset + 1];
			aOffset += 8;

			k00_20 = e[eeOffset0 - 4] - e[eeOffset2 - 4];
			k01_21 = e[eeOffset0 - 5] - e[eeOffset2 - 5];
			e[eeOffset0 - 4] += e[eeOffset2 - 4];
			e[eeOffset0 - 5] += e[eeOffset2 - 5];
			e[eeOffset2 - 4] = k00_20 * a[aOffset + 0] - k01_21 * a[aOffset + 1];
			e[eeOffset2 - 5] = k01_21 * a[aOffset + 0] + k00_20 * a[aOffset + 1];
			aOffset += 8;

			k00_20 = e[eeOffset0 - 6] - e[eeOffset2 - 6];
			k01_21 = e[eeOffset0 - 7] - e[eeOffset2 - 7];
			e[eeOffset0 - 6] += e[eeOffset2 - 6];
			e[eeOffset0 - 7] += e[eeOffset2 - 7];
			e[eeOffset2 - 6] = k00_20 * a[aOffset + 0] - k01_21 * a[aOffset + 1];
			e[eeOffset2 - 7] = k01_21 * a[aOffset + 0] + k00_20 * a[aOffset + 1];
			aOffset += 8;
			eeOffset0 -= 8;
			eeOffset2 -= 8;
		}
	}

	static inline function step3InnerRLoop(lim:Int, e:Vector<Float>, d0:Int, k_off:Int, a:Vector<Float>, k1:Int) {
		var aOffset = 0;
		var eOffset0 = d0; // e
		var eOffset2 = d0 + k_off; // e
		var i = (lim >> 2) + 1;

		while (--i > 0) {
			var k00_20 = e[eOffset0 - 0] - e[eOffset2 - 0];
			var k01_21 = e[eOffset0 - 1] - e[eOffset2 - 1];
			e[eOffset0 - 0] += e[eOffset2 - 0];
			e[eOffset0 - 1] += e[eOffset2 - 1];
			e[eOffset2 - 0] = k00_20 * a[aOffset + 0] - k01_21 * a[aOffset + 1];
			e[eOffset2 - 1] = k01_21 * a[aOffset + 0] + k00_20 * a[aOffset + 1];

			aOffset += k1;

			k00_20 = e[eOffset0 - 2] - e[eOffset2 - 2];
			k01_21 = e[eOffset0 - 3] - e[eOffset2 - 3];
			e[eOffset0 - 2] += e[eOffset2 - 2];
			e[eOffset0 - 3] += e[eOffset2 - 3];
			e[eOffset2 - 2] = k00_20 * a[aOffset + 0] - k01_21 * a[aOffset + 1];
			e[eOffset2 - 3] = k01_21 * a[aOffset + 0] + k00_20 * a[aOffset + 1];

			aOffset += k1;

			k00_20 = e[eOffset0 - 4] - e[eOffset2 - 4];
			k01_21 = e[eOffset0 - 5] - e[eOffset2 - 5];
			e[eOffset0 - 4] += e[eOffset2 - 4];
			e[eOffset0 - 5] += e[eOffset2 - 5];
			e[eOffset2 - 4] = k00_20 * a[aOffset + 0] - k01_21 * a[aOffset + 1];
			e[eOffset2 - 5] = k01_21 * a[aOffset + 0] + k00_20 * a[aOffset + 1];

			aOffset += k1;

			k00_20 = e[eOffset0 - 6] - e[eOffset2 - 6];
			k01_21 = e[eOffset0 - 7] - e[eOffset2 - 7];
			e[eOffset0 - 6] += e[eOffset2 - 6];
			e[eOffset0 - 7] += e[eOffset2 - 7];
			e[eOffset2 - 6] = k00_20 * a[aOffset + 0] - k01_21 * a[aOffset + 1];
			e[eOffset2 - 7] = k01_21 * a[aOffset + 0] + k00_20 * a[aOffset + 1];

			eOffset0 -= 8;
			eOffset2 -= 8;

			aOffset += k1;
		}
	}

	static inline function step3InnerSLoop(n:Int, e:Vector<Float>, i_off:Int, k_off:Int, a:Vector<Float>, aOffset0:Int, aOffset1:Int, k0:Int) {
		var A0 = a[aOffset0];
		var A1 = a[aOffset0 + 1];
		var A2 = a[aOffset0 + aOffset1];
		var A3 = a[aOffset0 + aOffset1 + 1];
		var A4 = a[aOffset0 + aOffset1 * 2 + 0];
		var A5 = a[aOffset0 + aOffset1 * 2 + 1];
		var A6 = a[aOffset0 + aOffset1 * 3 + 0];
		var A7 = a[aOffset0 + aOffset1 * 3 + 1];

		var eeOffset0 = i_off; // e
		var eeOffset2 = i_off + k_off; // e
		var i = n + 1;
		while (--i > 0) {
			var k00 = e[eeOffset0 + 0] - e[eeOffset2 + 0];
			var k11 = e[eeOffset0 - 1] - e[eeOffset2 - 1];
			e[eeOffset0 + 0] = e[eeOffset0 + 0] + e[eeOffset2 + 0];
			e[eeOffset0 - 1] = e[eeOffset0 - 1] + e[eeOffset2 - 1];
			e[eeOffset2 + 0] = k00 * A0 - k11 * A1;
			e[eeOffset2 - 1] = k11 * A0 + k00 * A1;

			k00 = e[eeOffset0 - 2] - e[eeOffset2 - 2];
			k11 = e[eeOffset0 - 3] - e[eeOffset2 - 3];
			e[eeOffset0 - 2] = e[eeOffset0 - 2] + e[eeOffset2 - 2];
			e[eeOffset0 - 3] = e[eeOffset0 - 3] + e[eeOffset2 - 3];
			e[eeOffset2 - 2] = k00 * A2 - k11 * A3;
			e[eeOffset2 - 3] = k11 * A2 + k00 * A3;

			k00 = e[eeOffset0 - 4] - e[eeOffset2 - 4];
			k11 = e[eeOffset0 - 5] - e[eeOffset2 - 5];
			e[eeOffset0 - 4] = e[eeOffset0 - 4] + e[eeOffset2 - 4];
			e[eeOffset0 - 5] = e[eeOffset0 - 5] + e[eeOffset2 - 5];
			e[eeOffset2 - 4] = k00 * A4 - k11 * A5;
			e[eeOffset2 - 5] = k11 * A4 + k00 * A5;

			k00 = e[eeOffset0 - 6] - e[eeOffset2 - 6];
			k11 = e[eeOffset0 - 7] - e[eeOffset2 - 7];
			e[eeOffset0 - 6] = e[eeOffset0 - 6] + e[eeOffset2 - 6];
			e[eeOffset0 - 7] = e[eeOffset0 - 7] + e[eeOffset2 - 7];
			e[eeOffset2 - 6] = k00 * A6 - k11 * A7;
			e[eeOffset2 - 7] = k11 * A6 + k00 * A7;

			eeOffset0 -= k0;
			eeOffset2 -= k0;
		}
	}

	static inline function iter54(e:Vector<Float>, zOffset:Int) {
		var t0 = e[zOffset + 0];
		var t1 = e[zOffset - 4];
		var k00 = t0 - t1;
		var y0 = t0 + t1;

		t0 = e[zOffset - 2];
		t1 = e[zOffset - 6];
		var y2 = t0 + t1;
		var k22 = t0 - t1;

		e[zOffset - 0] = y0 + y2; // z0 + z4 + z2 + z6
		e[zOffset - 2] = y0 - y2; // z0 + z4 - z2 - z6

		// done with y0,y2

		var k33 = e[zOffset - 3] - e[zOffset - 7];

		e[zOffset - 4] = k00 + k33; // z0 - z4 + z3 - z7
		e[zOffset - 6] = k00 - k33; // z0 - z4 - z3 + z7

		// done with k33

		t0 = e[zOffset - 1];
		t1 = e[zOffset - 5];
		var k11 = t0 - t1;
		var y1 = t0 + t1;
		var y3 = e[zOffset - 3] + e[zOffset - 7];

		e[zOffset - 1] = y1 + y3; // z1 + z5 + z3 + z7
		e[zOffset - 3] = y1 - y3; // z1 + z5 - z3 - z7
		e[zOffset - 5] = k11 - k22; // z1 - z5 + z2 - z6
		e[zOffset - 7] = k11 + k22; // z1 - z5 - z2 + z6
	}

	static inline function step3InnerSLoopLd654(n:Int, e:Vector<Float>, i_off:Int, a:Vector<Float>, baseN:Int) {
		var A2 = a[baseN >> 3];
		var zOffset = i_off; // e
		var baseOffset = i_off - 16 * n; // e

		while (zOffset > baseOffset) {
			var t0 = e[zOffset];
			var t1 = e[zOffset - 8];
			e[zOffset - 8] = t0 - t1;
			e[zOffset - 0] = t0 + t1;

			t0 = e[zOffset - 1];
			t1 = e[zOffset - 9];
			e[zOffset - 9] = t0 - t1;
			e[zOffset - 1] = t0 + t1;

			t0 = e[zOffset - 2];
			t1 = e[zOffset - 10];
			var k00 = t0 - t1;
			e[zOffset - 2] = t0 + t1;

			t0 = e[zOffset - 3];
			t1 = e[zOffset - 11];
			var k11 = t0 - t1;
			e[zOffset - 3] = t0 + t1;

			e[zOffset - 10] = (k00 + k11) * A2;
			e[zOffset - 11] = (k11 - k00) * A2;

			t0 = e[zOffset - 4];
			t1 = e[zOffset - 12];
			k00 = t1 - t0; // reverse to avoid a unary negation
			e[zOffset - 4] = t0 + t1;

			t0 = e[zOffset - 5];
			t1 = e[zOffset - 13];
			k11 = t0 - t1;
			e[zOffset - 5] = t0 + t1;

			e[zOffset - 12] = k11;
			e[zOffset - 13] = k00;

			t0 = e[zOffset - 6];
			t1 = e[zOffset - 14];
			k00 = t1 - t0; // reverse to avoid a unary negation
			e[zOffset - 6] = t0 + t1;

			t0 = e[zOffset - 7];
			t1 = e[zOffset - 15];
			k11 = t0 - t1;
			e[zOffset - 7] = t0 + t1;

			e[zOffset - 14] = (k00 + k11) * A2;
			e[zOffset - 15] = (k00 - k11) * A2;

			iter54(e, zOffset);
			iter54(e, zOffset - 8);
			zOffset -= 16;
		}
	}
}
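For reference, a sketch of the transform this routine inverts, under the usual MDCT convention (normalization varies between papers, which matches the code's own note that its output comes out a factor of 2 small and is compensated elsewhere). Here the code's n is the full window length 2N, so n2 coefficients enter and n time-domain samples leave:

\[ X_k = \sum_{m=0}^{2N-1} x_m \cos\!\left[\frac{\pi}{N}\left(m + \frac{1}{2} + \frac{N}{2}\right)\left(k + \frac{1}{2}\right)\right], \qquad k = 0,\dots,N-1 \]

\[ x_m = \frac{1}{N} \sum_{k=0}^{N-1} X_k \cos\!\left[\frac{\pi}{N}\left(m + \frac{1}{2} + \frac{N}{2}\right)\left(k + \frac{1}{2}\right)\right], \qquad m = 0,\dots,2N-1 \]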
160	Kha/Sources/kha/audio2/ogg/vorbis/Reader.hx	Normal file
@@ -0,0 +1,160 @@
package kha.audio2.ogg.vorbis;

import haxe.io.BytesOutput;
import haxe.io.Output;
import haxe.io.StringInput;
import kha.audio2.ogg.tools.Mdct;
import kha.audio2.ogg.vorbis.data.Floor;
import kha.audio2.ogg.vorbis.data.Mapping;
import kha.audio2.ogg.vorbis.data.Mode;
import kha.audio2.ogg.vorbis.data.Header;
import kha.audio2.ogg.vorbis.VorbisDecodeState;
import haxe.ds.Vector;
import haxe.io.Bytes;
import haxe.io.BytesInput;
import haxe.io.Eof;
import haxe.io.Input;
import haxe.PosInfos;

#if sys
import sys.FileSystem;
import sys.io.File;
import sys.io.FileInput;
#end

/**
 * public domain ogg reader.
 * @author shohei909
 */
class Reader {
	public var decoder(default, null):VorbisDecoder;

	public var header(get, never):Header;

	function get_header():Header {
		return decoder.header;
	}

	public var totalSample(get, never):Int;

	function get_totalSample():Int {
		return decoder.totalSample;
	}

	public var totalMillisecond(get, never):Float;

	function get_totalMillisecond():Float {
		return sampleToMillisecond(decoder.totalSample);
	}

	public var currentSample(get, set):Int;

	function get_currentSample():Int {
		return decoder.currentSample;
	}

	function set_currentSample(value:Int):Int {
		decoder.seek(seekFunc, inputLength, value);
		return decoder.currentSample;
	}

	public var currentMillisecond(get, set):Float;

	function get_currentMillisecond():Float {
		return sampleToMillisecond(currentSample);
	}

	function set_currentMillisecond(value:Float):Float {
		currentSample = millisecondToSample(value);
		return currentMillisecond;
	}

	public var loopStart:Null<Int>;
	public var loopLength:Null<Int>;

	var seekFunc:Int->Void;
	var inputLength:Int;

	function new(input:Input, seekFunc:Int->Void, inputLength:Int) {
		this.seekFunc = seekFunc;
		this.inputLength = inputLength;
		decoder = VorbisDecoder.start(input);
		decoder.setupSampleNumber(seekFunc, inputLength);
		loopStart = header.comment.loopStart;
		loopLength = header.comment.loopLength;
	}

	public static function openFromBytes(bytes:Bytes) {
		var input = new BytesInput(bytes);
		return new Reader(input, seekBytes.bind(input), bytes.length);
	}

	static function seekBytes(bytes:BytesInput, pos:Int) {
		bytes.position = pos;
	}

	#if sys
	public static function openFromFile(fileName:String):Reader {
		var file = File.read(fileName, true);
		var stat = FileSystem.stat(fileName);
		return new Reader(file, file.seek.bind(_, SeekBegin), stat.size);
	}
	#end

	public static function readAll(bytes:Bytes, output:Output, useFloat:Bool = false):Header {
		var input = new BytesInput(bytes);
		var decoder = VorbisDecoder.start(input);
		decoder.setupSampleNumber(seekBytes.bind(input), bytes.length);
		var header = decoder.header;
		var count = 0;
		var bufferSize = 4096;
		var buffer = new kha.arrays.Float32Array(bufferSize * header.channel);
		while (true) {
			var n = decoder.read(buffer, bufferSize, header.channel, header.sampleRate, useFloat);
			for (i in 0...n * header.channel) {
				output.writeFloat(buffer[i]);
			}
			if (n == 0) {
				break;
			}
			count += n;
		}
		return decoder.header;
	}

	public function read(output:kha.arrays.Float32Array, ?samples:Int, ?channels:Int, ?sampleRate:Int, useFloat:Bool = false) {
		decoder.ensurePosition(seekFunc);

		if (samples == null) {
			samples = decoder.totalSample;
		}
		if (channels == null) {
			channels = header.channel;
		}
		if (sampleRate == null) {
			sampleRate = header.sampleRate;
		}
		return decoder.read(output, samples, channels, sampleRate, useFloat);
	}

	public function clone():Reader {
		var reader = Type.createEmptyInstance(Reader);
		reader.seekFunc = seekFunc;
		reader.inputLength = inputLength;
		reader.decoder = decoder.clone(seekFunc);
		reader.loopStart = loopStart;
		reader.loopLength = loopLength;
		return reader;
	}

	public inline function sampleToMillisecond(samples:Int) {
		return samples / header.sampleRate * 1000;
	}

	public inline function millisecondToSample(milliseconds:Float) {
		return Math.floor(milliseconds / 1000 * header.sampleRate);
	}
}

private typedef InitData = {
	input:Input,
	seekFunc:Int->Void,
	inputLength:Int,
}
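A minimal usage sketch, mirroring how StreamChannel above drives the Reader: open from compressed bytes, then pull interleaved stereo floats at the desired rate. The 512-frame chunk size and 44100 Hz rate are arbitrary choices for the example:

static function decodeChunk(ogg: haxe.io.Bytes): kha.arrays.Float32Array {
	var reader = Reader.openFromBytes(ogg);
	var out = new kha.arrays.Float32Array(512 * 2);
	// read() returns the number of frames decoded; fewer than requested
	// means the stream ended
	var framesRead = reader.read(out, 512, 2, 44100, true);
	trace('decoded $framesRead frames of ${reader.totalMillisecond} ms total');
	return out;
}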
857	Kha/Sources/kha/audio2/ogg/vorbis/VorbisDecodeState.hx	Normal file
@@ -0,0 +1,857 @@
|
||||
package kha.audio2.ogg.vorbis;
|
||||
|
||||
import haxe.ds.Vector;
|
||||
import haxe.Int64;
|
||||
import haxe.io.Bytes;
|
||||
import haxe.io.Eof;
|
||||
import haxe.io.Input;
|
||||
import haxe.io.Output;
|
||||
import kha.audio2.ogg.tools.Crc32;
|
||||
import kha.audio2.ogg.tools.MathTools;
|
||||
import kha.audio2.ogg.vorbis.data.Codebook;
|
||||
import kha.audio2.ogg.vorbis.data.Floor.Floor1;
|
||||
import kha.audio2.ogg.vorbis.data.Header;
|
||||
import kha.audio2.ogg.vorbis.data.Mode;
|
||||
import kha.audio2.ogg.vorbis.data.Page;
|
||||
import kha.audio2.ogg.vorbis.data.ProbedPage;
|
||||
import kha.audio2.ogg.vorbis.data.ReaderError;
|
||||
import kha.audio2.ogg.vorbis.data.Page;
|
||||
import kha.audio2.ogg.vorbis.data.Residue;
|
||||
import kha.audio2.ogg.vorbis.data.Setting;
|
||||
import kha.audio2.ogg.vorbis.VorbisDecoder.DecodeInitialResult;
|
||||
|
||||
/**
|
||||
* ...
|
||||
* @author shohei909
|
||||
*/
|
||||
class VorbisDecodeState
|
||||
{
|
||||
public static inline var INVALID_BITS = -1;
|
||||
|
||||
public var page(default, null):Page;
|
||||
public var eof(default, null):Bool;
|
||||
public var pFirst(default, null):ProbedPage;
|
||||
public var pLast(default, null):ProbedPage;
|
||||
public var validBits(default, null):Int = 0;
|
||||
public var inputPosition(default, null):Int;
|
||||
public var input(default, null):Input;
|
||||
public var discardSamplesDeferred:Int;
|
||||
public var segments(default, null):Vector<Int>;
|
||||
public var bytesInSeg:Int = 0; // uint8
|
||||
|
||||
// decode buffer
|
||||
public var channelBuffers:Vector<Vector<Float>>; //var *[STB_VORBIS_MAX_CHANNELS];
|
||||
public var channelBufferStart:Int;
|
||||
public var channelBufferEnd:Int;
|
||||
public var currentSample(default, null):Int;
|
||||
|
||||
public var previousWindow:Vector<Vector<Float>>; //var *[STB_VORBIS_MAX_CHANNELS];
|
||||
public var previousLength:Int;
|
||||
public var finalY:Vector<Array<Int>>; // [STB_VORBIS_MAX_CHANNELS];
|
||||
|
||||
|
||||
var firstDecode:Bool = false;
|
||||
var nextSeg:Int = 0;
|
||||
|
||||
var acc:UInt;
|
||||
var lastSeg:Bool; // flag that we're on the last decodeState
|
||||
var lastSegWhich:Int; // what was the decodeState number of the l1ast seg?
|
||||
|
||||
var endSegWithKnownLoc:Int;
|
||||
var knownLocForPacket:Int;
|
||||
|
||||
var error:ReaderError;
|
||||
|
||||
var currentLoc:Int; //uint32 sample location of next frame to decode
|
||||
var currentLocValid:Int;
|
||||
|
||||
var firstAudioPageOffset:UInt;
|
||||
|
||||
public function new(input:Input)
|
||||
{
|
||||
this.input = input;
|
||||
inputPosition = 0;
|
||||
page = new Page();
|
||||
Crc32.init();
|
||||
}
|
||||
|
||||
public function setup(loc0:Int, loc1:Int) {
|
||||
var segmentCount = readByte();
|
||||
this.segments = read(segmentCount);
|
||||
|
||||
// assume we Don't_ know any the sample position of any segments
|
||||
this.endSegWithKnownLoc = -2;
|
||||
if (loc0 != 0xFFFFFFFF || loc1 != 0xFFFFFFFF) {
|
||||
var i:Int = segmentCount - 1;
|
||||
while (i >= 0) {
|
||||
if (segments.get(i) < 255) {
|
||||
break;
|
||||
}
|
||||
if (i >= 0) {
|
||||
this.endSegWithKnownLoc = i;
|
||||
this.knownLocForPacket = loc0;
|
||||
}
|
||||
i--;
|
||||
}
|
||||
}
|
||||
|
||||
if (firstDecode) {
|
||||
var i:Int = 0;
|
||||
var len:Int = 0;
|
||||
var p = new ProbedPage();
|
||||
|
||||
for (i in 0...segmentCount) {
|
||||
len += segments.get(i);
|
||||
}
|
||||
len += 27 + segmentCount;
|
||||
|
||||
p.pageStart = firstAudioPageOffset;
|
||||
p.pageEnd = p.pageStart + len;
|
||||
p.firstDecodedSample = 0;
|
||||
p.lastDecodedSample = loc0;
|
||||
pFirst = p;
|
||||
}
|
||||
|
||||
nextSeg = 0;
|
||||
}
|
||||
|
||||
public function clone(seekFunc:Int->Void)
|
||||
{
|
||||
var state = Type.createEmptyInstance(VorbisDecodeState);
|
||||
|
||||
seekFunc(inputPosition);
|
||||
state.input = input;
|
||||
|
||||
// primitive
|
||||
state.eof = eof;
|
||||
state.validBits = validBits;
|
||||
state.discardSamplesDeferred = discardSamplesDeferred;
|
||||
state.firstDecode = firstDecode;
|
||||
state.nextSeg = nextSeg;
|
||||
state.bytesInSeg = bytesInSeg;
|
||||
state.acc = state.acc;
|
||||
state.lastSeg = lastSeg;
|
||||
state.lastSegWhich = lastSegWhich;
|
||||
state.currentLoc = currentLoc;
|
||||
state.currentLocValid = currentLocValid;
|
||||
state.inputPosition = inputPosition;
|
||||
state.firstAudioPageOffset = firstAudioPageOffset;
|
||||
|
||||
// sharrow copy
|
||||
state.error = error;
|
||||
state.segments = segments;
|
||||
state.pFirst = pFirst;
|
||||
state.pLast = pLast;
|
||||
|
||||
// deep copy
|
||||
state.page = page.clone();
|
||||
|
||||
return state;
|
||||
}
|
||||
|
||||
|
||||
// nextSegment
|
||||
public function next():Int {
|
||||
if (lastSeg) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (nextSeg == -1) {
|
||||
lastSegWhich = segments.length - 1; // in case startPage fails
|
||||
|
||||
try {
|
||||
page.start(this);
|
||||
} catch(e:ReaderError) {
|
||||
lastSeg = true;
|
||||
error = e;
|
||||
return 0;
|
||||
}
|
||||
|
||||
if ((page.flag & PageFlag.CONTINUED_PACKET) == 0) {
|
||||
throw new ReaderError(ReaderErrorType.CONTINUED_PACKET_FLAG_INVALID);
|
||||
}
|
||||
}
|
||||
|
||||
var len = segments.get(nextSeg++);
|
||||
if (len < 255) {
|
||||
lastSeg = true;
|
||||
lastSegWhich = nextSeg - 1;
|
||||
}
|
||||
if (nextSeg >= segments.length) {
|
||||
nextSeg = -1;
|
||||
}
|
||||
|
||||
VorbisTools.assert(bytesInSeg == 0);
|
||||
bytesInSeg = len;
|
||||
return len;
|
||||
}
|
||||
|
||||
public function startPacket() {
|
||||
while (nextSeg == -1) {
|
||||
page.start(this);
|
||||
if ((page.flag & PageFlag.CONTINUED_PACKET) != 0) {
|
||||
throw new ReaderError(ReaderErrorType.MISSING_CAPTURE_PATTERN);
|
||||
}
|
||||
}
|
||||
|
||||
lastSeg = false;
|
||||
validBits = 0;
|
||||
bytesInSeg = 0;
|
||||
}
|
||||
|
||||
public function maybeStartPacket():Bool
|
||||
{
|
||||
if (nextSeg == -1) {
|
||||
var eof = false;
|
||||
var x = try {
|
||||
readByte();
|
||||
} catch (e:Eof) {
|
||||
eof = true;
|
||||
0;
|
||||
}
|
||||
|
||||
if (eof) {
|
||||
return false; // EOF at page boundary is not an error!
|
||||
}
|
||||
|
||||
if (x != 0x4f || readByte() != 0x67 || readByte() != 0x67 || readByte() != 0x53) {
|
||||
throw new ReaderError(ReaderErrorType.MISSING_CAPTURE_PATTERN);
|
||||
}
|
||||
|
||||
page.startWithoutCapturePattern(this);
|
||||
}
|
||||
|
||||
startPacket();
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// public inline function readBits(n:Int):Int
|
||||
public function readBits(n:Int):Int // Kha: reduce output size
|
||||
{
|
||||
if (validBits < 0) {
|
||||
return 0;
|
||||
} else if (validBits < n) {
|
||||
if (n > 24) {
|
||||
// the accumulator technique below would not work correctly in this case
|
||||
return readBits(24) + ((readBits(n - 24) << 24));
|
||||
} else {
|
||||
if (validBits == 0) {
|
||||
acc = 0;
|
||||
}
|
||||
|
||||
do {
|
||||
if (bytesInSeg == 0 && (lastSeg || next() == 0)) {
|
||||
validBits = INVALID_BITS;
|
||||
break;
|
||||
} else {
|
||||
bytesInSeg--;
|
||||
acc += (readByte() << validBits);
|
||||
validBits += 8;
|
||||
}
|
||||
} while (validBits < n);
|
||||
|
||||
if (validBits < 0) {
|
||||
return 0;
|
||||
} else {
|
||||
var z = acc & ((1 << n) - 1);
|
||||
acc >>>= n;
|
||||
validBits -= n;
|
||||
return z;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
var z = acc & ((1 << n) - 1);
|
||||
acc >>>= n;
|
||||
validBits -= n;
|
||||
return z;
|
||||
}
|
||||
}
|
||||
inline function readPacketRaw():Int {
|
||||
return if (bytesInSeg == 0 && (lastSeg || next() == 0)) { // CLANG!
|
||||
VorbisTools.EOP;
|
||||
} else {
|
||||
//VorbisTools.assert(bytesInSeg > 0);
|
||||
bytesInSeg--;
|
||||
readByte();
|
||||
}
|
||||
}
|
||||
|
||||
public inline function readPacket():Int
|
||||
{
|
||||
var x = readPacketRaw();
|
||||
validBits = 0;
|
||||
return x;
|
||||
}
|
||||
|
||||
public inline function flushPacket():Void {
|
||||
while (bytesInSeg != 0 || (!lastSeg && next() != 0)) {
|
||||
bytesInSeg--;
|
||||
readByte();
|
||||
}
|
||||
}
|
||||
|
||||
public inline function vorbisValidate() {
|
||||
var header = Bytes.alloc(6);
|
||||
for (i in 0...6) {
|
||||
header.set(i, readPacket());
|
||||
}
|
||||
if (header.toString() != "vorbis") {
|
||||
throw new ReaderError(ReaderErrorType.INVALID_SETUP, "vorbis header");
|
||||
}
|
||||
}
|
||||
|
||||
public function firstPageValidate()
|
||||
{
|
||||
if (segments.length != 1) {
|
||||
throw new ReaderError(INVALID_FIRST_PAGE, "segmentCount");
|
||||
}
|
||||
if (segments.get(0) != 30) {
|
||||
throw new ReaderError(INVALID_FIRST_PAGE, "decodeState head");
|
||||
}
|
||||
}
|
||||
|
||||
public function startFirstDecode()
|
||||
{
|
||||
firstAudioPageOffset = inputPosition;
|
||||
firstDecode = true;
|
||||
}
|
||||
|
||||
public inline function capturePattern()
|
||||
{
|
||||
if (readByte() != 0x4f || readByte() != 0x67 || readByte() != 0x67 || readByte() != 0x53) {
|
||||
throw new ReaderError(ReaderErrorType.MISSING_CAPTURE_PATTERN);
|
||||
}
|
||||
}
|
||||
|
||||
inline function skip(len:Int)
|
||||
{
|
||||
read(len);
|
||||
}
|
||||
|
||||
function prepHuffman()
|
||||
{
|
||||
if (validBits <= 24) {
|
||||
if (validBits == 0) {
|
||||
acc = 0;
|
||||
}
|
||||
do {
|
||||
if (bytesInSeg == 0 && (lastSeg || next() == 0)) { // CLANG!
|
||||
return;
|
||||
} else {
|
||||
bytesInSeg--;
|
||||
acc += readByte() << validBits;
|
||||
validBits += 8;
|
||||
}
|
||||
} while (validBits <= 24);
|
||||
}
|
||||
}
|
||||
	public inline function decode(c:Codebook):Int {
		var val = decodeRaw(c);
		if (c.sparse) {
			val = c.sortedValues[val];
		}
		return val;
	}

	public inline function decodeRaw(c:Codebook)
	{
		if (validBits < Setting.FAST_HUFFMAN_LENGTH) {
			prepHuffman();
		}

		// fast huffman table lookup
		var i = c.fastHuffman[acc & Setting.FAST_HUFFMAN_TABLE_MASK];

		return if (i >= 0) {
			var l = c.codewordLengths[i];
			acc >>>= l;
			validBits -= l;
			if (validBits < 0) {
				validBits = 0;
				-1;
			} else {
				i;
			}
		} else {
			decodeScalarRaw(c);
		}
	}

	public inline function isLastByte()
	{
		return bytesInSeg == 0 && lastSeg;
	}
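	// finishDecodePacket reconciles the decoded window with the stream
	// position: it applies the deferred discard of the very first frame,
	// folds in the granule position once a packet with a known location
	// ends, and reports the packet's usable sample range (len/left/right).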
	public function finishDecodePacket(previousLength:Int, n:Int, r:DecodeInitialResult)
	{
		var left = r.left.start;
		var currentLocValid = false;
		var n2 = n >> 1;

		if (firstDecode) {
			// assume we start so first non-discarded sample is sample 0
			// this isn't to spec, but spec would require us to read ahead
			// and decode the size of all current frames--could be done,
			// but presumably it's not a commonly used feature
			currentLoc = -n2; // start of first frame is positioned for discard
			// we might have to discard samples "from" the next frame too,
			// if we're lapping a large block then a small at the start?
			discardSamplesDeferred = n - r.right.end;
			currentLocValid = true;
			firstDecode = false;
		} else if (discardSamplesDeferred != 0) {
			r.left.start += discardSamplesDeferred;
			left = r.left.start;
			discardSamplesDeferred = 0;
		} else if (previousLength == 0 && currentLocValid) {
			// we're recovering from a seek... that means we're going to discard
			// the samples from this packet even though we know our position from
			// the last page header, so we need to update the position based on
			// the discarded samples here
			// but wait, the code below is going to add this in itself even
			// on a discard, so we don't need to do it here...
		}

		// check if we have ogg information about the sample # for this packet
		if (lastSegWhich == endSegWithKnownLoc) {
			// if we have a valid current loc, and this is final:
			if (currentLocValid && (page.flag & PageFlag.LAST_PAGE) != 0) {
				var currentEnd = knownLocForPacket - (n - r.right.end);
				// then let's infer the size of the (probably) short final frame
				if (currentEnd < currentLoc + r.right.end) {
					var len = if (currentEnd < currentLoc) {
						// negative truncation, that's impossible!
						0;
					} else {
						currentEnd - currentLoc;
					}
					len += r.left.start;
					currentLoc += len;

					return {
						len : len,
						left : left,
						right : r.right.start,
					}
				}
			}
			// otherwise, just set our sample loc
			// guess that the ogg granule pos refers to the _middle_ of the
			// last frame?
			// set currentLoc to the position of leftStart
			currentLoc = knownLocForPacket - (n2 - r.left.start);
			currentLocValid = true;
		}

		if (currentLocValid) {
			currentLoc += (r.right.start - r.left.start);
		}

		// if (alloc.allocBuffer)
		//assert(alloc.allocBufferLengthInBytes == tempOffset);

		return {
			len : r.right.end,
			left : left,
			right : r.right.start,
		}
	}
	public inline function readInt32():Int
	{
		inputPosition += 4;
		return input.readInt32();
	}

	public inline function readByte():Int
	{
		inputPosition += 1;
		return input.readByte();
	}

	public inline function read(n:Int):Vector<Int> {
		inputPosition += n;
		var vec = new Vector(n);
		for (i in 0...n) {
			vec[i] = input.readByte();
		}
		return vec;
	}

	public inline function readBytes(n:Int):Bytes {
		inputPosition += n;
		return input.read(n);
	}

	public inline function readString(n:Int):String
	{
		inputPosition += n;
		return input.readString(n);
	}
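	// getSampleNumber finds the stream's total sample count: seek to at
	// most 64K before the end, walk forward to the page carrying the
	// last-page flag, and read its granule position (lo/hi 32-bit halves).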
	public function getSampleNumber(seekFunc:Int->Void, inputLength:UInt):Int {
		// first, store the current decode position so we can restore it
		var restoreOffset = inputPosition;

		// now we want to seek back 64K from the end (the last page must
		// be at most a little less than 64K, but let's allow a little slop)
		var previousSafe = if (inputLength >= 65536 && inputLength - 65536 >= firstAudioPageOffset) {
			inputLength - 65536;
		} else {
			firstAudioPageOffset;
		}

		setInputOffset(seekFunc, previousSafe);

		// previousSafe is now our candidate 'earliest known place that seeking
		// to will lead to the final page'
		var end = 0;
		var last = false;
		switch (findPage(seekFunc, inputLength)) {
			case Found(e, l):
				end = e;
				last = l;
			case NotFound:
				throw new ReaderError(ReaderErrorType.CANT_FIND_LAST_PAGE);
		}

		// check if there are more pages
		var lastPageLoc = inputPosition;

		// stop when the lastPage flag is set, not when we reach eof;
		// this allows us to stop short of a 'fileSection' end without
		// explicitly checking the length of the section
		while (!last) {
			setInputOffset(seekFunc, end);
			switch (findPage(seekFunc, inputLength)) {
				case Found(e, l):
					end = e;
					last = l;
				case NotFound:
					// the last page we found didn't have the 'last page' flag
					// set. whoops!
					break;
			}

			previousSafe = lastPageLoc + 1;
			lastPageLoc = inputPosition;
		}

		setInputOffset(seekFunc, lastPageLoc);

		// parse the header
		var vorbisHeader = read(6);

		// extract the absolute granule position
		var lo = readInt32();
		var hi = readInt32();
		if (lo == 0xffffffff && hi == 0xffffffff || hi > 0) {
			throw new ReaderError(ReaderErrorType.CANT_FIND_LAST_PAGE);
		}

		pLast = new ProbedPage();
		pLast.pageStart = lastPageLoc;
		pLast.pageEnd = end;
		pLast.lastDecodedSample = lo;
		pLast.firstDecodedSample = null;
		pLast.afterPreviousPageStart = previousSafe;

		setInputOffset(seekFunc, restoreOffset);
		return lo;
	}

	public inline function forcePageResync()
	{
		nextSeg = -1;
	}

	public inline function setInputOffset(seekFunc:Int->Void, n:Int)
	{
		seekFunc(inputPosition = n);
	}
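	// findPage scans for the "OggS" capture pattern and only accepts a
	// candidate once the page's CRC32 (computed with the checksum field
	// zeroed) matches the stored value, ruling out false matches inside
	// packet data.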
	public function findPage(seekFunc:Int->Void, inputLength:Int):FindPageResult {
		try {
			while (true) {
				var n = readByte();
				if (n == 0x4f) { // page header
					var retryLoc = inputPosition;
					// check if we're off the end of a fileSection stream
					if (retryLoc - 25 > inputLength) {
						return FindPageResult.NotFound;
					}

					if (readByte() != 0x67 || readByte() != 0x67 || readByte() != 0x53) {
						continue;
					}

					var header = new Vector<UInt>(27);
					header[0] = 0x4f;
					header[1] = 0x67;
					header[2] = 0x67;
					header[3] = 0x53;
					for (i in 4...27) {
						header[i] = readByte();
					}

					if (header[4] != 0) {
						setInputOffset(seekFunc, retryLoc);
						continue;
					}

					var goal:UInt = header[22] + (header[23] << 8) + (header[24] << 16) + (header[25] << 24);
					for (i in 22...26) {
						header[i] = 0;
					}

					var crc:UInt = 0;
					for (i in 0...27) {
						crc = Crc32.update(crc, header[i]);
					}

					var len = 0;
					try {
						for (i in 0...header[26]) {
							var s = readByte();
							crc = Crc32.update(crc, s);
							len += s;
						}
						for (i in 0...len) {
							crc = Crc32.update(crc, readByte());
						}
					} catch (e:Eof) {
						return FindPageResult.NotFound;
					}

					// finished parsing probable page
					if (crc == goal) {
						// we could now check that it's either got the last
						// page flag set, OR it's followed by the capture
						// pattern, but I guess TECHNICALLY you could have
						// a file with garbage between each ogg page and recover
						// from it automatically? So even though that paranoia
						// might decrease the chance of an invalid decode by
						// another 2^32, not worth it since it would hose those
						// invalid-but-useful files?
						var end = inputPosition;
						setInputOffset(seekFunc, retryLoc - 1);
						return FindPageResult.Found(end, (header[5] & 0x04 != 0));
					}
				}
			}
		} catch (e:Eof) {
			return FindPageResult.NotFound;
		}
	}
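	// analyzePage walks the page's lacing values to count the samples each
	// packet contributes, so firstDecodedSample can be derived backwards
	// from the page's granule position without decoding any audio.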
	public function analyzePage(seekFunc:Int->Void, h:Header)
	{
		var z:ProbedPage = new ProbedPage();
		var packetType = new Vector<Bool>(255);

		// record where the page starts
		z.pageStart = inputPosition;

		// parse the header
		var pageHeader = read(27);
		VorbisTools.assert(pageHeader.get(0) == 0x4f && pageHeader.get(1) == 0x67 && pageHeader.get(2) == 0x67 && pageHeader.get(3) == 0x53);
		var lacing = read(pageHeader.get(26));

		// determine the length of the payload
		var len = 0;
		for (i in 0...pageHeader.get(26)) {
			len += lacing.get(i);
		}

		// this implies where the page ends
		z.pageEnd = z.pageStart + 27 + pageHeader.get(26) + len;

		// read the last-decoded sample out of the data (low 32 bits of the
		// granule position, little-endian)
		z.lastDecodedSample = pageHeader.get(6) + (pageHeader.get(7) << 8) + (pageHeader.get(8) << 16) + (pageHeader.get(9) << 24);

		if ((pageHeader.get(5) & 4) != 0) {
			// if this is the last page, it's not possible to work
			// backwards to figure out the first sample! whoops! fuck.
			z.firstDecodedSample = null;
			setInputOffset(seekFunc, z.pageStart);
			return z;
		}

		// scan through the frames to determine the sample-count of each one...
		// our goal is the sample # of the first fully-decoded sample on the
		// page, which is the first decoded sample of the 2nd packet

		var numPacket = 0;
		var packetStart = ((pageHeader.get(5) & 1) == 0);

		var modeCount = h.modes.length;

		for (i in 0...pageHeader.get(26)) {
			if (packetStart) {
				if (lacing.get(i) == 0) {
					setInputOffset(seekFunc, z.pageStart);
					return null; // trying to read from zero-length packet
				}
				var n = readByte();

				// if bottom bit is non-zero, we've got corruption
				if (n & 1 != 0) {
					setInputOffset(seekFunc, z.pageStart);
					return null;
				}
				n >>= 1;
				var b = MathTools.ilog(modeCount - 1);
				n &= (1 << b) - 1;
				if (n >= modeCount) {
					setInputOffset(seekFunc, z.pageStart);
					return null;
				}
				packetType[numPacket++] = h.modes[n].blockflag;
				skip(lacing.get(i) - 1);
			} else {
				skip(lacing.get(i));
			}
			packetStart = (lacing.get(i) < 255);
		}

		// now that we know the sizes of all the pages, we can start determining
		// how much sample data there is.
		var samples = 0;

		// for the last packet, we step by its whole length, because the definition
		// is that we encoded the end sample loc of the 'last packet completed',
		// where 'completed' refers to packets being split, and we are left to guess
		// what 'end sample loc' means. we assume it means ignoring the fact that
		// the last half of the data is useless without windowing against the next
		// packet... (so it's not REALLY complete in that sense)
		if (numPacket > 1) {
			samples += packetType[numPacket - 1] ? h.blocksize1 : h.blocksize0;
		}

		var i = numPacket - 2;
		while (i >= 1) {
			i--;
			// now, for this packet, how many samples do we have that
			// do not overlap the following packet?
			if (packetType[i]) {
				if (packetType[i + 1]) {
					samples += h.blocksize1 >> 1;
				} else {
					samples += ((h.blocksize1 - h.blocksize0) >> 2) + (h.blocksize0 >> 1);
				}
			} else {
				samples += h.blocksize0 >> 1;
			}
			i--;
		}
		// now, at this point, we've rewound to the very beginning of the
		// _second_ packet. if we entirely discard the first packet after
		// a seek, this will be exactly the right sample number. HOWEVER!
		// we can't as easily compute this number for the LAST page. The
		// only way to get the sample offset of the LAST page is to use
		// the end loc from the previous page. But what that returns us
		// is _exactly_ the place where we get our first non-overlapped
		// sample. (I think. Stupid spec for being ambiguous.) So for
		// consistency it's better to do that here, too. However, that
		// will then require us to NOT discard all of the first frame we
		// decode, in some cases, which means an even weirder frame size
		// and extra code. what a fucking pain.

		// we're going to discard the first packet if we
		// start the seek here, so we don't care about it. (we could actually
		// do better; if the first packet is long, and the previous packet
		// is short, there's actually data in the first half of the first
		// packet that doesn't need discarding... but not worth paying the
		// effort of tracking that here and in the seeking logic)
		// except crap, if we infer it from the _previous_ packet's end
		// location, we DO need to use that definition... and we HAVE to
		// infer the start loc of the LAST packet from the previous packet's
		// end location. fuck you, ogg vorbis.

		z.firstDecodedSample = z.lastDecodedSample - samples;

		// restore file state to where we were
		setInputOffset(seekFunc, z.pageStart);
		return z;
	}
	function decodeScalarRaw(c:Codebook):Int
	{
		prepHuffman();

		VorbisTools.assert(c.sortedCodewords != null || c.codewords != null);
		// cases to use binary search: sortedCodewords && !codewords

		var codewordLengths = c.codewordLengths;
		var codewords = c.codewords;
		var sortedCodewords = c.sortedCodewords;

		if (c.entries > 8 ? (sortedCodewords != null) : codewords != null) {
			// binary search
			var code = VorbisTools.bitReverse(acc);
			var x = 0;
			var n = c.sortedEntries;

			while (n > 1) {
				// invariant: sc[x] <= code < sc[x+n]
				var m = x + (n >> 1);
				if (sortedCodewords[m] <= code) {
					x = m;
					n -= (n >> 1);
				} else {
					n >>= 1;
				}
			}

			// x is now the sorted index
			if (!c.sparse) {
				x = c.sortedValues[x];
			}

			// x is now sorted index if sparse, or symbol otherwise
			var len = codewordLengths[x];
			if (validBits >= len) {
				acc >>>= len;
				validBits -= len;
				return x;
			}

			validBits = 0;
			return -1;
		}

		// if small, linear search
		VorbisTools.assert(!c.sparse);
		for (i in 0...c.entries) {
			var cl = codewordLengths[i];
			if (cl == Codebook.NO_CODE) {
				continue;
			}
			if (codewords[i] == (acc & ((1 << cl) - 1))) {
				if (validBits >= cl) {
					acc >>>= cl;
					validBits -= cl;
					return i;
				}
				validBits = 0;
				return -1;
			}
		}

		error = new ReaderError(INVALID_STREAM);
		validBits = 0;
		return -1;
	}
}

private enum FindPageResult {
	Found(end:Int, last:Bool);
	NotFound;
}
784
Kha/Sources/kha/audio2/ogg/vorbis/VorbisDecoder.hx
Normal file
@ -0,0 +1,784 @@
package kha.audio2.ogg.vorbis;

import haxe.ds.Vector;
import haxe.io.Bytes;
import haxe.io.BytesOutput;
import haxe.io.Input;
import haxe.io.Output;
import kha.audio2.ogg.tools.MathTools;
import kha.audio2.ogg.tools.Mdct;
import kha.audio2.ogg.vorbis.data.Codebook;
import kha.audio2.ogg.vorbis.data.Floor.Floor1;
import kha.audio2.ogg.vorbis.data.Header;
import kha.audio2.ogg.vorbis.data.Mode;
import kha.audio2.ogg.vorbis.data.ProbedPage;
import kha.audio2.ogg.vorbis.data.ReaderError;
import kha.audio2.ogg.vorbis.VorbisDecodeState;

/**
 * ...
 * @author shohei909
 */
class VorbisDecoder
{
	var previousWindow:Vector<Vector<Float>>; //var *[STB_VORBIS_MAX_CHANNELS];
	var previousLength:Int;
	var finalY:Vector<Array<Int>>; // [STB_VORBIS_MAX_CHANNELS];

	// twiddle factors
	var a:Vector<Vector<Float>>; // var * [2]
	var b:Vector<Vector<Float>>; // var * [2]
	var c:Vector<Vector<Float>>; // var * [2]
	var window:Vector<Vector<Float>>; //var * [2];
	var bitReverseData:Vector<Vector<Int>>; //uint16 * [2]

	// decode buffer
	var channelBuffers:Vector<Vector<Float>>; //var *[STB_VORBIS_MAX_CHANNELS];
	var channelBufferStart:Int;
	var channelBufferEnd:Int;

	public var header(default, null):Header;
	public var currentSample(default, null):Int;
	public var totalSample(default, null):Null<Int>;
	var decodeState:VorbisDecodeState;

	function new(header:Header, decodeState:VorbisDecodeState) {
		this.header = header;
		this.decodeState = decodeState;
		totalSample = null;
		currentSample = 0;

		//Channel
		previousLength = 0;

		channelBuffers = new Vector(header.channel);
		previousWindow = new Vector(header.channel);
		finalY = new Vector(header.channel);

		for (i in 0...header.channel) {
			channelBuffers[i] = VorbisTools.emptyFloatVector(header.blocksize1);
			previousWindow[i] = VorbisTools.emptyFloatVector(Std.int(header.blocksize1 / 2));
			finalY[i] = new Array();
		}

		a = new Vector(2);
		b = new Vector(2);
		c = new Vector(2);
		window = new Vector(2);
		bitReverseData = new Vector(2);
		initBlocksize(0, header.blocksize0);
		initBlocksize(1, header.blocksize1);
	}

	public static function start(input:Input) {
		var decodeState = new VorbisDecodeState(input);
		var header = Header.read(decodeState);
		var decoder = new VorbisDecoder(header, decodeState);
		decodeState.startFirstDecode();
		decoder.pumpFirstFrame();

		return decoder;
	}
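	// read converts decoded frames to the requested format by integer
	// repetition only: each source sample is written sampleRepeat times and
	// each source channel channelRepeat times, so the target sample rate
	// and channel count must be integer multiples of the stream's.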
	public function read(output:kha.arrays.Float32Array, samples:Int, channels:Int, sampleRate:Int, useFloat:Bool) {
		if (sampleRate % header.sampleRate != 0) {
			throw 'Unsupported sampleRate : can\'t convert ${header.sampleRate} to $sampleRate';
		}
		if (channels % header.channel != 0) {
			throw 'Unsupported channels : can\'t convert ${header.channel} to $channels';
		}

		var sampleRepeat = Std.int(sampleRate / header.sampleRate);
		var channelRepeat = Std.int(channels / header.channel);

		var n = 0;
		var len = Math.floor(samples / sampleRepeat);
		if (totalSample != null && len > totalSample - currentSample) {
			len = totalSample - currentSample;
		}

		var index = 0;
		while (n < len) {
			var k = channelBufferEnd - channelBufferStart;
			if (k >= len - n) k = len - n;
			for (j in channelBufferStart...(channelBufferStart + k)) {
				for (sr in 0...sampleRepeat) {
					for (i in 0...header.channel) {
						for (cr in 0...channelRepeat) {
							var value = channelBuffers[i][j];
							if (value > 1) {
								value = 1;
							} else if (value < -1) {
								value = -1;
							}

							if (useFloat) {
								//output.writeFloat(value);
								output[index] = value;
								++index;
							} else {
								//output.writeInt16(Math.floor(value * 0x7FFF));
							}
						}
					}
				}
			}
			n += k;
			channelBufferStart += k;
			if (n == len || getFrameFloat() == 0) {
				break;
			}
		}

		for (j in n...len) {
			for (sr in 0...sampleRepeat) {
				for (i in 0...header.channel) {
					for (cr in 0...channelRepeat) {
						if (useFloat) {
							//output.writeFloat(0);
							output[index] = 0;
							++index;
						} else {
							//output.writeInt16(0);
						}
					}
				}
			}
		}

		currentSample += len;
		return len * sampleRepeat;
	}

	public function skipSamples(len:Int) {
		var n = 0;
		if (totalSample != null && len > totalSample - currentSample) {
			len = totalSample - currentSample;
		}
		while (n < len) {
			var k = channelBufferEnd - channelBufferStart;
			if (k >= len - n) k = len - n;
			n += k;
			channelBufferStart += k;
			if (n == len || getFrameFloat() == 0) {
				break;
			}
		}

		currentSample += len;
		return len;
	}

	public function setupSampleNumber(seekFunc:Int->Void, inputLength:Int) {
		if (totalSample == null) {
			totalSample = decodeState.getSampleNumber(seekFunc, inputLength);
		}
	}
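	// seek brackets the page containing sampleNumber by interpolating
	// between the known first and last pages, blending into a plain binary
	// search after a few attempts so skewed bitrate distributions still
	// converge.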
	public function seek(seekFunc:Int->Void, inputLength:UInt, sampleNumber:Int) {
		if (currentSample == sampleNumber) {
			return;
		}

		// do we know the location of the last page?
		if (totalSample == null) {
			setupSampleNumber(seekFunc, inputLength);
			if (totalSample == 0) {
				throw new ReaderError(ReaderErrorType.CANT_FIND_LAST_PAGE);
			}
		}

		if (sampleNumber < 0) {
			sampleNumber = 0;
		}

		var p0 = decodeState.pFirst;
		var p1 = decodeState.pLast;

		if (sampleNumber >= p1.lastDecodedSample) {
			sampleNumber = p1.lastDecodedSample - 1;
		}

		if (sampleNumber < p0.lastDecodedSample) {
			seekFrameFromPage(seekFunc, p0.pageStart, 0, sampleNumber);
		} else {
			var attempts = 0;

			while (p0.pageEnd < p1.pageStart) {
				// copy these into local variables so we can tweak them
				// if any are unknown
				var startOffset:UInt = p0.pageEnd;
				var endOffset:UInt = p1.afterPreviousPageStart; // an address known to seek to page p1
				var startSample = p0.lastDecodedSample;
				var endSample = p1.lastDecodedSample;

				// currently there is no such tweaking logic needed/possible?
				if (startSample == null || endSample == null) {
					throw new ReaderError(SEEK_FAILED);
				}

				// now we want to lerp between these for the target samples...

				// step 1: we need to bias towards the page start...
				if (startOffset + 4000 < endOffset) {
					endOffset -= 4000;
				}

				// now compute an interpolated search loc
				var probe:UInt = startOffset + Math.floor((endOffset - startOffset) / (endSample - startSample) * (sampleNumber - startSample));

				// next we need to bias towards binary search...
				// code is a little wonky to allow for full 32-bit unsigned values
				if (attempts >= 4) {
					var probe2:UInt = startOffset + ((endOffset - startOffset) >> 1);
					probe = if (attempts >= 8) {
						probe2;
					} else if (probe < probe2) {
						probe + ((probe2 - probe) >>> 1);
					} else {
						probe2 + ((probe - probe2) >>> 1);
					}
				}
				++attempts;
				decodeState.setInputOffset(seekFunc, probe);

				switch (decodeState.findPage(seekFunc, inputLength)) {
					case NotFound:
						throw new ReaderError(SEEK_FAILED);
					case Found(_):
				}

				var q:ProbedPage = decodeState.analyzePage(seekFunc, header);
				if (q == null) {
					throw new ReaderError(SEEK_FAILED);
				}
				q.afterPreviousPageStart = probe;

				// it's possible we've just found the last page again
				if (q.pageStart == p1.pageStart) {
					p1 = q;
					continue;
				}

				if (sampleNumber < q.lastDecodedSample) {
					p1 = q;
				} else {
					p0 = q;
				}
			}

			if (p0.lastDecodedSample <= sampleNumber && sampleNumber < p1.lastDecodedSample) {
				seekFrameFromPage(seekFunc, p1.pageStart, p0.lastDecodedSample, sampleNumber);
			} else {
				throw new ReaderError(SEEK_FAILED);
			}
		}
	}
	public function seekFrameFromPage(seekFunc:Int->Void, pageStart:Int, firstSample:Int, targetSample:Int) {
		var frame = 0;
		var frameStart:Int = firstSample;

		// firstSample is the sample # of the first sample that doesn't
		// overlap the previous page... note that this requires us to
		// _partially_ discard the first packet! bleh.
		decodeState.setInputOffset(seekFunc, pageStart);
		decodeState.forcePageResync();

		// frame start is where the previous packet's last decoded sample
		// was, which corresponds to leftEnd... EXCEPT if the previous
		// packet was long and this packet is short? Probably a bug here.

		// now, we can start decoding frames... we'll only FAKE decode them,
		// until we find the frame that contains our sample; then we'll rewind,
		// and try again
		var leftEnd = 0;
		var leftStart = 0;

		var prevState = null;
		var lastState = null;

		while (true) {
			prevState = lastState;
			lastState = decodeState.clone(seekFunc);

			var initialResult = decodeInitial();
			if (initialResult == null) {
				lastState = prevState;
				break;
			}

			leftStart = initialResult.left.start;
			leftEnd = initialResult.left.end;

			var start = if (frame == 0) {
				leftEnd;
			} else {
				leftStart;
			}

			// the window starts at leftStart; the last valid sample we generate
			// before the next frame's window start is rightStart-1
			if (targetSample < frameStart + initialResult.right.start - start) {
				break;
			}

			decodeState.flushPacket();
			frameStart += initialResult.right.start - start;
			++frame;
		}

		decodeState = lastState;
		seekFunc(decodeState.inputPosition);

		previousLength = 0;
		pumpFirstFrame();

		currentSample = frameStart;
		skipSamples(targetSample - frameStart);
	}
	public function clone(seekFunc:Int->Void) {
		var decoder = Type.createEmptyInstance(VorbisDecoder);

		decoder.currentSample = currentSample;
		decoder.totalSample = totalSample;
		decoder.previousLength = previousLength;
		decoder.channelBufferStart = channelBufferStart;
		decoder.channelBufferEnd = channelBufferEnd;

		// shallow copy
		decoder.a = a;
		decoder.b = b;
		decoder.c = c;
		decoder.window = window;
		decoder.bitReverseData = bitReverseData;
		decoder.header = header;

		// deep copy
		decoder.decodeState = decodeState.clone(seekFunc);
		decoder.channelBuffers = new Vector(header.channel);
		decoder.previousWindow = new Vector(header.channel);
		decoder.finalY = new Vector(header.channel);

		for (i in 0...header.channel) {
			decoder.channelBuffers[i] = VorbisTools.copyVector(channelBuffers[i]);
			decoder.previousWindow[i] = VorbisTools.copyVector(previousWindow[i]);
			decoder.finalY[i] = Lambda.array(finalY[i]);
		}

		return decoder;
	}

	public function ensurePosition(seekFunc:Int->Void) {
		seekFunc(decodeState.inputPosition);
	}
	function getFrameFloat() {
		var result = decodePacket();
		if (result == null) {
			channelBufferStart = channelBufferEnd = 0;
			return 0;
		}

		var len = finishFrame(result);

		channelBufferStart = result.left;
		channelBufferEnd = result.left + len;

		return len;
	}

	function pumpFirstFrame() {
		finishFrame(decodePacket());
	}

	function finishFrame(r:DecodePacketResult):Int {
		var len = r.len;
		var right = r.right;
		var left = r.left;

		// we use right&left (the start of the right- and left-window sin()-regions)
		// to determine how much to return, rather than inferring from the rules
		// (same result, clearer code); 'left' indicates where our sin() window
		// starts, therefore where the previous window's right edge starts, and
		// therefore where to start mixing from the previous buffer. 'right'
		// indicates where our sin() ending-window starts, therefore that's where
		// we start saving, and where our returned-data ends.

		// mixin from previous window
		if (previousLength != 0) {
			var n = previousLength;
			var w = getWindow(n);
			for (i in 0...header.channel) {
				var cb = channelBuffers[i];
				var pw = previousWindow[i];
				for (j in 0...n) {
					cb[left + j] = cb[left + j] * w[j] + pw[j] * w[n - 1 - j];
				}
			}
		}

		var prev = previousLength;

		// last half of this data becomes previous window
		previousLength = len - right;

		// @OPTIMIZE: could avoid this copy by double-buffering the
		// output (flipping previousWindow with channelBuffers), but
		// then previousWindow would have to be 2x as large, and
		// channelBuffers couldn't be temp mem (although they're NOT
		// currently temp mem, they could be (unless we want to level
		// performance by spreading out the computation))
		for (i in 0...header.channel) {
			var pw = previousWindow[i];
			var cb = channelBuffers[i];
			for (j in 0...(len - right)) {
				pw[j] = cb[right + j];
			}
		}

		if (prev == 0) {
			// there was no previous packet, so this data isn't valid...
			// this isn't entirely true, only the would-have-overlapped data
			// isn't valid, but this seems to be what the spec requires
			return 0;
		}

		// truncate a short frame
		if (len < right) {
			right = len;
		}

		return right - left;
	}

	function getWindow(len:Int)
	{
		len <<= 1;
		return if (len == header.blocksize0) {
			window[0];
		} else if (len == header.blocksize1) {
			window[1];
		} else {
			VorbisTools.assert(false);
			null;
		}
	}

	function initBlocksize(bs:Int, n:Int)
	{
		var n2 = n >> 1, n4 = n >> 2, n8 = n >> 3;
		a[bs] = new Vector(n2);
		b[bs] = new Vector(n2);
		c[bs] = new Vector(n4);
		window[bs] = new Vector(n2);
		bitReverseData[bs] = new Vector(n8);

		VorbisTools.computeTwiddleFactors(n, a[bs], b[bs], c[bs]);
		VorbisTools.computeWindow(n, window[bs]);
		VorbisTools.computeBitReverse(n, bitReverseData[bs]);
	}

	function inverseMdct(buffer:Vector<Float>, n:Int, blocktype:Bool) {
		var bt = blocktype ? 1 : 0;
		Mdct.inverseTransform(buffer, n, a[bt], b[bt], c[bt], bitReverseData[bt]);
	}

	function decodePacket():DecodePacketResult
	{
		var result = decodeInitial();
		if (result == null) {
			return null;
		}
		var rest = decodePacketRest(result);
		return rest;
	}
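	// decodeInitial reads the packet's mode number and derives the left and
	// right window overlap ranges for this frame; a long block next to a
	// short one gets a narrowed overlap region.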
	function decodeInitial():DecodeInitialResult
	{
		channelBufferStart = channelBufferEnd = 0;

		do {
			if (!decodeState.maybeStartPacket()) {
				return null;
			}

			// check packet type
			if (decodeState.readBits(1) != 0) {
				while (VorbisTools.EOP != decodeState.readPacket()) {};
				continue;
			}
			break;
		} while (true);

		var i = decodeState.readBits(MathTools.ilog(header.modes.length - 1));
		if (i == VorbisTools.EOP || i >= header.modes.length) {
			throw new ReaderError(ReaderErrorType.SEEK_FAILED);
		}

		var m = header.modes[i];
		var n, prev, next;

		if (m.blockflag) {
			n = header.blocksize1;
			prev = decodeState.readBits(1);
			next = decodeState.readBits(1);
		} else {
			prev = next = 0;
			n = header.blocksize0;
		}

		// WINDOWING
		var windowCenter = n >> 1;

		return {
			mode : i,
			left : if (m.blockflag && prev == 0) {
				start : (n - header.blocksize0) >> 2,
				end : (n + header.blocksize0) >> 2,
			} else {
				start : 0,
				end : windowCenter,
			},
			right : if (m.blockflag && next == 0) {
				start : (n * 3 - header.blocksize0) >> 2,
				end : (n * 3 + header.blocksize0) >> 2,
			} else {
				start : windowCenter,
				end : n,
			},
		}
	}
	function decodePacketRest(r:DecodeInitialResult):DecodePacketResult
	{
		var len = 0;
		var m = header.modes[r.mode];

		var zeroChannel = new Vector<Bool>(256);
		var reallyZeroChannel = new Vector<Bool>(256);

		// WINDOWING

		var n = m.blockflag ? header.blocksize1 : header.blocksize0;
		var map = header.mapping[m.mapping];

		// FLOORS
		var n2 = n >> 1;
		VorbisTools.stbProf(1);
		var rangeList = [256, 128, 86, 64];
		var codebooks = header.codebooks;

		for (i in 0...header.channel) {
			var s = map.chan[i].mux;
			zeroChannel[i] = false;
			var floor = header.floorConfig[map.submapFloor[s]];
			if (floor.type == 0) {
				throw new ReaderError(INVALID_STREAM);
			} else {
				var g:Floor1 = floor.floor1;
				if (decodeState.readBits(1) != 0) {
					var fy = new Array<Int>();
					var step2Flag = new Vector<Bool>(256);
					var range = rangeList[g.floor1Multiplier - 1];
					var offset = 2;
					fy = finalY[i];
					fy[0] = decodeState.readBits(MathTools.ilog(range) - 1);
					fy[1] = decodeState.readBits(MathTools.ilog(range) - 1);
					for (j in 0...g.partitions) {
						var pclass = g.partitionClassList[j];
						var cdim = g.classDimensions[pclass];
						var cbits = g.classSubclasses[pclass];
						var csub = (1 << cbits) - 1;
						var cval = 0;
						if (cbits != 0) {
							var c = codebooks[g.classMasterbooks[pclass]];
							cval = decodeState.decode(c);
						}

						var books = g.subclassBooks[pclass];
						for (k in 0...cdim) {
							var book = books[cval & csub];
							cval >>= cbits;
							fy[offset++] = if (book >= 0) {
								decodeState.decode(codebooks[book]);
							} else {
								0;
							}
						}
					}

					if (decodeState.validBits == VorbisDecodeState.INVALID_BITS) {
						zeroChannel[i] = true;
						continue;
					}

					step2Flag[0] = step2Flag[1] = true;
					var neighbors = g.neighbors;
					var xlist = g.xlist;
					for (j in 2...g.values) {
						var low = neighbors[j][0];
						var high = neighbors[j][1];
						var lowroom = VorbisTools.predictPoint(xlist[j], xlist[low], xlist[high], fy[low], fy[high]);
						var val = fy[j];
						var highroom = range - lowroom;
						var room = if (highroom < lowroom) {
							highroom * 2;
						} else {
							lowroom * 2;
						}
						if (val != 0) {
							step2Flag[low] = step2Flag[high] = true;
							step2Flag[j] = true;
							if (val >= room) {
								if (highroom > lowroom) {
									fy[j] = val - lowroom + lowroom;
								} else {
									fy[j] = lowroom - val + highroom - 1;
								}
							} else {
								if (val & 1 != 0) {
									fy[j] = lowroom - ((val + 1) >> 1);
								} else {
									fy[j] = lowroom + (val >> 1);
								}
							}
						} else {
							step2Flag[j] = false;
							fy[j] = lowroom;
						}
					}

					// defer final floor computation until _after_ residue
					for (j in 0...g.values) {
						if (!step2Flag[j]) {
							fy[j] = -1;
						}
					}
				} else {
					zeroChannel[i] = true;
				}
				// So we just defer everything else to later
				// at this point we've decoded the floor into buffer
			}
		}
		VorbisTools.stbProf(0);
		// at this point we've decoded all floors

		//if (alloc.allocBuffer) {
		//	assert(alloc.allocBufferLengthInBytes == tempOffset);
		//}

		// re-enable coupled channels if necessary
		for (i in 0...header.channel) {
			reallyZeroChannel[i] = zeroChannel[i];
		}
		for (i in 0...map.couplingSteps) {
			if (!zeroChannel[map.chan[i].magnitude] || !zeroChannel[map.chan[i].angle]) {
				zeroChannel[map.chan[i].magnitude] = zeroChannel[map.chan[i].angle] = false;
			}
		}

		// RESIDUE DECODE
		for (i in 0...map.submaps) {
			var residueBuffers = new Vector<Vector<Float>>(header.channel);
			var doNotDecode = new Vector<Bool>(256);
			var ch = 0;
			for (j in 0...header.channel) {
				if (map.chan[j].mux == i) {
					if (zeroChannel[j]) {
						doNotDecode[ch] = true;
						residueBuffers[ch] = null;
					} else {
						doNotDecode[ch] = false;
						residueBuffers[ch] = channelBuffers[j];
					}
					++ch;
				}
			}

			var r = map.submapResidue[i];
			var residue = header.residueConfig[r];
			residue.decode(decodeState, header, residueBuffers, ch, n2, doNotDecode, channelBuffers);
		}
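		// Square-polar inverse coupling: the magnitude channel carries the
		// louder component and the angle channel the signed difference, so
		// each original sample pair is reconstructed in place below.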
		// INVERSE COUPLING
		VorbisTools.stbProf(14);

		var i = map.couplingSteps;
		var n2 = n >> 1;
		while (--i >= 0) {
			var m = channelBuffers[map.chan[i].magnitude];
			var a = channelBuffers[map.chan[i].angle];
			for (j in 0...n2) {
				var a2, m2;
				if (m[j] > 0) {
					if (a[j] > 0) {
						m2 = m[j];
						a2 = m[j] - a[j];
					} else {
						a2 = m[j];
						m2 = m[j] + a[j];
					}
				} else {
					if (a[j] > 0) {
						m2 = m[j];
						a2 = m[j] + a[j];
					} else {
						a2 = m[j];
						m2 = m[j] - a[j];
					}
				}
				m[j] = m2;
				a[j] = a2;
			}
		}

		// finish decoding the floors
		VorbisTools.stbProf(15);
		for (i in 0...header.channel) {
			if (reallyZeroChannel[i]) {
				for (j in 0...n2) {
					channelBuffers[i][j] = 0;
				}
			} else {
				map.doFloor(header.floorConfig, i, n, channelBuffers[i], finalY[i], null);
			}
		}

		// INVERSE MDCT
		VorbisTools.stbProf(16);
		for (i in 0...header.channel) {
			inverseMdct(channelBuffers[i], n, m.blockflag);
		}
		VorbisTools.stbProf(0);

		// this shouldn't be necessary, unless we exited on an error
		// and want to flush to get to the next packet
		decodeState.flushPacket();

		return decodeState.finishDecodePacket(previousLength, n, r);
	}
}

typedef DecodePacketResult = {
	var len : Int;
	var left : Int;
	var right : Int;
}

typedef DecodeInitialResult = {
	var mode : Int;
	var left : Range;
	var right : Range;
}

private typedef Range = {
	var start : Int;
	var end : Int;
}
291
Kha/Sources/kha/audio2/ogg/vorbis/VorbisTools.hx
Normal file
@ -0,0 +1,291 @@
package kha.audio2.ogg.vorbis;

import haxe.ds.Vector;
import haxe.io.Bytes;
import haxe.io.Input;
import haxe.PosInfos;
import kha.audio2.ogg.vorbis.data.IntPoint;
import kha.audio2.ogg.vorbis.data.ReaderError;
import kha.audio2.ogg.tools.MathTools;

/**
 * ...
 * @author shohei909
 */
class VorbisTools
{
	static public inline var EOP = -1;
	static public var integerDivideTable:Vector<Vector<Int>>;
	static inline var M__PI = 3.14159265358979323846264;

	static inline var DIVTAB_NUMER = 32;
	static inline var DIVTAB_DENOM = 64;

	static public var INVERSE_DB_TABLE = [
		1.0649863e-07, 1.1341951e-07, 1.2079015e-07, 1.2863978e-07,
		1.3699951e-07, 1.4590251e-07, 1.5538408e-07, 1.6548181e-07,
		1.7623575e-07, 1.8768855e-07, 1.9988561e-07, 2.1287530e-07,
		2.2670913e-07, 2.4144197e-07, 2.5713223e-07, 2.7384213e-07,
		2.9163793e-07, 3.1059021e-07, 3.3077411e-07, 3.5226968e-07,
		3.7516214e-07, 3.9954229e-07, 4.2550680e-07, 4.5315863e-07,
		4.8260743e-07, 5.1396998e-07, 5.4737065e-07, 5.8294187e-07,
		6.2082472e-07, 6.6116941e-07, 7.0413592e-07, 7.4989464e-07,
		7.9862701e-07, 8.5052630e-07, 9.0579828e-07, 9.6466216e-07,
		1.0273513e-06, 1.0941144e-06, 1.1652161e-06, 1.2409384e-06,
		1.3215816e-06, 1.4074654e-06, 1.4989305e-06, 1.5963394e-06,
		1.7000785e-06, 1.8105592e-06, 1.9282195e-06, 2.0535261e-06,
		2.1869758e-06, 2.3290978e-06, 2.4804557e-06, 2.6416497e-06,
		2.8133190e-06, 2.9961443e-06, 3.1908506e-06, 3.3982101e-06,
		3.6190449e-06, 3.8542308e-06, 4.1047004e-06, 4.3714470e-06,
		4.6555282e-06, 4.9580707e-06, 5.2802740e-06, 5.6234160e-06,
		5.9888572e-06, 6.3780469e-06, 6.7925283e-06, 7.2339451e-06,
		7.7040476e-06, 8.2047000e-06, 8.7378876e-06, 9.3057248e-06,
		9.9104632e-06, 1.0554501e-05, 1.1240392e-05, 1.1970856e-05,
		1.2748789e-05, 1.3577278e-05, 1.4459606e-05, 1.5399272e-05,
		1.6400004e-05, 1.7465768e-05, 1.8600792e-05, 1.9809576e-05,
		2.1096914e-05, 2.2467911e-05, 2.3928002e-05, 2.5482978e-05,
		2.7139006e-05, 2.8902651e-05, 3.0780908e-05, 3.2781225e-05,
		3.4911534e-05, 3.7180282e-05, 3.9596466e-05, 4.2169667e-05,
		4.4910090e-05, 4.7828601e-05, 5.0936773e-05, 5.4246931e-05,
		5.7772202e-05, 6.1526565e-05, 6.5524908e-05, 6.9783085e-05,
		7.4317983e-05, 7.9147585e-05, 8.4291040e-05, 8.9768747e-05,
		9.5602426e-05, 0.00010181521, 0.00010843174, 0.00011547824,
		0.00012298267, 0.00013097477, 0.00013948625, 0.00014855085,
		0.00015820453, 0.00016848555, 0.00017943469, 0.00019109536,
		0.00020351382, 0.00021673929, 0.00023082423, 0.00024582449,
		0.00026179955, 0.00027881276, 0.00029693158, 0.00031622787,
		0.00033677814, 0.00035866388, 0.00038197188, 0.00040679456,
		0.00043323036, 0.00046138411, 0.00049136745, 0.00052329927,
		0.00055730621, 0.00059352311, 0.00063209358, 0.00067317058,
		0.00071691700, 0.00076350630, 0.00081312324, 0.00086596457,
		0.00092223983, 0.00098217216, 0.0010459992, 0.0011139742,
		0.0011863665, 0.0012634633, 0.0013455702, 0.0014330129,
		0.0015261382, 0.0016253153, 0.0017309374, 0.0018434235,
		0.0019632195, 0.0020908006, 0.0022266726, 0.0023713743,
		0.0025254795, 0.0026895994, 0.0028643847, 0.0030505286,
		0.0032487691, 0.0034598925, 0.0036847358, 0.0039241906,
		0.0041792066, 0.0044507950, 0.0047400328, 0.0050480668,
		0.0053761186, 0.0057254891, 0.0060975636, 0.0064938176,
		0.0069158225, 0.0073652516, 0.0078438871, 0.0083536271,
		0.0088964928, 0.009474637, 0.010090352, 0.010746080,
		0.011444421, 0.012188144, 0.012980198, 0.013823725,
		0.014722068, 0.015678791, 0.016697687, 0.017782797,
		0.018938423, 0.020169149, 0.021479854, 0.022875735,
		0.024362330, 0.025945531, 0.027631618, 0.029427276,
		0.031339626, 0.033376252, 0.035545228, 0.037855157,
		0.040315199, 0.042935108, 0.045725273, 0.048696758,
		0.051861348, 0.055231591, 0.058820850, 0.062643361,
		0.066714279, 0.071049749, 0.075666962, 0.080584227,
		0.085821044, 0.091398179, 0.097337747, 0.10366330,
		0.11039993, 0.11757434, 0.12521498, 0.13335215,
		0.14201813, 0.15124727, 0.16107617, 0.17154380,
		0.18269168, 0.19456402, 0.20720788, 0.22067342,
		0.23501402, 0.25028656, 0.26655159, 0.28387361,
		0.30232132, 0.32196786, 0.34289114, 0.36517414,
		0.38890521, 0.41417847, 0.44109412, 0.46975890,
		0.50028648, 0.53279791, 0.56742212, 0.60429640,
		0.64356699, 0.68538959, 0.72993007, 0.77736504,
		0.82788260, 0.88168307, 0.9389798, 1.0
	];

	public static inline function assert(b:Bool, ?p:PosInfos) {
		#if debug
		if (!b) {
			throw new ReaderError(ReaderErrorType.OTHER, "", p);
		}
		#end
	}

	public static inline function neighbors(x:Vector<Int>, n:Int)
	{
		var low = -1;
		var high = 65536;
		var plow = 0;
		var phigh = 0;

		for (i in 0...n) {
			if (x[i] > low && x[i] < x[n]) { plow = i; low = x[i]; }
			if (x[i] < high && x[i] > x[n]) { phigh = i; high = x[i]; }
		}
		return {
			low : plow,
			high : phigh,
		}
	}
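	// float32_unpack from the Vorbis specification: 21-bit integer
	// mantissa, sign bit, 10-bit exponent, value = ±mantissa * 2^(exp - 788).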
	public static inline function floatUnpack(x:UInt):Float
	{
		// from the specification
		var mantissa:Float = x & 0x1fffff;
		var sign:Int = x & 0x80000000;
		var exp:Int = (x & 0x7fe00000) >>> 21;
		var res:Float = (sign != 0) ? -mantissa : mantissa;
		return res * Math.pow(2, exp - 788);
	}
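	// Branch-free 32-bit reversal: swap adjacent bits, then 2-bit pairs,
	// nibbles and bytes, and finish with a 16-bit rotate.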
	public static inline function bitReverse(n:UInt):UInt
	{
		n = ((n & 0xAAAAAAAA) >>> 1) | ((n & 0x55555555) << 1);
		n = ((n & 0xCCCCCCCC) >>> 2) | ((n & 0x33333333) << 2);
		n = ((n & 0xF0F0F0F0) >>> 4) | ((n & 0x0F0F0F0F) << 4);
		n = ((n & 0xFF00FF00) >>> 8) | ((n & 0x00FF00FF) << 8);
		return (n >>> 16) | (n << 16);
	}

	public static inline function pointCompare(a:IntPoint, b:IntPoint) {
		return if (a.x < b.x) -1 else if (a.x > b.x) 1 else 0;
	}

	public static function uintAsc(a:UInt, b:UInt) {
		return if (a < b) {
			-1;
		} else if (a == b) {
			0;
		} else {
			1;
		}
	}

	public static function lookup1Values(entries:Int, dim:Int)
	{
		var r = Std.int(Math.exp(Math.log(entries) / dim));
		if (Std.int(Math.pow(r + 1, dim)) <= entries) {
			r++;
		}

		assert(Math.pow(r + 1, dim) > entries);
		assert(Std.int(Math.pow(r, dim)) <= entries); // (int),floor() as above
		return r;
	}

	public static function computeWindow(n:Int, window:Vector<Float>)
	{
		var n2 = n >> 1;
		for (i in 0...n2) {
			window[i] = Math.sin(0.5 * M__PI * square(Math.sin((i - 0 + 0.5) / n2 * 0.5 * M__PI)));
		}
	}

	public static function square(f:Float) {
		return f * f;
	}

	public static function computeBitReverse(n:Int, rev:Vector<Int>)
	{
		var ld = MathTools.ilog(n) - 1;
		var n8 = n >> 3;

		for (i in 0...n8) {
			rev[i] = (bitReverse(i) >>> (32 - ld + 3)) << 2;
		}
	}

	public static function computeTwiddleFactors(n:Int, af:Vector<Float>, bf:Vector<Float>, cf:Vector<Float>)
	{
		var n4 = n >> 2;
		var n8 = n >> 3;

		var k2 = 0;
		for (k in 0...n4) {
			af[k2] = Math.cos(4 * k * M__PI / n);
			af[k2 + 1] = -Math.sin(4 * k * M__PI / n);
			bf[k2] = Math.cos((k2 + 1) * M__PI / n / 2) * 0.5;
			bf[k2 + 1] = Math.sin((k2 + 1) * M__PI / n / 2) * 0.5;
			k2 += 2;
		}

		var k2 = 0;
		for (k in 0...n8) {
			cf[k2] = Math.cos(2 * (k2 + 1) * M__PI / n);
			cf[k2 + 1] = -Math.sin(2 * (k2 + 1) * M__PI / n);
			k2 += 2;
		}
	}
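	// Renders a floor segment: a Bresenham-style integer line walk in dB
	// space, where each y step indexes INVERSE_DB_TABLE to scale the
	// residue by a linear amplitude.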
	public static function drawLine(output:Vector<Float>, x0:Int, y0:Int, x1:Int, y1:Int, n:Int)
	{
		if (integerDivideTable == null) {
			integerDivideTable = new Vector(DIVTAB_NUMER);
			for (i in 0...DIVTAB_NUMER) {
				integerDivideTable[i] = new Vector(DIVTAB_DENOM);
				for (j in 1...DIVTAB_DENOM) {
					integerDivideTable[i][j] = Std.int(i / j);
				}
			}
		}

		var dy = y1 - y0;
		var adx = x1 - x0;
		var ady = dy < 0 ? -dy : dy;
		var base:Int;
		var x = x0;
		var y = y0;
		var err = 0;
		var sy = if (adx < DIVTAB_DENOM && ady < DIVTAB_NUMER) {
			if (dy < 0) {
				base = -integerDivideTable[ady][adx];
				base - 1;
			} else {
				base = integerDivideTable[ady][adx];
				base + 1;
			}
		} else {
			base = Std.int(dy / adx);
			if (dy < 0) {
				base - 1;
			} else {
				base + 1;
			}
		}
		ady -= (base < 0 ? -base : base) * adx;
		if (x1 > n) {
			x1 = n;
		}

		output[x] *= INVERSE_DB_TABLE[y];

		for (i in (x + 1)...x1) {
			err += ady;
			if (err >= adx) {
				err -= adx;
				y += sy;
			} else {
				y += base;
			}
			output[i] *= INVERSE_DB_TABLE[y];
		}
	}

	public macro static inline function stbProf(i:Int)
	{
		return macro null; // macro trace($v{i}, channelBuffers[0][0], channelBuffers[0][1]);
	}

	public static inline function predictPoint(x:Int, x0:Int, x1:Int, y0:Int, y1:Int):Int
	{
		var dy = y1 - y0;
		var adx = x1 - x0;
		// @OPTIMIZE: force int division to round in the right direction... is this necessary on x86?
		var err = Math.abs(dy) * (x - x0);
		var off = Std.int(err / adx);
		return dy < 0 ? (y0 - off) : (y0 + off);
	}

	public static inline function emptyFloatVector(len:Int) {
		var vec = new Vector<Float>(len);
		#if neko
		for (i in 0...len) {
			vec[i] = 0;
		}
		#end
		return vec;
	}

	static public function copyVector(source:Vector<Float>):Vector<Float> {
		var dest:Vector<Float> = new Vector<Float>(source.length);
		for (i in 0...source.length) {
			dest[i] = source[i];
		}
		return dest;
	}
}
594
Kha/Sources/kha/audio2/ogg/vorbis/data/Codebook.hx
Normal file
@ -0,0 +1,594 @@
package kha.audio2.ogg.vorbis.data;

import haxe.ds.Vector;
import haxe.io.Bytes;
import haxe.io.Input;
import kha.audio2.ogg.tools.MathTools;
import kha.audio2.ogg.vorbis.data.ReaderError.ReaderErrorType;
import kha.audio2.ogg.vorbis.VorbisDecodeState;

/**
 * ...
 * @author shohei909
 */
class Codebook
{
	static public inline var NO_CODE = 255;

	public var dimensions:Int;
	public var entries:Int;
	public var codewordLengths:Vector<Int>; //uint8*
	public var minimumValue:Float;
	public var deltaValue:Float;
	public var valueBits:Int; //uint8
	public var lookupType:Int; //uint8
	public var sequenceP:Bool; //uint8
	public var sparse:Bool; //uint8
	public var lookupValues:UInt; //uint32
	public var multiplicands:Vector<Float>; // codetype *
	public var codewords:Vector<UInt>; //uint32*
	public var fastHuffman:Vector<Int>; //[FAST_HUFFMAN_TABLE_SIZE];
	public var sortedCodewords:Array<UInt>; //uint32*
	public var sortedValues:Vector<Int>;
	public var sortedEntries:Int;

	public function new() {
	}
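	// Parses one codebook from the setup header, starting with the 24-bit
	// sync pattern 0x42 0x43 0x56 ("BCV") that prefixes every codebook.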
	static public function read(decodeState:VorbisDecodeState):Codebook {
		var c = new Codebook();
		if (decodeState.readBits(8) != 0x42 || decodeState.readBits(8) != 0x43 || decodeState.readBits(8) != 0x56) {
			throw new ReaderError(ReaderErrorType.INVALID_SETUP);
		}

		var x = decodeState.readBits(8);
		c.dimensions = (decodeState.readBits(8) << 8) + x;

		var x = decodeState.readBits(8);
		var y = decodeState.readBits(8);
		c.entries = (decodeState.readBits(8) << 16) + (y << 8) + x;
		var ordered = decodeState.readBits(1);
		c.sparse = (ordered != 0) ? false : (decodeState.readBits(1) != 0);

		var lengths = new Vector(c.entries);
		if (!c.sparse) {
			c.codewordLengths = lengths;
		}

		var total = 0;

		if (ordered != 0) {
			var currentEntry = 0;
			var currentLength = decodeState.readBits(5) + 1;

			while (currentEntry < c.entries) {
				var limit = c.entries - currentEntry;
				var n = decodeState.readBits(MathTools.ilog(limit));
				if (currentEntry + n > c.entries) {
					throw new ReaderError(ReaderErrorType.INVALID_SETUP, "codebook entries");
				}
				for (i in 0...n) {
					lengths.set(currentEntry + i, currentLength);
				}
				currentEntry += n;
				currentLength++;
			}
		} else {
			for (j in 0...c.entries) {
				var present = (c.sparse) ? decodeState.readBits(1) : 1;
				if (present != 0) {
					lengths.set(j, decodeState.readBits(5) + 1);
					total++;
				} else {
					lengths.set(j, NO_CODE);
				}
			}
		}

		if (c.sparse && total >= (c.entries >> 2)) {
			c.codewordLengths = lengths;
			c.sparse = false;
		}

		c.sortedEntries = if (c.sparse) {
			total;
		} else {
			var sortedCount = 0;
			for (j in 0...c.entries) {
				var l = lengths.get(j);
				if (l > Setting.FAST_HUFFMAN_LENGTH && l != NO_CODE) {
					++sortedCount;
				}
			}
			sortedCount;
		}

		var values:Vector<UInt> = null;

		if (!c.sparse) {
			c.codewords = new Vector<UInt>(c.entries);
		} else {
			if (c.sortedEntries != 0) {
				c.codewordLengths = new Vector(c.sortedEntries);
				c.codewords = new Vector<UInt>(c.entries);
				values = new Vector<UInt>(c.entries);
			}

			var size:Int = c.entries + (32 + 32) * c.sortedEntries;
		}

		if (!c.computeCodewords(lengths, c.entries, values)) {
			throw new ReaderError(ReaderErrorType.INVALID_SETUP, "compute codewords");
		}

		if (c.sortedEntries != 0) {
			// allocate an extra slot for sentinels
			c.sortedCodewords = [];

			// allocate an extra slot at the front so that sortedValues[-1] is defined
			// so that we can catch that case without an extra if
			c.sortedValues = new Vector<Int>(c.sortedEntries);
			c.computeSortedHuffman(lengths, values);
		}

		if (c.sparse) {
			values = null;
			c.codewords = null;
			lengths = null;
		}

		c.computeAcceleratedHuffman();

		c.lookupType = decodeState.readBits(4);
		if (c.lookupType > 2) {
			throw new ReaderError(ReaderErrorType.INVALID_SETUP, "codebook lookup type");
		}

		if (c.lookupType > 0) {
			c.minimumValue = VorbisTools.floatUnpack(decodeState.readBits(32));
			c.deltaValue = VorbisTools.floatUnpack(decodeState.readBits(32));
			c.valueBits = decodeState.readBits(4) + 1;
			c.sequenceP = (decodeState.readBits(1) != 0);

			if (c.lookupType == 1) {
				c.lookupValues = VorbisTools.lookup1Values(c.entries, c.dimensions);
			} else {
				c.lookupValues = c.entries * c.dimensions;
			}
			var mults = new Vector<Int>(c.lookupValues);
			for (j in 0...c.lookupValues) {
				var q = decodeState.readBits(c.valueBits);
				if (q == VorbisTools.EOP) {
					throw new ReaderError(ReaderErrorType.INVALID_SETUP, "fail lookup");
				}
				mults[j] = q;
			}

			{
				c.multiplicands = new Vector(c.lookupValues);

				//STB_VORBIS_CODEBOOK_FLOATS = true
				for (j in 0...c.lookupValues) {
					c.multiplicands[j] = mults[j] * c.deltaValue + c.minimumValue;
				}
			}

			//STB_VORBIS_CODEBOOK_FLOATS = true
			if (c.lookupType == 2 && c.sequenceP) {
				for (j in 1...c.lookupValues) {
					c.multiplicands[j] = c.multiplicands[j - 1];
				}
				c.sequenceP = false;
			}
		}

		return c;
	}
inline function addEntry(huffCode:UInt, symbol:Int, count:Int, len:Int, values:Vector<UInt>)
|
||||
{
|
||||
if (!sparse) {
|
||||
codewords[symbol] = huffCode;
|
||||
} else {
|
||||
codewords[count] = huffCode;
|
||||
codewordLengths.set(count, len);
|
||||
values[count] = symbol;
|
||||
}
|
||||
}
|
||||
|
||||
inline function includeInSort(len:Int)
|
||||
{
|
||||
return if (sparse) {
|
||||
VorbisTools.assert(len != NO_CODE);
|
||||
true;
|
||||
} else if (len == NO_CODE) {
|
||||
false;
|
||||
} else if (len > Setting.FAST_HUFFMAN_LENGTH) {
|
||||
true;
|
||||
} else {
|
||||
false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function computeCodewords(len:Vector<Int>, n:Int, values:Vector<UInt>)
|
||||
{
|
||||
var available = new Vector<UInt>(32);
|
||||
for (x in 0...32) available[x] = 0;
|
||||
|
||||
// find the first entry
|
||||
var k = 0;
|
||||
while (k < n) {
|
||||
if (len.get(k) < NO_CODE) {
|
||||
break;
|
||||
}
|
||||
k++;
|
||||
}
|
||||
|
||||
if (k == n) {
|
||||
VorbisTools.assert(sortedEntries == 0);
|
||||
return true;
|
||||
}
|
||||
|
||||
var m = 0;
|
||||
|
||||
// add to the list
|
||||
addEntry(0, k, m++, len.get(k), values);
|
||||
|
||||
// add all available leaves
|
||||
var i = 0;
|
||||
|
||||
while (++i <= len.get(k)) {
|
||||
available[i] = (1:UInt) << ((32 - i):UInt);
|
||||
}
|
||||
|
||||
// note that the above code treats the first case specially,
|
||||
// but it's really the same as the following code, so they
|
||||
// could probably be combined (except the initial code is 0,
|
||||
// and I use 0 in available[] to mean 'empty')
|
||||
i = k;
|
||||
while (++i < n) {
|
||||
var z = len.get(i);
|
||||
if (z == NO_CODE) continue;
|
||||
|
||||
// find lowest available leaf (should always be earliest,
|
||||
// which is what the specification calls for)
|
||||
// note that this property, and the fact we can never have
|
||||
// more than one free leaf at a given level, isn't totally
|
||||
// trivial to prove, but it seems true and the assert never
|
||||
// fires, so!
|
||||
while (z > 0 && available[z] == 0) --z;
|
||||
if (z == 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
var res:UInt = available[z];
|
||||
available[z] = 0;
|
||||
addEntry(VorbisTools.bitReverse(res), i, m++, len.get(i), values);
|
||||
|
||||
// propogate availability up the tree
|
||||
if (z != len.get(i)) {
|
||||
var y = len.get(i);
|
||||
while (y > z) {
|
||||
VorbisTools.assert(available[y] == 0);
|
||||
available[y] = res + (1 << (32 - y));
|
||||
y--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
function computeSortedHuffman(lengths:Vector<Int>, values:Vector<UInt>)
|
||||
{
|
||||
// build a list of all the entries
|
||||
// OPTIMIZATION: don't include the short ones, since they'll be caught by FAST_HUFFMAN.
|
||||
// this is kind of a frivolous optimization--I don't see any performance improvement,
|
||||
// but it's like 4 extra lines of code, so.
|
||||
if (!sparse) {
|
||||
var k = 0;
|
||||
for (i in 0...entries) {
|
||||
if (includeInSort(lengths.get(i))) {
|
||||
sortedCodewords[k++] = VorbisTools.bitReverse(codewords[i]);
|
||||
}
|
||||
}
|
||||
VorbisTools.assert(k == sortedEntries);
|
||||
} else {
|
||||
for (i in 0...sortedEntries) {
|
||||
sortedCodewords[i] = VorbisTools.bitReverse(codewords[i]);
|
||||
}
|
||||
}
|
||||
|
||||
sortedCodewords[sortedEntries] = 0xffffffff;
|
||||
sortedCodewords.sort(VorbisTools.uintAsc);
|
||||
|
||||
var len = sparse ? sortedEntries : entries;
|
||||
// now we need to indicate how they correspond; we could either
|
||||
// #1: sort a different data structure that says who they correspond to
|
||||
// #2: for each sorted entry, search the original list to find who corresponds
|
||||
// #3: for each original entry, find the sorted entry
|
||||
// #1 requires extra storage, #2 is slow, #3 can use binary search!
|
||||
for (i in 0...len) {
|
||||
var huffLen = sparse ? lengths.get(values[i]) : lengths.get(i);
|
||||
if (includeInSort(huffLen)) {
|
||||
var code = VorbisTools.bitReverse(codewords[i]);
|
||||
var x = 0;
|
||||
var n = sortedEntries;
|
||||
while (n > 1) {
|
||||
// invariant: sc[x] <= code < sc[x+n]
|
||||
var m = x + (n >> 1);
|
||||
if (sortedCodewords[m] <= code) {
|
||||
x = m;
|
||||
n -= (n>>1);
|
||||
} else {
|
||||
n >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
//VorbisTools.assert(sortedCodewords[x] == code);
|
||||
if (sparse) {
|
||||
sortedValues[x] = values[i];
|
||||
codewordLengths.set(x, huffLen);
|
||||
} else {
|
||||
sortedValues[x] = i;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function computeAcceleratedHuffman()
|
||||
{
|
||||
fastHuffman = new Vector(Setting.FAST_HUFFMAN_TABLE_SIZE);
|
||||
fastHuffman[0] = -1;
|
||||
for (i in 0...(Setting.FAST_HUFFMAN_TABLE_SIZE)) {
|
||||
fastHuffman[i] = -1;
|
||||
}
|
||||
|
||||
var len = (sparse) ? sortedEntries : entries;
|
||||
|
||||
//STB_VORBIS_FAST_HUFFMAN_SHORT
|
||||
//if (len > 32767) len = 32767; // largest possible value we can encode!
|
||||
|
||||
for (i in 0...len) {
|
||||
if (codewordLengths[i] <= Setting.FAST_HUFFMAN_LENGTH) {
|
||||
var z:Int = (sparse) ? VorbisTools.bitReverse(sortedCodewords[i]) : codewords[i];
|
||||
// set table entries for all bit combinations in the higher bits
|
||||
while (z < Setting.FAST_HUFFMAN_TABLE_SIZE) {
|
||||
fastHuffman[z] = i;
|
||||
z += 1 << codewordLengths[i];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function codebookDecode(decodeState:VorbisDecodeState, output:Vector<Float>, offset:Int, len:Int)
|
||||
{
|
||||
var z = decodeStart(decodeState);
|
||||
var lookupValues = this.lookupValues;
|
||||
var sequenceP = this.sequenceP;
|
||||
var multiplicands = this.multiplicands;
|
||||
var minimumValue = this.minimumValue;
|
||||
|
||||
if (z < 0) {
|
||||
return false;
|
||||
}
|
||||
if (len > dimensions) {
|
||||
len = dimensions;
|
||||
}
|
||||
|
||||
// STB_VORBIS_DIVIDES_IN_CODEBOOK = true
|
||||
if (lookupType == 1) {
|
||||
var div = 1;
|
||||
var last = 0.0;
|
||||
for (i in 0...len) {
|
||||
var off = Std.int(z / div) % lookupValues;
|
||||
var val = multiplicands[off] + last;
|
||||
output[offset + i] += val;
|
||||
if (sequenceP) {
|
||||
last = val + minimumValue;
|
||||
}
|
||||
div *= lookupValues;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
z *= dimensions;
|
||||
if (sequenceP) {
|
||||
var last = 0.0;
|
||||
for (i in 0...len) {
|
||||
var val = multiplicands[z + i] + last;
|
||||
output[offset + i] += val;
|
||||
last = val + minimumValue;
|
||||
}
|
||||
} else {
|
||||
var last = 0.0;
|
||||
for (i in 0...len) {
|
||||
output[offset + i] += multiplicands[z + i] + last;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function codebookDecodeStep(decodeState:VorbisDecodeState, output:Vector<Float>, offset:Int, len:Int, step:Int)
|
||||
{
|
||||
var z = decodeStart(decodeState);
|
||||
var last = 0.0;
|
||||
if (z < 0) {
|
||||
return false;
|
||||
}
|
||||
if (len > dimensions) {
|
||||
len = dimensions;
|
||||
}
|
||||
|
||||
var lookupValues = this.lookupValues;
|
||||
var sequenceP = this.sequenceP;
|
||||
var multiplicands = this.multiplicands;
|
||||
|
||||
// STB_VORBIS_DIVIDES_IN_CODEBOOK = true
|
||||
|
||||
if (lookupType == 1) {
|
||||
var div = 1;
|
||||
for (i in 0...len) {
|
||||
var off = Std.int(z / div) % lookupValues;
|
||||
var val = multiplicands[off] + last;
|
||||
output[offset + i * step] += val;
|
||||
if (sequenceP) {
|
||||
last = val;
|
||||
}
|
||||
div *= lookupValues;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
z *= dimensions;
|
||||
for (i in 0...len) {
|
||||
var val = multiplicands[z + i] + last;
|
||||
output[offset + i * step] += val;
|
||||
if (sequenceP) {
|
||||
last = val;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
inline function decodeStart(decodeState:VorbisDecodeState)
|
||||
{
|
||||
return decodeState.decode(this);
|
||||
|
||||
//var z = -1;
|
||||
//// type 0 is only legal in a scalar context
|
||||
//if (lookupType == 0) {
|
||||
// throw new ReaderError(INVALID_STREAM);
|
||||
//} else {
|
||||
// z = decodeState.decode(this);
|
||||
// //if (sparse) VorbisTools.assert(z < sortedEntries);
|
||||
// if (z < 0) { // check for VorbisTools.EOP
|
||||
// if (decodeState.isLastByte()) {
|
||||
// return z;
|
||||
// } else {
|
||||
// throw new ReaderError(INVALID_STREAM);
|
||||
// }
|
||||
// } else {
|
||||
// return z;
|
||||
// }
|
||||
//}
|
||||
}
|
||||
|
||||
static var delay = 0;
|
||||
|
||||
public function decodeDeinterleaveRepeat(decodeState:VorbisDecodeState, residueBuffers:Vector<Vector<Float>>, ch:Int, cInter:Int, pInter:Int, len:Int, totalDecode:Int)
|
||||
{
|
||||
var effective = dimensions;
|
||||
|
||||
// type 0 is only legal in a scalar context
|
||||
if (lookupType == 0) {
|
||||
throw new ReaderError(INVALID_STREAM);
|
||||
}
|
||||
|
||||
var multiplicands = this.multiplicands;
|
||||
var sequenceP = this.sequenceP;
|
||||
var lookupValues = this.lookupValues;
|
||||
|
||||
while (totalDecode > 0) {
|
||||
var last = 0.0;
|
||||
var z = decodeState.decode(this);
|
||||
|
||||
if (z < 0) {
|
||||
if (decodeState.isLastByte()) {
|
||||
return null;
|
||||
}
|
||||
throw new ReaderError(INVALID_STREAM);
|
||||
}
|
||||
|
||||
// if this will take us off the end of the buffers, stop short!
|
||||
// we check by computing the length of the virtual interleaved
|
||||
// buffer (len*ch), our current offset within it (pInter*ch)+(cInter),
|
||||
// and the length we'll be using (effective)
|
||||
if (cInter + pInter * ch + effective > len * ch) {
|
||||
effective = len * ch - (pInter * ch - cInter);
|
||||
}
|
||||
|
||||
if (lookupType == 1) {
|
||||
var div = 1;
|
||||
if (sequenceP) {
|
||||
for (i in 0...effective) {
|
||||
var off = Std.int(z / div) % lookupValues;
|
||||
var val = multiplicands[off] + last;
|
||||
residueBuffers[cInter][pInter] += val;
|
||||
if (++cInter == ch) {
|
||||
cInter = 0;
|
||||
++pInter;
|
||||
}
|
||||
last = val;
|
||||
div *= lookupValues;
|
||||
}
|
||||
} else {
|
||||
for (i in 0...effective) {
|
||||
var off = Std.int(z / div) % lookupValues;
|
||||
var val = multiplicands[off] + last;
|
||||
residueBuffers[cInter][pInter] += val;
|
||||
if (++cInter == ch) {
|
||||
cInter = 0;
|
||||
++pInter;
|
||||
}
|
||||
div *= lookupValues;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
z *= dimensions;
|
||||
if (sequenceP) {
|
||||
for (i in 0...effective) {
|
||||
var val = multiplicands[z + i] + last;
|
||||
residueBuffers[cInter][pInter] += val;
|
||||
if (++cInter == ch) {
|
||||
cInter = 0;
|
||||
++pInter;
|
||||
}
|
||||
last = val;
|
||||
}
|
||||
} else {
|
||||
for (i in 0...effective) {
|
||||
var val = multiplicands[z + i] + last;
|
||||
residueBuffers[cInter][pInter] += val;
|
||||
if (++cInter == ch) {
|
||||
cInter = 0;
|
||||
++pInter;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
totalDecode -= effective;
|
||||
}
|
||||
|
||||
return {
|
||||
cInter : cInter,
|
||||
pInter : pInter
|
||||
}
|
||||
}
|
||||
|
||||
public function residueDecode(decodeState:VorbisDecodeState, target:Vector<Float>, offset:Int, n:Int, rtype:Int)
|
||||
{
|
||||
if (rtype == 0) {
|
||||
var step = Std.int(n / dimensions);
|
||||
for (k in 0...step) {
|
||||
if (!codebookDecodeStep(decodeState, target, offset + k, n-offset-k, step)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
var k = 0;
|
||||
while(k < n) {
|
||||
if (!codebookDecode(decodeState, target, offset, n-k)) {
|
||||
return false;
|
||||
}
|
||||
k += dimensions;
|
||||
offset += dimensions;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
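The table built by computeAcceleratedHuffman() resolves any codeword of at most Setting.FAST_HUFFMAN_LENGTH bits with a single array read; longer codes fall through to the sorted, bit-reversed codeword list. The consuming side lives in VorbisDecodeState.decode, which is not part of this commit, so the following is only a sketch with assumed peekBits/consumeBits helpers:

	// Sketch only -- peekBits/consumeBits are assumed names, not part of this API.
	var bits = peekBits(Setting.FAST_HUFFMAN_LENGTH) & Setting.FAST_HUFFMAN_TABLE_MASK;
	var symbol = codebook.fastHuffman[bits];
	if (symbol >= 0) {
		consumeBits(codebook.codewordLengths[symbol]); // short code: resolved in O(1)
	} else {
		// codeword longer than FAST_HUFFMAN_LENGTH bits: binary-search the
		// bit-reversed codewords prepared by computeSortedHuffman(), using
		// the 0xffffffff sentinel appended there as the upper bound
	}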
130
Kha/Sources/kha/audio2/ogg/vorbis/data/Comment.hx
Normal file
130
Kha/Sources/kha/audio2/ogg/vorbis/data/Comment.hx
Normal file
@ -0,0 +1,130 @@
package kha.audio2.ogg.vorbis.data;

/**
 * ...
 * @author shohei909
 */
class Comment {
	public var data(default, null):Map<String, Array<String>>;

	public var title(get, never):String;
	function get_title() {
		return getString("title");
	}

	public var loopStart(get, never):Null<Int>;
	function get_loopStart() {
		return Std.parseInt(getString("loopstart"));
	}

	public var loopLength(get, never):Null<Int>;
	function get_loopLength() {
		return Std.parseInt(getString("looplength"));
	}

	public var version(get, never):String;
	function get_version() {
		return getString("version");
	}

	public var album(get, never):String;
	function get_album() {
		return getString("album");
	}

	public var organization(get, never):String;
	function get_organization() {
		return getString("organization");
	}

	public var tracknumber(get, never):String;
	function get_tracknumber() {
		return getString("tracknumber");
	}

	public var performer(get, never):String;
	function get_performer() {
		return getString("performer");
	}

	public var copyright(get, never):String;
	function get_copyright() {
		return getString("copyright");
	}

	public var license(get, never):String;
	function get_license() {
		return getString("license");
	}

	public var artist(get, never):String;
	function get_artist() {
		return getString("artist");
	}

	public var description(get, never):String;
	function get_description() {
		return getString("description");
	}

	public var genre(get, never):String;
	function get_genre() {
		return getString("genre");
	}

	public var date(get, never):String;
	function get_date() {
		return getString("date");
	}

	public var location(get, never):String;
	function get_location() {
		return getString("location");
	}

	public var contact(get, never):String;
	function get_contact() {
		return getString("contact");
	}

	public var isrc(get, never):String;
	function get_isrc() {
		return getString("isrc");
	}

	public var artists(get, never):Array<String>;
	function get_artists() {
		return getArray("artist");
	}

	public function new() {
		data = new Map();
	}

	public function add(key:String, value:String) {
		key = key.toLowerCase();
		if (data.exists(key)) {
			data[key].push(value);
		} else {
			data[key] = [value];
		}
	}

	public function getString(key:String) {
		key = key.toLowerCase();
		return if (data.exists(key)) {
			data[key][0];
		} else {
			null;
		}
	}

	public function getArray(key:String) {
		key = key.toLowerCase();
		return if (data.exists(key)) {
			data[key];
		} else {
			null;
		}
	}
}
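Both add() and getString() lowercase their key, so tag lookup is case-insensitive. A minimal usage sketch, assuming a Comment instance obtained from a parsed header (Header.read appears later in this commit):

	var comment = header.comment;
	trace(comment.title);   // first TITLE= tag, or null when absent
	trace(comment.artists); // every ARTIST= tag, or null
	if (comment.loopStart != null) {
		trace("loop starts at sample " + comment.loopStart + ", length " + comment.loopLength);
	}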
151
Kha/Sources/kha/audio2/ogg/vorbis/data/Floor.hx
Normal file
151
Kha/Sources/kha/audio2/ogg/vorbis/data/Floor.hx
Normal file
@ -0,0 +1,151 @@
package kha.audio2.ogg.vorbis.data;
import haxe.ds.Vector;
import haxe.io.Input;
import kha.audio2.ogg.vorbis.data.ReaderError;
import kha.audio2.ogg.vorbis.VorbisDecodeState;

/**
 * ...
 * @author shohei909
 */
class Floor {
	public var floor0:Floor0;
	public var floor1:Floor1;
	public var type:Int;

	function new() {
	}

	public static function read(decodeState:VorbisDecodeState, codebooks:Vector<Codebook>):Floor {
		var floor = new Floor();

		floor.type = decodeState.readBits(16);
		if (floor.type > 1) {
			throw new ReaderError(INVALID_SETUP);
		}
		if (floor.type == 0) {
			var g = floor.floor0 = new Floor0();
			g.order = decodeState.readBits(8);
			g.rate = decodeState.readBits(16);
			g.barkMapSize = decodeState.readBits(16);
			g.amplitudeBits = decodeState.readBits(6);
			g.amplitudeOffset = decodeState.readBits(8);
			g.numberOfBooks = decodeState.readBits(4) + 1;
			g.bookList = new Vector(g.numberOfBooks); // allocate before filling; the original read into an unallocated vector
			for (j in 0...g.numberOfBooks) {
				g.bookList[j] = decodeState.readBits(8);
			}
			throw new ReaderError(FEATURE_NOT_SUPPORTED);
		} else {
			var p = new Array<IntPoint>();
			var g = floor.floor1 = new Floor1();
			var maxClass = -1;
			g.partitions = decodeState.readBits(5);
			g.partitionClassList = new Vector(g.partitions);
			for (j in 0...g.partitions) {
				g.partitionClassList[j] = decodeState.readBits(4);
				if (g.partitionClassList[j] > maxClass) {
					maxClass = g.partitionClassList[j];
				}
			}
			g.classDimensions = new Vector(maxClass + 1);
			g.classMasterbooks = new Vector(maxClass + 1);
			g.classSubclasses = new Vector(maxClass + 1);
			g.subclassBooks = new Vector(maxClass + 1);
			for (j in 0...(maxClass + 1)) {
				g.classDimensions[j] = decodeState.readBits(3) + 1;
				g.classSubclasses[j] = decodeState.readBits(2);
				if (g.classSubclasses[j] != 0) {
					g.classMasterbooks[j] = decodeState.readBits(8);
					if (g.classMasterbooks[j] >= codebooks.length) {
						throw new ReaderError(INVALID_SETUP);
					}
				}

				var kl = (1 << g.classSubclasses[j]);
				g.subclassBooks[j] = new Vector(kl);
				for (k in 0...kl) {
					g.subclassBooks[j][k] = decodeState.readBits(8) - 1;
					if (g.subclassBooks[j][k] >= codebooks.length) {
						throw new ReaderError(INVALID_SETUP);
					}
				}
			}

			g.floor1Multiplier = decodeState.readBits(2) + 1;
			g.rangebits = decodeState.readBits(4);
			g.xlist = new Vector(31 * 8 + 2);
			g.xlist[0] = 0;
			g.xlist[1] = 1 << g.rangebits;
			g.values = 2;
			for (j in 0...g.partitions) {
				var c = g.partitionClassList[j];
				for (k in 0...g.classDimensions[c]) {
					g.xlist[g.values] = decodeState.readBits(g.rangebits);
					g.values++;
				}
			}

			// precompute the sorting
			for (j in 0...g.values) {
				p.push(new IntPoint());
				p[j].x = g.xlist[j];
				p[j].y = j;
			}

			p.sort(VorbisTools.pointCompare);

			g.sortedOrder = new Vector(g.values);
			for (j in 0...g.values) {
				g.sortedOrder[j] = p[j].y;
			}

			g.neighbors = new Vector(g.values);
			// precompute the neighbors (only [0] and [1] of each entry are used)
			for (j in 2...g.values) {
				var ne = VorbisTools.neighbors(g.xlist, j);
				g.neighbors[j] = new Vector(g.values);
				g.neighbors[j][0] = ne.low;
				g.neighbors[j][1] = ne.high;
			}
		}

		return floor;
	}
}

class Floor0 {
	public var order:Int; //uint8
	public var rate:Int; //uint16
	public var barkMapSize:Int; //uint16
	public var amplitudeBits:Int; //uint8
	public var amplitudeOffset:Int; //uint8
	public var numberOfBooks:Int; //uint8
	public var bookList:Vector<UInt>; //uint8 [16] varies

	public function new() {
	}
}

class Floor1 {
	public var partitions:Int; // uint8
	public var partitionClassList:Vector<Int>; // uint8 varies
	public var classDimensions:Vector<Int>; // uint8 [16] varies
	public var classSubclasses:Vector<Int>; // uint8 [16] varies
	public var classMasterbooks:Vector<Int>; // uint8 [16] varies
	public var subclassBooks:Vector<Vector<Int>>; //int 16 [16][8] varies
	public var xlist:Vector<Int>; //uint16 [31*8+2] varies
	public var sortedOrder:Vector<Int>; //uint8 [31 * 8 + 2];
	public var neighbors:Vector<Vector<Int>>; //uint8[31 * 8 + 2][2];
	public var floor1Multiplier:Int;
	public var rangebits:Int;
	public var values:Int;

	public function new() {
	}
}
213
Kha/Sources/kha/audio2/ogg/vorbis/data/Header.hx
Normal file
213
Kha/Sources/kha/audio2/ogg/vorbis/data/Header.hx
Normal file
@ -0,0 +1,213 @@
package kha.audio2.ogg.vorbis.data;
import haxe.ds.Vector;
import haxe.io.BytesInput;
import haxe.io.BytesOutput;
import haxe.io.Input;
import haxe.io.Output;
import kha.audio2.ogg.vorbis.data.Comment;
import kha.audio2.ogg.vorbis.data.Page.PageFlag;
import kha.audio2.ogg.vorbis.data.ReaderError.ReaderErrorType;
import kha.audio2.ogg.vorbis.VorbisDecodeState;

/**
 * ...
 * @author shohei909
 */
class Header {
	static public inline var PACKET_ID = 1;
	static public inline var PACKET_COMMENT = 3;
	static public inline var PACKET_SETUP = 5;

	public var maximumBitRate(default, null):UInt;
	public var nominalBitRate(default, null):UInt;
	public var minimumBitRate(default, null):UInt;
	public var sampleRate(default, null):UInt;
	public var channel(default, null):Int;
	public var blocksize0(default, null):Int;
	public var blocksize1(default, null):Int;
	public var codebooks(default, null):Vector<Codebook>;
	public var floorConfig(default, null):Vector<Floor>;
	public var residueConfig(default, null):Vector<Residue>;
	public var mapping(default, null):Vector<Mapping>;
	public var modes(default, null):Vector<Mode>; // [64] varies
	public var comment(default, null):Comment;
	public var vendor(default, null):String;

	function new() {
	}

	static public function read(decodeState:VorbisDecodeState):Header {
		var page = decodeState.page;
		page.start(decodeState);

		if ((page.flag & PageFlag.FIRST_PAGE) == 0) {
			throw new ReaderError(INVALID_FIRST_PAGE, "not firstPage");
		}
		if ((page.flag & PageFlag.LAST_PAGE) != 0) {
			throw new ReaderError(INVALID_FIRST_PAGE, "lastPage");
		}
		if ((page.flag & PageFlag.CONTINUED_PACKET) != 0) {
			throw new ReaderError(INVALID_FIRST_PAGE, "continuedPacket");
		}

		decodeState.firstPageValidate();
		if (decodeState.readByte() != PACKET_ID) {
			throw new ReaderError(INVALID_FIRST_PAGE, "decodeState head");
		}

		// vorbis header
		decodeState.vorbisValidate();

		// vorbisVersion
		var version = decodeState.readInt32();
		if (version != 0) {
			throw new ReaderError(INVALID_FIRST_PAGE, "vorbis version : " + version);
		}

		var header = new Header();

		header.channel = decodeState.readByte();
		if (header.channel == 0) {
			throw new ReaderError(INVALID_FIRST_PAGE, "no channel");
		} else if (header.channel > Setting.MAX_CHANNELS) {
			throw new ReaderError(TOO_MANY_CHANNELS, "too many channels");
		}

		header.sampleRate = decodeState.readInt32();
		if (header.sampleRate == 0) {
			throw new ReaderError(INVALID_FIRST_PAGE, "no sampling rate");
		}

		header.maximumBitRate = decodeState.readInt32();
		header.nominalBitRate = decodeState.readInt32();
		header.minimumBitRate = decodeState.readInt32();

		var x = decodeState.readByte();
		var log0 = x & 15;
		var log1 = x >> 4;
		header.blocksize0 = 1 << log0;
		header.blocksize1 = 1 << log1;
		if (log0 < 6 || log0 > 13) {
			throw new ReaderError(INVALID_SETUP);
		}
		if (log1 < 6 || log1 > 13) {
			throw new ReaderError(INVALID_SETUP);
		}
		if (log0 > log1) {
			throw new ReaderError(INVALID_SETUP);
		}

		// framingFlag
		var x = decodeState.readByte();
		if (x & 1 == 0) {
			throw new ReaderError(INVALID_FIRST_PAGE);
		}

		// comment fields
		decodeState.page.start(decodeState);
		decodeState.startPacket();

		var len = 0;
		var output = new BytesOutput();
		while ((len = decodeState.next()) != 0) {
			output.write(decodeState.readBytes(len));
			decodeState.bytesInSeg = 0;
		}

		{
			var packetInput = new BytesInput(output.getBytes());
			packetInput.readByte(); // packet type
			packetInput.read(6); // "vorbis"

			var vendorLength:UInt = packetInput.readInt32();
			header.vendor = packetInput.readString(vendorLength);
			header.comment = new Comment();

			var commentCount = packetInput.readInt32();

			for (i in 0...commentCount) {
				var n = packetInput.readInt32();
				var str = packetInput.readString(n);
				var splitter = str.indexOf("=");
				if (splitter != -1) {
					header.comment.add(str.substring(0, splitter), str.substring(splitter + 1));
				}
			}

			var x = packetInput.readByte();
			if (x & 1 == 0) {
				throw new ReaderError(ReaderErrorType.INVALID_SETUP);
			}
		}

		// third packet!
		decodeState.startPacket();

		if (decodeState.readPacket() != PACKET_SETUP) {
			throw new ReaderError(ReaderErrorType.INVALID_SETUP, "setup packet");
		}

		decodeState.vorbisValidate();

		// codebooks
		var codebookCount = decodeState.readBits(8) + 1;
		header.codebooks = new Vector(codebookCount);
		for (i in 0...codebookCount) {
			header.codebooks[i] = Codebook.read(decodeState);
		}

		// time domain transfers (not used)
		x = decodeState.readBits(6) + 1;
		for (i in 0...x) {
			if (decodeState.readBits(16) != 0) {
				throw new ReaderError(INVALID_SETUP);
			}
		}

		// Floors
		var floorCount = decodeState.readBits(6) + 1;
		header.floorConfig = new Vector(floorCount);
		for (i in 0...floorCount) {
			header.floorConfig[i] = Floor.read(decodeState, header.codebooks);
		}

		// Residue
		var residueCount = decodeState.readBits(6) + 1;
		header.residueConfig = new Vector(residueCount);
		for (i in 0...residueCount) {
			header.residueConfig[i] = Residue.read(decodeState, header.codebooks);
		}

		// Mapping
		var mappingCount = decodeState.readBits(6) + 1;
		header.mapping = new Vector(mappingCount);
		for (i in 0...mappingCount) {
			var map = Mapping.read(decodeState, header.channel);
			header.mapping[i] = map;
			for (j in 0...map.submaps) {
				if (map.submapFloor[j] >= header.floorConfig.length) {
					throw new ReaderError(INVALID_SETUP);
				}
				if (map.submapResidue[j] >= header.residueConfig.length) {
					throw new ReaderError(INVALID_SETUP);
				}
			}
		}

		var modeCount = decodeState.readBits(6) + 1;
		header.modes = new Vector(modeCount);
		for (i in 0...modeCount) {
			var mode = Mode.read(decodeState);
			header.modes[i] = mode;
			if (mode.mapping >= header.mapping.length) {
				throw new ReaderError(INVALID_SETUP);
			}
		}

		decodeState.flushPacket();

		return header;
	}
}
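Header.read consumes the three mandatory Vorbis header packets in order (identification, comment, setup) and leaves the decode state positioned at the start of the audio packets. A usage sketch; how a VorbisDecodeState is constructed is not shown in this diff, so the constructor call below is an assumption:

	var decodeState = new VorbisDecodeState(input); // assumed constructor; not part of this diff
	var header = Header.read(decodeState);
	trace(header.sampleRate + " Hz, " + header.channel + " channel(s)");
	trace("encoder vendor: " + header.vendor);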
15
Kha/Sources/kha/audio2/ogg/vorbis/data/IntPoint.hx
Normal file
15
Kha/Sources/kha/audio2/ogg/vorbis/data/IntPoint.hx
Normal file
@ -0,0 +1,15 @@
package kha.audio2.ogg.vorbis.data;

/**
 * ...
 * @author shohei909
 */
class IntPoint {
	public var x:Int;
	public var y:Int;

	public function new() {
	}
}
127
Kha/Sources/kha/audio2/ogg/vorbis/data/Mapping.hx
Normal file
127
Kha/Sources/kha/audio2/ogg/vorbis/data/Mapping.hx
Normal file
@ -0,0 +1,127 @@
package kha.audio2.ogg.vorbis.data;
import haxe.ds.Vector;
import haxe.io.Input;
import kha.audio2.ogg.tools.MathTools;
import kha.audio2.ogg.vorbis.VorbisDecodeState;

class Mapping {
	public var couplingSteps:Int; // uint16
	public var chan:Vector<MappingChannel>;
	public var submaps:Int; // uint8
	public var submapFloor:Vector<Int>; // uint8 varies
	public var submapResidue:Vector<Int>; // uint8 varies

	public function new() {
	}

	public static function read(decodeState:VorbisDecodeState, channels:Int):Mapping {
		var m = new Mapping();
		var mappingType = decodeState.readBits(16);
		if (mappingType != 0) {
			throw new ReaderError(INVALID_SETUP, "mapping type " + mappingType);
		}

		m.chan = new Vector(channels);
		for (j in 0...channels) {
			m.chan[j] = new MappingChannel();
		}

		if (decodeState.readBits(1) != 0) {
			m.submaps = decodeState.readBits(4) + 1;
		} else {
			m.submaps = 1;
		}

		//if (m.submaps > maxSubmaps) {
		//	maxSubmaps = m.submaps;
		//}

		if (decodeState.readBits(1) != 0) {
			m.couplingSteps = decodeState.readBits(8) + 1;
			for (k in 0...m.couplingSteps) {
				m.chan[k].magnitude = decodeState.readBits(MathTools.ilog(channels - 1));
				m.chan[k].angle = decodeState.readBits(MathTools.ilog(channels - 1));
				if (m.chan[k].magnitude >= channels) {
					throw new ReaderError(INVALID_SETUP);
				}
				if (m.chan[k].angle >= channels) {
					throw new ReaderError(INVALID_SETUP);
				}
				if (m.chan[k].magnitude == m.chan[k].angle) {
					throw new ReaderError(INVALID_SETUP);
				}
			}
		} else {
			m.couplingSteps = 0;
		}

		// reserved field
		if (decodeState.readBits(2) != 0) {
			throw new ReaderError(INVALID_SETUP);
		}
		if (m.submaps > 1) {
			for (j in 0...channels) {
				m.chan[j].mux = decodeState.readBits(4);
				if (m.chan[j].mux >= m.submaps) {
					throw new ReaderError(INVALID_SETUP);
				}
			}
		} else {
			for (j in 0...channels) {
				m.chan[j].mux = 0;
			}
		}

		m.submapFloor = new Vector(m.submaps);
		m.submapResidue = new Vector(m.submaps);

		for (j in 0...m.submaps) {
			decodeState.readBits(8); // discard
			m.submapFloor[j] = decodeState.readBits(8);
			m.submapResidue[j] = decodeState.readBits(8);
		}

		return m;
	}

	public function doFloor(floors:Vector<Floor>, i:Int, n:Int, target:Vector<Float>, finalY:Array<Int>, step2Flag:Vector<Bool>) {
		var n2 = n >> 1;
		var s = chan[i].mux; // the original declared 'floor' twice here, which does not compile
		var floor = floors[submapFloor[s]];
		if (floor.type == 0) {
			throw new ReaderError(INVALID_STREAM);
		} else {
			var g = floor.floor1;
			var lx = 0, ly = finalY[0] * g.floor1Multiplier;
			for (q in 1...g.values) {
				var j = g.sortedOrder[q];
				if (finalY[j] >= 0) {
					var hy = finalY[j] * g.floor1Multiplier;
					var hx = g.xlist[j];
					VorbisTools.drawLine(target, lx, ly, hx, hy, n2);
					lx = hx;
					ly = hy;
				}
			}
			if (lx < n2) {
				// optimization of: drawLine(target, lx, ly, n, ly, n2);
				for (j in lx...n2) {
					target[j] *= VorbisTools.INVERSE_DB_TABLE[ly];
				}
			}
		}
	}
}

class MappingChannel {
	public var magnitude:Int; // uint8
	public var angle:Int; // uint8
	public var mux:Int; // uint8

	public function new() {
	}
}
29
Kha/Sources/kha/audio2/ogg/vorbis/data/Mode.hx
Normal file
29
Kha/Sources/kha/audio2/ogg/vorbis/data/Mode.hx
Normal file
@ -0,0 +1,29 @@
package kha.audio2.ogg.vorbis.data;
import haxe.io.Input;
import kha.audio2.ogg.vorbis.VorbisDecodeState;

class Mode {
	public var blockflag:Bool; // uint8
	public var mapping:Int; // uint8
	public var windowtype:Int; // uint16
	public var transformtype:Int; // uint16

	public function new() {
	}

	public static function read(decodeState:VorbisDecodeState) {
		var m = new Mode();
		m.blockflag = (decodeState.readBits(1) != 0);
		m.windowtype = decodeState.readBits(16);
		m.transformtype = decodeState.readBits(16);
		m.mapping = decodeState.readBits(8);
		if (m.windowtype != 0) {
			throw new ReaderError(INVALID_SETUP);
		}
		if (m.transformtype != 0) {
			throw new ReaderError(INVALID_SETUP);
		}
		return m;
	}
}
60
Kha/Sources/kha/audio2/ogg/vorbis/data/Page.hx
Normal file
60
Kha/Sources/kha/audio2/ogg/vorbis/data/Page.hx
Normal file
@ -0,0 +1,60 @@
package kha.audio2.ogg.vorbis.data;
import haxe.io.Bytes;
import haxe.io.Input;
import kha.audio2.ogg.vorbis.data.ReaderError.ReaderErrorType;
import kha.audio2.ogg.vorbis.VorbisDecodeState;

/**
 * ...
 * @author shohei909
 */
class Page {
	public var flag(default, null):Int;

	public function new() {
	}

	public function clone() {
		var page = new Page();
		page.flag = flag;
		return page;
	}

	// startPage
	public function start(decodeState:VorbisDecodeState) {
		decodeState.capturePattern();
		startWithoutCapturePattern(decodeState);
	}

	// startPageNoCapturePattern
	public function startWithoutCapturePattern(decodeState:VorbisDecodeState) {
		var version = decodeState.readByte();
		if (version != 0) {
			throw new ReaderError(ReaderErrorType.INVALID_STREAM_STRUCTURE_VERSION, "" + version);
		}

		this.flag = decodeState.readByte();
		var loc0 = decodeState.readInt32();
		var loc1 = decodeState.readInt32();

		// input serial number -- vorbis doesn't interleave, so discard
		decodeState.readInt32();
		//if (this.serial != get32(f)) throw new ReaderError(ReaderErrorType.incorrectStreamSerialNumber);

		// page sequence number
		decodeState.readInt32();

		// CRC32
		decodeState.readInt32();

		// pageSegments
		decodeState.setup(loc0, loc1);
	}
}

class PageFlag {
	static public inline var CONTINUED_PACKET = 1;
	static public inline var FIRST_PAGE = 2;
	static public inline var LAST_PAGE = 4;
}
18
Kha/Sources/kha/audio2/ogg/vorbis/data/ProbedPage.hx
Normal file
18
Kha/Sources/kha/audio2/ogg/vorbis/data/ProbedPage.hx
Normal file
@ -0,0 +1,18 @@
package kha.audio2.ogg.vorbis.data;

/**
 * ...
 * @author shohei909
 */
class ProbedPage {
	public var pageStart:Int;
	public var pageEnd:Int;
	public var afterPreviousPageStart:Int;
	public var firstDecodedSample:Null<Int>;
	public var lastDecodedSample:Null<Int>;

	public function new() {
	}
}
53
Kha/Sources/kha/audio2/ogg/vorbis/data/ReaderError.hx
Normal file
53
Kha/Sources/kha/audio2/ogg/vorbis/data/ReaderError.hx
Normal file
@ -0,0 +1,53 @@
package kha.audio2.ogg.vorbis.data;
import haxe.PosInfos;

/**
 * ...
 * @author shohei909
 */
class ReaderError {
	public var type(default, null):ReaderErrorType;
	public var message(default, null):String;
	public var posInfos(default, null):PosInfos;

	public function new(type:ReaderErrorType, ?message:String = "", ?posInfos:PosInfos) {
		this.type = type;
		this.message = message;
		this.posInfos = posInfos;
	}
}

enum ReaderErrorType {
	NEED_MORE_DATA; // not a real error

	INVALID_API_MIXING; // can't mix API modes
	OUTOFMEM; // not enough memory
	FEATURE_NOT_SUPPORTED; // uses floor 0
	TOO_MANY_CHANNELS; // STB_VORBIS_MAX_CHANNELS is too small
	FILE_OPEN_FAILURE; // fopen() failed
	SEEK_WITHOUT_LENGTH; // can't seek in unknown-length file

	UNEXPECTED_EOF; // file is truncated?
	SEEK_INVALID; // seek past EOF

	// decoding errors (corrupt/invalid input) -- you probably
	// don't care about the exact details of these

	// vorbis errors:
	INVALID_SETUP;
	INVALID_STREAM;

	// ogg errors:
	MISSING_CAPTURE_PATTERN;
	INVALID_STREAM_STRUCTURE_VERSION;
	CONTINUED_PACKET_FLAG_INVALID;
	INCORRECT_STREAM_SERIAL_NUMBER;
	INVALID_FIRST_PAGE;
	BAD_PACKET_TYPE;
	CANT_FIND_LAST_PAGE;
	SEEK_FAILED;

	OTHER;
}
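Since the reader signals every failure by throwing ReaderError, callers separate the recoverable NEED_MORE_DATA case from genuine stream errors by switching on type. A minimal sketch:

	try {
		var header = Header.read(decodeState);
	} catch (e:ReaderError) {
		switch (e.type) {
			case NEED_MORE_DATA:
				trace("not an error: feed more input and retry");
			case UNEXPECTED_EOF:
				trace("file is truncated?");
			default:
				trace("decode failed: " + e.type + " " + e.message);
		}
	}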
298
Kha/Sources/kha/audio2/ogg/vorbis/data/Residue.hx
Normal file
298
Kha/Sources/kha/audio2/ogg/vorbis/data/Residue.hx
Normal file
@ -0,0 +1,298 @@
package kha.audio2.ogg.vorbis.data;
import haxe.ds.Vector;
import haxe.io.Input;
import kha.audio2.ogg.vorbis.VorbisDecodeState;

/**
 * ...
 * @author shohei909
 */
class Residue {
	public var begin(default, null):UInt; // uint32
	public var end(default, null):UInt; // uint32
	public var partSize(default, null):UInt; // uint32
	public var classifications(default, null):Int; // uint8
	public var classbook(default, null):Int; // uint8
	public var classdata(default, null):Vector<Vector<Int>>; //uint8 **
	public var residueBooks(default, null):Vector<Vector<Int>>; //int16 (*)[8]
	public var type(default, null):Int;

	public function new() {
	}

	public static function read(decodeState:VorbisDecodeState, codebooks:Vector<Codebook>):Residue {
		var r = new Residue();
		r.type = decodeState.readBits(16);
		if (r.type > 2) {
			throw new ReaderError(INVALID_SETUP);
		}

		var residueCascade = new Vector<Int>(64);
		r.begin = decodeState.readBits(24);
		r.end = decodeState.readBits(24);
		r.partSize = decodeState.readBits(24) + 1;
		var classifications = r.classifications = decodeState.readBits(6) + 1;
		r.classbook = decodeState.readBits(8);

		for (j in 0...r.classifications) {
			var highBits = 0;
			var lowBits = decodeState.readBits(3);
			if (decodeState.readBits(1) != 0) {
				highBits = decodeState.readBits(5);
			}
			residueCascade[j] = highBits * 8 + lowBits;
		}

		r.residueBooks = new Vector(r.classifications);
		for (j in 0...r.classifications) {
			r.residueBooks[j] = new Vector(8);
			for (k in 0...8) {
				if (residueCascade[j] & (1 << k) != 0) {
					r.residueBooks[j][k] = decodeState.readBits(8);
					if (r.residueBooks[j][k] >= codebooks.length) {
						throw new ReaderError(INVALID_SETUP);
					}
				} else {
					r.residueBooks[j][k] = -1;
				}
			}
		}

		// precompute the classifications[] array to avoid inner-loop mod/divide
		// call it 'classdata' since we already have classifications
		var el = codebooks[r.classbook].entries;
		var classwords = codebooks[r.classbook].dimensions;
		r.classdata = new Vector(el);

		for (j in 0...el) {
			var temp = j;
			var k = classwords;
			var cd = r.classdata[j] = new Vector(classwords);
			while (--k >= 0) {
				cd[k] = temp % classifications;
				temp = Std.int(temp / classifications);
			}
		}

		return r;
	}

	public function decode(decodeState:VorbisDecodeState, header:Header, residueBuffers:Vector<Vector<Float>>, ch:Int, n:Int, doNotDecode:Vector<Bool>, channelBuffers:Vector<Vector<Float>>) {
		// STB_VORBIS_DIVIDES_IN_RESIDUE = true
		var codebooks = header.codebooks;
		var classwords = codebooks[classbook].dimensions;
		var nRead = end - begin;
		var partSize = this.partSize;
		var partRead = Std.int(nRead / partSize);
		var classifications = new Vector<Int>(header.channel * partRead + 1); // + 1 is a hack for a possible crash in line 268 with some ogg files

		VorbisTools.stbProf(2);
		for (i in 0...ch) {
			if (!doNotDecode[i]) {
				var buffer = residueBuffers[i];
				for (j in 0...buffer.length) {
					buffer[j] = 0;
				}
			}
		}

		if (type == 2 && ch != 1) {
			for (j in 0...ch) {
				if (!doNotDecode[j]) {
					break;
				} else if (j == ch - 1) {
					return;
				}
			}

			VorbisTools.stbProf(3);
			for (pass in 0...8) {
				var pcount = 0, classSet = 0;
				if (ch == 2) {
					VorbisTools.stbProf(13);
					while (pcount < partRead) {
						var z = begin + pcount * partSize;
						var cInter = (z & 1);
						var pInter = z >> 1;
						if (pass == 0) {
							var c:Codebook = codebooks[classbook];
							var q = decodeState.decode(c);
							if (q == VorbisTools.EOP) {
								return;
							}
							var i = classwords;
							while (--i >= 0) {
								classifications[i + pcount] = q % this.classifications;
								q = Std.int(q / this.classifications);
							}
						}
						VorbisTools.stbProf(5);
						for (i in 0...classwords) {
							if (pcount >= partRead) {
								break;
							}
							var z = begin + pcount * partSize;
							var c = classifications[pcount];
							var b = residueBooks[c][pass];
							if (b >= 0) {
								var book = codebooks[b];
								VorbisTools.stbProf(20); // accounts for X time
								var result = book.decodeDeinterleaveRepeat(decodeState, residueBuffers, ch, cInter, pInter, n, partSize);
								if (result == null) {
									return;
								} else {
									cInter = result.cInter;
									pInter = result.pInter;
								}
								VorbisTools.stbProf(7);
							} else {
								z += partSize;
								cInter = z & 1;
								pInter = z >> 1;
							}
							++pcount;
						}
						VorbisTools.stbProf(8);
					}
				} else if (ch == 1) {
					while (pcount < partRead) {
						var z = begin + pcount * partSize;
						var cInter = 0;
						var pInter = z;
						if (pass == 0) {
							var c:Codebook = codebooks[classbook];
							var q = decodeState.decode(c);
							if (q == VorbisTools.EOP) return;

							var i = classwords;
							while (--i >= 0) {
								classifications[i + pcount] = q % this.classifications;
								q = Std.int(q / this.classifications);
							}
						}

						for (i in 0...classwords) {
							if (pcount >= partRead) {
								break;
							}
							var z = begin + pcount * partSize;
							var b = residueBooks[classifications[pcount]][pass];
							if (b >= 0) {
								var book:Codebook = codebooks[b];
								VorbisTools.stbProf(22);
								var result = book.decodeDeinterleaveRepeat(decodeState, residueBuffers, ch, cInter, pInter, n, partSize);
								if (result == null) {
									return;
								} else {
									cInter = result.cInter;
									pInter = result.pInter;
								}
								VorbisTools.stbProf(3);
							} else {
								z += partSize;
								cInter = 0;
								pInter = z;
							}
							++pcount;
						}
					}
				} else {
					while (pcount < partRead) {
						var z = begin + pcount * partSize;
						var cInter = z % ch;
						var pInter = Std.int(z / ch);

						if (pass == 0) {
							var c:Codebook = codebooks[classbook];
							var q = decodeState.decode(c);
							if (q == VorbisTools.EOP) {
								return;
							}

							var i = classwords;
							while (--i >= 0) {
								classifications[i + pcount] = q % this.classifications;
								q = Std.int(q / this.classifications);
							}
						}

						for (i in 0...classwords) {
							if (pcount >= partRead) {
								break;
							}
							var z = begin + pcount * partSize;
							var b = residueBooks[classifications[pcount]][pass];
							if (b >= 0) {
								var book = codebooks[b];
								VorbisTools.stbProf(22);
								var result = book.decodeDeinterleaveRepeat(decodeState, residueBuffers, ch, cInter, pInter, n, partSize);
								if (result == null) {
									return;
								} else {
									cInter = result.cInter;
									pInter = result.pInter;
								}
								VorbisTools.stbProf(3);
							} else {
								z += partSize;
								cInter = z % ch;
								pInter = Std.int(z / ch);
							}
							++pcount;
						}
					}
				}
			}
			return;
		}
		VorbisTools.stbProf(9);

		for (pass in 0...8) {
			var pcount = 0;
			var classSet = 0;
			while (pcount < partRead) {
				if (pass == 0) {
					for (j in 0...ch) {
						if (!doNotDecode[j]) {
							var c:Codebook = codebooks[classbook];
							var temp = decodeState.decode(c);
							if (temp == VorbisTools.EOP) {
								return;
							}
							var i = classwords;
							while (--i >= 0) {
								classifications[j * partRead + i + pcount] = temp % this.classifications;
								temp = Std.int(temp / this.classifications);
							}
						}
					}
				}
				for (i in 0...classwords) {
					if (pcount >= partRead) {
						break;
					}
					for (j in 0...ch) {
						if (!doNotDecode[j]) {
							var c = classifications[j * partRead + pcount];
							var b = residueBooks[c][pass];
							if (b >= 0) {
								var target = residueBuffers[j];
								var offset = begin + pcount * partSize;
								var n = partSize;
								var book = codebooks[b];
								if (!book.residueDecode(decodeState, target, offset, n, type)) {
									return;
								}
							}
						}
					}
					++pcount;
				}
			}
		}
	}
}
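The classdata table built in Residue.read stores, for each classbook symbol j, the digits of j written in base `classifications`, most significant first, so the hot decode loops can read one class per partition instead of doing a mod/divide each time. An illustrative restatement of that precompute as a standalone helper:

	// Illustrative only; mirrors the classdata loop in Residue.read.
	static function classDigits(j:Int, classifications:Int, classwords:Int):Array<Int> {
		var digits = [for (_ in 0...classwords) 0];
		var temp = j;
		var k = classwords;
		while (--k >= 0) {
			digits[k] = temp % classifications;
			temp = Std.int(temp / classifications);
		}
		return digits; // e.g. classDigits(425, 10, 3) == [4, 2, 5]
	}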
15
Kha/Sources/kha/audio2/ogg/vorbis/data/Setting.hx
Normal file
15
Kha/Sources/kha/audio2/ogg/vorbis/data/Setting.hx
Normal file
@ -0,0 +1,15 @@
package kha.audio2.ogg.vorbis.data;

/**
 * ...
 * @author shohei909
 */
class Setting {
	static public inline var MAX_CHANNELS = 16;
	static public inline var PUSHDATA_CRC_COUNT = 4;
	static public inline var FAST_HUFFMAN_LENGTH = 10;
	static public inline var FAST_HUFFMAN_TABLE_SIZE = (1 << FAST_HUFFMAN_LENGTH);
	static public inline var FAST_HUFFMAN_TABLE_MASK = FAST_HUFFMAN_TABLE_SIZE - 1;
}