Update Files

2025-01-22 16:18:30 +01:00
parent ed4603cf95
commit a36294b518
16718 changed files with 2960346 additions and 0 deletions

@@ -0,0 +1,458 @@
// =============================================================================
// audioCallback() is roughly based on
// https://github.com/Kode/Kha/blob/master/Sources/kha/audio2/Audio1.hx
// =============================================================================
package aura;
import haxe.Exception;
import haxe.ds.Vector;
import kha.Assets;
import kha.SystemImpl;
import kha.arrays.Float32Array;
import aura.channels.Html5StreamChannel;
import aura.channels.MixChannel;
import aura.channels.UncompBufferChannel;
import aura.channels.UncompBufferResamplingChannel;
import aura.channels.StreamChannel;
import aura.format.mhr.MHRReader;
import aura.threading.BufferCache;
import aura.types.AudioBuffer;
import aura.types.HRTF;
import aura.utils.Assert;
import aura.utils.BufferUtils.clearBuffer;
import aura.utils.MathUtils;
import aura.utils.Profiler;
import aura.utils.Pointer;
// Convenience typedefs to auto-import them with this module
typedef BaseChannelHandle = aura.channels.BaseChannel.BaseChannelHandle;
typedef UncompBufferChannelHandle = aura.channels.UncompBufferChannel.UncompBufferChannelHandle;
typedef MixChannelHandle = aura.channels.MixChannel.MixChannelHandle;
@:access(aura.channels.MixChannelHandle)
class Aura {
public static var options(default, null): Null<AuraOptions> = null;
public static var sampleRate(default, null): Int;
public static var lastBufferSize(default, null): Int = 0;
public static var listener: Listener;
public static final mixChannels = new Map<String, MixChannelHandle>();
public static var masterChannel(default, null): MixChannelHandle;
static inline var BLOCK_SIZE = 1024;
static inline var NUM_OUTPUT_CHANNELS = 2;
static inline var BLOCK_CHANNEL_SIZE = Std.int(BLOCK_SIZE / NUM_OUTPUT_CHANNELS);
static var p_samplesBuffer = new Pointer<Float32Array>(null);
static var blockBuffer = new AudioBuffer(NUM_OUTPUT_CHANNELS, BLOCK_CHANNEL_SIZE);
static var blockBufPos = 0;
static final hrtfs = new Map<String, HRTF>();
public static function init(?options: AuraOptions) {
sampleRate = kha.audio2.Audio.samplesPerSecond;
assert(Critical, sampleRate != 0, "sampleRate must not be 0!");
Aura.options = AuraOptions.addDefaults(options);
@:privateAccess MixChannel.channelSize = Aura.options.channelSize;
listener = new Listener();
BufferCache.init();
// Create a few preconfigured mix channels
masterChannel = createMixChannel("master");
createMixChannel("music").setMixChannel(masterChannel);
createMixChannel("fx").setMixChannel(masterChannel);
#if (kha_html5 || kha_debug_html5)
if (kha.SystemImpl.mobile) {
// kha.js.MobileWebAudio doesn't support a custom audio callback, so
// manually synchronize all tracks here (note that because of this
// limitation, insert effects are not supported for mobile audio)
kha.Scheduler.addTimeTask(masterChannel.getMixChannel().synchronize, 0, 1/60);
}
else {
#end
kha.audio2.Audio.audioCallback = audioCallback;
#if (kha_html5 || kha_debug_html5)
}
#end
#if (kha_html5 || kha_debug_html5)
// Check if virtual html5 stream channels can be made physical
kha.Scheduler.addBreakableTimeTask(() -> {
if (kha.SystemImpl.mobileAudioPlaying) {
Html5StreamChannel.makeChannelsPhysical();
return BreakTask;
}
return ContinueTask;
}, 0, 1/60);
#end
kha.System.notifyOnApplicationState(null, null, null, null, () -> {
Profiler.shutdown();
});
}
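// Usage sketch (not part of the original commit): initializing Aura once
// Kha is running. The `channelSize` value below is only an example; if it
// is omitted, `AuraOptions.addDefaults()` falls back to 16.
//
//     kha.System.start({title: "AuraExample"}, (_) -> {
//         Aura.init({channelSize: 32});
//     });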
/**
Load all assets listed in the given `loadConfig`.
If all assets are loaded successfully, `done` is called. For each asset
that fails to load, `failed` is called, if it was provided.
If `onProgress` is provided, it is called after each successfully loaded
asset with the number of assets loaded so far, including the current one
(first parameter), the total number of assets in the `loadConfig`
(second parameter), and the name of the current asset (third parameter).
**/
public static function loadAssets(loadConfig: AuraLoadConfig, done: Void->Void, ?failed: Void->Void, ?onProgress:Int->Int->String->Void) {
final length = loadConfig.getEntryCount();
var count = 0;
for (soundName in loadConfig.compressed) {
if (!doesSoundExist(soundName)) {
onLoadingError(null, failed, soundName);
continue;
}
Assets.loadSound(soundName, (sound: kha.Sound) -> {
#if !kha_krom // Krom only uses uncompressedData
if (sound.compressedData == null) {
throw 'Cannot load sound ${soundName} as compressed: it only has uncompressed data!';
}
#end
count++;
if (onProgress != null) {
onProgress(count, length, soundName);
}
if (count == length) {
done();
return;
}
}, (error: kha.AssetError) -> { onLoadingError(error, failed, soundName); });
}
for (soundName in loadConfig.uncompressed) {
if (!doesSoundExist(soundName)) {
onLoadingError(null, failed, soundName);
continue;
}
Assets.loadSound(soundName, (sound: kha.Sound) -> {
if (sound.uncompressedData == null) {
sound.uncompress(() -> {
count++;
if (onProgress != null) {
onProgress(count, length, soundName);
}
if (count == length) {
done();
return;
}
});
}
else {
count++;
if (onProgress != null) {
onProgress(count, length, soundName);
}
if (count == length) {
done();
return;
}
}
}, (error: kha.AssetError) -> { onLoadingError(error, failed, soundName); });
}
for (hrtfName in loadConfig.hrtf) {
if (!doesBlobExist(hrtfName)) {
onLoadingError(null, failed, hrtfName);
continue;
}
Assets.loadBlob(hrtfName, (blob: kha.Blob) -> {
var hrtf: HRTF;
try {
hrtf = MHRReader.read(blob.toBytes());
}
catch (e: Exception) {
trace('Could not load hrtf $hrtfName: ${e.details()}');
if (failed != null) {
failed();
}
return;
}
hrtfs[hrtfName] = hrtf;
count++;
if (onProgress != null) {
onProgress(count, length, hrtfName);
}
if (count == length) {
done();
return;
}
}, (error: kha.AssetError) -> { onLoadingError(error, failed, hrtfName); });
}
}
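// Usage sketch (not part of the original commit): loading assets declared
// in khafile.js. All sound names below are placeholders.
//
//     Aura.loadAssets({
//         compressed: ["MusicTrack"],
//         uncompressed: ["FootstepSound"]
//     }, () -> {
//         trace("All Aura assets loaded");
//     }, () -> {
//         trace("An asset failed to load");
//     }, (numLoaded, numTotal, name) -> {
//         trace('Loaded $numLoaded/$numTotal: $name');
//     });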
static function onLoadingError(error: Null<kha.AssetError>, failed: Null<Void->Void>, assetName: String) {
final errorInfo = error == null ? "" : "\nOriginal error: " + error.url + "..." + error.error;
trace(
'Could not load asset "$assetName", make sure that all assets are named\n'
+ " correctly and that they are included in the khafile.js."
+ errorInfo
);
if (failed != null) {
failed();
}
}
/**
Returns whether a sound exists and can be loaded.
**/
public static inline function doesSoundExist(soundName: String): Bool {
// Use reflection instead of Assets.sounds.get() to prevent errors on
// static targets. A sound's description is the sound's entry in
// files.json and not a kha.Sound, but get() returns a sound which would
// lead to an invalid cast exception.
// Relying on Kha internals ("Description" as name) is bad, but there is
// no good alternative...
return Reflect.field(Assets.sounds, soundName + "Description") != null;
}
/**
Returns whether a blob exists and can be loaded.
**/
public static inline function doesBlobExist(blobName: String): Bool {
return Reflect.field(Assets.blobs, blobName + "Description") != null;
}
public static inline function getSound(soundName: String): Null<kha.Sound> {
return Assets.sounds.get(soundName);
}
public static inline function getHRTF(hrtfName: String): Null<HRTF> {
return hrtfs.get(hrtfName);
}
/**
Create a new audio channel to play an uncompressed and pre-loaded sound, and return a main-thread handle object to the newly created channel.
The playback of the newly created channel does not start automatically.
@param sound The _uncompressed_ sound to be played by the created channel
@param loop Whether to loop the playback of the channel
@param mixChannelHandle (Optional) A handle for the `MixChannel`
to which to route the audio output of the newly created channel.
If the parameter is `null` (default), route the channel's output
to the master channel
@return A main-thread handle to the newly created channel, or `null`
if the created channel could not be assigned to the given mix channel
(e.g. in case of circular dependencies)
**/
public static function createUncompBufferChannel(sound: kha.Sound, loop: Bool = false, mixChannelHandle: Null<MixChannelHandle> = null): Null<UncompBufferChannelHandle> {
assert(Critical, sound.uncompressedData != null,
"Cannot play a sound with no uncompressed data. Make sure to load it as 'uncompressed' in the AuraLoadConfig."
);
if (mixChannelHandle == null) {
mixChannelHandle = masterChannel;
}
// TODO: Like Kha, only use resampling channel if pitch is used or if samplerate of sound and system differs
final newChannel = new UncompBufferResamplingChannel(sound.uncompressedData, loop, sound.sampleRate);
final handle = new UncompBufferChannelHandle(newChannel);
final foundChannel = handle.setMixChannel(mixChannelHandle);
return foundChannel ? handle : null;
}
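// Usage sketch (not part of the original commit): playing a sound that was
// loaded as "uncompressed" via `loadAssets()`. "FootstepSound" is a
// placeholder name.
//
//     final handle = Aura.createUncompBufferChannel(Aura.getSound("FootstepSound"));
//     if (handle != null) {
//         handle.play();
//     }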
/**
Create a new audio channel to play a compressed and pre-loaded sound, and return a main-thread handle object to the newly created channel.
The playback of the newly created channel does not start automatically.
@param sound The _compressed_ sound to be played by the created channel
@param loop Whether to loop the playback of the channel
@param mixChannelHandle (Optional) A handle for the `MixChannel`
to which to route the audio output of the newly created channel.
If the parameter is `null` (default), route the channel's output
to the master channel
@return A main-thread handle to the newly created channel, or `null`
if the created channel could not be assigned to the given mix channel
(e.g. in case of circular dependencies)
**/
public static function createCompBufferChannel(sound: kha.Sound, loop: Bool = false, mixChannelHandle: Null<MixChannelHandle> = null): Null<BaseChannelHandle> {
#if kha_krom
// Krom only uses uncompressedData -> no streaming
return createUncompBufferChannel(sound, loop, mixChannelHandle);
#end
assert(Critical, sound.compressedData != null,
"Cannot stream a sound with no compressed data. Make sure to load it as 'compressed' in the AuraLoadConfig."
);
if (mixChannelHandle == null) {
mixChannelHandle = masterChannel;
}
#if (kha_html5 || kha_debug_html5)
final newChannel = kha.SystemImpl.mobile ? new Html5MobileStreamChannel(sound, loop) : new Html5StreamChannel(sound, loop);
#else
final khaChannel: Null<kha.audio1.AudioChannel> = kha.audio2.Audio1.stream(sound, loop);
if (khaChannel == null) {
return null;
}
final newChannel = new StreamChannel(cast khaChannel);
newChannel.stop();
#end
final handle = new BaseChannelHandle(newChannel);
final foundChannel = handle.setMixChannel(mixChannelHandle);
return foundChannel ? handle : null;
}
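// Usage sketch (not part of the original commit): streaming a sound that
// was loaded as "compressed", looped and routed into the predefined "music"
// mix channel from `init()`.
//
//     final handle = Aura.createCompBufferChannel(Aura.getSound("MusicTrack"), true, Aura.mixChannels["music"]);
//     if (handle != null) {
//         handle.play();
//     }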
/**
Create a `MixChannel` to control a group of other channels together.
@param name Optional name. If not empty, the name can be used later to
retrieve the channel handle via `Aura.mixChannels[name]`.
**/
public static inline function createMixChannel(name: String = ""): MixChannelHandle {
final handle = new MixChannelHandle(new MixChannel());
if (name != "") {
assert(Error, !mixChannels.exists(name), 'MixChannel with name $name already exists!');
mixChannels[name] = handle;
#if AURA_DEBUG
handle.name = name;
#end
}
return handle;
}
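// Usage sketch (not part of the original commit): grouping channels under a
// custom mix channel that itself outputs into the master channel.
// `someHandle` stands for any previously created channel handle.
//
//     final voiceChannel = Aura.createMixChannel("voice");
//     voiceChannel.setMixChannel(Aura.masterChannel);
//     someHandle.setMixChannel(voiceChannel);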
/**
Mixes the output of all audio channels together and writes the result
into the given output buffer.
Based on `kha.audio2.Audio1.mix()`.
@param samplesBox Wrapper that holds the number of requested samples.
@param buffer The buffer into which to write the output samples.
**/
static function audioCallback(samplesBox: kha.internal.IntBox, buffer: kha.audio2.Buffer): Void {
Profiler.frame("AudioCallback");
Time.update();
final samplesRequested = samplesBox.value;
Aura.lastBufferSize = samplesRequested;
if (!BufferCache.getBuffer(TFloat32Array, p_samplesBuffer, 1, samplesRequested)) {
for (_ in 0...samplesRequested) {
buffer.data.set(buffer.writeLocation, 0);
buffer.writeLocation += 1;
if (buffer.writeLocation >= buffer.size) {
buffer.writeLocation = 0;
}
}
return;
}
// At this point we can be sure that sampleCache is not null
final sampleCache = p_samplesBuffer.get();
// Copy reference to masterChannel for some more thread safety.
// TODO: Investigate if other solutions are required here
var master: MixChannel = masterChannel.getMixChannel();
master.synchronize();
clearBuffer(sampleCache);
var samplesWritten = 0;
// The blockBuffer still has some values from the last audioCallback
// invocation that haven't been written to the sampleCache yet
if (blockBufPos != 0) {
final samplesToWrite = minI(samplesRequested, BLOCK_SIZE - blockBufPos);
blockBuffer.interleaveToFloat32Array(sampleCache, Std.int(blockBufPos / NUM_OUTPUT_CHANNELS), 0, Std.int(samplesToWrite / NUM_OUTPUT_CHANNELS));
samplesWritten += samplesToWrite;
blockBufPos += samplesToWrite;
if (blockBufPos >= BLOCK_SIZE) {
blockBufPos = 0;
}
}
while (samplesWritten < samplesRequested) {
master.nextSamples(blockBuffer, buffer.samplesPerSecond);
final samplesStillWritable = minI(samplesRequested - samplesWritten, BLOCK_SIZE);
blockBuffer.interleaveToFloat32Array(sampleCache, 0, samplesWritten, Std.int(samplesStillWritable / NUM_OUTPUT_CHANNELS));
samplesWritten += samplesStillWritable;
blockBufPos += samplesStillWritable;
if (blockBufPos >= BLOCK_SIZE) {
blockBufPos = 0;
}
}
for (i in 0...samplesRequested) {
// Write clamped samples to final buffer
buffer.data.set(buffer.writeLocation, maxF(minF(sampleCache[i], 1.0), -1.0));
buffer.writeLocation += 1;
if (buffer.writeLocation >= buffer.size) {
buffer.writeLocation = 0;
}
}
#if AURA_BENCHMARK
Time.endOfFrame();
#end
}
}
@:allow(aura.Aura)
@:structInit
class AuraLoadConfig {
public final compressed: Array<String> = [];
public final uncompressed: Array<String> = [];
public final hrtf: Array<String> = [];
inline function getEntryCount(): Int {
return compressed.length + uncompressed.length + hrtf.length;
}
}
@:structInit
class AuraOptions {
@:optional public var channelSize: Null<Int>;
public static function addDefaults(options: Null<AuraOptions>) {
if (options == null) { options = {}; }
if (options.channelSize == null) { options.channelSize = 16; }
return options;
}
}
private enum abstract BreakableTaskStatus(Bool) to Bool {
var BreakTask = false;
var ContinueTask = true;
}

@@ -0,0 +1,96 @@
package aura;
import aura.math.Vec3;
@:allow(aura.Handle)
@:allow(aura.dsp.panner.Panner)
class Listener {
public var location(default, null): Vec3;
public var look(default, null): Vec3;
public var right(default, null): Vec3;
var velocity: Vec3;
public function new() {
this.location = new Vec3(0, 0, 0);
this.velocity = new Vec3(0, 0, 0);
this.look = new Vec3(0, 1, 0);
this.right = new Vec3(1, 0, 0);
}
/**
Set the listener's view direction. `look` points directly in the view
direction, `right` is perpendicular to `look` and is used internally to
get the sign of the angle between a channel and the listener.
Both parameters must be normalized.
**/
public inline function setViewDirection(look: Vec3, right: Vec3) {
assert(Debug, look.length == 1 && right.length == 1);
this.look.setFrom(look);
this.right.setFrom(right);
}
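// Usage sketch (not part of the original commit): deriving `right` from
// `look` and a world-up vector. This assumes a cross product helper on
// `Vec3`; `listener` stands for any `Listener` instance. The result is
// already unit length if `look` and `up` are perpendicular unit vectors.
//
//     final look = new Vec3(0, 1, 0);
//     final up = new Vec3(0, 0, 1);
//     final right = look.cross(up); // (1, 0, 0)
//     listener.setViewDirection(look, right);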
/**
Set the location of this listener in world space.
Calling this function also updates the listener's velocity, unless it is
the first call to this function for this listener. Skipping the first
call avoids audible "jumps" in the audio output when objects are
initially placed far away from the origin.
**/
public function setLocation(location: Vec3) {
final time = Time.getTime();
final timeDeltaLastCall = time - _setLocation_lastCallTime;
// If the last time setLocation() was called was at an earlier time step
if (timeDeltaLastCall > 0) {
_setLocation_lastLocation.setFrom(this.location);
_setLocation_lastVelocityUpdateTime = _setLocation_lastCallTime;
}
final timeDeltaVelocityUpdate = time - _setLocation_lastVelocityUpdateTime;
this.location.setFrom(location);
if (!_setLocation_initializedLocation) {
_setLocation_initializedLocation = true;
}
else if (timeDeltaVelocityUpdate > 0) {
velocity.setFrom(location.sub(_setLocation_lastLocation).mult(1 / timeDeltaVelocityUpdate));
}
_setLocation_lastCallTime = time;
}
var _setLocation_initializedLocation = false;
var _setLocation_lastLocation: Vec3 = new Vec3(0, 0, 0);
var _setLocation_lastCallTime: Float = 0.0;
var _setLocation_lastVelocityUpdateTime: Float = 0.0;
/**
Wrapper around `setViewDirection()` and `setLocation()`.
**/
public function set(location: Vec3, look: Vec3, right: Vec3) {
inline setViewDirection(look, right);
inline setLocation(location);
}
/**
Resets the location, direction and velocity of the listener to their
default values.
**/
public inline function reset() {
this.location.setFrom(new Vec3(0, 0, 0));
this.velocity.setFrom(new Vec3(0, 0, 0));
this._setLocation_initializedLocation = false;
this._setLocation_lastLocation.setFrom(new Vec3(0, 0, 0));
this._setLocation_lastVelocityUpdateTime = Time.getTime();
this.look.setFrom(new Vec3(0, 1, 0));
this.right.setFrom(new Vec3(1, 0, 0));
}
}

@@ -0,0 +1,61 @@
package aura;
import kha.Scheduler;
import aura.threading.BufferCache;
class Time {
public static var lastTime(default, null): Float = 0.0;
public static var delta(default, null): Float = 0.0;
#if AURA_UNIT_TESTS
public static var overrideTime: Null<Float> = null;
#end
#if AURA_BENCHMARK
public static var times: Array<Float>;
static var benchmarkStarted = false;
static var currentIteration = 0;
static var numIterations = 0;
static var onBenchmarkDone: Array<Float>->Void;
#end
public static inline function getTime():Float {
#if AURA_UNIT_TESTS
if (overrideTime != null) {
return overrideTime;
}
#end
return Scheduler.realTime();
}
public static inline function update() {
delta = getTime() - lastTime;
lastTime = getTime();
BufferCache.updateTimer();
}
#if AURA_BENCHMARK
public static inline function endOfFrame() {
if (benchmarkStarted) {
times[currentIteration] = Scheduler.realTime() - lastTime;
currentIteration++;
if (currentIteration == numIterations) {
onBenchmarkDone(times);
benchmarkStarted = false;
currentIteration = 0;
}
}
}
public static function startBenchmark(numIterations: Int, onBenchmarkDone: Array<Float>->Void) {
Time.numIterations = numIterations;
Time.onBenchmarkDone = onBenchmarkDone;
times = new Array();
times.resize(numIterations);
benchmarkStarted = true;
}
#end
}

@@ -0,0 +1,105 @@
package aura;
import aura.utils.MathUtils.clampF;
/**
Integer representing a Hertz value.
**/
typedef Hertz = Int;
/**
Float representing milliseconds.
**/
typedef Millisecond = Float;
enum abstract Channels(Int) {
var Left = 1 << 0;
var Right = 1 << 1;
var All = ~0;
public inline function matches(mask: Channels): Bool {
return (this & mask.asInt()) != 0;
}
public inline function matchesIndex(index: Int): Bool {
return ((1 << index) & this) != 0;
}
inline function asInt(): Int {
return this;
}
}
abstract Balance(Float) from Float to Float {
public static inline var LEFT: Balance = 0.0;
public static inline var CENTER: Balance = 0.5;
public static inline var RIGHT: Balance = 1.0;
inline function new(value: Float) {
this = clampF(value);
}
@:from public static inline function fromAngle(angle: Angle): Balance {
return switch (angle) {
case Deg(deg): (deg + 90) / 180;
case Rad(rad): (rad + Math.PI / 2) / Math.PI;
}
}
@:op(~A) public function invert() {
return 1.0 - this;
}
}
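// Worked examples for `Balance.fromAngle()` (illustrative):
//   Deg(-90) -> (-90 + 90) / 180 = 0.0 (hard left)
//   Deg(0)   -> (  0 + 90) / 180 = 0.5 (center)
//   Deg(90)  -> ( 90 + 90) / 180 = 1.0 (hard right)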
enum Angle {
Deg(deg: Int);
Rad(rad: Float);
}
#if cpp
@:forward
@:forwardStatics
abstract AtomicInt(cpp.AtomicInt) from Int to Int {
public inline function toPtr(): cpp.Pointer<cpp.AtomicInt> {
final val: cpp.AtomicInt = this; // For some reason, this line is required for correct codegen...
return cpp.Pointer.addressOf(val);
}
}
#else
typedef AtomicInt = Int;
#end
#if (haxe_ver >= 4.3 && hl_ver >= version("1.13.0") && !js)
typedef AtomicBool = haxe.atomic.AtomicBool;
#else
@:forward
@:forwardStatics
abstract AtomicBool({val: Bool}) { // We need indirection via struct here to not run into compile issues with `this`
public inline function new(value: Bool) {
this = {val: value};
}
public inline function compareExchange(expected: Bool, replacement: Bool): Bool {
final orig = this.val;
if (orig == expected) {
this.val = replacement;
}
return orig;
}
public inline function exchange(value: Bool): Bool {
final orig = this.val;
this.val = value;
return orig;
}
public inline function load(): Bool {
return this.val;
}
public inline function store(value: Bool): Bool {
return this.val = value;
}
}
#end
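// Usage sketch (not part of the original commit): `compareExchange()`
// returns the *previous* value on both implementations above, so a
// successful swap is detected by comparing against the expected value.
//
//     final flag = new AtomicBool(false);
//     final prev = flag.compareExchange(false, true);
//     final didSwap = prev == false; // true, this call performed the swap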

@@ -0,0 +1,244 @@
package aura.channels;
import aura.channels.MixChannel.MixChannelHandle;
import aura.dsp.DSP;
import aura.dsp.panner.Panner;
import aura.threading.Fifo;
import aura.threading.Message;
import aura.types.AudioBuffer;
import aura.utils.Interpolator.LinearInterpolator;
import aura.utils.MathUtils;
/**
Main-thread handle to an audio channel in the audio thread.
**/
@:access(aura.channels.BaseChannel)
@:allow(aura.dsp.panner.Panner)
class BaseChannelHandle {
/**
Whether the playback of the handle's channel is currently paused.
**/
public var paused(get, never): Bool;
inline function get_paused(): Bool { return channel.paused; }
/**
Whether the playback of the handle's channel has finished.
On `MixChannel`s this value is always `false`.
**/
public var finished(get, never): Bool;
inline function get_finished(): Bool { return channel.finished; }
public var panner(get, null): Null<Panner>;
inline function get_panner(): Null<Panner> { return channel.panner; }
/**
Link to the audio channel in the audio thread.
**/
final channel: BaseChannel;
var parentHandle: Null<MixChannelHandle> = null;
// Parameter cache for getter functions
var _volume: Float = 1.0;
var _pitch: Float = 1.0;
public inline function new(channel: BaseChannel) {
this.channel = channel;
}
/**
Starts the playback. If the sound wasn't played before or was stopped,
the playback starts from the beginning. If it is paused, playback starts
from the position where it was paused.
@param retrigger Controls the behaviour if the sound is already playing.
If true, restart playback from the beginning, else do nothing.
**/
public inline function play(retrigger = false) {
channel.sendMessage({ id: ChannelMessageID.Play, data: retrigger });
}
public inline function pause() {
channel.sendMessage({ id: ChannelMessageID.Pause, data: null });
}
public inline function stop() {
channel.sendMessage({ id: ChannelMessageID.Stop, data: null });
}
public inline function addInsert(insert: DSP): DSP {
return channel.addInsert(insert);
}
public inline function removeInsert(insert: DSP) {
channel.removeInsert(insert);
}
/**
Set the mix channel into which this channel routes its output.
Returns `true` if setting the mix channel was successful and `false` if
there would be a circular dependency or the number of input channels of
the mix channel is already maxed out.
**/
public function setMixChannel(mixChannelHandle: MixChannelHandle): Bool {
if (mixChannelHandle == parentHandle) {
return true;
}
if (parentHandle != null) {
@:privateAccess parentHandle.removeInputChannel(this);
parentHandle = null;
}
if (mixChannelHandle == null) {
return true;
}
// Return false for circular references (including mixChannelHandle == this)
var curHandle = mixChannelHandle;
while (curHandle != null) {
if (curHandle == this) {
return false;
}
curHandle = curHandle.parentHandle;
}
final success = @:privateAccess mixChannelHandle.addInputChannel(this);
if (success) {
parentHandle = mixChannelHandle;
} else {
parentHandle = null;
}
return success;
}
public inline function setVolume(volume: Float) {
assert(Critical, volume >= 0, "Volume value must not be a negative number!");
channel.sendMessage({ id: ChannelMessageID.PVolume, data: maxF(0.0, volume) });
this._volume = volume;
}
public inline function getVolume(): Float {
return this._volume;
}
public inline function setPitch(pitch: Float) {
assert(Critical, pitch > 0, "Pitch value must be a positive number!");
channel.sendMessage({ id: ChannelMessageID.PPitch, data: maxF(0.0, pitch) });
this._pitch = pitch;
}
public inline function getPitch(): Float {
return this._pitch;
}
#if AURA_DEBUG
public function getDebugAttrs(): Map<String, String> {
return ["In use" => Std.string(@:privateAccess channel.isPlayable())];
}
#end
}
/**
Base class of all audio channels in the audio thread.
**/
@:allow(aura.Aura)
@:access(aura.dsp.DSP)
@:allow(aura.dsp.panner.Panner)
@:access(aura.dsp.panner.Panner)
abstract class BaseChannel {
final messages: Fifo<Message> = new Fifo();
final inserts: Array<DSP> = [];
var panner: Null<Panner> = null;
// Parameters
final pVolume = new LinearInterpolator(1.0);
final pDopplerRatio = new LinearInterpolator(1.0);
final pDstAttenuation = new LinearInterpolator(1.0);
var treeLevel(default, null): Int = 0;
var paused: Bool = false;
var finished: Bool = true;
abstract function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz): Void;
abstract function play(retrigger: Bool): Void;
abstract function pause(): Void;
abstract function stop(): Void;
function isPlayable(): Bool {
return !paused && !finished;
}
function setTreeLevel(level: Int) {
this.treeLevel = level;
}
inline function processInserts(buffer: AudioBuffer) {
for (insert in inserts) {
if (insert.bypass) { continue; }
insert.process(buffer);
}
if (panner != null) {
panner.process(buffer);
}
}
inline function addInsert(insert: DSP): DSP {
assert(Critical, !insert.inUse, "DSP objects can only belong to one unique channel");
insert.inUse = true;
inserts.push(insert);
return insert;
}
inline function removeInsert(insert: DSP) {
var found = inserts.remove(insert);
if (found) {
insert.inUse = false;
}
}
function synchronize() {
var message: Null<Message>;
while ((message = messages.tryPop()) != null) {
parseMessage(message);
}
for (insert in inserts) {
insert.synchronize();
}
if (panner != null) {
panner.synchronize();
}
}
function parseMessage(message: Message) {
switch (message.id) {
case ChannelMessageID.Play: play(cast message.data);
case ChannelMessageID.Pause: pause();
case ChannelMessageID.Stop: stop();
case ChannelMessageID.PVolume: pVolume.targetValue = cast message.data;
case ChannelMessageID.PDopplerRatio: pDopplerRatio.targetValue = cast message.data;
case ChannelMessageID.PDstAttenuation: pDstAttenuation.targetValue = cast message.data;
default:
}
}
inline function sendMessage(message: Message) {
messages.add(message);
}
}
enum abstract AttenuationMode(Int) {
var Linear;
var Inverse;
var Exponential;
}

@@ -0,0 +1,192 @@
package aura.channels;
#if (kha_html5 || kha_debug_html5)
import js.Browser;
import js.html.AudioElement;
import js.html.URL;
import kha.SystemImpl;
import kha.js.MobileWebAudioChannel;
import aura.threading.Message;
import aura.types.AudioBuffer;
/**
Channel dedicated for streaming playback on html5.
Because most browsers don't allow audio playback before the user has
interacted with the website or canvas at least once, we can't always play
audio without causing an exception. In order to not cause chaos with sounds
playing at wrong times, sounds are virtualized before they can actually be
played. This means that their playback position is tracked and as soon as
the user interacts with the web page, the audio starts playing at the
correct position, as if the sound had been playing the whole time since it
was started.
Note that on mobile browsers the `aura.channels.Html5MobileStreamChannel` is
used instead.
**/
class Html5StreamChannel extends BaseChannel {
static final virtualChannels: Array<Html5StreamChannel> = [];
final audioElement: AudioElement;
var virtualPosition: Float;
var lastUpdateTime: Float;
public function new(sound: kha.Sound, loop: Bool) {
audioElement = Browser.document.createAudioElement();
final mimeType = #if kha_debug_html5 "audio/ogg" #else "audio/mp4" #end;
final blob = new js.html.Blob([sound.compressedData.getData()], {type: mimeType});
// TODO: when removing channels, use URL.revokeObjectURL()?
// see https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
audioElement.src = URL.createObjectURL(blob);
audioElement.loop = loop;
if (isVirtual()) {
virtualChannels.push(this);
}
}
inline function isVirtual(): Bool {
return !SystemImpl.mobileAudioPlaying;
}
@:allow(aura.Aura)
static function makeChannelsPhysical() {
for (channel in virtualChannels) {
channel.updateVirtualPosition();
channel.audioElement.currentTime = channel.virtualPosition;
if (!channel.finished && !channel.paused) {
channel.audioElement.play();
}
}
virtualChannels.resize(0);
}
inline function updateVirtualPosition() {
final now = kha.Scheduler.realTime();
if (finished) {
virtualPosition = 0;
}
else if (!paused) {
virtualPosition += now - lastUpdateTime;
while (virtualPosition > audioElement.duration) {
virtualPosition -= audioElement.duration;
}
}
lastUpdateTime = now;
}
public function play(retrigger: Bool) {
if (isVirtual()) {
updateVirtualPosition();
if (retrigger) {
virtualPosition = 0;
}
}
else {
audioElement.play();
if (retrigger) {
audioElement.currentTime = 0;
}
}
paused = false;
finished = false;
}
public function pause() {
if (isVirtual()) {
updateVirtualPosition();
}
else {
audioElement.pause();
}
paused = true;
}
public function stop() {
if (isVirtual()) {
updateVirtualPosition();
}
else {
audioElement.pause();
audioElement.currentTime = 0;
}
finished = true;
}
function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz) {}
override function parseMessage(message: Message) {
switch (message.id) {
// Because we're using an HTML implementation here, we cannot use the
// LinearInterpolator parameters
case ChannelMessageID.PVolume: audioElement.volume = cast message.data;
case ChannelMessageID.PPitch:
case ChannelMessageID.PDopplerRatio:
case ChannelMessageID.PDstAttenuation:
default:
super.parseMessage(message);
}
}
}
/**
Wrapper around kha.js.MobileWebAudioChannel.
See https://github.com/Kode/Kha/issues/299 and
https://github.com/Kode/Kha/commit/12494b1112b64e4286b6a2fafc0f08462c1e7971
**/
class Html5MobileStreamChannel extends BaseChannel {
final khaChannel: kha.js.MobileWebAudioChannel;
public function new(sound: kha.Sound, loop: Bool) {
khaChannel = new kha.js.MobileWebAudioChannel(cast sound, loop);
}
public function play(retrigger: Bool) {
if (retrigger) {
khaChannel.position = 0;
}
khaChannel.play();
paused = false;
finished = false;
}
public function pause() {
khaChannel.pause();
paused = true;
}
public function stop() {
khaChannel.stop();
finished = true;
}
function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz) {}
override function parseMessage(message: Message) {
switch (message.id) {
// Because we're using an HTML implementation here, we cannot use the
// LinearInterpolator parameters
case ChannelMessageID.PVolume: khaChannel.volume = cast message.data;
case ChannelMessageID.PPitch:
case ChannelMessageID.PDopplerRatio:
case ChannelMessageID.PDstAttenuation:
default:
super.parseMessage(message);
}
}
}
#end

@@ -0,0 +1,299 @@
package aura.channels;
import haxe.ds.Vector;
#if cpp
import sys.thread.Mutex;
#end
import aura.channels.BaseChannel.BaseChannelHandle;
import aura.threading.BufferCache;
import aura.threading.Message;
import aura.types.AudioBuffer;
import aura.utils.Profiler;
/**
Main-thread handle to a `MixChannel` in the audio thread.
**/
class MixChannelHandle extends BaseChannelHandle {
#if AURA_DEBUG
public var name: String = "";
public var inputHandles: Array<BaseChannelHandle> = new Array();
#end
public inline function getNumInputs(): Int {
return getMixChannel().getNumInputs();
}
/**
Adds an input channel. Returns `true` if adding the channel was
successful, `false` if the number of input channels is already maxed
out.
**/
inline function addInputChannel(channelHandle: BaseChannelHandle): Bool {
assert(Error, channelHandle != null, "channelHandle must not be null");
final foundChannel = getMixChannel().addInputChannel(channelHandle.channel);
#if AURA_DEBUG
if (foundChannel) inputHandles.push(channelHandle);
#end
return foundChannel;
}
/**
Removes an input channel from this `MixChannel`.
**/
inline function removeInputChannel(channelHandle: BaseChannelHandle) {
#if AURA_DEBUG
inputHandles.remove(channelHandle);
#end
getMixChannel().removeInputChannel(channelHandle.channel);
}
inline function getMixChannel(): MixChannel {
return cast this.channel;
}
#if AURA_DEBUG
public override function getDebugAttrs(): Map<String, String> {
return super.getDebugAttrs().mergeIntoThis([
"Name" => name,
"Num inserts" => Std.string(@:privateAccess channel.inserts.length),
]);
}
#end
}
/**
A channel that mixes together the output of multiple input channels.
**/
@:access(aura.dsp.DSP)
class MixChannel extends BaseChannel {
#if cpp
static var mutex: Mutex = new Mutex();
#end
/**
The number of inputs a `MixChannel` can hold. Set this value via
the `channelSize` field of the options passed to `Aura.init()`.
**/
static var channelSize: Int;
var inputChannels: Vector<BaseChannel>;
var numUsedInputs: Int = 0;
/**
Temporary copy of inputChannels for thread safety.
**/
var inputChannelsCopy: Vector<BaseChannel>;
public function new() {
inputChannels = new Vector<BaseChannel>(channelSize);
// Make sure super.isPlayable() is true until we find better semantics
// for MixChannel.play()/pause()/stop()
this.finished = false;
}
/**
Adds an input channel. Returns `true` if adding the channel was
successful, `false` if the number of input channels is already maxed
out.
**/
public function addInputChannel(channel: BaseChannel): Bool {
var foundChannel = false;
#if cpp
mutex.acquire();
#end
for (i in 0...MixChannel.channelSize) {
if (inputChannels[i] == null) { // || inputChannels[i].finished) {
inputChannels[i] = channel;
numUsedInputs++;
channel.setTreeLevel(this.treeLevel + 1);
foundChannel = true;
break;
}
}
updateChannelsCopy();
#if cpp
mutex.release();
#end
return foundChannel;
}
public function removeInputChannel(channel: BaseChannel) {
#if cpp
mutex.acquire();
#end
for (i in 0...MixChannel.channelSize) {
if (inputChannels[i] == channel) {
inputChannels[i] = null;
numUsedInputs--;
break;
}
}
updateChannelsCopy();
#if cpp
mutex.release();
#end
}
public inline function getNumInputs() {
return numUsedInputs;
}
/**
Copy the references to the input channels for thread safety. This
function does not acquire any additional mutexes.
@see `MixChannel.inputChannelsCopy`
**/
inline function updateChannelsCopy() {
inputChannelsCopy = inputChannels.copy();
// TODO: Streaming
// for (i in 0...channelCount) {
// internalStreamChannels[i] = streamChannels[i];
// }
}
override function isPlayable(): Bool {
// TODO: be more intelligent here and actually check inputs?
return super.isPlayable() && numUsedInputs != 0;
}
override function setTreeLevel(level: Int) {
this.treeLevel = level;
for (inputChannel in inputChannels) {
if (inputChannel != null) {
inputChannel.setTreeLevel(level + 1);
}
}
}
override function synchronize() {
for (inputChannel in inputChannels) {
if (inputChannel != null) {
inputChannel.synchronize();
}
}
super.synchronize();
}
function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz): Void {
Profiler.event();
if (numUsedInputs == 0) {
requestedSamples.clear();
return;
}
final inputBuffer = BufferCache.getTreeBuffer(treeLevel, requestedSamples.numChannels, requestedSamples.channelLength);
if (inputBuffer == null) {
requestedSamples.clear();
return;
}
var first = true;
var foundPlayableInput = false;
for (channel in inputChannelsCopy) {
if (channel == null || !channel.isPlayable()) {
continue;
}
foundPlayableInput = true;
channel.nextSamples(inputBuffer, sampleRate);
if (first) {
// To prevent feedback loops, the input buffer has to be cleared
// before all inputs are added to it. To not waste calculations,
// we do not clear the buffer here but instead just override
// the previous sample cache.
for (i in 0...requestedSamples.rawData.length) {
requestedSamples.rawData[i] = inputBuffer.rawData[i];
}
first = false;
}
else {
for (i in 0...requestedSamples.rawData.length) {
requestedSamples.rawData[i] += inputBuffer.rawData[i];
}
}
}
// for (channel in internalStreamChannels) {
// if (channel == null || !channel.isPlayable())
// continue;
// foundPlayableInput = true;
// channel.nextSamples(inputBuffer, samples, buffer.samplesPerSecond);
// for (i in 0...samples) {
// sampleCacheAccumulated[i] += inputBuffer[i] * channel.volume;
// }
// }
if (!foundPlayableInput) {
// Didn't read from input channels, clear possible garbage values
requestedSamples.clear();
return;
}
// Apply volume of this channel
final stepVol = pVolume.getLerpStepSize(requestedSamples.channelLength);
for (c in 0...requestedSamples.numChannels) {
final channelView = requestedSamples.getChannelView(c);
for (i in 0...requestedSamples.channelLength) {
channelView[i] *= pVolume.currentValue;
pVolume.currentValue += stepVol;
}
pVolume.currentValue = pVolume.lastValue;
}
pVolume.updateLast();
processInserts(requestedSamples);
}
/**
Calls `play()` for all input channels.
**/
public function play(retrigger: Bool): Void {
for (inputChannel in inputChannels) {
if (inputChannel != null) {
inputChannel.play(retrigger);
}
}
}
/**
Calls `pause()` for all input channels.
**/
public function pause(): Void {
for (inputChannel in inputChannels) {
if (inputChannel != null) {
inputChannel.pause();
}
}
}
/**
Calls `stop()` for all input channels.
**/
public function stop(): Void {
for (inputChannel in inputChannels) {
if (inputChannel != null) {
inputChannel.stop();
}
}
}
}

@@ -0,0 +1,64 @@
package aura.channels;
import aura.utils.Pointer;
import kha.arrays.Float32Array;
import aura.threading.BufferCache;
import aura.threading.Message;
import aura.types.AudioBuffer;
/**
Wrapper around `kha.audio2.StreamChannel` (for now).
**/
class StreamChannel extends BaseChannel {
final khaChannel: kha.audio2.StreamChannel;
final p_khaBuffer = new Pointer<Float32Array>(null);
public function new(khaChannel: kha.audio2.StreamChannel) {
this.khaChannel = khaChannel;
}
public function play(retrigger: Bool) {
paused = false;
finished = false;
khaChannel.play();
if (retrigger) {
khaChannel.position = 0;
}
}
public function pause() {
paused = true;
khaChannel.pause();
}
public function stop() {
finished = true;
khaChannel.stop();
}
function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz) {
if (!BufferCache.getBuffer(TFloat32Array, p_khaBuffer, 1, requestedSamples.numChannels * requestedSamples.channelLength)) {
requestedSamples.clear();
return;
}
final khaBuffer = p_khaBuffer.get();
khaChannel.nextSamples(khaBuffer, requestedSamples.channelLength, sampleRate);
requestedSamples.deinterleaveFromFloat32Array(khaBuffer, requestedSamples.numChannels);
}
override function parseMessage(message: Message) {
switch (message.id) {
// Because we're using a Kha implementation here, we cannot use the
// LinearInterpolator parameters
case ChannelMessageID.PVolume: khaChannel.volume = cast message.data;
case ChannelMessageID.PPitch:
case ChannelMessageID.PDopplerRatio:
case ChannelMessageID.PDstAttenuation:
default:
super.parseMessage(message);
}
}
}

@@ -0,0 +1,264 @@
package aura.channels;
import kha.arrays.Float32Array;
import aura.channels.BaseChannel.BaseChannelHandle;
import aura.dsp.sourcefx.SourceEffect;
import aura.utils.MathUtils;
import aura.threading.Message;
import aura.types.AudioBuffer;
// TODO make handle thread-safe!
@:access(aura.channels.UncompBufferChannel)
class UncompBufferChannelHandle extends BaseChannelHandle {
final _sourceEffects: Array<SourceEffect> = []; // main-thread twin of channel.sourceEffects. TODO investigate better solution
var _playbackDataLength = -1;
inline function getUncompBufferChannel(): UncompBufferChannel {
return cast this.channel;
}
/**
Return the sound's length in seconds.
**/
public inline function getLength(): Float {
return getUncompBufferChannel().data.channelLength / Aura.sampleRate;
}
/**
Return the channel's current playback position in seconds.
**/
public inline function getPlaybackPosition(): Float {
return getUncompBufferChannel().playbackPosition / Aura.sampleRate;
}
/**
Set the channel's current playback position in seconds.
**/
public inline function setPlaybackPosition(value: Float) {
final pos = Math.round(value * Aura.sampleRate);
getUncompBufferChannel().playbackPosition = clampI(pos, 0, getUncompBufferChannel().data.channelLength);
}
public function addSourceEffect(sourceEffect: SourceEffect) {
_sourceEffects.push(sourceEffect);
final playbackData = updatePlaybackBuffer();
getUncompBufferChannel().sendMessage({ id: UncompBufferChannelMessageID.AddSourceEffect, data: [sourceEffect, playbackData] });
}
public function removeSourceEffect(sourceEffect: SourceEffect) {
if (_sourceEffects.remove(sourceEffect)) {
final playbackData = updatePlaybackBuffer();
getUncompBufferChannel().sendMessage({ id: UncompBufferChannelMessageID.RemoveSourceEffect, data: [sourceEffect, playbackData] });
}
}
@:access(aura.dsp.sourcefx.SourceEffect)
function updatePlaybackBuffer(): Null<AudioBuffer> {
final data = getUncompBufferChannel().data;
var playbackData: Null<AudioBuffer> = null;
if (_sourceEffects.length == 0) {
playbackData = data;
}
else {
var requiredChannelLength = data.channelLength;
var prevChannelLength = data.channelLength;
for (sourceEffect in _sourceEffects) {
prevChannelLength = sourceEffect.calculateRequiredChannelLength(prevChannelLength);
requiredChannelLength = maxI(requiredChannelLength, prevChannelLength);
}
if (_playbackDataLength != requiredChannelLength) {
playbackData = new AudioBuffer(data.numChannels, requiredChannelLength);
_playbackDataLength = requiredChannelLength;
}
}
// if null -> no buffer to change in channel
return playbackData;
}
}
@:allow(aura.channels.UncompBufferChannelHandle)
class UncompBufferChannel extends BaseChannel {
public static inline var NUM_CHANNELS = 2;
final sourceEffects: Array<SourceEffect> = [];
var appliedSourceEffects = false;
/** The current playback position in samples. **/
var playbackPosition: Int = 0;
var looping: Bool = false;
/**
The original audio source data for this channel.
**/
final data: AudioBuffer;
/**
The audio data used for playback. This might be different from `this.data`
if this channel has `SourceEffect`s assigned to it.
**/
var playbackData: AudioBuffer;
public function new(data: Float32Array, looping: Bool) {
this.data = this.playbackData = new AudioBuffer(2, Std.int(data.length / 2));
this.data.deinterleaveFromFloat32Array(data, 2);
this.looping = looping;
}
override function parseMessage(message: Message) {
switch (message.id) {
case UncompBufferChannelMessageID.AddSourceEffect:
final sourceEffect: SourceEffect = message.dataAsArrayUnsafe()[0];
final _playbackData = message.dataAsArrayUnsafe()[1];
if (_playbackData != null) {
playbackData = _playbackData;
}
addSourceEffect(sourceEffect);
case UncompBufferChannelMessageID.RemoveSourceEffect:
final sourceEffect: SourceEffect = message.dataAsArrayUnsafe()[0];
final _playbackData = message.dataAsArrayUnsafe()[1];
if (_playbackData != null) {
playbackData = _playbackData;
}
removeSourceEffect(sourceEffect);
default: super.parseMessage(message);
}
}
function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz): Void {
assert(Critical, requestedSamples.numChannels == playbackData.numChannels);
final stepDopplerRatio = pDopplerRatio.getLerpStepSize(requestedSamples.channelLength);
final stepDstAttenuation = pDstAttenuation.getLerpStepSize(requestedSamples.channelLength);
final stepVol = pVolume.getLerpStepSize(requestedSamples.channelLength);
var samplesWritten = 0;
// As long as there are more samples requested
while (samplesWritten < requestedSamples.channelLength) {
// Check how many samples we can actually write
final samplesToWrite = minI(playbackData.channelLength - playbackPosition, requestedSamples.channelLength - samplesWritten);
for (c in 0...requestedSamples.numChannels) {
final outChannelView = requestedSamples.getChannelView(c);
final dataChannelView = playbackData.getChannelView(c);
// Reset interpolators for channel
pDopplerRatio.currentValue = pDopplerRatio.lastValue;
pDstAttenuation.currentValue = pDstAttenuation.lastValue;
pVolume.currentValue = pVolume.lastValue;
for (i in 0...samplesToWrite) {
final value = dataChannelView[playbackPosition + i] * pVolume.currentValue * pDstAttenuation.currentValue;
outChannelView[samplesWritten + i] = value;
// TODO: SIMD
pDopplerRatio.currentValue += stepDopplerRatio;
pDstAttenuation.currentValue += stepDstAttenuation;
pVolume.currentValue += stepVol;
}
}
samplesWritten += samplesToWrite;
playbackPosition += samplesToWrite;
if (playbackPosition >= playbackData.channelLength) {
playbackPosition = 0;
if (looping) {
optionallyApplySourceEffects();
}
else {
finished = true;
break;
}
}
}
// Fill further requested samples with zeroes
for (c in 0...requestedSamples.numChannels) {
final channelView = requestedSamples.getChannelView(c);
for (i in samplesWritten...requestedSamples.channelLength) {
channelView[i] = 0;
}
}
pDopplerRatio.updateLast();
pDstAttenuation.updateLast();
pVolume.updateLast();
processInserts(requestedSamples);
}
function play(retrigger: Bool): Void {
if (finished || retrigger || !appliedSourceEffects) {
optionallyApplySourceEffects();
}
paused = false;
finished = false;
if (retrigger) {
playbackPosition = 0;
}
}
function pause(): Void {
paused = true;
}
function stop(): Void {
playbackPosition = 0;
finished = true;
}
inline function addSourceEffect(audioSourceEffect: SourceEffect) {
sourceEffects.push(audioSourceEffect);
appliedSourceEffects = false;
}
inline function removeSourceEffect(audioSourceEffect: SourceEffect) {
sourceEffects.remove(audioSourceEffect);
appliedSourceEffects = false;
}
/**
Apply all source effects to `playbackData`, if there are any.
**/
@:access(aura.dsp.sourcefx.SourceEffect)
function optionallyApplySourceEffects() {
var currentSrcBuffer = data;
var previousLength = data.channelLength;
var needsReprocessing = !appliedSourceEffects;
if (!needsReprocessing) {
for (sourceEffect in sourceEffects) {
if (sourceEffect.applyOnReplay.load()) {
needsReprocessing = true;
break;
}
}
}
if (needsReprocessing) {
for (sourceEffect in sourceEffects) {
previousLength = sourceEffect.process(currentSrcBuffer, previousLength, playbackData);
currentSrcBuffer = playbackData;
}
}
appliedSourceEffects = true;
}
}
private class UncompBufferChannelMessageID extends ChannelMessageID {
final AddSourceEffect;
final RemoveSourceEffect;
}

@@ -0,0 +1,145 @@
// =============================================================================
// Roughly based on
// https://github.com/Kode/Kha/blob/master/Sources/kha/audio2/ResamplingAudioChannel.hx
// =============================================================================
package aura.channels;
import kha.arrays.Float32Array;
import aura.threading.Message;
import aura.types.AudioBuffer;
import aura.utils.MathUtils;
import aura.utils.Interpolator.LinearInterpolator;
import aura.utils.Profiler;
import aura.utils.Resampler;
class UncompBufferResamplingChannel extends UncompBufferChannel {
public var sampleRate: Hertz;
public var floatPosition: Float = 0.0;
final pPitch = new LinearInterpolator(1.0);
public function new(data: Float32Array, looping: Bool, sampleRate: Hertz) {
super(data, looping);
this.sampleRate = sampleRate;
};
override function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz): Void {
Profiler.event();
assert(Critical, requestedSamples.numChannels == playbackData.numChannels);
final stepDopplerRatio = pDopplerRatio.getLerpStepSize(requestedSamples.channelLength);
final stepDstAttenuation = pDstAttenuation.getLerpStepSize(requestedSamples.channelLength);
final stepPitch = pPitch.getLerpStepSize(requestedSamples.channelLength);
final stepVol = pVolume.getLerpStepSize(requestedSamples.channelLength);
final resampleLength = Resampler.getResampleLength(playbackData.channelLength, this.sampleRate, sampleRate);
var samplesWritten = 0;
var reachedEndOfData = false;
// As long as there are more samples requested and there is data left
while (samplesWritten < requestedSamples.channelLength && !reachedEndOfData) {
final initialFloatPosition = floatPosition;
// Check how many samples we can actually write
final samplesToWrite = minI(resampleLength - playbackPosition, requestedSamples.channelLength - samplesWritten);
for (c in 0...requestedSamples.numChannels) {
final outChannelView = requestedSamples.getChannelView(c);
// Reset interpolators for channel
pDopplerRatio.currentValue = pDopplerRatio.lastValue;
pDstAttenuation.currentValue = pDstAttenuation.lastValue;
pPitch.currentValue = pPitch.lastValue;
pVolume.currentValue = pVolume.lastValue;
floatPosition = initialFloatPosition;
for (i in 0...samplesToWrite) {
var sampledVal: Float = Resampler.sampleAtTargetPositionLerp(playbackData.getChannelView(c), floatPosition, this.sampleRate, sampleRate);
if (pDopplerRatio.currentValue <= 0) {
// In this case, the audio is inaudible at the time of emission at its source,
// although technically the sound would eventually arrive at the listener in reverse.
// We don't simulate the latter, but still make the sound silent for some added realism
outChannelView[samplesWritten + i] = 0.0;
floatPosition += pPitch.currentValue;
}
else {
outChannelView[samplesWritten + i] = sampledVal * pVolume.currentValue * pDstAttenuation.currentValue;
floatPosition += pPitch.currentValue * pDopplerRatio.currentValue;
}
pDopplerRatio.currentValue += stepDopplerRatio;
pDstAttenuation.currentValue += stepDstAttenuation;
pPitch.currentValue += stepPitch;
pVolume.currentValue += stepVol;
if (floatPosition >= resampleLength) {
if (looping) {
while (floatPosition >= resampleLength) {
playbackPosition -= resampleLength;
floatPosition -= resampleLength; // Keep fraction
}
if (c == 0) {
optionallyApplySourceEffects();
}
}
else {
stop();
reachedEndOfData = true;
break;
}
}
else {
playbackPosition = Std.int(floatPosition);
}
}
}
samplesWritten += samplesToWrite;
}
// We're out of data, but more samples are requested
for (c in 0...requestedSamples.numChannels) {
final channelView = requestedSamples.getChannelView(c);
for (i in samplesWritten...requestedSamples.channelLength) {
channelView[i] = 0;
}
}
pDopplerRatio.updateLast();
pDstAttenuation.updateLast();
pPitch.updateLast();
pVolume.updateLast();
processInserts(requestedSamples);
}
override public function play(retrigger: Bool) {
super.play(retrigger);
if (retrigger) {
floatPosition = 0.0;
}
}
override public function stop() {
super.stop();
floatPosition = 0.0;
}
override public function pause() {
super.pause();
floatPosition = playbackPosition;
}
override function parseMessage(message: Message) {
switch (message.id) {
case ChannelMessageID.PPitch: pPitch.targetValue = cast message.data;
default:
super.parseMessage(message);
}
}
}

@@ -0,0 +1,16 @@
package aura.channels.generators;
abstract class BaseGenerator extends BaseChannel {
public function play(retrigger: Bool): Void {
paused = false;
finished = false;
}
public function pause(): Void {
paused = true;
}
public function stop(): Void {
finished = true;
}
}

@@ -0,0 +1,42 @@
package aura.channels.generators;
import haxe.ds.Vector;
import kha.FastFloat;
import aura.channels.BaseChannel.BaseChannelHandle;
import aura.types.AudioBuffer;
import aura.utils.BufferUtils;
/**
Signal noise produced by Brownian motion.
**/
class BrownNoise extends BaseGenerator {
final last: Vector<FastFloat>;
inline function new() {
last = createEmptyVecF32(2);
}
/**
Creates a new BrownNoise channel and returns a handle to it.
**/
public static function create(): BaseChannelHandle {
return new BaseChannelHandle(new BrownNoise());
}
function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz) {
for (c in 0...requestedSamples.numChannels) {
final channelView = requestedSamples.getChannelView(c);
for (i in 0...requestedSamples.channelLength) {
final white = Math.random() * 2 - 1;
channelView[i] = (last[c] + (0.02 * white)) / 1.02;
last[c] = channelView[i];
channelView[i] *= 3.5; // (Roughly) compensate for gain
}
}
processInserts(requestedSamples);
}
}
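// Usage sketch (not part of the original commit): playing generator noise
// through the master channel.
//
//     final noiseHandle = BrownNoise.create();
//     noiseHandle.setMixChannel(Aura.masterChannel);
//     noiseHandle.play();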

@@ -0,0 +1,67 @@
package aura.channels.generators;
import haxe.ds.Vector;
import kha.FastFloat;
import aura.channels.BaseChannel.BaseChannelHandle;
import aura.types.AudioBuffer;
import aura.utils.BufferUtils;
/**
Signal with a frequency spectrum such that the power spectral density
(energy or power per Hz) is inversely proportional to the frequency of the
signal. Each octave (halving/doubling in frequency) carries an equal amount
of noise power.
**/
class PinkNoise extends BaseGenerator {
final b0: Vector<FastFloat>;
final b1: Vector<FastFloat>;
final b2: Vector<FastFloat>;
final b3: Vector<FastFloat>;
final b4: Vector<FastFloat>;
final b5: Vector<FastFloat>;
final b6: Vector<FastFloat>;
inline function new() {
b0 = createEmptyVecF32(2);
b1 = createEmptyVecF32(2);
b2 = createEmptyVecF32(2);
b3 = createEmptyVecF32(2);
b4 = createEmptyVecF32(2);
b5 = createEmptyVecF32(2);
b6 = createEmptyVecF32(2);
}
/**
Creates a new PinkNoise channel and returns a handle to it.
**/
public static function create(): BaseChannelHandle {
return new BaseChannelHandle(new PinkNoise());
}
function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz) {
for (c in 0...requestedSamples.numChannels) {
final channelView = requestedSamples.getChannelView(c);
for (i in 0...requestedSamples.channelLength) {
final white = Math.random() * 2 - 1;
// Paul Kellet's refined method from
// https://www.firstpr.com.au/dsp/pink-noise/
b0[c] = 0.99886 * b0[c] + white * 0.0555179;
b1[c] = 0.99332 * b1[c] + white * 0.0750759;
b2[c] = 0.96900 * b2[c] + white * 0.1538520;
b3[c] = 0.86650 * b3[c] + white * 0.3104856;
b4[c] = 0.55000 * b4[c] + white * 0.5329522;
b5[c] = -0.7616 * b5[c] - white * 0.0168980;
channelView[i] = b0[c] + b1[c] + b2[c] + b3[c] + b4[c] + b5[c] + b6[c] + white * 0.5362;
channelView[i] *= 0.11;
b6[c] = white * 0.115926;
}
}
processInserts(requestedSamples);
}
}

@@ -0,0 +1,27 @@
package aura.channels.generators;
import aura.channels.BaseChannel.BaseChannelHandle;
import aura.types.AudioBuffer;
/**
Random signal with a constant power spectral density.
**/
class WhiteNoise extends BaseGenerator {
inline function new() {}
/**
Creates a new WhiteNoise channel and returns a handle to it.
**/
public static function create(): BaseChannelHandle {
return new BaseChannelHandle(new WhiteNoise());
}
function nextSamples(requestedSamples: AudioBuffer, sampleRate: Hertz) {
for (i in 0...requestedSamples.rawData.length) {
requestedSamples.rawData[i] = Math.random() * 2 - 1;
}
processInserts(requestedSamples);
}
}

@@ -0,0 +1,36 @@
package aura.dsp;
import aura.threading.Fifo;
import aura.threading.Message;
import aura.types.AudioBuffer;
@:allow(aura.dsp.panner.Panner)
abstract class DSP {
public var bypass = false;
var inUse = false;
final messages: Fifo<Message> = new Fifo();
abstract function process(buffer: AudioBuffer): Void;
function synchronize() {
var message: Null<Message>;
while ((message = messages.tryPop()) != null) {
parseMessage(message);
}
}
function parseMessage(message: Message) {
switch (message.id) {
// TODO
case DSPMessageID.BypassEnable:
case DSPMessageID.BypassDisable:
default:
}
}
inline function sendMessage(message: Message) {
messages.add(message);
}
}

@@ -0,0 +1,61 @@
package aura.dsp;
import haxe.ds.Vector;
import aura.threading.Message;
import aura.types.AudioBuffer;
import aura.utils.CircularBuffer;
class DelayLine extends DSP {
public static inline var NUM_CHANNELS = 2;
public final maxDelaySamples: Int;
final delayBufs: Vector<CircularBuffer>;
public function new(maxDelaySamples: Int) {
this.maxDelaySamples = maxDelaySamples;
delayBufs = new Vector(NUM_CHANNELS);
for (i in 0...NUM_CHANNELS) {
delayBufs[i] = new CircularBuffer(maxDelaySamples);
}
}
public inline function setDelay(delaySamples: Int) {
for (i in 0...NUM_CHANNELS) {
delayBufs[i].setDelay(delaySamples);
}
}
public inline function setDelays(delaySamples: Array<Int>) {
for (i in 0...NUM_CHANNELS) {
delayBufs[i].setDelay(delaySamples[i]);
}
}
function process(buffer: AudioBuffer) {
for (c in 0...buffer.numChannels) {
final delayBuf = delayBufs[c];
if (delayBuf.delay == 0) continue;
final channelView = buffer.getChannelView(c);
for (i in 0...buffer.channelLength) {
delayBuf.set(channelView[i]);
channelView[i] = delayBuf.get();
delayBuf.increment();
}
}
}
override function parseMessage(message: Message) {
switch (message.id) {
case DSPMessageID.SetDelays:
setDelays(message.data);
default:
super.parseMessage(message);
}
}
}
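// Usage sketch (not part of the original commit): a 250 ms stereo delay
// used as an insert effect. Milliseconds are converted to samples via the
// current sample rate; `someHandle` stands for any channel handle.
//
//     final delaySamples = Std.int(0.250 * Aura.sampleRate);
//     final delay = new DelayLine(delaySamples);
//     delay.setDelay(delaySamples);
//     someHandle.addInsert(delay);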

@@ -0,0 +1,217 @@
package aura.dsp;
import haxe.ds.Vector;
import kha.FastFloat;
import kha.arrays.Float32Array;
import kha.arrays.Int32Array;
import aura.math.FFT;
import aura.threading.Message;
import aura.types.AudioBuffer;
import aura.types.ComplexArray;
import aura.types.SwapBuffer;
import aura.utils.BufferUtils;
import aura.utils.MathUtils;
import aura.utils.Profiler;
/**
Calculates the 1D linear convolution of the input with another buffer called
`impulse`.
**/
class FFTConvolver extends DSP {
public static inline var NUM_CHANNELS = 2;
public static inline var FFT_SIZE = 1024;
public static inline var CHUNK_SIZE = Std.int(FFT_SIZE / 2);
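// Processing in chunks of half the FFT size leaves room for the convolution
// tail: the linear convolution of a CHUNK_SIZE-long signal block with a
// CHUNK_SIZE-long impulse block is 2 * CHUNK_SIZE - 1 samples long and thus
// still fits into one FFT_SIZE-long segment (overlap-add).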
/**
The amount of samples used to (temporally) interpolate
between consecutive impulse responses. Values larger than `CHUNK_SIZE`
are clamped to that length.
**Special values**:
- Any negative value: Automatically follows `CHUNK_SIZE`
- 0: Do not interpolate between consecutive impulse responses // TODO implement me
**/
// TODO: make thread-safe
public var temporalInterpolationLength = -1;
final impulseSwapBuffer: SwapBuffer;
/**
The part of the last output signal that was longer than the last frame
buffer and thus overlaps into the next frame. To prevent allocations
during runtime and to ensure that overlapPrev is not longer than one
FFT segment, the overlap vectors are preallocated to `CHUNK_SIZE - 1`.
Use `overlapLength` to get the true length.
**/
final overlapPrev: Vector<Vector<FastFloat>>;
/**
The (per-channel) overlap length of the convolution result for the
current impulse response.
**/
final overlapLength: Vector<Int>;
/**
The (per-channel) overlap length of the convolution result for the
impulse response from the previous processing block.
**/
final prevOverlapLength: Vector<Int>;
static var signalFFT: Null<RealValuedFFT>;
final impulseFFT: Null<RealValuedFFT>;
var currentImpulseAlternationIndex = 0;
final prevImpulseLengths: Int32Array;
public function new() {
assert(Error, isPowerOf2(FFT_SIZE), 'FFT_SIZE must be a power of 2, but it is $FFT_SIZE');
if (signalFFT == null) {
signalFFT = new RealValuedFFT(FFT_SIZE, 2, 2);
}
impulseFFT = new RealValuedFFT(FFT_SIZE, 1, NUM_CHANNELS * 2);
prevImpulseLengths = new Int32Array(NUM_CHANNELS);
for (i in 0...prevImpulseLengths.length) {
prevImpulseLengths[i] = 0;
}
impulseSwapBuffer = new SwapBuffer(CHUNK_SIZE * 2);
overlapPrev = new Vector(NUM_CHANNELS);
for (i in 0...NUM_CHANNELS) {
// Max. impulse size is CHUNK_SIZE
overlapPrev[i] = createEmptyVecF32(CHUNK_SIZE - 1);
}
overlapLength = createEmptyVecI(NUM_CHANNELS);
prevOverlapLength = createEmptyVecI(NUM_CHANNELS);
}
// TODO: move this into main thread and use swapbuffer for impulse freqs
// instead? Moving the impulse FFT computation into the main thread will
// also remove the fft computation while the swap buffer lock is active,
// reducing the lock time, but it occupies the main thread more...
function updateImpulseFromSwapBuffer(impulseLengths: Array<Int>) {
final impulseTimeDomain = impulseFFT.getInput(0);
impulseSwapBuffer.beginRead();
for (c in 0...impulseLengths.length) {
impulseSwapBuffer.read(impulseTimeDomain, 0, CHUNK_SIZE * c, CHUNK_SIZE);
inline calculateImpulseFFT(impulseLengths[c], c);
}
impulseSwapBuffer.endRead();
currentImpulseAlternationIndex = 1 - currentImpulseAlternationIndex;
}
inline function calculateImpulseFFT(impulseLength: Int, channelIndex: Int) {
impulseFFT.forwardFFT(0, NUM_CHANNELS * channelIndex + currentImpulseAlternationIndex);
overlapLength[channelIndex] = maxI(prevImpulseLengths[channelIndex], impulseLength - 1);
prevImpulseLengths[channelIndex] = impulseLength;
}
public function process(buffer: AudioBuffer) {
Profiler.event();
// TODO
assert(Critical, buffer.numChannels == NUM_CHANNELS);
for (c in 0...buffer.numChannels) {
if (overlapLength[c] < 0) return;
}
// Ensure correct boundaries
final isMultiple = (buffer.channelLength % CHUNK_SIZE) == 0 || (CHUNK_SIZE % buffer.channelLength) == 0;
assert(Debug, isMultiple, "channelLength must be a multiple of CHUNK_SIZE or vice versa");
var numSegments: Int; // Segments per channel frame
var segmentSize: Int;
if (CHUNK_SIZE < buffer.channelLength) {
numSegments = Std.int(buffer.channelLength / CHUNK_SIZE);
segmentSize = CHUNK_SIZE;
}
else {
// TODO: accumulate samples if buffer.channelLength < CHUNK_SIZE,
// then delay output
numSegments = 1;
segmentSize = buffer.channelLength;
}
final numInterpolationSteps = temporalInterpolationLength < 0 ? CHUNK_SIZE : minI(temporalInterpolationLength, CHUNK_SIZE);
final interpolationStepSize = 1 / numInterpolationSteps;
final signalTimeDomainCurrentImpulse = signalFFT.getInput(0);
final signalTimeDomainPrevImpulse = signalFFT.getInput(1);
final signalFreqDomainCurrentImpulse = signalFFT.getOutput(0);
final signalFreqDomainPrevImpulse = signalFFT.getOutput(1);
for (c in 0...buffer.numChannels) {
final channelView = buffer.getChannelView(c);
final impulseFreqDomainCurrent = impulseFFT.getOutput(NUM_CHANNELS * c + (1 - currentImpulseAlternationIndex));
final impulseFreqDomainPrev = impulseFFT.getOutput(NUM_CHANNELS * c + currentImpulseAlternationIndex);
for (s in 0...numSegments) {
final segmentOffset = s * segmentSize;
// Copy to FFT input buffer and apply padding
for (i in 0...segmentSize) {
signalTimeDomainCurrentImpulse[i] = channelView[segmentOffset + i];
}
for (i in segmentSize...FFT_SIZE) {
signalTimeDomainCurrentImpulse[i] = 0.0;
}
signalFFT.forwardFFT(0, 0);
// Copy the signal's frequency-domain data so it can be multiplied with
// both the current and the previous impulse frequency response
signalFreqDomainPrevImpulse.copyFrom(signalFreqDomainCurrentImpulse);
// The actual convolution takes place here
// TODO: SIMD
for (i in 0...FFT_SIZE) {
signalFreqDomainCurrentImpulse[i] *= impulseFreqDomainCurrent[i];
signalFreqDomainPrevImpulse[i] *= impulseFreqDomainPrev[i];
}
// Transform back into time domain
signalFFT.inverseFFT(0, 0);
signalFFT.inverseFFT(1, 1);
// Interpolate (only for first segment) and copy to output
final actualNumInterpolationSteps = (s == 0) ? numInterpolationSteps : 0;
var t = 0.0;
for (i in 0...actualNumInterpolationSteps) {
channelView[segmentOffset + i] = lerpF32(signalTimeDomainPrevImpulse[i], signalTimeDomainCurrentImpulse[i], t);
t += interpolationStepSize;
}
for (i in actualNumInterpolationSteps...CHUNK_SIZE) {
channelView[segmentOffset + i] = signalTimeDomainCurrentImpulse[i];
}
// Apply overlapping from last segment
for (i in 0...prevOverlapLength[c]) {
channelView[segmentOffset + i] += overlapPrev[c][i];
}
// Write overlapping samples for next segment
for (i in 0...overlapLength[c]) {
overlapPrev[c][i] = signalTimeDomainCurrentImpulse[CHUNK_SIZE + i];
}
prevOverlapLength[c] = overlapLength[c];
}
}
}
override function parseMessage(message: Message) {
switch (message.id) {
case DSPMessageID.SwapBufferReady:
updateImpulseFromSwapBuffer(message.data);
default:
super.parseMessage(message);
}
}
}

View File

@ -0,0 +1,86 @@
package aura.dsp;
import haxe.ds.Vector;
import kha.FastFloat;
import aura.Types;
import aura.types.AudioBuffer;
import aura.utils.BufferUtils;
import aura.utils.FrequencyUtils;
import aura.utils.MathUtils;
using aura.utils.StepIterator;
/**
A simple IIR (infinite impulse response) lowpass/bandpass/highpass filter
with a slope of 12 dB/octave.
**/
class Filter extends DSP {
/**
Whether the filter should be a low-/band- or highpass filter.
**/
public var filterMode: FilterMode;
final buf: Vector<Vector<FastFloat>>;
final cutoff: Vector<FastFloat>;
public function new(filterMode: FilterMode) {
this.filterMode = filterMode;
this.buf = new Vector(2); // Two channels
buf[0] = createEmptyVecF32(2); // Two buffers per channel
buf[1] = createEmptyVecF32(2);
this.cutoff = new Vector(2);
cutoff[0] = cutoff[1] = 1.0;
}
public function process(buffer: AudioBuffer) {
for (c in 0...buffer.numChannels) {
if (cutoff[c] == 1.0) { continue; }
final channelView = buffer.getChannelView(c);
for (i in 0...buffer.channelLength) {
// http://www.martin-finke.de/blog/articles/audio-plugins-013-filter/
buf[c][0] += cutoff[c] * (channelView[i] - buf[c][0]);
buf[c][1] += cutoff[c] * (buf[c][0] - buf[c][1]);
// TODO: Move the switch out of the loop, even if that means duplicate code?
channelView[i] = switch (filterMode) {
case LowPass: buf[c][1];
case HighPass: channelView[i] - buf[c][0];
case BandPass: buf[c][0] - buf[c][1];
}
}
}
}
/**
Set the cutoff frequency for this filter. `channels` states for which
channels to set the cutoff value.
**/
public inline function setCutoffFreq(cutoffFreq: Hertz, channels: Channels = All) {
final maxFreq = sampleRateToMaxFreq(Aura.sampleRate);
final c = frequencyToFactor(clampI(cutoffFreq, 0, maxFreq), maxFreq);
if (channels.matches(Channels.Left)) { cutoff[0] = c; }
if (channels.matches(Channels.Right)) { cutoff[1] = c; }
}
/**
Get the cutoff frequency of this filter. `channels` states from which
channel to get the cutoff value; if it includes both channels (`All`),
the left channel's cutoff frequency is returned.
**/
public inline function getCutoffFreq(channels: Channels = All): Hertz {
final c = channels.matches(Channels.Left) ? cutoff[0] : cutoff[1];
return factorToFrequency(c, sampleRateToMaxFreq(Aura.sampleRate));
}
}
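// Usage sketch: an 800 Hz lowpass on both channels (`addInsert()` is assumed
// to be the way insert effects are attached to a channel handle):
//
//     final lowpass = new Filter(LowPass);
//     lowpass.setCutoffFreq(800);
//     someHandle.addInsert(lowpass);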
enum abstract FilterMode(Int) {
var LowPass;
var BandPass;
var HighPass;
}

View File

@ -0,0 +1,99 @@
package aura.dsp;
import haxe.ds.Vector;
import kha.FastFloat;
import kha.arrays.Float32Array;
import aura.Types;
import aura.threading.Message;
import aura.types.AudioBuffer;
import aura.utils.CircularBuffer;
/**
A delay line that supports fractions of samples set as delay times.
The implementation follows the linear interpolation approach as presented
in https://ccrma.stanford.edu/~jos/pasp/Fractional_Delay_Filtering_Linear.html.
@see `aura.dsp.DelayLine`
**/
class FractionalDelayLine extends DSP {
/**
The maximum amount of channels this DSP effect supports.
**/
public final maxNumChannels: Int;
/**
The maximum amount of (whole) samples by which any channel of the input
can be delayed.
**/
public final maxDelayLength: Int;
final delayBufs: Vector<CircularBuffer>;
final delayLengthFracts: Float32Array;
public function new(maxNumChannels: Int, maxDelayLength: Int) {
this.maxNumChannels = maxNumChannels;
this.maxDelayLength = maxDelayLength;
delayLengthFracts = new Float32Array(maxNumChannels);
delayBufs = new Vector(maxNumChannels);
for (i in 0...maxNumChannels) {
delayLengthFracts[i] = 0.0;
delayBufs[i] = new CircularBuffer(maxDelayLength);
}
}
public inline function setDelayLength(channelMask: Channels, delayLength: FastFloat) {
assert(Error, delayLength >= 0);
assert(Error, delayLength < maxDelayLength);
sendMessage({id: DSPMessageID.SetDelays, data: [channelMask, delayLength]});
}
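// Per sample, process() below linearly interpolates between the two
// neighboring integer delays: with frac = delayLengthFracts[c],
//     output = delayedSignalM + frac * (delayedSignalMm1 - delayedSignalM),
// i.e. lerp(delayedSignalM, delayedSignalMm1, frac).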
function process(buffer: AudioBuffer) {
for (c in 0...buffer.numChannels) {
if (delayBufs[c].delay == 0) continue;
final channelView = buffer.getChannelView(c);
for (i in 0...buffer.channelLength) {
delayBufs[c].set(channelView[i]);
var delayedSignalMm1 = delayBufs[c].get(); // M - 1
delayBufs[c].increment();
var delayedSignalM = delayBufs[c].get(); // M
channelView[i] = delayedSignalM + delayLengthFracts[c] * (delayedSignalMm1 - delayedSignalM);
}
}
}
override function parseMessage(message: Message) {
switch (message.id) {
case DSPMessageID.SetDelays:
final channelMask = message.dataAsArrayUnsafe()[0];
final delayLength = message.dataAsArrayUnsafe()[1];
at_setDelayLength(channelMask, delayLength);
default:
super.parseMessage(message);
}
}
inline function at_setDelayLength(channelMask: Channels, delayLength: FastFloat) {
final delayLengthFloor = Math.ffloor(delayLength); // TODO implement 32-bit ffloor
final delayLengthFract = delayLength - delayLengthFloor;
final delayLengthInt = Std.int(delayLengthFloor);
for (c in 0...maxNumChannels) {
if (!channelMask.matchesIndex(c)) {
continue;
}
delayLengthFracts[c] = delayLengthFract;
delayBufs[c].setDelay(delayLengthInt + 1);
}
}
}

View File

@ -0,0 +1,55 @@
package aura.dsp;
import aura.types.AudioBuffer;
import aura.utils.CircularBuffer;
import aura.utils.FrequencyUtils;
/**
The [Haas effect](https://en.wikipedia.org/wiki/Precedence_effect) is a
psychoacoustic effect that delays one stereo channel by roughly 3 to 50
milliseconds to create the perception of 3D sound.
Using a negative value for `delay` moves the sound to the left of the
listener by delaying the right channel. Using a positive value delays the
left channel and moves the sound to the right. If `delay` is `0`, this
effect does nothing.
**/
class HaasEffect extends DSP {
var delayChannelIdx: Int;
var diffSamples: Int;
var delayBuff: CircularBuffer;
public function new(delay: Millisecond) {
this.diffSamples = 0;
this.setDelay(delay);
}
public function process(buffer: AudioBuffer) {
if (diffSamples == 0) return;
for (c in 0...buffer.numChannels) {
if (c != delayChannelIdx) { continue; }
final channelView = buffer.getChannelView(c);
for (i in 0...buffer.channelLength) {
delayBuff.set(channelView[i]);
channelView[i] = delayBuff.get();
delayBuff.increment();
}
}
}
public function setDelay(delay: Millisecond) {
final prev = diffSamples;
this.diffSamples = msToSamples(Aura.sampleRate, delay);
if (prev != diffSamples) {
this.delayChannelIdx = (diffSamples > 0) ? 0 : 1;
this.delayBuff = new CircularBuffer((diffSamples < 0) ? -diffSamples : diffSamples);
}
}
public inline function getDelay(): Millisecond {
return samplesToMs(Aura.sampleRate, diffSamples);
}
}
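// Usage sketch: delaying the left channel by 15 ms moves the perceived sound
// source to the right (`addInsert()` is assumed as in the other DSP effects):
//
//     someHandle.addInsert(new HaasEffect(15));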

View File

@ -0,0 +1,109 @@
package aura.dsp;
import haxe.ds.Vector;
import kha.FastFloat;
import kha.arrays.ByteArray;
import aura.types.AudioBuffer;
import aura.utils.CircularBuffer;
/**
Perform efficient convolution of sparse impulse responses (i.e., impulse
responses in which most samples have a value of 0).
**/
class SparseConvolver extends DSP {
static inline var NUM_CHANNELS = 2;
public final impulseBuffer: SparseImpulseBuffer;
final delayBufs: Vector<CircularBuffer>;
/**
Create a new `SparseConvolver` object.
@param maxNumImpulses The maximal amount of non-zero impulses that can be stored in `this.impulseBuffer`.
@param maxNumImpulseResponseSamples The highest possible position of any non-zero impulse stored in the `impulseBuffer`.
There is no bounds checking in place!
**/
public function new(maxNumImpulses: Int, maxNumImpulseResponseSamples: Int) {
assert(Error, maxNumImpulseResponseSamples > maxNumImpulses);
impulseBuffer = new SparseImpulseBuffer(maxNumImpulses);
delayBufs = new Vector(NUM_CHANNELS);
for (i in 0...NUM_CHANNELS) {
delayBufs[i] = new CircularBuffer(maxNumImpulseResponseSamples);
}
}
public inline function getMaxNumImpulses(): Int {
return impulseBuffer.length;
}
public inline function getMaxNumImpulseResponseSamples(): Int {
return delayBufs[0].length;
}
function process(buffer: AudioBuffer) {
assert(Error, buffer.numChannels == NUM_CHANNELS);
for (c in 0...buffer.numChannels) {
final channelView = buffer.getChannelView(c);
final delayBuf = delayBufs[c];
for (i in 0...buffer.channelLength) {
delayBuf.set(channelView[i]);
var convolutionSum: FastFloat = 0.0;
for (impulseIndex in 0...impulseBuffer.length) {
// Move read pointer to impulse position, probably not the
// most cache efficient operation but it looks pretty unavoidable
delayBuf.setDelay(impulseBuffer.getImpulsePos(impulseIndex));
convolutionSum += delayBuf.get() * impulseBuffer.getImpulseMagnitude(impulseIndex);
}
// TODO: impulse response must be longer than buffer.channelLength!
channelView[i] = convolutionSum;
delayBuf.increment();
}
}
}
}
/**
A cache efficient buffer to store `(position: Int, magnitude: FastFloat)`
pairs that represent impulses of varying magnitudes within a sparse impulse
response. The buffer is **NOT** guaranteed to be zero-initialized.
**/
abstract SparseImpulseBuffer(ByteArray) {
public var length(get, never): Int;
public inline function new(numImpulses: Int) {
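// 8 bytes per impulse: a 4-byte unsigned position followed by a
// 4-byte float magnitude, stored interleaved for cache efficiency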
this = ByteArray.make(numImpulses * 8);
}
public inline function get_length(): Int {
return this.byteLength >> 3;
}
public inline function getImpulsePos(index: Int): Int {
return this.getUint32(index * 8);
}
public inline function setImpulsePos(index: Int, position: Int) {
this.setUint32(index * 8, position);
}
public inline function getImpulseMagnitude(index: Int): FastFloat {
return this.getFloat32(index * 8 + 4);
}
public inline function setImpulseMagnitude(index: Int, magnitude: FastFloat) {
this.setFloat32(index * 8 + 4, magnitude);
}
}

View File

@ -0,0 +1,120 @@
package aura.dsp.panner;
import kha.FastFloat;
import kha.arrays.Float32Array;
import aura.Types.Channels;
import aura.channels.BaseChannel.BaseChannelHandle;
import aura.threading.Message;
import aura.types.AudioBuffer;
import aura.types.HRTF;
import aura.utils.MathUtils;
import aura.utils.Pointer;
class HRTFPanner extends Panner {
public var hrtf: HRTF;
final hrtfConvolver: FFTConvolver;
final hrtfDelayLine: FractionalDelayLine;
final hrirPtrDelay: Pointer<FastFloat>;
final hrirPtrImpulseLength: Pointer<Int>;
final hrir: Float32Array;
final hrirOpp: Float32Array;
public function new(handle: BaseChannelHandle, hrtf: HRTF) {
super(handle);
this.hrtf = hrtf;
hrtfConvolver = new FFTConvolver();
hrtfDelayLine = new FractionalDelayLine(2, Math.ceil(hrtf.maxDelayLength));
hrtfConvolver.bypass = true;
hrtfDelayLine.bypass = true;
hrirPtrDelay = new Pointer<FastFloat>();
hrirPtrImpulseLength = new Pointer<Int>();
hrir = new Float32Array(FFTConvolver.CHUNK_SIZE);
hrirOpp = new Float32Array(FFTConvolver.CHUNK_SIZE);
}
override public function update3D() {
final listener = Aura.listener;
final dirToChannel = this.location.sub(listener.location);
if (dirToChannel.length == 0) {
hrtfConvolver.bypass = true;
hrtfDelayLine.bypass = true;
handle.channel.sendMessage({ id: ChannelMessageID.PDstAttenuation, data: 1.0 });
return;
}
final look = listener.look;
final up = listener.right.cross(look).normalized();
// Project the channel position (relative to the listener) to the plane
// described by the listener's look and right vectors
final projectedChannelPos = projectPointOntoPlane(dirToChannel, up).normalized();
final elevationCos = up.dot(dirToChannel.normalized());
// 180: top, 0: bottom
final elevation = 180 - (Math.acos(elevationCos) * (180 / Math.PI));
var angle = getFullAngleDegrees(look, projectedChannelPos, up);
angle = angle != 0 ? 360 - angle : 0; // Make clockwise
hrtf.getInterpolatedHRIR(elevation, angle, hrir, hrirPtrImpulseLength, hrirPtrDelay);
final hrirLength = hrirPtrImpulseLength.getSure();
final hrirDelay = hrirPtrDelay.getSure();
if (hrtf.numChannels == 1) {
hrtf.getInterpolatedHRIR(elevation, 360 - angle, hrirOpp, hrirPtrImpulseLength, hrirPtrDelay);
final hrirOppLength = hrirPtrImpulseLength.getSure();
final hrirOppDelay = hrirPtrDelay.getSure();
final swapBuf = @:privateAccess hrtfConvolver.impulseSwapBuffer;
swapBuf.beginWrite();
// Left channel
swapBuf.write(hrir, 0, 0, hrirLength);
swapBuf.writeZero(hrirLength, FFTConvolver.CHUNK_SIZE);
// Right channel
swapBuf.write(hrirOpp, 0, FFTConvolver.CHUNK_SIZE, hrirOppLength);
swapBuf.writeZero(FFTConvolver.CHUNK_SIZE + hrirOppLength, swapBuf.length);
swapBuf.endWrite();
hrtfConvolver.bypass = false;
hrtfDelayLine.bypass = false;
hrtfConvolver.sendMessage({id: DSPMessageID.SwapBufferReady, data: [hrirLength, hrirOppLength]});
hrtfDelayLine.setDelayLength(Channels.Left, hrirDelay);
hrtfDelayLine.setDelayLength(Channels.Right, hrirOppDelay);
}
else {
for (c in 0...hrtf.numChannels) {
// final delaySamples = Math.round(hrir.delays[0]);
// TODO: handle interleaved coeffs of stereo HRTFs
// Deinterleave when reading the file?
}
}
super.update3D();
}
override public function reset3D() {
hrtfConvolver.bypass = true;
hrtfDelayLine.bypass = true;
super.reset3D();
}
function process(buffer: AudioBuffer) {
if (!hrtfConvolver.bypass) {
hrtfConvolver.synchronize();
hrtfConvolver.process(buffer);
hrtfDelayLine.synchronize();
hrtfDelayLine.process(buffer);
}
}
}

View File

@ -0,0 +1,166 @@
package aura.dsp.panner;
import kha.FastFloat;
import kha.math.FastVector3;
import aura.channels.BaseChannel.BaseChannelHandle;
import aura.math.Vec3;
import aura.threading.Message;
import aura.utils.MathUtils;
abstract class Panner extends DSP {
static inline var REFERENCE_DST = 1.0;
static inline var SPEED_OF_SOUND = 343.4; // Air, m/s
/**
The strength of the doppler effect.
This value is multiplied to the calculated doppler effect, thus:
- A value of `0.0` results in no doppler effect.
- A value between `0.0` and `1.0` attenuates the effect (smaller values: more attenuation).
- A value of `1.0` does not attenuate or amplify the doppler effect.
- A value larger than `1.0` amplifies the doppler effect (larger values: more amplification).
**/
public var dopplerStrength = 1.0;
public var attenuationMode = AttenuationMode.Inverse;
public var attenuationFactor = 1.0;
public var maxDistance = 10.0;
// public var minDistance = 1;
var handle: BaseChannelHandle;
/**
The location of this audio source in world space.
**/
var location: Vec3 = new Vec3(0, 0, 0);
/**
The velocity of this audio source in world space.
**/
var velocity: Vec3 = new Vec3(0, 0, 0);
public function new(handle: BaseChannelHandle) {
this.inUse = true; // Don't allow using panners with addInsert()
this.handle = handle;
this.handle.channel.panner = this;
}
public inline function setHandle(handle: BaseChannelHandle) {
if (this.handle != null) {
this.handle.channel.panner = null;
}
reset3D();
this.handle = handle;
this.handle.channel.panner = this;
}
/**
Update the channel's audible 3D parameters after changing the channel's
or the listener's position or rotation.
**/
public function update3D() {
final displacementToSource = location.sub(Aura.listener.location);
calculateAttenuation(displacementToSource);
calculateDoppler(displacementToSource);
};
/**
Reset all the audible 3D sound parameters (balance, doppler effect etc.)
which are calculated by `update3D()`. This function does *not* reset the
location value of the sound, so if you call `update3D()` again, you will
hear the sound at the same position as before you called `reset3D()`.
**/
public function reset3D() {
handle.channel.sendMessage({ id: ChannelMessageID.PDopplerRatio, data: 1.0 });
handle.channel.sendMessage({ id: ChannelMessageID.PDstAttenuation, data: 1.0 });
};
/**
Set the location of this panner in world space.
Calling this function also sets the panner's velocity if the call
to this function is not the first call for this panner. This behavior
avoids audible "jumps" in the doppler effect for initial placement
of objects if they are far away from the origin.
**/
public function setLocation(location: Vec3) {
final time = Time.getTime();
final timeDeltaLastCall = time - _setLocation_lastCallTime;
// If the last time setLocation() was called was at an earlier time step
if (timeDeltaLastCall > 0) {
_setLocation_lastLocation.setFrom(this.location);
_setLocation_lastVelocityUpdateTime = _setLocation_lastCallTime;
}
final timeDeltaVelocityUpdate = time - _setLocation_lastVelocityUpdateTime;
this.location.setFrom(location);
if (!_setLocation_initializedLocation) {
// Prevent jumps in the doppler effect caused by initial distance
// too far away from the origin
_setLocation_initializedLocation = true;
}
else if (timeDeltaVelocityUpdate > 0) {
velocity.setFrom(location.sub(_setLocation_lastLocation).mult(1 / timeDeltaVelocityUpdate));
}
_setLocation_lastCallTime = time;
}
var _setLocation_initializedLocation = false;
var _setLocation_lastLocation: Vec3 = new Vec3(0, 0, 0);
var _setLocation_lastCallTime: Float = 0.0;
var _setLocation_lastVelocityUpdateTime: Float = 0.0;
function calculateAttenuation(dirToChannel: FastVector3) {
final dst = maxF(REFERENCE_DST, dirToChannel.length);
final dstAttenuation = switch (attenuationMode) {
case Linear:
maxF(0.0, 1 - attenuationFactor * (dst - REFERENCE_DST) / (maxDistance - REFERENCE_DST));
case Inverse:
REFERENCE_DST / (REFERENCE_DST + attenuationFactor * (dst - REFERENCE_DST));
case Exponential:
Math.pow(dst / REFERENCE_DST, -attenuationFactor);
}
handle.channel.sendMessage({ id: ChannelMessageID.PDstAttenuation, data: dstAttenuation });
}
function calculateDoppler(displacementToSource: FastVector3) {
final listener = Aura.listener;
var dopplerRatio: FastFloat = 1.0;
if (dopplerStrength != 0.0 && (listener.velocity.length != 0 || this.velocity.length != 0)) {
final dist = displacementToSource.length;
if (dist == 0) {
// We don't have any radial velocity here...
handle.channel.sendMessage({ id: ChannelMessageID.PDopplerRatio, data: 1.0 });
return;
}
// Calculate radial velocity
final vr = listener.velocity.dot(displacementToSource) / dist;
final vs = this.velocity.dot(displacementToSource) / dist;
// If the sound source approaches at exactly the speed of sound,
// output silence and prevent the division by zero below
if (vs == -SPEED_OF_SOUND) {
handle.channel.sendMessage({ id: ChannelMessageID.PDopplerRatio, data: 0.0 });
return;
}
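// Standard Doppler formula: f_observed = f_emitted * (c + vr) / (c + vs),
// with both radial velocities measured along the listener-to-source axis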
dopplerRatio = (SPEED_OF_SOUND + vr) / (SPEED_OF_SOUND + vs);
dopplerRatio = Math.pow(dopplerRatio, dopplerStrength);
}
handle.channel.sendMessage({ id: ChannelMessageID.PDopplerRatio, data: dopplerRatio });
}
}
enum abstract AttenuationMode(Int) {
var Linear;
var Inverse;
var Exponential;
}

View File

@ -0,0 +1,127 @@
package aura.dsp.panner;
import aura.threading.Message;
import aura.types.AudioBuffer;
import aura.utils.Interpolator.LinearInterpolator;
import aura.utils.MathUtils;
using aura.utils.StepIterator;
class StereoPanner extends Panner {
final pVolumeLeft = new LinearInterpolator(1.0);
final pVolumeRight = new LinearInterpolator(1.0);
var _balance = Balance.CENTER;
override public function update3D() {
final listener = Aura.listener;
final dirToChannel = this.location.sub(listener.location);
if (dirToChannel.length == 0) {
setBalance(Balance.CENTER);
handle.channel.sendMessage({ id: ChannelMessageID.PDstAttenuation, data: 1.0 });
return;
}
final look = listener.look;
final up = listener.right.cross(look).normalized();
// Project the channel position (relative to the listener) to the plane
// described by the listener's look and right vectors
final projectedChannelPos = projectPointOntoPlane(dirToChannel, up).normalized();
// Angle cosine
var angle = listener.look.dot(projectedChannelPos);
// The calculated angle cosine looks like this on the unit circle:
// / 1 \
// 0 x 0 , where x is the listener and the front is at the top
// \ -1 /
// Make the center 0.5, use the absolute angle to prevent phase flipping.
// We lose front/back information here, but that's ok
angle = Math.abs(angle * 0.5);
// The angle cosine doesn't contain side information, so if the sound is
// to the right of the listener, we must invert the angle
if (listener.right.dot(projectedChannelPos) > 0) {
angle = 1 - angle;
}
setBalance(angle);
super.update3D();
}
override public function reset3D() {
setBalance(Balance.CENTER);
super.reset3D();
}
public inline function setBalance(balance: Balance) {
this._balance = balance;
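// Equal-power panning: assuming Balance's ~ operator yields (1 - balance),
// the left/right gains are sqrt(1 - b) and sqrt(b), which keeps the summed
// power constant while panning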
sendMessage({ id: StereoPannerMessageID.PVolumeLeft, data: Math.sqrt(~balance) });
sendMessage({ id: StereoPannerMessageID.PVolumeRight, data: Math.sqrt(balance) });
}
public inline function getBalance(): Balance {
return this._balance;
}
function process(buffer: AudioBuffer) {
assert(Critical, buffer.numChannels == 2, "A StereoPanner can only be applied to stereo channels");
final channelViewL = buffer.getChannelView(0);
final channelViewR = buffer.getChannelView(1);
final stepSizeL = pVolumeLeft.getLerpStepSize(buffer.channelLength);
final stepSizeR = pVolumeRight.getLerpStepSize(buffer.channelLength);
#if AURA_SIMD
final stepSizesL = pVolumeLeft.getLerpStepSizes32x4(buffer.channelLength);
final stepSizesR = pVolumeRight.getLerpStepSizes32x4(buffer.channelLength);
final lenRemainder = mod4(buffer.channelLength);
final startRemainder = buffer.channelLength - lenRemainder;
for (i in (0...buffer.channelLength).step(4)) {
pVolumeLeft.applySIMD32x4(channelViewL, i, stepSizesL);
pVolumeRight.applySIMD32x4(channelViewR, i, stepSizesR);
}
for (i in startRemainder...buffer.channelLength) {
channelViewL[i] *= pVolumeLeft.currentValue;
channelViewR[i] *= pVolumeRight.currentValue;
pVolumeLeft.currentValue += stepSizeL;
pVolumeRight.currentValue += stepSizeR;
}
#else
for (i in 0...buffer.channelLength) {
channelViewL[i] *= pVolumeLeft.currentValue;
channelViewR[i] *= pVolumeRight.currentValue;
pVolumeLeft.currentValue += stepSizeL;
pVolumeRight.currentValue += stepSizeR;
}
#end
pVolumeLeft.updateLast();
pVolumeRight.updateLast();
}
override function parseMessage(message: Message) {
switch (message.id) {
case StereoPannerMessageID.PVolumeLeft: pVolumeLeft.targetValue = cast message.data;
case StereoPannerMessageID.PVolumeRight: pVolumeRight.targetValue = cast message.data;
default:
super.parseMessage(message);
}
}
}
class StereoPannerMessageID extends DSPMessageID {
final PVolumeLeft;
final PVolumeRight;
}

View File

@ -0,0 +1,60 @@
package aura.dsp.sourcefx;
import aura.types.AudioBuffer;
import aura.Types;
/**
A special type of audio effect that—unlike insert effects—is not applied
continuously during audio playback but instead to the audio source buffer of
an `aura.channels.UncompBufferChannel` object.
This allows `SourceEffect`s to bake effects or provide sound variations
(for example by selecting random sounds from a pool of sounds, or by creating
sound variations on the fly with `aura.dsp.sourcefx.VelvetNoiseVariator`).
**/
abstract class SourceEffect {
/**
If `false` (default), `SourceEffect.process()` is only called
before the linked audio channel is played for the very first time with
its current combination of source effects. Adding or removing source
effects to a channel results in a recalculation of all source effects
on that channel.
If `true`, _additionally_ call `SourceEffect.process()` before each
consecutive replay of the audio source, including:
- Repetitions if the audio source is looping
- Calls to `audioChannel.play()` if the audio channel was stopped or
`play()` is called with `retrigger` set to `true`.
**/
public var applyOnReplay(default, null): AtomicBool = new AtomicBool(false);
/**
`SourceEffect`s are allowed to change the length of the source
audio passed as `srcBuffer` to `SourceEffect.process()`.
This function is used to calculate the amount of memory that needs to be
allocated to efficiently process all audio source effects of a channel.
It must return the least required channel length of the effect's
destination buffer with respect to the given source channel length.
**/
abstract function calculateRequiredChannelLength(srcChannelLength: Int): Int;
/**
Apply the effect to the audio data stored in the given source buffer and
write the result into the destination buffer.
- `srcBuffer` and `dstBuffer` may or may not point to the same object.
- The channels of `srcBuffer` might be longer than the valid audio
contained, use `srcChannelLength` to get the amount of valid samples
in each channel of the source buffer.
- `dstBuffer` is guaranteed to contain channels _at least_ the length
of `calculateRequiredChannelLength(srcChannelLength)`, it is expected
that the source effect fills `dstBuffer` exactly to that length.
This function must return the required destination channel length as
calculated by `calculateRequiredChannelLength(srcChannelLength)`.
**/
abstract function process(srcBuffer: AudioBuffer, srcChannelLength: Int, dstBuffer: AudioBuffer): Int;
}

View File

@ -0,0 +1,119 @@
package aura.dsp.sourcefx;
import kha.FastFloat;
import aura.dsp.SparseConvolver;
import aura.types.AudioBuffer;
import aura.utils.MathUtils;
import aura.utils.FrequencyUtils;
/**
Generate infinite variations on short percussive samples on the fly,
following the technique from the paper linked below.
The parameters of this effect need careful tweaking. Some examples can be
found in _Table 1_ in the paper linked below.
**Paper**:
Fagerström, Jon & Schlecht, Sebastian & Välimäki, Vesa. (2021).
One-to-Many Conversion for Percussive Samples. doi.org/10.23919/DAFx51585.2021.9768256.
**/
class VelvetNoiseVariator extends SourceEffect {
public final noiseLengthMs: FastFloat;
public final strength: FastFloat;
public final decayRate: FastFloat;
final highpassFilter: Filter;
final sparseConvolver: SparseConvolver;
var averageImpulseSpacing: Float;
/**
Create a new `VelvetNoiseVariator`.
@param noiseLengthMs The length of the velvet noise used for convolution, in milliseconds.
@param numImpulses The amount of impulses in the velvet noise.
@param decayRate The strength of the exponential decay of the velvet noise impulses.
@param lowShelfCutoff The cutoff frequency for the integrated high-pass filter.
@param strength The strength/influence of this effect. Think of this as a dry/wet control.
**/
public function new(noiseLengthMs: FastFloat, numImpulses: Int, decayRate: FastFloat, lowShelfCutoff: Hertz, strength: FastFloat) {
this.noiseLengthMs = noiseLengthMs;
final noiseLengthSamples = msToSamples(Aura.sampleRate, noiseLengthMs);
this.sparseConvolver = new SparseConvolver(numImpulses, noiseLengthSamples);
this.averageImpulseSpacing = maxF(1.0, noiseLengthSamples / numImpulses);
this.highpassFilter = new Filter(HighPass);
highpassFilter.setCutoffFreq(lowShelfCutoff, All);
this.applyOnReplay.store(true);
this.decayRate = decayRate;
this.strength = strength;
}
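/**
	Fill `impulseBuffer` with a velvet-noise sequence: per grid cell of (on
	average) `averageImpulseSpacing` samples, place one impulse at a random
	position within the cell, with random sign and exponentially decaying
	magnitude (controlled by `decayRate`).
**/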
public static function fillVelvetNoiseSparse(impulseBuffer: SparseImpulseBuffer, averageImpulseSpacing: Float, decayRate: FastFloat) {
var nextGridPosPrecise = 0.0;
var nextGridPosRounded = 0;
var nextImpulsePos = 0;
// Attenuate consecutive pulses
final expFactor = Math.pow(E_INV, decayRate); // e^(-decayRate) == 1/e^decayRate == (1/e)^decayRate
var exponentialDecayFactor = 1.0;
for (i in 0...impulseBuffer.length) {
final currentGridPosRounded = nextGridPosRounded;
nextGridPosPrecise += averageImpulseSpacing;
nextGridPosRounded = Math.round(nextGridPosPrecise);
nextImpulsePos = currentGridPosRounded + Std.random(nextGridPosRounded - currentGridPosRounded);
impulseBuffer.setImpulsePos(i, nextImpulsePos);
impulseBuffer.setImpulseMagnitude(i, (Math.random() < 0.5 ? -1.0 : 1.0) * exponentialDecayFactor);
exponentialDecayFactor *= expFactor; // e^(-decayRate*i) == e^(-decayRate)^i
}
}
function calculateRequiredChannelLength(srcChannelLength: Int): Int {
return srcChannelLength + sparseConvolver.getMaxNumImpulseResponseSamples() - 1;
}
@:access(aura.dsp.SparseConvolver)
function process(srcBuffer: AudioBuffer, srcChannelLength: Int, dstBuffer: AudioBuffer): Int {
final requiredLength = calculateRequiredChannelLength(srcChannelLength);
// Copy and pad data
for (c in 0...srcBuffer.numChannels) {
final srcChannelView = srcBuffer.getChannelView(c);
final dstChannelView = dstBuffer.getChannelView(c);
for (i in 0...srcChannelLength) {
dstChannelView[i] = srcChannelView[i];
}
// Pad with zeroes to convolve without overlapping
for (i in srcChannelLength...requiredLength) {
dstChannelView[i] = 0.0;
}
}
fillVelvetNoiseSparse(sparseConvolver.impulseBuffer, averageImpulseSpacing, decayRate);
highpassFilter.process(dstBuffer);
sparseConvolver.process(dstBuffer);
for (c in 0...srcBuffer.numChannels) {
final srcChannelView = srcBuffer.getChannelView(c);
final dstChannelView = dstBuffer.getChannelView(c);
for (i in 0...srcChannelLength) {
dstChannelView[i] = dstChannelView[i] * strength + srcChannelView[i];
}
for (i in srcChannelLength...requiredLength) {
dstChannelView[i] = dstChannelView[i] * strength;
}
}
return requiredLength;
}
}

View File

@ -0,0 +1,21 @@
package aura.format;
import haxe.Int64;
import haxe.io.Input;
inline function readInt64(inp: Input): Int64 {
final first = inp.readInt32();
final second = inp.readInt32();
return inp.bigEndian ? Int64.make(first, second) : Int64.make(second, first);
}
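// Read 4 bytes as an unsigned 32-bit integer into an Int64 so that values
// above 2^31 - 1 do not wrap to negative on targets with 32-bit Ints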
inline function readUInt32(inp: Input): Int64 {
var out: Int64 = 0;
for (i in 0...4) {
out += Int64.shl(inp.readByte(), (inp.bigEndian ? 3 - i : i) * 8);
}
return out;
}

View File

@ -0,0 +1,170 @@
/**
Specification:
V1: https://github.com/kcat/openal-soft/blob/be7938ed385e18c7800c663672262bb2976aa734/docs/hrtf.txt
V2: https://github.com/kcat/openal-soft/blob/0349bcc500fdb9b1245a5ddce01b2896bcf9bbb9/docs/hrtf.txt
V3: https://github.com/kcat/openal-soft/blob/3ef4bffaf959d06527a247faa19cc869781745e4/docs/hrtf.txt
**/
package aura.format.mhr;
import haxe.Int64;
import haxe.ds.Vector;
import haxe.io.Bytes;
import haxe.io.BytesInput;
import kha.arrays.Float32Array;
import aura.types.HRTF;
using aura.format.InputExtension;
/**
Load MHR HRTF files (format versions 1 to 3 are supported) into `HRTF` objects.
**/
class MHRReader {
public static function read(bytes: Bytes): HRTF {
final inp = new BytesInput(bytes);
inp.bigEndian = false;
final magic = inp.readString(8, UTF8);
final version = versionFromMagic(magic);
final sampleRate = Int64.toInt(inp.readUInt32());
final sampleType = switch (version) {
case V1: SampleType16Bit;
case V2: inp.readByte();
case V3: SampleType24Bit;
}
final channelType = switch (version) {
case V1: 0; // mono
case V2 | V3: inp.readByte();
}
final channels = channelType + 1;
// Samples per HRIR (head related impulse response) per channel
final hrirSize = inp.readByte();
// Number of fields used by the data set. Each field represents a
// set of points for a given distance.
final fieldCount = version == V1 ? 1 : inp.readByte();
final fields = new Vector<Field>(fieldCount);
var totalHRIRCount = 0;
for (i in 0...fieldCount) {
final field = new Field();
// 1000mm is arbitrary, but it doesn't matter since the interpolation
// can only access one distance anyway...
field.distance = version == V1 ? 1000 : inp.readUInt16();
field.evCount = inp.readByte();
field.azCount = new Vector<Int>(field.evCount);
field.evHRIROffsets = new Vector<Int>(field.evCount);
var fieldHrirCount = 0;
for (j in 0...field.evCount) {
// Calculate the offset into the HRIR arrays. Different
// elevations may have different amounts of azimuths/HRIRs
field.evHRIROffsets[j] = fieldHrirCount;
field.azCount[j] = inp.readByte();
fieldHrirCount += field.azCount[j];
}
field.hrirCount = fieldHrirCount;
totalHRIRCount += fieldHrirCount;
fields[i] = field;
}
// Read actual HRIR samples into coeffs
for (i in 0...fieldCount) {
final field = fields[i];
final hrirs = new Vector<HRIR>(field.hrirCount);
field.hrirs = hrirs;
for (j in 0...field.hrirCount) {
// Create individual HRIR
final hrir = hrirs[j] = new HRIR();
hrir.coeffs = new Float32Array(hrirSize * channels);
switch (sampleType) {
case SampleType16Bit:
for (s in 0...hrirSize) {
final coeff = inp.readInt16();
// 32768 = 2^15
hrir.coeffs[s] = coeff / (coeff < 0 ? 32768.0 : 32767.0);
}
case SampleType24Bit:
for (s in 0...hrirSize) {
final coeff = inp.readInt24();
// 8388608 = 2^23
hrir.coeffs[s] = coeff / (coeff < 0 ? 8388608.0 : 8388607.0);
}
}
}
}
// Read per-HRIR delay
var maxDelayLength = 0.0;
for (i in 0...fieldCount) {
final field = fields[i];
for (j in 0...field.hrirCount) {
final hrir = field.hrirs[j];
hrir.delays = new Vector<Float>(channels);
for (ch in 0...channels) {
// 6.2 fixed point
final delayRaw = inp.readByte();
final delayIntPart = delayRaw >> 2;
final delayFloatPart = isBitSet(delayRaw, 1) * 0.5 + isBitSet(delayRaw, 0) * 0.25;
final delay = delayIntPart + delayFloatPart;
hrir.delays[ch] = delay;
if (delay > maxDelayLength) {
maxDelayLength = delay;
}
}
}
}
// Sanity check: if the whole file was consumed, uncommenting the
// following read should throw an end-of-file error.
// inp.readByte();
return {
sampleRate: sampleRate,
numChannels: channels,
hrirSize: hrirSize,
hrirCount: totalHRIRCount,
fields: fields,
maxDelayLength: maxDelayLength
};
}
static inline function isBitSet(byte: Int, position: Int): Int {
return (byte & (1 << position) == 0) ? 0 : 1;
}
static inline function versionFromMagic(magic: String): MHRVersion {
return switch (magic) {
case "MinPHR01": V1;
case "MinPHR02": V2;
case "MinPHR03": V3;
default:
throw 'File is not an MHR HRTF file! Unknown magic string "$magic".';
}
}
}
private enum abstract SampleType(Int) from Int {
var SampleType16Bit;
var SampleType24Bit;
}
private enum abstract MHRVersion(Int) {
var V1;
var V2;
var V3;
}

View File

@ -0,0 +1,10 @@
package aura;
#if !macro
import aura.Types.Angle;
import aura.Types.Balance;
import aura.Types.Hertz;
import aura.Types.Millisecond;
#end
import aura.utils.Assert.*;

View File

@ -0,0 +1,384 @@
package aura.math;
import haxe.ds.Vector;
import kha.arrays.Float32Array;
import aura.types.Complex;
import aura.types.ComplexArray;
import aura.utils.BufferUtils;
import aura.utils.MathUtils;
enum abstract FFTInputType(Int) {
var RealValuedInput;
var ComplexValuedInput;
}
/**
Container for all required buffers for an FFT computation. The input buffers
can either be real or complex which depends on whether you instantiate an
`aura.math.FFT.RealValuedFFT` or an `aura.math.FFT.ComplexValuedFFT`.
Each instance of this class can have multiple input and output buffers whose
indices have to be passed to the respective FFT functions. It is more
efficient to use multiple buffers for different FFT calculations with the
same size instead of multiple instances of this class. The input buffers
are guaranteed to be zero-initialized.
Make sure not to use objects of this class from different threads at the
same time, since `FFTBase` is not thread-safe!
**/
abstract class FFTBase {
public final size: Int;
public final halfSize: Int;
public final outputBuffers: Vector<ComplexArray>;
final expRotationStepTable: ComplexArray;
public inline function new(size: Int, numOutputs: Int) {
this.size = size;
this.halfSize = size >>> 1;
outputBuffers = new Vector(numOutputs);
for (i in 0...numOutputs) {
outputBuffers[i] = new ComplexArray(size);
}
// Since the calculations for the complex exponential inside a FFT are
// basically just a rotation around the unit circle with a constant step
// size that only depends on the layer size, we can precompute the
// complex rotation steps.
final numExpTableEntries = log2Unsigned(size);
expRotationStepTable = new ComplexArray(numExpTableEntries);
for (halfLayerIdx in 0...numExpTableEntries) {
final halfLayerSize = exp2(halfLayerIdx);
// (-2 * Math.PI) / layerSize == -Math.PI / halfLayerSize,
// so we store values corresponding to each possible halfLayer index
expRotationStepTable[halfLayerIdx] = Complex.exp(-Math.PI / halfLayerSize);
}
}
public abstract function forwardFFT(inputBufferIndex: Int, outputBufferIndex: Int): Void;
public abstract function inverseFFT(inputBufferIndex: Int, outputBufferIndex: Int): Void;
public abstract function getInput(index: Int): Dynamic;
public inline function getOutput(index: Int): ComplexArray {
return outputBuffers[index];
}
}
class RealValuedFFT extends FFTBase {
public final inputBuffers: Vector<Float32Array>;
final tmpInputBufferHalf: ComplexArray;
final tmpOutputBufferHalf: ComplexArray;
public inline function new(size: Int, numInputs: Int, numOutputs: Int) {
super(size, numOutputs);
inputBuffers = new Vector(numInputs);
for (i in 0...numInputs) {
inputBuffers[i] = createEmptyF32Array(size);
}
tmpInputBufferHalf = new ComplexArray(halfSize);
tmpOutputBufferHalf = new ComplexArray(halfSize);
}
public inline function forwardFFT(inputBufferIndex: Int, outputBufferIndex: Int) {
realfft(inputBuffers[inputBufferIndex], outputBuffers[outputBufferIndex], tmpInputBufferHalf, tmpOutputBufferHalf, size, expRotationStepTable);
}
public inline function inverseFFT(inputBufferIndex: Int, outputBufferIndex: Int) {
realifft(outputBuffers[outputBufferIndex], inputBuffers[inputBufferIndex], tmpOutputBufferHalf, tmpInputBufferHalf, size, expRotationStepTable);
}
public inline function getInput(index: Int): Float32Array {
return inputBuffers[index];
}
}
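// Usage sketch: transform one cycle of a sine and read its spectrum.
//
//     final fft = new RealValuedFFT(8, 1, 1);
//     final input = fft.getInput(0);
//     for (i in 0...8) input[i] = Math.sin(2 * Math.PI * i / 8);
//     fft.forwardFFT(0, 0);
//     final spectrum = fft.getOutput(0); // ComplexArray of length 8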
class ComplexValuedFFT extends FFTBase {
public final inputBuffers: Vector<ComplexArray>;
public inline function new(size: Int, numInputs: Int, numOutputs: Int) {
super(size, numOutputs);
inputBuffers = new Vector(numInputs);
for (i in 0...numInputs) {
inputBuffers[i] = new ComplexArray(size);
}
}
public inline function forwardFFT(inputBufferIndex: Int, outputBufferIndex: Int) {
fft(inputBuffers[inputBufferIndex], outputBuffers[outputBufferIndex], size, expRotationStepTable);
}
public inline function inverseFFT(inputBufferIndex: Int, outputBufferIndex: Int) {
ifft(outputBuffers[outputBufferIndex], inputBuffers[inputBufferIndex], size, expRotationStepTable);
}
public inline function getInput(index: Int): ComplexArray {
return inputBuffers[index];
}
}
/**
Calculate the fast fourier transformation of the signal given in `inTimes`
and output the result in `outFreqs`.
@param inTimes Input buffer in time domain. Must have length of `size`.
@param outFreqs Output buffer in frequency domain. Must have length of `size`.
@param size The size of the FFT. Must be a power of 2.
**/
inline function fft(inTimes: ComplexArray, outFreqs: ComplexArray, size: Int, expRotationStepTable: ComplexArray) {
ditfft2Iterative(inTimes, outFreqs, size, false, expRotationStepTable);
}
/**
Calculate the inverse fast fourier transformation of the signal given in
`inFreqs` and output the result in `outTimes`.
@param inFreqs Input buffer in frequency domain. Must have length of `size`.
@param outTimes Output buffer in time domain. Must have length of `size`.
@param size The size of both buffers. Must be a power of 2.
@param scale If true, scale output values by `1 / size`.
**/
inline function ifft(inFreqs: ComplexArray, outTimes: ComplexArray, size: Int, expRotationStepTable: ComplexArray, scale = true) {
ditfft2Iterative(inFreqs, outTimes, size, true, expRotationStepTable);
if (scale) {
for (i in 0...size) {
outTimes[i] = outTimes[i].scale(1 / size);
}
}
}
/**
Variant of `aura.math.fft` with real-valued input, almost twice as fast as
its complex-input counterpart.
@param inTimes Input buffer in time domain. Must have length of `size`.
@param outFreqs Output buffer in frequency domain. Must have length of `size`.
@param timeCmplxStore Temporary buffer. May contain any values and will contain garbage values afterwards. Must have length of `Std.int(size / 2)`.
@param freqCmplxStore Temporary buffer. May contain any values and will contain garbage values afterwards. Must have length of `Std.int(size / 2)`.
@param size The size of the FFT. Must be a power of 2.
**/
inline function realfft(inTimes: Float32Array, outFreqs: ComplexArray, timeCmplxStore: ComplexArray, freqCmplxStore: ComplexArray, size: Int, expRotationStepTable: ComplexArray) {
// Reference:
// Lyons, Richard G. (2011). Understanding Digital Signal Processing,
// 3rd edn., pp. 694–696 (Section 13.5.2: Performing a 2N-Point Real FFT)
final halfSize = Std.int(size / 2);
assert(Error, inTimes.length == size);
assert(Error, outFreqs.length == size);
assert(Error, timeCmplxStore.length == halfSize);
assert(Error, freqCmplxStore.length == halfSize);
for (i in 0...halfSize) {
timeCmplxStore[i] = new Complex(inTimes[2 * i], inTimes[2 * i + 1]);
}
fft(timeCmplxStore, freqCmplxStore, halfSize, expRotationStepTable);
final piN = Math.PI / halfSize;
// Construct first half of the result
for (i in 0...halfSize) {
final opp = (i == 0) ? freqCmplxStore[i] : freqCmplxStore[halfSize - i];
final xPlus = new Complex(
0.5 * (freqCmplxStore[i].real + opp.real),
0.5 * (freqCmplxStore[i].imag + opp.imag)
);
final xMinus = new Complex(
0.5 * (freqCmplxStore[i].real - opp.real),
0.5 * (freqCmplxStore[i].imag - opp.imag)
);
final piNi = piN * i;
final iSin = Math.sin(piNi);
final iCos = Math.cos(piNi);
final real = xPlus.real + iCos * xPlus.imag - iSin * xMinus.real;
final imag = xMinus.imag - iSin * xPlus.imag - iCos * xMinus.real;
outFreqs[i] = new Complex(real, imag);
}
outFreqs[halfSize] = freqCmplxStore[0].real - freqCmplxStore[0].imag;
// Mirror first half to second half of the result, using the conjugate
// symmetry X[k] == conj(X[N - k]) of real-input FFTs
for (i in halfSize + 1...size) {
outFreqs[i] = outFreqs[size - i].conj();
}
}
/**
Variant of `aura.math.ifft` with real-valued output, almost twice as fast
as its complex-valued counterpart.
@param inFreqs Input buffer in frequency domain. Must have length of `size`.
@param outTimes Output buffer in time domain. Must have length of `size`.
@param freqCmplxStore Temporary buffer. May contain any values and will contain garbage values afterwards. Must have length of `Std.int(size / 2)`.
@param timeCmplxStore Temporary buffer. May contain any values and will contain garbage values afterwards. Must have length of `Std.int(size / 2)`.
@param size The size of the FFT. Must be a power of 2.
**/
inline function realifft(inFreqs: ComplexArray, outTimes: Float32Array, freqCmplxStore: ComplexArray, timeCmplxStore: ComplexArray, size: Int, expRotationStepTable: ComplexArray) {
// Reference:
// Scheibler, Robin (2013). Real FFT Algorithms.
// Available at: http://www.robinscheibler.org/2013/02/13/real-fft.html
final halfSize = Std.int(size / 2);
assert(Error, inFreqs.length == size);
assert(Error, outTimes.length == size);
assert(Error, freqCmplxStore.length == halfSize);
assert(Error, timeCmplxStore.length == halfSize);
final pi2N = (2 * Math.PI) / size;
// Construct input
for (i in 0...halfSize) {
final oppC = ((i == 0) ? inFreqs[i] : inFreqs[halfSize - i]).conj();
final xEven = 0.5 * (inFreqs[i] + oppC);
final xOdd = 0.5 * ((inFreqs[i] - oppC) * Complex.exp(i * pi2N));
freqCmplxStore[i] = xEven + xOdd.multWithI();
}
ifft(freqCmplxStore, timeCmplxStore, halfSize, expRotationStepTable, false);
final scale = 2 / size;
for (i in 0...halfSize) {
outTimes[2 * i] = timeCmplxStore[i].real * scale;
outTimes[2 * i + 1] = timeCmplxStore[i].imag * scale;
}
}
/**
Modified copy of `dsp.FFT.ditfft2()` from the "hxdsp" library (*) to be able
to use Aura's own complex number type to make the fft allocation-free.
The used algorithm is a recursive radix-2 decimation-in-time variant of the
Cooley–Tukey FFT.
(*) https://github.com/baioc/hxdsp, released under the UNLICENSE license.
**/
#if AURA_BACKEND_HL @:hlNative("aura_hl", "ditfft2") #end
private function ditfft2(time: ComplexArray, t: Int, freq: ComplexArray, f: Int, n: Int, step: Int, inverse: Bool) {
if (n == 1) {
freq[f] = time[t];
}
else {
final halfLen = Std.int(n / 2);
ditfft2(time, t, freq, f, halfLen, step * 2, inverse);
ditfft2(time, t + step, freq, f + halfLen, halfLen, step * 2, inverse);
final tExp = ((inverse ? 1 : -1) * 2 * Math.PI) / n;
for (k in 0...halfLen) {
final even = freq[f + k].copy();
final odd = freq[f + k + halfLen].copy();
final twiddle = Complex.exp(tExp * k) * odd;
freq[f + k] = even + twiddle;
freq[f + k + halfLen] = even - twiddle;
}
}
}
#if AURA_BACKEND_HL @:hlNative("aura_hl", "ditfft2_iterative") #end
private function ditfft2Iterative(time: ComplexArray, freq: ComplexArray, n: Int, inverse: Bool, expRotationStepTable: ComplexArray) {
// Decimate
final log2N = log2Unsigned(n);
for (i in 0...n) {
final reversedI = bitReverseUint32(i, log2N);
if (reversedI > i) {
freq[i] = time[reversedI];
freq[reversedI] = time[i];
}
else if (reversedI == i) {
freq[i] = time[reversedI];
}
}
var layerSize = 2; // Size of the FFT for the current layer in the divide & conquer tree
var halfLayerIdx = 0;
while (layerSize <= n) { // Iterate over all layers beginning with the lowest
final halfLayerSize = layerSize >>> 1;
final expRotationStep = expRotationStepTable[halfLayerIdx].copy();
if (inverse) {
expRotationStep.setFrom(expRotationStep.conj());
}
var sectionOffset = 0;
while (sectionOffset < n) {
final currentExpRotation = new Complex(1.0, 0.0);
for (i in 0...halfLayerSize) {
final even = freq[sectionOffset + i].copy();
final odd = freq[sectionOffset + i + halfLayerSize];
final twiddle = currentExpRotation * odd;
freq[sectionOffset + i] = even + twiddle;
freq[sectionOffset + i + halfLayerSize] = even - twiddle;
currentExpRotation.setFrom(currentExpRotation * expRotationStep);
}
sectionOffset += layerSize;
}
layerSize <<= 1;
halfLayerIdx++;
}
}
// The following bit reversal code was taken (and slightly altered) from
// https://graphics.stanford.edu/~seander/bithacks.html#BitReverseTable.
// The original sources are released in the public domain.
// Bit reversal LUT where each entry is one possible byte (value = address)
private final bitReverseTable: kha.arrays.Uint8Array = uint8ArrayFromIntArray([
0x00, 0x80, 0x40, 0xC0, 0x20, 0xA0, 0x60, 0xE0, 0x10, 0x90, 0x50, 0xD0, 0x30, 0xB0, 0x70, 0xF0,
0x08, 0x88, 0x48, 0xC8, 0x28, 0xA8, 0x68, 0xE8, 0x18, 0x98, 0x58, 0xD8, 0x38, 0xB8, 0x78, 0xF8,
0x04, 0x84, 0x44, 0xC4, 0x24, 0xA4, 0x64, 0xE4, 0x14, 0x94, 0x54, 0xD4, 0x34, 0xB4, 0x74, 0xF4,
0x0C, 0x8C, 0x4C, 0xCC, 0x2C, 0xAC, 0x6C, 0xEC, 0x1C, 0x9C, 0x5C, 0xDC, 0x3C, 0xBC, 0x7C, 0xFC,
0x02, 0x82, 0x42, 0xC2, 0x22, 0xA2, 0x62, 0xE2, 0x12, 0x92, 0x52, 0xD2, 0x32, 0xB2, 0x72, 0xF2,
0x0A, 0x8A, 0x4A, 0xCA, 0x2A, 0xAA, 0x6A, 0xEA, 0x1A, 0x9A, 0x5A, 0xDA, 0x3A, 0xBA, 0x7A, 0xFA,
0x06, 0x86, 0x46, 0xC6, 0x26, 0xA6, 0x66, 0xE6, 0x16, 0x96, 0x56, 0xD6, 0x36, 0xB6, 0x76, 0xF6,
0x0E, 0x8E, 0x4E, 0xCE, 0x2E, 0xAE, 0x6E, 0xEE, 0x1E, 0x9E, 0x5E, 0xDE, 0x3E, 0xBE, 0x7E, 0xFE,
0x01, 0x81, 0x41, 0xC1, 0x21, 0xA1, 0x61, 0xE1, 0x11, 0x91, 0x51, 0xD1, 0x31, 0xB1, 0x71, 0xF1,
0x09, 0x89, 0x49, 0xC9, 0x29, 0xA9, 0x69, 0xE9, 0x19, 0x99, 0x59, 0xD9, 0x39, 0xB9, 0x79, 0xF9,
0x05, 0x85, 0x45, 0xC5, 0x25, 0xA5, 0x65, 0xE5, 0x15, 0x95, 0x55, 0xD5, 0x35, 0xB5, 0x75, 0xF5,
0x0D, 0x8D, 0x4D, 0xCD, 0x2D, 0xAD, 0x6D, 0xED, 0x1D, 0x9D, 0x5D, 0xDD, 0x3D, 0xBD, 0x7D, 0xFD,
0x03, 0x83, 0x43, 0xC3, 0x23, 0xA3, 0x63, 0xE3, 0x13, 0x93, 0x53, 0xD3, 0x33, 0xB3, 0x73, 0xF3,
0x0B, 0x8B, 0x4B, 0xCB, 0x2B, 0xAB, 0x6B, 0xEB, 0x1B, 0x9B, 0x5B, 0xDB, 0x3B, 0xBB, 0x7B, 0xFB,
0x07, 0x87, 0x47, 0xC7, 0x27, 0xA7, 0x67, 0xE7, 0x17, 0x97, 0x57, 0xD7, 0x37, 0xB7, 0x77, 0xF7,
0x0F, 0x8F, 0x4F, 0xCF, 0x2F, 0xAF, 0x6F, 0xEF, 0x1F, 0x9F, 0x5F, 0xDF, 0x3F, 0xBF, 0x7F, 0xFF
]);
/**
Return the reversed bits of the given `value`, where `log2N` is the position
of the most significant bit that should be used for the left bound of the
"reverse range".
**/
private inline function bitReverseUint32(value: Int, log2N: Int): Int {
return (
(bitReverseTable[ value & 0xff] << 24) |
(bitReverseTable[(value >>> 8 ) & 0xff] << 16) |
(bitReverseTable[(value >>> 16) & 0xff] << 8 ) |
(bitReverseTable[(value >>> 24) & 0xff] )
) >>> (32 - log2N);
}
private inline function uint8ArrayFromIntArray(array: Array<Int>): kha.arrays.Uint8Array {
final out = new kha.arrays.Uint8Array(array.length);
for (i in 0...array.length) {
out[i] = array[i];
}
return out;
}

View File

@ -0,0 +1,54 @@
package aura.math;
import kha.FastFloat;
import kha.math.FastVector3;
import kha.math.FastVector4;
@:forward
abstract Vec3(FastVector3) from FastVector3 to FastVector3 {
public inline function new(x: FastFloat = 0.0, y: FastFloat = 0.0, z: FastFloat = 0.0) {
this = new FastVector3(x, y, z);
}
@:from
public static inline function fromKhaVec3(v: kha.math.FastVector3): Vec3 {
return new FastVector3(v.x, v.y, v.z);
}
@:from
public static inline function fromKhaVec4(v: kha.math.FastVector4): Vec3 {
return new FastVector3(v.x, v.y, v.z);
}
@:to
public inline function toKhaVec3(): kha.math.FastVector3 {
return new FastVector3(this.x, this.y, this.z);
}
@:to
public inline function toKhaVec4(): kha.math.FastVector4 {
return new FastVector4(this.x, this.y, this.z);
}
#if (AURA_WITH_IRON || leenkx)
@:from
public static inline function fromIronVec3(v: iron.math.Vec3): Vec3 {
return new FastVector3(v.x, v.y, v.z);
}
@:from
public static inline function fromIronVec4(v: iron.math.Vec4): Vec3 {
return new FastVector3(v.x, v.y, v.z);
}
@:to
public inline function toIronVec3(): iron.math.Vec3 {
return new iron.math.Vec3(this.x, this.y, this.z);
}
@:to
public inline function toIronVec4(): iron.math.Vec4 {
return new iron.math.Vec4(this.x, this.y, this.z);
}
#end
}

View File

@ -0,0 +1,188 @@
// =============================================================================
// getBuffer() is roughly based on
// https://github.com/Kode/Kha/blob/master/Sources/kha/audio2/Audio1.hx
//
// References:
// [1]: https://github.com/Kode/Kha/blob/3a3e9e6d51b1d6e3309a80cd795860da3ea07355/Backends/Kinc-hxcpp/main.cpp#L186-L233
//
// =============================================================================
package aura.threading;
import haxe.ds.Vector;
import kha.arrays.Float32Array;
import aura.types.AudioBuffer;
import aura.types.ComplexArray;
import aura.utils.Pointer;
class BufferCache {
// TODO: Make max tree height configurable
public static inline var MAX_TREE_HEIGHT = 8;
/**
Number of audioCallback() invocations since the last allocation. This is
used to automatically switch off interactions with the garbage collector
in the audio thread if there are no allocations for some time (for extra
performance).
**/
static var lastAllocationTimer: Int = 0;
/**
Each level in the channel tree has its own buffer that can be shared by
the channels on that level.
**/
static var treeBuffers: Vector<Pointer<AudioBuffer>>;
static var bufferConfigs: Vector<BufferConfig>;
public static inline function init() {
treeBuffers = new Vector(MAX_TREE_HEIGHT);
for (i in 0...treeBuffers.length) {
treeBuffers[i] = new Pointer<AudioBuffer>();
}
bufferConfigs = BufferType.createAllConfigs();
}
public static inline function updateTimer() {
lastAllocationTimer++;
if (lastAllocationTimer > 100) {
kha.audio2.Audio.disableGcInteractions = true;
}
}
public static function getTreeBuffer(treeLevel: Int, numChannels: Int, channelLength: Int): Null<AudioBuffer> {
var p_buffer = treeBuffers[treeLevel];
if (!getBuffer(TAudioBuffer, p_buffer, numChannels, channelLength)) {
// Unexpected allocation message is already printed
trace(' treeLevel: $treeLevel');
return null;
}
return p_buffer.get();
}
@:generic
public static function getBuffer<T>(bufferType: BufferType, p_buffer: PointerType<T>, numChannels: Int, channelLength: Int): Bool {
final bufferCfg = bufferConfigs[bufferType];
var buffer = p_buffer.get();
final currentNumChannels = (buffer == null) ? 0 : bufferCfg.getNumChannels(buffer);
final currentChannelLength = (buffer == null) ? 0 : bufferCfg.getChannelLength(buffer);
if (buffer != null && currentNumChannels >= numChannels && currentChannelLength >= channelLength) {
// Buffer is already big enough
return true;
}
if (kha.audio2.Audio.disableGcInteractions) {
// This code is executed in the case that there are suddenly
// more samples requested while the GC interactions are turned
// off (because the number of samples was sufficient for a
// longer time). We can't just turn on GC interactions, it will
// not take effect before the next audio callback invocation, so
// we skip this "frame" instead (see [1] for reference).
#if !AURA_UNIT_TESTS
trace("Unexpected allocation request in audio thread.");
final haveMsgNumC = (buffer == null) ? 'no buffer' : '${currentNumChannels}';
final haveMsgLen = (buffer == null) ? 'no buffer' : '${currentChannelLength}';
trace(' wanted amount of channels: $numChannels (have: $haveMsgNumC)');
trace(' wanted channel length: $channelLength (have: $haveMsgLen)');
#end
lastAllocationTimer = 0;
kha.audio2.Audio.disableGcInteractions = false;
return false;
}
// If the buffer exists but is too small for the requested amount of
// samples, overallocate by factor 2 to avoid frequent reallocations.
// Eventually the buffer will be big enough for the required amount of
// samples. If the buffer does not exist yet, do not overallocate to
// prevent too high memory usage (the requested length should not
// change much).
buffer = cast bufferCfg.construct(numChannels, buffer == null ? channelLength : channelLength * 2);
p_buffer.set(buffer);
lastAllocationTimer = 0;
return true;
}
}
@:structInit
class BufferConfig {
public var construct: Int->Int->Any;
public var getNumChannels: Any->Int;
public var getChannelLength: Any->Int;
}
/**
Type-unsafe workaround for covariance and unification issues when working
with the generic `BufferCache.getBuffer()`.
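A minimal usage sketch (illustrative only; the requested size is arbitrary):

```haxe
final p_buffer = new Pointer<Float32Array>(null);
// Reuses the existing buffer if it is already large enough,
// otherwise tries to allocate a new one
if (BufferCache.getBuffer(TFloat32Array, p_buffer, 1, 512)) {
final buffer = p_buffer.getSure();
// ... fill and use buffer ...
}
```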
**/
enum abstract BufferType(Int) to Int {
/** Represents `aura.types.AudioBuffer`. **/
var TAudioBuffer;
/** Represents `kha.arrays.Float32Array`. **/
var TFloat32Array;
/** Represents `Array<Float>`. **/
var TArrayFloat;
/** Represents `Array<dsp.Complex>`. **/
var TArrayComplex;
private var enumSize;
public static function createAllConfigs(): Vector<BufferConfig> {
final out = new Vector<BufferConfig>(enumSize);
out[TAudioBuffer] = ({
construct: (numChannels: Int, channelLength: Int) -> {
return new AudioBuffer(numChannels, channelLength);
},
getNumChannels: (buffer: Any) -> {
return (cast buffer: AudioBuffer).numChannels;
},
getChannelLength: (buffer: Any) -> {
return (cast buffer: AudioBuffer).channelLength;
}
}: BufferConfig);
out[TFloat32Array] = ({
construct: (numChannels: Int, channelLength: Int) -> {
return new Float32Array(channelLength);
},
getNumChannels: (buffer: Any) -> {
return 1;
},
getChannelLength: (buffer: Any) -> {
return (cast buffer: Float32Array).length;
}
}: BufferConfig);
out[TArrayFloat] = ({
construct: (numChannels: Int, channelLength: Int) -> {
final v = new Array<Float>();
v.resize(channelLength);
return v;
},
getNumChannels: (buffer: Any) -> {
return 1;
},
getChannelLength: (buffer: Any) -> {
return (cast buffer: Array<Float>).length;
}
}: BufferConfig);
out[TArrayComplex] = ({
construct: (numChannels: Int, channelLength: Int) -> {
return new ComplexArray(channelLength);
},
getNumChannels: (buffer: Any) -> {
return 1;
},
getChannelLength: (buffer: Any) -> {
return (cast buffer: ComplexArray).length;
}
}: BufferConfig);
return out;
}
}

View File

@ -0,0 +1,36 @@
package aura.threading;
/**
Non-blocking first in/first out queue for thread synchronization. On targets
with threading support, `sys.thread.Deque` is used, on those without
threading `haxe.ds.List` is used instead.
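A short usage sketch (illustrative; thread setup is omitted):

```haxe
final fifo = new Fifo<Int>();
fifo.add(42); // producer side
final item = fifo.tryPop(); // consumer side, `null` if the queue is empty
```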
**/
@:generic
@:forward(add)
@:nullSafety(StrictThreaded)
abstract Fifo<T>(FifoImpl<T>) {
public inline function new() {
this = new FifoImpl<T>();
}
public inline function tryPop(): Null<T> {
return this.pop(false);
}
}
#if (target.threaded)
private typedef FifoImpl<T> = sys.thread.Deque<T>;
#else
@:generic
@:forward(add)
@:nullSafety(StrictThreaded)
private abstract FifoImpl<T>(List<T>) {
public inline function new() {
this = new List<T>();
}
public inline function pop(block: Bool): Null<T> {
return this.pop();
}
}
#end

View File

@ -0,0 +1,37 @@
package aura.threading;
@:struct
@:structInit
class Message {
public final id: Int;
public final data: Null<Dynamic>;
public final inline function dataAsArrayUnsafe(): Null<Array<Dynamic>> {
return data;
}
}
@:autoBuild(aura.utils.macro.ExtensibleEnumBuilder.build())
@:build(aura.utils.macro.ExtensibleEnumBuilder.build())
class MessageID {}
class ChannelMessageID extends MessageID {
final Play;
final Pause;
final Stop;
// Parameters
final PVolume;
final PPitch;
final PDopplerRatio;
final PDstAttenuation;
}
class DSPMessageID extends MessageID {
final BypassEnable;
final BypassDisable;
final SwapBufferReady;
final SetDelays;
}

View File

@ -0,0 +1,177 @@
package aura.types;
import haxe.ds.Vector;
import kha.FastFloat;
import kha.arrays.Float32Array;
import aura.utils.BufferUtils;
/**
Deinterleaved 32-bit floating point audio buffer.
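A short usage sketch (sizes and values are arbitrary):

```haxe
final buffer = new AudioBuffer(2, 4); // stereo, 4 samples per channel
buffer.getChannelView(0)[0] = 1.0;

// Interleave all samples into an 8-element target array
final target = new Float32Array(2 * 4);
buffer.interleaveToFloat32Array(target, 0, 0, 4);
```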
**/
class AudioBuffer {
/**
The amount of audio channels in this buffer.
**/
public final numChannels: Int;
/**
The amount of samples stored in each channel of this buffer.
**/
public final channelLength: Int;
/**
The raw samples data of this buffer.
To access the samples of a specific channel, please use
`AudioBuffer.getChannelView()`.
**/
public final rawData: Float32Array;
final channelViews: Vector<AudioBufferChannelView>;
/**
Create a new `AudioBuffer` object.
@param numChannels The amount of audio channels in this buffer.
@param channelLength The amount of samples stored in each channel.
**/
public inline function new(numChannels: Int, channelLength: Int) {
assert(Error, numChannels > 0);
assert(Error, channelLength > 0);
this.numChannels = numChannels;
this.channelLength = channelLength;
this.rawData = new Float32Array(numChannels * channelLength);
channelViews = new Vector(numChannels);
for (c in 0...numChannels) {
channelViews[c] = this.rawData.subarray(channelLength * c, channelLength * (c + 1));
}
}
/**
Get access to the samples data in the audio channel specified by `channelIndex`.
**/
public inline function getChannelView(channelIndex: Int): AudioBufferChannelView {
assert(Error, 0 <= channelIndex && channelIndex < this.numChannels);
return channelViews[channelIndex];
}
/**
Copy and interleave this `AudioBuffer` into the given `target` array.
@param sourceOffset Per-channel position in this `AudioBuffer` from where to start copying and interleaving samples.
@param targetOffset Absolute position in the target array at which to start writing samples.
@param numSamplesToCopy The amount of samples to copy (per channel).
**/
public inline function interleaveToFloat32Array(target: Float32Array, sourceOffset: Int, targetOffset: Int, numSamplesToCopy: Int) {
assert(Error, numSamplesToCopy >= 0);
assert(Error, sourceOffset >= 0);
assert(Error, sourceOffset + numSamplesToCopy <= this.channelLength);
assert(Error, targetOffset >= 0);
assert(Error, targetOffset + numSamplesToCopy * this.numChannels <= target.length);
for (i in 0...numSamplesToCopy) {
for (c in 0...numChannels) {
target[targetOffset + i * numChannels + c] = getChannelView(c)[sourceOffset + i];
}
}
}
/**
Copy and deinterleave the given `source` array into this `AudioBuffer`.
@param source An interleaved array of audio samples.
@param numSourceChannels The amount of channels in the `source` array,
which must be smaller or equal to the amount of channels in this
`AudioBuffer`. The source channels are copied to the `numSourceChannels`
first channels in this `AudioBuffer`.
**/
public inline function deinterleaveFromFloat32Array(source: Float32Array, numSourceChannels: Int) {
assert(Error, numSourceChannels >= 0 && numSourceChannels <= this.numChannels);
assert(Error, source.length >= numSourceChannels * this.channelLength);
for (i in 0...channelLength) {
for (c in 0...numSourceChannels) {
getChannelView(c)[i] = source[i * numSourceChannels + c];
}
}
}
/**
Fill each audio channel in this buffer with zeroes.
**/
public inline function clear() {
clearBuffer(rawData);
}
/**
Copy the samples from `other` into this buffer.
Both buffers must have the same amount of channels
and the same amount of samples per channel.
**/
public inline function copyFromEquallySized(other: AudioBuffer) {
assert(Error, this.numChannels == other.numChannels);
assert(Error, this.channelLength == other.channelLength);
for (i in 0...rawData.length) {
this.rawData[i] = other.rawData[i];
}
}
/**
Copy the samples from `other` into this buffer.
Both buffers must have the same amount of channels, and `other` must
have at most as many samples per channel as this buffer.
If `other` has fewer samples per channel than this buffer,
`padWithZeroes` specifies whether the remaining samples in this buffer
should be padded with zeroes (`padWithZeroes` is `true`) or should
remain unmodified (`padWithZeroes` is `false`).
**/
public inline function copyFromShorterBuffer(other: AudioBuffer, padWithZeroes: Bool) {
assert(Error, this.numChannels == other.numChannels);
assert(Error, this.channelLength >= other.channelLength);
for (c in 0...this.numChannels) {
final thisView = this.getChannelView(c);
final otherView = other.getChannelView(c);
for (i in 0...other.channelLength) {
thisView[i] = otherView[i];
}
if (padWithZeroes) {
for (i in other.channelLength...this.channelLength) {
thisView[i] = 0.0;
}
}
}
}
}
/**
An array-like view on the samples data of an `AudioBuffer` channel.
**/
abstract AudioBufferChannelView(Float32Array) from Float32Array to Float32Array {
public function new(size: Int) {
this = new Float32Array(size);
}
@:arrayAccess
public function get(index: Int): FastFloat {
return this.get(index);
}
@:arrayAccess
public function set(index: Int, value: FastFloat): FastFloat {
return this.set(index, value);
}
}

View File

@ -0,0 +1,93 @@
package aura.types;
import kha.FastFloat;
@:notNull
@:pure
@:unreflective
@:forward(real, imag)
abstract Complex(ComplexImpl) {
public inline function new(real: FastFloat, imag: FastFloat) {
this = new ComplexImpl(real, imag);
}
@:from
public static inline function fromReal(real: FastFloat): Complex {
return new Complex(real, 0.0);
}
public static inline function newZero(): Complex {
return new Complex(0.0, 0.0);
}
public inline function copy(): Complex {
return new Complex(this.real, this.imag);
}
public inline function setZero() {
this.real = this.imag = 0.0;
}
public inline function setFrom(other: Complex) {
this.real = other.real;
this.imag = other.imag;
}
public inline function scale(s: FastFloat): Complex {
return new Complex(this.real * s, this.imag * s);
}
public static inline function exp(w: FastFloat) {
return new Complex(Math.cos(w), Math.sin(w));
}
@:op(A + B)
@:commutative
public inline function add(other: Complex): Complex {
return new Complex(this.real + other.real, this.imag + other.imag);
}
@:op(A - B)
public inline function sub(other: Complex): Complex {
return new Complex(this.real - other.real, this.imag - other.imag);
}
@:op(A * B)
@:commutative
public inline function mult(other: Complex): Complex {
return new Complex(
this.real*other.real - this.imag*other.imag,
this.real*other.imag + this.imag*other.real
);
}
/**
Optimized version of `this * new Complex(0.0, 1.0)`.
**/
public inline function multWithI(): Complex {
return new Complex(-this.imag, this.real);
}
@:op(~A)
public inline function conj(): Complex {
return new Complex(this.real, -this.imag);
}
public inline function equals(other: Complex): Bool {
return this.real == other.real && this.imag == other.imag;
}
}
@:pure
@:notNull
@:unreflective
@:struct
private final class ComplexImpl {
public var real: FastFloat;
public var imag: FastFloat;
public inline function new(real: FastFloat, imag: FastFloat) {
this.real = real;
this.imag = imag;
}
}

View File

@ -0,0 +1,194 @@
package aura.types;
import haxe.ds.Vector;
#if AURA_BACKEND_HL
import kha.FastFloat;
#end
typedef ComplexArrayImpl =
#if AURA_BACKEND_HL
HL_ComplexArrayImpl
#elseif js
JS_ComplexArrayImpl
#else
Vector<Complex>
#end
;
/**
An array of complex numbers.
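A short usage sketch (length and values are arbitrary):

```haxe
final array = new ComplexArray(4);
array[0] = new Complex(1.0, -1.0); // the value is copied into the array
final c = array[0]; // possibly a copy, see `get()` below
```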
**/
@:forward(length)
@:unreflective
abstract ComplexArray(ComplexArrayImpl) {
/**
Create a new zero-initialized complex array.
**/
public inline function new(length: Int) {
#if AURA_BACKEND_HL
this = inline HL_ComplexArray.create(length);
#elseif js
this = new JS_ComplexArrayImpl(length);
#else
this = new ComplexArrayImpl(length);
for (i in 0...length) {
this[i] = Complex.newZero();
}
#end
}
#if AURA_BACKEND_HL
public inline function free() {
HL_ComplexArray.free(this);
}
#end
/**
Get the complex number at the given index from the array. Note that it
is _not_ guaranteed that the returned value is the same object
instance as the one stored in the array, because the array does not
store object instances on every target.
**/
@:arrayAccess
public inline function get(index: Int): Complex {
#if AURA_BACKEND_HL
return HL_ComplexArray.get(this, index);
#elseif js
return JS_ComplexArrayImpl.get(this, index);
#else
return this[index];
#end
}
/**
Set a complex number at the given array index. It is _guaranteed_ that
the given value is copied to the array so that the passed complex object
instance may be kept on the stack if possible.
**/
@:arrayAccess
public inline function set(index: Int, value: Complex): Complex {
#if AURA_BACKEND_HL
return HL_ComplexArray.set(this, index, value);
#elseif js
return JS_ComplexArrayImpl.set(this, index, value);
#else
// Copy to array to keep original value on stack
this[index].setFrom(value);
// It is important to return the element from the array instead of
// the `value` parameter, so that Haxe doesn't create a temporary
// complex object (allocated on the heap in the worst case) to store
// the state of `value` before calling `setFrom()` above...
return this[index];
#end
}
#if js
public inline function subarray(offset: Int, ?length: Int): ComplexArray {
return this.subarray(offset, length);
}
#end
public inline function copy(): ComplexArray {
var ret = new ComplexArray(this.length);
for (i in 0...this.length) {
#if AURA_BACKEND_HL
ret[i].setFrom(HL_ComplexArray.get(this, i));
#elseif js
ret.set(i, JS_ComplexArrayImpl.get(this, i));
#else
ret[i] = this[i];
#end
}
return ret;
}
/**
Copy the contents of `other` into this array.
Both arrays must have the same length.
**/
public inline function copyFrom(other: ComplexArray) {
assert(Error, this.length == other.length);
for (i in 0...this.length) {
set(i, other[i]);
}
}
}
#if AURA_BACKEND_HL
private class HL_ComplexArrayImpl {
public var self: hl.Bytes;
public var length: Int;
public inline function new() {}
}
private class HL_ComplexArray {
public static inline function create(length: Int): ComplexArrayImpl {
final impl = new ComplexArrayImpl();
impl.length = length;
if (length > 0) {
impl.self = aura_hl_complex_array_alloc(length);
if (impl.self == null) {
throw 'Could not allocate enough memory for complex array of length ${length}';
}
}
return impl;
}
public static inline function free(impl: ComplexArrayImpl) {
aura_hl_complex_array_free(impl.self);
}
public static inline function get(impl: ComplexArrayImpl, index: Int): Complex {
return aura_hl_complex_array_get(impl.self, index);
}
public static inline function set(impl: ComplexArrayImpl, index: Int, value: Complex): Complex {
return aura_hl_complex_array_set(impl.self, index, value.real, value.imag);
}
@:hlNative("aura_hl", "complex_array_alloc")
static function aura_hl_complex_array_alloc(length: Int): hl.Bytes { return null; }
@:hlNative("aura_hl", "complex_array_free")
static function aura_hl_complex_array_free(complexArray: hl.Bytes): Void {}
@:hlNative("aura_hl", "complex_array_get")
static function aura_hl_complex_array_get(complexArray: hl.Bytes, index: Int): Complex { return Complex.newZero(); }
@:hlNative("aura_hl", "complex_array_set")
static function aura_hl_complex_array_set(complexArray: hl.Bytes, index: Int, real: FastFloat, imag: FastFloat): Complex { return Complex.newZero(); }
}
#end // AURA_BACKEND_HL
#if js
@:forward
private abstract JS_ComplexArrayImpl(js.lib.DataView) {
public var length(get, never): Int;
public inline function get_length(): Int {
return this.byteLength >>> 3;
}
public inline function new(length: Int) {
final buffer = new js.lib.ArrayBuffer(length * 2 * 4);
this = new js.lib.DataView(buffer, 0, buffer.byteLength);
}
public static inline function get(impl: JS_ComplexArrayImpl, index: Int): Complex {
return new Complex(impl.getFloat32(index * 4 * 2), impl.getFloat32((index * 2 + 1) * 4));
}
public static inline function set(impl: JS_ComplexArrayImpl, index: Int, value: Complex): Complex {
impl.setFloat32(index * 2 * 4, value.real);
impl.setFloat32((index * 2 + 1) * 4, value.imag);
return value;
}
public inline function subarray(offset: Int, ?length: Int): ComplexArray {
return cast new js.lib.DataView(this.buffer, offset * 2 * 4, length != null ? length * 2 * 4 : null);
}
}
#end // js

View File

@ -0,0 +1,201 @@
package aura.types;
import haxe.ds.Vector;
import kha.FastFloat;
import kha.arrays.Float32Array;
import aura.utils.BufferUtils;
import aura.utils.MathUtils;
import aura.utils.Pointer;
using aura.utils.ReverseIterator;
/**
The entirety of all fields with their respective HRIRs (head related impulse
responses).
**/
@:structInit class HRTF {
/**
The sample rate of the HRIRs.
**/
public final sampleRate: Int;
/**
The number of channels of the HRIRs.
**/
public final numChannels: Int;
/**
The amount of samples of each HRIR (per channel).
**/
public final hrirSize: Int;
/**
The amount of HRIRs in this HRTF.
**/
public final hrirCount: Int;
/**
The fields of this HRTF.
**/
public final fields: Vector<Field>;
/**
The longest delay of any HRIR contained in this HRTF in samples. Useful
to preallocate enough memory for delay lines (use
`Math.ceil(maxDelayLength)`).
**/
public final maxDelayLength: Float;
/**
Create a bilinearly interpolated HRIR for the given direction (distance
is fixed for now) and store it in `outputBuf`. The length of the HRIR's
impulse response as well as the interpolated delay (in samples) is
stored in `outImpulseLength` and `outDelay`.
@param elevation Elevation (polar) angle from 0 (bottom) to 180 (top).
@param azimuth Azimuthal angle from 0 (front) to 360, clockwise.
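A minimal calling sketch (assuming `hrtf` is a loaded `HRTF` instance
and the output buffer is large enough for its HRIRs):

```haxe
final hrir = new Float32Array(hrtf.hrirSize);
final outLength = new Pointer<Int>(0);
final outDelay = new Pointer<kha.FastFloat>(0.0);
hrtf.getInterpolatedHRIR(90.0, 45.0, hrir, outLength, outDelay);
```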
**/
public function getInterpolatedHRIR(
elevation: Float, azimuth: Float,
outputBuf: Float32Array, outImpulseLength: Pointer<Int>, outDelay: Pointer<FastFloat>
) {
/**
Used terms in this function:
low/high: the elevations of the closest HRIR below and above the
given elevation
left/right: the azimuths of the closest HRIR left and right to the
given azimuth (the azimuth angle is clockwise, so the directions
left/right are meant from the perspective from the origin)
**/
clearBuffer(outputBuf);
if (azimuth == 360) {
azimuth = 0;
}
// TODO Use fixed distance for now...
final field = this.fields[this.fields.length - 1];
// Elevations don't go all the way around the sphere (only bottom to
// top), so at the top we don't jump to the bottom but stay at the top.
// Also, the indices include the borders of the range, so use -1 for
// calculating the elevationStep.
final elevationStep = 180 / (field.evCount - 1);
final elevationIndexLow = Std.int(elevation / elevationStep);
final elevationIndexHigh = minI(elevationIndexLow + 1, field.evCount - 1);
var elevationWeight = (elevation % elevationStep) / elevationStep;
final elevationHRIROffsetLow = field.evHRIROffsets[elevationIndexLow];
final elevationHRIROffsetHigh = field.evHRIROffsets[elevationIndexHigh];
var delay = 0.0;
var hrirLength = 0;
for (ev in 0...2) {
final elevationIndex = ev == 0 ? elevationIndexLow : elevationIndexHigh;
final elevationHRIROffset = ev == 0 ? elevationHRIROffsetLow : elevationHRIROffsetHigh;
final azimuthStep = 360 / field.azCount[elevationIndex];
final azimuthIndexLeft = Std.int(azimuth / azimuthStep);
var azimuthIndexRight = azimuthIndexLeft + 1;
if (azimuthIndexRight == field.azCount[elevationIndex]) {
azimuthIndexRight = 0;
}
final azimuthWeight = (azimuth % azimuthStep) / azimuthStep;
final hrirLeft = field.hrirs[elevationHRIROffset + azimuthIndexLeft];
final hrirRight = field.hrirs[elevationHRIROffset + azimuthIndexRight];
final evWeight = ev == 0 ? 1 - elevationWeight : elevationWeight;
// Interpolate delay
delay += lerp(hrirLeft.delays[0], hrirRight.delays[0], azimuthWeight) * evWeight;
// Interpolate coefficients
final invWeight = 1 - azimuthWeight;
for (i in 0...outputBuf.length) {
final leftCoeff = i < hrirLeft.coeffs.length ? hrirLeft.coeffs[i] * invWeight : 0.0;
final rightCoeff = i < hrirRight.coeffs.length ? hrirRight.coeffs[i] * azimuthWeight : 0.0;
outputBuf[i] += (leftCoeff + rightCoeff) * evWeight;
}
var maxLength = maxI(hrirLeft.coeffs.length, hrirRight.coeffs.length);
if (maxLength > hrirLength) {
hrirLength = maxLength;
}
}
// Delay is stored in samples relative to the HRTF sample rate, convert
// to current sample rate
final sampleRateFactor = this.sampleRate / Aura.sampleRate;
outDelay.set(delay * sampleRateFactor);
outImpulseLength.set(hrirLength);
}
}
/**
A field represents the entirety of HRIRs (head related impulse responses)
for a given distance to the listener. Imagine this as one layer of a sphere
around the listener.
**/
class Field {
/**
Distance to the listener, in millimeters (in the range 50mm-2500mm).
**/
public var distance: Int;
/**
Total HRIR count (for all elevations combined).
**/
public var hrirCount: Int;
/**
Number of elevations in this field. Elevations start at -90 degrees
(bottom) and go up to 90 degrees.
**/
public var evCount: Int;
/**
Number of azimuths (and HRIRs) per elevation. Azimuths construct a full
circle (360 degrees), starting at the front of the listener and going
clockwise.
**/
public var azCount: Vector<Int>;
/**
The offset into the `hrirs` array per elevation. The stored offset index
starts at the HRIR with azimuth 0 (front of the listener).
**/
public var evHRIROffsets: Vector<Int>;
/**
All HRIRs in this field.
**/
public var hrirs: Vector<HRIR>;
public function new() {}
}
/**
A single HRIR (head related impulse response)
**/
class HRIR {
/**
The impulse response coefficients. If the HRIR is stereo, the
coefficients are interleaved (left/right).
**/
public var coeffs: Float32Array;
/**
Delay of the impulse response per channel in samples.
**/
// TODO: Don't forget to also change this when resampling!
public var delays: Vector<Float>;
public function new() {}
}

View File

@ -0,0 +1,112 @@
package aura.types;
import haxe.ds.Vector;
import kha.arrays.Float32Array;
import aura.Types.AtomicInt;
// TODO: Make generic in some way
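/**
A buffer for sharing sample data between a writer thread and reader
threads without blocking. A minimal usage sketch (illustrative; thread
creation is omitted and `input`/`output` are assumed to be
`Float32Array`s of length 1024):

```haxe
final swapBuffer = new SwapBuffer(1024);

// On the writer thread:
swapBuffer.beginWrite();
swapBuffer.write(input, 0, 0, 1024);
swapBuffer.endWrite();

// On the reader thread:
swapBuffer.beginRead();
swapBuffer.read(output, 0, 0, 1024);
swapBuffer.endRead();
```
**/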
@:nullSafety(StrictThreaded)
class SwapBuffer {
static final ROW_COUNT = 2;
public final length: Int;
// https://www.usenix.org/legacy/publications/library/proceedings/usenix02/full_papers/huang/huang_html/node8.html
public final data: Vector<Vector<Float32Array>>;
final readerCount: Vector<AtomicInt>;
final newerBuf: Vector<AtomicInt>;
var latestWriteRow: AtomicInt = 0;
var curWriteBufIdx: AtomicInt = 0;
var curWriteRowIdx: AtomicInt = 0;
var curReadRowIdx: AtomicInt = 0;
public function new(length: Int) {
this.length = length;
this.data = new Vector(ROW_COUNT);
for (i in 0...ROW_COUNT) {
data[i] = new Vector(ROW_COUNT);
for (j in 0...ROW_COUNT) {
data[i][j] = new Float32Array(length);
}
}
this.readerCount = new Vector(ROW_COUNT);
for (i in 0...ROW_COUNT) {
readerCount[i] = 0;
}
this.newerBuf = new Vector(ROW_COUNT);
for (i in 0...ROW_COUNT) {
newerBuf[i] = 0;
}
}
public inline function beginRead() {
curReadRowIdx = latestWriteRow;
#if cpp
readerCount[curReadRowIdx] = AtomicInt.atomicInc(readerCount[curReadRowIdx].toPtr());
#else
readerCount[curReadRowIdx]++;
#end
}
public inline function endRead() {
#if cpp
readerCount[curReadRowIdx] = AtomicInt.atomicDec(readerCount[curReadRowIdx].toPtr());
#else
readerCount[curReadRowIdx]--;
#end
}
public inline function read(dst: Float32Array, dstStart: Int, srcStart: Int, length: Int) {
final bufIdx = newerBuf[curReadRowIdx];
for (i in 0...length) {
dst[dstStart + i] = data[curReadRowIdx][bufIdx][srcStart + i];
}
}
public inline function beginWrite() {
for (i in 0...ROW_COUNT) {
if (readerCount[i] == 0) {
curWriteRowIdx = i;
break;
}
}
// Select the least current row buffer
curWriteBufIdx = 1 - newerBuf[curWriteRowIdx];
}
public inline function endWrite() {
newerBuf[curWriteRowIdx] = curWriteBufIdx;
latestWriteRow = curWriteRowIdx;
}
public inline function write(src: Float32Array, srcStart: Int, dstStart: Int, length: Int = -1) {
if (length == -1) {
length = src.length - srcStart;
}
for (i in 0...length) {
data[curWriteRowIdx][curWriteBufIdx][dstStart + i] = src[srcStart + i]; // TODO: Investigate possible memory leaks through allocating
}
}
public inline function writeVecF(src: Vector<Float>, srcStart: Int, dstStart: Int, length: Int = -1) {
if (length == -1) {
length = src.length - srcStart;
}
for (i in 0...length) {
data[curWriteRowIdx][curWriteBufIdx][dstStart + i] = src[srcStart + i];
}
}
public inline function writeZero(dstStart: Int, dstEnd: Int) {
for (i in dstStart...dstEnd) {
data[curWriteRowIdx][curWriteBufIdx][i] = 0;
}
}
}

View File

@ -0,0 +1,126 @@
package aura.utils;
import haxe.Exception;
import haxe.PosInfos;
import haxe.exceptions.PosException;
import haxe.macro.Context;
import haxe.macro.Expr;
using haxe.macro.ExprTools;
class Assert {
/**
Checks whether the given expression evaluates to true. If this is not
the case, an `AuraAssertionException` with additional information is
thrown.
The assert level describes the severity of the assertion. If the
severity is lower than the level stored in the `AURA_ASSERT_LEVEL` flag,
the assertion is omitted from the code so that it doesn't decrease the
runtime performance.
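A usage sketch (the condition is an arbitrary example):

```haxe
assert(Error, samples.length > 0, "samples must not be empty");
```

Compiling with `-D AURA_ASSERT_LEVEL=Warning` keeps this assertion,
while `-D AURA_ASSERT_LEVEL=Critical` compiles it out.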
@param level The severity of this assertion.
@param condition The conditional expression to test.
@param message Optional message to display when the assertion fails.
**/
public static macro function assert(level: ExprOf<AssertLevel>, condition: ExprOf<Bool>, ?message: ExprOf<String>): Expr {
final levelVal: AssertLevel = AssertLevel.fromExpr(level);
final assertThreshold = AssertLevel.fromString(Context.definedValue("AURA_ASSERT_LEVEL"));
if (levelVal < assertThreshold) {
return macro {};
}
return macro {
if (!$condition) {
#if AURA_ASSERT_QUIT kha.System.stop(); #end
@:pos(condition.pos)
final exception = new aura.utils.Assert.AuraAssertionException($v{condition.toString()}, ${message});
#if AURA_ASSERT_START_DEBUGGER
@:privateAccess aura.utils.Assert.logError(exception.details());
@:privateAccess aura.utils.Assert.logError("An assertion error was triggered, starting debugger...");
aura.utils.Debug.startDebugger();
#else
@:pos(condition.pos)
@:privateAccess aura.utils.Assert.throwAssertionError(exception);
#end
}
};
}
/**
Helper function to prevent Haxe "bug" that actually throws an error
even when using `macro throw` (inlining this method also does not work).
**/
static function throwAssertionError(exp: AuraAssertionException, ?pos: PosInfos) {
throw exp;
}
static function logError(str: String, ?infos: PosInfos) {
#if sys
Sys.stderr().writeString(str + "\n");
#elseif kha_krom
Krom.log(str + "\n");
#elseif kha_js
js.html.Console.error(str);
#else
haxe.Log.trace(str, infos);
#end
}
}
/**
Exception that is thrown when an assertion fails.
@see `Assert`
**/
class AuraAssertionException extends PosException {
/**
@param exprString The string representation of the failed assert condition.
@param message Custom error message, use `null` to omit printing the message.
**/
public function new(exprString: String, message: Null<String>, ?previous: Exception, ?pos: Null<PosInfos>) {
final optMsg = message != null ? '\n\tMessage: $message' : "";
super('\n[Aura] Failed assertion:$optMsg\n\tExpression: ($exprString)', previous, pos);
}
}
enum abstract AssertLevel(Int) from Int to Int {
var Debug: AssertLevel;
var Warning: AssertLevel;
var Error: AssertLevel;
var Critical: AssertLevel;
// Don't use this level in assert() calls!
var NoAssertions: AssertLevel;
public static function fromExpr(e: ExprOf<AssertLevel>): AssertLevel {
switch (e.expr) {
case EConst(CIdent(v)): return fromString(v);
default: throw new Exception('Unsupported expression: $e');
};
}
/**
Converts a string into an `AssertLevel`, the string must be spelled
exactly as the assert level. `null` defaults to `AssertLevel.Critical`.
**/
public static function fromString(s: String): AssertLevel {
return switch (s) {
case "Debug": Debug;
case "Warning": Warning;
case "Error": Error;
case "Critical" | null: Critical;
case "NoAssertions": NoAssertions;
default: throw 'Could not convert "$s" to AssertLevel';
}
}
@:op(A < B) static function lt(a:AssertLevel, b:AssertLevel):Bool;
@:op(A > B) static function gt(a:AssertLevel, b:AssertLevel):Bool;
}

View File

@ -0,0 +1,103 @@
package aura.utils;
import haxe.ds.Vector;
import kha.FastFloat;
import kha.arrays.Float32Array;
inline function fillBuffer(buffer: Float32Array, value: FastFloat, length: Int = -1) {
for (i in 0...(length == -1 ? buffer.length : length)) {
buffer[i] = value;
}
}
inline function clearBuffer(buffer: Float32Array) {
#if hl
hl_fillByteArray(buffer, 0);
#else
fillBuffer(buffer, 0);
#end
}
inline function initZeroesVecI(vector: Vector<Int>) {
#if (haxe_ver >= "4.300")
vector.fill(0);
#else
for (i in 0...vector.length) {
vector[i] = 0;
}
#end
}
inline function initZeroesF64(vector: Vector<Float>) {
#if (haxe_ver >= "4.300")
vector.fill(0);
#else
for (i in 0...vector.length) {
vector[i] = 0;
}
#end
}
inline function initZeroesF32(vector: Vector<FastFloat>) {
#if (haxe_ver >= "4.300")
vector.fill(0);
#else
for (i in 0...vector.length) {
vector[i] = 0;
}
#end
}
/**
Creates an empty integer vector with the given length. It is guaranteed to
be always filled with 0, independent of the target.
**/
inline function createEmptyVecI(length: Int): Vector<Int> {
#if target.static
return new Vector<Int>(length);
#else
// On dynamic targets, vectors hold `null` after creation instead of 0
final vec = new Vector<Int>(length);
inline initZeroesVecI(vec);
return vec;
#end
}
/**
Creates an empty float vector with the given length. It is guaranteed to be
always filled with 0, independent of the target.
**/
inline function createEmptyVecF64(length: Int): Vector<Float> {
#if target.static
return new Vector<Float>(length);
#else
final vec = new Vector<Float>(length);
inline initZeroesF64(vec);
return vec;
#end
}
inline function createEmptyVecF32(length: Int): Vector<FastFloat> {
#if target.static
return new Vector<FastFloat>(length);
#else
final vec = new Vector<FastFloat>(length);
inline initZeroesF32(vec);
return vec;
#end
}
inline function createEmptyF32Array(length: Int): Float32Array {
final out = new Float32Array(length);
#if !js
clearBuffer(out);
#end
return out;
}
#if hl
inline function hl_fillByteArray(a: kha.arrays.ByteArray, byteValue: Int) {
(a.buffer: hl.Bytes).fill(0, a.byteLength, byteValue);
}
#end

View File

@ -0,0 +1,48 @@
package aura.utils;
import kha.FastFloat;
import kha.arrays.Float32Array;
import aura.utils.BufferUtils;
class CircularBuffer {
final data: Float32Array;
var readHead: Int;
var writeHead: Int;
public var length(get, null): Int;
public var delay = 0;
public inline function new(size: Int) {
assert(Warning, size > 0);
this.data = createEmptyF32Array(size);
this.length = size;
this.writeHead = 0;
this.readHead = 1;
}
public inline function setDelay(delaySamples: Int) {
delay = delaySamples;
readHead = writeHead - delaySamples;
if (readHead < 0) {
readHead += length;
}
}
public inline function get_length(): Int {
return data.length;
}
public inline function get(): FastFloat {
return data[readHead];
}
public inline function set(value: FastFloat) {
data[writeHead] = value;
}
public inline function increment() {
if (++readHead >= length) readHead = 0;
if (++writeHead >= length) writeHead = 0;
}
}

View File

@ -0,0 +1,130 @@
package aura.utils;
import kha.Image;
import kha.arrays.Float32Array;
import kha.graphics2.Graphics;
import aura.utils.MathUtils;
using StringTools;
class Debug {
static var id = 0;
public static inline function startDebugger() {
#if js
js.Syntax.code("debugger");
#end
}
/**
Generates GraphViz/dot code to draw the channel tree for debugging. On
html5 this code is copied to the clipboard, on other targets it is
written to the console but might be cut off (so better use html5 for
that).
**/
public static function debugTreeViz() {
#if AURA_DEBUG
final content = new StringBuf();
content.add("digraph Aura_Tree_Snapshot {\n");
content.add('\tranksep=equally;\n');
content.add('\trankdir=BT;\n');
content.add('\tnode [fontname = "helvetica"];\n');
addTreeToViz(content, Aura.masterChannel);
content.add("}");
copyToClipboard(content.toString());
#else
trace("Please build with 'AURA_DEBUG' flag!");
#end
}
#if AURA_DEBUG
static function addTreeToViz(buf: StringBuf, channelHandle: Handle) {
buf.add('\t${id++} [\n');
buf.add('\t\tshape=plaintext,\n');
buf.add('\t\tlabel=<<table border="1" cellborder="0" style="rounded">\n');
buf.add('\t\t\t<tr><td colspan="2"><b>${Type.getClassName(Type.getClass(channelHandle))}</b></td></tr>\n');
buf.add('\t\t\t<tr><td colspan="2">${Type.getClassName(Type.getClass(@:privateAccess channelHandle.channel))}</td></tr>\n');
buf.add('\t\t\t<hr/>\n');
buf.add('\t\t\t<tr><td><i>Tree level</i></td><td>${@:privateAccess channelHandle.channel.treeLevel}</td></tr>\n');
buf.add('\t\t\t<hr/>\n');
for (key => val in channelHandle.getDebugAttrs()) {
buf.add('\t\t\t<tr><td><i>$key</i></td><td>$val</td></tr>');
}
buf.add('\t\t</table>>\n');
buf.add('\t];\n');
final thisID = id - 1;
if (Std.isOfType(channelHandle, MixChannelHandle)) {
var mixHandle: MixChannelHandle = cast channelHandle;
for (inputHandle in mixHandle.inputHandles) {
final inputID = id;
addTreeToViz(buf, inputHandle);
buf.add('\t${inputID} -> ${thisID};\n');
}
}
}
#end
static function copyToClipboard(text: String) {
#if (kha_html5 || kha_debug_html5)
js.Browser.navigator.clipboard.writeText(text)
.then(
(_) -> { trace("Debug tree code has been copied to clipboard."); },
(err) -> {
trace('Debug tree code could not be copied to clipboard, writing to console instead. Reason: $err');
trace(text);
}
);
#else
trace(text);
#end
}
public static function drawWaveform(buffer: Float32Array, g: Graphics, x: Float, y: Float, w: Float, h: Float) {
g.begin(false);
g.opacity = 1.0;
g.color = kha.Color.fromFloats(0.176, 0.203, 0.223);
g.fillRect(x, y, w, h);
final borderSize = 2;
g.color = kha.Color.fromFloats(0.099, 0.099, 0.099);
g.drawRect(x + borderSize * 0.5, y + borderSize * 0.5, w - borderSize, h - borderSize, borderSize);
g.color = kha.Color.fromFloats(0.898, 0.411, 0.164);
final deinterleavedLength = Std.int(buffer.length / 2);
final numLines = buffer.length - 1;
final stepSize = w / numLines;
final innerHeight = h - 2 * borderSize;
for (c in 0...2) {
if ( c == 1 ) g.color = kha.Color.fromFloats(0.023, 0.443, 0.796);
for (i in 0...deinterleavedLength - 1) {
final idx = i + c * deinterleavedLength;
final y1 = y + borderSize + (1 - clampF(buffer[idx] * 0.5 + 0.5, 0, 1)) * innerHeight;
final y2 = y + borderSize + (1 - clampF(buffer[idx + 1] * 0.5 + 0.5, 0, 1)) * innerHeight;
g.drawLine(x + idx * stepSize, y1, x + (idx + 1) * stepSize, y2);
}
}
g.color = kha.Color.fromFloats(0.023, 0.443, 0.796);
g.opacity = 0.5;
// g.drawLine(x + w / 2, y, x + w / 2, y + h, 2);
g.end();
}
public static function createRenderTarget(w: Int, h: Int): Image {
return Image.createRenderTarget(Std.int(w), Std.int(h), null, NoDepthAndStencil, 1);
}
// #end
}

View File

@ -0,0 +1,22 @@
package aura.utils;
/**
The decibel (dB) is a relative unit of measurement equal to one tenth of a bel (B).
It expresses the ratio of two values of a power or root-power quantity on a logarithmic scale.
The number of decibels is ten times the logarithm to base 10 of the ratio of two power quantities.
A change in power by a factor of 10 corresponds to a 10 dB change in level.
At the half power point an audio circuit or an antenna exhibits an attenuation of approximately 3 dB.
A change in amplitude by a factor of 10 results in a change in power by a factor of 100, which corresponds to a 20 dB change in level.
A change in amplitude ratio by a factor of 2 (equivalently factor of 4 in power change) approximately corresponds to a 6 dB change in level.
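A few illustrative values (approximate):

```haxe
Decibel.toDecibel(1.0); // 0.0 dB
Decibel.toDecibel(0.5); // ~ -6.02 dB
Decibel.toLinear(-20.0); // 0.1
```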
**/
class Decibel {
@:pure public static inline function toDecibel(volume: Float): Float {
return 20 * MathUtils.log10(volume);
}
@:pure public static inline function toLinear(db: Float): Float {
return Math.pow(10, db / 20);
}
}

View File

@ -0,0 +1,27 @@
package aura.utils;
import aura.utils.Assert.*;
@:pure inline function frequencyToFactor(freq: Hertz, maxFreq: Hertz): Float {
assert(Debug, freq <= maxFreq);
return freq / maxFreq;
}
@:pure inline function factorToFrequency(factor: Float, maxFreq: Hertz): Hertz {
assert(Debug, 0.0 <= factor && factor <= 1.0);
return Std.int(factor * maxFreq);
}
@:pure inline function sampleRateToMaxFreq(sampleRate: Hertz): Hertz {
return Std.int(sampleRate / 2.0);
}
@:pure inline function msToSamples(sampleRate: Hertz, milliseconds: Millisecond): Int {
return Math.ceil((milliseconds * 0.001) * sampleRate);
}
@:pure inline function samplesToMs(sampleRate: Hertz, samples: Int): Millisecond {
return (samples / sampleRate) * 1000;
}

View File

@ -0,0 +1,53 @@
package aura.utils;
import kha.FastFloat;
import kha.simd.Float32x4;
import aura.types.AudioBuffer.AudioBufferChannelView;
class LinearInterpolator {
public var lastValue: FastFloat;
public var targetValue: FastFloat;
public var currentValue: FastFloat;
public inline function new(targetValue: FastFloat) {
this.targetValue = this.currentValue = this.lastValue = targetValue;
}
public inline function updateLast() {
this.lastValue = this.currentValue = this.targetValue;
}
public inline function getLerpStepSize(numSteps: Int): FastFloat {
return (this.targetValue - this.lastValue) / numSteps;
}
/**
Return a 32x4 SIMD register where each value contains the step size times
its index for efficient usage in `LinearInterpolator.applySIMD32x4()`.
**/
public inline function getLerpStepSizes32x4(numSteps: Int): Float32x4 {
final stepSize = getLerpStepSize(numSteps);
return Float32x4.mul(Float32x4.loadAllFast(stepSize), Float32x4.loadFast(1.0, 2.0, 3.0, 4.0));
}
/**
Applies four consecutive interpolation steps to `samples` (multiplicative)
using Kha's 32x4 SIMD API, starting at index `i`. `stepSizes32x4` must
be a SIMD register filled with `LinearInterpolator.getLerpStepSizes32x4()`.
There is no bound checking in place! It is assumed that 4 samples can
be accessed starting at `i`.
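A sketch of the intended calling pattern (assuming `samples` holds
`numSamples` samples, where `numSamples` is divisible by 4):

```haxe
final interp = new LinearInterpolator(1.0);
interp.targetValue = 0.5; // ramp towards the new value
final stepSizes = interp.getLerpStepSizes32x4(numSamples);
var i = 0;
while (i < numSamples) {
interp.applySIMD32x4(samples, i, stepSizes);
i += 4;
}
interp.updateLast();
```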
**/
public inline function applySIMD32x4(samples: AudioBufferChannelView, i: Int, stepSizes32x4: Float32x4) {
var rampValues = Float32x4.add(Float32x4.loadAllFast(currentValue), stepSizes32x4);
currentValue = Float32x4.getFast(rampValues, 3);
var signalValues = Float32x4.loadFast(samples[i], samples[i + 1], samples[i + 2], samples[i + 3]);
var res = Float32x4.mul(signalValues, rampValues);
samples[i + 0] = Float32x4.getFast(res, 0);
samples[i + 1] = Float32x4.getFast(res, 1);
samples[i + 2] = Float32x4.getFast(res, 2);
samples[i + 3] = Float32x4.getFast(res, 3);
}
}

View File

@ -0,0 +1,13 @@
package aura.utils;
/**
Merges the contents of `from` into `to` and returns the latter (`to` is
modified).
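Example:

```haxe
final to = [1 => "a"];
mergeIntoThis(to, [2 => "b"]); // `to` now contains both entries
```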
**/
@:generic
inline function mergeIntoThis<K, V>(to: Map<K, V>, from: Map<K, V>): Map<K, V> {
for (key => val in from) {
to[key] = val;
}
return to;
}

View File

@ -0,0 +1,131 @@
/**
Various math helper functions.
**/
package aura.utils;
import kha.FastFloat;
import aura.math.Vec3;
/** 1.0 / ln(10) in double precision **/
inline var LN10_INV_DOUBLE: Float = 0.43429448190325181666793241674895398318767547607421875;
/** 1.0 / ln(10) in single precision **/
inline var LN10_INV_SINGLE: kha.FastFloat = 0.4342944920063018798828125;
/** 1.0 / e (Euler's number) **/
inline var E_INV: kha.FastFloat = 0.367879441171442321595523770161460867;
@:pure inline function maxI(a: Int, b: Int): Int {
return a > b ? a : b;
}
@:pure inline function minI(a: Int, b: Int): Int {
return a < b ? a : b;
}
@:pure inline function maxF(a: Float, b: Float): Float {
return a > b ? a : b;
}
@:pure inline function minF(a: Float, b: Float): Float {
return a < b ? a : b;
}
@:pure inline function lerp(valA: Float, valB: Float, fac: Float): Float {
return valA * (1 - fac) + valB * fac;
}
@:pure inline function lerpF32(valA: FastFloat, valB: FastFloat, fac: FastFloat): FastFloat {
return valA * (1 - fac) + valB * fac;
}
@:pure inline function clampI(val: Int, min: Int = 0, max: Int = 1): Int {
return maxI(min, minI(max, val));
}
@:pure inline function clampF(val: Float, min: Float = 0.0, max: Float = 1.0): Float {
return maxF(min, minF(max, val));
}
/**
Returns the base-10 logarithm of a number.
**/
@:pure inline function log10(v: Float): Float {
return Math.log(v) * LN10_INV_DOUBLE;
}
/**
Calculate the counterclockwise angle of the rotation of `vecOther` relative
to `vecBase` around the rotation axis of `vecNormal`. All input vectors
*must* be normalized!
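For example (all vectors normalized, rotating around the Z axis):

```haxe
final base = new Vec3(1.0, 0.0, 0.0);
final other = new Vec3(0.0, 1.0, 0.0);
getFullAngleDegrees(base, other, new Vec3(0.0, 0.0, 1.0)); // 90.0
```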
**/
@:pure inline function getFullAngleDegrees(vecBase: Vec3, vecOther: Vec3, vecNormal: Vec3): Float {
final dot = vecBase.dot(vecOther);
final det = determinant3x3(vecBase, vecOther, vecNormal);
var radians = Math.atan2(det, dot);
// Move [-PI, 0) to [PI, 2 * PI]
if (radians < 0) {
radians += 2 * Math.PI;
}
return radians * 180 / Math.PI;
}
@:pure inline function determinant3x3(col1: Vec3, col2: Vec3, col3: Vec3): Float {
return (
col1.x * col2.y * col3.z
+ col2.x * col3.y * col1.z
+ col3.x * col1.y * col2.z
- col1.z * col2.y * col3.x
- col2.z * col3.y * col1.x
- col3.z * col1.y * col2.x
);
}
/**
Projects the given point to a plane described by its normal vector. The
origin of the plane is assumed to be at (0, 0, 0).
**/
@:pure inline function projectPointOntoPlane(point: Vec3, planeNormal: Vec3): Vec3 {
return point.sub(planeNormal.mult(planeNormal.dot(point)));
}
@:pure inline function isPowerOf2(val: Int): Bool {
return (val & (val - 1)) == 0;
}
@:pure inline function getNearestIndexF(value: Float, stepSize: Float): Int {
final quotient: Int = Std.int(value / stepSize);
final remainder: Float = value % stepSize;
return (remainder > stepSize / 2) ? (quotient + 1) : (quotient);
}
/**
Calculates the logarithm of base 2 for the given unsigned(!) integer `n`,
which is the position of the most significant bit set.
**/
@:pure inline function log2Unsigned(n: Int): Int {
// TODO: optimize? See https://graphics.stanford.edu/~seander/bithacks.html#IntegerLog
var res = 0;
var tmp = n >>> 1; // Workaround for https://github.com/HaxeFoundation/haxe/issues/10783
while (tmp != 0) {
res++;
tmp >>>= 1;
}
return res;
}
/** Calculates 2^n for a given unsigned integer `n`. **/
@:pure inline function exp2(n: Int): Int {
return 1 << n;
}
@:pure inline function div4(n: Int): Int {
return n >>> 2;
}
@:pure inline function mod4(n: Int): Int {
return n & 3;
}

View File

@ -0,0 +1,37 @@
package aura.utils;
@:generic
class Pointer<T> {
public var value: Null<T>;
public inline function new(value: Null<T> = null) {
set(value);
}
public inline function set(value: Null<T>) {
this.value = value;
}
public inline function get(): Null<T> {
return this.value;
}
/**
Return the pointer's value typed as not-nullable. Use at your own risk.
**/
public inline function getSure(): T {
return @:nullSafety(Off) (this.value: T);
}
}
/**
Workaround for covariance issues when using generics. Use `PointerType<T>`
instead of `Pointer<T>` when using generic pointers as function parameters.
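For example, the following (hypothetical) helper accepts any concrete
`Pointer<Int>`:

```haxe
function getOrDefault(p: PointerType<Int>, fallback: Int): Int {
final value = p.get();
return value != null ? value : fallback;
}
// getOrDefault(new Pointer<Int>(5), 0) == 5
```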
**/
@:generic
typedef PointerType<T> = {
public var value: Null<T>;
public function set(value: Null<T>): Void;
public function get(): Null<T>;
}

View File

@ -0,0 +1,24 @@
package aura.utils;
#if (cpp && AURA_WITH_OPTICK)
@:cppInclude('optick.h')
#end
class Profiler {
public static inline function frame(threadName: String) {
#if (cpp && AURA_WITH_OPTICK)
untyped __cpp__("OPTICK_FRAME({0})", threadName);
#end
}
public static inline function event() {
#if (cpp && AURA_WITH_OPTICK)
untyped __cpp__("OPTICK_EVENT()");
#end
}
public static inline function shutdown() {
#if (cpp && AURA_WITH_OPTICK)
untyped __cpp__("OPTICK_SHUTDOWN()");
#end
}
}

View File

@ -0,0 +1,79 @@
package aura.utils;
import kha.arrays.Float32Array;
import aura.utils.MathUtils;
/**
Various utilities for resampling (i.e. changing the sample rate) of signals.
Terminology used in this class for a resampling process:
- **Source data** describes the data prior to resampling.
- **Target data** describes the resampled data.
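A short usage sketch (assuming `sourceData` is a `Float32Array`
recorded at 44100 Hz; the sample rates are example values):

```haxe
final targetLength = Resampler.getResampleLength(sourceData.length, 44100, 48000);
final targetData = new Float32Array(targetLength);
Resampler.resampleFloat32Array(sourceData, 44100, targetData, 48000);
```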
**/
class Resampler {
/**
Return the amount of samples required for storing the result of
resampling data with the given `sourceDataLength` to the
`targetSampleRate`.
**/
public static inline function getResampleLength(sourceDataLength: Int, sourceSampleRate: Hertz, targetSampleRate: Hertz): Int {
return Math.ceil(sourceDataLength * (targetSampleRate / sourceSampleRate));
}
/**
Transform a position (in samples) relative to the source's sample rate
into a position (in samples) relative to the target's sample rate and
return the transformed position.
**/
public static inline function sourceSamplePosToTargetPos(sourceSamplePos: Float, sourceSampleRate: Hertz, targetSampleRate: Hertz): Float {
return sourceSamplePos * (targetSampleRate / sourceSampleRate);
}
/**
Transform a position (in samples) relative to the target's sample rate
into a position (in samples) relative to the source's sample rate and
return the transformed position.
**/
public static inline function targetSamplePosToSourcePos(targetSamplePos: Float, sourceSampleRate: Hertz, targetSampleRate: Hertz): Float {
return targetSamplePos * (sourceSampleRate / targetSampleRate);
}
/**
Resample the given `sourceData` from `sourceSampleRate` to
`targetSampleRate` and write the resampled data into `targetData`.
It is expected that
`targetData.length == Resampler.getResampleLength(sourceData.length, sourceSampleRate, targetSampleRate)`,
otherwise this method may fail (there are no safety checks in place)!
**/
public static inline function resampleFloat32Array(sourceData: Float32Array, sourceSampleRate: Hertz, targetData: Float32Array, targetSampleRate: Hertz) {
for (i in 0...targetData.length) {
targetData[i] = sampleAtTargetPositionLerp(sourceData, i, sourceSampleRate, targetSampleRate);
}
}
/**
Sample the given `sourceData` at `targetSamplePos` (position in samples
relative to the target data) using linear interpolation for values
between source samples.
@param sourceSampleRate The sample rate of the source data
@param targetSampleRate The sample rate of the target data
**/
public static function sampleAtTargetPositionLerp(sourceData: Float32Array, targetSamplePos: Float, sourceSampleRate: Hertz, targetSampleRate: Hertz): Float {
assert(Critical, targetSamplePos >= 0.0);
final sourceSamplePos = targetSamplePosToSourcePos(targetSamplePos, sourceSampleRate, targetSampleRate);
final maxPos = sourceData.length - 1;
final pos1 = Math.floor(sourceSamplePos);
final pos2 = pos1 + 1;
final value1 = (pos1 > maxPos) ? sourceData[maxPos] : sourceData[pos1];
final value2 = (pos2 > maxPos) ? sourceData[maxPos] : sourceData[pos2];
return lerp(value1, value2, sourceSamplePos - Math.floor(sourceSamplePos));
}
}

View File

@ -0,0 +1,32 @@
package aura.utils;
/**
Use this as a static extension:
```haxe
using ReverseIterator;
for (i in (0...10).reversed()) {
// Do something...
}
```
**/
inline function reversed(iter: IntIterator, step: Int = 1) {
return @:privateAccess new ReverseIterator(iter.max - 1, iter.min, step);
}
private class ReverseIterator {
var currentIndex: Int;
var end: Int;
var step: Int;
public inline function new(start: Int, end: Int, step: Int) {
this.currentIndex = start;
this.end = end;
this.step = step;
}
public inline function hasNext() return currentIndex >= end;
public inline function next() return (currentIndex -= step) + step;
}

View File

@ -0,0 +1,36 @@
// =============================================================================
// Adapted from
// https://code.haxe.org/category/data-structures/step-iterator.html
// =============================================================================
package aura.utils;
/**
Use this as a static extension:
```haxe
using aura.utils.StepIterator;
for (i in (0...10).step(2)) {
// Do something...
}
```
**/
inline function step(iter: IntIterator, step: Int) {
return @:privateAccess new StepIterator(iter.min, iter.max, step);
}
private class StepIterator {
var currentIndex: Int;
final end: Int;
final step: Int;
public inline function new(start: Int, end: Int, step: Int) {
this.currentIndex = start;
this.end = end;
this.step = step;
}
public inline function hasNext() return currentIndex < end;
public inline function next() return (currentIndex += step) - step;
}

View File

@ -0,0 +1,23 @@
package aura.utils;
import kha.arrays.Float32Array;
class TestSignals {
/**
Fill the given `array` with a signal that represents a DC or 0Hz signal.
**/
public static inline function fillDC(array: Float32Array) {
for (i in 0...array.length) {
array[i] = 1.0;
}
}
/**
Fill the given `array` with a single unit impulse.
**/
public static inline function fillUnitImpulse(array: Float32Array) {
for (i in 0...array.length) {
array[i] = (i == 0) ? 1.0 : 0.0;
}
}
}

View File

@ -0,0 +1,136 @@
package aura.utils.macro;
import haxe.macro.Context;
import haxe.macro.Expr;
import haxe.macro.Type.ClassType;
/**
This macro implements integer enum types that can extend from others, at the
cost of some limitations.
## Usage
```haxe
@:autoBuild(aura.utils.macro.ExtensibleEnumBuilder.build())
@:build(aura.utils.macro.ExtensibleEnumBuilder.build())
class BaseEnum {
var ABaseEnumValue;
}
class ExtendingEnum extends BaseEnum {
var AnExtendingEnumValue;
}
```
This macro transforms the variables in the above example into the static inline
variables `BaseEnum.ABaseEnumValue = 0` and `ExtendingEnum.AnExtendingEnumValue = 1`.
The compiler dump after the macro looks as follows:
```haxe
// BaseEnum.dump
@:used @:autoBuild(aura.utils.macro.ExtensibleEnumBuilder.build()) @:build(aura.utils.macro.ExtensibleEnumBuilder.build())
class BaseEnum {
@:value(0)
public static inline var ABaseEnumValue:Int = 0;
@:value(ABaseEnumValue + 1)
static inline var _SubtypeOffset:Int = 1;
}
// ExtendingEnum.dump
@:used @:build(aura.utils.macro.ExtensibleEnumBuilder.build()) @:autoBuild(aura.utils.macro.ExtensibleEnumBuilder.build())
class ExtendingEnum extends BaseEnum {
@:value(@:privateAccess Main.BaseEnum._SubtypeOffset)
public static inline var AnExtendingEnumValue:Int = 1;
@:value(AnExtendingEnumValue + 1)
static inline var _SubtypeOffset:Int = 2;
}
```
## Limitations
- Only integer types are supported.
- The enums are stored in classes instead of `enum abstract` types.
- Actual values are typed as `Int`, so there is no auto-completion and switch/case statements are less intelligent.
- No actual OOP-like inheritance (which wouldn't work with enums since enum inheritance would need to be contravariant).
More importantly, only the values of the variables are extended, but subclassing enums _don't inherit the variables_
of their superclass enums.
- Using a macro adds a little complexity and compile time.
**/
class ExtensibleEnumBuilder {
@:persistent static final SUBTYPE_VARNAME = "_SubtypeOffset";
public static macro function build(): Array<Field> {
final fields = Context.getBuildFields();
final newFields = new Array<Field>();
final cls = Context.getLocalClass().get();
final superClass = cls.superClass;
final isExtending = superClass != null;
var lastField: Null<Field> = null;
for (field in fields) {
switch (field.kind) {
case FVar(complexType, expr):
var newExpr: Expr;
if (lastField == null) {
if (isExtending) {
final path = classTypeToStringPath(superClass.t.get());
newExpr = macro @:pos(Context.currentPos()) @:privateAccess ${strPathToExpr(path)}.$SUBTYPE_VARNAME;
}
else {
newExpr = macro 0;
}
}
else {
newExpr = macro $i{lastField.name} + 1;
}
newFields.push({
name: field.name,
access: [APublic, AStatic, AInline],
kind: FVar(complexType, newExpr),
meta: field.meta,
doc: field.doc,
pos: Context.currentPos()
});
lastField = field;
default:
newFields.push(field);
}
}
newFields.push({
name: SUBTYPE_VARNAME,
access: [APrivate, AStatic, AInline],
kind: FVar(macro: Int, lastField != null ? macro $i{lastField.name} + 1 : macro 0),
pos: Context.currentPos()
});
return newFields;
}
static function classTypeToStringPath(classType: ClassType): String {
var moduleName = classType.module.split(".").pop();
final name = moduleName + "." + classType.name;
return classType.pack.length == 0 ? name : classType.pack.join(".") + "." + name;
}
static inline function strPathToExpr(path: String): Expr {
// final pathArray = path.split(".");
// final first = EConst(CIdent(pathArray.shift()));
// var expr = { expr: first, pos: Context.currentPos() };
// for (item in pathArray) {
// expr = { expr: EField(expr, item), pos: Context.currentPos() };
// }
// return expr;
return macro $p{path.split(".")};
}
}