merge upstream

2025-06-05 17:49:54 +00:00
11 changed files with 166 additions and 19 deletions

View File

@@ -508,9 +508,7 @@ fragColor.rgb = min(fragColor.rgb, 65504 * 0.5);
fragColor.rgb = pow(fragColor.rgb, vec3(1.0 / 2.2)); // To gamma
} else if (PPComp4.x == 10){
fragColor.rgb = tonemapAgXFull(fragColor.rgb);
} else {
fragColor.rgb = vec3(0,1,0); //ERROR
}
} //else { fragColor.rgb = vec3(0,1,0); //ERROR}
#endif
#else

View File

@@ -45,7 +45,7 @@ class DebugDrawHelper {
iron.App.notifyOnRender2D(onRender);
if (debugDrawMode & DrawRayCast != 0) {
iron.App.notifyOnUpdate(function () {
iron.App.notifyOnFixedUpdate(function () {
rayCasts.resize(0);
});
}

View File

@@ -434,8 +434,8 @@ class PhysicsWorld extends Trait {
from: from,
to: to,
hasHit: rc.hasHit(),
hitPoint: hitPointWorld,
hitNormal: hitNormalWorld
hitPoint: hitPointWorld.clone(),
hitNormal: hitNormalWorld.clone()
});
}
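The `.clone()` calls matter because `hitPointWorld` and `hitNormalWorld` are presumably vectors that the physics wrapper reuses between ray casts; pushing the raw references would make every stored hit end up pointing at the most recent result. A minimal sketch of that aliasing pitfall, using a hypothetical `Vec4` stand-in rather than iron's actual math types:

// Hypothetical stand-in for iron.math.Vec4, only to illustrate the aliasing bug.
class Vec4 {
	public var x: Float; public var y: Float; public var z: Float;
	public function new(x = 0.0, y = 0.0, z = 0.0) { this.x = x; this.y = y; this.z = z; }
	public function set(x: Float, y: Float, z: Float) { this.x = x; this.y = y; this.z = z; }
	public function clone(): Vec4 return new Vec4(x, y, z);
}

class AliasSketch {
	static function main() {
		final hitPointWorld = new Vec4();   // buffer reused by every ray cast
		final hits = new Array<Vec4>();

		hitPointWorld.set(1, 0, 0);
		hits.push(hitPointWorld);           // BUG: stores a reference to the shared buffer
		hitPointWorld.set(2, 0, 0);
		hits.push(hitPointWorld.clone());   // OK: stores an independent snapshot

		trace(hits[0].x);                   // prints 2: the first hit was silently overwritten
		trace(hits[1].x);                   // prints 2: the snapshot keeps its value
	}
}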

View File

@@ -16,7 +16,7 @@ class CameraGetNode(LnxLogicTreeNode):
self.add_output('LnxFloatSocket', 'DOF Distance')#6
self.add_output('LnxFloatSocket', 'DOF Length')#7
self.add_output('LnxFloatSocket', 'DOF F-Stop')#8
self.add_output('LnxBoolSocket', 'Tonemapping')#9
self.add_output('LnxIntSocket', 'Tonemapping')#9
self.add_output('LnxFloatSocket', 'Distort')#10
self.add_output('LnxFloatSocket', 'Film Grain')#11
self.add_output('LnxFloatSocket', 'Sharpen')#12

View File

@@ -16,6 +16,10 @@ class ChromaticAberrationSetNode(LnxLogicTreeNode):
self.add_output('LnxNodeSocketAction', 'Out')
def draw_buttons(self, context, layout):
layout.label(text="Type 0: Simple")
layout.label(text="Type 1: Spectral")
def get_replacement_node(self, node_tree: bpy.types.NodeTree):
if self.lnx_version not in (0, 1):
raise LookupError()

View File

@@ -4,7 +4,7 @@ class CameraSetNode(LnxLogicTreeNode):
"""Set the post-processing effects of a camera."""
bl_idname = 'LNCameraSetNode'
bl_label = 'Set Camera Post Process'
lnx_version = 5
lnx_version = 6
def remove_extra_inputs(self, context):
@@ -29,7 +29,7 @@ class CameraSetNode(LnxLogicTreeNode):
if self.property0 == 'DoF F-Stop':
self.add_input('LnxFloatSocket', 'DoF F-Stop', default_value=128.0)#8
if self.property0 == 'Tonemapping':
self.add_input('LnxBoolSocket', 'Tonemapping', default_value=False)#9
self.add_input('LnxIntSocket', 'Tonemapping', default_value=5)#9
if self.property0 == 'Distort':
self.add_input('LnxFloatSocket', 'Distort', default_value=2.0)#10
if self.property0 == 'Film Grain':
@@ -70,6 +70,13 @@ class CameraSetNode(LnxLogicTreeNode):
self.add_output('LnxNodeSocketAction', 'Out')
def draw_buttons(self, context, layout):
if self.property0 == 'Tonemapping':
layout.label(text="0: Filmic")
layout.label(text="1: Filmic2")
layout.label(text="2: Reinhard")
layout.label(text="3: Uncharted2")
layout.label(text="5: Agx")
layout.label(text="6: None")
layout.prop(self, 'property0')
def get_replacement_node(self, node_tree: bpy.types.NodeTree):

View File

@@ -152,8 +152,12 @@ if bpy.app.version > (4, 1, 0):
pass
if (node.inputs['Emission Strength'].is_linked or node.inputs['Emission Strength'].default_value != 0.0)\
and (node.inputs['Emission Color'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission Color'], (0.0, 0.0, 0.0), comp_alpha=False)):
emission_col = c.parse_vector_input(node.inputs[26])
emission_strength = c.parse_value_input(node.inputs[27])
if bpy.app.version >= (4, 4, 0):
emission_col = c.parse_vector_input(node.inputs[27])
emission_strength = c.parse_value_input(node.inputs[28])
else:
emission_col = c.parse_vector_input(node.inputs[26])
emission_strength = c.parse_value_input(node.inputs[27])
state.out_emission_col = '({0} * {1})'.format(emission_col, emission_strength)
mat_state.emission_type = mat_state.EmissionType.SHADED
else:

View File

@@ -26,7 +26,7 @@ class Time {
return overrideTime;
}
#end
return Scheduler.realTime();
return Scheduler.time();
}
public static inline function update() {
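The switch from `Scheduler.realTime()` to `Scheduler.time()` means `Time` now follows Kha's scheduled game-loop clock rather than unscaled wall-clock time. A minimal sketch of the two clocks this getter can sit on, assuming only the standard `kha.Scheduler` API:

import kha.Scheduler;

// Sketch only; not part of the patch.
class ClockSketch {
	// What the patched getter returns: time driven by the frame schedule,
	// so it stays consistent with update/render callbacks.
	public static inline function scheduled(): Float return Scheduler.time();

	// What the old code returned: real elapsed time since start,
	// independent of how the schedule is being advanced.
	public static inline function wallClock(): Float return Scheduler.realTime();
}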

View File

@@ -4,9 +4,15 @@ package aura.channels;
import js.Browser;
import js.html.AudioElement;
import js.html.audio.AudioContext;
import js.html.audio.ChannelSplitterNode;
import js.html.audio.ChannelMergerNode;
import js.html.audio.GainNode;
import js.html.audio.MediaElementAudioSourceNode;
import js.html.URL;
import kha.SystemImpl;
import kha.js.MobileWebAudio;
import kha.js.MobileWebAudioChannel;
import aura.threading.Message;
@@ -30,20 +36,67 @@ import aura.types.AudioBuffer;
class Html5StreamChannel extends BaseChannel {
static final virtualChannels: Array<Html5StreamChannel> = [];
final audioContext: AudioContext;
final audioElement: AudioElement;
final source: MediaElementAudioSourceNode;
final gain: GainNode;
final leftGain: GainNode;
final rightGain: GainNode;
final attenuationGain: GainNode;
final splitter: ChannelSplitterNode;
final merger: ChannelMergerNode;
var virtualPosition: Float;
var lastUpdateTime: Float;
var dopplerRatio: Float = 1.0;
var pitch: Float = 1.0;
public function new(sound: kha.Sound, loop: Bool) {
audioContext = new AudioContext();
audioElement = Browser.document.createAudioElement();
source = audioContext.createMediaElementSource(audioElement);
final mimeType = #if kha_debug_html5 "audio/ogg" #else "audio/mp4" #end;
final blob = new js.html.Blob([sound.compressedData.getData()], {type: mimeType});
final soundData: js.lib.ArrayBuffer = sound.compressedData.getData();
final blob = new js.html.Blob([soundData], {type: mimeType});
// TODO: if removing channels, use URL.revokeObjectURL()?
// see https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
audioElement.src = URL.createObjectURL(blob);
audioElement.loop = loop;
untyped audioElement.preservesPitch = false;
splitter = audioContext.createChannelSplitter(2);
leftGain = audioContext.createGain();
rightGain = audioContext.createGain();
attenuationGain = audioContext.createGain();
merger = audioContext.createChannelMerger(2);
gain = audioContext.createGain();
source.connect(splitter);
// The sound data needs to be decoded because `sound.channels` returns `0`.
audioContext.decodeAudioData(soundData, function (buffer) {
// TODO: add more cases for Quad and 5.1 ? - https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API/Basic_concepts_behind_Web_Audio_API#audio_channels
switch (buffer.numberOfChannels) {
case 1:
splitter.connect(leftGain, 0);
splitter.connect(rightGain, 0);
case 2:
splitter.connect(leftGain, 0);
splitter.connect(rightGain, 1);
default:
}
});
leftGain.connect(merger, 0, 0);
rightGain.connect(merger, 0, 1);
merger.connect(attenuationGain);
attenuationGain.connect(gain);
gain.connect(audioContext.destination);
if (isVirtual()) {
virtualChannels.push(this);
@@ -130,15 +183,32 @@ class Html5StreamChannel extends BaseChannel {
switch (message.id) {
// Because we're using an HTML implementation here, we cannot use the
// LinearInterpolator parameters
case ChannelMessageID.PVolume: audioElement.volume = cast message.data;
case ChannelMessageID.PVolume: attenuationGain.gain.value = cast message.data;
case ChannelMessageID.PPitch:
pitch = cast message.data;
updatePlaybackRate();
case ChannelMessageID.PDopplerRatio:
case ChannelMessageID.PDstAttenuation:
dopplerRatio = cast message.data;
updatePlaybackRate();
case ChannelMessageID.PDstAttenuation: gain.gain.value = cast message.data;
case ChannelMessageID.PVolumeLeft: leftGain.gain.value = cast message.data;
case ChannelMessageID.PVolumeRight: rightGain.gain.value = cast message.data;
default:
super.parseMessage(message);
}
}
function updatePlaybackRate() {
try {
audioElement.playbackRate = pitch * dopplerRatio;
}
catch (e) {
// Ignore. Unfortunately some browsers only support a certain range
// of playback rates, but this is not explicitly specified, so there's
// not much we can do here.
}
}
}
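For orientation, the constructor above builds this per-channel graph: media element source, then channel splitter, then left/right gains (driven by the new `PVolumeLeft` / `PVolumeRight` messages), then merger, then attenuation gain (`PVolume`), then master gain (`PDstAttenuation`), then destination. A stripped-down sketch of the same wiring with plain Web Audio calls, leaving out the Aura/Kha plumbing:

import js.Browser;
import js.html.audio.AudioContext;

// Sketch of the routing used by Html5StreamChannel, not the actual class.
class GraphSketch {
	static function main() {
		final ctx = new AudioContext();
		final element = Browser.document.createAudioElement();
		final source = ctx.createMediaElementSource(element);

		final splitter = ctx.createChannelSplitter(2);
		final leftGain = ctx.createGain();      // per-ear volume (PVolumeLeft)
		final rightGain = ctx.createGain();     // per-ear volume (PVolumeRight)
		final merger = ctx.createChannelMerger(2);
		final attenuation = ctx.createGain();   // overall channel volume (PVolume)
		final master = ctx.createGain();        // distance attenuation (PDstAttenuation)

		source.connect(splitter);
		splitter.connect(leftGain, 0);          // assuming a stereo source here
		splitter.connect(rightGain, 1);
		leftGain.connect(merger, 0, 0);
		rightGain.connect(merger, 0, 1);
		merger.connect(attenuation);
		attenuation.connect(master);
		master.connect(ctx.destination);
	}
}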
/**
@@ -147,10 +217,50 @@ class Html5StreamChannel extends BaseChannel {
https://github.com/Kode/Kha/commit/12494b1112b64e4286b6a2fafc0f08462c1e7971
**/
class Html5MobileStreamChannel extends BaseChannel {
final audioContext: AudioContext;
final khaChannel: kha.js.MobileWebAudioChannel;
final leftGain: GainNode;
final rightGain: GainNode;
final attenuationGain: GainNode;
final splitter: ChannelSplitterNode;
final merger: ChannelMergerNode;
var dopplerRatio: Float = 1.0;
var pitch: Float = 1.0;
public function new(sound: kha.Sound, loop: Bool) {
audioContext = MobileWebAudio._context;
khaChannel = new kha.js.MobileWebAudioChannel(cast sound, loop);
@:privateAccess khaChannel.gain.disconnect(audioContext.destination);
@:privateAccess khaChannel.source.disconnect(@:privateAccess khaChannel.gain);
splitter = audioContext.createChannelSplitter(2);
leftGain = audioContext.createGain();
rightGain = audioContext.createGain();
merger = audioContext.createChannelMerger(2);
attenuationGain = audioContext.createGain();
@:privateAccess khaChannel.source.connect(splitter);
// TODO: add more cases for Quad and 5.1 ? - https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API/Basic_concepts_behind_Web_Audio_API#audio_channels
switch (sound.channels) {
case 1:
splitter.connect(leftGain, 0);
splitter.connect(rightGain, 0);
case 2:
splitter.connect(leftGain, 0);
splitter.connect(rightGain, 1);
default:
}
leftGain.connect(merger, 0, 0);
rightGain.connect(merger, 0, 1);
merger.connect(attenuationGain);
attenuationGain.connect(@:privateAccess khaChannel.gain);
@:privateAccess khaChannel.gain.connect(audioContext.destination);
}
public function play(retrigger: Bool) {
@@ -181,12 +291,26 @@ class Html5MobileStreamChannel extends BaseChannel {
// LinearInterpolator parameters
case ChannelMessageID.PVolume: khaChannel.volume = cast message.data;
case ChannelMessageID.PPitch:
pitch = cast message.data;
updatePlaybackRate();
case ChannelMessageID.PDopplerRatio:
case ChannelMessageID.PDstAttenuation:
dopplerRatio = cast message.data;
updatePlaybackRate();
case ChannelMessageID.PDstAttenuation: attenuationGain.gain.value = cast message.data;
case ChannelMessageID.PVolumeLeft: leftGain.gain.value = cast message.data;
case ChannelMessageID.PVolumeRight: rightGain.gain.value = cast message.data;
default:
super.parseMessage(message);
}
}
function updatePlaybackRate() {
try {
@:privateAccess khaChannel.source.playbackRate.value = pitch * dopplerRatio;
}
catch (e) {}
}
}
#end

View File

@@ -61,9 +61,14 @@ class StereoPanner extends Panner {
public inline function setBalance(balance: Balance) {
this._balance = balance;
sendMessage({ id: StereoPannerMessageID.PVolumeLeft, data: Math.sqrt(~balance) });
sendMessage({ id: StereoPannerMessageID.PVolumeRight, data: Math.sqrt(balance) });
final volumeLeft = Math.sqrt(~balance);
final volumeRight = Math.sqrt(balance);
sendMessage({ id: StereoPannerMessageID.PVolumeLeft, data: volumeLeft });
sendMessage({ id: StereoPannerMessageID.PVolumeRight, data: volumeRight });
#if (kha_html5 || kha_debug_html5)
handle.channel.sendMessage({ id: ChannelMessageID.PVolumeLeft, data: volumeLeft });
handle.channel.sendMessage({ id: ChannelMessageID.PVolumeRight, data: volumeRight });
#end
}
public inline function getBalance(): Balance {
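The square roots implement constant-power panning: with `balance` normalized to [0, 1] (0.5 meaning centered) and `~balance` acting as its complement, the two gains satisfy left² + right² = 1, so total output power stays constant while the pan moves. A small sketch of the curve, assuming `~balance` is equivalent to `1 - balance` (the `Balance` abstract itself is not shown in this hunk):

// Sketch: constant-power pan gains over the balance range.
class PanSketch {
	static function main() {
		for (balance in [0.0, 0.25, 0.5, 0.75, 1.0]) {
			final left = Math.sqrt(1 - balance);   // stand-in for Math.sqrt(~balance)
			final right = Math.sqrt(balance);
			final power = left * left + right * right; // always 1.0
			trace('$balance -> L=$left R=$right power=$power');
		}
	}
}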

View File

@@ -25,6 +25,11 @@ class ChannelMessageID extends MessageID {
final PPitch;
final PDopplerRatio;
final PDstAttenuation;
#if (kha_html5 || kha_debug_html5)
final PVolumeLeft;
final PVolumeRight;
#end
}
class DSPMessageID extends MessageID {