Merge pull request 'main' (#107) from Onek8/LNXSDK:main into main

Reviewed-on: LeenkxTeam/LNXSDK#107
This commit is contained in:
2025-09-23 17:54:11 +00:00
35 changed files with 1194 additions and 612 deletions

View File

@ -331,15 +331,18 @@ class RenderPath {
}); });
} }
public static function sortMeshesShader(meshes: Array<MeshObject>) { public static function sortMeshesIndex(meshes: Array<MeshObject>) {
meshes.sort(function(a, b): Int { meshes.sort(function(a, b): Int {
#if rp_depth_texture #if rp_depth_texture
var depthDiff = boolToInt(a.depthRead) - boolToInt(b.depthRead); var depthDiff = boolToInt(a.depthRead) - boolToInt(b.depthRead);
if (depthDiff != 0) return depthDiff; if (depthDiff != 0) return depthDiff;
#end #end
return a.materials[0].name >= b.materials[0].name ? 1 : -1; if (a.data.sortingIndex != b.data.sortingIndex) {
}); return a.data.sortingIndex > b.data.sortingIndex ? 1 : -1;
}
return a.data.name >= b.data.name ? 1 : -1; });
} }
public function drawMeshes(context: String) { public function drawMeshes(context: String) {
@ -399,7 +402,7 @@ class RenderPath {
#if lnx_batch #if lnx_batch
sortMeshesDistance(Scene.active.meshBatch.nonBatched); sortMeshesDistance(Scene.active.meshBatch.nonBatched);
#else #else
drawOrder == DrawOrder.Shader ? sortMeshesShader(meshes) : sortMeshesDistance(meshes); drawOrder == DrawOrder.Index ? sortMeshesIndex(meshes) : sortMeshesDistance(meshes);
#end #end
meshesSorted = true; meshesSorted = true;
} }
@ -914,6 +917,6 @@ class CachedShaderContext {
@:enum abstract DrawOrder(Int) from Int { @:enum abstract DrawOrder(Int) from Int {
var Distance = 0; // Early-z var Distance = 0; // Early-z
var Shader = 1; // Less state changes var Index = 1; // Less state changes
// var Mix = 2; // Distance buckets sorted by shader // var Mix = 2; // Distance buckets sorted by shader
} }

View File

@ -9,6 +9,7 @@ import iron.data.SceneFormat;
class MeshData { class MeshData {
public var name: String; public var name: String;
public var sortingIndex: Int;
public var raw: TMeshData; public var raw: TMeshData;
public var format: TSceneFormat; public var format: TSceneFormat;
public var geom: Geometry; public var geom: Geometry;
@ -23,7 +24,8 @@ class MeshData {
public function new(raw: TMeshData, done: MeshData->Void) { public function new(raw: TMeshData, done: MeshData->Void) {
this.raw = raw; this.raw = raw;
this.name = raw.name; this.name = raw.name;
this.sortingIndex = raw.sorting_index;
if (raw.scale_pos != null) scalePos = raw.scale_pos; if (raw.scale_pos != null) scalePos = raw.scale_pos;
if (raw.scale_tex != null) scaleTex = raw.scale_tex; if (raw.scale_tex != null) scaleTex = raw.scale_tex;

View File

@ -49,6 +49,7 @@ typedef TMeshData = {
@:structInit class TMeshData { @:structInit class TMeshData {
#end #end
public var name: String; public var name: String;
public var sorting_index: Int;
public var vertex_arrays: Array<TVertexArray>; public var vertex_arrays: Array<TVertexArray>;
public var index_arrays: Array<TIndexArray>; public var index_arrays: Array<TIndexArray>;
@:optional public var dynamic_usage: Null<Bool>; @:optional public var dynamic_usage: Null<Bool>;
@ -222,6 +223,7 @@ typedef TShaderData = {
@:structInit class TShaderData { @:structInit class TShaderData {
#end #end
public var name: String; public var name: String;
public var next_pass: String;
public var contexts: Array<TShaderContext>; public var contexts: Array<TShaderContext>;
} }

View File

@ -22,6 +22,7 @@ using StringTools;
class ShaderData { class ShaderData {
public var name: String; public var name: String;
public var nextPass: String;
public var raw: TShaderData; public var raw: TShaderData;
public var contexts: Array<ShaderContext> = []; public var contexts: Array<ShaderContext> = [];
@ -33,6 +34,7 @@ class ShaderData {
public function new(raw: TShaderData, done: ShaderData->Void, overrideContext: TShaderOverride = null) { public function new(raw: TShaderData, done: ShaderData->Void, overrideContext: TShaderOverride = null) {
this.raw = raw; this.raw = raw;
this.name = raw.name; this.name = raw.name;
this.nextPass = raw.next_pass;
for (c in raw.contexts) contexts.push(null); for (c in raw.contexts) contexts.push(null);
var contextsLoaded = 0; var contextsLoaded = 0;

View File

@ -302,6 +302,10 @@ class MeshObject extends Object {
// Render mesh // Render mesh
var ldata = lod.data; var ldata = lod.data;
// Next pass rendering first (inverse order)
renderNextPass(g, context, bindParams, lod);
for (i in 0...ldata.geom.indexBuffers.length) { for (i in 0...ldata.geom.indexBuffers.length) {
var mi = ldata.geom.materialIndices[i]; var mi = ldata.geom.materialIndices[i];
@ -405,4 +409,85 @@ class MeshObject extends Object {
} }
} }
} }
function renderNextPass(g: Graphics, context: String, bindParams: Array<String>, lod: MeshObject) {
var ldata = lod.data;
for (i in 0...ldata.geom.indexBuffers.length) {
var mi = ldata.geom.materialIndices[i];
if (mi >= materials.length) continue;
var currentMaterial: MaterialData = materials[mi];
if (currentMaterial == null || currentMaterial.shader == null) continue;
var nextPassName: String = currentMaterial.shader.nextPass;
if (nextPassName == null || nextPassName == "") continue;
var nextMaterial: MaterialData = null;
for (mat in materials) {
// First try exact match
if (mat.name == nextPassName) {
nextMaterial = mat;
break;
}
// If no exact match, try to match base name for linked materials
if (mat.name.indexOf("_") > 0 && mat.name.substr(mat.name.length - 6) == ".blend") {
var baseName = mat.name.substring(0, mat.name.indexOf("_"));
if (baseName == nextPassName) {
nextMaterial = mat;
break;
}
}
}
if (nextMaterial == null) continue;
var nextMaterialContext: MaterialContext = null;
var nextShaderContext: ShaderContext = null;
for (j in 0...nextMaterial.raw.contexts.length) {
if (nextMaterial.raw.contexts[j].name.substr(0, context.length) == context) {
nextMaterialContext = nextMaterial.contexts[j];
nextShaderContext = nextMaterial.shader.getContext(context);
break;
}
}
if (nextShaderContext == null) continue;
if (skipContext(context, nextMaterial)) continue;
var elems = nextShaderContext.raw.vertex_elements;
// Uniforms
if (nextShaderContext.pipeState != lastPipeline) {
g.setPipeline(nextShaderContext.pipeState);
lastPipeline = nextShaderContext.pipeState;
}
Uniforms.setContextConstants(g, nextShaderContext, bindParams);
Uniforms.setObjectConstants(g, nextShaderContext, this);
Uniforms.setMaterialConstants(g, nextShaderContext, nextMaterialContext);
// VB / IB
#if lnx_deinterleaved
g.setVertexBuffers(ldata.geom.get(elems));
#else
if (ldata.geom.instancedVB != null) {
g.setVertexBuffers([ldata.geom.get(elems), ldata.geom.instancedVB]);
}
else {
g.setVertexBuffer(ldata.geom.get(elems));
}
#end
g.setIndexBuffer(ldata.geom.indexBuffers[i]);
// Draw next pass for this specific geometry section
if (ldata.geom.instanced) {
g.drawIndexedVerticesInstanced(ldata.geom.instanceCount, ldata.geom.start, ldata.geom.count);
}
else {
g.drawIndexedVertices(ldata.geom.start, ldata.geom.count);
}
}
}
} }

View File

@ -39,11 +39,11 @@ class Time {
} }
public static inline function time(): Float { public static inline function time(): Float {
return kha.Scheduler.time(); return kha.Scheduler.time() * scale;
} }
public static inline function realTime(): Float { public static inline function realTime(): Float {
return kha.Scheduler.realTime(); return kha.Scheduler.realTime() * scale;
} }
public static function update() { public static function update() {

View File

@ -94,34 +94,34 @@ class Tween {
// Way too much Reflect trickery.. // Way too much Reflect trickery..
var ps = Reflect.fields(a.props); var ps = Reflect.fields(a.props);
for (i in 0...ps.length) { for (j in 0...ps.length) {
var p = ps[i]; var p = ps[j];
var k = a._time / a.duration; var k = a._time / a.duration;
if (k > 1) k = 1; if (k > 1) k = 1;
if (a._comps[i] == 1) { if (a._comps[j] == 1) {
var fromVal: Float = a._x[i]; var fromVal: Float = a._x[j];
var toVal: Float = Reflect.getProperty(a.props, p); var toVal: Float = Reflect.getProperty(a.props, p);
var val: Float = fromVal + (toVal - fromVal) * eases[a.ease](k); var val: Float = fromVal + (toVal - fromVal) * eases[a.ease](k);
Reflect.setProperty(a.target, p, val); Reflect.setProperty(a.target, p, val);
} }
else { // _comps[i] == 4 else { // _comps[j] == 4
var obj = Reflect.getProperty(a.props, p); var obj = Reflect.getProperty(a.props, p);
var toX: Float = Reflect.getProperty(obj, "x"); var toX: Float = Reflect.getProperty(obj, "x");
var toY: Float = Reflect.getProperty(obj, "y"); var toY: Float = Reflect.getProperty(obj, "y");
var toZ: Float = Reflect.getProperty(obj, "z"); var toZ: Float = Reflect.getProperty(obj, "z");
var toW: Float = Reflect.getProperty(obj, "w"); var toW: Float = Reflect.getProperty(obj, "w");
if (a._normalize[i]) { if (a._normalize[j]) {
var qdot = (a._x[i] * toX) + (a._y[i] * toY) + (a._z[i] * toZ) + (a._w[i] * toW); var qdot = (a._x[j] * toX) + (a._y[j] * toY) + (a._z[j] * toZ) + (a._w[j] * toW);
if (qdot < 0.0) { if (qdot < 0.0) {
toX = -toX; toY = -toY; toZ = -toZ; toW = -toW; toX = -toX; toY = -toY; toZ = -toZ; toW = -toW;
} }
} }
var x: Float = a._x[i] + (toX - a._x[i]) * eases[a.ease](k); var x: Float = a._x[j] + (toX - a._x[j]) * eases[a.ease](k);
var y: Float = a._y[i] + (toY - a._y[i]) * eases[a.ease](k); var y: Float = a._y[j] + (toY - a._y[j]) * eases[a.ease](k);
var z: Float = a._z[i] + (toZ - a._z[i]) * eases[a.ease](k); var z: Float = a._z[j] + (toZ - a._z[j]) * eases[a.ease](k);
var w: Float = a._w[i] + (toW - a._w[i]) * eases[a.ease](k); var w: Float = a._w[j] + (toW - a._w[j]) * eases[a.ease](k);
if (a._normalize[i]) { if (a._normalize[j]) {
var l = Math.sqrt(x * x + y * y + z * z + w * w); var l = Math.sqrt(x * x + y * y + z * z + w * w);
if (l > 0.0) { if (l > 0.0) {
l = 1.0 / l; l = 1.0 / l;

View File

@ -0,0 +1,41 @@
package leenkx.logicnode;
class ProbabilisticIndexNode extends LogicNode {
public function new(tree: LogicTree) {
super(tree);
}
override function get(from: Int): Dynamic {
var probs: Array<Float> = [];
var probs_acum: Array<Float> = [];
var sum: Float = 0;
for (p in 0...inputs.length){
probs.push(inputs[p].get());
sum += probs[p];
}
if (sum > 1){
for (p in 0...probs.length)
probs[p] /= sum;
}
sum = 0;
for (p in 0...probs.length){
sum += probs[p];
probs_acum.push(sum);
}
var rand: Float = Math.random();
for (p in 0...probs.length){
if (p == 0 && rand <= probs_acum[p]) return p;
else if (0 < p && p < probs.length-1 && probs_acum[p-1] < rand && rand <= probs_acum[p]) return p;
else if (p == probs.length-1 && probs_acum[p-1] < rand) return p;
}
return null;
}
}

View File

@ -1,5 +1,7 @@
package leenkx.logicnode; package leenkx.logicnode;
import iron.data.SceneFormat;
class SetWorldNode extends LogicNode { class SetWorldNode extends LogicNode {
public function new(tree: LogicTree) { public function new(tree: LogicTree) {
@ -10,25 +12,6 @@ class SetWorldNode extends LogicNode {
var world: String = inputs[1].get(); var world: String = inputs[1].get();
if (world != null){ if (world != null){
//check if world shader data exists
var file: String = 'World_'+world+'_data';
#if lnx_json
file += ".json";
#elseif lnx_compress
file += ".lz4";
#else
file += '.lnx';
#end
var exists: Bool = false;
iron.data.Data.getBlob(file, function(b: kha.Blob) {
if (b != null) exists = true;
});
assert(Error, exists == true, "World must be either associated to a scene or have fake user");
iron.Scene.active.raw.world_ref = world; iron.Scene.active.raw.world_ref = world;
var npath = leenkx.renderpath.RenderPathCreator.get(); var npath = leenkx.renderpath.RenderPathCreator.get();
npath.loadShader("shader_datas/World_" + world + "/World_" + world); npath.loadShader("shader_datas/World_" + world + "/World_" + world);

View File

@ -641,18 +641,20 @@ class RenderPathForward {
var framebuffer = ""; var framebuffer = "";
#end #end
#if ((rp_antialiasing == "Off") || (rp_antialiasing == "FXAA")) RenderPathCreator.finalTarget = path.currentTarget;
var target = "";
#if ((rp_antialiasing == "Off") || (rp_antialiasing == "FXAA") || (!rp_render_to_texture))
{ {
RenderPathCreator.finalTarget = path.currentTarget; target = framebuffer;
path.setTarget(framebuffer);
} }
#else #else
{ {
path.setTarget("buf"); target = "buf";
RenderPathCreator.finalTarget = path.currentTarget;
} }
#end #end
path.setTarget(target);
#if rp_compositordepth #if rp_compositordepth
{ {
path.bindTarget("_main", "gbufferD"); path.bindTarget("_main", "gbufferD");
@ -671,6 +673,15 @@ class RenderPathForward {
} }
#end #end
#if rp_overlays
{
path.setTarget(target);
path.clearTarget(null, 1.0);
path.drawMeshes("overlay");
}
#end
#if ((rp_antialiasing == "SMAA") || (rp_antialiasing == "TAA")) #if ((rp_antialiasing == "SMAA") || (rp_antialiasing == "TAA"))
{ {
path.setTarget("bufa"); path.setTarget("bufa");
@ -701,12 +712,6 @@ class RenderPathForward {
} }
#end #end
#if rp_overlays
{
path.clearTarget(null, 1.0);
path.drawMeshes("overlay");
}
#end
} }
public static function setupDepthTexture() { public static function setupDepthTexture() {

View File

@ -3,33 +3,35 @@ package leenkx.system;
import haxe.Constraints.Function; import haxe.Constraints.Function;
class Signal { class Signal {
var callbacks:Array<Function> = []; var callbacks: Array<Function> = [];
public function new() { public function new() {
} }
public function connect(callback:Function) { public function connect(callback: Function) {
if (!callbacks.contains(callback)) callbacks.push(callback); if (!callbacks.contains(callback)) callbacks.push(callback);
} }
public function disconnect(callback:Function) { public function disconnect(callback: Function) {
if (callbacks.contains(callback)) callbacks.remove(callback); if (callbacks.contains(callback)) callbacks.remove(callback);
} }
public function emit(...args:Any) { public function emit(...args: Any) {
for (callback in callbacks) Reflect.callMethod(this, callback, args); for (callback in callbacks.copy()) {
if (callbacks.contains(callback)) Reflect.callMethod(null, callback, args);
}
} }
public function getConnections():Array<Function> { public function getConnections(): Array<Function> {
return callbacks; return callbacks;
} }
public function isConnected(callBack:Function):Bool { public function isConnected(callBack: Function):Bool {
return callbacks.contains(callBack); return callbacks.contains(callBack);
} }
public function isNull():Bool { public function isNull(): Bool {
return callbacks.length == 0; return callbacks.length == 0;
} }
} }

View File

@ -57,7 +57,7 @@ class Starter {
iron.Scene.getRenderPath = getRenderPath; iron.Scene.getRenderPath = getRenderPath;
#end #end
#if lnx_draworder_shader #if lnx_draworder_shader
iron.RenderPath.active.drawOrder = iron.RenderPath.DrawOrder.Shader; iron.RenderPath.active.drawOrder = iron.RenderPath.DrawOrder.Index;
#end // else Distance #end // else Distance
}); });
}); });

View File

@ -1,87 +1,243 @@
package leenkx.trait; package leenkx.trait;
import iron.Trait;
import iron.math.Vec4; import iron.math.Vec4;
import iron.system.Input; import iron.system.Input;
import iron.object.Object; import iron.object.Object;
import iron.object.CameraObject; import iron.object.CameraObject;
import leenkx.trait.physics.PhysicsWorld; import leenkx.trait.physics.PhysicsWorld;
import leenkx.trait.internal.CameraController; import leenkx.trait.physics.RigidBody;
import kha.FastFloat;
class FirstPersonController extends CameraController { class FirstPersonController extends Trait {
#if (!lnx_physics) #if (!lnx_physics)
public function new() { super(); } public function new() { super(); }
#else #else
var head: Object; @prop public var rotationSpeed:Float = 0.15;
static inline var rotationSpeed = 2.0; @prop public var maxPitch:Float = 2.2;
@prop public var minPitch:Float = 0.5;
@prop public var enableJump:Bool = true;
@prop public var jumpForce:Float = 22.0;
@prop public var moveSpeed:Float = 500.0;
public function new() { @prop public var forwardKey:String = "w";
super(); @prop public var backwardKey:String = "s";
@prop public var leftKey:String = "a";
@prop public var rightKey:String = "d";
@prop public var jumpKey:String = "space";
iron.Scene.active.notifyOnInit(init); @prop public var allowAirJump:Bool = false;
}
function init() { @prop public var canRun:Bool = true;
head = object.getChildOfType(CameraObject); @prop public var runKey:String = "shift";
@prop public var runSpeed:Float = 1000.0;
PhysicsWorld.active.notifyOnPreUpdate(preUpdate); // Sistema de estamina
notifyOnUpdate(update); @prop public var stamina:Bool = false;
notifyOnRemove(removed); @prop public var staminaBase:Float = 75.0;
} @prop public var staRecoverPerSec:Float = 5.0;
@prop public var staDecreasePerSec:Float = 5.0;
@prop public var staRecoverTime:Float = 2.0;
@prop public var staDecreasePerJump:Float = 5.0;
@prop public var enableFatigue:Bool = false;
@prop public var fatigueSpeed:Float = 0.5; // the reduction of movement when fatigue is activated...
@prop public var fatigueThreshold:Float = 30.0; // Tiempo corriendo sin parar para la activacion // Time running non-stop for activation...
@prop public var fatRecoveryThreshold:Float = 7.5; // Tiempo sin correr/saltar para salir de fatiga // Time without running/jumping to get rid of fatigue...
var xVec = Vec4.xAxis(); // Var Privadas
var zVec = Vec4.zAxis(); var head:CameraObject;
function preUpdate() { var pitch:Float = 0.0;
if (Input.occupied || !body.ready) return; var body:RigidBody;
var mouse = Input.getMouse(); var moveForward:Bool = false;
var kb = Input.getKeyboard(); var moveBackward:Bool = false;
var moveLeft:Bool = false;
var moveRight:Bool = false;
var isRunning:Bool = false;
if (mouse.started() && !mouse.locked) mouse.lock(); var canJump:Bool = true;
else if (kb.started("escape") && mouse.locked) mouse.unlock(); var staminaValue:Float = 0.0;
var timeSinceStop:Float = 0.0;
if (mouse.locked || mouse.down()) { var fatigueTimer:Float = 0.0;
head.transform.rotate(xVec, -mouse.movementY / 250 * rotationSpeed); var fatigueCooldown:Float = 0.0;
transform.rotate(zVec, -mouse.movementX / 250 * rotationSpeed); var isFatigueActive:Bool = false;
body.syncTransform();
public function new() {
super();
iron.Scene.active.notifyOnInit(init);
}
function init() {
body = object.getTrait(RigidBody);
head = object.getChildOfType(CameraObject);
PhysicsWorld.active.notifyOnPreUpdate(preUpdate);
notifyOnUpdate(update);
notifyOnRemove(removed);
staminaValue = staminaBase;
}
function removed() {
PhysicsWorld.active.removePreUpdate(preUpdate);
}
var zVec = Vec4.zAxis();
function preUpdate() {
if (Input.occupied || body == null) return;
var mouse = Input.getMouse();
var kb = Input.getKeyboard();
if (mouse.started() && !mouse.locked)
mouse.lock();
else if (kb.started("escape") && mouse.locked)
mouse.unlock();
if (mouse.locked || mouse.down()) {
var deltaTime:Float = iron.system.Time.delta;
object.transform.rotate(zVec, -mouse.movementX * rotationSpeed * deltaTime);
var deltaPitch:Float = -(mouse.movementY * rotationSpeed * deltaTime);
pitch += deltaPitch;
pitch = Math.max(minPitch, Math.min(maxPitch, pitch));
head.transform.setRotation(pitch, 0.0, 0.0);
body.syncTransform();
}
}
var dir:Vec4 = new Vec4();
function isFatigued():Bool {
return enableFatigue && isFatigueActive;
}
function update() {
if (body == null) return;
var deltaTime:Float = iron.system.Time.delta;
var kb = Input.getKeyboard();
moveForward = kb.down(forwardKey);
moveBackward = kb.down(backwardKey);
moveLeft = kb.down(leftKey);
moveRight = kb.down(rightKey);
var isMoving = moveForward || moveBackward || moveLeft || moveRight;
var isGrounded:Bool = false;
#if lnx_physics
var vel = body.getLinearVelocity();
if (Math.abs(vel.z) < 0.1) {
isGrounded = true;
}
#end
// Dejo establecido el salto para tener en cuenta la (enableFatigue) si es que es false/true....
if (isGrounded && !isFatigued()) {
canJump = true;
} }
} // Saltar con estamina
if (enableJump && kb.started(jumpKey) && canJump) {
var jumpPower = jumpForce;
// Disminuir el salto al 50% si la (stamina) esta por debajo o en el 20%.
if (stamina) {
if (staminaValue <= 0) {
jumpPower = 0;
} else if (staminaValue <= staminaBase * 0.2) {
jumpPower *= 0.5;
}
function removed() { staminaValue -= staDecreasePerJump;
PhysicsWorld.active.removePreUpdate(preUpdate); if (staminaValue < 0.0) staminaValue = 0.0;
} timeSinceStop = 0.0;
}
var dir = new Vec4(); if (jumpPower > 0) {
function update() { body.applyImpulse(new Vec4(0, 0, jumpPower));
if (!body.ready) return; if (!allowAirJump) canJump = false;
}
}
if (jump) { // Control de estamina y correr
body.applyImpulse(new Vec4(0, 0, 16)); if (canRun && kb.down(runKey) && isMoving) {
jump = false; if (stamina) {
if (staminaValue > 0.0) {
isRunning = true;
staminaValue -= staDecreasePerSec * deltaTime;
if (staminaValue < 0.0) staminaValue = 0.0;
} else {
isRunning = false;
}
} else {
isRunning = true;
}
} else {
isRunning = false;
}
// (temporizadores aparte)
if (isRunning) {
timeSinceStop = 0.0;
fatigueTimer += deltaTime;
fatigueCooldown = 0.0;
} else {
timeSinceStop += deltaTime;
fatigueCooldown += deltaTime;
}
// Evitar correr y saltar al estar fatigado...
if (isFatigued()) {
isRunning = false;
canJump = false;
} }
// Move // Activar fatiga despues de correr continuamente durante cierto umbral
dir.set(0, 0, 0); if (enableFatigue && fatigueTimer >= fatigueThreshold) {
if (moveForward) dir.add(transform.look()); isFatigueActive = true;
if (moveBackward) dir.add(transform.look().mult(-1)); }
if (moveLeft) dir.add(transform.right().mult(-1));
if (moveRight) dir.add(transform.right());
// Push down // Eliminar la fatiga despues de recuperarse
var btvec = body.getLinearVelocity(); if (enableFatigue && isFatigueActive && fatigueCooldown >= fatRecoveryThreshold) {
body.setLinearVelocity(0.0, 0.0, btvec.z - 1.0); isFatigueActive = false;
fatigueTimer = 0.0;
}
if (moveForward || moveBackward || moveLeft || moveRight) { // Recuperar estamina si no esta corriendo
var dirN = dir.normalize(); if (stamina && !isRunning && staminaValue < staminaBase && !isFatigued()) {
dirN.mult(6); if (timeSinceStop >= staRecoverTime) {
body.activate(); staminaValue += staRecoverPerSec * deltaTime;
body.setLinearVelocity(dirN.x, dirN.y, btvec.z - 1.0); if (staminaValue > staminaBase) staminaValue = staminaBase;
} }
}
// Keep vertical // Movimiento ejes (local)
body.setAngularFactor(0, 0, 0); dir.set(0, 0, 0);
camera.buildMatrix(); if (moveForward) dir.add(object.transform.look());
} if (moveBackward) dir.add(object.transform.look().mult(-1));
#end if (moveLeft) dir.add(object.transform.right().mult(-1));
if (moveRight) dir.add(object.transform.right());
var btvec = body.getLinearVelocity();
body.setLinearVelocity(0.0, 0.0, btvec.z - 1.0);
if (isMoving) {
var dirN = dir.normalize();
var baseSpeed = moveSpeed;
if (isRunning && moveForward) {
baseSpeed = runSpeed;
}
var currentSpeed = isFatigued() ? baseSpeed * fatigueSpeed : baseSpeed;
dirN.mult(currentSpeed * deltaTime);
body.activate();
body.setLinearVelocity(dirN.x, dirN.y, btvec.z - 1.0);
}
body.setAngularFactor(0, 0, 0);
head.buildMatrix();
}
#end
} }
// Stamina and fatigue system.....

View File

@ -1727,6 +1727,7 @@ class LeenkxExporter:
tangdata = np.array(tangdata, dtype='<i2') tangdata = np.array(tangdata, dtype='<i2')
# Output # Output
o['sorting_index'] = bobject.lnx_sorting_index
o['vertex_arrays'] = [] o['vertex_arrays'] = []
o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' }) o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' }) o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
@ -1979,7 +1980,7 @@ class LeenkxExporter:
if bobject.parent is None or bobject.parent.name not in collection.objects: if bobject.parent is None or bobject.parent.name not in collection.objects:
asset_name = lnx.utils.asset_name(bobject) asset_name = lnx.utils.asset_name(bobject)
if collection.library: if collection.library and not collection.name in self.scene.collection.children:
# Add external linked objects # Add external linked objects
# Iron differentiates objects based on their names, # Iron differentiates objects based on their names,
# so errors will happen if two objects with the # so errors will happen if two objects with the
@ -2208,6 +2209,9 @@ class LeenkxExporter:
elif material.lnx_cull_mode != 'clockwise': elif material.lnx_cull_mode != 'clockwise':
o['override_context'] = {} o['override_context'] = {}
o['override_context']['cull_mode'] = material.lnx_cull_mode o['override_context']['cull_mode'] = material.lnx_cull_mode
if material.lnx_compare_mode != 'less':
o['override_context'] = {}
o['override_context']['compare_mode'] = material.lnx_compare_mode
o['contexts'] = [] o['contexts'] = []
@ -2395,7 +2399,7 @@ class LeenkxExporter:
world = self.scene.world world = self.scene.world
if world is not None: if world is not None:
world_name = lnx.utils.safestr(world.name) world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
if world_name not in self.world_array: if world_name not in self.world_array:
self.world_array.append(world_name) self.world_array.append(world_name)
@ -2544,12 +2548,12 @@ class LeenkxExporter:
if collection.name.startswith(('RigidBodyWorld', 'Trait|')): if collection.name.startswith(('RigidBodyWorld', 'Trait|')):
continue continue
if self.scene.user_of_id(collection) or collection.library or collection in self.referenced_collections: if self.scene.user_of_id(collection) or collection in self.referenced_collections:
self.export_collection(collection) self.export_collection(collection)
if not LeenkxExporter.option_mesh_only: if not LeenkxExporter.option_mesh_only:
if self.scene.camera is not None: if self.scene.camera is not None:
self.output['camera_ref'] = self.scene.camera.name self.output['camera_ref'] = lnx.utils.asset_name(self.scene.camera) if self.scene.library else self.scene.camera.name
else: else:
if self.scene.name == lnx.utils.get_project_scene_name(): if self.scene.name == lnx.utils.get_project_scene_name():
log.warn(f'Scene "{self.scene.name}" is missing a camera') log.warn(f'Scene "{self.scene.name}" is missing a camera')
@ -2573,7 +2577,7 @@ class LeenkxExporter:
self.export_tilesheets() self.export_tilesheets()
if self.scene.world is not None: if self.scene.world is not None:
self.output['world_ref'] = lnx.utils.safestr(self.scene.world.name) self.output['world_ref'] = lnx.utils.safestr(lnx.utils.asset_name(self.scene.world) if self.scene.world.library else self.scene.world.name)
if self.scene.use_gravity: if self.scene.use_gravity:
self.output['gravity'] = [self.scene.gravity[0], self.scene.gravity[1], self.scene.gravity[2]] self.output['gravity'] = [self.scene.gravity[0], self.scene.gravity[1], self.scene.gravity[2]]
@ -3376,7 +3380,7 @@ class LeenkxExporter:
if mobile_mat: if mobile_mat:
lnx_radiance = False lnx_radiance = False
out_probe = {'name': world.name} out_probe = {'name': lnx.utils.asset_name(world) if world.library else world.name}
if lnx_irradiance: if lnx_irradiance:
ext = '' if wrd.lnx_minimize else '.json' ext = '' if wrd.lnx_minimize else '.json'
out_probe['irradiance'] = irrsharmonics + '_irradiance' + ext out_probe['irradiance'] = irrsharmonics + '_irradiance' + ext

View File

@ -1,445 +1,446 @@
""" """
Exports smaller geometry but is slower. Exports smaller geometry but is slower.
To be replaced with https://github.com/zeux/meshoptimizer To be replaced with https://github.com/zeux/meshoptimizer
""" """
from typing import Optional from typing import Optional
import bpy import bpy
from mathutils import Vector from mathutils import Vector
import numpy as np import numpy as np
import lnx.utils import lnx.utils
from lnx import log from lnx import log
if lnx.is_reload(__name__): if lnx.is_reload(__name__):
log = lnx.reload_module(log) log = lnx.reload_module(log)
lnx.utils = lnx.reload_module(lnx.utils) lnx.utils = lnx.reload_module(lnx.utils)
else: else:
lnx.enable_reload(__name__) lnx.enable_reload(__name__)
class Vertex: class Vertex:
__slots__ = ("co", "normal", "uvs", "col", "loop_indices", "index", "bone_weights", "bone_indices", "bone_count", "vertex_index") __slots__ = ("co", "normal", "uvs", "col", "loop_indices", "index", "bone_weights", "bone_indices", "bone_count", "vertex_index")
def __init__(self, mesh: bpy.types.Mesh, loop: bpy.types.MeshLoop, vcol0: Optional[bpy.types.Attribute]): def __init__(self, mesh: bpy.types.Mesh, loop: bpy.types.MeshLoop, vcol0: Optional[bpy.types.Attribute]):
self.vertex_index = loop.vertex_index self.vertex_index = loop.vertex_index
loop_idx = loop.index loop_idx = loop.index
self.co = mesh.vertices[self.vertex_index].co[:] self.co = mesh.vertices[self.vertex_index].co[:]
self.normal = loop.normal[:] self.normal = loop.normal[:]
self.uvs = tuple(layer.data[loop_idx].uv[:] for layer in mesh.uv_layers) self.uvs = tuple(layer.data[loop_idx].uv[:] for layer in mesh.uv_layers)
self.col = [0.0, 0.0, 0.0] if vcol0 is None else vcol0.data[loop_idx].color[:] self.col = [0.0, 0.0, 0.0] if vcol0 is None else vcol0.data[loop_idx].color[:]
self.loop_indices = [loop_idx] self.loop_indices = [loop_idx]
self.index = 0 self.index = 0
def __hash__(self): def __hash__(self):
return hash((self.co, self.normal, self.uvs)) return hash((self.co, self.normal, self.uvs))
def __eq__(self, other): def __eq__(self, other):
eq = ( eq = (
(self.co == other.co) and (self.co == other.co) and
(self.normal == other.normal) and (self.normal == other.normal) and
(self.uvs == other.uvs) and (self.uvs == other.uvs) and
(self.col == other.col) (self.col == other.col)
) )
if eq: if eq:
indices = self.loop_indices + other.loop_indices indices = self.loop_indices + other.loop_indices
self.loop_indices = indices self.loop_indices = indices
other.loop_indices = indices other.loop_indices = indices
return eq return eq
def calc_tangents(posa, nora, uva, ias, scale_pos): def calc_tangents(posa, nora, uva, ias, scale_pos):
num_verts = int(len(posa) / 4) num_verts = int(len(posa) / 4)
tangents = np.empty(num_verts * 3, dtype='<f4') tangents = np.empty(num_verts * 3, dtype='<f4')
# bitangents = np.empty(num_verts * 3, dtype='<f4') # bitangents = np.empty(num_verts * 3, dtype='<f4')
for ar in ias: for ar in ias:
ia = ar['values'] ia = ar['values']
num_tris = int(len(ia) / 3) num_tris = int(len(ia) / 3)
for i in range(0, num_tris): for i in range(0, num_tris):
i0 = ia[i * 3 ] i0 = ia[i * 3 ]
i1 = ia[i * 3 + 1] i1 = ia[i * 3 + 1]
i2 = ia[i * 3 + 2] i2 = ia[i * 3 + 2]
v0 = Vector((posa[i0 * 4], posa[i0 * 4 + 1], posa[i0 * 4 + 2])) v0 = Vector((posa[i0 * 4], posa[i0 * 4 + 1], posa[i0 * 4 + 2]))
v1 = Vector((posa[i1 * 4], posa[i1 * 4 + 1], posa[i1 * 4 + 2])) v1 = Vector((posa[i1 * 4], posa[i1 * 4 + 1], posa[i1 * 4 + 2]))
v2 = Vector((posa[i2 * 4], posa[i2 * 4 + 1], posa[i2 * 4 + 2])) v2 = Vector((posa[i2 * 4], posa[i2 * 4 + 1], posa[i2 * 4 + 2]))
uv0 = Vector((uva[i0 * 2], uva[i0 * 2 + 1])) uv0 = Vector((uva[i0 * 2], uva[i0 * 2 + 1]))
uv1 = Vector((uva[i1 * 2], uva[i1 * 2 + 1])) uv1 = Vector((uva[i1 * 2], uva[i1 * 2 + 1]))
uv2 = Vector((uva[i2 * 2], uva[i2 * 2 + 1])) uv2 = Vector((uva[i2 * 2], uva[i2 * 2 + 1]))
deltaPos1 = v1 - v0 deltaPos1 = v1 - v0
deltaPos2 = v2 - v0 deltaPos2 = v2 - v0
deltaUV1 = uv1 - uv0 deltaUV1 = uv1 - uv0
deltaUV2 = uv2 - uv0 deltaUV2 = uv2 - uv0
d = (deltaUV1.x * deltaUV2.y - deltaUV1.y * deltaUV2.x) d = (deltaUV1.x * deltaUV2.y - deltaUV1.y * deltaUV2.x)
if d != 0: if d != 0:
r = 1.0 / d r = 1.0 / d
else: else:
r = 1.0 r = 1.0
tangent = (deltaPos1 * deltaUV2.y - deltaPos2 * deltaUV1.y) * r tangent = (deltaPos1 * deltaUV2.y - deltaPos2 * deltaUV1.y) * r
# bitangent = (deltaPos2 * deltaUV1.x - deltaPos1 * deltaUV2.x) * r # bitangent = (deltaPos2 * deltaUV1.x - deltaPos1 * deltaUV2.x) * r
tangents[i0 * 3 ] += tangent.x tangents[i0 * 3 ] += tangent.x
tangents[i0 * 3 + 1] += tangent.y tangents[i0 * 3 + 1] += tangent.y
tangents[i0 * 3 + 2] += tangent.z tangents[i0 * 3 + 2] += tangent.z
tangents[i1 * 3 ] += tangent.x tangents[i1 * 3 ] += tangent.x
tangents[i1 * 3 + 1] += tangent.y tangents[i1 * 3 + 1] += tangent.y
tangents[i1 * 3 + 2] += tangent.z tangents[i1 * 3 + 2] += tangent.z
tangents[i2 * 3 ] += tangent.x tangents[i2 * 3 ] += tangent.x
tangents[i2 * 3 + 1] += tangent.y tangents[i2 * 3 + 1] += tangent.y
tangents[i2 * 3 + 2] += tangent.z tangents[i2 * 3 + 2] += tangent.z
# bitangents[i0 * 3 ] += bitangent.x # bitangents[i0 * 3 ] += bitangent.x
# bitangents[i0 * 3 + 1] += bitangent.y # bitangents[i0 * 3 + 1] += bitangent.y
# bitangents[i0 * 3 + 2] += bitangent.z # bitangents[i0 * 3 + 2] += bitangent.z
# bitangents[i1 * 3 ] += bitangent.x # bitangents[i1 * 3 ] += bitangent.x
# bitangents[i1 * 3 + 1] += bitangent.y # bitangents[i1 * 3 + 1] += bitangent.y
# bitangents[i1 * 3 + 2] += bitangent.z # bitangents[i1 * 3 + 2] += bitangent.z
# bitangents[i2 * 3 ] += bitangent.x # bitangents[i2 * 3 ] += bitangent.x
# bitangents[i2 * 3 + 1] += bitangent.y # bitangents[i2 * 3 + 1] += bitangent.y
# bitangents[i2 * 3 + 2] += bitangent.z # bitangents[i2 * 3 + 2] += bitangent.z
# Orthogonalize # Orthogonalize
for i in range(0, num_verts): for i in range(0, num_verts):
t = Vector((tangents[i * 3], tangents[i * 3 + 1], tangents[i * 3 + 2])) t = Vector((tangents[i * 3], tangents[i * 3 + 1], tangents[i * 3 + 2]))
# b = Vector((bitangents[i * 3], bitangents[i * 3 + 1], bitangents[i * 3 + 2])) # b = Vector((bitangents[i * 3], bitangents[i * 3 + 1], bitangents[i * 3 + 2]))
n = Vector((nora[i * 2], nora[i * 2 + 1], posa[i * 4 + 3] / scale_pos)) n = Vector((nora[i * 2], nora[i * 2 + 1], posa[i * 4 + 3] / scale_pos))
v = t - n * n.dot(t) v = t - n * n.dot(t)
v.normalize() v.normalize()
# Calculate handedness # Calculate handedness
# cnv = n.cross(v) # cnv = n.cross(v)
# if cnv.dot(b) < 0.0: # if cnv.dot(b) < 0.0:
# v = v * -1.0 # v = v * -1.0
tangents[i * 3 ] = v.x tangents[i * 3 ] = v.x
tangents[i * 3 + 1] = v.y tangents[i * 3 + 1] = v.y
tangents[i * 3 + 2] = v.z tangents[i * 3 + 2] = v.z
return tangents return tangents
def export_mesh_data(self, export_mesh: bpy.types.Mesh, bobject: bpy.types.Object, o, has_armature=False): def export_mesh_data(self, export_mesh: bpy.types.Mesh, bobject: bpy.types.Object, o, has_armature=False):
if bpy.app.version < (4, 1, 0): if bpy.app.version < (4, 1, 0):
export_mesh.calc_normals_split() export_mesh.calc_normals_split()
else: else:
updated_normals = export_mesh.corner_normals updated_normals = export_mesh.corner_normals
# exportMesh.calc_loop_triangles() # exportMesh.calc_loop_triangles()
vcol0 = self.get_nth_vertex_colors(export_mesh, 0) vcol0 = self.get_nth_vertex_colors(export_mesh, 0)
vert_list = {Vertex(export_mesh, loop, vcol0): 0 for loop in export_mesh.loops}.keys() vert_list = {Vertex(export_mesh, loop, vcol0): 0 for loop in export_mesh.loops}.keys()
num_verts = len(vert_list) num_verts = len(vert_list)
num_uv_layers = len(export_mesh.uv_layers) num_uv_layers = len(export_mesh.uv_layers)
# Check if shape keys were exported # Check if shape keys were exported
has_morph_target = self.get_shape_keys(bobject.data) has_morph_target = self.get_shape_keys(bobject.data)
if has_morph_target: if has_morph_target:
# Shape keys UV are exported separately, so reduce UV count by 1 # Shape keys UV are exported separately, so reduce UV count by 1
num_uv_layers -= 1 num_uv_layers -= 1
morph_uv_index = self.get_morph_uv_index(bobject.data) morph_uv_index = self.get_morph_uv_index(bobject.data)
has_tex = self.get_export_uvs(export_mesh) and num_uv_layers > 0 has_tex = self.get_export_uvs(export_mesh) or num_uv_layers > 0 # TODO FIXME: this should use an `and` instead of `or`. Workaround to completely ignore if the mesh has the `export_uvs` flag. Only checking the `uv_layers` to bypass issues with materials in linked objects.
if self.has_baked_material(bobject, export_mesh.materials): if self.has_baked_material(bobject, export_mesh.materials):
has_tex = True has_tex = True
has_tex1 = has_tex and num_uv_layers > 1 has_tex1 = has_tex and num_uv_layers > 1
num_colors = self.get_num_vertex_colors(export_mesh) num_colors = self.get_num_vertex_colors(export_mesh)
has_col = self.get_export_vcols(export_mesh) and num_colors > 0 has_col = self.get_export_vcols(export_mesh) and num_colors > 0
has_tang = self.has_tangents(export_mesh) has_tang = self.has_tangents(export_mesh)
pdata = np.empty(num_verts * 4, dtype='<f4') # p.xyz, n.z pdata = np.empty(num_verts * 4, dtype='<f4') # p.xyz, n.z
ndata = np.empty(num_verts * 2, dtype='<f4') # n.xy ndata = np.empty(num_verts * 2, dtype='<f4') # n.xy
if has_tex or has_morph_target: if has_tex or has_morph_target:
uv_layers = export_mesh.uv_layers uv_layers = export_mesh.uv_layers
maxdim = 1.0 maxdim = 1.0
maxdim_uvlayer = None maxdim_uvlayer = None
if has_tex: if has_tex:
t0map = 0 # Get active uvmap t0map = 0 # Get active uvmap
t0data = np.empty(num_verts * 2, dtype='<f4') t0data = np.empty(num_verts * 2, dtype='<f4')
if uv_layers is not None: if uv_layers is not None:
if 'UVMap_baked' in uv_layers: if 'UVMap_baked' in uv_layers:
for i in range(0, len(uv_layers)): for i in range(0, len(uv_layers)):
if uv_layers[i].name == 'UVMap_baked': if uv_layers[i].name == 'UVMap_baked':
t0map = i t0map = i
break break
else: else:
for i in range(0, len(uv_layers)): for i in range(0, len(uv_layers)):
if uv_layers[i].active_render and uv_layers[i].name != 'UVMap_shape_key': if uv_layers[i].active_render and uv_layers[i].name != 'UVMap_shape_key':
t0map = i t0map = i
break break
if has_tex1: if has_tex1:
for i in range(0, len(uv_layers)): for i in range(0, len(uv_layers)):
# Not UVMap 0 # Not UVMap 0
if i != t0map: if i != t0map:
# Not Shape Key UVMap # Not Shape Key UVMap
if has_morph_target and uv_layers[i].name == 'UVMap_shape_key': if has_morph_target and uv_layers[i].name == 'UVMap_shape_key':
continue continue
# Neither UVMap 0 Nor Shape Key Map # Neither UVMap 0 Nor Shape Key Map
t1map = i t1map = i
t1data = np.empty(num_verts * 2, dtype='<f4') t1data = np.empty(num_verts * 2, dtype='<f4')
# Scale for packed coords # Scale for packed coords
lay0 = uv_layers[t0map] lay0 = uv_layers[t0map]
maxdim_uvlayer = lay0 maxdim_uvlayer = lay0
for v in lay0.data: for v in lay0.data:
if abs(v.uv[0]) > maxdim: if abs(v.uv[0]) > maxdim:
maxdim = abs(v.uv[0]) maxdim = abs(v.uv[0])
if abs(v.uv[1]) > maxdim: if abs(v.uv[1]) > maxdim:
maxdim = abs(v.uv[1]) maxdim = abs(v.uv[1])
if has_tex1: if has_tex1:
lay1 = uv_layers[t1map] lay1 = uv_layers[t1map]
for v in lay1.data: for v in lay1.data:
if abs(v.uv[0]) > maxdim: if abs(v.uv[0]) > maxdim:
maxdim = abs(v.uv[0]) maxdim = abs(v.uv[0])
maxdim_uvlayer = lay1 maxdim_uvlayer = lay1
if abs(v.uv[1]) > maxdim: if abs(v.uv[1]) > maxdim:
maxdim = abs(v.uv[1]) maxdim = abs(v.uv[1])
maxdim_uvlayer = lay1 maxdim_uvlayer = lay1
if has_morph_target: if has_morph_target:
morph_data = np.empty(num_verts * 2, dtype='<f4') morph_data = np.empty(num_verts * 2, dtype='<f4')
lay2 = uv_layers[morph_uv_index] lay2 = uv_layers[morph_uv_index]
for v in lay2.data: for v in lay2.data:
if abs(v.uv[0]) > maxdim: if abs(v.uv[0]) > maxdim:
maxdim = abs(v.uv[0]) maxdim = abs(v.uv[0])
maxdim_uvlayer = lay2 maxdim_uvlayer = lay2
if abs(v.uv[1]) > maxdim: if abs(v.uv[1]) > maxdim:
maxdim = abs(v.uv[1]) maxdim = abs(v.uv[1])
maxdim_uvlayer = lay2 maxdim_uvlayer = lay2
if maxdim > 1: if maxdim > 1:
o['scale_tex'] = maxdim o['scale_tex'] = maxdim
invscale_tex = (1 / o['scale_tex']) * 32767 invscale_tex = (1 / o['scale_tex']) * 32767
else: else:
invscale_tex = 1 * 32767 invscale_tex = 1 * 32767
self.check_uv_precision(export_mesh, maxdim, maxdim_uvlayer, invscale_tex) self.check_uv_precision(export_mesh, maxdim, maxdim_uvlayer, invscale_tex)
if has_col: if has_col:
cdata = np.empty(num_verts * 3, dtype='<f4') cdata = np.empty(num_verts * 3, dtype='<f4')
# Save aabb # Save aabb
self.calc_aabb(bobject) self.calc_aabb(bobject)
# Scale for packed coords # Scale for packed coords
maxdim = max(bobject.data.lnx_aabb[0], max(bobject.data.lnx_aabb[1], bobject.data.lnx_aabb[2])) maxdim = max(bobject.data.lnx_aabb[0], max(bobject.data.lnx_aabb[1], bobject.data.lnx_aabb[2]))
if maxdim > 2: if maxdim > 2:
o['scale_pos'] = maxdim / 2 o['scale_pos'] = maxdim / 2
else: else:
o['scale_pos'] = 1.0 o['scale_pos'] = 1.0
if has_armature: # Allow up to 2x bigger bounds for skinned mesh if has_armature: # Allow up to 2x bigger bounds for skinned mesh
o['scale_pos'] *= 2.0 o['scale_pos'] *= 2.0
scale_pos = o['scale_pos'] scale_pos = o['scale_pos']
invscale_pos = (1 / scale_pos) * 32767 invscale_pos = (1 / scale_pos) * 32767
# Make arrays # Make arrays
for i, v in enumerate(vert_list): for i, v in enumerate(vert_list):
v.index = i v.index = i
co = v.co co = v.co
normal = v.normal normal = v.normal
i4 = i * 4 i4 = i * 4
i2 = i * 2 i2 = i * 2
pdata[i4 ] = co[0] pdata[i4 ] = co[0]
pdata[i4 + 1] = co[1] pdata[i4 + 1] = co[1]
pdata[i4 + 2] = co[2] pdata[i4 + 2] = co[2]
pdata[i4 + 3] = normal[2] * scale_pos # Cancel scale pdata[i4 + 3] = normal[2] * scale_pos # Cancel scale
ndata[i2 ] = normal[0] ndata[i2 ] = normal[0]
ndata[i2 + 1] = normal[1] ndata[i2 + 1] = normal[1]
if has_tex: if has_tex:
uv = v.uvs[t0map] uv = v.uvs[t0map]
t0data[i2 ] = uv[0] t0data[i2 ] = uv[0]
t0data[i2 + 1] = 1.0 - uv[1] # Reverse Y t0data[i2 + 1] = 1.0 - uv[1] # Reverse Y
if has_tex1: if has_tex1:
uv = v.uvs[t1map] uv = v.uvs[t1map]
t1data[i2 ] = uv[0] t1data[i2 ] = uv[0]
t1data[i2 + 1] = 1.0 - uv[1] t1data[i2 + 1] = 1.0 - uv[1]
if has_morph_target: if has_morph_target:
uv = v.uvs[morph_uv_index] uv = v.uvs[morph_uv_index]
morph_data[i2 ] = uv[0] morph_data[i2 ] = uv[0]
morph_data[i2 + 1] = 1.0 - uv[1] morph_data[i2 + 1] = 1.0 - uv[1]
if has_col: if has_col:
i3 = i * 3 i3 = i * 3
cdata[i3 ] = v.col[0] cdata[i3 ] = v.col[0]
cdata[i3 + 1] = v.col[1] cdata[i3 + 1] = v.col[1]
cdata[i3 + 2] = v.col[2] cdata[i3 + 2] = v.col[2]
# Indices # Indices
# Create dict for every material slot # Create dict for every material slot
prims = {ma.name if ma else '': [] for ma in export_mesh.materials} prims = {ma.name if ma else '': [] for ma in export_mesh.materials}
v_maps = {ma.name if ma else '': [] for ma in export_mesh.materials} v_maps = {ma.name if ma else '': [] for ma in export_mesh.materials}
if not prims: if not prims:
# No materials # No materials
prims = {'': []} prims = {'': []}
v_maps = {'': []} v_maps = {'': []}
# Create dict of {loop_indices : vertex} with each loop_index in each vertex in Vertex_list # Create dict of {loop_indices : vertex} with each loop_index in each vertex in Vertex_list
vert_dict = {i : v for v in vert_list for i in v.loop_indices} vert_dict = {i : v for v in vert_list for i in v.loop_indices}
# For each polygon in a mesh # For each polygon in a mesh
for poly in export_mesh.polygons: for poly in export_mesh.polygons:
# Index of the first loop of this polygon # Index of the first loop of this polygon
first = poly.loop_start first = poly.loop_start
# No materials assigned # No materials assigned
if len(export_mesh.materials) == 0: if len(export_mesh.materials) == 0:
# Get prim # Get prim
prim = prims[''] prim = prims['']
v_map = v_maps[''] v_map = v_maps['']
else: else:
# First material # First material
mat = export_mesh.materials[min(poly.material_index, len(export_mesh.materials) - 1)] mat = export_mesh.materials[min(poly.material_index, len(export_mesh.materials) - 1)]
# Get prim for this material # Get prim for this material
prim = prims[mat.name if mat else ''] prim = prims[mat.name if mat else '']
v_map = v_maps[mat.name if mat else ''] v_map = v_maps[mat.name if mat else '']
# List of indices for each loop_index belonging to this polygon # List of indices for each loop_index belonging to this polygon
indices = [vert_dict[i].index for i in range(first, first+poly.loop_total)] indices = [vert_dict[i].index for i in range(first, first+poly.loop_total)]
v_indices = [vert_dict[i].vertex_index for i in range(first, first+poly.loop_total)] v_indices = [vert_dict[i].vertex_index for i in range(first, first+poly.loop_total)]
# If 3 loops per polygon (Triangle?) # If 3 loops per polygon (Triangle?)
if poly.loop_total == 3: if poly.loop_total == 3:
prim += indices prim += indices
v_map += v_indices v_map += v_indices
# If > 3 loops per polygon (Non-Triangular?) # If > 3 loops per polygon (Non-Triangular?)
elif poly.loop_total > 3: elif poly.loop_total > 3:
for i in range(poly.loop_total-2): for i in range(poly.loop_total-2):
prim += (indices[-1], indices[i], indices[i + 1]) prim += (indices[-1], indices[i], indices[i + 1])
v_map += (v_indices[-1], v_indices[i], v_indices[i + 1]) v_map += (v_indices[-1], v_indices[i], v_indices[i + 1])
# Write indices # Write indices
o['index_arrays'] = [] o['index_arrays'] = []
for mat, prim in prims.items(): for mat, prim in prims.items():
idata = [0] * len(prim) idata = [0] * len(prim)
v_map_data = [0] * len(prim) v_map_data = [0] * len(prim)
v_map_sub = v_maps[mat] v_map_sub = v_maps[mat]
for i, v in enumerate(prim): for i, v in enumerate(prim):
idata[i] = v idata[i] = v
v_map_data[i] = v_map_sub[i] v_map_data[i] = v_map_sub[i]
if len(idata) == 0: # No face assigned if len(idata) == 0: # No face assigned
continue continue
ia = {'values': idata, 'material': 0, 'vertex_map': v_map_data} ia = {'values': idata, 'material': 0, 'vertex_map': v_map_data}
# Find material index for multi-mat mesh # Find material index for multi-mat mesh
if len(export_mesh.materials) > 1: if len(export_mesh.materials) > 1:
for i in range(0, len(export_mesh.materials)): for i in range(0, len(export_mesh.materials)):
if (export_mesh.materials[i] is not None and mat == export_mesh.materials[i].name) or \ if (export_mesh.materials[i] is not None and mat == export_mesh.materials[i].name) or \
(export_mesh.materials[i] is None and mat == ''): # Default material for empty slots (export_mesh.materials[i] is None and mat == ''): # Default material for empty slots
ia['material'] = i ia['material'] = i
break break
o['index_arrays'].append(ia) o['index_arrays'].append(ia)
if has_tang: if has_tang:
tangdata = calc_tangents(pdata, ndata, t0data, o['index_arrays'], scale_pos) tangdata = calc_tangents(pdata, ndata, t0data, o['index_arrays'], scale_pos)
pdata *= invscale_pos pdata *= invscale_pos
ndata *= 32767 ndata *= 32767
pdata = np.array(pdata, dtype='<i2') pdata = np.array(pdata, dtype='<i2')
ndata = np.array(ndata, dtype='<i2') ndata = np.array(ndata, dtype='<i2')
if has_tex: if has_tex:
t0data *= invscale_tex t0data *= invscale_tex
t0data = np.array(t0data, dtype='<i2') t0data = np.array(t0data, dtype='<i2')
if has_tex1: if has_tex1:
t1data *= invscale_tex t1data *= invscale_tex
t1data = np.array(t1data, dtype='<i2') t1data = np.array(t1data, dtype='<i2')
if has_morph_target: if has_morph_target:
morph_data *= invscale_tex morph_data *= invscale_tex
morph_data = np.array(morph_data, dtype='<i2') morph_data = np.array(morph_data, dtype='<i2')
if has_col: if has_col:
cdata *= 32767 cdata *= 32767
cdata = np.array(cdata, dtype='<i2') cdata = np.array(cdata, dtype='<i2')
if has_tang: if has_tang:
tangdata *= 32767 tangdata *= 32767
tangdata = np.array(tangdata, dtype='<i2') tangdata = np.array(tangdata, dtype='<i2')
# Output # Output
o['vertex_arrays'] = [] o['sorting_index'] = bobject.lnx_sorting_index
o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' }) o['vertex_arrays'] = []
o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' }) o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
if has_tex: o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' }) if has_tex:
if has_tex1: o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' })
o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' }) if has_tex1:
if has_morph_target: o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' })
o['vertex_arrays'].append({ 'attrib': 'morph', 'values': morph_data, 'data': 'short2norm' }) if has_morph_target:
if has_col: o['vertex_arrays'].append({ 'attrib': 'morph', 'values': morph_data, 'data': 'short2norm' })
o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 }) if has_col:
if has_tang: o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 })
o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 }) if has_tang:
o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 })
return vert_list
return vert_list
def export_skin(self, bobject, armature, vert_list, o):
# This function exports all skinning data, which includes the skeleton def export_skin(self, bobject, armature, vert_list, o):
# and per-vertex bone influence data # This function exports all skinning data, which includes the skeleton
oskin = {} # and per-vertex bone influence data
o['skin'] = oskin oskin = {}
o['skin'] = oskin
# Write the skin bind pose transform
otrans = {} # Write the skin bind pose transform
oskin['transform'] = otrans otrans = {}
otrans['values'] = self.write_matrix(bobject.matrix_world) oskin['transform'] = otrans
otrans['values'] = self.write_matrix(bobject.matrix_world)
# Write the bone object reference array
oskin['bone_ref_array'] = [] # Write the bone object reference array
oskin['bone_len_array'] = [] oskin['bone_ref_array'] = []
oskin['bone_len_array'] = []
bone_array = armature.data.bones
bone_count = len(bone_array) bone_array = armature.data.bones
rpdat = lnx.utils.get_rp() bone_count = len(bone_array)
max_bones = rpdat.lnx_skin_max_bones rpdat = lnx.utils.get_rp()
if bone_count > max_bones: max_bones = rpdat.lnx_skin_max_bones
log.warn(bobject.name + ' - ' + str(bone_count) + ' bones found, exceeds maximum of ' + str(max_bones) + ' bones defined - raise the value in Camera Data - Leenkx Render Props - Max Bones') if bone_count > max_bones:
log.warn(bobject.name + ' - ' + str(bone_count) + ' bones found, exceeds maximum of ' + str(max_bones) + ' bones defined - raise the value in Camera Data - Leenkx Render Props - Max Bones')
for i in range(bone_count):
boneRef = self.find_bone(bone_array[i].name) for i in range(bone_count):
if boneRef: boneRef = self.find_bone(bone_array[i].name)
oskin['bone_ref_array'].append(boneRef[1]["structName"]) if boneRef:
oskin['bone_len_array'].append(bone_array[i].length) oskin['bone_ref_array'].append(boneRef[1]["structName"])
else: oskin['bone_len_array'].append(bone_array[i].length)
oskin['bone_ref_array'].append("") else:
oskin['bone_len_array'].append(0.0) oskin['bone_ref_array'].append("")
oskin['bone_len_array'].append(0.0)
# Write the bind pose transform array
oskin['transformsI'] = [] # Write the bind pose transform array
for i in range(bone_count): oskin['transformsI'] = []
skeletonI = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe() for i in range(bone_count):
skeletonI = (skeletonI @ bobject.matrix_world) skeletonI = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe()
oskin['transformsI'].append(self.write_matrix(skeletonI)) skeletonI = (skeletonI @ bobject.matrix_world)
oskin['transformsI'].append(self.write_matrix(skeletonI))
# Export the per-vertex bone influence data
group_remap = [] # Export the per-vertex bone influence data
for group in bobject.vertex_groups: group_remap = []
for i in range(bone_count): for group in bobject.vertex_groups:
if bone_array[i].name == group.name: for i in range(bone_count):
group_remap.append(i) if bone_array[i].name == group.name:
break group_remap.append(i)
else: break
group_remap.append(-1) else:
group_remap.append(-1)
bone_count_array = np.empty(len(vert_list), dtype='<i2')
bone_index_array = np.empty(len(vert_list) * 4, dtype='<i2') bone_count_array = np.empty(len(vert_list), dtype='<i2')
bone_weight_array = np.empty(len(vert_list) * 4, dtype='<i2') bone_index_array = np.empty(len(vert_list) * 4, dtype='<i2')
bone_weight_array = np.empty(len(vert_list) * 4, dtype='<i2')
vertices = bobject.data.vertices
count = 0 vertices = bobject.data.vertices
for index, v in enumerate(vert_list): count = 0
bone_count = 0 for index, v in enumerate(vert_list):
total_weight = 0.0 bone_count = 0
bone_values = [] total_weight = 0.0
for g in vertices[v.vertex_index].groups: bone_values = []
bone_index = group_remap[g.group] for g in vertices[v.vertex_index].groups:
bone_weight = g.weight bone_index = group_remap[g.group]
if bone_index >= 0: #and bone_weight != 0.0: bone_weight = g.weight
bone_values.append((bone_weight, bone_index)) if bone_index >= 0: #and bone_weight != 0.0:
total_weight += bone_weight bone_values.append((bone_weight, bone_index))
bone_count += 1 total_weight += bone_weight
bone_count += 1
if bone_count > 4:
bone_count = 4 if bone_count > 4:
bone_values.sort(reverse=True) bone_count = 4
bone_values = bone_values[:4] bone_values.sort(reverse=True)
bone_values = bone_values[:4]
bone_count_array[index] = bone_count
for bv in bone_values: bone_count_array[index] = bone_count
bone_weight_array[count] = bv[0] * 32767 for bv in bone_values:
bone_index_array[count] = bv[1] bone_weight_array[count] = bv[0] * 32767
count += 1 bone_index_array[count] = bv[1]
count += 1
if total_weight not in (0.0, 1.0):
normalizer = 1.0 / total_weight if total_weight not in (0.0, 1.0):
for i in range(bone_count): normalizer = 1.0 / total_weight
bone_weight_array[count - i - 1] *= normalizer for i in range(bone_count):
bone_weight_array[count - i - 1] *= normalizer
oskin['bone_count_array'] = bone_count_array
oskin['bone_index_array'] = bone_index_array[:count] oskin['bone_count_array'] = bone_count_array
oskin['bone_weight_array'] = bone_weight_array[:count] oskin['bone_index_array'] = bone_index_array[:count]
oskin['bone_weight_array'] = bone_weight_array[:count]
# Bone constraints
for bone in armature.pose.bones: # Bone constraints
if len(bone.constraints) > 0: for bone in armature.pose.bones:
if 'constraints' not in oskin: if len(bone.constraints) > 0:
oskin['constraints'] = [] if 'constraints' not in oskin:
self.add_constraints(bone, oskin, bone=True) oskin['constraints'] = []
self.add_constraints(bone, oskin, bone=True)

View File

@ -1,4 +1,16 @@
import bpy, os, subprocess, sys, platform, aud, json, datetime, socket import bpy, os, subprocess, sys, platform, json, datetime, socket
aud = None
try:
import aud
except (ImportError, AttributeError) as e:
if any(err in str(e) for err in ["numpy.core.multiarray", "_ARRAY_API", "compiled using NumPy 1.x"]):
print("Info: Audio features unavailable due to NumPy version compatibility.")
else:
print(f"Warning: Audio module unavailable: {e}")
aud = None
from . import encoding, pack, log from . import encoding, pack, log
from . cycles import lightmap, prepare, nodes, cache from . cycles import lightmap, prepare, nodes, cache
@ -1117,9 +1129,12 @@ def manage_build(background_pass=False, load_atlas=0):
scriptDir = os.path.dirname(os.path.realpath(__file__)) scriptDir = os.path.dirname(os.path.realpath(__file__))
sound_path = os.path.abspath(os.path.join(scriptDir, '..', 'assets/'+soundfile)) sound_path = os.path.abspath(os.path.join(scriptDir, '..', 'assets/'+soundfile))
device = aud.Device() if aud is not None:
sound = aud.Sound.file(sound_path) device = aud.Device()
device.play(sound) sound = aud.Sound.file(sound_path)
device.play(sound)
else:
print(f"Build completed!")
if logging: if logging:
print("Log file output:") print("Log file output:")

View File

@ -16,3 +16,9 @@ class ArraySpliceNode(LnxLogicTreeNode):
self.add_output('LnxNodeSocketAction', 'Out') self.add_output('LnxNodeSocketAction', 'Out')
self.add_output('LnxNodeSocketArray', 'Array') self.add_output('LnxNodeSocketArray', 'Array')
def get_replacement_node(self, node_tree: bpy.types.NodeTree):
if self.lnx_version not in (0, 1):
raise LookupError()
return NodeReplacement.Identity(self)

View File

@ -17,6 +17,17 @@ class OnEventNode(LnxLogicTreeNode):
'custom': 'Custom' 'custom': 'Custom'
} }
def update(self):
if self.property1 != 'custom':
if self.inputs[0].is_linked:
self.label = f'{self.bl_label}: {self.property1}'
else:
self.label = f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
elif self.inputs[1].is_linked:
self.label = f'{self.bl_label}: {self.property1}'
else:
self.label = f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'
def set_mode(self, context): def set_mode(self, context):
if self.property1 != 'custom': if self.property1 != 'custom':
if len(self.inputs) > 1: if len(self.inputs) > 1:
@ -25,7 +36,17 @@ class OnEventNode(LnxLogicTreeNode):
            if len(self.inputs) < 2:
                self.add_input('LnxNodeSocketAction', 'In')
                self.inputs.move(1, 0)

        if self.property1 != 'custom':
            if self.inputs[0].is_linked:
                self.label = f'{self.bl_label}: {self.property1}'
            else:
                self.label = f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
        elif self.inputs[1].is_linked:
            self.label = f'{self.bl_label}: {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'

    # Use a new property to preserve compatibility
    property1: HaxeEnumProperty(
        'property1',
@ -52,9 +73,15 @@ class OnEventNode(LnxLogicTreeNode):
        layout.prop(self, 'property1', text='')

    def draw_label(self) -> str:
        if self.property1 != 'custom':
            if self.inputs[0].is_linked:
                return f'{self.bl_label}: {self.property1}'
            else:
                return f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
        elif self.inputs[1].is_linked:
            return f'{self.bl_label}: {self.property1}'
        else:
            return f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'

    def get_replacement_node(self, node_tree: bpy.types.NodeTree):
        if self.lnx_version not in (0, 1):

View File

@ -7,12 +7,19 @@ class KeyboardNode(LnxLogicTreeNode):
    lnx_section = 'keyboard'
    lnx_version = 2

    def update(self):
        self.label = f'{self.bl_label}: {self.property0} {self.property1}'

    def upd(self, context):
        self.label = f'{self.bl_label}: {self.property0} {self.property1}'

    property0: HaxeEnumProperty(
        'property0',
        items = [('started', 'Started', 'The keyboard button starts to be pressed'),
                 ('down', 'Down', 'The keyboard button is pressed'),
                 ('released', 'Released', 'The keyboard button stops being pressed')],
        name='', default='down', update=upd)

    property1: HaxeEnumProperty(
        'property1',
@ -69,7 +76,7 @@ class KeyboardNode(LnxLogicTreeNode):
                 ('right', 'right', 'right'),
                 ('left', 'left', 'left'),
                 ('down', 'down', 'down'),],
        name='', default='space', update=upd)

    def lnx_init(self, context):
        self.add_output('LnxNodeSocketAction', 'Out')

View File

@ -8,13 +8,25 @@ class MouseNode(LnxLogicTreeNode):
    lnx_section = 'mouse'
    lnx_version = 3

    def update(self):
        if self.property0 != 'moved':
            self.label = f'{self.bl_label}: {self.property0} {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property0}'

    def upd(self, context):
        if self.property0 != 'moved':
            self.label = f'{self.bl_label}: {self.property0} {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property0}'

    property0: HaxeEnumProperty(
        'property0',
        items = [('started', 'Started', 'The mouse button begins to be pressed'),
                 ('down', 'Down', 'The mouse button is pressed'),
                 ('released', 'Released', 'The mouse button stops being pressed'),
                 ('moved', 'Moved', 'Moved')],
        name='', default='down', update=upd)

    property1: HaxeEnumProperty(
        'property1',
        items = [('left', 'Left', 'Left mouse button'),
@ -22,7 +34,7 @@ class MouseNode(LnxLogicTreeNode):
                 ('right', 'Right', 'Right mouse button'),
                 ('side1', 'Side 1', 'Side 1 mouse button'),
                 ('side2', 'Side 2', 'Side 2 mouse button')],
        name='', default='left', update=upd)

    property2: HaxeBoolProperty(
        'property2',
        name='Include Debug Console',

View File

@ -18,6 +18,10 @@ class CallGroupNode(LnxLogicTreeNode):
    def lnx_init(self, context):
        pass

    def update(self):
        if self.group_tree:
            self.label = f'Group: {self.group_tree.name}'

    # Function to add input sockets and re-link sockets
    def update_inputs(self, tree, node, inp_sockets, in_links):
        count = 0
@ -58,10 +62,12 @@ class CallGroupNode(LnxLogicTreeNode):
                tree.links.new(current_socket, link)
            count = count + 1

    def update_sockets(self, context):
        if self.group_tree:
            self.label = f'Group: {self.group_tree.name}'
        else:
            self.label = 'Call Node Group'
        # List to store from and to sockets of connected nodes
        from_socket_list = []
        to_socket_list = []
@ -107,6 +113,10 @@ class CallGroupNode(LnxLogicTreeNode):
    # Property to store group tree pointer
    group_tree: PointerProperty(name='Group', type=bpy.types.NodeTree, update=update_sockets)

    def edit_tree(self):
        self.label = f'Group: {self.group_tree.name}'
        bpy.ops.lnx.edit_group_tree()

    def draw_label(self) -> str:
        if self.group_tree is not None:
            return f'Group: {self.group_tree.name}'
@ -134,8 +144,9 @@ class CallGroupNode(LnxLogicTreeNode):
        op = row_name.operator('lnx.unlink_group_tree', icon='X', text='')
        op.node_index = self.get_id_str()
        row_ops.enabled = self.group_tree is not None
        op = row_ops.operator('lnx.node_call_func', icon='FULLSCREEN_ENTER', text='Edit tree')
        op.node_index = self.get_id_str()
        op.callback_name = 'edit_tree'

    def get_replacement_node(self, node_tree: bpy.types.NodeTree):
        if self.lnx_version not in (0, 1, 2):

View File

@ -0,0 +1,51 @@
from lnx.logicnode.lnx_nodes import *


class ProbabilisticIndexNode(LnxLogicTreeNode):
    """Outputs an index chosen at random according to the given probability inputs.
    If the probabilities do not sum to 1, they are normalized so that they do.
    Only one output is triggered at a time.

    @output Index: the chosen index.
    """
    bl_idname = 'LNProbabilisticIndexNode'
    bl_label = 'Probabilistic Index'
    lnx_section = 'logic'
    lnx_version = 1

    num_choices: IntProperty(default=0, min=0)

    def __init__(self):
        array_nodes[str(id(self))] = self

    def lnx_init(self, context):
        self.add_output('LnxIntSocket', 'Index')

    def draw_buttons(self, context, layout):
        row = layout.row(align=True)
        op = row.operator('lnx.node_call_func', text='New', icon='PLUS', emboss=True)
        op.node_index = str(id(self))
        op.callback_name = 'add_func'
        op2 = row.operator('lnx.node_call_func', text='', icon='X', emboss=True)
        op2.node_index = str(id(self))
        op2.callback_name = 'remove_func'

    def add_func(self):
        self.add_input('LnxFloatSocket', f'Prob Index {self.num_choices}')
        self.num_choices += 1

    def remove_func(self):
        if len(self.inputs) > 0:
            self.inputs.remove(self.inputs[-1])
            self.num_choices -= 1

    def draw_label(self) -> str:
        if self.num_choices == 0:
            return self.bl_label
        return f'{self.bl_label}: [{self.num_choices}]'
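The runtime selection is not shown in this file; as a rough sketch (assuming the weights are sampled cumulatively after normalization, as the docstring describes), it amounts to:

import random

def probabilistic_index(probs):
    # Hypothetical sketch of the selection the docstring describes;
    # the actual Haxe runtime node may differ in details.
    total = sum(probs)
    if total <= 0.0:
        return 0                      # nothing usable, fall back to the first index
    r = random.random() * total       # scaling r is equivalent to normalizing the weights
    acc = 0.0
    for i, p in enumerate(probs):
        acc += p
        if r <= acc:
            return i
    return len(probs) - 1             # guard against floating-point drift

# probabilistic_index([0.2, 0.3, 0.5]) returns 2 about half of the time.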

View File

@ -1,7 +1,10 @@
from lnx.logicnode.lnx_nodes import *


class SetWorldNode(LnxLogicTreeNode):
    """Sets the World of the active scene.

    The World must either be assigned to a scene or have a fake user."""
    bl_idname = 'LNSetWorldNode'
    bl_label = 'Set World'
    lnx_version = 1

View File

@ -116,7 +116,73 @@ def remove_readonly(func, path, excinfo):
    os.chmod(path, stat.S_IWRITE)
    func(path)


appended_scenes = []

def load_external_blends():
    global appended_scenes
    wrd = bpy.data.worlds['Lnx']

    if not hasattr(wrd, 'lnx_external_blends_path'):
        return

    external_path = getattr(wrd, 'lnx_external_blends_path', '')
    if not external_path or not external_path.strip():
        return

    abs_path = bpy.path.abspath(external_path.strip())
    if not os.path.exists(abs_path):
        return

    # Walk recursively through all subdirs
    for root, dirs, files in os.walk(abs_path):
        for filename in files:
            if not filename.endswith(".blend"):
                continue
            blend_path = os.path.join(root, filename)
            try:
                with bpy.data.libraries.load(blend_path, link=True) as (data_from, data_to):
                    data_to.scenes = list(data_from.scenes)
                for scn in data_to.scenes:
                    if scn is not None and scn not in appended_scenes:
                        # Make the name unique with the file name
                        scn.name += "_" + filename.replace(".blend", "")
                        appended_scenes.append(scn)
                log.info(f"Loaded external blend: {blend_path}")
            except Exception as e:
                log.error(f"Failed to load external blend {blend_path}: {e}")

def clear_external_scenes():
    global appended_scenes
    if not appended_scenes:
        return

    for scn in appended_scenes:
        try:
            bpy.data.scenes.remove(scn, do_unlink=True)
        except Exception as e:
            log.error(f"Failed to remove scene {scn.name}: {e}")

    for lib in list(bpy.data.libraries):
        try:
            if lib.users == 0:
                bpy.data.libraries.remove(lib)
        except Exception as e:
            log.error(f"Failed to remove library {lib.name}: {e}")

    try:
        bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
    except Exception as e:
        log.error(f"Failed to purge orphan data: {e}")

    appended_scenes = []

def export_data(fp, sdk_path):
    load_external_blends()
    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()
@ -323,6 +389,8 @@ def export_data(fp, sdk_path):
    state.last_resy = resy
    state.last_scene = scene_name

    clear_external_scenes()

def compile(assets_only=False):
    wrd = bpy.data.worlds['Lnx']
    fp = lnx.utils.get_fp()
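As a usage sketch for the external-blends flow above (assuming the default 'Lnx' world and a project-relative folder; the folder name is hypothetical):

import bpy

# '//' makes the path relative to the saved .blend file.
bpy.data.worlds['Lnx'].lnx_external_blends_path = '//external_blends/'

# export_data() then calls load_external_blends() before exporting and
# clear_external_scenes() afterwards, so the linked scenes never linger.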

View File

@ -39,14 +39,15 @@ def add_world_defs():
    # Store contexts
    if rpdat.rp_hdr == False:
        wrd.world_defs += '_LDR'

    if lnx.utils.get_active_scene().world is not None:
        if lnx.utils.get_active_scene().world.lnx_light_ies_texture:
            wrd.world_defs += '_LightIES'
            assets.add_embedded_data('iestexture.png')

        if lnx.utils.get_active_scene().world.lnx_light_clouds_texture:
            wrd.world_defs += '_LightClouds'
            assets.add_embedded_data('cloudstexture.png')

    if rpdat.rp_renderer == 'Deferred':
        assets.add_khafile_def('lnx_deferred')
@ -240,7 +241,7 @@ def build():
        compo_depth = True

    focus_distance = 0.0
    if lnx.utils.get_active_scene().camera and lnx.utils.get_active_scene().camera.data.dof.use_dof:
        focus_distance = lnx.utils.get_active_scene().camera.data.dof.focus_distance

    if focus_distance > 0.0:

View File

@ -69,7 +69,7 @@ def build():
    if rpdat.lnx_irradiance:
        # Plain background color
        if '_EnvCol' in world.world_defs:
            world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
            # Irradiance json file name
            world.lnx_envtex_name = world_name
            world.lnx_envtex_irr_name = world_name
@ -99,7 +99,7 @@ def build():
def create_world_shaders(world: bpy.types.World):
    """Creates fragment and vertex shaders for the given world."""
    global shader_datas
    world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
    pass_name = 'World_' + world_name

    shader_props = {
@ -160,7 +160,7 @@ def create_world_shaders(world: bpy.types.World):
def build_node_tree(world: bpy.types.World, frag: Shader, vert: Shader, con: ShaderContext):
    """Generates the shader code for the given world."""
    world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
    world.world_defs = ''
    rpdat = lnx.utils.get_rp()
    wrd = bpy.data.worlds['Lnx']
@ -175,7 +175,7 @@ def build_node_tree(world: bpy.types.World, frag: Shader, vert: Shader, con: Sha
        frag.write('fragColor.rgb = backgroundCol;')
        return

    parser_state = ParserState(ParserContext.WORLD, lnx.utils.asset_name(world) if world.library else world.name, world)
    parser_state.con = con
    parser_state.curshader = frag
    parser_state.frag = frag

View File

@ -94,6 +94,7 @@ def parse_material_output(node: bpy.types.Node, custom_particle_node: bpy.types.
    parse_displacement = state.parse_displacement
    particle_info = {
        'index': False,
        'random': False,
        'age': False,
        'lifetime': False,
        'location': False,

View File

@ -254,9 +254,10 @@ def parse_particleinfo(node: bpy.types.ShaderNodeParticleInfo, out_socket: bpy.t
        c.particle_info['index'] = True
        return 'p_index' if particles_on else '0.0'

    # Random
    if out_socket == node.outputs[1]:
        c.particle_info['random'] = True
        return 'p_random' if particles_on else '0.0'

    # Age
    elif out_socket == node.outputs[2]:
@ -276,7 +277,7 @@ def parse_particleinfo(node: bpy.types.ShaderNodeParticleInfo, out_socket: bpy.t
    # Size
    elif out_socket == node.outputs[5]:
        c.particle_info['size'] = True
        return 'p_size' if particles_on else '1.0'

    # Velocity
    elif out_socket == node.outputs[6]:

View File

@ -58,7 +58,6 @@ def make(context_id, rpasses):
        con['alpha_blend_destination'] = mat.lnx_blending_destination_alpha
        con['alpha_blend_operation'] = mat.lnx_blending_operation_alpha
        con['depth_write'] = False
    elif particle:
        pass
    # Depth prepass was performed, exclude mat with depth read that
@ -66,6 +65,9 @@ def make(context_id, rpasses):
    elif dprepass and not (rpdat.rp_depth_texture and mat.lnx_depth_read):
        con['depth_write'] = False
        con['compare_mode'] = 'equal'
    else:
        con['depth_write'] = mat.lnx_depth_write
        con['compare_mode'] = mat.lnx_compare_mode

    attachment_format = 'RGBA32' if '_LDR' in wrd.world_defs else 'RGBA64'
    con['color_attachments'] = [attachment_format, attachment_format]

View File

@ -55,6 +55,7 @@ def write(vert, particle_info=None, shadowmap=False):
    # Outs
    out_index = True if particle_info != None and particle_info['index'] else False
    out_random = True if particle_info != None and particle_info['random'] else False
    out_age = True if particle_info != None and particle_info['age'] else False
    out_lifetime = True if particle_info != None and particle_info['lifetime'] else False
    out_location = True if particle_info != None and particle_info['location'] else False
@ -258,6 +259,11 @@ def write(vert, particle_info=None, shadowmap=False):
        vert.add_out('float p_index')
        vert.write('p_index = gl_InstanceID;')

    if out_random:
        vert.add_out('float p_random')
        vert.write('p_random = fract(sin(gl_InstanceID) * 43758.5453);')
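For illustration only, the per-instance hash written above can be mirrored on the CPU to see what p_random holds:

import math

def particle_random(instance_id: int) -> float:
    # CPU mirror of 'fract(sin(gl_InstanceID) * 43758.5453)' from the vertex shader above.
    x = math.sin(instance_id) * 43758.5453
    return x - math.floor(x)   # fract(): a deterministic value in [0, 1) per particle

# particle_random(0) == 0.0; other instance ids map to scattered values in [0, 1).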
def write_tilesheet(vert):
    # tilesx, tilesy, framerate - pd[3][0], pd[3][1], pd[3][2]
    vert.write('int frame = int((p_age) / pd[3][2]);')

View File

@ -23,6 +23,7 @@ class ShaderData:
        self.data = {'shader_datas': [self.sd]}
        self.matname = lnx.utils.safesrc(lnx.utils.asset_name(material))
        self.sd['name'] = self.matname + '_data'
        self.sd['next_pass'] = material.lnx_next_pass
        self.sd['contexts'] = []

    def add_context(self, props) -> 'ShaderContext':

View File

@ -142,6 +142,8 @@ def init_properties():
    bpy.types.World.lnx_project_version = StringProperty(name="Version", description="Exported project version", default="1.0.0", update=assets.invalidate_compiler_cache, set=set_version, get=get_version)
    bpy.types.World.lnx_project_version_autoinc = BoolProperty(name="Auto-increment Build Number", description="Auto-increment build number", default=True, update=assets.invalidate_compiler_cache)
    bpy.types.World.lnx_project_bundle = StringProperty(name="Bundle", description="Exported project bundle", default="org.leenkx3d", update=assets.invalidate_compiler_cache, set=set_project_bundle, get=get_project_bundle)
    # External Blend Files
    bpy.types.World.lnx_external_blends_path = StringProperty(name="External Blends", description="Directory containing external blend files to include in export", default="", subtype='DIR_PATH', update=assets.invalidate_compiler_cache)
    # Android Settings
    bpy.types.World.lnx_project_android_sdk_min = IntProperty(name="Minimal Version SDK", description="Minimal Version Android SDK", default=23, min=14, max=30, update=assets.invalidate_compiler_cache)
    bpy.types.World.lnx_project_android_sdk_target = IntProperty(name="Target Version SDK", description="Target Version Android SDK", default=26, min=26, max=30, update=assets.invalidate_compiler_cache)
@ -350,6 +352,7 @@ def init_properties():
        update=assets.invalidate_instance_cache,
        override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_export = BoolProperty(name="Export", description="Export object data", default=True, override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_sorting_index = IntProperty(name="Sorting Index", description="Sorting index for the Render's Draw Order", default=0, override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_spawn = BoolProperty(name="Spawn", description="Auto-add this object when creating scene", default=True, override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_mobile = BoolProperty(name="Mobile", description="Object moves during gameplay", default=False, override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_visible = BoolProperty(name="Visible", description="Render this object", default=True, override={'LIBRARY_OVERRIDABLE'})
@ -436,6 +439,18 @@ def init_properties():
    bpy.types.Material.lnx_depth_read = BoolProperty(name="Read Depth", description="Allow this material to read from a depth texture which is copied from the depth buffer. The meshes using this material will be drawn after all meshes that don't read from the depth texture", default=False)
    bpy.types.Material.lnx_overlay = BoolProperty(name="Overlay", description="Renders the material, unshaded, over other shaded materials", default=False)
    bpy.types.Material.lnx_decal = BoolProperty(name="Decal", default=False)
    bpy.types.Material.lnx_compare_mode = EnumProperty(
        items=[
            ('always', 'Always', 'Always'),
            ('never', 'Never', 'Never'),
            ('less', 'Less', 'Less'),
            ('less_equal', 'Less Equal', 'Less Equal'),
            ('greater', 'Greater', 'Greater'),
            ('greater_equal', 'Greater Equal', 'Greater Equal'),
            ('equal', 'Equal', 'Equal'),
            ('not_equal', 'Not Equal', 'Not Equal'),
        ],
        name="Compare Mode", default='less', description="Comparison mode for the material")
    bpy.types.Material.lnx_two_sided = BoolProperty(name="Two-Sided", description="Flip normal when drawing back-face", default=False)
    bpy.types.Material.lnx_ignore_irradiance = BoolProperty(name="Ignore Irradiance", description="Ignore irradiance for material", default=False)
    bpy.types.Material.lnx_cull_mode = EnumProperty(
@ -443,6 +458,8 @@ def init_properties():
        ('clockwise', 'Front', 'Clockwise'),
        ('counter_clockwise', 'Back', 'Counter-Clockwise')],
        name="Cull Mode", default='clockwise', description="Draw geometry faces")
    bpy.types.Material.lnx_next_pass = StringProperty(
        name="Next Pass", default='', description="Next pass for the material", update=assets.invalidate_shader_cache)
    bpy.types.Material.lnx_discard = BoolProperty(name="Alpha Test", default=False, description="Do not render fragments below specified opacity threshold")
    bpy.types.Material.lnx_discard_opacity = FloatProperty(name="Mesh Opacity", default=0.2, min=0, max=1)
    bpy.types.Material.lnx_discard_opacity_shadows = FloatProperty(name="Shadows Opacity", default=0.1, min=0, max=1)
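A short usage sketch for the new material properties (the material and pass names here are hypothetical):

import bpy

mat = bpy.data.materials['Glass']        # hypothetical material
mat.lnx_compare_mode = 'less_equal'      # depth comparison used by this material's context
mat.lnx_next_pass = 'GlassHighlight'     # hypothetical material rendered as the next pass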

View File

@ -63,6 +63,7 @@ class LNX_PT_ObjectPropsPanel(bpy.types.Panel):
            return

        col = layout.column()
        col.prop(obj, 'lnx_sorting_index')
        col.prop(obj, 'lnx_export')
        if not obj.lnx_export:
            return
@ -551,6 +552,51 @@ class LNX_OT_NewCustomMaterial(bpy.types.Operator):
        return{'FINISHED'}


class LNX_OT_NextPassMaterialSelector(bpy.types.Operator):
    """Select material for next pass"""
    bl_idname = "lnx.next_pass_material_selector"
    bl_label = "Select Next Pass Material"

    def execute(self, context):
        return {'FINISHED'}

    def invoke(self, context, event):
        context.window_manager.popup_menu(self.draw_menu, title="Select Next Pass Material", icon='MATERIAL')
        return {'FINISHED'}

    def draw_menu(self, popup, context):
        layout = popup.layout

        # Add 'None' option
        op = layout.operator("lnx.set_next_pass_material", text="")
        op.material_name = ""

        # Add materials from the current object's material slots
        if context.object and hasattr(context.object, 'material_slots'):
            for slot in context.object.material_slots:
                if (slot.material is not None and slot.material != context.material):
                    op = layout.operator("lnx.set_next_pass_material", text=slot.material.name)
                    op.material_name = slot.material.name


class LNX_OT_SetNextPassMaterial(bpy.types.Operator):
    """Set the next pass material"""
    bl_idname = "lnx.set_next_pass_material"
    bl_label = "Set Next Pass Material"

    material_name: StringProperty()

    def execute(self, context):
        if context.material:
            context.material.lnx_next_pass = self.material_name
            # Redraw the UI to update the display
            for area in context.screen.areas:
                if area.type == 'PROPERTIES':
                    area.tag_redraw()
        return {'FINISHED'}


class LNX_PG_BindTexturesListItem(bpy.types.PropertyGroup):
    uniform_name: StringProperty(
        name='Uniform Name',
@ -634,18 +680,23 @@ class LNX_PT_MaterialPropsPanel(bpy.types.Panel):
        mat = bpy.context.material
        if mat is None:
            return

        layout.prop(mat, 'lnx_cast_shadow')
        columnb = layout.column()
        wrd = bpy.data.worlds['Lnx']
        columnb.enabled = len(wrd.lnx_rplist) > 0 and lnx.utils.get_rp().rp_renderer == 'Forward'
        columnb.prop(mat, 'lnx_receive_shadow')
        layout.prop(mat, 'lnx_ignore_irradiance')
        layout.prop(mat, 'lnx_compare_mode')
        layout.prop(mat, 'lnx_two_sided')
        columnb = layout.column()
        columnb.enabled = not mat.lnx_two_sided
        columnb.prop(mat, 'lnx_cull_mode')
        row = layout.row(align=True)
        row.prop(mat, 'lnx_next_pass', text="Next Pass")
        row.operator('lnx.next_pass_material_selector', text='', icon='MATERIAL')
        layout.prop(mat, 'lnx_material_id')
        layout.prop(mat, 'lnx_depth_write')
        layout.prop(mat, 'lnx_depth_read')
        layout.prop(mat, 'lnx_overlay')
        layout.prop(mat, 'lnx_decal')
@ -1229,7 +1280,8 @@ class LNX_PT_ProjectModulesPanel(bpy.types.Panel):
        layout.prop_search(wrd, 'lnx_khafile', bpy.data, 'texts')
        layout.prop(wrd, 'lnx_project_root')
        layout.prop(wrd, 'lnx_external_blends_path')


class LnxVirtualInputPanel(bpy.types.Panel):
    bl_label = "Leenkx Virtual Input"
    bl_space_type = "PROPERTIES"
@ -2267,7 +2319,10 @@ class LnxGenTerrainButton(bpy.types.Operator):
        node.location = (-200, -200)
        node.inputs[0].default_value = 5.0
        links.new(nodes['Bump'].inputs[2], nodes['_TerrainHeight'].outputs[0])
        if bpy.app.version[0] >= 4:
            links.new(nodes['Principled BSDF'].inputs[22], nodes['Bump'].outputs[0])
        else:
            links.new(nodes['Principled BSDF'].inputs[20], nodes['Bump'].outputs[0])
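The hard-coded indices (20 before Blender 4.x, 22 from 4.x on) track how the Principled BSDF gained inputs between versions; as an alternative sketch, not part of the patch and assuming the target is the 'Normal' socket, the input can be looked up by name instead:

# Alternative sketch (not part of the patch): look the socket up by name,
# which works regardless of how many inputs the Principled BSDF has.
normal_input = nodes['Principled BSDF'].inputs.get('Normal')  # assumes 'Normal' is the intended target
if normal_input is not None:
    links.new(normal_input, nodes['Bump'].outputs[0])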
        # Create sectors
        root_obj = bpy.data.objects.new("Terrain", None)
@ -2300,7 +2355,16 @@ class LnxGenTerrainButton(bpy.types.Operator):
        disp_mod.texture.extension = 'EXTEND'
        disp_mod.texture.use_interpolation = False
        disp_mod.texture.use_mipmap = False
        try:
            disp_mod.texture.image = bpy.data.images.load(filepath=scn.lnx_terrain_textures+'/heightmap_' + j + '.png')
        except Exception as e:
            if i == 0:  # Only show message once
                if scn.lnx_terrain_textures.startswith('//') and not bpy.data.filepath:
                    self.report({'INFO'}, "Generating terrain... Save .blend file and add your heightmaps for each sector in "
                                          "the \"Bundled\" folder using the format \"heightmap_01.png\", \"heightmap_02.png\", etc.")
                else:
                    self.report({'INFO'}, f"Heightmap not found: {scn.lnx_terrain_textures}/heightmap_{j}.png - using blank image")
        f = 1
        levels = 0
        while f < disp_mod.texture.image.size[0]:
@ -2908,6 +2972,8 @@ __REG_CLASSES = (
    InvalidateCacheButton,
    InvalidateMaterialCacheButton,
    LNX_OT_NewCustomMaterial,
    LNX_OT_NextPassMaterialSelector,
    LNX_OT_SetNextPassMaterial,
    LNX_PG_BindTexturesListItem,
    LNX_UL_BindTexturesList,
    LNX_OT_BindTexturesListNewItem,

View File

@ -338,8 +338,8 @@ project.addSources('Sources');
    if rpdat.lnx_particles != 'Off':
        assets.add_khafile_def('lnx_particles')
    if rpdat.rp_draw_order == 'Index':
        assets.add_khafile_def('lnx_draworder_index')
    if lnx.utils.get_viewport_controls() == 'azerty':
        assets.add_khafile_def('lnx_azerty')
@ -818,7 +818,7 @@ const int compoChromaticSamples = {rpdat.lnx_chromatic_aberration_samples};
    focus_distance = 0.0
    fstop = 0.0
    if lnx.utils.get_active_scene().camera and lnx.utils.get_active_scene().camera.data.dof.use_dof:
        focus_distance = lnx.utils.get_active_scene().camera.data.dof.focus_distance
        fstop = lnx.utils.get_active_scene().camera.data.dof.aperture_fstop
        lens = lnx.utils.get_active_scene().camera.data.lens

View File

@ -118,7 +118,8 @@ def render_envmap(target_dir: str, world: bpy.types.World) -> str:
    scene = bpy.data.scenes['_lnx_envmap_render']
    scene.world = world

    world_name = lnx.utils.asset_name(world) if world.library else world.name
    image_name = f'env_{lnx.utils.safesrc(world_name)}.{ENVMAP_EXT}'

    render_path = os.path.join(target_dir, image_name)
    scene.render.filepath = render_path