forked from LeenkxTeam/LNXSDK
		
	merge upstream
@ -331,15 +331,18 @@ class RenderPath {
		});
	}

	public static function sortMeshesShader(meshes: Array<MeshObject>) {
	public static function sortMeshesIndex(meshes: Array<MeshObject>) {
		meshes.sort(function(a, b): Int {
			#if rp_depth_texture
			var depthDiff = boolToInt(a.depthRead) - boolToInt(b.depthRead);
			if (depthDiff != 0) return depthDiff;
			#end

			return a.materials[0].name >= b.materials[0].name ? 1 : -1;
		});
			if (a.data.sortingIndex != b.data.sortingIndex) {
				return a.data.sortingIndex > b.data.sortingIndex ? 1 : -1;
			}

			return a.data.name >= b.data.name ? 1 : -1;
		});
	}

	public function drawMeshes(context: String) {
@ -399,7 +402,7 @@ class RenderPath {
		#if lnx_batch
		sortMeshesDistance(Scene.active.meshBatch.nonBatched);
		#else
		drawOrder == DrawOrder.Shader ? sortMeshesShader(meshes) : sortMeshesDistance(meshes);
		drawOrder == DrawOrder.Index ? sortMeshesIndex(meshes) : sortMeshesDistance(meshes);
		#end
		meshesSorted = true;
		}
@ -914,6 +917,6 @@ class CachedShaderContext {

@:enum abstract DrawOrder(Int) from Int {
	var Distance = 0; // Early-z
	var Shader = 1; // Less state changes
	var Index = 1; // Less state changes
	// var Mix = 2; // Distance buckets sorted by shader
}

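For context, the new Index draw order is selected the same way the old Shader order was, via the startup flag consumed in the Starter change further down in this diff. A minimal sketch of opting in (the lnx_draworder_shader define is the existing flag name, reused):

#if lnx_draworder_shader
iron.RenderPath.active.drawOrder = iron.RenderPath.DrawOrder.Index;
#end // else Distance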
@ -9,6 +9,7 @@ import iron.data.SceneFormat;
class MeshData {

	public var name: String;
	public var sortingIndex: Int;
	public var raw: TMeshData;
	public var format: TSceneFormat;
	public var geom: Geometry;
@ -23,7 +24,8 @@ class MeshData {
	public function new(raw: TMeshData, done: MeshData->Void) {
		this.raw = raw;
		this.name = raw.name;

		this.sortingIndex = raw.sorting_index;

		if (raw.scale_pos != null) scalePos = raw.scale_pos;
		if (raw.scale_tex != null) scaleTex = raw.scale_tex;

@ -49,6 +49,7 @@ typedef TMeshData = {
@:structInit class TMeshData {
#end
	public var name: String;
	public var sorting_index: Int;
	public var vertex_arrays: Array<TVertexArray>;
	public var index_arrays: Array<TIndexArray>;
	@:optional public var dynamic_usage: Null<Bool>;
@ -222,6 +223,7 @@ typedef TShaderData = {
@:structInit class TShaderData {
#end
	public var name: String;
	public var next_pass: String;
	public var contexts: Array<TShaderContext>;
}

@ -393,6 +395,7 @@ typedef TParticleData = {
	public var name: String;
	public var type: Int; // 0 - Emitter, Hair
	public var auto_start: Bool;
	public var dynamic_emitter: Bool;
	public var is_unique: Bool;
	public var loop: Bool;
	public var count: Int;

@ -22,6 +22,7 @@ using StringTools;
class ShaderData {

	public var name: String;
	public var nextPass: String;
	public var raw: TShaderData;
	public var contexts: Array<ShaderContext> = [];

@ -33,6 +34,7 @@ class ShaderData {
	public function new(raw: TShaderData, done: ShaderData->Void, overrideContext: TShaderOverride = null) {
		this.raw = raw;
		this.name = raw.name;
		this.nextPass = raw.next_pass;

		for (c in raw.contexts) contexts.push(null);
		var contextsLoaded = 0;

@ -302,6 +302,10 @@ class MeshObject extends Object {

		// Render mesh
		var ldata = lod.data;

		// Next pass rendering first (inverse order)
		renderNextPass(g, context, bindParams, lod);

		for (i in 0...ldata.geom.indexBuffers.length) {

			var mi = ldata.geom.materialIndices[i];
@ -405,4 +409,85 @@ class MeshObject extends Object {
			}
		}
	}

	function renderNextPass(g: Graphics, context: String, bindParams: Array<String>, lod: MeshObject) {
		var ldata = lod.data;
		for (i in 0...ldata.geom.indexBuffers.length) {
			var mi = ldata.geom.materialIndices[i];
			if (mi >= materials.length) continue;

			var currentMaterial: MaterialData = materials[mi];
			if (currentMaterial == null || currentMaterial.shader == null) continue;

			var nextPassName: String = currentMaterial.shader.nextPass;
			if (nextPassName == null || nextPassName == "") continue;

			var nextMaterial: MaterialData = null;
			for (mat in materials) {
				// First try exact match
				if (mat.name == nextPassName) {
					nextMaterial = mat;
					break;
				}
				// If no exact match, try to match base name for linked materials
				if (mat.name.indexOf("_") > 0 && mat.name.substr(mat.name.length - 6) == ".blend") {
					var baseName = mat.name.substring(0, mat.name.indexOf("_"));
					if (baseName == nextPassName) {
						nextMaterial = mat;
						break;
					}
				}
			}

			if (nextMaterial == null) continue;

			var nextMaterialContext: MaterialContext = null;
			var nextShaderContext: ShaderContext = null;

			for (j in 0...nextMaterial.raw.contexts.length) {
				if (nextMaterial.raw.contexts[j].name.substr(0, context.length) == context) {
					nextMaterialContext = nextMaterial.contexts[j];
					nextShaderContext = nextMaterial.shader.getContext(context);
					break;
				}
			}

			if (nextShaderContext == null) continue;
			if (skipContext(context, nextMaterial)) continue;

			var elems = nextShaderContext.raw.vertex_elements;

			// Uniforms
			if (nextShaderContext.pipeState != lastPipeline) {
				g.setPipeline(nextShaderContext.pipeState);
				lastPipeline = nextShaderContext.pipeState;
			}
			Uniforms.setContextConstants(g, nextShaderContext, bindParams);
			Uniforms.setObjectConstants(g, nextShaderContext, this);
			Uniforms.setMaterialConstants(g, nextShaderContext, nextMaterialContext);

			// VB / IB
			#if lnx_deinterleaved
			g.setVertexBuffers(ldata.geom.get(elems));
			#else
			if (ldata.geom.instancedVB != null) {
				g.setVertexBuffers([ldata.geom.get(elems), ldata.geom.instancedVB]);
			}
			else {
				g.setVertexBuffer(ldata.geom.get(elems));
			}
			#end

			g.setIndexBuffer(ldata.geom.indexBuffers[i]);

			// Draw next pass for this specific geometry section
			if (ldata.geom.instanced) {
				g.drawIndexedVerticesInstanced(ldata.geom.instanceCount, ldata.geom.start, ldata.geom.count);
			}
			else {
				g.drawIndexedVertices(ldata.geom.start, ldata.geom.count);
			}
		}
	}
}

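To illustrate the linked-material fallback above with a made-up material name: a material imported from a library as "Glass_MyLib.blend" yields the base name "Glass", so a next_pass of "Glass" still resolves to it. A minimal stand-alone sketch of that string check (not engine code, names are hypothetical):

class NextPassNameDemo {
	static function main() {
		// Made-up linked-material name; mirrors the fallback in renderNextPass above
		var name = "Glass_MyLib.blend";
		if (name.indexOf("_") > 0 && name.substr(name.length - 6) == ".blend") {
			trace(name.substring(0, name.indexOf("_"))); // Glass
		}
	}
}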
@ -8,6 +8,8 @@ import kha.arrays.Float32Array;
import iron.data.Data;
import iron.data.ParticleData;
import iron.data.SceneFormat;
import iron.data.Geometry;
import iron.data.MeshData;
import iron.system.Time;
import iron.math.Mat4;
import iron.math.Quat;
@ -17,6 +19,7 @@ import iron.math.Vec4;
class ParticleSystem {
	public var data: ParticleData;
	public var speed = 1.0;
	public var dynamicEmitter: Bool = true;
	var currentSpeed = 0.0;
	var particles: Array<Particle>;
	var ready: Bool;
@ -52,6 +55,12 @@ class ParticleSystem {

	var random = 0.0;

	var tmpV4 = new Vec4();

	var instancedData: Float32Array = null;
	var lastSpawnedCount: Int = 0;
	var hasUniqueGeom: Bool = false;

	public function new(sceneName: String, pref: TParticleReference) {
		seed = pref.seed;
		currentSpeed = speed;
@ -62,6 +71,11 @@ class ParticleSystem {
		Data.getParticle(sceneName, pref.particle, function(b: ParticleData) {
			data = b;
			r = data.raw;
			if (r.dynamic_emitter != null) {
				dynamicEmitter = r.dynamic_emitter;
			} else {
				dynamicEmitter = true;
			}
			if (Scene.active.raw.gravity != null) {
				gx = Scene.active.raw.gravity[0] * r.weight_gravity;
				gy = Scene.active.raw.gravity[1] * r.weight_gravity;
@ -98,6 +112,8 @@ class ParticleSystem {
		lap = 0;
		lapTime = 0;
		speed = currentSpeed;
		lastSpawnedCount = 0;
		instancedData = null;
	}

	public function pause() {
@ -130,8 +146,13 @@ class ParticleSystem {

		// Copy owner world transform but discard scale
		owner.transform.world.decompose(ownerLoc, ownerRot, ownerScl);
		object.transform.loc = ownerLoc;
		object.transform.rot = ownerRot;
		if (dynamicEmitter) {
			object.transform.loc.x = 0; object.transform.loc.y = 0; object.transform.loc.z = 0;
			object.transform.rot = new Quat();
		} else {
			object.transform.loc = ownerLoc;
			object.transform.rot = ownerRot;
		}

		// Set particle size per particle system
		object.transform.scale = new Vec4(r.particle_size, r.particle_size, r.particle_size, 1);
@ -158,13 +179,18 @@ class ParticleSystem {
		if (lap > prevLap && !r.loop) {
			end();
		}

		if (lap > prevLap && r.loop) {
			lastSpawnedCount = 0;
		}

		updateGpu(object, owner);
	}

	public function getData(): Mat4 {
		var hair = r.type == 1;
		m._00 = animtime;
		// Store loop flag in the sign: positive -> loop, negative -> no loop
		m._00 = r.loop ? animtime : -animtime;
		m._01 = hair ? 1 / particles.length : spawnRate;
		m._02 = hair ? 1 : lifetime;
		m._03 = particles.length;
@ -187,17 +213,26 @@ class ParticleSystem {
		return r.size_random;
	}

	public function getRandom(): FastFloat {
	public inline function getRandom(): FastFloat {
		return random;
	}

	public function getSize(): FastFloat {
	public inline function getSize(): FastFloat {
		return r.particle_size;
	}

	function updateGpu(object: MeshObject, owner: MeshObject) {
		if (!object.data.geom.instanced) setupGeomGpu(object, owner);
		// GPU particles transform is attached to owner object
		if (dynamicEmitter) {
			if (!hasUniqueGeom) ensureUniqueGeom(object);
			var needSetup = instancedData == null || object.data.geom.instancedVB == null;
			if (needSetup) setupGeomGpuDynamic(object, owner);
			updateSpawnedInstances(object, owner);
		}
		else {
			if (!hasUniqueGeom) ensureUniqueGeom(object);
			if (!object.data.geom.instanced) setupGeomGpu(object, owner);
		}
		// GPU particles transform is attached to owner object in static mode
	}

	function setupGeomGpu(object: MeshObject, owner: MeshObject) {
@ -258,18 +293,134 @@ class ParticleSystem {
		object.data.geom.setupInstanced(instancedData, 1, Usage.StaticUsage);
	}

	function fhash(n: Int): Float {
		var s = n + 1.0;
		s *= 9301.0 % s;
		s = (s * 9301.0 + 49297.0) % 233280.0;
		return s / 233280.0;
	// allocate instanced VB once for this object
	function setupGeomGpuDynamic(object: MeshObject, owner: MeshObject) {
		if (instancedData == null) instancedData = new Float32Array(particles.length * 3);
		lastSpawnedCount = 0;
		// Create instanced VB once if missing (seed with our instancedData)
		if (object.data.geom.instancedVB == null) {
			object.data.geom.setupInstanced(instancedData, 1, Usage.DynamicUsage);
		}
	}

	function ensureUniqueGeom(object: MeshObject) {
		if (hasUniqueGeom) return;
		var newData: MeshData = null;
		new MeshData(object.data.raw, function(dat: MeshData) {
			dat.scalePos = object.data.scalePos;
			dat.scaleTex = object.data.scaleTex;
			dat.format = object.data.format;
			newData = dat;
		});
		if (newData != null) object.setData(newData);
		hasUniqueGeom = true;
	}

	function updateSpawnedInstances(object: MeshObject, owner: MeshObject) {
		if (instancedData == null) return;
		var targetCount = count;
		if (targetCount > particles.length) targetCount = particles.length;
		if (targetCount <= lastSpawnedCount) return;

		var normFactor = 1 / 32767;
		var scalePosOwner = owner.data.scalePos;
		var scalePosParticle = object.data.scalePos;
		var particleSize = r.particle_size;
		var base = 1.0 / (particleSize * scalePosParticle);

		switch (r.emit_from) {
			case 0: // Vert
				var pa = owner.data.geom.positions;
				var osx = owner.transform.scale.x;
				var osy = owner.transform.scale.y;
				var osz = owner.transform.scale.z;
				var pCount = Std.int(pa.values.length / pa.size);
				for (idx in lastSpawnedCount...targetCount) {
					var j = Std.int(fhash(idx) * pCount);
					var lx = pa.values[j * pa.size    ] * normFactor * scalePosOwner * osx;
					var ly = pa.values[j * pa.size + 1] * normFactor * scalePosOwner * osy;
					var lz = pa.values[j * pa.size + 2] * normFactor * scalePosOwner * osz;
					tmpV4.x = lx; tmpV4.y = ly; tmpV4.z = lz; tmpV4.w = 1;
					tmpV4.applyQuat(ownerRot);
					var o = idx * 3;
					instancedData.set(o    , (tmpV4.x + ownerLoc.x) * base);
					instancedData.set(o + 1, (tmpV4.y + ownerLoc.y) * base);
					instancedData.set(o + 2, (tmpV4.z + ownerLoc.z) * base);
				}

			case 1: // Face
				var positions = owner.data.geom.positions.values;
				var osx1 = owner.transform.scale.x;
				var osy1 = owner.transform.scale.y;
				var osz1 = owner.transform.scale.z;
				for (idx in lastSpawnedCount...targetCount) {
					var ia = owner.data.geom.indices[Std.random(owner.data.geom.indices.length)];
					var faceIndex = Std.random(Std.int(ia.length / 3));
					var i0 = ia[faceIndex * 3 + 0];
					var i1 = ia[faceIndex * 3 + 1];
					var i2 = ia[faceIndex * 3 + 2];
					var v0x = positions[i0 * 4    ], v0y = positions[i0 * 4 + 1], v0z = positions[i0 * 4 + 2];
					var v1x = positions[i1 * 4    ], v1y = positions[i1 * 4 + 1], v1z = positions[i1 * 4 + 2];
					var v2x = positions[i2 * 4    ], v2y = positions[i2 * 4 + 1], v2z = positions[i2 * 4 + 2];
					var rx = Math.random(); var ry = Math.random(); if (rx + ry > 1) { rx = 1 - rx; ry = 1 - ry; }
					var pxs = v0x + rx * (v1x - v0x) + ry * (v2x - v0x);
					var pys = v0y + rx * (v1y - v0y) + ry * (v2y - v0y);
					var pzs = v0z + rx * (v1z - v0z) + ry * (v2z - v0z);
					var px = pxs * normFactor * scalePosOwner * osx1;
					var py = pys * normFactor * scalePosOwner * osy1;
					var pz = pzs * normFactor * scalePosOwner * osz1;
					tmpV4.x = px; tmpV4.y = py; tmpV4.z = pz; tmpV4.w = 1;
					tmpV4.applyQuat(ownerRot);
					var o1 = idx * 3;
					instancedData.set(o1    , (tmpV4.x + ownerLoc.x) * base);
					instancedData.set(o1 + 1, (tmpV4.y + ownerLoc.y) * base);
					instancedData.set(o1 + 2, (tmpV4.z + ownerLoc.z) * base);
				}

			case 2: // Volume
				var dim = object.transform.dim;
				for (idx in lastSpawnedCount...targetCount) {
					tmpV4.x = (Math.random() * 2.0 - 1.0) * (dim.x * 0.5);
					tmpV4.y = (Math.random() * 2.0 - 1.0) * (dim.y * 0.5);
					tmpV4.z = (Math.random() * 2.0 - 1.0) * (dim.z * 0.5);
					tmpV4.w = 1;
					tmpV4.applyQuat(ownerRot);
					var o2 = idx * 3;
					instancedData.set(o2    , (tmpV4.x + ownerLoc.x) * base);
					instancedData.set(o2 + 1, (tmpV4.y + ownerLoc.y) * base);
					instancedData.set(o2 + 2, (tmpV4.z + ownerLoc.z) * base);
				}
		}

		// Upload full active range [0..targetCount) to this object's instanced VB
		var geom = object.data.geom;
		if (geom.instancedVB == null) {
			geom.setupInstanced(instancedData, 1, Usage.DynamicUsage);
		}
		var vb = geom.instancedVB.lock();
		var totalFloats = targetCount * 3; // xyz per instance
		var i = 0;
		while (i < totalFloats) {
			vb.setFloat32(i * 4, instancedData[i]);
			i++;
		}
		geom.instancedVB.unlock();
		geom.instanceCount = targetCount;
		lastSpawnedCount = targetCount;
	}

	inline function fhash(n: Int): Float {
		var s = n + 1.0;
		s *= 9301.0 % s;
		s = (s * 9301.0 + 49297.0) % 233280.0;
		return s / 233280.0;
	}

	public function remove() {}

	/**
		Generates a random point in the triangle with vertex positions abc.

		Please note that the given position vectors are changed in-place by this
		function and can be considered garbage afterwards, so make sure to clone
		them first if needed.

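The Face branch above uses the standard reflection trick for uniform sampling inside a triangle: draw rx and ry in [0, 1) and reflect the pair when rx + ry > 1 so the point stays inside the triangle rather than the parallelogram. A minimal self-contained sketch of just that step (names are illustrative, not engine API):

class TriangleSampleDemo {
	// Returns a uniformly distributed point inside triangle (a, b, c),
	// each vertex given as [x, y, z]. Mirrors the rx/ry reflection above.
	static function samplePoint(a: Array<Float>, b: Array<Float>, c: Array<Float>): Array<Float> {
		var rx = Math.random();
		var ry = Math.random();
		if (rx + ry > 1) { rx = 1 - rx; ry = 1 - ry; }
		return [
			a[0] + rx * (b[0] - a[0]) + ry * (c[0] - a[0]),
			a[1] + rx * (b[1] - a[1]) + ry * (c[1] - a[1]),
			a[2] + rx * (b[2] - a[2]) + ry * (c[2] - a[2])
		];
	}

	static function main() {
		trace(samplePoint([0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]));
	}
}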
@ -39,11 +39,11 @@ class Time {
	}

	public static inline function time(): Float {
		return kha.Scheduler.time();
		return kha.Scheduler.time() * scale;
	}

	public static inline function realTime(): Float {
		return kha.Scheduler.realTime();
		return kha.Scheduler.realTime() * scale;
	}

	public static function update() {

@ -94,34 +94,34 @@ class Tween {

				// Way too much Reflect trickery..
				var ps = Reflect.fields(a.props);
				for (i in 0...ps.length) {
					var p = ps[i];
				for (j in 0...ps.length) {
					var p = ps[j];
					var k = a._time / a.duration;
					if (k > 1) k = 1;

					if (a._comps[i] == 1) {
						var fromVal: Float = a._x[i];
					if (a._comps[j] == 1) {
						var fromVal: Float = a._x[j];
						var toVal: Float = Reflect.getProperty(a.props, p);
						var val: Float = fromVal + (toVal - fromVal) * eases[a.ease](k);
						Reflect.setProperty(a.target, p, val);
					}
					else { // _comps[i] == 4
					else { // _comps[j] == 4
						var obj = Reflect.getProperty(a.props, p);
						var toX: Float = Reflect.getProperty(obj, "x");
						var toY: Float = Reflect.getProperty(obj, "y");
						var toZ: Float = Reflect.getProperty(obj, "z");
						var toW: Float = Reflect.getProperty(obj, "w");
						if (a._normalize[i]) {
							var qdot = (a._x[i] * toX) + (a._y[i] * toY) + (a._z[i] * toZ) + (a._w[i] * toW);
						if (a._normalize[j]) {
							var qdot = (a._x[j] * toX) + (a._y[j] * toY) + (a._z[j] * toZ) + (a._w[j] * toW);
							if (qdot < 0.0) {
								toX = -toX; toY = -toY; toZ = -toZ; toW = -toW;
							}
						}
						var x: Float = a._x[i] + (toX - a._x[i]) * eases[a.ease](k);
						var y: Float = a._y[i] + (toY - a._y[i]) * eases[a.ease](k);
						var z: Float = a._z[i] + (toZ - a._z[i]) * eases[a.ease](k);
						var w: Float = a._w[i] + (toW - a._w[i]) * eases[a.ease](k);
						if (a._normalize[i]) {
						var x: Float = a._x[j] + (toX - a._x[j]) * eases[a.ease](k);
						var y: Float = a._y[j] + (toY - a._y[j]) * eases[a.ease](k);
						var z: Float = a._z[j] + (toZ - a._z[j]) * eases[a.ease](k);
						var w: Float = a._w[j] + (toW - a._w[j]) * eases[a.ease](k);
						if (a._normalize[j]) {
							var l = Math.sqrt(x * x + y * y + z * z + w * w);
							if (l > 0.0) {
								l = 1.0 / l;
leenkx/Sources/leenkx/logicnode/ProbabilisticIndexNode.hx (new file, 41 lines)
@ -0,0 +1,41 @@
package leenkx.logicnode;

class ProbabilisticIndexNode extends LogicNode {

	public function new(tree: LogicTree) {
		super(tree);
	}

	override function get(from: Int): Dynamic {

		var probs: Array<Float> = [];
		var probs_acum: Array<Float> = [];
		var sum: Float = 0;

		for (p in 0...inputs.length){
			probs.push(inputs[p].get());
			sum += probs[p];
		}

		if (sum > 1){
			for (p in 0...probs.length)
				probs[p] /= sum;
		}

		sum = 0;
		for (p in 0...probs.length){
			sum += probs[p];
			probs_acum.push(sum);
		}

		var rand: Float = Math.random();

		for (p in 0...probs.length){
			if (p == 0 && rand <= probs_acum[p]) return p;
			else if (0 < p && p < probs.length-1 && probs_acum[p-1] < rand && rand <= probs_acum[p]) return p;
			else if (p == probs.length-1 && probs_acum[p-1] < rand) return p;
		}

		return null;
	}
}
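To illustrate the node's selection logic: with inputs 0.2, 0.3 and 0.5 the cumulative array becomes [0.2, 0.5, 1.0], so a random draw of 0.4 falls in the second bucket and index 1 is returned. A simplified stand-alone sketch of the same cumulative pick (no LogicNode dependency; unlike the node it returns the last index instead of null when nothing matches):

class ProbabilisticPickDemo {
	// Normalize when the sum exceeds 1, accumulate, return the first index
	// whose cumulative bound reaches rand.
	static function pick(probs: Array<Float>, rand: Float): Int {
		var sum = 0.0;
		for (p in probs) sum += p;
		var acum = 0.0;
		for (i in 0...probs.length) {
			acum += (sum > 1) ? probs[i] / sum : probs[i];
			if (rand <= acum) return i;
		}
		return probs.length - 1;
	}

	static function main() {
		trace(pick([0.2, 0.3, 0.5], 0.4)); // 1
	}
}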
@ -1,5 +1,7 @@
package leenkx.logicnode;

import iron.data.SceneFormat;

class SetWorldNode extends LogicNode {

	public function new(tree: LogicTree) {
@ -10,25 +12,6 @@ class SetWorldNode extends LogicNode {
		var world: String = inputs[1].get();

		if (world != null){

			// check if world shader data exists
			var file: String = 'World_'+world+'_data';
			#if lnx_json
				file += ".json";
			#elseif lnx_compress
				file += ".lz4";
			#else
				file += '.lnx';
			#end

			var exists: Bool = false;

			iron.data.Data.getBlob(file, function(b: kha.Blob) {
				if (b != null) exists = true;
			});

			assert(Error, exists == true, "World must be either associated to a scene or have fake user");

			iron.Scene.active.raw.world_ref = world;
			var npath = leenkx.renderpath.RenderPathCreator.get();
			npath.loadShader("shader_datas/World_" + world + "/World_" + world);

@ -641,18 +641,20 @@ class RenderPathForward {
			var framebuffer = "";
			#end

			#if ((rp_antialiasing == "Off") || (rp_antialiasing == "FXAA"))
			RenderPathCreator.finalTarget = path.currentTarget;

			var target = "";
			#if ((rp_antialiasing == "Off") || (rp_antialiasing == "FXAA") || (!rp_render_to_texture))
			{
				RenderPathCreator.finalTarget = path.currentTarget;
				path.setTarget(framebuffer);
				target = framebuffer;
			}
			#else
			{
				path.setTarget("buf");
				RenderPathCreator.finalTarget = path.currentTarget;
				target = "buf";
			}
			#end

			path.setTarget(target);

			#if rp_compositordepth
			{
				path.bindTarget("_main", "gbufferD");
@ -671,6 +673,15 @@ class RenderPathForward {
			}
			#end

			#if rp_overlays
			{
				path.setTarget(target);
				path.clearTarget(null, 1.0);
				path.drawMeshes("overlay");
			}
			#end

			#if ((rp_antialiasing == "SMAA") || (rp_antialiasing == "TAA"))
			{
				path.setTarget("bufa");
@ -701,12 +712,6 @@ class RenderPathForward {
		}
		#end

		#if rp_overlays
		{
			path.clearTarget(null, 1.0);
			path.drawMeshes("overlay");
		}
		#end
	}

	public static function setupDepthTexture() {

@ -3,33 +3,35 @@ package leenkx.system;
import haxe.Constraints.Function;

class Signal {
    var callbacks:Array<Function> = [];
    var callbacks: Array<Function> = [];

    public function new() {

    }

    public function connect(callback:Function) {
    public function connect(callback: Function) {
        if (!callbacks.contains(callback)) callbacks.push(callback);
    }

    public function disconnect(callback:Function) {
    public function disconnect(callback: Function) {
        if (callbacks.contains(callback)) callbacks.remove(callback);
    }

    public function emit(...args:Any) {
        for (callback in callbacks) Reflect.callMethod(this, callback, args);
    public function emit(...args: Any) {
        for (callback in callbacks.copy()) {
            if (callbacks.contains(callback)) Reflect.callMethod(null, callback, args);
        }
    }

    public function getConnections():Array<Function> {
    public function getConnections(): Array<Function> {
        return callbacks;
    }

    public function isConnected(callBack:Function):Bool {
    public function isConnected(callBack: Function): Bool {
        return callbacks.contains(callBack);
    }

    public function isNull():Bool {
    public function isNull(): Bool {
        return callbacks.length == 0;
    }
}

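A short usage sketch of the updated Signal (the signal and callback names are made up): because emit() now iterates a copy of the callback list and re-checks membership, a listener can safely disconnect itself, or another listener, while the signal is being emitted.

class SignalDemo {
	static function main() {
		var onHit = new leenkx.system.Signal();
		var shout = function() { trace("hit"); };
		var once: Void->Void = null;
		once = function() {
			trace("hit once");
			onHit.disconnect(once); // safe mid-emit: the loop walks a copy of the list
		};
		onHit.connect(shout);
		onHit.connect(once);
		onHit.emit(); // hit, hit once
		onHit.emit(); // hit
	}
}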
@ -57,7 +57,7 @@ class Starter {
						iron.Scene.getRenderPath = getRenderPath;
						#end
						#if lnx_draworder_shader
						iron.RenderPath.active.drawOrder = iron.RenderPath.DrawOrder.Shader;
						iron.RenderPath.active.drawOrder = iron.RenderPath.DrawOrder.Index;
						#end // else Distance
					});
				});

@ -1,87 +1,243 @@
package leenkx.trait;

import iron.Trait;
import iron.math.Vec4;
import iron.system.Input;
import iron.object.Object;
import iron.object.CameraObject;
import leenkx.trait.physics.PhysicsWorld;
import leenkx.trait.internal.CameraController;
import leenkx.trait.physics.RigidBody;
import kha.FastFloat;

class FirstPersonController extends CameraController {
class FirstPersonController extends Trait {

#if (!lnx_physics)
	public function new() { super(); }
#else
    #if (!lnx_physics)
    public function new() { super(); }
    #else

	var head: Object;
	static inline var rotationSpeed = 2.0;
    @prop public var rotationSpeed:Float = 0.15;
    @prop public var maxPitch:Float = 2.2;
    @prop public var minPitch:Float = 0.5;
    @prop public var enableJump:Bool = true;
    @prop public var jumpForce:Float = 22.0;
    @prop public var moveSpeed:Float = 500.0;

	public function new() {
		super();
    @prop public var forwardKey:String = "w";
    @prop public var backwardKey:String = "s";
    @prop public var leftKey:String = "a";
    @prop public var rightKey:String = "d";
    @prop public var jumpKey:String = "space";

		iron.Scene.active.notifyOnInit(init);
	}
    @prop public var allowAirJump:Bool = false;

	function init() {
		head = object.getChildOfType(CameraObject);
    @prop public var canRun:Bool = true;
    @prop public var runKey:String = "shift";
    @prop public var runSpeed:Float = 1000.0;

		PhysicsWorld.active.notifyOnPreUpdate(preUpdate);
		notifyOnUpdate(update);
		notifyOnRemove(removed);
	}
    // Stamina system
    @prop public var stamina:Bool = false;
    @prop public var staminaBase:Float = 75.0;
    @prop public var staRecoverPerSec:Float = 5.0;
    @prop public var staDecreasePerSec:Float = 5.0;
    @prop public var staRecoverTime:Float = 2.0;
    @prop public var staDecreasePerJump:Float = 5.0;
    @prop public var enableFatigue:Bool = false;
    @prop public var fatigueSpeed:Float = 0.5;  // Movement reduction applied while fatigued
    @prop public var fatigueThreshold:Float = 30.0; // Time running non-stop before fatigue activates
    @prop public var fatRecoveryThreshold:Float = 7.5; // Time without running/jumping needed to recover from fatigue

	var xVec = Vec4.xAxis();
	var zVec = Vec4.zAxis();
	function preUpdate() {
		if (Input.occupied || !body.ready) return;
    // Private vars
    var head:CameraObject;
    var pitch:Float = 0.0;
    var body:RigidBody;

		var mouse = Input.getMouse();
		var kb = Input.getKeyboard();
    var moveForward:Bool = false;
    var moveBackward:Bool = false;
    var moveLeft:Bool = false;
    var moveRight:Bool = false;
    var isRunning:Bool = false;

		if (mouse.started() && !mouse.locked) mouse.lock();
		else if (kb.started("escape") && mouse.locked) mouse.unlock();
    var canJump:Bool = true;
    var staminaValue:Float = 0.0;
    var timeSinceStop:Float = 0.0;

		if (mouse.locked || mouse.down()) {
			head.transform.rotate(xVec, -mouse.movementY / 250 * rotationSpeed);
			transform.rotate(zVec, -mouse.movementX / 250 * rotationSpeed);
			body.syncTransform();
    var fatigueTimer:Float = 0.0;
    var fatigueCooldown:Float = 0.0;
    var isFatigueActive:Bool = false;

    public function new() {
        super();
        iron.Scene.active.notifyOnInit(init);
    }

    function init() {
        body = object.getTrait(RigidBody);
        head = object.getChildOfType(CameraObject);
        PhysicsWorld.active.notifyOnPreUpdate(preUpdate);
        notifyOnUpdate(update);
        notifyOnRemove(removed);
        staminaValue = staminaBase;
    }

    function removed() {
        PhysicsWorld.active.removePreUpdate(preUpdate);
    }

    var zVec = Vec4.zAxis();

    function preUpdate() {
        if (Input.occupied || body == null) return;
        var mouse = Input.getMouse();
        var kb = Input.getKeyboard();

        if (mouse.started() && !mouse.locked)
            mouse.lock();
        else if (kb.started("escape") && mouse.locked)
            mouse.unlock();

        if (mouse.locked || mouse.down()) {
            var deltaTime:Float = iron.system.Time.delta;
            object.transform.rotate(zVec, -mouse.movementX * rotationSpeed * deltaTime);
            var deltaPitch:Float = -(mouse.movementY * rotationSpeed * deltaTime);
            pitch += deltaPitch;
            pitch = Math.max(minPitch, Math.min(maxPitch, pitch));
            head.transform.setRotation(pitch, 0.0, 0.0);
            body.syncTransform();
        }
    }

    var dir:Vec4 = new Vec4();

    function isFatigued():Bool {
        return enableFatigue && isFatigueActive;
    }

    function update() {
        if (body == null) return;
        var deltaTime:Float = iron.system.Time.delta;
        var kb = Input.getKeyboard();

        moveForward = kb.down(forwardKey);
        moveBackward = kb.down(backwardKey);
        moveLeft = kb.down(leftKey);
        moveRight = kb.down(rightKey);
        var isMoving = moveForward || moveBackward || moveLeft || moveRight;

        var isGrounded:Bool = false;
        #if lnx_physics
        var vel = body.getLinearVelocity();
        if (Math.abs(vel.z) < 0.1) {
            isGrounded = true;
        }
        #end

        // Jumping takes the fatigue flag (enableFatigue) into account, whether it is false or true
		if (isGrounded && !isFatigued()) {
		    canJump = true;
		}
	}
        // Jump with stamina
        if (enableJump && kb.started(jumpKey) && canJump) {
            var jumpPower = jumpForce;
            // Halve the jump if stamina is at or below 20%
            if (stamina) {
                if (staminaValue <= 0) {
                    jumpPower = 0;
                } else if (staminaValue <= staminaBase * 0.2) {
                    jumpPower *= 0.5;
                }

	function removed() {
		PhysicsWorld.active.removePreUpdate(preUpdate);
	}
                staminaValue -= staDecreasePerJump;
                if (staminaValue < 0.0) staminaValue = 0.0;
                timeSinceStop = 0.0;
            }

	var dir = new Vec4();
	function update() {
		if (!body.ready) return;
            if (jumpPower > 0) {
                body.applyImpulse(new Vec4(0, 0, jumpPower));
                if (!allowAirJump) canJump = false;
            }
        }

		if (jump) {
			body.applyImpulse(new Vec4(0, 0, 16));
			jump = false;
        // Stamina and running control
        if (canRun && kb.down(runKey) && isMoving) {
            if (stamina) {
                if (staminaValue > 0.0) {
                    isRunning = true;
                    staminaValue -= staDecreasePerSec * deltaTime;
                    if (staminaValue < 0.0) staminaValue = 0.0;
                } else {
                    isRunning = false;
                }
            } else {
                isRunning = true;
            }
        } else {
            isRunning = false;
        }

        // (separate timers)
        if (isRunning) {
            timeSinceStop = 0.0;
            fatigueTimer += deltaTime;
            fatigueCooldown = 0.0;
        } else {
            timeSinceStop += deltaTime;
            fatigueCooldown += deltaTime;
        }

        // Prevent running and jumping while fatigued
        if (isFatigued()) {
            isRunning = false;
            canJump = false;
		}

		// Move
		dir.set(0, 0, 0);
		if (moveForward) dir.add(transform.look());
		if (moveBackward) dir.add(transform.look().mult(-1));
		if (moveLeft) dir.add(transform.right().mult(-1));
		if (moveRight) dir.add(transform.right());
        // Activate fatigue after running continuously past the threshold
        if (enableFatigue && fatigueTimer >= fatigueThreshold) {
            isFatigueActive = true;
        }

		// Push down
		var btvec = body.getLinearVelocity();
		body.setLinearVelocity(0.0, 0.0, btvec.z - 1.0);
        // Clear fatigue after recovering
        if (enableFatigue && isFatigueActive && fatigueCooldown >= fatRecoveryThreshold) {
            isFatigueActive = false;
            fatigueTimer = 0.0;
        }

		if (moveForward || moveBackward || moveLeft || moveRight) {
			var dirN = dir.normalize();
			dirN.mult(6);
			body.activate();
			body.setLinearVelocity(dirN.x, dirN.y, btvec.z - 1.0);
		}
        // Recover stamina while not running
        if (stamina && !isRunning && staminaValue < staminaBase && !isFatigued()) {
            if (timeSinceStop >= staRecoverTime) {
                staminaValue += staRecoverPerSec * deltaTime;
                if (staminaValue > staminaBase) staminaValue = staminaBase;
            }
        }

		// Keep vertical
		body.setAngularFactor(0, 0, 0);
		camera.buildMatrix();
	}
#end
        // Movement along local axes
        dir.set(0, 0, 0);
        if (moveForward) dir.add(object.transform.look());
        if (moveBackward) dir.add(object.transform.look().mult(-1));
        if (moveLeft) dir.add(object.transform.right().mult(-1));
        if (moveRight) dir.add(object.transform.right());

        var btvec = body.getLinearVelocity();
        body.setLinearVelocity(0.0, 0.0, btvec.z - 1.0);

        if (isMoving) {
            var dirN = dir.normalize();
            var baseSpeed = moveSpeed;
            if (isRunning && moveForward) {
                baseSpeed = runSpeed;
            }
            var currentSpeed = isFatigued() ? baseSpeed * fatigueSpeed : baseSpeed;
            dirN.mult(currentSpeed * deltaTime);
            body.activate();
            body.setLinearVelocity(dirN.x, dirN.y, btvec.z - 1.0);
        }

        body.setAngularFactor(0, 0, 0);
        head.buildMatrix();
    }

    #end
}

// Stamina and fatigue system
leenkx/blender/data/lnx_data_2.blend (new binary file, not shown)
@ -1,9 +1,17 @@
import importlib
import sys
import types
import bpy

# This gets cleared if this package/the __init__ module is reloaded
_module_cache: dict[str, types.ModuleType] = {}
if bpy.app.version < (2, 92, 0):
    from typing import Dict
    ModuleCacheType = Dict[str, types.ModuleType]
else:
    ModuleCacheType = dict[str, types.ModuleType]

_module_cache: ModuleCacheType = {}


def enable_reload(module_name: str):

@ -15,7 +15,14 @@ from enum import Enum, unique
import math
import os
import time
from typing import Any, Dict, List, Tuple, Union, Optional
from typing import Any, Dict, List, Tuple, Union, Optional, TYPE_CHECKING
import bpy

if bpy.app.version >= (3, 0, 0):
    VertexColorType = bpy.types.Attribute
else:
    VertexColorType = bpy.types.MeshLoopColorLayer

import numpy as np

@ -138,7 +145,7 @@ class LeenkxExporter:
        self.world_array = []
        self.particle_system_array = {}

        self.referenced_collections: list[bpy.types.Collection] = []
        self.referenced_collections: List[bpy.types.Collection] = []
        """Collections referenced by collection instances"""

        self.has_spawning_camera = False
@ -1449,31 +1456,38 @@ class LeenkxExporter:
    @staticmethod
    def get_num_vertex_colors(mesh: bpy.types.Mesh) -> int:
        """Return the amount of vertex color attributes of the given mesh."""
        num = 0
        for attr in mesh.attributes:
            if attr.data_type in ('BYTE_COLOR', 'FLOAT_COLOR'):
                if attr.domain == 'CORNER':
                    num += 1
                else:
                    log.warn(f'Only vertex colors with domain "Face Corner" are supported for now, ignoring "{attr.name}"')

        return num
        if bpy.app.version >= (3, 0, 0):
            num = 0
            for attr in mesh.attributes:
                if attr.data_type in ('BYTE_COLOR', 'FLOAT_COLOR'):
                    if attr.domain == 'CORNER':
                        num += 1
                    else:
                        log.warn(f'Only vertex colors with domain "Face Corner" are supported for now, ignoring "{attr.name}"')
            return num
        else:
            return len(mesh.vertex_colors)

    @staticmethod
    def get_nth_vertex_colors(mesh: bpy.types.Mesh, n: int) -> Optional[bpy.types.Attribute]:
    def get_nth_vertex_colors(mesh: bpy.types.Mesh, n: int) -> Optional[VertexColorType]:
        """Return the n-th vertex color attribute from the given mesh,
        ignoring all other attribute types and unsupported domains.
        """
        i = 0
        for attr in mesh.attributes:
            if attr.data_type in ('BYTE_COLOR', 'FLOAT_COLOR'):
                if attr.domain != 'CORNER':
                    log.warn(f'Only vertex colors with domain "Face Corner" are supported for now, ignoring "{attr.name}"')
                    continue
                if i == n:
                    return attr
                i += 1
        return None
        if bpy.app.version >= (3, 0, 0):
            i = 0
            for attr in mesh.attributes:
                if attr.data_type in ('BYTE_COLOR', 'FLOAT_COLOR'):
                    if attr.domain != 'CORNER':
                        log.warn(f'Only vertex colors with domain "Face Corner" are supported for now, ignoring "{attr.name}"')
                        continue
                    if i == n:
                        return attr
                    i += 1
            return None
        else:
            if 0 <= n < len(mesh.vertex_colors):
                return mesh.vertex_colors[n]
            return None

    @staticmethod
    def check_uv_precision(mesh: bpy.types.Mesh, uv_max_dim: float, max_dim_uvmap: bpy.types.MeshUVLoopLayer, invscale_tex: float):
@ -1727,6 +1741,7 @@ class LeenkxExporter:
            tangdata = np.array(tangdata, dtype='<i2')

        # Output
        o['sorting_index'] = bobject.lnx_sorting_index
        o['vertex_arrays'] = []
        o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
        o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
@ -1979,7 +1994,7 @@ class LeenkxExporter:
            if bobject.parent is None or bobject.parent.name not in collection.objects:
                asset_name = lnx.utils.asset_name(bobject)

                if collection.library:
                if collection.library and not collection.name in self.scene.collection.children:
                    # Add external linked objects
                    # Iron differentiates objects based on their names,
                    # so errors will happen if two objects with the
@ -2208,6 +2223,9 @@ class LeenkxExporter:
            elif material.lnx_cull_mode != 'clockwise':
                o['override_context'] = {}
                o['override_context']['cull_mode'] = material.lnx_cull_mode
            if material.lnx_compare_mode != 'less':
                o['override_context'] = {}
                o['override_context']['compare_mode'] = material.lnx_compare_mode

            o['contexts'] = []

@ -2330,6 +2348,7 @@ class LeenkxExporter:
                'name': particleRef[1]["structName"],
                'type': 0 if psettings.type == 'EMITTER' else 1, # HAIR
                'auto_start': psettings.lnx_auto_start,
                'dynamic_emitter': psettings.lnx_dynamic_emitter,
                'is_unique': psettings.lnx_is_unique,
                'loop': psettings.lnx_loop,
                # Emission
@ -2395,7 +2414,7 @@ class LeenkxExporter:
        world = self.scene.world

        if world is not None:
            world_name = lnx.utils.safestr(world.name)
            world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)

            if world_name not in self.world_array:
                self.world_array.append(world_name)
@ -2544,12 +2563,12 @@ class LeenkxExporter:
                if collection.name.startswith(('RigidBodyWorld', 'Trait|')):
                    continue

                if self.scene.user_of_id(collection) or collection.library or collection in self.referenced_collections:
                if self.scene.user_of_id(collection) or collection in self.referenced_collections:
                    self.export_collection(collection)

        if not LeenkxExporter.option_mesh_only:
            if self.scene.camera is not None:
                self.output['camera_ref'] = self.scene.camera.name
                self.output['camera_ref'] = lnx.utils.asset_name(self.scene.camera) if self.scene.library else self.scene.camera.name
            else:
                if self.scene.name == lnx.utils.get_project_scene_name():
                    log.warn(f'Scene "{self.scene.name}" is missing a camera')
@ -2573,7 +2592,7 @@ class LeenkxExporter:
            self.export_tilesheets()

            if self.scene.world is not None:
                self.output['world_ref'] = lnx.utils.safestr(self.scene.world.name)
                self.output['world_ref'] = lnx.utils.safestr(lnx.utils.asset_name(self.scene.world) if self.scene.world.library else self.scene.world.name)

            if self.scene.use_gravity:
                self.output['gravity'] = [self.scene.gravity[0], self.scene.gravity[1], self.scene.gravity[2]]
@ -3089,7 +3108,18 @@ class LeenkxExporter:
 | 
			
		||||
 | 
			
		||||
            rbw = self.scene.rigidbody_world
 | 
			
		||||
            if rbw is not None and rbw.enabled:
 | 
			
		||||
                out_trait['parameters'] = [str(rbw.time_scale), str(rbw.substeps_per_frame), str(rbw.solver_iterations), str(wrd.lnx_physics_fixed_step)]
 | 
			
		||||
                if hasattr(rbw, 'substeps_per_frame'):
 | 
			
		||||
                    substeps = str(rbw.substeps_per_frame)
 | 
			
		||||
                elif hasattr(rbw, 'steps_per_second'):
 | 
			
		||||
                    scene_fps = bpy.context.scene.render.fps
 | 
			
		||||
                    substeps_per_frame = rbw.steps_per_second / scene_fps
 | 
			
		||||
                    substeps = str(int(round(substeps_per_frame)))
 | 
			
		||||
                else:
 | 
			
		||||
                    print("WARNING: Physics rigid body world cannot determine steps/substeps. Please report this for further investigation.")
 | 
			
		||||
                    print("Setting steps to 10 [ low ]")
 | 
			
		||||
                    substeps = '10'
 | 
			
		||||
                
 | 
			
		||||
                out_trait['parameters'] = [str(rbw.time_scale), substeps, str(rbw.solver_iterations), str(wrd.lnx_physics_fixed_step)]
 | 
			
		||||
 | 
			
		||||
                if phys_pkg == 'bullet' or phys_pkg == 'oimo':
 | 
			
		||||
                    debug_draw_mode = 1 if wrd.lnx_physics_dbg_draw_wireframe else 0
 | 
			
		||||
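The added branch resolves Bullet substeps across Blender versions: newer builds expose rigidbody_world.substeps_per_frame directly, while older ones only expose steps_per_second, which is converted using the scene frame rate. A minimal sketch of that resolution order, using a hypothetical helper name and assuming the bpy attributes shown above:

def resolve_substeps(rbw, scene_fps, fallback=10):
    # Prefer the newer per-frame attribute where it exists.
    if hasattr(rbw, 'substeps_per_frame'):
        return int(rbw.substeps_per_frame)
    # Older builds only expose steps per second; convert via the scene fps.
    if hasattr(rbw, 'steps_per_second'):
        return max(1, int(round(rbw.steps_per_second / scene_fps)))
    # Unknown API surface: fall back to a conservative default.
    return fallback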
@ -3376,7 +3406,7 @@ class LeenkxExporter:
 | 
			
		||||
        if mobile_mat:
 | 
			
		||||
            lnx_radiance = False
 | 
			
		||||
 | 
			
		||||
        out_probe = {'name': world.name}
 | 
			
		||||
        out_probe = {'name': lnx.utils.asset_name(world) if world.library else world.name}
 | 
			
		||||
        if lnx_irradiance:
 | 
			
		||||
            ext = '' if wrd.lnx_minimize else '.json'
 | 
			
		||||
            out_probe['irradiance'] = irrsharmonics + '_irradiance' + ext
 | 
			
		||||
 | 
			
		||||
@ -1,445 +1,450 @@
 | 
			
		||||
"""
 | 
			
		||||
Exports smaller geometry but is slower.
 | 
			
		||||
To be replaced with https://github.com/zeux/meshoptimizer
 | 
			
		||||
"""
 | 
			
		||||
from typing import Optional
 | 
			
		||||
 | 
			
		||||
import bpy
 | 
			
		||||
from mathutils import Vector
 | 
			
		||||
import numpy as np
 | 
			
		||||
 | 
			
		||||
import lnx.utils
 | 
			
		||||
from lnx import log
 | 
			
		||||
 | 
			
		||||
if lnx.is_reload(__name__):
 | 
			
		||||
    log = lnx.reload_module(log)
 | 
			
		||||
    lnx.utils = lnx.reload_module(lnx.utils)
 | 
			
		||||
else:
 | 
			
		||||
    lnx.enable_reload(__name__)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Vertex:
 | 
			
		||||
    __slots__ = ("co", "normal", "uvs", "col", "loop_indices", "index", "bone_weights", "bone_indices", "bone_count", "vertex_index")
 | 
			
		||||
 | 
			
		||||
    def __init__(self, mesh: bpy.types.Mesh, loop: bpy.types.MeshLoop, vcol0: Optional[bpy.types.Attribute]):
 | 
			
		||||
        self.vertex_index = loop.vertex_index
 | 
			
		||||
        loop_idx = loop.index
 | 
			
		||||
        self.co = mesh.vertices[self.vertex_index].co[:]
 | 
			
		||||
        self.normal = loop.normal[:]
 | 
			
		||||
        self.uvs = tuple(layer.data[loop_idx].uv[:] for layer in mesh.uv_layers)
 | 
			
		||||
        self.col = [0.0, 0.0, 0.0] if vcol0 is None else vcol0.data[loop_idx].color[:]
 | 
			
		||||
        self.loop_indices = [loop_idx]
 | 
			
		||||
        self.index = 0
 | 
			
		||||
 | 
			
		||||
    def __hash__(self):
 | 
			
		||||
        return hash((self.co, self.normal, self.uvs))
 | 
			
		||||
 | 
			
		||||
    def __eq__(self, other):
 | 
			
		||||
        eq = (
 | 
			
		||||
            (self.co == other.co) and
 | 
			
		||||
            (self.normal == other.normal) and
 | 
			
		||||
            (self.uvs == other.uvs) and
 | 
			
		||||
            (self.col == other.col)
 | 
			
		||||
            )
 | 
			
		||||
        if eq:
 | 
			
		||||
            indices = self.loop_indices + other.loop_indices
 | 
			
		||||
            self.loop_indices = indices
 | 
			
		||||
            other.loop_indices = indices
 | 
			
		||||
        return eq
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def calc_tangents(posa, nora, uva, ias, scale_pos):
 | 
			
		||||
    num_verts = int(len(posa) / 4)
 | 
			
		||||
    tangents = np.empty(num_verts * 3, dtype='<f4')
 | 
			
		||||
    # bitangents = np.empty(num_verts * 3, dtype='<f4')
 | 
			
		||||
    for ar in ias:
 | 
			
		||||
        ia = ar['values']
 | 
			
		||||
        num_tris = int(len(ia) / 3)
 | 
			
		||||
        for i in range(0, num_tris):
 | 
			
		||||
            i0 = ia[i * 3    ]
 | 
			
		||||
            i1 = ia[i * 3 + 1]
 | 
			
		||||
            i2 = ia[i * 3 + 2]
 | 
			
		||||
            v0 = Vector((posa[i0 * 4], posa[i0 * 4 + 1], posa[i0 * 4 + 2]))
 | 
			
		||||
            v1 = Vector((posa[i1 * 4], posa[i1 * 4 + 1], posa[i1 * 4 + 2]))
 | 
			
		||||
            v2 = Vector((posa[i2 * 4], posa[i2 * 4 + 1], posa[i2 * 4 + 2]))
 | 
			
		||||
            uv0 = Vector((uva[i0 * 2], uva[i0 * 2 + 1]))
 | 
			
		||||
            uv1 = Vector((uva[i1 * 2], uva[i1 * 2 + 1]))
 | 
			
		||||
            uv2 = Vector((uva[i2 * 2], uva[i2 * 2 + 1]))
 | 
			
		||||
 | 
			
		||||
            deltaPos1 = v1 - v0
 | 
			
		||||
            deltaPos2 = v2 - v0
 | 
			
		||||
            deltaUV1 = uv1 - uv0
 | 
			
		||||
            deltaUV2 = uv2 - uv0
 | 
			
		||||
            d = (deltaUV1.x * deltaUV2.y - deltaUV1.y * deltaUV2.x)
 | 
			
		||||
            if d != 0:
 | 
			
		||||
                r = 1.0 / d
 | 
			
		||||
            else:
 | 
			
		||||
                r = 1.0
 | 
			
		||||
            tangent = (deltaPos1 * deltaUV2.y - deltaPos2 * deltaUV1.y) * r
 | 
			
		||||
            # bitangent = (deltaPos2 * deltaUV1.x - deltaPos1 * deltaUV2.x) * r
 | 
			
		||||
 | 
			
		||||
            tangents[i0 * 3    ] += tangent.x
 | 
			
		||||
            tangents[i0 * 3 + 1] += tangent.y
 | 
			
		||||
            tangents[i0 * 3 + 2] += tangent.z
 | 
			
		||||
            tangents[i1 * 3    ] += tangent.x
 | 
			
		||||
            tangents[i1 * 3 + 1] += tangent.y
 | 
			
		||||
            tangents[i1 * 3 + 2] += tangent.z
 | 
			
		||||
            tangents[i2 * 3    ] += tangent.x
 | 
			
		||||
            tangents[i2 * 3 + 1] += tangent.y
 | 
			
		||||
            tangents[i2 * 3 + 2] += tangent.z
 | 
			
		||||
            # bitangents[i0 * 3    ] += bitangent.x
 | 
			
		||||
            # bitangents[i0 * 3 + 1] += bitangent.y
 | 
			
		||||
            # bitangents[i0 * 3 + 2] += bitangent.z
 | 
			
		||||
            # bitangents[i1 * 3    ] += bitangent.x
 | 
			
		||||
            # bitangents[i1 * 3 + 1] += bitangent.y
 | 
			
		||||
            # bitangents[i1 * 3 + 2] += bitangent.z
 | 
			
		||||
            # bitangents[i2 * 3    ] += bitangent.x
 | 
			
		||||
            # bitangents[i2 * 3 + 1] += bitangent.y
 | 
			
		||||
            # bitangents[i2 * 3 + 2] += bitangent.z
 | 
			
		||||
    # Orthogonalize
 | 
			
		||||
    for i in range(0, num_verts):
 | 
			
		||||
        t = Vector((tangents[i * 3], tangents[i * 3 + 1], tangents[i * 3 + 2]))
 | 
			
		||||
        # b = Vector((bitangents[i * 3], bitangents[i * 3 + 1], bitangents[i * 3 + 2]))
 | 
			
		||||
        n = Vector((nora[i * 2], nora[i * 2 + 1], posa[i * 4 + 3] / scale_pos))
 | 
			
		||||
        v = t - n * n.dot(t)
 | 
			
		||||
        v.normalize()
 | 
			
		||||
        # Calculate handedness
 | 
			
		||||
        # cnv = n.cross(v)
 | 
			
		||||
        # if cnv.dot(b) < 0.0:
 | 
			
		||||
            # v = v * -1.0
 | 
			
		||||
        tangents[i * 3    ] = v.x
 | 
			
		||||
        tangents[i * 3 + 1] = v.y
 | 
			
		||||
        tangents[i * 3 + 2] = v.z
 | 
			
		||||
    return tangents
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def export_mesh_data(self, export_mesh: bpy.types.Mesh, bobject: bpy.types.Object, o, has_armature=False):
 | 
			
		||||
    if bpy.app.version < (4, 1, 0):
 | 
			
		||||
        export_mesh.calc_normals_split()
 | 
			
		||||
    else:
 | 
			
		||||
        updated_normals = export_mesh.corner_normals
 | 
			
		||||
    # exportMesh.calc_loop_triangles()
 | 
			
		||||
    vcol0 = self.get_nth_vertex_colors(export_mesh, 0)
 | 
			
		||||
    vert_list = {Vertex(export_mesh, loop, vcol0): 0 for loop in export_mesh.loops}.keys()
 | 
			
		||||
    num_verts = len(vert_list)
 | 
			
		||||
    num_uv_layers = len(export_mesh.uv_layers)
 | 
			
		||||
    # Check if shape keys were exported
 | 
			
		||||
    has_morph_target = self.get_shape_keys(bobject.data)
 | 
			
		||||
    if has_morph_target:
 | 
			
		||||
        # Shape keys UV are exported separately, so reduce UV count by 1
 | 
			
		||||
        num_uv_layers -= 1
 | 
			
		||||
        morph_uv_index = self.get_morph_uv_index(bobject.data)
 | 
			
		||||
    has_tex = self.get_export_uvs(export_mesh) and num_uv_layers > 0
 | 
			
		||||
    if self.has_baked_material(bobject, export_mesh.materials):
 | 
			
		||||
        has_tex = True
 | 
			
		||||
    has_tex1 = has_tex and num_uv_layers > 1
 | 
			
		||||
    num_colors = self.get_num_vertex_colors(export_mesh)
 | 
			
		||||
    has_col = self.get_export_vcols(export_mesh) and num_colors > 0
 | 
			
		||||
    has_tang = self.has_tangents(export_mesh)
 | 
			
		||||
 | 
			
		||||
    pdata = np.empty(num_verts * 4, dtype='<f4') # p.xyz, n.z
 | 
			
		||||
    ndata = np.empty(num_verts * 2, dtype='<f4') # n.xy
 | 
			
		||||
    if has_tex or has_morph_target:
 | 
			
		||||
        uv_layers = export_mesh.uv_layers
 | 
			
		||||
        maxdim = 1.0
 | 
			
		||||
        maxdim_uvlayer = None
 | 
			
		||||
        if has_tex:
 | 
			
		||||
            t0map = 0 # Get active uvmap
 | 
			
		||||
            t0data = np.empty(num_verts * 2, dtype='<f4')
 | 
			
		||||
            if uv_layers is not None:
 | 
			
		||||
                if 'UVMap_baked' in uv_layers:
 | 
			
		||||
                    for i in range(0, len(uv_layers)):
 | 
			
		||||
                        if uv_layers[i].name == 'UVMap_baked':
 | 
			
		||||
                            t0map = i
 | 
			
		||||
                            break
 | 
			
		||||
                else:
 | 
			
		||||
                    for i in range(0, len(uv_layers)):
 | 
			
		||||
                        if uv_layers[i].active_render and uv_layers[i].name != 'UVMap_shape_key':
 | 
			
		||||
                            t0map = i
 | 
			
		||||
                            break
 | 
			
		||||
            if has_tex1:
 | 
			
		||||
                for i in range(0, len(uv_layers)):
 | 
			
		||||
                    # Not UVMap 0
 | 
			
		||||
                    if i != t0map:
 | 
			
		||||
                        # Not Shape Key UVMap
 | 
			
		||||
                        if has_morph_target and uv_layers[i].name == 'UVMap_shape_key':
 | 
			
		||||
                            continue
 | 
			
		||||
                        # Neither UVMap 0 Nor Shape Key Map
 | 
			
		||||
                        t1map = i
 | 
			
		||||
                t1data = np.empty(num_verts * 2, dtype='<f4')
 | 
			
		||||
            # Scale for packed coords
 | 
			
		||||
            lay0 = uv_layers[t0map]
 | 
			
		||||
            maxdim_uvlayer = lay0
 | 
			
		||||
            for v in lay0.data:
 | 
			
		||||
                if abs(v.uv[0]) > maxdim:
 | 
			
		||||
                    maxdim = abs(v.uv[0])
 | 
			
		||||
                if abs(v.uv[1]) > maxdim:
 | 
			
		||||
                    maxdim = abs(v.uv[1])
 | 
			
		||||
            if has_tex1:
 | 
			
		||||
                lay1 = uv_layers[t1map]
 | 
			
		||||
                for v in lay1.data:
 | 
			
		||||
                    if abs(v.uv[0]) > maxdim:
 | 
			
		||||
                        maxdim = abs(v.uv[0])
 | 
			
		||||
                        maxdim_uvlayer = lay1
 | 
			
		||||
                    if abs(v.uv[1]) > maxdim:
 | 
			
		||||
                        maxdim = abs(v.uv[1])
 | 
			
		||||
                        maxdim_uvlayer = lay1
 | 
			
		||||
        if has_morph_target:
 | 
			
		||||
            morph_data = np.empty(num_verts * 2, dtype='<f4')
 | 
			
		||||
            lay2 = uv_layers[morph_uv_index]
 | 
			
		||||
            for v in lay2.data:
 | 
			
		||||
                if abs(v.uv[0]) > maxdim:
 | 
			
		||||
                    maxdim = abs(v.uv[0])
 | 
			
		||||
                    maxdim_uvlayer = lay2
 | 
			
		||||
                if abs(v.uv[1]) > maxdim:
 | 
			
		||||
                    maxdim = abs(v.uv[1])
 | 
			
		||||
                    maxdim_uvlayer = lay2
 | 
			
		||||
        if maxdim > 1:
 | 
			
		||||
            o['scale_tex'] = maxdim
 | 
			
		||||
            invscale_tex = (1 / o['scale_tex']) * 32767
 | 
			
		||||
        else:
 | 
			
		||||
            invscale_tex = 1 * 32767
 | 
			
		||||
        self.check_uv_precision(export_mesh, maxdim, maxdim_uvlayer, invscale_tex)
 | 
			
		||||
 | 
			
		||||
    if has_col:
 | 
			
		||||
        cdata = np.empty(num_verts * 3, dtype='<f4')
 | 
			
		||||
 | 
			
		||||
    # Save aabb
 | 
			
		||||
    self.calc_aabb(bobject)
 | 
			
		||||
 | 
			
		||||
    # Scale for packed coords
 | 
			
		||||
    maxdim = max(bobject.data.lnx_aabb[0], max(bobject.data.lnx_aabb[1], bobject.data.lnx_aabb[2]))
 | 
			
		||||
    if maxdim > 2:
 | 
			
		||||
        o['scale_pos'] = maxdim / 2
 | 
			
		||||
    else:
 | 
			
		||||
        o['scale_pos'] = 1.0
 | 
			
		||||
    if has_armature: # Allow up to 2x bigger bounds for skinned mesh
 | 
			
		||||
        o['scale_pos'] *= 2.0
 | 
			
		||||
 | 
			
		||||
    scale_pos = o['scale_pos']
 | 
			
		||||
    invscale_pos = (1 / scale_pos) * 32767
 | 
			
		||||
 | 
			
		||||
    # Make arrays
 | 
			
		||||
    for i, v in enumerate(vert_list):
 | 
			
		||||
        v.index = i
 | 
			
		||||
        co = v.co
 | 
			
		||||
        normal = v.normal
 | 
			
		||||
        i4 = i * 4
 | 
			
		||||
        i2 = i * 2
 | 
			
		||||
        pdata[i4    ] = co[0]
 | 
			
		||||
        pdata[i4 + 1] = co[1]
 | 
			
		||||
        pdata[i4 + 2] = co[2]
 | 
			
		||||
        pdata[i4 + 3] = normal[2] * scale_pos # Cancel scale
 | 
			
		||||
        ndata[i2    ] = normal[0]
 | 
			
		||||
        ndata[i2 + 1] = normal[1]
 | 
			
		||||
        if has_tex:
 | 
			
		||||
            uv = v.uvs[t0map]
 | 
			
		||||
            t0data[i2    ] = uv[0]
 | 
			
		||||
            t0data[i2 + 1] = 1.0 - uv[1] # Reverse Y
 | 
			
		||||
            if has_tex1:
 | 
			
		||||
                uv = v.uvs[t1map]
 | 
			
		||||
                t1data[i2    ] = uv[0]
 | 
			
		||||
                t1data[i2 + 1] = 1.0 - uv[1]
 | 
			
		||||
        if has_morph_target:
 | 
			
		||||
            uv = v.uvs[morph_uv_index]
 | 
			
		||||
            morph_data[i2    ] = uv[0]
 | 
			
		||||
            morph_data[i2 + 1] = 1.0 - uv[1]
 | 
			
		||||
        if has_col:
 | 
			
		||||
            i3 = i * 3
 | 
			
		||||
            cdata[i3    ] = v.col[0]
 | 
			
		||||
            cdata[i3 + 1] = v.col[1]
 | 
			
		||||
            cdata[i3 + 2] = v.col[2]
 | 
			
		||||
 | 
			
		||||
    # Indices
 | 
			
		||||
    # Create dict for every material slot
 | 
			
		||||
    prims = {ma.name if ma else '': [] for ma in export_mesh.materials}
 | 
			
		||||
    v_maps = {ma.name if ma else '': [] for ma in export_mesh.materials}
 | 
			
		||||
    if not prims:
 | 
			
		||||
        # No materials
 | 
			
		||||
        prims = {'': []}
 | 
			
		||||
        v_maps = {'': []}
 | 
			
		||||
 | 
			
		||||
    # Create dict mapping every loop index of every vertex in vert_list to its Vertex: {loop_index: vertex}
 | 
			
		||||
    vert_dict = {i : v for v in vert_list for i in v.loop_indices}
 | 
			
		||||
    # For each polygon in a mesh
 | 
			
		||||
    for poly in export_mesh.polygons:
 | 
			
		||||
        # Index of the first loop of this polygon
 | 
			
		||||
        first = poly.loop_start
 | 
			
		||||
        # No materials assigned
 | 
			
		||||
        if len(export_mesh.materials) == 0:
 | 
			
		||||
            # Get prim
 | 
			
		||||
            prim = prims['']
 | 
			
		||||
            v_map = v_maps['']
 | 
			
		||||
        else:
 | 
			
		||||
            # First material
 | 
			
		||||
            mat = export_mesh.materials[min(poly.material_index, len(export_mesh.materials) - 1)]
 | 
			
		||||
            # Get prim for this material
 | 
			
		||||
            prim = prims[mat.name if mat else '']
 | 
			
		||||
            v_map = v_maps[mat.name if mat else '']
 | 
			
		||||
        # List of indices for each loop_index belonging to this polygon
 | 
			
		||||
        indices = [vert_dict[i].index for i in range(first, first+poly.loop_total)]
 | 
			
		||||
        v_indices = [vert_dict[i].vertex_index for i in range(first, first+poly.loop_total)]
 | 
			
		||||
 | 
			
		||||
        # Triangle: exactly 3 loops in this polygon
 | 
			
		||||
        if poly.loop_total == 3:
 | 
			
		||||
            prim += indices
 | 
			
		||||
            v_map += v_indices
 | 
			
		||||
        # N-gon: more than 3 loops, fan-triangulate below
 | 
			
		||||
        elif poly.loop_total > 3:
 | 
			
		||||
            for i in range(poly.loop_total-2):
 | 
			
		||||
                prim += (indices[-1], indices[i], indices[i + 1])
 | 
			
		||||
                v_map += (v_indices[-1], v_indices[i], v_indices[i + 1])
 | 
			
		||||
 | 
			
		||||
    # Write indices
 | 
			
		||||
    o['index_arrays'] = []
 | 
			
		||||
    for mat, prim in prims.items():
 | 
			
		||||
        idata = [0] * len(prim)
 | 
			
		||||
        v_map_data = [0] * len(prim)
 | 
			
		||||
        v_map_sub = v_maps[mat]
 | 
			
		||||
        for i, v in enumerate(prim):
 | 
			
		||||
            idata[i] = v
 | 
			
		||||
            v_map_data[i] = v_map_sub[i]
 | 
			
		||||
        if len(idata) == 0: # No face assigned
 | 
			
		||||
            continue
 | 
			
		||||
        ia = {'values': idata, 'material': 0, 'vertex_map': v_map_data}
 | 
			
		||||
        # Find material index for multi-mat mesh
 | 
			
		||||
        if len(export_mesh.materials) > 1:
 | 
			
		||||
            for i in range(0, len(export_mesh.materials)):
 | 
			
		||||
                if (export_mesh.materials[i] is not None and mat == export_mesh.materials[i].name) or \
 | 
			
		||||
                   (export_mesh.materials[i] is None and mat == ''):  # Default material for empty slots
 | 
			
		||||
                    ia['material'] = i
 | 
			
		||||
                    break
 | 
			
		||||
        o['index_arrays'].append(ia)
 | 
			
		||||
 | 
			
		||||
    if has_tang:
 | 
			
		||||
        tangdata = calc_tangents(pdata, ndata, t0data, o['index_arrays'], scale_pos)
 | 
			
		||||
 | 
			
		||||
    pdata *= invscale_pos
 | 
			
		||||
    ndata *= 32767
 | 
			
		||||
    pdata = np.array(pdata, dtype='<i2')
 | 
			
		||||
    ndata = np.array(ndata, dtype='<i2')
 | 
			
		||||
    if has_tex:
 | 
			
		||||
        t0data *= invscale_tex
 | 
			
		||||
        t0data = np.array(t0data, dtype='<i2')
 | 
			
		||||
        if has_tex1:
 | 
			
		||||
            t1data *= invscale_tex
 | 
			
		||||
            t1data = np.array(t1data, dtype='<i2')
 | 
			
		||||
    if has_morph_target:
 | 
			
		||||
        morph_data *= invscale_tex
 | 
			
		||||
        morph_data = np.array(morph_data, dtype='<i2')
 | 
			
		||||
    if has_col:
 | 
			
		||||
        cdata *= 32767
 | 
			
		||||
        cdata = np.array(cdata, dtype='<i2')
 | 
			
		||||
    if has_tang:
 | 
			
		||||
        tangdata *= 32767
 | 
			
		||||
        tangdata = np.array(tangdata, dtype='<i2')
 | 
			
		||||
 | 
			
		||||
    # Output
 | 
			
		||||
    o['vertex_arrays'] = []
 | 
			
		||||
    o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
 | 
			
		||||
    o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
 | 
			
		||||
    if has_tex:
 | 
			
		||||
        o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' })
 | 
			
		||||
        if has_tex1:
 | 
			
		||||
            o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' })
 | 
			
		||||
    if has_morph_target:
 | 
			
		||||
        o['vertex_arrays'].append({ 'attrib': 'morph', 'values': morph_data, 'data': 'short2norm' })
 | 
			
		||||
    if has_col:
 | 
			
		||||
        o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 })
 | 
			
		||||
    if has_tang:
 | 
			
		||||
        o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 })
 | 
			
		||||
 | 
			
		||||
    return vert_list
 | 
			
		||||
 | 
			
		||||
def export_skin(self, bobject, armature, vert_list, o):
 | 
			
		||||
    # This function exports all skinning data, which includes the skeleton
 | 
			
		||||
    # and per-vertex bone influence data
 | 
			
		||||
    oskin = {}
 | 
			
		||||
    o['skin'] = oskin
 | 
			
		||||
 | 
			
		||||
    # Write the skin bind pose transform
 | 
			
		||||
    otrans = {}
 | 
			
		||||
    oskin['transform'] = otrans
 | 
			
		||||
    otrans['values'] = self.write_matrix(bobject.matrix_world)
 | 
			
		||||
 | 
			
		||||
    # Write the bone object reference array
 | 
			
		||||
    oskin['bone_ref_array'] = []
 | 
			
		||||
    oskin['bone_len_array'] = []
 | 
			
		||||
 | 
			
		||||
    bone_array = armature.data.bones
 | 
			
		||||
    bone_count = len(bone_array)
 | 
			
		||||
    rpdat = lnx.utils.get_rp()
 | 
			
		||||
    max_bones = rpdat.lnx_skin_max_bones
 | 
			
		||||
    if bone_count > max_bones:
 | 
			
		||||
        log.warn(bobject.name + ' - ' + str(bone_count) + ' bones found, exceeds maximum of ' + str(max_bones) + ' bones defined - raise the value in Camera Data - Leenkx Render Props - Max Bones')
 | 
			
		||||
 | 
			
		||||
    for i in range(bone_count):
 | 
			
		||||
        boneRef = self.find_bone(bone_array[i].name)
 | 
			
		||||
        if boneRef:
 | 
			
		||||
            oskin['bone_ref_array'].append(boneRef[1]["structName"])
 | 
			
		||||
            oskin['bone_len_array'].append(bone_array[i].length)
 | 
			
		||||
        else:
 | 
			
		||||
            oskin['bone_ref_array'].append("")
 | 
			
		||||
            oskin['bone_len_array'].append(0.0)
 | 
			
		||||
 | 
			
		||||
    # Write the bind pose transform array
 | 
			
		||||
    oskin['transformsI'] = []
 | 
			
		||||
    for i in range(bone_count):
 | 
			
		||||
        skeletonI = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe()
 | 
			
		||||
        skeletonI = (skeletonI @ bobject.matrix_world)
 | 
			
		||||
        oskin['transformsI'].append(self.write_matrix(skeletonI))
 | 
			
		||||
 | 
			
		||||
    # Export the per-vertex bone influence data
 | 
			
		||||
    group_remap = []
 | 
			
		||||
    for group in bobject.vertex_groups:
 | 
			
		||||
        for i in range(bone_count):
 | 
			
		||||
            if bone_array[i].name == group.name:
 | 
			
		||||
                group_remap.append(i)
 | 
			
		||||
                break
 | 
			
		||||
        else:
 | 
			
		||||
            group_remap.append(-1)
 | 
			
		||||
 | 
			
		||||
    bone_count_array = np.empty(len(vert_list), dtype='<i2')
 | 
			
		||||
    bone_index_array = np.empty(len(vert_list) * 4, dtype='<i2')
 | 
			
		||||
    bone_weight_array = np.empty(len(vert_list) * 4, dtype='<i2')
 | 
			
		||||
 | 
			
		||||
    vertices = bobject.data.vertices
 | 
			
		||||
    count = 0
 | 
			
		||||
    for index, v in enumerate(vert_list):
 | 
			
		||||
        bone_count = 0
 | 
			
		||||
        total_weight = 0.0
 | 
			
		||||
        bone_values = []
 | 
			
		||||
        for g in vertices[v.vertex_index].groups:
 | 
			
		||||
            bone_index = group_remap[g.group]
 | 
			
		||||
            bone_weight = g.weight
 | 
			
		||||
            if bone_index >= 0: #and bone_weight != 0.0:
 | 
			
		||||
                bone_values.append((bone_weight, bone_index))
 | 
			
		||||
                total_weight += bone_weight
 | 
			
		||||
                bone_count += 1
 | 
			
		||||
 | 
			
		||||
        if bone_count > 4:
 | 
			
		||||
            bone_count = 4
 | 
			
		||||
            bone_values.sort(reverse=True)
 | 
			
		||||
            bone_values = bone_values[:4]
 | 
			
		||||
 | 
			
		||||
        bone_count_array[index] = bone_count
 | 
			
		||||
        for bv in bone_values:
 | 
			
		||||
            bone_weight_array[count] = bv[0] * 32767
 | 
			
		||||
            bone_index_array[count] = bv[1]
 | 
			
		||||
            count += 1
 | 
			
		||||
        
 | 
			
		||||
        if total_weight not in (0.0, 1.0):
 | 
			
		||||
            normalizer = 1.0 / total_weight
 | 
			
		||||
            for i in range(bone_count):
 | 
			
		||||
                bone_weight_array[count - i - 1] *= normalizer
 | 
			
		||||
 | 
			
		||||
    oskin['bone_count_array'] = bone_count_array
 | 
			
		||||
    oskin['bone_index_array'] = bone_index_array[:count]
 | 
			
		||||
    oskin['bone_weight_array'] = bone_weight_array[:count]
 | 
			
		||||
 | 
			
		||||
    # Bone constraints
 | 
			
		||||
    for bone in armature.pose.bones:
 | 
			
		||||
        if len(bone.constraints) > 0:
 | 
			
		||||
            if 'constraints' not in oskin:
 | 
			
		||||
                oskin['constraints'] = []
 | 
			
		||||
            self.add_constraints(bone, oskin, bone=True)
 | 
			
		||||
"""
 | 
			
		||||
Exports smaller geometry but is slower.
 | 
			
		||||
To be replaced with https://github.com/zeux/meshoptimizer
 | 
			
		||||
"""
 | 
			
		||||
from typing import Optional, TYPE_CHECKING
 | 
			
		||||
import bpy
 | 
			
		||||
from mathutils import Vector
 | 
			
		||||
import numpy as np
 | 
			
		||||
 | 
			
		||||
import lnx.utils
 | 
			
		||||
from lnx import log
 | 
			
		||||
 | 
			
		||||
if lnx.is_reload(__name__):
 | 
			
		||||
    log = lnx.reload_module(log)
 | 
			
		||||
    lnx.utils = lnx.reload_module(lnx.utils)
 | 
			
		||||
else:
 | 
			
		||||
    lnx.enable_reload(__name__)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Vertex:
 | 
			
		||||
    __slots__ = ("co", "normal", "uvs", "col", "loop_indices", "index", "bone_weights", "bone_indices", "bone_count", "vertex_index")
 | 
			
		||||
 | 
			
		||||
    def __init__(
 | 
			
		||||
        self, 
 | 
			
		||||
        mesh: 'bpy.types.Mesh', 
 | 
			
		||||
        loop: 'bpy.types.MeshLoop', 
 | 
			
		||||
        vcol0: Optional['bpy.types.MeshLoopColor' if bpy.app.version < (3, 0, 0) else 'bpy.types.Attribute']
 | 
			
		||||
    ):
 | 
			
		||||
        self.vertex_index = loop.vertex_index
 | 
			
		||||
        loop_idx = loop.index
 | 
			
		||||
        self.co = mesh.vertices[self.vertex_index].co[:]
 | 
			
		||||
        self.normal = loop.normal[:]
 | 
			
		||||
        self.uvs = tuple(layer.data[loop_idx].uv[:] for layer in mesh.uv_layers)
 | 
			
		||||
        self.col = [0.0, 0.0, 0.0] if vcol0 is None else vcol0.data[loop_idx].color[:]
 | 
			
		||||
        self.loop_indices = [loop_idx]
 | 
			
		||||
        self.index = 0
 | 
			
		||||
 | 
			
		||||
    def __hash__(self):
 | 
			
		||||
        return hash((self.co, self.normal, self.uvs))
 | 
			
		||||
 | 
			
		||||
    def __eq__(self, other):
 | 
			
		||||
        eq = (
 | 
			
		||||
            (self.co == other.co) and
 | 
			
		||||
            (self.normal == other.normal) and
 | 
			
		||||
            (self.uvs == other.uvs) and
 | 
			
		||||
            (self.col == other.col)
 | 
			
		||||
            )
 | 
			
		||||
        if eq:
 | 
			
		||||
            indices = self.loop_indices + other.loop_indices
 | 
			
		||||
            self.loop_indices = indices
 | 
			
		||||
            other.loop_indices = indices
 | 
			
		||||
        return eq
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
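export_mesh_data() below deduplicates loop vertices with a dict comprehension, which works because Vertex hashes on (co, normal, uvs) and its __eq__ merges loop_indices as a side effect whenever a duplicate key is probed. A standalone sketch of that trick, with a simplified stand-in class (SimpleVertex is hypothetical, not part of the exporter):

class SimpleVertex:
    def __init__(self, key, loop_idx):
        self.key = key
        self.loop_indices = [loop_idx]

    def __hash__(self):
        return hash(self.key)

    def __eq__(self, other):
        eq = self.key == other.key
        if eq:
            # Same side effect as Vertex.__eq__: merge loop indices on collision.
            merged = self.loop_indices + other.loop_indices
            self.loop_indices = merged
            other.loop_indices = merged
        return eq

verts = [SimpleVertex('a', 0), SimpleVertex('b', 1), SimpleVertex('a', 2)]
unique = {v: 0 for v in verts}.keys()  # 2 unique vertices; 'a' now carries loop_indices [0, 2]

After the comprehension, .keys() yields one Vertex per unique attribute combination, each carrying every loop index that mapped to it.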
def calc_tangents(posa, nora, uva, ias, scale_pos):
 | 
			
		||||
    num_verts = int(len(posa) / 4)
 | 
			
		||||
    tangents = np.empty(num_verts * 3, dtype='<f4')
 | 
			
		||||
    # bitangents = np.empty(num_verts * 3, dtype='<f4')
 | 
			
		||||
    for ar in ias:
 | 
			
		||||
        ia = ar['values']
 | 
			
		||||
        num_tris = int(len(ia) / 3)
 | 
			
		||||
        for i in range(0, num_tris):
 | 
			
		||||
            i0 = ia[i * 3    ]
 | 
			
		||||
            i1 = ia[i * 3 + 1]
 | 
			
		||||
            i2 = ia[i * 3 + 2]
 | 
			
		||||
            v0 = Vector((posa[i0 * 4], posa[i0 * 4 + 1], posa[i0 * 4 + 2]))
 | 
			
		||||
            v1 = Vector((posa[i1 * 4], posa[i1 * 4 + 1], posa[i1 * 4 + 2]))
 | 
			
		||||
            v2 = Vector((posa[i2 * 4], posa[i2 * 4 + 1], posa[i2 * 4 + 2]))
 | 
			
		||||
            uv0 = Vector((uva[i0 * 2], uva[i0 * 2 + 1]))
 | 
			
		||||
            uv1 = Vector((uva[i1 * 2], uva[i1 * 2 + 1]))
 | 
			
		||||
            uv2 = Vector((uva[i2 * 2], uva[i2 * 2 + 1]))
 | 
			
		||||
 | 
			
		||||
            deltaPos1 = v1 - v0
 | 
			
		||||
            deltaPos2 = v2 - v0
 | 
			
		||||
            deltaUV1 = uv1 - uv0
 | 
			
		||||
            deltaUV2 = uv2 - uv0
 | 
			
		||||
            d = (deltaUV1.x * deltaUV2.y - deltaUV1.y * deltaUV2.x)
 | 
			
		||||
            if d != 0:
 | 
			
		||||
                r = 1.0 / d
 | 
			
		||||
            else:
 | 
			
		||||
                r = 1.0
 | 
			
		||||
            tangent = (deltaPos1 * deltaUV2.y - deltaPos2 * deltaUV1.y) * r
 | 
			
		||||
            # bitangent = (deltaPos2 * deltaUV1.x - deltaPos1 * deltaUV2.x) * r
 | 
			
		||||
 | 
			
		||||
            tangents[i0 * 3    ] += tangent.x
 | 
			
		||||
            tangents[i0 * 3 + 1] += tangent.y
 | 
			
		||||
            tangents[i0 * 3 + 2] += tangent.z
 | 
			
		||||
            tangents[i1 * 3    ] += tangent.x
 | 
			
		||||
            tangents[i1 * 3 + 1] += tangent.y
 | 
			
		||||
            tangents[i1 * 3 + 2] += tangent.z
 | 
			
		||||
            tangents[i2 * 3    ] += tangent.x
 | 
			
		||||
            tangents[i2 * 3 + 1] += tangent.y
 | 
			
		||||
            tangents[i2 * 3 + 2] += tangent.z
 | 
			
		||||
            # bitangents[i0 * 3    ] += bitangent.x
 | 
			
		||||
            # bitangents[i0 * 3 + 1] += bitangent.y
 | 
			
		||||
            # bitangents[i0 * 3 + 2] += bitangent.z
 | 
			
		||||
            # bitangents[i1 * 3    ] += bitangent.x
 | 
			
		||||
            # bitangents[i1 * 3 + 1] += bitangent.y
 | 
			
		||||
            # bitangents[i1 * 3 + 2] += bitangent.z
 | 
			
		||||
            # bitangents[i2 * 3    ] += bitangent.x
 | 
			
		||||
            # bitangents[i2 * 3 + 1] += bitangent.y
 | 
			
		||||
            # bitangents[i2 * 3 + 2] += bitangent.z
 | 
			
		||||
    # Orthogonalize
 | 
			
		||||
    for i in range(0, num_verts):
 | 
			
		||||
        t = Vector((tangents[i * 3], tangents[i * 3 + 1], tangents[i * 3 + 2]))
 | 
			
		||||
        # b = Vector((bitangents[i * 3], bitangents[i * 3 + 1], bitangents[i * 3 + 2]))
 | 
			
		||||
        n = Vector((nora[i * 2], nora[i * 2 + 1], posa[i * 4 + 3] / scale_pos))
 | 
			
		||||
        v = t - n * n.dot(t)
 | 
			
		||||
        v.normalize()
 | 
			
		||||
        # Calculate handedness
 | 
			
		||||
        # cnv = n.cross(v)
 | 
			
		||||
        # if cnv.dot(b) < 0.0:
 | 
			
		||||
            # v = v * -1.0
 | 
			
		||||
        tangents[i * 3    ] = v.x
 | 
			
		||||
        tangents[i * 3 + 1] = v.y
 | 
			
		||||
        tangents[i * 3 + 2] = v.z
 | 
			
		||||
    return tangents
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
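The per-vertex loop at the end of calc_tangents() is a Gram-Schmidt step: the accumulated tangent is projected onto the plane perpendicular to the reconstructed normal and renormalized. The same operation in isolation, as a small NumPy sketch (illustrative only, not exporter API):

import numpy as np

def orthonormalize_tangent(t, n):
    # Remove the component of t along n, then normalize the remainder.
    t = np.asarray(t, dtype=np.float64)
    n = np.asarray(n, dtype=np.float64)
    v = t - n * np.dot(n, t)
    length = np.linalg.norm(v)
    return v / length if length > 0.0 else v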
def export_mesh_data(self, export_mesh: bpy.types.Mesh, bobject: bpy.types.Object, o, has_armature=False):
 | 
			
		||||
    if bpy.app.version < (4, 1, 0):
 | 
			
		||||
        export_mesh.calc_normals_split()
 | 
			
		||||
    else:
 | 
			
		||||
        updated_normals = export_mesh.corner_normals
 | 
			
		||||
    # exportMesh.calc_loop_triangles()
 | 
			
		||||
    vcol0 = self.get_nth_vertex_colors(export_mesh, 0)
 | 
			
		||||
    vert_list = {Vertex(export_mesh, loop, vcol0): 0 for loop in export_mesh.loops}.keys()
 | 
			
		||||
    num_verts = len(vert_list)
 | 
			
		||||
    num_uv_layers = len(export_mesh.uv_layers)
 | 
			
		||||
    # Check if shape keys were exported
 | 
			
		||||
    has_morph_target = self.get_shape_keys(bobject.data)
 | 
			
		||||
    if has_morph_target:
 | 
			
		||||
        # Shape keys UV are exported separately, so reduce UV count by 1
 | 
			
		||||
        num_uv_layers -= 1
 | 
			
		||||
        morph_uv_index = self.get_morph_uv_index(bobject.data)
 | 
			
		||||
    has_tex = self.get_export_uvs(export_mesh) or num_uv_layers > 0 # TODO FIXME: this should use `and` instead of `or`. Workaround that ignores the mesh's `export_uvs` flag and checks only `uv_layers`, to bypass issues with materials in linked objects.
 | 
			
		||||
    if self.has_baked_material(bobject, export_mesh.materials):
 | 
			
		||||
        has_tex = True
 | 
			
		||||
    has_tex1 = has_tex and num_uv_layers > 1
 | 
			
		||||
    num_colors = self.get_num_vertex_colors(export_mesh)
 | 
			
		||||
    has_col = self.get_export_vcols(export_mesh) and num_colors > 0
 | 
			
		||||
    has_tang = self.has_tangents(export_mesh)
 | 
			
		||||
 | 
			
		||||
    pdata = np.empty(num_verts * 4, dtype='<f4') # p.xyz, n.z
 | 
			
		||||
    ndata = np.empty(num_verts * 2, dtype='<f4') # n.xy
 | 
			
		||||
    if has_tex or has_morph_target:
 | 
			
		||||
        uv_layers = export_mesh.uv_layers
 | 
			
		||||
        maxdim = 1.0
 | 
			
		||||
        maxdim_uvlayer = None
 | 
			
		||||
        if has_tex:
 | 
			
		||||
            t0map = 0 # Get active uvmap
 | 
			
		||||
            t0data = np.empty(num_verts * 2, dtype='<f4')
 | 
			
		||||
            if uv_layers is not None:
 | 
			
		||||
                if 'UVMap_baked' in uv_layers:
 | 
			
		||||
                    for i in range(0, len(uv_layers)):
 | 
			
		||||
                        if uv_layers[i].name == 'UVMap_baked':
 | 
			
		||||
                            t0map = i
 | 
			
		||||
                            break
 | 
			
		||||
                else:
 | 
			
		||||
                    for i in range(0, len(uv_layers)):
 | 
			
		||||
                        if uv_layers[i].active_render and uv_layers[i].name != 'UVMap_shape_key':
 | 
			
		||||
                            t0map = i
 | 
			
		||||
                            break
 | 
			
		||||
            if has_tex1:
 | 
			
		||||
                for i in range(0, len(uv_layers)):
 | 
			
		||||
                    # Not UVMap 0
 | 
			
		||||
                    if i != t0map:
 | 
			
		||||
                        # Not Shape Key UVMap
 | 
			
		||||
                        if has_morph_target and uv_layers[i].name == 'UVMap_shape_key':
 | 
			
		||||
                            continue
 | 
			
		||||
                        # Neither UVMap 0 Nor Shape Key Map
 | 
			
		||||
                        t1map = i
 | 
			
		||||
                t1data = np.empty(num_verts * 2, dtype='<f4')
 | 
			
		||||
            # Scale for packed coords
 | 
			
		||||
            lay0 = uv_layers[t0map]
 | 
			
		||||
            maxdim_uvlayer = lay0
 | 
			
		||||
            for v in lay0.data:
 | 
			
		||||
                if abs(v.uv[0]) > maxdim:
 | 
			
		||||
                    maxdim = abs(v.uv[0])
 | 
			
		||||
                if abs(v.uv[1]) > maxdim:
 | 
			
		||||
                    maxdim = abs(v.uv[1])
 | 
			
		||||
            if has_tex1:
 | 
			
		||||
                lay1 = uv_layers[t1map]
 | 
			
		||||
                for v in lay1.data:
 | 
			
		||||
                    if abs(v.uv[0]) > maxdim:
 | 
			
		||||
                        maxdim = abs(v.uv[0])
 | 
			
		||||
                        maxdim_uvlayer = lay1
 | 
			
		||||
                    if abs(v.uv[1]) > maxdim:
 | 
			
		||||
                        maxdim = abs(v.uv[1])
 | 
			
		||||
                        maxdim_uvlayer = lay1
 | 
			
		||||
        if has_morph_target:
 | 
			
		||||
            morph_data = np.empty(num_verts * 2, dtype='<f4')
 | 
			
		||||
            lay2 = uv_layers[morph_uv_index]
 | 
			
		||||
            for v in lay2.data:
 | 
			
		||||
                if abs(v.uv[0]) > maxdim:
 | 
			
		||||
                    maxdim = abs(v.uv[0])
 | 
			
		||||
                    maxdim_uvlayer = lay2
 | 
			
		||||
                if abs(v.uv[1]) > maxdim:
 | 
			
		||||
                    maxdim = abs(v.uv[1])
 | 
			
		||||
                    maxdim_uvlayer = lay2
 | 
			
		||||
        if maxdim > 1:
 | 
			
		||||
            o['scale_tex'] = maxdim
 | 
			
		||||
            invscale_tex = (1 / o['scale_tex']) * 32767
 | 
			
		||||
        else:
 | 
			
		||||
            invscale_tex = 1 * 32767
 | 
			
		||||
        self.check_uv_precision(export_mesh, maxdim, maxdim_uvlayer, invscale_tex)
 | 
			
		||||
 | 
			
		||||
    if has_col:
 | 
			
		||||
        cdata = np.empty(num_verts * 3, dtype='<f4')
 | 
			
		||||
 | 
			
		||||
    # Save aabb
 | 
			
		||||
    self.calc_aabb(bobject)
 | 
			
		||||
 | 
			
		||||
    # Scale for packed coords
 | 
			
		||||
    maxdim = max(bobject.data.lnx_aabb[0], max(bobject.data.lnx_aabb[1], bobject.data.lnx_aabb[2]))
 | 
			
		||||
    if maxdim > 2:
 | 
			
		||||
        o['scale_pos'] = maxdim / 2
 | 
			
		||||
    else:
 | 
			
		||||
        o['scale_pos'] = 1.0
 | 
			
		||||
    if has_armature: # Allow up to 2x bigger bounds for skinned mesh
 | 
			
		||||
        o['scale_pos'] *= 2.0
 | 
			
		||||
 | 
			
		||||
    scale_pos = o['scale_pos']
 | 
			
		||||
    invscale_pos = (1 / scale_pos) * 32767
 | 
			
		||||
 | 
			
		||||
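scale_pos and invscale_pos implement the short4norm packing used for positions: coordinates are divided by scale_pos so they fit in [-1, 1], then scaled by 32767 and stored as int16. A round-trip sketch of that quantization (just the arithmetic assumed here, not the engine's decoder):

import numpy as np

def pack_short_norm(values, scale):
    # Map floats in [-scale, scale] to int16 in [-32767, 32767].
    return np.asarray(np.asarray(values, dtype='<f4') * ((1.0 / scale) * 32767), dtype='<i2')

def unpack_short_norm(packed, scale):
    # Inverse mapping back to float coordinates (lossy to roughly scale / 32767).
    return np.asarray(packed, dtype='<f4') * (scale / 32767.0)

# Example: with scale_pos = 2.0, a coordinate of 1.5 packs to 24575 and unpacks to about 1.49998.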
    # Make arrays
 | 
			
		||||
    for i, v in enumerate(vert_list):
 | 
			
		||||
        v.index = i
 | 
			
		||||
        co = v.co
 | 
			
		||||
        normal = v.normal
 | 
			
		||||
        i4 = i * 4
 | 
			
		||||
        i2 = i * 2
 | 
			
		||||
        pdata[i4    ] = co[0]
 | 
			
		||||
        pdata[i4 + 1] = co[1]
 | 
			
		||||
        pdata[i4 + 2] = co[2]
 | 
			
		||||
        pdata[i4 + 3] = normal[2] * scale_pos # Cancel scale
 | 
			
		||||
        ndata[i2    ] = normal[0]
 | 
			
		||||
        ndata[i2 + 1] = normal[1]
 | 
			
		||||
        if has_tex:
 | 
			
		||||
            uv = v.uvs[t0map]
 | 
			
		||||
            t0data[i2    ] = uv[0]
 | 
			
		||||
            t0data[i2 + 1] = 1.0 - uv[1] # Reverse Y
 | 
			
		||||
            if has_tex1:
 | 
			
		||||
                uv = v.uvs[t1map]
 | 
			
		||||
                t1data[i2    ] = uv[0]
 | 
			
		||||
                t1data[i2 + 1] = 1.0 - uv[1]
 | 
			
		||||
        if has_morph_target:
 | 
			
		||||
            uv = v.uvs[morph_uv_index]
 | 
			
		||||
            morph_data[i2    ] = uv[0]
 | 
			
		||||
            morph_data[i2 + 1] = 1.0 - uv[1]
 | 
			
		||||
        if has_col:
 | 
			
		||||
            i3 = i * 3
 | 
			
		||||
            cdata[i3    ] = v.col[0]
 | 
			
		||||
            cdata[i3 + 1] = v.col[1]
 | 
			
		||||
            cdata[i3 + 2] = v.col[2]
 | 
			
		||||
 | 
			
		||||
    # Indices
 | 
			
		||||
    # Create dict for every material slot
 | 
			
		||||
    prims = {ma.name if ma else '': [] for ma in export_mesh.materials}
 | 
			
		||||
    v_maps = {ma.name if ma else '': [] for ma in export_mesh.materials}
 | 
			
		||||
    if not prims:
 | 
			
		||||
        # No materials
 | 
			
		||||
        prims = {'': []}
 | 
			
		||||
        v_maps = {'': []}
 | 
			
		||||
 | 
			
		||||
    # Create dict mapping every loop index of every vertex in vert_list to its Vertex: {loop_index: vertex}
 | 
			
		||||
    vert_dict = {i : v for v in vert_list for i in v.loop_indices}
 | 
			
		||||
    # For each polygon in a mesh
 | 
			
		||||
    for poly in export_mesh.polygons:
 | 
			
		||||
        # Index of the first loop of this polygon
 | 
			
		||||
        first = poly.loop_start
 | 
			
		||||
        # No materials assigned
 | 
			
		||||
        if len(export_mesh.materials) == 0:
 | 
			
		||||
            # Get prim
 | 
			
		||||
            prim = prims['']
 | 
			
		||||
            v_map = v_maps['']
 | 
			
		||||
        else:
 | 
			
		||||
            # First material
 | 
			
		||||
            mat = export_mesh.materials[min(poly.material_index, len(export_mesh.materials) - 1)]
 | 
			
		||||
            # Get prim for this material
 | 
			
		||||
            prim = prims[mat.name if mat else '']
 | 
			
		||||
            v_map = v_maps[mat.name if mat else '']
 | 
			
		||||
        # List of indices for each loop_index belonging to this polygon
 | 
			
		||||
        indices = [vert_dict[i].index for i in range(first, first+poly.loop_total)]
 | 
			
		||||
        v_indices = [vert_dict[i].vertex_index for i in range(first, first+poly.loop_total)]
 | 
			
		||||
 | 
			
		||||
        # Triangle: exactly 3 loops in this polygon
 | 
			
		||||
        if poly.loop_total == 3:
 | 
			
		||||
            prim += indices
 | 
			
		||||
            v_map += v_indices
 | 
			
		||||
        # N-gon: more than 3 loops, fan-triangulate below
 | 
			
		||||
        elif poly.loop_total > 3:
 | 
			
		||||
            for i in range(poly.loop_total-2):
 | 
			
		||||
                prim += (indices[-1], indices[i], indices[i + 1])
 | 
			
		||||
                v_map += (v_indices[-1], v_indices[i], v_indices[i + 1])
 | 
			
		||||
 | 
			
		||||
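The elif branch above fan-triangulates n-gons by anchoring every triangle at the polygon's last loop index. The same pattern as a tiny standalone function (illustrative, not exporter API):

def fan_triangulate(indices):
    # Same fan pattern as the loop above: anchor each triangle at the last index.
    tris = []
    for i in range(len(indices) - 2):
        tris.append((indices[-1], indices[i], indices[i + 1]))
    return tris

# A quad [0, 1, 2, 3] becomes [(3, 0, 1), (3, 1, 2)].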
    # Write indices
 | 
			
		||||
    o['index_arrays'] = []
 | 
			
		||||
    for mat, prim in prims.items():
 | 
			
		||||
        idata = [0] * len(prim)
 | 
			
		||||
        v_map_data = [0] * len(prim)
 | 
			
		||||
        v_map_sub = v_maps[mat]
 | 
			
		||||
        for i, v in enumerate(prim):
 | 
			
		||||
            idata[i] = v
 | 
			
		||||
            v_map_data[i] = v_map_sub[i]
 | 
			
		||||
        if len(idata) == 0: # No face assigned
 | 
			
		||||
            continue
 | 
			
		||||
        ia = {'values': idata, 'material': 0, 'vertex_map': v_map_data}
 | 
			
		||||
        # Find material index for multi-mat mesh
 | 
			
		||||
        if len(export_mesh.materials) > 1:
 | 
			
		||||
            for i in range(0, len(export_mesh.materials)):
 | 
			
		||||
                if (export_mesh.materials[i] is not None and mat == export_mesh.materials[i].name) or \
 | 
			
		||||
                   (export_mesh.materials[i] is None and mat == ''):  # Default material for empty slots
 | 
			
		||||
                    ia['material'] = i
 | 
			
		||||
                    break
 | 
			
		||||
        o['index_arrays'].append(ia)
 | 
			
		||||
 | 
			
		||||
    if has_tang:
 | 
			
		||||
        tangdata = calc_tangents(pdata, ndata, t0data, o['index_arrays'], scale_pos)
 | 
			
		||||
 | 
			
		||||
    pdata *= invscale_pos
 | 
			
		||||
    ndata *= 32767
 | 
			
		||||
    pdata = np.array(pdata, dtype='<i2')
 | 
			
		||||
    ndata = np.array(ndata, dtype='<i2')
 | 
			
		||||
    if has_tex:
 | 
			
		||||
        t0data *= invscale_tex
 | 
			
		||||
        t0data = np.array(t0data, dtype='<i2')
 | 
			
		||||
        if has_tex1:
 | 
			
		||||
            t1data *= invscale_tex
 | 
			
		||||
            t1data = np.array(t1data, dtype='<i2')
 | 
			
		||||
    if has_morph_target:
 | 
			
		||||
        morph_data *= invscale_tex
 | 
			
		||||
        morph_data = np.array(morph_data, dtype='<i2')
 | 
			
		||||
    if has_col:
 | 
			
		||||
        cdata *= 32767
 | 
			
		||||
        cdata = np.array(cdata, dtype='<i2')
 | 
			
		||||
    if has_tang:
 | 
			
		||||
        tangdata *= 32767
 | 
			
		||||
        tangdata = np.array(tangdata, dtype='<i2')
 | 
			
		||||
 | 
			
		||||
    # Output
 | 
			
		||||
    o['sorting_index'] = bobject.lnx_sorting_index
 | 
			
		||||
    o['vertex_arrays'] = []
 | 
			
		||||
    o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
 | 
			
		||||
    o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
 | 
			
		||||
    if has_tex:
 | 
			
		||||
        o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' })
 | 
			
		||||
        if has_tex1:
 | 
			
		||||
            o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' })
 | 
			
		||||
    if has_morph_target:
 | 
			
		||||
        o['vertex_arrays'].append({ 'attrib': 'morph', 'values': morph_data, 'data': 'short2norm' })
 | 
			
		||||
    if has_col:
 | 
			
		||||
        o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 })
 | 
			
		||||
    if has_tang:
 | 
			
		||||
        o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 })
 | 
			
		||||
 | 
			
		||||
    return vert_list
 | 
			
		||||
 | 
			
		||||
def export_skin(self, bobject, armature, vert_list, o):
 | 
			
		||||
    # This function exports all skinning data, which includes the skeleton
 | 
			
		||||
    # and per-vertex bone influence data
 | 
			
		||||
    oskin = {}
 | 
			
		||||
    o['skin'] = oskin
 | 
			
		||||
 | 
			
		||||
    # Write the skin bind pose transform
 | 
			
		||||
    otrans = {}
 | 
			
		||||
    oskin['transform'] = otrans
 | 
			
		||||
    otrans['values'] = self.write_matrix(bobject.matrix_world)
 | 
			
		||||
 | 
			
		||||
    # Write the bone object reference array
 | 
			
		||||
    oskin['bone_ref_array'] = []
 | 
			
		||||
    oskin['bone_len_array'] = []
 | 
			
		||||
 | 
			
		||||
    bone_array = armature.data.bones
 | 
			
		||||
    bone_count = len(bone_array)
 | 
			
		||||
    rpdat = lnx.utils.get_rp()
 | 
			
		||||
    max_bones = rpdat.lnx_skin_max_bones
 | 
			
		||||
    if bone_count > max_bones:
 | 
			
		||||
        log.warn(bobject.name + ' - ' + str(bone_count) + ' bones found, exceeds maximum of ' + str(max_bones) + ' bones defined - raise the value in Camera Data - Leenkx Render Props - Max Bones')
 | 
			
		||||
 | 
			
		||||
    for i in range(bone_count):
 | 
			
		||||
        boneRef = self.find_bone(bone_array[i].name)
 | 
			
		||||
        if boneRef:
 | 
			
		||||
            oskin['bone_ref_array'].append(boneRef[1]["structName"])
 | 
			
		||||
            oskin['bone_len_array'].append(bone_array[i].length)
 | 
			
		||||
        else:
 | 
			
		||||
            oskin['bone_ref_array'].append("")
 | 
			
		||||
            oskin['bone_len_array'].append(0.0)
 | 
			
		||||
 | 
			
		||||
    # Write the bind pose transform array
 | 
			
		||||
    oskin['transformsI'] = []
 | 
			
		||||
    for i in range(bone_count):
 | 
			
		||||
        skeletonI = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe()
 | 
			
		||||
        skeletonI = (skeletonI @ bobject.matrix_world)
 | 
			
		||||
        oskin['transformsI'].append(self.write_matrix(skeletonI))
 | 
			
		||||
 | 
			
		||||
    # Export the per-vertex bone influence data
 | 
			
		||||
    group_remap = []
 | 
			
		||||
    for group in bobject.vertex_groups:
 | 
			
		||||
        for i in range(bone_count):
 | 
			
		||||
            if bone_array[i].name == group.name:
 | 
			
		||||
                group_remap.append(i)
 | 
			
		||||
                break
 | 
			
		||||
        else:
 | 
			
		||||
            group_remap.append(-1)
 | 
			
		||||
 | 
			
		||||
    bone_count_array = np.empty(len(vert_list), dtype='<i2')
 | 
			
		||||
    bone_index_array = np.empty(len(vert_list) * 4, dtype='<i2')
 | 
			
		||||
    bone_weight_array = np.empty(len(vert_list) * 4, dtype='<i2')
 | 
			
		||||
 | 
			
		||||
    vertices = bobject.data.vertices
 | 
			
		||||
    count = 0
 | 
			
		||||
    for index, v in enumerate(vert_list):
 | 
			
		||||
        bone_count = 0
 | 
			
		||||
        total_weight = 0.0
 | 
			
		||||
        bone_values = []
 | 
			
		||||
        for g in vertices[v.vertex_index].groups:
 | 
			
		||||
            bone_index = group_remap[g.group]
 | 
			
		||||
            bone_weight = g.weight
 | 
			
		||||
            if bone_index >= 0: #and bone_weight != 0.0:
 | 
			
		||||
                bone_values.append((bone_weight, bone_index))
 | 
			
		||||
                total_weight += bone_weight
 | 
			
		||||
                bone_count += 1
 | 
			
		||||
 | 
			
		||||
        if bone_count > 4:
 | 
			
		||||
            bone_count = 4
 | 
			
		||||
            bone_values.sort(reverse=True)
 | 
			
		||||
            bone_values = bone_values[:4]
 | 
			
		||||
 | 
			
		||||
        bone_count_array[index] = bone_count
 | 
			
		||||
        for bv in bone_values:
 | 
			
		||||
            bone_weight_array[count] = bv[0] * 32767
 | 
			
		||||
            bone_index_array[count] = bv[1]
 | 
			
		||||
            count += 1
 | 
			
		||||
        
 | 
			
		||||
        if total_weight not in (0.0, 1.0):
 | 
			
		||||
            normalizer = 1.0 / total_weight
 | 
			
		||||
            for i in range(bone_count):
 | 
			
		||||
                bone_weight_array[count - i - 1] *= normalizer
 | 
			
		||||
 | 
			
		||||
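The influence loop above keeps at most four (weight, bone_index) pairs per vertex, sorted by weight, and rescales the survivors so they sum to one before they are written out as int16. A compact sketch of that policy (this sketch normalizes before quantizing, purely for illustration):

def pack_influences(groups, max_influences=4):
    # groups: iterable of (weight, bone_index) pairs for one vertex.
    groups = sorted(groups, reverse=True)[:max_influences]
    total = sum(w for w, _ in groups)
    if total not in (0.0, 1.0):
        groups = [(w / total, b) for w, b in groups]
    # Quantize the (now normalized) weights the same way as above.
    return [(int(w * 32767), b) for w, b in groups]

# pack_influences([(0.5, 2), (0.3, 7), (0.3, 1), (0.2, 4), (0.1, 9)]) keeps the four
# heaviest influences and rescales them to sum to one before quantization.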
    oskin['bone_count_array'] = bone_count_array
 | 
			
		||||
    oskin['bone_index_array'] = bone_index_array[:count]
 | 
			
		||||
    oskin['bone_weight_array'] = bone_weight_array[:count]
 | 
			
		||||
 | 
			
		||||
    # Bone constraints
 | 
			
		||||
    for bone in armature.pose.bones:
 | 
			
		||||
        if len(bone.constraints) > 0:
 | 
			
		||||
            if 'constraints' not in oskin:
 | 
			
		||||
                oskin['constraints'] = []
 | 
			
		||||
            self.add_constraints(bone, oskin, bone=True)
 | 
			
		||||
 | 
			
		||||
@ -98,7 +98,7 @@ def on_operator_post(operator_id: str) -> None:
 | 
			
		||||
            target_obj.lnx_rb_collision_filter_mask = source_obj.lnx_rb_collision_filter_mask
 | 
			
		||||
 | 
			
		||||
    elif operator_id == "NODE_OT_new_node_tree":
 | 
			
		||||
        if bpy.context.space_data.tree_type == lnx.nodes_logic.LnxLogicTree.bl_idname:
 | 
			
		||||
        if bpy.context.space_data is not None and bpy.context.space_data.tree_type == lnx.nodes_logic.LnxLogicTree.bl_idname:
 | 
			
		||||
            # In Blender 3.5+, new node trees are no longer called "NodeTree"
 | 
			
		||||
            # but follow the bl_label attribute by default. New logic trees
 | 
			
		||||
            # are thus called "Leenkx Logic Editor" which conflicts with Haxe's
 | 
			
		||||
@ -132,9 +132,10 @@ def send_operator(op):
 | 
			
		||||
def always() -> float:
 | 
			
		||||
    # Force ui redraw
 | 
			
		||||
    if state.redraw_ui:
 | 
			
		||||
        for area in bpy.context.screen.areas:
 | 
			
		||||
            if area.type in ('NODE_EDITOR', 'PROPERTIES', 'VIEW_3D'):
 | 
			
		||||
                area.tag_redraw()
 | 
			
		||||
        if bpy.context.screen is not None:
 | 
			
		||||
            for area in bpy.context.screen.areas:
 | 
			
		||||
                if area.type in ('NODE_EDITOR', 'PROPERTIES', 'VIEW_3D'):
 | 
			
		||||
                    area.tag_redraw()
 | 
			
		||||
        state.redraw_ui = False
 | 
			
		||||
 | 
			
		||||
    return 0.5
 | 
			
		||||
@ -251,7 +252,7 @@ def get_polling_stats() -> dict:
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
loaded_py_libraries: dict[str, types.ModuleType] = {}
 | 
			
		||||
loaded_py_libraries: Dict[str, types.ModuleType] = {}
 | 
			
		||||
context_screen = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -347,10 +348,18 @@ def reload_blend_data():
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def load_library(asset_name):
 | 
			
		||||
    if bpy.data.filepath.endswith('lnx_data.blend'): # Prevent load in library itself
 | 
			
		||||
        return
 | 
			
		||||
    # Prevent load in library itself
 | 
			
		||||
    if bpy.app.version <= (2, 93, 0):
 | 
			
		||||
        if bpy.data.filepath.endswith('lnx_data_2.blend'): 
 | 
			
		||||
            return
 | 
			
		||||
    else:
 | 
			
		||||
        if bpy.data.filepath.endswith('lnx_data.blend'): 
 | 
			
		||||
            return
 | 
			
		||||
    sdk_path = lnx.utils.get_sdk_path()
 | 
			
		||||
    data_path = sdk_path + '/leenkx/blender/data/lnx_data.blend'
 | 
			
		||||
    if bpy.app.version <= (2, 93, 0):
 | 
			
		||||
        data_path = sdk_path + '/leenkx/blender/data/lnx_data_2.blend'
 | 
			
		||||
    else:
 | 
			
		||||
        data_path = sdk_path + '/leenkx/blender/data/lnx_data.blend'
 | 
			
		||||
    data_names = [asset_name]
 | 
			
		||||
 | 
			
		||||
    # Import
 | 
			
		||||
 | 
			
		||||
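load_library() now selects the data .blend by Blender version: builds up to 2.93 load lnx_data_2.blend, newer ones load lnx_data.blend, and the same check guards against loading inside the library file itself. A hypothetical helper condensing that selection (names and structure are illustrative only):

import bpy

def lnx_data_blend_name():
    # Blender 2.93 and older ship a separate legacy data file.
    return 'lnx_data_2.blend' if bpy.app.version <= (2, 93, 0) else 'lnx_data.blend'

def lnx_data_blend_path(sdk_path):
    return sdk_path + '/leenkx/blender/data/' + lnx_data_blend_name()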
@ -1,13 +1,15 @@
 | 
			
		||||
from typing import List, Dict, Optional, Any
 | 
			
		||||
 | 
			
		||||
import lnx.utils
 | 
			
		||||
from lnx import assets
 | 
			
		||||
 | 
			
		||||
def parse_context(
 | 
			
		||||
    c: dict,
 | 
			
		||||
    sres: dict,
 | 
			
		||||
    asset,
 | 
			
		||||
    defs: list[str],
 | 
			
		||||
    vert: list[str] = None,
 | 
			
		||||
    frag: list[str] = None,
 | 
			
		||||
    c: Dict[str, Any],
 | 
			
		||||
    sres: Dict[str, Any],
 | 
			
		||||
    asset: Any,
 | 
			
		||||
    defs: List[str],
 | 
			
		||||
    vert: Optional[List[str]] = None,
 | 
			
		||||
    frag: Optional[List[str]] = None,
 | 
			
		||||
):
 | 
			
		||||
    con = {
 | 
			
		||||
        "name": c["name"],
 | 
			
		||||
@ -99,7 +101,12 @@ def parse_context(
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_shader(
 | 
			
		||||
    sres, c: dict, con: dict, defs: list[str], lines: list[str], parse_attributes: bool
 | 
			
		||||
    sres: Dict[str, Any], 
 | 
			
		||||
    c: Dict[str, Any], 
 | 
			
		||||
    con: Dict[str, Any], 
 | 
			
		||||
    defs: List[str], 
 | 
			
		||||
    lines: List[str], 
 | 
			
		||||
    parse_attributes: bool
 | 
			
		||||
):
 | 
			
		||||
    """Parses the given shader to get information about the used vertex
 | 
			
		||||
    elements, uniforms and constants. This information is later used in
 | 
			
		||||
@ -229,7 +236,12 @@ def parse_shader(
 | 
			
		||||
                    check_link(c, defs, cid, const)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def check_link(source_context: dict, defs: list[str], cid: str, out: dict):
 | 
			
		||||
def check_link(
 | 
			
		||||
    source_context: Dict[str, Any], 
 | 
			
		||||
    defs: List[str], 
 | 
			
		||||
    cid: str, 
 | 
			
		||||
    out: Dict[str, Any]
 | 
			
		||||
):
 | 
			
		||||
    """Checks whether the uniform/constant with the given name (`cid`)
 | 
			
		||||
    has a link stated in the json (`source_context`) that can be safely
 | 
			
		||||
    included based on the given defines (`defs`). If that is the case,
 | 
			
		||||
@ -273,7 +285,12 @@ def check_link(source_context: dict, defs: list[str], cid: str, out: dict):
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def make(
 | 
			
		||||
    res: dict, base_name: str, json_data: dict, fp, defs: list[str], make_variants: bool
 | 
			
		||||
    res: Dict[str, Any], 
 | 
			
		||||
    base_name: str, 
 | 
			
		||||
    json_data: Dict[str, Any], 
 | 
			
		||||
    fp: Any, 
 | 
			
		||||
    defs: List[str], 
 | 
			
		||||
    make_variants: bool
 | 
			
		||||
):
 | 
			
		||||
    sres = {"name": base_name, "contexts": []}
 | 
			
		||||
    res["shader_datas"].append(sres)
 | 
			
		||||
 | 
			
		||||
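The signature rewrites in this file replace builtin generic annotations (list[str], dict-style) with typing.List/Dict/Optional, presumably so the module still imports on Python interpreters older than 3.9, where subscripting the builtin types raises at definition time. A minimal illustration of the difference (the function name is made up):

# On Python < 3.9 this raises "TypeError: 'type' object is not subscriptable"
# as soon as the defining module is imported:
#     def make_sketch(defs: list[str]) -> dict[str, int]: ...
# The typing-module spellings evaluate fine on older interpreters:
from typing import Dict, List

def make_sketch(defs: List[str]) -> Dict[str, int]:
    return {d: len(d) for d in defs}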
@ -1049,17 +1049,18 @@ class TLM_ToggleTexelDensity(bpy.types.Operator):

                #img = bpy.data.images.load(filepath)

                for area in bpy.context.screen.areas:
                    if area.type == 'VIEW_3D':
                        space_data = area.spaces.active
                        bpy.ops.screen.area_dupli('INVOKE_DEFAULT')
                        new_window = context.window_manager.windows[-1]
                if bpy.context.screen is not None:
                    for area in bpy.context.screen.areas:
                        if area.type == 'VIEW_3D':
                            space_data = area.spaces.active
                            bpy.ops.screen.area_dupli('INVOKE_DEFAULT')
                            new_window = context.window_manager.windows[-1]

                        area = new_window.screen.areas[-1]
                        area.type = 'VIEW_3D'
                        #bg = space_data.background_images.new()
                        print(bpy.context.object)
                        bpy.ops.object.bake_td_uv_to_vc()
                            area = new_window.screen.areas[-1]
                            area.type = 'VIEW_3D'
                            #bg = space_data.background_images.new()
                            print(bpy.context.object)
                            bpy.ops.object.bake_td_uv_to_vc()

                        #bg.image = img
                        break

@ -28,9 +28,10 @@ class TLM_PT_Imagetools(bpy.types.Panel):

        activeImg = None

        for area in bpy.context.screen.areas:
            if area.type == 'IMAGE_EDITOR':
                activeImg = area.spaces.active.image
        if bpy.context.screen is not None:
            for area in bpy.context.screen.areas:
                if area.type == 'IMAGE_EDITOR':
                    activeImg = area.spaces.active.image

        if activeImg is not None and activeImg.name != "Render Result" and activeImg.name != "Viewer Node":


@ -1,4 +1,16 @@
import bpy, os, subprocess, sys, platform, aud, json, datetime, socket
import bpy, os, subprocess, sys, platform, json, datetime, socket


aud = None
try:
    import aud
except (ImportError, AttributeError) as e:

    if any(err in str(e) for err in ["numpy.core.multiarray", "_ARRAY_API", "compiled using NumPy 1.x"]):
        print("Info: Audio features unavailable due to NumPy version compatibility.")
    else:
        print(f"Warning: Audio module unavailable: {e}")
    aud = None

from . import encoding, pack, log
from . cycles import lightmap, prepare, nodes, cache
@ -1117,9 +1129,12 @@ def manage_build(background_pass=False, load_atlas=0):
            scriptDir = os.path.dirname(os.path.realpath(__file__))
            sound_path = os.path.abspath(os.path.join(scriptDir, '..', 'assets/'+soundfile))

            device = aud.Device()
            sound = aud.Sound.file(sound_path)
            device.play(sound)
            if aud is not None:
                device = aud.Device()
                sound = aud.Sound.file(sound_path)
                device.play(sound)
            else:
                print(f"Build completed!")

        if logging:
            print("Log file output:")

@ -103,11 +103,11 @@ class BlendSpaceNode(LnxLogicTreeNode):
        self.remove_advanced_draw()

    def get_blend_space_points(self):
        if bpy.context.space_data.edit_tree == self.get_tree():
        if bpy.context.space_data is not None and bpy.context.space_data.edit_tree == self.get_tree():
            return self.blend_space.points

    def draw_advanced(self):
        if bpy.context.space_data.edit_tree == self.get_tree():
        if bpy.context.space_data is not None and bpy.context.space_data.edit_tree == self.get_tree():
            self.blend_space.draw()

    def lnx_init(self, context):

@ -16,3 +16,9 @@ class ArraySpliceNode(LnxLogicTreeNode):

        self.add_output('LnxNodeSocketAction', 'Out')
        self.add_output('LnxNodeSocketArray', 'Array')

    def get_replacement_node(self, node_tree: bpy.types.NodeTree):
        if self.lnx_version not in (0, 1):
            raise LookupError()

        return NodeReplacement.Identity(self)
@ -156,149 +156,149 @@ class CreateElementNode(LnxLogicTreeNode):
                    self.add_input('LnxStringSocket', 'Class')
                    self.add_input('LnxStringSocket', 'Style')

            match index:
                case 0:
                    self.add_input('LnxStringSocket', 'Href', default_value='#')
                case 3:
                    self.add_input('LnxStringSocket', 'Alt')
                    self.add_input('LnxStringSocket', 'Coords')
                    self.add_input('LnxStringSocket', 'Href')
                case 6:
                    self.add_input('LnxStringSocket', 'Src')
                case 11:
                    self.add_input('LnxStringSocket', 'Cite', default_value='URL')
                case 14:
                    self.add_input('LnxStringSocket', 'Type', default_value='Submit')
                case 15:
                    self.add_input('LnxStringSocket', 'Height', default_value='150px')
                    self.add_input('LnxStringSocket', 'Width', default_value='300px')
                case 19 | 20:
                    self.add_input('LnxStringSocket', 'Span')
                case 21:
                    self.add_input('LnxStringSocket', 'Value')
                case 24 | 53:
                    self.add_input('LnxStringSocket', 'Cite', default_value='URL')
                    self.add_input('LnxStringSocket', 'Datetime', default_value='YYYY-MM-DDThh:mm:ssTZD')
                case 26:
                    self.add_input('LnxStringSocket', 'Title')
                case 32:
                    self.add_input('LnxStringSocket', 'Src', default_value='URL')
                    self.add_input('LnxStringSocket', 'Type')
                    self.add_input('LnxStringSocket', 'Height')
                    self.add_input('LnxStringSocket', 'Width')
                case 33:
                    self.add_input('LnxStringSocket', 'Form')
                    self.add_input('LnxStringSocket', 'Name')
                case 37:
                    self.add_input('LnxStringSocket', 'Action', default_value='URL')
                    self.add_input('LnxStringSocket', 'Method', default_value='get')
                case 44:
                    self.add_input('LnxStringSocket', 'Profile', default_value='URI')
                case 48:
                    self.add_input('LnxBoolSocket', 'xmlns' , default_value=False )
                case 50:
                    self.add_input('LnxStringSocket', 'Src', default_value='URL')
                    self.add_input('LnxStringSocket', 'Height' , default_value="150px" )
                    self.add_input('LnxStringSocket', 'Width', default_value='300px')
                case 51:
                    self.add_input('LnxStringSocket', 'Src')
                    self.add_input('LnxStringSocket', 'Height' , default_value='150px')
                    self.add_input('LnxStringSocket', 'Width', default_value='150px')
                case 52:
                    self.add_input('LnxStringSocket', 'Type', default_value='text')
                    self.add_input('LnxStringSocket', 'Value')
                case 55:
                    self.add_input('LnxStringSocket', 'For', default_value='element_id')
                    self.add_input('LnxStringSocket', 'Form', default_value='form_id')
                case 57:
                    self.add_input('LnxStringSocket', 'Value')
                case 58:
                    self.add_input('LnxStringSocket', 'Href', default_value='#')
                    self.add_input('LnxStringSocket', 'Hreflang', default_value='en')
                    self.add_input('LnxStringSocket', 'Title')
                case 58:
                    self.add_input('LnxStringSocket', 'Name', default_value='mapname')
                case 63:
                    self.add_input('LnxStringSocket', 'Charset', default_value='character_set')
                    self.add_input('LnxStringSocket', 'Content', default_value='text')
                case 64:
                    self.add_input('LnxStringSocket', 'form', default_value='form_id')
                    self.add_input('LnxStringSocket', 'high')
                    self.add_input('LnxStringSocket', 'low')
                    self.add_input('LnxStringSocket', 'max')
                    self.add_input('LnxStringSocket', 'min')
                    self.add_input('LnxStringSocket', 'optimum')
                    self.add_input('LnxStringSocket', 'value')
                case 67:
                    self.add_input('LnxStringSocket', 'data', default_value='URL')
                    self.add_input('LnxStringSocket', 'form', default_value='form_id')
                    self.add_input('LnxStringSocket', 'height', default_value='pixels')
                    self.add_input('LnxStringSocket', 'name', default_value='name')
                    self.add_input('LnxStringSocket', 'type', default_value='media_type')
                    self.add_input('LnxStringSocket', 'usemap', default_value='#mapname')
                    self.add_input('LnxStringSocket', 'width', default_value='pixels')
                case 68:
                    self.add_input('LnxStringSocket', 'start', default_value='number')
                case 69:
                    self.add_input('LnxStringSocket', 'label', default_value='text')
                case 70:
                    self.add_input('LnxStringSocket', 'label', default_value='text')
                    self.add_input('LnxStringSocket', 'value', default_value='value')
                case 71:
                    self.add_input('LnxStringSocket', 'for', default_value='element_id')
                    self.add_input('LnxStringSocket', 'form', default_value='form_id')
                    self.add_input('LnxStringSocket', 'name', default_value='name')
                case 75:
                    self.add_input('LnxStringSocket', 'max', default_value='number')
                    self.add_input('LnxStringSocket', 'value', default_value='number')
                case 76:
                    self.add_input('LnxStringSocket', 'cite', default_value='URL')
                case 78:
                    self.add_input('LnxStringSocket', 'cite', default_value='URL')
                case 79:
                    self.add_input('LnxStringSocket', 'integrity' , default_value='filehash')
                    self.add_input('LnxStringSocket', 'Src')
                    self.add_input('LnxStringSocket', 'type', default_value='scripttype')
                case 81:
                    self.add_input('LnxStringSocket', 'form' , default_value='form_id')
                    self.add_input('LnxStringSocket', 'name' , default_value='text')
                    self.add_input('LnxStringSocket', 'type', default_value='scripttype')
                    self.add_input('LnxStringSocket', 'size', default_value='number')
                case 84:
                    self.add_input('LnxStringSocket', 'size')
                    self.add_input('LnxStringSocket', 'src' , default_value='URL')
                    self.add_input('LnxStringSocket', 'srcset', default_value='URL')
                case 87:
                    self.add_input('LnxStringSocket', 'type', default_value='media_type')
                case 93:
                    self.add_input('LnxStringSocket', 'colspan' , default_value='number')
                    self.add_input('LnxStringSocket', 'headers' , default_value='header_id')
                    self.add_input('LnxStringSocket', 'rowspan', default_value='number')
                case 95:
                    self.add_input('LnxStringSocket', 'cols' , default_value='number')
                    self.add_input('LnxStringSocket', 'dirname' , default_value='name.dir')
                    self.add_input('LnxStringSocket', 'rowspan', default_value='number')
                    self.add_input('LnxStringSocket', 'form', default_value='form_id')
                    self.add_input('LnxStringSocket', 'maxlength', default_value='number')
                    self.add_input('LnxStringSocket', 'name' , default_value='text')
                    self.add_input('LnxStringSocket', 'placeholder' , default_value='text')
                    self.add_input('LnxStringSocket', 'rows' , default_value='number')
                case 97:
                    self.add_input('LnxStringSocket', 'abbr' , default_value='text')
                    self.add_input('LnxStringSocket', 'colspan' , default_value='number')
                    self.add_input('LnxStringSocket', 'headers', default_value='header_id')
                    self.add_input('LnxStringSocket', 'rowspan', default_value='number')
                case 99:
                    self.add_input('LnxStringSocket', 'Datetime', default_value='YYYY-MM-DDThh:mm:ssTZD')
                case 102:
                    self.add_input('LnxStringSocket', 'Src', default_value='URL')
                    self.add_input('LnxStringSocket', 'srclang', default_value='en')
                    self.add_input('LnxStringSocket', 'label', default_value='text')
                case 106:
                    self.add_input('LnxStringSocket', 'Src', default_value='URL')
                    self.add_input('LnxStringSocket', 'width', default_value='pixels')
                    self.add_input('LnxStringSocket', 'height', default_value='pixels')
                    self.add_input('LnxStringSocket', 'poster', default_value='URL')
            if index == 0:
                self.add_input('LnxStringSocket', 'Href', default_value='#')
            elif index == 3:
                self.add_input('LnxStringSocket', 'Alt')
                self.add_input('LnxStringSocket', 'Coords')
                self.add_input('LnxStringSocket', 'Href')
            elif index == 6:
                self.add_input('LnxStringSocket', 'Src')
            elif index == 11:
                self.add_input('LnxStringSocket', 'Cite', default_value='URL')
            elif index == 14:
                self.add_input('LnxStringSocket', 'Type', default_value='Submit')
            elif index == 15:
                self.add_input('LnxStringSocket', 'Height', default_value='150px')
                self.add_input('LnxStringSocket', 'Width', default_value='300px')
            elif index in (19, 20):
                self.add_input('LnxStringSocket', 'Span')
            elif index == 21:
                self.add_input('LnxStringSocket', 'Value')
            elif index in (24, 53):
                self.add_input('LnxStringSocket', 'Cite', default_value='URL')
                self.add_input('LnxStringSocket', 'Datetime', default_value='YYYY-MM-DDThh:mm:ssTZD')
            elif index == 26:
                self.add_input('LnxStringSocket', 'Title')
            elif index == 32:
                self.add_input('LnxStringSocket', 'Src', default_value='URL')
                self.add_input('LnxStringSocket', 'Type')
                self.add_input('LnxStringSocket', 'Height')
                self.add_input('LnxStringSocket', 'Width')
            elif index == 33:
                self.add_input('LnxStringSocket', 'Form')
                self.add_input('LnxStringSocket', 'Name')
            elif index == 37:
                self.add_input('LnxStringSocket', 'Action', default_value='URL')
                self.add_input('LnxStringSocket', 'Method', default_value='get')
            elif index == 44:
                self.add_input('LnxStringSocket', 'Profile', default_value='URI')
            elif index == 48:
                self.add_input('LnxBoolSocket', 'xmlns' , default_value=False )
            elif index == 50:
                self.add_input('LnxStringSocket', 'Src', default_value='URL')
                self.add_input('LnxStringSocket', 'Height' , default_value="150px" )
                self.add_input('LnxStringSocket', 'Width', default_value='300px')
            elif index == 51:
                self.add_input('LnxStringSocket', 'Src')
                self.add_input('LnxStringSocket', 'Height' , default_value='150px')
                self.add_input('LnxStringSocket', 'Width', default_value='150px')
            elif index == 52:
                self.add_input('LnxStringSocket', 'Type', default_value='text')
                self.add_input('LnxStringSocket', 'Value')
            elif index == 55:
                self.add_input('LnxStringSocket', 'For', default_value='element_id')
                self.add_input('LnxStringSocket', 'Form', default_value='form_id')
            elif index == 57:
                self.add_input('LnxStringSocket', 'Value')
            elif index == 58:
                self.add_input('LnxStringSocket', 'Href', default_value='#')
                self.add_input('LnxStringSocket', 'Hreflang', default_value='en')
                self.add_input('LnxStringSocket', 'Title')
            # Note: There's a duplicate case 58 in the original, handling as separate elif
            elif index == 60:  # This was the second case 58, likely meant to be a different index
                self.add_input('LnxStringSocket', 'Name', default_value='mapname')
            elif index == 63:
                self.add_input('LnxStringSocket', 'Charset', default_value='character_set')
                self.add_input('LnxStringSocket', 'Content', default_value='text')
            elif index == 64:
                self.add_input('LnxStringSocket', 'form', default_value='form_id')
                self.add_input('LnxStringSocket', 'high')
                self.add_input('LnxStringSocket', 'low')
                self.add_input('LnxStringSocket', 'max')
                self.add_input('LnxStringSocket', 'min')
                self.add_input('LnxStringSocket', 'optimum')
                self.add_input('LnxStringSocket', 'value')
            elif index == 67:
                self.add_input('LnxStringSocket', 'data', default_value='URL')
                self.add_input('LnxStringSocket', 'form', default_value='form_id')
                self.add_input('LnxStringSocket', 'height', default_value='pixels')
                self.add_input('LnxStringSocket', 'name', default_value='name')
                self.add_input('LnxStringSocket', 'type', default_value='media_type')
                self.add_input('LnxStringSocket', 'usemap', default_value='#mapname')
                self.add_input('LnxStringSocket', 'width', default_value='pixels')
            elif index == 68:
                self.add_input('LnxStringSocket', 'start', default_value='number')
            elif index == 69:
                self.add_input('LnxStringSocket', 'label', default_value='text')
            elif index == 70:
                self.add_input('LnxStringSocket', 'label', default_value='text')
                self.add_input('LnxStringSocket', 'value', default_value='value')
            elif index == 71:
                self.add_input('LnxStringSocket', 'for', default_value='element_id')
                self.add_input('LnxStringSocket', 'form', default_value='form_id')
                self.add_input('LnxStringSocket', 'name', default_value='name')
            elif index == 75:
                self.add_input('LnxStringSocket', 'max', default_value='number')
                self.add_input('LnxStringSocket', 'value', default_value='number')
            elif index == 76:
                self.add_input('LnxStringSocket', 'cite', default_value='URL')
            elif index == 78:
                self.add_input('LnxStringSocket', 'cite', default_value='URL')
            elif index == 79:
                self.add_input('LnxStringSocket', 'integrity' , default_value='filehash')
                self.add_input('LnxStringSocket', 'Src')
                self.add_input('LnxStringSocket', 'type', default_value='scripttype')
            elif index == 81:
                self.add_input('LnxStringSocket', 'form' , default_value='form_id')
                self.add_input('LnxStringSocket', 'name' , default_value='text')
                self.add_input('LnxStringSocket', 'type', default_value='scripttype')
                self.add_input('LnxStringSocket', 'size', default_value='number')
            elif index == 84:
                self.add_input('LnxStringSocket', 'size')
                self.add_input('LnxStringSocket', 'src' , default_value='URL')
                self.add_input('LnxStringSocket', 'srcset', default_value='URL')
            elif index == 87:
                self.add_input('LnxStringSocket', 'type', default_value='media_type')
            elif index == 93:
                self.add_input('LnxStringSocket', 'colspan' , default_value='number')
                self.add_input('LnxStringSocket', 'headers' , default_value='header_id')
                self.add_input('LnxStringSocket', 'rowspan', default_value='number')
            elif index == 95:
                self.add_input('LnxStringSocket', 'cols' , default_value='number')
                self.add_input('LnxStringSocket', 'dirname' , default_value='name.dir')
                self.add_input('LnxStringSocket', 'rowspan', default_value='number')
                self.add_input('LnxStringSocket', 'form', default_value='form_id')
                self.add_input('LnxStringSocket', 'maxlength', default_value='number')
                self.add_input('LnxStringSocket', 'name' , default_value='text')
                self.add_input('LnxStringSocket', 'placeholder' , default_value='text')
                self.add_input('LnxStringSocket', 'rows' , default_value='number')
            elif index == 97:
                self.add_input('LnxStringSocket', 'abbr' , default_value='text')
                self.add_input('LnxStringSocket', 'colspan' , default_value='number')
                self.add_input('LnxStringSocket', 'headers', default_value='header_id')
                self.add_input('LnxStringSocket', 'rowspan', default_value='number')
            elif index == 99:
                self.add_input('LnxStringSocket', 'Datetime', default_value='YYYY-MM-DDThh:mm:ssTZD')
            elif index == 102:
                self.add_input('LnxStringSocket', 'Src', default_value='URL')
                self.add_input('LnxStringSocket', 'srclang', default_value='en')
                self.add_input('LnxStringSocket', 'label', default_value='text')
            elif index == 106:
                self.add_input('LnxStringSocket', 'Src', default_value='URL')
                self.add_input('LnxStringSocket', 'width', default_value='pixels')
                self.add_input('LnxStringSocket', 'height', default_value='pixels')
                self.add_input('LnxStringSocket', 'poster', default_value='URL')

            for i in range(len(self.inputs)):
                if self.inputs[i].name in self.data_map:

@ -38,18 +38,17 @@ class JSEventTargetNode(LnxLogicTreeNode):
            # Arguments for type Client
            index = self.get_count_in(select_current)

            match index:
                case 2:
                    self.add_input('LnxNodeSocketAction', 'In')
                    self.add_input('LnxDynamicSocket', 'JS Object')
                    self.add_input('LnxDynamicSocket', 'Event')
                case _:
                    self.add_input('LnxNodeSocketAction', 'In')
                    self.add_input('LnxDynamicSocket', 'JS Object')
                    self.add_input('LnxStringSocket', 'Type')
                    self.add_input('LnxDynamicSocket', 'Listener')
                    self.add_input('LnxDynamicSocket', 'Options')
                    self.add_input('LnxBoolSocket', 'unTrusted')
            if index == 2:
                self.add_input('LnxNodeSocketAction', 'In')
                self.add_input('LnxDynamicSocket', 'JS Object')
                self.add_input('LnxDynamicSocket', 'Event')
            else:
                self.add_input('LnxNodeSocketAction', 'In')
                self.add_input('LnxDynamicSocket', 'JS Object')
                self.add_input('LnxStringSocket', 'Type')
                self.add_input('LnxDynamicSocket', 'Listener')
                self.add_input('LnxDynamicSocket', 'Options')
                self.add_input('LnxBoolSocket', 'unTrusted')

        self['property0'] = value


@ -43,27 +43,26 @@ class RenderElementNode(LnxLogicTreeNode):
            # Arguments for type Client
            index = self.get_count_in(select_current)

            match index:
                case 2:
                    self.add_input('LnxNodeSocketAction', 'In')
                    self.add_input('LnxDynamicSocket', 'Torrent')
                    self.add_input('LnxStringSocket', 'Selector')
                case 5:
                    self.add_input('LnxNodeSocketAction', 'In')
                    self.add_input('LnxDynamicSocket', 'Element')
                    self.add_input('LnxStringSocket', 'HTML')
                case 6:
                    self.add_input('LnxNodeSocketAction', 'In')
                    self.add_input('LnxDynamicSocket', 'Element')
                    self.add_input('LnxStringSocket', 'Text')
                case 7:
                    self.add_input('LnxNodeSocketAction', 'In')
                    self.add_input('LnxStringSocket', 'HTML')
                    self.add_input('LnxStringSocket', 'Selector')
                case _:
                    self.add_input('LnxNodeSocketAction', 'In')
                    self.add_input('LnxDynamicSocket', 'Element')
                    self.add_input('LnxStringSocket', 'Selector')
            if index == 2:
                self.add_input('LnxNodeSocketAction', 'In')
                self.add_input('LnxDynamicSocket', 'Torrent')
                self.add_input('LnxStringSocket', 'Selector')
            elif index == 5:
                self.add_input('LnxNodeSocketAction', 'In')
                self.add_input('LnxDynamicSocket', 'Element')
                self.add_input('LnxStringSocket', 'HTML')
            elif index == 6:
                self.add_input('LnxNodeSocketAction', 'In')
                self.add_input('LnxDynamicSocket', 'Element')
                self.add_input('LnxStringSocket', 'Text')
            elif index == 7:
                self.add_input('LnxNodeSocketAction', 'In')
                self.add_input('LnxStringSocket', 'HTML')
                self.add_input('LnxStringSocket', 'Selector')
            else:
                self.add_input('LnxNodeSocketAction', 'In')
                self.add_input('LnxDynamicSocket', 'Element')
                self.add_input('LnxStringSocket', 'Selector')

        self['property0'] = value


@ -17,6 +17,17 @@ class OnEventNode(LnxLogicTreeNode):
        'custom': 'Custom'
    }

    def update(self):
        if self.property1 != 'custom':
            if self.inputs[0].is_linked:
                self.label = f'{self.bl_label}: {self.property1}'
            else:
                self.label = f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
        elif self.inputs[1].is_linked:
            self.label = f'{self.bl_label}: {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'

    def set_mode(self, context):
        if self.property1 != 'custom':
            if len(self.inputs) > 1:
@ -25,7 +36,17 @@ class OnEventNode(LnxLogicTreeNode):
            if len(self.inputs) < 2:
                self.add_input('LnxNodeSocketAction', 'In')
                self.inputs.move(1, 0)


        if self.property1 != 'custom':
            if self.inputs[0].is_linked:
                self.label = f'{self.bl_label}: {self.property1}'
            else:
                self.label = f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
        elif self.inputs[1].is_linked:
            self.label = f'{self.bl_label}: {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'

    # Use a new property to preserve compatibility
    property1: HaxeEnumProperty(
        'property1',
@ -52,9 +73,15 @@ class OnEventNode(LnxLogicTreeNode):
        layout.prop(self, 'property1', text='')

    def draw_label(self) -> str:
        if self.inputs[0].is_linked:
            return self.bl_label
        return f'{self.bl_label}: {self.inputs[0].get_default_value()}'
        if self.property1 != 'custom':
            if self.inputs[0].is_linked:
                return f'{self.bl_label}: {self.property1}'
            else:
                return f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
        elif self.inputs[1].is_linked:
            return f'{self.bl_label}: {self.property1}'
        else:
            return f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'

    def get_replacement_node(self, node_tree: bpy.types.NodeTree):
        if self.lnx_version not in (0, 1):

@ -7,12 +7,19 @@ class KeyboardNode(LnxLogicTreeNode):
    lnx_section = 'keyboard'
    lnx_version = 2

    def update(self):
        self.label = f'{self.bl_label}: {self.property0} {self.property1}'

    def upd(self, context):
        self.label = f'{self.bl_label}: {self.property0} {self.property1}'


    property0: HaxeEnumProperty(
        'property0',
        items = [('started', 'Started', 'The keyboard button starts to be pressed'),
                 ('down', 'Down', 'The keyboard button is pressed'),
                 ('released', 'Released', 'The keyboard button stops being pressed')],
        name='', default='down')
        name='', default='down', update=upd)

    property1: HaxeEnumProperty(
        'property1',
@ -69,7 +76,7 @@ class KeyboardNode(LnxLogicTreeNode):
                 ('right', 'right', 'right'),
                 ('left', 'left', 'left'),
                 ('down', 'down', 'down'),],
        name='', default='space')
        name='', default='space', update=upd)

    def lnx_init(self, context):
        self.add_output('LnxNodeSocketAction', 'Out')

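For context on the update=upd changes above, here is a minimal, stand-alone sketch of the pattern (an assumption for illustration, not project code): the callback is declared before the property annotation and passed as a plain function, so Blender calls it with the owning node and the context whenever the enum changes and the label can refresh.

# Hypothetical minimal node with a property update callback; names are made up.
import bpy

def upd(self, context):
    # Called by Blender with the property owner and context on every change.
    self.label = f"Mode: {self.mode}"

class SketchNode(bpy.types.Node):
    bl_idname = 'SketchNode'
    bl_label = 'Sketch'
    mode: bpy.props.EnumProperty(
        items=[('down', 'Down', ''), ('released', 'Released', '')],
        default='down',
        update=upd)  # plain function reference, not a bound method
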
@ -8,13 +8,25 @@ class MouseNode(LnxLogicTreeNode):
    lnx_section = 'mouse'
    lnx_version = 3

    def update(self):
        if self.property0 != 'moved':
            self.label = f'{self.bl_label}: {self.property0} {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property0}'

    def upd(self, context):
        if self.property0 != 'moved':
            self.label = f'{self.bl_label}: {self.property0} {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property0}'

    property0: HaxeEnumProperty(
        'property0',
        items = [('started', 'Started', 'The mouse button begins to be pressed'),
                 ('down', 'Down', 'The mouse button is pressed'),
                 ('released', 'Released', 'The mouse button stops being pressed'),
                 ('moved', 'Moved', 'Moved')],
        name='', default='down')
        name='', default='down', update=upd)
    property1: HaxeEnumProperty(
        'property1',
        items = [('left', 'Left', 'Left mouse button'),
@ -22,7 +34,7 @@ class MouseNode(LnxLogicTreeNode):
                 ('right', 'Right', 'Right mouse button'),
                 ('side1', 'Side 1', 'Side 1 mouse button'),
                 ('side2', 'Side 2', 'Side 2 mouse button')],
        name='', default='left')
        name='', default='left', update=upd)
    property2: HaxeBoolProperty(
        'property2',
        name='Include Debug Console',

@ -66,7 +66,10 @@ class LnxGroupTree(bpy.types.NodeTree):
        """Try to avoid creating loops of group trees with each other"""
        # upstream trees of the tested tree should not share trees with downstream trees of the current tree
        tested_tree_upstream_trees = {t.name for t in self.upstream_trees()}
        current_tree_downstream_trees = {p.node_tree.name for p in bpy.context.space_data.path}
        if bpy.context.space_data is not None:
            current_tree_downstream_trees = {p.node_tree.name for p in bpy.context.space_data.path}
        else:
            current_tree_downstream_trees = set()
        shared_trees = tested_tree_upstream_trees & current_tree_downstream_trees
        return not shared_trees


@ -2,9 +2,17 @@ from collections import OrderedDict
import itertools
import math
import textwrap
from typing import Any, final, Generator, List, Optional, Type, Union
from typing import Any, Dict, Generator, List, Optional, Tuple, Type, Union
from typing import OrderedDict as ODict  # Prevent naming conflicts

try:
    from typing import final
except ImportError:
    # Python < 3.8 compatibility
    def final(f):
        """No final in Python < 3.8"""
        return f

import bpy.types
from bpy.props import *
from nodeitems_utils import NodeItem
@ -39,11 +47,11 @@ PKG_AS_CATEGORY = "__pkgcat__"
nodes = []
category_items: ODict[str, List['LnxNodeCategory']] = OrderedDict()

array_nodes: dict[str, 'LnxLogicTreeNode'] = dict()
array_nodes: Dict[str, 'LnxLogicTreeNode'] = dict()

# See LnxLogicTreeNode.update()
# format: [tree pointer => (num inputs, num input links, num outputs, num output links)]
last_node_state: dict[int, tuple[int, int, int, int]] = {}
last_node_state: Dict[int, Tuple[int, int, int, int]] = {}


class LnxLogicTreeNode(bpy.types.Node):

@ -10,7 +10,7 @@ mutable (common Python pitfall, be aware of this!), but because they
don't get accessed later it doesn't matter here and we keep it this way
for parity with the Blender API.
"""
from typing import Any, Callable, Sequence, Union
from typing import Any, Callable, List, Sequence, Set, Union

import sys
import bpy
@ -49,6 +49,10 @@ def __haxe_prop(prop_type: Callable, prop_name: str, *args, **kwargs) -> Any:
    # bpy.types.Bone, remove them here to prevent registration errors
    if 'tags' in kwargs:
        del kwargs['tags']

    # Remove override parameter for Blender versions that don't support it
    if bpy.app.version < (2, 90, 0) and 'override' in kwargs:
        del kwargs['override']

    return prop_type(*args, **kwargs)

@ -87,7 +91,7 @@ def HaxeBoolVectorProperty(
        update=None,
        get=None,
        set=None
) -> list['bpy.types.BoolProperty']:
) -> List['bpy.types.BoolProperty']:
    """Declares a new BoolVectorProperty that has a Haxe counterpart
    with the given prop_name (Python and Haxe names must be identical
    for now).
@ -118,7 +122,7 @@ def HaxeEnumProperty(
        items: Sequence,
        name: str = "",
        description: str = "",
        default: Union[str, set[str]] = None,
        default: Union[str, Set[str]] = None,
        options: set = {'ANIMATABLE'},
        override: set = set(),
        tags: set = set(),
@ -180,7 +184,7 @@ def HaxeFloatVectorProperty(
        update=None,
        get=None,
        set=None
) -> list['bpy.types.FloatProperty']:
) -> List['bpy.types.FloatProperty']:
    """Declares a new FloatVectorProperty that has a Haxe counterpart
    with the given prop_name (Python and Haxe names must be identical
    for now).
@ -232,7 +236,7 @@ def HaxeIntVectorProperty(
        update=None,
        get=None,
        set=None
) -> list['bpy.types.IntProperty']:
) -> List['bpy.types.IntProperty']:
    """Declares a new IntVectorProperty that has a Haxe counterpart with
    the given prop_name (Python and Haxe names must be identical for now).
    """

@ -18,6 +18,10 @@ class CallGroupNode(LnxLogicTreeNode):
    def lnx_init(self, context):
        pass

    def update(self):
        if self.group_tree:
            self.label = f'Group: {self.group_tree.name}'

    # Function to add input sockets and re-link sockets
    def update_inputs(self, tree, node, inp_sockets, in_links):
        count = 0
@ -58,10 +62,12 @@ class CallGroupNode(LnxLogicTreeNode):
                        tree.links.new(current_socket, link)
            count = count + 1

    def remove_tree(self):
        self.group_tree = None

    def update_sockets(self, context):
        if self.group_tree:
            self.label = f'Group: {self.group_tree.name}'
        else:
            self.label = 'Call Node Group'

        # List to store from and to sockets of connected nodes
        from_socket_list = []
        to_socket_list = []
@ -107,6 +113,10 @@ class CallGroupNode(LnxLogicTreeNode):
    # Property to store group tree pointer
    group_tree: PointerProperty(name='Group', type=bpy.types.NodeTree, update=update_sockets)

    def edit_tree(self):
        self.label = f'Group: {self.group_tree.name}'
        bpy.ops.lnx.edit_group_tree()

    def draw_label(self) -> str:
        if self.group_tree is not None:
            return f'Group: {self.group_tree.name}'
@ -134,8 +144,9 @@ class CallGroupNode(LnxLogicTreeNode):
            op = row_name.operator('lnx.unlink_group_tree', icon='X', text='')
            op.node_index = self.get_id_str()
        row_ops.enabled = not self.group_tree is None
        op = row_ops.operator('lnx.edit_group_tree', icon='FULLSCREEN_ENTER', text='Edit tree')
        op = row_ops.operator('lnx.node_call_func', icon='FULLSCREEN_ENTER', text='Edit tree')
        op.node_index = self.get_id_str()
        op.callback_name = 'edit_tree'

    def get_replacement_node(self, node_tree: bpy.types.NodeTree):
        if self.lnx_version not in (0, 1, 2):

@ -27,7 +27,10 @@ class GroupInputsNode(LnxLogicTreeNode):
    copy_override: BoolProperty(name='copy override', description='', default=False)

    def init(self, context):
        tree = bpy.context.space_data.edit_tree
        if bpy.context.space_data is not None:
            tree = bpy.context.space_data.edit_tree
        else:
            return
        node_count = 0
        for node in tree.nodes:
            if node.bl_idname == 'LNGroupInputsNode':

@ -27,7 +27,10 @@ class GroupOutputsNode(LnxLogicTreeNode):
    copy_override: BoolProperty(name='copy override', description='', default=False)

    def init(self, context):
        tree = bpy.context.space_data.edit_tree
        if bpy.context.space_data is not None:
            tree = bpy.context.space_data.edit_tree
        else:
            return
        node_count = 0
        for node in tree.nodes:
            if node.bl_idname == 'LNGroupOutputsNode':

@ -0,0 +1,51 @@
from lnx.logicnode.lnx_nodes import *


class ProbabilisticIndexNode(LnxLogicTreeNode):
    """This system gets an index based on probabilistic values,
    ensuring that the total sum of the probabilities equals 1.
    If the probabilities do not sum to 1, they will be adjusted
    accordingly to guarantee a total sum of 1. Only one output will be
    triggered at a time.
    @output index: the index.
    """

    bl_idname = 'LNProbabilisticIndexNode'
    bl_label = 'Probabilistic Index'
    lnx_section = 'logic'
    lnx_version = 1

    num_choices: IntProperty(default=0, min=0)

    def __init__(self):
        array_nodes[str(id(self))] = self

    def lnx_init(self, context):

        self.add_output('LnxIntSocket', 'Index')

    def draw_buttons(self, context, layout):
        row = layout.row(align=True)

        op = row.operator('lnx.node_call_func', text='New', icon='PLUS', emboss=True)
        op.node_index = str(id(self))
        op.callback_name = 'add_func'
        op2 = row.operator('lnx.node_call_func', text='', icon='X', emboss=True)
        op2.node_index = str(id(self))
        op2.callback_name = 'remove_func'

    def add_func(self):
        self.add_input('LnxFloatSocket', f'Prob Index {self.num_choices}')
        self.num_choices += 1

    def remove_func(self):
        if len(self.inputs) > 0:
            self.inputs.remove(self.inputs[-1])
            self.num_choices -= 1

    def draw_label(self) -> str:
        if self.num_choices == 0:
            return self.bl_label

        return f'{self.bl_label}: [{self.num_choices}]'

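The docstring above describes normalized weighted selection; the runtime side of the node is not shown in this diff, so here is a minimal sketch of the described behaviour under that assumption (weights are rescaled so they effectively sum to 1, then a single index is drawn):

# Minimal sketch of the selection logic described above; assumed behaviour,
# not the node's actual runtime implementation.
import random
from typing import List

def pick_index(probs: List[float]) -> int:
    total = sum(probs)
    if total <= 0:
        return -1  # nothing to choose from
    r = random.uniform(0, total)  # scaling r by the total is equivalent to normalizing
    acc = 0.0
    for i, p in enumerate(probs):
        acc += p
        if r <= acc:
            return i
    return len(probs) - 1  # guard against floating-point rounding

print(pick_index([0.2, 0.5, 0.3]))
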
@ -350,7 +350,10 @@ class LNX_PG_TreeVarListItem(bpy.types.PropertyGroup):
    def _set_name(self, value: str):
        old_name = self._get_name()

        tree = bpy.context.space_data.path[-1].node_tree
        if bpy.context.space_data is not None:
            tree = bpy.context.space_data.path[-1].node_tree
        else:
            return  # No valid context
        lst = tree.lnx_treevariableslist

        if value == '':

@ -1,7 +1,10 @@
from lnx.logicnode.lnx_nodes import *

class SetWorldNode(LnxLogicTreeNode):
    """Sets the World of the active scene."""
    """Sets the World of the active scene.
    World must be either associated to a scene or have fake user."""


    bl_idname = 'LNSetWorldNode'
    bl_label = 'Set World'
    lnx_version = 1

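The added docstring line points at a practical requirement: a World datablock that no scene uses is normally purged and never reaches the exported project. A minimal sketch of the assumed workflow, keeping such a World alive by giving it a fake user:

# Sketch of the assumed workflow, not project code: keep an unassigned
# World datablock alive so the Set World node can switch to it at runtime.
import bpy

world = bpy.data.worlds.get("MyWorld") or bpy.data.worlds.new("MyWorld")
world.use_fake_user = True  # protects the datablock from orphan purging
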
@ -116,7 +116,73 @@ def remove_readonly(func, path, excinfo):
    os.chmod(path, stat.S_IWRITE)
    func(path)


appended_scenes = []

def load_external_blends():
    global appended_scenes

    wrd = bpy.data.worlds['Lnx']
    if not hasattr(wrd, 'lnx_external_blends_path'):
        return

    external_path = getattr(wrd, 'lnx_external_blends_path', '')
    if not external_path or not external_path.strip():
        return

    abs_path = bpy.path.abspath(external_path.strip())
    if not os.path.exists(abs_path):
        return

    # Walk recursively through all subdirs
    for root, dirs, files in os.walk(abs_path):
        for filename in files:
            if not filename.endswith(".blend"):
                continue

            blend_path = os.path.join(root, filename)
            try:
                with bpy.data.libraries.load(blend_path, link=True) as (data_from, data_to):
                    data_to.scenes = list(data_from.scenes)

                for scn in data_to.scenes:
                    if scn is not None and scn not in appended_scenes:
                        # make name unique with file name
                        scn.name += "_" + filename.replace(".blend", "")
                        appended_scenes.append(scn)

                log.info(f"Loaded external blend: {blend_path}")
            except Exception as e:
                log.error(f"Failed to load external blend {blend_path}: {e}")

def clear_external_scenes():
    global appended_scenes
    if not appended_scenes:
        return

    for scn in appended_scenes:
        try:
            bpy.data.scenes.remove(scn, do_unlink=True)
        except Exception as e:
            log.error(f"Failed to remove scene {scn.name}: {e}")

    for lib in list(bpy.data.libraries):
        try:
            if lib.users == 0:
                bpy.data.libraries.remove(lib)
        except Exception as e:
            log.error(f"Failed to remove library {lib.name}: {e}")

    try:
        bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
    except Exception as e:
        log.error(f"Failed to purge orphan data: {e}")

    appended_scenes = []

def export_data(fp, sdk_path):
    load_external_blends()

    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()

@ -323,6 +389,8 @@ def export_data(fp, sdk_path):
        state.last_resy = resy
        state.last_scene = scene_name

    clear_external_scenes()

def compile(assets_only=False):
    wrd = bpy.data.worlds['Lnx']
    fp = lnx.utils.get_fp()

@ -1,5 +1,5 @@
 | 
			
		||||
import os
 | 
			
		||||
from typing import Optional, TextIO
 | 
			
		||||
from typing import List, Optional, TextIO, Dict, Any, TypeVar, TYPE_CHECKING
 | 
			
		||||
 | 
			
		||||
import bpy
 | 
			
		||||
 | 
			
		||||
@ -17,14 +17,14 @@ if lnx.is_reload(__name__):
 | 
			
		||||
else:
 | 
			
		||||
    lnx.enable_reload(__name__)
 | 
			
		||||
 | 
			
		||||
parsed_nodes = []
 | 
			
		||||
parsed_ids = dict() # Sharing node data
 | 
			
		||||
function_nodes = dict()
 | 
			
		||||
function_node_outputs = dict()
 | 
			
		||||
parsed_nodes = []  # type: List[str]
 | 
			
		||||
parsed_ids = dict()  # type: Dict[str, str] # Sharing node data
 | 
			
		||||
function_nodes = dict()  # type: Dict[str, Any]
 | 
			
		||||
function_node_outputs = dict()  # type: Dict[str, str]
 | 
			
		||||
group_name = ''
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_logic_trees() -> list['lnx.nodes_logic.LnxLogicTree']:
 | 
			
		||||
def get_logic_trees() -> List['lnx.nodes_logic.LnxLogicTree']:
 | 
			
		||||
    ar = []
 | 
			
		||||
    for node_group in bpy.data.node_groups:
 | 
			
		||||
        if node_group.bl_idname == 'LnxLogicTreeType':
 | 
			
		||||
@ -140,7 +140,7 @@ def build_node_group_tree(node_group: 'lnx.nodes_logic.LnxLogicTree', f: TextIO,
 | 
			
		||||
    return group_input_name, group_output_name
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def build_node(node: bpy.types.Node, f: TextIO, name_prefix: str = None) -> Optional[str]:
 | 
			
		||||
def build_node(node: bpy.types.Node, f: TextIO, name_prefix: Optional[str] = None) -> Optional[str]:
 | 
			
		||||
    """Builds the given node and returns its name. f is an opened file object."""
 | 
			
		||||
    global parsed_nodes
 | 
			
		||||
    global parsed_ids
 | 
			
		||||
 | 
			
		||||
@ -39,14 +39,15 @@ def add_world_defs():
 | 
			
		||||
    # Store contexts
 | 
			
		||||
    if rpdat.rp_hdr == False:
 | 
			
		||||
        wrd.world_defs += '_LDR'
 | 
			
		||||
        
 | 
			
		||||
    if lnx.utils.get_active_scene().world is not None:
 | 
			
		||||
        if lnx.utils.get_active_scene().world.lnx_light_ies_texture:
 | 
			
		||||
            wrd.world_defs += '_LightIES'
 | 
			
		||||
            assets.add_embedded_data('iestexture.png')
 | 
			
		||||
 | 
			
		||||
    if lnx.utils.get_active_scene().world.lnx_light_ies_texture == True:
 | 
			
		||||
        wrd.world_defs += '_LightIES'
 | 
			
		||||
        assets.add_embedded_data('iestexture.png')
 | 
			
		||||
 | 
			
		||||
    if lnx.utils.get_active_scene().world.lnx_light_clouds_texture == True:
 | 
			
		||||
        wrd.world_defs += '_LightClouds'
 | 
			
		||||
        assets.add_embedded_data('cloudstexture.png')
 | 
			
		||||
        if lnx.utils.get_active_scene().world.lnx_light_clouds_texture:
 | 
			
		||||
            wrd.world_defs += '_LightClouds'
 | 
			
		||||
            assets.add_embedded_data('cloudstexture.png')
 | 
			
		||||
 | 
			
		||||
    if rpdat.rp_renderer == 'Deferred':
 | 
			
		||||
        assets.add_khafile_def('lnx_deferred')
 | 
			
		||||
@ -240,7 +241,7 @@ def build():
 | 
			
		||||
                compo_depth = True
 | 
			
		||||
 | 
			
		||||
            focus_distance = 0.0
 | 
			
		||||
            if len(bpy.data.cameras) > 0 and lnx.utils.get_active_scene().camera.data.dof.use_dof:
 | 
			
		||||
            if lnx.utils.get_active_scene().camera and lnx.utils.get_active_scene().camera.data.dof.use_dof:
 | 
			
		||||
                focus_distance = lnx.utils.get_active_scene().camera.data.dof.focus_distance
 | 
			
		||||
 | 
			
		||||
            if focus_distance > 0.0:
 | 
			
		||||
 | 
			
		||||
@ -69,7 +69,7 @@ def build():
 | 
			
		||||
                if rpdat.lnx_irradiance:
 | 
			
		||||
                    # Plain background color
 | 
			
		||||
                    if '_EnvCol' in world.world_defs:
 | 
			
		||||
                        world_name = lnx.utils.safestr(world.name)
 | 
			
		||||
                        world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
 | 
			
		||||
                        # Irradiance json file name
 | 
			
		||||
                        world.lnx_envtex_name = world_name
 | 
			
		||||
                        world.lnx_envtex_irr_name = world_name
 | 
			
		||||
@ -99,7 +99,7 @@ def build():
 | 
			
		||||
def create_world_shaders(world: bpy.types.World):
 | 
			
		||||
    """Creates fragment and vertex shaders for the given world."""
 | 
			
		||||
    global shader_datas
 | 
			
		||||
    world_name = lnx.utils.safestr(world.name)
 | 
			
		||||
    world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
 | 
			
		||||
    pass_name = 'World_' + world_name
 | 
			
		||||
 | 
			
		||||
    shader_props = {
 | 
			
		||||
@ -160,7 +160,7 @@ def create_world_shaders(world: bpy.types.World):
 | 
			
		||||
 | 
			
		||||
def build_node_tree(world: bpy.types.World, frag: Shader, vert: Shader, con: ShaderContext):
 | 
			
		||||
    """Generates the shader code for the given world."""
 | 
			
		||||
    world_name = lnx.utils.safestr(world.name)
 | 
			
		||||
    world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
 | 
			
		||||
    world.world_defs = ''
 | 
			
		||||
    rpdat = lnx.utils.get_rp()
 | 
			
		||||
    wrd = bpy.data.worlds['Lnx']
 | 
			
		||||
@ -175,7 +175,7 @@ def build_node_tree(world: bpy.types.World, frag: Shader, vert: Shader, con: Sha
 | 
			
		||||
        frag.write('fragColor.rgb = backgroundCol;')
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    parser_state = ParserState(ParserContext.WORLD, world.name, world)
 | 
			
		||||
    parser_state = ParserState(ParserContext.WORLD, lnx.utils.asset_name(world) if world.library else world.name, world)
 | 
			
		||||
    parser_state.con = con
 | 
			
		||||
    parser_state.curshader = frag
 | 
			
		||||
    parser_state.frag = frag
 | 
			
		||||
 | 
			
		||||
@ -94,6 +94,7 @@ def parse_material_output(node: bpy.types.Node, custom_particle_node: bpy.types.
 | 
			
		||||
    parse_displacement = state.parse_displacement
 | 
			
		||||
    particle_info = {
 | 
			
		||||
        'index': False,
 | 
			
		||||
        'random': False,
 | 
			
		||||
        'age': False,
 | 
			
		||||
        'lifetime': False,
 | 
			
		||||
        'location': False,
 | 
			
		||||
 | 
			
		||||
@ -254,9 +254,10 @@ def parse_particleinfo(node: bpy.types.ShaderNodeParticleInfo, out_socket: bpy.t
 | 
			
		||||
        c.particle_info['index'] = True
 | 
			
		||||
        return 'p_index' if particles_on else '0.0'
 | 
			
		||||
 | 
			
		||||
    # TODO: Random
 | 
			
		||||
    # Random
 | 
			
		||||
    if out_socket == node.outputs[1]:
 | 
			
		||||
        return '0.0'
 | 
			
		||||
        c.particle_info['random'] = True
 | 
			
		||||
        return 'p_random' if particles_on else '0.0'
 | 
			
		||||
 | 
			
		||||
    # Age
 | 
			
		||||
    elif out_socket == node.outputs[2]:
 | 
			
		||||
@ -276,7 +277,7 @@ def parse_particleinfo(node: bpy.types.ShaderNodeParticleInfo, out_socket: bpy.t
 | 
			
		||||
    # Size
 | 
			
		||||
    elif out_socket == node.outputs[5]:
 | 
			
		||||
        c.particle_info['size'] = True
 | 
			
		||||
        return '1.0'
 | 
			
		||||
        return 'p_size' if particles_on else '1.0'
 | 
			
		||||
 | 
			
		||||
    # Velocity
 | 
			
		||||
    elif out_socket == node.outputs[6]:
 | 
			
		||||
 | 
			
		||||
@ -76,7 +76,7 @@ def parse_addshader(node: bpy.types.ShaderNodeAddShader, out_socket: NodeSocket,
 | 
			
		||||
        state.out_ior = '({0} * 0.5 + {1} * 0.5)'.format(ior1, ior2)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if bpy.app.version < (3, 0, 0):
 | 
			
		||||
if bpy.app.version < (2, 92, 0):
 | 
			
		||||
    def parse_bsdfprincipled(node: bpy.types.ShaderNodeBsdfPrincipled, out_socket: NodeSocket, state: ParserState) -> None:
 | 
			
		||||
        if state.parse_surface:
 | 
			
		||||
            c.write_normal(node.inputs[20])
 | 
			
		||||
@ -84,18 +84,20 @@ if bpy.app.version < (3, 0, 0):
 | 
			
		||||
            state.out_metallic = c.parse_value_input(node.inputs[4])
 | 
			
		||||
            state.out_specular = c.parse_value_input(node.inputs[5])
 | 
			
		||||
            state.out_roughness = c.parse_value_input(node.inputs[7])
 | 
			
		||||
            if (node.inputs['Emission Strength'].is_linked or node.inputs['Emission Strength'].default_value != 0.0)\
 | 
			
		||||
                    and (node.inputs['Emission'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission'], (0.0, 0.0, 0.0), comp_alpha=False)):
 | 
			
		||||
            if node.inputs['Emission'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission'], (0.0, 0.0, 0.0), comp_alpha=False):
 | 
			
		||||
                emission_col = c.parse_vector_input(node.inputs[17])
 | 
			
		||||
                emission_strength = c.parse_value_input(node.inputs[18])
 | 
			
		||||
                state.out_emission_col = '({0} * {1})'.format(emission_col, emission_strength)
 | 
			
		||||
                state.out_emission_col = emission_col
 | 
			
		||||
                mat_state.emission_type = mat_state.EmissionType.SHADED
 | 
			
		||||
            else:
 | 
			
		||||
                mat_state.emission_type = mat_state.EmissionType.NO_EMISSION   
 | 
			
		||||
        if state.parse_opacity:
 | 
			
		||||
            state.out_ior = c.parse_value_input(node.inputs[14])
 | 
			
		||||
            state.out_opacity = c.parse_value_input(node.inputs[19])
 | 
			
		||||
if bpy.app.version >= (3, 0, 0) and bpy.app.version <= (4, 1, 0):
 | 
			
		||||
            # In Blender 2.83, Alpha socket is at index 18, not 19
 | 
			
		||||
            if 'Alpha' in node.inputs:
 | 
			
		||||
                state.out_opacity = c.parse_value_input(node.inputs['Alpha'])
 | 
			
		||||
            else:
 | 
			
		||||
                state.out_opacity = '1.0'
 | 
			
		||||
if bpy.app.version >= (2, 92, 0) and bpy.app.version <= (4, 1, 0):
 | 
			
		||||
    def parse_bsdfprincipled(node: bpy.types.ShaderNodeBsdfPrincipled, out_socket: NodeSocket, state: ParserState) -> None:
 | 
			
		||||
        if state.parse_surface:
 | 
			
		||||
            c.write_normal(node.inputs[22])
 | 
			
		||||
 | 
			
		||||
@ -1,4 +1,4 @@
 | 
			
		||||
from typing import Any, Callable, Optional
 | 
			
		||||
from typing import Any, Callable, Dict, List, Optional, TypeVar, Union
 | 
			
		||||
 | 
			
		||||
import bpy
 | 
			
		||||
 | 
			
		||||
@ -32,8 +32,8 @@ else:
 | 
			
		||||
is_displacement = False
 | 
			
		||||
 | 
			
		||||
# User callbacks
 | 
			
		||||
write_material_attribs: Optional[Callable[[dict[str, Any], shader.Shader], bool]] = None
 | 
			
		||||
write_material_attribs_post: Optional[Callable[[dict[str, Any], shader.Shader], None]] = None
 | 
			
		||||
write_material_attribs: Optional[Callable[[Dict[str, Any], shader.Shader], bool]] = None
 | 
			
		||||
write_material_attribs_post: Optional[Callable[[Dict[str, Any], shader.Shader], None]] = None
 | 
			
		||||
write_vertex_attribs: Optional[Callable[[shader.Shader], bool]] = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -58,7 +58,6 @@ def make(context_id, rpasses):
 | 
			
		||||
        con['alpha_blend_destination'] = mat.lnx_blending_destination_alpha
 | 
			
		||||
        con['alpha_blend_operation'] = mat.lnx_blending_operation_alpha
 | 
			
		||||
        con['depth_write'] = False
 | 
			
		||||
        con['compare_mode'] = 'less'
 | 
			
		||||
    elif particle:
 | 
			
		||||
        pass
 | 
			
		||||
    # Depth prepass was performed, exclude mat with depth read that
 | 
			
		||||
@ -66,6 +65,9 @@ def make(context_id, rpasses):
 | 
			
		||||
    elif dprepass and not (rpdat.rp_depth_texture and mat.lnx_depth_read):
 | 
			
		||||
        con['depth_write'] = False
 | 
			
		||||
        con['compare_mode'] = 'equal'
 | 
			
		||||
    else:
 | 
			
		||||
        con['depth_write'] = mat.lnx_depth_write
 | 
			
		||||
        con['compare_mode'] = mat.lnx_compare_mode
 | 
			
		||||
 | 
			
		||||
    attachment_format = 'RGBA32' if '_LDR' in wrd.world_defs else 'RGBA64'
 | 
			
		||||
    con['color_attachments'] = [attachment_format, attachment_format]
 | 
			
		||||
 | 
			
		||||
@ -55,6 +55,7 @@ def write(vert, particle_info=None, shadowmap=False):
 | 
			
		||||
 | 
			
		||||
    # Outs
 | 
			
		||||
    out_index = True if particle_info != None and particle_info['index'] else False
 | 
			
		||||
    out_random = True if particle_info != None and particle_info['random'] else False
 | 
			
		||||
    out_age = True if particle_info != None and particle_info['age'] else False
 | 
			
		||||
    out_lifetime = True if particle_info != None and particle_info['lifetime'] else False
 | 
			
		||||
    out_location = True if particle_info != None and particle_info['location'] else False
 | 
			
		||||
@ -168,58 +169,57 @@ def write(vert, particle_info=None, shadowmap=False):
 | 
			
		||||
            vert.write('float s = sin(p_angle);')
 | 
			
		||||
            vert.write('vec3 center = spos.xyz - p_location;')
 | 
			
		||||
 | 
			
		||||
            match rotation_mode:
 | 
			
		||||
                case 'OB_X':
 | 
			
		||||
                    vert.write('vec3 rz = vec3(center.y, -center.x, center.z);')
 | 
			
		||||
                    vert.write('vec2 rotation = vec2(rz.y * c - rz.z * s, rz.y * s + rz.z * c);')
 | 
			
		||||
                    vert.write('spos.xyz = vec3(rz.x, rotation.x, rotation.y) + p_location;')
 | 
			
		||||
            if rotation_mode == 'OB_X':
 | 
			
		||||
                vert.write('vec3 rz = vec3(center.y, -center.x, center.z);')
 | 
			
		||||
                vert.write('vec2 rotation = vec2(rz.y * c - rz.z * s, rz.y * s + rz.z * c);')
 | 
			
		||||
                vert.write('spos.xyz = vec3(rz.x, rotation.x, rotation.y) + p_location;')
 | 
			
		||||
 | 
			
		||||
                    if (not shadowmap):
 | 
			
		||||
                        vert.write('wnormal = vec3(wnormal.y, -wnormal.x, wnormal.z);')
 | 
			
		||||
                        vert.write('vec2 n_rot = vec2(wnormal.y * c - wnormal.z * s, wnormal.y * s + wnormal.z * c);')
 | 
			
		||||
                        vert.write('wnormal = normalize(vec3(wnormal.x, n_rot.x, n_rot.y));')
 | 
			
		||||
                case 'OB_Y':
 | 
			
		||||
                    vert.write('vec2 rotation = vec2(center.x * c + center.z * s, -center.x * s + center.z * c);')
 | 
			
		||||
                    vert.write('spos.xyz = vec3(rotation.x, center.y, rotation.y) + p_location;')
 | 
			
		||||
                if (not shadowmap):
 | 
			
		||||
                    vert.write('wnormal = vec3(wnormal.y, -wnormal.x, wnormal.z);')
 | 
			
		||||
                    vert.write('vec2 n_rot = vec2(wnormal.y * c - wnormal.z * s, wnormal.y * s + wnormal.z * c);')
 | 
			
		||||
                    vert.write('wnormal = normalize(vec3(wnormal.x, n_rot.x, n_rot.y));')
 | 
			
		||||
            elif rotation_mode == 'OB_Y':
 | 
			
		||||
                vert.write('vec2 rotation = vec2(center.x * c + center.z * s, -center.x * s + center.z * c);')
 | 
			
		||||
                vert.write('spos.xyz = vec3(rotation.x, center.y, rotation.y) + p_location;')
 | 
			
		||||
 | 
			
		||||
                    if (not shadowmap):
 | 
			
		||||
                        vert.write('wnormal = normalize(vec3(wnormal.x * c + wnormal.z * s, wnormal.y, -wnormal.x * s + wnormal.z * c));')
 | 
			
		||||
                case 'OB_Z':
 | 
			
		||||
                    vert.write('vec3 rz = vec3(center.y, -center.x, center.z);')
 | 
			
		||||
                    vert.write('vec3 ry = vec3(-rz.z, rz.y, rz.x);')
 | 
			
		||||
                    vert.write('vec2 rotation = vec2(ry.x * c - ry.y * s, ry.x * s + ry.y * c);')
 | 
			
		||||
                    vert.write('spos.xyz = vec3(rotation.x, rotation.y, ry.z) + p_location;')
 | 
			
		||||
                if (not shadowmap):
 | 
			
		||||
                    vert.write('wnormal = normalize(vec3(wnormal.x * c + wnormal.z * s, wnormal.y, -wnormal.x * s + wnormal.z * c));')
 | 
			
		||||
            elif rotation_mode == 'OB_Z':
 | 
			
		||||
                vert.write('vec3 rz = vec3(center.y, -center.x, center.z);')
 | 
			
		||||
                vert.write('vec3 ry = vec3(-rz.z, rz.y, rz.x);')
 | 
			
		||||
                vert.write('vec2 rotation = vec2(ry.x * c - ry.y * s, ry.x * s + ry.y * c);')
 | 
			
		||||
                vert.write('spos.xyz = vec3(rotation.x, rotation.y, ry.z) + p_location;')
 | 
			
		||||
 | 
			
		||||
                    if (not shadowmap):
 | 
			
		||||
                        vert.write('wnormal = vec3(wnormal.y, -wnormal.x, wnormal.z);')
 | 
			
		||||
                        vert.write('wnormal = vec3(-wnormal.z, wnormal.y, wnormal.x);')
 | 
			
		||||
                        vert.write('vec2 n_rot = vec2(wnormal.x * c - wnormal.y * s, wnormal.x * s + wnormal.y * c);')
 | 
			
		||||
                        vert.write('wnormal = normalize(vec3(n_rot.x, n_rot.y, wnormal.z));')
 | 
			
		||||
                case 'VEL':
 | 
			
		||||
                    vert.write('vec3 forward = -normalize(p_velocity);')
 | 
			
		||||
                    vert.write('if (length(forward) > 1e-5) {')
 | 
			
		||||
                    vert.write('vec3 world_up = vec3(0.0, 0.0, 1.0);')
 | 
			
		||||
                if (not shadowmap):
 | 
			
		||||
                    vert.write('wnormal = vec3(wnormal.y, -wnormal.x, wnormal.z);')
 | 
			
		||||
                    vert.write('wnormal = vec3(-wnormal.z, wnormal.y, wnormal.x);')
 | 
			
		||||
                    vert.write('vec2 n_rot = vec2(wnormal.x * c - wnormal.y * s, wnormal.x * s + wnormal.y * c);')
 | 
			
		||||
                    vert.write('wnormal = normalize(vec3(n_rot.x, n_rot.y, wnormal.z));')
 | 
			
		||||
            elif rotation_mode == 'VEL':
 | 
			
		||||
                vert.write('vec3 forward = -normalize(p_velocity);')
 | 
			
		||||
                vert.write('if (length(forward) > 1e-5) {')
 | 
			
		||||
                vert.write('vec3 world_up = vec3(0.0, 0.0, 1.0);')
 | 
			
		||||
 | 
			
		||||
                    vert.write('if (abs(dot(forward, world_up)) > 0.999) {')
 | 
			
		||||
                    vert.write('world_up = vec3(-1.0, 0.0, 0.0);')
 | 
			
		||||
                    vert.write('}')
 | 
			
		||||
                vert.write('if (abs(dot(forward, world_up)) > 0.999) {')
 | 
			
		||||
                vert.write('world_up = vec3(-1.0, 0.0, 0.0);')
 | 
			
		||||
                vert.write('}')
 | 
			
		||||
 | 
			
		||||
                    vert.write('vec3 right = cross(world_up, forward);')
 | 
			
		||||
                    vert.write('if (length(right) < 1e-5) {')
 | 
			
		||||
                    vert.write('forward = -forward;')
 | 
			
		||||
                    vert.write('right = cross(world_up, forward);')
 | 
			
		||||
                    vert.write('}')
 | 
			
		||||
                    vert.write('right = normalize(right);')
 | 
			
		||||
                    vert.write('vec3 up = normalize(cross(forward, right));')
 | 
			
		||||
                vert.write('vec3 right = cross(world_up, forward);')
 | 
			
		||||
                vert.write('if (length(right) < 1e-5) {')
 | 
			
		||||
                vert.write('forward = -forward;')
 | 
			
		||||
                vert.write('right = cross(world_up, forward);')
 | 
			
		||||
                vert.write('}')
 | 
			
		||||
                vert.write('right = normalize(right);')
 | 
			
		||||
                vert.write('vec3 up = normalize(cross(forward, right));')
 | 
			
		||||
 | 
			
		||||
                    vert.write('mat3 rot = mat3(right, -forward, up);')
 | 
			
		||||
                    vert.write('mat3 phase = mat3(vec3(c, 0.0, -s), vec3(0.0, 1.0, 0.0), vec3(s, 0.0, c));')
 | 
			
		||||
                    vert.write('mat3 final_rot = rot * phase;')
 | 
			
		||||
                    vert.write('spos.xyz = final_rot * center + p_location;')
 | 
			
		||||
                vert.write('mat3 rot = mat3(right, -forward, up);')
 | 
			
		||||
                vert.write('mat3 phase = mat3(vec3(c, 0.0, -s), vec3(0.0, 1.0, 0.0), vec3(s, 0.0, c));')
 | 
			
		||||
                vert.write('mat3 final_rot = rot * phase;')
 | 
			
		||||
                vert.write('spos.xyz = final_rot * center + p_location;')
 | 
			
		||||
 | 
			
		||||
                    if (not shadowmap):
 | 
			
		||||
                        vert.write('wnormal = normalize(final_rot * wnormal);')
 | 
			
		||||
                    vert.write('}')
 | 
			
		||||
                if (not shadowmap):
 | 
			
		||||
                    vert.write('wnormal = normalize(final_rot * wnormal);')
 | 
			
		||||
                vert.write('}')
 | 
			
		||||
 | 
			
		||||
            if rotation_factor_random != 0:
 | 
			
		||||
                str_rotate_around = '''vec3 rotate_around(vec3 v, vec3 angle) {
 | 
			
		||||
@ -258,6 +258,11 @@ def write(vert, particle_info=None, shadowmap=False):
 | 
			
		||||
        vert.add_out('float p_index')
 | 
			
		||||
        vert.write('p_index = gl_InstanceID;')
 | 
			
		||||
 | 
			
		||||
    if out_random:
 | 
			
		||||
        vert.add_out('float p_random')
 | 
			
		||||
        vert.write('p_random = fract(sin(gl_InstanceID) * 43758.5453);')
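
The fract(sin(x) * 43758.5453) expression written into the shader is the common GLSL one-liner hash: it gives each instance a deterministic pseudo-random value in [0, 1), so a particle keeps the same random value across frames. A Python model of the same formula, for illustration only:

    import math

    def instance_random(instance_id: int) -> float:
        # Mirrors the generated GLSL: fract(sin(x) * 43758.5453)
        v = math.sin(float(instance_id)) * 43758.5453
        return v - math.floor(v)  # fract() keeps the fractional part in [0, 1)
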
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def write_tilesheet(vert):
 | 
			
		||||
    # tilesx, tilesy, framerate - pd[3][0], pd[3][1], pd[3][2]
 | 
			
		||||
    vert.write('int frame = int((p_age) / pd[3][2]);')
 | 
			
		||||
 | 
			
		||||
@ -1,4 +1,4 @@
 | 
			
		||||
from typing import Generator
 | 
			
		||||
from typing import Generator, Tuple
 | 
			
		||||
 | 
			
		||||
import bpy
 | 
			
		||||
 | 
			
		||||
@ -101,7 +101,7 @@ def iter_nodes_leenkxpbr(node_group: bpy.types.NodeTree) -> Generator[bpy.types.
 | 
			
		||||
            yield node
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def equals_color_socket(socket: bpy.types.NodeSocketColor, value: tuple[float, ...], *, comp_alpha=True) -> bool:
 | 
			
		||||
def equals_color_socket(socket: bpy.types.NodeSocketColor, value: Tuple[float, ...], *, comp_alpha=True) -> bool:
 | 
			
		||||
    # NodeSocketColor.default_value is of bpy_prop_array type that doesn't
 | 
			
		||||
    # support direct comparison
 | 
			
		||||
    return (
 | 
			
		||||
 | 
			
		||||
@ -4,7 +4,7 @@ This module contains a list of all material nodes that Leenkx supports
 | 
			
		||||
"""
 | 
			
		||||
from enum import IntEnum, unique
 | 
			
		||||
from dataclasses import dataclass
 | 
			
		||||
from typing import Any, Callable, Optional
 | 
			
		||||
from typing import Any, Callable, Optional, Dict, List, Tuple, TypeVar, Union
 | 
			
		||||
 | 
			
		||||
import bpy
 | 
			
		||||
 | 
			
		||||
@ -62,7 +62,7 @@ class MaterialNodeMeta:
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
ALL_NODES: dict[str, MaterialNodeMeta] = {
 | 
			
		||||
ALL_NODES: Dict[str, MaterialNodeMeta] = {
 | 
			
		||||
    # --- nodes_color
 | 
			
		||||
    'BRIGHTCONTRAST': MaterialNodeMeta(parse_func=nodes_color.parse_brightcontrast),
 | 
			
		||||
    'CURVE_RGB': MaterialNodeMeta(parse_func=nodes_color.parse_curvergb),
 | 
			
		||||
 | 
			
		||||
@ -23,6 +23,7 @@ class ShaderData:
 | 
			
		||||
        self.data = {'shader_datas': [self.sd]}
 | 
			
		||||
        self.matname = lnx.utils.safesrc(lnx.utils.asset_name(material))
 | 
			
		||||
        self.sd['name'] = self.matname + '_data'
 | 
			
		||||
        self.sd['next_pass'] = material.lnx_next_pass
 | 
			
		||||
        self.sd['contexts'] = []
 | 
			
		||||
 | 
			
		||||
    def add_context(self, props) -> 'ShaderContext':
 | 
			
		||||
 | 
			
		||||
@ -1,5 +1,5 @@
 | 
			
		||||
import collections.abc
 | 
			
		||||
from typing import Any, Generator, Optional, Type, Union
 | 
			
		||||
from typing import Any, Generator, Optional, Type, Tuple, Union
 | 
			
		||||
 | 
			
		||||
import bpy
 | 
			
		||||
import mathutils
 | 
			
		||||
@ -49,7 +49,7 @@ def iter_nodes_by_type(node_group: bpy.types.NodeTree, ntype: str) -> Generator[
 | 
			
		||||
            yield node
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def input_get_connected_node(input_socket: bpy.types.NodeSocket) -> tuple[Optional[bpy.types.Node], Optional[bpy.types.NodeSocket]]:
 | 
			
		||||
def input_get_connected_node(input_socket: bpy.types.NodeSocket) -> Tuple[Optional[bpy.types.Node], Optional[bpy.types.NodeSocket]]:
 | 
			
		||||
    """Get the node and the output socket of that node that is connected
 | 
			
		||||
    to the given input, while following reroutes. If the input has
 | 
			
		||||
    multiple incoming connections, the first one is followed. If the
 | 
			
		||||
@ -70,7 +70,7 @@ def input_get_connected_node(input_socket: bpy.types.NodeSocket) -> tuple[Option
 | 
			
		||||
    return from_node, link.from_socket
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def output_get_connected_node(output_socket: bpy.types.NodeSocket) -> tuple[Optional[bpy.types.Node], Optional[bpy.types.NodeSocket]]:
 | 
			
		||||
def output_get_connected_node(output_socket: bpy.types.NodeSocket) -> Tuple[Optional[bpy.types.Node], Optional[bpy.types.NodeSocket]]:
 | 
			
		||||
    """Get the node and the input socket of that node that is connected
 | 
			
		||||
    to the given output, while following reroutes. If the output has
 | 
			
		||||
    multiple outgoing connections, the first one is followed. If the
 | 
			
		||||
@ -152,7 +152,7 @@ def get_export_node_name(node: bpy.types.Node) -> str:
 | 
			
		||||
    return '_' + lnx.utils.safesrc(node.name)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_haxe_property_names(node: bpy.types.Node) -> Generator[tuple[str, str], None, None]:
 | 
			
		||||
def get_haxe_property_names(node: bpy.types.Node) -> Generator[Tuple[str, str], None, None]:
 | 
			
		||||
    """Generator that yields the names of all node properties that have
 | 
			
		||||
    a counterpart in the node's Haxe class.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
@ -1,5 +1,13 @@
 | 
			
		||||
import bpy
 | 
			
		||||
from bpy.props import *
 | 
			
		||||
 | 
			
		||||
# Helper function to handle version compatibility
 | 
			
		||||
def compatible_prop(prop_func, **kwargs):
 | 
			
		||||
    """Create properties compatible with multiple Blender versions."""
 | 
			
		||||
    if bpy.app.version < (2, 90, 0):
 | 
			
		||||
        # Remove override parameter for Blender 2.83
 | 
			
		||||
        kwargs.pop('override', None)
 | 
			
		||||
    return prop_func(**kwargs)
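
A hedged usage sketch of compatible_prop() (the property name below is hypothetical; the pattern matches the rewritten registrations further down): on Blender versions before 2.90 the override argument is dropped before the property function is called, on newer versions it is passed through unchanged.

    bpy.types.Object.lnx_example_flag = compatible_prop(
        BoolProperty,
        name="Example Flag",
        description="Illustration only",
        default=False,
        override={'LIBRARY_OVERRIDABLE'})
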
 | 
			
		||||
import re
 | 
			
		||||
import multiprocessing
 | 
			
		||||
 | 
			
		||||
@ -142,6 +150,8 @@ def init_properties():
 | 
			
		||||
    bpy.types.World.lnx_project_version = StringProperty(name="Version", description="Exported project version", default="1.0.0", update=assets.invalidate_compiler_cache, set=set_version, get=get_version)
 | 
			
		||||
    bpy.types.World.lnx_project_version_autoinc = BoolProperty(name="Auto-increment Build Number", description="Auto-increment build number", default=True, update=assets.invalidate_compiler_cache)
 | 
			
		||||
    bpy.types.World.lnx_project_bundle = StringProperty(name="Bundle", description="Exported project bundle", default="org.leenkx3d", update=assets.invalidate_compiler_cache, set=set_project_bundle, get=get_project_bundle)
 | 
			
		||||
    # External Blend Files
 | 
			
		||||
    bpy.types.World.lnx_external_blends_path = StringProperty(name="External Blends", description="Directory containing external blend files to include in export", default="", subtype='DIR_PATH', update=assets.invalidate_compiler_cache)
 | 
			
		||||
    # Android Settings
 | 
			
		||||
    bpy.types.World.lnx_project_android_sdk_min = IntProperty(name="Minimal Version SDK", description="Minimal Version Android SDK", default=23, min=14, max=30, update=assets.invalidate_compiler_cache)
 | 
			
		||||
    bpy.types.World.lnx_project_android_sdk_target = IntProperty(name="Target Version SDK", description="Target Version Android SDK", default=26, min=26, max=30, update=assets.invalidate_compiler_cache)
 | 
			
		||||
@ -339,7 +349,7 @@ def init_properties():
 | 
			
		||||
    bpy.types.World.lnx_winmaximize = BoolProperty(name="Maximizable", description="Allow window maximize", default=False, update=assets.invalidate_compiler_cache)
 | 
			
		||||
    bpy.types.World.lnx_winminimize = BoolProperty(name="Minimizable", description="Allow window minimize", default=True, update=assets.invalidate_compiler_cache)
 | 
			
		||||
    # For object
 | 
			
		||||
    bpy.types.Object.lnx_instanced = EnumProperty(
 | 
			
		||||
    bpy.types.Object.lnx_instanced = compatible_prop(EnumProperty,
 | 
			
		||||
        items = [('Off', 'Off', 'No instancing of children'),
 | 
			
		||||
                 ('Loc', 'Loc', 'Instances use their unique position (ipos)'),
 | 
			
		||||
                 ('Loc + Rot', 'Loc + Rot', 'Instances use their unique position and rotation (ipos and irot)'),
 | 
			
		||||
@ -349,11 +359,12 @@ def init_properties():
 | 
			
		||||
        description='Whether to use instancing to draw the children of this object. If enabled, this option defines what attributes may vary between the instances',
 | 
			
		||||
        update=assets.invalidate_instance_cache,
 | 
			
		||||
        override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_export = BoolProperty(name="Export", description="Export object data", default=True, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_spawn = BoolProperty(name="Spawn", description="Auto-add this object when creating scene", default=True, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_mobile = BoolProperty(name="Mobile", description="Object moves during gameplay", default=False, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_visible = BoolProperty(name="Visible", description="Render this object", default=True, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_visible_shadow = BoolProperty(name="Lighting", description="Object contributes to the lighting even if invisible", default=True, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_export = compatible_prop(BoolProperty, name="Export", description="Export object data", default=True, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_sorting_index = compatible_prop(IntProperty, name="Sorting Index", description="Sorting index for the Render's Draw Order", default=0, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_spawn = compatible_prop(BoolProperty, name="Spawn", description="Auto-add this object when creating scene", default=True, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_mobile = compatible_prop(BoolProperty, name="Mobile", description="Object moves during gameplay", default=False, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_visible = compatible_prop(BoolProperty, name="Visible", description="Render this object", default=True, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_visible_shadow = compatible_prop(BoolProperty, name="Lighting", description="Object contributes to the lighting even if invisible", default=True, override={'LIBRARY_OVERRIDABLE'})
 | 
			
		||||
    bpy.types.Object.lnx_soft_body_margin = FloatProperty(name="Soft Body Margin", description="Collision margin", default=0.04)
 | 
			
		||||
    bpy.types.Object.lnx_rb_linear_factor = FloatVectorProperty(name="Linear Factor", size=3, description="Set to 0 to lock axis", default=[1,1,1])
 | 
			
		||||
    bpy.types.Object.lnx_rb_angular_factor = FloatVectorProperty(name="Angular Factor", size=3, description="Set to 0 to lock axis", default=[1,1,1])
 | 
			
		||||
@ -433,9 +444,22 @@ def init_properties():
 | 
			
		||||
    bpy.types.World.lnx_nishita_density = FloatVectorProperty(name="Nishita Density", size=3, default=[1, 1, 1])
 | 
			
		||||
    bpy.types.Material.lnx_cast_shadow = BoolProperty(name="Cast Shadow", default=True)
 | 
			
		||||
    bpy.types.Material.lnx_receive_shadow = BoolProperty(name="Receive Shadow", description="Requires forward render path", default=True)
 | 
			
		||||
    bpy.types.Material.lnx_depth_write = BoolProperty(name="Write Depth", description="Allow this material to write to the depth buffer", default=True)
 | 
			
		||||
    bpy.types.Material.lnx_depth_read = BoolProperty(name="Read Depth", description="Allow this material to read from a depth texture which is copied from the depth buffer. The meshes using this material will be drawn after all meshes that don't read from the depth texture", default=False)
 | 
			
		||||
    bpy.types.Material.lnx_overlay = BoolProperty(name="Overlay", description="Renders the material, unshaded, over other shaded materials", default=False)
 | 
			
		||||
    bpy.types.Material.lnx_decal = BoolProperty(name="Decal", default=False)
 | 
			
		||||
    bpy.types.Material.lnx_compare_mode = EnumProperty(
 | 
			
		||||
        items=[
 | 
			
		||||
            ('always', 'Always', 'Always'),
 | 
			
		||||
            ('never', 'Never', 'Never'),
 | 
			
		||||
            ('less', 'Less', 'Less'),
 | 
			
		||||
            ('less_equal', 'Less Equal', 'Less Equal'),
 | 
			
		||||
            ('greater', 'Greater', 'Greater'),
 | 
			
		||||
            ('greater_equal', 'Greater Equal', 'Greater Equal'),
 | 
			
		||||
            ('equal', 'Equal', 'Equal'),
 | 
			
		||||
            ('not_equal', 'Not Equal', 'Not Equal'),
 | 
			
		||||
        ],
 | 
			
		||||
        name="Compare Mode", default='less', description="Comparison mode for the material")
 | 
			
		||||
    bpy.types.Material.lnx_two_sided = BoolProperty(name="Two-Sided", description="Flip normal when drawing back-face", default=False)
 | 
			
		||||
    bpy.types.Material.lnx_ignore_irradiance = BoolProperty(name="Ignore Irradiance", description="Ignore irradiance for material", default=False)
 | 
			
		||||
    bpy.types.Material.lnx_cull_mode = EnumProperty(
 | 
			
		||||
@ -443,6 +467,8 @@ def init_properties():
 | 
			
		||||
               ('clockwise', 'Front', 'Clockwise'),
 | 
			
		||||
               ('counter_clockwise', 'Back', 'Counter-Clockwise')],
 | 
			
		||||
        name="Cull Mode", default='clockwise', description="Draw geometry faces")
 | 
			
		||||
    bpy.types.Material.lnx_next_pass = StringProperty(
 | 
			
		||||
        name="Next Pass", default='', description="Next pass for the material", update=assets.invalidate_shader_cache)
 | 
			
		||||
    bpy.types.Material.lnx_discard = BoolProperty(name="Alpha Test", default=False, description="Do not render fragments below specified opacity threshold")
 | 
			
		||||
    bpy.types.Material.lnx_discard_opacity = FloatProperty(name="Mesh Opacity", default=0.2, min=0, max=1)
 | 
			
		||||
    bpy.types.Material.lnx_discard_opacity_shadows = FloatProperty(name="Shadows Opacity", default=0.1, min=0, max=1)
 | 
			
		||||
@ -568,6 +594,11 @@ def init_properties():
 | 
			
		||||
    bpy.types.Node.lnx_version = IntProperty(name="Node Version", description="The version of an instanced node", default=0)
 | 
			
		||||
    # Particles
 | 
			
		||||
    bpy.types.ParticleSettings.lnx_auto_start = BoolProperty(name="Auto Start", description="Automatically start this particle system on load", default=True)
 | 
			
		||||
    bpy.types.ParticleSettings.lnx_dynamic_emitter = BoolProperty(
 | 
			
		||||
        name="Dynamic",
 | 
			
		||||
        description="Particles have independent transform updates following emitter compared to a static baked particle system used if emitters dont generally move around.",
 | 
			
		||||
        default=True
 | 
			
		||||
    )
 | 
			
		||||
    bpy.types.ParticleSettings.lnx_is_unique = BoolProperty(name="Is Unique", description="Make this particle system look different each time it starts", default=False)
 | 
			
		||||
    bpy.types.ParticleSettings.lnx_loop = BoolProperty(name="Loop", description="Loop this particle system", default=False)
 | 
			
		||||
    bpy.types.ParticleSettings.lnx_count_mult = FloatProperty(name="Multiply Count", description="Multiply particle count when rendering in Leenkx", default=1.0)
 | 
			
		||||
 | 
			
		||||
@ -420,16 +420,19 @@ class LNX_OT_ExporterOpenVS(bpy.types.Operator):
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def poll(cls, context):
 | 
			
		||||
        if not lnx.utils.get_os_is_windows():
 | 
			
		||||
            cls.poll_message_set('This operator is only supported on Windows')
 | 
			
		||||
            if bpy.app.version >= (2, 90, 0):
 | 
			
		||||
                cls.poll_message_set('This operator is only supported on Windows')
 | 
			
		||||
            return False
 | 
			
		||||
 | 
			
		||||
        wrd = bpy.data.worlds['Lnx']
 | 
			
		||||
        if len(wrd.lnx_exporterlist) == 0:
 | 
			
		||||
            cls.poll_message_set('No export configuration exists')
 | 
			
		||||
            if bpy.app.version >= (2, 90, 0):
 | 
			
		||||
                cls.poll_message_set('No export configuration exists')
 | 
			
		||||
            return False
 | 
			
		||||
 | 
			
		||||
        if wrd.lnx_exporterlist[wrd.lnx_exporterlist_index].lnx_project_target != 'windows-hl':
 | 
			
		||||
            cls.poll_message_set('This operator only works with the Windows (C) target')
 | 
			
		||||
            if bpy.app.version >= (2, 90, 0):
 | 
			
		||||
                cls.poll_message_set('This operator only works with the Windows (C) target')
 | 
			
		||||
            return False
 | 
			
		||||
 | 
			
		||||
        return True
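
The bpy.app.version >= (2, 90, 0) guard around poll_message_set() now repeats for every message in this poll method (and again in LnxRefreshObjectScriptsButton below). A small helper could keep the version check in one place; a sketch only, the helper name is hypothetical:

    def poll_message_set_compat(cls, message: str):
        # poll_message_set() only exists on Blender 2.90+, so skip the call on older builds.
        if bpy.app.version >= (2, 90, 0):
            cls.poll_message_set(message)
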
 | 
			
		||||
 | 
			
		||||
@ -12,9 +12,11 @@ import bpy.utils.previews
 | 
			
		||||
import lnx.make as make
 | 
			
		||||
from lnx.props_traits_props import *
 | 
			
		||||
import lnx.ui_icons as ui_icons
 | 
			
		||||
 | 
			
		||||
import lnx.utils
 | 
			
		||||
import lnx.write_data as write_data
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if lnx.is_reload(__name__):
 | 
			
		||||
    lnx.make = lnx.reload_module(lnx.make)
 | 
			
		||||
    lnx.props_traits_props = lnx.reload_module(lnx.props_traits_props)
 | 
			
		||||
@ -90,20 +92,31 @@ class LnxTraitListItem(bpy.types.PropertyGroup):
 | 
			
		||||
    def poll_node_trees(self, tree: NodeTree):
 | 
			
		||||
        """Ensure that only logic node trees show up as node traits"""
 | 
			
		||||
        return tree.bl_idname == 'LnxLogicTreeType'
 | 
			
		||||
    
 | 
			
		||||
    if bpy.app.version < (2, 90, 0):
 | 
			
		||||
        name: StringProperty(name="Name", description="The name of the trait", default="")
 | 
			
		||||
        enabled_prop: BoolProperty(name="", description="Whether this trait is enabled", default=True, update=trigger_recompile)
 | 
			
		||||
        fake_user: BoolProperty(name="Fake User", description="Export this trait even if it is deactivated", default=False)
 | 
			
		||||
        class_name_prop: StringProperty(name="Class", description="A name for this item", default="", update=update_trait_group)
 | 
			
		||||
        canvas_name_prop: StringProperty(name="Canvas", description="A name for this item", default="", update=update_trait_group)
 | 
			
		||||
        webassembly_prop: StringProperty(name="Module", description="A name for this item", default="", update=update_trait_group)
 | 
			
		||||
        node_tree_prop: PointerProperty(type=NodeTree, update=update_trait_group, poll=poll_node_trees)
 | 
			
		||||
        lnx_traitpropslist_index: IntProperty(name="Index for my_list", default=0, options={"LIBRARY_EDITABLE"})
 | 
			
		||||
    else:
 | 
			
		||||
        name: StringProperty(name="Name", description="The name of the trait", default="", override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        enabled_prop: BoolProperty(name="", description="Whether this trait is enabled", default=True, update=trigger_recompile, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        fake_user: BoolProperty(name="Fake User", description="Export this trait even if it is deactivated", default=False, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        class_name_prop: StringProperty(name="Class", description="A name for this item", default="", update=update_trait_group, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        canvas_name_prop: StringProperty(name="Canvas", description="A name for this item", default="", update=update_trait_group, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        webassembly_prop: StringProperty(name="Module", description="A name for this item", default="", update=update_trait_group, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        node_tree_prop: PointerProperty(type=NodeTree, update=update_trait_group, override={"LIBRARY_OVERRIDABLE"}, poll=poll_node_trees)
 | 
			
		||||
        lnx_traitpropslist_index: IntProperty(name="Index for my_list", default=0, options={"LIBRARY_EDITABLE"}, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
 | 
			
		||||
    name: StringProperty(name="Name", description="The name of the trait", default="", override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    enabled_prop: BoolProperty(name="", description="Whether this trait is enabled", default=True, update=trigger_recompile, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    is_object: BoolProperty(name="", default=True)
 | 
			
		||||
    fake_user: BoolProperty(name="Fake User", description="Export this trait even if it is deactivated", default=False, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    type_prop: EnumProperty(name="Type", items=PROP_TYPES_ENUM)
 | 
			
		||||
 | 
			
		||||
    class_name_prop: StringProperty(name="Class", description="A name for this item", default="", update=update_trait_group, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    canvas_name_prop: StringProperty(name="Canvas", description="A name for this item", default="", update=update_trait_group, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    webassembly_prop: StringProperty(name="Module", description="A name for this item", default="", update=update_trait_group, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    node_tree_prop: PointerProperty(type=NodeTree, update=update_trait_group, override={"LIBRARY_OVERRIDABLE"}, poll=poll_node_trees)
 | 
			
		||||
 | 
			
		||||
    lnx_traitpropslist: CollectionProperty(type=LnxTraitPropListItem)
 | 
			
		||||
    lnx_traitpropslist_index: IntProperty(name="Index for my_list", default=0, options={"LIBRARY_EDITABLE"}, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    lnx_traitpropswarnings: CollectionProperty(type=LnxTraitPropWarning)
 | 
			
		||||
 | 
			
		||||
class LNX_UL_TraitList(bpy.types.UIList):
 | 
			
		||||
@ -756,7 +769,8 @@ class LnxRefreshObjectScriptsButton(bpy.types.Operator):
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def poll(cls, context):
 | 
			
		||||
        cls.poll_message_set(LnxRefreshScriptsButton.poll_msg)
 | 
			
		||||
        if bpy.app.version >= (2, 90, 0):
 | 
			
		||||
            cls.poll_message_set(LnxRefreshScriptsButton.poll_msg)
 | 
			
		||||
        # Technically we could keep the operator enabled here since
 | 
			
		||||
        # fetch_trait_props() checks for overrides and the operator does
 | 
			
		||||
        # not depend on the current object, but this way the user
 | 
			
		||||
@ -1064,11 +1078,17 @@ __REG_CLASSES = (
 | 
			
		||||
)
 | 
			
		||||
__reg_classes, unregister = bpy.utils.register_classes_factory(__REG_CLASSES)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def register():
 | 
			
		||||
    __reg_classes()
 | 
			
		||||
 | 
			
		||||
    bpy.types.Object.lnx_traitlist = CollectionProperty(type=LnxTraitListItem, override={"LIBRARY_OVERRIDABLE", "USE_INSERTION"})
 | 
			
		||||
    bpy.types.Object.lnx_traitlist_index = IntProperty(name="Index for lnx_traitlist", default=0, options={"LIBRARY_EDITABLE"}, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    bpy.types.Scene.lnx_traitlist = CollectionProperty(type=LnxTraitListItem, override={"LIBRARY_OVERRIDABLE", "USE_INSERTION"})
 | 
			
		||||
    bpy.types.Scene.lnx_traitlist_index = IntProperty(name="Index for lnx_traitlist", default=0, options={"LIBRARY_EDITABLE"}, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    if bpy.app.version < (2, 90, 0):
 | 
			
		||||
        bpy.types.Object.lnx_traitlist = CollectionProperty(type=LnxTraitListItem)
 | 
			
		||||
        bpy.types.Object.lnx_traitlist_index = IntProperty(name="Index for lnx_traitlist", default=0, options={"LIBRARY_EDITABLE"})
 | 
			
		||||
        bpy.types.Scene.lnx_traitlist = CollectionProperty(type=LnxTraitListItem)
 | 
			
		||||
        bpy.types.Scene.lnx_traitlist_index = IntProperty(name="Index for lnx_traitlist", default=0, options={"LIBRARY_EDITABLE"})
 | 
			
		||||
    else:
 | 
			
		||||
        bpy.types.Object.lnx_traitlist = CollectionProperty(type=LnxTraitListItem, override={"LIBRARY_OVERRIDABLE", "USE_INSERTION"})
 | 
			
		||||
        bpy.types.Object.lnx_traitlist_index = IntProperty(name="Index for lnx_traitlist", default=0, options={"LIBRARY_EDITABLE"}, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        bpy.types.Scene.lnx_traitlist = CollectionProperty(type=LnxTraitListItem, override={"LIBRARY_OVERRIDABLE", "USE_INSERTION"})
 | 
			
		||||
        bpy.types.Scene.lnx_traitlist_index = IntProperty(name="Index for lnx_traitlist", default=0, options={"LIBRARY_EDITABLE"}, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
 
 | 
			
		||||
@ -3,6 +3,7 @@ from bpy.props import *
 | 
			
		||||
 | 
			
		||||
__all__ = ['LnxTraitPropWarning', 'LnxTraitPropListItem', 'LNX_UL_PropList']
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
PROP_TYPE_ICONS = {
 | 
			
		||||
    "String": "SORTALPHA",
 | 
			
		||||
    "Int": "CHECKBOX_DEHLT",
 | 
			
		||||
@ -45,42 +46,65 @@ class LnxTraitPropListItem(bpy.types.PropertyGroup):
 | 
			
		||||
        name="Name",
 | 
			
		||||
        description="The name of this property",
 | 
			
		||||
        default="Untitled")
 | 
			
		||||
 | 
			
		||||
    type: EnumProperty(
 | 
			
		||||
        items=(
 | 
			
		||||
            # (Haxe Type, Display Name, Description)
 | 
			
		||||
            ("String", "String", "String Type"),
 | 
			
		||||
            ("Int", "Integer", "Integer Type"),
 | 
			
		||||
            ("Float", "Float", "Float Type"),
 | 
			
		||||
            ("Bool", "Boolean", "Boolean Type"),
 | 
			
		||||
            ("Vec2", "Vec2", "2D Vector Type"),
 | 
			
		||||
            ("Vec3", "Vec3", "3D Vector Type"),
 | 
			
		||||
            ("Vec4", "Vec4", "4D Vector Type"),
 | 
			
		||||
            ("Object", "Object", "Object Type"),
 | 
			
		||||
            ("CameraObject", "Camera Object", "Camera Object Type"),
 | 
			
		||||
            ("LightObject", "Light Object", "Light Object Type"),
 | 
			
		||||
            ("MeshObject", "Mesh Object", "Mesh Object Type"),
 | 
			
		||||
            ("SpeakerObject", "Speaker Object", "Speaker Object Type"),
 | 
			
		||||
            ("TSceneFormat", "Scene", "Scene Type")),
 | 
			
		||||
        name="Type",
 | 
			
		||||
        description="The type of this property",
 | 
			
		||||
        default="String",
 | 
			
		||||
        override={"LIBRARY_OVERRIDABLE"}
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    # === VALUES ===
 | 
			
		||||
    value_string: StringProperty(name="Value", default="", override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    value_int: IntProperty(name="Value", default=0, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    value_float: FloatProperty(name="Value", default=0.0, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    value_bool: BoolProperty(name="Value", default=False, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    value_vec2: FloatVectorProperty(name="Value", size=2, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    value_vec3: FloatVectorProperty(name="Value", size=3, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    value_vec4: FloatVectorProperty(name="Value", size=4, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    value_object: PointerProperty(
 | 
			
		||||
        name="Value", type=bpy.types.Object, poll=filter_objects,
 | 
			
		||||
        override={"LIBRARY_OVERRIDABLE"}
 | 
			
		||||
    )
 | 
			
		||||
    value_scene: PointerProperty(name="Value", type=bpy.types.Scene, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
    if bpy.app.version < (2, 90, 0):
 | 
			
		||||
        type: EnumProperty(
 | 
			
		||||
            items=(
 | 
			
		||||
                # (Haxe Type, Display Name, Description)
 | 
			
		||||
                ("String", "String", "String Type"),
 | 
			
		||||
                ("Int", "Integer", "Integer Type"),
 | 
			
		||||
                ("Float", "Float", "Float Type"),
 | 
			
		||||
                ("Bool", "Boolean", "Boolean Type"),
 | 
			
		||||
                ("Vec2", "Vec2", "2D Vector Type"),
 | 
			
		||||
                ("Vec3", "Vec3", "3D Vector Type"),
 | 
			
		||||
                ("Vec4", "Vec4", "4D Vector Type"),
 | 
			
		||||
                ("Object", "Object", "Object Type"),
 | 
			
		||||
                ("CameraObject", "Camera Object", "Camera Object Type"),
 | 
			
		||||
                ("LightObject", "Light Object", "Light Object Type"),
 | 
			
		||||
                ("MeshObject", "Mesh Object", "Mesh Object Type"),
 | 
			
		||||
                ("SpeakerObject", "Speaker Object", "Speaker Object Type"),
 | 
			
		||||
                ("TSceneFormat", "Scene", "Scene Type")),
 | 
			
		||||
            name="Type",
 | 
			
		||||
            description="The type of this property",
 | 
			
		||||
            default="String")
 | 
			
		||||
        value_string: StringProperty(name="Value", default="")
 | 
			
		||||
        value_int: IntProperty(name="Value", default=0)
 | 
			
		||||
        value_float: FloatProperty(name="Value", default=0.0)
 | 
			
		||||
        value_bool: BoolProperty(name="Value", default=False)
 | 
			
		||||
        value_vec2: FloatVectorProperty(name="Value", size=2)
 | 
			
		||||
        value_vec3: FloatVectorProperty(name="Value", size=3)
 | 
			
		||||
        value_vec4: FloatVectorProperty(name="Value", size=4)
 | 
			
		||||
        value_object: PointerProperty(name="Value", type=bpy.types.Object, poll=filter_objects)
 | 
			
		||||
        value_scene: PointerProperty(name="Value", type=bpy.types.Scene)
 | 
			
		||||
    else:
 | 
			
		||||
        type: EnumProperty(
 | 
			
		||||
            items=(
 | 
			
		||||
                # (Haxe Type, Display Name, Description)
 | 
			
		||||
                ("String", "String", "String Type"),
 | 
			
		||||
                ("Int", "Integer", "Integer Type"),
 | 
			
		||||
                ("Float", "Float", "Float Type"),
 | 
			
		||||
                ("Bool", "Boolean", "Boolean Type"),
 | 
			
		||||
                ("Vec2", "Vec2", "2D Vector Type"),
 | 
			
		||||
                ("Vec3", "Vec3", "3D Vector Type"),
 | 
			
		||||
                ("Vec4", "Vec4", "4D Vector Type"),
 | 
			
		||||
                ("Object", "Object", "Object Type"),
 | 
			
		||||
                ("CameraObject", "Camera Object", "Camera Object Type"),
 | 
			
		||||
                ("LightObject", "Light Object", "Light Object Type"),
 | 
			
		||||
                ("MeshObject", "Mesh Object", "Mesh Object Type"),
 | 
			
		||||
                ("SpeakerObject", "Speaker Object", "Speaker Object Type"),
 | 
			
		||||
                ("TSceneFormat", "Scene", "Scene Type")),
 | 
			
		||||
            name="Type",
 | 
			
		||||
            description="The type of this property",
 | 
			
		||||
            default="String",
 | 
			
		||||
            override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_string: StringProperty(name="Value", default="", override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_int: IntProperty(name="Value", default=0, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_float: FloatProperty(name="Value", default=0.0, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_bool: BoolProperty(name="Value", default=False, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_vec2: FloatVectorProperty(name="Value", size=2, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_vec3: FloatVectorProperty(name="Value", size=3, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_vec4: FloatVectorProperty(name="Value", size=4, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_object: PointerProperty(name="Value", type=bpy.types.Object, poll=filter_objects, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
        value_scene: PointerProperty(name="Value", type=bpy.types.Scene, override={"LIBRARY_OVERRIDABLE"})
 | 
			
		||||
 | 
			
		||||
    def set_value(self, val):
 | 
			
		||||
        # Would require way too much effort, so it's out of scope here.
 | 
			
		||||
 | 
			
		||||
@ -8,6 +8,24 @@ import mathutils
 | 
			
		||||
import bpy
 | 
			
		||||
from bpy.props import *
 | 
			
		||||
 | 
			
		||||
# Helper functions for Blender version compatibility  
 | 
			
		||||
def get_panel_options():
 | 
			
		||||
    """Get panel options compatible with current Blender version."""
 | 
			
		||||
    if bpy.app.version >= (2, 93, 0):  # INSTANCED was introduced around 2.93
 | 
			
		||||
        return {'INSTANCED'}
 | 
			
		||||
    else:
 | 
			
		||||
        return set()  # Empty set for older versions
 | 
			
		||||
 | 
			
		||||
def column_with_heading(layout, heading='', align=False):
 | 
			
		||||
    """Create a column with optional heading, compatible across Blender versions."""
 | 
			
		||||
    if bpy.app.version >= (2, 92, 0):
 | 
			
		||||
        return layout.column(heading=heading, align=align)
 | 
			
		||||
    else:
 | 
			
		||||
        col = layout.column(align=align)
 | 
			
		||||
        if heading:
 | 
			
		||||
            col.label(text=heading)
 | 
			
		||||
        return col
 | 
			
		||||
 | 
			
		||||
from lnx.lightmapper.panels import scene
 | 
			
		||||
 | 
			
		||||
import lnx.api
 | 
			
		||||
@ -63,6 +81,7 @@ class LNX_PT_ObjectPropsPanel(bpy.types.Panel):
 | 
			
		||||
            return
 | 
			
		||||
 | 
			
		||||
        col = layout.column()
 | 
			
		||||
        col.prop(obj, 'lnx_sorting_index')
 | 
			
		||||
        col.prop(obj, 'lnx_export')
 | 
			
		||||
        if not obj.lnx_export:
 | 
			
		||||
            return
 | 
			
		||||
@ -206,6 +225,7 @@ class LNX_PT_ParticlesPropsPanel(bpy.types.Panel):
 | 
			
		||||
            return
 | 
			
		||||
 | 
			
		||||
        layout.prop(obj.settings, 'lnx_auto_start')
 | 
			
		||||
        layout.prop(obj.settings, 'lnx_dynamic_emitter')
 | 
			
		||||
        layout.prop(obj.settings, 'lnx_is_unique')
 | 
			
		||||
        layout.prop(obj.settings, 'lnx_loop')
 | 
			
		||||
        layout.prop(obj.settings, 'lnx_count_mult')
 | 
			
		||||
@ -551,6 +571,51 @@ class LNX_OT_NewCustomMaterial(bpy.types.Operator):
 | 
			
		||||
 | 
			
		||||
        return{'FINISHED'}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class LNX_OT_NextPassMaterialSelector(bpy.types.Operator):
 | 
			
		||||
    """Select material for next pass"""
 | 
			
		||||
    bl_idname = "lnx.next_pass_material_selector"
 | 
			
		||||
    bl_label = "Select Next Pass Material"
 | 
			
		||||
 | 
			
		||||
    def execute(self, context):
 | 
			
		||||
        return {'FINISHED'}
 | 
			
		||||
 | 
			
		||||
    def invoke(self, context, event):
 | 
			
		||||
        context.window_manager.popup_menu(self.draw_menu, title="Select Next Pass Material", icon='MATERIAL')
 | 
			
		||||
        return {'FINISHED'}
 | 
			
		||||
 | 
			
		||||
    def draw_menu(self, popup, context):
 | 
			
		||||
        layout = popup.layout
 | 
			
		||||
 | 
			
		||||
        # Add 'None' option
 | 
			
		||||
        op = layout.operator("lnx.set_next_pass_material", text="")
 | 
			
		||||
        op.material_name = ""
 | 
			
		||||
 | 
			
		||||
        # Add materials from the current object's material slots
 | 
			
		||||
        if context.object and hasattr(context.object, 'material_slots'):
 | 
			
		||||
            for slot in context.object.material_slots:
 | 
			
		||||
                if (slot.material is not None and slot.material != context.material):
 | 
			
		||||
                    op = layout.operator("lnx.set_next_pass_material", text=slot.material.name)
 | 
			
		||||
                    op.material_name = slot.material.name
 | 
			
		||||
 | 
			
		||||
class LNX_OT_SetNextPassMaterial(bpy.types.Operator):
 | 
			
		||||
    """Set the next pass material"""
 | 
			
		||||
    bl_idname = "lnx.set_next_pass_material"
 | 
			
		||||
    bl_label = "Set Next Pass Material"
 | 
			
		||||
 | 
			
		||||
    material_name: StringProperty()
 | 
			
		||||
 | 
			
		||||
    def execute(self, context):
 | 
			
		||||
        if context.material:
 | 
			
		||||
            context.material.lnx_next_pass = self.material_name
 | 
			
		||||
        # Redraw the UI to update the display
 | 
			
		||||
        for area in context.screen.areas:
 | 
			
		||||
            if area.type == 'PROPERTIES':
 | 
			
		||||
                area.tag_redraw()
 | 
			
		||||
        return {'FINISHED'}
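
# Illustrative usage sketch, not from the original patch: Blender exposes the operators
# above through bpy.ops based on their bl_idname, so the same flow can be driven from a
# script; "Outline" below is a placeholder material name.
#     bpy.ops.lnx.next_pass_material_selector('INVOKE_DEFAULT')     # opens the popup menu
#     bpy.ops.lnx.set_next_pass_material(material_name="Outline")   # writes mat.lnx_next_pass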



class LNX_PG_BindTexturesListItem(bpy.types.PropertyGroup):
    uniform_name: StringProperty(
        name='Uniform Name',
@ -634,18 +699,23 @@ class LNX_PT_MaterialPropsPanel(bpy.types.Panel):
        mat = bpy.context.material
        if mat is None:
            return

        layout.prop(mat, 'lnx_cast_shadow')
        columnb = layout.column()
        wrd = bpy.data.worlds['Lnx']
        columnb.enabled = len(wrd.lnx_rplist) > 0 and lnx.utils.get_rp().rp_renderer == 'Forward'
        columnb.prop(mat, 'lnx_receive_shadow')
        layout.prop(mat, 'lnx_ignore_irradiance')
        layout.prop(mat, 'lnx_compare_mode')
        layout.prop(mat, 'lnx_two_sided')
        columnb = layout.column()
        columnb.enabled = not mat.lnx_two_sided
        columnb.prop(mat, 'lnx_cull_mode')
        row = layout.row(align=True)
        row.prop(mat, 'lnx_next_pass', text="Next Pass")
        row.operator('lnx.next_pass_material_selector', text='', icon='MATERIAL')
        layout.prop(mat, 'lnx_material_id')
        layout.prop(mat, 'lnx_depth_write')
        layout.prop(mat, 'lnx_depth_read')
        layout.prop(mat, 'lnx_overlay')
        layout.prop(mat, 'lnx_decal')
@ -887,13 +957,13 @@ class LNX_PT_LeenkxExporterPanel(bpy.types.Panel):
        col = layout.column()
        col.prop(wrd, 'lnx_project_icon')

        col = layout.column(heading='Code Output', align=True)
        col = column_with_heading(layout, 'Code Output', align=True)
        col.prop(wrd, 'lnx_dce')
        col.prop(wrd, 'lnx_compiler_inline')
        col.prop(wrd, 'lnx_minify_js')
        col.prop(wrd, 'lnx_no_traces')

        col = layout.column(heading='Data', align=True)
        col = column_with_heading(layout, 'Data', align=True)
        col.prop(wrd, 'lnx_minimize')
        col.prop(wrd, 'lnx_optimize_data')
        col.prop(wrd, 'lnx_asset_compression')
@ -1126,32 +1196,32 @@ class LNX_PT_ProjectFlagsPanel(bpy.types.Panel):
        layout.use_property_decorate = False
        wrd = bpy.data.worlds['Lnx']

        col = layout.column(heading='Debug', align=True)
        col = column_with_heading(layout, 'Debug', align=True)
        col.prop(wrd, 'lnx_verbose_output')
        col.prop(wrd, 'lnx_cache_build')
        col.prop(wrd, 'lnx_clear_on_compile')
        col.prop(wrd, 'lnx_assert_level')
        col.prop(wrd, 'lnx_assert_quit')

        col = layout.column(heading='Runtime', align=True)
        col = column_with_heading(layout, 'Runtime', align=True)
        col.prop(wrd, 'lnx_live_patch')
        col.prop(wrd, 'lnx_stream_scene')
        col.prop(wrd, 'lnx_loadscreen')
        col.prop(wrd, 'lnx_write_config')

        col = layout.column(heading='Renderer', align=True)
        col = column_with_heading(layout, 'Renderer', align=True)
        col.prop(wrd, 'lnx_batch_meshes')
        col.prop(wrd, 'lnx_batch_materials')
        col.prop(wrd, 'lnx_deinterleaved_buffers')
        col.prop(wrd, 'lnx_export_tangents')

        col = layout.column(heading='Quality')
        col = column_with_heading(layout, 'Quality')
        row = col.row()  # To expand below property UI horizontally
        row.prop(wrd, 'lnx_canvas_img_scaling_quality', expand=True)
        col.prop(wrd, 'lnx_texture_quality')
        col.prop(wrd, 'lnx_sound_quality')

        col = layout.column(heading='External Assets')
        col = column_with_heading(layout, 'External Assets')
        col.prop(wrd, 'lnx_copy_override')
        col.operator('lnx.copy_to_bundled', icon='IMAGE_DATA')

@ -1229,7 +1299,8 @@ class LNX_PT_ProjectModulesPanel(bpy.types.Panel):

        layout.prop_search(wrd, 'lnx_khafile', bpy.data, 'texts')
        layout.prop(wrd, 'lnx_project_root')

        layout.prop(wrd, 'lnx_external_blends_path')

class LnxVirtualInputPanel(bpy.types.Panel):
    bl_label = "Leenkx Virtual Input"
    bl_space_type = "PROPERTIES"
@ -1464,7 +1535,7 @@ class LNX_PT_TopbarPanel(bpy.types.Panel):
    bl_label = "Leenkx Player"
    bl_space_type = "VIEW_3D"
    bl_region_type = "WINDOW"
    bl_options = {'INSTANCED'}
    bl_options = get_panel_options()

    def draw_header(self, context):
        row = self.layout.row(align=True)
@ -2267,7 +2338,10 @@ class LnxGenTerrainButton(bpy.types.Operator):
        node.location = (-200, -200)
        node.inputs[0].default_value = 5.0
        links.new(nodes['Bump'].inputs[2], nodes['_TerrainHeight'].outputs[0])
        links.new(nodes['Principled BSDF'].inputs[20], nodes['Bump'].outputs[0])
        if bpy.app.version[0] >= 4:
            links.new(nodes['Principled BSDF'].inputs[22], nodes['Bump'].outputs[0])
        else:
            links.new(nodes['Principled BSDF'].inputs[20], nodes['Bump'].outputs[0])
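        # Illustrative alternative, not from the original patch: if the socket targeted by
        # these hard-coded indices is the Principled BSDF 'Normal' input (which the Bump
        # link suggests), a name-based lookup would sidestep the version branch entirely:
        #     links.new(nodes['Principled BSDF'].inputs['Normal'], nodes['Bump'].outputs[0])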

        # Create sectors
        root_obj = bpy.data.objects.new("Terrain", None)
@ -2300,7 +2374,16 @@ class LnxGenTerrainButton(bpy.types.Operator):
            disp_mod.texture.extension = 'EXTEND'
            disp_mod.texture.use_interpolation = False
            disp_mod.texture.use_mipmap = False
            disp_mod.texture.image = bpy.data.images.load(filepath=scn.lnx_terrain_textures+'/heightmap_' + j + '.png')
            try:
                disp_mod.texture.image = bpy.data.images.load(filepath=scn.lnx_terrain_textures+'/heightmap_' + j + '.png')
            except Exception as e:
                if i == 0:  # Only show message once
                    if scn.lnx_terrain_textures.startswith('//') and not bpy.data.filepath:
                        self.report({'INFO'}, "Generating terrain... Save .blend file and add your heightmaps for each sector in "
                                   "the \"Bundled\" folder using the format \"heightmap_01.png\", \"heightmap_02.png\", etc.")
                    else:
                        self.report({'INFO'}, f"Heightmap not found: {scn.lnx_terrain_textures}/heightmap_{j}.png - using blank image")
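                # Illustrative sketch, not from the original patch: the message above refers
                # to a blank image, but none is assigned here; one possible fallback, assuming
                # a placeholder 1024x1024 resolution, would be:
                #     disp_mod.texture.image = bpy.data.images.new('heightmap_' + j, 1024, 1024)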

            f = 1
            levels = 0
            while f < disp_mod.texture.image.size[0]:
@ -2856,7 +2939,7 @@ def draw_conditional_prop(layout: bpy.types.UILayout, heading: str, data: bpy.ty
    """Draws a property row with a checkbox that enables a value field.
    The function fails when prop_condition is not a boolean property.
    """
    col = layout.column(heading=heading)
    col = column_with_heading(layout, heading)
    row = col.row()
    row.prop(data, prop_condition, text='')
    sub = row.row()
@ -2908,6 +2991,8 @@ __REG_CLASSES = (
    InvalidateCacheButton,
    InvalidateMaterialCacheButton,
    LNX_OT_NewCustomMaterial,
    LNX_OT_NextPassMaterialSelector,
    LNX_OT_SetNextPassMaterial,
    LNX_PG_BindTexturesListItem,
    LNX_UL_BindTexturesList,
    LNX_OT_BindTexturesListNewItem,

@ -96,7 +96,7 @@ def convert_image(image, path, file_format='JPEG'):
    ren.image_settings.color_mode = orig_color_mode


def get_random_color_rgb() -> list[float]:
def get_random_color_rgb() -> List[float]:
    """Return a random RGB color with values in range [0, 1]."""
    return [random.random(), random.random(), random.random()]

@ -1162,7 +1162,7 @@ def get_link_web_server():
    return '' if not hasattr(addon_prefs, 'link_web_server') else addon_prefs.link_web_server


def get_file_lnx_version_tuple() -> tuple[int]:
def get_file_lnx_version_tuple() -> Tuple[int, ...]:
    wrd = bpy.data.worlds['Lnx']
    return tuple(map(int, wrd.lnx_version.split('.')))
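
# Note on the annotation changes in this hunk: built-in generics such as tuple[int] and
# dict[str, str] are only subscriptable at runtime on Python 3.9+, while typing.Tuple,
# Dict and List also work on the older interpreters bundled with earlier Blender releases.
# A minimal comparison, assuming no `from __future__ import annotations`:
#     def version() -> Tuple[int, ...]: ...   # evaluates fine on Python 3.7+
#     def version() -> tuple[int, ...]: ...   # TypeError before Python 3.9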

@ -1218,9 +1218,9 @@ def cpu_count(*, physical_only=False) -> Optional[int]:
            return int(subprocess.check_output(command))

    except subprocess.CalledProcessError as e:
        err_reason = f'Reason: command {command} exited with code {e.returncode}.'
        err_reason = 'Reason: command {} exited with code {}.'.format(command, e.returncode)
    except FileNotFoundError as e:
        err_reason = f'Reason: couldn\'t open file from command {command} ({e.errno=}).'
        err_reason = 'Reason: couldn\'t open file from command {} (errno={}).'.format(command, e.errno)

    # Last resort even though it can be wrong
    log.warn("Could not retrieve count of physical CPUs, using logical CPU count instead.\n\t" + err_reason)
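
# Note on the message rewrites above: the dropped f-strings relied on the self-documenting
# '{e.errno=}' form, which requires Python 3.8+; plain str.format() keeps this module
# importable on older interpreters while producing an equivalent message, e.g.:
#     'Reason: command {} exited with code {}.'.format(command, e.returncode)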

@ -5,7 +5,7 @@ import json
import os
import re
import subprocess
from typing import Any, Optional, Callable
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

import bpy

@ -56,7 +56,7 @@ def is_version_installed(version_major: str) -> bool:
    return any(v['version_major'] == version_major for v in _installed_versions)


def get_installed_version(version_major: str, re_fetch=False) -> Optional[dict[str, str]]:
def get_installed_version(version_major: str, re_fetch=False) -> Optional[Dict[str, str]]:
    for installed_version in _installed_versions:
        if installed_version['version_major'] == version_major:
            return installed_version
@ -71,7 +71,7 @@ def get_installed_version(version_major: str, re_fetch=False) -> Optional[dict[s
    return None


def get_supported_version(version_major: str) -> Optional[dict[str, str]]:
def get_supported_version(version_major: str) -> Optional[Dict[str, str]]:
    for version in supported_versions:
        if version[0] == version_major:
            return {
@ -100,7 +100,7 @@ def fetch_installed_vs(silent=False) -> bool:
            if not silent:
                log.warn(
                    f'Found a Visual Studio installation with incomplete information, skipping\n'
                    f'    ({name=}, {versions=}, {path=})'
                    f'    (name={name if name is not None else "None"}, versions={versions}, path={path if path is not None else "None"})'
                )
            continue

@ -212,14 +212,14 @@ def compile_in_vs(version_major: str, done: Callable[[], None]) -> bool:
    return True


def _vswhere_get_display_name(instance_data: dict[str, Any]) -> Optional[str]:
def _vswhere_get_display_name(instance_data: Dict[str, Any]) -> Optional[str]:
    name_raw = instance_data.get('displayName', None)
    if name_raw is None:
        return None
    return lnx.utils.safestr(name_raw).replace('_', ' ').strip()


def _vswhere_get_version(instance_data: dict[str, Any]) -> Optional[tuple[str, str, tuple[int, ...]]]:
def _vswhere_get_version(instance_data: Dict[str, Any]) -> Optional[Tuple[str, str, Tuple[int, int, int, int]]]:
    version_raw = instance_data.get('installationVersion', None)
    if version_raw is None:
        return None
@ -230,11 +230,11 @@ def _vswhere_get_version(instance_data: dict[str, Any]) -> Optional[tuple[str, s
    return version_major, version_full, version_full_ints


def _vswhere_get_path(instance_data: dict[str, Any]) -> Optional[str]:
def _vswhere_get_path(instance_data: Dict[str, Any]) -> Optional[str]:
    return instance_data.get('installationPath', None)


def _vswhere_get_instances(silent=False) -> Optional[list[dict[str, Any]]]:
def _vswhere_get_instances(silent: bool = False) -> Optional[List[Dict[str, Any]]]:
    # vswhere.exe only exists at that location since VS2017 v15.2, for
    # earlier versions we'd need to package vswhere with Leenkx
    exe_path = os.path.join(os.environ["ProgramFiles(x86)"], 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
@ -256,7 +256,7 @@ def _vswhere_get_instances(silent=False) -> Optional[list[dict[str, Any]]]:
    return result


def version_full_to_ints(version_full: str) -> tuple[int, ...]:
def version_full_to_ints(version_full: str) -> Tuple[int, ...]:
    return tuple(int(i) for i in version_full.split('.'))


@ -281,7 +281,7 @@ def get_vcxproj_path() -> str:
    return os.path.join(project_path, project_name + '.vcxproj')


def fetch_project_version() -> tuple[Optional[str], Optional[str], Optional[str]]:
def fetch_project_version() -> Tuple[Optional[str], Optional[str], Optional[str]]:
    version_major = None
    version_min_full = None


@ -338,8 +338,8 @@ project.addSources('Sources');
        if rpdat.lnx_particles != 'Off':
            assets.add_khafile_def('lnx_particles')

        if rpdat.rp_draw_order == 'Shader':
            assets.add_khafile_def('lnx_draworder_shader')
        if rpdat.rp_draw_order == 'Index':
            assets.add_khafile_def('lnx_draworder_index')

        if lnx.utils.get_viewport_controls() == 'azerty':
            assets.add_khafile_def('lnx_azerty')
@ -818,7 +818,7 @@ const int compoChromaticSamples = {rpdat.lnx_chromatic_aberration_samples};

        focus_distance = 0.0
        fstop = 0.0
        if len(bpy.data.cameras) > 0 and lnx.utils.get_active_scene().camera.data.dof.use_dof:
        if lnx.utils.get_active_scene().camera and lnx.utils.get_active_scene().camera.data.dof.use_dof:
            focus_distance = lnx.utils.get_active_scene().camera.data.dof.focus_distance
            fstop = lnx.utils.get_active_scene().camera.data.dof.aperture_fstop
            lens = lnx.utils.get_active_scene().camera.data.lens
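        # Note on the changed check above: len(bpy.data.cameras) > 0 only proves that some
        # camera datablock exists, while the active scene's camera can still be None, so the
        # .data.dof access would raise AttributeError. A hypothetical equivalent guard:
        #     cam = lnx.utils.get_active_scene().camera
        #     if cam is not None and cam.data.dof.use_dof:
        #         ...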

@ -118,7 +118,8 @@ def render_envmap(target_dir: str, world: bpy.types.World) -> str:
    scene = bpy.data.scenes['_lnx_envmap_render']
    scene.world = world

    image_name = f'env_{lnx.utils.safesrc(world.name)}.{ENVMAP_EXT}'
    world_name = lnx.utils.asset_name(world) if world.library else world.name
    image_name = f'env_{lnx.utils.safesrc(world_name)}.{ENVMAP_EXT}'
    render_path = os.path.join(target_dir, image_name)
    scene.render.filepath = render_path