Merge pull request 'main' (#107) from Onek8/LNXSDK:main into main
Reviewed-on: #107
@@ -331,15 +331,18 @@ class RenderPath {
		});
	}

-	public static function sortMeshesShader(meshes: Array<MeshObject>) {
+	public static function sortMeshesIndex(meshes: Array<MeshObject>) {
		meshes.sort(function(a, b): Int {
			#if rp_depth_texture
			var depthDiff = boolToInt(a.depthRead) - boolToInt(b.depthRead);
			if (depthDiff != 0) return depthDiff;
			#end

-			return a.materials[0].name >= b.materials[0].name ? 1 : -1;
-		});
+			if (a.data.sortingIndex != b.data.sortingIndex) {
+				return a.data.sortingIndex > b.data.sortingIndex ? 1 : -1;
+			}
+
+			return a.data.name >= b.data.name ? 1 : -1; });
	}

	public function drawMeshes(context: String) {
@@ -399,7 +402,7 @@ class RenderPath {
		#if lnx_batch
		sortMeshesDistance(Scene.active.meshBatch.nonBatched);
		#else
-		drawOrder == DrawOrder.Shader ? sortMeshesShader(meshes) : sortMeshesDistance(meshes);
+		drawOrder == DrawOrder.Index ? sortMeshesIndex(meshes) : sortMeshesDistance(meshes);
		#end
		meshesSorted = true;
	}
@@ -914,6 +917,6 @@ class CachedShaderContext {

@:enum abstract DrawOrder(Int) from Int {
	var Distance = 0; // Early-z
-	var Shader = 1; // Less state changes
+	var Index = 1; // Less state changes
	// var Mix = 2; // Distance buckets sorted by shader
}
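The new comparator orders meshes by the explicit per-mesh `sortingIndex` first and only falls back to name comparison when the indices tie, so meshes with equal indices keep a deterministic relative order. A standalone sketch of the same two-key comparison; the `SortableMesh` typedef and the sample data are hypothetical, for illustration only:

```haxe
// Minimal sketch of the two-key sort used above; SortableMesh is a
// hypothetical stand-in for MeshObject + MeshData.
typedef SortableMesh = { sortingIndex: Int, name: String };

class SortDemo {
	static function main() {
		var meshes: Array<SortableMesh> = [
			{ sortingIndex: 1, name: "Glass" },
			{ sortingIndex: 0, name: "Wall" },
			{ sortingIndex: 1, name: "Decal" }
		];
		meshes.sort(function(a, b): Int {
			// Primary key: the explicit sorting index
			if (a.sortingIndex != b.sortingIndex)
				return a.sortingIndex > b.sortingIndex ? 1 : -1;
			// Secondary key: name, mirroring the fallback above
			return a.name >= b.name ? 1 : -1;
		});
		for (m in meshes) trace(m.sortingIndex + " " + m.name); // 0 Wall, 1 Decal, 1 Glass
	}
}
```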
@@ -9,6 +9,7 @@ import iron.data.SceneFormat;
class MeshData {

	public var name: String;
+	public var sortingIndex: Int;
	public var raw: TMeshData;
	public var format: TSceneFormat;
	public var geom: Geometry;
@@ -23,7 +24,8 @@ class MeshData {
	public function new(raw: TMeshData, done: MeshData->Void) {
		this.raw = raw;
		this.name = raw.name;
+		this.sortingIndex = raw.sorting_index;

		if (raw.scale_pos != null) scalePos = raw.scale_pos;
		if (raw.scale_tex != null) scaleTex = raw.scale_tex;
@@ -49,6 +49,7 @@ typedef TMeshData = {
@:structInit class TMeshData {
#end
	public var name: String;
+	public var sorting_index: Int;
	public var vertex_arrays: Array<TVertexArray>;
	public var index_arrays: Array<TIndexArray>;
	@:optional public var dynamic_usage: Null<Bool>;
@@ -222,6 +223,7 @@ typedef TShaderData = {
@:structInit class TShaderData {
#end
	public var name: String;
+	public var next_pass: String;
	public var contexts: Array<TShaderContext>;
}
@@ -22,6 +22,7 @@ using StringTools;
class ShaderData {

	public var name: String;
+	public var nextPass: String;
	public var raw: TShaderData;
	public var contexts: Array<ShaderContext> = [];

@@ -33,6 +34,7 @@ class ShaderData {
	public function new(raw: TShaderData, done: ShaderData->Void, overrideContext: TShaderOverride = null) {
		this.raw = raw;
		this.name = raw.name;
+		this.nextPass = raw.next_pass;

		for (c in raw.contexts) contexts.push(null);
		var contextsLoaded = 0;
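Taken together, the hunks above plumb two new exporter fields into the runtime: snake_case names in the scene format (`sorting_index`, `next_pass`) are copied onto camelCase fields in the constructors. A minimal sketch of that mapping; the literal values are made up for illustration:

```haxe
class DataFlowDemo {
	static function main() {
		// Subsets of TMeshData / TShaderData; field names match the diff,
		// the values are hypothetical.
		var rawMesh = { name: "Cube", sorting_index: 2 };
		var rawShader = { name: "Glass_shader", next_pass: "Glass_outline" };
		// Mirrors `this.sortingIndex = raw.sorting_index;` in MeshData.new()
		var sortingIndex = rawMesh.sorting_index;
		// Mirrors `this.nextPass = raw.next_pass;` in ShaderData.new()
		var nextPass = rawShader.next_pass;
		trace(sortingIndex, nextPass);
	}
}
```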
@@ -302,6 +302,10 @@ class MeshObject extends Object {

		// Render mesh
		var ldata = lod.data;

+		// Next pass rendering first (inverse order)
+		renderNextPass(g, context, bindParams, lod);
+
		for (i in 0...ldata.geom.indexBuffers.length) {

			var mi = ldata.geom.materialIndices[i];
@@ -405,4 +409,85 @@ class MeshObject extends Object {
			}
		}
	}

+	function renderNextPass(g: Graphics, context: String, bindParams: Array<String>, lod: MeshObject) {
+		var ldata = lod.data;
+		for (i in 0...ldata.geom.indexBuffers.length) {
+			var mi = ldata.geom.materialIndices[i];
+			if (mi >= materials.length) continue;
+
+			var currentMaterial: MaterialData = materials[mi];
+			if (currentMaterial == null || currentMaterial.shader == null) continue;
+
+			var nextPassName: String = currentMaterial.shader.nextPass;
+			if (nextPassName == null || nextPassName == "") continue;
+
+			var nextMaterial: MaterialData = null;
+			for (mat in materials) {
+				// First try exact match
+				if (mat.name == nextPassName) {
+					nextMaterial = mat;
+					break;
+				}
+				// If no exact match, try to match base name for linked materials
+				if (mat.name.indexOf("_") > 0 && mat.name.substr(mat.name.length - 6) == ".blend") {
+					var baseName = mat.name.substring(0, mat.name.indexOf("_"));
+					if (baseName == nextPassName) {
+						nextMaterial = mat;
+						break;
+					}
+				}
+			}
+
+			if (nextMaterial == null) continue;
+
+			var nextMaterialContext: MaterialContext = null;
+			var nextShaderContext: ShaderContext = null;
+
+			for (j in 0...nextMaterial.raw.contexts.length) {
+				if (nextMaterial.raw.contexts[j].name.substr(0, context.length) == context) {
+					nextMaterialContext = nextMaterial.contexts[j];
+					nextShaderContext = nextMaterial.shader.getContext(context);
+					break;
+				}
+			}
+
+			if (nextShaderContext == null) continue;
+			if (skipContext(context, nextMaterial)) continue;
+
+			var elems = nextShaderContext.raw.vertex_elements;
+
+			// Uniforms
+			if (nextShaderContext.pipeState != lastPipeline) {
+				g.setPipeline(nextShaderContext.pipeState);
+				lastPipeline = nextShaderContext.pipeState;
+			}
+			Uniforms.setContextConstants(g, nextShaderContext, bindParams);
+			Uniforms.setObjectConstants(g, nextShaderContext, this);
+			Uniforms.setMaterialConstants(g, nextShaderContext, nextMaterialContext);
+
+			// VB / IB
+			#if lnx_deinterleaved
+			g.setVertexBuffers(ldata.geom.get(elems));
+			#else
+			if (ldata.geom.instancedVB != null) {
+				g.setVertexBuffers([ldata.geom.get(elems), ldata.geom.instancedVB]);
+			}
+			else {
+				g.setVertexBuffer(ldata.geom.get(elems));
+			}
+			#end
+
+			g.setIndexBuffer(ldata.geom.indexBuffers[i]);
+
+			// Draw next pass for this specific geometry section
+			if (ldata.geom.instanced) {
+				g.drawIndexedVerticesInstanced(ldata.geom.instanceCount, ldata.geom.start, ldata.geom.count);
+			}
+			else {
+				g.drawIndexedVertices(ldata.geom.start, ldata.geom.count);
+			}
+		}
+	}
}
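A small standalone sketch of the fallback name match `renderNextPass` uses for linked materials, where a material name carries a `_<library>.blend` suffix; the sample names are hypothetical:

```haxe
class NextPassNameDemo {
	static function matchesNextPass(matName: String, nextPassName: String): Bool {
		if (matName == nextPassName) return true; // exact match first
		// Linked materials are suffixed with their source .blend file name
		if (matName.indexOf("_") > 0 && matName.substr(matName.length - 6) == ".blend") {
			var baseName = matName.substring(0, matName.indexOf("_"));
			return baseName == nextPassName;
		}
		return false;
	}

	static function main() {
		trace(matchesNextPass("Outline_Assets.blend", "Outline")); // true
		trace(matchesNextPass("Outline", "Outline"));              // true
		trace(matchesNextPass("Other", "Outline"));                // false
	}
}
```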
@@ -39,11 +39,11 @@ class Time {
	}

	public static inline function time(): Float {
-		return kha.Scheduler.time();
+		return kha.Scheduler.time() * scale;
	}

	public static inline function realTime(): Float {
-		return kha.Scheduler.realTime();
+		return kha.Scheduler.realTime() * scale;
	}

	public static function update() {
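As a quick illustration of the change: `time()` now advances at `scale` times the wall clock, so a scale of 0.5 halves the reported time. A toy calculation, with a made-up raw clock value standing in for `kha.Scheduler.time()`:

```haxe
class ScaledTimeDemo {
	static var scale = 0.5;
	static function main() {
		var raw = 10.0;            // hypothetical stand-in for kha.Scheduler.time()
		var scaled = raw * scale;  // what time() now reports
		trace(scaled);             // 5.0
	}
}
```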
@@ -94,34 +94,34 @@ class Tween {

		// Way too much Reflect trickery..
		var ps = Reflect.fields(a.props);
-		for (i in 0...ps.length) {
-			var p = ps[i];
+		for (j in 0...ps.length) {
+			var p = ps[j];
			var k = a._time / a.duration;
			if (k > 1) k = 1;

-			if (a._comps[i] == 1) {
-				var fromVal: Float = a._x[i];
+			if (a._comps[j] == 1) {
+				var fromVal: Float = a._x[j];
				var toVal: Float = Reflect.getProperty(a.props, p);
				var val: Float = fromVal + (toVal - fromVal) * eases[a.ease](k);
				Reflect.setProperty(a.target, p, val);
			}
-			else { // _comps[i] == 4
+			else { // _comps[j] == 4
				var obj = Reflect.getProperty(a.props, p);
				var toX: Float = Reflect.getProperty(obj, "x");
				var toY: Float = Reflect.getProperty(obj, "y");
				var toZ: Float = Reflect.getProperty(obj, "z");
				var toW: Float = Reflect.getProperty(obj, "w");
-				if (a._normalize[i]) {
-					var qdot = (a._x[i] * toX) + (a._y[i] * toY) + (a._z[i] * toZ) + (a._w[i] * toW);
+				if (a._normalize[j]) {
+					var qdot = (a._x[j] * toX) + (a._y[j] * toY) + (a._z[j] * toZ) + (a._w[j] * toW);
					if (qdot < 0.0) {
						toX = -toX; toY = -toY; toZ = -toZ; toW = -toW;
					}
				}
-				var x: Float = a._x[i] + (toX - a._x[i]) * eases[a.ease](k);
-				var y: Float = a._y[i] + (toY - a._y[i]) * eases[a.ease](k);
-				var z: Float = a._z[i] + (toZ - a._z[i]) * eases[a.ease](k);
-				var w: Float = a._w[i] + (toW - a._w[i]) * eases[a.ease](k);
-				if (a._normalize[i]) {
+				var x: Float = a._x[j] + (toX - a._x[j]) * eases[a.ease](k);
+				var y: Float = a._y[j] + (toY - a._y[j]) * eases[a.ease](k);
+				var z: Float = a._z[j] + (toZ - a._z[j]) * eases[a.ease](k);
+				var w: Float = a._w[j] + (toW - a._w[j]) * eases[a.ease](k);
+				if (a._normalize[j]) {
					var l = Math.sqrt(x * x + y * y + z * z + w * w);
					if (l > 0.0) {
						l = 1.0 / l;
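The only change in this hunk is the loop variable rename from `i` to `j`, presumably to avoid shadowing an index from the enclosing scope in `update()`. A sketch of the kind of ambiguity such shadowing invites; the data is hypothetical:

```haxe
// Haxe allows an inner `for (i in ...)` to shadow an outer `i`, which is
// easy to misread when both indices are conceptually live in the same body.
class ShadowDemo {
	static function main() {
		var anims = ["a", "b"];
		var props = ["x", "y", "z"];
		for (i in 0...anims.length) {
			for (j in 0...props.length) { // was `i` before the fix
				// With a distinct name it is unambiguous which container
				// each index addresses.
				trace(anims[i] + "." + props[j]);
			}
		}
	}
}
```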
leenkx/Sources/leenkx/logicnode/ProbabilisticIndexNode.hx (new file, 41 lines)
@@ -0,0 +1,41 @@
package leenkx.logicnode;

class ProbabilisticIndexNode extends LogicNode {

	public function new(tree: LogicTree) {
		super(tree);
	}

	override function get(from: Int): Dynamic {

		var probs: Array<Float> = [];
		var probs_acum: Array<Float> = [];
		var sum: Float = 0;

		for (p in 0...inputs.length) {
			probs.push(inputs[p].get());
			sum += probs[p];
		}

		if (sum > 1) {
			for (p in 0...probs.length)
				probs[p] /= sum;
		}

		sum = 0;
		for (p in 0...probs.length) {
			sum += probs[p];
			probs_acum.push(sum);
		}

		var rand: Float = Math.random();

		for (p in 0...probs.length) {
			if (p == 0 && rand <= probs_acum[p]) return p;
			else if (0 < p && p < probs.length - 1 && probs_acum[p - 1] < rand && rand <= probs_acum[p]) return p;
			else if (p == probs.length - 1 && probs_acum[p - 1] < rand) return p;
		}

		return null;
	}
}
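The node reads one weight per input, normalizes the weights when they sum past 1, builds a cumulative distribution, and returns the first index whose cumulative bound covers `Math.random()`. The same logic in a compact standalone form; the sample weights are hypothetical:

```haxe
class WeightedPickDemo {
	static function pick(weights: Array<Float>): Int {
		var sum = 0.0;
		for (w in weights) sum += w;
		// Normalize only when the weights overshoot 1, as the node does
		if (sum > 1) for (i in 0...weights.length) weights[i] /= sum;
		// Cumulative distribution: [0.2, 0.5, 1.0] for [0.2, 0.3, 0.5]
		var acc = 0.0;
		var cumulative = [for (w in weights) acc += w];
		var r = Math.random();
		for (i in 0...cumulative.length)
			if (r <= cumulative[i]) return i;
		return cumulative.length - 1; // r fell past the last bound
	}

	static function main() {
		trace(pick([0.2, 0.3, 0.5])); // 0, 1 or 2, weighted accordingly
	}
}
```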
@@ -1,5 +1,7 @@
package leenkx.logicnode;

+import iron.data.SceneFormat;
+
class SetWorldNode extends LogicNode {

	public function new(tree: LogicTree) {
@@ -10,25 +12,6 @@ class SetWorldNode extends LogicNode {
		var world: String = inputs[1].get();

		if (world != null) {
-
-			// Check if world shader data exists
-			var file: String = 'World_' + world + '_data';
-			#if lnx_json
-			file += ".json";
-			#elseif lnx_compress
-			file += ".lz4";
-			#else
-			file += '.lnx';
-			#end
-
-			var exists: Bool = false;
-
-			iron.data.Data.getBlob(file, function(b: kha.Blob) {
-				if (b != null) exists = true;
-			});
-
-			assert(Error, exists == true, "World must be either associated to a scene or have fake user");
-
			iron.Scene.active.raw.world_ref = world;
			var npath = leenkx.renderpath.RenderPathCreator.get();
			npath.loadShader("shader_datas/World_" + world + "/World_" + world);
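A plausible reason for dropping the existence check: `iron.data.Data.getBlob` reports its result through a callback, so when loading is asynchronous the assert could run before the flag was ever set. A sketch of that timing hazard; `loadAsync` is a hypothetical stand-in for the real loader:

```haxe
class AsyncCheckDemo {
	static function loadAsync(file: String, done: String->Void) {
		// Completes on a later tick, like an async blob load would
		haxe.Timer.delay(function() { done("blob:" + file); }, 10);
	}
	static function main() {
		var exists = false;
		loadAsync("World_x_data", function(b) { exists = true; });
		trace(exists); // false here - the callback has not run yet
	}
}
```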
@@ -641,18 +641,20 @@ class RenderPathForward {
		var framebuffer = "";
		#end

-		#if ((rp_antialiasing == "Off") || (rp_antialiasing == "FXAA"))
+		var target = "";
+		#if ((rp_antialiasing == "Off") || (rp_antialiasing == "FXAA") || (!rp_render_to_texture))
		{
			RenderPathCreator.finalTarget = path.currentTarget;
-			path.setTarget(framebuffer);
+			target = framebuffer;
		}
		#else
		{
			path.setTarget("buf");
			RenderPathCreator.finalTarget = path.currentTarget;
+			target = "buf";
		}
		#end

+		path.setTarget(target);
+
		#if rp_compositordepth
		{
			path.bindTarget("_main", "gbufferD");
@@ -671,6 +673,15 @@ class RenderPathForward {
		}
		#end

+		#if rp_overlays
+		{
+			path.setTarget(target);
+			path.clearTarget(null, 1.0);
+			path.drawMeshes("overlay");
+		}
+		#end
+
		#if ((rp_antialiasing == "SMAA") || (rp_antialiasing == "TAA"))
		{
			path.setTarget("bufa");
@@ -701,12 +712,6 @@ class RenderPathForward {
		}
		#end

-		#if rp_overlays
-		{
-			path.clearTarget(null, 1.0);
-			path.drawMeshes("overlay");
-		}
-		#end
	}

	public static function setupDepthTexture() {
@@ -3,33 +3,35 @@ package leenkx.system;

import haxe.Constraints.Function;

class Signal {
-	var callbacks:Array<Function> = [];
+	var callbacks: Array<Function> = [];

	public function new() {

	}

-	public function connect(callback:Function) {
+	public function connect(callback: Function) {
		if (!callbacks.contains(callback)) callbacks.push(callback);
	}

-	public function disconnect(callback:Function) {
+	public function disconnect(callback: Function) {
		if (callbacks.contains(callback)) callbacks.remove(callback);
	}

-	public function emit(...args:Any) {
-		for (callback in callbacks) Reflect.callMethod(this, callback, args);
+	public function emit(...args: Any) {
+		for (callback in callbacks.copy()) {
+			if (callbacks.contains(callback)) Reflect.callMethod(null, callback, args);
+		}
	}

-	public function getConnections():Array<Function> {
+	public function getConnections(): Array<Function> {
		return callbacks;
	}

-	public function isConnected(callBack:Function):Bool {
+	public function isConnected(callBack: Function):Bool {
		return callbacks.contains(callBack);
	}

-	public function isNull():Bool {
+	public function isNull(): Bool {
		return callbacks.length == 0;
	}
}
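The reworked `emit()` iterates a copy of the callback list and re-checks membership before each call, so a callback may disconnect itself (or a later callback) mid-emit without corrupting the iteration. A short usage sketch against the class above:

```haxe
class SignalDemo {
	static function main() {
		var sig = new leenkx.system.Signal();
		var onFire: haxe.Constraints.Function = null;
		onFire = function() {
			trace("fired once");
			sig.disconnect(onFire); // safe: emit() walks a copy
		};
		sig.connect(onFire);
		sig.emit();
		sig.emit(); // no output - the callback removed itself
	}
}
```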
@@ -57,7 +57,7 @@ class Starter {
			iron.Scene.getRenderPath = getRenderPath;
			#end
			#if lnx_draworder_shader
-			iron.RenderPath.active.drawOrder = iron.RenderPath.DrawOrder.Shader;
+			iron.RenderPath.active.drawOrder = iron.RenderPath.DrawOrder.Index;
			#end // else Distance
		});
	});
@@ -1,87 +1,243 @@
package leenkx.trait;

+import iron.Trait;
import iron.math.Vec4;
import iron.system.Input;
import iron.object.Object;
import iron.object.CameraObject;
import leenkx.trait.physics.PhysicsWorld;
import leenkx.trait.internal.CameraController;
+import leenkx.trait.physics.RigidBody;
+import kha.FastFloat;

-class FirstPersonController extends CameraController {
+class FirstPersonController extends Trait {

	#if (!lnx_physics)
	public function new() { super(); }
	#else

-	var head: Object;
-	static inline var rotationSpeed = 2.0;
-
-	public function new() {
-		super();
-		iron.Scene.active.notifyOnInit(init);
-	}
-
-	function init() {
-		head = object.getChildOfType(CameraObject);
-
-		PhysicsWorld.active.notifyOnPreUpdate(preUpdate);
-		notifyOnUpdate(update);
-		notifyOnRemove(removed);
-	}
-
-	var xVec = Vec4.xAxis();
-	var zVec = Vec4.zAxis();
-	function preUpdate() {
-		if (Input.occupied || !body.ready) return;
-
-		var mouse = Input.getMouse();
-		var kb = Input.getKeyboard();
-
-		if (mouse.started() && !mouse.locked) mouse.lock();
-		else if (kb.started("escape") && mouse.locked) mouse.unlock();
-
-		if (mouse.locked || mouse.down()) {
-			head.transform.rotate(xVec, -mouse.movementY / 250 * rotationSpeed);
-			transform.rotate(zVec, -mouse.movementX / 250 * rotationSpeed);
-			body.syncTransform();
-		}
-	}
-
-	function removed() {
-		PhysicsWorld.active.removePreUpdate(preUpdate);
-	}
-
-	var dir = new Vec4();
-	function update() {
-		if (!body.ready) return;
-
-		if (jump) {
-			body.applyImpulse(new Vec4(0, 0, 16));
-			jump = false;
-		}
-
-		// Move
-		dir.set(0, 0, 0);
-		if (moveForward) dir.add(transform.look());
-		if (moveBackward) dir.add(transform.look().mult(-1));
-		if (moveLeft) dir.add(transform.right().mult(-1));
-		if (moveRight) dir.add(transform.right());
-
-		// Push down
-		var btvec = body.getLinearVelocity();
-		body.setLinearVelocity(0.0, 0.0, btvec.z - 1.0);
-
-		if (moveForward || moveBackward || moveLeft || moveRight) {
-			var dirN = dir.normalize();
-			dirN.mult(6);
-			body.activate();
-			body.setLinearVelocity(dirN.x, dirN.y, btvec.z - 1.0);
-		}
-
-		// Keep vertical
-		body.setAngularFactor(0, 0, 0);
-		camera.buildMatrix();
-	}
-	#end
-}
+	@prop public var rotationSpeed: Float = 0.15;
+	@prop public var maxPitch: Float = 2.2;
+	@prop public var minPitch: Float = 0.5;
+	@prop public var enableJump: Bool = true;
+	@prop public var jumpForce: Float = 22.0;
+	@prop public var moveSpeed: Float = 500.0;
+
+	@prop public var forwardKey: String = "w";
+	@prop public var backwardKey: String = "s";
+	@prop public var leftKey: String = "a";
+	@prop public var rightKey: String = "d";
+	@prop public var jumpKey: String = "space";
+
+	@prop public var allowAirJump: Bool = false;
+
+	@prop public var canRun: Bool = true;
+	@prop public var runKey: String = "shift";
+	@prop public var runSpeed: Float = 1000.0;
+
+	// Stamina system
+	@prop public var stamina: Bool = false;
+	@prop public var staminaBase: Float = 75.0;
+	@prop public var staRecoverPerSec: Float = 5.0;
+	@prop public var staDecreasePerSec: Float = 5.0;
+	@prop public var staRecoverTime: Float = 2.0;
+	@prop public var staDecreasePerJump: Float = 5.0;
+	@prop public var enableFatigue: Bool = false;
+	@prop public var fatigueSpeed: Float = 0.5; // Movement speed reduction while fatigued
+	@prop public var fatigueThreshold: Float = 30.0; // Time running non-stop before fatigue activates
+	@prop public var fatRecoveryThreshold: Float = 7.5; // Time without running/jumping to recover from fatigue
+
+	// Private vars
+	var head: CameraObject;
+	var pitch: Float = 0.0;
+	var body: RigidBody;
+
+	var moveForward: Bool = false;
+	var moveBackward: Bool = false;
+	var moveLeft: Bool = false;
+	var moveRight: Bool = false;
+	var isRunning: Bool = false;
+
+	var canJump: Bool = true;
+	var staminaValue: Float = 0.0;
+	var timeSinceStop: Float = 0.0;
+
+	var fatigueTimer: Float = 0.0;
+	var fatigueCooldown: Float = 0.0;
+	var isFatigueActive: Bool = false;
+
+	public function new() {
+		super();
+		iron.Scene.active.notifyOnInit(init);
+	}
+
+	function init() {
+		body = object.getTrait(RigidBody);
+		head = object.getChildOfType(CameraObject);
+		PhysicsWorld.active.notifyOnPreUpdate(preUpdate);
+		notifyOnUpdate(update);
+		notifyOnRemove(removed);
+		staminaValue = staminaBase;
+	}
+
+	function removed() {
+		PhysicsWorld.active.removePreUpdate(preUpdate);
+	}
+
+	var zVec = Vec4.zAxis();
+
+	function preUpdate() {
+		if (Input.occupied || body == null) return;
+		var mouse = Input.getMouse();
+		var kb = Input.getKeyboard();
+
+		if (mouse.started() && !mouse.locked)
+			mouse.lock();
+		else if (kb.started("escape") && mouse.locked)
+			mouse.unlock();
+
+		if (mouse.locked || mouse.down()) {
+			var deltaTime: Float = iron.system.Time.delta;
+			object.transform.rotate(zVec, -mouse.movementX * rotationSpeed * deltaTime);
+			var deltaPitch: Float = -(mouse.movementY * rotationSpeed * deltaTime);
+			pitch += deltaPitch;
+			pitch = Math.max(minPitch, Math.min(maxPitch, pitch));
+			head.transform.setRotation(pitch, 0.0, 0.0);
+			body.syncTransform();
+		}
+	}
+
+	var dir: Vec4 = new Vec4();
+
+	function isFatigued(): Bool {
+		return enableFatigue && isFatigueActive;
+	}
+
+	function update() {
+		if (body == null) return;
+		var deltaTime: Float = iron.system.Time.delta;
+		var kb = Input.getKeyboard();
+
+		moveForward = kb.down(forwardKey);
+		moveBackward = kb.down(backwardKey);
+		moveLeft = kb.down(leftKey);
+		moveRight = kb.down(rightKey);
+		var isMoving = moveForward || moveBackward || moveLeft || moveRight;
+
+		var isGrounded: Bool = false;
+		#if lnx_physics
+		var vel = body.getLinearVelocity();
+		if (Math.abs(vel.z) < 0.1) {
+			isGrounded = true;
+		}
+		#end
+
+		// Jump availability accounts for enableFatigue, whether it is false or true
+		if (isGrounded && !isFatigued()) {
+			canJump = true;
+		}
+
+		// Jump with stamina
+		if (enableJump && kb.started(jumpKey) && canJump) {
+			var jumpPower = jumpForce;
+			// Halve the jump when stamina is at or below 20%
+			if (stamina) {
+				if (staminaValue <= 0) {
+					jumpPower = 0;
+				} else if (staminaValue <= staminaBase * 0.2) {
+					jumpPower *= 0.5;
+				}
+				staminaValue -= staDecreasePerJump;
+				if (staminaValue < 0.0) staminaValue = 0.0;
+				timeSinceStop = 0.0;
+			}
+
+			if (jumpPower > 0) {
+				body.applyImpulse(new Vec4(0, 0, jumpPower));
+				if (!allowAirJump) canJump = false;
+			}
+		}
+
+		// Stamina and running control
+		if (canRun && kb.down(runKey) && isMoving) {
+			if (stamina) {
+				if (staminaValue > 0.0) {
+					isRunning = true;
+					staminaValue -= staDecreasePerSec * deltaTime;
+					if (staminaValue < 0.0) staminaValue = 0.0;
+				} else {
+					isRunning = false;
+				}
+			} else {
+				isRunning = true;
+			}
+		} else {
+			isRunning = false;
+		}
+
+		// Separate timers
+		if (isRunning) {
+			timeSinceStop = 0.0;
+			fatigueTimer += deltaTime;
+			fatigueCooldown = 0.0;
+		} else {
+			timeSinceStop += deltaTime;
+			fatigueCooldown += deltaTime;
+		}
+
+		// Prevent running and jumping while fatigued
+		if (isFatigued()) {
+			isRunning = false;
+			canJump = false;
+		}
+
+		// Activate fatigue after running continuously past the threshold
+		if (enableFatigue && fatigueTimer >= fatigueThreshold) {
+			isFatigueActive = true;
+		}
+
+		// Clear fatigue after recovering
+		if (enableFatigue && isFatigueActive && fatigueCooldown >= fatRecoveryThreshold) {
+			isFatigueActive = false;
+			fatigueTimer = 0.0;
+		}
+
+		// Recover stamina while not running
+		if (stamina && !isRunning && staminaValue < staminaBase && !isFatigued()) {
+			if (timeSinceStop >= staRecoverTime) {
+				staminaValue += staRecoverPerSec * deltaTime;
+				if (staminaValue > staminaBase) staminaValue = staminaBase;
+			}
+		}
+
+		// Movement on local axes
+		dir.set(0, 0, 0);
+		if (moveForward) dir.add(object.transform.look());
+		if (moveBackward) dir.add(object.transform.look().mult(-1));
+		if (moveLeft) dir.add(object.transform.right().mult(-1));
+		if (moveRight) dir.add(object.transform.right());
+
+		var btvec = body.getLinearVelocity();
+		body.setLinearVelocity(0.0, 0.0, btvec.z - 1.0);
+
+		if (isMoving) {
+			var dirN = dir.normalize();
+			var baseSpeed = moveSpeed;
+			if (isRunning && moveForward) {
+				baseSpeed = runSpeed;
+			}
+			var currentSpeed = isFatigued() ? baseSpeed * fatigueSpeed : baseSpeed;
+			dirN.mult(currentSpeed * deltaTime);
+			body.activate();
+			body.setLinearVelocity(dirN.x, dirN.y, btvec.z - 1.0);
+		}
+
+		body.setAngularFactor(0, 0, 0);
+		head.buildMatrix();
+	}
+
+	#end
+}
+
+// Stamina and fatigue system
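As a worked example of the default stamina tuning above (staminaBase 75, staDecreasePerSec 5, staRecoverPerSec 5, staRecoverTime 2): sprinting drains a full bar in 15 seconds, and a full refill takes the 2-second recovery delay plus another 15 seconds. A compact standalone sketch of that drain/recover arithmetic, detached from the trait:

```haxe
class StaminaDemo {
	static function main() {
		var stamina = 75.0;            // staminaBase
		var dt = 1.0;                  // one-second steps for readability
		// Drain while running: 75 / 5 per second = 15 s to empty
		for (t in 0...15) stamina = Math.max(0, stamina - 5.0 * dt);
		trace(stamina);                // 0
		// Recovery only starts after staRecoverTime (2 s) of not running
		var timeSinceStop = 0.0;
		for (t in 0...17) {
			timeSinceStop += dt;
			if (timeSinceStop >= 2.0) stamina = Math.min(75.0, stamina + 5.0 * dt);
		}
		trace(stamina);                // back to 75 after the 2 s wait + 15 s refill
	}
}
```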
@@ -1727,6 +1727,7 @@ class LeenkxExporter:
        tangdata = np.array(tangdata, dtype='<i2')

    # Output
+    o['sorting_index'] = bobject.lnx_sorting_index
    o['vertex_arrays'] = []
    o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
    o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
@@ -1979,7 +1980,7 @@ class LeenkxExporter:
        if bobject.parent is None or bobject.parent.name not in collection.objects:
            asset_name = lnx.utils.asset_name(bobject)

-            if collection.library:
+            if collection.library and not collection.name in self.scene.collection.children:
                # Add external linked objects
                # Iron differentiates objects based on their names,
                # so errors will happen if two objects with the
@@ -2208,6 +2209,9 @@ class LeenkxExporter:
        elif material.lnx_cull_mode != 'clockwise':
            o['override_context'] = {}
            o['override_context']['cull_mode'] = material.lnx_cull_mode
+        if material.lnx_compare_mode != 'less':
+            o['override_context'] = {}
+            o['override_context']['compare_mode'] = material.lnx_compare_mode

        o['contexts'] = []
@@ -2395,7 +2399,7 @@ class LeenkxExporter:
        world = self.scene.world

        if world is not None:
-            world_name = lnx.utils.safestr(world.name)
+            world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)

            if world_name not in self.world_array:
                self.world_array.append(world_name)
@@ -2544,12 +2548,12 @@ class LeenkxExporter:
            if collection.name.startswith(('RigidBodyWorld', 'Trait|')):
                continue

-            if self.scene.user_of_id(collection) or collection.library or collection in self.referenced_collections:
+            if self.scene.user_of_id(collection) or collection in self.referenced_collections:
                self.export_collection(collection)

        if not LeenkxExporter.option_mesh_only:
            if self.scene.camera is not None:
-                self.output['camera_ref'] = self.scene.camera.name
+                self.output['camera_ref'] = lnx.utils.asset_name(self.scene.camera) if self.scene.library else self.scene.camera.name
            else:
                if self.scene.name == lnx.utils.get_project_scene_name():
                    log.warn(f'Scene "{self.scene.name}" is missing a camera')
@@ -2573,7 +2577,7 @@ class LeenkxExporter:
        self.export_tilesheets()

        if self.scene.world is not None:
-            self.output['world_ref'] = lnx.utils.safestr(self.scene.world.name)
+            self.output['world_ref'] = lnx.utils.safestr(lnx.utils.asset_name(self.scene.world) if self.scene.world.library else self.scene.world.name)

        if self.scene.use_gravity:
            self.output['gravity'] = [self.scene.gravity[0], self.scene.gravity[1], self.scene.gravity[2]]
@@ -3376,7 +3380,7 @@ class LeenkxExporter:
        if mobile_mat:
            lnx_radiance = False

-        out_probe = {'name': world.name}
+        out_probe = {'name': lnx.utils.asset_name(world) if world.library else world.name}
        if lnx_irradiance:
            ext = '' if wrd.lnx_minimize else '.json'
            out_probe['irradiance'] = irrsharmonics + '_irradiance' + ext
@@ -1,445 +1,446 @@
"""
Exports smaller geometry but is slower.
To be replaced with https://github.com/zeux/meshoptimizer
"""
from typing import Optional

import bpy
from mathutils import Vector
import numpy as np

import lnx.utils
from lnx import log

if lnx.is_reload(__name__):
    log = lnx.reload_module(log)
    lnx.utils = lnx.reload_module(lnx.utils)
else:
    lnx.enable_reload(__name__)


class Vertex:
    __slots__ = ("co", "normal", "uvs", "col", "loop_indices", "index", "bone_weights", "bone_indices", "bone_count", "vertex_index")

    def __init__(self, mesh: bpy.types.Mesh, loop: bpy.types.MeshLoop, vcol0: Optional[bpy.types.Attribute]):
        self.vertex_index = loop.vertex_index
        loop_idx = loop.index
        self.co = mesh.vertices[self.vertex_index].co[:]
        self.normal = loop.normal[:]
        self.uvs = tuple(layer.data[loop_idx].uv[:] for layer in mesh.uv_layers)
        self.col = [0.0, 0.0, 0.0] if vcol0 is None else vcol0.data[loop_idx].color[:]
        self.loop_indices = [loop_idx]
        self.index = 0

    def __hash__(self):
        return hash((self.co, self.normal, self.uvs))

    def __eq__(self, other):
        eq = (
            (self.co == other.co) and
            (self.normal == other.normal) and
            (self.uvs == other.uvs) and
            (self.col == other.col)
        )
        if eq:
            indices = self.loop_indices + other.loop_indices
            self.loop_indices = indices
            other.loop_indices = indices
        return eq


def calc_tangents(posa, nora, uva, ias, scale_pos):
    num_verts = int(len(posa) / 4)
    tangents = np.empty(num_verts * 3, dtype='<f4')
    # bitangents = np.empty(num_verts * 3, dtype='<f4')
    for ar in ias:
        ia = ar['values']
        num_tris = int(len(ia) / 3)
        for i in range(0, num_tris):
            i0 = ia[i * 3    ]
            i1 = ia[i * 3 + 1]
            i2 = ia[i * 3 + 2]
            v0 = Vector((posa[i0 * 4], posa[i0 * 4 + 1], posa[i0 * 4 + 2]))
            v1 = Vector((posa[i1 * 4], posa[i1 * 4 + 1], posa[i1 * 4 + 2]))
            v2 = Vector((posa[i2 * 4], posa[i2 * 4 + 1], posa[i2 * 4 + 2]))
            uv0 = Vector((uva[i0 * 2], uva[i0 * 2 + 1]))
            uv1 = Vector((uva[i1 * 2], uva[i1 * 2 + 1]))
            uv2 = Vector((uva[i2 * 2], uva[i2 * 2 + 1]))

            deltaPos1 = v1 - v0
            deltaPos2 = v2 - v0
            deltaUV1 = uv1 - uv0
            deltaUV2 = uv2 - uv0
            d = (deltaUV1.x * deltaUV2.y - deltaUV1.y * deltaUV2.x)
            if d != 0:
                r = 1.0 / d
            else:
                r = 1.0
            tangent = (deltaPos1 * deltaUV2.y - deltaPos2 * deltaUV1.y) * r
            # bitangent = (deltaPos2 * deltaUV1.x - deltaPos1 * deltaUV2.x) * r

            tangents[i0 * 3    ] += tangent.x
            tangents[i0 * 3 + 1] += tangent.y
            tangents[i0 * 3 + 2] += tangent.z
            tangents[i1 * 3    ] += tangent.x
            tangents[i1 * 3 + 1] += tangent.y
            tangents[i1 * 3 + 2] += tangent.z
            tangents[i2 * 3    ] += tangent.x
            tangents[i2 * 3 + 1] += tangent.y
            tangents[i2 * 3 + 2] += tangent.z
            # bitangents[i0 * 3    ] += bitangent.x
            # bitangents[i0 * 3 + 1] += bitangent.y
            # bitangents[i0 * 3 + 2] += bitangent.z
            # bitangents[i1 * 3    ] += bitangent.x
            # bitangents[i1 * 3 + 1] += bitangent.y
            # bitangents[i1 * 3 + 2] += bitangent.z
            # bitangents[i2 * 3    ] += bitangent.x
            # bitangents[i2 * 3 + 1] += bitangent.y
            # bitangents[i2 * 3 + 2] += bitangent.z
    # Orthogonalize
    for i in range(0, num_verts):
        t = Vector((tangents[i * 3], tangents[i * 3 + 1], tangents[i * 3 + 2]))
        # b = Vector((bitangents[i * 3], bitangents[i * 3 + 1], bitangents[i * 3 + 2]))
        n = Vector((nora[i * 2], nora[i * 2 + 1], posa[i * 4 + 3] / scale_pos))
        v = t - n * n.dot(t)
        v.normalize()
        # Calculate handedness
        # cnv = n.cross(v)
        # if cnv.dot(b) < 0.0:
        #     v = v * -1.0
        tangents[i * 3    ] = v.x
        tangents[i * 3 + 1] = v.y
        tangents[i * 3 + 2] = v.z
    return tangents


def export_mesh_data(self, export_mesh: bpy.types.Mesh, bobject: bpy.types.Object, o, has_armature=False):
    if bpy.app.version < (4, 1, 0):
        export_mesh.calc_normals_split()
    else:
        updated_normals = export_mesh.corner_normals
    # exportMesh.calc_loop_triangles()
    vcol0 = self.get_nth_vertex_colors(export_mesh, 0)
    vert_list = {Vertex(export_mesh, loop, vcol0): 0 for loop in export_mesh.loops}.keys()
    num_verts = len(vert_list)
    num_uv_layers = len(export_mesh.uv_layers)
    # Check if shape keys were exported
    has_morph_target = self.get_shape_keys(bobject.data)
    if has_morph_target:
        # Shape keys UV are exported separately, so reduce UV count by 1
        num_uv_layers -= 1
        morph_uv_index = self.get_morph_uv_index(bobject.data)
-    has_tex = self.get_export_uvs(export_mesh) and num_uv_layers > 0
+    has_tex = self.get_export_uvs(export_mesh) or num_uv_layers > 0  # TODO FIXME: this should use an `and` instead of `or`. Workaround to completely ignore if the mesh has the `export_uvs` flag. Only checking the `uv_layers` to bypass issues with materials in linked objects.
    if self.has_baked_material(bobject, export_mesh.materials):
        has_tex = True
    has_tex1 = has_tex and num_uv_layers > 1
    num_colors = self.get_num_vertex_colors(export_mesh)
    has_col = self.get_export_vcols(export_mesh) and num_colors > 0
    has_tang = self.has_tangents(export_mesh)

    pdata = np.empty(num_verts * 4, dtype='<f4')  # p.xyz, n.z
    ndata = np.empty(num_verts * 2, dtype='<f4')  # n.xy
    if has_tex or has_morph_target:
        uv_layers = export_mesh.uv_layers
        maxdim = 1.0
        maxdim_uvlayer = None
        if has_tex:
            t0map = 0  # Get active uvmap
            t0data = np.empty(num_verts * 2, dtype='<f4')
            if uv_layers is not None:
                if 'UVMap_baked' in uv_layers:
                    for i in range(0, len(uv_layers)):
                        if uv_layers[i].name == 'UVMap_baked':
                            t0map = i
                            break
                else:
                    for i in range(0, len(uv_layers)):
                        if uv_layers[i].active_render and uv_layers[i].name != 'UVMap_shape_key':
                            t0map = i
                            break
            if has_tex1:
                for i in range(0, len(uv_layers)):
                    # Not UVMap 0
                    if i != t0map:
                        # Not Shape Key UVMap
                        if has_morph_target and uv_layers[i].name == 'UVMap_shape_key':
                            continue
                        # Neither UVMap 0 Nor Shape Key Map
                        t1map = i
                t1data = np.empty(num_verts * 2, dtype='<f4')
            # Scale for packed coords
            lay0 = uv_layers[t0map]
            maxdim_uvlayer = lay0
            for v in lay0.data:
                if abs(v.uv[0]) > maxdim:
                    maxdim = abs(v.uv[0])
                if abs(v.uv[1]) > maxdim:
                    maxdim = abs(v.uv[1])
            if has_tex1:
                lay1 = uv_layers[t1map]
                for v in lay1.data:
                    if abs(v.uv[0]) > maxdim:
                        maxdim = abs(v.uv[0])
                        maxdim_uvlayer = lay1
                    if abs(v.uv[1]) > maxdim:
                        maxdim = abs(v.uv[1])
                        maxdim_uvlayer = lay1
        if has_morph_target:
            morph_data = np.empty(num_verts * 2, dtype='<f4')
            lay2 = uv_layers[morph_uv_index]
            for v in lay2.data:
                if abs(v.uv[0]) > maxdim:
                    maxdim = abs(v.uv[0])
                    maxdim_uvlayer = lay2
                if abs(v.uv[1]) > maxdim:
                    maxdim = abs(v.uv[1])
                    maxdim_uvlayer = lay2
        if maxdim > 1:
            o['scale_tex'] = maxdim
            invscale_tex = (1 / o['scale_tex']) * 32767
        else:
            invscale_tex = 1 * 32767
        self.check_uv_precision(export_mesh, maxdim, maxdim_uvlayer, invscale_tex)

    if has_col:
        cdata = np.empty(num_verts * 3, dtype='<f4')

    # Save aabb
    self.calc_aabb(bobject)

    # Scale for packed coords
    maxdim = max(bobject.data.lnx_aabb[0], max(bobject.data.lnx_aabb[1], bobject.data.lnx_aabb[2]))
    if maxdim > 2:
        o['scale_pos'] = maxdim / 2
    else:
        o['scale_pos'] = 1.0
    if has_armature:  # Allow up to 2x bigger bounds for skinned mesh
        o['scale_pos'] *= 2.0

    scale_pos = o['scale_pos']
    invscale_pos = (1 / scale_pos) * 32767

    # Make arrays
    for i, v in enumerate(vert_list):
        v.index = i
        co = v.co
        normal = v.normal
        i4 = i * 4
        i2 = i * 2
        pdata[i4    ] = co[0]
        pdata[i4 + 1] = co[1]
        pdata[i4 + 2] = co[2]
        pdata[i4 + 3] = normal[2] * scale_pos  # Cancel scale
        ndata[i2    ] = normal[0]
        ndata[i2 + 1] = normal[1]
        if has_tex:
            uv = v.uvs[t0map]
            t0data[i2    ] = uv[0]
            t0data[i2 + 1] = 1.0 - uv[1]  # Reverse Y
            if has_tex1:
                uv = v.uvs[t1map]
                t1data[i2    ] = uv[0]
                t1data[i2 + 1] = 1.0 - uv[1]
        if has_morph_target:
            uv = v.uvs[morph_uv_index]
            morph_data[i2    ] = uv[0]
            morph_data[i2 + 1] = 1.0 - uv[1]
        if has_col:
            i3 = i * 3
            cdata[i3    ] = v.col[0]
            cdata[i3 + 1] = v.col[1]
            cdata[i3 + 2] = v.col[2]

    # Indices
    # Create dict for every material slot
    prims = {ma.name if ma else '': [] for ma in export_mesh.materials}
    v_maps = {ma.name if ma else '': [] for ma in export_mesh.materials}
    if not prims:
        # No materials
        prims = {'': []}
        v_maps = {'': []}

    # Create dict of {loop_indices : vertex} with each loop_index in each vertex in Vertex_list
    vert_dict = {i: v for v in vert_list for i in v.loop_indices}
    # For each polygon in a mesh
    for poly in export_mesh.polygons:
        # Index of the first loop of this polygon
        first = poly.loop_start
        # No materials assigned
        if len(export_mesh.materials) == 0:
            # Get prim
            prim = prims['']
            v_map = v_maps['']
        else:
            # First material
            mat = export_mesh.materials[min(poly.material_index, len(export_mesh.materials) - 1)]
            # Get prim for this material
            prim = prims[mat.name if mat else '']
            v_map = v_maps[mat.name if mat else '']
        # List of indices for each loop_index belonging to this polygon
        indices = [vert_dict[i].index for i in range(first, first + poly.loop_total)]
        v_indices = [vert_dict[i].vertex_index for i in range(first, first + poly.loop_total)]

        # If 3 loops per polygon (Triangle?)
        if poly.loop_total == 3:
            prim += indices
            v_map += v_indices
        # If > 3 loops per polygon (Non-Triangular?)
        elif poly.loop_total > 3:
            for i in range(poly.loop_total - 2):
                prim += (indices[-1], indices[i], indices[i + 1])
                v_map += (v_indices[-1], v_indices[i], v_indices[i + 1])

    # Write indices
    o['index_arrays'] = []
    for mat, prim in prims.items():
        idata = [0] * len(prim)
        v_map_data = [0] * len(prim)
        v_map_sub = v_maps[mat]
        for i, v in enumerate(prim):
            idata[i] = v
            v_map_data[i] = v_map_sub[i]
        if len(idata) == 0:  # No face assigned
            continue
        ia = {'values': idata, 'material': 0, 'vertex_map': v_map_data}
        # Find material index for multi-mat mesh
        if len(export_mesh.materials) > 1:
            for i in range(0, len(export_mesh.materials)):
                if (export_mesh.materials[i] is not None and mat == export_mesh.materials[i].name) or \
                   (export_mesh.materials[i] is None and mat == ''):  # Default material for empty slots
                    ia['material'] = i
                    break
        o['index_arrays'].append(ia)

    if has_tang:
        tangdata = calc_tangents(pdata, ndata, t0data, o['index_arrays'], scale_pos)

    pdata *= invscale_pos
    ndata *= 32767
    pdata = np.array(pdata, dtype='<i2')
    ndata = np.array(ndata, dtype='<i2')
    if has_tex:
        t0data *= invscale_tex
        t0data = np.array(t0data, dtype='<i2')
        if has_tex1:
            t1data *= invscale_tex
            t1data = np.array(t1data, dtype='<i2')
    if has_morph_target:
        morph_data *= invscale_tex
        morph_data = np.array(morph_data, dtype='<i2')
    if has_col:
        cdata *= 32767
        cdata = np.array(cdata, dtype='<i2')
    if has_tang:
        tangdata *= 32767
        tangdata = np.array(tangdata, dtype='<i2')

    # Output
+    o['sorting_index'] = bobject.lnx_sorting_index
    o['vertex_arrays'] = []
    o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
    o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
    if has_tex:
        o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' })
        if has_tex1:
            o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' })
    if has_morph_target:
        o['vertex_arrays'].append({ 'attrib': 'morph', 'values': morph_data, 'data': 'short2norm' })
    if has_col:
        o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 })
    if has_tang:
        o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 })

    return vert_list


def export_skin(self, bobject, armature, vert_list, o):
    # This function exports all skinning data, which includes the skeleton
    # and per-vertex bone influence data
    oskin = {}
    o['skin'] = oskin

    # Write the skin bind pose transform
    otrans = {}
    oskin['transform'] = otrans
    otrans['values'] = self.write_matrix(bobject.matrix_world)

    # Write the bone object reference array
    oskin['bone_ref_array'] = []
    oskin['bone_len_array'] = []

    bone_array = armature.data.bones
    bone_count = len(bone_array)
    rpdat = lnx.utils.get_rp()
    max_bones = rpdat.lnx_skin_max_bones
    if bone_count > max_bones:
        log.warn(bobject.name + ' - ' + str(bone_count) + ' bones found, exceeds maximum of ' + str(max_bones) + ' bones defined - raise the value in Camera Data - Leenkx Render Props - Max Bones')

    for i in range(bone_count):
        boneRef = self.find_bone(bone_array[i].name)
        if boneRef:
            oskin['bone_ref_array'].append(boneRef[1]["structName"])
            oskin['bone_len_array'].append(bone_array[i].length)
        else:
            oskin['bone_ref_array'].append("")
            oskin['bone_len_array'].append(0.0)

    # Write the bind pose transform array
    oskin['transformsI'] = []
    for i in range(bone_count):
        skeletonI = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe()
        skeletonI = (skeletonI @ bobject.matrix_world)
        oskin['transformsI'].append(self.write_matrix(skeletonI))

    # Export the per-vertex bone influence data
    group_remap = []
    for group in bobject.vertex_groups:
        for i in range(bone_count):
            if bone_array[i].name == group.name:
                group_remap.append(i)
                break
        else:
            group_remap.append(-1)

    bone_count_array = np.empty(len(vert_list), dtype='<i2')
    bone_index_array = np.empty(len(vert_list) * 4, dtype='<i2')
    bone_weight_array = np.empty(len(vert_list) * 4, dtype='<i2')

    vertices = bobject.data.vertices
    count = 0
    for index, v in enumerate(vert_list):
        bone_count = 0
        total_weight = 0.0
        bone_values = []
        for g in vertices[v.vertex_index].groups:
            bone_index = group_remap[g.group]
            bone_weight = g.weight
            if bone_index >= 0:  # and bone_weight != 0.0:
                bone_values.append((bone_weight, bone_index))
                total_weight += bone_weight
                bone_count += 1

        if bone_count > 4:
            bone_count = 4
            bone_values.sort(reverse=True)
            bone_values = bone_values[:4]

        bone_count_array[index] = bone_count
        for bv in bone_values:
            bone_weight_array[count] = bv[0] * 32767
            bone_index_array[count] = bv[1]
            count += 1

        if total_weight not in (0.0, 1.0):
            normalizer = 1.0 / total_weight
            for i in range(bone_count):
                bone_weight_array[count - i - 1] *= normalizer

    oskin['bone_count_array'] = bone_count_array
    oskin['bone_index_array'] = bone_index_array[:count]
    oskin['bone_weight_array'] = bone_weight_array[:count]

    # Bone constraints
    for bone in armature.pose.bones:
        if len(bone.constraints) > 0:
            if 'constraints' not in oskin:
                oskin['constraints'] = []
            self.add_constraints(bone, oskin, bone=True)
"""
|
||||
Exports smaller geometry but is slower.
|
||||
To be replaced with https://github.com/zeux/meshoptimizer
|
||||
"""
|
||||
from typing import Optional
|
||||
|
||||
import bpy
|
||||
from mathutils import Vector
|
||||
import numpy as np
|
||||
|
||||
import lnx.utils
|
||||
from lnx import log
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
log = lnx.reload_module(log)
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
class Vertex:
|
||||
__slots__ = ("co", "normal", "uvs", "col", "loop_indices", "index", "bone_weights", "bone_indices", "bone_count", "vertex_index")
|
||||
|
||||
def __init__(self, mesh: bpy.types.Mesh, loop: bpy.types.MeshLoop, vcol0: Optional[bpy.types.Attribute]):
|
||||
self.vertex_index = loop.vertex_index
|
||||
loop_idx = loop.index
|
||||
self.co = mesh.vertices[self.vertex_index].co[:]
|
||||
self.normal = loop.normal[:]
|
||||
self.uvs = tuple(layer.data[loop_idx].uv[:] for layer in mesh.uv_layers)
|
||||
self.col = [0.0, 0.0, 0.0] if vcol0 is None else vcol0.data[loop_idx].color[:]
|
||||
self.loop_indices = [loop_idx]
|
||||
self.index = 0
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.co, self.normal, self.uvs))
|
||||
|
||||
def __eq__(self, other):
|
||||
eq = (
|
||||
(self.co == other.co) and
|
||||
(self.normal == other.normal) and
|
||||
(self.uvs == other.uvs) and
|
||||
(self.col == other.col)
|
||||
)
|
||||
if eq:
|
||||
indices = self.loop_indices + other.loop_indices
|
||||
self.loop_indices = indices
|
||||
other.loop_indices = indices
|
||||
return eq
|
||||
|
||||
|
||||
def calc_tangents(posa, nora, uva, ias, scale_pos):
|
||||
num_verts = int(len(posa) / 4)
|
||||
tangents = np.empty(num_verts * 3, dtype='<f4')
|
||||
# bitangents = np.empty(num_verts * 3, dtype='<f4')
|
||||
for ar in ias:
|
||||
ia = ar['values']
|
||||
num_tris = int(len(ia) / 3)
|
||||
for i in range(0, num_tris):
|
||||
i0 = ia[i * 3 ]
|
||||
i1 = ia[i * 3 + 1]
|
||||
i2 = ia[i * 3 + 2]
|
||||
v0 = Vector((posa[i0 * 4], posa[i0 * 4 + 1], posa[i0 * 4 + 2]))
|
||||
v1 = Vector((posa[i1 * 4], posa[i1 * 4 + 1], posa[i1 * 4 + 2]))
|
||||
v2 = Vector((posa[i2 * 4], posa[i2 * 4 + 1], posa[i2 * 4 + 2]))
|
||||
uv0 = Vector((uva[i0 * 2], uva[i0 * 2 + 1]))
|
||||
uv1 = Vector((uva[i1 * 2], uva[i1 * 2 + 1]))
|
||||
uv2 = Vector((uva[i2 * 2], uva[i2 * 2 + 1]))
|
||||
|
||||
deltaPos1 = v1 - v0
|
||||
deltaPos2 = v2 - v0
|
||||
deltaUV1 = uv1 - uv0
|
||||
deltaUV2 = uv2 - uv0
|
||||
d = (deltaUV1.x * deltaUV2.y - deltaUV1.y * deltaUV2.x)
|
||||
if d != 0:
|
||||
r = 1.0 / d
|
||||
else:
|
||||
r = 1.0
|
||||
tangent = (deltaPos1 * deltaUV2.y - deltaPos2 * deltaUV1.y) * r
|
||||
# bitangent = (deltaPos2 * deltaUV1.x - deltaPos1 * deltaUV2.x) * r
|
||||
|
||||
tangents[i0 * 3 ] += tangent.x
|
||||
tangents[i0 * 3 + 1] += tangent.y
|
||||
tangents[i0 * 3 + 2] += tangent.z
|
||||
tangents[i1 * 3 ] += tangent.x
|
||||
tangents[i1 * 3 + 1] += tangent.y
|
||||
tangents[i1 * 3 + 2] += tangent.z
|
||||
tangents[i2 * 3 ] += tangent.x
|
||||
tangents[i2 * 3 + 1] += tangent.y
|
||||
tangents[i2 * 3 + 2] += tangent.z
|
||||
# bitangents[i0 * 3 ] += bitangent.x
|
||||
# bitangents[i0 * 3 + 1] += bitangent.y
|
||||
# bitangents[i0 * 3 + 2] += bitangent.z
|
||||
# bitangents[i1 * 3 ] += bitangent.x
|
||||
# bitangents[i1 * 3 + 1] += bitangent.y
|
||||
# bitangents[i1 * 3 + 2] += bitangent.z
|
||||
# bitangents[i2 * 3 ] += bitangent.x
|
||||
# bitangents[i2 * 3 + 1] += bitangent.y
|
||||
# bitangents[i2 * 3 + 2] += bitangent.z
|
||||
# Orthogonalize
|
||||
for i in range(0, num_verts):
|
||||
t = Vector((tangents[i * 3], tangents[i * 3 + 1], tangents[i * 3 + 2]))
|
||||
# b = Vector((bitangents[i * 3], bitangents[i * 3 + 1], bitangents[i * 3 + 2]))
|
||||
n = Vector((nora[i * 2], nora[i * 2 + 1], posa[i * 4 + 3] / scale_pos))
|
||||
v = t - n * n.dot(t)
|
||||
v.normalize()
|
||||
# Calculate handedness
|
||||
# cnv = n.cross(v)
|
||||
# if cnv.dot(b) < 0.0:
|
||||
# v = v * -1.0
|
||||
tangents[i * 3 ] = v.x
|
||||
tangents[i * 3 + 1] = v.y
|
||||
tangents[i * 3 + 2] = v.z
|
||||
return tangents
|
||||
|
||||
|
||||
def export_mesh_data(self, export_mesh: bpy.types.Mesh, bobject: bpy.types.Object, o, has_armature=False):
|
||||
if bpy.app.version < (4, 1, 0):
|
||||
export_mesh.calc_normals_split()
|
||||
else:
|
||||
updated_normals = export_mesh.corner_normals
|
||||
# exportMesh.calc_loop_triangles()
|
||||
vcol0 = self.get_nth_vertex_colors(export_mesh, 0)
|
||||
vert_list = {Vertex(export_mesh, loop, vcol0): 0 for loop in export_mesh.loops}.keys()
|
||||
num_verts = len(vert_list)
|
||||
num_uv_layers = len(export_mesh.uv_layers)
|
||||
# Check if shape keys were exported
|
||||
has_morph_target = self.get_shape_keys(bobject.data)
|
||||
if has_morph_target:
|
||||
# Shape keys UV are exported separately, so reduce UV count by 1
|
||||
num_uv_layers -= 1
|
||||
morph_uv_index = self.get_morph_uv_index(bobject.data)
|
||||
has_tex = self.get_export_uvs(export_mesh) or num_uv_layers > 0 # TODO FIXME: this should use an `and` instead of `or`. Workaround to completely ignore if the mesh has the `export_uvs` flag. Only checking the `uv_layers` to bypass issues with materials in linked objects.
|
||||
if self.has_baked_material(bobject, export_mesh.materials):
|
||||
has_tex = True
|
||||
has_tex1 = has_tex and num_uv_layers > 1
|
||||
num_colors = self.get_num_vertex_colors(export_mesh)
|
||||
has_col = self.get_export_vcols(export_mesh) and num_colors > 0
|
||||
has_tang = self.has_tangents(export_mesh)
|
||||
|
||||
pdata = np.empty(num_verts * 4, dtype='<f4') # p.xyz, n.z
|
||||
ndata = np.empty(num_verts * 2, dtype='<f4') # n.xy
|
||||
if has_tex or has_morph_target:
|
||||
uv_layers = export_mesh.uv_layers
|
||||
maxdim = 1.0
|
||||
maxdim_uvlayer = None
|
||||
if has_tex:
|
||||
t0map = 0 # Get active uvmap
|
||||
t0data = np.empty(num_verts * 2, dtype='<f4')
|
||||
if uv_layers is not None:
|
||||
if 'UVMap_baked' in uv_layers:
|
||||
for i in range(0, len(uv_layers)):
|
||||
if uv_layers[i].name == 'UVMap_baked':
|
||||
t0map = i
|
||||
break
|
||||
else:
|
||||
for i in range(0, len(uv_layers)):
|
||||
if uv_layers[i].active_render and uv_layers[i].name != 'UVMap_shape_key':
|
||||
t0map = i
|
||||
break
|
||||
if has_tex1:
|
||||
for i in range(0, len(uv_layers)):
|
||||
# Not UVMap 0
|
||||
if i != t0map:
|
||||
# Not Shape Key UVMap
|
||||
if has_morph_target and uv_layers[i].name == 'UVMap_shape_key':
|
||||
continue
|
||||
# Neither UVMap 0 Nor Shape Key Map
|
||||
t1map = i
|
||||
t1data = np.empty(num_verts * 2, dtype='<f4')
|
||||
# Scale for packed coords
|
||||
lay0 = uv_layers[t0map]
|
||||
maxdim_uvlayer = lay0
|
||||
for v in lay0.data:
|
||||
if abs(v.uv[0]) > maxdim:
|
||||
maxdim = abs(v.uv[0])
|
||||
if abs(v.uv[1]) > maxdim:
|
||||
maxdim = abs(v.uv[1])
|
||||
if has_tex1:
|
||||
lay1 = uv_layers[t1map]
|
||||
for v in lay1.data:
|
||||
if abs(v.uv[0]) > maxdim:
|
||||
maxdim = abs(v.uv[0])
|
||||
maxdim_uvlayer = lay1
|
||||
if abs(v.uv[1]) > maxdim:
|
||||
maxdim = abs(v.uv[1])
|
||||
maxdim_uvlayer = lay1
|
||||
if has_morph_target:
|
||||
morph_data = np.empty(num_verts * 2, dtype='<f4')
|
||||
lay2 = uv_layers[morph_uv_index]
|
||||
for v in lay2.data:
|
||||
if abs(v.uv[0]) > maxdim:
|
||||
maxdim = abs(v.uv[0])
|
||||
maxdim_uvlayer = lay2
|
||||
if abs(v.uv[1]) > maxdim:
|
||||
maxdim = abs(v.uv[1])
|
||||
maxdim_uvlayer = lay2
|
||||
if maxdim > 1:
|
||||
o['scale_tex'] = maxdim
|
||||
invscale_tex = (1 / o['scale_tex']) * 32767
|
||||
else:
|
||||
invscale_tex = 1 * 32767
|
||||
self.check_uv_precision(export_mesh, maxdim, maxdim_uvlayer, invscale_tex)
|
||||
|
||||
if has_col:
|
||||
cdata = np.empty(num_verts * 3, dtype='<f4')
|
||||
|
||||
# Save aabb
|
||||
self.calc_aabb(bobject)
|
||||
|
||||
# Scale for packed coords
|
||||
maxdim = max(bobject.data.lnx_aabb[0], max(bobject.data.lnx_aabb[1], bobject.data.lnx_aabb[2]))
|
||||
if maxdim > 2:
|
||||
o['scale_pos'] = maxdim / 2
|
||||
else:
|
||||
o['scale_pos'] = 1.0
|
||||
if has_armature: # Allow up to 2x bigger bounds for skinned mesh
|
||||
o['scale_pos'] *= 2.0
|
||||
|
||||
scale_pos = o['scale_pos']
|
||||
invscale_pos = (1 / scale_pos) * 32767
|
||||
|
||||
# Make arrays
|
||||
for i, v in enumerate(vert_list):
|
||||
v.index = i
|
||||
co = v.co
|
||||
normal = v.normal
|
||||
i4 = i * 4
|
||||
i2 = i * 2
|
||||
pdata[i4 ] = co[0]
|
||||
pdata[i4 + 1] = co[1]
|
||||
pdata[i4 + 2] = co[2]
|
||||
pdata[i4 + 3] = normal[2] * scale_pos # Cancel scale
|
||||
ndata[i2 ] = normal[0]
|
||||
ndata[i2 + 1] = normal[1]
|
||||
if has_tex:
|
||||
uv = v.uvs[t0map]
|
||||
t0data[i2 ] = uv[0]
|
||||
t0data[i2 + 1] = 1.0 - uv[1] # Reverse Y
|
||||
if has_tex1:
|
||||
uv = v.uvs[t1map]
|
||||
t1data[i2 ] = uv[0]
|
||||
t1data[i2 + 1] = 1.0 - uv[1]
|
||||
if has_morph_target:
|
||||
uv = v.uvs[morph_uv_index]
|
||||
morph_data[i2 ] = uv[0]
|
||||
morph_data[i2 + 1] = 1.0 - uv[1]
|
||||
if has_col:
|
||||
i3 = i * 3
|
||||
cdata[i3 ] = v.col[0]
|
||||
cdata[i3 + 1] = v.col[1]
|
||||
cdata[i3 + 2] = v.col[2]
|
||||
|
||||
# Indices
|
||||
# Create dict for every material slot
|
||||
prims = {ma.name if ma else '': [] for ma in export_mesh.materials}
|
||||
v_maps = {ma.name if ma else '': [] for ma in export_mesh.materials}
|
||||
if not prims:
|
||||
# No materials
|
||||
prims = {'': []}
|
||||
v_maps = {'': []}
|
||||
|
||||
# Create dict of {loop_indices : vertex} with each loop_index in each vertex in Vertex_list
|
||||
vert_dict = {i : v for v in vert_list for i in v.loop_indices}
|
||||
# For each polygon in a mesh
|
||||
for poly in export_mesh.polygons:
|
||||
# Index of the first loop of this polygon
|
||||
first = poly.loop_start
|
||||
# No materials assigned
|
||||
if len(export_mesh.materials) == 0:
|
||||
# Get prim
|
||||
prim = prims['']
|
||||
v_map = v_maps['']
|
||||
else:
|
||||
# First material
|
||||
mat = export_mesh.materials[min(poly.material_index, len(export_mesh.materials) - 1)]
|
||||
# Get prim for this material
|
||||
prim = prims[mat.name if mat else '']
|
||||
v_map = v_maps[mat.name if mat else '']
|
||||
# List of indices for each loop_index belonging to this polygon
|
||||
indices = [vert_dict[i].index for i in range(first, first+poly.loop_total)]
|
||||
v_indices = [vert_dict[i].vertex_index for i in range(first, first+poly.loop_total)]
|
||||
|
||||
# If 3 loops per polygon (Triangle?)
|
||||
if poly.loop_total == 3:
|
||||
prim += indices
|
||||
v_map += v_indices
|
||||
# If > 3 loops per polygon (Non-Triangular?)
|
||||
elif poly.loop_total > 3:
|
||||
for i in range(poly.loop_total-2):
|
||||
prim += (indices[-1], indices[i], indices[i + 1])
|
||||
v_map += (v_indices[-1], v_indices[i], v_indices[i + 1])

        # Write indices
        o['index_arrays'] = []
        for mat, prim in prims.items():
            idata = [0] * len(prim)
            v_map_data = [0] * len(prim)
            v_map_sub = v_maps[mat]
            for i, v in enumerate(prim):
                idata[i] = v
                v_map_data[i] = v_map_sub[i]
            if len(idata) == 0:  # No face assigned
                continue
            ia = {'values': idata, 'material': 0, 'vertex_map': v_map_data}
            # Find material index for multi-mat mesh
            if len(export_mesh.materials) > 1:
                for i in range(0, len(export_mesh.materials)):
                    if (export_mesh.materials[i] is not None and mat == export_mesh.materials[i].name) or \
                       (export_mesh.materials[i] is None and mat == ''):  # Default material for empty slots
                        ia['material'] = i
                        break
            o['index_arrays'].append(ia)

        if has_tang:
            tangdata = calc_tangents(pdata, ndata, t0data, o['index_arrays'], scale_pos)

        pdata *= invscale_pos
        ndata *= 32767
        pdata = np.array(pdata, dtype='<i2')
        ndata = np.array(ndata, dtype='<i2')
        if has_tex:
            t0data *= invscale_tex
            t0data = np.array(t0data, dtype='<i2')
        if has_tex1:
            t1data *= invscale_tex
            t1data = np.array(t1data, dtype='<i2')
        if has_morph_target:
            morph_data *= invscale_tex
            morph_data = np.array(morph_data, dtype='<i2')
        if has_col:
            cdata *= 32767
            cdata = np.array(cdata, dtype='<i2')
        if has_tang:
            tangdata *= 32767
            tangdata = np.array(tangdata, dtype='<i2')
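
The conversions above pack the float arrays into little-endian signed 16-bit shorts: normals, colors and tangents are scaled by 32767 directly, while positions and UVs go through invscale_pos / invscale_tex so the runtime can undo the packing with scale_pos / scale_tex. A round-trip sketch for one coordinate, assuming (not shown in this diff) that invscale_pos = (1 / scale_pos) * 32767:

    import numpy as np

    scale_pos = 2.5                                # hypothetical aabb half-extent
    invscale_pos = (1 / scale_pos) * 32767         # assumed definition, see above
    co = 1.3                                       # one float position coordinate
    packed = np.int16(co * invscale_pos)           # what 'pdata *= invscale_pos' plus the '<i2' cast do
    restored = float(packed) / 32767 * scale_pos   # what the runtime's scale_pos undoes
    assert abs(restored - co) < scale_pos / 32767  # quantization error stays below one step
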

        # Output
        o['sorting_index'] = bobject.lnx_sorting_index
        o['vertex_arrays'] = []
        o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
        o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
        if has_tex:
            o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' })
        if has_tex1:
            o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' })
        if has_morph_target:
            o['vertex_arrays'].append({ 'attrib': 'morph', 'values': morph_data, 'data': 'short2norm' })
        if has_col:
            o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 })
        if has_tang:
            o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 })

        return vert_list

    def export_skin(self, bobject, armature, vert_list, o):
        # This function exports all skinning data, which includes the skeleton
        # and per-vertex bone influence data
        oskin = {}
        o['skin'] = oskin

        # Write the skin bind pose transform
        otrans = {}
        oskin['transform'] = otrans
        otrans['values'] = self.write_matrix(bobject.matrix_world)

        # Write the bone object reference array
        oskin['bone_ref_array'] = []
        oskin['bone_len_array'] = []

        bone_array = armature.data.bones
        bone_count = len(bone_array)
        rpdat = lnx.utils.get_rp()
        max_bones = rpdat.lnx_skin_max_bones
        if bone_count > max_bones:
            log.warn(bobject.name + ' - ' + str(bone_count) + ' bones found, exceeds maximum of ' + str(max_bones) + ' bones defined - raise the value in Camera Data - Leenkx Render Props - Max Bones')

        for i in range(bone_count):
            boneRef = self.find_bone(bone_array[i].name)
            if boneRef:
                oskin['bone_ref_array'].append(boneRef[1]["structName"])
                oskin['bone_len_array'].append(bone_array[i].length)
            else:
                oskin['bone_ref_array'].append("")
                oskin['bone_len_array'].append(0.0)

        # Write the bind pose transform array
        oskin['transformsI'] = []
        for i in range(bone_count):
            skeletonI = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe()
            skeletonI = (skeletonI @ bobject.matrix_world)
            oskin['transformsI'].append(self.write_matrix(skeletonI))

        # Export the per-vertex bone influence data
        group_remap = []
        for group in bobject.vertex_groups:
            for i in range(bone_count):
                if bone_array[i].name == group.name:
                    group_remap.append(i)
                    break
            else:
                group_remap.append(-1)

        bone_count_array = np.empty(len(vert_list), dtype='<i2')
        bone_index_array = np.empty(len(vert_list) * 4, dtype='<i2')
        bone_weight_array = np.empty(len(vert_list) * 4, dtype='<i2')

        vertices = bobject.data.vertices
        count = 0
        for index, v in enumerate(vert_list):
            bone_count = 0
            total_weight = 0.0
            bone_values = []
            for g in vertices[v.vertex_index].groups:
                bone_index = group_remap[g.group]
                bone_weight = g.weight
                if bone_index >= 0: #and bone_weight != 0.0:
                    bone_values.append((bone_weight, bone_index))
                    total_weight += bone_weight
                    bone_count += 1

            if bone_count > 4:
                bone_count = 4
                bone_values.sort(reverse=True)
                bone_values = bone_values[:4]

            bone_count_array[index] = bone_count
            for bv in bone_values:
                bone_weight_array[count] = bv[0] * 32767
                bone_index_array[count] = bv[1]
                count += 1

            if total_weight not in (0.0, 1.0):
                normalizer = 1.0 / total_weight
                for i in range(bone_count):
                    bone_weight_array[count - i - 1] *= normalizer

        oskin['bone_count_array'] = bone_count_array
        oskin['bone_index_array'] = bone_index_array[:count]
        oskin['bone_weight_array'] = bone_weight_array[:count]

        # Bone constraints
        for bone in armature.pose.bones:
            if len(bone.constraints) > 0:
                if 'constraints' not in oskin:
                    oskin['constraints'] = []
                self.add_constraints(bone, oskin, bone=True)
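
The influence loop above keeps at most four bone influences per vertex: weights are sorted, the strongest four kept, scaled to 0..32767, and renormalized in place when the Blender weights do not already sum to 1. Note that the normalizer divides by the total over all accepted influences, including any dropped beyond the fourth. The same logic reduced to a plain-Python sketch with hypothetical data:

    def pack_influences(groups, max_influences=4):
        # groups: hypothetical (weight, bone_index) pairs for one vertex
        total = sum(w for w, _ in groups)
        values = sorted(groups, reverse=True)[:max_influences]
        packed = [(int(w * 32767), i) for w, i in values]
        if total not in (0.0, 1.0):
            packed = [(int(p / total), i) for p, i in packed]  # mirrors the in-place normalizer
        return packed

    print(pack_influences([(0.5, 0), (0.25, 2), (0.1, 3), (0.05, 1), (0.02, 4)]))
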

@ -1,4 +1,16 @@
import bpy, os, subprocess, sys, platform, aud, json, datetime, socket
import bpy, os, subprocess, sys, platform, json, datetime, socket


aud = None
try:
    import aud
except (ImportError, AttributeError) as e:

    if any(err in str(e) for err in ["numpy.core.multiarray", "_ARRAY_API", "compiled using NumPy 1.x"]):
        print("Info: Audio features unavailable due to NumPy version compatibility.")
    else:
        print(f"Warning: Audio module unavailable: {e}")
    aud = None

from . import encoding, pack, log
from . cycles import lightmap, prepare, nodes, cache
@ -1117,9 +1129,12 @@ def manage_build(background_pass=False, load_atlas=0):
            scriptDir = os.path.dirname(os.path.realpath(__file__))
            sound_path = os.path.abspath(os.path.join(scriptDir, '..', 'assets/'+soundfile))

            device = aud.Device()
            sound = aud.Sound.file(sound_path)
            device.play(sound)
            if aud is not None:
                device = aud.Device()
                sound = aud.Sound.file(sound_path)
                device.play(sound)
            else:
                print("Build completed!")

        if logging:
            print("Log file output:")

@ -16,3 +16,9 @@ class ArraySpliceNode(LnxLogicTreeNode):

        self.add_output('LnxNodeSocketAction', 'Out')
        self.add_output('LnxNodeSocketArray', 'Array')

    def get_replacement_node(self, node_tree: bpy.types.NodeTree):
        if self.lnx_version not in (0, 1):
            raise LookupError()

        return NodeReplacement.Identity(self)

@ -17,6 +17,17 @@ class OnEventNode(LnxLogicTreeNode):
        'custom': 'Custom'
    }

    def update(self):
        if self.property1 != 'custom':
            if self.inputs[0].is_linked:
                self.label = f'{self.bl_label}: {self.property1}'
            else:
                self.label = f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
        elif self.inputs[1].is_linked:
            self.label = f'{self.bl_label}: {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'

    def set_mode(self, context):
        if self.property1 != 'custom':
            if len(self.inputs) > 1:
@ -25,7 +36,17 @@ class OnEventNode(LnxLogicTreeNode):
            if len(self.inputs) < 2:
                self.add_input('LnxNodeSocketAction', 'In')
                self.inputs.move(1, 0)


        if self.property1 != 'custom':
            if self.inputs[0].is_linked:
                self.label = f'{self.bl_label}: {self.property1}'
            else:
                self.label = f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
        elif self.inputs[1].is_linked:
            self.label = f'{self.bl_label}: {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'

    # Use a new property to preserve compatibility
    property1: HaxeEnumProperty(
        'property1',
@ -52,9 +73,15 @@ class OnEventNode(LnxLogicTreeNode):
        layout.prop(self, 'property1', text='')

    def draw_label(self) -> str:
        if self.inputs[0].is_linked:
            return self.bl_label
        return f'{self.bl_label}: {self.inputs[0].get_default_value()}'
        if self.property1 != 'custom':
            if self.inputs[0].is_linked:
                return f'{self.bl_label}: {self.property1}'
            else:
                return f'{self.bl_label}: {self.property1} {self.inputs[0].get_default_value()}'
        elif self.inputs[1].is_linked:
            return f'{self.bl_label}: {self.property1}'
        else:
            return f'{self.bl_label}: {self.property1} {self.inputs[1].get_default_value()}'

    def get_replacement_node(self, node_tree: bpy.types.NodeTree):
        if self.lnx_version not in (0, 1):

@ -7,12 +7,19 @@ class KeyboardNode(LnxLogicTreeNode):
    lnx_section = 'keyboard'
    lnx_version = 2

    def update(self):
        self.label = f'{self.bl_label}: {self.property0} {self.property1}'

    def upd(self, context):
        self.label = f'{self.bl_label}: {self.property0} {self.property1}'


    property0: HaxeEnumProperty(
        'property0',
        items = [('started', 'Started', 'The keyboard button starts to be pressed'),
                 ('down', 'Down', 'The keyboard button is pressed'),
                 ('released', 'Released', 'The keyboard button stops being pressed')],
        name='', default='down')
        name='', default='down', update=upd)

    property1: HaxeEnumProperty(
        'property1',
@ -69,7 +76,7 @@ class KeyboardNode(LnxLogicTreeNode):
                 ('right', 'right', 'right'),
                 ('left', 'left', 'left'),
                 ('down', 'down', 'down'),],
        name='', default='space')
        name='', default='space', update=upd)

    def lnx_init(self, context):
        self.add_output('LnxNodeSocketAction', 'Out')

@ -8,13 +8,25 @@ class MouseNode(LnxLogicTreeNode):
    lnx_section = 'mouse'
    lnx_version = 3

    def update(self):
        if self.property0 != 'moved':
            self.label = f'{self.bl_label}: {self.property0} {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property0}'

    def upd(self, context):
        if self.property0 != 'moved':
            self.label = f'{self.bl_label}: {self.property0} {self.property1}'
        else:
            self.label = f'{self.bl_label}: {self.property0}'

    property0: HaxeEnumProperty(
        'property0',
        items = [('started', 'Started', 'The mouse button begins to be pressed'),
                 ('down', 'Down', 'The mouse button is pressed'),
                 ('released', 'Released', 'The mouse button stops being pressed'),
                 ('moved', 'Moved', 'Moved')],
        name='', default='down')
        name='', default='down', update=upd)
    property1: HaxeEnumProperty(
        'property1',
        items = [('left', 'Left', 'Left mouse button'),
@ -22,7 +34,7 @@ class MouseNode(LnxLogicTreeNode):
                 ('right', 'Right', 'Right mouse button'),
                 ('side1', 'Side 1', 'Side 1 mouse button'),
                 ('side2', 'Side 2', 'Side 2 mouse button')],
        name='', default='left')
        name='', default='left', update=upd)
    property2: HaxeBoolProperty(
        'property2',
        name='Include Debug Console',

@ -18,6 +18,10 @@ class CallGroupNode(LnxLogicTreeNode):
    def lnx_init(self, context):
        pass

    def update(self):
        if self.group_tree:
            self.label = f'Group: {self.group_tree.name}'

    # Function to add input sockets and re-link sockets
    def update_inputs(self, tree, node, inp_sockets, in_links):
        count = 0
@ -58,10 +62,12 @@ class CallGroupNode(LnxLogicTreeNode):
                    tree.links.new(current_socket, link)
            count = count + 1

    def remove_tree(self):
        self.group_tree = None

    def update_sockets(self, context):
        if self.group_tree:
            self.label = f'Group: {self.group_tree.name}'
        else:
            self.label = 'Call Node Group'

        # List to store from and to sockets of connected nodes
        from_socket_list = []
        to_socket_list = []
@ -107,6 +113,10 @@ class CallGroupNode(LnxLogicTreeNode):
    # Property to store group tree pointer
    group_tree: PointerProperty(name='Group', type=bpy.types.NodeTree, update=update_sockets)

    def edit_tree(self):
        self.label = f'Group: {self.group_tree.name}'
        bpy.ops.lnx.edit_group_tree()

    def draw_label(self) -> str:
        if self.group_tree is not None:
            return f'Group: {self.group_tree.name}'
@ -134,8 +144,9 @@ class CallGroupNode(LnxLogicTreeNode):
        op = row_name.operator('lnx.unlink_group_tree', icon='X', text='')
        op.node_index = self.get_id_str()
        row_ops.enabled = not self.group_tree is None
        op = row_ops.operator('lnx.edit_group_tree', icon='FULLSCREEN_ENTER', text='Edit tree')
        op = row_ops.operator('lnx.node_call_func', icon='FULLSCREEN_ENTER', text='Edit tree')
        op.node_index = self.get_id_str()
        op.callback_name = 'edit_tree'

    def get_replacement_node(self, node_tree: bpy.types.NodeTree):
        if self.lnx_version not in (0, 1, 2):

@ -0,0 +1,51 @@
from lnx.logicnode.lnx_nodes import *


class ProbabilisticIndexNode(LnxLogicTreeNode):
    """Picks an index according to the given probability inputs.
    If the probabilities do not sum to 1, they are normalized so
    that they do. Only one output is triggered at a time.

    @output index: the index.
    """

    bl_idname = 'LNProbabilisticIndexNode'
    bl_label = 'Probabilistic Index'
    lnx_section = 'logic'
    lnx_version = 1

    num_choices: IntProperty(default=0, min=0)

    def __init__(self):
        array_nodes[str(id(self))] = self

    def lnx_init(self, context):
        self.add_output('LnxIntSocket', 'Index')

    def draw_buttons(self, context, layout):
        row = layout.row(align=True)

        op = row.operator('lnx.node_call_func', text='New', icon='PLUS', emboss=True)
        op.node_index = str(id(self))
        op.callback_name = 'add_func'
        op2 = row.operator('lnx.node_call_func', text='', icon='X', emboss=True)
        op2.node_index = str(id(self))
        op2.callback_name = 'remove_func'

    def add_func(self):
        self.add_input('LnxFloatSocket', f'Prob Index {self.num_choices}')
        self.num_choices += 1

    def remove_func(self):
        if len(self.inputs) > 0:
            self.inputs.remove(self.inputs[-1])
            self.num_choices -= 1

    def draw_label(self) -> str:
        if self.num_choices == 0:
            return self.bl_label

        return f'{self.bl_label}: [{self.num_choices}]'
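
The selection itself happens in this node's Haxe runtime counterpart, which is not shown in this diff. A sketch of the behavior the docstring promises (normalize the probabilities, then trigger exactly one index), with all names hypothetical:

    import random

    def probabilistic_index(probs):
        total = sum(probs)
        if total <= 0.0:
            return -1  # assumption: no valid choice when every probability is zero
        r = random.uniform(0.0, total)  # sampling against the raw total normalizes implicitly
        acc = 0.0
        for index, p in enumerate(probs):
            acc += p
            if r <= acc:
                return index
        return len(probs) - 1  # guard against floating-point underrun

    print(probabilistic_index([0.2, 0.6, 0.2]))  # 1 about 60% of the time
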

@ -1,7 +1,10 @@
from lnx.logicnode.lnx_nodes import *

class SetWorldNode(LnxLogicTreeNode):
    """Sets the World of the active scene."""
    """Sets the World of the active scene.
    World must be either associated to a scene or have fake user."""


    bl_idname = 'LNSetWorldNode'
    bl_label = 'Set World'
    lnx_version = 1

@ -116,7 +116,73 @@ def remove_readonly(func, path, excinfo):
    os.chmod(path, stat.S_IWRITE)
    func(path)


appended_scenes = []

def load_external_blends():
    global appended_scenes

    wrd = bpy.data.worlds['Lnx']
    if not hasattr(wrd, 'lnx_external_blends_path'):
        return

    external_path = getattr(wrd, 'lnx_external_blends_path', '')
    if not external_path or not external_path.strip():
        return

    abs_path = bpy.path.abspath(external_path.strip())
    if not os.path.exists(abs_path):
        return

    # Walk recursively through all subdirs
    for root, dirs, files in os.walk(abs_path):
        for filename in files:
            if not filename.endswith(".blend"):
                continue

            blend_path = os.path.join(root, filename)
            try:
                with bpy.data.libraries.load(blend_path, link=True) as (data_from, data_to):
                    data_to.scenes = list(data_from.scenes)

                for scn in data_to.scenes:
                    if scn is not None and scn not in appended_scenes:
                        # Make the name unique with the file name
                        scn.name += "_" + filename.replace(".blend", "")
                        appended_scenes.append(scn)

                log.info(f"Loaded external blend: {blend_path}")
            except Exception as e:
                log.error(f"Failed to load external blend {blend_path}: {e}")

def clear_external_scenes():
    global appended_scenes
    if not appended_scenes:
        return

    for scn in appended_scenes:
        try:
            bpy.data.scenes.remove(scn, do_unlink=True)
        except Exception as e:
            log.error(f"Failed to remove scene {scn.name}: {e}")

    for lib in list(bpy.data.libraries):
        try:
            if lib.users == 0:
                bpy.data.libraries.remove(lib)
        except Exception as e:
            log.error(f"Failed to remove library {lib.name}: {e}")

    try:
        bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
    except Exception as e:
        log.error(f"Failed to purge orphan data: {e}")

    appended_scenes = []

def export_data(fp, sdk_path):
    load_external_blends()

    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()
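
These two functions bracket the export: load_external_blends() links every scene found under the configured directory before the scene data is written, and clear_external_scenes() unlinks them afterwards. A hypothetical way to drive this from Blender's Python console (the property itself is registered in the props.py hunk further down):

    import bpy

    wrd = bpy.data.worlds['Lnx']
    wrd.lnx_external_blends_path = '//external'  # '//' resolves relative to the saved .blend
    # export_data() now links every scene from .blend files under that directory,
    # suffixes each scene name with its file name, and removes them again once
    # the export has finished.
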

@ -323,6 +389,8 @@ def export_data(fp, sdk_path):
    state.last_resy = resy
    state.last_scene = scene_name

    clear_external_scenes()

def compile(assets_only=False):
    wrd = bpy.data.worlds['Lnx']
    fp = lnx.utils.get_fp()

@ -39,14 +39,15 @@ def add_world_defs():
    # Store contexts
    if rpdat.rp_hdr == False:
        wrd.world_defs += '_LDR'

    if lnx.utils.get_active_scene().world is not None:
        if lnx.utils.get_active_scene().world.lnx_light_ies_texture:
            wrd.world_defs += '_LightIES'
            assets.add_embedded_data('iestexture.png')

    if lnx.utils.get_active_scene().world.lnx_light_ies_texture == True:
        wrd.world_defs += '_LightIES'
        assets.add_embedded_data('iestexture.png')

    if lnx.utils.get_active_scene().world.lnx_light_clouds_texture == True:
        wrd.world_defs += '_LightClouds'
        assets.add_embedded_data('cloudstexture.png')
        if lnx.utils.get_active_scene().world.lnx_light_clouds_texture:
            wrd.world_defs += '_LightClouds'
            assets.add_embedded_data('cloudstexture.png')

    if rpdat.rp_renderer == 'Deferred':
        assets.add_khafile_def('lnx_deferred')
@ -240,7 +241,7 @@ def build():
        compo_depth = True

    focus_distance = 0.0
    if len(bpy.data.cameras) > 0 and lnx.utils.get_active_scene().camera.data.dof.use_dof:
    if lnx.utils.get_active_scene().camera and lnx.utils.get_active_scene().camera.data.dof.use_dof:
        focus_distance = lnx.utils.get_active_scene().camera.data.dof.focus_distance

    if focus_distance > 0.0:

@ -69,7 +69,7 @@ def build():
    if rpdat.lnx_irradiance:
        # Plain background color
        if '_EnvCol' in world.world_defs:
            world_name = lnx.utils.safestr(world.name)
            world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
            # Irradiance json file name
            world.lnx_envtex_name = world_name
            world.lnx_envtex_irr_name = world_name
@ -99,7 +99,7 @@ def build():
def create_world_shaders(world: bpy.types.World):
    """Creates fragment and vertex shaders for the given world."""
    global shader_datas
    world_name = lnx.utils.safestr(world.name)
    world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
    pass_name = 'World_' + world_name

    shader_props = {
@ -160,7 +160,7 @@ def create_world_shaders(world: bpy.types.World):

def build_node_tree(world: bpy.types.World, frag: Shader, vert: Shader, con: ShaderContext):
    """Generates the shader code for the given world."""
    world_name = lnx.utils.safestr(world.name)
    world_name = lnx.utils.safestr(lnx.utils.asset_name(world) if world.library else world.name)
    world.world_defs = ''
    rpdat = lnx.utils.get_rp()
    wrd = bpy.data.worlds['Lnx']
@ -175,7 +175,7 @@ def build_node_tree(world: bpy.types.World, frag: Shader, vert: Shader, con: Sha
        frag.write('fragColor.rgb = backgroundCol;')
        return

    parser_state = ParserState(ParserContext.WORLD, world.name, world)
    parser_state = ParserState(ParserContext.WORLD, lnx.utils.asset_name(world) if world.library else world.name, world)
    parser_state.con = con
    parser_state.curshader = frag
    parser_state.frag = frag

@ -94,6 +94,7 @@ def parse_material_output(node: bpy.types.Node, custom_particle_node: bpy.types.
    parse_displacement = state.parse_displacement
    particle_info = {
        'index': False,
        'random': False,
        'age': False,
        'lifetime': False,
        'location': False,

@ -254,9 +254,10 @@ def parse_particleinfo(node: bpy.types.ShaderNodeParticleInfo, out_socket: bpy.t
        c.particle_info['index'] = True
        return 'p_index' if particles_on else '0.0'

    # TODO: Random
    # Random
    if out_socket == node.outputs[1]:
        return '0.0'
        c.particle_info['random'] = True
        return 'p_random' if particles_on else '0.0'

    # Age
    elif out_socket == node.outputs[2]:
@ -276,7 +277,7 @@ def parse_particleinfo(node: bpy.types.ShaderNodeParticleInfo, out_socket: bpy.t
    # Size
    elif out_socket == node.outputs[5]:
        c.particle_info['size'] = True
        return '1.0'
        return 'p_size' if particles_on else '1.0'

    # Velocity
    elif out_socket == node.outputs[6]:

@ -58,7 +58,6 @@ def make(context_id, rpasses):
        con['alpha_blend_destination'] = mat.lnx_blending_destination_alpha
        con['alpha_blend_operation'] = mat.lnx_blending_operation_alpha
        con['depth_write'] = False
        con['compare_mode'] = 'less'
    elif particle:
        pass
    # Depth prepass was performed, exclude mat with depth read that
@ -66,6 +65,9 @@ def make(context_id, rpasses):
    elif dprepass and not (rpdat.rp_depth_texture and mat.lnx_depth_read):
        con['depth_write'] = False
        con['compare_mode'] = 'equal'
    else:
        con['depth_write'] = mat.lnx_depth_write
        con['compare_mode'] = mat.lnx_compare_mode

    attachment_format = 'RGBA32' if '_LDR' in wrd.world_defs else 'RGBA64'
    con['color_attachments'] = [attachment_format, attachment_format]

@ -55,6 +55,7 @@ def write(vert, particle_info=None, shadowmap=False):

    # Outs
    out_index = True if particle_info != None and particle_info['index'] else False
    out_random = True if particle_info != None and particle_info['random'] else False
    out_age = True if particle_info != None and particle_info['age'] else False
    out_lifetime = True if particle_info != None and particle_info['lifetime'] else False
    out_location = True if particle_info != None and particle_info['location'] else False
@ -258,6 +259,11 @@ def write(vert, particle_info=None, shadowmap=False):
        vert.add_out('float p_index')
        vert.write('p_index = gl_InstanceID;')

    if out_random:
        vert.add_out('float p_random')
        vert.write('p_random = fract(sin(gl_InstanceID) * 43758.5453);')
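
p_random uses the classic fract(sin(x) * 43758.5453) one-liner: a cheap, stateless hash that turns the instance ID into a stable value in [0, 1) without extra uniforms or buffers. The same formula on the CPU, for intuition only (GPU float32 sin will not reproduce these digits exactly):

    import math

    def p_random(instance_id: int) -> float:
        x = math.sin(instance_id) * 43758.5453
        return x - math.floor(x)  # GLSL fract()

    print([round(p_random(i), 4) for i in range(4)])  # stable per instance ID
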


def write_tilesheet(vert):
    # tilesx, tilesy, framerate - pd[3][0], pd[3][1], pd[3][2]
    vert.write('int frame = int((p_age) / pd[3][2]);')

@ -23,6 +23,7 @@ class ShaderData:
        self.data = {'shader_datas': [self.sd]}
        self.matname = lnx.utils.safesrc(lnx.utils.asset_name(material))
        self.sd['name'] = self.matname + '_data'
        self.sd['next_pass'] = material.lnx_next_pass
        self.sd['contexts'] = []

    def add_context(self, props) -> 'ShaderContext':
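
The exported next_pass field names another material that the runtime draws as an additional pass of the same mesh, so materials can be chained. A sketch (not engine code) of resolving such a chain, with hypothetical material names and a guard so a self-referencing material cannot loop forever:

    def next_pass_chain(materials: dict, start: str) -> list:
        # materials: hypothetical {material name: next_pass name or ''} mapping
        chain = [start]
        seen = {start}
        name = materials.get(start, '')
        while name and name not in seen:
            chain.append(name)
            seen.add(name)
            name = materials.get(name, '')
        return chain

    print(next_pass_chain({'Base': 'Outline', 'Outline': ''}, 'Base'))  # ['Base', 'Outline']
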

@ -142,6 +142,8 @@ def init_properties():
    bpy.types.World.lnx_project_version = StringProperty(name="Version", description="Exported project version", default="1.0.0", update=assets.invalidate_compiler_cache, set=set_version, get=get_version)
    bpy.types.World.lnx_project_version_autoinc = BoolProperty(name="Auto-increment Build Number", description="Auto-increment build number", default=True, update=assets.invalidate_compiler_cache)
    bpy.types.World.lnx_project_bundle = StringProperty(name="Bundle", description="Exported project bundle", default="org.leenkx3d", update=assets.invalidate_compiler_cache, set=set_project_bundle, get=get_project_bundle)
    # External Blend Files
    bpy.types.World.lnx_external_blends_path = StringProperty(name="External Blends", description="Directory containing external blend files to include in export", default="", subtype='DIR_PATH', update=assets.invalidate_compiler_cache)
    # Android Settings
    bpy.types.World.lnx_project_android_sdk_min = IntProperty(name="Minimal Version SDK", description="Minimal Version Android SDK", default=23, min=14, max=30, update=assets.invalidate_compiler_cache)
    bpy.types.World.lnx_project_android_sdk_target = IntProperty(name="Target Version SDK", description="Target Version Android SDK", default=26, min=26, max=30, update=assets.invalidate_compiler_cache)
@ -350,6 +352,7 @@ def init_properties():
        update=assets.invalidate_instance_cache,
        override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_export = BoolProperty(name="Export", description="Export object data", default=True, override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_sorting_index = IntProperty(name="Sorting Index", description="Sorting index for the Render's Draw Order", default=0, override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_spawn = BoolProperty(name="Spawn", description="Auto-add this object when creating scene", default=True, override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_mobile = BoolProperty(name="Mobile", description="Object moves during gameplay", default=False, override={'LIBRARY_OVERRIDABLE'})
    bpy.types.Object.lnx_visible = BoolProperty(name="Visible", description="Render this object", default=True, override={'LIBRARY_OVERRIDABLE'})
@ -436,6 +439,18 @@ def init_properties():
    bpy.types.Material.lnx_depth_read = BoolProperty(name="Read Depth", description="Allow this material to read from a depth texture which is copied from the depth buffer. The meshes using this material will be drawn after all meshes that don't read from the depth texture", default=False)
    bpy.types.Material.lnx_overlay = BoolProperty(name="Overlay", description="Renders the material, unshaded, over other shaded materials", default=False)
    bpy.types.Material.lnx_decal = BoolProperty(name="Decal", default=False)
    bpy.types.Material.lnx_compare_mode = EnumProperty(
        items=[
            ('always', 'Always', 'Always'),
            ('never', 'Never', 'Never'),
            ('less', 'Less', 'Less'),
            ('less_equal', 'Less Equal', 'Less Equal'),
            ('greater', 'Greater', 'Greater'),
            ('greater_equal', 'Greater Equal', 'Greater Equal'),
            ('equal', 'Equal', 'Equal'),
            ('not_equal', 'Not Equal', 'Not Equal'),
        ],
        name="Compare Mode", default='less', description="Comparison mode for the material")
    bpy.types.Material.lnx_two_sided = BoolProperty(name="Two-Sided", description="Flip normal when drawing back-face", default=False)
    bpy.types.Material.lnx_ignore_irradiance = BoolProperty(name="Ignore Irradiance", description="Ignore irradiance for material", default=False)
    bpy.types.Material.lnx_cull_mode = EnumProperty(
@ -443,6 +458,8 @@ def init_properties():
        ('clockwise', 'Front', 'Clockwise'),
        ('counter_clockwise', 'Back', 'Counter-Clockwise')],
        name="Cull Mode", default='clockwise', description="Draw geometry faces")
    bpy.types.Material.lnx_next_pass = StringProperty(
        name="Next Pass", default='', description="Next pass for the material", update=assets.invalidate_shader_cache)
    bpy.types.Material.lnx_discard = BoolProperty(name="Alpha Test", default=False, description="Do not render fragments below specified opacity threshold")
    bpy.types.Material.lnx_discard_opacity = FloatProperty(name="Mesh Opacity", default=0.2, min=0, max=1)
    bpy.types.Material.lnx_discard_opacity_shadows = FloatProperty(name="Shadows Opacity", default=0.1, min=0, max=1)

@ -63,6 +63,7 @@ class LNX_PT_ObjectPropsPanel(bpy.types.Panel):
            return

        col = layout.column()
        col.prop(obj, 'lnx_sorting_index')
        col.prop(obj, 'lnx_export')
        if not obj.lnx_export:
            return
@ -551,6 +552,51 @@ class LNX_OT_NewCustomMaterial(bpy.types.Operator):

        return {'FINISHED'}


class LNX_OT_NextPassMaterialSelector(bpy.types.Operator):
    """Select material for next pass"""
    bl_idname = "lnx.next_pass_material_selector"
    bl_label = "Select Next Pass Material"

    def execute(self, context):
        return {'FINISHED'}

    def invoke(self, context, event):
        context.window_manager.popup_menu(self.draw_menu, title="Select Next Pass Material", icon='MATERIAL')
        return {'FINISHED'}

    def draw_menu(self, popup, context):
        layout = popup.layout

        # Add 'None' option
        op = layout.operator("lnx.set_next_pass_material", text="None")
        op.material_name = ""

        # Add materials from the current object's material slots
        if context.object and hasattr(context.object, 'material_slots'):
            for slot in context.object.material_slots:
                if (slot.material is not None and slot.material != context.material):
                    op = layout.operator("lnx.set_next_pass_material", text=slot.material.name)
                    op.material_name = slot.material.name


class LNX_OT_SetNextPassMaterial(bpy.types.Operator):
    """Set the next pass material"""
    bl_idname = "lnx.set_next_pass_material"
    bl_label = "Set Next Pass Material"

    material_name: StringProperty()

    def execute(self, context):
        if context.material:
            context.material.lnx_next_pass = self.material_name
            # Redraw the UI to update the display
            for area in context.screen.areas:
                if area.type == 'PROPERTIES':
                    area.tag_redraw()
        return {'FINISHED'}


class LNX_PG_BindTexturesListItem(bpy.types.PropertyGroup):
    uniform_name: StringProperty(
        name='Uniform Name',
@ -634,18 +680,23 @@ class LNX_PT_MaterialPropsPanel(bpy.types.Panel):
        mat = bpy.context.material
        if mat is None:
            return


        layout.prop(mat, 'lnx_cast_shadow')
        columnb = layout.column()
        wrd = bpy.data.worlds['Lnx']
        columnb.enabled = len(wrd.lnx_rplist) > 0 and lnx.utils.get_rp().rp_renderer == 'Forward'
        columnb.prop(mat, 'lnx_receive_shadow')
        layout.prop(mat, 'lnx_ignore_irradiance')
        layout.prop(mat, 'lnx_compare_mode')
        layout.prop(mat, 'lnx_two_sided')
        columnb = layout.column()
        columnb.enabled = not mat.lnx_two_sided
        columnb.prop(mat, 'lnx_cull_mode')
        row = layout.row(align=True)
        row.prop(mat, 'lnx_next_pass', text="Next Pass")
        row.operator('lnx.next_pass_material_selector', text='', icon='MATERIAL')
        layout.prop(mat, 'lnx_material_id')
        layout.prop(mat, 'lnx_depth_write')
        layout.prop(mat, 'lnx_depth_read')
        layout.prop(mat, 'lnx_overlay')
        layout.prop(mat, 'lnx_decal')
@ -1229,7 +1280,8 @@ class LNX_PT_ProjectModulesPanel(bpy.types.Panel):

        layout.prop_search(wrd, 'lnx_khafile', bpy.data, 'texts')
        layout.prop(wrd, 'lnx_project_root')

        layout.prop(wrd, 'lnx_external_blends_path')

class LnxVirtualInputPanel(bpy.types.Panel):
    bl_label = "Leenkx Virtual Input"
    bl_space_type = "PROPERTIES"
@ -2267,7 +2319,10 @@ class LnxGenTerrainButton(bpy.types.Operator):
        node.location = (-200, -200)
        node.inputs[0].default_value = 5.0
        links.new(nodes['Bump'].inputs[2], nodes['_TerrainHeight'].outputs[0])
        links.new(nodes['Principled BSDF'].inputs[20], nodes['Bump'].outputs[0])
        if bpy.app.version[0] >= 4:
            links.new(nodes['Principled BSDF'].inputs[22], nodes['Bump'].outputs[0])
        else:
            links.new(nodes['Principled BSDF'].inputs[20], nodes['Bump'].outputs[0])

        # Create sectors
        root_obj = bpy.data.objects.new("Terrain", None)
@ -2300,7 +2355,16 @@ class LnxGenTerrainButton(bpy.types.Operator):
            disp_mod.texture.extension = 'EXTEND'
            disp_mod.texture.use_interpolation = False
            disp_mod.texture.use_mipmap = False
            disp_mod.texture.image = bpy.data.images.load(filepath=scn.lnx_terrain_textures+'/heightmap_' + j + '.png')
            try:
                disp_mod.texture.image = bpy.data.images.load(filepath=scn.lnx_terrain_textures+'/heightmap_' + j + '.png')
            except Exception as e:
                if i == 0: # Only show message once
                    if scn.lnx_terrain_textures.startswith('//') and not bpy.data.filepath:
                        self.report({'INFO'}, "Generating terrain... Save .blend file and add your heightmaps for each sector in "
                                              "the \"Bundled\" folder using the format \"heightmap_01.png\", \"heightmap_02.png\", etc.")
                    else:
                        self.report({'INFO'}, f"Heightmap not found: {scn.lnx_terrain_textures}/heightmap_{j}.png - using blank image")

            f = 1
            levels = 0
            while f < disp_mod.texture.image.size[0]:
@ -2908,6 +2972,8 @@ __REG_CLASSES = (
    InvalidateCacheButton,
    InvalidateMaterialCacheButton,
    LNX_OT_NewCustomMaterial,
    LNX_OT_NextPassMaterialSelector,
    LNX_OT_SetNextPassMaterial,
    LNX_PG_BindTexturesListItem,
    LNX_UL_BindTexturesList,
    LNX_OT_BindTexturesListNewItem,

@ -338,8 +338,8 @@ project.addSources('Sources');
    if rpdat.lnx_particles != 'Off':
        assets.add_khafile_def('lnx_particles')

    if rpdat.rp_draw_order == 'Shader':
        assets.add_khafile_def('lnx_draworder_shader')
    if rpdat.rp_draw_order == 'Index':
        assets.add_khafile_def('lnx_draworder_index')

    if lnx.utils.get_viewport_controls() == 'azerty':
        assets.add_khafile_def('lnx_azerty')
@ -818,7 +818,7 @@ const int compoChromaticSamples = {rpdat.lnx_chromatic_aberration_samples};

    focus_distance = 0.0
    fstop = 0.0
    if len(bpy.data.cameras) > 0 and lnx.utils.get_active_scene().camera.data.dof.use_dof:
    if lnx.utils.get_active_scene().camera and lnx.utils.get_active_scene().camera.data.dof.use_dof:
        focus_distance = lnx.utils.get_active_scene().camera.data.dof.focus_distance
        fstop = lnx.utils.get_active_scene().camera.data.dof.aperture_fstop
        lens = lnx.utils.get_active_scene().camera.data.lens

@ -118,7 +118,8 @@ def render_envmap(target_dir: str, world: bpy.types.World) -> str:
    scene = bpy.data.scenes['_lnx_envmap_render']
    scene.world = world

    image_name = f'env_{lnx.utils.safesrc(world.name)}.{ENVMAP_EXT}'
    world_name = lnx.utils.asset_name(world) if world.library else world.name
    image_name = f'env_{lnx.utils.safesrc(world_name)}.{ENVMAP_EXT}'
    render_path = os.path.join(target_dir, image_name)
    scene.render.filepath = render_path