forked from LeenkxTeam/LNXSDK
Update Files
This commit is contained in:
1
leenkx/blender/lnx/material/__init__.py
Normal file
1
leenkx/blender/lnx/material/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
import lnx
|
BIN
leenkx/blender/lnx/material/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
leenkx/blender/lnx/material/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
leenkx/blender/lnx/material/__pycache__/cycles.cpython-311.pyc
Normal file
BIN
leenkx/blender/lnx/material/__pycache__/cycles.cpython-311.pyc
Normal file
Binary file not shown.
Binary file not shown.
BIN
leenkx/blender/lnx/material/__pycache__/make.cpython-311.pyc
Normal file
BIN
leenkx/blender/lnx/material/__pycache__/make.cpython-311.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
leenkx/blender/lnx/material/__pycache__/shader.cpython-311.pyc
Normal file
BIN
leenkx/blender/lnx/material/__pycache__/shader.cpython-311.pyc
Normal file
Binary file not shown.
977
leenkx/blender/lnx/material/cycles.py
Normal file
977
leenkx/blender/lnx/material/cycles.py
Normal file
@ -0,0 +1,977 @@
|
||||
#
|
||||
# This module builds upon Cycles nodes work licensed as
|
||||
# Copyright 2011-2013 Blender Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import os
|
||||
import shutil
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
|
||||
import bpy
|
||||
|
||||
import lnx.assets
|
||||
import lnx.log as log
|
||||
import lnx.make_state
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
import lnx.material.node_meta as node_meta
|
||||
import lnx.material.mat_state as mat_state
|
||||
from lnx.material.parser_state import ParserState, ParserContext, ParserPass
|
||||
from lnx.material.shader import Shader, ShaderContext, floatstr, vec3str
|
||||
import lnx.node_utils
|
||||
import lnx.utils
|
||||
|
||||
# Hot-reload support: when the add-on is reloaded inside Blender, re-import all
# dependency modules so this module sees their fresh versions. The re-imports
# must mirror the top-of-file imports, including the star imports, so that
# names bound at import time are rebound to the reloaded modules.
if lnx.is_reload(__name__):
    lnx.assets = lnx.reload_module(lnx.assets)
    log = lnx.reload_module(log)
    lnx.make_state = lnx.reload_module(lnx.make_state)
    c_functions = lnx.reload_module(c_functions)
    lnx.material.cycles_nodes = lnx.reload_module(lnx.material.cycles_nodes)
    node_meta = lnx.reload_module(node_meta)
    from lnx.material.cycles_nodes import *
    mat_state = lnx.reload_module(mat_state)
    lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
    from lnx.material.parser_state import ParserState, ParserContext, ParserPass
    lnx.material.shader = lnx.reload_module(lnx.material.shader)
    from lnx.material.shader import Shader, ShaderContext, floatstr, vec3str
    lnx.node_utils = lnx.reload_module(lnx.node_utils)
    lnx.utils = lnx.reload_module(lnx.utils)
else:
    # First import: register this module for future reloads.
    lnx.enable_reload(__name__)


# Particle info export
# Flags recording which particle attributes the parsed node tree referenced;
# filled in by parse_material_output() and read by the exporter.
particle_info: Dict[str, bool] = {}

# Module-level parser state; only valid while parse() is running. It is reset
# to None afterwards so stale use raises instead of silently reusing old state.
state: Optional[ParserState]
|
||||
|
||||
|
||||
def parse(nodes, con: ShaderContext,
          vert: Shader, frag: Shader, geom: Shader, tesc: Shader, tese: Shader,
          parse_surface=True, parse_opacity=True, parse_displacement=True, basecol_only=False):
    """Parse a material node tree and write the generated GLSL into the given shaders.

    Creates a fresh module-level ParserState for this material, locates the
    material output node and parses from there. The state is cleared to None
    afterwards so helper functions raise if called outside of a parse run.

    :param nodes: node collection of the material's node tree
    :param con: shader context receiving vertex elements and bindings
    :param vert/frag/geom/tesc/tese: target shader stages
    :param parse_surface: parse the surface (BSDF) chain
    :param parse_opacity: parse opacity/ior outputs
    :param parse_displacement: parse the displacement input
    :param basecol_only: restrict parsing to base color (skips normal maps)
    """
    global state

    state = ParserState(ParserContext.OBJECT, mat_state.material.name)

    state.parse_surface = parse_surface
    state.parse_opacity = parse_opacity
    state.parse_displacement = parse_displacement
    state.basecol_only = basecol_only

    state.con = con

    state.vert = vert
    state.frag = frag
    state.geom = geom
    state.tesc = tesc
    state.tese = tese

    output_node = node_by_type(nodes, 'OUTPUT_MATERIAL')
    if output_node is not None:
        # Optional custom particle node hooks into the vertex shader
        custom_particle_node = node_by_name(nodes, 'LnxCustomParticleNode')
        parse_material_output(output_node, custom_particle_node)

    # Make sure that individual functions in this module aren't called with an incorrect/old parser state, set it to
    # None so that it will raise exceptions when not set
    state = None
|
||||
|
||||
|
||||
def parse_material_output(node: bpy.types.Node, custom_particle_node: bpy.types.Node):
    """Parse the Surface and Displacement inputs of a material output node.

    Writes the resulting expressions (basecol, roughness, metallic, occlusion,
    specular, emission, opacity, ior, displacement) into the shaders held in
    the module-level parser state, and updates world defines for emission.

    :param node: the OUTPUT_MATERIAL node of the tree
    :param custom_particle_node: optional LnxCustomParticleNode, parsed into
        the vertex shader after surface/displacement handling
    """
    global particle_info

    parse_surface = state.parse_surface
    parse_opacity = state.parse_opacity
    parse_displacement = state.parse_displacement
    # Reset per-material particle attribute usage; node parsers flip these on
    particle_info = {
        'index': False,
        'age': False,
        'lifetime': False,
        'location': False,
        'size': False,
        'velocity': False,
        'angular_velocity': False
    }
    wrd = bpy.data.worlds['Lnx']

    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION

    # Surface
    if parse_surface or parse_opacity:
        # Fresh parse context: no group nesting, nothing parsed yet
        state.parents = []
        state.parsed = set()
        state.normal_parsed = False
        curshader = state.frag
        state.curshader = curshader

        out_basecol, out_roughness, out_metallic, out_occlusion, out_specular, out_opacity, out_ior, out_emission_col = parse_shader_input(node.inputs[0])
        if parse_surface:
            curshader.write(f'basecol = {out_basecol};')
            curshader.write(f'roughness = {out_roughness};')
            curshader.write(f'metallic = {out_metallic};')
            curshader.write(f'occlusion = {out_occlusion};')
            curshader.write(f'specular = {out_specular};')
            curshader.write(f'emissionCol = {out_emission_col};')

            # Emission type is set as a side effect of parse_shader_input above
            if mat_state.emission_type == mat_state.EmissionType.SHADELESS:
                if '_EmissionShadeless' not in wrd.world_defs:
                    wrd.world_defs += '_EmissionShadeless'
            elif mat_state.emission_type == mat_state.EmissionType.SHADED:
                if '_EmissionShaded' not in wrd.world_defs:
                    wrd.world_defs += '_EmissionShaded'
                lnx.assets.add_khafile_def('rp_gbuffer_emission')

        if parse_opacity:
            curshader.write('opacity = {0};'.format(out_opacity))
            curshader.write('ior = {0};'.format(out_ior))

    # Volume
    # parse_volume_input(node.inputs[1])

    # Displacement
    if parse_displacement and disp_enabled() and node.inputs[2].is_linked:
        state.parents = []
        state.parsed = set()
        state.normal_parsed = False
        rpdat = lnx.utils.get_rp()
        # Tessellation evaluation shader takes displacement when enabled,
        # otherwise it is applied in the vertex shader
        if rpdat.lnx_rp_displacement == 'Tessellation' and state.tese is not None:
            state.curshader = state.tese
        else:
            state.curshader = state.vert
        out_disp = parse_displacement_input(node.inputs[2])
        state.curshader.write('vec3 disp = {0};'.format(out_disp))

    if custom_particle_node is not None:
        # Reuse the displacement parse context if one was just set up
        if not (parse_displacement and disp_enabled() and node.inputs[2].is_linked):
            state.parents = []
            state.parsed = set()
            state.normal_parsed = False

        state.curshader = state.vert
        custom_particle_node.parse(state.curshader, state.con)
|
||||
|
||||
|
||||
def parse_group(node, socket):  # Entering group
    """Parse a node group by following the matched socket into the
    group's output node, tracking nesting via state.parents."""
    group_output = node_by_type(node.node_tree.nodes, 'GROUP_OUTPUT')
    if group_output is None:
        return
    inner_socket = group_output.inputs[socket_index(node, socket)]
    state.parents.append(node)
    result = parse_input(inner_socket)
    state.parents.pop()
    return result
|
||||
|
||||
|
||||
def parse_group_input(node: bpy.types.Node, socket: bpy.types.NodeSocket):
    """Resolve a GROUP_INPUT socket by parsing the corresponding input
    socket on the enclosing group node (one level up)."""
    idx = socket_index(node, socket)
    group_node = state.parents.pop()  # Leaving group
    result = parse_input(group_node.inputs[idx])
    state.parents.append(group_node)  # Return to group
    return result
|
||||
|
||||
|
||||
def parse_input(inp: bpy.types.NodeSocket):
    """Dispatch parsing of a socket based on its data type."""
    socket_kind = inp.type
    if socket_kind == 'SHADER':
        return parse_shader_input(inp)
    if socket_kind in ('RGB', 'RGBA', 'VECTOR'):
        return parse_vector_input(inp)
    if socket_kind == 'VALUE':
        return parse_value_input(inp)
|
||||
|
||||
|
||||
def parse_shader_input(inp: bpy.types.NodeSocket) -> Tuple[str, ...]:
    """Follow a shader input socket and return the parsed output tuple
    (basecol, roughness, metallic, occlusion, specular, opacity, ior, emission)."""
    if not inp.is_linked:
        # Return default shader values
        state.reset_outs()
        return state.get_outs()

    link = inp.links[0]
    if link.from_node.type == 'REROUTE':
        # Transparently follow reroute nodes
        return parse_shader_input(link.from_node.inputs[0])

    if link.from_socket.type != 'SHADER':
        log.warn(f'Node tree "{tree_name()}": socket "{link.from_socket.name}" of node "{link.from_node.name}" cannot be connected to a shader socket')
        state.reset_outs()
        return state.get_outs()

    return parse_shader(link.from_node, link.from_socket)
|
||||
|
||||
|
||||
def parse_shader(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> Tuple[str, ...]:
    """Parse a shader (BSDF-like) node and return the output tuple from state.

    Supported BSDF nodes are delegated to their registered parse function in
    node_meta; the special 'Leenkx PBR' node group is handled inline, other
    groups recurse via parse_group.
    """
    supported_node_types = (
        'MIX_SHADER',
        'ADD_SHADER',
        'BSDF_PRINCIPLED',
        'BSDF_DIFFUSE',
        'BSDF_GLOSSY',
        'BSDF_SHEEN',
        'AMBIENT_OCCLUSION',
        'BSDF_ANISOTROPIC',
        'EMISSION',
        'BSDF_GLASS',
        'HOLDOUT',
        'SUBSURFACE_SCATTERING',
        'BSDF_TRANSLUCENT',
        'BSDF_TRANSPARENT',
        'BSDF_VELVET',
    )

    state.reset_outs()

    if node.type in supported_node_types:
        # Node-specific parser fills state.out_* as a side effect
        node_meta.get_node_meta(node).parse_func(node, socket, state)

    elif node.type == 'GROUP':
        if node.node_tree.name.startswith('Leenkx PBR'):
            if state.parse_surface:
                # Normal
                if node.inputs[5].is_linked and node.inputs[5].links[0].from_node.type == 'NORMAL_MAP':
                    log.warn(tree_name() + ' - Do not use Normal Map node with Leenkx PBR, connect Image Texture directly')
                parse_normal_map_color_input(node.inputs[5])

                emission_factor = f'clamp({parse_value_input(node.inputs[6])}, 0.0, 1.0)'
                basecol = parse_vector_input(node.inputs[0])

                # Multiply base color with inverse of emission factor to
                # copy behaviour of the Mix Shader node used in the group
                # (less base color -> less shading influence)
                state.out_basecol = f'({basecol} * (1 - {emission_factor}))'

                state.out_occlusion = parse_value_input(node.inputs[2])
                state.out_roughness = parse_value_input(node.inputs[3])
                state.out_metallic = parse_value_input(node.inputs[4])

                # Emission
                if node.inputs[6].is_linked or node.inputs[6].default_value != 0.0:
                    state.out_emission_col = f'({basecol} * {emission_factor})'
                    mat_state.emission_type = mat_state.EmissionType.SHADED
            if state.parse_opacity:
                state.out_opacity = parse_value_input(node.inputs[1])
                state.out_ior = 1.450;

        else:
            # Regular node group: recurse into it
            return parse_group(node, socket)

    elif node.type == 'GROUP_INPUT':
        return parse_group_input(node, socket)

    elif node.type == 'CUSTOM':
        if node.bl_idname == 'LnxShaderDataNode':
            return node_meta.get_node_meta(node).parse_func(node, socket, state)

    else:
        log.warn(f'Node tree "{tree_name()}": material node type {node.type} not supported')

    return state.get_outs()
|
||||
|
||||
|
||||
def parse_displacement_input(inp):
    """Parse a displacement socket as a vector; returns None when unlinked."""
    if not inp.is_linked:
        return None
    link = inp.links[0]
    if link.from_node.type == 'REROUTE':
        return parse_displacement_input(link.from_node.inputs[0])
    return parse_vector_input(inp)
|
||||
|
||||
|
||||
def parse_vector_input(inp: bpy.types.NodeSocket) -> vec3str:
    """Return the parsed result of the given input socket as a vec3 expression."""
    if inp.is_linked:
        link = inp.links[0]
        if link.from_node.type == 'REROUTE':
            # Transparently follow reroute nodes
            return parse_vector_input(link.from_node.inputs[0])

        res_var = write_result(link)
        from_type = link.from_socket.type
        if from_type in ('RGB', 'RGBA', 'VECTOR'):
            return res_var
        if from_type in ('VALUE', 'INT'):
            # Promote scalar to vector
            return f'vec3({res_var})'
        log.warn(f'Node tree "{tree_name()}": socket "{link.from_socket.name}" of node "{link.from_node.name}" cannot be connected to a vector-like socket')
        return to_vec3([0.0, 0.0, 0.0])

    # Unlinked reroute
    if inp.type == 'VALUE':
        return to_vec3([0.0, 0.0, 0.0])

    # Use direct socket value
    if mat_batch() and inp.is_uniform:
        return to_uniform(inp)
    return to_vec3(inp.default_value)
|
||||
|
||||
|
||||
def parse_vector(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> str:
    """Parses the vector/color output value from the given node and socket.

    Supported nodes delegate to their registered parse function in node_meta;
    groups recurse; anything else warns and yields a black vec3.
    """
    supported_node_types = (
        'ATTRIBUTE',

        # RGB outputs
        'RGB',
        'TEX_BRICK',
        'TEX_CHECKER',
        'TEX_ENVIRONMENT',
        'TEX_GRADIENT',
        'TEX_IMAGE',
        'TEX_MAGIC',
        'TEX_MUSGRAVE',
        'TEX_NOISE',
        'TEX_POINTDENSITY',
        'TEX_SKY',
        'TEX_VORONOI',
        'TEX_WAVE',
        'VERTEX_COLOR',
        'BRIGHTCONTRAST',
        'GAMMA',
        'HUE_SAT',
        'INVERT',
        'MIX',
        'BLACKBODY',
        'VALTORGB',
        'CURVE_VEC',
        'CURVE_RGB',
        'COMBINE_COLOR',
        'COMBHSV',
        'COMBRGB',
        'WAVELENGTH',

        # Vector outputs
        'CAMERA',
        'NEW_GEOMETRY',
        'HAIR_INFO',
        'OBJECT_INFO',
        'PARTICLE_INFO',
        'TANGENT',
        'TEX_COORD',
        'UVMAP',
        'BUMP',
        'MAPPING',
        'NORMAL',
        'NORMAL_MAP',
        'VECT_TRANSFORM',
        'COMBXYZ',
        'VECT_MATH',
        'DISPLACEMENT',
        'VECTOR_ROTATE',
    )

    if node.type in supported_node_types:
        return node_meta.get_node_meta(node).parse_func(node, socket, state)

    elif node.type == 'GROUP':
        return parse_group(node, socket)

    elif node.type == 'GROUP_INPUT':
        return parse_group_input(node, socket)

    elif node.type == 'CUSTOM':
        if node.bl_idname == 'LnxShaderDataNode':
            return node_meta.get_node_meta(node).parse_func(node, socket, state)

    # Fallback: unsupported node, emit black
    log.warn(f'Node tree "{tree_name()}": material node type {node.type} not supported')
    return "vec3(0, 0, 0)"
|
||||
|
||||
|
||||
def parse_normal_map_color_input(inp, strength_input=None):
    """Parse a tangent-space normal map color input into the fragment shader.

    Writes GLSL that perturbs the fragment normal `n`. When tangents are not
    exported (or the material is a decal) a TBN matrix is computed on the fly;
    otherwise the precomputed tangent vertex element is used.

    :param inp: color socket carrying the normal map sample
    :param strength_input: optional value socket scaling the perturbation
    """
    frag = state.frag

    # Guard: only parse once per material, and not in basecol-only mode
    if state.basecol_only or not inp.is_linked or state.normal_parsed:
        return

    state.normal_parsed = True
    # write_normal redirects subsequent writes into the normal section
    frag.write_normal += 1
    if not get_lnx_export_tangents() or mat_get_material().lnx_decal:  # Compute TBN matrix
        frag.write('vec3 texn = ({0}) * 2.0 - 1.0;'.format(parse_vector_input(inp)))
        frag.write('texn.y = -texn.y;')
        frag.add_include('std/normals.glsl')
        frag.write('mat3 TBN = cotangentFrame(n, -vVec, texCoord);')
        frag.write('n = TBN * normalize(texn);')
    else:
        frag.write('n = ({0}) * 2.0 - 1.0;'.format(parse_vector_input(inp)))
        if strength_input is not None:
            strength = parse_value_input(strength_input)
            if strength != '1.0':
                frag.write('n.xy *= {0};'.format(strength))
        frag.write('n = normalize(TBN * n);')
        # Request exported tangent data for the TBN used above
        state.con.add_elem('tang', 'short4norm')
    frag.write_normal -= 1
|
||||
|
||||
|
||||
def parse_value_input(inp: bpy.types.NodeSocket) -> floatstr:
    """Follow a scalar input socket and return its parsed GLSL float expression."""
    if not inp.is_linked:
        # Use value from socket
        if mat_batch() and inp.is_uniform:
            return to_uniform(inp)
        return to_vec1(inp.default_value)

    link = inp.links[0]
    if link.from_node.type == 'REROUTE':
        # Transparently follow reroute nodes
        return parse_value_input(link.from_node.inputs[0])

    res_var = write_result(link)
    from_type = link.from_socket.type
    if from_type in ('RGB', 'RGBA', 'VECTOR'):
        # RGB to BW
        return rgb_to_bw(res_var)
    if from_type in ('VALUE', 'INT'):
        return res_var

    log.warn(f'Node tree "{tree_name()}": socket "{link.from_socket.name}" of node "{link.from_node.name}" cannot be connected to a scalar value socket')
    return '0.0'
|
||||
|
||||
|
||||
def parse_value(node, socket):
    """Parse the scalar output value from the given node and socket.

    Supported nodes delegate to node_meta parse functions; the 'Leenkx PBR'
    group exposes only its displacement output as a value; other groups
    recurse. Unsupported nodes warn and yield '0.0'.
    """
    supported_node_types = (
        'ATTRIBUTE',
        'CAMERA',
        'FRESNEL',
        'NEW_GEOMETRY',
        'HAIR_INFO',
        'LAYER_WEIGHT',
        'LIGHT_PATH',
        'OBJECT_INFO',
        'PARTICLE_INFO',
        'VALUE',
        'WIREFRAME',
        'TEX_BRICK',
        'TEX_CHECKER',
        'TEX_GRADIENT',
        'TEX_IMAGE',
        'TEX_MAGIC',
        'TEX_MUSGRAVE',
        'TEX_NOISE',
        'TEX_POINTDENSITY',
        'TEX_VORONOI',
        'TEX_WAVE',
        'LIGHT_FALLOFF',
        'NORMAL',
        'CLAMP',
        'VALTORGB',
        'MATH',
        'MIX',
        'RGBTOBW',
        'SEPARATE_COLOR',
        'SEPHSV',
        'SEPRGB',
        'SEPXYZ',
        'VECT_MATH',
        'MAP_RANGE',
    )

    if node.type in supported_node_types:
        return node_meta.get_node_meta(node).parse_func(node, socket, state)

    elif node.type == 'GROUP':
        if node.node_tree.name.startswith('Leenkx PBR'):
            # Displacement
            if socket == node.outputs[1]:
                return parse_value_input(node.inputs[7])
            else:
                return None
        else:
            return parse_group(node, socket)

    elif node.type == 'GROUP_INPUT':
        return parse_group_input(node, socket)

    elif node.type == 'CUSTOM':
        if node.bl_idname == 'LnxShaderDataNode':
            return node_meta.get_node_meta(node).parse_func(node, socket, state)

    log.warn(f'Node tree "{tree_name()}": material node type {node.type} not supported')
    return '0.0'
|
||||
|
||||
|
||||
def vector_curve(name, fac, points):
    """Emit GLSL evaluating a single curve channel at factor `fac`.

    Writes per-point X and Y arrays plus an index computation into the current
    shader and returns a `mix(...)` expression linearly interpolating between
    the two neighbouring curve points.

    :param name: unique variable-name prefix for the emitted GLSL
    :param fac: GLSL expression for the curve input factor
    :param points: Blender curve map points (location = (x, y))
    """
    curshader = state.curshader

    # Write Ys array
    ys_var = name + '_ys' + state.get_parser_pass_suffix()
    curshader.write('float {0}[{1}];'.format(ys_var, len(points)))  # TODO: Make const
    for i in range(0, len(points)):
        curshader.write('{0}[{1}] = {2};'.format(ys_var, i, points[i].location[1]))
    # Get index
    fac_var = name + '_fac' + state.get_parser_pass_suffix()
    curshader.write('float {0} = {1};'.format(fac_var, fac))
    # Index = number of point X positions below fac (branchless sum of ternaries)
    index = '0'
    for i in range(1, len(points)):
        index += ' + ({0} > {1} ? 1 : 0)'.format(fac_var, points[i].location[0])
    # Write index
    index_var = name + '_i' + state.get_parser_pass_suffix()
    curshader.write('int {0} = {1};'.format(index_var, index))
    # Linear
    # Write Xs array
    facs_var = name + '_xs' + state.get_parser_pass_suffix()
    curshader.write('float {0}[{1}];'.format(facs_var, len(points)))  # TODO: Make const
    for i in range(0, len(points)):
        curshader.write('{0}[{1}] = {2};'.format(facs_var, i, points[i].location[0]))
    # Map vector
    return 'mix({0}[{1}], {0}[{1} + 1], ({2} - {3}[{1}]) * (1.0 / ({3}[{1} + 1] - {3}[{1}]) ))'.format(ys_var, index_var, fac_var, facs_var)
|
||||
|
||||
def write_normal(inp):
    """Parse the linked normal socket and assign it to `n` in the current shader.

    Skips GROUP_INPUT sources (handled by the group machinery) and unlinked
    sockets; writes nothing when parsing yielded no result.
    """
    if inp.is_linked and inp.links[0].from_node.type != 'GROUP_INPUT':
        normal_res = parse_vector_input(inp)
        # Fix: identity comparison with None (`!= None` is non-idiomatic
        # and can misbehave with overloaded __ne__)
        if normal_res is not None:
            state.curshader.write('n = {0};'.format(normal_res))
|
||||
|
||||
|
||||
def is_parsed(node_store_name: str):
    """Return True if a result/store variable with this name was already written
    during the current parse run (tracked in state.parsed)."""
    return node_store_name in state.parsed
|
||||
|
||||
|
||||
def res_var_name(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> str:
    """Return the name of the variable that stores the parsed result
    from the given node and socket."""
    var = node_name(node.name) + '_' + safesrc(socket.name) + '_res'
    if '__' not in var:
        return var
    # Consecutive _ are reserved
    return var.replace('_', '_x')
|
||||
|
||||
|
||||
def write_result(link: bpy.types.NodeLink) -> Optional[str]:
    """Write the parsed result of the given node link to the shader.

    Returns the name of the variable holding the result, or None when the
    upstream parser produced nothing (error, or an already-handled normal map).
    Results are cached via state.parsed so each node is emitted only once.
    """
    res_var = res_var_name(link.from_node, link.from_socket)

    # Nodes re-evaluated for screen-space derivatives get a per-pass variable
    need_dxdy_offset = node_need_reevaluation_for_screenspace_derivative(link.from_node)
    if need_dxdy_offset:
        res_var += state.get_parser_pass_suffix()

    # Unparsed node
    if not is_parsed(res_var):
        state.parsed.add(res_var)
        st = link.from_socket.type

        if st in ('RGB', 'RGBA', 'VECTOR'):
            res = parse_vector(link.from_node, link.from_socket)
            if res is None:
                log.error(f'{link.from_node.name} returned `None` while parsing!')
                return None
            state.curshader.write(f'vec3 {res_var} = {res};')

        elif st == 'VALUE':
            res = parse_value(link.from_node, link.from_socket)
            if res is None:
                log.error(f'{link.from_node.name} returned `None` while parsing!')
                return None
            # Constant VALUE nodes become shader consts unless exposed as params
            if link.from_node.type == "VALUE" and not link.from_node.lnx_material_param:
                state.curshader.add_const('float', res_var, res)
            else:
                state.curshader.write(f'float {res_var} = {res};')

        # Apply a pending derivative offset requested by the upstream parser
        if state.dxdy_varying_input_value:
            state.curshader.write(f'{res_var} = {apply_screenspace_derivative_offset_if_required(res_var)};')
            state.dxdy_varying_input_value = False

    # Normal map already parsed, return
    elif link.from_node.type == 'NORMAL_MAP':
        return None

    return res_var
|
||||
|
||||
|
||||
def write_procedurals():
    """Ensure the procedural-texture helper functions are added to the
    current shader exactly once."""
    shader = state.curshader
    if shader in state.procedurals_written:
        return
    shader.add_function(c_functions.str_tex_proc)
    state.procedurals_written.add(shader)
|
||||
|
||||
|
||||
def glsl_type(socket_type: str):
    """Socket to glsl type."""
    vector_sockets = ('RGB', 'RGBA', 'VECTOR')
    return 'vec3' if socket_type in vector_sockets else 'float'
|
||||
|
||||
def to_uniform(inp: bpy.types.NodeSocket):
    """Register a uniform for the socket's value and return its name."""
    uniform_name = safesrc(inp.node.name) + safesrc(inp.name)
    state.curshader.add_uniform(glsl_type(inp.type) + ' ' + uniform_name)
    return uniform_name
|
||||
|
||||
|
||||
def store_var_name(node: bpy.types.Node) -> str:
    """Return the variable name used to cache this node's stored value."""
    base = node_name(node.name)
    if base[-1] == "_":
        # Prevent consecutive __
        return base + '_x_store'
    return base + '_store'
|
||||
|
||||
|
||||
def texture_store(node, tex, tex_name, to_linear=False, tex_link=None, default_value=None, is_lnx_mat_param=None):
    """Emit GLSL sampling the given image texture node and cache the result.

    Returns the name of the vec4 variable holding the sample. Subsequent calls
    for the same node reuse the cached variable. Handles box ('triplanar')
    projection, UV flipping, optional gradient sampling and sRGB-to-linear
    conversion.

    :param node: TEX_IMAGE node being parsed
    :param tex: exporter texture binding dict (from make_texture)
    :param tex_name: GLSL sampler uniform name
    :param to_linear: apply pow(2.2) conversion after sampling
    :param tex_link / default_value / is_lnx_mat_param: forwarded to add_uniform
    """
    curshader = state.curshader

    tex_store = store_var_name(node)

    # Separate cache slot per derivative pass when re-evaluation is needed
    if node_need_reevaluation_for_screenspace_derivative(node):
        tex_store += state.get_parser_pass_suffix()

    if is_parsed(tex_store):
        return tex_store
    state.parsed.add(tex_store)

    # Only bind the texture when it is not exposed as a material parameter
    if is_lnx_mat_param is None:
        mat_bind_texture(tex)
    state.con.add_elem('tex', 'short2norm')
    curshader.add_uniform('sampler2D {0}'.format(tex_name), link=tex_link, default_value=default_value, is_lnx_mat_param=is_lnx_mat_param)
    triplanar = node.projection == 'BOX'
    if node.inputs[0].is_linked:
        uv_name = parse_vector_input(node.inputs[0])
        # Flip V to match the engine's texture coordinate convention
        if triplanar:
            uv_name = 'vec3({0}.x, 1.0 - {0}.y, {0}.z)'.format(uv_name)
        else:
            uv_name = 'vec2({0}.x, 1.0 - {0}.y)'.format(uv_name)
    else:
        uv_name = 'vec3(texCoord.xy, 0.0)' if triplanar else 'texCoord'
    if triplanar:
        if not curshader.has_include('std/mapping.glsl'):
            curshader.add_include('std/mapping.glsl')
        # Use the perturbed normal if a normal map was parsed before this
        if state.normal_parsed:
            nor = 'TBN[2]'
        else:
            nor = 'n'
        curshader.write('vec4 {0} = vec4(triplanarMapping({1}, {2}, {3}), 0.0);'.format(tex_store, tex_name, nor, uv_name))
    else:
        if mat_state.texture_grad:
            curshader.write('vec4 {0} = textureGrad({1}, {2}.xy, g2.xy, g2.zw);'.format(tex_store, tex_name, uv_name))
        else:
            curshader.write('vec4 {0} = texture({1}, {2}.xy);'.format(tex_store, tex_name, uv_name))

    if to_linear:
        curshader.write('{0}.rgb = pow({0}.rgb, vec3(2.2));'.format(tex_store))

    return tex_store
|
||||
|
||||
|
||||
def apply_screenspace_derivative_offset_if_required(coords: str) -> str:
    """Apply screen-space derivative offsets to the given coordinates,
    if required by the current ParserPass.
    """
    # Derivative functions are only available in fragment shaders
    if state.curshader.shader_type == 'frag':
        current = state.current_pass
        if current == ParserPass.DX_SCREEN_SPACE:
            coords = f'({coords}) + {dfdx_fine(coords)}'
        elif current == ParserPass.DY_SCREEN_SPACE:
            coords = f'({coords}) + {dfdy_fine(coords)}'

    return f'({coords})'
|
||||
|
||||
|
||||
def node_need_reevaluation_for_screenspace_derivative(node: bpy.types.Node) -> bool:
    """Whether this node must be re-parsed in the current dx/dy derivative pass.

    Outside the derivative passes the answer is always False. Within them the
    node's metadata decides: ALWAYS/NEVER directly, DYNAMIC by recursively
    checking whether any connected upstream node needs re-evaluation.
    """
    if state.current_pass not in (ParserPass.DX_SCREEN_SPACE, ParserPass.DY_SCREEN_SPACE):
        return False

    should_compute_offset = node_meta.get_node_meta(node).compute_dxdy_variants

    if should_compute_offset == node_meta.ComputeDXDYVariant.ALWAYS:
        return True
    elif should_compute_offset == node_meta.ComputeDXDYVariant.NEVER:
        return False

    # ComputeDXDYVariant.DYNAMIC
    for inp in node.inputs:
        c_node, _ = lnx.node_utils.input_get_connected_node(inp)
        if c_node is None:
            continue

        if node_need_reevaluation_for_screenspace_derivative(c_node):
            return True

    return False
|
||||
|
||||
|
||||
def dfdx_fine(val: str) -> str:
    """Return a GLSL x-derivative call for `val`, picking the variant the
    graphics API supports."""
    # GL_ARB_derivative_control is unavailable in OpenGL ES (= no fine/coarse variants),
    # OES_standard_derivatives is automatically enabled in kha.SystemImpl
    if lnx.utils.is_gapi_gl_es():
        return f'dFdx({val})'
    return f'dFdxFine({val})'
|
||||
|
||||
|
||||
def dfdy_fine(val: str) -> str:
    """Return a GLSL y-derivative call for `val` (see dfdx_fine)."""
    if lnx.utils.is_gapi_gl_es():
        return f'dFdy({val})'
    return f'dFdyFine({val})'
|
||||
|
||||
|
||||
def to_vec1(v):
    """Format a scalar value as GLSL source text."""
    return '{}'.format(v)
|
||||
|
||||
|
||||
def to_vec2(v):
    """Format the first two components of `v` as a GLSL vec2 literal."""
    return 'vec2({0}, {1})'.format(v[0], v[1])
|
||||
|
||||
|
||||
def to_vec3(v):
    """Format the first three components of `v` as a GLSL vec3 literal.

    Indexes explicitly so RGBA colors (4 components) are accepted too.
    """
    return 'vec3({0}, {1}, {2})'.format(v[0], v[1], v[2])
|
||||
|
||||
|
||||
def cast_value(val: str, from_type: str, to_type: str) -> str:
    """Casts a value that is already parsed in a glsl string to another
    value in a string.

    vec2 types are not supported (not used in the node editor) and there
    is no cast towards int types. If casting from vec3 to vec4, the w
    coordinate/alpha channel is filled with a 1.

    If this function is called with invalid parameters, a TypeError is
    raised.
    """
    if from_type == to_type:
        return val

    scalar_types = ('int', 'float')
    if from_type in scalar_types:
        if to_type in scalar_types:
            return val
        if to_type in ('vec2', 'vec3', 'vec4'):
            return f'{to_type}({val})'
    elif from_type == 'vec3':
        if to_type == 'float':
            return rgb_to_bw(val)
        if to_type == 'vec4':
            return f'vec4({val}, 1.0)'
    elif from_type == 'vec4':
        if to_type == 'float':
            return rgb_to_bw(val)
        if to_type == 'vec3':
            return f'{val}.xyz'

    raise TypeError("Invalid type cast in shader!")
|
||||
|
||||
|
||||
def rgb_to_bw(res_var: vec3str) -> floatstr:
    """Convert a vec3 color expression to a luminance (float) expression."""
    # Blender uses the default OpenColorIO luma coefficients which
    # originally come from the Rec. 709 standard (see ITU-R BT.709-6 Item 3.3)
    return 'dot({0}, vec3(0.2126, 0.7152, 0.0722))'.format(res_var)
|
||||
|
||||
|
||||
def node_by_type(nodes, ntype: str) -> bpy.types.Node:
    """Return the first node of the given type, or None if absent."""
    return next((candidate for candidate in nodes if candidate.type == ntype), None)
|
||||
|
||||
def node_by_name(nodes, name: str) -> bpy.types.Node:
    """Return the first node whose bl_idname matches, or None if absent."""
    return next((candidate for candidate in nodes if candidate.bl_idname == name), None)
|
||||
|
||||
def socket_index(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> int:
    """Return the index of `socket` among `node.outputs`, or None if absent.

    Idiom fix: replace the manual `range(len(...))` index loop with
    `enumerate` (behavior unchanged).
    """
    for i, out in enumerate(node.outputs):
        if out == socket:
            return i
|
||||
|
||||
|
||||
def node_name(s: str) -> str:
    """Return a unique and safe name for a node for shader code usage."""
    # Prefix with the group-nesting chain so names stay unique across groups
    qualified = s
    for parent in state.parents:
        qualified = parent.name + '_' + qualified
    # Distinguish variables written inside texture-read sections
    if state.curshader.write_textures > 0:
        qualified += '_texread'
    qualified = safesrc(qualified)
    if '__' in qualified:  # Consecutive _ are reserved
        qualified = qualified.replace('_', '_x')
    return qualified
|
||||
|
||||
##
|
||||
|
||||
|
||||
def make_texture(
        image: bpy.types.Image, tex_name: str, matname: str,
        interpolation: str, extension: str,
) -> Optional[Dict[str, Any]]:
    """Creates a texture binding entry for the scene's export data
    ('bind_textures') for a given texture image.

    Args:
        image: The Blender image datablock to export (may be packed,
            generated, or a file on disk).
        tex_name: Name used for the binding entry ('name' key).
        matname: Owning material name, used for log messages; when None the
            current material from `mat_state` is used.
        interpolation: Blender image-node interpolation mode (may be
            overridden by the render-path texture filter setting below).
        extension: Blender image-node extension mode; anything other than
            'REPEAT' results in clamped addressing.

    Returns:
        A dict describing the texture binding, or None if the image is
        missing, has no usable file path, or lacks a file extension.
    """
    tex = {'name': tex_name}

    # Nothing to bind without an image datablock.
    if image is None:
        return None

    if matname is None:
        matname = mat_state.material.name

    # Get filepath
    filepath = image.filepath
    if filepath == '':
        if image.packed_file is not None:
            # Packed image with no path: synthesize one from the image name.
            filepath = './' + image.name
            has_ext = filepath.endswith(('.jpg', '.png', '.hdr'))
            if not has_ext:
                # Raw bytes, write converted .jpg to /unpacked
                filepath += '.raw'

        elif image.source == "GENERATED":
            # Generated images have no backing file; bake them to a .jpg
            # inside the build's 'unpacked' folder.
            unpack_path = os.path.join(lnx.utils.get_fp_build(), 'compiled', 'Assets', 'unpacked')
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)

            filepath = os.path.join(unpack_path, image.name + ".jpg")
            lnx.utils.convert_image(image, filepath, "JPEG")

        else:
            log.warn(matname + '/' + image.name + ' - invalid file path')
            return None
    else:
        filepath = lnx.utils.to_absolute_path(filepath, image.library)

    # Reference image name
    texpath = lnx.utils.asset_path(filepath)
    texfile = lnx.utils.extract_filename(filepath)
    tex['file'] = lnx.utils.safestr(texfile)
    s = tex['file'].rsplit('.', 1)

    # A file extension is required to decide on the target format.
    if len(s) == 1:
        log.warn(matname + '/' + image.name + ' - file extension required for image name')
        return None

    ext = s[1].lower()
    do_convert = ext not in ('jpg', 'png', 'hdr', 'mp4')  # Convert image
    if do_convert:
        # Formats with alpha (tga/dds) convert to png, everything else to jpg.
        new_ext = 'png' if (ext in ('tga', 'dds')) else 'jpg'
        tex['file'] = tex['file'].rsplit('.', 1)[0] + '.' + new_ext

    if image.packed_file is not None or not is_ascii(texfile):
        # Extract packed data / copy non-ascii texture
        unpack_path = os.path.join(lnx.utils.get_fp_build(), 'compiled', 'Assets', 'unpacked')
        if not os.path.exists(unpack_path):
            os.makedirs(unpack_path)
        unpack_filepath = os.path.join(unpack_path, tex['file'])

        if do_convert:
            # Conversion result is cached; only convert when missing.
            if not os.path.isfile(unpack_filepath):
                fmt = 'PNG' if new_ext == 'png' else 'JPEG'
                lnx.utils.convert_image(image, unpack_filepath, file_format=fmt)
        else:

            # Write bytes if size is different or file does not exist yet
            if image.packed_file is not None:
                if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != image.packed_file.size:
                    with open(unpack_filepath, 'wb') as f:
                        f.write(image.packed_file.data)
            # Copy non-ascii texture
            else:
                if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != os.path.getsize(texpath):
                    shutil.copy(texpath, unpack_filepath)

        lnx.assets.add(unpack_filepath)

    else:
        if not os.path.isfile(lnx.utils.asset_path(filepath)):
            log.warn('Material ' + matname + '/' + image.name + ' - file not found(' + filepath + ')')
            return None

        if do_convert:
            unpack_path = os.path.join(lnx.utils.get_fp_build(), 'compiled', 'Assets', 'unpacked')
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)
            converted_path = os.path.join(unpack_path, tex['file'])
            # TODO: delete cache when file changes
            if not os.path.isfile(converted_path):
                fmt = 'PNG' if new_ext == 'png' else 'JPEG'
                lnx.utils.convert_image(image, converted_path, file_format=fmt)
            lnx.assets.add(converted_path)
        else:
            # Link image path to assets
            # TODO: Khamake converts .PNG to .jpg? Convert ext to lowercase on windows
            if lnx.utils.get_os() == 'win':
                s = filepath.rsplit('.', 1)
                lnx.assets.add(lnx.utils.asset_path(s[0] + '.' + s[1].lower()))
            else:
                lnx.assets.add(lnx.utils.asset_path(filepath))

    # if image_format != 'RGBA32':
    #     tex['format'] = image_format

    # The render-path texture filter overrides the node's interpolation mode.
    rpdat = lnx.utils.get_rp()
    texfilter = rpdat.lnx_texture_filter
    if texfilter == 'Anisotropic':
        interpolation = 'Smart'
    elif texfilter == 'Linear':
        interpolation = 'Linear'
    elif texfilter == 'Point':
        interpolation = 'Closest'

    if interpolation == 'Cubic':  # Mipmap linear
        tex['mipmap_filter'] = 'linear'
        tex['generate_mipmaps'] = True
    elif interpolation == 'Smart':  # Mipmap anisotropic
        tex['min_filter'] = 'anisotropic'
        tex['mipmap_filter'] = 'linear'
        tex['generate_mipmaps'] = True
    elif interpolation == 'Closest':
        tex['min_filter'] = 'point'
        tex['mag_filter'] = 'point'
    # else defaults to linear

    if extension != 'REPEAT':  # Extend or clip
        tex['u_addressing'] = 'clamp'
        tex['v_addressing'] = 'clamp'

    # Movies always use plain linear filtering without mipmaps.
    if image.source == 'MOVIE':
        tex['source'] = 'movie'
        tex['min_filter'] = 'linear'
        tex['mag_filter'] = 'linear'
        tex['mipmap_filter'] = 'no'
        tex['generate_mipmaps'] = False

    return tex
|
||||
|
||||
|
||||
def make_texture_from_image_node(image_node: bpy.types.ShaderNodeTexImage, tex_name: str, matname: str = None) -> Optional[Dict[str, Any]]:
    """Create a texture binding entry from an Image Texture node.

    Convenience wrapper around `make_texture` that pulls image, interpolation
    and extension settings straight from the node.
    """
    material_name = mat_state.material.name if matname is None else matname
    return make_texture(image_node.image, tex_name, material_name, image_node.interpolation, image_node.extension)
|
||||
|
||||
|
||||
def is_pow(num):
    """Return True if `num` is a power of two (exactly one bit set, non-zero)."""
    return num != 0 and (num & (num - 1)) == 0
|
||||
|
||||
def is_ascii(s):
    """Return True if every character in `s` is plain ASCII."""
    return all(ord(character) < 128 for character in s)
|
||||
|
||||
##
|
||||
|
||||
def get_lnx_export_tangents():
    """Return the 'export tangents' flag stored on the 'Lnx' world datablock."""
    return bpy.data.worlds['Lnx'].lnx_export_tangents
|
||||
|
||||
def safesrc(name):
    """Delegate to lnx.utils.safesrc: sanitize `name` for use in generated source."""
    return lnx.utils.safesrc(name)
|
||||
|
||||
def disp_enabled():
    """Return whether displacement is enabled for the current build target."""
    return lnx.utils.disp_enabled(lnx.make_state.target)
|
||||
|
||||
def assets_add(path):
    """Register `path` with the exporter's asset list (delegates to lnx.assets)."""
    lnx.assets.add(path)
|
||||
|
||||
def assets_add_embedded_data(path):
    """Register `path` as embedded data with the exporter (delegates to lnx.assets)."""
    lnx.assets.add_embedded_data(path)
|
||||
|
||||
def tree_name() -> str:
    """Return the name of the node tree currently being parsed."""
    return state.tree_name
|
||||
|
||||
def mat_batch():
    """Return the current material batch state from `mat_state`."""
    return mat_state.batch
|
||||
|
||||
def mat_bind_texture(tex):
    """Append a texture binding entry (as built by `make_texture`) to the current material."""
    mat_state.bind_textures.append(tex)
|
||||
|
||||
def mat_get_material():
    """Return the material currently being processed."""
    return mat_state.material
|
||||
|
||||
def mat_get_material_users():
    """Return the mapping of materials to their users collected in `mat_state`."""
    return mat_state.mat_users
|
517
leenkx/blender/lnx/material/cycles_functions.py
Normal file
517
leenkx/blender/lnx/material/cycles_functions.py
Normal file
@ -0,0 +1,517 @@
|
||||
str_tex_proc = """
|
||||
// <https://www.shadertoy.com/view/4dS3Wd>
|
||||
// By Morgan McGuire @morgan3d, http://graphicscodex.com
|
||||
float hash_f(const float n) { return fract(sin(n) * 1e4); }
|
||||
float hash_f(const vec2 p) { return fract(1e4 * sin(17.0 * p.x + p.y * 0.1) * (0.1 + abs(sin(p.y * 13.0 + p.x)))); }
|
||||
float hash_f(const vec3 co){ return fract(sin(dot(co.xyz, vec3(12.9898,78.233,52.8265)) * 24.384) * 43758.5453); }
|
||||
|
||||
float noise(const vec3 x) {
|
||||
const vec3 step = vec3(110, 241, 171);
|
||||
|
||||
vec3 i = floor(x);
|
||||
vec3 f = fract(x);
|
||||
|
||||
// For performance, compute the base input to a 1D hash from the integer part of the argument and the
|
||||
// incremental change to the 1D based on the 3D -> 1D wrapping
|
||||
float n = dot(i, step);
|
||||
|
||||
vec3 u = f * f * (3.0 - 2.0 * f);
|
||||
return mix(mix(mix( hash_f(n + dot(step, vec3(0, 0, 0))), hash_f(n + dot(step, vec3(1, 0, 0))), u.x),
|
||||
mix( hash_f(n + dot(step, vec3(0, 1, 0))), hash_f(n + dot(step, vec3(1, 1, 0))), u.x), u.y),
|
||||
mix(mix( hash_f(n + dot(step, vec3(0, 0, 1))), hash_f(n + dot(step, vec3(1, 0, 1))), u.x),
|
||||
mix( hash_f(n + dot(step, vec3(0, 1, 1))), hash_f(n + dot(step, vec3(1, 1, 1))), u.x), u.y), u.z);
|
||||
}
|
||||
|
||||
// Shader-code adapted from Blender
|
||||
// https://github.com/sobotka/blender/blob/master/source/blender/gpu/shaders/material/gpu_shader_material_tex_wave.glsl & /gpu_shader_material_fractal_noise.glsl
|
||||
float fractal_noise(const vec3 p, const float o)
|
||||
{
|
||||
float fscale = 1.0;
|
||||
float amp = 1.0;
|
||||
float sum = 0.0;
|
||||
float octaves = clamp(o, 0.0, 16.0);
|
||||
int n = int(octaves);
|
||||
for (int i = 0; i <= n; i++) {
|
||||
float t = noise(fscale * p);
|
||||
sum += t * amp;
|
||||
amp *= 0.5;
|
||||
fscale *= 2.0;
|
||||
}
|
||||
float rmd = octaves - floor(octaves);
|
||||
if (rmd != 0.0) {
|
||||
float t = noise(fscale * p);
|
||||
float sum2 = sum + t * amp;
|
||||
sum *= float(pow(2, n)) / float(pow(2, n + 1) - 1.0);
|
||||
sum2 *= float(pow(2, n + 1)) / float(pow(2, n + 2) - 1);
|
||||
return (1.0 - rmd) * sum + rmd * sum2;
|
||||
}
|
||||
else {
|
||||
sum *= float(pow(2, n)) / float(pow(2, n + 1) - 1);
|
||||
return sum;
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# GLSL checker texture (color and scalar variants), adapted from Blender's
# gpu_shader_material_tex_checker.glsl.
# Fix: the unit-coordinate epsilon was written as `co + 0.000001 * 0.999999`,
# which multiplies the two constants instead of shrinking the offset
# coordinate as Blender does; the correct form is `(co + 0.000001) * 0.999999`.
str_tex_checker = """
vec3 tex_checker(const vec3 co, const vec3 col1, const vec3 col2, const float scale) {
	// Prevent precision issues on unit coordinates
	vec3 p = (co + 0.000001) * 0.999999 * scale;
	float xi = abs(floor(p.x));
	float yi = abs(floor(p.y));
	float zi = abs(floor(p.z));
	bool check = ((mod(xi, 2.0) == mod(yi, 2.0)) == bool(mod(zi, 2.0)));
	return check ? col1 : col2;
}
float tex_checker_f(const vec3 co, const float scale) {
	vec3 p = (co + 0.000001) * 0.999999 * scale;
	float xi = abs(floor(p.x));
	float yi = abs(floor(p.y));
	float zi = abs(floor(p.z));
	return float((mod(xi, 2.0) == mod(yi, 2.0)) == bool(mod(zi, 2.0)));
}
"""
|
||||
|
||||
str_tex_voronoi = """
|
||||
//Shader-code adapted from Blender
|
||||
//https://github.com/sobotka/blender/blob/master/source/blender/gpu/shaders/material/gpu_shader_material_tex_voronoi.glsl
|
||||
float voronoi_distance(const vec3 a, const vec3 b, const int metric, const float exponent)
|
||||
{
|
||||
if (metric == 0) // SHD_VORONOI_EUCLIDEAN
|
||||
{
|
||||
return distance(a, b);
|
||||
}
|
||||
else if (metric == 1) // SHD_VORONOI_MANHATTAN
|
||||
{
|
||||
return abs(a.x - b.x) + abs(a.y - b.y) + abs(a.z - b.z);
|
||||
}
|
||||
else if (metric == 2) // SHD_VORONOI_CHEBYCHEV
|
||||
{
|
||||
return max(abs(a.x - b.x), max(abs(a.y - b.y), abs(a.z - b.z)));
|
||||
}
|
||||
else if (metric == 3) // SHD_VORONOI_MINKOWSKI
|
||||
{
|
||||
return pow(pow(abs(a.x - b.x), exponent) + pow(abs(a.y - b.y), exponent) +
|
||||
pow(abs(a.z - b.z), exponent),
|
||||
1.0 / exponent);
|
||||
}
|
||||
else {
|
||||
return 0.5;
|
||||
}
|
||||
}
|
||||
|
||||
vec3 tex_voronoi(const vec3 coord, const float r, const int metric, const int outp, const float scale, const float exp)
|
||||
{
|
||||
float randomness = clamp(r, 0.0, 1.0);
|
||||
|
||||
vec3 scaledCoord = coord * scale;
|
||||
vec3 cellPosition = floor(scaledCoord);
|
||||
vec3 localPosition = scaledCoord - cellPosition;
|
||||
|
||||
float minDistance = 8.0;
|
||||
vec3 targetOffset, targetPosition;
|
||||
for (int k = -1; k <= 1; k++) {
|
||||
for (int j = -1; j <= 1; j++) {
|
||||
for (int i = -1; i <= 1; i++) {
|
||||
vec3 cellOffset = vec3(float(i), float(j), float(k));
|
||||
vec3 pointPosition = cellOffset;
|
||||
if(randomness != 0.) {
|
||||
pointPosition += vec3(hash_f(cellPosition+cellOffset), hash_f(cellPosition+cellOffset+972.37), hash_f(cellPosition+cellOffset+342.48)) * randomness;}
|
||||
float distanceToPoint = voronoi_distance(pointPosition, localPosition, metric, exp);
|
||||
if (distanceToPoint < minDistance) {
|
||||
targetOffset = cellOffset;
|
||||
minDistance = distanceToPoint;
|
||||
targetPosition = pointPosition;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if(outp == 0){return vec3(minDistance);}
|
||||
else if(outp == 1) {
|
||||
if(randomness == 0.) {return vec3(hash_f(cellPosition+targetOffset), hash_f(cellPosition+targetOffset+972.37), hash_f(cellPosition+targetOffset+342.48));}
|
||||
return (targetPosition - targetOffset)/randomness;
|
||||
}
|
||||
return (targetPosition + cellPosition) / scale;
|
||||
}
|
||||
"""
|
||||
|
||||
# Based on https://www.shadertoy.com/view/4sfGzS
# Copyright © 2013 Inigo Quilez
# The MIT License - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# float tex_noise_f(const vec3 x) {
#   vec3 p = floor(x);
#   vec3 f = fract(x);
#   f = f * f * (3.0 - 2.0 * f);
#   vec2 uv = (p.xy + vec2(37.0, 17.0) * p.z) + f.xy;
#   vec2 rg = texture(snoise256, (uv + 0.5) / 256.0).yx;
#   return mix(rg.x, rg.y, f.z);
# }
# By Morgan McGuire @morgan3d, http://graphicscodex.com Reuse permitted under the BSD license.
# https://www.shadertoy.com/view/4dS3Wd
# GLSL Noise texture: optional domain distortion followed by fractal noise.
# Depends on noise() and fractal_noise() from str_tex_proc.
str_tex_noise = """
float tex_noise(const vec3 p, const float detail, const float distortion) {
	vec3 pk = p;
	if (distortion != 0.0) {
		pk += vec3(noise(p) * distortion);
	}
	return fractal_noise(pk, detail);
}
"""
|
||||
|
||||
# Based on noise created by Nikita Miropolskiy, nikat/2013
# Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License
# GLSL Musgrave texture: a single simplex-style noise evaluation
# (random3 supplies per-corner gradients), clamped to [0, 1].
str_tex_musgrave = """
vec3 random3(const vec3 c) {
	float j = 4096.0 * sin(dot(c, vec3(17.0, 59.4, 15.0)));
	vec3 r;
	r.z = fract(512.0 * j);
	j *= 0.125;
	r.x = fract(512.0 * j);
	j *= 0.125;
	r.y = fract(512.0 * j);
	return r - 0.5;
}
float tex_musgrave_f(const vec3 p) {
	const float F3 = 0.3333333;
	const float G3 = 0.1666667;
	vec3 s = floor(p + dot(p, vec3(F3)));
	vec3 x = p - s + dot(s, vec3(G3));
	vec3 e = step(vec3(0.0), x - x.yzx);
	vec3 i1 = e*(1.0 - e.zxy);
	vec3 i2 = 1.0 - e.zxy*(1.0 - e);
	vec3 x1 = x - i1 + G3;
	vec3 x2 = x - i2 + 2.0*G3;
	vec3 x3 = x - 1.0 + 3.0*G3;
	vec4 w, d;
	w.x = dot(x, x);
	w.y = dot(x1, x1);
	w.z = dot(x2, x2);
	w.w = dot(x3, x3);
	w = max(0.6 - w, 0.0);
	d.x = dot(random3(s), x);
	d.y = dot(random3(s + i1), x1);
	d.z = dot(random3(s + i2), x2);
	d.w = dot(random3(s + 1.0), x3);
	w *= w;
	w *= w;
	d *= w;
	return clamp(dot(d, vec4(52.0)), 0.0, 1.0);
}
"""
|
||||
|
||||
# col: the incoming color
# shift: a vector containing the hue shift, the saturation modificator, the value modificator and the mix factor in this order
# this does the following:
# make rgb col to hsv
# apply hue shift through addition, sat/val through multiplication
# return an rgb color, mixed with the original one
# Note: shift.w is the weight of the ORIGINAL color in the final mix.
str_hue_sat = """
vec3 hsv_to_rgb(const vec3 c) {
	const vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
	vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
	return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
vec3 rgb_to_hsv(const vec3 c) {
	const vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
	vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
	vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));

	float d = q.x - min(q.w, q.y);
	float e = 1.0e-10;
	return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hue_sat(const vec3 col, const vec4 shift) {
	vec3 hsv = rgb_to_hsv(col);
	hsv.x += shift.x;
	hsv.y *= shift.y;
	hsv.z *= shift.z;
	return mix(hsv_to_rgb(hsv), col, shift.w);
}
"""
|
||||
|
||||
# https://twitter.com/Donzanoid/status/903424376707657730
# Cheap polynomial approximation of a visible-spectrum wavelength to RGB;
# `t` is expected to be the wavelength normalized to [0, 1].
str_wavelength_to_rgb = """
vec3 wavelength_to_rgb(const float t) {
	vec3 r = t * 2.1 - vec3(1.8, 1.14, 0.3);
	return 1.0 - r * r;
}
"""
|
||||
|
||||
str_tex_magic = """
|
||||
vec3 tex_magic(const vec3 p) {
|
||||
float a = 1.0 - (sin(p.x) + sin(p.y));
|
||||
float b = 1.0 - sin(p.x - p.y);
|
||||
float c = 1.0 - sin(p.x + p.y);
|
||||
return vec3(a, b, c);
|
||||
}
|
||||
float tex_magic_f(const vec3 p) {
|
||||
vec3 c = tex_magic(p);
|
||||
return (c.x + c.y + c.z) / 3.0;
|
||||
}
|
||||
"""
|
||||
|
||||
str_tex_brick = """
|
||||
vec3 tex_brick(vec3 p, const vec3 c1, const vec3 c2, const vec3 c3) {
|
||||
p /= vec3(0.9, 0.49, 0.49) / 2;
|
||||
if (fract(p.y * 0.5) > 0.5) p.x += 0.5;
|
||||
p = fract(p);
|
||||
vec3 b = step(p, vec3(0.95, 0.9, 0.9));
|
||||
return mix(c3, c1, b.x * b.y * b.z);
|
||||
}
|
||||
float tex_brick_f(vec3 p) {
|
||||
p /= vec3(0.9, 0.49, 0.49) / 2;
|
||||
if (fract(p.y * 0.5) > 0.5) p.x += 0.5;
|
||||
p = fract(p);
|
||||
vec3 b = step(p, vec3(0.95, 0.9, 0.9));
|
||||
return mix(1.0, 0.0, b.x * b.y * b.z);
|
||||
}
|
||||
"""
|
||||
|
||||
str_tex_wave = """
|
||||
float tex_wave_f(const vec3 p, const int type, const int profile, const float dist, const float detail, const float detail_scale) {
|
||||
float n;
|
||||
if(type == 0) n = (p.x + p.y + p.z) * 9.5;
|
||||
else n = length(p) * 13.0;
|
||||
if(dist != 0.0) n += dist * fractal_noise(p * detail_scale, detail) * 2.0 - 1.0;
|
||||
if(profile == 0) { return 0.5 + 0.5 * sin(n - PI); }
|
||||
else {
|
||||
n /= 2.0 * PI;
|
||||
return n - floor(n);
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
str_brightcontrast = """
|
||||
vec3 brightcontrast(const vec3 col, const float bright, const float contr) {
|
||||
float a = 1.0 + contr;
|
||||
float b = bright - contr * 0.5;
|
||||
return max(a * col + b, 0.0);
|
||||
}
|
||||
"""
|
||||
|
||||
# https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/
# dielectric-dielectric
# approx pow(1.0 - dotNV, 7.25 / ior)
# Returns 1.0 (total internal reflection) when g would be imaginary.
str_fresnel = """
float fresnel(float eta, float c) {
	float g = eta * eta - 1.0 + c * c;
	if (g < 0.0) return 1.0;
	g = sqrt(g);
	float a = (g - c) / (g + c);
	float b = ((g + c) * c - 1.0) / ((g - c) * c + 1.0);
	return 0.5 * a * a * (1.0 + b * b);
}
"""
|
||||
|
||||
# Safe division like Blender does it. If dividing by 0, the result is 0.
# https://github.com/blender/blender/blob/df1e9b662bd6938f74579cea9d30341f3b6dd02b/intern/cycles/kernel/shaders/node_vector_math.osl
str_safe_divide = """
vec3 safe_divide(const vec3 a, const vec3 b) {
\treturn vec3((b.x != 0.0) ? a.x / b.x : 0.0,
\t            (b.y != 0.0) ? a.y / b.y : 0.0,
\t            (b.z != 0.0) ? a.z / b.z : 0.0);
}
"""
|
||||
|
||||
# https://github.com/blender/blender/blob/df1e9b662bd6938f74579cea9d30341f3b6dd02b/intern/cycles/kernel/shaders/node_vector_math.osl
# Vector projection of v onto v_proj; returns vec3(0) for a zero projection axis.
str_project = """
vec3 project(const vec3 v, const vec3 v_proj) {
\tfloat lenSquared = dot(v_proj, v_proj);
\treturn (lenSquared != 0.0) ? (dot(v, v_proj) / lenSquared) * v_proj : vec3(0);
}
"""
|
||||
|
||||
# Adapted from godot engine math_funcs.h
# Wrap `value` into the half-open range [min, max); returns min for an empty range.
str_wrap = """
float wrap(const float value, const float max, const float min) {
\tfloat range = max - min;
\treturn (range != 0.0) ? value - (range * floor((value - min) / range)) : min;
}
vec3 wrap(const vec3 value, const vec3 max, const vec3 min) {
\treturn vec3(wrap(value.x, max.x, min.x),
\t            wrap(value.y, max.y, min.y),
\t            wrap(value.z, max.z, min.z));
}
"""
|
||||
|
||||
str_blackbody = """
|
||||
vec3 blackbody(const float temperature){
|
||||
|
||||
vec3 rgb = vec3(0.0, 0.0, 0.0);
|
||||
|
||||
vec3 r = vec3(0.0, 0.0, 0.0);
|
||||
vec3 g = vec3(0.0, 0.0, 0.0);
|
||||
vec3 b = vec3(0.0, 0.0, 0.0);
|
||||
|
||||
float t_inv = float(1.0 / temperature);
|
||||
|
||||
if (temperature >= 12000.0) {
|
||||
|
||||
rgb = vec3(0.826270103, 0.994478524, 1.56626022);
|
||||
|
||||
} else if(temperature < 965.0) {
|
||||
|
||||
rgb = vec3(4.70366907, 0.0, 0.0);
|
||||
|
||||
} else {
|
||||
|
||||
if (temperature >= 6365.0) {
|
||||
vec3 r = vec3(3.78765709e+03, 9.36026367e-06, 3.98995841e-01);
|
||||
vec3 g = vec3(-5.00279505e+02, -4.59745390e-06, 1.09090465e+00);
|
||||
vec4 b = vec4(6.72595954e-13, -2.73059993e-08, 4.24068546e-04, -7.52204323e-01);
|
||||
|
||||
rgb = vec3(r.r * t_inv + r.g * temperature + r.b, g.r * t_inv + g.g * temperature + g.b, ((b.r * temperature + b.g) * temperature + b.b) * temperature + b.a );
|
||||
|
||||
} else if (temperature >= 3315.0) {
|
||||
vec3 r = vec3(4.60124770e+03, 2.89727618e-05, 1.48001316e-01);
|
||||
vec3 g = vec3(-1.18134453e+03, -2.18913373e-05, 1.30656109e+00);
|
||||
vec4 b = vec4(-2.22463426e-13, -1.55078698e-08, 3.81675160e-04, -7.30646033e-01);
|
||||
|
||||
rgb = vec3(r.r * t_inv + r.g * temperature + r.b, g.r * t_inv + g.g * temperature + g.b, ((b.r * temperature + b.g) * temperature + b.b) * temperature + b.a );
|
||||
|
||||
} else if (temperature >= 1902.0) {
|
||||
vec3 r = vec3(4.66849800e+03, 2.85655028e-05, 1.29075375e-01);
|
||||
vec3 g = vec3(-1.42546105e+03, -4.01730887e-05, 1.44002695e+00);
|
||||
vec4 b = vec4(-2.02524603e-11, 1.79435860e-07, -2.60561875e-04, -1.41761141e-02);
|
||||
|
||||
rgb = vec3(r.r * t_inv + r.g * temperature + r.b, g.r * t_inv + g.g * temperature + g.b, ((b.r * temperature + b.g) * temperature + b.b) * temperature + b.a );
|
||||
|
||||
} else if (temperature >= 1449.0) {
|
||||
vec3 r = vec3(4.10671449e+03, -8.61949938e-05, 6.41423749e-01);
|
||||
vec3 g = vec3(-1.22075471e+03, 2.56245413e-05, 1.20753416e+00);
|
||||
vec4 b = vec4(0.0, 0.0, 0.0, 0.0);
|
||||
|
||||
rgb = vec3(r.r * t_inv + r.g * temperature + r.b, g.r * t_inv + g.g * temperature + g.b, ((b.r * temperature + b.g) * temperature + b.b) * temperature + b.a );
|
||||
|
||||
} else if (temperature >= 1167.0) {
|
||||
vec3 r = vec3(3.37763626e+03, -4.34581697e-04, 1.64843306e+00);
|
||||
vec3 g = vec3(-1.00402363e+03, 1.29189794e-04, 9.08181524e-01);
|
||||
vec4 b = vec4(0.0, 0.0, 0.0, 0.0);
|
||||
|
||||
rgb = vec3(r.r * t_inv + r.g * temperature + r.b, g.r * t_inv + g.g * temperature + g.b, ((b.r * temperature + b.g) * temperature + b.b) * temperature + b.a );
|
||||
|
||||
} else {
|
||||
vec3 r = vec3(2.52432244e+03, -1.06185848e-03, 3.11067539e+00);
|
||||
vec3 g = vec3(-7.50343014e+02, 3.15679613e-04, 4.73464526e-01);
|
||||
vec4 b = vec4(0.0, 0.0, 0.0, 0.0);
|
||||
|
||||
rgb = vec3(r.r * t_inv + r.g * temperature + r.b, g.r * t_inv + g.g * temperature + g.b, ((b.r * temperature + b.g) * temperature + b.b) * temperature + b.a );
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return rgb;
|
||||
|
||||
}
|
||||
"""
|
||||
|
||||
# Adapted from https://github.com/blender/blender/blob/594f47ecd2d5367ca936cf6fc6ec8168c2b360d0/source/blender/gpu/shaders/material/gpu_shader_material_map_range.glsl
# Map Range node, linear interpolation variant; degenerate input range maps to 0.
str_map_range_linear = """
float map_range_linear(const float value, const float fromMin, const float fromMax, const float toMin, const float toMax) {
	if (fromMax != fromMin) {
		return float(toMin + ((value - fromMin) / (fromMax - fromMin)) * (toMax - toMin));
	}
	else {
		return float(0.0);
	}
}
"""
|
||||
|
||||
str_map_range_stepped = """
|
||||
float map_range_stepped(const float value, const float fromMin, const float fromMax, const float toMin, const float toMax, const float steps) {
|
||||
if (fromMax != fromMin) {
|
||||
float factor = (value - fromMin) / (fromMax - fromMin);
|
||||
factor = (steps > 0.0) ? floor(factor * (steps + 1.0)) / steps : 0.0;
|
||||
return float(toMin + factor * (toMax - toMin));
|
||||
}
|
||||
else {
|
||||
return float(0.0);
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
str_map_range_smoothstep = """
|
||||
float map_range_smoothstep(const float value, const float fromMin, const float fromMax, const float toMin, const float toMax)
|
||||
{
|
||||
if (fromMax != fromMin) {
|
||||
float factor = (fromMin > fromMax) ? 1.0 - smoothstep(fromMax, fromMin, value) :
|
||||
smoothstep(fromMin, fromMax, value);
|
||||
return float(toMin + factor * (toMax - toMin));
|
||||
}
|
||||
else {
|
||||
return float(0.0);
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
str_map_range_smootherstep = """
|
||||
float safe_divide(float a, float b)
|
||||
{
|
||||
return (b != 0.0) ? a / b : 0.0;
|
||||
}
|
||||
|
||||
float smootherstep(float edge0, float edge1, float x)
|
||||
{
|
||||
x = clamp(safe_divide((x - edge0), (edge1 - edge0)), 0.0, 1.0);
|
||||
return x * x * x * (x * (x * 6.0 - 15.0) + 10.0);
|
||||
}
|
||||
|
||||
float map_range_smootherstep(const float value, const float fromMin, const float fromMax, const float toMin, const float toMax) {
|
||||
if (fromMax != fromMin) {
|
||||
float factor = (fromMin > fromMax) ? 1.0 - smootherstep(fromMax, fromMin, value) :
|
||||
smootherstep(fromMin, fromMax, value);
|
||||
return float(toMin + factor * (toMax - toMin));
|
||||
}
|
||||
else {
|
||||
return float(0.0);
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
str_rotate_around_axis = """
|
||||
vec3 rotate_around_axis(const vec3 p, const vec3 axis, const float angle)
|
||||
{
|
||||
float costheta = cos(angle);
|
||||
float sintheta = sin(angle);
|
||||
vec3 r;
|
||||
|
||||
r.x = ((costheta + (1.0 - costheta) * axis.x * axis.x) * p.x) +
|
||||
(((1.0 - costheta) * axis.x * axis.y - axis.z * sintheta) * p.y) +
|
||||
(((1.0 - costheta) * axis.x * axis.z + axis.y * sintheta) * p.z);
|
||||
|
||||
r.y = (((1.0 - costheta) * axis.x * axis.y + axis.z * sintheta) * p.x) +
|
||||
((costheta + (1.0 - costheta) * axis.y * axis.y) * p.y) +
|
||||
(((1.0 - costheta) * axis.y * axis.z - axis.x * sintheta) * p.z);
|
||||
|
||||
r.z = (((1.0 - costheta) * axis.x * axis.z - axis.y * sintheta) * p.x) +
|
||||
(((1.0 - costheta) * axis.y * axis.z + axis.x * sintheta) * p.y) +
|
||||
((costheta + (1.0 - costheta) * axis.z * axis.z) * p.z);
|
||||
|
||||
return r;
|
||||
}
|
||||
"""
|
||||
|
||||
str_euler_to_mat3 = """
|
||||
mat3 euler_to_mat3(vec3 euler)
|
||||
{
|
||||
float cx = cos(euler.x);
|
||||
float cy = cos(euler.y);
|
||||
float cz = cos(euler.z);
|
||||
float sx = sin(euler.x);
|
||||
float sy = sin(euler.y);
|
||||
float sz = sin(euler.z);
|
||||
|
||||
mat3 mat;
|
||||
mat[0][0] = cy * cz;
|
||||
mat[0][1] = cy * sz;
|
||||
mat[0][2] = -sy;
|
||||
|
||||
mat[1][0] = sy * sx * cz - cx * sz;
|
||||
mat[1][1] = sy * sx * sz + cx * cz;
|
||||
mat[1][2] = cy * sx;
|
||||
|
||||
mat[2][0] = sy * cx * cz + sx * sz;
|
||||
mat[2][1] = sy * cx * sz - sx * cz;
|
||||
mat[2][2] = cy * cx;
|
||||
return mat;
|
||||
}
|
||||
"""
|
5
leenkx/blender/lnx/material/cycles_nodes/__init__.py
Normal file
5
leenkx/blender/lnx/material/cycles_nodes/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
import glob
from os.path import dirname, basename, isfile

# Collect every .py file in this package so that wildcard imports pick up
# all node-parser modules automatically.
modules = glob.glob(dirname(__file__) + "/*.py")
# NOTE(review): '__init__' itself ends up in __all__ as well — presumably
# harmless since re-importing the package is a no-op; confirm consumers
# of __all__ tolerate it.
__all__ = [basename(f)[:-3] for f in modules if isfile(f)]
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
171
leenkx/blender/lnx/material/cycles_nodes/nodes_color.py
Normal file
171
leenkx/blender/lnx/material/cycles_nodes/nodes_color.py
Normal file
@ -0,0 +1,171 @@
|
||||
import bpy
|
||||
|
||||
import lnx
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
from lnx.material.parser_state import ParserState
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
|
||||
# Hot-reload support: when the add-on is reloaded inside Blender, re-import
# the modules this file depends on so stale references are replaced; on the
# first import, just register this module for future reloads.
if lnx.is_reload(__name__):
    log = lnx.reload_module(log)
    c = lnx.reload_module(c)
    c_functions = lnx.reload_module(c_functions)
    lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
    from lnx.material.parser_state import ParserState
    lnx.material.shader = lnx.reload_module(lnx.material.shader)
    from lnx.material.shader import floatstr, vec3str
else:
    lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_brightcontrast(node: bpy.types.ShaderNodeBrightContrast, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit a GLSL expression for the Bright/Contrast node.

    Registers the `brightcontrast` helper function on the current shader.
    """
    color = c.parse_vector_input(node.inputs[0])
    brightness = c.parse_value_input(node.inputs[1])
    contrast = c.parse_value_input(node.inputs[2])

    state.curshader.add_function(c_functions.str_brightcontrast)

    return f'brightcontrast({color}, {brightness}, {contrast})'
|
||||
|
||||
|
||||
def parse_gamma(node: bpy.types.ShaderNodeGamma, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit a GLSL expression for the Gamma node (per-channel power)."""
    color = c.parse_vector_input(node.inputs[0])
    exponent = c.parse_value_input(node.inputs[1])

    return f'pow({color}, vec3({exponent}))'
|
||||
|
||||
|
||||
def parse_huesat(node: bpy.types.ShaderNodeHueSaturation, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit a GLSL expression for the Hue/Saturation/Value node.

    Registers the `hue_sat` helper; its fourth shift component is the weight
    of the original color, hence the `1.0-fac` inversion.
    """
    state.curshader.add_function(c_functions.str_hue_sat)
    hue_shift = c.parse_value_input(node.inputs[0])
    saturation = c.parse_value_input(node.inputs[1])
    value = c.parse_value_input(node.inputs[2])
    factor = c.parse_value_input(node.inputs[3])
    color = c.parse_vector_input(node.inputs[4])

    return f'hue_sat({color}, vec4({hue_shift}-0.5, {saturation}, {value}, 1.0-{factor}))'
|
||||
|
||||
|
||||
def parse_invert(node: bpy.types.ShaderNodeInvert, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit a GLSL expression for the Invert node (mix between color and its complement)."""
    factor = c.parse_value_input(node.inputs[0])
    color = c.parse_vector_input(node.inputs[1])

    return f'mix({color}, vec3(1.0) - ({color}), {factor})'
|
||||
|
||||
|
||||
def parse_mix(node: bpy.types.ShaderNodeMixRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> str:
    """Emit GLSL for the Mix node, dispatching on its data type.

    Unsupported data types are logged and yield a constant '0.0'.
    """
    handlers = {
        'FLOAT': _parse_mixfloat,
        'VECTOR': _parse_mixvec,
        'RGBA': _parse_mixrgb,
    }
    handler = handlers.get(node.data_type)
    if handler is None:
        log.warn(f'Mix node: unsupported data type {node.data_type}.')
        return '0.0'
    return handler(node, out_socket, state)
|
||||
|
||||
|
||||
def _parse_mixfloat(node: bpy.types.ShaderNodeMixRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Emit GLSL for the float variant of the Mix node."""
    factor = c.parse_value_input(node.inputs[0])
    if node.clamp_factor:
        factor = f'clamp({factor}, 0.0, 1.0)'

    # Parse operand sockets in input order so shader state is written consistently.
    value_a = c.parse_value_input(node.inputs[2])
    value_b = c.parse_value_input(node.inputs[3])
    return f'mix({value_a}, {value_b}, {factor})'
|
||||
|
||||
|
||||
def _parse_mixvec(node: bpy.types.ShaderNodeMixRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Mix node, vector mode: supports uniform and per-component factors."""
    mode = node.factor_mode

    if mode == 'UNIFORM':
        # Scalar factor (input 0)
        fac = c.parse_value_input(node.inputs[0])
        if node.clamp_factor:
            fac = f'clamp({fac}, 0.0, 1.0)'
    elif mode == 'NON_UNIFORM':
        # Per-component vector factor (input 1)
        fac = c.parse_vector_input(node.inputs[1])
        if node.clamp_factor:
            fac = f'clamp({fac}, vec3(0.0), vec3(1.0))'
    else:
        log.warn(f'Mix node: unsupported factor mode {node.factor_mode}.')
        return 'vec3(0.0, 0.0, 0.0)'

    # Inputs 4 and 5 are the vector A/B sockets
    vec_a = c.parse_vector_input(node.inputs[4])
    vec_b = c.parse_vector_input(node.inputs[5])
    return f'mix({vec_a}, {vec_b}, {fac})'
|
||||
|
||||
|
||||
def _parse_mixrgb(node: bpy.types.ShaderNodeMixRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Mix node, RGBA mode: blend two colors with the selected blend type.

    Each supported blend type maps to a GLSL expression template with
    placeholders {0} = color A, {1} = color B, {2} = factor. Blend types
    without a dedicated implementation fall back to a plain mix().
    """
    col1 = c.parse_vector_input(node.inputs[6])
    col2 = c.parse_vector_input(node.inputs[7])

    # Store factor in variable for linked factor input
    if node.inputs[0].is_linked:
        fac = c.node_name(node.name) + '_fac' + state.get_parser_pass_suffix()
        state.curshader.write('float {0} = {1};'.format(fac, c.parse_value_input(node.inputs[0])))
    else:
        fac = c.parse_value_input(node.inputs[0])

    if node.clamp_factor:
        fac = f'clamp({fac}, 0.0, 1.0)'

    # TODO: Do not mix if factor is constant 0.0 or 1.0?

    plain_mix = 'mix({0}, {1}, {2})'
    blend_templates = {
        'MIX': plain_mix,
        'ADD': 'mix({0}, {0} + {1}, {2})',
        'MULTIPLY': 'mix({0}, {0} * {1}, {2})',
        'SUBTRACT': 'mix({0}, {0} - {1}, {2})',
        'SCREEN': '(vec3(1.0) - (vec3(1.0 - {2}) + {2} * (vec3(1.0) - {1})) * (vec3(1.0) - {0}))',
        'DIVIDE': '(vec3((1.0 - {2}) * {0} + {2} * {0} / {1}))',
        'DIFFERENCE': 'mix({0}, abs({0} - {1}), {2})',
        'DARKEN': 'min({0}, {1} * {2})',
        'LIGHTEN': 'max({0}, {1} * {2})',
        'SOFT_LIGHT': '((1.0 - {2}) * {0} + {2} * ((vec3(1.0) - {0}) * {1} * {0} + {0} * (vec3(1.0) - (vec3(1.0) - {1}) * (vec3(1.0) - {0}))))',
        # The following modes have no dedicated GLSL implementation yet and
        # revert to a plain mix:
        'OVERLAY': plain_mix,
        'DODGE': plain_mix,
        'BURN': plain_mix,
        'HUE': plain_mix,
        'SATURATION': plain_mix,
        'VALUE': plain_mix,
        'COLOR': plain_mix,
        'LINEAR_LIGHT': plain_mix,
        # LINEAR_LIGHT alternative: '({0} + {2} * (2.0 * ({1} - vec3(0.5))))'
    }

    template = blend_templates.get(node.blend_type)
    if template is None:
        log.warn(f'MixRGB node: unsupported blend type {node.blend_type}.')
        return col1

    out_col = template.format(col1, col2, fac)
    if node.clamp_result:
        return 'clamp({0}, vec3(0.0), vec3(1.0))'.format(out_col)
    return out_col
|
||||
|
||||
|
||||
def parse_curvergb(node: bpy.types.ShaderNodeRGBCurve, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """RGB Curves node: apply the per-channel and combined (C) curves.

    curves[0..2] are the R/G/B curves, curves[3] is the combined curve
    applied to all three components.
    """
    fac = c.parse_value_input(node.inputs[0])
    vec = c.parse_vector_input(node.inputs[1])
    curves = node.mapping.curves
    name = c.node_name(node.name)
    # mapping.curves[0].points[0].handle_type
    r_curve = c.vector_curve(name + '0', vec + '.x', curves[0].points)
    g_curve = c.vector_curve(name + '1', vec + '.y', curves[1].points)
    b_curve = c.vector_curve(name + '2', vec + '.z', curves[2].points)
    c_curve_x = c.vector_curve(name + '3a', vec + '.x', curves[3].points)
    c_curve_y = c.vector_curve(name + '3b', vec + '.y', curves[3].points)
    c_curve_z = c.vector_curve(name + '3c', vec + '.z', curves[3].points)
    return (f'(sqrt(vec3({r_curve}, {g_curve}, {b_curve})'
            f' * vec3({c_curve_x}, {c_curve_y}, {c_curve_z})) * {fac})')
|
||||
|
||||
|
||||
def parse_lightfalloff(node: bpy.types.ShaderNodeLightFalloff, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Light Falloff node: only the Strength input is forwarded.

    See https://github.com/blender/blender/blob/master/source/blender/gpu/shaders/material/gpu_shader_material_light_falloff.glsl
    """
    strength = node.inputs['Strength']
    return c.parse_value_input(strength)
|
402
leenkx/blender/lnx/material/cycles_nodes/nodes_converter.py
Normal file
402
leenkx/blender/lnx/material/cycles_nodes/nodes_converter.py
Normal file
@ -0,0 +1,402 @@
|
||||
from typing import Union

import bpy

import lnx
import lnx.log as log
import lnx.material.cycles as c
import lnx.material.cycles_functions as c_functions
from lnx.material.parser_state import ParserPass, ParserState
from lnx.material.shader import floatstr, vec3str

# Hot-reload support: rebind all project modules when Leenkx reloads scripts.
if lnx.is_reload(__name__):
    log = lnx.reload_module(log)
    c = lnx.reload_module(c)
    c_functions = lnx.reload_module(c_functions)
    lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
    # Fix: re-import ParserPass alongside ParserState. Previously only
    # ParserState was rebound, leaving ParserPass pointing at the stale
    # pre-reload module, so enum comparisons like
    # `state.current_pass == ParserPass.REGULAR` silently failed after a
    # hot-reload (different enum member objects).
    from lnx.material.parser_state import ParserPass, ParserState
    lnx.material.shader = lnx.reload_module(lnx.material.shader)
    from lnx.material.shader import floatstr, vec3str
else:
    lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_maprange(node: bpy.types.ShaderNodeMapRange, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Map Range node: remap Value from [From Min, From Max] to [To Min, To Max].

    Supports the LINEAR, STEPPED, SMOOTHSTEP and SMOOTHERSTEP interpolation
    modes via GLSL helper functions; unsupported modes warn and return '0.0'.
    When the node's Clamp flag is set, the result is clamped to the target range.
    """
    interp = node.interpolation_type

    value: str = c.parse_value_input(node.inputs[0]) if node.inputs[0].is_linked else c.to_vec1(node.inputs[0].default_value)
    fromMin = c.parse_value_input(node.inputs[1])
    fromMax = c.parse_value_input(node.inputs[2])
    toMin = c.parse_value_input(node.inputs[3])
    toMax = c.parse_value_input(node.inputs[4])

    if interp == "LINEAR":
        state.curshader.add_function(c_functions.str_map_range_linear)
        out = f'map_range_linear({value}, {fromMin}, {fromMax}, {toMin}, {toMax})'

    elif interp == "STEPPED":
        # Fix: keep the parsed Steps input as a GLSL expression string.
        # The previous float(...) conversion raised ValueError whenever the
        # Steps socket was linked to another node, because parse_value_input
        # returns an arbitrary GLSL expression, not a numeric literal.
        steps = c.parse_value_input(node.inputs[5])
        state.curshader.add_function(c_functions.str_map_range_stepped)
        out = f'map_range_stepped({value}, {fromMin}, {fromMax}, {toMin}, {toMax}, {steps})'

    elif interp == "SMOOTHSTEP":
        state.curshader.add_function(c_functions.str_map_range_smoothstep)
        out = f'map_range_smoothstep({value}, {fromMin}, {fromMax}, {toMin}, {toMax})'

    elif interp == "SMOOTHERSTEP":
        state.curshader.add_function(c_functions.str_map_range_smootherstep)
        out = f'map_range_smootherstep({value}, {fromMin}, {fromMax}, {toMin}, {toMax})'

    else:
        log.warn(f'Interpolation mode {interp} not supported for Map Range node')
        return '0.0'

    if node.clamp:
        out = f'clamp({out}, {toMin}, {toMax})'

    return out
|
||||
|
||||
def parse_blackbody(node: bpy.types.ShaderNodeBlackbody, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Blackbody node: convert a temperature input into an emission color."""
    temperature = c.parse_value_input(node.inputs[0])
    state.curshader.add_function(c_functions.str_blackbody)
    return 'blackbody({0})'.format(temperature)
|
||||
|
||||
def parse_clamp(node: bpy.types.ShaderNodeClamp, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Clamp node: restrict Value to [Min, Max] (or the swapped range)."""
    value = c.parse_value_input(node.inputs['Value'])
    min_val = c.parse_value_input(node.inputs['Min'])
    max_val = c.parse_value_input(node.inputs['Max'])

    clamp_type = node.clamp_type
    if clamp_type == 'MINMAX':
        # Assumes Min < Max; the 'RANGE' type handles the swapped case
        return f'clamp({value}, {min_val}, {max_val})'
    if clamp_type == 'RANGE':
        # Order-independent: clamp into whichever of the two bounds is smaller
        return f'{min_val} < {max_val} ? clamp({value}, {min_val}, {max_val}) : clamp({value}, {max_val}, {min_val})'

    log.warn(f'Clamp node: unsupported clamp type {node.clamp_type}.')
    return value
|
||||
|
||||
|
||||
def parse_valtorgb(node: bpy.types.ShaderNodeValToRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """ColorRamp node: map a scalar factor onto the ramp's color stops.

    Emits constant arrays with the stop colors (and, for linear
    interpolation, the stop positions) into the shader, then returns a GLSL
    expression that indexes/interpolates them. Constants are only added in
    the REGULAR parser pass so repeated passes don't redeclare them.
    """
    # Alpha (TODO: make ColorRamp calculation vec4-based and split afterwards)
    if out_socket == node.outputs[1]:
        return '1.0'

    input_fac: bpy.types.NodeSocket = node.inputs[0]

    fac: str = c.parse_value_input(input_fac) if input_fac.is_linked else c.to_vec1(input_fac.default_value)
    interp = node.color_ramp.interpolation
    elems = node.color_ramp.elements

    # A single stop means a constant color, no shader-side lookup needed
    if len(elems) == 1:
        return c.to_vec3(elems[0].color)

    # Write color array
    # The last entry is included twice so that the interpolation
    # between indices works (no out of bounds error)
    cols_var = c.node_name(node.name).upper() + '_COLS'

    if state.current_pass == ParserPass.REGULAR:
        cols_entries = ', '.join(f'vec3({elem.color[0]}, {elem.color[1]}, {elem.color[2]})' for elem in elems)
        cols_entries += f', vec3({elems[len(elems) - 1].color[0]}, {elems[len(elems) - 1].color[1]}, {elems[len(elems) - 1].color[2]})'
        state.curshader.add_const("vec3", cols_var, cols_entries, array_size=len(elems) + 1)

    # Factor is evaluated once into a local so the index expression below
    # doesn't re-evaluate it per stop
    fac_var = c.node_name(node.name) + '_fac' + state.get_parser_pass_suffix()
    state.curshader.write(f'float {fac_var} = {fac};')

    # Get index of the nearest left element relative to the factor
    index = '0 + '
    index += ' + '.join([f'(({fac_var} > {elems[i].position}) ? 1 : 0)' for i in range(1, len(elems))])

    # Write index
    index_var = c.node_name(node.name) + '_i' + state.get_parser_pass_suffix()
    state.curshader.write(f'int {index_var} = {index};')

    if interp == 'CONSTANT':
        return f'{cols_var}[{index_var}]'

    # Linear interpolation
    else:
        # Write factor array
        facs_var = c.node_name(node.name).upper() + '_FACS'
        if state.current_pass == ParserPass.REGULAR:
            facs_entries = ', '.join(str(elem.position) for elem in elems)
            # Add one more entry at the rightmost position so that the
            # interpolation between indices works (no out of bounds error)
            facs_entries += ', 1.0'
            state.curshader.add_const("float", facs_var, facs_entries, array_size=len(elems) + 1)

        # Mix color
        prev_stop_fac = f'{facs_var}[{index_var}]'
        next_stop_fac = f'{facs_var}[{index_var} + 1]'
        prev_stop_col = f'{cols_var}[{index_var}]'
        next_stop_col = f'{cols_var}[{index_var} + 1]'
        # Relative position of the factor between the two surrounding stops;
        # max(..., 0.0) guards against a factor left of the first stop
        rel_pos = f'({fac_var} - {prev_stop_fac}) * (1.0 / ({next_stop_fac} - {prev_stop_fac}))'
        return f'mix({prev_stop_col}, {next_stop_col}, max({rel_pos}, 0.0))'
|
||||
|
||||
if bpy.app.version > (3, 2, 0):
    def parse_combine_color(node: bpy.types.ShaderNodeCombineColor, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
        """Combine Color node (Blender 3.3+): dispatch on the color model."""
        mode = node.mode
        if mode == 'RGB':
            return parse_combrgb(node, out_socket, state)
        if mode == 'HSV':
            return parse_combhsv(node, out_socket, state)
        if mode == 'HSL':
            # No GLSL helper for HSL yet, fall back to black
            log.warn('Combine Color node: HSL mode is not supported, using default value')
            return c.to_vec3((0.0, 0.0, 0.0))
|
||||
|
||||
|
||||
def parse_combhsv(node: bpy.types.ShaderNodeCombineHSV, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Combine HSV node: assemble a color from hue/saturation/value scalars."""
    # hsv_to_rgb() is defined in the hue/sat GLSL helper collection
    state.curshader.add_function(c_functions.str_hue_sat)
    hue, sat, val = (c.parse_value_input(node.inputs[i]) for i in range(3))
    return f'hsv_to_rgb(vec3({hue}, {sat}, {val}))'
|
||||
|
||||
|
||||
def parse_combrgb(node: bpy.types.ShaderNodeCombineRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Combine RGB node: assemble a vec3 from the three scalar channels."""
    channels = [c.parse_value_input(node.inputs[i]) for i in range(3)]
    return 'vec3({0}, {1}, {2})'.format(*channels)
|
||||
|
||||
|
||||
def parse_combxyz(node: bpy.types.ShaderNodeCombineXYZ, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Combine XYZ node: assemble a vec3 from the three scalar components."""
    components = [c.parse_value_input(node.inputs[i]) for i in range(3)]
    return 'vec3({0}, {1}, {2})'.format(*components)
|
||||
|
||||
|
||||
def parse_wavelength(node: bpy.types.ShaderNodeWavelength, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Wavelength node: approximate a spectral wavelength as an RGB color."""
    state.curshader.add_function(c_functions.str_wavelength_to_rgb)
    nanometers = c.parse_value_input(node.inputs[0])
    # Roughly map to cycles - 450 to 600 nanometers
    return f'wavelength_to_rgb(({nanometers} - 450.0) / 150.0)'
|
||||
|
||||
|
||||
def parse_vectormath(node: bpy.types.ShaderNodeVectorMath, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Vector Math node: emit the GLSL expression for the selected operation.

    Vector-typed outputs and the scalar Value output are handled in two
    separate branches keyed on `out_socket.type`. NOTE(review): the
    `c.parse_*_input` calls may write shader code as a side effect, so the
    order in which inputs are parsed is significant and preserved here.
    """
    op = node.operation

    vec1 = c.parse_vector_input(node.inputs[0])
    vec2 = c.parse_vector_input(node.inputs[1])

    if out_socket.type == 'VECTOR':
        if op == 'ADD':
            return f'({vec1} + {vec2})'
        elif op == 'SUBTRACT':
            return f'({vec1} - {vec2})'
        elif op == 'MULTIPLY':
            return f'({vec1} * {vec2})'
        elif op == 'DIVIDE':
            # Helper avoids division by zero components
            state.curshader.add_function(c_functions.str_safe_divide)
            return f'safe_divide({vec1}, {vec2})'

        elif op == 'NORMALIZE':
            return f'normalize({vec1})'
        elif op == 'SCALE':
            # Scale is input 3 despite being visually on another position (see the python tooltip in Blender)
            scale = c.parse_value_input(node.inputs[3])
            return f'{vec1} * {scale}'

        elif op == 'REFLECT':
            return f'reflect({vec1}, normalize({vec2}))'
        elif op == 'PROJECT':
            state.curshader.add_function(c_functions.str_project)
            return f'project({vec1}, {vec2})'
        elif op == 'CROSS_PRODUCT':
            return f'cross({vec1}, {vec2})'

        elif op == 'SINE':
            return f'sin({vec1})'
        elif op == 'COSINE':
            return f'cos({vec1})'
        elif op == 'TANGENT':
            return f'tan({vec1})'

        elif op == 'MODULO':
            return f'mod({vec1}, {vec2})'
        elif op == 'FRACTION':
            return f'fract({vec1})'

        elif op == 'SNAP':
            state.curshader.add_function(c_functions.str_safe_divide)
            return f'floor(safe_divide({vec1}, {vec2})) * {vec2}'
        elif op == 'WRAP':
            # Third vector input is only parsed when actually needed
            vec3 = c.parse_vector_input(node.inputs[2])
            state.curshader.add_function(c_functions.str_wrap)
            return f'wrap({vec1}, {vec2}, {vec3})'
        elif op == 'CEIL':
            return f'ceil({vec1})'
        elif op == 'FLOOR':
            return f'floor({vec1})'
        elif op == 'MAXIMUM':
            return f'max({vec1}, {vec2})'
        elif op == 'MINIMUM':
            return f'min({vec1}, {vec2})'
        elif op == 'ABSOLUTE':
            return f'abs({vec1})'

        # Unsupported vector op: pass the first input through unchanged
        log.warn(f'Vectormath node: unsupported operation {node.operation}.')
        return vec1

    # Float output
    if op == 'DOT_PRODUCT':
        return f'dot({vec1}, {vec2})'
    elif op == 'DISTANCE':
        return f'distance({vec1}, {vec2})'
    elif op == 'LENGTH':
        return f'length({vec1})'

    log.warn(f'Vectormath node: unsupported operation {node.operation}.')
    return '0.0'
|
||||
|
||||
|
||||
def parse_math(node: bpy.types.ShaderNodeMath, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Math node: emit the GLSL expression for the selected scalar operation.

    Operations are looked up in format-template tables with placeholders
    {0} = first input, {1} = second input, {2} = third input. The third
    input is only parsed for operations that use it, preserving the original
    parse order (parsing may write shader code as a side effect).

    Fix: an unsupported operation previously left `out_val` unbound and
    raised NameError; it now warns and falls back to '0.0'.
    """
    val1 = c.parse_value_input(node.inputs[0])
    val2 = c.parse_value_input(node.inputs[1])
    op = node.operation

    # Operations using only the first one or two inputs
    two_input_templates = {
        'ADD': '({0} + {1})',
        'SUBTRACT': '({0} - {1})',
        'MULTIPLY': '({0} * {1})',
        'DIVIDE': '({0} / {1})',
        'POWER': 'pow({0}, {1})',
        'LOGARITHM': 'log({0})',
        'SQRT': 'sqrt({0})',
        'INVERSE_SQRT': 'inversesqrt({0})',
        'ABSOLUTE': 'abs({0})',
        'EXPONENT': 'exp({0})',
        'MINIMUM': 'min({0}, {1})',
        'MAXIMUM': 'max({0}, {1})',
        'LESS_THAN': 'float({0} < {1})',
        'GREATER_THAN': 'float({0} > {1})',
        'SIGN': 'sign({0})',
        # round() is not available in all targets, emulate it
        'ROUND': 'floor({0} + 0.5)',
        'FLOOR': 'floor({0})',
        'CEIL': 'ceil({0})',
        'TRUNC': 'trunc({0})',
        'FRACT': 'fract({0})',
        # GLSL mod() instead of the % operator
        'MODULO': 'mod({0}, {1})',
        'SNAP': 'floor(({1} != 0.0) ? {0} / {1} : 0.0) * {1}',
        'PINGPONG': 'float(({1} != 0.0) ? abs(fract(({0} - {1}) / ({1} * 2.0)) * {1} * 2.0 - {1}) : 0.0)',
        'SINE': 'sin({0})',
        'COSINE': 'cos({0})',
        'TANGENT': 'tan({0})',
        'ARCSINE': 'asin({0})',
        'ARCCOSINE': 'acos({0})',
        'ARCTANGENT': 'atan({0})',
        'ARCTAN2': 'atan({0}, {1})',
        'SINH': 'sinh({0})',
        'COSH': 'cosh({0})',
        'TANH': 'tanh({0})',
        'RADIANS': 'radians({0})',
        'DEGREES': 'degrees({0})',
    }

    # Operations that additionally use the third input
    three_input_templates = {
        'MULTIPLY_ADD': '({0} * {1} + {2})',
        'COMPARE': 'float((abs({0} - {1}) <= max({2}, 1e-5)) ? 1.0 : 0.0)',
        'SMOOTH_MIN': 'float(float({2} != 0.0 ? min({0},{1}) - (max({2} - abs({0} - {1}), 0.0) / {2}) * (max({2} - abs({0} - {1}), 0.0) / {2}) * (max({2} - abs({0} - {1}), 0.0) / {2}) * {2} * (1.0 / 6.0) : min({0}, {1})))',
        # Smooth max is implemented as the negated smooth min of the negated inputs
        'SMOOTH_MAX': 'float(0-(float({2} != 0.0 ? min(-{0},-{1}) - (max({2} - abs(-{0} - (-{1})), 0.0) / {2}) * (max({2} - abs(-{0} - (-{1})), 0.0) / {2}) * (max({2} - abs(-{0} - (-{1})), 0.0) / {2}) * {2} * (1.0 / 6.0) : min(-{0}, (-{1})))))',
        'WRAP': 'float((({1}-{2}) != 0.0) ? {0} - (({1}-{2}) * floor(({0} - {2}) / ({1}-{2}))) : {2})',
    }

    if op in three_input_templates:
        val3 = c.parse_value_input(node.inputs[2])
        out_val = three_input_templates[op].format(val1, val2, val3)
    elif op in two_input_templates:
        out_val = two_input_templates[op].format(val1, val2)
    else:
        log.warn(f'Math node: unsupported operation {op}.')
        out_val = '0.0'

    if node.use_clamp:
        return 'clamp({0}, 0.0, 1.0)'.format(out_val)
    return out_val
|
||||
|
||||
|
||||
def parse_rgbtobw(node: bpy.types.ShaderNodeRGBToBW, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """RGB to BW node: collapse the input color to a single luminance value."""
    color = c.parse_vector_input(node.inputs[0])
    return c.rgb_to_bw(color)
|
||||
|
||||
if bpy.app.version > (3, 2, 0):
    def parse_separate_color(node: bpy.types.ShaderNodeSeparateColor, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
        """Separate Color node (Blender 3.3+): dispatch on the color model."""
        mode = node.mode
        if mode == 'RGB':
            return parse_seprgb(node, out_socket, state)
        if mode == 'HSV':
            return parse_sephsv(node, out_socket, state)
        if mode == 'HSL':
            # No GLSL helper for HSL yet, fall back to zero
            log.warn('Separate Color node: HSL mode is not supported, using default value')
            return '0.0'
|
||||
|
||||
|
||||
def parse_sephsv(node: bpy.types.ShaderNodeSeparateHSV, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Separate HSV node: convert the color once, then return one channel."""
    state.curshader.add_function(c_functions.str_hue_sat)

    hsv_var = c.node_name(node.name) + '_hsv' + state.get_parser_pass_suffix()
    # Convert only once, even when several output sockets are parsed
    if not state.curshader.contains(hsv_var):
        state.curshader.write(f'const vec3 {hsv_var} = rgb_to_hsv({c.parse_vector_input(node.inputs["Color"])}.rgb);')

    # Outputs 0/1/2 map to the x/y/z components (hue, saturation, value)
    for sock_index, component in enumerate(('x', 'y', 'z')):
        if out_socket == node.outputs[sock_index]:
            return f'{hsv_var}.{component}'
|
||||
|
||||
|
||||
def parse_seprgb(node: bpy.types.ShaderNodeSeparateRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Separate RGB node: return the swizzle for the requested channel."""
    color = c.parse_vector_input(node.inputs[0])
    for sock_index, channel in enumerate(('r', 'g', 'b')):
        if out_socket == node.outputs[sock_index]:
            return f'{color}.{channel}'
|
||||
|
||||
|
||||
def parse_sepxyz(node: bpy.types.ShaderNodeSeparateXYZ, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Separate XYZ node: return the swizzle for the requested component."""
    vector = c.parse_vector_input(node.inputs[0])
    for sock_index, component in enumerate(('x', 'y', 'z')):
        if out_socket == node.outputs[sock_index]:
            return f'{vector}.{component}'
|
426
leenkx/blender/lnx/material/cycles_nodes/nodes_input.py
Normal file
426
leenkx/blender/lnx/material/cycles_nodes/nodes_input.py
Normal file
@ -0,0 +1,426 @@
|
||||
from typing import Union
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
import lnx.material.mat_state as mat_state
|
||||
from lnx.material.parser_state import ParserState, ParserContext
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
log = lnx.reload_module(log)
|
||||
c = lnx.reload_module(c)
|
||||
c_functions = lnx.reload_module(c_functions)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState, ParserContext
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_attribute(node: bpy.types.ShaderNodeAttribute, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Attribute node: resolve the attribute name against what Leenkx supports.

    Lookup order: the special 'time' attribute, then (for GEOMETRY
    attributes) UV map names and vertex colors, then custom/Blender
    properties of the first object using this material. Unmatched names fall
    through to constant defaults at the end.
    """
    # Result is cast to the type the consuming socket expects
    out_type = 'float' if out_socket.type == 'VALUE' else 'vec3'

    if node.attribute_name == 'time':
        state.curshader.add_uniform('float time', link='_time')

        # Alpha output of the node
        if out_socket == node.outputs[3]:
            return '1.0'
        return c.cast_value('time', from_type='float', to_type=out_type)

    # UV maps (higher priority) and vertex colors
    if node.attribute_type == 'GEOMETRY':

        # Alpha output. Leenkx doesn't support vertex colors with alpha
        # values yet and UV maps don't have an alpha channel
        if out_socket == node.outputs[3]:
            return '1.0'

        # UV maps
        mat = c.mat_get_material()
        mat_users = c.mat_get_material_users()

        if mat_users is not None and mat in mat_users:
            # Use the first object that references this material
            mat_user = mat_users[mat][0]

            # Curves don't have uv layers, so check that first
            if hasattr(mat_user.data, 'uv_layers'):
                lays = mat_user.data.uv_layers

                # First UV map referenced
                if len(lays) > 0 and node.attribute_name == lays[0].name:
                    state.con.add_elem('tex', 'short2norm')
                    state.dxdy_varying_input_value = True
                    # V axis is flipped relative to Blender's convention
                    return c.cast_value('vec3(texCoord.x, 1.0 - texCoord.y, 0.0)', from_type='vec3', to_type=out_type)

                # Second UV map referenced
                elif len(lays) > 1 and node.attribute_name == lays[1].name:
                    state.con.add_elem('tex1', 'short2norm')
                    state.dxdy_varying_input_value = True
                    return c.cast_value('vec3(texCoord1.x, 1.0 - texCoord1.y, 0.0)', from_type='vec3', to_type=out_type)

        # Vertex colors
        # TODO: support multiple vertex color sets
        state.con.add_elem('col', 'short4norm')
        state.dxdy_varying_input_value = True
        return c.cast_value('vcolor', from_type='vec3', to_type=out_type)

    # Check object properties
    # see https://developer.blender.org/rB6fdcca8de6 for reference
    mat = c.mat_get_material()
    mat_users = c.mat_get_material_users()
    if mat_users is not None and mat in mat_users:
        # Use first material user for now...
        mat_user = mat_users[mat][0]

        val = None
        # Custom properties first
        if node.attribute_name in mat_user:
            val = mat_user[node.attribute_name]
        # Blender properties
        elif hasattr(mat_user, node.attribute_name):
            val = getattr(mat_user, node.attribute_name)

        if val is not None:
            if isinstance(val, float):
                return c.cast_value(str(val), from_type='float', to_type=out_type)
            elif isinstance(val, int):
                return c.cast_value(str(val), from_type='int', to_type=out_type)
            elif isinstance(val, mathutils.Vector) and len(val) <= 4:
                # Pad to 4 components so the alpha output can be served too
                out = val.to_4d()

                if out_socket == node.outputs[3]:
                    return c.to_vec1(out[3])
                return c.cast_value(c.to_vec3(out), from_type='vec3', to_type=out_type)

    # Default values, attribute name did not match
    if out_socket == node.outputs[3]:
        return '1.0'
    return c.cast_value('0.0', from_type='float', to_type=out_type)
|
||||
|
||||
|
||||
def parse_rgb(node: bpy.types.ShaderNodeRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """RGB input node: constant color, or a uniform if exposed as a material param."""
    if not node.lnx_material_param:
        return c.to_vec3(out_socket.default_value)

    # Exposed as a runtime-adjustable material parameter -> emit a uniform
    uniform_name = 'param_' + c.node_name(node.name)
    col = out_socket.default_value
    default = [float(col[0]), float(col[1]), float(col[2])]
    state.curshader.add_uniform(f'vec3 {uniform_name}', link=f'{node.name}', default_value=default, is_lnx_mat_param=True)
    return uniform_name
|
||||
|
||||
|
||||
def parse_vertex_color(node: bpy.types.ShaderNodeVertexColor, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Vertex Color node: request the color vertex element and return it."""
    # Make sure the geometry provides a per-vertex color attribute
    state.con.add_elem('col', 'short4norm')
    return 'vcolor'
|
||||
|
||||
|
||||
def parse_camera(node: bpy.types.ShaderNodeCameraData, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Camera Data node: view vector, linear depth or distance to the camera."""
    outputs = node.outputs

    # View Vector in camera space
    if out_socket == outputs[0]:
        state.dxdy_varying_input_value = True
        return 'vVecCam'

    # View Z Depth
    if out_socket == outputs[1]:
        state.curshader.add_include('std/math.glsl')
        state.curshader.add_uniform('vec2 cameraProj', link='_cameraPlaneProj')
        state.dxdy_varying_input_value = True
        return 'linearize(gl_FragCoord.z, cameraProj)'

    # View Distance
    state.curshader.add_uniform('vec3 eye', link='_cameraPosition')
    state.dxdy_varying_input_value = True
    return 'distance(eye, wposition)'
|
||||
|
||||
|
||||
def parse_geometry(node: bpy.types.ShaderNodeNewGeometry, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Geometry node: map each output socket to the matching shader varying.

    Pointiness and Random Per Island are not implemented and yield 0.0.
    """
    # Normal outputs use the interpolated normal in the fragment stage
    surface_normal = 'n' if state.curshader.shader_type == 'frag' else 'wnormal'

    # Outputs 0-5 are all derivative-carrying varyings
    varying_outputs = {
        0: 'wposition',      # Position
        1: surface_normal,   # Normal
        2: 'wtangent',       # Tangent
        3: surface_normal,   # True Normal
        4: 'vVec',           # Incoming
        5: 'mposition',      # Parametric
    }
    for sock_index, expression in varying_outputs.items():
        if out_socket == node.outputs[sock_index]:
            state.dxdy_varying_input_value = True
            return expression

    # Backfacing (only meaningful for object shaders)
    if out_socket == node.outputs[6]:
        return '(1.0 - float(gl_FrontFacing))' if state.context == ParserContext.OBJECT else '0.0'
    # Pointiness (not implemented)
    if out_socket == node.outputs[7]:
        return '0.0'
    # Random Per Island (not implemented)
    if out_socket == node.outputs[8]:
        return '0.0'
|
||||
|
||||
|
||||
def parse_hairinfo(node: bpy.types.ShaderNodeHairInfo, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Hair Info node: placeholder values only (hair is not supported)."""
    # Tangent Normal output -> zero vector
    if out_socket == node.outputs[3]:
        return 'vec3(0.0)'
    # Is Strand / Intercept / Thickness / Random -> constant placeholder
    return '0.5'
|
||||
|
||||
|
||||
def parse_objectinfo(node: bpy.types.ShaderNodeObjectInfo, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Object Info node: per-object data (location, color, indices, random).

    Most outputs are meaningless in the world context and fall back to
    constant defaults there.
    """
    # Location
    if out_socket == node.outputs[0]:
        if state.context == ParserContext.WORLD:
            return c.to_vec3((0.0, 0.0, 0.0))
        return 'wposition'

    # Color
    elif out_socket == node.outputs[1]:
        if state.context == ParserContext.WORLD:
            # Use world strength like Blender
            background_node = c.node_by_type(state.world.node_tree.nodes, 'BACKGROUND')
            if background_node is None:
                return c.to_vec3((0.0, 0.0, 0.0))
            # Strength (input 1) replicated into all three channels
            return c.to_vec3([background_node.inputs[1].default_value] * 3)

        # TODO: Implement object color in Iron
        # state.curshader.add_uniform('vec3 objectInfoColor', link='_objectInfoColor')
        # return 'objectInfoColor'
        return c.to_vec3((1.0, 1.0, 1.0))

    # Alpha
    elif out_socket == node.outputs[2]:
        # TODO, see color output above
        return '0.0'

    # Object Index
    elif out_socket == node.outputs[3]:
        if state.context == ParserContext.WORLD:
            return '0.0'
        state.curshader.add_uniform('float objectInfoIndex', link='_objectInfoIndex')
        return 'objectInfoIndex'

    # Material Index
    elif out_socket == node.outputs[4]:
        if state.context == ParserContext.WORLD:
            return '0.0'
        state.curshader.add_uniform('float objectInfoMaterialIndex', link='_objectInfoMaterialIndex')
        return 'objectInfoMaterialIndex'

    # Random
    elif out_socket == node.outputs[5]:
        if state.context == ParserContext.WORLD:
            return '0.0'

        # Use random value per instance
        if mat_state.uses_instancing:
            # Hash the instance id into [0, 1) in the vertex stage and pass
            # it flat to the fragment stage
            state.vert.add_out(f'flat float irand')
            state.frag.add_in(f'flat float irand')
            state.vert.write(f'irand = fract(sin(gl_InstanceID) * 43758.5453);')
            return 'irand'

        state.curshader.add_uniform('float objectInfoRandom', link='_objectInfoRandom')
        return 'objectInfoRandom'
|
||||
|
||||
|
||||
def parse_particleinfo(node: bpy.types.ShaderNodeParticleInfo, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Particle Info node.

    Requests the relevant per-particle attribute via ``c.particle_info``
    and returns the shader variable for it, or a constant fallback when
    particles are disabled in the render path.
    """
    have_particles = lnx.utils.get_rp().lnx_particles == 'On'

    # Per output index: (particle_info flag to request or None,
    #                    expression when particles are on,
    #                    fallback expression when particles are off).
    handlers = {
        0: ('index', 'p_index', '0.0'),
        1: (None, '0.0', '0.0'),  # TODO: Random
        2: ('age', 'p_age', '0.0'),
        3: ('lifetime', 'p_lifetime', '0.0'),
        4: ('location', 'p_location', 'vec3(0.0)'),
        5: ('size', '1.0', '1.0'),  # Size flag requested, but value is fixed
        6: ('velocity', 'p_velocity', 'vec3(0.0)'),
        7: ('angular_velocity', 'vec3(0.0)', 'vec3(0.0)'),  # Not implemented
    }

    for out_index, (flag, on_expr, off_expr) in handlers.items():
        if out_socket == node.outputs[out_index]:
            if flag is not None:
                c.particle_info[flag] = True
            return on_expr if have_particles else off_expr
|
||||
|
||||
|
||||
def parse_tangent(node: bpy.types.ShaderNodeTangent, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Return the world-space tangent variable for the Tangent node."""
    # Tangent varies across the surface, so derivative passes must re-evaluate.
    state.dxdy_varying_input_value = True
    return 'wtangent'
|
||||
|
||||
|
||||
def parse_texcoord(node: bpy.types.ShaderNodeTexCoord, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Parse the Texture Coordinate node.

    Output sockets by index: 0=Generated, 1=Normal, 2=UV, 3=Object,
    4=Camera (unsupported), 5=Window, 6=Reflection.
    """
    #obj = node.object
    #instance = node.from_instance
    if out_socket == node.outputs[0]: # Generated - bounds
        state.dxdy_varying_input_value = True
        return 'bposition'
    elif out_socket == node.outputs[1]: # Normal
        state.dxdy_varying_input_value = True
        return 'n'
    elif out_socket == node.outputs[2]: # UV
        if state.context == ParserContext.WORLD:
            # No mesh UVs in the world shader.
            return 'vec3(0.0)'
        state.con.add_elem('tex', 'short2norm')
        state.dxdy_varying_input_value = True
        # Flip V to match Blender's UV origin convention.
        return 'vec3(texCoord.x, 1.0 - texCoord.y, 0.0)'
    elif out_socket == node.outputs[3]: # Object
        state.dxdy_varying_input_value = True
        return 'mposition'
    elif out_socket == node.outputs[4]: # Camera
        return 'vec3(0.0)' # 'vposition'
    elif out_socket == node.outputs[5]: # Window
        # TODO: Don't use gl_FragCoord here, it uses different axes on different graphics APIs
        state.frag.add_uniform('vec2 screenSize', link='_screenSize')
        state.dxdy_varying_input_value = True
        return f'vec3(gl_FragCoord.xy / screenSize, 0.0)'
    elif out_socket == node.outputs[6]: # Reflection
        if state.context == ParserContext.WORLD:
            state.dxdy_varying_input_value = True
            return 'n'
        # Reflection is only supported in the world context.
        return 'vec3(0.0)'
|
||||
|
||||
|
||||
def parse_uvmap(node: bpy.types.ShaderNodeUVMap, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Parse the UV Map node, returning UVs from the first or second UV
    layer of the first mesh that uses this material.

    NOTE(review): only the first material user is inspected; other users
    with differing UV layer names fall back to the first UV map.
    """
    # instance = node.from_instance
    state.con.add_elem('tex', 'short2norm')
    mat = c.mat_get_material()
    mat_users = c.mat_get_material_users()

    state.dxdy_varying_input_value = True

    if mat_users is not None and mat in mat_users:
        mat_user = mat_users[mat][0]
        if hasattr(mat_user.data, 'uv_layers'):
            layers = mat_user.data.uv_layers
            # Second UV map referenced
            if len(layers) > 1 and node.uv_map == layers[1].name:
                state.con.add_elem('tex1', 'short2norm')
                return 'vec3(texCoord1.x, 1.0 - texCoord1.y, 0.0)'

    # Default: first UV map, with V flipped to Blender's convention.
    return 'vec3(texCoord.x, 1.0 - texCoord.y, 0.0)'
|
||||
|
||||
|
||||
def parse_fresnel(node: bpy.types.ShaderNodeFresnel, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Parse the Fresnel node into a call to the GLSL fresnel() helper."""
    state.curshader.add_function(c_functions.str_fresnel)
    ior_expr = c.parse_value_input(node.inputs[0])

    # Use the linked normal if provided, otherwise the precomputed dotNV.
    normal_socket = node.inputs[1]
    if normal_socket.is_linked:
        nv_expr = f'dot({c.parse_vector_input(normal_socket)}, vVec)'
    else:
        nv_expr = 'dotNV'

    state.dxdy_varying_input_value = True
    return f'fresnel({ior_expr}, {nv_expr})'
|
||||
|
||||
|
||||
def parse_layerweight(node: bpy.types.ShaderNodeLayerWeight, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Parse the Layer Weight node (Fresnel or Facing output)."""
    blend_expr = c.parse_value_input(node.inputs[0])

    # Use the linked normal if provided, otherwise the precomputed dotNV.
    normal_socket = node.inputs[1]
    if normal_socket.is_linked:
        nv_expr = f'dot({c.parse_vector_input(normal_socket)}, vVec)'
    else:
        nv_expr = 'dotNV'

    state.dxdy_varying_input_value = True

    if out_socket == node.outputs[0]:
        # Fresnel output: blend is remapped to an effective IOR.
        state.curshader.add_function(c_functions.str_fresnel)
        return f'fresnel(1.0 / (1.0 - {blend_expr}), {nv_expr})'
    elif out_socket == node.outputs[1]:
        # Facing output.
        return f'(1.0 - pow({nv_expr}, ({blend_expr} < 0.5) ? 2.0 * {blend_expr} : 0.5 / (1.0 - {blend_expr})))'
|
||||
|
||||
|
||||
def parse_lightpath(node: bpy.types.ShaderNodeLightPath, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Parse the Light Path node using the same constant approximations a
    rasterizer can offer.

    https://github.com/blender/blender/blob/master/source/blender/gpu/shaders/material/gpu_shader_material_light_path.glsl
    """
    constant_outputs = {
        'Is Camera Ray': '1.0',
        'Is Shadow Ray': '0.0',
        'Is Diffuse Ray': '1.0',
        'Is Glossy Ray': '1.0',
        'Is Singular Ray': '0.0',
        'Is Reflection Ray': '0.0',
        'Is Transmission Ray': '0.0',
        'Ray Length': '1.0',
        'Ray Depth': '0.0',
        'Diffuse Depth': '0.0',
        'Glossy Depth': '0.0',
        'Transparent Depth': '0.0',
        'Transmission Depth': '0.0',
    }
    for output_name, value in constant_outputs.items():
        if out_socket == node.outputs[output_name]:
            return value

    log.warn(f'Light Path node: unsupported output {out_socket.identifier}.')
    return '0.0'
|
||||
|
||||
|
||||
def parse_value(node: bpy.types.ShaderNodeValue, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Parse a Value node: inline the constant, or declare a runtime
    uniform when the node is exposed as a material parameter."""
    default = node.outputs[0].default_value

    if not node.lnx_material_param:
        # Plain constant: bake the value into the shader source.
        return c.to_vec1(default)

    # Exposed parameter: declare a uniform linked to the node name so the
    # engine can update it at runtime.
    uniform_name = 'param_' + c.node_name(node.name)
    state.curshader.add_uniform(f'float {uniform_name}', link=node.name,
                                default_value=default, is_lnx_mat_param=True)
    return uniform_name
|
||||
|
||||
|
||||
def parse_wireframe(node: bpy.types.ShaderNodeWireframe, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Wireframe node is not implemented; always returns zero.

    TODO: honor node.use_pixel_size and the Size input
    (c.parse_value_input(node.inputs[0])).
    """
    return '0.0'
|
290
leenkx/blender/lnx/material/cycles_nodes/nodes_shader.py
Normal file
290
leenkx/blender/lnx/material/cycles_nodes/nodes_shader.py
Normal file
@ -0,0 +1,290 @@
|
||||
import bpy
|
||||
from bpy.types import NodeSocket
|
||||
|
||||
import lnx
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
from lnx.material.parser_state import ParserState
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
c = lnx.reload_module(c)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
mat_utils = lnx.reload_module(mat_utils)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_mixshader(node: bpy.types.ShaderNodeMixShader, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Mix Shader node by linearly blending all PBR outputs of
    the two input shaders with the (clamped) Fac value.

    Emission types of both branches are captured separately and combined
    afterwards, because parse_shader_input mutates mat_state.emission_type.
    """
    # Skip mixing if only one input is effectively used
    if not node.inputs[0].is_linked:
        if node.inputs[0].default_value <= 0.0:
            # Fac == 0: only the first shader contributes.
            c.parse_shader_input(node.inputs[1])
            return
        elif node.inputs[0].default_value >= 1.0:
            # Fac == 1: only the second shader contributes.
            c.parse_shader_input(node.inputs[2])
            return

    prefix = '' if node.inputs[0].is_linked else 'const '
    fac = c.parse_value_input(node.inputs[0])
    # NOTE(review): fac_var carries the parser pass suffix but fac_inv_var
    # does not — confirm this cannot redeclare fac_inv in multi-pass parses.
    fac_var = c.node_name(node.name) + '_fac' + state.get_parser_pass_suffix()
    fac_inv_var = c.node_name(node.name) + '_fac_inv'
    state.curshader.write('{0}float {1} = clamp({2}, 0.0, 1.0);'.format(prefix, fac_var, fac))
    state.curshader.write('{0}float {1} = 1.0 - {2};'.format(prefix, fac_inv_var, fac_var))

    # Parse first branch, capturing its emission kind.
    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
    bc1, rough1, met1, occ1, spec1, opac1, ior1, emi1 = c.parse_shader_input(node.inputs[1])
    ek1 = mat_state.emission_type

    # Parse second branch, capturing its emission kind.
    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
    bc2, rough2, met2, occ2, spec2, opac2, ior2, emi2 = c.parse_shader_input(node.inputs[2])
    ek2 = mat_state.emission_type

    if state.parse_surface:
        # lerp: first shader weighted by (1 - fac), second by fac.
        state.out_basecol = '({0} * {3} + {1} * {2})'.format(bc1, bc2, fac_var, fac_inv_var)
        state.out_roughness = '({0} * {3} + {1} * {2})'.format(rough1, rough2, fac_var, fac_inv_var)
        state.out_metallic = '({0} * {3} + {1} * {2})'.format(met1, met2, fac_var, fac_inv_var)
        state.out_occlusion = '({0} * {3} + {1} * {2})'.format(occ1, occ2, fac_var, fac_inv_var)
        state.out_specular = '({0} * {3} + {1} * {2})'.format(spec1, spec2, fac_var, fac_inv_var)
        state.out_emission_col = '({0} * {3} + {1} * {2})'.format(emi1, emi2, fac_var, fac_inv_var)
        mat_state.emission_type = mat_state.EmissionType.get_effective_combination(ek1, ek2)
    if state.parse_opacity:
        state.out_opacity = '({0} * {3} + {1} * {2})'.format(opac1, opac2, fac_var, fac_inv_var)
        state.out_ior = '({0} * {3} + {1} * {2})'.format(ior1, ior2, fac_var, fac_inv_var)
|
||||
|
||||
def parse_addshader(node: bpy.types.ShaderNodeAddShader, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Add Shader node.

    Emission and base color are summed like Blender's Add Shader; the
    remaining PBR outputs are averaged (50/50), since they have no
    physically meaningful sum.
    """
    # Parse first branch, capturing its emission kind (parse_shader_input
    # mutates mat_state.emission_type).
    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
    bc1, rough1, met1, occ1, spec1, opac1, ior1, emi1 = c.parse_shader_input(node.inputs[0])
    ek1 = mat_state.emission_type

    # Parse second branch, capturing its emission kind.
    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
    bc2, rough2, met2, occ2, spec2, opac2, ior2, emi2 = c.parse_shader_input(node.inputs[1])
    ek2 = mat_state.emission_type

    if state.parse_surface:
        state.out_basecol = '({0} + {1})'.format(bc1, bc2)
        state.out_roughness = '({0} * 0.5 + {1} * 0.5)'.format(rough1, rough2)
        state.out_metallic = '({0} * 0.5 + {1} * 0.5)'.format(met1, met2)
        state.out_occlusion = '({0} * 0.5 + {1} * 0.5)'.format(occ1, occ2)
        state.out_specular = '({0} * 0.5 + {1} * 0.5)'.format(spec1, spec2)
        state.out_emission_col = '({0} + {1})'.format(emi1, emi2)
        mat_state.emission_type = mat_state.EmissionType.get_effective_combination(ek1, ek2)
    if state.parse_opacity:
        state.out_opacity = '({0} * 0.5 + {1} * 0.5)'.format(opac1, opac2)
        state.out_ior = '({0} * 0.5 + {1} * 0.5)'.format(ior1, ior2)
|
||||
|
||||
|
||||
if bpy.app.version < (3, 0, 0):
    def parse_bsdfprincipled(node: bpy.types.ShaderNodeBsdfPrincipled, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Principled BSDF using the pre-Blender-3.0 socket
        layout (indices are hard-coded per Blender version)."""
        if state.parse_surface:
            c.write_normal(node.inputs[20])  # Normal socket
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            state.out_metallic = c.parse_value_input(node.inputs[4])
            state.out_specular = c.parse_value_input(node.inputs[5])
            state.out_roughness = c.parse_value_input(node.inputs[7])
            # Emit emission only when both strength and color are non-zero
            # or driven by links.
            if (node.inputs['Emission Strength'].is_linked or node.inputs['Emission Strength'].default_value != 0.0)\
                    and (node.inputs['Emission'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission'], (0.0, 0.0, 0.0), comp_alpha=False)):
                emission_col = c.parse_vector_input(node.inputs[17])
                emission_strength = c.parse_value_input(node.inputs[18])
                state.out_emission_col = '({0} * {1})'.format(emission_col, emission_strength)
                mat_state.emission_type = mat_state.EmissionType.SHADED
            else:
                mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
        if state.parse_opacity:
            state.out_ior = c.parse_value_input(node.inputs[14])   # IOR
            state.out_opacity = c.parse_value_input(node.inputs[19])  # Alpha
|
||||
if bpy.app.version >= (3, 0, 0) and bpy.app.version <= (4, 1, 0):
    def parse_bsdfprincipled(node: bpy.types.ShaderNodeBsdfPrincipled, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Principled BSDF for Blender 3.0 through 4.1.0.

        NOTE(review): Blender 4.0 reordered the Principled BSDF sockets;
        verify these hard-coded indices against each version in range.
        """
        if state.parse_surface:
            c.write_normal(node.inputs[22])  # Normal socket
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            # subsurface = c.parse_vector_input(node.inputs[1])
            # subsurface_radius = c.parse_vector_input(node.inputs[2])
            # subsurface_color = c.parse_vector_input(node.inputs[3])
            state.out_metallic = c.parse_value_input(node.inputs[6])
            state.out_specular = c.parse_value_input(node.inputs[7])
            # specular_tint = c.parse_vector_input(node.inputs[6])
            state.out_roughness = c.parse_value_input(node.inputs[9])
            # aniso = c.parse_vector_input(node.inputs[8])
            # aniso_rot = c.parse_vector_input(node.inputs[9])
            # sheen = c.parse_vector_input(node.inputs[10])
            # sheen_tint = c.parse_vector_input(node.inputs[11])
            # clearcoat = c.parse_vector_input(node.inputs[12])
            # clearcoat_rough = c.parse_vector_input(node.inputs[13])
            # ior = c.parse_vector_input(node.inputs[14])
            # transmission = c.parse_vector_input(node.inputs[15])
            # transmission_roughness = c.parse_vector_input(node.inputs[16])
            # Emit emission only when both strength and color are non-zero
            # or driven by links.
            if (node.inputs['Emission Strength'].is_linked or node.inputs['Emission Strength'].default_value != 0.0)\
                    and (node.inputs['Emission'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission'], (0.0, 0.0, 0.0), comp_alpha=False)):
                emission_col = c.parse_vector_input(node.inputs[19])
                emission_strength = c.parse_value_input(node.inputs[20])
                state.out_emission_col = '({0} * {1})'.format(emission_col, emission_strength)
                mat_state.emission_type = mat_state.EmissionType.SHADED
            else:
                mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
            # clearcoar_normal = c.parse_vector_input(node.inputs[21])
            # tangent = c.parse_vector_input(node.inputs[22])
        if state.parse_opacity:
            state.out_ior = c.parse_value_input(node.inputs[16])
            # Guard: older 3.x builds may not expose the Alpha socket index.
            if len(node.inputs) >= 21:
                state.out_opacity = c.parse_value_input(node.inputs[21])
|
||||
if bpy.app.version > (4, 1, 0):
    def parse_bsdfprincipled(node: bpy.types.ShaderNodeBsdfPrincipled, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Principled BSDF using the post-4.1 socket layout."""
        if state.parse_surface:
            c.write_normal(node.inputs[5])  # Normal socket
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            # Parsed for their shader-emitting side effects; values are
            # currently unused (no SSS support in the output).
            subsurface = c.parse_value_input(node.inputs[7])
            subsurface_radius = c.parse_vector_input(node.inputs[9])
            subsurface_color = c.parse_vector_input(node.inputs[8])
            state.out_metallic = c.parse_value_input(node.inputs[1])
            state.out_specular = c.parse_value_input(node.inputs[12])
            state.out_roughness = c.parse_value_input(node.inputs[2])
            # Prevent black material when metal = 1.0 and roughness = 0.0
            try:
                if float(state.out_roughness) < 0.00101:
                    state.out_roughness = '0.001'
            except ValueError:
                # Roughness is a GLSL expression, not a literal; leave as-is.
                pass
            # Emit emission only when both strength and color are non-zero
            # or driven by links.
            if (node.inputs['Emission Strength'].is_linked or node.inputs['Emission Strength'].default_value != 0.0)\
                    and (node.inputs['Emission Color'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission Color'], (0.0, 0.0, 0.0), comp_alpha=False)):
                emission_col = c.parse_vector_input(node.inputs[26])
                emission_strength = c.parse_value_input(node.inputs[27])
                state.out_emission_col = '({0} * {1})'.format(emission_col, emission_strength)
                mat_state.emission_type = mat_state.EmissionType.SHADED
            else:
                mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
            #state.out_occlusion = state.out_roughness
            #state.out_aniso = c.parse_vector_input(node.inputs[14])
            #state.out_aniso_rot = c.parse_vector_input(node.inputs[15])
            #state.out_sheen = c.parse_vector_input(node.inputs[23])
            #state.out_sheen_tint = c.parse_vector_input(node.inputs[25])
            #state.out_clearcoat = c.parse_vector_input(node.inputs[18])
            #state.out_clearcoat_rough = c.parse_vector_input(node.inputs[19])
            #state.out_ior = c.parse_value_input(node.inputs[3])
            #state.out_transmission = c.parse_vector_input(node.inputs[17])
            #state.out_transmission_roughness = state.out_roughness
        if state.parse_opacity:
            state.out_ior = c.parse_value_input(node.inputs[3])   # IOR
            state.out_opacity = c.parse_value_input(node.inputs[4])  # Alpha
|
||||
|
||||
def parse_bsdfdiffuse(node: bpy.types.ShaderNodeBsdfDiffuse, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Diffuse BSDF node."""
    if not state.parse_surface:
        return
    c.write_normal(node.inputs[2])
    state.out_basecol = c.parse_vector_input(node.inputs[0])
    state.out_roughness = c.parse_value_input(node.inputs[1])
    # Pure diffuse: no specular contribution.
    state.out_specular = '0.0'
|
||||
|
||||
if bpy.app.version >= (4, 0, 0):
    def parse_bsdfsheen(node: bpy.types.ShaderNodeBsdfSheen, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Sheen BSDF node (Blender 4.0+ only)."""
        if not state.parse_surface:
            return
        c.write_normal(node.inputs[2])
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        state.out_roughness = c.parse_value_input(node.inputs[1])
|
||||
|
||||
if bpy.app.version < (4, 1, 0):
    def parse_bsdfglossy(node: bpy.types.ShaderNodeBsdfGlossy, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Glossy BSDF node (pre-4.1 socket layout)."""
        if not state.parse_surface:
            return
        c.write_normal(node.inputs[2])
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        state.out_roughness = c.parse_value_input(node.inputs[1])
        # Glossy is treated as fully metallic.
        state.out_metallic = '1.0'
else:
    def parse_bsdfglossy(node: bpy.types.ShaderNodeBsdfAnisotropic, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Glossy BSDF node (4.1+ merged it with Anisotropic;
        the Normal socket moved to index 4)."""
        if not state.parse_surface:
            return
        c.write_normal(node.inputs[4])
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        state.out_roughness = c.parse_value_input(node.inputs[1])
        # Glossy is treated as fully metallic.
        state.out_metallic = '1.0'
|
||||
|
||||
|
||||
def parse_ambientocclusion(node: bpy.types.ShaderNodeAmbientOcclusion, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Ambient Occlusion node; only the red channel of the
    color input is used as the occlusion factor."""
    if not state.parse_surface:
        return
    state.out_occlusion = c.parse_vector_input(node.inputs[0]) + '.r'
|
||||
|
||||
|
||||
def parse_bsdfanisotropic(node: bpy.types.ShaderNodeBsdfAnisotropic, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Anisotropic BSDF node by approximating it as glossy
    (anisotropy itself is not supported)."""
    if not state.parse_surface:
        return
    c.write_normal(node.inputs[4])
    state.out_basecol = c.parse_vector_input(node.inputs[0])
    state.out_roughness = c.parse_value_input(node.inputs[1])
    # Glossy approximation: fully metallic.
    state.out_metallic = '1.0'
|
||||
|
||||
|
||||
def parse_emission(node: bpy.types.ShaderNodeEmission, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Emission node: pure shadeless emission with all other
    PBR channels zeroed out."""
    if not state.parse_surface:
        return
    color_expr = c.parse_vector_input(node.inputs[0])
    strength_expr = c.parse_value_input(node.inputs[1])
    state.out_emission_col = f'({color_expr} * {strength_expr})'
    state.out_basecol = 'vec3(0.0)'
    state.out_specular = '0.0'
    state.out_metallic = '0.0'
    mat_state.emission_type = mat_state.EmissionType.SHADELESS
|
||||
|
||||
|
||||
def parse_bsdfglass(node: bpy.types.ShaderNodeBsdfGlass, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Glass BSDF node."""
    if state.parse_surface:
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        c.write_normal(node.inputs[3])
        state.out_roughness = c.parse_value_input(node.inputs[1])
    if state.parse_opacity:
        # Fully transparent; refraction handled through the IOR.
        state.out_opacity = '0.0'
        state.out_ior = c.parse_value_input(node.inputs[2])
|
||||
|
||||
|
||||
def parse_bsdfhair(node: bpy.types.ShaderNodeBsdfHair, out_socket: NodeSocket, state: ParserState) -> None:
    """Hair BSDF is not supported; intentionally a no-op."""
|
||||
|
||||
|
||||
def parse_holdout(node: bpy.types.ShaderNodeHoldout, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Holdout node by fully occluding the surface."""
    if not state.parse_surface:
        return
    # Occlude
    state.out_occlusion = '0.0'
|
||||
|
||||
|
||||
def parse_bsdfrefraction(node: bpy.types.ShaderNodeBsdfRefraction, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Refraction BSDF node."""
    if state.parse_surface:
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        c.write_normal(node.inputs[3])
        state.out_roughness = c.parse_value_input(node.inputs[1])
    if state.parse_opacity:
        # Fully transparent; refraction handled through the IOR.
        state.out_opacity = '0.0'
        state.out_ior = c.parse_value_input(node.inputs[2])
|
||||
|
||||
def parse_subsurfacescattering(node: bpy.types.ShaderNodeSubsurfaceScattering, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Subsurface Scattering node (base color only; actual
    scattering is not supported)."""
    if not state.parse_surface:
        return
    # Blender 4.1 moved the Normal socket from index 4 to 6.
    normal_index = 4 if bpy.app.version < (4, 1, 0) else 6
    c.write_normal(node.inputs[normal_index])
    state.out_basecol = c.parse_vector_input(node.inputs[0])
|
||||
|
||||
|
||||
def parse_bsdftoon(node: bpy.types.ShaderNodeBsdfToon, out_socket: NodeSocket, state: ParserState) -> None:
    """Toon BSDF is not supported; intentionally a no-op."""
    # c.write_normal(node.inputs[3])
|
||||
|
||||
|
||||
def parse_bsdftranslucent(node: bpy.types.ShaderNodeBsdfTranslucent, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Translucent BSDF node; opacity is the inverse of the
    color's red channel."""
    if state.parse_surface:
        c.write_normal(node.inputs[1])
    if state.parse_opacity:
        color_expr = c.parse_vector_input(node.inputs[0])
        state.out_opacity = f'(1.0 - {color_expr}.r)'
        state.out_ior = '1.0'
|
||||
|
||||
|
||||
def parse_bsdftransparent(node: bpy.types.ShaderNodeBsdfTransparent, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Transparent BSDF node; opacity is the inverse of the
    color's red channel."""
    if not state.parse_opacity:
        return
    color_expr = c.parse_vector_input(node.inputs[0])
    state.out_opacity = f'(1.0 - {color_expr}.r)'
    state.out_ior = '1.0'
|
||||
|
||||
if bpy.app.version < (4, 1, 0):
    def parse_bsdfvelvet(node: bpy.types.ShaderNodeBsdfVelvet, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Velvet BSDF node (removed in Blender 4.1)."""
        if not state.parse_surface:
            return
        c.write_normal(node.inputs[2])
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        # Velvet approximation: fixed fully-rough, fully-metallic response.
        state.out_roughness = '1.0'
        state.out_metallic = '1.0'
|
591
leenkx/blender/lnx/material/cycles_nodes/nodes_texture.py
Normal file
591
leenkx/blender/lnx/material/cycles_nodes/nodes_texture.py
Normal file
@ -0,0 +1,591 @@
|
||||
import math
|
||||
import os
|
||||
from typing import Union
|
||||
|
||||
import bpy
|
||||
|
||||
import lnx.assets as assets
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
from lnx.material.parser_state import ParserState, ParserContext, ParserPass
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
import lnx.utils
|
||||
import lnx.write_probes as write_probes
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
assets = lnx.reload_module(assets)
|
||||
log = lnx.reload_module(log)
|
||||
c = lnx.reload_module(c)
|
||||
c_functions = lnx.reload_module(c_functions)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState, ParserContext, ParserPass
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
write_probes = lnx.reload_module(write_probes)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_tex_brick(node: bpy.types.ShaderNodeTexBrick, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Brick Texture node into a tex_brick()/tex_brick_f()
    GLSL call."""
    state.curshader.add_function(c_functions.str_tex_brick)

    # Fall back to bounds-position when no coordinates are linked.
    coords_socket = node.inputs[0]
    coords = c.parse_vector_input(coords_socket) if coords_socket.is_linked else 'bposition'

    if out_socket == node.outputs[0]:
        # Color output: two brick colors plus mortar color.
        color1 = c.parse_vector_input(node.inputs[1])
        color2 = c.parse_vector_input(node.inputs[2])
        mortar = c.parse_vector_input(node.inputs[3])
        scale = c.parse_value_input(node.inputs[4])
        return f'tex_brick({coords} * {scale}, {color1}, {color2}, {mortar})'

    # Fac output.
    scale = c.parse_value_input(node.inputs[4])
    return f'tex_brick_f({coords} * {scale})'
|
||||
|
||||
|
||||
def parse_tex_checker(node: bpy.types.ShaderNodeTexChecker, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Checker Texture node into a tex_checker()/tex_checker_f()
    GLSL call."""
    state.curshader.add_function(c_functions.str_tex_checker)

    # Fall back to bounds-position when no coordinates are linked.
    coords_socket = node.inputs[0]
    coords = c.parse_vector_input(coords_socket) if coords_socket.is_linked else 'bposition'

    if out_socket == node.outputs[0]:
        # Color output.
        color1 = c.parse_vector_input(node.inputs[1])
        color2 = c.parse_vector_input(node.inputs[2])
        scale = c.parse_value_input(node.inputs[3])
        return f'tex_checker({coords}, {color1}, {color2}, {scale})'

    # Fac output.
    scale = c.parse_value_input(node.inputs[3])
    return f'tex_checker_f({coords}, {scale})'
|
||||
|
||||
|
||||
def parse_tex_gradient(node: bpy.types.ShaderNodeTexGradient, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Gradient Texture node into a GLSL expression.

    Implements all gradient types following Blender's
    gpu_shader_material_tex_gradient.glsl (previously QUADRATIC, EASING
    and QUADRATIC_SPHERE were stubbed to a constant 0.0).
    Returns a vec3 expression for the Color output, a float expression
    for the Fac output; both are clamped to [0, 1].
    """
    if node.inputs[0].is_linked:
        co = c.parse_vector_input(node.inputs[0])
    else:
        # Fall back to bounds-position when no coordinates are linked.
        co = 'bposition'

    grad = node.gradient_type
    if grad == 'LINEAR':
        f = f'{co}.x'
    elif grad == 'QUADRATIC':
        # r = max(x, 0); r * r
        f = f'(max({co}.x, 0.0) * max({co}.x, 0.0))'
    elif grad == 'EASING':
        # GLSL smoothstep(0, 1, x) is exactly Blender's clamp + cubic easing.
        f = f'smoothstep(0.0, 1.0, {co}.x)'
    elif grad == 'DIAGONAL':
        f = f'({co}.x + {co}.y) * 0.5'
    elif grad == 'RADIAL':
        f = f'atan({co}.y, {co}.x) / PI2 + 0.5'
    elif grad == 'QUADRATIC_SPHERE':
        # r = max(1 - length(co), 0); r * r
        f = f'pow(max(1.0 - sqrt({co}.x * {co}.x + {co}.y * {co}.y + {co}.z * {co}.z), 0.0), 2.0)'
    else:  # SPHERICAL
        f = f'max(1.0 - sqrt({co}.x * {co}.x + {co}.y * {co}.y + {co}.z * {co}.z), 0.0)'

    # Color
    if out_socket == node.outputs[0]:
        res = f'vec3(clamp({f}, 0.0, 1.0))'
    # Fac
    else:
        res = f'(clamp({f}, 0.0, 1.0))'

    return res
|
||||
|
||||
|
||||
def parse_tex_image(node: bpy.types.ShaderNodeTexImage, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Image Texture node.

    Object context: registers the texture and returns the '.rgb' or '.a'
    swizzle of the fetched texel; missing images are replaced by a pink
    debug color. World context: samples the image as a screen-space
    environment background and triggers prefiltered probe generation.
    """
    # Color or Alpha output
    use_color_out = out_socket == node.outputs[0]

    if state.context == ParserContext.OBJECT:
        tex_store = c.store_var_name(node)

        # Separate store per parser pass when derivatives force re-evaluation.
        if c.node_need_reevaluation_for_screenspace_derivative(node):
            tex_store += state.get_parser_pass_suffix()

        # Already fetched
        if c.is_parsed(tex_store):
            if use_color_out:
                return f'{tex_store}.rgb'
            else:
                return f'{tex_store}.a'

        tex_name = c.node_name(node.name)
        tex = c.make_texture_from_image_node(node, tex_name)
        tex_link = None
        tex_default_file = None
        is_lnx_mat_param = None
        # Exposed material parameter: link the sampler by node name so the
        # engine can swap the texture at runtime.
        if node.lnx_material_param:
            tex_link = node.name
            is_lnx_mat_param = True

        if tex is not None:
            state.curshader.write_textures += 1
            if node.lnx_material_param and tex['file'] is not None:
                tex_default_file = tex['file']
            if use_color_out:
                # Only sRGB images need conversion to linear space.
                to_linear = node.image is not None and node.image.colorspace_settings.name == 'sRGB'
                res = f'{c.texture_store(node, tex, tex_name, to_linear, tex_link=tex_link, default_value=tex_default_file, is_lnx_mat_param=is_lnx_mat_param)}.rgb'
            else:
                res = f'{c.texture_store(node, tex, tex_name, tex_link=tex_link, default_value=tex_default_file, is_lnx_mat_param=is_lnx_mat_param)}.a'
            state.curshader.write_textures -= 1
            return res

        # Empty texture
        elif node.image is None:
            tex = {
                'name': tex_name,
                'file': ''
            }
            # NOTE(review): alpha path passes to_linear=True for an empty
            # texture while the color path passes False — confirm intended.
            if use_color_out:
                return '{0}.rgb'.format(c.texture_store(node, tex, tex_name, to_linear=False, tex_link=tex_link, is_lnx_mat_param=is_lnx_mat_param))
            return '{0}.a'.format(c.texture_store(node, tex, tex_name, to_linear=True, tex_link=tex_link, is_lnx_mat_param=is_lnx_mat_param))

        # Pink color for missing texture
        else:
            if use_color_out:
                state.parsed.add(tex_store)
                state.curshader.write_textures += 1
                state.curshader.write(f'vec4 {tex_store} = vec4(1.0, 0.0, 1.0, 1.0);')
                state.curshader.write_textures -= 1
                return f'{tex_store}.rgb'
            else:
                state.curshader.write(f'vec4 {tex_store} = vec4(1.0, 0.0, 1.0, 1.0);')
                return f'{tex_store}.a'

    # World context
    # TODO: Merge with above implementation to also allow mappings other than using view coordinates
    else:
        world = state.world
        world.world_defs += '_EnvImg'

        # Background texture
        state.curshader.add_uniform('sampler2D envmap', link='_envmap')
        state.curshader.add_uniform('vec2 screenSize', link='_screenSize')

        image = node.image
        if image is None:
            log.warn(f'World "{world.name}": image texture node "{node.name}" is empty')
            return 'vec3(0.0, 0.0, 0.0)' if use_color_out else '0.0'

        filepath = image.filepath

        if image.packed_file is not None:
            # Extract packed data
            filepath = lnx.utils.build_dir() + '/compiled/Assets/unpacked'
            unpack_path = lnx.utils.get_fp() + filepath
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)
            unpack_filepath = unpack_path + '/' + image.name
            # Only rewrite the file when missing or its size changed.
            if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != image.packed_file.size:
                with open(unpack_filepath, 'wb') as f:
                    f.write(image.packed_file.data)
            assets.add(unpack_filepath)
        else:
            # Link image path to assets
            assets.add(lnx.utils.asset_path(image.filepath))

        # Reference image name
        tex_file = lnx.utils.extract_filename(image.filepath)
        base = tex_file.rsplit('.', 1)
        ext = base[1].lower()

        # HDR sources keep their range; everything else is baked to JPEG.
        if ext == 'hdr':
            target_format = 'HDR'
        else:
            target_format = 'JPEG'

        # Generate prefiltered envmaps
        world.lnx_envtex_name = tex_file
        world.lnx_envtex_irr_name = tex_file.rsplit('.', 1)[0]

        disable_hdr = target_format == 'JPEG'
        from_srgb = image.colorspace_settings.name == "sRGB"

        rpdat = lnx.utils.get_rp()
        mip_count = world.lnx_envtex_num_mips
        mip_count = write_probes.write_probes(filepath, disable_hdr, from_srgb, mip_count, lnx_radiance=rpdat.lnx_radiance)

        world.lnx_envtex_num_mips = mip_count

        # Will have to get rid of gl_FragCoord, pass texture coords from vertex shader
        state.curshader.write_init('vec2 texco = gl_FragCoord.xy / screenSize;')
        return 'texture(envmap, vec2(texco.x, 1.0 - texco.y)).rgb * envmapStrength'
|
||||
|
||||
|
||||
def parse_tex_magic(node: bpy.types.ShaderNodeTexMagic, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for the Magic texture node (Color or Fac output)."""
    state.curshader.add_function(c_functions.str_tex_magic)

    # Fall back to object-space position when no coordinates are wired in.
    coords = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'
    tex_scale = c.parse_value_input(node.inputs[1])

    sample_arg = f'{coords} * {tex_scale} * 4.0'
    # outputs[0] is the Color socket; anything else maps to the scalar Fac.
    if out_socket == node.outputs[0]:
        return f'tex_magic({sample_arg})'
    return f'tex_magic_f({sample_arg})'
|
||||
|
||||
if bpy.app.version < (4, 1, 0):
    # The Musgrave texture node was removed in Blender 4.1 (merged into the
    # Noise texture node), so this parser only exists on older versions.
    def parse_tex_musgrave(node: bpy.types.ShaderNodeTexMusgrave, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
        """Emit GLSL for the (pre-4.1) Musgrave texture node.

        Only a scalar fBm approximation is implemented; the Detail and
        Distortion inputs are currently ignored (see commented lines).
        """
        state.curshader.add_function(c_functions.str_tex_musgrave)

        # Fall back to object-space position when no coordinates are linked.
        if node.inputs[0].is_linked:
            co = c.parse_vector_input(node.inputs[0])
        else:
            co = 'bposition'

        scale = c.parse_value_input(node.inputs['Scale'])
        # detail = c.parse_value_input(node.inputs[2])
        # distortion = c.parse_value_input(node.inputs[3])

        res = f'tex_musgrave_f({co} * {scale} * 0.5)'

        return res
|
||||
|
||||
|
||||
def parse_tex_noise(node: bpy.types.ShaderNodeTexNoise, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for the Noise texture node.

    outputs[1] is the Color socket (three offset noise samples), anything
    else yields the scalar Fac. On Blender >= 4.1 the Musgrave node was
    merged into Noise; its 'FBM' type is approximated here with the legacy
    Musgrave implementation.
    """
    c.write_procedurals()
    state.curshader.add_function(c_functions.str_tex_noise)
    c.assets_add(os.path.join(lnx.utils.get_sdk_path(), 'leenkx', 'Assets', 'noise256.png'))
    c.assets_add_embedded_data('noise256.png')
    state.curshader.add_uniform('sampler2D snoise256', link='$noise256.png')

    if node.inputs[0].is_linked:
        co = c.parse_vector_input(node.inputs[0])
    else:
        co = 'bposition'

    # Parse all value inputs up front; parsing may register uniforms as a
    # side effect even for values that end up unused (roughness currently is).
    scale = c.parse_value_input(node.inputs[2])
    detail = c.parse_value_input(node.inputs[3])
    roughness = c.parse_value_input(node.inputs[4])
    distortion = c.parse_value_input(node.inputs[5])

    # `and` short-circuits, so node.noise_type (added in 4.1) is never read
    # on older Blender versions.
    if bpy.app.version >= (4, 1, 0) and node.noise_type == "FBM":
        # tex_musgrave_f() is used by both branches below, so register the
        # function unconditionally (previously only the Color branch did,
        # leaving the Fac branch referencing an undeclared GLSL function).
        state.curshader.add_function(c_functions.str_tex_musgrave)
        if out_socket == node.outputs[1]:
            res = 'vec3(tex_musgrave_f({0} * {1}), tex_musgrave_f({0} * {1} + 120.0), tex_musgrave_f({0} * {1} + 168.0))'.format(co, scale)
        else:
            res = f'tex_musgrave_f({co} * {scale} * 1.0)'
    else:
        # Identical on all Blender versions (the pre-4.1 and >= 4.1 non-FBM
        # paths used to be duplicated verbatim; deduplicated here).
        if out_socket == node.outputs[1]:
            res = 'vec3(tex_noise({0} * {1},{2},{3}), tex_noise({0} * {1} + 120.0,{2},{3}), tex_noise({0} * {1} + 168.0,{2},{3}))'.format(co, scale, detail, distortion)
        else:
            res = 'tex_noise({0} * {1},{2},{3})'.format(co, scale, detail, distortion)
    return res
|
||||
|
||||
|
||||
def parse_tex_pointdensity(node: bpy.types.ShaderNodeTexPointDensity, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Point Density is unsupported; emit neutral constants (pass-through)."""
    # outputs[0] = Color, everything else = Density.
    if out_socket != node.outputs[0]:
        return '0.0'
    return c.to_vec3([0.0, 0.0, 0.0])
|
||||
|
||||
|
||||
def parse_tex_sky(node: bpy.types.ShaderNodeTexSky, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for the Sky texture node (world shaders only).

    Dispatches to the Hosek-Wilkie or Nishita implementation based on the
    node's sky model; Preetham falls back to Hosek-Wilkie. Object-context
    materials and unknown models return a black constant.
    """
    if state.context == ParserContext.OBJECT:
        # Pass through
        return c.to_vec3([0.0, 0.0, 0.0])

    state.world.world_defs += '_EnvSky'

    if node.sky_type == 'PREETHAM' or node.sky_type == 'HOSEK_WILKIE':
        if node.sky_type == 'PREETHAM':
            log.info('Info: Preetham sky model is not supported, using Hosek Wilkie sky model instead')
        return parse_sky_hosekwilkie(node, state)

    elif node.sky_type == 'NISHITA':
        return parse_sky_nishita(node, state)

    else:
        log.error(f'Unsupported sky model: {node.sky_type}!')
        return c.to_vec3([0.0, 0.0, 0.0])
|
||||
|
||||
|
||||
def parse_sky_hosekwilkie(node: bpy.types.ShaderNodeTexSky, state: ParserState) -> vec3str:
    """Emit GLSL for the Hosek-Wilkie analytic sky model.

    Registers the per-frame Hosek coefficient uniforms (A..I, Z) and the sun
    direction, persists the node settings on the world for the exporter, and
    writes placeholder radiance/irradiance maps on the first invocation.
    """
    world = state.world
    curshader = state.curshader

    # Match to cycles
    world.lnx_envtex_strength *= 0.1

    assets.add_khafile_def('lnx_hosek')
    # Model coefficients are computed CPU-side from turbidity/albedo/sun
    # position and fed in as uniforms.
    curshader.add_uniform('vec3 A', link="_hosekA")
    curshader.add_uniform('vec3 B', link="_hosekB")
    curshader.add_uniform('vec3 C', link="_hosekC")
    curshader.add_uniform('vec3 D', link="_hosekD")
    curshader.add_uniform('vec3 E', link="_hosekE")
    curshader.add_uniform('vec3 F', link="_hosekF")
    curshader.add_uniform('vec3 G', link="_hosekG")
    curshader.add_uniform('vec3 H', link="_hosekH")
    curshader.add_uniform('vec3 I', link="_hosekI")
    curshader.add_uniform('vec3 Z', link="_hosekZ")
    curshader.add_uniform('vec3 hosekSunDirection', link="_hosekSunDirection")
    curshader.add_function("""vec3 hosekWilkie(float cos_theta, float gamma, float cos_gamma) {
\tvec3 chi = (1 + cos_gamma * cos_gamma) / pow(1 + H * H - 2 * cos_gamma * H, vec3(1.5));
\treturn (1 + A * exp(B / (cos_theta + 0.01))) * (C + D * exp(E * gamma) + F * (cos_gamma * cos_gamma) + G * chi + I * sqrt(cos_theta));
}""")

    # Persist node settings on the world so the exporter can recompute the
    # coefficients at runtime.
    world.lnx_envtex_sun_direction = [node.sun_direction[0], node.sun_direction[1], node.sun_direction[2]]
    world.lnx_envtex_turbidity = node.turbidity
    world.lnx_envtex_ground_albedo = node.ground_albedo

    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()
    mobile_mat = rpdat.lnx_material_model == 'Mobile' or rpdat.lnx_material_model == 'Solid'

    # Probe maps only need to be written once per world export.
    if not state.radiance_written:
        # Irradiance json file name
        wname = lnx.utils.safestr(world.name)
        world.lnx_envtex_irr_name = wname
        write_probes.write_sky_irradiance(wname)

        # Radiance
        if rpdat.lnx_radiance and rpdat.lnx_irradiance and not mobile_mat:
            wrd.world_defs += '_Rad'
            hosek_path = 'leenkx/Assets/hosek/'
            sdk_path = lnx.utils.get_sdk_path()
            # Use fake maps for now
            assets.add(sdk_path + '/' + hosek_path + 'hosek_radiance.hdr')
            for i in range(0, 8):
                assets.add(sdk_path + '/' + hosek_path + 'hosek_radiance_' + str(i) + '.hdr')

            world.lnx_envtex_name = 'hosek'
            world.lnx_envtex_num_mips = 8

        state.radiance_written = True

    # `pos` is the sky-dome direction provided by the surrounding shader.
    curshader.write('float cos_theta = clamp(pos.z, 0.0, 1.0);')
    curshader.write('float cos_gamma = dot(pos, hosekSunDirection);')
    curshader.write('float gamma_val = acos(cos_gamma);')

    # NOTE(review): the returned expression ends with a ';' unlike every
    # other parser in this file -- confirm the caller tolerates it.
    return 'Z * hosekWilkie(cos_theta, gamma_val, cos_gamma) * envmapStrength;'
|
||||
|
||||
|
||||
def parse_sky_nishita(node: bpy.types.ShaderNodeTexSky, state: ParserState) -> vec3str:
    """Emit GLSL for the Nishita sky model (LUT-based atmosphere).

    Registers the precomputed atmosphere LUT and density uniforms, stores
    the node's density settings on the world, and optionally multiplies in
    an analytic sun disc.
    """
    curshader = state.curshader
    curshader.add_include('std/sky.glsl')
    curshader.add_uniform('vec3 sunDir', link='_sunDirection')
    curshader.add_uniform('sampler2D nishitaLUT', link='_nishitaLUT', included=True,
                          tex_addr_u='clamp', tex_addr_v='clamp')
    curshader.add_uniform('vec2 nishitaDensity', link='_nishitaDensity', included=True)

    planet_radius = 6360e3  # Earth radius used in Blender
    # Ray origin height measured from the planet center, in meters.
    ray_origin_z = planet_radius + node.altitude

    state.world.lnx_nishita_density = [node.air_density, node.dust_density, node.ozone_density]

    sun = ''
    if node.sun_disc:
        # The sun size is calculated relative in terms of the distance
        # between the sun position and the sky dome normal at every
        # pixel (see sun_disk() in sky.glsl).
        #
        # An isosceles triangle is created with the camera at the
        # opposite side of the base with node.sun_size being the vertex
        # angle from which the base angle theta is calculated. Iron's
        # skydome geometry roughly resembles a unit sphere, so the leg
        # size is set to 1. The base size is the doubled normal-relative
        # target size.

        # sun_size is already in radians despite being degrees in the UI
        theta = 0.5 * (math.pi - node.sun_size)
        size = math.cos(theta)
        sun = f'* sun_disk(pos, sunDir, {size}, {node.sun_intensity})'

    return f'nishita_atmosphere(pos, vec3(0, 0, {ray_origin_z}), sunDir, {planet_radius}){sun}'
|
||||
|
||||
|
||||
def parse_tex_environment(node: bpy.types.ShaderNodeTexEnvironment, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for the Environment Texture node (world shaders only).

    Resolves the image (unpacking packed data and converting exr/other
    formats to hdr/jpg as needed), registers it as an asset, generates
    prefiltered radiance/irradiance probes on the first invocation, and
    returns an equirectangular envmap lookup. Returns magenta when the
    image is missing and black in object-material context.
    """
    if state.context == ParserContext.OBJECT:
        log.warn('Environment Texture node is not supported for object node trees, using default value')
        return c.to_vec3([0.0, 0.0, 0.0])

    if node.image is None:
        # Magenta "missing texture" marker.
        return c.to_vec3([1.0, 0.0, 1.0])

    world = state.world
    world.world_defs += '_EnvTex'

    curshader = state.curshader

    curshader.add_include('std/math.glsl')
    curshader.add_uniform('sampler2D envmap', link='_envmap')

    image = node.image
    filepath = image.filepath

    if image.packed_file is None and not os.path.isfile(lnx.utils.asset_path(filepath)):
        log.warn(world.name + ' - unable to open ' + image.filepath)
        return c.to_vec3([1.0, 0.0, 1.0])

    # Reference image name
    tex_file = lnx.utils.extract_filename(image.filepath)
    base = tex_file.rsplit('.', 1)
    ext = base[1].lower()

    if ext == 'hdr':
        target_format = 'HDR'
    else:
        target_format = 'JPEG'
    # JPEG targets imply low dynamic range. Computed here (not inside the
    # radiance branch below) because it is read unconditionally further
    # down -- previously a second world texture in the same export hit a
    # NameError once state.radiance_written was already set.
    disable_hdr = target_format == 'JPEG'

    # Only hdr and jpg can be consumed directly; everything else is
    # converted (exr keeps its dynamic range by going to hdr).
    do_convert = ext != 'hdr' and ext != 'jpg'
    if do_convert:
        if ext == 'exr':
            tex_file = base[0] + '.hdr'
            target_format = 'HDR'
        else:
            tex_file = base[0] + '.jpg'
            target_format = 'JPEG'

    if image.packed_file is not None:
        # Extract packed data
        unpack_path = lnx.utils.get_fp_build() + '/compiled/Assets/unpacked'
        if not os.path.exists(unpack_path):
            os.makedirs(unpack_path)
        unpack_filepath = unpack_path + '/' + tex_file
        filepath = unpack_filepath

        if do_convert:
            if not os.path.isfile(unpack_filepath):
                lnx.utils.unpack_image(image, unpack_filepath, file_format=target_format)

        # Write the raw packed bytes out if missing or stale (size mismatch).
        elif not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != image.packed_file.size:
            with open(unpack_filepath, 'wb') as f:
                f.write(image.packed_file.data)

        assets.add(unpack_filepath)
    else:
        if do_convert:
            unpack_path = lnx.utils.get_fp_build() + '/compiled/Assets/unpacked'
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)
            converted_path = unpack_path + '/' + tex_file
            filepath = converted_path
            # TODO: delete cache when file changes
            if not os.path.isfile(converted_path):
                lnx.utils.convert_image(image, converted_path, file_format=target_format)
            assets.add(converted_path)
        else:
            # Link image path to assets
            assets.add(lnx.utils.asset_path(image.filepath))

    rpdat = lnx.utils.get_rp()

    if not state.radiance_written:
        # Generate prefiltered envmaps
        world.lnx_envtex_name = tex_file
        world.lnx_envtex_irr_name = tex_file.rsplit('.', 1)[0]
        from_srgb = image.colorspace_settings.name == "sRGB"

        mip_count = world.lnx_envtex_num_mips
        mip_count = write_probes.write_probes(filepath, disable_hdr, from_srgb, mip_count, lnx_radiance=rpdat.lnx_radiance)

        world.lnx_envtex_num_mips = mip_count

        state.radiance_written = True

    # Append LDR define
    if disable_hdr:
        world.world_defs += '_EnvLDR'
        assets.add_khafile_def("lnx_envldr")

    wrd = bpy.data.worlds['Lnx']
    mobile_mat = rpdat.lnx_material_model == 'Mobile' or rpdat.lnx_material_model == 'Solid'

    # Append radiance define
    if rpdat.lnx_irradiance and rpdat.lnx_radiance and not mobile_mat:
        wrd.world_defs += '_Rad'

    return 'texture(envmap, envMapEquirect(pos)).rgb * envmapStrength'
|
||||
|
||||
|
||||
def parse_tex_voronoi(node: bpy.types.ShaderNodeTexVoronoi, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for the Voronoi texture node."""
    # Socket type -> output selector understood by tex_voronoi()
    # (0 = distance, 1 = color, 2 = position).
    outp = {'RGBA': 1, 'VECTOR': 2}.get(out_socket.type, 0)
    # Distance metric selector (0 = Euclidean).
    m = {'MANHATTAN': 1, 'CHEBYCHEV': 2, 'MINKOWSKI': 3}.get(node.distance, 0)

    c.write_procedurals()
    state.curshader.add_function(c_functions.str_tex_voronoi)

    # Fall back to object-space position when no coordinates are linked.
    co = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'
    scale = c.parse_value_input(node.inputs[2])
    exp = c.parse_value_input(node.inputs[4])
    randomness = c.parse_value_input(node.inputs[5])

    call = f'tex_voronoi({co}, {randomness}, {m}, {outp}, {scale}, {exp})'
    # outputs[1]/outputs[2] are Color/Position (vector results); any other
    # socket gets the scalar Distance in .x.
    if out_socket == node.outputs[1] or out_socket == node.outputs[2]:
        return call
    return f'{call}.x'
|
||||
|
||||
|
||||
def parse_tex_wave(node: bpy.types.ShaderNodeTexWave, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for the Wave texture node (Color or Fac output)."""
    c.write_procedurals()
    state.curshader.add_function(c_functions.str_tex_wave)

    # Fall back to object-space position when no coordinates are linked.
    co = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'
    scale = c.parse_value_input(node.inputs[1])
    distortion = c.parse_value_input(node.inputs[2])
    detail = c.parse_value_input(node.inputs[3])
    detail_scale = c.parse_value_input(node.inputs[4])

    # Enum -> integer selectors consumed by tex_wave_f().
    wave_profile = 0 if node.wave_profile == 'SIN' else 1
    wave_type = 0 if node.wave_type == 'BANDS' else 1

    fac = f'tex_wave_f({co} * {scale},{wave_type},{wave_profile},{distortion},{detail},{detail_scale})'
    # outputs[0] is the Color socket: replicate the scalar into a vec3.
    if out_socket == node.outputs[0]:
        return f'vec3({fac})'
    return fac
|
205
leenkx/blender/lnx/material/cycles_nodes/nodes_vector.py
Normal file
205
leenkx/blender/lnx/material/cycles_nodes/nodes_vector.py
Normal file
@ -0,0 +1,205 @@
|
||||
from typing import Union
|
||||
|
||||
import bpy
|
||||
from mathutils import Euler, Vector
|
||||
|
||||
import lnx.log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
from lnx.material.parser_state import ParserState, ParserPass
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
import lnx.utils as utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.log = lnx.reload_module(lnx.log)
|
||||
c = lnx.reload_module(c)
|
||||
c_functions = lnx.reload_module(c_functions)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState, ParserPass
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
utils = lnx.reload_module(utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_curvevec(node: bpy.types.ShaderNodeVectorCurve, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for the Vector Curves node: per-axis curve lookup scaled by Fac."""
    fac = c.parse_value_input(node.inputs[0])
    vec = c.parse_vector_input(node.inputs[1])
    curves = node.mapping.curves
    base_name = c.node_name(node.name)
    # One curve evaluation per component, in x/y/z order.
    # mapping.curves[i].points[j].handle_type  # bezier curve
    comps = [
        c.vector_curve(base_name + str(i), vec + '.' + axis, curves[i].points)
        for i, axis in enumerate('xyz')
    ]
    return f'(vec3({comps[0]}, {comps[1]}, {comps[2]}) * {fac})'
|
||||
|
||||
|
||||
def parse_bump(node: bpy.types.ShaderNodeBump, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for the Bump node by perturbing the shader normal `n`.

    The Height input is evaluated three times (regular pass plus screen-space
    dx/dy derivative passes) to build a screen-space tangent frame; the
    result blends between the unperturbed and perturbed normal by Strength.
    Fragment shaders only.
    """
    if state.curshader.shader_type != 'frag':
        lnx.log.warn("Bump node not supported outside of fragment shaders")
        return 'vec3(0.0)'

    # Interpolation strength
    strength = c.parse_value_input(node.inputs[0])
    # Height multiplier
    # distance = c.parse_value_input(node.inputs[1])
    height = c.parse_value_input(node.inputs[2])

    # Re-parse the same Height input under the derivative passes so the
    # generated code yields its screen-space finite differences.
    state.current_pass = ParserPass.DX_SCREEN_SPACE
    height_dx = c.parse_value_input(node.inputs[2])
    state.current_pass = ParserPass.DY_SCREEN_SPACE
    height_dy = c.parse_value_input(node.inputs[2])
    state.current_pass = ParserPass.REGULAR

    # nor = c.parse_vector_input(node.inputs[3])

    # If both derivative expressions equal the plain height expression, the
    # input is constant in screen space and there is nothing to perturb.
    if height_dx != height or height_dy != height:
        tangent = f'{c.dfdx_fine("wposition")} + n * ({height_dx} - {height})'
        bitangent = f'{c.dfdy_fine("wposition")} + n * ({height_dy} - {height})'

        # Cross-product operand order, dFdy is flipped on d3d11
        bitangent_first = utils.get_gapi() == 'direct3d11'

        if node.invert:
            bitangent_first = not bitangent_first

        if bitangent_first:
            # We need to normalize twice, once for the correct "weight" of the strength,
            # once for having a normalized output vector (lerping vectors does not preserve magnitude)
            res = f'normalize(mix(n, normalize(cross({bitangent}, {tangent})), {strength}))'
        else:
            res = f'normalize(mix(n, normalize(cross({tangent}, {bitangent})), {strength}))'

    else:
        res = 'n'

    return res
|
||||
|
||||
|
||||
def parse_mapping(node: bpy.types.ShaderNodeMapping, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for the Mapping node.

    Only the "Point", "Texture" and "Vector" vector types are supported.
    More information about the order of operations for this node:
    https://docs.blender.org/manual/en/latest/render/shader_nodes/vector/mapping.html#properties
    """
    input_vector: bpy.types.NodeSocket = node.inputs[0]
    input_location: bpy.types.NodeSocket = node.inputs['Location']
    input_rotation: bpy.types.NodeSocket = node.inputs['Rotation']
    input_scale: bpy.types.NodeSocket = node.inputs['Scale']
    out = c.parse_vector_input(input_vector) if input_vector.is_linked else c.to_vec3(input_vector.default_value)
    location = c.parse_vector_input(input_location) if input_location.is_linked else c.to_vec3(input_location.default_value)
    rotation = c.parse_vector_input(input_rotation) if input_rotation.is_linked else c.to_vec3(input_rotation.default_value)
    scale = c.parse_vector_input(input_scale) if input_scale.is_linked else c.to_vec3(input_scale.default_value)

    # Inner helpers because the order of operations differs per vector type.
    # The previous revision had these two helpers named the wrong way around
    # (the scale helper was called calc_location and vice versa); only the
    # names changed here -- every call site invokes the same logic as
    # before, so the emitted GLSL is identical.
    # NOTE(review): the resulting order is Scale -> Rotate -> Translate for
    # 'TEXTURE' and Translate -> Rotate -> Scale otherwise, the opposite of
    # what the Blender manual describes -- confirm against engine output
    # before changing the call order.
    def apply_scale(output: str) -> str:
        # Vectors and Eulers support the "!=" operator
        if input_scale.is_linked or input_scale.default_value != Vector((1, 1, 1)):
            if node.vector_type == 'TEXTURE':
                output = f'({output} / {scale})'
            else:
                output = f'({output} * {scale})'

        return output

    def apply_location(output: str) -> str:
        if input_location.is_linked or input_location.default_value != Vector((0, 0, 0)):
            # z location is a little off sometimes?...
            if node.vector_type == 'TEXTURE':
                output = f'({output} - {location})'
            else:
                output = f'({output} + {location})'
        return output

    out = apply_scale(out) if node.vector_type == 'TEXTURE' else apply_location(out)

    if input_rotation.is_linked or input_rotation.default_value != Euler((0, 0, 0)):
        var_name = c.node_name(node.name) + "_rotation" + state.get_parser_pass_suffix()
        if node.vector_type == 'TEXTURE':
            state.curshader.write(f'mat3 {var_name}X = mat3(1.0, 0.0, 0.0, 0.0, cos({rotation}.x), sin({rotation}.x), 0.0, -sin({rotation}.x), cos({rotation}.x));')
            state.curshader.write(f'mat3 {var_name}Y = mat3(cos({rotation}.y), 0.0, -sin({rotation}.y), 0.0, 1.0, 0.0, sin({rotation}.y), 0.0, cos({rotation}.y));')
            state.curshader.write(f'mat3 {var_name}Z = mat3(cos({rotation}.z), sin({rotation}.z), 0.0, -sin({rotation}.z), cos({rotation}.z), 0.0, 0.0, 0.0, 1.0);')
        else:
            # A little bit redundant, but faster than 12 more multiplications to make it work dynamically
            state.curshader.write(f'mat3 {var_name}X = mat3(1.0, 0.0, 0.0, 0.0, cos(-{rotation}.x), sin(-{rotation}.x), 0.0, -sin(-{rotation}.x), cos(-{rotation}.x));')
            state.curshader.write(f'mat3 {var_name}Y = mat3(cos(-{rotation}.y), 0.0, -sin(-{rotation}.y), 0.0, 1.0, 0.0, sin(-{rotation}.y), 0.0, cos(-{rotation}.y));')
            state.curshader.write(f'mat3 {var_name}Z = mat3(cos(-{rotation}.z), sin(-{rotation}.z), 0.0, -sin(-{rotation}.z), cos(-{rotation}.z), 0.0, 0.0, 0.0, 1.0);')

        # XYZ-order euler rotation
        out = f'{out} * {var_name}X * {var_name}Y * {var_name}Z'

    out = apply_location(out) if node.vector_type == 'TEXTURE' else apply_scale(out)

    return out
|
||||
|
||||
|
||||
def parse_normal(node: bpy.types.ShaderNodeNormal, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Normal node: output either the node's fixed direction or its dot product with the input."""
    fixed_dir = c.to_vec3(node.outputs['Normal'].default_value)

    if out_socket == node.outputs['Normal']:
        return fixed_dir

    if out_socket == node.outputs['Dot']:
        other = c.parse_vector_input(node.inputs["Normal"])
        return f'dot({fixed_dir}, {other})'
|
||||
|
||||
|
||||
def parse_normalmap(node: bpy.types.ShaderNodeNormalMap, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Normal Map node: feed the color input into the shader normal `n`."""
    # Tessellation evaluation shaders just forward the color input.
    if state.curshader == state.tese:
        return c.parse_vector_input(node.inputs[1])
    # space = node.space
    # map = node.uv_map
    # inputs[1] is the Color socket, inputs[0] the Strength; the call below
    # updates the shader normal in place.
    c.parse_normal_map_color_input(node.inputs[1], node.inputs[0])
    return 'n'
|
||||
|
||||
|
||||
def parse_vectortransform(node: bpy.types.ShaderNodeVectorTransform, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Vector Transform is not implemented; pass the input vector through unchanged."""
    # type = node.vector_type
    # conv_from = node.convert_from
    # conv_to = node.convert_to
    return c.parse_vector_input(node.inputs[0])
|
||||
|
||||
|
||||
def parse_displacement(node: bpy.types.ShaderNodeDisplacement, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for the Displacement node.

    Only Height * Scale is used in the result; Midlevel and Normal are
    parsed (presumably so any linked sub-trees still register their
    uniforms) but not folded in.
    NOTE(review): Blender computes (height - midlevel) * scale along the
    normal -- confirm whether ignoring midlevel/normal here is intentional.
    """
    height = c.parse_value_input(node.inputs[0])
    midlevel = c.parse_value_input(node.inputs[1])  # unused in the result
    scale = c.parse_value_input(node.inputs[2])
    nor = c.parse_vector_input(node.inputs[3])  # unused in the result
    return f'(vec3({height}) * {scale})'
|
||||
|
||||
def parse_vectorrotate(node: bpy.types.ShaderNodeVectorRotate, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for the Vector Rotate node.

    Supports axis-angle rotation (free or fixed X/Y/Z axis) and XYZ-euler
    rotation around an arbitrary center point. The node's Invert flag
    reverses the rotation direction.
    """
    rotation_type = node.rotation_type
    input_vector = c.parse_vector_input(node.inputs[0])
    input_center = c.parse_vector_input(node.inputs[1])
    input_axis = c.parse_vector_input(node.inputs[2])
    input_angle = c.parse_value_input(node.inputs[3])
    input_rotation = c.parse_vector_input(node.inputs[4])

    # Invert reverses the rotation by negating the angle (and, for the
    # euler case below, by transposing the rotation matrix). The previous
    # values "0"/"1" zeroed the angle when inverting and could never
    # satisfy the `< 0.0` transpose test.
    input_invert = "-1.0" if node.invert else "1.0"

    state.curshader.add_function(c_functions.str_rotate_around_axis)

    if rotation_type == 'AXIS_ANGLE':
        # Guard against a zero-length axis, which cannot be normalized.
        return f'vec3( (length({input_axis}) != 0.0) ? rotate_around_axis({input_vector} - {input_center}, normalize({input_axis}), {input_angle} * {input_invert}) + {input_center} : {input_vector} )'
    elif rotation_type == 'X_AXIS':
        return f'vec3( rotate_around_axis({input_vector} - {input_center}, vec3(1.0, 0.0, 0.0), {input_angle} * {input_invert}) + {input_center} )'
    elif rotation_type == 'Y_AXIS':
        return f'vec3( rotate_around_axis({input_vector} - {input_center}, vec3(0.0, 1.0, 0.0), {input_angle} * {input_invert}) + {input_center} )'
    elif rotation_type == 'Z_AXIS':
        return f'vec3( rotate_around_axis({input_vector} - {input_center}, vec3(0.0, 0.0, 1.0), {input_angle} * {input_invert}) + {input_center} )'
    elif rotation_type == 'EULER_XYZ':
        state.curshader.add_function(c_functions.str_euler_to_mat3)
        return f'vec3( mat3(({input_invert} < 0.0) ? transpose(euler_to_mat3({input_rotation})) : euler_to_mat3({input_rotation})) * ({input_vector} - {input_center}) + {input_center})'

    # Unknown rotation type: fall back to a constant direction.
    return f'(vec3(1.0, 0.0, 0.0))'
|
6
leenkx/blender/lnx/material/lnx_nodes/__init__.py
Normal file
6
leenkx/blender/lnx/material/lnx_nodes/__init__.py
Normal file
@ -0,0 +1,6 @@
|
||||
"""Import all nodes"""
|
||||
import glob
|
||||
from os.path import dirname, basename, isfile
|
||||
|
||||
modules = glob.glob(dirname(__file__) + "/*.py")
|
||||
__all__ = [basename(f)[:-3] for f in modules if isfile(f)]
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
192
leenkx/blender/lnx/material/lnx_nodes/custom_particle_node.py
Normal file
192
leenkx/blender/lnx/material/lnx_nodes/custom_particle_node.py
Normal file
@ -0,0 +1,192 @@
|
||||
from bpy.props import *
|
||||
from bpy.types import Node
|
||||
|
||||
from lnx.material.lnx_nodes.lnx_nodes import add_node
|
||||
from lnx.material.shader import Shader
|
||||
from lnx.material.cycles import *
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
import lnx
|
||||
lnx.material.lnx_nodes.lnx_nodes = lnx.reload_module(lnx.material.lnx_nodes.lnx_nodes)
|
||||
from lnx.material.lnx_nodes.lnx_nodes import add_node
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import Shader
|
||||
lnx.material.cycles = lnx.reload_module(lnx.material.cycles)
|
||||
from lnx.material.cycles import *
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
class CustomParticleNode(Node):
    """Input data for particles.

    Lets a particle material translate, rotate and scale particle geometry
    per axis (driven by the node's vector inputs) and optionally billboard
    it towards the camera. ``parse()`` is invoked by the particle shader
    builder and appends GLSL statements to the vertex shader.
    """
    bl_idname = 'LnxCustomParticleNode'
    bl_label = 'Custom Particle'
    bl_icon = 'NONE'

    # Per-axis enable flags, laid out as a Pos/Rot/Scl x X/Y/Z grid in the UI.
    posX: BoolProperty(
        name="",
        description="enable translation along x",
        default=False,
    )

    posY: BoolProperty(
        name="",
        description="enable translation along y",
        default=False,
    )

    posZ: BoolProperty(
        name="",
        description="enable translation along z",
        default=False,
    )

    rotX: BoolProperty(
        name="",
        description="enable rotation along x",
        default=False,
    )

    rotY: BoolProperty(
        name="",
        description="enable rotation along y",
        default=False,
    )

    rotZ: BoolProperty(
        name="",
        description="enable rotation along z",
        default=False,
    )

    sclX: BoolProperty(
        name="",
        description="enable scaling along x",
        default=False,
    )

    sclY: BoolProperty(
        name="",
        description="enable scaling along y",
        default=False,
    )

    sclZ: BoolProperty(
        name="",
        description="enable scaling along z",
        default=False,
    )

    billBoard: BoolProperty(
        name="Bill Board",
        description="Enable Bill Board",
        default=False,
    )

    def init(self, context):
        """Create the node's vector input sockets."""
        self.inputs.new('NodeSocketVector', 'Position')
        self.inputs.new('NodeSocketVector', 'Rotation')
        self.inputs.new('NodeSocketVector', 'Scale')

    def draw_buttons(self, context, layout):
        """Draw the Pos/Rot/Scl x X/Y/Z checkbox grid and the billboard toggle."""
        grid0 = layout.grid_flow(row_major=True, columns=4, align=False)

        # Header row.
        grid0.label(text="")
        grid0.label(text=" X")
        grid0.label(text=" Y")
        grid0.label(text=" Z")

        grid0.label(text="Pos")
        grid0.prop(self, "posX")
        grid0.prop(self, "posY")
        grid0.prop(self, "posZ")

        grid0.label(text="Rot")
        grid0.prop(self, "rotX")
        grid0.prop(self, "rotY")
        grid0.prop(self, "rotZ")

        grid0.label(text="Scl")
        grid0.prop(self, "sclX")
        grid0.prop(self, "sclY")
        grid0.prop(self, "sclZ")

        layout.prop(self, "billBoard")

    def parse(self, vertshdr: Shader, part_con) -> None:
        """Append particle transform GLSL to the vertex shader.

        Order of operations: scale -> billboard -> rotate -> translate,
        then the world position is recomputed from the modified `spos`.
        """
        if self.sclX or self.sclY or self.sclZ:
            scl = parse_vector_input(self.inputs[2])

            if self.sclX:
                vertshdr.write(f'spos.x *= {scl}.x;')

            if self.sclY:
                vertshdr.write(f'spos.y *= {scl}.y;')

            # Fixed: this branch previously re-tested sclX, so enabling
            # only "Scl Z" in the UI had no effect.
            if self.sclZ:
                vertshdr.write(f'spos.z *= {scl}.z;')

        if self.billBoard:
            vertshdr.add_uniform('mat4 WV', '_worldViewMatrix')
            # Cancel the view rotation so the particle always faces the camera.
            vertshdr.write('spos = mat4(transpose(mat3(WV))) * spos;')

        if self.rotX or self.rotY or self.rotZ:
            rot = parse_vector_input(self.inputs[1])

            # One precomposed rotation matrix per enabled-axis combination
            # (cheaper than multiplying three dynamic matrices at runtime).
            if self.rotX and not self.rotY and not self.rotZ:
                vertshdr.write(f'mat3 part_rot_mat = mat3(1.0, 0.0, 0.0,')
                vertshdr.write(f'                 0.0, cos({rot}.x), sin({rot}.x),')
                vertshdr.write(f'                 0.0, -sin({rot}.x), cos({rot}.x));')

            if not self.rotX and self.rotY and not self.rotZ:
                vertshdr.write(f'mat3 part_rot_mat = mat3(cos({rot}.y), 0.0, -sin({rot}.y),')
                vertshdr.write(f'                 0.0, 1.0, 0.0,')
                vertshdr.write(f'                 sin({rot}.y), 0.0, cos({rot}.y));')

            if not self.rotX and not self.rotY and self.rotZ:
                vertshdr.write(f'mat3 part_rot_mat = mat3(cos({rot}.z), sin({rot}.z), 0.0,')
                vertshdr.write(f'                 -sin({rot}.z), cos({rot}.z), 0.0,')
                vertshdr.write(f'                 0.0, 0.0, 1.0);')

            if self.rotX and self.rotY and not self.rotZ:
                vertshdr.write(f'mat3 part_rot_mat = mat3(cos({rot}.y), 0.0, -sin({rot}.y),')
                vertshdr.write(f'                 sin({rot}.y) * sin({rot}.x), cos({rot}.x), cos({rot}.y) * sin({rot}.x),')
                vertshdr.write(f'                 sin({rot}.y) * cos({rot}.x), -sin({rot}.x), cos({rot}.y) * cos({rot}.x));')

            if self.rotX and not self.rotY and self.rotZ:
                vertshdr.write(f'mat3 part_rot_mat = mat3(cos({rot}.z), sin({rot}.z), 0.0,')
                vertshdr.write(f'                 -sin({rot}.z) * cos({rot}.x), cos({rot}.z) * cos({rot}.x), sin({rot}.x),')
                vertshdr.write(f'                 sin({rot}.z) * sin({rot}.x), -cos({rot}.z) * sin({rot}.x), cos({rot}.x));')

            if not self.rotX and self.rotY and self.rotZ:
                vertshdr.write(f'mat3 part_rot_mat = mat3(cos({rot}.z) * cos({rot}.y), sin({rot}.z) * cos({rot}.y), -sin({rot}.y),')
                vertshdr.write(f'                 -sin({rot}.z) , cos({rot}.z), 0.0,')
                vertshdr.write(f'                 cos({rot}.z) * sin({rot}.y), sin({rot}.z) * sin({rot}.y), cos({rot}.y));')

            if self.rotX and self.rotY and self.rotZ:
                vertshdr.write(f'mat3 part_rot_mat = mat3(cos({rot}.z) * cos({rot}.y), sin({rot}.z) * cos({rot}.y), -sin({rot}.y),')
                vertshdr.write(f'                 -sin({rot}.z) * cos({rot}.x) + cos({rot}.z) * sin({rot}.y) * sin({rot}.x), cos({rot}.z) * cos({rot}.x) + sin({rot}.z) * sin({rot}.y) * sin({rot}.x), cos({rot}.y) * sin({rot}.x),')
                vertshdr.write(f'                 sin({rot}.z) * sin({rot}.x) + cos({rot}.z) * sin({rot}.y) * cos({rot}.x), -cos({rot}.z) * sin({rot}.x) + sin({rot}.z) * sin({rot}.y) * cos({rot}.x), cos({rot}.y) * cos({rot}.x));')

            vertshdr.write('spos.xyz = part_rot_mat * spos.xyz;')
            # Keep lighting consistent with the rotated geometry.
            if (part_con.data['name'] == 'mesh' or part_con.data['name'] == 'translucent' or part_con.data['name'] == 'refraction'):
                vertshdr.write('wnormal = transpose(inverse(part_rot_mat)) * wnormal;')

        if self.posX or self.posY or self.posZ:
            pos = parse_vector_input(self.inputs[0])

            if self.posX:
                vertshdr.write(f'spos.x += {pos}.x;')

            if self.posY:
                vertshdr.write(f'spos.y += {pos}.y;')

            if self.posZ:
                vertshdr.write(f'spos.z += {pos}.z;')

        # Recompute the world-space position from the transformed local position.
        vertshdr.write('wposition = vec4(W * spos).xyz;')
|
||||
|
||||
|
||||
add_node(CustomParticleNode, category='Leenkx')
|
15
leenkx/blender/lnx/material/lnx_nodes/lnx_nodes.py
Normal file
15
leenkx/blender/lnx/material/lnx_nodes/lnx_nodes.py
Normal file
@ -0,0 +1,15 @@
|
||||
from typing import Type
|
||||
|
||||
from bpy.types import Node
|
||||
import nodeitems_utils
|
||||
|
||||
nodes = []
|
||||
category_items = {}
|
||||
|
||||
|
||||
def add_node(node_class: Type[Node], category: str) -> None:
    """Register a custom node class and file it under a menu category.

    Appends *node_class* to the module-level ``nodes`` list and adds a
    corresponding ``NodeItem`` to ``category_items[category]``, creating
    the category list on first use.
    """
    # Appending mutates the existing list in place, so no `global` is needed.
    nodes.append(node_class)
    # setdefault() replaces the explicit get()-is-None/assign dance.
    category_items.setdefault(category, []).append(
        nodeitems_utils.NodeItem(node_class.bl_idname))
|
110
leenkx/blender/lnx/material/lnx_nodes/shader_data_node.py
Normal file
110
leenkx/blender/lnx/material/lnx_nodes/shader_data_node.py
Normal file
@ -0,0 +1,110 @@
|
||||
from bpy.props import *
|
||||
from bpy.types import Node, NodeSocket
|
||||
|
||||
import lnx
|
||||
from lnx.material.lnx_nodes.lnx_nodes import add_node
|
||||
from lnx.material.parser_state import ParserState
|
||||
from lnx.material.shader import Shader
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.material.lnx_nodes.lnx_nodes = lnx.reload_module(lnx.material.lnx_nodes.lnx_nodes)
|
||||
from lnx.material.lnx_nodes.lnx_nodes import add_node
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import Shader
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
class ShaderDataNode(Node):
    """Allows access to shader data such as uniforms and inputs.

    Depending on ``input_type``, parsing either declares a uniform (linked
    by name in both vertex and fragment shaders) or declares a shader
    input, optionally rerouting it from the vertex to the fragment stage.
    """
    bl_idname = 'LnxShaderDataNode'
    bl_label = 'Shader Data'
    bl_icon = 'NONE'

    # Whether to read a shader input or a linked uniform.
    input_type: EnumProperty(
        items = [('input', 'Input', 'Shader Input'),
                 ('uniform', 'Uniform', 'Uniform value')],
        name='Input Type',
        default='input',
        description="The kind of data that should be retrieved")

    # Only relevant when input_type == 'input'.
    input_source: EnumProperty(
        items = [('frag', 'Fragment Shader', 'Take the input from the fragment shader'),
                 ('vert', 'Vertex Shader', 'Take the input from the vertex shader and pass it through to the fragment shader')],
        name='Input Source',
        default='vert',
        description="Where to take the input value from")

    # GLSL type of the declared uniform/input.
    variable_type: EnumProperty(
        items = [('int', 'int', 'int'),
                 ('float', 'float', 'float'),
                 ('vec2', 'vec2', 'vec2'),
                 ('vec3', 'vec3', 'vec3'),
                 ('vec4', 'vec4', 'vec4'),
                 ('sampler2D', 'sampler2D', 'sampler2D')],
        name='Variable Type',
        default='vec3',
        description="The type of the variable")

    variable_name: StringProperty(name="Variable Name", description="The name of the variable")

    def draw_buttons(self, context, layout):
        """Draw the node UI: input type selector plus variable settings."""
        col = layout.column(align=True)
        col.label(text="Input Type:")
        # Use a row to expand horizontally
        col.row().prop(self, "input_type", expand=True)

        split = layout.split(factor=0.5, align=True)
        col_left = split.column()
        col_right = split.column()

        # The source selector only applies to shader inputs, not uniforms.
        if self.input_type == "input":
            col_left.label(text="Input Source")
            col_right.prop(self, "input_source", text="")

        col_left.label(text="Variable Type")
        col_right.prop(self, "variable_type", text="")
        col_left.label(text="Variable Name")
        col_right.prop(self, "variable_name", text="")

    def init(self, context):
        """Create the output sockets; all mirror the same variable value."""
        self.outputs.new('NodeSocketColor', 'Color')
        self.outputs.new('NodeSocketVector', 'Vector')
        self.outputs.new('NodeSocketFloat', 'Float')
        self.outputs.new('NodeSocketInt', 'Int')

    def __parse(self, out_socket: NodeSocket, state: ParserState) -> str:
        """Emit the uniform/input declarations and return the GLSL expression
        that reads the variable in the fragment shader."""
        if self.input_type == "uniform":
            # Declare in both stages so the linker resolves the same name.
            state.frag.add_uniform(f'{self.variable_type} {self.variable_name}', link=self.variable_name)
            state.vert.add_uniform(f'{self.variable_type} {self.variable_name}', link=self.variable_name)

            if self.variable_type == "sampler2D":
                # Sample the texture at the current screen coordinate.
                state.frag.add_uniform('vec2 screenSize', link='_screenSize')
                return f'textureLod({self.variable_name}, gl_FragCoord.xy / screenSize, 0.0).rgb'

            if self.variable_type == "vec2":
                # Pad vec2 to vec3 so downstream sockets get a vector.
                return f'vec3({self.variable_name}.xy, 0)'

            return self.variable_name

        else:
            if self.input_source == "frag":
                state.frag.add_in(f'{self.variable_type} {self.variable_name}')
                return self.variable_name

            # Reroute input from vertex shader to fragment shader (input must exist!)
            else:
                state.vert.add_out(f'{self.variable_type} out_{self.variable_name}')
                state.frag.add_in(f'{self.variable_type} out_{self.variable_name}')

                state.vert.write(f'out_{self.variable_name} = {self.variable_name};')
                return 'out_' + self.variable_name

    @staticmethod
    def parse(node: 'ShaderDataNode', out_socket: NodeSocket, state: ParserState) -> str:
        # Static entry point used by the cycles parser; delegates to the
        # name-mangled instance implementation.
        return node.__parse(out_socket, state)


# Make the node available in the 'Leenkx' add-node menu category.
add_node(ShaderDataNode, category='Leenkx')
|
170
leenkx/blender/lnx/material/make.py
Normal file
170
leenkx/blender/lnx/material/make.py
Normal file
@ -0,0 +1,170 @@
|
||||
from typing import Dict, List
|
||||
|
||||
import bpy
|
||||
from bpy.types import Material
|
||||
from bpy.types import Object
|
||||
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.make_shader as make_shader
|
||||
import lnx.material.mat_batch as mat_batch
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
import lnx.node_utils
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
log = lnx.reload_module(log)
|
||||
cycles = lnx.reload_module(cycles)
|
||||
make_shader = lnx.reload_module(make_shader)
|
||||
mat_batch = lnx.reload_module(mat_batch)
|
||||
mat_utils = lnx.reload_module(mat_utils)
|
||||
lnx.node_utils = lnx.reload_module(lnx.node_utils)
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def glsl_value(val):
    """Convert a Blender property value into a plain serializable value.

    ``bpy_prop_array`` instances (e.g. color/vector defaults) are copied
    into a plain list; every other value is returned unchanged.
    """
    # bpy_prop_array is matched by type name because the type itself is not
    # importable outside of Blender's C extension.
    if type(val).__name__ == 'bpy_prop_array':
        # list() replaces the manual append loop and detaches the data
        # from the bpy-owned array.
        return list(val)
    return val
||||
|
||||
|
||||
def parse(material: Material, mat_data, mat_users: Dict[Material, List[Object]], mat_lnxusers) -> tuple:
    """Build export data for *material*, appending contexts to ``mat_data``.

    Returns ``(sd, rpasses, sss_used)``: the shader data dict, the list of
    render-pass names, and whether subsurface scattering was activated.
    """
    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()

    # Texture caching for material batching
    batch_cached_textures = []

    needs_sss = material_needs_sss(material)
    if needs_sss and rpdat.rp_sss_state != 'Off' and '_SSS' not in wrd.world_defs:
        # Must be set before calling make_shader.build()
        wrd.world_defs += '_SSS'

    # No batch - shader data per material
    if material.lnx_custom_material != '':
        # Custom materials ship their own shader; only the 'mesh' pass is
        # exported and a fixed vertex layout is assumed.
        rpasses = ['mesh']

        con = {'vertex_elements': []}
        con['vertex_elements'].append({'name': 'pos', 'data': 'short4norm'})
        con['vertex_elements'].append({'name': 'nor', 'data': 'short2norm'})
        con['vertex_elements'].append({'name': 'tex', 'data': 'short2norm'})
        con['vertex_elements'].append({'name': 'tex1', 'data': 'short2norm'})

        sd = {'contexts': [con]}
        shader_data_name = material.lnx_custom_material
        bind_constants = {'mesh': []}
        bind_textures = {'mesh': []}

        make_shader.make_instancing_and_skinning(material, mat_users)

        # Export user-defined bind textures; entries without a uniform name
        # or an image are skipped with a warning instead of failing.
        for idx, item in enumerate(material.lnx_bind_textures_list):
            if item.uniform_name == '':
                log.warn(f'Material "{material.name}": skipping export of bind texture at slot {idx + 1} with empty uniform name')
                continue

            if item.image is not None:
                tex = cycles.make_texture(item.image, item.uniform_name, material.name, 'Linear', 'REPEAT')
                if tex is None:
                    continue
                bind_textures['mesh'].append(tex)
            else:
                log.warn(f'Material "{material.name}": skipping export of bind texture at slot {idx + 1} ("{item.uniform_name}") with no image selected')

    elif not wrd.lnx_batch_materials or material.name.startswith('lnxdefault'):
        # Per-material shader build.
        rpasses, shader_data, shader_data_name, bind_constants, bind_textures = make_shader.build(material, mat_users, mat_lnxusers)
        sd = shader_data.sd
    else:
        # Batched materials share shader data.
        rpasses, shader_data, shader_data_name, bind_constants, bind_textures = mat_batch.get(material)
        sd = shader_data.sd

    sss_used = False

    # Material
    for rp in rpasses:
        # Copy the bind lists so per-context additions below do not leak
        # into other passes.
        c = {
            'name': rp,
            'bind_constants': [] + bind_constants[rp],
            'bind_textures': [] + bind_textures[rp],
            'depth_read': material.lnx_depth_read,
        }
        mat_data['contexts'].append(c)

        if rp == 'mesh':
            c['bind_constants'].append({'name': 'receiveShadow', 'boolValue': material.lnx_receive_shadow})

            if material.lnx_material_id != 0:
                c['bind_constants'].append({'name': 'materialID', 'intValue': material.lnx_material_id})

                # Material id 2 enables the hair shading path globally.
                if material.lnx_material_id == 2:
                    wrd.world_defs += '_Hair'

            elif rpdat.rp_sss_state != 'Off':
                # Id 2 doubles as the SSS marker when SSS rendering is on.
                const = {'name': 'materialID'}
                if needs_sss:
                    const['intValue'] = 2
                    sss_used = True
                else:
                    const['intValue'] = 0
                c['bind_constants'].append(const)

            # TODO: Mesh only material batching
            if wrd.lnx_batch_materials:
                # Set textures uniforms
                if len(c['bind_textures']) > 0:
                    c['bind_textures'] = []
                    for node in material.node_tree.nodes:
                        if node.type == 'TEX_IMAGE':
                            tex_name = lnx.utils.safesrc(node.name)
                            tex = cycles.make_texture_from_image_node(node, tex_name)
                            # Empty texture
                            if tex is None:
                                tex = {'name': tex_name, 'file': ''}
                            c['bind_textures'].append(tex)
                    # Remember the resolved list so the shadowmap pass can
                    # reuse it below.
                    batch_cached_textures = c['bind_textures']

                # Set marked inputs as uniforms
                for node in material.node_tree.nodes:
                    for inp in node.inputs:
                        if inp.is_uniform:
                            uname = lnx.utils.safesrc(inp.node.name) + lnx.utils.safesrc(inp.name) # Merge with cycles module
                            c['bind_constants'].append({'name': uname, cycles.glsl_type(inp.type)+'Value': glsl_value(inp.default_value)})

        elif rp == 'translucent' or rp == 'refraction':
            c['bind_constants'].append({'name': 'receiveShadow', 'boolValue': material.lnx_receive_shadow})

        elif rp == 'shadowmap':
            if wrd.lnx_batch_materials:
                if len(c['bind_textures']) > 0:
                    c['bind_textures'] = batch_cached_textures

    if wrd.lnx_single_data_file:
        mat_data['shader'] = shader_data_name
    else:
        # Make sure that custom materials are not expected to be in .arm format
        ext = '' if wrd.lnx_minimize and material.lnx_custom_material == "" else '.json'
        mat_data['shader'] = shader_data_name + ext + '/' + shader_data_name

    return sd, rpasses, sss_used
|
||||
|
||||
|
||||
def material_needs_sss(material: Material) -> bool:
    """Check whether the given material requires SSS.

    A material needs SSS when any linked Subsurface Scattering node exists,
    or a linked Principled/Leenkx PBR node has a non-zero (or driven)
    subsurface input.
    """
    tree = material.node_tree

    # Any connected dedicated SSS node is enough on its own.
    if any(node is not None and node.outputs[0].is_linked
           for node in lnx.node_utils.iter_nodes_by_type(tree, 'SUBSURFACE_SCATTERING')):
        return True

    # Principled BSDF: subsurface input (index 1) linked or non-zero.
    if any(node is not None and node.outputs[0].is_linked
           and (node.inputs[1].is_linked or node.inputs[1].default_value != 0.0)
           for node in lnx.node_utils.iter_nodes_by_type(tree, 'BSDF_PRINCIPLED')):
        return True

    # Leenkx PBR group: subsurface input lives at index 8.
    return any(node is not None and node.outputs[0].is_linked
               and (node.inputs[8].is_linked or node.inputs[8].default_value != 0.0)
               for node in mat_utils.iter_nodes_leenkxpbr(tree))
|
100
leenkx/blender/lnx/material/make_attrib.py
Normal file
100
leenkx/blender/lnx/material/make_attrib.py
Normal file
@ -0,0 +1,100 @@
|
||||
from typing import Optional
|
||||
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.make_skin as make_skin
|
||||
import lnx.material.make_particle as make_particle
|
||||
import lnx.material.make_inst as make_inst
|
||||
import lnx.material.make_tess as make_tess
|
||||
import lnx.material.make_morph_target as make_morph_target
|
||||
from lnx.material.shader import Shader, ShaderContext
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
cycles = lnx.reload_module(cycles)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
make_skin = lnx.reload_module(make_skin)
|
||||
make_particle = lnx.reload_module(make_particle)
|
||||
make_inst = lnx.reload_module(make_inst)
|
||||
make_tess = lnx.reload_module(make_tess)
|
||||
make_morph_target = lnx.reload_module(make_morph_target)
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import Shader, ShaderContext
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def write_vertpos(vert):
    """Write the ``gl_Position`` assignment for the mesh vertex shader,
    handling particle systems and billboard modes."""
    mat = mat_state.material
    bb_mode = mat.lnx_billboard
    is_particle = mat.lnx_particle_flag

    # Particle path: optionally emit particle transforms first, then
    # position with either a spherical-billboard or plain WVP transform.
    if is_particle:
        if lnx.utils.get_rp().lnx_particles == 'On':
            make_particle.write(vert, particle_info=cycles.particle_info)
        if bb_mode == 'spherical':
            vert.add_uniform('mat4 WV', '_worldViewMatrix')
            vert.add_uniform('mat4 P', '_projectionMatrix')
            vert.write('gl_Position = P * (WV * vec4(0.0, 0.0, spos.z, 1.0) + vec4(spos.x, spos.y, 0.0, 0.0));')
        else:
            vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrix')
            vert.write('gl_Position = WVP * spos;')
        return

    # Non-particle path: pick the WVP uniform link matching the billboard
    # mode ('off' falls back to the plain world-view-projection matrix).
    wvp_link_by_mode = {
        'spherical': '_worldViewProjectionMatrixSphere',
        'cylindrical': '_worldViewProjectionMatrixCylinder',
    }
    vert.add_uniform('mat4 WVP', wvp_link_by_mode.get(bb_mode, '_worldViewProjectionMatrix'))
    vert.write('gl_Position = WVP * spos;')
|
||||
|
||||
|
||||
def write_norpos(con_mesh: ShaderContext, vert: Shader, declare=False, write_nor=True):
    """Emit vertex position (and optionally normal) setup, applying morph
    targets, skinning and per-instance offsets when the mesh has them.

    When *declare* is True the normal is declared as a new ``vec3``
    variable instead of assigning an existing one.
    """
    has_bone = con_mesh.is_elem('bone')
    has_morph = con_mesh.is_elem('morph')

    # Positions: morph targets are applied before skinning.
    if has_morph:
        make_morph_target.morph_pos(vert)
    if has_bone:
        make_skin.skin_pos(vert)

    if write_nor:
        decl = 'vec3 ' if declare else ''
        if has_morph:
            make_morph_target.morph_nor(vert, has_bone, decl)
        if has_bone:
            make_skin.skin_nor(vert, has_morph, decl)
        if not (has_morph or has_bone):
            # Static mesh: transform the packed normal straight to world space.
            vert.write_attrib(decl + 'wnormal = normalize(N * vec3(nor.xy, pos.w));')

    if con_mesh.is_elem('ipos'):
        make_inst.inst_pos(con_mesh, vert)
|
||||
|
||||
|
||||
def write_tex_coords(con_mesh: ShaderContext, vert: Shader, frag: Shader, tese: Optional[Shader]):
    """Emit texCoord/texCoord1 unpacking in the vertex shader (and
    tessellation interpolation when a tese stage is present)."""
    rpdat = lnx.utils.get_rp()

    if con_mesh.is_elem('tex'):
        vert.add_out('vec2 texCoord')
        # texUnpack rescales the packed short2norm UVs back to 0..1.
        vert.add_uniform('float texUnpack', link='_texUnpack')
        if mat_state.material.lnx_tilesheet_flag:
            # Tilesheets: particles compute their own offset per instance,
            # otherwise a single uniform offset is applied.
            if mat_state.material.lnx_particle_flag and rpdat.lnx_particles == 'On':
                make_particle.write_tilesheet(vert)
            else:
                vert.add_uniform('vec2 tilesheetOffset', '_tilesheetOffset')
                vert.write_attrib('texCoord = tex * texUnpack + tilesheetOffset;')
        else:
            vert.write_attrib('texCoord = tex * texUnpack;')

        if tese is not None:
            # Interpolate the UV across the tessellated patch.
            tese.write_pre = True
            make_tess.interpolate(tese, 'texCoord', 2, declare_out=frag.contains('texCoord'))
            tese.write_pre = False

    if con_mesh.is_elem('tex1'):
        # Second UV layer, same unpacking scheme.
        vert.add_out('vec2 texCoord1')
        vert.add_uniform('float texUnpack', link='_texUnpack')
        vert.write_attrib('texCoord1 = tex1 * texUnpack;')
        if tese is not None:
            tese.write_pre = True
            make_tess.interpolate(tese, 'texCoord1', 2, declare_out=frag.contains('texCoord1'))
            tese.write_pre = False
|
110
leenkx/blender/lnx/material/make_cluster.py
Normal file
110
leenkx/blender/lnx/material/make_cluster.py
Normal file
@ -0,0 +1,110 @@
|
||||
import bpy
|
||||
|
||||
import lnx.material.shader as shader
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
shader = lnx.reload_module(shader)
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def write(vert: shader.Shader, frag: shader.Shader):
    """Emit the clustered-lighting fragment loop: uniforms, cluster lookup,
    and the per-light ``sampleLight(...)`` call with all conditional
    arguments (shadows, spots, voxels, SSRS)."""
    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()
    blend = mat_state.material.lnx_blending
    parse_opacity = blend or mat_utils.is_transluc(mat_state.material)
    is_mobile = rpdat.lnx_material_model == 'Mobile'
    is_shadows = '_ShadowMap' in wrd.world_defs
    is_shadows_atlas = '_ShadowMapAtlas' in wrd.world_defs
    is_single_atlas = '_SingleAtlas' in wrd.world_defs

    frag.add_include_front('std/clusters.glsl')
    frag.add_uniform('vec2 cameraProj', link='_cameraPlaneProj')
    frag.add_uniform('vec2 cameraPlane', link='_cameraPlane')
    frag.add_uniform('vec4 lightsArray[maxLights * 3]', link='_lightsArray')
    frag.add_uniform('sampler2D clustersData', link='_clustersData')
    if is_shadows:
        frag.add_uniform('bool receiveShadow')
        frag.add_uniform('vec2 lightProj', link='_lightPlaneProj', included=True)
        if is_shadows_atlas:
            if not is_single_atlas:
                frag.add_uniform('sampler2DShadow shadowMapAtlasPoint', included=True)
                frag.add_uniform('sampler2D shadowMapAtlasPointTransparent', included=True)
            else:
                frag.add_uniform('sampler2DShadow shadowMapAtlas', top=True)
                frag.add_uniform('sampler2D shadowMapAtlasTransparent', top=True)
            frag.add_uniform('vec4 pointLightDataArray[maxLightsCluster]', link='_pointLightsAtlasArray', included=True)
        else:
            frag.add_uniform('samplerCubeShadow shadowMapPoint[4]', included=True)
            frag.add_uniform('samplerCube shadowMapPointTransparent[4]', included=True)

    # wvpposition is declared here unless the voxel pipeline already
    # provides it (the voxel defines control that case).
    if not '_VoxelAOvar' in wrd.world_defs and not '_VoxelGI' in wrd.world_defs or ((parse_opacity or '_VoxelShadow' in wrd.world_defs) and ('_VoxelAOvar' in wrd.world_defs or '_VoxelGI' in wrd.world_defs)):
        vert.add_out('vec4 wvpposition')
        vert.write('wvpposition = gl_Position;')

    # wvpposition.z / wvpposition.w
    frag.write('float viewz = linearize(gl_FragCoord.z, cameraProj);')
    frag.write('int clusterI = getClusterI((wvpposition.xy / wvpposition.w) * 0.5 + 0.5, viewz, cameraPlane);')
    frag.write('int numLights = int(texelFetch(clustersData, ivec2(clusterI, 0), 0).r * 255);')

    frag.write('#ifdef HLSL')
    frag.write('viewz += texture(clustersData, vec2(0.0)).r * 1e-9;') # TODO: krafix bug, needs to generate sampler
    frag.write('#endif')

    if '_Spot' in wrd.world_defs:
        frag.add_uniform('vec4 lightsArraySpot[maxLights * 2]', link='_lightsArraySpot')
        frag.write('int numSpots = int(texelFetch(clustersData, ivec2(clusterI, 1 + maxLightsCluster), 0).r * 255);')
        frag.write('int numPoints = numLights - numSpots;')
        if is_shadows:
            if is_shadows_atlas:
                if not is_single_atlas:
                    frag.add_uniform('sampler2DShadow shadowMapAtlasSpot', included=True)
                    frag.add_uniform('sampler2D shadowMapAtlasSpotTransparent', included=True)
                else:
                    frag.add_uniform('sampler2DShadow shadowMapAtlas', top=True)
                    frag.add_uniform('sampler2D shadowMapAtlasTransparent', top=True)
            else:
                frag.add_uniform('sampler2DShadow shadowMapSpot[4]', included=True)
                frag.add_uniform('sampler2D shadowMapSpotTransparent[4]', included=True)
            frag.add_uniform('mat4 LWVPSpotArray[maxLightsCluster]', link='_biasLightWorldViewProjectionMatrixSpotArray', included=True)

    # Per-light loop: each frag.write below appends one argument of the
    # generated sampleLight(...) call, so the order must match the GLSL
    # function signature in clusters.glsl.
    frag.write('for (int i = 0; i < min(numLights, maxLightsCluster); i++) {')
    frag.write('int li = int(texelFetch(clustersData, ivec2(clusterI, i + 1), 0).r * 255);')
    frag.write('direct += sampleLight(')
    frag.write('    wposition,')
    frag.write('    n,')
    frag.write('    vVec,')
    frag.write('    dotNV,')
    frag.write('    lightsArray[li * 3].xyz,') # lp
    frag.write('    lightsArray[li * 3 + 1].xyz,') # lightCol
    frag.write('    albedo,')
    frag.write('    roughness,')
    frag.write('    specular,')
    frag.write('    f0')

    if is_shadows:
        if parse_opacity:
            frag.write('\t, li, lightsArray[li * 3 + 2].x, lightsArray[li * 3 + 2].z != 0.0, opacity != 1.0') # bias
        else:
            frag.write('\t, li, lightsArray[li * 3 + 2].x, lightsArray[li * 3 + 2].z != 0.0, false') # bias
    if '_Spot' in wrd.world_defs:
        frag.write('\t, lightsArray[li * 3 + 2].y != 0.0')
        frag.write('\t, lightsArray[li * 3 + 2].y') # spot size (cutoff)
        frag.write('\t, lightsArraySpot[li * 2].w') # spot blend (exponent)
        frag.write('\t, lightsArraySpot[li * 2].xyz') # spotDir
        frag.write('\t, vec2(lightsArray[li * 3].w, lightsArray[li * 3 + 1].w)') # scale
        frag.write('\t, lightsArraySpot[li * 2 + 1].xyz') # right
    if '_VoxelShadow' in wrd.world_defs:
        frag.write(', voxels, voxelsSDF, clipmaps')
    if '_MicroShadowing' in wrd.world_defs and not is_mobile:
        frag.write('\t, occlusion')
    if '_SSRS' in wrd.world_defs:
        frag.add_uniform('mat4 invVP', '_inverseViewProjectionMatrix')
        frag.add_uniform('vec3 eye', '_cameraPosition')
        # Fixed: the argument must reference the declared uniform 'invVP'
        # (the previous string emitted undeclared 'inVP', breaking the
        # generated shader when _SSRS was enabled).
        frag.write(', gl_FragCoord.z, invVP, eye')
    frag.write(');')

    frag.write('}') # for numLights
|
84
leenkx/blender/lnx/material/make_decal.py
Normal file
84
leenkx/blender/lnx/material/make_decal.py
Normal file
@ -0,0 +1,84 @@
|
||||
import bpy
|
||||
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.make_finalize as make_finalize
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
cycles = lnx.reload_module(cycles)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
make_finalize = lnx.reload_module(make_finalize)
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def make(context_id):
    """Build the shader context for a deferred decal material.

    Decals are rendered as projector boxes: the fragment shader
    reconstructs the scene position from the depth buffer, discards
    fragments outside the unit box in decal-local space, and alpha-blends
    normal/roughness and base color into the gbuffer.
    """
    wrd = bpy.data.worlds['Lnx']

    # Alpha-blended, no depth write; only pos is needed since UVs are
    # derived from the projected position.
    vs = [{'name': 'pos', 'data': 'float3'}]
    con_decal = mat_state.data.add_context({ 'name': context_id, 'vertex_elements': vs, 'depth_write': False, 'compare_mode': 'less', 'cull_mode': 'clockwise',
        'blend_source': 'source_alpha',
        'blend_destination': 'inverse_source_alpha',
        'blend_operation': 'add',
        'color_writes_alpha': [False, False]
    })

    vert = con_decal.make_vert()
    frag = con_decal.make_frag()
    geom = None
    tesc = None
    tese = None

    vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrix')
    vert.add_uniform('mat3 N', '_normalMatrix')
    vert.add_out('vec4 wvpposition')
    vert.add_out('vec3 wnormal')

    # The decal box projects along its local +Z axis.
    vert.write('wnormal = N * vec3(0.0, 0.0, 1.0);')
    vert.write('wvpposition = WVP * vec4(pos.xyz, 1.0);')
    vert.write('gl_Position = wvpposition;')

    frag.add_include('compiled.inc')
    frag.add_include('std/gbuffer.glsl')
    frag.ins = vert.outs
    frag.add_uniform('sampler2D gbufferD')
    frag.add_uniform('mat4 invVP', '_inverseViewProjectionMatrix')
    frag.add_uniform('mat4 invW', '_inverseWorldMatrix')
    frag.add_out('vec4 fragColor[2]')

    frag.write_attrib('    vec3 n = normalize(wnormal);')

    # Reconstruct the world position under this fragment from the depth
    # buffer, then transform into decal-local space.
    frag.write_attrib('    vec2 screenPosition = wvpposition.xy / wvpposition.w;')
    frag.write_attrib('    vec2 depthCoord = screenPosition * 0.5 + 0.5;')
    frag.write_attrib('#ifdef _InvY')
    frag.write_attrib('    depthCoord.y = 1.0 - depthCoord.y;')
    frag.write_attrib('#endif')
    frag.write_attrib('    float depth = texture(gbufferD, depthCoord).r * 2.0 - 1.0;')

    frag.write_attrib('    vec3 wpos = getPos2(invVP, depth, depthCoord);')
    frag.write_attrib('    vec4 mpos = invW * vec4(wpos, 1.0);')
    # Clip to the unit decal box; outside fragments are discarded.
    frag.write_attrib('    if (abs(mpos.x) > 1.0) discard;')
    frag.write_attrib('    if (abs(mpos.y) > 1.0) discard;')
    frag.write_attrib('    if (abs(mpos.z) > 1.0) discard;')
    frag.write_attrib('    vec2 texCoord = mpos.xy * 0.5 + 0.5;')

    # Declare the outputs the node parser writes into.
    frag.write('vec3 basecol;')
    frag.write('float roughness;')
    frag.write('float metallic;')
    frag.write('float occlusion;')
    frag.write('float specular;')
    frag.write('float opacity;')
    frag.write('vec3 emissionCol;') # Declared to prevent compiler errors, but decals currently don't output any emission
    frag.write('float ior;')
    cycles.parse(mat_state.nodes, con_decal, vert, frag, geom, tesc, tese)

    # Octahedron-encode the normal for the gbuffer layout.
    frag.write('n /= (abs(n.x) + abs(n.y) + abs(n.z));')
    frag.write('n.xy = n.z >= 0.0 ? n.xy : octahedronWrap(n.xy);')
    frag.write('fragColor[0] = vec4(n.xy, roughness, opacity);')
    frag.write('fragColor[1] = vec4(basecol.rgb, opacity);')

    make_finalize.make(con_decal)

    return con_decal
|
258
leenkx/blender/lnx/material/make_depth.py
Normal file
258
leenkx/blender/lnx/material/make_depth.py
Normal file
@ -0,0 +1,258 @@
|
||||
import bpy
|
||||
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
import lnx.material.make_skin as make_skin
|
||||
import lnx.material.make_inst as make_inst
|
||||
import lnx.material.make_tess as make_tess
|
||||
import lnx.material.make_mesh as make_mesh
|
||||
import lnx.material.make_attrib as make_attrib
|
||||
import lnx.material.make_particle as make_particle
|
||||
import lnx.material.make_finalize as make_finalize
|
||||
import lnx.material.make_morph_target as make_morph_target
|
||||
import lnx.assets as assets
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
cycles = lnx.reload_module(cycles)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
mat_utils = lnx.reload_module(mat_utils)
|
||||
make_skin = lnx.reload_module(make_skin)
|
||||
make_inst = lnx.reload_module(make_inst)
|
||||
make_tess = lnx.reload_module(make_tess)
|
||||
make_particle = lnx.reload_module(make_particle)
|
||||
make_finalize = lnx.reload_module(make_finalize)
|
||||
assets = lnx.reload_module(assets)
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def make(context_id, rpasses, shadowmap=False, shadowmap_transparent=False):
|
||||
|
||||
is_disp = mat_utils.disp_linked(mat_state.output_node)
|
||||
|
||||
vs = [{'name': 'pos', 'data': 'short4norm'}]
|
||||
if is_disp or shadowmap_transparent:
|
||||
vs.append({'name': 'nor', 'data': 'short2norm'})
|
||||
|
||||
if shadowmap_transparent:
|
||||
con_depth = mat_state.data.add_context({
|
||||
'name': context_id,
|
||||
'vertex_elements': vs,
|
||||
'depth_write': False,
|
||||
'depth_read': True,
|
||||
'compare_mode': 'less',
|
||||
'cull_mode': 'clockwise',
|
||||
'blend_source': 'blend_zero',
|
||||
'blend_destination': 'source_color',
|
||||
'blend_operation': 'add',
|
||||
'color_writes_red': [True],
|
||||
'color_writes_green': [True],
|
||||
'color_writes_blue': [True],
|
||||
'color_writes_alpha': [True]
|
||||
})
|
||||
else:
|
||||
con_depth = mat_state.data.add_context({
|
||||
'name': context_id,
|
||||
'vertex_elements': vs,
|
||||
'depth_write': True,
|
||||
'depth_read': False,
|
||||
'compare_mode': 'less',
|
||||
'cull_mode': 'clockwise',
|
||||
'color_writes_red': [False],
|
||||
'color_writes_green': [False],
|
||||
'color_writes_blue': [False],
|
||||
'color_writes_alpha': [False]
|
||||
})
|
||||
|
||||
vert = con_depth.make_vert()
|
||||
frag = con_depth.make_frag()
|
||||
geom = None
|
||||
tesc = None
|
||||
tese = None
|
||||
|
||||
vert.write_attrib('vec4 spos = vec4(pos.xyz, 1.0);')
|
||||
vert.add_include('compiled.inc')
|
||||
frag.add_include('compiled.inc')
|
||||
|
||||
parse_opacity = 'translucent' in rpasses or 'refraction' in rpasses or mat_state.material.lnx_discard
|
||||
|
||||
parse_custom_particle = (cycles.node_by_name(mat_state.nodes, 'LnxCustomParticleNode') is not None)
|
||||
|
||||
if shadowmap_transparent:
|
||||
billboard = mat_state.material.lnx_billboard
|
||||
if billboard == 'spherical':
|
||||
vert.add_uniform('mat3 N', '_normalMatrixSphere')
|
||||
elif billboard == 'cylindrical':
|
||||
vert.add_uniform('mat3 N', '_normalMatrixCylinder')
|
||||
else:
|
||||
vert.add_uniform('mat3 N', '_normalMatrix')
|
||||
make_mesh._write_material_attribs_default(frag, parse_opacity) #TODO remove duplicate parsing
|
||||
vert.add_out('vec3 wnormal')
|
||||
make_attrib.write_norpos(con_depth, vert)
|
||||
frag.write_attrib('vec3 n = normalize(wnormal);')
|
||||
cycles.parse(mat_state.nodes, con_depth, vert, frag, geom, tesc, tese, basecol_only=True, parse_opacity=True)
|
||||
elif parse_opacity:
|
||||
frag.write('float opacity;')
|
||||
frag.write('float ior;')
|
||||
|
||||
if(con_depth).is_elem('morph'):
|
||||
make_morph_target.morph_pos(vert)
|
||||
|
||||
if con_depth.is_elem('bone'):
|
||||
make_skin.skin_pos(vert)
|
||||
|
||||
if (not is_disp and parse_custom_particle):
|
||||
cycles.parse(mat_state.nodes, con_depth, vert, frag, geom, tesc, tese, parse_surface=False, parse_opacity=parse_opacity)
|
||||
|
||||
if con_depth.is_elem('ipos'):
|
||||
make_inst.inst_pos(con_depth, vert)
|
||||
|
||||
rpdat = lnx.utils.get_rp()
|
||||
if mat_state.material.lnx_particle_flag and rpdat.lnx_particles == 'On':
|
||||
make_particle.write(vert, shadowmap=shadowmap)
|
||||
|
||||
if is_disp:
|
||||
if rpdat.lnx_rp_displacement == 'Vertex':
|
||||
frag.ins = vert.outs
|
||||
vert.add_uniform('mat3 N', '_normalMatrix')
|
||||
vert.write('vec3 wnormal = normalize(N * vec3(nor.xy, pos.w));')
|
||||
if(con_depth.is_elem('ipos')):
|
||||
vert.write('wposition = vec4(W * spos).xyz;')
|
||||
if(con_depth.is_elem('irot')):
|
||||
vert.write('wnormal = normalize(N * mirot * vec3(nor.xy, pos.w));')
|
||||
if not shadowmap_transparent:
|
||||
cycles.parse(mat_state.nodes, con_depth, vert, frag, geom, tesc, tese, parse_surface=False, parse_opacity=parse_opacity)
|
||||
if con_depth.is_elem('tex'):
|
||||
vert.add_out('vec2 texCoord') ## vs only, remove out
|
||||
vert.add_uniform('float texUnpack', link='_texUnpack')
|
||||
vert.write_attrib('texCoord = tex * texUnpack;')
|
||||
if con_depth.is_elem('tex1'):
|
||||
vert.add_out('vec2 texCoord1') ## vs only, remove out
|
||||
vert.add_uniform('float texUnpack', link='_texUnpack')
|
||||
vert.write_attrib('texCoord1 = tex1 * texUnpack;')
|
||||
if con_depth.is_elem('col'):
|
||||
vert.add_out('vec3 vcolor')
|
||||
vert.write_attrib('vcolor = col.rgb;')
|
||||
vert.write('wposition += wnormal * disp;')
|
||||
if shadowmap:
|
||||
vert.add_uniform('mat4 LVP', '_lightViewProjectionMatrix')
|
||||
vert.write('gl_Position = LVP * vec4(wposition, 1.0);')
|
||||
else:
|
||||
vert.add_uniform('mat4 VP', '_viewProjectionMatrix')
|
||||
vert.write('gl_Position = VP * vec4(wposition, 1.0);')
|
||||
|
||||
else: # Tessellation
|
||||
tesc = con_depth.make_tesc()
|
||||
tese = con_depth.make_tese()
|
||||
tesc.ins = vert.outs
|
||||
tese.ins = tesc.outs
|
||||
frag.ins = tese.outs
|
||||
|
||||
vert.add_out('vec3 wnormal')
|
||||
vert.add_uniform('mat3 N', '_normalMatrix')
|
||||
vert.write('wnormal = normalize(N * vec3(nor.xy, pos.w));')
|
||||
|
||||
make_tess.tesc_levels(tesc, rpdat.lnx_tess_shadows_inner, rpdat.lnx_tess_shadows_outer)
|
||||
make_tess.interpolate(tese, 'wposition', 3)
|
||||
make_tess.interpolate(tese, 'wnormal', 3, normalize=True)
|
||||
|
||||
if not shadowmap_transparent:
|
||||
cycles.parse(mat_state.nodes, con_depth, vert, frag, geom, tesc, tese, parse_surface=False, parse_opacity=parse_opacity)
|
||||
|
||||
if con_depth.is_elem('tex'):
|
||||
vert.add_out('vec2 texCoord')
|
||||
vert.add_uniform('float texUnpack', link='_texUnpack')
|
||||
vert.write('texCoord = tex * texUnpack;')
|
||||
tese.write_pre = True
|
||||
make_tess.interpolate(tese, 'texCoord', 2, declare_out=frag.contains('texCoord'))
|
||||
tese.write_pre = False
|
||||
|
||||
if con_depth.is_elem('tex1'):
|
||||
vert.add_out('vec2 texCoord1')
|
||||
vert.write('texCoord1 = tex1;')
|
||||
tese.write_pre = True
|
||||
make_tess.interpolate(tese, 'texCoord1', 2, declare_out=frag.contains('texCoord1'))
|
||||
tese.write_pre = False
|
||||
|
||||
if con_depth.is_elem('col'):
|
||||
vert.add_out('vec3 vcolor')
|
||||
vert.write('vcolor = col.rgb;')
|
||||
tese.write_pre = True
|
||||
make_tess.interpolate(tese, 'vcolor', 3, declare_out=frag.contains('vcolor'))
|
||||
tese.write_pre = False
|
||||
|
||||
if shadowmap:
|
||||
tese.add_uniform('mat4 LVP', '_lightViewProjectionMatrix')
|
||||
tese.write('wposition += wnormal * disp;')
|
||||
tese.write('gl_Position = LVP * vec4(wposition, 1.0);')
|
||||
else:
|
||||
tese.add_uniform('mat4 VP', '_viewProjectionMatrix')
|
||||
tese.write('wposition += wnormal * disp;')
|
||||
tese.write('gl_Position = VP * vec4(wposition, 1.0);')
|
||||
# No displacement
|
||||
else:
|
||||
frag.ins = vert.outs
|
||||
billboard = mat_state.material.lnx_billboard
|
||||
if shadowmap:
|
||||
if billboard == 'spherical':
|
||||
vert.add_uniform('mat4 LWVP', '_lightWorldViewProjectionMatrixSphere')
|
||||
elif billboard == 'cylindrical':
|
||||
vert.add_uniform('mat4 LWVP', '_lightWorldViewProjectionMatrixCylinder')
|
||||
else: # off
|
||||
vert.add_uniform('mat4 LWVP', '_lightWorldViewProjectionMatrix')
|
||||
vert.write('gl_Position = LWVP * spos;')
|
||||
else:
|
||||
if billboard == 'spherical':
|
||||
vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrixSphere')
|
||||
elif billboard == 'cylindrical':
|
||||
vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrixCylinder')
|
||||
else: # off
|
||||
vert.add_uniform('mat4 WVP', '_worldViewProjectionMatrix')
|
||||
vert.write('gl_Position = WVP * spos;')
|
||||
|
||||
if parse_opacity:
|
||||
if not parse_custom_particle and not shadowmap_transparent:
|
||||
cycles.parse(mat_state.nodes, con_depth, vert, frag, geom, tesc, tese, parse_surface=False, parse_opacity=True)
|
||||
|
||||
if con_depth.is_elem('tex'):
|
||||
vert.add_out('vec2 texCoord')
|
||||
vert.add_uniform('float texUnpack', link='_texUnpack')
|
||||
if mat_state.material.lnx_tilesheet_flag:
|
||||
vert.add_uniform('vec2 tilesheetOffset', '_tilesheetOffset')
|
||||
vert.write('texCoord = tex * texUnpack + tilesheetOffset;')
|
||||
else:
|
||||
vert.write('texCoord = tex * texUnpack;')
|
||||
|
||||
if con_depth.is_elem('tex1'):
|
||||
vert.add_out('vec2 texCoord1')
|
||||
vert.write('texCoord1 = tex1;')
|
||||
|
||||
if con_depth.is_elem('col'):
|
||||
vert.add_out('vec3 vcolor')
|
||||
vert.write('vcolor = col.rgb;')
|
||||
|
||||
if shadowmap_transparent:
|
||||
frag.add_out('vec4 fragColor')
|
||||
vert.add_out('vec4 wvpposition')
|
||||
vert.write('wvpposition = gl_Position;')
|
||||
frag.write('float depth = (wvpposition.z / wvpposition.w) * 0.5 + 0.5;')
|
||||
frag.write('vec3 color = basecol;')
|
||||
frag.write('color *= 1.0 - opacity;')
|
||||
frag.write('fragColor = vec4(color, depth);')
|
||||
|
||||
if parse_opacity and not shadowmap_transparent:
|
||||
if mat_state.material.lnx_discard:
|
||||
opac = mat_state.material.lnx_discard_opacity_shadows
|
||||
else:
|
||||
opac = '1.0'
|
||||
frag.write('if (opacity < {0}) discard;'.format(opac))
|
||||
|
||||
make_finalize.make(con_depth)
|
||||
|
||||
assets.vs_equal(con_depth, assets.shader_cons['depth_vert'])
|
||||
assets.fs_equal(con_depth, assets.shader_cons['depth_frag'])
|
||||
|
||||
return con_depth
|
155
leenkx/blender/lnx/material/make_finalize.py
Normal file
155
leenkx/blender/lnx/material/make_finalize.py
Normal file
@ -0,0 +1,155 @@
|
||||
import bpy
|
||||
|
||||
import lnx
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.make_tess as make_tess
|
||||
from lnx.material.shader import ShaderContext
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
make_tess = lnx.reload_module(make_tess)
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import ShaderContext
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def make(con_mesh: ShaderContext):
    """Finalize a shader context before export.

    Injects the vertex outputs, uniforms and attribute writes that the
    generated fragment code references but that node parsing did not emit
    itself (world normal, world/model/bounding-box positions, tangent,
    view vectors), ensures the 'nor' vertex element exists, and applies
    legacy sampler fallbacks.
    """
    vert = con_mesh.vert
    frag = con_mesh.frag
    geom = con_mesh.geom
    tesc = con_mesh.tesc
    tese = con_mesh.tese

    # Additional values referenced in cycles
    # TODO: enable from cycles.py
    if frag.contains('dotNV') and not frag.contains('float dotNV'):
        frag.write_init('float dotNV = max(dot(n, vVec), 0.0);')

    # n is not always defined yet (in some shadowmap shaders e.g.)
    if not frag.contains('vec3 n'):
        vert.add_out('vec3 wnormal')
        # Billboard materials use a dedicated normal matrix
        normal_links = {
            'spherical': '_normalMatrixSphere',
            'cylindrical': '_normalMatrixCylinder',
        }
        billboard = mat_state.material.lnx_billboard
        vert.add_uniform('mat3 N', normal_links.get(billboard, '_normalMatrix'))
        vert.write_attrib('wnormal = normalize(N * vec3(nor.xy, pos.w));')
        frag.write_attrib('vec3 n = normalize(wnormal);')

    # If not yet added, add nor vertex data
    vertex_elems = con_mesh.data['vertex_elements']
    if not any(elem['name'] == 'nor' for elem in vertex_elems):
        vertex_elems.append({'name': 'nor', 'data': 'short2norm'})

    # View vector: interpolated eye direction, plus a local wposition
    # in the vertex stage when nothing exported it yet
    write_wpos = False
    if frag.contains('vVec') and not frag.contains('vec3 vVec'):
        if tese is not None:
            tese.add_out('vec3 eyeDir')
            tese.add_uniform('vec3 eye', '_cameraPosition')
            tese.write('eyeDir = eye - wposition;')
        else:
            if not vert.contains('wposition'):
                write_wpos = True
            vert.add_out('vec3 eyeDir')
            vert.add_uniform('vec3 eye', '_cameraPosition')
            vert.write('eyeDir = eye - wposition;')
        frag.write_attrib('vec3 vVec = normalize(eyeDir);')

    # World position: exported as an interpolator when the fragment
    # stage (or tessellation) needs it, otherwise written locally
    export_wpos = frag.contains('wposition') and not frag.contains('vec3 wposition')
    if tese is not None:
        export_wpos = True
        if vert.contains('wposition'):
            write_wpos = True

    if export_wpos:
        vert.add_uniform('mat4 W', '_worldMatrix')
        vert.add_out('vec3 wposition')
        vert.write('wposition = vec4(W * spos).xyz;')
    elif write_wpos:
        vert.add_uniform('mat4 W', '_worldMatrix')
        vert.write_attrib('vec3 wposition = vec4(W * spos).xyz;')

    # Model-space position (unpacked from the quantized vertex data)
    frag_mpos = (frag.contains('mposition') and not frag.contains('vec3 mposition')) or vert.contains('mposition')
    if frag_mpos:
        vert.add_out('vec3 mposition')
        vert.add_uniform('float posUnpack', link='_posUnpack')
        vert.write_attrib('mposition = spos.xyz * posUnpack;')

    if tese is not None:
        if frag_mpos:
            make_tess.interpolate(tese, 'mposition', 3, declare_out=True)
        elif tese.contains('mposition') and not tese.contains('vec3 mposition'):
            # Only the tessellation stage needs it: emit before the
            # displaced position is written (write_pre)
            vert.add_out('vec3 mposition')
            vert.write_pre = True
            vert.add_uniform('float posUnpack', link='_posUnpack')
            vert.write('mposition = spos.xyz * posUnpack;')
            vert.write_pre = False
            make_tess.interpolate(tese, 'mposition', 3, declare_out=False)

    # Normalized bounding-box position in [0, 1] per axis
    frag_bpos = (frag.contains('bposition') and not frag.contains('vec3 bposition')) or vert.contains('bposition')
    if frag_bpos:
        vert.add_out('vec3 bposition')
        vert.add_uniform('vec3 dim', link='_dim')
        vert.add_uniform('vec3 hdim', link='_halfDim')
        vert.add_uniform('float posUnpack', link='_posUnpack')
        vert.write_attrib('bposition = (spos.xyz * posUnpack + hdim) / dim;')
        # Guard against division by zero on degenerate (flat) dimensions
        vert.write_attrib('if (dim.z == 0) bposition.z = 0;')
        vert.write_attrib('if (dim.y == 0) bposition.y = 0;')
        vert.write_attrib('if (dim.x == 0) bposition.x = 0;')

    if tese is not None:
        if frag_bpos:
            make_tess.interpolate(tese, 'bposition', 3, declare_out=True)
        elif tese.contains('bposition') and not tese.contains('vec3 bposition'):
            vert.add_out('vec3 bposition')
            vert.add_uniform('vec3 dim', link='_dim')
            vert.add_uniform('vec3 hdim', link='_halfDim')
            vert.add_uniform('float posUnpack', link='_posUnpack')
            vert.write_attrib('bposition = (spos.xyz * posUnpack + hdim) / dim;')
            make_tess.interpolate(tese, 'bposition', 3, declare_out=False)

    # World tangent
    frag_wtan = (frag.contains('wtangent') and not frag.contains('vec3 wtangent')) or vert.contains('wtangent')
    if frag_wtan:
        # Indicate we want tang attrib in finalizer to prevent TBN generation
        con_mesh.add_elem('tex', 'short2norm')
        con_mesh.add_elem('tang', 'short4norm')
        vert.add_out('vec3 wtangent')
        vert.write_pre = True
        vert.write('wtangent = normalize(N * tang.xyz);')
        vert.write_pre = False

    if tese is not None:
        if frag_wtan:
            make_tess.interpolate(tese, 'wtangent', 3, declare_out=True)
        elif tese.contains('wtangent') and not tese.contains('vec3 wtangent'):
            vert.add_out('vec3 wtangent')
            vert.write_pre = True
            vert.write('wtangent = normalize(N * tang.xyz);')
            vert.write_pre = False
            make_tess.interpolate(tese, 'wtangent', 3, declare_out=False)

    # Camera-space view vector
    if frag.contains('vVecCam'):
        vert.add_out('vec3 eyeDirCam')
        vert.add_uniform('mat4 WV', '_worldViewMatrix')
        vert.write('eyeDirCam = vec4(WV * spos).xyz; eyeDirCam.z *= -1;')
        frag.write_attrib('vec3 vVecCam = normalize(eyeDirCam);')

    # Raw (object-space) normal attribute
    if frag.contains('nAttr'):
        vert.add_out('vec3 nAttr')
        vert.write_attrib('nAttr = vec3(nor.xy, pos.w);')

    # Legacy targets have no shadow sampler types
    wrd = bpy.data.worlds['Lnx']
    if '_Legacy' in wrd.world_defs:
        frag.replace('sampler2DShadow', 'sampler2D')
        frag.replace('samplerCubeShadow', 'samplerCube')
|
24
leenkx/blender/lnx/material/make_inst.py
Normal file
24
leenkx/blender/lnx/material/make_inst.py
Normal file
@ -0,0 +1,24 @@
|
||||
|
||||
def inst_pos(con, vert):
    """Emit vertex-shader code applying per-instance transforms to `spos`.

    Order matters: rotation ('irot'), then scale ('iscl'), then offset
    ('ipos'). For contexts that shade with the world normal, the normal
    is rotated as well.
    """
    if con.is_elem('irot'):
        # Euler angles -> rotation matrix, see:
        # http://www.euclideanspace.com/maths/geometry/rotations/conversions/eulerToMatrix/index.htm
        for axis in ('x', 'y', 'z'):
            vert.write(f'float srot{axis} = sin(irot.{axis});')
            vert.write(f'float crot{axis} = cos(irot.{axis});')
        vert.write('mat3 mirot = mat3(')
        vert.write(' croty * crotz, srotz, -sroty * crotz,')
        vert.write(' -croty * srotz * crotx + sroty * srotx, crotz * crotx, sroty * srotz * crotx + croty * srotx,')
        vert.write(' croty * srotz * srotx + sroty * crotx, -crotz * srotx, -sroty * srotz * srotx + croty * crotx')
        vert.write(');')
        vert.write('spos.xyz = mirot * spos.xyz;')
        # Keep the shaded normal in sync with the instance rotation
        if con.data['name'] in ('mesh', 'translucent', 'refraction') and vert.contains('wnormal'):
            vert.write('wnormal = normalize(N * mirot * vec3(nor.xy, pos.w));')

    if con.is_elem('iscl'):
        vert.write('spos.xyz *= iscl;')

    vert.write('spos.xyz += ipos;')
|
814
leenkx/blender/lnx/material/make_mesh.py
Normal file
814
leenkx/blender/lnx/material/make_mesh.py
Normal file
@ -0,0 +1,814 @@
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
import bpy
|
||||
|
||||
import lnx.assets as assets
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.make_tess as make_tess
|
||||
import lnx.material.make_particle as make_particle
|
||||
import lnx.material.make_cluster as make_cluster
|
||||
import lnx.material.make_finalize as make_finalize
|
||||
import lnx.material.make_attrib as make_attrib
|
||||
import lnx.material.shader as shader
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
assets = lnx.reload_module(assets)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
mat_utils = lnx.reload_module(mat_utils)
|
||||
cycles = lnx.reload_module(cycles)
|
||||
make_tess = lnx.reload_module(make_tess)
|
||||
make_particle = lnx.reload_module(make_particle)
|
||||
make_cluster = lnx.reload_module(make_cluster)
|
||||
make_finalize = lnx.reload_module(make_finalize)
|
||||
make_attrib = lnx.reload_module(make_attrib)
|
||||
shader = lnx.reload_module(shader)
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
# True while the material currently being built has a displacement link
# (assigned in make_base, read when writing the final position).
is_displacement = False

# User callbacks: hooks external code may assign to customize generation.
# write_material_attribs: return True to skip the default material
#   attribute declarations (see make_base).
# write_material_attribs_post: called after node parsing, no return value.
# write_vertex_attribs: return True to skip the default vertex position
#   write (see make_base).
write_material_attribs: Optional[Callable[[dict[str, Any], shader.Shader], bool]] = None
write_material_attribs_post: Optional[Callable[[dict[str, Any], shader.Shader], None]] = None
write_vertex_attribs: Optional[Callable[[shader.Shader], bool]] = None
||||
|
||||
def make(context_id, rpasses):
    """Create and register the mesh shader context for the active material.

    Configures blending / depth state for the context, picks the renderer
    path (forward, deferred, raytracer), finalizes the shaders and returns
    the new context.
    """
    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()
    rid = rpdat.rp_renderer

    con = {
        'name': context_id,
        'depth_write': True,
        'compare_mode': 'less',
        'cull_mode': 'clockwise',
    }

    mat = mat_state.material
    blend = mat.lnx_blending
    particle = mat.lnx_particle_flag
    dprepass = rid == 'Forward' and rpdat.rp_depthprepass

    if blend:
        # Blended materials render through the dedicated 'blend' context
        con['name'] = 'blend'
        con['blend_source'] = mat.lnx_blending_source
        con['blend_destination'] = mat.lnx_blending_destination
        con['blend_operation'] = mat.lnx_blending_operation
        con['alpha_blend_source'] = mat.lnx_blending_source_alpha
        con['alpha_blend_destination'] = mat.lnx_blending_destination_alpha
        con['alpha_blend_operation'] = mat.lnx_blending_operation_alpha
        con['depth_write'] = False
        con['compare_mode'] = 'less'
    elif particle:
        pass
    elif dprepass and not (rpdat.rp_depth_texture and mat.lnx_depth_read):
        # Depth prepass was performed; exclude materials with depth read
        # that aren't part of the depth prepass
        con['depth_write'] = False
        con['compare_mode'] = 'equal'

    attachment_format = 'RGBA32' if '_LDR' in wrd.world_defs else 'RGBA64'
    con['color_attachments'] = [attachment_format, attachment_format]
    if '_gbuffer2' in wrd.world_defs:
        con['color_attachments'].append(attachment_format)

    con_mesh = mat_state.data.add_context(con)
    mat_state.con_mesh = con_mesh

    if rid == 'Forward' or blend:
        model = rpdat.lnx_material_model
        if model == 'Mobile':
            make_forward_mobile(con_mesh)
        elif model == 'Solid':
            make_forward_solid(con_mesh)
        else:
            make_forward(con_mesh)
    elif rid == 'Deferred':
        make_deferred(con_mesh, rpasses)
    elif rid == 'Raytracer':
        make_raytracer(con_mesh)

    make_finalize.make(con_mesh)

    assets.vs_equal(con_mesh, assets.shader_cons['mesh_vert'])

    return con_mesh
|
||||
|
||||
|
||||
def make_base(con_mesh, parse_opacity):
    """Build the vertex/fragment (and optional tessellation) shaders shared
    by all render paths.

    Parses the material node tree into the context shaders, wires up
    normals, texture coordinates, vertex colors, tangents and — when the
    material has a displacement link — the displaced output position.

    Side effect: sets the module-level `is_displacement` flag for later
    stages (e.g. make_deferred).
    """
    global is_displacement
    # NOTE: write_vertex_attribs / write_material_attribs are only read
    # here, so no `global` declaration is required for them.

    vert = con_mesh.make_vert()
    frag = con_mesh.make_frag()
    geom = None
    tesc = None
    tese = None

    # Billboard materials need a matching normal matrix
    billboard = mat_state.material.lnx_billboard
    if billboard == 'spherical':
        vert.add_uniform('mat3 N', '_normalMatrixSphere')
    elif billboard == 'cylindrical':
        vert.add_uniform('mat3 N', '_normalMatrixCylinder')
    else:
        vert.add_uniform('mat3 N', '_normalMatrix')
    vert.write_attrib('vec4 spos = vec4(pos.xyz, 1.0);')

    vattr_written = False
    rpdat = lnx.utils.get_rp()
    is_displacement = mat_utils.disp_linked(mat_state.output_node)
    if is_displacement:
        if rpdat.lnx_rp_displacement == 'Vertex':
            frag.ins = vert.outs
        else:  # Tessellation
            tesc = con_mesh.make_tesc()
            tese = con_mesh.make_tese()
            tesc.ins = vert.outs
            tese.ins = tesc.outs
            frag.ins = tese.outs
            make_tess.tesc_levels(tesc, rpdat.lnx_tess_mesh_inner, rpdat.lnx_tess_mesh_outer)
            make_tess.interpolate(tese, 'wposition', 3, declare_out=True)
            make_tess.interpolate(tese, 'wnormal', 3, declare_out=True, normalize=True)
    # No displacement
    else:
        frag.ins = vert.outs
        # User callback may take over writing the vertex position
        if write_vertex_attribs is not None:
            vattr_written = write_vertex_attribs(vert)

    vert.add_include('compiled.inc')
    frag.add_include('compiled.inc')

    # User callback may take over the default material attributes
    attribs_written = False
    if write_material_attribs is not None:
        attribs_written = write_material_attribs(con_mesh, frag)
    if not attribs_written:
        _write_material_attribs_default(frag, parse_opacity)
    cycles.parse(mat_state.nodes, con_mesh, vert, frag, geom, tesc, tese, parse_opacity=parse_opacity)
    if write_material_attribs_post is not None:
        write_material_attribs_post(con_mesh, frag)

    vert.add_out('vec3 wnormal')
    make_attrib.write_norpos(con_mesh, vert)
    frag.write_attrib('vec3 n = normalize(wnormal);')

    if mat_state.material.lnx_two_sided:
        frag.write('if (!gl_FrontFacing) n *= -1;')  # Flip normal when drawing back-face

    if not is_displacement and not vattr_written:
        make_attrib.write_vertpos(vert)

    make_attrib.write_tex_coords(con_mesh, vert, frag, tese)

    if con_mesh.is_elem('col'):
        vert.add_out('vec3 vcolor')
        vert.write_attrib('vcolor = col.rgb;')
        if tese is not None:
            tese.write_pre = True
            make_tess.interpolate(tese, 'vcolor', 3, declare_out=frag.contains('vcolor'))
            tese.write_pre = False

    if con_mesh.is_elem('tang'):
        if tese is not None:
            tese.add_out('mat3 TBN')
            tese.write_attrib('vec3 wbitangent = normalize(cross(wnormal, wtangent));')
            tese.write_attrib('TBN = mat3(wtangent, wbitangent, wnormal);')
        else:
            vert.add_out('mat3 TBN')
            vert.write_attrib('vec3 tangent = normalize(N * tang.xyz);')
            vert.write_attrib('vec3 bitangent = normalize(cross(wnormal, tangent));')
            vert.write_attrib('TBN = mat3(tangent, bitangent, wnormal);')

    if is_displacement:
        # The stage owning the final position: vertex or tess-eval
        if rpdat.lnx_rp_displacement == 'Vertex':
            sh = vert
        else:
            sh = tese
        if con_mesh.is_elem('ipos'):
            vert.write('wposition = vec4(W * spos).xyz;')
        sh.add_uniform('mat4 VP', '_viewProjectionMatrix')
        sh.write('wposition += wnormal * disp;')
        sh.write('gl_Position = VP * vec4(wposition, 1.0);')
|
||||
def make_deferred(con_mesh, rpasses):
    """Write the deferred (g-buffer) mesh shaders for the given context.

    Builds on make_base() and packs normal/roughness/metallic, base color,
    velocity and emission data into the g-buffer targets.

    Returns con_mesh.
    """
    wrd = bpy.data.worlds['Lnx']

    lnx_discard = mat_state.material.lnx_discard
    # Opacity must be parsed when the material discards, or when a
    # translucent/refraction pass also renders this material.
    # BUGFIX: was `lnx_discard or 'translucent' or 'refraction' in rpasses`,
    # which always evaluated truthy ('translucent' is a non-empty string).
    parse_opacity = lnx_discard or 'translucent' in rpasses or 'refraction' in rpasses

    make_base(con_mesh, parse_opacity=parse_opacity)

    frag = con_mesh.frag
    vert = con_mesh.vert
    tese = con_mesh.tese

    if parse_opacity:
        if lnx_discard:
            opac = mat_state.material.lnx_discard_opacity
        else:
            opac = '0.9999'  # 1.0 - eps
        frag.write('if (opacity < {0}) discard;'.format(opac))

    frag.add_out('vec4 fragColor[GBUF_SIZE]')

    if '_gbuffer2' in wrd.world_defs:
        if '_Veloc' in wrd.world_defs:
            # Export current and previous clip-space positions so the
            # velocity vector can be derived in the fragment stage.
            if tese is None:
                vert.add_uniform('mat4 prevWVP', link='_prevWorldViewProjectionMatrix')
                vert.add_out('vec4 wvpposition')
                vert.add_out('vec4 prevwvpposition')
                vert.write('wvpposition = gl_Position;')
                if is_displacement:
                    vert.add_uniform('mat4 invW', link='_inverseWorldMatrix')
                    vert.write('prevwvpposition = prevWVP * (invW * vec4(wposition, 1.0));')
                else:
                    vert.write('prevwvpposition = prevWVP * spos;')
            else:
                tese.add_out('vec4 wvpposition')
                tese.add_out('vec4 prevwvpposition')
                tese.write('wvpposition = gl_Position;')
                if is_displacement:
                    tese.add_uniform('mat4 invW', link='_inverseWorldMatrix')
                    tese.add_uniform('mat4 prevWVP', '_prevWorldViewProjectionMatrix')
                    tese.write('prevwvpposition = prevWVP * (invW * vec4(wposition, 1.0));')
                else:
                    vert.add_uniform('mat4 prevW', link='_prevWorldMatrix')
                    vert.add_out('vec3 prevwposition')
                    vert.write('prevwposition = vec4(prevW * spos).xyz;')
                    tese.add_uniform('mat4 prevVP', '_prevViewProjectionMatrix')
                    make_tess.interpolate(tese, 'prevwposition', 3)
                    tese.write('prevwvpposition = prevVP * vec4(prevwposition, 1.0);')

    # Pack gbuffer
    frag.add_include('std/gbuffer.glsl')

    # Octahedron-encode the normal into two components
    frag.write('n /= (abs(n.x) + abs(n.y) + abs(n.z));')
    frag.write('n.xy = n.z >= 0.0 ? n.xy : octahedronWrap(n.xy);')

    is_shadeless = mat_state.emission_type == mat_state.EmissionType.SHADELESS
    if is_shadeless or '_SSS' in wrd.world_defs or '_Hair' in wrd.world_defs:
        frag.write('uint matid = 0;')
        if is_shadeless:
            frag.write('matid = 1;')
            frag.write('basecol = emissionCol;')
        if '_SSS' in wrd.world_defs or '_Hair' in wrd.world_defs:
            frag.add_uniform('int materialID')
            frag.write('if (materialID == 2) matid = 2;')
    else:
        frag.write('const uint matid = 0;')

    frag.write('fragColor[GBUF_IDX_0] = vec4(n.xy, roughness, packFloatInt16(metallic, matid));')
    frag.write('fragColor[GBUF_IDX_1] = vec4(basecol, packFloat2(occlusion, specular));')

    if '_gbuffer2' in wrd.world_defs:
        if '_Veloc' in wrd.world_defs:
            frag.write('vec2 posa = (wvpposition.xy / wvpposition.w) * 0.5 + 0.5;')
            frag.write('vec2 posb = (prevwvpposition.xy / prevwvpposition.w) * 0.5 + 0.5;')
            frag.write('fragColor[GBUF_IDX_2].rg = vec2(posa - posb);')
            frag.write('fragColor[GBUF_IDX_2].b = 0.0;')

        if mat_state.material.lnx_ignore_irradiance:
            frag.write('fragColor[GBUF_IDX_2].b = 1.0;')

    # Even if the material doesn't use emission we need to write to the
    # emission buffer (if used) to prevent undefined behaviour
    frag.write('#ifdef _EmissionShaded')
    frag.write('fragColor[GBUF_IDX_EMISSION] = vec4(emissionCol, 0.0);')  # Alpha channel is unused at the moment
    frag.write('#endif')

    if '_SSRefraction' in wrd.world_defs or '_VoxelRefract' in wrd.world_defs:
        frag.write('fragColor[GBUF_IDX_REFRACTION] = vec4(1.0, 1.0, 0.0, 1.0);')

    return con_mesh
|
||||
def make_raytracer(con_mesh):
    """Write the minimal pass-through shaders used by the raytracer path.

    The vertex stage simply forwards position, normal and UV; shading is
    done on the raytracing side.
    """
    # Raytracer uses un-quantized float vertex data
    con_mesh.data['vertex_elements'] = [
        {'name': 'pos', 'data': 'float3'},
        {'name': 'nor', 'data': 'float3'},
        {'name': 'tex', 'data': 'float2'},
    ]
    vert = con_mesh.make_vert()
    # Fragment shader is created for the context even though nothing is
    # written to it here.
    con_mesh.make_frag()
    vert.add_out('vec3 n')
    vert.add_out('vec2 uv')
    vert.write('n = nor;')
    vert.write('uv = tex;')
    vert.write('gl_Position = vec4(pos.xyz, 1.0);')
|
||||
def make_forward_mobile(con_mesh):
    """Write the simplified forward ('Mobile' material model) mesh shaders.

    Parses the material nodes, then emits direct lighting for sun /
    single point (or spot) lights with optional shadow lookups, cluster
    lights and irradiance ambient. Blended materials return early with
    just the base color output.
    """
    wrd = bpy.data.worlds['Lnx']
    vert = con_mesh.make_vert()
    frag = con_mesh.make_frag()
    # No geometry/tessellation stages in the mobile path
    geom = None
    tesc = None
    tese = None

    vert.add_uniform('mat3 N', '_normalMatrix')
    vert.write_attrib('vec4 spos = vec4(pos.xyz, 1.0);')
    frag.ins = vert.outs

    vert.add_include('compiled.inc')
    frag.add_include('compiled.inc')

    lnx_discard = mat_state.material.lnx_discard
    blend = mat_state.material.lnx_blending
    is_transluc = mat_utils.is_transluc(mat_state.material)
    # Opacity only needed for blended translucent or discarding materials
    parse_opacity = (blend and is_transluc) or lnx_discard

    _write_material_attribs_default(frag, parse_opacity)
    cycles.parse(mat_state.nodes, con_mesh, vert, frag, geom, tesc, tese, parse_opacity=parse_opacity, parse_displacement=False)

    if lnx_discard:
        opac = mat_state.material.lnx_discard_opacity
        frag.write('if (opacity < {0}) discard;'.format(opac))

    make_attrib.write_tex_coords(con_mesh, vert, frag, tese)

    if con_mesh.is_elem('col'):
        vert.add_out('vec3 vcolor')
        vert.write('vcolor = col.rgb;')

    if con_mesh.is_elem('tang'):
        # Tangent present: build a full TBN matrix
        vert.add_out('mat3 TBN')
        make_attrib.write_norpos(con_mesh, vert, declare=True)
        vert.write('vec3 tangent = normalize(N * tang.xyz);')
        vert.write('vec3 bitangent = normalize(cross(wnormal, tangent));')
        vert.write('TBN = mat3(tangent, bitangent, wnormal);')
    else:
        vert.add_out('vec3 wnormal')
        make_attrib.write_norpos(con_mesh, vert)
        frag.write_attrib('vec3 n = normalize(wnormal);')

    if mat_state.material.lnx_two_sided:
        frag.write('if (!gl_FrontFacing) n *= -1;')  # Flip normal when drawing back-face

    make_attrib.write_vertpos(vert)

    frag.add_include('std/math.glsl')
    frag.add_include('std/brdf.glsl')

    frag.add_out('vec4 fragColor')
    blend = mat_state.material.lnx_blending
    if blend:
        # Blended materials skip lighting entirely
        if parse_opacity:
            frag.write('fragColor = vec4(basecol, opacity);')
        else:
            frag.write('fragColor = vec4(basecol, 1.0);')
        return

    is_shadows = '_ShadowMap' in wrd.world_defs
    is_shadows_atlas = '_ShadowMapAtlas' in wrd.world_defs
    shadowmap_sun = 'shadowMap'
    if is_shadows_atlas:
        is_single_atlas = '_SingleAtlas' in wrd.world_defs
        shadowmap_sun = 'shadowMapAtlasSun' if not is_single_atlas else 'shadowMapAtlas'
        frag.add_uniform('vec2 smSizeUniform', '_shadowMapSize', included=True)
    frag.write('vec3 direct = vec3(0.0);')

    if '_Sun' in wrd.world_defs:
        frag.add_uniform('vec3 sunCol', '_sunColor')
        frag.add_uniform('vec3 sunDir', '_sunDirection')
        frag.write('vec3 svisibility = vec3(1.0);')
        frag.write('float sdotNL = max(dot(n, sunDir), 0.0);')
        if is_shadows:
            vert.add_out('vec4 lightPosition')
            vert.add_uniform('mat4 LWVP', '_biasLightWorldViewProjectionMatrixSun')
            vert.write('lightPosition = LWVP * spos;')
            frag.add_uniform('bool receiveShadow')
            frag.add_uniform(f'sampler2DShadow {shadowmap_sun}')
            frag.add_uniform('float shadowsBias', '_sunShadowsBias')

            frag.write('if (receiveShadow) {')
            if '_CSM' in wrd.world_defs:
                frag.add_include('std/shadows.glsl')
                frag.add_uniform('vec4 casData[shadowmapCascades * 4 + 4]', '_cascadeData', included=True)
                frag.add_uniform('vec3 eye', '_cameraPosition')
                # NOTE(review): shadowTestCascade is called with an extra
                # `opacity != 1.0` argument here — presumably a fork-specific
                # transparent-shadows overload in std/shadows.glsl; confirm
                # the GLSL signature matches.
                frag.write(f'svisibility = shadowTestCascade({shadowmap_sun}, eye, wposition + n * shadowsBias * 10, shadowsBias, opacity != 1.0);')
            else:
                frag.write('if (lightPosition.w > 0.0) {')
                frag.write(' vec3 lPos = lightPosition.xyz / lightPosition.w;')
                if '_Legacy' in wrd.world_defs:
                    # NOTE(review): `float(x, y)` with two arguments is not
                    # valid GLSL — this extra `, opacity != 1.0` argument
                    # looks unintended; compare with the spot/point branches
                    # below which use `float(...)` with one argument.
                    frag.write(f' svisibility = float(texture({shadowmap_sun}, vec2(lPos.xy)).r > lPos.z - shadowsBias, opacity != 1.0);')
                else:
                    # NOTE(review): the third `texture()` argument is a bool
                    # here; standard GLSL expects a float bias — verify this
                    # compiles on all targets.
                    frag.write(f' svisibility = texture({shadowmap_sun}, vec3(lPos.xy, lPos.z - shadowsBias), opacity != 1.0).r;')
                frag.write('}')
            frag.write('}') # receiveShadow
        frag.write('direct += basecol * sdotNL * sunCol * svisibility;')

    if '_SinglePoint' in wrd.world_defs:
        frag.add_uniform('vec3 pointPos', '_pointPosition')
        frag.add_uniform('vec3 pointCol', '_pointColor')
        if '_Spot' in wrd.world_defs:
            frag.add_uniform('vec3 spotDir', link='_spotDirection')
            frag.add_uniform('vec3 spotRight', link='_spotRight')
            frag.add_uniform('vec4 spotData', link='_spotData')
        frag.write('float visibility = 1.0;')
        frag.write('vec3 ld = pointPos - wposition;')
        frag.write('vec3 l = normalize(ld);')
        frag.write('float dotNL = max(dot(n, l), 0.0);')
        if is_shadows:
            frag.add_uniform('bool receiveShadow')
            frag.add_uniform('float pointBias', link='_pointShadowsBias')
            frag.add_include('std/shadows.glsl')

            frag.write('if (receiveShadow) {')
            if '_Spot' in wrd.world_defs:
                vert.add_out('vec4 spotPosition')
                vert.add_uniform('mat4 LWVPSpotArray[1]', link='_biasLightWorldViewProjectionMatrixSpotArray')
                vert.write('spotPosition = LWVPSpotArray[0] * spos;')
                frag.add_uniform('sampler2DShadow shadowMapSpot[1]')
                frag.write('if (spotPosition.w > 0.0) {')
                frag.write(' vec3 lPos = spotPosition.xyz / spotPosition.w;')
                if '_Legacy' in wrd.world_defs:
                    frag.write(' visibility = float(texture(shadowMapSpot[0], vec2(lPos.xy)).r > lPos.z - pointBias);')
                else:
                    frag.write(' visibility = texture(shadowMapSpot[0], vec3(lPos.xy, lPos.z - pointBias)).r;')
                frag.write('}')
            else:
                frag.add_uniform('vec2 lightProj', link='_lightPlaneProj')
                frag.add_uniform('samplerCubeShadow shadowMapPoint[1]')
                frag.write('const float s = shadowmapCubePcfSize;') # TODO: incorrect...
                frag.write('float compare = lpToDepth(ld, lightProj) - pointBias * 1.5;')
                frag.write('#ifdef _InvY')
                frag.write('l.y = -l.y;')
                frag.write('#endif')
                if '_Legacy' in wrd.world_defs:
                    frag.write('visibility = float(texture(shadowMapPoint[0], vec3(-l + n * pointBias * 20)).r > compare);')
                else:
                    frag.write('visibility = texture(shadowMapPoint[0], vec4(-l + n * pointBias * 20, compare)).r;')
            frag.write('}') # receiveShadow

        frag.write('direct += basecol * dotNL * pointCol * attenuate(distance(wposition, pointPos)) * visibility;')

    if '_Clusters' in wrd.world_defs:
        frag.add_include('std/light_mobile.glsl')
        frag.write('vec3 albedo = basecol;')
        frag.write('vec3 f0 = surfaceF0(basecol, metallic);')
        make_cluster.write(vert, frag)

    if '_Irr' in wrd.world_defs:
        # Spherical-harmonics irradiance ambient term
        frag.add_include('std/shirr.glsl')
        frag.add_uniform('vec4 shirr[7]', link='_envmapIrradiance')
        env_str = 'shIrradiance(n, shirr)'
    else:
        env_str = '0.5'

    frag.add_uniform('float envmapStrength', link='_envmapStrength')
    frag.write('fragColor = vec4(direct + basecol * {0} * envmapStrength, 1.0);'.format(env_str))

    if '_LDR' in wrd.world_defs:
        # Gamma-encode for LDR output
        frag.write('fragColor.rgb = pow(fragColor.rgb, vec3(1.0 / 2.2));')
|
||||
def make_forward_solid(con_mesh):
    """Build the unlit ("solid") forward shader pair for con_mesh.

    Writes base color straight to fragColor with no lighting; normals are
    stripped from the vertex layout since they are never read.
    """
    wrd = bpy.data.worlds['Lnx']
    mat = mat_state.material

    vert = con_mesh.make_vert()
    frag = con_mesh.make_frag()
    # No geometry / tessellation stages in the solid context.
    geom = tesc = tese = None

    # Normals are unused here — drop the 'nor' vertex element if present.
    elems = con_mesh.data['vertex_elements']
    nor_elem = next((e for e in elems if e['name'] == 'nor'), None)
    if nor_elem is not None:
        elems.remove(nor_elem)

    vert.write_attrib('vec4 spos = vec4(pos.xyz, 1.0);')
    frag.ins = vert.outs

    vert.add_include('compiled.inc')
    frag.add_include('compiled.inc')

    lnx_discard = mat.lnx_discard
    blend = mat.lnx_blending
    is_transluc = mat_utils.is_transluc(mat)
    # Opacity only matters when it can actually affect the output.
    parse_opacity = (blend and is_transluc) or lnx_discard

    _write_material_attribs_default(frag, parse_opacity)
    cycles.parse(mat_state.nodes, con_mesh, vert, frag, geom, tesc, tese,
                 parse_opacity=parse_opacity, parse_displacement=False, basecol_only=True)

    if lnx_discard:
        frag.write(f'if (opacity < {mat.lnx_discard_opacity}) discard;')

    if con_mesh.is_elem('tex'):
        vert.add_out('vec2 texCoord')
        vert.add_uniform('float texUnpack', link='_texUnpack')
        if mat.lnx_tilesheet_flag:
            vert.add_uniform('vec2 tilesheetOffset', '_tilesheetOffset')
            vert.write('texCoord = tex * texUnpack + tilesheetOffset;')
        else:
            vert.write('texCoord = tex * texUnpack;')

    if con_mesh.is_elem('col'):
        vert.add_out('vec3 vcolor')
        vert.write('vcolor = col.rgb;')

    make_attrib.write_norpos(con_mesh, vert, write_nor=False)
    make_attrib.write_vertpos(vert)

    frag.add_out('vec4 fragColor')
    alpha = 'opacity' if (blend and parse_opacity) else '1.0'
    frag.write(f'fragColor = vec4(basecol, {alpha});')

    if '_LDR' in wrd.world_defs:
        # LDR output skips the tonemap chain — convert to gamma space here.
        frag.write('fragColor.rgb = pow(fragColor.rgb, vec3(1.0 / 2.2));')
|
||||
def make_forward(con_mesh):
    """Build the full forward-render mesh context.

    Delegates lighting to make_forward_base(), then adds LTC area-light
    uniforms, the light gbuffer outputs required by SSR/SS-refraction
    post passes, LDR tonemapping and particle fade.
    """
    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()
    blend = mat_state.material.lnx_blending
    parse_opacity = blend or mat_utils.is_transluc(mat_state.material)

    make_forward_base(con_mesh, parse_opacity=parse_opacity)
    frag = con_mesh.frag

    if '_LTC' in wrd.world_defs:
        # Linearly transformed cosines (area lights).
        frag.add_uniform('vec3 lightArea0', '_lightArea0', included=True)
        frag.add_uniform('vec3 lightArea1', '_lightArea1', included=True)
        frag.add_uniform('vec3 lightArea2', '_lightArea2', included=True)
        frag.add_uniform('vec3 lightArea3', '_lightArea3', included=True)
        frag.add_uniform('sampler2D sltcMat', '_ltcMat', included=True)
        frag.add_uniform('sampler2D sltcMag', '_ltcMag', included=True)
        if '_ShadowMap' in wrd.world_defs:
            if '_SinglePoint' in wrd.world_defs:
                frag.add_uniform('mat4 LWVPSpot[0]', link='_biasLightViewProjectionMatrixSpot0', included=True)
                frag.add_uniform('sampler2DShadow shadowMapSpot[1]', included=True)
            if '_Clusters' in wrd.world_defs:
                frag.add_uniform('mat4 LWVPSpotArray[4]', link='_biasLightWorldViewProjectionMatrixSpotArray', included=True)
                frag.add_uniform('sampler2DShadow shadowMapSpot[4]', included=True)

    if not blend:
        mrt = 0  # mrt: multiple render targets
        if rpdat.rp_ssr:
            mrt = 1
        if rpdat.rp_ss_refraction or rpdat.lnx_voxelgi_refract:
            mrt = 2
        if mrt != 0:
            # Store light gbuffer for post-processing
            frag.add_out(f'vec4 fragColor[{mrt}+1]')
            frag.add_include('std/gbuffer.glsl')
            # Octahedron-encode the normal into two components.
            frag.write('n /= (abs(n.x) + abs(n.y) + abs(n.z));')
            frag.write('n.xy = n.z >= 0.0 ? n.xy : octahedronWrap(n.xy);')
            frag.write('fragColor[0] = vec4(direct + indirect, packFloat2(occlusion, specular));')
            frag.write('fragColor[1] = vec4(n.xy, roughness, metallic);')
            if rpdat.rp_ss_refraction or rpdat.lnx_voxelgi_refract:
                # Stray f-string prefix removed: no placeholders in this literal.
                frag.write('fragColor[2] = vec4(1.0, 1.0, 0.0, 1.0);')
        else:
            frag.add_out('vec4 fragColor[1]')
            frag.write('fragColor[0] = vec4(direct + indirect, 1.0);')

        if '_LDR' in wrd.world_defs:
            frag.add_include('std/tonemap.glsl')
            frag.write('fragColor[0].rgb = tonemapFilmic(fragColor[0].rgb);')

    # Particle opacity (reuse rpdat instead of a second get_rp() call).
    if mat_state.material.lnx_particle_flag and rpdat.lnx_particles == 'On' and mat_state.material.lnx_particle_fade:
        frag.write('fragColor[0].rgb *= p_fade;')
|
||||
def make_forward_base(con_mesh, parse_opacity=False, transluc_pass=False):
    """Write the shared forward-lighting fragment code for a mesh context.

    Emits material parsing (via make_base), environment/irradiance lighting,
    sun and single-point light evaluation with shadow maps, clustered lights,
    emission and optional voxel GI/AO/refraction. For blended materials only
    a plain color output is written and the function returns early.

    parse_opacity: parse and use the material's opacity/ior outputs.
    transluc_pass: context renders the translucent pass — discard the fully
    opaque fragments instead of the transparent ones.
    """
    global is_displacement
    wrd = bpy.data.worlds['Lnx']

    lnx_discard = mat_state.material.lnx_discard
    make_base(con_mesh, parse_opacity=(parse_opacity or lnx_discard))

    blend = mat_state.material.lnx_blending

    vert = con_mesh.vert
    frag = con_mesh.frag
    tese = con_mesh.tese

    if parse_opacity or lnx_discard:
        if lnx_discard or blend:
            opac = mat_state.material.lnx_discard_opacity
            frag.write('if (opacity < {0}) discard;'.format(opac))
        elif transluc_pass:
            # Opaque fragments were already drawn by the main pass.
            frag.write('if (opacity == 1.0) discard;')
        else:
            opac = '0.9999' # 1.0 - eps
            frag.write('if (opacity < {0}) discard;'.format(opac))

    if blend:
        frag.add_out('vec4 fragColor[1]')
        if parse_opacity:
            frag.write('fragColor[0] = vec4(basecol, opacity);')
        else:
            # frag.write('fragColor[0] = vec4(basecol * lightCol * visibility, 1.0);')
            frag.write('fragColor[0] = vec4(basecol, 1.0);')
        # TODO: Fade out fragments near depth buffer here
        return

    frag.write_attrib('vec3 vVec = normalize(eyeDir);')
    frag.write_attrib('float dotNV = max(dot(n, vVec), 0.0);')

    # eyeDir is produced by the last stage before the fragment shader.
    sh = tese if tese is not None else vert
    sh.add_out('vec3 eyeDir')
    sh.add_uniform('vec3 eye', '_cameraPosition')
    sh.write('eyeDir = eye - wposition;')

    frag.add_include('std/light.glsl')
    is_shadows = '_ShadowMap' in wrd.world_defs
    is_shadows_atlas = '_ShadowMapAtlas' in wrd.world_defs
    is_single_atlas = is_shadows_atlas and '_SingleAtlas' in wrd.world_defs
    shadowmap_sun = 'shadowMap'
    shadowmap_sun_tr = 'shadowMapTransparent'
    if is_shadows_atlas:
        shadowmap_sun = 'shadowMapAtlasSun' if not is_single_atlas else 'shadowMapAtlas'
        shadowmap_sun_tr = 'shadowMapAtlasSunTransparent' if not is_single_atlas else 'shadowMapAtlasTransparent'
        frag.add_uniform('vec2 smSizeUniform', '_shadowMapSize', included=True)

    frag.write('vec3 albedo = surfaceAlbedo(basecol, metallic);')
    frag.write('vec3 f0 = surfaceF0(basecol, metallic);')

    if '_Brdf' in wrd.world_defs:
        frag.add_uniform('sampler2D senvmapBrdf', link='$brdf.png')
        frag.write('vec2 envBRDF = texelFetch(senvmapBrdf, ivec2(vec2(dotNV, 1.0 - roughness) * 256.0), 0).xy;')

    if '_Irr' in wrd.world_defs:
        frag.add_include('std/shirr.glsl')
        frag.add_uniform('vec4 shirr[7]', link='_envmapIrradiance')
        frag.write('vec3 envl = shIrradiance(n, shirr);')
        if '_EnvTex' in wrd.world_defs:
            frag.write('envl /= PI;')
    else:
        frag.write('vec3 envl = vec3(0.0);')

    if '_Rad' in wrd.world_defs:
        frag.add_uniform('sampler2D senvmapRadiance', link='_envmapRadiance')
        frag.add_uniform('int envmapNumMipmaps', link='_envmapNumMipmaps')
        frag.write('vec3 reflectionWorld = reflect(-vVec, n);')
        frag.write('float lod = getMipFromRoughness(roughness, envmapNumMipmaps);')
        frag.write('vec3 prefilteredColor = textureLod(senvmapRadiance, envMapEquirect(reflectionWorld), lod).rgb;')

    if '_EnvLDR' in wrd.world_defs:
        frag.write('envl = pow(envl, vec3(2.2));')
        if '_Rad' in wrd.world_defs:
            frag.write('prefilteredColor = pow(prefilteredColor, vec3(2.2));')

    frag.write('envl *= albedo;')

    # envBRDF only exists under _Brdf, so both reflection additions stay
    # inside this branch.
    if '_Brdf' in wrd.world_defs:
        frag.write('envl.rgb *= 1.0 - (f0 * envBRDF.x + envBRDF.y);')
        if '_Rad' in wrd.world_defs:
            frag.write('envl += prefilteredColor * (f0 * envBRDF.x + envBRDF.y);')
        elif '_EnvCol' in wrd.world_defs:
            frag.add_uniform('vec3 backgroundCol', link='_backgroundCol')
            frag.write('envl += backgroundCol * (f0 * envBRDF.x + envBRDF.y);')

    frag.add_uniform('float envmapStrength', link='_envmapStrength')
    frag.write('envl *= envmapStrength * occlusion;')

    if '_VoxelAOvar' in wrd.world_defs or '_VoxelGI' in wrd.world_defs:
        if parse_opacity or '_VoxelShadow' in wrd.world_defs:
            frag.add_include('std/conetrace.glsl')
            frag.add_uniform('sampler3D voxels')
            frag.add_uniform('sampler3D voxelsSDF')
            frag.add_uniform('vec3 eye', "_cameraPosition")
            frag.add_uniform('float clipmaps[10 * voxelgiClipmapCount]', '_clipmaps')
        # Screen-space coordinate for the pre-resolved voxel lookups below.
        vert.add_out('vec4 wvpposition')
        vert.write('wvpposition = gl_Position;')
        frag.write('vec2 texCoord = (wvpposition.xy / wvpposition.w) * 0.5 + 0.5;')

        if '_VoxelAOvar' in wrd.world_defs and not parse_opacity:
            frag.add_uniform("sampler2D voxels_ao")
            frag.write('envl *= textureLod(voxels_ao, texCoord, 0.0).rrr;')

    frag.write('vec3 indirect = envl;')

    if '_VoxelGI' in wrd.world_defs:
        if parse_opacity:
            # Transparent surfaces trace the voxel volume directly.
            frag.write('indirect = traceDiffuse(wposition, n, voxels, clipmaps).rgb * albedo * voxelgiDiff;')
            frag.write('if (roughness < 1.0 && specular > 0.0)')
            frag.write(' indirect += traceSpecular(wposition, n, voxels, voxelsSDF, normalize(eye - wposition), roughness, clipmaps, gl_FragCoord.xy).rgb * specular * voxelgiRefl;')
        else:
            # Opaque surfaces read the pre-resolved screen-space buffers.
            frag.add_uniform("sampler2D voxels_diffuse")
            frag.add_uniform("sampler2D voxels_specular")
            frag.write("indirect = textureLod(voxels_diffuse, texCoord, 0.0).rgb * albedo * voxelgiDiff;")
            frag.write("if (roughness < 1.0 && specular > 0.0)")
            frag.write(" indirect += textureLod(voxels_specular, texCoord, 0.0).rgb * specular * voxelgiRefl;")

    frag.write('vec3 direct = vec3(0.0);')

    if '_Sun' in wrd.world_defs:
        frag.add_uniform('vec3 sunCol', '_sunColor')
        frag.add_uniform('vec3 sunDir', '_sunDirection')
        frag.write('vec3 svisibility = vec3(1.0);')
        frag.write('vec3 sh = normalize(vVec + sunDir);')
        frag.write('float sdotNL = dot(n, sunDir);')
        frag.write('float sdotNH = dot(n, sh);')
        frag.write('float sdotVH = dot(vVec, sh);')
        if is_shadows:
            frag.add_uniform('bool receiveShadow')
            frag.add_uniform(f'sampler2DShadow {shadowmap_sun}', top=True)
            frag.add_uniform(f'sampler2D {shadowmap_sun_tr}', top=True)
            frag.add_uniform('float shadowsBias', '_sunShadowsBias')
            frag.write('if (receiveShadow) {')
            if '_CSM' in wrd.world_defs:
                frag.add_include('std/shadows.glsl')
                frag.add_uniform('vec4 casData[shadowmapCascades * 4 + 4]', '_cascadeData', included=True)
                frag.add_uniform('vec3 eye', '_cameraPosition')
                if parse_opacity:
                    frag.write(f'svisibility = shadowTestCascade({shadowmap_sun}, {shadowmap_sun_tr}, eye, wposition + n * shadowsBias * 10, shadowsBias, true);')
                else:
                    frag.write(f'svisibility = shadowTestCascade({shadowmap_sun}, {shadowmap_sun_tr}, eye, wposition + n * shadowsBias * 10, shadowsBias, false);')
            else:
                if tese is not None:
                    tese.add_out('vec4 lightPosition')
                    tese.add_uniform('mat4 LVP', '_biasLightViewProjectionMatrix')
                    tese.write('lightPosition = LVP * vec4(wposition, 1.0);')
                else:
                    if is_displacement:
                        vert.add_out('vec4 lightPosition')
                        vert.add_uniform('mat4 LVP', '_biasLightViewProjectionMatrix')
                        vert.write('lightPosition = LVP * vec4(wposition, 1.0);')
                    else:
                        frag.add_uniform('mat4 LWVP', '_biasLightWorldViewProjectionMatrixSun')
                        frag.write('vec4 lightPosition = LWVP * vec4(wposition + n * shadowsBias * 100, 1.0);')
                frag.write('vec3 lPos = lightPosition.xyz / lightPosition.w;')
                frag.write('const vec2 smSize = shadowmapSize;')
                if parse_opacity:
                    frag.write(f'svisibility = PCF({shadowmap_sun}, {shadowmap_sun_tr}, lPos.xy, lPos.z - shadowsBias, smSize, true);')
                else:
                    frag.write(f'svisibility = PCF({shadowmap_sun}, {shadowmap_sun_tr}, lPos.xy, lPos.z - shadowsBias, smSize, false);')
            if '_VoxelShadow' in wrd.world_defs:
                frag.write('svisibility *= (1.0 - traceShadow(wposition, n, voxels, voxelsSDF, sunDir, clipmaps, gl_FragCoord.xy).r) * voxelgiShad;')
            frag.write('}') # receiveShadow
        frag.write('direct += (lambertDiffuseBRDF(albedo, sdotNL) + specularBRDF(f0, roughness, sdotNL, sdotNH, dotNV, sdotVH) * specular) * sunCol * svisibility;')
        # sun

    if '_SinglePoint' in wrd.world_defs:
        frag.add_uniform('vec3 pointPos', link='_pointPosition')
        frag.add_uniform('vec3 pointCol', link='_pointColor')
        if '_Spot' in wrd.world_defs:
            frag.add_uniform('vec3 spotDir', link='_spotDirection')
            frag.add_uniform('vec3 spotRight', link='_spotRight')
            frag.add_uniform('vec4 spotData', link='_spotData')
        if is_shadows:
            frag.add_uniform('bool receiveShadow')
            frag.add_uniform('float pointBias', link='_pointShadowsBias')
            if '_Spot' in wrd.world_defs:
                # Skip world matrix, already in world-space
                frag.add_uniform('mat4 LWVPSpot[1]', link='_biasLightViewProjectionMatrixSpotArray', included=True)
                frag.add_uniform('sampler2DShadow shadowMapSpot[1]', included=True)
                frag.add_uniform('sampler2D shadowMapSpotTransparent[1]', included=True)
            else:
                frag.add_uniform('vec2 lightProj', link='_lightPlaneProj', included=True)
                frag.add_uniform('samplerCubeShadow shadowMapPoint[1]', included=True)
                frag.add_uniform('samplerCube shadowMapPointTransparent[1]', included=True)
        # sampleLight() argument list is assembled incrementally below.
        frag.write('direct += sampleLight(')
        frag.write(' wposition, n, vVec, dotNV, pointPos, pointCol, albedo, roughness, specular, f0')
        if is_shadows:
            if parse_opacity:
                frag.write(', 0, pointBias, receiveShadow, opacity != 1.0')
            else:
                frag.write(', 0, pointBias, receiveShadow, false')
        if '_Spot' in wrd.world_defs:
            frag.write(', true, spotData.x, spotData.y, spotDir, spotData.zw, spotRight')
        if '_VoxelShadow' in wrd.world_defs:
            frag.write(', voxels, voxelsSDF, clipmaps')
        if '_MicroShadowing' in wrd.world_defs:
            frag.write(', occlusion')
        if '_SSRS' in wrd.world_defs:
            frag.add_uniform('mat4 invVP', '_inverseViewProjectionMatrix')
            frag.add_uniform('vec3 eye', '_cameraPosition')
            # Fix: emitted GLSL referenced undeclared 'inVP'; the uniform is 'invVP'.
            frag.write(', gl_FragCoord.z, invVP, eye')
        frag.write(');')

    if '_Clusters' in wrd.world_defs:
        make_cluster.write(vert, frag)

    if mat_state.emission_type != mat_state.EmissionType.NO_EMISSION:
        if mat_state.emission_type == mat_state.EmissionType.SHADELESS:
            frag.write('direct = vec3(0.0);')
        frag.write('indirect += emissionCol;')

    if '_VoxelRefract' in wrd.world_defs and parse_opacity:
        # Fix: the original emitted a second, duplicate 'vec3 refraction'
        # declaration in the same GLSL scope (a guaranteed shader compile
        # error). The first form is kept — it matches the traceRefraction
        # call in make_refract.py (gl_FragCoord.xy screen coordinate).
        frag.write('if (opacity < 1.0) {')
        frag.write('vec3 refraction = traceRefraction(wposition, n, voxels, voxelsSDF, normalize(eye - wposition), ior, roughness, clipmaps, gl_FragCoord.xy).rgb;')
        frag.write(' indirect = mix(refraction, indirect, opacity) * voxelgiRefr;')
        frag.write(' direct = mix(refraction, direct, opacity) * voxelgiRefr;')
        frag.write('}')
|
||||
def _write_material_attribs_default(frag: shader.Shader, parse_opacity: bool):
    """Declare the default material attribute locals in the fragment shader."""
    decls = [
        'vec3 basecol;',
        'float roughness;',
        'float metallic;',
        'float occlusion;',
        'float specular;',
        # We may not use emission, but the attribute will then be removed
        # by the shader compiler
        'vec3 emissionCol;',
    ]
    if parse_opacity:
        decls.append('float opacity;')
        decls.append('float ior;')
    for decl in decls:
        frag.write(decl)
28
leenkx/blender/lnx/material/make_morph_target.py
Normal file
28
leenkx/blender/lnx/material/make_morph_target.py
Normal file
@ -0,0 +1,28 @@
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
def morph_pos(vert):
    """Inject morph-target (shape key) position blending into the vertex shader.

    Adds the morph data samplers/uniforms and rewrites `spos.xyz` in place:
    unpack -> apply morph offsets via getMorphedVertex -> repack.
    Fix: removed an unused `rpdat = lnx.utils.get_rp()` assignment.
    """
    vert.add_include('compiled.inc')
    vert.add_include('std/morph_target.glsl')
    vert.add_uniform('sampler2D morphDataPos', link='_morphDataPos', included=True)
    vert.add_uniform('sampler2D morphDataNor', link='_morphDataNor', included=True)
    vert.add_uniform('vec4 morphWeights[8]', link='_morphWeights', included=True)
    vert.add_uniform('vec2 morphScaleOffset', link='_morphScaleOffset', included=True)
    vert.add_uniform('vec2 morphDataDim', link='_morphDataDim', included=True)
    vert.add_uniform('float texUnpack', link='_texUnpack')
    vert.add_uniform('float posUnpack', link='_posUnpack')
    vert.write_attrib('vec2 texCoordMorph = morph * texUnpack;')
    # Positions are stored packed: unpack, morph, then repack for later stages.
    vert.write_attrib('spos.xyz *= posUnpack;')
    vert.write_attrib('getMorphedVertex(texCoordMorph, spos.xyz);')
    vert.write_attrib('spos.xyz /= posUnpack;')
|
||||
def morph_nor(vert, is_bone, prep):
    """Blend the vertex normal with the sampled morph-target normal.

    prep: declaration prefix (e.g. 'vec3 ') for the wnormal assignment.
    """
    for stmt in ('vec3 morphNor = vec3(0, 0, 0);',
                 'getMorphedNormal(texCoordMorph, vec3(nor.xy, pos.w), morphNor);'):
        vert.write_attrib(stmt)
    # With skinning enabled the bone path writes wnormal itself.
    if is_bone:
        return
    vert.write_attrib(prep + 'wnormal = normalize(N * morphNor);')
|
51
leenkx/blender/lnx/material/make_overlay.py
Normal file
51
leenkx/blender/lnx/material/make_overlay.py
Normal file
@ -0,0 +1,51 @@
|
||||
import lnx
|
||||
import lnx.material.make_finalize as make_finalize
|
||||
import lnx.material.make_mesh as make_mesh
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
make_finalize = lnx.reload_module(make_finalize)
|
||||
make_mesh = lnx.reload_module(make_mesh)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
mat_utils = lnx.reload_module(mat_utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def make(context_id):
    """Create the overlay render context (drawn unlit on top of the scene,
    converted to gamma space since it bypasses the tonemapping chain)."""
    mat = mat_state.material
    blend = mat.lnx_blending

    props = {
        'name': context_id,
        'depth_write': True,
        'compare_mode': 'less',
        'cull_mode': 'clockwise',
    }
    if blend:
        props.update({
            'blend_source': mat.lnx_blending_source,
            'blend_destination': mat.lnx_blending_destination,
            'blend_operation': mat.lnx_blending_operation,
            'alpha_blend_source': mat.lnx_blending_source_alpha,
            'alpha_blend_destination': mat.lnx_blending_destination_alpha,
            'alpha_blend_operation': mat.lnx_blending_operation_alpha,
        })

    con_overlay = mat_state.data.add_context(props)

    lnx_discard = mat.lnx_discard
    is_transluc = mat_utils.is_transluc(mat)
    parse_opacity = (blend and is_transluc) or lnx_discard
    make_mesh.make_base(con_overlay, parse_opacity=parse_opacity)

    frag = con_overlay.frag

    if lnx_discard:
        frag.write(f'if (opacity < {mat.lnx_discard_opacity}) discard;')

    frag.add_out('vec4 fragColor')
    alpha = 'opacity' if (blend and parse_opacity) else '1.0'
    frag.write(f'fragColor = vec4(basecol + emissionCol, {alpha});')

    # Overlays skip tonemapping — convert to gamma space here.
    frag.write('fragColor.rgb = pow(fragColor.rgb, vec3(1.0 / 2.2));')

    make_finalize.make(con_overlay)

    return con_overlay
|
99
leenkx/blender/lnx/material/make_particle.py
Normal file
99
leenkx/blender/lnx/material/make_particle.py
Normal file
@ -0,0 +1,99 @@
|
||||
import lnx.utils
|
||||
import lnx.material.mat_state as mat_state
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def write(vert, particle_info=None, shadowmap=False):
    """Emit the GPU particle simulation into the vertex shader.

    Per-system settings arrive packed in the `pd` mat4 uniform
    (`_particleData`); the code computes the particle's age, velocity and
    location, then offsets `spos` in place.

    particle_info: dict of flags naming which particle attributes the
        material graph reads; each enabled one becomes a varying output.
    shadowmap: unused in this body; kept for caller API compatibility.
    """
    # Which outputs the material graph requested (idiom fix: replaced
    # `True if x != None and ... else False` boilerplate; .get() also
    # tolerates partially-filled dicts).
    info = particle_info if particle_info is not None else {}
    out_index = bool(info.get('index'))
    out_age = bool(info.get('age'))
    out_lifetime = bool(info.get('lifetime'))
    out_location = bool(info.get('location'))
    out_size = bool(info.get('size'))
    out_velocity = bool(info.get('velocity'))
    out_angular_velocity = bool(info.get('angular_velocity'))

    vert.add_uniform('mat4 pd', '_particleData')

    # Cheap per-instance pseudo-random hash.
    str_tex_hash = "float fhash(float n) { return fract(sin(n) * 43758.5453); }\n"
    vert.add_function(str_tex_hash)

    prep = 'float '
    if out_age:
        prep = ''
        vert.add_out('float p_age')
    # var p_age = lapTime - p.i * spawnRate
    vert.write(prep + 'p_age = pd[3][3] - gl_InstanceID * pd[0][1];')
    # p_age -= p_age * fhash(i) * r.lifetime_random;
    vert.write('p_age -= p_age * fhash(gl_InstanceID) * pd[2][3];')

    # Loop
    # pd[0][0] - animtime, loop stored in sign
    # vert.write('while (p_age < 0) p_age += pd[0][0];')
    vert.write('if (pd[0][0] > 0 && p_age < 0) p_age += (int(-p_age / pd[0][0]) + 1) * pd[0][0];')

    # lifetime
    prep = 'float '
    if out_lifetime:
        prep = ''
        vert.add_out('float p_lifetime')
    vert.write(prep + 'p_lifetime = pd[0][2];')
    # clip with nan
    vert.write('if (p_age < 0 || p_age > p_lifetime) {')
    vert.write(' gl_Position /= 0.0;')
    vert.write(' return;')
    vert.write('}')

    # vert.write('p_age /= 2;') # Match

    # object_align_factor / 2 + gxyz
    prep = 'vec3 '
    if out_velocity:
        prep = ''
        vert.add_out('vec3 p_velocity')
    vert.write(prep + 'p_velocity = vec3(pd[1][0], pd[1][1], pd[1][2]);')

    vert.write('p_velocity.x += fhash(gl_InstanceID) * pd[1][3] - pd[1][3] / 2;')
    vert.write('p_velocity.y += fhash(gl_InstanceID + pd[0][3]) * pd[1][3] - pd[1][3] / 2;')
    vert.write('p_velocity.z += fhash(gl_InstanceID + 2 * pd[0][3]) * pd[1][3] - pd[1][3] / 2;')

    # factor_random = pd[1][3]
    # p.i = gl_InstanceID
    # particles.length = pd[0][3]

    # gxyz
    vert.write('p_velocity.x += (pd[2][0] * p_age) / 5;')
    vert.write('p_velocity.y += (pd[2][1] * p_age) / 5;')
    vert.write('p_velocity.z += (pd[2][2] * p_age) / 5;')

    prep = 'vec3 '
    if out_location:
        prep = ''
        vert.add_out('vec3 p_location')
    vert.write(prep + 'p_location = p_velocity * p_age;')

    vert.write('spos.xyz += p_location;')

    # Particle fade
    if mat_state.material.lnx_particle_flag and lnx.utils.get_rp().lnx_particles == 'On' and mat_state.material.lnx_particle_fade:
        vert.add_out('float p_fade')
        vert.write('p_fade = sin(min((p_age / 2) * 3.141592, 3.141592));')

    if out_index:
        vert.add_out('float p_index')  # stray trailing semicolon removed
        vert.write('p_index = gl_InstanceID;')
|
||||
def write_tilesheet(vert):
    """Advance the particle tilesheet frame and offset texCoord accordingly.

    Particle-data layout: pd[3][0] = tilesx, pd[3][1] = tilesy,
    pd[3][2] = framerate.
    """
    statements = (
        'int frame = int((p_age) / pd[3][2]);',
        'int tx = frame % int(pd[3][0]);',
        'int ty = int(frame / pd[3][0]);',
        'vec2 tilesheetOffset = vec2(tx * (1 / pd[3][0]), ty * (1 / pd[3][1]));',
        'texCoord = tex * texUnpack + tilesheetOffset;',
    )
    for stmt in statements:
        vert.write(stmt)
|
67
leenkx/blender/lnx/material/make_refract.py
Normal file
67
leenkx/blender/lnx/material/make_refract.py
Normal file
@ -0,0 +1,67 @@
|
||||
import bpy
|
||||
|
||||
import lnx
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.make_mesh as make_mesh
|
||||
import lnx.material.make_finalize as make_finalize
|
||||
import lnx.assets as assets
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
cycles = lnx.reload_module(cycles)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
make_mesh = lnx.reload_module(make_mesh)
|
||||
make_finalize = lnx.reload_module(make_finalize)
|
||||
assets = lnx.reload_module(assets)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def make(context_id):
    """Build the refraction render context.

    Reuses the forward translucent shading (make_forward_base with
    transluc_pass=True), then truncates the generated fragment main() to
    replace its final color write with three gbuffer outputs:
    normal/roughness/metallic, lit color, and (ior, opacity).
    """
    con_refract = mat_state.data.add_context({ 'name': context_id, 'depth_write': True, 'compare_mode': 'less', 'cull_mode': 'clockwise' })
    make_mesh.make_forward_base(con_refract, parse_opacity=True, transluc_pass=True)

    vert = con_refract.vert
    frag = con_refract.frag
    tese = con_refract.tese
    frag.add_include('std/gbuffer.glsl')
    frag.add_out('vec4 fragColor[3]')

    rpdat = lnx.utils.get_rp()

    # Remove fragColor = ...;
    # Truncates main() at the last 'fragColor' write emitted by the base pass
    # so the outputs below replace it.
    frag.main = frag.main[:frag.main.rfind('fragColor')]
    frag.write('\n')

    wrd = bpy.data.worlds['Lnx']

    # Octahedron-encode the world normal into two components.
    frag.write('n /= (abs(n.x) + abs(n.y) + abs(n.z));')
    frag.write('n.xy = n.z >= 0.0 ? n.xy : octahedronWrap(n.xy);')

    # Material id: 1 = shadeless (emission only), 2 = SSS/hair, 0 = default.
    is_shadeless = mat_state.emission_type == mat_state.EmissionType.SHADELESS
    if is_shadeless or '_SSS' in wrd.world_defs or '_Hair' in wrd.world_defs:
        frag.write('uint matid = 0;')
        if is_shadeless:
            frag.write('matid = 1;')
            frag.write('basecol = emissionCol;')
        if '_SSS' in wrd.world_defs or '_Hair' in wrd.world_defs:
            frag.add_uniform('int materialID')
            frag.write('if (materialID == 2) matid = 2;')
    else:
        frag.write('const uint matid = 0;')

    # Deferred and forward renderers pack the first two targets differently.
    if rpdat.rp_renderer == 'Deferred':
        frag.write('fragColor[0] = vec4(n.xy, roughness, packFloatInt16(metallic, matid));')
        frag.write('fragColor[1] = vec4(direct + indirect, packFloat2(occlusion, specular));')
    else:
        frag.write('fragColor[0] = vec4(direct + indirect, packFloat2(occlusion, specular));')
        frag.write('fragColor[1] = vec4(n.xy, roughness, metallic);')

    # Third target feeds the screen-space refraction pass.
    frag.write('fragColor[2] = vec4(ior, opacity, 0.0, 1.0);')

    make_finalize.make(con_refract)

    # assets.vs_equal(con_refract, assets.shader_cons['transluc_vert']) # shader_cons has no transluc yet
    # assets.fs_equal(con_refract, assets.shader_cons['transluc_frag'])

    return con_refract
|
49
leenkx/blender/lnx/material/make_refraction_buffer.py
Normal file
49
leenkx/blender/lnx/material/make_refraction_buffer.py
Normal file
@ -0,0 +1,49 @@
|
||||
import bpy
|
||||
|
||||
import lnx
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
import lnx.material.make_mesh as make_mesh
|
||||
import lnx.material.make_finalize as make_finalize
|
||||
import lnx.assets as assets
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
cycles = lnx.reload_module(cycles)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
make_mesh = lnx.reload_module(make_mesh)
|
||||
make_finalize = lnx.reload_module(make_finalize)
|
||||
assets = lnx.reload_module(assets)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def make(context_id):
    """Build the refraction-buffer context: writes (ior, opacity) per pixel
    for the screen-space refraction post pass. Depth writes are disabled so
    transparent surfaces do not occlude each other in this buffer."""
    mat = mat_state.material
    props = {
        'name': context_id,
        'depth_write': False,
        'compare_mode': 'less',
        'cull_mode': 'clockwise',
    }
    con = mat_state.data.add_context(props)

    lnx_discard = mat.lnx_discard
    blend = mat.lnx_blending
    parse_opacity = blend or mat_utils.is_transluc(mat) or lnx_discard

    make_mesh.make_base(con, parse_opacity)

    vert = con.vert
    frag = con.frag

    frag.add_out('vec4 fragColor')

    # Drop the trailing 'fragColor = ...;' emitted by make_base and replace it.
    frag.main = frag.main[:frag.main.rfind('fragColor')]
    frag.write('\n')

    if parse_opacity:
        frag.write('fragColor = vec4(ior, opacity, 0.0, 1.0);')
    else:
        # No parsed opacity: default ior 1.0, fully opaque.
        frag.write('fragColor = vec4(1.0, 1.0, 0.0, 1.0);')

    make_finalize.make(con)

    # assets.vs_equal(con_refract, assets.shader_cons['transluc_vert']) # shader_cons has no transluc yet
    # assets.fs_equal(con_refract, assets.shader_cons['transluc_frag'])

    return con
|
225
leenkx/blender/lnx/material/make_shader.py
Normal file
225
leenkx/blender/lnx/material/make_shader.py
Normal file
@ -0,0 +1,225 @@
|
||||
import os
|
||||
import subprocess
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
import bpy
|
||||
from bpy.types import Material
|
||||
from bpy.types import Object
|
||||
|
||||
import lnx.api
|
||||
import lnx.assets as assets
|
||||
import lnx.exporter
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.make_decal as make_decal
|
||||
import lnx.material.make_depth as make_depth
|
||||
import lnx.material.make_mesh as make_mesh
|
||||
import lnx.material.make_overlay as make_overlay
|
||||
import lnx.material.make_transluc as make_transluc
|
||||
import lnx.material.make_refract as make_refract
|
||||
import lnx.material.make_voxel as make_voxel
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
from lnx.material.shader import Shader, ShaderContext, ShaderData
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.api = lnx.reload_module(lnx.api)
|
||||
assets = lnx.reload_module(assets)
|
||||
lnx.exporter = lnx.reload_module(lnx.exporter)
|
||||
log = lnx.reload_module(log)
|
||||
cycles = lnx.reload_module(cycles)
|
||||
make_decal = lnx.reload_module(make_decal)
|
||||
make_depth = lnx.reload_module(make_depth)
|
||||
make_mesh = lnx.reload_module(make_mesh)
|
||||
make_overlay = lnx.reload_module(make_overlay)
|
||||
make_transluc = lnx.reload_module(make_transluc)
|
||||
make_voxel = lnx.reload_module(make_voxel)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
mat_utils = lnx.reload_module(mat_utils)
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import Shader, ShaderContext, ShaderData
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
rpass_hook = None
|
||||
|
||||
|
||||
def build(material: Material, mat_users: Dict[Material, List[Object]], mat_lnxusers) -> Tuple:
|
||||
mat_state.mat_users = mat_users
|
||||
mat_state.mat_lnxusers = mat_lnxusers
|
||||
mat_state.material = material
|
||||
mat_state.nodes = material.node_tree.nodes
|
||||
mat_state.data = ShaderData(material)
|
||||
mat_state.output_node = cycles.node_by_type(mat_state.nodes, 'OUTPUT_MATERIAL')
|
||||
if mat_state.output_node is None:
|
||||
# Place empty material output to keep compiler happy..
|
||||
mat_state.output_node = mat_state.nodes.new('ShaderNodeOutputMaterial')
|
||||
|
||||
wrd = bpy.data.worlds['Lnx']
|
||||
rpdat = lnx.utils.get_rp()
|
||||
rpasses = mat_utils.get_rpasses(material)
|
||||
matname = lnx.utils.safesrc(lnx.utils.asset_name(material))
|
||||
rel_path = lnx.utils.build_dir() + '/compiled/Shaders/'
|
||||
full_path = lnx.utils.get_fp() + '/' + rel_path
|
||||
if not os.path.exists(full_path):
|
||||
os.makedirs(full_path)
|
||||
|
||||
make_instancing_and_skinning(material, mat_users)
|
||||
|
||||
bind_constants = dict()
|
||||
bind_textures = dict()
|
||||
|
||||
for rp in rpasses:
|
||||
car = []
|
||||
bind_constants[rp] = car
|
||||
mat_state.bind_constants = car
|
||||
tar = []
|
||||
bind_textures[rp] = tar
|
||||
mat_state.bind_textures = tar
|
||||
|
||||
con = None
|
||||
|
||||
if rpdat.rp_driver != 'Leenkx' and lnx.api.drivers[rpdat.rp_driver]['make_rpass'] is not None:
|
||||
con = lnx.api.drivers[rpdat.rp_driver]['make_rpass'](rp)
|
||||
|
||||
if con is not None:
|
||||
pass
|
||||
|
||||
elif rp == 'mesh':
|
||||
con = make_mesh.make(rp, rpasses)
|
||||
|
||||
elif rp == 'shadowmap':
|
||||
con = make_depth.make(rp, rpasses, shadowmap=True)
|
||||
|
||||
elif rp == 'shadowmap_transparent':
|
||||
con = make_depth.make(rp, rpasses, shadowmap=True, shadowmap_transparent=True)
|
||||
|
||||
elif rp == 'translucent':
|
||||
con = make_transluc.make(rp)
|
||||
|
||||
elif rp == 'refraction':
|
||||
con = make_refract.make(rp)
|
||||
|
||||
elif rp == 'overlay':
|
||||
con = make_overlay.make(rp)
|
||||
|
||||
elif rp == 'decal':
|
||||
con = make_decal.make(rp)
|
||||
|
||||
elif rp == 'depth':
|
||||
con = make_depth.make(rp, rpasses)
|
||||
|
||||
elif rp == 'voxel':
|
||||
con = make_voxel.make(rp)
|
||||
|
||||
elif rpass_hook is not None:
|
||||
con = rpass_hook(rp)
|
||||
|
||||
write_shaders(rel_path, con, rp, matname)
|
||||
|
||||
shader_data_name = matname + '_data'
|
||||
|
||||
if wrd.lnx_single_data_file:
|
||||
if 'shader_datas' not in lnx.exporter.current_output:
|
||||
lnx.exporter.current_output['shader_datas'] = []
|
||||
lnx.exporter.current_output['shader_datas'].append(mat_state.data.get()['shader_datas'][0])
|
||||
else:
|
||||
lnx.utils.write_lnx(full_path + '/' + matname + '_data.lnx', mat_state.data.get())
|
||||
shader_data_path = lnx.utils.get_fp_build() + '/compiled/Shaders/' + shader_data_name + '.lnx'
|
||||
assets.add_shader_data(shader_data_path)
|
||||
|
||||
return rpasses, mat_state.data, shader_data_name, bind_constants, bind_textures
|
||||
|
||||
|
||||
def write_shaders(rel_path: str, con: ShaderContext, rpass: str, matname: str) -> None:
|
||||
keep_cache = mat_state.material.lnx_cached
|
||||
write_shader(rel_path, con.vert, 'vert', rpass, matname, keep_cache=keep_cache)
|
||||
write_shader(rel_path, con.frag, 'frag', rpass, matname, keep_cache=keep_cache)
|
||||
write_shader(rel_path, con.geom, 'geom', rpass, matname, keep_cache=keep_cache)
|
||||
write_shader(rel_path, con.tesc, 'tesc', rpass, matname, keep_cache=keep_cache)
|
||||
write_shader(rel_path, con.tese, 'tese', rpass, matname, keep_cache=keep_cache)
|
||||
|
||||
|
||||
def write_shader(rel_path: str, shader: Shader, ext: str, rpass: str, matname: str, keep_cache=True) -> None:
|
||||
if shader is None or shader.is_linked:
|
||||
return
|
||||
|
||||
# TODO: blend context
|
||||
if rpass == 'mesh' and mat_state.material.lnx_blending:
|
||||
rpass = 'blend'
|
||||
|
||||
file_ext = '.glsl'
|
||||
if shader.noprocessing:
|
||||
# Use hlsl directly
|
||||
hlsl_dir = lnx.utils.build_dir() + '/compiled/Hlsl/'
|
||||
if not os.path.exists(hlsl_dir):
|
||||
os.makedirs(hlsl_dir)
|
||||
file_ext = '.hlsl'
|
||||
rel_path = rel_path.replace('/compiled/Shaders/', '/compiled/Hlsl/')
|
||||
|
||||
shader_file = matname + '_' + rpass + '.' + ext + file_ext
|
||||
shader_path = lnx.utils.get_fp() + '/' + rel_path + '/' + shader_file
|
||||
assets.add_shader(shader_path)
|
||||
if not os.path.isfile(shader_path) or not keep_cache:
|
||||
with open(shader_path, 'w') as f:
|
||||
f.write(shader.get())
|
||||
|
||||
if shader.noprocessing:
|
||||
cwd = os.getcwd()
|
||||
os.chdir(lnx.utils.get_fp() + '/' + rel_path)
|
||||
hlslbin_path = lnx.utils.get_sdk_path() + '/lib/leenkx_tools/hlslbin/hlslbin.exe'
|
||||
prof = 'vs_5_0' if ext == 'vert' else 'ps_5_0' if ext == 'frag' else 'gs_5_0'
|
||||
# noprocessing flag - gets renamed to .d3d11
|
||||
args = [hlslbin_path.replace('/', '\\').replace('\\\\', '\\'), shader_file, shader_file[:-4] + 'glsl', prof]
|
||||
if ext == 'vert':
|
||||
args.append('-i')
|
||||
args.append('pos')
|
||||
proc = subprocess.call(args)
|
||||
os.chdir(cwd)
|
||||
|
||||
|
||||
def make_instancing_and_skinning(mat: Material, mat_users: Dict[Material, List[Object]]) -> None:
|
||||
"""Build material with instancing or skinning if enabled.
|
||||
If the material is a custom material, only validation checks for instancing are performed."""
|
||||
global_elems = []
|
||||
if mat_users is not None and mat in mat_users:
|
||||
# Whether there are both an instanced object and a not instanced object with this material
|
||||
instancing_usage = [False, False]
|
||||
mat_state.uses_instancing = False
|
||||
|
||||
for bo in mat_users[mat]:
|
||||
if mat.lnx_custom_material == '':
|
||||
# Morph Targets
|
||||
if lnx.utils.export_morph_targets(bo):
|
||||
global_elems.append({'name': 'morph', 'data': 'short2norm'})
|
||||
# GPU Skinning
|
||||
if lnx.utils.export_bone_data(bo):
|
||||
global_elems.append({'name': 'bone', 'data': 'short4norm'})
|
||||
global_elems.append({'name': 'weight', 'data': 'short4norm'})
|
||||
|
||||
# Instancing
|
||||
inst = bo.lnx_instanced
|
||||
if inst != 'Off' or mat.lnx_particle_flag:
|
||||
instancing_usage[0] = True
|
||||
mat_state.uses_instancing = True
|
||||
|
||||
if mat.lnx_custom_material == '':
|
||||
global_elems.append({'name': 'ipos', 'data': 'float3'})
|
||||
if 'Rot' in inst:
|
||||
global_elems.append({'name': 'irot', 'data': 'float3'})
|
||||
if 'Scale' in inst:
|
||||
global_elems.append({'name': 'iscl', 'data': 'float3'})
|
||||
|
||||
elif inst == 'Off':
|
||||
# Ignore children of instanced objects, they are instanced even when set to 'Off'
|
||||
instancing_usage[1] = bo.parent is None or bo.parent.lnx_instanced == 'Off'
|
||||
|
||||
if instancing_usage[0] and instancing_usage[1]:
|
||||
# Display a warning for invalid instancing configurations
|
||||
# See https://github.com/leenkx3d/leenkx/issues/2072
|
||||
log.warn(f'Material "{mat.name}" has both instanced and not instanced objects, objects might flicker!')
|
||||
|
||||
if mat.lnx_custom_material == '':
|
||||
mat_state.data.global_elems = global_elems
|
32
leenkx/blender/lnx/material/make_skin.py
Normal file
32
leenkx/blender/lnx/material/make_skin.py
Normal file
@ -0,0 +1,32 @@
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def skin_pos(vert):
|
||||
vert.add_include('compiled.inc')
|
||||
|
||||
rpdat = lnx.utils.get_rp()
|
||||
vert.add_include('std/skinning.glsl')
|
||||
vert.add_uniform('vec4 skinBones[skinMaxBones * 2]', link='_skinBones', included=True)
|
||||
vert.add_uniform('float posUnpack', link='_posUnpack')
|
||||
vert.write_attrib('vec4 skinA;')
|
||||
vert.write_attrib('vec4 skinB;')
|
||||
vert.write_attrib('vec4 skinC;')
|
||||
vert.write_attrib('getSkinningDualQuat(ivec4(bone * 32767), weight, skinA, skinB, skinC);')
|
||||
vert.write_attrib('spos.xyz *= posUnpack;')
|
||||
vert.write_attrib('spos.xyz *= skinC.xyz;')
|
||||
vert.write_attrib('spos.xyz += 2.0 * cross(skinA.xyz, cross(skinA.xyz, spos.xyz) + skinA.w * spos.xyz); // Rotate')
|
||||
vert.write_attrib('spos.xyz += 2.0 * (skinA.w * skinB.xyz - skinB.w * skinA.xyz + cross(skinA.xyz, skinB.xyz)); // Translate')
|
||||
vert.write_attrib('spos.xyz /= posUnpack;')
|
||||
|
||||
|
||||
def skin_nor(vert, is_morph, prep):
|
||||
rpdat = lnx.utils.get_rp()
|
||||
if(is_morph):
|
||||
vert.write_attrib(prep + 'wnormal = normalize(N * (morphNor + 2.0 * cross(skinA.xyz, cross(skinA.xyz, morphNor) + skinA.w * morphNor)));')
|
||||
else:
|
||||
vert.write_attrib(prep + 'wnormal = normalize(N * (vec3(nor.xy, pos.w) + 2.0 * cross(skinA.xyz, cross(skinA.xyz, vec3(nor.xy, pos.w)) + skinA.w * vec3(nor.xy, pos.w))));')
|
32
leenkx/blender/lnx/material/make_tess.py
Normal file
32
leenkx/blender/lnx/material/make_tess.py
Normal file
@ -0,0 +1,32 @@
|
||||
|
||||
def tesc_levels(tesc, innerLevel, outerLevel):
|
||||
tesc.write('if (gl_InvocationID == 0) {')
|
||||
tesc.write(' gl_TessLevelInner[0] = {0}; // inner level'.format(innerLevel))
|
||||
tesc.write(' gl_TessLevelInner[1] = {0};'.format(innerLevel))
|
||||
tesc.write(' gl_TessLevelOuter[0] = {0}; // outer level'.format(outerLevel))
|
||||
tesc.write(' gl_TessLevelOuter[1] = {0};'.format(outerLevel))
|
||||
tesc.write(' gl_TessLevelOuter[2] = {0};'.format(outerLevel))
|
||||
tesc.write(' gl_TessLevelOuter[3] = {0};'.format(outerLevel))
|
||||
tesc.write('}')
|
||||
|
||||
def interpolate(tese, var, size, normalize=False, declare_out=False):
|
||||
tese.add_include('compiled.inc')
|
||||
vec = 'vec{0}'.format(size)
|
||||
if declare_out:
|
||||
tese.add_out('{0} {1}'.format(vec, var))
|
||||
|
||||
s = '{0} {1}_0 = gl_TessCoord.x * tc_{1}[0];\n'.format(vec, var)
|
||||
s += '{0} {1}_1 = gl_TessCoord.y * tc_{1}[1];\n'.format(vec, var)
|
||||
s += '{0} {1}_2 = gl_TessCoord.z * tc_{1}[2];\n'.format(vec, var)
|
||||
|
||||
prep = ''
|
||||
if not declare_out:
|
||||
prep = vec + ' '
|
||||
|
||||
if normalize:
|
||||
s += '{0}{1} = normalize({1}_0 + {1}_1 + {1}_2);\n'.format(prep, var)
|
||||
s += 'vec3 n = {0};\n'.format(var)
|
||||
else:
|
||||
s += '{0}{1} = {1}_0 + {1}_1 + {1}_2;\n'.format(prep, var)
|
||||
|
||||
tese.write_attrib(s)
|
54
leenkx/blender/lnx/material/make_transluc.py
Normal file
54
leenkx/blender/lnx/material/make_transluc.py
Normal file
@ -0,0 +1,54 @@
|
||||
import bpy
|
||||
|
||||
import lnx
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.make_mesh as make_mesh
|
||||
import lnx.material.make_finalize as make_finalize
|
||||
import lnx.assets as assets
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
cycles = lnx.reload_module(cycles)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
make_mesh = lnx.reload_module(make_mesh)
|
||||
make_finalize = lnx.reload_module(make_finalize)
|
||||
assets = lnx.reload_module(assets)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def make(context_id):
|
||||
con_transluc = mat_state.data.add_context({ 'name': context_id, 'depth_write': False, 'compare_mode': 'less', 'cull_mode': 'clockwise', \
|
||||
'blend_source': 'blend_one', 'blend_destination': 'blend_one', 'blend_operation': 'add', \
|
||||
'alpha_blend_source': 'blend_zero', 'alpha_blend_destination': 'inverse_source_alpha', 'alpha_blend_operation': 'add' })
|
||||
|
||||
make_mesh.make_forward_base(con_transluc, parse_opacity=True, transluc_pass=True)
|
||||
|
||||
vert = con_transluc.vert
|
||||
frag = con_transluc.frag
|
||||
tese = con_transluc.tese
|
||||
frag.add_include('std/gbuffer.glsl')
|
||||
|
||||
wrd = bpy.data.worlds['Lnx']
|
||||
frag.add_out('vec4 fragColor[2]')
|
||||
# Remove fragColor = ...;
|
||||
frag.main = frag.main[:frag.main.rfind('fragColor')]
|
||||
frag.write('\n')
|
||||
|
||||
if '_VoxelAOvar' in wrd.world_defs:
|
||||
frag.write('indirect *= 0.25;')
|
||||
|
||||
frag.write('n /= (abs(n.x) + abs(n.y) + abs(n.z));')
|
||||
frag.write('n.xy = n.z >= 0.0 ? n.xy : octahedronWrap(n.xy);')
|
||||
|
||||
frag.write('vec4 premultipliedReflect = vec4(vec3(direct + indirect * 0.5) * opacity, opacity);');
|
||||
frag.write('float w = clamp(pow(min(1.0, premultipliedReflect.a * 10.0) + 0.01, 3.0) * 1e8 * pow(1.0 - (gl_FragCoord.z) * 0.9, 3.0), 1e-2, 3e3);')
|
||||
frag.write('fragColor[0] = vec4(premultipliedReflect.rgb * w, premultipliedReflect.a);')
|
||||
frag.write('fragColor[1] = vec4(premultipliedReflect.a * w, 0.0, 0.0, 1.0);')
|
||||
|
||||
make_finalize.make(con_transluc)
|
||||
|
||||
# assets.vs_equal(con_transluc, assets.shader_cons['transluc_vert']) # shader_cons has no transluc yet
|
||||
# assets.fs_equal(con_transluc, assets.shader_cons['transluc_frag'])
|
||||
|
||||
return con_transluc
|
508
leenkx/blender/lnx/material/make_voxel.py
Normal file
508
leenkx/blender/lnx/material/make_voxel.py
Normal file
@ -0,0 +1,508 @@
|
||||
"""
|
||||
Copyright (c) 2024 Turánszki János
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
"""
|
||||
import bpy
|
||||
|
||||
import lnx.utils
|
||||
import lnx.assets as assets
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
import lnx.material.make_particle as make_particle
|
||||
import lnx.make_state as state
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
assets = lnx.reload_module(assets)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
def make(context_id):
|
||||
rpdat = lnx.utils.get_rp()
|
||||
if rpdat.rp_voxels == 'Voxel GI':
|
||||
con = make_gi(context_id)
|
||||
else:
|
||||
con = make_ao(context_id)
|
||||
|
||||
assets.vs_equal(con, assets.shader_cons['voxel_vert'])
|
||||
assets.fs_equal(con, assets.shader_cons['voxel_frag'])
|
||||
assets.gs_equal(con, assets.shader_cons['voxel_geom'])
|
||||
|
||||
return con
|
||||
|
||||
def make_gi(context_id):
|
||||
con_voxel = mat_state.data.add_context({ 'name': context_id, 'depth_write': False, 'compare_mode': 'always', 'cull_mode': 'none', 'color_write_red': False, 'color_write_green': False, 'color_write_blue': False, 'color_write_alpha': False, 'conservative_raster': True })
|
||||
wrd = bpy.data.worlds['Lnx']
|
||||
|
||||
vert = con_voxel.make_vert()
|
||||
frag = con_voxel.make_frag()
|
||||
geom = con_voxel.make_geom()
|
||||
tesc = None
|
||||
tese = None
|
||||
geom.ins = vert.outs
|
||||
frag.ins = geom.outs
|
||||
|
||||
vert.add_include('compiled.inc')
|
||||
geom.add_include('compiled.inc')
|
||||
frag.add_include('compiled.inc')
|
||||
frag.add_include('std/math.glsl')
|
||||
frag.add_include('std/imageatomic.glsl')
|
||||
frag.add_include('std/gbuffer.glsl')
|
||||
frag.add_include('std/brdf.glsl')
|
||||
|
||||
rpdat = lnx.utils.get_rp()
|
||||
frag.add_uniform('layout(r32ui) uimage3D voxels')
|
||||
|
||||
frag.write('vec3 n;')
|
||||
frag.write('vec3 wposition;')
|
||||
frag.write('vec3 basecol;')
|
||||
frag.write('float roughness;') #
|
||||
frag.write('float metallic;') #
|
||||
frag.write('float occlusion;') #
|
||||
frag.write('float specular;') #
|
||||
frag.write('vec3 emissionCol = vec3(0.0);')
|
||||
blend = mat_state.material.lnx_blending
|
||||
parse_opacity = blend or mat_utils.is_transluc(mat_state.material)
|
||||
if parse_opacity:
|
||||
frag.write('float opacity;')
|
||||
frag.write('float ior;')
|
||||
else:
|
||||
frag.write('float opacity = 1.0;')
|
||||
|
||||
frag.write('float dotNV = 0.0;')
|
||||
cycles.parse(mat_state.nodes, con_voxel, vert, frag, geom, tesc, tese, parse_opacity=parse_opacity, parse_displacement=False, basecol_only=True)
|
||||
|
||||
# Voxelized particles
|
||||
particle = mat_state.material.lnx_particle_flag
|
||||
if particle and rpdat.lnx_particles == 'On':
|
||||
# make_particle.write(vert, particle_info=cycles.particle_info)
|
||||
frag.write_pre = True
|
||||
frag.write('const float p_index = 0;')
|
||||
frag.write('const float p_age = 0;')
|
||||
frag.write('const float p_lifetime = 0;')
|
||||
frag.write('const vec3 p_location = vec3(0);')
|
||||
frag.write('const float p_size = 0;')
|
||||
frag.write('const vec3 p_velocity = vec3(0);')
|
||||
frag.write('const vec3 p_angular_velocity = vec3(0);')
|
||||
frag.write_pre = False
|
||||
|
||||
export_mpos = frag.contains('mposition') and not frag.contains('vec3 mposition')
|
||||
if export_mpos:
|
||||
vert.add_out('vec3 mpositionGeom')
|
||||
vert.write_pre = True
|
||||
vert.write('mpositionGeom = pos.xyz;')
|
||||
vert.write_pre = False
|
||||
|
||||
export_bpos = frag.contains('bposition') and not frag.contains('vec3 bposition')
|
||||
if export_bpos:
|
||||
vert.add_out('vec3 bpositionGeom')
|
||||
vert.add_uniform('vec3 dim', link='_dim')
|
||||
vert.add_uniform('vec3 hdim', link='_halfDim')
|
||||
vert.write_pre = True
|
||||
vert.write('bpositionGeom = (pos.xyz + hdim) / dim;')
|
||||
vert.write_pre = False
|
||||
|
||||
vert.add_uniform('mat4 W', '_worldMatrix')
|
||||
vert.add_uniform('mat3 N', '_normalMatrix')
|
||||
vert.add_out('vec3 voxpositionGeom')
|
||||
vert.add_out('vec3 voxnormalGeom')
|
||||
|
||||
if con_voxel.is_elem('col'):
|
||||
vert.add_out('vec3 vcolorGeom')
|
||||
vert.write('vcolorGeom = col.rgb;')
|
||||
|
||||
if con_voxel.is_elem('tex'):
|
||||
vert.add_out('vec2 texCoordGeom')
|
||||
vert.write('texCoordGeom = tex;')
|
||||
|
||||
vert.write('voxpositionGeom = vec3(W * vec4(pos.xyz, 1.0));')
|
||||
vert.write('voxnormalGeom = normalize(N * vec3(nor.xy, pos.w));')
|
||||
|
||||
geom.add_out('vec4 voxposition[3]')
|
||||
geom.add_out('vec3 P')
|
||||
geom.add_out('vec3 voxnormal')
|
||||
geom.add_out('vec4 lightPosition')
|
||||
geom.add_out('vec4 spotPosition')
|
||||
geom.add_out('vec4 wvpposition')
|
||||
|
||||
if con_voxel.is_elem('col'):
|
||||
geom.add_out('vec3 vcolor')
|
||||
if con_voxel.is_elem('tex'):
|
||||
geom.add_out('vec2 texCoord')
|
||||
if export_mpos:
|
||||
geom.add_out('vec3 mposition')
|
||||
if export_bpos:
|
||||
geom.add_out('vec3 bposition')
|
||||
|
||||
geom.add_uniform('float clipmaps[voxelgiClipmapCount * 10]', '_clipmaps')
|
||||
geom.add_uniform('int clipmapLevel', '_clipmapLevel')
|
||||
|
||||
geom.write('vec3 facenormal = abs(voxnormalGeom[0] + voxnormalGeom[1] + voxnormalGeom[2]);')
|
||||
geom.write('uint maxi = facenormal[1] > facenormal[0] ? 1 : 0;')
|
||||
geom.write('maxi = facenormal[2] > facenormal[maxi] ? 2 : maxi;')
|
||||
|
||||
geom.write('for (uint i = 0; i < 3; ++i) {')
|
||||
geom.write(' voxposition[i].xyz = (voxpositionGeom[i] - vec3(clipmaps[int(clipmapLevel * 10 + 4)], clipmaps[int(clipmapLevel * 10 + 5)], clipmaps[int(clipmapLevel * 10 + 6)])) / (float(clipmaps[int(clipmapLevel * 10)]));')
|
||||
geom.write(' if (maxi == 0)')
|
||||
geom.write(' {')
|
||||
geom.write(' voxposition[i].xyz = voxposition[i].zyx;')
|
||||
geom.write(' }')
|
||||
geom.write(' else if (maxi == 1)')
|
||||
geom.write(' {')
|
||||
geom.write(' voxposition[i].xyz = voxposition[i].xzy;')
|
||||
geom.write(' }')
|
||||
geom.write('}')
|
||||
|
||||
geom.write('for (uint i = 0; i < 3; ++i) {')
|
||||
geom.write(' voxposition[i].xy /= voxelgiResolution.xy;')
|
||||
geom.write(' voxposition[i].zw = vec2(1.0);')
|
||||
geom.write(' P = voxpositionGeom[i];')
|
||||
geom.write(' voxnormal = voxnormalGeom[i];')
|
||||
if con_voxel.is_elem('col'):
|
||||
geom.write('vcolor = vcolorGeom[i];')
|
||||
if con_voxel.is_elem('tex'):
|
||||
geom.write('texCoord = texCoordGeom[i];')
|
||||
if export_mpos:
|
||||
geom.write('mposition = mpositionGeom[i];')
|
||||
if export_bpos:
|
||||
geom.write('bposition = bpositionGeom[i];')
|
||||
geom.write(' gl_Position = voxposition[i];')
|
||||
geom.write(' EmitVertex();')
|
||||
geom.write('}')
|
||||
geom.write('EndPrimitive();')
|
||||
|
||||
frag.add_uniform('float clipmaps[voxelgiClipmapCount * 10]', '_clipmaps')
|
||||
frag.add_uniform('int clipmapLevel', '_clipmapLevel')
|
||||
|
||||
frag.write('vec3 uvw = (P - vec3(clipmaps[int(clipmapLevel * 10 + 4)], clipmaps[int(clipmapLevel * 10 + 5)], clipmaps[int(clipmapLevel * 10 + 6)])) / (float(clipmaps[int(clipmapLevel * 10)]) * voxelgiResolution);')
|
||||
frag.write('uvw = (uvw * 0.5 + 0.5);')
|
||||
frag.write('if(any(notEqual(uvw, clamp(uvw, 0.0, 1.0)))) return;')
|
||||
frag.write('vec3 writecoords = floor(uvw * voxelgiResolution);')
|
||||
frag.write_attrib('vec3 N = normalize(voxnormal);')
|
||||
frag.write('vec3 aniso_direction = N;')
|
||||
frag.write('uvec3 face_offsets = uvec3(')
|
||||
frag.write(' aniso_direction.x > 0 ? 0 : 1,')
|
||||
frag.write(' aniso_direction.y > 0 ? 2 : 3,')
|
||||
frag.write(' aniso_direction.z > 0 ? 4 : 5')
|
||||
frag.write(' ) * voxelgiResolution;')
|
||||
frag.write('vec3 direction_weights = abs(N);')
|
||||
|
||||
frag.write('vec3 albedo = surfaceAlbedo(basecol, metallic);')
|
||||
frag.write('vec3 f0 = surfaceF0(basecol, metallic);')
|
||||
|
||||
frag.add_uniform('vec3 eye', '_cameraPosition')
|
||||
frag.write('vec3 eyeDir = eye - wposition;')
|
||||
|
||||
if '_Brdf' in wrd.world_defs:
|
||||
frag.add_uniform('sampler2D senvmapBrdf', link='$brdf.png')
|
||||
frag.write('vec2 envBRDF = texelFetch(senvmapBrdf, ivec2(vec2(dotNV, 1.0 - roughness) * 256.0), 0).xy;')
|
||||
|
||||
if '_Irr' in wrd.world_defs:
|
||||
frag.add_include('std/shirr.glsl')
|
||||
frag.add_uniform('vec4 shirr[7]', link='_envmapIrradiance')
|
||||
frag.write('vec3 envl = shIrradiance(n, shirr);')
|
||||
if '_EnvTex' in wrd.world_defs:
|
||||
frag.write('envl /= PI;')
|
||||
else:
|
||||
frag.write('vec3 envl = vec3(0.0);')
|
||||
|
||||
if '_Rad' in wrd.world_defs:
|
||||
frag.add_uniform('sampler2D senvmapRadiance', link='_envmapRadiance')
|
||||
frag.add_uniform('int envmapNumMipmaps', link='_envmapNumMipmaps')
|
||||
frag.write('vec3 reflectionWorld = reflect(-eyeDir, n);')
|
||||
frag.write('float lod = getMipFromRoughness(roughness, envmapNumMipmaps);')
|
||||
frag.write('vec3 prefilteredColor = textureLod(senvmapRadiance, envMapEquirect(reflectionWorld), lod).rgb;')
|
||||
|
||||
if '_EnvLDR' in wrd.world_defs:
|
||||
frag.write('envl = pow(envl, vec3(2.2));')
|
||||
if '_Rad' in wrd.world_defs:
|
||||
frag.write('prefilteredColor = pow(prefilteredColor, vec3(2.2));')
|
||||
|
||||
frag.write('envl *= albedo;')
|
||||
|
||||
if '_Brdf' in wrd.world_defs:
|
||||
frag.write('envl.rgb *= 1.0 - (f0 * envBRDF.x + envBRDF.y);')
|
||||
if '_Rad' in wrd.world_defs:
|
||||
frag.write('envl += prefilteredColor * (f0 * envBRDF.x + envBRDF.y);')
|
||||
elif '_EnvCol' in wrd.world_defs:
|
||||
frag.add_uniform('vec3 backgroundCol', link='_backgroundCol')
|
||||
frag.write('envl += backgroundCol * (f0 * envBRDF.x + envBRDF.y);')
|
||||
|
||||
frag.add_uniform('float envmapStrength', link='_envmapStrength')
|
||||
frag.write('envl *= envmapStrength * occlusion;')
|
||||
|
||||
frag.write('if (direction_weights.x > 0) {')
|
||||
frag.write(' vec4 basecol_direction = vec4(min(basecol * direction_weights.x, vec3(1.0)), 1.0);')
|
||||
frag.write(' vec3 emission_direction = emissionCol * direction_weights.x;')
|
||||
frag.write(' vec2 normal_direction = encode_oct(N * direction_weights.x) * 0.5 + 0.5;')
|
||||
frag.write(' vec3 envl_direction = envl * direction_weights.x;')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, 0)), uint(basecol_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x)), uint(basecol_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 2)), uint(basecol_direction.b * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 3)), uint(basecol_direction.a * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 4)), uint(emission_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 5)), uint(emission_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 6)), uint(emission_direction.b * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 7)), uint(normal_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 8)), uint(normal_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 9)), uint(envl_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 10)), uint(envl_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, voxelgiResolution.x * 11)), uint(envl_direction.b * 255));')
|
||||
frag.write('}')
|
||||
|
||||
frag.write('if (direction_weights.y > 0) {')
|
||||
frag.write(' vec4 basecol_direction = vec4(min(basecol * direction_weights.y, vec3(1.0)), 1.0);')
|
||||
frag.write(' vec3 emission_direction = emissionCol * direction_weights.y;')
|
||||
frag.write(' vec2 normal_direction = encode_oct(N * direction_weights.y) * 0.5 + 0.5;')
|
||||
frag.write(' vec3 envl_direction = envl * direction_weights.y;')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, 0)), uint(basecol_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x)), uint(basecol_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 2)), uint(basecol_direction.b * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 3)), uint(basecol_direction.a * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 4)), uint(emission_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 5)), uint(emission_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 6)), uint(emission_direction.b * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 7)), uint(normal_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 8)), uint(normal_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 9)), uint(envl_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 10)), uint(envl_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, voxelgiResolution.x * 11)), uint(envl_direction.b * 255));')
|
||||
frag.write('}')
|
||||
|
||||
frag.write('if (direction_weights.z > 0) {')
|
||||
frag.write(' vec4 basecol_direction = vec4(min(basecol * direction_weights.z, vec3(1.0)), 1.0);')
|
||||
frag.write(' vec3 emission_direction = emissionCol * direction_weights.z;')
|
||||
frag.write(' vec2 normal_direction = encode_oct(n * direction_weights.z) * 0.5 + 0.5;')
|
||||
frag.write(' vec3 envl_direction = envl * direction_weights.z;')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, 0)), uint(basecol_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x)), uint(basecol_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 2)), uint(basecol_direction.b * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 3)), uint(basecol_direction.a * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 4)), uint(emission_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 5)), uint(emission_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 6)), uint(emission_direction.b * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 7)), uint(normal_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 8)), uint(normal_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 9)), uint(envl_direction.r * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 10)), uint(envl_direction.g * 255));')
|
||||
frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, voxelgiResolution.x * 11)), uint(envl_direction.b * 255));')
|
||||
frag.write('}')
|
||||
|
||||
return con_voxel
|
||||
|
||||
|
||||
def make_ao(context_id):
    """Build the voxelization shader context for voxel ambient occlusion.

    Creates a context that rasterizes geometry into a 3D `r32ui` image
    ("voxels") via a vertex/geometry/fragment pipeline: the geometry shader
    projects each triangle along its dominant axis, and the fragment shader
    atomically accumulates per-axis opacity weights into the clipmap volume.

    :param context_id: Name for the new shader context.
    :return: The created voxel ShaderContext.
    """
    # No color/depth output — all writes go through image atomics.
    con_voxel = mat_state.data.add_context({ 'name': context_id, 'depth_write': False, 'compare_mode': 'always', 'cull_mode': 'none', 'color_writes_red': [False], 'color_writes_green': [False], 'color_writes_blue': [False], 'color_writes_alpha': [False], 'conservative_raster': False })
    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()

    vert = con_voxel.make_vert()
    frag = con_voxel.make_frag()
    geom = con_voxel.make_geom()
    tesc = None
    tese = None

    geom.ins = vert.outs
    frag.ins = geom.outs

    frag.add_include('compiled.inc')
    geom.add_include('compiled.inc')
    frag.add_include('std/math.glsl')
    frag.add_include('std/imageatomic.glsl')
    # Required for imageAtomicAdd on older GL targets.
    frag.write_header('#extension GL_ARB_shader_image_load_store : enable')

    vert.add_include('compiled.inc')
    vert.add_uniform('mat4 W', '_worldMatrix')
    vert.add_uniform('mat3 N', '_normalMatrix')

    # Clipmap layout: 10 floats per level (extent at [level*10],
    # center offsets at [level*10 + 4..6] as used below).
    geom.add_uniform('float clipmaps[voxelgiClipmapCount * 10]', '_clipmaps')
    geom.add_uniform('int clipmapLevel', '_clipmapLevel')

    frag.add_uniform('float clipmaps[voxelgiClipmapCount * 10]', '_clipmaps')
    frag.add_uniform('int clipmapLevel', '_clipmapLevel')

    # Disabled Direct3D11/HLSL path kept for reference (string literal, not executed).
    """
    if lnx.utils.get_gapi() == 'direct3d11':
        for e in con_voxel.data['vertex_elements']:
            if e['name'] == 'nor':
                con_voxel.data['vertex_elements'].remove(e)
                break

        vert.write('uniform float4x4 W;')
        vert.write('uniform float3x3 N;')
        vert.write('struct SPIRV_Cross_Input {')
        vert.write(' float4 pos : TEXCOORD0;')
        vert.write(' float3 nor : NORMAL;')
        vert.write('};')
        vert.write('struct SPIRV_Cross_Output {')
        vert.write(' float4 svpos : SV_POSITION;')
        vert.write(' float3 svnor : NORMAL;')
        vert.write('};')
        vert.write('SPIRV_Cross_Output main(SPIRV_Cross_Input stage_input) {')
        vert.write(' SPIRV_Cross_Output stage_output;')
        vert.write(' stage_output.svpos.xyz = mul(float4(stage_input.pos.xyz, 1.0), W).xyz;')
        vert.write(' stage_output.svpos.w = 1.0;')
        vert.write(' stage_output.svnor.xyz = normalize(mul(float3(nor.xy, pos.w), N).xyz);')
        vert.write(' return stage_output;')
        vert.write('}')

        geom.write('uniform float clipmaps[voxelgiClipmapCount * 10];')
        geom.write('uniform int clipmapLevel;')
        geom.write('struct SPIRV_Cross_Input {')
        geom.write(' float4 svpos : SV_POSITION;')
        geom.write(' float3 svnor : NORMAL;')
        geom.write('};')
        geom.write('struct SPIRV_Cross_Output {')
        geom.write(' float3 wpos : TEXCOORD0;')
        geom.write(' float3 wnor : NORMAL;')
        geom.write('};')
        geom.write('[maxvertexcount(3)]')
        geom.write('void main(triangle SPIRV_Cross_Input stage_input[3], inout TriangleStream<SPIRV_Cross_Output> output) {')
        geom.write(' float3 p1 = stage_input[1].svpos.xyz - stage_input[0].svpos.xyz;')
        geom.write(' float3 p2 = stage_input[2].svpos.xyz - stage_input[0].svpos.xyz;')
        geom.write(' float3 p = abs(cross(p1, p2));')
        geom.write(' for (int i = 0; i < 3; ++i) {')
        geom.write(' SPIRV_Cross_Output stage_output;')
        geom.write(' stage_output.wpos = (stage_input[i].svpos.xyz + float3(clipmaps[int(clipmapLevel * 10 + 4)], clipmaps[int(clipmapLevel * 10 + 5)], clipmaps[int(clipmapLevel * 10 + 6)])) / (float(clipmaps[clipmapLevel * 10]) * voxelgiResolution);')
        geom.write(' stage_output.wnor = stage_input[i].svnor.xyz;')
        geom.write(' if (p.z > p.x && p.z > p.y) {')
        geom.write(' stage_output.svpos = float4(stage_input[i].svpos.x, stage_input[i].svpos.y, 0.0, 1.0);')
        geom.write(' }')
        geom.write(' else if (p.x > p.y && p.x > p.z) {')
        geom.write(' stage_output.svpos = float4(stage_input[i].svpos.y, stage_input[i].svpos.z, 0.0, 1.0);')
        geom.write(' }')
        geom.write(' else {')
        geom.write(' stage_output.svpos = float4(stage_input[i].svpos.x, stage_input[i].svpos.z, 0.0, 1.0);')
        geom.write(' }')
        geom.write(' output.Append(stage_output);')
        geom.write(' }')
        geom.write('}')

        frag.add_uniform('layout(r8) writeonly image3D voxels')
        frag.write('RWTexture3D<float> voxels;')
        frag.write('uniform float clipmaps[voxelgiClipmapCount * 10];')
        frag.write('uniform int clipmapLevel;')

        frag.write('struct SPIRV_Cross_Input {')
        frag.write(' float3 wpos : TEXCOORD0;')
        frag.write(' float3 wnor : NORMAL;')
        frag.write('};')
        frag.write('struct SPIRV_Cross_Output { float4 FragColor : SV_TARGET0; };')
        frag.write('void main(SPIRV_Cross_Input stage_input) {')
        frag.write(' float3 uvw = (stage_input.wpos.xyz - float3(clipmaps[int(clipmapLevel * 10 + 4)], clipmaps[int(clipmapLevel * 10 + 5)], clipmaps[int(clipmapLevel * 10 + 6)])) / (float(clipmaps[int(clipmapLevel * 10)]) * voxelgiResolution);')
        frag.write(' uvw = uvw * 0.5 + 0.5;')
        frag.write(' if(any(!saturate(uvw))) return;')
        frag.write(' uvw = floor(uvw * voxelgiResolution);')
        frag.write(' uint3 face_offsets = uint3(')
        frag.write(' stage_input.wnor.x > 0 ? 0 : 1,')
        frag.write(' stage_input.wnor.y > 0 ? 2 : 3,')
        frag.write(' stage_input.wnor.z > 0 ? 4 : 5')
        frag.write(' ) * voxelgiResolution;')
        frag.write(' float3 direction_weights = abs(stage_input.wnor);')

        frag.write(' if (direction_weights.x > 0.0) {')
        frag.write(' float opac_direction = direction_weights.x;')
        frag.write(' voxels[uvw + int3(face_offsets.x, 0, 0))] = float4(opac_direction);')
        frag.write(' }')

        frag.write(' if (direction_weights.y > 0.0) {')
        frag.write(' float opac_direction = direction_weights.y;')
        frag.write(' voxels[uvw + int3(face_offsets.y, 0, 0))] = float4(opac_direction);')
        frag.write(' }')

        frag.write(' if (direction_weights.z > 0.0) {')
        frag.write(' float opac_direction = direction_weights.z;')
        frag.write(' voxels[uvw + int3(face_offsets.z, 0, 0))] = float4(opac_direction);')
        frag.write(' }')
        frag.write('}')
    else:
    """
    # GLSL path: unsigned-int volume written with image atomics.
    frag.add_uniform('layout(r32ui) uimage3D voxels')

    vert.add_out('vec3 voxpositionGeom')
    vert.add_out('vec3 voxnormalGeom')

    # World-space position; normal z is reconstructed from pos.w
    # (packed vertex format used by the engine's vertex layout).
    vert.write('voxpositionGeom = vec3(W * vec4(pos.xyz, 1.0));')
    vert.write('voxnormalGeom = normalize(N * vec3(nor.xy, pos.w));')

    geom.add_out('vec4 voxposition[3]')
    geom.add_out('vec3 P')
    geom.add_out('vec3 voxnormal')
    # NOTE(review): these two uniforms were already added above — harmless
    # if add_uniform de-duplicates; confirm.
    geom.add_uniform('float clipmaps[voxelgiClipmapCount * 10]', '_clipmaps')
    geom.add_uniform('int clipmapLevel', '_clipmapLevel')

    # Pick the dominant axis of the (summed) face normal for projection.
    geom.write('vec3 facenormal = abs(voxnormalGeom[0] + voxnormalGeom[1] + voxnormalGeom[2]);')
    geom.write('uint maxi = facenormal[1] > facenormal[0] ? 1 : 0;')
    geom.write('maxi = facenormal[2] > facenormal[maxi] ? 2 : maxi;')

    # Swizzle so the dominant axis maps onto z, then rasterize in xy.
    geom.write('for (uint i = 0; i < 3; ++i) {')
    geom.write(' voxposition[i].xyz = (voxpositionGeom[i] - vec3(clipmaps[int(clipmapLevel * 10 + 4)], clipmaps[int(clipmapLevel * 10 + 5)], clipmaps[int(clipmapLevel * 10 + 6)])) / (float(clipmaps[int(clipmapLevel * 10)]));')
    geom.write(' if (maxi == 0)')
    geom.write(' {')
    geom.write(' voxposition[i].xyz = voxposition[i].zyx;')
    geom.write(' }')
    geom.write(' else if (maxi == 1)')
    geom.write(' {')
    geom.write(' voxposition[i].xyz = voxposition[i].xzy;')
    geom.write(' }')
    geom.write('}')

    geom.write('for (uint i = 0; i < 3; ++i) {')
    geom.write(' voxposition[i].xy /= voxelgiResolution.xy;')
    geom.write(' voxposition[i].zw = vec2(1.0);')
    geom.write(' P = voxpositionGeom[i];')
    geom.write(' voxnormal = voxnormalGeom[i];')
    geom.write(' gl_Position = voxposition[i];')
    geom.write(' EmitVertex();')
    geom.write('}')
    geom.write('EndPrimitive();')

    # NOTE(review): duplicated frag uniforms (also added earlier) — confirm
    # add_uniform de-duplicates.
    frag.add_uniform('float clipmaps[voxelgiClipmapCount * 10]', '_clipmaps')
    frag.add_uniform('int clipmapLevel', '_clipmapLevel')

    # Map world position into normalized clipmap coords, discard outside.
    frag.write('vec3 uvw = (P - vec3(clipmaps[int(clipmapLevel * 10 + 4)], clipmaps[int(clipmapLevel * 10 + 5)], clipmaps[int(clipmapLevel * 10 + 6)])) / (float(clipmaps[int(clipmapLevel * 10)]) * voxelgiResolution);')
    frag.write('uvw = (uvw * 0.5 + 0.5);')
    frag.write('if(any(notEqual(uvw, clamp(uvw, 0.0, 1.0)))) return;')
    frag.write('vec3 writecoords = floor(uvw * voxelgiResolution);')
    frag.write_attrib('vec3 N = normalize(voxnormal);')
    frag.write('vec3 aniso_direction = N;')
    # Six directional slabs along x: +x,-x,+y,-y,+z,-z.
    frag.write('uvec3 face_offsets = uvec3(')
    frag.write(' aniso_direction.x > 0 ? 0 : 1,')
    frag.write(' aniso_direction.y > 0 ? 2 : 3,')
    frag.write(' aniso_direction.z > 0 ? 4 : 5')
    frag.write(' ) * voxelgiResolution;')
    frag.write('vec3 direction_weights = abs(N);')

    # Accumulate opacity per axis, scaled to 0..255 fixed point.
    frag.write('if (direction_weights.x > 0) {')
    frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.x, 0, 0)), uint(direction_weights.x * 255));')
    frag.write('}')

    frag.write('if (direction_weights.y > 0) {')
    frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.y, 0, 0)), uint(direction_weights.y * 255));')
    frag.write('}')

    frag.write('if (direction_weights.z > 0) {')
    frag.write(' imageAtomicAdd(voxels, ivec3(writecoords + ivec3(face_offsets.z, 0, 0)), uint(direction_weights.z * 255));')
    frag.write('}')

    return con_voxel
|
144
leenkx/blender/lnx/material/mat_batch.py
Normal file
144
leenkx/blender/lnx/material/mat_batch.py
Normal file
@ -0,0 +1,144 @@
|
||||
import bpy
|
||||
import lnx
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.material.make_shader as make_shader
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.utils as lnx_utils
|
||||
|
||||
# Hot-reload support: when the SDK is reloaded inside Blender, re-import the
# modules this file depends on; otherwise register this module for reloading.
if lnx.is_reload(__name__):
    cycles = lnx.reload_module(cycles)
    make_shader = lnx.reload_module(make_shader)
    mat_state = lnx.reload_module(mat_state)
    lnx.utils = lnx.reload_module(lnx.utils)
else:
    lnx.enable_reload(__name__)

# TODO: handle groups
# TODO: handle cached shaders

# Shader data per material, populated by build().
batchDict = None
# Materials grouped by signature, populated by build().
# NOTE(review): build() currently assigns a local signatureDict — confirm
# whether this module-level value is meant to be updated.
signatureDict = None
|
||||
|
||||
def traverse_tree(node, sign):
    """Extend `sign` with a structural signature of the graph rooted at `node`.

    Depth-first: appends the node's type followed by '-', then recurses into
    every linked input in order; an unconnected socket contributes 'o'.
    """
    sign = sign + node.type + '-'
    for socket in node.inputs:
        if socket.is_linked:
            sign = traverse_tree(socket.links[0].from_node, sign)
        else:
            # Unconnected socket marker
            sign = sign + 'o'
    return sign
|
||||
|
||||
def get_signature(mat, object: bpy.types.Object):
    """Compute a string signature identifying the shader variant of `mat`.

    Materials with equal signatures can share one compiled shader. The
    signature combines the node-graph structure (via traverse_tree) with
    every material/object flag that influences generated shader code.
    Returns None when the material has no OUTPUT_MATERIAL node.
    """
    nodes = mat.node_tree.nodes
    output_node = cycles.node_by_type(nodes, 'OUTPUT_MATERIAL')

    if output_node != None:
        sign = traverse_tree(output_node, '')
        # Append flags
        sign += '1' if mat.lnx_cast_shadow else '0'
        sign += '1' if mat.lnx_ignore_irradiance else '0'
        # Culling: '2' two-sided, '1' clockwise, '0' default
        if mat.lnx_two_sided:
            sign += '2'
        elif mat.lnx_cull_mode == 'Clockwise':
            sign += '1'
        else:
            sign += '0'
        sign += str(mat.lnx_material_id)
        sign += '1' if mat.lnx_depth_read else '0'
        sign += '1' if mat.lnx_overlay else '0'
        sign += '1' if mat.lnx_decal else '0'
        # Discard thresholds are baked into the shader, so they are part
        # of the signature (rounded to 2 decimals).
        if mat.lnx_discard:
            sign += '1'
            sign += str(round(mat.lnx_discard_opacity, 2))
            sign += str(round(mat.lnx_discard_opacity_shadows, 2))
        else:
            sign += '000'
        sign += mat.lnx_custom_material if mat.lnx_custom_material != '' else '0'
        sign += mat.lnx_skip_context if mat.lnx_skip_context != '' else '0'
        sign += '1' if mat.lnx_particle_fade else '0'
        sign += mat.lnx_billboard
        # Object-dependent variants: skinning and morph targets change
        # the generated vertex shader.
        sign += '_skin' if lnx.utils.export_bone_data(object) else '0'
        sign += '_morph' if lnx.utils.export_morph_targets(object) else '0'
        return sign
|
||||
|
||||
def traverse_tree2(node, ar):
    """Collect `node` and all upstream nodes into `ar` in pre-order,
    clearing the is_uniform flag on every input socket visited."""
    ar.append(node)
    for socket in node.inputs:
        socket.is_uniform = False
        if not socket.is_linked:
            continue
        traverse_tree2(socket.links[0].from_node, ar)
||||
|
||||
def get_sorted(mat):
    """Return mat's nodes in pre-order starting at the OUTPUT_MATERIAL node,
    or None when no output node exists (side effect: clears is_uniform on
    every visited input socket via traverse_tree2)."""
    output_node = cycles.node_by_type(mat.node_tree.nodes, 'OUTPUT_MATERIAL')
    if output_node is None:
        return None
    collected = []
    traverse_tree2(output_node, collected)
    return collected
||||
|
||||
def mark_uniforms(mats):
    """Mark sockets whose default values differ across `mats` as uniforms.

    All materials in `mats` share one signature (structurally identical node
    trees). Any unconnected input whose default value differs between any two
    of them is flagged is_uniform so a single compiled shader can serve all
    of them with per-material constants.
    """
    ars = []
    for m in mats:
        ars.append(get_sorted(m))

    # Buckle up..
    for i in range(0, len(ars[0])): # Traverse nodes
        for j in range(0, len(ars[0][i].inputs)): # Traverse inputs
            inp = ars[0][i].inputs[j]
            if not inp.is_linked and hasattr(inp, 'default_value'):
                for k in range(1, len(ars)): # Compare default values
                    inp2 = ars[k][i].inputs[j]
                    diff = False
                    # bpy_prop_array (colors/vectors) has no usable direct
                    # equality, so compare element-wise.
                    if str(type(inp.default_value)) == "<class 'bpy_prop_array'>":
                        for l in range(0, len(inp.default_value)):
                            if inp.default_value[l] != inp2.default_value[l]:
                                diff = True
                                break
                    elif inp.default_value != inp2.default_value:
                        diff = True
                    if diff: # Diff found
                        # Flag the socket on every material in the group.
                        for ar in ars:
                            ar[i].inputs[j].is_uniform = True
                        break
||||
|
||||
def build(materialArray, mat_users, mat_lnxusers):
    """Compile shaders for all materials, sharing shader data between
    materials whose signatures match.

    Populates the module-level batchDict (material -> shader data) which
    get() reads from.
    """
    global batchDict
    batchDict = dict() # Stores shader data for given material
    # NOTE(review): assigned as a local despite the module-level
    # `signatureDict = None`; missing `global signatureDict`? Confirm whether
    # other code is meant to read the module-level value.
    signatureDict = dict() # Stores materials for given signature

    # Update signatures
    for mat in materialArray:
        if mat.signature == '' or not mat.lnx_cached:
            mat.signature = get_signature(mat, mat_users[mat][0])
        # Group signatures
        if mat.signature in signatureDict:
            signatureDict[mat.signature].append(mat)
        else:
            signatureDict[mat.signature] = [mat]

    # Mark different inputs
    for ref in signatureDict:
        mats = signatureDict[ref]
        if len(mats) > 1:
            mark_uniforms(mats)

    mat_state.batch = True

    # Build unique shaders: scan materials in order — reuse the shader of an
    # earlier material with the same signature, otherwise (when the scan
    # reaches the material itself first) build it.
    for mat in materialArray:
        for mat2 in materialArray:
            # Signature not found - build it
            if mat == mat2:
                batchDict[mat] = make_shader.build(mat, mat_users, mat_lnxusers)
                break

            # Already batched
            if mat.signature == mat2.signature:
                batchDict[mat] = batchDict[mat2]
                break

    mat_state.batch = False
|
||||
|
||||
def get(mat):
    """Return the shader data built for `mat` by build().

    Raises KeyError (or TypeError if build() was never called) for
    unknown materials.
    """
    return batchDict[mat]
|
40
leenkx/blender/lnx/material/mat_state.py
Normal file
40
leenkx/blender/lnx/material/mat_state.py
Normal file
@ -0,0 +1,40 @@
|
||||
from enum import IntEnum
|
||||
|
||||
|
||||
class EmissionType(IntEnum):
    NO_EMISSION = 0
    """The material has no emission at all."""

    SHADELESS = 1
    """The material is emissive and does not interact with lights/shadows."""

    SHADED = 2
    """The material is emissive and interacts with lights/shadows."""

    @staticmethod
    def get_effective_combination(a: 'EmissionType', b: 'EmissionType') -> 'EmissionType':
        """Combine two emission states into the one the shader must support."""
        states = (a, b)

        # Shaded emission always has precedence over shadeless emission
        if EmissionType.SHADED in states:
            return EmissionType.SHADED

        if a == b == EmissionType.SHADELESS:
            return EmissionType.SHADELESS

        # If only one input is shadeless we still need shaded emission
        if EmissionType.SHADELESS in states:
            return EmissionType.SHADED

        return EmissionType.NO_EMISSION
|
||||
|
||||
|
||||
# --- Shared build state for the material pipeline (reset per material) ---
data = None  # ShaderData
material = None  # Material currently being processed
nodes = None  # Node tree nodes of the current material
mat_users = None  # Objects using the current material
bind_constants = None  # Merged with mat_context bind constants
bind_textures = None  # Merged with mat_context bind textures
batch = False  # True while mat_batch.build() is compiling shared shaders
texture_grad = False  # Sample textures using textureGrad()
con_mesh = None  # Mesh context
uses_instancing = False  # Whether the current material has at least one user with instancing enabled
emission_type = EmissionType.NO_EMISSION  # Effective emission mode of the current material
|
112
leenkx/blender/lnx/material/mat_utils.py
Normal file
112
leenkx/blender/lnx/material/mat_utils.py
Normal file
@ -0,0 +1,112 @@
|
||||
from typing import Generator
|
||||
|
||||
import bpy
|
||||
|
||||
import lnx.utils
|
||||
import lnx.make_state as make_state
|
||||
import lnx.material.cycles as cycles
|
||||
import lnx.assets as assets
|
||||
import lnx.log as log
|
||||
|
||||
# Hot-reload support for in-Blender SDK reloads.
# NOTE(review): `assets` is imported above but not reloaded here — confirm
# this is intentional.
if lnx.is_reload(__name__):
    lnx.utils = lnx.reload_module(lnx.utils)
    make_state = lnx.reload_module(make_state)
    cycles = lnx.reload_module(cycles)
    log = lnx.reload_module(log)
else:
    lnx.enable_reload(__name__)

# Extra mesh contexts appended to every material's render-pass list.
add_mesh_contexts = []
|
||||
|
||||
def disp_linked(output_node):
    """Return whether displacement should be generated for this material.

    True only when the output node's displacement socket (inputs[2]) is
    linked, the link is not a Leenkx PBR group with an unconnected height
    input, and displacement is enabled for the current build target.
    """
    linked = output_node.inputs[2].is_linked
    if not linked:
        return False
    # Leenkx PBR with unlinked height socket
    # (inputs[7] — presumably the group's Height input; confirm against
    # the Leenkx PBR node group definition)
    l = output_node.inputs[2].links[0]
    if l.from_node.type == 'GROUP' and l.from_node.node_tree.name.startswith('Leenkx PBR') and \
            l.from_node.inputs[7].is_linked == False:
        return False
    disp_enabled = lnx.utils.disp_enabled(make_state.target)
    rpdat = lnx.utils.get_rp()
    # Warn (but still return False via disp_enabled) when the render path
    # wants tessellation on a target that cannot do it.
    if not disp_enabled and rpdat.lnx_rp_displacement == 'Tessellation':
        log.warn('Tessellation not available on ' + make_state.target)
    return disp_enabled
|
||||
|
||||
def get_rpasses(material):
    """Return the list of render-pass context names `material` participates in.

    The base context is one of 'decal' / 'overlay' / 'mesh' (mutually
    exclusive); further contexts (translucency, refraction, voxel, depth
    prepass, shadow maps) are appended based on material flags and the
    active render path.
    """
    ar = []

    rpdat = lnx.utils.get_rp()
    has_voxels = lnx.utils.voxel_support()

    if material.lnx_decal:
        ar.append('decal')
    elif material.lnx_overlay:
        ar.append('overlay')
    else:
        ar.append('mesh')
        # Extra contexts registered by other modules (see add_mesh_contexts)
        for con in add_mesh_contexts:
            ar.append(con)
        # Translucency and screen-space refraction are mutually exclusive.
        if is_transluc(material) and not material.lnx_discard and rpdat.rp_translucency_state != 'Off' and not material.lnx_blending and not rpdat.rp_ss_refraction:
            ar.append('translucent')
        elif is_transluc(material) and not material.lnx_discard and not material.lnx_blending and rpdat.rp_ss_refraction:
            ar.append('refraction')
        if rpdat.rp_voxels != "Off" and has_voxels:
            ar.append('voxel')
        if rpdat.rp_renderer == 'Forward' and rpdat.rp_depthprepass and not material.lnx_blending and not material.lnx_particle_flag:
            ar.append('depth')

    # Shadows only for plain mesh materials; transparent materials get a
    # dedicated shadow context.
    if material.lnx_cast_shadow and rpdat.rp_shadows and ('mesh' in ar):
        if 'translucent' in ar or 'refraction' in ar:
            ar.append('shadowmap_transparent')
        else:
            ar.append('shadowmap')

    return ar
|
||||
|
||||
def is_transluc(material):
    """Return whether any node reachable from the material output makes
    the material translucent."""
    output_node = cycles.node_by_type(material.node_tree.nodes, 'OUTPUT_MATERIAL')
    if output_node is None:
        return False
    surface_in = output_node.inputs[0]
    if not surface_in.is_linked:
        return False
    return is_transluc_traverse(surface_in.links[0].from_node)
|
||||
|
||||
def is_transluc_traverse(node):
    """Depth-first search for a translucency-producing node starting at `node`."""
    # TODO: traverse groups
    if is_transluc_type(node):
        return True
    return any(
        socket.is_linked and is_transluc_traverse(socket.links[0].from_node)
        for socket in node.inputs
    )
|
||||
|
||||
|
||||
def is_transluc_type(node: bpy.types.ShaderNode) -> bool:
    """Return whether this single node makes the material translucent."""
    if node.type in ('BSDF_GLASS', 'BSDF_TRANSPARENT', 'BSDF_TRANSLUCENT', 'BSDF_REFRACTION'):
        return True
    if is_leenkx_pbr_node(node):
        opacity = node.inputs['Opacity']
        return opacity.is_linked or opacity.default_value != 1.0
    if node.type == 'BSDF_PRINCIPLED':
        alpha = node.inputs['Alpha']
        return alpha.is_linked or alpha.default_value != 1.0
    return False
|
||||
|
||||
|
||||
def is_leenkx_pbr_node(node: bpy.types.ShaderNode) -> bool:
    """Return whether `node` is an instance of the Leenkx PBR node group."""
    if node.type != 'GROUP':
        return False
    return node.node_tree.name.startswith('Leenkx PBR')
|
||||
|
||||
|
||||
def iter_nodes_leenkxpbr(node_group: bpy.types.NodeTree) -> Generator[bpy.types.Node, None, None]:
    """Yield every Leenkx PBR group node contained in `node_group`."""
    yield from (n for n in node_group.nodes if is_leenkx_pbr_node(n))
|
||||
|
||||
|
||||
def equals_color_socket(socket: bpy.types.NodeSocketColor, value: tuple[float, ...], *, comp_alpha=True) -> bool:
    """Compare a color socket's default value against an RGBA tuple.

    NodeSocketColor.default_value is of bpy_prop_array type that doesn't
    support direct comparison, so channels are compared element-wise;
    alpha (index 3) is ignored when comp_alpha is False.
    """
    channels = 4 if comp_alpha else 3
    return all(socket.default_value[i] == value[i] for i in range(channels))
|
215
leenkx/blender/lnx/material/node_meta.py
Normal file
215
leenkx/blender/lnx/material/node_meta.py
Normal file
@ -0,0 +1,215 @@
|
||||
"""
|
||||
This module contains a list of all material nodes that Leenkx supports
|
||||
(excluding output nodes), as well as Leenkx-related metadata.
|
||||
"""
|
||||
from enum import IntEnum, unique
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
import bpy
|
||||
|
||||
import lnx.material.lnx_nodes.shader_data_node as shader_data_node
|
||||
import lnx.material.cycles_nodes.nodes_color as nodes_color
|
||||
import lnx.material.cycles_nodes.nodes_converter as nodes_converter
|
||||
import lnx.material.cycles_nodes.nodes_input as nodes_input
|
||||
import lnx.material.cycles_nodes.nodes_shader as nodes_shader
|
||||
import lnx.material.cycles_nodes.nodes_texture as nodes_texture
|
||||
import lnx.material.cycles_nodes.nodes_vector as nodes_vector
|
||||
import lnx.material.parser_state
|
||||
|
||||
# Hot-reload support: re-import all node-parser modules when the SDK is
# reloaded inside Blender; otherwise register this module for reloading.
if lnx.is_reload(__name__):
    shader_data_node = lnx.reload_module(shader_data_node)
    nodes_color = lnx.reload_module(nodes_color)
    nodes_converter = lnx.reload_module(nodes_converter)
    nodes_input = lnx.reload_module(nodes_input)
    nodes_shader = lnx.reload_module(nodes_shader)
    nodes_texture = lnx.reload_module(nodes_texture)
    nodes_vector = lnx.reload_module(nodes_vector)
    lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
else:
    lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
@unique
class ComputeDXDYVariant(IntEnum):
    """Controls whether a node must emit dx/dy derivative variants
    during the parser's screen-space derivative passes."""

    ALWAYS = 0
    """Always compute dx/dy variants of the corresponding node.
    Use this for input nodes that represent leafs of the node graph
    if some of their output values vary between fragments.
    """

    NEVER = 1
    """Never compute dx/dy variants of the corresponding node.
    Use this for nodes whose output values do not change with respect
    to fragment positions.
    """

    DYNAMIC = 2
    """Compute dx/dy variants if any input socket of the corresponding node
    is connected to a node that requires dx/dy variants.
    """
|
||||
|
||||
|
||||
@dataclass
class MaterialNodeMeta:
    """Leenkx metadata for one supported material node type."""

    # Use Any here due to contravariance
    parse_func: Callable[[Any, bpy.types.NodeSocket, lnx.material.parser_state.ParserState], Optional[str]]
    """The function used to parse this node and to translate it to GLSL output code."""

    compute_dxdy_variants: ComputeDXDYVariant = ComputeDXDYVariant.DYNAMIC
    """Specifies when this node should compute dx/dy variants
    if the ParserState is in the dx/dy offset pass.
    """
|
||||
|
||||
|
||||
# Registry of every supported material node type (excluding output nodes),
# keyed by Blender's node type identifier (or bl_idname for custom nodes).
ALL_NODES: dict[str, MaterialNodeMeta] = {
    # --- nodes_color
    'BRIGHTCONTRAST': MaterialNodeMeta(parse_func=nodes_color.parse_brightcontrast),
    'CURVE_RGB': MaterialNodeMeta(parse_func=nodes_color.parse_curvergb),
    'GAMMA': MaterialNodeMeta(parse_func=nodes_color.parse_gamma),
    'HUE_SAT': MaterialNodeMeta(parse_func=nodes_color.parse_huesat),
    'INVERT': MaterialNodeMeta(parse_func=nodes_color.parse_invert),
    'LIGHT_FALLOFF': MaterialNodeMeta(parse_func=nodes_color.parse_lightfalloff),
    'MIX': MaterialNodeMeta(parse_func=nodes_color.parse_mix),

    # --- nodes_converter
    'BLACKBODY': MaterialNodeMeta(parse_func=nodes_converter.parse_blackbody),
    'CLAMP': MaterialNodeMeta(parse_func=nodes_converter.parse_clamp),
    'COMBHSV': MaterialNodeMeta(parse_func=nodes_converter.parse_combhsv),
    'COMBRGB': MaterialNodeMeta(parse_func=nodes_converter.parse_combrgb),
    'COMBXYZ': MaterialNodeMeta(parse_func=nodes_converter.parse_combxyz),
    'MAP_RANGE': MaterialNodeMeta(parse_func=nodes_converter.parse_maprange),
    'MATH': MaterialNodeMeta(parse_func=nodes_converter.parse_math),
    'RGBTOBW': MaterialNodeMeta(parse_func=nodes_converter.parse_rgbtobw),
    'SEPHSV': MaterialNodeMeta(parse_func=nodes_converter.parse_sephsv),
    'SEPRGB': MaterialNodeMeta(parse_func=nodes_converter.parse_seprgb),
    'SEPXYZ': MaterialNodeMeta(parse_func=nodes_converter.parse_sepxyz),
    'VALTORGB': MaterialNodeMeta(parse_func=nodes_converter.parse_valtorgb), # ColorRamp
    'VECT_MATH': MaterialNodeMeta(parse_func=nodes_converter.parse_vectormath),
    'WAVELENGTH': MaterialNodeMeta(parse_func=nodes_converter.parse_wavelength),

    # --- nodes_input
    'ATTRIBUTE': MaterialNodeMeta(parse_func=nodes_input.parse_attribute, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS),
    'CAMERA': MaterialNodeMeta(parse_func=nodes_input.parse_camera, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS),
    'FRESNEL': MaterialNodeMeta(parse_func=nodes_input.parse_fresnel, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS),
    'HAIR_INFO': MaterialNodeMeta(parse_func=nodes_input.parse_hairinfo),
    'LAYER_WEIGHT': MaterialNodeMeta(parse_func=nodes_input.parse_layerweight, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS),
    'LIGHT_PATH': MaterialNodeMeta(parse_func=nodes_input.parse_lightpath, compute_dxdy_variants=ComputeDXDYVariant.NEVER),
    'NEW_GEOMETRY': MaterialNodeMeta(parse_func=nodes_input.parse_geometry, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS),
    'OBJECT_INFO': MaterialNodeMeta(parse_func=nodes_input.parse_objectinfo, compute_dxdy_variants=ComputeDXDYVariant.NEVER),
    'PARTICLE_INFO': MaterialNodeMeta(parse_func=nodes_input.parse_particleinfo, compute_dxdy_variants=ComputeDXDYVariant.NEVER),
    'RGB': MaterialNodeMeta(parse_func=nodes_input.parse_rgb, compute_dxdy_variants=ComputeDXDYVariant.NEVER),
    'TANGENT': MaterialNodeMeta(parse_func=nodes_input.parse_tangent, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS),
    'TEX_COORD': MaterialNodeMeta(parse_func=nodes_input.parse_texcoord, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS),
    'UVMAP': MaterialNodeMeta(parse_func=nodes_input.parse_uvmap, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS),
    'VALUE': MaterialNodeMeta(parse_func=nodes_input.parse_value, compute_dxdy_variants=ComputeDXDYVariant.NEVER),
    'VERTEX_COLOR': MaterialNodeMeta(parse_func=nodes_input.parse_vertex_color),
    'WIREFRAME': MaterialNodeMeta(parse_func=nodes_input.parse_wireframe, compute_dxdy_variants=ComputeDXDYVariant.NEVER),

    # --- nodes_shader
    'ADD_SHADER': MaterialNodeMeta(parse_func=nodes_shader.parse_addshader),
    'AMBIENT_OCCLUSION': MaterialNodeMeta(parse_func=nodes_shader.parse_ambientocclusion),
    'BSDF_ANISOTROPIC': MaterialNodeMeta(parse_func=nodes_shader.parse_bsdfanisotropic),
    'BSDF_DIFFUSE': MaterialNodeMeta(parse_func=nodes_shader.parse_bsdfdiffuse),
    'BSDF_GLASS': MaterialNodeMeta(parse_func=nodes_shader.parse_bsdfglass),
    'BSDF_PRINCIPLED': MaterialNodeMeta(parse_func=nodes_shader.parse_bsdfprincipled),
    'BSDF_TRANSLUCENT': MaterialNodeMeta(parse_func=nodes_shader.parse_bsdftranslucent),
    'BSDF_TRANSPARENT': MaterialNodeMeta(parse_func=nodes_shader.parse_bsdftransparent),
    'BSDF_REFRACTION': MaterialNodeMeta(parse_func=nodes_shader.parse_bsdfrefraction),
    'EMISSION': MaterialNodeMeta(parse_func=nodes_shader.parse_emission),
    'HOLDOUT': MaterialNodeMeta(parse_func=nodes_shader.parse_holdout, compute_dxdy_variants=ComputeDXDYVariant.NEVER),
    'MIX_SHADER': MaterialNodeMeta(parse_func=nodes_shader.parse_mixshader),
    'SUBSURFACE_SCATTERING': MaterialNodeMeta(parse_func=nodes_shader.parse_subsurfacescattering),

    # --- nodes_texture
    'TEX_BRICK': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_brick),
    'TEX_CHECKER': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_checker),
    'TEX_ENVIRONMENT': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_environment),
    'TEX_GRADIENT': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_gradient),
    'TEX_IMAGE': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_image),
    'TEX_MAGIC': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_magic),
    'TEX_NOISE': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_noise),
    'TEX_POINTDENSITY': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_pointdensity, compute_dxdy_variants=ComputeDXDYVariant.NEVER),
    'TEX_SKY': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_sky),
    'TEX_VORONOI': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_voronoi),
    'TEX_WAVE': MaterialNodeMeta(parse_func=nodes_texture.parse_tex_wave),

    # --- nodes_vector
    'BUMP': MaterialNodeMeta(parse_func=nodes_vector.parse_bump),
    'CURVE_VEC': MaterialNodeMeta(parse_func=nodes_vector.parse_curvevec),
    'DISPLACEMENT': MaterialNodeMeta(parse_func=nodes_vector.parse_displacement),
    'MAPPING': MaterialNodeMeta(parse_func=nodes_vector.parse_mapping),
    'NORMAL': MaterialNodeMeta(parse_func=nodes_vector.parse_normal),
    'NORMAL_MAP': MaterialNodeMeta(parse_func=nodes_vector.parse_normalmap),
    'VECTOR_ROTATE': MaterialNodeMeta(parse_func=nodes_vector.parse_vectorrotate),
    'VECT_TRANSFORM': MaterialNodeMeta(parse_func=nodes_vector.parse_vectortransform),

    # --- lnx_nodes
    'LnxShaderDataNode': MaterialNodeMeta(parse_func=shader_data_node.ShaderDataNode.parse, compute_dxdy_variants=ComputeDXDYVariant.ALWAYS)
}

# Node types whose availability depends on the running Blender version.
if bpy.app.version > (3, 2, 0):
    ALL_NODES['SEPARATE_COLOR'] = MaterialNodeMeta(parse_func=nodes_converter.parse_separate_color)
    ALL_NODES['COMBINE_COLOR'] = MaterialNodeMeta(parse_func=nodes_converter.parse_combine_color)
if bpy.app.version < (4, 1, 0):
    # Removed from Blender in 4.1
    ALL_NODES['BSDF_VELVET'] = MaterialNodeMeta(parse_func=nodes_shader.parse_bsdfvelvet)
    ALL_NODES['TEX_MUSGRAVE'] = MaterialNodeMeta(parse_func=nodes_texture.parse_tex_musgrave)
if bpy.app.version >= (4, 0, 0):
    ALL_NODES['BSDF_SHEEN'] = MaterialNodeMeta(parse_func=nodes_shader.parse_bsdfsheen)

ALL_NODES['BSDF_GLOSSY'] = MaterialNodeMeta(parse_func=nodes_shader.parse_bsdfglossy)
||||
|
||||
def get_node_meta(node: bpy.types.Node) -> MaterialNodeMeta:
    """Look up the Leenkx metadata entry for a material node.

    Custom (Python-defined) nodes are registered under their bl_idname,
    built-in nodes under their type string. Raises KeyError for
    unsupported node types.
    """
    if node.type == 'CUSTOM':
        return ALL_NODES[node.bl_idname]
    return ALL_NODES[node.type]
|
128
leenkx/blender/lnx/material/parser_state.py
Normal file
128
leenkx/blender/lnx/material/parser_state.py
Normal file
@ -0,0 +1,128 @@
|
||||
from enum import IntEnum, unique
|
||||
from typing import List, Set, Tuple, Union, Optional
|
||||
|
||||
import bpy
|
||||
|
||||
import lnx
|
||||
from lnx.material.shader import Shader, ShaderContext, vec3str, floatstr
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import Shader, ShaderContext, vec3str, floatstr
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
@unique
class ParserContext(IntEnum):
    """Identifies which kind of node tree is being parsed."""

    # Material node tree attached to an object
    OBJECT = 0
    # TEXTURE = 1  # Texture node trees are not supported yet
    # World (environment) node tree
    WORLD = 2
|
||||
|
||||
|
||||
@unique
class ParserPass(IntEnum):
    """Context ("pass") in which a node (sub)tree is currently parsed.

    A node tree, or a subtree of it, may need to be parsed several times
    in different contexts; nodes can emit different shader code depending
    on the parser state's current pass (see the individual items below).
    """

    REGULAR = 0
    """The tree is parsed to generate regular shader code."""

    DX_SCREEN_SPACE = 1
    """The tree is parsed to output shader code that computes the
    derivative of a value with respect to the screen's x coordinate."""

    DY_SCREEN_SPACE = 2
    """The tree is parsed to output shader code that computes the
    derivative of a value with respect to the screen's y coordinate."""
|
||||
|
||||
|
||||
class ParserState:
    """Keeps track of the mutable state while parsing a shader node tree."""

    def __init__(self, context: ParserContext, tree_name: str, world: Optional[bpy.types.World] = None):
        self.context = context
        self.tree_name = tree_name

        self.current_pass = ParserPass.REGULAR

        # The current world, if a world node tree is being parsed
        self.world = world

        # Active shader - frag for surface / tese for displacement
        self.curshader: Shader = None
        self.con: ShaderContext = None

        self.vert: Shader = None
        self.frag: Shader = None
        self.geom: Shader = None
        self.tesc: Shader = None
        self.tese: Shader = None

        # Node group stack; the last entry is the innermost group
        self.parents: List[bpy.types.Node] = []

        # Names of already-computed nodes so each node is parsed only once
        self.parsed: Set[str] = set()

        # Which parts of the node tree should be parsed
        self.parse_surface = True
        self.parse_opacity = True
        self.parse_displacement = True
        self.basecol_only = False

        self.procedurals_written: set[Shader] = set()

        # Whether radiance/irradiance was already exported (currently only
        # an already existing texture can be converted)
        self.radiance_written = False

        self.normal_parsed = False

        self.dxdy_varying_input_value = False
        """Whether the result of the previously parsed node differs
        between fragments and represents an input value to which to apply
        dx/dy offsets (if required by the parser pass).
        """

        # Shader output values, initialized to their defaults
        self.out_basecol: vec3str = 'vec3(0.8)'
        self.out_roughness: floatstr = '0.0'
        self.out_metallic: floatstr = '0.0'
        self.out_occlusion: floatstr = '1.0'
        self.out_specular: floatstr = '1.0'
        self.out_opacity: floatstr = '1.0'
        self.out_ior: floatstr = '1.450'
        self.out_emission_col: vec3str = 'vec3(0.0)'

    def reset_outs(self):
        """Reset the shader output values to their default values."""
        (self.out_basecol, self.out_roughness, self.out_metallic,
         self.out_occlusion, self.out_specular, self.out_opacity,
         self.out_ior, self.out_emission_col) = (
            'vec3(0.8)', '0.0', '0.0', '1.0', '1.0', '1.0', '1.450', 'vec3(0.0)')

    def get_outs(self) -> Tuple[vec3str, floatstr, floatstr, floatstr, floatstr, floatstr, floatstr, vec3str]:
        """Return the shader output values as a tuple."""
        return (self.out_basecol, self.out_roughness, self.out_metallic,
                self.out_occlusion, self.out_specular, self.out_opacity,
                self.out_ior, self.out_emission_col)

    def get_parser_pass_suffix(self) -> str:
        """Return a suffix for the current parser pass that can be appended
        to shader variables to avoid compilation errors due to redefinitions.
        """
        if self.current_pass == ParserPass.DX_SCREEN_SPACE:
            return '_dx'
        if self.current_pass == ParserPass.DY_SCREEN_SPACE:
            return '_dy'
        return ''
|
459
leenkx/blender/lnx/material/shader.py
Normal file
459
leenkx/blender/lnx/material/shader.py
Normal file
@ -0,0 +1,459 @@
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
# Type aliases for type hints to make it easier to see which kind of
|
||||
# shader data type is stored in a string
|
||||
floatstr = str
|
||||
vec2str = str
|
||||
vec3str = str
|
||||
vec4str = str
|
||||
|
||||
|
||||
class ShaderData:
    """Collects the shader contexts and exported shader-data dict for one material."""

    def __init__(self, material):
        self.material = material
        self.contexts = []
        self.global_elems = [] # bone, weight, ipos, irot, iscl
        # 'sd' is the single shader_data entry wrapped by 'data'
        self.sd = {}
        self.data = {'shader_datas': [self.sd]}
        self.matname = lnx.utils.safesrc(lnx.utils.asset_name(material))
        self.sd['name'] = self.matname + '_data'
        self.sd['contexts'] = []

    def add_context(self, props) -> 'ShaderContext':
        """Create a ShaderContext from `props`, register its exported dict
        and the global vertex elements, and return the context."""
        con = ShaderContext(self.material, self.sd, props)
        # NOTE(review): 'con' is a freshly created ShaderContext while
        # sd['contexts'] holds plain dicts (from con.get()), so this
        # membership test can never be True and the branch is always taken.
        # Confirm whether de-duplication (e.g. by context name) was intended.
        if con not in self.sd['contexts']:
            for elem in self.global_elems:
                con.add_elem(elem['name'], elem['data'])
            self.sd['contexts'].append(con.get())
        return con

    def get(self):
        """Return the exported shader data dict ({'shader_datas': [...]})."""
        return self.data
|
||||
|
||||
class ShaderContext:
    """One render context of a material: pipeline state plus the shader
    stages, vertex layout, texture units and constants belonging to it.

    The exported representation is assembled into `self.data`; the
    individual shader stages are created lazily via the make_*() methods.
    """

    def __init__(self, material, shader_data, props):
        # Shader stage objects, created on demand by make_vert()/make_frag()/...
        self.vert = None
        self.frag = None
        self.geom = None
        self.tesc = None
        self.tese = None
        self.material = material
        self.matname = lnx.utils.safesrc(lnx.utils.asset_name(material))
        self.shader_data = shader_data
        # Exported context dict; required pipeline state first
        self.data = {}
        self.data['name'] = props['name']
        self.data['depth_write'] = props['depth_write']
        self.data['compare_mode'] = props['compare_mode']
        self.data['cull_mode'] = props['cull_mode']
        # Vertex layout: caller-provided, or a compact packed default
        if 'vertex_elements' in props:
            self.data['vertex_elements'] = props['vertex_elements']
        else:
            self.data['vertex_elements'] = [{'name': 'pos', 'data': 'short4norm'}, {'name': 'nor', 'data': 'short2norm'}] # (p.xyz, n.z), (n.xy)
        # Optional pipeline state, copied over only when present in props
        if 'blend_source' in props:
            self.data['blend_source'] = props['blend_source']
        if 'blend_destination' in props:
            self.data['blend_destination'] = props['blend_destination']
        if 'blend_operation' in props:
            self.data['blend_operation'] = props['blend_operation']
        if 'alpha_blend_source' in props:
            self.data['alpha_blend_source'] = props['alpha_blend_source']
        if 'alpha_blend_destination' in props:
            self.data['alpha_blend_destination'] = props['alpha_blend_destination']
        if 'alpha_blend_operation' in props:
            self.data['alpha_blend_operation'] = props['alpha_blend_operation']
        if 'color_writes_red' in props:
            self.data['color_writes_red'] = props['color_writes_red']
        if 'color_writes_green' in props:
            self.data['color_writes_green'] = props['color_writes_green']
        if 'color_writes_blue' in props:
            self.data['color_writes_blue'] = props['color_writes_blue']
        if 'color_writes_alpha' in props:
            self.data['color_writes_alpha'] = props['color_writes_alpha']
        if 'color_attachments' in props:
            self.data['color_attachments'] = props['color_attachments']

        # Convenience aliases into the exported dict
        self.data['texture_units'] = []
        self.tunits = self.data['texture_units']
        self.data['constants'] = []
        self.constants = self.data['constants']

    def add_elem(self, name, data):
        """Add a vertex element (if not already present) and re-sort the layout."""
        elem = { 'name': name, 'data': data }
        if elem not in self.data['vertex_elements']:
            self.data['vertex_elements'].append(elem)
            self.sort_vs()

    def sort_vs(self):
        """Sort the vertex elements into the canonical attribute order."""
        vs = []
        ar = ['pos', 'nor', 'tex', 'tex1', 'morph', 'col', 'tang', 'bone', 'weight', 'ipos', 'irot', 'iscl']
        for ename in ar:
            elem = self.get_elem(ename)
            if elem != None:
                vs.append(elem)
        self.data['vertex_elements'] = vs

    def is_elem(self, name):
        """Return True if a vertex element with the given name exists."""
        for elem in self.data['vertex_elements']:
            if elem['name'] == name:
                return True
        return False

    def get_elem(self, name):
        """Return the vertex element dict with the given name, or None."""
        for elem in self.data['vertex_elements']:
            if elem['name'] == name:
                return elem
        return None

    def get(self):
        """Return the exported context dict."""
        return self.data

    def add_constant(self, ctype, name, link=None, default_value=None, is_lnx_mat_param=None):
        """Register a shader constant (uniform) by name; duplicates are ignored.

        Only 'float' and 'vec3' constants store a default value in the export.
        """
        for c in self.constants:
            if c['name'] == name:
                return

        c = { 'name': name, 'type': ctype}
        if link is not None:
            c['link'] = link
        if default_value is not None:
            if ctype == 'float':
                c['floatValue'] = default_value
            if ctype == 'vec3':
                c['vec3Value'] = default_value
        if is_lnx_mat_param is not None:
            c['is_lnx_parameter'] = True
        self.constants.append(c)

    def add_texture_unit(self, name, link=None, is_image=None,
                         addr_u=None, addr_v=None,
                         filter_min=None, filter_mag=None, mipmap_filter=None,
                         default_value=None, is_lnx_mat_param=None):
        """Register a texture unit by name; duplicates are ignored.

        Optional sampler state (addressing/filtering) and a default image
        are only written into the export when explicitly provided.
        """
        for c in self.tunits:
            if c['name'] == name:
                return

        c = {'name': name}
        if link is not None:
            c['link'] = link
        if is_image is not None:
            c['is_image'] = is_image
        if addr_u is not None:
            c['addressing_u'] = addr_u
        if addr_v is not None:
            c['addressing_v'] = addr_v
        if filter_min is not None:
            c['filter_min'] = filter_min
        if filter_mag is not None:
            c['filter_mag'] = filter_mag
        if mipmap_filter is not None:
            c['mipmap_filter'] = mipmap_filter
        if default_value is not None:
            c['default_image_file'] = default_value
        if is_lnx_mat_param is not None:
            c['is_lnx_parameter'] = True

        self.tunits.append(c)

    def make_vert(self, custom_name: str = None):
        """Create the vertex shader stage and record its output file name."""
        if custom_name is None:
            self.data['vertex_shader'] = self.matname + '_' + self.data['name'] + '.vert'
        else:
            self.data['vertex_shader'] = custom_name + '.vert'
        self.vert = Shader(self, 'vert')
        return self.vert

    def make_frag(self, custom_name: str = None):
        """Create the fragment shader stage and record its output file name."""
        if custom_name is None:
            self.data['fragment_shader'] = self.matname + '_' + self.data['name'] + '.frag'
        else:
            self.data['fragment_shader'] = custom_name + '.frag'
        self.frag = Shader(self, 'frag')
        return self.frag

    def make_geom(self, custom_name: str = None):
        """Create the geometry shader stage and record its output file name."""
        if custom_name is None:
            self.data['geometry_shader'] = self.matname + '_' + self.data['name'] + '.geom'
        else:
            self.data['geometry_shader'] = custom_name + '.geom'
        self.geom = Shader(self, 'geom')
        return self.geom

    def make_tesc(self, custom_name: str = None):
        """Create the tessellation-control shader stage and record its output file name."""
        if custom_name is None:
            self.data['tesscontrol_shader'] = self.matname + '_' + self.data['name'] + '.tesc'
        else:
            self.data['tesscontrol_shader'] = custom_name + '.tesc'
        self.tesc = Shader(self, 'tesc')
        return self.tesc

    def make_tese(self, custom_name: str = None):
        """Create the tessellation-evaluation shader stage and record its output file name."""
        if custom_name is None:
            self.data['tesseval_shader'] = self.matname + '_' + self.data['name'] + '.tese'
        else:
            self.data['tesseval_shader'] = custom_name + '.tese'
        self.tese = Shader(self, 'tese')
        return self.tese
|
||||
|
||||
|
||||
class Shader:
    """Accumulates the pieces of one GLSL shader stage (ins/outs, uniforms,
    constants, functions and main-body sections) and assembles the final
    source text in get().

    The main body is split into several sections (attribs, textures,
    normal, init, main) that are emitted in a fixed order.
    """

    def __init__(self, context, shader_type):
        # Owning ShaderContext; uniforms are also registered there
        self.context = context
        # One of 'vert', 'frag', 'geom', 'tesc', 'tese'
        self.shader_type = shader_type
        self.includes = []
        self.ins = []
        self.outs = []
        self.uniforms_top = []
        self.uniforms = []
        self.constants = []
        # Function name -> full function source
        self.functions = {}
        # Main-body sections, concatenated in reverse declaration order in get()
        self.main = ''
        self.main_init = ''
        self.main_normal = ''
        self.main_textures = ''
        self.main_attribs = ''
        self.header = ''
        # Routing flags for write(): which section receives the next lines
        self.write_pre = False
        self.write_normal = 0
        self.write_textures = 0
        self.tab = 1
        self.vstruct_as_vsin = True
        # When locked, write() is a no-op
        self.lock = False
        self.geom_passthrough = False
        self.is_linked = False # Use already generated shader
        self.noprocessing = False

    def has_include(self, s):
        """Return True if the include was already added."""
        return s in self.includes

    def add_include(self, s):
        """Append an include once."""
        if not self.has_include(s):
            self.includes.append(s)

    def add_include_front(self, s):
        """Insert an include at the front (but after compiled.inc, if present)."""
        if not self.has_include(s):
            pos = 0
            # make sure compiled.inc is always on top
            if len(self.includes) > 0 and self.includes[0] == 'compiled.inc':
                pos = 1
            self.includes.insert(pos, s)

    def add_in(self, s):
        """Add an input declaration (e.g. 'vec3 wnormal') once."""
        if s not in self.ins:
            self.ins.append(s)

    def add_out(self, s):
        """Add an output declaration once."""
        if s not in self.outs:
            self.outs.append(s)

    def add_uniform(self, s, link=None, included=False, top=False,
                    tex_addr_u=None, tex_addr_v=None,
                    tex_filter_min=None, tex_filter_mag=None,
                    tex_mipmap_filter=None, default_value=None, is_lnx_mat_param=None):
        """Add a uniform declaration and register it with the context.

        Samplers/images become texture units, everything else becomes a
        constant. `included=True` registers the unit/constant without
        emitting the declaration; `top=True` emits it before the includes.
        """
        ar = s.split(' ')
        # layout(RGBA8) image3D voxels
        utype = ar[-2]
        uname = ar[-1]
        if utype.startswith('sampler') or utype.startswith('image') or utype.startswith('uimage'):
            is_image = True if (utype.startswith('image') or utype.startswith('uimage')) else None
            if uname[-1] == ']': # Array of samplers - sampler2D mySamplers[2]
                # Add individual units - mySamplers[0], mySamplers[1]
                # NOTE(review): uname[-2] reads a single character, so this
                # assumes a single-digit array size — confirm callers never
                # declare arrays of 10+ samplers.
                for i in range(int(uname[-2])):
                    uname_array = uname[:-2] + str(i) + ']'
                    self.context.add_texture_unit(
                        uname_array, link, is_image,
                        tex_addr_u, tex_addr_v,
                        tex_filter_min, tex_filter_mag, tex_mipmap_filter)
            else:
                self.context.add_texture_unit(
                    uname, link, is_image,
                    tex_addr_u, tex_addr_v,
                    tex_filter_min, tex_filter_mag, tex_mipmap_filter,
                    default_value=default_value, is_lnx_mat_param=is_lnx_mat_param)
        else:
            # Prefer vec4[] for d3d to avoid padding
            if ar[0] == 'float' and '[' in ar[1]:
                ar[0] = 'floats'
                ar[1] = ar[1].split('[', 1)[0]
            elif ar[0] == 'vec4' and '[' in ar[1]:
                ar[0] = 'floats'
                ar[1] = ar[1].split('[', 1)[0]
            elif ar[0] == 'mat4' and '[' in ar[1]:
                ar[0] = 'floats'
                ar[1] = ar[1].split('[', 1)[0]
            self.context.add_constant(ar[0], ar[1], link=link, default_value=default_value, is_lnx_mat_param=is_lnx_mat_param)
        if top:
            if not included and s not in self.uniforms_top:
                self.uniforms_top.append(s)
        elif not included and s not in self.uniforms:
            self.uniforms.append(s)

    def add_const(self, type_str: str, name: str, value_str: str, array_size: int = 0):
        """
        Add a global constant to the shader.

        Parameters
        ----------
        type_str: str
            The name of the type, like 'float' or 'vec3'. If the
            constant is an array, there is no need to add `[]` to the
            type
        name: str
            The name of the variable
        value_str: str
            The value of the constant as a string
        array_size: int
            If not 0 (default value), create an array with the given size
        """
        if array_size == 0:
            self.constants.append(f'{type_str} {name} = {value_str}')
        elif array_size > 0:
            self.constants.append(f'{type_str} {name}[{array_size}] = {type_str}[]({value_str})')

    def add_function(self, s):
        """Add a function definition, keyed (and de-duplicated) by its name."""
        fname = s.split('(', 1)[0]
        if fname in self.functions:
            return
        self.functions[fname] = s

    def contains(self, s):
        """Return True if the snippet already occurs in any main section or input."""
        return s in self.main or \
               s in self.main_init or \
               s in self.main_normal or \
               s in self.ins or \
               s in self.main_textures or \
               s in self.main_attribs

    def replace(self, old, new):
        """Textually replace a token in all main sections and uniforms."""
        self.main = self.main.replace(old, new)
        self.main_init = self.main_init.replace(old, new)
        self.main_normal = self.main_normal.replace(old, new)
        self.main_textures = self.main_textures.replace(old, new)
        self.main_attribs = self.main_attribs.replace(old, new)
        self.uniforms = [u.replace(old, new) for u in self.uniforms]

    def write_init(self, s, unique=True):
        """Prepend to the main function. If `unique` is true (default), look for other occurrences first."""
        if unique and self.contains(s):
            return

        self.main_init = '\t' + s + '\n' + self.main_init

    def write(self, s):
        """Append a line to the currently active main section.

        No-op while locked; routed to textures/normal/init depending on
        the write_* flags, otherwise appended to main at the current tab.
        """
        if self.lock:
            return
        if self.write_textures > 0:
            self.main_textures += '\t' * 1 + s + '\n'
        elif self.write_normal > 0:
            self.main_normal += '\t' * 1 + s + '\n'
        elif self.write_pre:
            self.main_init += '\t' * 1 + s + '\n'
        else:
            self.main += '\t' * self.tab + s + '\n'

    def write_header(self, s):
        """Append a line to the header (emitted right after #version)."""
        self.header += s + '\n'

    def write_attrib(self, s):
        """Append a line to the attribute section (emitted first in main())."""
        self.main_attribs += '\t' + s + '\n'

    def is_equal(self, sh):
        """Return True if this shader generates the same body and inputs as `sh`."""
        self.vstruct_to_vsin()
        return self.ins == sh.ins and \
               self.main == sh.main and \
               self.main_normal == sh.main_normal and \
               self.main_init == sh.main_init and \
               self.main_textures == sh.main_textures and \
               self.main_attribs == sh.main_attribs

    def data_size(self, data):
        """Map a vertex-element data format to its component count (as a string).

        Returns None for unknown formats.
        """
        if data == 'float1':
            return '1'
        elif data == 'float2':
            return '2'
        elif data == 'float3':
            return '3'
        elif data == 'float4':
            return '4'
        elif data == 'short2norm':
            return '2'
        elif data == 'short4norm':
            return '4'

    def vstruct_to_vsin(self):
        """Derive vertex-shader inputs from the context's vertex elements.

        Only applies to vertex shaders with no explicit inputs yet.
        """
        if self.shader_type != 'vert' or self.ins != [] or not self.vstruct_as_vsin: # Vertex structure as vertex shader input
            return
        vs = self.context.data['vertex_elements']
        for e in vs:
            self.add_in('vec' + self.data_size(e['data']) + ' ' + e['name'])

    def get(self):
        """Assemble and return the full GLSL source for this stage.

        If `noprocessing` is set, `main` is returned verbatim. Otherwise
        the output is built in a fixed order: version, header, stage
        layout, top uniforms, includes, ins/outs, uniforms, constants,
        functions, then main() from the accumulated sections.
        """
        if self.noprocessing:
            return self.main

        s = '#version 450\n'

        s += self.header

        in_ext = ''
        out_ext = ''

        if self.shader_type == 'vert':
            self.vstruct_to_vsin()

        elif self.shader_type == 'tesc':
            in_ext = '[]'
            out_ext = '[]'
            s += 'layout(vertices = 3) out;\n'
            # Gen outs: each input gets a matching tc_-prefixed output
            for sin in self.ins:
                ar = sin.rsplit(' ', 1) # vec3 wnormal
                tc_s = 'tc_' + ar[1]
                self.add_out(ar[0] + ' ' + tc_s)
                # Pass data
                self.write('{0}[gl_InvocationID] = {1}[gl_InvocationID];'.format(tc_s, ar[1]))

        elif self.shader_type == 'tese':
            in_ext = '[]'
            s += 'layout(triangles, equal_spacing, ccw) in;\n'

        elif self.shader_type == 'geom':
            in_ext = '[]'
            s += 'layout(triangles) in;\n'
            if not self.geom_passthrough:
                s += 'layout(triangle_strip) out;\n'
                s += 'layout(max_vertices=3) out;\n'

        for a in self.uniforms_top:
            s += 'uniform ' + a + ';\n'
        for a in self.includes:
            s += '#include "' + a + '"\n'
        if self.geom_passthrough:
            s += 'layout(passthrough) in gl_PerVertex { vec4 gl_Position; } gl_in[];\n'
        for a in self.ins:
            if self.geom_passthrough:
                s += 'layout(passthrough) '
            s += 'in {0}{1};\n'.format(a, in_ext)
        for a in self.outs:
            if not self.geom_passthrough:
                s += 'out {0}{1};\n'.format(a, out_ext)
        for a in self.uniforms:
            s += 'uniform ' + a + ';\n'
        for c in self.constants:
            s += 'const ' + c + ';\n'
        for f in self.functions:
            s += self.functions[f] + '\n'
        s += 'void main() {\n'
        s += self.main_attribs
        s += self.main_textures
        s += self.main_normal
        s += self.main_init
        s += self.main
        s += '}\n'
        return s
|
Reference in New Issue
Block a user