forked from LeenkxTeam/LNXSDK
Update Files
This commit is contained in:
5
leenkx/blender/lnx/material/cycles_nodes/__init__.py
Normal file
5
leenkx/blender/lnx/material/cycles_nodes/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
import glob
|
||||
from os.path import dirname, basename, isfile
|
||||
|
||||
# Discover every Python module in this directory so that
# `from lnx.material.cycles_nodes import *` pulls them all in.
modules = glob.glob(dirname(__file__) + "/*.py")
# Fix: exclude this __init__ module itself. Listing it in __all__ makes a
# wildcard import attempt to import a bogus '__init__' submodule.
__all__ = [basename(f)[:-3] for f in modules if isfile(f) and not f.endswith('__init__.py')]
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
171
leenkx/blender/lnx/material/cycles_nodes/nodes_color.py
Normal file
171
leenkx/blender/lnx/material/cycles_nodes/nodes_color.py
Normal file
@ -0,0 +1,171 @@
|
||||
import bpy
|
||||
|
||||
import lnx
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
from lnx.material.parser_state import ParserState
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
log = lnx.reload_module(log)
|
||||
c = lnx.reload_module(c)
|
||||
c_functions = lnx.reload_module(c_functions)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_brightcontrast(node: bpy.types.ShaderNodeBrightContrast, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Bright/Contrast node: emit a call to the brightcontrast() shader helper."""
    color = c.parse_vector_input(node.inputs[0])
    brightness = c.parse_value_input(node.inputs[1])
    contrast = c.parse_value_input(node.inputs[2])

    # Make sure the helper is part of the generated shader
    state.curshader.add_function(c_functions.str_brightcontrast)

    return 'brightcontrast({0}, {1}, {2})'.format(color, brightness, contrast)
|
||||
|
||||
|
||||
def parse_gamma(node: bpy.types.ShaderNodeGamma, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Gamma node: per-channel pow(color, gamma)."""
    color = c.parse_vector_input(node.inputs[0])
    exponent = c.parse_value_input(node.inputs[1])
    return f'pow({color}, vec3({exponent}))'
|
||||
|
||||
|
||||
def parse_huesat(node: bpy.types.ShaderNodeHueSaturation, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Hue/Saturation/Value node via the hue_sat() shader helper."""
    state.curshader.add_function(c_functions.str_hue_sat)

    # Inputs 0-3 are scalar controls, input 4 is the color
    hue, sat, val, fac = (c.parse_value_input(node.inputs[i]) for i in range(4))
    col = c.parse_vector_input(node.inputs[4])

    return f'hue_sat({col}, vec4({hue}-0.5, {sat}, {val}, 1.0-{fac}))'
|
||||
|
||||
|
||||
def parse_invert(node: bpy.types.ShaderNodeInvert, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Invert node: blend between the input color and its complement."""
    factor = c.parse_value_input(node.inputs[0])
    color = c.parse_vector_input(node.inputs[1])
    return f'mix({color}, vec3(1.0) - ({color}), {factor})'
|
||||
|
||||
|
||||
def parse_mix(node: bpy.types.ShaderNodeMixRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> str:
    """Mix node: dispatch to the parser matching the node's data type."""
    handlers = {
        'FLOAT': _parse_mixfloat,
        'VECTOR': _parse_mixvec,
        'RGBA': _parse_mixrgb,
    }
    handler = handlers.get(node.data_type)
    if handler is None:
        log.warn(f'Mix node: unsupported data type {node.data_type}.')
        return '0.0'
    return handler(node, out_socket, state)
|
||||
|
||||
|
||||
def _parse_mixfloat(node: bpy.types.ShaderNodeMixRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Float variant of the Mix node."""
    factor = c.parse_value_input(node.inputs[0])
    if node.clamp_factor:
        factor = f'clamp({factor}, 0.0, 1.0)'

    # Float operands live in inputs 2 and 3
    value_a = c.parse_value_input(node.inputs[2])
    value_b = c.parse_value_input(node.inputs[3])
    return f'mix({value_a}, {value_b}, {factor})'
|
||||
|
||||
|
||||
def _parse_mixvec(node: bpy.types.ShaderNodeMixRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Vector variant of the Mix node.

    The factor is either a single scalar (UNIFORM, input 0) or a per-component
    vector (NON_UNIFORM, input 1).
    """
    if node.factor_mode == 'UNIFORM':
        factor = c.parse_value_input(node.inputs[0])
        if node.clamp_factor:
            factor = f'clamp({factor}, 0.0, 1.0)'
    elif node.factor_mode == 'NON_UNIFORM':
        factor = c.parse_vector_input(node.inputs[1])
        if node.clamp_factor:
            factor = f'clamp({factor}, vec3(0.0), vec3(1.0))'
    else:
        log.warn(f'Mix node: unsupported factor mode {node.factor_mode}.')
        return 'vec3(0.0, 0.0, 0.0)'

    # Vector operands live in inputs 4 and 5
    vec_a = c.parse_vector_input(node.inputs[4])
    vec_b = c.parse_vector_input(node.inputs[5])
    return f'mix({vec_a}, {vec_b}, {factor})'
|
||||
|
||||
|
||||
def _parse_mixrgb(node: bpy.types.ShaderNodeMixRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """RGBA variant of the Mix node.

    Looks up a GLSL template for the node's blend type; several blend modes
    are not implemented in the shader library and deliberately fall back to
    a plain mix.
    """
    col1 = c.parse_vector_input(node.inputs[6])
    col2 = c.parse_vector_input(node.inputs[7])

    # Store factor in variable for linked factor input
    if node.inputs[0].is_linked:
        fac = c.node_name(node.name) + '_fac' + state.get_parser_pass_suffix()
        state.curshader.write('float {0} = {1};'.format(fac, c.parse_value_input(node.inputs[0])))
    else:
        fac = c.parse_value_input(node.inputs[0])

    if node.clamp_factor:
        fac = f'clamp({fac}, 0.0, 1.0)'

    # TODO: Do not mix if factor is constant 0.0 or 1.0?

    # Templates are formatted with {0}=col1, {1}=col2, {2}=factor
    plain_mix = 'mix({0}, {1}, {2})'
    blend_templates = {
        'MIX': plain_mix,
        'ADD': 'mix({0}, {0} + {1}, {2})',
        'MULTIPLY': 'mix({0}, {0} * {1}, {2})',
        'SUBTRACT': 'mix({0}, {0} - {1}, {2})',
        'SCREEN': '(vec3(1.0) - (vec3(1.0 - {2}) + {2} * (vec3(1.0) - {1})) * (vec3(1.0) - {0}))',
        'DIVIDE': '(vec3((1.0 - {2}) * {0} + {2} * {0} / {1}))',
        'DIFFERENCE': 'mix({0}, abs({0} - {1}), {2})',
        'DARKEN': 'min({0}, {1} * {2})',
        'LIGHTEN': 'max({0}, {1} * {2})',
        # Not implemented, revert to plain mix:
        'OVERLAY': plain_mix,
        'DODGE': plain_mix,
        'BURN': plain_mix,
        'HUE': plain_mix,
        'SATURATION': plain_mix,
        'VALUE': plain_mix,
        'COLOR': plain_mix,
        'SOFT_LIGHT': '((1.0 - {2}) * {0} + {2} * ((vec3(1.0) - {0}) * {1} * {0} + {0} * (vec3(1.0) - (vec3(1.0) - {1}) * (vec3(1.0) - {0}))))',
        'LINEAR_LIGHT': plain_mix,
        # LINEAR_LIGHT formula (disabled): '({0} + {2} * (2.0 * ({1} - vec3(0.5))))'
    }

    template = blend_templates.get(node.blend_type)
    if template is None:
        log.warn(f'MixRGB node: unsupported blend type {node.blend_type}.')
        return col1

    out_col = template.format(col1, col2, fac)
    if node.clamp_result:
        return 'clamp({0}, vec3(0.0), vec3(1.0))'.format(out_col)
    return out_col
|
||||
|
||||
|
||||
def parse_curvergb(node: bpy.types.ShaderNodeRGBCurve, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """RGB Curves node: evaluate the per-channel curves on the input color.

    Curves 0-2 are applied per channel (R, G, B); curve 3 is sampled once per
    channel as well — presumably the combined curve, TODO confirm against
    Blender's mapping layout. The two results are combined via a square root
    and scaled by the factor input.
    """
    fac = c.parse_value_input(node.inputs[0])
    vec = c.parse_vector_input(node.inputs[1])
    curves = node.mapping.curves
    name = c.node_name(node.name)
    # mapping.curves[0].points[0].handle_type
    # Format args: {0}-{2} per-channel curves, {3} factor, {4}-{6} curve 3
    # sampled with each component — keep this ordering in sync with the
    # template string.
    return '(sqrt(vec3({0}, {1}, {2}) * vec3({4}, {5}, {6})) * {3})'.format(
        c.vector_curve(name + '0', vec + '.x', curves[0].points), c.vector_curve(name + '1', vec + '.y', curves[1].points), c.vector_curve(name + '2', vec + '.z', curves[2].points), fac,
        c.vector_curve(name + '3a', vec + '.x', curves[3].points), c.vector_curve(name + '3b', vec + '.y', curves[3].points), c.vector_curve(name + '3c', vec + '.z', curves[3].points))
|
||||
|
||||
|
||||
def parse_lightfalloff(node: bpy.types.ShaderNodeLightFalloff, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Light Falloff node: only the Strength input is passed through.

    Falloff shaping is not implemented here; see Blender's reference:
    https://github.com/blender/blender/blob/master/source/blender/gpu/shaders/material/gpu_shader_material_light_falloff.glsl
    """
    strength_socket = node.inputs['Strength']
    return c.parse_value_input(strength_socket)
|
402
leenkx/blender/lnx/material/cycles_nodes/nodes_converter.py
Normal file
402
leenkx/blender/lnx/material/cycles_nodes/nodes_converter.py
Normal file
@ -0,0 +1,402 @@
|
||||
from typing import Union
|
||||
|
||||
import bpy
|
||||
|
||||
import lnx
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
from lnx.material.parser_state import ParserPass, ParserState
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
log = lnx.reload_module(log)
|
||||
c = lnx.reload_module(c)
|
||||
c_functions = lnx.reload_module(c_functions)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_maprange(node: bpy.types.ShaderNodeMapRange, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Map Range node: remap a value from [From Min, From Max] to [To Min, To Max].

    Emits a call to the map_range_* helper matching the node's interpolation
    type; unsupported modes warn and yield '0.0'. The result is optionally
    clamped to the target interval.
    """
    interp = node.interpolation_type

    value: str = c.parse_value_input(node.inputs[0]) if node.inputs[0].is_linked else c.to_vec1(node.inputs[0].default_value)
    fromMin = c.parse_value_input(node.inputs[1])
    fromMax = c.parse_value_input(node.inputs[2])
    toMin = c.parse_value_input(node.inputs[3])
    toMax = c.parse_value_input(node.inputs[4])

    if interp == "LINEAR":
        state.curshader.add_function(c_functions.str_map_range_linear)
        out = f'map_range_linear({value}, {fromMin}, {fromMax}, {toMin}, {toMax})'

    elif interp == "STEPPED":
        # Fix: parse_value_input returns a GLSL expression string (a variable
        # name or expression when the socket is linked), so wrapping it in
        # float() raised ValueError for anything but a plain numeric literal.
        # Embed the expression directly instead.
        steps = c.parse_value_input(node.inputs[5])
        state.curshader.add_function(c_functions.str_map_range_stepped)
        out = f'map_range_stepped({value}, {fromMin}, {fromMax}, {toMin}, {toMax}, {steps})'

    elif interp == "SMOOTHSTEP":
        state.curshader.add_function(c_functions.str_map_range_smoothstep)
        out = f'map_range_smoothstep({value}, {fromMin}, {fromMax}, {toMin}, {toMax})'

    elif interp == "SMOOTHERSTEP":
        state.curshader.add_function(c_functions.str_map_range_smootherstep)
        out = f'map_range_smootherstep({value}, {fromMin}, {fromMax}, {toMin}, {toMax})'

    else:
        log.warn(f'Interpolation mode {interp} not supported for Map Range node')
        return '0.0'

    if node.clamp:
        out = f'clamp({out}, {toMin}, {toMax})'

    return out
|
||||
|
||||
def parse_blackbody(node: bpy.types.ShaderNodeBlackbody, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Blackbody node: convert a temperature input to an RGB color."""
    temperature = c.parse_value_input(node.inputs[0])
    state.curshader.add_function(c_functions.str_blackbody)
    return f'blackbody({temperature})'
|
||||
|
||||
def parse_clamp(node: bpy.types.ShaderNodeClamp, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Clamp node: restrict a value to the interval given by Min and Max.

    MINMAX assumes Min < Max; RANGE additionally handles a swapped interval
    by ordering the bounds at runtime. Unsupported clamp types warn and pass
    the value through unchanged.
    """
    value = c.parse_value_input(node.inputs['Value'])
    minVal = c.parse_value_input(node.inputs['Min'])
    maxVal = c.parse_value_input(node.inputs['Max'])

    if node.clamp_type == 'MINMAX':
        # Condition is minVal < maxVal, otherwise use 'RANGE' type
        return f'clamp({value}, {minVal}, {maxVal})'

    elif node.clamp_type == 'RANGE':
        # Fix: parenthesize the ternary so the emitted GLSL keeps its meaning
        # when embedded into a larger expression (?: binds more loosely than
        # most operators).
        return f'(({minVal} < {maxVal}) ? clamp({value}, {minVal}, {maxVal}) : clamp({value}, {maxVal}, {minVal}))'

    else:
        log.warn(f'Clamp node: unsupported clamp type {node.clamp_type}.')
        return value
|
||||
|
||||
|
||||
def parse_valtorgb(node: bpy.types.ShaderNodeValToRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """ColorRamp node: map the factor input onto the ramp's color stops.

    Emits constant color/position arrays (once, during the regular parser
    pass) plus per-use factor and index variables, then returns either the
    stop color (CONSTANT interpolation) or a linear blend between the two
    neighboring stops.
    """
    # Alpha (TODO: make ColorRamp calculation vec4-based and split afterwards)
    if out_socket == node.outputs[1]:
        return '1.0'

    input_fac: bpy.types.NodeSocket = node.inputs[0]

    fac: str = c.parse_value_input(input_fac) if input_fac.is_linked else c.to_vec1(input_fac.default_value)
    interp = node.color_ramp.interpolation
    elems = node.color_ramp.elements

    # A single stop is just a constant color, no ramp logic needed
    if len(elems) == 1:
        return c.to_vec3(elems[0].color)

    # Write color array
    # The last entry is included twice so that the interpolation
    # between indices works (no out of bounds error)
    cols_var = c.node_name(node.name).upper() + '_COLS'

    # Constants must only be emitted once; later parser passes reuse them
    if state.current_pass == ParserPass.REGULAR:
        cols_entries = ', '.join(f'vec3({elem.color[0]}, {elem.color[1]}, {elem.color[2]})' for elem in elems)
        cols_entries += f', vec3({elems[len(elems) - 1].color[0]}, {elems[len(elems) - 1].color[1]}, {elems[len(elems) - 1].color[2]})'
        state.curshader.add_const("vec3", cols_var, cols_entries, array_size=len(elems) + 1)

    fac_var = c.node_name(node.name) + '_fac' + state.get_parser_pass_suffix()
    state.curshader.write(f'float {fac_var} = {fac};')

    # Get index of the nearest left element relative to the factor
    index = '0 + '
    index += ' + '.join([f'(({fac_var} > {elems[i].position}) ? 1 : 0)' for i in range(1, len(elems))])

    # Write index
    index_var = c.node_name(node.name) + '_i' + state.get_parser_pass_suffix()
    state.curshader.write(f'int {index_var} = {index};')

    if interp == 'CONSTANT':
        return f'{cols_var}[{index_var}]'

    # Linear interpolation
    else:
        # Write factor array
        facs_var = c.node_name(node.name).upper() + '_FACS'
        if state.current_pass == ParserPass.REGULAR:
            facs_entries = ', '.join(str(elem.position) for elem in elems)
            # Add one more entry at the rightmost position so that the
            # interpolation between indices works (no out of bounds error)
            facs_entries += ', 1.0'
            state.curshader.add_const("float", facs_var, facs_entries, array_size=len(elems) + 1)

        # Mix color
        prev_stop_fac = f'{facs_var}[{index_var}]'
        next_stop_fac = f'{facs_var}[{index_var} + 1]'
        prev_stop_col = f'{cols_var}[{index_var}]'
        next_stop_col = f'{cols_var}[{index_var} + 1]'
        rel_pos = f'({fac_var} - {prev_stop_fac}) * (1.0 / ({next_stop_fac} - {prev_stop_fac}))'
        return f'mix({prev_stop_col}, {next_stop_col}, max({rel_pos}, 0.0))'
|
||||
|
||||
if bpy.app.version > (3, 2, 0):
    def parse_combine_color(node: bpy.types.ShaderNodeCombineColor, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
        """Combine Color node (Blender 3.3+): dispatch on the node's color mode.

        RGB and HSV reuse the legacy combine parsers; HSL is not implemented
        in the shader library and falls back to black.
        """
        if node.mode == 'RGB':
            return parse_combrgb(node, out_socket, state)
        elif node.mode == 'HSV':
            return parse_combhsv(node, out_socket, state)
        # Fix: previously an unknown mode fell through and implicitly returned
        # None; always warn and return a valid default instead. The message is
        # unchanged for the HSL case.
        log.warn(f'Combine Color node: {node.mode} mode is not supported, using default value')
        return c.to_vec3((0.0, 0.0, 0.0))
|
||||
|
||||
|
||||
def parse_combhsv(node: bpy.types.ShaderNodeCombineHSV, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Combine HSV node: build an RGB color from hue, saturation and value."""
    # The hue/sat helper block supplies hsv_to_rgb()
    state.curshader.add_function(c_functions.str_hue_sat)
    hue, sat, val = (c.parse_value_input(node.inputs[i]) for i in range(3))
    return f'hsv_to_rgb(vec3({hue}, {sat}, {val}))'
|
||||
|
||||
|
||||
def parse_combrgb(node: bpy.types.ShaderNodeCombineRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Combine RGB node: pack three scalar inputs into a vec3 color."""
    channels = [c.parse_value_input(node.inputs[i]) for i in range(3)]
    return 'vec3({0}, {1}, {2})'.format(*channels)
|
||||
|
||||
|
||||
def parse_combxyz(node: bpy.types.ShaderNodeCombineXYZ, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Combine XYZ node: pack three scalar inputs into a vec3 vector."""
    components = [c.parse_value_input(node.inputs[i]) for i in range(3)]
    return 'vec3({0}, {1}, {2})'.format(*components)
|
||||
|
||||
|
||||
def parse_wavelength(node: bpy.types.ShaderNodeWavelength, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Wavelength node: convert a wavelength in nanometers to an RGB color."""
    state.curshader.add_function(c_functions.str_wavelength_to_rgb)
    wavelength = c.parse_value_input(node.inputs[0])
    # Roughly map to cycles - 450 to 600 nanometers
    return f'wavelength_to_rgb(({wavelength} - 450.0) / 150.0)'
|
||||
|
||||
|
||||
def parse_vectormath(node: bpy.types.ShaderNodeVectorMath, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Vector Math node.

    Operations returning a vector are resolved first (including those that
    need extra shader helpers or additional inputs); the remaining scalar
    operations are handled afterwards. Unsupported operations warn and fall
    back to the first operand (vector) or '0.0' (scalar).
    """
    op = node.operation

    vec1 = c.parse_vector_input(node.inputs[0])
    vec2 = c.parse_vector_input(node.inputs[1])

    if out_socket.type == 'VECTOR':
        # Operations with side effects or extra inputs first
        if op == 'DIVIDE':
            state.curshader.add_function(c_functions.str_safe_divide)
            return f'safe_divide({vec1}, {vec2})'
        if op == 'SCALE':
            # Scale is input 3 despite being visually on another position (see the python tooltip in Blender)
            scale = c.parse_value_input(node.inputs[3])
            return f'{vec1} * {scale}'
        if op == 'PROJECT':
            state.curshader.add_function(c_functions.str_project)
            return f'project({vec1}, {vec2})'
        if op == 'SNAP':
            state.curshader.add_function(c_functions.str_safe_divide)
            return f'floor(safe_divide({vec1}, {vec2})) * {vec2}'
        if op == 'WRAP':
            vec3 = c.parse_vector_input(node.inputs[2])
            state.curshader.add_function(c_functions.str_wrap)
            return f'wrap({vec1}, {vec2}, {vec3})'

        # Plain expression templates
        simple = {
            'ADD': f'({vec1} + {vec2})',
            'SUBTRACT': f'({vec1} - {vec2})',
            'MULTIPLY': f'({vec1} * {vec2})',
            'NORMALIZE': f'normalize({vec1})',
            'REFLECT': f'reflect({vec1}, normalize({vec2}))',
            'CROSS_PRODUCT': f'cross({vec1}, {vec2})',
            'SINE': f'sin({vec1})',
            'COSINE': f'cos({vec1})',
            'TANGENT': f'tan({vec1})',
            'MODULO': f'mod({vec1}, {vec2})',
            'FRACTION': f'fract({vec1})',
            'CEIL': f'ceil({vec1})',
            'FLOOR': f'floor({vec1})',
            'MAXIMUM': f'max({vec1}, {vec2})',
            'MINIMUM': f'min({vec1}, {vec2})',
            'ABSOLUTE': f'abs({vec1})',
        }
        if op in simple:
            return simple[op]

        log.warn(f'Vectormath node: unsupported operation {node.operation}.')
        return vec1

    # Float output
    scalar = {
        'DOT_PRODUCT': f'dot({vec1}, {vec2})',
        'DISTANCE': f'distance({vec1}, {vec2})',
        'LENGTH': f'length({vec1})',
    }
    if op in scalar:
        return scalar[op]

    log.warn(f'Vectormath node: unsupported operation {node.operation}.')
    return '0.0'
|
||||
|
||||
|
||||
def parse_math(node: bpy.types.ShaderNodeMath, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Math node: translate the node's operation into a GLSL expression.

    Templates are grouped by arity; the third input is only parsed for
    operations that actually use it, matching the original evaluation order.
    Fix: unknown operations previously left the output variable unbound and
    raised UnboundLocalError — they now warn and yield '0.0'.
    """
    val1 = c.parse_value_input(node.inputs[0])
    val2 = c.parse_value_input(node.inputs[1])
    op = node.operation

    # Single-operand templates ({0} = val1)
    unary = {
        'LOGARITHM': 'log({0})',
        'SQRT': 'sqrt({0})',
        'INVERSE_SQRT': 'inversesqrt({0})',
        'ABSOLUTE': 'abs({0})',
        'EXPONENT': 'exp({0})',
        'SIGN': 'sign({0})',
        # round() is not used; emulated so behavior matches across targets
        'ROUND': 'floor({0} + 0.5)',
        'FLOOR': 'floor({0})',
        'CEIL': 'ceil({0})',
        'TRUNC': 'trunc({0})',
        'FRACT': 'fract({0})',
        'SINE': 'sin({0})',
        'COSINE': 'cos({0})',
        'TANGENT': 'tan({0})',
        'ARCSINE': 'asin({0})',
        'ARCCOSINE': 'acos({0})',
        'ARCTANGENT': 'atan({0})',
        'SINH': 'sinh({0})',
        'COSH': 'cosh({0})',
        'TANH': 'tanh({0})',
        'RADIANS': 'radians({0})',
        'DEGREES': 'degrees({0})',
    }
    # Two-operand templates ({0} = val1, {1} = val2)
    binary = {
        'ADD': '({0} + {1})',
        'SUBTRACT': '({0} - {1})',
        'MULTIPLY': '({0} * {1})',
        'DIVIDE': '({0} / {1})',
        'POWER': 'pow({0}, {1})',
        'MINIMUM': 'min({0}, {1})',
        'MAXIMUM': 'max({0}, {1})',
        'LESS_THAN': 'float({0} < {1})',
        'GREATER_THAN': 'float({0} > {1})',
        # mod() instead of % so floats work on all targets
        'MODULO': 'mod({0}, {1})',
        'ARCTAN2': 'atan({0}, {1})',
        'SNAP': 'floor(({1} != 0.0) ? {0} / {1} : 0.0) * {1}',
        'PINGPONG': 'float(({1} != 0.0) ? abs(fract(({0} - {1}) / ({1} * 2.0)) * {1} * 2.0 - {1}) : 0.0)',
    }
    # Three-operand templates ({2} = val3, parsed lazily below)
    ternary = {
        'MULTIPLY_ADD': '({0} * {1} + {2})',
        'COMPARE': 'float((abs({0} - {1}) <= max({2}, 1e-5)) ? 1.0 : 0.0)',
        'SMOOTH_MIN': 'float(float({2} != 0.0 ? min({0},{1}) - (max({2} - abs({0} - {1}), 0.0) / {2}) * (max({2} - abs({0} - {1}), 0.0) / {2}) * (max({2} - abs({0} - {1}), 0.0) / {2}) * {2} * (1.0 / 6.0) : min({0}, {1})))',
        'SMOOTH_MAX': 'float(0-(float({2} != 0.0 ? min(-{0},-{1}) - (max({2} - abs(-{0} - (-{1})), 0.0) / {2}) * (max({2} - abs(-{0} - (-{1})), 0.0) / {2}) * (max({2} - abs(-{0} - (-{1})), 0.0) / {2}) * {2} * (1.0 / 6.0) : min(-{0}, (-{1})))))',
        'WRAP': 'float((({1}-{2}) != 0.0) ? {0} - (({1}-{2}) * floor(({0} - {2}) / ({1}-{2}))) : {2})',
    }

    if op in unary:
        out_val = unary[op].format(val1)
    elif op in binary:
        out_val = binary[op].format(val1, val2)
    elif op in ternary:
        # Only these operations read the third input
        val3 = c.parse_value_input(node.inputs[2])
        out_val = ternary[op].format(val1, val2, val3)
    else:
        # Fix: fall back instead of referencing an unbound out_val
        log.warn(f'Math node: unsupported operation {node.operation}.')
        out_val = '0.0'

    if node.use_clamp:
        return 'clamp({0}, 0.0, 1.0)'.format(out_val)
    else:
        return out_val
|
||||
|
||||
|
||||
def parse_rgbtobw(node: bpy.types.ShaderNodeRGBToBW, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """RGB to BW node: reduce the input color to a single luminance value."""
    color = c.parse_vector_input(node.inputs[0])
    return c.rgb_to_bw(color)
|
||||
|
||||
if bpy.app.version > (3, 2, 0):
    def parse_separate_color(node: bpy.types.ShaderNodeSeparateColor, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
        """Separate Color node (Blender 3.3+): dispatch on the node's color mode.

        RGB and HSV reuse the legacy separate parsers; HSL is not implemented
        in the shader library and falls back to 0.0.
        """
        if node.mode == 'RGB':
            return parse_seprgb(node, out_socket, state)
        elif node.mode == 'HSV':
            return parse_sephsv(node, out_socket, state)
        # Fix: previously an unknown mode fell through and implicitly returned
        # None; always warn and return a valid default instead. The message is
        # unchanged for the HSL case.
        log.warn(f'Separate Color node: {node.mode} mode is not supported, using default value')
        return '0.0'
|
||||
|
||||
|
||||
def parse_sephsv(node: bpy.types.ShaderNodeSeparateHSV, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Separate HSV node: return the H, S or V channel of the input color."""
    # The hue/sat helper block supplies rgb_to_hsv()
    state.curshader.add_function(c_functions.str_hue_sat)

    # Cache the conversion in a shader-local constant so that parsing a
    # second output socket reuses it instead of converting twice.
    hsv_var = c.node_name(node.name) + '_hsv' + state.get_parser_pass_suffix()
    if not state.curshader.contains(hsv_var):
        state.curshader.write(f'const vec3 {hsv_var} = rgb_to_hsv({c.parse_vector_input(node.inputs["Color"])}.rgb);')

    # Outputs 0/1/2 map to the x/y/z components (hue, saturation, value)
    for idx, component in enumerate(('x', 'y', 'z')):
        if out_socket == node.outputs[idx]:
            return f'{hsv_var}.{component}'
|
||||
|
||||
|
||||
def parse_seprgb(node: bpy.types.ShaderNodeSeparateRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Separate RGB node: return one color channel of the input."""
    col = c.parse_vector_input(node.inputs[0])
    for idx, channel in enumerate(('r', 'g', 'b')):
        if out_socket == node.outputs[idx]:
            return '{0}.{1}'.format(col, channel)
|
||||
|
||||
|
||||
def parse_sepxyz(node: bpy.types.ShaderNodeSeparateXYZ, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Separate XYZ node: return one component of the input vector."""
    vec = c.parse_vector_input(node.inputs[0])
    for idx, component in enumerate(('x', 'y', 'z')):
        if out_socket == node.outputs[idx]:
            return '{0}.{1}'.format(vec, component)
|
426
leenkx/blender/lnx/material/cycles_nodes/nodes_input.py
Normal file
426
leenkx/blender/lnx/material/cycles_nodes/nodes_input.py
Normal file
@ -0,0 +1,426 @@
|
||||
from typing import Union
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
import lnx.material.mat_state as mat_state
|
||||
from lnx.material.parser_state import ParserState, ParserContext
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
import lnx.utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
log = lnx.reload_module(log)
|
||||
c = lnx.reload_module(c)
|
||||
c_functions = lnx.reload_module(c_functions)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState, ParserContext
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_attribute(node: bpy.types.ShaderNodeAttribute, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Attribute node: resolve an attribute name to a shader expression.

    Resolution order: the special 'time' attribute, then geometry attributes
    (UV maps and vertex colors), then object-level properties of the first
    material user, and finally a zero default. Output 3 is the alpha socket
    and generally yields '1.0' since alpha data is not carried through.
    """
    out_type = 'float' if out_socket.type == 'VALUE' else 'vec3'

    if node.attribute_name == 'time':
        state.curshader.add_uniform('float time', link='_time')

        # Alpha output of the time attribute has no meaning
        if out_socket == node.outputs[3]:
            return '1.0'
        return c.cast_value('time', from_type='float', to_type=out_type)

    # UV maps (higher priority) and vertex colors
    if node.attribute_type == 'GEOMETRY':

        # Alpha output. Leenkx doesn't support vertex colors with alpha
        # values yet and UV maps don't have an alpha channel
        if out_socket == node.outputs[3]:
            return '1.0'

        # UV maps
        mat = c.mat_get_material()
        mat_users = c.mat_get_material_users()

        if mat_users is not None and mat in mat_users:
            mat_user = mat_users[mat][0]

            # Curves don't have uv layers, so check that first
            if hasattr(mat_user.data, 'uv_layers'):
                lays = mat_user.data.uv_layers

                # First UV map referenced
                if len(lays) > 0 and node.attribute_name == lays[0].name:
                    state.con.add_elem('tex', 'short2norm')
                    state.dxdy_varying_input_value = True
                    return c.cast_value('vec3(texCoord.x, 1.0 - texCoord.y, 0.0)', from_type='vec3', to_type=out_type)

                # Second UV map referenced
                elif len(lays) > 1 and node.attribute_name == lays[1].name:
                    state.con.add_elem('tex1', 'short2norm')
                    state.dxdy_varying_input_value = True
                    return c.cast_value('vec3(texCoord1.x, 1.0 - texCoord1.y, 0.0)', from_type='vec3', to_type=out_type)

        # Vertex colors
        # TODO: support multiple vertex color sets
        state.con.add_elem('col', 'short4norm')
        state.dxdy_varying_input_value = True
        return c.cast_value('vcolor', from_type='vec3', to_type=out_type)

    # Check object properties
    # see https://developer.blender.org/rB6fdcca8de6 for reference
    mat = c.mat_get_material()
    mat_users = c.mat_get_material_users()
    if mat_users is not None and mat in mat_users:
        # Use first material user for now...
        mat_user = mat_users[mat][0]

        val = None
        # Custom properties first
        if node.attribute_name in mat_user:
            val = mat_user[node.attribute_name]
        # Blender properties
        elif hasattr(mat_user, node.attribute_name):
            val = getattr(mat_user, node.attribute_name)

        if val is not None:
            if isinstance(val, float):
                return c.cast_value(str(val), from_type='float', to_type=out_type)
            elif isinstance(val, int):
                return c.cast_value(str(val), from_type='int', to_type=out_type)
            elif isinstance(val, mathutils.Vector) and len(val) <= 4:
                # Pad to 4 components so the alpha socket can be served too
                out = val.to_4d()

                if out_socket == node.outputs[3]:
                    return c.to_vec1(out[3])
                return c.cast_value(c.to_vec3(out), from_type='vec3', to_type=out_type)

    # Default values, attribute name did not match
    if out_socket == node.outputs[3]:
        return '1.0'
    return c.cast_value('0.0', from_type='float', to_type=out_type)
|
||||
|
||||
|
||||
def parse_rgb(node: bpy.types.ShaderNodeRGB, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """RGB node: a constant color, optionally exposed as a material parameter."""
    if not node.lnx_material_param:
        return c.to_vec3(out_socket.default_value)

    # Runtime-adjustable: register a uniform linked to this node's name
    uniform_name = 'param_' + c.node_name(node.name)
    v = out_socket.default_value
    default = [float(v[0]), float(v[1]), float(v[2])]
    state.curshader.add_uniform(f'vec3 {uniform_name}', link=f'{node.name}', default_value=default, is_lnx_mat_param=True)
    return uniform_name
|
||||
|
||||
|
||||
def parse_vertex_color(node: bpy.types.ShaderNodeVertexColor, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Vertex Color node: read the mesh's vertex color element."""
    # Request the color vertex element on the shader context
    state.con.add_elem('col', 'short4norm')
    return 'vcolor'
|
||||
|
||||
|
||||
def parse_camera(node: bpy.types.ShaderNodeCameraData, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Camera Data node (view vector, z depth, view distance)."""
    # View Vector in camera space
    if out_socket == node.outputs[0]:
        state.dxdy_varying_input_value = True
        return 'vVecCam'

    # View Z Depth
    elif out_socket == node.outputs[1]:
        # linearize() converts the non-linear depth buffer value
        state.curshader.add_include('std/math.glsl')
        state.curshader.add_uniform('vec2 cameraProj', link='_cameraPlaneProj')
        state.dxdy_varying_input_value = True
        return 'linearize(gl_FragCoord.z, cameraProj)'

    # View Distance
    else:
        state.curshader.add_uniform('vec3 eye', link='_cameraPosition')
        state.dxdy_varying_input_value = True
        return 'distance(eye, wposition)'
|
||||
|
||||
|
||||
def parse_geometry(node: bpy.types.ShaderNodeNewGeometry, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Geometry node by mapping each output to a shader built-in.

    Pointiness and Random Per Island are not supported and evaluate to 0.0.
    """
    # Position
    if out_socket == node.outputs[0]:
        state.dxdy_varying_input_value = True
        return 'wposition'
    # Normal
    elif out_socket == node.outputs[1]:
        state.dxdy_varying_input_value = True
        return 'n' if state.curshader.shader_type == 'frag' else 'wnormal'
    # Tangent
    elif out_socket == node.outputs[2]:
        state.dxdy_varying_input_value = True
        return 'wtangent'
    # True Normal (no separate geometric normal available, reuse the shading normal)
    elif out_socket == node.outputs[3]:
        state.dxdy_varying_input_value = True
        return 'n' if state.curshader.shader_type == 'frag' else 'wnormal'
    # Incoming
    elif out_socket == node.outputs[4]:
        state.dxdy_varying_input_value = True
        return 'vVec'
    # Parametric (approximated with the object-space position)
    elif out_socket == node.outputs[5]:
        state.dxdy_varying_input_value = True
        return 'mposition'
    # Backfacing (only meaningful when parsing object materials)
    elif out_socket == node.outputs[6]:
        return '(1.0 - float(gl_FrontFacing))' if state.context == ParserContext.OBJECT else '0.0'
    # Pointiness (unsupported)
    elif out_socket == node.outputs[7]:
        return '0.0'
    # Random Per Island (unsupported)
    elif out_socket == node.outputs[8]:
        return '0.0'

    # Robustness fix: previously an unmatched socket fell through and
    # implicitly returned None, which breaks downstream string formatting.
    log.warn(f'Geometry node: unsupported output {out_socket.identifier}.')
    return '0.0'
|
||||
|
||||
|
||||
def parse_hairinfo(node: bpy.types.ShaderNodeHairInfo, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Stub for the Hair Info node: hair attributes are unavailable, so constants are returned."""
    # Tangent Normal
    # NOTE(review): assumes outputs[3] is 'Tangent Normal' — the output order
    # differs between Blender versions, confirm against the target version.
    if out_socket == node.outputs[3]:
        return 'vec3(0.0)'
    else:
        # Is Strand
        # Intercept
        # Thickness
        # Random
        return '0.5'
|
||||
|
||||
|
||||
def parse_objectinfo(node: bpy.types.ShaderNodeObjectInfo, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Object Info node.

    World-context materials have no object, so most outputs fall back to
    constants there. Object color and alpha are not implemented yet.
    """
    # Location
    if out_socket == node.outputs[0]:
        if state.context == ParserContext.WORLD:
            return c.to_vec3((0.0, 0.0, 0.0))
        return 'wposition'

    # Color
    elif out_socket == node.outputs[1]:
        if state.context == ParserContext.WORLD:
            # Use world strength like Blender
            background_node = c.node_by_type(state.world.node_tree.nodes, 'BACKGROUND')
            if background_node is None:
                return c.to_vec3((0.0, 0.0, 0.0))
            return c.to_vec3([background_node.inputs[1].default_value] * 3)

        # TODO: Implement object color in Iron
        # state.curshader.add_uniform('vec3 objectInfoColor', link='_objectInfoColor')
        # return 'objectInfoColor'
        return c.to_vec3((1.0, 1.0, 1.0))

    # Alpha
    elif out_socket == node.outputs[2]:
        # TODO, see color output above
        return '0.0'

    # Object Index
    elif out_socket == node.outputs[3]:
        if state.context == ParserContext.WORLD:
            return '0.0'
        state.curshader.add_uniform('float objectInfoIndex', link='_objectInfoIndex')
        return 'objectInfoIndex'

    # Material Index
    elif out_socket == node.outputs[4]:
        if state.context == ParserContext.WORLD:
            return '0.0'
        state.curshader.add_uniform('float objectInfoMaterialIndex', link='_objectInfoMaterialIndex')
        return 'objectInfoMaterialIndex'

    # Random
    elif out_socket == node.outputs[5]:
        if state.context == ParserContext.WORLD:
            return '0.0'

        # Use random value per instance
        if mat_state.uses_instancing:
            # Fixed: these were f-strings without any placeholders (F541)
            state.vert.add_out('flat float irand')
            state.frag.add_in('flat float irand')
            state.vert.write('irand = fract(sin(gl_InstanceID) * 43758.5453);')
            return 'irand'

        state.curshader.add_uniform('float objectInfoRandom', link='_objectInfoRandom')
        return 'objectInfoRandom'
|
||||
|
||||
|
||||
def parse_particleinfo(node: bpy.types.ShaderNodeParticleInfo, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Particle Info node.

    Each requested attribute is flagged in ``c.particle_info`` so the
    exporter can provide the corresponding per-particle data; when the
    render path has particles disabled, constants are emitted instead.
    """
    particles_on = lnx.utils.get_rp().lnx_particles == 'On'

    # Index
    if out_socket == node.outputs[0]:
        c.particle_info['index'] = True
        return 'p_index' if particles_on else '0.0'

    # TODO: Random
    if out_socket == node.outputs[1]:
        return '0.0'

    # Age
    elif out_socket == node.outputs[2]:
        c.particle_info['age'] = True
        return 'p_age' if particles_on else '0.0'

    # Lifetime
    elif out_socket == node.outputs[3]:
        c.particle_info['lifetime'] = True
        return 'p_lifetime' if particles_on else '0.0'

    # Location
    if out_socket == node.outputs[4]:
        c.particle_info['location'] = True
        return 'p_location' if particles_on else 'vec3(0.0)'

    # Size (flagged but currently always constant)
    elif out_socket == node.outputs[5]:
        c.particle_info['size'] = True
        return '1.0'

    # Velocity
    elif out_socket == node.outputs[6]:
        c.particle_info['velocity'] = True
        return 'p_velocity' if particles_on else 'vec3(0.0)'

    # Angular Velocity (flagged but currently always constant)
    elif out_socket == node.outputs[7]:
        c.particle_info['angular_velocity'] = True
        return 'vec3(0.0)'
|
||||
|
||||
|
||||
def parse_tangent(node: bpy.types.ShaderNodeTangent, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Return the world-space tangent; the node's direction/axis settings are ignored."""
    state.dxdy_varying_input_value = True
    return 'wtangent'
|
||||
|
||||
|
||||
def parse_texcoord(node: bpy.types.ShaderNodeTexCoord, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Parse the Texture Coordinate node outputs.

    UV output flips Y to match Blender's UV space; Camera output and the
    Reflection output in object context are unsupported and return zero.
    """
    #obj = node.object
    #instance = node.from_instance
    if out_socket == node.outputs[0]:  # Generated - bounds
        state.dxdy_varying_input_value = True
        return 'bposition'
    elif out_socket == node.outputs[1]:  # Normal
        state.dxdy_varying_input_value = True
        return 'n'
    elif out_socket == node.outputs[2]:  # UV
        if state.context == ParserContext.WORLD:
            return 'vec3(0.0)'
        state.con.add_elem('tex', 'short2norm')
        state.dxdy_varying_input_value = True
        return 'vec3(texCoord.x, 1.0 - texCoord.y, 0.0)'
    elif out_socket == node.outputs[3]:  # Object
        state.dxdy_varying_input_value = True
        return 'mposition'
    elif out_socket == node.outputs[4]:  # Camera
        return 'vec3(0.0)'  # 'vposition'
    elif out_socket == node.outputs[5]:  # Window
        # TODO: Don't use gl_FragCoord here, it uses different axes on different graphics APIs
        state.frag.add_uniform('vec2 screenSize', link='_screenSize')
        state.dxdy_varying_input_value = True
        # Fixed: was an f-string without any placeholders (F541)
        return 'vec3(gl_FragCoord.xy / screenSize, 0.0)'
    elif out_socket == node.outputs[6]:  # Reflection
        if state.context == ParserContext.WORLD:
            state.dxdy_varying_input_value = True
            return 'n'
        return 'vec3(0.0)'
|
||||
|
||||
|
||||
def parse_uvmap(node: bpy.types.ShaderNodeUVMap, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Return the selected UV map coordinates (Y flipped to match Blender's UV space).

    Only two UV layers are supported; layer lookup uses the first object
    that uses this material.
    """
    # instance = node.from_instance
    state.con.add_elem('tex', 'short2norm')
    mat = c.mat_get_material()
    mat_users = c.mat_get_material_users()

    state.dxdy_varying_input_value = True

    if mat_users is not None and mat in mat_users:
        mat_user = mat_users[mat][0]
        if hasattr(mat_user.data, 'uv_layers'):
            layers = mat_user.data.uv_layers
            # Second UV map referenced
            if len(layers) > 1 and node.uv_map == layers[1].name:
                state.con.add_elem('tex1', 'short2norm')
                return 'vec3(texCoord1.x, 1.0 - texCoord1.y, 0.0)'

    # Default to the first UV map
    return 'vec3(texCoord.x, 1.0 - texCoord.y, 0.0)'
|
||||
|
||||
|
||||
def parse_fresnel(node: bpy.types.ShaderNodeFresnel, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Emit a dielectric Fresnel term from the node's IOR and normal inputs."""
    state.curshader.add_function(c_functions.str_fresnel)
    ior = c.parse_value_input(node.inputs[0])
    normal_socket = node.inputs[1]
    # Use the linked normal if given, otherwise the precomputed dot(N, V)
    if normal_socket.is_linked:
        dotnv = f'dot({c.parse_vector_input(normal_socket)}, vVec)'
    else:
        dotnv = 'dotNV'

    state.dxdy_varying_input_value = True
    return f'fresnel({ior}, {dotnv})'
|
||||
|
||||
|
||||
def parse_layerweight(node: bpy.types.ShaderNodeLayerWeight, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Parse the Layer Weight node (Fresnel and Facing outputs)."""
    blend = c.parse_value_input(node.inputs[0])
    # Use the linked normal if given, otherwise the precomputed dot(N, V)
    if node.inputs[1].is_linked:
        dotnv = 'dot({0}, vVec)'.format(c.parse_vector_input(node.inputs[1]))
    else:
        dotnv = 'dotNV'

    state.dxdy_varying_input_value = True

    # Fresnel
    if out_socket == node.outputs[0]:
        state.curshader.add_function(c_functions.str_fresnel)
        # Blend factor is remapped to an IOR as in Blender's implementation
        return 'fresnel(1.0 / (1.0 - {0}), {1})'.format(blend, dotnv)
    # Facing
    elif out_socket == node.outputs[1]:
        return '(1.0 - pow({0}, ({1} < 0.5) ? 2.0 * {1} : 0.5 / (1.0 - {1})))'.format(dotnv, blend)
|
||||
|
||||
|
||||
def parse_lightpath(node: bpy.types.ShaderNodeLightPath, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Return static approximations of Cycles light-path outputs.

    Values follow Blender's rasterizer fallbacks, see
    https://github.com/blender/blender/blob/master/source/blender/gpu/shaders/material/gpu_shader_material_light_path.glsl
    """
    output_values = (
        ('Is Camera Ray', '1.0'),
        ('Is Shadow Ray', '0.0'),
        ('Is Diffuse Ray', '1.0'),
        ('Is Glossy Ray', '1.0'),
        ('Is Singular Ray', '0.0'),
        ('Is Reflection Ray', '0.0'),
        ('Is Transmission Ray', '0.0'),
        ('Ray Length', '1.0'),
        ('Ray Depth', '0.0'),
        ('Diffuse Depth', '0.0'),
        ('Glossy Depth', '0.0'),
        ('Transparent Depth', '0.0'),
        ('Transmission Depth', '0.0'),
    )
    for output_name, value in output_values:
        if out_socket == node.outputs[output_name]:
            return value

    log.warn(f'Light Path node: unsupported output {out_socket.identifier}.')
    return '0.0'
|
||||
|
||||
|
||||
def parse_value(node: bpy.types.ShaderNodeValue, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Emit the constant of a Value node.

    When flagged as a material parameter, a runtime-linkable float uniform
    is registered instead of inlining the constant.
    """
    if not node.lnx_material_param:
        return c.to_vec1(node.outputs[0].default_value)

    uniform_name = 'param_' + c.node_name(node.name)
    default = node.outputs[0].default_value
    state.curshader.add_uniform(f'float {uniform_name}', link=node.name, default_value=default, is_lnx_mat_param=True)
    return uniform_name
|
||||
|
||||
|
||||
def parse_wireframe(node: bpy.types.ShaderNodeWireframe, out_socket: bpy.types.NodeSocket, state: ParserState) -> floatstr:
    """Wireframe node is not supported; always evaluates to 0.0."""
    # node.use_pixel_size
    # size = c.parse_value_input(node.inputs[0])
    return '0.0'
|
290
leenkx/blender/lnx/material/cycles_nodes/nodes_shader.py
Normal file
290
leenkx/blender/lnx/material/cycles_nodes/nodes_shader.py
Normal file
@ -0,0 +1,290 @@
|
||||
import bpy
|
||||
from bpy.types import NodeSocket
|
||||
|
||||
import lnx
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.mat_state as mat_state
|
||||
import lnx.material.mat_utils as mat_utils
|
||||
from lnx.material.parser_state import ParserState
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
c = lnx.reload_module(c)
|
||||
mat_state = lnx.reload_module(mat_state)
|
||||
mat_utils = lnx.reload_module(mat_utils)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_mixshader(node: bpy.types.ShaderNodeMixShader, out_socket: NodeSocket, state: ParserState) -> None:
    """Blend two shader inputs by the Fac value.

    Both branches are parsed (unless Fac is an unlinked constant at 0 or 1),
    then every PBR channel is linearly interpolated; Fac = 0 selects input 1.
    """
    # Skip mixing if only one input is effectively used
    if not node.inputs[0].is_linked:
        if node.inputs[0].default_value <= 0.0:
            c.parse_shader_input(node.inputs[1])
            return
        elif node.inputs[0].default_value >= 1.0:
            c.parse_shader_input(node.inputs[2])
            return

    # Unlinked factors are compile-time constants for the GLSL compiler
    prefix = '' if node.inputs[0].is_linked else 'const '
    fac = c.parse_value_input(node.inputs[0])
    fac_var = c.node_name(node.name) + '_fac' + state.get_parser_pass_suffix()
    fac_inv_var = c.node_name(node.name) + '_fac_inv'
    state.curshader.write('{0}float {1} = clamp({2}, 0.0, 1.0);'.format(prefix, fac_var, fac))
    state.curshader.write('{0}float {1} = 1.0 - {2};'.format(prefix, fac_inv_var, fac_var))

    # Reset emission type before each branch so the per-branch emission
    # kinds can be combined afterwards
    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
    bc1, rough1, met1, occ1, spec1, opac1, ior1, emi1 = c.parse_shader_input(node.inputs[1])
    ek1 = mat_state.emission_type

    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
    bc2, rough2, met2, occ2, spec2, opac2, ior2, emi2 = c.parse_shader_input(node.inputs[2])
    ek2 = mat_state.emission_type

    if state.parse_surface:
        # mix(a, b, fac) expanded as a * (1 - fac) + b * fac
        state.out_basecol = '({0} * {3} + {1} * {2})'.format(bc1, bc2, fac_var, fac_inv_var)
        state.out_roughness = '({0} * {3} + {1} * {2})'.format(rough1, rough2, fac_var, fac_inv_var)
        state.out_metallic = '({0} * {3} + {1} * {2})'.format(met1, met2, fac_var, fac_inv_var)
        state.out_occlusion = '({0} * {3} + {1} * {2})'.format(occ1, occ2, fac_var, fac_inv_var)
        state.out_specular = '({0} * {3} + {1} * {2})'.format(spec1, spec2, fac_var, fac_inv_var)
        state.out_emission_col = '({0} * {3} + {1} * {2})'.format(emi1, emi2, fac_var, fac_inv_var)
        mat_state.emission_type = mat_state.EmissionType.get_effective_combination(ek1, ek2)
    if state.parse_opacity:
        state.out_opacity = '({0} * {3} + {1} * {2})'.format(opac1, opac2, fac_var, fac_inv_var)
        state.out_ior = '({0} * {3} + {1} * {2})'.format(ior1, ior2, fac_var, fac_inv_var)
|
||||
|
||||
def parse_addshader(node: bpy.types.ShaderNodeAddShader, out_socket: NodeSocket, state: ParserState) -> None:
    """Add two shader inputs.

    Base color and emission are summed; the remaining channels are averaged.
    """
    # Reset emission type before each branch so the per-branch emission
    # kinds can be combined afterwards
    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
    bc1, rough1, met1, occ1, spec1, opac1, ior1, emi1 = c.parse_shader_input(node.inputs[0])
    ek1 = mat_state.emission_type

    mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
    bc2, rough2, met2, occ2, spec2, opac2, ior2, emi2 = c.parse_shader_input(node.inputs[1])
    ek2 = mat_state.emission_type

    if state.parse_surface:
        state.out_basecol = '({0} + {1})'.format(bc1, bc2)
        state.out_roughness = '({0} * 0.5 + {1} * 0.5)'.format(rough1, rough2)
        state.out_metallic = '({0} * 0.5 + {1} * 0.5)'.format(met1, met2)
        state.out_occlusion = '({0} * 0.5 + {1} * 0.5)'.format(occ1, occ2)
        state.out_specular = '({0} * 0.5 + {1} * 0.5)'.format(spec1, spec2)
        state.out_emission_col = '({0} + {1})'.format(emi1, emi2)
        mat_state.emission_type = mat_state.EmissionType.get_effective_combination(ek1, ek2)
    if state.parse_opacity:
        state.out_opacity = '({0} * 0.5 + {1} * 0.5)'.format(opac1, opac2)
        state.out_ior = '({0} * 0.5 + {1} * 0.5)'.format(ior1, ior2)
|
||||
|
||||
|
||||
if bpy.app.version < (3, 0, 0):
    def parse_bsdfprincipled(node: bpy.types.ShaderNodeBsdfPrincipled, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Principled BSDF using the pre-Blender-3.0 socket layout.

        Socket indices are version-specific; this variant is only defined
        when running inside Blender < 3.0.
        """
        if state.parse_surface:
            c.write_normal(node.inputs[20])  # Normal
            state.out_basecol = c.parse_vector_input(node.inputs[0])   # Base Color
            state.out_metallic = c.parse_value_input(node.inputs[4])   # Metallic
            state.out_specular = c.parse_value_input(node.inputs[5])   # Specular
            state.out_roughness = c.parse_value_input(node.inputs[7])  # Roughness
            # Emission is only shaded when both strength and color are non-zero
            if (node.inputs['Emission Strength'].is_linked or node.inputs['Emission Strength'].default_value != 0.0)\
                    and (node.inputs['Emission'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission'], (0.0, 0.0, 0.0), comp_alpha=False)):
                emission_col = c.parse_vector_input(node.inputs[17])
                emission_strength = c.parse_value_input(node.inputs[18])
                state.out_emission_col = '({0} * {1})'.format(emission_col, emission_strength)
                mat_state.emission_type = mat_state.EmissionType.SHADED
            else:
                mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
        if state.parse_opacity:
            state.out_ior = c.parse_value_input(node.inputs[14])      # IOR
            state.out_opacity = c.parse_value_input(node.inputs[19])  # Alpha
|
||||
if bpy.app.version >= (3, 0, 0) and bpy.app.version <= (4, 1, 0):
    def parse_bsdfprincipled(node: bpy.types.ShaderNodeBsdfPrincipled, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Principled BSDF for Blender 3.0 through 4.1.0.

        NOTE(review): the upper bound compares full version tuples, so e.g.
        (4, 1, 1) falls into the > (4, 1, 0) variant — confirm this matches
        the intended socket-layout boundary.
        """
        if state.parse_surface:
            c.write_normal(node.inputs[22])  # Normal
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            # subsurface = c.parse_vector_input(node.inputs[1])
            # subsurface_radius = c.parse_vector_input(node.inputs[2])
            # subsurface_color = c.parse_vector_input(node.inputs[3])
            state.out_metallic = c.parse_value_input(node.inputs[6])
            state.out_specular = c.parse_value_input(node.inputs[7])
            # specular_tint = c.parse_vector_input(node.inputs[6])
            state.out_roughness = c.parse_value_input(node.inputs[9])
            # aniso = c.parse_vector_input(node.inputs[8])
            # aniso_rot = c.parse_vector_input(node.inputs[9])
            # sheen = c.parse_vector_input(node.inputs[10])
            # sheen_tint = c.parse_vector_input(node.inputs[11])
            # clearcoat = c.parse_vector_input(node.inputs[12])
            # clearcoat_rough = c.parse_vector_input(node.inputs[13])
            # ior = c.parse_vector_input(node.inputs[14])
            # transmission = c.parse_vector_input(node.inputs[15])
            # transmission_roughness = c.parse_vector_input(node.inputs[16])
            # Emission is only shaded when both strength and color are non-zero
            if (node.inputs['Emission Strength'].is_linked or node.inputs['Emission Strength'].default_value != 0.0)\
                    and (node.inputs['Emission'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission'], (0.0, 0.0, 0.0), comp_alpha=False)):
                emission_col = c.parse_vector_input(node.inputs[19])
                emission_strength = c.parse_value_input(node.inputs[20])
                state.out_emission_col = '({0} * {1})'.format(emission_col, emission_strength)
                mat_state.emission_type = mat_state.EmissionType.SHADED
            else:
                mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
            # clearcoar_normal = c.parse_vector_input(node.inputs[21])
            # tangent = c.parse_vector_input(node.inputs[22])
        if state.parse_opacity:
            state.out_ior = c.parse_value_input(node.inputs[16])
            # Alpha socket only exists on newer node revisions
            if len(node.inputs) >= 21:
                state.out_opacity = c.parse_value_input(node.inputs[21])
|
||||
if bpy.app.version > (4, 1, 0):
    def parse_bsdfprincipled(node: bpy.types.ShaderNodeBsdfPrincipled, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Principled BSDF for Blender versions after 4.1.0.

        NOTE(review): subsurface values below are parsed but never consumed;
        parsing has shader-code side effects, so they are kept as-is — verify
        whether they are intentionally pre-registered or dead code.
        """
        if state.parse_surface:
            c.write_normal(node.inputs[5])  # Normal
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            subsurface = c.parse_value_input(node.inputs[7])
            subsurface_radius = c.parse_vector_input(node.inputs[9])
            subsurface_color = c.parse_vector_input(node.inputs[8])
            state.out_metallic = c.parse_value_input(node.inputs[1])
            state.out_specular = c.parse_value_input(node.inputs[12])
            state.out_roughness = c.parse_value_input(node.inputs[2])
            # Prevent black material when metal = 1.0 and roughness = 0.0
            try:
                if float(state.out_roughness) < 0.00101:
                    state.out_roughness = '0.001'
            except ValueError:
                # Roughness is a runtime expression, not a literal — leave it
                pass
            # Emission is only shaded when both strength and color are non-zero
            if (node.inputs['Emission Strength'].is_linked or node.inputs['Emission Strength'].default_value != 0.0)\
                    and (node.inputs['Emission Color'].is_linked or not mat_utils.equals_color_socket(node.inputs['Emission Color'], (0.0, 0.0, 0.0), comp_alpha=False)):
                emission_col = c.parse_vector_input(node.inputs[26])
                emission_strength = c.parse_value_input(node.inputs[27])
                state.out_emission_col = '({0} * {1})'.format(emission_col, emission_strength)
                mat_state.emission_type = mat_state.EmissionType.SHADED
            else:
                mat_state.emission_type = mat_state.EmissionType.NO_EMISSION
            #state.out_occlusion = state.out_roughness
            #state.out_aniso = c.parse_vector_input(node.inputs[14])
            #state.out_aniso_rot = c.parse_vector_input(node.inputs[15])
            #state.out_sheen = c.parse_vector_input(node.inputs[23])
            #state.out_sheen_tint = c.parse_vector_input(node.inputs[25])
            #state.out_clearcoat = c.parse_vector_input(node.inputs[18])
            #state.out_clearcoat_rough = c.parse_vector_input(node.inputs[19])
            #state.out_ior = c.parse_value_input(node.inputs[3])
            #state.out_transmission = c.parse_vector_input(node.inputs[17])
            #state.out_transmission_roughness = state.out_roughness
        if state.parse_opacity:
            state.out_ior = c.parse_value_input(node.inputs[3])      # IOR
            state.out_opacity = c.parse_value_input(node.inputs[4])  # Alpha
|
||||
|
||||
def parse_bsdfdiffuse(node: bpy.types.ShaderNodeBsdfDiffuse, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Diffuse BSDF: base color and roughness, no specular."""
    if not state.parse_surface:
        return
    c.write_normal(node.inputs[2])
    state.out_basecol = c.parse_vector_input(node.inputs[0])
    state.out_roughness = c.parse_value_input(node.inputs[1])
    state.out_specular = '0.0'
|
||||
|
||||
if bpy.app.version >= (4, 0, 0):
    def parse_bsdfsheen(node: bpy.types.ShaderNodeBsdfSheen, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Sheen BSDF (node exists only in Blender 4.0+)."""
        if state.parse_surface:
            c.write_normal(node.inputs[2])
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            state.out_roughness = c.parse_value_input(node.inputs[1])
|
||||
|
||||
if bpy.app.version < (4, 1, 0):
    def parse_bsdfglossy(node: bpy.types.ShaderNodeBsdfGlossy, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Glossy BSDF as a fully metallic surface."""
        if state.parse_surface:
            c.write_normal(node.inputs[2])
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            state.out_roughness = c.parse_value_input(node.inputs[1])
            state.out_metallic = '1.0'
else:
    # In Blender 4.1+ Glossy was merged into the Anisotropic BSDF type
    def parse_bsdfglossy(node: bpy.types.ShaderNodeBsdfAnisotropic, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Glossy (Anisotropic) BSDF as a fully metallic surface."""
        if state.parse_surface:
            c.write_normal(node.inputs[4])
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            state.out_roughness = c.parse_value_input(node.inputs[1])
            state.out_metallic = '1.0'
|
||||
|
||||
|
||||
def parse_ambientocclusion(node: bpy.types.ShaderNodeAmbientOcclusion, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Ambient Occlusion node into the occlusion output."""
    if state.parse_surface:
        # Single channel
        state.out_occlusion = c.parse_vector_input(node.inputs[0]) + '.r'
|
||||
|
||||
|
||||
def parse_bsdfanisotropic(node: bpy.types.ShaderNodeBsdfAnisotropic, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Anisotropic BSDF; anisotropy is unsupported, so it reverts to glossy."""
    if state.parse_surface:
        c.write_normal(node.inputs[4])
        # Revert to glossy
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        state.out_roughness = c.parse_value_input(node.inputs[1])
        state.out_metallic = '1.0'
|
||||
|
||||
|
||||
def parse_emission(node: bpy.types.ShaderNodeEmission, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Emission shader: shadeless emission, all other channels zeroed."""
    if not state.parse_surface:
        return
    col = c.parse_vector_input(node.inputs[0])
    strength = c.parse_value_input(node.inputs[1])
    state.out_emission_col = f'({col} * {strength})'
    state.out_basecol = 'vec3(0.0)'
    state.out_specular = '0.0'
    state.out_metallic = '0.0'
    mat_state.emission_type = mat_state.EmissionType.SHADELESS
|
||||
|
||||
|
||||
def parse_bsdfglass(node: bpy.types.ShaderNodeBsdfGlass, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Glass BSDF; opacity is forced to 0.0 and the IOR socket is forwarded."""
    if state.parse_surface:
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        c.write_normal(node.inputs[3])
        state.out_roughness = c.parse_value_input(node.inputs[1])
    if state.parse_opacity:
        state.out_opacity = '0.0'
        state.out_ior = c.parse_value_input(node.inputs[2])
|
||||
|
||||
|
||||
def parse_bsdfhair(node: bpy.types.ShaderNodeBsdfHair, out_socket: NodeSocket, state: ParserState) -> None:
    """Hair BSDF is not supported; intentionally a no-op."""
    pass
|
||||
|
||||
|
||||
def parse_holdout(node: bpy.types.ShaderNodeHoldout, out_socket: NodeSocket, state: ParserState) -> None:
    """Approximate the Holdout shader by fully occluding the surface."""
    if state.parse_surface:
        # Occlude
        state.out_occlusion = '0.0'
|
||||
|
||||
|
||||
def parse_bsdfrefraction(node: bpy.types.ShaderNodeBsdfRefraction, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Refraction BSDF; opacity is forced to 0.0 and the IOR socket is forwarded."""
    if state.parse_surface:
        state.out_basecol = c.parse_vector_input(node.inputs[0])
        c.write_normal(node.inputs[3])
        state.out_roughness = c.parse_value_input(node.inputs[1])
    if state.parse_opacity:
        state.out_opacity = '0.0'
        state.out_ior = c.parse_value_input(node.inputs[2])
|
||||
|
||||
def parse_subsurfacescattering(node: bpy.types.ShaderNodeSubsurfaceScattering, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Subsurface Scattering node; only color and normal are used."""
    if state.parse_surface:
        # The Normal socket index moved in Blender 4.1
        if bpy.app.version < (4, 1, 0):
            c.write_normal(node.inputs[4])
        else:
            c.write_normal(node.inputs[6])
        state.out_basecol = c.parse_vector_input(node.inputs[0])
|
||||
|
||||
|
||||
def parse_bsdftoon(node: bpy.types.ShaderNodeBsdfToon, out_socket: NodeSocket, state: ParserState) -> None:
    """Toon BSDF is not supported; intentionally a no-op."""
    # c.write_normal(node.inputs[3])
    pass
|
||||
|
||||
|
||||
def parse_bsdftranslucent(node: bpy.types.ShaderNodeBsdfTranslucent, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Translucent BSDF; opacity is derived from the color's red channel."""
    if state.parse_surface:
        c.write_normal(node.inputs[1])
    if state.parse_opacity:
        state.out_opacity = '(1.0 - {0}.r)'.format(c.parse_vector_input(node.inputs[0]))
        state.out_ior = '1.0'
|
||||
|
||||
|
||||
def parse_bsdftransparent(node: bpy.types.ShaderNodeBsdfTransparent, out_socket: NodeSocket, state: ParserState) -> None:
    """Parse the Transparent BSDF; opacity is derived from the color's red channel."""
    if state.parse_opacity:
        state.out_opacity = '(1.0 - {0}.r)'.format(c.parse_vector_input(node.inputs[0]))
        state.out_ior = '1.0'
|
||||
|
||||
if bpy.app.version < (4, 1, 0):
    # Velvet BSDF was removed in Blender 4.1, so no parser is defined there
    def parse_bsdfvelvet(node: bpy.types.ShaderNodeBsdfVelvet, out_socket: NodeSocket, state: ParserState) -> None:
        """Parse the Velvet BSDF with fixed full roughness and metallic."""
        if state.parse_surface:
            c.write_normal(node.inputs[2])
            state.out_basecol = c.parse_vector_input(node.inputs[0])
            state.out_roughness = '1.0'
            state.out_metallic = '1.0'
|
591
leenkx/blender/lnx/material/cycles_nodes/nodes_texture.py
Normal file
591
leenkx/blender/lnx/material/cycles_nodes/nodes_texture.py
Normal file
@ -0,0 +1,591 @@
|
||||
import math
|
||||
import os
|
||||
from typing import Union
|
||||
|
||||
import bpy
|
||||
|
||||
import lnx.assets as assets
|
||||
import lnx.log as log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
from lnx.material.parser_state import ParserState, ParserContext, ParserPass
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
import lnx.utils
|
||||
import lnx.write_probes as write_probes
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
assets = lnx.reload_module(assets)
|
||||
log = lnx.reload_module(log)
|
||||
c = lnx.reload_module(c)
|
||||
c_functions = lnx.reload_module(c_functions)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState, ParserContext, ParserPass
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
lnx.utils = lnx.reload_module(lnx.utils)
|
||||
write_probes = lnx.reload_module(write_probes)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_tex_brick(node: bpy.types.ShaderNodeTexBrick, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Brick Texture node (Color and Fac outputs)."""
    state.curshader.add_function(c_functions.str_tex_brick)

    # Fall back to the bounds-relative position when no coordinates are linked
    co = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'

    if out_socket == node.outputs[0]:  # Color
        col1 = c.parse_vector_input(node.inputs[1])
        col2 = c.parse_vector_input(node.inputs[2])
        col3 = c.parse_vector_input(node.inputs[3])
        scale = c.parse_value_input(node.inputs[4])
        return f'tex_brick({co} * {scale}, {col1}, {col2}, {col3})'

    # Fac
    scale = c.parse_value_input(node.inputs[4])
    return f'tex_brick_f({co} * {scale})'
|
||||
|
||||
|
||||
def parse_tex_checker(node: bpy.types.ShaderNodeTexChecker, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Checker Texture node (Color and Fac outputs)."""
    state.curshader.add_function(c_functions.str_tex_checker)

    # Fall back to the bounds-relative position when no coordinates are linked
    co = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'

    if out_socket == node.outputs[0]:  # Color
        col1 = c.parse_vector_input(node.inputs[1])
        col2 = c.parse_vector_input(node.inputs[2])
        scale = c.parse_value_input(node.inputs[3])
        return f'tex_checker({co}, {col1}, {col2}, {scale})'

    # Fac
    scale = c.parse_value_input(node.inputs[3])
    return f'tex_checker_f({co}, {scale})'
|
||||
|
||||
|
||||
def parse_tex_gradient(node: bpy.types.ShaderNodeTexGradient, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Parse the Gradient Texture node.

    Quadratic, Easing and Quadratic Sphere types are unsupported and
    evaluate to 0.0; anything unrecognized is treated as Spherical.
    """
    # Fall back to the bounds-relative position when no coordinates are linked
    co = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'

    gradient_exprs = {
        'LINEAR': f'{co}.x',
        'QUADRATIC': '0.0',
        'EASING': '0.0',
        'DIAGONAL': f'({co}.x + {co}.y) * 0.5',
        'RADIAL': f'atan({co}.y, {co}.x) / PI2 + 0.5',
        'QUADRATIC_SPHERE': '0.0',
    }
    spherical = f'max(1.0 - sqrt({co}.x * {co}.x + {co}.y * {co}.y + {co}.z * {co}.z), 0.0)'
    f = gradient_exprs.get(node.gradient_type, spherical)

    if out_socket == node.outputs[0]:  # Color
        return f'vec3(clamp({f}, 0.0, 1.0))'
    # Fac
    return f'(clamp({f}, 0.0, 1.0))'
|
||||
|
||||
|
||||
def parse_tex_image(node: bpy.types.ShaderNodeTexImage, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for an Image Texture node.

    Returns a `.rgb` expression for the Color output or a `.a` expression
    for the Alpha output. Object materials sample a regular texture
    uniform; world shaders sample the prefiltered environment map using
    screen-space coordinates (see the TODO below).
    """
    # Color or Alpha output
    use_color_out = out_socket == node.outputs[0]

    if state.context == ParserContext.OBJECT:
        tex_store = c.store_var_name(node)

        # Use a distinct store per parser pass when screen-space derivative
        # evaluation forces the node to be re-evaluated
        if c.node_need_reevaluation_for_screenspace_derivative(node):
            tex_store += state.get_parser_pass_suffix()

        # Already fetched
        if c.is_parsed(tex_store):
            if use_color_out:
                return f'{tex_store}.rgb'
            else:
                return f'{tex_store}.a'

        tex_name = c.node_name(node.name)
        tex = c.make_texture_from_image_node(node, tex_name)
        tex_link = None
        tex_default_file = None
        is_lnx_mat_param = None
        # Material parameters allow overriding the texture at runtime
        if node.lnx_material_param:
            tex_link = node.name
            is_lnx_mat_param = True

        if tex is not None:
            state.curshader.write_textures += 1
            if node.lnx_material_param and tex['file'] is not None:
                tex_default_file = tex['file']
            if use_color_out:
                # Convert sRGB images to linear space at sample time
                to_linear = node.image is not None and node.image.colorspace_settings.name == 'sRGB'
                res = f'{c.texture_store(node, tex, tex_name, to_linear, tex_link=tex_link, default_value=tex_default_file, is_lnx_mat_param=is_lnx_mat_param)}.rgb'
            else:
                res = f'{c.texture_store(node, tex, tex_name, tex_link=tex_link, default_value=tex_default_file, is_lnx_mat_param=is_lnx_mat_param)}.a'
            state.curshader.write_textures -= 1
            return res

        # Empty texture
        elif node.image is None:
            tex = {
                'name': tex_name,
                'file': ''
            }
            if use_color_out:
                return '{0}.rgb'.format(c.texture_store(node, tex, tex_name, to_linear=False, tex_link=tex_link, is_lnx_mat_param=is_lnx_mat_param))
            return '{0}.a'.format(c.texture_store(node, tex, tex_name, to_linear=True, tex_link=tex_link, is_lnx_mat_param=is_lnx_mat_param))

        # Pink color for missing texture
        else:
            if use_color_out:
                state.parsed.add(tex_store)
                state.curshader.write_textures += 1
                state.curshader.write(f'vec4 {tex_store} = vec4(1.0, 0.0, 1.0, 1.0);')
                state.curshader.write_textures -= 1
                return f'{tex_store}.rgb'
            else:
                state.curshader.write(f'vec4 {tex_store} = vec4(1.0, 0.0, 1.0, 1.0);')
                return f'{tex_store}.a'

    # World context
    # TODO: Merge with above implementation to also allow mappings other than using view coordinates
    else:
        world = state.world
        world.world_defs += '_EnvImg'

        # Background texture
        state.curshader.add_uniform('sampler2D envmap', link='_envmap')
        state.curshader.add_uniform('vec2 screenSize', link='_screenSize')

        image = node.image
        if image is None:
            log.warn(f'World "{world.name}": image texture node "{node.name}" is empty')
            return 'vec3(0.0, 0.0, 0.0)' if use_color_out else '0.0'

        filepath = image.filepath

        if image.packed_file is not None:
            # Extract packed data
            filepath = lnx.utils.build_dir() + '/compiled/Assets/unpacked'
            unpack_path = lnx.utils.get_fp() + filepath
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)
            unpack_filepath = unpack_path + '/' + image.name
            # Only rewrite the unpacked file when the size changed (cheap staleness check)
            if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != image.packed_file.size:
                with open(unpack_filepath, 'wb') as f:
                    f.write(image.packed_file.data)
            assets.add(unpack_filepath)
        else:
            # Link image path to assets
            assets.add(lnx.utils.asset_path(image.filepath))

        # Reference image name
        tex_file = lnx.utils.extract_filename(image.filepath)
        base = tex_file.rsplit('.', 1)
        ext = base[1].lower()

        # Only .hdr keeps full dynamic range; everything else goes through JPEG
        if ext == 'hdr':
            target_format = 'HDR'
        else:
            target_format = 'JPEG'

        # Generate prefiltered envmaps
        world.lnx_envtex_name = tex_file
        world.lnx_envtex_irr_name = tex_file.rsplit('.', 1)[0]

        disable_hdr = target_format == 'JPEG'
        from_srgb = image.colorspace_settings.name == "sRGB"

        rpdat = lnx.utils.get_rp()
        mip_count = world.lnx_envtex_num_mips
        mip_count = write_probes.write_probes(filepath, disable_hdr, from_srgb, mip_count, lnx_radiance=rpdat.lnx_radiance)

        world.lnx_envtex_num_mips = mip_count

        # Will have to get rid of gl_FragCoord, pass texture coords from vertex shader
        state.curshader.write_init('vec2 texco = gl_FragCoord.xy / screenSize;')
        return 'texture(envmap, vec2(texco.x, 1.0 - texco.y)).rgb * envmapStrength'
|
||||
|
||||
|
||||
def parse_tex_magic(node: bpy.types.ShaderNodeTexMagic, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for a Magic Texture node (Color and Fac outputs)."""
    state.curshader.add_function(c_functions.str_tex_magic)

    coords = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'
    scale = c.parse_value_input(node.inputs[1])

    # Both variants share the same scaled coordinate argument
    arg = f'{coords} * {scale} * 4.0'
    if out_socket == node.outputs[0]:  # Color output
        return f'tex_magic({arg})'
    return f'tex_magic_f({arg})'  # Fac output
|
||||
|
||||
if bpy.app.version < (4, 1, 0):
    # The standalone Musgrave texture only exists before Blender 4.1
    # (newer versions expose it through the Noise texture instead).
    def parse_tex_musgrave(node: bpy.types.ShaderNodeTexMusgrave, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
        """Emit GLSL for a Musgrave Texture node (scalar approximation)."""
        state.curshader.add_function(c_functions.str_tex_musgrave)

        tex_coord = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'
        scale = c.parse_value_input(node.inputs['Scale'])
        # detail = c.parse_value_input(node.inputs[2])
        # distortion = c.parse_value_input(node.inputs[3])

        return f'tex_musgrave_f({tex_coord} * {scale} * 0.5)'
|
||||
|
||||
|
||||
def parse_tex_noise(node: bpy.types.ShaderNodeTexNoise, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for a Noise Texture node.

    On Blender >= 4.1 the Musgrave texture was merged into the Noise texture
    as the 'FBM' noise type; that case is routed to tex_musgrave_f. All other
    cases use tex_noise identically on every Blender version.

    Fix: the original duplicated the entire tex_noise branch for pre- and
    post-4.1 Blender even though both arms were byte-identical; the version
    check is now only applied where behavior actually differs. The version
    guard must stay in front of the `noise_type` access because that
    attribute only exists on 4.1+.
    """
    c.write_procedurals()
    state.curshader.add_function(c_functions.str_tex_noise)
    c.assets_add(os.path.join(lnx.utils.get_sdk_path(), 'leenkx', 'Assets', 'noise256.png'))
    c.assets_add_embedded_data('noise256.png')
    state.curshader.add_uniform('sampler2D snoise256', link='$noise256.png')

    if node.inputs[0].is_linked:
        co = c.parse_vector_input(node.inputs[0])
    else:
        co = 'bposition'

    scale = c.parse_value_input(node.inputs[2])
    detail = c.parse_value_input(node.inputs[3])
    # Roughness is parsed so any linked subtree is still emitted, but the
    # GLSL noise implementation does not consume it.
    roughness = c.parse_value_input(node.inputs[4])
    distortion = c.parse_value_input(node.inputs[5])

    # Blender >= 4.1: 'FBM' noise replaces the removed Musgrave texture.
    if bpy.app.version >= (4, 1, 0) and node.noise_type == "FBM":
        state.curshader.add_function(c_functions.str_tex_musgrave)
        if out_socket == node.outputs[1]:  # Color output
            return 'vec3(tex_musgrave_f({0} * {1}), tex_musgrave_f({0} * {1} + 120.0), tex_musgrave_f({0} * {1} + 168.0))'.format(co, scale, detail, distortion)
        return f'tex_musgrave_f({co} * {scale} * 1.0)'

    # Regular noise (identical across Blender versions)
    if out_socket == node.outputs[1]:  # Color output
        return 'vec3(tex_noise({0} * {1},{2},{3}), tex_noise({0} * {1} + 120.0,{2},{3}), tex_noise({0} * {1} + 168.0,{2},{3}))'.format(co, scale, detail, distortion)
    return 'tex_noise({0} * {1},{2},{3})'.format(co, scale, detail, distortion)
|
||||
|
||||
|
||||
def parse_tex_pointdensity(node: bpy.types.ShaderNodeTexPointDensity, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Point Density is not supported; emit neutral pass-through values."""
    if out_socket == node.outputs[0]:
        # Color output
        return c.to_vec3([0.0, 0.0, 0.0])
    # Density output
    return '0.0'
|
||||
|
||||
|
||||
def parse_tex_sky(node: bpy.types.ShaderNodeTexSky, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for a Sky Texture node, dispatching on the sky model.

    Preetham is mapped onto Hosek-Wilkie; unsupported models log an error
    and return black. Object-context usage is a plain pass-through.
    """
    if state.context == ParserContext.OBJECT:
        # Pass through
        return c.to_vec3([0.0, 0.0, 0.0])

    state.world.world_defs += '_EnvSky'

    sky_type = node.sky_type
    if sky_type in ('PREETHAM', 'HOSEK_WILKIE'):
        if sky_type == 'PREETHAM':
            log.info('Info: Preetham sky model is not supported, using Hosek Wilkie sky model instead')
        return parse_sky_hosekwilkie(node, state)

    if sky_type == 'NISHITA':
        return parse_sky_nishita(node, state)

    log.error(f'Unsupported sky model: {node.sky_type}!')
    return c.to_vec3([0.0, 0.0, 0.0])
|
||||
|
||||
|
||||
def parse_sky_hosekwilkie(node: bpy.types.ShaderNodeTexSky, state: ParserState) -> vec3str:
    """Emit GLSL for the Hosek-Wilkie analytic sky model.

    Registers the model coefficient uniforms (computed CPU-side and linked
    via the `_hosek*` links), stores the node's sun/turbidity/albedo
    settings on the world for export, writes placeholder prefiltered
    radiance maps once per world, and returns the sky color expression.
    """
    world = state.world
    curshader = state.curshader

    # Match to cycles
    world.lnx_envtex_strength *= 0.1

    assets.add_khafile_def('lnx_hosek')
    # Hosek-Wilkie fit coefficients A..I plus zenith color Z
    curshader.add_uniform('vec3 A', link="_hosekA")
    curshader.add_uniform('vec3 B', link="_hosekB")
    curshader.add_uniform('vec3 C', link="_hosekC")
    curshader.add_uniform('vec3 D', link="_hosekD")
    curshader.add_uniform('vec3 E', link="_hosekE")
    curshader.add_uniform('vec3 F', link="_hosekF")
    curshader.add_uniform('vec3 G', link="_hosekG")
    curshader.add_uniform('vec3 H', link="_hosekH")
    curshader.add_uniform('vec3 I', link="_hosekI")
    curshader.add_uniform('vec3 Z', link="_hosekZ")
    curshader.add_uniform('vec3 hosekSunDirection', link="_hosekSunDirection")
    curshader.add_function("""vec3 hosekWilkie(float cos_theta, float gamma, float cos_gamma) {
\tvec3 chi = (1 + cos_gamma * cos_gamma) / pow(1 + H * H - 2 * cos_gamma * H, vec3(1.5));
\treturn (1 + A * exp(B / (cos_theta + 0.01))) * (C + D * exp(E * gamma) + F * (cos_gamma * cos_gamma) + G * chi + I * sqrt(cos_theta));
}""")

    # Store node settings on the world so the exporter can bake matching data
    world.lnx_envtex_sun_direction = [node.sun_direction[0], node.sun_direction[1], node.sun_direction[2]]
    world.lnx_envtex_turbidity = node.turbidity
    world.lnx_envtex_ground_albedo = node.ground_albedo

    wrd = bpy.data.worlds['Lnx']
    rpdat = lnx.utils.get_rp()
    mobile_mat = rpdat.lnx_material_model == 'Mobile' or rpdat.lnx_material_model == 'Solid'

    # Probe data is only written once per world parse
    if not state.radiance_written:
        # Irradiance json file name
        wname = lnx.utils.safestr(world.name)
        world.lnx_envtex_irr_name = wname
        write_probes.write_sky_irradiance(wname)

        # Radiance
        if rpdat.lnx_radiance and rpdat.lnx_irradiance and not mobile_mat:
            wrd.world_defs += '_Rad'
            hosek_path = 'leenkx/Assets/hosek/'
            sdk_path = lnx.utils.get_sdk_path()
            # Use fake maps for now
            assets.add(sdk_path + '/' + hosek_path + 'hosek_radiance.hdr')
            for i in range(0, 8):
                assets.add(sdk_path + '/' + hosek_path + 'hosek_radiance_' + str(i) + '.hdr')

            world.lnx_envtex_name = 'hosek'
            world.lnx_envtex_num_mips = 8

        state.radiance_written = True

    # cos_theta: elevation of the sky dome direction; cos_gamma: angle to the sun
    curshader.write('float cos_theta = clamp(pos.z, 0.0, 1.0);')
    curshader.write('float cos_gamma = dot(pos, hosekSunDirection);')
    curshader.write('float gamma_val = acos(cos_gamma);')

    return 'Z * hosekWilkie(cos_theta, gamma_val, cos_gamma) * envmapStrength;'
|
||||
|
||||
|
||||
def parse_sky_nishita(node: bpy.types.ShaderNodeTexSky, state: ParserState) -> vec3str:
    """Emit GLSL for the Nishita sky model, backed by a precomputed LUT."""
    shader = state.curshader
    shader.add_include('std/sky.glsl')
    shader.add_uniform('vec3 sunDir', link='_sunDirection')
    shader.add_uniform('sampler2D nishitaLUT', link='_nishitaLUT', included=True,
                       tex_addr_u='clamp', tex_addr_v='clamp')
    shader.add_uniform('vec2 nishitaDensity', link='_nishitaDensity', included=True)

    planet_radius = 6360e3  # Earth radius used in Blender
    ray_origin_z = planet_radius + node.altitude

    # Exported so the LUT is generated with the matching densities
    state.world.lnx_nishita_density = [node.air_density, node.dust_density, node.ozone_density]

    sun = ''
    if node.sun_disc:
        # The sun size is calculated relative in terms of the distance
        # between the sun position and the sky dome normal at every
        # pixel (see sun_disk() in sky.glsl).
        #
        # An isosceles triangle is created with the camera at the
        # opposite side of the base with node.sun_size being the vertex
        # angle from which the base angle theta is calculated. Iron's
        # skydome geometry roughly resembles a unit sphere, so the leg
        # size is set to 1. The base size is the doubled normal-relative
        # target size.

        # sun_size is already in radians despite being degrees in the UI
        theta = 0.5 * (math.pi - node.sun_size)
        size = math.cos(theta)
        sun = f'* sun_disk(pos, sunDir, {size}, {node.sun_intensity})'

    return f'nishita_atmosphere(pos, vec3(0, 0, {ray_origin_z}), sunDir, {planet_radius}){sun}'
|
||||
|
||||
|
||||
def parse_tex_environment(node: bpy.types.ShaderNodeTexEnvironment, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for an Environment Texture node (world context only).

    Unpacks or converts the source image as needed (EXR -> HDR, other
    formats -> JPEG), registers it as an asset, generates prefiltered
    radiance/irradiance probes once per world, and returns an
    equirectangular envmap sample expression. Missing images yield pink.
    """
    if state.context == ParserContext.OBJECT:
        log.warn('Environment Texture node is not supported for object node trees, using default value')
        return c.to_vec3([0.0, 0.0, 0.0])

    if node.image is None:
        return c.to_vec3([1.0, 0.0, 1.0])

    world = state.world
    world.world_defs += '_EnvTex'

    curshader = state.curshader

    curshader.add_include('std/math.glsl')
    curshader.add_uniform('sampler2D envmap', link='_envmap')

    image = node.image
    filepath = image.filepath

    # Non-packed images must exist on disk
    if image.packed_file is None and not os.path.isfile(lnx.utils.asset_path(filepath)):
        log.warn(world.name + ' - unable to open ' + image.filepath)
        return c.to_vec3([1.0, 0.0, 1.0])

    # Reference image name
    tex_file = lnx.utils.extract_filename(image.filepath)
    base = tex_file.rsplit('.', 1)
    ext = base[1].lower()

    # Only .hdr keeps full range; other formats get converted
    if ext == 'hdr':
        target_format = 'HDR'
    else:
        target_format = 'JPEG'
    do_convert = ext != 'hdr' and ext != 'jpg'
    if do_convert:
        # EXR preserves range via HDR; everything else becomes JPEG
        if ext == 'exr':
            tex_file = base[0] + '.hdr'
            target_format = 'HDR'
        else:
            tex_file = base[0] + '.jpg'
            target_format = 'JPEG'

    if image.packed_file is not None:
        # Extract packed data
        unpack_path = lnx.utils.get_fp_build() + '/compiled/Assets/unpacked'
        if not os.path.exists(unpack_path):
            os.makedirs(unpack_path)
        unpack_filepath = unpack_path + '/' + tex_file
        filepath = unpack_filepath

        if do_convert:
            if not os.path.isfile(unpack_filepath):
                lnx.utils.unpack_image(image, unpack_filepath, file_format=target_format)

        # Write raw bytes only when the file is missing or its size changed
        elif not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != image.packed_file.size:
            with open(unpack_filepath, 'wb') as f:
                f.write(image.packed_file.data)

        assets.add(unpack_filepath)
    else:
        if do_convert:
            unpack_path = lnx.utils.get_fp_build() + '/compiled/Assets/unpacked'
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)
            converted_path = unpack_path + '/' + tex_file
            filepath = converted_path
            # TODO: delete cache when file changes
            if not os.path.isfile(converted_path):
                lnx.utils.convert_image(image, converted_path, file_format=target_format)
            assets.add(converted_path)
        else:
            # Link image path to assets
            assets.add(lnx.utils.asset_path(image.filepath))

    rpdat = lnx.utils.get_rp()

    # Probe data is only written once per world parse
    if not state.radiance_written:
        # Generate prefiltered envmaps
        world.lnx_envtex_name = tex_file
        world.lnx_envtex_irr_name = tex_file.rsplit('.', 1)[0]
        disable_hdr = target_format == 'JPEG'
        from_srgb = image.colorspace_settings.name == "sRGB"

        mip_count = world.lnx_envtex_num_mips
        mip_count = write_probes.write_probes(filepath, disable_hdr, from_srgb, mip_count, lnx_radiance=rpdat.lnx_radiance)

        world.lnx_envtex_num_mips = mip_count

        state.radiance_written = True

        # Append LDR define
        if disable_hdr:
            world.world_defs += '_EnvLDR'
            assets.add_khafile_def("lnx_envldr")

    wrd = bpy.data.worlds['Lnx']
    mobile_mat = rpdat.lnx_material_model == 'Mobile' or rpdat.lnx_material_model == 'Solid'

    # Append radiance define
    if rpdat.lnx_irradiance and rpdat.lnx_radiance and not mobile_mat:
        wrd.world_defs += '_Rad'

    return 'texture(envmap, envMapEquirect(pos)).rgb * envmapStrength'
|
||||
|
||||
|
||||
def parse_tex_voronoi(node: bpy.types.ShaderNodeTexVoronoi, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for a Voronoi Texture node.

    The output-socket type and distance metric are encoded as integer flags
    for the tex_voronoi GLSL function.
    """
    # 0 = value, 1 = color, 2 = position
    outp = {'RGBA': 1, 'VECTOR': 2}.get(out_socket.type, 0)
    # 0 = euclidean (default), 1 = manhattan, 2 = chebychev, 3 = minkowski
    m = {'MANHATTAN': 1, 'CHEBYCHEV': 2, 'MINKOWSKI': 3}.get(node.distance, 0)

    c.write_procedurals()
    state.curshader.add_function(c_functions.str_tex_voronoi)

    co = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'

    scale = c.parse_value_input(node.inputs[2])
    exp = c.parse_value_input(node.inputs[4])
    randomness = c.parse_value_input(node.inputs[5])

    call = f'tex_voronoi({co}, {randomness}, {m}, {outp}, {scale}, {exp})'
    # Color or Position outputs take the full vector, Distance only .x
    if out_socket == node.outputs[1] or out_socket == node.outputs[2]:
        return call
    return f'{call}.x'
|
||||
|
||||
|
||||
def parse_tex_wave(node: bpy.types.ShaderNodeTexWave, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for a Wave Texture node (Color and Fac outputs)."""
    c.write_procedurals()
    state.curshader.add_function(c_functions.str_tex_wave)

    co = c.parse_vector_input(node.inputs[0]) if node.inputs[0].is_linked else 'bposition'
    scale = c.parse_value_input(node.inputs[1])
    distortion = c.parse_value_input(node.inputs[2])
    detail = c.parse_value_input(node.inputs[3])
    detail_scale = c.parse_value_input(node.inputs[4])

    # Profile: 0 = sine, 1 = saw; type: 0 = bands, 1 = rings
    wave_profile = 0 if node.wave_profile == 'SIN' else 1
    wave_type = 0 if node.wave_type == 'BANDS' else 1

    fac = f'tex_wave_f({co} * {scale},{wave_type},{wave_profile},{distortion},{detail},{detail_scale})'
    if out_socket == node.outputs[0]:  # Color output
        return f'vec3({fac})'
    return fac  # Fac output
|
205
leenkx/blender/lnx/material/cycles_nodes/nodes_vector.py
Normal file
205
leenkx/blender/lnx/material/cycles_nodes/nodes_vector.py
Normal file
@ -0,0 +1,205 @@
|
||||
from typing import Union
|
||||
|
||||
import bpy
|
||||
from mathutils import Euler, Vector
|
||||
|
||||
import lnx.log
|
||||
import lnx.material.cycles as c
|
||||
import lnx.material.cycles_functions as c_functions
|
||||
from lnx.material.parser_state import ParserState, ParserPass
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
import lnx.utils as utils
|
||||
|
||||
if lnx.is_reload(__name__):
|
||||
lnx.log = lnx.reload_module(lnx.log)
|
||||
c = lnx.reload_module(c)
|
||||
c_functions = lnx.reload_module(c_functions)
|
||||
lnx.material.parser_state = lnx.reload_module(lnx.material.parser_state)
|
||||
from lnx.material.parser_state import ParserState, ParserPass
|
||||
lnx.material.shader = lnx.reload_module(lnx.material.shader)
|
||||
from lnx.material.shader import floatstr, vec3str
|
||||
utils = lnx.reload_module(utils)
|
||||
else:
|
||||
lnx.enable_reload(__name__)
|
||||
|
||||
|
||||
def parse_curvevec(node: bpy.types.ShaderNodeVectorCurve, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for a Vector Curves node by evaluating each axis curve."""
    fac = c.parse_value_input(node.inputs[0])
    vec = c.parse_vector_input(node.inputs[1])
    curves = node.mapping.curves
    name = c.node_name(node.name)
    # mapping.curves[0].points[0].handle_type # bezier curve
    curve_x = c.vector_curve(name + '0', vec + '.x', curves[0].points)
    curve_y = c.vector_curve(name + '1', vec + '.y', curves[1].points)
    curve_z = c.vector_curve(name + '2', vec + '.z', curves[2].points)
    return f'(vec3({curve_x}, {curve_y}, {curve_z}) * {fac})'
|
||||
|
||||
|
||||
def parse_bump(node: bpy.types.ShaderNodeBump, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for a Bump node by perturbing the world normal `n`.

    The Height input is evaluated three times — once in the regular pass
    and once per screen-space derivative pass — to construct surface
    tangents from the height differences. The parser-pass switching order
    below is significant; it must end back on REGULAR.
    """
    if state.curshader.shader_type != 'frag':
        lnx.log.warn("Bump node not supported outside of fragment shaders")
        return 'vec3(0.0)'

    # Interpolation strength
    strength = c.parse_value_input(node.inputs[0])
    # Height multiplier
    # distance = c.parse_value_input(node.inputs[1])
    height = c.parse_value_input(node.inputs[2])

    # Re-parse the height input under the derivative passes to obtain
    # screen-space dH/dx and dH/dy expressions
    state.current_pass = ParserPass.DX_SCREEN_SPACE
    height_dx = c.parse_value_input(node.inputs[2])
    state.current_pass = ParserPass.DY_SCREEN_SPACE
    height_dy = c.parse_value_input(node.inputs[2])
    state.current_pass = ParserPass.REGULAR

    # nor = c.parse_vector_input(node.inputs[3])

    # If both derivative expressions equal the plain height expression, the
    # height is constant in screen space and the normal stays unperturbed
    if height_dx != height or height_dy != height:
        tangent = f'{c.dfdx_fine("wposition")} + n * ({height_dx} - {height})'
        bitangent = f'{c.dfdy_fine("wposition")} + n * ({height_dy} - {height})'

        # Cross-product operand order, dFdy is flipped on d3d11
        bitangent_first = utils.get_gapi() == 'direct3d11'

        # The node's Invert flag flips the perturbation direction
        if node.invert:
            bitangent_first = not bitangent_first

        if bitangent_first:
            # We need to normalize twice, once for the correct "weight" of the strength,
            # once for having a normalized output vector (lerping vectors does not preserve magnitude)
            res = f'normalize(mix(n, normalize(cross({bitangent}, {tangent})), {strength}))'
        else:
            res = f'normalize(mix(n, normalize(cross({tangent}, {bitangent})), {strength}))'

    else:
        res = 'n'

    return res
|
||||
|
||||
|
||||
def parse_mapping(node: bpy.types.ShaderNodeMapping, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for a Mapping node.

    Only the "Point", "Texture" and "Vector" vector types are supported for
    now. Order of operations (see
    https://docs.blender.org/manual/en/latest/render/shader_nodes/vector/mapping.html#properties):
      - "Point" and "Vector" use Scale -> Rotate -> Translate
      - "Texture" uses the inverse mapping with inverted operators

    Fix: the bodies of the calc_location/calc_scale helpers were swapped
    (calc_location applied the scale and vice versa), so "Point"/"Vector"
    mappings translated first and scaled last, contradicting the documented
    order above. Each helper now applies the transform its name — and the
    call order below — implies.
    """
    input_vector: bpy.types.NodeSocket = node.inputs[0]
    input_location: bpy.types.NodeSocket = node.inputs['Location']
    input_rotation: bpy.types.NodeSocket = node.inputs['Rotation']
    input_scale: bpy.types.NodeSocket = node.inputs['Scale']
    out = c.parse_vector_input(input_vector) if input_vector.is_linked else c.to_vec3(input_vector.default_value)
    location = c.parse_vector_input(input_location) if input_location.is_linked else c.to_vec3(input_location.default_value)
    rotation = c.parse_vector_input(input_rotation) if input_rotation.is_linked else c.to_vec3(input_rotation.default_value)
    scale = c.parse_vector_input(input_scale) if input_scale.is_linked else c.to_vec3(input_scale.default_value)

    # Use inner functions because the order of operations varies between
    # mapping node vector types. This adds a slight overhead but makes
    # the code much more readable.
    def calc_location(output: str) -> str:
        # Vectors and Eulers support the "!=" operator
        if input_location.is_linked or input_location.default_value != Vector((0, 0, 0)):
            # z location is a little off sometimes?...
            if node.vector_type == 'TEXTURE':
                output = f'({output} - {location})'
            else:
                output = f'({output} + {location})'
        return output

    def calc_scale(output: str) -> str:
        if input_scale.is_linked or input_scale.default_value != Vector((1, 1, 1)):
            if node.vector_type == 'TEXTURE':
                output = f'({output} / {scale})'
            else:
                output = f'({output} * {scale})'
        return output

    # Texture: translate first; Point/Vector: scale first
    out = calc_location(out) if node.vector_type == 'TEXTURE' else calc_scale(out)

    if input_rotation.is_linked or input_rotation.default_value != Euler((0, 0, 0)):
        var_name = c.node_name(node.name) + "_rotation" + state.get_parser_pass_suffix()
        if node.vector_type == 'TEXTURE':
            state.curshader.write(f'mat3 {var_name}X = mat3(1.0, 0.0, 0.0, 0.0, cos({rotation}.x), sin({rotation}.x), 0.0, -sin({rotation}.x), cos({rotation}.x));')
            state.curshader.write(f'mat3 {var_name}Y = mat3(cos({rotation}.y), 0.0, -sin({rotation}.y), 0.0, 1.0, 0.0, sin({rotation}.y), 0.0, cos({rotation}.y));')
            state.curshader.write(f'mat3 {var_name}Z = mat3(cos({rotation}.z), sin({rotation}.z), 0.0, -sin({rotation}.z), cos({rotation}.z), 0.0, 0.0, 0.0, 1.0);')
        else:
            # A little bit redundant, but faster than 12 more multiplications to make it work dynamically
            state.curshader.write(f'mat3 {var_name}X = mat3(1.0, 0.0, 0.0, 0.0, cos(-{rotation}.x), sin(-{rotation}.x), 0.0, -sin(-{rotation}.x), cos(-{rotation}.x));')
            state.curshader.write(f'mat3 {var_name}Y = mat3(cos(-{rotation}.y), 0.0, -sin(-{rotation}.y), 0.0, 1.0, 0.0, sin(-{rotation}.y), 0.0, cos(-{rotation}.y));')
            state.curshader.write(f'mat3 {var_name}Z = mat3(cos(-{rotation}.z), sin(-{rotation}.z), 0.0, -sin(-{rotation}.z), cos(-{rotation}.z), 0.0, 0.0, 0.0, 1.0);')

        # XYZ-order euler rotation
        out = f'{out} * {var_name}X * {var_name}Y * {var_name}Z'

    # Texture: scale last; Point/Vector: translate last
    out = calc_scale(out) if node.vector_type == 'TEXTURE' else calc_location(out)

    return out
|
||||
|
||||
|
||||
def parse_normal(node: bpy.types.ShaderNodeNormal, out_socket: bpy.types.NodeSocket, state: ParserState) -> Union[floatstr, vec3str]:
    """Emit GLSL for a Normal node: the fixed normal, or its dot product
    with the input normal."""
    fixed_normal = c.to_vec3(node.outputs['Normal'].default_value)

    if out_socket == node.outputs['Normal']:
        return fixed_normal

    if out_socket == node.outputs['Dot']:
        input_normal = c.parse_vector_input(node.inputs["Normal"])
        return f'dot({fixed_normal}, {input_normal})'
|
||||
|
||||
|
||||
def parse_normalmap(node: bpy.types.ShaderNodeNormalMap, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for a Normal Map node.

    In the tessellation evaluation shader the color input is passed
    through directly; otherwise it is registered as a normal-map fetch
    and the shader's running normal `n` is returned.
    """
    if state.curshader == state.tese:
        return c.parse_vector_input(node.inputs[1])

    # space = node.space
    # map = node.uv_map
    # Color
    c.parse_normal_map_color_input(node.inputs[1], node.inputs[0])
    return 'n'
|
||||
|
||||
|
||||
def parse_vectortransform(node: bpy.types.ShaderNodeVectorTransform, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Vector Transform is not implemented; the input vector is passed
    through unchanged regardless of the selected spaces."""
    # type = node.vector_type
    # conv_from = node.convert_from
    # conv_to = node.convert_to
    # Pass through
    return c.parse_vector_input(node.inputs[0])
|
||||
|
||||
|
||||
def parse_displacement(node: bpy.types.ShaderNodeDisplacement, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for a Displacement node as a uniform height offset.

    Midlevel and Normal are still parsed so any linked subtrees get
    emitted, but their values are not used by this simplified mapping.
    """
    height = c.parse_value_input(node.inputs[0])
    midlevel = c.parse_value_input(node.inputs[1])  # parsed, unused
    scale = c.parse_value_input(node.inputs[2])
    nor = c.parse_vector_input(node.inputs[3])  # parsed, unused
    return f'(vec3({height}) * {scale})'
|
||||
|
||||
def parse_vectorrotate(node: bpy.types.ShaderNodeVectorRotate, out_socket: bpy.types.NodeSocket, state: ParserState) -> vec3str:
    """Emit GLSL for a Vector Rotate node.

    Supports axis-angle (arbitrary and fixed axes) and Euler XYZ rotation
    around a center point. Unknown types fall back to a constant.

    Fix: the inverted factor was "0", which multiplied the rotation angle
    by zero (cancelling the rotation entirely) and could never satisfy the
    `{input_invert} < 0.0` transpose check in the Euler branch. Inverting
    a rotation negates the angle, so the factor must be "-1".
    """
    type = node.rotation_type
    input_vector: str = c.parse_vector_input(node.inputs[0])
    input_center: str = c.parse_vector_input(node.inputs[1])
    input_axis: str = c.parse_vector_input(node.inputs[2])
    input_angle: str = c.parse_value_input(node.inputs[3])
    input_rotation: str = c.parse_vector_input(node.inputs[4])

    # The axis-angle branches multiply the angle by this factor; the Euler
    # branch transposes (inverts) the rotation matrix when it is negative.
    if node.invert:
        input_invert = "-1"
    else:
        input_invert = "1"

    state.curshader.add_function(c_functions.str_rotate_around_axis)

    if type == 'AXIS_ANGLE':
        # Guard against a zero-length axis: pass the vector through unchanged
        return f'vec3( (length({input_axis}) != 0.0) ? rotate_around_axis({input_vector} - {input_center}, normalize({input_axis}), {input_angle} * {input_invert}) + {input_center} : {input_vector} )'
    elif type == 'X_AXIS':
        return f'vec3( rotate_around_axis({input_vector} - {input_center}, vec3(1.0, 0.0, 0.0), {input_angle} * {input_invert}) + {input_center} )'
    elif type == 'Y_AXIS':
        return f'vec3( rotate_around_axis({input_vector} - {input_center}, vec3(0.0, 1.0, 0.0), {input_angle} * {input_invert}) + {input_center} )'
    elif type == 'Z_AXIS':
        return f'vec3( rotate_around_axis({input_vector} - {input_center}, vec3(0.0, 0.0, 1.0), {input_angle} * {input_invert}) + {input_center} )'
    elif type == 'EULER_XYZ':
        state.curshader.add_function(c_functions.str_euler_to_mat3)
        return f'vec3( mat3(({input_invert} < 0.0) ? transpose(euler_to_mat3({input_rotation})) : euler_to_mat3({input_rotation})) * ({input_vector} - {input_center}) + {input_center})'

    return f'(vec3(1.0, 0.0, 0.0))'
|
Reference in New Issue
Block a user