2016-12-13 20:06:23 +01:00
|
|
|
#
|
|
|
|
# This module builds upon Cycles nodes work licensed as
|
|
|
|
# Copyright 2011-2013 Blender Foundation
|
2019-12-21 20:28:41 +01:00
|
|
|
#
|
2016-12-13 20:06:23 +01:00
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
2019-12-21 20:28:41 +01:00
|
|
|
#
|
2016-12-13 20:06:23 +01:00
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
2019-12-21 20:28:41 +01:00
|
|
|
#
|
2016-12-13 20:06:23 +01:00
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
#
|
2018-10-26 19:45:07 +02:00
|
|
|
import os
|
2020-04-11 16:44:06 +02:00
|
|
|
import shutil
|
2020-10-05 01:27:36 +02:00
|
|
|
from typing import Any, Callable, Dict, Optional, Tuple
|
2020-04-11 16:44:06 +02:00
|
|
|
|
|
|
|
import bpy
|
|
|
|
|
2017-11-20 14:32:36 +01:00
|
|
|
import arm.assets
|
2020-10-09 19:18:59 +02:00
|
|
|
import arm.log as log
|
2017-11-20 14:32:36 +01:00
|
|
|
import arm.make_state
|
|
|
|
import arm.material.cycles_functions as c_functions
|
2020-10-05 01:27:36 +02:00
|
|
|
from arm.material.cycles_nodes import *
|
2020-10-08 21:17:02 +02:00
|
|
|
import arm.material.mat_state as mat_state
|
2020-10-05 20:51:58 +02:00
|
|
|
from arm.material.parser_state import ParserState, ParserContext
|
2020-10-05 01:59:49 +02:00
|
|
|
from arm.material.shader import Shader, ShaderContext, floatstr, vec3str
|
2020-10-09 19:18:59 +02:00
|
|
|
import arm.utils
|
2020-04-18 21:46:56 +02:00
|
|
|
|
2020-10-09 23:32:41 +02:00
|
|
|
# Particle info export
# Maps each particle info attribute name (e.g. 'age', 'velocity') to whether
# the parsed material actually uses it, so the exporter knows what to provide.
particle_info: Dict[str, bool] = {}

# Parser state for the currently running parse() call. parse() resets this to
# None when it finishes so stale accesses raise instead of reusing old state.
state: Optional[ParserState]
|
|
|
|
|
2017-05-24 11:04:15 +02:00
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def parse(nodes, con: ShaderContext,
          vert: Shader, frag: Shader, geom: Shader, tesc: Shader, tese: Shader,
          parse_surface=True, parse_opacity=True, parse_displacement=True, basecol_only=False):
    """Parse the given material node tree and write the resulting shader
    code into the passed shader stages.

    nodes: node collection of the material's node tree.
    con: shader context that receives vertex elements while parsing.
    vert..tese: the individual shader stage objects to write into.
    parse_surface/parse_opacity/parse_displacement/basecol_only: flags
    controlling which parts of the material are parsed.
    """
    global state

    # Fresh state per parse run; all module functions read from it
    state = ParserState(ParserContext.OBJECT)

    state.parse_surface = parse_surface
    state.parse_opacity = parse_opacity
    state.parse_displacement = parse_displacement
    state.basecol_only = basecol_only

    state.con = con

    state.vert = vert
    state.frag = frag
    state.geom = geom
    state.tesc = tesc
    state.tese = tese

    # Only parse if the tree actually has a material output node
    output_node = node_by_type(nodes, 'OUTPUT_MATERIAL')
    if output_node is not None:
        custom_particle_node = node_by_name(nodes, 'ArmCustomParticleNode')
        parse_material_output(output_node, custom_particle_node)

    # Make sure that individual functions in this module aren't called with an incorrect/old parser state, set it to
    # None so that it will raise exceptions when not set
    state = None
|
2016-12-13 11:42:00 +01:00
|
|
|
|
2020-10-08 21:03:14 +02:00
|
|
|
|
2020-10-08 21:19:52 +02:00
|
|
|
def parse_material_output(node: bpy.types.Node, custom_particle_node: bpy.types.Node):
    """Parse the material output node: surface (and opacity), displacement,
    and an optional custom particle node, writing GLSL into the shaders
    referenced by the current parser state.

    node: the 'OUTPUT_MATERIAL' node of the tree.
    custom_particle_node: 'ArmCustomParticleNode' instance or None.
    """
    global particle_info

    parse_surface = state.parse_surface
    parse_opacity = state.parse_opacity
    parse_displacement = state.parse_displacement
    state.emission_found = False
    # Reset usage flags; node parsers set these to True on demand
    particle_info = {
        'index': False,
        'age': False,
        'lifetime': False,
        'location': False,
        'size': False,
        'velocity': False,
        'angular_velocity': False
    }
    state.sample_bump = False
    state.sample_bump_res = ''
    state.procedurals_written = False
    wrd = bpy.data.worlds['Arm']

    # Surface
    if parse_surface or parse_opacity:
        # Fresh traversal bookkeeping for the surface pass
        state.parents = []
        state.parsed = set()
        state.normal_parsed = False
        curshader = state.frag
        state.curshader = curshader

        # node.inputs[0] is the 'Surface' shader socket
        out_basecol, out_roughness, out_metallic, out_occlusion, out_specular, out_opacity, out_emission = parse_shader_input(node.inputs[0])
        if parse_surface:
            curshader.write('basecol = {0};'.format(out_basecol))
            curshader.write('roughness = {0};'.format(out_roughness))
            curshader.write('metallic = {0};'.format(out_metallic))
            curshader.write('occlusion = {0};'.format(out_occlusion))
            curshader.write('specular = {0};'.format(out_specular))
            if '_Emission' in wrd.world_defs:
                curshader.write('emission = {0};'.format(out_emission))
        if parse_opacity:
            # Small bias keeps fully-opaque values below the discard threshold
            curshader.write('opacity = {0} - 0.0002;'.format(out_opacity))

    # Volume
    # parse_volume_input(node.inputs[1])

    # Displacement (node.inputs[2] is the 'Displacement' socket)
    if parse_displacement and disp_enabled() and node.inputs[2].is_linked:
        # Fresh traversal bookkeeping for the displacement pass
        state.parents = []
        state.parsed = set()
        state.normal_parsed = False
        rpdat = arm.utils.get_rp()
        # Displacement runs in the tessellation-eval or vertex stage
        if rpdat.arm_rp_displacement == 'Tessellation' and state.tese is not None:
            state.curshader = state.tese
        else:
            state.curshader = state.vert
        out_disp = parse_displacement_input(node.inputs[2])
        state.curshader.write('vec3 disp = {0};'.format(out_disp))

    if custom_particle_node is not None:
        # Reset bookkeeping unless the displacement pass above already did
        if not (parse_displacement and disp_enabled() and node.inputs[2].is_linked):
            state.parents = []
            state.parsed = set()
            state.normal_parsed = False

        state.curshader = state.vert
        custom_particle_node.parse(state.curshader, state.con)
|
|
|
|
|
2016-12-13 11:42:00 +01:00
|
|
|
|
|
|
|
def parse_group(node, socket):
    """Enter a node group: resolve the group-output socket that corresponds
    to the requested outer socket and parse it, keeping the group node on
    the parent stack for the duration of the traversal."""
    socket_idx = socket_index(node, socket)
    group_output = node_by_type(node.node_tree.nodes, 'GROUP_OUTPUT')
    if group_output is None:
        return
    state.parents.append(node)
    result = parse_input(group_output.inputs[socket_idx])
    state.parents.pop()
    return result
|
|
|
|
|
2020-10-08 21:03:14 +02:00
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def parse_group_input(node: bpy.types.Node, socket: bpy.types.NodeSocket):
    """Resolve a group-input socket by stepping out to the enclosing group
    node, parsing the matching input there, and stepping back in."""
    socket_idx = socket_index(node, socket)
    enclosing_group = state.parents.pop()  # Leaving group
    result = parse_input(enclosing_group.inputs[socket_idx])
    state.parents.append(enclosing_group)  # Return to group
    return result
|
2016-12-13 11:42:00 +01:00
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def parse_input(inp: bpy.types.NodeSocket):
    """Dispatch parsing of a socket to the handler for its data type."""
    socket_type = inp.type
    if socket_type == 'SHADER':
        return parse_shader_input(inp)
    if socket_type in ('RGB', 'RGBA', 'VECTOR'):
        return parse_vector_input(inp)
    if socket_type == 'VALUE':
        return parse_value_input(inp)
    # Other socket types are not handled (implicitly returns None)
|
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def parse_shader_input(inp: bpy.types.NodeSocket) -> Tuple[str, ...]:
    """Parse a shader socket. Follows the link when connected, otherwise
    returns the default PBR output values as GLSL expression strings."""
    if not inp.is_linked:
        # Defaults, in order:
        # basecol, roughness, metallic, occlusion, specular, opacity, emission
        return 'vec3(0.8)', '0.0', '0.0', '1.0', '1.0', '1.0', '0.0'

    link = inp.links[0]
    # Reroute nodes are transparent pass-throughs
    if link.from_node.type == 'REROUTE':
        return parse_shader_input(link.from_node.inputs[0])
    return parse_shader(link.from_node, link.from_socket)
|
2016-12-13 11:42:00 +01:00
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
2020-10-05 20:51:58 +02:00
|
|
|
def parse_shader(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> Tuple[str, ...]:
    """Parse a shader node (BSDFs, mix/add, groups, custom nodes) and
    return the resulting output expressions from the parser state."""
    # Use switch-like lookup via dictionary
    # (better performance, better code readability)
    # 'NODE_TYPE': parser_function
    node_parser_funcs: Dict[str, Callable] = {
        'MIX_SHADER': nodes_shader.parse_mixshader,
        'ADD_SHADER': nodes_shader.parse_addshader,
        'BSDF_PRINCIPLED': nodes_shader.parse_bsdfprincipled,
        'BSDF_DIFFUSE': nodes_shader.parse_bsdfdiffuse,
        'BSDF_GLOSSY': nodes_shader.parse_bsdfglossy,
        'AMBIENT_OCCLUSION': nodes_shader.parse_ambientocclusion,
        'BSDF_ANISOTROPIC': nodes_shader.parse_bsdfanisotropic,
        'EMISSION': nodes_shader.parse_emission,
        'BSDF_GLASS': nodes_shader.parse_bsdfglass,
        'HOLDOUT': nodes_shader.parse_holdout,
        'SUBSURFACE_SCATTERING': nodes_shader.parse_subsurfacescattering,
        'BSDF_TRANSLUCENT': nodes_shader.parse_bsdftranslucent,
        'BSDF_TRANSPARENT': nodes_shader.parse_bsdftransparent,
        'BSDF_VELVET': nodes_shader.parse_bsdfvelvet,
    }

    if node.type in node_parser_funcs:
        # Parsers write their outputs into `state`, hence no return value here
        node_parser_funcs[node.type](node, socket, state)

    elif node.type == 'GROUP':
        # Armory PBR groups have a fixed, known socket layout and are
        # parsed directly instead of descending into the group tree
        if node.node_tree.name.startswith('Armory PBR'):
            if state.parse_surface:
                # Normal (input 5)
                if node.inputs[5].is_linked and node.inputs[5].links[0].from_node.type == 'NORMAL_MAP':
                    log.warn(mat_name() + ' - Do not use Normal Map node with Armory PBR, connect Image Texture directly')
                parse_normal_map_color_input(node.inputs[5])
                # Base color (input 0)
                state.out_basecol = parse_vector_input(node.inputs[0])
                # Occlusion (input 2)
                state.out_occlusion = parse_value_input(node.inputs[2])
                # Roughness (input 3)
                state.out_roughness = parse_value_input(node.inputs[3])
                # Metallic (input 4)
                state.out_metallic = parse_value_input(node.inputs[4])
                # Emission (input 6), only parsed when actually used
                if node.inputs[6].is_linked or node.inputs[6].default_value != 0.0:
                    state.out_emission = parse_value_input(node.inputs[6])
                    state.emission_found = True
            if state.parse_opacity:
                state.out_opacity = parse_value_input(node.inputs[1])
        else:
            return parse_group(node, socket)

    elif node.type == 'GROUP_INPUT':
        return parse_group_input(node, socket)

    elif node.type == 'CUSTOM':
        if node.bl_idname == 'ArmShaderDataNode':
            return node.parse(state.frag, state.vert)

    else:
        # TODO: Print node tree name (save in ParserState)
        log.warn(f'Material node type {node.type} not supported')

    return state.get_outs()
|
2016-12-13 11:42:00 +01:00
|
|
|
|
|
|
|
|
2016-12-17 15:34:43 +01:00
|
|
|
def parse_displacement_input(inp):
    """Parse the displacement socket, skipping reroute nodes.
    Returns None when nothing is connected."""
    if not inp.is_linked:
        return None
    link = inp.links[0]
    if link.from_node.type == 'REROUTE':
        return parse_displacement_input(link.from_node.inputs[0])
    return parse_vector_input(inp)
|
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
|
|
|
def parse_vector_input(inp: bpy.types.NodeSocket) -> vec3str:
    """Return the parsed result of the given input socket as a GLSL vec3
    expression string."""
    if inp.is_linked:
        link = inp.links[0]
        # Reroute nodes are transparent pass-throughs
        if link.from_node.type == 'REROUTE':
            return parse_vector_input(link.from_node.inputs[0])

        res_var = write_result(link)
        if link.from_socket.type in ('RGB', 'RGBA', 'VECTOR'):
            return res_var
        # Scalar (VALUE) source: promote to vec3
        return f'vec3({res_var})'

    # Unlinked reroute sockets are typed VALUE; treat as zero vector
    if inp.type == 'VALUE':
        return to_vec3([0.0, 0.0, 0.0])

    # Direct socket value, exposed as a uniform when batching materials
    if mat_batch() and inp.is_uniform:
        return to_uniform(inp)
    return to_vec3(inp.default_value)
|
|
|
|
|
2016-12-13 11:42:00 +01:00
|
|
|
|
2020-04-11 16:44:06 +02:00
|
|
|
def parse_vector(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> str:
    """Parses the vector/color output value from the given node and socket."""
    # Dispatch table, 'NODE_TYPE': parser_function
    node_parser_funcs: Dict[str, Callable] = {
        'ATTRIBUTE': nodes_input.parse_attribute,

        # RGB outputs
        'RGB': nodes_input.parse_rgb,
        'TEX_BRICK': nodes_texture.parse_tex_brick,
        'TEX_CHECKER': nodes_texture.parse_tex_checker,
        'TEX_ENVIRONMENT': nodes_texture.parse_tex_environment,
        'TEX_GRADIENT': nodes_texture.parse_tex_gradient,
        'TEX_IMAGE': nodes_texture.parse_tex_image,
        'TEX_MAGIC': nodes_texture.parse_tex_magic,
        'TEX_MUSGRAVE': nodes_texture.parse_tex_musgrave,
        'TEX_NOISE': nodes_texture.parse_tex_noise,
        'TEX_POINTDENSITY': nodes_texture.parse_tex_pointdensity,
        'TEX_SKY': nodes_texture.parse_tex_sky,
        'TEX_VORONOI': nodes_texture.parse_tex_voronoi,
        'TEX_WAVE': nodes_texture.parse_tex_wave,
        'VERTEX_COLOR': nodes_input.parse_vertex_color,
        'BRIGHTCONTRAST': nodes_color.parse_brightcontrast,
        'GAMMA': nodes_color.parse_gamma,
        'HUE_SAT': nodes_color.parse_huesat,
        'INVERT': nodes_color.parse_invert,
        'MIX_RGB': nodes_color.parse_mixrgb,
        'BLACKBODY': nodes_converter.parse_blackbody,
        'VALTORGB': nodes_converter.parse_valtorgb, # ColorRamp
        'CURVE_VEC': nodes_vector.parse_curvevec, # Vector Curves
        'CURVE_RGB': nodes_color.parse_curvergb,
        'COMBHSV': nodes_converter.parse_combhsv,
        'COMBRGB': nodes_converter.parse_combrgb,
        'WAVELENGTH': nodes_converter.parse_wavelength,

        # Vector outputs
        'CAMERA': nodes_input.parse_camera,
        'NEW_GEOMETRY': nodes_input.parse_geometry,
        'HAIR_INFO': nodes_input.parse_hairinfo,
        'OBJECT_INFO': nodes_input.parse_objectinfo,
        'PARTICLE_INFO': nodes_input.parse_particleinfo,
        'TANGENT': nodes_input.parse_tangent,
        'TEX_COORD': nodes_input.parse_texcoord,
        'UVMAP': nodes_input.parse_uvmap,
        'BUMP': nodes_vector.parse_bump,
        'MAPPING': nodes_vector.parse_mapping,
        'NORMAL': nodes_vector.parse_normal,
        'NORMAL_MAP': nodes_vector.parse_normalmap,
        'VECT_TRANSFORM': nodes_vector.parse_vectortransform,
        'COMBXYZ': nodes_converter.parse_combxyz,
        'VECT_MATH': nodes_converter.parse_vectormath,
        'DISPLACEMENT': nodes_vector.parse_displacement,
    }

    if node.type in node_parser_funcs:
        return node_parser_funcs[node.type](node, socket, state)

    elif node.type == 'GROUP':
        return parse_group(node, socket)

    elif node.type == 'GROUP_INPUT':
        return parse_group_input(node, socket)

    elif node.type == 'CUSTOM':
        if node.bl_idname == 'ArmShaderDataNode':
            return node.parse(state.frag, state.vert)

    # Fallback for unsupported node types: warn and return a zero vector
    log.warn(f'Material node type {node.type} not supported')
    return "vec3(0, 0, 0)"
|
2020-10-05 01:36:33 +02:00
|
|
|
|
|
|
|
|
2018-11-13 11:18:12 +01:00
|
|
|
def parse_normal_map_color_input(inp, strength_input=None):
    """Parse a tangent-space normal map color input and write the normal
    computation to the fragment shader.

    inp: the color socket carrying the normal map texture.
    strength_input: optional value socket scaling the normal's xy components.
    """
    frag = state.frag

    # Skip when only base color is needed, nothing is connected,
    # or a normal was already parsed for this material
    if state.basecol_only or not inp.is_linked or state.normal_parsed:
        return

    state.normal_parsed = True
    # Signal the shader that subsequent writes affect the normal
    frag.write_normal += 1
    if not get_arm_export_tangents() or mat_get_material().arm_decal: # Compute TBN matrix
        # No exported tangents available: derive TBN per-fragment
        frag.write('vec3 texn = ({0}) * 2.0 - 1.0;'.format(parse_vector_input(inp)))
        frag.write('texn.y = -texn.y;')
        frag.add_include('std/normals.glsl')
        frag.write('mat3 TBN = cotangentFrame(n, -vVec, texCoord);')
        frag.write('n = TBN * normalize(texn);')
    else:
        # Tangents are exported; TBN is expected to be available in the shader
        frag.write('n = ({0}) * 2.0 - 1.0;'.format(parse_vector_input(inp)))
        if strength_input is not None:
            strength = parse_value_input(strength_input)
            if strength != '1.0':
                frag.write('n.xy *= {0};'.format(strength))
        frag.write('n = normalize(TBN * n);')
        state.con.add_elem('tang', 'short4norm')
    frag.write_normal -= 1
|
2016-12-17 23:48:18 +01:00
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
|
|
|
def parse_value_input(inp: bpy.types.NodeSocket) -> floatstr:
    """Return the parsed result of the given input socket as a GLSL float
    expression string."""
    if inp.is_linked:
        link = inp.links[0]

        # Reroute nodes are transparent pass-throughs
        if link.from_node.type == 'REROUTE':
            return parse_value_input(link.from_node.inputs[0])

        res_var = write_result(link)
        if link.from_socket.type in ('RGB', 'RGBA', 'VECTOR'):
            # Color/vector source: convert RGB to BW
            return rgb_to_bw(res_var)
        # Already a scalar (VALUE)
        return res_var

    # Direct socket value, exposed as a uniform when batching materials
    if mat_batch() and inp.is_uniform:
        return to_uniform(inp)
    return to_vec1(inp.default_value)
|
2016-12-13 11:42:00 +01:00
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
2016-12-13 11:42:00 +01:00
|
|
|
def parse_value(node, socket):
    """Parse the scalar output value from the given node and socket,
    returning a GLSL float expression string."""
    # Dispatch table, 'NODE_TYPE': parser_function
    node_parser_funcs: Dict[str, Callable] = {
        'ATTRIBUTE': nodes_input.parse_attribute,
        'CAMERA': nodes_input.parse_camera,
        'FRESNEL': nodes_input.parse_fresnel,
        'NEW_GEOMETRY': nodes_input.parse_geometry,
        'HAIR_INFO': nodes_input.parse_hairinfo,
        'LAYER_WEIGHT': nodes_input.parse_layerweight,
        'LIGHT_PATH': nodes_input.parse_lightpath,
        'OBJECT_INFO': nodes_input.parse_objectinfo,
        'PARTICLE_INFO': nodes_input.parse_particleinfo,
        'VALUE': nodes_input.parse_value,
        'WIREFRAME': nodes_input.parse_wireframe,
        'TEX_BRICK': nodes_texture.parse_tex_brick,
        'TEX_CHECKER': nodes_texture.parse_tex_checker,
        'TEX_GRADIENT': nodes_texture.parse_tex_gradient,
        'TEX_IMAGE': nodes_texture.parse_tex_image,
        'TEX_MAGIC': nodes_texture.parse_tex_magic,
        'TEX_MUSGRAVE': nodes_texture.parse_tex_musgrave,
        'TEX_NOISE': nodes_texture.parse_tex_noise,
        'TEX_POINTDENSITY': nodes_texture.parse_tex_pointdensity,
        'TEX_VORONOI': nodes_texture.parse_tex_voronoi,
        'TEX_WAVE': nodes_texture.parse_tex_wave,
        'LIGHT_FALLOFF': nodes_color.parse_lightfalloff,
        'NORMAL': nodes_vector.parse_normal,
        'CLAMP': nodes_converter.parse_clamp,
        'VALTORGB': nodes_converter.parse_valtorgb,
        'MATH': nodes_converter.parse_math,
        'RGBTOBW': nodes_converter.parse_rgbtobw,
        'SEPHSV': nodes_converter.parse_sephsv,
        'SEPRGB': nodes_converter.parse_seprgb,
        'SEPXYZ': nodes_converter.parse_sepxyz,
        'VECT_MATH': nodes_converter.parse_vectormath,
    }

    if node.type in node_parser_funcs:
        return node_parser_funcs[node.type](node, socket, state)

    elif node.type == 'GROUP':
        # Armory PBR groups have a fixed, known socket layout
        if node.node_tree.name.startswith('Armory PBR'):
            # Displacement (output 1 maps to input 7 of the group)
            if socket == node.outputs[1]:
                return parse_value_input(node.inputs[7])
            else:
                return None
        else:
            return parse_group(node, socket)

    elif node.type == 'GROUP_INPUT':
        return parse_group_input(node, socket)

    elif node.type == 'CUSTOM':
        if node.bl_idname == 'ArmShaderDataNode':
            return node.parse(state.frag, state.vert)

    # Fallback for unsupported node types: warn and return zero
    log.warn(f'Material node type {node.type} not supported')
    return '0.0'
|
2020-10-05 23:37:48 +02:00
|
|
|
|
2018-05-21 17:55:26 +02:00
|
|
|
|
2018-10-26 19:45:07 +02:00
|
|
|
def vector_curve(name, fac, points):
    """Write GLSL that evaluates a piecewise-linear curve at `fac` and
    return the interpolation expression.

    name: unique prefix for the generated GLSL variable names.
    fac: GLSL expression for the curve input value.
    points: curve points; point.location is (x, y).
    Returns a GLSL expression string interpolating between the two
    neighboring curve points.
    """
    curshader = state.curshader

    # Write Ys array
    ys_var = name + '_ys'
    curshader.write('float {0}[{1}];'.format(ys_var, len(points)))  # TODO: Make const
    for i, point in enumerate(points):
        curshader.write('{0}[{1}] = {2};'.format(ys_var, i, point.location[1]))
    # Get index
    fac_var = name + '_fac'
    curshader.write('float {0} = {1};'.format(fac_var, fac))
    # Count how many point x-positions `fac` has passed
    index = '0'
    for point in points[1:]:
        index += ' + ({0} > {1} ? 1 : 0)'.format(fac_var, point.location[0])
    # Write index
    index_var = name + '_i'
    curshader.write('int {0} = {1};'.format(index_var, index))
    # Linear
    # Write Xs array
    facs_var = name + '_xs'
    curshader.write('float {0}[{1}];'.format(facs_var, len(points)))  # TODO: Make const
    for i, point in enumerate(points):
        curshader.write('{0}[{1}] = {2};'.format(facs_var, i, point.location[0]))
    # Map vector
    return 'mix({0}[{1}], {0}[{1} + 1], ({2} - {3}[{1}]) * (1.0 / ({3}[{1} + 1] - {3}[{1}]) ))'.format(ys_var, index_var, fac_var, facs_var)
|
|
|
|
|
2018-05-21 17:55:26 +02:00
|
|
|
def write_normal(inp):
    """Parse the given normal input socket and assign the result to the
    shader's `n` variable. Skipped for unlinked sockets and for normals
    coming straight from a group input."""
    if inp.is_linked and inp.links[0].from_node.type != 'GROUP_INPUT':
        normal_res = parse_vector_input(inp)
        # Idiom fix: compare against None with `is not`, not `!=`
        if normal_res is not None:
            state.curshader.write('n = {0};'.format(normal_res))
|
2018-05-21 17:55:26 +02:00
|
|
|
|
2020-10-09 23:32:41 +02:00
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def is_parsed(node_store_name: str):
    """Return whether the variable with the given name was already written
    to the shader during the current parse run."""
    return node_store_name in state.parsed
|
2018-11-13 10:43:07 +01:00
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
2020-04-18 21:38:35 +02:00
|
|
|
def res_var_name(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> str:
    """Return the name of the variable that stores the parsed result
    from the given node and socket."""
    return f'{node_name(node.name)}_{safesrc(socket.name)}_res'
|
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
2020-04-18 21:38:35 +02:00
|
|
|
def write_result(link: bpy.types.NodeLink) -> Optional[str]:
    """Write the parsed result of the given node link to the shader.

    Returns the name of the variable holding the result, or None when
    parsing failed or the link is an already-handled normal map.
    """
    res_var = res_var_name(link.from_node, link.from_socket)

    # Unparsed node
    if not is_parsed(res_var):
        # Mark before parsing to avoid re-entering the same node
        state.parsed.add(res_var)
        st = link.from_socket.type

        if st in ('RGB', 'RGBA', 'VECTOR'):
            res = parse_vector(link.from_node, link.from_socket)
            if res is None:
                log.error(f'{link.from_node.name} returned `None` while parsing!')
                return None
            state.curshader.write(f'vec3 {res_var} = {res};')

        elif st == 'VALUE':
            res = parse_value(link.from_node, link.from_socket)
            if res is None:
                log.error(f'{link.from_node.name} returned `None` while parsing!')
                return None
            # Non-parameter Value nodes become shader constants
            if link.from_node.type == "VALUE" and not link.from_node.arm_material_param:
                state.curshader.add_const('float', res_var, res)
            else:
                state.curshader.write(f'float {res_var} = {res};')

    # Normal map already parsed, return
    elif link.from_node.type == 'NORMAL_MAP':
        return None

    return res_var
|
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
2020-03-18 09:35:23 +01:00
|
|
|
def write_procedurals():
    """Add the procedural-texture helper functions to the current shader,
    at most once per parse run."""
    if state.procedurals_written:
        return
    state.curshader.add_function(c_functions.str_tex_proc)
    state.procedurals_written = True
|
|
|
|
|
2020-10-23 23:37:53 +02:00
|
|
|
def glsl_type(socket_type: str):
    """Socket to glsl type."""
    return 'vec3' if socket_type in ('RGB', 'RGBA', 'VECTOR') else 'float'
|
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def to_uniform(inp: bpy.types.NodeSocket):
    """Register a shader uniform for the socket's value and return its name."""
    uniform_name = safesrc(inp.node.name) + safesrc(inp.name)
    state.curshader.add_uniform(f'{glsl_type(inp.type)} {uniform_name}')
    return uniform_name
|
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def store_var_name(node: bpy.types.Node):
    """Return the name of the variable that caches this node's sampled value."""
    return f'{node_name(node.name)}_store'
|
|
|
|
|
2018-06-12 00:26:52 +02:00
|
|
|
def texture_store(node, tex, tex_name, to_linear=False, tex_link=None):
    """Write GLSL that samples the given texture (once per node) and return
    the name of the vec4 variable holding the sample.

    node: the Image Texture node (node.projection selects box/triplanar mapping).
    tex: bound texture object passed to mat_bind_texture.
    tex_name: name of the sampler2D uniform to declare and sample.
    to_linear: when True, converts the sampled rgb from sRGB (pow 2.2).
    tex_link: optional uniform link passed to add_uniform.
    """
    curshader = state.curshader

    tex_store = store_var_name(node)
    # Each node is sampled only once; reuse the cached variable
    if is_parsed(tex_store):
        return tex_store
    state.parsed.add(tex_store)
    mat_bind_texture(tex)
    state.con.add_elem('tex', 'short2norm')
    curshader.add_uniform('sampler2D {0}'.format(tex_name), link=tex_link)
    triplanar = node.projection == 'BOX'
    if node.inputs[0].is_linked:
        uv_name = parse_vector_input(node.inputs[0])
        # Flip the y coordinate of the incoming UVs
        if triplanar:
            uv_name = 'vec3({0}.x, 1.0 - {0}.y, {0}.z)'.format(uv_name)
        else:
            uv_name = 'vec2({0}.x, 1.0 - {0}.y)'.format(uv_name)
    else:
        uv_name = 'texCoord'
    if triplanar:
        if not curshader.has_include('std/mapping.glsl'):
            curshader.add_include('std/mapping.glsl')
        # Use the TBN normal when a normal map was parsed already
        if state.normal_parsed:
            nor = 'TBN[2]'
        else:
            nor = 'n'
        curshader.write('vec4 {0} = vec4(triplanarMapping({1}, {2}, {3}), 0.0);'.format(tex_store, tex_name, nor, uv_name))
    else:
        if mat_state.texture_grad:
            # Explicit gradients (g2) for correct mip selection
            curshader.write('vec4 {0} = textureGrad({1}, {2}.xy, g2.xy, g2.zw);'.format(tex_store, tex_name, uv_name))
        else:
            curshader.write('vec4 {0} = texture({1}, {2}.xy);'.format(tex_store, tex_name, uv_name))
    if state.sample_bump:
        # Sample 4 offset texels for bump mapping (left/right/down/up)
        state.sample_bump_res = tex_store
        curshader.write('float {0}_1 = textureOffset({1}, {2}.xy, ivec2(-2, 0)).r;'.format(tex_store, tex_name, uv_name))
        curshader.write('float {0}_2 = textureOffset({1}, {2}.xy, ivec2(2, 0)).r;'.format(tex_store, tex_name, uv_name))
        curshader.write('float {0}_3 = textureOffset({1}, {2}.xy, ivec2(0, -2)).r;'.format(tex_store, tex_name, uv_name))
        curshader.write('float {0}_4 = textureOffset({1}, {2}.xy, ivec2(0, 2)).r;'.format(tex_store, tex_name, uv_name))
        state.sample_bump = False
    if to_linear:
        curshader.write('{0}.rgb = pow({0}.rgb, vec3(2.2));'.format(tex_store))
    return tex_store
|
|
|
|
|
2020-10-23 23:37:53 +02:00
|
|
|
|
|
|
|
def write_bump(node: bpy.types.Node, out_socket: bpy.types.NodeSocket, res: str, scl=0.001):
    """Sample the texture expression `res` at four offset coordinates for bump
    mapping. The results are written to shader variables named after
    state.sample_bump_res with _1 to _4 appended."""
    state.sample_bump_res = store_var_name(node) + '_bump'

    # Testing.. get function parts..
    # Split the sampling call "fn(co[, rest])" into prefix, coordinate and suffix
    # so an offset can be spliced onto the coordinate argument.
    fn_parts = res.split('(', 1)
    pre = fn_parts[0] + '('
    if ',' in fn_parts[1]:
        co, _, rest = fn_parts[1].partition(',')
        post = ',' + rest
    else:
        co = fn_parts[1][:-1]  # drop the closing parenthesis
        post = ')'

    # One offset per sample, taken around the current coordinate
    offsets = (
        f'vec3(-{scl}, 0.0, 0.0)',
        f'vec3({scl}, 0.0, {scl})',
        f'vec3(0.0, -{scl}, 0.0)',
        f'vec3(0.0, {scl}, -{scl})'
    )

    # vec3 results must first be converted to grayscale floats
    convert_bw = glsl_type(out_socket.type) == "vec3"

    curshader = state.curshader
    for i, offset in enumerate(offsets, start=1):
        sample = f'{pre}{co} + {offset}{post}'
        if convert_bw:
            vec_var = f'{state.sample_bump_res}_vec{i}'
            curshader.write(f'vec3 {vec_var} = {sample};')
            curshader.write(f'float {state.sample_bump_res}_{i} = {rgb_to_bw(vec_var)};')
        else:
            curshader.write(f'float {state.sample_bump_res}_{i} = {sample};')

    state.sample_bump = False
|
2017-11-07 02:26:03 +01:00
|
|
|
|
2020-10-23 23:37:53 +02:00
|
|
|
|
2017-11-20 14:32:36 +01:00
|
|
|
def to_vec1(v):
    """Return the GLSL source string for a single scalar value."""
    return str(v)
|
|
|
|
|
2020-10-23 23:37:53 +02:00
|
|
|
|
2017-11-20 14:32:36 +01:00
|
|
|
def to_vec3(v):
    """Return GLSL source for a vec3 built from the first three components of v."""
    return f'vec3({v[0]}, {v[1]}, {v[2]})'
|
|
|
|
|
2020-10-23 23:37:53 +02:00
|
|
|
|
|
|
|
def rgb_to_bw(res_var: vec3str) -> floatstr:
    """Return a GLSL expression converting the given vec3 color expression to a
    grayscale float using weighted channel contributions."""
    return '((({0}.r * 0.3 + {0}.g * 0.59 + {0}.b * 0.11) / 3.0) * 2.5)'.format(res_var)
|
|
|
|
|
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def node_by_type(nodes, ntype: str) -> bpy.types.Node:
    """Return the first node in `nodes` whose type equals `ntype`, or None."""
    return next((node for node in nodes if node.type == ntype), None)
|
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def node_by_name(nodes, name: str) -> bpy.types.Node:
    """Return the first node in `nodes` whose bl_idname equals `name`, or None."""
    return next((node for node in nodes if node.bl_idname == name), None)
|
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def socket_index(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> int:
    """Return the index of `socket` among the node's outputs (None if absent)."""
    for index, output in enumerate(node.outputs):
        if output == socket:
            return index
|
|
|
|
|
2020-10-05 01:59:49 +02:00
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def node_name(s: str) -> str:
    """Return a unique and safe name for a node for shader code usage."""
    # Prefix with the node group hierarchy so equally named nodes
    # in different groups do not collide
    for parent in state.parents:
        s = f'{parent.name}_{s}'
    if state.curshader.write_textures > 0:
        s += '_texread'
    s = safesrc(s)
    # Consecutive _ are reserved
    if '__' in s:
        s = s.replace('_', '_x')
    return s
|
2017-11-20 14:32:36 +01:00
|
|
|
|
|
|
|
##
|
|
|
|
|
2020-10-05 01:27:36 +02:00
|
|
|
def make_texture(image_node: bpy.types.ShaderNodeTexImage, tex_name: str, matname: str = None) -> Optional[Dict[str, Any]]:
    """Build the texture export dict for an Image Texture node.

    Resolves the image's file path (unpacking packed or generated images
    into the build's 'unpacked' folder and converting unsupported file
    formats), registers the resulting file as a build asset, and derives
    filtering/addressing settings from the node and the active render path.

    Returns None if the image is missing, has no usable path or file
    extension, or its source file cannot be found.
    """
    tex = {'name': tex_name}

    if matname is None:
        matname = mat_state.material.name

    image = image_node.image
    if image is None:
        return None

    # Get filepath
    filepath = image.filepath
    if filepath == '':
        if image.packed_file is not None:
            # Packed image without a path: name the unpacked file after the image
            filepath = './' + image.name
            has_ext = filepath.endswith(('.jpg', '.png', '.hdr'))
            if not has_ext:
                # Raw bytes, write converted .jpg to /unpacked
                filepath += '.raw'

        elif image.source == "GENERATED":
            # Generated image: bake it out to the unpacked folder as .jpg
            unpack_path = os.path.join(arm.utils.get_fp_build(), 'compiled', 'Assets', 'unpacked')
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)

            filepath = os.path.join(unpack_path, image.name + ".jpg")
            arm.utils.convert_image(image, filepath, "JPEG")

        else:
            log.warn(matname + '/' + image.name + ' - invalid file path')
            return None

    # Reference image name
    texpath = arm.utils.asset_path(filepath)
    texfile = arm.utils.extract_filename(filepath)
    tex['file'] = arm.utils.safestr(texfile)
    s = tex['file'].rsplit('.', 1)

    if len(s) == 1:
        log.warn(matname + '/' + image.name + ' - file extension required for image name')
        return None

    ext = s[1].lower()
    do_convert = ext not in ('jpg', 'png', 'hdr', 'mp4') # Convert image
    if do_convert:
        # tga/dds keep lossless data, so convert those to .png, the rest to .jpg
        new_ext = 'png' if (ext in ('tga', 'dds')) else 'jpg'
        tex['file'] = tex['file'].rsplit('.', 1)[0] + '.' + new_ext

    if image.packed_file is not None or not is_ascii(texfile):
        # Extract packed data / copy non-ascii texture
        unpack_path = os.path.join(arm.utils.get_fp_build(), 'compiled', 'Assets', 'unpacked')
        if not os.path.exists(unpack_path):
            os.makedirs(unpack_path)
        unpack_filepath = os.path.join(unpack_path, tex['file'])

        if do_convert:
            if not os.path.isfile(unpack_filepath):
                # new_ext is always set here because do_convert is True
                fmt = 'PNG' if new_ext == 'png' else 'JPEG'
                arm.utils.convert_image(image, unpack_filepath, file_format=fmt)
        else:

            # Write bytes if size is different or file does not exist yet
            if image.packed_file is not None:
                if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != image.packed_file.size:
                    with open(unpack_filepath, 'wb') as f:
                        f.write(image.packed_file.data)
            # Copy non-ascii texture
            else:
                if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != os.path.getsize(texpath):
                    shutil.copy(texpath, unpack_filepath)

        arm.assets.add(unpack_filepath)

    else:
        if not os.path.isfile(arm.utils.asset_path(filepath)):
            log.warn('Material ' + matname + '/' + image.name + ' - file not found(' + filepath + ')')
            return None

        if do_convert:
            # Cache the converted file in the unpacked folder
            unpack_path = os.path.join(arm.utils.get_fp_build(), 'compiled', 'Assets', 'unpacked')
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)
            converted_path = os.path.join(unpack_path, tex['file'])
            # TODO: delete cache when file changes
            if not os.path.isfile(converted_path):
                fmt = 'PNG' if new_ext == 'png' else 'JPEG'
                arm.utils.convert_image(image, converted_path, file_format=fmt)
            arm.assets.add(converted_path)
        else:
            # Link image path to assets
            # TODO: Khamake converts .PNG to .jpg? Convert ext to lowercase on windows
            if arm.utils.get_os() == 'win':
                s = filepath.rsplit('.', 1)
                arm.assets.add(arm.utils.asset_path(s[0] + '.' + s[1].lower()))
            else:
                arm.assets.add(arm.utils.asset_path(filepath))

    # if image_format != 'RGBA32':
    #     tex['format'] = image_format

    # Render-path texture filter overrides the node's interpolation setting
    interpolation = image_node.interpolation
    rpdat = arm.utils.get_rp()
    texfilter = rpdat.arm_texture_filter
    if texfilter == 'Anisotropic':
        interpolation = 'Smart'
    elif texfilter == 'Linear':
        interpolation = 'Linear'
    elif texfilter == 'Point':
        interpolation = 'Closest'

    # TODO: Blender seems to load full images on size request, cache size instead
    # NOTE(review): powimage appears unused below; reading image.size may force
    # the image load, so it is kept — confirm before removing
    powimage = is_pow(image.size[0]) and is_pow(image.size[1])

    if interpolation == 'Cubic': # Mipmap linear
        tex['mipmap_filter'] = 'linear'
        tex['generate_mipmaps'] = True
    elif interpolation == 'Smart': # Mipmap anisotropic
        tex['min_filter'] = 'anisotropic'
        tex['mipmap_filter'] = 'linear'
        tex['generate_mipmaps'] = True
    elif interpolation == 'Closest':
        tex['min_filter'] = 'point'
        tex['mag_filter'] = 'point'
    # else defaults to linear

    if image_node.extension != 'REPEAT': # Extend or clip
        tex['u_addressing'] = 'clamp'
        tex['v_addressing'] = 'clamp'

    if image.source == 'MOVIE':
        tex['source'] = 'movie'
        tex['min_filter'] = 'linear'
        tex['mag_filter'] = 'linear'
        tex['mipmap_filter'] = 'no'
        tex['generate_mipmaps'] = False

    return tex
|
|
|
|
|
|
|
|
def is_pow(num):
    """Return True if num is a nonzero power of two (single bit set)."""
    return num != 0 and (num & (num - 1)) == 0
|
|
|
|
|
|
|
|
def is_ascii(s):
    """Return True if s contains only ASCII characters, i.e. its UTF-8
    encoding has as many bytes as the string has characters."""
    encoded = s.encode()
    return len(encoded) == len(s)
|
|
|
|
|
|
|
|
##
|
|
|
|
|
2017-11-20 14:32:36 +01:00
|
|
|
def get_arm_export_tangents():
    """Return the 'Arm' world's arm_export_tangents setting."""
    return bpy.data.worlds['Arm'].arm_export_tangents
|
|
|
|
|
|
|
|
def safesrc(name):
    """Convenience wrapper around arm.utils.safesrc."""
    return arm.utils.safesrc(name)
|
|
|
|
|
2017-11-26 14:45:36 +01:00
|
|
|
def disp_enabled():
    """Return whether displacement is enabled for the current build target."""
    return arm.utils.disp_enabled(arm.make_state.target)
|
2017-11-20 14:32:36 +01:00
|
|
|
|
|
|
|
def assets_add(path):
    """Register the given file path as a build asset (wraps arm.assets.add)."""
    arm.assets.add(path)
|
|
|
|
|
|
|
|
def assets_add_embedded_data(path):
    """Register embedded data (wraps arm.assets.add_embedded_data)."""
    arm.assets.add_embedded_data(path)
|
|
|
|
|
|
|
|
def mat_name():
    """Return the name of the material currently being parsed."""
    return mat_state.material.name
|
|
|
|
|
|
|
|
def mat_batch():
    """Return the current material batch state (mat_state.batch)."""
    return mat_state.batch
|
|
|
|
|
|
|
|
def mat_bind_texture(tex):
    """Append the given texture dict to the material's bound textures."""
    mat_state.bind_textures.append(tex)
|
|
|
|
|
|
|
|
def mat_get_material():
    """Return the material currently being parsed."""
    return mat_state.material
|
|
|
|
|
|
|
|
def mat_get_material_users():
    """Return the mapping of materials to their users (mat_state.mat_users)."""
    return mat_state.mat_users
|