"""
Armory Scene Exporter
http://armory3d.org/
Based on Open Game Engine Exchange
http://opengex.org/
Export plugin for Blender by Eric Lengyel
Copyright 2015, Terathon Software LLC
This software is licensed under the Creative Commons
Attribution-ShareAlike 3.0 Unported License:
http://creativecommons.org/licenses/by-sa/3.0/deed.en_US
"""
from enum import Enum, unique
import math
import os
import time
from typing import Any, Dict, Union
import numpy as np
import bpy
from mathutils import *
import arm.assets as assets
import arm.exporter_opt as exporter_opt
import arm.log as log
import arm.make_renderpath as make_renderpath
import arm.material.cycles as cycles
import arm.material.make as make_material
import arm.material.mat_batch as mat_batch
import arm.utils
@unique
class NodeType(Enum):
"""Represents the type of an object."""
EMPTY = 0
BONE = 1
MESH = 2
LIGHT = 3
CAMERA = 4
SPEAKER = 5
DECAL = 6
PROBE = 7
@classmethod
def get_bobject_type(cls, bobject: bpy.types.Object) -> "NodeType":
"""Returns the NodeType enum member belonging to the type of
the given blender object."""
if bobject.type == "MESH":
if bobject.data.polygons:
return cls.MESH
elif bobject.type == "FONT" or bobject.type == "META":
return cls.MESH
elif bobject.type == "LIGHT":
return cls.LIGHT
elif bobject.type == "CAMERA":
return cls.CAMERA
elif bobject.type == "SPEAKER":
return cls.SPEAKER
elif bobject.type == "LIGHT_PROBE":
return cls.PROBE
return cls.EMPTY
STRUCT_IDENTIFIER = ("object", "bone_object", "mesh_object",
"light_object", "camera_object", "speaker_object",
"decal_object", "probe_object")
current_output = None
class ArmoryExporter:
"""Export to Armory format"""
compress_enabled = False
optimize_enabled = False
# Referenced traits
import_traits = []
def __init__(self, context: bpy.types.Context, filepath: str, scene: bpy.types.Scene = None, depsgraph: bpy.types.Depsgraph = None):
global current_output
self.filepath = filepath
self.scene = context.scene if scene is None else scene
self.depsgraph = context.evaluated_depsgraph_get() if depsgraph is None else depsgraph
self.output = {}
self.output['frame_time'] = 1.0 / (self.scene.render.fps / self.scene.render.fps_base)
current_output = self.output
# Stores the object type ("objectType") and the asset name
# ("structName") in a dict for each object
self.bobject_array: Dict[bpy.types.Object, Dict[str, Union[NodeType, str]]] = {}
self.bobject_bone_array = {}
self.mesh_array = {}
self.light_array = {}
self.probe_array = {}
self.camera_array = {}
self.speaker_array = {}
self.material_array = []
self.particle_system_array = {}
# `True` if there is at least one spawned camera in the scene
self.camera_spawned = False
self.material_to_object_dict = {}
# If no material is assigned, provide default to mimic cycles
self.default_material_objects = []
self.default_skin_material_objects = []
self.default_part_material_objects = []
self.material_to_arm_object_dict = {}
# Stores the link between a blender object and its
# corresponding export data (arm object)
self.object_to_arm_object_dict: Dict[bpy.types.Object, Dict] = {}
self.bone_tracks = []
ArmoryExporter.preprocess()
@classmethod
def export_scene(cls, context: bpy.types.Context, filepath: str, scene: bpy.types.Scene = None, depsgraph: bpy.types.Depsgraph = None) -> None:
"""Exports the given scene to the given filepath. This is the
function that is called in make.py and the entry point of the
exporter."""
cls(context, filepath, scene, depsgraph).execute()
@classmethod
def preprocess(cls):
wrd = bpy.data.worlds['Arm']
cls.export_all_flag = True
cls.export_physics = False # Indicates whether rigid body is exported
if wrd.arm_physics == 'Enabled':
cls.export_physics = True
cls.export_navigation = False
if wrd.arm_navigation == 'Enabled':
cls.export_navigation = True
cls.export_ui = False
cls.option_mesh_only = False
@staticmethod
def write_matrix(matrix):
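        """Flattens the given 4x4 matrix into a row-major list of 16 floats."""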
return [matrix[0][0], matrix[0][1], matrix[0][2], matrix[0][3],
matrix[1][0], matrix[1][1], matrix[1][2], matrix[1][3],
matrix[2][0], matrix[2][1], matrix[2][2], matrix[2][3],
matrix[3][0], matrix[3][1], matrix[3][2], matrix[3][3]]
def get_meshes_file_path(self, object_id: str, compressed=False) -> str:
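        """Returns the output file path for the given data id inside the
        'meshes' directory next to the exported scene file, creating that
        directory if it does not exist yet."""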
index = self.filepath.rfind('/')
mesh_fp = self.filepath[:(index + 1)] + 'meshes/'
if not os.path.exists(mesh_fp):
os.makedirs(mesh_fp)
ext = '.lz4' if compressed else '.arm'
return mesh_fp + object_id + ext
@staticmethod
def get_shape_keys(mesh):
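        """Returns the shape keys of the given mesh data block, or None if
        there are none besides the basis key (metaball data has no
        shape_keys attribute at all)."""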
# Metaball
if not hasattr(mesh, 'shape_keys'):
return None
shape_keys = mesh.shape_keys
if shape_keys and len(shape_keys.key_blocks) > 1:
return shape_keys
return None
def find_bone(self, name: str):
for bobject_ref in self.bobject_bone_array.items():
if bobject_ref[0].name == name:
return bobject_ref
return None
@staticmethod
def collect_bone_animation(armature, name):
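        """Collects all FCurves of the armature's current action that
        animate the pose bone with the given name."""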
path = "pose.bones[\"" + name + "\"]."
curve_array = []
if armature.animation_data:
action = armature.animation_data.action
if action:
for fcurve in action.fcurves:
if fcurve.data_path.startswith(path):
curve_array.append(fcurve)
return curve_array
def export_bone(self, armature, bone, scene, o, action):
bobject_ref = self.bobject_bone_array.get(bone)
if bobject_ref:
o['type'] = STRUCT_IDENTIFIER[bobject_ref["objectType"].value]
o['name'] = bobject_ref["structName"]
self.export_bone_transform(armature, bone, scene, o, action)
o['children'] = []
for subbobject in bone.children:
so = {}
self.export_bone(armature, subbobject, scene, so, action)
o['children'].append(so)
@staticmethod
def export_pose_markers(oanim, action):
if action.pose_markers is None or len(action.pose_markers) == 0:
return
oanim['marker_frames'] = []
oanim['marker_names'] = []
for pos_marker in action.pose_markers:
oanim['marker_frames'].append(int(pos_marker.frame))
oanim['marker_names'].append(pos_marker.name)
    def export_object_sampled_animation(self, bobject: bpy.types.Object, scene: bpy.types.Scene, o: Dict) -> None:
"""Exports animation as full 4x4 matrices for each frame"""
animation_flag = False
animation_flag = bobject.animation_data is not None and bobject.animation_data.action is not None and bobject.type != 'ARMATURE'
# Font out
if animation_flag:
if not 'object_actions' in o:
o['object_actions'] = []
action = bobject.animation_data.action
aname = arm.utils.safestr(arm.utils.asset_name(action))
fp = self.get_meshes_file_path('action_' + aname, compressed=ArmoryExporter.compress_enabled)
assets.add(fp)
ext = '.lz4' if ArmoryExporter.compress_enabled else ''
if ext == '' and not bpy.data.worlds['Arm'].arm_minimize:
ext = '.json'
o['object_actions'].append('action_' + aname + ext)
oaction = {}
oaction['sampled'] = True
oaction['name'] = action.name
oanim = {}
oaction['anim'] = oanim
tracko = {}
tracko['target'] = "transform"
tracko['frames'] = []
begin_frame, end_frame = int(action.frame_range[0]), int(action.frame_range[1])
end_frame += 1
for i in range(begin_frame, end_frame):
tracko['frames'].append(int(i - begin_frame))
tracko['frames'].append(int(end_frame))
tracko['values'] = []
for i in range(begin_frame, end_frame):
scene.frame_set(i)
                tracko['values'] += ArmoryExporter.write_matrix(bobject.matrix_local) # Continuous array of matrix transforms
oanim['tracks'] = [tracko]
self.export_pose_markers(oanim, action)
if True: #not action.arm_cached or not os.path.exists(fp):
wrd = bpy.data.worlds['Arm']
if wrd.arm_verbose_output:
print('Exporting object action ' + aname)
actionf = {}
actionf['objects'] = []
actionf['objects'].append(oaction)
oaction['type'] = 'object'
oaction['name'] = aname
oaction['data_ref'] = ''
oaction['transform'] = None
arm.utils.write_arm(fp, actionf)
@staticmethod
def calculate_animation_length(action):
"""Calculates the length of the given action."""
start = action.frame_range[0]
end = action.frame_range[1]
# Take FCurve modifiers into account if they have a restricted
# frame range
for fcurve in action.fcurves:
for modifier in fcurve.modifiers:
if not modifier.use_restricted_range:
continue
if modifier.frame_start < start:
start = modifier.frame_start
if modifier.frame_end > end:
end = modifier.frame_end
return (int(start), int(end))
@staticmethod
def export_animation_track(fcurve, frame_range, target):
"""This function exports a single animation track."""
data_ttrack = {}
data_ttrack['target'] = target
data_ttrack['frames'] = []
data_ttrack['values'] = []
start = frame_range[0]
end = frame_range[1]
for frame in range(start, end + 1):
data_ttrack['frames'].append(frame)
data_ttrack['values'].append(fcurve.evaluate(frame))
return data_ttrack
def export_object_transform(self, bobject, o):
# Internal target names for single FCurve data paths
target_names = {
"location": ("xloc", "yloc", "zloc"),
"rotation_euler": ("xrot", "yrot", "zrot"),
"rotation_quaternion": ("qwrot", "qxrot", "qyrot", "qzrot"),
"scale": ("xscl", "yscl", "zscl"),
"delta_location": ("dxloc", "dyloc", "dzloc"),
"delta_rotation_euler": ("dxrot", "dyrot", "dzrot"),
"delta_rotation_quaternion": ("dqwrot", "dqxrot", "dqyrot", "dqzrot"),
"delta_scale": ("dxscl", "dyscl", "dzscl"),
}
# Static transform
o['transform'] = {}
o['transform']['values'] = ArmoryExporter.write_matrix(bobject.matrix_local)
# Animated transform
if bobject.animation_data is not None and bobject.type != "ARMATURE":
action = bobject.animation_data.action
if action is not None:
action_name = arm.utils.safestr(arm.utils.asset_name(action))
if 'object_actions' not in o:
o['object_actions'] = []
fp = self.get_meshes_file_path('action_' + action_name, compressed=ArmoryExporter.compress_enabled)
assets.add(fp)
ext = '.lz4' if ArmoryExporter.compress_enabled else ''
if ext == '' and not bpy.data.worlds['Arm'].arm_minimize:
ext = '.json'
o['object_actions'].append('action_' + action_name + ext)
oaction = {}
oaction['name'] = action.name
# Export the animation tracks
oanim = {}
oaction['anim'] = oanim
frame_range = self.calculate_animation_length(action)
oanim['begin'] = frame_range[0]
oanim['end'] = frame_range[1]
oanim['tracks'] = []
self.export_pose_markers(oanim, action)
for fcurve in action.fcurves:
data_path = fcurve.data_path
try:
data_ttrack = self.export_animation_track(fcurve, frame_range, target_names[data_path][fcurve.array_index])
except KeyError:
if data_path not in target_names:
log.warn(f"Action {action_name}: The data path '{data_path}' is not supported (yet)!")
continue
# Missing target entry for array_index or something else
else:
raise
oanim['tracks'].append(data_ttrack)
if True: # not action.arm_cached or not os.path.exists(fp):
wrd = bpy.data.worlds['Arm']
if wrd.arm_verbose_output:
print('Exporting object action ' + action_name)
actionf = {}
actionf['objects'] = []
actionf['objects'].append(oaction)
oaction['type'] = 'object'
oaction['name'] = action_name
oaction['data_ref'] = ''
oaction['transform'] = None
arm.utils.write_arm(fp, actionf)
def process_bone(self, bone):
if ArmoryExporter.export_all_flag or bone.select:
self.bobject_bone_array[bone] = {"objectType" : NodeType.BONE, "structName" : bone.name}
for subbobject in bone.children:
self.process_bone(subbobject)
def process_bobject(self, bobject):
"""Adds the given blender object to the bobject_array dict and
stores its type and its name.
If an object is linked, the name of its library is appended
after an "_".
"""
if ArmoryExporter.export_all_flag or bobject.select:
btype = NodeType.get_bobject_type(bobject)
if btype is not NodeType.MESH and ArmoryExporter.option_mesh_only:
return
self.bobject_array[bobject] = {
"objectType": btype,
"structName": arm.utils.asset_name(bobject)
}
if bobject.type == "ARMATURE":
skeleton = bobject.data
if skeleton:
for bone in skeleton.bones:
if not bone.parent:
self.process_bone(bone)
if bobject.arm_instanced == 'Off':
for subbobject in bobject.children:
self.process_bobject(subbobject)
def process_skinned_meshes(self):
for bobjectRef in self.bobject_array.items():
if bobjectRef[1]["objectType"] is NodeType.MESH:
armature = bobjectRef[0].find_armature()
if armature:
for bone in armature.data.bones:
boneRef = self.find_bone(bone.name)
if boneRef:
# If an object is used as a bone, then we force its type to be a bone
boneRef[1]["objectType"] = NodeType.BONE
def export_bone_transform(self, armature, bone, scene, o, action):
pose_bone = armature.pose.bones.get(bone.name)
# if pose_bone is not None:
# transform = pose_bone.matrix.copy()
# if pose_bone.parent is not None:
# transform = pose_bone.parent.matrix.inverted_safe() * transform
# else:
transform = bone.matrix_local.copy()
if bone.parent is not None:
transform = (bone.parent.matrix_local.inverted_safe() @ transform)
o['transform'] = {}
o['transform']['values'] = ArmoryExporter.write_matrix(transform)
curve_array = self.collect_bone_animation(armature, bone.name)
animation = len(curve_array) != 0
if animation and pose_bone:
begin_frame, end_frame = int(action.frame_range[0]), int(action.frame_range[1])
o['anim'] = {}
tracko = {}
o['anim']['tracks'] = [tracko]
tracko['target'] = "transform"
tracko['frames'] = []
for i in range(begin_frame, end_frame + 1):
tracko['frames'].append(i - begin_frame)
tracko['values'] = []
self.bone_tracks.append((tracko['values'], pose_bone))
def use_default_material(self, bobject, o):
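        """Appends a reference to the default material ('armdefault', or
        'armdefaultskin' for skinned objects) to the given object data."""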
if arm.utils.export_bone_data(bobject):
o['material_refs'].append('armdefaultskin')
self.default_skin_material_objects.append(bobject)
else:
o['material_refs'].append('armdefault')
self.default_material_objects.append(bobject)
def use_default_material_part(self):
# Particle object with no material assigned
for ps in bpy.data.particles:
if ps.render_type != 'OBJECT' or ps.instance_object is None:
continue
po = ps.instance_object
if po not in self.object_to_arm_object_dict:
continue
o = self.object_to_arm_object_dict[po]
if len(o['material_refs']) > 0 and o['material_refs'][0] == 'armdefault' and po not in self.default_part_material_objects:
self.default_part_material_objects.append(po)
o['material_refs'] = ['armdefaultpart'] # Replace armdefault
def export_material_ref(self, bobject, material, index, o):
if material is None: # Use default for empty mat slots
self.use_default_material(bobject, o)
return
if not material in self.material_array:
self.material_array.append(material)
o['material_refs'].append(arm.utils.asset_name(material))
def export_particle_system_ref(self, psys, index, o):
if psys.settings in self.particle_system_array: # or not modifier.show_render:
return
if psys.settings.instance_object is None or psys.settings.render_type != 'OBJECT':
return
self.particle_system_array[psys.settings] = {"structName": psys.settings.name}
pref = {}
pref['name'] = psys.name
pref['seed'] = psys.seed
pref['particle'] = psys.settings.name
o['particle_refs'].append(pref)
@staticmethod
def get_view3d_area():
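        """Returns the first VIEW_3D area of the active screen, or None if there is none."""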
screen = bpy.context.window.screen
for area in screen.areas:
if area.type == 'VIEW_3D':
return area
return None
def get_viewport_view_matrix(self):
play_area = self.get_view3d_area()
if play_area is None:
return None
for space in play_area.spaces:
if space.type == 'VIEW_3D':
return space.region_3d.view_matrix
return None
def get_viewport_projection_matrix(self):
play_area = self.get_view3d_area()
if play_area is None:
return None, False
for space in play_area.spaces:
if space.type == 'VIEW_3D':
                # return space.region_3d.perspective_matrix # persp = window * view
return space.region_3d.window_matrix, space.region_3d.is_perspective
return None, False
def write_bone_matrices(self, scene, action):
# profile_time = time.time()
begin_frame, end_frame = int(action.frame_range[0]), int(action.frame_range[1])
if len(self.bone_tracks) > 0:
for i in range(begin_frame, end_frame + 1):
scene.frame_set(i)
for track in self.bone_tracks:
values, pose_bone = track[0], track[1]
parent = pose_bone.parent
if parent:
values += ArmoryExporter.write_matrix((parent.matrix.inverted_safe() @ pose_bone.matrix))
else:
values += ArmoryExporter.write_matrix(pose_bone.matrix)
# print('Bone matrices exported in ' + str(time.time() - profile_time))
@staticmethod
def has_baked_material(bobject, materials):
for mat in materials:
if mat is None:
continue
baked_mat = mat.name + '_' + bobject.name + '_baked'
if baked_mat in bpy.data.materials:
return True
return False
@staticmethod
def slot_to_material(bobject, slot):
mat = slot.material
        # Pick up baked material if present
if mat is not None:
baked_mat = mat.name + '_' + bobject.name + '_baked'
if baked_mat in bpy.data.materials:
mat = bpy.data.materials[baked_mat]
return mat
# def ExportMorphWeights(self, node, shapeKeys, scene):
# action = None
# curveArray = []
# indexArray = []
# if (shapeKeys.animation_data):
# action = shapeKeys.animation_data.action
# if (action):
# for fcurve in action.fcurves:
# if ((fcurve.data_path.startswith("key_blocks[")) and (fcurve.data_path.endswith("].value"))):
# keyName = fcurve.data_path.strip("abcdehklopstuvy[]_.")
# if ((keyName[0] == "\"") or (keyName[0] == "'")):
# index = shapeKeys.key_blocks.find(keyName.strip("\"'"))
# if (index >= 0):
# curveArray.append(fcurve)
# indexArray.append(index)
# else:
# curveArray.append(fcurve)
# indexArray.append(int(keyName))
# if ((not action) and (node.animation_data)):
# action = node.animation_data.action
# if (action):
# for fcurve in action.fcurves:
# if ((fcurve.data_path.startswith("data.shape_keys.key_blocks[")) and (fcurve.data_path.endswith("].value"))):
# keyName = fcurve.data_path.strip("abcdehklopstuvy[]_.")
# if ((keyName[0] == "\"") or (keyName[0] == "'")):
# index = shapeKeys.key_blocks.find(keyName.strip("\"'"))
# if (index >= 0):
# curveArray.append(fcurve)
# indexArray.append(index)
# else:
# curveArray.append(fcurve)
# indexArray.append(int(keyName))
# animated = (len(curveArray) != 0)
# referenceName = shapeKeys.reference_key.name if (shapeKeys.use_relative) else ""
# for k in range(len(shapeKeys.key_blocks)):
# self.IndentWrite(B"MorphWeight", 0, (k == 0))
# if (animated):
# self.Write(B" %mw")
# self.WriteInt(k)
# self.Write(B" (index = ")
# self.WriteInt(k)
# self.Write(B") {float {")
# block = shapeKeys.key_blocks[k]
# self.WriteFloat(block.value if (block.name != referenceName) else 1.0)
# self.Write(B"}}\n")
# if (animated):
# self.IndentWrite(B"Animation (begin = ", 0, True)
# self.WriteFloat((action.frame_range[0]) * self.frameTime)
# self.Write(B", end = ")
# self.WriteFloat((action.frame_range[1]) * self.frameTime)
# self.Write(B")\n")
# self.IndentWrite(B"{\n")
# self.indentLevel += 1
# structFlag = False
# for a in range(len(curveArray)):
# k = indexArray[a]
# target = bytes("mw" + str(k), "UTF-8")
# fcurve = curveArray[a]
# kind = OpenGexExporter.ClassifyAnimationCurve(fcurve)
# if ((kind != kAnimationSampled) and (not self.sampleAnimationFlag)):
# self.ExportAnimationTrack(fcurve, kind, target, structFlag)
# else:
# self.ExportMorphWeightSampledAnimationTrack(shapeKeys.key_blocks[k], target, scene, structFlag)
# structFlag = True
# self.indentLevel -= 1
# self.IndentWrite(B"}\n")
def export_object(self, bobject: bpy.types.Object, scene: bpy.types.Scene,
parent_export_data: Dict = None) -> None:
"""This function exports a single object in the scene and
includes its name, object reference, material references (for
meshes), and transform.
Subobjects are then exported recursively.
"""
if not bobject.arm_export:
return
bobject_ref = self.bobject_array.get(bobject)
if bobject_ref is not None:
object_type = bobject_ref["objectType"]
# Linked object, not present in scene
if bobject not in self.object_to_arm_object_dict:
object_export_data: Dict[str, Any] = {}
object_export_data['traits'] = []
object_export_data['spawn'] = False
self.object_to_arm_object_dict[bobject] = object_export_data
object_export_data = self.object_to_arm_object_dict[bobject]
object_export_data['type'] = STRUCT_IDENTIFIER[object_type.value]
object_export_data['name'] = bobject_ref["structName"]
if bobject.parent_type == "BONE":
object_export_data['parent_bone'] = bobject.parent_bone
if bobject.hide_render or not bobject.arm_visible:
object_export_data['visible'] = False
if not bobject.cycles_visibility.camera:
object_export_data['visible_mesh'] = False
if not bobject.cycles_visibility.shadow:
object_export_data['visible_shadow'] = False
if not bobject.arm_spawn:
object_export_data['spawn'] = False
object_export_data['mobile'] = bobject.arm_mobile
if bobject.instance_type == 'COLLECTION' and bobject.instance_collection is not None:
2020-04-09 23:25:26 +02:00
object_export_data['group_ref'] = bobject.instance_collection.name
if bobject.arm_tilesheet != '':
object_export_data['tilesheet_ref'] = bobject.arm_tilesheet
object_export_data['tilesheet_action_ref'] = bobject.arm_tilesheet_action
if len(bobject.arm_propertylist) > 0:
object_export_data['properties'] = []
for p in bobject.arm_propertylist:
po = {}
po['name'] = p.name_prop
po['value'] = getattr(p, p.type_prop + '_prop')
object_export_data['properties'].append(po)
# TODO:
layer_found = True
2020-04-09 23:25:26 +02:00
if not layer_found:
object_export_data['spawn'] = False
# Export the object reference and material references
objref = bobject.data
if objref is not None:
objname = arm.utils.asset_name(objref)
# LOD
if bobject.type == 'MESH' and hasattr(objref, 'arm_lodlist') and len(objref.arm_lodlist) > 0:
object_export_data['lods'] = []
for l in objref.arm_lodlist:
if not l.enabled_prop:
continue
lod = {}
lod['object_ref'] = l.name
lod['screen_size'] = l.screen_size_prop
object_export_data['lods'].append(lod)
if objref.arm_lod_material:
object_export_data['lod_material'] = True
if object_type is NodeType.MESH:
if objref not in self.mesh_array:
self.mesh_array[objref] = {"structName": objname, "objectTable": [bobject]}
else:
self.mesh_array[objref]["objectTable"].append(bobject)
oid = arm.utils.safestr(self.mesh_array[objref]["structName"])
wrd = bpy.data.worlds['Arm']
if wrd.arm_single_data_file:
object_export_data['data_ref'] = oid
else:
ext = '' if not ArmoryExporter.compress_enabled else '.lz4'
if ext == '' and not bpy.data.worlds['Arm'].arm_minimize:
ext = '.json'
object_export_data['data_ref'] = 'mesh_' + oid + ext + '/' + oid
object_export_data['material_refs'] = []
for i in range(len(bobject.material_slots)):
mat = self.slot_to_material(bobject, bobject.material_slots[i])
# Export ref
self.export_material_ref(bobject, mat, i, object_export_data)
# Decal flag
if mat is not None and mat.arm_decal:
object_export_data['type'] = 'decal_object'
# No material, mimic cycles and assign default
if len(object_export_data['material_refs']) == 0:
self.use_default_material(bobject, object_export_data)
num_psys = len(bobject.particle_systems)
if num_psys > 0:
object_export_data['particle_refs'] = []
for i in range(0, num_psys):
2020-04-09 23:25:26 +02:00
self.export_particle_system_ref(bobject.particle_systems[i], i, object_export_data)
aabb = bobject.data.arm_aabb
if aabb[0] == 0 and aabb[1] == 0 and aabb[2] == 0:
self.calc_aabb(bobject)
object_export_data['dimensions'] = [aabb[0], aabb[1], aabb[2]]
# shapeKeys = ArmoryExporter.get_shape_keys(objref)
# if shapeKeys:
# self.ExportMorphWeights(bobject, shapeKeys, scene, object_export_data)
elif object_type is NodeType.LIGHT:
if objref not in self.light_array:
self.light_array[objref] = {"structName" : objname, "objectTable" : [bobject]}
else:
self.light_array[objref]["objectTable"].append(bobject)
object_export_data['data_ref'] = self.light_array[objref]["structName"]
elif object_type is NodeType.PROBE:
if objref not in self.probe_array:
self.probe_array[objref] = {"structName" : objname, "objectTable" : [bobject]}
else:
self.probe_array[objref]["objectTable"].append(bobject)
dist = bobject.data.influence_distance
if objref.type == "PLANAR":
object_export_data['dimensions'] = [1.0, 1.0, dist]
# GRID, CUBEMAP
else:
object_export_data['dimensions'] = [dist, dist, dist]
object_export_data['data_ref'] = self.probe_array[objref]["structName"]
elif object_type is NodeType.CAMERA:
if 'spawn' in object_export_data and not object_export_data['spawn']:
self.camera_spawned |= False
else:
self.camera_spawned = True
if objref not in self.camera_array:
self.camera_array[objref] = {"structName" : objname, "objectTable" : [bobject]}
else:
self.camera_array[objref]["objectTable"].append(bobject)
object_export_data['data_ref'] = self.camera_array[objref]["structName"]
elif object_type is NodeType.SPEAKER:
if objref not in self.speaker_array:
self.speaker_array[objref] = {"structName" : objname, "objectTable" : [bobject]}
else:
self.speaker_array[objref]["objectTable"].append(bobject)
object_export_data['data_ref'] = self.speaker_array[objref]["structName"]
# Export the transform. If object is animated, then animation tracks are exported here
if bobject.type != 'ARMATURE' and bobject.animation_data is not None:
action = bobject.animation_data.action
export_actions = [action]
for track in bobject.animation_data.nla_tracks:
if track.strips is None:
continue
for strip in track.strips:
if strip.action is None or strip.action in export_actions:
continue
export_actions.append(strip.action)
orig_action = action
for a in export_actions:
bobject.animation_data.action = a
self.export_object_transform(bobject, object_export_data)
if len(export_actions) >= 2 and export_actions[0] is None: # No action assigned
object_export_data['object_actions'].insert(0, 'null')
bobject.animation_data.action = orig_action
else:
self.export_object_transform(bobject, object_export_data)
# If the object is parented to a bone and is not relative, then undo the bone's transform
if bobject.parent_type == "BONE":
armature = bobject.parent.data
bone = armature.bones[bobject.parent_bone]
# if not bone.use_relative_parent:
object_export_data['parent_bone_connected'] = bone.use_connect
if bone.use_connect:
bone_translation = Vector((0, bone.length, 0)) + bone.head
2020-04-09 23:25:26 +02:00
object_export_data['parent_bone_tail'] = [bone_translation[0], bone_translation[1], bone_translation[2]]
else:
bone_translation = bone.tail - bone.head
object_export_data['parent_bone_tail'] = [bone_translation[0], bone_translation[1], bone_translation[2]]
pose_bone = bobject.parent.pose.bones[bobject.parent_bone]
bone_translation_pose = pose_bone.tail - pose_bone.head
object_export_data['parent_bone_tail_pose'] = [bone_translation_pose[0], bone_translation_pose[1], bone_translation_pose[2]]
if bobject.type == 'ARMATURE' and bobject.data is not None:
bdata = bobject.data # Armature data
action = None # Reference start action
adata = bobject.animation_data
# Active action
if adata is not None:
action = adata.action
if action is None:
log.warn('Object ' + bobject.name + ' - No action assigned, setting to pose')
bobject.animation_data_create()
actions = bpy.data.actions
action = actions.get('armorypose')
if action is None:
action = actions.new(name='armorypose')
# Export actions
export_actions = [action]
# hasattr - armature modifier may reference non-parent armature object to deform with
if hasattr(adata, 'nla_tracks') and adata.nla_tracks is not None:
for track in adata.nla_tracks:
if track.strips is None:
continue
for strip in track.strips:
if strip.action is None:
continue
if strip.action.name == action.name:
continue
export_actions.append(strip.action)
armatureid = arm.utils.safestr(arm.utils.asset_name(bdata))
ext = '.lz4' if ArmoryExporter.compress_enabled else ''
if ext == '' and not bpy.data.worlds['Arm'].arm_minimize:
ext = '.json'
object_export_data['bone_actions'] = []
for action in export_actions:
aname = arm.utils.safestr(arm.utils.asset_name(action))
object_export_data['bone_actions'].append('action_' + armatureid + '_' + aname + ext)
clear_op = set()
skelobj = bobject
baked_actions = []
orig_action = bobject.animation_data.action
if bdata.arm_autobake and bobject.name not in bpy.context.collection.all_objects:
clear_op.add('unlink')
                    # Clone bobject and put it in the current scene so the bake operator can run
if bobject.library is not None:
skelobj = bobject.copy()
clear_op.add('rem')
bpy.context.collection.objects.link(skelobj)
for action in export_actions:
aname = arm.utils.safestr(arm.utils.asset_name(action))
skelobj.animation_data.action = action
fp = self.get_meshes_file_path('action_' + armatureid + '_' + aname, compressed=ArmoryExporter.compress_enabled)
assets.add(fp)
if not bdata.arm_cached or not os.path.exists(fp):
#handle autobake
if bdata.arm_autobake:
sel = bpy.context.selected_objects[:]
for _o in sel:
_o.select_set(False)
skelobj.select_set(True)
bpy.ops.nla.bake(frame_start = action.frame_range[0], frame_end=action.frame_range[1], step=1, only_selected=False, visual_keying=True)
action = skelobj.animation_data.action
skelobj.select_set(False)
for _o in sel:
_o.select_set(True)
baked_actions.append(action)
wrd = bpy.data.worlds['Arm']
if wrd.arm_verbose_output:
print('Exporting armature action ' + aname)
bones = []
self.bone_tracks = []
for bone in bdata.bones:
if not bone.parent:
boneo = {}
self.export_bone(skelobj, bone, scene, boneo, action)
bones.append(boneo)
self.write_bone_matrices( bpy.context.scene, action)
if len(bones) > 0 and 'anim' in bones[0]:
self.export_pose_markers(bones[0]['anim'], action)
# Save action separately
action_obj = {}
action_obj['name'] = aname
action_obj['objects'] = bones
arm.utils.write_arm(fp, action_obj)
#restore settings
skelobj.animation_data.action = orig_action
for a in baked_actions: bpy.data.actions.remove( a, do_unlink=True)
if 'unlink' in clear_op: bpy.context.collection.objects.unlink(skelobj)
if 'rem' in clear_op: bpy.data.objects.remove(skelobj, do_unlink=True)
# TODO: cache per action
bdata.arm_cached = True
if parent_export_data is None:
self.output['objects'].append(object_export_data)
else:
parent_export_data['children'].append(object_export_data)
self.post_export_object(bobject, object_export_data, object_type)
if not hasattr(object_export_data, 'children') and len(bobject.children) > 0:
object_export_data['children'] = []
if bobject.arm_instanced == 'Off':
for subbobject in bobject.children:
self.export_object(subbobject, scene, object_export_data)
def export_skin(self, bobject, armature, exportMesh, o):
# This function exports all skinning data, which includes the skeleton
# and per-vertex bone influence data
oskin = {}
o['skin'] = oskin
# Write the skin bind pose transform
otrans = {}
oskin['transform'] = otrans
otrans['values'] = ArmoryExporter.write_matrix(bobject.matrix_world)
bone_array = armature.data.bones
bone_count = len(bone_array)
rpdat = arm.utils.get_rp()
max_bones = rpdat.arm_skin_max_bones
if bone_count > max_bones:
bone_count = max_bones
# Write the bone object reference array
oskin['bone_ref_array'] = np.empty(bone_count, dtype=object)
oskin['bone_len_array'] = np.empty(bone_count, dtype='<f4')
for i in range(bone_count):
boneRef = self.find_bone(bone_array[i].name)
if boneRef:
oskin['bone_ref_array'][i] = boneRef[1]["structName"]
oskin['bone_len_array'][i] = bone_array[i].length
else:
oskin['bone_ref_array'][i] = ""
oskin['bone_len_array'][i] = 0.0
# Write the bind pose transform array
oskin['transformsI'] = []
for i in range(bone_count):
skeletonI = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe()
skeletonI = (skeletonI @ bobject.matrix_world)
oskin['transformsI'].append(ArmoryExporter.write_matrix(skeletonI))
# Export the per-vertex bone influence data
group_remap = []
for group in bobject.vertex_groups:
for i in range(bone_count):
if bone_array[i].name == group.name:
group_remap.append(i)
break
else:
group_remap.append(-1)
bone_count_array = np.empty(len(exportMesh.loops), dtype='<i2')
bone_index_array = np.empty(len(exportMesh.loops) * 4, dtype='<i2')
bone_weight_array = np.empty(len(exportMesh.loops) * 4, dtype='<f4')
vertices = bobject.data.vertices
count = 0
for index, l in enumerate(exportMesh.loops):
bone_count = 0
total_weight = 0.0
bone_values = []
for g in vertices[l.vertex_index].groups:
bone_index = group_remap[g.group]
bone_weight = g.weight
if bone_index >= 0: #and bone_weight != 0.0:
bone_values.append((bone_weight, bone_index))
total_weight += bone_weight
bone_count += 1
if bone_count > 4:
bone_count = 4
bone_values.sort(reverse=True)
bone_values = bone_values[:4]
bone_count_array[index] = bone_count
for bv in bone_values:
bone_weight_array[count] = bv[0]
bone_index_array[count] = bv[1]
count += 1
if total_weight != 0.0 and total_weight != 1.0:
normalizer = 1.0 / total_weight
for i in range(bone_count):
bone_weight_array[count - i - 1] *= normalizer
bone_index_array = bone_index_array[:count]
bone_weight_array = bone_weight_array[:count]
bone_weight_array *= 32767
bone_weight_array = np.array(bone_weight_array, dtype='<i2')
oskin['bone_count_array'] = bone_count_array
oskin['bone_index_array'] = bone_index_array
oskin['bone_weight_array'] = bone_weight_array
# Bone constraints
if not armature.data.arm_autobake:
for bone in armature.pose.bones:
if len(bone.constraints) > 0:
if 'constraints' not in oskin:
oskin['constraints'] = []
self.add_constraints(bone, oskin, bone=True)
def write_mesh(self, bobject, fp, o):
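        """Writes the given mesh data block either into the scene's single
        data file or into its own file, and marks the mesh as cached."""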
wrd = bpy.data.worlds['Arm']
if wrd.arm_single_data_file:
self.output['mesh_datas'].append(o)
else: # One mesh data per file
mesh_obj = {}
mesh_obj['mesh_datas'] = [o]
arm.utils.write_arm(fp, mesh_obj)
bobject.data.arm_cached = True
@staticmethod
def calc_aabb(bobject):
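        """Calculates the axis-aligned bounding box dimensions of the given
        object and stores them in bobject.data.arm_aabb."""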
aabb_center = 0.125 * sum((Vector(b) for b in bobject.bound_box), Vector())
bobject.data.arm_aabb = [ \
abs((bobject.bound_box[6][0] - bobject.bound_box[0][0]) / 2 + abs(aabb_center[0])) * 2, \
abs((bobject.bound_box[6][1] - bobject.bound_box[0][1]) / 2 + abs(aabb_center[1])) * 2, \
abs((bobject.bound_box[6][2] - bobject.bound_box[0][2]) / 2 + abs(aabb_center[2])) * 2 \
]
def export_mesh_data(self, exportMesh, bobject, o, has_armature=False):
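        """Exports the mesh geometry: positions, normals, UVs, vertex colors
        and tangents are packed into 16-bit vertex arrays, and per-material
        index arrays are built from the loop triangles."""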
exportMesh.calc_normals_split()
exportMesh.calc_loop_triangles()
loops = exportMesh.loops
num_verts = len(loops)
num_uv_layers = len(exportMesh.uv_layers)
is_baked = self.has_baked_material(bobject, exportMesh.materials)
has_tex = (self.get_export_uvs(bobject.data) and num_uv_layers > 0) or is_baked
has_tex1 = has_tex and num_uv_layers > 1
num_colors = len(exportMesh.vertex_colors)
has_col = self.get_export_vcols(bobject.data) and num_colors > 0
has_tang = self.has_tangents(bobject.data)
pdata = np.empty(num_verts * 4, dtype='<f4') # p.xyz, n.z
ndata = np.empty(num_verts * 2, dtype='<f4') # n.xy
if has_tex:
t0map = 0 # Get active uvmap
t0data = np.empty(num_verts * 2, dtype='<f4')
uv_layers = exportMesh.uv_layers
if uv_layers is not None:
if 'UVMap_baked' in uv_layers:
for i in range(0, len(uv_layers)):
if uv_layers[i].name == 'UVMap_baked':
t0map = i
break
else:
for i in range(0, len(uv_layers)):
if uv_layers[i].active_render:
t0map = i
break
if has_tex1:
t1map = 1 if t0map == 0 else 0
t1data = np.empty(num_verts * 2, dtype='<f4')
# Scale for packed coords
maxdim = 1.0
lay0 = uv_layers[t0map]
for v in lay0.data:
if abs(v.uv[0]) > maxdim:
maxdim = abs(v.uv[0])
if abs(v.uv[1]) > maxdim:
maxdim = abs(v.uv[1])
if has_tex1:
lay1 = uv_layers[t1map]
for v in lay1.data:
if abs(v.uv[0]) > maxdim:
maxdim = abs(v.uv[0])
if abs(v.uv[1]) > maxdim:
maxdim = abs(v.uv[1])
if maxdim > 1:
o['scale_tex'] = maxdim
invscale_tex = (1 / o['scale_tex']) * 32767
else:
invscale_tex = 1 * 32767
if has_tang:
exportMesh.calc_tangents(uvmap=lay0.name)
tangdata = np.empty(num_verts * 3, dtype='<f4')
if has_col:
cdata = np.empty(num_verts * 3, dtype='<f4')
# Scale for packed coords
maxdim = max(bobject.data.arm_aabb[0], max(bobject.data.arm_aabb[1], bobject.data.arm_aabb[2]))
if maxdim > 2:
o['scale_pos'] = maxdim / 2
else:
o['scale_pos'] = 1.0
if has_armature: # Allow up to 2x bigger bounds for skinned mesh
o['scale_pos'] *= 2.0
scale_pos = o['scale_pos']
invscale_pos = (1 / scale_pos) * 32767
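        # Note: vertex data is quantized to signed 16-bit values (the
        # 'short*norm' vertex arrays below), so positions and UVs are scaled
        # by invscale_pos / invscale_tex first; the corresponding scale
        # factors are kept in o['scale_pos'] and o['scale_tex'] above.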
verts = exportMesh.vertices
if has_tex:
lay0 = exportMesh.uv_layers[t0map]
if has_tex1:
lay1 = exportMesh.uv_layers[t1map]
if has_col:
vcol0 = exportMesh.vertex_colors[0].data
for i, loop in enumerate(loops):
v = verts[loop.vertex_index]
co = v.co
normal = loop.normal
tang = loop.tangent
i4 = i * 4
i2 = i * 2
pdata[i4 ] = co[0]
pdata[i4 + 1] = co[1]
pdata[i4 + 2] = co[2]
pdata[i4 + 3] = normal[2] * scale_pos # Cancel scale
ndata[i2 ] = normal[0]
ndata[i2 + 1] = normal[1]
if has_tex:
uv = lay0.data[loop.index].uv
t0data[i2 ] = uv[0]
t0data[i2 + 1] = 1.0 - uv[1] # Reverse Y
if has_tex1:
uv = lay1.data[loop.index].uv
t1data[i2 ] = uv[0]
t1data[i2 + 1] = 1.0 - uv[1]
if has_tang:
i3 = i * 3
tangdata[i3 ] = tang[0]
tangdata[i3 + 1] = tang[1]
tangdata[i3 + 2] = tang[2]
if has_col:
col = vcol0[loop.index].color
i3 = i * 3
cdata[i3 ] = col[0]
cdata[i3 + 1] = col[1]
cdata[i3 + 2] = col[2]
mats = exportMesh.materials
poly_map = []
for i in range(max(len(mats), 1)):
poly_map.append([])
for poly in exportMesh.polygons:
poly_map[poly.material_index].append(poly)
o['index_arrays'] = []
# map polygon indices to triangle loops
tri_loops = {}
for loop in exportMesh.loop_triangles:
if loop.polygon_index not in tri_loops:
tri_loops[loop.polygon_index] = []
tri_loops[loop.polygon_index].append(loop)
for index, polys in enumerate(poly_map):
tris = 0
for poly in polys:
tris += poly.loop_total - 2
if tris == 0: # No face assigned
continue
prim = np.empty(tris * 3, dtype='<i4')
i = 0
for poly in polys:
for loop in tri_loops[poly.index]:
prim[i ] = loops[loop.loops[0]].index
prim[i + 1] = loops[loop.loops[1]].index
prim[i + 2] = loops[loop.loops[2]].index
i += 3
ia = {}
ia['values'] = prim
ia['material'] = 0
if len(mats) > 1:
for i in range(len(mats)): # Multi-mat mesh
if (mats[i] == mats[index]): # Default material for empty slots
ia['material'] = i
break
o['index_arrays'].append(ia)
# Pack
pdata *= invscale_pos
ndata *= 32767
pdata = np.array(pdata, dtype='<i2')
ndata = np.array(ndata, dtype='<i2')
if has_tex:
t0data *= invscale_tex
t0data = np.array(t0data, dtype='<i2')
if has_tex1:
t1data *= invscale_tex
t1data = np.array(t1data, dtype='<i2')
if has_col:
cdata *= 32767
cdata = np.array(cdata, dtype='<i2')
if has_tang:
tangdata *= 32767
tangdata = np.array(tangdata, dtype='<i2')
# Output
o['vertex_arrays'] = []
o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
if has_tex:
o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' })
if has_tex1:
o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' })
if has_col:
o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 })
if has_tang:
o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 })
# If there are multiple morph targets, export them here.
# if (shapeKeys):
# shapeKeys.key_blocks[0].value = 0.0
# for m in range(1, len(currentMorphValue)):
# shapeKeys.key_blocks[m].value = 1.0
# mesh.update()
# node.active_shape_key_index = m
# morphMesh = node.to_mesh(scene, applyModifiers, "RENDER", True, False)
# # Write the morph target position array.
# self.IndentWrite(B"VertexArray (attrib = \"position\", morph = ", 0, True)
# self.WriteInt(m)
# self.Write(B")\n")
# self.IndentWrite(B"{\n")
# self.indentLevel += 1
# self.IndentWrite(B"float[3]\t\t// ")
# self.WriteInt(vertexCount)
# self.IndentWrite(B"{\n", 0, True)
# self.WriteMorphPositionArray3D(unifiedVertexArray, morphMesh.vertices)
# self.IndentWrite(B"}\n")
# self.indentLevel -= 1
# self.IndentWrite(B"}\n\n")
# # Write the morph target normal array.
# self.IndentWrite(B"VertexArray (attrib = \"normal\", morph = ")
# self.WriteInt(m)
# self.Write(B")\n")
# self.IndentWrite(B"{\n")
# self.indentLevel += 1
# self.IndentWrite(B"float[3]\t\t// ")
# self.WriteInt(vertexCount)
# self.IndentWrite(B"{\n", 0, True)
# self.WriteMorphNormalArray3D(unifiedVertexArray, morphMesh.vertices, morphMesh.tessfaces)
# self.IndentWrite(B"}\n")
# self.indentLevel -= 1
# self.IndentWrite(B"}\n")
# bpy.data.meshes.remove(morphMesh)
def has_tangents(self, exportMesh):
return self.get_export_uvs(exportMesh) and self.get_export_tangents(exportMesh) and len(exportMesh.uv_layers) > 0
def export_mesh(self, objectRef, scene):
"""Exports a single mesh object."""
# profile_time = time.time()
table = objectRef[1]["objectTable"]
bobject = table[0]
oid = arm.utils.safestr(objectRef[1]["structName"])
wrd = bpy.data.worlds['Arm']
if wrd.arm_single_data_file:
fp = None
else:
fp = self.get_meshes_file_path('mesh_' + oid, compressed=ArmoryExporter.compress_enabled)
assets.add(fp)
# No export necessary
if bobject.data.arm_cached and os.path.exists(fp):
return
# Mesh users have different modifier stack
for i in range(1, len(table)):
if not self.mod_equal_stack(bobject, table[i]):
log.warn('{0} users {1} and {2} differ in modifier stack - use Make Single User - Object & Data for now'.format(oid, bobject.name, table[i].name))
break
if wrd.arm_verbose_output:
print('Exporting mesh ' + arm.utils.asset_name(bobject.data))
o = {}
o['name'] = oid
mesh = objectRef[0]
structFlag = False
# Save the morph state if necessary
activeShapeKeyIndex = bobject.active_shape_key_index
showOnlyShapeKey = bobject.show_only_shape_key
currentMorphValue = []
shapeKeys = ArmoryExporter.get_shape_keys(mesh)
if shapeKeys:
bobject.active_shape_key_index = 0
bobject.show_only_shape_key = True
baseIndex = 0
relative = shapeKeys.use_relative
if relative:
morphCount = 0
baseName = shapeKeys.reference_key.name
for block in shapeKeys.key_blocks:
if block.name == baseName:
baseIndex = morphCount
break
morphCount += 1
morphCount = 0
for block in shapeKeys.key_blocks:
currentMorphValue.append(block.value)
block.value = 0.0
if block.name != "":
# self.IndentWrite(B"Morph (index = ", 0, structFlag)
# self.WriteInt(morphCount)
# if (relative) and (morphCount != baseIndex):
# self.Write(B", base = ")
# self.WriteInt(baseIndex)
# self.Write(B")\n")
# self.IndentWrite(B"{\n")
# self.IndentWrite(B"Name {string {\"", 1)
# self.Write(bytes(block.name, "UTF-8"))
# self.Write(B"\"}}\n")
# self.IndentWrite(B"}\n")
# TODO
structFlag = True
morphCount += 1
shapeKeys.key_blocks[0].value = 1.0
mesh.update()
armature = bobject.find_armature()
apply_modifiers = not armature
bobject_eval = bobject.evaluated_get(self.depsgraph) if apply_modifiers else bobject
exportMesh = bobject_eval.to_mesh()
if exportMesh is None:
2017-04-01 21:25:57 +02:00
log.warn(oid + ' was not exported')
return
if len(exportMesh.uv_layers) > 2:
log.warn(oid + ' exceeds maximum of 2 UV Maps supported')
# Update aabb
self.calc_aabb(bobject)
# Process meshes
if ArmoryExporter.optimize_enabled:
vert_list = exporter_opt.export_mesh_data(self, exportMesh, bobject, o, has_armature=armature is not None)
if armature:
exporter_opt.export_skin(self, bobject, armature, vert_list, o)
else:
self.export_mesh_data(exportMesh, bobject, o, has_armature=armature is not None)
if armature:
self.export_skin(bobject, armature, exportMesh, o)
# Restore the morph state
if shapeKeys:
bobject.active_shape_key_index = activeShapeKeyIndex
bobject.show_only_shape_key = showOnlyShapeKey
for m in range(len(currentMorphValue)):
shapeKeys.key_blocks[m].value = currentMorphValue[m]
mesh.update()
# Check if mesh is using instanced rendering
instanced_type, instanced_data = self.object_process_instancing(table, o['scale_pos'])
# Save offset data for instanced rendering
if instanced_type > 0:
o['instanced_data'] = instanced_data
o['instanced_type'] = instanced_type
# Export usage
if bobject.data.arm_dynamic_usage:
o['dynamic_usage'] = bobject.data.arm_dynamic_usage
self.write_mesh(bobject, fp, o)
# print('Mesh exported in ' + str(time.time() - profile_time))
2019-05-20 14:32:48 +02:00
if hasattr(bobject, 'evaluated_get'):
bobject_eval.to_mesh_clear()
def export_light(self, objectRef):
"""Exports a single light object."""
rpdat = arm.utils.get_rp()
objref = objectRef[0]
objtype = objref.type
o = {}
o['name'] = objectRef[1]["structName"]
o['type'] = objtype.lower()
o['cast_shadow'] = objref.use_shadow
o['near_plane'] = objref.arm_clip_start
o['far_plane'] = objref.arm_clip_end
o['fov'] = objref.arm_fov
o['color'] = [objref.color[0], objref.color[1], objref.color[2]]
o['strength'] = objref.energy
o['shadows_bias'] = objref.arm_shadows_bias * 0.0001
if rpdat.rp_shadows:
if objtype == 'POINT':
o['shadowmap_size'] = int(rpdat.rp_shadowmap_cube)
else:
o['shadowmap_size'] = arm.utils.get_cascade_size(rpdat)
else:
o['shadowmap_size'] = 0
if objtype == 'SUN':
o['strength'] *= 0.325
o['shadows_bias'] *= 20.0 # Scale bias for ortho light matrix
if o['shadowmap_size'] > 1024:
o['shadows_bias'] *= 1 / (o['shadowmap_size'] / 1024) # Less bias for bigger maps
elif objtype == 'POINT':
o['strength'] *= 2.6
if bpy.app.version >= (2, 80, 72):
o['strength'] *= 0.01
o['fov'] = 1.5708 # pi/2
o['shadowmap_cube'] = True
if objref.shadow_soft_size > 0.1:
o['light_size'] = objref.shadow_soft_size * 10
elif objtype == 'SPOT':
o['strength'] *= 2.6
if bpy.app.version >= (2, 80, 72):
o['strength'] *= 0.01
o['spot_size'] = math.cos(objref.spot_size / 2)
o['spot_blend'] = objref.spot_blend / 10 # Cycles defaults to 0.15
elif objtype == 'AREA':
o['strength'] *= 80.0 / (objref.size * objref.size_y)
if bpy.app.version >= (2, 80, 72):
o['strength'] *= 0.01
o['size'] = objref.size
o['size_y'] = objref.size_y
self.output['light_datas'].append(o)
def export_probe(self, objectRef):
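"""Exports a single light probe object."""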
o = {}
o['name'] = objectRef[1]["structName"]
bo = objectRef[0]
if bo.type == 'GRID':
o['type'] = 'grid'
elif bo.type == 'PLANAR':
o['type'] = 'planar'
else: # CUBEMAP
o['type'] = 'cubemap'
self.output['probe_datas'].append(o)
def export_collection(self, collection):
"""Exports a single collection."""
scene_objects = self.scene.collection.all_objects
out_collection = {}
out_collection['name'] = collection.name
out_collection['instance_offset'] = list(collection.instance_offset)
out_collection['object_refs'] = []
for bobject in collection.objects:
# Add unparented objects only, then instantiate full object
# child tree
if bobject.parent is None and bobject.arm_export:
# Skip objects that are controlled by a proxy
has_proxy_user = False
for bo in bpy.data.objects:
if bo.proxy == bobject:
has_proxy_user = True
break
if has_proxy_user:
continue
asset_name = arm.utils.asset_name(bobject)
if collection.library is None:
# Collection is in the same file, but (likely) in another scene
if asset_name not in scene_objects:
self.process_bobject(bobject)
self.export_object(bobject, self.scene)
else:
# Add external linked objects
# Iron differentiates objects based on their names,
# so errors will happen if two objects with the
# same name exist. This check is only required
# when the object in question is in a library,
# otherwise Blender will not allow duplicate names
if asset_name in scene_objects:
log.warn("skipping export of the object"
f" {bobject.name} (collection"
f" {collection.name}) because it has the same"
" export name as another object in the scene:"
f" {asset_name}")
continue
self.process_bobject(bobject)
self.export_object(bobject, self.scene)
out_collection['object_refs'].append(asset_name)
self.output['groups'].append(out_collection)
def get_camera_clear_color(self):
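"""Returns the world background color used to clear the camera,
falling back to a dark grey when no world or node tree is set."""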
if self.scene.world is None:
return [0.051, 0.051, 0.051, 1.0]
if self.scene.world.node_tree is None:
c = self.scene.world.color
return [c[0], c[1], c[2], 1.0]
if 'Background' in self.scene.world.node_tree.nodes:
background_node = self.scene.world.node_tree.nodes['Background']
col = background_node.inputs[0].default_value
strength = background_node.inputs[1].default_value
ar = [col[0] * strength, col[1] * strength, col[2] * strength, col[3]]
ar[0] = max(min(ar[0], 1.0), 0.0)
ar[1] = max(min(ar[1], 1.0), 0.0)
ar[2] = max(min(ar[2], 1.0), 0.0)
ar[3] = max(min(ar[3], 1.0), 0.0)
return ar
else:
return [0.051, 0.051, 0.051, 1.0]
@staticmethod
def extract_projection(o, proj, with_planes=True):
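"""Extracts the vertical field of view (and optionally the clip planes)
from a perspective projection matrix. A short note on the math, assuming
the OpenGL-style matrix returned by Camera.calc_matrix_camera():
    proj[1][1] = 1 / tan(fov / 2)            -> fov = 2 * atan(1 / proj[1][1])
    k = (proj[2][2] - 1) / (proj[2][2] + 1)  = far / near
    proj[2][3] = -2 * far * near / (far - near), so
    near = proj[2][3] * (1 - k) / (2 * k) and far = k * near
"""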
a = proj[0][0]
b = proj[1][1]
c = proj[2][2]
d = proj[2][3]
k = (c - 1.0) / (c + 1.0)
o['fov'] = 2.0 * math.atan(1.0 / b)
if with_planes:
o['near_plane'] = (d * (1.0 - k)) / (2.0 * k)
o['far_plane'] = k * o['near_plane']
@staticmethod
def extract_ortho(o, proj):
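"""Extracts the orthographic bounds (left, right, bottom, top) and clip
planes from an orthographic projection matrix."""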
# left, right, bottom, top
o['ortho'] = [-(1 + proj[3][0]) / proj[0][0], \
(1 - proj[3][0]) / proj[0][0], \
-(1 + proj[3][1]) / proj[1][1], \
(1 - proj[3][1]) / proj[1][1]]
o['near_plane'] = (1 + proj[3][2]) / proj[2][2]
o['far_plane'] = -(1 - proj[3][2]) / proj[2][2]
o['near_plane'] *= 2
o['far_plane'] *= 2
def export_camera(self, objectRef):
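"""Exports a single camera object."""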
o = {}
o['name'] = objectRef[1]["structName"]
objref = objectRef[0]
camera = objectRef[1]["objectTable"][0]
render = self.scene.render
proj = camera.calc_matrix_camera(
self.depsgraph,
x=render.resolution_x,
y=render.resolution_y,
scale_x=render.pixel_aspect_x,
scale_y=render.pixel_aspect_y)
if objref.type == 'PERSP':
self.extract_projection(o, proj)
else:
self.extract_ortho(o, proj)
o['frustum_culling'] = objref.arm_frustum_culling
o['clear_color'] = self.get_camera_clear_color()
self.output['camera_datas'].append(o)
def export_speaker(self, objectRef):
"""Exports a single speaker object."""
o = {}
o['name'] = objectRef[1]["structName"]
objref = objectRef[0]
if objref.sound:
# Packed
if objref.sound.packed_file is not None:
unpack_path = arm.utils.get_fp_build() + '/compiled/Assets/unpacked'
if not os.path.exists(unpack_path):
os.makedirs(unpack_path)
unpack_filepath = unpack_path + '/' + objref.sound.name
if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != objref.sound.packed_file.size:
with open(unpack_filepath, 'wb') as f:
f.write(objref.sound.packed_file.data)
assets.add(unpack_filepath)
# External
else:
assets.add(arm.utils.asset_path(objref.sound.filepath)) # Link sound to assets
o['sound'] = arm.utils.extract_filename(objref.sound.filepath)
else:
o['sound'] = ''
o['muted'] = objref.muted
o['loop'] = objref.arm_loop
o['stream'] = objref.arm_stream
o['volume'] = objref.volume
o['pitch'] = objref.pitch
o['attenuation'] = objref.attenuation
o['play_on_start'] = objref.arm_play_on_start
self.output['speaker_datas'].append(o)
def make_default_mat(self, mat_name, mat_objs, is_particle=False):
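"""Creates a temporary default material with the given name, parses it
into the output and removes it again. Used for objects that have no
material assigned."""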
if mat_name in bpy.data.materials:
return
mat = bpy.data.materials.new(name=mat_name)
# if default_exists:
# mat.arm_cached = True
if is_particle:
mat.arm_particle_flag = True
# Enable nodes and set a default roughness on the Principled BSDF
mat.use_nodes = True
for node in mat.node_tree.nodes:
if node.type == 'BSDF_PRINCIPLED':
node.inputs[7].default_value = 0.25
o = {}
o['name'] = mat.name
o['contexts'] = []
mat_users = dict()
mat_users[mat] = mat_objs
mat_armusers = dict()
mat_armusers[mat] = [o]
make_material.parse(mat, o, mat_users, mat_armusers)
self.output['material_datas'].append(o)
bpy.data.materials.remove(mat)
rpdat = arm.utils.get_rp()
if not rpdat.arm_culling:
o['override_context'] = {}
o['override_context']['cull_mode'] = 'none'
def signature_traverse(self, node, sign):
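"""Recursively builds a signature string for the node tree ending at the
given node (node types, image paths and unconnected default values),
used to detect whether a material needs to be re-parsed."""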
sign += node.type + '-'
if node.type == 'TEX_IMAGE' and node.image is not None:
sign += node.image.filepath + '-'
for inp in node.inputs:
if inp.is_linked:
sign = self.signature_traverse(inp.links[0].from_node, sign)
else:
# Unconnected socket
if not hasattr(inp, 'default_value'):
sign += 'o'
elif inp.type == 'RGB' or inp.type == 'RGBA' or inp.type == 'VECTOR':
sign += str(inp.default_value[0])
sign += str(inp.default_value[1])
sign += str(inp.default_value[2])
else:
sign += str(inp.default_value)
return sign
def get_signature(self, mat):
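"""Returns the signature of the material's node tree, or None if no output node is found."""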
nodes = mat.node_tree.nodes
output_node = cycles.node_by_type(nodes, 'OUTPUT_MATERIAL')
if output_node is not None:
sign = self.signature_traverse(output_node, '')
return sign
return None
def export_materials(self):
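"""Exports all referenced materials and auto-enables the render path
features (translucency, overlays, blending, decals) they require."""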
wrd = bpy.data.worlds['Arm']
# Keep materials with fake user
for material in bpy.data.materials:
if material.use_fake_user and material not in self.material_array:
self.material_array.append(material)
# Ensure the same order for merging materials
self.material_array.sort(key=lambda x: x.name)
if wrd.arm_batch_materials:
mat_users = self.material_to_object_dict
mat_armusers = self.material_to_arm_object_dict
mat_batch.build(self.material_array, mat_users, mat_armusers)
transluc_used = False
overlays_used = False
blending_used = False
decals_used = False
# sss_used = False
for material in self.material_array:
# If the material is unlinked, material becomes None
if material is None:
continue
if not material.use_nodes:
material.use_nodes = True
# Recache material
signature = self.get_signature(material)
if signature != material.signature:
material.arm_cached = False
if signature is not None:
material.signature = signature
o = {}
o['name'] = arm.utils.asset_name(material)
if material.arm_skip_context != '':
o['skip_context'] = material.arm_skip_context
rpdat = arm.utils.get_rp()
if material.arm_two_sided or not rpdat.arm_culling:
o['override_context'] = {}
o['override_context']['cull_mode'] = 'none'
elif material.arm_cull_mode != 'clockwise':
o['override_context'] = {}
o['override_context']['cull_mode'] = material.arm_cull_mode
o['contexts'] = []
mat_users = self.material_to_object_dict
mat_armusers = self.material_to_arm_object_dict
sd, rpasses = make_material.parse(material, o, mat_users, mat_armusers)
# Attach MovieTexture
for con in o['contexts']:
for tex in con['bind_textures']:
if 'source' in tex and tex['source'] == 'movie':
trait = {}
trait['type'] = 'Script'
trait['class_name'] = 'armory.trait.internal.MovieTexture'
ArmoryExporter.import_traits.append(trait['class_name'])
trait['parameters'] = ['"' + tex['file'] + '"']
for user in mat_armusers[material]:
user['traits'].append(trait)
if 'translucent' in rpasses:
transluc_used = True
if 'overlay' in rpasses:
overlays_used = True
if 'mesh' in rpasses and material.arm_blending:
blending_used = True
if 'decal' in rpasses:
decals_used = True
uv_export = False
tang_export = False
vcol_export = False
vs_str = ''
for con in sd['contexts']:
for elem in con['vertex_elements']:
if len(vs_str) > 0:
vs_str += ','
vs_str += elem['name']
if elem['name'] == 'tang':
tang_export = True
elif elem['name'] == 'tex':
uv_export = True
elif elem['name'] == 'col':
vcol_export = True
for con in o['contexts']: # TODO: blend context
if con['name'] == 'mesh' and material.arm_blending:
con['name'] = 'blend'
if (material.export_tangents != tang_export) or \
(material.export_uvs != uv_export) or \
(material.export_vcols != vcol_export):
material.export_uvs = uv_export
material.export_vcols = vcol_export
material.export_tangents = tang_export
if material in self.material_to_object_dict:
mat_users = self.material_to_object_dict[material]
for ob in mat_users:
ob.data.arm_cached = False
self.output['material_datas'].append(o)
material.arm_cached = True
# Auto-enable render-path features
rebuild_rp = False
rpdat = arm.utils.get_rp()
if rpdat.rp_translucency_state == 'Auto' and rpdat.rp_translucency != transluc_used:
rpdat.rp_translucency = transluc_used
rebuild_rp = True
if rpdat.rp_overlays_state == 'Auto' and rpdat.rp_overlays != overlays_used:
rpdat.rp_overlays = overlays_used
rebuild_rp = True
if rpdat.rp_blending_state == 'Auto' and rpdat.rp_blending != blending_used:
rpdat.rp_blending = blending_used
rebuild_rp = True
if rpdat.rp_decals_state == 'Auto' and rpdat.rp_decals != decals_used:
rpdat.rp_decals = decals_used
rebuild_rp = True
# if rpdat.rp_sss_state == 'Auto' and rpdat.rp_sss != sss_used:
# rpdat.rp_sss = sss_used
# rebuild_rp = True
if rebuild_rp:
make_renderpath.build()
def export_particle_systems(self):
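"""Exports particle system settings. Only systems that render an instance object are exported."""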
if len(self.particle_system_array) > 0:
self.output['particle_datas'] = []
for particleRef in self.particle_system_array.items():
o = {}
psettings = particleRef[0]
if psettings is None:
continue
if psettings.instance_object is None or psettings.render_type != 'OBJECT':
continue
o['name'] = particleRef[1]["structName"]
o['type'] = 0 if psettings.type == 'EMITTER' else 1 # HAIR
o['loop'] = psettings.arm_loop
o['render_emitter'] = False # TODO
# Emission
o['count'] = int(psettings.count * psettings.arm_count_mult)
o['frame_start'] = int(psettings.frame_start)
o['frame_end'] = int(psettings.frame_end)
o['lifetime'] = psettings.lifetime
o['lifetime_random'] = psettings.lifetime_random
o['emit_from'] = 1 if psettings.emit_from == 'VOLUME' else 0 # VERT, FACE
# Velocity
# o['normal_factor'] = psettings.normal_factor
# o['tangent_factor'] = psettings.tangent_factor
# o['tangent_phase'] = psettings.tangent_phase
o['object_align_factor'] = [psettings.object_align_factor[0], psettings.object_align_factor[1], psettings.object_align_factor[2]]
# o['object_factor'] = psettings.object_factor
o['factor_random'] = psettings.factor_random
# Physics
o['physics_type'] = 1 if psettings.physics_type == 'NEWTON' else 0
o['particle_size'] = psettings.particle_size
o['size_random'] = psettings.size_random
o['mass'] = psettings.mass
# Render
o['instance_object'] = psettings.instance_object.name
self.object_to_arm_object_dict[psettings.instance_object]['is_particle'] = True
# Field weights
o['weight_gravity'] = psettings.effector_weights.gravity
self.output['particle_datas'].append(o)
def export_tilesheets(self):
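"""Exports the tilesheets and their actions defined in the Armory world properties."""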
wrd = bpy.data.worlds['Arm']
if len(wrd.arm_tilesheetlist) > 0:
self.output['tilesheet_datas'] = []
for ts in wrd.arm_tilesheetlist:
o = {}
o['name'] = ts.name
o['tilesx'] = ts.tilesx_prop
o['tilesy'] = ts.tilesy_prop
o['framerate'] = ts.framerate_prop
o['actions'] = []
for tsa in ts.arm_tilesheetactionlist:
ao = {}
ao['name'] = tsa.name
ao['start'] = tsa.start_prop
ao['end'] = tsa.end_prop
ao['loop'] = tsa.loop_prop
o['actions'].append(ao)
self.output['tilesheet_datas'].append(o)
def export_worlds(self):
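"""Exports the data of the scene's world, if any."""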
worldRef = self.scene.world
if worldRef is not None:
o = {}
w = worldRef
o['name'] = w.name
self.post_export_world(w, o)
self.output['world_datas'].append(o)
def export_objects(self, scene):
"""Exports all supported blender objects.
References to objects are dictionaries storing the type and
name of that object.
Currently supported:
- Mesh
- Light
- Camera
- Speaker
- Light Probe
"""
if not ArmoryExporter.option_mesh_only:
self.output['light_datas'] = []
self.output['camera_datas'] = []
self.output['speaker_datas'] = []
for light_ref in self.light_array.items():
self.export_light(light_ref)
for camera_ref in self.camera_array.items():
self.export_camera(camera_ref)
# Keep sounds with fake user
for sound in bpy.data.sounds:
if sound.use_fake_user:
assets.add(arm.utils.asset_path(sound.filepath))
for speaker_ref in self.speaker_array.items():
self.export_speaker(speaker_ref)
if bpy.data.lightprobes:
self.output['probe_datas'] = []
for lightprobe_object in self.probe_array.items():
self.export_probe(lightprobe_object)
self.output['mesh_datas'] = []
for mesh_ref in self.mesh_array.items():
self.export_mesh(mesh_ref, scene)
def execute(self):
"""Exports the scene."""
profile_time = time.time()
print('Exporting ' + arm.utils.asset_name(self.scene))
current_frame, current_subframe = self.scene.frame_current, self.scene.frame_subframe
scene_objects = self.scene.collection.all_objects.values()
for bobject in scene_objects:
# Map objects to game objects
o = {}
o['traits'] = []
self.object_to_arm_object_dict[bobject] = o
# Process
# Skip objects that have a parent because children will be exported recursively
if not bobject.parent:
self.process_bobject(bobject)
# Softbody needs connected triangles, use optimized geometry export
for mod in bobject.modifiers:
if mod.type == 'CLOTH' or mod.type == 'SOFT_BODY':
ArmoryExporter.optimize_enabled = True
self.process_skinned_meshes()
self.output['name'] = arm.utils.safestr(self.scene.name)
if self.filepath.endswith('.lz4'):
self.output['name'] += '.lz4'
elif not bpy.data.worlds['Arm'].arm_minimize:
self.output['name'] += '.json'
# Create unique material variants for skinning, tilesheets, particles
matvars = []
matslots = []
for bo in scene_objects:
if arm.utils.export_bone_data(bo):
for slot in bo.material_slots:
if slot.material is None or slot.material.library is not None:
continue
if slot.material.name.endswith('_armskin'):
continue
matslots.append(slot)
mat_name = slot.material.name + '_armskin'
mat = bpy.data.materials.get(mat_name)
if mat is None:
mat = slot.material.copy()
mat.name = mat_name
matvars.append(mat)
slot.material = mat
elif bo.arm_tilesheet != '':
for slot in bo.material_slots:
if slot.material is None or slot.material.library is not None:
continue
if slot.material.name.endswith('_armtile'):
continue
matslots.append(slot)
mat_name = slot.material.name + '_armtile'
mat = bpy.data.materials.get(mat_name)
if mat is None:
mat = slot.material.copy()
mat.name = mat_name
mat.arm_tilesheet_flag = True
matvars.append(mat)
slot.material = mat
# Particle and non-particle objects cannot share a material
for psys in bpy.data.particles:
bo = psys.instance_object
if bo is None or psys.render_type != 'OBJECT':
continue
for slot in bo.material_slots:
if slot.material is None or slot.material.library is not None:
continue
if slot.material.name.endswith('_armpart'):
continue
matslots.append(slot)
mat_name = slot.material.name + '_armpart'
mat = bpy.data.materials.get(mat_name)
if mat is None:
mat = slot.material.copy()
mat.name = mat_name
mat.arm_particle_flag = True
matvars.append(mat)
slot.material = mat
# Auto-bones
wrd = bpy.data.worlds['Arm']
rpdat = arm.utils.get_rp()
if rpdat.arm_skin_max_bones_auto:
max_bones = 8
for armature in bpy.data.armatures:
if max_bones < len(armature.bones):
max_bones = len(armature.bones)
rpdat.arm_skin_max_bones = max_bones
# Terrain
if self.scene.arm_terrain_object is not None:
# Append trait
if not 'traits' in self.output:
self.output['traits'] = []
trait = {}
trait['type'] = 'Script'
trait['class_name'] = 'armory.trait.internal.TerrainPhysics'
self.output['traits'].append(trait)
ArmoryExporter.import_traits.append(trait['class_name'])
ArmoryExporter.export_physics = True
assets.add_khafile_def('arm_terrain')
# Export material
mat = self.scene.arm_terrain_object.children[0].data.materials[0]
self.material_array.append(mat)
# Terrain data
terrain = {}
terrain['name'] = 'Terrain'
terrain['sectors_x'] = self.scene.arm_terrain_sectors[0]
terrain['sectors_y'] = self.scene.arm_terrain_sectors[1]
terrain['sector_size'] = self.scene.arm_terrain_sector_size
terrain['height_scale'] = self.scene.arm_terrain_height_scale
terrain['material_ref'] = mat.name
self.output['terrain_datas'] = [terrain]
self.output['terrain_ref'] = 'Terrain'
self.output['objects'] = []
for bo in scene_objects:
# Skip objects that have a parent because children will be exported recursively
if not bo.parent:
self.export_object(bo, self.scene)
if bpy.data.collections:
self.output['groups'] = []
for collection in bpy.data.collections:
if collection.name.startswith(('RigidBodyWorld', 'Trait|')):
continue
self.export_collection(collection)
if not ArmoryExporter.option_mesh_only:
if self.scene.camera is not None:
self.output['camera_ref'] = self.scene.camera.name
else:
if self.scene.name == arm.utils.get_project_scene_name():
log.warn('No camera found in active scene')
self.output['material_datas'] = []
# Object with no material assigned in the scene
if len(self.default_material_objects) > 0:
self.make_default_mat('armdefault', self.default_material_objects)
if len(self.default_skin_material_objects) > 0:
self.make_default_mat('armdefaultskin', self.default_skin_material_objects)
if len(bpy.data.particles) > 0:
self.use_default_material_part()
if len(self.default_part_material_objects) > 0:
self.make_default_mat('armdefaultpart', self.default_part_material_objects, is_particle=True)
self.export_materials()
self.export_particle_systems()
self.output['world_datas'] = []
self.export_worlds()
self.export_tilesheets()
if self.scene.world is not None:
self.output['world_ref'] = self.scene.world.name
if self.scene.use_gravity:
self.output['gravity'] = [self.scene.gravity[0], self.scene.gravity[1], self.scene.gravity[2]]
rbw = self.scene.rigidbody_world
if rbw is not None:
weights = rbw.effector_weights
self.output['gravity'][0] *= weights.all * weights.gravity
self.output['gravity'][1] *= weights.all * weights.gravity
self.output['gravity'][2] *= weights.all * weights.gravity
else:
self.output['gravity'] = [0.0, 0.0, 0.0]
self.export_objects(self.scene)
# Create Viewport camera
if bpy.data.worlds['Arm'].arm_play_camera != 'Scene':
self.create_default_camera(is_viewport_camera=True)
self.camera_spawned = True
# No camera found
if not self.camera_spawned:
log.warn('No camera found in active scene layers')
# No camera found, create a default one
if (len(self.output['camera_datas']) == 0 or len(bpy.data.cameras) == 0) or not self.camera_spawned:
self.create_default_camera()
# Scene traits
if wrd.arm_physics != 'Disabled' and ArmoryExporter.export_physics:
if not 'traits' in self.output:
self.output['traits'] = []
x = {}
x['type'] = 'Script'
phys_pkg = 'bullet' if wrd.arm_physics_engine == 'Bullet' else 'oimo'
x['class_name'] = 'armory.trait.physics.' + phys_pkg + '.PhysicsWorld'
rbw = self.scene.rigidbody_world
if rbw is not None and rbw.enabled:
x['parameters'] = [str(rbw.time_scale), str(1 / rbw.steps_per_second), str(rbw.solver_iterations)]
self.output['traits'].append(x)
if wrd.arm_navigation != 'Disabled' and ArmoryExporter.export_navigation:
if not 'traits' in self.output:
self.output['traits'] = []
x = {}
x['type'] = 'Script'
x['class_name'] = 'armory.trait.navigation.Navigation'
self.output['traits'].append(x)
if wrd.arm_debug_console:
if not 'traits' in self.output:
self.output['traits'] = []
ArmoryExporter.export_ui = True
x = {}
x['type'] = 'Script'
x['class_name'] = 'armory.trait.internal.DebugConsole'
x['parameters'] = [str(arm.utils.get_ui_scale())]
self.output['traits'].append(x)
if wrd.arm_live_patch:
if not 'traits' in self.output:
self.output['traits'] = []
x = {}
x['type'] = 'Script'
x['class_name'] = 'armory.trait.internal.LivePatch'
self.output['traits'].append(x)
if len(self.scene.arm_traitlist) > 0:
if not 'traits' in self.output:
self.output['traits'] = []
self.export_traits(self.scene, self.output)
if 'traits' in self.output:
for x in self.output['traits']:
ArmoryExporter.import_traits.append(x['class_name'])
self.export_canvas_themes()
# Write embedded data references
if len(assets.embedded_data) > 0:
self.output['embedded_datas'] = []
for file in assets.embedded_data:
self.output['embedded_datas'].append(file)
# Write scene file
arm.utils.write_arm(self.filepath, self.output)
# Remove created material variants
for slot in matslots: # Set back to original material
orig_mat = bpy.data.materials[slot.material.name[:-8]] # _armskin, _armpart, _armtile
orig_mat.export_uvs = slot.material.export_uvs
orig_mat.export_vcols = slot.material.export_vcols
orig_mat.export_tangents = slot.material.export_tangents
orig_mat.arm_cached = slot.material.arm_cached
slot.material = orig_mat
for mat in matvars:
bpy.data.materials.remove(mat, do_unlink=True)
# Restore frame
if self.scene.frame_current != current_frame:
self.scene.frame_set(current_frame, subframe=current_subframe)
print('Scene exported in ' + str(time.time() - profile_time))
return {'FINISHED'}
def create_default_camera(self, is_viewport_camera=False):
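"""Creates a default camera data block and object. If is_viewport_camera
is True, the current viewport view and projection are used."""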
o = {}
o['name'] = 'DefaultCamera'
o['near_plane'] = 0.1
o['far_plane'] = 100.0
o['fov'] = 0.85
o['frustum_culling'] = True
o['clear_color'] = self.get_camera_clear_color()
# Set viewport camera projection
if is_viewport_camera:
proj, is_persp = self.get_viewport_projection_matrix()
if proj is not None:
if is_persp:
self.extract_projection(o, proj, with_planes=False)
else:
self.extract_ortho(o, proj)
self.output['camera_datas'].append(o)
o = {}
o['name'] = 'DefaultCamera'
o['type'] = 'camera_object'
o['data_ref'] = 'DefaultCamera'
o['material_refs'] = []
o['transform'] = {}
viewport_matrix = self.get_viewport_view_matrix()
if viewport_matrix is not None:
o['transform']['values'] = ArmoryExporter.write_matrix(viewport_matrix.inverted_safe())
o['local_only'] = True
else:
o['transform']['values'] = [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]
o['traits'] = []
trait = {}
trait['type'] = 'Script'
trait['class_name'] = 'armory.trait.WalkNavigation'
o['traits'].append(trait)
ArmoryExporter.import_traits.append(trait['class_name'])
self.output['objects'].append(o)
self.output['camera_ref'] = 'DefaultCamera'
@staticmethod
def get_export_tangents(mesh):
for material in mesh.materials:
if material is not None and material.export_tangents:
return True
return False
@staticmethod
def get_export_vcols(mesh):
for material in mesh.materials:
if material is not None and material.export_vcols:
return True
return False
@staticmethod
def get_export_uvs(mesh):
for material in mesh.materials:
if material is not None and material.export_uvs:
return True
return False
@staticmethod
def object_process_instancing(refs, scale_pos):
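"""Collects per-child offsets for objects that use Armory's instanced
rendering. Returns (instanced_type, instanced_data), where instanced_type
encodes the stored attributes (0 = off, 1 = Loc, 2 = Loc + Rot,
3 = Loc + Scale, 4 = Loc + Rot + Scale) and instanced_data is a flat list
of the children's local offsets."""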
instanced_type = 0
instanced_data = None
for bobject in refs:
inst = bobject.arm_instanced
if inst != 'Off':
if inst == 'Loc':
instanced_type = 1
instanced_data = [0.0, 0.0, 0.0] # Include parent
elif inst == 'Loc + Rot':
instanced_type = 2
instanced_data = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
elif inst == 'Loc + Scale':
instanced_type = 3
instanced_data = [0.0, 0.0, 0.0, 1.0, 1.0, 1.0]
elif inst == 'Loc + Rot + Scale':
instanced_type = 4
instanced_data = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0]
for child in bobject.children:
if not child.arm_export or child.hide_render:
continue
if 'Loc' in inst:
loc = child.matrix_local.to_translation() # Without parent matrix
instanced_data.append(loc.x / scale_pos)
instanced_data.append(loc.y / scale_pos)
instanced_data.append(loc.z / scale_pos)
if 'Rot' in inst:
rot = child.matrix_local.to_euler()
instanced_data.append(rot.x)
instanced_data.append(rot.y)
instanced_data.append(rot.z)
if 'Scale' in inst:
scale = child.matrix_local.to_scale()
instanced_data.append(scale.x)
instanced_data.append(scale.y)
instanced_data.append(scale.z)
break
# Instance render collections with same children?
# elif bobject.instance_type == 'GROUP' and bobject.instance_collection is not None:
# instanced_type = 1
# instanced_data = []
# for child in bpy.data.collections[bobject.instance_collection].objects:
# loc = child.matrix_local.to_translation()
# instanced_data.append(loc.x)
# instanced_data.append(loc.y)
# instanced_data.append(loc.z)
# break
return instanced_type, instanced_data
def post_export_object(self, bobject, o, type):
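"""Appends traits (scripted, physics, camera navigation) and constraints
to the exported object and maps its materials for later export stages."""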
# Export traits
self.export_traits(bobject, o)
wrd = bpy.data.worlds['Arm']
phys_enabled = wrd.arm_physics != 'Disabled'
phys_pkg = 'bullet' if wrd.arm_physics_engine == 'Bullet' else 'oimo'
# Rigid body trait
if bobject.rigid_body is not None and phys_enabled:
ArmoryExporter.export_physics = True
rb = bobject.rigid_body
shape = 0 # BOX
if rb.collision_shape == 'SPHERE':
shape = 1
elif rb.collision_shape == 'CONVEX_HULL':
shape = 2
elif rb.collision_shape == 'MESH':
shape = 3
elif rb.collision_shape == 'CONE':
shape = 4
elif rb.collision_shape == 'CYLINDER':
shape = 5
elif rb.collision_shape == 'CAPSULE':
shape = 6
body_mass = rb.mass
is_static = (not rb.enabled and not rb.kinematic) or (rb.type == 'PASSIVE' and not rb.kinematic)
if is_static:
body_mass = 0
x = {}
x['type'] = 'Script'
x['class_name'] = 'armory.trait.physics.' + phys_pkg + '.RigidBody'
col_group = ''
for b in rb.collision_collections:
col_group = ('1' if b else '0') + col_group
col_mask = ''
for b in bobject.arm_rb_collision_filter_mask:
col_mask = ('1' if b else '0') + col_mask
x['parameters'] = [str(shape), str(body_mass), str(rb.friction), str(rb.restitution), str(int(col_group, 2)), str(int(col_mask, 2)) ]
lx = bobject.arm_rb_linear_factor[0]
ly = bobject.arm_rb_linear_factor[1]
lz = bobject.arm_rb_linear_factor[2]
ax = bobject.arm_rb_angular_factor[0]
ay = bobject.arm_rb_angular_factor[1]
az = bobject.arm_rb_angular_factor[2]
if bobject.lock_location[0]:
lx = 0
if bobject.lock_location[1]:
ly = 0
if bobject.lock_location[2]:
lz = 0
if bobject.lock_rotation[0]:
ax = 0
if bobject.lock_rotation[1]:
ay = 0
if bobject.lock_rotation[2]:
az = 0
col_margin = str(rb.collision_margin) if rb.use_margin else '0.0'
if rb.use_deactivation or bobject.arm_rb_force_deactivation:
deact_lv = str(rb.deactivate_linear_velocity)
deact_av = str(rb.deactivate_angular_velocity)
deact_time = str(bobject.arm_rb_deactivation_time)
else:
deact_lv = '0.0'
deact_av = '0.0'
deact_time = '0.0'
body_params = '[{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}, {10}, {11}]'.format(
str(rb.linear_damping),
str(rb.angular_damping),
str(lx), str(ly), str(lz),
str(ax), str(ay), str(az),
col_margin,
deact_lv, deact_av, deact_time
)
body_flags = '[{0}, {1}, {2}, {3}]'.format(
str(rb.kinematic).lower(),
str(bobject.arm_rb_trigger).lower(),
str(bobject.arm_rb_ccd).lower(),
str(is_static).lower()
2018-12-20 18:04:49 +01:00
)
x['parameters'].append(body_params)
x['parameters'].append(body_flags)
o['traits'].append(x)
# Phys traits
if phys_enabled:
for m in bobject.modifiers:
if m.type == 'CLOTH':
self.add_softbody_mod(o, bobject, m, 0) # SoftShape.Cloth
elif m.type == 'SOFT_BODY':
self.add_softbody_mod(o, bobject, m, 1) # SoftShape.Volume
elif m.type == 'HOOK':
self.add_hook_mod(o, bobject, m.object.name, m.vertex_group)
# Rigid body constraint
rbc = bobject.rigid_body_constraint
if rbc is not None and rbc.enabled:
self.add_rigidbody_constraint(o, rbc)
# Camera traits
if type is NodeType.CAMERA:
# Viewport camera enabled, attach navigation to active camera
if self.scene.camera is not None and bobject.name == self.scene.camera.name and bpy.data.worlds['Arm'].arm_play_camera != 'Scene':
navigation_trait = {}
navigation_trait['type'] = 'Script'
navigation_trait['class_name'] = 'armory.trait.WalkNavigation'
o['traits'].append(navigation_trait)
# Map objects to materials, can be used in later stages
for i in range(len(bobject.material_slots)):
mat = self.slot_to_material(bobject, bobject.material_slots[i])
if mat in self.material_to_object_dict:
self.material_to_object_dict[mat].append(bobject)
self.material_to_arm_object_dict[mat].append(o)
else:
self.material_to_object_dict[mat] = [bobject]
self.material_to_arm_object_dict[mat] = [o]
# Export constraints
if len(bobject.constraints) > 0:
o['constraints'] = []
self.add_constraints(bobject, o)
for x in o['traits']:
ArmoryExporter.import_traits.append(x['class_name'])
@staticmethod
def add_constraints(bobject, o, bone=False):
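"""Exports the enabled object or bone constraints into o['constraints'].
Currently only COPY_LOCATION and CHILD_OF targets are written in detail."""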
for con in bobject.constraints:
if con.mute:
continue
co = {}
co['name'] = con.name
co['type'] = con.type
if bone:
co['bone'] = bobject.name
if hasattr(con, 'target') and con.target is not None:
if con.type == 'COPY_LOCATION':
co['target'] = con.target.name
co['use_x'] = con.use_x
co['use_y'] = con.use_y
co['use_z'] = con.use_z
co['invert_x'] = con.invert_x
co['invert_y'] = con.invert_y
co['invert_z'] = con.invert_z
co['use_offset'] = con.use_offset
co['influence'] = con.influence
elif con.type == 'CHILD_OF':
co['target'] = con.target.name
co['influence'] = con.influence
o['constraints'].append(co)
def export_traits(self, bobject, o):
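"""Exports the traits of the given object or scene (logic node trees,
WebAssembly, UI Canvas, bundled and Haxe scripts) into o['traits']."""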
if hasattr(bobject, 'arm_traitlist'):
for t in bobject.arm_traitlist:
if not t.enabled_prop:
continue
x = {}
if t.type_prop == 'Logic Nodes' and t.node_tree_prop is not None and t.node_tree_prop.name != '':
x['type'] = 'Script'
group_name = arm.utils.safesrc(t.node_tree_prop.name[0].upper() + t.node_tree_prop.name[1:])
x['class_name'] = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_package) + '.node.' + group_name
elif t.type_prop == 'WebAssembly':
wpath = arm.utils.get_fp() + '/Bundled/' + t.webassembly_prop + '.wasm'
if not os.path.exists(wpath):
log.warn('Wasm "' + t.webassembly_prop + '" not found, skipping')
continue
x['type'] = 'Script'
x['class_name'] = 'armory.trait.internal.WasmScript'
x['parameters'] = ["'" + t.webassembly_prop + "'"]
elif t.type_prop == 'UI Canvas':
cpath = arm.utils.get_fp() + '/Bundled/canvas/' + t.canvas_name_prop + '.json'
if not os.path.exists(cpath):
log.warn('Scene "' + self.scene.name + '" - Object "' + bobject.name + '" - Referenced canvas "' + t.canvas_name_prop + '" not found, skipping')
continue
ArmoryExporter.export_ui = True
x['type'] = 'Script'
x['class_name'] = 'armory.trait.internal.CanvasScript'
x['parameters'] = ["'" + t.canvas_name_prop + "'"]
# assets.add(assetpath) # Bundled is auto-added
# Read file list and add canvas assets
assetpath = arm.utils.get_fp() + '/Bundled/canvas/' + t.canvas_name_prop + '.files'
if os.path.exists(assetpath):
with open(assetpath) as f:
fileList = f.read().splitlines()
for asset in fileList:
# Relative to the root/Bundled/canvas path
asset = asset[6:] # Strip ../../ to start in project root
assets.add(asset)
else: # Haxe/Bundled Script
if t.class_name_prop == '': # Empty class name, skip
continue
x['type'] = 'Script'
if t.type_prop == 'Bundled Script':
trait_prefix = 'armory.trait.'
# TODO: temporary, export single mesh navmesh as obj
if t.class_name_prop == 'NavMesh' and bobject.type == 'MESH' and bpy.data.worlds['Arm'].arm_navigation != 'Disabled':
ArmoryExporter.export_navigation = True
nav_path = arm.utils.get_fp_build() + '/compiled/Assets/navigation'
if not os.path.exists(nav_path):
os.makedirs(nav_path)
nav_filepath = nav_path + '/nav_' + bobject.data.name + '.arm'
assets.add(nav_filepath)
# TODO: Implement cache
#if not os.path.isfile(nav_filepath):
# override = {'selected_objects': [bobject]}
# bobject.scale.y *= -1
# mesh = obj.data
# for face in mesh.faces:
# face.v.reverse()
# bpy.ops.export_scene.obj(override, use_selection=True, filepath=nav_filepath, check_existing=False, use_normals=False, use_uvs=False, use_materials=False)
# bobject.scale.y *= -1
armature = bobject.find_armature()
apply_modifiers = not armature
bobject_eval = bobject.evaluated_get(self.depsgraph) if apply_modifiers else bobject
exportMesh = bobject_eval.to_mesh()
with open(nav_filepath, 'w') as f:
for v in exportMesh.vertices:
f.write("v %.4f " % (v.co[0] * bobject_eval.scale.x))
f.write("%.4f " % (v.co[2] * bobject_eval.scale.z))
f.write("%.4f\n" % (v.co[1] * bobject_eval.scale.y)) # Flipped
for p in exportMesh.polygons:
f.write("f")
for i in reversed(p.vertices): # Flipped normals
f.write(" %d" % (i + 1))
f.write("\n")
else: # Haxe
trait_prefix = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_package) + '.'
hxfile = '/Sources/' + (trait_prefix + t.class_name_prop).replace('.', '/') + '.hx'
if not os.path.exists(arm.utils.get_fp() + hxfile):
# TODO: Halt build here once this check is tested
print('Armory Error: Scene "' + self.scene.name + '" - Object "' + bobject.name + '" : Referenced trait file "' + hxfile + '" not found')
x['class_name'] = trait_prefix + t.class_name_prop
# Export trait properties
if t.arm_traitpropslist:
x['props'] = []
for trait_prop in t.arm_traitpropslist:
x['props'].append(trait_prop.name)
x['props'].append(trait_prop.type)
if trait_prop.type.endswith("Object"):
value = arm.utils.asset_name(trait_prop.value_object)
else:
value = trait_prop.get_value()
x['props'].append(value)
o['traits'].append(x)
@staticmethod
def export_canvas_themes():
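"""Ensures a _themes.json file exists next to the exported canvases and adds it to the assets."""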
path_themes = os.path.join(arm.utils.get_fp(), 'Bundled', 'canvas')
file_theme = os.path.join(path_themes, "_themes.json")
# If there is a canvas but no _themes.json, create it so that
# CanvasScript.hx works
if os.path.exists(path_themes) and not os.path.exists(file_theme):
with open(file_theme, "w+"):
pass
assets.add(file_theme)
def add_softbody_mod(self, o, bobject, soft_mod, soft_type):
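"""Adds a SoftBody physics trait for a cloth (soft_type 0) or soft body (soft_type 1) modifier."""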
ArmoryExporter.export_physics = True
phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo'
assets.add_khafile_def('arm_physics_soft')
trait = {}
trait['type'] = 'Script'
trait['class_name'] = 'armory.trait.physics.' + phys_pkg + '.SoftBody'
if soft_type == 0:
bend = soft_mod.settings.bending_stiffness
elif soft_type == 1:
bend = (soft_mod.settings.bend + 1.0) * 10
trait['parameters'] = [str(soft_type), str(bend), str(soft_mod.settings.mass), str(bobject.arm_soft_body_margin)]
o['traits'].append(trait)
if soft_type == 0:
self.add_hook_mod(o, bobject, '', soft_mod.settings.vertex_group_mass)
@staticmethod
def add_hook_mod(o, bobject, target_name, group_name):
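"""Adds a PhysicsHook trait, passing the target object name and the
coordinates of all vertices in the given vertex group."""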
ArmoryExporter.export_physics = True
phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo'
trait = {}
trait['type'] = 'Script'
trait['class_name'] = 'armory.trait.physics.' + phys_pkg + '.PhysicsHook'
verts = []
if group_name != '':
group = bobject.vertex_groups[group_name].index
for v in bobject.data.vertices:
for g in v.groups:
if g.group == group:
verts.append(v.co.x)
verts.append(v.co.y)
verts.append(v.co.z)
trait['parameters'] = ["'" + target_name + "'", str(verts)]
o['traits'].append(trait)
@staticmethod
def add_rigidbody_constraint(o, rbc):
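"""Adds a PhysicsConstraint trait for the given rigid body constraint,
including the limit and spring parameters of the respective constraint type."""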
rb1 = rbc.object1
rb2 = rbc.object2
if rb1 is None or rb2 is None:
return
ArmoryExporter.export_physics = True
phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo'
breaking_threshold = rbc.breaking_threshold if rbc.use_breaking else 0
trait = {}
trait['type'] = 'Script'
trait['class_name'] = 'armory.trait.physics.' + phys_pkg + '.PhysicsConstraint'
trait['parameters'] = [\
"'" + rb1.name + "'", \
"'" + rb2.name + "'", \
"'" + rbc.type + "'", \
str(rbc.disable_collisions).lower(), \
str(breaking_threshold)]
if rbc.type == "GENERIC":
limits = []
limits.append(1 if rbc.use_limit_lin_x else 0)
limits.append(rbc.limit_lin_x_lower)
limits.append(rbc.limit_lin_x_upper)
limits.append(1 if rbc.use_limit_lin_y else 0)
limits.append(rbc.limit_lin_y_lower)
limits.append(rbc.limit_lin_y_upper)
limits.append(1 if rbc.use_limit_lin_z else 0)
limits.append(rbc.limit_lin_z_lower)
limits.append(rbc.limit_lin_z_upper)
limits.append(1 if rbc.use_limit_ang_x else 0)
limits.append(rbc.limit_ang_x_lower)
limits.append(rbc.limit_ang_x_upper)
limits.append(1 if rbc.use_limit_ang_y else 0)
limits.append(rbc.limit_ang_y_lower)
limits.append(rbc.limit_ang_y_upper)
limits.append(1 if rbc.use_limit_ang_z else 0)
limits.append(rbc.limit_ang_z_lower)
limits.append(rbc.limit_ang_z_upper)
trait['parameters'].append(str(limits))
if rbc.type == "GENERIC_SPRING":
limits = []
limits.append(1 if rbc.use_limit_lin_x else 0)
limits.append(rbc.limit_lin_x_lower)
limits.append(rbc.limit_lin_x_upper)
limits.append(1 if rbc.use_limit_lin_y else 0)
limits.append(rbc.limit_lin_y_lower)
limits.append(rbc.limit_lin_y_upper)
limits.append(1 if rbc.use_limit_lin_z else 0)
limits.append(rbc.limit_lin_z_lower)
limits.append(rbc.limit_lin_z_upper)
limits.append(1 if rbc.use_limit_ang_x else 0)
limits.append(rbc.limit_ang_x_lower)
limits.append(rbc.limit_ang_x_upper)
limits.append(1 if rbc.use_limit_ang_y else 0)
limits.append(rbc.limit_ang_y_lower)
limits.append(rbc.limit_ang_y_upper)
limits.append(1 if rbc.use_limit_ang_z else 0)
limits.append(rbc.limit_ang_z_lower)
limits.append(rbc.limit_ang_z_upper)
limits.append(1 if rbc.use_spring_x else 0)
limits.append(rbc.spring_stiffness_x)
limits.append(rbc.spring_damping_x)
limits.append(1 if rbc.use_spring_y else 0)
limits.append(rbc.spring_stiffness_y)
limits.append(rbc.spring_damping_y)
limits.append(1 if rbc.use_spring_z else 0)
limits.append(rbc.spring_stiffness_z)
limits.append(rbc.spring_damping_z)
limits.append(1 if rbc.use_spring_ang_x else 0)
limits.append(rbc.spring_stiffness_ang_x)
limits.append(rbc.spring_damping_ang_x)
limits.append(1 if rbc.use_spring_ang_y else 0)
limits.append(rbc.spring_stiffness_ang_y)
limits.append(rbc.spring_damping_ang_y)
limits.append(1 if rbc.use_spring_ang_z else 0)
limits.append(rbc.spring_stiffness_ang_z)
limits.append(rbc.spring_damping_ang_z)
trait['parameters'].append(str(limits))
if rbc.type == "HINGE":
limits = []
limits.append(1 if rbc.use_limit_ang_z else 0)
limits.append(rbc.limit_ang_z_lower)
limits.append(rbc.limit_ang_z_upper)
trait['parameters'].append(str(limits))
if rbc.type == "SLIDER":
limits = []
limits.append(1 if rbc.use_limit_lin_x else 0)
limits.append(rbc.limit_lin_x_lower)
limits.append(rbc.limit_lin_x_upper)
trait['parameters'].append(str(limits))
if rbc.type == "PISTON":
limits = []
limits.append(1 if rbc.use_limit_lin_x else 0)
limits.append(rbc.limit_lin_x_lower)
limits.append(rbc.limit_lin_x_upper)
limits.append(1 if rbc.use_limit_ang_x else 0)
limits.append(rbc.limit_ang_x_lower)
limits.append(rbc.limit_ang_x_upper)
trait['parameters'].append(str(limits))
o['traits'].append(trait)
@staticmethod
def post_export_world(world, o):
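"""Writes background color, sky data and environment probe information of the given world into o."""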
wrd = bpy.data.worlds['Arm']
bgcol = world.arm_envtex_color
if '_LDR' in wrd.world_defs: # No compositor used
for i in range(0, 3):
bgcol[i] = pow(bgcol[i], 1.0 / 2.2)
o['background_color'] = arm.utils.color_to_int(bgcol)
if '_EnvSky' in wrd.world_defs:
# Sky data for probe
o['sun_direction'] = list(world.arm_envtex_sun_direction)
o['turbidity'] = world.arm_envtex_turbidity
o['ground_albedo'] = world.arm_envtex_ground_albedo
disable_hdr = world.arm_envtex_name.endswith('.jpg')
if '_EnvTex' in wrd.world_defs or '_EnvImg' in wrd.world_defs:
o['envmap'] = world.arm_envtex_name.rsplit('.', 1)[0]
if disable_hdr:
o['envmap'] += '.jpg'
else:
o['envmap'] += '.hdr'
# Main probe
rpdat = arm.utils.get_rp()
solid_mat = rpdat.arm_material_model == 'Solid'
arm_irradiance = rpdat.arm_irradiance and not solid_mat
arm_radiance = False
radtex = world.arm_envtex_name.rsplit('.', 1)[0]
irrsharmonics = world.arm_envtex_irr_name
# Radiance
if '_EnvTex' in wrd.world_defs:
arm_radiance = rpdat.arm_radiance
elif '_EnvSky' in wrd.world_defs:
arm_radiance = rpdat.arm_radiance
radtex = 'hosek'
num_mips = world.arm_envtex_num_mips
strength = world.arm_envtex_strength
mobile_mat = rpdat.arm_material_model == 'Mobile' or rpdat.arm_material_model == 'Solid'
if mobile_mat:
arm_radiance = False
po = {}
po['name'] = world.name
if arm_irradiance:
ext = '' if wrd.arm_minimize else '.json'
po['irradiance'] = irrsharmonics + '_irradiance' + ext
if arm_radiance:
po['radiance'] = radtex + '_radiance'
po['radiance'] += '.jpg' if disable_hdr else '.hdr'
po['radiance_mipmaps'] = num_mips
po['strength'] = strength
o['probe'] = po
@staticmethod
def mod_equal(mod1: bpy.types.Modifier, mod2: bpy.types.Modifier):
"""Compares whether the given modifiers are equal."""
# https://blender.stackexchange.com/questions/70629
return all([getattr(mod1, prop, True) == getattr(mod2, prop, False) for prop in mod1.bl_rna.properties.keys()])
@staticmethod
def mod_equal_stack(obj1, obj2):
"""Returns `True` if the given objects have the same modifiers."""
if len(obj1.modifiers) == 0 and len(obj2.modifiers) == 0:
return True
if len(obj1.modifiers) == 0 or len(obj2.modifiers) == 0:
return False
if len(obj1.modifiers) != len(obj2.modifiers):
return False
return all([ArmoryExporter.mod_equal(m, obj2.modifiers[i]) for i, m in enumerate(obj1.modifiers)])