armory/blender/arm/exporter.py

"""
Armory Scene Exporter
http://armory3d.org/
Based on Open Game Engine Exchange
http://opengex.org/
Export plugin for Blender by Eric Lengyel
Copyright 2015, Terathon Software LLC
This software is licensed under the Creative Commons
Attribution-ShareAlike 3.0 Unported License:
http://creativecommons.org/licenses/by-sa/3.0/deed.en_US
"""
from enum import Enum, unique
import math
import os
import time
from typing import Any, Dict, List, Tuple, Union, Optional
import numpy as np
import bpy
from mathutils import *
import bmesh
import arm.assets as assets
import arm.exporter_opt as exporter_opt
import arm.log as log
import arm.make_renderpath as make_renderpath
import arm.material.cycles as cycles
import arm.material.make as make_material
import arm.material.mat_batch as mat_batch
import arm.utils
import arm.profiler
if arm.is_reload(__name__):
assets = arm.reload_module(assets)
exporter_opt = arm.reload_module(exporter_opt)
log = arm.reload_module(log)
make_renderpath = arm.reload_module(make_renderpath)
cycles = arm.reload_module(cycles)
make_material = arm.reload_module(make_material)
mat_batch = arm.reload_module(mat_batch)
arm.utils = arm.reload_module(arm.utils)
arm.profiler = arm.reload_module(arm.profiler)
else:
arm.enable_reload(__name__)
@unique
class NodeType(Enum):
"""Represents the type of an object."""
EMPTY = 0
BONE = 1
MESH = 2
LIGHT = 3
CAMERA = 4
SPEAKER = 5
DECAL = 6
PROBE = 7
@classmethod
def get_bobject_type(cls, bobject: bpy.types.Object) -> "NodeType":
"""Returns the NodeType enum member belonging to the type of
the given blender object."""
if bobject.type == "MESH":
if bobject.data.polygons:
return cls.MESH
elif bobject.type == "FONT" or bobject.type == "META":
return cls.MESH
elif bobject.type == "LIGHT":
return cls.LIGHT
elif bobject.type == "CAMERA":
return cls.CAMERA
elif bobject.type == "SPEAKER":
return cls.SPEAKER
elif bobject.type == "LIGHT_PROBE":
return cls.PROBE
return cls.EMPTY
STRUCT_IDENTIFIER = ("object", "bone_object", "mesh_object",
"light_object", "camera_object", "speaker_object",
"decal_object", "probe_object")
# Internal target names for single FCurve data paths
FCURVE_TARGET_NAMES = {
"location": ("xloc", "yloc", "zloc"),
"rotation_euler": ("xrot", "yrot", "zrot"),
"rotation_quaternion": ("qwrot", "qxrot", "qyrot", "qzrot"),
"scale": ("xscl", "yscl", "zscl"),
"delta_location": ("dxloc", "dyloc", "dzloc"),
"delta_rotation_euler": ("dxrot", "dyrot", "dzrot"),
"delta_rotation_quaternion": ("dqwrot", "dqxrot", "dqyrot", "dqzrot"),
"delta_scale": ("dxscl", "dyscl", "dzscl"),
}
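# Illustrative example (editor note, not part of the exporter): a single
# FCurve resolves to one of the target names above via its data_path and
# array_index, e.g. a curve animating the Y component of "location":
#
#   FCURVE_TARGET_NAMES["location"][1]  # -> "yloc"
#
# Data paths missing from this table (bone paths, custom properties, ...)
# are reported as unresolved during object animation export.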
current_output = None
class ArmoryExporter:
"""Export to Armory format.
Some common naming patterns:
- out_[]: Variables starting with "out_" represent data that is
exported to Iron
- bobject: A Blender object (bpy.types.Object). Used because
`object` would shadow the Python built-in of the same name
"""
compress_enabled = False
export_all_flag = True
# Indicates whether rigid body is exported
export_physics = False
optimize_enabled = False
option_mesh_only = False
# Class names of referenced traits
import_traits: List[str] = []
def __init__(self, context: bpy.types.Context, filepath: str, scene: bpy.types.Scene = None, depsgraph: bpy.types.Depsgraph = None):
global current_output
self.filepath = filepath
self.scene = context.scene if scene is None else scene
self.depsgraph = context.evaluated_depsgraph_get() if depsgraph is None else depsgraph
# The output dict contains all data that is later exported to Iron format
self.output: Dict[str, Any] = {'frame_time': 1.0 / (self.scene.render.fps / self.scene.render.fps_base)}
current_output = self.output
# Stores the object type ("objectType") and the asset name
# ("structName") in a dict for each object
self.bobject_array: Dict[bpy.types.Object, Dict[str, Union[NodeType, str]]] = {}
self.bobject_bone_array = {}
self.mesh_array = {}
self.light_array = {}
self.probe_array = {}
self.camera_array = {}
self.speaker_array = {}
self.material_array = []
self.world_array = []
self.particle_system_array = {}
# `True` if there is at least one spawned camera in the scene
self.camera_spawned = False
self.material_to_object_dict = {}
# If no material is assigned, provide a default to mimic Cycles
self.default_material_objects = []
self.default_skin_material_objects = []
self.default_part_material_objects = []
self.material_to_arm_object_dict = {}
# Stores the link between a blender object and its
# corresponding export data (arm object)
self.object_to_arm_object_dict: Dict[bpy.types.Object, Dict] = {}
self.bone_tracks = []
ArmoryExporter.preprocess()
@classmethod
def export_scene(cls, context: bpy.types.Context, filepath: str, scene: bpy.types.Scene = None, depsgraph: bpy.types.Depsgraph = None) -> None:
"""Exports the given scene to the given file path. This is the
function that is called in make.py and the entry point of the
exporter."""
with arm.profiler.Profile('profile_exporter.prof', arm.utils.get_pref_or_default('profile_exporter', False)):
cls(context, filepath, scene, depsgraph).execute()
@classmethod
def preprocess(cls):
wrd = bpy.data.worlds['Arm']
if wrd.arm_physics == 'Enabled':
cls.export_physics = True
cls.export_navigation = False
if wrd.arm_navigation == 'Enabled':
cls.export_navigation = True
cls.export_ui = False
@staticmethod
def write_matrix(matrix):
return [matrix[0][0], matrix[0][1], matrix[0][2], matrix[0][3],
matrix[1][0], matrix[1][1], matrix[1][2], matrix[1][3],
matrix[2][0], matrix[2][1], matrix[2][2], matrix[2][3],
matrix[3][0], matrix[3][1], matrix[3][2], matrix[3][3]]
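# Illustrative example: write_matrix flattens a mathutils 4x4 matrix row by
# row into a flat list of 16 floats, e.g. for the identity matrix:
#
#   from mathutils import Matrix
#   ArmoryExporter.write_matrix(Matrix.Identity(4))
#   # -> [1.0, 0.0, 0.0, 0.0,
#   #     0.0, 1.0, 0.0, 0.0,
#   #     0.0, 0.0, 1.0, 0.0,
#   #     0.0, 0.0, 0.0, 1.0]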
def get_meshes_file_path(self, object_id: str, compressed=False) -> str:
index = self.filepath.rfind('/')
mesh_fp = self.filepath[:(index + 1)] + 'meshes/'
if not os.path.exists(mesh_fp):
os.makedirs(mesh_fp)
ext = '.lz4' if compressed else '.arm'
return mesh_fp + object_id + ext
@staticmethod
def get_shape_keys(mesh):
# Metaball
if not hasattr(mesh, 'shape_keys'):
return None
shape_keys = mesh.shape_keys
if shape_keys and len(shape_keys.key_blocks) > 1:
return shape_keys
return None
def find_bone(self, name: str) -> Optional[Tuple[bpy.types.Bone, Dict]]:
"""Finds the bone reference (a tuple containing the bone object
and its data) by the given name and returns it."""
for bone_ref in self.bobject_bone_array.items():
if bone_ref[0].name == name:
return bone_ref
return None
@staticmethod
def collect_bone_animation(armature: bpy.types.Object, name: str) -> List[bpy.types.FCurve]:
path = f"pose.bones[\"{name}\"]."
if armature.animation_data:
action = armature.animation_data.action
if action:
return [fcurve for fcurve in action.fcurves if fcurve.data_path.startswith(path)]
return []
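# Illustrative example: pose-bone FCurves use data paths of the form
# 'pose.bones["BoneName"].location', so for a bone named "Bone" only curves
# whose data_path starts with 'pose.bones["Bone"].' are collected:
#
#   fcurve.data_path  # e.g. 'pose.bones["Bone"].rotation_quaternion'
#   fcurve.data_path.startswith('pose.bones["Bone"].')  # -> True, curve is kept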
def export_bone(self, armature, bone: bpy.types.Bone, o, action: bpy.types.Action):
bobject_ref = self.bobject_bone_array.get(bone)
if bobject_ref:
o['type'] = STRUCT_IDENTIFIER[bobject_ref["objectType"].value]
o['name'] = bobject_ref["structName"]
self.export_bone_transform(armature, bone, o, action)
o['children'] = []
for sub_bobject in bone.children:
so = {}
self.export_bone(armature, sub_bobject, so, action)
o['children'].append(so)
@staticmethod
def export_pose_markers(oanim, action):
if action.pose_markers is None or len(action.pose_markers) == 0:
return
oanim['marker_frames'] = []
oanim['marker_names'] = []
for pos_marker in action.pose_markers:
oanim['marker_frames'].append(int(pos_marker.frame))
oanim['marker_names'].append(pos_marker.name)
@staticmethod
def calculate_anim_frame_range(action: bpy.types.Action) -> Tuple[int, int]:
"""Calculates the required frame range of the given action by
also taking fcurve modifiers into account.
Modifiers that are not range-restricted are ignored in this
calculation.
"""
start = action.frame_range[0]
end = action.frame_range[1]
# Take FCurve modifiers into account if they have a restricted
# frame range
for fcurve in action.fcurves:
for modifier in fcurve.modifiers:
if not modifier.use_restricted_range:
continue
if modifier.frame_start < start:
start = modifier.frame_start
if modifier.frame_end > end:
end = modifier.frame_end
return int(start), int(end)
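# Illustrative example (made-up values): for an action keyed on frames 1..60
# with one FCurve modifier that has use_restricted_range enabled,
# frame_start = -10 and frame_end = 100, the exported range becomes
#
#   calculate_anim_frame_range(action)  # -> (-10, 100)
#
# A modifier without use_restricted_range would leave the range at (1, 60).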
@staticmethod
def export_animation_track(fcurve: bpy.types.FCurve, frame_range: Tuple[int, int], target: str) -> Dict:
"""This function exports a single animation track."""
out_track = {'target': target, 'frames': [], 'values': []}
start = frame_range[0]
end = frame_range[1]
for frame in range(start, end + 1):
out_track['frames'].append(frame)
out_track['values'].append(fcurve.evaluate(frame))
return out_track
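# Illustrative example: the curve is baked by evaluating it once per frame
# over the whole range, so a track for target "xloc" over frames 1..3 is
#
#   {'target': 'xloc',
#    'frames': [1, 2, 3],
#    'values': [fcurve.evaluate(1), fcurve.evaluate(2), fcurve.evaluate(3)]}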
def export_object_transform(self, bobject: bpy.types.Object, o):
wrd = bpy.data.worlds['Arm']
# Static transform
o['transform'] = {'values': ArmoryExporter.write_matrix(bobject.matrix_local)}
# Animated transform
if bobject.animation_data is not None and bobject.type != "ARMATURE":
action = bobject.animation_data.action
if action is not None:
action_name = arm.utils.safestr(arm.utils.asset_name(action))
fp = self.get_meshes_file_path('action_' + action_name, compressed=ArmoryExporter.compress_enabled)
assets.add(fp)
ext = '.lz4' if ArmoryExporter.compress_enabled else ''
if ext == '' and not wrd.arm_minimize:
ext = '.json'
if 'object_actions' not in o:
o['object_actions'] = []
o['object_actions'].append('action_' + action_name + ext)
frame_range = self.calculate_anim_frame_range(action)
out_anim = {
'begin': frame_range[0],
'end': frame_range[1],
'tracks': []
}
self.export_pose_markers(out_anim, action)
unresolved_data_paths = set()
for fcurve in action.fcurves:
data_path = fcurve.data_path
try:
out_track = self.export_animation_track(fcurve, frame_range, FCURVE_TARGET_NAMES[data_path][fcurve.array_index])
except KeyError:
if data_path not in FCURVE_TARGET_NAMES:
# This can happen if the target is simply not
# supported or the action shares both bone
# and object transform data (FCURVE_TARGET_NAMES
# only contains object transform targets)
unresolved_data_paths.add(data_path)
continue
# Missing target entry for array_index or something else
else:
raise
out_anim['tracks'].append(out_track)
if len(unresolved_data_paths) > 0:
warning = (
f'The action "{action_name}" has fcurve channels with data paths that could not be resolved.'
' This can be caused by the following things:\n'
' - The data paths are not supported.\n'
' - The action exists on both armature and non-armature objects or has both bone and object transform data.'
)
if wrd.arm_verbose_output:
warning += f'\n Unresolved data paths: {unresolved_data_paths}'
else:
warning += '\n To see the list of unresolved data paths please recompile with Armory Project > Verbose Output enabled.'
log.warn(warning)
if True: # not action.arm_cached or not os.path.exists(fp):
if wrd.arm_verbose_output:
print('Exporting object action ' + action_name)
out_object_action = {
'name': action_name,
'anim': out_anim,
'type': 'object',
'data_ref': '',
'transform': None
}
action_file = {'objects': [out_object_action]}
arm.utils.write_arm(fp, action_file)
def process_bone(self, bone: bpy.types.Bone) -> None:
if ArmoryExporter.export_all_flag or bone.select:
self.bobject_bone_array[bone] = {
"objectType": NodeType.BONE,
"structName": bone.name
}
for subbobject in bone.children:
self.process_bone(subbobject)
def process_bobject(self, bobject: bpy.types.Object) -> None:
"""Stores some basic information about the given object (its
name and type).
If the given object is an armature, its bones are also
processed.
"""
if ArmoryExporter.export_all_flag or bobject.select_get():
btype: NodeType = NodeType.get_bobject_type(bobject)
if btype is not NodeType.MESH and ArmoryExporter.option_mesh_only:
return
self.bobject_array[bobject] = {
"objectType": btype,
"structName": arm.utils.asset_name(bobject)
}
if bobject.type == "ARMATURE":
armature: bpy.types.Armature = bobject.data
if armature:
for bone in armature.bones:
if not bone.parent:
self.process_bone(bone)
if bobject.arm_instanced == 'Off':
for subbobject in bobject.children:
self.process_bobject(subbobject)
def process_skinned_meshes(self):
"""Iterates through all objects that are exported and ensures
that bones are actually stored as bones."""
for bobject_ref in self.bobject_array.items():
if bobject_ref[1]["objectType"] is NodeType.MESH:
armature = bobject_ref[0].find_armature()
if armature is not None:
for bone in armature.data.bones:
bone_ref = self.find_bone(bone.name)
if bone_ref is not None:
# If an object is used as a bone, then we
# force its type to be a bone
bone_ref[1]["objectType"] = NodeType.BONE
def export_bone_transform(self, armature: bpy.types.Object, bone: bpy.types.Bone, o, action: bpy.types.Action):
pose_bone = armature.pose.bones.get(bone.name)
# if pose_bone is not None:
# transform = pose_bone.matrix.copy()
# if pose_bone.parent is not None:
# transform = pose_bone.parent.matrix.inverted_safe() * transform
# else:
transform = bone.matrix_local.copy()
if bone.parent is not None:
transform = (bone.parent.matrix_local.inverted_safe() @ transform)
o['transform'] = {'values': ArmoryExporter.write_matrix(transform)}
fcurve_list = self.collect_bone_animation(armature, bone.name)
if fcurve_list and pose_bone:
begin_frame, end_frame = int(action.frame_range[0]), int(action.frame_range[1])
out_track = {'target': "transform", 'frames': [], 'values': []}
o['anim'] = {'tracks': [out_track]}
for i in range(begin_frame, end_frame + 1):
out_track['frames'].append(i - begin_frame)
self.bone_tracks.append((out_track['values'], pose_bone))
def use_default_material(self, bobject: bpy.types.Object, o):
if arm.utils.export_bone_data(bobject):
o['material_refs'].append('armdefaultskin')
self.default_skin_material_objects.append(bobject)
else:
o['material_refs'].append('armdefault')
self.default_material_objects.append(bobject)
def use_default_material_part(self):
# Particle object with no material assigned
for ps in bpy.data.particles:
if ps.render_type != 'OBJECT' or ps.instance_object is None:
continue
po = ps.instance_object
if po not in self.object_to_arm_object_dict:
continue
o = self.object_to_arm_object_dict[po]
if len(o['material_refs']) > 0 and o['material_refs'][0] == 'armdefault' and po not in self.default_part_material_objects:
self.default_part_material_objects.append(po)
o['material_refs'] = ['armdefaultpart'] # Replace armdefault
def export_material_ref(self, bobject: bpy.types.Object, material, index, o):
if material is None: # Use default for empty mat slots
self.use_default_material(bobject, o)
return
if material not in self.material_array:
self.material_array.append(material)
o['material_refs'].append(arm.utils.asset_name(material))
def export_particle_system_ref(self, psys: bpy.types.ParticleSystem, out_object):
if psys.settings.instance_object is None or psys.settings.render_type != 'OBJECT':
return
self.particle_system_array[psys.settings] = {"structName": psys.settings.name}
pref = {
'name': psys.name,
'seed': psys.seed,
'particle': psys.settings.name
}
out_object['particle_refs'].append(pref)
@staticmethod
def get_view3d_area() -> Optional[bpy.types.Area]:
screen = bpy.context.window.screen
for area in screen.areas:
if area.type == 'VIEW_3D':
return area
return None
@staticmethod
def get_viewport_view_matrix() -> Optional[Matrix]:
play_area = ArmoryExporter.get_view3d_area()
if play_area is None:
return None
for space in play_area.spaces:
if space.type == 'VIEW_3D':
return space.region_3d.view_matrix
return None
@staticmethod
def get_viewport_projection_matrix() -> Tuple[Optional[Matrix], bool]:
play_area = ArmoryExporter.get_view3d_area()
if play_area is None:
return None, False
for space in play_area.spaces:
if space.type == 'VIEW_3D':
# return space.region_3d.perspective_matrix # persp = window * view
return space.region_3d.window_matrix, space.region_3d.is_perspective
return None, False
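# Illustrative note: per the comment above, the full viewport transform is
# perspective = window @ view, so a caller could rebuild it from the two
# helpers (sketch, assuming a VIEW_3D area is open):
#
#   proj, is_persp = ArmoryExporter.get_viewport_projection_matrix()
#   view = ArmoryExporter.get_viewport_view_matrix()
#   if proj is not None and view is not None:
#       persp = proj @ view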
def write_bone_matrices(self, scene, action):
# profile_time = time.time()
begin_frame, end_frame = int(action.frame_range[0]), int(action.frame_range[1])
if len(self.bone_tracks) > 0:
for i in range(begin_frame, end_frame + 1):
scene.frame_set(i)
for track in self.bone_tracks:
values, pose_bone = track[0], track[1]
parent = pose_bone.parent
if parent:
values += ArmoryExporter.write_matrix((parent.matrix.inverted_safe() @ pose_bone.matrix))
else:
values += ArmoryExporter.write_matrix(pose_bone.matrix)
# print('Bone matrices exported in ' + str(time.time() - profile_time))
@staticmethod
def has_baked_material(bobject, materials):
for mat in materials:
if mat is None:
continue
baked_mat = mat.name + '_' + bobject.name + '_baked'
if baked_mat in bpy.data.materials:
return True
return False
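# Illustrative example: baked materials follow the
# '<material name>_<object name>_baked' convention checked above, so for a
# material "Wood" on an object "Table" the lookup is
#
#   'Wood_Table_baked' in bpy.data.materials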
@staticmethod
def create_material_variants(scene: bpy.types.Scene) -> Tuple[List[bpy.types.Material], List[bpy.types.MaterialSlot]]:
"""Creates unique material variants for skinning, tilesheets and
particles."""
matvars: List[bpy.types.Material] = []
matslots: List[bpy.types.MaterialSlot] = []
bobject: bpy.types.Object
for bobject in scene.collection.all_objects.values():
variant_suffix = ''
# Skinning
if arm.utils.export_bone_data(bobject):
variant_suffix = '_armskin'
# Tilesheets
elif bobject.arm_tilesheet != '':
variant_suffix = '_armtile'
if variant_suffix == '':
continue
for slot in bobject.material_slots:
if slot.material is None or slot.material.library is not None:
continue
if slot.material.name.endswith(variant_suffix):
continue
matslots.append(slot)
mat_name = slot.material.name + variant_suffix
mat = bpy.data.materials.get(mat_name)
# Create material variant
if mat is None:
mat = slot.material.copy()
mat.name = mat_name
if variant_suffix == '_armtile':
mat.arm_tilesheet_flag = True
matvars.append(mat)
slot.material = mat
# Particle and non-particle objects cannot share a material
particle_sys: bpy.types.ParticleSettings
for particle_sys in bpy.data.particles:
bobject = particle_sys.instance_object
if bobject is None or particle_sys.render_type != 'OBJECT':
continue
for slot in bobject.material_slots:
if slot.material is None or slot.material.library is not None:
continue
if slot.material.name.endswith('_armpart'):
continue
matslots.append(slot)
mat_name = slot.material.name + '_armpart'
mat = bpy.data.materials.get(mat_name)
if mat is None:
mat = slot.material.copy()
mat.name = mat_name
mat.arm_particle_flag = True
matvars.append(mat)
slot.material = mat
return matvars, matslots
@staticmethod
def slot_to_material(bobject: bpy.types.Object, slot: bpy.types.MaterialSlot):
mat = slot.material
# Pick up baked material if present
if mat is not None:
baked_mat = mat.name + '_' + bobject.name + '_baked'
if baked_mat in bpy.data.materials:
mat = bpy.data.materials[baked_mat]
return mat
# def ExportMorphWeights(self, node, shapeKeys, scene):
# action = None
# curveArray = []
# indexArray = []
# if (shapeKeys.animation_data):
# action = shapeKeys.animation_data.action
# if (action):
# for fcurve in action.fcurves:
# if ((fcurve.data_path.startswith("key_blocks[")) and (fcurve.data_path.endswith("].value"))):
# keyName = fcurve.data_path.strip("abcdehklopstuvy[]_.")
# if ((keyName[0] == "\"") or (keyName[0] == "'")):
# index = shapeKeys.key_blocks.find(keyName.strip("\"'"))
# if (index >= 0):
# curveArray.append(fcurve)
# indexArray.append(index)
# else:
# curveArray.append(fcurve)
# indexArray.append(int(keyName))
# if ((not action) and (node.animation_data)):
# action = node.animation_data.action
# if (action):
# for fcurve in action.fcurves:
# if ((fcurve.data_path.startswith("data.shape_keys.key_blocks[")) and (fcurve.data_path.endswith("].value"))):
# keyName = fcurve.data_path.strip("abcdehklopstuvy[]_.")
# if ((keyName[0] == "\"") or (keyName[0] == "'")):
# index = shapeKeys.key_blocks.find(keyName.strip("\"'"))
# if (index >= 0):
# curveArray.append(fcurve)
# indexArray.append(index)
# else:
# curveArray.append(fcurve)
# indexArray.append(int(keyName))
# animated = (len(curveArray) != 0)
# referenceName = shapeKeys.reference_key.name if (shapeKeys.use_relative) else ""
# for k in range(len(shapeKeys.key_blocks)):
# self.IndentWrite(B"MorphWeight", 0, (k == 0))
# if (animated):
# self.Write(B" %mw")
# self.WriteInt(k)
# self.Write(B" (index = ")
# self.WriteInt(k)
# self.Write(B") {float {")
# block = shapeKeys.key_blocks[k]
# self.WriteFloat(block.value if (block.name != referenceName) else 1.0)
# self.Write(B"}}\n")
# if (animated):
# self.IndentWrite(B"Animation (begin = ", 0, True)
# self.WriteFloat((action.frame_range[0]) * self.frameTime)
# self.Write(B", end = ")
# self.WriteFloat((action.frame_range[1]) * self.frameTime)
# self.Write(B")\n")
# self.IndentWrite(B"{\n")
# self.indentLevel += 1
# structFlag = False
# for a in range(len(curveArray)):
# k = indexArray[a]
# target = bytes("mw" + str(k), "UTF-8")
# fcurve = curveArray[a]
# kind = OpenGexExporter.ClassifyAnimationCurve(fcurve)
# if ((kind != kAnimationSampled) and (not self.sampleAnimationFlag)):
# self.ExportAnimationTrack(fcurve, kind, target, structFlag)
# else:
# self.ExportMorphWeightSampledAnimationTrack(shapeKeys.key_blocks[k], target, scene, structFlag)
# structFlag = True
# self.indentLevel -= 1
# self.IndentWrite(B"}\n")
def export_object(self, bobject: bpy.types.Object, scene: bpy.types.Scene, out_parent: Dict = None) -> None:
"""This function exports a single object in the scene and
includes its name, object reference, material references (for
meshes), and transform.
Subobjects are then exported recursively.
"""
if not bobject.arm_export:
return
bobject_ref = self.bobject_array.get(bobject)
if bobject_ref is not None:
object_type = bobject_ref["objectType"]
# Linked object, not present in scene
if bobject not in self.object_to_arm_object_dict:
out_object = {
'traits': [],
'spawn': False
}
self.object_to_arm_object_dict[bobject] = out_object
out_object = self.object_to_arm_object_dict[bobject]
out_object['type'] = STRUCT_IDENTIFIER[object_type.value]
out_object['name'] = bobject_ref["structName"]
if bobject.parent_type == "BONE":
out_object['parent_bone'] = bobject.parent_bone
if bobject.hide_render or not bobject.arm_visible:
out_object['visible'] = False
if not bobject.cycles_visibility.camera:
out_object['visible_mesh'] = False
if not bobject.cycles_visibility.shadow:
out_object['visible_shadow'] = False
if not bobject.arm_spawn:
out_object['spawn'] = False
out_object['mobile'] = bobject.arm_mobile
if bobject.instance_type == 'COLLECTION' and bobject.instance_collection is not None:
out_object['group_ref'] = bobject.instance_collection.name
if bobject.arm_tilesheet != '':
out_object['tilesheet_ref'] = bobject.arm_tilesheet
out_object['tilesheet_action_ref'] = bobject.arm_tilesheet_action
if len(bobject.arm_propertylist) > 0:
out_object['properties'] = []
for proplist_item in bobject.arm_propertylist:
out_property = {
'name': proplist_item.name_prop,
'value': getattr(proplist_item, proplist_item.type_prop + '_prop')}
out_object['properties'].append(out_property)
# Export the object reference and material references
objref = bobject.data
if objref is not None:
objname = arm.utils.asset_name(objref)
# LOD
if bobject.type == 'MESH' and hasattr(objref, 'arm_lodlist') and len(objref.arm_lodlist) > 0:
out_object['lods'] = []
for lodlist_item in objref.arm_lodlist:
if not lodlist_item.enabled_prop:
continue
out_lod = {
'object_ref': lodlist_item.name,
'screen_size': lodlist_item.screen_size_prop
}
out_object['lods'].append(out_lod)
if objref.arm_lod_material:
out_object['lod_material'] = True
if object_type is NodeType.MESH:
if objref not in self.mesh_array:
self.mesh_array[objref] = {"structName": objname, "objectTable": [bobject]}
else:
self.mesh_array[objref]["objectTable"].append(bobject)
oid = arm.utils.safestr(self.mesh_array[objref]["structName"])
wrd = bpy.data.worlds['Arm']
if wrd.arm_single_data_file:
out_object['data_ref'] = oid
else:
ext = '' if not ArmoryExporter.compress_enabled else '.lz4'
if ext == '' and not bpy.data.worlds['Arm'].arm_minimize:
ext = '.json'
out_object['data_ref'] = 'mesh_' + oid + ext + '/' + oid
out_object['material_refs'] = []
for i in range(len(bobject.material_slots)):
mat = self.slot_to_material(bobject, bobject.material_slots[i])
# Export ref
self.export_material_ref(bobject, mat, i, out_object)
# Decal flag
if mat is not None and mat.arm_decal:
out_object['type'] = 'decal_object'
# No material, mimic Cycles and assign default
if len(out_object['material_refs']) == 0:
self.use_default_material(bobject, out_object)
num_psys = len(bobject.particle_systems)
if num_psys > 0:
out_object['particle_refs'] = []
out_object['render_emitter'] = bobject.show_instancer_for_render
for i in range(num_psys):
self.export_particle_system_ref(bobject.particle_systems[i], out_object)
aabb = bobject.data.arm_aabb
if aabb[0] == 0 and aabb[1] == 0 and aabb[2] == 0:
self.calc_aabb(bobject)
out_object['dimensions'] = [aabb[0], aabb[1], aabb[2]]
# shapeKeys = ArmoryExporter.get_shape_keys(objref)
# if shapeKeys:
# self.ExportMorphWeights(bobject, shapeKeys, scene, out_object)
elif object_type is NodeType.LIGHT:
if objref not in self.light_array:
self.light_array[objref] = {"structName" : objname, "objectTable" : [bobject]}
else:
self.light_array[objref]["objectTable"].append(bobject)
out_object['data_ref'] = self.light_array[objref]["structName"]
elif object_type is NodeType.PROBE:
if objref not in self.probe_array:
self.probe_array[objref] = {"structName" : objname, "objectTable" : [bobject]}
else:
self.probe_array[objref]["objectTable"].append(bobject)
dist = bobject.data.influence_distance
if objref.type == "PLANAR":
out_object['dimensions'] = [1.0, 1.0, dist]
# GRID, CUBEMAP
else:
out_object['dimensions'] = [dist, dist, dist]
out_object['data_ref'] = self.probe_array[objref]["structName"]
elif object_type is NodeType.CAMERA:
if 'spawn' in out_object and not out_object['spawn']:
self.camera_spawned |= False
else:
self.camera_spawned = True
if objref not in self.camera_array:
self.camera_array[objref] = {"structName" : objname, "objectTable" : [bobject]}
else:
self.camera_array[objref]["objectTable"].append(bobject)
out_object['data_ref'] = self.camera_array[objref]["structName"]
elif object_type is NodeType.SPEAKER:
if objref not in self.speaker_array:
self.speaker_array[objref] = {"structName" : objname, "objectTable" : [bobject]}
else:
self.speaker_array[objref]["objectTable"].append(bobject)
out_object['data_ref'] = self.speaker_array[objref]["structName"]
# Export the transform. If object is animated, then animation tracks are exported here
if bobject.type != 'ARMATURE' and bobject.animation_data is not None:
action = bobject.animation_data.action
export_actions = [action]
for track in bobject.animation_data.nla_tracks:
if track.strips is None:
continue
for strip in track.strips:
if strip.action is None or strip.action in export_actions:
continue
export_actions.append(strip.action)
orig_action = action
for a in export_actions:
bobject.animation_data.action = a
self.export_object_transform(bobject, out_object)
if len(export_actions) >= 2 and export_actions[0] is None: # No action assigned
out_object['object_actions'].insert(0, 'null')
bobject.animation_data.action = orig_action
else:
self.export_object_transform(bobject, out_object)
# If the object is parented to a bone and is not relative, then undo the bone's transform
if bobject.parent_type == "BONE":
armature = bobject.parent.data
bone = armature.bones[bobject.parent_bone]
# if not bone.use_relative_parent:
out_object['parent_bone_connected'] = bone.use_connect
if bone.use_connect:
bone_translation = Vector((0, bone.length, 0)) + bone.head
out_object['parent_bone_tail'] = [bone_translation[0], bone_translation[1], bone_translation[2]]
else:
bone_translation = bone.tail - bone.head
out_object['parent_bone_tail'] = [bone_translation[0], bone_translation[1], bone_translation[2]]
pose_bone = bobject.parent.pose.bones[bobject.parent_bone]
bone_translation_pose = pose_bone.tail - pose_bone.head
out_object['parent_bone_tail_pose'] = [bone_translation_pose[0], bone_translation_pose[1], bone_translation_pose[2]]
if bobject.type == 'ARMATURE' and bobject.data is not None:
# Armature data
bdata = bobject.data
# Reference start action
action = None
adata = bobject.animation_data
# Active action
if adata is not None:
action = adata.action
if action is None:
log.warn('Object ' + bobject.name + ' - No action assigned, setting to pose')
bobject.animation_data_create()
actions = bpy.data.actions
action = actions.get('armorypose')
if action is None:
action = actions.new(name='armorypose')
# Export actions
export_actions = [action]
# hasattr - armature modifier may reference non-parent
# armature object to deform with
if hasattr(adata, 'nla_tracks') and adata.nla_tracks is not None:
for track in adata.nla_tracks:
if track.strips is None:
continue
for strip in track.strips:
if strip.action is None:
continue
if strip.action.name == action.name:
continue
export_actions.append(strip.action)
armatureid = arm.utils.safestr(arm.utils.asset_name(bdata))
ext = '.lz4' if ArmoryExporter.compress_enabled else ''
if ext == '' and not bpy.data.worlds['Arm'].arm_minimize:
ext = '.json'
out_object['bone_actions'] = []
for action in export_actions:
aname = arm.utils.safestr(arm.utils.asset_name(action))
out_object['bone_actions'].append('action_' + armatureid + '_' + aname + ext)
clear_op = set()
skelobj = bobject
baked_actions = []
orig_action = bobject.animation_data.action
if bdata.arm_autobake and bobject.name not in bpy.context.collection.all_objects:
clear_op.add('unlink')
# Clone bobject and put it in the current scene so
# the bake operator can run
if bobject.library is not None:
skelobj = bobject.copy()
clear_op.add('rem')
bpy.context.collection.objects.link(skelobj)
for action in export_actions:
aname = arm.utils.safestr(arm.utils.asset_name(action))
skelobj.animation_data.action = action
fp = self.get_meshes_file_path('action_' + armatureid + '_' + aname, compressed=ArmoryExporter.compress_enabled)
assets.add(fp)
if not bdata.arm_cached or not os.path.exists(fp):
# Store action to use it after autobake was handled
original_action = action
# Handle autobake
if bdata.arm_autobake:
sel = bpy.context.selected_objects[:]
for _o in sel:
_o.select_set(False)
skelobj.select_set(True)
bpy.ops.nla.bake(frame_start=action.frame_range[0], frame_end=action.frame_range[1], step=1, only_selected=False, visual_keying=True)
action = skelobj.animation_data.action
skelobj.select_set(False)
for _o in sel:
_o.select_set(True)
baked_actions.append(action)
wrd = bpy.data.worlds['Arm']
if wrd.arm_verbose_output:
print('Exporting armature action ' + aname)
bones = []
self.bone_tracks = []
for bone in bdata.bones:
if not bone.parent:
boneo = {}
self.export_bone(skelobj, bone, boneo, action)
bones.append(boneo)
self.write_bone_matrices(bpy.context.scene, action)
if len(bones) > 0 and 'anim' in bones[0]:
self.export_pose_markers(bones[0]['anim'], original_action)
# Save action separately
action_obj = {'name': aname, 'objects': bones}
arm.utils.write_arm(fp, action_obj)
# Restore settings
skelobj.animation_data.action = orig_action
for a in baked_actions:
bpy.data.actions.remove(a, do_unlink=True)
if 'unlink' in clear_op:
bpy.context.collection.objects.unlink(skelobj)
if 'rem' in clear_op:
bpy.data.objects.remove(skelobj, do_unlink=True)
# TODO: cache per action
bdata.arm_cached = True
if out_parent is None:
self.output['objects'].append(out_object)
else:
out_parent['children'].append(out_object)
self.post_export_object(bobject, out_object, object_type)
if not hasattr(out_object, 'children') and len(bobject.children) > 0:
out_object['children'] = []
if bobject.arm_instanced == 'Off':
for subbobject in bobject.children:
2020-04-16 00:01:46 +02:00
self.export_object(subbobject, scene, out_object)
2020-04-16 00:01:46 +02:00
def export_skin(self, bobject: bpy.types.Object, armature, export_mesh: bpy.types.Mesh, out_mesh):
"""This function exports all skinning data, which includes the
skeleton and per-vertex bone influence data"""
oskin = {}
out_mesh['skin'] = oskin
# Write the skin bind pose transform
otrans = {'values': ArmoryExporter.write_matrix(bobject.matrix_world)}
oskin['transform'] = otrans
bone_array = armature.data.bones
bone_count = len(bone_array)
rpdat = arm.utils.get_rp()
max_bones = rpdat.arm_skin_max_bones
if bone_count > max_bones:
bone_count = max_bones
# Write the bone object reference array
oskin['bone_ref_array'] = np.empty(bone_count, dtype=object)
oskin['bone_len_array'] = np.empty(bone_count, dtype='<f4')
for i in range(bone_count):
bone_ref = self.find_bone(bone_array[i].name)
if bone_ref:
oskin['bone_ref_array'][i] = bone_ref[1]["structName"]
oskin['bone_len_array'][i] = bone_array[i].length
else:
oskin['bone_ref_array'][i] = ""
oskin['bone_len_array'][i] = 0.0
# Write the bind pose transform array
oskin['transformsI'] = []
for i in range(bone_count):
skeleton_inv = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe()
skeleton_inv = (skeleton_inv @ bobject.matrix_world)
oskin['transformsI'].append(ArmoryExporter.write_matrix(skeleton_inv))
# Export the per-vertex bone influence data
group_remap = []
for group in bobject.vertex_groups:
for i in range(bone_count):
if bone_array[i].name == group.name:
group_remap.append(i)
break
else:
group_remap.append(-1)
bone_count_array = np.empty(len(export_mesh.loops), dtype='<i2')
bone_index_array = np.empty(len(export_mesh.loops) * 4, dtype='<i2')
bone_weight_array = np.empty(len(export_mesh.loops) * 4, dtype='<f4')
vertices = bobject.data.vertices
count = 0
for index, l in enumerate(export_mesh.loops):
bone_count = 0
total_weight = 0.0
bone_values = []
2018-12-18 16:46:36 +01:00
for g in vertices[l.vertex_index].groups:
2017-11-19 13:38:54 +01:00
bone_index = group_remap[g.group]
bone_weight = g.weight
2020-04-15 23:46:34 +02:00
if bone_index >= 0: #and bone_weight != 0.0:
bone_values.append((bone_weight, bone_index))
total_weight += bone_weight
bone_count += 1
2018-12-18 16:46:36 +01:00
if bone_count > 4:
2017-04-11 23:21:42 +02:00
bone_count = 4
2018-12-18 16:46:36 +01:00
bone_values.sort(reverse=True)
2017-04-11 23:21:42 +02:00
bone_values = bone_values[:4]
bone_count_array[index] = bone_count
for bv in bone_values:
bone_weight_array[count] = bv[0]
bone_index_array[count] = bv[1]
count += 1
if total_weight != 0.0 and total_weight != 1.0:
normalizer = 1.0 / total_weight
for i in range(bone_count):
bone_weight_array[count - i - 1] *= normalizer
bone_index_array = bone_index_array[:count]
bone_weight_array = bone_weight_array[:count]
bone_weight_array *= 32767
bone_weight_array = np.array(bone_weight_array, dtype='<i2')
oskin['bone_count_array'] = bone_count_array
oskin['bone_index_array'] = bone_index_array
oskin['bone_weight_array'] = bone_weight_array
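# Illustrative summary of the loop above (made-up weights): each loop keeps
# at most the four strongest influences, divides them by the summed weight of
# all influences on that vertex (when that sum is not already 1.0) and
# quantizes to signed 16-bit:
#
#   weights = (0.6, 0.3, 0.2, 0.1, 0.05)   # five influences on one vertex
#   kept    = (0.6, 0.3, 0.2, 0.1)         # four largest are kept
#   total   = sum(weights)                 # 1.25
#   packed  = [int(w / total * 32767) for w in kept]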
# Bone constraints
if not armature.data.arm_autobake:
for bone in armature.pose.bones:
if len(bone.constraints) > 0:
if 'constraints' not in oskin:
oskin['constraints'] = []
self.add_constraints(bone, oskin, bone=True)
def export_shape_keys(self, bobject: bpy.types.Object, export_mesh: bpy.types.Mesh, out_mesh):
max_shape_keys = 32
output_dir = bpy.path.abspath('//') + "Bundled\\"
name = bobject.data.name
vert_pos = []
vert_nor = []
names = []
default_values = []
count = 0
for shape_key in bobject.data.shape_keys.key_blocks:
if count > max_shape_keys:
break
vert_data = self.get_vertex_data_from_shape_key(shape_key)
vert_pos.append(vert_data['pos'])
vert_nor.append(vert_data['nor'])
names.append(shape_key.name)
default_values.append(shape_key.value)
count += 1
min, max = self.bake_to_image(vert_pos, vert_nor, name, output_dir)
morph_target = {}
morph_target['morph_target_ref'] = names
morph_target['morph_target_defaults'] = default_values
morph_target['num_morph_targets'] = count
morph_target['morph_scale'] = max - min
morph_target['morph_offset'] = min
out_mesh['morph_target'] = morph_target
self.create_morph_uv_set(bobject)
return
def get_vertex_data_from_shape_key(self, shape_key_data):
vert_pos = shape_key_data.data.values()
vert_nor = shape_key_data.normals_vertex_get()
num_verts = len(vert_pos)
pos = []
nor = []
for i in range(num_verts):
pos.append(list(vert_pos[i].co))
temp = []
for j in range(3):
temp.append(vert_nor[j + i * 3])
nor.append(temp)
return {'pos': pos, 'nor': nor}
def bake_to_image(self, vert_pos, vert_nor, name, output_dir):
pos_array = np.array(vert_pos)
nor_array = np.array(vert_nor)
pos_max = np.amax(pos_array)
pos_min = np.amin(pos_array)
pos_array_scaled = np.interp(pos_array, (pos_min, pos_max), (0, 1))
self.write_output_image(pos_array_scaled, name + '_pos', output_dir)
nor_array_scaled = np.interp(nor_array, (-1, 1), (0, 1))
self.write_output_image(nor_array_scaled, name + '_nor', output_dir)
return pos_min, pos_max
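# Illustrative note: positions are remapped from [pos_min, pos_max] to [0, 1]
# and normals from [-1, 1] to [0, 1] before being written out as images, e.g.
#
#   np.interp(0.0, (-2.0, 2.0), (0, 1))  # -> 0.5
#
# The caller stores 'morph_scale' = pos_max - pos_min and
# 'morph_offset' = pos_min so the original values can be reconstructed.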
def write_output_image(self, data, name, output_dir):
size = len(data[0]), len(data)
pixel_list = []
for y in range(size[1]):
for x in range(size[0]):
# assign RGBA
pixel_list.append(data[y, x, 0])
pixel_list.append(data[y, x, 1])
pixel_list.append(data[y, x, 2])
pixel_list.append(1.0)
image = bpy.data.images.new(name, width=size[0], height=size[1], is_data=True)
image.pixels = pixel_list
image.save_render(output_dir + name + ".png", scene=bpy.context.scene)
bpy.data.images.remove(image)
def create_morph_uv_set(self, obj):
if obj.data.uv_layers.get('UVMap_shape_key') is None:
obj.data.uv_layers.new(name = 'UVMap_shape_key')
bm = bmesh.new()
bm.from_mesh(obj.data)
uv_layer = bm.loops.layers.uv.get('UVMap_shape_key')
pixel_size = 1.0 / len(bm.verts)
i = 0
for v in bm.verts:
for l in v.link_loops:
uv_data = l[uv_layer]
uv_data.uv = Vector(((i + 0.5) * pixel_size, 0.0))
i += 1
bm.to_mesh(obj.data)
bm.free()
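# Illustrative note: the generated 'UVMap_shape_key' layer gives every vertex
# its own texel in the baked morph images; vertex i is placed at
#
#   u = (i + 0.5) * (1.0 / len(bm.verts))
#   v = 0.0
#
# so its baked position/normal offsets can be looked up by UV.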
def write_mesh(self, bobject: bpy.types.Object, fp, out_mesh):
if bpy.data.worlds['Arm'].arm_single_data_file:
self.output['mesh_datas'].append(out_mesh)
# One mesh data per file
else:
mesh_obj = {'mesh_datas': [out_mesh]}
arm.utils.write_arm(fp, mesh_obj)
bobject.data.arm_cached = True
@staticmethod
def calc_aabb(bobject):
aabb_center = 0.125 * sum((Vector(b) for b in bobject.bound_box), Vector())
bobject.data.arm_aabb = [ \
abs((bobject.bound_box[6][0] - bobject.bound_box[0][0]) / 2 + abs(aabb_center[0])) * 2, \
abs((bobject.bound_box[6][1] - bobject.bound_box[0][1]) / 2 + abs(aabb_center[1])) * 2, \
abs((bobject.bound_box[6][2] - bobject.bound_box[0][2]) / 2 + abs(aabb_center[2])) * 2 \
]
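# Illustrative example: for the default Blender cube, bound_box spans
# (-1, -1, -1)..(1, 1, 1) and the bounding-box center is the origin, so each
# axis evaluates to
#
#   abs((1 - (-1)) / 2 + abs(0.0)) * 2  # -> 2.0
#
# giving arm_aabb = [2.0, 2.0, 2.0].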
def export_mesh_data(self, exportMesh, bobject: bpy.types.Object, o, has_armature=False):
exportMesh.calc_normals_split()
exportMesh.calc_loop_triangles()
loops = exportMesh.loops
num_verts = len(loops)
num_uv_layers = len(exportMesh.uv_layers)
is_baked = self.has_baked_material(bobject, exportMesh.materials)
self.has_shape_key = False
if 'morph_target' in o:
self.has_shape_key = True
print(bobject.name)
print(self.has_shape_key)
has_tex = (self.get_export_uvs(bobject.data) and num_uv_layers > 0) or is_baked or self.has_shape_key
has_tex1 = has_tex and num_uv_layers > 1
print(has_tex)
print(has_tex1)
num_colors = len(exportMesh.vertex_colors)
has_col = self.get_export_vcols(bobject.data) and num_colors > 0
has_tang = self.has_tangents(bobject.data)
pdata = np.empty(num_verts * 4, dtype='<f4') # p.xyz, n.z
ndata = np.empty(num_verts * 2, dtype='<f4') # n.xy
if has_tex:
t0map = 0 # Get active uvmap
t0data = np.empty(num_verts * 2, dtype='<f4')
uv_layers = exportMesh.uv_layers
if uv_layers is not None:
if 'UVMap_baked' in uv_layers:
for i in range(0, len(uv_layers)):
if uv_layers[i].name == 'UVMap_baked':
t0map = i
break
else:
for i in range(0, len(uv_layers)):
if uv_layers[i].active_render:
t0map = i
break
if has_tex1:
t1map = 1 if t0map == 0 else 0
t1data = np.empty(num_verts * 2, dtype='<f4')
# Scale for packed coords
maxdim = 1.0
lay0 = uv_layers[t0map]
for v in lay0.data:
if abs(v.uv[0]) > maxdim:
maxdim = abs(v.uv[0])
if abs(v.uv[1]) > maxdim:
maxdim = abs(v.uv[1])
if has_tex1:
lay1 = uv_layers[t1map]
for v in lay1.data:
if abs(v.uv[0]) > maxdim:
maxdim = abs(v.uv[0])
if abs(v.uv[1]) > maxdim:
maxdim = abs(v.uv[1])
if maxdim > 1:
o['scale_tex'] = maxdim
invscale_tex = (1 / o['scale_tex']) * 32767
else:
invscale_tex = 1 * 32767
if has_tang:
try:
exportMesh.calc_tangents(uvmap=lay0.name)
except Exception as e:
if hasattr(e, 'message'):
log.error(e.message)
else:
# Assume it was caused because of encountering n-gons
log.error(f"""object {bobject.name} contains n-gons in its mesh, so it's impossible to compute tangent space for normal mapping.
Make sure the mesh only has tris/quads.""")
tangdata = np.empty(num_verts * 3, dtype='<f4')
if has_col:
cdata = np.empty(num_verts * 3, dtype='<f4')
# Scale for packed coords
maxdim = max(bobject.data.arm_aabb[0], max(bobject.data.arm_aabb[1], bobject.data.arm_aabb[2]))
if maxdim > 2:
o['scale_pos'] = maxdim / 2
else:
o['scale_pos'] = 1.0
if has_armature: # Allow up to 2x bigger bounds for skinned mesh
o['scale_pos'] *= 2.0
scale_pos = o['scale_pos']
invscale_pos = (1 / scale_pos) * 32767
verts = exportMesh.vertices
if has_tex:
lay0 = exportMesh.uv_layers[t0map]
if has_tex1:
lay1 = exportMesh.uv_layers[t1map]
if has_col:
vcol0 = exportMesh.vertex_colors[0].data
for i, loop in enumerate(loops):
v = verts[loop.vertex_index]
co = v.co
normal = loop.normal
tang = loop.tangent
i4 = i * 4
i2 = i * 2
pdata[i4 ] = co[0]
pdata[i4 + 1] = co[1]
pdata[i4 + 2] = co[2]
pdata[i4 + 3] = normal[2] * scale_pos # Cancel scale
ndata[i2 ] = normal[0]
ndata[i2 + 1] = normal[1]
if has_tex:
uv = lay0.data[loop.index].uv
t0data[i2 ] = uv[0]
t0data[i2 + 1] = 1.0 - uv[1] # Reverse Y
if has_tex1:
uv = lay1.data[loop.index].uv
t1data[i2 ] = uv[0]
t1data[i2 + 1] = 1.0 - uv[1]
if has_tang:
i3 = i * 3
tangdata[i3 ] = tang[0]
tangdata[i3 + 1] = tang[1]
tangdata[i3 + 2] = tang[2]
if has_col:
col = vcol0[loop.index].color
i3 = i * 3
cdata[i3 ] = col[0]
cdata[i3 + 1] = col[1]
cdata[i3 + 2] = col[2]
mats = exportMesh.materials
poly_map = []
for i in range(max(len(mats), 1)):
poly_map.append([])
for poly in exportMesh.polygons:
poly_map[poly.material_index].append(poly)
o['index_arrays'] = []
# map polygon indices to triangle loops
tri_loops = {}
for loop in exportMesh.loop_triangles:
if loop.polygon_index not in tri_loops:
tri_loops[loop.polygon_index] = []
tri_loops[loop.polygon_index].append(loop)
for index, polys in enumerate(poly_map):
tris = 0
for poly in polys:
tris += poly.loop_total - 2
if tris == 0: # No face assigned
continue
prim = np.empty(tris * 3, dtype='<i4')
i = 0
for poly in polys:
for loop in tri_loops[poly.index]:
prim[i ] = loops[loop.loops[0]].index
prim[i + 1] = loops[loop.loops[1]].index
prim[i + 2] = loops[loop.loops[2]].index
i += 3
ia = {}
ia['values'] = prim
ia['material'] = 0
if len(mats) > 1:
for i in range(len(mats)): # Multi-mat mesh
if (mats[i] == mats[index]): # Default material for empty slots
ia['material'] = i
break
o['index_arrays'].append(ia)
# Pack
pdata *= invscale_pos
ndata *= 32767
pdata = np.array(pdata, dtype='<i2')
ndata = np.array(ndata, dtype='<i2')
if has_tex:
t0data *= invscale_tex
t0data = np.array(t0data, dtype='<i2')
if has_tex1:
t1data *= invscale_tex
t1data = np.array(t1data, dtype='<i2')
if has_col:
cdata *= 32767
cdata = np.array(cdata, dtype='<i2')
if has_tang:
tangdata *= 32767
tangdata = np.array(tangdata, dtype='<i2')
# Output
o['vertex_arrays'] = []
o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
if has_tex:
o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' })
if has_tex1:
o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' })
if has_col:
o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 })
if has_tang:
o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 })
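# Illustrative note on the packing above: all attributes are quantized to
# signed 16-bit. Positions are divided by 'scale_pos' (and UVs by
# 'scale_tex') before the * 32767 step, so a reader would reverse it with
#
#   x = pdata_i16 / 32767 * scale_pos
#
# Normals, colors and tangents are stored directly as value * 32767.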
# If there are multiple morph targets, export them here.
# if (shapeKeys):
# shapeKeys.key_blocks[0].value = 0.0
# for m in range(1, len(currentMorphValue)):
# shapeKeys.key_blocks[m].value = 1.0
# mesh.update()
# node.active_shape_key_index = m
# morphMesh = node.to_mesh(scene, applyModifiers, "RENDER", True, False)
# # Write the morph target position array.
# self.IndentWrite(B"VertexArray (attrib = \"position\", morph = ", 0, True)
# self.WriteInt(m)
# self.Write(B")\n")
# self.IndentWrite(B"{\n")
# self.indentLevel += 1
# self.IndentWrite(B"float[3]\t\t// ")
# self.WriteInt(vertexCount)
# self.IndentWrite(B"{\n", 0, True)
# self.WriteMorphPositionArray3D(unifiedVertexArray, morphMesh.vertices)
# self.IndentWrite(B"}\n")
# self.indentLevel -= 1
# self.IndentWrite(B"}\n\n")
# # Write the morph target normal array.
# self.IndentWrite(B"VertexArray (attrib = \"normal\", morph = ")
# self.WriteInt(m)
# self.Write(B")\n")
# self.IndentWrite(B"{\n")
# self.indentLevel += 1
# self.IndentWrite(B"float[3]\t\t// ")
# self.WriteInt(vertexCount)
# self.IndentWrite(B"{\n", 0, True)
# self.WriteMorphNormalArray3D(unifiedVertexArray, morphMesh.vertices, morphMesh.tessfaces)
# self.IndentWrite(B"}\n")
# self.indentLevel -= 1
# self.IndentWrite(B"}\n")
# bpy.data.meshes.remove(morphMesh)
def has_tangents(self, exportMesh):
return self.get_export_uvs(exportMesh) and self.get_export_tangents(exportMesh) and len(exportMesh.uv_layers) > 0
def export_mesh(self, object_ref):
"""Exports a single mesh object."""
# profile_time = time.time()
table = object_ref[1]["objectTable"]
bobject = table[0]
oid = arm.utils.safestr(object_ref[1]["structName"])
wrd = bpy.data.worlds['Arm']
if wrd.arm_single_data_file:
fp = None
else:
fp = self.get_meshes_file_path('mesh_' + oid, compressed=ArmoryExporter.compress_enabled)
assets.add(fp)
# No export necessary
if bobject.data.arm_cached and os.path.exists(fp):
return
# Mesh users have different modifier stack
for i in range(1, len(table)):
if not self.mod_equal_stack(bobject, table[i]):
log.warn('Mesh "{0}" is used by objects "{1}" and "{2}" which have different modifier stacks - use Make Single User - Object & Data for now'.format(oid, bobject.name, table[i].name))
break
if wrd.arm_verbose_output:
print('Exporting mesh ' + arm.utils.asset_name(bobject.data))
out_mesh = {'name': oid}
mesh = object_ref[0]
struct_flag = False
# Save the morph state if necessary
active_shape_key_index = 0
show_only_shape_key = False
current_morph_value = 0
shape_keys = ArmoryExporter.get_shape_keys(mesh)
if shape_keys:
active_shape_key_index = bobject.active_shape_key_index
show_only_shape_key = bobject.show_only_shape_key
current_morph_value = bobject.active_shape_key.value
bobject.active_shape_key_index = 0
bobject.show_only_shape_key = True
self.depsgraph.update()
armature = bobject.find_armature()
apply_modifiers = not armature
bobject_eval = bobject.evaluated_get(self.depsgraph) if apply_modifiers else bobject
export_mesh = bobject_eval.to_mesh()
if shape_keys:
if len(bobject.data.uv_layers) > 2:
if bobject.data.uv_layers.get('UVMap_shape_key') is not None:
self.export_shape_keys(bobject, export_mesh, out_mesh)
else:
log.warn(oid + ' has more than 2 UV maps, shape keys are not supported for it')
else:
self.export_shape_keys(bobject, export_mesh, out_mesh)
if export_mesh is None:
log.warn(oid + ' was not exported')
return
if len(export_mesh.uv_layers) > 2:
log.warn(oid + ' exceeds maximum of 2 UV Maps supported')
# Update aabb
self.calc_aabb(bobject)
# Process meshes
if ArmoryExporter.optimize_enabled:
vert_list = exporter_opt.export_mesh_data(self, export_mesh, bobject, out_mesh, has_armature=armature is not None)
if armature:
exporter_opt.export_skin(self, bobject, armature, vert_list, out_mesh)
else:
self.export_mesh_data(export_mesh, bobject, out_mesh, has_armature=armature is not None)
if armature:
self.export_skin(bobject, armature, export_mesh, out_mesh)
# Restore the morph state
if shape_keys:
bobject.active_shape_key_index = active_shape_key_index
bobject.show_only_shape_key = show_only_shape_key
bobject.active_shape_key.value = current_morph_value
self.depsgraph.update()
mesh.update()
# Check if mesh is using instanced rendering
instanced_type, instanced_data = self.object_process_instancing(table, out_mesh['scale_pos'])
# Save offset data for instanced rendering
if instanced_type > 0:
out_mesh['instanced_data'] = instanced_data
out_mesh['instanced_type'] = instanced_type
# Export usage
if bobject.data.arm_dynamic_usage:
out_mesh['dynamic_usage'] = bobject.data.arm_dynamic_usage
self.write_mesh(bobject, fp, out_mesh)
# print('Mesh exported in ' + str(time.time() - profile_time))
if hasattr(bobject, 'evaluated_get'):
bobject_eval.to_mesh_clear()
def export_light(self, object_ref):
"""Exports a single light object."""
rpdat = arm.utils.get_rp()
light_ref = object_ref[0]
objtype = light_ref.type
out_light = {
'name': object_ref[1]["structName"],
'type': objtype.lower(),
'cast_shadow': light_ref.use_shadow,
'near_plane': light_ref.arm_clip_start,
'far_plane': light_ref.arm_clip_end,
'fov': light_ref.arm_fov,
'color': [light_ref.color[0], light_ref.color[1], light_ref.color[2]],
'strength': light_ref.energy,
'shadows_bias': light_ref.arm_shadows_bias * 0.0001
}
if rpdat.rp_shadows:
if objtype == 'POINT':
out_light['shadowmap_size'] = int(rpdat.rp_shadowmap_cube)
else:
out_light['shadowmap_size'] = arm.utils.get_cascade_size(rpdat)
else:
out_light['shadowmap_size'] = 0
if objtype == 'SUN':
out_light['strength'] *= 0.325
# Scale bias for ortho light matrix
out_light['shadows_bias'] *= 20.0
if out_light['shadowmap_size'] > 1024:
# Less bias for bigger maps
out_light['shadows_bias'] *= 1 / (out_light['shadowmap_size'] / 1024)
elif objtype == 'POINT':
out_light['strength'] *= 0.01
out_light['fov'] = 1.5708 # pi/2
out_light['shadowmap_cube'] = True
if light_ref.shadow_soft_size > 0.1:
out_light['light_size'] = light_ref.shadow_soft_size * 10
elif objtype == 'SPOT':
out_light['strength'] *= 0.01
out_light['spot_size'] = math.cos(light_ref.spot_size / 2)
# Cycles defaults to 0.15
out_light['spot_blend'] = light_ref.spot_blend / 10
elif objtype == 'AREA':
out_light['strength'] *= 0.01
out_light['size'] = light_ref.size
out_light['size_y'] = light_ref.size_y
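# The per-type multipliers above roughly map Blender's light energy to the
# engine's strength units; the factors are empirical rather than physical.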
self.output['light_datas'].append(out_light)
def export_probe(self, objectRef):
o = {'name': objectRef[1]["structName"]}
bo = objectRef[0]
if bo.type == 'GRID':
o['type'] = 'grid'
elif bo.type == 'PLANAR':
o['type'] = 'planar'
else:
o['type'] = 'cubemap'
self.output['probe_datas'].append(o)
def export_collection(self, collection: bpy.types.Collection):
"""Exports a single collection."""
scene_objects = self.scene.collection.all_objects
out_collection = {
'name': collection.name,
'instance_offset': list(collection.instance_offset),
'object_refs': []
}
for bobject in collection.objects:
if not bobject.arm_export:
continue
# Only add unparented objects or objects with their parent
# outside the collection, then instantiate the full object
# child tree if the collection gets spawned as a whole
if bobject.parent is None or bobject.parent.name not in collection.objects:
# This object is controlled by proxy
has_proxy_user = False
for bo in bpy.data.objects:
if bo.proxy == bobject:
has_proxy_user = True
break
if has_proxy_user:
continue
asset_name = arm.utils.asset_name(bobject)
# Collection is in the same file
if collection.library is None:
# Only export linked objects (from other scenes for example),
# all other objects (in scene_objects) are already exported.
if bobject.name not in scene_objects:
self.process_bobject(bobject)
self.export_object(bobject, self.scene)
else:
# Add external linked objects
# Iron differentiates objects based on their names,
# so errors will happen if two objects with the
# same name exists. This check is only required
# when the object in question is in a library,
# otherwise Blender will not allow duplicate names
if asset_name in scene_objects:
log.warn("skipping export of the object"
f" {bobject.name} (collection"
f" {collection.name}) because it has the same"
" export name as another object in the scene:"
f" {asset_name}")
continue
self.process_bobject(bobject)
self.export_object(bobject, self.scene)
out_collection['object_refs'].append(asset_name)
self.output['groups'].append(out_collection)
def get_camera_clear_color(self):
if self.scene.world is None:
return [0.051, 0.051, 0.051, 1.0]
if self.scene.world.node_tree is None:
c = self.scene.world.color
return [c[0], c[1], c[2], 1.0]
if 'Background' in self.scene.world.node_tree.nodes:
background_node = self.scene.world.node_tree.nodes['Background']
col = background_node.inputs[0].default_value
strength = background_node.inputs[1].default_value
ar = [col[0] * strength, col[1] * strength, col[2] * strength, col[3]]
ar[0] = max(min(ar[0], 1.0), 0.0)
ar[1] = max(min(ar[1], 1.0), 0.0)
ar[2] = max(min(ar[2], 1.0), 0.0)
ar[3] = max(min(ar[3], 1.0), 0.0)
return ar
else:
return [0.051, 0.051, 0.051, 1.0]
@staticmethod
def extract_projection(o, proj, with_planes=True):
a = proj[0][0]
b = proj[1][1]
c = proj[2][2]
d = proj[2][3]
k = (c - 1.0) / (c + 1.0)
o['fov'] = 2.0 * math.atan(1.0 / b)
if with_planes:
o['near_plane'] = (d * (1.0 - k)) / (2.0 * k)
o['far_plane'] = k * o['near_plane']
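# For a standard perspective matrix, proj[1][1] == 1 / tan(fov_y / 2), so the
# vertical FOV is recovered as 2 * atan(1 / b); the near and far planes follow
# from the depth terms c and d above.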
@staticmethod
def extract_ortho(o, proj):
# left, right, bottom, top
o['ortho'] = [-(1 + proj[3][0]) / proj[0][0], \
(1 - proj[3][0]) / proj[0][0], \
-(1 + proj[3][1]) / proj[1][1], \
(1 - proj[3][1]) / proj[1][1]]
o['near_plane'] = (1 + proj[3][2]) / proj[2][2]
o['far_plane'] = -(1 - proj[3][2]) / proj[2][2]
o['near_plane'] *= 2
o['far_plane'] *= 2
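# The expressions above invert the orthographic projection matrix to recover
# the view bounds (left, right, bottom, top) and the clip planes for the
# exported camera data.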
def export_camera(self, objectRef):
o = {}
o['name'] = objectRef[1]["structName"]
objref = objectRef[0]
camera = objectRef[1]["objectTable"][0]
render = self.scene.render
proj = camera.calc_matrix_camera(
self.depsgraph,
x=render.resolution_x,
y=render.resolution_y,
scale_x=render.pixel_aspect_x,
scale_y=render.pixel_aspect_y)
if objref.type == 'PERSP':
self.extract_projection(o, proj)
else:
self.extract_ortho(o, proj)
o['frustum_culling'] = objref.arm_frustum_culling
o['clear_color'] = self.get_camera_clear_color()
self.output['camera_datas'].append(o)
def export_speaker(self, objectRef):
# This function exports a single speaker object
o = {}
o['name'] = objectRef[1]["structName"]
objref = objectRef[0]
if objref.sound:
# Packed
if objref.sound.packed_file is not None:
unpack_path = arm.utils.get_fp_build() + '/compiled/Assets/unpacked'
if not os.path.exists(unpack_path):
os.makedirs(unpack_path)
unpack_filepath = unpack_path + '/' + objref.sound.name
if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != objref.sound.packed_file.size:
with open(unpack_filepath, 'wb') as f:
f.write(objref.sound.packed_file.data)
assets.add(unpack_filepath)
# External
else:
assets.add(arm.utils.asset_path(objref.sound.filepath)) # Link sound to assets
o['sound'] = arm.utils.extract_filename(objref.sound.filepath)
else:
o['sound'] = ''
o['muted'] = objref.muted
o['loop'] = objref.arm_loop
o['stream'] = objref.arm_stream
o['volume'] = objref.volume
o['pitch'] = objref.pitch
o['attenuation'] = objref.attenuation
o['play_on_start'] = objref.arm_play_on_start
self.output['speaker_datas'].append(o)
def make_default_mat(self, mat_name, mat_objs, is_particle=False):
if mat_name in bpy.data.materials:
return
mat = bpy.data.materials.new(name=mat_name)
# if default_exists:
# mat.arm_cached = True
if is_particle:
mat.arm_particle_flag = True
# Empty material roughness
mat.use_nodes = True
for node in mat.node_tree.nodes:
if node.type == 'BSDF_PRINCIPLED':
node.inputs[7].default_value = 0.25
o = {}
o['name'] = mat.name
o['contexts'] = []
mat_users = dict()
mat_users[mat] = mat_objs
mat_armusers = dict()
mat_armusers[mat] = [o]
make_material.parse(mat, o, mat_users, mat_armusers)
self.output['material_datas'].append(o)
bpy.data.materials.remove(mat)
rpdat = arm.utils.get_rp()
if not rpdat.arm_culling:
o['override_context'] = {}
o['override_context']['cull_mode'] = 'none'
def signature_traverse(self, node, sign):
sign += node.type + '-'
if node.type == 'TEX_IMAGE' and node.image is not None:
sign += node.image.filepath + '-'
for inp in node.inputs:
if inp.is_linked:
sign = self.signature_traverse(inp.links[0].from_node, sign)
else:
# Unconnected socket
if not hasattr(inp, 'default_value'):
sign += 'o'
elif inp.type == 'RGB' or inp.type == 'RGBA' or inp.type == 'VECTOR':
sign += str(inp.default_value[0])
sign += str(inp.default_value[1])
sign += str(inp.default_value[2])
else:
sign += str(inp.default_value)
return sign
def get_signature(self, mat):
nodes = mat.node_tree.nodes
output_node = cycles.node_by_type(nodes, 'OUTPUT_MATERIAL')
if output_node is not None:
sign = self.signature_traverse(output_node, '')
return sign
return None
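# A signature is a flat concatenation of node types, image paths and
# unconnected socket defaults, e.g. 'OUTPUT_MATERIAL-BSDF_PRINCIPLED-0.5-...'
# (illustrative only). export_materials() compares it with the stored
# signature to decide whether arm_cached must be reset for the material.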
def export_materials(self):
wrd = bpy.data.worlds['Arm']
# Keep materials with fake user
for material in bpy.data.materials:
if material.use_fake_user and material not in self.material_array:
self.material_array.append(material)
# Ensure the same order for merging materials
self.material_array.sort(key=lambda x: x.name)
if wrd.arm_batch_materials:
mat_users = self.material_to_object_dict
mat_armusers = self.material_to_arm_object_dict
mat_batch.build(self.material_array, mat_users, mat_armusers)
transluc_used = False
overlays_used = False
blending_used = False
decals_used = False
# sss_used = False
for material in self.material_array:
# If the material is unlinked, material becomes None
if material is None:
continue
if not material.use_nodes:
material.use_nodes = True
# Recache material
signature = self.get_signature(material)
if signature != material.signature:
material.arm_cached = False
if signature is not None:
material.signature = signature
o = {}
o['name'] = arm.utils.asset_name(material)
if material.arm_skip_context != '':
o['skip_context'] = material.arm_skip_context
rpdat = arm.utils.get_rp()
if material.arm_two_sided or not rpdat.arm_culling:
o['override_context'] = {}
o['override_context']['cull_mode'] = 'none'
elif material.arm_cull_mode != 'clockwise':
o['override_context'] = {}
o['override_context']['cull_mode'] = material.arm_cull_mode
o['contexts'] = []
mat_users = self.material_to_object_dict
mat_armusers = self.material_to_arm_object_dict
sd, rpasses = make_material.parse(material, o, mat_users, mat_armusers)
# Attach MovieTexture
for con in o['contexts']:
for tex in con['bind_textures']:
if 'source' in tex and tex['source'] == 'movie':
trait = {}
trait['type'] = 'Script'
trait['class_name'] = 'armory.trait.internal.MovieTexture'
ArmoryExporter.import_traits.append(trait['class_name'])
trait['parameters'] = ['"' + tex['file'] + '"']
for user in mat_armusers[material]:
user['traits'].append(trait)
if 'translucent' in rpasses:
transluc_used = True
if 'overlay' in rpasses:
overlays_used = True
if 'mesh' in rpasses and material.arm_blending:
blending_used = True
if 'decal' in rpasses:
decals_used = True
uv_export = False
tang_export = False
vcol_export = False
vs_str = ''
for con in sd['contexts']:
for elem in con['vertex_elements']:
if len(vs_str) > 0:
vs_str += ','
vs_str += elem['name']
if elem['name'] == 'tang':
tang_export = True
elif elem['name'] == 'tex':
uv_export = True
elif elem['name'] == 'col':
vcol_export = True
for con in o['contexts']: # TODO: blend context
if con['name'] == 'mesh' and material.arm_blending:
con['name'] = 'blend'
if (material.export_tangents != tang_export) or \
(material.export_uvs != uv_export) or \
(material.export_vcols != vcol_export):
material.export_uvs = uv_export
material.export_vcols = vcol_export
material.export_tangents = tang_export
if material in self.material_to_object_dict:
mat_users = self.material_to_object_dict[material]
for ob in mat_users:
ob.data.arm_cached = False
self.output['material_datas'].append(o)
material.arm_cached = True
# Auto-enable render-path features
rebuild_rp = False
rpdat = arm.utils.get_rp()
if rpdat.rp_translucency_state == 'Auto' and rpdat.rp_translucency != transluc_used:
rpdat.rp_translucency = transluc_used
rebuild_rp = True
if rpdat.rp_overlays_state == 'Auto' and rpdat.rp_overlays != overlays_used:
rpdat.rp_overlays = overlays_used
rebuild_rp = True
if rpdat.rp_blending_state == 'Auto' and rpdat.rp_blending != blending_used:
rpdat.rp_blending = blending_used
rebuild_rp = True
if rpdat.rp_decals_state == 'Auto' and rpdat.rp_decals != decals_used:
rpdat.rp_decals = decals_used
rebuild_rp = True
# if rpdat.rp_sss_state == 'Auto' and rpdat.rp_sss != sss_used:
# rpdat.rp_sss = sss_used
# rebuild_rp = True
if rebuild_rp:
make_renderpath.build()
def export_particle_systems(self):
if len(self.particle_system_array) > 0:
self.output['particle_datas'] = []
for particleRef in self.particle_system_array.items():
psettings = particleRef[0]
if psettings is None:
continue
if psettings.instance_object is None or psettings.render_type != 'OBJECT':
continue
out_particlesys = {
'name': particleRef[1]["structName"],
'type': 0 if psettings.type == 'EMITTER' else 1, # HAIR
'loop': psettings.arm_loop,
# Emission
'count': int(psettings.count * psettings.arm_count_mult),
'frame_start': int(psettings.frame_start),
'frame_end': int(psettings.frame_end),
'lifetime': psettings.lifetime,
'lifetime_random': psettings.lifetime_random,
'emit_from': 1 if psettings.emit_from == 'VOLUME' else 0, # VERT, FACE
# Velocity
# 'normal_factor': psettings.normal_factor,
# 'tangent_factor': psettings.tangent_factor,
# 'tangent_phase': psettings.tangent_phase,
'object_align_factor': (
psettings.object_align_factor[0],
psettings.object_align_factor[1],
psettings.object_align_factor[2]
),
# 'object_factor': psettings.object_factor,
'factor_random': psettings.factor_random,
# Physics
'physics_type': 1 if psettings.physics_type == 'NEWTON' else 0,
'particle_size': psettings.particle_size,
'size_random': psettings.size_random,
'mass': psettings.mass,
# Render
'instance_object': arm.utils.asset_name(psettings.instance_object),
# Field weights
'weight_gravity': psettings.effector_weights.gravity
}
self.object_to_arm_object_dict[psettings.instance_object]['is_particle'] = True
self.output['particle_datas'].append(out_particlesys)
def export_tilesheets(self):
wrd = bpy.data.worlds['Arm']
if len(wrd.arm_tilesheetlist) > 0:
self.output['tilesheet_datas'] = []
for ts in wrd.arm_tilesheetlist:
o = {}
o['name'] = ts.name
o['tilesx'] = ts.tilesx_prop
o['tilesy'] = ts.tilesy_prop
o['framerate'] = ts.framerate_prop
o['actions'] = []
for tsa in ts.arm_tilesheetactionlist:
ao = {}
ao['name'] = tsa.name
ao['start'] = tsa.start_prop
ao['end'] = tsa.end_prop
ao['loop'] = tsa.loop_prop
o['actions'].append(ao)
self.output['tilesheet_datas'].append(o)
def export_world(self):
"""Exports the world of the current scene."""
world = self.scene.world
if world is not None:
world_name = arm.utils.safestr(world.name)
if world_name not in self.world_array:
self.world_array.append(world_name)
out_world = {'name': world_name}
self.post_export_world(world, out_world)
self.output['world_datas'].append(out_world)
elif arm.utils.get_rp().rp_background == 'World':
log.warn(f'Scene "{self.scene.name}" is missing a world, some render targets will not be cleared')
def export_objects(self, scene):
"""Exports all supported blender objects.
References to objects are dictionaries storing the type and
name of that object.
Currently supported:
- Mesh
- Light
- Camera
- Speaker
- Light Probe
"""
if not ArmoryExporter.option_mesh_only:
self.output['light_datas'] = []
self.output['camera_datas'] = []
self.output['speaker_datas'] = []
for light_ref in self.light_array.items():
self.export_light(light_ref)
for camera_ref in self.camera_array.items():
self.export_camera(camera_ref)
# Keep sounds with fake user
for sound in bpy.data.sounds:
if sound.use_fake_user:
assets.add(arm.utils.asset_path(sound.filepath))
for speaker_ref in self.speaker_array.items():
self.export_speaker(speaker_ref)
if bpy.data.lightprobes:
self.output['probe_datas'] = []
for lightprobe_object in self.probe_array.items():
self.export_probe(lightprobe_object)
self.output['mesh_datas'] = []
for mesh_ref in self.mesh_array.items():
self.export_mesh(mesh_ref)
def execute(self):
"""Exports the scene."""
profile_time = time.time()
print('Exporting ' + arm.utils.asset_name(self.scene))
if self.compress_enabled:
print('Scene data will be compressed which might take a while.')
current_frame, current_subframe = self.scene.frame_current, self.scene.frame_subframe
scene_objects: List[bpy.types.Object] = self.scene.collection.all_objects.values()
# bobject => blender object
for bobject in scene_objects:
# Initialize object export data (map objects to game objects)
out_object: Dict[str, Any] = {'traits': []}
self.object_to_arm_object_dict[bobject] = out_object
# Process
# Skip objects that have a parent because children are
# processed recursively
if not bobject.parent:
self.process_bobject(bobject)
# Softbody needs connected triangles, use optimized
# geometry export
for mod in bobject.modifiers:
if mod.type == 'CLOTH' or mod.type == 'SOFT_BODY':
ArmoryExporter.optimize_enabled = True
self.process_skinned_meshes()
self.output['name'] = arm.utils.safestr(self.scene.name)
if self.filepath.endswith('.lz4'):
self.output['name'] += '.lz4'
elif not bpy.data.worlds['Arm'].arm_minimize:
self.output['name'] += '.json'
# Create unique material variants for skinning, tilesheets and particles
matvars, matslots = self.create_material_variants(self.scene)
# Auto-bones
wrd = bpy.data.worlds['Arm']
rpdat = arm.utils.get_rp()
if rpdat.arm_skin_max_bones_auto:
max_bones = 8
for armature in bpy.data.armatures:
if max_bones < len(armature.bones):
max_bones = len(armature.bones)
rpdat.arm_skin_max_bones = max_bones
# Terrain
if self.scene.arm_terrain_object is not None:
assets.add_khafile_def('arm_terrain')
# Append trait
out_trait = {
'type': 'Script',
'class_name': 'armory.trait.internal.TerrainPhysics'
}
if 'traits' not in self.output:
self.output['traits']: List[Dict[str, str]] = []
self.output['traits'].append(out_trait)
ArmoryExporter.import_traits.append(out_trait['class_name'])
ArmoryExporter.export_physics = True
# Export material
mat = self.scene.arm_terrain_object.children[0].data.materials[0]
self.material_array.append(mat)
# Terrain data
out_terrain = {
'name': 'Terrain',
'sectors_x': self.scene.arm_terrain_sectors[0],
'sectors_y': self.scene.arm_terrain_sectors[1],
'sector_size': self.scene.arm_terrain_sector_size,
'height_scale': self.scene.arm_terrain_height_scale,
'material_ref': mat.name
}
self.output['terrain_datas'] = [out_terrain]
self.output['terrain_ref'] = 'Terrain'
# Export objects
self.output['objects'] = []
for bobject in scene_objects:
# Skip objects that have a parent because children are
# exported recursively
if not bobject.parent:
self.export_object(bobject, self.scene)
# Export collections
if bpy.data.collections:
self.output['groups'] = []
for collection in bpy.data.collections:
if collection.name.startswith(('RigidBodyWorld', 'Trait|')):
continue
self.export_collection(collection)
if not ArmoryExporter.option_mesh_only:
if self.scene.camera is not None:
self.output['camera_ref'] = self.scene.camera.name
else:
if self.scene.name == arm.utils.get_project_scene_name():
log.warn(f'Scene "{self.scene.name}" is missing a camera')
self.output['material_datas'] = []
# Object with no material assigned in the scene
if len(self.default_material_objects) > 0:
self.make_default_mat('armdefault', self.default_material_objects)
if len(self.default_skin_material_objects) > 0:
self.make_default_mat('armdefaultskin', self.default_skin_material_objects)
if len(bpy.data.particles) > 0:
self.use_default_material_part()
if len(self.default_part_material_objects) > 0:
self.make_default_mat('armdefaultpart', self.default_part_material_objects, is_particle=True)
self.export_materials()
self.export_particle_systems()
self.output['world_datas'] = []
self.export_world()
self.export_tilesheets()
if self.scene.world is not None:
self.output['world_ref'] = self.scene.world.name
if self.scene.use_gravity:
self.output['gravity'] = [self.scene.gravity[0], self.scene.gravity[1], self.scene.gravity[2]]
rbw = self.scene.rigidbody_world
if rbw is not None:
weights = rbw.effector_weights
self.output['gravity'][0] *= weights.all * weights.gravity
self.output['gravity'][1] *= weights.all * weights.gravity
self.output['gravity'][2] *= weights.all * weights.gravity
else:
self.output['gravity'] = [0.0, 0.0, 0.0]
self.export_objects(self.scene)
# Create Viewport camera
if bpy.data.worlds['Arm'].arm_play_camera != 'Scene':
self.create_default_camera(is_viewport_camera=True)
self.camera_spawned = True
# No camera found
if not self.camera_spawned:
log.warn(f'Scene "{self.scene.name}" is missing a camera')
# No camera found, create a default one
if (len(self.output['camera_datas']) == 0 or len(bpy.data.cameras) == 0) or not self.camera_spawned:
self.create_default_camera()
self.export_scene_traits()
self.export_canvas_themes()
# Write embedded data references
if len(assets.embedded_data) > 0:
self.output['embedded_datas'] = []
for file in assets.embedded_data:
self.output['embedded_datas'].append(file)
# Write scene file
arm.utils.write_arm(self.filepath, self.output)
# Remove created material variants
for slot in matslots: # Set back to original material
orig_mat = bpy.data.materials[slot.material.name[:-8]] # _armskin, _armpart, _armtile
orig_mat.export_uvs = slot.material.export_uvs
orig_mat.export_vcols = slot.material.export_vcols
orig_mat.export_tangents = slot.material.export_tangents
orig_mat.arm_cached = slot.material.arm_cached
slot.material = orig_mat
for mat in matvars:
bpy.data.materials.remove(mat, do_unlink=True)
# Restore frame
if self.scene.frame_current != current_frame:
self.scene.frame_set(current_frame, subframe=current_subframe)
print('Scene exported in {:0.3f}s'.format(time.time() - profile_time))
def create_default_camera(self, is_viewport_camera=False):
"""Creates the default camera and adds a WalkNavigation trait to it."""
out_camera = {
'name': 'DefaultCamera',
'near_plane': 0.1,
'far_plane': 100.0,
'fov': 0.85,
'frustum_culling': True,
'clear_color': self.get_camera_clear_color()
}
# Set viewport camera projection
if is_viewport_camera:
proj, is_persp = self.get_viewport_projection_matrix()
if proj is not None:
if is_persp:
self.extract_projection(out_camera, proj, with_planes=False)
else:
self.extract_ortho(out_camera, proj)
self.output['camera_datas'].append(out_camera)
out_object = {
'name': 'DefaultCamera',
'type': 'camera_object',
'data_ref': 'DefaultCamera',
'material_refs': [],
'transform': {}
}
viewport_matrix = self.get_viewport_view_matrix()
if viewport_matrix is not None:
out_object['transform']['values'] = ArmoryExporter.write_matrix(viewport_matrix.inverted_safe())
out_object['local_only'] = True
else:
out_object['transform']['values'] = [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]
# Add WalkNavigation trait
trait = {
'type': 'Script',
'class_name': 'armory.trait.WalkNavigation'
}
out_object['traits'] = [trait]
ArmoryExporter.import_traits.append(trait['class_name'])
self.output['objects'].append(out_object)
self.output['camera_ref'] = 'DefaultCamera'
@staticmethod
def get_export_tangents(mesh):
for material in mesh.materials:
if material is not None and material.export_tangents:
return True
return False
@staticmethod
def get_export_vcols(mesh):
for material in mesh.materials:
if material is not None and material.export_vcols:
return True
return False
@staticmethod
def get_export_uvs(mesh):
for material in mesh.materials:
if material is not None and material.export_uvs:
return True
return False
@staticmethod
def object_process_instancing(refs, scale_pos):
instanced_type = 0
instanced_data = None
for bobject in refs:
inst = bobject.arm_instanced
if inst != 'Off':
if inst == 'Loc':
instanced_type = 1
instanced_data = [0.0, 0.0, 0.0] # Include parent
elif inst == 'Loc + Rot':
instanced_type = 2
instanced_data = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
elif inst == 'Loc + Scale':
instanced_type = 3
instanced_data = [0.0, 0.0, 0.0, 1.0, 1.0, 1.0]
elif inst == 'Loc + Rot + Scale':
instanced_type = 4
instanced_data = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0]
for child in bobject.children:
if not child.arm_export or child.hide_render:
continue
if 'Loc' in inst:
loc = child.matrix_local.to_translation() # Without parent matrix
instanced_data.append(loc.x / scale_pos)
instanced_data.append(loc.y / scale_pos)
instanced_data.append(loc.z / scale_pos)
if 'Rot' in inst:
rot = child.matrix_local.to_euler()
instanced_data.append(rot.x)
instanced_data.append(rot.y)
instanced_data.append(rot.z)
if 'Scale' in inst:
scale = child.matrix_local.to_scale()
instanced_data.append(scale.x)
instanced_data.append(scale.y)
instanced_data.append(scale.z)
break
# Instance render collections with same children?
# elif bobject.instance_type == 'GROUP' and bobject.instance_collection is not None:
# instanced_type = 1
# instanced_data = []
# for child in bpy.data.collections[bobject.instance_collection].objects:
# loc = child.matrix_local.to_translation()
# instanced_data.append(loc.x)
# instanced_data.append(loc.y)
# instanced_data.append(loc.z)
# break
return instanced_type, instanced_data
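# Illustrative layout of the returned offsets: the leading zeros represent the
# parent object itself, then each child adds 3 floats for type 1 (loc), 6 for
# type 2 (loc + rot), 6 for type 3 (loc + scale) and 9 for type 4
# (loc + rot + scale); locations are divided by scale_pos to match the
# quantized mesh coordinates.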
@staticmethod
def rigid_body_static(rb):
return (not rb.enabled and not rb.kinematic) or (rb.type == 'PASSIVE' and not rb.kinematic)
def post_export_object(self, bobject: bpy.types.Object, o, type):
# Export traits
self.export_traits(bobject, o)
wrd = bpy.data.worlds['Arm']
phys_enabled = wrd.arm_physics != 'Disabled'
phys_pkg = 'bullet' if wrd.arm_physics_engine == 'Bullet' else 'oimo'
# Rigid body trait
if bobject.rigid_body is not None and phys_enabled:
ArmoryExporter.export_physics = True
rb = bobject.rigid_body
shape = 0 # BOX
if rb.collision_shape == 'SPHERE':
shape = 1
elif rb.collision_shape == 'CONVEX_HULL':
shape = 2
elif rb.collision_shape == 'MESH':
shape = 3
elif rb.collision_shape == 'CONE':
shape = 4
elif rb.collision_shape == 'CYLINDER':
shape = 5
elif rb.collision_shape == 'CAPSULE':
shape = 6
body_mass = rb.mass
is_static = self.rigid_body_static(rb)
if is_static:
body_mass = 0
x = {}
x['type'] = 'Script'
x['class_name'] = 'armory.trait.physics.' + phys_pkg + '.RigidBody'
col_group = ''
for b in rb.collision_collections:
col_group = ('1' if b else '0') + col_group
col_mask = ''
for b in bobject.arm_rb_collision_filter_mask:
col_mask = ('1' if b else '0') + col_mask
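# Each boolean list is packed into a bit string and parsed with int(x, 2);
# since new bits are prepended, the first collision collection ends up in the
# least significant bit.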
x['parameters'] = [str(shape), str(body_mass), str(rb.friction), str(rb.restitution), str(int(col_group, 2)), str(int(col_mask, 2)) ]
lx = bobject.arm_rb_linear_factor[0]
ly = bobject.arm_rb_linear_factor[1]
lz = bobject.arm_rb_linear_factor[2]
ax = bobject.arm_rb_angular_factor[0]
ay = bobject.arm_rb_angular_factor[1]
az = bobject.arm_rb_angular_factor[2]
if bobject.lock_location[0]:
lx = 0
if bobject.lock_location[1]:
ly = 0
if bobject.lock_location[2]:
lz = 0
if bobject.lock_rotation[0]:
ax = 0
if bobject.lock_rotation[1]:
ay = 0
if bobject.lock_rotation[2]:
az = 0
col_margin = str(rb.collision_margin) if rb.use_margin else '0.0'
if rb.use_deactivation:
deact_lv = str(rb.deactivate_linear_velocity)
deact_av = str(rb.deactivate_angular_velocity)
deact_time = str(bobject.arm_rb_deactivation_time)
else:
deact_lv = '0.0'
deact_av = '0.0'
deact_time = '0.0'
body_params = '[{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}, {10}, {11}]'.format(
str(rb.linear_damping),
str(rb.angular_damping),
str(lx), str(ly), str(lz),
str(ax), str(ay), str(az),
col_margin,
deact_lv, deact_av, deact_time
)
body_flags = '[{0}, {1}, {2}, {3}, {4}]'.format(
str(rb.kinematic).lower(),
str(bobject.arm_rb_trigger).lower(),
str(bobject.arm_rb_ccd).lower(),
str(is_static).lower(),
str(rb.use_deactivation).lower()
)
x['parameters'].append(body_params)
x['parameters'].append(body_flags)
o['traits'].append(x)
# Phys traits
if phys_enabled:
for modifier in bobject.modifiers:
if modifier.type == 'CLOTH' or modifier.type == 'SOFT_BODY':
self.add_softbody_mod(o, bobject, modifier)
elif modifier.type == 'HOOK':
self.add_hook_mod(o, bobject, modifier.object.name, modifier.vertex_group)
# Rigid body constraint
rbc = bobject.rigid_body_constraint
if rbc is not None and rbc.enabled:
self.add_rigidbody_constraint(o, bobject, rbc)
# Camera traits
if type is NodeType.CAMERA:
# Viewport camera enabled, attach navigation to active camera
if self.scene.camera is not None and bobject.name == self.scene.camera.name and bpy.data.worlds['Arm'].arm_play_camera != 'Scene':
navigation_trait = {}
navigation_trait['type'] = 'Script'
navigation_trait['class_name'] = 'armory.trait.WalkNavigation'
o['traits'].append(navigation_trait)
# Map objects to materials, can be used in later stages
for i in range(len(bobject.material_slots)):
mat = self.slot_to_material(bobject, bobject.material_slots[i])
if mat in self.material_to_object_dict:
self.material_to_object_dict[mat].append(bobject)
self.material_to_arm_object_dict[mat].append(o)
else:
self.material_to_object_dict[mat] = [bobject]
self.material_to_arm_object_dict[mat] = [o]
# Add UniformsManager trait
if type is NodeType.MESH:
uniformManager = {}
uniformManager['type'] = 'Script'
uniformManager['class_name'] = 'armory.trait.internal.UniformsManager'
o['traits'].append(uniformManager)
# Export constraints
if len(bobject.constraints) > 0:
o['constraints'] = []
self.add_constraints(bobject, o)
for x in o['traits']:
ArmoryExporter.import_traits.append(x['class_name'])
@staticmethod
def add_constraints(bobject, o, bone=False):
for constraint in bobject.constraints:
if constraint.mute:
continue
out_constraint = {'name': constraint.name, 'type': constraint.type}
if bone:
out_constraint['bone'] = bobject.name
if hasattr(constraint, 'target') and constraint.target is not None:
if constraint.type == 'COPY_LOCATION':
out_constraint['target'] = constraint.target.name
out_constraint['use_x'] = constraint.use_x
out_constraint['use_y'] = constraint.use_y
out_constraint['use_z'] = constraint.use_z
out_constraint['invert_x'] = constraint.invert_x
out_constraint['invert_y'] = constraint.invert_y
out_constraint['invert_z'] = constraint.invert_z
out_constraint['use_offset'] = constraint.use_offset
out_constraint['influence'] = constraint.influence
elif constraint.type == 'CHILD_OF':
out_constraint['target'] = constraint.target.name
out_constraint['influence'] = constraint.influence
o['constraints'].append(out_constraint)
def export_traits(self, bobject: Union[bpy.types.Scene, bpy.types.Object], o):
if not hasattr(bobject, 'arm_traitlist'):
return
for traitlistItem in bobject.arm_traitlist:
# Do not export disabled traits but still export those
# with fake user enabled so that nodes like `TraitNode`
# still work
if not traitlistItem.enabled_prop and not traitlistItem.fake_user:
continue
out_trait = {}
if traitlistItem.type_prop == 'Logic Nodes' and traitlistItem.node_tree_prop is not None and traitlistItem.node_tree_prop.name != '':
group_name = arm.utils.safesrc(traitlistItem.node_tree_prop.name[0].upper() + traitlistItem.node_tree_prop.name[1:])
out_trait['type'] = 'Script'
out_trait['class_name'] = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_package) + '.node.' + group_name
elif traitlistItem.type_prop == 'WebAssembly':
wpath = os.path.join(arm.utils.get_fp(), 'Bundled', traitlistItem.webassembly_prop + '.wasm')
if not os.path.exists(wpath):
log.warn(f'Wasm "{traitlistItem.webassembly_prop}" not found, skipping')
continue
out_trait['type'] = 'Script'
out_trait['class_name'] = 'armory.trait.internal.WasmScript'
out_trait['parameters'] = ["'" + traitlistItem.webassembly_prop + "'"]
elif traitlistItem.type_prop == 'UI Canvas':
cpath = os.path.join(arm.utils.get_fp(), 'Bundled', 'canvas', traitlistItem.canvas_name_prop + '.json')
if not os.path.exists(cpath):
log.warn(f'Scene "{self.scene.name}" - Object "{bobject.name}" - Referenced canvas "{traitlistItem.canvas_name_prop}" not found, skipping')
continue
ArmoryExporter.export_ui = True
out_trait['type'] = 'Script'
out_trait['class_name'] = 'armory.trait.internal.CanvasScript'
out_trait['parameters'] = ["'" + traitlistItem.canvas_name_prop + "'"]
# Read file list and add canvas assets
assetpath = os.path.join(arm.utils.get_fp(), 'Bundled', 'canvas', traitlistItem.canvas_name_prop + '.files')
if os.path.exists(assetpath):
with open(assetpath) as f:
file_list = f.read().splitlines()
for asset in file_list:
# Relative to the root/Bundled/canvas path
asset = asset[6:] # Strip ../../ to start in project root
assets.add(asset)
# Haxe/Bundled Script
else:
# Empty class name, skip
if traitlistItem.class_name_prop == '':
continue
out_trait['type'] = 'Script'
if traitlistItem.type_prop == 'Bundled Script':
trait_prefix = 'armory.trait.'
# TODO: temporary, export single mesh navmesh as obj
if traitlistItem.class_name_prop == 'NavMesh' and bobject.type == 'MESH' and bpy.data.worlds['Arm'].arm_navigation != 'Disabled':
ArmoryExporter.export_navigation = True
nav_path = os.path.join(arm.utils.get_fp_build(), 'compiled', 'Assets', 'navigation')
if not os.path.exists(nav_path):
os.makedirs(nav_path)
nav_filepath = os.path.join(nav_path, 'nav_' + bobject.data.name + '.arm')
assets.add(nav_filepath)
# TODO: Implement cache
# if not os.path.isfile(nav_filepath):
# override = {'selected_objects': [bobject]}
# bobject.scale.y *= -1
# mesh = obj.data
# for face in mesh.faces:
# face.v.reverse()
# bpy.ops.export_scene.obj(override, use_selection=True, filepath=nav_filepath, check_existing=False, use_normals=False, use_uvs=False, use_materials=False)
# bobject.scale.y *= -1
armature = bobject.find_armature()
apply_modifiers = not armature
bobject_eval = bobject.evaluated_get(self.depsgraph) if apply_modifiers else bobject
export_mesh = bobject_eval.to_mesh()
with open(nav_filepath, 'w') as f:
for v in export_mesh.vertices:
f.write("v %.4f " % (v.co[0] * bobject_eval.scale.x))
f.write("%.4f " % (v.co[2] * bobject_eval.scale.z))
f.write("%.4f\n" % (v.co[1] * bobject_eval.scale.y)) # Flipped
for p in export_mesh.polygons:
f.write("f")
# Flipped normals
for i in reversed(p.vertices):
f.write(" %d" % (i + 1))
f.write("\n")
# Haxe
else:
trait_prefix = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_package) + '.'
hxfile = os.path.join('Sources', (trait_prefix + traitlistItem.class_name_prop).replace('.', '/') + '.hx')
if not os.path.exists(os.path.join(arm.utils.get_fp(), hxfile)):
# TODO: Halt build here once this check is tested
print(f'Armory Error: Scene "{self.scene.name}" - Object "{bobject.name}": Referenced trait file "{hxfile}" not found')
out_trait['class_name'] = trait_prefix + traitlistItem.class_name_prop
# Export trait properties
if traitlistItem.arm_traitpropslist:
out_trait['props'] = []
for trait_prop in traitlistItem.arm_traitpropslist:
out_trait['props'].append(trait_prop.name)
out_trait['props'].append(trait_prop.type)
if trait_prop.type.endswith("Object"):
value = arm.utils.asset_name(trait_prop.value_object)
else:
value = trait_prop.get_value()
out_trait['props'].append(value)
if not traitlistItem.enabled_prop:
# If we're here, fake_user is enabled, otherwise we
# would have skipped this trait already
ArmoryExporter.import_traits.append(out_trait['class_name'])
else:
o['traits'].append(out_trait)
def export_scene_traits(self) -> None:
"""Exports the traits of the scene and adds some internal traits
to the scene depending on the exporter settings.
"""
wrd = bpy.data.worlds['Arm']
if wrd.arm_physics != 'Disabled' and ArmoryExporter.export_physics:
if 'traits' not in self.output:
self.output['traits'] = []
phys_pkg = 'bullet' if wrd.arm_physics_engine == 'Bullet' else 'oimo'
out_trait = {
'type': 'Script',
'class_name': 'armory.trait.physics.' + phys_pkg + '.PhysicsWorld'
}
rbw = self.scene.rigidbody_world
if rbw is not None and rbw.enabled:
out_trait['parameters'] = [str(rbw.time_scale), str(rbw.substeps_per_frame), str(rbw.solver_iterations)]
self.output['traits'].append(out_trait)
if wrd.arm_navigation != 'Disabled' and ArmoryExporter.export_navigation:
if 'traits' not in self.output:
self.output['traits'] = []
out_trait = {'type': 'Script', 'class_name': 'armory.trait.navigation.Navigation'}
self.output['traits'].append(out_trait)
if wrd.arm_debug_console:
if 'traits' not in self.output:
self.output['traits'] = []
ArmoryExporter.export_ui = True
# Position
debug_console_pos_type = 2
if wrd.arm_debug_console_position == 'Left':
debug_console_pos_type = 0
elif wrd.arm_debug_console_position == 'Center':
debug_console_pos_type = 1
else:
debug_console_pos_type = 2
# Parameters
out_trait = {
'type': 'Script',
'class_name': 'armory.trait.internal.DebugConsole',
'parameters': [
str(arm.utils.get_ui_scale()),
str(wrd.arm_debug_console_scale),
str(debug_console_pos_type),
str(int(wrd.arm_debug_console_visible)),
str(int(wrd.arm_debug_console_trace_pos)),
str(int(arm.utils.get_debug_console_visible_sc())),
str(int(arm.utils.get_debug_console_scale_in_sc())),
str(int(arm.utils.get_debug_console_scale_out_sc()))
]
}
self.output['traits'].append(out_trait)
if arm.utils.is_livepatch_enabled():
if 'traits' not in self.output:
self.output['traits'] = []
out_trait = {'type': 'Script', 'class_name': 'armory.trait.internal.LivePatch'}
self.output['traits'].append(out_trait)
if len(self.scene.arm_traitlist) > 0:
if 'traits' not in self.output:
self.output['traits'] = []
self.export_traits(self.scene, self.output)
if 'traits' in self.output:
for out_trait in self.output['traits']:
ArmoryExporter.import_traits.append(out_trait['class_name'])
@staticmethod
def export_canvas_themes():
path_themes = os.path.join(arm.utils.get_fp(), 'Bundled', 'canvas')
file_theme = os.path.join(path_themes, "_themes.json")
# If there is a canvas but no _themes.json, create it so that
# CanvasScript.hx works
if os.path.exists(path_themes) and not os.path.exists(file_theme):
with open(file_theme, "w+"):
pass
assets.add(file_theme)
@staticmethod
def add_softbody_mod(o, bobject: bpy.types.Object, modifier: Union[bpy.types.ClothModifier, bpy.types.SoftBodyModifier]):
"""Adds a softbody trait to the given object based on the given
softbody/cloth modifier.
"""
ArmoryExporter.export_physics = True
assets.add_khafile_def('arm_physics_soft')
phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo'
out_trait = {'type': 'Script', 'class_name': 'armory.trait.physics.' + phys_pkg + '.SoftBody'}
# ClothModifier
if modifier.type == 'CLOTH':
bend = modifier.settings.bending_stiffness
soft_type = 0
# SoftBodyModifier
elif modifier.type == 'SOFT_BODY':
bend = (modifier.settings.bend + 1.0) * 10
soft_type = 1
else:
# Wrong modifier type
return
out_trait['parameters'] = [str(soft_type), str(bend), str(modifier.settings.mass), str(bobject.arm_soft_body_margin)]
o['traits'].append(out_trait)
if soft_type == 0:
ArmoryExporter.add_hook_mod(o, bobject, '', modifier.settings.vertex_group_mass)
@staticmethod
def add_hook_mod(o, bobject: bpy.types.Object, target_name, group_name):
ArmoryExporter.export_physics = True
phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo'
out_trait = {'type': 'Script', 'class_name': 'armory.trait.physics.' + phys_pkg + '.PhysicsHook'}
verts = []
if group_name != '':
group = bobject.vertex_groups[group_name].index
for v in bobject.data.vertices:
for g in v.groups:
if g.group == group:
verts.append(v.co.x)
verts.append(v.co.y)
verts.append(v.co.z)
out_trait['parameters'] = [f"'{target_name}'", str(verts)]
o['traits'].append(out_trait)
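# The PhysicsHook trait receives the hooked target's name and a flat
# [x, y, z, ...] list of vertex coordinates taken from the given vertex group
# (empty if no group is set).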
@staticmethod
def add_rigidbody_constraint(o, bobject, rbc):
rb1 = rbc.object1
rb2 = rbc.object2
if rb1 is None or rb2 is None:
return
if rbc.type == "MOTOR":
return
ArmoryExporter.export_physics = True
phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo'
breaking_threshold = rbc.breaking_threshold if rbc.use_breaking else 0
trait = {
'type': 'Script',
'class_name': 'armory.trait.physics.' + phys_pkg + '.PhysicsConstraintExportHelper',
'parameters': [
"'" + rb1.name + "'",
"'" + rb2.name + "'",
str(rbc.disable_collisions).lower(),
str(breaking_threshold),
str(bobject.arm_relative_physics_constraint).lower()
]
}
if rbc.type == "FIXED":
trait['parameters'].insert(2,str(0))
if rbc.type == "POINT":
trait['parameters'].insert(2,str(1))
if rbc.type == "GENERIC":
limits = [
1 if rbc.use_limit_lin_x else 0,
rbc.limit_lin_x_lower,
rbc.limit_lin_x_upper,
1 if rbc.use_limit_lin_y else 0,
rbc.limit_lin_y_lower,
rbc.limit_lin_y_upper,
1 if rbc.use_limit_lin_z else 0,
rbc.limit_lin_z_lower,
rbc.limit_lin_z_upper,
1 if rbc.use_limit_ang_x else 0,
rbc.limit_ang_x_lower,
rbc.limit_ang_x_upper,
1 if rbc.use_limit_ang_y else 0,
rbc.limit_ang_y_lower,
rbc.limit_ang_y_upper,
1 if rbc.use_limit_ang_z else 0,
rbc.limit_ang_z_lower,
rbc.limit_ang_z_upper
]
trait['parameters'].insert(2,str(5))
trait['parameters'].append(str(limits))
if rbc.type == "GENERIC_SPRING":
limits = [
1 if rbc.use_limit_lin_x else 0,
rbc.limit_lin_x_lower,
rbc.limit_lin_x_upper,
1 if rbc.use_limit_lin_y else 0,
rbc.limit_lin_y_lower,
rbc.limit_lin_y_upper,
1 if rbc.use_limit_lin_z else 0,
rbc.limit_lin_z_lower,
rbc.limit_lin_z_upper,
1 if rbc.use_limit_ang_x else 0,
rbc.limit_ang_x_lower,
rbc.limit_ang_x_upper,
1 if rbc.use_limit_ang_y else 0,
rbc.limit_ang_y_lower,
rbc.limit_ang_y_upper,
1 if rbc.use_limit_ang_z else 0,
rbc.limit_ang_z_lower,
rbc.limit_ang_z_upper,
1 if rbc.use_spring_x else 0,
rbc.spring_stiffness_x,
rbc.spring_damping_x,
1 if rbc.use_spring_y else 0,
rbc.spring_stiffness_y,
rbc.spring_damping_y,
1 if rbc.use_spring_z else 0,
rbc.spring_stiffness_z,
rbc.spring_damping_z,
1 if rbc.use_spring_ang_x else 0,
rbc.spring_stiffness_ang_x,
rbc.spring_damping_ang_x,
1 if rbc.use_spring_ang_y else 0,
rbc.spring_stiffness_ang_y,
rbc.spring_damping_ang_y,
1 if rbc.use_spring_ang_z else 0,
rbc.spring_stiffness_ang_z,
rbc.spring_damping_ang_z
]
trait['parameters'].insert(2,str(6))
trait['parameters'].append(str(limits))
if rbc.type == "HINGE":
limits = [
1 if rbc.use_limit_ang_z else 0,
rbc.limit_ang_z_lower,
rbc.limit_ang_z_upper
]
trait['parameters'].insert(2,str(2))
trait['parameters'].append(str(limits))
if rbc.type == "SLIDER":
limits = [
1 if rbc.use_limit_lin_x else 0,
rbc.limit_lin_x_lower,
rbc.limit_lin_x_upper
]
trait['parameters'].insert(2,str(3))
trait['parameters'].append(str(limits))
if rbc.type == "PISTON":
limits = [
1 if rbc.use_limit_lin_x else 0,
rbc.limit_lin_x_lower,
rbc.limit_lin_x_upper,
1 if rbc.use_limit_ang_x else 0,
rbc.limit_ang_x_lower,
rbc.limit_ang_x_upper
]
            trait['parameters'].insert(2, str(4))
trait['parameters'].append(str(limits))
o['traits'].append(trait)
@staticmethod
def post_export_world(world: bpy.types.World, out_world: Dict):
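        """Writes background color, sky/sun, environment map and probe
        data of the given world into `out_world`."""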
wrd = bpy.data.worlds['Arm']
bgcol = world.arm_envtex_color
        # No compositor used, store the background color in gamma (2.2) space
if '_LDR' in world.world_defs:
for i in range(0, 3):
bgcol[i] = pow(bgcol[i], 1.0 / 2.2)
out_world['background_color'] = arm.utils.color_to_int(bgcol)
if '_EnvSky' in world.world_defs:
# Sky data for probe
out_world['sun_direction'] = list(world.arm_envtex_sun_direction)
out_world['turbidity'] = world.arm_envtex_turbidity
out_world['ground_albedo'] = world.arm_envtex_ground_albedo
out_world['nishita_density'] = list(world.arm_nishita_density)
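        # JPEG environment textures cannot store HDR data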
disable_hdr = world.arm_envtex_name.endswith('.jpg')
if '_EnvTex' in world.world_defs or '_EnvImg' in world.world_defs:
out_world['envmap'] = world.arm_envtex_name.rsplit('.', 1)[0]
if disable_hdr:
out_world['envmap'] += '.jpg'
else:
out_world['envmap'] += '.hdr'
# Main probe
rpdat = arm.utils.get_rp()
solid_mat = rpdat.arm_material_model == 'Solid'
arm_irradiance = rpdat.arm_irradiance and not solid_mat
arm_radiance = rpdat.arm_radiance
radtex = world.arm_envtex_name.rsplit('.', 1)[0] # Remove file extension
irrsharmonics = world.arm_envtex_irr_name
num_mips = world.arm_envtex_num_mips
strength = world.arm_envtex_strength
        mobile_mat = rpdat.arm_material_model in ('Mobile', 'Solid')
if mobile_mat:
arm_radiance = False
out_probe = {'name': world.name}
if arm_irradiance:
ext = '' if wrd.arm_minimize else '.json'
out_probe['irradiance'] = irrsharmonics + '_irradiance' + ext
if arm_radiance:
out_probe['radiance'] = radtex + '_radiance'
out_probe['radiance'] += '.jpg' if disable_hdr else '.hdr'
out_probe['radiance_mipmaps'] = num_mips
out_probe['strength'] = strength
out_world['probe'] = out_probe
@staticmethod
def mod_equal(mod1: bpy.types.Modifier, mod2: bpy.types.Modifier) -> bool:
"""Compares whether the given modifiers are equal."""
# https://blender.stackexchange.com/questions/70629
        # The getattr() fallback values differ so that a property missing
        # on both modifiers does not compare as equal
        return all(getattr(mod1, prop, True) == getattr(mod2, prop, False)
                   for prop in mod1.bl_rna.properties.keys())

@staticmethod
def mod_equal_stack(obj1: bpy.types.Object, obj2: bpy.types.Object) -> bool:
"""Returns `True` if the given objects have the same modifiers."""
if len(obj1.modifiers) == 0 and len(obj2.modifiers) == 0:
return True
if len(obj1.modifiers) == 0 or len(obj2.modifiers) == 0:
return False
if len(obj1.modifiers) != len(obj2.modifiers):
return False
        return all(ArmoryExporter.mod_equal(m, obj2.modifiers[i])
                   for i, m in enumerate(obj1.modifiers))