diff --git a/.github/workflows/krom.yml b/.github/workflows/krom.yml index 832c8877..31b9b219 100644 --- a/.github/workflows/krom.yml +++ b/.github/workflows/krom.yml @@ -19,4 +19,4 @@ jobs: - name: Compile run: | cd armory_ci - nodejs_bin/node-linux64 Kha/make.js krom + nodejs_bin/node-linux64 Kha/make.js krom --shaderversion 330 diff --git a/Shaders/clear_color_depth_pass/clear_color_depth_pass.frag.glsl b/Shaders/clear_color_depth_pass/clear_color_depth_pass.frag.glsl new file mode 100644 index 00000000..e32026b8 --- /dev/null +++ b/Shaders/clear_color_depth_pass/clear_color_depth_pass.frag.glsl @@ -0,0 +1,9 @@ +#version 450 + +in vec2 texCoord; +out vec4 fragColor; + +void main() { + fragColor = vec4(0.0, 0.0, 0.0, 1.0); + gl_FragDepth = 1.0; +} diff --git a/Shaders/clear_color_depth_pass/clear_color_depth_pass.json b/Shaders/clear_color_depth_pass/clear_color_depth_pass.json new file mode 100644 index 00000000..5a7b5b93 --- /dev/null +++ b/Shaders/clear_color_depth_pass/clear_color_depth_pass.json @@ -0,0 +1,15 @@ +{ + "contexts": [ + { + "name": "clear_color_depth_pass", + "depth_write": true, + "compare_mode": "always", + "cull_mode": "none", + "links": [], + "texture_params": [], + "vertex_shader": "../include/pass.vert.glsl", + "fragment_shader": "clear_color_depth_pass.frag.glsl", + "color_attachments": ["_HDR"] + } + ] +} diff --git a/Shaders/clear_color_pass/clear_color_pass.frag.glsl b/Shaders/clear_color_pass/clear_color_pass.frag.glsl new file mode 100644 index 00000000..c8cf9ad8 --- /dev/null +++ b/Shaders/clear_color_pass/clear_color_pass.frag.glsl @@ -0,0 +1,8 @@ +#version 450 + +in vec2 texCoord; +out vec4 fragColor; + +void main() { + fragColor = vec4(0.0, 0.0, 0.0, 1.0); +} diff --git a/Shaders/clear_color_pass/clear_color_pass.json b/Shaders/clear_color_pass/clear_color_pass.json new file mode 100644 index 00000000..3c2c5582 --- /dev/null +++ b/Shaders/clear_color_pass/clear_color_pass.json @@ -0,0 +1,15 @@ +{ + "contexts": [ + { + "name": "clear_color_pass", + "depth_write": false, + "compare_mode": "always", + "cull_mode": "none", + "links": [], + "texture_params": [], + "vertex_shader": "../include/pass.vert.glsl", + "fragment_shader": "clear_color_pass.frag.glsl", + "color_attachments": ["_HDR"] + } + ] +} diff --git a/Shaders/clear_depth_pass/clear_depth_pass.frag.glsl b/Shaders/clear_depth_pass/clear_depth_pass.frag.glsl new file mode 100644 index 00000000..78cb125e --- /dev/null +++ b/Shaders/clear_depth_pass/clear_depth_pass.frag.glsl @@ -0,0 +1,8 @@ +#version 450 + +in vec2 texCoord; +out vec4 fragColor; + +void main() { + gl_FragDepth = 1.0; +} diff --git a/Shaders/clear_depth_pass/clear_depth_pass.json b/Shaders/clear_depth_pass/clear_depth_pass.json new file mode 100644 index 00000000..ca318696 --- /dev/null +++ b/Shaders/clear_depth_pass/clear_depth_pass.json @@ -0,0 +1,19 @@ +{ + "contexts": [ + { + "name": "clear_depth_pass", + "depth_write": true, + "color_write_red": false, + "color_write_green": false, + "color_write_blue": false, + "color_write_alpha": false, + "compare_mode": "always", + "cull_mode": "none", + "links": [], + "texture_params": [], + "vertex_shader": "../include/pass.vert.glsl", + "fragment_shader": "clear_depth_pass.frag.glsl", + "color_attachments": ["_HDR"] + } + ] +} diff --git a/Shaders/compositor_pass/compositor_pass.vert.glsl b/Shaders/compositor_pass/compositor_pass.vert.glsl index 8e3089f8..4e0a095d 100644 --- a/Shaders/compositor_pass/compositor_pass.vert.glsl +++ b/Shaders/compositor_pass/compositor_pass.vert.glsl @@ -18,7 
+18,7 @@ void main() { // Scale vertex attribute to [0-1] range const vec2 madd = vec2(0.5, 0.5); texCoord = pos.xy * madd + madd; - #ifdef HLSL + #ifdef _InvY texCoord.y = 1.0 - texCoord.y; #endif @@ -26,7 +26,7 @@ void main() { // #ifdef _CPos // NDC (at the back of cube) - // vec4 v = vec4(pos.xy, 1.0, 1.0); + // vec4 v = vec4(pos.xy, 1.0, 1.0); // v = vec4(invVP * v); // v.xyz /= v.w; // viewRay = v.xyz - eye; diff --git a/Shaders/deferred_light/deferred_light.frag.glsl b/Shaders/deferred_light/deferred_light.frag.glsl index 4e80e3c8..c30e3da4 100644 --- a/Shaders/deferred_light/deferred_light.frag.glsl +++ b/Shaders/deferred_light/deferred_light.frag.glsl @@ -36,7 +36,7 @@ uniform vec3 eyeSnap; uniform float envmapStrength; #ifdef _Irr -//!uniform vec4 shirr[7]; +uniform vec4 shirr[7]; #endif #ifdef _Brdf uniform sampler2D senvmapBrdf; @@ -165,7 +165,7 @@ out vec4 fragColor; void main() { vec4 g0 = textureLod(gbuffer0, texCoord, 0.0); // Normal.xy, metallic/roughness, matid - + vec3 n; n.z = 1.0 - abs(g0.x) - abs(g0.y); n.xy = n.z >= 0.0 ? g0.xy : octahedronWrap(g0.xy); @@ -196,7 +196,7 @@ void main() { // Envmap #ifdef _Irr - vec3 envl = shIrradiance(n); + vec3 envl = shIrradiance(n, shirr); #ifdef _EnvTex envl /= PI; #endif @@ -218,7 +218,7 @@ void main() { #endif envl.rgb *= albedo; - + #ifdef _Rad // Indirect specular envl.rgb += prefilteredColor * (f0 * envBRDF.x + envBRDF.y) * 1.5 * occspec.y; #else @@ -236,7 +236,7 @@ void main() { #else vec3 voxpos = p / voxelgiHalfExtents; #endif - + #ifndef _VoxelAONoTrace #ifdef _VoxelGITemporal envl.rgb *= 1.0 - (traceAO(voxpos, n, voxels) * voxelBlend + @@ -245,7 +245,7 @@ void main() { envl.rgb *= 1.0 - traceAO(voxpos, n, voxels); #endif #endif - + #endif fragColor.rgb = envl; @@ -259,7 +259,7 @@ void main() { #endif #ifdef _Emission - if (g0.a == 1.0) { + if (matid == 1) { fragColor.rgb += g1.rgb; // materialid albedo = vec3(0.0); } @@ -272,7 +272,7 @@ void main() { // for(uint step = 0; step < 400 && color.a < 0.99f; ++step) { // vec3 point = origin + 0.005 * step * direction; // color += (1.0f - color.a) * textureLod(voxels, point * 0.5 + 0.5, 0); - // } + // } // fragColor.rgb += color.rgb; // Show SSAO @@ -320,7 +320,7 @@ void main() { fragColor.rgb += sdirect * svisibility * sunCol; // #ifdef _Hair // Aniso -// if (g0.a == 2.0) { +// if (matid == 2) { // const float shinyParallel = roughness; // const float shinyPerpendicular = 0.1; // const vec3 v = vec3(0.99146, 0.11664, 0.05832); @@ -330,7 +330,7 @@ void main() { // #endif #ifdef _SSS - if (g0.a == 2.0) { + if (matid == 2) { #ifdef _CSM int casi, casindex; mat4 LWVP = getCascadeMat(distance(eye, p), casi, casindex); @@ -346,7 +346,7 @@ void main() { fragColor.rgb += sampleLight( p, n, v, dotNV, pointPos, pointCol, albedo, roughness, occspec.y, f0 #ifdef _ShadowMap - , 0, pointBias + , 0, pointBias, true #endif #ifdef _Spot , true, spotData.x, spotData.y, spotDir @@ -363,12 +363,12 @@ void main() { , gbufferD, invVP, eye #endif ); - + #ifdef _Spot #ifdef _SSS - if (g0.a == 2.0) fragColor.rgb += fragColor.rgb * SSSSTransmittance(LWVPSpot0, p, n, normalize(pointPos - p), lightPlane.y, shadowMapSpot[0]); + if (matid == 2) fragColor.rgb += fragColor.rgb * SSSSTransmittance(LWVPSpot0, p, n, normalize(pointPos - p), lightPlane.y, shadowMapSpot[0]); + #endif #endif - #endif #endif @@ -400,7 +400,7 @@ void main() { occspec.y, f0 #ifdef _ShadowMap - , li, lightsArray[li * 2].w // bias + , li, lightsArray[li * 2].w, true // bias #endif #ifdef _Spot , li > numPoints - 1 diff --git 
a/Shaders/deferred_light/deferred_light.json b/Shaders/deferred_light/deferred_light.json index e1152218..06d5a80f 100755 --- a/Shaders/deferred_light/deferred_light.json +++ b/Shaders/deferred_light/deferred_light.json @@ -240,7 +240,8 @@ } ], "vertex_shader": "../include/pass_viewray.vert.glsl", - "fragment_shader": "deferred_light.frag.glsl" + "fragment_shader": "deferred_light.frag.glsl", + "color_attachments": ["RGBA64"] } ] } diff --git a/Shaders/deferred_light_mobile/deferred_light.frag.glsl b/Shaders/deferred_light_mobile/deferred_light.frag.glsl index 3c82a4ce..698a8400 100644 --- a/Shaders/deferred_light_mobile/deferred_light.frag.glsl +++ b/Shaders/deferred_light_mobile/deferred_light.frag.glsl @@ -17,7 +17,7 @@ uniform sampler2D gbuffer1; uniform float envmapStrength; #ifdef _Irr -//!uniform vec4 shirr[7]; +uniform vec4 shirr[7]; #endif #ifdef _Brdf uniform sampler2D senvmapBrdf; @@ -96,7 +96,7 @@ out vec4 fragColor; void main() { vec4 g0 = textureLod(gbuffer0, texCoord, 0.0); // Normal.xy, metallic/roughness, depth - + vec3 n; n.z = 1.0 - abs(g0.x) - abs(g0.y); n.xy = n.z >= 0.0 ? g0.xy : octahedronWrap(g0.xy); @@ -123,7 +123,7 @@ void main() { // Envmap #ifdef _Irr - vec3 envl = shIrradiance(n); + vec3 envl = shIrradiance(n, shirr); #ifdef _EnvTex envl /= PI; #endif @@ -145,7 +145,7 @@ void main() { #endif envl.rgb *= albedo; - + #ifdef _Rad // Indirect specular envl.rgb += prefilteredColor * (f0 * envBRDF.x + envBRDF.y) * 1.5 * occspec.y; #else @@ -182,7 +182,7 @@ void main() { fragColor.rgb += sampleLight( p, n, v, dotNV, pointPos, pointCol, albedo, roughness, occspec.y, f0 #ifdef _ShadowMap - , 0, pointBias + , 0, pointBias, true #endif #ifdef _Spot , true, spotData.x, spotData.y, spotDir @@ -218,7 +218,7 @@ void main() { occspec.y, f0 #ifdef _ShadowMap - , li, lightsArray[li * 2].w // bias + , li, lightsArray[li * 2].w, true // bias #endif #ifdef _Spot , li > numPoints - 1 diff --git a/Shaders/deferred_light_mobile/deferred_light_mobile.json b/Shaders/deferred_light_mobile/deferred_light_mobile.json index 8a90e82e..118bcdf3 100644 --- a/Shaders/deferred_light_mobile/deferred_light_mobile.json +++ b/Shaders/deferred_light_mobile/deferred_light_mobile.json @@ -159,7 +159,8 @@ } ], "vertex_shader": "../include/pass_viewray.vert.glsl", - "fragment_shader": "deferred_light.frag.glsl" + "fragment_shader": "deferred_light.frag.glsl", + "color_attachments": ["RGBA64"] } ] } diff --git a/Shaders/deferred_light_solid/deferred_light_solid.json b/Shaders/deferred_light_solid/deferred_light_solid.json index 822c2de7..04997ba0 100644 --- a/Shaders/deferred_light_solid/deferred_light_solid.json +++ b/Shaders/deferred_light_solid/deferred_light_solid.json @@ -7,7 +7,8 @@ "cull_mode": "none", "links": [], "vertex_shader": "../include/pass.vert.glsl", - "fragment_shader": "deferred_light.frag.glsl" + "fragment_shader": "deferred_light.frag.glsl", + "color_attachments": ["RGBA64"] } ] } diff --git a/Shaders/include/pass.vert.glsl b/Shaders/include/pass.vert.glsl index eececc04..1d924d72 100755 --- a/Shaders/include/pass.vert.glsl +++ b/Shaders/include/pass.vert.glsl @@ -10,7 +10,7 @@ void main() { // Scale vertex attribute to 0-1 range const vec2 madd = vec2(0.5, 0.5); texCoord = pos.xy * madd + madd; - #ifdef HLSL + #ifdef _InvY texCoord.y = 1.0 - texCoord.y; #endif diff --git a/Shaders/include/pass_viewray.vert.glsl b/Shaders/include/pass_viewray.vert.glsl index 5c916a4a..0b77b14d 100644 --- a/Shaders/include/pass_viewray.vert.glsl +++ b/Shaders/include/pass_viewray.vert.glsl @@ -14,7 
+14,7 @@ void main() { // Scale vertex attribute to [0-1] range const vec2 madd = vec2(0.5, 0.5); texCoord = pos.xy * madd + madd; - #ifdef HLSL + #ifdef _InvY texCoord.y = 1.0 - texCoord.y; #endif diff --git a/Shaders/include/pass_viewray2.vert.glsl b/Shaders/include/pass_viewray2.vert.glsl index 727a42a6..8370ab38 100644 --- a/Shaders/include/pass_viewray2.vert.glsl +++ b/Shaders/include/pass_viewray2.vert.glsl @@ -13,14 +13,14 @@ void main() { // Scale vertex attribute to [0-1] range const vec2 madd = vec2(0.5, 0.5); texCoord = pos.xy * madd + madd; - #ifdef HLSL + #ifdef _InvY texCoord.y = 1.0 - texCoord.y; #endif gl_Position = vec4(pos.xy, 0.0, 1.0); // NDC (at the back of cube) - vec4 v = vec4(pos.x, pos.y, 1.0, 1.0); + vec4 v = vec4(pos.x, pos.y, 1.0, 1.0); v = vec4(invP * v); viewRay = vec3(v.xy / v.z, 1.0); } diff --git a/Shaders/motion_blur_pass/motion_blur_pass.frag.glsl b/Shaders/motion_blur_pass/motion_blur_pass.frag.glsl index 5161e0bd..f58a2e80 100644 --- a/Shaders/motion_blur_pass/motion_blur_pass.frag.glsl +++ b/Shaders/motion_blur_pass/motion_blur_pass.frag.glsl @@ -18,7 +18,7 @@ in vec3 viewRay; out vec4 fragColor; vec2 getVelocity(vec2 coord, float depth) { - #ifdef HLSL + #ifdef _InvY coord.y = 1.0 - coord.y; #endif vec4 currentPos = vec4(coord.xy * 2.0 - 1.0, depth, 1.0); @@ -26,7 +26,7 @@ vec2 getVelocity(vec2 coord, float depth) { vec4 previousPos = prevVP * worldPos; previousPos /= previousPos.w; vec2 velocity = (currentPos - previousPos).xy / 40.0; - #ifdef HLSL + #ifdef _InvY velocity.y = -velocity.y; #endif return velocity; @@ -34,7 +34,7 @@ vec2 getVelocity(vec2 coord, float depth) { void main() { fragColor.rgb = textureLod(tex, texCoord, 0.0).rgb; - + float depth = textureLod(gbufferD, texCoord, 0.0).r * 2.0 - 1.0; if (depth == 1.0) { return; @@ -42,7 +42,7 @@ void main() { float blurScale = motionBlurIntensity * frameScale; vec2 velocity = getVelocity(texCoord, depth) * blurScale; - + vec2 offset = texCoord; int processed = 1; for(int i = 0; i < 8; ++i) { diff --git a/Shaders/motion_blur_veloc_pass/motion_blur_veloc_pass.frag.glsl b/Shaders/motion_blur_veloc_pass/motion_blur_veloc_pass.frag.glsl index a361e3a9..e521f7b1 100755 --- a/Shaders/motion_blur_veloc_pass/motion_blur_veloc_pass.frag.glsl +++ b/Shaders/motion_blur_veloc_pass/motion_blur_veloc_pass.frag.glsl @@ -14,11 +14,11 @@ out vec4 fragColor; void main() { vec2 velocity = textureLod(sveloc, texCoord, 0.0).rg * motionBlurIntensity * frameScale; - - #ifdef HLSL + + #ifdef _InvY velocity.y = -velocity.y; #endif - + fragColor.rgb = textureLod(tex, texCoord, 0.0).rgb; // float speed = length(velocity / texStep); diff --git a/Shaders/probe_cubemap/probe_cubemap.frag.glsl b/Shaders/probe_cubemap/probe_cubemap.frag.glsl index ee33182c..648bec1e 100644 --- a/Shaders/probe_cubemap/probe_cubemap.frag.glsl +++ b/Shaders/probe_cubemap/probe_cubemap.frag.glsl @@ -17,7 +17,7 @@ out vec4 fragColor; void main() { vec2 texCoord = wvpposition.xy / wvpposition.w; texCoord = texCoord * 0.5 + 0.5; - #ifdef HLSL + #ifdef _InvY texCoord.y = 1.0 - texCoord.y; #endif @@ -46,7 +46,7 @@ void main() { vec3 v = wp - eye; vec3 r = reflect(v, n); - #ifdef HLSL + #ifdef _InvY r.y = -r.y; #endif float intensity = clamp((1.0 - roughness) * dot(wp - probep, n), 0.0, 1.0); diff --git a/Shaders/probe_planar/probe_planar.frag.glsl b/Shaders/probe_planar/probe_planar.frag.glsl index c2de3f0b..81b1782a 100644 --- a/Shaders/probe_planar/probe_planar.frag.glsl +++ b/Shaders/probe_planar/probe_planar.frag.glsl @@ -17,7 +17,7 @@ out vec4 
fragColor; void main() { vec2 texCoord = wvpposition.xy / wvpposition.w; texCoord = texCoord * 0.5 + 0.5; - #ifdef HLSL + #ifdef _InvY texCoord.y = 1.0 - texCoord.y; #endif @@ -39,7 +39,7 @@ void main() { vec3 wp = getPos2(invVP, depth, texCoord); vec4 pp = probeVP * vec4(wp.xyz, 1.0); vec2 tc = (pp.xy / pp.w) * 0.5 + 0.5; - #ifdef HLSL + #ifdef _InvY tc.y = 1.0 - tc.y; #endif diff --git a/Shaders/smaa_blend_weight/smaa_blend_weight.frag.glsl b/Shaders/smaa_blend_weight/smaa_blend_weight.frag.glsl index db2f6593..b1239d53 100644 --- a/Shaders/smaa_blend_weight/smaa_blend_weight.frag.glsl +++ b/Shaders/smaa_blend_weight/smaa_blend_weight.frag.glsl @@ -1,5 +1,6 @@ #version 450 +#include "compiled.inc" #define SMAA_MAX_SEARCH_STEPS_DIAG 8 #define SMAA_AREATEX_MAX_DISTANCE 16 #define SMAA_AREATEX_MAX_DISTANCE_DIAG 20 @@ -33,7 +34,7 @@ out vec4 fragColor; vec2 cdw_end; vec4 textureLodA(sampler2D tex, vec2 coord, float lod) { - #ifdef HLSL + #ifdef _InvY coord.y = 1.0 - coord.y; #endif return textureLod(tex, coord, lod); @@ -104,7 +105,7 @@ vec2 SMAASearchDiag2(vec2 texcoord, vec2 dir) { return coord.zw; } -/** +/** * Similar to SMAAArea, this calculates the area corresponding to a certain * diagonal distance and crossing edges 'e'. */ @@ -147,7 +148,7 @@ vec2 SMAACalculateDiagWeights(vec2 texcoord, vec2 e, vec4 subsampleIndices) { // Fetch the crossing edges: vec4 coords = mad(vec4(-d.x + 0.25, d.x, d.y, -d.y - 0.25), screenSizeInv.xyxy, texcoord.xyxy); vec4 c; - + c.xy = SMAASampleLevelZeroOffset(edgesTex, coords.xy, ivec2(-1, 0)).rg; c.zw = SMAASampleLevelZeroOffset(edgesTex, coords.zw, ivec2( 1, 0)).rg; c.yxwz = SMAADecodeDiagBilinearAccess(c.xyzw); @@ -172,7 +173,7 @@ vec2 SMAACalculateDiagWeights(vec2 texcoord, vec2 e, vec4 subsampleIndices) { d.yw = SMAASearchDiag2(texcoord, vec2(1.0, 1.0)/*, cdw_end*/); float dadd = cdw_end.y > 0.9 ? 1.0 : 0.0; d.y += dadd; - } + } else { d.yw = vec2(0.0, 0.0); } @@ -207,7 +208,7 @@ vec2 SMAACalculateDiagWeights(vec2 texcoord, vec2 e, vec4 subsampleIndices) { /** * This allows to determine how much length should we add in the last step - * of the searches. It takes the bilinearly interpolated edge (see + * of the searches. It takes the bilinearly interpolated edge (see * @PSEUDO_GATHER4), and adds 0, 1 or 2, depending on which edges and * crossing edges are active. */ @@ -244,7 +245,7 @@ float SMAASearchXLeft(vec2 texcoord, float end) { * which edges are active from the four fetched ones. */ vec2 e = vec2(0.0, 1.0); - while (texcoord.x > end && + while (texcoord.x > end && e.g > 0.8281 && // Is there some edge not activated? e.r == 0.0) { // Or is there a crossing edge that breaks the line? e = textureLodA(edgesTex, texcoord, 0.0).rg; @@ -257,20 +258,20 @@ float SMAASearchXLeft(vec2 texcoord, float end) { float SMAASearchXRight(vec2 texcoord, float end) { vec2 e = vec2(0.0, 1.0); - while (texcoord.x < end && + while (texcoord.x < end && e.g > 0.8281 && // Is there some edge not activated? e.r == 0.0) { // Or is there a crossing edge that breaks the line? e = textureLodA(edgesTex, texcoord, 0.0).rg; texcoord = mad(vec2(2.0, 0.0), screenSizeInv.xy, texcoord); } - + float offset = mad(-(255.0 / 127.0), SMAASearchLength(e, 0.5), 3.25); return mad(-screenSizeInv.x, offset, texcoord.x); } float SMAASearchYUp(vec2 texcoord, float end) { vec2 e = vec2(1.0, 0.0); - while (texcoord.y > end && + while (texcoord.y > end && e.r > 0.8281 && // Is there some edge not activated? e.g == 0.0) { // Or is there a crossing edge that breaks the line? 
e = textureLodA(edgesTex, texcoord, 0.0).rg; @@ -282,7 +283,7 @@ float SMAASearchYUp(vec2 texcoord, float end) { float SMAASearchYDown(vec2 texcoord, float end) { vec2 e = vec2(1.0, 0.0); - while (texcoord.y < end && + while (texcoord.y < end && e.r > 0.8281 && // Is there some edge not activated? e.g == 0.0) { // Or is there a crossing edge that breaks the line? e = textureLodA(edgesTex, texcoord, 0.0).rg; @@ -292,14 +293,14 @@ float SMAASearchYDown(vec2 texcoord, float end) { return mad(-screenSizeInv.y, offset, texcoord.y); } -/** +/** * Ok, we have the distance and both crossing edges. So, what are the areas * at each side of current edge? */ vec2 SMAAArea(vec2 dist, float e1, float e2, float offset) { // Rounding prevents precision errors of bilinear filtering: vec2 texcoord = mad(vec2(SMAA_AREATEX_MAX_DISTANCE, SMAA_AREATEX_MAX_DISTANCE), round(4.0 * vec2(e1, e2)), dist); - + // We do a scale and bias for mapping to texel space: texcoord = mad(SMAA_AREATEX_PIXEL_SIZE, texcoord, 0.5 * SMAA_AREATEX_PIXEL_SIZE); @@ -363,7 +364,7 @@ vec4 SMAABlendingWeightCalculationPS(vec2 texcoord, vec2 pixcoord, // one of the boundaries is enough. weights.rg = SMAACalculateDiagWeights(texcoord, e, subsampleIndices); - // We give priority to diagonals, so if we find a diagonal we skip + // We give priority to diagonals, so if we find a diagonal we skip // horizontal/vertical processing. //SMAA_BRANCH if (weights.r == -weights.g) { // weights.r + weights.g == 0.0 @@ -433,7 +434,7 @@ vec4 SMAABlendingWeightCalculationPS(vec2 texcoord, vec2 pixcoord, // We want the distances to be in pixel units: d = abs(round(mad(screenSize.yy, d, -pixcoord.yy))); - // SMAAArea below needs a sqrt, as the areas texture is compressed + // SMAAArea below needs a sqrt, as the areas texture is compressed // quadratically: vec2 sqrt_d = sqrt(d); diff --git a/Shaders/smaa_edge_detect/smaa_edge_detect.vert.glsl b/Shaders/smaa_edge_detect/smaa_edge_detect.vert.glsl index 61b3961c..7ce815b3 100644 --- a/Shaders/smaa_edge_detect/smaa_edge_detect.vert.glsl +++ b/Shaders/smaa_edge_detect/smaa_edge_detect.vert.glsl @@ -11,7 +11,7 @@ out vec4 offset0; out vec4 offset1; out vec4 offset2; -#ifdef HLSL +#ifdef _InvY #define V_DIR(v) -(v) #else #define V_DIR(v) v @@ -21,7 +21,7 @@ void main() { // Scale vertex attribute to [0-1] range const vec2 madd = vec2(0.5, 0.5); texCoord = pos.xy * madd + madd; - #ifdef HLSL + #ifdef _InvY texCoord.y = 1.0 - texCoord.y; #endif diff --git a/Shaders/smaa_neighborhood_blend/smaa_neighborhood_blend.frag.glsl b/Shaders/smaa_neighborhood_blend/smaa_neighborhood_blend.frag.glsl index d307e79d..99dca290 100755 --- a/Shaders/smaa_neighborhood_blend/smaa_neighborhood_blend.frag.glsl +++ b/Shaders/smaa_neighborhood_blend/smaa_neighborhood_blend.frag.glsl @@ -18,7 +18,7 @@ out vec4 fragColor; // Neighborhood Blending Pixel Shader (Third Pass) vec4 textureLodA(sampler2D tex, vec2 coords, float lod) { - #ifdef HLSL + #ifdef _InvY coords.y = 1.0 - coords.y; #endif return textureLod(tex, coords, lod); @@ -49,7 +49,7 @@ vec4 SMAANeighborhoodBlendingPS(vec2 texcoord, vec4 offset) { // Calculate the blending offsets: vec4 blendingOffset = vec4(0.0, a.y, 0.0, a.w); vec2 blendingWeight = a.yw; - + if (h) { blendingOffset.x = a.x; blendingOffset.y = 0.0; @@ -58,11 +58,11 @@ vec4 SMAANeighborhoodBlendingPS(vec2 texcoord, vec4 offset) { blendingWeight.x = a.x; blendingWeight.y = a.z; } - + blendingWeight /= dot(blendingWeight, vec2(1.0, 1.0)); // Calculate the texture coordinates: - #ifdef HLSL + #ifdef _InvY vec2 tc = 
vec2(texcoord.x, 1.0 - texcoord.y); #else vec2 tc = texcoord; diff --git a/Shaders/smaa_neighborhood_blend/smaa_neighborhood_blend.vert.glsl b/Shaders/smaa_neighborhood_blend/smaa_neighborhood_blend.vert.glsl index 8d433797..6568ab64 100644 --- a/Shaders/smaa_neighborhood_blend/smaa_neighborhood_blend.vert.glsl +++ b/Shaders/smaa_neighborhood_blend/smaa_neighborhood_blend.vert.glsl @@ -9,7 +9,7 @@ uniform vec2 screenSizeInv; out vec2 texCoord; out vec4 offset; -#ifdef HLSL +#ifdef _InvY #define V_DIR(v) -(v) #else #define V_DIR(v) v @@ -19,7 +19,7 @@ void main() { // Scale vertex attribute to [0-1] range const vec2 madd = vec2(0.5, 0.5); texCoord = pos.xy * madd + madd; - #ifdef HLSL + #ifdef _InvY texCoord.y = 1.0 - texCoord.y; #endif diff --git a/Shaders/ssgi_pass/ssgi_pass.frag.glsl b/Shaders/ssgi_pass/ssgi_pass.frag.glsl index acc6d341..694e0f21 100755 --- a/Shaders/ssgi_pass/ssgi_pass.frag.glsl +++ b/Shaders/ssgi_pass/ssgi_pass.frag.glsl @@ -37,7 +37,7 @@ vec2 getProjectedCoord(vec3 hitCoord) { vec4 projectedCoord = P * vec4(hitCoord, 1.0); projectedCoord.xy /= projectedCoord.w; projectedCoord.xy = projectedCoord.xy * 0.5 + 0.5; - #ifdef HLSL + #ifdef _InvY projectedCoord.y = 1.0 - projectedCoord.y; #endif return projectedCoord.xy; diff --git a/Shaders/ssr_pass/ssr_pass.frag.glsl b/Shaders/ssr_pass/ssr_pass.frag.glsl index 0fc1ea0a..8ec84256 100755 --- a/Shaders/ssr_pass/ssr_pass.frag.glsl +++ b/Shaders/ssr_pass/ssr_pass.frag.glsl @@ -29,13 +29,13 @@ vec2 getProjectedCoord(const vec3 hit) { vec4 projectedCoord = P * vec4(hit, 1.0); projectedCoord.xy /= projectedCoord.w; projectedCoord.xy = projectedCoord.xy * 0.5 + 0.5; - #ifdef HLSL + #ifdef _InvY projectedCoord.y = 1.0 - projectedCoord.y; #endif return projectedCoord.xy; } -float getDeltaDepth(const vec3 hit) { +float getDeltaDepth(const vec3 hit) { depth = textureLod(gbufferD, getProjectedCoord(hit), 0.0).r * 2.0 - 1.0; vec3 viewPos = getPosView(viewRay, depth, cameraProj); return viewPos.z - hit.z; @@ -79,7 +79,7 @@ void main() { float spec = fract(textureLod(gbuffer1, texCoord, 0.0).a); if (spec == 0.0) { fragColor.rgb = vec3(0.0); return; } - + float d = textureLod(gbufferD, texCoord, 0.0).r * 2.0 - 1.0; if (d == 1.0) { fragColor.rgb = vec3(0.0); return; } @@ -88,18 +88,18 @@ void main() { n.z = 1.0 - abs(enc.x) - abs(enc.y); n.xy = n.z >= 0.0 ? 
enc.xy : octahedronWrap(enc.xy); n = normalize(n); - + vec3 viewNormal = V3 * n; vec3 viewPos = getPosView(viewRay, d, cameraProj); vec3 reflected = normalize(reflect(viewPos, viewNormal)); hitCoord = viewPos; - + #ifdef _CPostprocess vec3 dir = reflected * (1.0 - rand(texCoord) * PPComp10.y * roughness) * 2.0; #else vec3 dir = reflected * (1.0 - rand(texCoord) * ssrJitter * roughness) * 2.0; #endif - + // * max(ssrMinRayStep, -viewPos.z) vec4 coords = rayCast(dir); diff --git a/Shaders/std/gbuffer.glsl b/Shaders/std/gbuffer.glsl index d72a8783..233213c0 100755 --- a/Shaders/std/gbuffer.glsl +++ b/Shaders/std/gbuffer.glsl @@ -19,7 +19,7 @@ vec3 getPosView(const vec3 viewRay, const float depth, const vec2 cameraProj) { return viewRay * linearDepth; } -vec3 getPos(const vec3 eye, const vec3 eyeLook, const vec3 viewRay, const float depth, const vec2 cameraProj) { +vec3 getPos(const vec3 eye, const vec3 eyeLook, const vec3 viewRay, const float depth, const vec2 cameraProj) { // eyeLook, viewRay should be normalized float linearDepth = cameraProj.y / ((depth * 0.5 + 0.5) - cameraProj.x); float viewZDist = dot(eyeLook, viewRay); @@ -27,7 +27,7 @@ vec3 getPos(const vec3 eye, const vec3 eyeLook, const vec3 viewRay, const float return wposition; } -vec3 getPosNoEye(const vec3 eyeLook, const vec3 viewRay, const float depth, const vec2 cameraProj) { +vec3 getPosNoEye(const vec3 eyeLook, const vec3 viewRay, const float depth, const vec2 cameraProj) { // eyeLook, viewRay should be normalized float linearDepth = cameraProj.y / ((depth * 0.5 + 0.5) - cameraProj.x); float viewZDist = dot(eyeLook, viewRay); @@ -35,7 +35,7 @@ vec3 getPosNoEye(const vec3 eyeLook, const vec3 viewRay, const float depth, cons return wposition; } -#ifdef HLSL +#if defined(HLSL) || defined(METAL) vec3 getPos2(const mat4 invVP, const float depth, vec2 coord) { coord.y = 1.0 - coord.y; #else @@ -47,7 +47,7 @@ vec3 getPos2(const mat4 invVP, const float depth, const vec2 coord) { return pos.xyz; } -#ifdef HLSL +#if defined(HLSL) || defined(METAL) vec3 getPosView2(const mat4 invP, const float depth, vec2 coord) { coord.y = 1.0 - coord.y; #else @@ -59,7 +59,7 @@ vec3 getPosView2(const mat4 invP, const float depth, const vec2 coord) { return pos.xyz; } -#ifdef HLSL +#if defined(HLSL) || defined(METAL) vec3 getPos2NoEye(const vec3 eye, const mat4 invVP, const float depth, vec2 coord) { coord.y = 1.0 - coord.y; #else diff --git a/Shaders/std/light.glsl b/Shaders/std/light.glsl index ea3871fc..293dad04 100644 --- a/Shaders/std/light.glsl +++ b/Shaders/std/light.glsl @@ -70,7 +70,7 @@ uniform sampler2D sltcMag; vec3 sampleLight(const vec3 p, const vec3 n, const vec3 v, const float dotNV, const vec3 lp, const vec3 lightCol, const vec3 albedo, const float rough, const float spec, const vec3 f0 #ifdef _ShadowMap - , int index, float bias + , int index, float bias, bool receiveShadow #endif #ifdef _Spot , bool isSpot, float spotA, float spotB, vec3 spotDir @@ -130,40 +130,7 @@ vec3 sampleLight(const vec3 p, const vec3 n, const vec3 v, const float dotNV, co #ifdef _LTC #ifdef _ShadowMap - #ifdef _SinglePoint - vec4 lPos = LWVPSpot0 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[0], lPos.xyz / lPos.w, bias); - #endif - #ifdef _Clusters - if (index == 0) { - vec4 lPos = LWVPSpot0 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[0], lPos.xyz / lPos.w, bias); - } - else if (index == 1) { - vec4 lPos = LWVPSpot1 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[1], lPos.xyz / lPos.w, bias); 
- } - else if (index == 2) { - vec4 lPos = LWVPSpot2 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[2], lPos.xyz / lPos.w, bias); - } - else if (index == 3) { - vec4 lPos = LWVPSpot3 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[3], lPos.xyz / lPos.w, bias); - } - #endif - #endif - return direct; - #endif - - #ifdef _Spot - if (isSpot) { - float spotEffect = dot(spotDir, l); // lightDir - // x - cutoff, y - cutoff - exponent - if (spotEffect < spotA) { - direct *= smoothstep(spotB, spotA, spotEffect); - } - #ifdef _ShadowMap + if (receiveShadow) { #ifdef _SinglePoint vec4 lPos = LWVPSpot0 * vec4(p + n * bias * 10, 1.0); direct *= shadowTest(shadowMapSpot[0], lPos.xyz / lPos.w, bias); @@ -186,6 +153,43 @@ vec3 sampleLight(const vec3 p, const vec3 n, const vec3 v, const float dotNV, co direct *= shadowTest(shadowMapSpot[3], lPos.xyz / lPos.w, bias); } #endif + } + #endif + return direct; + #endif + + #ifdef _Spot + if (isSpot) { + float spotEffect = dot(spotDir, l); // lightDir + // x - cutoff, y - cutoff - exponent + if (spotEffect < spotA) { + direct *= smoothstep(spotB, spotA, spotEffect); + } + #ifdef _ShadowMap + if (receiveShadow) { + #ifdef _SinglePoint + vec4 lPos = LWVPSpot0 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[0], lPos.xyz / lPos.w, bias); + #endif + #ifdef _Clusters + if (index == 0) { + vec4 lPos = LWVPSpot0 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[0], lPos.xyz / lPos.w, bias); + } + else if (index == 1) { + vec4 lPos = LWVPSpot1 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[1], lPos.xyz / lPos.w, bias); + } + else if (index == 2) { + vec4 lPos = LWVPSpot2 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[2], lPos.xyz / lPos.w, bias); + } + else if (index == 3) { + vec4 lPos = LWVPSpot3 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[3], lPos.xyz / lPos.w, bias); + } + #endif + } #endif return direct; } @@ -196,17 +200,19 @@ vec3 sampleLight(const vec3 p, const vec3 n, const vec3 v, const float dotNV, co #endif #ifdef _ShadowMap - #ifdef _SinglePoint - #ifndef _Spot - direct *= PCFCube(shadowMapPoint[0], ld, -l, bias, lightProj, n); - #endif - #endif - #ifdef _Clusters - if (index == 0) direct *= PCFCube(shadowMapPoint[0], ld, -l, bias, lightProj, n); - else if (index == 1) direct *= PCFCube(shadowMapPoint[1], ld, -l, bias, lightProj, n); - else if (index == 2) direct *= PCFCube(shadowMapPoint[2], ld, -l, bias, lightProj, n); - else if (index == 3) direct *= PCFCube(shadowMapPoint[3], ld, -l, bias, lightProj, n); - #endif + if (receiveShadow) { + #ifdef _SinglePoint + #ifndef _Spot + direct *= PCFCube(shadowMapPoint[0], ld, -l, bias, lightProj, n); + #endif + #endif + #ifdef _Clusters + if (index == 0) direct *= PCFCube(shadowMapPoint[0], ld, -l, bias, lightProj, n); + else if (index == 1) direct *= PCFCube(shadowMapPoint[1], ld, -l, bias, lightProj, n); + else if (index == 2) direct *= PCFCube(shadowMapPoint[2], ld, -l, bias, lightProj, n); + else if (index == 3) direct *= PCFCube(shadowMapPoint[3], ld, -l, bias, lightProj, n); + #endif + } #endif return direct; diff --git a/Shaders/std/light_mobile.glsl b/Shaders/std/light_mobile.glsl index 4696d370..86558dd7 100644 --- a/Shaders/std/light_mobile.glsl +++ b/Shaders/std/light_mobile.glsl @@ -33,7 +33,7 @@ vec3 sampleLight(const vec3 p, const vec3 n, const vec3 v, const float dotNV, const vec3 lp, const vec3 lightCol, const vec3 albedo, const float rough, const 
float spec, const vec3 f0 #ifdef _ShadowMap - , int index, float bias + , int index, float bias, bool receiveShadow #endif #ifdef _Spot , bool isSpot, float spotA, float spotB, vec3 spotDir @@ -60,28 +60,30 @@ vec3 sampleLight(const vec3 p, const vec3 n, const vec3 v, const float dotNV, co direct *= smoothstep(spotB, spotA, spotEffect); } #ifdef _ShadowMap - #ifdef _SinglePoint - vec4 lPos = LWVPSpot0 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[0], lPos.xyz / lPos.w, bias); - #endif - #ifdef _Clusters - if (index == 0) { + if (receiveShadow) { + #ifdef _SinglePoint vec4 lPos = LWVPSpot0 * vec4(p + n * bias * 10, 1.0); direct *= shadowTest(shadowMapSpot[0], lPos.xyz / lPos.w, bias); + #endif + #ifdef _Clusters + if (index == 0) { + vec4 lPos = LWVPSpot0 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[0], lPos.xyz / lPos.w, bias); + } + else if (index == 1) { + vec4 lPos = LWVPSpot1 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[1], lPos.xyz / lPos.w, bias); + } + else if (index == 2) { + vec4 lPos = LWVPSpot2 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[2], lPos.xyz / lPos.w, bias); + } + else if (index == 3) { + vec4 lPos = LWVPSpot3 * vec4(p + n * bias * 10, 1.0); + direct *= shadowTest(shadowMapSpot[3], lPos.xyz / lPos.w, bias); + } + #endif } - else if (index == 1) { - vec4 lPos = LWVPSpot1 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[1], lPos.xyz / lPos.w, bias); - } - else if (index == 2) { - vec4 lPos = LWVPSpot2 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[2], lPos.xyz / lPos.w, bias); - } - else if (index == 3) { - vec4 lPos = LWVPSpot3 * vec4(p + n * bias * 10, 1.0); - direct *= shadowTest(shadowMapSpot[3], lPos.xyz / lPos.w, bias); - } - #endif #endif return direct; } @@ -89,15 +91,17 @@ vec3 sampleLight(const vec3 p, const vec3 n, const vec3 v, const float dotNV, co #ifdef _ShadowMap #ifndef _Spot - #ifdef _SinglePoint - direct *= PCFCube(shadowMapPoint[0], ld, -l, bias, lightProj, n); - #endif - #ifdef _Clusters - if (index == 0) direct *= PCFCube(shadowMapPoint[0], ld, -l, bias, lightProj, n); - else if (index == 1) direct *= PCFCube(shadowMapPoint[1], ld, -l, bias, lightProj, n); - else if (index == 2) direct *= PCFCube(shadowMapPoint[2], ld, -l, bias, lightProj, n); - else if (index == 3) direct *= PCFCube(shadowMapPoint[3], ld, -l, bias, lightProj, n); - #endif + if (receiveShadow) { + #ifdef _SinglePoint + direct *= PCFCube(shadowMapPoint[0], ld, -l, bias, lightProj, n); + #endif + #ifdef _Clusters + if (index == 0) direct *= PCFCube(shadowMapPoint[0], ld, -l, bias, lightProj, n); + else if (index == 1) direct *= PCFCube(shadowMapPoint[1], ld, -l, bias, lightProj, n); + else if (index == 2) direct *= PCFCube(shadowMapPoint[2], ld, -l, bias, lightProj, n); + else if (index == 3) direct *= PCFCube(shadowMapPoint[3], ld, -l, bias, lightProj, n); + #endif + } #endif #endif diff --git a/Shaders/std/mapping.glsl b/Shaders/std/mapping.glsl new file mode 100644 index 00000000..3bc80cf6 --- /dev/null +++ b/Shaders/std/mapping.glsl @@ -0,0 +1,41 @@ +/* +https://github.com/JonasFolletete/glsl-triplanar-mapping + +MIT License + +Copyright (c) 2018 Jonas FolletĂȘte + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +vec3 blendNormal(vec3 normal) { + vec3 blending = abs(normal); + blending = normalize(max(blending, 0.00001)); + blending /= vec3(blending.x + blending.y + blending.z); + return blending; +} + +vec3 triplanarMapping (sampler2D ImageTexture, vec3 normal, vec3 position) { + vec3 normalBlend = blendNormal(normal); + vec3 xColor = texture(ImageTexture, position.yz).rgb; + vec3 yColor = texture(ImageTexture, position.xz).rgb; + vec3 zColor = texture(ImageTexture, position.xy).rgb; + + return (xColor * normalBlend.x + yColor * normalBlend.y + zColor * normalBlend.z); +} diff --git a/Shaders/std/shadows.glsl b/Shaders/std/shadows.glsl index 5a7769fb..d610676a 100755 --- a/Shaders/std/shadows.glsl +++ b/Shaders/std/shadows.glsl @@ -35,7 +35,7 @@ float PCFCube(samplerCubeShadow shadowMapCube, const vec3 lp, vec3 ml, const flo const float s = shadowmapCubePcfSize; // TODO: incorrect... float compare = lpToDepth(lp, lightProj) - bias * 1.5; ml = ml + n * bias * 20; - #ifdef HLSL + #ifdef _InvY ml.y = -ml.y; #endif float result = texture(shadowMapCube, vec4(ml, compare)); @@ -105,7 +105,7 @@ float shadowTestCascade(sampler2DShadow shadowMap, const vec3 eye, const vec3 p, int casi; int casIndex; mat4 LWVP = getCascadeMat(d, casi, casIndex); - + vec4 lPos = LWVP * vec4(p, 1.0); lPos.xyz /= lPos.w; diff --git a/Shaders/std/shirr.glsl b/Shaders/std/shirr.glsl index 39515beb..e56dc978 100755 --- a/Shaders/std/shirr.glsl +++ b/Shaders/std/shirr.glsl @@ -1,6 +1,5 @@ -uniform vec4 shirr[7]; -vec3 shIrradiance(const vec3 nor) { +vec3 shIrradiance(const vec3 nor, const vec4 shirr[7]) { const float c1 = 0.429043; const float c2 = 0.511664; const float c3 = 0.743125; diff --git a/Shaders/std/ssrs.glsl b/Shaders/std/ssrs.glsl index a04374f8..c9586610 100644 --- a/Shaders/std/ssrs.glsl +++ b/Shaders/std/ssrs.glsl @@ -9,7 +9,7 @@ vec2 getProjectedCoord(vec3 hitCoord) { vec4 projectedCoord = VP * vec4(hitCoord, 1.0); projectedCoord.xy /= projectedCoord.w; projectedCoord.xy = projectedCoord.xy * 0.5 + 0.5; - #ifdef HLSL + #if defined(HLSL) || defined(METAL) projectedCoord.y = 1.0 - projectedCoord.y; #endif return projectedCoord.xy; diff --git a/Shaders/taa_pass/taa_pass.frag.glsl b/Shaders/taa_pass/taa_pass.frag.glsl index 57ec11af..cdaf2bbb 100755 --- a/Shaders/taa_pass/taa_pass.frag.glsl +++ b/Shaders/taa_pass/taa_pass.frag.glsl @@ -15,12 +15,12 @@ const float SMAA_REPROJECTION_WEIGHT_SCALE = 30.0; void main() { vec4 current = textureLod(tex, texCoord, 0.0); - + #ifdef _Veloc // Velocity is assumed to be calculated for motion blur, so we need to inverse it for reprojection vec2 velocity = -textureLod(sveloc, texCoord, 0.0).rg; - #ifdef HLSL + #ifdef _InvY velocity.y = -velocity.y; #endif diff --git a/Shaders/world_pass/world_pass.frag.glsl 
b/Shaders/world_pass/world_pass.frag.glsl deleted file mode 100644 index dca8371e..00000000 --- a/Shaders/world_pass/world_pass.frag.glsl +++ /dev/null @@ -1,172 +0,0 @@ -#version 450 - -#include "compiled.inc" -#ifdef _EnvTex -#include "std/math.glsl" -#endif - -#ifdef _EnvCol - uniform vec3 backgroundCol; -#endif -#ifdef _EnvSky - uniform vec3 A; - uniform vec3 B; - uniform vec3 C; - uniform vec3 D; - uniform vec3 E; - uniform vec3 F; - uniform vec3 G; - uniform vec3 H; - uniform vec3 I; - uniform vec3 Z; - uniform vec3 hosekSunDirection; -#endif -#ifdef _EnvClouds - uniform sampler3D scloudsBase; - uniform sampler3D scloudsDetail; - uniform sampler2D scloudsMap; - uniform float time; -#endif -#ifdef _EnvTex - uniform sampler2D envmap; -#endif -#ifdef _EnvImg // Static background - uniform vec2 screenSize; - uniform sampler2D envmap; -#endif - -#ifdef _EnvStr -uniform float envmapStrength; -#endif - -in vec3 normal; -out vec4 fragColor; - -#ifdef _EnvSky -vec3 hosekWilkie(float cos_theta, float gamma, float cos_gamma) { - vec3 chi = (1 + cos_gamma * cos_gamma) / pow(1 + H * H - 2 * cos_gamma * H, vec3(1.5)); - return (1 + A * exp(B / (cos_theta + 0.01))) * (C + D * exp(E * gamma) + F * (cos_gamma * cos_gamma) + G * chi + I * sqrt(cos_theta)); -} -#endif - -#ifdef _EnvClouds -// GPU PRO 7 - Real-time Volumetric Cloudscapes -// https://www.guerrilla-games.com/read/the-real-time-volumetric-cloudscapes-of-horizon-zero-dawn -// https://github.com/sebh/TileableVolumeNoise -float remap(float old_val, float old_min, float old_max, float new_min, float new_max) { - return new_min + (((old_val - old_min) / (old_max - old_min)) * (new_max - new_min)); -} - -float getDensityHeightGradientForPoint(float height, float cloud_type) { - const vec4 stratusGrad = vec4(0.02f, 0.05f, 0.09f, 0.11f); - const vec4 stratocumulusGrad = vec4(0.02f, 0.2f, 0.48f, 0.625f); - const vec4 cumulusGrad = vec4(0.01f, 0.0625f, 0.78f, 1.0f); - float stratus = 1.0f - clamp(cloud_type * 2.0f, 0, 1); - float stratocumulus = 1.0f - abs(cloud_type - 0.5f) * 2.0f; - float cumulus = clamp(cloud_type - 0.5f, 0, 1) * 2.0f; - vec4 cloudGradient = stratusGrad * stratus + stratocumulusGrad * stratocumulus + cumulusGrad * cumulus; - return smoothstep(cloudGradient.x, cloudGradient.y, height) - smoothstep(cloudGradient.z, cloudGradient.w, height); -} - -float sampleCloudDensity(vec3 p) { - float cloud_base = textureLod(scloudsBase, p, 0).r * 40; // Base noise - vec3 weather_data = textureLod(scloudsMap, p.xy, 0).rgb; // Weather map - cloud_base *= getDensityHeightGradientForPoint(p.z, weather_data.b); // Cloud type - cloud_base = remap(cloud_base, weather_data.r, 1.0, 0.0, 1.0); // Coverage - cloud_base *= weather_data.r; - float cloud_detail = textureLod(scloudsDetail, p, 0).r * 2; // Detail noise - float cloud_detail_mod = mix(cloud_detail, 1.0 - cloud_detail, clamp(p.z * 10.0, 0, 1)); - cloud_base = remap(cloud_base, cloud_detail_mod * 0.2, 1.0, 0.0, 1.0); - return cloud_base; -} - -float cloudRadiance(vec3 p, vec3 dir){ - #ifdef _EnvSky - vec3 sun_dir = hosekSunDirection; - #else - vec3 sun_dir = vec3(0, 0, -1); - #endif - const int steps = 8; - float step_size = 0.5 / float(steps); - float d = 0.0; - p += sun_dir * step_size; - for(int i = 0; i < steps; ++i) { - d += sampleCloudDensity(p + sun_dir * float(i) * step_size); - } - return 1.0 - d; -} - -vec3 traceClouds(vec3 sky, vec3 dir) { - const float step_size = 0.5 / float(cloudsSteps); - float T = 1.0; - float C = 0.0; - vec2 uv = dir.xy / dir.z * 0.4 * cloudsLower + cloudsWind * 
time * 0.02; - - for (int i = 0; i < cloudsSteps; ++i) { - float h = float(i) / float(cloudsSteps); - vec3 p = vec3(uv * 0.04, h); - float d = sampleCloudDensity(p); - - if (d > 0) { - // float radiance = cloudRadiance(p, dir); - C += T * exp(h) * d * step_size * 0.6 * cloudsPrecipitation; - T *= exp(-d * step_size); - if (T < 0.01) break; - } - uv += (dir.xy / dir.z) * step_size * cloudsUpper; - } - - return vec3(C) + sky * T; -} -#endif // _EnvClouds - -void main() { - -#ifdef _EnvCol - fragColor.rgb = backgroundCol; -#ifdef _EnvTransp - return; -#endif -#ifdef _EnvClouds - vec3 n = normalize(normal); -#endif -#endif - -#ifndef _EnvSky // Prevent case when sky radiance is enabled -#ifdef _EnvTex - vec3 n = normalize(normal); - fragColor.rgb = texture(envmap, envMapEquirect(n)).rgb * envmapStrength; - #ifdef _EnvLDR - fragColor.rgb = pow(fragColor.rgb, vec3(2.2)); - #endif -#endif -#endif - -#ifdef _EnvImg // Static background - // Will have to get rid of gl_FragCoord, pass tc from VS - vec2 texco = gl_FragCoord.xy / screenSize; - fragColor.rgb = texture(envmap, vec2(texco.x, 1.0 - texco.y)).rgb * envmapStrength; -#endif - -#ifdef _EnvSky - vec3 n = normalize(normal); - float phi = acos(n.z); - float theta = atan(-n.y, n.x) + PI; - - float cos_theta = clamp(n.z, 0.0, 1.0); - float cos_gamma = dot(n, hosekSunDirection); - float gamma_val = acos(cos_gamma); - - fragColor.rgb = Z * hosekWilkie(cos_theta, gamma_val, cos_gamma) * envmapStrength; -#endif - -#ifdef _EnvClouds - if (n.z > 0.0) fragColor.rgb = mix(fragColor.rgb, traceClouds(fragColor.rgb, n), clamp(n.z * 5.0, 0, 1)); -#endif - -#ifdef _LDR - fragColor.rgb = pow(fragColor.rgb, vec3(1.0 / 2.2)); -#endif - - fragColor.a = 0.0; // Mark as non-opaque -} diff --git a/Shaders/world_pass/world_pass.json b/Shaders/world_pass/world_pass.json deleted file mode 100644 index 810f8bb2..00000000 --- a/Shaders/world_pass/world_pass.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "contexts": [ - { - "name": "world_pass", - "depth_write": false, - "compare_mode": "less", - "cull_mode": "clockwise", - "links": [ - { - "name": "SMVP", - "link": "_skydomeMatrix" - }, - { - "name": "backgroundCol", - "link": "_backgroundCol", - "ifdef": ["_EnvCol"] - }, - { - "name": "A", - "link": "_hosekA", - "ifdef": ["_EnvSky"] - }, - { - "name": "B", - "link": "_hosekB", - "ifdef": ["_EnvSky"] - }, - { - "name": "C", - "link": "_hosekC", - "ifdef": ["_EnvSky"] - }, - { - "name": "D", - "link": "_hosekD", - "ifdef": ["_EnvSky"] - }, - { - "name": "E", - "link": "_hosekE", - "ifdef": ["_EnvSky"] - }, - { - "name": "F", - "link": "_hosekF", - "ifdef": ["_EnvSky"] - }, - { - "name": "G", - "link": "_hosekG", - "ifdef": ["_EnvSky"] - }, - { - "name": "H", - "link": "_hosekH", - "ifdef": ["_EnvSky"] - }, - { - "name": "I", - "link": "_hosekI", - "ifdef": ["_EnvSky"] - }, - { - "name": "Z", - "link": "_hosekZ", - "ifdef": ["_EnvSky"] - }, - { - "name": "hosekSunDirection", - "link": "_hosekSunDirection", - "ifdef": ["_EnvSky"] - }, - { - "name": "time", - "link": "_time", - "ifdef": ["_EnvClouds"] - }, - { - "name": "scloudsBase", - "link": "$clouds_base.raw", - "ifdef": ["_EnvClouds"] - }, - { - "name": "scloudsDetail", - "link": "$clouds_detail.raw", - "ifdef": ["_EnvClouds"] - }, - { - "name": "scloudsMap", - "link": "$clouds_map.png", - "ifdef": ["_EnvClouds"] - }, - { - "name": "screenSize", - "link": "_screenSize", - "ifdef": ["_EnvImg"] - }, - { - "name": "envmap", - "link": "_envmap", - "ifdef": ["_EnvTex", "_EnvImg"] - }, - { - "name": "envmapStrength", - "link": 
"_envmapStrength", - "ifdef": ["_EnvStr"] - } - ], - "texture_params": [], - "vertex_shader": "world_pass.vert.glsl", - "fragment_shader": "world_pass.frag.glsl" - } - ] -} diff --git a/Shaders/world_pass/world_pass.vert.glsl b/Shaders/world_pass/world_pass.vert.glsl deleted file mode 100644 index bd27b1b3..00000000 --- a/Shaders/world_pass/world_pass.vert.glsl +++ /dev/null @@ -1,14 +0,0 @@ -#version 450 - -in vec3 pos; -in vec3 nor; - -out vec3 normal; - -uniform mat4 SMVP; - -void main() { - normal = nor; - vec4 position = SMVP * vec4(pos, 1.0); - gl_Position = vec4(position); -} diff --git a/Sources/armory/logicnode/ArrayAddNode.hx b/Sources/armory/logicnode/ArrayAddNode.hx index fb4787ec..01def102 100644 --- a/Sources/armory/logicnode/ArrayAddNode.hx +++ b/Sources/armory/logicnode/ArrayAddNode.hx @@ -2,21 +2,38 @@ package armory.logicnode; class ArrayAddNode extends LogicNode { + var ar: Array; + public function new(tree: LogicTree) { super(tree); } override function run(from: Int) { - var ar: Array = inputs[1].get(); + ar = inputs[1].get(); if (ar == null) return; - if (inputs.length > 2) { - for (i in 2...inputs.length) { + // "Modify Original" == `false` -> Copy the input array + if (!inputs[3].get()) { + ar = ar.copy(); + } + + if (inputs.length > 4) { + for (i in 4...inputs.length) { var value: Dynamic = inputs[i].get(); - ar.push(value); + + // "Unique Values" options only supports primitive data types + // for now, a custom indexOf() or contains() method would be + // required to compare values of other types + if (!inputs[2].get() || ar.indexOf(value) == -1) { + ar.push(value); + } } } runOutput(0); } + + override function get(from: Int): Dynamic { + return ar; + } } diff --git a/Sources/armory/logicnode/ArrayAddUniqueNode.hx b/Sources/armory/logicnode/ArrayAddUniqueNode.hx deleted file mode 100644 index fafde3d6..00000000 --- a/Sources/armory/logicnode/ArrayAddUniqueNode.hx +++ /dev/null @@ -1,22 +0,0 @@ -package armory.logicnode; - -class ArrayAddUniqueNode extends LogicNode { - - public function new(tree: LogicTree) { - super(tree); - } - - override function run(from: Int) { - var ar: Array = inputs[1].get(); - if (ar == null) return; - - if (inputs.length > 2) { - for (i in 2...inputs.length) { - var value: Dynamic = inputs[i].get(); - if (ar.indexOf(value) == -1) ar.push(value); - } - } - - runOutput(0); - } -} diff --git a/Sources/armory/logicnode/ArrayLoopNode.hx b/Sources/armory/logicnode/ArrayLoopNode.hx index eb993e90..d5a90303 100644 --- a/Sources/armory/logicnode/ArrayLoopNode.hx +++ b/Sources/armory/logicnode/ArrayLoopNode.hx @@ -3,6 +3,7 @@ package armory.logicnode; class ArrayLoopNode extends LogicNode { var value: Dynamic; + var index: Int; public function new(tree: LogicTree) { super(tree); @@ -12,8 +13,10 @@ class ArrayLoopNode extends LogicNode { var ar: Array = inputs[1].get(); if (ar == null) return; + index = -1; for (val in ar) { value = val; + index++; runOutput(0); if (tree.loopBreak) { @@ -21,10 +24,12 @@ class ArrayLoopNode extends LogicNode { break; } } - runOutput(2); + runOutput(3); } override function get(from: Int): Dynamic { - return value; + if (from == 1) + return value; + return index; } } diff --git a/Sources/armory/logicnode/CanvasGetCheckboxNode.hx b/Sources/armory/logicnode/CanvasGetCheckboxNode.hx index 6bb2f4d8..5dabc261 100644 --- a/Sources/armory/logicnode/CanvasGetCheckboxNode.hx +++ b/Sources/armory/logicnode/CanvasGetCheckboxNode.hx @@ -12,7 +12,7 @@ class CanvasGetCheckboxNode extends LogicNode { } #if arm_ui - override function 
get(from: Int) { + override function get(from: Int): Dynamic { // Null if (canvas == null) canvas = Scene.active.getTrait(CanvasScript); if (canvas == null) canvas = Scene.active.camera.getTrait(CanvasScript); if (canvas == null || !canvas.ready) return null; diff --git a/Sources/armory/logicnode/CanvasGetPositionNode.hx b/Sources/armory/logicnode/CanvasGetPositionNode.hx index 4fa41a61..0f98660b 100644 --- a/Sources/armory/logicnode/CanvasGetPositionNode.hx +++ b/Sources/armory/logicnode/CanvasGetPositionNode.hx @@ -12,7 +12,7 @@ class CanvasGetPositionNode extends LogicNode { } #if arm_ui - override function get(from: Int) { + override function get(from: Int): Dynamic { // Null if (canvas == null) canvas = Scene.active.getTrait(CanvasScript); if (canvas == null) canvas = Scene.active.camera.getTrait(CanvasScript); if (canvas == null || !canvas.ready) return null; diff --git a/Sources/armory/logicnode/CanvasGetSliderNode.hx b/Sources/armory/logicnode/CanvasGetSliderNode.hx index e8da408d..660d9f40 100644 --- a/Sources/armory/logicnode/CanvasGetSliderNode.hx +++ b/Sources/armory/logicnode/CanvasGetSliderNode.hx @@ -12,7 +12,7 @@ class CanvasGetSliderNode extends LogicNode { } #if arm_ui - override function get(from: Int) { + override function get(from: Int): Dynamic { // Null if (canvas == null) canvas = Scene.active.getTrait(CanvasScript); if (canvas == null) canvas = Scene.active.camera.getTrait(CanvasScript); if (canvas == null || !canvas.ready) return null; diff --git a/Sources/armory/logicnode/CanvasSetCheckBoxNode.hx b/Sources/armory/logicnode/CanvasSetCheckBoxNode.hx new file mode 100644 index 00000000..4dee6c3b --- /dev/null +++ b/Sources/armory/logicnode/CanvasSetCheckBoxNode.hx @@ -0,0 +1,42 @@ +package armory.logicnode; + +import iron.Scene; +import armory.trait.internal.CanvasScript; + +class CanvasSetCheckBoxNode extends LogicNode { + + var canvas: CanvasScript; + var element: String; + var value: Bool; + + public function new(tree: LogicTree) { + super(tree); + } + +#if arm_ui + function update() { + if (!canvas.ready) return; + + // This Try/Catch hacks around an issue where the handles are + // not created yet, even though canvas.ready is true. 
+ try { + canvas.getHandle(element).selected = value; + tree.removeUpdate(update); + } + catch (e: Dynamic) {} + + runOutput(0); + } + + override function run(from: Int) { + element = inputs[1].get(); + value = inputs[2].get(); + canvas = Scene.active.getTrait(CanvasScript); + if (canvas == null) canvas = Scene.active.camera.getTrait(CanvasScript); + + // Ensure canvas is ready + tree.notifyOnUpdate(update); + update(); + } +#end +} diff --git a/Sources/armory/logicnode/CanvasSetVisibleNode.hx b/Sources/armory/logicnode/CanvasSetVisibleNode.hx index 7743bb0c..e9a91c58 100644 --- a/Sources/armory/logicnode/CanvasSetVisibleNode.hx +++ b/Sources/armory/logicnode/CanvasSetVisibleNode.hx @@ -18,8 +18,8 @@ class CanvasSetVisibleNode extends LogicNode { if (!canvas.ready) return; tree.removeUpdate(update); - if (visible == true) canvas.getElement(element).visible = true - else canvas.getElement(element).visible = false; + var element = canvas.getElement(element); + if (element != null) element.visible = this.visible; runOutput(0); } override function run(from: Int) { diff --git a/Sources/armory/logicnode/HasContactArrayNode.hx b/Sources/armory/logicnode/HasContactArrayNode.hx new file mode 100644 index 00000000..b5efcdb8 --- /dev/null +++ b/Sources/armory/logicnode/HasContactArrayNode.hx @@ -0,0 +1,35 @@ +package armory.logicnode; + +import iron.object.Object; +import armory.trait.physics.RigidBody; + +class HasContactArrayNode extends LogicNode { + + public function new(tree: LogicTree) { + super(tree); + } + + override function get(from: Int): Dynamic { + var object1: Object = inputs[0].get(); + var objects: Array = inputs[1].get(); + if (object1 == null || objects == null) return false; + +#if arm_physics + var physics = armory.trait.physics.PhysicsWorld.active; + var rb1 = object1.getTrait(RigidBody); + var rbs = physics.getContacts(rb1); + + if (rb1 != null && rbs != null) { + for (object2 in objects) { + var rb2 = object2.getTrait(RigidBody); + for (rb in rbs) { + if (rb == rb2) { + return true; + } + } + } + } +#end + return false; + } +} diff --git a/Sources/armory/logicnode/OnCanvasElementNode.hx b/Sources/armory/logicnode/OnCanvasElementNode.hx new file mode 100644 index 00000000..b38b4ea1 --- /dev/null +++ b/Sources/armory/logicnode/OnCanvasElementNode.hx @@ -0,0 +1,116 @@ +package armory.logicnode; + +import armory.trait.internal.CanvasScript; +import iron.Scene; + +#if arm_ui +import zui.Canvas.Anchor; +#end + +class OnCanvasElementNode extends LogicNode { + + var canvas: CanvasScript; + var element: String; + + /** + * The event type this node should react to, can be "click" or "hover". + */ + public var property0: String; + /** + * If the event type is click, this property states whether to check for + * "down", "started" or "released" events. + */ + public var property1: String; + /** + * The mouse button that this node should react to. Only used when listening + * for mouse clicks. 
+ */ + public var property2: String; + + public function new(tree: LogicTree) { + super(tree); + + tree.notifyOnUpdate(update); + } + + #if arm_ui + function update() { + element = inputs[0].get(); + + // Ensure canvas is ready + if(!Scene.active.ready) return; + canvas = Scene.active.getTrait(CanvasScript); + if (canvas == null) canvas = Scene.active.camera.getTrait(CanvasScript); + if(canvas == null) return; + if (!canvas.ready) return; + if(canvas.getElement(element) == null) return; + if(canvas.getElement(element).visible == false) return; + var mouse = iron.system.Input.getMouse(); + var isEvent = false; + + if (property0 == "click") { + switch (property1) { + case "down": + isEvent = mouse.down(property2); + case "started": + isEvent = mouse.started(property2); + case "released": + isEvent = mouse.released(property2); + } + } + // Hovered + else { + isEvent = true; + } + + if (isEvent) + { + var canvasElem = canvas.getElement(element); + var left = canvasElem.x; + var top = canvasElem.y; + var right = left + canvasElem.width; + var bottom = top + canvasElem.height; + + var anchor = canvasElem.anchor; + var cx = canvas.getCanvas().width; + var cy = canvas.getCanvas().height; + var mouseX = mouse.x; + var mouseY = mouse.y; + + switch(anchor) + { + case Top: + mouseX -= cx/2 - canvasElem.width/2; + case TopRight: + mouseX -= cx - canvasElem.width; + case CenterLeft: + mouseY -= cy/2 - canvasElem.height/2; + case Anchor.Center: + mouseX -= cx/2 - canvasElem.width/2; + mouseY -= cy/2 - canvasElem.height/2; + case CenterRight: + mouseX -= cx - canvasElem.width; + mouseY -= cy/2 - canvasElem.height/2; + case BottomLeft: + mouseY -= cy - canvasElem.height; + case Bottom: + mouseX -= cx/2 - canvasElem.width/2; + mouseY -= cy - canvasElem.height; + case BottomRight: + mouseX -= cx - canvasElem.width; + mouseY -= cy - canvasElem.height; + } + + if((mouseX >= left) && (mouseX <= right)) + { + if((mouseY >= top) && (mouseY <= bottom)) + { + runOutput(0); + } + } + } + } + #else + function update() {} + #end +} diff --git a/Sources/armory/logicnode/OnContactNode.hx b/Sources/armory/logicnode/OnContactNode.hx index 81762590..701af2af 100644 --- a/Sources/armory/logicnode/OnContactNode.hx +++ b/Sources/armory/logicnode/OnContactNode.hx @@ -26,13 +26,15 @@ class OnContactNode extends LogicNode { #if arm_physics var physics = armory.trait.physics.PhysicsWorld.active; var rb1 = object1.getTrait(RigidBody); - var rbs = physics.getContacts(rb1); - if (rb1 != null && rbs != null) { - var rb2 = object2.getTrait(RigidBody); - for (rb in rbs) { - if (rb == rb2) { - contact = true; - break; + if (rb1 != null) { + var rbs = physics.getContacts(rb1); + if (rbs != null) { + var rb2 = object2.getTrait(RigidBody); + for (rb in rbs) { + if (rb == rb2) { + contact = true; + break; + } } } } diff --git a/Sources/armory/logicnode/PlayActionFromNode.hx b/Sources/armory/logicnode/PlayActionFromNode.hx new file mode 100644 index 00000000..581caf13 --- /dev/null +++ b/Sources/armory/logicnode/PlayActionFromNode.hx @@ -0,0 +1,30 @@ +package armory.logicnode; + +import iron.object.Object; +import iron.Scene; + +class PlayActionFromNode extends LogicNode { + + public function new(tree: LogicTree) { + super(tree); + } + + override function run(from: Int) { + var object: Object = inputs[1].get(); + var action: String = inputs[2].get(); + var startFrame:Int = inputs[3].get(); + var blendTime: Float = inputs[4].get(); + + + if (object == null) return; + var animation = object.animation; + if (animation == null) animation = 
object.getParentArmature(object.name); + + animation.play(action, function() { + runOutput(1); + },blendTime); + animation.update(startFrame*Scene.active.raw.frame_time); + + runOutput(0); + } +} diff --git a/Sources/armory/logicnode/PlaySoundRawNode.hx b/Sources/armory/logicnode/PlaySoundRawNode.hx index da877eef..c957c6a9 100644 --- a/Sources/armory/logicnode/PlaySoundRawNode.hx +++ b/Sources/armory/logicnode/PlaySoundRawNode.hx @@ -2,16 +2,73 @@ package armory.logicnode; class PlaySoundRawNode extends LogicNode { + /** The name of the sound */ public var property0: String; + /** Whether to loop the playback */ + public var property1: Bool; + /** Retrigger */ + public var property2: Bool; + /** Override sample rate */ + public var property3: Bool; + /** Playback sample rate */ + public var property4: Int; + + var sound: kha.Sound = null; + var channel: kha.audio1.AudioChannel = null; public function new(tree: LogicTree) { super(tree); } override function run(from: Int) { - iron.data.Data.getSound(property0, function(sound: kha.Sound) { - iron.system.Audio.play(sound, false); - }); - runOutput(0); + switch (from) { + case Play: + if (sound == null) { + iron.data.Data.getSound(property0, function(s: kha.Sound) { + this.sound = s; + }); + } + + // Resume + if (channel != null) { + if (property2) channel.stop(); + channel.play(); + } + // Start + else if (sound != null) { + if (property3) sound.sampleRate = property4; + channel = iron.system.Audio.play(sound, property1); + } + + tree.notifyOnUpdate(this.onUpdate); + runOutput(0); + + case Pause: + if (channel != null) channel.pause(); + tree.removeUpdate(this.onUpdate); + + case Stop: + if (channel != null) channel.stop(); + tree.removeUpdate(this.onUpdate); + runOutput(2); + } + } + + function onUpdate() { + if (channel != null) { + // Done + if (channel.finished) { + channel = null; + runOutput(2); + } + // Running + else runOutput(1); + } } } + +private enum abstract PlayState(Int) from Int to Int { + var Play = 0; + var Pause = 1; + var Stop = 2; +} diff --git a/Sources/armory/logicnode/SetRotationNode.hx b/Sources/armory/logicnode/SetRotationNode.hx index 60ea0764..6665fe61 100644 --- a/Sources/armory/logicnode/SetRotationNode.hx +++ b/Sources/armory/logicnode/SetRotationNode.hx @@ -15,11 +15,11 @@ class SetRotationNode extends LogicNode { override function run(from: Int) { var object: Object = inputs[1].get(); - if (object == null) { - return; - } + if (object == null) return; var vec: Vec4 = inputs[2].get(); + if (vec == null) return; var w: Float = inputs[3].get(); + switch (property0) { case "Euler Angles": object.transform.rot.fromEuler(vec.x, vec.y, vec.z); diff --git a/Sources/armory/logicnode/SetSceneNode.hx b/Sources/armory/logicnode/SetSceneNode.hx index 3c9a3d27..8068e1eb 100644 --- a/Sources/armory/logicnode/SetSceneNode.hx +++ b/Sources/armory/logicnode/SetSceneNode.hx @@ -13,6 +13,10 @@ class SetSceneNode extends LogicNode { override function run(from: Int) { var sceneName: String = inputs[1].get(); + #if arm_json + sceneName += ".json"; + #end + iron.Scene.setActive(sceneName, function(o: iron.object.Object) { root = o; runOutput(0); diff --git a/Sources/armory/logicnode/TraitNode.hx b/Sources/armory/logicnode/TraitNode.hx index f5b29a63..245216ac 100644 --- a/Sources/armory/logicnode/TraitNode.hx +++ b/Sources/armory/logicnode/TraitNode.hx @@ -14,6 +14,7 @@ class TraitNode extends LogicNode { var cname = Type.resolveClass(Main.projectPackage + "." 
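The reworked PlaySoundRawNode above distinguishes its Play, Pause and Stop inputs by switching on the `from` socket index against an enum abstract over Int, which keeps the indices named without adding any runtime wrapper. A stripped-down, self-contained sketch of that pattern (class and value names here are illustrative, not part of the patch):

    enum abstract Socket(Int) from Int to Int {
        var Play = 0;
        var Pause = 1;
        var Stop = 2;
    }

    class SocketDispatchExample {
        // `from` is the index of the input socket that fired, as in LogicNode.run().
        public static function describe(from: Int): String {
            switch (from) {
                case Socket.Play: return "play";
                case Socket.Pause: return "pause";
                case Socket.Stop: return "stop";
                case _: return "unknown socket";
            }
        }

        static function main() {
            trace(describe(1)); // "pause"
        }
    }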
+ property0); if (cname == null) cname = Type.resolveClass(Main.projectPackage + ".node." + property0); + if (cname == null) throw 'No trait with the name "$property0" found, make sure that the trait is exported!'; value = Type.createInstance(cname, []); return value; } diff --git a/Sources/armory/logicnode/TranslateObjectNode.hx b/Sources/armory/logicnode/TranslateObjectNode.hx index d5d8c576..b3ede564 100644 --- a/Sources/armory/logicnode/TranslateObjectNode.hx +++ b/Sources/armory/logicnode/TranslateObjectNode.hx @@ -22,12 +22,9 @@ class TranslateObjectNode extends LogicNode { object.transform.buildMatrix(); } else { - var look = object.transform.world.look().mult(vec.y); - var right = object.transform.world.right().mult(vec.x); - var up = object.transform.world.up().mult(vec.z); - object.transform.loc.add(look); - object.transform.loc.add(right); - object.transform.loc.add(up); + object.transform.move(object.transform.local.look(),vec.y); + object.transform.move(object.transform.local.up(),vec.z); + object.transform.move(object.transform.local.right(),vec.x); object.transform.buildMatrix(); } diff --git a/Sources/armory/logicnode/TranslateOnLocalAxisNode.hx b/Sources/armory/logicnode/TranslateOnLocalAxisNode.hx index 2182affb..4906fce1 100644 --- a/Sources/armory/logicnode/TranslateOnLocalAxisNode.hx +++ b/Sources/armory/logicnode/TranslateOnLocalAxisNode.hx @@ -1,14 +1,10 @@ package armory.logicnode; import iron.object.Object; -import iron.math.Vec4; import armory.trait.physics.RigidBody; class TranslateOnLocalAxisNode extends LogicNode { - var loc = new Vec4(); - var vec = new Vec4(); - public function new(tree: LogicTree) { super(tree); } @@ -21,21 +17,12 @@ class TranslateOnLocalAxisNode extends LogicNode { if (object == null) return; - if (l == 1) loc.setFrom(object.transform.world.look()); - else if (l == 2) loc.setFrom(object.transform.world.up()); - else if (l == 3) loc.setFrom(object.transform.world.right()); + if (ini) sp *= -1; - if (ini) { - loc.x = -loc.x; - loc.y = -loc.y; - loc.z = -loc.z; - } + if (l == 1) object.transform.move(object.transform.local.look(),sp); + else if (l == 2) object.transform.move(object.transform.local.up(),sp); + else if (l == 3) object.transform.move(object.transform.local.right(),sp); - vec.x = loc.x * sp; - vec.y = loc.y * sp; - vec.z = loc.z * sp; - - object.transform.loc.add(vec); object.transform.buildMatrix(); #if arm_physics diff --git a/Sources/armory/logicnode/VectorMathNode.hx b/Sources/armory/logicnode/VectorMathNode.hx index c19880a7..88455735 100644 --- a/Sources/armory/logicnode/VectorMathNode.hx +++ b/Sources/armory/logicnode/VectorMathNode.hx @@ -42,6 +42,8 @@ class VectorMathNode extends LogicNode { f = v.length(); case "Distance": f = v.distanceTo(v2); + case "Reflect": + v.reflect(v2); } if (from == 0) return v; diff --git a/Sources/armory/logicnode/VectorNode.hx b/Sources/armory/logicnode/VectorNode.hx index 7e0c11ab..2c186907 100644 --- a/Sources/armory/logicnode/VectorNode.hx +++ b/Sources/armory/logicnode/VectorNode.hx @@ -17,6 +17,7 @@ class VectorNode extends LogicNode { } override function get(from: Int): Dynamic { + value = new Vec4(); value.x = inputs[0].get(); value.y = inputs[1].get(); value.z = inputs[2].get(); diff --git a/Sources/armory/renderpath/Inc.hx b/Sources/armory/renderpath/Inc.hx index 731edd3e..9c57929a 100644 --- a/Sources/armory/renderpath/Inc.hx +++ b/Sources/armory/renderpath/Inc.hx @@ -41,7 +41,7 @@ class Inc { public static function bindShadowMap() { for (l in iron.Scene.active.lights) { - if 
(!l.visible || !l.data.raw.cast_shadow || l.data.raw.type != "sun") continue; + if (!l.visible || l.data.raw.type != "sun") continue; var n = "shadowMap"; path.bindTarget(n, n); break; @@ -109,7 +109,8 @@ class Inc { pointIndex = 0; spotIndex = 0; for (l in iron.Scene.active.lights) { - if (!l.visible || !l.data.raw.cast_shadow) continue; + if (!l.visible) continue; + path.light = l; var shadowmap = Inc.getShadowMap(l); var faces = l.data.raw.shadowmap_cube ? 6 : 1; @@ -117,7 +118,9 @@ class Inc { if (faces > 1) path.currentFace = i; path.setTarget(shadowmap); path.clearTarget(null, 1.0); - path.drawMeshes("shadowmap"); + if (l.data.raw.cast_shadow) { + path.drawMeshes("shadowmap"); + } } path.currentFace = -1; diff --git a/Sources/armory/renderpath/Postprocess.hx b/Sources/armory/renderpath/Postprocess.hx index c62f5105..b063ca99 100644 --- a/Sources/armory/renderpath/Postprocess.hx +++ b/Sources/armory/renderpath/Postprocess.hx @@ -1,9 +1,8 @@ package armory.renderpath; -import iron.Scene; -import iron.object.Object; import iron.data.MaterialData; import iron.math.Vec4; +import iron.object.Object; class Postprocess { @@ -100,253 +99,208 @@ class Postprocess { public static function vec3Link(object:Object, mat:MaterialData, link:String):iron.math.Vec4 { var v:Vec4 = null; - if (link == "_globalWeight") { + switch link { + case "_globalWeight": var ppm_index = 0; v = iron.object.Uniforms.helpVec; v.x = colorgrading_global_uniforms[ppm_index][0]; v.y = colorgrading_global_uniforms[ppm_index][1]; v.z = colorgrading_global_uniforms[ppm_index][2]; - } - if (link == "_globalTint") { + case "_globalTint": var ppm_index = 1; v = iron.object.Uniforms.helpVec; v.x = colorgrading_global_uniforms[ppm_index][0]; v.y = colorgrading_global_uniforms[ppm_index][1]; v.z = colorgrading_global_uniforms[ppm_index][2]; - } - if (link == "_globalSaturation") { + case "_globalSaturation": var ppm_index = 2; v = iron.object.Uniforms.helpVec; v.x = colorgrading_global_uniforms[ppm_index][0]; v.y = colorgrading_global_uniforms[ppm_index][1]; v.z = colorgrading_global_uniforms[ppm_index][2]; - } - if (link == "_globalContrast") { + case "_globalContrast": var ppm_index = 3; v = iron.object.Uniforms.helpVec; v.x = colorgrading_global_uniforms[ppm_index][0]; v.y = colorgrading_global_uniforms[ppm_index][1]; v.z = colorgrading_global_uniforms[ppm_index][2]; - } - if (link == "_globalGamma") { + case "_globalGamma": var ppm_index = 4; v = iron.object.Uniforms.helpVec; v.x = colorgrading_global_uniforms[ppm_index][0]; v.y = colorgrading_global_uniforms[ppm_index][1]; v.z = colorgrading_global_uniforms[ppm_index][2]; - } - if (link == "_globalGain") { + case "_globalGain": var ppm_index = 5; v = iron.object.Uniforms.helpVec; v.x = colorgrading_global_uniforms[ppm_index][0]; v.y = colorgrading_global_uniforms[ppm_index][1]; v.z = colorgrading_global_uniforms[ppm_index][2]; - } - if (link == "_globalOffset") { + case "_globalOffset": var ppm_index = 6; v = iron.object.Uniforms.helpVec; v.x = colorgrading_global_uniforms[ppm_index][0]; v.y = colorgrading_global_uniforms[ppm_index][1]; v.z = colorgrading_global_uniforms[ppm_index][2]; - } //Shadow ppm - if (link == "_shadowSaturation") { + case "_shadowSaturation": var ppm_index = 0; v = iron.object.Uniforms.helpVec; v.x = colorgrading_shadow_uniforms[ppm_index][0]; v.y = colorgrading_shadow_uniforms[ppm_index][1]; v.z = colorgrading_shadow_uniforms[ppm_index][2]; - } - if (link == "_shadowContrast") { + case "_shadowContrast": var ppm_index = 1; v = 
iron.object.Uniforms.helpVec; v.x = colorgrading_shadow_uniforms[ppm_index][0]; v.y = colorgrading_shadow_uniforms[ppm_index][1]; v.z = colorgrading_shadow_uniforms[ppm_index][2]; - } - if (link == "_shadowGamma") { + case "_shadowGamma": var ppm_index = 2; v = iron.object.Uniforms.helpVec; v.x = colorgrading_shadow_uniforms[ppm_index][0]; v.y = colorgrading_shadow_uniforms[ppm_index][1]; v.z = colorgrading_shadow_uniforms[ppm_index][2]; - } - if (link == "_shadowGain") { + case "_shadowGain": var ppm_index = 3; v = iron.object.Uniforms.helpVec; v.x = colorgrading_shadow_uniforms[ppm_index][0]; v.y = colorgrading_shadow_uniforms[ppm_index][1]; v.z = colorgrading_shadow_uniforms[ppm_index][2]; - } - if (link == "_shadowOffset") { + case "_shadowOffset": var ppm_index = 4; v = iron.object.Uniforms.helpVec; v.x = colorgrading_shadow_uniforms[ppm_index][0]; v.y = colorgrading_shadow_uniforms[ppm_index][1]; v.z = colorgrading_shadow_uniforms[ppm_index][2]; - } //Midtone ppm - if (link == "_midtoneSaturation") { + case "_midtoneSaturation": var ppm_index = 0; v = iron.object.Uniforms.helpVec; v.x = colorgrading_midtone_uniforms[ppm_index][0]; v.y = colorgrading_midtone_uniforms[ppm_index][1]; v.z = colorgrading_midtone_uniforms[ppm_index][2]; - } - if (link == "_midtoneContrast") { + case "_midtoneContrast": var ppm_index = 1; v = iron.object.Uniforms.helpVec; v.x = colorgrading_midtone_uniforms[ppm_index][0]; v.y = colorgrading_midtone_uniforms[ppm_index][1]; v.z = colorgrading_midtone_uniforms[ppm_index][2]; - } - if (link == "_midtoneGamma") { + case "_midtoneGamma": var ppm_index = 2; v = iron.object.Uniforms.helpVec; v.x = colorgrading_midtone_uniforms[ppm_index][0]; v.y = colorgrading_midtone_uniforms[ppm_index][1]; v.z = colorgrading_midtone_uniforms[ppm_index][2]; - } - if (link == "_midtoneGain") { + case "_midtoneGain": var ppm_index = 3; v = iron.object.Uniforms.helpVec; v.x = colorgrading_midtone_uniforms[ppm_index][0]; v.y = colorgrading_midtone_uniforms[ppm_index][1]; v.z = colorgrading_midtone_uniforms[ppm_index][2]; - } - if (link == "_midtoneOffset") { + case "_midtoneOffset": var ppm_index = 4; v = iron.object.Uniforms.helpVec; v.x = colorgrading_midtone_uniforms[ppm_index][0]; v.y = colorgrading_midtone_uniforms[ppm_index][1]; v.z = colorgrading_midtone_uniforms[ppm_index][2]; - } //Highlight ppm - if (link == "_highlightSaturation") { + case "_highlightSaturation": var ppm_index = 0; v = iron.object.Uniforms.helpVec; v.x = colorgrading_highlight_uniforms[ppm_index][0]; v.y = colorgrading_highlight_uniforms[ppm_index][1]; v.z = colorgrading_highlight_uniforms[ppm_index][2]; - } - if (link == "_highlightContrast") { + case "_highlightContrast": var ppm_index = 1; v = iron.object.Uniforms.helpVec; v.x = colorgrading_highlight_uniforms[ppm_index][0]; v.y = colorgrading_highlight_uniforms[ppm_index][1]; v.z = colorgrading_highlight_uniforms[ppm_index][2]; - } - if (link == "_highlightGamma") { + case "_highlightGamma": var ppm_index = 2; v = iron.object.Uniforms.helpVec; v.x = colorgrading_highlight_uniforms[ppm_index][0]; v.y = colorgrading_highlight_uniforms[ppm_index][1]; v.z = colorgrading_highlight_uniforms[ppm_index][2]; - } - if (link == "_highlightGain") { + case "_highlightGain": var ppm_index = 3; v = iron.object.Uniforms.helpVec; v.x = colorgrading_highlight_uniforms[ppm_index][0]; v.y = colorgrading_highlight_uniforms[ppm_index][1]; v.z = colorgrading_highlight_uniforms[ppm_index][2]; - } - if (link == "_highlightOffset") { + case "_highlightOffset": var ppm_index = 4; 
v = iron.object.Uniforms.helpVec; v.x = colorgrading_highlight_uniforms[ppm_index][0]; v.y = colorgrading_highlight_uniforms[ppm_index][1]; v.z = colorgrading_highlight_uniforms[ppm_index][2]; - } //Postprocess Components - if (link == "_PPComp1") { + case "_PPComp1": v = iron.object.Uniforms.helpVec; v.x = camera_uniforms[0]; //F-Number v.y = camera_uniforms[1]; //Shutter v.z = camera_uniforms[2]; //ISO - } - - if (link == "_PPComp2") { + case "_PPComp2": v = iron.object.Uniforms.helpVec; v.x = camera_uniforms[3]; //EC v.y = camera_uniforms[4]; //Lens Distortion v.z = camera_uniforms[5]; //DOF Autofocus - } - - if (link == "_PPComp3") { + case "_PPComp3": v = iron.object.Uniforms.helpVec; v.x = camera_uniforms[6]; //Distance v.y = camera_uniforms[7]; //Focal Length v.z = camera_uniforms[8]; //F-Stop - } - - if (link == "_PPComp4") { + case "_PPComp4": v = iron.object.Uniforms.helpVec; v.x = Std.int(camera_uniforms[9]); //Tonemapping v.y = camera_uniforms[10]; //Film Grain v.z = tonemapper_uniforms[0]; //Slope - } - - if (link == "_PPComp5") { + case "_PPComp5": v = iron.object.Uniforms.helpVec; v.x = tonemapper_uniforms[1]; //Toe v.y = tonemapper_uniforms[2]; //Shoulder v.z = tonemapper_uniforms[3]; //Black Clip - } - - if (link == "_PPComp6") { + case "_PPComp6": v = iron.object.Uniforms.helpVec; v.x = tonemapper_uniforms[4]; //White Clip v.y = lenstexture_uniforms[0]; //Center Min v.z = lenstexture_uniforms[1]; //Center Max - } - - if (link == "_PPComp7") { + case "_PPComp7": v = iron.object.Uniforms.helpVec; v.x = lenstexture_uniforms[2]; //Lum min v.y = lenstexture_uniforms[3]; //Lum max v.z = lenstexture_uniforms[4]; //Expo - } - - if (link == "_PPComp8") { + case "_PPComp8": v = iron.object.Uniforms.helpVec; v.x = colorgrading_global_uniforms[7][0]; //LUT R v.y = colorgrading_global_uniforms[7][1]; //LUT G v.z = colorgrading_global_uniforms[7][2]; //LUT B - } - - if (link == "_PPComp9") { + case "_PPComp9": v = iron.object.Uniforms.helpVec; v.x = ssr_uniforms[0]; //Step v.y = ssr_uniforms[1]; //StepMin v.z = ssr_uniforms[2]; //Search - } - - if (link == "_PPComp10") { + case "_PPComp10": v = iron.object.Uniforms.helpVec; v.x = ssr_uniforms[3]; //Falloff v.y = ssr_uniforms[4]; //Jitter v.z = bloom_uniforms[0]; //Bloom Threshold - } - - if (link == "_PPComp11") { + case "_PPComp11": v = iron.object.Uniforms.helpVec; v.x = bloom_uniforms[1]; //Bloom Strength v.y = bloom_uniforms[2]; //Bloom Radius v.z = ssao_uniforms[0]; //SSAO Strength - } - - if (link == "_PPComp12") { + case "_PPComp12": v = iron.object.Uniforms.helpVec; v.x = ssao_uniforms[1]; //SSAO Radius v.y = ssao_uniforms[2]; //SSAO Max Steps v.z = 0; - } - - if(link == "_PPComp13") { + case "_PPComp13": v = iron.object.Uniforms.helpVec; v.x = chromatic_aberration_uniforms[0]; //CA Strength v.y = chromatic_aberration_uniforms[1]; //CA Samples @@ -354,13 +308,10 @@ class Postprocess { } return v; - } public static function init() { - iron.object.Uniforms.externalVec3Links.push(vec3Link); - } } diff --git a/Sources/armory/renderpath/RenderPathDeferred.hx b/Sources/armory/renderpath/RenderPathDeferred.hx index 87665920..8f263349 100644 --- a/Sources/armory/renderpath/RenderPathDeferred.hx +++ b/Sources/armory/renderpath/RenderPathDeferred.hx @@ -1,6 +1,7 @@ package armory.renderpath; import iron.RenderPath; +import iron.Scene; class RenderPathDeferred { @@ -37,9 +38,12 @@ class RenderPathDeferred { path = _path; - #if (rp_background == "World") + #if kha_metal { - path.loadShader("shader_datas/world_pass/world_pass"); + 
path.loadShader("shader_datas/clear_color_depth_pass/clear_color_depth_pass"); + path.loadShader("shader_datas/clear_color_pass/clear_color_pass"); + path.loadShader("shader_datas/clear_depth_pass/clear_depth_pass"); + path.clearShader = "shader_datas/clear_color_depth_pass/clear_color_depth_pass"; } #end @@ -639,8 +643,10 @@ class RenderPathDeferred { #if (rp_background == "World") { - path.setTarget("tex"); // Re-binds depth - path.drawSkydome("shader_datas/world_pass/world_pass"); + if (Scene.active.raw.world_ref != null) { + path.setTarget("tex"); // Re-binds depth + path.drawSkydome("shader_datas/World_" + Scene.active.raw.world_ref + "/World_" + Scene.active.raw.world_ref); + } } #end diff --git a/Sources/armory/renderpath/RenderPathForward.hx b/Sources/armory/renderpath/RenderPathForward.hx index 7c69f134..8c139815 100644 --- a/Sources/armory/renderpath/RenderPathForward.hx +++ b/Sources/armory/renderpath/RenderPathForward.hx @@ -1,6 +1,7 @@ package armory.renderpath; import iron.RenderPath; +import iron.Scene; class RenderPathForward { @@ -34,8 +35,10 @@ class RenderPathForward { #if (rp_background == "World") { - RenderPathCreator.setTargetMeshes(); - path.drawSkydome("shader_datas/world_pass/world_pass"); + if (Scene.active.raw.world_ref != null) { + RenderPathCreator.setTargetMeshes(); + path.drawSkydome("shader_datas/World_" + Scene.active.raw.world_ref + "/World_" + Scene.active.raw.world_ref); + } } #end @@ -62,9 +65,12 @@ class RenderPathForward { path = _path; - #if (rp_background == "World") + #if kha_metal { - path.loadShader("shader_datas/world_pass/world_pass"); + path.loadShader("shader_datas/clear_color_depth_pass/clear_color_depth_pass"); + path.loadShader("shader_datas/clear_color_pass/clear_color_pass"); + path.loadShader("shader_datas/clear_depth_pass/clear_depth_pass"); + path.clearShader = "shader_datas/clear_color_depth_pass/clear_color_depth_pass"; } #end diff --git a/Sources/armory/system/Starter.hx b/Sources/armory/system/Starter.hx index 5661615f..0f019c48 100644 --- a/Sources/armory/system/Starter.hx +++ b/Sources/armory/system/Starter.hx @@ -4,8 +4,6 @@ import kha.WindowOptions; class Starter { - static var tasks: Int; - #if arm_loadscreen public static var drawLoading: kha.graphics2.Graphics->Int->Int->Void = null; public static var numAssets: Int; @@ -13,6 +11,8 @@ class Starter { public static function main(scene: String, mode: Int, resize: Bool, min: Bool, max: Bool, w: Int, h: Int, msaa: Int, vsync: Bool, getRenderPath: Void->iron.RenderPath) { + var tasks = 0; + function start() { if (tasks > 0) return; @@ -76,20 +76,17 @@ class Starter { #if (js && arm_bullet) function loadLibAmmo(name: String) { kha.Assets.loadBlobFromPath(name, function(b: kha.Blob) { - var print = function(s:String) { trace(s); }; - var loaded = function() { tasks--; start(); }; - untyped __js__("(1, eval)({0})", b.toString()); + js.Syntax.code("(1,eval)({0})", b.toString()); #if kha_krom - var instantiateWasm = function(imports, successCallback) { - var wasmbin = Krom.loadBlob("ammo.wasm.wasm"); - var module = new js.lib.webassembly.Module(wasmbin); - var inst = new js.lib.webassembly.Instance(module, imports); + js.Syntax.code("Ammo({print:function(s){haxe.Log.trace(s);},instantiateWasm:function(imports,successCallback) { + var wasmbin = Krom.loadBlob('ammo.wasm.wasm'); + var module = new WebAssembly.Module(wasmbin); + var inst = new WebAssembly.Instance(module,imports); successCallback(inst); return inst.exports; - }; - untyped __js__("Ammo({print:print, 
instantiateWasm:instantiateWasm}).then(loaded)"); + }}).then(function(){ tasks--; start();})"); #else - untyped __js__("Ammo({print:print}).then(loaded)"); + js.Syntax.code("Ammo({print:function(s){haxe.Log.trace(s);}}).then(function(){ tasks--; start();})"); #end }); } @@ -98,7 +95,7 @@ class Starter { #if (js && arm_navigation) function loadLib(name: String) { kha.Assets.loadBlobFromPath(name, function(b: kha.Blob) { - untyped __js__("(1, eval)({0})", b.toString()); + js.Syntax.code("(1, eval)({0})", b.toString()); tasks--; start(); }); diff --git a/Sources/armory/trait/PhysicsDrag.hx b/Sources/armory/trait/PhysicsDrag.hx index 4c9d58b6..bef674b5 100755 --- a/Sources/armory/trait/PhysicsDrag.hx +++ b/Sources/armory/trait/PhysicsDrag.hx @@ -103,7 +103,7 @@ class PhysicsDrag extends Trait { dir.setZ(dir.z() * pickDist); var newPivotB = new bullet.Bt.Vector3(rayFrom.x() + dir.x(), rayFrom.y() + dir.y(), rayFrom.z() + dir.z()); - #if js + #if (js || hl) pickConstraint.getFrameOffsetA().setOrigin(newPivotB); #elseif cpp pickConstraint.setFrameOffsetAOrigin(newPivotB); diff --git a/Sources/armory/trait/WalkNavigation.hx b/Sources/armory/trait/WalkNavigation.hx index 8dd4a2f3..6bfc50d5 100755 --- a/Sources/armory/trait/WalkNavigation.hx +++ b/Sources/armory/trait/WalkNavigation.hx @@ -9,7 +9,7 @@ import iron.math.Vec4; class WalkNavigation extends Trait { public static var enabled = true; - static inline var speed = 5.0; + var speed = 5.0; var dir = new Vec4(); var xvec = new Vec4(); var yvec = new Vec4(); @@ -111,6 +111,13 @@ class WalkNavigation extends Trait { if (ease < 0.0) ease = 0.0; } + if (mouse.wheelDelta < 0) { + speed *= 1.1; + } else if (mouse.wheelDelta > 0) { + speed *= 0.9; + if (speed < 0.5) speed = 0.5; + } + var d = Time.delta * speed * fast * ease; if (d > 0.0) camera.transform.move(dir, d); diff --git a/Sources/armory/trait/internal/CanvasScript.hx b/Sources/armory/trait/internal/CanvasScript.hx index fbc2652c..b57724a2 100644 --- a/Sources/armory/trait/internal/CanvasScript.hx +++ b/Sources/armory/trait/internal/CanvasScript.hx @@ -60,7 +60,8 @@ class CanvasScript extends Trait { notifyOnRender2D(function(g: kha.graphics2.Graphics) { if (canvas == null) return; - + + setCanvasDimensions(kha.System.windowWidth(), kha.System.windowHeight()); var events = Canvas.draw(cui, canvas, g); for (e in events) { @@ -103,6 +104,14 @@ class CanvasScript extends Trait { return canvas; } + /** + * Set UI scale factor. + * @param factor Scale factor. 
+ */ + public function setUiScale(factor:Float) { + cui.setScale(factor); + } + /** * Set visibility of canvas * @param visible Whether canvas should be visible or not @@ -110,7 +119,16 @@ class CanvasScript extends Trait { public function setCanvasVisibility(visible: Bool){ for (e in canvas.elements) e.visible = visible; } - + + /** + * Set dimensions of canvas + * @param x Width + * @param y Height + */ + public function setCanvasDimensions(x: Int, y: Int){ + canvas.width = x; + canvas.height = y; + } /** * Set font size of the canvas * @param fontSize Size of font to be setted diff --git a/Sources/armory/trait/internal/DebugConsole.hx b/Sources/armory/trait/internal/DebugConsole.hx index 17754e24..e7f2dff8 100755 --- a/Sources/armory/trait/internal/DebugConsole.hx +++ b/Sources/armory/trait/internal/DebugConsole.hx @@ -118,7 +118,7 @@ class DebugConsole extends Trait { static var haxeTrace: Dynamic->haxe.PosInfos->Void = null; static var lastTraces: Array = [""]; static function consoleTrace(v: Dynamic, ?inf: haxe.PosInfos) { - lastTraces.unshift(Std.string(v)); + lastTraces.unshift(haxe.Log.formatOutput(v,inf)); if (lastTraces.length > 10) lastTraces.pop(); haxeTrace(v, inf); } @@ -167,7 +167,7 @@ class DebugConsole extends Trait { if (currentObject.children.length > 0) { ui.row([1 / 13, 12 / 13]); - b = ui.panel(listHandle.nest(lineCounter, {selected: true}), "", true); + b = ui.panel(listHandle.nest(lineCounter, {selected: true}), "", true, false, false); ui.text(currentObject.name); } else { @@ -175,7 +175,7 @@ class DebugConsole extends Trait { // Draw line that shows parent relations ui.g.color = ui.t.ACCENT_COL; - ui.g.drawLine(ui._x - 16, ui._y + ui.ELEMENT_H() / 2, ui._x, ui._y + ui.ELEMENT_H() / 2); + ui.g.drawLine(ui._x - 10, ui._y + ui.ELEMENT_H() / 2, ui._x, ui._y + ui.ELEMENT_H() / 2); ui.g.color = 0xffffffff; ui.text(currentObject.name); @@ -352,8 +352,13 @@ class DebugConsole extends Trait { if (selectedObject.name == "Scene") { selectedType = "(Scene)"; - var p = iron.Scene.active.world.probe; - p.raw.strength = ui.slider(Id.handle({value: p.raw.strength}), "Env Strength", 0.0, 5.0, true); + if (iron.Scene.active.world != null) { + var p = iron.Scene.active.world.probe; + p.raw.strength = ui.slider(Id.handle({value: p.raw.strength}), "Env Strength", 0.0, 5.0, true); + } + else { + ui.text("This scene has no world data to edit."); + } } else if (Std.is(selectedObject, iron.object.LightObject)) { selectedType = "(Light)"; diff --git a/Sources/armory/trait/physics/bullet/PhysicsWorld.hx b/Sources/armory/trait/physics/bullet/PhysicsWorld.hx index d130f79e..18a854e8 100644 --- a/Sources/armory/trait/physics/bullet/PhysicsWorld.hx +++ b/Sources/armory/trait/physics/bullet/PhysicsWorld.hx @@ -327,7 +327,7 @@ class PhysicsWorld extends Trait { #if js rayCallback.set_m_collisionFilterGroup(group); rayCallback.set_m_collisionFilterMask(mask); - #elseif cpp + #elseif (cpp || hl) rayCallback.m_collisionFilterGroup = group; rayCallback.m_collisionFilterMask = mask; #end @@ -348,7 +348,7 @@ class PhysicsWorld extends Trait { hitNormalWorld.set(norm.x(), norm.y(), norm.z()); rb = rbMap.get(untyped body.userIndex); hitInfo = new Hit(rb, hitPointWorld, hitNormalWorld); - #elseif cpp + #elseif (cpp || hl) var hit = rayCallback.m_hitPointWorld; hitPointWorld.set(hit.x(), hit.y(), hit.z()); var norm = rayCallback.m_hitNormalWorld; diff --git a/Sources/armory/trait/physics/bullet/RigidBody.hx b/Sources/armory/trait/physics/bullet/RigidBody.hx index cfaf0923..650ee895 100644 --- 
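The new CanvasScript helpers above (setUiScale, setCanvasDimensions) are ordinary trait methods, and the render hook added above already resizes the canvas to the window every frame, so user code typically only needs to touch the scale. A minimal usage sketch from a custom trait, assuming the canvas trait is already attached to the scene or its active camera (the lookup mirrors the canvas logic nodes in this patch; the trait name and the 1.5 factor are example values only):

    package arm;

    import armory.trait.internal.CanvasScript;
    import iron.Scene;

    class UiScaleExample extends iron.Trait {
        public function new() {
            super();
            notifyOnInit(function() {
                var canvas = Scene.active.getTrait(CanvasScript);
                if (canvas == null) canvas = Scene.active.camera.getTrait(CanvasScript);
                if (canvas != null) canvas.setUiScale(1.5); // example scale factor
            });
        }
    }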
a/Sources/armory/trait/physics/bullet/RigidBody.hx +++ b/Sources/armory/trait/physics/bullet/RigidBody.hx @@ -41,6 +41,7 @@ class RigidBody extends iron.Trait { var currentScaleX: Float; var currentScaleY: Float; var currentScaleZ: Float; + var meshInterface: bullet.Bt.TriangleMesh; public var body: bullet.Bt.RigidBody = null; public var motionState: bullet.Bt.MotionState; @@ -95,7 +96,7 @@ class RigidBody extends iron.Trait { this.mask = mask; if (params == null) params = [0.04, 0.1, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0]; - if (flags == null) flags = [false, false, false]; + if (flags == null) flags = [false, false, false, false]; this.linearDamping = params[0]; this.angularDamping = params[1]; @@ -166,7 +167,7 @@ class RigidBody extends iron.Trait { btshape = caps; } else if (shape == Shape.Mesh) { - var meshInterface = fillTriangleMesh(transform.scale); + meshInterface = fillTriangleMesh(transform.scale); if (mass > 0) { var shapeGImpact = new bullet.Bt.GImpactMeshShape(meshInterface); shapeGImpact.updateBound(); @@ -564,11 +565,25 @@ class RigidBody extends iron.Trait { var data = cast(object, MeshObject).data; var i = usersCache.get(data) - 1; usersCache.set(data, i); + if(shape == Shape.Mesh) deleteShape(); if (i <= 0) { - deleteShape(); - shape == Shape.ConvexHull ? - convexHullCache.remove(data) : + if(shape == Shape.ConvexHull) + { + deleteShape(); + convexHullCache.remove(data); + } + else + { triangleMeshCache.remove(data); + if(meshInterface != null) + { + #if js + bullet.Bt.Ammo.destroy(meshInterface); + #else + meshInterface.delete(); + #end + } + } } } else deleteShape(); diff --git a/blender/arm/assets.py b/blender/arm/assets.py index c18efc3e..b0c1bc97 100755 --- a/blender/arm/assets.py +++ b/blender/arm/assets.py @@ -2,6 +2,8 @@ import shutil import os import stat import bpy + +import arm.log as log import arm.utils assets = [] @@ -48,20 +50,26 @@ def reset(): shader_cons['voxel_frag'] = [] shader_cons['voxel_geom'] = [] -def add(file): +def add(asset_file): global assets - if file in assets: + + # Asset already exists, do nothing + if asset_file in assets: return - base = os.path.basename(file) + + asset_file_base = os.path.basename(asset_file) for f in assets: - if f.endswith(base): - print('Armory Warning: Asset name "{0}" already exists, skipping'.format(base)) + f_file_base = os.path.basename(f) + if f_file_base == asset_file_base: + log.warn(f'Armory Warning: Asset name "{asset_file_base}" already exists, skipping') return - assets.append(file) + + assets.append(asset_file) + # Reserved file name for f in reserved_names: - if f in file: - print('Armory Warning: File "{0}" contains reserved keyword, this will break C++ builds!'.format(file)) + if f in asset_file: + log.warn(f'Armory Warning: File "{asset_file}" contains reserved keyword, this will break C++ builds!') def add_khafile_def(d): global khafile_defs diff --git a/blender/arm/exporter.py b/blender/arm/exporter.py index 910a5534..b2558116 100755 --- a/blender/arm/exporter.py +++ b/blender/arm/exporter.py @@ -11,14 +11,16 @@ This software is licensed under the Creative Commons Attribution-ShareAlike 3.0 Unported License: http://creativecommons.org/licenses/by-sa/3.0/deed.en_US """ +from enum import Enum, unique import math import os import time +from typing import Any, Dict, List, Tuple, Union, Optional import numpy as np -from mathutils import * import bpy +from mathutils import * import arm.assets as assets import arm.exporter_opt as exporter_opt @@ -29,173 +31,219 @@ import arm.material.make as 
make_material import arm.material.mat_batch as mat_batch import arm.utils -NodeTypeEmpty = 0 -NodeTypeBone = 1 -NodeTypeMesh = 2 -NodeTypeLight = 3 -NodeTypeCamera = 4 -NodeTypeSpeaker = 5 -NodeTypeDecal = 6 -NodeTypeProbe = 7 -AnimationTypeSampled = 0 -AnimationTypeLinear = 1 -AnimationTypeBezier = 2 -AnimationTypeConstant = 3 -ExportEpsilon = 1.0e-6 -structIdentifier = ["object", "bone_object", "mesh_object", "light_object", "camera_object", "speaker_object", "decal_object", "probe_object"] +@unique +class NodeType(Enum): + """Represents the type of an object.""" + EMPTY = 0 + BONE = 1 + MESH = 2 + LIGHT = 3 + CAMERA = 4 + SPEAKER = 5 + DECAL = 6 + PROBE = 7 + + @classmethod + def get_bobject_type(cls, bobject: bpy.types.Object) -> "NodeType": + """Returns the NodeType enum member belonging to the type of + the given blender object.""" + if bobject.type == "MESH": + if bobject.data.polygons: + return cls.MESH + elif bobject.type == "FONT" or bobject.type == "META": + return cls.MESH + elif bobject.type == "LIGHT": + return cls.LIGHT + elif bobject.type == "CAMERA": + return cls.CAMERA + elif bobject.type == "SPEAKER": + return cls.SPEAKER + elif bobject.type == "LIGHT_PROBE": + return cls.PROBE + return cls.EMPTY + + +STRUCT_IDENTIFIER = ("object", "bone_object", "mesh_object", + "light_object", "camera_object", "speaker_object", + "decal_object", "probe_object") + +# Internal target names for single FCurve data paths +FCURVE_TARGET_NAMES = { + "location": ("xloc", "yloc", "zloc"), + "rotation_euler": ("xrot", "yrot", "zrot"), + "rotation_quaternion": ("qwrot", "qxrot", "qyrot", "qzrot"), + "scale": ("xscl", "yscl", "zscl"), + "delta_location": ("dxloc", "dyloc", "dzloc"), + "delta_rotation_euler": ("dxrot", "dyrot", "dzrot"), + "delta_rotation_quaternion": ("dqwrot", "dqxrot", "dqyrot", "dqzrot"), + "delta_scale": ("dxscl", "dyscl", "dzscl"), +} + current_output = None -class ArmoryExporter: - '''Export to Armory format''' - def write_matrix(self, matrix): +class ArmoryExporter: + """Export to Armory format. + + Some common naming patterns: + - out_[]: Variables starting with "out_" represent data that is + exported to Iron + - bobject: A Blender object (bpy.types.Object). 
Used because + `object` is a reserved Python keyword + """ + + compress_enabled = False + export_all_flag = True + # Indicates whether rigid body is exported + export_physics = False + optimize_enabled = False + option_mesh_only = False + + # Class names of referenced traits + import_traits: List[str] = [] + + def __init__(self, context: bpy.types.Context, filepath: str, scene: bpy.types.Scene = None, depsgraph: bpy.types.Depsgraph = None): + global current_output + + self.filepath = filepath + self.scene = context.scene if scene is None else scene + self.depsgraph = context.evaluated_depsgraph_get() if depsgraph is None else depsgraph + + # The output dict contains all data that is later exported to Iron format + self.output: Dict[str, Any] = {'frame_time': 1.0 / (self.scene.render.fps / self.scene.render.fps_base)} + current_output = self.output + + # Stores the object type ("objectType") and the asset name + # ("structName") in a dict for each object + self.bobject_array: Dict[bpy.types.Object, Dict[str, Union[NodeType, str]]] = {} + self.bobject_bone_array = {} + self.mesh_array = {} + self.light_array = {} + self.probe_array = {} + self.camera_array = {} + self.speaker_array = {} + self.material_array = [] + self.world_array = [] + self.particle_system_array = {} + + + # `True` if there is at least one spawned camera in the scene + self.camera_spawned = False + + self.material_to_object_dict = {} + # If no material is assigned, provide default to mimic cycles + self.default_material_objects = [] + self.default_skin_material_objects = [] + self.default_part_material_objects = [] + self.material_to_arm_object_dict = {} + # Stores the link between a blender object and its + # corresponding export data (arm object) + self.object_to_arm_object_dict: Dict[bpy.types.Object, Dict] = {} + + self.bone_tracks = [] + + ArmoryExporter.preprocess() + + @classmethod + def export_scene(cls, context: bpy.types.Context, filepath: str, scene: bpy.types.Scene = None, depsgraph: bpy.types.Depsgraph = None) -> None: + """Exports the given scene to the given file path. 
This is the + function that is called in make.py and the entry point of the + exporter.""" + cls(context, filepath, scene, depsgraph).execute() + + @classmethod + def preprocess(cls): + wrd = bpy.data.worlds['Arm'] + + if wrd.arm_physics == 'Enabled': + cls.export_physics = True + cls.export_navigation = False + if wrd.arm_navigation == 'Enabled': + cls.export_navigation = True + cls.export_ui = False + + @staticmethod + def write_matrix(matrix): return [matrix[0][0], matrix[0][1], matrix[0][2], matrix[0][3], matrix[1][0], matrix[1][1], matrix[1][2], matrix[1][3], matrix[2][0], matrix[2][1], matrix[2][2], matrix[2][3], matrix[3][0], matrix[3][1], matrix[3][2], matrix[3][3]] - def get_meshes_file_path(self, object_id, compressed=False): + def get_meshes_file_path(self, object_id: str, compressed=False) -> str: index = self.filepath.rfind('/') mesh_fp = self.filepath[:(index + 1)] + 'meshes/' + if not os.path.exists(mesh_fp): os.makedirs(mesh_fp) + ext = '.lz4' if compressed else '.arm' return mesh_fp + object_id + ext - @staticmethod - def get_bobject_type(bobject): - if bobject.type == "MESH": - if bobject.data.polygons: - return NodeTypeMesh - elif bobject.type == "FONT": - return NodeTypeMesh - elif bobject.type == "META": - return NodeTypeMesh - elif bobject.type == "LIGHT": - return NodeTypeLight - elif bobject.type == "CAMERA": - return NodeTypeCamera - elif bobject.type == "SPEAKER": - return NodeTypeSpeaker - elif bobject.type == "LIGHT_PROBE": - return NodeTypeProbe - return NodeTypeEmpty - @staticmethod def get_shape_keys(mesh): - if not hasattr(mesh, 'shape_keys'): # Metaball + # Metaball + if not hasattr(mesh, 'shape_keys'): return None + shape_keys = mesh.shape_keys if shape_keys and len(shape_keys.key_blocks) > 1: return shape_keys return None - def find_bone(self, name): - for bobject_ref in self.bobjectBoneArray.items(): - if bobject_ref[0].name == name: - return bobject_ref + def find_bone(self, name: str) -> Optional[Tuple[bpy.types.Bone, Dict]]: + """Finds the bone reference (a tuple containing the bone object + and its data) by the given name and returns it.""" + for bone_ref in self.bobject_bone_array.items(): + if bone_ref[0].name == name: + return bone_ref return None @staticmethod - def collect_bone_animation(armature, name): - path = "pose.bones[\"" + name + "\"]." - curve_array = [] + def collect_bone_animation(armature: bpy.types.Object, name: str) -> List[bpy.types.FCurve]: + path = f"pose.bones[\"{name}\"]." 
if armature.animation_data: action = armature.animation_data.action if action: - for fcurve in action.fcurves: - if fcurve.data_path.startswith(path): - curve_array.append(fcurve) - return curve_array + return [fcurve for fcurve in action.fcurves if fcurve.data_path.startswith(path)] - def export_bone(self, armature, bone, scene, o, action): - bobjectRef = self.bobjectBoneArray.get(bone) + return [] - if bobjectRef: - o['type'] = structIdentifier[bobjectRef["objectType"]] - o['name'] = bobjectRef["structName"] - self.export_bone_transform(armature, bone, scene, o, action) + def export_bone(self, armature, bone: bpy.types.Bone, o, action: bpy.types.Action): + bobject_ref = self.bobject_bone_array.get(bone) + + if bobject_ref: + o['type'] = STRUCT_IDENTIFIER[bobject_ref["objectType"].value] + o['name'] = bobject_ref["structName"] + self.export_bone_transform(armature, bone, o, action) o['children'] = [] - for subbobject in bone.children: + for sub_bobject in bone.children: so = {} - self.export_bone(armature, subbobject, scene, so, action) + self.export_bone(armature, sub_bobject, so, action) o['children'].append(so) - def export_pose_markers(self, oanim, action): - if action.pose_markers == None or len(action.pose_markers) == 0: + @staticmethod + def export_pose_markers(oanim, action): + if action.pose_markers is None or len(action.pose_markers) == 0: return + oanim['marker_frames'] = [] oanim['marker_names'] = [] - for m in action.pose_markers: - oanim['marker_frames'].append(int(m.frame)) - oanim['marker_names'].append(m.name) - def export_object_sampled_animation(self, bobject, scene, o): - # This function exports animation as full 4x4 matrices for each frame - animation_flag = False + for pos_marker in action.pose_markers: + oanim['marker_frames'].append(int(pos_marker.frame)) + oanim['marker_names'].append(pos_marker.name) - animation_flag = bobject.animation_data != None and bobject.animation_data.action != None and bobject.type != 'ARMATURE' + @staticmethod + def calculate_anim_frame_range(action: bpy.types.Action) -> Tuple[int, int]: + """Calculates the required frame range of the given action by + also taking fcurve modifiers into account. 
- # Font out - if animation_flag: - if not 'object_actions' in o: - o['object_actions'] = [] - - action = bobject.animation_data.action - aname = arm.utils.safestr(arm.utils.asset_name(action)) - fp = self.get_meshes_file_path('action_' + aname, compressed=self.is_compress()) - assets.add(fp) - ext = '.lz4' if self.is_compress() else '' - if ext == '' and not bpy.data.worlds['Arm'].arm_minimize: - ext = '.json' - o['object_actions'].append('action_' + aname + ext) - - oaction = {} - oaction['sampled'] = True - oaction['name'] = action.name - oanim = {} - oaction['anim'] = oanim - - tracko = {} - tracko['target'] = "transform" - - tracko['frames'] = [] - - begin_frame, end_frame = int(action.frame_range[0]), int(action.frame_range[1]) - end_frame += 1 - - for i in range(begin_frame, end_frame): - tracko['frames'].append(int(i - begin_frame)) - - tracko['frames'].append(int(end_frame)) - - tracko['values'] = [] - - for i in range(begin_frame, end_frame): - scene.frame_set(i) - tracko['values'] += self.write_matrix(bobject.matrix_local) # Continuos array of matrix transforms - - oanim['tracks'] = [tracko] - self.export_pose_markers(oanim, action) - - if True: #action.arm_cached == False or not os.path.exists(fp): - wrd = bpy.data.worlds['Arm'] - if wrd.arm_verbose_output: - print('Exporting object action ' + aname) - actionf = {} - actionf['objects'] = [] - actionf['objects'].append(oaction) - oaction['type'] = 'object' - oaction['name'] = aname - oaction['data_ref'] = '' - oaction['transform'] = None - arm.utils.write_arm(fp, actionf) - - def calculate_animation_length(self, action): - """Calculates the length of the given action.""" + Modifiers that are not range-restricted are ignored in this + calculation. + """ start = action.frame_range[0] end = action.frame_range[1] @@ -212,41 +260,25 @@ class ArmoryExporter: if modifier.frame_end > end: end = modifier.frame_end - return (int(start), int(end)) + return int(start), int(end) - def export_animation_track(self, fcurve, frame_range, target): + @staticmethod + def export_animation_track(fcurve: bpy.types.FCurve, frame_range: Tuple[int, int], target: str) -> Dict: """This function exports a single animation track.""" - data_ttrack = {} - - data_ttrack['target'] = target - data_ttrack['frames'] = [] - data_ttrack['values'] = [] + out_track = {'target': target, 'frames': [], 'values': []} start = frame_range[0] end = frame_range[1] for frame in range(start, end + 1): - data_ttrack['frames'].append(frame) - data_ttrack['values'].append(fcurve.evaluate(frame)) + out_track['frames'].append(frame) + out_track['values'].append(fcurve.evaluate(frame)) - return data_ttrack - - def export_object_transform(self, bobject, o): - # Internal target names for single FCurve data paths - target_names = { - "location": ("xloc", "yloc", "zloc"), - "rotation_euler": ("xrot", "yrot", "zrot"), - "rotation_quaternion": ("qwrot", "qxrot", "qyrot", "qzrot"), - "scale": ("xscl", "yscl", "zscl"), - "delta_location": ("dxloc", "dyloc", "dzloc"), - "delta_rotation_euler": ("dxrot", "dyrot", "dzrot"), - "delta_rotation_quaternion": ("dqwrot", "dqxrot", "dqyrot", "dqzrot"), - "delta_scale": ("dxscl", "dyscl", "dzscl"), - } + return out_track + def export_object_transform(self, bobject: bpy.types.Object, o): # Static transform - o['transform'] = {} - o['transform']['values'] = self.write_matrix(bobject.matrix_local) + o['transform'] = {'values': ArmoryExporter.write_matrix(bobject.matrix_local)} # Animated transform if bobject.animation_data is not None and bobject.type != 
"ARMATURE": @@ -255,89 +287,86 @@ class ArmoryExporter: if action is not None: action_name = arm.utils.safestr(arm.utils.asset_name(action)) - if 'object_actions' not in o: - o['object_actions'] = [] - - fp = self.get_meshes_file_path('action_' + action_name, compressed=self.is_compress()) + fp = self.get_meshes_file_path('action_' + action_name, compressed=ArmoryExporter.compress_enabled) assets.add(fp) - ext = '.lz4' if self.is_compress() else '' + ext = '.lz4' if ArmoryExporter.compress_enabled else '' if ext == '' and not bpy.data.worlds['Arm'].arm_minimize: ext = '.json' + + if 'object_actions' not in o: + o['object_actions'] = [] o['object_actions'].append('action_' + action_name + ext) - oaction = {} - oaction['name'] = action.name + frame_range = self.calculate_anim_frame_range(action) + out_anim = { + 'begin': frame_range[0], + 'end': frame_range[1], + 'tracks': [] + } - # Export the animation tracks - oanim = {} - oaction['anim'] = oanim - - frame_range = self.calculate_animation_length(action) - oanim['begin'] = frame_range[0] - oanim['end'] = frame_range[1] - - oanim['tracks'] = [] - self.export_pose_markers(oanim, action) + self.export_pose_markers(out_anim, action) for fcurve in action.fcurves: data_path = fcurve.data_path try: - data_ttrack = self.export_animation_track(fcurve, frame_range, target_names[data_path][fcurve.array_index]) - + out_track = self.export_animation_track(fcurve, frame_range, FCURVE_TARGET_NAMES[data_path][fcurve.array_index]) except KeyError: - if data_path not in target_names: + if data_path not in FCURVE_TARGET_NAMES: log.warn(f"Action {action_name}: The data path '{data_path}' is not supported (yet)!") continue - # Missing target entry for array_index or something else else: raise - oanim['tracks'].append(data_ttrack) + out_anim['tracks'].append(out_track) - if True: #action.arm_cached == False or not os.path.exists(fp): + if True: # not action.arm_cached or not os.path.exists(fp): wrd = bpy.data.worlds['Arm'] if wrd.arm_verbose_output: print('Exporting object action ' + action_name) - actionf = {} - actionf['objects'] = [] - actionf['objects'].append(oaction) - oaction['type'] = 'object' - oaction['name'] = action_name - oaction['data_ref'] = '' - oaction['transform'] = None - arm.utils.write_arm(fp, actionf) - def process_bone(self, bone): + out_object_action = { + 'name': action_name, + 'anim': out_anim, + 'type': 'object', + 'data_ref': '', + 'transform': None + } + action_file = {'objects': [out_object_action]} + arm.utils.write_arm(fp, action_file) + + def process_bone(self, bone: bpy.types.Bone) -> None: if ArmoryExporter.export_all_flag or bone.select: - self.bobjectBoneArray[bone] = {"objectType" : NodeTypeBone, "structName" : bone.name} + self.bobject_bone_array[bone] = { + "objectType": NodeType.BONE, + "structName": bone.name + } for subbobject in bone.children: self.process_bone(subbobject) - def process_bobject(self, bobject): - """Adds the given blender object to the bobjectArray dict and - stores its type and its name. - - If an object is linked, the name of its library is appended - after an "_". + def process_bobject(self, bobject: bpy.types.Object) -> None: + """Stores some basic information about the given object (its + name and type). + If the given object is an armature, its bones are also + processed. 
""" - if ArmoryExporter.export_all_flag or bobject.select: - btype = ArmoryExporter.get_bobject_type(bobject) + if ArmoryExporter.export_all_flag or bobject.select_get(): + btype: NodeType = NodeType.get_bobject_type(bobject) - if btype != NodeTypeMesh and ArmoryExporter.option_mesh_only: + if btype is not NodeType.MESH and ArmoryExporter.option_mesh_only: return - self.bobjectArray[bobject] = { + self.bobject_array[bobject] = { "objectType": btype, "structName": arm.utils.asset_name(bobject) } if bobject.type == "ARMATURE": - skeleton = bobject.data - if skeleton: - for bone in skeleton.bones: + armature: bpy.types.Armature = bobject.data + if armature: + for bone in armature.bones: if not bone.parent: self.process_bone(bone) @@ -346,18 +375,20 @@ class ArmoryExporter: self.process_bobject(subbobject) def process_skinned_meshes(self): - for bobjectRef in self.bobjectArray.items(): - if bobjectRef[1]["objectType"] == NodeTypeMesh: - armature = bobjectRef[0].find_armature() - if armature: + """Iterates through all objects that are exported and ensures + that bones are actually stored as bones.""" + for bobject_ref in self.bobject_array.items(): + if bobject_ref[1]["objectType"] is NodeType.MESH: + armature = bobject_ref[0].find_armature() + if armature is not None: for bone in armature.data.bones: - boneRef = self.find_bone(bone.name) - if boneRef: - # If an object is used as a bone, then we force its type to be a bone - boneRef[1]["objectType"] = NodeTypeBone - - def export_bone_transform(self, armature, bone, scene, o, action): + bone_ref = self.find_bone(bone.name) + if bone_ref is not None: + # If an object is used as a bone, then we + # force its type to be a bone + bone_ref[1]["objectType"] = NodeType.BONE + def export_bone_transform(self, armature: bpy.types.Object, bone: bpy.types.Bone, o, action: bpy.types.Action): pose_bone = armature.pose.bones.get(bone.name) # if pose_bone is not None: # transform = pose_bone.matrix.copy() @@ -368,33 +399,28 @@ class ArmoryExporter: if bone.parent is not None: transform = (bone.parent.matrix_local.inverted_safe() @ transform) - o['transform'] = {} - o['transform']['values'] = self.write_matrix(transform) + o['transform'] = {'values': ArmoryExporter.write_matrix(transform)} - curve_array = self.collect_bone_animation(armature, bone.name) - animation = len(curve_array) != 0 + fcurve_list = self.collect_bone_animation(armature, bone.name) - if animation and pose_bone: + if fcurve_list and pose_bone: begin_frame, end_frame = int(action.frame_range[0]), int(action.frame_range[1]) - o['anim'] = {} - tracko = {} - o['anim']['tracks'] = [tracko] - tracko['target'] = "transform" - tracko['frames'] = [] + out_track = {'target': "transform", 'frames': [], 'values': []} + o['anim'] = {'tracks': [out_track]} + for i in range(begin_frame, end_frame + 1): - tracko['frames'].append(i - begin_frame) + out_track['frames'].append(i - begin_frame) - tracko['values'] = [] - self.bone_tracks.append((tracko['values'], pose_bone)) + self.bone_tracks.append((out_track['values'], pose_bone)) - def use_default_material(self, bobject, o): + def use_default_material(self, bobject: bpy.types.Object, o): if arm.utils.export_bone_data(bobject): o['material_refs'].append('armdefaultskin') - self.defaultSkinMaterialObjects.append(bobject) + self.default_skin_material_objects.append(bobject) else: o['material_refs'].append('armdefault') - self.defaultMaterialObjects.append(bobject) + self.default_material_objects.append(bobject) def use_default_material_part(self): # Particle 
object with no material assigned @@ -402,44 +428,44 @@ class ArmoryExporter: if ps.render_type != 'OBJECT' or ps.instance_object is None: continue po = ps.instance_object - if po not in self.objectToArmObjectDict: + if po not in self.object_to_arm_object_dict: continue - o = self.objectToArmObjectDict[po] - if len(o['material_refs']) > 0 and o['material_refs'][0] == 'armdefault' and po not in self.defaultPartMaterialObjects: - self.defaultPartMaterialObjects.append(po) - o['material_refs'] = ['armdefaultpart'] # Replace armdefault + o = self.object_to_arm_object_dict[po] + if len(o['material_refs']) > 0 and o['material_refs'][0] == 'armdefault' and po not in self.default_part_material_objects: + self.default_part_material_objects.append(po) + o['material_refs'] = ['armdefaultpart'] # Replace armdefault - def export_material_ref(self, bobject, material, index, o): - if material is None: # Use default for empty mat slots + def export_material_ref(self, bobject: bpy.types.Object, material, index, o): + if material is None: # Use default for empty mat slots self.use_default_material(bobject, o) return - if not material in self.materialArray: - self.materialArray.append(material) + if material not in self.material_array: + self.material_array.append(material) o['material_refs'].append(arm.utils.asset_name(material)) - def export_particle_system_ref(self, psys, index, o): - if psys.settings in self.particleSystemArray: # or not modifier.show_render: + def export_particle_system_ref(self, psys: bpy.types.ParticleSystem, out_object): + if psys.settings.instance_object is None or psys.settings.render_type != 'OBJECT': return - if psys.settings.instance_object == None or psys.settings.render_type != 'OBJECT': - return + self.particle_system_array[psys.settings] = {"structName": psys.settings.name} + pref = { + 'name': psys.name, + 'seed': psys.seed, + 'particle': psys.settings.name + } + out_object['particle_refs'].append(pref) - self.particleSystemArray[psys.settings] = {"structName" : psys.settings.name} - pref = {} - pref['name'] = psys.name - pref['seed'] = psys.seed - pref['particle'] = psys.settings.name - o['particle_refs'].append(pref) - - def get_view3d_area(self): + @staticmethod + def get_view3d_area() -> Optional[bpy.types.Area]: screen = bpy.context.window.screen for area in screen.areas: if area.type == 'VIEW_3D': return area return None - def get_viewport_view_matrix(self): - play_area = self.get_view3d_area() + @staticmethod + def get_viewport_view_matrix() -> Optional[Matrix]: + play_area = ArmoryExporter.get_view3d_area() if play_area is None: return None for space in play_area.spaces: @@ -447,8 +473,9 @@ class ArmoryExporter: return space.region_3d.view_matrix return None - def get_viewport_projection_matrix(self): - play_area = self.get_view3d_area() + @staticmethod + def get_viewport_projection_matrix() -> Tuple[Optional[Matrix], bool]: + play_area = ArmoryExporter.get_view3d_area() if play_area is None: return None, False for space in play_area.spaces: @@ -467,12 +494,13 @@ class ArmoryExporter: values, pose_bone = track[0], track[1] parent = pose_bone.parent if parent: - values += self.write_matrix((parent.matrix.inverted_safe() @ pose_bone.matrix)) + values += ArmoryExporter.write_matrix((parent.matrix.inverted_safe() @ pose_bone.matrix)) else: - values += self.write_matrix(pose_bone.matrix) + values += ArmoryExporter.write_matrix(pose_bone.matrix) # print('Bone matrices exported in ' + str(time.time() - profile_time)) - def has_baked_material(self, bobject, materials): + 
@staticmethod + def has_baked_material(bobject, materials): for mat in materials: if mat is None: continue @@ -481,7 +509,72 @@ class ArmoryExporter: return True return False - def slot_to_material(self, bobject, slot): + @staticmethod + def create_material_variants(scene: bpy.types.Scene) -> Tuple[List[bpy.types.Material], List[bpy.types.MaterialSlot]]: + """Creates unique material variants for skinning, tilesheets and + particles.""" + matvars: List[bpy.types.Material] = [] + matslots: List[bpy.types.MaterialSlot] = [] + + bobject: bpy.types.Object + for bobject in scene.collection.all_objects.values(): + variant_suffix = '' + + # Skinning + if arm.utils.export_bone_data(bobject): + variant_suffix = '_armskin' + # Tilesheets + elif bobject.arm_tilesheet != '': + variant_suffix = '_armtile' + + if variant_suffix == '': + continue + + for slot in bobject.material_slots: + if slot.material is None or slot.material.library is not None: + continue + if slot.material.name.endswith(variant_suffix): + continue + + matslots.append(slot) + mat_name = slot.material.name + variant_suffix + mat = bpy.data.materials.get(mat_name) + # Create material variant + if mat is None: + mat = slot.material.copy() + mat.name = mat_name + if variant_suffix == '_armtile': + mat.arm_tilesheet_flag = True + matvars.append(mat) + slot.material = mat + + # Particle and non-particle objects can not share material + particle_sys: bpy.types.ParticleSettings + for particle_sys in bpy.data.particles: + bobject = particle_sys.instance_object + if bobject is None or particle_sys.render_type != 'OBJECT': + continue + + for slot in bobject.material_slots: + if slot.material is None or slot.material.library is not None: + continue + if slot.material.name.endswith('_armpart'): + continue + + matslots.append(slot) + mat_name = slot.material.name + '_armpart' + mat = bpy.data.materials.get(mat_name) + if mat is None: + mat = slot.material.copy() + mat.name = mat_name + mat.arm_particle_flag = True + matvars.append(mat) + slot.material = mat + + return matvars, matslots + + @staticmethod + def slot_to_material(bobject: bpy.types.Object, slot: bpy.types.MaterialSlot): mat = slot.material # Pick up backed material if present if mat is not None: @@ -571,163 +664,168 @@ class ArmoryExporter: # self.indentLevel -= 1 # self.IndentWrite(B"}\n") - def export_object(self, bobject, scene, parento=None): - # This function exports a single object in the scene and includes its name, - # object reference, material references (for meshes), and transform. - # Subobjects are then exported recursively. - if self.preprocess_object(bobject) == False: + def export_object(self, bobject: bpy.types.Object, scene: bpy.types.Scene, out_parent: Dict = None) -> None: + """This function exports a single object in the scene and + includes its name, object reference, material references (for + meshes), and transform. + Subobjects are then exported recursively. 
+ """ + if not bobject.arm_export: return - bobjectRef = self.bobjectArray.get(bobject) - if bobjectRef: - type = bobjectRef["objectType"] + bobject_ref = self.bobject_array.get(bobject) + if bobject_ref is not None: + object_type = bobject_ref["objectType"] # Linked object, not present in scene - if bobject not in self.objectToArmObjectDict: - o = {} - o['traits'] = [] - o['spawn'] = False - self.objectToArmObjectDict[bobject] = o + if bobject not in self.object_to_arm_object_dict: + out_object = { + 'traits': [], + 'spawn': False + } + self.object_to_arm_object_dict[bobject] = out_object - o = self.objectToArmObjectDict[bobject] - o['type'] = structIdentifier[type] - o['name'] = bobjectRef["structName"] + out_object = self.object_to_arm_object_dict[bobject] + out_object['type'] = STRUCT_IDENTIFIER[object_type.value] + out_object['name'] = bobject_ref["structName"] if bobject.parent_type == "BONE": - o['parent_bone'] = bobject.parent_bone + out_object['parent_bone'] = bobject.parent_bone - if bobject.hide_render or bobject.arm_visible == False: - o['visible'] = False + if bobject.hide_render or not bobject.arm_visible: + out_object['visible'] = False if not bobject.cycles_visibility.camera: - o['visible_mesh'] = False + out_object['visible_mesh'] = False if not bobject.cycles_visibility.shadow: - o['visible_shadow'] = False + out_object['visible_shadow'] = False - if bobject.arm_spawn == False: - o['spawn'] = False + if not bobject.arm_spawn: + out_object['spawn'] = False - o['mobile'] = bobject.arm_mobile + out_object['mobile'] = bobject.arm_mobile if bobject.instance_type == 'COLLECTION' and bobject.instance_collection is not None: - o['group_ref'] = bobject.instance_collection.name + out_object['group_ref'] = bobject.instance_collection.name if bobject.arm_tilesheet != '': - o['tilesheet_ref'] = bobject.arm_tilesheet - o['tilesheet_action_ref'] = bobject.arm_tilesheet_action + out_object['tilesheet_ref'] = bobject.arm_tilesheet + out_object['tilesheet_action_ref'] = bobject.arm_tilesheet_action if len(bobject.arm_propertylist) > 0: - o['properties'] = [] - for p in bobject.arm_propertylist: - po = {} - po['name'] = p.name_prop - po['value'] = getattr(p, p.type_prop + '_prop') - o['properties'].append(po) - - # TODO: - layer_found = True - if layer_found == False: - o['spawn'] = False + out_object['properties'] = [] + for proplist_item in bobject.arm_propertylist: + out_property = { + 'name': proplist_item.name_prop, + 'value': getattr(proplist_item, proplist_item.type_prop + '_prop')} + out_object['properties'].append(out_property) # Export the object reference and material references objref = bobject.data if objref is not None: objname = arm.utils.asset_name(objref) - # Lods + # LOD if bobject.type == 'MESH' and hasattr(objref, 'arm_lodlist') and len(objref.arm_lodlist) > 0: - o['lods'] = [] - for l in objref.arm_lodlist: - if l.enabled_prop == False: + out_object['lods'] = [] + for lodlist_item in objref.arm_lodlist: + if not lodlist_item.enabled_prop: continue - lod = {} - lod['object_ref'] = l.name - lod['screen_size'] = l.screen_size_prop - o['lods'].append(lod) + out_lod = { + 'object_ref': lodlist_item.name, + 'screen_size': lodlist_item.screen_size_prop + } + out_object['lods'].append(out_lod) if objref.arm_lod_material: - o['lod_material'] = True + out_object['lod_material'] = True - if type == NodeTypeMesh: - if not objref in self.meshArray: - self.meshArray[objref] = {"structName" : objname, "objectTable" : [bobject]} + if object_type is NodeType.MESH: + if objref not in 
self.mesh_array: + self.mesh_array[objref] = {"structName": objname, "objectTable": [bobject]} else: - self.meshArray[objref]["objectTable"].append(bobject) + self.mesh_array[objref]["objectTable"].append(bobject) - oid = arm.utils.safestr(self.meshArray[objref]["structName"]) + oid = arm.utils.safestr(self.mesh_array[objref]["structName"]) wrd = bpy.data.worlds['Arm'] if wrd.arm_single_data_file: - o['data_ref'] = oid + out_object['data_ref'] = oid else: - ext = '' if not self.is_compress() else '.lz4' + ext = '' if not ArmoryExporter.compress_enabled else '.lz4' if ext == '' and not bpy.data.worlds['Arm'].arm_minimize: ext = '.json' - o['data_ref'] = 'mesh_' + oid + ext + '/' + oid + out_object['data_ref'] = 'mesh_' + oid + ext + '/' + oid - o['material_refs'] = [] + out_object['material_refs'] = [] for i in range(len(bobject.material_slots)): mat = self.slot_to_material(bobject, bobject.material_slots[i]) # Export ref - self.export_material_ref(bobject, mat, i, o) + self.export_material_ref(bobject, mat, i, out_object) # Decal flag - if mat != None and mat.arm_decal: - o['type'] = 'decal_object' + if mat is not None and mat.arm_decal: + out_object['type'] = 'decal_object' # No material, mimic cycles and assign default - if len(o['material_refs']) == 0: - self.use_default_material(bobject, o) + if len(out_object['material_refs']) == 0: + self.use_default_material(bobject, out_object) num_psys = len(bobject.particle_systems) if num_psys > 0: - o['particle_refs'] = [] - for i in range(0, num_psys): - self.export_particle_system_ref(bobject.particle_systems[i], i, o) + out_object['particle_refs'] = [] + out_object['render_emitter'] = bobject.show_instancer_for_render + for i in range(num_psys): + self.export_particle_system_ref(bobject.particle_systems[i], out_object) aabb = bobject.data.arm_aabb if aabb[0] == 0 and aabb[1] == 0 and aabb[2] == 0: self.calc_aabb(bobject) - o['dimensions'] = [aabb[0], aabb[1], aabb[2]] + out_object['dimensions'] = [aabb[0], aabb[1], aabb[2]] - #shapeKeys = ArmoryExporter.get_shape_keys(objref) - #if shapeKeys: - # self.ExportMorphWeights(bobject, shapeKeys, scene, o) + # shapeKeys = ArmoryExporter.get_shape_keys(objref) + # if shapeKeys: + # self.ExportMorphWeights(bobject, shapeKeys, scene, out_object) - elif type == NodeTypeLight: - if objref not in self.lightArray: - self.lightArray[objref] = {"structName" : objname, "objectTable" : [bobject]} + elif object_type is NodeType.LIGHT: + if objref not in self.light_array: + self.light_array[objref] = {"structName" : objname, "objectTable" : [bobject]} else: - self.lightArray[objref]["objectTable"].append(bobject) - o['data_ref'] = self.lightArray[objref]["structName"] + self.light_array[objref]["objectTable"].append(bobject) + out_object['data_ref'] = self.light_array[objref]["structName"] - elif type == NodeTypeProbe: - if objref not in self.probeArray: - self.probeArray[objref] = {"structName" : objname, "objectTable" : [bobject]} + elif object_type is NodeType.PROBE: + if objref not in self.probe_array: + self.probe_array[objref] = {"structName" : objname, "objectTable" : [bobject]} else: - self.probeArray[objref]["objectTable"].append(bobject) + self.probe_array[objref]["objectTable"].append(bobject) + dist = bobject.data.influence_distance - if objref.type == "PLANAR": - o['dimensions'] = [1.0, 1.0, dist] - else: # GRID, CUBEMAP - o['dimensions'] = [dist, dist, dist] - o['data_ref'] = self.probeArray[objref]["structName"] - elif type == NodeTypeCamera: - if 'spawn' in o and not o['spawn']: - 
self.camera_spawned = False + if objref.type == "PLANAR": + out_object['dimensions'] = [1.0, 1.0, dist] + + # GRID, CUBEMAP + else: + out_object['dimensions'] = [dist, dist, dist] + out_object['data_ref'] = self.probe_array[objref]["structName"] + + elif object_type is NodeType.CAMERA: + if 'spawn' in out_object and not out_object['spawn']: + self.camera_spawned |= False else: self.camera_spawned = True - if objref not in self.cameraArray: - self.cameraArray[objref] = {"structName" : objname, "objectTable" : [bobject]} - else: - self.cameraArray[objref]["objectTable"].append(bobject) - o['data_ref'] = self.cameraArray[objref]["structName"] - elif type == NodeTypeSpeaker: - if objref not in self.speakerArray: - self.speakerArray[objref] = {"structName" : objname, "objectTable" : [bobject]} + if objref not in self.camera_array: + self.camera_array[objref] = {"structName" : objname, "objectTable" : [bobject]} else: - self.speakerArray[objref]["objectTable"].append(bobject) - o['data_ref'] = self.speakerArray[objref]["structName"] + self.camera_array[objref]["objectTable"].append(bobject) + out_object['data_ref'] = self.camera_array[objref]["structName"] + + elif object_type is NodeType.SPEAKER: + if objref not in self.speaker_array: + self.speaker_array[objref] = {"structName" : objname, "objectTable" : [bobject]} + else: + self.speaker_array[objref]["objectTable"].append(bobject) + out_object['data_ref'] = self.speaker_array[objref]["structName"] # Export the transform. If object is animated, then animation tracks are exported here if bobject.type != 'ARMATURE' and bobject.animation_data is not None: @@ -737,38 +835,40 @@ class ArmoryExporter: if track.strips is None: continue for strip in track.strips: - if strip.action == None or strip.action in export_actions: + if strip.action is None or strip.action in export_actions: continue export_actions.append(strip.action) orig_action = action for a in export_actions: bobject.animation_data.action = a - self.export_object_transform(bobject, o) + self.export_object_transform(bobject, out_object) if len(export_actions) >= 2 and export_actions[0] is None: # No action assigned - o['object_actions'].insert(0, 'null') + out_object['object_actions'].insert(0, 'null') bobject.animation_data.action = orig_action else: - self.export_object_transform(bobject, o) + self.export_object_transform(bobject, out_object) # If the object is parented to a bone and is not relative, then undo the bone's transform if bobject.parent_type == "BONE": armature = bobject.parent.data bone = armature.bones[bobject.parent_bone] # if not bone.use_relative_parent: - o['parent_bone_connected'] = bone.use_connect + out_object['parent_bone_connected'] = bone.use_connect if bone.use_connect: bone_translation = Vector((0, bone.length, 0)) + bone.head - o['parent_bone_tail'] = [bone_translation[0], bone_translation[1], bone_translation[2]] + out_object['parent_bone_tail'] = [bone_translation[0], bone_translation[1], bone_translation[2]] else: bone_translation = bone.tail - bone.head - o['parent_bone_tail'] = [bone_translation[0], bone_translation[1], bone_translation[2]] + out_object['parent_bone_tail'] = [bone_translation[0], bone_translation[1], bone_translation[2]] pose_bone = bobject.parent.pose.bones[bobject.parent_bone] bone_translation_pose = pose_bone.tail - pose_bone.head - o['parent_bone_tail_pose'] = [bone_translation_pose[0], bone_translation_pose[1], bone_translation_pose[2]] + out_object['parent_bone_tail_pose'] = [bone_translation_pose[0], bone_translation_pose[1], 
bone_translation_pose[2]] if bobject.type == 'ARMATURE' and bobject.data is not None: - bdata = bobject.data # Armature data - action = None # Reference start action + # Armature data + bdata = bobject.data + # Reference start action + action = None adata = bobject.animation_data # Active action @@ -784,7 +884,8 @@ class ArmoryExporter: # Export actions export_actions = [action] - # hasattr - armature modifier may reference non-parent armature object to deform with + # hasattr - armature modifier may reference non-parent + # armature object to deform with if hasattr(adata, 'nla_tracks') and adata.nla_tracks is not None: for track in adata.nla_tracks: if track.strips is None: @@ -797,21 +898,22 @@ class ArmoryExporter: export_actions.append(strip.action) armatureid = arm.utils.safestr(arm.utils.asset_name(bdata)) - ext = '.lz4' if self.is_compress() else '' + ext = '.lz4' if ArmoryExporter.compress_enabled else '' if ext == '' and not bpy.data.worlds['Arm'].arm_minimize: ext = '.json' - o['bone_actions'] = [] + out_object['bone_actions'] = [] for action in export_actions: aname = arm.utils.safestr(arm.utils.asset_name(action)) - o['bone_actions'].append('action_' + armatureid + '_' + aname + ext) + out_object['bone_actions'].append('action_' + armatureid + '_' + aname + ext) clear_op = set() skelobj = bobject baked_actions = [] orig_action = bobject.animation_data.action if bdata.arm_autobake and bobject.name not in bpy.context.collection.all_objects: - clear_op.add( 'unlink' ) - #clone bjobject and put it in the current scene so the bake operator can run + clear_op.add('unlink') + # Clone bobject and put it in the current scene so + # the bake operator can run if bobject.library is not None: skelobj = bobject.copy() clear_op.add('rem') @@ -820,18 +922,20 @@ class ArmoryExporter: for action in export_actions: aname = arm.utils.safestr(arm.utils.asset_name(action)) skelobj.animation_data.action = action - fp = self.get_meshes_file_path('action_' + armatureid + '_' + aname, compressed=self.is_compress()) + fp = self.get_meshes_file_path('action_' + armatureid + '_' + aname, compressed=ArmoryExporter.compress_enabled) assets.add(fp) - if bdata.arm_cached == False or not os.path.exists(fp): - #handle autobake + if not bdata.arm_cached or not os.path.exists(fp): + # Handle autobake if bdata.arm_autobake: sel = bpy.context.selected_objects[:] - for _o in sel: _o.select_set(False) + for _o in sel: + _o.select_set(False) skelobj.select_set(True) - bpy.ops.nla.bake(frame_start = action.frame_range[0], frame_end=action.frame_range[1], step=1, only_selected=False, visual_keying=True) + bpy.ops.nla.bake(frame_start=action.frame_range[0], frame_end=action.frame_range[1], step=1, only_selected=False, visual_keying=True) action = skelobj.animation_data.action skelobj.select_set(False) - for _o in sel: _o.select_set(True) + for _o in sel: + _o.select_set(True) baked_actions.append(action) wrd = bpy.data.worlds['Arm'] @@ -842,50 +946,50 @@ class ArmoryExporter: for bone in bdata.bones: if not bone.parent: boneo = {} - self.export_bone(skelobj, bone, scene, boneo, action) + self.export_bone(skelobj, bone, boneo, action) bones.append(boneo) - self.write_bone_matrices( bpy.context.scene, action) + self.write_bone_matrices(bpy.context.scene, action) if len(bones) > 0 and 'anim' in bones[0]: self.export_pose_markers(bones[0]['anim'], action) # Save action separately - action_obj = {} - action_obj['name'] = aname - action_obj['objects'] = bones + action_obj = {'name': aname, 'objects': bones} 
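+                        # action_obj holds the root bones exported for this action; it is written below to the per-action file 'fp' registered above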
arm.utils.write_arm(fp, action_obj) - #restore settings + # Restore settings skelobj.animation_data.action = orig_action - for a in baked_actions: bpy.data.actions.remove( a, do_unlink=True) - if 'unlink' in clear_op: bpy.context.collection.objects.unlink(skelobj) - if 'rem' in clear_op: bpy.data.objects.remove(skelobj, do_unlink=True) + for a in baked_actions: + bpy.data.actions.remove(a, do_unlink=True) + if 'unlink' in clear_op: + bpy.context.collection.objects.unlink(skelobj) + if 'rem' in clear_op: + bpy.data.objects.remove(skelobj, do_unlink=True) # TODO: cache per action bdata.arm_cached = True - if parento is None: - self.output['objects'].append(o) + if out_parent is None: + self.output['objects'].append(out_object) else: - parento['children'].append(o) + out_parent['children'].append(out_object) - self.post_export_object(bobject, o, type) + self.post_export_object(bobject, out_object, object_type) - if not hasattr(o, 'children') and len(bobject.children) > 0: - o['children'] = [] + if not hasattr(out_object, 'children') and len(bobject.children) > 0: + out_object['children'] = [] if bobject.arm_instanced == 'Off': for subbobject in bobject.children: - self.export_object(subbobject, scene, o) + self.export_object(subbobject, scene, out_object) - def export_skin(self, bobject, armature, exportMesh, o): - # This function exports all skinning data, which includes the skeleton - # and per-vertex bone influence data + def export_skin(self, bobject: bpy.types.Object, armature, export_mesh: bpy.types.Mesh, out_mesh): + """This function exports all skinning data, which includes the + skeleton and per-vertex bone influence data""" oskin = {} - o['skin'] = oskin + out_mesh['skin'] = oskin # Write the skin bind pose transform - otrans = {} + otrans = {'values': ArmoryExporter.write_matrix(bobject.matrix_world)} oskin['transform'] = otrans - otrans['values'] = self.write_matrix(bobject.matrix_world) bone_array = armature.data.bones bone_count = len(bone_array) @@ -899,9 +1003,9 @@ class ArmoryExporter: oskin['bone_len_array'] = np.empty(bone_count, dtype='= 0: #and bone_weight != 0.0: + if bone_index >= 0: #and bone_weight != 0.0: bone_values.append((bone_weight, bone_index)) total_weight += bone_weight bone_count += 1 @@ -975,17 +1079,18 @@ class ArmoryExporter: oskin['constraints'] = [] self.add_constraints(bone, oskin, bone=True) - def write_mesh(self, bobject, fp, o): - wrd = bpy.data.worlds['Arm'] - if wrd.arm_single_data_file: - self.output['mesh_datas'].append(o) - else: # One mesh data per file - mesh_obj = {} - mesh_obj['mesh_datas'] = [o] + def write_mesh(self, bobject: bpy.types.Object, fp, out_mesh): + if bpy.data.worlds['Arm'].arm_single_data_file: + self.output['mesh_datas'].append(out_mesh) + + # One mesh data per file + else: + mesh_obj = {'mesh_datas': [out_mesh]} arm.utils.write_arm(fp, mesh_obj) bobject.data.arm_cached = True - def calc_aabb(self, bobject): + @staticmethod + def calc_aabb(bobject): aabb_center = 0.125 * sum((Vector(b) for b in bobject.bound_box), Vector()) bobject.data.arm_aabb = [ \ abs((bobject.bound_box[6][0] - bobject.bound_box[0][0]) / 2 + abs(aabb_center[0])) * 2, \ @@ -993,7 +1098,7 @@ class ArmoryExporter: abs((bobject.bound_box[6][2] - bobject.bound_box[0][2]) / 2 + abs(aabb_center[2])) * 2 \ ] - def export_mesh_data(self, exportMesh, bobject, o, has_armature=False): + def export_mesh_data(self, exportMesh, bobject: bpy.types.Object, o, has_armature=False): exportMesh.calc_normals_split() exportMesh.calc_loop_triangles() @@ -1227,20 +1332,20 @@ 
class ArmoryExporter: # bpy.data.meshes.remove(morphMesh) def has_tangents(self, exportMesh): - return self.get_export_uvs(exportMesh) == True and self.get_export_tangents(exportMesh) == True and len(exportMesh.uv_layers) > 0 + return self.get_export_uvs(exportMesh) and self.get_export_tangents(exportMesh) and len(exportMesh.uv_layers) > 0 - def export_mesh(self, objectRef, scene): + def export_mesh(self, object_ref): """Exports a single mesh object.""" # profile_time = time.time() - table = objectRef[1]["objectTable"] + table = object_ref[1]["objectTable"] bobject = table[0] - oid = arm.utils.safestr(objectRef[1]["structName"]) + oid = arm.utils.safestr(object_ref[1]["structName"]) wrd = bpy.data.worlds['Arm'] if wrd.arm_single_data_file: fp = None else: - fp = self.get_meshes_file_path('mesh_' + oid, compressed=self.is_compress()) + fp = self.get_meshes_file_path('mesh_' + oid, compressed=ArmoryExporter.compress_enabled) assets.add(fp) # No export necessary if bobject.data.arm_cached and os.path.exists(fp): @@ -1255,44 +1360,43 @@ class ArmoryExporter: if wrd.arm_verbose_output: print('Exporting mesh ' + arm.utils.asset_name(bobject.data)) - o = {} - o['name'] = oid - mesh = objectRef[0] - structFlag = False + out_mesh = {'name': oid} + mesh = object_ref[0] + struct_flag = False # Save the morph state if necessary - activeShapeKeyIndex = bobject.active_shape_key_index - showOnlyShapeKey = bobject.show_only_shape_key - currentMorphValue = [] + active_shape_key_index = bobject.active_shape_key_index + show_only_shape_key = bobject.show_only_shape_key + current_morph_value = [] - shapeKeys = ArmoryExporter.get_shape_keys(mesh) - if shapeKeys: + shape_keys = ArmoryExporter.get_shape_keys(mesh) + if shape_keys: bobject.active_shape_key_index = 0 bobject.show_only_shape_key = True - baseIndex = 0 - relative = shapeKeys.use_relative + base_index = 0 + relative = shape_keys.use_relative if relative: - morphCount = 0 - baseName = shapeKeys.reference_key.name - for block in shapeKeys.key_blocks: - if block.name == baseName: - baseIndex = morphCount + morph_count = 0 + base_name = shape_keys.reference_key.name + for block in shape_keys.key_blocks: + if block.name == base_name: + base_index = morph_count break - morphCount += 1 + morph_count += 1 - morphCount = 0 - for block in shapeKeys.key_blocks: - currentMorphValue.append(block.value) + morph_count = 0 + for block in shape_keys.key_blocks: + current_morph_value.append(block.value) block.value = 0.0 if block.name != "": - # self.IndentWrite(B"Morph (index = ", 0, structFlag) - # self.WriteInt(morphCount) + # self.IndentWrite(B"Morph (index = ", 0, struct_flag) + # self.WriteInt(morph_count) - # if (relative) and (morphCount != baseIndex): + # if (relative) and (morph_count != base_index): # self.Write(B", base = ") - # self.WriteInt(baseIndex) + # self.WriteInt(base_index) # self.Write(B")\n") # self.IndentWrite(B"{\n") @@ -1301,24 +1405,24 @@ class ArmoryExporter: # self.Write(B"\"}}\n") # self.IndentWrite(B"}\n") # TODO - structFlag = True + struct_flag = True - morphCount += 1 + morph_count += 1 - shapeKeys.key_blocks[0].value = 1.0 + shape_keys.key_blocks[0].value = 1.0 mesh.update() armature = bobject.find_armature() apply_modifiers = not armature bobject_eval = bobject.evaluated_get(self.depsgraph) if apply_modifiers else bobject - exportMesh = bobject_eval.to_mesh() + export_mesh = bobject_eval.to_mesh() - if exportMesh is None: + if export_mesh is None: log.warn(oid + ' was not exported') return - if len(exportMesh.uv_layers) > 2: + if 
len(export_mesh.uv_layers) > 2: log.warn(oid + ' exceeds maximum of 2 UV Maps supported') # Update aabb @@ -1326,103 +1430,106 @@ class ArmoryExporter: # Process meshes if ArmoryExporter.optimize_enabled: - vert_list = exporter_opt.export_mesh_data(self, exportMesh, bobject, o, has_armature=armature is not None) + vert_list = exporter_opt.export_mesh_data(self, export_mesh, bobject, out_mesh, has_armature=armature is not None) if armature: - exporter_opt.export_skin(self, bobject, armature, vert_list, o) + exporter_opt.export_skin(self, bobject, armature, vert_list, out_mesh) else: - self.export_mesh_data(exportMesh, bobject, o, has_armature=armature is not None) + self.export_mesh_data(export_mesh, bobject, out_mesh, has_armature=armature is not None) if armature: - self.export_skin(bobject, armature, exportMesh, o) + self.export_skin(bobject, armature, export_mesh, out_mesh) # Restore the morph state - if shapeKeys: - bobject.active_shape_key_index = activeShapeKeyIndex - bobject.show_only_shape_key = showOnlyShapeKey + if shape_keys: + bobject.active_shape_key_index = active_shape_key_index + bobject.show_only_shape_key = show_only_shape_key - for m in range(len(currentMorphValue)): - shapeKeys.key_blocks[m].value = currentMorphValue[m] + for m in range(len(current_morph_value)): + shape_keys.key_blocks[m].value = current_morph_value[m] mesh.update() # Check if mesh is using instanced rendering - instanced_type, instanced_data = self.object_process_instancing(table, o['scale_pos']) + instanced_type, instanced_data = self.object_process_instancing(table, out_mesh['scale_pos']) # Save offset data for instanced rendering if instanced_type > 0: - o['instanced_data'] = instanced_data - o['instanced_type'] = instanced_type + out_mesh['instanced_data'] = instanced_data + out_mesh['instanced_type'] = instanced_type # Export usage if bobject.data.arm_dynamic_usage: - o['dynamic_usage'] = bobject.data.arm_dynamic_usage + out_mesh['dynamic_usage'] = bobject.data.arm_dynamic_usage - self.write_mesh(bobject, fp, o) + self.write_mesh(bobject, fp, out_mesh) # print('Mesh exported in ' + str(time.time() - profile_time)) if hasattr(bobject, 'evaluated_get'): bobject_eval.to_mesh_clear() - def export_light(self, objectRef): + def export_light(self, object_ref): """Exports a single light object.""" rpdat = arm.utils.get_rp() - objref = objectRef[0] - objtype = objref.type - o = {} - o['name'] = objectRef[1]["structName"] - o['type'] = objtype.lower() - o['cast_shadow'] = objref.use_shadow - o['near_plane'] = objref.arm_clip_start - o['far_plane'] = objref.arm_clip_end - o['fov'] = objref.arm_fov - o['color'] = [objref.color[0], objref.color[1], objref.color[2]] - o['strength'] = objref.energy - o['shadows_bias'] = objref.arm_shadows_bias * 0.0001 + light_ref = object_ref[0] + objtype = light_ref.type + out_light = { + 'name': object_ref[1]["structName"], + 'type': objtype.lower(), + 'cast_shadow': light_ref.use_shadow, + 'near_plane': light_ref.arm_clip_start, + 'far_plane': light_ref.arm_clip_end, + 'fov': light_ref.arm_fov, + 'color': [light_ref.color[0], light_ref.color[1], light_ref.color[2]], + 'strength': light_ref.energy, + 'shadows_bias': light_ref.arm_shadows_bias * 0.0001 + } if rpdat.rp_shadows: if objtype == 'POINT': - o['shadowmap_size'] = int(rpdat.rp_shadowmap_cube) + out_light['shadowmap_size'] = int(rpdat.rp_shadowmap_cube) else: - o['shadowmap_size'] = arm.utils.get_cascade_size(rpdat) + out_light['shadowmap_size'] = arm.utils.get_cascade_size(rpdat) else: - o['shadowmap_size'] = 0 + 
out_light['shadowmap_size'] = 0 if objtype == 'SUN': - o['strength'] *= 0.325 - o['shadows_bias'] *= 20.0 # Scale bias for ortho light matrix - if o['shadowmap_size'] > 1024: - o['shadows_bias'] *= 1 / (o['shadowmap_size'] / 1024) # Less bias for bigger maps + out_light['strength'] *= 0.325 + # Scale bias for ortho light matrix + out_light['shadows_bias'] *= 20.0 + if out_light['shadowmap_size'] > 1024: + # Less bias for bigger maps + out_light['shadows_bias'] *= 1 / (out_light['shadowmap_size'] / 1024) elif objtype == 'POINT': - o['strength'] *= 2.6 + out_light['strength'] *= 2.6 if bpy.app.version >= (2, 80, 72): - o['strength'] *= 0.01 - o['fov'] = 1.5708 # pi/2 - o['shadowmap_cube'] = True - if objref.shadow_soft_size > 0.1: - o['light_size'] = objref.shadow_soft_size * 10 + out_light['strength'] *= 0.01 + out_light['fov'] = 1.5708 # pi/2 + out_light['shadowmap_cube'] = True + if light_ref.shadow_soft_size > 0.1: + out_light['light_size'] = light_ref.shadow_soft_size * 10 elif objtype == 'SPOT': - o['strength'] *= 2.6 + out_light['strength'] *= 2.6 if bpy.app.version >= (2, 80, 72): - o['strength'] *= 0.01 - o['spot_size'] = math.cos(objref.spot_size / 2) - o['spot_blend'] = objref.spot_blend / 10 # Cycles defaults to 0.15 + out_light['strength'] *= 0.01 + out_light['spot_size'] = math.cos(light_ref.spot_size / 2) + # Cycles defaults to 0.15 + out_light['spot_blend'] = light_ref.spot_blend / 10 elif objtype == 'AREA': - o['strength'] *= 80.0 / (objref.size * objref.size_y) + out_light['strength'] *= 80.0 / (light_ref.size * light_ref.size_y) if bpy.app.version >= (2, 80, 72): - o['strength'] *= 0.01 - o['size'] = objref.size - o['size_y'] = objref.size_y + out_light['strength'] *= 0.01 + out_light['size'] = light_ref.size + out_light['size_y'] = light_ref.size_y - self.output['light_datas'].append(o) + self.output['light_datas'].append(out_light) def export_probe(self, objectRef): - o = {} - o['name'] = objectRef[1]["structName"] + o = {'name': objectRef[1]["structName"]} bo = objectRef[0] if bo.type == 'GRID': o['type'] = 'grid' elif bo.type == 'PLANAR': o['type'] = 'planar' - else: # CUBEMAP + else: o['type'] = 'cubemap' self.output['probe_datas'].append(o) @@ -1431,10 +1538,11 @@ class ArmoryExporter: """Exports a single collection.""" scene_objects = self.scene.collection.all_objects - out_collection = {} - out_collection['name'] = collection.name - out_collection['instance_offset'] = list(collection.instance_offset) - out_collection['object_refs'] = [] + out_collection = { + 'name': collection.name, + 'instance_offset': list(collection.instance_offset), + 'object_refs': [] + } for bobject in collection.objects: @@ -1454,7 +1562,7 @@ class ArmoryExporter: asset_name = arm.utils.asset_name(bobject) if collection.library is None: - #collection is in the same file, but (likely) on another scene + # collection is in the same file, but (likely) on another scene if asset_name not in scene_objects: self.process_bobject(bobject) self.export_object(bobject, self.scene) @@ -1502,7 +1610,8 @@ class ArmoryExporter: else: return [0.051, 0.051, 0.051, 1.0] - def extract_projection(self, o, proj, with_planes=True): + @staticmethod + def extract_projection(o, proj, with_planes=True): a = proj[0][0] b = proj[1][1] c = proj[2][2] @@ -1513,7 +1622,8 @@ class ArmoryExporter: o['near_plane'] = (d * (1.0 - k)) / (2.0 * k) o['far_plane'] = k * o['near_plane'] - def extract_ortho(self, o, proj): + @staticmethod + def extract_ortho(o, proj): # left, right, bottom, top o['ortho'] = [-(1 + proj[3][0]) / 
proj[0][0], \ (1 - proj[3][0]) / proj[0][0], \ @@ -1559,7 +1669,7 @@ class ArmoryExporter: if not os.path.exists(unpack_path): os.makedirs(unpack_path) unpack_filepath = unpack_path + '/' + objref.sound.name - if os.path.isfile(unpack_filepath) == False or os.path.getsize(unpack_filepath) != objref.sound.packed_file.size: + if not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != objref.sound.packed_file.size: with open(unpack_filepath, 'wb') as f: f.write(objref.sound.packed_file.data) assets.add(unpack_filepath) @@ -1602,7 +1712,7 @@ class ArmoryExporter: self.output['material_datas'].append(o) bpy.data.materials.remove(mat) rpdat = arm.utils.get_rp() - if rpdat.arm_culling == False: + if not rpdat.arm_culling: o['override_context'] = {} o['override_context']['cull_mode'] = 'none' @@ -1637,23 +1747,25 @@ class ArmoryExporter: wrd = bpy.data.worlds['Arm'] # Keep materials with fake user - for m in bpy.data.materials: - if m.use_fake_user and m not in self.materialArray: - self.materialArray.append(m) + for material in bpy.data.materials: + if material.use_fake_user and material not in self.material_array: + self.material_array.append(material) + # Ensure the same order for merging materials - self.materialArray.sort(key=lambda x: x.name) + self.material_array.sort(key=lambda x: x.name) if wrd.arm_batch_materials: - mat_users = self.materialToObjectDict - mat_armusers = self.materialToArmObjectDict - mat_batch.build(self.materialArray, mat_users, mat_armusers) + mat_users = self.material_to_object_dict + mat_armusers = self.material_to_arm_object_dict + mat_batch.build(self.material_array, mat_users, mat_armusers) transluc_used = False overlays_used = False blending_used = False decals_used = False # sss_used = False - for material in self.materialArray: + + for material in self.material_array: # If the material is unlinked, material becomes None if material is None: continue @@ -1675,7 +1787,7 @@ class ArmoryExporter: o['skip_context'] = material.arm_skip_context rpdat = arm.utils.get_rp() - if material.arm_two_sided or rpdat.arm_culling == False: + if material.arm_two_sided or not rpdat.arm_culling: o['override_context'] = {} o['override_context']['cull_mode'] = 'none' elif material.arm_cull_mode != 'clockwise': @@ -1684,8 +1796,8 @@ class ArmoryExporter: o['contexts'] = [] - mat_users = self.materialToObjectDict - mat_armusers = self.materialToArmObjectDict + mat_users = self.material_to_object_dict + mat_armusers = self.material_to_arm_object_dict sd, rpasses = make_material.parse(material, o, mat_users, mat_armusers) # Attach MovieTexture @@ -1736,8 +1848,8 @@ class ArmoryExporter: material.export_vcols = vcol_export material.export_tangents = tang_export - if material in self.materialToObjectDict: - mat_users = self.materialToObjectDict[material] + if material in self.material_to_object_dict: + mat_users = self.material_to_object_dict[material] for ob in mat_users: ob.data.arm_cached = False @@ -1766,47 +1878,52 @@ class ArmoryExporter: make_renderpath.build() def export_particle_systems(self): - if len(self.particleSystemArray) > 0: + if len(self.particle_system_array) > 0: self.output['particle_datas'] = [] - for particleRef in self.particleSystemArray.items(): - o = {} + for particleRef in self.particle_system_array.items(): psettings = particleRef[0] if psettings is None: continue - if psettings.instance_object == None or psettings.render_type != 'OBJECT': + if psettings.instance_object is None or psettings.render_type != 'OBJECT': continue - o['name'] = 
particleRef[1]["structName"] - o['type'] = 0 if psettings.type == 'EMITTER' else 1 # HAIR - o['loop'] = psettings.arm_loop - o['render_emitter'] = False # TODO - # Emission - o['count'] = int(psettings.count * psettings.arm_count_mult) - o['frame_start'] = int(psettings.frame_start) - o['frame_end'] = int(psettings.frame_end) - o['lifetime'] = psettings.lifetime - o['lifetime_random'] = psettings.lifetime_random - o['emit_from'] = 1 if psettings.emit_from == 'VOLUME' else 0 # VERT, FACE - # Velocity - # o['normal_factor'] = psettings.normal_factor - # o['tangent_factor'] = psettings.tangent_factor - # o['tangent_phase'] = psettings.tangent_phase - o['object_align_factor'] = [psettings.object_align_factor[0], psettings.object_align_factor[1], psettings.object_align_factor[2]] - # o['object_factor'] = psettings.object_factor - o['factor_random'] = psettings.factor_random - # Physics - o['physics_type'] = 1 if psettings.physics_type == 'NEWTON' else 0 - o['particle_size'] = psettings.particle_size - o['size_random'] = psettings.size_random - o['mass'] = psettings.mass - # Render - o['instance_object'] = psettings.instance_object.name - self.objectToArmObjectDict[psettings.instance_object]['is_particle'] = True - # Field weights - o['weight_gravity'] = psettings.effector_weights.gravity - self.output['particle_datas'].append(o) + out_particlesys = { + 'name': particleRef[1]["structName"], + 'type': 0 if psettings.type == 'EMITTER' else 1, # HAIR + 'loop': psettings.arm_loop, + # Emission + 'count': int(psettings.count * psettings.arm_count_mult), + 'frame_start': int(psettings.frame_start), + 'frame_end': int(psettings.frame_end), + 'lifetime': psettings.lifetime, + 'lifetime_random': psettings.lifetime_random, + 'emit_from': 1 if psettings.emit_from == 'VOLUME' else 0, # VERT, FACE + # Velocity + # 'normal_factor': psettings.normal_factor, + # 'tangent_factor': psettings.tangent_factor, + # 'tangent_phase': psettings.tangent_phase, + 'object_align_factor': ( + psettings.object_align_factor[0], + psettings.object_align_factor[1], + psettings.object_align_factor[2] + ), + # 'object_factor': psettings.object_factor, + 'factor_random': psettings.factor_random, + # Physics + 'physics_type': 1 if psettings.physics_type == 'NEWTON' else 0, + 'particle_size': psettings.particle_size, + 'size_random': psettings.size_random, + 'mass': psettings.mass, + # Render + 'instance_object': arm.utils.asset_name(psettings.instance_object), + # Field weights + 'weight_gravity': psettings.effector_weights.gravity + } + + self.object_to_arm_object_dict[psettings.instance_object]['is_particle'] = True + self.output['particle_datas'].append(out_particlesys) def export_tilesheets(self): wrd = bpy.data.worlds['Arm'] @@ -1828,17 +1945,18 @@ class ArmoryExporter: o['actions'].append(ao) self.output['tilesheet_datas'].append(o) - def export_worlds(self): - worldRef = self.scene.world - if worldRef is not None: - o = {} - w = worldRef - o['name'] = w.name - self.post_export_world(w, o) - self.output['world_datas'].append(o) + def export_world(self): + """Exports the world of the scene.""" + world = self.scene.world + if world is not None: + world_name = arm.utils.safestr(world.name) - def is_compress(self): - return ArmoryExporter.compress_enabled + if world_name not in self.world_array: + self.world_array.append(world_name) + out_world = {'name': world_name} + + self.post_export_world(world, out_world) + self.output['world_datas'].append(out_world) def export_objects(self, scene): """Exports all supported blender objects. 
@@ -1858,10 +1976,10 @@ class ArmoryExporter: self.output['camera_datas'] = [] self.output['speaker_datas'] = [] - for light_ref in self.lightArray.items(): + for light_ref in self.light_array.items(): self.export_light(light_ref) - for camera_ref in self.cameraArray.items(): + for camera_ref in self.camera_array.items(): self.export_camera(camera_ref) # Keep sounds with fake user @@ -1869,72 +1987,39 @@ class ArmoryExporter: if sound.use_fake_user: assets.add(arm.utils.asset_path(sound.filepath)) - for speaker_ref in self.speakerArray.items(): + for speaker_ref in self.speaker_array.items(): self.export_speaker(speaker_ref) if bpy.data.lightprobes: self.output['probe_datas'] = [] - for lightprobe_object in self.probeArray.items(): + for lightprobe_object in self.probe_array.items(): self.export_probe(lightprobe_object) self.output['mesh_datas'] = [] - for mesh_ref in self.meshArray.items(): - self.export_mesh(mesh_ref, scene) + for mesh_ref in self.mesh_array.items(): + self.export_mesh(mesh_ref) - def execute(self, context, filepath, scene=None, depsgraph=None): - global current_output + def execute(self): + """Exports the scene.""" profile_time = time.time() - - self.scene = context.scene if scene == None else scene - current_frame, current_subframe = self.scene.frame_current, self.scene.frame_subframe - print('Exporting ' + arm.utils.asset_name(self.scene)) - self.output = {} - current_output = self.output - self.output['frame_time'] = 1.0 / (self.scene.render.fps / self.scene.render.fps_base) - self.filepath = filepath - self.bobjectArray = {} - self.bobjectBoneArray = {} - self.meshArray = {} - self.lightArray = {} - self.probeArray = {} - self.cameraArray = {} - self.camera_spawned = False - self.speakerArray = {} - self.materialArray = [] - self.particleSystemArray = {} - self.worldArray = {} # Export all worlds - self.boneParentArray = {} - self.materialToObjectDict = dict() - self.defaultMaterialObjects = [] # If no material is assigned, provide default to mimic cycles - self.defaultSkinMaterialObjects = [] - self.defaultPartMaterialObjects = [] - self.materialToArmObjectDict = dict() - self.objectToArmObjectDict = dict() - self.bone_tracks = [] - # self.active_layers = [] - # for i in range(0, len(self.scene.view_layers)): - # if self.scene.view_layers[i] == True: - # self.active_layers.append(i) - self.depsgraph = context.evaluated_depsgraph_get() if depsgraph == None else depsgraph - self.preprocess() - - # scene_objects = [] - # for lay in self.scene.view_layers: - # scene_objects += lay.objects - scene_objects = self.scene.collection.all_objects + current_frame, current_subframe = self.scene.frame_current, self.scene.frame_subframe + scene_objects: List[bpy.types.Object] = self.scene.collection.all_objects.values() + # bobject => blender object for bobject in scene_objects: - # Map objects to game objects - o = {} - o['traits'] = [] - self.objectToArmObjectDict[bobject] = o + # Initialize object export data (map objects to game objects) + out_object: Dict[str, Any] = {'traits': []} + self.object_to_arm_object_dict[bobject] = out_object + # Process - # Skip objects that have a parent because children will be exported recursively + # Skip objects that have a parent because children are + # processed recursively if not bobject.parent: self.process_bobject(bobject) - # Softbody needs connected triangles, use optimized geometry export + # Softbody needs connected triangles, use optimized + # geometry export for mod in bobject.modifiers: if mod.type == 'CLOTH' or mod.type == 
'SOFT_BODY': ArmoryExporter.optimize_enabled = True @@ -1947,59 +2032,8 @@ class ArmoryExporter: elif not bpy.data.worlds['Arm'].arm_minimize: self.output['name'] += '.json' - # Create unique material variants for skinning, tilesheets, particles - matvars = [] - matslots = [] - for bo in scene_objects: - if arm.utils.export_bone_data(bo): - for slot in bo.material_slots: - if slot.material is None or slot.material.library is not None: - continue - if slot.material.name.endswith('_armskin'): - continue - matslots.append(slot) - mat_name = slot.material.name + '_armskin' - mat = bpy.data.materials.get(mat_name) - if mat is None: - mat = slot.material.copy() - mat.name = mat_name - matvars.append(mat) - slot.material = mat - elif bo.arm_tilesheet != '': - for slot in bo.material_slots: - if slot.material is None or slot.material.library is not None: - continue - if slot.material.name.endswith('_armtile'): - continue - matslots.append(slot) - mat_name = slot.material.name + '_armtile' - mat = bpy.data.materials.get(mat_name) - if mat is None: - mat = slot.material.copy() - mat.name = mat_name - mat.arm_tilesheet_flag = True - matvars.append(mat) - slot.material = mat - - # Particle and non-particle objects can not share material - for psys in bpy.data.particles: - bo = psys.instance_object - if bo == None or psys.render_type != 'OBJECT': - continue - for slot in bo.material_slots: - if slot.material == None or slot.material.library is not None: - continue - if slot.material.name.endswith('_armpart'): - continue - matslots.append(slot) - mat_name = slot.material.name + '_armpart' - mat = bpy.data.materials.get(mat_name) - if mat is None: - mat = slot.material.copy() - mat.name = mat_name - mat.arm_particle_flag = True - matvars.append(mat) - slot.material = mat + # Create unique material variants for skinning, tilesheets and particles + matvars, matslots = self.create_material_variants(self.scene) # Auto-bones wrd = bpy.data.worlds['Arm'] @@ -2013,36 +2047,45 @@ class ArmoryExporter: # Terrain if self.scene.arm_terrain_object is not None: - # Append trait - if not 'traits' in self.output: - self.output['traits'] = [] - trait = {} - trait['type'] = 'Script' - trait['class_name'] = 'armory.trait.internal.TerrainPhysics' - self.output['traits'].append(trait) - ArmoryExporter.import_traits.append(trait['class_name']) - ArmoryExporter.export_physics = True assets.add_khafile_def('arm_terrain') + + # Append trait + out_trait = { + 'type': 'Script', + 'class_name': 'armory.trait.internal.TerrainPhysics' + } + if 'traits' not in self.output: + self.output['traits']: List[Dict[str, str]] = [] + + self.output['traits'].append(out_trait) + + ArmoryExporter.import_traits.append(out_trait['class_name']) + ArmoryExporter.export_physics = True + # Export material mat = self.scene.arm_terrain_object.children[0].data.materials[0] - self.materialArray.append(mat) + self.material_array.append(mat) # Terrain data - terrain = {} - terrain['name'] = 'Terrain' - terrain['sectors_x'] = self.scene.arm_terrain_sectors[0] - terrain['sectors_y'] = self.scene.arm_terrain_sectors[1] - terrain['sector_size'] = self.scene.arm_terrain_sector_size - terrain['height_scale'] = self.scene.arm_terrain_height_scale - terrain['material_ref'] = mat.name - self.output['terrain_datas'] = [terrain] + out_terrain = { + 'name': 'Terrain', + 'sectors_x': self.scene.arm_terrain_sectors[0], + 'sectors_y': self.scene.arm_terrain_sectors[1], + 'sector_size': self.scene.arm_terrain_sector_size, + 'height_scale': 
self.scene.arm_terrain_height_scale, + 'material_ref': mat.name + } + self.output['terrain_datas'] = [out_terrain] self.output['terrain_ref'] = 'Terrain' + # Export objects self.output['objects'] = [] - for bo in scene_objects: - # Skip objects that have a parent because children will be exported recursively - if not bo.parent: - self.export_object(bo, self.scene) + for bobject in scene_objects: + # Skip objects that have a parent because children are + # exported recursively + if not bobject.parent: + self.export_object(bobject, self.scene) + # Export collections if bpy.data.collections: self.output['groups'] = [] for collection in bpy.data.collections: @@ -2061,19 +2104,19 @@ class ArmoryExporter: self.output['material_datas'] = [] # Object with no material assigned in the scene - if len(self.defaultMaterialObjects) > 0: - self.make_default_mat('armdefault', self.defaultMaterialObjects) - if len(self.defaultSkinMaterialObjects) > 0: - self.make_default_mat('armdefaultskin', self.defaultSkinMaterialObjects) + if len(self.default_material_objects) > 0: + self.make_default_mat('armdefault', self.default_material_objects) + if len(self.default_skin_material_objects) > 0: + self.make_default_mat('armdefaultskin', self.default_skin_material_objects) if len(bpy.data.particles) > 0: self.use_default_material_part() - if len(self.defaultPartMaterialObjects) > 0: - self.make_default_mat('armdefaultpart', self.defaultPartMaterialObjects, is_particle=True) + if len(self.default_part_material_objects) > 0: + self.make_default_mat('armdefaultpart', self.default_part_material_objects, is_particle=True) self.export_materials() self.export_particle_systems() self.output['world_datas'] = [] - self.export_worlds() + self.export_world() self.export_tilesheets() if self.scene.world is not None: @@ -2105,48 +2148,7 @@ class ArmoryExporter: if (len(self.output['camera_datas']) == 0 or len(bpy.data.cameras) == 0) or not self.camera_spawned: self.create_default_camera() - # Scene traits - if wrd.arm_physics != 'Disabled' and ArmoryExporter.export_physics: - if not 'traits' in self.output: - self.output['traits'] = [] - x = {} - x['type'] = 'Script' - phys_pkg = 'bullet' if wrd.arm_physics_engine == 'Bullet' else 'oimo' - x['class_name'] = 'armory.trait.physics.' 
+ phys_pkg + '.PhysicsWorld' - rbw = self.scene.rigidbody_world - if rbw != None and rbw.enabled: - x['parameters'] = [str(rbw.time_scale), str(1 / rbw.steps_per_second), str(rbw.solver_iterations)] - self.output['traits'].append(x) - if wrd.arm_navigation != 'Disabled' and ArmoryExporter.export_navigation: - if not 'traits' in self.output: - self.output['traits'] = [] - x = {} - x['type'] = 'Script' - x['class_name'] = 'armory.trait.navigation.Navigation' - self.output['traits'].append(x) - if wrd.arm_debug_console: - if not 'traits' in self.output: - self.output['traits'] = [] - ArmoryExporter.export_ui = True - x = {} - x['type'] = 'Script' - x['class_name'] = 'armory.trait.internal.DebugConsole' - x['parameters'] = [str(arm.utils.get_ui_scale())] - self.output['traits'].append(x) - if wrd.arm_live_patch: - if not 'traits' in self.output: - self.output['traits'] = [] - x = {} - x['type'] = 'Script' - x['class_name'] = 'armory.trait.internal.LivePatch' - self.output['traits'].append(x) - if len(self.scene.arm_traitlist) > 0: - if not 'traits' in self.output: - self.output['traits'] = [] - self.export_traits(self.scene, self.output) - if 'traits' in self.output: - for x in self.output['traits']: - ArmoryExporter.import_traits.append(x['class_name']) + self.export_scene_traits() self.export_canvas_themes() @@ -2161,7 +2163,7 @@ class ArmoryExporter: # Remove created material variants for slot in matslots: # Set back to original material - orig_mat = bpy.data.materials[slot.material.name[:-8]] # _armskin, _armpart, _armtile + orig_mat = bpy.data.materials[slot.material.name[:-8]] # _armskin, _armpart, _armtile orig_mat.export_uvs = slot.material.export_uvs orig_mat.export_vcols = slot.material.export_vcols orig_mat.export_tangents = slot.material.export_tangents @@ -2171,70 +2173,80 @@ class ArmoryExporter: bpy.data.materials.remove(mat, do_unlink=True) # Restore frame - if scene.frame_current != current_frame: - scene.frame_set(current_frame, subframe=current_subframe) + if self.scene.frame_current != current_frame: + self.scene.frame_set(current_frame, subframe=current_subframe) print('Scene exported in ' + str(time.time() - profile_time)) - return {'FINISHED'} def create_default_camera(self, is_viewport_camera=False): - o = {} - o['name'] = 'DefaultCamera' - o['near_plane'] = 0.1 - o['far_plane'] = 100.0 - o['fov'] = 0.85 - o['frustum_culling'] = True - o['clear_color'] = self.get_camera_clear_color() + """Creates the default camera and adds a WalkNavigation trait to it.""" + out_camera = { + 'name': 'DefaultCamera', + 'near_plane': 0.1, + 'far_plane': 100.0, + 'fov': 0.85, + 'frustum_culling': True, + 'clear_color': self.get_camera_clear_color() + } + # Set viewport camera projection if is_viewport_camera: proj, is_persp = self.get_viewport_projection_matrix() if proj is not None: if is_persp: - self.extract_projection(o, proj, with_planes=False) + self.extract_projection(out_camera, proj, with_planes=False) else: - self.extract_ortho(o, proj) - self.output['camera_datas'].append(o) + self.extract_ortho(out_camera, proj) + self.output['camera_datas'].append(out_camera) - o = {} - o['name'] = 'DefaultCamera' - o['type'] = 'camera_object' - o['data_ref'] = 'DefaultCamera' - o['material_refs'] = [] - o['transform'] = {} + out_object = { + 'name': 'DefaultCamera', + 'type': 'camera_object', + 'data_ref': 'DefaultCamera', + 'material_refs': [], + 'transform': {} + } viewport_matrix = self.get_viewport_view_matrix() if viewport_matrix is not None: - o['transform']['values'] = 
self.write_matrix(viewport_matrix.inverted_safe()) - o['local_only'] = True + out_object['transform']['values'] = ArmoryExporter.write_matrix(viewport_matrix.inverted_safe()) + out_object['local_only'] = True else: - o['transform']['values'] = [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0] - o['traits'] = [] - trait = {} - trait['type'] = 'Script' - trait['class_name'] = 'armory.trait.WalkNavigation' - o['traits'].append(trait) + out_object['transform']['values'] = [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0] + + # Add WalkNavigation trait + trait = { + 'type': 'Script', + 'class_name': 'armory.trait.WalkNavigation' + } + out_object['traits'] = [trait] ArmoryExporter.import_traits.append(trait['class_name']) - self.output['objects'].append(o) + + self.output['objects'].append(out_object) self.output['camera_ref'] = 'DefaultCamera' - def get_export_tangents(self, mesh): - for m in mesh.materials: - if m != None and m.export_tangents == True: + @staticmethod + def get_export_tangents(mesh): + for material in mesh.materials: + if material is not None and material.export_tangents: return True return False - def get_export_vcols(self, mesh): - for m in mesh.materials: - if m != None and m.export_vcols == True: + @staticmethod + def get_export_vcols(mesh): + for material in mesh.materials: + if material is not None and material.export_vcols: return True return False - def get_export_uvs(self, mesh): - for m in mesh.materials: - if m != None and m.export_uvs == True: + @staticmethod + def get_export_uvs(mesh): + for material in mesh.materials: + if material is not None and material.export_uvs: return True return False - def object_process_instancing(self, refs, scale_pos): + @staticmethod + def object_process_instancing(refs, scale_pos): instanced_type = 0 instanced_data = None for bobject in refs: @@ -2254,7 +2266,7 @@ class ArmoryExporter: instanced_data = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0] for child in bobject.children: - if child.arm_export == False or child.hide_render: + if not child.arm_export or child.hide_render: continue if 'Loc' in inst: loc = child.matrix_local.to_translation() # Without parent matrix @@ -2266,7 +2278,7 @@ class ArmoryExporter: instanced_data.append(rot.x) instanced_data.append(rot.y) instanced_data.append(rot.z) - if 'Scale'in inst: + if 'Scale' in inst: scale = child.matrix_local.to_scale() instanced_data.append(scale.x) instanced_data.append(scale.y) @@ -2286,34 +2298,7 @@ class ArmoryExporter: return instanced_type, instanced_data - def preprocess(self): - wrd = bpy.data.worlds['Arm'] - ArmoryExporter.export_all_flag = True - ArmoryExporter.export_physics = False # Indicates whether rigid body is exported - if wrd.arm_physics == 'Enabled': - ArmoryExporter.export_physics = True - ArmoryExporter.export_navigation = False - if wrd.arm_navigation == 'Enabled': - ArmoryExporter.export_navigation = True - ArmoryExporter.export_ui = False - if not hasattr(ArmoryExporter, 'compress_enabled'): - ArmoryExporter.compress_enabled = False - if not hasattr(ArmoryExporter, 'optimize_enabled'): - ArmoryExporter.optimize_enabled = False - if not hasattr(ArmoryExporter, 'import_traits'): - ArmoryExporter.import_traits = [] # Referenced traits - ArmoryExporter.option_mesh_only = False - - def preprocess_object(self, bobject): # Returns false if object should not be exported - export_object = True - - # Disabled object - if bobject.arm_export == False: - return False - - return export_object - - def 
post_export_object(self, bobject, o, type): + def post_export_object(self, bobject: bpy.types.Object, o, type): # Export traits self.export_traits(bobject, o) @@ -2322,10 +2307,10 @@ class ArmoryExporter: phys_pkg = 'bullet' if wrd.arm_physics_engine == 'Bullet' else 'oimo' # Rigid body trait - if bobject.rigid_body != None and phys_enabled: + if bobject.rigid_body is not None and phys_enabled: ArmoryExporter.export_physics = True rb = bobject.rigid_body - shape = 0 # BOX + shape = 0 # BOX if rb.collision_shape == 'SPHERE': shape = 1 elif rb.collision_shape == 'CONVEX_HULL': @@ -2400,22 +2385,21 @@ class ArmoryExporter: # Phys traits if phys_enabled: - for m in bobject.modifiers: - if m.type == 'CLOTH': - self.add_softbody_mod(o, bobject, m, 0) # SoftShape.Cloth - elif m.type == 'SOFT_BODY': - self.add_softbody_mod(o, bobject, m, 1) # SoftShape.Volume - elif m.type == 'HOOK': - self.add_hook_mod(o, bobject, m.object.name, m.vertex_group) + for modifier in bobject.modifiers: + if modifier.type == 'CLOTH' or modifier.type == 'SOFT_BODY': + self.add_softbody_mod(o, bobject, modifier) + elif modifier.type == 'HOOK': + self.add_hook_mod(o, bobject, modifier.object.name, modifier.vertex_group) + # Rigid body constraint rbc = bobject.rigid_body_constraint - if rbc != None and rbc.enabled: + if rbc is not None and rbc.enabled: self.add_rigidbody_constraint(o, rbc) # Camera traits - if type == NodeTypeCamera: + if type is NodeType.CAMERA: # Viewport camera enabled, attach navigation to active camera - if self.scene.camera != None and bobject.name == self.scene.camera.name and bpy.data.worlds['Arm'].arm_play_camera != 'Scene': + if self.scene.camera is not None and bobject.name == self.scene.camera.name and bpy.data.worlds['Arm'].arm_play_camera != 'Scene': navigation_trait = {} navigation_trait['type'] = 'Script' navigation_trait['class_name'] = 'armory.trait.WalkNavigation' @@ -2424,12 +2408,12 @@ class ArmoryExporter: # Map objects to materials, can be used in later stages for i in range(len(bobject.material_slots)): mat = self.slot_to_material(bobject, bobject.material_slots[i]) - if mat in self.materialToObjectDict: - self.materialToObjectDict[mat].append(bobject) - self.materialToArmObjectDict[mat].append(o) + if mat in self.material_to_object_dict: + self.material_to_object_dict[mat].append(bobject) + self.material_to_arm_object_dict[mat].append(o) else: - self.materialToObjectDict[mat] = [bobject] - self.materialToArmObjectDict[mat] = [o] + self.material_to_object_dict[mat] = [bobject] + self.material_to_arm_object_dict[mat] = [o] # Export constraints if len(bobject.constraints) > 0: @@ -2439,133 +2423,210 @@ class ArmoryExporter: for x in o['traits']: ArmoryExporter.import_traits.append(x['class_name']) - def add_constraints(self, bobject, o, bone=False): - for con in bobject.constraints: - if con.mute: + @staticmethod + def add_constraints(bobject, o, bone=False): + for constraint in bobject.constraints: + if constraint.mute: continue - co = {} - co['name'] = con.name - co['type'] = con.type + out_constraint = {'name': constraint.name, 'type': constraint.type} + if bone: - co['bone'] = bobject.name - if hasattr(con, 'target') and con.target is not None: - if con.type == 'COPY_LOCATION': - co['target'] = con.target.name - co['use_x'] = con.use_x - co['use_y'] = con.use_y - co['use_z'] = con.use_z - co['invert_x'] = con.invert_x - co['invert_y'] = con.invert_y - co['invert_z'] = con.invert_z - co['use_offset'] = con.use_offset - co['influence'] = con.influence - elif con.type == 'CHILD_OF': 
- co['target'] = con.target.name - co['influence'] = con.influence - o['constraints'].append(co) + out_constraint['bone'] = bobject.name + if hasattr(constraint, 'target') and constraint.target is not None: + if constraint.type == 'COPY_LOCATION': + out_constraint['target'] = constraint.target.name + out_constraint['use_x'] = constraint.use_x + out_constraint['use_y'] = constraint.use_y + out_constraint['use_z'] = constraint.use_z + out_constraint['invert_x'] = constraint.invert_x + out_constraint['invert_y'] = constraint.invert_y + out_constraint['invert_z'] = constraint.invert_z + out_constraint['use_offset'] = constraint.use_offset + out_constraint['influence'] = constraint.influence + elif constraint.type == 'CHILD_OF': + out_constraint['target'] = constraint.target.name + out_constraint['influence'] = constraint.influence - def export_traits(self, bobject, o): - if hasattr(bobject, 'arm_traitlist'): - for t in bobject.arm_traitlist: - if t.enabled_prop == False: + o['constraints'].append(out_constraint) + + def export_traits(self, bobject: bpy.types.Object, o): + if not hasattr(bobject, 'arm_traitlist'): + return + + for traitlistItem in bobject.arm_traitlist: + # Do not export disabled traits but still export those + # with fake user enabled so that nodes like `TraitNode` + # still work + if not traitlistItem.enabled_prop and not traitlistItem.fake_user: + continue + + out_trait = {} + if traitlistItem.type_prop == 'Logic Nodes' and traitlistItem.node_tree_prop is not None and traitlistItem.node_tree_prop.name != '': + group_name = arm.utils.safesrc(traitlistItem.node_tree_prop.name[0].upper() + traitlistItem.node_tree_prop.name[1:]) + + out_trait['type'] = 'Script' + out_trait['class_name'] = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_package) + '.node.' + group_name + + elif traitlistItem.type_prop == 'WebAssembly': + wpath = os.path.join(arm.utils.get_fp(), 'Bundled', traitlistItem.webassembly_prop + '.wasm') + if not os.path.exists(wpath): + log.warn(f'Wasm "{traitlistItem.webassembly_prop}" not found, skipping') continue - x = {} - if t.type_prop == 'Logic Nodes' and t.node_tree_prop != None and t.node_tree_prop.name != '': - x['type'] = 'Script' - group_name = arm.utils.safesrc(t.node_tree_prop.name[0].upper() + t.node_tree_prop.name[1:]) - x['class_name'] = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_package) + '.node.' 
+ group_name - elif t.type_prop == 'WebAssembly': - wpath = arm.utils.get_fp() + '/Bundled/' + t.webassembly_prop + '.wasm' - if not os.path.exists(wpath): - log.warn('Wasm "' + t.webassembly_prop + '" not found, skipping') - continue - x['type'] = 'Script' - x['class_name'] = 'armory.trait.internal.WasmScript' - x['parameters'] = ["'" + t.webassembly_prop + "'"] - elif t.type_prop == 'UI Canvas': - cpath = arm.utils.get_fp() + '/Bundled/canvas/' + t.canvas_name_prop + '.json' - if not os.path.exists(cpath): - log.warn('Scene "' + self.scene.name + '" - Object "' + bobject.name + '" - Referenced canvas "' + t.canvas_name_prop + '" not found, skipping') - continue - ArmoryExporter.export_ui = True - x['type'] = 'Script' - x['class_name'] = 'armory.trait.internal.CanvasScript' - x['parameters'] = ["'" + t.canvas_name_prop + "'"] - # assets.add(assetpath) # Bundled is auto-added - # Read file list and add canvas assets - assetpath = arm.utils.get_fp() + '/Bundled/canvas/' + t.canvas_name_prop + '.files' - if os.path.exists(assetpath): - with open(assetpath) as f: - fileList = f.read().splitlines() - for asset in fileList: - # Relative to the root/Bundled/canvas path - asset = asset[6:] # Strip ../../ to start in project root - assets.add(asset) - else: # Haxe/Bundled Script - if t.class_name_prop == '': # Empty class name, skip - continue - x['type'] = 'Script' - if t.type_prop == 'Bundled Script': - trait_prefix = 'armory.trait.' - # TODO: temporary, export single mesh navmesh as obj - if t.class_name_prop == 'NavMesh' and bobject.type == 'MESH' and bpy.data.worlds['Arm'].arm_navigation != 'Disabled': - ArmoryExporter.export_navigation = True - nav_path = arm.utils.get_fp_build() + '/compiled/Assets/navigation' - if not os.path.exists(nav_path): - os.makedirs(nav_path) - nav_filepath = nav_path + '/nav_' + bobject.data.name + '.arm' - assets.add(nav_filepath) - # TODO: Implement cache - #if os.path.isfile(nav_filepath) == False: - # override = {'selected_objects': [bobject]} - # bobject.scale.y *= -1 - # mesh = obj.data - # for face in mesh.faces: - # face.v.reverse() - # bpy.ops.export_scene.obj(override, use_selection=True, filepath=nav_filepath, check_existing=False, use_normals=False, use_uvs=False, use_materials=False) - # bobject.scale.y *= -1 - armature = bobject.find_armature() - apply_modifiers = not armature - bobject_eval = bobject.evaluated_get(self.depsgraph) if apply_modifiers else bobject - exportMesh = bobject_eval.to_mesh() + out_trait['type'] = 'Script' + out_trait['class_name'] = 'armory.trait.internal.WasmScript' + out_trait['parameters'] = ["'" + traitlistItem.webassembly_prop + "'"] - with open(nav_filepath, 'w') as f: - for v in exportMesh.vertices: - f.write("v %.4f " % (v.co[0] * bobject_eval.scale.x)) - f.write("%.4f " % (v.co[2] * bobject_eval.scale.z)) - f.write("%.4f\n" % (v.co[1] * bobject_eval.scale.y)) # Flipped - for p in exportMesh.polygons: - f.write("f") - for i in reversed(p.vertices): # Flipped normals - f.write(" %d" % (i + 1)) - f.write("\n") - else: # Haxe - trait_prefix = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_package) + '.' 
- hxfile = '/Sources/' + (trait_prefix + t.class_name_prop).replace('.', '/') + '.hx' - if not os.path.exists(arm.utils.get_fp() + hxfile): - # TODO: Halt build here once this check is tested - print('Armory Error: Scene "' + self.scene.name + '" - Object "' + bobject.name + '" : Referenced trait file "' + hxfile + '" not found') + elif traitlistItem.type_prop == 'UI Canvas': + cpath = os.path.join(arm.utils.get_fp(), 'Bundled', 'canvas', traitlistItem.canvas_name_prop + '.json') + if not os.path.exists(cpath): + log.warn(f'Scene "{self.scene.name}" - Object "{bobject.name}" - Referenced canvas "{traitlistItem.canvas_name_prop}" not found, skipping') + continue - x['class_name'] = trait_prefix + t.class_name_prop + ArmoryExporter.export_ui = True + out_trait['type'] = 'Script' + out_trait['class_name'] = 'armory.trait.internal.CanvasScript' + out_trait['parameters'] = ["'" + traitlistItem.canvas_name_prop + "'"] - # Export trait properties - if t.arm_traitpropslist: - x['props'] = [] - for trait_prop in t.arm_traitpropslist: - x['props'].append(trait_prop.name) - x['props'].append(trait_prop.type) + # Read file list and add canvas assets + assetpath = os.path.join(arm.utils.get_fp(), 'Bundled', 'canvas', traitlistItem.canvas_name_prop + '.files') + if os.path.exists(assetpath): + with open(assetpath) as f: + file_list = f.read().splitlines() + for asset in file_list: + # Relative to the root/Bundled/canvas path + asset = asset[6:] # Strip ../../ to start in project root + assets.add(asset) - if trait_prop.type.endswith("Object"): - value = arm.utils.asset_name(trait_prop.value_object) - else: - value = trait_prop.get_value() + # Haxe/Bundled Script + else: + # Empty class name, skip + if traitlistItem.class_name_prop == '': + continue - x['props'].append(value) + out_trait['type'] = 'Script' + if traitlistItem.type_prop == 'Bundled Script': + trait_prefix = 'armory.trait.' - o['traits'].append(x) + # TODO: temporary, export single mesh navmesh as obj + if traitlistItem.class_name_prop == 'NavMesh' and bobject.type == 'MESH' and bpy.data.worlds['Arm'].arm_navigation != 'Disabled': + ArmoryExporter.export_navigation = True - def export_canvas_themes(self): + nav_path = os.path.join(arm.utils.get_fp_build(), 'compiled', 'Assets', 'navigation') + if not os.path.exists(nav_path): + os.makedirs(nav_path) + nav_filepath = os.path.join(nav_path, 'nav_' + bobject.data.name + '.arm') + assets.add(nav_filepath) + + # TODO: Implement cache + # if not os.path.isfile(nav_filepath): + # override = {'selected_objects': [bobject]} + # bobject.scale.y *= -1 + # mesh = obj.data + # for face in mesh.faces: + # face.v.reverse() + # bpy.ops.export_scene.obj(override, use_selection=True, filepath=nav_filepath, check_existing=False, use_normals=False, use_uvs=False, use_materials=False) + # bobject.scale.y *= -1 + armature = bobject.find_armature() + apply_modifiers = not armature + + bobject_eval = bobject.evaluated_get(self.depsgraph) if apply_modifiers else bobject + export_mesh = bobject_eval.to_mesh() + + with open(nav_filepath, 'w') as f: + for v in export_mesh.vertices: + f.write("v %.4f " % (v.co[0] * bobject_eval.scale.x)) + f.write("%.4f " % (v.co[2] * bobject_eval.scale.z)) + f.write("%.4f\n" % (v.co[1] * bobject_eval.scale.y)) # Flipped + for p in export_mesh.polygons: + f.write("f") + # Flipped normals + for i in reversed(p.vertices): + f.write(" %d" % (i + 1)) + f.write("\n") + + # Haxe + else: + trait_prefix = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_package) + '.' 
+ hxfile = os.path.join('Sources', (trait_prefix + traitlistItem.class_name_prop).replace('.', '/') + '.hx') + if not os.path.exists(os.path.join(arm.utils.get_fp(), hxfile)): + # TODO: Halt build here once this check is tested + print(f'Armory Error: Scene "{self.scene.name}" - Object "{bobject.name}": Referenced trait file "{hxfile}" not found') + + out_trait['class_name'] = trait_prefix + traitlistItem.class_name_prop + + # Export trait properties + if traitlistItem.arm_traitpropslist: + out_trait['props'] = [] + for trait_prop in traitlistItem.arm_traitpropslist: + out_trait['props'].append(trait_prop.name) + out_trait['props'].append(trait_prop.type) + + if trait_prop.type.endswith("Object"): + value = arm.utils.asset_name(trait_prop.value_object) + else: + value = trait_prop.get_value() + + out_trait['props'].append(value) + + o['traits'].append(out_trait) + + def export_scene_traits(self) -> None: + """Exports the traits of the scene and adds some internal traits + to the scene depending on the exporter settings. + """ + wrd = bpy.data.worlds['Arm'] + + if wrd.arm_physics != 'Disabled' and ArmoryExporter.export_physics: + if 'traits' not in self.output: + self.output['traits'] = [] + phys_pkg = 'bullet' if wrd.arm_physics_engine == 'Bullet' else 'oimo' + + out_trait = { + 'type': 'Script', + 'class_name': 'armory.trait.physics.' + phys_pkg + '.PhysicsWorld' + } + + rbw = self.scene.rigidbody_world + if rbw is not None and rbw.enabled: + out_trait['parameters'] = [str(rbw.time_scale), str(1 / rbw.steps_per_second), str(rbw.solver_iterations)] + + self.output['traits'].append(out_trait) + + if wrd.arm_navigation != 'Disabled' and ArmoryExporter.export_navigation: + if 'traits' not in self.output: + self.output['traits'] = [] + out_trait = {'type': 'Script', 'class_name': 'armory.trait.navigation.Navigation'} + self.output['traits'].append(out_trait) + + if wrd.arm_debug_console: + if 'traits' not in self.output: + self.output['traits'] = [] + ArmoryExporter.export_ui = True + out_trait = { + 'type': 'Script', + 'class_name': 'armory.trait.internal.DebugConsole', + 'parameters': [str(arm.utils.get_ui_scale())] + } + self.output['traits'].append(out_trait) + + if wrd.arm_live_patch: + if 'traits' not in self.output: + self.output['traits'] = [] + out_trait = {'type': 'Script', 'class_name': 'armory.trait.internal.LivePatch'} + self.output['traits'].append(out_trait) + + if len(self.scene.arm_traitlist) > 0: + if 'traits' not in self.output: + self.output['traits'] = [] + self.export_traits(self.scene, self.output) + + if 'traits' in self.output: + for out_trait in self.output['traits']: + ArmoryExporter.import_traits.append(out_trait['class_name']) + + @staticmethod + def export_canvas_themes(): path_themes = os.path.join(arm.utils.get_fp(), 'Bundled', 'canvas') file_theme = os.path.join(path_themes, "_themes.json") @@ -2576,28 +2637,41 @@ class ArmoryExporter: pass assets.add(file_theme) - def add_softbody_mod(self, o, bobject, soft_mod, soft_type): + @staticmethod + def add_softbody_mod(o, bobject: bpy.types.Object, modifier: Union[bpy.types.ClothModifier, bpy.types.SoftBodyModifier]): + """Adds a softbody trait to the given object based on the given + softbody/cloth modifier. + """ ArmoryExporter.export_physics = True - phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo' assets.add_khafile_def('arm_physics_soft') - trait = {} - trait['type'] = 'Script' - trait['class_name'] = 'armory.trait.physics.' 
+ phys_pkg + '.SoftBody' - if soft_type == 0: - bend = soft_mod.settings.bending_stiffness - elif soft_type == 1: - bend = (soft_mod.settings.bend + 1.0) * 10 - trait['parameters'] = [str(soft_type), str(bend), str(soft_mod.settings.mass), str(bobject.arm_soft_body_margin)] - o['traits'].append(trait) - if soft_type == 0: - self.add_hook_mod(o, bobject, '', soft_mod.settings.vertex_group_mass) - def add_hook_mod(self, o, bobject, target_name, group_name): - ArmoryExporter.export_physics = True phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo' - trait = {} - trait['type'] = 'Script' - trait['class_name'] = 'armory.trait.physics.' + phys_pkg + '.PhysicsHook' + out_trait = {'type': 'Script', 'class_name': 'armory.trait.physics.' + phys_pkg + '.SoftBody'} + # ClothModifier + if modifier.type == 'CLOTH': + bend = modifier.settings.bending_stiffness + soft_type = 0 + # SoftBodyModifier + elif modifier.type == 'SOFT_BODY': + bend = (modifier.settings.bend + 1.0) * 10 + soft_type = 1 + else: + # Wrong modifier type + return + + out_trait['parameters'] = [str(soft_type), str(bend), str(modifier.settings.mass), str(bobject.arm_soft_body_margin)] + o['traits'].append(out_trait) + + if soft_type == 0: + ArmoryExporter.add_hook_mod(o, bobject, '', modifier.settings.vertex_group_mass) + + @staticmethod + def add_hook_mod(o, bobject: bpy.types.Object, target_name, group_name): + ArmoryExporter.export_physics = True + + phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo' + out_trait = {'type': 'Script', 'class_name': 'armory.trait.physics.' + phys_pkg + '.PhysicsHook'} + verts = [] if group_name != '': group = bobject.vertex_groups[group_name].index @@ -2607,130 +2681,146 @@ class ArmoryExporter: verts.append(v.co.x) verts.append(v.co.y) verts.append(v.co.z) - trait['parameters'] = ["'" + target_name + "'", str(verts)] - o['traits'].append(trait) - def add_rigidbody_constraint(self, o, rbc): + out_trait['parameters'] = [f"'{target_name}'", str(verts)] + o['traits'].append(out_trait) + + @staticmethod + def add_rigidbody_constraint(o, rbc): rb1 = rbc.object1 rb2 = rbc.object2 - if rb1 == None or rb2 is None: + if rb1 is None or rb2 is None: return + ArmoryExporter.export_physics = True phys_pkg = 'bullet' if bpy.data.worlds['Arm'].arm_physics_engine == 'Bullet' else 'oimo' breaking_threshold = rbc.breaking_threshold if rbc.use_breaking else 0 - trait = {} - trait['type'] = 'Script' - trait['class_name'] = 'armory.trait.physics.' + phys_pkg + '.PhysicsConstraint' - trait['parameters'] = [\ - "'" + rb1.name + "'", \ - "'" + rb2.name + "'", \ - "'" + rbc.type + "'", \ - str(rbc.disable_collisions).lower(), \ - str(breaking_threshold)] + + trait = { + 'type': 'Script', + 'class_name': 'armory.trait.physics.' 
+ phys_pkg + '.PhysicsConstraint', + 'parameters': [ + "'" + rb1.name + "'", + "'" + rb2.name + "'", + "'" + rbc.type + "'", + str(rbc.disable_collisions).lower(), + str(breaking_threshold) + ] + } + if rbc.type == "GENERIC": - limits = [] - limits.append(1 if rbc.use_limit_lin_x else 0) - limits.append(rbc.limit_lin_x_lower) - limits.append(rbc.limit_lin_x_upper) - limits.append(1 if rbc.use_limit_lin_y else 0) - limits.append(rbc.limit_lin_y_lower) - limits.append(rbc.limit_lin_y_upper) - limits.append(1 if rbc.use_limit_lin_z else 0) - limits.append(rbc.limit_lin_z_lower) - limits.append(rbc.limit_lin_z_upper) - limits.append(1 if rbc.use_limit_ang_x else 0) - limits.append(rbc.limit_ang_x_lower) - limits.append(rbc.limit_ang_x_upper) - limits.append(1 if rbc.use_limit_ang_y else 0) - limits.append(rbc.limit_ang_y_lower) - limits.append(rbc.limit_ang_y_upper) - limits.append(1 if rbc.use_limit_ang_z else 0) - limits.append(rbc.limit_ang_z_lower) - limits.append(rbc.limit_ang_z_upper) + limits = [ + 1 if rbc.use_limit_lin_x else 0, + rbc.limit_lin_x_lower, + rbc.limit_lin_x_upper, + 1 if rbc.use_limit_lin_y else 0, + rbc.limit_lin_y_lower, + rbc.limit_lin_y_upper, + 1 if rbc.use_limit_lin_z else 0, + rbc.limit_lin_z_lower, + rbc.limit_lin_z_upper, + 1 if rbc.use_limit_ang_x else 0, + rbc.limit_ang_x_lower, + rbc.limit_ang_x_upper, + 1 if rbc.use_limit_ang_y else 0, + rbc.limit_ang_y_lower, + rbc.limit_ang_y_upper, + 1 if rbc.use_limit_ang_z else 0, + rbc.limit_ang_z_lower, + rbc.limit_ang_z_upper + ] trait['parameters'].append(str(limits)) if rbc.type == "GENERIC_SPRING": - limits = [] - limits.append(1 if rbc.use_limit_lin_x else 0) - limits.append(rbc.limit_lin_x_lower) - limits.append(rbc.limit_lin_x_upper) - limits.append(1 if rbc.use_limit_lin_y else 0) - limits.append(rbc.limit_lin_y_lower) - limits.append(rbc.limit_lin_y_upper) - limits.append(1 if rbc.use_limit_lin_z else 0) - limits.append(rbc.limit_lin_z_lower) - limits.append(rbc.limit_lin_z_upper) - limits.append(1 if rbc.use_limit_ang_x else 0) - limits.append(rbc.limit_ang_x_lower) - limits.append(rbc.limit_ang_x_upper) - limits.append(1 if rbc.use_limit_ang_y else 0) - limits.append(rbc.limit_ang_y_lower) - limits.append(rbc.limit_ang_y_upper) - limits.append(1 if rbc.use_limit_ang_z else 0) - limits.append(rbc.limit_ang_z_lower) - limits.append(rbc.limit_ang_z_upper) - limits.append(1 if rbc.use_spring_x else 0) - limits.append(rbc.spring_stiffness_x) - limits.append(rbc.spring_damping_x) - limits.append(1 if rbc.use_spring_y else 0) - limits.append(rbc.spring_stiffness_y) - limits.append(rbc.spring_damping_y) - limits.append(1 if rbc.use_spring_z else 0) - limits.append(rbc.spring_stiffness_z) - limits.append(rbc.spring_damping_z) - limits.append(1 if rbc.use_spring_ang_x else 0) - limits.append(rbc.spring_stiffness_ang_x) - limits.append(rbc.spring_damping_ang_x) - limits.append(1 if rbc.use_spring_ang_y else 0) - limits.append(rbc.spring_stiffness_ang_y) - limits.append(rbc.spring_damping_ang_y) - limits.append(1 if rbc.use_spring_ang_z else 0) - limits.append(rbc.spring_stiffness_ang_z) - limits.append(rbc.spring_damping_ang_z) + limits = [ + 1 if rbc.use_limit_lin_x else 0, + rbc.limit_lin_x_lower, + rbc.limit_lin_x_upper, + 1 if rbc.use_limit_lin_y else 0, + rbc.limit_lin_y_lower, + rbc.limit_lin_y_upper, + 1 if rbc.use_limit_lin_z else 0, + rbc.limit_lin_z_lower, + rbc.limit_lin_z_upper, + 1 if rbc.use_limit_ang_x else 0, + rbc.limit_ang_x_lower, + rbc.limit_ang_x_upper, + 1 if rbc.use_limit_ang_y else 0, + 
rbc.limit_ang_y_lower, + rbc.limit_ang_y_upper, + 1 if rbc.use_limit_ang_z else 0, + rbc.limit_ang_z_lower, + rbc.limit_ang_z_upper, + 1 if rbc.use_spring_x else 0, + rbc.spring_stiffness_x, + rbc.spring_damping_x, + 1 if rbc.use_spring_y else 0, + rbc.spring_stiffness_y, + rbc.spring_damping_y, + 1 if rbc.use_spring_z else 0, + rbc.spring_stiffness_z, + rbc.spring_damping_z, + 1 if rbc.use_spring_ang_x else 0, + rbc.spring_stiffness_ang_x, + rbc.spring_damping_ang_x, + 1 if rbc.use_spring_ang_y else 0, + rbc.spring_stiffness_ang_y, + rbc.spring_damping_ang_y, + 1 if rbc.use_spring_ang_z else 0, + rbc.spring_stiffness_ang_z, + rbc.spring_damping_ang_z + ] trait['parameters'].append(str(limits)) if rbc.type == "HINGE": - limits = [] - limits.append(1 if rbc.use_limit_ang_z else 0) - limits.append(rbc.limit_ang_z_lower) - limits.append(rbc.limit_ang_z_upper) + limits = [ + 1 if rbc.use_limit_ang_z else 0, + rbc.limit_ang_z_lower, + rbc.limit_ang_z_upper + ] trait['parameters'].append(str(limits)) if rbc.type == "SLIDER": - limits = [] - limits.append(1 if rbc.use_limit_lin_x else 0) - limits.append(rbc.limit_lin_x_lower) - limits.append(rbc.limit_lin_x_upper) + limits = [ + 1 if rbc.use_limit_lin_x else 0, + rbc.limit_lin_x_lower, + rbc.limit_lin_x_upper + ] trait['parameters'].append(str(limits)) if rbc.type == "PISTON": - limits = [] - limits.append(1 if rbc.use_limit_lin_x else 0) - limits.append(rbc.limit_lin_x_lower) - limits.append(rbc.limit_lin_x_upper) - limits.append(1 if rbc.use_limit_ang_x else 0) - limits.append(rbc.limit_ang_x_lower) - limits.append(rbc.limit_ang_x_upper) + limits = [ + 1 if rbc.use_limit_lin_x else 0, + rbc.limit_lin_x_lower, + rbc.limit_lin_x_upper, + 1 if rbc.use_limit_ang_x else 0, + rbc.limit_ang_x_lower, + rbc.limit_ang_x_upper + ] trait['parameters'].append(str(limits)) o['traits'].append(trait) - def post_export_world(self, world, o): + @staticmethod + def post_export_world(world: bpy.types.World, out_world: Dict): wrd = bpy.data.worlds['Arm'] + bgcol = world.arm_envtex_color - if '_LDR' in wrd.world_defs: # No compositor used + # No compositor used + if '_LDR' in world.world_defs: for i in range(0, 3): bgcol[i] = pow(bgcol[i], 1.0 / 2.2) - o['background_color'] = arm.utils.color_to_int(bgcol) + out_world['background_color'] = arm.utils.color_to_int(bgcol) - if '_EnvSky' in wrd.world_defs: + if '_EnvSky' in world.world_defs: # Sky data for probe - o['sun_direction'] = list(world.arm_envtex_sun_direction) - o['turbidity'] = world.arm_envtex_turbidity - o['ground_albedo'] = world.arm_envtex_ground_albedo + out_world['sun_direction'] = list(world.arm_envtex_sun_direction) + out_world['turbidity'] = world.arm_envtex_turbidity + out_world['ground_albedo'] = world.arm_envtex_ground_albedo disable_hdr = world.arm_envtex_name.endswith('.jpg') - if '_EnvTex' in wrd.world_defs or '_EnvImg' in wrd.world_defs: - o['envmap'] = world.arm_envtex_name.rsplit('.', 1)[0] + + if '_EnvTex' in world.world_defs or '_EnvImg' in world.world_defs: + out_world['envmap'] = world.arm_envtex_name.rsplit('.', 1)[0] if disable_hdr: - o['envmap'] += '.jpg' + out_world['envmap'] += '.jpg' else: - o['envmap'] += '.hdr' + out_world['envmap'] += '.hdr' # Main probe rpdat = arm.utils.get_rp() @@ -2739,10 +2829,11 @@ class ArmoryExporter: arm_radiance = False radtex = world.arm_envtex_name.rsplit('.', 1)[0] irrsharmonics = world.arm_envtex_irr_name + # Radiance - if '_EnvTex' in wrd.world_defs: + if '_EnvTex' in world.world_defs: arm_radiance = rpdat.arm_radiance - elif '_EnvSky' in 
wrd.world_defs: + elif '_EnvSky' in world.world_defs: arm_radiance = rpdat.arm_radiance radtex = 'hosek' num_mips = world.arm_envtex_num_mips @@ -2752,27 +2843,30 @@ class ArmoryExporter: if mobile_mat: arm_radiance = False - po = {} - po['name'] = world.name + out_probe = {'name': world.name} if arm_irradiance: ext = '' if wrd.arm_minimize else '.json' - po['irradiance'] = irrsharmonics + '_irradiance' + ext + out_probe['irradiance'] = irrsharmonics + '_irradiance' + ext if arm_radiance: - po['radiance'] = radtex + '_radiance' - po['radiance'] += '.jpg' if disable_hdr else '.hdr' - po['radiance_mipmaps'] = num_mips - po['strength'] = strength - o['probe'] = po + out_probe['radiance'] = radtex + '_radiance' + out_probe['radiance'] += '.jpg' if disable_hdr else '.hdr' + out_probe['radiance_mipmaps'] = num_mips + out_probe['strength'] = strength + out_world['probe'] = out_probe - # https://blender.stackexchange.com/questions/70629 - def mod_equal(self, mod1, mod2): + @staticmethod + def mod_equal(mod1: bpy.types.Modifier, mod2: bpy.types.Modifier) -> bool: + """Compares whether the given modifiers are equal.""" + # https://blender.stackexchange.com/questions/70629 return all([getattr(mod1, prop, True) == getattr(mod2, prop, False) for prop in mod1.bl_rna.properties.keys()]) - def mod_equal_stack(self, obj1, obj2): + @staticmethod + def mod_equal_stack(obj1: bpy.types.Object, obj2: bpy.types.Object) -> bool: + """Returns `True` if the given objects have the same modifiers.""" if len(obj1.modifiers) == 0 and len(obj2.modifiers) == 0: return True if len(obj1.modifiers) == 0 or len(obj2.modifiers) == 0: return False if len(obj1.modifiers) != len(obj2.modifiers): return False - return all([self.mod_equal(m, obj2.modifiers[i]) for i,m in enumerate(obj1.modifiers)]) + return all([ArmoryExporter.mod_equal(m, obj2.modifiers[i]) for i, m in enumerate(obj1.modifiers)]) diff --git a/blender/arm/exporter_opt.py b/blender/arm/exporter_opt.py index 0564112d..79486f97 100644 --- a/blender/arm/exporter_opt.py +++ b/blender/arm/exporter_opt.py @@ -171,7 +171,7 @@ def export_mesh_data(self, exportMesh, bobject, o, has_armature=False): o['scale_pos'] = 1.0 if has_armature: # Allow up to 2x bigger bounds for skinned mesh o['scale_pos'] *= 2.0 - + scale_pos = o['scale_pos'] invscale_pos = (1 / scale_pos) * 32767 @@ -265,16 +265,16 @@ def export_mesh_data(self, exportMesh, bobject, o, has_armature=False): # Output o['vertex_arrays'] = [] - o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata }) - o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata }) + o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' }) + o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' }) if has_tex: - o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data }) + o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' }) if has_tex1: - o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data }) + o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' }) if has_col: - o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata }) + o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 }) if has_tang: - o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata }) + o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 }) return vert_list @@ -348,7 +348,7 @@ def export_skin(self, bobject, armature, 
vert_list, o): bone_count = 4 bone_values.sort(reverse=True) bone_values = bone_values[:4] - + bone_count_array[index] = bone_count for bv in bone_values: bone_weight_array[count] = bv[0] * 32767 diff --git a/blender/arm/keymap.py b/blender/arm/keymap.py index 12c0bad2..b4a019ea 100644 --- a/blender/arm/keymap.py +++ b/blender/arm/keymap.py @@ -1,12 +1,21 @@ import bpy import arm.props_ui as props_ui -import arm.utils arm_keymaps = [] def register(): wm = bpy.context.window_manager - km = wm.keyconfigs.addon.keymaps.new(name='Window', space_type='EMPTY', region_type="WINDOW") + addon_keyconfig = wm.keyconfigs.addon + + # Keyconfigs are not available in background mode. If the keyconfig + # was not found despite running _not_ in background mode, a warning + # is printed + if addon_keyconfig is None: + if not bpy.app.background: + print("Armory warning: no keyconfig path found") + return + + km = addon_keyconfig.keymaps.new(name='Window', space_type='EMPTY', region_type="WINDOW") km.keymap_items.new(props_ui.ArmoryPlayButton.bl_idname, type='F5', value='PRESS') arm_keymaps.append(km) diff --git a/blender/arm/lib/make_datas.py b/blender/arm/lib/make_datas.py index b57dbfe3..7a4618ad 100644 --- a/blender/arm/lib/make_datas.py +++ b/blender/arm/lib/make_datas.py @@ -34,6 +34,12 @@ def parse_context(c, sres, asset, defs, vert=None, frag=None): if con['tesseval_shader'] not in asset: asset.append(con['tesseval_shader']) + if 'color_attachments' in c: + con['color_attachments'] = c['color_attachments'] + for i in range(len(con['color_attachments'])): + if con['color_attachments'][i] == '_HDR': + con['color_attachments'][i] = 'RGBA32' if '_LDR' in defs else 'RGBA64' + # Params params = ['depth_write', 'compare_mode', 'cull_mode', \ 'blend_source', 'blend_destination', 'blend_operation', \ @@ -65,7 +71,7 @@ def parse_context(c, sres, asset, defs, vert=None, frag=None): with open(c['tesscontrol_shader']) as f: tesc = f.read().splitlines() parse_shader(sres, c, con, defs, tesc, False) - + if 'tesseval_shader' in c: with open(c['tesseval_shader']) as f: tese = f.read().splitlines() @@ -76,12 +82,12 @@ def parse_shader(sres, c, con, defs, lines, parse_attributes): skip_else = False vertex_elements_parsed = False vertex_elements_parsing = False - + stack = [] if parse_attributes == False: vertex_elements_parsed = True - + for line in lines: line = line.lstrip() diff --git a/blender/arm/lightmapper/__init__.py b/blender/arm/lightmapper/__init__.py new file mode 100644 index 00000000..ae20d2bd --- /dev/null +++ b/blender/arm/lightmapper/__init__.py @@ -0,0 +1 @@ +__all__ = ('Operators', 'Properties', 'Preferences', 'Utility', 'Keymap') \ No newline at end of file diff --git a/blender/arm/lightmapper/assets/dash.ogg b/blender/arm/lightmapper/assets/dash.ogg new file mode 100644 index 00000000..319b5950 Binary files /dev/null and b/blender/arm/lightmapper/assets/dash.ogg differ diff --git a/blender/arm/lightmapper/assets/gentle.ogg b/blender/arm/lightmapper/assets/gentle.ogg new file mode 100644 index 00000000..e36d106d Binary files /dev/null and b/blender/arm/lightmapper/assets/gentle.ogg differ diff --git a/blender/arm/lightmapper/assets/noot.ogg b/blender/arm/lightmapper/assets/noot.ogg new file mode 100644 index 00000000..9f581e4d Binary files /dev/null and b/blender/arm/lightmapper/assets/noot.ogg differ diff --git a/blender/arm/lightmapper/assets/pingping.ogg b/blender/arm/lightmapper/assets/pingping.ogg new file mode 100644 index 00000000..1e63fd80 Binary files /dev/null and 
b/blender/arm/lightmapper/assets/pingping.ogg differ diff --git a/blender/arm/lightmapper/assets/sound.ogg b/blender/arm/lightmapper/assets/sound.ogg new file mode 100644 index 00000000..9f581e4d Binary files /dev/null and b/blender/arm/lightmapper/assets/sound.ogg differ diff --git a/blender/arm/lightmapper/assets/tlm_data.blend b/blender/arm/lightmapper/assets/tlm_data.blend new file mode 100644 index 00000000..409c894b Binary files /dev/null and b/blender/arm/lightmapper/assets/tlm_data.blend differ diff --git a/blender/arm/lightmapper/icons/bake.png b/blender/arm/lightmapper/icons/bake.png new file mode 100644 index 00000000..f1ff1895 Binary files /dev/null and b/blender/arm/lightmapper/icons/bake.png differ diff --git a/blender/arm/lightmapper/icons/clean.png b/blender/arm/lightmapper/icons/clean.png new file mode 100644 index 00000000..c9bdd9db Binary files /dev/null and b/blender/arm/lightmapper/icons/clean.png differ diff --git a/blender/arm/lightmapper/icons/explore.png b/blender/arm/lightmapper/icons/explore.png new file mode 100644 index 00000000..e5486558 Binary files /dev/null and b/blender/arm/lightmapper/icons/explore.png differ diff --git a/blender/arm/lightmapper/keymap/__init__.py b/blender/arm/lightmapper/keymap/__init__.py new file mode 100644 index 00000000..079b3c5d --- /dev/null +++ b/blender/arm/lightmapper/keymap/__init__.py @@ -0,0 +1,7 @@ +from . import keymap + +def register(): + keymap.register() + +def unregister(): + keymap.unregister() \ No newline at end of file diff --git a/blender/arm/lightmapper/keymap/keymap.py b/blender/arm/lightmapper/keymap/keymap.py new file mode 100644 index 00000000..66778783 --- /dev/null +++ b/blender/arm/lightmapper/keymap/keymap.py @@ -0,0 +1,21 @@ +import bpy + +#from .. operators import build +#from .. operators import clean + +tlm_keymaps = [] + +def register(): + pass + # winman = bpy.context.window_manager + # keyman = winman.keyconfigs.addon.keymaps.new(name='Window', space_type='EMPTY', region_type="WINDOW") + # keyman.keymap_items.new(build.TLM_BuildLightmaps.bl_idname, type='F6', value='PRESS') + # keyman.keymap_items.new(clean.TLM_CleanLightmaps.bl_idname, type='F7', value='PRESS') + # tlm_keymaps.append(keyman) + +def unregister(): + pass + # winman = bpy.context.window_manager + # for keyman in tlm_keymaps: + # winman.keyconfigs.addon.keymaps.remove(keyman) + # del tlm_keymaps[:] \ No newline at end of file diff --git a/blender/arm/lightmapper/operators/__init__.py b/blender/arm/lightmapper/operators/__init__.py new file mode 100644 index 00000000..96b8098d --- /dev/null +++ b/blender/arm/lightmapper/operators/__init__.py @@ -0,0 +1,21 @@ +import bpy +from bpy.utils import register_class, unregister_class +from . 
import tlm, installopencv + +classes = [ + tlm.TLM_BuildLightmaps, + tlm.TLM_CleanLightmaps, + tlm.TLM_ExploreLightmaps, + tlm.TLM_EnableSelection, + tlm.TLM_DisableSelection, + tlm.TLM_RemoveLightmapUV, + installopencv.TLM_Install_OpenCV +] + +def register(): + for cls in classes: + register_class(cls) + +def unregister(): + for cls in classes: + unregister_class(cls) \ No newline at end of file diff --git a/blender/arm/lightmapper/operators/installopencv.py b/blender/arm/lightmapper/operators/installopencv.py new file mode 100644 index 00000000..c9be4fac --- /dev/null +++ b/blender/arm/lightmapper/operators/installopencv.py @@ -0,0 +1,67 @@ +import bpy, math, os, platform, subprocess, sys, re, shutil + +def ShowMessageBox(message = "", title = "Message Box", icon = 'INFO'): + + def draw(self, context): + self.layout.label(text=message) + + bpy.context.window_manager.popup_menu(draw, title = title, icon = icon) + +class TLM_Install_OpenCV(bpy.types.Operator): + """Install OpenCV""" + bl_idname = "tlm.install_opencv_lightmaps" + bl_label = "Install OpenCV" + bl_description = "Install OpenCV" + bl_options = {'REGISTER', 'UNDO'} + + def execute(self, context): + + scene = context.scene + cycles = bpy.data.scenes[scene.name].cycles + + print("Module OpenCV") + + pythonbinpath = bpy.app.binary_path_python + + if platform.system() == "Windows": + pythonlibpath = os.path.join(os.path.dirname(os.path.dirname(pythonbinpath)), "lib") + else: + pythonlibpath = os.path.join(os.path.dirname(os.path.dirname(pythonbinpath)), "lib", os.path.basename(pythonbinpath)[:-1]) + + ensurepippath = os.path.join(pythonlibpath, "ensurepip") + + cmda = [pythonbinpath, ensurepippath, "--upgrade", "--user"] + pip = subprocess.run(cmda, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + if pip.returncode == 0: + print("Sucessfully installed pip!\n") + else: + + try: + import pip + module_pip = True + except ImportError: + #pip + module_pip = False + + if not module_pip: + print("Failed to install pip!\n") + ShowMessageBox("Failed to install pip - Please start Blender as administrator", "Restart", 'PREFERENCES') + return{'FINISHED'} + + cmdb = [pythonbinpath, "-m", "pip", "install", "opencv-python"] + + opencv = subprocess.run(cmdb, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + if opencv.returncode == 0: + print("Sucessfully installed OpenCV!\n") + else: + print("Failed to install OpenCV!\n") + ShowMessageBox("Failed to install opencv - Please start Blender as administrator", "Restart", 'PREFERENCES') + return{'FINISHED'} + + module_opencv = True + print("Sucessfully installed OpenCV!\n") + ShowMessageBox("Please restart blender to enable OpenCV filtering", "Restart", 'PREFERENCES') + + return{'FINISHED'} \ No newline at end of file diff --git a/blender/arm/lightmapper/operators/tlm.py b/blender/arm/lightmapper/operators/tlm.py new file mode 100644 index 00000000..f5529959 --- /dev/null +++ b/blender/arm/lightmapper/operators/tlm.py @@ -0,0 +1,156 @@ +import bpy, os, time, blf, webbrowser +from .. utility import build +from .. utility.cycles import cache + +class TLM_BuildLightmaps(bpy.types.Operator): + bl_idname = "tlm.build_lightmaps" + bl_label = "Build Lightmaps" + bl_description = "Build Lightmaps" + bl_options = {'REGISTER', 'UNDO'} + + def modal(self, context, event): + + #Add progress bar from 0.15 + + print("MODAL") + + return {'PASS_THROUGH'} + + def invoke(self, context, event): + + if not bpy.app.background: + + build.prepare_build(self, False) + + else: + + print("Running in background mode. 
Contextual operator not available. Use command 'thelightmapper.addon.build.prepare_build()'") + + return {'RUNNING_MODAL'} + + def cancel(self, context): + pass + + def draw_callback_px(self, context, event): + pass + +class TLM_CleanLightmaps(bpy.types.Operator): + bl_idname = "tlm.clean_lightmaps" + bl_label = "Clean Lightmaps" + bl_description = "Clean Lightmaps" + bl_options = {'REGISTER', 'UNDO'} + + def execute(self, context): + + scene = context.scene + + filepath = bpy.data.filepath + dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.TLM_EngineProperties.tlm_lightmap_savedir) + if os.path.isdir(dirpath): + for file in os.listdir(dirpath): + os.remove(os.path.join(dirpath + "/" + file)) + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + cache.backup_material_restore(obj) + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + cache.backup_material_rename(obj) + + for mat in bpy.data.materials: + if mat.users < 1: + bpy.data.materials.remove(mat) + + for mat in bpy.data.materials: + if mat.name.startswith("."): + if "_Original" in mat.name: + bpy.data.materials.remove(mat) + + for image in bpy.data.images: + if image.name.endswith("_baked"): + bpy.data.images.remove(image, do_unlink=True) + + return {'FINISHED'} + +class TLM_ExploreLightmaps(bpy.types.Operator): + bl_idname = "tlm.explore_lightmaps" + bl_label = "Explore Lightmaps" + bl_description = "Explore Lightmaps" + bl_options = {'REGISTER', 'UNDO'} + + def execute(self, context): + + scene = context.scene + cycles = scene.cycles + + if not bpy.data.is_saved: + self.report({'INFO'}, "Please save your file first") + return {"CANCELLED"} + + filepath = bpy.data.filepath + dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.TLM_EngineProperties.tlm_lightmap_savedir) + + if os.path.isdir(dirpath): + webbrowser.open('file://' + dirpath) + else: + os.mkdir(dirpath) + webbrowser.open('file://' + dirpath) + + return {'FINISHED'} + +class TLM_EnableSelection(bpy.types.Operator): + """Enable for selection""" + bl_idname = "tlm.enable_selection" + bl_label = "Enable for selection" + bl_description = "Enable for selection" + bl_options = {'REGISTER', 'UNDO'} + + def execute(self, context): + + scene = context.scene + + for obj in bpy.context.selected_objects: + obj.TLM_ObjectProperties.tlm_mesh_lightmap_use = True + + if scene.TLM_SceneProperties.tlm_override_object_settings: + obj.TLM_ObjectProperties.tlm_mesh_lightmap_resolution = scene.TLM_SceneProperties.tlm_mesh_lightmap_resolution + obj.TLM_ObjectProperties.tlm_mesh_lightmap_unwrap_mode = scene.TLM_SceneProperties.tlm_mesh_lightmap_unwrap_mode + obj.TLM_ObjectProperties.tlm_mesh_lightmap_unwrap_margin = scene.TLM_SceneProperties.tlm_mesh_unwrap_margin + + return{'FINISHED'} + +class TLM_DisableSelection(bpy.types.Operator): + """Disable for selection""" + bl_idname = "tlm.disable_selection" + bl_label = "Disable for selection" + bl_description = "Disable for selection" + bl_options = {'REGISTER', 'UNDO'} + + def execute(self, context): + + for obj in bpy.context.selected_objects: + obj.TLM_ObjectProperties.tlm_mesh_lightmap_use = False + + return{'FINISHED'} + +class TLM_RemoveLightmapUV(bpy.types.Operator): + """Remove Lightmap UV for selection""" + bl_idname = "tlm.remove_uv_selection" + bl_label = "Remove Lightmap UV" + bl_description = "Remove Lightmap UV for selection" + bl_options = {'REGISTER', 'UNDO'} + + def execute(self, 
context): + + for obj in bpy.context.selected_objects: + if obj.type == "MESH": + uv_layers = obj.data.uv_layers + + for uvlayer in uv_layers: + if uvlayer.name == "UVMap_Lightmap": + uv_layers.remove(uvlayer) + + return{'FINISHED'} \ No newline at end of file diff --git a/blender/arm/lightmapper/preferences/__init__.py b/blender/arm/lightmapper/preferences/__init__.py new file mode 100644 index 00000000..94cdfaea --- /dev/null +++ b/blender/arm/lightmapper/preferences/__init__.py @@ -0,0 +1,16 @@ +import bpy +from bpy.utils import register_class, unregister_class +from . import addon_preferences +#from . import build, clean, explore, encode, installopencv + +classes = [ + addon_preferences.TLM_AddonPreferences +] + +def register(): + for cls in classes: + register_class(cls) + +def unregister(): + for cls in classes: + unregister_class(cls) \ No newline at end of file diff --git a/blender/arm/lightmapper/preferences/addon_preferences.py b/blender/arm/lightmapper/preferences/addon_preferences.py new file mode 100644 index 00000000..ed52b239 --- /dev/null +++ b/blender/arm/lightmapper/preferences/addon_preferences.py @@ -0,0 +1,72 @@ +import bpy +from os.path import basename, dirname +from bpy.types import AddonPreferences +from .. operators import installopencv +import importlib + +class TLM_AddonPreferences(AddonPreferences): + + bl_idname = "thelightmapper" + + addon_keys = bpy.context.preferences.addons.keys() + + def draw(self, context): + + layout = self.layout + + box = layout.box() + row = box.row() + row.label(text="OpenCV") + + cv2 = importlib.util.find_spec("cv2") + + if cv2 is not None: + row.label(text="OpenCV installed") + else: + row.label(text="OpenCV not found - Install as administrator!", icon_value=2) + row = box.row() + row.operator("tlm.install_opencv_lightmaps", icon="PREFERENCES") + + box = layout.box() + row = box.row() + row.label(text="Blender Xatlas") + if "blender_xatlas" in self.addon_keys: + row.label(text="Blender Xatlas installed and available") + else: + row.label(text="Blender Xatlas not installed", icon_value=2) + row = box.row() + row.label(text="Github: https://github.com/mattedicksoncom/blender-xatlas") + + box = layout.box() + row = box.row() + row.label(text="RizomUV Bridge") + row.label(text="Coming soon") + + box = layout.box() + row = box.row() + row.label(text="UVPackmaster") + row.label(text="Coming soon") + + box = layout.box() + row = box.row() + row.label(text="Texel Density Checker") + row.label(text="Coming soon") + + box = layout.box() + row = box.row() + row.label(text="LuxCoreRender") + row.label(text="Coming soon") + + box = layout.box() + row = box.row() + row.label(text="OctaneRender") + row.label(text="Coming soon") + + # row = layout.row() + # row.label(text="PIP") + # row = layout.row() + # row.label(text="OIDN / Optix") + # row = layout.row() + # row.label(text="UVPackmaster") + # row = layout.row() + # row.label(text="Texel Density") diff --git a/blender/arm/lightmapper/properties/__init__.py b/blender/arm/lightmapper/properties/__init__.py new file mode 100644 index 00000000..5cf4bd65 --- /dev/null +++ b/blender/arm/lightmapper/properties/__init__.py @@ -0,0 +1,33 @@ +import bpy +from bpy.utils import register_class, unregister_class +from . import scene, object +from . renderer import cycles +from . 
denoiser import oidn, optix + +classes = [ + scene.TLM_SceneProperties, + object.TLM_ObjectProperties, + cycles.TLM_CyclesSceneProperties, + oidn.TLM_OIDNEngineProperties, + optix.TLM_OptixEngineProperties +] + +def register(): + for cls in classes: + register_class(cls) + + bpy.types.Scene.TLM_SceneProperties = bpy.props.PointerProperty(type=scene.TLM_SceneProperties) + bpy.types.Object.TLM_ObjectProperties = bpy.props.PointerProperty(type=object.TLM_ObjectProperties) + bpy.types.Scene.TLM_EngineProperties = bpy.props.PointerProperty(type=cycles.TLM_CyclesSceneProperties) + bpy.types.Scene.TLM_OIDNEngineProperties = bpy.props.PointerProperty(type=oidn.TLM_OIDNEngineProperties) + bpy.types.Scene.TLM_OptixEngineProperties = bpy.props.PointerProperty(type=optix.TLM_OptixEngineProperties) + +def unregister(): + for cls in classes: + unregister_class(cls) + + del bpy.types.Scene.TLM_SceneProperties + del bpy.types.Object.TLM_ObjectProperties + del bpy.types.Scene.TLM_EngineProperties + del bpy.types.Scene.TLM_OIDNEngineProperties + del bpy.types.Scene.TLM_OptixEngineProperties \ No newline at end of file diff --git a/blender/arm/lightmapper/properties/denoiser/integrated.py b/blender/arm/lightmapper/properties/denoiser/integrated.py new file mode 100644 index 00000000..165de7f7 --- /dev/null +++ b/blender/arm/lightmapper/properties/denoiser/integrated.py @@ -0,0 +1,4 @@ +import bpy +from bpy.props import * + +class TLM_IntegratedDenoiseEngineProperties(bpy.types.PropertyGroup): \ No newline at end of file diff --git a/blender/arm/lightmapper/properties/denoiser/oidn.py b/blender/arm/lightmapper/properties/denoiser/oidn.py new file mode 100644 index 00000000..c7d72963 --- /dev/null +++ b/blender/arm/lightmapper/properties/denoiser/oidn.py @@ -0,0 +1,39 @@ +import bpy +from bpy.props import * + +class TLM_OIDNEngineProperties(bpy.types.PropertyGroup): + tlm_oidn_path : StringProperty( + name="OIDN Path", + description="The path to the OIDN binaries", + default="", + subtype="FILE_PATH") + + tlm_oidn_verbose : BoolProperty( + name="Verbose", + description="TODO") + + tlm_oidn_threads : IntProperty( + name="Threads", + default=0, + min=0, + max=64, + description="Amount of threads to use. Set to 0 for auto-detect.") + + tlm_oidn_maxmem : IntProperty( + name="Tiling max Memory", + default=0, + min=512, + max=32768, + description="Use tiling for memory conservation. Set to 0 to disable tiling.") + + tlm_oidn_affinity : BoolProperty( + name="Set Affinity", + description="TODO") + + tlm_oidn_use_albedo : BoolProperty( + name="Use albedo map", + description="TODO") + + tlm_oidn_use_normal : BoolProperty( + name="Use normal map", + description="TODO") \ No newline at end of file diff --git a/blender/arm/lightmapper/properties/denoiser/optix.py b/blender/arm/lightmapper/properties/denoiser/optix.py new file mode 100644 index 00000000..6b55875b --- /dev/null +++ b/blender/arm/lightmapper/properties/denoiser/optix.py @@ -0,0 +1,21 @@ +import bpy +from bpy.props import * + +class TLM_OptixEngineProperties(bpy.types.PropertyGroup): + + tlm_optix_path : StringProperty( + name="Optix Path", + description="TODO", + default="", + subtype="FILE_PATH") + + tlm_optix_verbose : BoolProperty( + name="Verbose", + description="TODO") + + tlm_optix_maxmem : IntProperty( + name="Tiling max Memory", + default=0, + min=512, + max=32768, + description="Use tiling for memory conservation. 
Set to 0 to disable tiling.") \ No newline at end of file diff --git a/blender/arm/lightmapper/properties/filtering.py b/blender/arm/lightmapper/properties/filtering.py new file mode 100644 index 00000000..b153fe2b --- /dev/null +++ b/blender/arm/lightmapper/properties/filtering.py @@ -0,0 +1,4 @@ +import bpy +from bpy.props import * + +class TLM_FilteringProperties(bpy.types.PropertyGroup): \ No newline at end of file diff --git a/blender/arm/lightmapper/properties/object.py b/blender/arm/lightmapper/properties/object.py new file mode 100644 index 00000000..cc20be5f --- /dev/null +++ b/blender/arm/lightmapper/properties/object.py @@ -0,0 +1,121 @@ +import bpy +from bpy.props import * + +class TLM_ObjectProperties(bpy.types.PropertyGroup): + + addon_keys = bpy.context.preferences.addons.keys() + + tlm_atlas_pointer : StringProperty( + name = "Atlas Group", + description = "Atlas Lightmap Group", + default = "") + + tlm_mesh_lightmap_use : BoolProperty( + name="Enable Lightmapping", + description="TODO", + default=False) + + tlm_mesh_lightmap_resolution : EnumProperty( + items = [('32', '32', 'TODO'), + ('64', '64', 'TODO'), + ('128', '128', 'TODO'), + ('256', '256', 'TODO'), + ('512', '512', 'TODO'), + ('1024', '1024', 'TODO'), + ('2048', '2048', 'TODO'), + ('4096', '4096', 'TODO'), + ('8192', '8192', 'TODO')], + name = "Lightmap Resolution", + description="TODO", + default='256') + + unwrap_modes = [('Lightmap', 'Lightmap', 'TODO'),('SmartProject', 'Smart Project', 'TODO'),('CopyExisting', 'Copy Existing', 'TODO'),('AtlasGroup', 'Atlas Group', 'TODO')] + + if "blender_xatlas" in addon_keys: + unwrap_modes.append(('Xatlas', 'Xatlas', 'TODO')) + + tlm_mesh_lightmap_unwrap_mode : EnumProperty( + items = unwrap_modes, + name = "Unwrap Mode", + description="TODO", + default='SmartProject') + + tlm_mesh_unwrap_margin : FloatProperty( + name="Unwrap Margin", + default=0.1, + min=0.0, + max=1.0, + subtype='FACTOR') + + tlm_mesh_filter_override : BoolProperty( + name="Override filtering", + description="Override the scene specific filtering", + default=False) + + #FILTERING SETTINGS GROUP + tlm_mesh_filtering_engine : EnumProperty( + items = [('OpenCV', 'OpenCV', 'Make use of OpenCV based image filtering (Requires it to be installed first in the preferences panel)'), + ('Numpy', 'Numpy', 'Make use of Numpy based image filtering (Integrated)')], + name = "Filtering library", + description="Select which filtering library to use.", + default='Numpy') + + #Numpy Filtering options + tlm_mesh_numpy_filtering_mode : EnumProperty( + items = [('Blur', 'Blur', 'Basic blur filtering.')], + name = "Filter", + description="TODO", + default='Blur') + + #OpenCV Filtering options + tlm_mesh_filtering_mode : EnumProperty( + items = [('Box', 'Box', 'Basic box blur'), + ('Gaussian', 'Gaussian', 'Gaussian blurring'), + ('Bilateral', 'Bilateral', 'Edge-aware filtering'), + ('Median', 'Median', 'Median blur')], + name = "Filter", + description="TODO", + default='Median') + + tlm_mesh_filtering_gaussian_strength : IntProperty( + name="Gaussian Strength", + default=3, + min=1, + max=50) + + tlm_mesh_filtering_iterations : IntProperty( + name="Filter Iterations", + default=5, + min=1, + max=50) + + tlm_mesh_filtering_box_strength : IntProperty( + name="Box Strength", + default=1, + min=1, + max=50) + + tlm_mesh_filtering_bilateral_diameter : IntProperty( + name="Pixel diameter", + default=3, + min=1, + max=50) + + tlm_mesh_filtering_bilateral_color_deviation : IntProperty( + name="Color deviation", + default=75, + min=1, 
+ max=100) + + tlm_mesh_filtering_bilateral_coordinate_deviation : IntProperty( + name="Color deviation", + default=75, + min=1, + max=100) + + tlm_mesh_filtering_median_kernel : IntProperty( + name="Median kernel", + default=3, + min=1, + max=5) + \ No newline at end of file diff --git a/blender/arm/lightmapper/properties/renderer/cycles.py b/blender/arm/lightmapper/properties/renderer/cycles.py new file mode 100644 index 00000000..aa0dce47 --- /dev/null +++ b/blender/arm/lightmapper/properties/renderer/cycles.py @@ -0,0 +1,87 @@ +import bpy +from bpy.props import * + +class TLM_CyclesSceneProperties(bpy.types.PropertyGroup): + + tlm_mode : EnumProperty( + items = [('CPU', 'CPU', 'Use the processor to bake textures'), + ('GPU', 'GPU', 'Use the graphics card to bake textures')], + name = "Device", + description="Select whether to use the CPU or the GPU for baking", + default="CPU") + + tlm_quality : EnumProperty( + items = [('0', 'Exterior Preview', 'Best for fast exterior previz'), + ('1', 'Interior Preview', 'Best for fast interior previz with bounces'), + ('2', 'Medium', 'Best for complicated interior preview and final for isometric environments'), + ('3', 'High', 'Best used for final baking for 3rd person games'), + ('4', 'Production', 'Best for first-person and Archviz'), + ('5', 'Custom', 'Uses the cycles sample settings provided the user')], + name = "Quality", + description="Select baking quality", + default="0") + + tlm_resolution_scale : EnumProperty( + items = [('1', '1/1', '1'), + ('2', '1/2', '2'), + ('4', '1/4', '4'), + ('8', '1/8', '8')], + name = "Resolution scale", + description="Select resolution scale", + default="2") + + tlm_setting_supersample : EnumProperty( + items = [('none', 'None', 'No supersampling'), + ('2x', '2x', 'Double supersampling'), + ('4x', '4x', 'Quadruple supersampling')], + name = "Supersampling", + description="Supersampling scale", + default="none") + + tlm_bake_mode : EnumProperty( + items = [('Background', 'Background', 'More overhead; allows for network.'), + ('Foreground', 'Foreground', 'Direct in-session bake')], + name = "Baking mode", + description="Select bake mode", + default="Foreground") + + tlm_caching_mode : EnumProperty( + items = [('Copy', 'Copy', 'More overhead; allows for network.'), + ('Cache', 'Cache', 'Cache in separate blend'), + ('Node', 'Node restore', 'EXPERIMENTAL! Use with care')], + name = "Caching mode", + description="Select cache mode", + default="Copy") + + tlm_directional_mode : EnumProperty( + items = [('None', 'None', 'No directional information'), + ('Normal', 'Baked normal', 'Baked normal maps are taken into consideration')], + name = "Directional mode", + description="Select directional mode", + default="None") + + tlm_lightmap_savedir : StringProperty( + name="Lightmap Directory", + description="TODO", + default="Lightmaps", + subtype="FILE_PATH") + + tlm_dilation_margin : IntProperty( + name="Dilation margin", + default=4, + min=1, + max=64, + subtype='PIXEL') + + tlm_exposure_multiplier : FloatProperty( + name="Exposure Multiplier", + default=0, + description="0 to disable. 
Multiplies GI value") + + tlm_metallic_handling_mode : EnumProperty( + items = [('ignore', 'Ignore', 'No directional information'), + ('clamp', 'Clamp', 'Clamp to value 0.9'), + ('zero', 'Zero', 'Temporarily set to 0 during baking, and reapply after')], + name = "Metallic handling", + description="Set metallic handling mode to prevent black-baking.", + default="ignore") \ No newline at end of file diff --git a/blender/arm/lightmapper/properties/renderer/luxcorerender.py b/blender/arm/lightmapper/properties/renderer/luxcorerender.py new file mode 100644 index 00000000..e69de29b diff --git a/blender/arm/lightmapper/properties/renderer/octanerender.py b/blender/arm/lightmapper/properties/renderer/octanerender.py new file mode 100644 index 00000000..e69de29b diff --git a/blender/arm/lightmapper/properties/renderer/radeonrays.py b/blender/arm/lightmapper/properties/renderer/radeonrays.py new file mode 100644 index 00000000..e69de29b diff --git a/blender/arm/lightmapper/properties/scene.py b/blender/arm/lightmapper/properties/scene.py new file mode 100644 index 00000000..b7da0bc4 --- /dev/null +++ b/blender/arm/lightmapper/properties/scene.py @@ -0,0 +1,273 @@ +import bpy +from bpy.props import * + +class TLM_SceneProperties(bpy.types.PropertyGroup): + + engines = [('Cycles', 'Cycles', 'Use Cycles for lightmapping')] + + #engines.append(('LuxCoreRender', 'LuxCoreRender', 'Use LuxCoreRender for lightmapping')) + #engines.append(('OctaneRender', 'Octane Render', 'Use Octane Render for lightmapping')) + + tlm_lightmap_engine : EnumProperty( + items = engines, + name = "Lightmap Engine", + description="Select which lightmap engine to use.", + default='Cycles') + + #SETTINGS GROUP + tlm_setting_clean_option : EnumProperty( + items = [('Clean', 'Full Clean', 'Clean lightmap directory and revert all materials'), + ('CleanMarked', 'Clean marked', 'Clean only the objects marked for lightmapping')], + name = "Clean mode", + description="The cleaning mode, either full or partial clean. Be careful that you don't delete lightmaps you don't intend to delete.", + default='Clean') + + tlm_setting_keep_cache_files : BoolProperty( + name="Keep cache files", + description="Keep cache files (non-filtered and non-denoised)", + default=True) + + tlm_setting_renderer : EnumProperty( + items = [('CPU', 'CPU', 'Bake using the processor'), + ('GPU', 'GPU', 'Bake using the graphics card')], + name = "Device", + description="Select whether to use the CPU or the GPU", + default="CPU") + + tlm_setting_scale : EnumProperty( + items = [('8', '1/8', '1/8th of set scale'), + ('4', '1/4', '1/4th of set scale'), + ('2', '1/2', 'Half of set scale'), + ('1', '1/1', 'Full scale')], + name = "Lightmap Resolution scale", + description="Lightmap resolution scaling. Adjust for previewing.", + default="1") + + tlm_setting_supersample : EnumProperty( + items = [('2x', '2x', 'Double the sampling resolution'), + ('4x', '4x', 'Quadruple the sampling resolution')], + name = "Lightmap Supersampling", + description="Supersamples the baked lightmap. Increases bake time", + default="2x") + + tlm_setting_savedir : StringProperty( + name="Lightmap Directory", + description="Your baked lightmaps will be stored here.", + default="Lightmaps", + subtype="FILE_PATH") + + tlm_setting_exposure_multiplier : FloatProperty( + name="Exposure Multiplier", + default=0, + description="0 to disable. 
Multiplies GI value") + + tlm_alert_on_finish : BoolProperty( + name="Alert on finish", + description="Play a sound when the lightmaps are done.", + default=False) + + tlm_setting_apply_scale : BoolProperty( + name="Apply scale", + description="Apply the scale before unwrapping.", + default=True) + + tlm_play_sound : BoolProperty( + name="Play sound on finish", + description="Play sound on finish", + default=False) + + tlm_compile_statistics : BoolProperty( + name="Compile statistics", + description="Compile time statistics in the lightmap folder.", + default=True) + + tlm_apply_on_unwrap : BoolProperty( + name="Apply scale", + description="TODO", + default=False) + + #DENOISE SETTINGS GROUP + tlm_denoise_use : BoolProperty( + name="Enable denoising", + description="Enable denoising for lightmaps", + default=False) + + tlm_denoise_engine : EnumProperty( + items = [('Integrated', 'Integrated', 'Use the Blender native denoiser (Compositor; Slow)'), + ('OIDN', 'Intel Denoiser', 'Use Intel denoiser (CPU powered)'), + ('Optix', 'Optix Denoiser', 'Use Nvidia Optix denoiser (GPU powered)')], + name = "Denoiser", + description="Select which denoising engine to use.", + default='Integrated') + + #FILTERING SETTINGS GROUP + tlm_filtering_use : BoolProperty( + name="Enable filtering", + description="Enable filtering for lightmaps", + default=False) + + tlm_filtering_engine : EnumProperty( + items = [('OpenCV', 'OpenCV', 'Make use of OpenCV based image filtering (Requires it to be installed first in the preferences panel)'), + ('Shader', 'Shader', 'Make use of GPU offscreen shader to filter')], + name = "Filtering library", + description="Select which filtering library to use.", + default='OpenCV') + + #Numpy Filtering options + tlm_numpy_filtering_mode : EnumProperty( + items = [('Blur', 'Blur', 'Basic blur filtering.')], + name = "Filter", + description="TODO", + default='Blur') + + #OpenCV Filtering options + tlm_filtering_mode : EnumProperty( + items = [('Box', 'Box', 'Basic box blur'), + ('Gaussian', 'Gaussian', 'Gaussian blurring'), + ('Bilateral', 'Bilateral', 'Edge-aware filtering'), + ('Median', 'Median', 'Median blur')], + name = "Filter", + description="TODO", + default='Median') + + tlm_filtering_gaussian_strength : IntProperty( + name="Gaussian Strength", + default=3, + min=1, + max=50) + + tlm_filtering_iterations : IntProperty( + name="Filter Iterations", + default=5, + min=1, + max=50) + + tlm_filtering_box_strength : IntProperty( + name="Box Strength", + default=1, + min=1, + max=50) + + tlm_filtering_bilateral_diameter : IntProperty( + name="Pixel diameter", + default=3, + min=1, + max=50) + + tlm_filtering_bilateral_color_deviation : IntProperty( + name="Color deviation", + default=75, + min=1, + max=100) + + tlm_filtering_bilateral_coordinate_deviation : IntProperty( + name="Color deviation", + default=75, + min=1, + max=100) + + tlm_filtering_median_kernel : IntProperty( + name="Median kernel", + default=3, + min=1, + max=5) + + #Encoding properties + tlm_encoding_use : BoolProperty( + name="Enable encoding", + description="Enable encoding for lightmaps", + default=False) + + tlm_encoding_mode : EnumProperty( + items = [('RGBM', 'RGBM', '8-bit HDR encoding. Good for compatibility, good for memory but has banding issues.'), + ('LogLuv', 'LogLuv', '8-bit HDR encoding. Different.'), + ('HDR', 'HDR', '32-bit HDR encoding. 
Best quality, but high memory usage and not compatible with all devices.')], + name = "Encoding Mode", + description="TODO", + default='HDR') + + tlm_encoding_range : IntProperty( + name="Encoding range", + description="Higher gives a larger HDR range, but also gives more banding.", + default=6, + min=1, + max=10) + + tlm_encoding_armory_setup : BoolProperty( + name="Use Armory decoder", + description="TODO", + default=False) + + tlm_encoding_colorspace : EnumProperty( + items = [('XYZ', 'XYZ', 'TODO'), + ('sRGB', 'sRGB', 'TODO'), + ('NonColor', 'Non-Color', 'TODO'), + ('ACES', 'Linear ACES', 'TODO'), + ('Linear', 'Linear', 'TODO'), + ('FilmicLog', 'Filmic Log', 'TODO')], + name = "Color Space", + description="TODO", + default='Linear') + + tlm_compression : IntProperty( + name="PNG Compression", + description="0 = No compression. 100 = Maximum compression.", + default=0, + min=0, + max=100) + + tlm_format : EnumProperty( + items = [('RGBE', 'HDR', '32-bit RGBE encoded .hdr files. No compression available.'), + ('EXR', 'EXR', '32-bit OpenEXR format.')], + name = "Format", + description="Select default 32-bit format", + default='RGBE') + + tlm_override_object_settings : BoolProperty( + name="Override settings", + description="TODO", + default=False) + + tlm_mesh_lightmap_resolution : EnumProperty( + items = [('32', '32', 'TODO'), + ('64', '64', 'TODO'), + ('128', '128', 'TODO'), + ('256', '256', 'TODO'), + ('512', '512', 'TODO'), + ('1024', '1024', 'TODO'), + ('2048', '2048', 'TODO'), + ('4096', '4096', 'TODO'), + ('8192', '8192', 'TODO')], + name = "Lightmap Resolution", + description="TODO", + default='256') + + tlm_mesh_lightmap_unwrap_mode : EnumProperty( + items = [('Lightmap', 'Lightmap', 'TODO'), + ('SmartProject', 'Smart Project', 'TODO'), + ('CopyExisting', 'Copy Existing', 'TODO'), + ('AtlasGroup', 'Atlas Group', 'TODO')], + name = "Unwrap Mode", + description="TODO", + default='SmartProject') + + tlm_mesh_unwrap_margin : FloatProperty( + name="Unwrap Margin", + default=0.1, + min=0.0, + max=1.0, + subtype='FACTOR') + + tlm_headless : BoolProperty( + name="Don't apply materials", + description="Headless; Do not apply baked materials on finish.", + default=False) + + tlm_alert_sound : EnumProperty( + items = [('dash', 'Dash', 'Dash alert'), + ('noot', 'Noot', 'Noot alert'), + ('gentle', 'Gentle', 'Gentle alert'), + ('pingping', 'Ping', 'Ping alert')], + name = "Alert sound", + description="Alert sound when lightmap building finished.", + default="gentle") \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/__init__.py b/blender/arm/lightmapper/utility/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/blender/arm/lightmapper/utility/build.py b/blender/arm/lightmapper/utility/build.py new file mode 100644 index 00000000..c4b05a7e --- /dev/null +++ b/blender/arm/lightmapper/utility/build.py @@ -0,0 +1,612 @@ +import bpy, os, importlib, subprocess, sys, threading, platform, aud +from . import encoding +from . cycles import lightmap, prepare, nodes, cache +from . denoiser import integrated, oidn, optix +from . 
filtering import opencv +from os import listdir +from os.path import isfile, join +from time import time, sleep + +previous_settings = {} + +def prepare_build(self=0, background_mode=False): + + if bpy.context.scene.TLM_EngineProperties.tlm_bake_mode == "Foreground" or background_mode==True: + + global start_time + start_time = time() + + scene = bpy.context.scene + sceneProperties = scene.TLM_SceneProperties + + #We dynamically load the renderer and denoiser, instead of loading something we don't use + + if sceneProperties.tlm_lightmap_engine == "Cycles": + + pass + + if sceneProperties.tlm_lightmap_engine == "LuxCoreRender": + + pass + + if sceneProperties.tlm_lightmap_engine == "OctaneRender": + + pass + + #Timer start here bound to global + + if check_save(): + print("Please save your file first") + self.report({'INFO'}, "Please save your file first") + return{'FINISHED'} + + if check_denoiser(): + print("No denoise OIDN path assigned") + self.report({'INFO'}, "No denoise OIDN path assigned") + return{'FINISHED'} + + if check_materials(): + print("Error with material") + self.report({'INFO'}, "Error with material") + return{'FINISHED'} + + if opencv_check(): + if sceneProperties.tlm_filtering_use: + print("Error:Filtering - OpenCV not installed") + self.report({'INFO'}, "Error:Filtering - OpenCV not installed") + return{'FINISHED'} + + setMode() + + dirpath = os.path.join(os.path.dirname(bpy.data.filepath), bpy.context.scene.TLM_EngineProperties.tlm_lightmap_savedir) + if not os.path.isdir(dirpath): + os.mkdir(dirpath) + + #Naming check + naming_check() + + ## RENDER DEPENDENCY FROM HERE + + if sceneProperties.tlm_lightmap_engine == "Cycles": + + prepare.init(self, previous_settings) + + if sceneProperties.tlm_lightmap_engine == "LuxCoreRender": + + pass + + if sceneProperties.tlm_lightmap_engine == "OctaneRender": + + pass + + #Renderer - Store settings + + #Renderer - Set settings + + #Renderer - Config objects, lights, world + + begin_build() + + else: + + filepath = bpy.data.filepath + + start_time = time() + + scene = bpy.context.scene + sceneProperties = scene.TLM_SceneProperties + + #We dynamically load the renderer and denoiser, instead of loading something we don't use + + if sceneProperties.tlm_lightmap_engine == "Cycles": + + pass + + if sceneProperties.tlm_lightmap_engine == "LuxCoreRender": + + pass + + if sceneProperties.tlm_lightmap_engine == "OctaneRender": + + pass + + #Timer start here bound to global + + if check_save(): + print("Please save your file first") + self.report({'INFO'}, "Please save your file first") + return{'FINISHED'} + + if check_denoiser(): + print("No denoise OIDN path assigned") + self.report({'INFO'}, "No denoise OIDN path assigned") + return{'FINISHED'} + + if check_materials(): + print("Error with material") + self.report({'INFO'}, "Error with material") + return{'FINISHED'} + + if opencv_check(): + if sceneProperties.tlm_filtering_use: + print("Error:Filtering - OpenCV not installed") + self.report({'INFO'}, "Error:Filtering - OpenCV not installed") + return{'FINISHED'} + + dirpath = os.path.join(os.path.dirname(bpy.data.filepath), bpy.context.scene.TLM_EngineProperties.tlm_lightmap_savedir) + if not os.path.isdir(dirpath): + os.mkdir(dirpath) + + #Naming check + naming_check() + + pipe_open([sys.executable,"-b",filepath,"--python-expr",'import bpy; import thelightmapper; thelightmapper.addon.utility.build.prepare_build(0, True);'], finish_assemble) + +def finish_assemble(): + pass + #bpy.ops.wm.revert_mainfile() We cannot use this, as Blender 
crashes... + print("Background baking finished") + + scene = bpy.context.scene + sceneProperties = scene.TLM_SceneProperties + + if sceneProperties.tlm_lightmap_engine == "Cycles": + + prepare.init(previous_settings) + + if sceneProperties.tlm_lightmap_engine == "LuxCoreRender": + pass + + if sceneProperties.tlm_lightmap_engine == "OctaneRender": + pass + + manage_build(True) + +def pipe_open(args, callback): + + def thread_process(args, callback): + process = subprocess.Popen(args) + process.wait() + callback() + return + + thread = threading.Thread(target=thread_process, args=(args, callback)) + thread.start() + return thread + +def begin_build(): + + dirpath = os.path.join(os.path.dirname(bpy.data.filepath), bpy.context.scene.TLM_EngineProperties.tlm_lightmap_savedir) + + scene = bpy.context.scene + sceneProperties = scene.TLM_SceneProperties + + if sceneProperties.tlm_lightmap_engine == "Cycles": + + lightmap.bake() + + if sceneProperties.tlm_lightmap_engine == "LuxCoreRender": + pass + + if sceneProperties.tlm_lightmap_engine == "OctaneRender": + pass + + #Denoiser + + if sceneProperties.tlm_denoise_use: + + if sceneProperties.tlm_denoise_engine == "Integrated": + + baked_image_array = [] + + dirfiles = [f for f in listdir(dirpath) if isfile(join(dirpath, f))] + + for file in dirfiles: + if file.endswith("_baked.hdr"): + baked_image_array.append(file) + + print(baked_image_array) + + denoiser = integrated.TLM_Integrated_Denoise() + + denoiser.load(baked_image_array) + + denoiser.setOutputDir(dirpath) + + denoiser.denoise() + + elif sceneProperties.tlm_denoise_engine == "OIDN": + + baked_image_array = [] + + dirfiles = [f for f in listdir(dirpath) if isfile(join(dirpath, f))] + + for file in dirfiles: + if file.endswith("_baked.hdr"): + baked_image_array.append(file) + + oidnProperties = scene.TLM_OIDNEngineProperties + + denoiser = oidn.TLM_OIDN_Denoise(oidnProperties, baked_image_array, dirpath) + + denoiser.denoise() + + denoiser.clean() + + del denoiser + + else: + + baked_image_array = [] + + dirfiles = [f for f in listdir(dirpath) if isfile(join(dirpath, f))] + + for file in dirfiles: + if file.endswith("_baked.hdr"): + baked_image_array.append(file) + + optixProperties = scene.TLM_OptixEngineProperties + + denoiser = optix.TLM_Optix_Denoise(optixProperties, baked_image_array, dirpath) + + denoiser.denoise() + + denoiser.clean() + + del denoiser + + #Filtering + if sceneProperties.tlm_filtering_use: + + if sceneProperties.tlm_denoise_use: + useDenoise = True + else: + useDenoise = False + + filter = opencv.TLM_CV_Filtering + + filter.init(dirpath, useDenoise) + + if sceneProperties.tlm_encoding_use: + + if sceneProperties.tlm_encoding_mode == "HDR": + + if sceneProperties.tlm_format == "EXR": + + print("EXR Format") + + ren = bpy.context.scene.render + ren.image_settings.file_format = "OPEN_EXR" + #ren.image_settings.exr_codec = "scene.TLM_SceneProperties.tlm_exr_codec" + + end = "_baked" + + baked_image_array = [] + + if sceneProperties.tlm_denoise_use: + + end = "_denoised" + + if sceneProperties.tlm_filtering_use: + + end = "_filtered" + + #For each image in folder ending in denoised/filtered + dirfiles = [f for f in listdir(dirpath) if isfile(join(dirpath, f))] + + for file in dirfiles: + if file.endswith(end + ".hdr"): + + img = bpy.data.images.load(os.path.join(dirpath,file)) + img.save_render(img.filepath_raw[:-4] + ".exr") + + if sceneProperties.tlm_encoding_mode == "LogLuv": + + dirfiles = [f for f in listdir(dirpath) if isfile(join(dirpath, f))] + + end = "_baked" + + 
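# --- Editorial sketch, not part of the patch: the "_baked" -> "_denoised" ->
# "_filtered" postfix selection above is repeated verbatim in every encoding
# branch of begin_build(). The same convention, factored into a standalone
# helper for clarity; the name resolve_postfix() is hypothetical and only
# used here for illustration.

def resolve_postfix(sceneProperties):
    """Return the filename postfix of the most processed lightmap stage."""
    end = "_baked"
    if sceneProperties.tlm_denoise_use:
        end = "_denoised"
    if sceneProperties.tlm_filtering_use:
        end = "_filtered"
    return end

# Usage sketch: files = [f for f in dirfiles if f.endswith(resolve_postfix(sceneProperties) + ".hdr")]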
if sceneProperties.tlm_denoise_use: + + end = "_denoised" + + if sceneProperties.tlm_filtering_use: + + end = "_filtered" + + for file in dirfiles: + if file.endswith(end + ".hdr"): + + img = bpy.data.images.load(os.path.join(dirpath, file), check_existing=False) + + encoding.encodeLogLuv(img, dirpath, 0) + + if sceneProperties.tlm_encoding_mode == "RGBM": + + print("ENCODING RGBM") + + dirfiles = [f for f in listdir(dirpath) if isfile(join(dirpath, f))] + + end = "_baked" + + if sceneProperties.tlm_denoise_use: + + end = "_denoised" + + if sceneProperties.tlm_filtering_use: + + end = "_filtered" + + for file in dirfiles: + if file.endswith(end + ".hdr"): + + img = bpy.data.images.load(os.path.join(dirpath, file), check_existing=False) + + print("Encoding:" + str(file)) + encoding.encodeImageRGBM(img, sceneProperties.tlm_encoding_range, dirpath, 0) + + manage_build() + +def manage_build(background_pass=False): + + scene = bpy.context.scene + sceneProperties = scene.TLM_SceneProperties + + if sceneProperties.tlm_lightmap_engine == "Cycles": + + if background_pass: + nodes.apply_lightmaps() + + nodes.apply_materials() #From here the name is changed... + + end = "_baked" + + if sceneProperties.tlm_denoise_use: + + end = "_denoised" + + if sceneProperties.tlm_filtering_use: + + end = "_filtered" + + formatEnc = ".hdr" + + if sceneProperties.tlm_encoding_use: + + if sceneProperties.tlm_encoding_mode == "HDR": + + if sceneProperties.tlm_format == "EXR": + + formatEnc = ".exr" + + if sceneProperties.tlm_encoding_mode == "LogLuv": + + formatEnc = "_encoded.png" + + if sceneProperties.tlm_encoding_mode == "RGBM": + + formatEnc = "_encoded.png" + + if not background_pass: + nodes.exchangeLightmapsToPostfix("_baked", end, formatEnc) + + if sceneProperties.tlm_lightmap_engine == "LuxCoreRender": + + pass + + if sceneProperties.tlm_lightmap_engine == "OctaneRender": + + pass + + if bpy.context.scene.TLM_EngineProperties.tlm_bake_mode == "Background": + pass + #bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath + "baked") #Crashes Blender + + if scene.TLM_EngineProperties.tlm_setting_supersample == "2x": + supersampling_scale = 2 + elif scene.TLM_EngineProperties.tlm_setting_supersample == "4x": + supersampling_scale = 4 + else: + supersampling_scale = 1 + + # for image in bpy.data.images: + # if image.name.endswith("_baked"): + # resolution = image.size[0] + # rescale = resolution / supersampling_scale + # image.scale(rescale, rescale) + # image.save() + + for image in bpy.data.images: + if image.users < 1: + bpy.data.images.remove(image) + + if scene.TLM_SceneProperties.tlm_headless: + + filepath = bpy.data.filepath + dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.TLM_EngineProperties.tlm_lightmap_savedir) + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + cache.backup_material_restore(obj) + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + cache.backup_material_rename(obj) + + for mat in bpy.data.materials: + if mat.users < 1: + bpy.data.materials.remove(mat) + + for mat in bpy.data.materials: + if mat.name.startswith("."): + if "_Original" in mat.name: + bpy.data.materials.remove(mat) + + for obj in bpy.data.objects: + + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + img_name = obj.name + '_baked' + Lightmapimage = bpy.data.images[img_name] + obj["Lightmap"] = Lightmapimage.filepath_raw + + for image in bpy.data.images: + if 
image.name.endswith("_baked"): + bpy.data.images.remove(image, do_unlink=True) + + total_time = sec_to_hours((time() - start_time)) + print(total_time) + + reset_settings(previous_settings["settings"]) + + if scene.TLM_SceneProperties.tlm_alert_on_finish: + + alertSelect = scene.TLM_SceneProperties.tlm_alert_sound + + if alertSelect == "dash": + soundfile = "dash.ogg" + elif alertSelect == "pingping": + soundfile = "pingping.ogg" + elif alertSelect == "gentle": + soundfile = "gentle.ogg" + else: + soundfile = "noot.ogg" + + scriptDir = os.path.dirname(os.path.realpath(__file__)) + sound_path = os.path.abspath(os.path.join(scriptDir, '..', 'assets/'+soundfile)) + + device = aud.Device() + sound = aud.Sound.file(sound_path) + device.play(sound) + +def reset_settings(prev_settings): + scene = bpy.context.scene + cycles = scene.cycles + + cycles.samples = int(prev_settings[0]) + cycles.max_bounces = int(prev_settings[1]) + cycles.diffuse_bounces = int(prev_settings[2]) + cycles.glossy_bounces = int(prev_settings[3]) + cycles.transparent_max_bounces = int(prev_settings[4]) + cycles.transmission_bounces = int(prev_settings[5]) + cycles.volume_bounces = int(prev_settings[6]) + cycles.caustics_reflective = prev_settings[7] + cycles.caustics_refractive = prev_settings[8] + cycles.device = prev_settings[9] + scene.render.engine = prev_settings[10] + bpy.context.view_layer.objects.active = prev_settings[11] + scene.render.resolution_x = prev_settings[13][0] + scene.render.resolution_y = prev_settings[13][1] + + #for obj in prev_settings[12]: + # obj.select_set(True) + +def naming_check(): + + for obj in bpy.data.objects: + + if obj.type == "MESH": + + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + + if obj.name != "": + + if "_" in obj.name: + obj.name = obj.name.replace("_",".") + if " " in obj.name: + obj.name = obj.name.replace(" ",".") + if "[" in obj.name: + obj.name = obj.name.replace("[",".") + if "]" in obj.name: + obj.name = obj.name.replace("]",".") + if "Ăž" in obj.name: + obj.name = obj.name.replace("Ăž","oe") + if "ĂŠ" in obj.name: + obj.name = obj.name.replace("ĂŠ","ae") + if "Ă„" in obj.name: + obj.name = obj.name.replace("Ă„","aa") + + for slot in obj.material_slots: + if "_" in slot.material.name: + slot.material.name = slot.material.name.replace("_",".") + if " " in slot.material.name: + slot.material.name = slot.material.name.replace(" ",".") + if "[" in slot.material.name: + slot.material.name = slot.material.name.replace("[",".") + if "[" in slot.material.name: + slot.material.name = slot.material.name.replace("]",".") + if "Ăž" in slot.material.name: + slot.material.name = slot.material.name.replace("Ăž","oe") + if "ĂŠ" in slot.material.name: + slot.material.name = slot.material.name.replace("ĂŠ","ae") + if "Ă„" in slot.material.name: + slot.material.name = slot.material.name.replace("Ă„","aa") + +def opencv_check(): + + cv2 = importlib.util.find_spec("cv2") + + if cv2 is not None: + return 0 + else: + return 1 + +def check_save(): + if not bpy.data.is_saved: + + return 1 + + else: + + return 0 + +def check_denoiser(): + + scene = bpy.context.scene + + if scene.TLM_SceneProperties.tlm_denoise_use: + + if scene.TLM_SceneProperties.tlm_denoise_engine == "OIDN": + + oidnPath = scene.TLM_OIDNEngineProperties.tlm_oidn_path + + if scene.TLM_OIDNEngineProperties.tlm_oidn_path == "": + return 1 + + if platform.system() == "Windows": + if not scene.TLM_OIDNEngineProperties.tlm_oidn_path.endswith(".exe"): + return 1 + else: + return 0 + +def check_materials(): + for obj in 
bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + for slot in obj.material_slots: + mat = slot.material + + if mat is None: + print("MatNone") + mat = bpy.data.materials.new(name="Material") + mat.use_nodes = True + slot.material = mat + + nodes = mat.node_tree.nodes + + #TODO FINISH MATERIAL CHECK -> Nodes check + #Afterwards, redo build/utility + +def sec_to_hours(seconds): + a=str(seconds//3600) + b=str((seconds%3600)//60) + c=str((seconds%3600)%60) + d=["{} hours {} mins {} seconds".format(a, b, c)] + return d + +def setMode(): + bpy.ops.object.mode_set(mode='OBJECT') + + #TODO Make some checks that returns to previous selection \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/cycles/ao.py b/blender/arm/lightmapper/utility/cycles/ao.py new file mode 100644 index 00000000..e69de29b diff --git a/blender/arm/lightmapper/utility/cycles/cache.py b/blender/arm/lightmapper/utility/cycles/cache.py new file mode 100644 index 00000000..4c168e66 --- /dev/null +++ b/blender/arm/lightmapper/utility/cycles/cache.py @@ -0,0 +1,55 @@ +import bpy + +#Todo - Check if already exists, in case multiple objects has the same material + + +def backup_material_copy(slot): + material = slot.material + dup = material.copy() + dup.name = "." + material.name + "_Original" + dup.use_fake_user = True + +def backup_material_cache(slot, path): + bpy.ops.wm.save_as_mainfile(filepath=path, copy=True) + +def backup_material_cache_restore(slot, path): + print("Restore cache") + +def backup_material_rename(obj): + if "TLM_PrevMatArray" in obj: + print("Has PrevMat B") + for slot in obj.material_slots: + + if slot.material is not None: + if slot.material.name.endswith("_Original"): + newname = slot.material.name[1:-9] + if newname in bpy.data.materials: + bpy.data.materials.remove(bpy.data.materials[newname]) + slot.material.name = newname + + del obj["TLM_PrevMatArray"] + +def backup_material_restore(obj): + print("RESTORE") + + if "TLM_PrevMatArray" in obj: + + print("Has PrevMat A") + #Running through the slots + prevMatArray = obj["TLM_PrevMatArray"] + slotsLength = len(prevMatArray) + + if len(prevMatArray) > 0: + for idx, slot in enumerate(obj.material_slots): #For each slot, we get the index + #We only need the index, corresponds to the array index + try: + originalMaterial = prevMatArray[idx] + except IndexError: + originalMaterial = "" + + if slot.material is not None: + slot.material.user_clear() + + if "." + originalMaterial + "_Original" in bpy.data.materials: + slot.material = bpy.data.materials["." 
+ originalMaterial + "_Original"] + slot.material.use_fake_user = False \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/cycles/indirect.py b/blender/arm/lightmapper/utility/cycles/indirect.py new file mode 100644 index 00000000..e69de29b diff --git a/blender/arm/lightmapper/utility/cycles/lightmap.py b/blender/arm/lightmapper/utility/cycles/lightmap.py new file mode 100644 index 00000000..e9449b9a --- /dev/null +++ b/blender/arm/lightmapper/utility/cycles/lightmap.py @@ -0,0 +1,50 @@ +import bpy, os + +def bake(): + + for obj in bpy.data.objects: + bpy.ops.object.select_all(action='DESELECT') + obj.select_set(False) + + iterNum = 0 + currentIterNum = 0 + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + iterNum = iterNum + 1 + + if iterNum > 1: + iterNum = iterNum - 1 + + for obj in bpy.data.objects: + if obj.type == 'MESH': + + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + + scene = bpy.context.scene + + bpy.ops.object.select_all(action='DESELECT') + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + obs = bpy.context.view_layer.objects + active = obs.active + obj.hide_render = False + scene.render.bake.use_clear = False + + print("Baking " + str(currentIterNum) + "/" + str(iterNum) + " (" + str(round(currentIterNum/iterNum*100, 2)) + "%) : " + obj.name) + + bpy.ops.object.bake(type="DIFFUSE", pass_filter={"DIRECT","INDIRECT"}, margin=scene.TLM_EngineProperties.tlm_dilation_margin, use_clear=False) + bpy.ops.object.select_all(action='DESELECT') + currentIterNum = currentIterNum + 1 + + for image in bpy.data.images: + if image.name.endswith("_baked"): + + saveDir = os.path.join(os.path.dirname(bpy.data.filepath), bpy.context.scene.TLM_EngineProperties.tlm_lightmap_savedir) + bakemap_path = os.path.join(saveDir, image.name) + filepath_ext = ".hdr" + image.filepath_raw = bakemap_path + filepath_ext + image.file_format = "HDR" + print("Saving to: " + image.filepath_raw) + image.save() \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/cycles/nodes.py b/blender/arm/lightmapper/utility/cycles/nodes.py new file mode 100644 index 00000000..1494b0fb --- /dev/null +++ b/blender/arm/lightmapper/utility/cycles/nodes.py @@ -0,0 +1,184 @@ +import bpy, os + +def apply_lightmaps(): + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + for slot in obj.material_slots: + mat = slot.material + node_tree = mat.node_tree + nodes = mat.node_tree.nodes + + scene = bpy.context.scene + + dirpath = os.path.join(os.path.dirname(bpy.data.filepath), scene.TLM_EngineProperties.tlm_lightmap_savedir) + + #Find nodes + for node in nodes: + if node.name == "Baked Image": + + extension = ".hdr" + + postfix = "_baked" + + if scene.TLM_SceneProperties.tlm_denoise_use: + postfix = "_denoised" + if scene.TLM_SceneProperties.tlm_filtering_use: + postfix = "_filtered" + + node.image.source = "FILE" + image_name = obj.name + postfix + extension #TODO FIX EXTENSION + node.image.filepath_raw = os.path.join(dirpath, image_name) + + +def apply_materials(): + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + + uv_layers = obj.data.uv_layers + uv_layers.active_index = 0 + scene = bpy.context.scene + + decoding = False + + #Sort name + for slot in obj.material_slots: + mat = slot.material + if mat.name.endswith('_temp'): + old = slot.material + slot.material = 
bpy.data.materials[old.name.split('_' + obj.name)[0]] + + if(scene.TLM_EngineProperties.tlm_exposure_multiplier > 0): + tlm_exposure = bpy.data.node_groups.get("Exposure") + + if tlm_exposure == None: + load_library("Exposure") + + #Apply materials + print(obj.name) + for slot in obj.material_slots: + mat = slot.material + print(slot.material) + + node_tree = mat.node_tree + nodes = mat.node_tree.nodes + + foundBakedNode = False + + #Find nodes + for node in nodes: + if node.name == "Baked Image": + lightmapNode = node + lightmapNode.location = -800, 300 + lightmapNode.name = "TLM_Lightmap" + foundBakedNode = True + + img_name = obj.name + '_baked' + + if not foundBakedNode: + lightmapNode = node_tree.nodes.new(type="ShaderNodeTexImage") + lightmapNode.location = -300, 300 + lightmapNode.image = bpy.data.images[img_name] + lightmapNode.name = "TLM_Lightmap" + lightmapNode.interpolation = "Smart" + + #Find output node + outputNode = nodes[0] + if(outputNode.type != "OUTPUT_MATERIAL"): + for node in node_tree.nodes: + if node.type == "OUTPUT_MATERIAL": + outputNode = node + break + + #Find mainnode + mainNode = outputNode.inputs[0].links[0].from_node + + #Add all nodes first + #Add lightmap multipliction texture + mixNode = node_tree.nodes.new(type="ShaderNodeMixRGB") + mixNode.name = "Lightmap_Multiplication" + mixNode.location = -300, 300 + mixNode.blend_type = 'MULTIPLY' + mixNode.inputs[0].default_value = 1.0 + + UVLightmap = node_tree.nodes.new(type="ShaderNodeUVMap") + UVLightmap.uv_map = "UVMap_Lightmap" + UVLightmap.name = "Lightmap_UV" + UVLightmap.location = -1000, 300 + + if(scene.TLM_EngineProperties.tlm_exposure_multiplier > 0): + ExposureNode = node_tree.nodes.new(type="ShaderNodeGroup") + ExposureNode.node_tree = bpy.data.node_groups["Exposure"] + ExposureNode.inputs[1].default_value = scene.TLM_EngineProperties.tlm_exposure_multiplier + ExposureNode.location = -500, 300 + ExposureNode.name = "Lightmap_Exposure" + + #Add Basecolor node + if len(mainNode.inputs[0].links) == 0: + baseColorValue = mainNode.inputs[0].default_value + baseColorNode = node_tree.nodes.new(type="ShaderNodeRGB") + baseColorNode.outputs[0].default_value = baseColorValue + baseColorNode.location = ((mainNode.location[0] - 500, mainNode.location[1] - 300)) + baseColorNode.name = "Lightmap_BasecolorNode_A" + else: + baseColorNode = mainNode.inputs[0].links[0].from_node + baseColorNode.name = "LM_P" + + #Linking + if(scene.TLM_EngineProperties.tlm_exposure_multiplier > 0): + mat.node_tree.links.new(lightmapNode.outputs[0], ExposureNode.inputs[0]) #Connect lightmap node to mixnode + mat.node_tree.links.new(ExposureNode.outputs[0], mixNode.inputs[1]) #Connect lightmap node to mixnode + else: + mat.node_tree.links.new(lightmapNode.outputs[0], mixNode.inputs[1]) #Connect lightmap node to mixnode + mat.node_tree.links.new(baseColorNode.outputs[0], mixNode.inputs[2]) #Connect basecolor to pbr node + mat.node_tree.links.new(mixNode.outputs[0], mainNode.inputs[0]) #Connect mixnode to pbr node + mat.node_tree.links.new(UVLightmap.outputs[0], lightmapNode.inputs[0]) #Connect uvnode to lightmapnode + +def exchangeLightmapsToPostfix(ext_postfix, new_postfix, formatHDR=".hdr"): + + print(ext_postfix, new_postfix, formatHDR) + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + for slot in obj.material_slots: + mat = slot.material + node_tree = mat.node_tree + nodes = mat.node_tree.nodes + + for node in nodes: + if node.name == "Baked Image" or node.name == 
"TLM_Lightmap": + img_name = node.image.filepath_raw + cutLen = len(ext_postfix + formatHDR) + print("Len:" + str(len(ext_postfix + formatHDR)) + "|" + ext_postfix + ".." + formatHDR) + + #Simple way to sort out objects with multiple materials + if formatHDR == ".hdr" or formatHDR == ".exr": + if not node.image.filepath_raw.endswith(new_postfix + formatHDR): + node.image.filepath_raw = img_name[:-cutLen] + new_postfix + formatHDR + else: + cutLen = len(ext_postfix + ".hdr") + if not node.image.filepath_raw.endswith(new_postfix + formatHDR): + node.image.filepath_raw = img_name[:-cutLen] + new_postfix + formatHDR + + for image in bpy.data.images: + image.reload() + +def load_library(asset_name): + + scriptDir = os.path.dirname(os.path.realpath(__file__)) + + if bpy.data.filepath.endswith('tlm_data.blend'): # Prevent load in library itself + return + + data_path = os.path.abspath(os.path.join(scriptDir, '..', '..', 'Assets/tlm_data.blend')) + data_names = [asset_name] + + # Import + data_refs = data_names.copy() + with bpy.data.libraries.load(data_path, link=False) as (data_from, data_to): + data_to.node_groups = data_refs + + for ref in data_refs: + ref.use_fake_user = True \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/cycles/prepare.py b/blender/arm/lightmapper/utility/cycles/prepare.py new file mode 100644 index 00000000..413ffc16 --- /dev/null +++ b/blender/arm/lightmapper/utility/cycles/prepare.py @@ -0,0 +1,420 @@ +import bpy + +from . import cache +from .. utility import * + +def assemble(): + + configure_world() + + configure_lights() + + configure_meshes() + +def init(self, prev_container): + + store_existing(prev_container) + + set_settings() + + configure_world() + + configure_lights() + + configure_meshes(self) + +def configure_world(): + pass + +def configure_lights(): + pass + +def configure_meshes(self): + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + cache.backup_material_restore(obj) + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + cache.backup_material_rename(obj) + + for mat in bpy.data.materials: + if mat.users < 1: + bpy.data.materials.remove(mat) + + for mat in bpy.data.materials: + if mat.name.startswith("."): + if "_Original" in mat.name: + bpy.data.materials.remove(mat) + + for image in bpy.data.images: + if image.name.endswith("_baked"): + bpy.data.images.remove(image, do_unlink=True) + + iterNum = 0 + currentIterNum = 0 + + scene = bpy.context.scene + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + for slot in obj.material_slots: + if "." + slot.name + '_Original' in bpy.data.materials: + print("The material: " + slot.name + " shifted to " + "." + slot.name + '_Original') + slot.material = bpy.data.materials["." 
+ slot.name + '_Original'] + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + iterNum = iterNum + 1 + + for obj in bpy.data.objects: + if obj.type == "MESH": + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + + objWasHidden = False + + #For some reason, a Blender bug might prevent invisible objects from being smart projected + #We will turn the object temporarily visible + obj.hide_viewport = False + obj.hide_set(False) + + currentIterNum = currentIterNum + 1 + + #Configure selection + bpy.ops.object.select_all(action='DESELECT') + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + obs = bpy.context.view_layer.objects + active = obs.active + + #Provide material if none exists + preprocess_material(obj, scene) + + #UV Layer management here + if not obj.TLM_ObjectProperties.tlm_mesh_lightmap_unwrap_mode == "AtlasGroup": + uv_layers = obj.data.uv_layers + if not "UVMap_Lightmap" in uv_layers: + print("UVMap made B") + uvmap = uv_layers.new(name="UVMap_Lightmap") + uv_layers.active_index = len(uv_layers) - 1 + + #If lightmap + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_unwrap_mode == "Lightmap": + if scene.TLM_SceneProperties.tlm_apply_on_unwrap: + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + bpy.ops.uv.lightmap_pack('EXEC_SCREEN', PREF_CONTEXT='ALL_FACES', PREF_MARGIN_DIV=obj.TLM_ObjectProperties.tlm_mesh_unwrap_margin) + + #If smart project + elif obj.TLM_ObjectProperties.tlm_mesh_lightmap_unwrap_mode == "SmartProject": + print("Smart Project B") + if scene.TLM_SceneProperties.tlm_apply_on_unwrap: + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + bpy.ops.object.select_all(action='DESELECT') + obj.select_set(True) + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='DESELECT') + bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.uv.smart_project(angle_limit=45.0, island_margin=obj.TLM_ObjectProperties.tlm_mesh_unwrap_margin, user_area_weight=1.0, use_aspect=True, stretch_to_bounds=False) + + elif obj.TLM_ObjectProperties.tlm_mesh_lightmap_unwrap_mode == "Xatlas": + + if scene.TLM_SceneProperties.tlm_apply_on_unwrap: + bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) + + #import blender_xatlas + #blender_xatlas.Unwrap_Lightmap_Group_Xatlas_2(bpy.context) + + #bpy.ops.object.setup_unwrap() + Unwrap_Lightmap_Group_Xatlas_2_headless_call(obj) + + elif obj.TLM_ObjectProperties.tlm_mesh_lightmap_unwrap_mode == "AtlasGroup": + + print("ATLAS GROUP: " + obj.TLM_ObjectProperties.tlm_atlas_pointer) + + else: #if copy existing + + print("Copied Existing B") + + #Here we copy an existing map + pass + else: + print("Existing found...skipping") + for i in range(0, len(uv_layers)): + if uv_layers[i].name == 'UVMap_Lightmap': + uv_layers.active_index = i + print("Lightmap shift B") + break + + #Sort out nodes + for slot in obj.material_slots: + + nodetree = slot.material.node_tree + + outputNode = nodetree.nodes[0] #Presumed to be material output node + + if(outputNode.type != "OUTPUT_MATERIAL"): + for node in nodetree.nodes: + if node.type == "OUTPUT_MATERIAL": + outputNode = node + break + + mainNode = outputNode.inputs[0].links[0].from_node + + if mainNode.type not in ['BSDF_PRINCIPLED','BSDF_DIFFUSE','GROUP']: + + #TODO! FIND THE PRINCIPLED PBR + self.report({'INFO'}, "The primary material node is not supported. 
Seeking first principled.") + + if len(find_node_by_type(nodetree.nodes, Node_Types.pbr_node)) > 0: + mainNode = find_node_by_type(nodetree.nodes, Node_Types.pbr_node)[0] + else: + self.report({'INFO'}, "No principled found. Seeking diffuse") + if len(find_node_by_type(nodetree.nodes, Node_Types.diffuse)) > 0: + mainNode = find_node_by_type(nodetree.nodes, Node_Types.diffuse)[0] + else: + self.report({'INFO'}, "No supported nodes. Continuing anyway.") + pass + + if mainNode.type == 'GROUP': + if mainNode.node_tree != "Armory PBR": + print("The material group is not supported!") + pass + + if (mainNode.type == "BSDF_PRINCIPLED"): + print("BSDF_Principled") + if scene.TLM_EngineProperties.tlm_directional_mode == "None": + print("Directional mode") + if not len(mainNode.inputs[19].links) == 0: + print("NOT LEN 0") + ninput = mainNode.inputs[19].links[0] + noutput = mainNode.inputs[19].links[0].from_node + nodetree.links.remove(noutput.outputs[0].links[0]) + + #Clamp metallic + # if(mainNode.inputs[4].default_value == 1 and scene.TLM_SceneProperties.tlm_clamp_metallic): + # mainNode.inputs[4].default_value = 0.99 + + if (mainNode.type == "BSDF_DIFFUSE"): + print("BSDF_Diffuse") + + for slot in obj.material_slots: + + nodetree = bpy.data.materials[slot.name].node_tree + nodes = nodetree.nodes + + #First search to get the first output material type + for node in nodetree.nodes: + if node.type == "OUTPUT_MATERIAL": + mainNode = node + break + + #Fallback to get search + if not mainNode.type == "OUTPUT_MATERIAL": + mainNode = nodetree.nodes.get("Material Output") + + #Last resort to first node in list + if not mainNode.type == "OUTPUT_MATERIAL": + mainNode = nodetree.nodes[0].inputs[0].links[0].from_node + + for node in nodes: + if "LM" in node.name: + nodetree.links.new(node.outputs[0], mainNode.inputs[0]) + + for node in nodes: + if "Lightmap" in node.name: + nodes.remove(node) + +def preprocess_material(obj, scene): + if len(obj.material_slots) == 0: + single = False + number = 0 + while single == False: + matname = obj.name + ".00" + str(number) + if matname in bpy.data.materials: + single = False + number = number + 1 + else: + mat = bpy.data.materials.new(name=matname) + mat.use_nodes = True + obj.data.materials.append(mat) + single = True + + #We copy the existing material slots to an ordered array, which corresponds to the slot index + matArray = [] + for slot in obj.material_slots: + matArray.append(slot.name) + + obj["TLM_PrevMatArray"] = matArray + + #We check and safeguard against NoneType + for slot in obj.material_slots: + if slot.material is None: + matName = obj.name + ".00" + str(0) + bpy.data.materials.new(name=matName) + slot.material = bpy.data.materials[matName] + slot.material.use_nodes = True + + for slot in obj.material_slots: + + cache.backup_material_copy(slot) + + mat = slot.material + if mat.users > 1: + copymat = mat.copy() + slot.material = copymat + + #SOME ATLAS EXCLUSION HERE? 
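# --- Editorial sketch, not part of the patch: the material bookkeeping in
# preprocess_material() and cache.py follows a fixed naming scheme. The
# original material is duplicated into a hidden ".<name>_Original" backup
# (with a fake user so it survives), while baking happens on a per-object
# "<material>_<object>_temp" copy created just below. Two hypothetical
# helpers spelling out those names; "Wood" and "Crate" are example names.

def backup_name(material_name):
    # Name given by cache.backup_material_copy(): "." + name + "_Original"
    return "." + material_name + "_Original"

def temp_name(material_name, object_name):
    # Name given to the per-object working copy created below
    return material_name + "_" + object_name + "_temp"

# backup_name("Wood")         -> ".Wood_Original"
# temp_name("Wood", "Crate")  -> "Wood_Crate_temp"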
+ ob = obj + for slot in ob.material_slots: + #If temporary material already exists + if slot.material.name.endswith('_temp'): + continue + n = slot.material.name + '_' + ob.name + '_temp' + if not n in bpy.data.materials: + slot.material = slot.material.copy() + slot.material.name = n + + #Add images for baking + img_name = obj.name + '_baked' + #Resolution is object lightmap resolution divided by global scaler + + if scene.TLM_EngineProperties.tlm_setting_supersample == "2x": + supersampling_scale = 2 + elif scene.TLM_EngineProperties.tlm_setting_supersample == "4x": + supersampling_scale = 4 + else: + supersampling_scale = 1 + + res = int(obj.TLM_ObjectProperties.tlm_mesh_lightmap_resolution) / int(scene.TLM_EngineProperties.tlm_resolution_scale) * int(supersampling_scale) + + #If image not in bpy.data.images or if size changed, make a new image + if img_name not in bpy.data.images or bpy.data.images[img_name].size[0] != res or bpy.data.images[img_name].size[1] != res: + img = bpy.data.images.new(img_name, res, res, alpha=True, float_buffer=True) + + num_pixels = len(img.pixels) + result_pixel = list(img.pixels) + + for i in range(0,num_pixels,4): + # result_pixel[i+0] = scene.TLM_SceneProperties.tlm_default_color[0] + # result_pixel[i+1] = scene.TLM_SceneProperties.tlm_default_color[1] + # result_pixel[i+2] = scene.TLM_SceneProperties.tlm_default_color[2] + result_pixel[i+0] = 0.0 + result_pixel[i+1] = 0.0 + result_pixel[i+2] = 0.0 + result_pixel[i+3] = 1.0 + + img.pixels = result_pixel + + img.name = img_name + else: + img = bpy.data.images[img_name] + + for slot in obj.material_slots: + mat = slot.material + mat.use_nodes = True + nodes = mat.node_tree.nodes + + if "Baked Image" in nodes: + img_node = nodes["Baked Image"] + else: + img_node = nodes.new('ShaderNodeTexImage') + img_node.name = 'Baked Image' + img_node.location = (100, 100) + img_node.image = img + img_node.select = True + nodes.active = img_node + +def set_settings(): + + scene = bpy.context.scene + cycles = scene.cycles + scene.render.engine = "CYCLES" + sceneProperties = scene.TLM_SceneProperties + engineProperties = scene.TLM_EngineProperties + cycles.device = scene.TLM_EngineProperties.tlm_mode + + if engineProperties.tlm_quality == "0": + cycles.samples = 32 + cycles.max_bounces = 1 + cycles.diffuse_bounces = 1 + cycles.glossy_bounces = 1 + cycles.transparent_max_bounces = 1 + cycles.transmission_bounces = 1 + cycles.volume_bounces = 1 + cycles.caustics_reflective = False + cycles.caustics_refractive = False + elif engineProperties.tlm_quality == "1": + cycles.samples = 64 + cycles.max_bounces = 2 + cycles.diffuse_bounces = 2 + cycles.glossy_bounces = 2 + cycles.transparent_max_bounces = 2 + cycles.transmission_bounces = 2 + cycles.volume_bounces = 2 + cycles.caustics_reflective = False + cycles.caustics_refractive = False + elif engineProperties.tlm_quality == "2": + cycles.samples = 512 + cycles.max_bounces = 2 + cycles.diffuse_bounces = 2 + cycles.glossy_bounces = 2 + cycles.transparent_max_bounces = 2 + cycles.transmission_bounces = 2 + cycles.volume_bounces = 2 + cycles.caustics_reflective = False + cycles.caustics_refractive = False + elif engineProperties.tlm_quality == "3": + cycles.samples = 1024 + cycles.max_bounces = 256 + cycles.diffuse_bounces = 256 + cycles.glossy_bounces = 256 + cycles.transparent_max_bounces = 256 + cycles.transmission_bounces = 256 + cycles.volume_bounces = 256 + cycles.caustics_reflective = False + cycles.caustics_refractive = False + elif engineProperties.tlm_quality == "4": + 
cycles.samples = 2048 + cycles.max_bounces = 512 + cycles.diffuse_bounces = 512 + cycles.glossy_bounces = 512 + cycles.transparent_max_bounces = 512 + cycles.transmission_bounces = 512 + cycles.volume_bounces = 512 + cycles.caustics_reflective = True + cycles.caustics_refractive = True + else: #Custom + pass + +def store_existing(prev_container): + + scene = bpy.context.scene + cycles = scene.cycles + + selected = [] + + for obj in bpy.data.objects: + if obj.select_get(): + selected.append(obj.name) + + prev_container["settings"] = [ + cycles.samples, + cycles.max_bounces, + cycles.diffuse_bounces, + cycles.glossy_bounces, + cycles.transparent_max_bounces, + cycles.transmission_bounces, + cycles.volume_bounces, + cycles.caustics_reflective, + cycles.caustics_refractive, + cycles.device, + scene.render.engine, + bpy.context.view_layer.objects.active, + selected, + [scene.render.resolution_x, scene.render.resolution_y] + ] \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/denoiser/integrated.py b/blender/arm/lightmapper/utility/denoiser/integrated.py new file mode 100644 index 00000000..1a2867c4 --- /dev/null +++ b/blender/arm/lightmapper/utility/denoiser/integrated.py @@ -0,0 +1,79 @@ +import bpy, os + +class TLM_Integrated_Denoise: + + image_array = [] + image_output_destination = "" + + def load(self, images): + self.image_array = images + + self.cull_undefined() + + def setOutputDir(self, dir): + self.image_output_destination = dir + + def cull_undefined(self): + + #Do a validation check before denoising + + cam = bpy.context.scene.camera + if not cam: + bpy.ops.object.camera_add() + + #Just select the first camera we find, needed for the compositor + for obj in bpy.data.objects: + if obj.type == "CAMERA": + bpy.context.scene.camera = obj + return + + def denoise(self): + + if not bpy.context.scene.use_nodes: + bpy.context.scene.use_nodes = True + + tree = bpy.context.scene.node_tree + + for image in self.image_array: + + print("Image...: " + image) + + img = bpy.data.images.load(self.image_output_destination + "/" + image) + + image_node = tree.nodes.new(type='CompositorNodeImage') + image_node.image = img + image_node.location = 0, 0 + + denoise_node = tree.nodes.new(type='CompositorNodeDenoise') + denoise_node.location = 300, 0 + + comp_node = tree.nodes.new('CompositorNodeComposite') + comp_node.location = 600, 0 + + links = tree.links + links.new(image_node.outputs[0], denoise_node.inputs[0]) + links.new(denoise_node.outputs[0], comp_node.inputs[0]) + + # set output resolution to image res + bpy.context.scene.render.resolution_x = img.size[0] + bpy.context.scene.render.resolution_y = img.size[1] + bpy.context.scene.render.resolution_percentage = 100 + + filePath = bpy.data.filepath + path = os.path.dirname(filePath) + + base = os.path.basename(image) + filename, file_extension = os.path.splitext(image) + filename = filename[:-6] + + bpy.data.scenes["Scene"].render.filepath = self.image_output_destination + "/" + filename + "_denoised" + file_extension + + denoised_image_path = self.image_output_destination + bpy.data.scenes["Scene"].render.image_settings.file_format = "HDR" + + bpy.ops.render.render(write_still=True) + + #Cleanup + comp_nodes = [image_node, denoise_node, comp_node] + for node in comp_nodes: + tree.nodes.remove(node) \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/denoiser/oidn.py b/blender/arm/lightmapper/utility/denoiser/oidn.py new file mode 100644 index 00000000..c96f4552 --- /dev/null +++ 
b/blender/arm/lightmapper/utility/denoiser/oidn.py @@ -0,0 +1,200 @@ +import bpy, os, sys, re, platform, subprocess +import numpy as np + +class TLM_OIDN_Denoise: + + image_array = [] + + image_output_destination = "" + + denoised_array = [] + + def __init__(self, oidnProperties, img_array, dirpath): + + self.oidnProperties = oidnProperties + + self.image_array = img_array + + self.image_output_destination = dirpath + + self.check_binary() + + def check_binary(self): + + oidnPath = self.oidnProperties.tlm_oidn_path + + if oidnPath != "": + + file = os.path.basename(os.path.realpath(oidnPath)) + filename, file_extension = os.path.splitext(file) + + if(file_extension == ".exe"): + + #if file exists oidnDenoise or denoise + + pass + + else: + + #if file exists oidnDenoise or denoise + + self.oidnProperties.tlm_oidn_path = os.path.join(self.oidnProperties.tlm_oidn_path,"oidnDenoise.exe") + + else: + + print("Please provide OIDN path") + + def denoise(self): + + for image in self.image_array: + + if image not in self.denoised_array: + + image_path = os.path.join(self.image_output_destination, image) + + #Save to pfm + loaded_image = bpy.data.images.load(image_path, check_existing=False) + + width = loaded_image.size[0] + height = loaded_image.size[1] + + image_output_array = np.zeros([width, height, 3], dtype="float32") + image_output_array = np.array(loaded_image.pixels) + image_output_array = image_output_array.reshape(height, width, 4) + image_output_array = np.float32(image_output_array[:,:,:3]) + + image_output_denoise_destination = image_path[:-4] + ".pfm" + + image_output_denoise_result_destination = image_path[:-4] + "_denoised.pfm" + + with open(image_output_denoise_destination, "wb") as fileWritePFM: + self.save_pfm(fileWritePFM, image_output_array) + + #Denoise + print("Loaded image: " + str(loaded_image)) + + verbose = self.oidnProperties.tlm_oidn_verbose + affinity = self.oidnProperties.tlm_oidn_affinity + + if verbose: + print("Denoiser search: " + bpy.path.abspath(self.oidnProperties.tlm_oidn_path)) + v = "3" + else: + v = "0" + + if affinity: + a = "1" + else: + a = "0" + + threads = str(self.oidnProperties.tlm_oidn_threads) + maxmem = str(self.oidnProperties.tlm_oidn_maxmem) + + if platform.system() == 'Windows': + oidnPath = bpy.path.abspath(self.oidnProperties.tlm_oidn_path) + pipePath = [oidnPath, '-f', 'RTLightmap', '-hdr', image_output_denoise_destination, '-o', image_output_denoise_result_destination, '-verbose', v, '-threads', threads, '-affinity', a, '-maxmem', maxmem] + elif platform.system() == 'Darwin': + oidnPath = bpy.path.abspath(self.oidnProperties.tlm_oidn_path) + pipePath = [oidnPath + ' -f ' + ' RTLightmap ' + ' -hdr ' + image_output_denoise_destination + ' -o ' + image_output_denoise_result_destination + ' -verbose ' + v] + else: + oidnPath = bpy.path.abspath(self.oidnProperties.tlm_oidn_path) + pipePath = [oidnPath + ' -f ' + ' RTLightmap ' + ' -hdr ' + image_output_denoise_destination + ' -o ' + image_output_denoise_result_destination + ' -verbose ' + v] + + if not verbose: + denoisePipe = subprocess.Popen(pipePath, stdout=subprocess.PIPE, stderr=None, shell=True) + else: + denoisePipe = subprocess.Popen(pipePath, shell=True) + + denoisePipe.communicate()[0] + + with open(image_output_denoise_result_destination, "rb") as f: + denoise_data, scale = self.load_pfm(f) + + ndata = np.array(denoise_data) + ndata2 = np.dstack((ndata, np.ones((width,height)))) + img_array = ndata2.ravel() + + loaded_image.pixels = img_array + loaded_image.filepath_raw = 
image_output_denoise_result_destination = image_path[:-10] + "_denoised.hdr" + loaded_image.file_format = "HDR" + loaded_image.save() + + self.denoised_array.append(image) + + print(image_path) + + def clean(self): + + self.denoised_array.clear() + self.image_array.clear() + + for file in self.image_output_destination: + if file.endswith("_baked.hdr"): + baked_image_array.append(file) + + #self.image_output_destination + + #Clean temporary files here.. + #...pfm + #...denoised.hdr + + + def load_pfm(self, file, as_flat_list=False): + #start = time() + + header = file.readline().decode("utf-8").rstrip() + if header == "PF": + color = True + elif header == "Pf": + color = False + else: + raise Exception("Not a PFM file.") + + dim_match = re.match(r"^(\d+)\s(\d+)\s$", file.readline().decode("utf-8")) + if dim_match: + width, height = map(int, dim_match.groups()) + else: + raise Exception("Malformed PFM header.") + + scale = float(file.readline().decode("utf-8").rstrip()) + if scale < 0: # little-endian + endian = "<" + scale = -scale + else: + endian = ">" # big-endian + + data = np.fromfile(file, endian + "f") + shape = (height, width, 3) if color else (height, width) + if as_flat_list: + result = data + else: + result = np.reshape(data, shape) + #print("PFM import took %.3f s" % (time() - start)) + return result, scale + + def save_pfm(self, file, image, scale=1): + #start = time() + + if image.dtype.name != "float32": + raise Exception("Image dtype must be float32 (got %s)" % image.dtype.name) + + if len(image.shape) == 3 and image.shape[2] == 3: # color image + color = True + elif len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1: # greyscale + color = False + else: + raise Exception("Image must have H x W x 3, H x W x 1 or H x W dimensions.") + + file.write(b"PF\n" if color else b"Pf\n") + file.write(b"%d %d\n" % (image.shape[1], image.shape[0])) + + endian = image.dtype.byteorder + + if endian == "<" or endian == "=" and sys.byteorder == "little": + scale = -scale + + file.write(b"%f\n" % scale) + + image.tofile(file) + + #print("PFM export took %.3f s" % (time() - start)) \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/denoiser/optix.py b/blender/arm/lightmapper/utility/denoiser/optix.py new file mode 100644 index 00000000..aff36919 --- /dev/null +++ b/blender/arm/lightmapper/utility/denoiser/optix.py @@ -0,0 +1,89 @@ +import bpy, os, platform, subprocess + +class TLM_Optix_Denoise: + + image_array = [] + + image_output_destination = "" + + denoised_array = [] + + def __init__(self, optixProperties, img_array, dirpath): + + self.optixProperties = optixProperties + + self.image_array = img_array + + self.image_output_destination = dirpath + + self.check_binary() + + def check_binary(self): + + optixPath = self.optixProperties.tlm_optix_path + + if optixPath != "": + + file = os.path.basename(os.path.realpath(optixPath)) + filename, file_extension = os.path.splitext(file) + + if(file_extension == ".exe"): + + #if file exists optixDenoise or denoise + + pass + + else: + + #if file exists optixDenoise or denoise + + self.optixProperties.tlm_optix_path = os.path.join(self.optixProperties.tlm_optix_path,"Denoiser.exe") + + else: + + print("Please provide Optix path") + + def denoise(self): + + print("Optix: Denoising") + for image in self.image_array: + + if image not in self.denoised_array: + + image_path = os.path.join(self.image_output_destination, image) + + denoise_output_destination = image_path[:-10] + "_denoised.hdr" + + if 
platform.system() == 'Windows': + optixPath = bpy.path.abspath(self.optixProperties.tlm_optix_path) + pipePath = [optixPath, '-i', image_path, '-o', denoise_output_destination] + elif platform.system() == 'Darwin': + print("Mac for Optix is still unsupported") + else: + print("Linux for Optix is still unsupported") + + if self.optixProperties.tlm_optix_verbose: + denoisePipe = subprocess.Popen(pipePath, shell=True) + else: + denoisePipe = subprocess.Popen(pipePath, stdout=subprocess.PIPE, stderr=None, shell=True) + + denoisePipe.communicate()[0] + + image = bpy.data.images.load(image_path, check_existing=False) + bpy.data.images[image.name].filepath_raw = bpy.data.images[image.name].filepath_raw[:-4] + "_denoised.hdr" + bpy.data.images[image.name].reload() + + def clean(self): + + self.denoised_array.clear() + self.image_array.clear() + + for file in self.image_output_destination: + if file.endswith("_baked.hdr"): + baked_image_array.append(file) + + #self.image_output_destination + + #Clean temporary files here.. + #...pfm + #...denoised.hdr \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/encoding.py b/blender/arm/lightmapper/utility/encoding.py new file mode 100644 index 00000000..bfc4cd7c --- /dev/null +++ b/blender/arm/lightmapper/utility/encoding.py @@ -0,0 +1,245 @@ +import bpy, math, os, gpu, bgl +import numpy as np +from . import utility +from fractions import Fraction +from gpu_extras.batch import batch_for_shader + +def encodeLogLuv(image, outDir, quality): + input_image = bpy.data.images[image.name] + image_name = input_image.name + + offscreen = gpu.types.GPUOffScreen(input_image.size[0], input_image.size[1]) + + image = input_image + + vertex_shader = ''' + + uniform mat4 ModelViewProjectionMatrix; + + in vec2 texCoord; + in vec2 pos; + out vec2 texCoord_interp; + + void main() + { + //gl_Position = ModelViewProjectionMatrix * vec4(pos.xy, 0.0f, 1.0f); + //gl_Position.z = 1.0; + gl_Position = vec4(pos.xy, 100, 100); + texCoord_interp = texCoord; + } + + ''' + fragment_shader = ''' + in vec2 texCoord_interp; + out vec4 fragColor; + + uniform sampler2D image; + + const mat3 cLogLuvM = mat3( 0.2209, 0.3390, 0.4184, 0.1138, 0.6780, 0.7319, 0.0102, 0.1130, 0.2969 ); + vec4 LinearToLogLuv( in vec4 value ) { + vec3 Xp_Y_XYZp = cLogLuvM * value.rgb; + Xp_Y_XYZp = max( Xp_Y_XYZp, vec3( 1e-6, 1e-6, 1e-6 ) ); + vec4 vResult; + vResult.xy = Xp_Y_XYZp.xy / Xp_Y_XYZp.z; + float Le = 2.0 * log2(Xp_Y_XYZp.y) + 127.0; + vResult.w = fract( Le ); + vResult.z = ( Le - ( floor( vResult.w * 255.0 ) ) / 255.0 ) / 255.0; + return vResult; + //return vec4(Xp_Y_XYZp,1); + } + + const mat3 cLogLuvInverseM = mat3( 6.0014, -2.7008, -1.7996, -1.3320, 3.1029, -5.7721, 0.3008, -1.0882, 5.6268 ); + vec4 LogLuvToLinear( in vec4 value ) { + float Le = value.z * 255.0 + value.w; + vec3 Xp_Y_XYZp; + Xp_Y_XYZp.y = exp2( ( Le - 127.0 ) / 2.0 ); + Xp_Y_XYZp.z = Xp_Y_XYZp.y / value.y; + Xp_Y_XYZp.x = value.x * Xp_Y_XYZp.z; + vec3 vRGB = cLogLuvInverseM * Xp_Y_XYZp.rgb; + //return vec4( max( vRGB, 0.0 ), 1.0 ); + return vec4( max( Xp_Y_XYZp, 0.0 ), 1.0 ); + } + + void main() + { + //fragColor = LinearToLogLuv(pow(texture(image, texCoord_interp), vec4(0.454))); + fragColor = LinearToLogLuv(texture(image, texCoord_interp)); + //fragColor = LogLuvToLinear(LinearToLogLuv(texture(image, texCoord_interp))); + } + + ''' + + x_screen = 0 + off_x = -100 + off_y = -100 + y_screen_flip = 0 + sx = 200 + sy = 200 + + vertices = ( + (x_screen + off_x, y_screen_flip - off_y), + (x_screen + off_x, 
y_screen_flip - sy - off_y), + (x_screen + off_x + sx, y_screen_flip - sy - off_y), + (x_screen + off_x + sx, y_screen_flip - off_x)) + + if input_image.colorspace_settings.name != 'Linear': + input_image.colorspace_settings.name = 'Linear' + + # Removing .exr or .hdr prefix + if image_name[-4:] == '.exr' or image_name[-4:] == '.hdr': + image_name = image_name[:-4] + + target_image = bpy.data.images.get(image_name + '_encoded') + print(image_name + '_encoded') + if not target_image: + target_image = bpy.data.images.new( + name = image_name + '_encoded', + width = input_image.size[0], + height = input_image.size[1], + alpha = True, + float_buffer = False + ) + + shader = gpu.types.GPUShader(vertex_shader, fragment_shader) + batch = batch_for_shader( + shader, 'TRI_FAN', + { + "pos": vertices, + "texCoord": ((0, 1), (0, 0), (1, 0), (1, 1)), + }, + ) + + if image.gl_load(): + raise Exception() + + with offscreen.bind(): + bgl.glActiveTexture(bgl.GL_TEXTURE0) + bgl.glBindTexture(bgl.GL_TEXTURE_2D, image.bindcode) + + shader.bind() + shader.uniform_int("image", 0) + batch.draw(shader) + + buffer = bgl.Buffer(bgl.GL_BYTE, input_image.size[0] * input_image.size[1] * 4) + bgl.glReadBuffer(bgl.GL_BACK) + bgl.glReadPixels(0, 0, input_image.size[0], input_image.size[1], bgl.GL_RGBA, bgl.GL_UNSIGNED_BYTE, buffer) + + offscreen.free() + + target_image.pixels = [v / 255 for v in buffer] + input_image = target_image + + #Save LogLuv + print(input_image.name) + input_image.filepath_raw = outDir + "/" + input_image.name + ".png" + #input_image.filepath_raw = outDir + "_encoded.png" + input_image.file_format = "PNG" + bpy.context.scene.render.image_settings.quality = quality + #input_image.save_render(filepath = input_image.filepath_raw, scene = bpy.context.scene) + input_image.save() + + #Todo - Find a way to save + #bpy.ops.image.save_all_modified() + +def encodeImageRGBM(image, maxRange, outDir, quality): + input_image = bpy.data.images[image.name] + image_name = input_image.name + + if input_image.colorspace_settings.name != 'Linear': + input_image.colorspace_settings.name = 'Linear' + + # Removing .exr or .hdr prefix + if image_name[-4:] == '.exr' or image_name[-4:] == '.hdr': + image_name = image_name[:-4] + + target_image = bpy.data.images.get(image_name + '_encoded') + print(image_name + '_encoded') + if not target_image: + target_image = bpy.data.images.new( + name = image_name + '_encoded', + width = input_image.size[0], + height = input_image.size[1], + alpha = True, + float_buffer = False + ) + + num_pixels = len(input_image.pixels) + result_pixel = list(input_image.pixels) + + for i in range(0,num_pixels,4): + for j in range(3): + result_pixel[i+j] *= 1.0 / maxRange; + result_pixel[i+3] = saturate(max(result_pixel[i], result_pixel[i+1], result_pixel[i+2], 1e-6)) + result_pixel[i+3] = math.ceil(result_pixel[i+3] * 255.0) / 255.0 + for j in range(3): + result_pixel[i+j] /= result_pixel[i+3] + + target_image.pixels = result_pixel + input_image = target_image + + #Save RGBM + print(input_image.name) + input_image.filepath_raw = outDir + "/" + input_image.name + ".png" + input_image.file_format = "PNG" + bpy.context.scene.render.image_settings.quality = quality + input_image.save() + + #input_image.save_render(filepath = input_image.filepath_raw, scene = bpy.context.scene) + # input_image.filepath_raw = outDir + "_encoded.png" + # input_image.file_format = "PNG" + # bpy.context.scene.render.image_settings.quality = quality + # input_image.save_render(filepath = input_image.filepath_raw, scene = 
bpy.context.scene) + #input_image. + #input_image.save() + +def saturate(num, floats=True): + if num < 0: + num = 0 + elif num > (1 if floats else 255): + num = (1 if floats else 255) + return num + +def encodeImageRGBD(image, maxRange, outDir, quality): + input_image = bpy.data.images[image.name] + image_name = input_image.name + + if input_image.colorspace_settings.name != 'Linear': + input_image.colorspace_settings.name = 'Linear' + + # Removing .exr or .hdr prefix + if image_name[-4:] == '.exr' or image_name[-4:] == '.hdr': + image_name = image_name[:-4] + + target_image = bpy.data.images.get(image_name + '_encoded') + if not target_image: + target_image = bpy.data.images.new( + name = image_name + '_encoded', + width = input_image.size[0], + height = input_image.size[1], + alpha = True, + float_buffer = False + ) + + num_pixels = len(input_image.pixels) + result_pixel = list(input_image.pixels) + + for i in range(0,num_pixels,4): + + m = utility.saturate(max(result_pixel[i], result_pixel[i+1], result_pixel[i+2], 1e-6)) + d = max(maxRange / m, 1) + d = utility.saturate(math.floor(d) / 255 ) + + result_pixel[i] = result_pixel[i] * d * 255 / maxRange + result_pixel[i+1] = result_pixel[i+1] * d * 255 / maxRange + result_pixel[i+2] = result_pixel[i+2] * d * 255 / maxRange + result_pixel[i+3] = d + + target_image.pixels = result_pixel + + input_image = target_image + + #Save RGBD + input_image.filepath_raw = outDir + "_encoded.png" + input_image.file_format = "PNG" + bpy.context.scene.render.image_settings.quality = quality + input_image.save_render(filepath = input_image.filepath_raw, scene = bpy.context.scene) \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/filtering/numpy.py b/blender/arm/lightmapper/utility/filtering/numpy.py new file mode 100644 index 00000000..7922dbd9 --- /dev/null +++ b/blender/arm/lightmapper/utility/filtering/numpy.py @@ -0,0 +1,49 @@ +import bpy, os, importlib +from os import listdir +from os.path import isfile, join + +class TLM_NP_Filtering: + + image_output_destination = "" + + def init(lightmap_dir, denoise): + + scene = bpy.context.scene + + print("Beginning filtering for files: ") + + if denoise: + file_ending = "_denoised.hdr" + else: + file_ending = "_baked.hdr" + + dirfiles = [f for f in listdir(lightmap_dir) if isfile(join(lightmap_dir, f))] + + for file in dirfiles: + + if denoise: + file_ending = "_denoised.hdr" + file_split = 13 + else: + file_ending = "_baked.hdr" + file_split = 10 + + if file.endswith(file_ending): + + file_input = os.path.join(lightmap_dir, file) + os.chdir(lightmap_dir) + + #opencv_process_image = cv2.imread(file_input, -1) + + print("Filtering: " + file_input) + + print(os.path.join(lightmap_dir, file)) + + if scene.TLM_SceneProperties.tlm_numpy_filtering_mode == "3x3 blur": + pass + + #filter_file_output = os.path.join(lightmap_dir, file[:-file_split] + "_filtered.hdr") + + #cv2.imwrite(filter_file_output, opencv_bl_result) + + print("Written to: " + filter_file_output) \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/filtering/opencv.py b/blender/arm/lightmapper/utility/filtering/opencv.py new file mode 100644 index 00000000..69e27679 --- /dev/null +++ b/blender/arm/lightmapper/utility/filtering/opencv.py @@ -0,0 +1,160 @@ +import bpy, os, importlib +from os import listdir +from os.path import isfile, join + +class TLM_CV_Filtering: + + image_output_destination = "" + + def init(lightmap_dir, denoise): + + scene = bpy.context.scene + + print("Beginning filtering for files: ") + 
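# --- Editorial sketch, not part of the patch: a minimal standalone version of
# one filtering pass as performed below, assuming OpenCV (cv2) is installed.
# The helper name filter_lightmap() and its default arguments are hypothetical;
# the real code reads strength/iteration values from scene or per-object
# TLM properties and supports Box/Gaussian/Bilateral/Median modes.

import cv2

def filter_lightmap(path_in, path_out, strength=3, iterations=1):
    image = cv2.imread(path_in, -1)                       # -1 keeps the 32-bit HDR data
    k = strength if strength % 2 == 1 else strength + 1   # kernel sizes must be odd
    for _ in range(max(iterations, 1)):
        image = cv2.GaussianBlur(image, (k, k), 0)
    cv2.imwrite(path_out, image)                          # e.g. "<name>_filtered.hdr"

# filter_lightmap("Cube_denoised.hdr", "Cube_filtered.hdr", strength=5, iterations=2)  # example filenames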
+ if denoise: + file_ending = "_denoised.hdr" + else: + file_ending = "_baked.hdr" + + dirfiles = [f for f in listdir(lightmap_dir) if isfile(join(lightmap_dir, f))] + + cv2 = importlib.util.find_spec("cv2") + + if cv2 is None: + print("CV2 not found - Ignoring filtering") + return 0 + else: + cv2 = importlib.__import__("cv2") + + for file in dirfiles: + + if denoise: + file_ending = "_denoised.hdr" + file_split = 13 + else: + file_ending = "_baked.hdr" + file_split = 10 + + if file.endswith(file_ending): + + file_input = os.path.join(lightmap_dir, file) + os.chdir(lightmap_dir) + + opencv_process_image = cv2.imread(file_input, -1) + + print("Filtering: " + os.path.basename(file_input)) + + obj_name = os.path.basename(file_input).split("_")[0] + + if bpy.data.objects[obj_name].TLM_ObjectProperties.tlm_mesh_filter_override: + + print("OVERRIDE!") + + print(os.path.join(lightmap_dir, file)) + + objectProperties = bpy.data.objects[obj_name].TLM_ObjectProperties + + #TODO OVERRIDE FILTERING OPTION! REWRITE + if objectProperties.tlm_mesh_filtering_mode == "Box": + if objectProperties.tlm_mesh_filtering_box_strength % 2 == 0: + kernel_size = (objectProperties.tlm_mesh_filtering_box_strength + 1, objectProperties.tlm_mesh_filtering_box_strength + 1) + else: + kernel_size = (objectProperties.tlm_mesh_filtering_box_strength, objectProperties.tlm_mesh_filtering_box_strength) + opencv_bl_result = cv2.blur(opencv_process_image, kernel_size) + if objectProperties.tlm_mesh_filtering_iterations > 1: + for x in range(objectProperties.tlm_mesh_filtering_iterations): + opencv_bl_result = cv2.blur(opencv_bl_result, kernel_size) + + elif objectProperties.tlm_mesh_filtering_mode == "Gaussian": + if objectProperties.tlm_mesh_filtering_gaussian_strength % 2 == 0: + kernel_size = (objectProperties.tlm_mesh_filtering_gaussian_strength + 1, objectProperties.tlm_mesh_filtering_gaussian_strength + 1) + else: + kernel_size = (objectProperties.tlm_mesh_filtering_gaussian_strength, objectProperties.tlm_mesh_filtering_gaussian_strength) + sigma_size = 0 + opencv_bl_result = cv2.GaussianBlur(opencv_process_image, kernel_size, sigma_size) + if objectProperties.tlm_mesh_filtering_iterations > 1: + for x in range(objectProperties.tlm_mesh_filtering_iterations): + opencv_bl_result = cv2.GaussianBlur(opencv_bl_result, kernel_size, sigma_size) + + elif objectProperties.tlm_mesh_filtering_mode == "Bilateral": + diameter_size = objectProperties.tlm_mesh_filtering_bilateral_diameter + sigma_color = objectProperties.tlm_mesh_filtering_bilateral_color_deviation + sigma_space = objectProperties.tlm_mesh_filtering_bilateral_coordinate_deviation + opencv_bl_result = cv2.bilateralFilter(opencv_process_image, diameter_size, sigma_color, sigma_space) + if objectProperties.tlm_mesh_filtering_iterations > 1: + for x in range(objectProperties.tlm_mesh_filtering_iterations): + opencv_bl_result = cv2.bilateralFilter(opencv_bl_result, diameter_size, sigma_color, sigma_space) + else: + + if objectProperties.tlm_mesh_filtering_median_kernel % 2 == 0: + kernel_size = (objectProperties.tlm_mesh_filtering_median_kernel + 1, objectProperties.tlm_mesh_filtering_median_kernel + 1) + else: + kernel_size = (objectProperties.tlm_mesh_filtering_median_kernel, objectProperties.tlm_mesh_filtering_median_kernel) + + opencv_bl_result = cv2.medianBlur(opencv_process_image, kernel_size[0]) + if objectProperties.tlm_mesh_filtering_iterations > 1: + for x in range(objectProperties.tlm_mesh_filtering_iterations): + opencv_bl_result = 
cv2.medianBlur(opencv_bl_result, kernel_size[0]) + + filter_file_output = os.path.join(lightmap_dir, file[:-file_split] + "_filtered.hdr") + + cv2.imwrite(filter_file_output, opencv_bl_result) + + print("Written to: " + filter_file_output) + + else: + + print(os.path.join(lightmap_dir, file)) + + #TODO OVERRIDE FILTERING OPTION! + if scene.TLM_SceneProperties.tlm_filtering_mode == "Box": + if scene.TLM_SceneProperties.tlm_filtering_box_strength % 2 == 0: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_box_strength + 1,scene.TLM_SceneProperties.tlm_filtering_box_strength + 1) + else: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_box_strength,scene.TLM_SceneProperties.tlm_filtering_box_strength) + opencv_bl_result = cv2.blur(opencv_process_image, kernel_size) + if scene.TLM_SceneProperties.tlm_filtering_iterations > 1: + for x in range(scene.TLM_SceneProperties.tlm_filtering_iterations): + opencv_bl_result = cv2.blur(opencv_bl_result, kernel_size) + + elif scene.TLM_SceneProperties.tlm_filtering_mode == "Gaussian": + if scene.TLM_SceneProperties.tlm_filtering_gaussian_strength % 2 == 0: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_gaussian_strength + 1,scene.TLM_SceneProperties.tlm_filtering_gaussian_strength + 1) + else: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_gaussian_strength,scene.TLM_SceneProperties.tlm_filtering_gaussian_strength) + sigma_size = 0 + opencv_bl_result = cv2.GaussianBlur(opencv_process_image, kernel_size, sigma_size) + if scene.TLM_SceneProperties.tlm_filtering_iterations > 1: + for x in range(scene.TLM_SceneProperties.tlm_filtering_iterations): + opencv_bl_result = cv2.GaussianBlur(opencv_bl_result, kernel_size, sigma_size) + + elif scene.TLM_SceneProperties.tlm_filtering_mode == "Bilateral": + diameter_size = scene.TLM_SceneProperties.tlm_filtering_bilateral_diameter + sigma_color = scene.TLM_SceneProperties.tlm_filtering_bilateral_color_deviation + sigma_space = scene.TLM_SceneProperties.tlm_filtering_bilateral_coordinate_deviation + opencv_bl_result = cv2.bilateralFilter(opencv_process_image, diameter_size, sigma_color, sigma_space) + if scene.TLM_SceneProperties.tlm_filtering_iterations > 1: + for x in range(scene.TLM_SceneProperties.tlm_filtering_iterations): + opencv_bl_result = cv2.bilateralFilter(opencv_bl_result, diameter_size, sigma_color, sigma_space) + else: + + if scene.TLM_SceneProperties.tlm_filtering_median_kernel % 2 == 0: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_median_kernel + 1 , scene.TLM_SceneProperties.tlm_filtering_median_kernel + 1) + else: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_median_kernel, scene.TLM_SceneProperties.tlm_filtering_median_kernel) + + opencv_bl_result = cv2.medianBlur(opencv_process_image, kernel_size[0]) + if scene.TLM_SceneProperties.tlm_filtering_iterations > 1: + for x in range(scene.TLM_SceneProperties.tlm_filtering_iterations): + opencv_bl_result = cv2.medianBlur(opencv_bl_result, kernel_size[0]) + + filter_file_output = os.path.join(lightmap_dir, file[:-file_split] + "_filtered.hdr") + + cv2.imwrite(filter_file_output, opencv_bl_result) + + print("Written to: " + filter_file_output) + + # if file.endswith(file_ending): + # print() + # baked_image_array.append(file) \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/filtering/shader.py b/blender/arm/lightmapper/utility/filtering/shader.py new file mode 100644 index 00000000..55028702 --- /dev/null +++ b/blender/arm/lightmapper/utility/filtering/shader.py @@ -0,0 +1,160 
@@ +import bpy, os, importlib +from os import listdir +from os.path import isfile, join + +class TLM_Shader_Filtering: + + image_output_destination = "" + + def init(lightmap_dir, denoise): + + scene = bpy.context.scene + + print("Beginning filtering for files: ") + + if denoise: + file_ending = "_denoised.hdr" + else: + file_ending = "_baked.hdr" + + dirfiles = [f for f in listdir(lightmap_dir) if isfile(join(lightmap_dir, f))] + + cv2 = importlib.util.find_spec("cv2") + + if cv2 is None: + print("CV2 not found - Ignoring filtering") + return 0 + else: + cv2 = importlib.__import__("cv2") + + for file in dirfiles: + + if denoise: + file_ending = "_denoised.hdr" + file_split = 13 + else: + file_ending = "_baked.hdr" + file_split = 10 + + if file.endswith(file_ending): + + file_input = os.path.join(lightmap_dir, file) + os.chdir(lightmap_dir) + + opencv_process_image = cv2.imread(file_input, -1) + + print("Filtering: " + os.path.basename(file_input)) + + obj_name = os.path.basename(file_input).split("_")[0] + + if bpy.data.objects[obj_name].TLM_ObjectProperties.tlm_mesh_filter_override: + + print("OVERRIDE!") + + print(os.path.join(lightmap_dir, file)) + + objectProperties = bpy.data.objects[obj_name].TLM_ObjectProperties + + #TODO OVERRIDE FILTERING OPTION! REWRITE + if objectProperties.tlm_mesh_filtering_mode == "Box": + if objectProperties.tlm_mesh_filtering_box_strength % 2 == 0: + kernel_size = (objectProperties.tlm_mesh_filtering_box_strength + 1, objectProperties.tlm_mesh_filtering_box_strength + 1) + else: + kernel_size = (objectProperties.tlm_mesh_filtering_box_strength, objectProperties.tlm_mesh_filtering_box_strength) + opencv_bl_result = cv2.blur(opencv_process_image, kernel_size) + if objectProperties.tlm_mesh_filtering_iterations > 1: + for x in range(objectProperties.tlm_mesh_filtering_iterations): + opencv_bl_result = cv2.blur(opencv_bl_result, kernel_size) + + elif objectProperties.tlm_mesh_filtering_mode == "Gaussian": + if objectProperties.tlm_mesh_filtering_gaussian_strength % 2 == 0: + kernel_size = (objectProperties.tlm_mesh_filtering_gaussian_strength + 1, objectProperties.tlm_mesh_filtering_gaussian_strength + 1) + else: + kernel_size = (objectProperties.tlm_mesh_filtering_gaussian_strength, objectProperties.tlm_mesh_filtering_gaussian_strength) + sigma_size = 0 + opencv_bl_result = cv2.GaussianBlur(opencv_process_image, kernel_size, sigma_size) + if objectProperties.tlm_mesh_filtering_iterations > 1: + for x in range(objectProperties.tlm_mesh_filtering_iterations): + opencv_bl_result = cv2.GaussianBlur(opencv_bl_result, kernel_size, sigma_size) + + elif objectProperties.tlm_mesh_filtering_mode == "Bilateral": + diameter_size = objectProperties.tlm_mesh_filtering_bilateral_diameter + sigma_color = objectProperties.tlm_mesh_filtering_bilateral_color_deviation + sigma_space = objectProperties.tlm_mesh_filtering_bilateral_coordinate_deviation + opencv_bl_result = cv2.bilateralFilter(opencv_process_image, diameter_size, sigma_color, sigma_space) + if objectProperties.tlm_mesh_filtering_iterations > 1: + for x in range(objectProperties.tlm_mesh_filtering_iterations): + opencv_bl_result = cv2.bilateralFilter(opencv_bl_result, diameter_size, sigma_color, sigma_space) + else: + + if objectProperties.tlm_mesh_filtering_median_kernel % 2 == 0: + kernel_size = (objectProperties.tlm_mesh_filtering_median_kernel + 1, objectProperties.tlm_mesh_filtering_median_kernel + 1) + else: + kernel_size = (objectProperties.tlm_mesh_filtering_median_kernel, 
objectProperties.tlm_mesh_filtering_median_kernel) + + opencv_bl_result = cv2.medianBlur(opencv_process_image, kernel_size[0]) + if objectProperties.tlm_mesh_filtering_iterations > 1: + for x in range(objectProperties.tlm_mesh_filtering_iterations): + opencv_bl_result = cv2.medianBlur(opencv_bl_result, kernel_size[0]) + + filter_file_output = os.path.join(lightmap_dir, file[:-file_split] + "_filtered.hdr") + + cv2.imwrite(filter_file_output, opencv_bl_result) + + print("Written to: " + filter_file_output) + + else: + + print(os.path.join(lightmap_dir, file)) + + #TODO OVERRIDE FILTERING OPTION! + if scene.TLM_SceneProperties.tlm_filtering_mode == "Box": + if scene.TLM_SceneProperties.tlm_filtering_box_strength % 2 == 0: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_box_strength + 1,scene.TLM_SceneProperties.tlm_filtering_box_strength + 1) + else: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_box_strength,scene.TLM_SceneProperties.tlm_filtering_box_strength) + opencv_bl_result = cv2.blur(opencv_process_image, kernel_size) + if scene.TLM_SceneProperties.tlm_filtering_iterations > 1: + for x in range(scene.TLM_SceneProperties.tlm_filtering_iterations): + opencv_bl_result = cv2.blur(opencv_bl_result, kernel_size) + + elif scene.TLM_SceneProperties.tlm_filtering_mode == "Gaussian": + if scene.TLM_SceneProperties.tlm_filtering_gaussian_strength % 2 == 0: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_gaussian_strength + 1,scene.TLM_SceneProperties.tlm_filtering_gaussian_strength + 1) + else: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_gaussian_strength,scene.TLM_SceneProperties.tlm_filtering_gaussian_strength) + sigma_size = 0 + opencv_bl_result = cv2.GaussianBlur(opencv_process_image, kernel_size, sigma_size) + if scene.TLM_SceneProperties.tlm_filtering_iterations > 1: + for x in range(scene.TLM_SceneProperties.tlm_filtering_iterations): + opencv_bl_result = cv2.GaussianBlur(opencv_bl_result, kernel_size, sigma_size) + + elif scene.TLM_SceneProperties.tlm_filtering_mode == "Bilateral": + diameter_size = scene.TLM_SceneProperties.tlm_filtering_bilateral_diameter + sigma_color = scene.TLM_SceneProperties.tlm_filtering_bilateral_color_deviation + sigma_space = scene.TLM_SceneProperties.tlm_filtering_bilateral_coordinate_deviation + opencv_bl_result = cv2.bilateralFilter(opencv_process_image, diameter_size, sigma_color, sigma_space) + if scene.TLM_SceneProperties.tlm_filtering_iterations > 1: + for x in range(scene.TLM_SceneProperties.tlm_filtering_iterations): + opencv_bl_result = cv2.bilateralFilter(opencv_bl_result, diameter_size, sigma_color, sigma_space) + else: + + if scene.TLM_SceneProperties.tlm_filtering_median_kernel % 2 == 0: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_median_kernel + 1 , scene.TLM_SceneProperties.tlm_filtering_median_kernel + 1) + else: + kernel_size = (scene.TLM_SceneProperties.tlm_filtering_median_kernel, scene.TLM_SceneProperties.tlm_filtering_median_kernel) + + opencv_bl_result = cv2.medianBlur(opencv_process_image, kernel_size[0]) + if scene.TLM_SceneProperties.tlm_filtering_iterations > 1: + for x in range(scene.TLM_SceneProperties.tlm_filtering_iterations): + opencv_bl_result = cv2.medianBlur(opencv_bl_result, kernel_size[0]) + + filter_file_output = os.path.join(lightmap_dir, file[:-file_split] + "_filtered.hdr") + + cv2.imwrite(filter_file_output, opencv_bl_result) + + print("Written to: " + filter_file_output) + + # if file.endswith(file_ending): + # print() + # baked_image_array.append(file) \ No 
newline at end of file diff --git a/blender/arm/lightmapper/utility/icon.py b/blender/arm/lightmapper/utility/icon.py new file mode 100644 index 00000000..54f8acd8 --- /dev/null +++ b/blender/arm/lightmapper/utility/icon.py @@ -0,0 +1,31 @@ +import os +import bpy + +from bpy.utils import previews + +icons = None +directory = os.path.abspath(os.path.join(__file__, '..', '..', '..', 'icons')) + +def id(identifier): + return image(identifier).icon_id + +def image(identifier): + def icon(identifier): + if identifier in icons: + return icons[identifier] + return icons.load(identifier, os.path.join(directory, identifier + '.png'), 'IMAGE') + + if icons: + return icon(identifier) + else: + create() + return icon(identifier) + + +def create(): + global icons + icons = previews.new() + + +def remove(): + previews.remove(icons) \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/preconfiguration/object.py b/blender/arm/lightmapper/utility/preconfiguration/object.py new file mode 100644 index 00000000..103749f5 --- /dev/null +++ b/blender/arm/lightmapper/utility/preconfiguration/object.py @@ -0,0 +1,5 @@ +import bpy, os, re, sys + +def prepare(obj): + print("Preparing: " + obj.name) + pass \ No newline at end of file diff --git a/blender/arm/lightmapper/utility/utility.py b/blender/arm/lightmapper/utility/utility.py new file mode 100644 index 00000000..6849c2bf --- /dev/null +++ b/blender/arm/lightmapper/utility/utility.py @@ -0,0 +1,620 @@ +import bpy.ops as O +import bpy, os, re, sys, importlib, struct, platform, subprocess, threading, string, bmesh +from io import StringIO +from threading import Thread +from queue import Queue, Empty +from dataclasses import dataclass +from dataclasses import field +from typing import List + +########################################################### +########################################################### +# This set of utility functions are courtesy of LorenzWieseke +# +# Modified by Naxela +# +# https://github.com/Naxela/The_Lightmapper/tree/Lightmap-to-GLB +########################################################### + +class Node_Types: + output_node = 'OUTPUT_MATERIAL' + ao_node = 'AMBIENT_OCCLUSION' + image_texture = 'TEX_IMAGE' + pbr_node = 'BSDF_PRINCIPLED' + diffuse = 'BSDF_DIFFUSE' + mapping = 'MAPPING' + normal_map = 'NORMAL_MAP' + bump_map = 'BUMP' + attr_node = 'ATTRIBUTE' + +class Shader_Node_Types: + emission = "ShaderNodeEmission" + image_texture = "ShaderNodeTexImage" + mapping = "ShaderNodeMapping" + normal = "ShaderNodeNormalMap" + ao = "ShaderNodeAmbientOcclusion" + uv = "ShaderNodeUVMap" + mix = "ShaderNodeMixRGB" + +def select_object(self,obj): + C = bpy.context + try: + O.object.select_all(action='DESELECT') + C.view_layer.objects.active = obj + obj.select_set(True) + except: + self.report({'INFO'},"Object not in View Layer") + + +def select_obj_by_mat(self,mat): + D = bpy.data + for obj in D.objects: + if obj.type == "MESH": + object_materials = [ + slot.material for slot in obj.material_slots] + if mat in object_materials: + select_object(self,obj) + + +def save_image(image): + + filePath = bpy.data.filepath + path = os.path.dirname(filePath) + + try: + os.mkdir(path + "/tex") + except FileExistsError: + pass + + try: + os.mkdir(path + "/tex/" + str(image.size[0])) + except FileExistsError: + pass + + if image.file_format == "JPEG" : + file_ending = ".jpg" + elif image.file_format == "PNG" : + file_ending = ".png" + + savepath = path + "/tex/" + \ + str(image.size[0]) + "/" + image.name + file_ending + + 
image.filepath_raw = savepath + + # if "Normal" in image.name: + # bpy.context.scene.render.image_settings.quality = 90 + # image.save_render( filepath = image.filepath_raw, scene = bpy.context.scene ) + # else: + image.save() + + + + +def get_file_size(filepath): + size = "Unpack Files" + try: + path = bpy.path.abspath(filepath) + size = os.path.getsize(path) + size /= 1024 + except: + print("error getting file path for " + filepath) + + return (size) + + +def scale_image(image, newSize): + if (image.org_filepath != ''): + image.filepath = image.org_filepath + + image.org_filepath = image.filepath + image.scale(newSize[0], newSize[1]) + save_image(image) + + +def check_only_one_pbr(self,material): + check_ok = True + # get pbr shader + nodes = material.node_tree.nodes + pbr_node_type = Node_Types.pbr_node + pbr_nodes = find_node_by_type(nodes,pbr_node_type) + + # check only one pbr node + if len(pbr_nodes) == 0: + self.report({'INFO'}, 'No PBR Shader Found') + check_ok = False + + if len(pbr_nodes) > 1: + self.report({'INFO'}, 'More than one PBR Node found ! Clean before Baking.') + check_ok = False + + return check_ok + +# is material already the baked one +def check_is_org_material(self,material): + check_ok = True + if "_Bake" in material.name: + self.report({'INFO'}, 'Change back to org. Material') + check_ok = False + + return check_ok + + +def clean_empty_materials(self): + for obj in bpy.data.objects: + for slot in obj.material_slots: + mat = slot.material + if mat is None: + print("Removed Empty Materials from " + obj.name) + bpy.ops.object.select_all(action='DESELECT') + obj.select_set(True) + bpy.ops.object.material_slot_remove() + +def get_pbr_inputs(pbr_node): + + base_color_input = pbr_node.inputs["Base Color"] + metallic_input = pbr_node.inputs["Metallic"] + specular_input = pbr_node.inputs["Specular"] + roughness_input = pbr_node.inputs["Roughness"] + normal_input = pbr_node.inputs["Normal"] + + pbr_inputs = {"base_color_input":base_color_input, "metallic_input":metallic_input,"specular_input":specular_input,"roughness_input":roughness_input,"normal_input":normal_input} + return pbr_inputs + +def find_node_by_type(nodes, node_type): + nodes_found = [n for n in nodes if n.type == node_type] + return nodes_found + +def find_node_by_type_recusivly(material, note_to_start, node_type, del_nodes_inbetween=False): + nodes = material.node_tree.nodes + if note_to_start.type == node_type: + return note_to_start + + for input in note_to_start.inputs: + for link in input.links: + current_node = link.from_node + if (del_nodes_inbetween and note_to_start.type != Node_Types.normal_map and note_to_start.type != Node_Types.bump_map): + nodes.remove(note_to_start) + return find_node_by_type_recusivly(material, current_node, node_type, del_nodes_inbetween) + + +def find_node_by_name_recusivly(node, idname): + if node.bl_idname == idname: + return node + + for input in node.inputs: + for link in input.links: + current_node = link.from_node + return find_node_by_name_recusivly(current_node, idname) + +def make_link(material, socket1, socket2): + links = material.node_tree.links + links.new(socket1, socket2) + + +def add_gamma_node(material, pbrInput): + nodeToPrincipledOutput = pbrInput.links[0].from_socket + + gammaNode = material.node_tree.nodes.new("ShaderNodeGamma") + gammaNode.inputs[1].default_value = 2.2 + gammaNode.name = "Gamma Bake" + + # link in gamma + make_link(material, nodeToPrincipledOutput, gammaNode.inputs["Color"]) + make_link(material, gammaNode.outputs["Color"], pbrInput) + 
+ +def remove_gamma_node(material, pbrInput): + nodes = material.node_tree.nodes + gammaNode = nodes.get("Gamma Bake") + nodeToPrincipledOutput = gammaNode.inputs[0].links[0].from_socket + + make_link(material, nodeToPrincipledOutput, pbrInput) + material.node_tree.nodes.remove(gammaNode) + +def apply_ao_toggle(self,context): + all_materials = bpy.data.materials + ao_toggle = context.scene.toggle_ao + for mat in all_materials: + nodes = mat.node_tree.nodes + ao_node = nodes.get("AO Bake") + if ao_node is not None: + if ao_toggle: + emission_setup(mat,ao_node.outputs["Color"]) + else: + pbr_node = find_node_by_type(nodes,Node_Types.pbr_node)[0] + remove_node(mat,"Emission Bake") + reconnect_PBR(mat, pbr_node) + + +def emission_setup(material, node_output): + nodes = material.node_tree.nodes + emission_node = add_node(material,Shader_Node_Types.emission,"Emission Bake") + + # link emission to whatever goes into current pbrInput + emission_input = emission_node.inputs[0] + make_link(material, node_output, emission_input) + + # link emission to materialOutput + surface_input = nodes.get("Material Output").inputs[0] + emission_output = emission_node.outputs[0] + make_link(material, emission_output, surface_input) + +def link_pbr_to_output(material,pbr_node): + nodes = material.node_tree.nodes + surface_input = nodes.get("Material Output").inputs[0] + make_link(material,pbr_node.outputs[0],surface_input) + + +def reconnect_PBR(material, pbrNode): + nodes = material.node_tree.nodes + pbr_output = pbrNode.outputs[0] + surface_input = nodes.get("Material Output").inputs[0] + make_link(material, pbr_output, surface_input) + +def mute_all_texture_mappings(material, do_mute): + nodes = material.node_tree.nodes + for node in nodes: + if node.bl_idname == "ShaderNodeMapping": + node.mute = do_mute + +def add_node(material,shader_node_type,node_name): + nodes = material.node_tree.nodes + new_node = nodes.get(node_name) + if new_node is None: + new_node = nodes.new(shader_node_type) + new_node.name = node_name + new_node.label = node_name + return new_node + +def remove_node(material,node_name): + nodes = material.node_tree.nodes + node = nodes.get(node_name) + if node is not None: + nodes.remove(node) + +def lightmap_to_ao(material,lightmap_node): + nodes = material.node_tree.nodes + # -----------------------AO SETUP--------------------# + # create group data + gltf_settings = bpy.data.node_groups.get('glTF Settings') + if gltf_settings is None: + bpy.data.node_groups.new('glTF Settings', 'ShaderNodeTree') + + # add group to node tree + ao_group = nodes.get('glTF Settings') + if ao_group is None: + ao_group = nodes.new('ShaderNodeGroup') + ao_group.name = 'glTF Settings' + ao_group.node_tree = bpy.data.node_groups['glTF Settings'] + + # create group inputs + if ao_group.inputs.get('Occlusion') is None: + ao_group.inputs.new('NodeSocketFloat','Occlusion') + + # mulitply to control strength + mix_node = add_node(material,Shader_Node_Types.mix,"Adjust Lightmap") + mix_node.blend_type = "MULTIPLY" + mix_node.inputs["Fac"].default_value = 1 + mix_node.inputs["Color2"].default_value = [3,3,3,1] + + # position node + ao_group.location = (lightmap_node.location[0]+600,lightmap_node.location[1]) + mix_node.location = (lightmap_node.location[0]+300,lightmap_node.location[1]) + + make_link(material,lightmap_node.outputs['Color'],mix_node.inputs['Color1']) + make_link(material,mix_node.outputs['Color'],ao_group.inputs['Occlusion']) + + +########################################################### 
+########################################################### +# This utility function is modified from blender_xatlas +# and calls the object without any explicit object context +# thus allowing blender_xatlas to pack from background. +########################################################### +# Code is courtesy of mattedicksoncom +# Modified by Naxela +# +# https://github.com/mattedicksoncom/blender-xatlas/ +########################################################### + +def Unwrap_Lightmap_Group_Xatlas_2_headless_call(obj): + + blender_xatlas = importlib.util.find_spec("blender_xatlas") + + if blender_xatlas is not None: + import blender_xatlas + else: + return 0 + + packOptions = bpy.context.scene.pack_tool + chartOptions = bpy.context.scene.chart_tool + sharedProperties = bpy.context.scene.shared_properties + + context = bpy.context + + if obj.type == 'MESH': + context.view_layer.objects.active = obj + if obj.data.users > 1: + obj.data = obj.data.copy() #make single user copy + uv_layers = obj.data.uv_layers + + #setup the lightmap uvs + uvName = "UVMap_Lightmap" + if sharedProperties.lightmapUVChoiceType == "NAME": + uvName = sharedProperties.lightmapUVName + elif sharedProperties.lightmapUVChoiceType == "INDEX": + if sharedProperties.lightmapUVIndex < len(uv_layers): + uvName = uv_layers[sharedProperties.lightmapUVIndex].name + + if not uvName in uv_layers: + uvmap = uv_layers.new(name=uvName) + uv_layers.active_index = len(uv_layers) - 1 + else: + for i in range(0, len(uv_layers)): + if uv_layers[i].name == uvName: + uv_layers.active_index = i + obj.select_set(True) + + #save all the current edges + if sharedProperties.packOnly: + edgeDict = dict() + for obj in selected_objects: + if obj.type == 'MESH': + tempEdgeDict = dict() + tempEdgeDict['object'] = obj.name + tempEdgeDict['edges'] = [] + print(len(obj.data.edges)) + for i in range(0,len(obj.data.edges)): + setEdge = obj.data.edges[i] + tempEdgeDict['edges'].append(i) + edgeDict[obj.name] = tempEdgeDict + + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.quads_convert_to_tris(quad_method='FIXED', ngon_method='BEAUTY') + else: + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.quads_convert_to_tris(quad_method='FIXED', ngon_method='BEAUTY') + + bpy.ops.object.mode_set(mode='OBJECT') + + fakeFile = StringIO() + blender_xatlas.export_obj_simple.save( + context=bpy.context, + filepath=fakeFile, + mainUVChoiceType=sharedProperties.mainUVChoiceType, + uvIndex=sharedProperties.mainUVIndex, + uvName=sharedProperties.mainUVName, + use_selection=True, + use_animation=False, + use_mesh_modifiers=True, + use_edges=True, + use_smooth_groups=False, + use_smooth_groups_bitflags=False, + use_normals=True, + use_uvs=True, + use_materials=False, + use_triangles=False, + use_nurbs=False, + use_vertex_groups=False, + use_blen_objects=True, + group_by_object=False, + group_by_material=False, + keep_vertex_order=False, + ) + + file_path = os.path.dirname(os.path.abspath(blender_xatlas.__file__)) + if platform.system() == "Windows": + xatlas_path = os.path.join(file_path, "xatlas", "xatlas-blender.exe") + elif platform.system() == "Linux": + xatlas_path = os.path.join(file_path, "xatlas", "xatlas-blender") + #need to set permissions for the process on linux + subprocess.Popen( + 'chmod u+x "' + xatlas_path + '"', + shell=True + ) + + #setup the arguments to be passed to xatlas------------------- + arguments_string = "" + for argumentKey in 
packOptions.__annotations__.keys(): + key_string = str(argumentKey) + if argumentKey is not None: + print(getattr(packOptions,key_string)) + attrib = getattr(packOptions,key_string) + if type(attrib) == bool: + if attrib == True: + arguments_string = arguments_string + " -" + str(argumentKey) + else: + arguments_string = arguments_string + " -" + str(argumentKey) + " " + str(attrib) + + for argumentKey in chartOptions.__annotations__.keys(): + if argumentKey is not None: + key_string = str(argumentKey) + print(getattr(chartOptions,key_string)) + attrib = getattr(chartOptions,key_string) + if type(attrib) == bool: + if attrib == True: + arguments_string = arguments_string + " -" + str(argumentKey) + else: + arguments_string = arguments_string + " -" + str(argumentKey) + " " + str(attrib) + + #add pack only option + if sharedProperties.packOnly: + arguments_string = arguments_string + " -packOnly" + + arguments_string = arguments_string + " -atlasLayout" + " " + sharedProperties.atlasLayout + + print(arguments_string) + #END setup the arguments to be passed to xatlas------------------- + + #RUN xatlas process + xatlas_process = subprocess.Popen( + r'"{}"'.format(xatlas_path) + ' ' + arguments_string, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + shell=True + ) + + #shove the fake file in stdin + stdin = xatlas_process.stdin + value = bytes(fakeFile.getvalue() + "\n", 'UTF-8') #The \n is needed to end the input properly + stdin.write(value) + stdin.flush() + + #Get the output from xatlas + outObj = "" + while True: + output = xatlas_process.stdout.readline() + if not output: + break + outObj = outObj + (output.decode().strip() + "\n") + + #the objects after xatlas processing + # print(outObj) + + + #Setup for reading the output + @dataclass + class uvObject: + obName: string = "" + uvArray: List[float] = field(default_factory=list) + faceArray: List[int] = field(default_factory=list) + + convertedObjects = [] + uvArrayComplete = [] + + + #search through the out put for STARTOBJ + #then start reading the objects + obTest = None + startRead = False + for line in outObj.splitlines(): + + line_split = line.split() + + if not line_split: + continue + + line_start = line_split[0] # we compare with this a _lot_ + # print(line_start) + if line_start == "STARTOBJ": + print("Start reading the objects----------------------------------------") + startRead = True + # obTest = uvObject() + + if startRead: + #if it's a new obj + if line_start == 'o': + #if there is already an object append it + if obTest is not None: + convertedObjects.append(obTest) + + obTest = uvObject() #create new uv object + obTest.obName = line_split[1] + + if obTest is not None: + #the uv coords + if line_start == 'vt': + newUv = [float(line_split[1]),float(line_split[2])] + obTest.uvArray.append(newUv) + uvArrayComplete.append(newUv) + + #the face coords index + #faces are 1 indexed + if line_start == 'f': + #vert/uv/normal + #only need the uvs + newFace = [ + int(line_split[1].split("/")[1]), + int(line_split[2].split("/")[1]), + int(line_split[3].split("/")[1]) + ] + obTest.faceArray.append(newFace) + + #append the final object + convertedObjects.append(obTest) + # print(convertedObjects) + + + #apply the output------------------------------------------------------------- + #copy the uvs to the original objects + # objIndex = 0 + print("Applying the UVs----------------------------------------") + # print(convertedObjects) + for importObject in convertedObjects: + bpy.ops.object.select_all(action='DESELECT') + + obTest = 
importObject + + bpy.context.scene.objects[obTest.obName].select_set(True) + context.view_layer.objects.active = bpy.context.scene.objects[obTest.obName] + bpy.ops.object.mode_set(mode = 'OBJECT') + + obj = bpy.context.active_object + me = obj.data + #convert to bmesh to create the new uvs + bm = bmesh.new() + bm.from_mesh(me) + + uv_layer = bm.loops.layers.uv.verify() + + nFaces = len(bm.faces) + #need to ensure lookup table for some reason? + if hasattr(bm.faces, "ensure_lookup_table"): + bm.faces.ensure_lookup_table() + + #loop through the faces + for faceIndex in range(nFaces): + faceGroup = obTest.faceArray[faceIndex] + + bm.faces[faceIndex].loops[0][uv_layer].uv = ( + uvArrayComplete[faceGroup[0] - 1][0], + uvArrayComplete[faceGroup[0] - 1][1]) + + bm.faces[faceIndex].loops[1][uv_layer].uv = ( + uvArrayComplete[faceGroup[1] - 1][0], + uvArrayComplete[faceGroup[1] - 1][1]) + + bm.faces[faceIndex].loops[2][uv_layer].uv = ( + uvArrayComplete[faceGroup[2] - 1][0], + uvArrayComplete[faceGroup[2] - 1][1]) + + # objIndex = objIndex + 3 + + # print(objIndex) + #assign the mesh back to the original mesh + bm.to_mesh(me) + #END apply the output------------------------------------------------------------- + + + #Start setting the quads back again------------------------------------------------------------- + if sharedProperties.packOnly: + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='DESELECT') + bpy.ops.object.mode_set(mode='OBJECT') + + for edges in edgeDict: + edgeList = edgeDict[edges] + currentObject = bpy.context.scene.objects[edgeList['object']] + bm = bmesh.new() + bm.from_mesh(currentObject.data) + if hasattr(bm.edges, "ensure_lookup_table"): + bm.edges.ensure_lookup_table() + + #assume that all the triangulated edges come after the original edges + newEdges = [] + for edge in range(len(edgeList['edges']), len(bm.edges)): + newEdge = bm.edges[edge] + newEdge.select = True + newEdges.append(newEdge) + + bmesh.ops.dissolve_edges(bm, edges=newEdges, use_verts=False, use_face_split=False) + bpy.ops.object.mode_set(mode='OBJECT') + bm.to_mesh(currentObject.data) + bm.free() + bpy.ops.object.mode_set(mode='EDIT') + + #End setting the quads back again------------------------------------------------------------ + + print("Finished Xatlas----------------------------------------") \ No newline at end of file diff --git a/blender/arm/log.py b/blender/arm/log.py index 3ad44e6a..4f7b5256 100644 --- a/blender/arm/log.py +++ b/blender/arm/log.py @@ -1,3 +1,32 @@ +import platform + +DEBUG = 36 +INFO = 37 +WARN = 35 +ERROR = 31 + +if platform.system() == "Windows": + HAS_COLOR_SUPPORT = platform.release() == "10" + + if HAS_COLOR_SUPPORT: + # Enable ANSI codes. Otherwise, the ANSI sequences might not be + # evaluated correctly for the first colored print statement. 
+ import ctypes + kernel32 = ctypes.windll.kernel32 + + # -11: stdout + handle_out = kernel32.GetStdHandle(-11) + + console_mode = ctypes.c_long() + kernel32.GetConsoleMode(handle_out, ctypes.byref(console_mode)) + + # 0b100: ENABLE_VIRTUAL_TERMINAL_PROCESSING, enables ANSI codes + # see https://docs.microsoft.com/en-us/windows/console/setconsolemode + console_mode.value |= 0b100 + kernel32.SetConsoleMode(handle_out, console_mode) +else: + HAS_COLOR_SUPPORT = True + info_text = '' num_warnings = 0 @@ -10,12 +39,27 @@ def clear(clear_warnings=False): def format_text(text): return (text[:80] + '..') if len(text) > 80 else text # Limit str size -def print_info(text): - global info_text +def log(text, color=None): + if HAS_COLOR_SUPPORT and color is not None: + csi = '\033[' + text = csi + str(color) + 'm' + text + csi + '0m' print(text) + +def debug(text): + log(text, DEBUG) + +def info(text): + global info_text + log(text, INFO) info_text = format_text(text) +def print_warn(text): + log('Warning: ' + text, WARN) + def warn(text): global num_warnings num_warnings += 1 - print('Armory Warning: ' + text) + print_warn(text) + +def error(text): + log('ERROR: ' + text, ERROR) diff --git a/blender/arm/logicnode/action_call_group.py b/blender/arm/logicnode/action_call_group.py index da32f3ff..b2f57da5 100644 --- a/blender/arm/logicnode/action_call_group.py +++ b/blender/arm/logicnode/action_call_group.py @@ -12,7 +12,7 @@ class CallGroupNode(Node, ArmLogicTreeNode): @property def property0(self): - return arm.utils.safesrc(bpy.data.worlds['Arm'].arm_project_package) + '.node.' + arm.utils.safesrc(self.property0_) + return arm.utils.safesrc(bpy.data.worlds['Arm'].arm_project_package) + '.node.' + arm.utils.safesrc(self.property0_.name) property0_: PointerProperty(name='Group', type=bpy.types.NodeTree) diff --git a/blender/arm/logicnode/animation_play_action_from.py b/blender/arm/logicnode/animation_play_action_from.py new file mode 100644 index 00000000..6d55b6c1 --- /dev/null +++ b/blender/arm/logicnode/animation_play_action_from.py @@ -0,0 +1,22 @@ +import bpy +from bpy.props import * +from bpy.types import Node, NodeSocket +from arm.logicnode.arm_nodes import * + +class PlayActionFromNode(Node, ArmLogicTreeNode): + '''Play action from node''' + bl_idname = 'LNPlayActionFromNode' + bl_label = 'Play Action From' + bl_icon = 'QUESTION' + + def init(self, context): + self.inputs.new('ArmNodeSocketAction', 'In') + self.inputs.new('ArmNodeSocketObject', 'Object') + self.inputs.new('ArmNodeSocketAnimAction', 'Action') + self.inputs.new('NodeSocketInt', 'Start Frame') + self.inputs.new('NodeSocketFloat', 'Blend') + self.inputs[-1].default_value = 0.2 + self.outputs.new('ArmNodeSocketAction', 'Out') + self.outputs.new('ArmNodeSocketAction', 'Done') + +add_node(PlayActionFromNode, category='Animation') diff --git a/blender/arm/logicnode/arm_nodes.py b/blender/arm/logicnode/arm_nodes.py index c94b93ae..af0636aa 100644 --- a/blender/arm/logicnode/arm_nodes.py +++ b/blender/arm/logicnode/arm_nodes.py @@ -24,6 +24,19 @@ class ArmActionSocket(bpy.types.NodeSocket): def draw_color(self, context, node): return (0.8, 0.3, 0.3, 1) +class ArmCustomSocket(bpy.types.NodeSocket): + """ + A custom socket that can be used to define more socket types for + logic node packs. Do not use this type directly (it is not + registered)! 
+ """ + bl_idname = 'ArmCustomSocket' + bl_label = 'Custom Socket' + + def get_default_value(self): + """Override this for values of unconnected input sockets.""" + return None + class ArmArraySocket(bpy.types.NodeSocket): bl_idname = 'ArmNodeSocketArray' bl_label = 'Array Socket' diff --git a/blender/arm/logicnode/array_add.py b/blender/arm/logicnode/array_add.py index 8797c6e0..2eeb47e9 100644 --- a/blender/arm/logicnode/array_add.py +++ b/blender/arm/logicnode/array_add.py @@ -15,13 +15,16 @@ class ArrayAddNode(Node, ArmLogicTreeNode): def init(self, context): self.inputs.new('ArmNodeSocketAction', 'In') self.inputs.new('ArmNodeSocketArray', 'Array') + self.inputs.new('NodeSocketBool', 'Unique Values') + self.inputs.new('NodeSocketBool', 'Modify Original').default_value = True self.inputs.new('NodeSocketShader', 'Value') self.outputs.new('ArmNodeSocketAction', 'Out') + self.outputs.new('ArmNodeSocketArray', 'Array') def draw_buttons(self, context, layout): row = layout.row(align=True) - op = row.operator('arm.node_add_input_value', text='New', icon='PLUS', emboss=True) + op = row.operator('arm.node_add_input_value', text='Add Input', icon='PLUS', emboss=True) op.node_index = str(id(self)) op.socket_type = 'NodeSocketShader' op2 = row.operator('arm.node_remove_input_value', text='', icon='X', emboss=True) diff --git a/blender/arm/logicnode/array_add_unique.py b/blender/arm/logicnode/array_add_unique.py deleted file mode 100644 index e6048426..00000000 --- a/blender/arm/logicnode/array_add_unique.py +++ /dev/null @@ -1,30 +0,0 @@ -import bpy -from bpy.props import * -from bpy.types import Node, NodeSocket -from arm.logicnode.arm_nodes import * - -class ArrayAddUniqueNode(Node, ArmLogicTreeNode): - '''Array add unique node''' - bl_idname = 'LNArrayAddUniqueNode' - bl_label = 'Array Add Unique' - bl_icon = 'QUESTION' - - def __init__(self): - array_nodes[str(id(self))] = self - - def init(self, context): - self.inputs.new('ArmNodeSocketAction', 'In') - self.inputs.new('ArmNodeSocketArray', 'Array') - self.inputs.new('NodeSocketShader', 'Value') - self.outputs.new('ArmNodeSocketAction', 'Out') - - def draw_buttons(self, context, layout): - row = layout.row(align=True) - - op = row.operator('arm.node_add_input_value', text='New', icon='PLUS', emboss=True) - op.node_index = str(id(self)) - op.socket_type = 'NodeSocketShader' - op2 = row.operator('arm.node_remove_input_value', text='', icon='X', emboss=True) - op2.node_index = str(id(self)) - -add_node(ArrayAddUniqueNode, category='Array') diff --git a/blender/arm/logicnode/canvas_set_checkbox.py b/blender/arm/logicnode/canvas_set_checkbox.py new file mode 100644 index 00000000..1ddaee21 --- /dev/null +++ b/blender/arm/logicnode/canvas_set_checkbox.py @@ -0,0 +1,18 @@ +import bpy +from bpy.props import * +from bpy.types import Node, NodeSocket +from arm.logicnode.arm_nodes import * + +class CanvasSetCheckBoxNode(Node, ArmLogicTreeNode): + '''Set canvas check box''' + bl_idname = 'LNCanvasSetCheckBoxNode' + bl_label = 'Canvas Set Check Box' + bl_icon = 'QUESTION' + + def init(self, context): + self.inputs.new('ArmNodeSocketAction', 'In') + self.inputs.new('NodeSocketString', 'Element') + self.inputs.new('NodeSocketBool', 'Value') + self.outputs.new('ArmNodeSocketAction', 'Out') + +add_node(CanvasSetCheckBoxNode, category='Canvas') diff --git a/blender/arm/logicnode/input_on_canvas_element.py b/blender/arm/logicnode/input_on_canvas_element.py new file mode 100644 index 00000000..2d90da54 --- /dev/null +++ 
b/blender/arm/logicnode/input_on_canvas_element.py @@ -0,0 +1,38 @@ +import bpy +from bpy.props import * +from bpy.types import Node +from arm.logicnode.arm_nodes import * + +class OnCanvasElementNode(Node, ArmLogicTreeNode): + """On canvas element node""" + bl_idname = 'LNOnCanvasElementNode' + bl_label = 'On Canvas Element' + bl_icon = 'CURVE_PATH' + + property0: EnumProperty( + items=[('click', 'Click', 'Listen to mouse clicks'), + ('hover', 'Hover', 'Listen to mouse hover')], + name='Listen to', default='click') + property1: EnumProperty( + items=[('down', 'Down', 'Down'), + ('started', 'Started', 'Started'), + ('released', 'Released', 'Released')], + name='Status', default='down') + property2: EnumProperty( + items=[('left', 'Left', 'Left Button'), + ('right', 'Right', 'Right Button'), + ('middle', 'Middle', 'Middle Button')], + name='Mouse Button', default='left') + + def init(self, context): + self.inputs.new('NodeSocketString', 'Element') + self.outputs.new('ArmNodeSocketAction', 'Out') + + def draw_buttons(self, context, layout): + layout.prop(self, 'property0') + + if self.property0 == "click": + layout.prop(self, 'property1') + layout.prop(self, 'property2') + +add_node(OnCanvasElementNode, category='Input') diff --git a/blender/arm/logicnode/logic_array_loop.py b/blender/arm/logicnode/logic_array_loop.py index 4e3d161c..5ca6bd33 100644 --- a/blender/arm/logicnode/logic_array_loop.py +++ b/blender/arm/logicnode/logic_array_loop.py @@ -8,12 +8,13 @@ class ArrayLoopNode(Node, ArmLogicTreeNode): bl_idname = 'LNArrayLoopNode' bl_label = 'Array Loop' bl_icon = 'CURVE_PATH' - + def init(self, context): self.inputs.new('ArmNodeSocketAction', 'In') self.inputs.new('ArmNodeSocketArray', 'Array') self.outputs.new('ArmNodeSocketAction', 'Loop') - self.outputs.new('NodeSocketInt', 'Value') + self.outputs.new('NodeSocketShader', 'Value') + self.outputs.new('NodeSocketInt', 'Index') self.outputs.new('ArmNodeSocketAction', 'Done') add_node(ArrayLoopNode, category='Logic') diff --git a/blender/arm/logicnode/physics_has_contact_array.py b/blender/arm/logicnode/physics_has_contact_array.py new file mode 100644 index 00000000..1738c663 --- /dev/null +++ b/blender/arm/logicnode/physics_has_contact_array.py @@ -0,0 +1,17 @@ +import bpy +from bpy.props import * +from bpy.types import Node, NodeSocket +from arm.logicnode.arm_nodes import * + +class HasContactArrayNode(Node, ArmLogicTreeNode): + '''Has contact array node''' + bl_idname = 'LNHasContactArrayNode' + bl_label = 'Has Contact (Array)' + bl_icon = 'QUESTION' + + def init(self, context): + self.inputs.new('ArmNodeSocketObject', 'Object 1') + self.inputs.new('ArmNodeSocketArray', 'Objects') + self.outputs.new('NodeSocketBool', 'Bool') + +add_node(HasContactArrayNode, category='Physics') diff --git a/blender/arm/logicnode/sound_play_sound.py b/blender/arm/logicnode/sound_play_sound.py index 1dfb3bdf..17cebdd6 100644 --- a/blender/arm/logicnode/sound_play_sound.py +++ b/blender/arm/logicnode/sound_play_sound.py @@ -4,18 +4,52 @@ from bpy.types import Node, NodeSocket from arm.logicnode.arm_nodes import * class PlaySoundNode(Node, ArmLogicTreeNode): - '''Play sound node''' + """Play sound node""" bl_idname = 'LNPlaySoundRawNode' bl_label = 'Play Sound' - bl_icon = 'QUESTION' + bl_icon = 'PLAY_SOUND' property0: PointerProperty(name='', type=bpy.types.Sound) + property1: BoolProperty( + name='Loop', + description='Play the sound in a loop', + default=False) + property2: BoolProperty( + name='Retrigger', + description='Play the sound from the 
beginning everytime', + default=False) + property3: BoolProperty( + name='Use Custom Sample Rate', + description='If enabled, override the default sample rate', + default=False) + property4: IntProperty( + name='Sample Rate', + description='Set the sample rate used to play this sound', + default=44100, + min=0) def init(self, context): - self.inputs.new('ArmNodeSocketAction', 'In') + self.inputs.new('ArmNodeSocketAction', 'Play') + self.inputs.new('ArmNodeSocketAction', 'Pause') + self.inputs.new('ArmNodeSocketAction', 'Stop') self.outputs.new('ArmNodeSocketAction', 'Out') + self.outputs.new('ArmNodeSocketAction', 'Running') + self.outputs.new('ArmNodeSocketAction', 'Done') def draw_buttons(self, context, layout): layout.prop_search(self, 'property0', bpy.data, 'sounds', icon='NONE', text='') + col = layout.column(align=True) + col.prop(self, 'property1') + col.prop(self, 'property2') + + layout.label(text="Overrides:") + # Sample rate + split = layout.split(factor=0.15, align=False) + split.prop(self, 'property3', text="") + row = split.row() + if not self.property3: + row.enabled = False + row.prop(self, 'property4') + add_node(PlaySoundNode, category='Sound') diff --git a/blender/arm/logicnode/value_vector_math.py b/blender/arm/logicnode/value_vector_math.py index 9091981b..25d2ca66 100644 --- a/blender/arm/logicnode/value_vector_math.py +++ b/blender/arm/logicnode/value_vector_math.py @@ -18,9 +18,10 @@ class VectorMathNode(Node, ArmLogicTreeNode): ('Cross Product', 'Cross Product', 'Cross Product'), ('Length', 'Length', 'Length'), ('Distance', 'Distance', 'Distance'), + ('Reflect', 'Reflect', 'Reflect'), ], name='', default='Add') - + def init(self, context): self.inputs.new('NodeSocketVector', 'Vector') self.inputs[-1].default_value = [0.5, 0.5, 0.5] diff --git a/blender/arm/make.py b/blender/arm/make.py index 36748d3f..a7227902 100755 --- a/blender/arm/make.py +++ b/blender/arm/make.py @@ -1,27 +1,27 @@ -import os import glob -import time -import shutil -import bpy import json +import os +import shutil +import time import stat -from bpy.props import * import subprocess import threading import webbrowser -import arm.utils -import arm.write_data as write_data -import arm.make_logic as make_logic -import arm.make_renderpath as make_renderpath -import arm.make_world as make_world -import arm.make_state as state + +import bpy + import arm.assets as assets -import arm.log as log +from arm.exporter import ArmoryExporter import arm.lib.make_datas import arm.lib.server -from arm.exporter import ArmoryExporter +import arm.log as log +import arm.make_logic as make_logic +import arm.make_renderpath as make_renderpath +import arm.make_state as state +import arm.make_world as make_world +import arm.utils +import arm.write_data as write_data -exporter = ArmoryExporter() scripts_mtime = 0 # Monitor source changes profile_time = 0 @@ -57,7 +57,6 @@ def remove_readonly(func, path, excinfo): func(path) def export_data(fp, sdk_path): - global exporter wrd = bpy.data.worlds['Arm'] print('\n' + '_' * 10 + ' [Armory] Compiling ' + '_' * 10) @@ -121,7 +120,7 @@ def export_data(fp, sdk_path): if scene.arm_export: ext = '.lz4' if ArmoryExporter.compress_enabled else '.arm' asset_path = build_dir + '/compiled/Assets/' + arm.utils.safestr(scene.name) + ext - exporter.execute(bpy.context, asset_path, scene=scene, depsgraph=depsgraph) + ArmoryExporter.export_scene(bpy.context, asset_path, scene=scene, depsgraph=depsgraph) if ArmoryExporter.export_physics: physics_found = True if ArmoryExporter.export_navigation: 
@@ -156,9 +155,10 @@ def export_data(fp, sdk_path): cdefs = arm.utils.def_strings_to_array(wrd.compo_defs) if wrd.arm_verbose_output: - print('Exported modules: ' + str(modules)) - print('Shader flags: ' + str(defs)) - print('Khafile flags: ' + str(assets.khafile_defs)) + print('Exported modules:', modules) + print('Shader flags:', defs) + print('Compositor flags:', cdefs) + print('Khafile flags:', assets.khafile_defs) # Render path is configurable at runtime has_config = wrd.arm_write_config or os.path.exists(arm.utils.get_fp() + '/Bundled/config.arm') @@ -171,8 +171,8 @@ def export_data(fp, sdk_path): # Write referenced shader passes if not os.path.isfile(build_dir + '/compiled/Shaders/shader_datas.arm') or state.last_world_defs != wrd.world_defs: - res = {} - res['shader_datas'] = [] + res = {'shader_datas': []} + for ref in assets.shader_passes: # Ensure shader pass source exists if not os.path.exists(raw_shaders_path + '/' + ref): @@ -182,7 +182,12 @@ def export_data(fp, sdk_path): compile_shader_pass(res, raw_shaders_path, ref, defs + cdefs, make_variants=has_config) else: compile_shader_pass(res, raw_shaders_path, ref, defs, make_variants=has_config) + + # Workaround to also export non-material world shaders + res['shader_datas'] += make_world.shader_datas + arm.utils.write_arm(shaders_path + '/shader_datas.arm', res) + for ref in assets.shader_passes: for s in assets.shader_passes_assets[ref]: assets.add_shader(shaders_path + '/' + s + '.glsl') @@ -390,7 +395,7 @@ def assets_done(): else: state.proc_build = None state.redraw_ui = True - log.print_info('Build failed, check console') + log.error('Build failed, check console') def compilation_server_done(): if state.proc_build == None: @@ -405,12 +410,12 @@ def compilation_server_done(): else: state.proc_build = None state.redraw_ui = True - log.print_info('Build failed, check console') + log.error('Build failed, check console') def build_done(): print('Finished in ' + str(time.time() - profile_time)) if log.num_warnings > 0: - print(f'{log.num_warnings} warnings occurred during compilation!') + log.print_warn(f'{log.num_warnings} warnings occurred during compilation') if state.proc_build is None: return result = state.proc_build.poll() @@ -420,7 +425,7 @@ def build_done(): bpy.data.worlds['Arm'].arm_recompile = False build_success() else: - log.print_info('Build failed, check console') + log.error('Build failed, check console') def patch(): if state.proc_build != None: @@ -429,21 +434,17 @@ def patch(): fp = arm.utils.get_fp() os.chdir(fp) asset_path = arm.utils.get_fp_build() + '/compiled/Assets/' + arm.utils.safestr(bpy.context.scene.name) + '.arm' - exporter.execute(bpy.context, asset_path, scene=bpy.context.scene) + ArmoryExporter.export_scene(bpy.context, asset_path, scene=bpy.context.scene) if not os.path.isdir(arm.utils.build_dir() + '/compiled/Shaders/std'): raw_shaders_path = arm.utils.get_sdk_path() + '/armory/Shaders/' shutil.copytree(raw_shaders_path + 'std', arm.utils.build_dir() + '/compiled/Shaders/std') node_path = arm.utils.get_node_path() khamake_path = arm.utils.get_khamake_path() + cmd = [node_path, khamake_path, 'krom'] - cmd.append('--shaderversion') - cmd.append('330') - cmd.append('--parallelAssetConversion') - cmd.append('4') - cmd.append('--to') - cmd.append(arm.utils.build_dir() + '/debug') - cmd.append('--nohaxe') - cmd.append('--noproject') + cmd.extend(('--shaderversion', '330', '--parallelAssetConversion', '4', + '--to', arm.utils.build_dir() + '/debug', '--nohaxe', '--noproject')) + 
assets.invalidate_enabled = True state.proc_build = run_proc(cmd, patch_done) diff --git a/blender/arm/make_logic.py b/blender/arm/make_logic.py index 35751f7e..a2392530 100755 --- a/blender/arm/make_logic.py +++ b/blender/arm/make_logic.py @@ -100,7 +100,7 @@ def build_node_tree(node_group): f.write('}') node_group.arm_cached = True -def build_node(node, f): +def build_node(node: bpy.types.Node, f): global parsed_nodes global parsed_ids @@ -154,6 +154,8 @@ def build_node(node, f): prop = getattr(node, prop_name) if isinstance(prop, str): prop = '"' + str(prop) + '"' + elif isinstance(prop, bool): + prop = str(prop).lower() elif hasattr(prop, 'name'): # PointerProperty prop = '"' + str(prop.name) + '"' else: @@ -239,28 +241,42 @@ def get_root_nodes(node_group): roots.append(node) return roots -def build_default_node(inp): - inp_name = 'new armory.logicnode.NullNode(this)' +def build_default_node(inp: bpy.types.NodeSocket): + """Creates a new node to give a not connected input socket a value""" + null_node = 'new armory.logicnode.NullNode(this)' + + if isinstance(inp, arm.logicnode.arm_nodes.ArmCustomSocket): + # ArmCustomSockets need to implement get_default_value() + default_value = inp.get_default_value() + if default_value is None: + return null_node + if isinstance(default_value, str): + default_value = f'"{default_value}"' + + return f'new armory.logicnode.DynamicNode(this, {default_value})' + if inp.bl_idname == 'ArmNodeSocketAction' or inp.bl_idname == 'ArmNodeSocketArray': - return inp_name + return null_node if inp.bl_idname == 'ArmNodeSocketObject': - inp_name = 'new armory.logicnode.ObjectNode(this, "' + str(inp.get_default_value()) + '")' - return inp_name + return f'new armory.logicnode.ObjectNode(this, "{inp.get_default_value()}")' if inp.bl_idname == 'ArmNodeSocketAnimAction': - inp_name = 'new armory.logicnode.StringNode(this, "' + str(inp.get_default_value()) + '")' - return inp_name + # Backslashes are not allowed in f-strings so we need this variable + default_value = inp.get_default_value().replace("\"", "\\\"") + return f'new armory.logicnode.StringNode(this, "{default_value}")' if inp.type == 'VECTOR': - inp_name = 'new armory.logicnode.VectorNode(this, ' + str(inp.default_value[0]) + ', ' + str(inp.default_value[1]) + ', ' + str(inp.default_value[2]) + ')' + return f'new armory.logicnode.VectorNode(this, {inp.default_value[0]}, {inp.default_value[1]}, {inp.default_value[2]})' elif inp.type == 'RGBA': - inp_name = 'new armory.logicnode.ColorNode(this, ' + str(inp.default_value[0]) + ', ' + str(inp.default_value[1]) + ', ' + str(inp.default_value[2]) + ', ' + str(inp.default_value[3]) + ')' + return f'new armory.logicnode.ColorNode(this, {inp.default_value[0]}, {inp.default_value[1]}, {inp.default_value[2]}, {inp.default_value[3]})' elif inp.type == 'RGB': - inp_name = 'new armory.logicnode.ColorNode(this, ' + str(inp.default_value[0]) + ', ' + str(inp.default_value[1]) + ', ' + str(inp.default_value[2]) + ')' + return f'new armory.logicnode.ColorNode(this, {inp.default_value[0]}, {inp.default_value[1]}, {inp.default_value[2]})' elif inp.type == 'VALUE': - inp_name = 'new armory.logicnode.FloatNode(this, ' + str(inp.default_value) + ')' + return f'new armory.logicnode.FloatNode(this, {inp.default_value})' elif inp.type == 'INT': - inp_name = 'new armory.logicnode.IntegerNode(this, ' + str(inp.default_value) + ')' + return f'new armory.logicnode.IntegerNode(this, {inp.default_value})' elif inp.type == 'BOOLEAN': - inp_name = 'new armory.logicnode.BooleanNode(this, ' + 
str(inp.default_value).lower() + ')' + return f'new armory.logicnode.BooleanNode(this, {str(inp.default_value).lower()})' elif inp.type == 'STRING': - inp_name = 'new armory.logicnode.StringNode(this, "' + str(inp.default_value) + '")' - return inp_name + default_value = inp.default_value.replace("\"", "\\\"") + return f'new armory.logicnode.StringNode(this, "{default_value}")' + + return null_node diff --git a/blender/arm/make_renderpath.py b/blender/arm/make_renderpath.py index ca0e829b..7889cb88 100755 --- a/blender/arm/make_renderpath.py +++ b/blender/arm/make_renderpath.py @@ -127,9 +127,13 @@ def build(): assets.add_khafile_def('rp_shadowmap_cascade={0}'.format(arm.utils.get_cascade_size(rpdat))) assets.add_khafile_def('rp_shadowmap_cube={0}'.format(rpdat.rp_shadowmap_cube)) + if arm.utils.get_gapi() == 'metal': + assets.add_shader_pass('clear_color_depth_pass') + assets.add_shader_pass('clear_color_pass') + assets.add_shader_pass('clear_depth_pass') + assets.add_khafile_def('rp_background={0}'.format(rpdat.rp_background)) if rpdat.rp_background == 'World': - assets.add_shader_pass('world_pass') if '_EnvClouds' in wrd.world_defs: assets.add(assets_path + 'clouds_base.raw') assets.add_embedded_data('clouds_base.raw') @@ -221,7 +225,7 @@ def build(): if rpdat.rp_antialiasing == 'TAA': assets.add_khafile_def('arm_taa') - assets.add_khafile_def('rp_supersampling={0}'.format(rpdat.rp_supersampling)) + assets.add_khafile_def('rp_supersampling={0}'.format(rpdat.rp_supersampling)) if rpdat.rp_supersampling == '4': assets.add_shader_pass('supersample_resolve') @@ -276,7 +280,7 @@ def build(): else: # mobile, solid assets.add_shader_pass('deferred_light_' + rpdat.arm_material_model.lower()) assets.add_khafile_def('rp_material_' + rpdat.arm_material_model.lower()) - + if len(bpy.data.lightprobes) > 0: wrd.world_defs += '_Probes' assets.add_khafile_def('rp_probes') diff --git a/blender/arm/make_world.py b/blender/arm/make_world.py index f2646d4a..2816e475 100755 --- a/blender/arm/make_world.py +++ b/blender/arm/make_world.py @@ -1,105 +1,199 @@ -import bpy import os -from bpy.types import NodeTree, Node, NodeSocket -from bpy.props import * -import arm.write_probes as write_probes + +import bpy + import arm.assets as assets -import arm.utils -import arm.node_utils as node_utils import arm.log as log -import arm.make_state as state +from arm.material import make_shader +from arm.material.shader import ShaderContext, Shader +import arm.node_utils as node_utils +import arm.utils +import arm.write_probes as write_probes callback = None +shader_datas = [] + def build(): - worlds = [] - for scene in bpy.data.scenes: - if scene.arm_export and scene.world != None and scene.world not in worlds: - worlds.append(scene.world) - build_node_tree(scene.world) + global shader_datas -def build_node_tree(world): - wname = arm.utils.safestr(world.name) - wrd = bpy.data.worlds['Arm'] - wrd.world_defs = '' + bpy.data.worlds['Arm'].world_defs = '' + worlds = [] + shader_datas = [] + + for scene in bpy.data.scenes: + # Only export worlds from enabled scenes + if scene.arm_export and scene.world is not None and scene.world not in worlds: + worlds.append(scene.world) + create_world_shaders(scene.world) + + +def create_world_shaders(world: bpy.types.World): + """Creates fragment and vertex shaders for the given world.""" + global shader_datas + world_name = arm.utils.safestr(world.name) + pass_name = 'World_' + world_name + + shader_props = { + 'name': world_name, + 'depth_write': False, + 'compare_mode': 'less', + 
'cull_mode': 'clockwise', + 'color_attachments': ['_HDR'], + 'vertex_elements': [{'name': 'pos', 'data': 'float3'}, {'name': 'nor', 'data': 'float3'}] + } + shader_data = {'name': world_name + '_data', 'contexts': [shader_props]} + + # ShaderContext expects a material, but using a world also works + shader_context = ShaderContext(world, shader_data, shader_props) + vert = shader_context.make_vert(custom_name="World_" + world_name) + frag = shader_context.make_frag(custom_name="World_" + world_name) + + # Update name, make_vert() and make_frag() above need another name + # to work + shader_context.data['name'] = pass_name + + vert.add_out('vec3 normal') + vert.add_uniform('mat4 SMVP', link="_skydomeMatrix") + + frag.add_include('compiled.inc') + frag.add_in('vec3 normal') + frag.add_out('vec4 fragColor') + + vert.write('''normal = nor; + vec4 position = SMVP * vec4(pos, 1.0); + gl_Position = vec4(position);''') + + build_node_tree(world, frag) + + # TODO: Rework shader export so that it doesn't depend on materials + # to prevent workaround code like this + rel_path = os.path.join(arm.utils.build_dir(), 'compiled', 'Shaders') + full_path = os.path.join(arm.utils.get_fp(), rel_path) + if not os.path.exists(full_path): + os.makedirs(full_path) + + # Output: World_[world_name].[frag/vert].glsl + make_shader.write_shader(rel_path, shader_context.vert, 'vert', world_name, 'World') + make_shader.write_shader(rel_path, shader_context.frag, 'frag', world_name, 'World') + + # Write shader data file + shader_data_file = pass_name + '_data.arm' + arm.utils.write_arm(os.path.join(full_path, shader_data_file), {'contexts': [shader_context.data]}) + shader_data_path = os.path.join(arm.utils.get_fp_build(), 'compiled', 'Shaders', shader_data_file) + assets.add_shader_data(shader_data_path) + + assets.add_shader_pass(pass_name) + assets.shader_passes_assets[pass_name] = shader_context.data + shader_datas.append({'contexts': [shader_context.data], 'name': pass_name}) + + +def build_node_tree(world: bpy.types.World, frag: Shader): + """Generates the shader code for the given world.""" + world_name = arm.utils.safestr(world.name) + world.world_defs = '' rpdat = arm.utils.get_rp() - - if callback != None: + wrd = bpy.data.worlds['Arm'] + + if callback is not None: callback() - + # Traverse world node tree - parsed = False - if world.node_tree != None: + is_parsed = False + if world.node_tree is not None: output_node = node_utils.get_node_by_type(world.node_tree, 'OUTPUT_WORLD') - if output_node != None: - parse_world_output(world, output_node) - parsed = True - if parsed == False: + if output_node is not None: + is_parsed = parse_world_output(world, output_node, frag) + + # No world nodes/no output node, use background color + if not is_parsed: solid_mat = rpdat.arm_material_model == 'Solid' if rpdat.arm_irradiance and not solid_mat: - wrd.world_defs += '_Irr' - c = world.color - world.arm_envtex_color = [c[0], c[1], c[2], 1.0] + world.world_defs += '_Irr' + col = world.color + world.arm_envtex_color = [col[0], col[1], col[2], 1.0] world.arm_envtex_strength = 1.0 - + # Clear to color if no texture or sky is provided - if '_EnvSky' not in wrd.world_defs and '_EnvTex' not in wrd.world_defs: - if '_EnvImg' not in wrd.world_defs: - wrd.world_defs += '_EnvCol' - # Irradiance json file name - world.arm_envtex_name = wname - world.arm_envtex_irr_name = wname - write_probes.write_color_irradiance(wname, world.arm_envtex_color) + if '_EnvSky' not in world.world_defs and '_EnvTex' not in world.world_defs: + if '_EnvImg' 
not in world.world_defs: + world.world_defs += '_EnvCol' + frag.add_uniform('vec3 backgroundCol', link='_backgroundCol') + + # Irradiance json file name + world.arm_envtex_name = world_name + world.arm_envtex_irr_name = world_name + write_probes.write_color_irradiance(world_name, world.arm_envtex_color) # film_transparent - if bpy.context.scene != None and hasattr(bpy.context.scene.render, 'film_transparent') and bpy.context.scene.render.film_transparent: - wrd.world_defs += '_EnvTransp' - wrd.world_defs += '_EnvCol' + if bpy.context.scene is not None and hasattr(bpy.context.scene.render, 'film_transparent') and bpy.context.scene.render.film_transparent: + world.world_defs += '_EnvTransp' + world.world_defs += '_EnvCol' + frag.add_uniform('vec3 backgroundCol', link='_backgroundCol') # Clouds enabled - if rpdat.arm_clouds: + if rpdat.arm_clouds and world.arm_use_clouds: + world.world_defs += '_EnvClouds' + # Also set this flag globally so that the required textures are + # included wrd.world_defs += '_EnvClouds' + frag_write_clouds(world, frag) - if '_EnvSky' in wrd.world_defs or '_EnvTex' in wrd.world_defs or '_EnvImg' in wrd.world_defs or '_EnvClouds' in wrd.world_defs: - wrd.world_defs += '_EnvStr' + if '_EnvSky' in world.world_defs or '_EnvTex' in world.world_defs or '_EnvImg' in world.world_defs or '_EnvClouds' in world.world_defs: + frag.add_uniform('float envmapStrength', link='_envmapStrength') -def parse_world_output(world, node): - if node.inputs[0].is_linked: - surface_node = node_utils.find_node_by_link(world.node_tree, node, node.inputs[0]) - parse_surface(world, surface_node) - -def parse_surface(world, node): + frag_write_main(world, frag) + + +def parse_world_output(world: bpy.types.World, node_output: bpy.types.Node, frag: Shader) -> bool: + """Parse the world's output node. 
Return `False` when the node has + no connected surface input.""" + if not node_output.inputs[0].is_linked: + return False + + surface_node = node_utils.find_node_by_link(world.node_tree, node_output, node_output.inputs[0]) + parse_surface(world, surface_node, frag) + return True + + +def parse_surface(world: bpy.types.World, node_surface: bpy.types.Node, frag: Shader): wrd = bpy.data.worlds['Arm'] rpdat = arm.utils.get_rp() solid_mat = rpdat.arm_material_model == 'Solid' - - # Extract environment strength - if node.type == 'BACKGROUND': - + + if node_surface.type in ('BACKGROUND', 'EMISSION'): # Append irradiance define if rpdat.arm_irradiance and not solid_mat: wrd.world_defs += '_Irr' - world.arm_envtex_color = node.inputs[0].default_value - world.arm_envtex_strength = node.inputs[1].default_value + # Extract environment strength + # Todo: follow/parse strength input + world.arm_envtex_strength = node_surface.inputs[1].default_value - # Strength - if node.inputs[0].is_linked: - color_node = node_utils.find_node_by_link(world.node_tree, node, node.inputs[0]) - parse_color(world, color_node) + # Color + if node_surface.inputs[0].is_linked: + color_node = node_utils.find_node_by_link(world.node_tree, node_surface, node_surface.inputs[0]) + parse_color(world, color_node, frag) + else: + world.arm_envtex_color = node_surface.inputs[0].default_value -def parse_color(world, node): + +def parse_color(world: bpy.types.World, node: bpy.types.Node, frag: Shader): wrd = bpy.data.worlds['Arm'] rpdat = arm.utils.get_rp() mobile_mat = rpdat.arm_material_model == 'Mobile' or rpdat.arm_material_model == 'Solid' # Env map included - if node.type == 'TEX_ENVIRONMENT' and node.image != None: + if node.type == 'TEX_ENVIRONMENT' and node.image is not None: + world.world_defs += '_EnvTex' + + frag.add_include('std/math.glsl') + frag.add_uniform('sampler2D envmap', link='_envmap') image = node.image filepath = image.filepath - - if image.packed_file == None and not os.path.isfile(arm.utils.asset_path(filepath)): + + if image.packed_file is None and not os.path.isfile(arm.utils.asset_path(filepath)): log.warn(world.name + ' - unable to open ' + image.filepath) return @@ -121,7 +215,7 @@ def parse_color(world, node): tex_file = base[0] + '.jpg' target_format = 'JPEG' - if image.packed_file != None: + if image.packed_file is not None: # Extract packed data unpack_path = arm.utils.get_fp_build() + '/compiled/Assets/unpacked' if not os.path.exists(unpack_path): @@ -133,10 +227,10 @@ def parse_color(world, node): if not os.path.isfile(unpack_filepath): arm.utils.unpack_image(image, unpack_filepath, file_format=target_format) - elif os.path.isfile(unpack_filepath) == False or os.path.getsize(unpack_filepath) != image.packed_file.size: + elif not os.path.isfile(unpack_filepath) or os.path.getsize(unpack_filepath) != image.packed_file.size: with open(unpack_filepath, 'wb') as f: f.write(image.packed_file.data) - + assets.add(unpack_filepath) else: if do_convert: @@ -157,27 +251,31 @@ def parse_color(world, node): world.arm_envtex_name = tex_file world.arm_envtex_irr_name = tex_file.rsplit('.', 1)[0] disable_hdr = target_format == 'JPEG' - + mip_count = world.arm_envtex_num_mips mip_count = write_probes.write_probes(filepath, disable_hdr, mip_count, arm_radiance=rpdat.arm_radiance) - + world.arm_envtex_num_mips = mip_count - - # Append envtex define - wrd.world_defs += '_EnvTex' + # Append LDR define if disable_hdr: - wrd.world_defs += '_EnvLDR' + world.world_defs += '_EnvLDR' # Append radiance define if 
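# Illustrative sketch only: the packed-image branch above rewrites the
# unpacked file just when it is missing or its size no longer matches the
# packed data. This standalone helper (ensure_unpacked is a made-up name)
# restates that cache rule for the raw-write case and leaves out the
# arm.utils.unpack_image() conversion branch.
import os

def ensure_unpacked(packed_data: bytes, unpack_filepath: str) -> str:
    directory = os.path.dirname(unpack_filepath)
    if directory and not os.path.exists(directory):
        os.makedirs(directory)
    if (not os.path.isfile(unpack_filepath)
            or os.path.getsize(unpack_filepath) != len(packed_data)):
        with open(unpack_filepath, 'wb') as f:
            f.write(packed_data)
    return unpack_filepath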
rpdat.arm_irradiance and rpdat.arm_radiance and not mobile_mat: wrd.world_defs += '_Rad' # Static image background - elif node.type == 'TEX_IMAGE': + elif node.type == 'TEX_IMAGE': + world.world_defs += '_EnvImg' + + # Background texture + frag.add_uniform('sampler2D envmap', link='_envmap') + frag.add_uniform('vec2 screenSize', link='_screenSize') + image = node.image filepath = image.filepath - if image.packed_file != None: + if image.packed_file is not None: # Extract packed data filepath = arm.utils.build_dir() + '/compiled/Assets/unpacked' unpack_path = arm.utils.get_fp() + filepath @@ -194,20 +292,52 @@ def parse_color(world, node): # Reference image name tex_file = arm.utils.extract_filename(image.filepath) + base = tex_file.rsplit('.', 1) + ext = base[1].lower() + + if ext == 'hdr': + target_format = 'HDR' + else: + target_format = 'JPEG' + + # Generate prefiltered envmaps world.arm_envtex_name = tex_file + world.arm_envtex_irr_name = tex_file.rsplit('.', 1)[0] + + disable_hdr = target_format == 'JPEG' + + mip_count = world.arm_envtex_num_mips + mip_count = write_probes.write_probes(filepath, disable_hdr, mip_count, arm_radiance=rpdat.arm_radiance) + + world.arm_envtex_num_mips = mip_count # Append sky define elif node.type == 'TEX_SKY': # Match to cycles world.arm_envtex_strength *= 0.1 - - wrd.world_defs += '_EnvSky' + + world.world_defs += '_EnvSky' assets.add_khafile_def('arm_hosek') - + frag.add_uniform('vec3 A', link="_hosekA") + frag.add_uniform('vec3 B', link="_hosekB") + frag.add_uniform('vec3 C', link="_hosekC") + frag.add_uniform('vec3 D', link="_hosekD") + frag.add_uniform('vec3 E', link="_hosekE") + frag.add_uniform('vec3 F', link="_hosekF") + frag.add_uniform('vec3 G', link="_hosekG") + frag.add_uniform('vec3 H', link="_hosekH") + frag.add_uniform('vec3 I', link="_hosekI") + frag.add_uniform('vec3 Z', link="_hosekZ") + frag.add_uniform('vec3 hosekSunDirection', link="_hosekSunDirection") + frag.add_function('''vec3 hosekWilkie(float cos_theta, float gamma, float cos_gamma) { +\tvec3 chi = (1 + cos_gamma * cos_gamma) / pow(1 + H * H - 2 * cos_gamma * H, vec3(1.5)); +\treturn (1 + A * exp(B / (cos_theta + 0.01))) * (C + D * exp(E * gamma) + F * (cos_gamma * cos_gamma) + G * chi + I * sqrt(cos_theta)); +}''') + world.arm_envtex_sun_direction = [node.sun_direction[0], node.sun_direction[1], node.sun_direction[2]] world.arm_envtex_turbidity = node.turbidity world.arm_envtex_ground_albedo = node.ground_albedo - + # Irradiance json file name wname = arm.utils.safestr(world.name) world.arm_envtex_irr_name = wname @@ -222,6 +352,131 @@ def parse_color(world, node): assets.add(sdk_path + '/' + hosek_path + 'hosek_radiance.hdr') for i in range(0, 8): assets.add(sdk_path + '/' + hosek_path + 'hosek_radiance_' + str(i) + '.hdr') - + world.arm_envtex_name = 'hosek' world.arm_envtex_num_mips = 8 + + +def frag_write_clouds(world: bpy.types.World, frag: Shader): + """References: + GPU PRO 7 - Real-time Volumetric Cloudscapes + https://www.guerrilla-games.com/read/the-real-time-volumetric-cloudscapes-of-horizon-zero-dawn + https://github.com/sebh/TileableVolumeNoise + """ + frag.add_uniform('sampler3D scloudsBase', link='$clouds_base.raw') + frag.add_uniform('sampler3D scloudsDetail', link='$clouds_detail.raw') + frag.add_uniform('sampler2D scloudsMap', link='$clouds_map.png') + frag.add_uniform('float time', link='_time') + + frag.add_const('float', 'cloudsLower', str(round(world.arm_clouds_lower * 100) / 100)) + frag.add_const('float', 'cloudsUpper', str(round(world.arm_clouds_upper 
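# Illustrative sketch only: the ten Hosek-Wilkie coefficient uniforms added
# one by one above could equally be registered in a loop; add_hosek_uniforms
# is a made-up name, frag being the world fragment Shader from the patch.
def add_hosek_uniforms(frag) -> None:
    for coefficient in 'ABCDEFGHIZ':
        frag.add_uniform(f'vec3 {coefficient}', link=f'_hosek{coefficient}')
    frag.add_uniform('vec3 hosekSunDirection', link='_hosekSunDirection')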
* 100) / 100)) + frag.add_const('vec2', 'cloudsWind', 'vec2(' + str(round(world.arm_clouds_wind[0] * 100) / 100) + ',' + str(round(world.arm_clouds_wind[1] * 100) / 100) + ')') + frag.add_const('float', 'cloudsPrecipitation', str(round(world.arm_clouds_precipitation * 100) / 100)) + frag.add_const('float', 'cloudsSecondary', str(round(world.arm_clouds_secondary * 100) / 100)) + frag.add_const('float', 'cloudsSteps', str(round(world.arm_clouds_steps * 100) / 100)) + + frag.add_function('''float remap(float old_val, float old_min, float old_max, float new_min, float new_max) { +\treturn new_min + (((old_val - old_min) / (old_max - old_min)) * (new_max - new_min)); +}''') + + frag.add_function('''float getDensityHeightGradientForPoint(float height, float cloud_type) { +\tconst vec4 stratusGrad = vec4(0.02f, 0.05f, 0.09f, 0.11f); +\tconst vec4 stratocumulusGrad = vec4(0.02f, 0.2f, 0.48f, 0.625f); +\tconst vec4 cumulusGrad = vec4(0.01f, 0.0625f, 0.78f, 1.0f); +\tfloat stratus = 1.0f - clamp(cloud_type * 2.0f, 0, 1); +\tfloat stratocumulus = 1.0f - abs(cloud_type - 0.5f) * 2.0f; +\tfloat cumulus = clamp(cloud_type - 0.5f, 0, 1) * 2.0f; +\tvec4 cloudGradient = stratusGrad * stratus + stratocumulusGrad * stratocumulus + cumulusGrad * cumulus; +\treturn smoothstep(cloudGradient.x, cloudGradient.y, height) - smoothstep(cloudGradient.z, cloudGradient.w, height); +}''') + + frag.add_function('''float sampleCloudDensity(vec3 p) { +\tfloat cloud_base = textureLod(scloudsBase, p, 0).r * 40; // Base noise +\tvec3 weather_data = textureLod(scloudsMap, p.xy, 0).rgb; // Weather map +\tcloud_base *= getDensityHeightGradientForPoint(p.z, weather_data.b); // Cloud type +\tcloud_base = remap(cloud_base, weather_data.r, 1.0, 0.0, 1.0); // Coverage +\tcloud_base *= weather_data.r; +\tfloat cloud_detail = textureLod(scloudsDetail, p, 0).r * 2; // Detail noise +\tfloat cloud_detail_mod = mix(cloud_detail, 1.0 - cloud_detail, clamp(p.z * 10.0, 0, 1)); +\tcloud_base = remap(cloud_base, cloud_detail_mod * 0.2, 1.0, 0.0, 1.0); +\treturn cloud_base; +}''') + + func_cloud_radiance = 'float cloudRadiance(vec3 p, vec3 dir) {\n' + if '_EnvSky' in world.world_defs: + func_cloud_radiance += '\tvec3 sun_dir = hosekSunDirection;\n' + else: + func_cloud_radiance += '\tvec3 sun_dir = vec3(0, 0, -1);\n' + func_cloud_radiance += '''\tconst int steps = 8; +\tfloat step_size = 0.5 / float(steps); +\tfloat d = 0.0; +\tp += sun_dir * step_size; +\tfor(int i = 0; i < steps; ++i) { +\t\td += sampleCloudDensity(p + sun_dir * float(i) * step_size); +\t} +\treturn 1.0 - d; +}''' + frag.add_function(func_cloud_radiance) + + frag.add_function('''vec3 traceClouds(vec3 sky, vec3 dir) { +\tconst float step_size = 0.5 / float(cloudsSteps); +\tfloat T = 1.0; +\tfloat C = 0.0; +\tvec2 uv = dir.xy / dir.z * 0.4 * cloudsLower + cloudsWind * time * 0.02; + +\tfor (int i = 0; i < cloudsSteps; ++i) { +\t\tfloat h = float(i) / float(cloudsSteps); +\t\tvec3 p = vec3(uv * 0.04, h); +\t\tfloat d = sampleCloudDensity(p); + +\t\tif (d > 0) { +\t\t\t// float radiance = cloudRadiance(p, dir); +\t\t\tC += T * exp(h) * d * step_size * 0.6 * cloudsPrecipitation; +\t\t\tT *= exp(-d * step_size); +\t\t\tif (T < 0.01) break; +\t\t} +\t\tuv += (dir.xy / dir.z) * step_size * cloudsUpper; +\t} + +\treturn vec3(C) + sky * T; +}''') + + +def frag_write_main(world: bpy.types.World, frag: Shader): + if '_EnvSky' in world.world_defs or '_EnvTex' in world.world_defs or '_EnvClouds' in world.world_defs: + frag.write('vec3 n = normalize(normal);') + + if '_EnvCol' in 
world.world_defs: + frag.write('fragColor.rgb = backgroundCol;') + if '_EnvTransp' in world.world_defs: + frag.write('return;') + + # Static background image + elif '_EnvImg' in world.world_defs: + # Will have to get rid of gl_FragCoord, pass texture coords from + # vertex shader + frag.write('vec2 texco = gl_FragCoord.xy / screenSize;') + frag.write('fragColor.rgb = texture(envmap, vec2(texco.x, 1.0 - texco.y)).rgb * envmapStrength;') + + # Environment texture + # Also check for _EnvSky to prevent case when sky radiance is enabled + elif '_EnvTex' in world.world_defs and '_EnvSky' not in world.world_defs: + frag.write('fragColor.rgb = texture(envmap, envMapEquirect(n)).rgb * envmapStrength;') + + if '_EnvLDR' in world.world_defs: + frag.write('fragColor.rgb = pow(fragColor.rgb, vec3(2.2));') + + if '_EnvSky' in world.world_defs: + frag.write('float cos_theta = clamp(n.z, 0.0, 1.0);') + frag.write('float cos_gamma = dot(n, hosekSunDirection);') + frag.write('float gamma_val = acos(cos_gamma);') + frag.write('fragColor.rgb = Z * hosekWilkie(cos_theta, gamma_val, cos_gamma) * envmapStrength;') + + if '_EnvClouds' in world.world_defs: + frag.write('if (n.z > 0.0) fragColor.rgb = mix(fragColor.rgb, traceClouds(fragColor.rgb, n), clamp(n.z * 5.0, 0, 1));') + + if '_EnvLDR' in world.world_defs: + frag.write('fragColor.rgb = pow(fragColor.rgb, vec3(1.0 / 2.2));') + + # Mark as non-opaque + frag.write('fragColor.a = 0.0;') diff --git a/blender/arm/material/cycles.py b/blender/arm/material/cycles.py index 4c4fedd0..661d6861 100644 --- a/blender/arm/material/cycles.py +++ b/blender/arm/material/cycles.py @@ -14,19 +14,24 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import math -import bpy import os +import shutil +from typing import Optional + +import bpy +from mathutils import Euler, Vector + import arm.assets import arm.utils import arm.make_state import arm.log import arm.material.mat_state as mat_state import arm.material.cycles_functions as c_functions -import shutil +from arm.material.shader import Shader emission_found = False particle_info = None # Particle info export +curshader: Shader def parse(nodes, con, vert, frag, geom, tesc, tese, parse_surface=True, parse_opacity=True, parse_displacement=True, basecol_only=False): output_node = node_by_type(nodes, 'OUTPUT_MATERIAL') @@ -51,6 +56,7 @@ def parse_output(node, _con, _vert, _frag, _geom, _tesc, _tese, _parse_surface, global particle_info global sample_bump global sample_bump_res + global procedurals_written con = _con vert = _vert frag = _frag @@ -61,16 +67,18 @@ def parse_output(node, _con, _vert, _frag, _geom, _tesc, _tese, _parse_surface, parse_opacity = _parse_opacity basecol_only = _basecol_only emission_found = False - particle_info = {} - particle_info['index'] = False - particle_info['age'] = False - particle_info['lifetime'] = False - particle_info['location'] = False - particle_info['size'] = False - particle_info['velocity'] = False - particle_info['angular_velocity'] = False + particle_info = { + 'index': False, + 'age': False, + 'lifetime': False, + 'location': False, + 'size': False, + 'velocity': False, + 'angular_velocity': False + } sample_bump = False sample_bump_res = '' + procedurals_written = False wrd = bpy.data.worlds['Arm'] # Surface @@ -168,6 +176,10 @@ def parse_shader(node, socket): if node.type == 'GROUP': if node.node_tree.name.startswith('Armory PBR'): if parse_surface: + # Normal + if node.inputs[5].is_linked and 
node.inputs[5].links[0].from_node.type == 'NORMAL_MAP': + warn(mat_name() + ' - Do not use Normal Map node with Armory PBR, connect Image Texture directly') + parse_normal_map_color_input(node.inputs[5]) # Base color out_basecol = parse_vector_input(node.inputs[0]) # Occlusion @@ -176,10 +188,6 @@ def parse_shader(node, socket): out_roughness = parse_value_input(node.inputs[3]) # Metallic out_metallic = parse_value_input(node.inputs[4]) - # Normal - if node.inputs[5].is_linked and node.inputs[5].links[0].from_node.type == 'NORMAL_MAP': - warn(mat_name() + ' - Do not use Normal Map node with Armory PBR, connect Image Texture directly') - parse_normal_map_color_input(node.inputs[5]) # Emission if node.inputs[6].is_linked or node.inputs[6].default_value != 0.0: out_emission = parse_value_input(node.inputs[6]) @@ -351,7 +359,7 @@ def parse_displacement_input(inp): else: return None -def parse_vector_input(inp): +def parse_vector_input(inp) -> str: if inp.is_linked: l = inp.links[0] if l.from_node.type == 'REROUTE': @@ -371,7 +379,7 @@ def parse_vector_input(inp): else: return to_vec3(inp.default_value) -def parse_vector(node, socket): +def parse_vector(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> str: global particle_info global sample_bump global sample_bump_res @@ -525,20 +533,19 @@ def parse_vector(node, socket): return res elif node.type == 'TEX_NOISE': + write_procedurals() curshader.add_function(c_functions.str_tex_noise) assets_add(get_sdk_path() + '/armory/Assets/' + 'noise256.png') assets_add_embedded_data('noise256.png') curshader.add_uniform('sampler2D snoise256', link='$noise256.png') - curshader.add_function(c_functions.str_tex_noise) if node.inputs[0].is_linked: co = parse_vector_input(node.inputs[0]) else: co = 'bposition' - scale = parse_value_input(node.inputs[1]) - # detail = parse_value_input(node.inputs[2]) - # distortion = parse_value_input(node.inputs[3]) - # Slow.. 
- res = 'vec3(tex_noise({0} * {1}), tex_noise({0} * {1} + 0.33), tex_noise({0} * {1} + 0.66))'.format(co, scale) + scale = parse_value_input(node.inputs[2]) + detail = parse_value_input(node.inputs[3]) + distortion = parse_value_input(node.inputs[4]) + res = 'vec3(tex_noise({0} * {1},{2},{3}), tex_noise({0} * {1} + 120.0,{2},{3}), tex_noise({0} * {1} + 168.0,{2},{3}))'.format(co, scale, detail, distortion) if sample_bump: write_bump(node, res, 0.1) return res @@ -552,31 +559,52 @@ def parse_vector(node, socket): return to_vec3([0.0, 0.0, 0.0]) elif node.type == 'TEX_VORONOI': + write_procedurals() + outp = 0 + if socket.type == 'RGBA': + outp = 1 + elif socket.type == 'VECTOR': + outp = 2 + m = 0 + if node.distance == 'MANHATTAN': + m = 1 + elif node.distance == 'CHEBYCHEV': + m = 2 + elif node.distance == 'MINKOWSKI': + m = 3 curshader.add_function(c_functions.str_tex_voronoi) - assets_add(get_sdk_path() + '/armory/Assets/' + 'noise256.png') - assets_add_embedded_data('noise256.png') - curshader.add_uniform('sampler2D snoise256', link='$noise256.png') if node.inputs[0].is_linked: co = parse_vector_input(node.inputs[0]) else: co = 'bposition' - scale = parse_value_input(node.inputs[1]) - if node.coloring == 'INTENSITY': - res = 'vec3(tex_voronoi({0} * {1}).a)'.format(co, scale) - else: # CELLS - res = 'tex_voronoi({0} * {1}).rgb'.format(co, scale) + scale = parse_value_input(node.inputs[2]) + exp = parse_value_input(node.inputs[4]) + randomness = parse_value_input(node.inputs[5]) + res = 'tex_voronoi({0}, {1}, {2}, {3}, {4}, {5})'.format(co, randomness, m, outp, scale, exp) if sample_bump: write_bump(node, res) return res elif node.type == 'TEX_WAVE': + write_procedurals() curshader.add_function(c_functions.str_tex_wave) if node.inputs[0].is_linked: co = parse_vector_input(node.inputs[0]) else: co = 'bposition' scale = parse_value_input(node.inputs[1]) - res = 'vec3(tex_wave_f({0} * {1}))'.format(co, scale) + distortion = parse_value_input(node.inputs[2]) + detail = parse_value_input(node.inputs[3]) + detail_scale = parse_value_input(node.inputs[4]) + if node.wave_profile == 'SIN': + wave_profile = 0 + else: + wave_profile = 1 + if node.wave_type == 'BANDS': + wave_type = 0 + else: + wave_type = 1 + res = 'vec3(tex_wave_f({0} * {1},{2},{3},{4},{5},{6}))'.format(co, scale, wave_type, wave_profile, distortion, detail, detail_scale) if sample_bump: write_bump(node, res) return res @@ -729,34 +757,49 @@ def parse_vector(node, socket): # Pass constant return to_vec3([rgb[0], rgb[1], rgb[2]]) - elif node.type == 'VALTORGB': # ColorRamp - fac = parse_value_input(node.inputs[0]) + # ColorRamp + elif node.type == 'VALTORGB': + input_fac: bpy.types.NodeSocket = node.inputs[0] + + fac: str = parse_value_input(input_fac) if input_fac.is_linked else to_vec1(input_fac.default_value) interp = node.color_ramp.interpolation elems = node.color_ramp.elements + if len(elems) == 1: return to_vec3(elems[0].color) - # Write cols array - cols_var = node_name(node.name) + '_cols' - curshader.write('vec3 {0}[{1}];'.format(cols_var, len(elems))) # TODO: Make const - for i in range(0, len(elems)): - curshader.write('{0}[{1}] = vec3({2}, {3}, {4});'.format(cols_var, i, elems[i].color[0], elems[i].color[1], elems[i].color[2])) - # Get index + + # Write color array + # The last entry is included twice so that the interpolation + # between indices works (no out of bounds error) + cols_var = node_name(node.name).upper() + '_COLS' + cols_entries = ', '.join(f'vec3({elem.color[0]}, {elem.color[1]}, {elem.color[2]})' for 
elem in elems) + cols_entries += f', vec3({elems[len(elems) - 1].color[0]}, {elems[len(elems) - 1].color[1]}, {elems[len(elems) - 1].color[2]})' + curshader.add_const("vec3", cols_var, cols_entries, array_size=len(elems) + 1) + fac_var = node_name(node.name) + '_fac' - curshader.write('float {0} = {1};'.format(fac_var, fac)) - index = '0' - for i in range(1, len(elems)): - index += ' + ({0} > {1} ? 1 : 0)'.format(fac_var, elems[i].position) + curshader.write(f'float {fac_var} = {fac};') + + # Get index of the nearest left element relative to the factor + index = '0 + ' + index += ' + '.join([f'(({fac_var} > {elems[i].position}) ? 1 : 0)' for i in range(1, len(elems))]) + # Write index index_var = node_name(node.name) + '_i' - curshader.write('int {0} = {1};'.format(index_var, index)) + curshader.write(f'int {index_var} = {index};') + if interp == 'CONSTANT': - return '{0}[{1}]'.format(cols_var, index_var) - else: # Linear - # Write facs array - facs_var = node_name(node.name) + '_facs' - curshader.write('float {0}[{1}];'.format(facs_var, len(elems))) # TODO: Make const - for i in range(0, len(elems)): - curshader.write('{0}[{1}] = {2};'.format(facs_var, i, elems[i].position)) + return f'{cols_var}[{index_var}]' + + # Linear interpolation + else: + # Write factor array + facs_var = node_name(node.name).upper() + '_FACS' + facs_entries = ', '.join(str(elem.position) for elem in elems) + # Add one more entry at the rightmost position so that the + # interpolation between indices works (no out of bounds error) + facs_entries += ', 1.0' + curshader.add_const("float", facs_var, facs_entries, array_size=len(elems) + 1) + # Mix color # float f = (pos - start) * (1.0 / (finish - start)) return 'mix({0}[{1}], {0}[{1} + 1], ({2} - {3}[{1}]) * (1.0 / ({3}[{1} + 1] - {3}[{1}]) ))'.format(cols_var, index_var, fac_var, facs_var) @@ -898,32 +941,60 @@ def parse_vector(node, socket): return res elif node.type == 'MAPPING': - out = parse_vector_input(node.inputs[0]) - scale = node.inputs['Scale'].default_value - rotation = node.inputs['Rotation'].default_value - location = node.inputs['Location'].default_value if node.inputs['Location'].enabled else [0.0, 0.0, 0.0] - if scale[0] != 1.0 or scale[1] != 1.0 or scale[2] != 1.0: - out = '({0} * vec3({1}, {2}, {3}))'.format(out, scale[0], scale[1], scale[2]) - if rotation[2] != 0.0: - # ZYX rotation, Z axis for now.. - a = rotation[2] - # x * cos(theta) - y * sin(theta) - # x * sin(theta) + y * cos(theta) - out = 'vec3({0}.x * {1} - ({0}.y) * {2}, {0}.x * {2} + ({0}.y) * {1}, 0.0)'.format(out, math.cos(a), math.sin(a)) - # if node.rotation[1] != 0.0: - # a = node.rotation[1] - # out = 'vec3({0}.x * {1} - {0}.z * {2}, {0}.x * {2} + {0}.z * {1}, 0.0)'.format(out, math.cos(a), math.sin(a)) - # if node.rotation[0] != 0.0: - # a = node.rotation[0] - # out = 'vec3({0}.y * {1} - {0}.z * {2}, {0}.y * {2} + {0}.z * {1}, 0.0)'.format(out, math.cos(a), math.sin(a)) + # Only "Point", "Texture" and "Vector" types supported for now.. 
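# Illustrative sketch only: a CPU analogue of the GLSL the ColorRamp branch
# now emits, to make the indexing trick explicit. The element index is the
# number of stop positions the factor has passed, and the last colour plus a
# 1.0 position are appended so that reading index + 1 never goes out of
# bounds. color_ramp_linear is a made-up name; the small epsilon guard is an
# addition here, the generated shader divides directly.
def color_ramp_linear(fac, positions, colors):
    facs = list(positions) + [1.0]
    cols = list(colors) + [colors[-1]]
    index = sum(1 for p in positions[1:] if fac > p)
    span = max(facs[index + 1] - facs[index], 1e-8)
    t = (fac - facs[index]) / span
    return tuple(c0 + (c1 - c0) * t for c0, c1 in zip(cols[index], cols[index + 1]))

# color_ramp_linear(0.75, [0.0, 0.5, 1.0], [(0, 0, 0), (1, 0, 0), (1, 1, 1)])
# -> (1.0, 0.5, 0.5)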
+ # More information about the order of operations for this node: + # https://docs.blender.org/manual/en/latest/render/shader_nodes/vector/mapping.html#properties + + input_vector: bpy.types.NodeSocket = node.inputs[0] + input_location: bpy.types.NodeSocket = node.inputs['Location'] + input_rotation: bpy.types.NodeSocket = node.inputs['Rotation'] + input_scale: bpy.types.NodeSocket = node.inputs['Scale'] + out: str = parse_vector_input(input_vector) if input_vector.is_linked else to_vec3(input_vector.default_value) + location: str = parse_vector_input(input_location) if input_location.is_linked else to_vec3(input_location.default_value) + rotation: str = parse_vector_input(input_rotation) if input_rotation.is_linked else to_vec3(input_rotation.default_value) + scale: str = parse_vector_input(input_scale) if input_scale.is_linked else to_vec3(input_scale.default_value) + + # Use inner functions because the order of operations varies between mapping node vector types. This adds a + # slight overhead but makes the code much more readable. + # "Point" and "Vector" use Scale -> Rotate -> Translate, "Texture" uses Translate -> Rotate -> Scale + def calc_location(output: str) -> str: + # Vectors and Eulers support the "!=" operator + if input_scale.is_linked or input_scale.default_value != Vector((1, 1, 1)): + if node.vector_type == 'TEXTURE': + output = f'({output} / {scale})' + else: + output = f'({output} * {scale})' + + return output + + def calc_scale(output: str) -> str: + if input_location.is_linked or input_location.default_value != Vector((0, 0, 0)): + # z location is a little off sometimes?... + if node.vector_type == 'TEXTURE': + output = f'({output} - {location})' + else: + output = f'({output} + {location})' + return output + + out = calc_location(out) if node.vector_type == 'TEXTURE' else calc_scale(out) + + if input_rotation.is_linked or input_rotation.default_value != Euler((0, 0, 0)): + var_name = node_name(node.name) + "_rotation" + if node.vector_type == 'TEXTURE': + curshader.write(f'mat3 {var_name}X = mat3(1.0, 0.0, 0.0, 0.0, cos({rotation}.x), sin({rotation}.x), 0.0, -sin({rotation}.x), cos({rotation}.x));') + curshader.write(f'mat3 {var_name}Y = mat3(cos({rotation}.y), 0.0, -sin({rotation}.y), 0.0, 1.0, 0.0, sin({rotation}.y), 0.0, cos({rotation}.y));') + curshader.write(f'mat3 {var_name}Z = mat3(cos({rotation}.z), sin({rotation}.z), 0.0, -sin({rotation}.z), cos({rotation}.z), 0.0, 0.0, 0.0, 1.0);') + else: + # A little bit redundant, but faster than 12 more multiplications to make it work dynamically + curshader.write(f'mat3 {var_name}X = mat3(1.0, 0.0, 0.0, 0.0, cos(-{rotation}.x), sin(-{rotation}.x), 0.0, -sin(-{rotation}.x), cos(-{rotation}.x));') + curshader.write(f'mat3 {var_name}Y = mat3(cos(-{rotation}.y), 0.0, -sin(-{rotation}.y), 0.0, 1.0, 0.0, sin(-{rotation}.y), 0.0, cos(-{rotation}.y));') + curshader.write(f'mat3 {var_name}Z = mat3(cos(-{rotation}.z), sin(-{rotation}.z), 0.0, -sin(-{rotation}.z), cos(-{rotation}.z), 0.0, 0.0, 0.0, 1.0);') + + # XYZ-order euler rotation + out = f'{out} * {var_name}X * {var_name}Y * {var_name}Z' + + out = calc_scale(out) if node.vector_type == 'TEXTURE' else calc_location(out) - if location[0] != 0.0 or location[1] != 0.0 or location[2] != 0.0: - out = '({0} + vec3({1}, {2}, {3}))'.format(out, location[0], location[1], location[2]) - # use Extension parameter from the Texture node instead - # if node.use_min: - # out = 'max({0}, vec3({1}, {2}, {3}))'.format(out, node.min[0], node.min[1]) - # if node.use_max: - # out = 'min({0}, 
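# Illustrative sketch only: the order of operations the reworked Mapping
# branch follows, per its own comment and the linked Blender manual page.
# Rotation is taken as a callable here purely to keep the illustration short;
# the generated GLSL builds three Euler rotation matrices instead. Both
# function names are made up.
from typing import Callable

def map_point_or_vector(v, loc, scl, rotate: Callable) -> tuple:
    # 'Point'/'Vector': scale, then rotate, then translate
    v = tuple(a * s for a, s in zip(v, scl))
    v = rotate(v)
    return tuple(a + b for a, b in zip(v, loc))

def map_texture(v, loc, scl, inverse_rotate: Callable) -> tuple:
    # 'Texture': inverse translate, then inverse rotate, then inverse scale
    v = tuple(a - b for a, b in zip(v, loc))
    v = inverse_rotate(v)
    return tuple(a / s for a, s in zip(v, scl))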
vec3({1}, {2}, {3}))'.format(out, node.max[0], node.max[1]) return out elif node.type == 'NORMAL': @@ -985,7 +1056,7 @@ def parse_normal_map_color_input(inp, strength_input=None): global frag if basecol_only: return - if inp.is_linked == False: + if not inp.is_linked: return if normal_parsed: return @@ -999,7 +1070,7 @@ def parse_normal_map_color_input(inp, strength_input=None): frag.write('n = TBN * normalize(texn);') else: frag.write('vec3 n = ({0}) * 2.0 - 1.0;'.format(parse_vector_input(inp))) - if strength_input != None: + if strength_input is not None: strength = parse_value_input(strength_input) if strength != '1.0': frag.write('n.xy *= {0};'.format(strength)) @@ -1007,18 +1078,20 @@ def parse_normal_map_color_input(inp, strength_input=None): con.add_elem('tang', 'short4norm') frag.write_normal -= 1 -def parse_value_input(inp): +def parse_value_input(inp) -> str: if inp.is_linked: - l = inp.links[0] + link = inp.links[0] - if l.from_node.type == 'REROUTE': - return parse_value_input(l.from_node.inputs[0]) + if link.from_node.type == 'REROUTE': + return parse_value_input(link.from_node.inputs[0]) - res_var = write_result(l) - st = l.from_socket.type - if st == 'RGB' or st == 'RGBA' or st == 'VECTOR': - return '{0}.x'.format(res_var) - else: # VALUE + res_var = write_result(link) + socket_type = link.from_socket.type + if socket_type == 'RGB' or socket_type == 'RGBA' or socket_type == 'VECTOR': + # RGB to BW + return f'((({res_var}.r * 0.3 + {res_var}.g * 0.59 + {res_var}.b * 0.11) / 3.0) * 2.5)' + # VALUE + else: return res_var else: if mat_batch() and inp.is_uniform: @@ -1256,6 +1329,7 @@ def parse_value(node, socket): return res elif node.type == 'TEX_NOISE': + write_procedurals() curshader.add_function(c_functions.str_tex_noise) assets_add(get_sdk_path() + '/armory/Assets/' + 'noise256.png') assets_add_embedded_data('noise256.png') @@ -1264,10 +1338,10 @@ def parse_value(node, socket): co = parse_vector_input(node.inputs[0]) else: co = 'bposition' - scale = parse_value_input(node.inputs[1]) - # detail = parse_value_input(node.inputs[2]) - # distortion = parse_value_input(node.inputs[3]) - res = 'tex_noise({0} * {1})'.format(co, scale) + scale = parse_value_input(node.inputs[2]) + detail = parse_value_input(node.inputs[3]) + distortion = parse_value_input(node.inputs[4]) + res = 'tex_noise({0} * {1},{2},{3})'.format(co, scale, detail, distortion) if sample_bump: write_bump(node, res, 0.1) return res @@ -1276,31 +1350,52 @@ def parse_value(node, socket): return '0.0' elif node.type == 'TEX_VORONOI': + write_procedurals() + outp = 0 + if socket.type == 'RGBA': + outp = 1 + elif socket.type == 'VECTOR': + outp = 2 + m = 0 + if node.distance == 'MANHATTAN': + m = 1 + elif node.distance == 'CHEBYCHEV': + m = 2 + elif node.distance == 'MINKOWSKI': + m = 3 curshader.add_function(c_functions.str_tex_voronoi) - assets_add(get_sdk_path() + '/armory/Assets/' + 'noise256.png') - assets_add_embedded_data('noise256.png') - curshader.add_uniform('sampler2D snoise256', link='$noise256.png') if node.inputs[0].is_linked: co = parse_vector_input(node.inputs[0]) else: co = 'bposition' - scale = parse_value_input(node.inputs[1]) - if node.coloring == 'INTENSITY': - res = 'tex_voronoi({0} * {1}).a'.format(co, scale) - else: # CELLS - res = 'tex_voronoi({0} * {1}).r'.format(co, scale) + scale = parse_value_input(node.inputs[2]) + exp = parse_value_input(node.inputs[4]) + randomness = parse_value_input(node.inputs[5]) + res = 'tex_voronoi({0}, {1}, {2}, {3}, {4}, {5}).x'.format(co, randomness, m, outp, 
scale, exp) if sample_bump: write_bump(node, res) return res elif node.type == 'TEX_WAVE': + write_procedurals() curshader.add_function(c_functions.str_tex_wave) if node.inputs[0].is_linked: co = parse_vector_input(node.inputs[0]) else: co = 'bposition' scale = parse_value_input(node.inputs[1]) - res = 'tex_wave_f({0} * {1})'.format(co, scale) + distortion = parse_value_input(node.inputs[2]) + detail = parse_value_input(node.inputs[3]) + detail_scale = parse_value_input(node.inputs[4]) + if node.wave_profile == 'SIN': + wave_profile = 0 + else: + wave_profile = 1 + if node.wave_type == 'BANDS': + wave_type = 0 + else: + wave_type = 1 + res = 'tex_wave_f({0} * {1},{2},{3},{4},{5},{6})'.format(co, scale, wave_type, wave_profile, distortion, detail, detail_scale) if sample_bump: write_bump(node, res) return res @@ -1446,31 +1541,41 @@ def is_parsed(s): global parsed return s in parsed -def res_var_name(node, socket): +def res_var_name(node: bpy.types.Node, socket: bpy.types.NodeSocket) -> str: return node_name(node.name) + '_' + safesrc(socket.name) + '_res' -def write_result(l): +def write_result(link: bpy.types.NodeLink) -> Optional[str]: global parsed - res_var = res_var_name(l.from_node, l.from_socket) + res_var = res_var_name(link.from_node, link.from_socket) # Unparsed node if not is_parsed(res_var): parsed[res_var] = True - st = l.from_socket.type + st = link.from_socket.type if st == 'RGB' or st == 'RGBA' or st == 'VECTOR': - res = parse_vector(l.from_node, l.from_socket) - if res == None: + res = parse_vector(link.from_node, link.from_socket) + if res is None: return None curshader.write('vec3 {0} = {1};'.format(res_var, res)) elif st == 'VALUE': - res = parse_value(l.from_node, l.from_socket) - if res == None: + res = parse_value(link.from_node, link.from_socket) + if res is None: return None - curshader.write('float {0} = {1};'.format(res_var, res)) + if link.from_node.type == "VALUE" and not link.from_node.arm_material_param: + curshader.add_const('float', res_var, res) + else: + curshader.write('float {0} = {1};'.format(res_var, res)) # Normal map already parsed, return - elif l.from_node.type == 'NORMAL_MAP': + elif link.from_node.type == 'NORMAL_MAP': return None return res_var +def write_procedurals(): + global procedurals_written + if(not procedurals_written): + curshader.add_function(c_functions.str_tex_proc) + procedurals_written = True + return + def glsl_type(t): if t == 'RGB' or t == 'RGBA' or t == 'VECTOR': return 'vec3' @@ -1496,18 +1601,23 @@ def texture_store(node, tex, tex_name, to_linear=False, tex_link=None): mat_bind_texture(tex) con.add_elem('tex', 'short2norm') curshader.add_uniform('sampler2D {0}'.format(tex_name), link=tex_link) + triplanar = node.projection == 'BOX' if node.inputs[0].is_linked: uv_name = parse_vector_input(node.inputs[0]) - uv_name = 'vec2({0}.x, 1.0 - {0}.y)'.format(uv_name) + if triplanar: + uv_name = 'vec3({0}.x, 1.0 - {0}.y, {0}.z)'.format(uv_name) + else: + uv_name = 'vec2({0}.x, 1.0 - {0}.y)'.format(uv_name) else: uv_name = 'texCoord' - triplanar = node.projection == 'BOX' if triplanar: - curshader.write(f'vec3 texCoordBlend = vec3(0.0); vec2 {uv_name}1 = vec2(0.0); vec2 {uv_name}2 = vec2(0.0);') # Temp - curshader.write(f'vec4 {tex_store} = vec4(0.0, 0.0, 0.0, 0.0);') - curshader.write(f'if (texCoordBlend.x > 0) {tex_store} += texture({tex_name}, {uv_name}.xy) * texCoordBlend.x;') - curshader.write(f'if (texCoordBlend.y > 0) {tex_store} += texture({tex_name}, {uv_name}1.xy) * texCoordBlend.y;') - curshader.write(f'if (texCoordBlend.z 
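# Illustrative sketch only: both Voronoi branches above (the vector and the
# value parser) translate the Blender enums into the integer arguments of
# tex_voronoi() in the same way; the dictionaries below just make that mapping
# explicit. voronoi_args is a made-up name.
VORONOI_OUTPUT = {'VALUE': 0, 'RGBA': 1, 'VECTOR': 2}   # distance, colour, position
VORONOI_METRIC = {'EUCLIDEAN': 0, 'MANHATTAN': 1, 'CHEBYCHEV': 2, 'MINKOWSKI': 3}

def voronoi_args(socket_type: str, distance_mode: str):
    return VORONOI_OUTPUT.get(socket_type, 0), VORONOI_METRIC.get(distance_mode, 0)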
> 0) {tex_store} += texture({tex_name}, {uv_name}2.xy) * texCoordBlend.z;') + if not curshader.has_include('std/mapping.glsl'): + curshader.add_include('std/mapping.glsl') + if normal_parsed: + nor = 'TBN[2]' + else: + nor = 'n' + curshader.write('vec4 {0} = vec4(triplanarMapping({1}, {2}, {3}), 0.0);'.format(tex_store, tex_name, nor, uv_name)) else: if mat_texture_grad(): curshader.write('vec4 {0} = textureGrad({1}, {2}.xy, g2.xy, g2.zw);'.format(tex_store, tex_name, uv_name)) diff --git a/blender/arm/material/cycles_functions.py b/blender/arm/material/cycles_functions.py index cfc74122..4ff9fcc3 100644 --- a/blender/arm/material/cycles_functions.py +++ b/blender/arm/material/cycles_functions.py @@ -1,3 +1,57 @@ +str_tex_proc = """ +// +// By Morgan McGuire @morgan3d, http://graphicscodex.com +float hash_f(const float n) { return fract(sin(n) * 1e4); } +float hash_f(const vec2 p) { return fract(1e4 * sin(17.0 * p.x + p.y * 0.1) * (0.1 + abs(sin(p.y * 13.0 + p.x)))); } +float hash_f(const vec3 co){ return fract(sin(dot(co.xyz, vec3(12.9898,78.233,52.8265)) * 24.384) * 43758.5453); } + +float noise(const vec3 x) { + const vec3 step = vec3(110, 241, 171); + + vec3 i = floor(x); + vec3 f = fract(x); + + // For performance, compute the base input to a 1D hash from the integer part of the argument and the + // incremental change to the 1D based on the 3D -> 1D wrapping + float n = dot(i, step); + + vec3 u = f * f * (3.0 - 2.0 * f); + return mix(mix(mix( hash_f(n + dot(step, vec3(0, 0, 0))), hash_f(n + dot(step, vec3(1, 0, 0))), u.x), + mix( hash_f(n + dot(step, vec3(0, 1, 0))), hash_f(n + dot(step, vec3(1, 1, 0))), u.x), u.y), + mix(mix( hash_f(n + dot(step, vec3(0, 0, 1))), hash_f(n + dot(step, vec3(1, 0, 1))), u.x), + mix( hash_f(n + dot(step, vec3(0, 1, 1))), hash_f(n + dot(step, vec3(1, 1, 1))), u.x), u.y), u.z); +} + +// Shader-code adapted from Blender +// https://github.com/sobotka/blender/blob/master/source/blender/gpu/shaders/material/gpu_shader_material_tex_wave.glsl & /gpu_shader_material_fractal_noise.glsl +float fractal_noise(const vec3 p, const float o) +{ + float fscale = 1.0; + float amp = 1.0; + float sum = 0.0; + float octaves = clamp(o, 0.0, 16.0); + int n = int(octaves); + for (int i = 0; i <= n; i++) { + float t = noise(fscale * p); + sum += t * amp; + amp *= 0.5; + fscale *= 2.0; + } + float rmd = octaves - floor(octaves); + if (rmd != 0.0) { + float t = noise(fscale * p); + float sum2 = sum + t * amp; + sum *= float(pow(2, n)) / float(pow(2, n + 1) - 1.0); + sum2 *= float(pow(2, n + 1)) / float(pow(2, n + 2) - 1); + return (1.0 - rmd) * sum + rmd * sum2; + } + else { + sum *= float(pow(2, n)) / float(pow(2, n + 1) - 1); + return sum; + } +} +""" + str_tex_checker = """ vec3 tex_checker(const vec3 co, const vec3 col1, const vec3 col2, const float scale) { // Prevent precision issues on unit coordinates @@ -17,28 +71,66 @@ float tex_checker_f(const vec3 co, const float scale) { } """ -# Created by inigo quilez - iq/2013 -# License Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License str_tex_voronoi = """ -vec4 tex_voronoi(const vec3 x) { - vec3 p = floor(x); - vec3 f = fract(x); - float id = 0.0; - float res = 100.0; - for (int k = -1; k <= 1; k++) - for (int j = -1; j <= 1; j++) - for (int i = -1; i <= 1; i++) { - vec3 b = vec3(float(i), float(j), float(k)); - vec3 pb = p + b; - vec3 r = vec3(b) - f + texture(snoise256, (pb.xy + vec2(3.0, 1.0) * pb.z + 0.5) / 256.0).xyz; - float d = dot(r, r); - if (d < res) { - id = dot(p + b, vec3(1.0, 57.0, 
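# Illustrative sketch only: a direct Python port of the fractal_noise()
# helper added to str_tex_proc above, with the base noise passed in as a
# callable so the port stays self-contained. It shows how the octave sum is
# renormalised (amplitudes 1 + 0.5 + ... + 0.5^n sum to (2^(n+1) - 1) / 2^n)
# and how a fractional octave count blends between n and n + 1 octaves.
import math
from typing import Callable

def fractal_noise_py(p, octaves: float, noise: Callable) -> float:
    octaves = max(0.0, min(octaves, 16.0))
    n = int(octaves)
    fscale, amp, total = 1.0, 1.0, 0.0
    for _ in range(n + 1):
        total += noise(tuple(c * fscale for c in p)) * amp
        amp *= 0.5
        fscale *= 2.0
    rmd = octaves - math.floor(octaves)
    norm_n = 2 ** n / (2 ** (n + 1) - 1)
    if rmd != 0.0:
        total2 = total + noise(tuple(c * fscale for c in p)) * amp
        norm_n1 = 2 ** (n + 1) / (2 ** (n + 2) - 1)
        return (1.0 - rmd) * total * norm_n + rmd * total2 * norm_n1
    return total * norm_n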
113.0)); - res = d; +//Shader-code adapted from Blender +//https://github.com/sobotka/blender/blob/master/source/blender/gpu/shaders/material/gpu_shader_material_tex_voronoi.glsl +float voronoi_distance(const vec3 a, const vec3 b, const int metric, const float exponent) +{ + if (metric == 0) // SHD_VORONOI_EUCLIDEAN + { + return distance(a, b); + } + else if (metric == 1) // SHD_VORONOI_MANHATTAN + { + return abs(a.x - b.x) + abs(a.y - b.y) + abs(a.z - b.z); + } + else if (metric == 2) // SHD_VORONOI_CHEBYCHEV + { + return max(abs(a.x - b.x), max(abs(a.y - b.y), abs(a.z - b.z))); + } + else if (metric == 3) // SHD_VORONOI_MINKOWSKI + { + return pow(pow(abs(a.x - b.x), exponent) + pow(abs(a.y - b.y), exponent) + + pow(abs(a.z - b.z), exponent), + 1.0 / exponent); + } + else { + return 0.5; + } +} + +vec3 tex_voronoi(const vec3 coord, const float r, const int metric, const int outp, const float scale, const float exp) +{ + float randomness = clamp(r, 0.0, 1.0); + + vec3 scaledCoord = coord * scale; + vec3 cellPosition = floor(scaledCoord); + vec3 localPosition = scaledCoord - cellPosition; + + float minDistance = 8.0; + vec3 targetOffset, targetPosition; + for (int k = -1; k <= 1; k++) { + for (int j = -1; j <= 1; j++) { + for (int i = -1; i <= 1; i++) { + vec3 cellOffset = vec3(float(i), float(j), float(k)); + vec3 pointPosition = cellOffset; + if(randomness != 0.) { + pointPosition += vec3(hash_f(cellPosition+cellOffset), hash_f(cellPosition+cellOffset+972.37), hash_f(cellPosition+cellOffset+342.48)) * randomness;} + float distanceToPoint = voronoi_distance(pointPosition, localPosition, metric, exp); + if (distanceToPoint < minDistance) { + targetOffset = cellOffset; + minDistance = distanceToPoint; + targetPosition = pointPosition; } + } } - vec3 col = 0.5 + 0.5 * cos(id * 0.35 + vec3(0.0, 1.0, 2.0)); - return vec4(col, sqrt(res)); + } + if(outp == 0){return vec3(minDistance);} + else if(outp == 1) { + if(randomness == 0.) {return vec3(hash_f(cellPosition+targetOffset), hash_f(cellPosition+targetOffset+972.37), hash_f(cellPosition+targetOffset+342.48));} + return (targetPosition - targetOffset)/randomness; + } + return (targetPosition + cellPosition) / scale; } """ @@ -56,25 +148,12 @@ vec4 tex_voronoi(const vec3 x) { # By Morgan McGuire @morgan3d, http://graphicscodex.com Reuse permitted under the BSD license. 
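# Illustrative sketch only: a Python port of the voronoi_distance() metric
# switch added above, matching the integer codes used by tex_voronoi()
# (0 Euclidean, 1 Manhattan, 2 Chebychev, 3 Minkowski).
def voronoi_distance_py(a, b, metric: int, exponent: float) -> float:
    d = [abs(x - y) for x, y in zip(a, b)]
    if metric == 0:
        return sum(c * c for c in d) ** 0.5
    if metric == 1:
        return sum(d)
    if metric == 2:
        return max(d)
    if metric == 3:
        return sum(c ** exponent for c in d) ** (1.0 / exponent)
    return 0.5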
# https://www.shadertoy.com/view/4dS3Wd str_tex_noise = """ -float hash(float n) { return fract(sin(n) * 1e4); } -float tex_noise_f(vec3 x) { - const vec3 step = vec3(110, 241, 171); - vec3 i = floor(x); - vec3 f = fract(x); - float n = dot(i, step); - vec3 u = f * f * (3.0 - 2.0 * f); - return mix(mix(mix(hash(n + dot(step, vec3(0, 0, 0))), hash(n + dot(step, vec3(1, 0, 0))), u.x), - mix(hash(n + dot(step, vec3(0, 1, 0))), hash(n + dot(step, vec3(1, 1, 0))), u.x), u.y), - mix(mix(hash(n + dot(step, vec3(0, 0, 1))), hash(n + dot(step, vec3(1, 0, 1))), u.x), - mix(hash(n + dot(step, vec3(0, 1, 1))), hash(n + dot(step, vec3(1, 1, 1))), u.x), u.y), u.z); -} -float tex_noise(vec3 p) { - p *= 1.25; - float f = 0.5 * tex_noise_f(p); p *= 2.01; - f += 0.25 * tex_noise_f(p); p *= 2.02; - f += 0.125 * tex_noise_f(p); p *= 2.03; - f += 0.0625 * tex_noise_f(p); - return 1.0 - f; +float tex_noise(const vec3 p, const float detail, const float distortion) { + vec3 pk = p; + if (distortion != 0.0) { + pk += vec3(noise(p) * distortion); + } + return fractal_noise(pk, detail); } """ @@ -188,8 +267,16 @@ float tex_brick_f(vec3 p) { """ str_tex_wave = """ -float tex_wave_f(const vec3 p) { - return 1.0 - sin((p.x + p.y) * 10.0); +float tex_wave_f(const vec3 p, const int type, const int profile, const float dist, const float detail, const float detail_scale) { + float n; + if(type == 0) n = (p.x + p.y + p.z) * 9.5; + else n = length(p) * 13.0; + if(dist != 0.0) n += dist * fractal_noise(p * detail_scale, detail) * 2.0 - 1.0; + if(profile == 0) { return 0.5 + 0.5 * sin(n - PI); } + else { + n /= 2.0 * PI; + return n - floor(n); + } } """ diff --git a/blender/arm/material/make.py b/blender/arm/material/make.py index 63e5be4b..c1ece5e5 100755 --- a/blender/arm/material/make.py +++ b/blender/arm/material/make.py @@ -40,6 +40,14 @@ def parse(material, mat_data, mat_users, mat_armusers): elem['name'] = 'nor' elem['data'] = 'short2norm' con['vertex_elements'].append(elem) + elem = {} + elem['name'] = 'tex' + elem['data'] = 'short2norm' + con['vertex_elements'].append(elem) + elem = {} + elem['name'] = 'tex1' + elem['data'] = 'short2norm' + con['vertex_elements'].append(elem) sd['contexts'].append(con) shader_data_name = material.arm_custom_material bind_constants = {} diff --git a/blender/arm/material/make_cluster.py b/blender/arm/material/make_cluster.py index 9428f996..6864386d 100644 --- a/blender/arm/material/make_cluster.py +++ b/blender/arm/material/make_cluster.py @@ -3,13 +3,14 @@ import bpy def write(vert, frag): wrd = bpy.data.worlds['Arm'] is_shadows = '_ShadowMap' in wrd.world_defs - + frag.add_include('std/clusters.glsl') frag.add_uniform('vec2 cameraProj', link='_cameraPlaneProj') frag.add_uniform('vec2 cameraPlane', link='_cameraPlane') frag.add_uniform('vec4 lightsArray[maxLights * 2]', link='_lightsArray') frag.add_uniform('sampler2D clustersData', link='_clustersData') if is_shadows: + frag.add_uniform('bool receiveShadow') frag.add_uniform('vec2 lightProj', link='_lightPlaneProj', included=True) frag.add_uniform('samplerCubeShadow shadowMapPoint[4]', included=True) vert.add_out('vec4 wvpposition') @@ -36,7 +37,7 @@ def write(vert, frag): frag.write('for (int i = 0; i < min(numLights, maxLightsCluster); i++) {') frag.write('int li = int(texelFetch(clustersData, ivec2(clusterI, i + 1), 0).r * 255);') - + frag.write('direct += sampleLight(') frag.write(' wposition,') frag.write(' n,') @@ -49,7 +50,7 @@ def write(vert, frag): frag.write(' specular,') frag.write(' f0') if is_shadows: - frag.write(' , 
li, lightsArray[li * 2].w') # bias + frag.write(' , li, lightsArray[li * 2].w, receiveShadow') # bias if '_Spot' in wrd.world_defs: frag.write(' , li > numPoints - 1') frag.write(' , lightsArray[li * 2 + 1].w') # cutoff @@ -59,4 +60,4 @@ def write(vert, frag): frag.write(' , voxels, voxpos') frag.write(');') - frag.write('}') # for numLights \ No newline at end of file + frag.write('}') # for numLights diff --git a/blender/arm/material/make_decal.py b/blender/arm/material/make_decal.py index 90a026a1..32e896e9 100644 --- a/blender/arm/material/make_decal.py +++ b/blender/arm/material/make_decal.py @@ -30,7 +30,7 @@ def make(context_id): vert.write('wnormal = N * vec3(0.0, 0.0, 1.0);') vert.write('wvpposition = WVP * vec4(pos.xyz, 1.0);') vert.write('gl_Position = wvpposition;') - + frag.add_include('compiled.inc') frag.add_include('std/gbuffer.glsl') frag.ins = vert.outs @@ -43,11 +43,11 @@ def make(context_id): frag.write_attrib(' vec2 screenPosition = wvpposition.xy / wvpposition.w;') frag.write_attrib(' vec2 depthCoord = screenPosition * 0.5 + 0.5;') - frag.write_attrib('#ifdef HLSL') + frag.write_attrib('#ifdef _InvY') frag.write_attrib(' depthCoord.y = 1.0 - depthCoord.y;') frag.write_attrib('#endif') frag.write_attrib(' float depth = texture(gbufferD, depthCoord).r * 2.0 - 1.0;') - + frag.write_attrib(' vec3 wpos = getPos2(invVP, depth, depthCoord);') frag.write_attrib(' vec4 mpos = invW * vec4(wpos, 1.0);') frag.write_attrib(' if (abs(mpos.x) > 1.0) discard;') diff --git a/blender/arm/material/make_finalize.py b/blender/arm/material/make_finalize.py index 955b60e6..939e4bf9 100644 --- a/blender/arm/material/make_finalize.py +++ b/blender/arm/material/make_finalize.py @@ -68,6 +68,9 @@ def make(con_mesh): vert.add_uniform('vec3 hdim', link='_halfDim') vert.add_uniform('float posUnpack', link='_posUnpack') vert.write_attrib('bposition = (spos.xyz * posUnpack + hdim) / dim;') + vert.write_attrib('if (dim.z == 0) bposition.z = 0;') + vert.write_attrib('if (dim.y == 0) bposition.y = 0;') + vert.write_attrib('if (dim.x == 0) bposition.x = 0;') if tese != None: if frag_bpos: diff --git a/blender/arm/material/make_mesh.py b/blender/arm/material/make_mesh.py index 8b211fbf..72a93975 100644 --- a/blender/arm/material/make_mesh.py +++ b/blender/arm/material/make_mesh.py @@ -16,11 +16,12 @@ write_material_attribs_post = None write_vertex_attribs = None def make(context_id, rpasses): + wrd = bpy.data.worlds['Arm'] rpdat = arm.utils.get_rp() rid = rpdat.rp_renderer con = { 'name': context_id, 'depth_write': True, 'compare_mode': 'less', 'cull_mode': 'clockwise' } - + # Blend context mat = mat_state.material blend = mat.arm_blending @@ -42,6 +43,11 @@ def make(context_id, rpasses): con['depth_write'] = False con['compare_mode'] = 'equal' + attachment_format = 'RGBA32' if '_LDR' in wrd.world_defs else 'RGBA64' + con['color_attachments'] = [attachment_format, attachment_format] + if '_gbuffer2' in wrd.world_defs: + con['color_attachments'].append(attachment_format) + con_mesh = mat_state.data.add_context(con) mat_state.con_mesh = con_mesh @@ -243,7 +249,7 @@ def make_deferred(con_mesh, rpasses): frag.write('n /= (abs(n.x) + abs(n.y) + abs(n.z));') frag.write('n.xy = n.z >= 0.0 ? 
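# Illustrative sketch only: the mesh context above now declares its own colour
# attachments; this is the selection rule in isolation: the narrower 'RGBA32'
# format when the path is LDR, the wider 'RGBA64' otherwise, and a third
# target only when the extra gbuffer is enabled. The function name is made up.
def mesh_color_attachments(world_defs: str) -> list:
    fmt = 'RGBA32' if '_LDR' in world_defs else 'RGBA64'
    attachments = [fmt, fmt]
    if '_gbuffer2' in world_defs:
        attachments.append(fmt)
    return attachments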
n.xy : octahedronWrap(n.xy);') - + if '_Emission' in wrd.world_defs or '_SSS' in wrd.world_defs or '_Hair' in wrd.world_defs: frag.write('uint matid = 0;') if '_Emission' in wrd.world_defs: @@ -360,8 +366,11 @@ def make_forward_mobile(con_mesh): vert.add_out('vec4 lightPosition') vert.add_uniform('mat4 LWVP', '_biasLightWorldViewProjectionMatrix') vert.write('lightPosition = LWVP * spos;') + frag.add_uniform('bool receiveShadow') frag.add_uniform('sampler2DShadow shadowMap') frag.add_uniform('float shadowsBias', '_sunShadowsBias') + + frag.write('if (receiveShadow) {') if '_CSM' in wrd.world_defs: frag.add_include('std/shadows.glsl') frag.add_uniform('vec4 casData[shadowmapCascades * 4 + 4]', '_cascadeData', included=True) @@ -375,6 +384,7 @@ def make_forward_mobile(con_mesh): else: frag.write(' svisibility = texture(shadowMap, vec3(lPos.xy, lPos.z - shadowsBias)).r;') frag.write('}') + frag.write('}') # receiveShadow frag.write('direct += basecol * sdotNL * sunCol * svisibility;') if '_SinglePoint' in wrd.world_defs: @@ -388,12 +398,15 @@ def make_forward_mobile(con_mesh): frag.write('vec3 l = normalize(ld);') frag.write('float dotNL = max(dot(n, l), 0.0);') if is_shadows: + frag.add_uniform('bool receiveShadow') frag.add_uniform('float pointBias', link='_pointShadowsBias') frag.add_include('std/shadows.glsl') + + frag.write('if (receiveShadow) {') if '_Spot' in wrd.world_defs: vert.add_out('vec4 spotPosition') vert.add_uniform('mat4 LWVPSpot0', link='_biasLightWorldViewProjectionMatrixSpot0') - vert.write('spotPosition = LWVPSpot0 * spos;') + vert.write('spotPosition = LWVPSpot0 * spos;') frag.add_uniform('sampler2DShadow shadowMapSpot[1]') frag.write('if (spotPosition.w > 0.0) {') frag.write(' vec3 lPos = spotPosition.xyz / spotPosition.w;') @@ -407,13 +420,14 @@ def make_forward_mobile(con_mesh): frag.add_uniform('samplerCubeShadow shadowMapPoint[1]') frag.write('const float s = shadowmapCubePcfSize;') # TODO: incorrect... 
frag.write('float compare = lpToDepth(ld, lightProj) - pointBias * 1.5;') - frag.write('#ifdef HLSL') + frag.write('#ifdef _InvY') frag.write('l.y = -l.y;') frag.write('#endif') if '_Legacy' in wrd.world_defs: frag.write('visibility = float(texture(shadowMapPoint[0], vec3(-l + n * pointBias * 20)).r > compare);') else: frag.write('visibility = texture(shadowMapPoint[0], vec4(-l + n * pointBias * 20, compare)).r;') + frag.write('}') # receiveShadow frag.write('direct += basecol * dotNL * pointCol * attenuate(distance(wposition, pointPos)) * visibility;') @@ -425,8 +439,8 @@ def make_forward_mobile(con_mesh): if '_Irr' in wrd.world_defs: frag.add_include('std/shirr.glsl') - frag.add_uniform('vec4 shirr[7]', link='_envmapIrradiance', included=True) - env_str = 'shIrradiance(n)' + frag.add_uniform('vec4 shirr[7]', link='_envmapIrradiance') + env_str = 'shIrradiance(n, shirr)' else: env_str = '0.5' @@ -469,7 +483,7 @@ def make_forward_solid(con_mesh): parse_opacity = (blend and is_transluc) or arm_discard if parse_opacity: frag.write('float opacity;') - + cycles.parse(mat_state.nodes, con_mesh, vert, frag, geom, tesc, tese, parse_opacity=parse_opacity, parse_displacement=False, basecol_only=True) if arm_discard: @@ -587,7 +601,7 @@ def make_forward_base(con_mesh, parse_opacity=False, transluc_pass=False): float dotNV = max(dot(n, vVec), 0.0); """) - sh = tese if tese != None else vert + sh = tese if tese is not None else vert sh.add_out('vec3 eyeDir') sh.add_uniform('vec3 eye', '_cameraPosition') sh.write('eyeDir = eye - wposition;') @@ -604,8 +618,8 @@ def make_forward_base(con_mesh, parse_opacity=False, transluc_pass=False): if '_Irr' in wrd.world_defs: frag.add_include('std/shirr.glsl') - frag.add_uniform('vec4 shirr[7]', link='_envmapIrradiance', included=True) - frag.write('vec3 indirect = shIrradiance(n);') + frag.add_uniform('vec4 shirr[7]', link='_envmapIrradiance') + frag.write('vec3 indirect = shIrradiance(n, shirr);') if '_EnvTex' in wrd.world_defs: frag.write('indirect /= PI;') frag.write('indirect *= albedo;') @@ -639,7 +653,6 @@ def make_forward_base(con_mesh, parse_opacity=False, transluc_pass=False): frag.write('indirect *= vec3(1.0 - traceAO(voxpos, n, voxels));') frag.write('vec3 direct = vec3(0.0);') - frag.add_uniform('bool receiveShadow') if '_Sun' in wrd.world_defs: frag.add_uniform('vec3 sunCol', '_sunColor') @@ -650,6 +663,7 @@ def make_forward_base(con_mesh, parse_opacity=False, transluc_pass=False): frag.write('float sdotNH = dot(n, sh);') frag.write('float sdotVH = dot(vVec, sh);') if is_shadows: + frag.add_uniform('bool receiveShadow') frag.add_uniform('sampler2DShadow shadowMap') frag.add_uniform('float shadowsBias', '_sunShadowsBias') frag.write('if (receiveShadow) {') @@ -659,7 +673,7 @@ def make_forward_base(con_mesh, parse_opacity=False, transluc_pass=False): frag.add_uniform('vec3 eye', '_cameraPosition') frag.write('svisibility = shadowTestCascade(shadowMap, eye, wposition + n * shadowsBias * 10, shadowsBias);') else: - if tese != None: + if tese is not None: tese.add_out('vec4 lightPosition') tese.add_uniform('mat4 LVP', '_biasLightViewProjectionMatrix') tese.write('lightPosition = LVP * vec4(wposition, 1.0);') @@ -688,6 +702,7 @@ def make_forward_base(con_mesh, parse_opacity=False, transluc_pass=False): frag.add_uniform('vec3 spotDir', link='_spotDirection') frag.add_uniform('vec2 spotData', link='_spotData') if is_shadows: + frag.add_uniform('bool receiveShadow') frag.add_uniform('float pointBias', link='_pointShadowsBias') if '_Spot' in wrd.world_defs: # 
Skip world matrix, already in world-space @@ -699,7 +714,7 @@ def make_forward_base(con_mesh, parse_opacity=False, transluc_pass=False): frag.write('direct += sampleLight(') frag.write(' wposition, n, vVec, dotNV, pointPos, pointCol, albedo, roughness, specular, f0') if is_shadows: - frag.write(' , 0, pointBias') + frag.write(' , 0, pointBias, receiveShadow') if '_Spot' in wrd.world_defs: frag.write(' , true, spotData.x, spotData.y, spotDir') if '_VoxelShadow' in wrd.world_defs and '_VoxelAOvar' in wrd.world_defs: diff --git a/blender/arm/material/make_shader.py b/blender/arm/material/make_shader.py index 24ea7755..b27f246b 100644 --- a/blender/arm/material/make_shader.py +++ b/blender/arm/material/make_shader.py @@ -55,7 +55,7 @@ def build(material, mat_users, mat_armusers): global_elems.append({'name': 'irot', 'data': 'float3'}) if bo.arm_instanced == 'Loc + Scale' or bo.arm_instanced == 'Loc + Rot + Scale': global_elems.append({'name': 'iscl', 'data': 'float3'}) - + mat_state.data.global_elems = global_elems bind_constants = dict() @@ -77,7 +77,7 @@ def build(material, mat_users, mat_armusers): if con != None: pass - + elif rp == 'mesh': con = make_mesh.make(rp, rpasses) @@ -130,7 +130,7 @@ def write_shader(rel_path, shader, ext, rpass, matname, keep_cache=True): return # TODO: blend context - if mat_state.material.arm_blending and rpass == 'mesh': + if rpass == 'mesh' and mat_state.material.arm_blending: rpass = 'blend' file_ext = '.glsl' diff --git a/blender/arm/material/shader.py b/blender/arm/material/shader.py index 54ac9e8c..1507cf50 100644 --- a/blender/arm/material/shader.py +++ b/blender/arm/material/shader.py @@ -65,6 +65,8 @@ class ShaderContext: self.data['color_writes_blue'] = props['color_writes_blue'] if 'color_writes_alpha' in props: self.data['color_writes_alpha'] = props['color_writes_alpha'] + if 'color_attachments' in props: + self.data['color_attachments'] = props['color_attachments'] self.data['texture_units'] = [] self.tunits = self.data['texture_units'] @@ -80,7 +82,7 @@ class ShaderContext: def sort_vs(self): vs = [] ar = ['pos', 'nor', 'tex', 'tex1', 'col', 'tang', 'bone', 'weight', 'ipos', 'irot', 'iscl'] - for ename in ar: + for ename in ar: elem = self.get_elem(ename) if elem != None: vs.append(elem) @@ -123,28 +125,43 @@ class ShaderContext: c['is_image'] = is_image self.tunits.append(c) - def make_vert(self): - self.data['vertex_shader'] = self.matname + '_' + self.data['name'] + '.vert' - self.vert = Shader(self, 'vert') + def make_vert(self, custom_name: str = None): + if custom_name is None: + self.data['vertex_shader'] = self.matname + '_' + self.data['name'] + '.vert' + else: + self.data['vertex_shader'] = custom_name + '.vert' + self.vert = Shader(self, 'vert') return self.vert - def make_frag(self): - self.data['fragment_shader'] = self.matname + '_' + self.data['name'] + '.frag' + def make_frag(self, custom_name: str = None): + if custom_name is None: + self.data['fragment_shader'] = self.matname + '_' + self.data['name'] + '.frag' + else: + self.data['fragment_shader'] = custom_name + '.frag' self.frag = Shader(self, 'frag') return self.frag - def make_geom(self): - self.data['geometry_shader'] = self.matname + '_' + self.data['name'] + '.geom' + def make_geom(self, custom_name: str = None): + if custom_name is None: + self.data['geometry_shader'] = self.matname + '_' + self.data['name'] + '.geom' + else: + self.data['geometry_shader'] = custom_name + '.geom' self.geom = Shader(self, 'geom') return self.geom - def make_tesc(self): - 
self.data['tesscontrol_shader'] = self.matname + '_' + self.data['name'] + '.tesc' + def make_tesc(self, custom_name: str = None): + if custom_name is None: + self.data['tesscontrol_shader'] = self.matname + '_' + self.data['name'] + '.tesc' + else: + self.data['tesscontrol_shader'] = custom_name + '.tesc' self.tesc = Shader(self, 'tesc') return self.tesc - def make_tese(self): - self.data['tesseval_shader'] = self.matname + '_' + self.data['name'] + '.tese' + def make_tese(self, custom_name: str = None): + if custom_name is None: + self.data['tesseval_shader'] = self.matname + '_' + self.data['name'] + '.tese' + else: + self.data['tesseval_shader'] = custom_name + '.tese' self.tese = Shader(self, 'tese') return self.tese @@ -157,6 +174,7 @@ class Shader: self.ins = [] self.outs = [] self.uniforms = [] + self.constants = [] self.functions = {} self.main = '' self.main_init = '' @@ -174,6 +192,9 @@ class Shader: self.is_linked = False # Use already generated shader self.noprocessing = False + def has_include(self, s): + return s in self.includes + def add_include(self, s): self.includes.append(s) @@ -206,9 +227,31 @@ class Shader: ar[0] = 'floats' ar[1] = ar[1].split('[', 1)[0] self.context.add_constant(ar[0], ar[1], link=link) - if included == False and s not in self.uniforms: + if not included and s not in self.uniforms: self.uniforms.append(s) + def add_const(self, type_str: str, name: str, value_str: str, array_size: int = 0): + """ + Add a global constant to the shader. + + Parameters + ---------- + type_str: str + The name of the type, like 'float' or 'vec3'. If the + constant is an array, there is no need to add `[]` to the + type + name: str + The name of the variable + value_str: str + The value of the constant as a string + array_size: int + If not 0 (default value), create an array with the given size + """ + if array_size == 0: + self.constants.append(f'{type_str} {name} = {value_str}') + elif array_size > 0: + self.constants.append(f'{type_str} {name}[{array_size}] = {type_str}[]({value_str})') + def add_function(self, s): fname = s.split('(', 1)[0] if fname in self.functions: @@ -295,7 +338,7 @@ class Shader: if self.shader_type == 'vert': self.vstruct_to_vsin() - + elif self.shader_type == 'tesc': in_ext = '[]' out_ext = '[]' @@ -332,8 +375,10 @@ class Shader: s += 'out {0}{1};\n'.format(a, out_ext) for a in self.uniforms: s += 'uniform ' + a + ';\n' + for c in self.constants: + s += 'const ' + c + ';\n' for f in self.functions: - s += self.functions[f] + s += self.functions[f] + '\n' s += 'void main() {\n' s += self.main_attribs s += self.main_textures diff --git a/blender/arm/nodes_logic.py b/blender/arm/nodes_logic.py index 791e9b47..7d1a2479 100755 --- a/blender/arm/nodes_logic.py +++ b/blender/arm/nodes_logic.py @@ -2,14 +2,14 @@ import bpy from bpy.types import NodeTree from bpy.props import * import nodeitems_utils -from nodeitems_utils import NodeCategory +from nodeitems_utils import NodeCategory, NodeItem from arm.logicnode import * import webbrowser registered_nodes = [] class ArmLogicTree(NodeTree): - '''Logic nodes''' + """Logic nodes""" bl_idname = 'ArmLogicTreeType' bl_label = 'Logic Node Editor' bl_icon = 'DECORATE' @@ -33,11 +33,29 @@ def register_nodes(): node_categories = [] for category in sorted(arm_nodes.category_items): - sorted_items=sorted(arm_nodes.category_items[category], key=lambda item: item.nodetype) + if category == 'Layout': + # Handled separately + continue + + sorted_items = sorted(arm_nodes.category_items[category], key=lambda item: 
item.nodetype) node_categories.append( LogicNodeCategory('Logic' + category + 'Nodes', category, items=sorted_items) ) + # Add special layout nodes known from Blender's node editors + if 'Layout' in arm_nodes.category_items: + # Clone with [:] to prevent double entries + layout_items = arm_nodes.category_items['Layout'][:] + else: + layout_items = [] + + layout_items += [NodeItem('NodeReroute'), NodeItem('NodeFrame')] + layout_items = sorted(layout_items, key=lambda item: item.nodetype) + + node_categories.append( + LogicNodeCategory('LogicLayoutNodes', 'Layout', description='Layout Nodes', items=layout_items) + ) + nodeitems_utils.register_node_categories('ArmLogicNodes', node_categories) def unregister_nodes(): diff --git a/blender/arm/props.py b/blender/arm/props.py index 6b4b39b7..8004efbd 100755 --- a/blender/arm/props.py +++ b/blender/arm/props.py @@ -1,18 +1,14 @@ import bpy from bpy.props import * -import os -import shutil -import arm.props_ui as props_ui + import arm.assets as assets -import arm.log as log -import arm.utils import arm.make -import arm.props_renderpath as props_renderpath -import arm.proxy import arm.nodes_logic +import arm.proxy +import arm.utils # Armory version -arm_version = '2020.3' +arm_version = '2020.8' arm_commit = '$Id$' def init_properties(): @@ -71,6 +67,7 @@ def init_properties(): items=[('Scene', 'Scene', 'Scene'), ('Viewport', 'Viewport', 'Viewport')], name="Camera", description="Viewport camera", default='Scene', update=assets.invalidate_compiler_cache) + bpy.types.World.arm_play_scene = PointerProperty(name="Scene", description="Scene to launch", update=assets.invalidate_compiler_cache, type=bpy.types.Scene) bpy.types.World.arm_debug_console = BoolProperty(name="Debug Console", description="Show inspector in player and enable debug draw.\nRequires that Zui is not disabled", default=False, update=assets.invalidate_shader_cache) bpy.types.World.arm_verbose_output = BoolProperty(name="Verbose Output", description="Print additional information to the console during compilation", default=False) bpy.types.World.arm_runtime = EnumProperty( @@ -124,6 +121,7 @@ def init_properties(): bpy.types.Object.arm_proxy_sync_materials = BoolProperty(name="Materials", description="Keep materials synchronized with proxy object", default=True, update=arm.proxy.proxy_sync_materials) bpy.types.Object.arm_proxy_sync_modifiers = BoolProperty(name="Modifiers", description="Keep modifiers synchronized with proxy object", default=True, update=arm.proxy.proxy_sync_modifiers) bpy.types.Object.arm_proxy_sync_traits = BoolProperty(name="Traits", description="Keep traits synchronized with proxy object", default=True, update=arm.proxy.proxy_sync_traits) + bpy.types.Object.arm_proxy_sync_trait_props = BoolProperty(name="Trait Property Values", description="Keep trait property values synchronized with proxy object", default=False, update=arm.proxy.proxy_sync_traits) # For speakers bpy.types.Speaker.arm_play_on_start = BoolProperty(name="Play on Start", description="Play this sound automatically", default=False) bpy.types.Speaker.arm_loop = BoolProperty(name="Loop", description="Loop this sound", default=False) @@ -267,6 +265,15 @@ def init_properties(): bpy.types.World.arm_wasm_list = CollectionProperty(type=bpy.types.PropertyGroup) bpy.types.World.world_defs = StringProperty(name="World Shader Defs", default='') bpy.types.World.compo_defs = StringProperty(name="Compositor Shader Defs", default='') + + bpy.types.World.arm_use_clouds = BoolProperty(name="Clouds", default=False, 
update=assets.invalidate_shader_cache) + bpy.types.World.arm_clouds_lower = FloatProperty(name="Lower", default=1.0, min=0.1, max=10.0, update=assets.invalidate_shader_cache) + bpy.types.World.arm_clouds_upper = FloatProperty(name="Upper", default=1.0, min=0.1, max=10.0, update=assets.invalidate_shader_cache) + bpy.types.World.arm_clouds_wind = FloatVectorProperty(name="Wind", default=[1.0, 0.0], size=2, update=assets.invalidate_shader_cache) + bpy.types.World.arm_clouds_secondary = FloatProperty(name="Secondary", default=1.0, min=0.1, max=10.0, update=assets.invalidate_shader_cache) + bpy.types.World.arm_clouds_precipitation = FloatProperty(name="Precipitation", default=1.0, min=0.1, max=10.0, update=assets.invalidate_shader_cache) + bpy.types.World.arm_clouds_steps = IntProperty(name="Steps", default=24, min=1, max=240, update=assets.invalidate_shader_cache) + bpy.types.Material.export_uvs = BoolProperty(name="Export UVs", default=False) bpy.types.Material.export_vcols = BoolProperty(name="Export VCols", default=False) bpy.types.Material.export_tangents = BoolProperty(name="Export Tangents", default=False) diff --git a/blender/arm/props_bake.py b/blender/arm/props_bake.py index 2c83c224..93d69647 100644 --- a/blender/arm/props_bake.py +++ b/blender/arm/props_bake.py @@ -3,6 +3,7 @@ import arm.assets import bpy from bpy.types import Menu, Panel, UIList from bpy.props import * +from arm.lightmapper import operators, properties, preferences, utility, keymap class ArmBakeListItem(bpy.types.PropertyGroup): obj: PointerProperty(type=bpy.types.Object, description="The object to bake") @@ -351,6 +352,18 @@ def register(): ('Smart UV Project', 'Smart UV Project', 'Smart UV Project')], name = "UV Unwrap", default='Smart UV Project') + + #Register lightmapper + bpy.types.Scene.arm_bakemode = EnumProperty( + items = [('Static Map', 'Static Map', 'Static Map'), + ('Lightmap', 'Lightmap', 'Lightmap')], + name = "Bake mode", default='Static Map') + + operators.register() + properties.register() + preferences.register() + keymap.register() + def unregister(): bpy.utils.unregister_class(ArmBakeListItem) bpy.utils.unregister_class(ARM_UL_BakeList) @@ -364,3 +377,10 @@ def unregister(): bpy.utils.unregister_class(ArmBakeAddSelectedButton) bpy.utils.unregister_class(ArmBakeClearAllButton) bpy.utils.unregister_class(ArmBakeRemoveBakedMaterialsButton) + + #Unregister lightmapper + + operators.unregister() + properties.unregister() + preferences.unregister() + keymap.unregister() \ No newline at end of file diff --git a/blender/arm/props_lod.py b/blender/arm/props_lod.py index 2de6612f..ef1b2e86 100755 --- a/blender/arm/props_lod.py +++ b/blender/arm/props_lod.py @@ -18,7 +18,7 @@ class ArmLodListItem(bpy.types.PropertyGroup): name="Name", description="A name for this item", default="") - + enabled_prop: BoolProperty( name="", description="A name for this item", @@ -82,9 +82,9 @@ class ArmLodListDeleteItem(bpy.types.Operator): index = mdata.arm_lodlist_index n = lodlist[index].name - if n in context.scene.objects: + if n in context.scene.collection.objects: obj = bpy.data.objects[n] - context.scene.objects.unlink(obj) + context.scene.collection.objects.unlink(obj) lodlist.remove(index) diff --git a/blender/arm/props_renderpath.py b/blender/arm/props_renderpath.py index 62e281df..e5ba673b 100644 --- a/blender/arm/props_renderpath.py +++ b/blender/arm/props_renderpath.py @@ -1,10 +1,8 @@ -import os -import shutil +import bpy +from bpy.props import * + import arm.assets as assets import arm.utils -import bpy 
-from bpy.types import Menu, Panel, UIList -from bpy.props import * def update_preset(self, context): rpdat = arm.utils.get_rp() @@ -233,12 +231,12 @@ class ArmRPListItem(bpy.types.PropertyGroup): ('Clear', 'Clear', 'Clear'), ('Off', 'No Clear', 'Off'), ], - name="Background", description="Background type", default='World', update=update_renderpath) + name="Background", description="Background type", default='World', update=update_renderpath) arm_irradiance: BoolProperty(name="Irradiance", description="Generate spherical harmonics", default=True, update=assets.invalidate_shader_cache) arm_radiance: BoolProperty(name="Radiance", description="Generate radiance textures", default=True, update=assets.invalidate_shader_cache) arm_radiance_size: EnumProperty( items=[('512', '512', '512'), - ('1024', '1024', '1024'), + ('1024', '1024', '1024'), ('2048', '2048', '2048')], name="Map Size", description="Prefiltered map size", default='1024', update=assets.invalidate_envmap_data) rp_autoexposure: BoolProperty(name="Auto Exposure", description="Adjust exposure based on luminance", default=False, update=update_renderpath) @@ -296,19 +294,19 @@ class ArmRPListItem(bpy.types.PropertyGroup): rp_translucency: BoolProperty(name="Translucency", description="Current render-path state", default=False) rp_translucency_state: EnumProperty( items=[('On', 'On', 'On'), - ('Off', 'Off', 'Off'), + ('Off', 'Off', 'Off'), ('Auto', 'Auto', 'Auto')], name="Translucency", description="Order independent translucency", default='Auto', update=update_translucency_state) rp_decals: BoolProperty(name="Decals", description="Current render-path state", default=False) rp_decals_state: EnumProperty( items=[('On', 'On', 'On'), - ('Off', 'Off', 'Off'), + ('Off', 'Off', 'Off'), ('Auto', 'Auto', 'Auto')], name="Decals", description="Decals pass", default='Auto', update=update_decals_state) rp_overlays: BoolProperty(name="Overlays", description="Current render-path state", default=False) rp_overlays_state: EnumProperty( items=[('On', 'On', 'On'), - ('Off', 'Off', 'Off'), + ('Off', 'Off', 'Off'), ('Auto', 'Auto', 'Auto')], name="Overlays", description="X-Ray pass", default='Auto', update=update_overlays_state) rp_sss: BoolProperty(name="SSS", description="Current render-path state", default=False) @@ -329,7 +327,7 @@ class ArmRPListItem(bpy.types.PropertyGroup): ('Shader', 'Shader', 'Shader')], name='Draw Order', description='Sort objects', default='Auto', update=assets.invalidate_compiled_data) rp_stereo: BoolProperty(name="VR", description="Stereo rendering", default=False, update=update_renderpath) - rp_water: BoolProperty(name="Water", description="Water surface pass", default=False, update=update_renderpath) + rp_water: BoolProperty(name="Water", description="Enable water surface pass", default=False, update=update_renderpath) rp_pp: BoolProperty(name="Realtime postprocess", description="Realtime postprocess", default=False, update=update_renderpath) rp_gi: EnumProperty( # TODO: remove in 0.8 items=[('Off', 'Off', 'Off'), @@ -350,13 +348,13 @@ class ArmRPListItem(bpy.types.PropertyGroup): ('0.5', '0.5', '0.5'), ('0.25', '0.25', '0.25')], name="Resolution Z", description="3D texture z resolution multiplier", default='1.0', update=update_renderpath) - arm_clouds: BoolProperty(name="Clouds", default=False, update=assets.invalidate_shader_cache) + arm_clouds: BoolProperty(name="Clouds", description="Enable clouds pass", default=False, update=assets.invalidate_shader_cache) arm_ssrs: BoolProperty(name="SSRS", description="Screen-space 
ray-traced shadows", default=False, update=assets.invalidate_shader_cache) arm_micro_shadowing: BoolProperty(name="Micro Shadowing", description="Micro shadowing based on ambient occlusion", default=False, update=assets.invalidate_shader_cache) arm_texture_filter: EnumProperty( items=[('Anisotropic', 'Anisotropic', 'Anisotropic'), - ('Linear', 'Linear', 'Linear'), - ('Point', 'Closest', 'Point'), + ('Linear', 'Linear', 'Linear'), + ('Point', 'Closest', 'Point'), ('Manual', 'Manual', 'Manual')], name="Texture Filtering", description="Set Manual to honor interpolation setting on Image Texture node", default='Anisotropic') arm_material_model: EnumProperty( @@ -380,7 +378,7 @@ class ArmRPListItem(bpy.types.PropertyGroup): name="Resolution", description="Resolution to perform rendering at", default='Display', update=update_renderpath) arm_rp_resolution_size: IntProperty(name="Size", description="Resolution height in pixels(for example 720p), width is auto-fit to preserve aspect ratio", default=720, min=0, update=update_renderpath) arm_rp_resolution_filter: EnumProperty( - items=[('Linear', 'Linear', 'Linear'), + items=[('Linear', 'Linear', 'Linear'), ('Point', 'Closest', 'Point')], name="Filter", description="Scaling filter", default='Linear') rp_dynres: BoolProperty(name="Dynamic Resolution", description="Dynamic resolution scaling for performance", default=False, update=update_renderpath) @@ -397,7 +395,7 @@ class ArmRPListItem(bpy.types.PropertyGroup): ('4', '4', '4'), ('8', '8', '8'), ('16', '16', '16')], - name="MSAA", description="Samples per pixel usable for render paths drawing directly to framebuffer", default='1') + name="MSAA", description="Samples per pixel usable for render paths drawing directly to framebuffer", default='1') arm_voxelgi_cones: EnumProperty( items=[('9', '9', '9'), @@ -412,13 +410,7 @@ class ArmRPListItem(bpy.types.PropertyGroup): arm_voxelgi_range: FloatProperty(name="Range", description="Maximum range", default=2.0, update=assets.invalidate_shader_cache) arm_voxelgi_aperture: FloatProperty(name="Aperture", description="Cone aperture for shadow trace", default=1.0, update=assets.invalidate_shader_cache) arm_sss_width: FloatProperty(name="Width", description="SSS blur strength", default=1.0, update=assets.invalidate_shader_cache) - arm_clouds_lower: FloatProperty(name="Lower", default=1.0, min=0.1, max=10.0, update=assets.invalidate_shader_cache) - arm_clouds_upper: FloatProperty(name="Upper", default=1.0, min=0.1, max=10.0, update=assets.invalidate_shader_cache) - arm_clouds_wind: FloatVectorProperty(name="Wind", default=[1.0, 0.0], size=2, update=assets.invalidate_shader_cache) - arm_clouds_secondary: FloatProperty(name="Secondary", default=1.0, min=0.1, max=10.0, update=assets.invalidate_shader_cache) - arm_clouds_precipitation: FloatProperty(name="Precipitation", default=1.0, min=0.1, max=10.0, update=assets.invalidate_shader_cache) - arm_clouds_steps: IntProperty(name="Steps", default=24, min=1, max=240, update=assets.invalidate_shader_cache) - arm_water_color: FloatVectorProperty(name="Color", size=3, default=[1,1,1], subtype='COLOR', min=0, max=1, update=assets.invalidate_shader_cache) + arm_water_color: FloatVectorProperty(name="Color", size=3, default=[1, 1, 1], subtype='COLOR', min=0, max=1, update=assets.invalidate_shader_cache) arm_water_level: FloatProperty(name="Level", default=0.0, update=assets.invalidate_shader_cache) arm_water_displace: FloatProperty(name="Displace", default=1.0, update=assets.invalidate_shader_cache) arm_water_speed: 
FloatProperty(name="Speed", default=1.0, update=assets.invalidate_shader_cache) diff --git a/blender/arm/props_traits.py b/blender/arm/props_traits.py index fdf97bce..65675af4 100755 --- a/blender/arm/props_traits.py +++ b/blender/arm/props_traits.py @@ -54,6 +54,7 @@ class ArmTraitListItem(bpy.types.PropertyGroup): name: StringProperty(name="Name", description="A name for this item", default="") enabled_prop: BoolProperty(name="", description="A name for this item", default=True, update=trigger_recompile) is_object: BoolProperty(name="", default=True) + fake_user: BoolProperty(name="Fake User", description="Export this trait even if it is deactivated", default=False) type_prop: EnumProperty( items = [('Haxe Script', 'Haxe', 'Haxe Script'), ('WebAssembly', 'Wasm', 'WebAssembly'), @@ -88,12 +89,16 @@ class ARM_UL_TraitList(bpy.types.UIList): # Make sure your code supports all 3 layout types if self.layout_type in {'DEFAULT', 'COMPACT'}: layout.prop(item, "enabled_prop") - layout.label(text=item.name, icon=custom_icon, icon_value=custom_icon_value) + # Display " " for props without a name to right-align the + # fake_user button + layout.label(text=item.name if item.name != "" else " ", icon=custom_icon, icon_value=custom_icon_value) elif self.layout_type in {'GRID'}: layout.alignment = 'CENTER' layout.label(text="", icon=custom_icon, icon_value=custom_icon_value) + layout.prop(item, "fake_user", text="", icon="FAKE_USER_ON" if item.fake_user else "FAKE_USER_OFF") + class ArmTraitListNewItem(bpy.types.Operator): # Add a new item to the list bl_idname = "arm_traitlist.new_item" @@ -562,33 +567,8 @@ def draw_traits(layout, obj, is_object): if obj.arm_traitlist_index >= 0 and len(obj.arm_traitlist) > 0: item = obj.arm_traitlist[obj.arm_traitlist_index] - # Default props + if item.type_prop == 'Haxe Script' or item.type_prop == 'Bundled Script': - item.name = item.class_name_prop - row = layout.row() - if item.type_prop == 'Haxe Script': - row.prop_search(item, "class_name_prop", bpy.data.worlds['Arm'], "arm_scripts_list", text="Class") - else: - # Bundled scripts not yet fetched - if not bpy.data.worlds['Arm'].arm_bundled_scripts_list: - arm.utils.fetch_bundled_script_names() - row.prop_search(item, "class_name_prop", bpy.data.worlds['Arm'], "arm_bundled_scripts_list", text="Class") - - # Props - if item.arm_traitpropslist: - layout.label(text="Trait Properties:") - if item.arm_traitpropswarnings: - box = layout.box() - box.label(text=f"Warnings ({len(item.arm_traitpropswarnings)}):", icon="ERROR") - - for warning in item.arm_traitpropswarnings: - box.label(text=warning.warning) - - propsrow = layout.row() - propsrows = max(len(item.arm_traitpropslist), 6) - row = layout.row() - row.template_list("ARM_UL_PropList", "The_List", item, "arm_traitpropslist", item, "arm_traitpropslist_index", rows=propsrows) - if item.type_prop == 'Haxe Script': row = layout.row(align=True) row.alignment = 'EXPAND' @@ -615,6 +595,17 @@ def draw_traits(layout, obj, is_object): op.is_object = is_object op = row.operator("arm.refresh_scripts") + # Default props + item.name = item.class_name_prop + row = layout.row() + if item.type_prop == 'Haxe Script': + row.prop_search(item, "class_name_prop", bpy.data.worlds['Arm'], "arm_scripts_list", text="Class") + else: + # Bundled scripts not yet fetched + if not bpy.data.worlds['Arm'].arm_bundled_scripts_list: + arm.utils.fetch_bundled_script_names() + row.prop_search(item, "class_name_prop", bpy.data.worlds['Arm'], "arm_bundled_scripts_list", text="Class") + elif 
item.type_prop == 'WebAssembly': item.name = item.webassembly_prop row = layout.row() @@ -633,8 +624,6 @@ def draw_traits(layout, obj, is_object): elif item.type_prop == 'UI Canvas': item.name = item.canvas_name_prop - row = layout.row() - row.prop_search(item, "canvas_name_prop", bpy.data.worlds['Arm'], "arm_canvas_list", text="Canvas") row = layout.row(align=True) row.alignment = 'EXPAND' @@ -648,10 +637,29 @@ def draw_traits(layout, obj, is_object): op.is_object = is_object op = row.operator("arm.refresh_canvas_list") + row = layout.row() + row.prop_search(item, "canvas_name_prop", bpy.data.worlds['Arm'], "arm_canvas_list", text="Canvas") + elif item.type_prop == 'Logic Nodes': row = layout.row() row.prop_search(item, "node_tree_prop", bpy.data, "node_groups", text="Tree") + if item.type_prop == 'Haxe Script' or item.type_prop == 'Bundled Script': + # Props + if item.arm_traitpropslist: + layout.label(text="Trait Properties:") + if item.arm_traitpropswarnings: + box = layout.box() + box.label(text=f"Warnings ({len(item.arm_traitpropswarnings)}):", icon="ERROR") + + for warning in item.arm_traitpropswarnings: + box.label(text=warning.warning) + + propsrow = layout.row() + propsrows = max(len(item.arm_traitpropslist), 6) + row = layout.row() + row.template_list("ARM_UL_PropList", "The_List", item, "arm_traitpropslist", item, "arm_traitpropslist_index", rows=propsrows) + def register(): global icons_dict bpy.utils.register_class(ArmTraitListItem) diff --git a/blender/arm/props_ui.py b/blender/arm/props_ui.py index 65b6c313..40db59fc 100644 --- a/blender/arm/props_ui.py +++ b/blender/arm/props_ui.py @@ -1,16 +1,20 @@ -import bpy -import webbrowser import os -from bpy.types import Menu, Panel, UIList -from bpy.props import * -import arm.utils -import arm.make as make -import arm.make_state as state + +import bpy + +import arm.api import arm.assets as assets import arm.log as log -import arm.proxy -import arm.api +import arm.make as make +import arm.make_state as state +import arm.props as props import arm.props_properties +import arm.proxy +import arm.utils + +from arm.lightmapper.utility import icon +from arm.lightmapper.properties.denoiser import oidn, optix +import importlib # Menu in object region class ARM_PT_ObjectPropsPanel(bpy.types.Panel): @@ -50,6 +54,49 @@ class ARM_PT_ObjectPropsPanel(bpy.types.Panel): # Properties list arm.props_properties.draw_properties(layout, obj) + # Lightmapping props + if obj.type == "MESH": + row = layout.row(align=True) + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_lightmap_use") + + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_use: + + row = layout.row() + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_lightmap_resolution") + row = layout.row() + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_lightmap_unwrap_mode") + row = layout.row() + if obj.TLM_ObjectProperties.tlm_mesh_lightmap_unwrap_mode == "AtlasGroup": + pass + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_unwrap_margin") + row = layout.row() + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filter_override") + row = layout.row() + if obj.TLM_ObjectProperties.tlm_mesh_filter_override: + row = layout.row(align=True) + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_mode") + row = layout.row(align=True) + if obj.TLM_ObjectProperties.tlm_mesh_filtering_mode == "Gaussian": + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_gaussian_strength") + row = layout.row(align=True) + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_iterations") + elif 
obj.TLM_ObjectProperties.tlm_mesh_filtering_mode == "Box": + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_box_strength") + row = layout.row(align=True) + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_iterations") + elif obj.TLM_ObjectProperties.tlm_mesh_filtering_mode == "Bilateral": + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_bilateral_diameter") + row = layout.row(align=True) + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_bilateral_color_deviation") + row = layout.row(align=True) + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_bilateral_coordinate_deviation") + row = layout.row(align=True) + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_iterations") + else: + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_median_kernel", expand=True) + row = layout.row(align=True) + row.prop(obj.TLM_ObjectProperties, "tlm_mesh_filtering_iterations") + class ARM_PT_ModifiersPropsPanel(bpy.types.Panel): bl_label = "Armory Props" bl_space_type = "PROPERTIES" @@ -145,6 +192,30 @@ class ARM_PT_DataPropsPanel(bpy.types.Panel): layout.prop(obj.data, 'arm_autobake') pass +class ARM_PT_WorldPropsPanel(bpy.types.Panel): + bl_label = "Armory World Properties" + bl_space_type = "PROPERTIES" + bl_region_type = "WINDOW" + bl_context = "world" + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = False + world = context.world + if world is None: + return + + layout.prop(world, 'arm_use_clouds') + col = layout.column(align=True) + col.enabled = world.arm_use_clouds + col.prop(world, 'arm_clouds_lower') + col.prop(world, 'arm_clouds_upper') + col.prop(world, 'arm_clouds_precipitation') + col.prop(world, 'arm_clouds_secondary') + col.prop(world, 'arm_clouds_wind') + col.prop(world, 'arm_clouds_steps') + class ARM_PT_ScenePropsPanel(bpy.types.Panel): bl_label = "Armory Props" bl_space_type = "PROPERTIES" @@ -272,6 +343,7 @@ class ARM_PT_ArmoryPlayerPanel(bpy.types.Panel): row.operator("arm.clean_menu") layout.prop(wrd, 'arm_runtime') layout.prop(wrd, 'arm_play_camera') + layout.prop(wrd, 'arm_play_scene') if log.num_warnings > 0: box = layout.box() @@ -356,7 +428,7 @@ class ARM_PT_ArmoryProjectPanel(bpy.types.Panel): layout.use_property_split = True layout.use_property_decorate = False row = layout.row(align=True) - row.operator("arm.open_editor") + row.operator("arm.open_editor", icon="DESKTOP") row.operator("arm.open_project_folder", icon="FILE_FOLDER") class ARM_PT_ProjectFlagsPanel(bpy.types.Panel): @@ -825,16 +897,8 @@ class ARM_PT_RenderPathWorldPanel(bpy.types.Panel): colb.enabled = rpdat.arm_radiance colb.prop(rpdat, 'arm_radiance_size') layout.prop(rpdat, 'arm_clouds') - col = layout.column() - col.enabled = rpdat.arm_clouds - col.prop(rpdat, 'arm_clouds_lower') - col.prop(rpdat, 'arm_clouds_upper') - col.prop(rpdat, 'arm_clouds_precipitation') - col.prop(rpdat, 'arm_clouds_secondary') - col.prop(rpdat, 'arm_clouds_wind') - col.prop(rpdat, 'arm_clouds_steps') layout.prop(rpdat, "rp_water") - col = layout.column() + col = layout.column(align=True) col.enabled = rpdat.rp_water col.prop(rpdat, 'arm_water_level') col.prop(rpdat, 'arm_water_density') @@ -977,7 +1041,7 @@ class ARM_PT_RenderPathCompositorPanel(bpy.types.Panel): col.enabled = rpdat.arm_grain col.prop(rpdat, 'arm_grain_strength') layout.prop(rpdat, 'arm_fog') - col = layout.column() + col = layout.column(align=True) col.enabled = rpdat.arm_fog col.prop(rpdat, 'arm_fog_color') col.prop(rpdat, 'arm_fog_amounta') @@ -1013,38 
+1077,314 @@ class ARM_PT_BakePanel(bpy.types.Panel): scn = bpy.data.scenes[context.scene.name] row = layout.row(align=True) - row.alignment = 'EXPAND' - row.operator("arm.bake_textures", icon="RENDER_STILL") - row.operator("arm.bake_apply") + row.prop(scn, "arm_bakemode", expand=True) - col = layout.column() - col.prop(scn, 'arm_bakelist_scale') - col.prop(scn.cycles, "samples") + if scn.arm_bakemode == "Static Map": - layout.prop(scn, 'arm_bakelist_unwrap') + row = layout.row(align=True) + row.alignment = 'EXPAND' + row.operator("arm.bake_textures", icon="RENDER_STILL") + row.operator("arm.bake_apply") - rows = 2 - if len(scn.arm_bakelist) > 1: - rows = 4 - row = layout.row() - row.template_list("ARM_UL_BakeList", "The_List", scn, "arm_bakelist", scn, "arm_bakelist_index", rows=rows) - col = row.column(align=True) - col.operator("arm_bakelist.new_item", icon='ADD', text="") - col.operator("arm_bakelist.delete_item", icon='REMOVE', text="") - col.menu("ARM_MT_BakeListSpecials", icon='DOWNARROW_HLT', text="") + col = layout.column() + col.prop(scn, 'arm_bakelist_scale') + col.prop(scn.cycles, "samples") - if len(scn.arm_bakelist) > 1: - col.separator() - op = col.operator("arm_bakelist.move_item", icon='TRIA_UP', text="") - op.direction = 'UP' - op = col.operator("arm_bakelist.move_item", icon='TRIA_DOWN', text="") - op.direction = 'DOWN' + layout.prop(scn, 'arm_bakelist_unwrap') - if scn.arm_bakelist_index >= 0 and len(scn.arm_bakelist) > 0: - item = scn.arm_bakelist[scn.arm_bakelist_index] - layout.prop_search(item, "obj", bpy.data, "objects", text="Object") - layout.prop(item, "res_x") - layout.prop(item, "res_y") + rows = 2 + if len(scn.arm_bakelist) > 1: + rows = 4 + row = layout.row() + row.template_list("ARM_UL_BakeList", "The_List", scn, "arm_bakelist", scn, "arm_bakelist_index", rows=rows) + col = row.column(align=True) + col.operator("arm_bakelist.new_item", icon='ADD', text="") + col.operator("arm_bakelist.delete_item", icon='REMOVE', text="") + col.menu("ARM_MT_BakeListSpecials", icon='DOWNARROW_HLT', text="") + + if len(scn.arm_bakelist) > 1: + col.separator() + op = col.operator("arm_bakelist.move_item", icon='TRIA_UP', text="") + op.direction = 'UP' + op = col.operator("arm_bakelist.move_item", icon='TRIA_DOWN', text="") + op.direction = 'DOWN' + + if scn.arm_bakelist_index >= 0 and len(scn.arm_bakelist) > 0: + item = scn.arm_bakelist[scn.arm_bakelist_index] + layout.prop_search(item, "obj", bpy.data, "objects", text="Object") + layout.prop(item, "res_x") + layout.prop(item, "res_y") + + else: + + scene = context.scene + sceneProperties = scene.TLM_SceneProperties + row = layout.row(align=True) + + row = layout.row(align=True) + + #We list LuxCoreRender as available, by default we assume Cycles exists + row.prop(sceneProperties, "tlm_lightmap_engine") + + if sceneProperties.tlm_lightmap_engine == "Cycles": + + #CYCLES SETTINGS HERE + engineProperties = scene.TLM_EngineProperties + + row = layout.row(align=True) + row.label(text="General Settings") + row = layout.row(align=True) + row.operator("tlm.build_lightmaps") + row = layout.row(align=True) + row.operator("tlm.clean_lightmaps") + row = layout.row(align=True) + row.operator("tlm.explore_lightmaps") + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_apply_on_unwrap") + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_headless") + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_alert_on_finish") + + row = layout.row(align=True) + row.label(text="Cycles Settings") + + row = 
layout.row(align=True) + row.prop(engineProperties, "tlm_mode") + row = layout.row(align=True) + row.prop(engineProperties, "tlm_quality") + row = layout.row(align=True) + row.prop(engineProperties, "tlm_resolution_scale") + row = layout.row(align=True) + row.prop(engineProperties, "tlm_bake_mode") + + if scene.TLM_EngineProperties.tlm_bake_mode == "Background": + row = layout.row(align=True) + row.label(text="Warning! Background mode is currently unstable", icon_value=2) + row = layout.row(align=True) + row.prop(engineProperties, "tlm_caching_mode") + row = layout.row(align=True) + row.prop(engineProperties, "tlm_directional_mode") + row = layout.row(align=True) + row.prop(engineProperties, "tlm_lightmap_savedir") + row = layout.row(align=True) + row.prop(engineProperties, "tlm_dilation_margin") + row = layout.row(align=True) + row.prop(engineProperties, "tlm_exposure_multiplier") + row = layout.row(align=True) + row.prop(engineProperties, "tlm_setting_supersample") + + elif sceneProperties.tlm_lightmap_engine == "LuxCoreRender": + + #LUXCORE SETTINGS HERE + luxcore_available = False + + #Look for Luxcorerender in the renderengine classes + for engine in bpy.types.RenderEngine.__subclasses__(): + if engine.bl_idname == "LUXCORE": + luxcore_available = True + break + + row = layout.row(align=True) + if not luxcore_available: + row.label(text="Please install BlendLuxCore.") + else: + row.label(text="LuxCoreRender not yet available.") + + elif sceneProperties.tlm_lightmap_engine == "OctaneRender": + + #LUXCORE SETTINGS HERE + octane_available = False + + row = layout.row(align=True) + row.label(text="Octane Render not yet available.") + + + ################## + #DENOISE SETTINGS! + row = layout.row(align=True) + row.label(text="Denoise Settings") + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_denoise_use") + row = layout.row(align=True) + + if sceneProperties.tlm_denoise_use: + row.prop(sceneProperties, "tlm_denoise_engine", expand=True) + row = layout.row(align=True) + + if sceneProperties.tlm_denoise_engine == "Integrated": + row.label(text="No options for Integrated.") + elif sceneProperties.tlm_denoise_engine == "OIDN": + denoiseProperties = scene.TLM_OIDNEngineProperties + row.prop(denoiseProperties, "tlm_oidn_path") + row = layout.row(align=True) + row.prop(denoiseProperties, "tlm_oidn_verbose") + row = layout.row(align=True) + row.prop(denoiseProperties, "tlm_oidn_threads") + row = layout.row(align=True) + row.prop(denoiseProperties, "tlm_oidn_maxmem") + row = layout.row(align=True) + row.prop(denoiseProperties, "tlm_oidn_affinity") + # row = layout.row(align=True) + # row.prop(denoiseProperties, "tlm_denoise_ao") + elif sceneProperties.tlm_denoise_engine == "Optix": + denoiseProperties = scene.TLM_OptixEngineProperties + row.prop(denoiseProperties, "tlm_optix_path") + row = layout.row(align=True) + row.prop(denoiseProperties, "tlm_optix_verbose") + row = layout.row(align=True) + row.prop(denoiseProperties, "tlm_optix_maxmem") + row = layout.row(align=True) + row.prop(denoiseProperties, "tlm_denoise_ao") + + + ################## + #FILTERING SETTINGS! 
+ row = layout.row(align=True) + row.label(text="Filtering Settings") + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_filtering_use") + row = layout.row(align=True) + + if sceneProperties.tlm_filtering_use: + + row.prop(sceneProperties, "tlm_filtering_engine", expand=True) + row = layout.row(align=True) + + if sceneProperties.tlm_filtering_engine == "OpenCV": + + cv2 = importlib.util.find_spec("cv2") + + if cv2 is None: + row = layout.row(align=True) + row.label(text="OpenCV is not installed. Install it through preferences.") + else: + row = layout.row(align=True) + row.prop(scene.TLM_SceneProperties, "tlm_filtering_mode") + row = layout.row(align=True) + if scene.TLM_SceneProperties.tlm_filtering_mode == "Gaussian": + row.prop(scene.TLM_SceneProperties, "tlm_filtering_gaussian_strength") + row = layout.row(align=True) + row.prop(scene.TLM_SceneProperties, "tlm_filtering_iterations") + elif scene.TLM_SceneProperties.tlm_filtering_mode == "Box": + row.prop(scene.TLM_SceneProperties, "tlm_filtering_box_strength") + row = layout.row(align=True) + row.prop(scene.TLM_SceneProperties, "tlm_filtering_iterations") + + elif scene.TLM_SceneProperties.tlm_filtering_mode == "Bilateral": + row.prop(scene.TLM_SceneProperties, "tlm_filtering_bilateral_diameter") + row = layout.row(align=True) + row.prop(scene.TLM_SceneProperties, "tlm_filtering_bilateral_color_deviation") + row = layout.row(align=True) + row.prop(scene.TLM_SceneProperties, "tlm_filtering_bilateral_coordinate_deviation") + row = layout.row(align=True) + row.prop(scene.TLM_SceneProperties, "tlm_filtering_iterations") + else: + row.prop(scene.TLM_SceneProperties, "tlm_filtering_median_kernel", expand=True) + row = layout.row(align=True) + row.prop(scene.TLM_SceneProperties, "tlm_filtering_iterations") + else: + row = layout.row(align=True) + row.prop(scene.TLM_SceneProperties, "tlm_numpy_filtering_mode") + + + ################## + #ENCODING SETTINGS! 
+ row = layout.row(align=True) + row.label(text="Encoding Settings") + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_encoding_use") + row = layout.row(align=True) + + if sceneProperties.tlm_encoding_use: + + row.prop(sceneProperties, "tlm_encoding_mode", expand=True) + if sceneProperties.tlm_encoding_mode == "RGBM" or sceneProperties.tlm_encoding_mode == "RGBD": + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_encoding_range") + if sceneProperties.tlm_encoding_mode == "LogLuv": + pass + if sceneProperties.tlm_encoding_mode == "HDR": + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_format") + + row = layout.row(align=True) + row.label(text="Encoding Settings") + row = layout.row(align=True) + + row = layout.row(align=True) + row.operator("tlm.enable_selection") + row = layout.row(align=True) + row.operator("tlm.disable_selection") + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_override_object_settings") + + if sceneProperties.tlm_override_object_settings: + + row = layout.row(align=True) + row = layout.row() + row.prop(sceneProperties, "tlm_mesh_lightmap_unwrap_mode") + row = layout.row() + + if sceneProperties.tlm_mesh_lightmap_unwrap_mode == "AtlasGroup": + + if scene.TLM_AtlasList_index >= 0 and len(scene.TLM_AtlasList) > 0: + row = layout.row() + item = scene.TLM_AtlasList[scene.TLM_AtlasList_index] + row.prop_search(sceneProperties, "tlm_atlas_pointer", scene, "TLM_AtlasList", text='Atlas Group') + else: + row = layout.label(text="Add Atlas Groups from the scene lightmapping settings.") + + else: + + row.prop(sceneProperties, "tlm_mesh_lightmap_resolution") + row = layout.row() + row.prop(sceneProperties, "tlm_mesh_unwrap_margin") + + row = layout.row(align=True) + row.operator("tlm.remove_uv_selection") + row = layout.row(align=True) + + ################## + #SELECTION OPERATORS! 
+ + row = layout.row(align=True) + row.label(text="Selection Operators") + row = layout.row(align=True) + + row = layout.row(align=True) + row.operator("tlm.enable_selection") + row = layout.row(align=True) + row.operator("tlm.disable_selection") + row = layout.row(align=True) + row.prop(sceneProperties, "tlm_override_object_settings") + + if sceneProperties.tlm_override_object_settings: + + row = layout.row(align=True) + row = layout.row() + row.prop(sceneProperties, "tlm_mesh_lightmap_unwrap_mode") + row = layout.row() + + if sceneProperties.tlm_mesh_lightmap_unwrap_mode == "AtlasGroup": + + if scene.TLM_AtlasList_index >= 0 and len(scene.TLM_AtlasList) > 0: + row = layout.row() + item = scene.TLM_AtlasList[scene.TLM_AtlasList_index] + row.prop_search(sceneProperties, "tlm_atlas_pointer", scene, "TLM_AtlasList", text='Atlas Group') + else: + row = layout.label(text="Add Atlas Groups from the scene lightmapping settings.") + + else: + + row.prop(sceneProperties, "tlm_mesh_lightmap_resolution") + row = layout.row() + row.prop(sceneProperties, "tlm_mesh_unwrap_margin") + + row = layout.row(align=True) + row.operator("tlm.remove_uv_selection") + row = layout.row(align=True) + class ArmGenLodButton(bpy.types.Operator): '''Automatically generate LoD levels''' @@ -1320,6 +1660,9 @@ class ARM_PT_ProxyPanel(bpy.types.Panel): layout.prop(obj, "arm_proxy_sync_materials") layout.prop(obj, "arm_proxy_sync_modifiers") layout.prop(obj, "arm_proxy_sync_traits") + row = layout.row() + row.enabled = obj.arm_proxy_sync_traits + row.prop(obj, "arm_proxy_sync_trait_props") layout.operator("arm.proxy_toggle_all") layout.operator("arm.proxy_apply_all") @@ -1349,11 +1692,13 @@ class ArmProxyToggleAllButton(bpy.types.Operator): obj.arm_proxy_sync_materials = b obj.arm_proxy_sync_modifiers = b obj.arm_proxy_sync_traits = b + obj.arm_proxy_sync_trait_props = b return{'FINISHED'} class ArmProxyApplyAllButton(bpy.types.Operator): bl_idname = 'arm.proxy_apply_all' bl_label = 'Apply to All' + def execute(self, context): for obj in bpy.data.objects: if obj.proxy == None: @@ -1365,6 +1710,7 @@ class ArmProxyApplyAllButton(bpy.types.Operator): obj.arm_proxy_sync_materials = context.object.arm_proxy_sync_materials obj.arm_proxy_sync_modifiers = context.object.arm_proxy_sync_modifiers obj.arm_proxy_sync_traits = context.object.arm_proxy_sync_traits + obj.arm_proxy_sync_trait_props = context.object.arm_proxy_sync_trait_props return{'FINISHED'} class ArmSyncProxyButton(bpy.types.Operator): @@ -1426,6 +1772,7 @@ def register(): bpy.utils.register_class(ARM_PT_PhysicsPropsPanel) bpy.utils.register_class(ARM_PT_DataPropsPanel) bpy.utils.register_class(ARM_PT_ScenePropsPanel) + bpy.utils.register_class(ARM_PT_WorldPropsPanel) bpy.utils.register_class(InvalidateCacheButton) bpy.utils.register_class(InvalidateMaterialCacheButton) bpy.utils.register_class(ARM_PT_MaterialPropsPanel) @@ -1475,6 +1822,7 @@ def unregister(): bpy.utils.unregister_class(ARM_PT_ParticlesPropsPanel) bpy.utils.unregister_class(ARM_PT_PhysicsPropsPanel) bpy.utils.unregister_class(ARM_PT_DataPropsPanel) + bpy.utils.unregister_class(ARM_PT_WorldPropsPanel) bpy.utils.unregister_class(ARM_PT_ScenePropsPanel) bpy.utils.unregister_class(InvalidateCacheButton) bpy.utils.unregister_class(InvalidateMaterialCacheButton) @@ -1515,4 +1863,4 @@ def unregister(): bpy.utils.unregister_class(ArmProxyApplyAllButton) bpy.utils.unregister_class(ArmSyncProxyButton) bpy.utils.unregister_class(ArmPrintTraitsButton) - bpy.utils.unregister_class(ARM_PT_MaterialNodePanel) + 
bpy.utils.unregister_class(ARM_PT_MaterialNodePanel) \ No newline at end of file diff --git a/blender/arm/proxy.py b/blender/arm/proxy.py index 558bd68f..fab18aed 100644 --- a/blender/arm/proxy.py +++ b/blender/arm/proxy.py @@ -1,3 +1,5 @@ +from typing import Any, Dict + import bpy def proxy_sync_loc(self, context): @@ -107,11 +109,40 @@ def sync_collection(cSrc, cDst): for prop in properties: setattr(mDst, prop, getattr(mSrc, prop)) -def sync_traits(obj): +def sync_traits(obj: bpy.types.Object): + """Synchronizes the traits of the given object with the traits of + its proxy. + If `arm.proxy_sync_trait_props` is `False`, the values of the trait + properties are kept where possible. + """ + # (Optionally) keep the old property values + values: Dict[bpy.types.Object, Dict[str, Dict[str, Any]]] = {} + for i in range(len(obj.arm_traitlist)): + if not obj.arm_proxy_sync_trait_props: + for prop in obj.arm_traitlist[i].arm_traitpropslist: + values[obj][obj.arm_traitlist[i].name][prop.name] = prop.get_value() + sync_collection(obj.proxy.arm_traitlist, obj.arm_traitlist) - for i in range(0, len(obj.arm_traitlist)): + + for i in range(len(obj.arm_traitlist)): sync_collection(obj.proxy.arm_traitlist[i].arm_traitpropslist, obj.arm_traitlist[i].arm_traitpropslist) + # Set stored property values + if not obj.arm_proxy_sync_trait_props: + if values.get(obj) is None: + continue + + value = values[obj].get(obj.arm_traitlist[i].name) + if value is None: + continue + + for prop in obj.arm_traitlist[i].arm_traitpropslist: + + value = values[obj].get(prop.name) + if value is not None: + prop.set_value(value) + + def sync_materials(obj): # Blender likes to crash here:( pass diff --git a/blender/arm/utils.py b/blender/arm/utils.py index 318a3717..2f1e54b5 100755 --- a/blender/arm/utils.py +++ b/blender/arm/utils.py @@ -2,6 +2,7 @@ import glob import json import os import platform +import re import subprocess import webbrowser @@ -397,15 +398,17 @@ def fetch_script_names(): wrd = bpy.data.worlds['Arm'] # Sources wrd.arm_scripts_list.clear() - sources_path = get_fp() + '/Sources/' + safestr(wrd.arm_project_package) + sources_path = os.path.join(get_fp(), 'Sources', safestr(wrd.arm_project_package)) if os.path.isdir(sources_path): os.chdir(sources_path) # Glob supports recursive search since python 3.5 so it should cover both blender 2.79 and 2.8 integrated python for file in glob.glob('**/*.hx', recursive=True): - name = file.rsplit('.')[0] - # Replace the path syntax for package syntax so that it can be searchable in blender traits "Class" dropdown - wrd.arm_scripts_list.add().name = name.replace(os.sep, '.') - fetch_script_props(file) + mod = file.rsplit('.')[0] + mod = mod.replace('\\', '/') + mod_parts = mod.rsplit('/') + if re.match('^[A-Z][A-Za-z0-9_]*$', mod_parts[-1]): + wrd.arm_scripts_list.add().name = mod.replace('/', '.') + fetch_script_props(file) # Canvas wrd.arm_canvas_list.clear() @@ -526,15 +529,19 @@ def safesrc(s): s = '_' + s return s -def safestr(s): +def safestr(s: str) -> str: + """Outputs a string where special characters have been replaced with + '_', which can be safely used in file and path names.""" for c in r'[]/\;,><&*:%=+@!#^()|?^': s = s.replace(c, '_') return ''.join([i if ord(i) < 128 else '_' for i in s]) def asset_name(bdata): + if bdata == None: + return None s = bdata.name # Append library name if linked - if bdata.library != None: + if bdata.library is not None: s += '_' + bdata.library.name return s @@ -557,10 +564,12 @@ def get_project_scene_name(): return 
get_active_scene().name def get_active_scene(): + wrd = bpy.data.worlds['Arm'] if not state.is_export: - return bpy.context.scene + if wrd.arm_play_scene == None: + return bpy.context.scene + return wrd.arm_play_scene else: - wrd = bpy.data.worlds['Arm'] item = wrd.arm_exporterlist[wrd.arm_exporterlist_index] return item.arm_project_scene @@ -590,7 +599,7 @@ def get_cascade_size(rpdat): def check_saved(self): if bpy.data.filepath == "": msg = "Save blend file first" - self.report({"ERROR"}, msg) if self != None else log.print_info(msg) + self.report({"ERROR"}, msg) if self is not None else log.warn(msg) return False return True @@ -605,18 +614,18 @@ def check_path(s): def check_sdkpath(self): s = get_sdk_path() - if check_path(s) == False: - msg = "SDK path '{0}' contains special characters. Please move SDK to different path for now.".format(s) - self.report({"ERROR"}, msg) if self != None else log.print_info(msg) + if not check_path(s): + msg = f"SDK path '{s}' contains special characters. Please move SDK to different path for now." + self.report({"ERROR"}, msg) if self is not None else log.warn(msg) return False else: return True def check_projectpath(self): s = get_fp() - if check_path(s) == False: - msg = "Project path '{0}' contains special characters, build process may fail.".format(s) - self.report({"ERROR"}, msg) if self != None else log.print_info(msg) + if not check_path(s): + msg = f"Project path '{s}' contains special characters, build process may fail." + self.report({"ERROR"}, msg) if self is not None else log.warn(msg) return False else: return True @@ -659,9 +668,12 @@ def is_bone_animation_enabled(bobject): return True return False -def export_bone_data(bobject): + +def export_bone_data(bobject: bpy.types.Object) -> bool: + """Returns whether the bone data of the given object should be exported.""" return bobject.find_armature() and is_bone_animation_enabled(bobject) and get_rp().arm_skin == 'On' + def open_editor(hx_path=None): ide_bin = get_ide_bin() @@ -690,7 +702,7 @@ def open_editor(hx_path=None): # Sublime Text if get_code_editor() == 'sublime': - project_name = bpy.data.worlds['Arm'].arm_project_name + project_name = arm.utils.safestr(bpy.data.worlds['Arm'].arm_project_name) subl_project_path = arm.utils.get_fp() + f'/{project_name}.sublime-project' if not os.path.exists(subl_project_path): diff --git a/blender/arm/write_data.py b/blender/arm/write_data.py index c3b5446b..dd53b0f3 100755 --- a/blender/arm/write_data.py +++ b/blender/arm/write_data.py @@ -225,6 +225,8 @@ project.addSources('Sources'); if wrd.arm_debug_console: assets.add_khafile_def('arm_debug') f.write(add_shaders(sdk_path + "/armory/Shaders/debug_draw/**", rel_path=rel_path)) + + if wrd.arm_verbose_output: f.write("project.addParameter('--times');\n") if export_ui: @@ -351,6 +353,7 @@ def write_mainhx(scene_name, resx, resy, is_play, is_publish): package ; class Main { public static inline var projectName = '""" + arm.utils.safestr(wrd.arm_project_name) + """'; + public static inline var projectVersion = '""" + arm.utils.safestr(wrd.arm_project_version) + """'; public static inline var projectPackage = '""" + arm.utils.safestr(wrd.arm_project_package) + """';""") if rpdat.rp_voxelao: @@ -452,21 +455,19 @@ def write_compiledglsl(defs, make_variants): if make_variants and d.endswith('var'): continue # Write a shader variant instead f.write("#define " + d + "\n") + + f.write("""#if defined(HLSL) || defined(METAL) +#define _InvY +#endif +""") + f.write("""const float PI = 3.1415926535; const float PI2 
= PI * 2.0; const vec2 shadowmapSize = vec2(""" + str(shadowmap_size) + """, """ + str(shadowmap_size) + """); const float shadowmapCubePcfSize = """ + str((round(rpdat.arm_pcfsize * 100) / 100) / 1000) + """; const int shadowmapCascades = """ + str(rpdat.rp_shadowmap_cascades) + """; """) - if rpdat.arm_clouds: - f.write( -"""const float cloudsLower = """ + str(round(rpdat.arm_clouds_lower * 100) / 100) + """; -const float cloudsUpper = """ + str(round(rpdat.arm_clouds_upper * 100) / 100) + """; -const vec2 cloudsWind = vec2(""" + str(round(rpdat.arm_clouds_wind[0] * 100) / 100) + """, """ + str(round(rpdat.arm_clouds_wind[1] * 100) / 100) + """); -const float cloudsPrecipitation = """ + str(round(rpdat.arm_clouds_precipitation * 100) / 100) + """; -const float cloudsSecondary = """ + str(round(rpdat.arm_clouds_secondary * 100) / 100) + """; -const int cloudsSteps = """ + str(rpdat.arm_clouds_steps) + """; -""") + if rpdat.rp_water: f.write( """const float waterLevel = """ + str(round(rpdat.arm_water_level * 100) / 100) + """; diff --git a/blender/arm/write_probes.py b/blender/arm/write_probes.py index 0ee6cc83..11529394 100644 --- a/blender/arm/write_probes.py +++ b/blender/arm/write_probes.py @@ -1,4 +1,5 @@ import bpy +import multiprocessing import os import sys import subprocess @@ -18,12 +19,12 @@ def add_rad_assets(output_file_rad, rad_format, num_mips): # Generate probes from environment map def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True): envpath = arm.utils.get_fp_build() + '/compiled/Assets/envmaps' - + if not os.path.exists(envpath): os.makedirs(envpath) base_name = arm.utils.extract_filename(image_filepath).rsplit('.', 1)[0] - + # Assets to be generated output_file_irr = envpath + '/' + base_name + '_irradiance' output_file_rad = envpath + '/' + base_name + '_radiance' @@ -37,7 +38,7 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True if arm_radiance: add_rad_assets(output_file_rad, rad_format, cached_num_mips) return cached_num_mips - + # Get paths sdk_path = arm.utils.get_sdk_path() kha_path = arm.utils.get_kha_path() @@ -51,10 +52,10 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True else: cmft_path = '"' + sdk_path + '/lib/armory_tools/cmft/cmft-linux64"' kraffiti_path = '"' + kha_path + '/Kinc/Tools/kraffiti/kraffiti-linux64"' - + output_gama_numerator = '2.2' if disable_hdr else '1.0' input_file = arm.utils.asset_path(image_filepath) - + # Scale map rpdat = arm.utils.get_rp() target_w = int(rpdat.arm_radiance_size) @@ -77,7 +78,7 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True ' format=' + rad_format + \ ' width=' + str(target_w) + \ ' height=' + str(target_h)], shell=True) - + # Irradiance spherical harmonics if arm.utils.get_os() == 'win': subprocess.call([ \ @@ -96,7 +97,7 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True sh_to_json(output_file_irr) add_irr_assets(output_file_irr) - + # Mip-mapped radiance if arm_radiance == False: return cached_num_mips @@ -111,12 +112,13 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True mip_count = 8 else: mip_count = 7 - + wrd = bpy.data.worlds['Arm'] use_opencl = 'true' + cpu_count = multiprocessing.cpu_count() if arm.utils.get_os() == 'win': - subprocess.call([ \ + cmd = [ cmft_path, '--input', scaled_file, '--filter', 'radiance', @@ -128,7 +130,7 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True 
'--glossBias', '3', '--lightingModel', 'blinnbrdf', '--edgeFixup', 'none', - '--numCpuProcessingThreads', '4', + '--numCpuProcessingThreads', str(cpu_count), '--useOpenCL', use_opencl, '--clVendor', 'anyGpuVendor', '--deviceType', 'gpu', @@ -140,21 +142,25 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True '--outputGammaDenominator', '1.0', '--outputNum', '1', '--output0', output_file_rad, - '--output0params', 'hdr,rgbe,latlong']) + '--output0params', 'hdr,rgbe,latlong' + ] + if wrd.arm_verbose_output: + print(cmd) + else: + cmd.append('--silent') + subprocess.call(cmd) else: - subprocess.call([ \ - cmft_path + \ + cmd = cmft_path + \ ' --input "' + scaled_file + '"' + \ ' --filter radiance' + \ ' --dstFaceSize ' + str(face_size) + \ ' --srcFaceSize ' + str(face_size) + \ ' --excludeBase false' + \ - #' --mipCount ' + str(mip_count) + \ ' --glossScale 8' + \ ' --glossBias 3' + \ ' --lightingModel blinnbrdf' + \ ' --edgeFixup none' + \ - ' --numCpuProcessingThreads 4' + \ + ' --numCpuProcessingThreads ' + str(cpu_count) + \ ' --useOpenCL ' + use_opencl + \ ' --clVendor anyGpuVendor' + \ ' --deviceType gpu' + \ @@ -166,7 +172,12 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True ' --outputGammaDenominator 1.0' + \ ' --outputNum 1' + \ ' --output0 "' + output_file_rad + '"' + \ - ' --output0params hdr,rgbe,latlong'], shell=True) + ' --output0params hdr,rgbe,latlong' + if wrd.arm_verbose_output: + print(cmd) + else: + cmd += ' --silent' + subprocess.call([cmd], shell=True) # Remove size extensions in file name mip_w = int(face_size * 4) @@ -184,7 +195,7 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True generated_files = [] for i in range(0, mip_count): generated_files.append(output_file_rad + '_' + str(i)) - + # Convert to jpgs if disable_hdr is True: for f in generated_files: @@ -201,7 +212,7 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True ' to="' + f + '.jpg"' + \ ' format=jpg'], shell=True) os.remove(f + '.hdr') - + # Scale from (4x2 to 1x1> for i in range (0, 2): last = generated_files[-1] @@ -221,15 +232,15 @@ def write_probes(image_filepath, disable_hdr, cached_num_mips, arm_radiance=True ' scale=0.5' + \ ' format=' + rad_format], shell=True) generated_files.append(out) - + mip_count += 2 add_rad_assets(output_file_rad, rad_format, mip_count) return mip_count -# Parse sh coefs produced by cmft into json array def sh_to_json(sh_file): + """Parse sh coefs produced by cmft into json array""" with open(sh_file + '.c') as f: sh_lines = f.read().splitlines() band0_line = sh_lines[5] @@ -240,12 +251,11 @@ def sh_to_json(sh_file): parse_band_floats(irradiance_floats, band0_line) parse_band_floats(irradiance_floats, band1_line) parse_band_floats(irradiance_floats, band2_line) - - sh_json = {} - sh_json['irradiance'] = irradiance_floats - ext = '.arm' if bpy.data.worlds['Arm'].arm_minimize else '.json' + + sh_json = {'irradiance': irradiance_floats} + ext = '.arm' if bpy.data.worlds['Arm'].arm_minimize else '' arm.utils.write_arm(sh_file + ext, sh_json) - + # Clean up .c os.remove(sh_file + '.c') @@ -261,32 +271,31 @@ def write_sky_irradiance(base_name): for i in range(0, len(irradiance_floats)): irradiance_floats[i] /= 2 - envpath = arm.utils.get_fp_build() + '/compiled/Assets/envmaps' + envpath = os.path.join(arm.utils.get_fp_build(), 'compiled', 'Assets', 'envmaps') if not os.path.exists(envpath): os.makedirs(envpath) - - output_file = envpath + '/' + base_name 
+ '_irradiance' - - sh_json = {} - sh_json['irradiance'] = irradiance_floats + + output_file = os.path.join(envpath, base_name + '_irradiance') + + sh_json = {'irradiance': irradiance_floats} arm.utils.write_arm(output_file + '.arm', sh_json) assets.add(output_file + '.arm') def write_color_irradiance(base_name, col): - # Constant color - irradiance_floats = [col[0] * 1.13, col[1] * 1.13, col[2] * 1.13] # Adjust to Cycles + """Constant color irradiance""" + # Adjust to Cycles + irradiance_floats = [col[0] * 1.13, col[1] * 1.13, col[2] * 1.13] for i in range(0, 24): irradiance_floats.append(0.0) - - envpath = arm.utils.get_fp_build() + '/compiled/Assets/envmaps' + + envpath = os.path.join(arm.utils.get_fp_build(), 'compiled', 'Assets', 'envmaps') if not os.path.exists(envpath): os.makedirs(envpath) - - output_file = envpath + '/' + base_name + '_irradiance' - - sh_json = {} - sh_json['irradiance'] = irradiance_floats + + output_file = os.path.join(envpath, base_name + '_irradiance') + + sh_json = {'irradiance': irradiance_floats} arm.utils.write_arm(output_file + '.arm', sh_json) assets.add(output_file + '.arm')
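Note on the new ShaderContext/Shader helpers introduced in blender/arm/material/shader.py above: the make_vert()/make_frag()/make_geom()/make_tesc()/make_tese() factories now accept an optional custom_name, and Shader.add_const() registers global constants that are later emitted as 'const ...;' lines. A minimal standalone sketch of the string add_const() builds (it mirrors the patched method with illustrative constant names; it is not the Armory module itself):

    def add_const_line(type_str: str, name: str, value_str: str, array_size: int = 0) -> str:
        # Scalars become 'type name = value'; arrays become 'type name[N] = type[](values)',
        # matching Shader.add_const() in the patch. The shader writer later prefixes 'const '
        # and appends ';' when emitting the source.
        if array_size == 0:
            return f'{type_str} {name} = {value_str}'
        return f'{type_str} {name}[{array_size}] = {type_str}[]({value_str})'

    print('const ' + add_const_line('float', 'exposure', '1.0') + ';')
    # const float exposure = 1.0;
    print('const ' + add_const_line('vec2', 'points', 'vec2(0.0), vec2(1.0)', array_size=2) + ';')
    # const vec2 points[2] = vec2[](vec2(0.0), vec2(1.0));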
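Note on the fetch_script_names() change in blender/arm/utils.py above: Haxe sources are now filtered with re.match('^[A-Z][A-Za-z0-9_]*$', ...) so that only files whose basename looks like a class name show up in the trait "Class" dropdown. A runnable sketch of that filter, using illustrative file names:

    import re

    # Mirrors the filter added to fetch_script_names(): keep only modules whose
    # basename starts with an uppercase letter (a plausible Haxe class name).
    candidates = ['player/PlayerController.hx', 'player/import.hx', 'utils/math_helpers.hx']

    for file in candidates:
        mod = file.rsplit('.')[0].replace('\\', '/')
        if re.match('^[A-Z][A-Za-z0-9_]*$', mod.rsplit('/')[-1]):
            print(mod.replace('/', '.'))  # -> player.PlayerController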
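Note on the write_probes.py changes above: the cmft invocation now uses multiprocessing.cpu_count() instead of a hardcoded 4 processing threads, and '--silent' is appended unless Verbose Output is enabled. A rough sketch of that argument assembly (build_cmft_args and its inputs are hypothetical names for illustration only; the flags themselves come from the patch):

    import multiprocessing

    def build_cmft_args(cmft_path: str, verbose: bool) -> list:
        # Thread count follows the host CPU; cmft stays quiet unless verbose output is wanted.
        cmd = [cmft_path, '--filter', 'radiance',
               '--numCpuProcessingThreads', str(multiprocessing.cpu_count())]
        if not verbose:
            cmd.append('--silent')
        return cmd

    print(build_cmft_args('cmft-linux64', verbose=False))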