From 6e8b5aff666a5a0e6b7a26b93bd9460f77c486b0 Mon Sep 17 00:00:00 2001 From: Hugo Locurcio Date: Sun, 24 Nov 2019 11:51:53 +0100 Subject: [PATCH 01/34] Add visual feedback when hovering layer checkboxes in the Inspector This also changes how checkboxes are selected, which makes it possible to click in the small area between two checkboxes and still toggle a value successfully (which is arguably less frustrating). (cherry picked from commit bbc435624f6569660eb12203b7c32c87b4027ecf) --- editor/editor_properties.cpp | 98 ++++++++++++++++++++++-------------- 1 file changed, 61 insertions(+), 37 deletions(-) diff --git a/editor/editor_properties.cpp b/editor/editor_properties.cpp index c134786b89..c3ccaf22a4 100644 --- a/editor/editor_properties.cpp +++ b/editor/editor_properties.cpp @@ -610,6 +610,7 @@ public: Vector flag_rects; Vector names; Vector tooltips; + int hovered_index; virtual Size2 get_minimum_size() const { Ref font = get_font("font", "Label"); @@ -625,57 +626,79 @@ public: return String(); } void _gui_input(const Ref &p_ev) { - Ref mb = p_ev; - if (mb.is_valid() && mb->get_button_index() == BUTTON_LEFT && mb->is_pressed()) { + const Ref mm = p_ev; + + if (mm.is_valid()) { for (int i = 0; i < flag_rects.size(); i++) { - if (flag_rects[i].has_point(mb->get_position())) { - //toggle - if (value & (1 << i)) { - value &= ~(1 << i); - } else { - value |= (1 << i); - } - emit_signal("flag_changed", value); + if (flag_rects[i].has_point(mm->get_position())) { + // Used to highlight the hovered flag in the layers grid. + hovered_index = i; update(); + break; } } } + + const Ref mb = p_ev; + + if (mb.is_valid() && mb->get_button_index() == BUTTON_LEFT && mb->is_pressed()) { + // Toggle the flag. + // We base our choice on the hovered flag, so that it always matches the hovered flag. + if (value & (1 << hovered_index)) { + value &= ~(1 << hovered_index); + } else { + value |= (1 << hovered_index); + } + + emit_signal("flag_changed", value); + update(); + } } void _notification(int p_what) { - if (p_what == NOTIFICATION_DRAW) { + switch (p_what) { + case NOTIFICATION_DRAW: { + Rect2 rect; + rect.size = get_size(); + flag_rects.clear(); - Rect2 rect; - rect.size = get_size(); - flag_rects.clear(); + const int bsize = (rect.size.height * 80 / 100) / 2; + const int h = bsize * 2 + 1; + const int vofs = (rect.size.height - h) / 2; - int bsize = (rect.size.height * 80 / 100) / 2; + Color color = get_color("highlight_color", "Editor"); + for (int i = 0; i < 2; i++) { + Point2 ofs(4, vofs); + if (i == 1) + ofs.y += bsize + 1; - int h = bsize * 2 + 1; - int vofs = (rect.size.height - h) / 2; + ofs += rect.position; + for (int j = 0; j < 10; j++) { + Point2 o = ofs + Point2(j * (bsize + 1), 0); + if (j >= 5) + o.x += 1; - Color color = get_color("highlight_color", "Editor"); - for (int i = 0; i < 2; i++) { + const int idx = i * 10 + j; + const bool on = value & (1 << idx); + Rect2 rect2 = Rect2(o, Size2(bsize, bsize)); - Point2 ofs(4, vofs); - if (i == 1) - ofs.y += bsize + 1; + color.a = on ? 0.6 : 0.2; + if (idx == hovered_index) { + // Add visual feedback when hovering a flag. + color.a += 0.15; + } - ofs += rect.position; - for (int j = 0; j < 10; j++) { - - Point2 o = ofs + Point2(j * (bsize + 1), 0); - if (j >= 5) - o.x += 1; - - uint32_t idx = i * 10 + j; - bool on = value & (1 << idx); - Rect2 rect2 = Rect2(o, Size2(bsize, bsize)); - color.a = on ? 
0.6 : 0.2; - draw_rect(rect2, color); - flag_rects.push_back(rect2); + draw_rect(rect2, color); + flag_rects.push_back(rect2); + } } - } + } break; + case NOTIFICATION_MOUSE_EXIT: { + hovered_index = -1; + update(); + } break; + default: + break; } } @@ -692,6 +715,7 @@ public: EditorPropertyLayersGrid() { value = 0; + hovered_index = -1; // Nothing is hovered. } }; void EditorPropertyLayers::_grid_changed(uint32_t p_grid) { @@ -792,7 +816,7 @@ EditorPropertyLayers::EditorPropertyLayers() { hb->add_child(grid); button = memnew(Button); button->set_toggle_mode(true); - button->set_text(".."); + button->set_text("..."); button->connect("pressed", this, "_button_pressed"); hb->add_child(button); set_bottom_editor(hb); From c63c902c74804b6f54684a52b482bc989ae54bd3 Mon Sep 17 00:00:00 2001 From: PouleyKetchoupp Date: Fri, 22 May 2020 21:50:16 +0200 Subject: [PATCH 02/34] Fix dialog spam when inspecting MeshInstance from model file Avoid load_scene for built-in resources to make sure we don't open a scene tab and prompt for model file editing. Load scene as regular resource instead and store the reference to keep the dependency until the remote inspector cache is cleared. (cherry picked from commit 01802074a01c41d8672664751f80b80069796f12) --- editor/script_editor_debugger.cpp | 10 ++++------ editor/script_editor_debugger.h | 1 + 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/editor/script_editor_debugger.cpp b/editor/script_editor_debugger.cpp index 5995777558..5ac7f05cef 100644 --- a/editor/script_editor_debugger.cpp +++ b/editor/script_editor_debugger.cpp @@ -646,12 +646,9 @@ void ScriptEditorDebugger::_parse_message(const String &p_msg, const Array &p_da if (path.find("::") != -1) { // built-in resource String base_path = path.get_slice("::", 0); - if (ResourceLoader::get_resource_type(base_path) == "PackedScene") { - if (!EditorNode::get_singleton()->is_scene_open(base_path)) { - EditorNode::get_singleton()->load_scene(base_path); - } - } else { - EditorNode::get_singleton()->load_resource(base_path); + RES dependency = ResourceLoader::load(base_path); + if (dependency.is_valid()) { + remote_dependencies.insert(dependency); } } var = ResourceLoader::load(path); @@ -2144,6 +2141,7 @@ void ScriptEditorDebugger::_clear_remote_objects() { memdelete(E->value()); } remote_objects.clear(); + remote_dependencies.clear(); } void ScriptEditorDebugger::_clear_errors_list() { diff --git a/editor/script_editor_debugger.h b/editor/script_editor_debugger.h index 2de1db6919..402e94892b 100644 --- a/editor/script_editor_debugger.h +++ b/editor/script_editor_debugger.h @@ -100,6 +100,7 @@ private: ObjectID inspected_object_id; ScriptEditorDebuggerVariables *variables; Map remote_objects; + Set remote_dependencies; Set unfold_cache; VBoxContainer *errors_tab; From a6d8773d0b51e2e0c04cd2ffa2def14563feeca9 Mon Sep 17 00:00:00 2001 From: azagaya Date: Wed, 1 Apr 2020 14:05:33 -0300 Subject: [PATCH 03/34] Fixing wrong blending rect methods Using Color.blend function instead of custom code Fixed clang_format Removed unnecessary help (cherry picked from commit b211a86ebe241599f95186f9db28be2061a19962) --- core/image.cpp | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/core/image.cpp b/core/image.cpp index ac2edf9c42..234b359e76 100644 --- a/core/image.cpp +++ b/core/image.cpp @@ -2223,12 +2223,11 @@ void Image::blend_rect(const Ref &p_src, const Rect2 &p_src_rect, const P int dst_y = dest_rect.position.y + i; Color sc = img->get_pixel(src_x, src_y); - Color 
dc = get_pixel(dst_x, dst_y); - dc.r = (double)(sc.a * sc.r + dc.a * (1.0 - sc.a) * dc.r); - dc.g = (double)(sc.a * sc.g + dc.a * (1.0 - sc.a) * dc.g); - dc.b = (double)(sc.a * sc.b + dc.a * (1.0 - sc.a) * dc.b); - dc.a = (double)(sc.a + dc.a * (1.0 - sc.a)); - set_pixel(dst_x, dst_y, dc); + if (sc.a != 0) { + Color dc = get_pixel(dst_x, dst_y); + dc = dc.blend(sc); + set_pixel(dst_x, dst_y, dc); + } } } @@ -2285,12 +2284,11 @@ void Image::blend_rect_mask(const Ref &p_src, const Ref &p_mask, c int dst_y = dest_rect.position.y + i; Color sc = img->get_pixel(src_x, src_y); - Color dc = get_pixel(dst_x, dst_y); - dc.r = (double)(sc.a * sc.r + dc.a * (1.0 - sc.a) * dc.r); - dc.g = (double)(sc.a * sc.g + dc.a * (1.0 - sc.a) * dc.g); - dc.b = (double)(sc.a * sc.b + dc.a * (1.0 - sc.a) * dc.b); - dc.a = (double)(sc.a + dc.a * (1.0 - sc.a)); - set_pixel(dst_x, dst_y, dc); + if (sc.a != 0) { + Color dc = get_pixel(dst_x, dst_y); + dc = dc.blend(sc); + set_pixel(dst_x, dst_y, dc); + } } } } From e94aab793624897365937fd6d570facb32ee6b40 Mon Sep 17 00:00:00 2001 From: Tomasz Chabora Date: Fri, 5 Jun 2020 23:26:25 +0200 Subject: [PATCH 04/34] Restore capture caches when missing (cherry picked from commit 642d91381a0f9077313c76368a1231bf7233c0ab) --- scene/animation/animation_player.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scene/animation/animation_player.cpp b/scene/animation/animation_player.cpp index 2bc9336b14..727671a6e2 100644 --- a/scene/animation/animation_player.cpp +++ b/scene/animation/animation_player.cpp @@ -416,8 +416,7 @@ void AnimationPlayer::_animation_process_animation(AnimationData *p_anim, float Animation::UpdateMode update_mode = a->value_track_get_update_mode(i); if (update_mode == Animation::UPDATE_CAPTURE) { - - if (p_started) { + if (p_started || pa->capture == Variant()) { pa->capture = pa->object->get_indexed(pa->subpath); } From 402a7e0094dbe69e294fee5e2e8677c84708d0e3 Mon Sep 17 00:00:00 2001 From: Fabio Alessandrelli Date: Sat, 6 Jun 2020 16:51:16 +0200 Subject: [PATCH 05/34] Fix editor crash when mbedtls is disabled. (cherry picked from commit 054f52364fe3c204ca65fca905a5bb3261315e47) --- core/io/dtls_server.cpp | 5 ++++- core/io/packet_peer_dtls.cpp | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/core/io/dtls_server.cpp b/core/io/dtls_server.cpp index 07e6abb1c9..aa960247ad 100644 --- a/core/io/dtls_server.cpp +++ b/core/io/dtls_server.cpp @@ -37,7 +37,10 @@ bool DTLSServer::available = false; DTLSServer *DTLSServer::create() { - return _create(); + if (_create) { + return _create(); + } + return NULL; } bool DTLSServer::is_available() { diff --git a/core/io/packet_peer_dtls.cpp b/core/io/packet_peer_dtls.cpp index 01218a6881..67e6095c7f 100644 --- a/core/io/packet_peer_dtls.cpp +++ b/core/io/packet_peer_dtls.cpp @@ -37,7 +37,10 @@ bool PacketPeerDTLS::available = false; PacketPeerDTLS *PacketPeerDTLS::create() { - return _create(); + if (_create) { + return _create(); + } + return NULL; } bool PacketPeerDTLS::is_available() { From b2c7f94ae287de9a5473b4093472d22ee8d6f58f Mon Sep 17 00:00:00 2001 From: Fabio Alessandrelli Date: Sat, 6 Jun 2020 15:47:51 +0200 Subject: [PATCH 06/34] Crypto as a custom instance class. 
(cherry picked from commit 87cc283b8b74f78ce58d8510558ff0a8a9e35482) --- core/crypto/crypto.cpp | 14 +------------- core/crypto/crypto.h | 6 +++--- 2 files changed, 4 insertions(+), 16 deletions(-) diff --git a/core/crypto/crypto.cpp b/core/crypto/crypto.cpp index 3bee34f8e4..c3a970251b 100644 --- a/core/crypto/crypto.cpp +++ b/core/crypto/crypto.cpp @@ -67,7 +67,7 @@ Crypto *(*Crypto::_create)() = NULL; Crypto *Crypto::create() { if (_create) return _create(); - return memnew(Crypto); + ERR_FAIL_V_MSG(NULL, "Crypto is not available when the mbedtls module is disabled."); } void Crypto::load_default_certificates(String p_path) { @@ -82,18 +82,6 @@ void Crypto::_bind_methods() { ClassDB::bind_method(D_METHOD("generate_self_signed_certificate", "key", "issuer_name", "not_before", "not_after"), &Crypto::generate_self_signed_certificate, DEFVAL("CN=myserver,O=myorganisation,C=IT"), DEFVAL("20140101000000"), DEFVAL("20340101000000")); } -PoolByteArray Crypto::generate_random_bytes(int p_bytes) { - ERR_FAIL_V_MSG(PoolByteArray(), "generate_random_bytes is not available when mbedtls module is disabled."); -} - -Ref Crypto::generate_rsa(int p_bytes) { - ERR_FAIL_V_MSG(NULL, "generate_rsa is not available when mbedtls module is disabled."); -} - -Ref Crypto::generate_self_signed_certificate(Ref p_key, String p_issuer_name, String p_not_before, String p_not_after) { - ERR_FAIL_V_MSG(NULL, "generate_self_signed_certificate is not available when mbedtls module is disabled."); -} - Crypto::Crypto() { } diff --git a/core/crypto/crypto.h b/core/crypto/crypto.h index 35d28d71d6..1a0c8a622b 100644 --- a/core/crypto/crypto.h +++ b/core/crypto/crypto.h @@ -76,9 +76,9 @@ public: static Crypto *create(); static void load_default_certificates(String p_path); - virtual PoolByteArray generate_random_bytes(int p_bytes); - virtual Ref generate_rsa(int p_bytes); - virtual Ref generate_self_signed_certificate(Ref p_key, String p_issuer_name, String p_not_before, String p_not_after); + virtual PoolByteArray generate_random_bytes(int p_bytes) = 0; + virtual Ref generate_rsa(int p_bytes) = 0; + virtual Ref generate_self_signed_certificate(Ref p_key, String p_issuer_name, String p_not_before, String p_not_after) = 0; Crypto(); }; From 221051d5234d386d512c759c8e4111ddddbbe254 Mon Sep 17 00:00:00 2001 From: Giuliano Barberi Date: Sat, 6 Jun 2020 17:02:22 -0400 Subject: [PATCH 07/34] Tree: Calling update in _gui_input less frequently (cherry picked from commit d443a13244093a3b094dc52b197f9abb363bdfee) --- scene/gui/tree.cpp | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/scene/gui/tree.cpp b/scene/gui/tree.cpp index 4e5804f1b9..6428da65be 100644 --- a/scene/gui/tree.cpp +++ b/scene/gui/tree.cpp @@ -2475,7 +2475,6 @@ void Tree::_gui_input(Ref p_event) { cache.hover_type = Cache::CLICK_TITLE; cache.hover_index = i; - update(); break; } } @@ -2494,6 +2493,9 @@ void Tree::_gui_input(Ref p_event) { if (v_scroll->is_visible_in_tree()) mpos.y += v_scroll->get_value(); + TreeItem *old_it = cache.hover_item; + int old_col = cache.hover_cell; + int col, h, section; TreeItem *it = _find_item_at_pos(root, mpos, col, h, section); @@ -2508,18 +2510,21 @@ void Tree::_gui_input(Ref p_event) { } } - if (it != cache.hover_item) { - cache.hover_item = it; - update(); - } + cache.hover_item = it; + cache.hover_cell = col; - if (it && col != cache.hover_cell) { - cache.hover_cell = col; - update(); + if (it != old_it || col != old_col) { + // Only need to update if mouse enters/exits a button + bool 
was_over_button = old_it && old_it->cells[old_col].custom_button; + bool is_over_button = it && it->cells[col].custom_button; + if (was_over_button || is_over_button) { + update(); + } } } } + // Update if mouse enters/exits columns if (cache.hover_type != old_hover || cache.hover_index != old_index) { update(); } From f1ca218ce790620b71f59941bc165f5f0d7e0907 Mon Sep 17 00:00:00 2001 From: Marcus Elg Date: Sun, 7 Jun 2020 14:52:05 +0200 Subject: [PATCH 08/34] Fix fbx import assimp error (cherry picked from commit 29abbccc5f96a53ef74dc9154b60ecc043c33d74) --- modules/assimp/editor_scene_importer_assimp.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/assimp/editor_scene_importer_assimp.cpp b/modules/assimp/editor_scene_importer_assimp.cpp index 806922886d..5f003b4918 100644 --- a/modules/assimp/editor_scene_importer_assimp.cpp +++ b/modules/assimp/editor_scene_importer_assimp.cpp @@ -147,7 +147,8 @@ Node *EditorSceneImporterAssimp::import_scene(const String &p_path, uint32_t p_f // aiProcess_EmbedTextures | //aiProcess_SplitByBoneCount | 0; - aiScene *scene = (aiScene *)importer.ReadFile(s_path.c_str(), post_process_Steps); + String g_path = ProjectSettings::get_singleton()->globalize_path(p_path); + aiScene *scene = (aiScene *)importer.ReadFile(g_path.utf8().ptr(), post_process_Steps); ERR_FAIL_COND_V_MSG(scene == NULL, NULL, String("Open Asset Import failed to open: ") + String(importer.GetErrorString())); From 56da70f2e880d04e14c20b7ad32b85c5d74c3020 Mon Sep 17 00:00:00 2001 From: unknown Date: Sun, 7 Jun 2020 22:56:26 +0530 Subject: [PATCH 09/34] Removed variables and #include in EditorSceneImporterAssimp::import_scene that became unused after the recent commit ec1bf96(#39363). (cherry picked from commit 365c35f30ea3b4749c8a0ca07db8464f9b6bc41c) --- modules/assimp/editor_scene_importer_assimp.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/modules/assimp/editor_scene_importer_assimp.cpp b/modules/assimp/editor_scene_importer_assimp.cpp index 5f003b4918..44df268602 100644 --- a/modules/assimp/editor_scene_importer_assimp.cpp +++ b/modules/assimp/editor_scene_importer_assimp.cpp @@ -44,7 +44,6 @@ #include #include #include -#include // move into assimp aiBone *get_bone_by_name(const aiScene *scene, aiString bone_name) { @@ -104,8 +103,6 @@ void EditorSceneImporterAssimp::_bind_methods() { Node *EditorSceneImporterAssimp::import_scene(const String &p_path, uint32_t p_flags, int p_bake_fps, List *r_missing_deps, Error *r_err) { Assimp::Importer importer; - std::wstring w_path = ProjectSettings::get_singleton()->globalize_path(p_path).c_str(); - std::string s_path(w_path.begin(), w_path.end()); importer.SetPropertyBool(AI_CONFIG_PP_FD_REMOVE, true); // Cannot remove pivot points because the static mesh will be in the wrong place importer.SetPropertyBool(AI_CONFIG_IMPORT_FBX_PRESERVE_PIVOTS, false); From eb6c7ec09cd03850fa09de69651b08fc153b62c1 Mon Sep 17 00:00:00 2001 From: Hugo Locurcio Date: Sun, 7 Jun 2020 15:34:01 +0200 Subject: [PATCH 10/34] Add an editor tooltip to document gizmo visibility options See discussion in https://github.com/godotengine/godot-proposals/issues/716. 
(cherry picked from commit 138a4eecb305016a686c43409c554331fcf7c3c7) --- editor/plugins/spatial_editor_plugin.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/editor/plugins/spatial_editor_plugin.cpp b/editor/plugins/spatial_editor_plugin.cpp index 59fe641c56..0b68e1b364 100644 --- a/editor/plugins/spatial_editor_plugin.cpp +++ b/editor/plugins/spatial_editor_plugin.cpp @@ -5380,6 +5380,9 @@ void SpatialEditor::_update_gizmos_menu() { const int plugin_state = gizmo_plugins_by_name[i]->get_state(); gizmos_menu->add_multistate_item(TTR(plugin_name), 3, plugin_state, i); const int idx = gizmos_menu->get_item_index(i); + gizmos_menu->set_item_tooltip( + idx, + TTR("Click to toggle between visibility states.\n\nOpen eye: Gizmo is visible.\nClosed eye: Gizmo is hidden.\nHalf-open eye: Gizmo is also visible through opaque surfaces (\"x-ray\").")); switch (plugin_state) { case EditorSpatialGizmoPlugin::VISIBLE: gizmos_menu->set_item_icon(idx, gizmos_menu->get_icon("visibility_visible")); From 0b6a4108902b3d08c4e90c91858c29f07440167a Mon Sep 17 00:00:00 2001 From: Aaron Franke Date: Sun, 7 Jun 2020 13:21:29 -0400 Subject: [PATCH 11/34] Change the default editor camera rotation to position it in +X +Y +Z (cherry picked from commit 6c2df6792b59fa6b84f99239797d1d54c8ded764) --- editor/plugins/spatial_editor_plugin.cpp | 6 +----- editor/plugins/spatial_editor_plugin.h | 4 +++- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/editor/plugins/spatial_editor_plugin.cpp b/editor/plugins/spatial_editor_plugin.cpp index 0b68e1b364..c42fef88a7 100644 --- a/editor/plugins/spatial_editor_plugin.cpp +++ b/editor/plugins/spatial_editor_plugin.cpp @@ -3407,11 +3407,7 @@ void SpatialEditorViewport::reset() { last_message = ""; name = ""; - cursor.x_rot = 0.5; - cursor.y_rot = 0.5; - cursor.distance = 4; - cursor.region_select = false; - cursor.pos = Vector3(); + cursor = Cursor(); _update_name(); } diff --git a/editor/plugins/spatial_editor_plugin.h b/editor/plugins/spatial_editor_plugin.h index aa45c7bbf5..c0b4cdadf8 100644 --- a/editor/plugins/spatial_editor_plugin.h +++ b/editor/plugins/spatial_editor_plugin.h @@ -375,7 +375,9 @@ private: Point2 region_begin, region_end; Cursor() { - x_rot = y_rot = 0.5; + // These rotations place the camera in +X +Y +Z, aka south east, facing north west. + x_rot = 0.5; + y_rot = -0.5; distance = 4; region_select = false; } From 3edae035d55b8232bdccff73b4db9401349cdb73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Mon, 8 Jun 2020 11:00:07 +0200 Subject: [PATCH 12/34] GDScript LSP: Fix crash in notify_client `latest_client_id` now defaults to `-1` (invalid ID) instead of `0`. Also fix typo in notification `gdscrip_client/changeWorkspace`, and fix argument names in method binds. Fixes #39375. 
(cherry picked from commit e34f33711b172a9fe6e5c562dee693ed467576be) --- .../language_server/gdscript_extend_parser.cpp | 1 + .../gdscript_language_protocol.cpp | 15 ++++++++++----- .../language_server/gdscript_language_protocol.h | 6 +++--- .../language_server/gdscript_language_server.cpp | 1 + .../language_server/gdscript_text_document.cpp | 1 + .../language_server/gdscript_workspace.cpp | 13 +++++++------ 6 files changed, 23 insertions(+), 14 deletions(-) diff --git a/modules/gdscript/language_server/gdscript_extend_parser.cpp b/modules/gdscript/language_server/gdscript_extend_parser.cpp index 0f6f13944b..121ccbd102 100644 --- a/modules/gdscript/language_server/gdscript_extend_parser.cpp +++ b/modules/gdscript/language_server/gdscript_extend_parser.cpp @@ -29,6 +29,7 @@ /*************************************************************************/ #include "gdscript_extend_parser.h" + #include "../gdscript.h" #include "core/io/json.h" #include "gdscript_language_protocol.h" diff --git a/modules/gdscript/language_server/gdscript_language_protocol.cpp b/modules/gdscript/language_server/gdscript_language_protocol.cpp index 2243a7b81d..b49bbf38d1 100644 --- a/modules/gdscript/language_server/gdscript_language_protocol.cpp +++ b/modules/gdscript/language_server/gdscript_language_protocol.cpp @@ -29,6 +29,7 @@ /*************************************************************************/ #include "gdscript_language_protocol.h" + #include "core/io/json.h" #include "core/os/copymem.h" #include "core/project_settings.h" @@ -160,7 +161,7 @@ void GDScriptLanguageProtocol::_bind_methods() { ClassDB::bind_method(D_METHOD("initialized", "params"), &GDScriptLanguageProtocol::initialized); ClassDB::bind_method(D_METHOD("on_client_connected"), &GDScriptLanguageProtocol::on_client_connected); ClassDB::bind_method(D_METHOD("on_client_disconnected"), &GDScriptLanguageProtocol::on_client_disconnected); - ClassDB::bind_method(D_METHOD("notify_client", "p_method", "p_params"), &GDScriptLanguageProtocol::notify_client, DEFVAL(Variant()), DEFVAL(-1)); + ClassDB::bind_method(D_METHOD("notify_client", "method", "params"), &GDScriptLanguageProtocol::notify_client, DEFVAL(Variant()), DEFVAL(-1)); ClassDB::bind_method(D_METHOD("is_smart_resolve_enabled"), &GDScriptLanguageProtocol::is_smart_resolve_enabled); ClassDB::bind_method(D_METHOD("get_text_document"), &GDScriptLanguageProtocol::get_text_document); ClassDB::bind_method(D_METHOD("get_workspace"), &GDScriptLanguageProtocol::get_workspace); @@ -188,8 +189,12 @@ Dictionary GDScriptLanguageProtocol::initialize(const Dictionary &p_params) { Dictionary params; params["path"] = workspace->root; - Dictionary request = make_notification("gdscrip_client/changeWorkspace", params); + Dictionary request = make_notification("gdscript_client/changeWorkspace", params); + ERR_FAIL_COND_V_MSG(latest_client_id == -1, ret.to_json(), + "GDScriptLanguageProtocol: Can't initialize as no client is connected."); + ERR_FAIL_INDEX_V_MSG((uint64_t)latest_client_id, clients.size(), ret.to_json(), + vformat("GDScriptLanguageProtocol: Can't initialize invalid peer '%d'.", latest_client_id)); Ref peer = clients.get(latest_client_id); if (peer != NULL) { String msg = JSON::print(request); @@ -271,8 +276,11 @@ void GDScriptLanguageProtocol::stop() { void GDScriptLanguageProtocol::notify_client(const String &p_method, const Variant &p_params, int p_client_id) { if (p_client_id == -1) { + ERR_FAIL_COND_MSG(latest_client_id == -1, + "GDScript LSP: Can't notify client as none was connected."); 
p_client_id = latest_client_id; } + ERR_FAIL_INDEX((uint64_t)p_client_id, clients.size()); Ref peer = clients.get(p_client_id); ERR_FAIL_COND(peer == NULL); @@ -293,13 +301,10 @@ bool GDScriptLanguageProtocol::is_goto_native_symbols_enabled() const { GDScriptLanguageProtocol::GDScriptLanguageProtocol() { server.instance(); singleton = this; - _initialized = false; workspace.instance(); text_document.instance(); set_scope("textDocument", text_document.ptr()); set_scope("completionItem", text_document.ptr()); set_scope("workspace", workspace.ptr()); workspace->root = ProjectSettings::get_singleton()->get_resource_path(); - latest_client_id = 0; - next_client_id = 0; } diff --git a/modules/gdscript/language_server/gdscript_language_protocol.h b/modules/gdscript/language_server/gdscript_language_protocol.h index 3f0ae36af2..5e1a5763b9 100644 --- a/modules/gdscript/language_server/gdscript_language_protocol.h +++ b/modules/gdscript/language_server/gdscript_language_protocol.h @@ -70,8 +70,8 @@ private: HashMap > clients; Ref server; - int latest_client_id; - int next_client_id; + int latest_client_id = -1; + int next_client_id = 0; Ref text_document; Ref workspace; @@ -82,7 +82,7 @@ private: String process_message(const String &p_text); String format_output(const String &p_text); - bool _initialized; + bool _initialized = false; protected: static void _bind_methods(); diff --git a/modules/gdscript/language_server/gdscript_language_server.cpp b/modules/gdscript/language_server/gdscript_language_server.cpp index 7170c63058..8a628a76c8 100644 --- a/modules/gdscript/language_server/gdscript_language_server.cpp +++ b/modules/gdscript/language_server/gdscript_language_server.cpp @@ -29,6 +29,7 @@ /*************************************************************************/ #include "gdscript_language_server.h" + #include "core/os/file_access.h" #include "core/os/os.h" #include "editor/editor_log.h" diff --git a/modules/gdscript/language_server/gdscript_text_document.cpp b/modules/gdscript/language_server/gdscript_text_document.cpp index d5723fd20f..32698ef74a 100644 --- a/modules/gdscript/language_server/gdscript_text_document.cpp +++ b/modules/gdscript/language_server/gdscript_text_document.cpp @@ -29,6 +29,7 @@ /*************************************************************************/ #include "gdscript_text_document.h" + #include "../gdscript.h" #include "core/os/os.h" #include "editor/editor_settings.h" diff --git a/modules/gdscript/language_server/gdscript_workspace.cpp b/modules/gdscript/language_server/gdscript_workspace.cpp index 205257b8f2..ea46b6a1ea 100644 --- a/modules/gdscript/language_server/gdscript_workspace.cpp +++ b/modules/gdscript/language_server/gdscript_workspace.cpp @@ -29,6 +29,7 @@ /*************************************************************************/ #include "gdscript_workspace.h" + #include "../gdscript.h" #include "../gdscript_parser.h" #include "core/project_settings.h" @@ -41,12 +42,12 @@ void GDScriptWorkspace::_bind_methods() { ClassDB::bind_method(D_METHOD("symbol"), &GDScriptWorkspace::symbol); - ClassDB::bind_method(D_METHOD("parse_script", "p_path", "p_content"), &GDScriptWorkspace::parse_script); - ClassDB::bind_method(D_METHOD("parse_local_script", "p_path"), &GDScriptWorkspace::parse_local_script); - ClassDB::bind_method(D_METHOD("get_file_path", "p_uri"), &GDScriptWorkspace::get_file_path); - ClassDB::bind_method(D_METHOD("get_file_uri", "p_path"), &GDScriptWorkspace::get_file_uri); - ClassDB::bind_method(D_METHOD("publish_diagnostics", "p_path"), 
&GDScriptWorkspace::publish_diagnostics); - ClassDB::bind_method(D_METHOD("generate_script_api", "p_path"), &GDScriptWorkspace::generate_script_api); + ClassDB::bind_method(D_METHOD("parse_script", "path", "content"), &GDScriptWorkspace::parse_script); + ClassDB::bind_method(D_METHOD("parse_local_script", "path"), &GDScriptWorkspace::parse_local_script); + ClassDB::bind_method(D_METHOD("get_file_path", "uri"), &GDScriptWorkspace::get_file_path); + ClassDB::bind_method(D_METHOD("get_file_uri", "path"), &GDScriptWorkspace::get_file_uri); + ClassDB::bind_method(D_METHOD("publish_diagnostics", "path"), &GDScriptWorkspace::publish_diagnostics); + ClassDB::bind_method(D_METHOD("generate_script_api", "path"), &GDScriptWorkspace::generate_script_api); } void GDScriptWorkspace::remove_cache_parser(const String &p_path) { From 1a1c30702d3d39629cead4955f986849d95963a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Mon, 8 Jun 2020 11:36:41 +0200 Subject: [PATCH 13/34] VariantParser: Fix crash on malformed vectors Each time `r_err_str` is set, we should return a parse error. Removed redundant `return OK;` which were already handled after the big `if`/`else if`/`else` for `TK_IDENTIFIER`. Part of #17372. (cherry picked from commit e7ebda975a4c9f9b8136a571df3c523931358f5b) --- core/variant_parser.cpp | 64 ++++++++--------------------------------- 1 file changed, 12 insertions(+), 52 deletions(-) diff --git a/core/variant_parser.cpp b/core/variant_parser.cpp index 89c5815ad9..1aa8298aff 100644 --- a/core/variant_parser.cpp +++ b/core/variant_parser.cpp @@ -485,13 +485,6 @@ Error VariantParser::_parse_construct(Stream *p_stream, Vector &r_construct, } Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, int &line, String &r_err_str, ResourceParser *p_res_parser) { - - /* { - Error err = get_token(p_stream,token,line,r_err_str); - if (err) - return err; - }*/ - if (token.type == TK_CURLY_BRACKET_OPEN) { Dictionary d; @@ -508,7 +501,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, return err; value = a; return OK; - } else if (token.type == TK_IDENTIFIER) { String id = token.value; @@ -531,10 +523,10 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 2) { r_err_str = "Expected 2 arguments for constructor"; + return ERR_PARSE_ERROR; } value = Vector2(args[0], args[1]); - return OK; } else if (id == "Rect2") { Vector args; @@ -544,10 +536,10 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 4) { r_err_str = "Expected 4 arguments for constructor"; + return ERR_PARSE_ERROR; } value = Rect2(args[0], args[1], args[2], args[3]); - return OK; } else if (id == "Vector3") { Vector args; @@ -557,12 +549,11 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 3) { r_err_str = "Expected 3 arguments for constructor"; + return ERR_PARSE_ERROR; } value = Vector3(args[0], args[1], args[2]); - return OK; } else if (id == "Transform2D" || id == "Matrix32") { //compatibility - Vector args; Error err = _parse_construct(p_stream, args, line, r_err_str); if (err) @@ -570,13 +561,14 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 6) { r_err_str = "Expected 6 arguments for constructor"; + return ERR_PARSE_ERROR; } + Transform2D m; m[0] = Vector2(args[0], args[1]); m[1] = Vector2(args[2], args[3]); m[2] = Vector2(args[4], args[5]); value = 
m; - return OK; } else if (id == "Plane") { Vector args; @@ -586,10 +578,10 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 4) { r_err_str = "Expected 4 arguments for constructor"; + return ERR_PARSE_ERROR; } value = Plane(args[0], args[1], args[2], args[3]); - return OK; } else if (id == "Quat") { Vector args; @@ -599,11 +591,10 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 4) { r_err_str = "Expected 4 arguments for constructor"; + return ERR_PARSE_ERROR; } value = Quat(args[0], args[1], args[2], args[3]); - return OK; - } else if (id == "AABB" || id == "Rect3") { Vector args; @@ -613,13 +604,11 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 6) { r_err_str = "Expected 6 arguments for constructor"; + return ERR_PARSE_ERROR; } value = AABB(Vector3(args[0], args[1], args[2]), Vector3(args[3], args[4], args[5])); - return OK; - } else if (id == "Basis" || id == "Matrix3") { //compatibility - Vector args; Error err = _parse_construct(p_stream, args, line, r_err_str); if (err) @@ -627,10 +616,10 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 9) { r_err_str = "Expected 9 arguments for constructor"; + return ERR_PARSE_ERROR; } value = Basis(args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8]); - return OK; } else if (id == "Transform") { Vector args; @@ -640,11 +629,10 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 12) { r_err_str = "Expected 12 arguments for constructor"; + return ERR_PARSE_ERROR; } value = Transform(Basis(args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8]), Vector3(args[9], args[10], args[11])); - return OK; - } else if (id == "Color") { Vector args; @@ -654,11 +642,10 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, if (args.size() != 4) { r_err_str = "Expected 4 arguments for constructor"; + return ERR_PARSE_ERROR; } value = Color(args[0], args[1], args[2], args[3]); - return OK; - } else if (id == "NodePath") { get_token(p_stream, token, line, r_err_str); @@ -680,7 +667,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, r_err_str = "Expected ')'"; return ERR_PARSE_ERROR; } - } else if (id == "RID") { get_token(p_stream, token, line, r_err_str); @@ -702,8 +688,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, r_err_str = "Expected ')'"; return ERR_PARSE_ERROR; } - - return OK; } else if (id == "Object") { get_token(p_stream, token, line, r_err_str); @@ -806,7 +790,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, at_key = true; } } - } else if (id == "Resource" || id == "SubResource" || id == "ExtResource") { get_token(p_stream, token, line, r_err_str); @@ -823,8 +806,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, return err; value = res; - - return OK; } else if (p_res_parser && id == "ExtResource" && p_res_parser->ext_func) { RES res; @@ -833,8 +814,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, return err; value = res; - - return OK; } else if (p_res_parser && id == "SubResource" && p_res_parser->sub_func) { RES res; @@ -843,8 +822,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, return 
err; value = res; - - return OK; } else { get_token(p_stream, token, line, r_err_str); @@ -863,8 +840,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, } value = res; - return OK; - } else { r_err_str = "Expected string as argument for Resource()."; return ERR_PARSE_ERROR; @@ -1059,8 +1034,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, } value = ie; - - return OK; #endif } else if (id == "PoolByteArray" || id == "ByteArray") { @@ -1081,8 +1054,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, value = arr; - return OK; - } else if (id == "PoolIntArray" || id == "IntArray") { Vector args; @@ -1102,8 +1073,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, value = arr; - return OK; - } else if (id == "PoolRealArray" || id == "FloatArray") { Vector args; @@ -1123,7 +1092,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, value = arr; - return OK; } else if (id == "PoolStringArray" || id == "StringArray") { get_token(p_stream, token, line, r_err_str); @@ -1173,8 +1141,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, value = arr; - return OK; - } else if (id == "PoolVector2Array" || id == "Vector2Array") { Vector args; @@ -1194,8 +1160,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, value = arr; - return OK; - } else if (id == "PoolVector3Array" || id == "Vector3Array") { Vector args; @@ -1215,8 +1179,6 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, value = arr; - return OK; - } else if (id == "PoolColorArray" || id == "ColorArray") { Vector args; @@ -1235,15 +1197,13 @@ Error VariantParser::parse_value(Token &token, Variant &value, Stream *p_stream, } value = arr; - - return OK; } else { r_err_str = "Unexpected identifier: '" + id + "'."; return ERR_PARSE_ERROR; } + // All above branches end up here unless they had an early return. return OK; - } else if (token.type == TK_NUMBER) { value = token.value; From f3fcdfbdd00a5fefc4c57bf0af6e72099b7134f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Mon, 8 Jun 2020 13:05:09 +0200 Subject: [PATCH 14/34] PackedScene: Prevent crash when root node has `parent` attribute The crash happens further down when setting an invalid owner in `Node::_set_owner_nocheck` but I couldn't figure out how to fix it. But here the proper fix is to catch the invalid scene file early on and fail loading it. Part of #17372. 
(cherry picked from commit c080ec5da220474a80789afa33cc4f5612cddb50) --- editor/editor_node.cpp | 4 ++-- scene/resources/packed_scene.cpp | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/editor/editor_node.cpp b/editor/editor_node.cpp index e017cc1702..05bbbe6d54 100644 --- a/editor/editor_node.cpp +++ b/editor/editor_node.cpp @@ -3426,13 +3426,13 @@ Error EditorNode::load_scene(const String &p_scene, bool p_ignore_broken_deps, b if (!new_scene) { sdata.unref(); - _dialog_display_load_error(lpath, ERR_FILE_NOT_FOUND); + _dialog_display_load_error(lpath, ERR_FILE_CORRUPT); opening_prev = false; if (prev != -1) { set_current_scene(prev); editor_data.remove_scene(idx); } - return ERR_FILE_NOT_FOUND; + return ERR_FILE_CORRUPT; } if (p_set_inherited) { diff --git a/scene/resources/packed_scene.cpp b/scene/resources/packed_scene.cpp index 3e7d350eec..c6f52d7087 100644 --- a/scene/resources/packed_scene.cpp +++ b/scene/resources/packed_scene.cpp @@ -101,6 +101,9 @@ Node *SceneState::instance(GenEditState p_edit_state) const { } #endif parent = nparent; + } else { + // i == 0 is root node. Confirm that it doesn't have a parent defined. + ERR_FAIL_COND_V_MSG(n.parent != -1, nullptr, vformat("Invalid scene: root node %s cannot specify a parent node.", snames[n.name])); } Node *node = NULL; From 2652a2d1843ed0e3bfa60ac3a23b960ccfc026a9 Mon Sep 17 00:00:00 2001 From: Michael Alexsander Date: Mon, 8 Jun 2020 09:41:45 -0300 Subject: [PATCH 15/34] Add generic file icon and its modulation to the 'FileDialog' (cherry picked from commit 637927f8035bbffc4ffb0a71115071eaa1a82f3b) --- doc/classes/FileDialog.xml | 6 ++++++ scene/gui/file_dialog.cpp | 5 +++++ scene/resources/default_theme/default_theme.cpp | 2 ++ scene/resources/default_theme/icon_file.png | Bin 0 -> 183 bytes scene/resources/default_theme/theme_data.h | 4 ++++ 5 files changed, 17 insertions(+) create mode 100644 scene/resources/default_theme/icon_file.png diff --git a/doc/classes/FileDialog.xml b/doc/classes/FileDialog.xml index 56d31dec66..16de84680a 100644 --- a/doc/classes/FileDialog.xml +++ b/doc/classes/FileDialog.xml @@ -132,6 +132,12 @@ + + Custom icon for files. + + + The color modulation applied to the file icon. + The color tint for disabled files (when the [FileDialog] is used in open folder mode). 
diff --git a/scene/gui/file_dialog.cpp b/scene/gui/file_dialog.cpp index 84584756e9..c2764ee4fc 100644 --- a/scene/gui/file_dialog.cpp +++ b/scene/gui/file_dialog.cpp @@ -426,7 +426,9 @@ void FileDialog::update_file_list() { TreeItem *root = tree->create_item(); Ref folder = get_icon("folder"); + Ref file_icon = get_icon("file"); const Color folder_color = get_color("folder_icon_modulate"); + const Color file_color = get_color("file_icon_modulate"); List files; List dirs; @@ -521,7 +523,10 @@ void FileDialog::update_file_list() { Ref icon = get_icon_func(base_dir.plus_file(files.front()->get())); ti->set_icon(0, icon); + } else { + ti->set_icon(0, file_icon); } + ti->set_icon_modulate(0, file_color); if (mode == MODE_OPEN_DIR) { ti->set_custom_color(0, get_color("files_disabled")); diff --git a/scene/resources/default_theme/default_theme.cpp b/scene/resources/default_theme/default_theme.cpp index 18bf7bc2f2..3ab77aaa5f 100644 --- a/scene/resources/default_theme/default_theme.cpp +++ b/scene/resources/default_theme/default_theme.cpp @@ -759,7 +759,9 @@ void fill_default_theme(Ref &theme, const Ref &default_font, const // FileDialog theme->set_icon("folder", "FileDialog", make_icon(icon_folder_png)); + theme->set_icon("file", "FileDialog", make_icon(icon_file_png)); theme->set_color("folder_icon_modulate", "FileDialog", Color(1, 1, 1)); + theme->set_color("file_icon_modulate", "FileDialog", Color(1, 1, 1)); theme->set_color("files_disabled", "FileDialog", Color(0, 0, 0, 0.7)); // ColorPicker diff --git a/scene/resources/default_theme/icon_file.png b/scene/resources/default_theme/icon_file.png new file mode 100644 index 0000000000000000000000000000000000000000..bb4c361a8dea748ea6a4452b4a360475c9fa9843 GIT binary patch literal 183 zcmeAS@N?(olHy`uVBq!ia0vp^0wB!93?!50ihlx9oCO|{#S9F5he4R}c>anMprB-l zYeY$Kep*R+Vo@qXd3m{BW?pu2a$-TMUVc&f>~}U&Ae{j|A+A9B{{8z89z1a3KkfGMHExrkY4R UiF5m|4phwG>FVdQ&MBb@01+}Y>i_@% literal 0 HcmV?d00001 diff --git a/scene/resources/default_theme/theme_data.h b/scene/resources/default_theme/theme_data.h index 0a4e557451..edcdb90db9 100644 --- a/scene/resources/default_theme/theme_data.h +++ b/scene/resources/default_theme/theme_data.h @@ -150,6 +150,10 @@ static const unsigned char icon_color_pick_png[] = { 0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa, 0x0, 0x0, 0x0, 0xd, 0x49, 0x48, 0x44, 0x52, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x10, 0x8, 0x4, 0x0, 0x0, 0x0, 0xb5, 0xfa, 0x37, 0xea, 0x0, 0x0, 0x0, 0xaa, 0x49, 0x44, 0x41, 0x54, 0x78, 0xda, 0x9d, 0x8e, 0x35, 0x82, 0x2, 0x41, 0x10, 0x45, 0x3b, 0xda, 0x3d, 0xca, 0xba, 0x44, 0x2b, 0x70, 0x9, 0xdc, 0xe1, 0x20, 0xe8, 0x91, 0x90, 0x78, 0x6e, 0x40, 0x4c, 0x82, 0x74, 0xff, 0xc2, 0x9d, 0x18, 0xa7, 0x6, 0x77, 0x7b, 0x23, 0x2d, 0xaf, 0x4c, 0xdc, 0xc, 0xbd, 0x65, 0x1e, 0x84, 0x80, 0x19, 0x55, 0x34, 0x60, 0x3e, 0xd0, 0xea, 0x17, 0x3d, 0x4a, 0xc8, 0x80, 0x1a, 0x60, 0xc2, 0x4f, 0xfd, 0x30, 0xe0, 0x1b, 0x2d, 0x16, 0xab, 0xa7, 0x2c, 0xe, 0x41, 0x68, 0xa5, 0xb9, 0xca, 0x91, 0x16, 0x2e, 0x54, 0xe0, 0x59, 0x54, 0x91, 0xfe, 0xa3, 0x3a, 0xff, 0xce, 0xab, 0x5b, 0xf, 0xa0, 0x4, 0x8f, 0x7b, 0x4c, 0xd3, 0x1b, 0xca, 0x32, 0xcc, 0x55, 0x7a, 0xf4, 0x76, 0x42, 0x2b, 0x97, 0x3e, 0xae, 0xfa, 0xdd, 0xd2, 0xd2, 0x8e, 0x72, 0xe1, 0x83, 0xaf, 0x9f, 0xa9, 0x28, 0x7d, 0x5b, 0xe2, 0x2a, 0xd, 0xc3, 0xa2, 0x78, 0xfe, 0x7d, 0x51, 0xfc, 0x0, 0x8a, 0x41, 0xcb, 0x3d, 0xb2, 0xae, 0x1c, 0xd3, 0xc, 0xa5, 0x30, 0x81, 0xc6, 0xda, 0x29, 0x8e, 0x83, 0x34, 0x25, 0x29, 0x4a, 0x46, 0x71, 0x1f, 0x33, 0xbe, 0x51, 0x89, 0xaf, 0x78, 0xe3, 0x97, 0x7e, 0x0, 0x0, 0x0, 0x0, 0x49, 0x45, 
0x4e, 0x44, 0xae, 0x42, 0x60, 0x82 }; +static const unsigned char icon_file_png[] = { + 0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa, 0x0, 0x0, 0x0, 0xd, 0x49, 0x48, 0x44, 0x52, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x10, 0x2, 0x3, 0x0, 0x0, 0x0, 0x62, 0x9d, 0x17, 0xf2, 0x0, 0x0, 0x0, 0x9, 0x70, 0x48, 0x59, 0x73, 0x0, 0x0, 0xe, 0xc3, 0x0, 0x0, 0xe, 0xc3, 0x1, 0xc7, 0x6f, 0xa8, 0x64, 0x0, 0x0, 0x0, 0x19, 0x74, 0x45, 0x58, 0x74, 0x53, 0x6f, 0x66, 0x74, 0x77, 0x61, 0x72, 0x65, 0x0, 0x77, 0x77, 0x77, 0x2e, 0x69, 0x6e, 0x6b, 0x73, 0x63, 0x61, 0x70, 0x65, 0x2e, 0x6f, 0x72, 0x67, 0x9b, 0xee, 0x3c, 0x1a, 0x0, 0x0, 0x0, 0x9, 0x50, 0x4c, 0x54, 0x45, 0x0, 0x0, 0x0, 0xdf, 0xdf, 0xdf, 0xe0, 0xe0, 0xe0, 0x42, 0xf, 0xc7, 0x49, 0x0, 0x0, 0x0, 0x2, 0x74, 0x52, 0x4e, 0x53, 0x0, 0x88, 0x95, 0xf0, 0xc6, 0x2a, 0x0, 0x0, 0x0, 0x21, 0x49, 0x44, 0x41, 0x54, 0x8, 0xd7, 0x63, 0x60, 0x0, 0x1, 0xae, 0x55, 0x2d, 0x20, 0xa2, 0x13, 0x44, 0x74, 0x39, 0x80, 0x88, 0x9, 0x40, 0xa2, 0x1, 0xc4, 0x5d, 0xb5, 0x80, 0x68, 0x2, 0x4, 0x0, 0x95, 0x34, 0x18, 0xe4, 0x5e, 0x46, 0xf7, 0x27, 0x0, 0x0, 0x0, 0x0, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82 +}; + static const unsigned char icon_folder_png[] = { 0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa, 0x0, 0x0, 0x0, 0xd, 0x49, 0x48, 0x44, 0x52, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x10, 0x8, 0x4, 0x0, 0x0, 0x0, 0xb5, 0xfa, 0x37, 0xea, 0x0, 0x0, 0x0, 0x2e, 0x49, 0x44, 0x41, 0x54, 0x78, 0xda, 0x63, 0xa0, 0x6, 0x78, 0x70, 0xf4, 0xc1, 0x7f, 0x24, 0x78, 0x18, 0x53, 0xc1, 0x7f, 0x54, 0x48, 0x50, 0xc1, 0x43, 0x1b, 0xbc, 0xa, 0x50, 0xad, 0x23, 0xa4, 0xe0, 0xff, 0x70, 0x52, 0x70, 0x18, 0x97, 0xf4, 0xfd, 0x43, 0xd4, 0x88, 0x4a, 0x0, 0x5a, 0xcb, 0x18, 0xab, 0x5e, 0xd9, 0x1a, 0x53, 0x0, 0x0, 0x0, 0x0, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82 }; From ebb30ac45a9176f52d8bd64b1dd5a7175eaa598c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Mon, 8 Jun 2020 16:39:08 +0200 Subject: [PATCH 16/34] Core: Add hints to run with --verbose when leaking nodes/resources at exit (cherry picked from commit 2b5545270a60a1a60f57c91bb565131dc03de74d) --- core/object.cpp | 5 +++-- core/resource.cpp | 21 +++++++++++---------- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/core/object.cpp b/core/object.cpp index d5f98ba4ba..3acb8a3441 100644 --- a/core/object.cpp +++ b/core/object.cpp @@ -2132,7 +2132,7 @@ void ObjectDB::cleanup() { rw_lock->write_lock(); if (instances.size()) { - WARN_PRINT("ObjectDB Instances still exist!"); + WARN_PRINT("ObjectDB instances leaked at exit (run with --verbose for details)."); if (OS::get_singleton()->is_stdout_verbose()) { const ObjectID *K = NULL; while ((K = instances.next(K))) { @@ -2141,9 +2141,10 @@ void ObjectDB::cleanup() { if (instances[*K]->is_class("Node")) node_name = " - Node name: " + String(instances[*K]->call("get_name")); if (instances[*K]->is_class("Resource")) - node_name = " - Resource name: " + String(instances[*K]->call("get_name")) + " Path: " + String(instances[*K]->call("get_path")); + node_name = " - Resource path: " + String(instances[*K]->call("get_path")); print_line("Leaked instance: " + String(instances[*K]->get_class()) + ":" + itos(*K) + node_name); } + print_line("Hint: Leaked instances typically happen when nodes are removed from the scene tree (with `remove_child()`) but not freed (with `free()` or `queue_free()`)."); } } instances.clear(); diff --git a/core/resource.cpp b/core/resource.cpp index 30e09716aa..26161a4ac3 100644 --- a/core/resource.cpp +++ b/core/resource.cpp @@ -33,6 +33,7 @@ #include 
"core/core_string_names.h" #include "core/io/resource_loader.h" #include "core/os/file_access.h" +#include "core/os/os.h" #include "core/script_language.h" #include "scene/main/node.h" //only so casting works @@ -472,21 +473,22 @@ void ResourceCache::setup() { } void ResourceCache::clear() { - if (resources.size()) - ERR_PRINT("Resources Still in use at Exit!"); + if (resources.size()) { + ERR_PRINT("Resources still in use at exit (run with --verbose for details)."); + if (OS::get_singleton()->is_stdout_verbose()) { + const String *K = nullptr; + while ((K = resources.next(K))) { + Resource *r = resources[*K]; + print_line(vformat("Resource still in use: %s (%s)", *K, r->get_class())); + } + } + } resources.clear(); memdelete(lock); } void ResourceCache::reload_externals() { - - /* - const String *K=NULL; - while ((K=resources.next(K))) { - resources[*K]->reload_external_data(); - } - */ } bool ResourceCache::has(const String &p_path) { @@ -573,6 +575,5 @@ void ResourceCache::dump(const char *p_file, bool p_short) { } lock->read_unlock(); - #endif } From 317c9b5fe90b31f8f5176e3941fb5aa3f9da85e8 Mon Sep 17 00:00:00 2001 From: Hugo Locurcio Date: Tue, 9 Jun 2020 10:24:13 +0200 Subject: [PATCH 17/34] Tweak the invalid preset error message to mention `export_presets.cfg` (cherry picked from commit a7b2f3d41aa47d64ad783d5afb35d08e706bf8ea) --- editor/editor_node.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/editor/editor_node.cpp b/editor/editor_node.cpp index 05bbbe6d54..d858002f79 100644 --- a/editor/editor_node.cpp +++ b/editor/editor_node.cpp @@ -593,7 +593,9 @@ void EditorNode::_fs_changed() { preset.unref(); } if (preset.is_null()) { - export_error = vformat("Invalid export preset name: %s.", preset_name); + export_error = vformat( + "Invalid export preset name: %s. Make sure `export_presets.cfg` is present in the current directory.", + preset_name); } else { Ref platform = preset->get_platform(); if (platform.is_null()) { From 51de6732c90b1ad7556aea1f2c26a7e206335940 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Tue, 9 Jun 2020 10:26:21 +0200 Subject: [PATCH 18/34] AStar: Make get_closest_point() deterministic for equidistant points Closes godotengine/godot-docs#3667. Supersedes #39405. (cherry picked from commit 187ba4c5a884aaecd97febcdfaaa76466820be07) --- core/math/a_star.cpp | 14 +++++++++----- doc/classes/AStar.xml | 3 ++- doc/classes/AStar2D.xml | 3 ++- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/core/math/a_star.cpp b/core/math/a_star.cpp index df77dd9494..21523963c1 100644 --- a/core/math/a_star.cpp +++ b/core/math/a_star.cpp @@ -292,10 +292,16 @@ int AStar::get_closest_point(const Vector3 &p_point, bool p_include_disabled) co if (!p_include_disabled && !(*it.value)->enabled) continue; // Disabled points should not be considered. + // Keep the closest point's ID, and in case of multiple closest IDs, + // the smallest one (makes it deterministic). real_t d = p_point.distance_squared_to((*it.value)->pos); - if (closest_id < 0 || d < closest_dist) { + int id = *(it.key); + if (d <= closest_dist) { + if (d == closest_dist && id > closest_id) { // Keep lowest ID. 
+ continue; + } closest_dist = d; - closest_id = *(it.key); + closest_id = id; } } @@ -304,7 +310,6 @@ int AStar::get_closest_point(const Vector3 &p_point, bool p_include_disabled) co Vector3 AStar::get_closest_position_in_segment(const Vector3 &p_point) const { - bool found = false; real_t closest_dist = 1e20; Vector3 closest_point; @@ -325,11 +330,10 @@ Vector3 AStar::get_closest_position_in_segment(const Vector3 &p_point) const { Vector3 p = Geometry::get_closest_point_to_segment(p_point, segment); real_t d = p_point.distance_squared_to(p); - if (!found || d < closest_dist) { + if (d < closest_dist) { closest_point = p; closest_dist = d; - found = true; } } diff --git a/doc/classes/AStar.xml b/doc/classes/AStar.xml index 22c22b0c16..0383196472 100644 --- a/doc/classes/AStar.xml +++ b/doc/classes/AStar.xml @@ -131,7 +131,8 @@ - Returns the ID of the closest point to [code]to_position[/code], optionally taking disabled points into account. Returns -1 if there are no points in the points pool. + Returns the ID of the closest point to [code]to_position[/code], optionally taking disabled points into account. Returns [code]-1[/code] if there are no points in the points pool. + [b]Note:[/b] If several points are the closest to [code]to_position[/code], the one with the smallest ID will be returned, ensuring a deterministic result. diff --git a/doc/classes/AStar2D.xml b/doc/classes/AStar2D.xml index a8a5b34f63..b313597d14 100644 --- a/doc/classes/AStar2D.xml +++ b/doc/classes/AStar2D.xml @@ -114,7 +114,8 @@ - Returns the ID of the closest point to [code]to_position[/code], optionally taking disabled points into account. Returns -1 if there are no points in the points pool. + Returns the ID of the closest point to [code]to_position[/code], optionally taking disabled points into account. Returns [code]-1[/code] if there are no points in the points pool. + [b]Note:[/b] If several points are the closest to [code]to_position[/code], the one with the smallest ID will be returned, ensuring a deterministic result. From 7200a0eac59bed4b974b2d2011b7299531c3dfbe Mon Sep 17 00:00:00 2001 From: Marcel Admiraal Date: Tue, 9 Jun 2020 10:40:45 +0100 Subject: [PATCH 19/34] Update Rigidbody 2D and 3D sleep documentation. (cherry picked from commit 1b738a77c15cf3a462ffc8bc509f511d994fc488) --- doc/classes/RigidBody.xml | 7 ++++--- doc/classes/RigidBody2D.xml | 7 ++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/doc/classes/RigidBody.xml b/doc/classes/RigidBody.xml index b05291e51a..2ae6ec1a55 100644 --- a/doc/classes/RigidBody.xml +++ b/doc/classes/RigidBody.xml @@ -151,7 +151,7 @@ Deprecated, use [member PhysicsMaterial.bounce] instead via [member physics_material_override]. - If [code]true[/code], the RigidBody will not calculate forces and will act as a static body while there is no movement. It will wake up when forces are applied through other collisions or when the [code]apply_impulse[/code] method is used. + If [code]true[/code], the body can enter sleep mode when there is no movement. See [member sleeping]. If [code]true[/code], the RigidBody will emit signals when it collides with another RigidBody. @@ -190,7 +190,7 @@ If a material is assigned to this property, it will be used instead of any other physics material, such as an inherited one. - If [code]true[/code], the body is sleeping and will not calculate forces until woken up by a collision or the [code]apply_impulse[/code] method. 
+ If [code]true[/code], the body will not move and will not calculate forces until woken up by another body through, for example, a collision, or by using the [method apply_impulse] or [method add_force] methods. The body's weight based on its mass and the global 3D gravity. Global values are set in [b]Project > Project Settings > Physics > 3d[/b]. @@ -241,7 +241,8 @@ - Emitted when the body changes its sleeping state. Either by sleeping or waking up. + Emitted when the physics engine changes the body's sleeping state. + [b]Note:[/b] Changing the value [member sleeping] will not trigger this signal. It is only emitted if the sleeping state is changed by the physics engine or [code]emit_signal("sleeping_state_changed")[/code] is used. diff --git a/doc/classes/RigidBody2D.xml b/doc/classes/RigidBody2D.xml index 6751b840af..60f1e151da 100644 --- a/doc/classes/RigidBody2D.xml +++ b/doc/classes/RigidBody2D.xml @@ -131,7 +131,7 @@ Deprecated, use [member PhysicsMaterial.bounce] instead via [member physics_material_override]. - If [code]true[/code], the body will not calculate forces and will act as a static body if there is no movement. The body will wake up when other forces are applied via collisions or by using [method apply_impulse] or [method add_force]. + If [code]true[/code], the body can enter sleep mode when there is no movement. See [member sleeping]. If [code]true[/code], the body will emit signals when it collides with another RigidBody2D. See also [member contacts_reported]. @@ -173,7 +173,7 @@ If a material is assigned to this property, it will be used instead of any other physics material, such as an inherited one. - If [code]true[/code], the body is sleeping and will not calculate forces until woken up by a collision or by using [method apply_impulse] or [method add_force]. + If [code]true[/code], the body will not move and will not calculate forces until woken up by another body through, for example, a collision, or by using the [method apply_impulse] or [method add_force] methods. The body's weight based on its mass and the [b]Default Gravity[/b] value in [b]Project > Project Settings > Physics > 2d[/b]. @@ -222,7 +222,8 @@ - Emitted when [member sleeping] changes. + Emitted when the physics engine changes the body's sleeping state. + [b]Note:[/b] Changing the value [member sleeping] will not trigger this signal. It is only emitted if the sleeping state is changed by the physics engine or [code]emit_signal("sleeping_state_changed")[/code] is used. From 3202df9b5cf56b79eef067772a2e3928a8606724 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Tue, 9 Jun 2020 13:05:58 +0200 Subject: [PATCH 20/34] doc: Make File store/get integer methods clearer Add an example on how to store signed integers on less than 64 bits, using one bit for the signedness. (cherry picked from commit cd25d184a5f547d7e7b6332a255908aa00f4ddc7) --- doc/classes/File.xml | 35 ++++++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/doc/classes/File.xml b/doc/classes/File.xml index 735bf69aa0..dbba026746 100644 --- a/doc/classes/File.xml +++ b/doc/classes/File.xml @@ -54,28 +54,28 @@ - Returns the next 16 bits from the file as an integer. + Returns the next 16 bits from the file as an integer. See [method store_16] for details on what values can be stored and retrieved this way. - Returns the next 32 bits from the file as an integer. + Returns the next 32 bits from the file as an integer. 
See [method store_32] for details on what values can be stored and retrieved this way. - Returns the next 64 bits from the file as an integer. + Returns the next 64 bits from the file as an integer. See [method store_64] for details on what values can be stored and retrieved this way. - Returns the next 8 bits from the file as an integer. + Returns the next 8 bits from the file as an integer. See [method store_8] for details on what values can be stored and retrieved this way. @@ -297,7 +297,26 @@ Stores an integer as 16 bits in the file. - [b]Note:[/b] The [code]value[/code] should lie in the interval [code][0, 2^16 - 1][/code]. + [b]Note:[/b] The [code]value[/code] should lie in the interval [code][0, 2^16 - 1][/code]. Any other value will overflow and wrap around. + To store a signed integer, use [method store_64] or store a signed integer from the interval [code][-2^15, 2^15 - 1][/code] (i.e. keeping one bit for the signedness) and compute its sign manually when reading. For example: + [codeblock] + const MAX_15B = 1 << 15 + const MAX_16B = 1 << 16 + + func unsigned16_to_signed(unsigned): + return (unsigned + MAX_15B) % MAX_16B - MAX_15B + + func _ready(): + var f = File.new() + f.open("user://file.dat", File.WRITE_READ) + f.store_16(-42) # This wraps around and stores 65494 (2^16 - 42). + f.store_16(121) # In bounds, will store 121. + f.seek(0) # Go back to start to read the stored value. + var read1 = f.get_16() # 65494 + var read2 = f.get_16() # 121 + var converted1 = unsigned16_to_signed(read1) # -42 + var converted2 = unsigned16_to_signed(read2) # 121 + [/codeblock] @@ -307,7 +326,8 @@ Stores an integer as 32 bits in the file. - [b]Note:[/b] The [code]value[/code] should lie in the interval [code][0, 2^32 - 1][/code]. + [b]Note:[/b] The [code]value[/code] should lie in the interval [code][0, 2^32 - 1][/code]. Any other value will overflow and wrap around. + To store a signed integer, use [method store_64], or convert it manually (see [method store_16] for an example). @@ -327,7 +347,8 @@ Stores an integer as 8 bits in the file. - [b]Note:[/b] The [code]value[/code] should lie in the interval [code][0, 255][/code]. + [b]Note:[/b] The [code]value[/code] should lie in the interval [code][0, 255][/code]. Any other value will overflow and wrap around. + To store a signed integer, use [method store_64], or convert it manually (see [method store_16] for an example). 
From 12786bcbdbffbba0674bf1cb6b812911be745eb9 Mon Sep 17 00:00:00 2001 From: Phischermen Date: Tue, 9 Jun 2020 12:59:59 -0700 Subject: [PATCH 21/34] Fix crash when creating new text file with no name (cherry picked from commit cdb29447b4da77bebc7adc944fe10ef7ea580ebf) --- editor/plugins/script_editor_plugin.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/editor/plugins/script_editor_plugin.cpp b/editor/plugins/script_editor_plugin.cpp index c03f901e80..17b6f283db 100644 --- a/editor/plugins/script_editor_plugin.cpp +++ b/editor/plugins/script_editor_plugin.cpp @@ -902,7 +902,6 @@ void ScriptEditor::_file_dialog_action(String p_file) { Error err; FileAccess *file = FileAccess::open(p_file, FileAccess::WRITE, &err); if (err) { - memdelete(file); editor->show_warning(TTR("Error writing TextFile:") + "\n" + p_file, TTR("Error!")); break; } From 5495bbdd470581458e79d77be6f9a66cec2bcbf6 Mon Sep 17 00:00:00 2001 From: Michael Alexsander Date: Tue, 9 Jun 2020 19:06:15 -0300 Subject: [PATCH 22/34] Fix scene tree showing up when the root selection is present (cherry picked from commit f30e4dbf54397646a81b45ab64978e22d4a5c8d9) --- editor/scene_tree_dock.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/editor/scene_tree_dock.cpp b/editor/scene_tree_dock.cpp index b2f290e435..e0326c77e4 100644 --- a/editor/scene_tree_dock.cpp +++ b/editor/scene_tree_dock.cpp @@ -2696,7 +2696,9 @@ void SceneTreeDock::_remote_tree_selected() { void SceneTreeDock::_local_tree_selected() { - scene_tree->show(); + if (!bool(EDITOR_GET("interface/editors/show_scene_tree_root_selection")) || get_tree()->get_edited_scene_root() != nullptr) { + scene_tree->show(); + } if (remote_tree) remote_tree->hide(); edit_remote->set_pressed(false); From 8e8861ed065d9d21d5dba7445b78e18a8cc69dcf Mon Sep 17 00:00:00 2001 From: Hugo Locurcio Date: Wed, 10 Jun 2020 09:33:43 +0200 Subject: [PATCH 23/34] Mention ordering caveats for `Dictionary.hash()` See https://github.com/godotengine/godot/issues/27615. (cherry picked from commit 77b89263dd37299c3034327a99374d114b42706d) --- doc/classes/Dictionary.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/classes/Dictionary.xml b/doc/classes/Dictionary.xml index d6497be793..32a3ba89fe 100644 --- a/doc/classes/Dictionary.xml +++ b/doc/classes/Dictionary.xml @@ -146,6 +146,7 @@ # The line below prints `true`, whereas it would have printed `false` if both variables were compared directly. print(dict1.hash() == dict2.hash()) [/codeblock] + [b]Note:[/b] Dictionaries with the same keys/values but in a different order will have a different hash. From c39c3e323766c092f8fe447f130447a4b35a65e9 Mon Sep 17 00:00:00 2001 From: Hugo Locurcio Date: Thu, 30 Jan 2020 01:29:54 +0100 Subject: [PATCH 24/34] Improve `CONTRIBUTING.md` and update it to follow recent changes - Improve the bug reporting guidelines for readability. - Make some guidelines more strictly applied in the interest of bug report quality. - Mention that feature proposals should now be opened on the Godot Proposals repository. - Update the Git commit message guide to be easier to follow. - Fix warnings reported by markdownlint. 
(cherry picked from commit 7b5c502ba5c69c68003441a425981a69ca4876c2) --- CONTRIBUTING.md | 147 ++++++++++++++++++++++++++++++------------------ 1 file changed, 91 insertions(+), 56 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b1afaaccfd..ba04008680 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,70 +1,106 @@ # How to contribute efficiently -Sections covered in this file: +## Table of contents -* [Reporting bugs or proposing features](#reporting-bugs-or-proposing-features) -* [Contributing pull requests](#contributing-pull-requests) -* [Contributing to Godot's translation](#contributing-to-godots-translation) -* [Communicating with developers](#communicating-with-developers) +- [Reporting bugs](#reporting-bugs) +- [Proposing features or improvements](#proposing-features-or-improvements) +- [Contributing pull requests](#contributing-pull-requests) +- [Contributing to Godot's translation](#contributing-to-godots-translation) +- [Communicating with developers](#communicating-with-developers) **Please read the first section before reporting a bug!** -## Reporting bugs or proposing features +## Reporting bugs The golden rule is to **always open *one* issue for *one* bug**. If you notice several bugs and want to report them, make sure to create one new issue for each of them. -Everything referred to hereafter as "bug" also applies for feature requests. +If you're reporting a new bug, you'll make our life simpler (and the +fix will come sooner) by following these guidelines: -If you are reporting a new issue, you will make our life much simpler (and the -fix come much sooner) by following these guidelines: +### Search first in the existing database -#### Search first in the existing database +Issues are often reported several times by various users. It's good practice to +**search first in the [issue tracker](https://github.com/godotengine/godot/issues) +before reporting your issue**. If you don't find a relevant match or if you're +unsure, don't hesitate to **open a new issue**. The bugsquad will handle it +from there if it's a duplicate. -Issues are often reported several times by various users. It's a good practice -to **search first** in the issues database before reporting your issue. If you -don't find a relevant match or if you are unsure, don't hesitate to **open a -new issue**. The bugsquad will handle it from there if it's a duplicate. - -#### Specify the platform +### Specify the platform Godot runs on a large variety of platforms and operating systems and devices. -If you believe your issue is device/platform dependent (for example if it is -related to the rendering, crashes or compilation errors), please specify: -* Operating system -* Device (including architecture, e.g. x86, x86_64, arm, etc.) -* GPU model (and driver in use if you know it) +**In your bug reports, please always specify:** -#### Specify steps to reproduce +- Operating system and version (e.g. Windows 10, macOS 10.15, Ubuntu 19.10) +- Godot version (e.g. 3.2, 3.1.2, or the Git commit hash if you're using a development branch) + +For bugs that are likely OS-specific and/or graphics-related, please also specify: + +- Device (CPU model including architecture, e.g. x86, x86_64, ARM, etc.) +- GPU model (and the driver version in use if you know it) + +**Bug reports not including the required information may be closed at the +maintainers' discretion.** If in doubt, always include all the requested +information; it's better to include too much information than not enough +information. 
+ +### Specify steps to reproduce Many bugs can't be reproduced unless specific steps are taken. Please **specify the exact steps** that must be taken to reproduce the condition, and try to -keep them as minimal as possible. +keep them as minimal as possible. If you're describing a procedure to follow +in the editor, don't hesitate to include screenshots. -#### Provide a simple, example project +Making your bug report easy to reproduce will make it easier for contributors +to fix the bug. -Sometimes an unexpected behavior happens in your project. In such case, +### Provide a simple, example project + +Sometimes, unexpected behavior can happen in your project. In such case, understand that: -* What happens to you may not happen to other users. -* We can't take the time to look at your project, understand how it is set up + +- What happens to you may not happen to other users. +- We can't take the time to look at your project, understand how it is set up and then figure out why it's failing. -To speed up our work, please prepare for us **a simple project** that isolates +To speed up our work, **please upload a minimal project** that isolates and reproduces the issue. This is always the **best way for us to fix it**. -You can attach a zip file with the minimal project directly to the bug report, +You can attach a ZIP file with the minimal project directly to the bug report, by drag and dropping the file in the GitHub edition field. +We recommend always attaching a minimal reproduction project, even if the issue +may seem simple to reproduce manually. + +**If you've been asked by a maintainer to upload a minimal reproduction project, +you *must* do so within 7 days.** Otherwise, your bug report will be closed as +it'll be considered too difficult to diagnose. + +Now that you've read the guidelines, click the link below to create a +bug report: + +- **[Report a bug](https://github.com/godotengine/godot/issues/new?assignees=&labels=&template=bug_report.md&title=)** + +## Proposing features or improvements + +**Since August 2019, the main issue tracker no longer accepts feature proposals.** +Instead, head to the [Godot Proposals repository](https://github.com/godotengine/godot-proposals) +and follow the instructions in the README file. High-quality feature proposals +are more likely to be well-received by the maintainers and community, so do +your best :) + +See [this article](https://godotengine.org/article/introducing-godot-proposals-repository) +for detailed rationale on this change. + ## Contributing pull requests -If you want to add new engine functionalities, please make sure that: +If you want to add new engine features, please make sure that: -* This functionality is desired, which means that it solves a common use case +- This functionality is desired, which means that it solves a common use case that several users will need in their real-life projects. -* You talked to other developers on how to implement it best (on either - communication channel, and maybe in a GitHub issue first before making your - PR). -* Even if it does not get merged, your PR is useful for future work by another +- You talked to other developers on how to implement it best. See also + [Proposing features or improvements](#proposing-features-or-improvements). +- Even if it doesn't get merged, your PR is useful for future work by another developer. Similar rules can be applied when contributing bug fixes - it's always best to @@ -83,7 +119,7 @@ for an introduction to developing on Godot. 
The [Contributing docs](https://docs.godotengine.org/en/latest/community/contributing/index.html) also have important information on the PR workflow and the code style we use. -#### Be nice to the git history +### Be nice to the Git history Try to make simple PRs that handle one specific topic. Just like for reporting issues, it's better to open 3 different PRs that each address a different issue @@ -99,33 +135,31 @@ commit, try to merge them together before making your pull request (see ``git rebase -i`` and relevant help about rebasing or amending commits on the Internet). -This git style guide has some good practices to have in mind: -[Git Style Guide](https://github.com/agis-/git-style-guide) +This [Git style guide](https://github.com/agis-/git-style-guide) has some +good practices to have in mind. See our [PR workflow](https://docs.godotengine.org/en/latest/community/contributing/pr_workflow.html) documentation for tips on using Git, amending commits and rebasing branches. -#### Format your commit logs with readability in mind +### Format your commit messages with readability in mind -The way you format your commit logs is quite important to ensure that the -commit history and changelog will be easy to read and understand. A git commit -log is formatted as a short title (first line) and an extended description +The way you format your commit messages is quite important to ensure that the +commit history and changelog will be easy to read and understand. A Git commit +message is formatted as a short title (first line) and an extended description (everything after the first line and an empty separation line). The short title is the most important part, as it is what will appear in the `shortlog` changelog (one line per commit, so no description shown) or in the -GitHub interface unless you click the "expand" button. As the name tells it, -try to keep that first line relatively short (ideally <= 50 chars, though it's -rare to be able to tell enough in so few characters, so you can go a bit -higher) - it should describe what the commit does globally, while details would -go in the description. Typically, if you can't keep the title short because you -have too much stuff to mention, it means that you should probably split your -changes in several commits :) +GitHub interface unless you click the "expand" button. As the name says, try to +keep that first line under 72 characters. It should describe what the commit +does globally, while details would go in the description. Typically, if you +can't keep the title short because you have too much stuff to mention, it means +you should probably split your changes in several commits :) -Here's an example of a well-formatted commit log (note how the extended +Here's an example of a well-formatted commit message (note how the extended description is also manually wrapped at 80 chars for readability): -``` +```text Prevent French fries carbonization by fixing heat regulation When using the French fries frying module, Godot would not regulate the heat @@ -139,9 +173,9 @@ of cooking oil under normal atmospheric conditions. Fixes #1789, long live the Realm! ``` -*Note:* When using the GitHub online editor (or worse, the drag and drop -feature), *please* edit the commit title to something meaningful. Commits named -"Update my_file.cpp" will not be accepted. +**Note:** When using the GitHub online editor or its drag-and-drop +feature, *please* edit the commit title to something meaningful. Commits named +"Update my_file.cpp" won't be accepted. 
## Contributing to Godot's translation @@ -162,6 +196,7 @@ discussions and support, others more for development discussions. To communicate with developers (e.g. to discuss a feature you want to implement or a bug you want to fix), the following channels can be used: + - [GitHub issues](https://github.com/godotengine/godot/issues): If there is an existing issue about a topic you want to discuss, just add a comment to it - all developers watch the repository and will get an email notification. You @@ -182,6 +217,6 @@ or a bug you want to fix), the following channels can be used: page](https://listengine.tuxfamily.org/godotengine.org/devel/) for subscription instructions. -Thanks! +Thanks for your interest in contributing! -The Godot development team +—The Godot development team From c3d04167a4b7b2f385a9bb9b908308ab836a8af3 Mon Sep 17 00:00:00 2001 From: Hugo Locurcio Date: Wed, 10 Jun 2020 10:32:46 +0200 Subject: [PATCH 25/34] Document the requirement to update the class reference when contributing Documenting new additions has been an informal requirement for a few months now. This makes it official. (cherry picked from commit 6d0e8f9fb70ff00088185f59a627c8f65f5d78ed) --- CONTRIBUTING.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ba04008680..c28692c34f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -119,6 +119,22 @@ for an introduction to developing on Godot. The [Contributing docs](https://docs.godotengine.org/en/latest/community/contributing/index.html) also have important information on the PR workflow and the code style we use. +### Document your changes + +If your pull request adds methods, properties or signals that are exposed to +scripting APIs, you **must** update the class reference to document those. +This is to ensure the documentation coverage doesn't decrease as contributions +are merged. + +[Update the documentation template](https://docs.godotengine.org/en/latest/community/contributing/updating_the_class_reference.html#updating-the-documentation-template) +using your compiled binary, then fill in the descriptions. +Follow the style guide described in the +[Docs writing guidelines](https://docs.godotengine.org/en/latest/community/contributing/docs_writing_guidelines.html). + +If your pull request modifies parts of the code in a non-obvious way, make sure +to add comments in the code as well. This helps other people understand the +change without having to look at `git blame`. + ### Be nice to the Git history Try to make simple PRs that handle one specific topic. Just like for reporting From 7bf9787921a9b73fb1d6c2628c68c22892634b2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Mon, 30 Mar 2020 08:28:32 +0200 Subject: [PATCH 26/34] SCons: Format buildsystem files with psf/black Configured for a max line length of 120 characters. psf/black is very opinionated and purposely doesn't leave much room for configuration. The output is mostly OK so that should be fine for us, but some things worth noting: - Manually wrapped strings will be reflowed, so by using a line length of 120 for the sake of preserving readability for our long command calls, it also means that some manually wrapped strings are back on the same line and should be manually merged again. - Code generators using string concatenation extensively look awful, since black puts each operand on a single line. 
We need to refactor these generators to use more pythonic string formatting, for which many options are available (`%`, `format` or f-strings). - CI checks and a pre-commit hook will be added to ensure that future buildsystem changes are well-formatted. (cherry picked from commit cd4e46ee65dab6baa6a143bf3b3f64244be36712) --- SConstruct | 437 ++++++++++-------- compat.py | 44 +- core/SCsub | 103 +++-- core/bind/SCsub | 2 +- core/core_builders.py | 104 +++-- core/crypto/SCsub | 8 +- core/io/SCsub | 2 +- core/make_binders.py | 48 +- core/math/SCsub | 2 +- core/os/SCsub | 2 +- doc/tools/doc_merge.py | 108 +++-- doc/tools/doc_status.py | 349 +++++++------- doc/tools/makerst.py | 295 ++++++------ drivers/SCsub | 35 +- drivers/alsa/SCsub | 2 +- drivers/alsamidi/SCsub | 2 +- drivers/coreaudio/SCsub | 2 +- drivers/coremidi/SCsub | 2 +- drivers/dummy/SCsub | 2 +- drivers/gl_context/SCsub | 8 +- drivers/gles2/SCsub | 2 +- drivers/gles2/shaders/SCsub | 40 +- drivers/gles3/SCsub | 4 +- drivers/gles3/shaders/SCsub | 40 +- drivers/png/SCsub | 9 +- drivers/pulseaudio/SCsub | 2 +- drivers/unix/SCsub | 4 +- drivers/wasapi/SCsub | 2 +- drivers/windows/SCsub | 2 +- drivers/winmidi/SCsub | 2 +- drivers/xaudio2/SCsub | 6 +- editor/SCsub | 36 +- editor/collada/SCsub | 2 +- editor/doc/SCsub | 2 +- editor/editor_builders.py | 8 +- editor/fileserver/SCsub | 2 +- editor/icons/SCsub | 12 +- editor/icons/editor_icons_builders.py | 21 +- editor/import/SCsub | 2 +- editor/plugins/SCsub | 2 +- editor/translations/extract.py | 40 +- gles_builders.py | 230 ++++++--- main/SCsub | 14 +- main/main_builders.py | 24 +- main/tests/SCsub | 2 +- methods.py | 353 ++++++++------ misc/scripts/fix_headers.py | 24 +- modules/SCsub | 8 +- modules/arkit/SCsub | 6 +- modules/arkit/config.py | 3 +- modules/assimp/SCsub | 150 +++--- modules/assimp/config.py | 3 +- modules/bmp/SCsub | 4 +- modules/bmp/config.py | 1 + modules/bullet/SCsub | 357 +++++++------- modules/bullet/config.py | 1 + modules/camera/SCsub | 7 +- modules/camera/config.py | 3 +- modules/csg/SCsub | 4 +- modules/csg/config.py | 3 + modules/cvtt/SCsub | 8 +- modules/cvtt/config.py | 3 +- modules/dds/SCsub | 4 +- modules/dds/config.py | 1 + modules/enet/SCsub | 6 +- modules/enet/config.py | 3 + modules/etc/SCsub | 34 +- modules/etc/config.py | 3 +- modules/freetype/SCsub | 30 +- modules/freetype/config.py | 1 + modules/gdnative/SCsub | 37 +- modules/gdnative/arvr/SCsub | 6 +- modules/gdnative/arvr/config.py | 5 +- modules/gdnative/config.py | 3 + modules/gdnative/gdnative_builders.py | 395 +++++++++------- modules/gdnative/nativescript/SCsub | 6 +- modules/gdnative/net/SCsub | 9 +- modules/gdnative/pluginscript/SCsub | 6 +- modules/gdnative/videodecoder/SCsub | 8 +- modules/gdscript/SCsub | 10 +- modules/gdscript/config.py | 3 + modules/gridmap/SCsub | 4 +- modules/gridmap/config.py | 3 + modules/hdr/SCsub | 4 +- modules/hdr/config.py | 1 + modules/jpg/SCsub | 4 +- modules/jpg/config.py | 1 + modules/jsonrpc/SCsub | 4 +- modules/jsonrpc/config.py | 5 +- modules/mbedtls/SCsub | 8 +- modules/mbedtls/config.py | 1 + modules/mobile_vr/SCsub | 6 +- modules/mobile_vr/config.py | 3 + modules/mono/SCsub | 40 +- .../mono/build_scripts/api_solution_build.py | 35 +- .../mono/build_scripts/gen_cs_glue_version.py | 20 +- .../mono/build_scripts/godot_tools_build.py | 18 +- .../build_scripts/make_android_mono_config.py | 17 +- modules/mono/build_scripts/mono_configure.py | 437 ++++++++++-------- modules/mono/build_scripts/mono_reg_utils.py | 53 +-- .../mono/build_scripts/solution_builder.py 
| 44 +- modules/mono/build_scripts/tls_configure.py | 21 +- modules/mono/config.py | 33 +- modules/ogg/SCsub | 6 +- modules/ogg/config.py | 1 + modules/opensimplex/SCsub | 4 +- modules/opensimplex/config.py | 12 +- modules/opus/SCsub | 31 +- modules/opus/config.py | 1 + modules/pvr/SCsub | 4 +- modules/pvr/config.py | 1 + modules/recast/SCsub | 28 +- modules/recast/config.py | 3 +- modules/regex/SCsub | 16 +- modules/regex/config.py | 3 + modules/squish/SCsub | 6 +- modules/squish/config.py | 1 + modules/stb_vorbis/SCsub | 4 +- modules/stb_vorbis/config.py | 3 + modules/svg/SCsub | 8 +- modules/svg/config.py | 1 + modules/tga/SCsub | 4 +- modules/tga/config.py | 1 + modules/theora/SCsub | 60 +-- modules/theora/config.py | 3 + modules/tinyexr/SCsub | 4 +- modules/tinyexr/config.py | 3 +- modules/upnp/SCsub | 6 +- modules/upnp/config.py | 8 +- modules/vhacd/SCsub | 6 +- modules/vhacd/config.py | 2 +- modules/visual_script/SCsub | 4 +- modules/visual_script/config.py | 3 + modules/vorbis/SCsub | 18 +- modules/vorbis/config.py | 1 + modules/webm/SCsub | 12 +- modules/webm/config.py | 5 +- modules/webm/libvpx/SCsub | 181 ++++---- modules/webp/SCsub | 6 +- modules/webp/config.py | 1 + modules/webrtc/SCsub | 8 +- modules/webrtc/config.py | 9 +- modules/websocket/SCsub | 6 +- modules/websocket/config.py | 10 +- modules/xatlas_unwrap/SCsub | 6 +- modules/xatlas_unwrap/config.py | 3 +- platform/SCsub | 24 +- platform/android/SCsub | 74 +-- platform/android/detect.py | 237 ++++++---- platform/haiku/SCsub | 27 +- platform/haiku/detect.py | 140 +++--- platform/iphone/SCsub | 36 +- platform/iphone/detect.py | 240 ++++++---- platform/javascript/SCsub | 51 +- platform/javascript/detect.py | 119 ++--- platform/osx/SCsub | 19 +- platform/osx/detect.py | 184 +++++--- platform/osx/platform_osx_builders.py | 10 +- platform/server/SCsub | 18 +- platform/server/detect.py | 248 +++++----- platform/uwp/SCsub | 22 +- platform/uwp/detect.py | 166 ++++--- platform/windows/SCsub | 10 +- platform/windows/detect.py | 390 +++++++++------- platform/windows/platform_windows_builders.py | 10 +- platform/x11/SCsub | 6 +- platform/x11/detect.py | 353 +++++++------- platform/x11/platform_x11_builders.py | 8 +- platform_methods.py | 37 +- scene/2d/SCsub | 2 +- scene/3d/SCsub | 4 +- scene/SCsub | 26 +- scene/animation/SCsub | 2 +- scene/audio/SCsub | 2 +- scene/debugger/SCsub | 2 +- scene/gui/SCsub | 2 +- scene/main/SCsub | 2 +- scene/resources/SCsub | 2 +- scene/resources/default_theme/SCsub | 2 +- scene/resources/default_theme/make_header.py | 18 +- servers/SCsub | 14 +- servers/arvr/SCsub | 2 +- servers/audio/SCsub | 2 +- servers/audio/effects/SCsub | 2 +- servers/camera/SCsub | 4 +- servers/physics/SCsub | 2 +- servers/physics/joints/SCsub | 2 +- servers/physics_2d/SCsub | 2 +- servers/visual/SCsub | 2 +- 189 files changed, 4050 insertions(+), 3315 deletions(-) diff --git a/SConstruct b/SConstruct index 5caca8e8f3..acb3f4d878 100644 --- a/SConstruct +++ b/SConstruct @@ -26,46 +26,46 @@ platform_exporters = [] platform_apis = [] for x in sorted(glob.glob("platform/*")): - if (not os.path.isdir(x) or not os.path.exists(x + "/detect.py")): + if not os.path.isdir(x) or not os.path.exists(x + "/detect.py"): continue tmppath = "./" + x sys.path.insert(0, tmppath) import detect - if (os.path.exists(x + "/export/export.cpp")): + if os.path.exists(x + "/export/export.cpp"): platform_exporters.append(x[9:]) - if (os.path.exists(x + "/api/api.cpp")): + if os.path.exists(x + "/api/api.cpp"): platform_apis.append(x[9:]) - if 
(detect.is_active()): + if detect.is_active(): active_platforms.append(detect.get_name()) active_platform_ids.append(x) - if (detect.can_build()): + if detect.can_build(): x = x.replace("platform/", "") # rest of world x = x.replace("platform\\", "") # win32 platform_list += [x] platform_opts[x] = detect.get_opts() platform_flags[x] = detect.get_flags() sys.path.remove(tmppath) - sys.modules.pop('detect') + sys.modules.pop("detect") methods.save_active_platforms(active_platforms, active_platform_ids) -custom_tools = ['default'] +custom_tools = ["default"] platform_arg = ARGUMENTS.get("platform", ARGUMENTS.get("p", False)) if os.name == "nt" and (platform_arg == "android" or ARGUMENTS.get("use_mingw", False)): - custom_tools = ['mingw'] -elif platform_arg == 'javascript': + custom_tools = ["mingw"] +elif platform_arg == "javascript": # Use generic POSIX build toolchain for Emscripten. - custom_tools = ['cc', 'c++', 'ar', 'link', 'textfile', 'zip'] + custom_tools = ["cc", "c++", "ar", "link", "textfile", "zip"] env_base = Environment(tools=custom_tools) -if 'TERM' in os.environ: - env_base['ENV']['TERM'] = os.environ['TERM'] -env_base.AppendENVPath('PATH', os.getenv('PATH')) -env_base.AppendENVPath('PKG_CONFIG_PATH', os.getenv('PKG_CONFIG_PATH')) +if "TERM" in os.environ: + env_base["ENV"]["TERM"] = os.environ["TERM"] +env_base.AppendENVPath("PATH", os.getenv("PATH")) +env_base.AppendENVPath("PKG_CONFIG_PATH", os.getenv("PKG_CONFIG_PATH")) env_base.disabled_modules = [] env_base.use_ptrcall = False env_base.module_version_string = "" @@ -93,7 +93,7 @@ env_base.SConsignFile(".sconsign{0}.dblite".format(pickle.HIGHEST_PROTOCOL)) # Build options -customs = ['custom.py'] +customs = ["custom.py"] profile = ARGUMENTS.get("profile", False) if profile: @@ -105,61 +105,67 @@ if profile: opts = Variables(customs, ARGUMENTS) # Target build options -opts.Add('arch', "Platform-dependent architecture (arm/arm64/x86/x64/mips/...)", '') -opts.Add(EnumVariable('bits', "Target platform bits", 'default', ('default', '32', '64'))) -opts.Add('p', "Platform (alias for 'platform')", '') -opts.Add('platform', "Target platform (%s)" % ('|'.join(platform_list), ), '') -opts.Add(EnumVariable('target', "Compilation target", 'debug', ('debug', 'release_debug', 'release'))) -opts.Add(EnumVariable('optimize', "Optimization type", 'speed', ('speed', 'size'))) -opts.Add(BoolVariable('tools', "Build the tools (a.k.a. the Godot editor)", True)) -opts.Add(BoolVariable('use_lto', 'Use link-time optimization', False)) -opts.Add(BoolVariable('use_precise_math_checks', 'Math checks use very precise epsilon (useful to debug the engine)', False)) +opts.Add("arch", "Platform-dependent architecture (arm/arm64/x86/x64/mips/...)", "") +opts.Add(EnumVariable("bits", "Target platform bits", "default", ("default", "32", "64"))) +opts.Add("p", "Platform (alias for 'platform')", "") +opts.Add("platform", "Target platform (%s)" % ("|".join(platform_list),), "") +opts.Add(EnumVariable("target", "Compilation target", "debug", ("debug", "release_debug", "release"))) +opts.Add(EnumVariable("optimize", "Optimization type", "speed", ("speed", "size"))) +opts.Add(BoolVariable("tools", "Build the tools (a.k.a. 
the Godot editor)", True)) +opts.Add(BoolVariable("use_lto", "Use link-time optimization", False)) +opts.Add(BoolVariable("use_precise_math_checks", "Math checks use very precise epsilon (debug option)", False)) # Components -opts.Add(BoolVariable('deprecated', "Enable deprecated features", True)) -opts.Add(BoolVariable('gdscript', "Enable GDScript support", True)) -opts.Add(BoolVariable('minizip', "Enable ZIP archive support using minizip", True)) -opts.Add(BoolVariable('xaudio2', "Enable the XAudio2 audio driver", False)) +opts.Add(BoolVariable("deprecated", "Enable deprecated features", True)) +opts.Add(BoolVariable("gdscript", "Enable GDScript support", True)) +opts.Add(BoolVariable("minizip", "Enable ZIP archive support using minizip", True)) +opts.Add(BoolVariable("xaudio2", "Enable the XAudio2 audio driver", False)) opts.Add("custom_modules", "A list of comma-separated directory paths containing custom modules to build.", "") # Advanced options -opts.Add(BoolVariable('verbose', "Enable verbose output for the compilation", False)) -opts.Add(BoolVariable('progress', "Show a progress indicator during compilation", True)) -opts.Add(EnumVariable('warnings', "Set the level of warnings emitted during compilation", 'all', ('extra', 'all', 'moderate', 'no'))) -opts.Add(BoolVariable('werror', "Treat compiler warnings as errors. Depends on the level of warnings set with 'warnings'", False)) -opts.Add(BoolVariable('dev', "If yes, alias for verbose=yes warnings=extra werror=yes", False)) -opts.Add('extra_suffix', "Custom extra suffix added to the base filename of all generated binary files", '') -opts.Add(BoolVariable('vsproj', "Generate a Visual Studio solution", False)) -opts.Add(EnumVariable('macports_clang', "Build using Clang from MacPorts", 'no', ('no', '5.0', 'devel'))) -opts.Add(BoolVariable('split_libmodules', "Split intermediate libmodules.a in smaller chunks to prevent exceeding linker command line size (forced to True when using MinGW)", False)) -opts.Add(BoolVariable('disable_3d', "Disable 3D nodes for a smaller executable", False)) -opts.Add(BoolVariable('disable_advanced_gui', "Disable advanced GUI nodes and behaviors", False)) -opts.Add(BoolVariable('no_editor_splash', "Don't use the custom splash screen for the editor", False)) -opts.Add('system_certs_path', "Use this path as SSL certificates default for editor (for package maintainers)", '') +opts.Add(BoolVariable("verbose", "Enable verbose output for the compilation", False)) +opts.Add(BoolVariable("progress", "Show a progress indicator during compilation", True)) +opts.Add(EnumVariable("warnings", "Level of compilation warnings", "all", ("extra", "all", "moderate", "no"))) +opts.Add(BoolVariable("werror", "Treat compiler warnings as errors", False)) +opts.Add(BoolVariable("dev", "If yes, alias for verbose=yes warnings=extra werror=yes", False)) +opts.Add("extra_suffix", "Custom extra suffix added to the base filename of all generated binary files", "") +opts.Add(BoolVariable("vsproj", "Generate a Visual Studio solution", False)) +opts.Add(EnumVariable("macports_clang", "Build using Clang from MacPorts", "no", ("no", "5.0", "devel"))) +opts.Add( + BoolVariable( + "split_libmodules", + "Split intermediate libmodules.a in smaller chunks to prevent exceeding linker command line size (forced to True when using MinGW)", + False, + ) +) +opts.Add(BoolVariable("disable_3d", "Disable 3D nodes for a smaller executable", False)) +opts.Add(BoolVariable("disable_advanced_gui", "Disable advanced GUI nodes and behaviors", False)) 
+opts.Add(BoolVariable("no_editor_splash", "Don't use the custom splash screen for the editor", False)) +opts.Add("system_certs_path", "Use this path as SSL certificates default for editor (for package maintainers)", "") # Thirdparty libraries -#opts.Add(BoolVariable('builtin_assimp', "Use the built-in Assimp library", True)) -opts.Add(BoolVariable('builtin_bullet', "Use the built-in Bullet library", True)) -opts.Add(BoolVariable('builtin_certs', "Bundle default SSL certificates to be used if you don't specify an override in the project settings", True)) -opts.Add(BoolVariable('builtin_enet', "Use the built-in ENet library", True)) -opts.Add(BoolVariable('builtin_freetype', "Use the built-in FreeType library", True)) -opts.Add(BoolVariable('builtin_libogg', "Use the built-in libogg library", True)) -opts.Add(BoolVariable('builtin_libpng', "Use the built-in libpng library", True)) -opts.Add(BoolVariable('builtin_libtheora', "Use the built-in libtheora library", True)) -opts.Add(BoolVariable('builtin_libvorbis', "Use the built-in libvorbis library", True)) -opts.Add(BoolVariable('builtin_libvpx', "Use the built-in libvpx library", True)) -opts.Add(BoolVariable('builtin_libwebp', "Use the built-in libwebp library", True)) -opts.Add(BoolVariable('builtin_wslay', "Use the built-in wslay library", True)) -opts.Add(BoolVariable('builtin_mbedtls', "Use the built-in mbedTLS library", True)) -opts.Add(BoolVariable('builtin_miniupnpc', "Use the built-in miniupnpc library", True)) -opts.Add(BoolVariable('builtin_opus', "Use the built-in Opus library", True)) -opts.Add(BoolVariable('builtin_pcre2', "Use the built-in PCRE2 library", True)) -opts.Add(BoolVariable('builtin_pcre2_with_jit', "Use JIT compiler for the built-in PCRE2 library", True)) -opts.Add(BoolVariable('builtin_recast', "Use the built-in Recast library", True)) -opts.Add(BoolVariable('builtin_squish', "Use the built-in squish library", True)) -opts.Add(BoolVariable('builtin_xatlas', "Use the built-in xatlas library", True)) -opts.Add(BoolVariable('builtin_zlib', "Use the built-in zlib library", True)) -opts.Add(BoolVariable('builtin_zstd', "Use the built-in Zstd library", True)) +# opts.Add(BoolVariable('builtin_assimp', "Use the built-in Assimp library", True)) +opts.Add(BoolVariable("builtin_bullet", "Use the built-in Bullet library", True)) +opts.Add(BoolVariable("builtin_certs", "Use the built-in SSL certificates bundles", True)) +opts.Add(BoolVariable("builtin_enet", "Use the built-in ENet library", True)) +opts.Add(BoolVariable("builtin_freetype", "Use the built-in FreeType library", True)) +opts.Add(BoolVariable("builtin_libogg", "Use the built-in libogg library", True)) +opts.Add(BoolVariable("builtin_libpng", "Use the built-in libpng library", True)) +opts.Add(BoolVariable("builtin_libtheora", "Use the built-in libtheora library", True)) +opts.Add(BoolVariable("builtin_libvorbis", "Use the built-in libvorbis library", True)) +opts.Add(BoolVariable("builtin_libvpx", "Use the built-in libvpx library", True)) +opts.Add(BoolVariable("builtin_libwebp", "Use the built-in libwebp library", True)) +opts.Add(BoolVariable("builtin_wslay", "Use the built-in wslay library", True)) +opts.Add(BoolVariable("builtin_mbedtls", "Use the built-in mbedTLS library", True)) +opts.Add(BoolVariable("builtin_miniupnpc", "Use the built-in miniupnpc library", True)) +opts.Add(BoolVariable("builtin_opus", "Use the built-in Opus library", True)) +opts.Add(BoolVariable("builtin_pcre2", "Use the built-in PCRE2 library", True)) 
+opts.Add(BoolVariable("builtin_pcre2_with_jit", "Use JIT compiler for the built-in PCRE2 library", True)) +opts.Add(BoolVariable("builtin_recast", "Use the built-in Recast library", True)) +opts.Add(BoolVariable("builtin_squish", "Use the built-in squish library", True)) +opts.Add(BoolVariable("builtin_xatlas", "Use the built-in xatlas library", True)) +opts.Add(BoolVariable("builtin_zlib", "Use the built-in zlib library", True)) +opts.Add(BoolVariable("builtin_zstd", "Use the built-in Zstd library", True)) # Compilation environment setup opts.Add("CXX", "C++ compiler") @@ -223,51 +229,51 @@ Help(opts.GenerateHelpText(env_base)) # add default include paths -env_base.Prepend(CPPPATH=['#']) +env_base.Prepend(CPPPATH=["#"]) # configure ENV for platform env_base.platform_exporters = platform_exporters env_base.platform_apis = platform_apis -if (env_base["use_precise_math_checks"]): - env_base.Append(CPPDEFINES=['PRECISE_MATH_CHECKS']) +if env_base["use_precise_math_checks"]: + env_base.Append(CPPDEFINES=["PRECISE_MATH_CHECKS"]) -if (env_base['target'] == 'debug'): - env_base.Append(CPPDEFINES=['DEBUG_MEMORY_ALLOC','DISABLE_FORCED_INLINE']) +if env_base["target"] == "debug": + env_base.Append(CPPDEFINES=["DEBUG_MEMORY_ALLOC", "DISABLE_FORCED_INLINE"]) # The two options below speed up incremental builds, but reduce the certainty that all files # will properly be rebuilt. As such, we only enable them for debug (dev) builds, not release. # To decide whether to rebuild a file, use the MD5 sum only if the timestamp has changed. # http://scons.org/doc/production/HTML/scons-user/ch06.html#idm139837621851792 - env_base.Decider('MD5-timestamp') + env_base.Decider("MD5-timestamp") # Use cached implicit dependencies by default. Can be overridden by specifying `--implicit-deps-changed` in the command line. # http://scons.org/doc/production/HTML/scons-user/ch06s04.html - env_base.SetOption('implicit_cache', 1) + env_base.SetOption("implicit_cache", 1) -if (env_base['no_editor_splash']): - env_base.Append(CPPDEFINES=['NO_EDITOR_SPLASH']) +if env_base["no_editor_splash"]: + env_base.Append(CPPDEFINES=["NO_EDITOR_SPLASH"]) -if not env_base['deprecated']: - env_base.Append(CPPDEFINES=['DISABLE_DEPRECATED']) +if not env_base["deprecated"]: + env_base.Append(CPPDEFINES=["DISABLE_DEPRECATED"]) env_base.platforms = {} selected_platform = "" -if env_base['platform'] != "": - selected_platform = env_base['platform'] -elif env_base['p'] != "": - selected_platform = env_base['p'] +if env_base["platform"] != "": + selected_platform = env_base["platform"] +elif env_base["p"] != "": + selected_platform = env_base["p"] env_base["platform"] = selected_platform else: # Missing `platform` argument, try to detect platform automatically - if sys.platform.startswith('linux'): - selected_platform = 'x11' - elif sys.platform == 'darwin': - selected_platform = 'osx' - elif sys.platform == 'win32': - selected_platform = 'windows' + if sys.platform.startswith("linux"): + selected_platform = "x11" + elif sys.platform == "darwin": + selected_platform = "osx" + elif sys.platform == "win32": + selected_platform = "windows" else: print("Could not detect platform automatically. 
Supported platforms:") for x in platform_list: @@ -282,6 +288,7 @@ if selected_platform in platform_list: tmppath = "./platform/" + selected_platform sys.path.insert(0, tmppath) import detect + if "create" in dir(detect): env = detect.create(env_base) else: @@ -295,12 +302,12 @@ if selected_platform in platform_list: env.Tool("compilation_db", toolpath=["misc/scons"]) env.Alias("compiledb", env.CompilationDatabase("compile_commands.json")) - if env['dev']: - env['verbose'] = True - env['warnings'] = "extra" - env['werror'] = True + if env["dev"]: + env["verbose"] = True + env["warnings"] = "extra" + env["werror"] = True - if env['vsproj']: + if env["vsproj"]: env.vs_incs = [] env.vs_srcs = [] @@ -313,7 +320,7 @@ if selected_platform in platform_list: pieces = fname.split(".") if len(pieces) > 0: basename = pieces[0] - basename = basename.replace('\\\\', '/') + basename = basename.replace("\\\\", "/") if os.path.isfile(basename + ".h"): env.vs_incs = env.vs_incs + [basename + ".h"] elif os.path.isfile(basename + ".hpp"): @@ -322,28 +329,29 @@ if selected_platform in platform_list: env.vs_srcs = env.vs_srcs + [basename + ".c"] elif os.path.isfile(basename + ".cpp"): env.vs_srcs = env.vs_srcs + [basename + ".cpp"] + env.AddToVSProject = AddToVSProject env.extra_suffix = "" - if env["extra_suffix"] != '': - env.extra_suffix += '.' + env["extra_suffix"] + if env["extra_suffix"] != "": + env.extra_suffix += "." + env["extra_suffix"] # Environment flags - CCFLAGS = env.get('CCFLAGS', '') - env['CCFLAGS'] = '' + CCFLAGS = env.get("CCFLAGS", "") + env["CCFLAGS"] = "" env.Append(CCFLAGS=str(CCFLAGS).split()) - CFLAGS = env.get('CFLAGS', '') - env['CFLAGS'] = '' + CFLAGS = env.get("CFLAGS", "") + env["CFLAGS"] = "" env.Append(CFLAGS=str(CFLAGS).split()) - CXXFLAGS = env.get('CXXFLAGS', '') - env['CXXFLAGS'] = '' + CXXFLAGS = env.get("CXXFLAGS", "") + env["CXXFLAGS"] = "" env.Append(CXXFLAGS=str(CXXFLAGS).split()) - LINKFLAGS = env.get('LINKFLAGS', '') - env['LINKFLAGS'] = '' + LINKFLAGS = env.get("LINKFLAGS", "") + env["LINKFLAGS"] = "" env.Append(LINKFLAGS=str(LINKFLAGS).split()) # Platform specific flags @@ -362,78 +370,83 @@ if selected_platform in platform_list: # Specifying GNU extensions support explicitly, which are supported by # both GCC and Clang. This mirrors GCC and Clang's current default # compile flags if no -std is specified. - env.Prepend(CFLAGS=['-std=gnu11']) - env.Prepend(CXXFLAGS=['-std=gnu++14']) + env.Prepend(CFLAGS=["-std=gnu11"]) + env.Prepend(CXXFLAGS=["-std=gnu++14"]) else: # MSVC doesn't have clear C standard support, /std only covers C++. # We apply it to CCFLAGS (both C and C++ code) in case it impacts C features. - env.Prepend(CCFLAGS=['/std:c++14']) + env.Prepend(CCFLAGS=["/std:c++14"]) # Configure compiler warnings if env.msvc: # Truncations, narrowing conversions, signed/unsigned comparisons... 
- disable_nonessential_warnings = ['/wd4267', '/wd4244', '/wd4305', '/wd4018', '/wd4800'] - if (env["warnings"] == 'extra'): - env.Append(CCFLAGS=['/Wall']) # Implies /W4 - elif (env["warnings"] == 'all'): - env.Append(CCFLAGS=['/W3'] + disable_nonessential_warnings) - elif (env["warnings"] == 'moderate'): - env.Append(CCFLAGS=['/W2'] + disable_nonessential_warnings) - else: # 'no' - env.Append(CCFLAGS=['/w']) + disable_nonessential_warnings = ["/wd4267", "/wd4244", "/wd4305", "/wd4018", "/wd4800"] + if env["warnings"] == "extra": + env.Append(CCFLAGS=["/Wall"]) # Implies /W4 + elif env["warnings"] == "all": + env.Append(CCFLAGS=["/W3"] + disable_nonessential_warnings) + elif env["warnings"] == "moderate": + env.Append(CCFLAGS=["/W2"] + disable_nonessential_warnings) + else: # 'no' + env.Append(CCFLAGS=["/w"]) # Set exception handling model to avoid warnings caused by Windows system headers. - env.Append(CCFLAGS=['/EHsc']) - if (env["werror"]): - env.Append(CCFLAGS=['/WX']) + env.Append(CCFLAGS=["/EHsc"]) + if env["werror"]: + env.Append(CCFLAGS=["/WX"]) # Force to use Unicode encoding - env.Append(MSVC_FLAGS=['/utf8']) - else: # Rest of the world + env.Append(MSVC_FLAGS=["/utf8"]) + else: # Rest of the world version = methods.get_compiler_version(env) or [-1, -1] shadow_local_warning = [] - all_plus_warnings = ['-Wwrite-strings'] + all_plus_warnings = ["-Wwrite-strings"] if methods.using_gcc(env): if version[0] >= 7: - shadow_local_warning = ['-Wshadow-local'] + shadow_local_warning = ["-Wshadow-local"] - if (env["warnings"] == 'extra'): + if env["warnings"] == "extra": # Note: enable -Wimplicit-fallthrough for Clang (already part of -Wextra for GCC) # once we switch to C++11 or later (necessary for our FALLTHROUGH macro). - env.Append(CCFLAGS=['-Wall', '-Wextra', '-Wno-unused-parameter'] - + all_plus_warnings + shadow_local_warning) - env.Append(CXXFLAGS=['-Wctor-dtor-privacy', '-Wnon-virtual-dtor']) + env.Append(CCFLAGS=["-Wall", "-Wextra", "-Wno-unused-parameter"] + all_plus_warnings + shadow_local_warning) + env.Append(CXXFLAGS=["-Wctor-dtor-privacy", "-Wnon-virtual-dtor"]) if methods.using_gcc(env): - env.Append(CCFLAGS=['-Walloc-zero', - '-Wduplicated-branches', '-Wduplicated-cond', - '-Wstringop-overflow=4', '-Wlogical-op']) - env.Append(CXXFLAGS=['-Wnoexcept', '-Wplacement-new=1']) + env.Append( + CCFLAGS=[ + "-Walloc-zero", + "-Wduplicated-branches", + "-Wduplicated-cond", + "-Wstringop-overflow=4", + "-Wlogical-op", + ] + ) + env.Append(CXXFLAGS=["-Wnoexcept", "-Wplacement-new=1"]) if version[0] >= 9: - env.Append(CCFLAGS=['-Wattribute-alias=2']) - elif (env["warnings"] == 'all'): - env.Append(CCFLAGS=['-Wall'] + shadow_local_warning) - elif (env["warnings"] == 'moderate'): - env.Append(CCFLAGS=['-Wall', '-Wno-unused'] + shadow_local_warning) - else: # 'no' - env.Append(CCFLAGS=['-w']) - if (env["werror"]): - env.Append(CCFLAGS=['-Werror']) - else: # always enable those errors - env.Append(CCFLAGS=['-Werror=return-type']) + env.Append(CCFLAGS=["-Wattribute-alias=2"]) + elif env["warnings"] == "all": + env.Append(CCFLAGS=["-Wall"] + shadow_local_warning) + elif env["warnings"] == "moderate": + env.Append(CCFLAGS=["-Wall", "-Wno-unused"] + shadow_local_warning) + else: # 'no' + env.Append(CCFLAGS=["-w"]) + if env["werror"]: + env.Append(CCFLAGS=["-Werror"]) + else: # always enable those errors + env.Append(CCFLAGS=["-Werror=return-type"]) - if (hasattr(detect, 'get_program_suffix')): + if hasattr(detect, "get_program_suffix"): suffix = "." 
+ detect.get_program_suffix() else: suffix = "." + selected_platform - if (env["target"] == "release"): + if env["target"] == "release": if env["tools"]: print("Tools can only be built with targets 'debug' and 'release_debug'.") sys.exit(255) suffix += ".opt" - env.Append(CPPDEFINES=['NDEBUG']) + env.Append(CPPDEFINES=["NDEBUG"]) - elif (env["target"] == "release_debug"): + elif env["target"] == "release_debug": if env["tools"]: suffix += ".opt.tools" else: @@ -446,15 +459,15 @@ if selected_platform in platform_list: if env["arch"] != "": suffix += "." + env["arch"] - elif (env["bits"] == "32"): + elif env["bits"] == "32": suffix += ".32" - elif (env["bits"] == "64"): + elif env["bits"] == "64": suffix += ".64" suffix += env.extra_suffix sys.path.remove(tmppath) - sys.modules.pop('detect') + sys.modules.pop("detect") modules_enabled = OrderedDict() env.module_icons_paths = [] @@ -466,17 +479,20 @@ if selected_platform in platform_list: sys.path.insert(0, path) env.current_module = name import config + # can_build changed number of arguments between 3.0 (1) and 3.1 (2), # so try both to preserve compatibility for 3.0 modules can_build = False try: can_build = config.can_build(env, selected_platform) except TypeError: - print("Warning: module '%s' uses a deprecated `can_build` " - "signature in its config.py file, it should be " - "`can_build(env, platform)`." % x) + print( + "Warning: module '%s' uses a deprecated `can_build` " + "signature in its config.py file, it should be " + "`can_build(env, platform)`." % x + ) can_build = config.can_build(selected_platform) - if (can_build): + if can_build: config.configure(env) # Get doc classes paths (if present) try: @@ -517,47 +533,68 @@ if selected_platform in platform_list: env["LIBSUFFIX"] = suffix + env["LIBSUFFIX"] env["SHLIBSUFFIX"] = suffix + env["SHLIBSUFFIX"] - if (env.use_ptrcall): - env.Append(CPPDEFINES=['PTRCALL_ENABLED']) - if env['tools']: - env.Append(CPPDEFINES=['TOOLS_ENABLED']) - if env['disable_3d']: - if env['tools']: - print("Build option 'disable_3d=yes' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).") + if env.use_ptrcall: + env.Append(CPPDEFINES=["PTRCALL_ENABLED"]) + if env["tools"]: + env.Append(CPPDEFINES=["TOOLS_ENABLED"]) + if env["disable_3d"]: + if env["tools"]: + print( + "Build option 'disable_3d=yes' cannot be used with 'tools=yes' (editor), " + "only with 'tools=no' (export template)." + ) sys.exit(255) else: - env.Append(CPPDEFINES=['_3D_DISABLED']) - if env['gdscript']: - env.Append(CPPDEFINES=['GDSCRIPT_ENABLED']) - if env['disable_advanced_gui']: - if env['tools']: - print("Build option 'disable_advanced_gui=yes' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).") + env.Append(CPPDEFINES=["_3D_DISABLED"]) + if env["gdscript"]: + env.Append(CPPDEFINES=["GDSCRIPT_ENABLED"]) + if env["disable_advanced_gui"]: + if env["tools"]: + print( + "Build option 'disable_advanced_gui=yes' cannot be used with 'tools=yes' (editor), " + "only with 'tools=no' (export template)." 
+ ) sys.exit(255) else: - env.Append(CPPDEFINES=['ADVANCED_GUI_DISABLED']) - if env['minizip']: - env.Append(CPPDEFINES=['MINIZIP_ENABLED']) + env.Append(CPPDEFINES=["ADVANCED_GUI_DISABLED"]) + if env["minizip"]: + env.Append(CPPDEFINES=["MINIZIP_ENABLED"]) - editor_module_list = ['regex'] + editor_module_list = ["regex"] for x in editor_module_list: - if not env['module_' + x + '_enabled']: - if env['tools']: - print("Build option 'module_" + x + "_enabled=no' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).") + if not env["module_" + x + "_enabled"]: + if env["tools"]: + print( + "Build option 'module_" + x + "_enabled=no' cannot be used with 'tools=yes' (editor), " + "only with 'tools=no' (export template)." + ) sys.exit(255) - if not env['verbose']: + if not env["verbose"]: methods.no_verbose(sys, env) - if (not env["platform"] == "server"): # FIXME: detect GLES3 - env.Append(BUILDERS = { 'GLES3_GLSL' : env.Builder(action=run_in_subprocess(gles_builders.build_gles3_headers), suffix='glsl.gen.h', src_suffix='.glsl')}) - env.Append(BUILDERS = { 'GLES2_GLSL' : env.Builder(action=run_in_subprocess(gles_builders.build_gles2_headers), suffix='glsl.gen.h', src_suffix='.glsl')}) + if not env["platform"] == "server": # FIXME: detect GLES3 + env.Append( + BUILDERS={ + "GLES3_GLSL": env.Builder( + action=run_in_subprocess(gles_builders.build_gles3_headers), suffix="glsl.gen.h", src_suffix=".glsl" + ) + } + ) + env.Append( + BUILDERS={ + "GLES2_GLSL": env.Builder( + action=run_in_subprocess(gles_builders.build_gles2_headers), suffix="glsl.gen.h", src_suffix=".glsl" + ) + } + ) scons_cache_path = os.environ.get("SCONS_CACHE") if scons_cache_path != None: CacheDir(scons_cache_path) print("Scons cache enabled... (path: '" + scons_cache_path + "')") - Export('env') + Export("env") # build subdirs, the build order is dependent on link order. 
@@ -574,16 +611,16 @@ if selected_platform in platform_list: SConscript("platform/" + selected_platform + "/SCsub") # build selected platform # Microsoft Visual Studio Project Generation - if env['vsproj']: - env['CPPPATH'] = [Dir(path) for path in env['CPPPATH']] + if env["vsproj"]: + env["CPPPATH"] = [Dir(path) for path in env["CPPPATH"]] methods.generate_vs_project(env, GetOption("num_jobs")) methods.generate_cpp_hint_file("cpp.hint") # Check for the existence of headers conf = Configure(env) - if ("check_c_headers" in env): + if "check_c_headers" in env: for header in env["check_c_headers"]: - if (conf.CheckCHeader(header[0])): + if conf.CheckCHeader(header[0]): env.AppendUnique(CPPDEFINES=[header[1]]) elif selected_platform != "": @@ -605,26 +642,30 @@ elif selected_platform != "": sys.exit(255) # The following only makes sense when the env is defined, and assumes it is -if 'env' in locals(): +if "env" in locals(): screen = sys.stdout # Progress reporting is not available in non-TTY environments since it # messes with the output (for example, when writing to a file) - show_progress = (env['progress'] and sys.stdout.isatty()) + show_progress = env["progress"] and sys.stdout.isatty() node_count = 0 node_count_max = 0 node_count_interval = 1 - node_count_fname = str(env.Dir('#')) + '/.scons_node_count' + node_count_fname = str(env.Dir("#")) + "/.scons_node_count" import time, math class cache_progress: # The default is 1 GB cache and 12 hours half life - def __init__(self, path = None, limit = 1073741824, half_life = 43200): + def __init__(self, path=None, limit=1073741824, half_life=43200): self.path = path self.limit = limit self.exponent_scale = math.log(2) / half_life - if env['verbose'] and path != None: - screen.write('Current cache limit is ' + self.convert_size(limit) + ' (used: ' + self.convert_size(self.get_size(path)) + ')\n') + if env["verbose"] and path != None: + screen.write( + "Current cache limit is {} (used: {})\n".format( + self.convert_size(limit), self.convert_size(self.get_size(path)) + ) + ) self.delete(self.file_list()) def __call__(self, node, *args, **kw): @@ -632,22 +673,22 @@ if 'env' in locals(): if show_progress: # Print the progress percentage node_count += node_count_interval - if (node_count_max > 0 and node_count <= node_count_max): - screen.write('\r[%3d%%] ' % (node_count * 100 / node_count_max)) + if node_count_max > 0 and node_count <= node_count_max: + screen.write("\r[%3d%%] " % (node_count * 100 / node_count_max)) screen.flush() - elif (node_count_max > 0 and node_count > node_count_max): - screen.write('\r[100%] ') + elif node_count_max > 0 and node_count > node_count_max: + screen.write("\r[100%] ") screen.flush() else: - screen.write('\r[Initial build] ') + screen.write("\r[Initial build] ") screen.flush() def delete(self, files): if len(files) == 0: return - if env['verbose']: + if env["verbose"]: # Utter something - screen.write('\rPurging %d %s from cache...\n' % (len(files), len(files) > 1 and 'files' or 'file')) + screen.write("\rPurging %d %s from cache...\n" % (len(files), len(files) > 1 and "files" or "file")) [os.remove(f) for f in files] def file_list(self): @@ -656,7 +697,7 @@ if 'env' in locals(): return [] # Gather a list of (filename, (size, atime)) within the # cache directory - file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, '*', '*'))] + file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, "*", "*"))] if file_stat == []: # Nothing to do return [] @@ -671,7 +712,7 @@ if 'env' 
in locals(): # Search for the first entry where the storage limit is # reached sum, mark = 0, None - for i,x in enumerate(file_stat): + for i, x in enumerate(file_stat): sum += x[1] if sum > self.limit: mark = i @@ -690,7 +731,7 @@ if 'env' in locals(): s = round(size_bytes / p, 2) return "%s %s" % (int(s) if i == 0 else s, size_name[i]) - def get_size(self, start_path = '.'): + def get_size(self, start_path="."): total_size = 0 for dirpath, dirnames, filenames in os.walk(start_path): for f in filenames: @@ -700,8 +741,8 @@ if 'env' in locals(): def progress_finish(target, source, env): global node_count, progressor - with open(node_count_fname, 'w') as f: - f.write('%d\n' % node_count) + with open(node_count_fname, "w") as f: + f.write("%d\n" % node_count) progressor.delete(progressor.file_list()) try: @@ -715,7 +756,7 @@ if 'env' in locals(): # cache directory to a size not larger than cache_limit. cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024 progressor = cache_progress(cache_directory, cache_limit) - Progress(progressor, interval = node_count_interval) + Progress(progressor, interval=node_count_interval) - progress_finish_command = Command('progress_finish', [], progress_finish) + progress_finish_command = Command("progress_finish", [], progress_finish) AlwaysBuild(progress_finish_command) diff --git a/compat.py b/compat.py index de99eef9c2..ed378b6778 100644 --- a/compat.py +++ b/compat.py @@ -1,68 +1,90 @@ import sys if sys.version_info < (3,): + def isbasestring(s): return isinstance(s, basestring) + def open_utf8(filename, mode): return open(filename, mode) + def byte_to_str(x): return str(ord(x)) + import cStringIO + def StringIO(): return cStringIO.StringIO() + def encode_utf8(x): return x + def decode_utf8(x): return x + def iteritems(d): return d.iteritems() + def itervalues(d): return d.itervalues() + def escape_string(s): if isinstance(s, unicode): - s = s.encode('ascii') - result = '' + s = s.encode("ascii") + result = "" for c in s: - if not (32 <= ord(c) < 127) or c in ('\\', '"'): - result += '\\%03o' % ord(c) + if not (32 <= ord(c) < 127) or c in ("\\", '"'): + result += "\\%03o" % ord(c) else: result += c return result + else: + def isbasestring(s): return isinstance(s, (str, bytes)) + def open_utf8(filename, mode): return open(filename, mode, encoding="utf-8") + def byte_to_str(x): return str(x) + import io + def StringIO(): return io.StringIO() + import codecs + def encode_utf8(x): return codecs.utf_8_encode(x)[0] + def decode_utf8(x): return codecs.utf_8_decode(x)[0] + def iteritems(d): return iter(d.items()) + def itervalues(d): return iter(d.values()) + def charcode_to_c_escapes(c): rev_result = [] while c >= 256: c, low = (c // 256, c % 256) - rev_result.append('\\%03o' % low) - rev_result.append('\\%03o' % c) - return ''.join(reversed(rev_result)) + rev_result.append("\\%03o" % low) + rev_result.append("\\%03o" % c) + return "".join(reversed(rev_result)) + def escape_string(s): - result = '' + result = "" if isinstance(s, str): - s = s.encode('utf-8') + s = s.encode("utf-8") for c in s: - if not(32 <= c < 127) or c in (ord('\\'), ord('"')): + if not (32 <= c < 127) or c in (ord("\\"), ord('"')): result += charcode_to_c_escapes(c) else: result += chr(c) return result - diff --git a/core/SCsub b/core/SCsub index 755c5c65c6..a8eb135c79 100644 --- a/core/SCsub +++ b/core/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") import core_builders import make_binders @@ -11,31 +11,32 @@ env.core_sources = [] # Generate 
AES256 script encryption key import os + txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0" -if ("SCRIPT_AES256_ENCRYPTION_KEY" in os.environ): +if "SCRIPT_AES256_ENCRYPTION_KEY" in os.environ: e = os.environ["SCRIPT_AES256_ENCRYPTION_KEY"] txt = "" ec_valid = True - if (len(e) != 64): + if len(e) != 64: ec_valid = False else: for i in range(len(e) >> 1): - if (i > 0): + if i > 0: txt += "," - txts = "0x" + e[i * 2:i * 2 + 2] + txts = "0x" + e[i * 2 : i * 2 + 2] try: int(txts, 16) except: ec_valid = False txt += txts - if (not ec_valid): + if not ec_valid: txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0" print("Invalid AES256 encryption key, not 64 bits hex: " + e) # NOTE: It is safe to generate this file here, since this is still executed serially with open("script_encryption_key.gen.cpp", "w") as f: - f.write("#include \"core/project_settings.h\"\nuint8_t script_encryption_key[32]={" + txt + "};\n") + f.write('#include "core/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n") # Add required thirdparty code. @@ -49,7 +50,6 @@ thirdparty_misc_sources = [ # C sources "fastlz.c", "smaz.c", - # C++ sources "hq2x.cpp", "pcg.cpp", @@ -60,30 +60,30 @@ thirdparty_misc_sources = [thirdparty_misc_dir + file for file in thirdparty_mis env_thirdparty.add_source_files(env.core_sources, thirdparty_misc_sources) # Zlib library, can be unbundled -if env['builtin_zlib']: - thirdparty_zlib_dir = "#thirdparty/zlib/" - thirdparty_zlib_sources = [ - "adler32.c", - "compress.c", - "crc32.c", - "deflate.c", - "infback.c", - "inffast.c", - "inflate.c", - "inftrees.c", - "trees.c", - "uncompr.c", - "zutil.c", - ] - thirdparty_zlib_sources = [thirdparty_zlib_dir + file for file in thirdparty_zlib_sources] +if env["builtin_zlib"]: + thirdparty_zlib_dir = "#thirdparty/zlib/" + thirdparty_zlib_sources = [ + "adler32.c", + "compress.c", + "crc32.c", + "deflate.c", + "infback.c", + "inffast.c", + "inflate.c", + "inftrees.c", + "trees.c", + "uncompr.c", + "zutil.c", + ] + thirdparty_zlib_sources = [thirdparty_zlib_dir + file for file in thirdparty_zlib_sources] - env_thirdparty.Prepend(CPPPATH=[thirdparty_zlib_dir]) - # Needs to be available in main env too - env.Prepend(CPPPATH=[thirdparty_zlib_dir]) - if (env['target'] == 'debug'): - env_thirdparty.Append(CPPDEFINES=['ZLIB_DEBUG']) + env_thirdparty.Prepend(CPPPATH=[thirdparty_zlib_dir]) + # Needs to be available in main env too + env.Prepend(CPPPATH=[thirdparty_zlib_dir]) + if env["target"] == "debug": + env_thirdparty.Append(CPPDEFINES=["ZLIB_DEBUG"]) - env_thirdparty.add_source_files(env.core_sources, thirdparty_zlib_sources) + env_thirdparty.add_source_files(env.core_sources, thirdparty_zlib_sources) # Minizip library, could be unbundled in theory # However, our version has some custom modifications, so it won't compile with the system one @@ -99,7 +99,7 @@ env_thirdparty.add_source_files(env.core_sources, thirdparty_minizip_sources) # Zstd library, can be unbundled in theory # though we currently use some private symbols # https://github.com/godotengine/godot/issues/17374 -if env['builtin_zstd']: +if env["builtin_zstd"]: thirdparty_zstd_dir = "#thirdparty/zstd/" thirdparty_zstd_sources = [ "common/debug.c", @@ -142,30 +142,43 @@ if env['builtin_zstd']: env.add_source_files(env.core_sources, "*.cpp") # Certificates -env.Depends("#core/io/certs_compressed.gen.h", ["#thirdparty/certs/ca-certificates.crt", env.Value(env['builtin_certs']), env.Value(env['system_certs_path'])]) 
-env.CommandNoCache("#core/io/certs_compressed.gen.h", "#thirdparty/certs/ca-certificates.crt", run_in_subprocess(core_builders.make_certs_header)) +env.Depends( + "#core/io/certs_compressed.gen.h", + ["#thirdparty/certs/ca-certificates.crt", env.Value(env["builtin_certs"]), env.Value(env["system_certs_path"])], +) +env.CommandNoCache( + "#core/io/certs_compressed.gen.h", + "#thirdparty/certs/ca-certificates.crt", + run_in_subprocess(core_builders.make_certs_header), +) # Make binders -env.CommandNoCache(['method_bind.gen.inc', 'method_bind_ext.gen.inc', 'method_bind_free_func.gen.inc'], 'make_binders.py', run_in_subprocess(make_binders.run)) +env.CommandNoCache( + ["method_bind.gen.inc", "method_bind_ext.gen.inc", "method_bind_free_func.gen.inc"], + "make_binders.py", + run_in_subprocess(make_binders.run), +) # Authors -env.Depends('#core/authors.gen.h', "../AUTHORS.md") -env.CommandNoCache('#core/authors.gen.h', "../AUTHORS.md", run_in_subprocess(core_builders.make_authors_header)) +env.Depends("#core/authors.gen.h", "../AUTHORS.md") +env.CommandNoCache("#core/authors.gen.h", "../AUTHORS.md", run_in_subprocess(core_builders.make_authors_header)) # Donors -env.Depends('#core/donors.gen.h', "../DONORS.md") -env.CommandNoCache('#core/donors.gen.h', "../DONORS.md", run_in_subprocess(core_builders.make_donors_header)) +env.Depends("#core/donors.gen.h", "../DONORS.md") +env.CommandNoCache("#core/donors.gen.h", "../DONORS.md", run_in_subprocess(core_builders.make_donors_header)) # License -env.Depends('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"]) -env.CommandNoCache('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"], run_in_subprocess(core_builders.make_license_header)) +env.Depends("#core/license.gen.h", ["../COPYRIGHT.txt", "../LICENSE.txt"]) +env.CommandNoCache( + "#core/license.gen.h", ["../COPYRIGHT.txt", "../LICENSE.txt"], run_in_subprocess(core_builders.make_license_header) +) # Chain load SCsubs -SConscript('os/SCsub') -SConscript('math/SCsub') -SConscript('crypto/SCsub') -SConscript('io/SCsub') -SConscript('bind/SCsub') +SConscript("os/SCsub") +SConscript("math/SCsub") +SConscript("crypto/SCsub") +SConscript("io/SCsub") +SConscript("bind/SCsub") # Build it all as a library diff --git a/core/bind/SCsub b/core/bind/SCsub index 1c5f954470..19a6549225 100644 --- a/core/bind/SCsub +++ b/core/bind/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.core_sources, "*.cpp") diff --git a/core/core_builders.py b/core/core_builders.py index f3a9e3b221..b174262764 100644 --- a/core/core_builders.py +++ b/core/core_builders.py @@ -16,6 +16,7 @@ def make_certs_header(target, source, env): buf = f.read() decomp_size = len(buf) import zlib + buf = zlib.compress(buf) g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") @@ -23,9 +24,9 @@ def make_certs_header(target, source, env): g.write("#define _CERTS_RAW_H\n") # System certs path. Editor will use them if defined. (for package maintainers) - path = env['system_certs_path'] - g.write("#define _SYSTEM_CERTS_PATH \"%s\"\n" % str(path)) - if env['builtin_certs']: + path = env["system_certs_path"] + g.write('#define _SYSTEM_CERTS_PATH "%s"\n' % str(path)) + if env["builtin_certs"]: # Defined here and not in env so changing it does not trigger a full rebuild. 
g.write("#define BUILTIN_CERTS_ENABLED\n") g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n") @@ -62,7 +63,7 @@ def make_authors_header(target, source, env): for line in f: if reading: if line.startswith(" "): - g.write("\t\"" + escape_string(line.strip()) + "\",\n") + g.write('\t"' + escape_string(line.strip()) + '",\n') continue if line.startswith("## "): if reading: @@ -85,10 +86,15 @@ def make_authors_header(target, source, env): def make_donors_header(target, source, env): - sections = ["Platinum sponsors", "Gold sponsors", "Mini sponsors", - "Gold donors", "Silver donors", "Bronze donors"] - sections_id = ["DONORS_SPONSOR_PLAT", "DONORS_SPONSOR_GOLD", "DONORS_SPONSOR_MINI", - "DONORS_GOLD", "DONORS_SILVER", "DONORS_BRONZE"] + sections = ["Platinum sponsors", "Gold sponsors", "Mini sponsors", "Gold donors", "Silver donors", "Bronze donors"] + sections_id = [ + "DONORS_SPONSOR_PLAT", + "DONORS_SPONSOR_GOLD", + "DONORS_SPONSOR_MINI", + "DONORS_GOLD", + "DONORS_SILVER", + "DONORS_BRONZE", + ] src = source[0] dst = target[0] @@ -108,7 +114,7 @@ def make_donors_header(target, source, env): for line in f: if reading >= 0: if line.startswith(" "): - g.write("\t\"" + escape_string(line.strip()) + "\",\n") + g.write('\t"' + escape_string(line.strip()) + '",\n') continue if line.startswith("## "): if reading: @@ -151,8 +157,8 @@ def make_license_header(target, source, env): return line def next_tag(self): - if not ':' in self.current: - return ('', []) + if not ":" in self.current: + return ("", []) tag, line = self.current.split(":", 1) lines = [line.strip()] while self.next_line() and self.current.startswith(" "): @@ -160,6 +166,7 @@ def make_license_header(target, source, env): return (tag, lines) from collections import OrderedDict + projects = OrderedDict() license_list = [] @@ -200,26 +207,30 @@ def make_license_header(target, source, env): with open_utf8(src_license, "r") as license_file: for line in license_file: escaped_string = escape_string(line.strip()) - f.write("\n\t\t\"" + escaped_string + "\\n\"") + f.write('\n\t\t"' + escaped_string + '\\n"') f.write(";\n\n") - f.write("struct ComponentCopyrightPart {\n" - "\tconst char *license;\n" - "\tconst char *const *files;\n" - "\tconst char *const *copyright_statements;\n" - "\tint file_count;\n" - "\tint copyright_count;\n" - "};\n\n") + f.write( + "struct ComponentCopyrightPart {\n" + "\tconst char *license;\n" + "\tconst char *const *files;\n" + "\tconst char *const *copyright_statements;\n" + "\tint file_count;\n" + "\tint copyright_count;\n" + "};\n\n" + ) - f.write("struct ComponentCopyright {\n" - "\tconst char *name;\n" - "\tconst ComponentCopyrightPart *parts;\n" - "\tint part_count;\n" - "};\n\n") + f.write( + "struct ComponentCopyright {\n" + "\tconst char *name;\n" + "\tconst ComponentCopyrightPart *parts;\n" + "\tint part_count;\n" + "};\n\n" + ) f.write("const char *const COPYRIGHT_INFO_DATA[] = {\n") for line in data_list: - f.write("\t\"" + escape_string(line) + "\",\n") + f.write('\t"' + escape_string(line) + '",\n') f.write("};\n\n") f.write("const ComponentCopyrightPart COPYRIGHT_PROJECT_PARTS[] = {\n") @@ -228,11 +239,21 @@ def make_license_header(target, source, env): for project_name, project in iteritems(projects): part_indexes[project_name] = part_index for part in project: - f.write("\t{ \"" + escape_string(part["License"][0]) + "\", " - + "©RIGHT_INFO_DATA[" + str(part["file_index"]) + "], " - + "©RIGHT_INFO_DATA[" + str(part["copyright_index"]) + "], " - + str(len(part["Files"])) + 
", " - + str(len(part["Copyright"])) + " },\n") + f.write( + '\t{ "' + + escape_string(part["License"][0]) + + '", ' + + "©RIGHT_INFO_DATA[" + + str(part["file_index"]) + + "], " + + "©RIGHT_INFO_DATA[" + + str(part["copyright_index"]) + + "], " + + str(len(part["Files"])) + + ", " + + str(len(part["Copyright"])) + + " },\n" + ) part_index += 1 f.write("};\n\n") @@ -240,30 +261,37 @@ def make_license_header(target, source, env): f.write("const ComponentCopyright COPYRIGHT_INFO[] = {\n") for project_name, project in iteritems(projects): - f.write("\t{ \"" + escape_string(project_name) + "\", " - + "©RIGHT_PROJECT_PARTS[" + str(part_indexes[project_name]) + "], " - + str(len(project)) + " },\n") + f.write( + '\t{ "' + + escape_string(project_name) + + '", ' + + "©RIGHT_PROJECT_PARTS[" + + str(part_indexes[project_name]) + + "], " + + str(len(project)) + + " },\n" + ) f.write("};\n\n") f.write("const int LICENSE_COUNT = " + str(len(license_list)) + ";\n") f.write("const char *const LICENSE_NAMES[] = {\n") for l in license_list: - f.write("\t\"" + escape_string(l[0]) + "\",\n") + f.write('\t"' + escape_string(l[0]) + '",\n') f.write("};\n\n") f.write("const char *const LICENSE_BODIES[] = {\n\n") for l in license_list: for line in l[1:]: if line == ".": - f.write("\t\"\\n\"\n") + f.write('\t"\\n"\n') else: - f.write("\t\"" + escape_string(line) + "\\n\"\n") - f.write("\t\"\",\n\n") + f.write('\t"' + escape_string(line) + '\\n"\n') + f.write('\t"",\n\n') f.write("};\n\n") f.write("#endif\n") -if __name__ == '__main__': +if __name__ == "__main__": subprocess_main(globals()) diff --git a/core/crypto/SCsub b/core/crypto/SCsub index 0a3f05d87a..da4a9c9381 100644 --- a/core/crypto/SCsub +++ b/core/crypto/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") env_crypto = env.Clone() @@ -22,7 +22,9 @@ if not has_module: env_thirdparty = env_crypto.Clone() env_thirdparty.disable_warnings() # Custom config file - env_thirdparty.Append(CPPDEFINES=[('MBEDTLS_CONFIG_FILE', '\\"thirdparty/mbedtls/include/godot_core_mbedtls_config.h\\"')]) + env_thirdparty.Append( + CPPDEFINES=[("MBEDTLS_CONFIG_FILE", '\\"thirdparty/mbedtls/include/godot_core_mbedtls_config.h\\"')] + ) thirdparty_mbedtls_dir = "#thirdparty/mbedtls/library/" thirdparty_mbedtls_sources = [ "aes.c", @@ -30,7 +32,7 @@ if not has_module: "md5.c", "sha1.c", "sha256.c", - "godot_core_mbedtls_platform.c" + "godot_core_mbedtls_platform.c", ] thirdparty_mbedtls_sources = [thirdparty_mbedtls_dir + file for file in thirdparty_mbedtls_sources] env_thirdparty.add_source_files(env.core_sources, thirdparty_mbedtls_sources) diff --git a/core/io/SCsub b/core/io/SCsub index 1c5f954470..19a6549225 100644 --- a/core/io/SCsub +++ b/core/io/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.core_sources, "*.cpp") diff --git a/core/make_binders.py b/core/make_binders.py index c38db5cef4..883583e5dc 100644 --- a/core/make_binders.py +++ b/core/make_binders.py @@ -280,58 +280,57 @@ MethodBind* create_method_bind($ifret R$ $ifnoret void$ (*p_method)($ifconst con """ - def make_version(template, nargs, argmax, const, ret): intext = template from_pos = 0 outtext = "" - while(True): + while True: to_pos = intext.find("$", from_pos) - if (to_pos == -1): + if to_pos == -1: outtext += intext[from_pos:] break else: outtext += intext[from_pos:to_pos] end = intext.find("$", to_pos + 1) - if (end == -1): + if end == -1: break # ignore - macro = intext[to_pos + 1:end] + macro = intext[to_pos + 1 : end] cmd = "" 
data = "" - if (macro.find(" ") != -1): - cmd = macro[0:macro.find(" ")] - data = macro[macro.find(" ") + 1:] + if macro.find(" ") != -1: + cmd = macro[0 : macro.find(" ")] + data = macro[macro.find(" ") + 1 :] else: cmd = macro - if (cmd == "argc"): + if cmd == "argc": outtext += str(nargs) - if (cmd == "ifret" and ret): + if cmd == "ifret" and ret: outtext += data - if (cmd == "ifargs" and nargs): + if cmd == "ifargs" and nargs: outtext += data - if (cmd == "ifretargs" and nargs and ret): + if cmd == "ifretargs" and nargs and ret: outtext += data - if (cmd == "ifconst" and const): + if cmd == "ifconst" and const: outtext += data - elif (cmd == "ifnoconst" and not const): + elif cmd == "ifnoconst" and not const: outtext += data - elif (cmd == "ifnoret" and not ret): + elif cmd == "ifnoret" and not ret: outtext += data - elif (cmd == "iftempl" and (nargs > 0 or ret)): + elif cmd == "iftempl" and (nargs > 0 or ret): outtext += data - elif (cmd == "arg,"): + elif cmd == "arg,": for i in range(1, nargs + 1): - if (i > 1): + if i > 1: outtext += ", " outtext += data.replace("@", str(i)) - elif (cmd == "arg"): + elif cmd == "arg": for i in range(1, nargs + 1): outtext += data.replace("@", str(i)) - elif (cmd == "noarg"): + elif cmd == "noarg": for i in range(nargs + 1, argmax + 1): outtext += data.replace("@", str(i)) @@ -348,7 +347,9 @@ def run(target, source, env): text_ext = "" text_free_func = "#ifndef METHOD_BIND_FREE_FUNC_H\n#define METHOD_BIND_FREE_FUNC_H\n" text_free_func += "\n//including this header file allows method binding to use free functions\n" - text_free_func += "//note that the free function must have a pointer to an instance of the class as its first parameter\n" + text_free_func += ( + "//note that the free function must have a pointer to an instance of the class as its first parameter\n" + ) for i in range(0, versions + 1): @@ -361,7 +362,7 @@ def run(target, source, env): t += make_version(template_typed, i, versions, True, False) t += make_version(template, i, versions, True, True) t += make_version(template_typed, i, versions, True, True) - if (i >= versions_ext): + if i >= versions_ext: text_ext += t else: text += t @@ -383,6 +384,7 @@ def run(target, source, env): f.write(text_free_func) -if __name__ == '__main__': +if __name__ == "__main__": from platform_methods import subprocess_main + subprocess_main(globals()) diff --git a/core/math/SCsub b/core/math/SCsub index be438fcfbe..c8fdac207e 100644 --- a/core/math/SCsub +++ b/core/math/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") env_math = env.Clone() diff --git a/core/os/SCsub b/core/os/SCsub index 1c5f954470..19a6549225 100644 --- a/core/os/SCsub +++ b/core/os/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.core_sources, "*.cpp") diff --git a/doc/tools/doc_merge.py b/doc/tools/doc_merge.py index 496d5dcb74..f6f52f5d66 100755 --- a/doc/tools/doc_merge.py +++ b/doc/tools/doc_merge.py @@ -21,7 +21,7 @@ def write_string(_f, text, newline=True): for t in range(tab): _f.write("\t") _f.write(text) - if (newline): + if newline: _f.write("\n") @@ -30,7 +30,7 @@ def escape(ret): ret = ret.replace("<", ">") ret = ret.replace(">", "<") ret = ret.replace("'", "'") - ret = ret.replace("\"", """) + ret = ret.replace('"', """) return ret @@ -43,25 +43,26 @@ def dec_tab(): global tab tab -= 1 + write_string(f, '') write_string(f, '') def get_tag(node, name): tag = "" - if (name in node.attrib): - tag = ' ' + name + '="' + escape(node.attrib[name]) + '" ' 
+ if name in node.attrib: + tag = " " + name + '="' + escape(node.attrib[name]) + '" ' return tag def find_method_descr(old_class, name): methods = old_class.find("methods") - if(methods != None and len(list(methods)) > 0): + if methods != None and len(list(methods)) > 0: for m in list(methods): - if (m.attrib["name"] == name): + if m.attrib["name"] == name: description = m.find("description") - if (description != None and description.text.strip() != ""): + if description != None and description.text.strip() != "": return description.text return None @@ -70,11 +71,11 @@ def find_method_descr(old_class, name): def find_signal_descr(old_class, name): signals = old_class.find("signals") - if(signals != None and len(list(signals)) > 0): + if signals != None and len(list(signals)) > 0: for m in list(signals): - if (m.attrib["name"] == name): + if m.attrib["name"] == name: description = m.find("description") - if (description != None and description.text.strip() != ""): + if description != None and description.text.strip() != "": return description.text return None @@ -82,13 +83,13 @@ def find_signal_descr(old_class, name): def find_constant_descr(old_class, name): - if (old_class is None): + if old_class is None: return None constants = old_class.find("constants") - if(constants != None and len(list(constants)) > 0): + if constants != None and len(list(constants)) > 0: for m in list(constants): - if (m.attrib["name"] == name): - if (m.text.strip() != ""): + if m.attrib["name"] == name: + if m.text.strip() != "": return m.text return None @@ -96,35 +97,35 @@ def find_constant_descr(old_class, name): def write_class(c): class_name = c.attrib["name"] print("Parsing Class: " + class_name) - if (class_name in old_classes): + if class_name in old_classes: old_class = old_classes[class_name] else: old_class = None category = get_tag(c, "category") inherits = get_tag(c, "inherits") - write_string(f, '') + write_string(f, '") inc_tab() write_string(f, "") - if (old_class != None): + if old_class != None: old_brief_descr = old_class.find("brief_description") - if (old_brief_descr != None): + if old_brief_descr != None: write_string(f, escape(old_brief_descr.text.strip())) write_string(f, "") write_string(f, "") - if (old_class != None): + if old_class != None: old_descr = old_class.find("description") - if (old_descr != None): + if old_descr != None: write_string(f, escape(old_descr.text.strip())) write_string(f, "") methods = c.find("methods") - if(methods != None and len(list(methods)) > 0): + if methods != None and len(list(methods)) > 0: write_string(f, "") inc_tab() @@ -132,35 +133,46 @@ def write_class(c): for m in list(methods): qualifiers = get_tag(m, "qualifiers") - write_string(f, '') + write_string(f, '") inc_tab() for a in list(m): - if (a.tag == "return"): + if a.tag == "return": typ = get_tag(a, "type") - write_string(f, '') - write_string(f, '') - elif (a.tag == "argument"): + write_string(f, "") + write_string(f, "") + elif a.tag == "argument": default = get_tag(a, "default") - write_string(f, '') - write_string(f, '') + write_string( + f, + '", + ) + write_string(f, "") - write_string(f, '') - if (old_class != None): + write_string(f, "") + if old_class != None: old_method_descr = find_method_descr(old_class, m.attrib["name"]) - if (old_method_descr): + if old_method_descr: write_string(f, escape(escape(old_method_descr.strip()))) - write_string(f, '') + write_string(f, "") dec_tab() write_string(f, "") dec_tab() write_string(f, "") signals = c.find("signals") - if(signals != None and 
len(list(signals)) > 0): + if signals != None and len(list(signals)) > 0: write_string(f, "") inc_tab() @@ -171,24 +183,33 @@ def write_class(c): inc_tab() for a in list(m): - if (a.tag == "argument"): + if a.tag == "argument": - write_string(f, '') - write_string(f, '') + write_string( + f, + '', + ) + write_string(f, "") - write_string(f, '') - if (old_class != None): + write_string(f, "") + if old_class != None: old_signal_descr = find_signal_descr(old_class, m.attrib["name"]) - if (old_signal_descr): + if old_signal_descr: write_string(f, escape(old_signal_descr.strip())) - write_string(f, '') + write_string(f, "") dec_tab() write_string(f, "") dec_tab() write_string(f, "") constants = c.find("constants") - if(constants != None and len(list(constants)) > 0): + if constants != None and len(list(constants)) > 0: write_string(f, "") inc_tab() @@ -197,7 +218,7 @@ def write_class(c): write_string(f, '') old_constant_descr = find_constant_descr(old_class, m.attrib["name"]) - if (old_constant_descr): + if old_constant_descr: write_string(f, escape(old_constant_descr.strip())) write_string(f, "") @@ -207,9 +228,10 @@ def write_class(c): dec_tab() write_string(f, "") + for c in list(old_doc): old_classes[c.attrib["name"]] = c for c in list(new_doc): write_class(c) -write_string(f, '\n') +write_string(f, "\n") diff --git a/doc/tools/doc_status.py b/doc/tools/doc_status.py index e6e6d5f606..629b5a032b 100755 --- a/doc/tools/doc_status.py +++ b/doc/tools/doc_status.py @@ -13,75 +13,74 @@ import xml.etree.ElementTree as ET ################################################################################ flags = { - 'c': platform.platform() != 'Windows', # Disable by default on windows, since we use ANSI escape codes - 'b': False, - 'g': False, - 's': False, - 'u': False, - 'h': False, - 'p': False, - 'o': True, - 'i': False, - 'a': True, - 'e': False, + "c": platform.platform() != "Windows", # Disable by default on windows, since we use ANSI escape codes + "b": False, + "g": False, + "s": False, + "u": False, + "h": False, + "p": False, + "o": True, + "i": False, + "a": True, + "e": False, } flag_descriptions = { - 'c': 'Toggle colors when outputting.', - 'b': 'Toggle showing only not fully described classes.', - 'g': 'Toggle showing only completed classes.', - 's': 'Toggle showing comments about the status.', - 'u': 'Toggle URLs to docs.', - 'h': 'Show help and exit.', - 'p': 'Toggle showing percentage as well as counts.', - 'o': 'Toggle overall column.', - 'i': 'Toggle collapse of class items columns.', - 'a': 'Toggle showing all items.', - 'e': 'Toggle hiding empty items.', + "c": "Toggle colors when outputting.", + "b": "Toggle showing only not fully described classes.", + "g": "Toggle showing only completed classes.", + "s": "Toggle showing comments about the status.", + "u": "Toggle URLs to docs.", + "h": "Show help and exit.", + "p": "Toggle showing percentage as well as counts.", + "o": "Toggle overall column.", + "i": "Toggle collapse of class items columns.", + "a": "Toggle showing all items.", + "e": "Toggle hiding empty items.", } long_flags = { - 'colors': 'c', - 'use-colors': 'c', - - 'bad': 'b', - 'only-bad': 'b', - - 'good': 'g', - 'only-good': 'g', - - 'comments': 's', - 'status': 's', - - 'urls': 'u', - 'gen-url': 'u', - - 'help': 'h', - - 'percent': 'p', - 'use-percentages': 'p', - - 'overall': 'o', - 'use-overall': 'o', - - 'items': 'i', - 'collapse': 'i', - - 'all': 'a', - - 'empty': 'e', + "colors": "c", + "use-colors": "c", + "bad": "b", + "only-bad": "b", + "good": "g", + 
"only-good": "g", + "comments": "s", + "status": "s", + "urls": "u", + "gen-url": "u", + "help": "h", + "percent": "p", + "use-percentages": "p", + "overall": "o", + "use-overall": "o", + "items": "i", + "collapse": "i", + "all": "a", + "empty": "e", } -table_columns = ['name', 'brief_description', 'description', 'methods', 'constants', 'members', 'signals', 'theme_items'] -table_column_names = ['Name', 'Brief Desc.', 'Desc.', 'Methods', 'Constants', 'Members', 'Signals', 'Theme Items'] +table_columns = [ + "name", + "brief_description", + "description", + "methods", + "constants", + "members", + "signals", + "theme_items", +] +table_column_names = ["Name", "Brief Desc.", "Desc.", "Methods", "Constants", "Members", "Signals", "Theme Items"] colors = { - 'name': [36], # cyan - 'part_big_problem': [4, 31], # underline, red - 'part_problem': [31], # red - 'part_mostly_good': [33], # yellow - 'part_good': [32], # green - 'url': [4, 34], # underline, blue - 'section': [1, 4], # bold, underline - 'state_off': [36], # cyan - 'state_on': [1, 35], # bold, magenta/plum - 'bold': [1], # bold + "name": [36], # cyan + "part_big_problem": [4, 31], # underline, red + "part_problem": [31], # red + "part_mostly_good": [33], # yellow + "part_good": [32], # green + "url": [4, 34], # underline, blue + "section": [1, 4], # bold, underline + "state_off": [36], # cyan + "state_on": [1, 35], # bold, magenta/plum + "bold": [1], # bold } overall_progress_description_weigth = 10 @@ -90,6 +89,7 @@ overall_progress_description_weigth = 10 # Utils # ################################################################################ + def validate_tag(elem, tag): if elem.tag != tag: print('Tag mismatch, expected "' + tag + '", got ' + elem.tag) @@ -97,36 +97,38 @@ def validate_tag(elem, tag): def color(color, string): - if flags['c'] and terminal_supports_color(): - color_format = '' + if flags["c"] and terminal_supports_color(): + color_format = "" for code in colors[color]: - color_format += '\033[' + str(code) + 'm' - return color_format + string + '\033[0m' + color_format += "\033[" + str(code) + "m" + return color_format + string + "\033[0m" else: return string -ansi_escape = re.compile(r'\x1b[^m]*m') + +ansi_escape = re.compile(r"\x1b[^m]*m") def nonescape_len(s): - return len(ansi_escape.sub('', s)) + return len(ansi_escape.sub("", s)) + def terminal_supports_color(): p = sys.platform - supported_platform = p != 'Pocket PC' and (p != 'win32' or - 'ANSICON' in os.environ) + supported_platform = p != "Pocket PC" and (p != "win32" or "ANSICON" in os.environ) - is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() + is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty() if not supported_platform or not is_a_tty: return False return True + ################################################################################ # Classes # ################################################################################ -class ClassStatusProgress: +class ClassStatusProgress: def __init__(self, described=0, total=0): self.described = described self.total = total @@ -143,42 +145,41 @@ class ClassStatusProgress: return self.described >= self.total def to_configured_colored_string(self): - if flags['p']: - return self.to_colored_string('{percent}% ({has}/{total})', '{pad_percent}{pad_described}{s}{pad_total}') + if flags["p"]: + return self.to_colored_string("{percent}% ({has}/{total})", "{pad_percent}{pad_described}{s}{pad_total}") else: return self.to_colored_string() - def to_colored_string(self, 
format='{has}/{total}', pad_format='{pad_described}{s}{pad_total}'): + def to_colored_string(self, format="{has}/{total}", pad_format="{pad_described}{s}{pad_total}"): ratio = float(self.described) / float(self.total) if self.total != 0 else 1 percent = int(round(100 * ratio)) s = format.format(has=str(self.described), total=str(self.total), percent=str(percent)) if self.described >= self.total: - s = color('part_good', s) + s = color("part_good", s) elif self.described >= self.total / 4 * 3: - s = color('part_mostly_good', s) + s = color("part_mostly_good", s) elif self.described > 0: - s = color('part_problem', s) + s = color("part_problem", s) else: - s = color('part_big_problem', s) + s = color("part_big_problem", s) pad_size = max(len(str(self.described)), len(str(self.total))) - pad_described = ''.ljust(pad_size - len(str(self.described))) - pad_percent = ''.ljust(3 - len(str(percent))) - pad_total = ''.ljust(pad_size - len(str(self.total))) + pad_described = "".ljust(pad_size - len(str(self.described))) + pad_percent = "".ljust(3 - len(str(percent))) + pad_total = "".ljust(pad_size - len(str(self.total))) return pad_format.format(pad_described=pad_described, pad_total=pad_total, pad_percent=pad_percent, s=s) class ClassStatus: - - def __init__(self, name=''): + def __init__(self, name=""): self.name = name self.has_brief_description = True self.has_description = True self.progresses = { - 'methods': ClassStatusProgress(), - 'constants': ClassStatusProgress(), - 'members': ClassStatusProgress(), - 'theme_items': ClassStatusProgress(), - 'signals': ClassStatusProgress() + "methods": ClassStatusProgress(), + "constants": ClassStatusProgress(), + "members": ClassStatusProgress(), + "theme_items": ClassStatusProgress(), + "signals": ClassStatusProgress(), } def __add__(self, other): @@ -208,66 +209,70 @@ class ClassStatus: def make_output(self): output = {} - output['name'] = color('name', self.name) + output["name"] = color("name", self.name) - ok_string = color('part_good', 'OK') - missing_string = color('part_big_problem', 'MISSING') + ok_string = color("part_good", "OK") + missing_string = color("part_big_problem", "MISSING") - output['brief_description'] = ok_string if self.has_brief_description else missing_string - output['description'] = ok_string if self.has_description else missing_string + output["brief_description"] = ok_string if self.has_brief_description else missing_string + output["description"] = ok_string if self.has_description else missing_string description_progress = ClassStatusProgress( (self.has_brief_description + self.has_description) * overall_progress_description_weigth, - 2 * overall_progress_description_weigth + 2 * overall_progress_description_weigth, ) items_progress = ClassStatusProgress() - for k in ['methods', 'constants', 'members', 'signals', 'theme_items']: + for k in ["methods", "constants", "members", "signals", "theme_items"]: items_progress += self.progresses[k] output[k] = self.progresses[k].to_configured_colored_string() - output['items'] = items_progress.to_configured_colored_string() + output["items"] = items_progress.to_configured_colored_string() - output['overall'] = (description_progress + items_progress).to_colored_string(color('bold', '{percent}%'), '{pad_percent}{s}') + output["overall"] = (description_progress + items_progress).to_colored_string( + color("bold", "{percent}%"), "{pad_percent}{s}" + ) - if self.name.startswith('Total'): - output['url'] = color('url', 'https://docs.godotengine.org/en/latest/classes/') - if flags['s']: - 
output['comment'] = color('part_good', 'ALL OK') + if self.name.startswith("Total"): + output["url"] = color("url", "https://docs.godotengine.org/en/latest/classes/") + if flags["s"]: + output["comment"] = color("part_good", "ALL OK") else: - output['url'] = color('url', 'https://docs.godotengine.org/en/latest/classes/class_{name}.html'.format(name=self.name.lower())) + output["url"] = color( + "url", "https://docs.godotengine.org/en/latest/classes/class_{name}.html".format(name=self.name.lower()) + ) - if flags['s'] and not flags['g'] and self.is_ok(): - output['comment'] = color('part_good', 'ALL OK') + if flags["s"] and not flags["g"] and self.is_ok(): + output["comment"] = color("part_good", "ALL OK") return output @staticmethod def generate_for_class(c): status = ClassStatus() - status.name = c.attrib['name'] + status.name = c.attrib["name"] for tag in list(c): - if tag.tag == 'brief_description': + if tag.tag == "brief_description": status.has_brief_description = len(tag.text.strip()) > 0 - elif tag.tag == 'description': + elif tag.tag == "description": status.has_description = len(tag.text.strip()) > 0 - elif tag.tag in ['methods', 'signals']: + elif tag.tag in ["methods", "signals"]: for sub_tag in list(tag): - descr = sub_tag.find('description') + descr = sub_tag.find("description") status.progresses[tag.tag].increment(len(descr.text.strip()) > 0) - elif tag.tag in ['constants', 'members', 'theme_items']: + elif tag.tag in ["constants", "members", "theme_items"]: for sub_tag in list(tag): if not sub_tag.text is None: status.progresses[tag.tag].increment(len(sub_tag.text.strip()) > 0) - elif tag.tag in ['tutorials']: + elif tag.tag in ["tutorials"]: pass # Ignore those tags for now - elif tag.tag in ['theme_items']: + elif tag.tag in ["theme_items"]: pass # Ignore those tags, since they seem to lack description at all else: @@ -286,63 +291,69 @@ merged_file = "" for arg in sys.argv[1:]: try: - if arg.startswith('--'): + if arg.startswith("--"): flags[long_flags[arg[2:]]] = not flags[long_flags[arg[2:]]] - elif arg.startswith('-'): + elif arg.startswith("-"): for f in arg[1:]: flags[f] = not flags[f] elif os.path.isdir(arg): for f in os.listdir(arg): - if f.endswith('.xml'): - input_file_list.append(os.path.join(arg, f)); + if f.endswith(".xml"): + input_file_list.append(os.path.join(arg, f)) else: input_class_list.append(arg) except KeyError: print("Unknown command line flag: " + arg) sys.exit(1) -if flags['i']: - for r in ['methods', 'constants', 'members', 'signals', 'theme_items']: +if flags["i"]: + for r in ["methods", "constants", "members", "signals", "theme_items"]: index = table_columns.index(r) del table_column_names[index] del table_columns[index] - table_column_names.append('Items') - table_columns.append('items') + table_column_names.append("Items") + table_columns.append("items") -if flags['o'] == (not flags['i']): - table_column_names.append(color('bold', 'Overall')) - table_columns.append('overall') +if flags["o"] == (not flags["i"]): + table_column_names.append(color("bold", "Overall")) + table_columns.append("overall") -if flags['u']: - table_column_names.append('Docs URL') - table_columns.append('url') +if flags["u"]: + table_column_names.append("Docs URL") + table_columns.append("url") ################################################################################ # Help # ################################################################################ -if len(input_file_list) < 1 or flags['h']: - if not flags['h']: - print(color('section', 'Invalid usage') + ': 
Please specify a classes directory') - print(color('section', 'Usage') + ': doc_status.py [flags] [class names]') - print('\t< and > signify required parameters, while [ and ] signify optional parameters.') - print(color('section', 'Available flags') + ':') +if len(input_file_list) < 1 or flags["h"]: + if not flags["h"]: + print(color("section", "Invalid usage") + ": Please specify a classes directory") + print(color("section", "Usage") + ": doc_status.py [flags] [class names]") + print("\t< and > signify required parameters, while [ and ] signify optional parameters.") + print(color("section", "Available flags") + ":") possible_synonym_list = list(long_flags) possible_synonym_list.sort() flag_list = list(flags) flag_list.sort() for flag in flag_list: - synonyms = [color('name', '-' + flag)] + synonyms = [color("name", "-" + flag)] for synonym in possible_synonym_list: if long_flags[synonym] == flag: - synonyms.append(color('name', '--' + synonym)) + synonyms.append(color("name", "--" + synonym)) - print(('{synonyms} (Currently ' + color('state_' + ('on' if flags[flag] else 'off'), '{value}') + ')\n\t{description}').format( - synonyms=', '.join(synonyms), - value=('on' if flags[flag] else 'off'), - description=flag_descriptions[flag] - )) + print( + ( + "{synonyms} (Currently " + + color("state_" + ("on" if flags[flag] else "off"), "{value}") + + ")\n\t{description}" + ).format( + synonyms=", ".join(synonyms), + value=("on" if flags[flag] else "off"), + description=flag_descriptions[flag], + ) + ) sys.exit(0) @@ -357,21 +368,21 @@ for file in input_file_list: tree = ET.parse(file) doc = tree.getroot() - if 'version' not in doc.attrib: + if "version" not in doc.attrib: print('Version missing from "doc"') sys.exit(255) - version = doc.attrib['version'] + version = doc.attrib["version"] - if doc.attrib['name'] in class_names: + if doc.attrib["name"] in class_names: continue - class_names.append(doc.attrib['name']) - classes[doc.attrib['name']] = doc + class_names.append(doc.attrib["name"]) + classes[doc.attrib["name"]] = doc class_names.sort() if len(input_class_list) < 1: - input_class_list = ['*'] + input_class_list = ["*"] filtered_classes = set() for pattern in input_class_list: @@ -384,23 +395,23 @@ filtered_classes.sort() ################################################################################ table = [table_column_names] -table_row_chars = '| - ' -table_column_chars = '|' +table_row_chars = "| - " +table_column_chars = "|" -total_status = ClassStatus('Total') +total_status = ClassStatus("Total") for cn in filtered_classes: c = classes[cn] - validate_tag(c, 'class') + validate_tag(c, "class") status = ClassStatus.generate_for_class(c) total_status = total_status + status - if (flags['b'] and status.is_ok()) or (flags['g'] and not status.is_ok()) or (not flags['a']): + if (flags["b"] and status.is_ok()) or (flags["g"] and not status.is_ok()) or (not flags["a"]): continue - if flags['e'] and status.is_empty(): + if flags["e"] and status.is_empty(): continue out = status.make_output() @@ -409,10 +420,10 @@ for cn in filtered_classes: if column in out: row.append(out[column]) else: - row.append('') + row.append("") - if 'comment' in out and out['comment'] != '': - row.append(out['comment']) + if "comment" in out and out["comment"] != "": + row.append(out["comment"]) table.append(row) @@ -421,22 +432,22 @@ for cn in filtered_classes: # Print output table # ################################################################################ -if len(table) == 1 and flags['a']: - 
print(color('part_big_problem', 'No classes suitable for printing!')) +if len(table) == 1 and flags["a"]: + print(color("part_big_problem", "No classes suitable for printing!")) sys.exit(0) -if len(table) > 2 or not flags['a']: - total_status.name = 'Total = {0}'.format(len(table) - 1) +if len(table) > 2 or not flags["a"]: + total_status.name = "Total = {0}".format(len(table) - 1) out = total_status.make_output() row = [] for column in table_columns: if column in out: row.append(out[column]) else: - row.append('') + row.append("") table.append(row) -if flags['a']: +if flags["a"]: # Duplicate the headers at the bottom of the table so they can be viewed # without having to scroll back to the top. table.append(table_column_names) @@ -451,7 +462,9 @@ for row in table: divider_string = table_row_chars[0] for cell_i in range(len(table[0])): - divider_string += table_row_chars[1] + table_row_chars[2] * (table_column_sizes[cell_i]) + table_row_chars[1] + table_row_chars[0] + divider_string += ( + table_row_chars[1] + table_row_chars[2] * (table_column_sizes[cell_i]) + table_row_chars[1] + table_row_chars[0] + ) print(divider_string) for row_i, row in enumerate(table): @@ -461,7 +474,11 @@ for row_i, row in enumerate(table): if cell_i == 0: row_string += table_row_chars[3] + cell + table_row_chars[3] * (padding_needed - 1) else: - row_string += table_row_chars[3] * int(math.floor(float(padding_needed) / 2)) + cell + table_row_chars[3] * int(math.ceil(float(padding_needed) / 2)) + row_string += ( + table_row_chars[3] * int(math.floor(float(padding_needed) / 2)) + + cell + + table_row_chars[3] * int(math.ceil(float(padding_needed) / 2)) + ) row_string += table_column_chars print(row_string) @@ -474,5 +491,5 @@ for row_i, row in enumerate(table): print(divider_string) -if total_status.is_ok() and not flags['g']: - print('All listed classes are ' + color('part_good', 'OK') + '!') +if total_status.is_ok() and not flags["g"]: + print("All listed classes are " + color("part_good", "OK") + "!") diff --git a/doc/tools/makerst.py b/doc/tools/makerst.py index 883bf5bf13..0d6c2be5e6 100755 --- a/doc/tools/makerst.py +++ b/doc/tools/makerst.py @@ -7,10 +7,12 @@ import xml.etree.ElementTree as ET from collections import OrderedDict # Uncomment to do type checks. 
I have it commented out so it works below Python 3.5 -#from typing import List, Dict, TextIO, Tuple, Iterable, Optional, DefaultDict, Any, Union +# from typing import List, Dict, TextIO, Tuple, Iterable, Optional, DefaultDict, Any, Union # http(s)://docs.godotengine.org///path/to/page.html(#fragment-tag) -GODOT_DOCS_PATTERN = re.compile(r'^http(?:s)?://docs\.godotengine\.org/(?:[a-zA-Z0-9.\-_]*)/(?:[a-zA-Z0-9.\-_]*)/(.*)\.html(#.*)?$') +GODOT_DOCS_PATTERN = re.compile( + r"^http(?:s)?://docs\.godotengine\.org/(?:[a-zA-Z0-9.\-_]*)/(?:[a-zA-Z0-9.\-_]*)/(.*)\.html(#.*)?$" +) def print_error(error, state): # type: (str, State) -> None @@ -37,7 +39,9 @@ class TypeName: class PropertyDef: - def __init__(self, name, type_name, setter, getter, text, default_value, overridden): # type: (str, TypeName, Optional[str], Optional[str], Optional[str], Optional[str], Optional[bool]) -> None + def __init__( + self, name, type_name, setter, getter, text, default_value, overridden + ): # type: (str, TypeName, Optional[str], Optional[str], Optional[str], Optional[str], Optional[bool]) -> None self.name = name self.type_name = type_name self.setter = setter @@ -46,6 +50,7 @@ class PropertyDef: self.default_value = default_value self.overridden = overridden + class ParameterDef: def __init__(self, name, type_name, default_value): # type: (str, TypeName, Optional[str]) -> None self.name = name @@ -61,7 +66,9 @@ class SignalDef: class MethodDef: - def __init__(self, name, return_type, parameters, description, qualifiers): # type: (str, TypeName, List[ParameterDef], Optional[str], Optional[str]) -> None + def __init__( + self, name, return_type, parameters, description, qualifiers + ): # type: (str, TypeName, List[ParameterDef], Optional[str], Optional[str]) -> None self.name = name self.return_type = return_type self.parameters = parameters @@ -144,10 +151,12 @@ class State: getter = property.get("getter") or None default_value = property.get("default") or None if default_value is not None: - default_value = '``{}``'.format(default_value) + default_value = "``{}``".format(default_value) overridden = property.get("override") or False - property_def = PropertyDef(property_name, type_name, setter, getter, property.text, default_value, overridden) + property_def = PropertyDef( + property_name, type_name, setter, getter, property.text, default_value, overridden + ) class_def.properties[property_name] = property_def methods = class_root.find("methods") @@ -246,8 +255,6 @@ class State: if link.text is not None: class_def.tutorials.append(link.text) - - def sort_classes(self): # type: () -> None self.classes = OrderedDict(sorted(self.classes.items(), key=lambda t: t[0])) @@ -273,7 +280,11 @@ def main(): # type: () -> None parser.add_argument("path", nargs="+", help="A path to an XML file or a directory containing XML files to parse.") group = parser.add_mutually_exclusive_group() group.add_argument("--output", "-o", default=".", help="The directory to save output .rst files in.") - group.add_argument("--dry-run", action="store_true", help="If passed, no output will be generated and XML files are only checked for errors.") + group.add_argument( + "--dry-run", + action="store_true", + help="If passed, no output will be generated and XML files are only checked for errors.", + ) args = parser.parse_args() print("Checking for errors in the XML class reference...") @@ -285,15 +296,15 @@ def main(): # type: () -> None if path.endswith(os.sep): path = path[:-1] - if os.path.basename(path) == 'modules': + if 
os.path.basename(path) == "modules": for subdir, dirs, _ in os.walk(path): - if 'doc_classes' in dirs: - doc_dir = os.path.join(subdir, 'doc_classes') - class_file_names = (f for f in os.listdir(doc_dir) if f.endswith('.xml')) + if "doc_classes" in dirs: + doc_dir = os.path.join(subdir, "doc_classes") + class_file_names = (f for f in os.listdir(doc_dir) if f.endswith(".xml")) file_list += (os.path.join(doc_dir, f) for f in class_file_names) elif os.path.isdir(path): - file_list += (os.path.join(path, f) for f in os.listdir(path) if f.endswith('.xml')) + file_list += (os.path.join(path, f) for f in os.listdir(path) if f.endswith(".xml")) elif os.path.isfile(path): if not path.endswith(".xml"): @@ -313,7 +324,7 @@ def main(): # type: () -> None continue doc = tree.getroot() - if 'version' not in doc.attrib: + if "version" not in doc.attrib: print_error("Version missing from 'doc', file: {}".format(cur_file), state) continue @@ -342,13 +353,14 @@ def main(): # type: () -> None print("Errors were found in the class reference XML. Please check the messages above.") exit(1) + def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, State, bool, str) -> None class_name = class_def.name if dry_run: f = open(os.devnull, "w", encoding="utf-8") else: - f = open(os.path.join(output_dir, "class_" + class_name.lower() + '.rst'), 'w', encoding='utf-8') + f = open(os.path.join(output_dir, "class_" + class_name.lower() + ".rst"), "w", encoding="utf-8") # Warn contributors not to edit this file directly f.write(":github_url: hide\n\n") @@ -357,13 +369,13 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S f.write(".. The source is found in doc/classes or modules//doc_classes.\n\n") f.write(".. _class_" + class_name + ":\n\n") - f.write(make_heading(class_name, '=')) + f.write(make_heading(class_name, "=")) # Inheritance tree # Ascendants if class_def.inherits: inh = class_def.inherits.strip() - f.write('**Inherits:** ') + f.write("**Inherits:** ") first = True while inh in state.classes: if not first: @@ -386,7 +398,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S inherited.append(c.name) if len(inherited): - f.write('**Inherited By:** ') + f.write("**Inherited By:** ") for i, child in enumerate(inherited): if i > 0: f.write(", ") @@ -398,20 +410,20 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S f.write(rstize_text(class_def.brief_description.strip(), state) + "\n\n") # Class description - if class_def.description is not None and class_def.description.strip() != '': - f.write(make_heading('Description', '-')) + if class_def.description is not None and class_def.description.strip() != "": + f.write(make_heading("Description", "-")) f.write(rstize_text(class_def.description.strip(), state) + "\n\n") # Online tutorials if len(class_def.tutorials) > 0: - f.write(make_heading('Tutorials', '-')) + f.write(make_heading("Tutorials", "-")) for t in class_def.tutorials: link = t.strip() f.write("- " + make_url(link) + "\n\n") # Properties overview if len(class_def.properties) > 0: - f.write(make_heading('Properties', '-')) + f.write(make_heading("Properties", "-")) ml = [] # type: List[Tuple[str, str, str]] for property_def in class_def.properties.values(): type_rst = property_def.type_name.to_rst(state) @@ -425,7 +437,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S # Methods overview if len(class_def.methods) > 0: - f.write(make_heading('Methods', '-')) + 
f.write(make_heading("Methods", "-")) ml = [] for method_list in class_def.methods.values(): for m in method_list: @@ -434,7 +446,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S # Theme properties if class_def.theme_items is not None and len(class_def.theme_items) > 0: - f.write(make_heading('Theme Properties', '-')) + f.write(make_heading("Theme Properties", "-")) pl = [] for theme_item_list in class_def.theme_items.values(): for theme_item in theme_item_list: @@ -443,30 +455,30 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S # Signals if len(class_def.signals) > 0: - f.write(make_heading('Signals', '-')) + f.write(make_heading("Signals", "-")) index = 0 for signal in class_def.signals.values(): if index != 0: - f.write('----\n\n') + f.write("----\n\n") f.write(".. _class_{}_signal_{}:\n\n".format(class_name, signal.name)) _, signature = make_method_signature(class_def, signal, False, state) f.write("- {}\n\n".format(signature)) - if signal.description is not None and signal.description.strip() != '': - f.write(rstize_text(signal.description.strip(), state) + '\n\n') + if signal.description is not None and signal.description.strip() != "": + f.write(rstize_text(signal.description.strip(), state) + "\n\n") index += 1 # Enums if len(class_def.enums) > 0: - f.write(make_heading('Enumerations', '-')) + f.write(make_heading("Enumerations", "-")) index = 0 for e in class_def.enums.values(): if index != 0: - f.write('----\n\n') + f.write("----\n\n") f.write(".. _enum_{}_{}:\n\n".format(class_name, e.name)) # Sphinx seems to divide the bullet list into individual
    tags if we weave the labels into it. @@ -479,16 +491,16 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S f.write("enum **{}**:\n\n".format(e.name)) for value in e.values.values(): f.write("- **{}** = **{}**".format(value.name, value.value)) - if value.text is not None and value.text.strip() != '': - f.write(' --- ' + rstize_text(value.text.strip(), state)) + if value.text is not None and value.text.strip() != "": + f.write(" --- " + rstize_text(value.text.strip(), state)) - f.write('\n\n') + f.write("\n\n") index += 1 # Constants if len(class_def.constants) > 0: - f.write(make_heading('Constants', '-')) + f.write(make_heading("Constants", "-")) # Sphinx seems to divide the bullet list into individual
      tags if we weave the labels into it. # As such I'll put them all above the list. Won't be perfect but better than making the list visually broken. for constant in class_def.constants.values(): @@ -496,14 +508,14 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S for constant in class_def.constants.values(): f.write("- **{}** = **{}**".format(constant.name, constant.value)) - if constant.text is not None and constant.text.strip() != '': - f.write(' --- ' + rstize_text(constant.text.strip(), state)) + if constant.text is not None and constant.text.strip() != "": + f.write(" --- " + rstize_text(constant.text.strip(), state)) - f.write('\n\n') + f.write("\n\n") # Property descriptions if any(not p.overridden for p in class_def.properties.values()) > 0: - f.write(make_heading('Property Descriptions', '-')) + f.write(make_heading("Property Descriptions", "-")) index = 0 for property_def in class_def.properties.values(): @@ -511,36 +523,36 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S continue if index != 0: - f.write('----\n\n') + f.write("----\n\n") f.write(".. _class_{}_property_{}:\n\n".format(class_name, property_def.name)) - f.write('- {} **{}**\n\n'.format(property_def.type_name.to_rst(state), property_def.name)) + f.write("- {} **{}**\n\n".format(property_def.type_name.to_rst(state), property_def.name)) info = [] if property_def.default_value is not None: info.append(("*Default*", property_def.default_value)) if property_def.setter is not None and not property_def.setter.startswith("_"): - info.append(("*Setter*", property_def.setter + '(value)')) + info.append(("*Setter*", property_def.setter + "(value)")) if property_def.getter is not None and not property_def.getter.startswith("_"): - info.append(('*Getter*', property_def.getter + '()')) + info.append(("*Getter*", property_def.getter + "()")) if len(info) > 0: format_table(f, info) - if property_def.text is not None and property_def.text.strip() != '': - f.write(rstize_text(property_def.text.strip(), state) + '\n\n') + if property_def.text is not None and property_def.text.strip() != "": + f.write(rstize_text(property_def.text.strip(), state) + "\n\n") index += 1 # Method descriptions if len(class_def.methods) > 0: - f.write(make_heading('Method Descriptions', '-')) + f.write(make_heading("Method Descriptions", "-")) index = 0 for method_list in class_def.methods.values(): for i, m in enumerate(method_list): if index != 0: - f.write('----\n\n') + f.write("----\n\n") if i == 0: f.write(".. 
_class_{}_method_{}:\n\n".format(class_name, m.name)) @@ -548,8 +560,8 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S ret_type, signature = make_method_signature(class_def, m, False, state) f.write("- {} {}\n\n".format(ret_type, signature)) - if m.description is not None and m.description.strip() != '': - f.write(rstize_text(m.description.strip(), state) + '\n\n') + if m.description is not None and m.description.strip() != "": + f.write(rstize_text(m.description.strip(), state) + "\n\n") index += 1 @@ -558,29 +570,29 @@ def escape_rst(text, until_pos=-1): # type: (str) -> str # Escape \ character, otherwise it ends up as an escape character in rst pos = 0 while True: - pos = text.find('\\', pos, until_pos) + pos = text.find("\\", pos, until_pos) if pos == -1: break - text = text[:pos] + "\\\\" + text[pos + 1:] + text = text[:pos] + "\\\\" + text[pos + 1 :] pos += 2 # Escape * character to avoid interpreting it as emphasis pos = 0 while True: - pos = text.find('*', pos, until_pos) + pos = text.find("*", pos, until_pos) if pos == -1: break - text = text[:pos] + "\*" + text[pos + 1:] + text = text[:pos] + "\*" + text[pos + 1 :] pos += 2 # Escape _ character at the end of a word to avoid interpreting it as an inline hyperlink pos = 0 while True: - pos = text.find('_', pos, until_pos) + pos = text.find("_", pos, until_pos) if pos == -1: break if not text[pos + 1].isalnum(): # don't escape within a snake_case word - text = text[:pos] + "\_" + text[pos + 1:] + text = text[:pos] + "\_" + text[pos + 1 :] pos += 2 else: pos += 1 @@ -592,16 +604,16 @@ def rstize_text(text, state): # type: (str, State) -> str # Linebreak + tabs in the XML should become two line breaks unless in a "codeblock" pos = 0 while True: - pos = text.find('\n', pos) + pos = text.find("\n", pos) if pos == -1: break pre_text = text[:pos] indent_level = 0 - while text[pos + 1] == '\t': + while text[pos + 1] == "\t": pos += 1 indent_level += 1 - post_text = text[pos + 1:] + post_text = text[pos + 1 :] # Handle codeblocks if post_text.startswith("[codeblock]"): @@ -610,28 +622,33 @@ def rstize_text(text, state): # type: (str, State) -> str print_error("[codeblock] without a closing tag, file: {}".format(state.current_class), state) return "" - code_text = post_text[len("[codeblock]"):end_pos] + code_text = post_text[len("[codeblock]") : end_pos] post_text = post_text[end_pos:] # Remove extraneous tabs code_pos = 0 while True: - code_pos = code_text.find('\n', code_pos) + code_pos = code_text.find("\n", code_pos) if code_pos == -1: break to_skip = 0 - while code_pos + to_skip + 1 < len(code_text) and code_text[code_pos + to_skip + 1] == '\t': + while code_pos + to_skip + 1 < len(code_text) and code_text[code_pos + to_skip + 1] == "\t": to_skip += 1 if to_skip > indent_level: - print_error("Four spaces should be used for indentation within [codeblock], file: {}".format(state.current_class), state) + print_error( + "Four spaces should be used for indentation within [codeblock], file: {}".format( + state.current_class + ), + state, + ) - if len(code_text[code_pos + to_skip + 1:]) == 0: + if len(code_text[code_pos + to_skip + 1 :]) == 0: code_text = code_text[:code_pos] + "\n" code_pos += 1 else: - code_text = code_text[:code_pos] + "\n " + code_text[code_pos + to_skip + 1:] + code_text = code_text[:code_pos] + "\n " + code_text[code_pos + to_skip + 1 :] code_pos += 5 - to_skip text = pre_text + "\n[codeblock]" + code_text + post_text @@ -642,7 +659,7 @@ def rstize_text(text, state): # type: (str, 
State) -> str text = pre_text + "\n\n" + post_text pos += 2 - next_brac_pos = text.find('[') + next_brac_pos = text.find("[") text = escape_rst(text, next_brac_pos) # Handle [tags] @@ -654,54 +671,59 @@ def rstize_text(text, state): # type: (str, State) -> str tag_depth = 0 previous_pos = 0 while True: - pos = text.find('[', pos) + pos = text.find("[", pos) if inside_url and (pos > previous_pos): url_has_name = True if pos == -1: break - endq_pos = text.find(']', pos + 1) + endq_pos = text.find("]", pos + 1) if endq_pos == -1: break pre_text = text[:pos] - post_text = text[endq_pos + 1:] - tag_text = text[pos + 1:endq_pos] + post_text = text[endq_pos + 1 :] + tag_text = text[pos + 1 : endq_pos] escape_post = False if tag_text in state.classes: if tag_text == state.current_class: # We don't want references to the same class - tag_text = '``{}``'.format(tag_text) + tag_text = "``{}``".format(tag_text) else: tag_text = make_type(tag_text, state) escape_post = True else: # command cmd = tag_text - space_pos = tag_text.find(' ') - if cmd == '/codeblock': - tag_text = '' + space_pos = tag_text.find(" ") + if cmd == "/codeblock": + tag_text = "" tag_depth -= 1 inside_code = False # Strip newline if the tag was alone on one - if pre_text[-1] == '\n': + if pre_text[-1] == "\n": pre_text = pre_text[:-1] - elif cmd == '/code': - tag_text = '``' + elif cmd == "/code": + tag_text = "``" tag_depth -= 1 inside_code = False escape_post = True elif inside_code: - tag_text = '[' + tag_text + ']' - elif cmd.find('html') == 0: - param = tag_text[space_pos + 1:] + tag_text = "[" + tag_text + "]" + elif cmd.find("html") == 0: + param = tag_text[space_pos + 1 :] tag_text = param - elif cmd.startswith('method') or cmd.startswith('member') or cmd.startswith('signal') or cmd.startswith('constant'): - param = tag_text[space_pos + 1:] + elif ( + cmd.startswith("method") + or cmd.startswith("member") + or cmd.startswith("signal") + or cmd.startswith("constant") + ): + param = tag_text[space_pos + 1 :] - if param.find('.') != -1: - ss = param.split('.') + if param.find(".") != -1: + ss = param.split(".") if len(ss) > 2: print_error("Bad reference: '{}', file: {}".format(param, state.current_class), state) class_param, method_param = ss @@ -734,7 +756,7 @@ def rstize_text(text, state): # type: (str, State) -> str # Search in the current class search_class_defs = [class_def] - if param.find('.') == -1: + if param.find(".") == -1: # Also search in @GlobalScope as a last resort if no class was specified search_class_defs.append(state.classes["@GlobalScope"]) @@ -755,66 +777,71 @@ def rstize_text(text, state): # type: (str, State) -> str ref_type = "_constant" else: - print_error("Unresolved type reference '{}' in method reference '{}', file: {}".format(class_param, param, state.current_class), state) + print_error( + "Unresolved type reference '{}' in method reference '{}', file: {}".format( + class_param, param, state.current_class + ), + state, + ) repl_text = method_param if class_param != state.current_class: repl_text = "{}.{}".format(class_param, method_param) - tag_text = ':ref:`{}`'.format(repl_text, class_param, ref_type, method_param) + tag_text = ":ref:`{}`".format(repl_text, class_param, ref_type, method_param) escape_post = True - elif cmd.find('image=') == 0: + elif cmd.find("image=") == 0: tag_text = "" # '![](' + cmd[6:] + ')' - elif cmd.find('url=') == 0: + elif cmd.find("url=") == 0: url_link = cmd[4:] - tag_text = '`' + tag_text = "`" tag_depth += 1 inside_url = True url_has_name = False - elif cmd == 
'/url': - tag_text = ('' if url_has_name else url_link) + " <" + url_link + ">`_" + elif cmd == "/url": + tag_text = ("" if url_has_name else url_link) + " <" + url_link + ">`_" tag_depth -= 1 escape_post = True inside_url = False url_has_name = False - elif cmd == 'center': + elif cmd == "center": tag_depth += 1 - tag_text = '' - elif cmd == '/center': + tag_text = "" + elif cmd == "/center": tag_depth -= 1 - tag_text = '' - elif cmd == 'codeblock': + tag_text = "" + elif cmd == "codeblock": tag_depth += 1 - tag_text = '\n::\n' + tag_text = "\n::\n" inside_code = True - elif cmd == 'br': + elif cmd == "br": # Make a new paragraph instead of a linebreak, rst is not so linebreak friendly - tag_text = '\n\n' + tag_text = "\n\n" # Strip potential leading spaces - while post_text[0] == ' ': + while post_text[0] == " ": post_text = post_text[1:] - elif cmd == 'i' or cmd == '/i': + elif cmd == "i" or cmd == "/i": if cmd == "/i": tag_depth -= 1 else: tag_depth += 1 - tag_text = '*' - elif cmd == 'b' or cmd == '/b': + tag_text = "*" + elif cmd == "b" or cmd == "/b": if cmd == "/b": tag_depth -= 1 else: tag_depth += 1 - tag_text = '**' - elif cmd == 'u' or cmd == '/u': + tag_text = "**" + elif cmd == "u" or cmd == "/u": if cmd == "/u": tag_depth -= 1 else: tag_depth += 1 - tag_text = '' - elif cmd == 'code': - tag_text = '``' + tag_text = "" + elif cmd == "code": + tag_text = "``" tag_depth += 1 inside_code = True - elif cmd.startswith('enum '): + elif cmd.startswith("enum "): tag_text = make_enum(cmd[5:], state) escape_post = True else: @@ -823,24 +850,24 @@ def rstize_text(text, state): # type: (str, State) -> str # Properly escape things like `[Node]s` if escape_post and post_text and (post_text[0].isalnum() or post_text[0] == "("): # not punctuation, escape - post_text = '\ ' + post_text + post_text = "\ " + post_text - next_brac_pos = post_text.find('[', 0) + next_brac_pos = post_text.find("[", 0) iter_pos = 0 while not inside_code: - iter_pos = post_text.find('*', iter_pos, next_brac_pos) + iter_pos = post_text.find("*", iter_pos, next_brac_pos) if iter_pos == -1: break - post_text = post_text[:iter_pos] + "\*" + post_text[iter_pos + 1:] + post_text = post_text[:iter_pos] + "\*" + post_text[iter_pos + 1 :] iter_pos += 2 iter_pos = 0 while not inside_code: - iter_pos = post_text.find('_', iter_pos, next_brac_pos) + iter_pos = post_text.find("_", iter_pos, next_brac_pos) if iter_pos == -1: break if not post_text[iter_pos + 1].isalnum(): # don't escape within a snake_case word - post_text = post_text[:iter_pos] + "\_" + post_text[iter_pos + 1:] + post_text = post_text[:iter_pos] + "\_" + post_text[iter_pos + 1 :] iter_pos += 2 else: iter_pos += 1 @@ -862,7 +889,7 @@ def format_table(f, data, remove_empty_columns=False): # type: (TextIO, Iterabl column_sizes = [0] * len(data[0]) for row in data: for i, text in enumerate(row): - text_length = len(text or '') + text_length = len(text or "") if text_length > column_sizes[i]: column_sizes[i] = text_length @@ -879,16 +906,16 @@ def format_table(f, data, remove_empty_columns=False): # type: (TextIO, Iterabl for i, text in enumerate(row): if column_sizes[i] == 0 and remove_empty_columns: continue - row_text += " " + (text or '').ljust(column_sizes[i]) + " |" + row_text += " " + (text or "").ljust(column_sizes[i]) + " |" row_text += "\n" f.write(row_text) f.write(sep) - f.write('\n') + f.write("\n") def make_type(t, state): # type: (str, State) -> str if t in state.classes: - return ':ref:`{0}`'.format(t) + return ":ref:`{0}`".format(t) 
print_error("Unresolved type '{}', file: {}".format(t, state.current_class), state) return t @@ -897,7 +924,7 @@ def make_enum(t, state): # type: (str, State) -> str p = t.find(".") if p >= 0: c = t[0:p] - e = t[p + 1:] + e = t[p + 1 :] # Variant enums live in GlobalScope but still use periods. if c == "Variant": c = "@GlobalScope" @@ -909,7 +936,7 @@ def make_enum(t, state): # type: (str, State) -> str c = "@GlobalScope" if not c in state.classes and c.startswith("_"): - c = c[1:] # Remove the underscore prefix + c = c[1:] # Remove the underscore prefix if c in state.classes and e in state.classes[c].enums: return ":ref:`{0}`".format(e, c) @@ -921,7 +948,9 @@ def make_enum(t, state): # type: (str, State) -> str return t -def make_method_signature(class_def, method_def, make_ref, state): # type: (ClassDef, Union[MethodDef, SignalDef], bool, State) -> Tuple[str, str] +def make_method_signature( + class_def, method_def, make_ref, state +): # type: (ClassDef, Union[MethodDef, SignalDef], bool, State) -> Tuple[str, str] ret_type = " " ref_type = "signal" @@ -936,34 +965,34 @@ def make_method_signature(class_def, method_def, make_ref, state): # type: (Cla else: out += "**{}** ".format(method_def.name) - out += '**(**' + out += "**(**" for i, arg in enumerate(method_def.parameters): if i > 0: - out += ', ' + out += ", " else: - out += ' ' + out += " " out += "{} {}".format(arg.type_name.to_rst(state), arg.name) if arg.default_value is not None: - out += '=' + arg.default_value + out += "=" + arg.default_value - if isinstance(method_def, MethodDef) and method_def.qualifiers is not None and 'vararg' in method_def.qualifiers: + if isinstance(method_def, MethodDef) and method_def.qualifiers is not None and "vararg" in method_def.qualifiers: if len(method_def.parameters) > 0: - out += ', ...' + out += ", ..." else: - out += ' ...' + out += " ..." 
- out += ' **)**' + out += " **)**" if isinstance(method_def, MethodDef) and method_def.qualifiers is not None: - out += ' ' + method_def.qualifiers + out += " " + method_def.qualifiers return ret_type, out def make_heading(title, underline): # type: (str, str) -> str - return title + '\n' + (underline * len(title)) + "\n\n" + return title + "\n" + (underline * len(title)) + "\n\n" def make_url(link): # type: (str) -> str @@ -987,5 +1016,5 @@ def make_url(link): # type: (str) -> str return "`" + link + " <" + link + ">`_" -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/drivers/SCsub b/drivers/SCsub index d91d98a713..35524868bb 100644 --- a/drivers/SCsub +++ b/drivers/SCsub @@ -1,40 +1,41 @@ #!/usr/bin/env python -Import('env') +Import("env") env.drivers_sources = [] # OS drivers -SConscript('unix/SCsub') -SConscript('windows/SCsub') +SConscript("unix/SCsub") +SConscript("windows/SCsub") # Sounds drivers -SConscript('alsa/SCsub') -SConscript('coreaudio/SCsub') -SConscript('pulseaudio/SCsub') -if (env["platform"] == "windows"): +SConscript("alsa/SCsub") +SConscript("coreaudio/SCsub") +SConscript("pulseaudio/SCsub") +if env["platform"] == "windows": SConscript("wasapi/SCsub") -if env['xaudio2']: +if env["xaudio2"]: SConscript("xaudio2/SCsub") # Midi drivers -SConscript('alsamidi/SCsub') -SConscript('coremidi/SCsub') -SConscript('winmidi/SCsub') +SConscript("alsamidi/SCsub") +SConscript("coremidi/SCsub") +SConscript("winmidi/SCsub") # Graphics drivers -if (env["platform"] != "server"): - SConscript('gles3/SCsub') - SConscript('gles2/SCsub') - SConscript('gl_context/SCsub') +if env["platform"] != "server": + SConscript("gles3/SCsub") + SConscript("gles2/SCsub") + SConscript("gl_context/SCsub") else: - SConscript('dummy/SCsub') + SConscript("dummy/SCsub") # Core dependencies SConscript("png/SCsub") -if env['vsproj']: +if env["vsproj"]: import os + path = os.getcwd() # Change directory so the path resolves correctly in the function call. 
os.chdir("..") diff --git a/drivers/alsa/SCsub b/drivers/alsa/SCsub index 28b315ae66..91e1140b75 100644 --- a/drivers/alsa/SCsub +++ b/drivers/alsa/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/alsamidi/SCsub b/drivers/alsamidi/SCsub index 4c24925192..4e1b5f2a36 100644 --- a/drivers/alsamidi/SCsub +++ b/drivers/alsamidi/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") # Driver source files env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/coreaudio/SCsub b/drivers/coreaudio/SCsub index 4c24925192..4e1b5f2a36 100644 --- a/drivers/coreaudio/SCsub +++ b/drivers/coreaudio/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") # Driver source files env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/coremidi/SCsub b/drivers/coremidi/SCsub index 4c24925192..4e1b5f2a36 100644 --- a/drivers/coremidi/SCsub +++ b/drivers/coremidi/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") # Driver source files env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/dummy/SCsub b/drivers/dummy/SCsub index 28b315ae66..91e1140b75 100644 --- a/drivers/dummy/SCsub +++ b/drivers/dummy/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/gl_context/SCsub b/drivers/gl_context/SCsub index b9f0ea2254..e2e499f5c2 100644 --- a/drivers/gl_context/SCsub +++ b/drivers/gl_context/SCsub @@ -1,8 +1,8 @@ #!/usr/bin/env python -Import('env') +Import("env") -if (env["platform"] in ["haiku", "osx", "windows", "x11"]): +if env["platform"] in ["haiku", "osx", "windows", "x11"]: # Thirdparty source files thirdparty_dir = "#thirdparty/glad/" thirdparty_sources = [ @@ -12,8 +12,8 @@ if (env["platform"] in ["haiku", "osx", "windows", "x11"]): env.Prepend(CPPPATH=[thirdparty_dir]) - env.Append(CPPDEFINES=['GLAD_ENABLED']) - env.Append(CPPDEFINES=['GLES_OVER_GL']) + env.Append(CPPDEFINES=["GLAD_ENABLED"]) + env.Append(CPPDEFINES=["GLES_OVER_GL"]) env_thirdparty = env.Clone() env_thirdparty.disable_warnings() diff --git a/drivers/gles2/SCsub b/drivers/gles2/SCsub index 9923e52c73..987ddcd16e 100644 --- a/drivers/gles2/SCsub +++ b/drivers/gles2/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/gles2/shaders/SCsub b/drivers/gles2/shaders/SCsub index d7ae0243e6..bcd6ea79fb 100644 --- a/drivers/gles2/shaders/SCsub +++ b/drivers/gles2/shaders/SCsub @@ -1,23 +1,23 @@ #!/usr/bin/env python -Import('env') +Import("env") -if 'GLES2_GLSL' in env['BUILDERS']: - env.GLES2_GLSL('copy.glsl'); -# env.GLES2_GLSL('resolve.glsl'); - env.GLES2_GLSL('canvas.glsl'); - env.GLES2_GLSL('canvas_shadow.glsl'); - env.GLES2_GLSL('scene.glsl'); - env.GLES2_GLSL('cubemap_filter.glsl'); - env.GLES2_GLSL('cube_to_dp.glsl'); -# env.GLES2_GLSL('blend_shape.glsl'); -# env.GLES2_GLSL('screen_space_reflection.glsl'); - env.GLES2_GLSL('effect_blur.glsl'); -# env.GLES2_GLSL('subsurf_scattering.glsl'); -# env.GLES2_GLSL('ssao.glsl'); -# env.GLES2_GLSL('ssao_minify.glsl'); -# env.GLES2_GLSL('ssao_blur.glsl'); -# env.GLES2_GLSL('exposure.glsl'); - env.GLES2_GLSL('tonemap.glsl'); -# env.GLES2_GLSL('particles.glsl'); - env.GLES2_GLSL('lens_distorted.glsl'); +if "GLES2_GLSL" in env["BUILDERS"]: + env.GLES2_GLSL("copy.glsl") + # env.GLES2_GLSL('resolve.glsl'); + 
env.GLES2_GLSL("canvas.glsl") + env.GLES2_GLSL("canvas_shadow.glsl") + env.GLES2_GLSL("scene.glsl") + env.GLES2_GLSL("cubemap_filter.glsl") + env.GLES2_GLSL("cube_to_dp.glsl") + # env.GLES2_GLSL('blend_shape.glsl'); + # env.GLES2_GLSL('screen_space_reflection.glsl'); + env.GLES2_GLSL("effect_blur.glsl") + # env.GLES2_GLSL('subsurf_scattering.glsl'); + # env.GLES2_GLSL('ssao.glsl'); + # env.GLES2_GLSL('ssao_minify.glsl'); + # env.GLES2_GLSL('ssao_blur.glsl'); + # env.GLES2_GLSL('exposure.glsl'); + env.GLES2_GLSL("tonemap.glsl") + # env.GLES2_GLSL('particles.glsl'); + env.GLES2_GLSL("lens_distorted.glsl") diff --git a/drivers/gles3/SCsub b/drivers/gles3/SCsub index 2471dd3739..987ddcd16e 100644 --- a/drivers/gles3/SCsub +++ b/drivers/gles3/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') +Import("env") -env.add_source_files(env.drivers_sources,"*.cpp") +env.add_source_files(env.drivers_sources, "*.cpp") SConscript("shaders/SCsub") diff --git a/drivers/gles3/shaders/SCsub b/drivers/gles3/shaders/SCsub index 27fd1514e7..0f3f8ccb72 100644 --- a/drivers/gles3/shaders/SCsub +++ b/drivers/gles3/shaders/SCsub @@ -1,23 +1,23 @@ #!/usr/bin/env python -Import('env') +Import("env") -if 'GLES3_GLSL' in env['BUILDERS']: - env.GLES3_GLSL('copy.glsl'); - env.GLES3_GLSL('resolve.glsl'); - env.GLES3_GLSL('canvas.glsl'); - env.GLES3_GLSL('canvas_shadow.glsl'); - env.GLES3_GLSL('scene.glsl'); - env.GLES3_GLSL('cubemap_filter.glsl'); - env.GLES3_GLSL('cube_to_dp.glsl'); - env.GLES3_GLSL('blend_shape.glsl'); - env.GLES3_GLSL('screen_space_reflection.glsl'); - env.GLES3_GLSL('effect_blur.glsl'); - env.GLES3_GLSL('subsurf_scattering.glsl'); - env.GLES3_GLSL('ssao.glsl'); - env.GLES3_GLSL('ssao_minify.glsl'); - env.GLES3_GLSL('ssao_blur.glsl'); - env.GLES3_GLSL('exposure.glsl'); - env.GLES3_GLSL('tonemap.glsl'); - env.GLES3_GLSL('particles.glsl'); - env.GLES3_GLSL('lens_distorted.glsl'); +if "GLES3_GLSL" in env["BUILDERS"]: + env.GLES3_GLSL("copy.glsl") + env.GLES3_GLSL("resolve.glsl") + env.GLES3_GLSL("canvas.glsl") + env.GLES3_GLSL("canvas_shadow.glsl") + env.GLES3_GLSL("scene.glsl") + env.GLES3_GLSL("cubemap_filter.glsl") + env.GLES3_GLSL("cube_to_dp.glsl") + env.GLES3_GLSL("blend_shape.glsl") + env.GLES3_GLSL("screen_space_reflection.glsl") + env.GLES3_GLSL("effect_blur.glsl") + env.GLES3_GLSL("subsurf_scattering.glsl") + env.GLES3_GLSL("ssao.glsl") + env.GLES3_GLSL("ssao_minify.glsl") + env.GLES3_GLSL("ssao_blur.glsl") + env.GLES3_GLSL("exposure.glsl") + env.GLES3_GLSL("tonemap.glsl") + env.GLES3_GLSL("particles.glsl") + env.GLES3_GLSL("lens_distorted.glsl") diff --git a/drivers/png/SCsub b/drivers/png/SCsub index 87b54cecaf..db08be0c47 100644 --- a/drivers/png/SCsub +++ b/drivers/png/SCsub @@ -1,11 +1,11 @@ #!/usr/bin/env python -Import('env') +Import("env") env_png = env.Clone() # Thirdparty source files -if env['builtin_libpng']: +if env["builtin_libpng"]: thirdparty_dir = "#thirdparty/libpng/" thirdparty_sources = [ "png.c", @@ -32,6 +32,7 @@ if env['builtin_libpng']: # Currently .ASM filter_neon.S does not compile on NT. 
import os + use_neon = "neon_enabled" in env and env["neon_enabled"] and os.name != "nt" if use_neon: env_png.Append(CPPDEFINES=[("PNG_ARM_NEON_OPT", 2)]) @@ -45,7 +46,7 @@ if env['builtin_libpng']: if use_neon: env_neon = env_thirdparty.Clone() if "S_compiler" in env: - env_neon['CC'] = env['S_compiler'] + env_neon["CC"] = env["S_compiler"] neon_sources = [] neon_sources.append(env_neon.Object(thirdparty_dir + "/arm/arm_init.c")) neon_sources.append(env_neon.Object(thirdparty_dir + "/arm/filter_neon_intrinsics.c")) @@ -56,4 +57,4 @@ if env['builtin_libpng']: # Godot source files env_png.add_source_files(env.drivers_sources, "*.cpp") -Export('env') +Export("env") diff --git a/drivers/pulseaudio/SCsub b/drivers/pulseaudio/SCsub index 28b315ae66..91e1140b75 100644 --- a/drivers/pulseaudio/SCsub +++ b/drivers/pulseaudio/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/unix/SCsub b/drivers/unix/SCsub index 4888f56099..91ef613546 100644 --- a/drivers/unix/SCsub +++ b/drivers/unix/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.drivers_sources, "*.cpp") -env["check_c_headers"] = [ [ "mntent.h", "HAVE_MNTENT" ] ] +env["check_c_headers"] = [["mntent.h", "HAVE_MNTENT"]] diff --git a/drivers/wasapi/SCsub b/drivers/wasapi/SCsub index 4c24925192..4e1b5f2a36 100644 --- a/drivers/wasapi/SCsub +++ b/drivers/wasapi/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") # Driver source files env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/windows/SCsub b/drivers/windows/SCsub index 28b315ae66..91e1140b75 100644 --- a/drivers/windows/SCsub +++ b/drivers/windows/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/winmidi/SCsub b/drivers/winmidi/SCsub index 4c24925192..4e1b5f2a36 100644 --- a/drivers/winmidi/SCsub +++ b/drivers/winmidi/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") # Driver source files env.add_source_files(env.drivers_sources, "*.cpp") diff --git a/drivers/xaudio2/SCsub b/drivers/xaudio2/SCsub index de750525ab..6778ad281e 100644 --- a/drivers/xaudio2/SCsub +++ b/drivers/xaudio2/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.drivers_sources, "*.cpp") -env.Append(CPPDEFINES=['XAUDIO2_ENABLED']) -env.Append(LINKFLAGS=['xaudio2_8.lib']) +env.Append(CPPDEFINES=["XAUDIO2_ENABLED"]) +env.Append(LINKFLAGS=["xaudio2_8.lib"]) diff --git a/editor/SCsub b/editor/SCsub index bccfce327d..67787ca49d 100644 --- a/editor/SCsub +++ b/editor/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") env.editor_sources = [] @@ -18,24 +18,24 @@ def _make_doc_data_class_path(to_path): g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n") g.write("struct _DocDataClassPath { const char* name; const char* path; };\n") - g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n"); + g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n") for c in sorted(env.doc_class_path): - g.write("\t{\"" + c + "\", \"" + env.doc_class_path[c] + "\"},\n") + g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n') g.write("\t{NULL, NULL}\n") g.write("};\n") g.close() -if env['tools']: +if env["tools"]: # 
Register exporters reg_exporters_inc = '#include "register_exporters.h"\n' - reg_exporters = 'void register_exporters() {\n' + reg_exporters = "void register_exporters() {\n" for e in env.platform_exporters: env.add_source_files(env.editor_sources, "#platform/" + e + "/export/export.cpp") - reg_exporters += '\tregister_' + e + '_exporter();\n' + reg_exporters += "\tregister_" + e + "_exporter();\n" reg_exporters_inc += '#include "platform/' + e + '/export/export.h"\n' - reg_exporters += '}\n' + reg_exporters += "}\n" # NOTE: It is safe to generate this file here, since this is still executed serially with open_utf8("register_exporters.gen.cpp", "w") as f: @@ -58,7 +58,7 @@ if env['tools']: else: docs += Glob(d + "/*.xml") # Custom. - _make_doc_data_class_path(os.path.join(env.Dir('#').abspath, "editor/doc")) + _make_doc_data_class_path(os.path.join(env.Dir("#").abspath, "editor/doc")) docs = sorted(docs) env.Depends("#editor/doc_data_compressed.gen.h", docs) @@ -68,24 +68,24 @@ if env['tools']: # Translations tlist = glob.glob(path + "/translations/*.po") - env.Depends('#editor/translations.gen.h', tlist) - env.CommandNoCache('#editor/translations.gen.h', tlist, run_in_subprocess(editor_builders.make_translations_header)) + env.Depends("#editor/translations.gen.h", tlist) + env.CommandNoCache("#editor/translations.gen.h", tlist, run_in_subprocess(editor_builders.make_translations_header)) # Fonts flist = glob.glob(path + "/../thirdparty/fonts/*.ttf") flist.extend(glob.glob(path + "/../thirdparty/fonts/*.otf")) flist.sort() - env.Depends('#editor/builtin_fonts.gen.h', flist) - env.CommandNoCache('#editor/builtin_fonts.gen.h', flist, run_in_subprocess(editor_builders.make_fonts_header)) + env.Depends("#editor/builtin_fonts.gen.h", flist) + env.CommandNoCache("#editor/builtin_fonts.gen.h", flist, run_in_subprocess(editor_builders.make_fonts_header)) env.add_source_files(env.editor_sources, "*.cpp") - SConscript('collada/SCsub') - SConscript('doc/SCsub') - SConscript('fileserver/SCsub') - SConscript('icons/SCsub') - SConscript('import/SCsub') - SConscript('plugins/SCsub') + SConscript("collada/SCsub") + SConscript("doc/SCsub") + SConscript("fileserver/SCsub") + SConscript("icons/SCsub") + SConscript("import/SCsub") + SConscript("plugins/SCsub") lib = env.add_library("editor", env.editor_sources) env.Prepend(LIBS=[lib]) diff --git a/editor/collada/SCsub b/editor/collada/SCsub index 2b1e889fb0..359d04e5df 100644 --- a/editor/collada/SCsub +++ b/editor/collada/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.editor_sources, "*.cpp") diff --git a/editor/doc/SCsub b/editor/doc/SCsub index 2b1e889fb0..359d04e5df 100644 --- a/editor/doc/SCsub +++ b/editor/doc/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.editor_sources, "*.cpp") diff --git a/editor/editor_builders.py b/editor/editor_builders.py index 910c53e2ff..70b3d6a55c 100644 --- a/editor/editor_builders.py +++ b/editor/editor_builders.py @@ -26,6 +26,7 @@ def make_doc_header(target, source, env): buf = encode_utf8(docbegin + buf + docend) decomp_size = len(buf) import zlib + buf = zlib.compress(buf) g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") @@ -56,7 +57,7 @@ def make_fonts_header(target, source, env): # saving uncompressed, since freetype will reference from memory pointer xl_names = [] for i in range(len(source)): - with open(source[i], "rb")as f: + with open(source[i], "rb") as f: buf = f.read() name = 
os.path.splitext(os.path.basename(source[i]))[0] @@ -112,7 +113,7 @@ def make_translations_header(target, source, env): g.write("};\n\n") g.write("static EditorTranslationList _editor_translations[] = {\n") for x in xl_names: - g.write("\t{ \"" + x[0] + "\", " + str(x[1]) + ", " + str(x[2]) + ", _translation_" + x[0] + "_compressed},\n") + g.write('\t{ "' + x[0] + '", ' + str(x[1]) + ", " + str(x[2]) + ", _translation_" + x[0] + "_compressed},\n") g.write("\t{NULL, 0, 0, NULL}\n") g.write("};\n") @@ -120,5 +121,6 @@ def make_translations_header(target, source, env): g.close() -if __name__ == '__main__': + +if __name__ == "__main__": subprocess_main(globals()) diff --git a/editor/fileserver/SCsub b/editor/fileserver/SCsub index 2b1e889fb0..359d04e5df 100644 --- a/editor/fileserver/SCsub +++ b/editor/fileserver/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.editor_sources, "*.cpp") diff --git a/editor/icons/SCsub b/editor/icons/SCsub index a481e70eef..e143276259 100644 --- a/editor/icons/SCsub +++ b/editor/icons/SCsub @@ -1,17 +1,17 @@ #!/usr/bin/env python -Import('env') +Import("env") import os from platform_methods import run_in_subprocess import editor_icons_builders -make_editor_icons_builder = Builder(action=run_in_subprocess(editor_icons_builders.make_editor_icons_action), - suffix='.h', - src_suffix='.svg') +make_editor_icons_builder = Builder( + action=run_in_subprocess(editor_icons_builders.make_editor_icons_action), suffix=".h", src_suffix=".svg" +) -env['BUILDERS']['MakeEditorIconsBuilder'] = make_editor_icons_builder +env["BUILDERS"]["MakeEditorIconsBuilder"] = make_editor_icons_builder # Editor's own icons icon_sources = Glob("*.svg") @@ -23,4 +23,4 @@ for path in env.module_icons_paths: else: icon_sources += Glob(path + "/*.svg") # Custom. 
-env.Alias('editor_icons', [env.MakeEditorIconsBuilder('#editor/editor_icons.gen.h', icon_sources)]) +env.Alias("editor_icons", [env.MakeEditorIconsBuilder("#editor/editor_icons.gen.h", icon_sources)]) diff --git a/editor/icons/editor_icons_builders.py b/editor/icons/editor_icons_builders.py index dfd0802ce9..ce6b9a8686 100644 --- a/editor/icons/editor_icons_builders.py +++ b/editor/icons/editor_icons_builders.py @@ -21,17 +21,16 @@ def make_editor_icons_action(target, source, env): icons_string.write('\t"') - with open(fname, 'rb') as svgf: + with open(fname, "rb") as svgf: b = svgf.read(1) - while(len(b) == 1): + while len(b) == 1: icons_string.write("\\" + str(hex(ord(b)))[1:]) b = svgf.read(1) - icons_string.write('"') if fname != svg_icons[-1]: icons_string.write(",") - icons_string.write('\n') + icons_string.write("\n") s = StringIO() s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") @@ -40,12 +39,12 @@ def make_editor_icons_action(target, source, env): s.write("static const int editor_icons_count = {};\n".format(len(svg_icons))) s.write("static const char *editor_icons_sources[] = {\n") s.write(icons_string.getvalue()) - s.write('};\n\n') + s.write("};\n\n") s.write("static const char *editor_icons_names[] = {\n") # this is used to store the indices of thumbnail icons - thumb_medium_indices = []; - thumb_big_indices = []; + thumb_medium_indices = [] + thumb_big_indices = [] index = 0 for f in svg_icons: @@ -53,7 +52,7 @@ def make_editor_icons_action(target, source, env): icon_name = os.path.basename(fname)[5:-4].title().replace("_", "") # some special cases - if icon_name in ['Int', 'Bool', 'Float']: + if icon_name in ["Int", "Bool", "Float"]: icon_name = icon_name.lower() if icon_name.endswith("MediumThumb"): # don't know a better way to handle this thumb_medium_indices.append(str(index)) @@ -64,11 +63,11 @@ def make_editor_icons_action(target, source, env): if fname != svg_icons[-1]: s.write(",") - s.write('\n') + s.write("\n") index += 1 - s.write('};\n') + s.write("};\n") if thumb_medium_indices: s.write("\n\n") @@ -92,5 +91,5 @@ def make_editor_icons_action(target, source, env): icons_string.close() -if __name__ == '__main__': +if __name__ == "__main__": subprocess_main(globals()) diff --git a/editor/import/SCsub b/editor/import/SCsub index 2b1e889fb0..359d04e5df 100644 --- a/editor/import/SCsub +++ b/editor/import/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.editor_sources, "*.cpp") diff --git a/editor/plugins/SCsub b/editor/plugins/SCsub index 2b1e889fb0..359d04e5df 100644 --- a/editor/plugins/SCsub +++ b/editor/plugins/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.editor_sources, "*.cpp") diff --git a/editor/translations/extract.py b/editor/translations/extract.py index e7ebda32df..6a0dafa7d7 100755 --- a/editor/translations/extract.py +++ b/editor/translations/extract.py @@ -10,23 +10,23 @@ import sys line_nb = False for arg in sys.argv[1:]: - if (arg == "--with-line-nb"): + if arg == "--with-line-nb": print("Enabling line numbers in the context locations.") line_nb = True else: os.sys.exit("Non supported argument '" + arg + "'. 
Aborting.") -if (not os.path.exists("editor")): +if not os.path.exists("editor"): os.sys.exit("ERROR: This script should be started from the root of the git repo.") matches = [] -for root, dirnames, filenames in os.walk('.'): +for root, dirnames, filenames in os.walk("."): dirnames[:] = [d for d in dirnames if d not in ["thirdparty"]] - for filename in fnmatch.filter(filenames, '*.cpp'): + for filename in fnmatch.filter(filenames, "*.cpp"): matches.append(os.path.join(root, filename)) - for filename in fnmatch.filter(filenames, '*.h'): + for filename in fnmatch.filter(filenames, "*.h"): matches.append(os.path.join(root, filename)) matches.sort() @@ -49,52 +49,54 @@ msgstr "" "Content-Transfer-Encoding: 8-bit\\n" """ + def process_file(f, fname): global main_po, unique_str, unique_loc l = f.readline() lc = 1 - while (l): + while l: - patterns = ['RTR(\"', 'TTR(\"', 'TTRC(\"'] + patterns = ['RTR("', 'TTR("', 'TTRC("'] idx = 0 pos = 0 - while (pos >= 0): + while pos >= 0: pos = l.find(patterns[idx], pos) - if (pos == -1): - if (idx < len(patterns) - 1): + if pos == -1: + if idx < len(patterns) - 1: idx += 1 pos = 0 continue pos += len(patterns[idx]) msg = "" - while (pos < len(l) and (l[pos] != '"' or l[pos - 1] == '\\')): + while pos < len(l) and (l[pos] != '"' or l[pos - 1] == "\\"): msg += l[pos] pos += 1 - location = os.path.relpath(fname).replace('\\', '/') - if (line_nb): + location = os.path.relpath(fname).replace("\\", "/") + if line_nb: location += ":" + str(lc) - if (not msg in unique_str): + if not msg in unique_str: main_po += "\n#: " + location + "\n" main_po += 'msgid "' + msg + '"\n' main_po += 'msgstr ""\n' unique_str.append(msg) unique_loc[msg] = [location] - elif (not location in unique_loc[msg]): + elif not location in unique_loc[msg]: # Add additional location to previous occurrence too msg_pos = main_po.find('\nmsgid "' + msg + '"') - if (msg_pos == -1): + if msg_pos == -1: print("Someone apparently thought writing Python was as easy as GDScript. 
Ping Akien.") - main_po = main_po[:msg_pos] + ' ' + location + main_po[msg_pos:] + main_po = main_po[:msg_pos] + " " + location + main_po[msg_pos:] unique_loc[msg].append(location) l = f.readline() lc += 1 + print("Updating the editor.pot template...") for fname in matches: @@ -104,7 +106,7 @@ for fname in matches: with open("editor.pot", "w") as f: f.write(main_po) -if (os.name == "posix"): +if os.name == "posix": print("Wrapping template at 79 characters for compatibility with Weblate.") os.system("msgmerge -w79 editor.pot editor.pot > editor.pot.wrap") shutil.move("editor.pot.wrap", "editor.pot") @@ -112,7 +114,7 @@ if (os.name == "posix"): shutil.move("editor.pot", "editor/translations/editor.pot") # TODO: Make that in a portable way, if we care; if not, kudos to Unix users -if (os.name == "posix"): +if os.name == "posix": added = subprocess.check_output(r"git diff editor/translations/editor.pot | grep \+msgid | wc -l", shell=True) removed = subprocess.check_output(r"git diff editor/translations/editor.pot | grep \\\-msgid | wc -l", shell=True) print("\n# Template changes compared to the staged status:") diff --git a/gles_builders.py b/gles_builders.py index e56ccc4431..dda3922ed3 100644 --- a/gles_builders.py +++ b/gles_builders.py @@ -7,7 +7,6 @@ from platform_methods import subprocess_main class LegacyGLHeaderStruct: - def __init__(self): self.vertex_lines = [] self.fragment_lines = [] @@ -73,7 +72,7 @@ def include_file_in_legacygl_header(filename, header_data, depth): ifdefline = line.replace("#ifdef ", "").strip() if line.find("_EN_") != -1: - enumbase = ifdefline[:ifdefline.find("_EN_")] + enumbase = ifdefline[: ifdefline.find("_EN_")] ifdefline = ifdefline.replace("_EN_", "_") line = line.replace("_EN_", "_") if enumbase not in header_data.enums: @@ -86,12 +85,12 @@ def include_file_in_legacygl_header(filename, header_data, depth): if line.find("uniform") != -1 and line.lower().find("texunit:") != -1: # texture unit - texunitstr = line[line.find(":") + 1:].strip() + texunitstr = line[line.find(":") + 1 :].strip() if texunitstr == "auto": texunit = "-1" else: texunit = str(int(texunitstr)) - uline = line[:line.lower().find("//")] + uline = line[: line.lower().find("//")] uline = uline.replace("uniform", "") uline = uline.replace("highp", "") uline = uline.replace(";", "") @@ -99,10 +98,10 @@ def include_file_in_legacygl_header(filename, header_data, depth): for x in lines: x = x.strip() - x = x[x.rfind(" ") + 1:] + x = x[x.rfind(" ") + 1 :] if x.find("[") != -1: # unfiorm array - x = x[:x.find("[")] + x = x[: x.find("[")] if not x in header_data.texunit_names: header_data.texunits += [(x, texunit)] @@ -110,10 +109,10 @@ def include_file_in_legacygl_header(filename, header_data, depth): elif line.find("uniform") != -1 and line.lower().find("ubo:") != -1: # uniform buffer object - ubostr = line[line.find(":") + 1:].strip() + ubostr = line[line.find(":") + 1 :].strip() ubo = str(int(ubostr)) - uline = line[:line.lower().find("//")] - uline = uline[uline.find("uniform") + len("uniform"):] + uline = line[: line.lower().find("//")] + uline = uline[uline.find("uniform") + len("uniform") :] uline = uline.replace("highp", "") uline = uline.replace(";", "") uline = uline.replace("{", "").strip() @@ -121,10 +120,10 @@ def include_file_in_legacygl_header(filename, header_data, depth): for x in lines: x = x.strip() - x = x[x.rfind(" ") + 1:] + x = x[x.rfind(" ") + 1 :] if x.find("[") != -1: # unfiorm array - x = x[:x.find("[")] + x = x[: x.find("[")] if not x in header_data.ubo_names: 
header_data.ubos += [(x, ubo)] @@ -137,10 +136,10 @@ def include_file_in_legacygl_header(filename, header_data, depth): for x in lines: x = x.strip() - x = x[x.rfind(" ") + 1:] + x = x[x.rfind(" ") + 1 :] if x.find("[") != -1: # unfiorm array - x = x[:x.find("[")] + x = x[: x.find("[")] if not x in header_data.uniforms: header_data.uniforms += [x] @@ -150,7 +149,7 @@ def include_file_in_legacygl_header(filename, header_data, depth): uline = uline.replace("attribute ", "") uline = uline.replace("highp ", "") uline = uline.replace(";", "") - uline = uline[uline.find(" "):].strip() + uline = uline[uline.find(" ") :].strip() if uline.find("//") != -1: name, bind = uline.split("//") @@ -163,7 +162,7 @@ def include_file_in_legacygl_header(filename, header_data, depth): uline = line.replace("out ", "") uline = uline.replace("highp ", "") uline = uline.replace(";", "") - uline = uline[uline.find(" "):].strip() + uline = uline[uline.find(" ") :].strip() if uline.find("//") != -1: name, bind = uline.split("//") @@ -200,17 +199,19 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n") out_file_base = out_file - out_file_base = out_file_base[out_file_base.rfind("/") + 1:] - out_file_base = out_file_base[out_file_base.rfind("\\") + 1:] + out_file_base = out_file_base[out_file_base.rfind("/") + 1 :] + out_file_base = out_file_base[out_file_base.rfind("\\") + 1 :] out_file_ifdef = out_file_base.replace(".", "_").upper() fd.write("#ifndef " + out_file_ifdef + class_suffix + "_120\n") fd.write("#define " + out_file_ifdef + class_suffix + "_120\n") - out_file_class = out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix + out_file_class = ( + out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix + ) fd.write("\n\n") - fd.write("#include \"" + include + "\"\n\n\n") + fd.write('#include "' + include + '"\n\n\n') fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n") - fd.write("\t virtual String get_shader_name() const { return \"" + out_file_class + "\"; }\n") + fd.write('\t virtual String get_shader_name() const { return "' + out_file_class + '"; }\n') fd.write("public:\n\n") @@ -228,29 +229,64 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n") if header_data.conditionals: - fd.write("\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n") + fd.write( + "\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n" + ) fd.write("\t#ifdef DEBUG_ENABLED\n ") - fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; if (!is_version_valid()) return; ERR_FAIL_COND( get_active()!=this ); \n\n ") + fd.write( + "\t#define _FU if (get_uniform(p_uniform)<0) return; if (!is_version_valid()) return; ERR_FAIL_COND( get_active()!=this ); \n\n " + ) fd.write("\t#else\n ") fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; \n\n ") fd.write("\t#endif\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU 
glUniform1f(get_uniform(p_uniform),p_value); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Color& p_color) { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(get_uniform(p_uniform),1,col); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector2& p_vec2) { _FU GLfloat vec2[2]={p_vec2.x,p_vec2.y}; glUniform2fv(get_uniform(p_uniform),1,vec2); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Size2i& p_vec2) { _FU GLint vec2[2]={p_vec2.x,p_vec2.y}; glUniform2iv(get_uniform(p_uniform),1,vec2); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector3& p_vec3) { _FU GLfloat vec3[3]={p_vec3.x,p_vec3.y,p_vec3.z}; glUniform3fv(get_uniform(p_uniform),1,vec3); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n") - fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n") + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Color& p_color) { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(get_uniform(p_uniform),1,col); }\n\n" + ) + 
fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector2& p_vec2) { _FU GLfloat vec2[2]={p_vec2.x,p_vec2.y}; glUniform2fv(get_uniform(p_uniform),1,vec2); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Size2i& p_vec2) { _FU GLint vec2[2]={p_vec2.x,p_vec2.y}; glUniform2iv(get_uniform(p_uniform),1,vec2); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector3& p_vec3) { _FU GLfloat vec3[3]={p_vec3.x,p_vec3.y,p_vec3.z}; glUniform3fv(get_uniform(p_uniform),1,vec3); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n" + ) + fd.write( + "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n" + ) - fd.write("""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) { _FU + fd.write( + """\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) { _FU const Transform &tr = p_transform; @@ -279,9 +315,11 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 } - """) + """ + ) - fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform2D& p_transform) { _FU + fd.write( + """_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform2D& p_transform) { _FU const Transform2D &tr = p_transform; @@ -310,9 +348,11 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 } - """) + """ + ) - fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) { _FU + fd.write( + """_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) { _FU GLfloat matrix[16]; @@ -324,7 +364,8 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 } glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix); -}""") +}""" + ) fd.write("\n\n#undef _FU\n\n\n") @@ -344,21 +385,25 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 x = header_data.enums[xv] bits = 1 amt = len(x) - while (2 ** bits < amt): + while 2 ** bits < amt: bits += 1 strs = "{" for i in range(amt): - strs += "\"#define " + x[i] + "\\n\"," + strs += '"#define ' + x[i] + '\\n",' c = {} c["set_mask"] = "uint64_t(" + str(i) + ")<<" + str(bitofs) - c["clear_mask"] = "((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")" + c["clear_mask"] = ( + "((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")" + ) enum_vals.append(c) enum_constants.append(x[i]) strs += "NULL}" - fd.write("\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n") + fd.write( + "\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n" + ) bitofs += bits fd.write("\t\t};\n\n") @@ -377,7 +422,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 fd.write("\t\tstatic const char* _conditional_strings[]={\n") if header_data.conditionals: for x in header_data.conditionals: - fd.write("\t\t\t\"#define " + x + "\\n\",\n") + fd.write('\t\t\t"#define ' + x + '\\n",\n') 
conditionals_found.append(x) fd.write("\t\t};\n\n") else: @@ -388,7 +433,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 fd.write("\t\tstatic const char* _uniform_strings[]={\n") if header_data.uniforms: for x in header_data.uniforms: - fd.write("\t\t\t\"" + x + "\",\n") + fd.write('\t\t\t"' + x + '",\n') fd.write("\t\t};\n\n") else: fd.write("\t\tstatic const char **_uniform_strings=NULL;\n") @@ -398,7 +443,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n") for x in header_data.attributes: - fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n") + fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n") fd.write("\t\t};\n\n") else: fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n") @@ -412,9 +457,9 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 name = x[0] cond = x[1] if cond in conditionals_found: - fd.write("\t\t\t{\"" + name + "\"," + str(conditionals_found.index(cond)) + "},\n") + fd.write('\t\t\t{"' + name + '",' + str(conditionals_found.index(cond)) + "},\n") else: - fd.write("\t\t\t{\"" + name + "\",-1},\n") + fd.write('\t\t\t{"' + name + '",-1},\n') feedback_count += 1 @@ -428,7 +473,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 if header_data.texunits: fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n") for x in header_data.texunits: - fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n") + fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n") fd.write("\t\t};\n\n") else: fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n") @@ -436,7 +481,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 if not gles2 and header_data.ubos: fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n") for x in header_data.ubos: - fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n") + fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n") fd.write("\t\t};\n\n") else: if gles2: @@ -449,7 +494,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 for c in x: fd.write(str(ord(c)) + ",") - fd.write(str(ord('\n')) + ",") + fd.write(str(ord("\n")) + ",") fd.write("\t\t0};\n\n") fd.write("\t\tstatic const int _vertex_code_start=" + str(header_data.vertex_offset) + ";\n") @@ -459,28 +504,73 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2 for c in x: fd.write(str(ord(c)) + ",") - fd.write(str(ord('\n')) + ",") + fd.write(str(ord("\n")) + ",") fd.write("\t\t0};\n\n") fd.write("\t\tstatic const int _fragment_code_start=" + str(header_data.fragment_offset) + ";\n") if output_attribs: if gles2: - fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str( - len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write( + "\t\tsetup(_conditional_strings," + + str(len(header_data.conditionals)) + + ",_uniform_strings," + + str(len(header_data.uniforms)) + + ",_attribute_pairs," + + str(len(header_data.attributes)) + + ", _texunit_pairs," + + str(len(header_data.texunits)) + + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n" + ) else: - fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + 
",_attribute_pairs," + str( - len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str( - feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write( + "\t\tsetup(_conditional_strings," + + str(len(header_data.conditionals)) + + ",_uniform_strings," + + str(len(header_data.uniforms)) + + ",_attribute_pairs," + + str(len(header_data.attributes)) + + ", _texunit_pairs," + + str(len(header_data.texunits)) + + ",_ubo_pairs," + + str(len(header_data.ubos)) + + ",_feedbacks," + + str(feedback_count) + + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n" + ) else: if gles2: - fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str( - len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str( - enum_value_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write( + "\t\tsetup(_conditional_strings," + + str(len(header_data.conditionals)) + + ",_uniform_strings," + + str(len(header_data.uniforms)) + + ",_texunit_pairs," + + str(len(header_data.texunits)) + + ",_enums," + + str(len(header_data.enums)) + + ",_enum_values," + + str(enum_value_count) + + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n" + ) else: - fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str( - len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(enum_value_count) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str( - feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write( + "\t\tsetup(_conditional_strings," + + str(len(header_data.conditionals)) + + ",_uniform_strings," + + str(len(header_data.uniforms)) + + ",_texunit_pairs," + + str(len(header_data.texunits)) + + ",_enums," + + str(len(header_data.enums)) + + ",_enum_values," + + str(enum_value_count) + + ",_ubo_pairs," + + str(len(header_data.ubos)) + + ",_feedbacks," + + str(feedback_count) + + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n" + ) fd.write("\t}\n\n") @@ -504,8 +594,10 @@ def build_gles3_headers(target, source, env): def build_gles2_headers(target, source, env): for x in source: - build_legacygl_header(str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True, gles2=True) + build_legacygl_header( + str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True, gles2=True + ) -if __name__ == '__main__': +if __name__ == "__main__": subprocess_main(globals()) diff --git a/main/SCsub b/main/SCsub index 1addfed29d..dd1b30eca9 100644 --- a/main/SCsub +++ b/main/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") from platform_methods import run_in_subprocess import main_builders @@ -13,7 +13,11 @@ env.add_source_files(env.main_sources, "*.cpp") controller_databases = ["#main/gamecontrollerdb.txt", "#main/godotcontrollerdb.txt"] env.Depends("#main/default_controller_mappings.gen.cpp", controller_databases) -env.CommandNoCache("#main/default_controller_mappings.gen.cpp", controller_databases, run_in_subprocess(main_builders.make_default_controller_mappings)) +env.CommandNoCache( 
+ "#main/default_controller_mappings.gen.cpp", + controller_databases, + run_in_subprocess(main_builders.make_default_controller_mappings), +) # Don't warn about duplicate entry here, we need it registered manually for first build, # even if later builds will pick it up twice due to above *.cpp globbing. @@ -23,13 +27,15 @@ env.Depends("#main/splash.gen.h", "#main/splash.png") env.CommandNoCache("#main/splash.gen.h", "#main/splash.png", run_in_subprocess(main_builders.make_splash)) env.Depends("#main/splash_editor.gen.h", "#main/splash_editor.png") -env.CommandNoCache("#main/splash_editor.gen.h", "#main/splash_editor.png", run_in_subprocess(main_builders.make_splash_editor)) +env.CommandNoCache( + "#main/splash_editor.gen.h", "#main/splash_editor.png", run_in_subprocess(main_builders.make_splash_editor) +) env.Depends("#main/app_icon.gen.h", "#main/app_icon.png") env.CommandNoCache("#main/app_icon.gen.h", "#main/app_icon.png", run_in_subprocess(main_builders.make_app_icon)) if env["tools"]: - SConscript('tests/SCsub') + SConscript("tests/SCsub") lib = env.add_library("main", env.main_sources) env.Prepend(LIBS=[lib]) diff --git a/main/main_builders.py b/main/main_builders.py index c48aaaa572..d86f58cb37 100644 --- a/main/main_builders.py +++ b/main/main_builders.py @@ -19,7 +19,7 @@ def make_splash(target, source, env): g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") g.write("#ifndef BOOT_SPLASH_H\n") g.write("#define BOOT_SPLASH_H\n") - g.write('static const Color boot_splash_bg_color = Color(0.14, 0.14, 0.14);\n') + g.write("static const Color boot_splash_bg_color = Color(0.14, 0.14, 0.14);\n") g.write("static const unsigned char boot_splash_png[] = {\n") for i in range(len(buf)): g.write(byte_to_str(buf[i]) + ",\n") @@ -38,7 +38,7 @@ def make_splash_editor(target, source, env): g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") g.write("#ifndef BOOT_SPLASH_EDITOR_H\n") g.write("#define BOOT_SPLASH_EDITOR_H\n") - g.write('static const Color boot_splash_editor_bg_color = Color(0.14, 0.14, 0.14);\n') + g.write("static const Color boot_splash_editor_bg_color = Color(0.14, 0.14, 0.14);\n") g.write("static const unsigned char boot_splash_editor_png[] = {\n") for i in range(len(buf)): g.write(byte_to_str(buf[i]) + ",\n") @@ -69,8 +69,8 @@ def make_default_controller_mappings(target, source, env): g = open(dst, "w") g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") - g.write("#include \"core/typedefs.h\"\n") - g.write("#include \"main/default_controller_mappings.h\"\n") + g.write('#include "core/typedefs.h"\n') + g.write('#include "main/default_controller_mappings.h"\n') # ensure mappings have a consistent order platform_mappings = OrderedDict() @@ -94,11 +94,19 @@ def make_default_controller_mappings(target, source, env): line_parts = line.split(",") guid = line_parts[0] if guid in platform_mappings[current_platform]: - g.write("// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(src_path, current_platform, platform_mappings[current_platform][guid])) + g.write( + "// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format( + src_path, current_platform, platform_mappings[current_platform][guid] + ) + ) valid_mapping = True for input_map in line_parts[2:]: if "+" in input_map or "-" in input_map or "~" in input_map: - g.write("// WARNING - DISCARDED UNSUPPORTED MAPPING TYPE FROM DATABASE {}: {} {}\n".format(src_path, current_platform, line)) + g.write( + "// WARNING - DISCARDED UNSUPPORTED MAPPING TYPE FROM DATABASE {}: {} {}\n".format( + src_path, 
current_platform, line + ) + ) valid_mapping = False break if valid_mapping: @@ -119,12 +127,12 @@ def make_default_controller_mappings(target, source, env): variable = platform_variables[platform] g.write("{}\n".format(variable)) for mapping in mappings.values(): - g.write("\t\"{}\",\n".format(mapping)) + g.write('\t"{}",\n'.format(mapping)) g.write("#endif\n") g.write("\tNULL\n};\n") g.close() -if __name__ == '__main__': +if __name__ == "__main__": subprocess_main(globals()) diff --git a/main/tests/SCsub b/main/tests/SCsub index 437d9ed777..cb1d35b12f 100644 --- a/main/tests/SCsub +++ b/main/tests/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/python -Import('env') +Import("env") env.tests_sources = [] env.add_source_files(env.tests_sources, "*.cpp") diff --git a/methods.py b/methods.py index c3f4bffb61..dea5d5e60b 100644 --- a/methods.py +++ b/methods.py @@ -10,13 +10,13 @@ def add_source_files(self, sources, files, warn_duplicates=True): # Convert string to list of absolute paths (including expanding wildcard) if isbasestring(files): # Keep SCons project-absolute path as they are (no wildcard support) - if files.startswith('#'): - if '*' in files: + if files.startswith("#"): + if "*" in files: print("ERROR: Wildcards can't be expanded in SCons project-absolute path: '{}'".format(files)) return files = [files] else: - dir_path = self.Dir('.').abspath + dir_path = self.Dir(".").abspath files = sorted(glob.glob(dir_path + "/" + files)) # Add each path as compiled Object following environment (self) configuration @@ -24,7 +24,7 @@ def add_source_files(self, sources, files, warn_duplicates=True): obj = self.Object(path) if obj in sources: if warn_duplicates: - print("WARNING: Object \"{}\" already included in environment sources.".format(obj)) + print('WARNING: Object "{}" already included in environment sources.'.format(obj)) else: continue sources.append(obj) @@ -35,20 +35,20 @@ def disable_warnings(self): if self.msvc: # We have to remove existing warning level defines before appending /w, # otherwise we get: "warning D9025 : overriding '/W3' with '/w'" - warn_flags = ['/Wall', '/W4', '/W3', '/W2', '/W1', '/WX'] - self.Append(CCFLAGS=['/w']) - self.Append(CFLAGS=['/w']) - self.Append(CXXFLAGS=['/w']) - self['CCFLAGS'] = [x for x in self['CCFLAGS'] if not x in warn_flags] - self['CFLAGS'] = [x for x in self['CFLAGS'] if not x in warn_flags] - self['CXXFLAGS'] = [x for x in self['CXXFLAGS'] if not x in warn_flags] + warn_flags = ["/Wall", "/W4", "/W3", "/W2", "/W1", "/WX"] + self.Append(CCFLAGS=["/w"]) + self.Append(CFLAGS=["/w"]) + self.Append(CXXFLAGS=["/w"]) + self["CCFLAGS"] = [x for x in self["CCFLAGS"] if not x in warn_flags] + self["CFLAGS"] = [x for x in self["CFLAGS"] if not x in warn_flags] + self["CXXFLAGS"] = [x for x in self["CXXFLAGS"] if not x in warn_flags] else: - self.Append(CCFLAGS=['-w']) - self.Append(CFLAGS=['-w']) - self.Append(CXXFLAGS=['-w']) + self.Append(CCFLAGS=["-w"]) + self.Append(CFLAGS=["-w"]) + self.Append(CXXFLAGS=["-w"]) -def add_module_version_string(self,s): +def add_module_version_string(self, s): self.module_version_string += "." 
+ s @@ -63,16 +63,16 @@ def update_version(module_version_string=""): # NOTE: It is safe to generate this file here, since this is still executed serially f = open("core/version_generated.gen.h", "w") - f.write("#define VERSION_SHORT_NAME \"" + str(version.short_name) + "\"\n") - f.write("#define VERSION_NAME \"" + str(version.name) + "\"\n") + f.write('#define VERSION_SHORT_NAME "' + str(version.short_name) + '"\n') + f.write('#define VERSION_NAME "' + str(version.name) + '"\n') f.write("#define VERSION_MAJOR " + str(version.major) + "\n") f.write("#define VERSION_MINOR " + str(version.minor) + "\n") f.write("#define VERSION_PATCH " + str(version.patch) + "\n") - f.write("#define VERSION_STATUS \"" + str(version.status) + "\"\n") - f.write("#define VERSION_BUILD \"" + str(build_name) + "\"\n") - f.write("#define VERSION_MODULE_CONFIG \"" + str(version.module_config) + module_version_string + "\"\n") + f.write('#define VERSION_STATUS "' + str(version.status) + '"\n') + f.write('#define VERSION_BUILD "' + str(build_name) + '"\n') + f.write('#define VERSION_MODULE_CONFIG "' + str(version.module_config) + module_version_string + '"\n') f.write("#define VERSION_YEAR " + str(version.year) + "\n") - f.write("#define VERSION_WEBSITE \"" + str(version.website) + "\"\n") + f.write('#define VERSION_WEBSITE "' + str(version.website) + '"\n') f.close() # NOTE: It is safe to generate this file here, since this is still executed serially @@ -94,7 +94,7 @@ def update_version(module_version_string=""): else: githash = head - fhash.write("#define VERSION_HASH \"" + githash + "\"") + fhash.write('#define VERSION_HASH "' + githash + '"') fhash.close() @@ -161,29 +161,37 @@ def write_modules(module_list): try: with open(os.path.join(path, "register_types.h")): includes_cpp += '#include "' + path + '/register_types.h"\n' - register_cpp += '#ifdef MODULE_' + name.upper() + '_ENABLED\n' - register_cpp += '\tregister_' + name + '_types();\n' - register_cpp += '#endif\n' - unregister_cpp += '#ifdef MODULE_' + name.upper() + '_ENABLED\n' - unregister_cpp += '\tunregister_' + name + '_types();\n' - unregister_cpp += '#endif\n' + register_cpp += "#ifdef MODULE_" + name.upper() + "_ENABLED\n" + register_cpp += "\tregister_" + name + "_types();\n" + register_cpp += "#endif\n" + unregister_cpp += "#ifdef MODULE_" + name.upper() + "_ENABLED\n" + unregister_cpp += "\tunregister_" + name + "_types();\n" + unregister_cpp += "#endif\n" except IOError: pass - modules_cpp = """ + modules_cpp = ( + """ // modules.cpp - THIS FILE IS GENERATED, DO NOT EDIT!!!!!!! #include "register_module_types.h" -""" + includes_cpp + """ +""" + + includes_cpp + + """ void register_module_types() { -""" + register_cpp + """ +""" + + register_cpp + + """ } void unregister_module_types() { -""" + unregister_cpp + """ +""" + + unregister_cpp + + """ } """ + ) # NOTE: It is safe to generate this file here, since this is still executed serially with open("modules/register_module_types.gen.cpp", "w") as f: @@ -206,9 +214,10 @@ def convert_custom_modules_path(path): def disable_module(self): self.disabled_modules.append(self.current_module) + def use_windows_spawn_fix(self, platform=None): - if (os.name != "nt"): + if os.name != "nt": return # not needed, only for windows # On Windows, due to the limited command line length, when creating a static library @@ -219,14 +228,21 @@ def use_windows_spawn_fix(self, platform=None): # got built correctly regardless the invocation strategy. 
# Furthermore, since SCons will rebuild the library from scratch when an object file # changes, no multiple versions of the same object file will be present. - self.Replace(ARFLAGS='q') + self.Replace(ARFLAGS="q") def mySubProcess(cmdline, env): startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env) + proc = subprocess.Popen( + cmdline, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + startupinfo=startupinfo, + shell=False, + env=env, + ) _, err = proc.communicate() rv = proc.wait() if rv: @@ -237,7 +253,7 @@ def use_windows_spawn_fix(self, platform=None): def mySpawn(sh, escape, cmd, args, env): - newargs = ' '.join(args[1:]) + newargs = " ".join(args[1:]) cmdline = cmd + " " + newargs rv = 0 @@ -253,10 +269,10 @@ def use_windows_spawn_fix(self, platform=None): return rv - self['SPAWN'] = mySpawn + self["SPAWN"] = mySpawn -def split_lib(self, libname, src_list = None, env_lib = None): +def split_lib(self, libname, src_list=None, env_lib=None): env = self num = 0 @@ -307,22 +323,20 @@ def split_lib(self, libname, src_list = None, env_lib = None): # impacts the linker call, we need to hack our way into the linking commands # LINKCOM and SHLINKCOM to set those flags. - if '-Wl,--start-group' in env['LINKCOM'] and '-Wl,--start-group' in env['SHLINKCOM']: + if "-Wl,--start-group" in env["LINKCOM"] and "-Wl,--start-group" in env["SHLINKCOM"]: # Already added by a previous call, skip. return - env['LINKCOM'] = str(env['LINKCOM']).replace('$_LIBFLAGS', - '-Wl,--start-group $_LIBFLAGS -Wl,--end-group') - env['SHLINKCOM'] = str(env['LINKCOM']).replace('$_LIBFLAGS', - '-Wl,--start-group $_LIBFLAGS -Wl,--end-group') + env["LINKCOM"] = str(env["LINKCOM"]).replace("$_LIBFLAGS", "-Wl,--start-group $_LIBFLAGS -Wl,--end-group") + env["SHLINKCOM"] = str(env["LINKCOM"]).replace("$_LIBFLAGS", "-Wl,--start-group $_LIBFLAGS -Wl,--end-group") def save_active_platforms(apnames, ap): for x in ap: - names = ['logo'] + names = ["logo"] if os.path.isfile(x + "/run_icon.png"): - names.append('run_icon') + names.append("run_icon") for name in names: pngf = open(x + "/" + name + ".png", "rb") @@ -332,7 +346,7 @@ def save_active_platforms(apnames, ap): while len(b) == 1: str += hex(ord(b)) b = pngf.read(1) - if (len(b) == 1): + if len(b) == 1: str += "," str += "};\n" @@ -352,30 +366,70 @@ def no_verbose(sys, env): # Colors are disabled in non-TTY environments such as pipes. 
This means # that if output is redirected to a file, it will not contain color codes if sys.stdout.isatty(): - colors['cyan'] = '\033[96m' - colors['purple'] = '\033[95m' - colors['blue'] = '\033[94m' - colors['green'] = '\033[92m' - colors['yellow'] = '\033[93m' - colors['red'] = '\033[91m' - colors['end'] = '\033[0m' + colors["cyan"] = "\033[96m" + colors["purple"] = "\033[95m" + colors["blue"] = "\033[94m" + colors["green"] = "\033[92m" + colors["yellow"] = "\033[93m" + colors["red"] = "\033[91m" + colors["end"] = "\033[0m" else: - colors['cyan'] = '' - colors['purple'] = '' - colors['blue'] = '' - colors['green'] = '' - colors['yellow'] = '' - colors['red'] = '' - colors['end'] = '' + colors["cyan"] = "" + colors["purple"] = "" + colors["blue"] = "" + colors["green"] = "" + colors["yellow"] = "" + colors["red"] = "" + colors["end"] = "" - compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) - java_compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) - compile_shared_source_message = '%sCompiling shared %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) - link_program_message = '%sLinking Program %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - link_library_message = '%sLinking Static Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - ranlib_library_message = '%sRanlib Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - link_shared_library_message = '%sLinking Shared Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) - java_library_message = '%sCreating Java Archive %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) + compile_source_message = "%sCompiling %s==> %s$SOURCE%s" % ( + colors["blue"], + colors["purple"], + colors["yellow"], + colors["end"], + ) + java_compile_source_message = "%sCompiling %s==> %s$SOURCE%s" % ( + colors["blue"], + colors["purple"], + colors["yellow"], + colors["end"], + ) + compile_shared_source_message = "%sCompiling shared %s==> %s$SOURCE%s" % ( + colors["blue"], + colors["purple"], + colors["yellow"], + colors["end"], + ) + link_program_message = "%sLinking Program %s==> %s$TARGET%s" % ( + colors["red"], + colors["purple"], + colors["yellow"], + colors["end"], + ) + link_library_message = "%sLinking Static Library %s==> %s$TARGET%s" % ( + colors["red"], + colors["purple"], + colors["yellow"], + colors["end"], + ) + ranlib_library_message = "%sRanlib Library %s==> %s$TARGET%s" % ( + colors["red"], + colors["purple"], + colors["yellow"], + colors["end"], + ) + link_shared_library_message = "%sLinking Shared Library %s==> %s$TARGET%s" % ( + colors["red"], + colors["purple"], + colors["yellow"], + colors["end"], + ) + java_library_message = "%sCreating Java Archive %s==> %s$TARGET%s" % ( + colors["red"], + colors["purple"], + colors["yellow"], + colors["end"], + ) env.Append(CXXCOMSTR=[compile_source_message]) env.Append(CCCOMSTR=[compile_source_message]) @@ -416,70 +470,79 @@ def detect_visual_c_compiler_version(tools_env): vc_chosen_compiler_str = "" # Start with Pre VS 2017 checks which uses VCINSTALLDIR: - if 'VCINSTALLDIR' in tools_env: + if "VCINSTALLDIR" in tools_env: # print("Checking VCINSTALLDIR") # find() works with -1 so big ifs below are needed... 
the simplest solution, in fact # First test if amd64 and amd64_x86 compilers are present in the path vc_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64;") - if(vc_amd64_compiler_detection_index > -1): + if vc_amd64_compiler_detection_index > -1: vc_chosen_compiler_index = vc_amd64_compiler_detection_index vc_chosen_compiler_str = "amd64" vc_amd64_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64_x86;") - if(vc_amd64_x86_compiler_detection_index > -1 - and (vc_chosen_compiler_index == -1 - or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index)): + if vc_amd64_x86_compiler_detection_index > -1 and ( + vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index + ): vc_chosen_compiler_index = vc_amd64_x86_compiler_detection_index vc_chosen_compiler_str = "amd64_x86" # Now check the 32 bit compilers vc_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN;") - if(vc_x86_compiler_detection_index > -1 - and (vc_chosen_compiler_index == -1 - or vc_chosen_compiler_index > vc_x86_compiler_detection_index)): + if vc_x86_compiler_detection_index > -1 and ( + vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_compiler_detection_index + ): vc_chosen_compiler_index = vc_x86_compiler_detection_index vc_chosen_compiler_str = "x86" - vc_x86_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env['VCINSTALLDIR'] + "BIN\\x86_amd64;") - if(vc_x86_amd64_compiler_detection_index > -1 - and (vc_chosen_compiler_index == -1 - or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index)): + vc_x86_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\x86_amd64;") + if vc_x86_amd64_compiler_detection_index > -1 and ( + vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index + ): vc_chosen_compiler_index = vc_x86_amd64_compiler_detection_index vc_chosen_compiler_str = "x86_amd64" # and for VS 2017 and newer we check VCTOOLSINSTALLDIR: - if 'VCTOOLSINSTALLDIR' in tools_env: + if "VCTOOLSINSTALLDIR" in tools_env: # Newer versions have a different path available - vc_amd64_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX64\\X64;") - if(vc_amd64_compiler_detection_index > -1): + vc_amd64_compiler_detection_index = ( + tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX64\\X64;") + ) + if vc_amd64_compiler_detection_index > -1: vc_chosen_compiler_index = vc_amd64_compiler_detection_index vc_chosen_compiler_str = "amd64" - vc_amd64_x86_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX64\\X86;") - if(vc_amd64_x86_compiler_detection_index > -1 - and (vc_chosen_compiler_index == -1 - or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index)): + vc_amd64_x86_compiler_detection_index = ( + tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX64\\X86;") + ) + if vc_amd64_x86_compiler_detection_index > -1 and ( + vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index + ): vc_chosen_compiler_index = vc_amd64_x86_compiler_detection_index vc_chosen_compiler_str = "amd64_x86" - vc_x86_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX86\\X86;") - 
if(vc_x86_compiler_detection_index > -1 - and (vc_chosen_compiler_index == -1 - or vc_chosen_compiler_index > vc_x86_compiler_detection_index)): + vc_x86_compiler_detection_index = ( + tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX86\\X86;") + ) + if vc_x86_compiler_detection_index > -1 and ( + vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_compiler_detection_index + ): vc_chosen_compiler_index = vc_x86_compiler_detection_index vc_chosen_compiler_str = "x86" - vc_x86_amd64_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX86\\X64;") - if(vc_x86_amd64_compiler_detection_index > -1 - and (vc_chosen_compiler_index == -1 - or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index)): + vc_x86_amd64_compiler_detection_index = ( + tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX86\\X64;") + ) + if vc_x86_amd64_compiler_detection_index > -1 and ( + vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index + ): vc_chosen_compiler_index = vc_x86_amd64_compiler_detection_index vc_chosen_compiler_str = "x86_amd64" return vc_chosen_compiler_str + def find_visual_c_batch_file(env): from SCons.Tool.MSCommon.vc import get_default_version, get_host_target, find_batch_file @@ -487,6 +550,7 @@ def find_visual_c_batch_file(env): (host_platform, target_platform, _) = get_host_target(env) return find_batch_file(env, version, host_platform, target_platform)[0] + def generate_cpp_hint_file(filename): if os.path.isfile(filename): # Don't overwrite an existing hint file since the user may have customized it. @@ -498,15 +562,19 @@ def generate_cpp_hint_file(filename): except IOError: print("Could not write cpp.hint file.") + def generate_vs_project(env, num_jobs): batch_file = find_visual_c_batch_file(env) if batch_file: + def build_commandline(commands): - common_build_prefix = ['cmd /V /C set "plat=$(PlatformTarget)"', - '(if "$(PlatformTarget)"=="x64" (set "plat=x86_amd64"))', - 'set "tools=yes"', - '(if "$(Configuration)"=="release" (set "tools=no"))', - 'call "' + batch_file + '" !plat!'] + common_build_prefix = [ + 'cmd /V /C set "plat=$(PlatformTarget)"', + '(if "$(PlatformTarget)"=="x64" (set "plat=x86_amd64"))', + 'set "tools=yes"', + '(if "$(Configuration)"=="release" (set "tools=no"))', + 'call "' + batch_file + '" !plat!', + ] result = " ^& ".join(common_build_prefix + [commands]) return result @@ -522,83 +590,102 @@ def generate_vs_project(env, num_jobs): # to double quote off the directory. However, the path ends # in a backslash, so we need to remove this, lest it escape the # last double quote off, confusing MSBuild - env['MSVSBUILDCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" platform=windows progress=no target=$(Configuration) tools=!tools! -j' + str(num_jobs)) - env['MSVSREBUILDCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" platform=windows progress=no target=$(Configuration) tools=!tools! vsproj=yes -j' + str(num_jobs)) - env['MSVSCLEANCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" --clean platform=windows progress=no target=$(Configuration) tools=!tools! -j' + str(num_jobs)) + env["MSVSBUILDCOM"] = build_commandline( + "scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" platform=windows progress=no target=$(Configuration) tools=!tools! 
-j" + + str(num_jobs) + ) + env["MSVSREBUILDCOM"] = build_commandline( + "scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" platform=windows progress=no target=$(Configuration) tools=!tools! vsproj=yes -j" + + str(num_jobs) + ) + env["MSVSCLEANCOM"] = build_commandline( + "scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" --clean platform=windows progress=no target=$(Configuration) tools=!tools! -j" + + str(num_jobs) + ) # This version information (Win32, x64, Debug, Release, Release_Debug seems to be # required for Visual Studio to understand that it needs to generate an NMAKE # project. Do not modify without knowing what you are doing. - debug_variants = ['debug|Win32'] + ['debug|x64'] - release_variants = ['release|Win32'] + ['release|x64'] - release_debug_variants = ['release_debug|Win32'] + ['release_debug|x64'] + debug_variants = ["debug|Win32"] + ["debug|x64"] + release_variants = ["release|Win32"] + ["release|x64"] + release_debug_variants = ["release_debug|Win32"] + ["release_debug|x64"] variants = debug_variants + release_variants + release_debug_variants - debug_targets = ['bin\\godot.windows.tools.32.exe'] + ['bin\\godot.windows.tools.64.exe'] - release_targets = ['bin\\godot.windows.opt.32.exe'] + ['bin\\godot.windows.opt.64.exe'] - release_debug_targets = ['bin\\godot.windows.opt.tools.32.exe'] + ['bin\\godot.windows.opt.tools.64.exe'] + debug_targets = ["bin\\godot.windows.tools.32.exe"] + ["bin\\godot.windows.tools.64.exe"] + release_targets = ["bin\\godot.windows.opt.32.exe"] + ["bin\\godot.windows.opt.64.exe"] + release_debug_targets = ["bin\\godot.windows.opt.tools.32.exe"] + ["bin\\godot.windows.opt.tools.64.exe"] targets = debug_targets + release_targets + release_debug_targets - if not env.get('MSVS'): - env['MSVS']['PROJECTSUFFIX'] = '.vcxproj' - env['MSVS']['SOLUTIONSUFFIX'] = '.sln' + if not env.get("MSVS"): + env["MSVS"]["PROJECTSUFFIX"] = ".vcxproj" + env["MSVS"]["SOLUTIONSUFFIX"] = ".sln" env.MSVSProject( - target=['#godot' + env['MSVSPROJECTSUFFIX']], + target=["#godot" + env["MSVSPROJECTSUFFIX"]], incs=env.vs_incs, srcs=env.vs_srcs, runfile=targets, buildtarget=targets, auto_build_solution=1, - variant=variants) + variant=variants, + ) else: - print("Could not locate Visual Studio batch file for setting up the build environment. Not generating VS project.") + print( + "Could not locate Visual Studio batch file for setting up the build environment. Not generating VS project." 
+ ) + def precious_program(env, program, sources, **args): program = env.ProgramOriginal(program, sources, **args) env.Precious(program) return program + def add_shared_library(env, name, sources, **args): library = env.SharedLibrary(name, sources, **args) env.NoCache(library) return library + def add_library(env, name, sources, **args): library = env.Library(name, sources, **args) env.NoCache(library) return library + def add_program(env, name, sources, **args): program = env.Program(name, sources, **args) env.NoCache(program) return program + def CommandNoCache(env, target, sources, command, **args): result = env.Command(target, sources, command, **args) env.NoCache(result) return result + def detect_darwin_sdk_path(platform, env): - sdk_name = '' - if platform == 'osx': - sdk_name = 'macosx' - var_name = 'MACOS_SDK_PATH' - elif platform == 'iphone': - sdk_name = 'iphoneos' - var_name = 'IPHONESDK' - elif platform == 'iphonesimulator': - sdk_name = 'iphonesimulator' - var_name = 'IPHONESDK' + sdk_name = "" + if platform == "osx": + sdk_name = "macosx" + var_name = "MACOS_SDK_PATH" + elif platform == "iphone": + sdk_name = "iphoneos" + var_name = "IPHONESDK" + elif platform == "iphonesimulator": + sdk_name = "iphonesimulator" + var_name = "IPHONESDK" else: raise Exception("Invalid platform argument passed to detect_darwin_sdk_path") if not env[var_name]: try: - sdk_path = decode_utf8(subprocess.check_output(['xcrun', '--sdk', sdk_name, '--show-sdk-path']).strip()) + sdk_path = decode_utf8(subprocess.check_output(["xcrun", "--sdk", sdk_name, "--show-sdk-path"]).strip()) if sdk_path: env[var_name] = sdk_path except (subprocess.CalledProcessError, OSError): print("Failed to find SDK path while running xcrun --sdk {} --show-sdk-path.".format(sdk_name)) raise + def get_compiler_version(env): """ Returns an array of version numbers as ints: [major, minor, patch]. 
@@ -608,20 +695,22 @@ def get_compiler_version(env): # Not using -dumpversion as some GCC distros only return major, and # Clang used to return hardcoded 4.2.1: # https://reviews.llvm.org/D56803 try: - version = decode_utf8(subprocess.check_output([env.subst(env['CXX']), '--version']).strip()) + version = decode_utf8(subprocess.check_output([env.subst(env["CXX"]), "--version"]).strip()) except (subprocess.CalledProcessError, OSError): print("Couldn't parse CXX environment variable to infer compiler version.") return None else: # TODO: Implement for MSVC return None - match = re.search('[0-9]+\.[0-9.]+', version) + match = re.search("[0-9]+\.[0-9.]+", version) if match is not None: - return list(map(int, match.group().split('.'))) + return list(map(int, match.group().split("."))) else: return None + def using_gcc(env): - return 'gcc' in os.path.basename(env["CC"]) + return "gcc" in os.path.basename(env["CC"]) + def using_clang(env): - return 'clang' in os.path.basename(env["CC"]) + return "clang" in os.path.basename(env["CC"]) diff --git a/misc/scripts/fix_headers.py b/misc/scripts/fix_headers.py index f0038a8351..7af97eec4b 100755 --- a/misc/scripts/fix_headers.py +++ b/misc/scripts/fix_headers.py @@ -37,24 +37,24 @@ files = open("files", "r") fname = files.readline() -while (fname != ""): +while fname != "": # Handle replacing $filename with actual filename and keep alignment fsingle = fname.strip() - if (fsingle.find("/") != -1): - fsingle = fsingle[fsingle.rfind("/") + 1:] + if fsingle.find("/") != -1: + fsingle = fsingle[fsingle.rfind("/") + 1 :] rep_fl = "$filename" rep_fi = fsingle len_fl = len(rep_fl) len_fi = len(rep_fi) # Pad with spaces to keep alignment - if (len_fi < len_fl): + if len_fi < len_fl: for x in range(len_fl - len_fi): rep_fi += " " - elif (len_fl < len_fi): + elif len_fl < len_fi: for x in range(len_fi - len_fl): rep_fl += " " - if (header.find(rep_fl) != -1): + if header.find(rep_fl) != -1: text = header.replace(rep_fl, rep_fi) else: text = header.replace("$filename", fsingle) @@ -71,21 +71,21 @@ while (fname != ""): line = fileread.readline() header_done = False - while (line.strip() == ""): # Skip empty lines at the top + while line.strip() == "": # Skip empty lines at the top line = fileread.readline() - if (line.find("/**********") == -1): # Godot header starts this way + if line.find("/**********") == -1: # Godot header starts this way # Maybe starting with a non-Godot comment, abort header magic header_done = True - while (not header_done): # Handle header now - if (line.find("/*") != 0): # No more starting with a comment + while not header_done: # Handle header now + if line.find("/*") != 0: # No more starting with a comment header_done = True - if (line.strip() != ""): + if line.strip() != "": text += line line = fileread.readline() - while (line != ""): # Dump everything until EOF + while line != "": # Dump everything until EOF text += line line = fileread.readline() diff --git a/modules/SCsub b/modules/SCsub index 1bbc0b6501..d9de2c999a 100644 --- a/modules/SCsub +++ b/modules/SCsub @@ -6,14 +6,14 @@ import os env_modules = env.Clone() -Export('env_modules') +Export("env_modules") env.modules_sources = [] env_modules.add_source_files(env.modules_sources, "register_module_types.gen.cpp") for name, path in env.module_list.items(): - if (name in env.disabled_modules): + if name in env.disabled_modules: continue env_modules.Append(CPPDEFINES=["MODULE_" + name.upper() + "_ENABLED"]) @@ -22,8 +22,8 @@ for name, path in env.module_list.items(): else: 
SConscript(path + "/SCsub") # Custom. -if env['split_libmodules']: - env.split_lib("modules", env_lib = env_modules) +if env["split_libmodules"]: + env.split_lib("modules", env_lib=env_modules) else: lib = env_modules.add_library("modules", env.modules_sources) diff --git a/modules/arkit/SCsub b/modules/arkit/SCsub index e605703a72..61c0a8248c 100644 --- a/modules/arkit/SCsub +++ b/modules/arkit/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_arkit = env_modules.Clone() @@ -9,4 +9,4 @@ env_arkit = env_modules.Clone() modules_sources = [] env_arkit.add_source_files(modules_sources, "*.cpp") env_arkit.add_source_files(modules_sources, "*.mm") -mod_lib = env_modules.add_library('#bin/libgodot_arkit_module' + env['LIBSUFFIX'], modules_sources) \ No newline at end of file +mod_lib = env_modules.add_library("#bin/libgodot_arkit_module" + env["LIBSUFFIX"], modules_sources) diff --git a/modules/arkit/config.py b/modules/arkit/config.py index 96e41826c5..e68603fc93 100644 --- a/modules/arkit/config.py +++ b/modules/arkit/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return platform == 'iphone' + return platform == "iphone" + def configure(env): pass diff --git a/modules/assimp/SCsub b/modules/assimp/SCsub index 90cdd7f5fc..a61bb418bf 100644 --- a/modules/assimp/SCsub +++ b/modules/assimp/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_assimp = env_modules.Clone() @@ -10,85 +10,85 @@ env_assimp = env_modules.Clone() if True: # env['builtin_assimp']: thirdparty_dir = "#thirdparty/assimp" - env_assimp.Prepend(CPPPATH=['#thirdparty/assimp']) - env_assimp.Prepend(CPPPATH=['#thirdparty/assimp/code']) - env_assimp.Prepend(CPPPATH=['#thirdparty/assimp/include']) + env_assimp.Prepend(CPPPATH=["#thirdparty/assimp"]) + env_assimp.Prepend(CPPPATH=["#thirdparty/assimp/code"]) + env_assimp.Prepend(CPPPATH=["#thirdparty/assimp/include"]) - #env_assimp.Append(CPPDEFINES=['ASSIMP_DOUBLE_PRECISION']) # TODO default to what godot is compiled with for future double support - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_SINGLETHREADED']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_BOOST_WORKAROUND']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OWN_ZLIB']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_EXPORT']) + # env_assimp.Append(CPPDEFINES=['ASSIMP_DOUBLE_PRECISION']) # TODO default to what godot is compiled with for future double support + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_SINGLETHREADED"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_BOOST_WORKAROUND"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OWN_ZLIB"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_EXPORT"]) # Importers we don't need - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3D_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3DS_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3MF_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_AC_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_AMF_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_ASE_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_ASSBIN_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_B3D_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_BLEND_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_BVH_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_C4D_IMPORTER']) - 
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_COB_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_COLLADA_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_CSM_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_DXF_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_GLTF2_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_GLTF_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_HMP_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IFC_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IRR_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IRRMESH_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_LWO_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_LWS_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_M3D_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD2_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD3_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD5_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD5_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MDC_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MDL_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MMD_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MS3D_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_NDO_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_NFF_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OBJ_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OFF_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OGRE_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OPENGEX_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_PLY_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_Q3BSP_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_Q3D_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_RAW_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_SIB_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_SMD_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_STEP_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_STL_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_TERRAGEN_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_X3D_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_XGL_IMPORTER']) - env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_X_IMPORTER']) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3D_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3DS_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3MF_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_AC_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_AMF_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_ASE_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_ASSBIN_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_B3D_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_BLEND_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_BVH_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_C4D_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_COB_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_COLLADA_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_CSM_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_DXF_IMPORTER"]) + 
env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_GLTF2_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_GLTF_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_HMP_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IFC_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IRR_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IRRMESH_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_LWO_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_LWS_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_M3D_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD2_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD3_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD5_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD5_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MDC_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MDL_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MMD_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MS3D_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_NDO_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_NFF_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OBJ_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OFF_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OGRE_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OPENGEX_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_PLY_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_Q3BSP_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_Q3D_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_RAW_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_SIB_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_SMD_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_STEP_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_STL_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_TERRAGEN_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_X3D_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_XGL_IMPORTER"]) + env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_X_IMPORTER"]) + + if env["platform"] == "windows": + env_assimp.Append(CPPDEFINES=["PLATFORM_WINDOWS"]) + env_assimp.Append(CPPDEFINES=[("PLATFORM", "WINDOWS")]) + elif env["platform"] == "x11": + env_assimp.Append(CPPDEFINES=["PLATFORM_LINUX"]) + env_assimp.Append(CPPDEFINES=[("PLATFORM", "LINUX")]) + elif env["platform"] == "osx": + env_assimp.Append(CPPDEFINES=["PLATFORM_DARWIN"]) + env_assimp.Append(CPPDEFINES=[("PLATFORM", "DARWIN")]) - if(env['platform'] == 'windows'): - env_assimp.Append(CPPDEFINES=['PLATFORM_WINDOWS']) - env_assimp.Append(CPPDEFINES=[('PLATFORM', 'WINDOWS')]) - elif(env['platform'] == 'x11'): - env_assimp.Append(CPPDEFINES=['PLATFORM_LINUX']) - env_assimp.Append(CPPDEFINES=[('PLATFORM', 'LINUX')]) - elif(env['platform'] == 'osx'): - env_assimp.Append(CPPDEFINES=['PLATFORM_DARWIN']) - env_assimp.Append(CPPDEFINES=[('PLATFORM', 'DARWIN')]) - env_thirdparty = env_assimp.Clone() env_thirdparty.disable_warnings() - env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/CApi/*.cpp')) - env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/Common/*.cpp')) - env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/PostProcessing/*.cpp')) - 
env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/Material/*.cpp')) - env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/FBX/*.cpp')) + env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/CApi/*.cpp")) + env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/Common/*.cpp")) + env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/PostProcessing/*.cpp")) + env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/Material/*.cpp")) + env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/FBX/*.cpp")) # Godot's own source files env_assimp.add_source_files(env.modules_sources, "*.cpp") diff --git a/modules/assimp/config.py b/modules/assimp/config.py index 098f1eafa9..53b8f2f2e3 100644 --- a/modules/assimp/config.py +++ b/modules/assimp/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return env['tools'] + return env["tools"] + def configure(env): pass diff --git a/modules/bmp/SCsub b/modules/bmp/SCsub index e7da7cf108..4f3405ff28 100644 --- a/modules/bmp/SCsub +++ b/modules/bmp/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_bmp = env_modules.Clone() diff --git a/modules/bmp/config.py b/modules/bmp/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/bmp/config.py +++ b/modules/bmp/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/bullet/SCsub b/modules/bullet/SCsub index 02d0a31a69..692c749886 100644 --- a/modules/bullet/SCsub +++ b/modules/bullet/SCsub @@ -1,208 +1,203 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_bullet = env_modules.Clone() # Thirdparty source files -if env['builtin_bullet']: +if env["builtin_bullet"]: # Build only version 2 for now (as of 2.89) # Sync file list with relevant upstream CMakeLists.txt for each folder. 
thirdparty_dir = "#thirdparty/bullet/" bullet2_src = [ # BulletCollision - "BulletCollision/BroadphaseCollision/btAxisSweep3.cpp" - , "BulletCollision/BroadphaseCollision/btBroadphaseProxy.cpp" - , "BulletCollision/BroadphaseCollision/btCollisionAlgorithm.cpp" - , "BulletCollision/BroadphaseCollision/btDbvt.cpp" - , "BulletCollision/BroadphaseCollision/btDbvtBroadphase.cpp" - , "BulletCollision/BroadphaseCollision/btDispatcher.cpp" - , "BulletCollision/BroadphaseCollision/btOverlappingPairCache.cpp" - , "BulletCollision/BroadphaseCollision/btQuantizedBvh.cpp" - , "BulletCollision/BroadphaseCollision/btSimpleBroadphase.cpp" - , "BulletCollision/CollisionDispatch/btActivatingCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btBoxBoxCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btBoxBoxDetector.cpp" - , "BulletCollision/CollisionDispatch/btCollisionDispatcher.cpp" - , "BulletCollision/CollisionDispatch/btCollisionDispatcherMt.cpp" - , "BulletCollision/CollisionDispatch/btCollisionObject.cpp" - , "BulletCollision/CollisionDispatch/btCollisionWorld.cpp" - , "BulletCollision/CollisionDispatch/btCollisionWorldImporter.cpp" - , "BulletCollision/CollisionDispatch/btCompoundCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btCompoundCompoundCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btConvexConcaveCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btConvexConvexAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btConvexPlaneCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btDefaultCollisionConfiguration.cpp" - , "BulletCollision/CollisionDispatch/btEmptyCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btGhostObject.cpp" - , "BulletCollision/CollisionDispatch/btHashedSimplePairCache.cpp" - , "BulletCollision/CollisionDispatch/btInternalEdgeUtility.cpp" - , "BulletCollision/CollisionDispatch/btManifoldResult.cpp" - , "BulletCollision/CollisionDispatch/btSimulationIslandManager.cpp" - , "BulletCollision/CollisionDispatch/btSphereBoxCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btSphereSphereCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btSphereTriangleCollisionAlgorithm.cpp" - , "BulletCollision/CollisionDispatch/btUnionFind.cpp" - , "BulletCollision/CollisionDispatch/SphereTriangleDetector.cpp" - , "BulletCollision/CollisionShapes/btBoxShape.cpp" - , "BulletCollision/CollisionShapes/btBox2dShape.cpp" - , "BulletCollision/CollisionShapes/btBvhTriangleMeshShape.cpp" - , "BulletCollision/CollisionShapes/btCapsuleShape.cpp" - , "BulletCollision/CollisionShapes/btCollisionShape.cpp" - , "BulletCollision/CollisionShapes/btCompoundShape.cpp" - , "BulletCollision/CollisionShapes/btConcaveShape.cpp" - , "BulletCollision/CollisionShapes/btConeShape.cpp" - , "BulletCollision/CollisionShapes/btConvexHullShape.cpp" - , "BulletCollision/CollisionShapes/btConvexInternalShape.cpp" - , "BulletCollision/CollisionShapes/btConvexPointCloudShape.cpp" - , "BulletCollision/CollisionShapes/btConvexPolyhedron.cpp" - , "BulletCollision/CollisionShapes/btConvexShape.cpp" - , "BulletCollision/CollisionShapes/btConvex2dShape.cpp" - , "BulletCollision/CollisionShapes/btConvexTriangleMeshShape.cpp" - , "BulletCollision/CollisionShapes/btCylinderShape.cpp" - , "BulletCollision/CollisionShapes/btEmptyShape.cpp" - , 
"BulletCollision/CollisionShapes/btHeightfieldTerrainShape.cpp" - , "BulletCollision/CollisionShapes/btMiniSDF.cpp" - , "BulletCollision/CollisionShapes/btMinkowskiSumShape.cpp" - , "BulletCollision/CollisionShapes/btMultimaterialTriangleMeshShape.cpp" - , "BulletCollision/CollisionShapes/btMultiSphereShape.cpp" - , "BulletCollision/CollisionShapes/btOptimizedBvh.cpp" - , "BulletCollision/CollisionShapes/btPolyhedralConvexShape.cpp" - , "BulletCollision/CollisionShapes/btScaledBvhTriangleMeshShape.cpp" - , "BulletCollision/CollisionShapes/btSdfCollisionShape.cpp" - , "BulletCollision/CollisionShapes/btShapeHull.cpp" - , "BulletCollision/CollisionShapes/btSphereShape.cpp" - , "BulletCollision/CollisionShapes/btStaticPlaneShape.cpp" - , "BulletCollision/CollisionShapes/btStridingMeshInterface.cpp" - , "BulletCollision/CollisionShapes/btTetrahedronShape.cpp" - , "BulletCollision/CollisionShapes/btTriangleBuffer.cpp" - , "BulletCollision/CollisionShapes/btTriangleCallback.cpp" - , "BulletCollision/CollisionShapes/btTriangleIndexVertexArray.cpp" - , "BulletCollision/CollisionShapes/btTriangleIndexVertexMaterialArray.cpp" - , "BulletCollision/CollisionShapes/btTriangleMesh.cpp" - , "BulletCollision/CollisionShapes/btTriangleMeshShape.cpp" - , "BulletCollision/CollisionShapes/btUniformScalingShape.cpp" - , "BulletCollision/Gimpact/btContactProcessing.cpp" - , "BulletCollision/Gimpact/btGenericPoolAllocator.cpp" - , "BulletCollision/Gimpact/btGImpactBvh.cpp" - , "BulletCollision/Gimpact/btGImpactCollisionAlgorithm.cpp" - , "BulletCollision/Gimpact/btGImpactQuantizedBvh.cpp" - , "BulletCollision/Gimpact/btGImpactShape.cpp" - , "BulletCollision/Gimpact/btTriangleShapeEx.cpp" - , "BulletCollision/Gimpact/gim_box_set.cpp" - , "BulletCollision/Gimpact/gim_contact.cpp" - , "BulletCollision/Gimpact/gim_memory.cpp" - , "BulletCollision/Gimpact/gim_tri_collision.cpp" - , "BulletCollision/NarrowPhaseCollision/btContinuousConvexCollision.cpp" - , "BulletCollision/NarrowPhaseCollision/btConvexCast.cpp" - , "BulletCollision/NarrowPhaseCollision/btGjkConvexCast.cpp" - , "BulletCollision/NarrowPhaseCollision/btGjkEpa2.cpp" - , "BulletCollision/NarrowPhaseCollision/btGjkEpaPenetrationDepthSolver.cpp" - , "BulletCollision/NarrowPhaseCollision/btGjkPairDetector.cpp" - , "BulletCollision/NarrowPhaseCollision/btMinkowskiPenetrationDepthSolver.cpp" - , "BulletCollision/NarrowPhaseCollision/btPersistentManifold.cpp" - , "BulletCollision/NarrowPhaseCollision/btRaycastCallback.cpp" - , "BulletCollision/NarrowPhaseCollision/btSubSimplexConvexCast.cpp" - , "BulletCollision/NarrowPhaseCollision/btVoronoiSimplexSolver.cpp" - , "BulletCollision/NarrowPhaseCollision/btPolyhedralContactClipping.cpp" - + "BulletCollision/BroadphaseCollision/btAxisSweep3.cpp", + "BulletCollision/BroadphaseCollision/btBroadphaseProxy.cpp", + "BulletCollision/BroadphaseCollision/btCollisionAlgorithm.cpp", + "BulletCollision/BroadphaseCollision/btDbvt.cpp", + "BulletCollision/BroadphaseCollision/btDbvtBroadphase.cpp", + "BulletCollision/BroadphaseCollision/btDispatcher.cpp", + "BulletCollision/BroadphaseCollision/btOverlappingPairCache.cpp", + "BulletCollision/BroadphaseCollision/btQuantizedBvh.cpp", + "BulletCollision/BroadphaseCollision/btSimpleBroadphase.cpp", + "BulletCollision/CollisionDispatch/btActivatingCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btBoxBoxCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btBoxBoxDetector.cpp", + 
"BulletCollision/CollisionDispatch/btCollisionDispatcher.cpp", + "BulletCollision/CollisionDispatch/btCollisionDispatcherMt.cpp", + "BulletCollision/CollisionDispatch/btCollisionObject.cpp", + "BulletCollision/CollisionDispatch/btCollisionWorld.cpp", + "BulletCollision/CollisionDispatch/btCollisionWorldImporter.cpp", + "BulletCollision/CollisionDispatch/btCompoundCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btCompoundCompoundCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btConvexConcaveCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btConvexConvexAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btConvexPlaneCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btDefaultCollisionConfiguration.cpp", + "BulletCollision/CollisionDispatch/btEmptyCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btGhostObject.cpp", + "BulletCollision/CollisionDispatch/btHashedSimplePairCache.cpp", + "BulletCollision/CollisionDispatch/btInternalEdgeUtility.cpp", + "BulletCollision/CollisionDispatch/btManifoldResult.cpp", + "BulletCollision/CollisionDispatch/btSimulationIslandManager.cpp", + "BulletCollision/CollisionDispatch/btSphereBoxCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btSphereSphereCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btSphereTriangleCollisionAlgorithm.cpp", + "BulletCollision/CollisionDispatch/btUnionFind.cpp", + "BulletCollision/CollisionDispatch/SphereTriangleDetector.cpp", + "BulletCollision/CollisionShapes/btBoxShape.cpp", + "BulletCollision/CollisionShapes/btBox2dShape.cpp", + "BulletCollision/CollisionShapes/btBvhTriangleMeshShape.cpp", + "BulletCollision/CollisionShapes/btCapsuleShape.cpp", + "BulletCollision/CollisionShapes/btCollisionShape.cpp", + "BulletCollision/CollisionShapes/btCompoundShape.cpp", + "BulletCollision/CollisionShapes/btConcaveShape.cpp", + "BulletCollision/CollisionShapes/btConeShape.cpp", + "BulletCollision/CollisionShapes/btConvexHullShape.cpp", + "BulletCollision/CollisionShapes/btConvexInternalShape.cpp", + "BulletCollision/CollisionShapes/btConvexPointCloudShape.cpp", + "BulletCollision/CollisionShapes/btConvexPolyhedron.cpp", + "BulletCollision/CollisionShapes/btConvexShape.cpp", + "BulletCollision/CollisionShapes/btConvex2dShape.cpp", + "BulletCollision/CollisionShapes/btConvexTriangleMeshShape.cpp", + "BulletCollision/CollisionShapes/btCylinderShape.cpp", + "BulletCollision/CollisionShapes/btEmptyShape.cpp", + "BulletCollision/CollisionShapes/btHeightfieldTerrainShape.cpp", + "BulletCollision/CollisionShapes/btMiniSDF.cpp", + "BulletCollision/CollisionShapes/btMinkowskiSumShape.cpp", + "BulletCollision/CollisionShapes/btMultimaterialTriangleMeshShape.cpp", + "BulletCollision/CollisionShapes/btMultiSphereShape.cpp", + "BulletCollision/CollisionShapes/btOptimizedBvh.cpp", + "BulletCollision/CollisionShapes/btPolyhedralConvexShape.cpp", + "BulletCollision/CollisionShapes/btScaledBvhTriangleMeshShape.cpp", + "BulletCollision/CollisionShapes/btSdfCollisionShape.cpp", + "BulletCollision/CollisionShapes/btShapeHull.cpp", + "BulletCollision/CollisionShapes/btSphereShape.cpp", + "BulletCollision/CollisionShapes/btStaticPlaneShape.cpp", + "BulletCollision/CollisionShapes/btStridingMeshInterface.cpp", + "BulletCollision/CollisionShapes/btTetrahedronShape.cpp", + "BulletCollision/CollisionShapes/btTriangleBuffer.cpp", + "BulletCollision/CollisionShapes/btTriangleCallback.cpp", + 
"BulletCollision/CollisionShapes/btTriangleIndexVertexArray.cpp", + "BulletCollision/CollisionShapes/btTriangleIndexVertexMaterialArray.cpp", + "BulletCollision/CollisionShapes/btTriangleMesh.cpp", + "BulletCollision/CollisionShapes/btTriangleMeshShape.cpp", + "BulletCollision/CollisionShapes/btUniformScalingShape.cpp", + "BulletCollision/Gimpact/btContactProcessing.cpp", + "BulletCollision/Gimpact/btGenericPoolAllocator.cpp", + "BulletCollision/Gimpact/btGImpactBvh.cpp", + "BulletCollision/Gimpact/btGImpactCollisionAlgorithm.cpp", + "BulletCollision/Gimpact/btGImpactQuantizedBvh.cpp", + "BulletCollision/Gimpact/btGImpactShape.cpp", + "BulletCollision/Gimpact/btTriangleShapeEx.cpp", + "BulletCollision/Gimpact/gim_box_set.cpp", + "BulletCollision/Gimpact/gim_contact.cpp", + "BulletCollision/Gimpact/gim_memory.cpp", + "BulletCollision/Gimpact/gim_tri_collision.cpp", + "BulletCollision/NarrowPhaseCollision/btContinuousConvexCollision.cpp", + "BulletCollision/NarrowPhaseCollision/btConvexCast.cpp", + "BulletCollision/NarrowPhaseCollision/btGjkConvexCast.cpp", + "BulletCollision/NarrowPhaseCollision/btGjkEpa2.cpp", + "BulletCollision/NarrowPhaseCollision/btGjkEpaPenetrationDepthSolver.cpp", + "BulletCollision/NarrowPhaseCollision/btGjkPairDetector.cpp", + "BulletCollision/NarrowPhaseCollision/btMinkowskiPenetrationDepthSolver.cpp", + "BulletCollision/NarrowPhaseCollision/btPersistentManifold.cpp", + "BulletCollision/NarrowPhaseCollision/btRaycastCallback.cpp", + "BulletCollision/NarrowPhaseCollision/btSubSimplexConvexCast.cpp", + "BulletCollision/NarrowPhaseCollision/btVoronoiSimplexSolver.cpp", + "BulletCollision/NarrowPhaseCollision/btPolyhedralContactClipping.cpp", # BulletDynamics - , "BulletDynamics/Character/btKinematicCharacterController.cpp" - , "BulletDynamics/ConstraintSolver/btConeTwistConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btContactConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btFixedConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btGearConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btGeneric6DofConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btGeneric6DofSpringConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btGeneric6DofSpring2Constraint.cpp" - , "BulletDynamics/ConstraintSolver/btHinge2Constraint.cpp" - , "BulletDynamics/ConstraintSolver/btHingeConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btPoint2PointConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolver.cpp" - , "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolverMt.cpp" - , "BulletDynamics/ConstraintSolver/btBatchedConstraints.cpp" - , "BulletDynamics/ConstraintSolver/btNNCGConstraintSolver.cpp" - , "BulletDynamics/ConstraintSolver/btSliderConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btSolve2LinearConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btTypedConstraint.cpp" - , "BulletDynamics/ConstraintSolver/btUniversalConstraint.cpp" - , "BulletDynamics/Dynamics/btDiscreteDynamicsWorld.cpp" - , "BulletDynamics/Dynamics/btDiscreteDynamicsWorldMt.cpp" - , "BulletDynamics/Dynamics/btSimulationIslandManagerMt.cpp" - , "BulletDynamics/Dynamics/btRigidBody.cpp" - , "BulletDynamics/Dynamics/btSimpleDynamicsWorld.cpp" - #, "BulletDynamics/Dynamics/Bullet-C-API.cpp" - , "BulletDynamics/Vehicle/btRaycastVehicle.cpp" - , "BulletDynamics/Vehicle/btWheelInfo.cpp" - , "BulletDynamics/Featherstone/btMultiBody.cpp" - , "BulletDynamics/Featherstone/btMultiBodyConstraint.cpp" - , 
"BulletDynamics/Featherstone/btMultiBodyConstraintSolver.cpp" - , "BulletDynamics/Featherstone/btMultiBodyDynamicsWorld.cpp" - , "BulletDynamics/Featherstone/btMultiBodyFixedConstraint.cpp" - , "BulletDynamics/Featherstone/btMultiBodyGearConstraint.cpp" - , "BulletDynamics/Featherstone/btMultiBodyJointLimitConstraint.cpp" - , "BulletDynamics/Featherstone/btMultiBodyJointMotor.cpp" - , "BulletDynamics/Featherstone/btMultiBodyMLCPConstraintSolver.cpp" - , "BulletDynamics/Featherstone/btMultiBodyPoint2Point.cpp" - , "BulletDynamics/Featherstone/btMultiBodySliderConstraint.cpp" - , "BulletDynamics/Featherstone/btMultiBodySphericalJointMotor.cpp" - , "BulletDynamics/MLCPSolvers/btDantzigLCP.cpp" - , "BulletDynamics/MLCPSolvers/btMLCPSolver.cpp" - , "BulletDynamics/MLCPSolvers/btLemkeAlgorithm.cpp" - + "BulletDynamics/Character/btKinematicCharacterController.cpp", + "BulletDynamics/ConstraintSolver/btConeTwistConstraint.cpp", + "BulletDynamics/ConstraintSolver/btContactConstraint.cpp", + "BulletDynamics/ConstraintSolver/btFixedConstraint.cpp", + "BulletDynamics/ConstraintSolver/btGearConstraint.cpp", + "BulletDynamics/ConstraintSolver/btGeneric6DofConstraint.cpp", + "BulletDynamics/ConstraintSolver/btGeneric6DofSpringConstraint.cpp", + "BulletDynamics/ConstraintSolver/btGeneric6DofSpring2Constraint.cpp", + "BulletDynamics/ConstraintSolver/btHinge2Constraint.cpp", + "BulletDynamics/ConstraintSolver/btHingeConstraint.cpp", + "BulletDynamics/ConstraintSolver/btPoint2PointConstraint.cpp", + "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolver.cpp", + "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolverMt.cpp", + "BulletDynamics/ConstraintSolver/btBatchedConstraints.cpp", + "BulletDynamics/ConstraintSolver/btNNCGConstraintSolver.cpp", + "BulletDynamics/ConstraintSolver/btSliderConstraint.cpp", + "BulletDynamics/ConstraintSolver/btSolve2LinearConstraint.cpp", + "BulletDynamics/ConstraintSolver/btTypedConstraint.cpp", + "BulletDynamics/ConstraintSolver/btUniversalConstraint.cpp", + "BulletDynamics/Dynamics/btDiscreteDynamicsWorld.cpp", + "BulletDynamics/Dynamics/btDiscreteDynamicsWorldMt.cpp", + "BulletDynamics/Dynamics/btSimulationIslandManagerMt.cpp", + "BulletDynamics/Dynamics/btRigidBody.cpp", + "BulletDynamics/Dynamics/btSimpleDynamicsWorld.cpp", + # "BulletDynamics/Dynamics/Bullet-C-API.cpp", + "BulletDynamics/Vehicle/btRaycastVehicle.cpp", + "BulletDynamics/Vehicle/btWheelInfo.cpp", + "BulletDynamics/Featherstone/btMultiBody.cpp", + "BulletDynamics/Featherstone/btMultiBodyConstraint.cpp", + "BulletDynamics/Featherstone/btMultiBodyConstraintSolver.cpp", + "BulletDynamics/Featherstone/btMultiBodyDynamicsWorld.cpp", + "BulletDynamics/Featherstone/btMultiBodyFixedConstraint.cpp", + "BulletDynamics/Featherstone/btMultiBodyGearConstraint.cpp", + "BulletDynamics/Featherstone/btMultiBodyJointLimitConstraint.cpp", + "BulletDynamics/Featherstone/btMultiBodyJointMotor.cpp", + "BulletDynamics/Featherstone/btMultiBodyMLCPConstraintSolver.cpp", + "BulletDynamics/Featherstone/btMultiBodyPoint2Point.cpp", + "BulletDynamics/Featherstone/btMultiBodySliderConstraint.cpp", + "BulletDynamics/Featherstone/btMultiBodySphericalJointMotor.cpp", + "BulletDynamics/MLCPSolvers/btDantzigLCP.cpp", + "BulletDynamics/MLCPSolvers/btMLCPSolver.cpp", + "BulletDynamics/MLCPSolvers/btLemkeAlgorithm.cpp", # BulletInverseDynamics - , "BulletInverseDynamics/IDMath.cpp" - , "BulletInverseDynamics/MultiBodyTree.cpp" - , "BulletInverseDynamics/details/MultiBodyTreeInitCache.cpp" - , 
"BulletInverseDynamics/details/MultiBodyTreeImpl.cpp" - + "BulletInverseDynamics/IDMath.cpp", + "BulletInverseDynamics/MultiBodyTree.cpp", + "BulletInverseDynamics/details/MultiBodyTreeInitCache.cpp", + "BulletInverseDynamics/details/MultiBodyTreeImpl.cpp", # BulletSoftBody - , "BulletSoftBody/btSoftBody.cpp" - , "BulletSoftBody/btSoftBodyConcaveCollisionAlgorithm.cpp" - , "BulletSoftBody/btSoftBodyHelpers.cpp" - , "BulletSoftBody/btSoftBodyRigidBodyCollisionConfiguration.cpp" - , "BulletSoftBody/btSoftRigidCollisionAlgorithm.cpp" - , "BulletSoftBody/btSoftRigidDynamicsWorld.cpp" - , "BulletSoftBody/btSoftMultiBodyDynamicsWorld.cpp" - , "BulletSoftBody/btSoftSoftCollisionAlgorithm.cpp" - , "BulletSoftBody/btDefaultSoftBodySolver.cpp" - , "BulletSoftBody/btDeformableBackwardEulerObjective.cpp" - , "BulletSoftBody/btDeformableBodySolver.cpp" - , "BulletSoftBody/btDeformableMultiBodyConstraintSolver.cpp" - , "BulletSoftBody/btDeformableContactProjection.cpp" - , "BulletSoftBody/btDeformableMultiBodyDynamicsWorld.cpp" - , "BulletSoftBody/btDeformableContactConstraint.cpp" - + "BulletSoftBody/btSoftBody.cpp", + "BulletSoftBody/btSoftBodyConcaveCollisionAlgorithm.cpp", + "BulletSoftBody/btSoftBodyHelpers.cpp", + "BulletSoftBody/btSoftBodyRigidBodyCollisionConfiguration.cpp", + "BulletSoftBody/btSoftRigidCollisionAlgorithm.cpp", + "BulletSoftBody/btSoftRigidDynamicsWorld.cpp", + "BulletSoftBody/btSoftMultiBodyDynamicsWorld.cpp", + "BulletSoftBody/btSoftSoftCollisionAlgorithm.cpp", + "BulletSoftBody/btDefaultSoftBodySolver.cpp", + "BulletSoftBody/btDeformableBackwardEulerObjective.cpp", + "BulletSoftBody/btDeformableBodySolver.cpp", + "BulletSoftBody/btDeformableMultiBodyConstraintSolver.cpp", + "BulletSoftBody/btDeformableContactProjection.cpp", + "BulletSoftBody/btDeformableMultiBodyDynamicsWorld.cpp", + "BulletSoftBody/btDeformableContactConstraint.cpp", # clew - , "clew/clew.c" - + "clew/clew.c", # LinearMath - , "LinearMath/btAlignedAllocator.cpp" - , "LinearMath/btConvexHull.cpp" - , "LinearMath/btConvexHullComputer.cpp" - , "LinearMath/btGeometryUtil.cpp" - , "LinearMath/btPolarDecomposition.cpp" - , "LinearMath/btQuickprof.cpp" - , "LinearMath/btSerializer.cpp" - , "LinearMath/btSerializer64.cpp" - , "LinearMath/btThreads.cpp" - , "LinearMath/btVector3.cpp" - , "LinearMath/TaskScheduler/btTaskScheduler.cpp" - , "LinearMath/TaskScheduler/btThreadSupportPosix.cpp" - , "LinearMath/TaskScheduler/btThreadSupportWin32.cpp" + "LinearMath/btAlignedAllocator.cpp", + "LinearMath/btConvexHull.cpp", + "LinearMath/btConvexHullComputer.cpp", + "LinearMath/btGeometryUtil.cpp", + "LinearMath/btPolarDecomposition.cpp", + "LinearMath/btQuickprof.cpp", + "LinearMath/btSerializer.cpp", + "LinearMath/btSerializer64.cpp", + "LinearMath/btThreads.cpp", + "LinearMath/btVector3.cpp", + "LinearMath/TaskScheduler/btTaskScheduler.cpp", + "LinearMath/TaskScheduler/btThreadSupportPosix.cpp", + "LinearMath/TaskScheduler/btThreadSupportWin32.cpp", ] thirdparty_sources = [thirdparty_dir + file for file in bullet2_src] # Treat Bullet headers as system headers to avoid raising warnings. Not supported on MSVC. 
if not env.msvc: - env_bullet.Append(CPPFLAGS=['-isystem', Dir(thirdparty_dir).path]) + env_bullet.Append(CPPFLAGS=["-isystem", Dir(thirdparty_dir).path]) else: env_bullet.Prepend(CPPPATH=[thirdparty_dir]) # if env['target'] == "debug" or env['target'] == "release_debug": diff --git a/modules/bullet/config.py b/modules/bullet/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/bullet/config.py +++ b/modules/bullet/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/camera/SCsub b/modules/camera/SCsub index 23f031f06e..63c4e9fbab 100644 --- a/modules/camera/SCsub +++ b/modules/camera/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_camera = env_modules.Clone() @@ -10,7 +10,7 @@ if env["platform"] == "iphone": modules_sources = [] env_camera.add_source_files(modules_sources, "register_types.cpp") env_camera.add_source_files(modules_sources, "camera_ios.mm") - mod_lib = env_modules.add_library('#bin/libgodot_camera_module' + env['LIBSUFFIX'], modules_sources) + mod_lib = env_modules.add_library("#bin/libgodot_camera_module" + env["LIBSUFFIX"], modules_sources) elif env["platform"] == "windows": env_camera.add_source_files(env.modules_sources, "register_types.cpp") @@ -19,4 +19,3 @@ elif env["platform"] == "windows": elif env["platform"] == "osx": env_camera.add_source_files(env.modules_sources, "register_types.cpp") env_camera.add_source_files(env.modules_sources, "camera_osx.mm") - diff --git a/modules/camera/config.py b/modules/camera/config.py index d308c04195..87d7542741 100644 --- a/modules/camera/config.py +++ b/modules/camera/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return platform == 'iphone' or platform == 'osx' or platform == 'windows' + return platform == "iphone" or platform == "osx" or platform == "windows" + def configure(env): pass diff --git a/modules/csg/SCsub b/modules/csg/SCsub index 57c504efd8..641a42c187 100644 --- a/modules/csg/SCsub +++ b/modules/csg/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_csg = env_modules.Clone() diff --git a/modules/csg/config.py b/modules/csg/config.py index 38ccc66d91..0f267872d7 100644 --- a/modules/csg/config.py +++ b/modules/csg/config.py @@ -1,9 +1,11 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "CSGBox", @@ -17,5 +19,6 @@ def get_doc_classes(): "CSGTorus", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/cvtt/SCsub b/modules/cvtt/SCsub index 746b23ca28..3a27a59945 100644 --- a/modules/cvtt/SCsub +++ b/modules/cvtt/SCsub @@ -1,15 +1,13 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_cvtt = env_modules.Clone() # Thirdparty source files thirdparty_dir = "#thirdparty/cvtt/" -thirdparty_sources = [ - "ConvectionKernels.cpp" -] +thirdparty_sources = ["ConvectionKernels.cpp"] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] diff --git a/modules/cvtt/config.py b/modules/cvtt/config.py index 098f1eafa9..53b8f2f2e3 100644 --- a/modules/cvtt/config.py +++ b/modules/cvtt/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return env['tools'] + return env["tools"] + def configure(env): pass diff --git a/modules/dds/SCsub b/modules/dds/SCsub index 3d92ff02d6..06980bd670 100644 --- a/modules/dds/SCsub +++ b/modules/dds/SCsub @@ -1,7 +1,7 @@ 
#!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_dds = env_modules.Clone() diff --git a/modules/dds/config.py b/modules/dds/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/dds/config.py +++ b/modules/dds/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/enet/SCsub b/modules/enet/SCsub index 485c33b1a8..c8f4b3885e 100644 --- a/modules/enet/SCsub +++ b/modules/enet/SCsub @@ -1,13 +1,13 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_enet = env_modules.Clone() # Thirdparty source files -if env['builtin_enet']: +if env["builtin_enet"]: thirdparty_dir = "#thirdparty/enet/" thirdparty_sources = [ "godot.cpp", diff --git a/modules/enet/config.py b/modules/enet/config.py index 3e30bbe778..5fd343c75d 100644 --- a/modules/enet/config.py +++ b/modules/enet/config.py @@ -1,13 +1,16 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "NetworkedMultiplayerENet", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/etc/SCsub b/modules/etc/SCsub index 1742d3534f..383bbf83c3 100644 --- a/modules/etc/SCsub +++ b/modules/etc/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_etc = env_modules.Clone() @@ -9,21 +9,21 @@ env_etc = env_modules.Clone() # Not unbundled so far since not widespread as shared library thirdparty_dir = "#thirdparty/etc2comp/" thirdparty_sources = [ - "EtcBlock4x4.cpp", - "EtcBlock4x4Encoding.cpp", - "EtcBlock4x4Encoding_ETC1.cpp", - "EtcBlock4x4Encoding_R11.cpp", - "EtcBlock4x4Encoding_RG11.cpp", - "EtcBlock4x4Encoding_RGB8A1.cpp", - "EtcBlock4x4Encoding_RGB8.cpp", - "EtcBlock4x4Encoding_RGBA8.cpp", - "Etc.cpp", - "EtcDifferentialTrys.cpp", - "EtcFilter.cpp", - "EtcImage.cpp", - "EtcIndividualTrys.cpp", - "EtcMath.cpp", - "EtcSortedBlockList.cpp", + "EtcBlock4x4.cpp", + "EtcBlock4x4Encoding.cpp", + "EtcBlock4x4Encoding_ETC1.cpp", + "EtcBlock4x4Encoding_R11.cpp", + "EtcBlock4x4Encoding_RG11.cpp", + "EtcBlock4x4Encoding_RGB8A1.cpp", + "EtcBlock4x4Encoding_RGB8.cpp", + "EtcBlock4x4Encoding_RGBA8.cpp", + "Etc.cpp", + "EtcDifferentialTrys.cpp", + "EtcFilter.cpp", + "EtcImage.cpp", + "EtcIndividualTrys.cpp", + "EtcMath.cpp", + "EtcSortedBlockList.cpp", ] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] diff --git a/modules/etc/config.py b/modules/etc/config.py index 098f1eafa9..53b8f2f2e3 100644 --- a/modules/etc/config.py +++ b/modules/etc/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return env['tools'] + return env["tools"] + def configure(env): pass diff --git a/modules/freetype/SCsub b/modules/freetype/SCsub index 8f4a8de895..cb9eb36b41 100644 --- a/modules/freetype/SCsub +++ b/modules/freetype/SCsub @@ -1,14 +1,14 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") from compat import isbasestring env_freetype = env_modules.Clone() # Thirdparty source files -if env['builtin_freetype']: +if env["builtin_freetype"]: thirdparty_dir = "#thirdparty/freetype/" thirdparty_sources = [ "src/autofit/autofit.c", @@ -55,31 +55,31 @@ if env['builtin_freetype']: ] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] - if env['platform'] == 'uwp': + if env["platform"] == "uwp": # Include header for UWP to fix build issues - env_freetype.Append(CCFLAGS=['/FI', 
'"modules/freetype/uwpdef.h"']) + env_freetype.Append(CCFLAGS=["/FI", '"modules/freetype/uwpdef.h"']) # Globally too, as freetype is used in scene (see bottom) - env.Append(CCFLAGS=['/FI', '"modules/freetype/uwpdef.h"']) + env.Append(CCFLAGS=["/FI", '"modules/freetype/uwpdef.h"']) env_freetype.Prepend(CPPPATH=[thirdparty_dir + "/include"]) # Also needed in main env for scene/ env.Prepend(CPPPATH=[thirdparty_dir + "/include"]) - env_freetype.Append(CPPDEFINES=['FT2_BUILD_LIBRARY', 'FT_CONFIG_OPTION_USE_PNG']) - if (env['target'] == 'debug'): - env_freetype.Append(CPPDEFINES=['ZLIB_DEBUG']) + env_freetype.Append(CPPDEFINES=["FT2_BUILD_LIBRARY", "FT_CONFIG_OPTION_USE_PNG"]) + if env["target"] == "debug": + env_freetype.Append(CPPDEFINES=["ZLIB_DEBUG"]) # Also requires libpng headers - if env['builtin_libpng']: + if env["builtin_libpng"]: env_freetype.Prepend(CPPPATH=["#thirdparty/libpng"]) - sfnt = thirdparty_dir + 'src/sfnt/sfnt.c' + sfnt = thirdparty_dir + "src/sfnt/sfnt.c" # Must be done after all CPPDEFINES are being set so we can copy them. - if env['platform'] == 'javascript': + if env["platform"] == "javascript": # Forcibly undefine this macro so SIMD is not used in this file, # since currently unsupported in WASM tmp_env = env_freetype.Clone() - tmp_env.Append(CPPFLAGS=['-U__OPTIMIZE__']) + tmp_env.Append(CPPFLAGS=["-U__OPTIMIZE__"]) sfnt = tmp_env.Object(sfnt) thirdparty_sources += [sfnt] @@ -93,7 +93,7 @@ if env['builtin_freetype']: # and then plain strings for system library. We insert between the two. inserted = False for idx, linklib in enumerate(env["LIBS"]): - if isbasestring(linklib): # first system lib such as "X11", otherwise SCons lib object + if isbasestring(linklib): # first system lib such as "X11", otherwise SCons lib object env["LIBS"].insert(idx, lib) inserted = True break @@ -103,4 +103,4 @@ if env['builtin_freetype']: # Godot source files env_freetype.add_source_files(env.modules_sources, "*.cpp") # Used in scene/, needs to be in main env -env.Append(CPPDEFINES=['FREETYPE_ENABLED']) +env.Append(CPPDEFINES=["FREETYPE_ENABLED"]) diff --git a/modules/freetype/config.py b/modules/freetype/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/freetype/config.py +++ b/modules/freetype/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/gdnative/SCsub b/modules/gdnative/SCsub index 0cdd585558..1c2079d037 100644 --- a/modules/gdnative/SCsub +++ b/modules/gdnative/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_gdnative = env_modules.Clone() env_gdnative.add_source_files(env.modules_sources, "gdnative.cpp") @@ -12,9 +12,9 @@ env_gdnative.add_source_files(env.modules_sources, "nativescript/*.cpp") env_gdnative.add_source_files(env.modules_sources, "gdnative_library_singleton_editor.cpp") env_gdnative.add_source_files(env.modules_sources, "gdnative_library_editor_plugin.cpp") -env_gdnative.Prepend(CPPPATH=['#modules/gdnative/include/']) +env_gdnative.Prepend(CPPPATH=["#modules/gdnative/include/"]) -Export('env_gdnative') +Export("env_gdnative") SConscript("net/SCsub") SConscript("arvr/SCsub") @@ -25,28 +25,35 @@ SConscript("videodecoder/SCsub") from platform_methods import run_in_subprocess import gdnative_builders -_, gensource = env_gdnative.CommandNoCache(['include/gdnative_api_struct.gen.h', 'gdnative_api_struct.gen.cpp'], - 'gdnative_api.json', run_in_subprocess(gdnative_builders.build_gdnative_api_struct)) +_, gensource = 
env_gdnative.CommandNoCache( + ["include/gdnative_api_struct.gen.h", "gdnative_api_struct.gen.cpp"], + "gdnative_api.json", + run_in_subprocess(gdnative_builders.build_gdnative_api_struct), +) env_gdnative.add_source_files(env.modules_sources, [gensource]) env.use_ptrcall = True -if ARGUMENTS.get('gdnative_wrapper', False): - gensource, = env_gdnative.CommandNoCache('gdnative_wrapper_code.gen.cpp', 'gdnative_api.json', run_in_subprocess(gdnative_builders.build_gdnative_wrapper_code)) +if ARGUMENTS.get("gdnative_wrapper", False): + (gensource,) = env_gdnative.CommandNoCache( + "gdnative_wrapper_code.gen.cpp", + "gdnative_api.json", + run_in_subprocess(gdnative_builders.build_gdnative_wrapper_code), + ) gd_wrapper_env = env.Clone() - gd_wrapper_env.Prepend(CPPPATH=['#modules/gdnative/include/']) + gd_wrapper_env.Prepend(CPPPATH=["#modules/gdnative/include/"]) - if gd_wrapper_env['use_lto']: + if gd_wrapper_env["use_lto"]: if not env.msvc: - gd_wrapper_env.Append(CCFLAGS=['-fno-lto']) - gd_wrapper_env.Append(LINKFLAGS=['-fno-lto']) + gd_wrapper_env.Append(CCFLAGS=["-fno-lto"]) + gd_wrapper_env.Append(LINKFLAGS=["-fno-lto"]) else: - gd_wrapper_env.Append(CCFLAGS=['/GL-']) - gd_wrapper_env.Append(LINKFLAGS=['/LTCG:OFF']) + gd_wrapper_env.Append(CCFLAGS=["/GL-"]) + gd_wrapper_env.Append(LINKFLAGS=["/LTCG:OFF"]) if not env.msvc: - gd_wrapper_env.Append(CCFLAGS=['-fPIC']) + gd_wrapper_env.Append(CCFLAGS=["-fPIC"]) lib = gd_wrapper_env.add_library("#bin/gdnative_wrapper_code", [gensource]) diff --git a/modules/gdnative/arvr/SCsub b/modules/gdnative/arvr/SCsub index 20eaa99592..0b2db3b504 100644 --- a/modules/gdnative/arvr/SCsub +++ b/modules/gdnative/arvr/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') -Import('env_gdnative') +Import("env") +Import("env_gdnative") -env_gdnative.add_source_files(env.modules_sources, '*.cpp') +env_gdnative.add_source_files(env.modules_sources, "*.cpp") diff --git a/modules/gdnative/arvr/config.py b/modules/gdnative/arvr/config.py index 53bc827027..d22f9454ed 100644 --- a/modules/gdnative/arvr/config.py +++ b/modules/gdnative/arvr/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return True + return True + def configure(env): - pass + pass diff --git a/modules/gdnative/config.py b/modules/gdnative/config.py index b9e5afcdf3..37e25a46d4 100644 --- a/modules/gdnative/config.py +++ b/modules/gdnative/config.py @@ -1,9 +1,11 @@ def can_build(env, platform): return True + def configure(env): env.use_ptrcall = True + def get_doc_classes(): return [ "@NativeScript", @@ -20,5 +22,6 @@ def get_doc_classes(): "WebRTCDataChannelGDNative", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/gdnative/gdnative_builders.py b/modules/gdnative/gdnative_builders.py index 20c1a2233c..d44bf45214 100644 --- a/modules/gdnative/gdnative_builders.py +++ b/modules/gdnative/gdnative_builders.py @@ -8,306 +8,343 @@ from platform_methods import subprocess_main def _spaced(e): - return e if e[-1] == '*' else e + ' ' + return e if e[-1] == "*" else e + " " def _build_gdnative_api_struct_header(api): - gdnative_api_init_macro = [ - '\textern const godot_gdnative_core_api_struct *_gdnative_wrapper_api_struct;' - ] + gdnative_api_init_macro = ["\textern const godot_gdnative_core_api_struct *_gdnative_wrapper_api_struct;"] - for ext in api['extensions']: - name = ext['name'] + for ext in api["extensions"]: + name = ext["name"] gdnative_api_init_macro.append( - '\textern const godot_gdnative_ext_{0}_api_struct *_gdnative_wrapper_{0}_api_struct;'.format(name)) + 
"\textern const godot_gdnative_ext_{0}_api_struct *_gdnative_wrapper_{0}_api_struct;".format(name) + ) - gdnative_api_init_macro.append('\t_gdnative_wrapper_api_struct = options->api_struct;') - gdnative_api_init_macro.append('\tfor (unsigned int i = 0; i < _gdnative_wrapper_api_struct->num_extensions; i++) { ') - gdnative_api_init_macro.append('\t\tswitch (_gdnative_wrapper_api_struct->extensions[i]->type) {') + gdnative_api_init_macro.append("\t_gdnative_wrapper_api_struct = options->api_struct;") + gdnative_api_init_macro.append( + "\tfor (unsigned int i = 0; i < _gdnative_wrapper_api_struct->num_extensions; i++) { " + ) + gdnative_api_init_macro.append("\t\tswitch (_gdnative_wrapper_api_struct->extensions[i]->type) {") - for ext in api['extensions']: - name = ext['name'] + for ext in api["extensions"]: + name = ext["name"] + gdnative_api_init_macro.append("\t\t\tcase GDNATIVE_EXT_%s:" % ext["type"]) gdnative_api_init_macro.append( - '\t\t\tcase GDNATIVE_EXT_%s:' % ext['type']) - gdnative_api_init_macro.append( - '\t\t\t\t_gdnative_wrapper_{0}_api_struct = (godot_gdnative_ext_{0}_api_struct *)' - ' _gdnative_wrapper_api_struct->extensions[i];'.format(name)) - gdnative_api_init_macro.append('\t\t\t\tbreak;') - gdnative_api_init_macro.append('\t\t}') - gdnative_api_init_macro.append('\t}') + "\t\t\t\t_gdnative_wrapper_{0}_api_struct = (godot_gdnative_ext_{0}_api_struct *)" + " _gdnative_wrapper_api_struct->extensions[i];".format(name) + ) + gdnative_api_init_macro.append("\t\t\t\tbreak;") + gdnative_api_init_macro.append("\t\t}") + gdnative_api_init_macro.append("\t}") out = [ - '/* THIS FILE IS GENERATED DO NOT EDIT */', - '#ifndef GODOT_GDNATIVE_API_STRUCT_H', - '#define GODOT_GDNATIVE_API_STRUCT_H', - '', - '#include ', - '#include ', - '#include ', - '#include ', - '#include ', - '#include ', - '#include ', - '', - '#define GDNATIVE_API_INIT(options) do { \\\n' + ' \\\n'.join(gdnative_api_init_macro) + ' \\\n } while (0)', - '', - '#ifdef __cplusplus', + "/* THIS FILE IS GENERATED DO NOT EDIT */", + "#ifndef GODOT_GDNATIVE_API_STRUCT_H", + "#define GODOT_GDNATIVE_API_STRUCT_H", + "", + "#include ", + "#include ", + "#include ", + "#include ", + "#include ", + "#include ", + "#include ", + "", + "#define GDNATIVE_API_INIT(options) do { \\\n" + " \\\n".join(gdnative_api_init_macro) + " \\\n } while (0)", + "", + "#ifdef __cplusplus", 'extern "C" {', - '#endif', - '', - 'enum GDNATIVE_API_TYPES {', - '\tGDNATIVE_' + api['core']['type'] + ',' + "#endif", + "", + "enum GDNATIVE_API_TYPES {", + "\tGDNATIVE_" + api["core"]["type"] + ",", ] - for ext in api['extensions']: - out += ['\tGDNATIVE_EXT_' + ext['type'] + ','] + for ext in api["extensions"]: + out += ["\tGDNATIVE_EXT_" + ext["type"] + ","] - out += ['};', ''] + out += ["};", ""] def generate_extension_struct(name, ext, include_version=True): ret_val = [] - if ext['next']: - ret_val += generate_extension_struct(name, ext['next']) + if ext["next"]: + ret_val += generate_extension_struct(name, ext["next"]) ret_val += [ - 'typedef struct godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct {', - '\tunsigned int type;', - '\tgodot_gdnative_api_version version;', - '\tconst godot_gdnative_api_struct *next;' + "typedef struct godot_gdnative_ext_" + + name + + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"]))) + + "_api_struct {", + "\tunsigned int type;", + "\tgodot_gdnative_api_version 
version;", + "\tconst godot_gdnative_api_struct *next;", ] - for funcdef in ext['api']: - args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']]) - ret_val.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args)) + for funcdef in ext["api"]: + args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]]) + ret_val.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args)) - ret_val += ['} godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct;', ''] + ret_val += [ + "} godot_gdnative_ext_" + + name + + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"]))) + + "_api_struct;", + "", + ] return ret_val - def generate_core_extension_struct(core): ret_val = [] - if core['next']: - ret_val += generate_core_extension_struct(core['next']) + if core["next"]: + ret_val += generate_core_extension_struct(core["next"]) ret_val += [ - 'typedef struct godot_gdnative_core_' + ('{0}_{1}'.format(core['version']['major'], core['version']['minor'])) + '_api_struct {', - '\tunsigned int type;', - '\tgodot_gdnative_api_version version;', - '\tconst godot_gdnative_api_struct *next;', + "typedef struct godot_gdnative_core_" + + ("{0}_{1}".format(core["version"]["major"], core["version"]["minor"])) + + "_api_struct {", + "\tunsigned int type;", + "\tgodot_gdnative_api_version version;", + "\tconst godot_gdnative_api_struct *next;", ] - for funcdef in core['api']: - args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']]) - ret_val.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args)) + for funcdef in core["api"]: + args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]]) + ret_val.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args)) - ret_val += ['} godot_gdnative_core_' + '{0}_{1}'.format(core['version']['major'], core['version']['minor']) + '_api_struct;', ''] + ret_val += [ + "} godot_gdnative_core_" + + "{0}_{1}".format(core["version"]["major"], core["version"]["minor"]) + + "_api_struct;", + "", + ] return ret_val - - for ext in api['extensions']: - name = ext['name'] + for ext in api["extensions"]: + name = ext["name"] out += generate_extension_struct(name, ext, False) - if api['core']['next']: - out += generate_core_extension_struct(api['core']['next']) + if api["core"]["next"]: + out += generate_core_extension_struct(api["core"]["next"]) out += [ - 'typedef struct godot_gdnative_core_api_struct {', - '\tunsigned int type;', - '\tgodot_gdnative_api_version version;', - '\tconst godot_gdnative_api_struct *next;', - '\tunsigned int num_extensions;', - '\tconst godot_gdnative_api_struct **extensions;', + "typedef struct godot_gdnative_core_api_struct {", + "\tunsigned int type;", + "\tgodot_gdnative_api_version version;", + "\tconst godot_gdnative_api_struct *next;", + "\tunsigned int num_extensions;", + "\tconst godot_gdnative_api_struct **extensions;", ] - for funcdef in api['core']['api']: - args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']]) - out.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args)) + for funcdef in api["core"]["api"]: + args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]]) + out.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args)) out += [ - '} 
godot_gdnative_core_api_struct;', - '', - '#ifdef __cplusplus', - '}', - '#endif', - '', - '#endif // GODOT_GDNATIVE_API_STRUCT_H', - '' + "} godot_gdnative_core_api_struct;", + "", + "#ifdef __cplusplus", + "}", + "#endif", + "", + "#endif // GODOT_GDNATIVE_API_STRUCT_H", + "", ] - return '\n'.join(out) + return "\n".join(out) def _build_gdnative_api_struct_source(api): - out = [ - '/* THIS FILE IS GENERATED DO NOT EDIT */', - '', - '#include ', - '' - ] + out = ["/* THIS FILE IS GENERATED DO NOT EDIT */", "", "#include ", ""] def get_extension_struct_name(name, ext, include_version=True): - return 'godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct' + return ( + "godot_gdnative_ext_" + + name + + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"]))) + + "_api_struct" + ) def get_extension_struct_instance_name(name, ext, include_version=True): - return 'api_extension_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_struct' + return ( + "api_extension_" + + name + + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"]))) + + "_struct" + ) def get_extension_struct_definition(name, ext, include_version=True): ret_val = [] - if ext['next']: - ret_val += get_extension_struct_definition(name, ext['next']) + if ext["next"]: + ret_val += get_extension_struct_definition(name, ext["next"]) ret_val += [ - 'extern const ' + get_extension_struct_name(name, ext, include_version) + ' ' + get_extension_struct_instance_name(name, ext, include_version) + ' = {', - '\tGDNATIVE_EXT_' + ext['type'] + ',', - '\t{' + str(ext['version']['major']) + ', ' + str(ext['version']['minor']) + '},', - '\t' + ('NULL' if not ext['next'] else ('(const godot_gdnative_api_struct *)&' + get_extension_struct_instance_name(name, ext['next']))) + ',' + "extern const " + + get_extension_struct_name(name, ext, include_version) + + " " + + get_extension_struct_instance_name(name, ext, include_version) + + " = {", + "\tGDNATIVE_EXT_" + ext["type"] + ",", + "\t{" + str(ext["version"]["major"]) + ", " + str(ext["version"]["minor"]) + "},", + "\t" + + ( + "NULL" + if not ext["next"] + else ("(const godot_gdnative_api_struct *)&" + get_extension_struct_instance_name(name, ext["next"])) + ) + + ",", ] - for funcdef in ext['api']: - ret_val.append('\t%s,' % funcdef['name']) + for funcdef in ext["api"]: + ret_val.append("\t%s," % funcdef["name"]) - ret_val += ['};\n'] + ret_val += ["};\n"] return ret_val - def get_core_struct_definition(core): ret_val = [] - if core['next']: - ret_val += get_core_struct_definition(core['next']) + if core["next"]: + ret_val += get_core_struct_definition(core["next"]) ret_val += [ - 'extern const godot_gdnative_core_' + ('{0}_{1}_api_struct api_{0}_{1}'.format(core['version']['major'], core['version']['minor'])) + ' = {', - '\tGDNATIVE_' + core['type'] + ',', - '\t{' + str(core['version']['major']) + ', ' + str(core['version']['minor']) + '},', - '\t' + ('NULL' if not core['next'] else ('(const godot_gdnative_api_struct *)& api_{0}_{1}'.format(core['next']['version']['major'], core['next']['version']['minor']))) + ',' + "extern const godot_gdnative_core_" + + ("{0}_{1}_api_struct api_{0}_{1}".format(core["version"]["major"], core["version"]["minor"])) + + " = {", + "\tGDNATIVE_" + core["type"] + ",", + "\t{" + str(core["version"]["major"]) + ", " + 
str(core["version"]["minor"]) + "},", + "\t" + + ( + "NULL" + if not core["next"] + else ( + "(const godot_gdnative_api_struct *)& api_{0}_{1}".format( + core["next"]["version"]["major"], core["next"]["version"]["minor"] + ) + ) + ) + + ",", ] - for funcdef in core['api']: - ret_val.append('\t%s,' % funcdef['name']) + for funcdef in core["api"]: + ret_val.append("\t%s," % funcdef["name"]) - ret_val += ['};\n'] + ret_val += ["};\n"] return ret_val - for ext in api['extensions']: - name = ext['name'] + for ext in api["extensions"]: + name = ext["name"] out += get_extension_struct_definition(name, ext, False) - out += ['', 'const godot_gdnative_api_struct *gdnative_extensions_pointers[] = {'] + out += ["", "const godot_gdnative_api_struct *gdnative_extensions_pointers[] = {"] - for ext in api['extensions']: - name = ext['name'] - out += ['\t(godot_gdnative_api_struct *)&api_extension_' + name + '_struct,'] + for ext in api["extensions"]: + name = ext["name"] + out += ["\t(godot_gdnative_api_struct *)&api_extension_" + name + "_struct,"] - out += ['};\n'] + out += ["};\n"] - if api['core']['next']: - out += get_core_struct_definition(api['core']['next']) + if api["core"]["next"]: + out += get_core_struct_definition(api["core"]["next"]) out += [ - 'extern const godot_gdnative_core_api_struct api_struct = {', - '\tGDNATIVE_' + api['core']['type'] + ',', - '\t{' + str(api['core']['version']['major']) + ', ' + str(api['core']['version']['minor']) + '},', - '\t(const godot_gdnative_api_struct *)&api_1_1,', - '\t' + str(len(api['extensions'])) + ',', - '\tgdnative_extensions_pointers,', + "extern const godot_gdnative_core_api_struct api_struct = {", + "\tGDNATIVE_" + api["core"]["type"] + ",", + "\t{" + str(api["core"]["version"]["major"]) + ", " + str(api["core"]["version"]["minor"]) + "},", + "\t(const godot_gdnative_api_struct *)&api_1_1,", + "\t" + str(len(api["extensions"])) + ",", + "\tgdnative_extensions_pointers,", ] - for funcdef in api['core']['api']: - out.append('\t%s,' % funcdef['name']) - out.append('};\n') + for funcdef in api["core"]["api"]: + out.append("\t%s," % funcdef["name"]) + out.append("};\n") - return '\n'.join(out) + return "\n".join(out) def build_gdnative_api_struct(target, source, env): - with open(source[0], 'r') as fd: + with open(source[0], "r") as fd: api = json.load(fd) header, source = target - with open(header, 'w') as fd: + with open(header, "w") as fd: fd.write(_build_gdnative_api_struct_header(api)) - with open(source, 'w') as fd: + with open(source, "w") as fd: fd.write(_build_gdnative_api_struct_source(api)) def _build_gdnative_wrapper_code(api): out = [ - '/* THIS FILE IS GENERATED DO NOT EDIT */', - '', - '#include ', - '#include ', - '#include ', - '#include ', - '#include ', - '', - '#include ', - '', - '#ifdef __cplusplus', + "/* THIS FILE IS GENERATED DO NOT EDIT */", + "", + "#include ", + "#include ", + "#include ", + "#include ", + "#include ", + "", + "#include ", + "", + "#ifdef __cplusplus", 'extern "C" {', - '#endif', - '', - 'godot_gdnative_core_api_struct *_gdnative_wrapper_api_struct = 0;', + "#endif", + "", + "godot_gdnative_core_api_struct *_gdnative_wrapper_api_struct = 0;", ] - for ext in api['extensions']: - name = ext['name'] - out.append('godot_gdnative_ext_' + name + '_api_struct *_gdnative_wrapper_' + name + '_api_struct = 0;') + for ext in api["extensions"]: + name = ext["name"] + out.append("godot_gdnative_ext_" + name + "_api_struct *_gdnative_wrapper_" + name + "_api_struct = 0;") - out += [''] + out += [""] - for funcdef in 
api['core']['api']: - args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']]) - out.append('%s%s(%s) {' % (_spaced(funcdef['return_type']), funcdef['name'], args)) + for funcdef in api["core"]["api"]: + args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]]) + out.append("%s%s(%s) {" % (_spaced(funcdef["return_type"]), funcdef["name"], args)) - args = ', '.join(['%s' % n for t, n in funcdef['arguments']]) + args = ", ".join(["%s" % n for t, n in funcdef["arguments"]]) - return_line = '\treturn ' if funcdef['return_type'] != 'void' else '\t' - return_line += '_gdnative_wrapper_api_struct->' + funcdef['name'] + '(' + args + ');' + return_line = "\treturn " if funcdef["return_type"] != "void" else "\t" + return_line += "_gdnative_wrapper_api_struct->" + funcdef["name"] + "(" + args + ");" out.append(return_line) - out.append('}') - out.append('') + out.append("}") + out.append("") - for ext in api['extensions']: - name = ext['name'] - for funcdef in ext['api']: - args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']]) - out.append('%s%s(%s) {' % (_spaced(funcdef['return_type']), funcdef['name'], args)) + for ext in api["extensions"]: + name = ext["name"] + for funcdef in ext["api"]: + args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]]) + out.append("%s%s(%s) {" % (_spaced(funcdef["return_type"]), funcdef["name"], args)) - args = ', '.join(['%s' % n for t, n in funcdef['arguments']]) + args = ", ".join(["%s" % n for t, n in funcdef["arguments"]]) - return_line = '\treturn ' if funcdef['return_type'] != 'void' else '\t' - return_line += '_gdnative_wrapper_' + name + '_api_struct->' + funcdef['name'] + '(' + args + ');' + return_line = "\treturn " if funcdef["return_type"] != "void" else "\t" + return_line += "_gdnative_wrapper_" + name + "_api_struct->" + funcdef["name"] + "(" + args + ");" out.append(return_line) - out.append('}') - out.append('') + out.append("}") + out.append("") - out += [ - '#ifdef __cplusplus', - '}', - '#endif' - ] + out += ["#ifdef __cplusplus", "}", "#endif"] - return '\n'.join(out) + return "\n".join(out) def build_gdnative_wrapper_code(target, source, env): - with open(source[0], 'r') as fd: + with open(source[0], "r") as fd: api = json.load(fd) wrapper_file = target[0] - with open(wrapper_file, 'w') as fd: + with open(wrapper_file, "w") as fd: fd.write(_build_gdnative_wrapper_code(api)) -if __name__ == '__main__': +if __name__ == "__main__": subprocess_main(globals()) diff --git a/modules/gdnative/nativescript/SCsub b/modules/gdnative/nativescript/SCsub index 92c9d6630d..b1ddb2489c 100644 --- a/modules/gdnative/nativescript/SCsub +++ b/modules/gdnative/nativescript/SCsub @@ -1,9 +1,9 @@ #!/usr/bin/env python -Import('env') -Import('env_gdnative') +Import("env") +Import("env_gdnative") -env_gdnative.add_source_files(env.modules_sources, '*.cpp') +env_gdnative.add_source_files(env.modules_sources, "*.cpp") if "platform" in env and env["platform"] in ["x11", "iphone"]: env.Append(LINKFLAGS=["-rdynamic"]) diff --git a/modules/gdnative/net/SCsub b/modules/gdnative/net/SCsub index 18ab9986b0..b76500c003 100644 --- a/modules/gdnative/net/SCsub +++ b/modules/gdnative/net/SCsub @@ -1,13 +1,12 @@ #!/usr/bin/env python -Import('env') -Import('env_gdnative') +Import("env") +Import("env_gdnative") env_net = env_gdnative.Clone() has_webrtc = env_net["module_webrtc_enabled"] if has_webrtc: - env_net.Append(CPPDEFINES=['WEBRTC_GDNATIVE_ENABLED']) - 
-env_net.add_source_files(env.modules_sources, '*.cpp') + env_net.Append(CPPDEFINES=["WEBRTC_GDNATIVE_ENABLED"]) +env_net.add_source_files(env.modules_sources, "*.cpp") diff --git a/modules/gdnative/pluginscript/SCsub b/modules/gdnative/pluginscript/SCsub index 20eaa99592..0b2db3b504 100644 --- a/modules/gdnative/pluginscript/SCsub +++ b/modules/gdnative/pluginscript/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') -Import('env_gdnative') +Import("env") +Import("env_gdnative") -env_gdnative.add_source_files(env.modules_sources, '*.cpp') +env_gdnative.add_source_files(env.modules_sources, "*.cpp") diff --git a/modules/gdnative/videodecoder/SCsub b/modules/gdnative/videodecoder/SCsub index 04cc8ed604..5948b9a3dd 100644 --- a/modules/gdnative/videodecoder/SCsub +++ b/modules/gdnative/videodecoder/SCsub @@ -1,9 +1,9 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_vsdecoder_gdnative = env_modules.Clone() -env_vsdecoder_gdnative.Prepend(CPPPATH=['#modules/gdnative/include/']) -env_vsdecoder_gdnative.add_source_files(env.modules_sources, '*.cpp') +env_vsdecoder_gdnative.Prepend(CPPPATH=["#modules/gdnative/include/"]) +env_vsdecoder_gdnative.add_source_files(env.modules_sources, "*.cpp") diff --git a/modules/gdscript/SCsub b/modules/gdscript/SCsub index 74e653ce43..e58a1d8edc 100644 --- a/modules/gdscript/SCsub +++ b/modules/gdscript/SCsub @@ -1,19 +1,19 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_gdscript = env_modules.Clone() env_gdscript.add_source_files(env.modules_sources, "*.cpp") -if env['tools']: +if env["tools"]: env_gdscript.add_source_files(env.modules_sources, "./editor/*.cpp") # Those two modules are required for the language server protocol - if env['module_jsonrpc_enabled'] and env['module_websocket_enabled']: + if env["module_jsonrpc_enabled"] and env["module_websocket_enabled"]: env_gdscript.add_source_files(env.modules_sources, "./language_server/*.cpp") else: # Using a define in the disabled case, to avoid having an extra define # in regular builds where all modules are enabled. 
- env_gdscript.Append(CPPDEFINES=['GDSCRIPT_NO_LSP']) + env_gdscript.Append(CPPDEFINES=["GDSCRIPT_NO_LSP"]) diff --git a/modules/gdscript/config.py b/modules/gdscript/config.py index aceeee22d9..6fc227e7f5 100644 --- a/modules/gdscript/config.py +++ b/modules/gdscript/config.py @@ -1,9 +1,11 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "@GDScript", @@ -11,5 +13,6 @@ def get_doc_classes(): "GDScriptFunctionState", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/gridmap/SCsub b/modules/gridmap/SCsub index 62b8a0ff93..970ce534f0 100644 --- a/modules/gridmap/SCsub +++ b/modules/gridmap/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_gridmap = env_modules.Clone() diff --git a/modules/gridmap/config.py b/modules/gridmap/config.py index 5022116c9b..a6319fe1ea 100644 --- a/modules/gridmap/config.py +++ b/modules/gridmap/config.py @@ -1,13 +1,16 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "GridMap", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/hdr/SCsub b/modules/hdr/SCsub index c960e8126b..a709397c9a 100644 --- a/modules/hdr/SCsub +++ b/modules/hdr/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_hdr = env_modules.Clone() diff --git a/modules/hdr/config.py b/modules/hdr/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/hdr/config.py +++ b/modules/hdr/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/jpg/SCsub b/modules/jpg/SCsub index 96e8e704dd..8ee8e6dd6e 100644 --- a/modules/jpg/SCsub +++ b/modules/jpg/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_jpg = env_modules.Clone() diff --git a/modules/jpg/config.py b/modules/jpg/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/jpg/config.py +++ b/modules/jpg/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/jsonrpc/SCsub b/modules/jsonrpc/SCsub index 13c9ffb253..fe5312670a 100644 --- a/modules/jsonrpc/SCsub +++ b/modules/jsonrpc/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_jsonrpc = env_modules.Clone() env_jsonrpc.add_source_files(env.modules_sources, "*.cpp") diff --git a/modules/jsonrpc/config.py b/modules/jsonrpc/config.py index 53bc827027..d22f9454ed 100644 --- a/modules/jsonrpc/config.py +++ b/modules/jsonrpc/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return True + return True + def configure(env): - pass + pass diff --git a/modules/mbedtls/SCsub b/modules/mbedtls/SCsub index 0c6c703e16..5f5d25a3ee 100755 --- a/modules/mbedtls/SCsub +++ b/modules/mbedtls/SCsub @@ -1,11 +1,11 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_mbed_tls = env_modules.Clone() -if env['builtin_mbedtls']: +if env["builtin_mbedtls"]: # Thirdparty source files thirdparty_sources = [ "aes.c", @@ -86,7 +86,7 @@ if env['builtin_mbedtls']: "x509_csr.c", "x509write_crt.c", "x509write_csr.c", - "xtea.c" + "xtea.c", ] thirdparty_dir = "#thirdparty/mbedtls/library/" diff --git a/modules/mbedtls/config.py b/modules/mbedtls/config.py index 1c8cd12a2d..d22f9454ed 100755 --- a/modules/mbedtls/config.py 
+++ b/modules/mbedtls/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/mobile_vr/SCsub b/modules/mobile_vr/SCsub index 4bd184f025..e6c43228b4 100644 --- a/modules/mobile_vr/SCsub +++ b/modules/mobile_vr/SCsub @@ -1,8 +1,8 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_mobile_vr = env_modules.Clone() -env_mobile_vr.add_source_files(env.modules_sources, '*.cpp') +env_mobile_vr.add_source_files(env.modules_sources, "*.cpp") diff --git a/modules/mobile_vr/config.py b/modules/mobile_vr/config.py index e85fa631dd..ee401c1a2a 100644 --- a/modules/mobile_vr/config.py +++ b/modules/mobile_vr/config.py @@ -1,13 +1,16 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "MobileVRInterface", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/mono/SCsub b/modules/mono/SCsub index 2bfeea9d2e..ca16ffae07 100644 --- a/modules/mono/SCsub +++ b/modules/mono/SCsub @@ -3,22 +3,24 @@ import build_scripts.tls_configure as tls_configure import build_scripts.mono_configure as mono_configure -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_mono = env_modules.Clone() -if env_mono['tools']: +if env_mono["tools"]: # NOTE: It is safe to generate this file here, since this is still executed serially import build_scripts.gen_cs_glue_version as gen_cs_glue_version - gen_cs_glue_version.generate_header('glue/GodotSharp', 'glue/cs_glue_version.gen.h') + + gen_cs_glue_version.generate_header("glue/GodotSharp", "glue/cs_glue_version.gen.h") # Glue sources -if env_mono['mono_glue']: - env_mono.Append(CPPDEFINES=['MONO_GLUE_ENABLED']) +if env_mono["mono_glue"]: + env_mono.Append(CPPDEFINES=["MONO_GLUE_ENABLED"]) import os.path - if not os.path.isfile('glue/mono_glue.gen.cpp'): + + if not os.path.isfile("glue/mono_glue.gen.cpp"): raise RuntimeError("Mono glue sources not found. 
Did you forget to run '--generate-mono-glue'?") # Configure Thread Local Storage @@ -31,27 +33,29 @@ env_mono = conf.Finish() mono_configure.configure(env, env_mono) -if env_mono['tools'] and env_mono['mono_glue'] and env_mono['build_cil']: +if env_mono["tools"] and env_mono["mono_glue"] and env_mono["build_cil"]: # Build Godot API solution import build_scripts.api_solution_build as api_solution_build + api_sln_cmd = api_solution_build.build(env_mono) # Build GodotTools import build_scripts.godot_tools_build as godot_tools_build + godot_tools_build.build(env_mono, api_sln_cmd) # Add sources -env_mono.add_source_files(env.modules_sources, '*.cpp') -env_mono.add_source_files(env.modules_sources, 'glue/*.cpp') -env_mono.add_source_files(env.modules_sources, 'mono_gd/*.cpp') -env_mono.add_source_files(env.modules_sources, 'utils/*.cpp') +env_mono.add_source_files(env.modules_sources, "*.cpp") +env_mono.add_source_files(env.modules_sources, "glue/*.cpp") +env_mono.add_source_files(env.modules_sources, "mono_gd/*.cpp") +env_mono.add_source_files(env.modules_sources, "utils/*.cpp") -env_mono.add_source_files(env.modules_sources, 'mono_gd/support/*.cpp') +env_mono.add_source_files(env.modules_sources, "mono_gd/support/*.cpp") -if env['platform'] in ['osx', 'iphone']: - env_mono.add_source_files(env.modules_sources, 'mono_gd/support/*.mm') - env_mono.add_source_files(env.modules_sources, 'mono_gd/support/*.m') +if env["platform"] in ["osx", "iphone"]: + env_mono.add_source_files(env.modules_sources, "mono_gd/support/*.mm") + env_mono.add_source_files(env.modules_sources, "mono_gd/support/*.m") -if env['tools']: - env_mono.add_source_files(env.modules_sources, 'editor/*.cpp') +if env["tools"]: + env_mono.add_source_files(env.modules_sources, "editor/*.cpp") diff --git a/modules/mono/build_scripts/api_solution_build.py b/modules/mono/build_scripts/api_solution_build.py index 639197c285..9abac22df6 100644 --- a/modules/mono/build_scripts/api_solution_build.py +++ b/modules/mono/build_scripts/api_solution_build.py @@ -8,21 +8,22 @@ from SCons.Script import Dir def build_api_solution(source, target, env): # source and target elements are of type SCons.Node.FS.File, hence why we convert them to str - module_dir = env['module_dir'] + module_dir = env["module_dir"] - solution_path = os.path.join(module_dir, 'glue/GodotSharp/GodotSharp.sln') + solution_path = os.path.join(module_dir, "glue/GodotSharp/GodotSharp.sln") - build_config = env['solution_build_config'] + build_config = env["solution_build_config"] - extra_msbuild_args = ['/p:NoWarn=1591'] # Ignore missing documentation warnings + extra_msbuild_args = ["/p:NoWarn=1591"] # Ignore missing documentation warnings from .solution_builder import build_solution + build_solution(env, solution_path, build_config, extra_msbuild_args=extra_msbuild_args) # Copy targets - core_src_dir = os.path.abspath(os.path.join(solution_path, os.pardir, 'GodotSharp', 'bin', build_config)) - editor_src_dir = os.path.abspath(os.path.join(solution_path, os.pardir, 'GodotSharpEditor', 'bin', build_config)) + core_src_dir = os.path.abspath(os.path.join(solution_path, os.pardir, "GodotSharp", "bin", build_config)) + editor_src_dir = os.path.abspath(os.path.join(solution_path, os.pardir, "GodotSharpEditor", "bin", build_config)) dst_dir = os.path.abspath(os.path.join(str(target[0]), os.pardir)) @@ -32,6 +33,7 @@ def build_api_solution(source, target, env): def copy_target(target_path): from shutil import copy + filename = os.path.basename(target_path) src_path = 
os.path.join(core_src_dir, filename) @@ -45,23 +47,28 @@ def build_api_solution(source, target, env): def build(env_mono): - assert env_mono['tools'] + assert env_mono["tools"] target_filenames = [ - 'GodotSharp.dll', 'GodotSharp.pdb', 'GodotSharp.xml', - 'GodotSharpEditor.dll', 'GodotSharpEditor.pdb', 'GodotSharpEditor.xml' + "GodotSharp.dll", + "GodotSharp.pdb", + "GodotSharp.xml", + "GodotSharpEditor.dll", + "GodotSharpEditor.pdb", + "GodotSharpEditor.xml", ] depend_cmd = [] - for build_config in ['Debug', 'Release']: - output_dir = Dir('#bin').abspath - editor_api_dir = os.path.join(output_dir, 'GodotSharp', 'Api', build_config) + for build_config in ["Debug", "Release"]: + output_dir = Dir("#bin").abspath + editor_api_dir = os.path.join(output_dir, "GodotSharp", "Api", build_config) targets = [os.path.join(editor_api_dir, filename) for filename in target_filenames] - cmd = env_mono.CommandNoCache(targets, depend_cmd, build_api_solution, - module_dir=os.getcwd(), solution_build_config=build_config) + cmd = env_mono.CommandNoCache( + targets, depend_cmd, build_api_solution, module_dir=os.getcwd(), solution_build_config=build_config + ) env_mono.AlwaysBuild(cmd) # Make the Release build of the API solution depend on the Debug build. diff --git a/modules/mono/build_scripts/gen_cs_glue_version.py b/modules/mono/build_scripts/gen_cs_glue_version.py index 5d1056c2fc..98bbb4d9be 100644 --- a/modules/mono/build_scripts/gen_cs_glue_version.py +++ b/modules/mono/build_scripts/gen_cs_glue_version.py @@ -1,20 +1,20 @@ - def generate_header(solution_dir, version_header_dst): import os + latest_mtime = 0 for root, dirs, files in os.walk(solution_dir, topdown=True): - dirs[:] = [d for d in dirs if d not in ['Generated']] # Ignored generated files - files = [f for f in files if f.endswith('.cs')] + dirs[:] = [d for d in dirs if d not in ["Generated"]] # Ignored generated files + files = [f for f in files if f.endswith(".cs")] for file in files: filepath = os.path.join(root, file) mtime = os.path.getmtime(filepath) latest_mtime = mtime if mtime > latest_mtime else latest_mtime - glue_version = int(latest_mtime) # The latest modified time will do for now + glue_version = int(latest_mtime) # The latest modified time will do for now - with open(version_header_dst, 'w') as version_header: - version_header.write('/* THIS FILE IS GENERATED DO NOT EDIT */\n') - version_header.write('#ifndef CS_GLUE_VERSION_H\n') - version_header.write('#define CS_GLUE_VERSION_H\n\n') - version_header.write('#define CS_GLUE_VERSION UINT32_C(' + str(glue_version) + ')\n') - version_header.write('\n#endif // CS_GLUE_VERSION_H\n') + with open(version_header_dst, "w") as version_header: + version_header.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") + version_header.write("#ifndef CS_GLUE_VERSION_H\n") + version_header.write("#define CS_GLUE_VERSION_H\n\n") + version_header.write("#define CS_GLUE_VERSION UINT32_C(" + str(glue_version) + ")\n") + version_header.write("\n#endif // CS_GLUE_VERSION_H\n") diff --git a/modules/mono/build_scripts/godot_tools_build.py b/modules/mono/build_scripts/godot_tools_build.py index f629b39c9d..7391e8790d 100644 --- a/modules/mono/build_scripts/godot_tools_build.py +++ b/modules/mono/build_scripts/godot_tools_build.py @@ -8,10 +8,10 @@ from SCons.Script import Dir def build_godot_tools(source, target, env): # source and target elements are of type SCons.Node.FS.File, hence why we convert them to str - module_dir = env['module_dir'] + module_dir = env["module_dir"] - solution_path = 
os.path.join(module_dir, 'editor/GodotTools/GodotTools.sln') - build_config = 'Debug' if env['target'] == 'debug' else 'Release' + solution_path = os.path.join(module_dir, "editor/GodotTools/GodotTools.sln") + build_config = "Debug" if env["target"] == "debug" else "Release" from .solution_builder import build_solution @@ -20,15 +20,15 @@ def build_godot_tools(source, target, env): def build(env_mono, api_sln_cmd): - assert env_mono['tools'] + assert env_mono["tools"] - output_dir = Dir('#bin').abspath - editor_tools_dir = os.path.join(output_dir, 'GodotSharp', 'Tools') + output_dir = Dir("#bin").abspath + editor_tools_dir = os.path.join(output_dir, "GodotSharp", "Tools") - target_filenames = ['GodotTools.dll'] + target_filenames = ["GodotTools.dll"] - if env_mono['target'] == 'debug': - target_filenames += ['GodotTools.pdb'] + if env_mono["target"] == "debug": + target_filenames += ["GodotTools.pdb"] targets = [os.path.join(editor_tools_dir, filename) for filename in target_filenames] diff --git a/modules/mono/build_scripts/make_android_mono_config.py b/modules/mono/build_scripts/make_android_mono_config.py index 8cad204d7b..ef41f92917 100644 --- a/modules/mono/build_scripts/make_android_mono_config.py +++ b/modules/mono/build_scripts/make_android_mono_config.py @@ -1,24 +1,25 @@ - def generate_compressed_config(config_src, output_dir): import os.path from compat import byte_to_str # Source file - with open(os.path.join(output_dir, 'android_mono_config.gen.cpp'), 'w') as cpp: - with open(config_src, 'rb') as f: + with open(os.path.join(output_dir, "android_mono_config.gen.cpp"), "w") as cpp: + with open(config_src, "rb") as f: buf = f.read() decompr_size = len(buf) import zlib + buf = zlib.compress(buf) compr_size = len(buf) - bytes_seq_str = '' + bytes_seq_str = "" for i, buf_idx in enumerate(range(compr_size)): if i > 0: - bytes_seq_str += ', ' + bytes_seq_str += ", " bytes_seq_str += byte_to_str(buf[buf_idx]) - cpp.write('''/* THIS FILE IS GENERATED DO NOT EDIT */ + cpp.write( + """/* THIS FILE IS GENERATED DO NOT EDIT */ #include "android_mono_config.h" #ifdef ANDROID_ENABLED @@ -49,4 +50,6 @@ String get_godot_android_mono_config() { } #endif // ANDROID_ENABLED -''' % (compr_size, decompr_size, bytes_seq_str)) +""" + % (compr_size, decompr_size, bytes_seq_str) + ) diff --git a/modules/mono/build_scripts/mono_configure.py b/modules/mono/build_scripts/mono_configure.py index e3b8a8ba2a..bb305c55c3 100644 --- a/modules/mono/build_scripts/mono_configure.py +++ b/modules/mono/build_scripts/mono_configure.py @@ -5,48 +5,45 @@ import subprocess from SCons.Script import Dir, Environment -if os.name == 'nt': +if os.name == "nt": from . 
import mono_reg_utils as monoreg -android_arch_dirs = { - 'armv7': 'armeabi-v7a', - 'arm64v8': 'arm64-v8a', - 'x86': 'x86', - 'x86_64': 'x86_64' -} +android_arch_dirs = {"armv7": "armeabi-v7a", "arm64v8": "arm64-v8a", "x86": "x86", "x86_64": "x86_64"} def get_android_out_dir(env): - return os.path.join(Dir('#platform/android/java/lib/libs').abspath, - 'release' if env['target'] == 'release' else 'debug', - android_arch_dirs[env['android_arch']]) + return os.path.join( + Dir("#platform/android/java/lib/libs").abspath, + "release" if env["target"] == "release" else "debug", + android_arch_dirs[env["android_arch"]], + ) -def find_name_in_dir_files(directory, names, prefixes=[''], extensions=['']): +def find_name_in_dir_files(directory, names, prefixes=[""], extensions=[""]): for extension in extensions: - if extension and not extension.startswith('.'): - extension = '.' + extension + if extension and not extension.startswith("."): + extension = "." + extension for prefix in prefixes: for curname in names: if os.path.isfile(os.path.join(directory, prefix + curname + extension)): return curname - return '' + return "" -def find_file_in_dir(directory, names, prefixes=[''], extensions=['']): +def find_file_in_dir(directory, names, prefixes=[""], extensions=[""]): for extension in extensions: - if extension and not extension.startswith('.'): - extension = '.' + extension + if extension and not extension.startswith("."): + extension = "." + extension for prefix in prefixes: for curname in names: filename = prefix + curname + extension if os.path.isfile(os.path.join(directory, filename)): return filename - return '' + return "" -def copy_file(src_dir, dst_dir, src_name, dst_name=''): +def copy_file(src_dir, dst_dir, src_name, dst_name=""): from shutil import copy src_path = os.path.join(Dir(src_dir).abspath, src_name) @@ -62,163 +59,171 @@ def copy_file(src_dir, dst_dir, src_name, dst_name=''): def is_desktop(platform): - return platform in ['windows', 'osx', 'x11', 'server', 'uwp', 'haiku'] + return platform in ["windows", "osx", "x11", "server", "uwp", "haiku"] def is_unix_like(platform): - return platform in ['osx', 'x11', 'server', 'android', 'haiku', 'iphone'] + return platform in ["osx", "x11", "server", "android", "haiku", "iphone"] def module_supports_tools_on(platform): - return platform not in ['android', 'javascript', 'iphone'] + return platform not in ["android", "javascript", "iphone"] def find_wasm_src_dir(mono_root): hint_dirs = [ - os.path.join(mono_root, 'src'), - os.path.join(mono_root, '../src'), + os.path.join(mono_root, "src"), + os.path.join(mono_root, "../src"), ] for hint_dir in hint_dirs: - if os.path.isfile(os.path.join(hint_dir, 'driver.c')): + if os.path.isfile(os.path.join(hint_dir, "driver.c")): return hint_dir - return '' + return "" def configure(env, env_mono): - bits = env['bits'] - is_android = env['platform'] == 'android' - is_javascript = env['platform'] == 'javascript' - is_ios = env['platform'] == 'iphone' - is_ios_sim = is_ios and env['arch'] in ['x86', 'x86_64'] + bits = env["bits"] + is_android = env["platform"] == "android" + is_javascript = env["platform"] == "javascript" + is_ios = env["platform"] == "iphone" + is_ios_sim = is_ios and env["arch"] in ["x86", "x86_64"] - tools_enabled = env['tools'] - mono_static = env['mono_static'] - copy_mono_root = env['copy_mono_root'] + tools_enabled = env["tools"] + mono_static = env["mono_static"] + copy_mono_root = env["copy_mono_root"] - mono_prefix = env['mono_prefix'] + mono_prefix = env["mono_prefix"] - 
mono_lib_names = ['mono-2.0-sgen', 'monosgen-2.0'] + mono_lib_names = ["mono-2.0-sgen", "monosgen-2.0"] - is_travis = os.environ.get('TRAVIS') == 'true' + is_travis = os.environ.get("TRAVIS") == "true" if is_travis: # Travis CI may have a Mono version lower than 5.12 - env_mono.Append(CPPDEFINES=['NO_PENDING_EXCEPTIONS']) + env_mono.Append(CPPDEFINES=["NO_PENDING_EXCEPTIONS"]) - if is_android and not env['android_arch'] in android_arch_dirs: - raise RuntimeError('This module does not support the specified \'android_arch\': ' + env['android_arch']) + if is_android and not env["android_arch"] in android_arch_dirs: + raise RuntimeError("This module does not support the specified 'android_arch': " + env["android_arch"]) - if tools_enabled and not module_supports_tools_on(env['platform']): + if tools_enabled and not module_supports_tools_on(env["platform"]): # TODO: # Android: We have to add the data directory to the apk, concretely the Api and Tools folders. - raise RuntimeError('This module does not currently support building for this platform with tools enabled') + raise RuntimeError("This module does not currently support building for this platform with tools enabled") if is_android and mono_static: # FIXME: When static linking and doing something that requires libmono-native, we get a dlopen error as 'libmono-native' # seems to depend on 'libmonosgen-2.0'. Could be fixed by re-directing to '__Internal' with a dllmap or in the dlopen hook. - raise RuntimeError('Statically linking Mono is not currently supported for this platform') + raise RuntimeError("Statically linking Mono is not currently supported for this platform") if not mono_static and (is_javascript or is_ios): - raise RuntimeError('Dynamically linking Mono is not currently supported for this platform') + raise RuntimeError("Dynamically linking Mono is not currently supported for this platform") - if not mono_prefix and (os.getenv('MONO32_PREFIX') or os.getenv('MONO64_PREFIX')): - print("WARNING: The environment variables 'MONO32_PREFIX' and 'MONO64_PREFIX' are deprecated; use the 'mono_prefix' SCons parameter instead") + if not mono_prefix and (os.getenv("MONO32_PREFIX") or os.getenv("MONO64_PREFIX")): + print( + "WARNING: The environment variables 'MONO32_PREFIX' and 'MONO64_PREFIX' are deprecated; use the 'mono_prefix' SCons parameter instead" + ) # Although we don't support building with tools for any platform where we currently use static AOT, # if these are supported in the future, we won't be using static AOT for them as that would be # too restrictive for the editor. These builds would probably be made to only use the interpreter. 
- mono_aot_static = (is_ios and not is_ios_sim) and not env['tools'] + mono_aot_static = (is_ios and not is_ios_sim) and not env["tools"] # Static AOT is only supported on the root domain mono_single_appdomain = mono_aot_static if mono_single_appdomain: - env_mono.Append(CPPDEFINES=['GD_MONO_SINGLE_APPDOMAIN']) + env_mono.Append(CPPDEFINES=["GD_MONO_SINGLE_APPDOMAIN"]) - if (env['tools'] or env['target'] != 'release') and not mono_single_appdomain: - env_mono.Append(CPPDEFINES=['GD_MONO_HOT_RELOAD']) + if (env["tools"] or env["target"] != "release") and not mono_single_appdomain: + env_mono.Append(CPPDEFINES=["GD_MONO_HOT_RELOAD"]) - if env['platform'] == 'windows': + if env["platform"] == "windows": mono_root = mono_prefix - if not mono_root and os.name == 'nt': + if not mono_root and os.name == "nt": mono_root = monoreg.find_mono_root_dir(bits) if not mono_root: - raise RuntimeError("Mono installation directory not found; specify one manually with the 'mono_prefix' SCons parameter") + raise RuntimeError( + "Mono installation directory not found; specify one manually with the 'mono_prefix' SCons parameter" + ) - print('Found Mono root directory: ' + mono_root) + print("Found Mono root directory: " + mono_root) - mono_lib_path = os.path.join(mono_root, 'lib') + mono_lib_path = os.path.join(mono_root, "lib") env.Append(LIBPATH=mono_lib_path) - env_mono.Prepend(CPPPATH=os.path.join(mono_root, 'include', 'mono-2.0')) + env_mono.Prepend(CPPPATH=os.path.join(mono_root, "include", "mono-2.0")) - lib_suffixes = ['.lib'] + lib_suffixes = [".lib"] if not env.msvc: # MingW supports both '.a' and '.lib' - lib_suffixes.insert(0, '.a') + lib_suffixes.insert(0, ".a") if mono_static: if env.msvc: - mono_static_lib_name = 'libmono-static-sgen' + mono_static_lib_name = "libmono-static-sgen" else: - mono_static_lib_name = 'libmonosgen-2.0' + mono_static_lib_name = "libmonosgen-2.0" mono_static_lib_file = find_file_in_dir(mono_lib_path, [mono_static_lib_name], extensions=lib_suffixes) if not mono_static_lib_file: - raise RuntimeError('Could not find static mono library in: ' + mono_lib_path) + raise RuntimeError("Could not find static mono library in: " + mono_lib_path) if env.msvc: env.Append(LINKFLAGS=mono_static_lib_file) - env.Append(LINKFLAGS='Mincore.lib') - env.Append(LINKFLAGS='msvcrt.lib') - env.Append(LINKFLAGS='LIBCMT.lib') - env.Append(LINKFLAGS='Psapi.lib') + env.Append(LINKFLAGS="Mincore.lib") + env.Append(LINKFLAGS="msvcrt.lib") + env.Append(LINKFLAGS="LIBCMT.lib") + env.Append(LINKFLAGS="Psapi.lib") else: mono_static_lib_file_path = os.path.join(mono_lib_path, mono_static_lib_file) - env.Append(LINKFLAGS=['-Wl,-whole-archive', mono_static_lib_file_path, '-Wl,-no-whole-archive']) + env.Append(LINKFLAGS=["-Wl,-whole-archive", mono_static_lib_file_path, "-Wl,-no-whole-archive"]) - env.Append(LIBS=['psapi']) - env.Append(LIBS=['version']) + env.Append(LIBS=["psapi"]) + env.Append(LIBS=["version"]) else: - mono_lib_name = find_name_in_dir_files(mono_lib_path, mono_lib_names, prefixes=['', 'lib'], extensions=lib_suffixes) + mono_lib_name = find_name_in_dir_files( + mono_lib_path, mono_lib_names, prefixes=["", "lib"], extensions=lib_suffixes + ) if not mono_lib_name: - raise RuntimeError('Could not find mono library in: ' + mono_lib_path) + raise RuntimeError("Could not find mono library in: " + mono_lib_path) if env.msvc: - env.Append(LINKFLAGS=mono_lib_name + '.lib') + env.Append(LINKFLAGS=mono_lib_name + ".lib") else: env.Append(LIBS=[mono_lib_name]) - mono_bin_path = os.path.join(mono_root, 
'bin') + mono_bin_path = os.path.join(mono_root, "bin") - mono_dll_file = find_file_in_dir(mono_bin_path, mono_lib_names, prefixes=['', 'lib'], extensions=['.dll']) + mono_dll_file = find_file_in_dir(mono_bin_path, mono_lib_names, prefixes=["", "lib"], extensions=[".dll"]) if not mono_dll_file: - raise RuntimeError('Could not find mono shared library in: ' + mono_bin_path) + raise RuntimeError("Could not find mono shared library in: " + mono_bin_path) - copy_file(mono_bin_path, '#bin', mono_dll_file) + copy_file(mono_bin_path, "#bin", mono_dll_file) else: - is_apple = env['platform'] in ['osx', 'iphone'] + is_apple = env["platform"] in ["osx", "iphone"] is_macos = is_apple and not is_ios - sharedlib_ext = '.dylib' if is_apple else '.so' + sharedlib_ext = ".dylib" if is_apple else ".so" mono_root = mono_prefix - mono_lib_path = '' - mono_so_file = '' + mono_lib_path = "" + mono_so_file = "" if not mono_root and (is_android or is_javascript or is_ios): - raise RuntimeError("Mono installation directory not found; specify one manually with the 'mono_prefix' SCons parameter") + raise RuntimeError( + "Mono installation directory not found; specify one manually with the 'mono_prefix' SCons parameter" + ) if not mono_root and is_macos: # Try with some known directories under OSX - hint_dirs = ['/Library/Frameworks/Mono.framework/Versions/Current', '/usr/local/var/homebrew/linked/mono'] + hint_dirs = ["/Library/Frameworks/Mono.framework/Versions/Current", "/usr/local/var/homebrew/linked/mono"] for hint_dir in hint_dirs: if os.path.isdir(hint_dir): mono_root = hint_dir @@ -229,147 +234,165 @@ def configure(env, env_mono): if not mono_root and mono_static: mono_root = pkgconfig_try_find_mono_root(mono_lib_names, sharedlib_ext) if not mono_root: - raise RuntimeError("Building with mono_static=yes, but failed to find the mono prefix with pkg-config; " + \ - "specify one manually with the 'mono_prefix' SCons parameter") + raise RuntimeError( + "Building with mono_static=yes, but failed to find the mono prefix with pkg-config; " + + "specify one manually with the 'mono_prefix' SCons parameter" + ) if is_ios and not is_ios_sim: - env_mono.Append(CPPDEFINES=['IOS_DEVICE']) + env_mono.Append(CPPDEFINES=["IOS_DEVICE"]) if mono_root: - print('Found Mono root directory: ' + mono_root) + print("Found Mono root directory: " + mono_root) - mono_lib_path = os.path.join(mono_root, 'lib') + mono_lib_path = os.path.join(mono_root, "lib") env.Append(LIBPATH=[mono_lib_path]) - env_mono.Prepend(CPPPATH=os.path.join(mono_root, 'include', 'mono-2.0')) + env_mono.Prepend(CPPPATH=os.path.join(mono_root, "include", "mono-2.0")) - mono_lib = find_name_in_dir_files(mono_lib_path, mono_lib_names, prefixes=['lib'], extensions=['.a']) + mono_lib = find_name_in_dir_files(mono_lib_path, mono_lib_names, prefixes=["lib"], extensions=[".a"]) if not mono_lib: - raise RuntimeError('Could not find mono library in: ' + mono_lib_path) + raise RuntimeError("Could not find mono library in: " + mono_lib_path) - env_mono.Append(CPPDEFINES=['_REENTRANT']) + env_mono.Append(CPPDEFINES=["_REENTRANT"]) if mono_static: - env.Append(LINKFLAGS=['-rdynamic']) + env.Append(LINKFLAGS=["-rdynamic"]) - mono_lib_file = os.path.join(mono_lib_path, 'lib' + mono_lib + '.a') + mono_lib_file = os.path.join(mono_lib_path, "lib" + mono_lib + ".a") if is_apple: if is_macos: - env.Append(LINKFLAGS=['-Wl,-force_load,' + mono_lib_file]) + env.Append(LINKFLAGS=["-Wl,-force_load," + mono_lib_file]) else: - arch = env['arch'] + arch = env["arch"] + def 
copy_mono_lib(libname_wo_ext): - copy_file(mono_lib_path, '#bin', libname_wo_ext + '.a', '%s.iphone.%s.a' % (libname_wo_ext, arch)) + copy_file( + mono_lib_path, "#bin", libname_wo_ext + ".a", "%s.iphone.%s.a" % (libname_wo_ext, arch) + ) # Copy Mono libraries to the output folder. These are meant to be bundled with # the export templates and added to the Xcode project when exporting a game. - copy_mono_lib('lib' + mono_lib) - copy_mono_lib('libmono-native') - copy_mono_lib('libmono-profiler-log') + copy_mono_lib("lib" + mono_lib) + copy_mono_lib("libmono-native") + copy_mono_lib("libmono-profiler-log") if not is_ios_sim: - copy_mono_lib('libmono-ee-interp') - copy_mono_lib('libmono-icall-table') - copy_mono_lib('libmono-ilgen') + copy_mono_lib("libmono-ee-interp") + copy_mono_lib("libmono-icall-table") + copy_mono_lib("libmono-ilgen") else: - assert is_desktop(env['platform']) or is_android or is_javascript - env.Append(LINKFLAGS=['-Wl,-whole-archive', mono_lib_file, '-Wl,-no-whole-archive']) + assert is_desktop(env["platform"]) or is_android or is_javascript + env.Append(LINKFLAGS=["-Wl,-whole-archive", mono_lib_file, "-Wl,-no-whole-archive"]) if is_javascript: - env.Append(LIBS=['mono-icall-table', 'mono-native', 'mono-ilgen', 'mono-ee-interp']) + env.Append(LIBS=["mono-icall-table", "mono-native", "mono-ilgen", "mono-ee-interp"]) - wasm_src_dir = os.path.join(mono_root, 'src') + wasm_src_dir = os.path.join(mono_root, "src") if not os.path.isdir(wasm_src_dir): - raise RuntimeError('Could not find mono wasm src directory') + raise RuntimeError("Could not find mono wasm src directory") # Ideally this should be defined only for 'driver.c', but I can't fight scons for another 2 hours - env_mono.Append(CPPDEFINES=['CORE_BINDINGS']) + env_mono.Append(CPPDEFINES=["CORE_BINDINGS"]) - env_mono.add_source_files(env.modules_sources, [ - os.path.join(wasm_src_dir, 'driver.c'), - os.path.join(wasm_src_dir, 'zlib-helper.c'), - os.path.join(wasm_src_dir, 'corebindings.c') - ]) + env_mono.add_source_files( + env.modules_sources, + [ + os.path.join(wasm_src_dir, "driver.c"), + os.path.join(wasm_src_dir, "zlib-helper.c"), + os.path.join(wasm_src_dir, "corebindings.c"), + ], + ) - env.Append(LINKFLAGS=[ - '--js-library', os.path.join(wasm_src_dir, 'library_mono.js'), - '--js-library', os.path.join(wasm_src_dir, 'binding_support.js'), - '--js-library', os.path.join(wasm_src_dir, 'dotnet_support.js') - ]) + env.Append( + LINKFLAGS=[ + "--js-library", + os.path.join(wasm_src_dir, "library_mono.js"), + "--js-library", + os.path.join(wasm_src_dir, "binding_support.js"), + "--js-library", + os.path.join(wasm_src_dir, "dotnet_support.js"), + ] + ) else: env.Append(LIBS=[mono_lib]) if is_macos: - env.Append(LIBS=['iconv', 'pthread']) + env.Append(LIBS=["iconv", "pthread"]) elif is_android: - pass # Nothing + pass # Nothing elif is_ios: - pass # Nothing, linking is delegated to the exported Xcode project + pass # Nothing, linking is delegated to the exported Xcode project elif is_javascript: - env.Append(LIBS=['m', 'rt', 'dl', 'pthread']) + env.Append(LIBS=["m", "rt", "dl", "pthread"]) else: - env.Append(LIBS=['m', 'rt', 'dl', 'pthread']) + env.Append(LIBS=["m", "rt", "dl", "pthread"]) if not mono_static: - mono_so_file = find_file_in_dir(mono_lib_path, mono_lib_names, prefixes=['lib'], extensions=[sharedlib_ext]) + mono_so_file = find_file_in_dir( + mono_lib_path, mono_lib_names, prefixes=["lib"], extensions=[sharedlib_ext] + ) if not mono_so_file: - raise RuntimeError('Could not find mono shared library in: ' + 
mono_lib_path) + raise RuntimeError("Could not find mono shared library in: " + mono_lib_path) else: assert not mono_static # TODO: Add option to force using pkg-config - print('Mono root directory not found. Using pkg-config instead') + print("Mono root directory not found. Using pkg-config instead") - env.ParseConfig('pkg-config monosgen-2 --libs') - env_mono.ParseConfig('pkg-config monosgen-2 --cflags') + env.ParseConfig("pkg-config monosgen-2 --libs") + env_mono.ParseConfig("pkg-config monosgen-2 --cflags") tmpenv = Environment() - tmpenv.AppendENVPath('PKG_CONFIG_PATH', os.getenv('PKG_CONFIG_PATH')) - tmpenv.ParseConfig('pkg-config monosgen-2 --libs-only-L') + tmpenv.AppendENVPath("PKG_CONFIG_PATH", os.getenv("PKG_CONFIG_PATH")) + tmpenv.ParseConfig("pkg-config monosgen-2 --libs-only-L") - for hint_dir in tmpenv['LIBPATH']: - file_found = find_file_in_dir(hint_dir, mono_lib_names, prefixes=['lib'], extensions=[sharedlib_ext]) + for hint_dir in tmpenv["LIBPATH"]: + file_found = find_file_in_dir(hint_dir, mono_lib_names, prefixes=["lib"], extensions=[sharedlib_ext]) if file_found: mono_lib_path = hint_dir mono_so_file = file_found break if not mono_so_file: - raise RuntimeError('Could not find mono shared library in: ' + str(tmpenv['LIBPATH'])) + raise RuntimeError("Could not find mono shared library in: " + str(tmpenv["LIBPATH"])) if not mono_static: - libs_output_dir = get_android_out_dir(env) if is_android else '#bin' + libs_output_dir = get_android_out_dir(env) if is_android else "#bin" copy_file(mono_lib_path, libs_output_dir, mono_so_file) if not tools_enabled: - if is_desktop(env['platform']): + if is_desktop(env["platform"]): if not mono_root: - mono_root = subprocess.check_output(['pkg-config', 'mono-2', '--variable=prefix']).decode('utf8').strip() + mono_root = ( + subprocess.check_output(["pkg-config", "mono-2", "--variable=prefix"]).decode("utf8").strip() + ) make_template_dir(env, mono_root) elif is_android: # Compress Android Mono Config from . 
import make_android_mono_config + module_dir = os.getcwd() - config_file_path = os.path.join(module_dir, 'build_scripts', 'mono_android_config.xml') - make_android_mono_config.generate_compressed_config(config_file_path, 'mono_gd/') + config_file_path = os.path.join(module_dir, "build_scripts", "mono_android_config.xml") + make_android_mono_config.generate_compressed_config(config_file_path, "mono_gd/") # Copy the required shared libraries copy_mono_shared_libs(env, mono_root, None) elif is_javascript: - pass # No data directory for this platform + pass # No data directory for this platform elif is_ios: - pass # No data directory for this platform + pass # No data directory for this platform if copy_mono_root: if not mono_root: - mono_root = subprocess.check_output(['pkg-config', 'mono-2', '--variable=prefix']).decode('utf8').strip() + mono_root = subprocess.check_output(["pkg-config", "mono-2", "--variable=prefix"]).decode("utf8").strip() if tools_enabled: - copy_mono_root_files(env, mono_root) + copy_mono_root_files(env, mono_root) else: print("Ignoring option: 'copy_mono_root'; only available for builds with 'tools' enabled.") @@ -377,26 +400,26 @@ def configure(env, env_mono): def make_template_dir(env, mono_root): from shutil import rmtree - platform = env['platform'] - target = env['target'] + platform = env["platform"] + target = env["target"] - template_dir_name = '' + template_dir_name = "" assert is_desktop(platform) - template_dir_name = 'data.mono.%s.%s.%s' % (platform, env['bits'], target) + template_dir_name = "data.mono.%s.%s.%s" % (platform, env["bits"], target) - output_dir = Dir('#bin').abspath + output_dir = Dir("#bin").abspath template_dir = os.path.join(output_dir, template_dir_name) - template_mono_root_dir = os.path.join(template_dir, 'Mono') + template_mono_root_dir = os.path.join(template_dir, "Mono") if os.path.isdir(template_mono_root_dir): - rmtree(template_mono_root_dir) # Clean first + rmtree(template_mono_root_dir) # Clean first # Copy etc/mono/ - template_mono_config_dir = os.path.join(template_mono_root_dir, 'etc', 'mono') + template_mono_config_dir = os.path.join(template_mono_root_dir, "etc", "mono") copy_mono_etc_dir(mono_root, template_mono_config_dir, platform) # Copy the required shared libraries @@ -410,18 +433,18 @@ def copy_mono_root_files(env, mono_root): from shutil import rmtree if not mono_root: - raise RuntimeError('Mono installation directory not found') + raise RuntimeError("Mono installation directory not found") - output_dir = Dir('#bin').abspath - editor_mono_root_dir = os.path.join(output_dir, 'GodotSharp', 'Mono') + output_dir = Dir("#bin").abspath + editor_mono_root_dir = os.path.join(output_dir, "GodotSharp", "Mono") if os.path.isdir(editor_mono_root_dir): - rmtree(editor_mono_root_dir) # Clean first + rmtree(editor_mono_root_dir) # Clean first # Copy etc/mono/ - editor_mono_config_dir = os.path.join(editor_mono_root_dir, 'etc', 'mono') - copy_mono_etc_dir(mono_root, editor_mono_config_dir, env['platform']) + editor_mono_config_dir = os.path.join(editor_mono_root_dir, "etc", "mono") + copy_mono_etc_dir(mono_root, editor_mono_config_dir, env["platform"]) # Copy the required shared libraries @@ -429,20 +452,20 @@ def copy_mono_root_files(env, mono_root): # Copy framework assemblies - mono_framework_dir = os.path.join(mono_root, 'lib', 'mono', '4.5') - mono_framework_facades_dir = os.path.join(mono_framework_dir, 'Facades') + mono_framework_dir = os.path.join(mono_root, "lib", "mono", "4.5") + mono_framework_facades_dir = 
os.path.join(mono_framework_dir, "Facades") - editor_mono_framework_dir = os.path.join(editor_mono_root_dir, 'lib', 'mono', '4.5') - editor_mono_framework_facades_dir = os.path.join(editor_mono_framework_dir, 'Facades') + editor_mono_framework_dir = os.path.join(editor_mono_root_dir, "lib", "mono", "4.5") + editor_mono_framework_facades_dir = os.path.join(editor_mono_framework_dir, "Facades") if not os.path.isdir(editor_mono_framework_dir): os.makedirs(editor_mono_framework_dir) if not os.path.isdir(editor_mono_framework_facades_dir): os.makedirs(editor_mono_framework_facades_dir) - for assembly in glob(os.path.join(mono_framework_dir, '*.dll')): + for assembly in glob(os.path.join(mono_framework_dir, "*.dll")): copy(assembly, editor_mono_framework_dir) - for assembly in glob(os.path.join(mono_framework_facades_dir, '*.dll')): + for assembly in glob(os.path.join(mono_framework_facades_dir, "*.dll")): copy(assembly, editor_mono_framework_facades_dir) @@ -454,28 +477,28 @@ def copy_mono_etc_dir(mono_root, target_mono_config_dir, platform): if not os.path.isdir(target_mono_config_dir): os.makedirs(target_mono_config_dir) - mono_etc_dir = os.path.join(mono_root, 'etc', 'mono') + mono_etc_dir = os.path.join(mono_root, "etc", "mono") if not os.path.isdir(mono_etc_dir): - mono_etc_dir = '' + mono_etc_dir = "" etc_hint_dirs = [] - if platform != 'windows': - etc_hint_dirs += ['/etc/mono', '/usr/local/etc/mono'] - if 'MONO_CFG_DIR' in os.environ: - etc_hint_dirs += [os.path.join(os.environ['MONO_CFG_DIR'], 'mono')] + if platform != "windows": + etc_hint_dirs += ["/etc/mono", "/usr/local/etc/mono"] + if "MONO_CFG_DIR" in os.environ: + etc_hint_dirs += [os.path.join(os.environ["MONO_CFG_DIR"], "mono")] for etc_hint_dir in etc_hint_dirs: if os.path.isdir(etc_hint_dir): mono_etc_dir = etc_hint_dir break if not mono_etc_dir: - raise RuntimeError('Mono installation etc directory not found') + raise RuntimeError("Mono installation etc directory not found") - copy_tree(os.path.join(mono_etc_dir, '2.0'), os.path.join(target_mono_config_dir, '2.0')) - copy_tree(os.path.join(mono_etc_dir, '4.0'), os.path.join(target_mono_config_dir, '4.0')) - copy_tree(os.path.join(mono_etc_dir, '4.5'), os.path.join(target_mono_config_dir, '4.5')) - if os.path.isdir(os.path.join(mono_etc_dir, 'mconfig')): - copy_tree(os.path.join(mono_etc_dir, 'mconfig'), os.path.join(target_mono_config_dir, 'mconfig')) + copy_tree(os.path.join(mono_etc_dir, "2.0"), os.path.join(target_mono_config_dir, "2.0")) + copy_tree(os.path.join(mono_etc_dir, "4.0"), os.path.join(target_mono_config_dir, "4.0")) + copy_tree(os.path.join(mono_etc_dir, "4.5"), os.path.join(target_mono_config_dir, "4.5")) + if os.path.isdir(os.path.join(mono_etc_dir, "mconfig")): + copy_tree(os.path.join(mono_etc_dir, "mconfig"), os.path.join(target_mono_config_dir, "mconfig")) - for file in glob(os.path.join(mono_etc_dir, '*')): + for file in glob(os.path.join(mono_etc_dir, "*")): if os.path.isfile(file): copy(file, target_mono_config_dir) @@ -487,48 +510,66 @@ def copy_mono_shared_libs(env, mono_root, target_mono_root_dir): if os.path.isfile(src): copy(src, dst) - platform = env['platform'] + platform = env["platform"] - if platform == 'windows': - src_mono_bin_dir = os.path.join(mono_root, 'bin') - target_mono_bin_dir = os.path.join(target_mono_root_dir, 'bin') + if platform == "windows": + src_mono_bin_dir = os.path.join(mono_root, "bin") + target_mono_bin_dir = os.path.join(target_mono_root_dir, "bin") if not os.path.isdir(target_mono_bin_dir): 
os.makedirs(target_mono_bin_dir) - mono_posix_helper_file = find_file_in_dir(src_mono_bin_dir, ['MonoPosixHelper'], prefixes=['', 'lib'], extensions=['.dll']) - copy(os.path.join(src_mono_bin_dir, mono_posix_helper_file), os.path.join(target_mono_bin_dir, 'MonoPosixHelper.dll')) + mono_posix_helper_file = find_file_in_dir( + src_mono_bin_dir, ["MonoPosixHelper"], prefixes=["", "lib"], extensions=[".dll"] + ) + copy( + os.path.join(src_mono_bin_dir, mono_posix_helper_file), + os.path.join(target_mono_bin_dir, "MonoPosixHelper.dll"), + ) # For newer versions - btls_dll_path = os.path.join(src_mono_bin_dir, 'libmono-btls-shared.dll') + btls_dll_path = os.path.join(src_mono_bin_dir, "libmono-btls-shared.dll") if os.path.isfile(btls_dll_path): copy(btls_dll_path, target_mono_bin_dir) else: - target_mono_lib_dir = get_android_out_dir(env) if platform == 'android' else os.path.join(target_mono_root_dir, 'lib') + target_mono_lib_dir = ( + get_android_out_dir(env) if platform == "android" else os.path.join(target_mono_root_dir, "lib") + ) if not os.path.isdir(target_mono_lib_dir): os.makedirs(target_mono_lib_dir) lib_file_names = [] - if platform == 'osx': - lib_file_names = [lib_name + '.dylib' for lib_name in [ - 'libmono-btls-shared', 'libmono-native-compat', 'libMonoPosixHelper' - ]] + if platform == "osx": + lib_file_names = [ + lib_name + ".dylib" + for lib_name in ["libmono-btls-shared", "libmono-native-compat", "libMonoPosixHelper"] + ] elif is_unix_like(platform): - lib_file_names = [lib_name + '.so' for lib_name in [ - 'libmono-btls-shared', 'libmono-ee-interp', 'libmono-native', 'libMonoPosixHelper', - 'libmono-profiler-aot', 'libmono-profiler-coverage', 'libmono-profiler-log', 'libMonoSupportW' - ]] + lib_file_names = [ + lib_name + ".so" + for lib_name in [ + "libmono-btls-shared", + "libmono-ee-interp", + "libmono-native", + "libMonoPosixHelper", + "libmono-profiler-aot", + "libmono-profiler-coverage", + "libmono-profiler-log", + "libMonoSupportW", + ] + ] for lib_file_name in lib_file_names: - copy_if_exists(os.path.join(mono_root, 'lib', lib_file_name), target_mono_lib_dir) + copy_if_exists(os.path.join(mono_root, "lib", lib_file_name), target_mono_lib_dir) + def pkgconfig_try_find_mono_root(mono_lib_names, sharedlib_ext): tmpenv = Environment() - tmpenv.AppendENVPath('PKG_CONFIG_PATH', os.getenv('PKG_CONFIG_PATH')) - tmpenv.ParseConfig('pkg-config monosgen-2 --libs-only-L') - for hint_dir in tmpenv['LIBPATH']: - name_found = find_name_in_dir_files(hint_dir, mono_lib_names, prefixes=['lib'], extensions=[sharedlib_ext]) - if name_found and os.path.isdir(os.path.join(hint_dir, '..', 'include', 'mono-2.0')): - return os.path.join(hint_dir, '..') - return '' + tmpenv.AppendENVPath("PKG_CONFIG_PATH", os.getenv("PKG_CONFIG_PATH")) + tmpenv.ParseConfig("pkg-config monosgen-2 --libs-only-L") + for hint_dir in tmpenv["LIBPATH"]: + name_found = find_name_in_dir_files(hint_dir, mono_lib_names, prefixes=["lib"], extensions=[sharedlib_ext]) + if name_found and os.path.isdir(os.path.join(hint_dir, "..", "include", "mono-2.0")): + return os.path.join(hint_dir, "..") + return "" diff --git a/modules/mono/build_scripts/mono_reg_utils.py b/modules/mono/build_scripts/mono_reg_utils.py index b2c48f0a61..bad4314ce1 100644 --- a/modules/mono/build_scripts/mono_reg_utils.py +++ b/modules/mono/build_scripts/mono_reg_utils.py @@ -3,8 +3,9 @@ import platform from compat import decode_utf8 -if os.name == 'nt': +if os.name == "nt": import sys + if sys.version_info < (3,): import _winreg as winreg else: @@ 
-15,7 +16,7 @@ def _reg_open_key(key, subkey): try: return winreg.OpenKey(key, subkey) except (WindowsError, OSError): - if platform.architecture()[0] == '32bit': + if platform.architecture()[0] == "32bit": bitness_sam = winreg.KEY_WOW64_64KEY else: bitness_sam = winreg.KEY_WOW64_32KEY @@ -25,12 +26,12 @@ def _reg_open_key(key, subkey): def _reg_open_key_bits(key, subkey, bits): sam = winreg.KEY_READ - if platform.architecture()[0] == '32bit': - if bits == '64': + if platform.architecture()[0] == "32bit": + if bits == "64": # Force 32bit process to search in 64bit registry sam |= winreg.KEY_WOW64_64KEY else: - if bits == '32': + if bits == "32": # Force 64bit process to search in 32bit registry sam |= winreg.KEY_WOW64_32KEY @@ -40,7 +41,7 @@ def _reg_open_key_bits(key, subkey, bits): def _find_mono_in_reg(subkey, bits): try: with _reg_open_key_bits(winreg.HKEY_LOCAL_MACHINE, subkey, bits) as hKey: - value = winreg.QueryValueEx(hKey, 'SdkInstallRoot')[0] + value = winreg.QueryValueEx(hKey, "SdkInstallRoot")[0] return value except (WindowsError, OSError): return None @@ -49,70 +50,70 @@ def _find_mono_in_reg(subkey, bits): def _find_mono_in_reg_old(subkey, bits): try: with _reg_open_key_bits(winreg.HKEY_LOCAL_MACHINE, subkey, bits) as hKey: - default_clr = winreg.QueryValueEx(hKey, 'DefaultCLR')[0] + default_clr = winreg.QueryValueEx(hKey, "DefaultCLR")[0] if default_clr: - return _find_mono_in_reg(subkey + '\\' + default_clr, bits) + return _find_mono_in_reg(subkey + "\\" + default_clr, bits) return None except (WindowsError, EnvironmentError): return None def find_mono_root_dir(bits): - root_dir = _find_mono_in_reg(r'SOFTWARE\Mono', bits) + root_dir = _find_mono_in_reg(r"SOFTWARE\Mono", bits) if root_dir is not None: return str(root_dir) - root_dir = _find_mono_in_reg_old(r'SOFTWARE\Novell\Mono', bits) + root_dir = _find_mono_in_reg_old(r"SOFTWARE\Novell\Mono", bits) if root_dir is not None: return str(root_dir) - return '' + return "" def find_msbuild_tools_path_reg(): import subprocess - vswhere = os.getenv('PROGRAMFILES(X86)') + vswhere = os.getenv("PROGRAMFILES(X86)") if not vswhere: - vswhere = os.getenv('PROGRAMFILES') - vswhere += r'\Microsoft Visual Studio\Installer\vswhere.exe' + vswhere = os.getenv("PROGRAMFILES") + vswhere += r"\Microsoft Visual Studio\Installer\vswhere.exe" - vswhere_args = ['-latest', '-products', '*', '-requires', 'Microsoft.Component.MSBuild'] + vswhere_args = ["-latest", "-products", "*", "-requires", "Microsoft.Component.MSBuild"] try: lines = subprocess.check_output([vswhere] + vswhere_args).splitlines() for line in lines: - parts = decode_utf8(line).split(':', 1) + parts = decode_utf8(line).split(":", 1) - if len(parts) < 2 or parts[0] != 'installationPath': + if len(parts) < 2 or parts[0] != "installationPath": continue val = parts[1].strip() if not val: - raise ValueError('Value of `installationPath` entry is empty') + raise ValueError("Value of `installationPath` entry is empty") # Since VS2019, the directory is simply named "Current" - msbuild_dir = os.path.join(val, 'MSBuild\\Current\\Bin') + msbuild_dir = os.path.join(val, "MSBuild\\Current\\Bin") if os.path.isdir(msbuild_dir): return msbuild_dir # Directory name "15.0" is used in VS 2017 - return os.path.join(val, 'MSBuild\\15.0\\Bin') + return os.path.join(val, "MSBuild\\15.0\\Bin") - raise ValueError('Cannot find `installationPath` entry') + raise ValueError("Cannot find `installationPath` entry") except ValueError as e: - print('Error reading output from vswhere: ' + e.message) + print("Error 
reading output from vswhere: " + e.message) except WindowsError: - pass # Fine, vswhere not found + pass # Fine, vswhere not found except (subprocess.CalledProcessError, OSError): pass # Try to find 14.0 in the Registry try: - subkey = r'SOFTWARE\Microsoft\MSBuild\ToolsVersions\14.0' + subkey = r"SOFTWARE\Microsoft\MSBuild\ToolsVersions\14.0" with _reg_open_key(winreg.HKEY_LOCAL_MACHINE, subkey) as hKey: - value = winreg.QueryValueEx(hKey, 'MSBuildToolsPath')[0] + value = winreg.QueryValueEx(hKey, "MSBuildToolsPath")[0] return value except (WindowsError, OSError): - return '' + return "" diff --git a/modules/mono/build_scripts/solution_builder.py b/modules/mono/build_scripts/solution_builder.py index b633c28b0b..371819fd72 100644 --- a/modules/mono/build_scripts/solution_builder.py +++ b/modules/mono/build_scripts/solution_builder.py @@ -1,4 +1,3 @@ - import os @@ -42,44 +41,44 @@ def find_msbuild_unix(): hint_path = os.path.join(hint_dir, "msbuild") if os.path.isfile(hint_path): return hint_path - elif os.path.isfile(hint_path + '.exe'): - return hint_path + '.exe' + elif os.path.isfile(hint_path + ".exe"): + return hint_path + ".exe" - for hint_dir in os.environ['PATH'].split(os.pathsep): + for hint_dir in os.environ["PATH"].split(os.pathsep): hint_dir = hint_dir.strip('"') hint_path = os.path.join(hint_dir, "msbuild") if os.path.isfile(hint_path) and os.access(hint_path, os.X_OK): return hint_path - if os.path.isfile(hint_path + '.exe') and os.access(hint_path + '.exe', os.X_OK): - return hint_path + '.exe' + if os.path.isfile(hint_path + ".exe") and os.access(hint_path + ".exe", os.X_OK): + return hint_path + ".exe" return None def find_msbuild_windows(env): - from . mono_reg_utils import find_mono_root_dir, find_msbuild_tools_path_reg + from .mono_reg_utils import find_mono_root_dir, find_msbuild_tools_path_reg - mono_root = env['mono_prefix'] or find_mono_root_dir(env['bits']) + mono_root = env["mono_prefix"] or find_mono_root_dir(env["bits"]) if not mono_root: - raise RuntimeError('Cannot find mono root directory') + raise RuntimeError("Cannot find mono root directory") - mono_bin_dir = os.path.join(mono_root, 'bin') - msbuild_mono = os.path.join(mono_bin_dir, 'msbuild.bat') + mono_bin_dir = os.path.join(mono_root, "bin") + msbuild_mono = os.path.join(mono_bin_dir, "msbuild.bat") msbuild_tools_path = find_msbuild_tools_path_reg() if msbuild_tools_path: - return (os.path.join(msbuild_tools_path, 'MSBuild.exe'), {}) + return (os.path.join(msbuild_tools_path, "MSBuild.exe"), {}) if os.path.isfile(msbuild_mono): # The (Csc/Vbc/Fsc)ToolExe environment variables are required when # building with Mono's MSBuild. They must point to the batch files # in Mono's bin directory to make sure they are executed with Mono. 
mono_msbuild_env = { - 'CscToolExe': os.path.join(mono_bin_dir, 'csc.bat'), - 'VbcToolExe': os.path.join(mono_bin_dir, 'vbc.bat'), - 'FscToolExe': os.path.join(mono_bin_dir, 'fsharpc.bat') + "CscToolExe": os.path.join(mono_bin_dir, "csc.bat"), + "VbcToolExe": os.path.join(mono_bin_dir, "vbc.bat"), + "FscToolExe": os.path.join(mono_bin_dir, "fsharpc.bat"), } return (msbuild_mono, mono_msbuild_env) @@ -88,7 +87,7 @@ def find_msbuild_windows(env): def run_command(command, args, env_override=None, name=None): def cmd_args_to_str(cmd_args): - return ' '.join([arg if not ' ' in arg else '"%s"' % arg for arg in cmd_args]) + return " ".join([arg if not " " in arg else '"%s"' % arg for arg in cmd_args]) args = [command] + args @@ -99,6 +98,7 @@ def run_command(command, args, env_override=None, name=None): print("Running '%s': %s" % (name, cmd_args_to_str(args))) import subprocess + try: if env_override is None: subprocess.check_call(args) @@ -110,13 +110,13 @@ def run_command(command, args, env_override=None, name=None): def build_solution(env, solution_path, build_config, extra_msbuild_args=[]): global verbose - verbose = env['verbose'] + verbose = env["verbose"] msbuild_env = os.environ.copy() # Needed when running from Developer Command Prompt for VS - if 'PLATFORM' in msbuild_env: - del msbuild_env['PLATFORM'] + if "PLATFORM" in msbuild_env: + del msbuild_env["PLATFORM"] msbuild_args = [] @@ -124,7 +124,7 @@ def build_solution(env, solution_path, build_config, extra_msbuild_args=[]): if dotnet_cli: msbuild_path = dotnet_cli - msbuild_args += ["msbuild"] # `dotnet msbuild` command + msbuild_args += ["msbuild"] # `dotnet msbuild` command else: # Find MSBuild if os.name == "nt": @@ -138,7 +138,7 @@ def build_solution(env, solution_path, build_config, extra_msbuild_args=[]): if msbuild_path is None: raise RuntimeError("Cannot find MSBuild executable") - print('MSBuild path: ' + msbuild_path) + print("MSBuild path: " + msbuild_path) # Build solution @@ -147,4 +147,4 @@ def build_solution(env, solution_path, build_config, extra_msbuild_args=[]): msbuild_args += [solution_path, "/t:%s" % ",".join(targets), "/p:Configuration=" + build_config] msbuild_args += extra_msbuild_args - run_command(msbuild_path, msbuild_args, env_override=msbuild_env, name='msbuild') + run_command(msbuild_path, msbuild_args, env_override=msbuild_env, name="msbuild") diff --git a/modules/mono/build_scripts/tls_configure.py b/modules/mono/build_scripts/tls_configure.py index 622280b00b..522be4b29a 100644 --- a/modules/mono/build_scripts/tls_configure.py +++ b/modules/mono/build_scripts/tls_configure.py @@ -1,36 +1,37 @@ from __future__ import print_function + def supported(result): - return 'supported' if result else 'not supported' + return "supported" if result else "not supported" def check_cxx11_thread_local(conf): - print('Checking for `thread_local` support...', end=" ") - result = conf.TryCompile('thread_local int foo = 0; int main() { return foo; }', '.cpp') + print("Checking for `thread_local` support...", end=" ") + result = conf.TryCompile("thread_local int foo = 0; int main() { return foo; }", ".cpp") print(supported(result)) return bool(result) def check_declspec_thread(conf): - print('Checking for `__declspec(thread)` support...', end=" ") - result = conf.TryCompile('__declspec(thread) int foo = 0; int main() { return foo; }', '.cpp') + print("Checking for `__declspec(thread)` support...", end=" ") + result = conf.TryCompile("__declspec(thread) int foo = 0; int main() { return foo; }", ".cpp") 
print(supported(result)) return bool(result) def check_gcc___thread(conf): - print('Checking for `__thread` support...', end=" ") - result = conf.TryCompile('__thread int foo = 0; int main() { return foo; }', '.cpp') + print("Checking for `__thread` support...", end=" ") + result = conf.TryCompile("__thread int foo = 0; int main() { return foo; }", ".cpp") print(supported(result)) return bool(result) def configure(conf): if check_cxx11_thread_local(conf): - conf.env.Append(CPPDEFINES=['HAVE_CXX11_THREAD_LOCAL']) + conf.env.Append(CPPDEFINES=["HAVE_CXX11_THREAD_LOCAL"]) else: if conf.env.msvc: if check_declspec_thread(conf): - conf.env.Append(CPPDEFINES=['HAVE_DECLSPEC_THREAD']) + conf.env.Append(CPPDEFINES=["HAVE_DECLSPEC_THREAD"]) elif check_gcc___thread(conf): - conf.env.Append(CPPDEFINES=['HAVE_GCC___THREAD']) + conf.env.Append(CPPDEFINES=["HAVE_GCC___THREAD"]) diff --git a/modules/mono/config.py b/modules/mono/config.py index f24c0f5711..775307e2c9 100644 --- a/modules/mono/config.py +++ b/modules/mono/config.py @@ -1,5 +1,4 @@ - -supported_platforms = ['windows', 'osx', 'x11', 'server', 'android', 'haiku', 'javascript', 'iphone'] +supported_platforms = ["windows", "osx", "x11", "server", "android", "haiku", "javascript", "iphone"] def can_build(env, platform): @@ -7,18 +6,18 @@ def can_build(env, platform): def configure(env): - platform = env['platform'] + platform = env["platform"] if platform not in supported_platforms: - raise RuntimeError('This module does not currently support building for this platform') + raise RuntimeError("This module does not currently support building for this platform") env.use_ptrcall = True - env.add_module_version_string('mono') + env.add_module_version_string("mono") from SCons.Script import BoolVariable, PathVariable, Variables, Help - default_mono_static = platform in ['iphone', 'javascript'] - default_mono_bundles_zlib = platform in ['javascript'] + default_mono_static = platform in ["iphone", "javascript"] + default_mono_bundles_zlib = platform in ["javascript"] envvars = Variables() envvars.Add( @@ -39,29 +38,33 @@ def configure(env): ) # TODO: It would be great if this could be detected automatically instead - envvars.Add(BoolVariable('mono_bundles_zlib', 'Specify if the Mono runtime was built with bundled zlib', default_mono_bundles_zlib)) + envvars.Add( + BoolVariable( + "mono_bundles_zlib", "Specify if the Mono runtime was built with bundled zlib", default_mono_bundles_zlib + ) + ) envvars.Update(env) Help(envvars.GenerateHelpText(env)) - if env['mono_bundles_zlib']: + if env["mono_bundles_zlib"]: # Mono may come with zlib bundled for WASM or on newer version when built with MinGW. - print('This Mono runtime comes with zlib bundled. Disabling \'builtin_zlib\'...') - env['builtin_zlib'] = False + print("This Mono runtime comes with zlib bundled. Disabling 'builtin_zlib'...") + env["builtin_zlib"] = False thirdparty_zlib_dir = "#thirdparty/zlib/" env.Prepend(CPPPATH=[thirdparty_zlib_dir]) def get_doc_classes(): return [ - '@C#', - 'CSharpScript', - 'GodotSharp', + "@C#", + "CSharpScript", + "GodotSharp", ] def get_doc_path(): - return 'doc_classes' + return "doc_classes" def is_enabled(): diff --git a/modules/ogg/SCsub b/modules/ogg/SCsub index 44c7963cd3..e768fb4ae8 100644 --- a/modules/ogg/SCsub +++ b/modules/ogg/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") # Only kept to build the thirdparty library used by the theora and webm # modules. 
@@ -9,7 +9,7 @@ Import('env_modules') env_ogg = env_modules.Clone() # Thirdparty source files -if env['builtin_libogg']: +if env["builtin_libogg"]: thirdparty_dir = "#thirdparty/libogg/" thirdparty_sources = [ "bitwise.c", diff --git a/modules/ogg/config.py b/modules/ogg/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/ogg/config.py +++ b/modules/ogg/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/opensimplex/SCsub b/modules/opensimplex/SCsub index 311d33b047..52d8b145ef 100644 --- a/modules/opensimplex/SCsub +++ b/modules/opensimplex/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_opensimplex = env_modules.Clone() diff --git a/modules/opensimplex/config.py b/modules/opensimplex/config.py index c1010ad433..8584b2bcb3 100644 --- a/modules/opensimplex/config.py +++ b/modules/opensimplex/config.py @@ -1,14 +1,14 @@ def can_build(env, platform): - return True + return True + def configure(env): - pass + pass + def get_doc_classes(): - return [ - "NoiseTexture", - "OpenSimplexNoise" - ] + return ["NoiseTexture", "OpenSimplexNoise"] + def get_doc_path(): return "doc_classes" diff --git a/modules/opus/SCsub b/modules/opus/SCsub index fec2911d6d..e51590d808 100644 --- a/modules/opus/SCsub +++ b/modules/opus/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") # Only kept to build the thirdparty library used by the webm module. # AudioStreamOpus was dropped in 3.0 due to incompatibility with the new audio @@ -10,11 +10,10 @@ Import('env_modules') env_opus = env_modules.Clone() # Thirdparty source files -if env['builtin_opus']: +if env["builtin_opus"]: thirdparty_dir = "#thirdparty/opus/" thirdparty_sources = [ - # Sync with opus_sources.mk "opus.c", "opus_decoder.c", @@ -23,17 +22,14 @@ if env['builtin_opus']: "opus_multistream_encoder.c", "opus_multistream_decoder.c", "repacketizer.c", - "analysis.c", "mlp.c", "mlp_data.c", - # Sync with libopusfile Makefile.am "info.c", "internal.c", "opusfile.c", "stream.c", - # Sync with celt_sources.mk "celt/bands.c", "celt/celt.c", @@ -53,12 +49,11 @@ if env['builtin_opus']: "celt/quant_bands.c", "celt/rate.c", "celt/vq.c", - #"celt/arm/arm_celt_map.c", - #"celt/arm/armcpu.c", - #"celt/arm/celt_ne10_fft.c", - #"celt/arm/celt_ne10_mdct.c", - #"celt/arm/celt_neon_intr.c", - + # "celt/arm/arm_celt_map.c", + # "celt/arm/armcpu.c", + # "celt/arm/celt_ne10_fft.c", + # "celt/arm/celt_ne10_mdct.c", + # "celt/arm/celt_neon_intr.c", # Sync with silk_sources.mk "silk/CNG.c", "silk/code_signs.c", @@ -207,7 +202,7 @@ if env['builtin_opus']: thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources + opus_sources_silk] # also requires libogg - if env['builtin_libogg']: + if env["builtin_libogg"]: env_opus.Prepend(CPPPATH=["#thirdparty/libogg"]) env_opus.Append(CPPDEFINES=["HAVE_CONFIG_H"]) @@ -223,14 +218,14 @@ if env['builtin_opus']: env_opus.Prepend(CPPPATH=[thirdparty_dir + "/" + dir for dir in thirdparty_include_paths]) if env["platform"] == "android": - if ("android_arch" in env and env["android_arch"] == "armv7"): + if "android_arch" in env and env["android_arch"] == "armv7": env_opus.Append(CPPDEFINES=["OPUS_ARM_OPT"]) - elif ("android_arch" in env and env["android_arch"] == "arm64v8"): + elif "android_arch" in env and env["android_arch"] == "arm64v8": env_opus.Append(CPPDEFINES=["OPUS_ARM64_OPT"]) elif env["platform"] == "iphone": 
- if ("arch" in env and env["arch"] == "arm"): + if "arch" in env and env["arch"] == "arm": env_opus.Append(CPPDEFINES=["OPUS_ARM_OPT"]) - elif ("arch" in env and env["arch"] == "arm64"): + elif "arch" in env and env["arch"] == "arm64": env_opus.Append(CPPDEFINES=["OPUS_ARM64_OPT"]) env_thirdparty = env_opus.Clone() diff --git a/modules/opus/config.py b/modules/opus/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/opus/config.py +++ b/modules/opus/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/pvr/SCsub b/modules/pvr/SCsub index 18da38fbbd..e0baf851f1 100644 --- a/modules/pvr/SCsub +++ b/modules/pvr/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_pvr = env_modules.Clone() diff --git a/modules/pvr/config.py b/modules/pvr/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/pvr/config.py +++ b/modules/pvr/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/recast/SCsub b/modules/recast/SCsub index 94d9968164..5ef7e0b489 100644 --- a/modules/recast/SCsub +++ b/modules/recast/SCsub @@ -1,25 +1,25 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_recast = env_modules.Clone() # Thirdparty source files -if env['builtin_recast']: +if env["builtin_recast"]: thirdparty_dir = "#thirdparty/recastnavigation/Recast/" thirdparty_sources = [ - "Source/Recast.cpp", - "Source/RecastAlloc.cpp", - "Source/RecastArea.cpp", - "Source/RecastAssert.cpp", - "Source/RecastContour.cpp", - "Source/RecastFilter.cpp", - "Source/RecastLayers.cpp", - "Source/RecastMesh.cpp", - "Source/RecastMeshDetail.cpp", - "Source/RecastRasterization.cpp", - "Source/RecastRegion.cpp", + "Source/Recast.cpp", + "Source/RecastAlloc.cpp", + "Source/RecastArea.cpp", + "Source/RecastAssert.cpp", + "Source/RecastContour.cpp", + "Source/RecastFilter.cpp", + "Source/RecastLayers.cpp", + "Source/RecastMesh.cpp", + "Source/RecastMeshDetail.cpp", + "Source/RecastRasterization.cpp", + "Source/RecastRegion.cpp", ] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] diff --git a/modules/recast/config.py b/modules/recast/config.py index 098f1eafa9..53b8f2f2e3 100644 --- a/modules/recast/config.py +++ b/modules/recast/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return env['tools'] + return env["tools"] + def configure(env): pass diff --git a/modules/regex/SCsub b/modules/regex/SCsub index 6238cd3d9f..753650adcb 100644 --- a/modules/regex/SCsub +++ b/modules/regex/SCsub @@ -1,16 +1,16 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_regex = env_modules.Clone() -if env['builtin_pcre2']: - thirdparty_dir = '#thirdparty/pcre2/src/' - thirdparty_flags = ['PCRE2_STATIC', 'HAVE_CONFIG_H'] +if env["builtin_pcre2"]: + thirdparty_dir = "#thirdparty/pcre2/src/" + thirdparty_flags = ["PCRE2_STATIC", "HAVE_CONFIG_H"] - if env['builtin_pcre2_with_jit']: - thirdparty_flags.append('SUPPORT_JIT') + if env["builtin_pcre2_with_jit"]: + thirdparty_flags.append("SUPPORT_JIT") thirdparty_sources = [ "pcre2_auto_possess.c", @@ -24,7 +24,7 @@ if env['builtin_pcre2']: "pcre2_extuni.c", "pcre2_find_bracket.c", "pcre2_jit_compile.c", - #"pcre2_jit_match.c", "pcre2_jit_misc.c", # these files are included in pcre2_jit_compile.c. 
+ # "pcre2_jit_match.c", "pcre2_jit_misc.c", # these files are included in pcre2_jit_compile.c. "pcre2_maketables.c", "pcre2_match.c", "pcre2_match_data.c", diff --git a/modules/regex/config.py b/modules/regex/config.py index 42cfe3b43c..df9f44cb95 100644 --- a/modules/regex/config.py +++ b/modules/regex/config.py @@ -1,14 +1,17 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "RegEx", "RegExMatch", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/squish/SCsub b/modules/squish/SCsub index 15320bcd0c..b31032403f 100644 --- a/modules/squish/SCsub +++ b/modules/squish/SCsub @@ -1,12 +1,12 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_squish = env_modules.Clone() # Thirdparty source files -if env['builtin_squish']: +if env["builtin_squish"]: thirdparty_dir = "#thirdparty/squish/" thirdparty_sources = [ "alpha.cpp", diff --git a/modules/squish/config.py b/modules/squish/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/squish/config.py +++ b/modules/squish/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/stb_vorbis/SCsub b/modules/stb_vorbis/SCsub index d14939a3b1..266c87c802 100644 --- a/modules/stb_vorbis/SCsub +++ b/modules/stb_vorbis/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_stb_vorbis = env_modules.Clone() diff --git a/modules/stb_vorbis/config.py b/modules/stb_vorbis/config.py index 200b8dfd50..1eb0a8cf33 100644 --- a/modules/stb_vorbis/config.py +++ b/modules/stb_vorbis/config.py @@ -1,13 +1,16 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "AudioStreamOGGVorbis", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/svg/SCsub b/modules/svg/SCsub index 9324c1634b..3d17f2dcf8 100644 --- a/modules/svg/SCsub +++ b/modules/svg/SCsub @@ -1,15 +1,13 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_svg = env_modules.Clone() # Thirdparty source files thirdparty_dir = "#thirdparty/nanosvg/" -thirdparty_sources = [ - "nanosvg.cc" -] +thirdparty_sources = ["nanosvg.cc"] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] env_svg.Prepend(CPPPATH=[thirdparty_dir]) diff --git a/modules/svg/config.py b/modules/svg/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/svg/config.py +++ b/modules/svg/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/tga/SCsub b/modules/tga/SCsub index 7e405f405c..067caa6ea0 100644 --- a/modules/tga/SCsub +++ b/modules/tga/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_tga = env_modules.Clone() diff --git a/modules/tga/config.py b/modules/tga/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/tga/config.py +++ b/modules/tga/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/theora/SCsub b/modules/theora/SCsub index ff65d2f8ec..a01e65b4b0 100644 --- a/modules/theora/SCsub +++ b/modules/theora/SCsub @@ -1,71 +1,71 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_theora = env_modules.Clone() # Thirdparty source files -if env['builtin_libtheora']: +if 
env["builtin_libtheora"]: thirdparty_dir = "#thirdparty/libtheora/" thirdparty_sources = [ - #"analyze.c", - #"apiwrapper.c", + # "analyze.c", + # "apiwrapper.c", "bitpack.c", "cpu.c", - #"decapiwrapper.c", + # "decapiwrapper.c", "decinfo.c", "decode.c", "dequant.c", - #"encapiwrapper.c", - #"encfrag.c", - #"encinfo.c", - #"encode.c", - #"encoder_disabled.c", - #"enquant.c", - #"fdct.c", + # "encapiwrapper.c", + # "encfrag.c", + # "encinfo.c", + # "encode.c", + # "encoder_disabled.c", + # "enquant.c", + # "fdct.c", "fragment.c", "huffdec.c", - #"huffenc.c", + # "huffenc.c", "idct.c", "info.c", "internal.c", - #"mathops.c", - #"mcenc.c", + # "mathops.c", + # "mcenc.c", "quant.c", - #"rate.c", + # "rate.c", "state.c", - #"tokenize.c", + # "tokenize.c", ] thirdparty_sources_x86 = [ - #"x86/mmxencfrag.c", - #"x86/mmxfdct.c", + # "x86/mmxencfrag.c", + # "x86/mmxfdct.c", "x86/mmxfrag.c", "x86/mmxidct.c", "x86/mmxstate.c", - #"x86/sse2fdct.c", - #"x86/x86enc.c", + # "x86/sse2fdct.c", + # "x86/x86enc.c", "x86/x86state.c", ] thirdparty_sources_x86_vc = [ - #"x86_vc/mmxencfrag.c", - #"x86_vc/mmxfdct.c", + # "x86_vc/mmxencfrag.c", + # "x86_vc/mmxfdct.c", "x86_vc/mmxfrag.c", "x86_vc/mmxidct.c", "x86_vc/mmxstate.c", - #"x86_vc/x86enc.c", + # "x86_vc/x86enc.c", "x86_vc/x86state.c", ] - if (env["x86_libtheora_opt_gcc"]): + if env["x86_libtheora_opt_gcc"]: thirdparty_sources += thirdparty_sources_x86 - if (env["x86_libtheora_opt_vc"]): + if env["x86_libtheora_opt_vc"]: thirdparty_sources += thirdparty_sources_x86_vc - if (env["x86_libtheora_opt_gcc"] or env["x86_libtheora_opt_vc"]): + if env["x86_libtheora_opt_gcc"] or env["x86_libtheora_opt_vc"]: env_theora.Append(CPPDEFINES=["OC_X86_ASM"]) thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] @@ -73,9 +73,9 @@ if env['builtin_libtheora']: env_theora.Prepend(CPPPATH=[thirdparty_dir]) # also requires libogg and libvorbis - if env['builtin_libogg']: + if env["builtin_libogg"]: env_theora.Prepend(CPPPATH=["#thirdparty/libogg"]) - if env['builtin_libvorbis']: + if env["builtin_libvorbis"]: env_theora.Prepend(CPPPATH=["#thirdparty/libvorbis"]) env_thirdparty = env_theora.Clone() diff --git a/modules/theora/config.py b/modules/theora/config.py index c7713d7607..413acce2df 100644 --- a/modules/theora/config.py +++ b/modules/theora/config.py @@ -1,13 +1,16 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "VideoStreamTheora", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/tinyexr/SCsub b/modules/tinyexr/SCsub index 1eb8b54a15..84b3b4015b 100644 --- a/modules/tinyexr/SCsub +++ b/modules/tinyexr/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_tinyexr = env_modules.Clone() diff --git a/modules/tinyexr/config.py b/modules/tinyexr/config.py index 098f1eafa9..53b8f2f2e3 100644 --- a/modules/tinyexr/config.py +++ b/modules/tinyexr/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return env['tools'] + return env["tools"] + def configure(env): pass diff --git a/modules/upnp/SCsub b/modules/upnp/SCsub index 3f56a69594..2e129e15ca 100644 --- a/modules/upnp/SCsub +++ b/modules/upnp/SCsub @@ -1,13 +1,13 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_upnp = env_modules.Clone() # Thirdparty source files -if env['builtin_miniupnpc']: +if env["builtin_miniupnpc"]: thirdparty_dir = "#thirdparty/miniupnpc/" thirdparty_sources = [ "miniupnpc.c", 
diff --git a/modules/upnp/config.py b/modules/upnp/config.py index 8724ff1a51..bfb9a59703 100644 --- a/modules/upnp/config.py +++ b/modules/upnp/config.py @@ -1,14 +1,14 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): - return [ - "UPNP", - "UPNPDevice" - ] + return ["UPNP", "UPNPDevice"] + def get_doc_path(): return "doc_classes" diff --git a/modules/vhacd/SCsub b/modules/vhacd/SCsub index 685976dc33..ecd432b275 100644 --- a/modules/vhacd/SCsub +++ b/modules/vhacd/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_vhacd = env_modules.Clone() @@ -19,7 +19,7 @@ thirdparty_sources = [ "src/btAlignedAllocator.cpp", "src/vhacdRaycastMesh.cpp", "src/VHACD.cpp", - "src/btConvexHullComputer.cpp" + "src/btConvexHullComputer.cpp", ] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] diff --git a/modules/vhacd/config.py b/modules/vhacd/config.py index 9ced70d2fb..d22f9454ed 100644 --- a/modules/vhacd/config.py +++ b/modules/vhacd/config.py @@ -1,6 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass - diff --git a/modules/visual_script/SCsub b/modules/visual_script/SCsub index 3c3d2caa57..16faea08d7 100644 --- a/modules/visual_script/SCsub +++ b/modules/visual_script/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_vs = env_modules.Clone() diff --git a/modules/visual_script/config.py b/modules/visual_script/config.py index 087a13a200..bd459ca344 100644 --- a/modules/visual_script/config.py +++ b/modules/visual_script/config.py @@ -1,9 +1,11 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): return [ "@VisualScript", @@ -56,5 +58,6 @@ def get_doc_classes(): "VisualScriptYield", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/vorbis/SCsub b/modules/vorbis/SCsub index bde4359595..05d46757d3 100644 --- a/modules/vorbis/SCsub +++ b/modules/vorbis/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") # Only kept to build the thirdparty library used by the theora and webm # modules. We now use stb_vorbis for AudioStreamOGGVorbis. 
@@ -11,11 +11,11 @@ env_vorbis = env_modules.Clone() stub = True # Thirdparty source files -if env['builtin_libvorbis']: +if env["builtin_libvorbis"]: thirdparty_dir = "#thirdparty/libvorbis/" thirdparty_sources = [ - #"analysis.c", - #"barkmel.c", + # "analysis.c", + # "barkmel.c", "bitrate.c", "block.c", "codebook.c", @@ -29,14 +29,14 @@ if env['builtin_libvorbis']: "mapping0.c", "mdct.c", "psy.c", - #"psytune.c", + # "psytune.c", "registry.c", "res0.c", "sharedbook.c", "smallft.c", "synthesis.c", - #"tone.c", - #"vorbisenc.c", + # "tone.c", + # "vorbisenc.c", "vorbisfile.c", "window.c", ] @@ -46,7 +46,7 @@ if env['builtin_libvorbis']: env_vorbis.Prepend(CPPPATH=[thirdparty_dir]) # also requires libogg - if env['builtin_libogg']: + if env["builtin_libogg"]: env_vorbis.Prepend(CPPPATH=["#thirdparty/libogg"]) env_thirdparty = env_vorbis.Clone() diff --git a/modules/vorbis/config.py b/modules/vorbis/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/vorbis/config.py +++ b/modules/vorbis/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/webm/SCsub b/modules/webm/SCsub index 32e6727656..247b4ead37 100644 --- a/modules/webm/SCsub +++ b/modules/webm/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_webm = env_modules.Clone() @@ -18,14 +18,14 @@ thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] env_webm.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "libwebm/"]) # also requires libogg, libvorbis and libopus -if env['builtin_libogg']: +if env["builtin_libogg"]: env_webm.Prepend(CPPPATH=["#thirdparty/libogg"]) -if env['builtin_libvorbis']: +if env["builtin_libvorbis"]: env_webm.Prepend(CPPPATH=["#thirdparty/libvorbis"]) -if env['builtin_opus']: +if env["builtin_opus"]: env_webm.Prepend(CPPPATH=["#thirdparty/opus"]) -if env['builtin_libvpx']: +if env["builtin_libvpx"]: env_webm.Prepend(CPPPATH=["#thirdparty/libvpx"]) SConscript("libvpx/SCsub") diff --git a/modules/webm/config.py b/modules/webm/config.py index ba4dcce2f5..93b49d177a 100644 --- a/modules/webm/config.py +++ b/modules/webm/config.py @@ -1,13 +1,16 @@ def can_build(env, platform): - return platform not in ['iphone'] + return platform not in ["iphone"] + def configure(env): pass + def get_doc_classes(): return [ "VideoStreamWebm", ] + def get_doc_path(): return "doc_classes" diff --git a/modules/webm/libvpx/SCsub b/modules/webm/libvpx/SCsub index 14fa6c1268..e85d430861 100644 --- a/modules/webm/libvpx/SCsub +++ b/modules/webm/libvpx/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") # Thirdparty sources @@ -9,9 +9,7 @@ libvpx_dir = "#thirdparty/libvpx/" libvpx_sources = [ "vp8/vp8_dx_iface.c", - "vp8/common/generic/systemdependent.c", - "vp8/common/alloccommon.c", "vp8/common/blockd.c", "vp8/common/copy_c.c", @@ -37,16 +35,12 @@ libvpx_sources = [ "vp8/common/swapyv12buffer.c", "vp8/common/treecoder.c", "vp8/common/vp8_loopfilter.c", - "vp8/decoder/dboolhuff.c", "vp8/decoder/decodeframe.c", "vp8/decoder/decodemv.c", "vp8/decoder/detokenize.c", "vp8/decoder/onyxd_if.c", - - "vp9/vp9_dx_iface.c", - "vp9/common/vp9_alloccommon.c", "vp9/common/vp9_blockd.c", "vp9/common/vp9_common_data.c", @@ -69,21 +63,16 @@ libvpx_sources = [ "vp9/common/vp9_seg_common.c", "vp9/common/vp9_thread_common.c", "vp9/common/vp9_tile_common.c", - "vp9/decoder/vp9_decodeframe.c", "vp9/decoder/vp9_decodemv.c", 
"vp9/decoder/vp9_decoder.c", "vp9/decoder/vp9_detokenize.c", "vp9/decoder/vp9_dsubexp.c", "vp9/decoder/vp9_dthread.c", - - "vpx/src/vpx_codec.c", "vpx/src/vpx_decoder.c", "vpx/src/vpx_image.c", "vpx/src/vpx_psnr.c", - - "vpx_dsp/bitreader.c", "vpx_dsp/bitreader_buffer.c", "vpx_dsp/intrapred.c", @@ -92,18 +81,11 @@ libvpx_sources = [ "vpx_dsp/prob.c", "vpx_dsp/vpx_convolve.c", "vpx_dsp/vpx_dsp_rtcd.c", - - "vpx_mem/vpx_mem.c", - - "vpx_scale/vpx_scale_rtcd.c", - "vpx_scale/generic/yv12config.c", "vpx_scale/generic/yv12extend.c", - - - "vpx_util/vpx_thread.c" + "vpx_util/vpx_thread.c", ] libvpx_sources_mt = [ @@ -114,30 +96,19 @@ libvpx_sources_intrin_x86 = [ "vp8/common/x86/filter_x86.c", "vp8/common/x86/loopfilter_x86.c", "vp8/common/x86/vp8_asm_stubs.c", - - - "vpx_dsp/x86/vpx_asm_stubs.c" + "vpx_dsp/x86/vpx_asm_stubs.c", ] libvpx_sources_intrin_x86_mmx = [ "vp8/common/x86/idct_blk_mmx.c", ] libvpx_sources_intrin_x86_sse2 = [ "vp8/common/x86/idct_blk_sse2.c", - - "vp9/common/x86/vp9_idct_intrin_sse2.c", - - "vpx_dsp/x86/inv_txfm_sse2.c", "vpx_dsp/x86/loopfilter_sse2.c", ] -libvpx_sources_intrin_x86_ssse3 = [ - "vpx_dsp/x86/vpx_subpixel_8t_intrin_ssse3.c" -] -libvpx_sources_intrin_x86_avx2 = [ - "vpx_dsp/x86/loopfilter_avx2.c", - "vpx_dsp/x86/vpx_subpixel_8t_intrin_avx2.c" -] +libvpx_sources_intrin_x86_ssse3 = ["vpx_dsp/x86/vpx_subpixel_8t_intrin_ssse3.c"] +libvpx_sources_intrin_x86_avx2 = ["vpx_dsp/x86/loopfilter_avx2.c", "vpx_dsp/x86/vpx_subpixel_8t_intrin_avx2.c"] libvpx_sources_x86asm = [ "vp8/common/x86/copy_sse2.asm", "vp8/common/x86/copy_sse3.asm", @@ -153,8 +124,6 @@ libvpx_sources_x86asm = [ "vp8/common/x86/subpixel_sse2.asm", "vp8/common/x86/subpixel_ssse3.asm", "vp8/common/x86/vp8_loopfilter_mmx.asm", - - "vpx_dsp/x86/intrapred_sse2.asm", "vpx_dsp/x86/intrapred_ssse3.asm", "vpx_dsp/x86/inv_wht_sse2.asm", @@ -163,21 +132,12 @@ libvpx_sources_x86asm = [ "vpx_dsp/x86/vpx_subpixel_8t_ssse3.asm", "vpx_dsp/x86/vpx_subpixel_bilinear_sse2.asm", "vpx_dsp/x86/vpx_subpixel_bilinear_ssse3.asm", - - - "vpx_ports/emms.asm" -] -libvpx_sources_x86_64asm = [ - "vp8/common/x86/loopfilter_block_sse2_x86_64.asm", - - - "vpx_dsp/x86/inv_txfm_ssse3_x86_64.asm" + "vpx_ports/emms.asm", ] +libvpx_sources_x86_64asm = ["vp8/common/x86/loopfilter_block_sse2_x86_64.asm", "vpx_dsp/x86/inv_txfm_ssse3_x86_64.asm"] libvpx_sources_arm = [ "vpx_ports/arm_cpudetect.c", - - "vp8/common/arm/loopfilter_arm.c", ] libvpx_sources_arm_neon = [ @@ -196,12 +156,8 @@ libvpx_sources_arm_neon = [ "vp8/common/arm/neon/shortidct4x4llm_neon.c", "vp8/common/arm/neon/sixtappredict_neon.c", "vp8/common/arm/neon/vp8_loopfilter_neon.c", - - "vp9/common/arm/neon/vp9_iht4x4_add_neon.c", "vp9/common/arm/neon/vp9_iht8x8_add_neon.c", - - "vpx_dsp/arm/idct16x16_1_add_neon.c", "vpx_dsp/arm/idct16x16_add_neon.c", "vpx_dsp/arm/idct16x16_neon.c", @@ -220,22 +176,22 @@ libvpx_sources_arm_neon = [ "vpx_dsp/arm/vpx_convolve8_neon.c", "vpx_dsp/arm/vpx_convolve_avg_neon.c", "vpx_dsp/arm/vpx_convolve_copy_neon.c", - "vpx_dsp/arm/vpx_convolve_neon.c" + "vpx_dsp/arm/vpx_convolve_neon.c", ] libvpx_sources_arm_neon_gas = [ "vpx_dsp/arm/gas/intrapred_neon_asm.s", "vpx_dsp/arm/gas/loopfilter_mb_neon.s", - "vpx_dsp/arm/gas/save_reg_neon.s" + "vpx_dsp/arm/gas/save_reg_neon.s", ] libvpx_sources_arm_neon_armasm_ms = [ "vpx_dsp/arm/armasm_ms/intrapred_neon_asm.asm", "vpx_dsp/arm/armasm_ms/loopfilter_mb_neon.asm", - "vpx_dsp/arm/armasm_ms/save_reg_neon.asm" + "vpx_dsp/arm/armasm_ms/save_reg_neon.asm", ] libvpx_sources_arm_neon_gas_apple = [ 
"vpx_dsp/arm/gas_apple/intrapred_neon_asm.s", "vpx_dsp/arm/gas_apple/loopfilter_mb_neon.s", - "vpx_dsp/arm/gas_apple/save_reg_neon.s" + "vpx_dsp/arm/gas_apple/save_reg_neon.s", ] libvpx_sources = [libvpx_dir + file for file in libvpx_sources] @@ -258,25 +214,43 @@ env_libvpx = env_modules.Clone() env_libvpx.disable_warnings() env_libvpx.Prepend(CPPPATH=[libvpx_dir]) -webm_multithread = env["platform"] != 'javascript' +webm_multithread = env["platform"] != "javascript" cpu_bits = env["bits"] webm_cpu_x86 = False webm_cpu_arm = False -if env["platform"] == 'uwp': - if 'arm' in env["PROGSUFFIX"]: +if env["platform"] == "uwp": + if "arm" in env["PROGSUFFIX"]: webm_cpu_arm = True else: webm_cpu_x86 = True else: import platform - is_x11_or_server_arm = ((env["platform"] == 'x11' or env["platform"] == 'server') and (platform.machine().startswith('arm') or platform.machine().startswith('aarch'))) - is_ios_x86 = (env["platform"] == 'iphone' and ("arch" in env and env["arch"].startswith('x86'))) - is_android_x86 = (env["platform"] == 'android' and env["android_arch"].startswith('x86')) + + is_x11_or_server_arm = (env["platform"] == "x11" or env["platform"] == "server") and ( + platform.machine().startswith("arm") or platform.machine().startswith("aarch") + ) + is_ios_x86 = env["platform"] == "iphone" and ("arch" in env and env["arch"].startswith("x86")) + is_android_x86 = env["platform"] == "android" and env["android_arch"].startswith("x86") if is_android_x86: - cpu_bits = '32' if env["android_arch"] == 'x86' else '64' - webm_cpu_x86 = not is_x11_or_server_arm and (cpu_bits == '32' or cpu_bits == '64') and (env["platform"] == 'windows' or env["platform"] == 'x11' or env["platform"] == 'osx' or env["platform"] == 'haiku' or is_android_x86 or is_ios_x86) - webm_cpu_arm = is_x11_or_server_arm or (not is_ios_x86 and env["platform"] == 'iphone') or (not is_android_x86 and env["platform"] == 'android') + cpu_bits = "32" if env["android_arch"] == "x86" else "64" + webm_cpu_x86 = ( + not is_x11_or_server_arm + and (cpu_bits == "32" or cpu_bits == "64") + and ( + env["platform"] == "windows" + or env["platform"] == "x11" + or env["platform"] == "osx" + or env["platform"] == "haiku" + or is_android_x86 + or is_ios_x86 + ) + ) + webm_cpu_arm = ( + is_x11_or_server_arm + or (not is_ios_x86 and env["platform"] == "iphone") + or (not is_android_x86 and env["platform"] == "android") + ) if webm_cpu_x86: import subprocess @@ -306,38 +280,43 @@ if webm_cpu_x86: webm_simd_optimizations = False if webm_cpu_x86: - if env["platform"] == 'windows' or env["platform"] == 'uwp': - env_libvpx["ASFORMAT"] = 'win' - elif env["platform"] == 'osx' or env["platform"] == "iphone": - env_libvpx["ASFORMAT"] = 'macho' + if env["platform"] == "windows" or env["platform"] == "uwp": + env_libvpx["ASFORMAT"] = "win" + elif env["platform"] == "osx" or env["platform"] == "iphone": + env_libvpx["ASFORMAT"] = "macho" else: - env_libvpx["ASFORMAT"] = 'elf' + env_libvpx["ASFORMAT"] = "elf" env_libvpx["ASFORMAT"] += cpu_bits - env_libvpx["AS"] = 'yasm' - env_libvpx["ASFLAGS"] = '-I' + libvpx_dir[1:] + ' -f $ASFORMAT -D $ASCPU' - env_libvpx["ASCOM"] = '$AS $ASFLAGS -o $TARGET $SOURCES' + env_libvpx["AS"] = "yasm" + env_libvpx["ASFLAGS"] = "-I" + libvpx_dir[1:] + " -f $ASFORMAT -D $ASCPU" + env_libvpx["ASCOM"] = "$AS $ASFLAGS -o $TARGET $SOURCES" - if cpu_bits == '32': - env_libvpx["ASCPU"] = 'X86_32' - elif cpu_bits == '64': - env_libvpx["ASCPU"] = 'X86_64' + if cpu_bits == "32": + env_libvpx["ASCPU"] = "X86_32" + elif cpu_bits == "64": + 
env_libvpx["ASCPU"] = "X86_64" - env_libvpx.Append(CPPDEFINES=['WEBM_X86ASM']) + env_libvpx.Append(CPPDEFINES=["WEBM_X86ASM"]) webm_simd_optimizations = True if webm_cpu_arm: - if env["platform"] == 'iphone': - env_libvpx["ASFLAGS"] = '-arch armv7' - elif env["platform"] == 'android' and env["android_arch"] == 'armv7' or env["platform"] == 'x11' or env["platform"] == 'server': - env_libvpx["ASFLAGS"] = '-mfpu=neon' - elif env["platform"] == 'uwp': - env_libvpx["AS"] = 'armasm' - env_libvpx["ASFLAGS"] = '' - env_libvpx["ASCOM"] = '$AS $ASFLAGS -o $TARGET $SOURCES' + if env["platform"] == "iphone": + env_libvpx["ASFLAGS"] = "-arch armv7" + elif ( + env["platform"] == "android" + and env["android_arch"] == "armv7" + or env["platform"] == "x11" + or env["platform"] == "server" + ): + env_libvpx["ASFLAGS"] = "-mfpu=neon" + elif env["platform"] == "uwp": + env_libvpx["AS"] = "armasm" + env_libvpx["ASFLAGS"] = "" + env_libvpx["ASCOM"] = "$AS $ASFLAGS -o $TARGET $SOURCES" - env_libvpx.Append(CPPDEFINES=['WEBM_ARMASM']) + env_libvpx.Append(CPPDEFINES=["WEBM_ARMASM"]) webm_simd_optimizations = True @@ -350,45 +329,49 @@ if webm_multithread: env_libvpx.add_source_files(env.modules_sources, libvpx_sources_mt) if webm_cpu_x86: - is_clang_or_gcc = ('gcc' in os.path.basename(env["CC"])) or ('clang' in os.path.basename(env["CC"])) or ("osxcross" in env) + is_clang_or_gcc = ( + ("gcc" in os.path.basename(env["CC"])) or ("clang" in os.path.basename(env["CC"])) or ("osxcross" in env) + ) env_libvpx_mmx = env_libvpx.Clone() - if cpu_bits == '32' and is_clang_or_gcc: - env_libvpx_mmx.Append(CCFLAGS=['-mmmx']) + if cpu_bits == "32" and is_clang_or_gcc: + env_libvpx_mmx.Append(CCFLAGS=["-mmmx"]) env_libvpx_mmx.add_source_files(env.modules_sources, libvpx_sources_intrin_x86_mmx) env_libvpx_sse2 = env_libvpx.Clone() - if cpu_bits == '32' and is_clang_or_gcc: - env_libvpx_sse2.Append(CCFLAGS=['-msse2']) + if cpu_bits == "32" and is_clang_or_gcc: + env_libvpx_sse2.Append(CCFLAGS=["-msse2"]) env_libvpx_sse2.add_source_files(env.modules_sources, libvpx_sources_intrin_x86_sse2) env_libvpx_ssse3 = env_libvpx.Clone() if is_clang_or_gcc: - env_libvpx_ssse3.Append(CCFLAGS=['-mssse3']) + env_libvpx_ssse3.Append(CCFLAGS=["-mssse3"]) env_libvpx_ssse3.add_source_files(env.modules_sources, libvpx_sources_intrin_x86_ssse3) env_libvpx_avx2 = env_libvpx.Clone() if is_clang_or_gcc: - env_libvpx_avx2.Append(CCFLAGS=['-mavx2']) + env_libvpx_avx2.Append(CCFLAGS=["-mavx2"]) env_libvpx_avx2.add_source_files(env.modules_sources, libvpx_sources_intrin_x86_avx2) env_libvpx.add_source_files(env.modules_sources, libvpx_sources_intrin_x86) env_libvpx.add_source_files(env.modules_sources, libvpx_sources_x86asm) - if cpu_bits == '64': + if cpu_bits == "64": env_libvpx.add_source_files(env.modules_sources, libvpx_sources_x86_64asm) elif webm_cpu_arm: env_libvpx.add_source_files(env.modules_sources, libvpx_sources_arm) - if env["platform"] == 'android': + if env["platform"] == "android": env_libvpx.Prepend(CPPPATH=[libvpx_dir + "third_party/android"]) env_libvpx.add_source_files(env.modules_sources, [libvpx_dir + "third_party/android/cpu-features.c"]) env_libvpx_neon = env_libvpx.Clone() env_libvpx_neon.add_source_files(env.modules_sources, libvpx_sources_arm_neon) - if env["platform"] == 'uwp': + if env["platform"] == "uwp": env_libvpx.add_source_files(env.modules_sources, libvpx_sources_arm_neon_armasm_ms) - elif env["platform"] == 'iphone': + elif env["platform"] == "iphone": env_libvpx.add_source_files(env.modules_sources, 
libvpx_sources_arm_neon_gas_apple) - elif (is_x11_or_server_arm and cpu_bits == '32') or (env["platform"] == 'android' and not env["android_arch"] == 'arm64v8'): + elif (is_x11_or_server_arm and cpu_bits == "32") or ( + env["platform"] == "android" and not env["android_arch"] == "arm64v8" + ): env_libvpx.add_source_files(env.modules_sources, libvpx_sources_arm_neon_gas) diff --git a/modules/webp/SCsub b/modules/webp/SCsub index 666628bb44..58f2bb35e6 100644 --- a/modules/webp/SCsub +++ b/modules/webp/SCsub @@ -1,12 +1,12 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_webp = env_modules.Clone() # Thirdparty source files -if env['builtin_libwebp']: +if env["builtin_libwebp"]: thirdparty_dir = "#thirdparty/libwebp/" thirdparty_sources = [ "dec/alpha_dec.c", diff --git a/modules/webp/config.py b/modules/webp/config.py index 1c8cd12a2d..d22f9454ed 100644 --- a/modules/webp/config.py +++ b/modules/webp/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): return True + def configure(env): pass diff --git a/modules/webrtc/SCsub b/modules/webrtc/SCsub index 868553b879..20b4c8f8d2 100644 --- a/modules/webrtc/SCsub +++ b/modules/webrtc/SCsub @@ -1,15 +1,15 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") # Thirdparty source files env_webrtc = env_modules.Clone() use_gdnative = env_webrtc["module_gdnative_enabled"] -if use_gdnative: # GDNative is retained in Javascript for export compatibility - env_webrtc.Append(CPPDEFINES=['WEBRTC_GDNATIVE_ENABLED']) +if use_gdnative: # GDNative is retained in Javascript for export compatibility + env_webrtc.Append(CPPDEFINES=["WEBRTC_GDNATIVE_ENABLED"]) env_webrtc.Prepend(CPPPATH=["#modules/gdnative/include/"]) env_webrtc.add_source_files(env.modules_sources, "*.cpp") diff --git a/modules/webrtc/config.py b/modules/webrtc/config.py index 48b4c33c5d..d73d7ab39d 100644 --- a/modules/webrtc/config.py +++ b/modules/webrtc/config.py @@ -1,15 +1,14 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): - return [ - "WebRTCPeerConnection", - "WebRTCDataChannel", - "WebRTCMultiplayer" - ] + return ["WebRTCPeerConnection", "WebRTCDataChannel", "WebRTCMultiplayer"] + def get_doc_path(): return "doc_classes" diff --git a/modules/websocket/SCsub b/modules/websocket/SCsub index 033169411f..af60055855 100644 --- a/modules/websocket/SCsub +++ b/modules/websocket/SCsub @@ -1,13 +1,13 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") # Thirdparty source files env_ws = env_modules.Clone() -if env['builtin_wslay'] and not env["platform"] == "javascript": # already builtin for javascript +if env["builtin_wslay"] and not env["platform"] == "javascript": # already builtin for javascript wslay_dir = "#thirdparty/wslay/" wslay_sources = [ "wslay_net.c", diff --git a/modules/websocket/config.py b/modules/websocket/config.py index f59ef432b4..d372240d9e 100644 --- a/modules/websocket/config.py +++ b/modules/websocket/config.py @@ -1,16 +1,14 @@ def can_build(env, platform): return True + def configure(env): pass + def get_doc_classes(): - return [ - "WebSocketClient", - "WebSocketMultiplayerPeer", - "WebSocketPeer", - "WebSocketServer" - ] + return ["WebSocketClient", "WebSocketMultiplayerPeer", "WebSocketPeer", "WebSocketServer"] + def get_doc_path(): return "doc_classes" diff --git a/modules/xatlas_unwrap/SCsub b/modules/xatlas_unwrap/SCsub index b242fd4673..c659349d05 100644 --- 
a/modules/xatlas_unwrap/SCsub +++ b/modules/xatlas_unwrap/SCsub @@ -1,12 +1,12 @@ #!/usr/bin/env python -Import('env') -Import('env_modules') +Import("env") +Import("env_modules") env_xatlas_unwrap = env_modules.Clone() # Thirdparty source files -if env['builtin_xatlas']: +if env["builtin_xatlas"]: thirdparty_dir = "#thirdparty/xatlas/" thirdparty_sources = [ "xatlas.cpp", diff --git a/modules/xatlas_unwrap/config.py b/modules/xatlas_unwrap/config.py index bd092bdc16..2e73c51626 100644 --- a/modules/xatlas_unwrap/config.py +++ b/modules/xatlas_unwrap/config.py @@ -1,5 +1,6 @@ def can_build(env, platform): - return (env['tools'] and platform not in ["android", "ios"]) + return env["tools"] and platform not in ["android", "ios"] + def configure(env): pass diff --git a/platform/SCsub b/platform/SCsub index 38bab59d74..ecd0926be7 100644 --- a/platform/SCsub +++ b/platform/SCsub @@ -2,31 +2,31 @@ from compat import open_utf8 -Import('env') +Import("env") env.platform_sources = [] # Register platform-exclusive APIs reg_apis_inc = '#include "register_platform_apis.h"\n' -reg_apis = 'void register_platform_apis() {\n' -unreg_apis = 'void unregister_platform_apis() {\n' +reg_apis = "void register_platform_apis() {\n" +unreg_apis = "void unregister_platform_apis() {\n" for platform in env.platform_apis: platform_dir = env.Dir(platform) - env.add_source_files(env.platform_sources, platform + '/api/api.cpp') - reg_apis += '\tregister_' + platform + '_api();\n' - unreg_apis += '\tunregister_' + platform + '_api();\n' + env.add_source_files(env.platform_sources, platform + "/api/api.cpp") + reg_apis += "\tregister_" + platform + "_api();\n" + unreg_apis += "\tunregister_" + platform + "_api();\n" reg_apis_inc += '#include "' + platform + '/api/api.h"\n' -reg_apis_inc += '\n' -reg_apis += '}\n\n' -unreg_apis += '}\n' +reg_apis_inc += "\n" +reg_apis += "}\n\n" +unreg_apis += "}\n" # NOTE: It is safe to generate this file here, since this is still execute serially -with open_utf8('register_platform_apis.gen.cpp', 'w') as f: +with open_utf8("register_platform_apis.gen.cpp", "w") as f: f.write(reg_apis_inc) f.write(reg_apis) f.write(unreg_apis) -env.add_source_files(env.platform_sources, 'register_platform_apis.gen.cpp') +env.add_source_files(env.platform_sources, "register_platform_apis.gen.cpp") -lib = env.add_library('platform', env.platform_sources) +lib = env.add_library("platform", env.platform_sources) env.Prepend(LIBS=[lib]) diff --git a/platform/android/SCsub b/platform/android/SCsub index de50bd9f4f..d98871a1ed 100644 --- a/platform/android/SCsub +++ b/platform/android/SCsub @@ -1,23 +1,23 @@ #!/usr/bin/env python -Import('env') +Import("env") android_files = [ - 'os_android.cpp', - 'file_access_android.cpp', - 'audio_driver_opensl.cpp', - 'file_access_jandroid.cpp', - 'dir_access_jandroid.cpp', - 'thread_jandroid.cpp', - 'net_socket_android.cpp', - 'audio_driver_jandroid.cpp', - 'java_godot_lib_jni.cpp', - 'java_class_wrapper.cpp', - 'java_godot_wrapper.cpp', - 'java_godot_io_wrapper.cpp', - 'jni_utils.cpp', - 'android_keys_utils.cpp', - 'plugin/godot_plugin_jni.cpp', + "os_android.cpp", + "file_access_android.cpp", + "audio_driver_opensl.cpp", + "file_access_jandroid.cpp", + "dir_access_jandroid.cpp", + "thread_jandroid.cpp", + "net_socket_android.cpp", + "audio_driver_jandroid.cpp", + "java_godot_lib_jni.cpp", + "java_class_wrapper.cpp", + "java_godot_wrapper.cpp", + "java_godot_io_wrapper.cpp", + "jni_utils.cpp", + "android_keys_utils.cpp", + "plugin/godot_plugin_jni.cpp", #'power_android.cpp' 
] @@ -29,30 +29,34 @@ for x in android_files: env_thirdparty = env_android.Clone() env_thirdparty.disable_warnings() -android_objects.append(env_thirdparty.SharedObject('#thirdparty/misc/ifaddrs-android.cc')) +android_objects.append(env_thirdparty.SharedObject("#thirdparty/misc/ifaddrs-android.cc")) lib = env_android.add_shared_library("#bin/libgodot", [android_objects], SHLIBSUFFIX=env["SHLIBSUFFIX"]) -lib_arch_dir = '' -if env['android_arch'] == 'armv7': - lib_arch_dir = 'armeabi-v7a' -elif env['android_arch'] == 'arm64v8': - lib_arch_dir = 'arm64-v8a' -elif env['android_arch'] == 'x86': - lib_arch_dir = 'x86' -elif env['android_arch'] == 'x86_64': - lib_arch_dir = 'x86_64' +lib_arch_dir = "" +if env["android_arch"] == "armv7": + lib_arch_dir = "armeabi-v7a" +elif env["android_arch"] == "arm64v8": + lib_arch_dir = "arm64-v8a" +elif env["android_arch"] == "x86": + lib_arch_dir = "x86" +elif env["android_arch"] == "x86_64": + lib_arch_dir = "x86_64" else: - print('WARN: Architecture not suitable for embedding into APK; keeping .so at \\bin') + print("WARN: Architecture not suitable for embedding into APK; keeping .so at \\bin") -if lib_arch_dir != '': - if env['target'] == 'release': - lib_type_dir = 'release' +if lib_arch_dir != "": + if env["target"] == "release": + lib_type_dir = "release" else: # release_debug, debug - lib_type_dir = 'debug' + lib_type_dir = "debug" - out_dir = '#platform/android/java/lib/libs/' + lib_type_dir + '/' + lib_arch_dir - env_android.Command(out_dir + '/libgodot_android.so', '#bin/libgodot' + env['SHLIBSUFFIX'], Move("$TARGET", "$SOURCE")) + out_dir = "#platform/android/java/lib/libs/" + lib_type_dir + "/" + lib_arch_dir + env_android.Command( + out_dir + "/libgodot_android.so", "#bin/libgodot" + env["SHLIBSUFFIX"], Move("$TARGET", "$SOURCE") + ) - stl_lib_path = str(env['ANDROID_NDK_ROOT']) + '/sources/cxx-stl/llvm-libc++/libs/' + lib_arch_dir + '/libc++_shared.so' - env_android.Command(out_dir + '/libc++_shared.so', stl_lib_path, Copy("$TARGET", "$SOURCE")) + stl_lib_path = ( + str(env["ANDROID_NDK_ROOT"]) + "/sources/cxx-stl/llvm-libc++/libs/" + lib_arch_dir + "/libc++_shared.so" + ) + env_android.Command(out_dir + "/libc++_shared.so", stl_lib_path, Copy("$TARGET", "$SOURCE")) diff --git a/platform/android/detect.py b/platform/android/detect.py index ff3ca0706c..ed0643e3b3 100644 --- a/platform/android/detect.py +++ b/platform/android/detect.py @@ -13,7 +13,7 @@ def get_name(): def can_build(): - return ("ANDROID_NDK_ROOT" in os.environ) + return "ANDROID_NDK_ROOT" in os.environ def get_platform(platform): @@ -24,33 +24,33 @@ def get_opts(): from SCons.Variables import BoolVariable, EnumVariable return [ - ('ANDROID_NDK_ROOT', 'Path to the Android NDK', os.environ.get("ANDROID_NDK_ROOT", 0)), - ('ndk_platform', 'Target platform (android-, e.g. "android-18")', "android-18"), - EnumVariable('android_arch', 'Target architecture', "armv7", ('armv7', 'arm64v8', 'x86', 'x86_64')), - BoolVariable('android_neon', 'Enable NEON support (armv7 only)', True), + ("ANDROID_NDK_ROOT", "Path to the Android NDK", os.environ.get("ANDROID_NDK_ROOT", 0)), + ("ndk_platform", 'Target platform (android-, e.g. 
"android-18")', "android-18"), + EnumVariable("android_arch", "Target architecture", "armv7", ("armv7", "arm64v8", "x86", "x86_64")), + BoolVariable("android_neon", "Enable NEON support (armv7 only)", True), ] def get_flags(): return [ - ('tools', False), + ("tools", False), ] def create(env): - tools = env['TOOLS'] + tools = env["TOOLS"] if "mingw" in tools: - tools.remove('mingw') + tools.remove("mingw") if "applelink" in tools: tools.remove("applelink") - env.Tool('gcc') + env.Tool("gcc") return env.Clone(tools=tools) def configure(env): # Workaround for MinGW. See: # http://www.scons.org/wiki/LongCmdLinesOnWin32 - if (os.name == "nt"): + if os.name == "nt": import subprocess @@ -58,8 +58,15 @@ def configure(env): # print("SPAWNED : " + cmdline) startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env) + proc = subprocess.Popen( + cmdline, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + startupinfo=startupinfo, + shell=False, + env=env, + ) data, err = proc.communicate() rv = proc.wait() if rv: @@ -70,7 +77,7 @@ def configure(env): def mySpawn(sh, escape, cmd, args, env): - newargs = ' '.join(args[1:]) + newargs = " ".join(args[1:]) cmdline = cmd + " " + newargs rv = 0 @@ -85,50 +92,54 @@ def configure(env): return rv - env['SPAWN'] = mySpawn + env["SPAWN"] = mySpawn # Architecture - if env['android_arch'] not in ['armv7', 'arm64v8', 'x86', 'x86_64']: - env['android_arch'] = 'armv7' + if env["android_arch"] not in ["armv7", "arm64v8", "x86", "x86_64"]: + env["android_arch"] = "armv7" neon_text = "" - if env["android_arch"] == "armv7" and env['android_neon']: + if env["android_arch"] == "armv7" and env["android_neon"]: neon_text = " (with NEON)" - print("Building for Android (" + env['android_arch'] + ")" + neon_text) + print("Building for Android (" + env["android_arch"] + ")" + neon_text) can_vectorize = True - if env['android_arch'] == 'x86': - env['ARCH'] = 'arch-x86' + if env["android_arch"] == "x86": + env["ARCH"] = "arch-x86" env.extra_suffix = ".x86" + env.extra_suffix target_subpath = "x86-4.9" abi_subpath = "i686-linux-android" arch_subpath = "x86" env["x86_libtheora_opt_gcc"] = True - if env['android_arch'] == 'x86_64': + if env["android_arch"] == "x86_64": if get_platform(env["ndk_platform"]) < 21: - print("WARNING: android_arch=x86_64 is not supported by ndk_platform lower than android-21; setting ndk_platform=android-21") + print( + "WARNING: android_arch=x86_64 is not supported by ndk_platform lower than android-21; setting ndk_platform=android-21" + ) env["ndk_platform"] = "android-21" - env['ARCH'] = 'arch-x86_64' + env["ARCH"] = "arch-x86_64" env.extra_suffix = ".x86_64" + env.extra_suffix target_subpath = "x86_64-4.9" abi_subpath = "x86_64-linux-android" arch_subpath = "x86_64" env["x86_libtheora_opt_gcc"] = True elif env["android_arch"] == "armv7": - env['ARCH'] = 'arch-arm' + env["ARCH"] = "arch-arm" target_subpath = "arm-linux-androideabi-4.9" abi_subpath = "arm-linux-androideabi" arch_subpath = "armeabi-v7a" - if env['android_neon']: + if env["android_neon"]: env.extra_suffix = ".armv7.neon" + env.extra_suffix else: env.extra_suffix = ".armv7" + env.extra_suffix elif env["android_arch"] == "arm64v8": if get_platform(env["ndk_platform"]) < 21: - print("WARNING: android_arch=arm64v8 is not supported by ndk_platform lower than android-21; setting 
ndk_platform=android-21") + print( + "WARNING: android_arch=arm64v8 is not supported by ndk_platform lower than android-21; setting ndk_platform=android-21" + ) env["ndk_platform"] = "android-21" - env['ARCH'] = 'arch-arm64' + env["ARCH"] = "arch-arm64" target_subpath = "aarch64-linux-android-4.9" abi_subpath = "aarch64-linux-android" arch_subpath = "arm64-v8a" @@ -136,40 +147,40 @@ def configure(env): # Build type - if (env["target"].startswith("release")): - if (env["optimize"] == "speed"): # optimize for speed (default) - env.Append(LINKFLAGS=['-O2']) - env.Append(CCFLAGS=['-O2', '-fomit-frame-pointer']) - env.Append(CPPDEFINES=['NDEBUG']) + if env["target"].startswith("release"): + if env["optimize"] == "speed": # optimize for speed (default) + env.Append(LINKFLAGS=["-O2"]) + env.Append(CCFLAGS=["-O2", "-fomit-frame-pointer"]) + env.Append(CPPDEFINES=["NDEBUG"]) else: # optimize for size - env.Append(CCFLAGS=['-Os']) - env.Append(CPPDEFINES=['NDEBUG']) - env.Append(LINKFLAGS=['-Os']) + env.Append(CCFLAGS=["-Os"]) + env.Append(CPPDEFINES=["NDEBUG"]) + env.Append(LINKFLAGS=["-Os"]) - if (can_vectorize): - env.Append(CCFLAGS=['-ftree-vectorize']) - if (env["target"] == "release_debug"): - env.Append(CPPDEFINES=['DEBUG_ENABLED']) - elif (env["target"] == "debug"): - env.Append(LINKFLAGS=['-O0']) - env.Append(CCFLAGS=['-O0', '-g', '-fno-limit-debug-info']) - env.Append(CPPDEFINES=['_DEBUG', 'DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED']) - env.Append(CPPFLAGS=['-UNDEBUG']) + if can_vectorize: + env.Append(CCFLAGS=["-ftree-vectorize"]) + if env["target"] == "release_debug": + env.Append(CPPDEFINES=["DEBUG_ENABLED"]) + elif env["target"] == "debug": + env.Append(LINKFLAGS=["-O0"]) + env.Append(CCFLAGS=["-O0", "-g", "-fno-limit-debug-info"]) + env.Append(CPPDEFINES=["_DEBUG", "DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"]) + env.Append(CPPFLAGS=["-UNDEBUG"]) # Compiler configuration - env['SHLIBSUFFIX'] = '.so' + env["SHLIBSUFFIX"] = ".so" - if env['PLATFORM'] == 'win32': - env.Tool('gcc') + if env["PLATFORM"] == "win32": + env.Tool("gcc") env.use_windows_spawn_fix() - if (sys.platform.startswith("linux")): + if sys.platform.startswith("linux"): host_subpath = "linux-x86_64" - elif (sys.platform.startswith("darwin")): + elif sys.platform.startswith("darwin"): host_subpath = "darwin-x86_64" - elif (sys.platform.startswith('win')): - if (platform.machine().endswith('64')): + elif sys.platform.startswith("win"): + if platform.machine().endswith("64"): host_subpath = "windows-x86_64" else: host_subpath = "windows" @@ -179,22 +190,22 @@ def configure(env): tools_path = gcc_toolchain_path + "/" + abi_subpath + "/bin" # For Clang to find NDK tools in preference of those system-wide - env.PrependENVPath('PATH', tools_path) + env.PrependENVPath("PATH", tools_path) ccache_path = os.environ.get("CCACHE") if ccache_path is None: - env['CC'] = compiler_path + '/clang' - env['CXX'] = compiler_path + '/clang++' + env["CC"] = compiler_path + "/clang" + env["CXX"] = compiler_path + "/clang++" else: # there aren't any ccache wrappers available for Android, # to enable caching we need to prepend the path to the ccache binary - env['CC'] = ccache_path + ' ' + compiler_path + '/clang' - env['CXX'] = ccache_path + ' ' + compiler_path + '/clang++' - env['AR'] = tools_path + "/ar" - env['RANLIB'] = tools_path + "/ranlib" - env['AS'] = tools_path + "/as" + env["CC"] = ccache_path + " " + compiler_path + "/clang" + env["CXX"] = ccache_path + " " + compiler_path + "/clang++" + env["AR"] = tools_path + "/ar" + env["RANLIB"] = 
tools_path + "/ranlib" + env["AS"] = tools_path + "/as" - common_opts = ['-fno-integrated-as', '-gcc-toolchain', gcc_toolchain_path] + common_opts = ["-fno-integrated-as", "-gcc-toolchain", gcc_toolchain_path] # Compile flags @@ -202,14 +213,14 @@ def configure(env): env.Append(CPPFLAGS=["-isystem", env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++abi/include"]) # Disable exceptions and rtti on non-tools (template) builds - if env['tools']: - env.Append(CXXFLAGS=['-frtti']) + if env["tools"]: + env.Append(CXXFLAGS=["-frtti"]) else: - env.Append(CXXFLAGS=['-fno-rtti', '-fno-exceptions']) + env.Append(CXXFLAGS=["-fno-rtti", "-fno-exceptions"]) # Don't use dynamic_cast, necessary with no-rtti. - env.Append(CPPDEFINES=['NO_SAFE_CAST']) + env.Append(CPPDEFINES=["NO_SAFE_CAST"]) - lib_sysroot = env["ANDROID_NDK_ROOT"] + "/platforms/" + env['ndk_platform'] + "/" + env['ARCH'] + lib_sysroot = env["ANDROID_NDK_ROOT"] + "/platforms/" + env["ndk_platform"] + "/" + env["ARCH"] # Using NDK unified headers (NDK r15+) sysroot = env["ANDROID_NDK_ROOT"] + "/sysroot" @@ -217,35 +228,37 @@ def configure(env): env.Append(CPPFLAGS=["-isystem", sysroot + "/usr/include/" + abi_subpath]) env.Append(CPPFLAGS=["-isystem", env["ANDROID_NDK_ROOT"] + "/sources/android/support/include"]) # For unified headers this define has to be set manually - env.Append(CPPDEFINES=[('__ANDROID_API__', str(get_platform(env['ndk_platform'])))]) + env.Append(CPPDEFINES=[("__ANDROID_API__", str(get_platform(env["ndk_platform"])))]) - env.Append(CCFLAGS='-fpic -ffunction-sections -funwind-tables -fstack-protector-strong -fvisibility=hidden -fno-strict-aliasing'.split()) - env.Append(CPPDEFINES=['NO_STATVFS', 'GLES_ENABLED']) + env.Append( + CCFLAGS="-fpic -ffunction-sections -funwind-tables -fstack-protector-strong -fvisibility=hidden -fno-strict-aliasing".split() + ) + env.Append(CPPDEFINES=["NO_STATVFS", "GLES_ENABLED"]) - env['neon_enabled'] = False - if env['android_arch'] == 'x86': - target_opts = ['-target', 'i686-none-linux-android'] + env["neon_enabled"] = False + if env["android_arch"] == "x86": + target_opts = ["-target", "i686-none-linux-android"] # The NDK adds this if targeting API < 21, so we can drop it when Godot targets it at least - env.Append(CCFLAGS=['-mstackrealign']) + env.Append(CCFLAGS=["-mstackrealign"]) - elif env['android_arch'] == 'x86_64': - target_opts = ['-target', 'x86_64-none-linux-android'] + elif env["android_arch"] == "x86_64": + target_opts = ["-target", "x86_64-none-linux-android"] elif env["android_arch"] == "armv7": - target_opts = ['-target', 'armv7-none-linux-androideabi'] - env.Append(CCFLAGS='-march=armv7-a -mfloat-abi=softfp'.split()) - env.Append(CPPDEFINES=['__ARM_ARCH_7__', '__ARM_ARCH_7A__']) - if env['android_neon']: - env['neon_enabled'] = True - env.Append(CCFLAGS=['-mfpu=neon']) - env.Append(CPPDEFINES=['__ARM_NEON__']) + target_opts = ["-target", "armv7-none-linux-androideabi"] + env.Append(CCFLAGS="-march=armv7-a -mfloat-abi=softfp".split()) + env.Append(CPPDEFINES=["__ARM_ARCH_7__", "__ARM_ARCH_7A__"]) + if env["android_neon"]: + env["neon_enabled"] = True + env.Append(CCFLAGS=["-mfpu=neon"]) + env.Append(CPPDEFINES=["__ARM_NEON__"]) else: - env.Append(CCFLAGS=['-mfpu=vfpv3-d16']) + env.Append(CCFLAGS=["-mfpu=vfpv3-d16"]) elif env["android_arch"] == "arm64v8": - target_opts = ['-target', 'aarch64-none-linux-android'] - env.Append(CCFLAGS=['-mfix-cortex-a53-835769']) - env.Append(CPPDEFINES=['__ARM_ARCH_8A__']) + target_opts = ["-target", "aarch64-none-linux-android"] + 
env.Append(CCFLAGS=["-mfix-cortex-a53-835769"]) + env.Append(CPPDEFINES=["__ARM_ARCH_8A__"]) env.Append(CCFLAGS=target_opts) env.Append(CCFLAGS=common_opts) @@ -254,29 +267,55 @@ def configure(env): ndk_version = get_ndk_version(env["ANDROID_NDK_ROOT"]) if ndk_version != None and LooseVersion(ndk_version) >= LooseVersion("17.1.4828580"): - env.Append(LINKFLAGS=['-Wl,--exclude-libs,libgcc.a', '-Wl,--exclude-libs,libatomic.a', '-nostdlib++']) + env.Append(LINKFLAGS=["-Wl,--exclude-libs,libgcc.a", "-Wl,--exclude-libs,libatomic.a", "-nostdlib++"]) else: - env.Append(LINKFLAGS=[env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/libandroid_support.a"]) - env.Append(LINKFLAGS=['-shared', '--sysroot=' + lib_sysroot, '-Wl,--warn-shared-textrel']) + env.Append( + LINKFLAGS=[ + env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/libandroid_support.a" + ] + ) + env.Append(LINKFLAGS=["-shared", "--sysroot=" + lib_sysroot, "-Wl,--warn-shared-textrel"]) env.Append(LIBPATH=[env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/"]) - env.Append(LINKFLAGS=[env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/libc++_shared.so"]) + env.Append( + LINKFLAGS=[env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/libc++_shared.so"] + ) if env["android_arch"] == "armv7": - env.Append(LINKFLAGS='-Wl,--fix-cortex-a8'.split()) - env.Append(LINKFLAGS='-Wl,--no-undefined -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now'.split()) - env.Append(LINKFLAGS='-Wl,-soname,libgodot_android.so -Wl,--gc-sections'.split()) + env.Append(LINKFLAGS="-Wl,--fix-cortex-a8".split()) + env.Append(LINKFLAGS="-Wl,--no-undefined -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now".split()) + env.Append(LINKFLAGS="-Wl,-soname,libgodot_android.so -Wl,--gc-sections".split()) env.Append(LINKFLAGS=target_opts) env.Append(LINKFLAGS=common_opts) - env.Append(LIBPATH=[env["ANDROID_NDK_ROOT"] + '/toolchains/' + target_subpath + '/prebuilt/' + - host_subpath + '/lib/gcc/' + abi_subpath + '/4.9.x']) - env.Append(LIBPATH=[env["ANDROID_NDK_ROOT"] + - '/toolchains/' + target_subpath + '/prebuilt/' + host_subpath + '/' + abi_subpath + '/lib']) + env.Append( + LIBPATH=[ + env["ANDROID_NDK_ROOT"] + + "/toolchains/" + + target_subpath + + "/prebuilt/" + + host_subpath + + "/lib/gcc/" + + abi_subpath + + "/4.9.x" + ] + ) + env.Append( + LIBPATH=[ + env["ANDROID_NDK_ROOT"] + + "/toolchains/" + + target_subpath + + "/prebuilt/" + + host_subpath + + "/" + + abi_subpath + + "/lib" + ] + ) - env.Prepend(CPPPATH=['#platform/android']) - env.Append(CPPDEFINES=['ANDROID_ENABLED', 'UNIX_ENABLED', 'NO_FCNTL']) - env.Append(LIBS=['OpenSLES', 'EGL', 'GLESv3', 'GLESv2', 'android', 'log', 'z', 'dl']) + env.Prepend(CPPPATH=["#platform/android"]) + env.Append(CPPDEFINES=["ANDROID_ENABLED", "UNIX_ENABLED", "NO_FCNTL"]) + env.Append(LIBS=["OpenSLES", "EGL", "GLESv3", "GLESv2", "android", "log", "z", "dl"]) # Return NDK version string in source.properties (adapted from the Chromium project). 
diff --git a/platform/haiku/SCsub b/platform/haiku/SCsub index 592f56bbbf..dbff6c5ae9 100644 --- a/platform/haiku/SCsub +++ b/platform/haiku/SCsub @@ -1,28 +1,25 @@ #!/usr/bin/env python -Import('env') +Import("env") common_haiku = [ - 'os_haiku.cpp', - 'context_gl_haiku.cpp', - 'haiku_application.cpp', - 'haiku_direct_window.cpp', - 'haiku_gl_view.cpp', - 'key_mapping_haiku.cpp', - 'audio_driver_media_kit.cpp' + "os_haiku.cpp", + "context_gl_haiku.cpp", + "haiku_application.cpp", + "haiku_direct_window.cpp", + "haiku_gl_view.cpp", + "key_mapping_haiku.cpp", + "audio_driver_media_kit.cpp", ] -target = env.add_program( - '#bin/godot', - ['godot_haiku.cpp'] + common_haiku -) +target = env.add_program("#bin/godot", ["godot_haiku.cpp"] + common_haiku) -command = env.Command('#bin/godot.rsrc', '#platform/haiku/godot.rdef', - ['rc -o $TARGET $SOURCE']) +command = env.Command("#bin/godot.rsrc", "#platform/haiku/godot.rdef", ["rc -o $TARGET $SOURCE"]) def addResourcesAction(target=None, source=None, env=None): - return env.Execute('xres -o ' + File(target)[0].path + ' bin/godot.rsrc') + return env.Execute("xres -o " + File(target)[0].path + " bin/godot.rsrc") + env.AddPostAction(target, addResourcesAction) env.Depends(target, command) diff --git a/platform/haiku/detect.py b/platform/haiku/detect.py index dd72294816..0b84df8f9b 100644 --- a/platform/haiku/detect.py +++ b/platform/haiku/detect.py @@ -12,7 +12,7 @@ def get_name(): def can_build(): - if (os.name != "posix" or sys.platform == "darwin"): + if os.name != "posix" or sys.platform == "darwin": return False return True @@ -22,41 +22,40 @@ def get_opts(): from SCons.Variables import EnumVariable return [ - EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')), + EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")), ] def get_flags(): - return [ - ] + return [] def configure(env): ## Build type - if (env["target"] == "release"): - env.Prepend(CCFLAGS=['-O3']) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + if env["target"] == "release": + env.Prepend(CCFLAGS=["-O3"]) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - elif (env["target"] == "release_debug"): - env.Prepend(CCFLAGS=['-O2', '-DDEBUG_ENABLED']) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + elif env["target"] == "release_debug": + env.Prepend(CCFLAGS=["-O2", "-DDEBUG_ENABLED"]) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - elif (env["target"] == "debug"): - env.Prepend(CCFLAGS=['-g3', '-DDEBUG_ENABLED', '-DDEBUG_MEMORY_ENABLED']) + elif env["target"] == "debug": + env.Prepend(CCFLAGS=["-g3", "-DDEBUG_ENABLED", "-DDEBUG_MEMORY_ENABLED"]) ## Architecture - is64 = sys.maxsize > 2**32 - if (env["bits"] == "default"): + is64 = sys.maxsize > 2 ** 32 + if env["bits"] == "default": env["bits"] = "64" if is64 else "32" ## Compiler configuration @@ -66,89 +65,94 @@ def configure(env): ## Dependencies - if not env['builtin_libwebp']: - env.ParseConfig('pkg-config libwebp --cflags --libs') + if not env["builtin_libwebp"]: + env.ParseConfig("pkg-config libwebp --cflags --libs") # freetype depends on libpng and zlib, so bundling one of them 
while keeping others # as shared libraries leads to weird issues - if env['builtin_freetype'] or env['builtin_libpng'] or env['builtin_zlib']: - env['builtin_freetype'] = True - env['builtin_libpng'] = True - env['builtin_zlib'] = True + if env["builtin_freetype"] or env["builtin_libpng"] or env["builtin_zlib"]: + env["builtin_freetype"] = True + env["builtin_libpng"] = True + env["builtin_zlib"] = True - if not env['builtin_freetype']: - env.ParseConfig('pkg-config freetype2 --cflags --libs') + if not env["builtin_freetype"]: + env.ParseConfig("pkg-config freetype2 --cflags --libs") - if not env['builtin_libpng']: - env.ParseConfig('pkg-config libpng16 --cflags --libs') + if not env["builtin_libpng"]: + env.ParseConfig("pkg-config libpng16 --cflags --libs") - if not env['builtin_bullet']: + if not env["builtin_bullet"]: # We need at least version 2.88 import subprocess - bullet_version = subprocess.check_output(['pkg-config', 'bullet', '--modversion']).strip() + + bullet_version = subprocess.check_output(["pkg-config", "bullet", "--modversion"]).strip() if bullet_version < "2.88": # Abort as system bullet was requested but too old - print("Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(bullet_version, "2.88")) + print( + "Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format( + bullet_version, "2.88" + ) + ) sys.exit(255) - env.ParseConfig('pkg-config bullet --cflags --libs') + env.ParseConfig("pkg-config bullet --cflags --libs") - if not env['builtin_enet']: - env.ParseConfig('pkg-config libenet --cflags --libs') + if not env["builtin_enet"]: + env.ParseConfig("pkg-config libenet --cflags --libs") - if not env['builtin_squish']: - env.ParseConfig('pkg-config libsquish --cflags --libs') + if not env["builtin_squish"]: + env.ParseConfig("pkg-config libsquish --cflags --libs") - if not env['builtin_zstd']: - env.ParseConfig('pkg-config libzstd --cflags --libs') + if not env["builtin_zstd"]: + env.ParseConfig("pkg-config libzstd --cflags --libs") # Sound and video libraries # Keep the order as it triggers chained dependencies (ogg needed by others, etc.) 
- if not env['builtin_libtheora']: - env['builtin_libogg'] = False # Needed to link against system libtheora - env['builtin_libvorbis'] = False # Needed to link against system libtheora - env.ParseConfig('pkg-config theora theoradec --cflags --libs') + if not env["builtin_libtheora"]: + env["builtin_libogg"] = False # Needed to link against system libtheora + env["builtin_libvorbis"] = False # Needed to link against system libtheora + env.ParseConfig("pkg-config theora theoradec --cflags --libs") - if not env['builtin_libvpx']: - env.ParseConfig('pkg-config vpx --cflags --libs') + if not env["builtin_libvpx"]: + env.ParseConfig("pkg-config vpx --cflags --libs") - if not env['builtin_libvorbis']: - env['builtin_libogg'] = False # Needed to link against system libvorbis - env.ParseConfig('pkg-config vorbis vorbisfile --cflags --libs') + if not env["builtin_libvorbis"]: + env["builtin_libogg"] = False # Needed to link against system libvorbis + env.ParseConfig("pkg-config vorbis vorbisfile --cflags --libs") - if not env['builtin_opus']: - env['builtin_libogg'] = False # Needed to link against system opus - env.ParseConfig('pkg-config opus opusfile --cflags --libs') + if not env["builtin_opus"]: + env["builtin_libogg"] = False # Needed to link against system opus + env.ParseConfig("pkg-config opus opusfile --cflags --libs") - if not env['builtin_libogg']: - env.ParseConfig('pkg-config ogg --cflags --libs') + if not env["builtin_libogg"]: + env.ParseConfig("pkg-config ogg --cflags --libs") - if env['builtin_libtheora']: - list_of_x86 = ['x86_64', 'x86', 'i386', 'i586'] + if env["builtin_libtheora"]: + list_of_x86 = ["x86_64", "x86", "i386", "i586"] if any(platform.machine() in s for s in list_of_x86): env["x86_libtheora_opt_gcc"] = True - if not env['builtin_wslay']: - env.ParseConfig('pkg-config libwslay --cflags --libs') + if not env["builtin_wslay"]: + env.ParseConfig("pkg-config libwslay --cflags --libs") - if not env['builtin_mbedtls']: + if not env["builtin_mbedtls"]: # mbedTLS does not provide a pkgconfig config yet. See https://github.com/ARMmbed/mbedtls/issues/228 - env.Append(LIBS=['mbedtls', 'mbedcrypto', 'mbedx509']) + env.Append(LIBS=["mbedtls", "mbedcrypto", "mbedx509"]) - if not env['builtin_miniupnpc']: + if not env["builtin_miniupnpc"]: # No pkgconfig file so far, hardcode default paths. 
env.Prepend(CPPPATH=["/system/develop/headers/x86/miniupnpc"]) env.Append(LIBS=["miniupnpc"]) # On Linux wchar_t should be 32-bits # 16-bit library shouldn't be required due to compiler optimisations - if not env['builtin_pcre2']: - env.ParseConfig('pkg-config libpcre2-32 --cflags --libs') + if not env["builtin_pcre2"]: + env.ParseConfig("pkg-config libpcre2-32 --cflags --libs") ## Flags - env.Prepend(CPPPATH=['#platform/haiku']) - env.Append(CPPDEFINES=['UNIX_ENABLED', 'OPENGL_ENABLED', 'GLES_ENABLED']) - env.Append(CPPDEFINES=['MEDIA_KIT_ENABLED']) - env.Append(CPPDEFINES=['PTHREAD_NO_RENAME']) # TODO: enable when we have pthread_setname_np - env.Append(LIBS=['be', 'game', 'media', 'network', 'bnetapi', 'z', 'GL']) + env.Prepend(CPPPATH=["#platform/haiku"]) + env.Append(CPPDEFINES=["UNIX_ENABLED", "OPENGL_ENABLED", "GLES_ENABLED"]) + env.Append(CPPDEFINES=["MEDIA_KIT_ENABLED"]) + env.Append(CPPDEFINES=["PTHREAD_NO_RENAME"]) # TODO: enable when we have pthread_setname_np + env.Append(LIBS=["be", "game", "media", "network", "bnetapi", "z", "GL"]) diff --git a/platform/iphone/SCsub b/platform/iphone/SCsub index fa1b124561..58f4ab1335 100644 --- a/platform/iphone/SCsub +++ b/platform/iphone/SCsub @@ -1,30 +1,34 @@ #!/usr/bin/env python -Import('env') +Import("env") iphone_lib = [ - 'godot_iphone.cpp', - 'os_iphone.cpp', - 'semaphore_iphone.cpp', - 'gl_view.mm', - 'main.m', - 'app_delegate.mm', - 'view_controller.mm', - 'game_center.mm', - 'in_app_store.mm', - 'icloud.mm', - 'ios.mm', + "godot_iphone.cpp", + "os_iphone.cpp", + "semaphore_iphone.cpp", + "gl_view.mm", + "main.m", + "app_delegate.mm", + "view_controller.mm", + "game_center.mm", + "in_app_store.mm", + "icloud.mm", + "ios.mm", ] env_ios = env.Clone() -ios_lib = env_ios.add_library('iphone', iphone_lib) +ios_lib = env_ios.add_library("iphone", iphone_lib) + def combine_libs(target=None, source=None, env=None): lib_path = target[0].srcnode().abspath if "osxcross" in env: - libtool = '$IPHONEPATH/usr/bin/${ios_triple}libtool' + libtool = "$IPHONEPATH/usr/bin/${ios_triple}libtool" else: libtool = "$IPHONEPATH/usr/bin/libtool" - env.Execute(libtool + ' -static -o "' + lib_path + '" ' + ' '.join([('"' + lib.srcnode().abspath + '"') for lib in source])) + env.Execute( + libtool + ' -static -o "' + lib_path + '" ' + " ".join([('"' + lib.srcnode().abspath + '"') for lib in source]) + ) -combine_command = env_ios.Command('#bin/libgodot' + env_ios['LIBSUFFIX'], [ios_lib] + env_ios['LIBS'], combine_libs) + +combine_command = env_ios.Command("#bin/libgodot" + env_ios["LIBSUFFIX"], [ios_lib] + env_ios["LIBS"], combine_libs) diff --git a/platform/iphone/detect.py b/platform/iphone/detect.py index f646b8b1d5..20a640a3f0 100644 --- a/platform/iphone/detect.py +++ b/platform/iphone/detect.py @@ -2,6 +2,7 @@ import os import sys from methods import detect_darwin_sdk_path + def is_active(): return True @@ -12,7 +13,7 @@ def get_name(): def can_build(): - if sys.platform == 'darwin' or ("OSXCROSS_IOS" in os.environ): + if sys.platform == "darwin" or ("OSXCROSS_IOS" in os.environ): return True return False @@ -20,21 +21,26 @@ def can_build(): def get_opts(): from SCons.Variables import BoolVariable + return [ - ('IPHONEPATH', 'Path to iPhone toolchain', '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain'), - ('IPHONESDK', 'Path to the iPhone SDK', ''), - BoolVariable('game_center', 'Support for game center', True), - BoolVariable('store_kit', 'Support for in-app store', True), - BoolVariable('icloud', 'Support for iCloud', 
True), - BoolVariable('ios_exceptions', 'Enable exceptions', False), - ('ios_triple', 'Triple for ios toolchain', ''), + ( + "IPHONEPATH", + "Path to iPhone toolchain", + "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain", + ), + ("IPHONESDK", "Path to the iPhone SDK", ""), + BoolVariable("game_center", "Support for game center", True), + BoolVariable("store_kit", "Support for in-app store", True), + BoolVariable("icloud", "Support for iCloud", True), + BoolVariable("ios_exceptions", "Enable exceptions", False), + ("ios_triple", "Triple for ios toolchain", ""), ] def get_flags(): return [ - ('tools', False), + ("tools", False), ] @@ -42,32 +48,32 @@ def configure(env): ## Build type - if (env["target"].startswith("release")): - env.Append(CPPDEFINES=['NDEBUG', ('NS_BLOCK_ASSERTIONS', 1)]) - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Append(CCFLAGS=['-O2', '-ftree-vectorize', '-fomit-frame-pointer']) - env.Append(LINKFLAGS=['-O2']) - else: #optimize for size - env.Append(CCFLAGS=['-Os', '-ftree-vectorize']) - env.Append(LINKFLAGS=['-Os']) + if env["target"].startswith("release"): + env.Append(CPPDEFINES=["NDEBUG", ("NS_BLOCK_ASSERTIONS", 1)]) + if env["optimize"] == "speed": # optimize for speed (default) + env.Append(CCFLAGS=["-O2", "-ftree-vectorize", "-fomit-frame-pointer"]) + env.Append(LINKFLAGS=["-O2"]) + else: # optimize for size + env.Append(CCFLAGS=["-Os", "-ftree-vectorize"]) + env.Append(LINKFLAGS=["-Os"]) if env["target"] == "release_debug": - env.Append(CPPDEFINES=['DEBUG_ENABLED']) + env.Append(CPPDEFINES=["DEBUG_ENABLED"]) - elif (env["target"] == "debug"): - env.Append(CCFLAGS=['-gdwarf-2', '-O0']) - env.Append(CPPDEFINES=['_DEBUG', ('DEBUG', 1), 'DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED']) + elif env["target"] == "debug": + env.Append(CCFLAGS=["-gdwarf-2", "-O0"]) + env.Append(CPPDEFINES=["_DEBUG", ("DEBUG", 1), "DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"]) - if (env["use_lto"]): - env.Append(CCFLAGS=['-flto']) - env.Append(LINKFLAGS=['-flto']) + if env["use_lto"]: + env.Append(CCFLAGS=["-flto"]) + env.Append(LINKFLAGS=["-flto"]) ## Architecture if env["arch"] == "x86": # i386 env["bits"] = "32" elif env["arch"] == "x86_64": env["bits"] = "64" - elif (env["arch"] == "arm" or env["arch"] == "arm32" or env["arch"] == "armv7" or env["bits"] == "32"): # arm + elif env["arch"] == "arm" or env["arch"] == "arm32" or env["arch"] == "armv7" or env["bits"] == "32": # arm env["arch"] = "arm" env["bits"] = "32" else: # armv64 @@ -80,101 +86,141 @@ def configure(env): if "OSXCROSS_IOS" in os.environ: env["osxcross"] = True - env['ENV']['PATH'] = env['IPHONEPATH'] + "/Developer/usr/bin/:" + env['ENV']['PATH'] + env["ENV"]["PATH"] = env["IPHONEPATH"] + "/Developer/usr/bin/:" + env["ENV"]["PATH"] - compiler_path = '$IPHONEPATH/usr/bin/${ios_triple}' - s_compiler_path = '$IPHONEPATH/Developer/usr/bin/' + compiler_path = "$IPHONEPATH/usr/bin/${ios_triple}" + s_compiler_path = "$IPHONEPATH/Developer/usr/bin/" ccache_path = os.environ.get("CCACHE") if ccache_path is None: - env['CC'] = compiler_path + 'clang' - env['CXX'] = compiler_path + 'clang++' - env['S_compiler'] = s_compiler_path + 'gcc' + env["CC"] = compiler_path + "clang" + env["CXX"] = compiler_path + "clang++" + env["S_compiler"] = s_compiler_path + "gcc" else: # there aren't any ccache wrappers available for iOS, # to enable caching we need to prepend the path to the ccache binary - env['CC'] = ccache_path + ' ' + compiler_path + 'clang' - env['CXX'] = ccache_path + ' ' + 
compiler_path + 'clang++' - env['S_compiler'] = ccache_path + ' ' + s_compiler_path + 'gcc' - env['AR'] = compiler_path + 'ar' - env['RANLIB'] = compiler_path + 'ranlib' + env["CC"] = ccache_path + " " + compiler_path + "clang" + env["CXX"] = ccache_path + " " + compiler_path + "clang++" + env["S_compiler"] = ccache_path + " " + s_compiler_path + "gcc" + env["AR"] = compiler_path + "ar" + env["RANLIB"] = compiler_path + "ranlib" ## Compile flags - if (env["arch"] == "x86" or env["arch"] == "x86_64"): - detect_darwin_sdk_path('iphonesimulator', env) - env['ENV']['MACOSX_DEPLOYMENT_TARGET'] = '10.9' + if env["arch"] == "x86" or env["arch"] == "x86_64": + detect_darwin_sdk_path("iphonesimulator", env) + env["ENV"]["MACOSX_DEPLOYMENT_TARGET"] = "10.9" arch_flag = "i386" if env["arch"] == "x86" else env["arch"] - env.Append(CCFLAGS=('-arch ' + arch_flag + ' -fobjc-abi-version=2 -fobjc-legacy-dispatch -fmessage-length=0 -fpascal-strings -fblocks -fasm-blocks -isysroot $IPHONESDK -mios-simulator-version-min=10.0').split()) - elif (env["arch"] == "arm"): - detect_darwin_sdk_path('iphone', env) - env.Append(CCFLAGS='-fno-objc-arc -arch armv7 -fmessage-length=0 -fno-strict-aliasing -fdiagnostics-print-source-range-info -fdiagnostics-show-category=id -fdiagnostics-parseable-fixits -fpascal-strings -fblocks -isysroot $IPHONESDK -fvisibility=hidden -mthumb "-DIBOutlet=__attribute__((iboutlet))" "-DIBOutletCollection(ClassName)=__attribute__((iboutletcollection(ClassName)))" "-DIBAction=void)__attribute__((ibaction)" -miphoneos-version-min=10.0 -MMD -MT dependencies'.split()) - elif (env["arch"] == "arm64"): - detect_darwin_sdk_path('iphone', env) - env.Append(CCFLAGS='-fno-objc-arc -arch arm64 -fmessage-length=0 -fno-strict-aliasing -fdiagnostics-print-source-range-info -fdiagnostics-show-category=id -fdiagnostics-parseable-fixits -fpascal-strings -fblocks -fvisibility=hidden -MMD -MT dependencies -miphoneos-version-min=10.0 -isysroot $IPHONESDK'.split()) - env.Append(CPPDEFINES=['NEED_LONG_INT']) - env.Append(CPPDEFINES=['LIBYUV_DISABLE_NEON']) + env.Append( + CCFLAGS=( + "-arch " + + arch_flag + + " -fobjc-abi-version=2 -fobjc-legacy-dispatch -fmessage-length=0 -fpascal-strings -fblocks -fasm-blocks -isysroot $IPHONESDK -mios-simulator-version-min=10.0" + ).split() + ) + elif env["arch"] == "arm": + detect_darwin_sdk_path("iphone", env) + env.Append( + CCFLAGS='-fno-objc-arc -arch armv7 -fmessage-length=0 -fno-strict-aliasing -fdiagnostics-print-source-range-info -fdiagnostics-show-category=id -fdiagnostics-parseable-fixits -fpascal-strings -fblocks -isysroot $IPHONESDK -fvisibility=hidden -mthumb "-DIBOutlet=__attribute__((iboutlet))" "-DIBOutletCollection(ClassName)=__attribute__((iboutletcollection(ClassName)))" "-DIBAction=void)__attribute__((ibaction)" -miphoneos-version-min=10.0 -MMD -MT dependencies'.split() + ) + elif env["arch"] == "arm64": + detect_darwin_sdk_path("iphone", env) + env.Append( + CCFLAGS="-fno-objc-arc -arch arm64 -fmessage-length=0 -fno-strict-aliasing -fdiagnostics-print-source-range-info -fdiagnostics-show-category=id -fdiagnostics-parseable-fixits -fpascal-strings -fblocks -fvisibility=hidden -MMD -MT dependencies -miphoneos-version-min=10.0 -isysroot $IPHONESDK".split() + ) + env.Append(CPPDEFINES=["NEED_LONG_INT"]) + env.Append(CPPDEFINES=["LIBYUV_DISABLE_NEON"]) # Disable exceptions on non-tools (template) builds - if not env['tools']: - if env['ios_exceptions']: - env.Append(CCFLAGS=['-fexceptions']) + if not env["tools"]: + if env["ios_exceptions"]: + 
env.Append(CCFLAGS=["-fexceptions"]) else: - env.Append(CCFLAGS=['-fno-exceptions']) + env.Append(CCFLAGS=["-fno-exceptions"]) ## Link flags - if (env["arch"] == "x86" or env["arch"] == "x86_64"): + if env["arch"] == "x86" or env["arch"] == "x86_64": arch_flag = "i386" if env["arch"] == "x86" else env["arch"] - env.Append(LINKFLAGS=['-arch', arch_flag, '-mios-simulator-version-min=10.0', - '-isysroot', '$IPHONESDK', - '-Xlinker', - '-objc_abi_version', - '-Xlinker', '2', - '-F$IPHONESDK', - ]) - elif (env["arch"] == "arm"): - env.Append(LINKFLAGS=['-arch', 'armv7', '-Wl,-dead_strip', '-miphoneos-version-min=10.0']) - if (env["arch"] == "arm64"): - env.Append(LINKFLAGS=['-arch', 'arm64', '-Wl,-dead_strip', '-miphoneos-version-min=10.0']) + env.Append( + LINKFLAGS=[ + "-arch", + arch_flag, + "-mios-simulator-version-min=10.0", + "-isysroot", + "$IPHONESDK", + "-Xlinker", + "-objc_abi_version", + "-Xlinker", + "2", + "-F$IPHONESDK", + ] + ) + elif env["arch"] == "arm": + env.Append(LINKFLAGS=["-arch", "armv7", "-Wl,-dead_strip", "-miphoneos-version-min=10.0"]) + if env["arch"] == "arm64": + env.Append(LINKFLAGS=["-arch", "arm64", "-Wl,-dead_strip", "-miphoneos-version-min=10.0"]) - env.Append(LINKFLAGS=['-isysroot', '$IPHONESDK', - '-framework', 'AudioToolbox', - '-framework', 'AVFoundation', - '-framework', 'CoreAudio', - '-framework', 'CoreGraphics', - '-framework', 'CoreMedia', - '-framework', 'CoreVideo', - '-framework', 'CoreMotion', - '-framework', 'Foundation', - '-framework', 'GameController', - '-framework', 'MediaPlayer', - '-framework', 'OpenGLES', - '-framework', 'QuartzCore', - '-framework', 'Security', - '-framework', 'SystemConfiguration', - '-framework', 'UIKit', - '-framework', 'ARKit', - ]) + env.Append( + LINKFLAGS=[ + "-isysroot", + "$IPHONESDK", + "-framework", + "AudioToolbox", + "-framework", + "AVFoundation", + "-framework", + "CoreAudio", + "-framework", + "CoreGraphics", + "-framework", + "CoreMedia", + "-framework", + "CoreVideo", + "-framework", + "CoreMotion", + "-framework", + "Foundation", + "-framework", + "GameController", + "-framework", + "MediaPlayer", + "-framework", + "OpenGLES", + "-framework", + "QuartzCore", + "-framework", + "Security", + "-framework", + "SystemConfiguration", + "-framework", + "UIKit", + "-framework", + "ARKit", + ] + ) # Feature options - if env['game_center']: - env.Append(CPPDEFINES=['GAME_CENTER_ENABLED']) - env.Append(LINKFLAGS=['-framework', 'GameKit']) + if env["game_center"]: + env.Append(CPPDEFINES=["GAME_CENTER_ENABLED"]) + env.Append(LINKFLAGS=["-framework", "GameKit"]) - if env['store_kit']: - env.Append(CPPDEFINES=['STOREKIT_ENABLED']) - env.Append(LINKFLAGS=['-framework', 'StoreKit']) + if env["store_kit"]: + env.Append(CPPDEFINES=["STOREKIT_ENABLED"]) + env.Append(LINKFLAGS=["-framework", "StoreKit"]) - if env['icloud']: - env.Append(CPPDEFINES=['ICLOUD_ENABLED']) + if env["icloud"]: + env.Append(CPPDEFINES=["ICLOUD_ENABLED"]) - env.Prepend(CPPPATH=['$IPHONESDK/usr/include', - '$IPHONESDK/System/Library/Frameworks/OpenGLES.framework/Headers', - '$IPHONESDK/System/Library/Frameworks/AudioUnit.framework/Headers', - ]) + env.Prepend( + CPPPATH=[ + "$IPHONESDK/usr/include", + "$IPHONESDK/System/Library/Frameworks/OpenGLES.framework/Headers", + "$IPHONESDK/System/Library/Frameworks/AudioUnit.framework/Headers", + ] + ) - env['ENV']['CODESIGN_ALLOCATE'] = '/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin/codesign_allocate' + env["ENV"]["CODESIGN_ALLOCATE"] = 
"/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin/codesign_allocate" - env.Prepend(CPPPATH=['#platform/iphone']) - env.Append(CPPDEFINES=['IPHONE_ENABLED', 'UNIX_ENABLED', 'GLES_ENABLED', 'COREAUDIO_ENABLED']) + env.Prepend(CPPPATH=["#platform/iphone"]) + env.Append(CPPDEFINES=["IPHONE_ENABLED", "UNIX_ENABLED", "GLES_ENABLED", "COREAUDIO_ENABLED"]) diff --git a/platform/javascript/SCsub b/platform/javascript/SCsub index 85a633442e..fd8ea9fec0 100644 --- a/platform/javascript/SCsub +++ b/platform/javascript/SCsub @@ -1,44 +1,45 @@ #!/usr/bin/env python -Import('env') +Import("env") javascript_files = [ - 'audio_driver_javascript.cpp', - 'http_client_javascript.cpp', - 'javascript_eval.cpp', - 'javascript_main.cpp', - 'os_javascript.cpp', + "audio_driver_javascript.cpp", + "http_client_javascript.cpp", + "javascript_eval.cpp", + "javascript_main.cpp", + "os_javascript.cpp", ] -build = env.add_program(['#bin/godot${PROGSUFFIX}.js', '#bin/godot${PROGSUFFIX}.wasm'], javascript_files); +build = env.add_program(["#bin/godot${PROGSUFFIX}.js", "#bin/godot${PROGSUFFIX}.wasm"], javascript_files) js, wasm = build js_libraries = [ - 'http_request.js', + "http_request.js", ] for lib in js_libraries: - env.Append(LINKFLAGS=['--js-library', env.File(lib).path]) + env.Append(LINKFLAGS=["--js-library", env.File(lib).path]) env.Depends(build, js_libraries) js_modules = [ - 'id_handler.js', + "id_handler.js", ] for module in js_modules: - env.Append(LINKFLAGS=['--pre-js', env.File(module).path]) + env.Append(LINKFLAGS=["--pre-js", env.File(module).path]) env.Depends(build, js_modules) -wrapper_start = env.File('pre.js') -wrapper_end = env.File('engine.js') -js_wrapped = env.Textfile('#bin/godot', [wrapper_start, js, wrapper_end], TEXTFILESUFFIX='${PROGSUFFIX}.wrapped.js') +wrapper_start = env.File("pre.js") +wrapper_end = env.File("engine.js") +js_wrapped = env.Textfile("#bin/godot", [wrapper_start, js, wrapper_end], TEXTFILESUFFIX="${PROGSUFFIX}.wrapped.js") -zip_dir = env.Dir('#bin/.javascript_zip') -zip_files = env.InstallAs([ - zip_dir.File('godot.js'), - zip_dir.File('godot.wasm'), - zip_dir.File('godot.html') -], [ - js_wrapped, - wasm, - '#misc/dist/html/full-size.html' -]) -env.Zip('#bin/godot', zip_files, ZIPROOT=zip_dir, ZIPSUFFIX='${PROGSUFFIX}${ZIPSUFFIX}', ZIPCOMSTR='Archving $SOURCES as $TARGET') +zip_dir = env.Dir("#bin/.javascript_zip") +zip_files = env.InstallAs( + [zip_dir.File("godot.js"), zip_dir.File("godot.wasm"), zip_dir.File("godot.html")], + [js_wrapped, wasm, "#misc/dist/html/full-size.html"], +) +env.Zip( + "#bin/godot", + zip_files, + ZIPROOT=zip_dir, + ZIPSUFFIX="${PROGSUFFIX}${ZIPSUFFIX}", + ZIPCOMSTR="Archving $SOURCES as $TARGET", +) diff --git a/platform/javascript/detect.py b/platform/javascript/detect.py index 1766833364..fabcc5f96b 100644 --- a/platform/javascript/detect.py +++ b/platform/javascript/detect.py @@ -6,30 +6,31 @@ def is_active(): def get_name(): - return 'JavaScript' + return "JavaScript" def can_build(): - return 'EM_CONFIG' in os.environ or os.path.exists(os.path.expanduser('~/.emscripten')) + return "EM_CONFIG" in os.environ or os.path.exists(os.path.expanduser("~/.emscripten")) def get_opts(): from SCons.Variables import BoolVariable + return [ # eval() can be a security concern, so it can be disabled. 
- BoolVariable('javascript_eval', 'Enable JavaScript eval interface', True), + BoolVariable("javascript_eval", "Enable JavaScript eval interface", True), ] def get_flags(): return [ - ('tools', False), - ('builtin_pcre2_with_jit', False), + ("tools", False), + ("builtin_pcre2_with_jit", False), # Disabling the mbedtls module reduces file size. # The module has little use due to the limited networking functionality # in this platform. For the available networking methods, the browser # manages TLS. - ('module_mbedtls_enabled', False), + ("module_mbedtls_enabled", False), ] @@ -37,30 +38,30 @@ def configure(env): ## Build type - if env['target'] != 'debug': + if env["target"] != "debug": # Use -Os to prioritize optimizing for reduced file size. This is # particularly valuable for the web platform because it directly # decreases download time. # -Os reduces file size by around 5 MiB over -O3. -Oz only saves about # 100 KiB over -Os, which does not justify the negative impact on # run-time performance. - env.Append(CCFLAGS=['-Os']) - env.Append(LINKFLAGS=['-Os']) - if env['target'] == 'release_debug': - env.Append(CPPDEFINES=['DEBUG_ENABLED']) + env.Append(CCFLAGS=["-Os"]) + env.Append(LINKFLAGS=["-Os"]) + if env["target"] == "release_debug": + env.Append(CPPDEFINES=["DEBUG_ENABLED"]) # Retain function names for backtraces at the cost of file size. - env.Append(LINKFLAGS=['--profiling-funcs']) + env.Append(LINKFLAGS=["--profiling-funcs"]) else: - env.Append(CPPDEFINES=['DEBUG_ENABLED']) - env.Append(CCFLAGS=['-O1', '-g']) - env.Append(LINKFLAGS=['-O1', '-g']) - env.Append(LINKFLAGS=['-s', 'ASSERTIONS=1']) + env.Append(CPPDEFINES=["DEBUG_ENABLED"]) + env.Append(CCFLAGS=["-O1", "-g"]) + env.Append(LINKFLAGS=["-O1", "-g"]) + env.Append(LINKFLAGS=["-s", "ASSERTIONS=1"]) ## Compiler configuration - env['ENV'] = os.environ + env["ENV"] = os.environ - em_config_file = os.getenv('EM_CONFIG') or os.path.expanduser('~/.emscripten') + em_config_file = os.getenv("EM_CONFIG") or os.path.expanduser("~/.emscripten") if not os.path.exists(em_config_file): raise RuntimeError("Emscripten configuration file '%s' does not exist" % em_config_file) with open(em_config_file) as f: @@ -70,91 +71,91 @@ def configure(env): exec(f.read(), em_config) except StandardError as e: raise RuntimeError("Emscripten configuration file '%s' is invalid:\n%s" % (em_config_file, e)) - if 'BINARYEN_ROOT' in em_config and os.path.isdir(os.path.join(em_config.get('BINARYEN_ROOT'), 'emscripten')): + if "BINARYEN_ROOT" in em_config and os.path.isdir(os.path.join(em_config.get("BINARYEN_ROOT"), "emscripten")): # New style, emscripten path as a subfolder of BINARYEN_ROOT - env.PrependENVPath('PATH', os.path.join(em_config.get('BINARYEN_ROOT'), 'emscripten')) - elif 'EMSCRIPTEN_ROOT' in em_config: + env.PrependENVPath("PATH", os.path.join(em_config.get("BINARYEN_ROOT"), "emscripten")) + elif "EMSCRIPTEN_ROOT" in em_config: # Old style (but can be there as a result from previous activation, so do last) - env.PrependENVPath('PATH', em_config.get('EMSCRIPTEN_ROOT')) + env.PrependENVPath("PATH", em_config.get("EMSCRIPTEN_ROOT")) else: - raise RuntimeError("'BINARYEN_ROOT' or 'EMSCRIPTEN_ROOT' missing in Emscripten configuration file '%s'" % em_config_file) + raise RuntimeError( + "'BINARYEN_ROOT' or 'EMSCRIPTEN_ROOT' missing in Emscripten configuration file '%s'" % em_config_file + ) - env['CC'] = 'emcc' - env['CXX'] = 'em++' - env['LINK'] = 'emcc' + env["CC"] = "emcc" + env["CXX"] = "em++" + env["LINK"] = "emcc" - env['AR'] = 'emar' - 
env['RANLIB'] = 'emranlib' + env["AR"] = "emar" + env["RANLIB"] = "emranlib" # Use TempFileMunge since some AR invocations are too long for cmd.exe. # Use POSIX-style paths, required with TempFileMunge. - env['ARCOM_POSIX'] = env['ARCOM'].replace( - '$TARGET', '$TARGET.posix').replace( - '$SOURCES', '$SOURCES.posix') - env['ARCOM'] = '${TEMPFILE(ARCOM_POSIX)}' + env["ARCOM_POSIX"] = env["ARCOM"].replace("$TARGET", "$TARGET.posix").replace("$SOURCES", "$SOURCES.posix") + env["ARCOM"] = "${TEMPFILE(ARCOM_POSIX)}" # All intermediate files are just LLVM bitcode. - env['OBJPREFIX'] = '' - env['OBJSUFFIX'] = '.bc' - env['PROGPREFIX'] = '' + env["OBJPREFIX"] = "" + env["OBJSUFFIX"] = ".bc" + env["PROGPREFIX"] = "" # Program() output consists of multiple files, so specify suffixes manually at builder. - env['PROGSUFFIX'] = '' - env['LIBPREFIX'] = 'lib' - env['LIBSUFFIX'] = '.bc' - env['LIBPREFIXES'] = ['$LIBPREFIX'] - env['LIBSUFFIXES'] = ['$LIBSUFFIX'] + env["PROGSUFFIX"] = "" + env["LIBPREFIX"] = "lib" + env["LIBSUFFIX"] = ".bc" + env["LIBPREFIXES"] = ["$LIBPREFIX"] + env["LIBSUFFIXES"] = ["$LIBSUFFIX"] ## Compile flags - env.Prepend(CPPPATH=['#platform/javascript']) - env.Append(CPPDEFINES=['JAVASCRIPT_ENABLED', 'UNIX_ENABLED']) + env.Prepend(CPPPATH=["#platform/javascript"]) + env.Append(CPPDEFINES=["JAVASCRIPT_ENABLED", "UNIX_ENABLED"]) # No multi-threading (SharedArrayBuffer) available yet, # once feasible also consider memory buffer size issues. - env.Append(CPPDEFINES=['NO_THREADS']) + env.Append(CPPDEFINES=["NO_THREADS"]) # Disable exceptions and rtti on non-tools (template) builds - if not env['tools']: + if not env["tools"]: # These flags help keep the file size down. - env.Append(CCFLAGS=['-fno-exceptions', '-fno-rtti']) + env.Append(CCFLAGS=["-fno-exceptions", "-fno-rtti"]) # Don't use dynamic_cast, necessary with no-rtti. - env.Append(CPPDEFINES=['NO_SAFE_CAST']) + env.Append(CPPDEFINES=["NO_SAFE_CAST"]) - if env['javascript_eval']: - env.Append(CPPDEFINES=['JAVASCRIPT_EVAL_ENABLED']) + if env["javascript_eval"]: + env.Append(CPPDEFINES=["JAVASCRIPT_EVAL_ENABLED"]) ## Link flags # We use IDBFS in javascript_main.cpp. Since Emscripten 1.39.1 it needs to # be linked explicitly. - env.Append(LIBS=['idbfs.js']) + env.Append(LIBS=["idbfs.js"]) - env.Append(LINKFLAGS=['-s', 'BINARYEN=1']) + env.Append(LINKFLAGS=["-s", "BINARYEN=1"]) # Only include the JavaScript support code for the web environment # (i.e. exclude Node.js and other unused environments). # This makes the JavaScript support code about 4 KB smaller. - env.Append(LINKFLAGS=['-s', 'ENVIRONMENT=web']) + env.Append(LINKFLAGS=["-s", "ENVIRONMENT=web"]) # This needs to be defined for Emscripten using 'fastcomp' (default pre-1.39.0) # and undefined if using 'upstream'. And to make things simple, earlier # Emscripten versions didn't include 'fastcomp' in their path, so we check # against the presence of 'upstream' to conditionally add the flag. - if not "upstream" in em_config['EMSCRIPTEN_ROOT']: - env.Append(LINKFLAGS=['-s', 'BINARYEN_TRAP_MODE=\'clamp\'']) + if not "upstream" in em_config["EMSCRIPTEN_ROOT"]: + env.Append(LINKFLAGS=["-s", "BINARYEN_TRAP_MODE='clamp'"]) # Allow increasing memory buffer size during runtime. This is efficient # when using WebAssembly (in comparison to asm.js) and works well for # us since we don't know requirements at compile-time. 
- env.Append(LINKFLAGS=['-s', 'ALLOW_MEMORY_GROWTH=1']) + env.Append(LINKFLAGS=["-s", "ALLOW_MEMORY_GROWTH=1"]) # This setting just makes WebGL 2 APIs available, it does NOT disable WebGL 1. - env.Append(LINKFLAGS=['-s', 'USE_WEBGL2=1']) + env.Append(LINKFLAGS=["-s", "USE_WEBGL2=1"]) - env.Append(LINKFLAGS=['-s', 'INVOKE_RUN=0']) + env.Append(LINKFLAGS=["-s", "INVOKE_RUN=0"]) # TODO: Reevaluate usage of this setting now that engine.js manages engine runtime. - env.Append(LINKFLAGS=['-s', 'NO_EXIT_RUNTIME=1']) + env.Append(LINKFLAGS=["-s", "NO_EXIT_RUNTIME=1"]) - #adding flag due to issue with emscripten 1.38.41 callMain method https://github.com/emscripten-core/emscripten/blob/incoming/ChangeLog.md#v13841-08072019 - env.Append(LINKFLAGS=['-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["callMain"]']) + # adding flag due to issue with emscripten 1.38.41 callMain method https://github.com/emscripten-core/emscripten/blob/incoming/ChangeLog.md#v13841-08072019 + env.Append(LINKFLAGS=["-s", 'EXTRA_EXPORTED_RUNTIME_METHODS=["callMain"]']) diff --git a/platform/osx/SCsub b/platform/osx/SCsub index e15b4339a7..6d912ed955 100644 --- a/platform/osx/SCsub +++ b/platform/osx/SCsub @@ -1,22 +1,21 @@ #!/usr/bin/env python -Import('env') +Import("env") from platform_methods import run_in_subprocess import platform_osx_builders files = [ - 'crash_handler_osx.mm', - 'os_osx.mm', - 'godot_main_osx.mm', - 'semaphore_osx.cpp', - 'dir_access_osx.mm', - 'joypad_osx.cpp', - 'power_osx.cpp', + "crash_handler_osx.mm", + "os_osx.mm", + "godot_main_osx.mm", + "semaphore_osx.cpp", + "dir_access_osx.mm", + "joypad_osx.cpp", + "power_osx.cpp", ] -prog = env.add_program('#bin/godot', files) +prog = env.add_program("#bin/godot", files) if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]: env.AddPostAction(prog, run_in_subprocess(platform_osx_builders.make_debug_osx)) - diff --git a/platform/osx/detect.py b/platform/osx/detect.py index fe839199e8..27dc58c26c 100644 --- a/platform/osx/detect.py +++ b/platform/osx/detect.py @@ -13,7 +13,7 @@ def get_name(): def can_build(): - if (sys.platform == "darwin" or ("OSXCROSS_ROOT" in os.environ)): + if sys.platform == "darwin" or ("OSXCROSS_ROOT" in os.environ): return True return False @@ -23,51 +23,50 @@ def get_opts(): from SCons.Variables import BoolVariable, EnumVariable return [ - ('osxcross_sdk', 'OSXCross SDK version', 'darwin14'), - ('MACOS_SDK_PATH', 'Path to the macOS SDK', ''), - EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')), - BoolVariable('separate_debug_symbols', 'Create a separate file containing debugging symbols', False), - BoolVariable('use_ubsan', 'Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)', False), - BoolVariable('use_asan', 'Use LLVM/GCC compiler address sanitizer (ASAN))', False), - BoolVariable('use_tsan', 'Use LLVM/GCC compiler thread sanitizer (TSAN))', False), + ("osxcross_sdk", "OSXCross SDK version", "darwin14"), + ("MACOS_SDK_PATH", "Path to the macOS SDK", ""), + EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")), + BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False), + BoolVariable("use_ubsan", "Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)", False), + BoolVariable("use_asan", "Use LLVM/GCC compiler address sanitizer (ASAN))", False), + BoolVariable("use_tsan", "Use LLVM/GCC compiler thread sanitizer (TSAN))", False), ] def 
get_flags(): - return [ - ] + return [] def configure(env): - ## Build type + ## Build type - if (env["target"] == "release"): - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Prepend(CCFLAGS=['-O3', '-fomit-frame-pointer', '-ftree-vectorize', '-msse2']) - else: #optimize for size - env.Prepend(CCFLAGS=['-Os','-ftree-vectorize', '-msse2']) + if env["target"] == "release": + if env["optimize"] == "speed": # optimize for speed (default) + env.Prepend(CCFLAGS=["-O3", "-fomit-frame-pointer", "-ftree-vectorize", "-msse2"]) + else: # optimize for size + env.Prepend(CCFLAGS=["-Os", "-ftree-vectorize", "-msse2"]) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - elif (env["target"] == "release_debug"): - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Prepend(CCFLAGS=['-O2']) - else: #optimize for size - env.Prepend(CCFLAGS=['-Os']) - env.Prepend(CPPDEFINES=['DEBUG_ENABLED']) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + elif env["target"] == "release_debug": + if env["optimize"] == "speed": # optimize for speed (default) + env.Prepend(CCFLAGS=["-O2"]) + else: # optimize for size + env.Prepend(CCFLAGS=["-Os"]) + env.Prepend(CPPDEFINES=["DEBUG_ENABLED"]) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - elif (env["target"] == "debug"): - env.Prepend(CCFLAGS=['-g3']) - env.Prepend(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED']) + elif env["target"] == "debug": + env.Prepend(CCFLAGS=["-g3"]) + env.Prepend(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"]) ## Architecture @@ -81,76 +80,113 @@ def configure(env): if "OSXCROSS_ROOT" in os.environ: env["osxcross"] = True - if not "osxcross" in env: # regular native build - env.Append(CCFLAGS=['-arch', 'x86_64']) - env.Append(LINKFLAGS=['-arch', 'x86_64']) - if (env["macports_clang"] != 'no'): + if not "osxcross" in env: # regular native build + env.Append(CCFLAGS=["-arch", "x86_64"]) + env.Append(LINKFLAGS=["-arch", "x86_64"]) + if env["macports_clang"] != "no": mpprefix = os.environ.get("MACPORTS_PREFIX", "/opt/local") mpclangver = env["macports_clang"] env["CC"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang" env["LINK"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang++" env["CXX"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang++" - env['AR'] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ar" - env['RANLIB'] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ranlib" - env['AS'] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-as" - env.Append(CPPDEFINES=['__MACPORTS__']) #hack to fix libvpx MM256_BROADCASTSI128_SI256 define + env["AR"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ar" + env["RANLIB"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ranlib" + env["AS"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-as" + env.Append(CPPDEFINES=["__MACPORTS__"]) # hack to fix libvpx MM256_BROADCASTSI128_SI256 define else: - env['CC'] = 'clang' - env['CXX'] = 'clang++' + env["CC"] = "clang" + env["CXX"] = "clang++" - detect_darwin_sdk_path('osx', env) - env.Append(CCFLAGS=['-isysroot', '$MACOS_SDK_PATH']) - 
env.Append(LINKFLAGS=['-isysroot', '$MACOS_SDK_PATH']) + detect_darwin_sdk_path("osx", env) + env.Append(CCFLAGS=["-isysroot", "$MACOS_SDK_PATH"]) + env.Append(LINKFLAGS=["-isysroot", "$MACOS_SDK_PATH"]) - else: # osxcross build + else: # osxcross build root = os.environ.get("OSXCROSS_ROOT", 0) basecmd = root + "/target/bin/x86_64-apple-" + env["osxcross_sdk"] + "-" ccache_path = os.environ.get("CCACHE") if ccache_path is None: - env['CC'] = basecmd + "cc" - env['CXX'] = basecmd + "c++" + env["CC"] = basecmd + "cc" + env["CXX"] = basecmd + "c++" else: # there aren't any ccache wrappers available for OS X cross-compile, # to enable caching we need to prepend the path to the ccache binary - env['CC'] = ccache_path + ' ' + basecmd + "cc" - env['CXX'] = ccache_path + ' ' + basecmd + "c++" - env['AR'] = basecmd + "ar" - env['RANLIB'] = basecmd + "ranlib" - env['AS'] = basecmd + "as" - env.Append(CPPDEFINES=['__MACPORTS__']) #hack to fix libvpx MM256_BROADCASTSI128_SI256 define + env["CC"] = ccache_path + " " + basecmd + "cc" + env["CXX"] = ccache_path + " " + basecmd + "c++" + env["AR"] = basecmd + "ar" + env["RANLIB"] = basecmd + "ranlib" + env["AS"] = basecmd + "as" + env.Append(CPPDEFINES=["__MACPORTS__"]) # hack to fix libvpx MM256_BROADCASTSI128_SI256 define - if (env["CXX"] == "clang++"): - env.Append(CPPDEFINES=['TYPED_METHOD_BIND']) + if env["CXX"] == "clang++": + env.Append(CPPDEFINES=["TYPED_METHOD_BIND"]) env["CC"] = "clang" env["LINK"] = "clang++" - if env['use_ubsan'] or env['use_asan'] or env['use_tsan']: + if env["use_ubsan"] or env["use_asan"] or env["use_tsan"]: env.extra_suffix += "s" - if env['use_ubsan']: - env.Append(CCFLAGS=['-fsanitize=undefined']) - env.Append(LINKFLAGS=['-fsanitize=undefined']) + if env["use_ubsan"]: + env.Append(CCFLAGS=["-fsanitize=undefined"]) + env.Append(LINKFLAGS=["-fsanitize=undefined"]) - if env['use_asan']: - env.Append(CCFLAGS=['-fsanitize=address']) - env.Append(LINKFLAGS=['-fsanitize=address']) + if env["use_asan"]: + env.Append(CCFLAGS=["-fsanitize=address"]) + env.Append(LINKFLAGS=["-fsanitize=address"]) - if env['use_tsan']: - env.Append(CCFLAGS=['-fsanitize=thread']) - env.Append(LINKFLAGS=['-fsanitize=thread']) + if env["use_tsan"]: + env.Append(CCFLAGS=["-fsanitize=thread"]) + env.Append(LINKFLAGS=["-fsanitize=thread"]) ## Dependencies - if env['builtin_libtheora']: + if env["builtin_libtheora"]: env["x86_libtheora_opt_gcc"] = True ## Flags - env.Prepend(CPPPATH=['#platform/osx']) - env.Append(CPPDEFINES=['OSX_ENABLED', 'UNIX_ENABLED', 'GLES_ENABLED', 'APPLE_STYLE_KEYS', 'COREAUDIO_ENABLED', 'COREMIDI_ENABLED']) - env.Append(LINKFLAGS=['-framework', 'Cocoa', '-framework', 'Carbon', '-framework', 'OpenGL', '-framework', 'AGL', '-framework', 'AudioUnit', '-framework', 'CoreAudio', '-framework', 'CoreMIDI', '-lz', '-framework', 'IOKit', '-framework', 'ForceFeedback', '-framework', 'AVFoundation', '-framework', 'CoreMedia', '-framework', 'CoreVideo']) - env.Append(LIBS=['pthread']) + env.Prepend(CPPPATH=["#platform/osx"]) + env.Append( + CPPDEFINES=[ + "OSX_ENABLED", + "UNIX_ENABLED", + "GLES_ENABLED", + "APPLE_STYLE_KEYS", + "COREAUDIO_ENABLED", + "COREMIDI_ENABLED", + ] + ) + env.Append( + LINKFLAGS=[ + "-framework", + "Cocoa", + "-framework", + "Carbon", + "-framework", + "OpenGL", + "-framework", + "AGL", + "-framework", + "AudioUnit", + "-framework", + "CoreAudio", + "-framework", + "CoreMIDI", + "-lz", + "-framework", + "IOKit", + "-framework", + "ForceFeedback", + "-framework", + "AVFoundation", + "-framework", + "CoreMedia", + 
"-framework", + "CoreVideo", + ] + ) + env.Append(LIBS=["pthread"]) - env.Append(CCFLAGS=['-mmacosx-version-min=10.9']) - env.Append(LINKFLAGS=['-mmacosx-version-min=10.9']) + env.Append(CCFLAGS=["-mmacosx-version-min=10.9"]) + env.Append(LINKFLAGS=["-mmacosx-version-min=10.9"]) diff --git a/platform/osx/platform_osx_builders.py b/platform/osx/platform_osx_builders.py index 81997f674b..953ed479db 100644 --- a/platform/osx/platform_osx_builders.py +++ b/platform/osx/platform_osx_builders.py @@ -8,14 +8,14 @@ from platform_methods import subprocess_main def make_debug_osx(target, source, env): - if (env["macports_clang"] != 'no'): + if env["macports_clang"] != "no": mpprefix = os.environ.get("MACPORTS_PREFIX", "/opt/local") mpclangver = env["macports_clang"] - os.system(mpprefix + '/libexec/llvm-' + mpclangver + '/bin/llvm-dsymutil {0} -o {0}.dSYM'.format(target[0])) + os.system(mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-dsymutil {0} -o {0}.dSYM".format(target[0])) else: - os.system('dsymutil {0} -o {0}.dSYM'.format(target[0])) - os.system('strip -u -r {0}'.format(target[0])) + os.system("dsymutil {0} -o {0}.dSYM".format(target[0])) + os.system("strip -u -r {0}".format(target[0])) -if __name__ == '__main__': +if __name__ == "__main__": subprocess_main(globals()) diff --git a/platform/server/SCsub b/platform/server/SCsub index f977275595..00402d0929 100644 --- a/platform/server/SCsub +++ b/platform/server/SCsub @@ -2,18 +2,18 @@ import sys -Import('env') +Import("env") -common_server = [\ - "os_server.cpp",\ +common_server = [ + "os_server.cpp", ] if sys.platform == "darwin": - common_server.append("#platform/osx/crash_handler_osx.mm") - common_server.append("#platform/osx/power_osx.cpp") - common_server.append("#platform/osx/semaphore_osx.cpp") + common_server.append("#platform/osx/crash_handler_osx.mm") + common_server.append("#platform/osx/power_osx.cpp") + common_server.append("#platform/osx/semaphore_osx.cpp") else: - common_server.append("#platform/x11/crash_handler_x11.cpp") - common_server.append("#platform/x11/power_x11.cpp") + common_server.append("#platform/x11/crash_handler_x11.cpp") + common_server.append("#platform/x11/power_x11.cpp") -prog = env.add_program('#bin/godot_server', ['godot_server.cpp'] + common_server) +prog = env.add_program("#bin/godot_server", ["godot_server.cpp"] + common_server) diff --git a/platform/server/detect.py b/platform/server/detect.py index b195ecb186..32dc1a164a 100644 --- a/platform/server/detect.py +++ b/platform/server/detect.py @@ -5,6 +5,7 @@ import sys # This file is mostly based on platform/x11/detect.py. # If editing this file, make sure to apply relevant changes here too. 
+ def is_active(): return True @@ -14,14 +15,14 @@ def get_name(): def get_program_suffix(): - if (sys.platform == "darwin"): + if sys.platform == "darwin": return "osx" return "x11" def can_build(): - if (os.name != "posix"): + if os.name != "posix": return False return True @@ -29,16 +30,17 @@ def can_build(): def get_opts(): from SCons.Variables import BoolVariable, EnumVariable + return [ - BoolVariable('use_llvm', 'Use the LLVM compiler', False), - BoolVariable('use_static_cpp', 'Link libgcc and libstdc++ statically for better portability', False), - BoolVariable('use_ubsan', 'Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)', False), - BoolVariable('use_asan', 'Use LLVM/GCC compiler address sanitizer (ASAN))', False), - BoolVariable('use_lsan', 'Use LLVM/GCC compiler leak sanitizer (LSAN))', False), - BoolVariable('use_tsan', 'Use LLVM/GCC compiler thread sanitizer (TSAN))', False), - EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')), - BoolVariable('separate_debug_symbols', 'Create a separate file containing debugging symbols', False), - BoolVariable('execinfo', 'Use libexecinfo on systems where glibc is not available', False), + BoolVariable("use_llvm", "Use the LLVM compiler", False), + BoolVariable("use_static_cpp", "Link libgcc and libstdc++ statically for better portability", False), + BoolVariable("use_ubsan", "Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)", False), + BoolVariable("use_asan", "Use LLVM/GCC compiler address sanitizer (ASAN))", False), + BoolVariable("use_lsan", "Use LLVM/GCC compiler leak sanitizer (LSAN))", False), + BoolVariable("use_tsan", "Use LLVM/GCC compiler thread sanitizer (TSAN))", False), + EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")), + BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False), + BoolVariable("execinfo", "Use libexecinfo on systems where glibc is not available", False), ] @@ -51,86 +53,85 @@ def configure(env): ## Build type - if (env["target"] == "release"): - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Prepend(CCFLAGS=['-O3']) - else: #optimize for size - env.Prepend(CCFLAGS=['-Os']) + if env["target"] == "release": + if env["optimize"] == "speed": # optimize for speed (default) + env.Prepend(CCFLAGS=["-O3"]) + else: # optimize for size + env.Prepend(CCFLAGS=["-Os"]) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - elif (env["target"] == "release_debug"): - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Prepend(CCFLAGS=['-O2']) - else: #optimize for size - env.Prepend(CCFLAGS=['-Os']) - env.Prepend(CPPDEFINES=['DEBUG_ENABLED']) + elif env["target"] == "release_debug": + if env["optimize"] == "speed": # optimize for speed (default) + env.Prepend(CCFLAGS=["-O2"]) + else: # optimize for size + env.Prepend(CCFLAGS=["-Os"]) + env.Prepend(CPPDEFINES=["DEBUG_ENABLED"]) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - elif (env["target"] == "debug"): - 
env.Prepend(CCFLAGS=['-g3']) - env.Prepend(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED']) - env.Append(LINKFLAGS=['-rdynamic']) + elif env["target"] == "debug": + env.Prepend(CCFLAGS=["-g3"]) + env.Prepend(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"]) + env.Append(LINKFLAGS=["-rdynamic"]) ## Architecture - is64 = sys.maxsize > 2**32 - if (env["bits"] == "default"): + is64 = sys.maxsize > 2 ** 32 + if env["bits"] == "default": env["bits"] = "64" if is64 else "32" ## Compiler configuration - if 'CXX' in env and 'clang' in os.path.basename(env['CXX']): + if "CXX" in env and "clang" in os.path.basename(env["CXX"]): # Convenience check to enforce the use_llvm overrides when CXX is clang(++) - env['use_llvm'] = True + env["use_llvm"] = True - if env['use_llvm']: - if ('clang++' not in os.path.basename(env['CXX'])): + if env["use_llvm"]: + if "clang++" not in os.path.basename(env["CXX"]): env["CC"] = "clang" env["CXX"] = "clang++" env["LINK"] = "clang++" - env.Append(CPPDEFINES=['TYPED_METHOD_BIND']) + env.Append(CPPDEFINES=["TYPED_METHOD_BIND"]) env.extra_suffix = ".llvm" + env.extra_suffix - - if env['use_ubsan'] or env['use_asan'] or env['use_lsan'] or env['use_tsan']: + if env["use_ubsan"] or env["use_asan"] or env["use_lsan"] or env["use_tsan"]: env.extra_suffix += "s" - if env['use_ubsan']: - env.Append(CCFLAGS=['-fsanitize=undefined']) - env.Append(LINKFLAGS=['-fsanitize=undefined']) + if env["use_ubsan"]: + env.Append(CCFLAGS=["-fsanitize=undefined"]) + env.Append(LINKFLAGS=["-fsanitize=undefined"]) - if env['use_asan']: - env.Append(CCFLAGS=['-fsanitize=address']) - env.Append(LINKFLAGS=['-fsanitize=address']) + if env["use_asan"]: + env.Append(CCFLAGS=["-fsanitize=address"]) + env.Append(LINKFLAGS=["-fsanitize=address"]) - if env['use_lsan']: - env.Append(CCFLAGS=['-fsanitize=leak']) - env.Append(LINKFLAGS=['-fsanitize=leak']) + if env["use_lsan"]: + env.Append(CCFLAGS=["-fsanitize=leak"]) + env.Append(LINKFLAGS=["-fsanitize=leak"]) - if env['use_tsan']: - env.Append(CCFLAGS=['-fsanitize=thread']) - env.Append(LINKFLAGS=['-fsanitize=thread']) + if env["use_tsan"]: + env.Append(CCFLAGS=["-fsanitize=thread"]) + env.Append(LINKFLAGS=["-fsanitize=thread"]) - if env['use_lto']: - env.Append(CCFLAGS=['-flto']) - if not env['use_llvm'] and env.GetOption("num_jobs") > 1: - env.Append(LINKFLAGS=['-flto=' + str(env.GetOption("num_jobs"))]) + if env["use_lto"]: + env.Append(CCFLAGS=["-flto"]) + if not env["use_llvm"] and env.GetOption("num_jobs") > 1: + env.Append(LINKFLAGS=["-flto=" + str(env.GetOption("num_jobs"))]) else: - env.Append(LINKFLAGS=['-flto']) - if not env['use_llvm']: - env['RANLIB'] = 'gcc-ranlib' - env['AR'] = 'gcc-ar' + env.Append(LINKFLAGS=["-flto"]) + if not env["use_llvm"]: + env["RANLIB"] = "gcc-ranlib" + env["AR"] = "gcc-ar" - env.Append(CCFLAGS=['-pipe']) - env.Append(LINKFLAGS=['-pipe']) + env.Append(CCFLAGS=["-pipe"]) + env.Append(LINKFLAGS=["-pipe"]) ## Dependencies @@ -138,109 +139,114 @@ def configure(env): # freetype depends on libpng and zlib, so bundling one of them while keeping others # as shared libraries leads to weird issues - if env['builtin_freetype'] or env['builtin_libpng'] or env['builtin_zlib']: - env['builtin_freetype'] = True - env['builtin_libpng'] = True - env['builtin_zlib'] = True + if env["builtin_freetype"] or env["builtin_libpng"] or env["builtin_zlib"]: + env["builtin_freetype"] = True + env["builtin_libpng"] = True + env["builtin_zlib"] = True - if not env['builtin_freetype']: - env.ParseConfig('pkg-config freetype2 --cflags 
--libs') + if not env["builtin_freetype"]: + env.ParseConfig("pkg-config freetype2 --cflags --libs") - if not env['builtin_libpng']: - env.ParseConfig('pkg-config libpng16 --cflags --libs') + if not env["builtin_libpng"]: + env.ParseConfig("pkg-config libpng16 --cflags --libs") - if not env['builtin_bullet']: + if not env["builtin_bullet"]: # We need at least version 2.89 import subprocess - bullet_version = subprocess.check_output(['pkg-config', 'bullet', '--modversion']).strip() + + bullet_version = subprocess.check_output(["pkg-config", "bullet", "--modversion"]).strip() if str(bullet_version) < "2.89": # Abort as system bullet was requested but too old - print("Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(bullet_version, "2.89")) + print( + "Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format( + bullet_version, "2.89" + ) + ) sys.exit(255) - env.ParseConfig('pkg-config bullet --cflags --libs') + env.ParseConfig("pkg-config bullet --cflags --libs") if False: # not env['builtin_assimp']: # FIXME: Add min version check - env.ParseConfig('pkg-config assimp --cflags --libs') + env.ParseConfig("pkg-config assimp --cflags --libs") - if not env['builtin_enet']: - env.ParseConfig('pkg-config libenet --cflags --libs') + if not env["builtin_enet"]: + env.ParseConfig("pkg-config libenet --cflags --libs") - if not env['builtin_squish']: - env.ParseConfig('pkg-config libsquish --cflags --libs') + if not env["builtin_squish"]: + env.ParseConfig("pkg-config libsquish --cflags --libs") - if not env['builtin_zstd']: - env.ParseConfig('pkg-config libzstd --cflags --libs') + if not env["builtin_zstd"]: + env.ParseConfig("pkg-config libzstd --cflags --libs") # Sound and video libraries # Keep the order as it triggers chained dependencies (ogg needed by others, etc.) 
- if not env['builtin_libtheora']: - env['builtin_libogg'] = False # Needed to link against system libtheora - env['builtin_libvorbis'] = False # Needed to link against system libtheora - env.ParseConfig('pkg-config theora theoradec --cflags --libs') + if not env["builtin_libtheora"]: + env["builtin_libogg"] = False # Needed to link against system libtheora + env["builtin_libvorbis"] = False # Needed to link against system libtheora + env.ParseConfig("pkg-config theora theoradec --cflags --libs") else: - list_of_x86 = ['x86_64', 'x86', 'i386', 'i586'] + list_of_x86 = ["x86_64", "x86", "i386", "i586"] if any(platform.machine() in s for s in list_of_x86): env["x86_libtheora_opt_gcc"] = True - if not env['builtin_libvpx']: - env.ParseConfig('pkg-config vpx --cflags --libs') + if not env["builtin_libvpx"]: + env.ParseConfig("pkg-config vpx --cflags --libs") - if not env['builtin_libvorbis']: - env['builtin_libogg'] = False # Needed to link against system libvorbis - env.ParseConfig('pkg-config vorbis vorbisfile --cflags --libs') + if not env["builtin_libvorbis"]: + env["builtin_libogg"] = False # Needed to link against system libvorbis + env.ParseConfig("pkg-config vorbis vorbisfile --cflags --libs") - if not env['builtin_opus']: - env['builtin_libogg'] = False # Needed to link against system opus - env.ParseConfig('pkg-config opus opusfile --cflags --libs') + if not env["builtin_opus"]: + env["builtin_libogg"] = False # Needed to link against system opus + env.ParseConfig("pkg-config opus opusfile --cflags --libs") - if not env['builtin_libogg']: - env.ParseConfig('pkg-config ogg --cflags --libs') + if not env["builtin_libogg"]: + env.ParseConfig("pkg-config ogg --cflags --libs") - if not env['builtin_libwebp']: - env.ParseConfig('pkg-config libwebp --cflags --libs') + if not env["builtin_libwebp"]: + env.ParseConfig("pkg-config libwebp --cflags --libs") - if not env['builtin_mbedtls']: + if not env["builtin_mbedtls"]: # mbedTLS does not provide a pkgconfig config yet. See https://github.com/ARMmbed/mbedtls/issues/228 - env.Append(LIBS=['mbedtls', 'mbedcrypto', 'mbedx509']) + env.Append(LIBS=["mbedtls", "mbedcrypto", "mbedx509"]) - if not env['builtin_wslay']: - env.ParseConfig('pkg-config libwslay --cflags --libs') + if not env["builtin_wslay"]: + env.ParseConfig("pkg-config libwslay --cflags --libs") - if not env['builtin_miniupnpc']: + if not env["builtin_miniupnpc"]: # No pkgconfig file so far, hardcode default paths. 
env.Prepend(CPPPATH=["/usr/include/miniupnpc"]) env.Append(LIBS=["miniupnpc"]) # On Linux wchar_t should be 32-bits # 16-bit library shouldn't be required due to compiler optimisations - if not env['builtin_pcre2']: - env.ParseConfig('pkg-config libpcre2-32 --cflags --libs') + if not env["builtin_pcre2"]: + env.ParseConfig("pkg-config libpcre2-32 --cflags --libs") ## Flags # Linkflags below this line should typically stay the last ones - if not env['builtin_zlib']: - env.ParseConfig('pkg-config zlib --cflags --libs') + if not env["builtin_zlib"]: + env.ParseConfig("pkg-config zlib --cflags --libs") - env.Prepend(CPPPATH=['#platform/server']) - env.Append(CPPDEFINES=['SERVER_ENABLED', 'UNIX_ENABLED']) + env.Prepend(CPPPATH=["#platform/server"]) + env.Append(CPPDEFINES=["SERVER_ENABLED", "UNIX_ENABLED"]) - if (platform.system() == "Darwin"): - env.Append(LINKFLAGS=['-framework', 'Cocoa', '-framework', 'Carbon', '-lz', '-framework', 'IOKit']) + if platform.system() == "Darwin": + env.Append(LINKFLAGS=["-framework", "Cocoa", "-framework", "Carbon", "-lz", "-framework", "IOKit"]) - env.Append(LIBS=['pthread']) + env.Append(LIBS=["pthread"]) - if (platform.system() == "Linux"): - env.Append(LIBS=['dl']) + if platform.system() == "Linux": + env.Append(LIBS=["dl"]) - if (platform.system().find("BSD") >= 0): + if platform.system().find("BSD") >= 0: env["execinfo"] = True if env["execinfo"]: - env.Append(LIBS=['execinfo']) + env.Append(LIBS=["execinfo"]) # Link those statically for portability - if env['use_static_cpp']: - env.Append(LINKFLAGS=['-static-libgcc', '-static-libstdc++']) + if env["use_static_cpp"]: + env.Append(LINKFLAGS=["-static-libgcc", "-static-libstdc++"]) diff --git a/platform/uwp/SCsub b/platform/uwp/SCsub index c14290f0c4..e8295b9df9 100644 --- a/platform/uwp/SCsub +++ b/platform/uwp/SCsub @@ -1,22 +1,22 @@ #!/usr/bin/env python -Import('env') +Import("env") files = [ - 'thread_uwp.cpp', - '#platform/windows/key_mapping_windows.cpp', - '#platform/windows/windows_terminal_logger.cpp', - 'joypad_uwp.cpp', - 'power_uwp.cpp', - 'context_egl_uwp.cpp', - 'app.cpp', - 'os_uwp.cpp', + "thread_uwp.cpp", + "#platform/windows/key_mapping_windows.cpp", + "#platform/windows/windows_terminal_logger.cpp", + "joypad_uwp.cpp", + "power_uwp.cpp", + "context_egl_uwp.cpp", + "app.cpp", + "os_uwp.cpp", ] if "build_angle" in env and env["build_angle"]: - cmd = env.AlwaysBuild(env.ANGLE('libANGLE.lib', None)) + cmd = env.AlwaysBuild(env.ANGLE("libANGLE.lib", None)) -prog = env.add_program('#bin/godot', files) +prog = env.add_program("#bin/godot", files) if "build_angle" in env and env["build_angle"]: env.Depends(prog, [cmd]) diff --git a/platform/uwp/detect.py b/platform/uwp/detect.py index 000bd18e7d..669bfe6814 100644 --- a/platform/uwp/detect.py +++ b/platform/uwp/detect.py @@ -12,11 +12,11 @@ def get_name(): def can_build(): - if (os.name == "nt"): + if os.name == "nt": # building natively on windows! 
- if (os.getenv("VSINSTALLDIR")): + if os.getenv("VSINSTALLDIR"): - if (os.getenv("ANGLE_SRC_PATH") is None): + if os.getenv("ANGLE_SRC_PATH") is None: return False return True @@ -25,16 +25,16 @@ def can_build(): def get_opts(): return [ - ('msvc_version', 'MSVC version to use (ignored if the VCINSTALLDIR environment variable is set)', None), + ("msvc_version", "MSVC version to use (ignored if the VCINSTALLDIR environment variable is set)", None), ] def get_flags(): return [ - ('tools', False), - ('xaudio2', True), - ('builtin_pcre2_with_jit', False), + ("tools", False), + ("xaudio2", True), + ("builtin_pcre2_with_jit", False), ] @@ -42,45 +42,53 @@ def configure(env): env.msvc = True - if (env["bits"] != "default"): + if env["bits"] != "default": print("Error: bits argument is disabled for MSVC") - print(""" + print( + """ Bits argument is not supported for MSVC compilation. Architecture depends on the Native/Cross Compile Tools Prompt/Developer Console (or Visual Studio settings) that is being used to run SCons. As a consequence, bits argument is disabled. Run scons again without bits argument (example: scons p=uwp) and SCons will attempt to detect what MSVC compiler will be executed and inform you. - """) + """ + ) sys.exit() ## Build type - if (env["target"] == "release"): - env.Append(CCFLAGS=['/O2', '/GL']) - env.Append(CCFLAGS=['/MD']) - env.Append(LINKFLAGS=['/SUBSYSTEM:WINDOWS', '/LTCG']) + if env["target"] == "release": + env.Append(CCFLAGS=["/O2", "/GL"]) + env.Append(CCFLAGS=["/MD"]) + env.Append(LINKFLAGS=["/SUBSYSTEM:WINDOWS", "/LTCG"]) - elif (env["target"] == "release_debug"): - env.Append(CCFLAGS=['/O2', '/Zi']) - env.Append(CCFLAGS=['/MD']) - env.Append(CPPDEFINES=['DEBUG_ENABLED']) - env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE']) + elif env["target"] == "release_debug": + env.Append(CCFLAGS=["/O2", "/Zi"]) + env.Append(CCFLAGS=["/MD"]) + env.Append(CPPDEFINES=["DEBUG_ENABLED"]) + env.Append(LINKFLAGS=["/SUBSYSTEM:CONSOLE"]) - elif (env["target"] == "debug"): - env.Append(CCFLAGS=['/Zi']) - env.Append(CCFLAGS=['/MDd']) - env.Append(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED']) - env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE']) - env.Append(LINKFLAGS=['/DEBUG']) + elif env["target"] == "debug": + env.Append(CCFLAGS=["/Zi"]) + env.Append(CCFLAGS=["/MDd"]) + env.Append(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"]) + env.Append(LINKFLAGS=["/SUBSYSTEM:CONSOLE"]) + env.Append(LINKFLAGS=["/DEBUG"]) ## Compiler configuration - env['ENV'] = os.environ - vc_base_path = os.environ['VCTOOLSINSTALLDIR'] if "VCTOOLSINSTALLDIR" in os.environ else os.environ['VCINSTALLDIR'] + env["ENV"] = os.environ + vc_base_path = os.environ["VCTOOLSINSTALLDIR"] if "VCTOOLSINSTALLDIR" in os.environ else os.environ["VCINSTALLDIR"] # ANGLE angle_root = os.getenv("ANGLE_SRC_PATH") - env.Prepend(CPPPATH=[angle_root + '/include']) + env.Prepend(CPPPATH=[angle_root + "/include"]) jobs = str(env.GetOption("num_jobs")) - angle_build_cmd = "msbuild.exe " + angle_root + "/winrt/10/src/angle.sln /nologo /v:m /m:" + jobs + " /p:Configuration=Release /p:Platform=" + angle_build_cmd = ( + "msbuild.exe " + + angle_root + + "/winrt/10/src/angle.sln /nologo /v:m /m:" + + jobs + + " /p:Configuration=Release /p:Platform=" + ) if os.path.isfile(str(os.getenv("ANGLE_SRC_PATH")) + "/winrt/10/src/angle.sln"): env["build_angle"] = True @@ -88,49 +96,51 @@ def configure(env): ## Architecture arch = "" - if str(os.getenv('Platform')).lower() == "arm": + if str(os.getenv("Platform")).lower() == "arm": print("Compiled 
program architecture will be an ARM executable. (forcing bits=32).") arch = "arm" env["bits"] = "32" - env.Append(LINKFLAGS=['/MACHINE:ARM']) - env.Append(LIBPATH=[vc_base_path + 'lib/store/arm']) + env.Append(LINKFLAGS=["/MACHINE:ARM"]) + env.Append(LIBPATH=[vc_base_path + "lib/store/arm"]) angle_build_cmd += "ARM" - env.Append(LIBPATH=[angle_root + '/winrt/10/src/Release_ARM/lib']) + env.Append(LIBPATH=[angle_root + "/winrt/10/src/Release_ARM/lib"]) else: - compiler_version_str = methods.detect_visual_c_compiler_version(env['ENV']) + compiler_version_str = methods.detect_visual_c_compiler_version(env["ENV"]) - if(compiler_version_str == "amd64" or compiler_version_str == "x86_amd64"): + if compiler_version_str == "amd64" or compiler_version_str == "x86_amd64": env["bits"] = "64" print("Compiled program architecture will be a x64 executable (forcing bits=64).") - elif (compiler_version_str == "x86" or compiler_version_str == "amd64_x86"): + elif compiler_version_str == "x86" or compiler_version_str == "amd64_x86": env["bits"] = "32" print("Compiled program architecture will be a x86 executable. (forcing bits=32).") else: - print("Failed to detect MSVC compiler architecture version... Defaulting to 32-bit executable settings (forcing bits=32). Compilation attempt will continue, but SCons can not detect for what architecture this build is compiled for. You should check your settings/compilation setup.") + print( + "Failed to detect MSVC compiler architecture version... Defaulting to 32-bit executable settings (forcing bits=32). Compilation attempt will continue, but SCons can not detect for what architecture this build is compiled for. You should check your settings/compilation setup." + ) env["bits"] = "32" - if (env["bits"] == "32"): + if env["bits"] == "32": arch = "x86" angle_build_cmd += "Win32" - env.Append(LINKFLAGS=['/MACHINE:X86']) - env.Append(LIBPATH=[vc_base_path + 'lib/store']) - env.Append(LIBPATH=[angle_root + '/winrt/10/src/Release_Win32/lib']) + env.Append(LINKFLAGS=["/MACHINE:X86"]) + env.Append(LIBPATH=[vc_base_path + "lib/store"]) + env.Append(LIBPATH=[angle_root + "/winrt/10/src/Release_Win32/lib"]) else: arch = "x64" angle_build_cmd += "x64" - env.Append(LINKFLAGS=['/MACHINE:X64']) - env.Append(LIBPATH=[os.environ['VCINSTALLDIR'] + 'lib/store/amd64']) - env.Append(LIBPATH=[angle_root + '/winrt/10/src/Release_x64/lib']) + env.Append(LINKFLAGS=["/MACHINE:X64"]) + env.Append(LIBPATH=[os.environ["VCINSTALLDIR"] + "lib/store/amd64"]) + env.Append(LIBPATH=[angle_root + "/winrt/10/src/Release_x64/lib"]) env["PROGSUFFIX"] = "." + arch + env["PROGSUFFIX"] env["OBJSUFFIX"] = "." 
+ arch + env["OBJSUFFIX"] @@ -138,39 +148,61 @@ def configure(env): ## Compile flags - env.Prepend(CPPPATH=['#platform/uwp', '#drivers/windows']) - env.Append(CPPDEFINES=['UWP_ENABLED', 'WINDOWS_ENABLED', 'TYPED_METHOD_BIND']) - env.Append(CPPDEFINES=['GLES_ENABLED', 'GL_GLEXT_PROTOTYPES', 'EGL_EGLEXT_PROTOTYPES', 'ANGLE_ENABLED']) - winver = "0x0602" # Windows 8 is the minimum target for UWP build - env.Append(CPPDEFINES=[('WINVER', winver), ('_WIN32_WINNT', winver), 'WIN32']) + env.Prepend(CPPPATH=["#platform/uwp", "#drivers/windows"]) + env.Append(CPPDEFINES=["UWP_ENABLED", "WINDOWS_ENABLED", "TYPED_METHOD_BIND"]) + env.Append(CPPDEFINES=["GLES_ENABLED", "GL_GLEXT_PROTOTYPES", "EGL_EGLEXT_PROTOTYPES", "ANGLE_ENABLED"]) + winver = "0x0602" # Windows 8 is the minimum target for UWP build + env.Append(CPPDEFINES=[("WINVER", winver), ("_WIN32_WINNT", winver), "WIN32"]) - env.Append(CPPDEFINES=['__WRL_NO_DEFAULT_LIB__', ('PNG_ABORT', 'abort')]) + env.Append(CPPDEFINES=["__WRL_NO_DEFAULT_LIB__", ("PNG_ABORT", "abort")]) - env.Append(CPPFLAGS=['/AI', vc_base_path + 'lib/store/references']) - env.Append(CPPFLAGS=['/AI', vc_base_path + 'lib/x86/store/references']) + env.Append(CPPFLAGS=["/AI", vc_base_path + "lib/store/references"]) + env.Append(CPPFLAGS=["/AI", vc_base_path + "lib/x86/store/references"]) - env.Append(CCFLAGS='/FS /MP /GS /wd"4453" /wd"28204" /wd"4291" /Zc:wchar_t /Gm- /fp:precise /errorReport:prompt /WX- /Zc:forScope /Gd /EHsc /nologo'.split()) - env.Append(CPPDEFINES=['_UNICODE', 'UNICODE', ('WINAPI_FAMILY', 'WINAPI_FAMILY_APP')]) - env.Append(CXXFLAGS=['/ZW']) - env.Append(CCFLAGS=['/AI', vc_base_path + '\\vcpackages', '/AI', os.environ['WINDOWSSDKDIR'] + '\\References\\CommonConfiguration\\Neutral']) + env.Append( + CCFLAGS='/FS /MP /GS /wd"4453" /wd"28204" /wd"4291" /Zc:wchar_t /Gm- /fp:precise /errorReport:prompt /WX- /Zc:forScope /Gd /EHsc /nologo'.split() + ) + env.Append(CPPDEFINES=["_UNICODE", "UNICODE", ("WINAPI_FAMILY", "WINAPI_FAMILY_APP")]) + env.Append(CXXFLAGS=["/ZW"]) + env.Append( + CCFLAGS=[ + "/AI", + vc_base_path + "\\vcpackages", + "/AI", + os.environ["WINDOWSSDKDIR"] + "\\References\\CommonConfiguration\\Neutral", + ] + ) ## Link flags - env.Append(LINKFLAGS=['/MANIFEST:NO', '/NXCOMPAT', '/DYNAMICBASE', '/WINMD', '/APPCONTAINER', '/ERRORREPORT:PROMPT', '/NOLOGO', '/TLBID:1', '/NODEFAULTLIB:"kernel32.lib"', '/NODEFAULTLIB:"ole32.lib"']) + env.Append( + LINKFLAGS=[ + "/MANIFEST:NO", + "/NXCOMPAT", + "/DYNAMICBASE", + "/WINMD", + "/APPCONTAINER", + "/ERRORREPORT:PROMPT", + "/NOLOGO", + "/TLBID:1", + '/NODEFAULTLIB:"kernel32.lib"', + '/NODEFAULTLIB:"ole32.lib"', + ] + ) LIBS = [ - 'WindowsApp', - 'mincore', - 'ws2_32', - 'libANGLE', - 'libEGL', - 'libGLESv2', - 'bcrypt', + "WindowsApp", + "mincore", + "ws2_32", + "libANGLE", + "libEGL", + "libGLESv2", + "bcrypt", ] env.Append(LINKFLAGS=[p + ".lib" for p in LIBS]) # Incremental linking fix - env['BUILDERS']['ProgramOriginal'] = env['BUILDERS']['Program'] - env['BUILDERS']['Program'] = methods.precious_program + env["BUILDERS"]["ProgramOriginal"] = env["BUILDERS"]["Program"] + env["BUILDERS"]["Program"] = methods.precious_program - env.Append(BUILDERS={'ANGLE': env.Builder(action=angle_build_cmd)}) + env.Append(BUILDERS={"ANGLE": env.Builder(action=angle_build_cmd)}) diff --git a/platform/windows/SCsub b/platform/windows/SCsub index 892d734734..93e2dfaa4c 100644 --- a/platform/windows/SCsub +++ b/platform/windows/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") import os from 
platform_methods import run_in_subprocess @@ -14,17 +14,17 @@ common_win = [ "key_mapping_windows.cpp", "joypad_windows.cpp", "power_windows.cpp", - "windows_terminal_logger.cpp" + "windows_terminal_logger.cpp", ] -res_file = 'godot_res.rc' +res_file = "godot_res.rc" res_target = "godot_res" + env["OBJSUFFIX"] res_obj = env.RES(res_target, res_file) -prog = env.add_program('#bin/godot', common_win + res_obj, PROGSUFFIX=env["PROGSUFFIX"]) +prog = env.add_program("#bin/godot", common_win + res_obj, PROGSUFFIX=env["PROGSUFFIX"]) # Microsoft Visual Studio Project Generation -if env['vsproj']: +if env["vsproj"]: env.vs_srcs = env.vs_srcs + ["platform/windows/" + res_file] env.vs_srcs = env.vs_srcs + ["platform/windows/godot.natvis"] for x in common_win: diff --git a/platform/windows/detect.py b/platform/windows/detect.py index 782fbcb754..a10533222b 100644 --- a/platform/windows/detect.py +++ b/platform/windows/detect.py @@ -14,10 +14,10 @@ def get_name(): def can_build(): - if (os.name == "nt"): + if os.name == "nt": # Building natively on Windows # If VCINSTALLDIR is set in the OS environ, use traditional Godot logic to set up MSVC - if (os.getenv("VCINSTALLDIR")): # MSVC, manual setup + if os.getenv("VCINSTALLDIR"): # MSVC, manual setup return True # Otherwise, let SCons find MSVC if installed, or else Mingw. @@ -26,18 +26,18 @@ def can_build(): # null compiler. return True - if (os.name == "posix"): + if os.name == "posix": # Cross-compiling with MinGW-w64 (old MinGW32 is not supported) mingw32 = "i686-w64-mingw32-" mingw64 = "x86_64-w64-mingw32-" - if (os.getenv("MINGW32_PREFIX")): + if os.getenv("MINGW32_PREFIX"): mingw32 = os.getenv("MINGW32_PREFIX") - if (os.getenv("MINGW64_PREFIX")): + if os.getenv("MINGW64_PREFIX"): mingw64 = os.getenv("MINGW64_PREFIX") test = "gcc --version > /dev/null 2>&1" - if (os.system(mingw64 + test) == 0 or os.system(mingw32 + test) == 0): + if os.system(mingw64 + test) == 0 or os.system(mingw32 + test) == 0: return True return False @@ -48,47 +48,47 @@ def get_opts(): mingw32 = "" mingw64 = "" - if (os.name == "posix"): + if os.name == "posix": mingw32 = "i686-w64-mingw32-" mingw64 = "x86_64-w64-mingw32-" - if (os.getenv("MINGW32_PREFIX")): + if os.getenv("MINGW32_PREFIX"): mingw32 = os.getenv("MINGW32_PREFIX") - if (os.getenv("MINGW64_PREFIX")): + if os.getenv("MINGW64_PREFIX"): mingw64 = os.getenv("MINGW64_PREFIX") return [ - ('mingw_prefix_32', 'MinGW prefix (Win32)', mingw32), - ('mingw_prefix_64', 'MinGW prefix (Win64)', mingw64), + ("mingw_prefix_32", "MinGW prefix (Win32)", mingw32), + ("mingw_prefix_64", "MinGW prefix (Win64)", mingw64), # Targeted Windows version: 7 (and later), minimum supported version # XP support dropped after EOL due to missing API for IPv6 and other issues # Vista support dropped after EOL due to GH-10243 - ('target_win_version', 'Targeted Windows version, >= 0x0601 (Windows 7)', '0x0601'), - EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')), - BoolVariable('separate_debug_symbols', 'Create a separate file containing debugging symbols', False), - ('msvc_version', 'MSVC version to use. Ignored if VCINSTALLDIR is set in shell env.', None), - BoolVariable('use_mingw', 'Use the Mingw compiler, even if MSVC is installed. 
Only used on Windows.', False), - BoolVariable('use_llvm', 'Use the LLVM compiler', False), - BoolVariable('use_thinlto', 'Use ThinLTO', False), + ("target_win_version", "Targeted Windows version, >= 0x0601 (Windows 7)", "0x0601"), + EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")), + BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False), + ("msvc_version", "MSVC version to use. Ignored if VCINSTALLDIR is set in shell env.", None), + BoolVariable("use_mingw", "Use the Mingw compiler, even if MSVC is installed. Only used on Windows.", False), + BoolVariable("use_llvm", "Use the LLVM compiler", False), + BoolVariable("use_thinlto", "Use ThinLTO", False), ] def get_flags(): - return [ - ] + return [] def build_res_file(target, source, env): - if (env["bits"] == "32"): - cmdbase = env['mingw_prefix_32'] + if env["bits"] == "32": + cmdbase = env["mingw_prefix_32"] else: - cmdbase = env['mingw_prefix_64'] - cmdbase = cmdbase + 'windres --include-dir . ' + cmdbase = env["mingw_prefix_64"] + cmdbase = cmdbase + "windres --include-dir . " import subprocess + for x in range(len(source)): - cmd = cmdbase + '-i ' + str(source[x]) + ' -o ' + str(target[x]) + cmd = cmdbase + "-i " + str(source[x]) + " -o " + str(target[x]) try: out = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE).communicate() if len(out[1]): @@ -100,12 +100,14 @@ def build_res_file(target, source, env): def setup_msvc_manual(env): """Set up env to use MSVC manually, using VCINSTALLDIR""" - if (env["bits"] != "default"): - print(""" + if env["bits"] != "default": + print( + """ Bits argument is not supported for MSVC compilation. Architecture depends on the Native/Cross Compile Tools Prompt/Developer Console (or Visual Studio settings) that is being used to run SCons. As a consequence, bits argument is disabled. Run scons again without bits argument (example: scons p=windows) and SCons will attempt to detect what MSVC compiler will be executed and inform you. - """) + """ + ) raise SCons.Errors.UserError("Bits argument should not be used when using VCINSTALLDIR") # Force bits arg @@ -114,18 +116,21 @@ def setup_msvc_manual(env): env["x86_libtheora_opt_vc"] = True # find compiler manually - compiler_version_str = methods.detect_visual_c_compiler_version(env['ENV']) + compiler_version_str = methods.detect_visual_c_compiler_version(env["ENV"]) print("Found MSVC compiler: " + compiler_version_str) # If building for 64bit architecture, disable assembly optimisations for 32 bit builds (theora as of writing)... vc compiler for 64bit can not compile _asm - if(compiler_version_str == "amd64" or compiler_version_str == "x86_amd64"): + if compiler_version_str == "amd64" or compiler_version_str == "x86_amd64": env["bits"] = "64" env["x86_libtheora_opt_vc"] = False print("Compiled program architecture will be a 64 bit executable (forcing bits=64).") - elif (compiler_version_str == "x86" or compiler_version_str == "amd64_x86"): + elif compiler_version_str == "x86" or compiler_version_str == "amd64_x86": print("Compiled program architecture will be a 32 bit executable. (forcing bits=32).") else: - print("Failed to manually detect MSVC compiler architecture version... Defaulting to 32bit executable settings (forcing bits=32). Compilation attempt will continue, but SCons can not detect for what architecture this build is compiled for. 
You should check your settings/compilation setup, or avoid setting VCINSTALLDIR.") + print( + "Failed to manually detect MSVC compiler architecture version... Defaulting to 32bit executable settings (forcing bits=32). Compilation attempt will continue, but SCons can not detect for what architecture this build is compiled for. You should check your settings/compilation setup, or avoid setting VCINSTALLDIR." + ) + def setup_msvc_auto(env): """Set up MSVC using SCons's auto-detection logic""" @@ -138,94 +143,120 @@ def setup_msvc_auto(env): # (Ideally we'd decide on the tool config before configuring any # environment, and just set the env up once, but this function runs # on an existing env so this is the simplest way.) - env['MSVC_SETUP_RUN'] = False # Need to set this to re-run the tool - env['MSVS_VERSION'] = None - env['MSVC_VERSION'] = None - env['TARGET_ARCH'] = None - if env['bits'] != 'default': - env['TARGET_ARCH'] = {'32': 'x86', '64': 'x86_64'}[env['bits']] - if env.has_key('msvc_version'): - env['MSVC_VERSION'] = env['msvc_version'] - env.Tool('msvc') - env.Tool('mssdk') # we want the MS SDK + env["MSVC_SETUP_RUN"] = False # Need to set this to re-run the tool + env["MSVS_VERSION"] = None + env["MSVC_VERSION"] = None + env["TARGET_ARCH"] = None + if env["bits"] != "default": + env["TARGET_ARCH"] = {"32": "x86", "64": "x86_64"}[env["bits"]] + if env.has_key("msvc_version"): + env["MSVC_VERSION"] = env["msvc_version"] + env.Tool("msvc") + env.Tool("mssdk") # we want the MS SDK # Note: actual compiler version can be found in env['MSVC_VERSION'], e.g. "14.1" for VS2015 # Get actual target arch into bits (it may be "default" at this point): - if env['TARGET_ARCH'] in ('amd64', 'x86_64'): - env['bits'] = '64' + if env["TARGET_ARCH"] in ("amd64", "x86_64"): + env["bits"] = "64" else: - env['bits'] = '32' - print("Found MSVC version %s, arch %s, bits=%s" % (env['MSVC_VERSION'], env['TARGET_ARCH'], env['bits'])) - if env['TARGET_ARCH'] in ('amd64', 'x86_64'): + env["bits"] = "32" + print("Found MSVC version %s, arch %s, bits=%s" % (env["MSVC_VERSION"], env["TARGET_ARCH"], env["bits"])) + if env["TARGET_ARCH"] in ("amd64", "x86_64"): env["x86_libtheora_opt_vc"] = False + def setup_mingw(env): """Set up env for use with mingw""" # Nothing to do here print("Using MinGW") pass + def configure_msvc(env, manual_msvc_config): """Configure env to work with MSVC""" # Build type - if (env["target"] == "release"): - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Append(CCFLAGS=['/O2']) - else: # optimize for size - env.Append(CCFLAGS=['/O1']) - env.Append(LINKFLAGS=['/SUBSYSTEM:WINDOWS']) - env.Append(LINKFLAGS=['/ENTRY:mainCRTStartup']) - env.Append(LINKFLAGS=['/OPT:REF']) + if env["target"] == "release": + if env["optimize"] == "speed": # optimize for speed (default) + env.Append(CCFLAGS=["/O2"]) + else: # optimize for size + env.Append(CCFLAGS=["/O1"]) + env.Append(LINKFLAGS=["/SUBSYSTEM:WINDOWS"]) + env.Append(LINKFLAGS=["/ENTRY:mainCRTStartup"]) + env.Append(LINKFLAGS=["/OPT:REF"]) - elif (env["target"] == "release_debug"): - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Append(CCFLAGS=['/O2']) - else: # optimize for size - env.Append(CCFLAGS=['/O1']) - env.AppendUnique(CPPDEFINES = ['DEBUG_ENABLED']) - env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE']) - env.Append(LINKFLAGS=['/OPT:REF']) + elif env["target"] == "release_debug": + if env["optimize"] == "speed": # optimize for speed (default) + env.Append(CCFLAGS=["/O2"]) + else: # optimize for size + 
env.Append(CCFLAGS=["/O1"]) + env.AppendUnique(CPPDEFINES=["DEBUG_ENABLED"]) + env.Append(LINKFLAGS=["/SUBSYSTEM:CONSOLE"]) + env.Append(LINKFLAGS=["/OPT:REF"]) - elif (env["target"] == "debug"): - env.AppendUnique(CCFLAGS=['/Z7', '/Od', '/EHsc']) - env.AppendUnique(CPPDEFINES = ['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED', - 'D3D_DEBUG_INFO']) - env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE']) - env.Append(LINKFLAGS=['/DEBUG']) + elif env["target"] == "debug": + env.AppendUnique(CCFLAGS=["/Z7", "/Od", "/EHsc"]) + env.AppendUnique(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED", "D3D_DEBUG_INFO"]) + env.Append(LINKFLAGS=["/SUBSYSTEM:CONSOLE"]) + env.Append(LINKFLAGS=["/DEBUG"]) - if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes"): - env.AppendUnique(CCFLAGS=['/Z7']) - env.AppendUnique(LINKFLAGS=['/DEBUG']) + if env["debug_symbols"] == "full" or env["debug_symbols"] == "yes": + env.AppendUnique(CCFLAGS=["/Z7"]) + env.AppendUnique(LINKFLAGS=["/DEBUG"]) ## Compile/link flags - env.AppendUnique(CCFLAGS=['/MT', '/Gd', '/GR', '/nologo']) - if int(env['MSVC_VERSION'].split('.')[0]) >= 14: #vs2015 and later - env.AppendUnique(CCFLAGS=['/utf-8']) - env.AppendUnique(CXXFLAGS=['/TP']) # assume all sources are C++ - if manual_msvc_config: # should be automatic if SCons found it + env.AppendUnique(CCFLAGS=["/MT", "/Gd", "/GR", "/nologo"]) + if int(env["MSVC_VERSION"].split(".")[0]) >= 14: # vs2015 and later + env.AppendUnique(CCFLAGS=["/utf-8"]) + env.AppendUnique(CXXFLAGS=["/TP"]) # assume all sources are C++ + if manual_msvc_config: # should be automatic if SCons found it if os.getenv("WindowsSdkDir") is not None: env.Prepend(CPPPATH=[os.getenv("WindowsSdkDir") + "/Include"]) else: print("Missing environment variable: WindowsSdkDir") - env.AppendUnique(CPPDEFINES = ['WINDOWS_ENABLED', 'OPENGL_ENABLED', - 'WASAPI_ENABLED', 'WINMIDI_ENABLED', - 'TYPED_METHOD_BIND', - 'WIN32', 'MSVC', - 'WINVER=%s' % env["target_win_version"], - '_WIN32_WINNT=%s' % env["target_win_version"]]) - env.AppendUnique(CPPDEFINES=['NOMINMAX']) # disable bogus min/max WinDef.h macros + env.AppendUnique( + CPPDEFINES=[ + "WINDOWS_ENABLED", + "OPENGL_ENABLED", + "WASAPI_ENABLED", + "WINMIDI_ENABLED", + "TYPED_METHOD_BIND", + "WIN32", + "MSVC", + "WINVER=%s" % env["target_win_version"], + "_WIN32_WINNT=%s" % env["target_win_version"], + ] + ) + env.AppendUnique(CPPDEFINES=["NOMINMAX"]) # disable bogus min/max WinDef.h macros if env["bits"] == "64": - env.AppendUnique(CPPDEFINES=['_WIN64']) + env.AppendUnique(CPPDEFINES=["_WIN64"]) ## Libs - LIBS = ['winmm', 'opengl32', 'dsound', 'kernel32', 'ole32', 'oleaut32', - 'user32', 'gdi32', 'IPHLPAPI', 'Shlwapi', 'wsock32', 'Ws2_32', - 'shell32', 'advapi32', 'dinput8', 'dxguid', 'imm32', 'bcrypt','Avrt', - 'dwmapi'] + LIBS = [ + "winmm", + "opengl32", + "dsound", + "kernel32", + "ole32", + "oleaut32", + "user32", + "gdi32", + "IPHLPAPI", + "Shlwapi", + "wsock32", + "Ws2_32", + "shell32", + "advapi32", + "dinput8", + "dxguid", + "imm32", + "bcrypt", + "Avrt", + "dwmapi", + ] env.Append(LINKFLAGS=[p + env["LIBSUFFIX"] for p in LIBS]) if manual_msvc_config: @@ -236,23 +267,24 @@ def configure_msvc(env, manual_msvc_config): ## LTO - if (env["use_lto"]): - env.AppendUnique(CCFLAGS=['/GL']) - env.AppendUnique(ARFLAGS=['/LTCG']) + if env["use_lto"]: + env.AppendUnique(CCFLAGS=["/GL"]) + env.AppendUnique(ARFLAGS=["/LTCG"]) if env["progress"]: - env.AppendUnique(LINKFLAGS=['/LTCG:STATUS']) + env.AppendUnique(LINKFLAGS=["/LTCG:STATUS"]) else: - env.AppendUnique(LINKFLAGS=['/LTCG']) + 
env.AppendUnique(LINKFLAGS=["/LTCG"]) if manual_msvc_config: env.Prepend(CPPPATH=[p for p in os.getenv("INCLUDE").split(";")]) env.Append(LIBPATH=[p for p in os.getenv("LIB").split(";")]) # Incremental linking fix - env['BUILDERS']['ProgramOriginal'] = env['BUILDERS']['Program'] - env['BUILDERS']['Program'] = methods.precious_program + env["BUILDERS"]["ProgramOriginal"] = env["BUILDERS"]["Program"] + env["BUILDERS"]["Program"] = methods.precious_program + + env.AppendUnique(LINKFLAGS=["/STACK:" + str(STACK_SIZE)]) - env.AppendUnique(LINKFLAGS=['/STACK:' + str(STACK_SIZE)]) def configure_mingw(env): # Workaround for MinGW. See: @@ -261,119 +293,143 @@ def configure_mingw(env): ## Build type - if (env["target"] == "release"): - env.Append(CCFLAGS=['-msse2']) + if env["target"] == "release": + env.Append(CCFLAGS=["-msse2"]) - if (env["optimize"] == "speed"): #optimize for speed (default) - if (env["bits"] == "64"): - env.Append(CCFLAGS=['-O3']) + if env["optimize"] == "speed": # optimize for speed (default) + if env["bits"] == "64": + env.Append(CCFLAGS=["-O3"]) else: - env.Append(CCFLAGS=['-O2']) - else: #optimize for size - env.Prepend(CCFLAGS=['-Os']) + env.Append(CCFLAGS=["-O2"]) + else: # optimize for size + env.Prepend(CCFLAGS=["-Os"]) + env.Append(LINKFLAGS=["-Wl,--subsystem,windows"]) - env.Append(LINKFLAGS=['-Wl,--subsystem,windows']) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + elif env["target"] == "release_debug": + env.Append(CCFLAGS=["-O2"]) + env.Append(CPPDEFINES=["DEBUG_ENABLED"]) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) + if env["optimize"] == "speed": # optimize for speed (default) + env.Append(CCFLAGS=["-O2"]) + else: # optimize for size + env.Prepend(CCFLAGS=["-Os"]) - elif (env["target"] == "release_debug"): - env.Append(CCFLAGS=['-O2']) - env.Append(CPPDEFINES=['DEBUG_ENABLED']) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Append(CCFLAGS=['-O2']) - else: #optimize for size - env.Prepend(CCFLAGS=['-Os']) - - elif (env["target"] == "debug"): - env.Append(CCFLAGS=['-g3']) - env.Append(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED']) + elif env["target"] == "debug": + env.Append(CCFLAGS=["-g3"]) + env.Append(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"]) ## Compiler configuration - if (os.name == "nt"): + if os.name == "nt": # Force splitting libmodules.a in multiple chunks to work around # issues reaching the linker command line size limit, which also # seem to induce huge slowdown for 'ar' (GH-30892). 
- env['split_libmodules'] = True + env["split_libmodules"] = True else: env["PROGSUFFIX"] = env["PROGSUFFIX"] + ".exe" # for linux cross-compilation - if (env["bits"] == "default"): - if (os.name == "nt"): + if env["bits"] == "default": + if os.name == "nt": env["bits"] = "64" if "PROGRAMFILES(X86)" in os.environ else "32" - else: # default to 64-bit on Linux + else: # default to 64-bit on Linux env["bits"] = "64" mingw_prefix = "" - if (env["bits"] == "32"): - env.Append(LINKFLAGS=['-static']) - env.Append(LINKFLAGS=['-static-libgcc']) - env.Append(LINKFLAGS=['-static-libstdc++']) + if env["bits"] == "32": + env.Append(LINKFLAGS=["-static"]) + env.Append(LINKFLAGS=["-static-libgcc"]) + env.Append(LINKFLAGS=["-static-libstdc++"]) mingw_prefix = env["mingw_prefix_32"] else: - env.Append(LINKFLAGS=['-static']) + env.Append(LINKFLAGS=["-static"]) mingw_prefix = env["mingw_prefix_64"] - if env['use_llvm']: + if env["use_llvm"]: env["CC"] = mingw_prefix + "clang" - env['AS'] = mingw_prefix + "as" + env["AS"] = mingw_prefix + "as" env["CXX"] = mingw_prefix + "clang++" - env['AR'] = mingw_prefix + "ar" - env['RANLIB'] = mingw_prefix + "ranlib" + env["AR"] = mingw_prefix + "ar" + env["RANLIB"] = mingw_prefix + "ranlib" env["LINK"] = mingw_prefix + "clang++" else: env["CC"] = mingw_prefix + "gcc" - env['AS'] = mingw_prefix + "as" - env['CXX'] = mingw_prefix + "g++" - env['AR'] = mingw_prefix + "gcc-ar" - env['RANLIB'] = mingw_prefix + "gcc-ranlib" - env['LINK'] = mingw_prefix + "g++" + env["AS"] = mingw_prefix + "as" + env["CXX"] = mingw_prefix + "g++" + env["AR"] = mingw_prefix + "gcc-ar" + env["RANLIB"] = mingw_prefix + "gcc-ranlib" + env["LINK"] = mingw_prefix + "g++" env["x86_libtheora_opt_gcc"] = True - if env['use_lto']: - if not env['use_llvm'] and env.GetOption("num_jobs") > 1: - env.Append(CCFLAGS=['-flto']) - env.Append(LINKFLAGS=['-flto=' + str(env.GetOption("num_jobs"))]) + if env["use_lto"]: + if not env["use_llvm"] and env.GetOption("num_jobs") > 1: + env.Append(CCFLAGS=["-flto"]) + env.Append(LINKFLAGS=["-flto=" + str(env.GetOption("num_jobs"))]) else: - if env['use_thinlto']: - env.Append(CCFLAGS=['-flto=thin']) - env.Append(LINKFLAGS=['-flto=thin']) + if env["use_thinlto"]: + env.Append(CCFLAGS=["-flto=thin"]) + env.Append(LINKFLAGS=["-flto=thin"]) else: - env.Append(CCFLAGS=['-flto']) - env.Append(LINKFLAGS=['-flto']) + env.Append(CCFLAGS=["-flto"]) + env.Append(LINKFLAGS=["-flto"]) - env.Append(LINKFLAGS=['-Wl,--stack,' + str(STACK_SIZE)]) + env.Append(LINKFLAGS=["-Wl,--stack," + str(STACK_SIZE)]) ## Compile flags - env.Append(CCFLAGS=['-mwindows']) - env.Append(CPPDEFINES=['WINDOWS_ENABLED', 'OPENGL_ENABLED', 'WASAPI_ENABLED', 'WINMIDI_ENABLED']) - env.Append(CPPDEFINES=[('WINVER', env['target_win_version']), ('_WIN32_WINNT', env['target_win_version'])]) - env.Append(LIBS=['mingw32', 'opengl32', 'dsound', 'ole32', 'd3d9', 'winmm', 'gdi32', 'iphlpapi', 'shlwapi', 'wsock32', 'ws2_32', 'kernel32', 'oleaut32', 'dinput8', 'dxguid', 'ksuser', 'imm32', 'bcrypt', 'avrt', 'uuid', 'dwmapi']) + env.Append(CCFLAGS=["-mwindows"]) + env.Append(CPPDEFINES=["WINDOWS_ENABLED", "OPENGL_ENABLED", "WASAPI_ENABLED", "WINMIDI_ENABLED"]) + env.Append(CPPDEFINES=[("WINVER", env["target_win_version"]), ("_WIN32_WINNT", env["target_win_version"])]) + env.Append( + LIBS=[ + "mingw32", + "opengl32", + "dsound", + "ole32", + "d3d9", + "winmm", + "gdi32", + "iphlpapi", + "shlwapi", + "wsock32", + "ws2_32", + "kernel32", + "oleaut32", + "dinput8", + "dxguid", + "ksuser", + "imm32", + "bcrypt", + "avrt", + 
"uuid", + "dwmapi", + ] + ) - env.Append(CPPDEFINES=['MINGW_ENABLED', ('MINGW_HAS_SECURE_API', 1)]) + env.Append(CPPDEFINES=["MINGW_ENABLED", ("MINGW_HAS_SECURE_API", 1)]) # resrc - env.Append(BUILDERS={'RES': env.Builder(action=build_res_file, suffix='.o', src_suffix='.rc')}) + env.Append(BUILDERS={"RES": env.Builder(action=build_res_file, suffix=".o", src_suffix=".rc")}) + def configure(env): # At this point the env has been set up with basic tools/compilers. - env.Prepend(CPPPATH=['#platform/windows']) + env.Prepend(CPPPATH=["#platform/windows"]) - print("Configuring for Windows: target=%s, bits=%s" % (env['target'], env['bits'])) + print("Configuring for Windows: target=%s, bits=%s" % (env["target"], env["bits"])) - if (os.name == "nt"): - env['ENV'] = os.environ # this makes build less repeatable, but simplifies some things - env['ENV']['TMP'] = os.environ['TMP'] + if os.name == "nt": + env["ENV"] = os.environ # this makes build less repeatable, but simplifies some things + env["ENV"]["TMP"] = os.environ["TMP"] # First figure out which compiler, version, and target arch we're using if os.getenv("VCINSTALLDIR") and not env["use_mingw"]: @@ -381,7 +437,7 @@ def configure(env): setup_msvc_manual(env) env.msvc = True manual_msvc_config = True - elif env.get('MSVC_VERSION', '') and not env["use_mingw"]: + elif env.get("MSVC_VERSION", "") and not env["use_mingw"]: setup_msvc_auto(env) env.msvc = True manual_msvc_config = False @@ -393,5 +449,5 @@ def configure(env): if env.msvc: configure_msvc(env, manual_msvc_config) - else: # MinGW + else: # MinGW configure_mingw(env) diff --git a/platform/windows/platform_windows_builders.py b/platform/windows/platform_windows_builders.py index a1ad3b8b50..22e33b51b4 100644 --- a/platform/windows/platform_windows_builders.py +++ b/platform/windows/platform_windows_builders.py @@ -9,14 +9,14 @@ from platform_methods import subprocess_main def make_debug_mingw(target, source, env): mingw_prefix = "" - if (env["bits"] == "32"): + if env["bits"] == "32": mingw_prefix = env["mingw_prefix_32"] else: mingw_prefix = env["mingw_prefix_64"] - os.system(mingw_prefix + 'objcopy --only-keep-debug {0} {0}.debugsymbols'.format(target[0])) - os.system(mingw_prefix + 'strip --strip-debug --strip-unneeded {0}'.format(target[0])) - os.system(mingw_prefix + 'objcopy --add-gnu-debuglink={0}.debugsymbols {0}'.format(target[0])) + os.system(mingw_prefix + "objcopy --only-keep-debug {0} {0}.debugsymbols".format(target[0])) + os.system(mingw_prefix + "strip --strip-debug --strip-unneeded {0}".format(target[0])) + os.system(mingw_prefix + "objcopy --add-gnu-debuglink={0}.debugsymbols {0}".format(target[0])) -if __name__ == '__main__': +if __name__ == "__main__": subprocess_main(globals()) diff --git a/platform/x11/SCsub b/platform/x11/SCsub index 3d5aa15208..8337d75846 100644 --- a/platform/x11/SCsub +++ b/platform/x11/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") from platform_methods import run_in_subprocess import platform_x11_builders @@ -12,10 +12,10 @@ common_x11 = [ "key_mapping_x11.cpp", "joypad_linux.cpp", "power_x11.cpp", - "detect_prime.cpp" + "detect_prime.cpp", ] -prog = env.add_program('#bin/godot', ['godot_x11.cpp'] + common_x11) +prog = env.add_program("#bin/godot", ["godot_x11.cpp"] + common_x11) if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]: env.AddPostAction(prog, run_in_subprocess(platform_x11_builders.make_debug_x11)) diff --git a/platform/x11/detect.py 
b/platform/x11/detect.py index 457425b359..5674e78350 100644 --- a/platform/x11/detect.py +++ b/platform/x11/detect.py @@ -14,63 +14,64 @@ def get_name(): def can_build(): - if (os.name != "posix" or sys.platform == "darwin"): + if os.name != "posix" or sys.platform == "darwin": return False # Check the minimal dependencies x11_error = os.system("pkg-config --version > /dev/null") - if (x11_error): + if x11_error: return False x11_error = os.system("pkg-config x11 --modversion > /dev/null ") - if (x11_error): + if x11_error: return False x11_error = os.system("pkg-config xcursor --modversion > /dev/null ") - if (x11_error): + if x11_error: print("xcursor not found.. x11 disabled.") return False x11_error = os.system("pkg-config xinerama --modversion > /dev/null ") - if (x11_error): + if x11_error: print("xinerama not found.. x11 disabled.") return False x11_error = os.system("pkg-config xrandr --modversion > /dev/null ") - if (x11_error): + if x11_error: print("xrandr not found.. x11 disabled.") return False x11_error = os.system("pkg-config xrender --modversion > /dev/null ") - if (x11_error): + if x11_error: print("xrender not found.. x11 disabled.") return False x11_error = os.system("pkg-config xi --modversion > /dev/null ") - if (x11_error): + if x11_error: print("xi not found.. Aborting.") return False return True + def get_opts(): from SCons.Variables import BoolVariable, EnumVariable return [ - BoolVariable('use_llvm', 'Use the LLVM compiler', False), - BoolVariable('use_lld', 'Use the LLD linker', False), - BoolVariable('use_thinlto', 'Use ThinLTO', False), - BoolVariable('use_static_cpp', 'Link libgcc and libstdc++ statically for better portability', False), - BoolVariable('use_ubsan', 'Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)', False), - BoolVariable('use_asan', 'Use LLVM/GCC compiler address sanitizer (ASAN))', False), - BoolVariable('use_lsan', 'Use LLVM/GCC compiler leak sanitizer (LSAN))', False), - BoolVariable('use_tsan', 'Use LLVM/GCC compiler thread sanitizer (TSAN))', False), - BoolVariable('pulseaudio', 'Detect and use PulseAudio', True), - BoolVariable('udev', 'Use udev for gamepad connection callbacks', False), - EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')), - BoolVariable('separate_debug_symbols', 'Create a separate file containing debugging symbols', False), - BoolVariable('touch', 'Enable touch events', True), - BoolVariable('execinfo', 'Use libexecinfo on systems where glibc is not available', False), + BoolVariable("use_llvm", "Use the LLVM compiler", False), + BoolVariable("use_lld", "Use the LLD linker", False), + BoolVariable("use_thinlto", "Use ThinLTO", False), + BoolVariable("use_static_cpp", "Link libgcc and libstdc++ statically for better portability", False), + BoolVariable("use_ubsan", "Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)", False), + BoolVariable("use_asan", "Use LLVM/GCC compiler address sanitizer (ASAN))", False), + BoolVariable("use_lsan", "Use LLVM/GCC compiler leak sanitizer (LSAN))", False), + BoolVariable("use_tsan", "Use LLVM/GCC compiler thread sanitizer (TSAN))", False), + BoolVariable("pulseaudio", "Detect and use PulseAudio", True), + BoolVariable("udev", "Use udev for gamepad connection callbacks", False), + EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")), + BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False), + BoolVariable("touch", 
"Enable touch events", True), + BoolVariable("execinfo", "Use libexecinfo on systems where glibc is not available", False), ] @@ -83,278 +84,286 @@ def configure(env): ## Build type - if (env["target"] == "release"): - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Prepend(CCFLAGS=['-O3']) - else: #optimize for size - env.Prepend(CCFLAGS=['-Os']) + if env["target"] == "release": + if env["optimize"] == "speed": # optimize for speed (default) + env.Prepend(CCFLAGS=["-O3"]) + else: # optimize for size + env.Prepend(CCFLAGS=["-Os"]) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - elif (env["target"] == "release_debug"): - if (env["optimize"] == "speed"): #optimize for speed (default) - env.Prepend(CCFLAGS=['-O2']) - else: #optimize for size - env.Prepend(CCFLAGS=['-Os']) - env.Prepend(CPPDEFINES=['DEBUG_ENABLED']) + elif env["target"] == "release_debug": + if env["optimize"] == "speed": # optimize for speed (default) + env.Prepend(CCFLAGS=["-O2"]) + else: # optimize for size + env.Prepend(CCFLAGS=["-Os"]) + env.Prepend(CPPDEFINES=["DEBUG_ENABLED"]) - if (env["debug_symbols"] == "yes"): - env.Prepend(CCFLAGS=['-g1']) - if (env["debug_symbols"] == "full"): - env.Prepend(CCFLAGS=['-g2']) + if env["debug_symbols"] == "yes": + env.Prepend(CCFLAGS=["-g1"]) + if env["debug_symbols"] == "full": + env.Prepend(CCFLAGS=["-g2"]) - elif (env["target"] == "debug"): - env.Prepend(CCFLAGS=['-g3']) - env.Prepend(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED']) - env.Append(LINKFLAGS=['-rdynamic']) + elif env["target"] == "debug": + env.Prepend(CCFLAGS=["-g3"]) + env.Prepend(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"]) + env.Append(LINKFLAGS=["-rdynamic"]) ## Architecture - is64 = sys.maxsize > 2**32 - if (env["bits"] == "default"): + is64 = sys.maxsize > 2 ** 32 + if env["bits"] == "default": env["bits"] = "64" if is64 else "32" ## Compiler configuration - if 'CXX' in env and 'clang' in os.path.basename(env['CXX']): + if "CXX" in env and "clang" in os.path.basename(env["CXX"]): # Convenience check to enforce the use_llvm overrides when CXX is clang(++) - env['use_llvm'] = True + env["use_llvm"] = True - if env['use_llvm']: - if ('clang++' not in os.path.basename(env['CXX'])): + if env["use_llvm"]: + if "clang++" not in os.path.basename(env["CXX"]): env["CC"] = "clang" env["CXX"] = "clang++" env["LINK"] = "clang++" - env.Append(CPPDEFINES=['TYPED_METHOD_BIND']) + env.Append(CPPDEFINES=["TYPED_METHOD_BIND"]) env.extra_suffix = ".llvm" + env.extra_suffix - if env['use_lld']: - if env['use_llvm']: - env.Append(LINKFLAGS=['-fuse-ld=lld']) - if env['use_thinlto']: + if env["use_lld"]: + if env["use_llvm"]: + env.Append(LINKFLAGS=["-fuse-ld=lld"]) + if env["use_thinlto"]: # A convenience so you don't need to write use_lto too when using SCons - env['use_lto'] = True + env["use_lto"] = True else: print("Using LLD with GCC is not supported yet, try compiling with 'use_llvm=yes'.") sys.exit(255) - if env['use_ubsan'] or env['use_asan'] or env['use_lsan'] or env['use_tsan']: + if env["use_ubsan"] or env["use_asan"] or env["use_lsan"] or env["use_tsan"]: env.extra_suffix += "s" - if env['use_ubsan']: - env.Append(CCFLAGS=['-fsanitize=undefined']) - env.Append(LINKFLAGS=['-fsanitize=undefined']) + if env["use_ubsan"]: + 
env.Append(CCFLAGS=["-fsanitize=undefined"]) + env.Append(LINKFLAGS=["-fsanitize=undefined"]) - if env['use_asan']: - env.Append(CCFLAGS=['-fsanitize=address']) - env.Append(LINKFLAGS=['-fsanitize=address']) + if env["use_asan"]: + env.Append(CCFLAGS=["-fsanitize=address"]) + env.Append(LINKFLAGS=["-fsanitize=address"]) - if env['use_lsan']: - env.Append(CCFLAGS=['-fsanitize=leak']) - env.Append(LINKFLAGS=['-fsanitize=leak']) + if env["use_lsan"]: + env.Append(CCFLAGS=["-fsanitize=leak"]) + env.Append(LINKFLAGS=["-fsanitize=leak"]) - if env['use_tsan']: - env.Append(CCFLAGS=['-fsanitize=thread']) - env.Append(LINKFLAGS=['-fsanitize=thread']) + if env["use_tsan"]: + env.Append(CCFLAGS=["-fsanitize=thread"]) + env.Append(LINKFLAGS=["-fsanitize=thread"]) - if env['use_lto']: - if not env['use_llvm'] and env.GetOption("num_jobs") > 1: - env.Append(CCFLAGS=['-flto']) - env.Append(LINKFLAGS=['-flto=' + str(env.GetOption("num_jobs"))]) + if env["use_lto"]: + if not env["use_llvm"] and env.GetOption("num_jobs") > 1: + env.Append(CCFLAGS=["-flto"]) + env.Append(LINKFLAGS=["-flto=" + str(env.GetOption("num_jobs"))]) else: - if env['use_lld'] and env['use_thinlto']: - env.Append(CCFLAGS=['-flto=thin']) - env.Append(LINKFLAGS=['-flto=thin']) + if env["use_lld"] and env["use_thinlto"]: + env.Append(CCFLAGS=["-flto=thin"]) + env.Append(LINKFLAGS=["-flto=thin"]) else: - env.Append(CCFLAGS=['-flto']) - env.Append(LINKFLAGS=['-flto']) + env.Append(CCFLAGS=["-flto"]) + env.Append(LINKFLAGS=["-flto"]) - if not env['use_llvm']: - env['RANLIB'] = 'gcc-ranlib' - env['AR'] = 'gcc-ar' + if not env["use_llvm"]: + env["RANLIB"] = "gcc-ranlib" + env["AR"] = "gcc-ar" - env.Append(CCFLAGS=['-pipe']) - env.Append(LINKFLAGS=['-pipe']) + env.Append(CCFLAGS=["-pipe"]) + env.Append(LINKFLAGS=["-pipe"]) # Check for gcc version >= 6 before adding -no-pie version = get_compiler_version(env) or [-1, -1] if using_gcc(env): if version[0] >= 6: - env.Append(CCFLAGS=['-fpie']) - env.Append(LINKFLAGS=['-no-pie']) + env.Append(CCFLAGS=["-fpie"]) + env.Append(LINKFLAGS=["-no-pie"]) # Do the same for clang should be fine with Clang 4 and higher if using_clang(env): if version[0] >= 4: - env.Append(CCFLAGS=['-fpie']) - env.Append(LINKFLAGS=['-no-pie']) + env.Append(CCFLAGS=["-fpie"]) + env.Append(LINKFLAGS=["-no-pie"]) ## Dependencies - env.ParseConfig('pkg-config x11 --cflags --libs') - env.ParseConfig('pkg-config xcursor --cflags --libs') - env.ParseConfig('pkg-config xinerama --cflags --libs') - env.ParseConfig('pkg-config xrandr --cflags --libs') - env.ParseConfig('pkg-config xrender --cflags --libs') - env.ParseConfig('pkg-config xi --cflags --libs') + env.ParseConfig("pkg-config x11 --cflags --libs") + env.ParseConfig("pkg-config xcursor --cflags --libs") + env.ParseConfig("pkg-config xinerama --cflags --libs") + env.ParseConfig("pkg-config xrandr --cflags --libs") + env.ParseConfig("pkg-config xrender --cflags --libs") + env.ParseConfig("pkg-config xi --cflags --libs") - if (env['touch']): - env.Append(CPPDEFINES=['TOUCH_ENABLED']) + if env["touch"]: + env.Append(CPPDEFINES=["TOUCH_ENABLED"]) # FIXME: Check for existence of the libs before parsing their flags with pkg-config # freetype depends on libpng and zlib, so bundling one of them while keeping others # as shared libraries leads to weird issues - if env['builtin_freetype'] or env['builtin_libpng'] or env['builtin_zlib']: - env['builtin_freetype'] = True - env['builtin_libpng'] = True - env['builtin_zlib'] = True + if env["builtin_freetype"] or env["builtin_libpng"] or 
env["builtin_zlib"]: + env["builtin_freetype"] = True + env["builtin_libpng"] = True + env["builtin_zlib"] = True - if not env['builtin_freetype']: - env.ParseConfig('pkg-config freetype2 --cflags --libs') + if not env["builtin_freetype"]: + env.ParseConfig("pkg-config freetype2 --cflags --libs") - if not env['builtin_libpng']: - env.ParseConfig('pkg-config libpng16 --cflags --libs') + if not env["builtin_libpng"]: + env.ParseConfig("pkg-config libpng16 --cflags --libs") - if not env['builtin_bullet']: + if not env["builtin_bullet"]: # We need at least version 2.89 import subprocess - bullet_version = subprocess.check_output(['pkg-config', 'bullet', '--modversion']).strip() + + bullet_version = subprocess.check_output(["pkg-config", "bullet", "--modversion"]).strip() if str(bullet_version) < "2.89": # Abort as system bullet was requested but too old - print("Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(bullet_version, "2.89")) + print( + "Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format( + bullet_version, "2.89" + ) + ) sys.exit(255) - env.ParseConfig('pkg-config bullet --cflags --libs') + env.ParseConfig("pkg-config bullet --cflags --libs") if False: # not env['builtin_assimp']: # FIXME: Add min version check - env.ParseConfig('pkg-config assimp --cflags --libs') + env.ParseConfig("pkg-config assimp --cflags --libs") - if not env['builtin_enet']: - env.ParseConfig('pkg-config libenet --cflags --libs') + if not env["builtin_enet"]: + env.ParseConfig("pkg-config libenet --cflags --libs") - if not env['builtin_squish']: - env.ParseConfig('pkg-config libsquish --cflags --libs') + if not env["builtin_squish"]: + env.ParseConfig("pkg-config libsquish --cflags --libs") - if not env['builtin_zstd']: - env.ParseConfig('pkg-config libzstd --cflags --libs') + if not env["builtin_zstd"]: + env.ParseConfig("pkg-config libzstd --cflags --libs") # Sound and video libraries # Keep the order as it triggers chained dependencies (ogg needed by others, etc.) 
- if not env['builtin_libtheora']: - env['builtin_libogg'] = False # Needed to link against system libtheora - env['builtin_libvorbis'] = False # Needed to link against system libtheora - env.ParseConfig('pkg-config theora theoradec --cflags --libs') + if not env["builtin_libtheora"]: + env["builtin_libogg"] = False # Needed to link against system libtheora + env["builtin_libvorbis"] = False # Needed to link against system libtheora + env.ParseConfig("pkg-config theora theoradec --cflags --libs") else: - list_of_x86 = ['x86_64', 'x86', 'i386', 'i586'] + list_of_x86 = ["x86_64", "x86", "i386", "i586"] if any(platform.machine() in s for s in list_of_x86): env["x86_libtheora_opt_gcc"] = True - if not env['builtin_libvpx']: - env.ParseConfig('pkg-config vpx --cflags --libs') + if not env["builtin_libvpx"]: + env.ParseConfig("pkg-config vpx --cflags --libs") - if not env['builtin_libvorbis']: - env['builtin_libogg'] = False # Needed to link against system libvorbis - env.ParseConfig('pkg-config vorbis vorbisfile --cflags --libs') + if not env["builtin_libvorbis"]: + env["builtin_libogg"] = False # Needed to link against system libvorbis + env.ParseConfig("pkg-config vorbis vorbisfile --cflags --libs") - if not env['builtin_opus']: - env['builtin_libogg'] = False # Needed to link against system opus - env.ParseConfig('pkg-config opus opusfile --cflags --libs') + if not env["builtin_opus"]: + env["builtin_libogg"] = False # Needed to link against system opus + env.ParseConfig("pkg-config opus opusfile --cflags --libs") - if not env['builtin_libogg']: - env.ParseConfig('pkg-config ogg --cflags --libs') + if not env["builtin_libogg"]: + env.ParseConfig("pkg-config ogg --cflags --libs") - if not env['builtin_libwebp']: - env.ParseConfig('pkg-config libwebp --cflags --libs') + if not env["builtin_libwebp"]: + env.ParseConfig("pkg-config libwebp --cflags --libs") - if not env['builtin_mbedtls']: + if not env["builtin_mbedtls"]: # mbedTLS does not provide a pkgconfig config yet. See https://github.com/ARMmbed/mbedtls/issues/228 - env.Append(LIBS=['mbedtls', 'mbedcrypto', 'mbedx509']) + env.Append(LIBS=["mbedtls", "mbedcrypto", "mbedx509"]) - if not env['builtin_wslay']: - env.ParseConfig('pkg-config libwslay --cflags --libs') + if not env["builtin_wslay"]: + env.ParseConfig("pkg-config libwslay --cflags --libs") - if not env['builtin_miniupnpc']: + if not env["builtin_miniupnpc"]: # No pkgconfig file so far, hardcode default paths. 
env.Prepend(CPPPATH=["/usr/include/miniupnpc"]) env.Append(LIBS=["miniupnpc"]) # On Linux wchar_t should be 32-bits # 16-bit library shouldn't be required due to compiler optimisations - if not env['builtin_pcre2']: - env.ParseConfig('pkg-config libpcre2-32 --cflags --libs') + if not env["builtin_pcre2"]: + env.ParseConfig("pkg-config libpcre2-32 --cflags --libs") ## Flags - if (os.system("pkg-config --exists alsa") == 0): # 0 means found + if os.system("pkg-config --exists alsa") == 0: # 0 means found print("Enabling ALSA") env.Append(CPPDEFINES=["ALSA_ENABLED", "ALSAMIDI_ENABLED"]) - # Don't parse --cflags, we don't need to add /usr/include/alsa to include path - env.ParseConfig('pkg-config alsa --libs') + # Don't parse --cflags, we don't need to add /usr/include/alsa to include path + env.ParseConfig("pkg-config alsa --libs") else: print("ALSA libraries not found, disabling driver") - if env['pulseaudio']: - if (os.system("pkg-config --exists libpulse") == 0): # 0 means found + if env["pulseaudio"]: + if os.system("pkg-config --exists libpulse") == 0: # 0 means found print("Enabling PulseAudio") env.Append(CPPDEFINES=["PULSEAUDIO_ENABLED"]) - env.ParseConfig('pkg-config --cflags --libs libpulse') + env.ParseConfig("pkg-config --cflags --libs libpulse") else: print("PulseAudio development libraries not found, disabling driver") - if (platform.system() == "Linux"): + if platform.system() == "Linux": env.Append(CPPDEFINES=["JOYDEV_ENABLED"]) - if env['udev']: - if (os.system("pkg-config --exists libudev") == 0): # 0 means found + if env["udev"]: + if os.system("pkg-config --exists libudev") == 0: # 0 means found print("Enabling udev support") env.Append(CPPDEFINES=["UDEV_ENABLED"]) - env.ParseConfig('pkg-config libudev --cflags --libs') + env.ParseConfig("pkg-config libudev --cflags --libs") else: print("libudev development libraries not found, disabling udev support") # Linkflags below this line should typically stay the last ones - if not env['builtin_zlib']: - env.ParseConfig('pkg-config zlib --cflags --libs') + if not env["builtin_zlib"]: + env.ParseConfig("pkg-config zlib --cflags --libs") - env.Prepend(CPPPATH=['#platform/x11']) - env.Append(CPPDEFINES=['X11_ENABLED', 'UNIX_ENABLED', 'OPENGL_ENABLED', 'GLES_ENABLED']) - env.Append(LIBS=['GL', 'pthread']) + env.Prepend(CPPPATH=["#platform/x11"]) + env.Append(CPPDEFINES=["X11_ENABLED", "UNIX_ENABLED", "OPENGL_ENABLED", "GLES_ENABLED"]) + env.Append(LIBS=["GL", "pthread"]) - if (platform.system() == "Linux"): - env.Append(LIBS=['dl']) + if platform.system() == "Linux": + env.Append(LIBS=["dl"]) - if (platform.system().find("BSD") >= 0): + if platform.system().find("BSD") >= 0: env["execinfo"] = True if env["execinfo"]: - env.Append(LIBS=['execinfo']) + env.Append(LIBS=["execinfo"]) - if not env['tools']: + if not env["tools"]: import subprocess import re - linker_version_str = subprocess.check_output([env.subst(env["LINK"]), '-Wl,--version']).decode("utf-8") - gnu_ld_version = re.search('^GNU ld [^$]*(\d+\.\d+)$', linker_version_str, re.MULTILINE) + + linker_version_str = subprocess.check_output([env.subst(env["LINK"]), "-Wl,--version"]).decode("utf-8") + gnu_ld_version = re.search("^GNU ld [^$]*(\d+\.\d+)$", linker_version_str, re.MULTILINE) if not gnu_ld_version: - print("Warning: Creating template binaries enabled for PCK embedding is currently only supported with GNU ld") + print( + "Warning: Creating template binaries enabled for PCK embedding is currently only supported with GNU ld" + ) else: if float(gnu_ld_version.group(1)) >= 
2.30: - env.Append(LINKFLAGS=['-T', 'platform/x11/pck_embed.ld']) + env.Append(LINKFLAGS=["-T", "platform/x11/pck_embed.ld"]) else: - env.Append(LINKFLAGS=['-T', 'platform/x11/pck_embed.legacy.ld']) + env.Append(LINKFLAGS=["-T", "platform/x11/pck_embed.legacy.ld"]) ## Cross-compilation - if (is64 and env["bits"] == "32"): - env.Append(CCFLAGS=['-m32']) - env.Append(LINKFLAGS=['-m32', '-L/usr/lib/i386-linux-gnu']) - elif (not is64 and env["bits"] == "64"): - env.Append(CCFLAGS=['-m64']) - env.Append(LINKFLAGS=['-m64', '-L/usr/lib/i686-linux-gnu']) + if is64 and env["bits"] == "32": + env.Append(CCFLAGS=["-m32"]) + env.Append(LINKFLAGS=["-m32", "-L/usr/lib/i386-linux-gnu"]) + elif not is64 and env["bits"] == "64": + env.Append(CCFLAGS=["-m64"]) + env.Append(LINKFLAGS=["-m64", "-L/usr/lib/i686-linux-gnu"]) # Link those statically for portability - if env['use_static_cpp']: - env.Append(LINKFLAGS=['-static-libgcc', '-static-libstdc++']) + if env["use_static_cpp"]: + env.Append(LINKFLAGS=["-static-libgcc", "-static-libstdc++"]) diff --git a/platform/x11/platform_x11_builders.py b/platform/x11/platform_x11_builders.py index 5ff0c6fb14..5884f8e161 100644 --- a/platform/x11/platform_x11_builders.py +++ b/platform/x11/platform_x11_builders.py @@ -8,10 +8,10 @@ from platform_methods import subprocess_main def make_debug_x11(target, source, env): - os.system('objcopy --only-keep-debug {0} {0}.debugsymbols'.format(target[0])) - os.system('strip --strip-debug --strip-unneeded {0}'.format(target[0])) - os.system('objcopy --add-gnu-debuglink={0}.debugsymbols {0}'.format(target[0])) + os.system("objcopy --only-keep-debug {0} {0}.debugsymbols".format(target[0])) + os.system("strip --strip-debug --strip-unneeded {0}".format(target[0])) + os.system("objcopy --add-gnu-debuglink={0}.debugsymbols {0}".format(target[0])) -if __name__ == '__main__': +if __name__ == "__main__": subprocess_main(globals()) diff --git a/platform_methods.py b/platform_methods.py index 4300216427..e72927bc43 100644 --- a/platform_methods.py +++ b/platform_methods.py @@ -14,7 +14,6 @@ else: def run_in_subprocess(builder_function): - @functools.wraps(builder_function) def wrapper(target, source, env): @@ -23,38 +22,36 @@ def run_in_subprocess(builder_function): source = [node.srcnode().abspath for node in source] # Short circuit on non-Windows platforms, no need to run in subprocess - if sys.platform not in ('win32', 'cygwin'): + if sys.platform not in ("win32", "cygwin"): return builder_function(target, source, env) # Identify module module_name = builder_function.__module__ function_name = builder_function.__name__ module_path = sys.modules[module_name].__file__ - if module_path.endswith('.pyc') or module_path.endswith('.pyo'): + if module_path.endswith(".pyc") or module_path.endswith(".pyo"): module_path = module_path[:-1] # Subprocess environment subprocess_env = os.environ.copy() - subprocess_env['PYTHONPATH'] = os.pathsep.join([os.getcwd()] + sys.path) + subprocess_env["PYTHONPATH"] = os.pathsep.join([os.getcwd()] + sys.path) # Keep only JSON serializable environment items - filtered_env = dict( - (key, value) - for key, value in env.items() - if isinstance(value, JSON_SERIALIZABLE_TYPES) - ) + filtered_env = dict((key, value) for key, value in env.items() if isinstance(value, JSON_SERIALIZABLE_TYPES)) # Save parameters args = (target, source, filtered_env) data = dict(fn=function_name, args=args) - json_path = os.path.join(os.environ['TMP'], uuid.uuid4().hex + '.json') - with open(json_path, 'wt') as json_file: + json_path = 
os.path.join(os.environ["TMP"], uuid.uuid4().hex + ".json") + with open(json_path, "wt") as json_file: json.dump(data, json_file, indent=2) json_file_size = os.stat(json_path).st_size - print('Executing builder function in subprocess: ' - 'module_path=%r, parameter_file=%r, parameter_file_size=%r, target=%r, source=%r' % ( - module_path, json_path, json_file_size, target, source)) + print( + "Executing builder function in subprocess: " + "module_path=%r, parameter_file=%r, parameter_file_size=%r, target=%r, source=%r" + % (module_path, json_path, json_file_size, target, source) + ) try: exit_code = subprocess.call([sys.executable, module_path, json_path], env=subprocess_env) finally: @@ -62,13 +59,15 @@ def run_in_subprocess(builder_function): os.remove(json_path) except (OSError, IOError) as e: # Do not fail the entire build if it cannot delete a temporary file - print('WARNING: Could not delete temporary file: path=%r; [%s] %s' % - (json_path, e.__class__.__name__, e)) + print( + "WARNING: Could not delete temporary file: path=%r; [%s] %s" % (json_path, e.__class__.__name__, e) + ) # Must succeed if exit_code: raise RuntimeError( - 'Failed to run builder function in subprocess: module_path=%r; data=%r' % (module_path, data)) + "Failed to run builder function in subprocess: module_path=%r; data=%r" % (module_path, data) + ) return wrapper @@ -78,5 +77,5 @@ def subprocess_main(namespace): with open(sys.argv[1]) as json_file: data = json.load(json_file) - fn = namespace[data['fn']] - fn(*data['args']) + fn = namespace[data["fn"]] + fn(*data["args"]) diff --git a/scene/2d/SCsub b/scene/2d/SCsub index b01e2fd54d..fc61250247 100644 --- a/scene/2d/SCsub +++ b/scene/2d/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.scene_sources, "*.cpp") diff --git a/scene/3d/SCsub b/scene/3d/SCsub index 31a443bad1..a382b0c8e3 100644 --- a/scene/3d/SCsub +++ b/scene/3d/SCsub @@ -1,8 +1,8 @@ #!/usr/bin/env python -Import('env') +Import("env") -if env['disable_3d']: +if env["disable_3d"]: env.add_source_files(env.scene_sources, "spatial.cpp") env.add_source_files(env.scene_sources, "skeleton.cpp") env.add_source_files(env.scene_sources, "particles.cpp") diff --git a/scene/SCsub b/scene/SCsub index 1c5b87b87a..f9fc00f3f2 100644 --- a/scene/SCsub +++ b/scene/SCsub @@ -1,16 +1,16 @@ #!/usr/bin/env python -Import('env') +Import("env") env.scene_sources = [] # Thirdparty code thirdparty_dir = "#thirdparty/misc/" thirdparty_sources = [ - # C++ sources - "easing_equations.cpp", - # C sources - "mikktspace.c", + # C++ sources + "easing_equations.cpp", + # C sources + "mikktspace.c", ] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] @@ -23,14 +23,14 @@ env.add_source_files(env.scene_sources, "*.cpp") # Chain load SCsubs -SConscript('main/SCsub') -SConscript('gui/SCsub') -SConscript('3d/SCsub') -SConscript('2d/SCsub') -SConscript('animation/SCsub') -SConscript('audio/SCsub') -SConscript('resources/SCsub') -SConscript('debugger/SCsub') +SConscript("main/SCsub") +SConscript("gui/SCsub") +SConscript("3d/SCsub") +SConscript("2d/SCsub") +SConscript("animation/SCsub") +SConscript("audio/SCsub") +SConscript("resources/SCsub") +SConscript("debugger/SCsub") # Build it all as a library diff --git a/scene/animation/SCsub b/scene/animation/SCsub index b01e2fd54d..fc61250247 100644 --- a/scene/animation/SCsub +++ b/scene/animation/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.scene_sources, "*.cpp") diff 
--git a/scene/audio/SCsub b/scene/audio/SCsub index b01e2fd54d..fc61250247 100644 --- a/scene/audio/SCsub +++ b/scene/audio/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.scene_sources, "*.cpp") diff --git a/scene/debugger/SCsub b/scene/debugger/SCsub index b01e2fd54d..fc61250247 100644 --- a/scene/debugger/SCsub +++ b/scene/debugger/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.scene_sources, "*.cpp") diff --git a/scene/gui/SCsub b/scene/gui/SCsub index b01e2fd54d..fc61250247 100644 --- a/scene/gui/SCsub +++ b/scene/gui/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.scene_sources, "*.cpp") diff --git a/scene/main/SCsub b/scene/main/SCsub index b01e2fd54d..fc61250247 100644 --- a/scene/main/SCsub +++ b/scene/main/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.scene_sources, "*.cpp") diff --git a/scene/resources/SCsub b/scene/resources/SCsub index 5e5b6f8fd5..3a86b22835 100644 --- a/scene/resources/SCsub +++ b/scene/resources/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.scene_sources, "*.cpp") diff --git a/scene/resources/default_theme/SCsub b/scene/resources/default_theme/SCsub index b01e2fd54d..fc61250247 100644 --- a/scene/resources/default_theme/SCsub +++ b/scene/resources/default_theme/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.scene_sources, "*.cpp") diff --git a/scene/resources/default_theme/make_header.py b/scene/resources/default_theme/make_header.py index cf0ccf1c3a..efad3b2815 100755 --- a/scene/resources/default_theme/make_header.py +++ b/scene/resources/default_theme/make_header.py @@ -13,7 +13,7 @@ os.chdir(os.path.dirname(os.path.realpath(__file__))) f = open("theme_data.h", "wb") -f.write(b"// THIS FILE HAS BEEN AUTOGENERATED, DON\'T EDIT!!\n") +f.write(b"// THIS FILE HAS BEEN AUTOGENERATED, DON'T EDIT!!\n") # Generate png image block f.write(b"\n// png image block\n") @@ -31,17 +31,17 @@ for x in pixmaps: pngf = open(x, "rb") b = pngf.read(1) - while(len(b) == 1): + while len(b) == 1: f.write(hex(ord(b)).encode(enc)) b = pngf.read(1) - if (len(b) == 1): + if len(b) == 1: f.write(b", ") f.write(b"\n};\n") pngf.close() # Generate shaders block -f.write(b"\n// shaders block\n"); +f.write(b"\n// shaders block\n") shaders = glob.glob("*.gsl") shaders.sort() @@ -56,15 +56,15 @@ for x in shaders: sf = open(x, "rb") b = sf.readline() - while(b != ""): - if (b.endswith("\r\n")): + while b != "": + if b.endswith("\r\n"): b = b[:-2] - if (b.endswith("\n")): + if b.endswith("\n"): b = b[:-1] - s = ' \"' + b + s = ' "' + b f.write(s.encode(enc)) b = sf.readline() - if (b != ""): + if b != "": f.write(b'"\n') f.write(b'";\n') diff --git a/servers/SCsub b/servers/SCsub index 34ba70b8cb..34bf0a8055 100644 --- a/servers/SCsub +++ b/servers/SCsub @@ -1,16 +1,16 @@ #!/usr/bin/env python -Import('env') +Import("env") env.servers_sources = [] env.add_source_files(env.servers_sources, "*.cpp") -SConscript('arvr/SCsub') -SConscript('camera/SCsub') -SConscript('physics/SCsub') -SConscript('physics_2d/SCsub') -SConscript('visual/SCsub') -SConscript('audio/SCsub') +SConscript("arvr/SCsub") +SConscript("camera/SCsub") +SConscript("physics/SCsub") +SConscript("physics_2d/SCsub") +SConscript("visual/SCsub") +SConscript("audio/SCsub") lib = env.add_library("servers", env.servers_sources) 
diff --git a/servers/arvr/SCsub b/servers/arvr/SCsub index d730144861..86681f9c74 100644 --- a/servers/arvr/SCsub +++ b/servers/arvr/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.servers_sources, "*.cpp") diff --git a/servers/audio/SCsub b/servers/audio/SCsub index 3c18c18043..5021e578c3 100644 --- a/servers/audio/SCsub +++ b/servers/audio/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.servers_sources, "*.cpp") diff --git a/servers/audio/effects/SCsub b/servers/audio/effects/SCsub index d730144861..86681f9c74 100644 --- a/servers/audio/effects/SCsub +++ b/servers/audio/effects/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.servers_sources, "*.cpp") diff --git a/servers/camera/SCsub b/servers/camera/SCsub index ccc76e823f..c949f3bb25 100644 --- a/servers/camera/SCsub +++ b/servers/camera/SCsub @@ -1,7 +1,7 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.servers_sources, "*.cpp") -Export('env') +Export("env") diff --git a/servers/physics/SCsub b/servers/physics/SCsub index c5cc889112..df7b521693 100644 --- a/servers/physics/SCsub +++ b/servers/physics/SCsub @@ -1,6 +1,6 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.servers_sources, "*.cpp") diff --git a/servers/physics/joints/SCsub b/servers/physics/joints/SCsub index d730144861..86681f9c74 100644 --- a/servers/physics/joints/SCsub +++ b/servers/physics/joints/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.servers_sources, "*.cpp") diff --git a/servers/physics_2d/SCsub b/servers/physics_2d/SCsub index d730144861..86681f9c74 100644 --- a/servers/physics_2d/SCsub +++ b/servers/physics_2d/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.servers_sources, "*.cpp") diff --git a/servers/visual/SCsub b/servers/visual/SCsub index d730144861..86681f9c74 100644 --- a/servers/visual/SCsub +++ b/servers/visual/SCsub @@ -1,5 +1,5 @@ #!/usr/bin/env python -Import('env') +Import("env") env.add_source_files(env.servers_sources, "*.cpp") From 1ec7a73d1c1c5f00a012a478127f405c9d1b49a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Mon, 30 Mar 2020 08:55:21 +0200 Subject: [PATCH 27/34] Hooks: Add pre-commit hook for psf/black formatting (cherry picked from commit 164826a39bca2fb7b7277752cbc1df8833ce0f1a) --- misc/hooks/README.md | 26 +++--- misc/hooks/pre-commit | 2 +- misc/hooks/pre-commit-black | 128 +++++++++++++++++++++++++++++ misc/hooks/pre-commit-clang-format | 6 +- 4 files changed, 146 insertions(+), 16 deletions(-) create mode 100755 misc/hooks/pre-commit-black diff --git a/misc/hooks/README.md b/misc/hooks/README.md index b18ba7df38..dad5300a09 100644 --- a/misc/hooks/README.md +++ b/misc/hooks/README.md @@ -5,16 +5,22 @@ contributors to make sure they comply with our requirements. ## List of hooks -- Pre-commit hook for clang-format: Applies clang-format to the staged files - before accepting a commit; blocks the commit and generates a patch if the - style is not respected. - Should work on Linux and macOS. You may need to edit the file if your - clang-format binary is not in the `$PATH`, or if you want to enable colored - output with pygmentize. -- Pre-commit hook for makerst: Checks the class reference syntax using `makerst.py`. - Should work on Linux and macOS. 
+- Pre-commit hook for `clang-format`: Applies `clang-format` to the staged + files before accepting a commit; blocks the commit and generates a patch if + the style is not respected. + You may need to edit the file if your `clang-format` binary is not in the + `PATH`, or if you want to enable colored output with `pygmentize`. +- Pre-commit hook for `black`: Applies `black` to the staged Python files + before accepting a commit. +- Pre-commit hook for `makerst`: Checks the class reference syntax using + `makerst.py`. ## Installation -Copy all the files from this folder into your `.git/hooks` folder, and make sure -the hooks and helper scripts are executable. +Copy all the files from this folder into your `.git/hooks` folder, and make +sure the hooks and helper scripts are executable. + +The hooks rely on bash scripts and tools which should be in the system `PATH`, +so they should work out of the box on Linux/macOS, and might work on Windows +when using `git-bash.exe` with `clang-format`, Python, `black`, etc. in the +`PATH`. diff --git a/misc/hooks/pre-commit b/misc/hooks/pre-commit index 36e9935785..40cb00253b 100755 --- a/misc/hooks/pre-commit +++ b/misc/hooks/pre-commit @@ -14,7 +14,7 @@ # as this script. Hooks should return 0 if successful and nonzero to cancel the # commit. They are executed in the order in which they are listed. #HOOKS="pre-commit-compile pre-commit-uncrustify" -HOOKS="pre-commit-clang-format pre-commit-makerst" +HOOKS="pre-commit-clang-format pre-commit-black pre-commit-makerst" ########################################################### # There should be no need to change anything below this line. diff --git a/misc/hooks/pre-commit-black b/misc/hooks/pre-commit-black new file mode 100755 index 0000000000..3dd0a13330 --- /dev/null +++ b/misc/hooks/pre-commit-black @@ -0,0 +1,128 @@ +#!/usr/bin/env bash + +# git pre-commit hook that runs a black stylecheck. +# Based on pre-commit-clang-format. + +################################################################## +# SETTINGS +# Set path to black binary. +BLACK=`which black` +BLACK_OPTIONS="-l 120" + +# Remove any older patches from previous commits. Set to true or false. +DELETE_OLD_PATCHES=false + +# File types to parse. +FILE_NAMES="SConstruct SCsub" +FILE_EXTS="py" + +# Use pygmentize instead of cat to parse diff with highlighting. +# Install it with `pip install pygments` (Linux) or `easy_install Pygments` (Mac) +# READER="pygmentize -l diff" +READER=cat + +################################################################## +# There should be no need to change anything below this line. + +. "$(dirname -- "$0")/canonicalize_filename.sh" + +# exit on error +set -e + +# check whether the given file matches any of the set extensions +matches_name_or_extension() { + local filename=$(basename "$1") + local extension=".${filename##*.}" + + for name in $FILE_NAMES; do [[ "$name" == "$filename" ]] && return 0; done + for ext in $FILE_EXTS; do [[ "$ext" == "$extension" ]] && return 0; done + + return 1 +} + +# necessary check for initial commit +if git rev-parse --verify HEAD >/dev/null 2>&1 ; then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 +fi + +if [ ! 
-x "$BLACK" ] ; then + printf "Error: black executable not found.\n" + printf "Set the correct path in $(canonicalize_filename "$0").\n" + exit 1 +fi + +# create a random filename to store our generated patch +prefix="pre-commit-black" +suffix="$(date +%s)" +patch="/tmp/$prefix-$suffix.patch" + +# clean up any older black patches +$DELETE_OLD_PATCHES && rm -f /tmp/$prefix*.patch + +# create one patch containing all changes to the files +git diff-index --cached --diff-filter=ACMR --name-only $against -- | while read file; +do + # ignore thirdparty files + if grep -q "thirdparty" <<< $file; then + continue; + fi + + # ignore file if not one of the names or extensions we handle + if ! matches_name_or_extension "$file"; then + continue; + fi + + # format our file with black, create a patch with diff and append it to our $patch + # The sed call is necessary to transform the patch from + # --- $file timestamp + # +++ $file timestamp + # to both lines working on the same file and having a/ and b/ prefix. + # Else it can not be applied with 'git apply'. + "$BLACK" "$BLACK_OPTIONS" --diff "$file" | \ + sed -e "1s|--- |--- a/|" -e "2s|+++ |+++ b/|" >> "$patch" +done + +# if no patch has been generated all is ok, clean up the file stub and exit +if [ ! -s "$patch" ] ; then + printf "Files in this commit comply with the black formatter rules.\n" + rm -f "$patch" + exit 0 +fi + +# a patch has been created, notify the user and exit +printf "\nThe following differences were found between the code to commit " +printf "and the black formatter rules:\n\n" +$READER "$patch" +printf "\n" + +# Allows us to read user input below, assigns stdin to keyboard +exec < /dev/tty + +while true; do + read -p "Do you want to apply that patch (Y - Apply, N - Do not apply, S - Apply and stage files)? [Y/N/S] " yn + case $yn in + [Yy] ) git apply $patch; + printf "The patch was applied. You can now stage the changes and commit again.\n\n"; + break + ;; + [Nn] ) printf "\nYou can apply these changes with:\n git apply $patch\n"; + printf "(may need to be called from the root directory of your repository)\n"; + printf "Aborting commit. Apply changes and commit again or skip checking with"; + printf " --no-verify (not recommended).\n\n"; + break + ;; + [Ss] ) git apply $patch; + git diff-index --cached --diff-filter=ACMR --name-only $against -- | while read file; + do git add $file; + done + printf "The patch was applied and the changed files staged. You can now commit.\n\n"; + break + ;; + * ) echo "Please answer yes or no." + ;; + esac +done +exit 1 # we don't commit in any case diff --git a/misc/hooks/pre-commit-clang-format b/misc/hooks/pre-commit-clang-format index e309233a8b..f3689890df 100755 --- a/misc/hooks/pre-commit-clang-format +++ b/misc/hooks/pre-commit-clang-format @@ -15,22 +15,18 @@ ################################################################## # SETTINGS -# Set path to clang-format binary -# CLANG_FORMAT="/usr/bin/clang-format" +# Set path to clang-format binary. CLANG_FORMAT=`which clang-format` # Remove any older patches from previous commits. Set to true or false. -# DELETE_OLD_PATCHES=false DELETE_OLD_PATCHES=false # Only parse files with the extensions in FILE_EXTS. Set to true or false. # If false every changed file in the commit will be parsed with clang-format. # If true only files matching one of the extensions are parsed with clang-format. -# PARSE_EXTS=true PARSE_EXTS=true # File types to parse. Only effective when PARSE_EXTS is true. 
-# FILE_EXTS=".c .h .cpp .hpp" FILE_EXTS=".c .h .cpp .hpp .cc .hh .cxx .m .mm .inc .java .glsl" # Use pygmentize instead of cat to parse diff with highlighting. From 243377ffacc74b234e47cd5928207cd8911d757b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Mon, 30 Mar 2020 09:03:38 +0200 Subject: [PATCH 28/34] Hooks: Use pygmentize if available to visualize diff (cherry picked from commit 4d52761da6f15ee0374a4cac958cc7cd12507adc) --- misc/hooks/pre-commit-black | 8 ++++++-- misc/hooks/pre-commit-clang-format | 8 ++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/misc/hooks/pre-commit-black b/misc/hooks/pre-commit-black index 3dd0a13330..2dcc2e8cf1 100755 --- a/misc/hooks/pre-commit-black +++ b/misc/hooks/pre-commit-black @@ -18,8 +18,12 @@ FILE_EXTS="py" # Use pygmentize instead of cat to parse diff with highlighting. # Install it with `pip install pygments` (Linux) or `easy_install Pygments` (Mac) -# READER="pygmentize -l diff" -READER=cat +PYGMENTIZE=`which pygmentize` +if [ ! -z "$PYGMENTIZE" ]; then + READER="pygmentize -l diff" +else + READER=cat +fi ################################################################## # There should be no need to change anything below this line. diff --git a/misc/hooks/pre-commit-clang-format b/misc/hooks/pre-commit-clang-format index f3689890df..c5cf4ecbb1 100755 --- a/misc/hooks/pre-commit-clang-format +++ b/misc/hooks/pre-commit-clang-format @@ -31,8 +31,12 @@ FILE_EXTS=".c .h .cpp .hpp .cc .hh .cxx .m .mm .inc .java .glsl" # Use pygmentize instead of cat to parse diff with highlighting. # Install it with `pip install pygments` (Linux) or `easy_install Pygments` (Mac) -# READER="pygmentize -l diff" -READER=cat +PYGMENTIZE=`which pygmentize` +if [ ! -z "$PYGMENTIZE" ]; then + READER="pygmentize -l diff" +else + READER=cat +fi ################################################################## # There should be no need to change anything below this line. From efdeba33267035524504694ea5f9f123f635960e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= Date: Mon, 30 Mar 2020 09:10:37 +0200 Subject: [PATCH 29/34] Travis: Add static check for Python black formatting Also install and use pygmentize to visualize clang-format and black diffs. 
(cherry picked from commit 3644036fd3d8678ac44695cc49ae63c4aaeb1b97) --- .travis.yml | 8 ++++--- misc/travis/black-format.sh | 48 +++++++++++++++++++++++++++++++++++++ misc/travis/clang-format.sh | 8 ++++--- 3 files changed, 58 insertions(+), 6 deletions(-) create mode 100755 misc/travis/black-format.sh diff --git a/.travis.yml b/.travis.yml index e957437d55..6acd0ab0d9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -114,9 +114,6 @@ matrix: before_install: - eval "${MATRIX_EVAL}" - - if [ "$STATIC_CHECKS" = "yes" ]; then - unset SCONS_CACHE; - fi install: - if [ "$TRAVIS_OS_NAME" = "linux" ]; then @@ -130,6 +127,10 @@ install: java -version; misc/travis/android-tools-linux.sh; fi + - if [ "$STATIC_CHECKS" = "yes" ]; then + unset SCONS_CACHE; + pip3 install --user black pygments; + fi before_script: - if [ "$PLATFORM" = "android" ]; then @@ -140,6 +141,7 @@ before_script: script: - if [ "$STATIC_CHECKS" = "yes" ]; then sh ./misc/travis/clang-format.sh && + sh ./misc/travis/black-format.sh && doc/tools/makerst.py --dry-run doc/classes modules; else scons -j2 CC=$CC CXX=$CXX platform=$PLATFORM tools=$TOOLS target=$TARGET $OPTIONS $EXTRA_ARGS && diff --git a/misc/travis/black-format.sh b/misc/travis/black-format.sh new file mode 100755 index 0000000000..75b153f6bb --- /dev/null +++ b/misc/travis/black-format.sh @@ -0,0 +1,48 @@ +#!/bin/sh + +BLACK=black +BLACK_OPTIONS="-l 120" + +if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then + # Travis only clones the PR branch and uses its HEAD commit as detached HEAD, + # so it's problematic when we want an exact commit range for format checks. + # We fetch upstream to ensure that we have the proper references to resolve. + # Ideally we would use $TRAVIS_COMMIT_RANGE but it doesn't play well with PR + # updates, as it only includes changes since the previous state of the PR. + if [ -z "$(git remote | grep upstream)" ]; then + git remote add upstream https://github.com/godotengine/godot \ + --no-tags -f -t $TRAVIS_BRANCH + fi + RANGE="upstream/$TRAVIS_BRANCH HEAD" +else + # Test only the last commit, since $TRAVIS_COMMIT_RANGE wouldn't support + # force pushes. + RANGE=HEAD +fi + +FILES=$(git diff-tree --no-commit-id --name-only -r $RANGE | grep -v thirdparty/| grep -E "(SConstruct|SCsub|\.py)$") +echo "Checking files:\n$FILES" + +# create a random filename to store our generated patch +prefix="static-check-black" +suffix="$(date +%s)" +patch="/tmp/$prefix-$suffix.patch" + +for file in $FILES; do + "$BLACK" "$BLACK_OPTIONS" --diff "$file" | \ + sed -e "1s|--- |--- a/|" -e "2s|+++ |+++ b/|" >> "$patch" +done + +# if no patch has been generated all is ok, clean up the file stub and exit +if [ ! -s "$patch" ] ; then + printf "Files in this commit comply with the black formatting rules.\n" + rm -f "$patch" + exit 0 +fi + +# a patch has been created, notify the user and exit +printf "\n*** The following differences were found between the code to commit " +printf "and the black formatting rules:\n\n" +pygmentize -l diff "$patch" +printf "\n*** Aborting, please fix your commit(s) with 'git commit --amend' or 'git rebase -i '\n" +exit 1 diff --git a/misc/travis/clang-format.sh b/misc/travis/clang-format.sh index a6585578e1..c917744ece 100755 --- a/misc/travis/clang-format.sh +++ b/misc/travis/clang-format.sh @@ -8,8 +8,10 @@ if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then # We fetch upstream to ensure that we have the proper references to resolve. 
# Ideally we would use $TRAVIS_COMMIT_RANGE but it doesn't play well with PR # updates, as it only includes changes since the previous state of the PR. - git remote add upstream https://github.com/godotengine/godot \ - --no-tags -f -t $TRAVIS_BRANCH + if [ -z "$(git remote | grep upstream)" ]; then + git remote add upstream https://github.com/godotengine/godot \ + --no-tags -f -t $TRAVIS_BRANCH + fi RANGE="upstream/$TRAVIS_BRANCH HEAD" else # Test only the last commit, since $TRAVIS_COMMIT_RANGE wouldn't support @@ -41,6 +43,6 @@ fi # a patch has been created, notify the user and exit printf "\n*** The following differences were found between the code to commit " printf "and the clang-format rules:\n\n" -cat "$patch" +pygmentize -l diff "$patch" printf "\n*** Aborting, please fix your commit(s) with 'git commit --amend' or 'git rebase -i '\n" exit 1 From bc21cd8592838385c6cc9dc5a59ec8399cf62b4a Mon Sep 17 00:00:00 2001 From: "Andrii Doroshenko (Xrayez)" Date: Mon, 30 Mar 2020 17:35:13 +0300 Subject: [PATCH 30/34] Moved to methods.py as a `show_progress` method. Some required changes are made: - locally imported SCons-specific packages within the method; - `global` variables converted to `nonlocal` (used in nested functions). (cherry picked from commit d753a7630ae0cd20ce7e769346b0329f28ab8de9) --- SConstruct | 118 +------------------------------------------------- methods.py | 123 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 117 deletions(-) diff --git a/SConstruct b/SConstruct index acb3f4d878..d129fd32a6 100644 --- a/SConstruct +++ b/SConstruct @@ -643,120 +643,4 @@ elif selected_platform != "": # The following only makes sense when the env is defined, and assumes it is if "env" in locals(): - screen = sys.stdout - # Progress reporting is not available in non-TTY environments since it - # messes with the output (for example, when writing to a file) - show_progress = env["progress"] and sys.stdout.isatty() - node_count = 0 - node_count_max = 0 - node_count_interval = 1 - node_count_fname = str(env.Dir("#")) + "/.scons_node_count" - - import time, math - - class cache_progress: - # The default is 1 GB cache and 12 hours half life - def __init__(self, path=None, limit=1073741824, half_life=43200): - self.path = path - self.limit = limit - self.exponent_scale = math.log(2) / half_life - if env["verbose"] and path != None: - screen.write( - "Current cache limit is {} (used: {})\n".format( - self.convert_size(limit), self.convert_size(self.get_size(path)) - ) - ) - self.delete(self.file_list()) - - def __call__(self, node, *args, **kw): - global node_count, node_count_max, node_count_interval, node_count_fname, show_progress - if show_progress: - # Print the progress percentage - node_count += node_count_interval - if node_count_max > 0 and node_count <= node_count_max: - screen.write("\r[%3d%%] " % (node_count * 100 / node_count_max)) - screen.flush() - elif node_count_max > 0 and node_count > node_count_max: - screen.write("\r[100%] ") - screen.flush() - else: - screen.write("\r[Initial build] ") - screen.flush() - - def delete(self, files): - if len(files) == 0: - return - if env["verbose"]: - # Utter something - screen.write("\rPurging %d %s from cache...\n" % (len(files), len(files) > 1 and "files" or "file")) - [os.remove(f) for f in files] - - def file_list(self): - if self.path is None: - # Nothing to do - return [] - # Gather a list of (filename, (size, atime)) within the - # cache directory - file_stat = [(x, os.stat(x)[6:8]) for x in 
glob.glob(os.path.join(self.path, "*", "*"))] - if file_stat == []: - # Nothing to do - return [] - # Weight the cache files by size (assumed to be roughly - # proportional to the recompilation time) times an exponential - # decay since the ctime, and return a list with the entries - # (filename, size, weight). - current_time = time.time() - file_stat = [(x[0], x[1][0], (current_time - x[1][1])) for x in file_stat] - # Sort by the most recently accessed files (most sensible to keep) first - file_stat.sort(key=lambda x: x[2]) - # Search for the first entry where the storage limit is - # reached - sum, mark = 0, None - for i, x in enumerate(file_stat): - sum += x[1] - if sum > self.limit: - mark = i - break - if mark is None: - return [] - else: - return [x[0] for x in file_stat[mark:]] - - def convert_size(self, size_bytes): - if size_bytes == 0: - return "0 bytes" - size_name = ("bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB") - i = int(math.floor(math.log(size_bytes, 1024))) - p = math.pow(1024, i) - s = round(size_bytes / p, 2) - return "%s %s" % (int(s) if i == 0 else s, size_name[i]) - - def get_size(self, start_path="."): - total_size = 0 - for dirpath, dirnames, filenames in os.walk(start_path): - for f in filenames: - fp = os.path.join(dirpath, f) - total_size += os.path.getsize(fp) - return total_size - - def progress_finish(target, source, env): - global node_count, progressor - with open(node_count_fname, "w") as f: - f.write("%d\n" % node_count) - progressor.delete(progressor.file_list()) - - try: - with open(node_count_fname) as f: - node_count_max = int(f.readline()) - except: - pass - - cache_directory = os.environ.get("SCONS_CACHE") - # Simple cache pruning, attached to SCons' progress callback. Trim the - # cache directory to a size not larger than cache_limit. 
- cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024 - progressor = cache_progress(cache_directory, cache_limit) - Progress(progressor, interval=node_count_interval) - - progress_finish_command = Command("progress_finish", [], progress_finish) - AlwaysBuild(progress_finish_command) + methods.show_progress(env) diff --git a/methods.py b/methods.py index dea5d5e60b..3f5800a229 100644 --- a/methods.py +++ b/methods.py @@ -714,3 +714,126 @@ def using_gcc(env): def using_clang(env): return "clang" in os.path.basename(env["CC"]) + + +def show_progress(env): + import sys + from SCons.Script import Progress, Command, AlwaysBuild + + screen = sys.stdout + # Progress reporting is not available in non-TTY environments since it + # messes with the output (for example, when writing to a file) + show_progress = env["progress"] and sys.stdout.isatty() + node_count = 0 + node_count_max = 0 + node_count_interval = 1 + node_count_fname = str(env.Dir("#")) + "/.scons_node_count" + + import time, math + + class cache_progress: + # The default is 1 GB cache and 12 hours half life + def __init__(self, path=None, limit=1073741824, half_life=43200): + self.path = path + self.limit = limit + self.exponent_scale = math.log(2) / half_life + if env["verbose"] and path != None: + screen.write( + "Current cache limit is {} (used: {})\n".format( + self.convert_size(limit), self.convert_size(self.get_size(path)) + ) + ) + self.delete(self.file_list()) + + def __call__(self, node, *args, **kw): + nonlocal node_count, node_count_max, node_count_interval, node_count_fname, show_progress + if show_progress: + # Print the progress percentage + node_count += node_count_interval + if node_count_max > 0 and node_count <= node_count_max: + screen.write("\r[%3d%%] " % (node_count * 100 / node_count_max)) + screen.flush() + elif node_count_max > 0 and node_count > node_count_max: + screen.write("\r[100%] ") + screen.flush() + else: + screen.write("\r[Initial build] ") + screen.flush() + + def delete(self, files): + if len(files) == 0: + return + if env["verbose"]: + # Utter something + screen.write("\rPurging %d %s from cache...\n" % (len(files), len(files) > 1 and "files" or "file")) + [os.remove(f) for f in files] + + def file_list(self): + if self.path is None: + # Nothing to do + return [] + # Gather a list of (filename, (size, atime)) within the + # cache directory + file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, "*", "*"))] + if file_stat == []: + # Nothing to do + return [] + # Weight the cache files by size (assumed to be roughly + # proportional to the recompilation time) times an exponential + # decay since the ctime, and return a list with the entries + # (filename, size, weight). 
+ current_time = time.time() + file_stat = [(x[0], x[1][0], (current_time - x[1][1])) for x in file_stat] + # Sort by the most recently accessed files (most sensible to keep) first + file_stat.sort(key=lambda x: x[2]) + # Search for the first entry where the storage limit is + # reached + sum, mark = 0, None + for i, x in enumerate(file_stat): + sum += x[1] + if sum > self.limit: + mark = i + break + if mark is None: + return [] + else: + return [x[0] for x in file_stat[mark:]] + + def convert_size(self, size_bytes): + if size_bytes == 0: + return "0 bytes" + size_name = ("bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB") + i = int(math.floor(math.log(size_bytes, 1024))) + p = math.pow(1024, i) + s = round(size_bytes / p, 2) + return "%s %s" % (int(s) if i == 0 else s, size_name[i]) + + def get_size(self, start_path="."): + total_size = 0 + for dirpath, dirnames, filenames in os.walk(start_path): + for f in filenames: + fp = os.path.join(dirpath, f) + total_size += os.path.getsize(fp) + return total_size + + def progress_finish(target, source, env): + nonlocal node_count, progressor + with open(node_count_fname, "w") as f: + f.write("%d\n" % node_count) + progressor.delete(progressor.file_list()) + + try: + with open(node_count_fname) as f: + node_count_max = int(f.readline()) + except: + pass + + cache_directory = os.environ.get("SCONS_CACHE") + # Simple cache pruning, attached to SCons' progress callback. Trim the + # cache directory to a size not larger than cache_limit. + cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024 + progressor = cache_progress(cache_directory, cache_limit) + Progress(progressor, interval=node_count_interval) + + progress_finish_command = Command("progress_finish", [], progress_finish) + AlwaysBuild(progress_finish_command) From 23ef1e0f7032db16bd7075a86da31f66cbf3b85e Mon Sep 17 00:00:00 2001 From: "Andrii Doroshenko (Xrayez)" Date: Fri, 20 Mar 2020 22:49:38 +0200 Subject: [PATCH 31/34] SCons: Dump construction environment to a file A new `methods.dump(env)` is added to dump the construction environment used by SCons to build Godot to a `.scons_env.json`. The file can be used for debugging purposes and any external tool. (cherry picked from commit 42bee75e86e81fcd9b51b2e960d13b89b18ad4bf) --- .gitignore | 3 +++ SConstruct | 5 ++++- methods.py | 11 +++++++++++ 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 55d14e51bd..2c8288e092 100644 --- a/.gitignore +++ b/.gitignore @@ -339,6 +339,9 @@ platform/windows/godot_res.res # Visual Studio Code workspace file *.code-workspace +# Scons construction environment dump +.scons_env.json + # Scons progress indicator .scons_node_count diff --git a/SConstruct b/SConstruct index d129fd32a6..b3d033dc90 100644 --- a/SConstruct +++ b/SConstruct @@ -641,6 +641,9 @@ elif selected_platform != "": else: sys.exit(255) -# The following only makes sense when the env is defined, and assumes it is +# The following only makes sense when the 'env' is defined, and assumes it is. if "env" in locals(): methods.show_progress(env) + # TODO: replace this with `env.Dump(format="json")` + # once we start requiring SCons 4.0 as min version. 
+ methods.dump(env) diff --git a/methods.py b/methods.py index 3f5800a229..c96bfa9444 100644 --- a/methods.py +++ b/methods.py @@ -837,3 +837,14 @@ def show_progress(env): progress_finish_command = Command("progress_finish", [], progress_finish) AlwaysBuild(progress_finish_command) + + +def dump(env): + # Dumps latest build information for debugging purposes and external tools. + from json import dump + + def non_serializable(obj): + return "<<non-serializable: %s>>" % (type(obj).__qualname__) + + with open(".scons_env.json", "w") as f: + dump(env.Dictionary(), f, indent=4, default=non_serializable) From ff11fdd0172413cc67500a782fc27f5f6b401117 Mon Sep 17 00:00:00 2001 From: PouleyKetchoupp Date: Mon, 30 Mar 2020 22:59:06 +0200 Subject: [PATCH 32/34] Pre-commit hook instructions on Windows (cherry picked from commit 7b6e664178ce0ff559a7a5033606a089881f21b4) --- misc/hooks/README.md | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/misc/hooks/README.md b/misc/hooks/README.md index dad5300a09..5661c239ed 100644 --- a/misc/hooks/README.md +++ b/misc/hooks/README.md @@ -20,7 +20,18 @@ contributors to make sure they comply with our requirements. Copy all the files from this folder into your `.git/hooks` folder, and make sure the hooks and helper scripts are executable. +#### Linux/MacOS + The hooks rely on bash scripts and tools which should be in the system `PATH`, -so they should work out of the box on Linux/macOS, and might work on Windows -when using `git-bash.exe` with `clang-format`, Python, `black`, etc. in the -`PATH`. +so they should work out of the box on Linux/macOS. + +#### Windows + +##### clang-format +- Download LLVM for Windows (version 8 or later) from + +- Make sure LLVM is added to the `PATH` during installation + +##### black +- Python installation: make sure Python is added to the `PATH` +- Install `black` - in any console: `pip3 install black` From 320ae61090b06322f6989301c7d4b460a34c3014 Mon Sep 17 00:00:00 2001 From: bruvzg <7645683+bruvzg@users.noreply.github.com> Date: Tue, 14 Apr 2020 16:23:15 +0300 Subject: [PATCH 33/34] Git Hooks: Add support for GUI git clients. [ci skip] (cherry picked from commit fdad0e3bd1e2f8dcfcdeae7c2617eda5ac24324e) --- misc/hooks/pre-commit-black | 80 ++++++++++++++++++++-- misc/hooks/pre-commit-clang-format | 80 ++++++++++++++++++++-- misc/hooks/winmessage.ps1 | 103 +++++++++++++++++++++++++++++ 3 files changed, 253 insertions(+), 10 deletions(-) create mode 100755 misc/hooks/winmessage.ps1 diff --git a/misc/hooks/pre-commit-black b/misc/hooks/pre-commit-black index 2dcc2e8cf1..633b432b89 100755 --- a/misc/hooks/pre-commit-black +++ b/misc/hooks/pre-commit-black @@ -25,6 +25,15 @@ else READER=cat fi +# Path to zenity +ZENITY=`which zenity` + +# Path to xmessage +XMSG=`which xmessage` + +# Path to powershell (Windows only) +PWSH=`which powershell` + ################################################################## # There should be no need to change anything below this line. @@ -53,6 +62,19 @@ else fi if [ ! -x "$BLACK" ] ; then + if [ ! -t 1 ] ; then + if [ -x "$ZENITY" ] ; then + $ZENITY --error --title="Error" --text="Error: black executable not found." + exit 1 + elif [ -x "$XMSG" ] ; then + $XMSG -center -title "Error" "Error: black executable not found." 
+ exit 1 + elif [ \( \( "$OSTYPE" = "msys" \) -o \( "$OSTYPE" = "win32" \) \) -a \( -x "$PWSH" \) ]; then + winmessage="$(canonicalize_filename "./.git/hooks/winmessage.ps1")" + $PWSH -noprofile -executionpolicy bypass -file "$winmessage" -center -title "Error" --text "Error: black executable not found." + exit 1 + fi + fi printf "Error: black executable not found.\n" printf "Set the correct path in $(canonicalize_filename "$0").\n" exit 1 @@ -99,14 +121,62 @@ fi # a patch has been created, notify the user and exit printf "\nThe following differences were found between the code to commit " printf "and the black formatter rules:\n\n" -$READER "$patch" -printf "\n" -# Allows us to read user input below, assigns stdin to keyboard -exec < /dev/tty +if [ -t 1 ] ; then + $READER "$patch" + printf "\n" + # Allows us to read user input below, assigns stdin to keyboard + exec < /dev/tty + terminal="1" +else + cat "$patch" + printf "\n" + # Allows non zero zenity/powershell output + set +e + terminal="0" +fi while true; do - read -p "Do you want to apply that patch (Y - Apply, N - Do not apply, S - Apply and stage files)? [Y/N/S] " yn + if [ $terminal = "0" ] ; then + if [ -x "$ZENITY" ] ; then + ans=$($ZENITY --text-info --filename="$patch" --width=800 --height=600 --title="Do you want to apply that patch?" --ok-label="Apply" --cancel-label="Do not apply" --extra-button="Apply and stage") + if [ "$?" = "0" ] ; then + yn="Y" + else + if [ "$ans" = "Apply and stage" ] ; then + yn="S" + else + yn="N" + fi + fi + elif [ -x "$XMSG" ] ; then + $XMSG -file "$patch" -buttons "Apply":100,"Apply and stage":200,"Do not apply":0 -center -default "Do not apply" -geometry 800x600 -title "Do you want to apply that patch?" + ans=$? + if [ "$ans" = "100" ] ; then + yn="Y" + elif [ "$ans" = "200" ] ; then + yn="S" + else + yn="N" + fi + elif [ \( \( "$OSTYPE" = "msys" \) -o \( "$OSTYPE" = "win32" \) \) -a \( -x "$PWSH" \) ]; then + winmessage="$(canonicalize_filename "./.git/hooks/winmessage.ps1")" + $PWSH -noprofile -executionpolicy bypass -file "$winmessage" -file "$patch" -buttons "Apply":100,"Apply and stage":200,"Do not apply":0 -center -default "Do not apply" -geometry 800x600 -title "Do you want to apply that patch?" + ans=$? + if [ "$ans" = "100" ] ; then + yn="Y" + elif [ "$ans" = "200" ] ; then + yn="S" + else + yn="N" + fi + else + printf "Error: zenity, xmessage, or powershell executable not found.\n" + exit 1 + fi + else + read -p "Do you want to apply that patch (Y - Apply, N - Do not apply, S - Apply and stage files)? [Y/N/S] " yn + fi case $yn in [Yy] ) git apply $patch; printf "The patch was applied. You can now stage the changes and commit again.\n\n"; diff --git a/misc/hooks/pre-commit-clang-format b/misc/hooks/pre-commit-clang-format index c5cf4ecbb1..5a48aa1bfa 100755 --- a/misc/hooks/pre-commit-clang-format +++ b/misc/hooks/pre-commit-clang-format @@ -38,6 +38,15 @@ else READER=cat fi +# Path to zenity +ZENITY=`which zenity` + +# Path to xmessage +XMSG=`which xmessage` + +# Path to powershell (Windows only) +PWSH=`which powershell` + ################################################################## # There should be no need to change anything below this line. @@ -66,6 +75,19 @@ else fi if [ ! -x "$CLANG_FORMAT" ] ; then + if [ ! -t 1 ] ; then + if [ -x "$ZENITY" ] ; then + $ZENITY --error --title="Error" --text="Error: clang-format executable not found." + exit 1 + elif [ -x "$XMSG" ] ; then + $XMSG -center -title "Error" "Error: clang-format executable not found." 
+ exit 1 + elif [ \( \( "$OSTYPE" = "msys" \) -o \( "$OSTYPE" = "win32" \) \) -a \( -x "$PWSH" \) ]; then + winmessage="$(canonicalize_filename "./.git/hooks/winmessage.ps1")" + $PWSH -noprofile -executionpolicy bypass -file "$winmessage" -center -title "Error" --text "Error: clang-format executable not found." + exit 1 + fi + fi printf "Error: clang-format executable not found.\n" printf "Set the correct path in $(canonicalize_filename "$0").\n" exit 1 @@ -117,14 +139,62 @@ fi # a patch has been created, notify the user and exit printf "\nThe following differences were found between the code to commit " printf "and the clang-format rules:\n\n" -$READER "$patch" -printf "\n" -# Allows us to read user input below, assigns stdin to keyboard -exec < /dev/tty +if [ -t 1 ] ; then + $READER "$patch" + printf "\n" + # Allows us to read user input below, assigns stdin to keyboard + exec < /dev/tty + terminal="1" +else + cat "$patch" + printf "\n" + # Allows non zero zenity/powershell output + set +e + terminal="0" +fi while true; do - read -p "Do you want to apply that patch (Y - Apply, N - Do not apply, S - Apply and stage files)? [Y/N/S] " yn + if [ $terminal = "0" ] ; then + if [ -x "$ZENITY" ] ; then + ans=$($ZENITY --text-info --filename="$patch" --width=800 --height=600 --title="Do you want to apply that patch?" --ok-label="Apply" --cancel-label="Do not apply" --extra-button="Apply and stage") + if [ "$?" = "0" ] ; then + yn="Y" + else + if [ "$ans" = "Apply and stage" ] ; then + yn="S" + else + yn="N" + fi + fi + elif [ -x "$XMSG" ] ; then + $XMSG -file "$patch" -buttons "Apply":100,"Apply and stage":200,"Do not apply":0 -center -default "Do not apply" -geometry 800x600 -title "Do you want to apply that patch?" + ans=$? + if [ "$ans" = "100" ] ; then + yn="Y" + elif [ "$ans" = "200" ] ; then + yn="S" + else + yn="N" + fi + elif [ \( \( "$OSTYPE" = "msys" \) -o \( "$OSTYPE" = "win32" \) \) -a \( -x "$PWSH" \) ]; then + winmessage="$(canonicalize_filename "./.git/hooks/winmessage.ps1")" + $PWSH -noprofile -executionpolicy bypass -file "$winmessage" -file "$patch" -buttons "Apply":100,"Apply and stage":200,"Do not apply":0 -center -default "Do not apply" -geometry 800x600 -title "Do you want to apply that patch?" + ans=$? + if [ "$ans" = "100" ] ; then + yn="Y" + elif [ "$ans" = "200" ] ; then + yn="S" + else + yn="N" + fi + else + printf "Error: zenity, xmessage, or powershell executable not found.\n" + exit 1 + fi + else + read -p "Do you want to apply that patch (Y - Apply, N - Do not apply, S - Apply and stage files)? [Y/N/S] " yn + fi case $yn in [Yy] ) git apply $patch; printf "The patch was applied. 
You can now stage the changes and commit again.\n\n"; diff --git a/misc/hooks/winmessage.ps1 b/misc/hooks/winmessage.ps1 new file mode 100755 index 0000000000..3672579544 --- /dev/null +++ b/misc/hooks/winmessage.ps1 @@ -0,0 +1,103 @@ +Param ( + [string]$file = "", + [string]$text = "", + [string]$buttons = "OK:0", + [string]$default = "", + [switch]$nearmouse = $false, + [switch]$center = $false, + [string]$geometry = "", + [int32]$timeout = 0, + [string]$title = "Message" +) +Add-Type -assembly System.Windows.Forms + +$global:Result = 0 + +$main_form = New-Object System.Windows.Forms.Form +$main_form.Text = $title + +$geometry_data = $geometry.Split("+") +if ($geometry_data.Length -ge 1) { + $size_data = $geometry_data[0].Split("x") + if ($size_data.Length -eq 2) { + $main_form.Width = $size_data[0] + $main_form.Height = $size_data[1] + } +} +if ($geometry_data.Length -eq 3) { + $main_form.StartPosition = [System.Windows.Forms.FormStartPosition]::Manual + $main_form.Location = New-Object System.Drawing.Point($geometry_data[1], $geometry_data[2]) +} +if ($nearmouse) { + $main_form.StartPosition = [System.Windows.Forms.FormStartPosition]::Manual + $main_form.Location = System.Windows.Forms.Cursor.Position +} +if ($center) { + $main_form.StartPosition = [System.Windows.Forms.FormStartPosition]::CenterScreen +} + +$main_form.SuspendLayout() + +$button_panel = New-Object System.Windows.Forms.FlowLayoutPanel +$button_panel.SuspendLayout() +$button_panel.FlowDirection = [System.Windows.Forms.FlowDirection]::RightToLeft +$button_panel.Dock = [System.Windows.Forms.DockStyle]::Bottom +$button_panel.Autosize = $true + +if ($file -ne "") { + $text = [IO.File]::ReadAllText($file).replace("`n", "`r`n") +} + +if ($text -ne "") { + $text_box = New-Object System.Windows.Forms.TextBox + $text_box.Multiline = $true + $text_box.ReadOnly = $true + $text_box.Autosize = $true + $text_box.Text = $text + $text_box.Select(0,0) + $text_box.Dock = [System.Windows.Forms.DockStyle]::Fill + $main_form.Controls.Add($text_box) +} + +$buttons_array = $buttons.Split(",") +foreach ($button in $buttons_array) { + $button_data = $button.Split(":") + $button_ctl = New-Object System.Windows.Forms.Button + if ($button_data.Length -eq 2) { + $button_ctl.Tag = $button_data[1] + } else { + $button_ctl.Tag = 100 + $buttons_array.IndexOf($button) + } + if ($default -eq $button_data[0]) { + $main_form.AcceptButton = $button_ctl + } + $button_ctl.Autosize = $true + $button_ctl.Text = $button_data[0] + $button_ctl.Add_Click( + { + Param($sender) + $global:Result = $sender.Tag + $main_form.Close() + } + ) + $button_panel.Controls.Add($button_ctl) +} +$main_form.Controls.Add($button_panel) + +$button_panel.ResumeLayout($false) +$main_form.ResumeLayout($false) + +if ($timeout -gt 0) { + $timer = New-Object System.Windows.Forms.Timer + $timer.Add_Tick( + { + $global:Result = 0 + $main_form.Close() + } + ) + $timer.Interval = $timeout + $timer.Start() +} +$dlg_res = $main_form.ShowDialog() + +[Environment]::Exit($global:Result) From f99ead735c886876e445e2600eb3db4e723746d1 Mon Sep 17 00:00:00 2001 From: bruvzg <7645683+bruvzg@users.noreply.github.com> Date: Fri, 24 Apr 2020 20:11:53 +0300 Subject: [PATCH 34/34] Git Hooks: Suppress "which" error messages. 
(cherry picked from commit 05f041127d3811fe44403d72784ac7553f6681c3) --- misc/hooks/pre-commit-black | 10 +++++----- misc/hooks/pre-commit-clang-format | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/misc/hooks/pre-commit-black b/misc/hooks/pre-commit-black index 633b432b89..76d97294da 100755 --- a/misc/hooks/pre-commit-black +++ b/misc/hooks/pre-commit-black @@ -6,7 +6,7 @@ ################################################################## # SETTINGS # Set path to black binary. -BLACK=`which black` +BLACK=`which black 2>/dev/null` BLACK_OPTIONS="-l 120" # Remove any older patches from previous commits. Set to true or false. @@ -18,7 +18,7 @@ FILE_EXTS="py" # Use pygmentize instead of cat to parse diff with highlighting. # Install it with `pip install pygments` (Linux) or `easy_install Pygments` (Mac) -PYGMENTIZE=`which pygmentize` +PYGMENTIZE=`which pygmentize 2>/dev/null` if [ ! -z "$PYGMENTIZE" ]; then READER="pygmentize -l diff" else @@ -26,13 +26,13 @@ else fi # Path to zenity -ZENITY=`which zenity` +ZENITY=`which zenity 2>/dev/null` # Path to xmessage -XMSG=`which xmessage` +XMSG=`which xmessage 2>/dev/null` # Path to powershell (Windows only) -PWSH=`which powershell` +PWSH=`which powershell 2>/dev/null` ################################################################## # There should be no need to change anything below this line. diff --git a/misc/hooks/pre-commit-clang-format b/misc/hooks/pre-commit-clang-format index 5a48aa1bfa..4e1fbdeb20 100755 --- a/misc/hooks/pre-commit-clang-format +++ b/misc/hooks/pre-commit-clang-format @@ -16,7 +16,7 @@ ################################################################## # SETTINGS # Set path to clang-format binary. -CLANG_FORMAT=`which clang-format` +CLANG_FORMAT=`which clang-format 2>/dev/null` # Remove any older patches from previous commits. Set to true or false. DELETE_OLD_PATCHES=false @@ -31,7 +31,7 @@ FILE_EXTS=".c .h .cpp .hpp .cc .hh .cxx .m .mm .inc .java .glsl" # Use pygmentize instead of cat to parse diff with highlighting. # Install it with `pip install pygments` (Linux) or `easy_install Pygments` (Mac) -PYGMENTIZE=`which pygmentize` +PYGMENTIZE=`which pygmentize 2>/dev/null` if [ ! -z "$PYGMENTIZE" ]; then READER="pygmentize -l diff" else @@ -39,13 +39,13 @@ else fi # Path to zenity -ZENITY=`which zenity` +ZENITY=`which zenity 2>/dev/null` # Path to xmessage -XMSG=`which xmessage` +XMSG=`which xmessage 2>/dev/null` # Path to powershell (Windows only) -PWSH=`which powershell` +PWSH=`which powershell 2>/dev/null` ################################################################## # There should be no need to change anything below this line.
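
A quick way to inspect what the `methods.dump(env)` addition above produces (a minimal sketch, not part of the patches themselves; it assumes a SCons build has already been run from the repository root and that `python3` is on the `PATH` -- the `CCFLAGS` key is only an example entry):

    # .scons_env.json is written next to SConstruct whenever SCons runs;
    # print one entry of the dumped construction environment.
    python3 -c "import json; env = json.load(open('.scons_env.json')); print(env.get('CCFLAGS'))"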
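
To try the hook changes from these patches locally, the sketch below follows the installation step described in misc/hooks/README.md and then invokes the winmessage.ps1 helper by hand with the same button labels and return codes the hooks pass to it. It assumes a Godot checkout and, for the last step, that `powershell` is on the `PATH` (e.g. when running from Git Bash on Windows):

    # Copy the hooks and helper scripts into the local clone and make them
    # executable.
    cp misc/hooks/* .git/hooks/
    chmod +x .git/hooks/*

    # Manually exercise the Windows dialog helper; the exit code is the tag of
    # the clicked button (100 = Apply, 200 = Apply and stage, 0 = Do not apply),
    # which is how pre-commit-black and pre-commit-clang-format interpret it.
    powershell -noprofile -executionpolicy bypass -file misc/hooks/winmessage.ps1 \
        -center -title "Do you want to apply that patch?" \
        -text "example formatter diff" \
        -buttons "Apply":100,"Apply and stage":200,"Do not apply":0
    echo "winmessage returned: $?"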