SCons: Format buildsystem files with psf/black

Configured for a max line length of 120 characters.

psf/black is very opinionated and purposely leaves little room for
configuration. The output is mostly fine for us, but a few things are
worth noting:

- Manually wrapped strings are reflowed. Since we use a line length of
  120 to keep our long command calls readable, some strings that were
  wrapped by hand end up back on a single line and should be merged
  into one literal again (see the first sketch after this list).

- Code generators that use string concatenation extensively look awful,
  since black puts each operand on its own line. We need to refactor
  these generators to use more Pythonic string formatting, for which
  several options are available (`%`, `format` or f-strings); see the
  second sketch below.

- CI checks and a pre-commit hook will be added to ensure that future
  buildsystem changes are well-formatted (a rough sketch of such a
  check is included below).
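
To illustrate the first point, here is a hypothetical before/after; the
message and call are made up for illustration, not taken from the
actual buildsystem files:

```python
# Before: a long message wrapped by hand using implicit string concatenation.
print("Could not detect platform automatically. "
      "Please specify one with the 'platform' argument.")

# After black with a line length of 120: the call now fits on one line, so the
# two adjacent literals end up side by side and should be merged into one string.
print("Could not detect platform automatically. " "Please specify one with the 'platform' argument.")
```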
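
For the second point, a minimal sketch of the intended refactoring. The
helper functions and parameter names are hypothetical, only modeled
after the license/copyright generators touched by this commit:

```python
import io


def write_part_concat(f, license_name, file_index, file_count):
    # Concatenation-heavy generator code: on long chains like the real ones,
    # black puts each operand on its own line, which reads poorly.
    f.write(
        '\t{ "'
        + license_name
        + '", '
        + "&COPYRIGHT_INFO_DATA["
        + str(file_index)
        + "], "
        + str(file_count)
        + " },\n"
    )


def write_part_fstring(f, license_name, file_index, file_count):
    # The same output built with an f-string stays on a single, readable line.
    f.write(f'\t{{ "{license_name}", &COPYRIGHT_INFO_DATA[{file_index}], {file_count} }},\n')


buf = io.StringIO()
write_part_fstring(buf, "Expat", 0, 2)  # -> \t{ "Expat", &COPYRIGHT_INFO_DATA[0], 2 },
```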
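
The CI job and pre-commit hook themselves are not part of this commit.
As a rough, hypothetical sketch (script name, file list and options are
assumptions, not the actual setup), such a check could simply run black
in check mode over the buildsystem files:

```python
#!/usr/bin/env python3
# Hypothetical formatting check: exits non-zero if black would reformat
# any buildsystem file, so CI (or a pre-commit hook) can fail on it.
import glob
import subprocess
import sys

# Assumed file set; the real commit formats SConstruct, SCsub and .py build files.
files = ["SConstruct"] + glob.glob("**/SCsub", recursive=True) + glob.glob("**/*.py", recursive=True)

# `black --check` reports files that would change without modifying them.
result = subprocess.run(["black", "--check", "-l", "120"] + files)
sys.exit(result.returncode)
```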
Author: Rémi Verschelde
Date:   2020-03-30 08:28:32 +02:00
Parent: 0168709978
Commit: cd4e46ee65

198 changed files with 4216 additions and 3446 deletions

@ -26,48 +26,48 @@ platform_exporters = []
platform_apis = [] platform_apis = []
for x in sorted(glob.glob("platform/*")): for x in sorted(glob.glob("platform/*")):
if (not os.path.isdir(x) or not os.path.exists(x + "/detect.py")): if not os.path.isdir(x) or not os.path.exists(x + "/detect.py"):
continue continue
tmppath = "./" + x tmppath = "./" + x
sys.path.insert(0, tmppath) sys.path.insert(0, tmppath)
import detect import detect
if (os.path.exists(x + "/export/export.cpp")): if os.path.exists(x + "/export/export.cpp"):
platform_exporters.append(x[9:]) platform_exporters.append(x[9:])
if (os.path.exists(x + "/api/api.cpp")): if os.path.exists(x + "/api/api.cpp"):
platform_apis.append(x[9:]) platform_apis.append(x[9:])
if (detect.is_active()): if detect.is_active():
active_platforms.append(detect.get_name()) active_platforms.append(detect.get_name())
active_platform_ids.append(x) active_platform_ids.append(x)
if (detect.can_build()): if detect.can_build():
x = x.replace("platform/", "") # rest of world x = x.replace("platform/", "") # rest of world
x = x.replace("platform\\", "") # win32 x = x.replace("platform\\", "") # win32
platform_list += [x] platform_list += [x]
platform_opts[x] = detect.get_opts() platform_opts[x] = detect.get_opts()
platform_flags[x] = detect.get_flags() platform_flags[x] = detect.get_flags()
sys.path.remove(tmppath) sys.path.remove(tmppath)
sys.modules.pop('detect') sys.modules.pop("detect")
module_list = methods.detect_modules() module_list = methods.detect_modules()
methods.save_active_platforms(active_platforms, active_platform_ids) methods.save_active_platforms(active_platforms, active_platform_ids)
custom_tools = ['default'] custom_tools = ["default"]
platform_arg = ARGUMENTS.get("platform", ARGUMENTS.get("p", False)) platform_arg = ARGUMENTS.get("platform", ARGUMENTS.get("p", False))
if os.name == "nt" and (platform_arg == "android" or ARGUMENTS.get("use_mingw", False)): if os.name == "nt" and (platform_arg == "android" or ARGUMENTS.get("use_mingw", False)):
custom_tools = ['mingw'] custom_tools = ["mingw"]
elif platform_arg == 'javascript': elif platform_arg == "javascript":
# Use generic POSIX build toolchain for Emscripten. # Use generic POSIX build toolchain for Emscripten.
custom_tools = ['cc', 'c++', 'ar', 'link', 'textfile', 'zip'] custom_tools = ["cc", "c++", "ar", "link", "textfile", "zip"]
env_base = Environment(tools=custom_tools) env_base = Environment(tools=custom_tools)
if 'TERM' in os.environ: if "TERM" in os.environ:
env_base['ENV']['TERM'] = os.environ['TERM'] env_base["ENV"]["TERM"] = os.environ["TERM"]
env_base.AppendENVPath('PATH', os.getenv('PATH')) env_base.AppendENVPath("PATH", os.getenv("PATH"))
env_base.AppendENVPath('PKG_CONFIG_PATH', os.getenv('PKG_CONFIG_PATH')) env_base.AppendENVPath("PKG_CONFIG_PATH", os.getenv("PKG_CONFIG_PATH"))
env_base.disabled_modules = [] env_base.disabled_modules = []
env_base.use_ptrcall = False env_base.use_ptrcall = False
env_base.module_version_string = "" env_base.module_version_string = ""
@ -94,7 +94,7 @@ env_base.SConsignFile(".sconsign{0}.dblite".format(pickle.HIGHEST_PROTOCOL))
# Build options # Build options
customs = ['custom.py'] customs = ["custom.py"]
profile = ARGUMENTS.get("profile", False) profile = ARGUMENTS.get("profile", False)
if profile: if profile:
@ -106,62 +106,62 @@ if profile:
opts = Variables(customs, ARGUMENTS) opts = Variables(customs, ARGUMENTS)
# Target build options # Target build options
opts.Add('arch', "Platform-dependent architecture (arm/arm64/x86/x64/mips/...)", '') opts.Add("arch", "Platform-dependent architecture (arm/arm64/x86/x64/mips/...)", "")
opts.Add(EnumVariable('bits', "Target platform bits", 'default', ('default', '32', '64'))) opts.Add(EnumVariable("bits", "Target platform bits", "default", ("default", "32", "64")))
opts.Add('p', "Platform (alias for 'platform')", '') opts.Add("p", "Platform (alias for 'platform')", "")
opts.Add('platform', "Target platform (%s)" % ('|'.join(platform_list), ), '') opts.Add("platform", "Target platform (%s)" % ("|".join(platform_list),), "")
opts.Add(EnumVariable('target', "Compilation target", 'debug', ('debug', 'release_debug', 'release'))) opts.Add(EnumVariable("target", "Compilation target", "debug", ("debug", "release_debug", "release")))
opts.Add(EnumVariable('optimize', "Optimization type", 'speed', ('speed', 'size'))) opts.Add(EnumVariable("optimize", "Optimization type", "speed", ("speed", "size")))
opts.Add(BoolVariable('tools', "Build the tools (a.k.a. the Godot editor)", True)) opts.Add(BoolVariable("tools", "Build the tools (a.k.a. the Godot editor)", True))
opts.Add(BoolVariable('use_lto', 'Use link-time optimization', False)) opts.Add(BoolVariable("use_lto", "Use link-time optimization", False))
opts.Add(BoolVariable('use_precise_math_checks', 'Math checks use very precise epsilon (useful to debug the engine)', False)) opts.Add(BoolVariable("use_precise_math_checks", "Math checks use very precise epsilon (debug option)", False))
# Components # Components
opts.Add(BoolVariable('deprecated', "Enable deprecated features", True)) opts.Add(BoolVariable("deprecated", "Enable deprecated features", True))
opts.Add(BoolVariable('minizip', "Enable ZIP archive support using minizip", True)) opts.Add(BoolVariable("minizip", "Enable ZIP archive support using minizip", True))
opts.Add(BoolVariable('xaudio2', "Enable the XAudio2 audio driver", False)) opts.Add(BoolVariable("xaudio2", "Enable the XAudio2 audio driver", False))
# Advanced options # Advanced options
opts.Add(BoolVariable('verbose', "Enable verbose output for the compilation", False)) opts.Add(BoolVariable("verbose", "Enable verbose output for the compilation", False))
opts.Add(BoolVariable('progress', "Show a progress indicator during compilation", True)) opts.Add(BoolVariable("progress", "Show a progress indicator during compilation", True))
opts.Add(EnumVariable('warnings', "Set the level of warnings emitted during compilation", 'all', ('extra', 'all', 'moderate', 'no'))) opts.Add(EnumVariable("warnings", "Level of compilation warnings", "all", ("extra", "all", "moderate", "no")))
opts.Add(BoolVariable('werror', "Treat compiler warnings as errors. Depends on the level of warnings set with 'warnings'", False)) opts.Add(BoolVariable("werror", "Treat compiler warnings as errors", False))
opts.Add(BoolVariable('dev', "If yes, alias for verbose=yes warnings=extra werror=yes", False)) opts.Add(BoolVariable("dev", "If yes, alias for verbose=yes warnings=extra werror=yes", False))
opts.Add('extra_suffix', "Custom extra suffix added to the base filename of all generated binary files", '') opts.Add("extra_suffix", "Custom extra suffix added to the base filename of all generated binary files", "")
opts.Add(BoolVariable('vsproj', "Generate a Visual Studio solution", False)) opts.Add(BoolVariable("vsproj", "Generate a Visual Studio solution", False))
opts.Add(EnumVariable('macports_clang', "Build using Clang from MacPorts", 'no', ('no', '5.0', 'devel'))) opts.Add(EnumVariable("macports_clang", "Build using Clang from MacPorts", "no", ("no", "5.0", "devel")))
opts.Add(BoolVariable('disable_3d', "Disable 3D nodes for a smaller executable", False)) opts.Add(BoolVariable("disable_3d", "Disable 3D nodes for a smaller executable", False))
opts.Add(BoolVariable('disable_advanced_gui', "Disable advanced GUI nodes and behaviors", False)) opts.Add(BoolVariable("disable_advanced_gui", "Disable advanced GUI nodes and behaviors", False))
opts.Add(BoolVariable('no_editor_splash', "Don't use the custom splash screen for the editor", False)) opts.Add(BoolVariable("no_editor_splash", "Don't use the custom splash screen for the editor", False))
opts.Add('system_certs_path', "Use this path as SSL certificates default for editor (for package maintainers)", '') opts.Add("system_certs_path", "Use this path as SSL certificates default for editor (for package maintainers)", "")
# Thirdparty libraries # Thirdparty libraries
#opts.Add(BoolVariable('builtin_assimp', "Use the built-in Assimp library", True)) # opts.Add(BoolVariable('builtin_assimp', "Use the built-in Assimp library", True))
opts.Add(BoolVariable('builtin_bullet', "Use the built-in Bullet library", True)) opts.Add(BoolVariable("builtin_bullet", "Use the built-in Bullet library", True))
opts.Add(BoolVariable('builtin_certs', "Bundle default SSL certificates to be used if you don't specify an override in the project settings", True)) opts.Add(BoolVariable("builtin_certs", "Use the built-in SSL certificates bundles", True))
opts.Add(BoolVariable('builtin_enet', "Use the built-in ENet library", True)) opts.Add(BoolVariable("builtin_enet", "Use the built-in ENet library", True))
opts.Add(BoolVariable('builtin_freetype', "Use the built-in FreeType library", True)) opts.Add(BoolVariable("builtin_freetype", "Use the built-in FreeType library", True))
opts.Add(BoolVariable('builtin_glslang', "Use the built-in glslang library", True)) opts.Add(BoolVariable("builtin_glslang", "Use the built-in glslang library", True))
opts.Add(BoolVariable('builtin_libogg', "Use the built-in libogg library", True)) opts.Add(BoolVariable("builtin_libogg", "Use the built-in libogg library", True))
opts.Add(BoolVariable('builtin_libpng', "Use the built-in libpng library", True)) opts.Add(BoolVariable("builtin_libpng", "Use the built-in libpng library", True))
opts.Add(BoolVariable('builtin_libtheora', "Use the built-in libtheora library", True)) opts.Add(BoolVariable("builtin_libtheora", "Use the built-in libtheora library", True))
opts.Add(BoolVariable('builtin_libvorbis', "Use the built-in libvorbis library", True)) opts.Add(BoolVariable("builtin_libvorbis", "Use the built-in libvorbis library", True))
opts.Add(BoolVariable('builtin_libvpx', "Use the built-in libvpx library", True)) opts.Add(BoolVariable("builtin_libvpx", "Use the built-in libvpx library", True))
opts.Add(BoolVariable('builtin_libwebp', "Use the built-in libwebp library", True)) opts.Add(BoolVariable("builtin_libwebp", "Use the built-in libwebp library", True))
opts.Add(BoolVariable('builtin_wslay', "Use the built-in wslay library", True)) opts.Add(BoolVariable("builtin_wslay", "Use the built-in wslay library", True))
opts.Add(BoolVariable('builtin_mbedtls', "Use the built-in mbedTLS library", True)) opts.Add(BoolVariable("builtin_mbedtls", "Use the built-in mbedTLS library", True))
opts.Add(BoolVariable('builtin_miniupnpc', "Use the built-in miniupnpc library", True)) opts.Add(BoolVariable("builtin_miniupnpc", "Use the built-in miniupnpc library", True))
opts.Add(BoolVariable('builtin_opus', "Use the built-in Opus library", True)) opts.Add(BoolVariable("builtin_opus", "Use the built-in Opus library", True))
opts.Add(BoolVariable('builtin_pcre2', "Use the built-in PCRE2 library", True)) opts.Add(BoolVariable("builtin_pcre2", "Use the built-in PCRE2 library", True))
opts.Add(BoolVariable('builtin_pcre2_with_jit', "Use JIT compiler for the built-in PCRE2 library", True)) opts.Add(BoolVariable("builtin_pcre2_with_jit", "Use JIT compiler for the built-in PCRE2 library", True))
opts.Add(BoolVariable('builtin_recast', "Use the built-in Recast library", True)) opts.Add(BoolVariable("builtin_recast", "Use the built-in Recast library", True))
opts.Add(BoolVariable('builtin_rvo2', "Use the built-in RVO2 library", True)) opts.Add(BoolVariable("builtin_rvo2", "Use the built-in RVO2 library", True))
opts.Add(BoolVariable('builtin_squish', "Use the built-in squish library", True)) opts.Add(BoolVariable("builtin_squish", "Use the built-in squish library", True))
opts.Add(BoolVariable('builtin_vulkan', "Use the built-in Vulkan loader library and headers", True)) opts.Add(BoolVariable("builtin_vulkan", "Use the built-in Vulkan loader library and headers", True))
opts.Add(BoolVariable('builtin_xatlas', "Use the built-in xatlas library", True)) opts.Add(BoolVariable("builtin_xatlas", "Use the built-in xatlas library", True))
opts.Add(BoolVariable('builtin_zlib', "Use the built-in zlib library", True)) opts.Add(BoolVariable("builtin_zlib", "Use the built-in zlib library", True))
opts.Add(BoolVariable('builtin_zstd', "Use the built-in Zstd library", True)) opts.Add(BoolVariable("builtin_zstd", "Use the built-in Zstd library", True))
# Compilation environment setup # Compilation environment setup
opts.Add("CXX", "C++ compiler") opts.Add("CXX", "C++ compiler")
@ -184,63 +184,64 @@ for x in module_list:
tmppath = "./modules/" + x tmppath = "./modules/" + x
sys.path.insert(0, tmppath) sys.path.insert(0, tmppath)
import config import config
enabled_attr = getattr(config, "is_enabled", None) enabled_attr = getattr(config, "is_enabled", None)
if (callable(enabled_attr) and not config.is_enabled()): if callable(enabled_attr) and not config.is_enabled():
module_enabled = False module_enabled = False
sys.path.remove(tmppath) sys.path.remove(tmppath)
sys.modules.pop('config') sys.modules.pop("config")
opts.Add(BoolVariable('module_' + x + '_enabled', "Enable module '%s'" % (x, ), module_enabled)) opts.Add(BoolVariable("module_" + x + "_enabled", "Enable module '%s'" % (x,), module_enabled))
opts.Update(env_base) # update environment opts.Update(env_base) # update environment
Help(opts.GenerateHelpText(env_base)) # generate help Help(opts.GenerateHelpText(env_base)) # generate help
# add default include paths # add default include paths
env_base.Prepend(CPPPATH=['#']) env_base.Prepend(CPPPATH=["#"])
# configure ENV for platform # configure ENV for platform
env_base.platform_exporters = platform_exporters env_base.platform_exporters = platform_exporters
env_base.platform_apis = platform_apis env_base.platform_apis = platform_apis
if (env_base["use_precise_math_checks"]): if env_base["use_precise_math_checks"]:
env_base.Append(CPPDEFINES=['PRECISE_MATH_CHECKS']) env_base.Append(CPPDEFINES=["PRECISE_MATH_CHECKS"])
if (env_base['target'] == 'debug'): if env_base["target"] == "debug":
env_base.Append(CPPDEFINES=['DEBUG_MEMORY_ALLOC','DISABLE_FORCED_INLINE']) env_base.Append(CPPDEFINES=["DEBUG_MEMORY_ALLOC", "DISABLE_FORCED_INLINE"])
# The two options below speed up incremental builds, but reduce the certainty that all files # The two options below speed up incremental builds, but reduce the certainty that all files
# will properly be rebuilt. As such, we only enable them for debug (dev) builds, not release. # will properly be rebuilt. As such, we only enable them for debug (dev) builds, not release.
# To decide whether to rebuild a file, use the MD5 sum only if the timestamp has changed. # To decide whether to rebuild a file, use the MD5 sum only if the timestamp has changed.
# http://scons.org/doc/production/HTML/scons-user/ch06.html#idm139837621851792 # http://scons.org/doc/production/HTML/scons-user/ch06.html#idm139837621851792
env_base.Decider('MD5-timestamp') env_base.Decider("MD5-timestamp")
# Use cached implicit dependencies by default. Can be overridden by specifying `--implicit-deps-changed` in the command line. # Use cached implicit dependencies by default. Can be overridden by specifying `--implicit-deps-changed` in the command line.
# http://scons.org/doc/production/HTML/scons-user/ch06s04.html # http://scons.org/doc/production/HTML/scons-user/ch06s04.html
env_base.SetOption('implicit_cache', 1) env_base.SetOption("implicit_cache", 1)
if (env_base['no_editor_splash']): if env_base["no_editor_splash"]:
env_base.Append(CPPDEFINES=['NO_EDITOR_SPLASH']) env_base.Append(CPPDEFINES=["NO_EDITOR_SPLASH"])
if not env_base['deprecated']: if not env_base["deprecated"]:
env_base.Append(CPPDEFINES=['DISABLE_DEPRECATED']) env_base.Append(CPPDEFINES=["DISABLE_DEPRECATED"])
env_base.platforms = {} env_base.platforms = {}
selected_platform = "" selected_platform = ""
if env_base['platform'] != "": if env_base["platform"] != "":
selected_platform = env_base['platform'] selected_platform = env_base["platform"]
elif env_base['p'] != "": elif env_base["p"] != "":
selected_platform = env_base['p'] selected_platform = env_base["p"]
env_base["platform"] = selected_platform env_base["platform"] = selected_platform
else: else:
# Missing `platform` argument, try to detect platform automatically # Missing `platform` argument, try to detect platform automatically
if sys.platform.startswith('linux'): if sys.platform.startswith("linux"):
selected_platform = 'linuxbsd' selected_platform = "linuxbsd"
elif sys.platform == 'darwin': elif sys.platform == "darwin":
selected_platform = 'osx' selected_platform = "osx"
elif sys.platform == 'win32': elif sys.platform == "win32":
selected_platform = 'windows' selected_platform = "windows"
else: else:
print("Could not detect platform automatically. Supported platforms:") print("Could not detect platform automatically. Supported platforms:")
for x in platform_list: for x in platform_list:
@ -254,8 +255,7 @@ else:
if selected_platform in ["linux", "bsd", "x11"]: if selected_platform in ["linux", "bsd", "x11"]:
if selected_platform == "x11": if selected_platform == "x11":
# Deprecated alias kept for compatibility. # Deprecated alias kept for compatibility.
print('Platform "x11" has been renamed to "linuxbsd" in Godot 4.0. ' print('Platform "x11" has been renamed to "linuxbsd" in Godot 4.0. Building for platform "linuxbsd".')
'Building for platform "linuxbsd".')
# Alias for convenience. # Alias for convenience.
selected_platform = "linuxbsd" selected_platform = "linuxbsd"
@ -263,17 +263,18 @@ if selected_platform in platform_list:
tmppath = "./platform/" + selected_platform tmppath = "./platform/" + selected_platform
sys.path.insert(0, tmppath) sys.path.insert(0, tmppath)
import detect import detect
if "create" in dir(detect): if "create" in dir(detect):
env = detect.create(env_base) env = detect.create(env_base)
else: else:
env = env_base.Clone() env = env_base.Clone()
if env['dev']: if env["dev"]:
env['verbose'] = True env["verbose"] = True
env['warnings'] = "extra" env["warnings"] = "extra"
env['werror'] = True env["werror"] = True
if env['vsproj']: if env["vsproj"]:
env.vs_incs = [] env.vs_incs = []
env.vs_srcs = [] env.vs_srcs = []
@ -286,7 +287,7 @@ if selected_platform in platform_list:
pieces = fname.split(".") pieces = fname.split(".")
if len(pieces) > 0: if len(pieces) > 0:
basename = pieces[0] basename = pieces[0]
basename = basename.replace('\\\\', '/') basename = basename.replace("\\\\", "/")
if os.path.isfile(basename + ".h"): if os.path.isfile(basename + ".h"):
env.vs_incs = env.vs_incs + [basename + ".h"] env.vs_incs = env.vs_incs + [basename + ".h"]
elif os.path.isfile(basename + ".hpp"): elif os.path.isfile(basename + ".hpp"):
@ -295,28 +296,29 @@ if selected_platform in platform_list:
env.vs_srcs = env.vs_srcs + [basename + ".c"] env.vs_srcs = env.vs_srcs + [basename + ".c"]
elif os.path.isfile(basename + ".cpp"): elif os.path.isfile(basename + ".cpp"):
env.vs_srcs = env.vs_srcs + [basename + ".cpp"] env.vs_srcs = env.vs_srcs + [basename + ".cpp"]
env.AddToVSProject = AddToVSProject env.AddToVSProject = AddToVSProject
env.extra_suffix = "" env.extra_suffix = ""
if env["extra_suffix"] != '': if env["extra_suffix"] != "":
env.extra_suffix += '.' + env["extra_suffix"] env.extra_suffix += "." + env["extra_suffix"]
# Environment flags # Environment flags
CCFLAGS = env.get('CCFLAGS', '') CCFLAGS = env.get("CCFLAGS", "")
env['CCFLAGS'] = '' env["CCFLAGS"] = ""
env.Append(CCFLAGS=str(CCFLAGS).split()) env.Append(CCFLAGS=str(CCFLAGS).split())
CFLAGS = env.get('CFLAGS', '') CFLAGS = env.get("CFLAGS", "")
env['CFLAGS'] = '' env["CFLAGS"] = ""
env.Append(CFLAGS=str(CFLAGS).split()) env.Append(CFLAGS=str(CFLAGS).split())
CXXFLAGS = env.get('CXXFLAGS', '') CXXFLAGS = env.get("CXXFLAGS", "")
env['CXXFLAGS'] = '' env["CXXFLAGS"] = ""
env.Append(CXXFLAGS=str(CXXFLAGS).split()) env.Append(CXXFLAGS=str(CXXFLAGS).split())
LINKFLAGS = env.get('LINKFLAGS', '') LINKFLAGS = env.get("LINKFLAGS", "")
env['LINKFLAGS'] = '' env["LINKFLAGS"] = ""
env.Append(LINKFLAGS=str(LINKFLAGS).split()) env.Append(LINKFLAGS=str(LINKFLAGS).split())
# Platform specific flags # Platform specific flags
@ -335,12 +337,12 @@ if selected_platform in platform_list:
if not env.msvc: if not env.msvc:
# Specifying GNU extensions support explicitly, which are supported by # Specifying GNU extensions support explicitly, which are supported by
# both GCC and Clang. Both currently default to gnu11 and gnu++14. # both GCC and Clang. Both currently default to gnu11 and gnu++14.
env.Prepend(CFLAGS=['-std=gnu11']) env.Prepend(CFLAGS=["-std=gnu11"])
env.Prepend(CXXFLAGS=['-std=gnu++17']) env.Prepend(CXXFLAGS=["-std=gnu++17"])
else: else:
# MSVC doesn't have clear C standard support, /std only covers C++. # MSVC doesn't have clear C standard support, /std only covers C++.
# We apply it to CCFLAGS (both C and C++ code) in case it impacts C features. # We apply it to CCFLAGS (both C and C++ code) in case it impacts C features.
env.Prepend(CCFLAGS=['/std:c++17']) env.Prepend(CCFLAGS=["/std:c++17"])
# Enforce our minimal compiler version requirements # Enforce our minimal compiler version requirements
cc_version = methods.get_compiler_version(env) or [-1, -1] cc_version = methods.get_compiler_version(env) or [-1, -1]
@ -351,16 +353,20 @@ if selected_platform in platform_list:
# GCC 8 before 8.4 has a regression in the support of guaranteed copy elision # GCC 8 before 8.4 has a regression in the support of guaranteed copy elision
# which causes a build failure: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86521 # which causes a build failure: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86521
if cc_version_major == 8 and cc_version_minor < 4: if cc_version_major == 8 and cc_version_minor < 4:
print("Detected GCC 8 version < 8.4, which is not supported due to a " print(
"regression in its C++17 guaranteed copy elision support. Use a " "Detected GCC 8 version < 8.4, which is not supported due to a "
"newer GCC version, or Clang 6 or later by passing \"use_llvm=yes\" " "regression in its C++17 guaranteed copy elision support. Use a "
"to the SCons command line.") 'newer GCC version, or Clang 6 or later by passing "use_llvm=yes" '
"to the SCons command line."
)
sys.exit(255) sys.exit(255)
elif cc_version_major < 7: elif cc_version_major < 7:
print("Detected GCC version older than 7, which does not fully support " print(
"C++17. Supported versions are GCC 7, 9 and later. Use a newer GCC " "Detected GCC version older than 7, which does not fully support "
"version, or Clang 6 or later by passing \"use_llvm=yes\" to the " "C++17. Supported versions are GCC 7, 9 and later. Use a newer GCC "
"SCons command line.") 'version, or Clang 6 or later by passing "use_llvm=yes" to the '
"SCons command line."
)
sys.exit(255) sys.exit(255)
elif methods.using_clang(env): elif methods.using_clang(env):
# Apple LLVM versions differ from upstream LLVM version \o/, compare # Apple LLVM versions differ from upstream LLVM version \o/, compare
@ -368,87 +374,98 @@ if selected_platform in platform_list:
if env["platform"] == "osx" or env["platform"] == "iphone": if env["platform"] == "osx" or env["platform"] == "iphone":
vanilla = methods.is_vanilla_clang(env) vanilla = methods.is_vanilla_clang(env)
if vanilla and cc_version_major < 6: if vanilla and cc_version_major < 6:
print("Detected Clang version older than 6, which does not fully support " print(
"C++17. Supported versions are Clang 6 and later.") "Detected Clang version older than 6, which does not fully support "
"C++17. Supported versions are Clang 6 and later."
)
sys.exit(255) sys.exit(255)
elif not vanilla and cc_version_major < 10: elif not vanilla and cc_version_major < 10:
print("Detected Apple Clang version older than 10, which does not fully " print(
"support C++17. Supported versions are Apple Clang 10 and later.") "Detected Apple Clang version older than 10, which does not fully "
"support C++17. Supported versions are Apple Clang 10 and later."
)
sys.exit(255) sys.exit(255)
elif cc_version_major < 6: elif cc_version_major < 6:
print("Detected Clang version older than 6, which does not fully support " print(
"C++17. Supported versions are Clang 6 and later.") "Detected Clang version older than 6, which does not fully support "
"C++17. Supported versions are Clang 6 and later."
)
sys.exit(255) sys.exit(255)
# Configure compiler warnings # Configure compiler warnings
if env.msvc: if env.msvc:
# Truncations, narrowing conversions, signed/unsigned comparisons... # Truncations, narrowing conversions, signed/unsigned comparisons...
disable_nonessential_warnings = ['/wd4267', '/wd4244', '/wd4305', '/wd4018', '/wd4800'] disable_nonessential_warnings = ["/wd4267", "/wd4244", "/wd4305", "/wd4018", "/wd4800"]
if (env["warnings"] == 'extra'): if env["warnings"] == "extra":
env.Append(CCFLAGS=['/Wall']) # Implies /W4 env.Append(CCFLAGS=["/Wall"]) # Implies /W4
elif (env["warnings"] == 'all'): elif env["warnings"] == "all":
env.Append(CCFLAGS=['/W3'] + disable_nonessential_warnings) env.Append(CCFLAGS=["/W3"] + disable_nonessential_warnings)
elif (env["warnings"] == 'moderate'): elif env["warnings"] == "moderate":
env.Append(CCFLAGS=['/W2'] + disable_nonessential_warnings) env.Append(CCFLAGS=["/W2"] + disable_nonessential_warnings)
else: # 'no' else: # 'no'
env.Append(CCFLAGS=['/w']) env.Append(CCFLAGS=["/w"])
# Set exception handling model to avoid warnings caused by Windows system headers. # Set exception handling model to avoid warnings caused by Windows system headers.
env.Append(CCFLAGS=['/EHsc']) env.Append(CCFLAGS=["/EHsc"])
if (env["werror"]): if env["werror"]:
env.Append(CCFLAGS=['/WX']) env.Append(CCFLAGS=["/WX"])
# Force to use Unicode encoding # Force to use Unicode encoding
env.Append(MSVC_FLAGS=['/utf8']) env.Append(MSVC_FLAGS=["/utf8"])
else: # Rest of the world else: # Rest of the world
shadow_local_warning = [] shadow_local_warning = []
all_plus_warnings = ['-Wwrite-strings'] all_plus_warnings = ["-Wwrite-strings"]
if methods.using_gcc(env): if methods.using_gcc(env):
if cc_version_major >= 7: if cc_version_major >= 7:
shadow_local_warning = ['-Wshadow-local'] shadow_local_warning = ["-Wshadow-local"]
if (env["warnings"] == 'extra'): if env["warnings"] == "extra":
env.Append(CCFLAGS=['-Wall', '-Wextra', '-Wno-unused-parameter'] env.Append(CCFLAGS=["-Wall", "-Wextra", "-Wno-unused-parameter"] + all_plus_warnings + shadow_local_warning)
+ all_plus_warnings + shadow_local_warning) env.Append(CXXFLAGS=["-Wctor-dtor-privacy", "-Wnon-virtual-dtor"])
env.Append(CXXFLAGS=['-Wctor-dtor-privacy', '-Wnon-virtual-dtor'])
if methods.using_gcc(env): if methods.using_gcc(env):
env.Append(CCFLAGS=['-Walloc-zero', env.Append(
'-Wduplicated-branches', '-Wduplicated-cond', CCFLAGS=[
'-Wstringop-overflow=4', '-Wlogical-op']) "-Walloc-zero",
"-Wduplicated-branches",
"-Wduplicated-cond",
"-Wstringop-overflow=4",
"-Wlogical-op",
]
)
# -Wnoexcept was removed temporarily due to GH-36325. # -Wnoexcept was removed temporarily due to GH-36325.
env.Append(CXXFLAGS=['-Wplacement-new=1']) env.Append(CXXFLAGS=["-Wplacement-new=1"])
if cc_version_major >= 9: if cc_version_major >= 9:
env.Append(CCFLAGS=['-Wattribute-alias=2']) env.Append(CCFLAGS=["-Wattribute-alias=2"])
if methods.using_clang(env): if methods.using_clang(env):
env.Append(CCFLAGS=['-Wimplicit-fallthrough']) env.Append(CCFLAGS=["-Wimplicit-fallthrough"])
elif (env["warnings"] == 'all'): elif env["warnings"] == "all":
env.Append(CCFLAGS=['-Wall'] + shadow_local_warning) env.Append(CCFLAGS=["-Wall"] + shadow_local_warning)
elif (env["warnings"] == 'moderate'): elif env["warnings"] == "moderate":
env.Append(CCFLAGS=['-Wall', '-Wno-unused'] + shadow_local_warning) env.Append(CCFLAGS=["-Wall", "-Wno-unused"] + shadow_local_warning)
else: # 'no' else: # 'no'
env.Append(CCFLAGS=['-w']) env.Append(CCFLAGS=["-w"])
if (env["werror"]): if env["werror"]:
env.Append(CCFLAGS=['-Werror']) env.Append(CCFLAGS=["-Werror"])
# FIXME: Temporary workaround after the Vulkan merge, remove once warnings are fixed. # FIXME: Temporary workaround after the Vulkan merge, remove once warnings are fixed.
if methods.using_gcc(env): if methods.using_gcc(env):
env.Append(CXXFLAGS=['-Wno-error=cpp']) env.Append(CXXFLAGS=["-Wno-error=cpp"])
else: else:
env.Append(CXXFLAGS=['-Wno-error=#warnings']) env.Append(CXXFLAGS=["-Wno-error=#warnings"])
else: # always enable those errors else: # always enable those errors
env.Append(CCFLAGS=['-Werror=return-type']) env.Append(CCFLAGS=["-Werror=return-type"])
if (hasattr(detect, 'get_program_suffix')): if hasattr(detect, "get_program_suffix"):
suffix = "." + detect.get_program_suffix() suffix = "." + detect.get_program_suffix()
else: else:
suffix = "." + selected_platform suffix = "." + selected_platform
if (env["target"] == "release"): if env["target"] == "release":
if env["tools"]: if env["tools"]:
print("Tools can only be built with targets 'debug' and 'release_debug'.") print("Tools can only be built with targets 'debug' and 'release_debug'.")
sys.exit(255) sys.exit(255)
suffix += ".opt" suffix += ".opt"
env.Append(CPPDEFINES=['NDEBUG']) env.Append(CPPDEFINES=["NDEBUG"])
elif (env["target"] == "release_debug"): elif env["target"] == "release_debug":
if env["tools"]: if env["tools"]:
suffix += ".opt.tools" suffix += ".opt.tools"
else: else:
@ -461,27 +478,28 @@ if selected_platform in platform_list:
if env["arch"] != "": if env["arch"] != "":
suffix += "." + env["arch"] suffix += "." + env["arch"]
elif (env["bits"] == "32"): elif env["bits"] == "32":
suffix += ".32" suffix += ".32"
elif (env["bits"] == "64"): elif env["bits"] == "64":
suffix += ".64" suffix += ".64"
suffix += env.extra_suffix suffix += env.extra_suffix
sys.path.remove(tmppath) sys.path.remove(tmppath)
sys.modules.pop('detect') sys.modules.pop("detect")
env.module_list = [] env.module_list = []
env.module_icons_paths = [] env.module_icons_paths = []
env.doc_class_path = {} env.doc_class_path = {}
for x in sorted(module_list): for x in sorted(module_list):
if not env['module_' + x + '_enabled']: if not env["module_" + x + "_enabled"]:
continue continue
tmppath = "./modules/" + x tmppath = "./modules/" + x
sys.path.insert(0, tmppath) sys.path.insert(0, tmppath)
env.current_module = x env.current_module = x
import config import config
if config.can_build(env, selected_platform): if config.can_build(env, selected_platform):
config.configure(env) config.configure(env)
env.module_list.append(x) env.module_list.append(x)
@ -503,7 +521,7 @@ if selected_platform in platform_list:
env.module_icons_paths.append("modules/" + x + "/" + "icons") env.module_icons_paths.append("modules/" + x + "/" + "icons")
sys.path.remove(tmppath) sys.path.remove(tmppath)
sys.modules.pop('config') sys.modules.pop("config")
methods.update_version(env.module_version_string) methods.update_version(env.module_version_string)
@ -522,45 +540,66 @@ if selected_platform in platform_list:
env["LIBSUFFIX"] = suffix + env["LIBSUFFIX"] env["LIBSUFFIX"] = suffix + env["LIBSUFFIX"]
env["SHLIBSUFFIX"] = suffix + env["SHLIBSUFFIX"] env["SHLIBSUFFIX"] = suffix + env["SHLIBSUFFIX"]
if (env.use_ptrcall): if env.use_ptrcall:
env.Append(CPPDEFINES=['PTRCALL_ENABLED']) env.Append(CPPDEFINES=["PTRCALL_ENABLED"])
if env['tools']: if env["tools"]:
env.Append(CPPDEFINES=['TOOLS_ENABLED']) env.Append(CPPDEFINES=["TOOLS_ENABLED"])
if env['disable_3d']: if env["disable_3d"]:
if env['tools']: if env["tools"]:
print("Build option 'disable_3d=yes' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).") print(
"Build option 'disable_3d=yes' cannot be used with 'tools=yes' (editor), "
"only with 'tools=no' (export template)."
)
sys.exit(255) sys.exit(255)
else: else:
env.Append(CPPDEFINES=['_3D_DISABLED']) env.Append(CPPDEFINES=["_3D_DISABLED"])
if env['disable_advanced_gui']: if env["disable_advanced_gui"]:
if env['tools']: if env["tools"]:
print("Build option 'disable_advanced_gui=yes' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).") print(
"Build option 'disable_advanced_gui=yes' cannot be used with 'tools=yes' (editor), "
"only with 'tools=no' (export template)."
)
sys.exit(255) sys.exit(255)
else: else:
env.Append(CPPDEFINES=['ADVANCED_GUI_DISABLED']) env.Append(CPPDEFINES=["ADVANCED_GUI_DISABLED"])
if env['minizip']: if env["minizip"]:
env.Append(CPPDEFINES=['MINIZIP_ENABLED']) env.Append(CPPDEFINES=["MINIZIP_ENABLED"])
editor_module_list = ['regex'] editor_module_list = ["regex"]
for x in editor_module_list: for x in editor_module_list:
if not env['module_' + x + '_enabled']: if not env["module_" + x + "_enabled"]:
if env['tools']: if env["tools"]:
print("Build option 'module_" + x + "_enabled=no' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).") print(
"Build option 'module_" + x + "_enabled=no' cannot be used with 'tools=yes' (editor), "
"only with 'tools=no' (export template)."
)
sys.exit(255) sys.exit(255)
if not env['verbose']: if not env["verbose"]:
methods.no_verbose(sys, env) methods.no_verbose(sys, env)
if (not env["platform"] == "server"): if not env["platform"] == "server":
env.Append(BUILDERS = { 'GLES2_GLSL' : env.Builder(action=run_in_subprocess(gles_builders.build_gles2_headers), suffix='glsl.gen.h', src_suffix='.glsl')}) env.Append(
env.Append(BUILDERS = { 'RD_GLSL' : env.Builder(action=run_in_subprocess(gles_builders.build_rd_headers), suffix='glsl.gen.h', src_suffix='.glsl')}) BUILDERS={
"GLES2_GLSL": env.Builder(
action=run_in_subprocess(gles_builders.build_gles2_headers), suffix="glsl.gen.h", src_suffix=".glsl"
)
}
)
env.Append(
BUILDERS={
"RD_GLSL": env.Builder(
action=run_in_subprocess(gles_builders.build_rd_headers), suffix="glsl.gen.h", src_suffix=".glsl"
)
}
)
scons_cache_path = os.environ.get("SCONS_CACHE") scons_cache_path = os.environ.get("SCONS_CACHE")
if scons_cache_path != None: if scons_cache_path != None:
CacheDir(scons_cache_path) CacheDir(scons_cache_path)
print("Scons cache enabled... (path: '" + scons_cache_path + "')") print("Scons cache enabled... (path: '" + scons_cache_path + "')")
Export('env') Export("env")
# build subdirs, the build order is dependent on link order. # build subdirs, the build order is dependent on link order.
@ -577,16 +616,16 @@ if selected_platform in platform_list:
SConscript("platform/" + selected_platform + "/SCsub") # build selected platform SConscript("platform/" + selected_platform + "/SCsub") # build selected platform
# Microsoft Visual Studio Project Generation # Microsoft Visual Studio Project Generation
if env['vsproj']: if env["vsproj"]:
env['CPPPATH'] = [Dir(path) for path in env['CPPPATH']] env["CPPPATH"] = [Dir(path) for path in env["CPPPATH"]]
methods.generate_vs_project(env, GetOption("num_jobs")) methods.generate_vs_project(env, GetOption("num_jobs"))
methods.generate_cpp_hint_file("cpp.hint") methods.generate_cpp_hint_file("cpp.hint")
# Check for the existence of headers # Check for the existence of headers
conf = Configure(env) conf = Configure(env)
if ("check_c_headers" in env): if "check_c_headers" in env:
for header in env["check_c_headers"]: for header in env["check_c_headers"]:
if (conf.CheckCHeader(header[0])): if conf.CheckCHeader(header[0]):
env.AppendUnique(CPPDEFINES=[header[1]]) env.AppendUnique(CPPDEFINES=[header[1]])
elif selected_platform != "": elif selected_platform != "":
@ -608,26 +647,30 @@ elif selected_platform != "":
sys.exit(255) sys.exit(255)
# The following only makes sense when the env is defined, and assumes it is # The following only makes sense when the env is defined, and assumes it is
if 'env' in locals(): if "env" in locals():
screen = sys.stdout screen = sys.stdout
# Progress reporting is not available in non-TTY environments since it # Progress reporting is not available in non-TTY environments since it
# messes with the output (for example, when writing to a file) # messes with the output (for example, when writing to a file)
show_progress = (env['progress'] and sys.stdout.isatty()) show_progress = env["progress"] and sys.stdout.isatty()
node_count = 0 node_count = 0
node_count_max = 0 node_count_max = 0
node_count_interval = 1 node_count_interval = 1
node_count_fname = str(env.Dir('#')) + '/.scons_node_count' node_count_fname = str(env.Dir("#")) + "/.scons_node_count"
import time, math import time, math
class cache_progress: class cache_progress:
# The default is 1 GB cache and 12 hours half life # The default is 1 GB cache and 12 hours half life
def __init__(self, path = None, limit = 1073741824, half_life = 43200): def __init__(self, path=None, limit=1073741824, half_life=43200):
self.path = path self.path = path
self.limit = limit self.limit = limit
self.exponent_scale = math.log(2) / half_life self.exponent_scale = math.log(2) / half_life
if env['verbose'] and path != None: if env["verbose"] and path != None:
screen.write('Current cache limit is ' + self.convert_size(limit) + ' (used: ' + self.convert_size(self.get_size(path)) + ')\n') screen.write(
"Current cache limit is {} (used: {})\n".format(
self.convert_size(limit), self.convert_size(self.get_size(path))
)
)
self.delete(self.file_list()) self.delete(self.file_list())
def __call__(self, node, *args, **kw): def __call__(self, node, *args, **kw):
@ -635,22 +678,22 @@ if 'env' in locals():
if show_progress: if show_progress:
# Print the progress percentage # Print the progress percentage
node_count += node_count_interval node_count += node_count_interval
if (node_count_max > 0 and node_count <= node_count_max): if node_count_max > 0 and node_count <= node_count_max:
screen.write('\r[%3d%%] ' % (node_count * 100 / node_count_max)) screen.write("\r[%3d%%] " % (node_count * 100 / node_count_max))
screen.flush() screen.flush()
elif (node_count_max > 0 and node_count > node_count_max): elif node_count_max > 0 and node_count > node_count_max:
screen.write('\r[100%] ') screen.write("\r[100%] ")
screen.flush() screen.flush()
else: else:
screen.write('\r[Initial build] ') screen.write("\r[Initial build] ")
screen.flush() screen.flush()
def delete(self, files): def delete(self, files):
if len(files) == 0: if len(files) == 0:
return return
if env['verbose']: if env["verbose"]:
# Utter something # Utter something
screen.write('\rPurging %d %s from cache...\n' % (len(files), len(files) > 1 and 'files' or 'file')) screen.write("\rPurging %d %s from cache...\n" % (len(files), len(files) > 1 and "files" or "file"))
[os.remove(f) for f in files] [os.remove(f) for f in files]
def file_list(self): def file_list(self):
@ -659,7 +702,7 @@ if 'env' in locals():
return [] return []
# Gather a list of (filename, (size, atime)) within the # Gather a list of (filename, (size, atime)) within the
# cache directory # cache directory
file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, '*', '*'))] file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, "*", "*"))]
if file_stat == []: if file_stat == []:
# Nothing to do # Nothing to do
return [] return []
@ -674,7 +717,7 @@ if 'env' in locals():
# Search for the first entry where the storage limit is # Search for the first entry where the storage limit is
# reached # reached
sum, mark = 0, None sum, mark = 0, None
for i,x in enumerate(file_stat): for i, x in enumerate(file_stat):
sum += x[1] sum += x[1]
if sum > self.limit: if sum > self.limit:
mark = i mark = i
@ -693,7 +736,7 @@ if 'env' in locals():
s = round(size_bytes / p, 2) s = round(size_bytes / p, 2)
return "%s %s" % (int(s) if i == 0 else s, size_name[i]) return "%s %s" % (int(s) if i == 0 else s, size_name[i])
def get_size(self, start_path = '.'): def get_size(self, start_path="."):
total_size = 0 total_size = 0
for dirpath, dirnames, filenames in os.walk(start_path): for dirpath, dirnames, filenames in os.walk(start_path):
for f in filenames: for f in filenames:
@ -703,8 +746,8 @@ if 'env' in locals():
def progress_finish(target, source, env): def progress_finish(target, source, env):
global node_count, progressor global node_count, progressor
with open(node_count_fname, 'w') as f: with open(node_count_fname, "w") as f:
f.write('%d\n' % node_count) f.write("%d\n" % node_count)
progressor.delete(progressor.file_list()) progressor.delete(progressor.file_list())
try: try:
@ -718,7 +761,7 @@ if 'env' in locals():
# cache directory to a size not larger than cache_limit. # cache directory to a size not larger than cache_limit.
cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024 cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024
progressor = cache_progress(cache_directory, cache_limit) progressor = cache_progress(cache_directory, cache_limit)
Progress(progressor, interval = node_count_interval) Progress(progressor, interval=node_count_interval)
progress_finish_command = Command('progress_finish', [], progress_finish) progress_finish_command = Command("progress_finish", [], progress_finish)
AlwaysBuild(progress_finish_command) AlwaysBuild(progress_finish_command)
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
import core_builders import core_builders
import make_binders import make_binders
@ -11,31 +11,32 @@ env.core_sources = []
# Generate AES256 script encryption key # Generate AES256 script encryption key
import os import os
txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0" txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0"
if ("SCRIPT_AES256_ENCRYPTION_KEY" in os.environ): if "SCRIPT_AES256_ENCRYPTION_KEY" in os.environ:
e = os.environ["SCRIPT_AES256_ENCRYPTION_KEY"] e = os.environ["SCRIPT_AES256_ENCRYPTION_KEY"]
txt = "" txt = ""
ec_valid = True ec_valid = True
if (len(e) != 64): if len(e) != 64:
ec_valid = False ec_valid = False
else: else:
for i in range(len(e) >> 1): for i in range(len(e) >> 1):
if (i > 0): if i > 0:
txt += "," txt += ","
txts = "0x" + e[i * 2:i * 2 + 2] txts = "0x" + e[i * 2 : i * 2 + 2]
try: try:
int(txts, 16) int(txts, 16)
except: except:
ec_valid = False ec_valid = False
txt += txts txt += txts
if (not ec_valid): if not ec_valid:
txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0" txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0"
print("Invalid AES256 encryption key, not 64 bits hex: " + e) print("Invalid AES256 encryption key, not 64 bits hex: " + e)
# NOTE: It is safe to generate this file here, since this is still executed serially # NOTE: It is safe to generate this file here, since this is still executed serially
with open("script_encryption_key.gen.cpp", "w") as f: with open("script_encryption_key.gen.cpp", "w") as f:
f.write("#include \"core/project_settings.h\"\nuint8_t script_encryption_key[32]={" + txt + "};\n") f.write('#include "core/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n")
# Add required thirdparty code. # Add required thirdparty code.
@ -49,7 +50,6 @@ thirdparty_misc_sources = [
# C sources # C sources
"fastlz.c", "fastlz.c",
"smaz.c", "smaz.c",
# C++ sources # C++ sources
"hq2x.cpp", "hq2x.cpp",
"pcg.cpp", "pcg.cpp",
@ -60,30 +60,30 @@ thirdparty_misc_sources = [thirdparty_misc_dir + file for file in thirdparty_mis
env_thirdparty.add_source_files(env.core_sources, thirdparty_misc_sources) env_thirdparty.add_source_files(env.core_sources, thirdparty_misc_sources)
# Zlib library, can be unbundled # Zlib library, can be unbundled
if env['builtin_zlib']: if env["builtin_zlib"]:
thirdparty_zlib_dir = "#thirdparty/zlib/" thirdparty_zlib_dir = "#thirdparty/zlib/"
thirdparty_zlib_sources = [ thirdparty_zlib_sources = [
"adler32.c", "adler32.c",
"compress.c", "compress.c",
"crc32.c", "crc32.c",
"deflate.c", "deflate.c",
"infback.c", "infback.c",
"inffast.c", "inffast.c",
"inflate.c", "inflate.c",
"inftrees.c", "inftrees.c",
"trees.c", "trees.c",
"uncompr.c", "uncompr.c",
"zutil.c", "zutil.c",
] ]
thirdparty_zlib_sources = [thirdparty_zlib_dir + file for file in thirdparty_zlib_sources] thirdparty_zlib_sources = [thirdparty_zlib_dir + file for file in thirdparty_zlib_sources]
env_thirdparty.Prepend(CPPPATH=[thirdparty_zlib_dir]) env_thirdparty.Prepend(CPPPATH=[thirdparty_zlib_dir])
# Needs to be available in main env too # Needs to be available in main env too
env.Prepend(CPPPATH=[thirdparty_zlib_dir]) env.Prepend(CPPPATH=[thirdparty_zlib_dir])
if (env['target'] == 'debug'): if env["target"] == "debug":
env_thirdparty.Append(CPPDEFINES=['ZLIB_DEBUG']) env_thirdparty.Append(CPPDEFINES=["ZLIB_DEBUG"])
env_thirdparty.add_source_files(env.core_sources, thirdparty_zlib_sources) env_thirdparty.add_source_files(env.core_sources, thirdparty_zlib_sources)
# Minizip library, could be unbundled in theory # Minizip library, could be unbundled in theory
# However, our version has some custom modifications, so it won't compile with the system one # However, our version has some custom modifications, so it won't compile with the system one
@ -99,7 +99,7 @@ env_thirdparty.add_source_files(env.core_sources, thirdparty_minizip_sources)
# Zstd library, can be unbundled in theory # Zstd library, can be unbundled in theory
# though we currently use some private symbols # though we currently use some private symbols
# https://github.com/godotengine/godot/issues/17374 # https://github.com/godotengine/godot/issues/17374
if env['builtin_zstd']: if env["builtin_zstd"]:
thirdparty_zstd_dir = "#thirdparty/zstd/" thirdparty_zstd_dir = "#thirdparty/zstd/"
thirdparty_zstd_sources = [ thirdparty_zstd_sources = [
"common/debug.c", "common/debug.c",
@ -142,32 +142,45 @@ if env['builtin_zstd']:
env.add_source_files(env.core_sources, "*.cpp") env.add_source_files(env.core_sources, "*.cpp")
# Certificates # Certificates
env.Depends("#core/io/certs_compressed.gen.h", ["#thirdparty/certs/ca-certificates.crt", env.Value(env['builtin_certs']), env.Value(env['system_certs_path'])]) env.Depends(
env.CommandNoCache("#core/io/certs_compressed.gen.h", "#thirdparty/certs/ca-certificates.crt", run_in_subprocess(core_builders.make_certs_header)) "#core/io/certs_compressed.gen.h",
["#thirdparty/certs/ca-certificates.crt", env.Value(env["builtin_certs"]), env.Value(env["system_certs_path"])],
)
env.CommandNoCache(
"#core/io/certs_compressed.gen.h",
"#thirdparty/certs/ca-certificates.crt",
run_in_subprocess(core_builders.make_certs_header),
)
# Make binders # Make binders
env.CommandNoCache(['method_bind.gen.inc', 'method_bind_ext.gen.inc', 'method_bind_free_func.gen.inc'], 'make_binders.py', run_in_subprocess(make_binders.run)) env.CommandNoCache(
["method_bind.gen.inc", "method_bind_ext.gen.inc", "method_bind_free_func.gen.inc"],
"make_binders.py",
run_in_subprocess(make_binders.run),
)
# Authors # Authors
env.Depends('#core/authors.gen.h', "../AUTHORS.md") env.Depends("#core/authors.gen.h", "../AUTHORS.md")
env.CommandNoCache('#core/authors.gen.h', "../AUTHORS.md", run_in_subprocess(core_builders.make_authors_header)) env.CommandNoCache("#core/authors.gen.h", "../AUTHORS.md", run_in_subprocess(core_builders.make_authors_header))
# Donors # Donors
env.Depends('#core/donors.gen.h', "../DONORS.md") env.Depends("#core/donors.gen.h", "../DONORS.md")
env.CommandNoCache('#core/donors.gen.h', "../DONORS.md", run_in_subprocess(core_builders.make_donors_header)) env.CommandNoCache("#core/donors.gen.h", "../DONORS.md", run_in_subprocess(core_builders.make_donors_header))
# License # License
env.Depends('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"]) env.Depends("#core/license.gen.h", ["../COPYRIGHT.txt", "../LICENSE.txt"])
env.CommandNoCache('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"], run_in_subprocess(core_builders.make_license_header)) env.CommandNoCache(
"#core/license.gen.h", ["../COPYRIGHT.txt", "../LICENSE.txt"], run_in_subprocess(core_builders.make_license_header)
)
# Chain load SCsubs # Chain load SCsubs
SConscript('os/SCsub') SConscript("os/SCsub")
SConscript('math/SCsub') SConscript("math/SCsub")
SConscript('crypto/SCsub') SConscript("crypto/SCsub")
SConscript('io/SCsub') SConscript("io/SCsub")
SConscript('debugger/SCsub') SConscript("debugger/SCsub")
SConscript('input/SCsub') SConscript("input/SCsub")
SConscript('bind/SCsub') SConscript("bind/SCsub")
# Build it all as a library # Build it all as a library
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.core_sources, "*.cpp") env.add_source_files(env.core_sources, "*.cpp")
@ -11,15 +11,15 @@ def escape_string(s):
rev_result = [] rev_result = []
while c >= 256: while c >= 256:
c, low = (c // 256, c % 256) c, low = (c // 256, c % 256)
rev_result.append('\\%03o' % low) rev_result.append("\\%03o" % low)
rev_result.append('\\%03o' % c) rev_result.append("\\%03o" % c)
return ''.join(reversed(rev_result)) return "".join(reversed(rev_result))
result = '' result = ""
if isinstance(s, str): if isinstance(s, str):
s = s.encode('utf-8') s = s.encode("utf-8")
for c in s: for c in s:
if not(32 <= c < 127) or c in (ord('\\'), ord('"')): if not (32 <= c < 127) or c in (ord("\\"), ord('"')):
result += charcode_to_c_escapes(c) result += charcode_to_c_escapes(c)
else: else:
result += chr(c) result += chr(c)
@ -34,6 +34,7 @@ def make_certs_header(target, source, env):
buf = f.read() buf = f.read()
decomp_size = len(buf) decomp_size = len(buf)
import zlib import zlib
buf = zlib.compress(buf) buf = zlib.compress(buf)
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
@ -41,9 +42,9 @@ def make_certs_header(target, source, env):
g.write("#define CERTS_COMPRESSED_GEN_H\n") g.write("#define CERTS_COMPRESSED_GEN_H\n")
# System certs path. Editor will use them if defined. (for package maintainers) # System certs path. Editor will use them if defined. (for package maintainers)
path = env['system_certs_path'] path = env["system_certs_path"]
g.write("#define _SYSTEM_CERTS_PATH \"%s\"\n" % str(path)) g.write('#define _SYSTEM_CERTS_PATH "%s"\n' % str(path))
if env['builtin_certs']: if env["builtin_certs"]:
# Defined here and not in env so changing it does not trigger a full rebuild. # Defined here and not in env so changing it does not trigger a full rebuild.
g.write("#define BUILTIN_CERTS_ENABLED\n") g.write("#define BUILTIN_CERTS_ENABLED\n")
g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n") g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n")
@ -59,8 +60,18 @@ def make_certs_header(target, source, env):
def make_authors_header(target, source, env): def make_authors_header(target, source, env):
sections = ["Project Founders", "Lead Developer", "Project Manager", "Developers"] sections = [
sections_id = ["AUTHORS_FOUNDERS", "AUTHORS_LEAD_DEVELOPERS", "AUTHORS_PROJECT_MANAGERS", "AUTHORS_DEVELOPERS"] "Project Founders",
"Lead Developer",
"Project Manager",
"Developers",
]
sections_id = [
"AUTHORS_FOUNDERS",
"AUTHORS_LEAD_DEVELOPERS",
"AUTHORS_PROJECT_MANAGERS",
"AUTHORS_DEVELOPERS",
]
src = source[0] src = source[0]
dst = target[0] dst = target[0]
@ -80,7 +91,7 @@ def make_authors_header(target, source, env):
for line in f: for line in f:
if reading: if reading:
if line.startswith(" "): if line.startswith(" "):
g.write("\t\"" + escape_string(line.strip()) + "\",\n") g.write('\t"' + escape_string(line.strip()) + '",\n')
continue continue
if line.startswith("## "): if line.startswith("## "):
if reading: if reading:
@ -103,10 +114,22 @@ def make_authors_header(target, source, env):
def make_donors_header(target, source, env): def make_donors_header(target, source, env):
sections = ["Platinum sponsors", "Gold sponsors", "Mini sponsors", sections = [
"Gold donors", "Silver donors", "Bronze donors"] "Platinum sponsors",
sections_id = ["DONORS_SPONSOR_PLAT", "DONORS_SPONSOR_GOLD", "DONORS_SPONSOR_MINI", "Gold sponsors",
"DONORS_GOLD", "DONORS_SILVER", "DONORS_BRONZE"] "Mini sponsors",
"Gold donors",
"Silver donors",
"Bronze donors",
]
sections_id = [
"DONORS_SPONSOR_PLAT",
"DONORS_SPONSOR_GOLD",
"DONORS_SPONSOR_MINI",
"DONORS_GOLD",
"DONORS_SILVER",
"DONORS_BRONZE",
]
src = source[0] src = source[0]
dst = target[0] dst = target[0]
@ -126,7 +149,7 @@ def make_donors_header(target, source, env):
for line in f: for line in f:
if reading >= 0: if reading >= 0:
if line.startswith(" "): if line.startswith(" "):
g.write("\t\"" + escape_string(line.strip()) + "\",\n") g.write('\t"' + escape_string(line.strip()) + '",\n')
continue continue
if line.startswith("## "): if line.startswith("## "):
if reading: if reading:
@ -169,8 +192,8 @@ def make_license_header(target, source, env):
return line return line
def next_tag(self): def next_tag(self):
if not ':' in self.current: if not ":" in self.current:
return ('', []) return ("", [])
tag, line = self.current.split(":", 1) tag, line = self.current.split(":", 1)
lines = [line.strip()] lines = [line.strip()]
while self.next_line() and self.current.startswith(" "): while self.next_line() and self.current.startswith(" "):
@ -178,6 +201,7 @@ def make_license_header(target, source, env):
return (tag, lines) return (tag, lines)
from collections import OrderedDict from collections import OrderedDict
projects = OrderedDict() projects = OrderedDict()
license_list = [] license_list = []
@ -218,26 +242,30 @@ def make_license_header(target, source, env):
with open(src_license, "r", encoding="utf-8") as license_file: with open(src_license, "r", encoding="utf-8") as license_file:
for line in license_file: for line in license_file:
escaped_string = escape_string(line.strip()) escaped_string = escape_string(line.strip())
f.write("\n\t\t\"" + escaped_string + "\\n\"") f.write('\n\t\t"' + escaped_string + '\\n"')
f.write(";\n\n") f.write(";\n\n")
f.write("struct ComponentCopyrightPart {\n" f.write(
"\tconst char *license;\n" "struct ComponentCopyrightPart {\n"
"\tconst char *const *files;\n" "\tconst char *license;\n"
"\tconst char *const *copyright_statements;\n" "\tconst char *const *files;\n"
"\tint file_count;\n" "\tconst char *const *copyright_statements;\n"
"\tint copyright_count;\n" "\tint file_count;\n"
"};\n\n") "\tint copyright_count;\n"
"};\n\n"
)
f.write("struct ComponentCopyright {\n" f.write(
"\tconst char *name;\n" "struct ComponentCopyright {\n"
"\tconst ComponentCopyrightPart *parts;\n" "\tconst char *name;\n"
"\tint part_count;\n" "\tconst ComponentCopyrightPart *parts;\n"
"};\n\n") "\tint part_count;\n"
"};\n\n"
)
f.write("const char *const COPYRIGHT_INFO_DATA[] = {\n") f.write("const char *const COPYRIGHT_INFO_DATA[] = {\n")
for line in data_list: for line in data_list:
f.write("\t\"" + escape_string(line) + "\",\n") f.write('\t"' + escape_string(line) + '",\n')
f.write("};\n\n") f.write("};\n\n")
f.write("const ComponentCopyrightPart COPYRIGHT_PROJECT_PARTS[] = {\n") f.write("const ComponentCopyrightPart COPYRIGHT_PROJECT_PARTS[] = {\n")
@ -246,11 +274,21 @@ def make_license_header(target, source, env):
for project_name, project in iter(projects.items()): for project_name, project in iter(projects.items()):
part_indexes[project_name] = part_index part_indexes[project_name] = part_index
for part in project: for part in project:
f.write("\t{ \"" + escape_string(part["License"][0]) + "\", " f.write(
+ "&COPYRIGHT_INFO_DATA[" + str(part["file_index"]) + "], " '\t{ "'
+ "&COPYRIGHT_INFO_DATA[" + str(part["copyright_index"]) + "], " + escape_string(part["License"][0])
+ str(len(part["Files"])) + ", " + '", '
+ str(len(part["Copyright"])) + " },\n") + "&COPYRIGHT_INFO_DATA["
+ str(part["file_index"])
+ "], "
+ "&COPYRIGHT_INFO_DATA["
+ str(part["copyright_index"])
+ "], "
+ str(len(part["Files"]))
+ ", "
+ str(len(part["Copyright"]))
+ " },\n"
)
part_index += 1 part_index += 1
f.write("};\n\n") f.write("};\n\n")
@ -258,30 +296,37 @@ def make_license_header(target, source, env):
f.write("const ComponentCopyright COPYRIGHT_INFO[] = {\n") f.write("const ComponentCopyright COPYRIGHT_INFO[] = {\n")
for project_name, project in iter(projects.items()): for project_name, project in iter(projects.items()):
f.write("\t{ \"" + escape_string(project_name) + "\", " f.write(
+ "&COPYRIGHT_PROJECT_PARTS[" + str(part_indexes[project_name]) + "], " '\t{ "'
+ str(len(project)) + " },\n") + escape_string(project_name)
+ '", '
+ "&COPYRIGHT_PROJECT_PARTS["
+ str(part_indexes[project_name])
+ "], "
+ str(len(project))
+ " },\n"
)
f.write("};\n\n") f.write("};\n\n")
f.write("const int LICENSE_COUNT = " + str(len(license_list)) + ";\n") f.write("const int LICENSE_COUNT = " + str(len(license_list)) + ";\n")
f.write("const char *const LICENSE_NAMES[] = {\n") f.write("const char *const LICENSE_NAMES[] = {\n")
for l in license_list: for l in license_list:
f.write("\t\"" + escape_string(l[0]) + "\",\n") f.write('\t"' + escape_string(l[0]) + '",\n')
f.write("};\n\n") f.write("};\n\n")
f.write("const char *const LICENSE_BODIES[] = {\n\n") f.write("const char *const LICENSE_BODIES[] = {\n\n")
for l in license_list: for l in license_list:
for line in l[1:]: for line in l[1:]:
if line == ".": if line == ".":
f.write("\t\"\\n\"\n") f.write('\t"\\n"\n')
else: else:
f.write("\t\"" + escape_string(line) + "\\n\"\n") f.write('\t"' + escape_string(line) + '\\n"\n')
f.write("\t\"\",\n\n") f.write('\t"",\n\n')
f.write("};\n\n") f.write("};\n\n")
f.write("#endif // LICENSE_GEN_H\n") f.write("#endif // LICENSE_GEN_H\n")
if __name__ == '__main__': if __name__ == "__main__":
subprocess_main(globals()) subprocess_main(globals())
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env_crypto = env.Clone() env_crypto = env.Clone()
@ -22,7 +22,9 @@ if not has_module:
env_thirdparty = env_crypto.Clone() env_thirdparty = env_crypto.Clone()
env_thirdparty.disable_warnings() env_thirdparty.disable_warnings()
# Custom config file # Custom config file
env_thirdparty.Append(CPPDEFINES=[('MBEDTLS_CONFIG_FILE', '\\"thirdparty/mbedtls/include/godot_core_mbedtls_config.h\\"')]) env_thirdparty.Append(
CPPDEFINES=[("MBEDTLS_CONFIG_FILE", '\\"thirdparty/mbedtls/include/godot_core_mbedtls_config.h\\"')]
)
thirdparty_mbedtls_dir = "#thirdparty/mbedtls/library/" thirdparty_mbedtls_dir = "#thirdparty/mbedtls/library/"
thirdparty_mbedtls_sources = [ thirdparty_mbedtls_sources = [
"aes.c", "aes.c",
@ -30,7 +32,7 @@ if not has_module:
"md5.c", "md5.c",
"sha1.c", "sha1.c",
"sha256.c", "sha256.c",
"godot_core_mbedtls_platform.c" "godot_core_mbedtls_platform.c",
] ]
thirdparty_mbedtls_sources = [thirdparty_mbedtls_dir + file for file in thirdparty_mbedtls_sources] thirdparty_mbedtls_sources = [thirdparty_mbedtls_dir + file for file in thirdparty_mbedtls_sources]
env_thirdparty.add_source_files(env.core_sources, thirdparty_mbedtls_sources) env_thirdparty.add_source_files(env.core_sources, thirdparty_mbedtls_sources)
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.core_sources, "*.cpp") env.add_source_files(env.core_sources, "*.cpp")
@ -1,20 +1,28 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
from platform_methods import run_in_subprocess from platform_methods import run_in_subprocess
import input_builders import input_builders
# Order matters here. Higher index controller database files write on top of lower index database files. # Order matters here. Higher index controller database files write on top of lower index database files.
controller_databases = ["#core/input/gamecontrollerdb_204.txt", "#core/input/gamecontrollerdb_205.txt", "#core/input/gamecontrollerdb.txt", "#core/input/godotcontrollerdb.txt"] controller_databases = [
"#core/input/gamecontrollerdb_204.txt",
"#core/input/gamecontrollerdb_205.txt",
"#core/input/gamecontrollerdb.txt",
"#core/input/godotcontrollerdb.txt",
]
env.Depends("#core/input/default_controller_mappings.gen.cpp", controller_databases) env.Depends("#core/input/default_controller_mappings.gen.cpp", controller_databases)
env.CommandNoCache("#core/input/default_controller_mappings.gen.cpp", controller_databases, run_in_subprocess(input_builders.make_default_controller_mappings)) env.CommandNoCache(
"#core/input/default_controller_mappings.gen.cpp",
controller_databases,
run_in_subprocess(input_builders.make_default_controller_mappings),
)
env.add_source_files(env.core_sources, "*.cpp") env.add_source_files(env.core_sources, "*.cpp")
# Don't warn about duplicate entry here, we need it registered manually for first build, # Don't warn about duplicate entry here, we need it registered manually for first build,
# even if later builds will pick it up twice due to above *.cpp globbing. # even if later builds will pick it up twice due to above *.cpp globbing.
env.add_source_files(env.core_sources, "#core/input/default_controller_mappings.gen.cpp", warn_duplicates=False) env.add_source_files(env.core_sources, "#core/input/default_controller_mappings.gen.cpp", warn_duplicates=False)
@ -12,8 +12,8 @@ def make_default_controller_mappings(target, source, env):
g = open(dst, "w") g = open(dst, "w")
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
g.write("#include \"core/typedefs.h\"\n") g.write('#include "core/typedefs.h"\n')
g.write("#include \"core/input/default_controller_mappings.h\"\n") g.write('#include "core/input/default_controller_mappings.h"\n')
# ensure mappings have a consistent order # ensure mappings have a consistent order
platform_mappings = OrderedDict() platform_mappings = OrderedDict()
@ -37,11 +37,19 @@ def make_default_controller_mappings(target, source, env):
line_parts = line.split(",") line_parts = line.split(",")
guid = line_parts[0] guid = line_parts[0]
if guid in platform_mappings[current_platform]: if guid in platform_mappings[current_platform]:
g.write("// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(src_path, current_platform, platform_mappings[current_platform][guid])) g.write(
"// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(
src_path, current_platform, platform_mappings[current_platform][guid]
)
)
valid_mapping = True valid_mapping = True
for input_map in line_parts[2:]: for input_map in line_parts[2:]:
if "+" in input_map or "-" in input_map or "~" in input_map: if "+" in input_map or "-" in input_map or "~" in input_map:
g.write("// WARNING - DISCARDED UNSUPPORTED MAPPING TYPE FROM DATABASE {}: {} {}\n".format(src_path, current_platform, line)) g.write(
"// WARNING - DISCARDED UNSUPPORTED MAPPING TYPE FROM DATABASE {}: {} {}\n".format(
src_path, current_platform, line
)
)
valid_mapping = False valid_mapping = False
break break
if valid_mapping: if valid_mapping:
@ -62,12 +70,12 @@ def make_default_controller_mappings(target, source, env):
variable = platform_variables[platform] variable = platform_variables[platform]
g.write("{}\n".format(variable)) g.write("{}\n".format(variable))
for mapping in mappings.values(): for mapping in mappings.values():
g.write("\t\"{}\",\n".format(mapping)) g.write('\t"{}",\n'.format(mapping))
g.write("#endif\n") g.write("#endif\n")
g.write("\tNULL\n};\n") g.write("\tNULL\n};\n")
g.close() g.close()
if __name__ == '__main__': if __name__ == "__main__":
subprocess_main(globals()) subprocess_main(globals())
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.core_sources, "*.cpp") env.add_source_files(env.core_sources, "*.cpp")
@ -280,58 +280,57 @@ MethodBind* create_method_bind($ifret R$ $ifnoret void$ (*p_method)($ifconst con
""" """
def make_version(template, nargs, argmax, const, ret): def make_version(template, nargs, argmax, const, ret):
intext = template intext = template
from_pos = 0 from_pos = 0
outtext = "" outtext = ""
while(True): while True:
to_pos = intext.find("$", from_pos) to_pos = intext.find("$", from_pos)
if (to_pos == -1): if to_pos == -1:
outtext += intext[from_pos:] outtext += intext[from_pos:]
break break
else: else:
outtext += intext[from_pos:to_pos] outtext += intext[from_pos:to_pos]
end = intext.find("$", to_pos + 1) end = intext.find("$", to_pos + 1)
if (end == -1): if end == -1:
break # ignore break # ignore
macro = intext[to_pos + 1:end] macro = intext[to_pos + 1 : end]
cmd = "" cmd = ""
data = "" data = ""
if (macro.find(" ") != -1): if macro.find(" ") != -1:
cmd = macro[0:macro.find(" ")] cmd = macro[0 : macro.find(" ")]
data = macro[macro.find(" ") + 1:] data = macro[macro.find(" ") + 1 :]
else: else:
cmd = macro cmd = macro
if (cmd == "argc"): if cmd == "argc":
outtext += str(nargs) outtext += str(nargs)
if (cmd == "ifret" and ret): if cmd == "ifret" and ret:
outtext += data outtext += data
if (cmd == "ifargs" and nargs): if cmd == "ifargs" and nargs:
outtext += data outtext += data
if (cmd == "ifretargs" and nargs and ret): if cmd == "ifretargs" and nargs and ret:
outtext += data outtext += data
if (cmd == "ifconst" and const): if cmd == "ifconst" and const:
outtext += data outtext += data
elif (cmd == "ifnoconst" and not const): elif cmd == "ifnoconst" and not const:
outtext += data outtext += data
elif (cmd == "ifnoret" and not ret): elif cmd == "ifnoret" and not ret:
outtext += data outtext += data
elif (cmd == "iftempl" and (nargs > 0 or ret)): elif cmd == "iftempl" and (nargs > 0 or ret):
outtext += data outtext += data
elif (cmd == "arg,"): elif cmd == "arg,":
for i in range(1, nargs + 1): for i in range(1, nargs + 1):
if (i > 1): if i > 1:
outtext += ", " outtext += ", "
outtext += data.replace("@", str(i)) outtext += data.replace("@", str(i))
elif (cmd == "arg"): elif cmd == "arg":
for i in range(1, nargs + 1): for i in range(1, nargs + 1):
outtext += data.replace("@", str(i)) outtext += data.replace("@", str(i))
elif (cmd == "noarg"): elif cmd == "noarg":
for i in range(nargs + 1, argmax + 1): for i in range(nargs + 1, argmax + 1):
outtext += data.replace("@", str(i)) outtext += data.replace("@", str(i))
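A small worked example of the macro expansion above (the template here is made up, and this assumes the scan resumes after each closing `$`, which falls outside the hunks shown):

template = "ret($ifret R$$ifnoret void$) f($arg, P@ p@$);"
make_version(template, nargs=2, argmax=2, const=False, ret=True)
# -> 'ret(R) f(P1 p1, P2 p2);'
make_version(template, nargs=2, argmax=2, const=False, ret=False)
# -> 'ret(void) f(P1 p1, P2 p2);'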
@ -348,7 +347,9 @@ def run(target, source, env):
text_ext = "" text_ext = ""
text_free_func = "#ifndef METHOD_BIND_FREE_FUNC_H\n#define METHOD_BIND_FREE_FUNC_H\n" text_free_func = "#ifndef METHOD_BIND_FREE_FUNC_H\n#define METHOD_BIND_FREE_FUNC_H\n"
text_free_func += "\n//including this header file allows method binding to use free functions\n" text_free_func += "\n//including this header file allows method binding to use free functions\n"
text_free_func += "//note that the free function must have a pointer to an instance of the class as its first parameter\n" text_free_func += (
"//note that the free function must have a pointer to an instance of the class as its first parameter\n"
)
for i in range(0, versions + 1): for i in range(0, versions + 1):
@ -361,7 +362,7 @@ def run(target, source, env):
t += make_version(template_typed, i, versions, True, False) t += make_version(template_typed, i, versions, True, False)
t += make_version(template, i, versions, True, True) t += make_version(template, i, versions, True, True)
t += make_version(template_typed, i, versions, True, True) t += make_version(template_typed, i, versions, True, True)
if (i >= versions_ext): if i >= versions_ext:
text_ext += t text_ext += t
else: else:
text += t text += t
@ -383,6 +384,7 @@ def run(target, source, env):
f.write(text_free_func) f.write(text_free_func)
if __name__ == '__main__': if __name__ == "__main__":
from platform_methods import subprocess_main from platform_methods import subprocess_main
subprocess_main(globals()) subprocess_main(globals())
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env_math = env.Clone() env_math = env.Clone()
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.core_sources, "*.cpp") env.add_source_files(env.core_sources, "*.cpp")
@ -21,7 +21,7 @@ def write_string(_f, text, newline=True):
for t in range(tab): for t in range(tab):
_f.write("\t") _f.write("\t")
_f.write(text) _f.write(text)
if (newline): if newline:
_f.write("\n") _f.write("\n")
@ -30,7 +30,7 @@ def escape(ret):
ret = ret.replace("<", "&gt;") ret = ret.replace("<", "&gt;")
ret = ret.replace(">", "&lt;") ret = ret.replace(">", "&lt;")
ret = ret.replace("'", "&apos;") ret = ret.replace("'", "&apos;")
ret = ret.replace("\"", "&quot;") ret = ret.replace('"', "&quot;")
return ret return ret
@ -43,25 +43,26 @@ def dec_tab():
global tab global tab
tab -= 1 tab -= 1
write_string(f, '<?xml version="1.0" encoding="UTF-8" ?>') write_string(f, '<?xml version="1.0" encoding="UTF-8" ?>')
write_string(f, '<doc version="' + new_doc.attrib["version"] + '">') write_string(f, '<doc version="' + new_doc.attrib["version"] + '">')
def get_tag(node, name): def get_tag(node, name):
tag = "" tag = ""
if (name in node.attrib): if name in node.attrib:
tag = ' ' + name + '="' + escape(node.attrib[name]) + '" ' tag = " " + name + '="' + escape(node.attrib[name]) + '" '
return tag return tag
def find_method_descr(old_class, name): def find_method_descr(old_class, name):
methods = old_class.find("methods") methods = old_class.find("methods")
if(methods != None and len(list(methods)) > 0): if methods != None and len(list(methods)) > 0:
for m in list(methods): for m in list(methods):
if (m.attrib["name"] == name): if m.attrib["name"] == name:
description = m.find("description") description = m.find("description")
if (description != None and description.text.strip() != ""): if description != None and description.text.strip() != "":
return description.text return description.text
return None return None
@ -70,11 +71,11 @@ def find_method_descr(old_class, name):
def find_signal_descr(old_class, name): def find_signal_descr(old_class, name):
signals = old_class.find("signals") signals = old_class.find("signals")
if(signals != None and len(list(signals)) > 0): if signals != None and len(list(signals)) > 0:
for m in list(signals): for m in list(signals):
if (m.attrib["name"] == name): if m.attrib["name"] == name:
description = m.find("description") description = m.find("description")
if (description != None and description.text.strip() != ""): if description != None and description.text.strip() != "":
return description.text return description.text
return None return None
@ -82,13 +83,13 @@ def find_signal_descr(old_class, name):
def find_constant_descr(old_class, name): def find_constant_descr(old_class, name):
if (old_class is None): if old_class is None:
return None return None
constants = old_class.find("constants") constants = old_class.find("constants")
if(constants != None and len(list(constants)) > 0): if constants != None and len(list(constants)) > 0:
for m in list(constants): for m in list(constants):
if (m.attrib["name"] == name): if m.attrib["name"] == name:
if (m.text.strip() != ""): if m.text.strip() != "":
return m.text return m.text
return None return None
@ -96,35 +97,35 @@ def find_constant_descr(old_class, name):
def write_class(c): def write_class(c):
class_name = c.attrib["name"] class_name = c.attrib["name"]
print("Parsing Class: " + class_name) print("Parsing Class: " + class_name)
if (class_name in old_classes): if class_name in old_classes:
old_class = old_classes[class_name] old_class = old_classes[class_name]
else: else:
old_class = None old_class = None
category = get_tag(c, "category") category = get_tag(c, "category")
inherits = get_tag(c, "inherits") inherits = get_tag(c, "inherits")
write_string(f, '<class name="' + class_name + '" ' + category + inherits + '>') write_string(f, '<class name="' + class_name + '" ' + category + inherits + ">")
inc_tab() inc_tab()
write_string(f, "<brief_description>") write_string(f, "<brief_description>")
if (old_class != None): if old_class != None:
old_brief_descr = old_class.find("brief_description") old_brief_descr = old_class.find("brief_description")
if (old_brief_descr != None): if old_brief_descr != None:
write_string(f, escape(old_brief_descr.text.strip())) write_string(f, escape(old_brief_descr.text.strip()))
write_string(f, "</brief_description>") write_string(f, "</brief_description>")
write_string(f, "<description>") write_string(f, "<description>")
if (old_class != None): if old_class != None:
old_descr = old_class.find("description") old_descr = old_class.find("description")
if (old_descr != None): if old_descr != None:
write_string(f, escape(old_descr.text.strip())) write_string(f, escape(old_descr.text.strip()))
write_string(f, "</description>") write_string(f, "</description>")
methods = c.find("methods") methods = c.find("methods")
if(methods != None and len(list(methods)) > 0): if methods != None and len(list(methods)) > 0:
write_string(f, "<methods>") write_string(f, "<methods>")
inc_tab() inc_tab()
@ -132,35 +133,46 @@ def write_class(c):
for m in list(methods): for m in list(methods):
qualifiers = get_tag(m, "qualifiers") qualifiers = get_tag(m, "qualifiers")
write_string(f, '<method name="' + escape(m.attrib["name"]) + '" ' + qualifiers + '>') write_string(f, '<method name="' + escape(m.attrib["name"]) + '" ' + qualifiers + ">")
inc_tab() inc_tab()
for a in list(m): for a in list(m):
if (a.tag == "return"): if a.tag == "return":
typ = get_tag(a, "type") typ = get_tag(a, "type")
write_string(f, '<return' + typ + '>') write_string(f, "<return" + typ + ">")
write_string(f, '</return>') write_string(f, "</return>")
elif (a.tag == "argument"): elif a.tag == "argument":
default = get_tag(a, "default") default = get_tag(a, "default")
write_string(f, '<argument index="' + a.attrib["index"] + '" name="' + escape(a.attrib["name"]) + '" type="' + a.attrib["type"] + '"' + default + '>') write_string(
write_string(f, '</argument>') f,
'<argument index="'
+ a.attrib["index"]
+ '" name="'
+ escape(a.attrib["name"])
+ '" type="'
+ a.attrib["type"]
+ '"'
+ default
+ ">",
)
write_string(f, "</argument>")
write_string(f, '<description>') write_string(f, "<description>")
if (old_class != None): if old_class != None:
old_method_descr = find_method_descr(old_class, m.attrib["name"]) old_method_descr = find_method_descr(old_class, m.attrib["name"])
if (old_method_descr): if old_method_descr:
write_string(f, escape(escape(old_method_descr.strip()))) write_string(f, escape(escape(old_method_descr.strip())))
write_string(f, '</description>') write_string(f, "</description>")
dec_tab() dec_tab()
write_string(f, "</method>") write_string(f, "</method>")
dec_tab() dec_tab()
write_string(f, "</methods>") write_string(f, "</methods>")
signals = c.find("signals") signals = c.find("signals")
if(signals != None and len(list(signals)) > 0): if signals != None and len(list(signals)) > 0:
write_string(f, "<signals>") write_string(f, "<signals>")
inc_tab() inc_tab()
@ -171,24 +183,33 @@ def write_class(c):
inc_tab() inc_tab()
for a in list(m): for a in list(m):
if (a.tag == "argument"): if a.tag == "argument":
write_string(f, '<argument index="' + a.attrib["index"] + '" name="' + escape(a.attrib["name"]) + '" type="' + a.attrib["type"] + '">') write_string(
write_string(f, '</argument>') f,
'<argument index="'
+ a.attrib["index"]
+ '" name="'
+ escape(a.attrib["name"])
+ '" type="'
+ a.attrib["type"]
+ '">',
)
write_string(f, "</argument>")
write_string(f, '<description>') write_string(f, "<description>")
if (old_class != None): if old_class != None:
old_signal_descr = find_signal_descr(old_class, m.attrib["name"]) old_signal_descr = find_signal_descr(old_class, m.attrib["name"])
if (old_signal_descr): if old_signal_descr:
write_string(f, escape(old_signal_descr.strip())) write_string(f, escape(old_signal_descr.strip()))
write_string(f, '</description>') write_string(f, "</description>")
dec_tab() dec_tab()
write_string(f, "</signal>") write_string(f, "</signal>")
dec_tab() dec_tab()
write_string(f, "</signals>") write_string(f, "</signals>")
constants = c.find("constants") constants = c.find("constants")
if(constants != None and len(list(constants)) > 0): if constants != None and len(list(constants)) > 0:
write_string(f, "<constants>") write_string(f, "<constants>")
inc_tab() inc_tab()
@ -197,7 +218,7 @@ def write_class(c):
write_string(f, '<constant name="' + escape(m.attrib["name"]) + '" value="' + m.attrib["value"] + '">') write_string(f, '<constant name="' + escape(m.attrib["name"]) + '" value="' + m.attrib["value"] + '">')
old_constant_descr = find_constant_descr(old_class, m.attrib["name"]) old_constant_descr = find_constant_descr(old_class, m.attrib["name"])
if (old_constant_descr): if old_constant_descr:
write_string(f, escape(old_constant_descr.strip())) write_string(f, escape(old_constant_descr.strip()))
write_string(f, "</constant>") write_string(f, "</constant>")
@ -207,9 +228,10 @@ def write_class(c):
dec_tab() dec_tab()
write_string(f, "</class>") write_string(f, "</class>")
for c in list(old_doc): for c in list(old_doc):
old_classes[c.attrib["name"]] = c old_classes[c.attrib["name"]] = c
for c in list(new_doc): for c in list(new_doc):
write_class(c) write_class(c)
write_string(f, '</doc>\n') write_string(f, "</doc>\n")
@ -13,75 +13,74 @@ import xml.etree.ElementTree as ET
################################################################################ ################################################################################
flags = { flags = {
'c': platform.platform() != 'Windows', # Disable by default on windows, since we use ANSI escape codes "c": platform.platform() != "Windows", # Disable by default on windows, since we use ANSI escape codes
'b': False, "b": False,
'g': False, "g": False,
's': False, "s": False,
'u': False, "u": False,
'h': False, "h": False,
'p': False, "p": False,
'o': True, "o": True,
'i': False, "i": False,
'a': True, "a": True,
'e': False, "e": False,
} }
flag_descriptions = { flag_descriptions = {
'c': 'Toggle colors when outputting.', "c": "Toggle colors when outputting.",
'b': 'Toggle showing only not fully described classes.', "b": "Toggle showing only not fully described classes.",
'g': 'Toggle showing only completed classes.', "g": "Toggle showing only completed classes.",
's': 'Toggle showing comments about the status.', "s": "Toggle showing comments about the status.",
'u': 'Toggle URLs to docs.', "u": "Toggle URLs to docs.",
'h': 'Show help and exit.', "h": "Show help and exit.",
'p': 'Toggle showing percentage as well as counts.', "p": "Toggle showing percentage as well as counts.",
'o': 'Toggle overall column.', "o": "Toggle overall column.",
'i': 'Toggle collapse of class items columns.', "i": "Toggle collapse of class items columns.",
'a': 'Toggle showing all items.', "a": "Toggle showing all items.",
'e': 'Toggle hiding empty items.', "e": "Toggle hiding empty items.",
} }
long_flags = { long_flags = {
'colors': 'c', "colors": "c",
'use-colors': 'c', "use-colors": "c",
"bad": "b",
'bad': 'b', "only-bad": "b",
'only-bad': 'b', "good": "g",
"only-good": "g",
'good': 'g', "comments": "s",
'only-good': 'g', "status": "s",
"urls": "u",
'comments': 's', "gen-url": "u",
'status': 's', "help": "h",
"percent": "p",
'urls': 'u', "use-percentages": "p",
'gen-url': 'u', "overall": "o",
"use-overall": "o",
'help': 'h', "items": "i",
"collapse": "i",
'percent': 'p', "all": "a",
'use-percentages': 'p', "empty": "e",
'overall': 'o',
'use-overall': 'o',
'items': 'i',
'collapse': 'i',
'all': 'a',
'empty': 'e',
} }
table_columns = ['name', 'brief_description', 'description', 'methods', 'constants', 'members', 'signals', 'theme_items'] table_columns = [
table_column_names = ['Name', 'Brief Desc.', 'Desc.', 'Methods', 'Constants', 'Members', 'Signals', 'Theme Items'] "name",
"brief_description",
"description",
"methods",
"constants",
"members",
"signals",
"theme_items",
]
table_column_names = ["Name", "Brief Desc.", "Desc.", "Methods", "Constants", "Members", "Signals", "Theme Items"]
colors = { colors = {
'name': [36], # cyan "name": [36], # cyan
'part_big_problem': [4, 31], # underline, red "part_big_problem": [4, 31], # underline, red
'part_problem': [31], # red "part_problem": [31], # red
'part_mostly_good': [33], # yellow "part_mostly_good": [33], # yellow
'part_good': [32], # green "part_good": [32], # green
'url': [4, 34], # underline, blue "url": [4, 34], # underline, blue
'section': [1, 4], # bold, underline "section": [1, 4], # bold, underline
'state_off': [36], # cyan "state_off": [36], # cyan
'state_on': [1, 35], # bold, magenta/plum "state_on": [1, 35], # bold, magenta/plum
'bold': [1], # bold "bold": [1], # bold
} }
overall_progress_description_weigth = 10 overall_progress_description_weigth = 10
@ -90,6 +89,7 @@ overall_progress_description_weigth = 10
# Utils # # Utils #
################################################################################ ################################################################################
def validate_tag(elem, tag): def validate_tag(elem, tag):
if elem.tag != tag: if elem.tag != tag:
print('Tag mismatch, expected "' + tag + '", got ' + elem.tag) print('Tag mismatch, expected "' + tag + '", got ' + elem.tag)
@ -97,36 +97,38 @@ def validate_tag(elem, tag):
def color(color, string): def color(color, string):
if flags['c'] and terminal_supports_color(): if flags["c"] and terminal_supports_color():
color_format = '' color_format = ""
for code in colors[color]: for code in colors[color]:
color_format += '\033[' + str(code) + 'm' color_format += "\033[" + str(code) + "m"
return color_format + string + '\033[0m' return color_format + string + "\033[0m"
else: else:
return string return string
ansi_escape = re.compile(r'\x1b[^m]*m')
ansi_escape = re.compile(r"\x1b[^m]*m")
def nonescape_len(s): def nonescape_len(s):
return len(ansi_escape.sub('', s)) return len(ansi_escape.sub("", s))
def terminal_supports_color(): def terminal_supports_color():
p = sys.platform p = sys.platform
supported_platform = p != 'Pocket PC' and (p != 'win32' or supported_platform = p != "Pocket PC" and (p != "win32" or "ANSICON" in os.environ)
'ANSICON' in os.environ)
is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
if not supported_platform or not is_a_tty: if not supported_platform or not is_a_tty:
return False return False
return True return True
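For example, with colors enabled (`flags["c"]` truthy on a capable terminal), the helpers above behave like this:

color("part_good", "OK")                  # -> "\x1b[32mOK\x1b[0m" (green)
nonescape_len(color("part_good", "OK"))   # -> 2; escape codes are stripped before measuring width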
################################################################################ ################################################################################
# Classes # # Classes #
################################################################################ ################################################################################
class ClassStatusProgress:
class ClassStatusProgress:
def __init__(self, described=0, total=0): def __init__(self, described=0, total=0):
self.described = described self.described = described
self.total = total self.total = total
@ -143,42 +145,41 @@ class ClassStatusProgress:
return self.described >= self.total return self.described >= self.total
def to_configured_colored_string(self): def to_configured_colored_string(self):
if flags['p']: if flags["p"]:
return self.to_colored_string('{percent}% ({has}/{total})', '{pad_percent}{pad_described}{s}{pad_total}') return self.to_colored_string("{percent}% ({has}/{total})", "{pad_percent}{pad_described}{s}{pad_total}")
else: else:
return self.to_colored_string() return self.to_colored_string()
def to_colored_string(self, format='{has}/{total}', pad_format='{pad_described}{s}{pad_total}'): def to_colored_string(self, format="{has}/{total}", pad_format="{pad_described}{s}{pad_total}"):
ratio = float(self.described) / float(self.total) if self.total != 0 else 1 ratio = float(self.described) / float(self.total) if self.total != 0 else 1
percent = int(round(100 * ratio)) percent = int(round(100 * ratio))
s = format.format(has=str(self.described), total=str(self.total), percent=str(percent)) s = format.format(has=str(self.described), total=str(self.total), percent=str(percent))
if self.described >= self.total: if self.described >= self.total:
s = color('part_good', s) s = color("part_good", s)
elif self.described >= self.total / 4 * 3: elif self.described >= self.total / 4 * 3:
s = color('part_mostly_good', s) s = color("part_mostly_good", s)
elif self.described > 0: elif self.described > 0:
s = color('part_problem', s) s = color("part_problem", s)
else: else:
s = color('part_big_problem', s) s = color("part_big_problem", s)
pad_size = max(len(str(self.described)), len(str(self.total))) pad_size = max(len(str(self.described)), len(str(self.total)))
pad_described = ''.ljust(pad_size - len(str(self.described))) pad_described = "".ljust(pad_size - len(str(self.described)))
pad_percent = ''.ljust(3 - len(str(percent))) pad_percent = "".ljust(3 - len(str(percent)))
pad_total = ''.ljust(pad_size - len(str(self.total))) pad_total = "".ljust(pad_size - len(str(self.total)))
return pad_format.format(pad_described=pad_described, pad_total=pad_total, pad_percent=pad_percent, s=s) return pad_format.format(pad_described=pad_described, pad_total=pad_total, pad_percent=pad_percent, s=s)
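A quick example of the formatting above, assuming colors are disabled so no escape codes are added:

p = ClassStatusProgress(described=3, total=4)
p.to_colored_string()                               # -> "3/4"
p.to_colored_string("{percent}% ({has}/{total})")   # -> "75% (3/4)"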
class ClassStatus: class ClassStatus:
def __init__(self, name=""):
def __init__(self, name=''):
self.name = name self.name = name
self.has_brief_description = True self.has_brief_description = True
self.has_description = True self.has_description = True
self.progresses = { self.progresses = {
'methods': ClassStatusProgress(), "methods": ClassStatusProgress(),
'constants': ClassStatusProgress(), "constants": ClassStatusProgress(),
'members': ClassStatusProgress(), "members": ClassStatusProgress(),
'theme_items': ClassStatusProgress(), "theme_items": ClassStatusProgress(),
'signals': ClassStatusProgress() "signals": ClassStatusProgress(),
} }
def __add__(self, other): def __add__(self, other):
@ -208,66 +209,70 @@ class ClassStatus:
def make_output(self): def make_output(self):
output = {} output = {}
output['name'] = color('name', self.name) output["name"] = color("name", self.name)
ok_string = color('part_good', 'OK') ok_string = color("part_good", "OK")
missing_string = color('part_big_problem', 'MISSING') missing_string = color("part_big_problem", "MISSING")
output['brief_description'] = ok_string if self.has_brief_description else missing_string output["brief_description"] = ok_string if self.has_brief_description else missing_string
output['description'] = ok_string if self.has_description else missing_string output["description"] = ok_string if self.has_description else missing_string
description_progress = ClassStatusProgress( description_progress = ClassStatusProgress(
(self.has_brief_description + self.has_description) * overall_progress_description_weigth, (self.has_brief_description + self.has_description) * overall_progress_description_weigth,
2 * overall_progress_description_weigth 2 * overall_progress_description_weigth,
) )
items_progress = ClassStatusProgress() items_progress = ClassStatusProgress()
for k in ['methods', 'constants', 'members', 'signals', 'theme_items']: for k in ["methods", "constants", "members", "signals", "theme_items"]:
items_progress += self.progresses[k] items_progress += self.progresses[k]
output[k] = self.progresses[k].to_configured_colored_string() output[k] = self.progresses[k].to_configured_colored_string()
output['items'] = items_progress.to_configured_colored_string() output["items"] = items_progress.to_configured_colored_string()
output['overall'] = (description_progress + items_progress).to_colored_string(color('bold', '{percent}%'), '{pad_percent}{s}') output["overall"] = (description_progress + items_progress).to_colored_string(
color("bold", "{percent}%"), "{pad_percent}{s}"
)
if self.name.startswith('Total'): if self.name.startswith("Total"):
output['url'] = color('url', 'https://docs.godotengine.org/en/latest/classes/') output["url"] = color("url", "https://docs.godotengine.org/en/latest/classes/")
if flags['s']: if flags["s"]:
output['comment'] = color('part_good', 'ALL OK') output["comment"] = color("part_good", "ALL OK")
else: else:
output['url'] = color('url', 'https://docs.godotengine.org/en/latest/classes/class_{name}.html'.format(name=self.name.lower())) output["url"] = color(
"url", "https://docs.godotengine.org/en/latest/classes/class_{name}.html".format(name=self.name.lower())
)
if flags['s'] and not flags['g'] and self.is_ok(): if flags["s"] and not flags["g"] and self.is_ok():
output['comment'] = color('part_good', 'ALL OK') output["comment"] = color("part_good", "ALL OK")
return output return output
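A worked example of the overall figure, assuming `ClassStatusProgress.__add__` (not visible in these hunks) sums the described/total pairs: with both descriptions present and 3 of 4 items documented,

described = 2 * overall_progress_description_weigth + 3   # 23
total = 2 * overall_progress_description_weigth + 4       # 24
int(round(100.0 * described / total))                     # -> 96, rendered as "96%"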
@staticmethod @staticmethod
def generate_for_class(c): def generate_for_class(c):
status = ClassStatus() status = ClassStatus()
status.name = c.attrib['name'] status.name = c.attrib["name"]
for tag in list(c): for tag in list(c):
if tag.tag == 'brief_description': if tag.tag == "brief_description":
status.has_brief_description = len(tag.text.strip()) > 0 status.has_brief_description = len(tag.text.strip()) > 0
elif tag.tag == 'description': elif tag.tag == "description":
status.has_description = len(tag.text.strip()) > 0 status.has_description = len(tag.text.strip()) > 0
elif tag.tag in ['methods', 'signals']: elif tag.tag in ["methods", "signals"]:
for sub_tag in list(tag): for sub_tag in list(tag):
descr = sub_tag.find('description') descr = sub_tag.find("description")
status.progresses[tag.tag].increment(len(descr.text.strip()) > 0) status.progresses[tag.tag].increment(len(descr.text.strip()) > 0)
elif tag.tag in ['constants', 'members', 'theme_items']: elif tag.tag in ["constants", "members", "theme_items"]:
for sub_tag in list(tag): for sub_tag in list(tag):
if not sub_tag.text is None: if not sub_tag.text is None:
status.progresses[tag.tag].increment(len(sub_tag.text.strip()) > 0) status.progresses[tag.tag].increment(len(sub_tag.text.strip()) > 0)
elif tag.tag in ['tutorials']: elif tag.tag in ["tutorials"]:
pass # Ignore those tags for now pass # Ignore those tags for now
elif tag.tag in ['theme_items']: elif tag.tag in ["theme_items"]:
pass # Ignore those tags, since they seem to lack description at all pass # Ignore those tags, since they seem to lack description at all
else: else:
@ -286,63 +291,69 @@ merged_file = ""
for arg in sys.argv[1:]: for arg in sys.argv[1:]:
try: try:
if arg.startswith('--'): if arg.startswith("--"):
flags[long_flags[arg[2:]]] = not flags[long_flags[arg[2:]]] flags[long_flags[arg[2:]]] = not flags[long_flags[arg[2:]]]
elif arg.startswith('-'): elif arg.startswith("-"):
for f in arg[1:]: for f in arg[1:]:
flags[f] = not flags[f] flags[f] = not flags[f]
elif os.path.isdir(arg): elif os.path.isdir(arg):
for f in os.listdir(arg): for f in os.listdir(arg):
if f.endswith('.xml'): if f.endswith(".xml"):
input_file_list.append(os.path.join(arg, f)); input_file_list.append(os.path.join(arg, f))
else: else:
input_class_list.append(arg) input_class_list.append(arg)
except KeyError: except KeyError:
print("Unknown command line flag: " + arg) print("Unknown command line flag: " + arg)
sys.exit(1) sys.exit(1)
if flags['i']: if flags["i"]:
for r in ['methods', 'constants', 'members', 'signals', 'theme_items']: for r in ["methods", "constants", "members", "signals", "theme_items"]:
index = table_columns.index(r) index = table_columns.index(r)
del table_column_names[index] del table_column_names[index]
del table_columns[index] del table_columns[index]
table_column_names.append('Items') table_column_names.append("Items")
table_columns.append('items') table_columns.append("items")
if flags['o'] == (not flags['i']): if flags["o"] == (not flags["i"]):
table_column_names.append(color('bold', 'Overall')) table_column_names.append(color("bold", "Overall"))
table_columns.append('overall') table_columns.append("overall")
if flags['u']: if flags["u"]:
table_column_names.append('Docs URL') table_column_names.append("Docs URL")
table_columns.append('url') table_columns.append("url")
################################################################################ ################################################################################
# Help # # Help #
################################################################################ ################################################################################
if len(input_file_list) < 1 or flags['h']: if len(input_file_list) < 1 or flags["h"]:
if not flags['h']: if not flags["h"]:
print(color('section', 'Invalid usage') + ': Please specify a classes directory') print(color("section", "Invalid usage") + ": Please specify a classes directory")
print(color('section', 'Usage') + ': doc_status.py [flags] <classes_dir> [class names]') print(color("section", "Usage") + ": doc_status.py [flags] <classes_dir> [class names]")
print('\t< and > signify required parameters, while [ and ] signify optional parameters.') print("\t< and > signify required parameters, while [ and ] signify optional parameters.")
print(color('section', 'Available flags') + ':') print(color("section", "Available flags") + ":")
possible_synonym_list = list(long_flags) possible_synonym_list = list(long_flags)
possible_synonym_list.sort() possible_synonym_list.sort()
flag_list = list(flags) flag_list = list(flags)
flag_list.sort() flag_list.sort()
for flag in flag_list: for flag in flag_list:
synonyms = [color('name', '-' + flag)] synonyms = [color("name", "-" + flag)]
for synonym in possible_synonym_list: for synonym in possible_synonym_list:
if long_flags[synonym] == flag: if long_flags[synonym] == flag:
synonyms.append(color('name', '--' + synonym)) synonyms.append(color("name", "--" + synonym))
print(('{synonyms} (Currently ' + color('state_' + ('on' if flags[flag] else 'off'), '{value}') + ')\n\t{description}').format( print(
synonyms=', '.join(synonyms), (
value=('on' if flags[flag] else 'off'), "{synonyms} (Currently "
description=flag_descriptions[flag] + color("state_" + ("on" if flags[flag] else "off"), "{value}")
)) + ")\n\t{description}"
).format(
synonyms=", ".join(synonyms),
value=("on" if flags[flag] else "off"),
description=flag_descriptions[flag],
)
)
sys.exit(0) sys.exit(0)
@ -357,21 +368,21 @@ for file in input_file_list:
tree = ET.parse(file) tree = ET.parse(file)
doc = tree.getroot() doc = tree.getroot()
if 'version' not in doc.attrib: if "version" not in doc.attrib:
print('Version missing from "doc"') print('Version missing from "doc"')
sys.exit(255) sys.exit(255)
version = doc.attrib['version'] version = doc.attrib["version"]
if doc.attrib['name'] in class_names: if doc.attrib["name"] in class_names:
continue continue
class_names.append(doc.attrib['name']) class_names.append(doc.attrib["name"])
classes[doc.attrib['name']] = doc classes[doc.attrib["name"]] = doc
class_names.sort() class_names.sort()
if len(input_class_list) < 1: if len(input_class_list) < 1:
input_class_list = ['*'] input_class_list = ["*"]
filtered_classes = set() filtered_classes = set()
for pattern in input_class_list: for pattern in input_class_list:
@ -384,23 +395,23 @@ filtered_classes.sort()
################################################################################ ################################################################################
table = [table_column_names] table = [table_column_names]
table_row_chars = '| - ' table_row_chars = "| - "
table_column_chars = '|' table_column_chars = "|"
total_status = ClassStatus('Total') total_status = ClassStatus("Total")
for cn in filtered_classes: for cn in filtered_classes:
c = classes[cn] c = classes[cn]
validate_tag(c, 'class') validate_tag(c, "class")
status = ClassStatus.generate_for_class(c) status = ClassStatus.generate_for_class(c)
total_status = total_status + status total_status = total_status + status
if (flags['b'] and status.is_ok()) or (flags['g'] and not status.is_ok()) or (not flags['a']): if (flags["b"] and status.is_ok()) or (flags["g"] and not status.is_ok()) or (not flags["a"]):
continue continue
if flags['e'] and status.is_empty(): if flags["e"] and status.is_empty():
continue continue
out = status.make_output() out = status.make_output()
@ -409,10 +420,10 @@ for cn in filtered_classes:
if column in out: if column in out:
row.append(out[column]) row.append(out[column])
else: else:
row.append('') row.append("")
if 'comment' in out and out['comment'] != '': if "comment" in out and out["comment"] != "":
row.append(out['comment']) row.append(out["comment"])
table.append(row) table.append(row)
@ -421,22 +432,22 @@ for cn in filtered_classes:
# Print output table # # Print output table #
################################################################################ ################################################################################
if len(table) == 1 and flags['a']: if len(table) == 1 and flags["a"]:
print(color('part_big_problem', 'No classes suitable for printing!')) print(color("part_big_problem", "No classes suitable for printing!"))
sys.exit(0) sys.exit(0)
if len(table) > 2 or not flags['a']: if len(table) > 2 or not flags["a"]:
total_status.name = 'Total = {0}'.format(len(table) - 1) total_status.name = "Total = {0}".format(len(table) - 1)
out = total_status.make_output() out = total_status.make_output()
row = [] row = []
for column in table_columns: for column in table_columns:
if column in out: if column in out:
row.append(out[column]) row.append(out[column])
else: else:
row.append('') row.append("")
table.append(row) table.append(row)
if flags['a']: if flags["a"]:
# Duplicate the headers at the bottom of the table so they can be viewed # Duplicate the headers at the bottom of the table so they can be viewed
# without having to scroll back to the top. # without having to scroll back to the top.
table.append(table_column_names) table.append(table_column_names)
@ -451,7 +462,9 @@ for row in table:
divider_string = table_row_chars[0] divider_string = table_row_chars[0]
for cell_i in range(len(table[0])): for cell_i in range(len(table[0])):
divider_string += table_row_chars[1] + table_row_chars[2] * (table_column_sizes[cell_i]) + table_row_chars[1] + table_row_chars[0] divider_string += (
table_row_chars[1] + table_row_chars[2] * (table_column_sizes[cell_i]) + table_row_chars[1] + table_row_chars[0]
)
print(divider_string) print(divider_string)
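With `table_row_chars = "| - "`, the divider assembled above comes out like this for two columns of (illustrative) widths 4 and 2:

sizes = [4, 2]  # illustrative column widths
divider = table_row_chars[0]
for width in sizes:
    divider += table_row_chars[1] + table_row_chars[2] * width + table_row_chars[1] + table_row_chars[0]
# divider == "| ---- | -- |"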
for row_i, row in enumerate(table): for row_i, row in enumerate(table):
@ -461,7 +474,11 @@ for row_i, row in enumerate(table):
if cell_i == 0: if cell_i == 0:
row_string += table_row_chars[3] + cell + table_row_chars[3] * (padding_needed - 1) row_string += table_row_chars[3] + cell + table_row_chars[3] * (padding_needed - 1)
else: else:
row_string += table_row_chars[3] * int(math.floor(float(padding_needed) / 2)) + cell + table_row_chars[3] * int(math.ceil(float(padding_needed) / 2)) row_string += (
table_row_chars[3] * int(math.floor(float(padding_needed) / 2))
+ cell
+ table_row_chars[3] * int(math.ceil(float(padding_needed) / 2))
)
row_string += table_column_chars row_string += table_column_chars
print(row_string) print(row_string)
@ -474,5 +491,5 @@ for row_i, row in enumerate(table):
print(divider_string) print(divider_string)
if total_status.is_ok() and not flags['g']: if total_status.is_ok() and not flags["g"]:
print('All listed classes are ' + color('part_good', 'OK') + '!') print("All listed classes are " + color("part_good", "OK") + "!")
@ -7,10 +7,12 @@ import xml.etree.ElementTree as ET
from collections import OrderedDict from collections import OrderedDict
# Uncomment to do type checks. I have it commented out so it works below Python 3.5 # Uncomment to do type checks. I have it commented out so it works below Python 3.5
#from typing import List, Dict, TextIO, Tuple, Iterable, Optional, DefaultDict, Any, Union # from typing import List, Dict, TextIO, Tuple, Iterable, Optional, DefaultDict, Any, Union
# http(s)://docs.godotengine.org/<langcode>/<tag>/path/to/page.html(#fragment-tag) # http(s)://docs.godotengine.org/<langcode>/<tag>/path/to/page.html(#fragment-tag)
GODOT_DOCS_PATTERN = re.compile(r'^http(?:s)?://docs\.godotengine\.org/(?:[a-zA-Z0-9.\-_]*)/(?:[a-zA-Z0-9.\-_]*)/(.*)\.html(#.*)?$') GODOT_DOCS_PATTERN = re.compile(
r"^http(?:s)?://docs\.godotengine\.org/(?:[a-zA-Z0-9.\-_]*)/(?:[a-zA-Z0-9.\-_]*)/(.*)\.html(#.*)?$"
)
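For instance (URL made up for illustration), the two capture groups hold the page path and the optional fragment:

m = GODOT_DOCS_PATTERN.match("https://docs.godotengine.org/en/latest/classes/class_node.html#class-node")
m.group(1)  # -> "classes/class_node"
m.group(2)  # -> "#class-node"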
def print_error(error, state): # type: (str, State) -> None def print_error(error, state): # type: (str, State) -> None
@ -37,7 +39,9 @@ class TypeName:
class PropertyDef: class PropertyDef:
def __init__(self, name, type_name, setter, getter, text, default_value, overridden): # type: (str, TypeName, Optional[str], Optional[str], Optional[str], Optional[str], Optional[bool]) -> None def __init__(
self, name, type_name, setter, getter, text, default_value, overridden
): # type: (str, TypeName, Optional[str], Optional[str], Optional[str], Optional[str], Optional[bool]) -> None
self.name = name self.name = name
self.type_name = type_name self.type_name = type_name
self.setter = setter self.setter = setter
@ -46,6 +50,7 @@ class PropertyDef:
self.default_value = default_value self.default_value = default_value
self.overridden = overridden self.overridden = overridden
class ParameterDef: class ParameterDef:
def __init__(self, name, type_name, default_value): # type: (str, TypeName, Optional[str]) -> None def __init__(self, name, type_name, default_value): # type: (str, TypeName, Optional[str]) -> None
self.name = name self.name = name
@ -61,7 +66,9 @@ class SignalDef:
class MethodDef: class MethodDef:
def __init__(self, name, return_type, parameters, description, qualifiers): # type: (str, TypeName, List[ParameterDef], Optional[str], Optional[str]) -> None def __init__(
self, name, return_type, parameters, description, qualifiers
): # type: (str, TypeName, List[ParameterDef], Optional[str], Optional[str]) -> None
self.name = name self.name = name
self.return_type = return_type self.return_type = return_type
self.parameters = parameters self.parameters = parameters
@ -144,10 +151,12 @@ class State:
getter = property.get("getter") or None getter = property.get("getter") or None
default_value = property.get("default") or None default_value = property.get("default") or None
if default_value is not None: if default_value is not None:
default_value = '``{}``'.format(default_value) default_value = "``{}``".format(default_value)
overridden = property.get("override") or False overridden = property.get("override") or False
property_def = PropertyDef(property_name, type_name, setter, getter, property.text, default_value, overridden) property_def = PropertyDef(
property_name, type_name, setter, getter, property.text, default_value, overridden
)
class_def.properties[property_name] = property_def class_def.properties[property_name] = property_def
methods = class_root.find("methods") methods = class_root.find("methods")
@ -246,8 +255,6 @@ class State:
if link.text is not None: if link.text is not None:
class_def.tutorials.append(link.text) class_def.tutorials.append(link.text)
def sort_classes(self): # type: () -> None def sort_classes(self): # type: () -> None
self.classes = OrderedDict(sorted(self.classes.items(), key=lambda t: t[0])) self.classes = OrderedDict(sorted(self.classes.items(), key=lambda t: t[0]))
@ -273,7 +280,11 @@ def main(): # type: () -> None
parser.add_argument("path", nargs="+", help="A path to an XML file or a directory containing XML files to parse.") parser.add_argument("path", nargs="+", help="A path to an XML file or a directory containing XML files to parse.")
group = parser.add_mutually_exclusive_group() group = parser.add_mutually_exclusive_group()
group.add_argument("--output", "-o", default=".", help="The directory to save output .rst files in.") group.add_argument("--output", "-o", default=".", help="The directory to save output .rst files in.")
group.add_argument("--dry-run", action="store_true", help="If passed, no output will be generated and XML files are only checked for errors.") group.add_argument(
"--dry-run",
action="store_true",
help="If passed, no output will be generated and XML files are only checked for errors.",
)
args = parser.parse_args() args = parser.parse_args()
file_list = [] # type: List[str] file_list = [] # type: List[str]
@ -283,15 +294,15 @@ def main(): # type: () -> None
if path.endswith(os.sep): if path.endswith(os.sep):
path = path[:-1] path = path[:-1]
if os.path.basename(path) == 'modules': if os.path.basename(path) == "modules":
for subdir, dirs, _ in os.walk(path): for subdir, dirs, _ in os.walk(path):
if 'doc_classes' in dirs: if "doc_classes" in dirs:
doc_dir = os.path.join(subdir, 'doc_classes') doc_dir = os.path.join(subdir, "doc_classes")
class_file_names = (f for f in os.listdir(doc_dir) if f.endswith('.xml')) class_file_names = (f for f in os.listdir(doc_dir) if f.endswith(".xml"))
file_list += (os.path.join(doc_dir, f) for f in class_file_names) file_list += (os.path.join(doc_dir, f) for f in class_file_names)
elif os.path.isdir(path): elif os.path.isdir(path):
file_list += (os.path.join(path, f) for f in os.listdir(path) if f.endswith('.xml')) file_list += (os.path.join(path, f) for f in os.listdir(path) if f.endswith(".xml"))
elif os.path.isfile(path): elif os.path.isfile(path):
if not path.endswith(".xml"): if not path.endswith(".xml"):
@ -311,7 +322,7 @@ def main(): # type: () -> None
continue continue
doc = tree.getroot() doc = tree.getroot()
if 'version' not in doc.attrib: if "version" not in doc.attrib:
print_error("Version missing from 'doc', file: {}".format(cur_file), state) print_error("Version missing from 'doc', file: {}".format(cur_file), state)
continue continue
@ -337,13 +348,14 @@ def main(): # type: () -> None
if state.errored: if state.errored:
exit(1) exit(1)
def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, State, bool, str) -> None def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, State, bool, str) -> None
class_name = class_def.name class_name = class_def.name
if dry_run: if dry_run:
f = open(os.devnull, "w") f = open(os.devnull, "w")
else: else:
f = open(os.path.join(output_dir, "class_" + class_name.lower() + '.rst'), 'w', encoding='utf-8') f = open(os.path.join(output_dir, "class_" + class_name.lower() + ".rst"), "w", encoding="utf-8")
# Warn contributors not to edit this file directly # Warn contributors not to edit this file directly
f.write(":github_url: hide\n\n") f.write(":github_url: hide\n\n")
@ -352,13 +364,13 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
f.write(".. The source is found in doc/classes or modules/<name>/doc_classes.\n\n") f.write(".. The source is found in doc/classes or modules/<name>/doc_classes.\n\n")
f.write(".. _class_" + class_name + ":\n\n") f.write(".. _class_" + class_name + ":\n\n")
f.write(make_heading(class_name, '=')) f.write(make_heading(class_name, "="))
# Inheritance tree # Inheritance tree
# Ascendants # Ascendants
if class_def.inherits: if class_def.inherits:
inh = class_def.inherits.strip() inh = class_def.inherits.strip()
f.write('**Inherits:** ') f.write("**Inherits:** ")
first = True first = True
while inh in state.classes: while inh in state.classes:
if not first: if not first:
@ -381,7 +393,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
inherited.append(c.name) inherited.append(c.name)
if len(inherited): if len(inherited):
f.write('**Inherited By:** ') f.write("**Inherited By:** ")
for i, child in enumerate(inherited): for i, child in enumerate(inherited):
if i > 0: if i > 0:
f.write(", ") f.write(", ")
@ -393,20 +405,20 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
f.write(rstize_text(class_def.brief_description.strip(), state) + "\n\n") f.write(rstize_text(class_def.brief_description.strip(), state) + "\n\n")
# Class description # Class description
if class_def.description is not None and class_def.description.strip() != '': if class_def.description is not None and class_def.description.strip() != "":
f.write(make_heading('Description', '-')) f.write(make_heading("Description", "-"))
f.write(rstize_text(class_def.description.strip(), state) + "\n\n") f.write(rstize_text(class_def.description.strip(), state) + "\n\n")
# Online tutorials # Online tutorials
if len(class_def.tutorials) > 0: if len(class_def.tutorials) > 0:
f.write(make_heading('Tutorials', '-')) f.write(make_heading("Tutorials", "-"))
for t in class_def.tutorials: for t in class_def.tutorials:
link = t.strip() link = t.strip()
f.write("- " + make_url(link) + "\n\n") f.write("- " + make_url(link) + "\n\n")
# Properties overview # Properties overview
if len(class_def.properties) > 0: if len(class_def.properties) > 0:
f.write(make_heading('Properties', '-')) f.write(make_heading("Properties", "-"))
ml = [] # type: List[Tuple[str, str, str]] ml = [] # type: List[Tuple[str, str, str]]
for property_def in class_def.properties.values(): for property_def in class_def.properties.values():
type_rst = property_def.type_name.to_rst(state) type_rst = property_def.type_name.to_rst(state)
@ -420,7 +432,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
# Methods overview # Methods overview
if len(class_def.methods) > 0: if len(class_def.methods) > 0:
f.write(make_heading('Methods', '-')) f.write(make_heading("Methods", "-"))
ml = [] ml = []
for method_list in class_def.methods.values(): for method_list in class_def.methods.values():
for m in method_list: for m in method_list:
@ -429,7 +441,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
# Theme properties # Theme properties
if class_def.theme_items is not None and len(class_def.theme_items) > 0: if class_def.theme_items is not None and len(class_def.theme_items) > 0:
f.write(make_heading('Theme Properties', '-')) f.write(make_heading("Theme Properties", "-"))
pl = [] pl = []
for theme_item_list in class_def.theme_items.values(): for theme_item_list in class_def.theme_items.values():
for theme_item in theme_item_list: for theme_item in theme_item_list:
@ -438,30 +450,30 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
# Signals # Signals
if len(class_def.signals) > 0: if len(class_def.signals) > 0:
f.write(make_heading('Signals', '-')) f.write(make_heading("Signals", "-"))
index = 0 index = 0
for signal in class_def.signals.values(): for signal in class_def.signals.values():
if index != 0: if index != 0:
f.write('----\n\n') f.write("----\n\n")
f.write(".. _class_{}_signal_{}:\n\n".format(class_name, signal.name)) f.write(".. _class_{}_signal_{}:\n\n".format(class_name, signal.name))
_, signature = make_method_signature(class_def, signal, False, state) _, signature = make_method_signature(class_def, signal, False, state)
f.write("- {}\n\n".format(signature)) f.write("- {}\n\n".format(signature))
if signal.description is not None and signal.description.strip() != '': if signal.description is not None and signal.description.strip() != "":
f.write(rstize_text(signal.description.strip(), state) + '\n\n') f.write(rstize_text(signal.description.strip(), state) + "\n\n")
index += 1 index += 1
# Enums # Enums
if len(class_def.enums) > 0: if len(class_def.enums) > 0:
f.write(make_heading('Enumerations', '-')) f.write(make_heading("Enumerations", "-"))
index = 0 index = 0
for e in class_def.enums.values(): for e in class_def.enums.values():
if index != 0: if index != 0:
f.write('----\n\n') f.write("----\n\n")
f.write(".. _enum_{}_{}:\n\n".format(class_name, e.name)) f.write(".. _enum_{}_{}:\n\n".format(class_name, e.name))
# Sphinx seems to divide the bullet list into individual <ul> tags if we weave the labels into it. # Sphinx seems to divide the bullet list into individual <ul> tags if we weave the labels into it.
@ -474,16 +486,16 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
f.write("enum **{}**:\n\n".format(e.name)) f.write("enum **{}**:\n\n".format(e.name))
for value in e.values.values(): for value in e.values.values():
f.write("- **{}** = **{}**".format(value.name, value.value)) f.write("- **{}** = **{}**".format(value.name, value.value))
if value.text is not None and value.text.strip() != '': if value.text is not None and value.text.strip() != "":
f.write(' --- ' + rstize_text(value.text.strip(), state)) f.write(" --- " + rstize_text(value.text.strip(), state))
f.write('\n\n') f.write("\n\n")
index += 1 index += 1
# Constants # Constants
if len(class_def.constants) > 0: if len(class_def.constants) > 0:
f.write(make_heading('Constants', '-')) f.write(make_heading("Constants", "-"))
# Sphinx seems to divide the bullet list into individual <ul> tags if we weave the labels into it. # Sphinx seems to divide the bullet list into individual <ul> tags if we weave the labels into it.
# As such I'll put them all above the list. Won't be perfect but better than making the list visually broken. # As such I'll put them all above the list. Won't be perfect but better than making the list visually broken.
for constant in class_def.constants.values(): for constant in class_def.constants.values():
@ -491,14 +503,14 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
for constant in class_def.constants.values(): for constant in class_def.constants.values():
f.write("- **{}** = **{}**".format(constant.name, constant.value)) f.write("- **{}** = **{}**".format(constant.name, constant.value))
if constant.text is not None and constant.text.strip() != '': if constant.text is not None and constant.text.strip() != "":
f.write(' --- ' + rstize_text(constant.text.strip(), state)) f.write(" --- " + rstize_text(constant.text.strip(), state))
f.write('\n\n') f.write("\n\n")
# Property descriptions # Property descriptions
if any(not p.overridden for p in class_def.properties.values()) > 0: if any(not p.overridden for p in class_def.properties.values()) > 0:
f.write(make_heading('Property Descriptions', '-')) f.write(make_heading("Property Descriptions", "-"))
index = 0 index = 0
for property_def in class_def.properties.values(): for property_def in class_def.properties.values():
@ -506,36 +518,36 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
continue continue
if index != 0: if index != 0:
f.write('----\n\n') f.write("----\n\n")
f.write(".. _class_{}_property_{}:\n\n".format(class_name, property_def.name)) f.write(".. _class_{}_property_{}:\n\n".format(class_name, property_def.name))
f.write('- {} **{}**\n\n'.format(property_def.type_name.to_rst(state), property_def.name)) f.write("- {} **{}**\n\n".format(property_def.type_name.to_rst(state), property_def.name))
info = [] info = []
if property_def.default_value is not None: if property_def.default_value is not None:
info.append(("*Default*", property_def.default_value)) info.append(("*Default*", property_def.default_value))
if property_def.setter is not None and not property_def.setter.startswith("_"): if property_def.setter is not None and not property_def.setter.startswith("_"):
info.append(("*Setter*", property_def.setter + '(value)')) info.append(("*Setter*", property_def.setter + "(value)"))
if property_def.getter is not None and not property_def.getter.startswith("_"): if property_def.getter is not None and not property_def.getter.startswith("_"):
info.append(('*Getter*', property_def.getter + '()')) info.append(("*Getter*", property_def.getter + "()"))
if len(info) > 0: if len(info) > 0:
format_table(f, info) format_table(f, info)
if property_def.text is not None and property_def.text.strip() != '': if property_def.text is not None and property_def.text.strip() != "":
f.write(rstize_text(property_def.text.strip(), state) + '\n\n') f.write(rstize_text(property_def.text.strip(), state) + "\n\n")
index += 1 index += 1
# Method descriptions # Method descriptions
if len(class_def.methods) > 0: if len(class_def.methods) > 0:
f.write(make_heading('Method Descriptions', '-')) f.write(make_heading("Method Descriptions", "-"))
index = 0 index = 0
for method_list in class_def.methods.values(): for method_list in class_def.methods.values():
for i, m in enumerate(method_list): for i, m in enumerate(method_list):
if index != 0: if index != 0:
f.write('----\n\n') f.write("----\n\n")
if i == 0: if i == 0:
f.write(".. _class_{}_method_{}:\n\n".format(class_name, m.name)) f.write(".. _class_{}_method_{}:\n\n".format(class_name, m.name))
@ -543,24 +555,24 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
ret_type, signature = make_method_signature(class_def, m, False, state) ret_type, signature = make_method_signature(class_def, m, False, state)
f.write("- {} {}\n\n".format(ret_type, signature)) f.write("- {} {}\n\n".format(ret_type, signature))
if m.description is not None and m.description.strip() != '': if m.description is not None and m.description.strip() != "":
f.write(rstize_text(m.description.strip(), state) + '\n\n') f.write(rstize_text(m.description.strip(), state) + "\n\n")
index += 1 index += 1
def make_class_list(class_list, columns): # type: (List[str], int) -> None def make_class_list(class_list, columns): # type: (List[str], int) -> None
# This function is no longer used. # This function is no longer used.
f = open('class_list.rst', 'w', encoding='utf-8') f = open("class_list.rst", "w", encoding="utf-8")
col_max = len(class_list) // columns + 1 col_max = len(class_list) // columns + 1
print(('col max is ', col_max)) print(("col max is ", col_max))
fit_columns = [] # type: List[List[str]] fit_columns = [] # type: List[List[str]]
for _ in range(0, columns): for _ in range(0, columns):
fit_columns.append([]) fit_columns.append([])
indexers = [] # type List[str] indexers = [] # type List[str]
last_initial = '' last_initial = ""
for idx, name in enumerate(class_list): for idx, name in enumerate(class_list):
col = idx // col_max col = idx // col_max
@ -590,7 +602,7 @@ def make_class_list(class_list, columns): # type: (List[str], int) -> None
f.write("\n") f.write("\n")
for r in range(0, row_max): for r in range(0, row_max):
s = '+ ' s = "+ "
for c in range(0, columns): for c in range(0, columns):
if r >= len(fit_columns[c]): if r >= len(fit_columns[c]):
continue continue
@ -598,13 +610,13 @@ def make_class_list(class_list, columns): # type: (List[str], int) -> None
classname = fit_columns[c][r] classname = fit_columns[c][r]
initial = classname[0] initial = classname[0]
if classname in indexers: if classname in indexers:
s += '**' + initial + '** | ' s += "**" + initial + "** | "
else: else:
s += ' | ' s += " | "
s += '[' + classname + '](class_' + classname.lower() + ') | ' s += "[" + classname + "](class_" + classname.lower() + ") | "
s += '\n' s += "\n"
f.write(s) f.write(s)
for n in range(0, columns): for n in range(0, columns):
@ -618,29 +630,29 @@ def escape_rst(text, until_pos=-1): # type: (str) -> str
# Escape \ character, otherwise it ends up as an escape character in rst # Escape \ character, otherwise it ends up as an escape character in rst
pos = 0 pos = 0
while True: while True:
pos = text.find('\\', pos, until_pos) pos = text.find("\\", pos, until_pos)
if pos == -1: if pos == -1:
break break
text = text[:pos] + "\\\\" + text[pos + 1:] text = text[:pos] + "\\\\" + text[pos + 1 :]
pos += 2 pos += 2
# Escape * character to avoid interpreting it as emphasis # Escape * character to avoid interpreting it as emphasis
pos = 0 pos = 0
while True: while True:
pos = text.find('*', pos, until_pos) pos = text.find("*", pos, until_pos)
if pos == -1: if pos == -1:
break break
text = text[:pos] + "\*" + text[pos + 1:] text = text[:pos] + "\*" + text[pos + 1 :]
pos += 2 pos += 2
# Escape _ character at the end of a word to avoid interpreting it as an inline hyperlink # Escape _ character at the end of a word to avoid interpreting it as an inline hyperlink
pos = 0 pos = 0
while True: while True:
pos = text.find('_', pos, until_pos) pos = text.find("_", pos, until_pos)
if pos == -1: if pos == -1:
break break
if not text[pos + 1].isalnum(): # don't escape within a snake_case word if not text[pos + 1].isalnum(): # don't escape within a snake_case word
text = text[:pos] + "\_" + text[pos + 1:] text = text[:pos] + "\_" + text[pos + 1 :]
pos += 2 pos += 2
else: else:
pos += 1 pos += 1
@ -652,16 +664,16 @@ def rstize_text(text, state): # type: (str, State) -> str
# Linebreak + tabs in the XML should become two line breaks unless in a "codeblock" # Linebreak + tabs in the XML should become two line breaks unless in a "codeblock"
pos = 0 pos = 0
while True: while True:
pos = text.find('\n', pos) pos = text.find("\n", pos)
if pos == -1: if pos == -1:
break break
pre_text = text[:pos] pre_text = text[:pos]
indent_level = 0 indent_level = 0
while text[pos + 1] == '\t': while text[pos + 1] == "\t":
pos += 1 pos += 1
indent_level += 1 indent_level += 1
post_text = text[pos + 1:] post_text = text[pos + 1 :]
# Handle codeblocks # Handle codeblocks
if post_text.startswith("[codeblock]"): if post_text.startswith("[codeblock]"):
@ -670,28 +682,33 @@ def rstize_text(text, state): # type: (str, State) -> str
print_error("[codeblock] without a closing tag, file: {}".format(state.current_class), state) print_error("[codeblock] without a closing tag, file: {}".format(state.current_class), state)
return "" return ""
code_text = post_text[len("[codeblock]"):end_pos] code_text = post_text[len("[codeblock]") : end_pos]
post_text = post_text[end_pos:] post_text = post_text[end_pos:]
# Remove extraneous tabs # Remove extraneous tabs
code_pos = 0 code_pos = 0
while True: while True:
code_pos = code_text.find('\n', code_pos) code_pos = code_text.find("\n", code_pos)
if code_pos == -1: if code_pos == -1:
break break
to_skip = 0 to_skip = 0
while code_pos + to_skip + 1 < len(code_text) and code_text[code_pos + to_skip + 1] == '\t': while code_pos + to_skip + 1 < len(code_text) and code_text[code_pos + to_skip + 1] == "\t":
to_skip += 1 to_skip += 1
if to_skip > indent_level: if to_skip > indent_level:
print_error("Four spaces should be used for indentation within [codeblock], file: {}".format(state.current_class), state) print_error(
"Four spaces should be used for indentation within [codeblock], file: {}".format(
state.current_class
),
state,
)
if len(code_text[code_pos + to_skip + 1:]) == 0: if len(code_text[code_pos + to_skip + 1 :]) == 0:
code_text = code_text[:code_pos] + "\n" code_text = code_text[:code_pos] + "\n"
code_pos += 1 code_pos += 1
else: else:
code_text = code_text[:code_pos] + "\n " + code_text[code_pos + to_skip + 1:] code_text = code_text[:code_pos] + "\n " + code_text[code_pos + to_skip + 1 :]
code_pos += 5 - to_skip code_pos += 5 - to_skip
text = pre_text + "\n[codeblock]" + code_text + post_text text = pre_text + "\n[codeblock]" + code_text + post_text
@ -702,7 +719,7 @@ def rstize_text(text, state): # type: (str, State) -> str
text = pre_text + "\n\n" + post_text text = pre_text + "\n\n" + post_text
pos += 2 pos += 2
next_brac_pos = text.find('[') next_brac_pos = text.find("[")
text = escape_rst(text, next_brac_pos) text = escape_rst(text, next_brac_pos)
# Handle [tags] # Handle [tags]
@ -714,54 +731,59 @@ def rstize_text(text, state): # type: (str, State) -> str
tag_depth = 0 tag_depth = 0
previous_pos = 0 previous_pos = 0
while True: while True:
pos = text.find('[', pos) pos = text.find("[", pos)
if inside_url and (pos > previous_pos): if inside_url and (pos > previous_pos):
url_has_name = True url_has_name = True
if pos == -1: if pos == -1:
break break
endq_pos = text.find(']', pos + 1) endq_pos = text.find("]", pos + 1)
if endq_pos == -1: if endq_pos == -1:
break break
pre_text = text[:pos] pre_text = text[:pos]
post_text = text[endq_pos + 1:] post_text = text[endq_pos + 1 :]
tag_text = text[pos + 1:endq_pos] tag_text = text[pos + 1 : endq_pos]
escape_post = False escape_post = False
if tag_text in state.classes: if tag_text in state.classes:
if tag_text == state.current_class: if tag_text == state.current_class:
# We don't want references to the same class # We don't want references to the same class
tag_text = '``{}``'.format(tag_text) tag_text = "``{}``".format(tag_text)
else: else:
tag_text = make_type(tag_text, state) tag_text = make_type(tag_text, state)
escape_post = True escape_post = True
else: # command else: # command
cmd = tag_text cmd = tag_text
space_pos = tag_text.find(' ') space_pos = tag_text.find(" ")
if cmd == '/codeblock': if cmd == "/codeblock":
tag_text = '' tag_text = ""
tag_depth -= 1 tag_depth -= 1
inside_code = False inside_code = False
# Strip newline if the tag was alone on one # Strip newline if the tag was alone on one
if pre_text[-1] == '\n': if pre_text[-1] == "\n":
pre_text = pre_text[:-1] pre_text = pre_text[:-1]
elif cmd == '/code': elif cmd == "/code":
tag_text = '``' tag_text = "``"
tag_depth -= 1 tag_depth -= 1
inside_code = False inside_code = False
escape_post = True escape_post = True
elif inside_code: elif inside_code:
tag_text = '[' + tag_text + ']' tag_text = "[" + tag_text + "]"
elif cmd.find('html') == 0: elif cmd.find("html") == 0:
param = tag_text[space_pos + 1:] param = tag_text[space_pos + 1 :]
tag_text = param tag_text = param
elif cmd.startswith('method') or cmd.startswith('member') or cmd.startswith('signal') or cmd.startswith('constant'): elif (
cmd.startswith("method")
or cmd.startswith("member")
or cmd.startswith("signal")
or cmd.startswith("constant")
):
param = tag_text[space_pos + 1:] param = tag_text[space_pos + 1 :]
if param.find('.') != -1: if param.find(".") != -1:
ss = param.split('.') ss = param.split(".")
if len(ss) > 2: if len(ss) > 2:
print_error("Bad reference: '{}', file: {}".format(param, state.current_class), state) print_error("Bad reference: '{}', file: {}".format(param, state.current_class), state)
class_param, method_param = ss class_param, method_param = ss
@ -794,7 +816,7 @@ def rstize_text(text, state): # type: (str, State) -> str
# Search in the current class # Search in the current class
search_class_defs = [class_def] search_class_defs = [class_def]
if param.find('.') == -1: if param.find(".") == -1:
# Also search in @GlobalScope as a last resort if no class was specified # Also search in @GlobalScope as a last resort if no class was specified
search_class_defs.append(state.classes["@GlobalScope"]) search_class_defs.append(state.classes["@GlobalScope"])
@ -815,66 +837,71 @@ def rstize_text(text, state): # type: (str, State) -> str
ref_type = "_constant" ref_type = "_constant"
else: else:
print_error("Unresolved type reference '{}' in method reference '{}', file: {}".format(class_param, param, state.current_class), state) print_error(
"Unresolved type reference '{}' in method reference '{}', file: {}".format(
class_param, param, state.current_class
),
state,
)
repl_text = method_param repl_text = method_param
if class_param != state.current_class: if class_param != state.current_class:
repl_text = "{}.{}".format(class_param, method_param) repl_text = "{}.{}".format(class_param, method_param)
tag_text = ':ref:`{}<class_{}{}_{}>`'.format(repl_text, class_param, ref_type, method_param) tag_text = ":ref:`{}<class_{}{}_{}>`".format(repl_text, class_param, ref_type, method_param)
escape_post = True escape_post = True
elif cmd.find('image=') == 0: elif cmd.find("image=") == 0:
tag_text = "" # '![](' + cmd[6:] + ')' tag_text = "" # '![](' + cmd[6:] + ')'
elif cmd.find('url=') == 0: elif cmd.find("url=") == 0:
url_link = cmd[4:] url_link = cmd[4:]
tag_text = '`' tag_text = "`"
tag_depth += 1 tag_depth += 1
inside_url = True inside_url = True
url_has_name = False url_has_name = False
elif cmd == '/url': elif cmd == "/url":
tag_text = ('' if url_has_name else url_link) + " <" + url_link + ">`_" tag_text = ("" if url_has_name else url_link) + " <" + url_link + ">`_"
tag_depth -= 1 tag_depth -= 1
escape_post = True escape_post = True
inside_url = False inside_url = False
url_has_name = False url_has_name = False
elif cmd == 'center': elif cmd == "center":
tag_depth += 1 tag_depth += 1
tag_text = '' tag_text = ""
elif cmd == '/center': elif cmd == "/center":
tag_depth -= 1 tag_depth -= 1
tag_text = '' tag_text = ""
elif cmd == 'codeblock': elif cmd == "codeblock":
tag_depth += 1 tag_depth += 1
tag_text = '\n::\n' tag_text = "\n::\n"
inside_code = True inside_code = True
elif cmd == 'br': elif cmd == "br":
# Make a new paragraph instead of a linebreak, rst is not so linebreak friendly # Make a new paragraph instead of a linebreak, rst is not so linebreak friendly
tag_text = '\n\n' tag_text = "\n\n"
# Strip potential leading spaces # Strip potential leading spaces
while post_text[0] == ' ': while post_text[0] == " ":
post_text = post_text[1:] post_text = post_text[1:]
elif cmd == 'i' or cmd == '/i': elif cmd == "i" or cmd == "/i":
if cmd == "/i": if cmd == "/i":
tag_depth -= 1 tag_depth -= 1
else: else:
tag_depth += 1 tag_depth += 1
tag_text = '*' tag_text = "*"
elif cmd == 'b' or cmd == '/b': elif cmd == "b" or cmd == "/b":
if cmd == "/b": if cmd == "/b":
tag_depth -= 1 tag_depth -= 1
else: else:
tag_depth += 1 tag_depth += 1
tag_text = '**' tag_text = "**"
elif cmd == 'u' or cmd == '/u': elif cmd == "u" or cmd == "/u":
if cmd == "/u": if cmd == "/u":
tag_depth -= 1 tag_depth -= 1
else: else:
tag_depth += 1 tag_depth += 1
tag_text = '' tag_text = ""
elif cmd == 'code': elif cmd == "code":
tag_text = '``' tag_text = "``"
tag_depth += 1 tag_depth += 1
inside_code = True inside_code = True
elif cmd.startswith('enum '): elif cmd.startswith("enum "):
tag_text = make_enum(cmd[5:], state) tag_text = make_enum(cmd[5:], state)
escape_post = True escape_post = True
else: else:
@ -883,24 +910,24 @@ def rstize_text(text, state): # type: (str, State) -> str
# Properly escape things like `[Node]s` # Properly escape things like `[Node]s`
if escape_post and post_text and (post_text[0].isalnum() or post_text[0] == "("): # not punctuation, escape if escape_post and post_text and (post_text[0].isalnum() or post_text[0] == "("): # not punctuation, escape
post_text = '\ ' + post_text post_text = "\ " + post_text
next_brac_pos = post_text.find('[', 0) next_brac_pos = post_text.find("[", 0)
iter_pos = 0 iter_pos = 0
while not inside_code: while not inside_code:
iter_pos = post_text.find('*', iter_pos, next_brac_pos) iter_pos = post_text.find("*", iter_pos, next_brac_pos)
if iter_pos == -1: if iter_pos == -1:
break break
post_text = post_text[:iter_pos] + "\*" + post_text[iter_pos + 1:] post_text = post_text[:iter_pos] + "\*" + post_text[iter_pos + 1 :]
iter_pos += 2 iter_pos += 2
iter_pos = 0 iter_pos = 0
while not inside_code: while not inside_code:
iter_pos = post_text.find('_', iter_pos, next_brac_pos) iter_pos = post_text.find("_", iter_pos, next_brac_pos)
if iter_pos == -1: if iter_pos == -1:
break break
if not post_text[iter_pos + 1].isalnum(): # don't escape within a snake_case word if not post_text[iter_pos + 1].isalnum(): # don't escape within a snake_case word
post_text = post_text[:iter_pos] + "\_" + post_text[iter_pos + 1:] post_text = post_text[:iter_pos] + "\_" + post_text[iter_pos + 1 :]
iter_pos += 2 iter_pos += 2
else: else:
iter_pos += 1 iter_pos += 1
@ -922,7 +949,7 @@ def format_table(f, data, remove_empty_columns=False): # type: (TextIO, Iterabl
column_sizes = [0] * len(data[0]) column_sizes = [0] * len(data[0])
for row in data: for row in data:
for i, text in enumerate(row): for i, text in enumerate(row):
text_length = len(text or '') text_length = len(text or "")
if text_length > column_sizes[i]: if text_length > column_sizes[i]:
column_sizes[i] = text_length column_sizes[i] = text_length
@ -939,16 +966,16 @@ def format_table(f, data, remove_empty_columns=False): # type: (TextIO, Iterabl
for i, text in enumerate(row): for i, text in enumerate(row):
if column_sizes[i] == 0 and remove_empty_columns: if column_sizes[i] == 0 and remove_empty_columns:
continue continue
row_text += " " + (text or '').ljust(column_sizes[i]) + " |" row_text += " " + (text or "").ljust(column_sizes[i]) + " |"
row_text += "\n" row_text += "\n"
f.write(row_text) f.write(row_text)
f.write(sep) f.write(sep)
f.write('\n') f.write("\n")
def make_type(t, state): # type: (str, State) -> str def make_type(t, state): # type: (str, State) -> str
if t in state.classes: if t in state.classes:
return ':ref:`{0}<class_{0}>`'.format(t) return ":ref:`{0}<class_{0}>`".format(t)
print_error("Unresolved type '{}', file: {}".format(t, state.current_class), state) print_error("Unresolved type '{}', file: {}".format(t, state.current_class), state)
return t return t
@ -957,7 +984,7 @@ def make_enum(t, state): # type: (str, State) -> str
p = t.find(".") p = t.find(".")
if p >= 0: if p >= 0:
c = t[0:p] c = t[0:p]
e = t[p + 1:] e = t[p + 1 :]
# Variant enums live in GlobalScope but still use periods. # Variant enums live in GlobalScope but still use periods.
if c == "Variant": if c == "Variant":
c = "@GlobalScope" c = "@GlobalScope"
@ -969,7 +996,7 @@ def make_enum(t, state): # type: (str, State) -> str
c = "@GlobalScope" c = "@GlobalScope"
if not c in state.classes and c.startswith("_"): if not c in state.classes and c.startswith("_"):
c = c[1:] # Remove the underscore prefix c = c[1:] # Remove the underscore prefix
if c in state.classes and e in state.classes[c].enums: if c in state.classes and e in state.classes[c].enums:
return ":ref:`{0}<enum_{1}_{0}>`".format(e, c) return ":ref:`{0}<enum_{1}_{0}>`".format(e, c)
@ -981,7 +1008,9 @@ def make_enum(t, state): # type: (str, State) -> str
return t return t
def make_method_signature(class_def, method_def, make_ref, state): # type: (ClassDef, Union[MethodDef, SignalDef], bool, State) -> Tuple[str, str] def make_method_signature(
class_def, method_def, make_ref, state
): # type: (ClassDef, Union[MethodDef, SignalDef], bool, State) -> Tuple[str, str]
ret_type = " " ret_type = " "
ref_type = "signal" ref_type = "signal"
@ -996,34 +1025,34 @@ def make_method_signature(class_def, method_def, make_ref, state): # type: (Cla
else: else:
out += "**{}** ".format(method_def.name) out += "**{}** ".format(method_def.name)
out += '**(**' out += "**(**"
for i, arg in enumerate(method_def.parameters): for i, arg in enumerate(method_def.parameters):
if i > 0: if i > 0:
out += ', ' out += ", "
else: else:
out += ' ' out += " "
out += "{} {}".format(arg.type_name.to_rst(state), arg.name) out += "{} {}".format(arg.type_name.to_rst(state), arg.name)
if arg.default_value is not None: if arg.default_value is not None:
out += '=' + arg.default_value out += "=" + arg.default_value
if isinstance(method_def, MethodDef) and method_def.qualifiers is not None and 'vararg' in method_def.qualifiers: if isinstance(method_def, MethodDef) and method_def.qualifiers is not None and "vararg" in method_def.qualifiers:
if len(method_def.parameters) > 0: if len(method_def.parameters) > 0:
out += ', ...' out += ", ..."
else: else:
out += ' ...' out += " ..."
out += ' **)**' out += " **)**"
if isinstance(method_def, MethodDef) and method_def.qualifiers is not None: if isinstance(method_def, MethodDef) and method_def.qualifiers is not None:
out += ' ' + method_def.qualifiers out += " " + method_def.qualifiers
return ret_type, out return ret_type, out
def make_heading(title, underline): # type: (str, str) -> str def make_heading(title, underline): # type: (str, str) -> str
return title + '\n' + (underline * len(title)) + "\n\n" return title + "\n" + (underline * len(title)) + "\n\n"
def make_url(link): # type: (str) -> str def make_url(link): # type: (str) -> str
@ -1047,5 +1076,5 @@ def make_url(link): # type: (str) -> str
return "`" + link + " <" + link + ">`_" return "`" + link + " <" + link + ">`_"
if __name__ == '__main__': if __name__ == "__main__":
main() main()
View file
@ -7,7 +7,7 @@ import shutil
from collections import OrderedDict from collections import OrderedDict
EXTRACT_TAGS = ["description", "brief_description", "member", "constant", "theme_item", "link"] EXTRACT_TAGS = ["description", "brief_description", "member", "constant", "theme_item", "link"]
HEADER = '''\ HEADER = """\
# LANGUAGE translation of the Godot Engine class reference. # LANGUAGE translation of the Godot Engine class reference.
# Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. # Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur.
# Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). # Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md).
@ -24,7 +24,7 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\\n" "Content-Type: text/plain; charset=UTF-8\\n"
"Content-Transfer-Encoding: 8-bit\\n" "Content-Transfer-Encoding: 8-bit\\n"
''' """
# Some strings used by makerst.py are normally part of the editor translations, # Some strings used by makerst.py are normally part of the editor translations,
# so we need to include them manually here for the online docs. # so we need to include them manually here for the online docs.
BASE_STRINGS = [ BASE_STRINGS = [
@ -42,7 +42,8 @@ BASE_STRINGS = [
## <xml-line-number-hack from="https://stackoverflow.com/a/36430270/10846399"> ## <xml-line-number-hack from="https://stackoverflow.com/a/36430270/10846399">
import sys import sys
sys.modules['_elementtree'] = None
sys.modules["_elementtree"] = None
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
## override the parser to get the line number ## override the parser to get the line number
@ -62,8 +63,11 @@ class LineNumberingParser(ET.XMLParser):
element._end_column_number = self.parser.CurrentColumnNumber element._end_column_number = self.parser.CurrentColumnNumber
element._end_byte_index = self.parser.CurrentByteIndex element._end_byte_index = self.parser.CurrentByteIndex
return element return element
## </xml-line-number-hack> ## </xml-line-number-hack>
class Desc: class Desc:
def __init__(self, line_no, msg, desc_list=None): def __init__(self, line_no, msg, desc_list=None):
## line_no : the line number where the desc is ## line_no : the line number where the desc is
@ -73,6 +77,7 @@ class Desc:
self.msg = msg self.msg = msg
self.desc_list = desc_list self.desc_list = desc_list
class DescList: class DescList:
def __init__(self, doc, path): def __init__(self, doc, path):
## doc : root xml element of the document ## doc : root xml element of the document
@ -82,29 +87,32 @@ class DescList:
self.path = path self.path = path
self.list = [] self.list = []
def print_error(error): def print_error(error):
print("ERROR: {}".format(error)) print("ERROR: {}".format(error))
## build classes with xml elements recursively ## build classes with xml elements recursively
def _collect_classes_dir(path, classes): def _collect_classes_dir(path, classes):
if not os.path.isdir(path): if not os.path.isdir(path):
print_error("Invalid directory path: {}".format(path)) print_error("Invalid directory path: {}".format(path))
exit(1) exit(1)
for _dir in map(lambda dir : os.path.join(path, dir), os.listdir(path)): for _dir in map(lambda dir: os.path.join(path, dir), os.listdir(path)):
if os.path.isdir(_dir): if os.path.isdir(_dir):
_collect_classes_dir(_dir, classes) _collect_classes_dir(_dir, classes)
elif os.path.isfile(_dir): elif os.path.isfile(_dir):
if not _dir.endswith(".xml"): if not _dir.endswith(".xml"):
#print("Got non-.xml file '{}', skipping.".format(path)) # print("Got non-.xml file '{}', skipping.".format(path))
continue continue
_collect_classes_file(_dir, classes) _collect_classes_file(_dir, classes)
## opens a file and parse xml add to classes ## opens a file and parse xml add to classes
def _collect_classes_file(path, classes): def _collect_classes_file(path, classes):
if not os.path.isfile(path) or not path.endswith(".xml"): if not os.path.isfile(path) or not path.endswith(".xml"):
print_error("Invalid xml file path: {}".format(path)) print_error("Invalid xml file path: {}".format(path))
exit(1) exit(1)
print('Collecting file: {}'.format(os.path.basename(path))) print("Collecting file: {}".format(os.path.basename(path)))
try: try:
tree = ET.parse(path, parser=LineNumberingParser()) tree = ET.parse(path, parser=LineNumberingParser())
@ -114,8 +122,8 @@ def _collect_classes_file(path, classes):
doc = tree.getroot() doc = tree.getroot()
if 'name' in doc.attrib: if "name" in doc.attrib:
if 'version' not in doc.attrib: if "version" not in doc.attrib:
print_error("Version missing from 'doc', file: {}".format(path)) print_error("Version missing from 'doc', file: {}".format(path))
name = doc.attrib["name"] name = doc.attrib["name"]
@ -124,7 +132,7 @@ def _collect_classes_file(path, classes):
exit(1) exit(1)
classes[name] = DescList(doc, path) classes[name] = DescList(doc, path)
else: else:
print_error('Unknown XML file {}, skipping'.format(path)) print_error("Unknown XML file {}, skipping".format(path))
## regions are list of tuples with size 3 (start_index, end_index, indent) ## regions are list of tuples with size 3 (start_index, end_index, indent)
@ -132,56 +140,64 @@ def _collect_classes_file(path, classes):
## if i inside the region returns the indent, else returns -1 ## if i inside the region returns the indent, else returns -1
def _get_xml_indent(i, regions): def _get_xml_indent(i, regions):
for region in regions: for region in regions:
if region[0] < i < region[1] : if region[0] < i < region[1]:
return region[2] return region[2]
return -1 return -1
## find and build all regions of codeblock which we need later ## find and build all regions of codeblock which we need later
def _make_codeblock_regions(desc, path=''): def _make_codeblock_regions(desc, path=""):
code_block_end = False code_block_end = False
code_block_index = 0 code_block_index = 0
code_block_regions = [] code_block_regions = []
while not code_block_end: while not code_block_end:
code_block_index = desc.find("[codeblock]", code_block_index) code_block_index = desc.find("[codeblock]", code_block_index)
if code_block_index < 0: break if code_block_index < 0:
break
xml_indent=0 xml_indent = 0
while True : while True:
## [codeblock] always have a trailing new line and some tabs ## [codeblock] always have a trailing new line and some tabs
## those tabs are belongs to xml indentations not code indent ## those tabs are belongs to xml indentations not code indent
if desc[code_block_index+len("[codeblock]\n")+xml_indent] == '\t': if desc[code_block_index + len("[codeblock]\n") + xml_indent] == "\t":
xml_indent+=1 xml_indent += 1
else: break else:
break
end_index = desc.find("[/codeblock]", code_block_index) end_index = desc.find("[/codeblock]", code_block_index)
if end_index < 0 : if end_index < 0:
print_error('Non terminating codeblock: {}'.format(path)) print_error("Non terminating codeblock: {}".format(path))
exit(1) exit(1)
code_block_regions.append( (code_block_index, end_index, xml_indent) ) code_block_regions.append((code_block_index, end_index, xml_indent))
code_block_index += 1 code_block_index += 1
return code_block_regions return code_block_regions
def _strip_and_split_desc(desc, code_block_regions): def _strip_and_split_desc(desc, code_block_regions):
desc_strip = '' ## a stripped desc msg desc_strip = "" ## a stripped desc msg
total_indent = 0 ## code indent = total indent - xml indent total_indent = 0 ## code indent = total indent - xml indent
for i in range(len(desc)): for i in range(len(desc)):
c = desc[i] c = desc[i]
if c == '\n' : c = '\\n' if c == "\n":
c = "\\n"
if c == '"': c = '\\"' if c == '"':
c = '\\"'
if c == '\\': c = '\\\\' ## <element \> is invalid for msgmerge if c == "\\":
c = "\\\\" ## <element \> is invalid for msgmerge
if c == '\t': if c == "\t":
xml_indent = _get_xml_indent(i, code_block_regions) xml_indent = _get_xml_indent(i, code_block_regions)
if xml_indent >= 0: if xml_indent >= 0:
total_indent += 1 total_indent += 1
if xml_indent < total_indent: if xml_indent < total_indent:
c = '\\t' c = "\\t"
else: else:
continue continue
else: else:
continue continue
desc_strip += c desc_strip += c
if c == '\\n': if c == "\\n":
total_indent = 0 total_indent = 0
return desc_strip return desc_strip
## make catalog strings from xml elements ## make catalog strings from xml elements
def _make_translation_catalog(classes): def _make_translation_catalog(classes):
unique_msgs = OrderedDict() unique_msgs = OrderedDict()
@ -189,8 +205,9 @@ def _make_translation_catalog(classes):
desc_list = classes[class_name] desc_list = classes[class_name]
for elem in desc_list.doc.iter(): for elem in desc_list.doc.iter():
if elem.tag in EXTRACT_TAGS: if elem.tag in EXTRACT_TAGS:
if not elem.text or len(elem.text) == 0 : continue if not elem.text or len(elem.text) == 0:
continue
line_no = elem._start_line_number if elem.text[0]!='\n' else elem._start_line_number+1 line_no = elem._start_line_number if elem.text[0] != "\n" else elem._start_line_number + 1
desc_str = elem.text.strip() desc_str = elem.text.strip()
code_block_regions = _make_codeblock_regions(desc_str, desc_list.path) code_block_regions = _make_codeblock_regions(desc_str, desc_list.path)
desc_msg = _strip_and_split_desc(desc_str, code_block_regions) desc_msg = _strip_and_split_desc(desc_str, code_block_regions)
@ -203,44 +220,48 @@ def _make_translation_catalog(classes):
unique_msgs[desc_msg].append(desc_obj) unique_msgs[desc_msg].append(desc_obj)
return unique_msgs return unique_msgs
## generate the catalog file ## generate the catalog file
def _generate_translation_catalog_file(unique_msgs, output): def _generate_translation_catalog_file(unique_msgs, output):
with open(output, 'w', encoding='utf8') as f: with open(output, "w", encoding="utf8") as f:
f.write(HEADER) f.write(HEADER)
for msg in BASE_STRINGS: for msg in BASE_STRINGS:
f.write('#: doc/tools/makerst.py\n') f.write("#: doc/tools/makerst.py\n")
f.write('msgid "{}"\n'.format(msg)) f.write('msgid "{}"\n'.format(msg))
f.write('msgstr ""\n\n') f.write('msgstr ""\n\n')
for msg in unique_msgs: for msg in unique_msgs:
if len(msg) == 0 or msg in BASE_STRINGS: if len(msg) == 0 or msg in BASE_STRINGS:
continue continue
f.write('#:') f.write("#:")
desc_list = unique_msgs[msg] desc_list = unique_msgs[msg]
for desc in desc_list: for desc in desc_list:
path = desc.desc_list.path.replace('\\', '/') path = desc.desc_list.path.replace("\\", "/")
if path.startswith('./'): if path.startswith("./"):
path = path[2:] path = path[2:]
f.write(' {}:{}'.format(path, desc.line_no)) f.write(" {}:{}".format(path, desc.line_no))
f.write('\n') f.write("\n")
f.write('msgid "{}"\n'.format(msg)) f.write('msgid "{}"\n'.format(msg))
f.write('msgstr ""\n\n') f.write('msgstr ""\n\n')
## TODO: what if 'nt'? ## TODO: what if 'nt'?
if (os.name == "posix"): if os.name == "posix":
print("Wrapping template at 79 characters for compatibility with Weblate.") print("Wrapping template at 79 characters for compatibility with Weblate.")
os.system("msgmerge -w79 {0} {0} > {0}.wrap".format(output)) os.system("msgmerge -w79 {0} {0} > {0}.wrap".format(output))
shutil.move("{}.wrap".format(output), output) shutil.move("{}.wrap".format(output), output)
def main(): def main():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument("--path", "-p", nargs="+", default=".", help="The directory or directories containing XML files to collect.") parser.add_argument(
"--path", "-p", nargs="+", default=".", help="The directory or directories containing XML files to collect."
)
parser.add_argument("--output", "-o", default="translation_catalog.pot", help="The path to the output file.") parser.add_argument("--output", "-o", default="translation_catalog.pot", help="The path to the output file.")
args = parser.parse_args() args = parser.parse_args()
output = os.path.abspath(args.output) output = os.path.abspath(args.output)
if not os.path.isdir(os.path.dirname(output)) or not output.endswith('.pot'): if not os.path.isdir(os.path.dirname(output)) or not output.endswith(".pot"):
print_error("Invalid output path: {}".format(output)) print_error("Invalid output path: {}".format(output))
exit(1) exit(1)
@ -252,13 +273,14 @@ def main():
print("\nCurrent working dir: {}".format(path)) print("\nCurrent working dir: {}".format(path))
path_classes = OrderedDict() ## dictionary of key=class_name, value=DescList objects path_classes = OrderedDict() ## dictionary of key=class_name, value=DescList objects
_collect_classes_dir(path, path_classes) _collect_classes_dir(path, path_classes)
classes.update(path_classes) classes.update(path_classes)
classes = OrderedDict(sorted(classes.items(), key = lambda kv: kv[0].lower())) classes = OrderedDict(sorted(classes.items(), key=lambda kv: kv[0].lower()))
unique_msgs = _make_translation_catalog(classes) unique_msgs = _make_translation_catalog(classes)
_generate_translation_catalog_file(unique_msgs, output) _generate_translation_catalog_file(unique_msgs, output)
if __name__ == '__main__':
if __name__ == "__main__":
main() main()
View file
@ -1,41 +1,42 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.drivers_sources = [] env.drivers_sources = []
# OS drivers # OS drivers
SConscript('unix/SCsub') SConscript("unix/SCsub")
SConscript('windows/SCsub') SConscript("windows/SCsub")
# Sounds drivers # Sounds drivers
SConscript('alsa/SCsub') SConscript("alsa/SCsub")
SConscript('coreaudio/SCsub') SConscript("coreaudio/SCsub")
SConscript('pulseaudio/SCsub') SConscript("pulseaudio/SCsub")
if (env["platform"] == "windows"): if env["platform"] == "windows":
SConscript("wasapi/SCsub") SConscript("wasapi/SCsub")
if env['xaudio2']: if env["xaudio2"]:
SConscript("xaudio2/SCsub") SConscript("xaudio2/SCsub")
# Midi drivers # Midi drivers
SConscript('alsamidi/SCsub') SConscript("alsamidi/SCsub")
SConscript('coremidi/SCsub') SConscript("coremidi/SCsub")
SConscript('winmidi/SCsub') SConscript("winmidi/SCsub")
# Graphics drivers # Graphics drivers
if (env["platform"] != "server" and env["platform"] != "javascript"): if env["platform"] != "server" and env["platform"] != "javascript":
# SConscript('gles2/SCsub') # SConscript('gles2/SCsub')
SConscript('vulkan/SCsub') SConscript("vulkan/SCsub")
SConscript('gl_context/SCsub') SConscript("gl_context/SCsub")
else: else:
SConscript('dummy/SCsub') SConscript("dummy/SCsub")
# Core dependencies # Core dependencies
SConscript("png/SCsub") SConscript("png/SCsub")
SConscript("spirv-reflect/SCsub") SConscript("spirv-reflect/SCsub")
if env['vsproj']: if env["vsproj"]:
import os import os
path = os.getcwd() path = os.getcwd()
# Change directory so the path resolves correctly in the function call. # Change directory so the path resolves correctly in the function call.
os.chdir("..") os.chdir("..")
View file
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
# Driver source files # Driver source files
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
# Driver source files # Driver source files
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
# Driver source files # Driver source files
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,8 +1,8 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
if (env["platform"] in ["haiku", "osx", "windows", "linuxbsd"]): if env["platform"] in ["haiku", "osx", "windows", "linuxbsd"]:
# Thirdparty source files # Thirdparty source files
thirdparty_dir = "#thirdparty/glad/" thirdparty_dir = "#thirdparty/glad/"
thirdparty_sources = [ thirdparty_sources = [
@ -12,8 +12,8 @@ if (env["platform"] in ["haiku", "osx", "windows", "linuxbsd"]):
env.Prepend(CPPPATH=[thirdparty_dir]) env.Prepend(CPPPATH=[thirdparty_dir])
env.Append(CPPDEFINES=['GLAD_ENABLED']) env.Append(CPPDEFINES=["GLAD_ENABLED"])
env.Append(CPPDEFINES=['GLES_OVER_GL']) env.Append(CPPDEFINES=["GLES_OVER_GL"])
env_thirdparty = env.Clone() env_thirdparty = env.Clone()
env_thirdparty.disable_warnings() env_thirdparty.disable_warnings()
View file
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,23 +1,23 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
if 'GLES2_GLSL' in env['BUILDERS']: if "GLES2_GLSL" in env["BUILDERS"]:
env.GLES2_GLSL('copy.glsl'); env.GLES2_GLSL("copy.glsl")
# env.GLES2_GLSL('resolve.glsl'); # env.GLES2_GLSL('resolve.glsl');
env.GLES2_GLSL('canvas.glsl'); env.GLES2_GLSL("canvas.glsl")
env.GLES2_GLSL('canvas_shadow.glsl'); env.GLES2_GLSL("canvas_shadow.glsl")
env.GLES2_GLSL('scene.glsl'); env.GLES2_GLSL("scene.glsl")
env.GLES2_GLSL('cubemap_filter.glsl'); env.GLES2_GLSL("cubemap_filter.glsl")
env.GLES2_GLSL('cube_to_dp.glsl'); env.GLES2_GLSL("cube_to_dp.glsl")
# env.GLES2_GLSL('blend_shape.glsl'); # env.GLES2_GLSL('blend_shape.glsl');
# env.GLES2_GLSL('screen_space_reflection.glsl'); # env.GLES2_GLSL('screen_space_reflection.glsl');
env.GLES2_GLSL('effect_blur.glsl'); env.GLES2_GLSL("effect_blur.glsl")
# env.GLES2_GLSL('subsurf_scattering.glsl'); # env.GLES2_GLSL('subsurf_scattering.glsl');
# env.GLES2_GLSL('ssao.glsl'); # env.GLES2_GLSL('ssao.glsl');
# env.GLES2_GLSL('ssao_minify.glsl'); # env.GLES2_GLSL('ssao_minify.glsl');
# env.GLES2_GLSL('ssao_blur.glsl'); # env.GLES2_GLSL('ssao_blur.glsl');
# env.GLES2_GLSL('exposure.glsl'); # env.GLES2_GLSL('exposure.glsl');
env.GLES2_GLSL('tonemap.glsl'); env.GLES2_GLSL("tonemap.glsl")
# env.GLES2_GLSL('particles.glsl'); # env.GLES2_GLSL('particles.glsl');
env.GLES2_GLSL('lens_distorted.glsl'); env.GLES2_GLSL("lens_distorted.glsl")
View file
@ -1,11 +1,11 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env_png = env.Clone() env_png = env.Clone()
# Thirdparty source files # Thirdparty source files
if env['builtin_libpng']: if env["builtin_libpng"]:
thirdparty_dir = "#thirdparty/libpng/" thirdparty_dir = "#thirdparty/libpng/"
thirdparty_sources = [ thirdparty_sources = [
"png.c", "png.c",
@ -32,6 +32,7 @@ if env['builtin_libpng']:
# Currently .ASM filter_neon.S does not compile on NT. # Currently .ASM filter_neon.S does not compile on NT.
import os import os
use_neon = "neon_enabled" in env and env["neon_enabled"] and os.name != "nt" use_neon = "neon_enabled" in env and env["neon_enabled"] and os.name != "nt"
if use_neon: if use_neon:
env_png.Append(CPPDEFINES=[("PNG_ARM_NEON_OPT", 2)]) env_png.Append(CPPDEFINES=[("PNG_ARM_NEON_OPT", 2)])
@ -45,7 +46,7 @@ if env['builtin_libpng']:
if use_neon: if use_neon:
env_neon = env_thirdparty.Clone() env_neon = env_thirdparty.Clone()
if "S_compiler" in env: if "S_compiler" in env:
env_neon['CC'] = env['S_compiler'] env_neon["CC"] = env["S_compiler"]
neon_sources = [] neon_sources = []
neon_sources.append(env_neon.Object(thirdparty_dir + "/arm/arm_init.c")) neon_sources.append(env_neon.Object(thirdparty_dir + "/arm/arm_init.c"))
neon_sources.append(env_neon.Object(thirdparty_dir + "/arm/filter_neon_intrinsics.c")) neon_sources.append(env_neon.Object(thirdparty_dir + "/arm/filter_neon_intrinsics.c"))
@ -56,4 +57,4 @@ if env['builtin_libpng']:
# Godot source files # Godot source files
env_png.add_source_files(env.drivers_sources, "*.cpp") env_png.add_source_files(env.drivers_sources, "*.cpp")
Export('env') Export("env")
View file
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,17 +1,17 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env_spirv_reflect = env.Clone() env_spirv_reflect = env.Clone()
env_spirv_reflect.disable_warnings() env_spirv_reflect.disable_warnings()
thirdparty_dir = "#thirdparty/spirv-reflect/" thirdparty_dir = "#thirdparty/spirv-reflect/"
thirdparty_sources = [ thirdparty_sources = [
"spirv_reflect.c" "spirv_reflect.c",
] ]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
env_spirv_reflect.add_source_files(env.drivers_sources, thirdparty_sources) env_spirv_reflect.add_source_files(env.drivers_sources, thirdparty_sources)
Export('env') Export("env")
View file
@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
env["check_c_headers"] = [ [ "mntent.h", "HAVE_MNTENT" ] ] env["check_c_headers"] = [["mntent.h", "HAVE_MNTENT"]]
View file
@ -1,10 +1,10 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
if env['builtin_vulkan']: if env["builtin_vulkan"]:
# Use bundled Vulkan headers # Use bundled Vulkan headers
thirdparty_dir = "#thirdparty/vulkan" thirdparty_dir = "#thirdparty/vulkan"
env.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "/include", thirdparty_dir + "/loader"]) env.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "/include", thirdparty_dir + "/loader"])
@ -27,48 +27,56 @@ if env['builtin_vulkan']:
] ]
vma_sources = [thirdparty_dir + "/vk_mem_alloc.cpp"] vma_sources = [thirdparty_dir + "/vk_mem_alloc.cpp"]
if env['platform'] == "windows": if env["platform"] == "windows":
loader_sources.append("dirent_on_windows.c") loader_sources.append("dirent_on_windows.c")
loader_sources.append("dxgi_loader.c") loader_sources.append("dxgi_loader.c")
env_thirdparty.AppendUnique(CPPDEFINES=[ env_thirdparty.AppendUnique(
CPPDEFINES=[
'VK_USE_PLATFORM_WIN32_KHR', "VK_USE_PLATFORM_WIN32_KHR",
'VULKAN_NON_CMAKE_BUILD', "VULKAN_NON_CMAKE_BUILD",
'WIN32_LEAN_AND_MEAN', "WIN32_LEAN_AND_MEAN",
'API_NAME=\\"%s\\"' % 'Vulkan' 'API_NAME=\\"%s\\"' % "Vulkan",
]) ]
)
if not env.msvc: # Windows 7+, missing in mingw headers if not env.msvc:  # Windows 7+, missing in mingw headers
env_thirdparty.AppendUnique(CPPDEFINES=[ env_thirdparty.AppendUnique(
"CM_GETIDLIST_FILTER_CLASS=0x00000200", CPPDEFINES=["CM_GETIDLIST_FILTER_CLASS=0x00000200", "CM_GETIDLIST_FILTER_PRESENT=0x00000100"]
"CM_GETIDLIST_FILTER_PRESENT=0x00000100" )
])
elif env['platform'] == "osx": elif env["platform"] == "osx":
env_thirdparty.AppendUnique(CPPDEFINES=[ env_thirdparty.AppendUnique(
CPPDEFINES=[
'VK_USE_PLATFORM_MACOS_MVK', "VK_USE_PLATFORM_MACOS_MVK",
'VULKAN_NON_CMAKE_BUILD', "VULKAN_NON_CMAKE_BUILD",
'SYSCONFDIR=\\"%s\\"' % '/etc', 'SYSCONFDIR=\\"%s\\"' % "/etc",
'FALLBACK_DATA_DIRS=\\"%s\\"' % '/usr/local/share:/usr/share', 'FALLBACK_DATA_DIRS=\\"%s\\"' % "/usr/local/share:/usr/share",
'FALLBACK_CONFIG_DIRS=\\"%s\\"' % '/etc/xdg' 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % "/etc/xdg",
]) ]
)
elif env['platform'] == "iphone": elif env["platform"] == "iphone":
env_thirdparty.AppendUnique(CPPDEFINES=[ env_thirdparty.AppendUnique(
CPPDEFINES=[
'VK_USE_PLATFORM_IOS_MVK', "VK_USE_PLATFORM_IOS_MVK",
'VULKAN_NON_CMAKE_BUILD', "VULKAN_NON_CMAKE_BUILD",
'SYSCONFDIR=\\"%s\\"' % '/etc', 'SYSCONFDIR=\\"%s\\"' % "/etc",
'FALLBACK_DATA_DIRS=\\"%s\\"' % '/usr/local/share:/usr/share', 'FALLBACK_DATA_DIRS=\\"%s\\"' % "/usr/local/share:/usr/share",
'FALLBACK_CONFIG_DIRS=\\"%s\\"' % '/etc/xdg' 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % "/etc/xdg",
]) ]
)
elif env['platform'] == "linuxbsd": elif env["platform"] == "linuxbsd":
env_thirdparty.AppendUnique(CPPDEFINES=[ env_thirdparty.AppendUnique(
CPPDEFINES=[
'VK_USE_PLATFORM_XLIB_KHR', "VK_USE_PLATFORM_XLIB_KHR",
'VULKAN_NON_CMAKE_BUILD', "VULKAN_NON_CMAKE_BUILD",
'SYSCONFDIR=\\"%s\\"' % '/etc', 'SYSCONFDIR=\\"%s\\"' % "/etc",
'FALLBACK_DATA_DIRS=\\"%s\\"' % '/usr/local/share:/usr/share', 'FALLBACK_DATA_DIRS=\\"%s\\"' % "/usr/local/share:/usr/share",
'FALLBACK_CONFIG_DIRS=\\"%s\\"' % '/etc/xdg' 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % "/etc/xdg",
]) ]
)
import platform import platform
if (platform.system() == "Linux"):
if platform.system() == "Linux":
# In glibc since 2.17 and musl libc since 1.1.24. Used by loader.c. # In glibc since 2.17 and musl libc since 1.1.24. Used by loader.c.
env_thirdparty.AppendUnique(CPPDEFINES=['HAVE_SECURE_GETENV']) env_thirdparty.AppendUnique(CPPDEFINES=["HAVE_SECURE_GETENV"])
loader_sources = [thirdparty_dir + "/loader/" + file for file in loader_sources] loader_sources = [thirdparty_dir + "/loader/" + file for file in loader_sources]
env_thirdparty.add_source_files(env.drivers_sources, loader_sources) env_thirdparty.add_source_files(env.drivers_sources, loader_sources)
View file
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
# Driver source files # Driver source files
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
# Driver source files # Driver source files
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
View file
@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.drivers_sources, "*.cpp") env.add_source_files(env.drivers_sources, "*.cpp")
env.Append(CPPDEFINES=['XAUDIO2_ENABLED']) env.Append(CPPDEFINES=["XAUDIO2_ENABLED"])
env.Append(LINKFLAGS=['xaudio2_8.lib']) env.Append(LINKFLAGS=["xaudio2_8.lib"])
View file
@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.editor_sources = [] env.editor_sources = []
@ -16,24 +16,24 @@ def _make_doc_data_class_path(to_path):
g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n") g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
g.write("struct _DocDataClassPath { const char* name; const char* path; };\n") g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n"); g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
for c in sorted(env.doc_class_path): for c in sorted(env.doc_class_path):
g.write("\t{\"" + c + "\", \"" + env.doc_class_path[c] + "\"},\n") g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
g.write("\t{NULL, NULL}\n") g.write("\t{NULL, NULL}\n")
g.write("};\n") g.write("};\n")
g.close() g.close()
if env['tools']: if env["tools"]:
# Register exporters # Register exporters
reg_exporters_inc = '#include "register_exporters.h"\n' reg_exporters_inc = '#include "register_exporters.h"\n'
reg_exporters = 'void register_exporters() {\n' reg_exporters = "void register_exporters() {\n"
for e in env.platform_exporters: for e in env.platform_exporters:
env.add_source_files(env.editor_sources, "#platform/" + e + "/export/export.cpp") env.add_source_files(env.editor_sources, "#platform/" + e + "/export/export.cpp")
reg_exporters += '\tregister_' + e + '_exporter();\n' reg_exporters += "\tregister_" + e + "_exporter();\n"
reg_exporters_inc += '#include "platform/' + e + '/export/export.h"\n' reg_exporters_inc += '#include "platform/' + e + '/export/export.h"\n'
reg_exporters += '}\n' reg_exporters += "}\n"
# NOTE: It is safe to generate this file here, since this is still executed serially # NOTE: It is safe to generate this file here, since this is still executed serially
with open("register_exporters.gen.cpp", "w", encoding="utf-8") as f: with open("register_exporters.gen.cpp", "w", encoding="utf-8") as f:
@ -50,12 +50,12 @@ if env['tools']:
for d in doc_dirs: for d in doc_dirs:
try: try:
for f in os.listdir(os.path.join(env.Dir('#').abspath, d)): for f in os.listdir(os.path.join(env.Dir("#").abspath, d)):
docs.append("#" + os.path.join(d, f)) docs.append("#" + os.path.join(d, f))
except OSError: except OSError:
pass pass
_make_doc_data_class_path(os.path.join(env.Dir('#').abspath, "editor")) _make_doc_data_class_path(os.path.join(env.Dir("#").abspath, "editor"))
docs = sorted(docs) docs = sorted(docs)
env.Depends("#editor/doc_data_compressed.gen.h", docs) env.Depends("#editor/doc_data_compressed.gen.h", docs)
@ -63,32 +63,36 @@ if env['tools']:
import glob import glob
path = env.Dir('.').abspath path = env.Dir(".").abspath
# Editor translations # Editor translations
tlist = glob.glob(path + "/translations/*.po") tlist = glob.glob(path + "/translations/*.po")
env.Depends('#editor/editor_translations.gen.h', tlist) env.Depends("#editor/editor_translations.gen.h", tlist)
env.CommandNoCache('#editor/editor_translations.gen.h', tlist, run_in_subprocess(editor_builders.make_editor_translations_header)) env.CommandNoCache(
"#editor/editor_translations.gen.h", tlist, run_in_subprocess(editor_builders.make_editor_translations_header)
)
# Documentation translations # Documentation translations
tlist = glob.glob(env.Dir("#doc").abspath + "/translations/*.po") tlist = glob.glob(env.Dir("#doc").abspath + "/translations/*.po")
env.Depends('#editor/doc_translations.gen.h', tlist) env.Depends("#editor/doc_translations.gen.h", tlist)
env.CommandNoCache('#editor/doc_translations.gen.h', tlist, run_in_subprocess(editor_builders.make_doc_translations_header)) env.CommandNoCache(
"#editor/doc_translations.gen.h", tlist, run_in_subprocess(editor_builders.make_doc_translations_header)
)
# Fonts # Fonts
flist = glob.glob(path + "/../thirdparty/fonts/*.ttf") flist = glob.glob(path + "/../thirdparty/fonts/*.ttf")
flist.extend(glob.glob(path + "/../thirdparty/fonts/*.otf")) flist.extend(glob.glob(path + "/../thirdparty/fonts/*.otf"))
flist.sort() flist.sort()
env.Depends('#editor/builtin_fonts.gen.h', flist) env.Depends("#editor/builtin_fonts.gen.h", flist)
env.CommandNoCache('#editor/builtin_fonts.gen.h', flist, run_in_subprocess(editor_builders.make_fonts_header)) env.CommandNoCache("#editor/builtin_fonts.gen.h", flist, run_in_subprocess(editor_builders.make_fonts_header))
env.add_source_files(env.editor_sources, "*.cpp") env.add_source_files(env.editor_sources, "*.cpp")
SConscript('debugger/SCsub') SConscript("debugger/SCsub")
SConscript('fileserver/SCsub') SConscript("fileserver/SCsub")
SConscript('icons/SCsub') SConscript("icons/SCsub")
SConscript('import/SCsub') SConscript("import/SCsub")
SConscript('plugins/SCsub') SConscript("plugins/SCsub")
lib = env.add_library("editor", env.editor_sources) lib = env.add_library("editor", env.editor_sources)
env.Prepend(LIBS=[lib]) env.Prepend(LIBS=[lib])
View file
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.editor_sources, "*.cpp") env.add_source_files(env.editor_sources, "*.cpp")
View file
@ -25,6 +25,7 @@ def make_doc_header(target, source, env):
buf = (docbegin + buf + docend).encode("utf-8") buf = (docbegin + buf + docend).encode("utf-8")
decomp_size = len(buf) decomp_size = len(buf)
import zlib import zlib
buf = zlib.compress(buf) buf = zlib.compress(buf)
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
@ -55,7 +56,7 @@ def make_fonts_header(target, source, env):
# saving uncompressed, since freetype will reference from memory pointer # saving uncompressed, since freetype will reference from memory pointer
xl_names = [] xl_names = []
for i in range(len(source)): for i in range(len(source)):
with open(source[i], "rb")as f: with open(source[i], "rb") as f:
buf = f.read() buf = f.read()
name = os.path.splitext(os.path.basename(source[i]))[0] name = os.path.splitext(os.path.basename(source[i]))[0]
@ -111,7 +112,9 @@ def make_translations_header(target, source, env, category):
g.write("};\n\n") g.write("};\n\n")
g.write("static {}TranslationList _{}_translations[] = {{\n".format(category.capitalize(), category)) g.write("static {}TranslationList _{}_translations[] = {{\n".format(category.capitalize(), category))
for x in xl_names: for x in xl_names:
g.write("\t{{ \"{}\", {}, {}, _{}_translation_{}_compressed }},\n".format(x[0], str(x[1]), str(x[2]), category, x[0])) g.write(
'\t{{ "{}", {}, {}, _{}_translation_{}_compressed }},\n'.format(x[0], str(x[1]), str(x[2]), category, x[0])
)
g.write("\t{NULL, 0, 0, NULL}\n") g.write("\t{NULL, 0, 0, NULL}\n")
g.write("};\n") g.write("};\n")
@ -128,5 +131,5 @@ def make_doc_translations_header(target, source, env):
make_translations_header(target, source, env, "doc") make_translations_header(target, source, env, "doc")
if __name__ == '__main__': if __name__ == "__main__":
subprocess_main(globals()) subprocess_main(globals())
View file
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.editor_sources, "*.cpp") env.add_source_files(env.editor_sources, "*.cpp")
View file
@ -1,21 +1,21 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
from platform_methods import run_in_subprocess from platform_methods import run_in_subprocess
import editor_icons_builders import editor_icons_builders
make_editor_icons_builder = Builder(action=run_in_subprocess(editor_icons_builders.make_editor_icons_action), make_editor_icons_builder = Builder(
suffix='.h', action=run_in_subprocess(editor_icons_builders.make_editor_icons_action), suffix=".h", src_suffix=".svg"
src_suffix='.svg') )
env['BUILDERS']['MakeEditorIconsBuilder'] = make_editor_icons_builder env["BUILDERS"]["MakeEditorIconsBuilder"] = make_editor_icons_builder
# Editor's own icons # Editor's own icons
icon_sources = Glob("*.svg") icon_sources = Glob("*.svg")
# Module icons # Module icons
for module_icons in env.module_icons_paths: for module_icons in env.module_icons_paths:
icon_sources += Glob('#' + module_icons + "/*.svg") icon_sources += Glob("#" + module_icons + "/*.svg")
env.Alias('editor_icons', [env.MakeEditorIconsBuilder('#editor/editor_icons.gen.h', icon_sources)]) env.Alias("editor_icons", [env.MakeEditorIconsBuilder("#editor/editor_icons.gen.h", icon_sources)])

View file

@ -22,16 +22,16 @@ def make_editor_icons_action(target, source, env):
icons_string.write('\t"') icons_string.write('\t"')
with open(fname, 'rb') as svgf: with open(fname, "rb") as svgf:
b = svgf.read(1) b = svgf.read(1)
while(len(b) == 1): while len(b) == 1:
icons_string.write("\\" + str(hex(ord(b)))[1:]) icons_string.write("\\" + str(hex(ord(b)))[1:])
b = svgf.read(1) b = svgf.read(1)
icons_string.write('"') icons_string.write('"')
if fname != svg_icons[-1]: if fname != svg_icons[-1]:
icons_string.write(",") icons_string.write(",")
icons_string.write('\n') icons_string.write("\n")
s = StringIO() s = StringIO()
s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
@ -40,7 +40,7 @@ def make_editor_icons_action(target, source, env):
s.write("static const int editor_icons_count = {};\n".format(len(svg_icons))) s.write("static const int editor_icons_count = {};\n".format(len(svg_icons)))
s.write("static const char *editor_icons_sources[] = {\n") s.write("static const char *editor_icons_sources[] = {\n")
s.write(icons_string.getvalue()) s.write(icons_string.getvalue())
s.write('};\n\n') s.write("};\n\n")
s.write("static const char *editor_icons_names[] = {\n") s.write("static const char *editor_icons_names[] = {\n")
# this is used to store the indices of thumbnail icons # this is used to store the indices of thumbnail icons
@ -63,11 +63,11 @@ def make_editor_icons_action(target, source, env):
if fname != svg_icons[-1]: if fname != svg_icons[-1]:
s.write(",") s.write(",")
s.write('\n') s.write("\n")
index += 1 index += 1
s.write('};\n') s.write("};\n")
if thumb_medium_indices: if thumb_medium_indices:
s.write("\n\n") s.write("\n\n")
@ -91,5 +91,5 @@ def make_editor_icons_action(target, source, env):
icons_string.close() icons_string.close()
if __name__ == '__main__': if __name__ == "__main__":
subprocess_main(globals()) subprocess_main(globals())

View file

@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.editor_sources, "*.cpp") env.add_source_files(env.editor_sources, "*.cpp")

View file

@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
env.add_source_files(env.editor_sources, "*.cpp") env.add_source_files(env.editor_sources, "*.cpp")

View file

@ -10,23 +10,23 @@ import sys
line_nb = False line_nb = False
for arg in sys.argv[1:]: for arg in sys.argv[1:]:
if (arg == "--with-line-nb"): if arg == "--with-line-nb":
print("Enabling line numbers in the context locations.") print("Enabling line numbers in the context locations.")
line_nb = True line_nb = True
else: else:
os.sys.exit("Non supported argument '" + arg + "'. Aborting.") os.sys.exit("Non supported argument '" + arg + "'. Aborting.")
if (not os.path.exists("editor")): if not os.path.exists("editor"):
os.sys.exit("ERROR: This script should be started from the root of the git repo.") os.sys.exit("ERROR: This script should be started from the root of the git repo.")
matches = [] matches = []
for root, dirnames, filenames in os.walk('.'): for root, dirnames, filenames in os.walk("."):
dirnames[:] = [d for d in dirnames if d not in ["thirdparty"]] dirnames[:] = [d for d in dirnames if d not in ["thirdparty"]]
for filename in fnmatch.filter(filenames, '*.cpp'): for filename in fnmatch.filter(filenames, "*.cpp"):
matches.append(os.path.join(root, filename)) matches.append(os.path.join(root, filename))
for filename in fnmatch.filter(filenames, '*.h'): for filename in fnmatch.filter(filenames, "*.h"):
matches.append(os.path.join(root, filename)) matches.append(os.path.join(root, filename))
matches.sort() matches.sort()
@ -51,52 +51,54 @@ msgstr ""
"Content-Transfer-Encoding: 8-bit\\n" "Content-Transfer-Encoding: 8-bit\\n"
""" """
def process_file(f, fname): def process_file(f, fname):
global main_po, unique_str, unique_loc global main_po, unique_str, unique_loc
l = f.readline() l = f.readline()
lc = 1 lc = 1
while (l): while l:
patterns = ['RTR(\"', 'TTR(\"', 'TTRC(\"'] patterns = ['RTR("', 'TTR("', 'TTRC("']
idx = 0 idx = 0
pos = 0 pos = 0
while (pos >= 0): while pos >= 0:
pos = l.find(patterns[idx], pos) pos = l.find(patterns[idx], pos)
if (pos == -1): if pos == -1:
if (idx < len(patterns) - 1): if idx < len(patterns) - 1:
idx += 1 idx += 1
pos = 0 pos = 0
continue continue
pos += len(patterns[idx]) pos += len(patterns[idx])
msg = "" msg = ""
while (pos < len(l) and (l[pos] != '"' or l[pos - 1] == '\\')): while pos < len(l) and (l[pos] != '"' or l[pos - 1] == "\\"):
msg += l[pos] msg += l[pos]
pos += 1 pos += 1
location = os.path.relpath(fname).replace('\\', '/') location = os.path.relpath(fname).replace("\\", "/")
if (line_nb): if line_nb:
location += ":" + str(lc) location += ":" + str(lc)
if (not msg in unique_str): if not msg in unique_str:
main_po += "\n#: " + location + "\n" main_po += "\n#: " + location + "\n"
main_po += 'msgid "' + msg + '"\n' main_po += 'msgid "' + msg + '"\n'
main_po += 'msgstr ""\n' main_po += 'msgstr ""\n'
unique_str.append(msg) unique_str.append(msg)
unique_loc[msg] = [location] unique_loc[msg] = [location]
elif (not location in unique_loc[msg]): elif not location in unique_loc[msg]:
# Add additional location to previous occurrence too # Add additional location to previous occurrence too
msg_pos = main_po.find('\nmsgid "' + msg + '"') msg_pos = main_po.find('\nmsgid "' + msg + '"')
if (msg_pos == -1): if msg_pos == -1:
print("Someone apparently thought writing Python was as easy as GDScript. Ping Akien.") print("Someone apparently thought writing Python was as easy as GDScript. Ping Akien.")
main_po = main_po[:msg_pos] + ' ' + location + main_po[msg_pos:] main_po = main_po[:msg_pos] + " " + location + main_po[msg_pos:]
unique_loc[msg].append(location) unique_loc[msg].append(location)
l = f.readline() l = f.readline()
lc += 1 lc += 1
print("Updating the editor.pot template...") print("Updating the editor.pot template...")
for fname in matches: for fname in matches:
@ -106,7 +108,7 @@ for fname in matches:
with open("editor.pot", "w") as f: with open("editor.pot", "w") as f:
f.write(main_po) f.write(main_po)
if (os.name == "posix"): if os.name == "posix":
print("Wrapping template at 79 characters for compatibility with Weblate.") print("Wrapping template at 79 characters for compatibility with Weblate.")
os.system("msgmerge -w79 editor.pot editor.pot > editor.pot.wrap") os.system("msgmerge -w79 editor.pot editor.pot > editor.pot.wrap")
shutil.move("editor.pot.wrap", "editor.pot") shutil.move("editor.pot.wrap", "editor.pot")
@ -114,7 +116,7 @@ if (os.name == "posix"):
shutil.move("editor.pot", "editor/translations/editor.pot") shutil.move("editor.pot", "editor/translations/editor.pot")
# TODO: Make that in a portable way, if we care; if not, kudos to Unix users # TODO: Make that in a portable way, if we care; if not, kudos to Unix users
if (os.name == "posix"): if os.name == "posix":
added = subprocess.check_output(r"git diff editor/translations/editor.pot | grep \+msgid | wc -l", shell=True) added = subprocess.check_output(r"git diff editor/translations/editor.pot | grep \+msgid | wc -l", shell=True)
removed = subprocess.check_output(r"git diff editor/translations/editor.pot | grep \\\-msgid | wc -l", shell=True) removed = subprocess.check_output(r"git diff editor/translations/editor.pot | grep \\\-msgid | wc -l", shell=True)
print("\n# Template changes compared to the staged status:") print("\n# Template changes compared to the staged status:")
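
One detail worth noting in the extract.py hunks above: black changes layout only, it does not touch semantics or lint-level style, so constructs like `not msg in unique_str` survive the reformat unchanged (a linter such as pycodestyle would flag them as E713, black will not). A small sketch of that boundary:

    if not msg in unique_str:    # left untouched by black; idiomatic form would be "msg not in unique_str"
        unique_str.append(msg)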

View file

@ -7,7 +7,6 @@ from platform_methods import subprocess_main
class LegacyGLHeaderStruct: class LegacyGLHeaderStruct:
def __init__(self): def __init__(self):
self.vertex_lines = [] self.vertex_lines = []
self.fragment_lines = [] self.fragment_lines = []
@ -73,7 +72,7 @@ def include_file_in_legacygl_header(filename, header_data, depth):
ifdefline = line.replace("#ifdef ", "").strip() ifdefline = line.replace("#ifdef ", "").strip()
if line.find("_EN_") != -1: if line.find("_EN_") != -1:
enumbase = ifdefline[:ifdefline.find("_EN_")] enumbase = ifdefline[: ifdefline.find("_EN_")]
ifdefline = ifdefline.replace("_EN_", "_") ifdefline = ifdefline.replace("_EN_", "_")
line = line.replace("_EN_", "_") line = line.replace("_EN_", "_")
if enumbase not in header_data.enums: if enumbase not in header_data.enums:
@ -86,12 +85,12 @@ def include_file_in_legacygl_header(filename, header_data, depth):
if line.find("uniform") != -1 and line.lower().find("texunit:") != -1: if line.find("uniform") != -1 and line.lower().find("texunit:") != -1:
# texture unit # texture unit
texunitstr = line[line.find(":") + 1:].strip() texunitstr = line[line.find(":") + 1 :].strip()
if texunitstr == "auto": if texunitstr == "auto":
texunit = "-1" texunit = "-1"
else: else:
texunit = str(int(texunitstr)) texunit = str(int(texunitstr))
uline = line[:line.lower().find("//")] uline = line[: line.lower().find("//")]
uline = uline.replace("uniform", "") uline = uline.replace("uniform", "")
uline = uline.replace("highp", "") uline = uline.replace("highp", "")
uline = uline.replace(";", "") uline = uline.replace(";", "")
@ -99,10 +98,10 @@ def include_file_in_legacygl_header(filename, header_data, depth):
for x in lines: for x in lines:
x = x.strip() x = x.strip()
x = x[x.rfind(" ") + 1:] x = x[x.rfind(" ") + 1 :]
if x.find("[") != -1: if x.find("[") != -1:
# uniform array # uniform array
x = x[:x.find("[")] x = x[: x.find("[")]
if not x in header_data.texunit_names: if not x in header_data.texunit_names:
header_data.texunits += [(x, texunit)] header_data.texunits += [(x, texunit)]
@ -110,10 +109,10 @@ def include_file_in_legacygl_header(filename, header_data, depth):
elif line.find("uniform") != -1 and line.lower().find("ubo:") != -1: elif line.find("uniform") != -1 and line.lower().find("ubo:") != -1:
# uniform buffer object # uniform buffer object
ubostr = line[line.find(":") + 1:].strip() ubostr = line[line.find(":") + 1 :].strip()
ubo = str(int(ubostr)) ubo = str(int(ubostr))
uline = line[:line.lower().find("//")] uline = line[: line.lower().find("//")]
uline = uline[uline.find("uniform") + len("uniform"):] uline = uline[uline.find("uniform") + len("uniform") :]
uline = uline.replace("highp", "") uline = uline.replace("highp", "")
uline = uline.replace(";", "") uline = uline.replace(";", "")
uline = uline.replace("{", "").strip() uline = uline.replace("{", "").strip()
@ -121,10 +120,10 @@ def include_file_in_legacygl_header(filename, header_data, depth):
for x in lines: for x in lines:
x = x.strip() x = x.strip()
x = x[x.rfind(" ") + 1:] x = x[x.rfind(" ") + 1 :]
if x.find("[") != -1: if x.find("[") != -1:
# uniform array # uniform array
x = x[:x.find("[")] x = x[: x.find("[")]
if not x in header_data.ubo_names: if not x in header_data.ubo_names:
header_data.ubos += [(x, ubo)] header_data.ubos += [(x, ubo)]
@ -137,10 +136,10 @@ def include_file_in_legacygl_header(filename, header_data, depth):
for x in lines: for x in lines:
x = x.strip() x = x.strip()
x = x[x.rfind(" ") + 1:] x = x[x.rfind(" ") + 1 :]
if x.find("[") != -1: if x.find("[") != -1:
# uniform array # uniform array
x = x[:x.find("[")] x = x[: x.find("[")]
if not x in header_data.uniforms: if not x in header_data.uniforms:
header_data.uniforms += [x] header_data.uniforms += [x]
@ -150,7 +149,7 @@ def include_file_in_legacygl_header(filename, header_data, depth):
uline = uline.replace("attribute ", "") uline = uline.replace("attribute ", "")
uline = uline.replace("highp ", "") uline = uline.replace("highp ", "")
uline = uline.replace(";", "") uline = uline.replace(";", "")
uline = uline[uline.find(" "):].strip() uline = uline[uline.find(" ") :].strip()
if uline.find("//") != -1: if uline.find("//") != -1:
name, bind = uline.split("//") name, bind = uline.split("//")
@ -163,7 +162,7 @@ def include_file_in_legacygl_header(filename, header_data, depth):
uline = line.replace("out ", "") uline = line.replace("out ", "")
uline = uline.replace("highp ", "") uline = uline.replace("highp ", "")
uline = uline.replace(";", "") uline = uline.replace(";", "")
uline = uline[uline.find(" "):].strip() uline = uline[uline.find(" ") :].strip()
if uline.find("//") != -1: if uline.find("//") != -1:
name, bind = uline.split("//") name, bind = uline.split("//")
@ -200,17 +199,19 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n") fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n")
out_file_base = out_file out_file_base = out_file
out_file_base = out_file_base[out_file_base.rfind("/") + 1:] out_file_base = out_file_base[out_file_base.rfind("/") + 1 :]
out_file_base = out_file_base[out_file_base.rfind("\\") + 1:] out_file_base = out_file_base[out_file_base.rfind("\\") + 1 :]
out_file_ifdef = out_file_base.replace(".", "_").upper() out_file_ifdef = out_file_base.replace(".", "_").upper()
fd.write("#ifndef " + out_file_ifdef + class_suffix + "_120\n") fd.write("#ifndef " + out_file_ifdef + class_suffix + "_120\n")
fd.write("#define " + out_file_ifdef + class_suffix + "_120\n") fd.write("#define " + out_file_ifdef + class_suffix + "_120\n")
out_file_class = out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix out_file_class = (
out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix
)
fd.write("\n\n") fd.write("\n\n")
fd.write("#include \"" + include + "\"\n\n\n") fd.write('#include "' + include + '"\n\n\n')
fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n") fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n")
fd.write("\t virtual String get_shader_name() const { return \"" + out_file_class + "\"; }\n") fd.write('\t virtual String get_shader_name() const { return "' + out_file_class + '"; }\n')
fd.write("public:\n\n") fd.write("public:\n\n")
@ -228,29 +229,64 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n") fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n")
if header_data.conditionals: if header_data.conditionals:
fd.write("\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n") fd.write(
"\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n"
)
fd.write("\t#ifdef DEBUG_ENABLED\n ") fd.write("\t#ifdef DEBUG_ENABLED\n ")
fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; if (!is_version_valid()) return; ERR_FAIL_COND( get_active()!=this ); \n\n ") fd.write(
"\t#define _FU if (get_uniform(p_uniform)<0) return; if (!is_version_valid()) return; ERR_FAIL_COND( get_active()!=this ); \n\n "
)
fd.write("\t#else\n ") fd.write("\t#else\n ")
fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; \n\n ") fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; \n\n ")
fd.write("\t#endif\n") fd.write("\t#endif\n")
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n") fd.write(
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n") "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n"
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") )
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") fd.write(
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n"
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") )
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") fd.write(
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n") "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Color& p_color) { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(get_uniform(p_uniform),1,col); }\n\n") )
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector2& p_vec2) { _FU GLfloat vec2[2]={p_vec2.x,p_vec2.y}; glUniform2fv(get_uniform(p_uniform),1,vec2); }\n\n") fd.write(
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Size2i& p_vec2) { _FU GLint vec2[2]={p_vec2.x,p_vec2.y}; glUniform2iv(get_uniform(p_uniform),1,vec2); }\n\n") "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector3& p_vec3) { _FU GLfloat vec3[3]={p_vec3.x,p_vec3.y,p_vec3.z}; glUniform3fv(get_uniform(p_uniform),1,vec3); }\n\n") )
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n") fd.write(
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n") "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n") )
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Color& p_color) { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(get_uniform(p_uniform),1,col); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector2& p_vec2) { _FU GLfloat vec2[2]={p_vec2.x,p_vec2.y}; glUniform2fv(get_uniform(p_uniform),1,vec2); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Size2i& p_vec2) { _FU GLint vec2[2]={p_vec2.x,p_vec2.y}; glUniform2iv(get_uniform(p_uniform),1,vec2); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector3& p_vec3) { _FU GLfloat vec3[3]={p_vec3.x,p_vec3.y,p_vec3.z}; glUniform3fv(get_uniform(p_uniform),1,vec3); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n"
)
fd.write(
"\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n"
)
fd.write("""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) { _FU fd.write(
"""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) { _FU
const Transform &tr = p_transform; const Transform &tr = p_transform;
@ -276,9 +312,11 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix); glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
} }
""") """
)
fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform2D& p_transform) { _FU fd.write(
"""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform2D& p_transform) { _FU
const Transform2D &tr = p_transform; const Transform2D &tr = p_transform;
@ -304,9 +342,11 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix); glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
} }
""") """
)
fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) { _FU fd.write(
"""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) { _FU
GLfloat matrix[16]; GLfloat matrix[16];
@ -320,7 +360,8 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix); glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
} }
""") """
)
fd.write("\n\n#undef _FU\n\n\n") fd.write("\n\n#undef _FU\n\n\n")
@ -340,21 +381,25 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
x = header_data.enums[xv] x = header_data.enums[xv]
bits = 1 bits = 1
amt = len(x) amt = len(x)
while (2 ** bits < amt): while 2 ** bits < amt:
bits += 1 bits += 1
strs = "{" strs = "{"
for i in range(amt): for i in range(amt):
strs += "\"#define " + x[i] + "\\n\"," strs += '"#define ' + x[i] + '\\n",'
c = {} c = {}
c["set_mask"] = "uint64_t(" + str(i) + ")<<" + str(bitofs) c["set_mask"] = "uint64_t(" + str(i) + ")<<" + str(bitofs)
c["clear_mask"] = "((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")" c["clear_mask"] = (
"((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")"
)
enum_vals.append(c) enum_vals.append(c)
enum_constants.append(x[i]) enum_constants.append(x[i])
strs += "NULL}" strs += "NULL}"
fd.write("\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n") fd.write(
"\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n"
)
bitofs += bits bitofs += bits
fd.write("\t\t};\n\n") fd.write("\t\t};\n\n")
@ -373,7 +418,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("\t\tstatic const char* _conditional_strings[]={\n") fd.write("\t\tstatic const char* _conditional_strings[]={\n")
if header_data.conditionals: if header_data.conditionals:
for x in header_data.conditionals: for x in header_data.conditionals:
fd.write("\t\t\t\"#define " + x + "\\n\",\n") fd.write('\t\t\t"#define ' + x + '\\n",\n')
conditionals_found.append(x) conditionals_found.append(x)
fd.write("\t\t};\n\n") fd.write("\t\t};\n\n")
else: else:
@ -384,7 +429,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("\t\tstatic const char* _uniform_strings[]={\n") fd.write("\t\tstatic const char* _uniform_strings[]={\n")
if header_data.uniforms: if header_data.uniforms:
for x in header_data.uniforms: for x in header_data.uniforms:
fd.write("\t\t\t\"" + x + "\",\n") fd.write('\t\t\t"' + x + '",\n')
fd.write("\t\t};\n\n") fd.write("\t\t};\n\n")
else: else:
fd.write("\t\tstatic const char **_uniform_strings=NULL;\n") fd.write("\t\tstatic const char **_uniform_strings=NULL;\n")
@ -394,7 +439,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n") fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n")
for x in header_data.attributes: for x in header_data.attributes:
fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n") fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
fd.write("\t\t};\n\n") fd.write("\t\t};\n\n")
else: else:
fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n") fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n")
@ -408,9 +453,9 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
name = x[0] name = x[0]
cond = x[1] cond = x[1]
if cond in conditionals_found: if cond in conditionals_found:
fd.write("\t\t\t{\"" + name + "\"," + str(conditionals_found.index(cond)) + "},\n") fd.write('\t\t\t{"' + name + '",' + str(conditionals_found.index(cond)) + "},\n")
else: else:
fd.write("\t\t\t{\"" + name + "\",-1},\n") fd.write('\t\t\t{"' + name + '",-1},\n')
feedback_count += 1 feedback_count += 1
@ -424,7 +469,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
if header_data.texunits: if header_data.texunits:
fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n") fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n")
for x in header_data.texunits: for x in header_data.texunits:
fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n") fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
fd.write("\t\t};\n\n") fd.write("\t\t};\n\n")
else: else:
fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n") fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n")
@ -432,7 +477,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
if not gles2 and header_data.ubos: if not gles2 and header_data.ubos:
fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n") fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n")
for x in header_data.ubos: for x in header_data.ubos:
fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n") fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
fd.write("\t\t};\n\n") fd.write("\t\t};\n\n")
else: else:
if gles2: if gles2:
@ -445,7 +490,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
for c in x: for c in x:
fd.write(str(ord(c)) + ",") fd.write(str(ord(c)) + ",")
fd.write(str(ord('\n')) + ",") fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n") fd.write("\t\t0};\n\n")
fd.write("\t\tstatic const int _vertex_code_start=" + str(header_data.vertex_offset) + ";\n") fd.write("\t\tstatic const int _vertex_code_start=" + str(header_data.vertex_offset) + ";\n")
@ -455,28 +500,73 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
for c in x: for c in x:
fd.write(str(ord(c)) + ",") fd.write(str(ord(c)) + ",")
fd.write(str(ord('\n')) + ",") fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n") fd.write("\t\t0};\n\n")
fd.write("\t\tstatic const int _fragment_code_start=" + str(header_data.fragment_offset) + ";\n") fd.write("\t\tstatic const int _fragment_code_start=" + str(header_data.fragment_offset) + ";\n")
if output_attribs: if output_attribs:
if gles2: if gles2:
fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str( fd.write(
len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") "\t\tsetup(_conditional_strings,"
+ str(len(header_data.conditionals))
+ ",_uniform_strings,"
+ str(len(header_data.uniforms))
+ ",_attribute_pairs,"
+ str(len(header_data.attributes))
+ ", _texunit_pairs,"
+ str(len(header_data.texunits))
+ ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
)
else: else:
fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str( fd.write(
len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str( "\t\tsetup(_conditional_strings,"
feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + str(len(header_data.conditionals))
+ ",_uniform_strings,"
+ str(len(header_data.uniforms))
+ ",_attribute_pairs,"
+ str(len(header_data.attributes))
+ ", _texunit_pairs,"
+ str(len(header_data.texunits))
+ ",_ubo_pairs,"
+ str(len(header_data.ubos))
+ ",_feedbacks,"
+ str(feedback_count)
+ ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
)
else: else:
if gles2: if gles2:
fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str( fd.write(
len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str( "\t\tsetup(_conditional_strings,"
enum_value_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + str(len(header_data.conditionals))
+ ",_uniform_strings,"
+ str(len(header_data.uniforms))
+ ",_texunit_pairs,"
+ str(len(header_data.texunits))
+ ",_enums,"
+ str(len(header_data.enums))
+ ",_enum_values,"
+ str(enum_value_count)
+ ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
)
else: else:
fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str( fd.write(
len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(enum_value_count) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str( "\t\tsetup(_conditional_strings,"
feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + str(len(header_data.conditionals))
+ ",_uniform_strings,"
+ str(len(header_data.uniforms))
+ ",_texunit_pairs,"
+ str(len(header_data.texunits))
+ ",_enums,"
+ str(len(header_data.enums))
+ ",_enum_values,"
+ str(enum_value_count)
+ ",_ubo_pairs,"
+ str(len(header_data.ubos))
+ ",_feedbacks,"
+ str(feedback_count)
+ ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
)
fd.write("\t}\n\n") fd.write("\t}\n\n")
@ -495,12 +585,12 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
def build_gles2_headers(target, source, env): def build_gles2_headers(target, source, env):
for x in source: for x in source:
build_legacygl_header(str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True, gles2=True) build_legacygl_header(
str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True, gles2=True
)
class RDHeaderStruct: class RDHeaderStruct:
def __init__(self): def __init__(self):
self.vertex_lines = [] self.vertex_lines = []
self.fragment_lines = [] self.fragment_lines = []
@ -594,30 +684,30 @@ def build_rd_header(filename):
fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n") fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n")
out_file_base = out_file out_file_base = out_file
out_file_base = out_file_base[out_file_base.rfind("/") + 1:] out_file_base = out_file_base[out_file_base.rfind("/") + 1 :]
out_file_base = out_file_base[out_file_base.rfind("\\") + 1:] out_file_base = out_file_base[out_file_base.rfind("\\") + 1 :]
out_file_ifdef = out_file_base.replace(".", "_").upper() out_file_ifdef = out_file_base.replace(".", "_").upper()
fd.write("#ifndef " + out_file_ifdef + "_RD\n") fd.write("#ifndef " + out_file_ifdef + "_RD\n")
fd.write("#define " + out_file_ifdef + "_RD\n") fd.write("#define " + out_file_ifdef + "_RD\n")
out_file_class = out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "ShaderRD" out_file_class = out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "ShaderRD"
fd.write("\n") fd.write("\n")
fd.write("#include \"servers/rendering/rasterizer_rd/shader_rd.h\"\n\n") fd.write('#include "servers/rendering/rasterizer_rd/shader_rd.h"\n\n')
fd.write("class " + out_file_class + " : public ShaderRD {\n\n") fd.write("class " + out_file_class + " : public ShaderRD {\n\n")
fd.write("public:\n\n") fd.write("public:\n\n")
fd.write("\t" + out_file_class + "() {\n\n") fd.write("\t" + out_file_class + "() {\n\n")
if (len(header_data.compute_lines)): if len(header_data.compute_lines):
fd.write("\t\tstatic const char _compute_code[] = {\n") fd.write("\t\tstatic const char _compute_code[] = {\n")
for x in header_data.compute_lines: for x in header_data.compute_lines:
for c in x: for c in x:
fd.write(str(ord(c)) + ",") fd.write(str(ord(c)) + ",")
fd.write(str(ord('\n')) + ",") fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n") fd.write("\t\t0};\n\n")
fd.write("\t\tsetup(nullptr, nullptr, _compute_code, \"" + out_file_class + "\");\n") fd.write('\t\tsetup(nullptr, nullptr, _compute_code, "' + out_file_class + '");\n')
fd.write("\t}\n") fd.write("\t}\n")
else: else:
@ -626,17 +716,17 @@ def build_rd_header(filename):
for x in header_data.vertex_lines: for x in header_data.vertex_lines:
for c in x: for c in x:
fd.write(str(ord(c)) + ",") fd.write(str(ord(c)) + ",")
fd.write(str(ord('\n')) + ",") fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n") fd.write("\t\t0};\n\n")
fd.write("\t\tstatic const char _fragment_code[]={\n") fd.write("\t\tstatic const char _fragment_code[]={\n")
for x in header_data.fragment_lines: for x in header_data.fragment_lines:
for c in x: for c in x:
fd.write(str(ord(c)) + ",") fd.write(str(ord(c)) + ",")
fd.write(str(ord('\n')) + ",") fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n") fd.write("\t\t0};\n\n")
fd.write("\t\tsetup(_vertex_code, _fragment_code, nullptr, \"" + out_file_class + "\");\n") fd.write('\t\tsetup(_vertex_code, _fragment_code, nullptr, "' + out_file_class + '");\n')
fd.write("\t}\n") fd.write("\t}\n")
fd.write("};\n\n") fd.write("};\n\n")
@ -650,5 +740,5 @@ def build_rd_headers(target, source, env):
build_rd_header(str(x)) build_rd_header(str(x))
if __name__ == '__main__': if __name__ == "__main__":
subprocess_main(globals()) subprocess_main(globals())
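
Two patterns in the gles_builders.py hunks above deserve a note. Slices whose bounds are expressions get symmetric spacing around the colon, since PEP 8 treats the colon like a binary operator in that case, hence `x[: x.find("[")]` and `line[line.find(":") + 1 :]`, while simple bounds keep the tight form. And the long `setup(...)` writes built from `+` concatenation are exploded by black into one operand per line; a hypothetical follow-up refactor (not part of this commit, names taken from the generator code above) could rebuild one of them with an f-string so the call stays compact:

    # Hypothetical rewrite using an f-string; fd and header_data are the same
    # objects used by build_legacygl_header() above (GLES2 output_attribs branch shown).
    fd.write(
        f"\t\tsetup(_conditional_strings,{len(header_data.conditionals)},"
        f"_uniform_strings,{len(header_data.uniforms)},"
        f"_attribute_pairs,{len(header_data.attributes)},"
        f" _texunit_pairs,{len(header_data.texunits)},"
        "_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
    )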

View file

@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
from platform_methods import run_in_subprocess from platform_methods import run_in_subprocess
import main_builders import main_builders
@ -14,13 +14,15 @@ env.Depends("#main/splash.gen.h", "#main/splash.png")
env.CommandNoCache("#main/splash.gen.h", "#main/splash.png", run_in_subprocess(main_builders.make_splash)) env.CommandNoCache("#main/splash.gen.h", "#main/splash.png", run_in_subprocess(main_builders.make_splash))
env.Depends("#main/splash_editor.gen.h", "#main/splash_editor.png") env.Depends("#main/splash_editor.gen.h", "#main/splash_editor.png")
env.CommandNoCache("#main/splash_editor.gen.h", "#main/splash_editor.png", run_in_subprocess(main_builders.make_splash_editor)) env.CommandNoCache(
"#main/splash_editor.gen.h", "#main/splash_editor.png", run_in_subprocess(main_builders.make_splash_editor)
)
env.Depends("#main/app_icon.gen.h", "#main/app_icon.png") env.Depends("#main/app_icon.gen.h", "#main/app_icon.png")
env.CommandNoCache("#main/app_icon.gen.h", "#main/app_icon.png", run_in_subprocess(main_builders.make_app_icon)) env.CommandNoCache("#main/app_icon.gen.h", "#main/app_icon.png", run_in_subprocess(main_builders.make_app_icon))
if env["tools"]: if env["tools"]:
SConscript('tests/SCsub') SConscript("tests/SCsub")
lib = env.add_library("main", env.main_sources) lib = env.add_library("main", env.main_sources)
env.Prepend(LIBS=[lib]) env.Prepend(LIBS=[lib])
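
The main/SCsub hunk above is a compact demonstration of the 120-column setting: the splash.gen.h call still fits on one line and is left alone, while the splash_editor.gen.h call exceeds 120 characters and is wrapped onto an indented continuation line. The same rule, shown with hypothetical names:

    # Hypothetical calls, only to illustrate the wrapping threshold:
    env.CommandNoCache("#main/short.gen.h", "#main/short.png", run_in_subprocess(main_builders.make_short))
    env.CommandNoCache(
        "#main/splash_hidpi.gen.h", "#main/splash_hidpi.png", run_in_subprocess(main_builders.make_splash_hidpi)
    )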

View file

@ -18,7 +18,7 @@ def make_splash(target, source, env):
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
g.write("#ifndef BOOT_SPLASH_H\n") g.write("#ifndef BOOT_SPLASH_H\n")
g.write("#define BOOT_SPLASH_H\n") g.write("#define BOOT_SPLASH_H\n")
g.write('static const Color boot_splash_bg_color = Color(0.14, 0.14, 0.14);\n') g.write("static const Color boot_splash_bg_color = Color(0.14, 0.14, 0.14);\n")
g.write("static const unsigned char boot_splash_png[] = {\n") g.write("static const unsigned char boot_splash_png[] = {\n")
for i in range(len(buf)): for i in range(len(buf)):
g.write(str(buf[i]) + ",\n") g.write(str(buf[i]) + ",\n")
@ -37,7 +37,7 @@ def make_splash_editor(target, source, env):
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
g.write("#ifndef BOOT_SPLASH_EDITOR_H\n") g.write("#ifndef BOOT_SPLASH_EDITOR_H\n")
g.write("#define BOOT_SPLASH_EDITOR_H\n") g.write("#define BOOT_SPLASH_EDITOR_H\n")
g.write('static const Color boot_splash_editor_bg_color = Color(0.14, 0.14, 0.14);\n') g.write("static const Color boot_splash_editor_bg_color = Color(0.14, 0.14, 0.14);\n")
g.write("static const unsigned char boot_splash_editor_png[] = {\n") g.write("static const unsigned char boot_splash_editor_png[] = {\n")
for i in range(len(buf)): for i in range(len(buf)):
g.write(str(buf[i]) + ",\n") g.write(str(buf[i]) + ",\n")
@ -63,5 +63,5 @@ def make_app_icon(target, source, env):
g.write("#endif") g.write("#endif")
if __name__ == '__main__': if __name__ == "__main__":
subprocess_main(globals()) subprocess_main(globals())

View file

@ -1,6 +1,6 @@
#!/usr/bin/python #!/usr/bin/python
Import('env') Import("env")
env.tests_sources = [] env.tests_sources = []
env.add_source_files(env.tests_sources, "*.cpp") env.add_source_files(env.tests_sources, "*.cpp")

View file

@ -8,13 +8,13 @@ def add_source_files(self, sources, files, warn_duplicates=True):
# Convert string to list of absolute paths (including expanding wildcard) # Convert string to list of absolute paths (including expanding wildcard)
if isinstance(files, (str, bytes)): if isinstance(files, (str, bytes)):
# Keep SCons project-absolute path as they are (no wildcard support) # Keep SCons project-absolute path as they are (no wildcard support)
if files.startswith('#'): if files.startswith("#"):
if '*' in files: if "*" in files:
print("ERROR: Wildcards can't be expanded in SCons project-absolute path: '{}'".format(files)) print("ERROR: Wildcards can't be expanded in SCons project-absolute path: '{}'".format(files))
return return
files = [files] files = [files]
else: else:
dir_path = self.Dir('.').abspath dir_path = self.Dir(".").abspath
files = sorted(glob.glob(dir_path + "/" + files)) files = sorted(glob.glob(dir_path + "/" + files))
# Add each path as compiled Object following environment (self) configuration # Add each path as compiled Object following environment (self) configuration
@ -22,7 +22,7 @@ def add_source_files(self, sources, files, warn_duplicates=True):
obj = self.Object(path) obj = self.Object(path)
if obj in sources: if obj in sources:
if warn_duplicates: if warn_duplicates:
print("WARNING: Object \"{}\" already included in environment sources.".format(obj)) print('WARNING: Object "{}" already included in environment sources.'.format(obj))
else: else:
continue continue
sources.append(obj) sources.append(obj)
@ -33,20 +33,20 @@ def disable_warnings(self):
if self.msvc: if self.msvc:
# We have to remove existing warning level defines before appending /w, # We have to remove existing warning level defines before appending /w,
# otherwise we get: "warning D9025 : overriding '/W3' with '/w'" # otherwise we get: "warning D9025 : overriding '/W3' with '/w'"
warn_flags = ['/Wall', '/W4', '/W3', '/W2', '/W1', '/WX'] warn_flags = ["/Wall", "/W4", "/W3", "/W2", "/W1", "/WX"]
self.Append(CCFLAGS=['/w']) self.Append(CCFLAGS=["/w"])
self.Append(CFLAGS=['/w']) self.Append(CFLAGS=["/w"])
self.Append(CXXFLAGS=['/w']) self.Append(CXXFLAGS=["/w"])
self['CCFLAGS'] = [x for x in self['CCFLAGS'] if not x in warn_flags] self["CCFLAGS"] = [x for x in self["CCFLAGS"] if not x in warn_flags]
self['CFLAGS'] = [x for x in self['CFLAGS'] if not x in warn_flags] self["CFLAGS"] = [x for x in self["CFLAGS"] if not x in warn_flags]
self['CXXFLAGS'] = [x for x in self['CXXFLAGS'] if not x in warn_flags] self["CXXFLAGS"] = [x for x in self["CXXFLAGS"] if not x in warn_flags]
else: else:
self.Append(CCFLAGS=['-w']) self.Append(CCFLAGS=["-w"])
self.Append(CFLAGS=['-w']) self.Append(CFLAGS=["-w"])
self.Append(CXXFLAGS=['-w']) self.Append(CXXFLAGS=["-w"])
def add_module_version_string(self,s): def add_module_version_string(self, s):
self.module_version_string += "." + s self.module_version_string += "." + s
@ -64,16 +64,16 @@ def update_version(module_version_string=""):
f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n") f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
f.write("#ifndef VERSION_GENERATED_GEN_H\n") f.write("#ifndef VERSION_GENERATED_GEN_H\n")
f.write("#define VERSION_GENERATED_GEN_H\n") f.write("#define VERSION_GENERATED_GEN_H\n")
f.write("#define VERSION_SHORT_NAME \"" + str(version.short_name) + "\"\n") f.write('#define VERSION_SHORT_NAME "' + str(version.short_name) + '"\n')
f.write("#define VERSION_NAME \"" + str(version.name) + "\"\n") f.write('#define VERSION_NAME "' + str(version.name) + '"\n')
f.write("#define VERSION_MAJOR " + str(version.major) + "\n") f.write("#define VERSION_MAJOR " + str(version.major) + "\n")
f.write("#define VERSION_MINOR " + str(version.minor) + "\n") f.write("#define VERSION_MINOR " + str(version.minor) + "\n")
f.write("#define VERSION_PATCH " + str(version.patch) + "\n") f.write("#define VERSION_PATCH " + str(version.patch) + "\n")
f.write("#define VERSION_STATUS \"" + str(version.status) + "\"\n") f.write('#define VERSION_STATUS "' + str(version.status) + '"\n')
f.write("#define VERSION_BUILD \"" + str(build_name) + "\"\n") f.write('#define VERSION_BUILD "' + str(build_name) + '"\n')
f.write("#define VERSION_MODULE_CONFIG \"" + str(version.module_config) + module_version_string + "\"\n") f.write('#define VERSION_MODULE_CONFIG "' + str(version.module_config) + module_version_string + '"\n')
f.write("#define VERSION_YEAR " + str(version.year) + "\n") f.write("#define VERSION_YEAR " + str(version.year) + "\n")
f.write("#define VERSION_WEBSITE \"" + str(version.website) + "\"\n") f.write('#define VERSION_WEBSITE "' + str(version.website) + '"\n')
f.write("#endif // VERSION_GENERATED_GEN_H\n") f.write("#endif // VERSION_GENERATED_GEN_H\n")
f.close() f.close()
@ -99,7 +99,7 @@ def update_version(module_version_string=""):
else: else:
githash = head githash = head
fhash.write("#define VERSION_HASH \"" + githash + "\"\n") fhash.write('#define VERSION_HASH "' + githash + '"\n')
fhash.write("#endif // VERSION_HASH_GEN_H\n") fhash.write("#endif // VERSION_HASH_GEN_H\n")
fhash.close() fhash.close()
@ -158,17 +158,17 @@ def detect_modules():
try: try:
with open("modules/" + x + "/register_types.h"): with open("modules/" + x + "/register_types.h"):
includes_cpp += '#include "modules/' + x + '/register_types.h"\n' includes_cpp += '#include "modules/' + x + '/register_types.h"\n'
register_cpp += '#ifdef MODULE_' + x.upper() + '_ENABLED\n' register_cpp += "#ifdef MODULE_" + x.upper() + "_ENABLED\n"
register_cpp += '\tregister_' + x + '_types();\n' register_cpp += "\tregister_" + x + "_types();\n"
register_cpp += '#endif\n' register_cpp += "#endif\n"
preregister_cpp += '#ifdef MODULE_' + x.upper() + '_ENABLED\n' preregister_cpp += "#ifdef MODULE_" + x.upper() + "_ENABLED\n"
preregister_cpp += '#ifdef MODULE_' + x.upper() + '_HAS_PREREGISTER\n' preregister_cpp += "#ifdef MODULE_" + x.upper() + "_HAS_PREREGISTER\n"
preregister_cpp += '\tpreregister_' + x + '_types();\n' preregister_cpp += "\tpreregister_" + x + "_types();\n"
preregister_cpp += '#endif\n' preregister_cpp += "#endif\n"
preregister_cpp += '#endif\n' preregister_cpp += "#endif\n"
unregister_cpp += '#ifdef MODULE_' + x.upper() + '_ENABLED\n' unregister_cpp += "#ifdef MODULE_" + x.upper() + "_ENABLED\n"
unregister_cpp += '\tunregister_' + x + '_types();\n' unregister_cpp += "\tunregister_" + x + "_types();\n"
unregister_cpp += '#endif\n' unregister_cpp += "#endif\n"
except IOError: except IOError:
pass pass
@ -191,7 +191,12 @@ void register_module_types() {
void unregister_module_types() { void unregister_module_types() {
%s %s
} }
""" % (includes_cpp, preregister_cpp, register_cpp, unregister_cpp) """ % (
includes_cpp,
preregister_cpp,
register_cpp,
unregister_cpp,
)
# NOTE: It is safe to generate this file here, since this is still executed serially # NOTE: It is safe to generate this file here, since this is still executed serially
with open("modules/register_module_types.gen.cpp", "w") as f: with open("modules/register_module_types.gen.cpp", "w") as f:
@ -206,7 +211,7 @@ def disable_module(self):
def use_windows_spawn_fix(self, platform=None): def use_windows_spawn_fix(self, platform=None):
if (os.name != "nt"): if os.name != "nt":
return # not needed, only for windows return # not needed, only for windows
# On Windows, due to the limited command line length, when creating a static library # On Windows, due to the limited command line length, when creating a static library
@ -217,14 +222,21 @@ def use_windows_spawn_fix(self, platform=None):
# got built correctly regardless the invocation strategy. # got built correctly regardless the invocation strategy.
# Furthermore, since SCons will rebuild the library from scratch when an object file # Furthermore, since SCons will rebuild the library from scratch when an object file
# changes, no multiple versions of the same object file will be present. # changes, no multiple versions of the same object file will be present.
self.Replace(ARFLAGS='q') self.Replace(ARFLAGS="q")
def mySubProcess(cmdline, env): def mySubProcess(cmdline, env):
startupinfo = subprocess.STARTUPINFO() startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, proc = subprocess.Popen(
stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env) cmdline,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
startupinfo=startupinfo,
shell=False,
env=env,
)
_, err = proc.communicate() _, err = proc.communicate()
rv = proc.wait() rv = proc.wait()
if rv: if rv:
@ -235,7 +247,7 @@ def use_windows_spawn_fix(self, platform=None):
def mySpawn(sh, escape, cmd, args, env): def mySpawn(sh, escape, cmd, args, env):
newargs = ' '.join(args[1:]) newargs = " ".join(args[1:])
cmdline = cmd + " " + newargs cmdline = cmd + " " + newargs
rv = 0 rv = 0
@ -251,15 +263,15 @@ def use_windows_spawn_fix(self, platform=None):
return rv return rv
self['SPAWN'] = mySpawn self["SPAWN"] = mySpawn
def save_active_platforms(apnames, ap): def save_active_platforms(apnames, ap):
for x in ap: for x in ap:
names = ['logo'] names = ["logo"]
if os.path.isfile(x + "/run_icon.png"): if os.path.isfile(x + "/run_icon.png"):
names.append('run_icon') names.append("run_icon")
for name in names: for name in names:
pngf = open(x + "/" + name + ".png", "rb") pngf = open(x + "/" + name + ".png", "rb")
@ -269,7 +281,7 @@ def save_active_platforms(apnames, ap):
while len(b) == 1: while len(b) == 1:
str += hex(ord(b)) str += hex(ord(b))
b = pngf.read(1) b = pngf.read(1)
if (len(b) == 1): if len(b) == 1:
str += "," str += ","
str += "};\n" str += "};\n"
@ -289,30 +301,46 @@ def no_verbose(sys, env):
# Colors are disabled in non-TTY environments such as pipes. This means # Colors are disabled in non-TTY environments such as pipes. This means
# that if output is redirected to a file, it will not contain color codes # that if output is redirected to a file, it will not contain color codes
if sys.stdout.isatty(): if sys.stdout.isatty():
colors['cyan'] = '\033[96m' colors["cyan"] = "\033[96m"
colors['purple'] = '\033[95m' colors["purple"] = "\033[95m"
colors['blue'] = '\033[94m' colors["blue"] = "\033[94m"
colors['green'] = '\033[92m' colors["green"] = "\033[92m"
colors['yellow'] = '\033[93m' colors["yellow"] = "\033[93m"
colors['red'] = '\033[91m' colors["red"] = "\033[91m"
colors['end'] = '\033[0m' colors["end"] = "\033[0m"
else: else:
colors['cyan'] = '' colors["cyan"] = ""
colors['purple'] = '' colors["purple"] = ""
colors['blue'] = '' colors["blue"] = ""
colors['green'] = '' colors["green"] = ""
colors['yellow'] = '' colors["yellow"] = ""
colors['red'] = '' colors["red"] = ""
colors['end'] = '' colors["end"] = ""
compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) compile_source_message = "{}Compiling {}==> {}$SOURCE{}".format(
java_compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) colors["blue"], colors["purple"], colors["yellow"], colors["end"]
compile_shared_source_message = '%sCompiling shared %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end']) )
link_program_message = '%sLinking Program %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) java_compile_source_message = "{}Compiling {}==> {}$SOURCE{}".format(
link_library_message = '%sLinking Static Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) colors["blue"], colors["purple"], colors["yellow"], colors["end"]
ranlib_library_message = '%sRanlib Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) )
link_shared_library_message = '%sLinking Shared Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) compile_shared_source_message = "{}Compiling shared {}==> {}$SOURCE{}".format(
java_library_message = '%sCreating Java Archive %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end']) colors["blue"], colors["purple"], colors["yellow"], colors["end"]
)
link_program_message = "{}Linking Program {}==> {}$TARGET{}".format(
colors["red"], colors["purple"], colors["yellow"], colors["end"]
)
link_library_message = "{}Linking Static Library {}==> {}$TARGET{}".format(
colors["red"], colors["purple"], colors["yellow"], colors["end"]
)
ranlib_library_message = "{}Ranlib Library {}==> {}$TARGET{}".format(
colors["red"], colors["purple"], colors["yellow"], colors["end"]
)
link_shared_library_message = "{}Linking Shared Library {}==> {}$TARGET{}".format(
colors["red"], colors["purple"], colors["yellow"], colors["end"]
)
java_library_message = "{}Creating Java Archive {}==> {}$TARGET{}".format(
colors["red"], colors["purple"], colors["yellow"], colors["end"]
)
env.Append(CXXCOMSTR=[compile_source_message]) env.Append(CXXCOMSTR=[compile_source_message])
env.Append(CCCOMSTR=[compile_source_message]) env.Append(CCCOMSTR=[compile_source_message])
@ -353,70 +381,79 @@ def detect_visual_c_compiler_version(tools_env):
vc_chosen_compiler_str = "" vc_chosen_compiler_str = ""
# Start with Pre VS 2017 checks which uses VCINSTALLDIR: # Start with Pre VS 2017 checks which uses VCINSTALLDIR:
if 'VCINSTALLDIR' in tools_env: if "VCINSTALLDIR" in tools_env:
# print("Checking VCINSTALLDIR") # print("Checking VCINSTALLDIR")
# find() works with -1 so big ifs below are needed... the simplest solution, in fact # find() works with -1 so big ifs below are needed... the simplest solution, in fact
# First test if amd64 and amd64_x86 compilers are present in the path # First test if amd64 and amd64_x86 compilers are present in the path
vc_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64;") vc_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64;")
if(vc_amd64_compiler_detection_index > -1): if vc_amd64_compiler_detection_index > -1:
vc_chosen_compiler_index = vc_amd64_compiler_detection_index vc_chosen_compiler_index = vc_amd64_compiler_detection_index
vc_chosen_compiler_str = "amd64" vc_chosen_compiler_str = "amd64"
vc_amd64_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64_x86;") vc_amd64_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64_x86;")
if(vc_amd64_x86_compiler_detection_index > -1 if vc_amd64_x86_compiler_detection_index > -1 and (
and (vc_chosen_compiler_index == -1 vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index
or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index)): ):
vc_chosen_compiler_index = vc_amd64_x86_compiler_detection_index vc_chosen_compiler_index = vc_amd64_x86_compiler_detection_index
vc_chosen_compiler_str = "amd64_x86" vc_chosen_compiler_str = "amd64_x86"
# Now check the 32 bit compilers # Now check the 32 bit compilers
vc_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN;") vc_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN;")
if(vc_x86_compiler_detection_index > -1 if vc_x86_compiler_detection_index > -1 and (
and (vc_chosen_compiler_index == -1 vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_compiler_detection_index
or vc_chosen_compiler_index > vc_x86_compiler_detection_index)): ):
vc_chosen_compiler_index = vc_x86_compiler_detection_index vc_chosen_compiler_index = vc_x86_compiler_detection_index
vc_chosen_compiler_str = "x86" vc_chosen_compiler_str = "x86"
vc_x86_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env['VCINSTALLDIR'] + "BIN\\x86_amd64;") vc_x86_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\x86_amd64;")
if(vc_x86_amd64_compiler_detection_index > -1 if vc_x86_amd64_compiler_detection_index > -1 and (
and (vc_chosen_compiler_index == -1 vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index
or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index)): ):
vc_chosen_compiler_index = vc_x86_amd64_compiler_detection_index vc_chosen_compiler_index = vc_x86_amd64_compiler_detection_index
vc_chosen_compiler_str = "x86_amd64" vc_chosen_compiler_str = "x86_amd64"
# and for VS 2017 and newer we check VCTOOLSINSTALLDIR: # and for VS 2017 and newer we check VCTOOLSINSTALLDIR:
if 'VCTOOLSINSTALLDIR' in tools_env: if "VCTOOLSINSTALLDIR" in tools_env:
# Newer versions have a different path available # Newer versions have a different path available
vc_amd64_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX64\\X64;") vc_amd64_compiler_detection_index = (
if(vc_amd64_compiler_detection_index > -1): tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX64\\X64;")
)
if vc_amd64_compiler_detection_index > -1:
vc_chosen_compiler_index = vc_amd64_compiler_detection_index vc_chosen_compiler_index = vc_amd64_compiler_detection_index
vc_chosen_compiler_str = "amd64" vc_chosen_compiler_str = "amd64"
vc_amd64_x86_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX64\\X86;") vc_amd64_x86_compiler_detection_index = (
if(vc_amd64_x86_compiler_detection_index > -1 tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX64\\X86;")
and (vc_chosen_compiler_index == -1 )
or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index)): if vc_amd64_x86_compiler_detection_index > -1 and (
vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index
):
vc_chosen_compiler_index = vc_amd64_x86_compiler_detection_index vc_chosen_compiler_index = vc_amd64_x86_compiler_detection_index
vc_chosen_compiler_str = "amd64_x86" vc_chosen_compiler_str = "amd64_x86"
vc_x86_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX86\\X86;") vc_x86_compiler_detection_index = (
if(vc_x86_compiler_detection_index > -1 tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX86\\X86;")
and (vc_chosen_compiler_index == -1 )
or vc_chosen_compiler_index > vc_x86_compiler_detection_index)): if vc_x86_compiler_detection_index > -1 and (
vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_compiler_detection_index
):
vc_chosen_compiler_index = vc_x86_compiler_detection_index vc_chosen_compiler_index = vc_x86_compiler_detection_index
vc_chosen_compiler_str = "x86" vc_chosen_compiler_str = "x86"
vc_x86_amd64_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX86\\X64;") vc_x86_amd64_compiler_detection_index = (
if(vc_x86_amd64_compiler_detection_index > -1 tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX86\\X64;")
and (vc_chosen_compiler_index == -1 )
or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index)): if vc_x86_amd64_compiler_detection_index > -1 and (
vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index
):
vc_chosen_compiler_index = vc_x86_amd64_compiler_detection_index vc_chosen_compiler_index = vc_x86_amd64_compiler_detection_index
vc_chosen_compiler_str = "x86_amd64" vc_chosen_compiler_str = "x86_amd64"
return vc_chosen_compiler_str return vc_chosen_compiler_str
def find_visual_c_batch_file(env): def find_visual_c_batch_file(env):
from SCons.Tool.MSCommon.vc import get_default_version, get_host_target, find_batch_file from SCons.Tool.MSCommon.vc import get_default_version, get_host_target, find_batch_file
@ -424,6 +461,7 @@ def find_visual_c_batch_file(env):
(host_platform, target_platform, _) = get_host_target(env) (host_platform, target_platform, _) = get_host_target(env)
return find_batch_file(env, version, host_platform, target_platform)[0] return find_batch_file(env, version, host_platform, target_platform)[0]
def generate_cpp_hint_file(filename): def generate_cpp_hint_file(filename):
if os.path.isfile(filename): if os.path.isfile(filename):
# Don't overwrite an existing hint file since the user may have customized it. # Don't overwrite an existing hint file since the user may have customized it.
@ -435,15 +473,19 @@ def generate_cpp_hint_file(filename):
except IOError: except IOError:
print("Could not write cpp.hint file.") print("Could not write cpp.hint file.")
def generate_vs_project(env, num_jobs): def generate_vs_project(env, num_jobs):
batch_file = find_visual_c_batch_file(env) batch_file = find_visual_c_batch_file(env)
if batch_file: if batch_file:
def build_commandline(commands): def build_commandline(commands):
common_build_prefix = ['cmd /V /C set "plat=$(PlatformTarget)"', common_build_prefix = [
'(if "$(PlatformTarget)"=="x64" (set "plat=x86_amd64"))', 'cmd /V /C set "plat=$(PlatformTarget)"',
'set "tools=yes"', '(if "$(PlatformTarget)"=="x64" (set "plat=x86_amd64"))',
'(if "$(Configuration)"=="release" (set "tools=no"))', 'set "tools=yes"',
'call "' + batch_file + '" !plat!'] '(if "$(Configuration)"=="release" (set "tools=no"))',
'call "' + batch_file + '" !plat!',
]
result = " ^& ".join(common_build_prefix + [commands]) result = " ^& ".join(common_build_prefix + [commands])
return result return result
@ -459,87 +501,104 @@ def generate_vs_project(env, num_jobs):
# to double quote off the directory. However, the path ends # to double quote off the directory. However, the path ends
# in a backslash, so we need to remove this, lest it escape the # in a backslash, so we need to remove this, lest it escape the
# last double quote off, confusing MSBuild # last double quote off, confusing MSBuild
env['MSVSBUILDCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" platform=windows progress=no target=$(Configuration) tools=!tools! -j' + str(num_jobs)) env["MSVSBUILDCOM"] = build_commandline(
env['MSVSREBUILDCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" platform=windows progress=no target=$(Configuration) tools=!tools! vsproj=yes -j' + str(num_jobs)) "scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" platform=windows progress=no target=$(Configuration) tools=!tools! -j"
env['MSVSCLEANCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" --clean platform=windows progress=no target=$(Configuration) tools=!tools! -j' + str(num_jobs)) + str(num_jobs)
)
env["MSVSREBUILDCOM"] = build_commandline(
"scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" platform=windows progress=no target=$(Configuration) tools=!tools! vsproj=yes -j"
+ str(num_jobs)
)
env["MSVSCLEANCOM"] = build_commandline(
"scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" --clean platform=windows progress=no target=$(Configuration) tools=!tools! -j"
+ str(num_jobs)
)
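
Note: a minimal sketch (hypothetical paths, not part of the buildsystem) of the quoting issue the comment above works around. $(ProjectDir) expands to a path ending in a backslash, and quoting it verbatim would let that backslash escape the closing double quote of the scons invocation:

    project_dir = "C:\\godot\\"  # hypothetical expansion of $(ProjectDir)
    bad = 'scons --directory="' + project_dir + '"'                # ...="C:\godot\" -- trailing backslash escapes the closing quote
    good = 'scons --directory="' + project_dir.rstrip("\\") + '"'  # ...="C:\godot"  -- the effect of TrimEnd('\')
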
# This version information (Win32, x64, Debug, Release, Release_Debug seems to be # This version information (Win32, x64, Debug, Release, Release_Debug seems to be
# required for Visual Studio to understand that it needs to generate an NMAKE # required for Visual Studio to understand that it needs to generate an NMAKE
# project. Do not modify without knowing what you are doing. # project. Do not modify without knowing what you are doing.
debug_variants = ['debug|Win32'] + ['debug|x64'] debug_variants = ["debug|Win32"] + ["debug|x64"]
release_variants = ['release|Win32'] + ['release|x64'] release_variants = ["release|Win32"] + ["release|x64"]
release_debug_variants = ['release_debug|Win32'] + ['release_debug|x64'] release_debug_variants = ["release_debug|Win32"] + ["release_debug|x64"]
variants = debug_variants + release_variants + release_debug_variants variants = debug_variants + release_variants + release_debug_variants
debug_targets = ['bin\\godot.windows.tools.32.exe'] + ['bin\\godot.windows.tools.64.exe'] debug_targets = ["bin\\godot.windows.tools.32.exe"] + ["bin\\godot.windows.tools.64.exe"]
release_targets = ['bin\\godot.windows.opt.32.exe'] + ['bin\\godot.windows.opt.64.exe'] release_targets = ["bin\\godot.windows.opt.32.exe"] + ["bin\\godot.windows.opt.64.exe"]
release_debug_targets = ['bin\\godot.windows.opt.tools.32.exe'] + ['bin\\godot.windows.opt.tools.64.exe'] release_debug_targets = ["bin\\godot.windows.opt.tools.32.exe"] + ["bin\\godot.windows.opt.tools.64.exe"]
targets = debug_targets + release_targets + release_debug_targets targets = debug_targets + release_targets + release_debug_targets
if not env.get('MSVS'): if not env.get("MSVS"):
env['MSVS']['PROJECTSUFFIX'] = '.vcxproj' env["MSVS"]["PROJECTSUFFIX"] = ".vcxproj"
env['MSVS']['SOLUTIONSUFFIX'] = '.sln' env["MSVS"]["SOLUTIONSUFFIX"] = ".sln"
env.MSVSProject( env.MSVSProject(
target=['#godot' + env['MSVSPROJECTSUFFIX']], target=["#godot" + env["MSVSPROJECTSUFFIX"]],
incs=env.vs_incs, incs=env.vs_incs,
srcs=env.vs_srcs, srcs=env.vs_srcs,
runfile=targets, runfile=targets,
buildtarget=targets, buildtarget=targets,
auto_build_solution=1, auto_build_solution=1,
variant=variants) variant=variants,
)
else: else:
print("Could not locate Visual Studio batch file for setting up the build environment. Not generating VS project.") print("Could not locate Visual Studio batch file to set up the build environment. Not generating VS project.")
def precious_program(env, program, sources, **args): def precious_program(env, program, sources, **args):
program = env.ProgramOriginal(program, sources, **args) program = env.ProgramOriginal(program, sources, **args)
env.Precious(program) env.Precious(program)
return program return program
def add_shared_library(env, name, sources, **args): def add_shared_library(env, name, sources, **args):
library = env.SharedLibrary(name, sources, **args) library = env.SharedLibrary(name, sources, **args)
env.NoCache(library) env.NoCache(library)
return library return library
def add_library(env, name, sources, **args): def add_library(env, name, sources, **args):
library = env.Library(name, sources, **args) library = env.Library(name, sources, **args)
env.NoCache(library) env.NoCache(library)
return library return library
def add_program(env, name, sources, **args): def add_program(env, name, sources, **args):
program = env.Program(name, sources, **args) program = env.Program(name, sources, **args)
env.NoCache(program) env.NoCache(program)
return program return program
def CommandNoCache(env, target, sources, command, **args): def CommandNoCache(env, target, sources, command, **args):
result = env.Command(target, sources, command, **args) result = env.Command(target, sources, command, **args)
env.NoCache(result) env.NoCache(result)
return result return result
def detect_darwin_sdk_path(platform, env): def detect_darwin_sdk_path(platform, env):
sdk_name = '' sdk_name = ""
if platform == 'osx': if platform == "osx":
sdk_name = 'macosx' sdk_name = "macosx"
var_name = 'MACOS_SDK_PATH' var_name = "MACOS_SDK_PATH"
elif platform == 'iphone': elif platform == "iphone":
sdk_name = 'iphoneos' sdk_name = "iphoneos"
var_name = 'IPHONESDK' var_name = "IPHONESDK"
elif platform == 'iphonesimulator': elif platform == "iphonesimulator":
sdk_name = 'iphonesimulator' sdk_name = "iphonesimulator"
var_name = 'IPHONESDK' var_name = "IPHONESDK"
else: else:
raise Exception("Invalid platform argument passed to detect_darwin_sdk_path") raise Exception("Invalid platform argument passed to detect_darwin_sdk_path")
if not env[var_name]: if not env[var_name]:
try: try:
sdk_path = subprocess.check_output(['xcrun', '--sdk', sdk_name, '--show-sdk-path']).strip().decode("utf-8") sdk_path = subprocess.check_output(["xcrun", "--sdk", sdk_name, "--show-sdk-path"]).strip().decode("utf-8")
if sdk_path: if sdk_path:
env[var_name] = sdk_path env[var_name] = sdk_path
except (subprocess.CalledProcessError, OSError): except (subprocess.CalledProcessError, OSError):
print("Failed to find SDK path while running xcrun --sdk {} --show-sdk-path.".format(sdk_name)) print("Failed to find SDK path while running xcrun --sdk {} --show-sdk-path.".format(sdk_name))
raise raise
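
Note: the xcrun query above can be reproduced standalone; its output is only written into MACOS_SDK_PATH / IPHONESDK when that variable is empty. A minimal sketch (macOS host assumed, printed path illustrative):

    import subprocess

    sdk_path = subprocess.check_output(["xcrun", "--sdk", "macosx", "--show-sdk-path"]).strip().decode("utf-8")
    print(sdk_path)  # e.g. /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
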
def is_vanilla_clang(env): def is_vanilla_clang(env):
if not using_clang(env): if not using_clang(env):
return False return False
version = subprocess.check_output([env['CXX'], '--version']).strip().decode("utf-8") version = subprocess.check_output([env["CXX"], "--version"]).strip().decode("utf-8")
return not version.startswith("Apple") return not version.startswith("Apple")
@ -552,20 +611,22 @@ def get_compiler_version(env):
# Not using -dumpversion as some GCC distros only return major, and # Not using -dumpversion as some GCC distros only return major, and
# Clang used to return hardcoded 4.2.1: # https://reviews.llvm.org/D56803 # Clang used to return hardcoded 4.2.1: # https://reviews.llvm.org/D56803
try: try:
version = subprocess.check_output([env.subst(env['CXX']), '--version']).strip().decode("utf-8") version = subprocess.check_output([env.subst(env["CXX"]), "--version"]).strip().decode("utf-8")
except (subprocess.CalledProcessError, OSError): except (subprocess.CalledProcessError, OSError):
print("Couldn't parse CXX environment variable to infer compiler version.") print("Couldn't parse CXX environment variable to infer compiler version.")
return None return None
else: # TODO: Implement for MSVC else: # TODO: Implement for MSVC
return None return None
match = re.search('[0-9]+\.[0-9.]+', version) match = re.search("[0-9]+\.[0-9.]+", version)
if match is not None: if match is not None:
return list(map(int, match.group().split('.'))) return list(map(int, match.group().split(".")))
else: else:
return None return None
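
Note: a standalone sketch of the version parsing above, run against a hypothetical GCC banner (the regex is written as a raw string here purely to avoid the invalid escape-sequence warning; the match is the same):

    import re

    banner = "g++ (Ubuntu 9.3.0-17ubuntu1~20.04) 9.3.0"  # hypothetical first line of `g++ --version`
    match = re.search(r"[0-9]+\.[0-9.]+", banner)
    if match is not None:
        print(list(map(int, match.group().split("."))))  # [9, 3, 0]
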
def using_gcc(env): def using_gcc(env):
return 'gcc' in os.path.basename(env["CC"]) return "gcc" in os.path.basename(env["CC"])
def using_clang(env): def using_clang(env):
return 'clang' in os.path.basename(env["CC"]) return "clang" in os.path.basename(env["CC"])


@ -37,24 +37,24 @@ files = open("files", "r")
fname = files.readline() fname = files.readline()
while (fname != ""): while fname != "":
# Handle replacing $filename with actual filename and keep alignment # Handle replacing $filename with actual filename and keep alignment
fsingle = fname.strip() fsingle = fname.strip()
if (fsingle.find("/") != -1): if fsingle.find("/") != -1:
fsingle = fsingle[fsingle.rfind("/") + 1:] fsingle = fsingle[fsingle.rfind("/") + 1 :]
rep_fl = "$filename" rep_fl = "$filename"
rep_fi = fsingle rep_fi = fsingle
len_fl = len(rep_fl) len_fl = len(rep_fl)
len_fi = len(rep_fi) len_fi = len(rep_fi)
# Pad with spaces to keep alignment # Pad with spaces to keep alignment
if (len_fi < len_fl): if len_fi < len_fl:
for x in range(len_fl - len_fi): for x in range(len_fl - len_fi):
rep_fi += " " rep_fi += " "
elif (len_fl < len_fi): elif len_fl < len_fi:
for x in range(len_fi - len_fl): for x in range(len_fi - len_fl):
rep_fl += " " rep_fl += " "
if (header.find(rep_fl) != -1): if header.find(rep_fl) != -1:
text = header.replace(rep_fl, rep_fi) text = header.replace(rep_fl, rep_fi)
else: else:
text = header.replace("$filename", fsingle) text = header.replace("$filename", fsingle)
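
Note: a worked example of the padding above with a hypothetical header line. Whichever of the two tokens is shorter is padded with spaces, so substituting the real file name never changes the width of the boxed header:

    header = "/* $filename                    */"  # hypothetical header line
    fsingle = "node.cpp"
    rep_fl, rep_fi = "$filename", fsingle
    if len(rep_fi) < len(rep_fl):
        rep_fi += " " * (len(rep_fl) - len(rep_fi))  # pad "node.cpp" to the width of "$filename"
    elif len(rep_fl) < len(rep_fi):
        rep_fl += " " * (len(rep_fi) - len(rep_fl))
    print(header.replace(rep_fl, rep_fi))  # same overall width, the closing */ stays aligned
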
@ -71,21 +71,21 @@ while (fname != ""):
line = fileread.readline() line = fileread.readline()
header_done = False header_done = False
while (line.strip() == ""): # Skip empty lines at the top while line.strip() == "": # Skip empty lines at the top
line = fileread.readline() line = fileread.readline()
if (line.find("/**********") == -1): # Godot header starts this way if line.find("/**********") == -1: # Godot header starts this way
# Maybe starting with a non-Godot comment, abort header magic # Maybe starting with a non-Godot comment, abort header magic
header_done = True header_done = True
while (not header_done): # Handle header now while not header_done: # Handle header now
if (line.find("/*") != 0): # No more starting with a comment if line.find("/*") != 0: # No more starting with a comment
header_done = True header_done = True
if (line.strip() != ""): if line.strip() != "":
text += line text += line
line = fileread.readline() line = fileread.readline()
while (line != ""): # Dump everything until EOF while line != "": # Dump everything until EOF
text += line text += line
line = fileread.readline() line = fileread.readline()


@ -1,12 +1,12 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
import modules_builders import modules_builders
env_modules = env.Clone() env_modules = env.Clone()
Export('env_modules') Export("env_modules")
# Header with MODULE_*_ENABLED defines. # Header with MODULE_*_ENABLED defines.
env.CommandNoCache("modules_enabled.gen.h", Value(env.module_list), modules_builders.generate_modules_enabled) env.CommandNoCache("modules_enabled.gen.h", Value(env.module_list), modules_builders.generate_modules_enabled)


@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_arkit = env_modules.Clone() env_arkit = env_modules.Clone()
@ -9,4 +9,4 @@ env_arkit = env_modules.Clone()
modules_sources = [] modules_sources = []
env_arkit.add_source_files(modules_sources, "*.cpp") env_arkit.add_source_files(modules_sources, "*.cpp")
env_arkit.add_source_files(modules_sources, "*.mm") env_arkit.add_source_files(modules_sources, "*.mm")
mod_lib = env_modules.add_library('#bin/libgodot_arkit_module' + env['LIBSUFFIX'], modules_sources) mod_lib = env_modules.add_library("#bin/libgodot_arkit_module" + env["LIBSUFFIX"], modules_sources)


@ -1,5 +1,6 @@
def can_build(env, platform): def can_build(env, platform):
return platform == 'iphone' return platform == "iphone"
def configure(env): def configure(env):
pass pass


@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_assimp = env_modules.Clone() env_assimp = env_modules.Clone()
@ -10,85 +10,85 @@ env_assimp = env_modules.Clone()
if True: # env['builtin_assimp']: if True: # env['builtin_assimp']:
thirdparty_dir = "#thirdparty/assimp" thirdparty_dir = "#thirdparty/assimp"
env_assimp.Prepend(CPPPATH=['#thirdparty/assimp']) env_assimp.Prepend(CPPPATH=["#thirdparty/assimp"])
env_assimp.Prepend(CPPPATH=['#thirdparty/assimp/code']) env_assimp.Prepend(CPPPATH=["#thirdparty/assimp/code"])
env_assimp.Prepend(CPPPATH=['#thirdparty/assimp/include']) env_assimp.Prepend(CPPPATH=["#thirdparty/assimp/include"])
#env_assimp.Append(CPPDEFINES=['ASSIMP_DOUBLE_PRECISION']) # TODO default to what godot is compiled with for future double support # env_assimp.Append(CPPDEFINES=['ASSIMP_DOUBLE_PRECISION']) # TODO default to what godot is compiled with for future double support
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_SINGLETHREADED']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_SINGLETHREADED"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_BOOST_WORKAROUND']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_BOOST_WORKAROUND"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OWN_ZLIB']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OWN_ZLIB"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_EXPORT']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_EXPORT"])
# Importers we don't need # Importers we don't need
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3D_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3D_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3DS_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3DS_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3MF_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3MF_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_AC_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_AC_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_AMF_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_AMF_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_ASE_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_ASE_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_ASSBIN_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_ASSBIN_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_B3D_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_B3D_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_BLEND_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_BLEND_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_BVH_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_BVH_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_C4D_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_C4D_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_COB_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_COB_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_COLLADA_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_COLLADA_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_CSM_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_CSM_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_DXF_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_DXF_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_GLTF2_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_GLTF2_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_GLTF_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_GLTF_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_HMP_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_HMP_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IFC_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IFC_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IRR_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IRR_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IRRMESH_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IRRMESH_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_LWO_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_LWO_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_LWS_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_LWS_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_M3D_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_M3D_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD2_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD2_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD3_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD3_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD5_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD5_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD5_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD5_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MDC_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MDC_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MDL_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MDL_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MMD_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MMD_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MS3D_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MS3D_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_NDO_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_NDO_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_NFF_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_NFF_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OBJ_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OBJ_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OFF_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OFF_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OGRE_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OGRE_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OPENGEX_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OPENGEX_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_PLY_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_PLY_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_Q3BSP_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_Q3BSP_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_Q3D_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_Q3D_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_RAW_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_RAW_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_SIB_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_SIB_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_SMD_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_SMD_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_STEP_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_STEP_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_STL_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_STL_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_TERRAGEN_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_TERRAGEN_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_X3D_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_X3D_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_XGL_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_XGL_IMPORTER"])
env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_X_IMPORTER']) env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_X_IMPORTER"])
if env["platform"] == "windows":
env_assimp.Append(CPPDEFINES=["PLATFORM_WINDOWS"])
env_assimp.Append(CPPDEFINES=[("PLATFORM", "WINDOWS")])
elif env["platform"] == "linuxbsd":
env_assimp.Append(CPPDEFINES=["PLATFORM_LINUX"])
env_assimp.Append(CPPDEFINES=[("PLATFORM", "LINUX")])
elif env["platform"] == "osx":
env_assimp.Append(CPPDEFINES=["PLATFORM_DARWIN"])
env_assimp.Append(CPPDEFINES=[("PLATFORM", "DARWIN")])
if(env['platform'] == 'windows'):
env_assimp.Append(CPPDEFINES=['PLATFORM_WINDOWS'])
env_assimp.Append(CPPDEFINES=[('PLATFORM', 'WINDOWS')])
elif(env['platform'] == 'linuxbsd'):
env_assimp.Append(CPPDEFINES=['PLATFORM_LINUX'])
env_assimp.Append(CPPDEFINES=[('PLATFORM', 'LINUX')])
elif(env['platform'] == 'osx'):
env_assimp.Append(CPPDEFINES=['PLATFORM_DARWIN'])
env_assimp.Append(CPPDEFINES=[('PLATFORM', 'DARWIN')])
env_thirdparty = env_assimp.Clone() env_thirdparty = env_assimp.Clone()
env_thirdparty.disable_warnings() env_thirdparty.disable_warnings()
env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/CApi/*.cpp')) env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/CApi/*.cpp"))
env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/Common/*.cpp')) env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/Common/*.cpp"))
env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/PostProcessing/*.cpp')) env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/PostProcessing/*.cpp"))
env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/Material/*.cpp')) env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/Material/*.cpp"))
env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/FBX/*.cpp')) env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/FBX/*.cpp"))
# Godot's own source files # Godot's own source files
env_assimp.add_source_files(env.modules_sources, "*.cpp") env_assimp.add_source_files(env.modules_sources, "*.cpp")


@ -1,5 +1,6 @@
def can_build(env, platform): def can_build(env, platform):
return env['tools'] return env["tools"]
def configure(env): def configure(env):
pass pass


@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_basisu = env_modules.Clone() env_basisu = env_modules.Clone()
@ -9,36 +9,38 @@ env_basisu = env_modules.Clone()
# Not unbundled so far since not widespread as shared library # Not unbundled so far since not widespread as shared library
thirdparty_dir = "#thirdparty/basis_universal/" thirdparty_dir = "#thirdparty/basis_universal/"
tool_sources = [ tool_sources = [
"basisu_astc_decomp.cpp", "basisu_astc_decomp.cpp",
"basisu_backend.cpp", "basisu_backend.cpp",
"basisu_basis_file.cpp", "basisu_basis_file.cpp",
"basisu_comp.cpp", "basisu_comp.cpp",
"basisu_enc.cpp", "basisu_enc.cpp",
"basisu_etc.cpp", "basisu_etc.cpp",
"basisu_frontend.cpp", "basisu_frontend.cpp",
"basisu_global_selector_palette_helpers.cpp", "basisu_global_selector_palette_helpers.cpp",
"basisu_gpu_texture.cpp", "basisu_gpu_texture.cpp",
"basisu_pvrtc1_4.cpp", "basisu_pvrtc1_4.cpp",
"basisu_resample_filters.cpp", "basisu_resample_filters.cpp",
"basisu_resampler.cpp", "basisu_resampler.cpp",
"basisu_ssim.cpp", "basisu_ssim.cpp",
"lodepng.cpp", "lodepng.cpp",
] ]
tool_sources = [thirdparty_dir + file for file in tool_sources] tool_sources = [thirdparty_dir + file for file in tool_sources]
transcoder_sources = [thirdparty_dir + "transcoder/basisu_transcoder.cpp"] transcoder_sources = [thirdparty_dir + "transcoder/basisu_transcoder.cpp"]
# Treat Basis headers as system headers to avoid raising warnings. Not supported on MSVC. # Treat Basis headers as system headers to avoid raising warnings. Not supported on MSVC.
if not env.msvc: if not env.msvc:
env_basisu.Append(CPPFLAGS=['-isystem', Dir(thirdparty_dir).path, '-isystem', Dir(thirdparty_dir + "transcoder").path]) env_basisu.Append(
CPPFLAGS=["-isystem", Dir(thirdparty_dir).path, "-isystem", Dir(thirdparty_dir + "transcoder").path]
)
else: else:
env_basisu.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "transcoder"]) env_basisu.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "transcoder"])
if env['target'] == "debug": if env["target"] == "debug":
env_basisu.Append(CPPFLAGS=["-DBASISU_DEVEL_MESSAGES=1", "-DBASISD_ENABLE_DEBUG_FLAGS=1"]) env_basisu.Append(CPPFLAGS=["-DBASISU_DEVEL_MESSAGES=1", "-DBASISD_ENABLE_DEBUG_FLAGS=1"])
env_thirdparty = env_basisu.Clone() env_thirdparty = env_basisu.Clone()
env_thirdparty.disable_warnings() env_thirdparty.disable_warnings()
if env['tools']: if env["tools"]:
env_thirdparty.add_source_files(env.modules_sources, tool_sources) env_thirdparty.add_source_files(env.modules_sources, tool_sources)
env_thirdparty.add_source_files(env.modules_sources, transcoder_sources) env_thirdparty.add_source_files(env.modules_sources, transcoder_sources)


@ -1,5 +1,6 @@
def can_build(env, platform): def can_build(env, platform):
return True return True
def configure(env): def configure(env):
pass pass


@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_bmp = env_modules.Clone() env_bmp = env_modules.Clone()


@ -1,5 +1,6 @@
def can_build(env, platform): def can_build(env, platform):
return True return True
def configure(env): def configure(env):
pass pass


@ -1,208 +1,203 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_bullet = env_modules.Clone() env_bullet = env_modules.Clone()
# Thirdparty source files # Thirdparty source files
if env['builtin_bullet']: if env["builtin_bullet"]:
# Build only version 2 for now (as of 2.89) # Build only version 2 for now (as of 2.89)
# Sync file list with relevant upstream CMakeLists.txt for each folder. # Sync file list with relevant upstream CMakeLists.txt for each folder.
thirdparty_dir = "#thirdparty/bullet/" thirdparty_dir = "#thirdparty/bullet/"
bullet2_src = [ bullet2_src = [
# BulletCollision # BulletCollision
"BulletCollision/BroadphaseCollision/btAxisSweep3.cpp" "BulletCollision/BroadphaseCollision/btAxisSweep3.cpp",
, "BulletCollision/BroadphaseCollision/btBroadphaseProxy.cpp" "BulletCollision/BroadphaseCollision/btBroadphaseProxy.cpp",
, "BulletCollision/BroadphaseCollision/btCollisionAlgorithm.cpp" "BulletCollision/BroadphaseCollision/btCollisionAlgorithm.cpp",
, "BulletCollision/BroadphaseCollision/btDbvt.cpp" "BulletCollision/BroadphaseCollision/btDbvt.cpp",
, "BulletCollision/BroadphaseCollision/btDbvtBroadphase.cpp" "BulletCollision/BroadphaseCollision/btDbvtBroadphase.cpp",
, "BulletCollision/BroadphaseCollision/btDispatcher.cpp" "BulletCollision/BroadphaseCollision/btDispatcher.cpp",
, "BulletCollision/BroadphaseCollision/btOverlappingPairCache.cpp" "BulletCollision/BroadphaseCollision/btOverlappingPairCache.cpp",
, "BulletCollision/BroadphaseCollision/btQuantizedBvh.cpp" "BulletCollision/BroadphaseCollision/btQuantizedBvh.cpp",
, "BulletCollision/BroadphaseCollision/btSimpleBroadphase.cpp" "BulletCollision/BroadphaseCollision/btSimpleBroadphase.cpp",
, "BulletCollision/CollisionDispatch/btActivatingCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btActivatingCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btBoxBoxCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btBoxBoxCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btBoxBoxDetector.cpp" "BulletCollision/CollisionDispatch/btBoxBoxDetector.cpp",
, "BulletCollision/CollisionDispatch/btCollisionDispatcher.cpp" "BulletCollision/CollisionDispatch/btCollisionDispatcher.cpp",
, "BulletCollision/CollisionDispatch/btCollisionDispatcherMt.cpp" "BulletCollision/CollisionDispatch/btCollisionDispatcherMt.cpp",
, "BulletCollision/CollisionDispatch/btCollisionObject.cpp" "BulletCollision/CollisionDispatch/btCollisionObject.cpp",
, "BulletCollision/CollisionDispatch/btCollisionWorld.cpp" "BulletCollision/CollisionDispatch/btCollisionWorld.cpp",
, "BulletCollision/CollisionDispatch/btCollisionWorldImporter.cpp" "BulletCollision/CollisionDispatch/btCollisionWorldImporter.cpp",
, "BulletCollision/CollisionDispatch/btCompoundCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btCompoundCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btCompoundCompoundCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btCompoundCompoundCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btConvexConcaveCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btConvexConcaveCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btConvexConvexAlgorithm.cpp" "BulletCollision/CollisionDispatch/btConvexConvexAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btConvexPlaneCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btConvexPlaneCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp" "BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btDefaultCollisionConfiguration.cpp" "BulletCollision/CollisionDispatch/btDefaultCollisionConfiguration.cpp",
, "BulletCollision/CollisionDispatch/btEmptyCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btEmptyCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btGhostObject.cpp" "BulletCollision/CollisionDispatch/btGhostObject.cpp",
, "BulletCollision/CollisionDispatch/btHashedSimplePairCache.cpp" "BulletCollision/CollisionDispatch/btHashedSimplePairCache.cpp",
, "BulletCollision/CollisionDispatch/btInternalEdgeUtility.cpp" "BulletCollision/CollisionDispatch/btInternalEdgeUtility.cpp",
, "BulletCollision/CollisionDispatch/btManifoldResult.cpp" "BulletCollision/CollisionDispatch/btManifoldResult.cpp",
, "BulletCollision/CollisionDispatch/btSimulationIslandManager.cpp" "BulletCollision/CollisionDispatch/btSimulationIslandManager.cpp",
, "BulletCollision/CollisionDispatch/btSphereBoxCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btSphereBoxCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btSphereSphereCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btSphereSphereCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btSphereTriangleCollisionAlgorithm.cpp" "BulletCollision/CollisionDispatch/btSphereTriangleCollisionAlgorithm.cpp",
, "BulletCollision/CollisionDispatch/btUnionFind.cpp" "BulletCollision/CollisionDispatch/btUnionFind.cpp",
, "BulletCollision/CollisionDispatch/SphereTriangleDetector.cpp" "BulletCollision/CollisionDispatch/SphereTriangleDetector.cpp",
, "BulletCollision/CollisionShapes/btBoxShape.cpp" "BulletCollision/CollisionShapes/btBoxShape.cpp",
, "BulletCollision/CollisionShapes/btBox2dShape.cpp" "BulletCollision/CollisionShapes/btBox2dShape.cpp",
, "BulletCollision/CollisionShapes/btBvhTriangleMeshShape.cpp" "BulletCollision/CollisionShapes/btBvhTriangleMeshShape.cpp",
, "BulletCollision/CollisionShapes/btCapsuleShape.cpp" "BulletCollision/CollisionShapes/btCapsuleShape.cpp",
, "BulletCollision/CollisionShapes/btCollisionShape.cpp" "BulletCollision/CollisionShapes/btCollisionShape.cpp",
, "BulletCollision/CollisionShapes/btCompoundShape.cpp" "BulletCollision/CollisionShapes/btCompoundShape.cpp",
, "BulletCollision/CollisionShapes/btConcaveShape.cpp" "BulletCollision/CollisionShapes/btConcaveShape.cpp",
, "BulletCollision/CollisionShapes/btConeShape.cpp" "BulletCollision/CollisionShapes/btConeShape.cpp",
, "BulletCollision/CollisionShapes/btConvexHullShape.cpp" "BulletCollision/CollisionShapes/btConvexHullShape.cpp",
, "BulletCollision/CollisionShapes/btConvexInternalShape.cpp" "BulletCollision/CollisionShapes/btConvexInternalShape.cpp",
, "BulletCollision/CollisionShapes/btConvexPointCloudShape.cpp" "BulletCollision/CollisionShapes/btConvexPointCloudShape.cpp",
, "BulletCollision/CollisionShapes/btConvexPolyhedron.cpp" "BulletCollision/CollisionShapes/btConvexPolyhedron.cpp",
, "BulletCollision/CollisionShapes/btConvexShape.cpp" "BulletCollision/CollisionShapes/btConvexShape.cpp",
, "BulletCollision/CollisionShapes/btConvex2dShape.cpp" "BulletCollision/CollisionShapes/btConvex2dShape.cpp",
, "BulletCollision/CollisionShapes/btConvexTriangleMeshShape.cpp" "BulletCollision/CollisionShapes/btConvexTriangleMeshShape.cpp",
, "BulletCollision/CollisionShapes/btCylinderShape.cpp" "BulletCollision/CollisionShapes/btCylinderShape.cpp",
, "BulletCollision/CollisionShapes/btEmptyShape.cpp" "BulletCollision/CollisionShapes/btEmptyShape.cpp",
, "BulletCollision/CollisionShapes/btHeightfieldTerrainShape.cpp" "BulletCollision/CollisionShapes/btHeightfieldTerrainShape.cpp",
, "BulletCollision/CollisionShapes/btMiniSDF.cpp" "BulletCollision/CollisionShapes/btMiniSDF.cpp",
, "BulletCollision/CollisionShapes/btMinkowskiSumShape.cpp" "BulletCollision/CollisionShapes/btMinkowskiSumShape.cpp",
, "BulletCollision/CollisionShapes/btMultimaterialTriangleMeshShape.cpp" "BulletCollision/CollisionShapes/btMultimaterialTriangleMeshShape.cpp",
, "BulletCollision/CollisionShapes/btMultiSphereShape.cpp" "BulletCollision/CollisionShapes/btMultiSphereShape.cpp",
, "BulletCollision/CollisionShapes/btOptimizedBvh.cpp" "BulletCollision/CollisionShapes/btOptimizedBvh.cpp",
, "BulletCollision/CollisionShapes/btPolyhedralConvexShape.cpp" "BulletCollision/CollisionShapes/btPolyhedralConvexShape.cpp",
, "BulletCollision/CollisionShapes/btScaledBvhTriangleMeshShape.cpp" "BulletCollision/CollisionShapes/btScaledBvhTriangleMeshShape.cpp",
, "BulletCollision/CollisionShapes/btSdfCollisionShape.cpp" "BulletCollision/CollisionShapes/btSdfCollisionShape.cpp",
, "BulletCollision/CollisionShapes/btShapeHull.cpp" "BulletCollision/CollisionShapes/btShapeHull.cpp",
, "BulletCollision/CollisionShapes/btSphereShape.cpp" "BulletCollision/CollisionShapes/btSphereShape.cpp",
, "BulletCollision/CollisionShapes/btStaticPlaneShape.cpp" "BulletCollision/CollisionShapes/btStaticPlaneShape.cpp",
, "BulletCollision/CollisionShapes/btStridingMeshInterface.cpp" "BulletCollision/CollisionShapes/btStridingMeshInterface.cpp",
, "BulletCollision/CollisionShapes/btTetrahedronShape.cpp" "BulletCollision/CollisionShapes/btTetrahedronShape.cpp",
, "BulletCollision/CollisionShapes/btTriangleBuffer.cpp" "BulletCollision/CollisionShapes/btTriangleBuffer.cpp",
, "BulletCollision/CollisionShapes/btTriangleCallback.cpp" "BulletCollision/CollisionShapes/btTriangleCallback.cpp",
, "BulletCollision/CollisionShapes/btTriangleIndexVertexArray.cpp" "BulletCollision/CollisionShapes/btTriangleIndexVertexArray.cpp",
, "BulletCollision/CollisionShapes/btTriangleIndexVertexMaterialArray.cpp" "BulletCollision/CollisionShapes/btTriangleIndexVertexMaterialArray.cpp",
, "BulletCollision/CollisionShapes/btTriangleMesh.cpp" "BulletCollision/CollisionShapes/btTriangleMesh.cpp",
, "BulletCollision/CollisionShapes/btTriangleMeshShape.cpp" "BulletCollision/CollisionShapes/btTriangleMeshShape.cpp",
, "BulletCollision/CollisionShapes/btUniformScalingShape.cpp" "BulletCollision/CollisionShapes/btUniformScalingShape.cpp",
, "BulletCollision/Gimpact/btContactProcessing.cpp" "BulletCollision/Gimpact/btContactProcessing.cpp",
, "BulletCollision/Gimpact/btGenericPoolAllocator.cpp" "BulletCollision/Gimpact/btGenericPoolAllocator.cpp",
, "BulletCollision/Gimpact/btGImpactBvh.cpp" "BulletCollision/Gimpact/btGImpactBvh.cpp",
, "BulletCollision/Gimpact/btGImpactCollisionAlgorithm.cpp" "BulletCollision/Gimpact/btGImpactCollisionAlgorithm.cpp",
, "BulletCollision/Gimpact/btGImpactQuantizedBvh.cpp" "BulletCollision/Gimpact/btGImpactQuantizedBvh.cpp",
, "BulletCollision/Gimpact/btGImpactShape.cpp" "BulletCollision/Gimpact/btGImpactShape.cpp",
, "BulletCollision/Gimpact/btTriangleShapeEx.cpp" "BulletCollision/Gimpact/btTriangleShapeEx.cpp",
, "BulletCollision/Gimpact/gim_box_set.cpp" "BulletCollision/Gimpact/gim_box_set.cpp",
, "BulletCollision/Gimpact/gim_contact.cpp" "BulletCollision/Gimpact/gim_contact.cpp",
, "BulletCollision/Gimpact/gim_memory.cpp" "BulletCollision/Gimpact/gim_memory.cpp",
, "BulletCollision/Gimpact/gim_tri_collision.cpp" "BulletCollision/Gimpact/gim_tri_collision.cpp",
, "BulletCollision/NarrowPhaseCollision/btContinuousConvexCollision.cpp" "BulletCollision/NarrowPhaseCollision/btContinuousConvexCollision.cpp",
, "BulletCollision/NarrowPhaseCollision/btConvexCast.cpp" "BulletCollision/NarrowPhaseCollision/btConvexCast.cpp",
, "BulletCollision/NarrowPhaseCollision/btGjkConvexCast.cpp" "BulletCollision/NarrowPhaseCollision/btGjkConvexCast.cpp",
, "BulletCollision/NarrowPhaseCollision/btGjkEpa2.cpp" "BulletCollision/NarrowPhaseCollision/btGjkEpa2.cpp",
, "BulletCollision/NarrowPhaseCollision/btGjkEpaPenetrationDepthSolver.cpp" "BulletCollision/NarrowPhaseCollision/btGjkEpaPenetrationDepthSolver.cpp",
, "BulletCollision/NarrowPhaseCollision/btGjkPairDetector.cpp" "BulletCollision/NarrowPhaseCollision/btGjkPairDetector.cpp",
, "BulletCollision/NarrowPhaseCollision/btMinkowskiPenetrationDepthSolver.cpp" "BulletCollision/NarrowPhaseCollision/btMinkowskiPenetrationDepthSolver.cpp",
, "BulletCollision/NarrowPhaseCollision/btPersistentManifold.cpp" "BulletCollision/NarrowPhaseCollision/btPersistentManifold.cpp",
, "BulletCollision/NarrowPhaseCollision/btRaycastCallback.cpp" "BulletCollision/NarrowPhaseCollision/btRaycastCallback.cpp",
, "BulletCollision/NarrowPhaseCollision/btSubSimplexConvexCast.cpp" "BulletCollision/NarrowPhaseCollision/btSubSimplexConvexCast.cpp",
, "BulletCollision/NarrowPhaseCollision/btVoronoiSimplexSolver.cpp" "BulletCollision/NarrowPhaseCollision/btVoronoiSimplexSolver.cpp",
, "BulletCollision/NarrowPhaseCollision/btPolyhedralContactClipping.cpp" "BulletCollision/NarrowPhaseCollision/btPolyhedralContactClipping.cpp",
# BulletDynamics # BulletDynamics
, "BulletDynamics/Character/btKinematicCharacterController.cpp" "BulletDynamics/Character/btKinematicCharacterController.cpp",
, "BulletDynamics/ConstraintSolver/btConeTwistConstraint.cpp" "BulletDynamics/ConstraintSolver/btConeTwistConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btContactConstraint.cpp" "BulletDynamics/ConstraintSolver/btContactConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btFixedConstraint.cpp" "BulletDynamics/ConstraintSolver/btFixedConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btGearConstraint.cpp" "BulletDynamics/ConstraintSolver/btGearConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btGeneric6DofConstraint.cpp" "BulletDynamics/ConstraintSolver/btGeneric6DofConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btGeneric6DofSpringConstraint.cpp" "BulletDynamics/ConstraintSolver/btGeneric6DofSpringConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btGeneric6DofSpring2Constraint.cpp" "BulletDynamics/ConstraintSolver/btGeneric6DofSpring2Constraint.cpp",
, "BulletDynamics/ConstraintSolver/btHinge2Constraint.cpp" "BulletDynamics/ConstraintSolver/btHinge2Constraint.cpp",
, "BulletDynamics/ConstraintSolver/btHingeConstraint.cpp" "BulletDynamics/ConstraintSolver/btHingeConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btPoint2PointConstraint.cpp" "BulletDynamics/ConstraintSolver/btPoint2PointConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolver.cpp" "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolver.cpp",
, "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolverMt.cpp" "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolverMt.cpp",
, "BulletDynamics/ConstraintSolver/btBatchedConstraints.cpp" "BulletDynamics/ConstraintSolver/btBatchedConstraints.cpp",
, "BulletDynamics/ConstraintSolver/btNNCGConstraintSolver.cpp" "BulletDynamics/ConstraintSolver/btNNCGConstraintSolver.cpp",
, "BulletDynamics/ConstraintSolver/btSliderConstraint.cpp" "BulletDynamics/ConstraintSolver/btSliderConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btSolve2LinearConstraint.cpp" "BulletDynamics/ConstraintSolver/btSolve2LinearConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btTypedConstraint.cpp" "BulletDynamics/ConstraintSolver/btTypedConstraint.cpp",
, "BulletDynamics/ConstraintSolver/btUniversalConstraint.cpp" "BulletDynamics/ConstraintSolver/btUniversalConstraint.cpp",
, "BulletDynamics/Dynamics/btDiscreteDynamicsWorld.cpp" "BulletDynamics/Dynamics/btDiscreteDynamicsWorld.cpp",
, "BulletDynamics/Dynamics/btDiscreteDynamicsWorldMt.cpp" "BulletDynamics/Dynamics/btDiscreteDynamicsWorldMt.cpp",
, "BulletDynamics/Dynamics/btSimulationIslandManagerMt.cpp" "BulletDynamics/Dynamics/btSimulationIslandManagerMt.cpp",
, "BulletDynamics/Dynamics/btRigidBody.cpp" "BulletDynamics/Dynamics/btRigidBody.cpp",
, "BulletDynamics/Dynamics/btSimpleDynamicsWorld.cpp" "BulletDynamics/Dynamics/btSimpleDynamicsWorld.cpp",
#, "BulletDynamics/Dynamics/Bullet-C-API.cpp" # "BulletDynamics/Dynamics/Bullet-C-API.cpp",
, "BulletDynamics/Vehicle/btRaycastVehicle.cpp" "BulletDynamics/Vehicle/btRaycastVehicle.cpp",
, "BulletDynamics/Vehicle/btWheelInfo.cpp" "BulletDynamics/Vehicle/btWheelInfo.cpp",
, "BulletDynamics/Featherstone/btMultiBody.cpp" "BulletDynamics/Featherstone/btMultiBody.cpp",
, "BulletDynamics/Featherstone/btMultiBodyConstraint.cpp" "BulletDynamics/Featherstone/btMultiBodyConstraint.cpp",
, "BulletDynamics/Featherstone/btMultiBodyConstraintSolver.cpp" "BulletDynamics/Featherstone/btMultiBodyConstraintSolver.cpp",
, "BulletDynamics/Featherstone/btMultiBodyDynamicsWorld.cpp" "BulletDynamics/Featherstone/btMultiBodyDynamicsWorld.cpp",
, "BulletDynamics/Featherstone/btMultiBodyFixedConstraint.cpp" "BulletDynamics/Featherstone/btMultiBodyFixedConstraint.cpp",
, "BulletDynamics/Featherstone/btMultiBodyGearConstraint.cpp" "BulletDynamics/Featherstone/btMultiBodyGearConstraint.cpp",
, "BulletDynamics/Featherstone/btMultiBodyJointLimitConstraint.cpp" "BulletDynamics/Featherstone/btMultiBodyJointLimitConstraint.cpp",
, "BulletDynamics/Featherstone/btMultiBodyJointMotor.cpp" "BulletDynamics/Featherstone/btMultiBodyJointMotor.cpp",
, "BulletDynamics/Featherstone/btMultiBodyMLCPConstraintSolver.cpp" "BulletDynamics/Featherstone/btMultiBodyMLCPConstraintSolver.cpp",
, "BulletDynamics/Featherstone/btMultiBodyPoint2Point.cpp" "BulletDynamics/Featherstone/btMultiBodyPoint2Point.cpp",
, "BulletDynamics/Featherstone/btMultiBodySliderConstraint.cpp" "BulletDynamics/Featherstone/btMultiBodySliderConstraint.cpp",
, "BulletDynamics/Featherstone/btMultiBodySphericalJointMotor.cpp" "BulletDynamics/Featherstone/btMultiBodySphericalJointMotor.cpp",
, "BulletDynamics/MLCPSolvers/btDantzigLCP.cpp" "BulletDynamics/MLCPSolvers/btDantzigLCP.cpp",
, "BulletDynamics/MLCPSolvers/btMLCPSolver.cpp" "BulletDynamics/MLCPSolvers/btMLCPSolver.cpp",
, "BulletDynamics/MLCPSolvers/btLemkeAlgorithm.cpp" "BulletDynamics/MLCPSolvers/btLemkeAlgorithm.cpp",
# BulletInverseDynamics # BulletInverseDynamics
, "BulletInverseDynamics/IDMath.cpp" "BulletInverseDynamics/IDMath.cpp",
, "BulletInverseDynamics/MultiBodyTree.cpp" "BulletInverseDynamics/MultiBodyTree.cpp",
, "BulletInverseDynamics/details/MultiBodyTreeInitCache.cpp" "BulletInverseDynamics/details/MultiBodyTreeInitCache.cpp",
, "BulletInverseDynamics/details/MultiBodyTreeImpl.cpp" "BulletInverseDynamics/details/MultiBodyTreeImpl.cpp",
# BulletSoftBody # BulletSoftBody
, "BulletSoftBody/btSoftBody.cpp" "BulletSoftBody/btSoftBody.cpp",
, "BulletSoftBody/btSoftBodyConcaveCollisionAlgorithm.cpp" "BulletSoftBody/btSoftBodyConcaveCollisionAlgorithm.cpp",
, "BulletSoftBody/btSoftBodyHelpers.cpp" "BulletSoftBody/btSoftBodyHelpers.cpp",
, "BulletSoftBody/btSoftBodyRigidBodyCollisionConfiguration.cpp" "BulletSoftBody/btSoftBodyRigidBodyCollisionConfiguration.cpp",
, "BulletSoftBody/btSoftRigidCollisionAlgorithm.cpp" "BulletSoftBody/btSoftRigidCollisionAlgorithm.cpp",
, "BulletSoftBody/btSoftRigidDynamicsWorld.cpp" "BulletSoftBody/btSoftRigidDynamicsWorld.cpp",
, "BulletSoftBody/btSoftMultiBodyDynamicsWorld.cpp" "BulletSoftBody/btSoftMultiBodyDynamicsWorld.cpp",
, "BulletSoftBody/btSoftSoftCollisionAlgorithm.cpp" "BulletSoftBody/btSoftSoftCollisionAlgorithm.cpp",
, "BulletSoftBody/btDefaultSoftBodySolver.cpp" "BulletSoftBody/btDefaultSoftBodySolver.cpp",
, "BulletSoftBody/btDeformableBackwardEulerObjective.cpp" "BulletSoftBody/btDeformableBackwardEulerObjective.cpp",
, "BulletSoftBody/btDeformableBodySolver.cpp" "BulletSoftBody/btDeformableBodySolver.cpp",
, "BulletSoftBody/btDeformableMultiBodyConstraintSolver.cpp" "BulletSoftBody/btDeformableMultiBodyConstraintSolver.cpp",
, "BulletSoftBody/btDeformableContactProjection.cpp" "BulletSoftBody/btDeformableContactProjection.cpp",
, "BulletSoftBody/btDeformableMultiBodyDynamicsWorld.cpp" "BulletSoftBody/btDeformableMultiBodyDynamicsWorld.cpp",
, "BulletSoftBody/btDeformableContactConstraint.cpp" "BulletSoftBody/btDeformableContactConstraint.cpp",
# clew # clew
, "clew/clew.c" "clew/clew.c",
# LinearMath # LinearMath
, "LinearMath/btAlignedAllocator.cpp" "LinearMath/btAlignedAllocator.cpp",
, "LinearMath/btConvexHull.cpp" "LinearMath/btConvexHull.cpp",
, "LinearMath/btConvexHullComputer.cpp" "LinearMath/btConvexHullComputer.cpp",
, "LinearMath/btGeometryUtil.cpp" "LinearMath/btGeometryUtil.cpp",
, "LinearMath/btPolarDecomposition.cpp" "LinearMath/btPolarDecomposition.cpp",
, "LinearMath/btQuickprof.cpp" "LinearMath/btQuickprof.cpp",
, "LinearMath/btSerializer.cpp" "LinearMath/btSerializer.cpp",
, "LinearMath/btSerializer64.cpp" "LinearMath/btSerializer64.cpp",
, "LinearMath/btThreads.cpp" "LinearMath/btThreads.cpp",
, "LinearMath/btVector3.cpp" "LinearMath/btVector3.cpp",
, "LinearMath/TaskScheduler/btTaskScheduler.cpp" "LinearMath/TaskScheduler/btTaskScheduler.cpp",
, "LinearMath/TaskScheduler/btThreadSupportPosix.cpp" "LinearMath/TaskScheduler/btThreadSupportPosix.cpp",
, "LinearMath/TaskScheduler/btThreadSupportWin32.cpp" "LinearMath/TaskScheduler/btThreadSupportWin32.cpp",
] ]
thirdparty_sources = [thirdparty_dir + file for file in bullet2_src] thirdparty_sources = [thirdparty_dir + file for file in bullet2_src]
# Treat Bullet headers as system headers to avoid raising warnings. Not supported on MSVC. # Treat Bullet headers as system headers to avoid raising warnings. Not supported on MSVC.
if not env.msvc: if not env.msvc:
env_bullet.Append(CPPFLAGS=['-isystem', Dir(thirdparty_dir).path]) env_bullet.Append(CPPFLAGS=["-isystem", Dir(thirdparty_dir).path])
else: else:
env_bullet.Prepend(CPPPATH=[thirdparty_dir]) env_bullet.Prepend(CPPPATH=[thirdparty_dir])
# if env['target'] == "debug" or env['target'] == "release_debug": # if env['target'] == "debug" or env['target'] == "release_debug":


@ -1,14 +1,17 @@
def can_build(env, platform): def can_build(env, platform):
return True return True
def configure(env): def configure(env):
pass pass
def get_doc_classes(): def get_doc_classes():
return [ return [
"BulletPhysicsDirectBodyState", "BulletPhysicsDirectBodyState",
"BulletPhysicsServer", "BulletPhysicsServer",
] ]
def get_doc_path(): def get_doc_path():
return "doc_classes" return "doc_classes"


@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_camera = env_modules.Clone() env_camera = env_modules.Clone()
@ -10,7 +10,7 @@ if env["platform"] == "iphone":
modules_sources = [] modules_sources = []
env_camera.add_source_files(modules_sources, "register_types.cpp") env_camera.add_source_files(modules_sources, "register_types.cpp")
env_camera.add_source_files(modules_sources, "camera_ios.mm") env_camera.add_source_files(modules_sources, "camera_ios.mm")
mod_lib = env_modules.add_library('#bin/libgodot_camera_module' + env['LIBSUFFIX'], modules_sources) mod_lib = env_modules.add_library("#bin/libgodot_camera_module" + env["LIBSUFFIX"], modules_sources)
elif env["platform"] == "windows": elif env["platform"] == "windows":
env_camera.add_source_files(env.modules_sources, "register_types.cpp") env_camera.add_source_files(env.modules_sources, "register_types.cpp")
@ -19,4 +19,3 @@ elif env["platform"] == "windows":
elif env["platform"] == "osx": elif env["platform"] == "osx":
env_camera.add_source_files(env.modules_sources, "register_types.cpp") env_camera.add_source_files(env.modules_sources, "register_types.cpp")
env_camera.add_source_files(env.modules_sources, "camera_osx.mm") env_camera.add_source_files(env.modules_sources, "camera_osx.mm")


@ -1,5 +1,6 @@
def can_build(env, platform): def can_build(env, platform):
return platform == 'iphone' or platform == 'osx' or platform == 'windows' return platform == "iphone" or platform == "osx" or platform == "windows"
def configure(env): def configure(env):
pass pass


@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_csg = env_modules.Clone() env_csg = env_modules.Clone()


@ -1,9 +1,11 @@
def can_build(env, platform): def can_build(env, platform):
return True return True
def configure(env): def configure(env):
pass pass
def get_doc_classes(): def get_doc_classes():
return [ return [
"CSGBox", "CSGBox",
@ -17,5 +19,6 @@ def get_doc_classes():
"CSGTorus", "CSGTorus",
] ]
def get_doc_path(): def get_doc_path():
return "doc_classes" return "doc_classes"


@ -1,14 +1,14 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_cvtt = env_modules.Clone() env_cvtt = env_modules.Clone()
# Thirdparty source files # Thirdparty source files
thirdparty_dir = "#thirdparty/cvtt/" thirdparty_dir = "#thirdparty/cvtt/"
thirdparty_sources = [ thirdparty_sources = [
"ConvectionKernels.cpp" "ConvectionKernels.cpp",
] ]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]


@ -1,5 +1,6 @@
def can_build(env, platform): def can_build(env, platform):
return env['tools'] return env["tools"]
def configure(env): def configure(env):
pass pass


@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_dds = env_modules.Clone() env_dds = env_modules.Clone()


@ -1,5 +1,6 @@
def can_build(env, platform): def can_build(env, platform):
return True return True
def configure(env): def configure(env):
pass pass


@ -1,13 +1,13 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_enet = env_modules.Clone() env_enet = env_modules.Clone()
# Thirdparty source files # Thirdparty source files
if env['builtin_enet']: if env["builtin_enet"]:
thirdparty_dir = "#thirdparty/enet/" thirdparty_dir = "#thirdparty/enet/"
thirdparty_sources = [ thirdparty_sources = [
"godot.cpp", "godot.cpp",


@ -1,13 +1,16 @@
def can_build(env, platform): def can_build(env, platform):
return True return True
def configure(env): def configure(env):
pass pass
def get_doc_classes(): def get_doc_classes():
return [ return [
"NetworkedMultiplayerENet", "NetworkedMultiplayerENet",
] ]
def get_doc_path(): def get_doc_path():
return "doc_classes" return "doc_classes"


@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
Import('env') Import("env")
Import('env_modules') Import("env_modules")
env_etc = env_modules.Clone() env_etc = env_modules.Clone()
@ -9,21 +9,21 @@ env_etc = env_modules.Clone()
# Not unbundled so far since not widespread as shared library # Not unbundled so far since not widespread as shared library
thirdparty_dir = "#thirdparty/etc2comp/" thirdparty_dir = "#thirdparty/etc2comp/"
thirdparty_sources = [ thirdparty_sources = [
"EtcBlock4x4.cpp", "EtcBlock4x4.cpp",
"EtcBlock4x4Encoding.cpp", "EtcBlock4x4Encoding.cpp",
"EtcBlock4x4Encoding_ETC1.cpp", "EtcBlock4x4Encoding_ETC1.cpp",
"EtcBlock4x4Encoding_R11.cpp", "EtcBlock4x4Encoding_R11.cpp",
"EtcBlock4x4Encoding_RG11.cpp", "EtcBlock4x4Encoding_RG11.cpp",
"EtcBlock4x4Encoding_RGB8A1.cpp", "EtcBlock4x4Encoding_RGB8A1.cpp",
"EtcBlock4x4Encoding_RGB8.cpp", "EtcBlock4x4Encoding_RGB8.cpp",
"EtcBlock4x4Encoding_RGBA8.cpp", "EtcBlock4x4Encoding_RGBA8.cpp",
"Etc.cpp", "Etc.cpp",
"EtcDifferentialTrys.cpp", "EtcDifferentialTrys.cpp",
"EtcFilter.cpp", "EtcFilter.cpp",
"EtcImage.cpp", "EtcImage.cpp",
"EtcIndividualTrys.cpp", "EtcIndividualTrys.cpp",
"EtcMath.cpp", "EtcMath.cpp",
"EtcSortedBlockList.cpp", "EtcSortedBlockList.cpp",
] ]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources] thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]


@ -1,5 +1,6 @@
def can_build(env, platform): def can_build(env, platform):
return env['tools'] return env["tools"]
def configure(env): def configure(env):
pass pass


@@ -1,12 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_freetype = env_modules.Clone()
# Thirdparty source files
-if env['builtin_freetype']:
+if env["builtin_freetype"]:
    thirdparty_dir = "#thirdparty/freetype/"
    thirdparty_sources = [
        "src/autofit/autofit.c",
@@ -53,31 +53,31 @@ if env['builtin_freetype']:
    ]
    thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
-    if env['platform'] == 'uwp':
+    if env["platform"] == "uwp":
        # Include header for UWP to fix build issues
-        env_freetype.Append(CCFLAGS=['/FI', '"modules/freetype/uwpdef.h"'])
+        env_freetype.Append(CCFLAGS=["/FI", '"modules/freetype/uwpdef.h"'])
        # Globally too, as freetype is used in scene (see bottom)
-        env.Append(CCFLAGS=['/FI', '"modules/freetype/uwpdef.h"'])
+        env.Append(CCFLAGS=["/FI", '"modules/freetype/uwpdef.h"'])
    env_freetype.Prepend(CPPPATH=[thirdparty_dir + "/include"])
    # Also needed in main env for scene/
    env.Prepend(CPPPATH=[thirdparty_dir + "/include"])
-    env_freetype.Append(CPPDEFINES=['FT2_BUILD_LIBRARY', 'FT_CONFIG_OPTION_USE_PNG'])
+    env_freetype.Append(CPPDEFINES=["FT2_BUILD_LIBRARY", "FT_CONFIG_OPTION_USE_PNG"])
-    if (env['target'] == 'debug'):
-        env_freetype.Append(CPPDEFINES=['ZLIB_DEBUG'])
+    if env["target"] == "debug":
+        env_freetype.Append(CPPDEFINES=["ZLIB_DEBUG"])
    # Also requires libpng headers
-    if env['builtin_libpng']:
+    if env["builtin_libpng"]:
        env_freetype.Prepend(CPPPATH=["#thirdparty/libpng"])
-    sfnt = thirdparty_dir + 'src/sfnt/sfnt.c'
+    sfnt = thirdparty_dir + "src/sfnt/sfnt.c"
    # Must be done after all CPPDEFINES are being set so we can copy them.
-    if env['platform'] == 'javascript':
+    if env["platform"] == "javascript":
        # Forcibly undefine this macro so SIMD is not used in this file,
        # since currently unsupported in WASM
        tmp_env = env_freetype.Clone()
-        tmp_env.Append(CPPFLAGS=['-U__OPTIMIZE__'])
+        tmp_env.Append(CPPFLAGS=["-U__OPTIMIZE__"])
        sfnt = tmp_env.Object(sfnt)
    thirdparty_sources += [sfnt]
@@ -91,7 +91,7 @@ if env['builtin_freetype']:
    # and then plain strings for system library. We insert between the two.
    inserted = False
    for idx, linklib in enumerate(env["LIBS"]):
        if isinstance(linklib, (str, bytes)):  # first system lib such as "X11", otherwise SCons lib object
            env["LIBS"].insert(idx, lib)
            inserted = True
            break
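The tail of this diff shows the trick used to link the bundled FreeType library ahead of system libraries: env["LIBS"] holds SCons library nodes first and plain strings (system libs) after, so the loop finds the first string and inserts the new library just before it. A self-contained sketch with an ordinary list standing in for env["LIBS"] — the class and names below are illustrative placeholders, not the SCons API:

# Illustrative stand-in for the insertion logic shown above.
class LibNode:  # hypothetical placeholder for a SCons library node object
    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "LibNode(%r)" % self.name


libs = [LibNode("scene"), LibNode("servers"), "X11", "pthread"]  # mimics env["LIBS"]
lib = LibNode("freetype_builtin")

inserted = False
for idx, linklib in enumerate(libs):
    if isinstance(linklib, (str, bytes)):  # first system lib marks the boundary
        libs.insert(idx, lib)
        inserted = True
        break
if not inserted:  # plausible fallback when no system libs are present (not shown in the hunk above)
    libs.append(lib)

print(libs)  # [LibNode('scene'), LibNode('servers'), LibNode('freetype_builtin'), 'X11', 'pthread']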

View file

@@ -1,5 +1,6 @@
def can_build(env, platform):
    return True
def configure(env):
    pass

View file

@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_gdnative = env_modules.Clone()
env_gdnative.add_source_files(env.modules_sources, "gdnative.cpp")
@@ -12,9 +12,9 @@ env_gdnative.add_source_files(env.modules_sources, "nativescript/*.cpp")
env_gdnative.add_source_files(env.modules_sources, "gdnative_library_singleton_editor.cpp")
env_gdnative.add_source_files(env.modules_sources, "gdnative_library_editor_plugin.cpp")
-env_gdnative.Prepend(CPPPATH=['#modules/gdnative/include/'])
-Export('env_gdnative')
+env_gdnative.Prepend(CPPPATH=["#modules/gdnative/include/"])
+Export("env_gdnative")
SConscript("net/SCsub")
SConscript("arvr/SCsub")
@@ -25,8 +25,11 @@ SConscript("videodecoder/SCsub")
from platform_methods import run_in_subprocess
import gdnative_builders
-_, gensource = env_gdnative.CommandNoCache(['include/gdnative_api_struct.gen.h', 'gdnative_api_struct.gen.cpp'],
-        'gdnative_api.json', run_in_subprocess(gdnative_builders.build_gdnative_api_struct))
+_, gensource = env_gdnative.CommandNoCache(
+    ["include/gdnative_api_struct.gen.h", "gdnative_api_struct.gen.cpp"],
+    "gdnative_api.json",
+    run_in_subprocess(gdnative_builders.build_gdnative_api_struct),
+)
env_gdnative.add_source_files(env.modules_sources, [gensource])
env.use_ptrcall = True
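The CommandNoCache rewrite above is black's standard treatment of a call that no longer fits within the line length: the call is exploded with one argument per line and a trailing comma is added, which keeps the call in its exploded form on subsequent runs. A generic before/after sketch of the same transformation, using purely hypothetical function and argument names:

# Hypothetical example of a call that exceeds the configured line length, before formatting:
result = some_builder.run_command(["include/generated_header.gen.h", "generated_source.gen.cpp"], "api_description.json", post_process_callback)

# ...and how black wraps it once it cannot fit on one line:
result = some_builder.run_command(
    ["include/generated_header.gen.h", "generated_source.gen.cpp"],
    "api_description.json",
    post_process_callback,
)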

View file

@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
-Import('env_gdnative')
+Import("env")
+Import("env_gdnative")
-env_gdnative.add_source_files(env.modules_sources, '*.cpp')
+env_gdnative.add_source_files(env.modules_sources, "*.cpp")

View file

@@ -1,5 +1,6 @@
def can_build(env, platform):
    return True
def configure(env):
    pass

View file

@@ -1,9 +1,11 @@
def can_build(env, platform):
    return True
def configure(env):
    env.use_ptrcall = True
def get_doc_classes():
    return [
        "@NativeScript",
@@ -20,5 +22,6 @@ def get_doc_classes():
        "WebRTCDataChannelGDNative",
    ]
def get_doc_path():
    return "doc_classes"

View file

@@ -8,209 +8,249 @@ from platform_methods import subprocess_main
def _spaced(e):
-    return e if e[-1] == '*' else e + ' '
+    return e if e[-1] == "*" else e + " "
def _build_gdnative_api_struct_header(api):
    out = [
-        '/* THIS FILE IS GENERATED DO NOT EDIT */',
-        '#ifndef GODOT_GDNATIVE_API_STRUCT_H',
-        '#define GODOT_GDNATIVE_API_STRUCT_H',
-        '',
-        '#include <gdnative/gdnative.h>',
-        '#include <android/godot_android.h>',
-        '#include <arvr/godot_arvr.h>',
-        '#include <nativescript/godot_nativescript.h>',
-        '#include <net/godot_net.h>',
-        '#include <pluginscript/godot_pluginscript.h>',
-        '#include <videodecoder/godot_videodecoder.h>',
-        '',
-        '#ifdef __cplusplus',
+        "/* THIS FILE IS GENERATED DO NOT EDIT */",
+        "#ifndef GODOT_GDNATIVE_API_STRUCT_H",
+        "#define GODOT_GDNATIVE_API_STRUCT_H",
+        "",
+        "#include <gdnative/gdnative.h>",
+        "#include <android/godot_android.h>",
+        "#include <arvr/godot_arvr.h>",
+        "#include <nativescript/godot_nativescript.h>",
+        "#include <net/godot_net.h>",
+        "#include <pluginscript/godot_pluginscript.h>",
+        "#include <videodecoder/godot_videodecoder.h>",
+        "",
+        "#ifdef __cplusplus",
        'extern "C" {',
-        '#endif',
-        '',
-        'enum GDNATIVE_API_TYPES {',
-        '\tGDNATIVE_' + api['core']['type'] + ','
+        "#endif",
+        "",
+        "enum GDNATIVE_API_TYPES {",
+        "\tGDNATIVE_" + api["core"]["type"] + ",",
    ]
-    for ext in api['extensions']:
-        out += ['\tGDNATIVE_EXT_' + ext['type'] + ',']
-    out += ['};', '']
+    for ext in api["extensions"]:
+        out += ["\tGDNATIVE_EXT_" + ext["type"] + ","]
+    out += ["};", ""]
    def generate_extension_struct(name, ext, include_version=True):
        ret_val = []
-        if ext['next']:
-            ret_val += generate_extension_struct(name, ext['next'])
+        if ext["next"]:
+            ret_val += generate_extension_struct(name, ext["next"])
        ret_val += [
-            'typedef struct godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct {',
-            '\tunsigned int type;',
-            '\tgodot_gdnative_api_version version;',
-            '\tconst godot_gdnative_api_struct *next;'
+            "typedef struct godot_gdnative_ext_"
+            + name
+            + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"])))
+            + "_api_struct {",
+            "\tunsigned int type;",
+            "\tgodot_gdnative_api_version version;",
+            "\tconst godot_gdnative_api_struct *next;",
        ]
-        for funcdef in ext['api']:
-            args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
-            ret_val.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))
-        ret_val += ['} godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct;', '']
+        for funcdef in ext["api"]:
+            args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]])
+            ret_val.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args))
+        ret_val += [
+            "} godot_gdnative_ext_"
+            + name
+            + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"])))
+            + "_api_struct;",
+            "",
+        ]
        return ret_val
    def generate_core_extension_struct(core):
        ret_val = []
-        if core['next']:
-            ret_val += generate_core_extension_struct(core['next'])
+        if core["next"]:
+            ret_val += generate_core_extension_struct(core["next"])
        ret_val += [
-            'typedef struct godot_gdnative_core_' + ('{0}_{1}'.format(core['version']['major'], core['version']['minor'])) + '_api_struct {',
-            '\tunsigned int type;',
-            '\tgodot_gdnative_api_version version;',
-            '\tconst godot_gdnative_api_struct *next;',
+            "typedef struct godot_gdnative_core_"
+            + ("{0}_{1}".format(core["version"]["major"], core["version"]["minor"]))
+            + "_api_struct {",
+            "\tunsigned int type;",
+            "\tgodot_gdnative_api_version version;",
+            "\tconst godot_gdnative_api_struct *next;",
        ]
-        for funcdef in core['api']:
-            args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
-            ret_val.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))
-        ret_val += ['} godot_gdnative_core_' + '{0}_{1}'.format(core['version']['major'], core['version']['minor']) + '_api_struct;', '']
+        for funcdef in core["api"]:
+            args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]])
+            ret_val.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args))
+        ret_val += [
+            "} godot_gdnative_core_"
+            + "{0}_{1}".format(core["version"]["major"], core["version"]["minor"])
+            + "_api_struct;",
+            "",
+        ]
        return ret_val
-    for ext in api['extensions']:
-        name = ext['name']
+    for ext in api["extensions"]:
+        name = ext["name"]
        out += generate_extension_struct(name, ext, False)
-    if api['core']['next']:
-        out += generate_core_extension_struct(api['core']['next'])
+    if api["core"]["next"]:
+        out += generate_core_extension_struct(api["core"]["next"])
    out += [
-        'typedef struct godot_gdnative_core_api_struct {',
-        '\tunsigned int type;',
-        '\tgodot_gdnative_api_version version;',
-        '\tconst godot_gdnative_api_struct *next;',
-        '\tunsigned int num_extensions;',
-        '\tconst godot_gdnative_api_struct **extensions;',
+        "typedef struct godot_gdnative_core_api_struct {",
+        "\tunsigned int type;",
+        "\tgodot_gdnative_api_version version;",
+        "\tconst godot_gdnative_api_struct *next;",
+        "\tunsigned int num_extensions;",
+        "\tconst godot_gdnative_api_struct **extensions;",
    ]
-    for funcdef in api['core']['api']:
-        args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
-        out.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))
+    for funcdef in api["core"]["api"]:
+        args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]])
+        out.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args))
    out += [
-        '} godot_gdnative_core_api_struct;',
-        '',
-        '#ifdef __cplusplus',
-        '}',
-        '#endif',
-        '',
-        '#endif // GODOT_GDNATIVE_API_STRUCT_H',
-        ''
+        "} godot_gdnative_core_api_struct;",
+        "",
+        "#ifdef __cplusplus",
+        "}",
+        "#endif",
+        "",
+        "#endif // GODOT_GDNATIVE_API_STRUCT_H",
+        "",
    ]
-    return '\n'.join(out)
+    return "\n".join(out)
def _build_gdnative_api_struct_source(api):
-    out = [
-        '/* THIS FILE IS GENERATED DO NOT EDIT */',
-        '',
-        '#include <gdnative_api_struct.gen.h>',
-        ''
-    ]
+    out = ["/* THIS FILE IS GENERATED DO NOT EDIT */", "", "#include <gdnative_api_struct.gen.h>", ""]
    def get_extension_struct_name(name, ext, include_version=True):
-        return 'godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct'
+        return (
+            "godot_gdnative_ext_"
+            + name
+            + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"])))
+            + "_api_struct"
+        )
    def get_extension_struct_instance_name(name, ext, include_version=True):
-        return 'api_extension_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_struct'
+        return (
+            "api_extension_"
+            + name
+            + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"])))
+            + "_struct"
+        )
    def get_extension_struct_definition(name, ext, include_version=True):
        ret_val = []
-        if ext['next']:
-            ret_val += get_extension_struct_definition(name, ext['next'])
+        if ext["next"]:
+            ret_val += get_extension_struct_definition(name, ext["next"])
        ret_val += [
-            'extern const ' + get_extension_struct_name(name, ext, include_version) + ' ' + get_extension_struct_instance_name(name, ext, include_version) + ' = {',
-            '\tGDNATIVE_EXT_' + ext['type'] + ',',
-            '\t{' + str(ext['version']['major']) + ', ' + str(ext['version']['minor']) + '},',
-            '\t' + ('NULL' if not ext['next'] else ('(const godot_gdnative_api_struct *)&' + get_extension_struct_instance_name(name, ext['next']))) + ','
+            "extern const "
+            + get_extension_struct_name(name, ext, include_version)
+            + " "
+            + get_extension_struct_instance_name(name, ext, include_version)
+            + " = {",
+            "\tGDNATIVE_EXT_" + ext["type"] + ",",
+            "\t{" + str(ext["version"]["major"]) + ", " + str(ext["version"]["minor"]) + "},",
+            "\t"
+            + (
+                "NULL"
+                if not ext["next"]
+                else ("(const godot_gdnative_api_struct *)&" + get_extension_struct_instance_name(name, ext["next"]))
+            )
+            + ",",
        ]
-        for funcdef in ext['api']:
-            ret_val.append('\t%s,' % funcdef['name'])
-        ret_val += ['};\n']
+        for funcdef in ext["api"]:
+            ret_val.append("\t%s," % funcdef["name"])
+        ret_val += ["};\n"]
        return ret_val
    def get_core_struct_definition(core):
        ret_val = []
-        if core['next']:
-            ret_val += get_core_struct_definition(core['next'])
+        if core["next"]:
+            ret_val += get_core_struct_definition(core["next"])
        ret_val += [
-            'extern const godot_gdnative_core_' + ('{0}_{1}_api_struct api_{0}_{1}'.format(core['version']['major'], core['version']['minor'])) + ' = {',
-            '\tGDNATIVE_' + core['type'] + ',',
-            '\t{' + str(core['version']['major']) + ', ' + str(core['version']['minor']) + '},',
-            '\t' + ('NULL' if not core['next'] else ('(const godot_gdnative_api_struct *)& api_{0}_{1}'.format(core['next']['version']['major'], core['next']['version']['minor']))) + ','
+            "extern const godot_gdnative_core_"
+            + ("{0}_{1}_api_struct api_{0}_{1}".format(core["version"]["major"], core["version"]["minor"]))
+            + " = {",
+            "\tGDNATIVE_" + core["type"] + ",",
+            "\t{" + str(core["version"]["major"]) + ", " + str(core["version"]["minor"]) + "},",
+            "\t"
+            + (
+                "NULL"
+                if not core["next"]
+                else (
+                    "(const godot_gdnative_api_struct *)& api_{0}_{1}".format(
+                        core["next"]["version"]["major"], core["next"]["version"]["minor"]
+                    )
+                )
+            )
+            + ",",
        ]
-        for funcdef in core['api']:
-            ret_val.append('\t%s,' % funcdef['name'])
-        ret_val += ['};\n']
+        for funcdef in core["api"]:
+            ret_val.append("\t%s," % funcdef["name"])
+        ret_val += ["};\n"]
        return ret_val
-    for ext in api['extensions']:
-        name = ext['name']
+    for ext in api["extensions"]:
+        name = ext["name"]
        out += get_extension_struct_definition(name, ext, False)
-    out += ['', 'const godot_gdnative_api_struct *gdnative_extensions_pointers[] = {']
-    for ext in api['extensions']:
-        name = ext['name']
-        out += ['\t(godot_gdnative_api_struct *)&api_extension_' + name + '_struct,']
-    out += ['};\n']
-    if api['core']['next']:
-        out += get_core_struct_definition(api['core']['next'])
+    out += ["", "const godot_gdnative_api_struct *gdnative_extensions_pointers[] = {"]
+    for ext in api["extensions"]:
+        name = ext["name"]
+        out += ["\t(godot_gdnative_api_struct *)&api_extension_" + name + "_struct,"]
+    out += ["};\n"]
+    if api["core"]["next"]:
+        out += get_core_struct_definition(api["core"]["next"])
    out += [
-        'extern const godot_gdnative_core_api_struct api_struct = {',
-        '\tGDNATIVE_' + api['core']['type'] + ',',
-        '\t{' + str(api['core']['version']['major']) + ', ' + str(api['core']['version']['minor']) + '},',
-        '\t(const godot_gdnative_api_struct *)&api_1_1,',
-        '\t' + str(len(api['extensions'])) + ',',
-        '\tgdnative_extensions_pointers,',
+        "extern const godot_gdnative_core_api_struct api_struct = {",
+        "\tGDNATIVE_" + api["core"]["type"] + ",",
+        "\t{" + str(api["core"]["version"]["major"]) + ", " + str(api["core"]["version"]["minor"]) + "},",
+        "\t(const godot_gdnative_api_struct *)&api_1_1,",
+        "\t" + str(len(api["extensions"])) + ",",
+        "\tgdnative_extensions_pointers,",
    ]
-    for funcdef in api['core']['api']:
-        out.append('\t%s,' % funcdef['name'])
-    out.append('};\n')
-    return '\n'.join(out)
+    for funcdef in api["core"]["api"]:
+        out.append("\t%s," % funcdef["name"])
+    out.append("};\n")
+    return "\n".join(out)
def build_gdnative_api_struct(target, source, env):
-    with open(source[0], 'r') as fd:
+    with open(source[0], "r") as fd:
        api = json.load(fd)
    header, source = target
-    with open(header, 'w') as fd:
+    with open(header, "w") as fd:
        fd.write(_build_gdnative_api_struct_header(api))
-    with open(source, 'w') as fd:
+    with open(source, "w") as fd:
        fd.write(_build_gdnative_api_struct_source(api))
-if __name__ == '__main__':
+if __name__ == "__main__":
    subprocess_main(globals())
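After formatting, this generator becomes noticeably vertical: black puts every operand of the long "+" concatenations on its own line. Purely as a sketch of a possible follow-up (not something this commit does), such expressions can be collapsed with f-strings or str.format; the helper name below is hypothetical and only mirrors the versioned struct-name pattern built in generate_extension_struct above:

# Sketch of a possible follow-up refactor: build the versioned struct name with an
# f-string instead of chained "+" concatenation (requires Python 3.6+).
def extension_struct_typedef(name, ext, include_version=True):
    # `ext` mirrors the JSON layout used above: {"version": {"major": ..., "minor": ...}, ...}
    version = "_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"]) if include_version else ""
    return f"typedef struct godot_gdnative_ext_{name}{version}_api_struct {{"


example_ext = {"version": {"major": 1, "minor": 3}}
print(extension_struct_typedef("nativescript", example_ext))
# typedef struct godot_gdnative_ext_nativescript_1_3_api_struct {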

View file

@@ -1,9 +1,9 @@
#!/usr/bin/env python
-Import('env')
-Import('env_gdnative')
+Import("env")
+Import("env_gdnative")
-env_gdnative.add_source_files(env.modules_sources, '*.cpp')
+env_gdnative.add_source_files(env.modules_sources, "*.cpp")
if "platform" in env and env["platform"] in ["linuxbsd", "iphone"]:
    env.Append(LINKFLAGS=["-rdynamic"])

View file

@@ -1,13 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_gdnative')
+Import("env")
+Import("env_gdnative")
env_net = env_gdnative.Clone()
has_webrtc = env_net["module_webrtc_enabled"]
if has_webrtc:
-    env_net.Append(CPPDEFINES=['WEBRTC_GDNATIVE_ENABLED'])
+    env_net.Append(CPPDEFINES=["WEBRTC_GDNATIVE_ENABLED"])
-env_net.add_source_files(env.modules_sources, '*.cpp')
+env_net.add_source_files(env.modules_sources, "*.cpp")

View file

@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
-Import('env_gdnative')
+Import("env")
+Import("env_gdnative")
-env_gdnative.add_source_files(env.modules_sources, '*.cpp')
+env_gdnative.add_source_files(env.modules_sources, "*.cpp")

View file

@@ -1,9 +1,9 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_vsdecoder_gdnative = env_modules.Clone()
-env_vsdecoder_gdnative.Prepend(CPPPATH=['#modules/gdnative/include/'])
-env_vsdecoder_gdnative.add_source_files(env.modules_sources, '*.cpp')
+env_vsdecoder_gdnative.Prepend(CPPPATH=["#modules/gdnative/include/"])
+env_vsdecoder_gdnative.add_source_files(env.modules_sources, "*.cpp")

View file

@@ -1,25 +1,25 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_navigation = env_modules.Clone()
# Recast Thirdparty source files
-if env['builtin_recast']:
+if env["builtin_recast"]:
    thirdparty_dir = "#thirdparty/recastnavigation/Recast/"
    thirdparty_sources = [
        "Source/Recast.cpp",
        "Source/RecastAlloc.cpp",
        "Source/RecastArea.cpp",
        "Source/RecastAssert.cpp",
        "Source/RecastContour.cpp",
        "Source/RecastFilter.cpp",
        "Source/RecastLayers.cpp",
        "Source/RecastMesh.cpp",
        "Source/RecastMeshDetail.cpp",
        "Source/RecastRasterization.cpp",
        "Source/RecastRegion.cpp",
    ]
    thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
@@ -31,11 +31,11 @@ if env['builtin_recast']:
# RVO Thirdparty source files
-if env['builtin_rvo2']:
+if env["builtin_rvo2"]:
    thirdparty_dir = "#thirdparty/rvo2"
    thirdparty_sources = [
        "/src/Agent.cpp",
        "/src/KdTree.cpp",
    ]
    thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]

View file

@@ -1,5 +1,6 @@
def can_build(env, platform):
    return True
def configure(env):
    pass

View file

@@ -1,19 +1,19 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_gdscript = env_modules.Clone()
env_gdscript.add_source_files(env.modules_sources, "*.cpp")
-if env['tools']:
+if env["tools"]:
    env_gdscript.add_source_files(env.modules_sources, "./editor/*.cpp")
    # Those two modules are required for the language server protocol
-    if env['module_jsonrpc_enabled'] and env['module_websocket_enabled']:
+    if env["module_jsonrpc_enabled"] and env["module_websocket_enabled"]:
        env_gdscript.add_source_files(env.modules_sources, "./language_server/*.cpp")
    else:
        # Using a define in the disabled case, to avoid having an extra define
        # in regular builds where all modules are enabled.
-        env_gdscript.Append(CPPDEFINES=['GDSCRIPT_NO_LSP'])
+        env_gdscript.Append(CPPDEFINES=["GDSCRIPT_NO_LSP"])

View file

@@ -1,9 +1,11 @@
def can_build(env, platform):
    return True
def configure(env):
    pass
def get_doc_classes():
    return [
        "@GDScript",
@@ -12,5 +14,6 @@ def get_doc_classes():
        "GDScriptNativeClass",
    ]
def get_doc_path():
    return "doc_classes"

View file

@@ -1,12 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_glslang = env_modules.Clone()
# Thirdparty source files
-if env['builtin_glslang']:
+if env["builtin_glslang"]:
    thirdparty_dir = "#thirdparty/glslang/"
    thirdparty_sources = [
        "glslang/MachineIndependent/RemoveTree.cpp",
@@ -48,10 +48,10 @@ if env['builtin_glslang']:
        "SPIRV/doc.cpp",
        "SPIRV/SPVRemapper.cpp",
        "SPIRV/SpvPostProcess.cpp",
-        "SPIRV/Logger.cpp"
+        "SPIRV/Logger.cpp",
    ]
-    if (env["platform"]=="windows"):
+    if env["platform"] == "windows":
        thirdparty_sources.append("glslang/OSDependent/Windows/ossource.cpp")
    else:
        thirdparty_sources.append("glslang/OSDependent/Unix/ossource.cpp")
@@ -60,7 +60,7 @@ if env['builtin_glslang']:
    # Treat glslang headers as system headers to avoid raising warnings. Not supported on MSVC.
    if not env.msvc:
-        env_glslang.Append(CPPFLAGS=['-isystem', Dir(thirdparty_dir).path])
+        env_glslang.Append(CPPFLAGS=["-isystem", Dir(thirdparty_dir).path])
    else:
        env_glslang.Prepend(CPPPATH=[thirdparty_dir])

View file

@@ -1,5 +1,6 @@
def can_build(env, platform):
    return True
def configure(env):
    pass

View file

@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_gridmap = env_modules.Clone()

View file

@@ -1,13 +1,16 @@
def can_build(env, platform):
    return True
def configure(env):
    pass
def get_doc_classes():
    return [
        "GridMap",
    ]
def get_doc_path():
    return "doc_classes"

View file

@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_hdr = env_modules.Clone()

View file

@@ -1,5 +1,6 @@
def can_build(env, platform):
    return True
def configure(env):
    pass

View file

@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_jpg = env_modules.Clone()

View file

@@ -1,5 +1,6 @@
def can_build(env, platform):
    return True
def configure(env):
    pass

View file

@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_jsonrpc = env_modules.Clone()
env_jsonrpc.add_source_files(env.modules_sources, "*.cpp")

View file

@@ -1,5 +1,6 @@
def can_build(env, platform):
    return True
def configure(env):
    pass

View file

@@ -1,11 +1,11 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_mbed_tls = env_modules.Clone()
-if env['builtin_mbedtls']:
+if env["builtin_mbedtls"]:
    # Thirdparty source files
    thirdparty_sources = [
        "aes.c",
@@ -86,7 +86,7 @@ if env['builtin_mbedtls']:
        "x509_csr.c",
        "x509write_crt.c",
        "x509write_csr.c",
-        "xtea.c"
+        "xtea.c",
    ]
    thirdparty_dir = "#thirdparty/mbedtls/library/"

View file

@@ -1,5 +1,6 @@
def can_build(env, platform):
    return True
def configure(env):
    pass

View file

@@ -1,8 +1,8 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_mobile_vr = env_modules.Clone()
-env_mobile_vr.add_source_files(env.modules_sources, '*.cpp')
+env_mobile_vr.add_source_files(env.modules_sources, "*.cpp")

Some files were not shown because too many files have changed in this diff.