import os
import sys
import json
import platform
import uuid
import functools
import subprocess

# NOTE: The multiprocessing module is not compatible with SCons due to conflict on cPickle

JSON_SERIALIZABLE_TYPES = (bool, int, float, str)


def run_in_subprocess(builder_function):
    """Decorator that runs a SCons builder function in a separate Python process on Windows.

    The builder's targets, sources and JSON-serializable environment entries are written to a
    temporary parameter file, and the builder's module is re-executed with that file as its
    argument (see `subprocess_main`). On other platforms the builder is called directly.
    """

    @functools.wraps(builder_function)
    def wrapper(target, source, env):
        # Convert SCons Node instances to absolute paths
        target = [node.srcnode().abspath for node in target]
        source = [node.srcnode().abspath for node in source]

        # Short circuit on non-Windows platforms, no need to run in subprocess
        if sys.platform not in ("win32", "cygwin"):
            return builder_function(target, source, env)

        # Identify module
        module_name = builder_function.__module__
        function_name = builder_function.__name__
        module_path = sys.modules[module_name].__file__
        if module_path.endswith(".pyc") or module_path.endswith(".pyo"):
            module_path = module_path[:-1]

        # Subprocess environment
        subprocess_env = os.environ.copy()
        subprocess_env["PYTHONPATH"] = os.pathsep.join([os.getcwd()] + sys.path)

        # Keep only JSON serializable environment items
        filtered_env = dict((key, value) for key, value in env.items() if isinstance(value, JSON_SERIALIZABLE_TYPES))

        # Save parameters
        args = (target, source, filtered_env)
        data = dict(fn=function_name, args=args)
        json_path = os.path.join(os.environ["TMP"], uuid.uuid4().hex + ".json")
        with open(json_path, "wt", encoding="utf-8", newline="\n") as json_file:
            json.dump(data, json_file, indent=2)
        json_file_size = os.stat(json_path).st_size

        if env["verbose"]:
            print(
                "Executing builder function in subprocess: "
                "module_path=%r, parameter_file=%r, parameter_file_size=%r, target=%r, source=%r"
                % (module_path, json_path, json_file_size, target, source)
            )
        try:
            exit_code = subprocess.call([sys.executable, module_path, json_path], env=subprocess_env)
        finally:
            try:
                os.remove(json_path)
            except OSError as e:
                # Do not fail the entire build if it cannot delete a temporary file
                print(
                    "WARNING: Could not delete temporary file: path=%r; [%s] %s" % (json_path, e.__class__.__name__, e)
                )

        # Must succeed
        if exit_code:
            raise RuntimeError(
                "Failed to run builder function in subprocess: module_path=%r; data=%r" % (module_path, data)
            )

    return wrapper
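
# Illustrative usage sketch (assumption, not part of this module): a builder module would
# typically wrap its builder function with the decorator above, e.g.
#
#   @run_in_subprocess
#   def make_some_header(target, source, env):
#       with open(target[0], "w") as f:
#           f.write("/* generated from %s */\n" % source[0])
#
# `make_some_header` is a hypothetical name; target/source arrive as absolute path strings
# because of the Node-to-path conversion done in `wrapper` above.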


def subprocess_main(namespace):
    """Entry point for the subprocess side of `run_in_subprocess`.

    Loads the parameter file passed as the first command line argument and calls the
    requested builder function from the given namespace.
    """
    with open(sys.argv[1]) as json_file:
        data = json.load(json_file)

    fn = namespace[data["fn"]]
    fn(*data["args"])
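
# Illustrative sketch (assumption about the calling convention): a builder module that relies
# on `run_in_subprocess` is expected to end with something along the lines of
#
#   if __name__ == "__main__":
#       subprocess_main(globals())
#
# so that re-running the module as a script dispatches back to the requested builder.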


# CPU architecture options.
architectures = ["x86_32", "x86_64", "arm32", "arm64", "rv64", "ppc32", "ppc64", "wasm32"]
architecture_aliases = {
    "x86": "x86_32",
    "x64": "x86_64",
    "amd64": "x86_64",
    "armv7": "arm32",
    "armv8": "arm64",
    "arm64v8": "arm64",
    "aarch64": "arm64",
    "rv": "rv64",
    "riscv": "rv64",
    "riscv64": "rv64",
    "ppcle": "ppc32",
    "ppc": "ppc32",
    "ppc64le": "ppc64",
}


def detect_arch():
    """Return the build architecture name matching the host machine, falling back to x86_64."""
    host_machine = platform.machine().lower()
    if host_machine in architectures:
        return host_machine
    elif host_machine in architecture_aliases:
        return architecture_aliases[host_machine]
    elif "86" in host_machine:
        # Catches x86, i386, i486, i586, i686, etc.
        return "x86_32"
    else:
        print("Unsupported CPU architecture: " + host_machine)
        print("Falling back to x86_64.")
        return "x86_64"
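
# Illustrative sketch (assumption about the call site): this is typically used as the default
# value of a SCons architecture option, e.g.
#
#   opts.Add(EnumVariable("arch", "CPU architecture", detect_arch(), ["auto"] + architectures, architecture_aliases))
#
# where `opts` is a SCons `Variables` instance; the actual option wiring lives elsewhere.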


def generate_export_icons(platform_path, platform_name):
    """
    Generate headers for logo and run icon for the export plugin.
    """
    export_path = platform_path + "/export"
    svg_names = []
    if os.path.isfile(export_path + "/logo.svg"):
        svg_names.append("logo")
    if os.path.isfile(export_path + "/run_icon.svg"):
        svg_names.append("run_icon")

    for name in svg_names:
        with open(export_path + "/" + name + ".svg", "rb") as svgf:
            b = svgf.read(1)
            svg_str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
            svg_str += " static const char *_" + platform_name + "_" + name + '_svg = "'
            # Encode every byte of the SVG as a C hex escape sequence.
            while len(b) == 1:
                svg_str += "\\" + hex(ord(b))[1:]
                b = svgf.read(1)

            svg_str += '";\n'

        # NOTE: It is safe to generate this file here, since this is still executed serially.
        wf = export_path + "/" + name + "_svg.gen.h"
        with open(wf, "w", encoding="utf-8", newline="\n") as svgw:
            svgw.write(svg_str)
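
# For reference (illustrative values only): the generated "<name>_svg.gen.h" holds the SVG as a
# single escaped C string, along the lines of
#
#    /* AUTOGENERATED FILE, DO NOT EDIT */
#    static const char *_macos_logo_svg = "\x3c\x73\x76\x67...";
#
# where "macos" and "logo" stand in for the actual platform and icon names.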


def get_build_version(short):
    """Return the engine version string, optionally suffixed with status and build name."""
    import version

    name = "custom_build"
    if os.getenv("BUILD_NAME") is not None:
        name = os.getenv("BUILD_NAME")
    v = "%d.%d" % (version.major, version.minor)
    if version.patch > 0:
        v += ".%d" % version.patch
    status = version.status
    if not short:
        if os.getenv("GODOT_VERSION_STATUS") is not None:
            status = str(os.getenv("GODOT_VERSION_STATUS"))
        v += ".%s.%s" % (status, name)
    return v
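
# Illustrative output (values depend on version.py and the environment; shown only as a sketch):
#
#   get_build_version(short=True)   # e.g. "4.2.1"
#   get_build_version(short=False)  # e.g. "4.2.1.stable.custom_build"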


def lipo(prefix, suffix):
    """Merge per-architecture binaries named "<prefix>.<arch><suffix>" into a fat binary.

    Returns the fat binary path if more than one architecture was found, the single binary
    found otherwise, or an empty string if nothing matched.
    """
    from pathlib import Path

    target_bin = ""
    lipo_command = ["lipo", "-create"]
    arch_found = 0

    for arch in architectures:
        bin_name = prefix + "." + arch + suffix
        if Path(bin_name).is_file():
            target_bin = bin_name
            lipo_command += [bin_name]
            arch_found += 1

    if arch_found > 1:
        target_bin = prefix + ".fat" + suffix
        lipo_command += ["-output", target_bin]
        subprocess.run(lipo_command)

    return target_bin
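
# Illustrative usage (hypothetical file names): given "bin/libfoo.arm64.a" and "bin/libfoo.x86_64.a",
#
#   lipo("bin/libfoo", ".a")
#
# would run `lipo -create ... -output bin/libfoo.fat.a` and return "bin/libfoo.fat.a".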


def get_mvk_sdk_path(osname):
    """Return the MoltenVK xcframework path of the newest Vulkan SDK (>= 1.3.231.0) under ~/VulkanSDK."""

    def int_or_zero(i):
        try:
            return int(i)
        except ValueError:
            return 0

    def ver_parse(a):
        return [int_or_zero(i) for i in a.split(".")]

    dirname = os.path.expanduser("~/VulkanSDK")
    if not os.path.exists(dirname):
        return ""

    ver_min = ver_parse("1.3.231.0")
    ver_num = ver_parse("0.0.0.0")
    files = os.listdir(dirname)
    lib_name_out = dirname
    for file in files:
        if os.path.isdir(os.path.join(dirname, file)):
            ver_comp = ver_parse(file)
            if ver_comp > ver_num and ver_comp >= ver_min:
                # Try new SDK location.
                lib_name = os.path.join(os.path.join(dirname, file), "macOS/lib/MoltenVK.xcframework/" + osname + "/")
                if os.path.isfile(os.path.join(lib_name, "libMoltenVK.a")):
                    ver_num = ver_comp
                    lib_name_out = os.path.join(os.path.join(dirname, file), "macOS/lib/MoltenVK.xcframework")
                else:
                    # Try old SDK location.
                    lib_name = os.path.join(
                        os.path.join(dirname, file), "MoltenVK/MoltenVK.xcframework/" + osname + "/"
                    )
                    if os.path.isfile(os.path.join(lib_name, "libMoltenVK.a")):
                        ver_num = ver_comp
                        lib_name_out = os.path.join(os.path.join(dirname, file), "MoltenVK/MoltenVK.xcframework")

    return lib_name_out


def detect_mvk(env, osname):
    """Return the first MoltenVK xcframework path containing a static library for `osname`, or ""."""
    mvk_list = [
        get_mvk_sdk_path(osname),
        "/opt/homebrew/Frameworks/MoltenVK.xcframework",
        "/usr/local/homebrew/Frameworks/MoltenVK.xcframework",
        "/opt/local/Frameworks/MoltenVK.xcframework",
    ]
    if env["vulkan_sdk_path"] != "":
        mvk_list.insert(0, os.path.expanduser(env["vulkan_sdk_path"]))
        mvk_list.insert(
            0,
            os.path.join(os.path.expanduser(env["vulkan_sdk_path"]), "macOS/lib/MoltenVK.xcframework"),
        )
        mvk_list.insert(
            0,
            os.path.join(os.path.expanduser(env["vulkan_sdk_path"]), "MoltenVK/MoltenVK.xcframework"),
        )

    for mvk_path in mvk_list:
        if mvk_path and os.path.isfile(os.path.join(mvk_path, osname + "/libMoltenVK.a")):
            print("MoltenVK found at: " + mvk_path)
            return mvk_path

    return ""
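
# Illustrative call (assumed slice name and link flags; the real wiring lives in the platform
# detect scripts):
#
#   mvk_path = detect_mvk(env, "macos-arm64_x86_64")
#   if mvk_path != "":
#       env.Append(LINKFLAGS=["-L" + os.path.join(mvk_path, "macos-arm64_x86_64"), "-lMoltenVK"])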