Browse Source

Merge pull request #17595 from viktor-ferenczi/issue-5042-subproc

Running builder (content generator) functions in subprocesses on Windows
Rémi Verschelde 7 years ago
parent
commit
66429a1576

+ 4 - 2
SConstruct

@@ -8,6 +8,8 @@ import os.path
 import glob
 import glob
 import sys
 import sys
 import methods
 import methods
+import gles_builders
+from platform_methods import run_in_subprocess
 
 
 # scan possible build platforms
 # scan possible build platforms
 
 
@@ -444,8 +446,8 @@ if selected_platform in platform_list:
         methods.no_verbose(sys, env)
         methods.no_verbose(sys, env)
 
 
     if (not env["platform"] == "server"): # FIXME: detect GLES3
     if (not env["platform"] == "server"): # FIXME: detect GLES3
-        env.Append( BUILDERS = { 'GLES3_GLSL' : env.Builder(action = methods.build_gles3_headers, suffix = 'glsl.gen.h', src_suffix = '.glsl') } )
-        env.Append( BUILDERS = { 'GLES2_GLSL' : env.Builder(action = methods.build_gles2_headers, suffix = 'glsl.gen.h', src_suffix = '.glsl') } )
+        env.Append(BUILDERS = { 'GLES3_GLSL' : env.Builder(action=run_in_subprocess(gles_builders.build_gles3_headers), suffix='glsl.gen.h', src_suffix='.glsl')})
+        env.Append(BUILDERS = { 'GLES2_GLSL' : env.Builder(action=run_in_subprocess(gles_builders.build_gles2_headers), suffix='glsl.gen.h', src_suffix='.glsl')})
 
 
     scons_cache_path = os.environ.get("SCONS_CACHE")
     scons_cache_path = os.environ.get("SCONS_CACHE")
     if scons_cache_path != None:
     if scons_cache_path != None:

+ 25 - 23
core/SCsub

@@ -2,7 +2,9 @@
 
 
 Import('env')
 Import('env')
 
 
-import methods
+import core_builders
+import make_binders
+from platform_methods import run_in_subprocess
 
 
 env.core_sources = []
 env.core_sources = []
 
 
@@ -21,7 +23,7 @@ gd_cpp += gd_inc
 gd_cpp += "void ProjectSettings::register_global_defaults() {\n" + gd_call + "\n}\n"
 gd_cpp += "void ProjectSettings::register_global_defaults() {\n" + gd_call + "\n}\n"
 
 
 with open("global_defaults.gen.cpp", "w") as f:
 with open("global_defaults.gen.cpp", "w") as f:
-	f.write(gd_cpp)
+    f.write(gd_cpp)
 
 
 
 
 # Generate AES256 script encryption key
 # Generate AES256 script encryption key
@@ -48,26 +50,27 @@ if ("SCRIPT_AES256_ENCRYPTION_KEY" in os.environ):
         txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0"
         txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0"
         print("Invalid AES256 encryption key, not 64 bits hex: " + e)
         print("Invalid AES256 encryption key, not 64 bits hex: " + e)
 
 
+# NOTE: It is safe to generate this file here, since this is still executed serially
 with open("script_encryption_key.gen.cpp", "w") as f:
 with open("script_encryption_key.gen.cpp", "w") as f:
-	f.write("#include \"project_settings.h\"\nuint8_t script_encryption_key[32]={" + txt + "};\n")
+    f.write("#include \"project_settings.h\"\nuint8_t script_encryption_key[32]={" + txt + "};\n")
 
 
 
 
 # Add required thirdparty code. Header paths are hardcoded, we don't need to append
 # Add required thirdparty code. Header paths are hardcoded, we don't need to append
 # to the include path (saves a few chars on the compiler invocation for touchy MSVC...)
 # to the include path (saves a few chars on the compiler invocation for touchy MSVC...)
 thirdparty_dir = "#thirdparty/misc/"
 thirdparty_dir = "#thirdparty/misc/"
 thirdparty_sources = [
 thirdparty_sources = [
-	# C sources
-	"base64.c",
-	"fastlz.c",
-	"sha256.c",
-	"smaz.c",
-
-	# C++ sources
-	"aes256.cpp",
-	"hq2x.cpp",
-	"md5.cpp",
-	"pcg.cpp",
-	"triangulator.cpp",
+    # C sources
+    "base64.c",
+    "fastlz.c",
+    "sha256.c",
+    "smaz.c",
+
+    # C++ sources
+    "aes256.cpp",
+    "hq2x.cpp",
+    "md5.cpp",
+    "pcg.cpp",
+    "triangulator.cpp",
 ]
 ]
 thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
 thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
 env.add_source_files(env.core_sources, thirdparty_sources)
 env.add_source_files(env.core_sources, thirdparty_sources)
@@ -76,9 +79,9 @@ env.add_source_files(env.core_sources, thirdparty_sources)
 # However, our version has some custom modifications, so it won't compile with the system one
 # However, our version has some custom modifications, so it won't compile with the system one
 thirdparty_minizip_dir = "#thirdparty/minizip/"
 thirdparty_minizip_dir = "#thirdparty/minizip/"
 thirdparty_minizip_sources = [
 thirdparty_minizip_sources = [
-	"ioapi.c",
-	"unzip.c",
-	"zip.c",
+    "ioapi.c",
+    "unzip.c",
+    "zip.c",
 ]
 ]
 thirdparty_minizip_sources = [thirdparty_minizip_dir + file for file in thirdparty_minizip_sources]
 thirdparty_minizip_sources = [thirdparty_minizip_dir + file for file in thirdparty_minizip_sources]
 env.add_source_files(env.core_sources, thirdparty_minizip_sources)
 env.add_source_files(env.core_sources, thirdparty_minizip_sources)
@@ -92,20 +95,19 @@ env.add_source_files(env.core_sources, "*.cpp")
 
 
 
 
 # Make binders
 # Make binders
-import make_binders
-env.CommandNoCache(['method_bind.gen.inc', 'method_bind_ext.gen.inc'], 'make_binders.py', make_binders.run)
+env.CommandNoCache(['method_bind.gen.inc', 'method_bind_ext.gen.inc'], 'make_binders.py', run_in_subprocess(make_binders.run))
 
 
 # Authors
 # Authors
 env.Depends('#core/authors.gen.h', "../AUTHORS.md")
 env.Depends('#core/authors.gen.h', "../AUTHORS.md")
-env.CommandNoCache('#core/authors.gen.h', "../AUTHORS.md", methods.make_authors_header)
+env.CommandNoCache('#core/authors.gen.h', "../AUTHORS.md", run_in_subprocess(core_builders.make_authors_header))
 
 
 # Donors
 # Donors
 env.Depends('#core/donors.gen.h', "../DONORS.md")
 env.Depends('#core/donors.gen.h', "../DONORS.md")
-env.CommandNoCache('#core/donors.gen.h', "../DONORS.md", methods.make_donors_header)
+env.CommandNoCache('#core/donors.gen.h', "../DONORS.md", run_in_subprocess(core_builders.make_donors_header))
 
 
 # License
 # License
 env.Depends('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"])
 env.Depends('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"])
-env.CommandNoCache('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"], methods.make_license_header)
+env.CommandNoCache('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"], run_in_subprocess(core_builders.make_license_header))
 
 
 # Chain load SCsubs
 # Chain load SCsubs
 SConscript('os/SCsub')
 SConscript('os/SCsub')

+ 236 - 0
core/core_builders.py

@@ -0,0 +1,236 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+from platform_methods import subprocess_main
+from compat import iteritems, itervalues, open_utf8, escape_string
+
+
def make_authors_header(target, source, env):
    """Generate core/authors.gen.h from AUTHORS.md.

    Emits one `const char *const` array per recognized section of the
    markdown file, each terminated by a 0 sentinel entry.
    """
    section_names = ["Project Founders", "Lead Developer", "Project Manager", "Developers"]
    section_ids = ["AUTHORS_FOUNDERS", "AUTHORS_LEAD_DEVELOPERS", "AUTHORS_PROJECT_MANAGERS", "AUTHORS_DEVELOPERS"]

    in_file = open_utf8(source[0], "r")
    out_file = open_utf8(target[0], "w")

    out_file.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    out_file.write("#ifndef _EDITOR_AUTHORS_H\n")
    out_file.write("#define _EDITOR_AUTHORS_H\n")

    in_section = False

    def terminate_array():
        # Arrays end with a 0 sentinel so C code can iterate without a count.
        out_file.write("\t0\n")
        out_file.write("};\n")

    for line in in_file:
        if in_section and line.startswith("    "):
            # Indented lines inside a section are individual author names.
            out_file.write("\t\"" + escape_string(line.strip()) + "\",\n")
            continue
        if not line.startswith("## "):
            continue
        # A new markdown heading closes any array that is still open.
        if in_section:
            terminate_array()
            in_section = False
        for name, ident in zip(section_names, section_ids):
            if line.strip().endswith(name):
                out_file.write("const char *const " + escape_string(ident) + "[] = {\n")
                in_section = True
                break

    if in_section:
        terminate_array()

    out_file.write("#endif\n")

    out_file.close()
    in_file.close()
+
+
def make_donors_header(target, source, env):
    """Generate core/donors.gen.h from DONORS.md.

    Emits one `const char *const` array per recognized donation tier,
    each terminated by a 0 sentinel entry.
    """
    sections = ["Platinum sponsors", "Gold sponsors", "Mini sponsors",
                "Gold donors", "Silver donors", "Bronze donors"]
    sections_id = ["DONORS_SPONSOR_PLAT", "DONORS_SPONSOR_GOLD", "DONORS_SPONSOR_MINI",
                   "DONORS_GOLD", "DONORS_SILVER", "DONORS_BRONZE"]

    src = source[0]
    dst = target[0]
    f = open_utf8(src, "r")
    g = open_utf8(dst, "w")

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _EDITOR_DONORS_H\n")
    g.write("#define _EDITOR_DONORS_H\n")

    reading = False

    def close_section():
        # Arrays end with a 0 sentinel so C code can iterate without a count.
        g.write("\t0\n")
        g.write("};\n")

    for line in f:
        # BUGFIX: was `if reading >= 0:`, which is always true for a bool
        # (False >= 0) and would emit entries before any array was opened;
        # now matches the logic of make_authors_header above.
        if reading:
            if line.startswith("    "):
                g.write("\t\"" + escape_string(line.strip()) + "\",\n")
                continue
        if line.startswith("## "):
            if reading:
                close_section()
                reading = False
            for section, section_id in zip(sections, sections_id):
                if line.strip().endswith(section):
                    current_section = escape_string(section_id)
                    reading = True
                    g.write("const char *const " + current_section + "[] = {\n")
                    break

    if reading:
        close_section()

    g.write("#endif\n")

    g.close()
    f.close()
+
+
def make_license_header(target, source, env):
    """Generate core/license.gen.h from COPYRIGHT.txt and LICENSE.txt.

    source[0] is COPYRIGHT.txt (debian/copyright machine-readable format),
    source[1] is the plain-text LICENSE.txt; target[0] is the generated
    header embedding license text plus per-component copyright tables.
    """
    src_copyright = source[0]
    src_license = source[1]
    dst = target[0]

    class LicenseReader:
        # Minimal cursor over a debian/copyright-style file: skips '#'
        # comment lines and groups a "Tag: value" line together with its
        # space-indented continuation lines.
        def __init__(self, license_file):
            self._license_file = license_file
            self.line_num = 0
            self.current = self.next_line()

        def next_line(self):
            line = self._license_file.readline()
            self.line_num += 1
            while line.startswith("#"):
                line = self._license_file.readline()
                self.line_num += 1
            self.current = line
            return line

        def next_tag(self):
            # A line without ':' is not a tag (blank line / paragraph break).
            if not ':' in self.current:
                return ('', [])
            tag, line = self.current.split(":", 1)
            lines = [line.strip()]
            # Consume indented continuation lines belonging to this tag.
            while self.next_line() and self.current.startswith(" "):
                lines.append(self.current.strip())
            return (tag, lines)

    from collections import OrderedDict
    projects = OrderedDict()  # project name -> list of parts (preserves file order)
    license_list = []  # standalone license paragraphs (License: with no Files:)

    with open_utf8(src_copyright, "r") as copyright_file:
        reader = LicenseReader(copyright_file)
        part = {}
        while reader.current:
            tag, content = reader.next_tag()
            if tag in ("Files", "Copyright", "License"):
                part[tag] = content[:]
            elif tag == "Comment":
                # attach part to named project
                projects[content[0]] = projects.get(content[0], []) + [part]

            if not tag or not reader.current:
                # end of a paragraph start a new part
                if "License" in part and not "Files" in part:
                    # no Files tag in this one, so assume standalone license
                    license_list.append(part["License"])
                part = {}
                reader.next_line()

    # Flatten all Files/Copyright strings into one pool; each part records
    # its offsets so the C side can index into COPYRIGHT_INFO_DATA.
    data_list = []
    for project in itervalues(projects):
        for part in project:
            part["file_index"] = len(data_list)
            data_list += part["Files"]
            part["copyright_index"] = len(data_list)
            data_list += part["Copyright"]

    with open_utf8(dst, "w") as f:

        f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
        f.write("#ifndef _EDITOR_LICENSE_H\n")
        f.write("#define _EDITOR_LICENSE_H\n")
        f.write("const char *const GODOT_LICENSE_TEXT =")

        # Embed LICENSE.txt verbatim as one long C string literal.
        with open_utf8(src_license, "r") as license_file:
            for line in license_file:
                escaped_string = escape_string(line.strip())
                f.write("\n\t\t\"" + escaped_string + "\\n\"")
        f.write(";\n\n")

        f.write("struct ComponentCopyrightPart {\n"
                "\tconst char *license;\n"
                "\tconst char *const *files;\n"
                "\tconst char *const *copyright_statements;\n"
                "\tint file_count;\n"
                "\tint copyright_count;\n"
                "};\n\n")

        f.write("struct ComponentCopyright {\n"
                "\tconst char *name;\n"
                "\tconst ComponentCopyrightPart *parts;\n"
                "\tint part_count;\n"
                "};\n\n")

        f.write("const char *const COPYRIGHT_INFO_DATA[] = {\n")
        for line in data_list:
            f.write("\t\"" + escape_string(line) + "\",\n")
        f.write("};\n\n")

        # One ComponentCopyrightPart row per part; parts of the same project
        # are contiguous, so a project needs only its first part's index.
        f.write("const ComponentCopyrightPart COPYRIGHT_PROJECT_PARTS[] = {\n")
        part_index = 0
        part_indexes = {}
        for project_name, project in iteritems(projects):
            part_indexes[project_name] = part_index
            for part in project:
                f.write("\t{ \"" + escape_string(part["License"][0]) + "\", "
                        + "&COPYRIGHT_INFO_DATA[" + str(part["file_index"]) + "], "
                        + "&COPYRIGHT_INFO_DATA[" + str(part["copyright_index"]) + "], "
                        + str(len(part["Files"])) + ", "
                        + str(len(part["Copyright"])) + " },\n")
                part_index += 1
        f.write("};\n\n")

        f.write("const int COPYRIGHT_INFO_COUNT = " + str(len(projects)) + ";\n")

        f.write("const ComponentCopyright COPYRIGHT_INFO[] = {\n")
        for project_name, project in iteritems(projects):
            f.write("\t{ \"" + escape_string(project_name) + "\", "
                    + "&COPYRIGHT_PROJECT_PARTS[" + str(part_indexes[project_name]) + "], "
                    + str(len(project)) + " },\n")
        f.write("};\n\n")

        f.write("const int LICENSE_COUNT = " + str(len(license_list)) + ";\n")

        # Each standalone license is [name, body line, body line, ...].
        f.write("const char *const LICENSE_NAMES[] = {\n")
        for l in license_list:
            f.write("\t\"" + escape_string(l[0]) + "\",\n")
        f.write("};\n\n")

        f.write("const char *const LICENSE_BODIES[] = {\n\n")
        for l in license_list:
            for line in l[1:]:
                if line == ".":
                    # A lone '.' marks a blank line in debian/copyright bodies.
                    f.write("\t\"\\n\"\n")
                else:
                    f.write("\t\"" + escape_string(line) + "\\n\"\n")
            f.write("\t\"\",\n\n")
        f.write("};\n\n")

        f.write("#endif\n")
+
+
# Entry point when this module is executed as a script — presumably
# run_in_subprocess() re-invokes it and subprocess_main() dispatches to one
# of the builder functions above (see platform_methods; TODO confirm).
if __name__ == '__main__':
    subprocess_main(globals())

+ 7 - 2
core/make_binders.py

@@ -1,4 +1,5 @@
 # -*- coding: ibm850 -*-
 # -*- coding: ibm850 -*-
+from platform_methods import subprocess_main
 
 
 
 
 template_typed = """
 template_typed = """
@@ -265,8 +266,12 @@ def run(target, source, env):
         else:
         else:
             text += t
             text += t
 
 
-    with open(target[0].path, "w") as f:
+    with open(target[0], "w") as f:
         f.write(text)
         f.write(text)
 
 
-    with open(target[1].path, "w") as f:
+    with open(target[1], "w") as f:
         f.write(text_ext)
         f.write(text_ext)
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 25 - 144
editor/SCsub

@@ -5,149 +5,14 @@ env.editor_sources = []
 
 
 import os
 import os
 import os.path
 import os.path
-from compat import encode_utf8, byte_to_str, open_utf8, escape_string
-
-def make_certs_header(target, source, env):
-
-    src = source[0].srcnode().abspath
-    dst = target[0].srcnode().abspath
-    f = open(src, "rb")
-    g = open_utf8(dst, "w")
-    buf = f.read()
-    decomp_size = len(buf)
-    import zlib
-    buf = zlib.compress(buf)
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _CERTS_RAW_H\n")
-    g.write("#define _CERTS_RAW_H\n")
-    g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n")
-    g.write("static const int _certs_uncompressed_size = " + str(decomp_size) + ";\n")
-    g.write("static const unsigned char _certs_compressed[] = {\n")
-    for i in range(len(buf)):
-        g.write("\t" + byte_to_str(buf[i]) + ",\n")
-    g.write("};\n")
-    g.write("#endif")
-
-    g.close()
-    f.close()
-
-
-def make_doc_header(target, source, env):
-
-    dst = target[0].srcnode().abspath
-    g = open_utf8(dst, "w")
-    buf = ""
-    docbegin = ""
-    docend = ""
-    for s in source:
-        src = s.srcnode().abspath
-        if not src.endswith(".xml"):
-            continue
-        with open_utf8(src, "r") as f:
-            content = f.read()
-        buf += content
-
-    buf = encode_utf8(docbegin + buf + docend)
-    decomp_size = len(buf)
-    import zlib
-    buf = zlib.compress(buf)
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _DOC_DATA_RAW_H\n")
-    g.write("#define _DOC_DATA_RAW_H\n")
-    g.write("static const int _doc_data_compressed_size = " + str(len(buf)) + ";\n")
-    g.write("static const int _doc_data_uncompressed_size = " + str(decomp_size) + ";\n")
-    g.write("static const unsigned char _doc_data_compressed[] = {\n")
-    for i in range(len(buf)):
-        g.write("\t" + byte_to_str(buf[i]) + ",\n")
-    g.write("};\n")
-
-    g.write("#endif")
-
-    g.close()
-
-
-def make_fonts_header(target, source, env):
-
-    dst = target[0].srcnode().abspath
-
-    g = open_utf8(dst, "w")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _EDITOR_FONTS_H\n")
-    g.write("#define _EDITOR_FONTS_H\n")
-
-    # saving uncompressed, since freetype will reference from memory pointer
-    xl_names = []
-    for i in range(len(source)):
-        with open(source[i].srcnode().abspath, "rb")as f:
-            buf = f.read()
-
-        name = os.path.splitext(os.path.basename(source[i].srcnode().abspath))[0]
-
-        g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
-        g.write("static const unsigned char _font_" + name + "[] = {\n")
-        for i in range(len(buf)):
-            g.write("\t" + byte_to_str(buf[i]) + ",\n")
-
-        g.write("};\n")
-
-    g.write("#endif")
-
-    g.close()
-
+from platform_methods import run_in_subprocess
+from compat import open_utf8
+import editor_builders
 
 
-def make_translations_header(target, source, env):
-
-    dst = target[0].srcnode().abspath
-
-    g = open_utf8(dst, "w")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _EDITOR_TRANSLATIONS_H\n")
-    g.write("#define _EDITOR_TRANSLATIONS_H\n")
-
-    import zlib
-    import os.path
-
-    paths = [node.srcnode().abspath for node in source]
-    sorted_paths = sorted(paths, key=lambda path: os.path.splitext(os.path.basename(path))[0])
-
-    xl_names = []
-    for i in range(len(sorted_paths)):
-        with open(sorted_paths[i], "rb") as f:
-            buf = f.read()
-        decomp_size = len(buf)
-        buf = zlib.compress(buf)
-        name = os.path.splitext(os.path.basename(sorted_paths[i]))[0]
-
-        g.write("static const unsigned char _translation_" + name + "_compressed[] = {\n")
-        for i in range(len(buf)):
-            g.write("\t" + byte_to_str(buf[i]) + ",\n")
-
-        g.write("};\n")
-
-        xl_names.append([name, len(buf), str(decomp_size)])
-
-    g.write("struct EditorTranslationList {\n")
-    g.write("\tconst char* lang;\n")
-    g.write("\tint comp_size;\n")
-    g.write("\tint uncomp_size;\n")
-    g.write("\tconst unsigned char* data;\n")
-    g.write("};\n\n")
-    g.write("static EditorTranslationList _editor_translations[] = {\n")
-    for x in xl_names:
-        g.write("\t{ \"" + x[0] + "\", " + str(x[1]) + ", " + str(x[2]) + ", _translation_" + x[0] + "_compressed},\n")
-    g.write("\t{NULL, 0, 0, NULL}\n")
-    g.write("};\n")
-
-    g.write("#endif")
-
-    g.close()
 
 
 def _make_doc_data_class_path(to_path):
 def _make_doc_data_class_path(to_path):
-    g = open_utf8(os.path.join(to_path,"doc_data_class_path.gen.h"), "w")
+    # NOTE: It is safe to generate this file here, since this is still executed serially
+    g = open_utf8(os.path.join(to_path, "doc_data_class_path.gen.h"), "w")
     g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
     g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
     g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
     g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
 
 
@@ -169,6 +34,8 @@ if env['tools']:
         reg_exporters += '\tregister_' + e + '_exporter();\n'
         reg_exporters += '\tregister_' + e + '_exporter();\n'
         reg_exporters_inc += '#include "platform/' + e + '/export/export.h"\n'
         reg_exporters_inc += '#include "platform/' + e + '/export/export.h"\n'
     reg_exporters += '}\n'
     reg_exporters += '}\n'
+
+    # NOTE: It is safe to generate this file here, since this is still executed serially
     with open_utf8("register_exporters.gen.cpp", "w") as f:
     with open_utf8("register_exporters.gen.cpp", "w") as f:
         f.write(reg_exporters_inc)
         f.write(reg_exporters_inc)
         f.write(reg_exporters)
         f.write(reg_exporters)
@@ -192,24 +59,38 @@ if env['tools']:
 
 
     docs = sorted(docs)
     docs = sorted(docs)
     env.Depends("#editor/doc_data_compressed.gen.h", docs)
     env.Depends("#editor/doc_data_compressed.gen.h", docs)
-    env.CommandNoCache("#editor/doc_data_compressed.gen.h", docs, make_doc_header)
+    env.CommandNoCache("#editor/doc_data_compressed.gen.h", docs, run_in_subprocess(editor_builders.make_doc_header))
+
     # Certificates
     # Certificates
     env.Depends("#editor/certs_compressed.gen.h", "#thirdparty/certs/ca-certificates.crt")
     env.Depends("#editor/certs_compressed.gen.h", "#thirdparty/certs/ca-certificates.crt")
-    env.CommandNoCache("#editor/certs_compressed.gen.h", "#thirdparty/certs/ca-certificates.crt", make_certs_header)
+    env.CommandNoCache("#editor/certs_compressed.gen.h", "#thirdparty/certs/ca-certificates.crt", run_in_subprocess(editor_builders.make_certs_header))
 
 
     import glob
     import glob
+
     path = env.Dir('.').abspath
     path = env.Dir('.').abspath
 
 
     # Translations
     # Translations
     tlist = glob.glob(path + "/translations/*.po")
     tlist = glob.glob(path + "/translations/*.po")
     env.Depends('#editor/translations.gen.h', tlist)
     env.Depends('#editor/translations.gen.h', tlist)
-    env.CommandNoCache('#editor/translations.gen.h', tlist, make_translations_header)
+    env.CommandNoCache('#editor/translations.gen.h', tlist, run_in_subprocess(editor_builders.make_translations_header))
 
 
     # Fonts
     # Fonts
     flist = glob.glob(path + "/../thirdparty/fonts/*.ttf")
     flist = glob.glob(path + "/../thirdparty/fonts/*.ttf")
     flist.append(glob.glob(path + "/../thirdparty/fonts/*.otf"))
     flist.append(glob.glob(path + "/../thirdparty/fonts/*.otf"))
     env.Depends('#editor/builtin_fonts.gen.h', flist)
     env.Depends('#editor/builtin_fonts.gen.h', flist)
-    env.CommandNoCache('#editor/builtin_fonts.gen.h', flist, make_fonts_header)
+    env.CommandNoCache('#editor/builtin_fonts.gen.h', flist, run_in_subprocess(editor_builders.make_fonts_header))
+
+    # Authors
+    env.Depends('#editor/authors.gen.h', "../AUTHORS.md")
+    env.CommandNoCache('#editor/authors.gen.h', "../AUTHORS.md", run_in_subprocess(editor_builders.make_authors_header))
+
+    # Donors
+    env.Depends('#editor/donors.gen.h', "../DONORS.md")
+    env.CommandNoCache('#editor/donors.gen.h', "../DONORS.md", run_in_subprocess(editor_builders.make_donors_header))
+
+    # License
+    env.Depends('#editor/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"])
+    env.CommandNoCache('#editor/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"], run_in_subprocess(editor_builders.make_license_header))
 
 
     env.add_source_files(env.editor_sources, "*.cpp")
     env.add_source_files(env.editor_sources, "*.cpp")
     env.add_source_files(env.editor_sources, ["#thirdparty/misc/clipper.cpp"])
     env.add_source_files(env.editor_sources, ["#thirdparty/misc/clipper.cpp"])

+ 412 - 0
editor/editor_builders.py

@@ -0,0 +1,412 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+import os
+import os.path
+from platform_methods import subprocess_main
+from compat import encode_utf8, byte_to_str, open_utf8, escape_string
+
+
def make_certs_header(target, source, env):
    """Embed the zlib-compressed CA certificate bundle as a C header."""
    import zlib

    with open(source[0], "rb") as cert_file:
        raw = cert_file.read()
    uncompressed_size = len(raw)
    compressed = zlib.compress(raw)

    out = open_utf8(target[0], "w")
    out.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    out.write("#ifndef _CERTS_RAW_H\n")
    out.write("#define _CERTS_RAW_H\n")
    out.write("static const int _certs_compressed_size = " + str(len(compressed)) + ";\n")
    out.write("static const int _certs_uncompressed_size = " + str(uncompressed_size) + ";\n")
    out.write("static const unsigned char _certs_compressed[] = {\n")
    for index in range(len(compressed)):
        out.write("\t" + byte_to_str(compressed[index]) + ",\n")
    out.write("};\n")
    out.write("#endif")
    out.close()
+
+
def make_doc_header(target, source, env):
    """Concatenate the XML class docs and embed them zlib-compressed."""
    import zlib

    g = open_utf8(target[0], "w")

    # Concatenate every .xml input, in the order given; other files are skipped.
    chunks = []
    for path in source:
        if path.endswith(".xml"):
            with open_utf8(path, "r") as xml_file:
                chunks.append(xml_file.read())

    data = encode_utf8("".join(chunks))
    uncompressed_size = len(data)
    data = zlib.compress(data)

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _DOC_DATA_RAW_H\n")
    g.write("#define _DOC_DATA_RAW_H\n")
    g.write("static const int _doc_data_compressed_size = " + str(len(data)) + ";\n")
    g.write("static const int _doc_data_uncompressed_size = " + str(uncompressed_size) + ";\n")
    g.write("static const unsigned char _doc_data_compressed[] = {\n")
    for index in range(len(data)):
        g.write("\t" + byte_to_str(data[index]) + ",\n")
    g.write("};\n")

    g.write("#endif")

    g.close()
+
+
def make_fonts_header(target, source, env):
    """Embed the editor's TTF/OTF fonts as C byte arrays.

    Fonts are stored uncompressed, since FreeType will reference the
    data directly from the memory pointer.
    """
    dst = target[0]

    g = open_utf8(dst, "w")

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _EDITOR_FONTS_H\n")
    g.write("#define _EDITOR_FONTS_H\n")

    # saving uncompressed, since freetype will reference from memory pointer
    for i in range(len(source)):
        with open(source[i], "rb") as f:
            buf = f.read()

        name = os.path.splitext(os.path.basename(source[i]))[0]

        g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
        g.write("static const unsigned char _font_" + name + "[] = {\n")
        # FIX: inner loop previously reused `i`, shadowing the outer index.
        for j in range(len(buf)):
            g.write("\t" + byte_to_str(buf[j]) + ",\n")

        g.write("};\n")

    g.write("#endif")

    g.close()
+
+
def make_translations_header(target, source, env):
    """Embed the zlib-compressed editor translation catalogs (.po files).

    Also emits a 0-terminated `_editor_translations` index table mapping
    each language name to its compressed data and sizes.
    """
    import zlib
    import os.path

    dst = target[0]

    g = open_utf8(dst, "w")

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _EDITOR_TRANSLATIONS_H\n")
    g.write("#define _EDITOR_TRANSLATIONS_H\n")

    # Deterministic output order: sort by language name (file basename).
    sorted_paths = sorted(source, key=lambda path: os.path.splitext(os.path.basename(path))[0])

    xl_names = []
    for i in range(len(sorted_paths)):
        with open(sorted_paths[i], "rb") as f:
            buf = f.read()
        decomp_size = len(buf)
        buf = zlib.compress(buf)
        name = os.path.splitext(os.path.basename(sorted_paths[i]))[0]

        g.write("static const unsigned char _translation_" + name + "_compressed[] = {\n")
        # FIX: inner loop previously reused `i`, shadowing the outer index.
        for j in range(len(buf)):
            g.write("\t" + byte_to_str(buf[j]) + ",\n")

        g.write("};\n")

        xl_names.append([name, len(buf), str(decomp_size)])

    g.write("struct EditorTranslationList {\n")
    g.write("\tconst char* lang;\n")
    g.write("\tint comp_size;\n")
    g.write("\tint uncomp_size;\n")
    g.write("\tconst unsigned char* data;\n")
    g.write("};\n\n")
    g.write("static EditorTranslationList _editor_translations[] = {\n")
    for x in xl_names:
        g.write("\t{ \"" + x[0] + "\", " + str(x[1]) + ", " + str(x[2]) + ", _translation_" + x[0] + "_compressed},\n")
    g.write("\t{NULL, 0, 0, NULL}\n")
    g.write("};\n")

    g.write("#endif")

    g.close()
+
+
def make_authors_header(target, source, env):
    """Generate editor/authors.gen.h from AUTHORS.md.

    Emits one `static const char *` array per recognized section of the
    markdown file, each terminated by a 0 sentinel entry.
    """
    section_names = ["Project Founders", "Lead Developer", "Project Manager", "Developers"]
    section_ids = ["dev_founders", "dev_lead", "dev_manager", "dev_names"]

    in_file = open_utf8(source[0], "r")
    out_file = open_utf8(target[0], "w")

    out_file.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    out_file.write("#ifndef _EDITOR_AUTHORS_H\n")
    out_file.write("#define _EDITOR_AUTHORS_H\n")

    in_section = False

    def terminate_array():
        # Arrays end with a 0 sentinel so C code can iterate without a count.
        out_file.write("\t0\n")
        out_file.write("};\n")

    for line in in_file:
        if in_section and line.startswith("    "):
            # Indented lines inside a section are individual author names.
            out_file.write("\t\"" + escape_string(line.strip()) + "\",\n")
            continue
        if not line.startswith("## "):
            continue
        # A new markdown heading closes any array that is still open.
        if in_section:
            terminate_array()
            in_section = False
        for idx in range(len(section_names)):
            if line.strip().endswith(section_names[idx]):
                out_file.write("static const char *" + escape_string(section_ids[idx]) + "[] = {\n")
                in_section = True
                break

    if in_section:
        terminate_array()

    out_file.write("#endif\n")

    out_file.close()
    in_file.close()
+
def make_donors_header(target, source, env):
    """Generate editor/donors.gen.h from DONORS.md.

    Emits one `static const char *` array per donation tier, each
    terminated by a 0 sentinel entry.
    """
    sections = ["Platinum sponsors", "Gold sponsors", "Mini sponsors", "Gold donors", "Silver donors", "Bronze donors"]
    sections_id = ["donor_s_plat", "donor_s_gold", "donor_s_mini", "donor_gold", "donor_silver", "donor_bronze"]

    src = source[0]
    dst = target[0]
    f = open_utf8(src, "r")
    g = open_utf8(dst, "w")

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _EDITOR_DONORS_H\n")
    g.write("#define _EDITOR_DONORS_H\n")

    current_section = ""
    reading = False

    def close_section():
        # Arrays end with a 0 sentinel so C code can iterate without a count.
        g.write("\t0\n")
        g.write("};\n")

    for line in f:
        # BUGFIX: was `if reading >= 0:`, which is always true for a bool
        # (False >= 0) and would emit entries before any array was opened;
        # now matches the logic of make_authors_header above.
        if reading:
            if line.startswith("    "):
                g.write("\t\"" + escape_string(line.strip()) + "\",\n")
                continue
        if line.startswith("## "):
            if reading:
                close_section()
                reading = False
            for i in range(len(sections)):
                if line.strip().endswith(sections[i]):
                    current_section = escape_string(sections_id[i])
                    reading = True
                    g.write("static const char *" + current_section + "[] = {\n")
                    break

    if reading:
        close_section()

    g.write("#endif\n")

    g.close()
    f.close()
+
+
+def make_license_header(target, source, env):
+
+    src_copyright = source[0]
+    src_license = source[1]
+    dst = target[0]
+    f = open_utf8(src_license, "r")
+    fc = open_utf8(src_copyright, "r")
+    g = open_utf8(dst, "w")
+
+    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+    g.write("#ifndef _EDITOR_LICENSE_H\n")
+    g.write("#define _EDITOR_LICENSE_H\n")
+    g.write("static const char *about_license =")
+
+    for line in f:
+        escaped_string = escape_string(line.strip())
+        g.write("\n\t\"" + escaped_string + "\\n\"")
+
+    g.write(";\n")
+
+    tp_current = 0
+    tp_file = ""
+    tp_comment = ""
+    tp_copyright = ""
+    tp_license = ""
+
+    tp_licensename = ""
+    tp_licensebody = ""
+
+    tp = []
+    tp_licensetext = []
+    for line in fc:
+        if line.startswith("#"):
+            continue
+
+        if line.startswith("Files:"):
+            tp_file = line[6:].strip()
+            tp_current = 1
+        elif line.startswith("Comment:"):
+            tp_comment = line[8:].strip()
+            tp_current = 2
+        elif line.startswith("Copyright:"):
+            tp_copyright = line[10:].strip()
+            tp_current = 3
+        elif line.startswith("License:"):
+            if tp_current != 0:
+                tp_license = line[8:].strip()
+                tp_current = 4
+            else:
+                tp_licensename = line[8:].strip()
+                tp_current = 5
+        elif line.startswith(" "):
+            if tp_current == 1:
+                tp_file += "\n" + line.strip()
+            elif tp_current == 3:
+                tp_copyright += "\n" + line.strip()
+            elif tp_current == 5:
+                if line.strip() == ".":
+                    tp_licensebody += "\n"
+                else:
+                    tp_licensebody += line[1:]
+        else:
+            if tp_current != 0:
+                if tp_current == 5:
+                    tp_licensetext.append([tp_licensename, tp_licensebody])
+
+                    tp_licensename = ""
+                    tp_licensebody = ""
+                else:
+                    added = False
+                    for i in tp:
+                        if i[0] == tp_comment:
+                            i[1].append([tp_file, tp_copyright, tp_license])
+                            added = True
+                            break
+                    if not added:
+                        tp.append([tp_comment,[[tp_file, tp_copyright, tp_license]]])
+
+                    tp_file = []
+                    tp_comment = ""
+                    tp_copyright = []
+                    tp_license = ""
+                tp_current = 0
+
+    tp_licensetext.append([tp_licensename, tp_licensebody])
+
+    about_thirdparty = ""
+    about_tp_copyright_count = ""
+    about_tp_license = ""
+    about_tp_copyright = ""
+    about_tp_file = ""
+
+    for i in tp:
+        about_thirdparty += "\t\"" + i[0] + "\",\n"
+        about_tp_copyright_count += str(len(i[1])) + ", "
+        for j in i[1]:
+            file_body = ""
+            copyright_body = ""
+            for k in j[0].split("\n"):
+                if file_body != "":
+                    file_body += "\\n\"\n"
+                escaped_string = escape_string(k.strip())
+                file_body += "\t\"" + escaped_string
+            for k in j[1].split("\n"):
+                if copyright_body != "":
+                    copyright_body += "\\n\"\n"
+                escaped_string = escape_string(k.strip())
+                copyright_body += "\t\"" + escaped_string
+
+            about_tp_file += "\t" + file_body + "\",\n"
+            about_tp_copyright += "\t" + copyright_body + "\",\n"
+            about_tp_license += "\t\"" + j[2] + "\",\n"
+
+    about_license_name = ""
+    about_license_body = ""
+
+    for i in tp_licensetext:
+        body = ""
+        for j in i[1].split("\n"):
+            if body != "":
+                body += "\\n\"\n"
+            escaped_string = escape_string(j.strip())
+            body += "\t\"" + escaped_string
+
+        about_license_name += "\t\"" + i[0] + "\",\n"
+        about_license_body += "\t" + body + "\",\n"
+
+    g.write("static const char *about_thirdparty[] = {\n")
+    g.write(about_thirdparty)
+    g.write("\t0\n")
+    g.write("};\n")
+    g.write("#define THIRDPARTY_COUNT " + str(len(tp)) + "\n")
+
+    g.write("static const int about_tp_copyright_count[] = {\n\t")
+    g.write(about_tp_copyright_count)
+    g.write("0\n};\n")
+
+    g.write("static const char *about_tp_file[] = {\n")
+    g.write(about_tp_file)
+    g.write("\t0\n")
+    g.write("};\n")
+
+    g.write("static const char *about_tp_copyright[] = {\n")
+    g.write(about_tp_copyright)
+    g.write("\t0\n")
+    g.write("};\n")
+
+    g.write("static const char *about_tp_license[] = {\n")
+    g.write(about_tp_license)
+    g.write("\t0\n")
+    g.write("};\n")
+
+    g.write("static const char *about_license_name[] = {\n")
+    g.write(about_license_name)
+    g.write("\t0\n")
+    g.write("};\n")
+    g.write("#define LICENSE_COUNT " + str(len(tp_licensetext)) + "\n")
+
+    g.write("static const char *about_license_body[] = {\n")
+    g.write(about_license_body)
+    g.write("\t0\n")
+    g.write("};\n")
+
+    g.write("#endif\n")
+
+    g.close()
+    fc.close()
+    f.close()
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 4 - 86
editor/icons/SCsub

@@ -1,96 +1,14 @@
 #!/usr/bin/env python
 #!/usr/bin/env python
 
 
 Import('env')
 Import('env')
-from compat import StringIO
+from platform_methods import run_in_subprocess
+import editor_icons_builders
 
 
-def make_editor_icons_action(target, source, env):
 
 
-    import os
-
-    dst = target[0].srcnode().abspath
-    svg_icons = source
-
-    icons_string = StringIO()
-
-    for f in svg_icons:
-
-        fname = str(f)
-
-        icons_string.write('\t"')
-
-        with open(fname, 'rb') as svgf:
-            b = svgf.read(1)
-            while(len(b) == 1):
-                icons_string.write("\\" + str(hex(ord(b)))[1:])
-                b = svgf.read(1)
-
-
-        icons_string.write('"')
-        if fname != svg_icons[-1]:
-            icons_string.write(",")
-        icons_string.write('\n')
-
-    s = StringIO()
-    s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    s.write("#ifndef _EDITOR_ICONS_H\n")
-    s.write("#define _EDITOR_ICONS_H\n")
-    s.write("static const int editor_icons_count = {};\n".format(len(svg_icons)))
-    s.write("static const char *editor_icons_sources[] = {\n")
-    s.write(icons_string.getvalue())
-    s.write('};\n\n')
-    s.write("static const char *editor_icons_names[] = {\n")
-
-    # this is used to store the indices of thumbnail icons
-    thumb_medium_indices = [];
-    thumb_big_indices = [];
-    index = 0
-    for f in svg_icons:
-
-        fname = str(f)
-
-        icon_name = os.path.basename(fname)[5:-4].title().replace("_", "")
-        # some special cases
-        if icon_name in ['Int', 'Bool', 'Float']:
-            icon_name = icon_name.lower()
-        if icon_name.endswith("MediumThumb"):  # don't know a better way to handle this
-            thumb_medium_indices.append(str(index))
-        if icon_name.endswith("BigThumb"):  # don't know a better way to handle this
-            thumb_big_indices.append(str(index))
-
-        s.write('\t"{0}"'.format(icon_name))
-
-        if fname != svg_icons[-1]:
-            s.write(",")
-        s.write('\n')
-
-        index += 1
-
-    s.write('};\n')
-
-    if thumb_medium_indices:
-        s.write("\n\n")
-        s.write("static const int editor_md_thumbs_count = {};\n".format(len(thumb_medium_indices)))
-        s.write("static const int editor_md_thumbs_indices[] = {")
-        s.write(", ".join(thumb_medium_indices))
-        s.write("};\n")
-    if thumb_big_indices:
-        s.write("\n\n")
-        s.write("static const int editor_bg_thumbs_count = {};\n".format(len(thumb_big_indices)))
-        s.write("static const int editor_bg_thumbs_indices[] = {")
-        s.write(", ".join(thumb_big_indices))
-        s.write("};\n")
-
-    s.write("#endif\n")
-
-    with open(dst, "w") as f:
-        f.write(s.getvalue())
-
-    s.close()
-    icons_string.close()
-
-make_editor_icons_builder = Builder(action=make_editor_icons_action,
+make_editor_icons_builder = Builder(action=run_in_subprocess(editor_icons_builders.make_editor_icons_action),
                                     suffix='.h',
                                     suffix='.h',
                                     src_suffix='.svg')
                                     src_suffix='.svg')
+
 env['BUILDERS']['MakeEditorIconsBuilder'] = make_editor_icons_builder
 env['BUILDERS']['MakeEditorIconsBuilder'] = make_editor_icons_builder
 env.Alias('editor_icons', [env.MakeEditorIconsBuilder('#editor/editor_icons.gen.h', Glob("*.svg"))])
 env.Alias('editor_icons', [env.MakeEditorIconsBuilder('#editor/editor_icons.gen.h', Glob("*.svg"))])
 
 

+ 96 - 0
editor/icons/editor_icons_builders.py

@@ -0,0 +1,96 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+import os
+from platform_methods import subprocess_main
+from compat import StringIO
+
+
+def make_editor_icons_action(target, source, env):
+
+    dst = target[0]
+    svg_icons = source
+
+    icons_string = StringIO()
+
+    for f in svg_icons:
+
+        fname = str(f)
+
+        icons_string.write('\t"')
+
+        with open(fname, 'rb') as svgf:
+            b = svgf.read(1)
+            while(len(b) == 1):
+                icons_string.write("\\" + str(hex(ord(b)))[1:])
+                b = svgf.read(1)
+
+
+        icons_string.write('"')
+        if fname != svg_icons[-1]:
+            icons_string.write(",")
+        icons_string.write('\n')
+
+    s = StringIO()
+    s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+    s.write("#ifndef _EDITOR_ICONS_H\n")
+    s.write("#define _EDITOR_ICONS_H\n")
+    s.write("static const int editor_icons_count = {};\n".format(len(svg_icons)))
+    s.write("static const char *editor_icons_sources[] = {\n")
+    s.write(icons_string.getvalue())
+    s.write('};\n\n')
+    s.write("static const char *editor_icons_names[] = {\n")
+
+    # this is used to store the indices of thumbnail icons
+    thumb_medium_indices = [];
+    thumb_big_indices = [];
+    index = 0
+    for f in svg_icons:
+
+        fname = str(f)
+
+        icon_name = os.path.basename(fname)[5:-4].title().replace("_", "")
+        # some special cases
+        if icon_name in ['Int', 'Bool', 'Float']:
+            icon_name = icon_name.lower()
+        if icon_name.endswith("MediumThumb"):  # don't know a better way to handle this
+            thumb_medium_indices.append(str(index))
+        if icon_name.endswith("BigThumb"):  # don't know a better way to handle this
+            thumb_big_indices.append(str(index))
+
+        s.write('\t"{0}"'.format(icon_name))
+
+        if fname != svg_icons[-1]:
+            s.write(",")
+        s.write('\n')
+
+        index += 1
+
+    s.write('};\n')
+
+    if thumb_medium_indices:
+        s.write("\n\n")
+        s.write("static const int editor_md_thumbs_count = {};\n".format(len(thumb_medium_indices)))
+        s.write("static const int editor_md_thumbs_indices[] = {")
+        s.write(", ".join(thumb_medium_indices))
+        s.write("};\n")
+    if thumb_big_indices:
+        s.write("\n\n")
+        s.write("static const int editor_bg_thumbs_count = {};\n".format(len(thumb_big_indices)))
+        s.write("static const int editor_bg_thumbs_indices[] = {")
+        s.write(", ".join(thumb_big_indices))
+        s.write("};\n")
+
+    s.write("#endif\n")
+
+    with open(dst, "w") as f:
+        f.write(s.getvalue())
+
+    s.close()
+    icons_string.close()
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 510 - 0
gles_builders.py

@@ -0,0 +1,510 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+from platform_methods import subprocess_main
+
+
+class LegacyGLHeaderStruct:
+
+    def __init__(self):
+        self.vertex_lines = []
+        self.fragment_lines = []
+        self.uniforms = []
+        self.attributes = []
+        self.feedbacks = []
+        self.fbos = []
+        self.conditionals = []
+        self.enums = {}
+        self.texunits = []
+        self.texunit_names = []
+        self.ubos = []
+        self.ubo_names = []
+
+        self.vertex_included_files = []
+        self.fragment_included_files = []
+
+        self.reading = ""
+        self.line_offset = 0
+        self.vertex_offset = 0
+        self.fragment_offset = 0
+
+
+def include_file_in_legacygl_header(filename, header_data, depth):
+    fs = open(filename, "r")
+    line = fs.readline()
+
+    while line:
+
+        if line.find("[vertex]") != -1:
+            header_data.reading = "vertex"
+            line = fs.readline()
+            header_data.line_offset += 1
+            header_data.vertex_offset = header_data.line_offset
+            continue
+
+        if line.find("[fragment]") != -1:
+            header_data.reading = "fragment"
+            line = fs.readline()
+            header_data.line_offset += 1
+            header_data.fragment_offset = header_data.line_offset
+            continue
+
+        while line.find("#include ") != -1:
+            includeline = line.replace("#include ", "").strip()[1:-1]
+
+            import os.path
+
+            included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
+            if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
+                header_data.vertex_included_files += [included_file]
+                if include_file_in_legacygl_header(included_file, header_data, depth + 1) == None:
+                    print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+            elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
+                header_data.fragment_included_files += [included_file]
+                if include_file_in_legacygl_header(included_file, header_data, depth + 1) == None:
+                    print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+
+            line = fs.readline()
+
+        if line.find("#ifdef ") != -1 or line.find("#elif defined(") != -1:
+            if line.find("#ifdef ") != -1:
+                ifdefline = line.replace("#ifdef ", "").strip()
+            else:
+                ifdefline = line.replace("#elif defined(", "").strip()
+                ifdefline = ifdefline.replace(")", "").strip()
+
+            if line.find("_EN_") != -1:
+                enumbase = ifdefline[:ifdefline.find("_EN_")]
+                ifdefline = ifdefline.replace("_EN_", "_")
+                line = line.replace("_EN_", "_")
+                if enumbase not in header_data.enums:
+                    header_data.enums[enumbase] = []
+                if ifdefline not in header_data.enums[enumbase]:
+                    header_data.enums[enumbase].append(ifdefline)
+
+            elif not ifdefline in header_data.conditionals:
+                header_data.conditionals += [ifdefline]
+
+        if line.find("uniform") != -1 and line.lower().find("texunit:") != -1:
+            # texture unit
+            texunitstr = line[line.find(":") + 1:].strip()
+            if texunitstr == "auto":
+                texunit = "-1"
+            else:
+                texunit = str(int(texunitstr))
+            uline = line[:line.lower().find("//")]
+            uline = uline.replace("uniform", "")
+            uline = uline.replace("highp", "")
+            uline = uline.replace(";", "")
+            lines = uline.split(",")
+            for x in lines:
+
+                x = x.strip()
+                x = x[x.rfind(" ") + 1:]
+                if x.find("[") != -1:
+                    # unfiorm array
+                    x = x[:x.find("[")]
+
+                if not x in header_data.texunit_names:
+                    header_data.texunits += [(x, texunit)]
+                    header_data.texunit_names += [x]
+
+        elif line.find("uniform") != -1 and line.lower().find("ubo:") != -1:
+            # uniform buffer object
+            ubostr = line[line.find(":") + 1:].strip()
+            ubo = str(int(ubostr))
+            uline = line[:line.lower().find("//")]
+            uline = uline[uline.find("uniform") + len("uniform"):]
+            uline = uline.replace("highp", "")
+            uline = uline.replace(";", "")
+            uline = uline.replace("{", "").strip()
+            lines = uline.split(",")
+            for x in lines:
+
+                x = x.strip()
+                x = x[x.rfind(" ") + 1:]
+                if x.find("[") != -1:
+                    # unfiorm array
+                    x = x[:x.find("[")]
+
+                if not x in header_data.ubo_names:
+                    header_data.ubos += [(x, ubo)]
+                    header_data.ubo_names += [x]
+
+        elif line.find("uniform") != -1 and line.find("{") == -1 and line.find(";") != -1:
+            uline = line.replace("uniform", "")
+            uline = uline.replace(";", "")
+            lines = uline.split(",")
+            for x in lines:
+
+                x = x.strip()
+                x = x[x.rfind(" ") + 1:]
+                if x.find("[") != -1:
+                    # unfiorm array
+                    x = x[:x.find("[")]
+
+                if not x in header_data.uniforms:
+                    header_data.uniforms += [x]
+
+        if line.strip().find("attribute ") == 0 and line.find("attrib:") != -1:
+            uline = line.replace("in ", "")
+            uline = uline.replace("attribute ", "")
+            uline = uline.replace("highp ", "")
+            uline = uline.replace(";", "")
+            uline = uline[uline.find(" "):].strip()
+
+            if uline.find("//") != -1:
+                name, bind = uline.split("//")
+                if bind.find("attrib:") != -1:
+                    name = name.strip()
+                    bind = bind.replace("attrib:", "").strip()
+                    header_data.attributes += [(name, bind)]
+
+        if line.strip().find("out ") == 0 and line.find("tfb:") != -1:
+            uline = line.replace("out ", "")
+            uline = uline.replace("highp ", "")
+            uline = uline.replace(";", "")
+            uline = uline[uline.find(" "):].strip()
+
+            if uline.find("//") != -1:
+                name, bind = uline.split("//")
+                if bind.find("tfb:") != -1:
+                    name = name.strip()
+                    bind = bind.replace("tfb:", "").strip()
+                    header_data.feedbacks += [(name, bind)]
+
+        line = line.replace("\r", "")
+        line = line.replace("\n", "")
+
+        if header_data.reading == "vertex":
+            header_data.vertex_lines += [line]
+        if header_data.reading == "fragment":
+            header_data.fragment_lines += [line]
+
+        line = fs.readline()
+        header_data.line_offset += 1
+
+    fs.close()
+
+    return header_data
+
+
+def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2=False):
+    header_data = LegacyGLHeaderStruct()
+    include_file_in_legacygl_header(filename, header_data, 0)
+
+    out_file = filename + ".gen.h"
+    fd = open(out_file, "w")
+
+    enum_constants = []
+
+    fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n")
+
+    out_file_base = out_file
+    out_file_base = out_file_base[out_file_base.rfind("/") + 1:]
+    out_file_base = out_file_base[out_file_base.rfind("\\") + 1:]
+    out_file_ifdef = out_file_base.replace(".", "_").upper()
+    fd.write("#ifndef " + out_file_ifdef + class_suffix + "_120\n")
+    fd.write("#define " + out_file_ifdef + class_suffix + "_120\n")
+
+    out_file_class = out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix
+    fd.write("\n\n")
+    fd.write("#include \"" + include + "\"\n\n\n")
+    fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n")
+    fd.write("\t virtual String get_shader_name() const { return \"" + out_file_class + "\"; }\n")
+
+    fd.write("public:\n\n")
+
+    if header_data.conditionals:
+        fd.write("\tenum Conditionals {\n")
+        for x in header_data.conditionals:
+            fd.write("\t\t" + x.upper() + ",\n")
+        fd.write("\t};\n\n")
+
+    if header_data.uniforms:
+        fd.write("\tenum Uniforms {\n")
+        for x in header_data.uniforms:
+            fd.write("\t\t" + x.upper() + ",\n")
+        fd.write("\t};\n\n")
+
+    fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n")
+    if header_data.conditionals:
+        fd.write("\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable)  {  _set_conditional(p_conditional,p_enable); }\n\n")
+    fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; ERR_FAIL_COND( get_active()!=this );\n\n ")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Color& p_color) { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(get_uniform(p_uniform),1,col); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector2& p_vec2) { _FU GLfloat vec2[2]={p_vec2.x,p_vec2.y}; glUniform2fv(get_uniform(p_uniform),1,vec2); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Size2i& p_vec2) { _FU GLint vec2[2]={p_vec2.x,p_vec2.y}; glUniform2iv(get_uniform(p_uniform),1,vec2); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector3& p_vec3) { _FU GLfloat vec3[3]={p_vec3.x,p_vec3.y,p_vec3.z}; glUniform3fv(get_uniform(p_uniform),1,vec3); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n")
+    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n")
+
+    fd.write("""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) {  _FU
+
+		const Transform &tr = p_transform;
+
+		GLfloat matrix[16]={ /* build a 16x16 matrix */
+			tr.basis.elements[0][0],
+			tr.basis.elements[1][0],
+			tr.basis.elements[2][0],
+			0,
+			tr.basis.elements[0][1],
+			tr.basis.elements[1][1],
+			tr.basis.elements[2][1],
+			0,
+			tr.basis.elements[0][2],
+			tr.basis.elements[1][2],
+			tr.basis.elements[2][2],
+			0,
+			tr.origin.x,
+			tr.origin.y,
+			tr.origin.z,
+			1
+		};
+
+
+                glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
+
+
+	}
+
+	""")
+
+    fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform2D& p_transform) {  _FU
+
+		const Transform2D &tr = p_transform;
+
+		GLfloat matrix[16]={ /* build a 16x16 matrix */
+			tr.elements[0][0],
+			tr.elements[0][1],
+			0,
+			0,
+			tr.elements[1][0],
+			tr.elements[1][1],
+			0,
+			0,
+			0,
+			0,
+			1,
+			0,
+			tr.elements[2][0],
+			tr.elements[2][1],
+			0,
+			1
+		};
+
+
+        glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
+
+
+	}
+
+	""")
+
+    fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) {  _FU
+
+		GLfloat matrix[16];
+
+		for (int i=0;i<4;i++) {
+			for (int j=0;j<4;j++) {
+
+				matrix[i*4+j]=p_matrix.matrix[i][j];
+			}
+		}
+
+		glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
+}""")
+
+    fd.write("\n\n#undef _FU\n\n\n")
+
+    fd.write("\tvirtual void init() {\n\n")
+
+    enum_value_count = 0
+
+    if header_data.enums:
+
+        fd.write("\t\t//Written using math, given nonstandarity of 64 bits integer constants..\n")
+        fd.write("\t\tstatic const Enum _enums[]={\n")
+
+        bitofs = len(header_data.conditionals)
+        enum_vals = []
+
+        for xv in header_data.enums:
+            x = header_data.enums[xv]
+            bits = 1
+            amt = len(x)
+            while (2 ** bits < amt):
+                bits += 1
+            strs = "{"
+            for i in range(amt):
+                strs += "\"#define " + x[i] + "\\n\","
+
+                c = {}
+                c["set_mask"] = "uint64_t(" + str(i) + ")<<" + str(bitofs)
+                c["clear_mask"] = "((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")"
+                enum_vals.append(c)
+                enum_constants.append(x[i])
+
+            strs += "NULL}"
+
+            fd.write("\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n")
+            bitofs += bits
+
+        fd.write("\t\t};\n\n")
+
+        fd.write("\t\tstatic const EnumValue _enum_values[]={\n")
+
+        enum_value_count = len(enum_vals)
+        for x in enum_vals:
+            fd.write("\t\t\t{" + x["set_mask"] + "," + x["clear_mask"] + "},\n")
+
+        fd.write("\t\t};\n\n")
+
+    conditionals_found = []
+    if header_data.conditionals:
+
+        fd.write("\t\tstatic const char* _conditional_strings[]={\n")
+        if header_data.conditionals:
+            for x in header_data.conditionals:
+                fd.write("\t\t\t\"#define " + x + "\\n\",\n")
+                conditionals_found.append(x)
+        fd.write("\t\t};\n\n")
+    else:
+        fd.write("\t\tstatic const char **_conditional_strings=NULL;\n")
+
+    if header_data.uniforms:
+
+        fd.write("\t\tstatic const char* _uniform_strings[]={\n")
+        if header_data.uniforms:
+            for x in header_data.uniforms:
+                fd.write("\t\t\t\"" + x + "\",\n")
+        fd.write("\t\t};\n\n")
+    else:
+        fd.write("\t\tstatic const char **_uniform_strings=NULL;\n")
+
+    if output_attribs:
+        if header_data.attributes:
+
+            fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n")
+            for x in header_data.attributes:
+                fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
+            fd.write("\t\t};\n\n")
+        else:
+            fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n")
+
+    feedback_count = 0
+
+    if not gles2 and len(header_data.feedbacks):
+
+        fd.write("\t\tstatic const Feedback _feedbacks[]={\n")
+        for x in header_data.feedbacks:
+            name = x[0]
+            cond = x[1]
+            if cond in conditionals_found:
+                fd.write("\t\t\t{\"" + name + "\"," + str(conditionals_found.index(cond)) + "},\n")
+            else:
+                fd.write("\t\t\t{\"" + name + "\",-1},\n")
+
+            feedback_count += 1
+
+        fd.write("\t\t};\n\n")
+    else:
+        if gles2:
+            pass
+        else:
+            fd.write("\t\tstatic const Feedback* _feedbacks=NULL;\n")
+
+    if header_data.texunits:
+        fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n")
+        for x in header_data.texunits:
+            fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
+        fd.write("\t\t};\n\n")
+    else:
+        fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n")
+
+    if not gles2 and header_data.ubos:
+        fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n")
+        for x in header_data.ubos:
+            fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
+        fd.write("\t\t};\n\n")
+    else:
+        if gles2:
+            pass
+        else:
+            fd.write("\t\tstatic UBOPair *_ubo_pairs=NULL;\n")
+
+    fd.write("\t\tstatic const char _vertex_code[]={\n")
+    for x in header_data.vertex_lines:
+        for c in x:
+            fd.write(str(ord(c)) + ",")
+
+        fd.write(str(ord('\n')) + ",")
+    fd.write("\t\t0};\n\n")
+
+    fd.write("\t\tstatic const int _vertex_code_start=" + str(header_data.vertex_offset) + ";\n")
+
+    fd.write("\t\tstatic const char _fragment_code[]={\n")
+    for x in header_data.fragment_lines:
+        for c in x:
+            fd.write(str(ord(c)) + ",")
+
+        fd.write(str(ord('\n')) + ",")
+    fd.write("\t\t0};\n\n")
+
+    fd.write("\t\tstatic const int _fragment_code_start=" + str(header_data.fragment_offset) + ";\n")
+
+    if output_attribs:
+        if gles2:
+            fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str(
+                len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
+        else:
+            fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str(
+                len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str(
+                feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
+    else:
+        if gles2:
+            fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str(
+                len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(
+                enum_value_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
+        else:
+            fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str(
+                len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(enum_value_count) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str(
+                feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
+
+    fd.write("\t}\n\n")
+
+    if enum_constants:
+
+        fd.write("\tenum EnumConditionals {\n")
+        for x in enum_constants:
+            fd.write("\t\t" + x.upper() + ",\n")
+        fd.write("\t};\n\n")
+        fd.write("\tvoid set_enum_conditional(EnumConditionals p_cond) { _set_enum_conditional(p_cond); }\n")
+
+    fd.write("};\n\n")
+    fd.write("#endif\n\n")
+    fd.close()
+
+
+def build_gles3_headers(target, source, env):
+    for x in source:
+        build_legacygl_header(str(x), include="drivers/gles3/shader_gles3.h", class_suffix="GLES3", output_attribs=True)
+
+
+def build_gles2_headers(target, source, env):
+    for x in source:
+        build_legacygl_header(str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True, gles2=True)
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 6 - 126
main/SCsub

@@ -1,128 +1,8 @@
 #!/usr/bin/env python
 #!/usr/bin/env python
 
 
 Import('env')
 Import('env')
-from compat import byte_to_str
-from collections import OrderedDict
-
-def make_splash(target, source, env):
-
-    src = source[0].srcnode().abspath
-    dst = target[0].srcnode().abspath
-
-    with open(src, "rb") as f:
-        buf = f.read()
-
-    with open(dst, "w") as g:
-        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-        g.write("#ifndef BOOT_SPLASH_H\n")
-        g.write("#define BOOT_SPLASH_H\n")
-        g.write('static const Color boot_splash_bg_color = Color::html("#232323");\n')
-        g.write("static const unsigned char boot_splash_png[] = {\n")
-        for i in range(len(buf)):
-            g.write(byte_to_str(buf[i]) + ",\n")
-        g.write("};\n")
-        g.write("#endif")
-
-
-def make_splash_editor(target, source, env):
-
-    src = source[0].srcnode().abspath
-    dst = target[0].srcnode().abspath
-
-    with open(src, "rb") as f:
-        buf = f.read()
-
-    with open(dst, "w") as g:
-        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-        g.write("#ifndef BOOT_SPLASH_EDITOR_H\n")
-        g.write("#define BOOT_SPLASH_EDITOR_H\n")
-        g.write('static const Color boot_splash_editor_bg_color = Color::html("#232323");\n')
-        g.write("static const unsigned char boot_splash_editor_png[] = {\n")
-        for i in range(len(buf)):
-            g.write(byte_to_str(buf[i]) + ",\n")
-        g.write("};\n")
-        g.write("#endif")
-
-
-def make_app_icon(target, source, env):
-
-    src = source[0].srcnode().abspath
-    dst = target[0].srcnode().abspath
-
-    with open(src, "rb") as f:
-        buf = f.read()
-
-    with open(dst, "w") as g:
-        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-        g.write("#ifndef APP_ICON_H\n")
-        g.write("#define APP_ICON_H\n")
-        g.write("static const unsigned char app_icon_png[] = {\n")
-        for i in range(len(buf)):
-            g.write(byte_to_str(buf[i]) + ",\n")
-        g.write("};\n")
-        g.write("#endif")
-
-def make_default_controller_mappings(target, source, env):
-    dst = target[0].srcnode().abspath
-    g = open(dst, "w")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#include \"default_controller_mappings.h\"\n")
-    g.write("#include \"typedefs.h\"\n")
-
-    # ensure mappings have a consistent order
-    platform_mappings = OrderedDict()
-    for src in source:
-        src_path = src.srcnode().abspath
-        with open(src_path, "r") as f:
-            # read mapping file and skip header
-            mapping_file_lines = f.readlines()[2:]
-
-        current_platform = None
-        for line in mapping_file_lines:
-            if not line:
-                continue
-            line = line.strip()
-            if len(line) == 0:
-                continue
-            if line[0] == "#":
-                current_platform = line[1:].strip()
-                if current_platform not in platform_mappings:
-                    platform_mappings[current_platform] = {}
-            elif current_platform:
-                line_parts = line.split(",")
-                guid = line_parts[0]
-                if guid in platform_mappings[current_platform]:
-                    g.write("// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(src_path, current_platform, platform_mappings[current_platform][guid]))
-                valid_mapping = True
-                for input_map in line_parts[2:]:
-                    if "+" in input_map or "-" in input_map or "~" in input_map:
-                        g.write("// WARNING - DISCARDED UNSUPPORTED MAPPING TYPE FROM DATABASE {}: {} {}\n".format(src_path, current_platform, line))
-                        valid_mapping = False
-                        break
-                if valid_mapping:
-                    platform_mappings[current_platform][guid] = line
-            
-    platform_variables = {
-        "Linux": "#if X11_ENABLED",
-        "Windows": "#ifdef WINDOWS_ENABLED",
-        "Mac OS X": "#ifdef OSX_ENABLED",
-        "Android": "#if defined(__ANDROID__)",
-        "iOS": "#ifdef IPHONE_ENABLED",
-        "Javascript": "#ifdef JAVASCRIPT_ENABLED",
-        "UWP": "#ifdef UWP_ENABLED",
-    }
-
-    g.write("const char* DefaultControllerMappings::mappings[] = {\n")
-    for platform, mappings in platform_mappings.items():
-        variable = platform_variables[platform]
-        g.write("{}\n".format(variable))
-        for mapping in mappings.values():
-            g.write("\t\"{}\",\n".format(mapping))
-        g.write("#endif\n")
-
-    g.write("\tNULL\n};\n")
-    g.close()
+from platform_methods import run_in_subprocess
+import main_builders
 
 
 env.main_sources = []
 env.main_sources = []
 env.add_source_files(env.main_sources, "*.cpp")
 env.add_source_files(env.main_sources, "*.cpp")
@@ -131,20 +11,20 @@ env.add_source_files(env.main_sources, "*.cpp")
 controller_databases = ["#main/gamecontrollerdb.txt", "#main/gamecontrollerdb_205.txt", "#main/gamecontrollerdb_204.txt", "#main/godotcontrollerdb.txt"]
 controller_databases = ["#main/gamecontrollerdb.txt", "#main/gamecontrollerdb_205.txt", "#main/gamecontrollerdb_204.txt", "#main/godotcontrollerdb.txt"]
 
 
 env.Depends("#main/default_controller_mappings.gen.cpp", controller_databases)
 env.Depends("#main/default_controller_mappings.gen.cpp", controller_databases)
-env.CommandNoCache("#main/default_controller_mappings.gen.cpp", controller_databases, make_default_controller_mappings)
+env.CommandNoCache("#main/default_controller_mappings.gen.cpp", controller_databases, run_in_subprocess(main_builders.make_default_controller_mappings))
 
 
 env.main_sources.append("#main/default_controller_mappings.gen.cpp")
 env.main_sources.append("#main/default_controller_mappings.gen.cpp")
 
 
 Export('env')
 Export('env')
 
 
 env.Depends("#main/splash.gen.h", "#main/splash.png")
 env.Depends("#main/splash.gen.h", "#main/splash.png")
-env.CommandNoCache("#main/splash.gen.h", "#main/splash.png", make_splash)
+env.CommandNoCache("#main/splash.gen.h", "#main/splash.png", run_in_subprocess(main_builders.make_splash))
 
 
 env.Depends("#main/splash_editor.gen.h", "#main/splash_editor.png")
 env.Depends("#main/splash_editor.gen.h", "#main/splash_editor.png")
-env.CommandNoCache("#main/splash_editor.gen.h", "#main/splash_editor.png", make_splash_editor)
+env.CommandNoCache("#main/splash_editor.gen.h", "#main/splash_editor.png", run_in_subprocess(main_builders.make_splash_editor))
 
 
 env.Depends("#main/app_icon.gen.h", "#main/app_icon.png")
 env.Depends("#main/app_icon.gen.h", "#main/app_icon.png")
-env.CommandNoCache("#main/app_icon.gen.h", "#main/app_icon.png", make_app_icon)
+env.CommandNoCache("#main/app_icon.gen.h", "#main/app_icon.png", run_in_subprocess(main_builders.make_app_icon))
 
 
 SConscript('tests/SCsub')
 SConscript('tests/SCsub')
 
 

+ 130 - 0
main/main_builders.py

@@ -0,0 +1,130 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+from platform_methods import subprocess_main
+from compat import byte_to_str
+from collections import OrderedDict
+
+
+def make_splash(target, source, env):
+    src = source[0]
+    dst = target[0]
+
+    with open(src, "rb") as f:
+        buf = f.read()
+
+    with open(dst, "w") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef BOOT_SPLASH_H\n")
+        g.write("#define BOOT_SPLASH_H\n")
+        g.write('static const Color boot_splash_bg_color = Color::html("#232323");\n')
+        g.write("static const unsigned char boot_splash_png[] = {\n")
+        for i in range(len(buf)):
+            g.write(byte_to_str(buf[i]) + ",\n")
+        g.write("};\n")
+        g.write("#endif")
+
+
+def make_splash_editor(target, source, env):
+    src = source[0]
+    dst = target[0]
+
+    with open(src, "rb") as f:
+        buf = f.read()
+
+    with open(dst, "w") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef BOOT_SPLASH_EDITOR_H\n")
+        g.write("#define BOOT_SPLASH_EDITOR_H\n")
+        g.write('static const Color boot_splash_editor_bg_color = Color::html("#232323");\n')
+        g.write("static const unsigned char boot_splash_editor_png[] = {\n")
+        for i in range(len(buf)):
+            g.write(byte_to_str(buf[i]) + ",\n")
+        g.write("};\n")
+        g.write("#endif")
+
+
+def make_app_icon(target, source, env):
+    src = source[0]
+    dst = target[0]
+
+    with open(src, "rb") as f:
+        buf = f.read()
+
+    with open(dst, "w") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef APP_ICON_H\n")
+        g.write("#define APP_ICON_H\n")
+        g.write("static const unsigned char app_icon_png[] = {\n")
+        for i in range(len(buf)):
+            g.write(byte_to_str(buf[i]) + ",\n")
+        g.write("};\n")
+        g.write("#endif")
+
+
+def make_default_controller_mappings(target, source, env):
+    dst = target[0]
+    g = open(dst, "w")
+
+    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+    g.write("#include \"default_controller_mappings.h\"\n")
+    g.write("#include \"typedefs.h\"\n")
+
+    # ensure mappings have a consistent order
+    platform_mappings = OrderedDict()
+    for src_path in source:
+        with open(src_path, "r") as f:
+            # read mapping file and skip header
+            mapping_file_lines = f.readlines()[2:]
+
+        current_platform = None
+        for line in mapping_file_lines:
+            if not line:
+                continue
+            line = line.strip()
+            if len(line) == 0:
+                continue
+            if line[0] == "#":
+                current_platform = line[1:].strip()
+                if current_platform not in platform_mappings:
+                    platform_mappings[current_platform] = {}
+            elif current_platform:
+                line_parts = line.split(",")
+                guid = line_parts[0]
+                if guid in platform_mappings[current_platform]:
+                    g.write("// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(src_path, current_platform, platform_mappings[current_platform][guid]))
+                valid_mapping = True
+                for input_map in line_parts[2:]:
+                    if "+" in input_map or "-" in input_map or "~" in input_map:
+                        g.write("// WARNING - DISCARDED UNSUPPORTED MAPPING TYPE FROM DATABASE {}: {} {}\n".format(src_path, current_platform, line))
+                        valid_mapping = False
+                        break
+                if valid_mapping:
+                    platform_mappings[current_platform][guid] = line
+
+    platform_variables = {
+        "Linux": "#if X11_ENABLED",
+        "Windows": "#ifdef WINDOWS_ENABLED",
+        "Mac OS X": "#ifdef OSX_ENABLED",
+        "Android": "#if defined(__ANDROID__)",
+        "iOS": "#ifdef IPHONE_ENABLED",
+        "Javascript": "#ifdef JAVASCRIPT_ENABLED",
+        "UWP": "#ifdef UWP_ENABLED",
+    }
+
+    g.write("const char* DefaultControllerMappings::mappings[] = {\n")
+    for platform, mappings in platform_mappings.items():
+        variable = platform_variables[platform]
+        g.write("{}\n".format(variable))
+        for mapping in mappings.values():
+            g.write("\t\"{}\",\n".format(mapping))
+        g.write("#endif\n")
+
+    g.write("\tNULL\n};\n")
+    g.close()
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 33 - 760
methods.py

@@ -1,773 +1,52 @@
 import os
 import os
-from compat import iteritems, itervalues, open_utf8, escape_string
+import os.path
+import sys
+import re
+import glob
+import string
+import datetime
+import subprocess
+from compat import iteritems
 
 
 
 
 def add_source_files(self, sources, filetype, lib_env=None, shared=False):
 def add_source_files(self, sources, filetype, lib_env=None, shared=False):
-    import glob
-    import string
-    # if not lib_objects:
-    if not lib_env:
-        lib_env = self
-    if type(filetype) == type(""):
-
-        dir = self.Dir('.').abspath
-        list = glob.glob(dir + "/" + filetype)
-        for f in list:
-            sources.append(self.Object(f))
-    else:
-        for f in filetype:
-            sources.append(self.Object(f))
-
-
-
-class LegacyGLHeaderStruct:
-
-    def __init__(self):
-        self.vertex_lines = []
-        self.fragment_lines = []
-        self.uniforms = []
-        self.attributes = []
-        self.feedbacks = []
-        self.fbos = []
-        self.conditionals = []
-        self.enums = {}
-        self.texunits = []
-        self.texunit_names = []
-        self.ubos = []
-        self.ubo_names = []
-
-        self.vertex_included_files = []
-        self.fragment_included_files = []
-
-        self.reading = ""
-        self.line_offset = 0
-        self.vertex_offset = 0
-        self.fragment_offset = 0
-
-
-def include_file_in_legacygl_header(filename, header_data, depth):
-    fs = open(filename, "r")
-    line = fs.readline()
-
-    while(line):
-
-        if (line.find("[vertex]") != -1):
-            header_data.reading = "vertex"
-            line = fs.readline()
-            header_data.line_offset += 1
-            header_data.vertex_offset = header_data.line_offset
-            continue
-
-        if (line.find("[fragment]") != -1):
-            header_data.reading = "fragment"
-            line = fs.readline()
-            header_data.line_offset += 1
-            header_data.fragment_offset = header_data.line_offset
-            continue
-
-        while(line.find("#include ") != -1):
-            includeline = line.replace("#include ", "").strip()[1:-1]
-
-            import os.path
-
-            included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
-            if (not included_file in header_data.vertex_included_files and header_data.reading == "vertex"):
-                header_data.vertex_included_files += [included_file]
-                if(include_file_in_legacygl_header(included_file, header_data, depth + 1) == None):
-                    print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
-            elif (not included_file in header_data.fragment_included_files and header_data.reading == "fragment"):
-                header_data.fragment_included_files += [included_file]
-                if(include_file_in_legacygl_header(included_file, header_data, depth + 1) == None):
-                    print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
-
-            line = fs.readline()
-
-        if (line.find("#ifdef ") != -1 or line.find("#elif defined(") != -1):
-            if (line.find("#ifdef ") != -1):
-                ifdefline = line.replace("#ifdef ", "").strip()
-            else:
-                ifdefline = line.replace("#elif defined(", "").strip()
-                ifdefline = ifdefline.replace(")", "").strip()
-
-            if (line.find("_EN_") != -1):
-                enumbase = ifdefline[:ifdefline.find("_EN_")]
-                ifdefline = ifdefline.replace("_EN_", "_")
-                line = line.replace("_EN_", "_")
-                if (enumbase not in header_data.enums):
-                    header_data.enums[enumbase] = []
-                if (ifdefline not in header_data.enums[enumbase]):
-                    header_data.enums[enumbase].append(ifdefline)
-
-            elif (not ifdefline in header_data.conditionals):
-                header_data.conditionals += [ifdefline]
-
-        if (line.find("uniform") != -1 and line.lower().find("texunit:") != -1):
-            # texture unit
-            texunitstr = line[line.find(":") + 1:].strip()
-            if (texunitstr == "auto"):
-                texunit = "-1"
-            else:
-                texunit = str(int(texunitstr))
-            uline = line[:line.lower().find("//")]
-            uline = uline.replace("uniform", "")
-            uline = uline.replace("highp", "")
-            uline = uline.replace(";", "")
-            lines = uline.split(",")
-            for x in lines:
-
-                x = x.strip()
-                x = x[x.rfind(" ") + 1:]
-                if (x.find("[") != -1):
-                    # unfiorm array
-                    x = x[:x.find("[")]
-
-                if (not x in header_data.texunit_names):
-                    header_data.texunits += [(x, texunit)]
-                    header_data.texunit_names += [x]
-
-        elif (line.find("uniform") != -1 and line.lower().find("ubo:") != -1):
-            # uniform buffer object
-            ubostr = line[line.find(":") + 1:].strip()
-            ubo = str(int(ubostr))
-            uline = line[:line.lower().find("//")]
-            uline = uline[uline.find("uniform") + len("uniform"):]
-            uline = uline.replace("highp", "")
-            uline = uline.replace(";", "")
-            uline = uline.replace("{", "").strip()
-            lines = uline.split(",")
-            for x in lines:
-
-                x = x.strip()
-                x = x[x.rfind(" ") + 1:]
-                if (x.find("[") != -1):
-                    # unfiorm array
-                    x = x[:x.find("[")]
-
-                if (not x in header_data.ubo_names):
-                    header_data.ubos += [(x, ubo)]
-                    header_data.ubo_names += [x]
-
-        elif (line.find("uniform") != -1 and line.find("{") == -1 and line.find(";") != -1):
-            uline = line.replace("uniform", "")
-            uline = uline.replace(";", "")
-            lines = uline.split(",")
-            for x in lines:
-
-                x = x.strip()
-                x = x[x.rfind(" ") + 1:]
-                if (x.find("[") != -1):
-                    # unfiorm array
-                    x = x[:x.find("[")]
-
-                if (not x in header_data.uniforms):
-                    header_data.uniforms += [x]
-
-        if (line.strip().find("attribute ") == 0 and line.find("attrib:") != -1):
-            uline = line.replace("in ", "")
-            uline = uline.replace("attribute ", "")
-            uline = uline.replace("highp ", "")
-            uline = uline.replace(";", "")
-            uline = uline[uline.find(" "):].strip()
-
-            if (uline.find("//") != -1):
-                name, bind = uline.split("//")
-                if (bind.find("attrib:") != -1):
-                    name = name.strip()
-                    bind = bind.replace("attrib:", "").strip()
-                    header_data.attributes += [(name, bind)]
-
-        if (line.strip().find("out ") == 0 and line.find("tfb:") != -1):
-            uline = line.replace("out ", "")
-            uline = uline.replace("highp ", "")
-            uline = uline.replace(";", "")
-            uline = uline[uline.find(" "):].strip()
-
-            if (uline.find("//") != -1):
-                name, bind = uline.split("//")
-                if (bind.find("tfb:") != -1):
-                    name = name.strip()
-                    bind = bind.replace("tfb:", "").strip()
-                    header_data.feedbacks += [(name, bind)]
-
-        line = line.replace("\r", "")
-        line = line.replace("\n", "")
-
-        if (header_data.reading == "vertex"):
-            header_data.vertex_lines += [line]
-        if (header_data.reading == "fragment"):
-            header_data.fragment_lines += [line]
-
-        line = fs.readline()
-        header_data.line_offset += 1
-
-    fs.close()
-
-    return header_data
-
-
-def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2=False):
-
-    header_data = LegacyGLHeaderStruct()
-    include_file_in_legacygl_header(filename, header_data, 0)
-
-    out_file = filename + ".gen.h"
-    fd = open(out_file, "w")
-
-    enum_constants = []
-
-    fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n")
-
-    out_file_base = out_file
-    out_file_base = out_file_base[out_file_base.rfind("/") + 1:]
-    out_file_base = out_file_base[out_file_base.rfind("\\") + 1:]
-    out_file_ifdef = out_file_base.replace(".", "_").upper()
-    fd.write("#ifndef " + out_file_ifdef + class_suffix + "_120\n")
-    fd.write("#define " + out_file_ifdef + class_suffix + "_120\n")
-
-    out_file_class = out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix
-    fd.write("\n\n")
-    fd.write("#include \"" + include + "\"\n\n\n")
-    fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n")
-    fd.write("\t virtual String get_shader_name() const { return \"" + out_file_class + "\"; }\n")
-
-    fd.write("public:\n\n")
-
-    if (len(header_data.conditionals)):
-        fd.write("\tenum Conditionals {\n")
-        for x in header_data.conditionals:
-            fd.write("\t\t" + x.upper() + ",\n")
-        fd.write("\t};\n\n")
-
-    if (len(header_data.uniforms)):
-        fd.write("\tenum Uniforms {\n")
-        for x in header_data.uniforms:
-            fd.write("\t\t" + x.upper() + ",\n")
-        fd.write("\t};\n\n")
-
-    fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n")
-    if (len(header_data.conditionals)):
-
-        fd.write("\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable)  {  _set_conditional(p_conditional,p_enable); }\n\n")
-    fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; ERR_FAIL_COND( get_active()!=this );\n\n ")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Color& p_color) { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(get_uniform(p_uniform),1,col); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector2& p_vec2) { _FU GLfloat vec2[2]={p_vec2.x,p_vec2.y}; glUniform2fv(get_uniform(p_uniform),1,vec2); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Size2i& p_vec2) { _FU GLint vec2[2]={p_vec2.x,p_vec2.y}; glUniform2iv(get_uniform(p_uniform),1,vec2); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector3& p_vec3) { _FU GLfloat vec3[3]={p_vec3.x,p_vec3.y,p_vec3.z}; glUniform3fv(get_uniform(p_uniform),1,vec3); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n")
-    fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n")
 
 
-    fd.write("""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) {  _FU
+    if isinstance(filetype, basestring):
+        dir_path = self.Dir('.').abspath
+        filetype = glob.glob(dir_path + "/" + filetype)
 
 
-		const Transform &tr = p_transform;
-
-		GLfloat matrix[16]={ /* build a 16x16 matrix */
-			tr.basis.elements[0][0],
-			tr.basis.elements[1][0],
-			tr.basis.elements[2][0],
-			0,
-			tr.basis.elements[0][1],
-			tr.basis.elements[1][1],
-			tr.basis.elements[2][1],
-			0,
-			tr.basis.elements[0][2],
-			tr.basis.elements[1][2],
-			tr.basis.elements[2][2],
-			0,
-			tr.origin.x,
-			tr.origin.y,
-			tr.origin.z,
-			1
-		};
-
-
-                glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
-
-
-	}
-
-	""")
-
-    fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform2D& p_transform) {  _FU
-
-		const Transform2D &tr = p_transform;
-
-		GLfloat matrix[16]={ /* build a 16x16 matrix */
-			tr.elements[0][0],
-			tr.elements[0][1],
-			0,
-			0,
-			tr.elements[1][0],
-			tr.elements[1][1],
-			0,
-			0,
-			0,
-			0,
-			1,
-			0,
-			tr.elements[2][0],
-			tr.elements[2][1],
-			0,
-			1
-		};
-
-
-                glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
-
-
-	}
-
-	""")
-
-    fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) {  _FU
-
-		GLfloat matrix[16];
-
-		for (int i=0;i<4;i++) {
-			for (int j=0;j<4;j++) {
-
-				matrix[i*4+j]=p_matrix.matrix[i][j];
-			}
-		}
-
-		glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
-        } """)
-
-    fd.write("\n\n#undef _FU\n\n\n")
-
-    fd.write("\tvirtual void init() {\n\n")
-
-    enum_value_count = 0
-
-    if (len(header_data.enums)):
-
-        fd.write("\t\t//Written using math, given nonstandarity of 64 bits integer constants..\n")
-        fd.write("\t\tstatic const Enum _enums[]={\n")
-
-        bitofs = len(header_data.conditionals)
-        enum_vals = []
-
-        for xv in header_data.enums:
-            x = header_data.enums[xv]
-            bits = 1
-            amt = len(x)
-            while(2**bits < amt):
-                bits += 1
-            strs = "{"
-            for i in range(amt):
-                strs += "\"#define " + x[i] + "\\n\","
-
-                v = {}
-                v["set_mask"] = "uint64_t(" + str(i) + ")<<" + str(bitofs)
-                v["clear_mask"] = "((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")"
-                enum_vals.append(v)
-                enum_constants.append(x[i])
-
-            strs += "NULL}"
-
-            fd.write("\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n")
-            bitofs += bits
-
-        fd.write("\t\t};\n\n")
-
-        fd.write("\t\tstatic const EnumValue _enum_values[]={\n")
-
-        enum_value_count = len(enum_vals)
-        for x in enum_vals:
-            fd.write("\t\t\t{" + x["set_mask"] + "," + x["clear_mask"] + "},\n")
-
-        fd.write("\t\t};\n\n")
-
-    conditionals_found = []
-    if (len(header_data.conditionals)):
-
-        fd.write("\t\tstatic const char* _conditional_strings[]={\n")
-        if (len(header_data.conditionals)):
-            for x in header_data.conditionals:
-                fd.write("\t\t\t\"#define " + x + "\\n\",\n")
-                conditionals_found.append(x)
-        fd.write("\t\t};\n\n")
-    else:
-        fd.write("\t\tstatic const char **_conditional_strings=NULL;\n")
+    for path in filetype:
+        sources.append(self.Object(path))
 
 
-    if (len(header_data.uniforms)):
 
 
-        fd.write("\t\tstatic const char* _uniform_strings[]={\n")
-        if (len(header_data.uniforms)):
-            for x in header_data.uniforms:
-                fd.write("\t\t\t\"" + x + "\",\n")
-        fd.write("\t\t};\n\n")
-    else:
-        fd.write("\t\tstatic const char **_uniform_strings=NULL;\n")
-
-    if output_attribs:
-        if (len(header_data.attributes)):
-
-            fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n")
-            for x in header_data.attributes:
-                fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
-            fd.write("\t\t};\n\n")
-        else:
-            fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n")
-
-    feedback_count = 0
-
-    if (not gles2 and len(header_data.feedbacks)):
-
-        fd.write("\t\tstatic const Feedback _feedbacks[]={\n")
-        for x in header_data.feedbacks:
-            name = x[0]
-            cond = x[1]
-            if (cond in conditionals_found):
-                fd.write("\t\t\t{\"" + name + "\"," + str(conditionals_found.index(cond)) + "},\n")
-            else:
-                fd.write("\t\t\t{\"" + name + "\",-1},\n")
-
-            feedback_count += 1
-
-        fd.write("\t\t};\n\n")
-    else:
-        if gles2:
-            pass
-        else:
-            fd.write("\t\tstatic const Feedback* _feedbacks=NULL;\n")
-
-    if (len(header_data.texunits)):
-        fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n")
-        for x in header_data.texunits:
-            fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
-        fd.write("\t\t};\n\n")
-    else:
-        fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n")
-
-    if (not gles2 and len(header_data.ubos)):
-        fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n")
-        for x in header_data.ubos:
-            fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
-        fd.write("\t\t};\n\n")
-    else:
-        if gles2:
-            pass
-        else:
-            fd.write("\t\tstatic UBOPair *_ubo_pairs=NULL;\n")
-
-    fd.write("\t\tstatic const char _vertex_code[]={\n")
-    for x in header_data.vertex_lines:
-        for i in range(len(x)):
-            fd.write(str(ord(x[i])) + ",")
-
-        fd.write(str(ord('\n')) + ",")
-    fd.write("\t\t0};\n\n")
-
-    fd.write("\t\tstatic const int _vertex_code_start=" + str(header_data.vertex_offset) + ";\n")
-
-    fd.write("\t\tstatic const char _fragment_code[]={\n")
-    for x in header_data.fragment_lines:
-        for i in range(len(x)):
-            fd.write(str(ord(x[i])) + ",")
-
-        fd.write(str(ord('\n')) + ",")
-    fd.write("\t\t0};\n\n")
-
-    fd.write("\t\tstatic const int _fragment_code_start=" + str(header_data.fragment_offset) + ";\n")
-
-    if output_attribs:
-        if gles2:
-            fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str(len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
-        else:
-            fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str(len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str(feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
-    else:
-        if gles2:
-            fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str(len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(enum_value_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
-        else:
-            fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str(len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(enum_value_count) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str(feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
-
-    fd.write("\t}\n\n")
-
-    if (len(enum_constants)):
-
-        fd.write("\tenum EnumConditionals {\n")
-        for x in enum_constants:
-            fd.write("\t\t" + x.upper() + ",\n")
-        fd.write("\t};\n\n")
-        fd.write("\tvoid set_enum_conditional(EnumConditionals p_cond) { _set_enum_conditional(p_cond); }\n")
-
-    fd.write("};\n\n")
-    fd.write("#endif\n\n")
-    fd.close()
-
-
-def build_gles3_headers(target, source, env):
-
-    for x in source:
-        build_legacygl_header(str(x), include="drivers/gles3/shader_gles3.h", class_suffix="GLES3", output_attribs=True)
-
-
-def build_gles2_headers(target, source, env):
-
-    for x in source:
-        build_legacygl_header(str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True, gles2=True)
-
-def make_authors_header(target, source, env):
-
-    sections = ["Project Founders", "Lead Developer", "Project Manager", "Developers"]
-    sections_id = ["AUTHORS_FOUNDERS", "AUTHORS_LEAD_DEVELOPERS", "AUTHORS_PROJECT_MANAGERS", "AUTHORS_DEVELOPERS"]
-
-    src = source[0].srcnode().abspath
-    dst = target[0].srcnode().abspath
-    f = open_utf8(src, "r")
-    g = open_utf8(dst, "w")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _EDITOR_AUTHORS_H\n")
-    g.write("#define _EDITOR_AUTHORS_H\n")
-
-    current_section = ""
-    reading = False
-
-    def close_section():
-        g.write("\t0\n")
-        g.write("};\n")
-
-    for line in f:
-        if reading:
-            if line.startswith("    "):
-                g.write("\t\"" + escape_string(line.strip()) + "\",\n")
-                continue
-        if line.startswith("## "):
-            if reading:
-                close_section()
-                reading = False
-            for i in range(len(sections)):
-                if line.strip().endswith(sections[i]):
-                    current_section = escape_string(sections_id[i])
-                    reading = True
-                    g.write("const char *const " + current_section + "[] = {\n")
-                    break
-
-    if reading:
-        close_section()
-
-    g.write("#endif\n")
-
-    g.close()
-    f.close()
-
-def make_donors_header(target, source, env):
-
-    sections = ["Platinum sponsors", "Gold sponsors", "Mini sponsors",
-            "Gold donors", "Silver donors", "Bronze donors"]
-    sections_id = ["DONORS_SPONSOR_PLAT", "DONORS_SPONSOR_GOLD", "DONORS_SPONSOR_MINI",
-            "DONORS_GOLD", "DONORS_SILVER", "DONORS_BRONZE"]
-
-    src = source[0].srcnode().abspath
-    dst = target[0].srcnode().abspath
-    f = open_utf8(src, "r")
-    g = open_utf8(dst, "w")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _EDITOR_DONORS_H\n")
-    g.write("#define _EDITOR_DONORS_H\n")
-
-    current_section = ""
-    reading = False
-
-    def close_section():
-        g.write("\t0\n")
-        g.write("};\n")
-
-    for line in f:
-        if reading >= 0:
-            if line.startswith("    "):
-                g.write("\t\"" + escape_string(line.strip()) + "\",\n")
-                continue
-        if line.startswith("## "):
-            if reading:
-                close_section()
-                reading = False
-            for i in range(len(sections)):
-                if line.strip().endswith(sections[i]):
-                    current_section = escape_string(sections_id[i])
-                    reading = True
-                    g.write("const char *const " + current_section + "[] = {\n")
-                    break
-
-    if reading:
-        close_section()
-
-    g.write("#endif\n")
-
-    g.close()
-    f.close()
-
-
-def make_license_header(target, source, env):
-    src_copyright = source[0].srcnode().abspath
-    src_license = source[1].srcnode().abspath
-    dst = target[0].srcnode().abspath
-
-    class LicenseReader:
-        def __init__(self, license_file):
-            self._license_file = license_file
-            self.line_num = 0
-            self.current = self.next_line()
-
-        def next_line(self):
-            line = self._license_file.readline()
-            self.line_num += 1
-            while line.startswith("#"):
-                line = self._license_file.readline()
-                self.line_num += 1
-            self.current = line
-            return line
-
-        def next_tag(self):
-            if not ':' in self.current:
-                return ('',[])
-            tag, line = self.current.split(":", 1)
-            lines = [line.strip()]
-            while self.next_line() and self.current.startswith(" "):
-                lines.append(self.current.strip())
-            return (tag, lines)
-
-    from collections import OrderedDict
-    projects = OrderedDict()
-    license_list = []
-
-    with open_utf8(src_copyright, "r") as copyright_file:
-        reader = LicenseReader(copyright_file)
-        part = {}
-        while reader.current:
-            tag, content = reader.next_tag()
-            if tag in ("Files", "Copyright", "License"):
-                part[tag] = content[:]
-            elif tag == "Comment":
-                # attach part to named project
-                projects[content[0]] = projects.get(content[0], []) + [part]
-
-            if not tag or not reader.current:
-                # end of a paragraph start a new part
-                if "License" in part and not "Files" in part:
-                    # no Files tag in this one, so assume standalone license
-                    license_list.append(part["License"])
-                part = {}
-                reader.next_line()
-
-    data_list = []
-    for project in itervalues(projects):
-        for part in project:
-            part["file_index"] = len(data_list)
-            data_list += part["Files"]
-            part["copyright_index"] = len(data_list)
-            data_list += part["Copyright"]
-
-    with open_utf8(dst, "w") as f:
-
-        f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-        f.write("#ifndef _EDITOR_LICENSE_H\n")
-        f.write("#define _EDITOR_LICENSE_H\n")
-        f.write("const char *const GODOT_LICENSE_TEXT =")
-
-        with open_utf8(src_license, "r") as license_file:
-            for line in license_file:
-                escaped_string = escape_string(line.strip())
-                f.write("\n\t\t\"" + escaped_string + "\\n\"")
-        f.write(";\n\n")
-
-        f.write("struct ComponentCopyrightPart {\n"
-                "\tconst char *license;\n"
-                "\tconst char *const *files;\n"
-                "\tconst char *const *copyright_statements;\n"
-                "\tint file_count;\n"
-                "\tint copyright_count;\n"
-                "};\n\n")
-
-        f.write("struct ComponentCopyright {\n"
-                "\tconst char *name;\n"
-                "\tconst ComponentCopyrightPart *parts;\n"
-                "\tint part_count;\n"
-                "};\n\n")
-
-        f.write("const char *const COPYRIGHT_INFO_DATA[] = {\n")
-        for line in data_list:
-            f.write("\t\"" + escape_string(line) + "\",\n")
-        f.write("};\n\n")
-
-        f.write("const ComponentCopyrightPart COPYRIGHT_PROJECT_PARTS[] = {\n")
-        part_index = 0
-        part_indexes = {}
-        for project_name, project in iteritems(projects):
-            part_indexes[project_name] = part_index
-            for part in project:
-                f.write("\t{ \"" + escape_string(part["License"][0]) + "\", "
-                        + "&COPYRIGHT_INFO_DATA[" + str(part["file_index"]) + "], "
-                        + "&COPYRIGHT_INFO_DATA[" + str(part["copyright_index"]) + "], "
-                        + str(len(part["Files"])) + ", "
-                        + str(len(part["Copyright"])) + " },\n")
-                part_index += 1
-        f.write("};\n\n")
-
-        f.write("const int COPYRIGHT_INFO_COUNT = " + str(len(projects)) + ";\n")
-
-        f.write("const ComponentCopyright COPYRIGHT_INFO[] = {\n")
-        for project_name, project in iteritems(projects):
-            f.write("\t{ \"" + escape_string(project_name) + "\", "
-                    + "&COPYRIGHT_PROJECT_PARTS[" + str(part_indexes[project_name]) + "], "
-                    + str(len(project)) + " },\n")
-        f.write("};\n\n")
-
-        f.write("const int LICENSE_COUNT = " + str(len(license_list)) + ";\n")
-
-        f.write("const char *const LICENSE_NAMES[] = {\n")
-        for l in license_list:
-            f.write("\t\"" + escape_string(l[0]) + "\",\n")
-        f.write("};\n\n")
-
-        f.write("const char *const LICENSE_BODIES[] = {\n\n")
-        for l in license_list:
-            for line in l[1:]:
-                if line == ".":
-                    f.write("\t\"\\n\"\n")
-                else:
-                    f.write("\t\"" + escape_string(line) + "\\n\"\n")
-            f.write("\t\"\",\n\n")
-        f.write("};\n\n")
-
-        f.write("#endif\n")
 def add_module_version_string(self,s):
 def add_module_version_string(self,s):
     self.module_version_string += "." + s
     self.module_version_string += "." + s
 
 
+
 def update_version(module_version_string=""):
 def update_version(module_version_string=""):
 
 
     build_name = "custom_build"
     build_name = "custom_build"
-    if (os.getenv("BUILD_NAME") != None):
+    if os.getenv("BUILD_NAME") != None:
         build_name = os.getenv("BUILD_NAME")
         build_name = os.getenv("BUILD_NAME")
         print("Using custom build name: " + build_name)
         print("Using custom build name: " + build_name)
 
 
     import version
     import version
 
 
+    # NOTE: It is safe to generate this file here, since this is still executed serially
     f = open("core/version_generated.gen.h", "w")
     f = open("core/version_generated.gen.h", "w")
     f.write("#define VERSION_SHORT_NAME \"" + str(version.short_name) + "\"\n")
     f.write("#define VERSION_SHORT_NAME \"" + str(version.short_name) + "\"\n")
     f.write("#define VERSION_NAME \"" + str(version.name) + "\"\n")
     f.write("#define VERSION_NAME \"" + str(version.name) + "\"\n")
     f.write("#define VERSION_MAJOR " + str(version.major) + "\n")
     f.write("#define VERSION_MAJOR " + str(version.major) + "\n")
     f.write("#define VERSION_MINOR " + str(version.minor) + "\n")
     f.write("#define VERSION_MINOR " + str(version.minor) + "\n")
-    if (hasattr(version, 'patch')):
+    if hasattr(version, 'patch'):
         f.write("#define VERSION_PATCH " + str(version.patch) + "\n")
         f.write("#define VERSION_PATCH " + str(version.patch) + "\n")
     f.write("#define VERSION_STATUS \"" + str(version.status) + "\"\n")
     f.write("#define VERSION_STATUS \"" + str(version.status) + "\"\n")
     f.write("#define VERSION_BUILD \"" + str(build_name) + "\"\n")
     f.write("#define VERSION_BUILD \"" + str(build_name) + "\"\n")
     f.write("#define VERSION_MODULE_CONFIG \"" + str(version.module_config) + module_version_string + "\"\n")
     f.write("#define VERSION_MODULE_CONFIG \"" + str(version.module_config) + module_version_string + "\"\n")
-    import datetime
     f.write("#define VERSION_YEAR " + str(datetime.datetime.now().year) + "\n")
     f.write("#define VERSION_YEAR " + str(datetime.datetime.now().year) + "\n")
     f.close()
     f.close()
 
 
+    # NOTE: It is safe to generate this file here, since this is still executed serially
     fhash = open("core/version_hash.gen.h", "w")
     fhash = open("core/version_hash.gen.h", "w")
     githash = ""
     githash = ""
     if os.path.isfile(".git/HEAD"):
     if os.path.isfile(".git/HEAD"):
@@ -784,7 +63,6 @@ def update_version(module_version_string=""):
 
 
 def parse_cg_file(fname, uniforms, sizes, conditionals):
 def parse_cg_file(fname, uniforms, sizes, conditionals):
 
 
-    import re
     fs = open(fname, "r")
     fs = open(fname, "r")
     line = fs.readline()
     line = fs.readline()
 
 
@@ -798,7 +76,7 @@ def parse_cg_file(fname, uniforms, sizes, conditionals):
 
 
             uniforms.append(name)
             uniforms.append(name)
 
 
-            if (type.find("texobj") != -1):
+            if type.find("texobj") != -1:
                 sizes.append(1)
                 sizes.append(1)
             else:
             else:
                 t = re.match(r"float(\d)x(\d)", type)
                 t = re.match(r"float(\d)x(\d)", type)
@@ -816,9 +94,6 @@ def parse_cg_file(fname, uniforms, sizes, conditionals):
     fs.close()
     fs.close()
 
 
 
 
-import glob
-
-
 def detect_modules():
 def detect_modules():
 
 
     module_list = []
     module_list = []
@@ -829,9 +104,9 @@ def detect_modules():
     files = glob.glob("modules/*")
     files = glob.glob("modules/*")
     files.sort()  # so register_module_types does not change that often, and also plugins are registered in alphabetic order
     files.sort()  # so register_module_types does not change that often, and also plugins are registered in alphabetic order
     for x in files:
     for x in files:
-        if (not os.path.isdir(x)):
+        if not os.path.isdir(x):
             continue
             continue
-        if (not os.path.exists(x + "/config.py")):
+        if not os.path.exists(x + "/config.py"):
             continue
             continue
         x = x.replace("modules/", "")  # rest of world
         x = x.replace("modules/", "")  # rest of world
         x = x.replace("modules\\", "")  # win32
         x = x.replace("modules\\", "")  # win32
@@ -863,6 +138,7 @@ void unregister_module_types() {
 }
 }
 """
 """
 
 
+    # NOTE: It is safe to generate this file here, since this is still executed serially
     with open("modules/register_module_types.gen.cpp", "w") as f:
     with open("modules/register_module_types.gen.cpp", "w") as f:
         f.write(modules_cpp)
         f.write(modules_cpp)
 
 
@@ -998,8 +274,6 @@ def use_windows_spawn_fix(self, platform=None):
     # changes, no multiple versions of the same object file will be present.
     # changes, no multiple versions of the same object file will be present.
     self.Replace(ARFLAGS='q')
     self.Replace(ARFLAGS='q')
 
 
-    import subprocess
-
     def mySubProcess(cmdline, env):
     def mySubProcess(cmdline, env):
 
 
         startupinfo = subprocess.STARTUPINFO()
         startupinfo = subprocess.STARTUPINFO()
@@ -1036,7 +310,6 @@ def use_windows_spawn_fix(self, platform=None):
 
 
 
 
 def split_lib(self, libname, src_list = None, env_lib = None):
 def split_lib(self, libname, src_list = None, env_lib = None):
-    import string
     env = self
     env = self
 
 
     num = 0
     num = 0
@@ -1072,7 +345,6 @@ def split_lib(self, libname, src_list = None, env_lib = None):
     lib_list.append(lib)
     lib_list.append(lib)
 
 
     if len(lib_list) > 0:
     if len(lib_list) > 0:
-        import os, sys
         if os.name == 'posix' and sys.platform == 'msys':
         if os.name == 'posix' and sys.platform == 'msys':
             env.Replace(ARFLAGS=['rcsT'])
             env.Replace(ARFLAGS=['rcsT'])
             lib = env_lib.add_library(libname + "_collated", lib_list)
             lib = env_lib.add_library(libname + "_collated", lib_list)
@@ -1098,7 +370,7 @@ def save_active_platforms(apnames, ap):
             b = pngf.read(1)
             b = pngf.read(1)
             str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
             str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
             str += " static const unsigned char _" + x[9:] + "_" + name + "[]={"
             str += " static const unsigned char _" + x[9:] + "_" + name + "[]={"
-            while(len(b) == 1):
+            while len(b) == 1:
                 str += hex(ord(b))
                 str += hex(ord(b))
                 b = pngf.read(1)
                 b = pngf.read(1)
                 if (len(b) == 1):
                 if (len(b) == 1):
@@ -1108,6 +380,7 @@ def save_active_platforms(apnames, ap):
 
 
             pngf.close()
             pngf.close()
 
 
+            # NOTE: It is safe to generate this file here, since this is still executed serially
             wf = x + "/" + name + ".gen.h"
             wf = x + "/" + name + ".gen.h"
             with open(wf, "w") as pngw:
             with open(wf, "w") as pngw:
                 pngw.write(str)
                 pngw.write(str)
@@ -1249,14 +522,13 @@ def detect_visual_c_compiler_version(tools_env):
     return vc_chosen_compiler_str
     return vc_chosen_compiler_str
 
 
 def find_visual_c_batch_file(env):
 def find_visual_c_batch_file(env):
-    from  SCons.Tool.MSCommon.vc import get_default_version, get_host_target, find_batch_file
+    from SCons.Tool.MSCommon.vc import get_default_version, get_host_target, find_batch_file
 
 
     version = get_default_version(env)
     version = get_default_version(env)
     (host_platform, target_platform,req_target_platform) = get_host_target(env)
     (host_platform, target_platform,req_target_platform) = get_host_target(env)
     return find_batch_file(env, version, host_platform, target_platform)[0]
     return find_batch_file(env, version, host_platform, target_platform)[0]
 
 
 def generate_cpp_hint_file(filename):
 def generate_cpp_hint_file(filename):
-    import os.path
     if os.path.isfile(filename):
     if os.path.isfile(filename):
         # Don't overwrite an existing hint file since the user may have customized it.
         # Don't overwrite an existing hint file since the user may have customized it.
         pass
         pass
@@ -1306,13 +578,14 @@ def generate_vs_project(env, num_jobs):
         release_targets = ['bin\\godot.windows.opt.32.exe'] + ['bin\\godot.windows.opt.64.exe']
         release_targets = ['bin\\godot.windows.opt.32.exe'] + ['bin\\godot.windows.opt.64.exe']
         release_debug_targets = ['bin\\godot.windows.opt.tools.32.exe'] + ['bin\\godot.windows.opt.tools.64.exe']
         release_debug_targets = ['bin\\godot.windows.opt.tools.32.exe'] + ['bin\\godot.windows.opt.tools.64.exe']
         targets = debug_targets + release_targets + release_debug_targets
         targets = debug_targets + release_targets + release_debug_targets
-        msvproj = env.MSVSProject(target=['#godot' + env['MSVSPROJECTSUFFIX']],
-                                    incs=env.vs_incs,
-                                    srcs=env.vs_srcs,
-                                    runfile=targets,
-                                    buildtarget=targets,
-                                    auto_build_solution=1,
-                                    variant=variants)
+        env.MSVSProject(
+            target=['#godot' + env['MSVSPROJECTSUFFIX']],
+            incs=env.vs_incs,
+            srcs=env.vs_srcs,
+            runfile=targets,
+            buildtarget=targets,
+            auto_build_solution=1,
+            variant=variants)
     else:
     else:
         print("Could not locate Visual Studio batch file for setting up the build environment. Not generating VS project.")
         print("Could not locate Visual Studio batch file for setting up the build environment. Not generating VS project.")
 
 

+ 4 - 253
modules/gdnative/SCsub

@@ -17,268 +17,19 @@ SConscript("net/SCsub")
 SConscript("arvr/SCsub")
 SConscript("arvr/SCsub")
 SConscript("pluginscript/SCsub")
 SConscript("pluginscript/SCsub")
 
 
-def _spaced(e):
-    return e if e[-1] == '*' else e + ' '
+from platform_methods import run_in_subprocess
+import gdnative_builders
 
 
-def _build_gdnative_api_struct_header(api):
-    gdnative_api_init_macro = [
-        '\textern const godot_gdnative_core_api_struct *_gdnative_wrapper_api_struct;'
-    ]
-
-    for ext in api['extensions']:
-        name = ext['name']
-        gdnative_api_init_macro.append(
-            '\textern const godot_gdnative_ext_{0}_api_struct *_gdnative_wrapper_{0}_api_struct;'.format(name))
-
-    gdnative_api_init_macro.append('\t_gdnative_wrapper_api_struct = options->api_struct;')
-    gdnative_api_init_macro.append('\tfor (unsigned int i = 0; i < _gdnative_wrapper_api_struct->num_extensions; i++) { ')
-    gdnative_api_init_macro.append('\t\tswitch (_gdnative_wrapper_api_struct->extensions[i]->type) {')
-
-    for ext in api['extensions']:
-        name = ext['name']
-        gdnative_api_init_macro.append(
-            '\t\t\tcase GDNATIVE_EXT_%s:' % ext['type'])
-        gdnative_api_init_macro.append(
-            '\t\t\t\t_gdnative_wrapper_{0}_api_struct = (godot_gdnative_ext_{0}_api_struct *)'
-            ' _gdnative_wrapper_api_struct->extensions[i];'.format(name))
-        gdnative_api_init_macro.append('\t\t\t\tbreak;')
-    gdnative_api_init_macro.append('\t\t}')
-    gdnative_api_init_macro.append('\t}')
-
-    out = [
-        '/* THIS FILE IS GENERATED DO NOT EDIT */',
-        '#ifndef GODOT_GDNATIVE_API_STRUCT_H',
-        '#define GODOT_GDNATIVE_API_STRUCT_H',
-        '',
-        '#include <gdnative/gdnative.h>',
-        '#include <android/godot_android.h>',
-        '#include <arvr/godot_arvr.h>',
-        '#include <nativescript/godot_nativescript.h>',
-        '#include <pluginscript/godot_pluginscript.h>',
-        '',
-        '#define GDNATIVE_API_INIT(options) do {  \\\n' + '  \\\n'.join(gdnative_api_init_macro) + '  \\\n } while (0)',
-        '',
-        '#ifdef __cplusplus',
-        'extern "C" {',
-        '#endif',
-        '',
-        'enum GDNATIVE_API_TYPES {',
-        '\tGDNATIVE_' + api['core']['type'] + ','
-    ]
-
-    for ext in api['extensions']:
-        out += ['\tGDNATIVE_EXT_' + ext['type'] + ',']
-
-    out += ['};', '']
-
-
-    def generate_extension_struct(name, ext, include_version=True):
-        ret_val = []
-        if ext['next']:
-            ret_val += generate_extension_struct(name, ext['next'])
-        
-        ret_val += [
-            'typedef struct godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct {',
-            '\tunsigned int type;',
-            '\tgodot_gdnative_api_version version;',
-            '\tconst godot_gdnative_api_struct *next;'
-        ]
-
-        for funcdef in ext['api']:
-            args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
-            ret_val.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))
-
-        ret_val += ['} godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct;', '']
-
-        return ret_val
-
-
-    for ext in api['extensions']:
-        name = ext['name']
-        out += generate_extension_struct(name, ext, False)
-
-    out += [
-        'typedef struct godot_gdnative_core_api_struct {',
-        '\tunsigned int type;',
-        '\tgodot_gdnative_api_version version;',
-        '\tconst godot_gdnative_api_struct *next;',
-        '\tunsigned int num_extensions;',
-        '\tconst godot_gdnative_api_struct **extensions;',
-    ]
-
-    for funcdef in api['core']['api']:
-        args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
-        out.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))
-
-    out += [
-        '} godot_gdnative_core_api_struct;',
-        '',
-        '#ifdef __cplusplus',
-        '}',
-        '#endif',
-        '',
-        '#endif // GODOT_GDNATIVE_API_STRUCT_H',
-        ''
-    ]
-    return '\n'.join(out)
-
-def _build_gdnative_api_struct_source(api):
-    out = [
-        '/* THIS FILE IS GENERATED DO NOT EDIT */',
-        '',
-        '#include <gdnative_api_struct.gen.h>',
-        ''
-    ]
-
-    def get_extension_struct_name(name, ext, include_version=True):
-        return 'godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct'
-
-    def get_extension_struct_instance_name(name, ext, include_version=True):
-        return 'api_extension_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_struct'
-
-    def get_extension_struct_definition(name, ext, include_version=True):
-
-        ret_val = []
-
-        if ext['next']:
-            ret_val += get_extension_struct_definition(name, ext['next'])
-
-        ret_val += [
-            'extern const ' + get_extension_struct_name(name, ext, include_version) + ' ' + get_extension_struct_instance_name(name, ext, include_version) + ' = {',
-            '\tGDNATIVE_EXT_' + ext['type'] + ',',
-            '\t{' + str(ext['version']['major']) + ', ' + str(ext['version']['minor']) + '},',
-            '\t' + ('NULL' if not ext['next'] else ('(const godot_gdnative_api_struct *)&' + get_extension_struct_instance_name(name, ext['next']))) + ','
-        ]
-
-        for funcdef in ext['api']:
-            ret_val.append('\t%s,' % funcdef['name'])
-
-        ret_val += ['};\n']
-
-        return ret_val
-
-    for ext in api['extensions']:
-        name = ext['name']
-        out += get_extension_struct_definition(name, ext, False)
-
-    out += ['', 'const godot_gdnative_api_struct *gdnative_extensions_pointers[] = {']
-
-    for ext in api['extensions']:
-        name = ext['name']
-        out += ['\t(godot_gdnative_api_struct *)&api_extension_' + name + '_struct,']
-
-    out += ['};\n']
-
-    out += [
-        'extern const godot_gdnative_core_api_struct api_struct = {',
-        '\tGDNATIVE_' + api['core']['type'] + ',',
-        '\t{' + str(api['core']['version']['major']) + ', ' + str(api['core']['version']['minor']) + '},',
-        '\tNULL,',
-        '\t' + str(len(api['extensions'])) + ',',
-        '\tgdnative_extensions_pointers,',
-    ]
-
-    for funcdef in api['core']['api']:
-        out.append('\t%s,' % funcdef['name'])
-    out.append('};\n')
-
-    return '\n'.join(out)
-
-def build_gdnative_api_struct(target, source, env):
-    import json
-    from collections import OrderedDict
-
-    with open(source[0].path, 'r') as fd:
-        api = json.load(fd)
-
-    header, source = target
-    with open(header.path, 'w') as fd:
-        fd.write(_build_gdnative_api_struct_header(api))
-    with open(source.path, 'w') as fd:
-        fd.write(_build_gdnative_api_struct_source(api))
 
 
 _, gensource = gdn_env.CommandNoCache(['include/gdnative_api_struct.gen.h', 'gdnative_api_struct.gen.cpp'],
 _, gensource = gdn_env.CommandNoCache(['include/gdnative_api_struct.gen.h', 'gdnative_api_struct.gen.cpp'],
-                               'gdnative_api.json', build_gdnative_api_struct)
+                               'gdnative_api.json', run_in_subprocess(gdnative_builders.build_gdnative_api_struct))
 gdn_env.add_source_files(env.modules_sources, [gensource])
 gdn_env.add_source_files(env.modules_sources, [gensource])
 
 
 env.use_ptrcall = True
 env.use_ptrcall = True
 
 
 
 
-def _build_gdnative_wrapper_code(api):
-    out = [
-        '/* THIS FILE IS GENERATED DO NOT EDIT */',
-        '',
-        '#include <gdnative/gdnative.h>',
-        '#include <nativescript/godot_nativescript.h>',
-        '#include <pluginscript/godot_pluginscript.h>',
-        '#include <arvr/godot_arvr.h>',
-        '',
-        '#include <gdnative_api_struct.gen.h>',
-        '',
-        '#ifdef __cplusplus',
-        'extern "C" {',
-        '#endif',
-        '',
-        'godot_gdnative_core_api_struct *_gdnative_wrapper_api_struct = 0;',
-    ]
-
-    for ext in api['extensions']:
-        name = ext['name']
-        out.append('godot_gdnative_ext_' + name + '_api_struct *_gdnative_wrapper_' + name + '_api_struct = 0;')
-
-    out += ['']
-
-    for funcdef in api['core']['api']:
-        args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
-        out.append('%s%s(%s) {' % (_spaced(funcdef['return_type']), funcdef['name'], args))
-
-        args = ', '.join(['%s' % n for t, n in funcdef['arguments']])
-
-        return_line = '\treturn ' if funcdef['return_type'] != 'void' else '\t'
-        return_line += '_gdnative_wrapper_api_struct->' + funcdef['name'] + '(' + args + ');'
-
-        out.append(return_line)
-        out.append('}')
-        out.append('')
-
-    for ext in api['extensions']:
-        name = ext['name']
-        for funcdef in ext['api']:
-            args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
-            out.append('%s%s(%s) {' % (_spaced(funcdef['return_type']), funcdef['name'], args))
-
-            args = ', '.join(['%s' % n for t, n in funcdef['arguments']])
-
-            return_line = '\treturn ' if funcdef['return_type'] != 'void' else '\t'
-            return_line += '_gdnative_wrapper_' + name + '_api_struct->' + funcdef['name'] + '(' + args + ');'
-
-            out.append(return_line)
-            out.append('}')
-            out.append('')
-
-    out += [
-        '#ifdef __cplusplus',
-        '}',
-        '#endif'
-    ]
-
-    return '\n'.join(out)
-
-
-def build_gdnative_wrapper_code(target, source, env):
-    import json
-    with open(source[0].path, 'r') as fd:
-        api = json.load(fd)
-
-    wrapper_file = target[0]
-    with open(wrapper_file.path, 'w') as fd:
-        fd.write(_build_gdnative_wrapper_code(api))
-
-
-
 if ARGUMENTS.get('gdnative_wrapper', False):
 if ARGUMENTS.get('gdnative_wrapper', False):
-#build wrapper code
-    gensource, = gdn_env.CommandNoCache('gdnative_wrapper_code.gen.cpp', 'gdnative_api.json', build_gdnative_wrapper_code)
+    gensource, = gdn_env.CommandNoCache('gdnative_wrapper_code.gen.cpp', 'gdnative_api.json', run_in_subprocess(gdnative_builders.build_gdnative_wrapper_code))
 
 
     gd_wrapper_env = env.Clone()
     gd_wrapper_env = env.Clone()
     gd_wrapper_env.Append(CPPPATH=['#modules/gdnative/include/'])
     gd_wrapper_env.Append(CPPPATH=['#modules/gdnative/include/'])

+ 261 - 0
modules/gdnative/gdnative_builders.py

@@ -0,0 +1,261 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+import json
+from platform_methods import subprocess_main
+
+
+def _spaced(e):
+    return e if e[-1] == '*' else e + ' '
+
+
def _build_gdnative_api_struct_header(api):
    """Return the text of gdnative_api_struct.gen.h generated from the
    parsed gdnative_api.json dictionary `api`.

    The header declares the GDNATIVE_API_TYPES enum, one function-pointer
    struct typedef per API (core and every extension version), and the
    GDNATIVE_API_INIT macro that assigns the wrapper's global struct
    pointers at library initialization time.
    """
    # Statements forming the body of the GDNATIVE_API_INIT(options) macro;
    # each entry becomes one backslash-continued line of the #define below.
    gdnative_api_init_macro = [
        '\textern const godot_gdnative_core_api_struct *_gdnative_wrapper_api_struct;'
    ]

    for ext in api['extensions']:
        name = ext['name']
        gdnative_api_init_macro.append(
            '\textern const godot_gdnative_ext_{0}_api_struct *_gdnative_wrapper_{0}_api_struct;'.format(name))

    gdnative_api_init_macro.append('\t_gdnative_wrapper_api_struct = options->api_struct;')
    gdnative_api_init_macro.append('\tfor (unsigned int i = 0; i < _gdnative_wrapper_api_struct->num_extensions; i++) { ')
    gdnative_api_init_macro.append('\t\tswitch (_gdnative_wrapper_api_struct->extensions[i]->type) {')

    # One switch case per extension: cast the generic extension pointer to the
    # extension-specific struct type.
    for ext in api['extensions']:
        name = ext['name']
        gdnative_api_init_macro.append(
            '\t\t\tcase GDNATIVE_EXT_%s:' % ext['type'])
        gdnative_api_init_macro.append(
            '\t\t\t\t_gdnative_wrapper_{0}_api_struct = (godot_gdnative_ext_{0}_api_struct *)'
            ' _gdnative_wrapper_api_struct->extensions[i];'.format(name))
        gdnative_api_init_macro.append('\t\t\t\tbreak;')
    gdnative_api_init_macro.append('\t\t}')
    gdnative_api_init_macro.append('\t}')

    out = [
        '/* THIS FILE IS GENERATED DO NOT EDIT */',
        '#ifndef GODOT_GDNATIVE_API_STRUCT_H',
        '#define GODOT_GDNATIVE_API_STRUCT_H',
        '',
        '#include <gdnative/gdnative.h>',
        '#include <android/godot_android.h>',
        '#include <arvr/godot_arvr.h>',
        '#include <nativescript/godot_nativescript.h>',
        '#include <pluginscript/godot_pluginscript.h>',
        '',
        '#define GDNATIVE_API_INIT(options) do {  \\\n' + '  \\\n'.join(gdnative_api_init_macro) + '  \\\n } while (0)',
        '',
        '#ifdef __cplusplus',
        'extern "C" {',
        '#endif',
        '',
        'enum GDNATIVE_API_TYPES {',
        '\tGDNATIVE_' + api['core']['type'] + ','
    ]

    for ext in api['extensions']:
        out += ['\tGDNATIVE_EXT_' + ext['type'] + ',']

    out += ['};', '']

    def generate_extension_struct(name, ext, include_version=True):
        # Emit the typedef(s) for one extension. Recurses into ext['next']
        # first so those typedefs are emitted before this one's.
        ret_val = []
        if ext['next']:
            ret_val += generate_extension_struct(name, ext['next'])

        ret_val += [
            'typedef struct godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct {',
            '\tunsigned int type;',
            '\tgodot_gdnative_api_version version;',
            '\tconst godot_gdnative_api_struct *next;'
        ]

        # One function-pointer member per API function.
        for funcdef in ext['api']:
            args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
            ret_val.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))

        ret_val += ['} godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct;', '']

        return ret_val

    # The top-level struct of each extension is emitted without a version
    # suffix in its type name (include_version=False).
    for ext in api['extensions']:
        name = ext['name']
        out += generate_extension_struct(name, ext, False)

    out += [
        'typedef struct godot_gdnative_core_api_struct {',
        '\tunsigned int type;',
        '\tgodot_gdnative_api_version version;',
        '\tconst godot_gdnative_api_struct *next;',
        '\tunsigned int num_extensions;',
        '\tconst godot_gdnative_api_struct **extensions;',
    ]

    # One function-pointer member per core API function.
    for funcdef in api['core']['api']:
        args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
        out.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))

    out += [
        '} godot_gdnative_core_api_struct;',
        '',
        '#ifdef __cplusplus',
        '}',
        '#endif',
        '',
        '#endif // GODOT_GDNATIVE_API_STRUCT_H',
        ''
    ]
    return '\n'.join(out)
+
+
def _build_gdnative_api_struct_source(api):
    """Return the text of gdnative_api_struct.gen.cpp generated from the
    parsed gdnative_api.json dictionary `api`.

    Defines one constant instance of each api struct declared in the
    generated header, chained via `next` pointers and collected into the
    global `gdnative_extensions_pointers` table and `api_struct`.
    """
    out = [
        '/* THIS FILE IS GENERATED DO NOT EDIT */',
        '',
        '#include <gdnative_api_struct.gen.h>',
        ''
    ]

    def get_extension_struct_name(name, ext, include_version=True):
        # Type name of the extension api struct (optionally version-suffixed).
        return 'godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct'

    def get_extension_struct_instance_name(name, ext, include_version=True):
        # Variable name of the extension api struct instance.
        return 'api_extension_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_struct'

    def get_extension_struct_definition(name, ext, include_version=True):
        # Emit the definition(s) for one extension. Recurses into ext['next']
        # first so the instance referenced by this struct's `next` initializer
        # is defined before it is taken by address.

        ret_val = []

        if ext['next']:
            ret_val += get_extension_struct_definition(name, ext['next'])

        ret_val += [
            'extern const ' + get_extension_struct_name(name, ext, include_version) + ' ' + get_extension_struct_instance_name(name, ext, include_version) + ' = {',
            '\tGDNATIVE_EXT_' + ext['type'] + ',',
            '\t{' + str(ext['version']['major']) + ', ' + str(ext['version']['minor']) + '},',
            '\t' + ('NULL' if not ext['next'] else ('(const godot_gdnative_api_struct *)&' + get_extension_struct_instance_name(name, ext['next']))) + ','
        ]

        # One initializer entry per API function pointer.
        for funcdef in ext['api']:
            ret_val.append('\t%s,' % funcdef['name'])

        ret_val += ['};\n']

        return ret_val

    for ext in api['extensions']:
        name = ext['name']
        out += get_extension_struct_definition(name, ext, False)

    # Table of pointers to the (unversioned) per-extension structs.
    out += ['', 'const godot_gdnative_api_struct *gdnative_extensions_pointers[] = {']

    for ext in api['extensions']:
        name = ext['name']
        out += ['\t(godot_gdnative_api_struct *)&api_extension_' + name + '_struct,']

    out += ['};\n']

    # Core api struct instance referencing the extension table above.
    out += [
        'extern const godot_gdnative_core_api_struct api_struct = {',
        '\tGDNATIVE_' + api['core']['type'] + ',',
        '\t{' + str(api['core']['version']['major']) + ', ' + str(api['core']['version']['minor']) + '},',
        '\tNULL,',
        '\t' + str(len(api['extensions'])) + ',',
        '\tgdnative_extensions_pointers,',
    ]

    for funcdef in api['core']['api']:
        out.append('\t%s,' % funcdef['name'])
    out.append('};\n')

    return '\n'.join(out)
+
+
def build_gdnative_api_struct(target, source, env):
    """SCons builder action: generate gdnative_api_struct.gen.h/.cpp.

    target: [header_path, source_path]; source: [path to gdnative_api.json].
    Paths arrive as plain strings because run_in_subprocess() already
    resolved the SCons nodes to absolute paths.
    """
    with open(source[0], 'r') as fd:
        api = json.load(fd)

    # Unpack into fresh names: the original `header, source = target` shadowed
    # the `source` parameter, making the rest of the function misleading.
    header_path, source_path = target
    with open(header_path, 'w') as fd:
        fd.write(_build_gdnative_api_struct_header(api))
    with open(source_path, 'w') as fd:
        fd.write(_build_gdnative_api_struct_source(api))
+
+
def _build_gdnative_wrapper_code(api):
    """Return the text of gdnative_wrapper_code.gen.cpp generated from the
    parsed gdnative_api.json dictionary `api`.

    For every core and extension API function this emits a C wrapper with
    the same signature that forwards the call through the matching
    _gdnative_wrapper_*_api_struct function pointer.
    """
    out = [
        '/* THIS FILE IS GENERATED DO NOT EDIT */',
        '',
        '#include <gdnative/gdnative.h>',
        '#include <nativescript/godot_nativescript.h>',
        '#include <pluginscript/godot_pluginscript.h>',
        '#include <arvr/godot_arvr.h>',
        '',
        '#include <gdnative_api_struct.gen.h>',
        '',
        '#ifdef __cplusplus',
        'extern "C" {',
        '#endif',
        '',
        'godot_gdnative_core_api_struct *_gdnative_wrapper_api_struct = 0;',
    ]

    # One global api-struct pointer per extension.
    for ext in api['extensions']:
        name = ext['name']
        out.append('godot_gdnative_ext_' + name + '_api_struct *_gdnative_wrapper_' + name + '_api_struct = 0;')

    out += ['']

    # Forwarding wrappers for the core API functions.
    for funcdef in api['core']['api']:
        # Signature: typed parameter list.
        args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
        out.append('%s%s(%s) {' % (_spaced(funcdef['return_type']), funcdef['name'], args))

        # Forwarded call: parameter names only.
        args = ', '.join(['%s' % n for t, n in funcdef['arguments']])

        return_line = '\treturn ' if funcdef['return_type'] != 'void' else '\t'
        return_line += '_gdnative_wrapper_api_struct->' + funcdef['name'] + '(' + args + ');'

        out.append(return_line)
        out.append('}')
        out.append('')

    # Forwarding wrappers for each extension's API functions.
    for ext in api['extensions']:
        name = ext['name']
        for funcdef in ext['api']:
            args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
            out.append('%s%s(%s) {' % (_spaced(funcdef['return_type']), funcdef['name'], args))

            args = ', '.join(['%s' % n for t, n in funcdef['arguments']])

            return_line = '\treturn ' if funcdef['return_type'] != 'void' else '\t'
            return_line += '_gdnative_wrapper_' + name + '_api_struct->' + funcdef['name'] + '(' + args + ');'

            out.append(return_line)
            out.append('}')
            out.append('')

    out += [
        '#ifdef __cplusplus',
        '}',
        '#endif'
    ]

    return '\n'.join(out)
+
+
def build_gdnative_wrapper_code(target, source, env):
    """SCons builder action: generate gdnative_wrapper_code.gen.cpp.

    source: [path to gdnative_api.json]; target: [output .cpp path].
    Paths arrive as plain strings because run_in_subprocess() already
    resolved the SCons nodes to absolute paths.
    """
    with open(source[0], 'r') as api_file:
        api = json.load(api_file)

    with open(target[0], 'w') as wrapper_file:
        wrapper_file.write(_build_gdnative_wrapper_code(api))
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 1 - 0
modules/mono/SCsub

@@ -61,6 +61,7 @@ env_mono.add_source_files(env.modules_sources, 'utils/*.cpp')
 
 
 if env['tools']:
 if env['tools']:
     env_mono.add_source_files(env.modules_sources, 'editor/*.cpp')
     env_mono.add_source_files(env.modules_sources, 'editor/*.cpp')
+    # NOTE: It is safe to generate this file here, since this is still executed serially
     make_cs_files_header('glue/cs_files', 'glue/cs_compressed.gen.h')
     make_cs_files_header('glue/cs_files', 'glue/cs_compressed.gen.h')
 
 
 vars = Variables()
 vars = Variables()

+ 6 - 3
platform/SCsub

@@ -18,10 +18,13 @@ for platform in env.platform_apis:
 reg_apis_inc += '\n'
 reg_apis_inc += '\n'
 reg_apis += '}\n\n'
 reg_apis += '}\n\n'
 unreg_apis += '}\n'
 unreg_apis += '}\n'
+
+# NOTE: It is safe to generate this file here, since this is still executed serially
 with open_utf8('register_platform_apis.gen.cpp', 'w') as f:
 with open_utf8('register_platform_apis.gen.cpp', 'w') as f:
-	f.write(reg_apis_inc)
-	f.write(reg_apis)
-	f.write(unreg_apis)
+    f.write(reg_apis_inc)
+    f.write(reg_apis)
+    f.write(unreg_apis)
+
 platform_sources.append('register_platform_apis.gen.cpp')
 platform_sources.append('register_platform_apis.gen.cpp')
 
 
 lib = env.add_library('platform', platform_sources)
 lib = env.add_library('platform', platform_sources)

+ 3 - 9
platform/osx/SCsub

@@ -3,14 +3,8 @@
 import os
 import os
 Import('env')
 Import('env')
 
 
-def make_debug(target, source, env):
-    if (env["macports_clang"] != 'no'):
-        mpprefix = os.environ.get("MACPORTS_PREFIX", "/opt/local")
-        mpclangver = env["macports_clang"]
-        os.system(mpprefix + '/libexec/llvm-' + mpclangver + '/bin/llvm-dsymutil {0} -o {0}.dSYM'.format(target[0]))
-    else:
-        os.system('dsymutil {0} -o {0}.dSYM'.format(target[0]))
-    os.system('strip -u -r {0}'.format(target[0]))
+from platform_methods import run_in_subprocess
+import platform_osx_builders
 
 
 files = [
 files = [
     'crash_handler_osx.mm',
     'crash_handler_osx.mm',
@@ -25,5 +19,5 @@ files = [
 prog = env.add_program('#bin/godot', files)
 prog = env.add_program('#bin/godot', files)
 
 
 if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]:
 if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]:
-    env.AddPostAction(prog, make_debug)
+    env.AddPostAction(prog, run_in_subprocess(platform_osx_builders.make_debug_osx))
 
 

+ 21 - 0
platform/osx/platform_osx_builders.py

@@ -0,0 +1,21 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+import os
+from platform_methods import subprocess_main
+
+
def make_debug_osx(target, source, env):
    """Post-link action (macOS): extract debug info of the built binary into
    a .dSYM bundle, then strip the binary itself."""
    binary = target[0]
    if env["macports_clang"] == 'no':
        # System toolchain.
        dsymutil = 'dsymutil'
    else:
        # MacPorts clang: use the matching versioned llvm-dsymutil.
        prefix = os.environ.get("MACPORTS_PREFIX", "/opt/local")
        dsymutil = prefix + '/libexec/llvm-' + env["macports_clang"] + '/bin/llvm-dsymutil'
    os.system('{1} {0} -o {0}.dSYM'.format(binary, dsymutil))
    os.system('strip -u -r {0}'.format(binary))
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 3 - 10
platform/windows/SCsub

@@ -3,15 +3,8 @@
 import os
 import os
 Import('env')
 Import('env')
 
 
-def make_debug_mingw(target, source, env):
-    mingw_prefix = ""
-    if (env["bits"] == "32"):
-        mingw_prefix = env["mingw_prefix_32"]
-    else:
-        mingw_prefix = env["mingw_prefix_64"]
-    os.system(mingw_prefix + 'objcopy --only-keep-debug {0} {0}.debugsymbols'.format(target[0]))
-    os.system(mingw_prefix + 'strip --strip-debug --strip-unneeded {0}'.format(target[0]))
-    os.system(mingw_prefix + 'objcopy --add-gnu-debuglink={0}.debugsymbols {0}'.format(target[0]))
+from platform_methods import run_in_subprocess
+import platform_windows_builders
 
 
 common_win = [
 common_win = [
     "context_gl_win.cpp",
     "context_gl_win.cpp",
@@ -40,4 +33,4 @@ if env['vsproj']:
 
 
 if not os.getenv("VCINSTALLDIR"):
 if not os.getenv("VCINSTALLDIR"):
     if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]:
     if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]:
-        env.AddPostAction(prog, make_debug_mingw)
+        env.AddPostAction(prog, run_in_subprocess(platform_windows_builders.make_debug_mingw))

+ 22 - 0
platform/windows/platform_windows_builders.py

@@ -0,0 +1,22 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+import os
+from platform_methods import subprocess_main
+
+
def make_debug_mingw(target, source, env):
    """Post-link action (MinGW): move debug info of the built binary into a
    .debugsymbols file and link it back via a GNU debuglink section."""
    prefix = env["mingw_prefix_32"] if env["bits"] == "32" else env["mingw_prefix_64"]
    binary = target[0]
    # Each template is prefixed with the per-bitness mingw tool prefix.
    for template in (
            'objcopy --only-keep-debug {0} {0}.debugsymbols',
            'strip --strip-debug --strip-unneeded {0}',
            'objcopy --add-gnu-debuglink={0}.debugsymbols {0}'):
        os.system(prefix + template.format(binary))
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 3 - 5
platform/x11/SCsub

@@ -3,10 +3,8 @@
 import os
 import os
 Import('env')
 Import('env')
 
 
-def make_debug(target, source, env):
-    os.system('objcopy --only-keep-debug {0} {0}.debugsymbols'.format(target[0]))
-    os.system('strip --strip-debug --strip-unneeded {0}'.format(target[0]))
-    os.system('objcopy --add-gnu-debuglink={0}.debugsymbols {0}'.format(target[0]))
+from platform_methods import run_in_subprocess
+import platform_x11_builders
 
 
 common_x11 = [
 common_x11 = [
     "context_gl_x11.cpp",
     "context_gl_x11.cpp",
@@ -20,4 +18,4 @@ common_x11 = [
 prog = env.add_program('#bin/godot', ['godot_x11.cpp'] + common_x11)
 prog = env.add_program('#bin/godot', ['godot_x11.cpp'] + common_x11)
 
 
 if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]:
 if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]:
-    env.AddPostAction(prog, make_debug)
+    env.AddPostAction(prog, run_in_subprocess(platform_x11_builders.make_debug_x11))

+ 17 - 0
platform/x11/platform_x11_builders.py

@@ -0,0 +1,17 @@
+"""Functions used to generate source files during build time
+
+All such functions are invoked in a subprocess on Windows to prevent build flakiness.
+
+"""
+import os
+from platform_methods import subprocess_main
+
+
def make_debug_x11(target, source, env):
    """Post-link action (X11): move debug info of the built binary into a
    .debugsymbols file and link it back via a GNU debuglink section."""
    binary = target[0]
    commands = [
        'objcopy --only-keep-debug {0} {0}.debugsymbols',
        'strip --strip-debug --strip-unneeded {0}',
        'objcopy --add-gnu-debuglink={0}.debugsymbols {0}',
    ]
    for command in commands:
        os.system(command.format(binary))
+
+
+if __name__ == '__main__':
+    subprocess_main(globals())

+ 66 - 0
platform_methods.py

@@ -0,0 +1,66 @@
import functools
import json
import os
import subprocess
import sys
import tempfile
import uuid
+
+# NOTE: The multiprocessing module is not compatible with SCons due to conflict on cPickle
+
+
def run_in_subprocess(builder_function):
    """Wrap an SCons builder function so that, on Windows, it executes in a
    fresh Python subprocess instead of inside the SCons process.

    On other platforms the function is simply called in-process. The wrapper
    converts the SCons target/source nodes to absolute path strings before
    dispatching, so builder functions always receive plain paths.

    Raises RuntimeError if the subprocess exits with a non-zero code.
    """

    @functools.wraps(builder_function)
    def wrapper(target, source, env):

        # Convert SCons Node instances to absolute paths
        target = [node.srcnode().abspath for node in target]
        source = [node.srcnode().abspath for node in source]

        # Short circuit on non-Windows platforms
        if os.name != 'nt':
            return builder_function(target, source, None)

        # Identify the module defining the builder function, so the
        # subprocess can execute it by path (see subprocess_main).
        module_name = builder_function.__module__
        function_name = builder_function.__name__
        module_path = sys.modules[module_name].__file__
        if module_path.endswith('.pyc') or module_path.endswith('.pyo'):
            module_path = module_path[:-1]

        # Subprocess environment: ensure the module and its imports are
        # resolvable from the child process.
        subprocess_env = os.environ.copy()
        subprocess_env['PYTHONPATH'] = os.pathsep.join([os.getcwd()] + sys.path)

        # Save the call parameters to a uniquely named temporary JSON file.
        # tempfile.gettempdir() is used instead of os.environ['TMP'], which
        # is not guaranteed to be set and would raise KeyError.
        args = (target, source, None)
        data = dict(fn=function_name, args=args)
        json_path = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex + '.json')
        with open(json_path, 'wt') as json_file:
            json.dump(data, json_file, indent=2)
        try:
            print('Executing builder function in subprocess: module_path=%r; data=%r' % (module_path, data))
            exit_code = subprocess.call([sys.executable, module_path, json_path], env=subprocess_env)
        finally:
            try:
                os.remove(json_path)
            except (OSError, IOError) as e:
                # Do not fail the entire build if it cannot delete a temporary file
                print('WARNING: Could not delete temporary file: path=%r; [%s] %s' %
                      (json_path, e.__class__.__name__, e))

        # Must succeed
        if exit_code:
            raise RuntimeError(
                'Failed to run builder function in subprocess: module_path=%r; data=%r' % (module_path, data))

    return wrapper
+
+
def subprocess_main(namespace):
    """Entry point used when a builder module is executed as a script.

    sys.argv[1] is the path of a JSON file of the form
    {"fn": <function name>, "args": [...]} written by run_in_subprocess();
    look the named function up in `namespace` and invoke it.
    """
    params_path = sys.argv[1]
    with open(params_path) as params_file:
        params = json.load(params_file)

    namespace[params['fn']](*params['args'])