Diffstat (limited to 'BuildTools')
-rwxr-xr-x  BuildTools/CheckHeaders.py                    46
-rwxr-xr-x  BuildTools/CheckTranslations.py               70
-rwxr-xr-x  BuildTools/Copyrighter.py                    218
-rwxr-xr-x  BuildTools/Coverage/FilterLCovData.py         25
-rwxr-xr-x  BuildTools/Coverage/GenerateSummary.py        32
-rw-r--r--  BuildTools/DocBook/SCons/DocBook.py          184
-rw-r--r--  BuildTools/DocBook/SCons/FO.py                64
-rw-r--r--  BuildTools/DocBook/SCons/XSLT.py             106
-rwxr-xr-x  BuildTools/FilterScanBuildResults.py          38
-rwxr-xr-x  BuildTools/FixIncludes.py                    186
-rwxr-xr-x  BuildTools/Gource/GetGravatars.py             56
-rw-r--r--  BuildTools/SCons/SConscript.boot              10
-rw-r--r--  BuildTools/SCons/SConstruct                   20
-rw-r--r--  BuildTools/SCons/Tools/AppBundle.py          102
-rw-r--r--  BuildTools/SCons/Tools/BuildVersion.py        10
-rw-r--r--  BuildTools/SCons/Tools/DoxyGen.py             38
-rw-r--r--  BuildTools/SCons/Tools/Flags.py               16
-rw-r--r--  BuildTools/SCons/Tools/Nib.py                 14
-rw-r--r--  BuildTools/SCons/Tools/ReplacePragmaOnce.py   36
-rw-r--r--  BuildTools/SCons/Tools/SLOCCount.py           22
-rw-r--r--  BuildTools/SCons/Tools/WindowsBundle.py      222
-rw-r--r--  BuildTools/SCons/Tools/WriteVal.py            16
-rw-r--r--  BuildTools/SCons/Tools/textfile.py           218
-rw-r--r--  BuildTools/SCons/Version.py                  110
-rwxr-xr-x  BuildTools/UpdateDebianChangelog.py           32
-rwxr-xr-x  BuildTools/scons2ninja.py                    926
26 files changed, 1408 insertions, 1409 deletions
diff --git a/BuildTools/CheckHeaders.py b/BuildTools/CheckHeaders.py
index 274a760..b599099 100755
--- a/BuildTools/CheckHeaders.py
+++ b/BuildTools/CheckHeaders.py
@@ -3,18 +3,18 @@
import os, sys
FORBIDDEN_INCLUDES = [
- ("iostream", ["Swiften/Base/format.h"]),
- ("Base/Log.h", []),
- ("Base/format.h", []),
- ("algorithm", ["Swiften/Base/Algorithm.h", "Swiften/Base/SafeAllocator.h", "Swiften/Base/Listenable.h"]),
- ("boost/bind.hpp", ["Swiften/Base/Listenable.h"]),
- ("boost/filesystem.hpp", []),
- ("Base/foreach.h", []),
- ("boost/date_time/date_time.hpp", []),
- ("boost/filesystem/filesystem.hpp", []),
-
- # To avoid
- ("Base/Algorithm.h", ["Swiften/StringCodecs/HMAC.h"]),
+ ("iostream", ["Swiften/Base/format.h"]),
+ ("Base/Log.h", []),
+ ("Base/format.h", []),
+ ("algorithm", ["Swiften/Base/Algorithm.h", "Swiften/Base/SafeAllocator.h", "Swiften/Base/Listenable.h"]),
+ ("boost/bind.hpp", ["Swiften/Base/Listenable.h"]),
+ ("boost/filesystem.hpp", []),
+ ("Base/foreach.h", []),
+ ("boost/date_time/date_time.hpp", []),
+ ("boost/filesystem/filesystem.hpp", []),
+
+ # To avoid
+ ("Base/Algorithm.h", ["Swiften/StringCodecs/HMAC.h"]),
]
foundBadHeaders = False
@@ -22,21 +22,21 @@ foundBadHeaders = False
filename = sys.argv[1]
if "3rdParty" in filename or ".sconf" in filename or ".framework" in filename or not filename.endswith(".h") :
- sys.exit(0)
+ sys.exit(0)
if not "Swiften" in filename :
- sys.exit(0)
+ sys.exit(0)
if filename.endswith("Swiften.h") :
- sys.exit(0)
+ sys.exit(0)
file = open(filename, "r")
for line in file.readlines() :
- if not "#include" in line :
- continue
- if "Base/Log.h" in filename :
- continue
- for forbiddenInclude, ignores in FORBIDDEN_INCLUDES :
- if forbiddenInclude in line and len([x for x in ignores if x in filename]) == 0 :
- print "Found " + forbiddenInclude + " include in " + filename
- foundBadHeaders = True
+ if not "#include" in line :
+ continue
+ if "Base/Log.h" in filename :
+ continue
+ for forbiddenInclude, ignores in FORBIDDEN_INCLUDES :
+ if forbiddenInclude in line and len([x for x in ignores if x in filename]) == 0 :
+ print "Found " + forbiddenInclude + " include in " + filename
+ foundBadHeaders = True
sys.exit(foundBadHeaders)
diff --git a/BuildTools/CheckTranslations.py b/BuildTools/CheckTranslations.py
index b39af08..a9b6afd 100755
--- a/BuildTools/CheckTranslations.py
+++ b/BuildTools/CheckTranslations.py
@@ -3,48 +3,48 @@
import os, sys, re, xml.dom.minidom
def getText(nodelist):
- text = ""
- for node in nodelist:
- if node.nodeType == node.TEXT_NODE:
- text += node.data
- return text
+ text = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ text += node.data
+ return text
desktop_generic_names = set()
desktop_comments = set()
f = open("Swift/resources/swift.desktop", "r")
for l in f.readlines() :
- m = re.match("GenericName\[(\w+)\].*", l)
- if m :
- desktop_generic_names.add(m.group(1))
- m = re.match("Comment\[(\w+)\].*", l)
- if m :
- desktop_comments.add(m.group(1))
+ m = re.match("GenericName\[(\w+)\].*", l)
+ if m :
+ desktop_generic_names.add(m.group(1))
+ m = re.match("Comment\[(\w+)\].*", l)
+ if m :
+ desktop_comments.add(m.group(1))
f.close()
for filename in os.listdir("Swift/Translations") :
- m = re.match("swift_(.*)\.ts", filename)
- if m :
- language = m.group(1)
- finished = True
- f = open("Swift/Translations/" + filename, "r")
- document = xml.dom.minidom.parse(f)
- f.close()
+ m = re.match("swift_(.*)\.ts", filename)
+ if m :
+ language = m.group(1)
+ finished = True
+ f = open("Swift/Translations/" + filename, "r")
+ document = xml.dom.minidom.parse(f)
+ f.close()
- for message in document.getElementsByTagName("message") :
- source = message.getElementsByTagName("source")[0]
- sourceText = getText(source.childNodes)
- sourcePlaceholders = set(re.findall("%\d+%?", sourceText))
- translation = message.getElementsByTagName("translation")[0]
- if "type" in translation.attributes.keys() and translation.attributes["type"]. value == "unfinished" :
- finished = False
- translationText = getText(translation.childNodes)
- translationPlaceholders = set(re.findall("%\d+%?", translationText))
- if translationPlaceholders != sourcePlaceholders :
- print "[Error] " + filename + ": Placeholder mismatch in translation '" + sourceText + "'"
- if not finished :
- print "[Warning] " + filename + ": Unfinished"
- if language not in desktop_generic_names and language != "en" :
- print "[Warning] GenericName field missing in desktop entry for " + language
- if language not in desktop_comments and language != "en" :
- print "[Warning] Comment field missing in desktop entry for " + language
+ for message in document.getElementsByTagName("message") :
+ source = message.getElementsByTagName("source")[0]
+ sourceText = getText(source.childNodes)
+ sourcePlaceholders = set(re.findall("%\d+%?", sourceText))
+ translation = message.getElementsByTagName("translation")[0]
+ if "type" in translation.attributes.keys() and translation.attributes["type"]. value == "unfinished" :
+ finished = False
+ translationText = getText(translation.childNodes)
+ translationPlaceholders = set(re.findall("%\d+%?", translationText))
+ if translationPlaceholders != sourcePlaceholders :
+ print "[Error] " + filename + ": Placeholder mismatch in translation '" + sourceText + "'"
+ if not finished :
+ print "[Warning] " + filename + ": Unfinished"
+ if language not in desktop_generic_names and language != "en" :
+ print "[Warning] GenericName field missing in desktop entry for " + language
+ if language not in desktop_comments and language != "en" :
+ print "[Warning] Comment field missing in desktop entry for " + language
diff --git a/BuildTools/Copyrighter.py b/BuildTools/Copyrighter.py
index ccb2019..56fcf01 100755
--- a/BuildTools/Copyrighter.py
+++ b/BuildTools/Copyrighter.py
@@ -8,135 +8,135 @@ CONTRIBUTOR_LICENSE = "mit"
LICENSE_DIR = "Documentation/Licenses"
class License :
- def __init__(self, name, file) :
- self.name = name
- self.file = file
+ def __init__(self, name, file) :
+ self.name = name
+ self.file = file
licenses = {
- "gpl3" : License("GNU General Public License v3", "GPLv3.txt"),
- "mit" : License("MIT License", "MIT.txt"),
- }
+ "gpl3" : License("GNU General Public License v3", "GPLv3.txt"),
+ "mit" : License("MIT License", "MIT.txt"),
+ }
class Copyright :
- def __init__(self, author, year, license) :
- self.author = author
- self.year = year
- self.license = license
-
- def to_string(self, comment_chars) :
- return "\n".join([
- comment_chars[0],
- comment_chars[1] + " Copyright (c) %(year)s %(name)s" % {"year" : self.year, "name" : self.author },
- comment_chars[1] + " Licensed under the " + licenses[self.license].name + ".",
- comment_chars[1] + " See " + LICENSE_DIR + "/" + licenses[self.license].file + " for more information.",
- comment_chars[2],
- "\n"])
+ def __init__(self, author, year, license) :
+ self.author = author
+ self.year = year
+ self.license = license
+
+ def to_string(self, comment_chars) :
+ return "\n".join([
+ comment_chars[0],
+ comment_chars[1] + " Copyright (c) %(year)s %(name)s" % {"year" : self.year, "name" : self.author },
+ comment_chars[1] + " Licensed under the " + licenses[self.license].name + ".",
+ comment_chars[1] + " See " + LICENSE_DIR + "/" + licenses[self.license].file + " for more information.",
+ comment_chars[2],
+ "\n"])
def get_comment_chars_for_filename(filename) :
- return ("/*", " *", " */")
+ return ("/*", " *", " */")
def get_comment_chars_re_for_filename(filename) :
- comment_chars = get_comment_chars_for_filename(filename)
- return "|".join(comment_chars).replace("*", "\\*")
+ comment_chars = get_comment_chars_for_filename(filename)
+ return "|".join(comment_chars).replace("*", "\\*")
def parse_file(filename) :
- file = open(filename)
- copyright_text = []
- prolog = ""
- epilog = ""
- inProlog = True
- inCopyright = False
- inEpilog = False
- for line in file.readlines() :
- if inProlog :
- if line.startswith("#!") or len(line.strip()) == 0 :
- prolog += line
- continue
- else :
- inProlog = False
- inCopyright = True
-
- if inCopyright :
- if re.match(get_comment_chars_re_for_filename(filename), line) != None :
- copyright_text.append(line.rstrip())
- continue
- else :
- inCopyright = False
- inEpilog = True
- if len(line.strip()) == 0 :
- continue
-
- if inEpilog :
- epilog += line
- continue
-
- file.close()
-
- # Parse the copyright
- copyright = None
- if len(copyright_text) == 5 :
- comment_chars = get_comment_chars_for_filename(filename)
- if copyright_text[0] == comment_chars[0] and copyright_text[4] == comment_chars[2] :
- matchstring = "(" + get_comment_chars_re_for_filename(filename) + ") Copyright \(c\) (?P<startYear>\d\d\d\d)(-(?P<endYear>\d\d\d\d))? (?P<author>.*)"
- m = re.match(matchstring, copyright_text[1])
- if m != None :
- # FIXME: Do better copyright reconstruction here
- copyright = True
- if not copyright :
- epilog = "\n".join(copyright_text) + epilog
- return (prolog, copyright, epilog)
+ file = open(filename)
+ copyright_text = []
+ prolog = ""
+ epilog = ""
+ inProlog = True
+ inCopyright = False
+ inEpilog = False
+ for line in file.readlines() :
+ if inProlog :
+ if line.startswith("#!") or len(line.strip()) == 0 :
+ prolog += line
+ continue
+ else :
+ inProlog = False
+ inCopyright = True
+
+ if inCopyright :
+ if re.match(get_comment_chars_re_for_filename(filename), line) != None :
+ copyright_text.append(line.rstrip())
+ continue
+ else :
+ inCopyright = False
+ inEpilog = True
+ if len(line.strip()) == 0 :
+ continue
+
+ if inEpilog :
+ epilog += line
+ continue
+
+ file.close()
+
+ # Parse the copyright
+ copyright = None
+ if len(copyright_text) == 5 :
+ comment_chars = get_comment_chars_for_filename(filename)
+ if copyright_text[0] == comment_chars[0] and copyright_text[4] == comment_chars[2] :
+ matchstring = "(" + get_comment_chars_re_for_filename(filename) + ") Copyright \(c\) (?P<startYear>\d\d\d\d)(-(?P<endYear>\d\d\d\d))? (?P<author>.*)"
+ m = re.match(matchstring, copyright_text[1])
+ if m != None :
+ # FIXME: Do better copyright reconstruction here
+ copyright = True
+ if not copyright :
+ epilog = "\n".join(copyright_text) + epilog
+ return (prolog, copyright, epilog)
def get_userinfo() :
- p = subprocess.Popen("git config user.name", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
- username = p.stdout.read().rstrip()
- p.stdin.close()
- if p.wait() != 0 :
- return None
- p = subprocess.Popen("git config user.email", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
- email = p.stdout.read().rstrip()
- p.stdin.close()
- if p.wait() != 0 :
- return None
- return (username, email)
+ p = subprocess.Popen("git config user.name", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
+ username = p.stdout.read().rstrip()
+ p.stdin.close()
+ if p.wait() != 0 :
+ return None
+ p = subprocess.Popen("git config user.email", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
+ email = p.stdout.read().rstrip()
+ p.stdin.close()
+ if p.wait() != 0 :
+ return None
+ return (username, email)
def get_copyright(username, email) :
- if email in ["git@el-tramo.be", "git@kismith.co.uk"] :
- license = DEFAULT_LICENSE
- else :
- license = CONTRIBUTOR_LICENSE
- return Copyright(username, datetime.date.today().strftime("%Y"), license)
+ if email in ["git@el-tramo.be", "git@kismith.co.uk"] :
+ license = DEFAULT_LICENSE
+ else :
+ license = CONTRIBUTOR_LICENSE
+ return Copyright(username, datetime.date.today().strftime("%Y"), license)
def check_copyright(filename) :
- (prolog, copyright, epilog) = parse_file(filename)
- if copyright == None :
- print "No copyright found in: " + filename
- #print "Please run '" + sys.argv[0] + " set-copyright " + filename + "'"
- return False
- else :
- return True
+ (prolog, copyright, epilog) = parse_file(filename)
+ if copyright == None :
+ print "No copyright found in: " + filename
+ #print "Please run '" + sys.argv[0] + " set-copyright " + filename + "'"
+ return False
+ else :
+ return True
def set_copyright(filename, copyright) :
- (prolog, c, epilog) = parse_file(filename)
- comment_chars = get_comment_chars_for_filename(filename)
- copyright_text = copyright.to_string(comment_chars)
- file = open(filename, "w")
- if prolog != "":
- file.write(prolog)
- file.write(copyright_text)
- if epilog != "" :
- file.write(epilog)
- file.close()
+ (prolog, c, epilog) = parse_file(filename)
+ comment_chars = get_comment_chars_for_filename(filename)
+ copyright_text = copyright.to_string(comment_chars)
+ file = open(filename, "w")
+ if prolog != "":
+ file.write(prolog)
+ file.write(copyright_text)
+ if epilog != "" :
+ file.write(epilog)
+ file.close()
if sys.argv[1] == "check-copyright" :
- file = sys.argv[2]
- if (file.endswith(".cpp") or file.endswith(".h")) and not "3rdParty" in file :
- if not check_copyright(file) :
- sys.exit(-1)
+ file = sys.argv[2]
+ if (file.endswith(".cpp") or file.endswith(".h")) and not "3rdParty" in file :
+ if not check_copyright(file) :
+ sys.exit(-1)
elif sys.argv[1] == "set-copyright" :
- (username, email) = get_userinfo()
- copyright = get_copyright(username, email)
- set_copyright(sys.argv[2], copyright)
+ (username, email) = get_userinfo()
+ copyright = get_copyright(username, email)
+ set_copyright(sys.argv[2], copyright)
else :
- print "Unknown command: " + sys.argv[1]
- sys.exit(-1)
+ print "Unknown command: " + sys.argv[1]
+ sys.exit(-1)
diff --git a/BuildTools/Coverage/FilterLCovData.py b/BuildTools/Coverage/FilterLCovData.py
index a3a7ee5..33fd682 100755
--- a/BuildTools/Coverage/FilterLCovData.py
+++ b/BuildTools/Coverage/FilterLCovData.py
@@ -7,24 +7,23 @@ import sys, re, os.path
assert(len(sys.argv) == 2)
def isIgnored(file) :
- return (file.find("/Swiften/") == -1 and file.find("/Slimber/") == -1 and file.find("/Swift/") == -1) or (file.find("/UnitTest/") != -1 or file.find("/QA/") != -1)
-
+ return (file.find("/Swiften/") == -1 and file.find("/Slimber/") == -1 and file.find("/Swift/") == -1) or (file.find("/UnitTest/") != -1 or file.find("/QA/") != -1)
output = []
inputFile = open(sys.argv[1])
inIgnoredFile = False
for line in inputFile.readlines() :
- if inIgnoredFile :
- if line == "end_of_record\n" :
- inIgnoredFile = False
- else :
- if line.startswith("SF:") and isIgnored(line) :
- inIgnoredFile = True
- else :
- m = re.match("SF:(.*)", line)
- if m :
- line = "SF:" + os.path.realpath(m.group(1)) + "\n"
- output.append(line)
+ if inIgnoredFile :
+ if line == "end_of_record\n" :
+ inIgnoredFile = False
+ else :
+ if line.startswith("SF:") and isIgnored(line) :
+ inIgnoredFile = True
+ else :
+ m = re.match("SF:(.*)", line)
+ if m :
+ line = "SF:" + os.path.realpath(m.group(1)) + "\n"
+ output.append(line)
inputFile.close()
outputFile = open(sys.argv[1], 'w')
diff --git a/BuildTools/Coverage/GenerateSummary.py b/BuildTools/Coverage/GenerateSummary.py
index ec94a4f..e572082 100755
--- a/BuildTools/Coverage/GenerateSummary.py
+++ b/BuildTools/Coverage/GenerateSummary.py
@@ -8,27 +8,27 @@ inputFile = open(sys.argv[1])
currentFile = ""
coverage = {}
for line in inputFile.readlines() :
- line = line.strip()
- m = re.match("^SF:(.*)", line)
- if m :
- currentFile = m.group(1)
- else :
- m = re.match("^DA:(\d+),(\d+)", line)
- if m :
- currentFileCoverage = coverage.get(currentFile, {})
- line = int(m.group(1))
- count = int(m.group(2))
- currentFileCoverage[line] = currentFileCoverage.get(line, 0) + count
- coverage[currentFile] = currentFileCoverage
+ line = line.strip()
+ m = re.match("^SF:(.*)", line)
+ if m :
+ currentFile = m.group(1)
+ else :
+ m = re.match("^DA:(\d+),(\d+)", line)
+ if m :
+ currentFileCoverage = coverage.get(currentFile, {})
+ line = int(m.group(1))
+ count = int(m.group(2))
+ currentFileCoverage[line] = currentFileCoverage.get(line, 0) + count
+ coverage[currentFile] = currentFileCoverage
inputFile.close()
totalLines = 0
coveredLines = 0
for c in coverage.values() :
- totalLines += len(c)
- for l in c.values() :
- if l > 0 :
- coveredLines += 1
+ totalLines += len(c)
+ for l in c.values() :
+ if l > 0 :
+ coveredLines += 1
outputFile = open(sys.argv[2], 'w')
outputFile.write(str(coveredLines) + "/" + str(totalLines))
diff --git a/BuildTools/DocBook/SCons/DocBook.py b/BuildTools/DocBook/SCons/DocBook.py
index be3d4f6..7641b65 100644
--- a/BuildTools/DocBook/SCons/DocBook.py
+++ b/BuildTools/DocBook/SCons/DocBook.py
@@ -6,100 +6,100 @@ import SCons.Util, SCons.Action
import xml.dom.minidom, re, os.path, sys
def generate(env) :
- # Location of stylesheets and catalogs
- docbook_dir = "#/BuildTools/DocBook"
- docbook_xsl_style_dir = env.Dir(docbook_dir + "/Stylesheets").abspath
- docbook_xml_catalog = env.File("catalog.xml").abspath
- if "DOCBOOK_XML_DIR" in env :
- docbook_xml_dir = env.Dir("$DOCBOOK_XML_DIR").abspath
- else :
- docbook_xml_dir = env.Dir("#/3rdParty/DocBook/XML").abspath
- if "DOCBOOK_XSL_DIR" in env :
- docbook_xsl_dir = env.Dir("$DOCBOOK_XSL_DIR").abspath
- else :
- docbook_xsl_dir = env.Dir("#/3rdParty/DocBook/XSL").abspath
- fop_fonts_dir = env.Dir(docbook_dir + "/Fonts").abspath
-
- # Generates a catalog from paths to external tools
- def buildCatalog(target, source, env) :
- catalog = """<?xml version='1.0'?>
+ # Location of stylesheets and catalogs
+ docbook_dir = "#/BuildTools/DocBook"
+ docbook_xsl_style_dir = env.Dir(docbook_dir + "/Stylesheets").abspath
+ docbook_xml_catalog = env.File("catalog.xml").abspath
+ if "DOCBOOK_XML_DIR" in env :
+ docbook_xml_dir = env.Dir("$DOCBOOK_XML_DIR").abspath
+ else :
+ docbook_xml_dir = env.Dir("#/3rdParty/DocBook/XML").abspath
+ if "DOCBOOK_XSL_DIR" in env :
+ docbook_xsl_dir = env.Dir("$DOCBOOK_XSL_DIR").abspath
+ else :
+ docbook_xsl_dir = env.Dir("#/3rdParty/DocBook/XSL").abspath
+ fop_fonts_dir = env.Dir(docbook_dir + "/Fonts").abspath
+
+ # Generates a catalog from paths to external tools
+ def buildCatalog(target, source, env) :
+ catalog = """<?xml version='1.0'?>
<catalog xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog" prefer="public">
- <rewriteSystem
- systemIdStartString="http://www.oasis-open.org/docbook/xml/4.5/"
- rewritePrefix="%(docbook_xml_dir)s/" />
- <rewriteSystem
- systemIdStartString="docbook-xsl:/"
- rewritePrefix="%(docbook_xsl_dir)s/" />
+ <rewriteSystem
+ systemIdStartString="http://www.oasis-open.org/docbook/xml/4.5/"
+ rewritePrefix="%(docbook_xml_dir)s/" />
+ <rewriteSystem
+ systemIdStartString="docbook-xsl:/"
+ rewritePrefix="%(docbook_xsl_dir)s/" />
</catalog>"""
-
- docbook_xml_dir = source[0].get_contents()
- docbook_xsl_dir = source[1].get_contents()
- if env["PLATFORM"] == "win32" :
- docbook_xml_dir = docbook_xml_dir.replace("\\","/")
- docbook_xsl_dir = docbook_xsl_dir.replace("\\","/")
- file = open(target[0].abspath, "w")
- file.write(catalog % {
- "docbook_xml_dir" : docbook_xml_dir,
- "docbook_xsl_dir" : docbook_xsl_dir,
- })
- file.close()
-
- # Generates a FOP config file
- def buildFopConfig(target, source, env) :
- fopcfg = """<fop version=\"1.0\">
- <renderers>
- <renderer mime=\"application/pdf\">
- <fonts>
- <directory recursive=\"true\">%(fonts_dir)s</directory>
- </fonts>
- </renderer>
- </renderers>
+
+ docbook_xml_dir = source[0].get_contents()
+ docbook_xsl_dir = source[1].get_contents()
+ if env["PLATFORM"] == "win32" :
+ docbook_xml_dir = docbook_xml_dir.replace("\\","/")
+ docbook_xsl_dir = docbook_xsl_dir.replace("\\","/")
+ file = open(target[0].abspath, "w")
+ file.write(catalog % {
+ "docbook_xml_dir" : docbook_xml_dir,
+ "docbook_xsl_dir" : docbook_xsl_dir,
+ })
+ file.close()
+
+ # Generates a FOP config file
+ def buildFopConfig(target, source, env) :
+ fopcfg = """<fop version=\"1.0\">
+ <renderers>
+ <renderer mime=\"application/pdf\">
+ <fonts>
+ <directory recursive=\"true\">%(fonts_dir)s</directory>
+ </fonts>
+ </renderer>
+ </renderers>
</fop>"""
- file = open(target[0].abspath, "w")
- file.write(fopcfg % {
- "fonts_dir" : source[0].get_contents()
- })
- file.close()
-
- # Builds a DocBook file
- def buildDocBook(env, source) :
- db_env = env.Clone()
- db_env["XMLCATALOGS"] = [docbook_xml_catalog]
- db_env["ENV"].update({"OS" : os.environ.get("OS", "")})
-
- db_env["XMLLINT"] = env.WhereIs("xmllint")
- db_env["XSLT"] = env.WhereIs("xsltproc")
- db_env["FO"] = env.WhereIs("fop")
-
- if not db_env["XMLLINT"] or not db_env["XSLT"] :
- return
-
- # PDF generation
- if db_env["FO"] :
- fo = db_env.XSLT(os.path.splitext(source)[0] + ".fo", source,
- XSLTSTYLESHEET = db_env["DOCBOOK_XSL_FO"])
- pdf = db_env.FO(fo)
-
- # HTML generation
- db_env.XSLT(os.path.splitext(source)[0] + ".html", source,
- XSLTSTYLESHEET = db_env["DOCBOOK_XSL_HTML"])
-
- # Import tools
- env.Tool("FO", toolpath = [docbook_dir + "/SCons"])
- env.Tool("XSLT", toolpath = [docbook_dir + "/SCons"])
-
- # Catalog file generation
- env.Command("catalog.xml", [env.Value(docbook_xml_dir), env.Value(docbook_xsl_dir)], SCons.Action.Action(buildCatalog, cmdstr = "$GENCOMSTR"))
-
- # FO config file generation
- env["FOCFG"] = env.File("fop.cfg").abspath
- env.Command("fop.cfg", [env.Value(fop_fonts_dir)], SCons.Action.Action(buildFopConfig, cmdstr = "$GENCOMSTR"))
-
- # DocBook stylesheets
- env["DOCBOOK_XSL_FO"] = docbook_xsl_style_dir + "/fo/docbook.xsl"
- env["DOCBOOK_XSL_HTML"] = docbook_xsl_style_dir + "/html/docbook.xsl"
- env.AddMethod(buildDocBook, "DocBook")
-
+ file = open(target[0].abspath, "w")
+ file.write(fopcfg % {
+ "fonts_dir" : source[0].get_contents()
+ })
+ file.close()
+
+ # Builds a DocBook file
+ def buildDocBook(env, source) :
+ db_env = env.Clone()
+ db_env["XMLCATALOGS"] = [docbook_xml_catalog]
+ db_env["ENV"].update({"OS" : os.environ.get("OS", "")})
+
+ db_env["XMLLINT"] = env.WhereIs("xmllint")
+ db_env["XSLT"] = env.WhereIs("xsltproc")
+ db_env["FO"] = env.WhereIs("fop")
+
+ if not db_env["XMLLINT"] or not db_env["XSLT"] :
+ return
+
+ # PDF generation
+ if db_env["FO"] :
+ fo = db_env.XSLT(os.path.splitext(source)[0] + ".fo", source,
+ XSLTSTYLESHEET = db_env["DOCBOOK_XSL_FO"])
+ pdf = db_env.FO(fo)
+
+ # HTML generation
+ db_env.XSLT(os.path.splitext(source)[0] + ".html", source,
+ XSLTSTYLESHEET = db_env["DOCBOOK_XSL_HTML"])
+
+ # Import tools
+ env.Tool("FO", toolpath = [docbook_dir + "/SCons"])
+ env.Tool("XSLT", toolpath = [docbook_dir + "/SCons"])
+
+ # Catalog file generation
+ env.Command("catalog.xml", [env.Value(docbook_xml_dir), env.Value(docbook_xsl_dir)], SCons.Action.Action(buildCatalog, cmdstr = "$GENCOMSTR"))
+
+ # FO config file generation
+ env["FOCFG"] = env.File("fop.cfg").abspath
+ env.Command("fop.cfg", [env.Value(fop_fonts_dir)], SCons.Action.Action(buildFopConfig, cmdstr = "$GENCOMSTR"))
+
+ # DocBook stylesheets
+ env["DOCBOOK_XSL_FO"] = docbook_xsl_style_dir + "/fo/docbook.xsl"
+ env["DOCBOOK_XSL_HTML"] = docbook_xsl_style_dir + "/html/docbook.xsl"
+ env.AddMethod(buildDocBook, "DocBook")
+
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/DocBook/SCons/FO.py b/BuildTools/DocBook/SCons/FO.py
index c7596d6..a4079d7 100644
--- a/BuildTools/DocBook/SCons/FO.py
+++ b/BuildTools/DocBook/SCons/FO.py
@@ -6,40 +6,40 @@ import xml.dom.minidom, re
################################################################################
def generate(env) :
- def generate_actions(source, target, env, for_signature) :
- if len(env["FOCFG"]) > 0 :
- cmd = "$FO -c $FOCFG $FOFLAGS $SOURCE $TARGET"
- else :
- cmd = "$FO $FOFLAGS $SOURCE $TARGET"
- return SCons.Action.Action(cmd, cmdstr = "$FOCOMSTR")
+ def generate_actions(source, target, env, for_signature) :
+ if len(env["FOCFG"]) > 0 :
+ cmd = "$FO -c $FOCFG $FOFLAGS $SOURCE $TARGET"
+ else :
+ cmd = "$FO $FOFLAGS $SOURCE $TARGET"
+ return SCons.Action.Action(cmd, cmdstr = "$FOCOMSTR")
- def modify_sources(target, source, env) :
- if len(env["FOCFG"]) > 0 :
- source.append(env["FOCFG"])
- return target, source
+ def modify_sources(target, source, env) :
+ if len(env["FOCFG"]) > 0 :
+ source.append(env["FOCFG"])
+ return target, source
- def scan_fo(node, env, path) :
- dependencies = set()
- try :
- document = xml.dom.minidom.parseString(node.get_contents())
- except xml.parsers.expat.ExpatError:
- return []
- for include in document.getElementsByTagNameNS("http://www.w3.org/1999/XSL/Format", "external-graphic") :
- m = re.match("url\((.*)\)", include.getAttribute("src"))
- if m :
- dependencies.add(m.group(1))
- return list(dependencies)
+ def scan_fo(node, env, path) :
+ dependencies = set()
+ try :
+ document = xml.dom.minidom.parseString(node.get_contents())
+ except xml.parsers.expat.ExpatError:
+ return []
+ for include in document.getElementsByTagNameNS("http://www.w3.org/1999/XSL/Format", "external-graphic") :
+ m = re.match("url\((.*)\)", include.getAttribute("src"))
+ if m :
+ dependencies.add(m.group(1))
+ return list(dependencies)
- env["FO"] = "fop"
- env["FOFLAGS"] = ""
- env["FOCFG"] = ""
- env["BUILDERS"]["FO"] = SCons.Builder.Builder(
- generator = generate_actions,
- emitter = modify_sources,
- source_scanner = SCons.Scanner.Scanner(function = scan_fo, skeys = [".fo"]),
- suffix = ".pdf",
- src_suffix = ".fo"
- )
+ env["FO"] = "fop"
+ env["FOFLAGS"] = ""
+ env["FOCFG"] = ""
+ env["BUILDERS"]["FO"] = SCons.Builder.Builder(
+ generator = generate_actions,
+ emitter = modify_sources,
+ source_scanner = SCons.Scanner.Scanner(function = scan_fo, skeys = [".fo"]),
+ suffix = ".pdf",
+ src_suffix = ".fo"
+ )
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/DocBook/SCons/XSLT.py b/BuildTools/DocBook/SCons/XSLT.py
index 83b5ec2..825f129 100644
--- a/BuildTools/DocBook/SCons/XSLT.py
+++ b/BuildTools/DocBook/SCons/XSLT.py
@@ -6,58 +6,58 @@ import xml.dom.minidom, os, os.path
################################################################################
def generate(env) :
- def generate_actions(source, target, env, for_signature) :
- if not env.has_key("XSLTSTYLESHEET") :
- raise SCons.Errors.UserError, "The XSLTSTYLESHEET construction variable must be defined"
-
- # Process the XML catalog files
- # FIXME: It's probably not clean to do an ENV assignment globally
- env["ENV"]["XML_CATALOG_FILES"] = " ".join(env.get("XMLCATALOGS", ""))
-
- # Build the XMLLint command
- xmllintcmd = ["$XMLLINT", "--nonet", "--xinclude", "--postvalid", "--noout", "$SOURCE"]
-
- # Build the XSLT command
- xsltcmd = ["$XSLT", "--nonet", "--xinclude"]
- for (param, value) in env["XSLTPARAMS"] :
- xsltcmd += ["--stringparam", param, value]
- xsltcmd += ["-o", "$TARGET", "$XSLTSTYLESHEET", "$SOURCE"]
-
- return [
- SCons.Action.Action([xmllintcmd], cmdstr = "$XMLLINTCOMSTR"),
- SCons.Action.Action([xsltcmd], cmdstr = "$XSLTCOMSTR")]
-
- def modify_sources(target, source, env) :
- if len(env["FOCFG"]) > 0 :
- source.append(env["FOCFG"])
- source.append(env.get("XMLCATALOGS", []))
- return target, source
-
- def scan_xml(node, env, path) :
- dependencies = set()
- nodes = [node]
- while len(nodes) > 0 :
- node = nodes.pop()
- try :
- document = xml.dom.minidom.parseString(node.get_contents())
- except xml.parsers.expat.ExpatError:
- continue
- for include in document.getElementsByTagNameNS("http://www.w3.org/2001/XInclude", "include") :
- include_file = include.getAttribute("href")
- dependencies.add(include_file)
- if include.getAttribute("parse") != "text" :
- nodes.append(env.File(include_file))
- return list(dependencies)
-
- env["XMLLINT"] = "xmllint"
- env["XSLT"] = "xsltproc"
- env["XSLTPARAMS"] = []
- env["BUILDERS"]["XSLT"] = SCons.Builder.Builder(
- generator = generate_actions,
- emitter = modify_sources,
- source_scanner = SCons.Scanner.Scanner(function = scan_xml),
- src_suffix = ".xml"
- )
+ def generate_actions(source, target, env, for_signature) :
+ if not env.has_key("XSLTSTYLESHEET") :
+ raise SCons.Errors.UserError, "The XSLTSTYLESHEET construction variable must be defined"
+
+ # Process the XML catalog files
+ # FIXME: It's probably not clean to do an ENV assignment globally
+ env["ENV"]["XML_CATALOG_FILES"] = " ".join(env.get("XMLCATALOGS", ""))
+
+ # Build the XMLLint command
+ xmllintcmd = ["$XMLLINT", "--nonet", "--xinclude", "--postvalid", "--noout", "$SOURCE"]
+
+ # Build the XSLT command
+ xsltcmd = ["$XSLT", "--nonet", "--xinclude"]
+ for (param, value) in env["XSLTPARAMS"] :
+ xsltcmd += ["--stringparam", param, value]
+ xsltcmd += ["-o", "$TARGET", "$XSLTSTYLESHEET", "$SOURCE"]
+
+ return [
+ SCons.Action.Action([xmllintcmd], cmdstr = "$XMLLINTCOMSTR"),
+ SCons.Action.Action([xsltcmd], cmdstr = "$XSLTCOMSTR")]
+
+ def modify_sources(target, source, env) :
+ if len(env["FOCFG"]) > 0 :
+ source.append(env["FOCFG"])
+ source.append(env.get("XMLCATALOGS", []))
+ return target, source
+
+ def scan_xml(node, env, path) :
+ dependencies = set()
+ nodes = [node]
+ while len(nodes) > 0 :
+ node = nodes.pop()
+ try :
+ document = xml.dom.minidom.parseString(node.get_contents())
+ except xml.parsers.expat.ExpatError:
+ continue
+ for include in document.getElementsByTagNameNS("http://www.w3.org/2001/XInclude", "include") :
+ include_file = include.getAttribute("href")
+ dependencies.add(include_file)
+ if include.getAttribute("parse") != "text" :
+ nodes.append(env.File(include_file))
+ return list(dependencies)
+
+ env["XMLLINT"] = "xmllint"
+ env["XSLT"] = "xsltproc"
+ env["XSLTPARAMS"] = []
+ env["BUILDERS"]["XSLT"] = SCons.Builder.Builder(
+ generator = generate_actions,
+ emitter = modify_sources,
+ source_scanner = SCons.Scanner.Scanner(function = scan_xml),
+ src_suffix = ".xml"
+ )
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/FilterScanBuildResults.py b/BuildTools/FilterScanBuildResults.py
index 53a345f..ed4a55f 100755
--- a/BuildTools/FilterScanBuildResults.py
+++ b/BuildTools/FilterScanBuildResults.py
@@ -6,23 +6,23 @@ resultsDir = sys.argv[1]
resultDirs = [ d for d in os.listdir(resultsDir) if os.path.isdir(os.path.join(resultsDir, d)) ]
resultDirs.sort()
if len(resultDirs) > 0 :
- resultDir = os.path.join(resultsDir, resultDirs[-1])
- resultFileName = os.path.join(resultDir, "index.html")
- resultData = []
- f = open(resultFileName, "r")
- skipLines = 0
- for line in f.readlines() :
- if skipLines > 0 :
- skipLines -= 1
- else :
- if ("3rdParty" in line or "SHA1.cpp" in line or "lua.c" in line) :
- m = re.match(".*(report-.*\.html)", line)
- os.remove(os.path.join(resultDir, m.group(1)))
- skipLines = 2
- else :
- resultData.append(line)
- f.close()
+ resultDir = os.path.join(resultsDir, resultDirs[-1])
+ resultFileName = os.path.join(resultDir, "index.html")
+ resultData = []
+ f = open(resultFileName, "r")
+ skipLines = 0
+ for line in f.readlines() :
+ if skipLines > 0 :
+ skipLines -= 1
+ else :
+ if ("3rdParty" in line or "SHA1.cpp" in line or "lua.c" in line) :
+ m = re.match(".*(report-.*\.html)", line)
+ os.remove(os.path.join(resultDir, m.group(1)))
+ skipLines = 2
+ else :
+ resultData.append(line)
+ f.close()
- f = open(resultFileName, "w")
- f.writelines(resultData)
- f.close()
+ f = open(resultFileName, "w")
+ f.writelines(resultData)
+ f.close()
diff --git a/BuildTools/FixIncludes.py b/BuildTools/FixIncludes.py
index 0854e68..8ecbd4a 100755
--- a/BuildTools/FixIncludes.py
+++ b/BuildTools/FixIncludes.py
@@ -9,7 +9,7 @@ filename = sys.argv[1]
inPlace = False
if "-i" in sys.argv:
- inPlace = True
+ inPlace = True
filename_base = os.path.basename(filename)
(filename_name, filename_ext) = os.path.splitext(filename_base)
@@ -19,135 +19,135 @@ c_stdlib_headers = Set(["assert.h", "limits.h", "signal.h", "stdlib.h", "ctyp
cpp_stdlib_headers = Set(["algorithm", "fstream", "list", "regex", "typeindex", "array", "functional", "locale", "set", "typeinfo", "atomic", "future", "map", "sstream", "type_traits", "bitset", "initializer_list", "memory", "stack", "unordered_map", "chrono", "iomanip", "mutex", "stdexcept", "unordered_set", "codecvt", "ios", "new", "streambuf", "utility", "complex", "iosfwd", "numeric", "string", "valarray", "condition_variable", "iostream", "ostream", "strstream", "vector", "deque", "istream", "queue", "system_error", "exception", "iterator", "random", "thread", "forward_list", "limits", "ratio", "tuple", "cassert", "ciso646", "csetjmp", "cstdio", "ctime", "cctype", "climits", "csignal", "cstdlib", "cwchar", "cerrno", "clocale", "cstdarg", "cstring", "cwctype", "cfloat", "cmath", "cstddef"])
class HeaderType:
- PRAGMA_ONCE, CORRESPONDING_HEADER, C_STDLIB, CPP_STDLIB, BOOST, QT, OTHER, SWIFTEN, SWIFT_CONTROLLERS, SWIFTOOLS, SWIFT = range(11)
+ PRAGMA_ONCE, CORRESPONDING_HEADER, C_STDLIB, CPP_STDLIB, BOOST, QT, OTHER, SWIFTEN, SWIFT_CONTROLLERS, SWIFTOOLS, SWIFT = range(11)
def findHeaderBlock(lines):
- start = False
- end = False
- lastLine = None
-
- for idx, line in enumerate(lines):
- if not start and line.startswith("#"):
- start = idx
- elif start and (not end) and (not line.startswith("#")) and line.strip():
- end = idx-1
- break
- if not end:
- end = len(lines)
- return (start, end)
+ start = False
+ end = False
+ lastLine = None
+
+ for idx, line in enumerate(lines):
+ if not start and line.startswith("#"):
+ start = idx
+ elif start and (not end) and (not line.startswith("#")) and line.strip():
+ end = idx-1
+ break
+ if not end:
+ end = len(lines)
+ return (start, end)
def lineToFileName(line):
- match = re.match( r'#include "(.*)"', line)
- if match:
- return match.group(1)
- match = re.match( r'#include <(.*)>', line)
- if match:
- return match.group(1)
- return False
+ match = re.match( r'#include "(.*)"', line)
+ if match:
+ return match.group(1)
+ match = re.match( r'#include <(.*)>', line)
+ if match:
+ return match.group(1)
+ return False
def fileNameToHeaderType(name):
- if name.endswith(filename_name + ".h"):
- return HeaderType.CORRESPONDING_HEADER
+ if name.endswith(filename_name + ".h"):
+ return HeaderType.CORRESPONDING_HEADER
- if name in c_stdlib_headers:
- return HeaderType.C_STDLIB
-
- if name in cpp_stdlib_headers:
- return HeaderType.CPP_STDLIB
+ if name in c_stdlib_headers:
+ return HeaderType.C_STDLIB
- if name.startswith("boost"):
- return HeaderType.BOOST
+ if name in cpp_stdlib_headers:
+ return HeaderType.CPP_STDLIB
- if name.startswith("Q"):
- return HeaderType.QT
+ if name.startswith("boost"):
+ return HeaderType.BOOST
- if name.startswith("Swiften"):
- return HeaderType.SWIFTEN
+ if name.startswith("Q"):
+ return HeaderType.QT
- if name.startswith("Swift/Controllers"):
- return HeaderType.SWIFT_CONTROLLERS
+ if name.startswith("Swiften"):
+ return HeaderType.SWIFTEN
- if name.startswith("SwifTools"):
- return HeaderType.SWIFTOOLS
+ if name.startswith("Swift/Controllers"):
+ return HeaderType.SWIFT_CONTROLLERS
- if name.startswith("Swift"):
- return HeaderType.SWIFT
+ if name.startswith("SwifTools"):
+ return HeaderType.SWIFTOOLS
- return HeaderType.OTHER
+ if name.startswith("Swift"):
+ return HeaderType.SWIFT
+
+ return HeaderType.OTHER
def serializeHeaderGroups(groups):
- headerList = []
- for group in range(0, HeaderType.SWIFT + 1):
- if group in groups:
- # sorted and without duplicates
- headers = sorted(list(set(groups[group])))
- headerList.extend(headers)
- headerList.extend(["\n"])
- headerList.pop()
- return headerList
+ headerList = []
+ for group in range(0, HeaderType.SWIFT + 1):
+ if group in groups:
+ # sorted and without duplicates
+ headers = sorted(list(set(groups[group])))
+ headerList.extend(headers)
+ headerList.extend(["\n"])
+ headerList.pop()
+ return headerList
def overwriteFile(filename, content):
- with open(filename, 'w') as f:
- for line in content:
- f.write(line)
+ with open(filename, 'w') as f:
+ for line in content:
+ f.write(line)
def cleanHeaderFile(content, headerStart, headerEnd, headerGroups):
- del content[headerStart:headerEnd]
- newHeaders = serializeHeaderGroups(headerGroups)
- content[headerStart:1] = newHeaders
+ del content[headerStart:headerEnd]
+ newHeaders = serializeHeaderGroups(headerGroups)
+ content[headerStart:1] = newHeaders
- if inPlace :
- overwriteFile(filename, content)
- else :
- for line in content:
- print line,
+ if inPlace :
+ overwriteFile(filename, content)
+ else :
+ for line in content:
+ print line,
def cleanImplementationFile(content, headerStart, headerEnd, headerGroups):
- del content[headerStart:headerEnd]
- newHeaders = serializeHeaderGroups(headerGroups)
- content[headerStart:1] = newHeaders
+ del content[headerStart:headerEnd]
+ newHeaders = serializeHeaderGroups(headerGroups)
+ content[headerStart:1] = newHeaders
- if inPlace :
- overwriteFile(filename, content)
- else :
- for line in content:
- print line,
+ if inPlace :
+ overwriteFile(filename, content)
+ else :
+ for line in content:
+ print line,
containsComplexPreprocessorDirectives = False
with open(filename) as f:
- content = f.readlines()
+ content = f.readlines()
(headerStart, headerEnd) = findHeaderBlock(content)
headerGroups = {}
for line in content[headerStart:headerEnd]:
- if line.strip():
- if line.strip().startswith("#pragma once"):
- headerType = HeaderType.PRAGMA_ONCE
- elif line.strip().startswith("#if") or line.strip().startswith("#def") or line.strip().startswith("#undef") or line.strip().startswith("#pragma "):
- containsComplexPreprocessorDirectives = True
- break
- else:
- #print line
- headerType = fileNameToHeaderType(lineToFileName(line))
-
- #filename = lineToFileName(line)
- if headerType in headerGroups:
- headerGroups[headerType].append(line)
- else:
- headerGroups[headerType] = [line]
+ if line.strip():
+ if line.strip().startswith("#pragma once"):
+ headerType = HeaderType.PRAGMA_ONCE
+ elif line.strip().startswith("#if") or line.strip().startswith("#def") or line.strip().startswith("#undef") or line.strip().startswith("#pragma "):
+ containsComplexPreprocessorDirectives = True
+ break
+ else:
+ #print line
+ headerType = fileNameToHeaderType(lineToFileName(line))
+
+ #filename = lineToFileName(line)
+ if headerType in headerGroups:
+ headerGroups[headerType].append(line)
+ else:
+ headerGroups[headerType] = [line]
if containsComplexPreprocessorDirectives:
- print "Cannot format headers containing preprocessor #if, #pragma, #define or #undef statements!"
- exit(1)
+ print "Cannot format headers containing preprocessor #if, #pragma, #define or #undef statements!"
+ exit(1)
if filename_base.endswith(".h"):
- if not HeaderType.PRAGMA_ONCE in headerGroups:
- print "Missing #pragma once!"
- exit(2)
- cleanHeaderFile(content, headerStart, headerEnd, headerGroups)
+ if not HeaderType.PRAGMA_ONCE in headerGroups:
+ print "Missing #pragma once!"
+ exit(2)
+ cleanHeaderFile(content, headerStart, headerEnd, headerGroups)
elif filename_base.endswith(".cpp"):
- cleanImplementationFile(content, headerStart, headerEnd, headerGroups)
+ cleanImplementationFile(content, headerStart, headerEnd, headerGroups)
diff --git a/BuildTools/Gource/GetGravatars.py b/BuildTools/Gource/GetGravatars.py
index 8adb13c..47f8a68 100755
--- a/BuildTools/Gource/GetGravatars.py
+++ b/BuildTools/Gource/GetGravatars.py
@@ -5,8 +5,8 @@ import subprocess, os, sys, hashlib, urllib
GRAVATAR_URL = "http://www.gravatar.com/avatar/%(id)s?d=404"
if len(sys.argv) != 2 :
- print "Usage: " + sys.argv[0] + " <output-dir>"
- sys.exit(-1)
+ print "Usage: " + sys.argv[0] + " <output-dir>"
+ sys.exit(-1)
output_dir = sys.argv[1]
@@ -14,36 +14,36 @@ output_dir = sys.argv[1]
authors = {}
p = subprocess.Popen("git log --pretty=format:'%ae|%an'", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
for line in p.stdout.readlines() :
- author_components = line.rstrip().split("|")
- authors[author_components[0]] = author_components[1]
+ author_components = line.rstrip().split("|")
+ authors[author_components[0]] = author_components[1]
p.stdin.close()
if p.wait() != 0 :
- print "Error"
- sys.exit(-1)
+ print "Error"
+ sys.exit(-1)
# Get & save the avatars
if not os.path.isdir(output_dir) :
- os.makedirs(output_dir)
+ os.makedirs(output_dir)
for email, name in authors.items() :
- print "Processing avatar for " + name + " <" + email + ">"
- filename = os.path.join(output_dir, name + ".png")
- if os.path.isfile(filename) :
- print "-> Already there. Skipping."
- continue
+ print "Processing avatar for " + name + " <" + email + ">"
+ filename = os.path.join(output_dir, name + ".png")
+ if os.path.isfile(filename) :
+ print "-> Already there. Skipping."
+ continue
- m = hashlib.md5()
- m.update(email)
- url = GRAVATAR_URL % {"id" : m.hexdigest()}
- print "- Downloading " + url
- f = urllib.urlopen(url)
- input = None
- if f.getcode() == 200 :
- input = f.read()
- f.close()
- if input :
- print "- Saving file " + filename
- f = open(filename, "w")
- f.write(input)
- f.close()
- else :
- print "- No Gravatar found"
+ m = hashlib.md5()
+ m.update(email)
+ url = GRAVATAR_URL % {"id" : m.hexdigest()}
+ print "- Downloading " + url
+ f = urllib.urlopen(url)
+ input = None
+ if f.getcode() == 200 :
+ input = f.read()
+ f.close()
+ if input :
+ print "- Saving file " + filename
+ f = open(filename, "w")
+ f.write(input)
+ f.close()
+ else :
+ print "- No Gravatar found"
diff --git a/BuildTools/SCons/SConscript.boot b/BuildTools/SCons/SConscript.boot
index 60aebdc..8b50044 100644
--- a/BuildTools/SCons/SConscript.boot
+++ b/BuildTools/SCons/SConscript.boot
@@ -107,15 +107,15 @@ vars.Add(BoolVariable("install_git_hooks", "Install git hooks", "true"))
################################################################################
env_ENV = {
- 'PATH' : os.environ['PATH'],
- 'LD_LIBRARY_PATH' : os.environ.get("LD_LIBRARY_PATH", ""),
- 'TERM' : os.environ.get("TERM", ""),
+ 'PATH' : os.environ['PATH'],
+ 'LD_LIBRARY_PATH' : os.environ.get("LD_LIBRARY_PATH", ""),
+ 'TERM' : os.environ.get("TERM", ""),
}
if "MSVC_VERSION" in ARGUMENTS :
- env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None))
+ env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None))
else :
- env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None))
+ env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None))
Help(vars.GenerateHelpText(env))
diff --git a/BuildTools/SCons/SConstruct b/BuildTools/SCons/SConstruct
index 9fba359..4ead554 100644
--- a/BuildTools/SCons/SConstruct
+++ b/BuildTools/SCons/SConstruct
@@ -73,7 +73,7 @@ def checkObjCHeader(context, header) :
################################################################################
if ARGUMENTS.get("force-configure", 0) :
- SCons.SConf.SetCacheMode("force")
+ SCons.SConf.SetCacheMode("force")
def CheckPKG(context, name):
context.Message( 'Checking for package %s... ' % name )
@@ -99,9 +99,9 @@ int main(int argc, char* argv[]) {
}
""" % { "header" : header, "define": define }, extension)
if ret[0] :
- return int(ret[1])
+ return int(ret[1])
else :
- return -1
+ return -1
conf = Configure(conf_env)
@@ -339,7 +339,7 @@ icu_env = conf_env.Clone()
use_icu = bool(env["icu"])
icu_prefix = ""
if isinstance(env["icu"], str) :
- icu_prefix = env["icu"]
+ icu_prefix = env["icu"]
icu_flags = {}
if icu_prefix :
icu_flags = { "CPPPATH": [os.path.join(icu_prefix, "include")] }
@@ -381,11 +381,11 @@ if not env.get("HAVE_ICU", False) and not env.get("HAVE_LIBIDN", False) :
# Unbound
if env["unbound"] :
- env["LDNS_BUNDLED"] = 1
- env["UNBOUND_BUNDLED"] = 1
+ env["LDNS_BUNDLED"] = 1
+ env["UNBOUND_BUNDLED"] = 1
else :
- env["LDNS_FLAGS"] = {}
- env["UNBOUND_FLAGS"] = {}
+ env["LDNS_FLAGS"] = {}
+ env["UNBOUND_FLAGS"] = {}
# LibMiniUPnPc
if env["experimental_ft"] :
@@ -693,8 +693,8 @@ for stage in ["flags", "build"] :
# SLOCCount
if ARGUMENTS.get("sloccount", False) :
- for project in env["PROJECTS"] :
- env.SLOCCount("#/" + project)
+ for project in env["PROJECTS"] :
+ env.SLOCCount("#/" + project)
################################################################################
diff --git a/BuildTools/SCons/Tools/AppBundle.py b/BuildTools/SCons/Tools/AppBundle.py
index 6a343f6..f1072f5 100644
--- a/BuildTools/SCons/Tools/AppBundle.py
+++ b/BuildTools/SCons/Tools/AppBundle.py
@@ -1,64 +1,64 @@
import SCons.Util, os.path
def generate(env) :
- def createAppBundle(env, bundle, version = "1.0", resources = [], frameworks = [], info = {}, handlesXMPPURIs = False) :
- bundleDir = bundle + ".app"
- bundleContentsDir = bundleDir + "/Contents"
- resourcesDir = bundleContentsDir + "/Resources"
- frameworksDir = bundleContentsDir + "/Frameworks"
- env.Install(bundleContentsDir + "/MacOS", bundle)
- env.WriteVal(bundleContentsDir + "/PkgInfo", env.Value("APPL\77\77\77\77"))
+ def createAppBundle(env, bundle, version = "1.0", resources = [], frameworks = [], info = {}, handlesXMPPURIs = False) :
+ bundleDir = bundle + ".app"
+ bundleContentsDir = bundleDir + "/Contents"
+ resourcesDir = bundleContentsDir + "/Resources"
+ frameworksDir = bundleContentsDir + "/Frameworks"
+ env.Install(bundleContentsDir + "/MacOS", bundle)
+ env.WriteVal(bundleContentsDir + "/PkgInfo", env.Value("APPL\77\77\77\77"))
- infoDict = {
- "CFBundleDevelopmentRegion" : "English",
- "CFBundleExecutable" : bundle,
- "CFBundleIdentifier" : "im.swift." + bundle,
- "CFBundleInfoDictionaryVersion" : "6.0",
- "CFBundleName" : bundle,
- "CFBundlePackageType" : "APPL",
- "CFBundleSignature": "\77\77\77\77",
- "CFBundleVersion" : version,
- "CFBundleIconFile" : bundle,
- "NSPrincipalClass" : "NSApplication",
- "NSHumanReadableCopyright" : unichr(0xA9) + " 2010 Swift Development Team.\nAll Rights Reserved."
- }
- infoDict.update(info)
-
- plist = """<?xml version="1.0" encoding="UTF-8"?>
- <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
- <plist version="1.0">
- <dict>
- """
- for key, value in infoDict.items() :
- plist += "<key>" + key + "</key>\n"
- plist += "<string>" + value.encode("utf-8") + "</string>\n"
- if handlesXMPPURIs :
- plist += """<key>CFBundleURLTypes</key>
+ infoDict = {
+ "CFBundleDevelopmentRegion" : "English",
+ "CFBundleExecutable" : bundle,
+ "CFBundleIdentifier" : "im.swift." + bundle,
+ "CFBundleInfoDictionaryVersion" : "6.0",
+ "CFBundleName" : bundle,
+ "CFBundlePackageType" : "APPL",
+ "CFBundleSignature": "\77\77\77\77",
+ "CFBundleVersion" : version,
+ "CFBundleIconFile" : bundle,
+ "NSPrincipalClass" : "NSApplication",
+ "NSHumanReadableCopyright" : unichr(0xA9) + " 2010 Swift Development Team.\nAll Rights Reserved."
+ }
+ infoDict.update(info)
+
+ plist = """<?xml version="1.0" encoding="UTF-8"?>
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+ <plist version="1.0">
+ <dict>
+ """
+ for key, value in infoDict.items() :
+ plist += "<key>" + key + "</key>\n"
+ plist += "<string>" + value.encode("utf-8") + "</string>\n"
+ if handlesXMPPURIs :
+ plist += """<key>CFBundleURLTypes</key>
<array>
- <dict>
- <key>CFBundleURLName</key>
- <string>XMPP URL</string>
- <key>CFBundleURLSchemes</key>
- <array>
- <string>xmpp</string>
- </array>
- </dict>
+ <dict>
+ <key>CFBundleURLName</key>
+ <string>XMPP URL</string>
+ <key>CFBundleURLSchemes</key>
+ <array>
+ <string>xmpp</string>
+ </array>
+ </dict>
</array>\n"""
- plist += """</dict>
- </plist>
- """
- env.WriteVal(bundleContentsDir + "/Info.plist", env.Value(plist))
+ plist += """</dict>
+ </plist>
+ """
+ env.WriteVal(bundleContentsDir + "/Info.plist", env.Value(plist))
- for (target, resource) in resources.items() :
- env.Install(os.path.join(resourcesDir, target), resource)
+ for (target, resource) in resources.items() :
+ env.Install(os.path.join(resourcesDir, target), resource)
- for framework in frameworks :
- env.Install(frameworksDir, framework)
+ for framework in frameworks :
+ env.Install(frameworksDir, framework)
- return env.Dir(bundleDir)
+ return env.Dir(bundleDir)
- env.AddMethod(createAppBundle, "AppBundle")
+ env.AddMethod(createAppBundle, "AppBundle")
def exists(env) :
- return env["PLATFORM"] == "darwin"
+ return env["PLATFORM"] == "darwin"
diff --git a/BuildTools/SCons/Tools/BuildVersion.py b/BuildTools/SCons/Tools/BuildVersion.py
index 41e6d8d..b15448a 100644
--- a/BuildTools/SCons/Tools/BuildVersion.py
+++ b/BuildTools/SCons/Tools/BuildVersion.py
@@ -3,16 +3,16 @@ import SCons.Util
import Version
def generate(env) :
- def createBuildVersion(env, target, project) :
- buildVersion = """#pragma once
+ def createBuildVersion(env, target, project) :
+ buildVersion = """#pragma once
static const char* buildVersion = \"%(buildVersion)s\";\n
#define SWIFT_VERSION_STRING \"%(buildVersion)s\";\n
""" % { "buildVersion" : Version.getBuildVersion(env.Dir("#").abspath, project) }
- env.WriteVal(target, env.Value(buildVersion))
+ env.WriteVal(target, env.Value(buildVersion))
- env.AddMethod(createBuildVersion, "BuildVersion")
+ env.AddMethod(createBuildVersion, "BuildVersion")
def exists(env) :
- return true
+ return true
diff --git a/BuildTools/SCons/Tools/DoxyGen.py b/BuildTools/SCons/Tools/DoxyGen.py
index 3fc53c4..79af1c9 100644
--- a/BuildTools/SCons/Tools/DoxyGen.py
+++ b/BuildTools/SCons/Tools/DoxyGen.py
@@ -1,26 +1,26 @@
import SCons.Util, os
def generate(env) :
- def modify_targets(target, source, env) :
- target = [env.File("html/index.html")]
- return target, source
+ def modify_targets(target, source, env) :
+ target = [env.File("html/index.html")]
+ return target, source
- def generate_actions(source, target, env, for_signature) :
- if env.WhereIs("$DOXYGEN") and env.WhereIs("$DOT") :
- return [SCons.Action.Action("$DOXYGEN $SOURCE", cmdstr = "$DOXYCOMSTR")]
- else :
- return []
+ def generate_actions(source, target, env, for_signature) :
+ if env.WhereIs("$DOXYGEN") and env.WhereIs("$DOT") :
+ return [SCons.Action.Action("$DOXYGEN $SOURCE", cmdstr = "$DOXYCOMSTR")]
+ else :
+ return []
- env["DOXYGEN"] = "doxygen"
- # FIXME: For some reason, things go incredibly slow (at least on OS X)
- # when not doing this. Some environment flag is having an effect on
- # this; find out which
- env["ENV"] = os.environ
- env["DOT"] = "dot"
- env["BUILDERS"]["DoxyGen"] = SCons.Builder.Builder(
- emitter = modify_targets,
- generator = generate_actions,
- single_source = True)
+ env["DOXYGEN"] = "doxygen"
+ # FIXME: For some reason, things go incredibly slow (at least on OS X)
+ # when not doing this. Some environment flag is having an effect on
+ # this; find out which
+ env["ENV"] = os.environ
+ env["DOT"] = "dot"
+ env["BUILDERS"]["DoxyGen"] = SCons.Builder.Builder(
+ emitter = modify_targets,
+ generator = generate_actions,
+ single_source = True)
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/SCons/Tools/Flags.py b/BuildTools/SCons/Tools/Flags.py
index c130faf..0768181 100644
--- a/BuildTools/SCons/Tools/Flags.py
+++ b/BuildTools/SCons/Tools/Flags.py
@@ -1,13 +1,13 @@
import SCons.Util
def generate(env) :
- def useFlags(env, flags) :
- for flag in flags :
- if flag in env :
- env[flag] = env[flag] + flags[flag]
- else :
- env[flag] = flags[flag]
- env.AddMethod(useFlags, "UseFlags")
+ def useFlags(env, flags) :
+ for flag in flags :
+ if flag in env :
+ env[flag] = env[flag] + flags[flag]
+ else :
+ env[flag] = flags[flag]
+ env.AddMethod(useFlags, "UseFlags")
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/SCons/Tools/Nib.py b/BuildTools/SCons/Tools/Nib.py
index ccfd884..91eb064 100644
--- a/BuildTools/SCons/Tools/Nib.py
+++ b/BuildTools/SCons/Tools/Nib.py
@@ -1,12 +1,12 @@
import SCons.Util
def generate(env) :
- env["IBTOOL"] = "ibtool"
- env["BUILDERS"]["Nib"] = SCons.Builder.Builder(
- action = SCons.Action.Action("$IBTOOL --errors --warnings --notices --output-format human-readable-text --compile $TARGET $SOURCE", cmdstr = "$NIBCOMSTR"),
- suffix = ".nib",
- src_suffix = ".xib",
- single_source = True)
+ env["IBTOOL"] = "ibtool"
+ env["BUILDERS"]["Nib"] = SCons.Builder.Builder(
+ action = SCons.Action.Action("$IBTOOL --errors --warnings --notices --output-format human-readable-text --compile $TARGET $SOURCE", cmdstr = "$NIBCOMSTR"),
+ suffix = ".nib",
+ src_suffix = ".xib",
+ single_source = True)
def exists(env) :
- return env["PLATFORM"] == "darwin"
+ return env["PLATFORM"] == "darwin"
diff --git a/BuildTools/SCons/Tools/ReplacePragmaOnce.py b/BuildTools/SCons/Tools/ReplacePragmaOnce.py
index 466c31e..3df0f41 100644
--- a/BuildTools/SCons/Tools/ReplacePragmaOnce.py
+++ b/BuildTools/SCons/Tools/ReplacePragmaOnce.py
@@ -1,25 +1,25 @@
import SCons.Util, os.path
def generate(env) :
- root = env.Dir("#").abspath
- def relpath(path, start) :
- i = len(os.path.commonprefix([path, start]))
- return path[i+1:]
+ root = env.Dir("#").abspath
+ def relpath(path, start) :
+ i = len(os.path.commonprefix([path, start]))
+ return path[i+1:]
- def replacePragmaOnce(env, target, source) :
- guard = relpath(source[0].abspath, root).replace("/", "_").replace(".", "_").upper()
- data = source[0].get_contents()
- f = open(str(target[0]), 'wb')
- if "#pragma once" in data :
- f.write(data.replace("#pragma once", "#ifndef %(guard)s\n#define %(guard)s" % {"guard": guard}))
- f.write("\n#endif\n")
- else :
- f.write(data)
- f.close()
+ def replacePragmaOnce(env, target, source) :
+ guard = relpath(source[0].abspath, root).replace("/", "_").replace(".", "_").upper()
+ data = source[0].get_contents()
+ f = open(str(target[0]), 'wb')
+ if "#pragma once" in data :
+ f.write(data.replace("#pragma once", "#ifndef %(guard)s\n#define %(guard)s" % {"guard": guard}))
+ f.write("\n#endif\n")
+ else :
+ f.write(data)
+ f.close()
- env["BUILDERS"]["ReplacePragmaOnce"] = SCons.Builder.Builder(
- action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"),
- single_source = True)
+ env["BUILDERS"]["ReplacePragmaOnce"] = SCons.Builder.Builder(
+ action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"),
+ single_source = True)
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/SCons/Tools/SLOCCount.py b/BuildTools/SCons/Tools/SLOCCount.py
index abf4a3c..682a797 100644
--- a/BuildTools/SCons/Tools/SLOCCount.py
+++ b/BuildTools/SCons/Tools/SLOCCount.py
@@ -1,17 +1,17 @@
import SCons.Util, os.path, os
def generate(env) :
- def createSLOCCount(env, source) :
- myenv = env.Clone()
- myenv["ENV"]["HOME"] = os.environ["HOME"]
- source = myenv.Dir(source)
- target = myenv.File("#/" + source.path + ".sloccount")
- # FIXME: There's probably a better way to force building the .sc
- if os.path.exists(target.abspath) :
- os.unlink(target.abspath)
- return myenv.Command(target, source, [SCons.Action.Action("sloccount --duplicates --wide --details " + source.path + " | grep -v qrc_ > $TARGET", cmdstr = "$GENCOMSTR")])
+ def createSLOCCount(env, source) :
+ myenv = env.Clone()
+ myenv["ENV"]["HOME"] = os.environ["HOME"]
+ source = myenv.Dir(source)
+ target = myenv.File("#/" + source.path + ".sloccount")
+ # FIXME: There's probably a better way to force building the .sc
+ if os.path.exists(target.abspath) :
+ os.unlink(target.abspath)
+ return myenv.Command(target, source, [SCons.Action.Action("sloccount --duplicates --wide --details " + source.path + " | grep -v qrc_ > $TARGET", cmdstr = "$GENCOMSTR")])
- env.AddMethod(createSLOCCount, "SLOCCount")
+ env.AddMethod(createSLOCCount, "SLOCCount")
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/SCons/Tools/WindowsBundle.py b/BuildTools/SCons/Tools/WindowsBundle.py
index 744d5c8..5afb56e 100644
--- a/BuildTools/SCons/Tools/WindowsBundle.py
+++ b/BuildTools/SCons/Tools/WindowsBundle.py
@@ -4,119 +4,119 @@ import re
import shutil
def which(program_name):
- if hasattr(shutil, "which"):
- return shutil.which(program_name)
- else:
- path = os.getenv('PATH')
- for p in path.split(os.path.pathsep):
- p = os.path.join(p,program_name)
- if os.path.exists(p) and os.access(p,os.X_OK):
- return p
+ if hasattr(shutil, "which"):
+ return shutil.which(program_name)
+ else:
+ path = os.getenv('PATH')
+ for p in path.split(os.path.pathsep):
+ p = os.path.join(p,program_name)
+ if os.path.exists(p) and os.access(p,os.X_OK):
+ return p
def generate(env) :
- def captureWinDeployQtMapping(release = True):
- p = False
-
- qt_bin_folder = ""
- if "QTDIR" in env:
- qt_bin_folder = env["QTDIR"] + "\\bin;"
-
- environ = {"PATH": qt_bin_folder + os.getenv("PATH"), "TEMP": os.getenv("TEMP"), "TMP": os.getenv("TMP")}
-
- if release:
- p = subprocess.Popen(['windeployqt', '--release', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)
- else:
- p = subprocess.Popen(['windeployqt', '--debug', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)
-
- if p:
- stdout, stderr = p.communicate()
-
- mappings = []
-
- p = re.compile(ur'"([^\"]*)" "([^\"]*)"')
-
- matches = re.findall(p, stdout)
- for match in matches:
- mappings.append(match)
- return mappings
- else:
- return False
-
- def createWindowsBundleManual(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') :
- all_files = []
- all_files += env.Install(bundle, bundle + ".exe")
- for lib in qtlibs :
- all_files += env.Install(bundle, os.path.join(env["QTDIR"], "bin", lib + ".dll"))
- plugins_suffix = '4'
- if qtversion == '5' :
- plugins_suffix = ''
- for plugin_type in qtplugins:
- all_files += env.Install(os.path.join(bundle, plugin_type), [os.path.join(env["QTDIR"], "plugins", plugin_type, "q" + plugin + plugins_suffix + ".dll") for plugin in qtplugins[plugin_type]])
- for dir, resourceFiles in resources.items() :
- for resource in resourceFiles :
- e = env.Entry(resource)
- if e.isdir() :
- for subresource in env.Glob(str(e) + "/*") :
- all_files += env.Install(os.path.join(bundle, dir, e.name), subresource)
- else :
- all_files += env.Install(os.path.join(bundle, dir), resource)
- return all_files
-
- # This version of uses windeployqt tool
- def createWindowsBundleWithWinDeployQt(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') :
- assert(qtversion == '5')
- all_files = []
-
- # add swift executable
- all_files += env.Install(bundle, bundle + ".exe")
-
- # adding resources (swift sounds/images/translations)
- for dir, resourceFiles in resources.items() :
- for resource in resourceFiles :
- e = env.Entry(resource)
- if e.isdir() :
- for subresource in env.Glob(str(e) + "/*") :
- all_files += env.Install(os.path.join(bundle, dir, e.name), subresource)
- else :
- all_files += env.Install(os.path.join(bundle, dir), resource)
-
- qtmappings = captureWinDeployQtMapping()
- assert(qtmappings)
-
- # handle core DLLs
- qt_corelib_regex = re.compile(ur".*bin.*\\(.*)\.dll")
-
- for qtlib in qtlibs:
- if qtlib.startswith("Qt5"):
- (src_path, target_path) = next(((src_path, target_path) for (src_path, target_path) in qtmappings if qt_corelib_regex.match(src_path) and qt_corelib_regex.match(src_path).group(1) == qtlib), (None, None))
- if src_path != None:
- all_files += env.Install(bundle, src_path)
-
- # handle core dependencies
- for (src_path, target_path) in qtmappings:
- if qt_corelib_regex.match(src_path) and not qt_corelib_regex.match(src_path).group(1).startswith("Qt5"):
- all_files += env.Install(bundle, src_path)
-
- # handle plugins
- qt_plugin_regex = re.compile(ur".*plugins.*\\(.*)\\(.*)\.dll")
- for (src_path, target_path) in qtmappings:
- if qt_plugin_regex.match(src_path):
- plugin_folder, filename = qt_plugin_regex.match(src_path).groups()
- try:
- if filename[1:] in qtplugins[plugin_folder]:
- all_files += env.Install(os.path.join(bundle, plugin_folder), src_path)
- except:
- pass
- return all_files
-
- def createWindowsBundle(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4'):
- if which("windeployqt.exe"):
- return createWindowsBundleWithWinDeployQt(env, bundle, resources, qtplugins, qtlibs, qtversion)
- else:
- return createWindowsBundleManual(env, bundle, resources, qtplugins, qtlibs, qtversion)
-
- env.AddMethod(createWindowsBundle, "WindowsBundle")
+ def captureWinDeployQtMapping(release = True):
+ p = False
+
+ qt_bin_folder = ""
+ if "QTDIR" in env:
+ qt_bin_folder = env["QTDIR"] + "\\bin;"
+
+ environ = {"PATH": qt_bin_folder + os.getenv("PATH"), "TEMP": os.getenv("TEMP"), "TMP": os.getenv("TMP")}
+
+ if release:
+ p = subprocess.Popen(['windeployqt', '--release', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)
+ else:
+ p = subprocess.Popen(['windeployqt', '--debug', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)
+
+ if p:
+ stdout, stderr = p.communicate()
+
+ mappings = []
+
+ p = re.compile(ur'"([^\"]*)" "([^\"]*)"')
+
+ matches = re.findall(p, stdout)
+ for match in matches:
+ mappings.append(match)
+ return mappings
+ else:
+ return False
+
+ def createWindowsBundleManual(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') :
+ all_files = []
+ all_files += env.Install(bundle, bundle + ".exe")
+ for lib in qtlibs :
+ all_files += env.Install(bundle, os.path.join(env["QTDIR"], "bin", lib + ".dll"))
+ plugins_suffix = '4'
+ if qtversion == '5' :
+ plugins_suffix = ''
+ for plugin_type in qtplugins:
+ all_files += env.Install(os.path.join(bundle, plugin_type), [os.path.join(env["QTDIR"], "plugins", plugin_type, "q" + plugin + plugins_suffix + ".dll") for plugin in qtplugins[plugin_type]])
+ for dir, resourceFiles in resources.items() :
+ for resource in resourceFiles :
+ e = env.Entry(resource)
+ if e.isdir() :
+ for subresource in env.Glob(str(e) + "/*") :
+ all_files += env.Install(os.path.join(bundle, dir, e.name), subresource)
+ else :
+ all_files += env.Install(os.path.join(bundle, dir), resource)
+ return all_files
+
+    # This version uses the windeployqt tool
+ def createWindowsBundleWithWinDeployQt(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') :
+ assert(qtversion == '5')
+ all_files = []
+
+ # add swift executable
+ all_files += env.Install(bundle, bundle + ".exe")
+
+ # adding resources (swift sounds/images/translations)
+ for dir, resourceFiles in resources.items() :
+ for resource in resourceFiles :
+ e = env.Entry(resource)
+ if e.isdir() :
+ for subresource in env.Glob(str(e) + "/*") :
+ all_files += env.Install(os.path.join(bundle, dir, e.name), subresource)
+ else :
+ all_files += env.Install(os.path.join(bundle, dir), resource)
+
+ qtmappings = captureWinDeployQtMapping()
+ assert(qtmappings)
+
+ # handle core DLLs
+ qt_corelib_regex = re.compile(ur".*bin.*\\(.*)\.dll")
+
+ for qtlib in qtlibs:
+ if qtlib.startswith("Qt5"):
+ (src_path, target_path) = next(((src_path, target_path) for (src_path, target_path) in qtmappings if qt_corelib_regex.match(src_path) and qt_corelib_regex.match(src_path).group(1) == qtlib), (None, None))
+ if src_path != None:
+ all_files += env.Install(bundle, src_path)
+
+ # handle core dependencies
+ for (src_path, target_path) in qtmappings:
+ if qt_corelib_regex.match(src_path) and not qt_corelib_regex.match(src_path).group(1).startswith("Qt5"):
+ all_files += env.Install(bundle, src_path)
+
+ # handle plugins
+ qt_plugin_regex = re.compile(ur".*plugins.*\\(.*)\\(.*)\.dll")
+ for (src_path, target_path) in qtmappings:
+ if qt_plugin_regex.match(src_path):
+ plugin_folder, filename = qt_plugin_regex.match(src_path).groups()
+ try:
+ if filename[1:] in qtplugins[plugin_folder]:
+ all_files += env.Install(os.path.join(bundle, plugin_folder), src_path)
+ except:
+ pass
+ return all_files
+
+ def createWindowsBundle(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4'):
+ if which("windeployqt.exe"):
+ return createWindowsBundleWithWinDeployQt(env, bundle, resources, qtplugins, qtlibs, qtversion)
+ else:
+ return createWindowsBundleManual(env, bundle, resources, qtplugins, qtlibs, qtversion)
+
+ env.AddMethod(createWindowsBundle, "WindowsBundle")
def exists(env) :
- return env["PLATFORM"] == "win32"
+ return env["PLATFORM"] == "win32"
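
A usage sketch of the WindowsBundle method; the resource, plugin and library names below are placeholders rather than the project's actual lists:

    # Stages Swift.exe plus Qt DLLs/plugins into a 'Swift' directory, using
    # windeployqt when it is on PATH and the manual copy path otherwise.
    env.WindowsBundle("Swift",
        resources = {"sounds": ["message-received.wav"]},
        qtplugins = {"imageformats": ["gif", "jpeg"]},
        qtlibs = ["Qt5Core", "Qt5Gui", "Qt5Widgets"],
        qtversion = '5')
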
diff --git a/BuildTools/SCons/Tools/WriteVal.py b/BuildTools/SCons/Tools/WriteVal.py
index 0a1e1ad..4e8d3bb 100644
--- a/BuildTools/SCons/Tools/WriteVal.py
+++ b/BuildTools/SCons/Tools/WriteVal.py
@@ -1,15 +1,15 @@
import SCons.Util
def generate(env) :
- def replacePragmaOnce(env, target, source) :
- f = open(str(target[0]), 'wb')
- f.write(source[0].get_contents())
- f.close()
+ def replacePragmaOnce(env, target, source) :
+ f = open(str(target[0]), 'wb')
+ f.write(source[0].get_contents())
+ f.close()
- env["BUILDERS"]["WriteVal"] = SCons.Builder.Builder(
- action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"),
- single_source = True)
+ env["BUILDERS"]["WriteVal"] = SCons.Builder.Builder(
+ action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"),
+ single_source = True)
def exists(env) :
- return True
+ return True
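
A minimal usage sketch of the WriteVal builder from an SConscript; the target name and value are hypothetical:

    # Writes the literal contents of a Value() node into a file.
    env.WriteVal("VERSION.swift", env.Value("4.0"))
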
diff --git a/BuildTools/SCons/Tools/textfile.py b/BuildTools/SCons/Tools/textfile.py
index 89f8963..b290125 100644
--- a/BuildTools/SCons/Tools/textfile.py
+++ b/BuildTools/SCons/Tools/textfile.py
@@ -25,23 +25,23 @@
__doc__ = """
Textfile/Substfile builder for SCons.
- Create file 'target' which typically is a textfile. The 'source'
- may be any combination of strings, Nodes, or lists of same. A
- 'linesep' will be put between any part written and defaults to
- os.linesep.
-
- The only difference between the Textfile builder and the Substfile
- builder is that strings are converted to Value() nodes for the
- former and File() nodes for the latter. To insert files in the
- former or strings in the latter, wrap them in a File() or Value(),
- respectively.
-
- The values of SUBST_DICT first have any construction variables
- expanded (its keys are not expanded). If a value of SUBST_DICT is
- a python callable function, it is called and the result is expanded
- as the value. Values are substituted in a "random" order; if any
- substitution could be further expanded by another subsitition, it
- is unpredictible whether the expansion will occur.
+ Create file 'target' which typically is a textfile. The 'source'
+ may be any combination of strings, Nodes, or lists of same. A
+ 'linesep' will be put between any part written and defaults to
+ os.linesep.
+
+ The only difference between the Textfile builder and the Substfile
+ builder is that strings are converted to Value() nodes for the
+ former and File() nodes for the latter. To insert files in the
+ former or strings in the latter, wrap them in a File() or Value(),
+ respectively.
+
+ The values of SUBST_DICT first have any construction variables
+ expanded (its keys are not expanded). If a value of SUBST_DICT is
+ a python callable function, it is called and the result is expanded
+ as the value. Values are substituted in a "random" order; if any
+    substitution could be further expanded by another substitution, it
+    is unpredictable whether the expansion will occur.
"""
__revision__ = "src/engine/SCons/Tool/textfile.py 5357 2011/09/09 21:31:03 bdeegan"
@@ -56,117 +56,117 @@ from SCons.Node.Python import Value
from SCons.Util import is_String, is_Sequence, is_Dict
def _do_subst(node, subs):
- """
- Fetch the node contents and replace all instances of the keys with
- their values. For example, if subs is
- {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'},
- then all instances of %VERSION% in the file will be replaced with
- 1.2345 and so forth.
- """
- contents = node.get_text_contents()
- if not subs: return contents
- for (k,v) in subs:
- contents = re.sub(k, v, contents)
- return contents
+ """
+ Fetch the node contents and replace all instances of the keys with
+ their values. For example, if subs is
+ {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'},
+ then all instances of %VERSION% in the file will be replaced with
+ 1.2345 and so forth.
+ """
+ contents = node.get_text_contents()
+ if not subs: return contents
+ for (k,v) in subs:
+ contents = re.sub(k, v, contents)
+ return contents
def _action(target, source, env):
- # prepare the line separator
- linesep = env['LINESEPARATOR']
- if linesep is None:
- linesep = os.linesep
- elif is_String(linesep):
- pass
- elif isinstance(linesep, Value):
- linesep = linesep.get_text_contents()
- else:
- raise SCons.Errors.UserError(
- 'unexpected type/class for LINESEPARATOR: %s'
- % repr(linesep), None)
-
- # create a dictionary to use for the substitutions
- if 'SUBST_DICT' not in env:
- subs = None # no substitutions
- else:
- d = env['SUBST_DICT']
- if is_Dict(d):
- d = list(d.items())
- elif is_Sequence(d):
- pass
- else:
- raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence')
- subs = []
- for (k,v) in d:
- if callable(v):
- v = v()
- if is_String(v):
- v = env.subst(v)
- else:
- v = str(v)
- subs.append((k,v))
-
- # write the file
- try:
- fd = open(target[0].get_path(), "wb")
- except (OSError,IOError), e:
- raise SCons.Errors.UserError("Can't write target file %s" % target[0])
- # separate lines by 'linesep' only if linesep is not empty
- lsep = None
- for s in source:
- if lsep: fd.write(lsep)
- fd.write(_do_subst(s, subs))
- lsep = linesep
- fd.close()
+ # prepare the line separator
+ linesep = env['LINESEPARATOR']
+ if linesep is None:
+ linesep = os.linesep
+ elif is_String(linesep):
+ pass
+ elif isinstance(linesep, Value):
+ linesep = linesep.get_text_contents()
+ else:
+ raise SCons.Errors.UserError(
+ 'unexpected type/class for LINESEPARATOR: %s'
+ % repr(linesep), None)
+
+ # create a dictionary to use for the substitutions
+ if 'SUBST_DICT' not in env:
+ subs = None # no substitutions
+ else:
+ d = env['SUBST_DICT']
+ if is_Dict(d):
+ d = list(d.items())
+ elif is_Sequence(d):
+ pass
+ else:
+ raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence')
+ subs = []
+ for (k,v) in d:
+ if callable(v):
+ v = v()
+ if is_String(v):
+ v = env.subst(v)
+ else:
+ v = str(v)
+ subs.append((k,v))
+
+ # write the file
+ try:
+ fd = open(target[0].get_path(), "wb")
+ except (OSError,IOError), e:
+ raise SCons.Errors.UserError("Can't write target file %s" % target[0])
+ # separate lines by 'linesep' only if linesep is not empty
+ lsep = None
+ for s in source:
+ if lsep: fd.write(lsep)
+ fd.write(_do_subst(s, subs))
+ lsep = linesep
+ fd.close()
def _strfunc(target, source, env):
- return "Creating '%s'" % target[0]
+ return "Creating '%s'" % target[0]
def _convert_list_R(newlist, sources):
- for elem in sources:
- if is_Sequence(elem):
- _convert_list_R(newlist, elem)
- elif isinstance(elem, Node):
- newlist.append(elem)
- else:
- newlist.append(Value(elem))
+ for elem in sources:
+ if is_Sequence(elem):
+ _convert_list_R(newlist, elem)
+ elif isinstance(elem, Node):
+ newlist.append(elem)
+ else:
+ newlist.append(Value(elem))
def _convert_list(target, source, env):
- if len(target) != 1:
- raise SCons.Errors.UserError("Only one target file allowed")
- newlist = []
- _convert_list_R(newlist, source)
- return target, newlist
+ if len(target) != 1:
+ raise SCons.Errors.UserError("Only one target file allowed")
+ newlist = []
+ _convert_list_R(newlist, source)
+ return target, newlist
_common_varlist = ['SUBST_DICT', 'LINESEPARATOR']
_text_varlist = _common_varlist + ['TEXTFILEPREFIX', 'TEXTFILESUFFIX']
_text_builder = SCons.Builder.Builder(
- action = SCons.Action.Action(_action, _strfunc, varlist = _text_varlist),
- source_factory = Value,
- emitter = _convert_list,
- prefix = '$TEXTFILEPREFIX',
- suffix = '$TEXTFILESUFFIX',
- )
+ action = SCons.Action.Action(_action, _strfunc, varlist = _text_varlist),
+ source_factory = Value,
+ emitter = _convert_list,
+ prefix = '$TEXTFILEPREFIX',
+ suffix = '$TEXTFILESUFFIX',
+ )
_subst_varlist = _common_varlist + ['SUBSTFILEPREFIX', 'TEXTFILESUFFIX']
_subst_builder = SCons.Builder.Builder(
- action = SCons.Action.Action(_action, _strfunc, varlist = _subst_varlist),
- source_factory = SCons.Node.FS.File,
- emitter = _convert_list,
- prefix = '$SUBSTFILEPREFIX',
- suffix = '$SUBSTFILESUFFIX',
- src_suffix = ['.in'],
- )
+ action = SCons.Action.Action(_action, _strfunc, varlist = _subst_varlist),
+ source_factory = SCons.Node.FS.File,
+ emitter = _convert_list,
+ prefix = '$SUBSTFILEPREFIX',
+ suffix = '$SUBSTFILESUFFIX',
+ src_suffix = ['.in'],
+ )
def generate(env):
- env['LINESEPARATOR'] = os.linesep
- env['BUILDERS']['MyTextfile'] = _text_builder
- env['TEXTFILEPREFIX'] = ''
- env['TEXTFILESUFFIX'] = '.txt'
- env['BUILDERS']['MySubstfile'] = _subst_builder
- env['SUBSTFILEPREFIX'] = ''
- env['SUBSTFILESUFFIX'] = ''
+ env['LINESEPARATOR'] = os.linesep
+ env['BUILDERS']['MyTextfile'] = _text_builder
+ env['TEXTFILEPREFIX'] = ''
+ env['TEXTFILESUFFIX'] = '.txt'
+ env['BUILDERS']['MySubstfile'] = _subst_builder
+ env['SUBSTFILEPREFIX'] = ''
+ env['SUBSTFILESUFFIX'] = ''
def exists(env):
- return 1
+ return 1
# Local Variables:
# tab-width:4
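
Concrete usage of the two builders described in the docstring above, with a hypothetical template and substitution dictionary:

    # Substfile variant: every %VERSION% in Version.h.in is replaced in Version.h.
    env.MySubstfile("Version.h", "Version.h.in", SUBST_DICT = {"%VERSION%": "1.2345"})

    # Textfile variant: plain strings become the lines of release-notes.txt (TEXTFILESUFFIX).
    env.MyTextfile("release-notes", ["Swift", "Version 1.2345"])
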
diff --git a/BuildTools/SCons/Version.py b/BuildTools/SCons/Version.py
index 4fd9171..6482664 100644
--- a/BuildTools/SCons/Version.py
+++ b/BuildTools/SCons/Version.py
@@ -1,68 +1,68 @@
import subprocess, os, datetime, re, os.path
def getGitBuildVersion(root, project) :
- tag = git("describe --tags --exact --match \"" + project + "-*\"", root)
- if tag :
- return tag.rstrip()[len(project)+1:]
- tag = git("describe --tags --match \"" + project + "-*\"", root)
- if tag :
- m = re.match(project + "-(.*)-(.*)-(.*)", tag)
- if m :
- return m.group(1) + "-dev" + m.group(2)
- return None
+ tag = git("describe --tags --exact --match \"" + project + "-*\"", root)
+ if tag :
+ return tag.rstrip()[len(project)+1:]
+ tag = git("describe --tags --match \"" + project + "-*\"", root)
+ if tag :
+ m = re.match(project + "-(.*)-(.*)-(.*)", tag)
+ if m :
+ return m.group(1) + "-dev" + m.group(2)
+ return None
def git(cmd, root) :
- full_cmd = "git " + cmd
- p = subprocess.Popen(full_cmd, cwd=root, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
- gitVersion = p.stdout.read()
- # error = p.stderr.read()
- # if error:
- # print "Git error: " + error
- p.stdin.close()
- if p.wait() == 0 :
- return gitVersion
- return None
+ full_cmd = "git " + cmd
+ p = subprocess.Popen(full_cmd, cwd=root, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
+ gitVersion = p.stdout.read()
+ # error = p.stderr.read()
+ # if error:
+ # print "Git error: " + error
+ p.stdin.close()
+ if p.wait() == 0 :
+ return gitVersion
+ return None
def getBuildVersion(root, project) :
- versionFilename = os.path.join(root, "VERSION." + project)
- if os.path.isfile(versionFilename) :
- f = open(versionFilename)
- version = f.read().strip()
- f.close()
- return version
+ versionFilename = os.path.join(root, "VERSION." + project)
+ if os.path.isfile(versionFilename) :
+ f = open(versionFilename)
+ version = f.read().strip()
+ f.close()
+ return version
- gitVersion = getGitBuildVersion(root, project)
- if gitVersion :
- return gitVersion
+ gitVersion = getGitBuildVersion(root, project)
+ if gitVersion :
+ return gitVersion
- return datetime.date.today().strftime("%Y%m%d")
+ return datetime.date.today().strftime("%Y%m%d")
def convertToWindowsVersion(version) :
- version_match = re.match("(\d+)\.(\d+)(.*)", version)
- major = version_match and int(version_match.group(1)) or 0
- minor = version_match and int(version_match.group(2)) or 0
- if version_match and len(version_match.group(3)) == 0 :
- patch = 60000
- else :
- match = re.match("^beta(\d+)(.*)", version_match.group(3))
- build_string = ""
- if match :
- patch = 1000*int(match.group(1))
- build_string = match.group(2)
- else :
- rc_match = re.match("^rc(\d+)(.*)", version_match.group(3))
- if rc_match :
- patch = 10000*int(rc_match.group(1))
- build_string = rc_match.group(2)
- else :
- patch = 0
- alpha_match = re.match("^alpha(.*)", version_match.group(3))
- if alpha_match :
- build_string = alpha_match.group(1)
+ version_match = re.match("(\d+)\.(\d+)(.*)", version)
+ major = version_match and int(version_match.group(1)) or 0
+ minor = version_match and int(version_match.group(2)) or 0
+ if version_match and len(version_match.group(3)) == 0 :
+ patch = 60000
+ else :
+ match = re.match("^beta(\d+)(.*)", version_match.group(3))
+ build_string = ""
+ if match :
+ patch = 1000*int(match.group(1))
+ build_string = match.group(2)
+ else :
+ rc_match = re.match("^rc(\d+)(.*)", version_match.group(3))
+ if rc_match :
+ patch = 10000*int(rc_match.group(1))
+ build_string = rc_match.group(2)
+ else :
+ patch = 0
+ alpha_match = re.match("^alpha(.*)", version_match.group(3))
+ if alpha_match :
+ build_string = alpha_match.group(1)
- if len(build_string) > 0 :
- build_match = re.match("^-dev(\d+)", build_string)
- if build_match :
- patch += int(build_match.group(1))
+ if len(build_string) > 0 :
+ build_match = re.match("^-dev(\d+)", build_string)
+ if build_match :
+ patch += int(build_match.group(1))
- return (major, minor, patch)
+ return (major, minor, patch)
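
A few worked examples of the Windows version encoding implemented by convertToWindowsVersion; the version strings are hypothetical and the results follow from the logic above (assuming Version.py is importable):

    from Version import convertToWindowsVersion
    # Plain releases map to patch 60000, beta N to 1000*N, rc N to 10000*N,
    # and a trailing -devM offset is added on top.
    print convertToWindowsVersion("4.0")             # -> (4, 0, 60000)
    print convertToWindowsVersion("4.0beta2")        # -> (4, 0, 2000)
    print convertToWindowsVersion("4.0rc1")          # -> (4, 0, 10000)
    print convertToWindowsVersion("4.0beta2-dev34")  # -> (4, 0, 2034)
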
diff --git a/BuildTools/UpdateDebianChangelog.py b/BuildTools/UpdateDebianChangelog.py
index 20e72da..0693461 100755
--- a/BuildTools/UpdateDebianChangelog.py
+++ b/BuildTools/UpdateDebianChangelog.py
@@ -14,28 +14,28 @@ project = ""
last_version = ""
m = re.match("([\w-]+) \((.*)-\d+\)", last_version_line)
if m :
- project = m.group(1)
- last_version = m.group(2)
+ project = m.group(1)
+ last_version = m.group(2)
if project == "" :
- project="swift-im"
+ project="swift-im"
if "dev" in version :
- distribution = "development"
+ distribution = "development"
elif "beta" in version or "rc" in version :
- distribution = "beta development"
+ distribution = "beta development"
else :
- distribution = "release beta development"
+ distribution = "release beta development"
if last_version != version :
- changelog = open(sys.argv[1])
- changelog_data = changelog.read()
- changelog.close()
- changelog = open(sys.argv[1], "w")
- changelog.write(project + " (" + version + "-1)" + " " + distribution + "; urgency=low\n\n")
- changelog.write(" * Upstream development snapshot\n\n")
- changelog.write(" -- Swift Package Maintainer <packages@swift.im> " + email.utils.formatdate() + "\n")
- changelog.write("\n")
- changelog.write(changelog_data)
- changelog.close()
+ changelog = open(sys.argv[1])
+ changelog_data = changelog.read()
+ changelog.close()
+ changelog = open(sys.argv[1], "w")
+ changelog.write(project + " (" + version + "-1)" + " " + distribution + "; urgency=low\n\n")
+ changelog.write(" * Upstream development snapshot\n\n")
+ changelog.write(" -- Swift Package Maintainer <packages@swift.im> " + email.utils.formatdate() + "\n")
+ changelog.write("\n")
+ changelog.write(changelog_data)
+ changelog.close()
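
For clarity, the stanza this script prepends when the version changes looks roughly like the following (hypothetical version; the date comes from email.utils.formatdate()):

    # swift-im (4.0beta2-1) beta development; urgency=low
    #
    #  * Upstream development snapshot
    #
    #  -- Swift Package Maintainer <packages@swift.im> <output of email.utils.formatdate()>
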
diff --git a/BuildTools/scons2ninja.py b/BuildTools/scons2ninja.py
index c0eca0d..5d084cd 100755
--- a/BuildTools/scons2ninja.py
+++ b/BuildTools/scons2ninja.py
@@ -16,7 +16,7 @@ import re, os, os.path, subprocess, sys, fnmatch, shlex
################################################################################
# Helper methods & variables
################################################################################
-
+
SCRIPT = sys.argv[0]
SCONS_ARGS = ' '.join(sys.argv[1:])
@@ -24,203 +24,203 @@ SCONS_ARGS = ' '.join(sys.argv[1:])
BINARY_FLAGS = ["-framework", "-arch", "-x", "--output-format", "-isystem", "-include"]
if sys.platform == 'win32' :
- LIB_PREFIX = ""
- LIB_SUFFIX = ""
- EXE_SUFFIX = ".exe"
+ LIB_PREFIX = ""
+ LIB_SUFFIX = ""
+ EXE_SUFFIX = ".exe"
else :
- LIB_PREFIX = "lib"
- LIB_SUFFIX = ".a"
- EXE_SUFFIX = ""
+ LIB_PREFIX = "lib"
+ LIB_SUFFIX = ".a"
+ EXE_SUFFIX = ""
def is_regexp(x) :
- return 'match' in dir(x)
+ return 'match' in dir(x)
def is_list(l) :
- return type(l) is list
+ return type(l) is list
def escape(s) :
- return s.replace(' ', '$ ').replace(':', '$:')
+ return s.replace(' ', '$ ').replace(':', '$:')
def quote_spaces(s) :
- if ' ' in s :
- return '"' + s + '"'
- else :
- return s
-
+ if ' ' in s :
+ return '"' + s + '"'
+ else :
+ return s
+
def to_list(l) :
- if not l :
- return []
- if is_list(l) :
- return l
- return [l]
+ if not l :
+ return []
+ if is_list(l) :
+ return l
+ return [l]
def partition(l, f) :
- x = []
- y = []
- for v in l :
- if f(v) :
- x.append(v)
- else :
- y.append(v)
- return (x, y)
+ x = []
+ y = []
+ for v in l :
+ if f(v) :
+ x.append(v)
+ else :
+ y.append(v)
+ return (x, y)
def get_unary_flags(prefix, flags) :
- return [x[len(prefix):] for x in flags if x.lower().startswith(prefix.lower())]
+ return [x[len(prefix):] for x in flags if x.lower().startswith(prefix.lower())]
def extract_unary_flags(prefix, flags) :
- f1, f2 = partition(flags, lambda x : x.lower().startswith(prefix.lower()))
- return ([f[len(prefix):] for f in f1], f2)
+ f1, f2 = partition(flags, lambda x : x.lower().startswith(prefix.lower()))
+ return ([f[len(prefix):] for f in f1], f2)
def extract_unary_flag(prefix, flags) :
- flag, flags = extract_unary_flags(prefix, flags)
- return (flag[0], flags)
+ flag, flags = extract_unary_flags(prefix, flags)
+ return (flag[0], flags)
def extract_binary_flag(prefix, flags) :
- i = flags.index(prefix)
- flag = flags[i + 1]
- del flags[i]
- del flags[i]
- return (flag, flags)
+ i = flags.index(prefix)
+ flag = flags[i + 1]
+ del flags[i]
+ del flags[i]
+ return (flag, flags)
def get_non_flags(flags) :
- skip = False
- result = []
- for f in flags :
- if skip :
- skip = False
- elif f in BINARY_FLAGS :
- skip = True
- elif not f.startswith("/") and not f.startswith("-") :
- result.append(f)
- return result
+ skip = False
+ result = []
+ for f in flags :
+ if skip :
+ skip = False
+ elif f in BINARY_FLAGS :
+ skip = True
+ elif not f.startswith("/") and not f.startswith("-") :
+ result.append(f)
+ return result
def extract_non_flags(flags) :
- non_flags = get_non_flags(flags)
- return (non_flags, filter(lambda x : x not in non_flags, flags))
+ non_flags = get_non_flags(flags)
+ return (non_flags, filter(lambda x : x not in non_flags, flags))
def get_dependencies(target, build_targets) :
- result = []
- queue = list(dependencies.get(target, []))
- while len(queue) > 0 :
- n = queue.pop()
- # Filter out Value() results
- if n in build_targets or os.path.exists(n) :
- result.append(n)
- queue += list(dependencies.get(n, []))
- return result
+ result = []
+ queue = list(dependencies.get(target, []))
+ while len(queue) > 0 :
+ n = queue.pop()
+ # Filter out Value() results
+ if n in build_targets or os.path.exists(n) :
+ result.append(n)
+ queue += list(dependencies.get(n, []))
+ return result
def get_built_libs(libs, libpaths, outputs) :
- canonical_outputs = [os.path.abspath(p) for p in outputs]
- result = []
- for libpath in libpaths :
- for lib in libs :
- lib_libpath = os.path.join(libpath, LIB_PREFIX + lib + LIB_SUFFIX)
- if os.path.abspath(lib_libpath) in canonical_outputs :
- result.append(lib_libpath)
- return result
+ canonical_outputs = [os.path.abspath(p) for p in outputs]
+ result = []
+ for libpath in libpaths :
+ for lib in libs :
+ lib_libpath = os.path.join(libpath, LIB_PREFIX + lib + LIB_SUFFIX)
+ if os.path.abspath(lib_libpath) in canonical_outputs :
+ result.append(lib_libpath)
+ return result
def parse_tool_command(line) :
- command = shlex.split(line, False, False if sys.platform == 'win32' else True)
- flags = command[1:]
- tool = os.path.splitext(os.path.basename(command[0]))[0]
- if tool.startswith('clang++') or tool.startswith('g++') :
- tool = "cxx"
- elif tool.startswith('clang') or tool.startswith('gcc') :
- tool = "cc"
- if tool in ["cc", "cxx"] and not "-c" in flags :
- tool = "glink"
- tool = tool.replace('-qt4', '')
- return tool, command, flags
+ command = shlex.split(line, False, False if sys.platform == 'win32' else True)
+ flags = command[1:]
+ tool = os.path.splitext(os.path.basename(command[0]))[0]
+ if tool.startswith('clang++') or tool.startswith('g++') :
+ tool = "cxx"
+ elif tool.startswith('clang') or tool.startswith('gcc') :
+ tool = "cc"
+ if tool in ["cc", "cxx"] and not "-c" in flags :
+ tool = "glink"
+ tool = tool.replace('-qt4', '')
+ return tool, command, flags
def rglob(pattern, root = '.') :
- return [os.path.join(path, f) for path, dirs, files in os.walk(root) for f in fnmatch.filter(files, pattern)]
+ return [os.path.join(path, f) for path, dirs, files in os.walk(root) for f in fnmatch.filter(files, pattern)]
################################################################################
# Helper for building Ninja files
################################################################################
class NinjaBuilder :
- def __init__(self) :
- self._header = ""
- self.variables = ""
- self.rules = ""
- self._build = ""
- self.pools = ""
- self._flags = {}
- self.targets = []
-
- def header(self, text) :
- self._header += text + "\n"
-
- def rule(self, name, **kwargs) :
- self.rules += "rule " + name + "\n"
- for k, v in kwargs.iteritems() :
- self.rules += " " + str(k) + " = " + str(v) + "\n"
- self.rules += "\n"
-
- def pool(self, name, **kwargs) :
- self.pools += "pool " + name + "\n"
- for k, v in kwargs.iteritems() :
- self.pools += " " + str(k) + " = " + str(v) + "\n"
- self.pools += "\n"
-
- def variable(self, name, value) :
- self.variables += str(name) + " = " + str(value) + "\n"
-
- def build(self, target, rule, sources = None, **kwargs) :
- self._build += "build " + self.to_string(target) + ": " + rule
- if sources :
- self._build += " " + self.to_string(sources)
- if 'deps' in kwargs and kwargs['deps'] :
- self._build += " | " + self.to_string(kwargs["deps"])
- if 'order_deps' in kwargs :
- self._build += " || " + self.to_string(kwargs['order_deps'])
- self._build += "\n"
- for var, value in kwargs.iteritems() :
- if var in ['deps', 'order_deps'] :
- continue
- value = self.to_string(value, quote = True)
- if var.endswith("flags") :
- value = self.get_flags_variable(var, value)
- self._build += " " + var + " = " + value + "\n"
- self.targets += to_list(target)
-
- def header_targets(self) :
- return [x for x in self.targets if x.endswith('.h') or x.endswith('.hh')]
-
- def serialize(self) :
- result = ""
- result += self._header + "\n"
- result += self.variables + "\n"
- for prefix in self._flags.values() :
- for k, v in prefix.iteritems() :
- result += v + " = " + k + "\n"
- result += "\n"
- result += self.pools + "\n"
- result += self.rules + "\n"
- result += self._build + "\n"
- return result
-
- def to_string(self, lst, quote = False) :
- if is_list(lst) :
- if quote :
- return ' '.join([quote_spaces(x) for x in lst])
- else :
- return ' '.join([escape(x) for x in lst])
- if is_regexp(lst) :
- return ' '.join([escape(x) for x in self.targets if lst.match(x)])
- return escape(lst)
-
- def get_flags_variable(self, flags_type, flags) :
- if len(flags) == 0 :
- return ''
- if flags_type not in self._flags :
- self._flags[flags_type] = {}
- type_flags = self._flags[flags_type]
- if flags not in type_flags :
- type_flags[flags] = flags_type + "_" + str(len(type_flags))
- return "$" + type_flags[flags]
-
+ def __init__(self) :
+ self._header = ""
+ self.variables = ""
+ self.rules = ""
+ self._build = ""
+ self.pools = ""
+ self._flags = {}
+ self.targets = []
+
+ def header(self, text) :
+ self._header += text + "\n"
+
+ def rule(self, name, **kwargs) :
+ self.rules += "rule " + name + "\n"
+ for k, v in kwargs.iteritems() :
+ self.rules += " " + str(k) + " = " + str(v) + "\n"
+ self.rules += "\n"
+
+ def pool(self, name, **kwargs) :
+ self.pools += "pool " + name + "\n"
+ for k, v in kwargs.iteritems() :
+ self.pools += " " + str(k) + " = " + str(v) + "\n"
+ self.pools += "\n"
+
+ def variable(self, name, value) :
+ self.variables += str(name) + " = " + str(value) + "\n"
+
+ def build(self, target, rule, sources = None, **kwargs) :
+ self._build += "build " + self.to_string(target) + ": " + rule
+ if sources :
+ self._build += " " + self.to_string(sources)
+ if 'deps' in kwargs and kwargs['deps'] :
+ self._build += " | " + self.to_string(kwargs["deps"])
+ if 'order_deps' in kwargs :
+ self._build += " || " + self.to_string(kwargs['order_deps'])
+ self._build += "\n"
+ for var, value in kwargs.iteritems() :
+ if var in ['deps', 'order_deps'] :
+ continue
+ value = self.to_string(value, quote = True)
+ if var.endswith("flags") :
+ value = self.get_flags_variable(var, value)
+ self._build += " " + var + " = " + value + "\n"
+ self.targets += to_list(target)
+
+ def header_targets(self) :
+ return [x for x in self.targets if x.endswith('.h') or x.endswith('.hh')]
+
+ def serialize(self) :
+ result = ""
+ result += self._header + "\n"
+ result += self.variables + "\n"
+ for prefix in self._flags.values() :
+ for k, v in prefix.iteritems() :
+ result += v + " = " + k + "\n"
+ result += "\n"
+ result += self.pools + "\n"
+ result += self.rules + "\n"
+ result += self._build + "\n"
+ return result
+
+ def to_string(self, lst, quote = False) :
+ if is_list(lst) :
+ if quote :
+ return ' '.join([quote_spaces(x) for x in lst])
+ else :
+ return ' '.join([escape(x) for x in lst])
+ if is_regexp(lst) :
+ return ' '.join([escape(x) for x in self.targets if lst.match(x)])
+ return escape(lst)
+
+ def get_flags_variable(self, flags_type, flags) :
+ if len(flags) == 0 :
+ return ''
+ if flags_type not in self._flags :
+ self._flags[flags_type] = {}
+ type_flags = self._flags[flags_type]
+ if flags not in type_flags :
+ type_flags[flags] = flags_type + "_" + str(len(type_flags))
+ return "$" + type_flags[flags]
+
################################################################################
# Configuration
@@ -231,7 +231,7 @@ scons_cmd = "scons"
scons_dependencies = ['SConstruct'] + rglob('SConscript')
def ninja_custom_command(ninja, line) :
- return False
+ return False
CONFIGURATION_FILE = '.scons2ninja.conf'
execfile(CONFIGURATION_FILE)
@@ -248,104 +248,104 @@ ninja = NinjaBuilder()
ninja.pool('scons_pool', depth = 1)
if sys.platform == 'win32' :
- ninja.rule('cl',
- deps = 'msvc',
- command = '$cl /showIncludes $clflags -c $in /Fo$out',
- description = 'CXX $out')
-
- ninja.rule('link',
- command = '$link $in $linkflags $libs /out:$out',
- description = 'LINK $out')
-
- ninja.rule('link_mt',
- command = '$link $in $linkflags $libs /out:$out ; $mt $mtflags',
- description = 'LINK $out')
-
- ninja.rule('lib',
- command = '$lib $libflags /out:$out $in',
- description = 'AR $out')
-
- ninja.rule('rc',
- command = '$rc $rcflags /Fo$out $in',
- description = 'RC $out')
-
- # SCons doesn't touch files if they didn't change, which makes
- # ninja rebuild the file over and over again. There's no touch on Windows :(
- # Could implement it with a script, but for now, delete the file if
- # this problem occurs. I'll fix it if it occurs too much.
- ninja.rule('scons',
- command = scons_cmd + " ${scons_args} $out",
- pool = 'scons_pool',
- description = 'GEN $out')
-
- ninja.rule('install', command = 'cmd /c copy $in $out')
- ninja.rule('run', command = '$in')
+ ninja.rule('cl',
+ deps = 'msvc',
+ command = '$cl /showIncludes $clflags -c $in /Fo$out',
+ description = 'CXX $out')
+
+ ninja.rule('link',
+ command = '$link $in $linkflags $libs /out:$out',
+ description = 'LINK $out')
+
+ ninja.rule('link_mt',
+ command = '$link $in $linkflags $libs /out:$out ; $mt $mtflags',
+ description = 'LINK $out')
+
+ ninja.rule('lib',
+ command = '$lib $libflags /out:$out $in',
+ description = 'AR $out')
+
+ ninja.rule('rc',
+ command = '$rc $rcflags /Fo$out $in',
+ description = 'RC $out')
+
+ # SCons doesn't touch files if they didn't change, which makes
+ # ninja rebuild the file over and over again. There's no touch on Windows :(
+ # Could implement it with a script, but for now, delete the file if
+ # this problem occurs. I'll fix it if it occurs too much.
+ ninja.rule('scons',
+ command = scons_cmd + " ${scons_args} $out",
+ pool = 'scons_pool',
+ description = 'GEN $out')
+
+ ninja.rule('install', command = 'cmd /c copy $in $out')
+ ninja.rule('run', command = '$in')
else :
- ninja.rule('cxx',
- deps = 'gcc',
- depfile = '$out.d',
- command = '$cxx -MMD -MF $out.d $cxxflags -c $in -o $out',
- description = 'CXX $out')
+ ninja.rule('cxx',
+ deps = 'gcc',
+ depfile = '$out.d',
+ command = '$cxx -MMD -MF $out.d $cxxflags -c $in -o $out',
+ description = 'CXX $out')
- ninja.rule('cc',
- deps = 'gcc',
- depfile = '$out.d',
- command = '$cc -MMD -MF $out.d $ccflags -c $in -o $out',
- description = 'CC $out')
+ ninja.rule('cc',
+ deps = 'gcc',
+ depfile = '$out.d',
+ command = '$cc -MMD -MF $out.d $ccflags -c $in -o $out',
+ description = 'CC $out')
- ninja.rule('link',
- command = '$glink -o $out $in $linkflags',
- description = 'LINK $out')
+ ninja.rule('link',
+ command = '$glink -o $out $in $linkflags',
+ description = 'LINK $out')
- ninja.rule('ar',
- command = 'ar $arflags $out $in && ranlib $out',
- description = 'AR $out')
+ ninja.rule('ar',
+ command = 'ar $arflags $out $in && ranlib $out',
+ description = 'AR $out')
- # SCons doesn't touch files if they didn't change, which makes
- # ninja rebuild the file over and over again. Touching solves this.
- ninja.rule('scons',
- command = scons_cmd + " $out && touch $out",
- pool = 'scons_pool',
- description = 'GEN $out')
+ # SCons doesn't touch files if they didn't change, which makes
+ # ninja rebuild the file over and over again. Touching solves this.
+ ninja.rule('scons',
+ command = scons_cmd + " $out && touch $out",
+ pool = 'scons_pool',
+ description = 'GEN $out')
- ninja.rule('install', command = 'install $in $out')
- ninja.rule('run', command = './$in')
+ ninja.rule('install', command = 'install $in $out')
+ ninja.rule('run', command = './$in')
ninja.rule('moc',
- command = '$moc $mocflags -o $out $in',
- description = 'MOC $out')
+ command = '$moc $mocflags -o $out $in',
+ description = 'MOC $out')
ninja.rule('rcc',
- command = '$rcc $rccflags -name $name -o $out $in',
- description = 'RCC $out')
+ command = '$rcc $rccflags -name $name -o $out $in',
+ description = 'RCC $out')
ninja.rule('uic',
- command = '$uic $uicflags -o $out $in',
- description = 'UIC $out')
+ command = '$uic $uicflags -o $out $in',
+ description = 'UIC $out')
ninja.rule('lrelease',
- command = '$lrelease $lreleaseflags $in -qm $out',
- description = 'LRELEASE $out')
+ command = '$lrelease $lreleaseflags $in -qm $out',
+ description = 'LRELEASE $out')
ninja.rule('ibtool',
- command = '$ibtool $ibtoolflags --compile $out $in',
- description = 'IBTOOL $out')
+ command = '$ibtool $ibtoolflags --compile $out $in',
+ description = 'IBTOOL $out')
ninja.rule('dsymutil',
- command = '$dsymutil $dsymutilflags -o $out $in',
- description = 'DSYMUTIL $out')
+ command = '$dsymutil $dsymutilflags -o $out $in',
+ description = 'DSYMUTIL $out')
ninja.rule('generator',
- command = "python " + SCRIPT + " ${scons_args}",
- depfile = ".scons2ninja.deps",
- pool = 'scons_pool',
- generator = '1',
- description = 'Regenerating build.ninja')
+ command = "python " + SCRIPT + " ${scons_args}",
+ depfile = ".scons2ninja.deps",
+ pool = 'scons_pool',
+ generator = '1',
+ description = 'Regenerating build.ninja')
ninja.rule('sdef',
- command = 'sdef $in | sdp -fh --basename $basename -o $outdir',
- description = 'SDEF $out')
+ command = 'sdef $in | sdp -fh --basename $basename -o $outdir',
+ description = 'SDEF $out')
################################################################################
# Build Statements
@@ -365,237 +365,237 @@ stage = 'preamble'
skip_nth_line = -1
stack = ['.']
for line in f.stdout :
- line = line.rstrip()
-
- # Skip lines if requested from previous command
- if skip_nth_line >= 0 :
- skip_nth_line -= 1
- if skip_nth_line == 0 :
- continue
-
- if line.startswith('scons: done building targets') :
- break
-
- if stage == "preamble" :
- # Pass all lines from the SCons configuration step to output
- if re.match("^scons: Building targets ...", line) :
- stage = "build"
- else :
- print line
-
- elif stage == "build" :
- if line.startswith('+-') :
- stage = "dependencies"
- elif re.match("^Using tempfile", line) :
- # Ignore response files from MSVS
- skip_nth_line = 2
- else :
- build_lines.append(line)
-
- # Already detect targets that will need 'mt'
- tool, _, flags = parse_tool_command(line)
- if tool == 'mt' :
- target = get_unary_flags("-outputresource:", flags)[0]
- target = target[0:target.index(';')]
- mtflags[target] = flags
-
- elif stage == "dependencies" :
- if not re.match('^[\s|]+\+\-', line) :
- # Work around bug in SCons that splits output over multiple lines
- continue
-
- level = line.index('+-') / 2
- filename = line[level*2+2:]
- if filename.startswith('[') :
- filename = filename[1:-1]
-
- # Check if we use the 'fixed' format which escapes filenamenames
- if filename.startswith('\'') and filename.endswith('\'') :
- filename = eval(filename)
-
- if level < len(stack) :
- stack = stack[0:level]
- elif level > len(stack) :
- if level != len(stack) + 1 :
- raise Exception("Internal Error" )
- stack.append(previous_filename)
-
- # Skip absolute paths
- if not os.path.isabs(filename) :
- target = stack[-1]
- if target not in dependencies :
- dependencies[target] = []
- dependencies[target].append(filename)
- previous_filename = filename
+ line = line.rstrip()
+
+ # Skip lines if requested from previous command
+ if skip_nth_line >= 0 :
+ skip_nth_line -= 1
+ if skip_nth_line == 0 :
+ continue
+
+ if line.startswith('scons: done building targets') :
+ break
+
+ if stage == "preamble" :
+ # Pass all lines from the SCons configuration step to output
+ if re.match("^scons: Building targets ...", line) :
+ stage = "build"
+ else :
+ print line
+
+ elif stage == "build" :
+ if line.startswith('+-') :
+ stage = "dependencies"
+ elif re.match("^Using tempfile", line) :
+ # Ignore response files from MSVS
+ skip_nth_line = 2
+ else :
+ build_lines.append(line)
+
+ # Already detect targets that will need 'mt'
+ tool, _, flags = parse_tool_command(line)
+ if tool == 'mt' :
+ target = get_unary_flags("-outputresource:", flags)[0]
+ target = target[0:target.index(';')]
+ mtflags[target] = flags
+
+ elif stage == "dependencies" :
+ if not re.match('^[\s|]+\+\-', line) :
+ # Work around bug in SCons that splits output over multiple lines
+ continue
+
+ level = line.index('+-') / 2
+ filename = line[level*2+2:]
+ if filename.startswith('[') :
+ filename = filename[1:-1]
+
+        # Check if we use the 'fixed' format which escapes filenames
+ if filename.startswith('\'') and filename.endswith('\'') :
+ filename = eval(filename)
+
+ if level < len(stack) :
+ stack = stack[0:level]
+ elif level > len(stack) :
+ if level != len(stack) + 1 :
+ raise Exception("Internal Error" )
+ stack.append(previous_filename)
+
+ # Skip absolute paths
+ if not os.path.isabs(filename) :
+ target = stack[-1]
+ if target not in dependencies :
+ dependencies[target] = []
+ dependencies[target].append(filename)
+ previous_filename = filename
if f.wait() != 0 :
- print "Error calling '" + scons_generate_cmd + "'"
- print f.stderr.read()
- exit(-1)
+ print "Error calling '" + scons_generate_cmd + "'"
+ print f.stderr.read()
+ exit(-1)
# Pass 2: Parse build rules
tools = {}
for line in build_lines :
- # Custom python function
- m = re.match('^(\w+)\(\[([^\]]*)\]', line)
- if m :
- out = [x[1:-1] for x in m.group(2).split(',')]
- for x in out :
- # 'Note' = To be more correct, deps should also include $scons_dependencies,
- # but this regenerates a bit too often, so leaving it out for now.
- ninja.build(x, 'scons', None, deps = sorted(get_dependencies(x, ninja.targets)))
- continue
-
-
- # TextFile
- m = re.match("^Creating '([^']+)'", line)
- if m :
- out = m.group(1)
- # Note: To be more correct, deps should also include $scons_dependencies,
- # but this regenerates a bit too often, so leaving it out for now.
- ninja.build(out, 'scons', None, deps = sorted(get_dependencies(out, ninja.targets)))
- continue
-
- # Install
- m = re.match('^Install file: "(.*)" as "(.*)"', line)
- if m :
- ninja.build(m.group(2), 'install', m.group(1))
- continue
-
- m = re.match('^Install directory: "(.*)" as "(.*)"', line)
- if m :
- for source in rglob('*', m.group(1)) :
- if os.path.isdir(source) :
- continue
- target = os.path.join(m.group(2), os.path.relpath(source, m.group(1)))
- ninja.build(target, 'install', source)
- continue
-
- # Tools
- tool, command, flags = parse_tool_command(line)
- tools[tool] = command[0]
-
- ############################################################
- # clang/gcc tools
- ############################################################
-
- if tool == 'cc':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'cc', files, order_deps = '_generated_headers', ccflags = flags)
-
- elif tool == 'cxx':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'cxx', files, order_deps = '_generated_headers', cxxflags = flags)
-
- elif tool == 'glink':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- libs = get_unary_flags('-l', flags)
- libpaths = get_unary_flags("-L", flags)
- deps = get_built_libs(libs, libpaths, ninja.targets)
- ninja.build(out, 'link', files, deps = sorted(deps), linkflags = flags)
-
- elif tool == 'ar':
- objects, flags = partition(flags, lambda x: x.endswith('.o'))
- libs, flags = partition(flags, lambda x: x.endswith('.a'))
- out = libs[0]
- ninja.build(out, 'ar', objects, arflags = flags)
-
- elif tool == 'ranlib':
- pass
-
-
- ############################################################
- # MSVC tools
- ############################################################
-
- elif tool == 'cl':
- out, flags = extract_unary_flag("/Fo", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'cl', files, order_deps = '_generated_headers', clflags = flags)
-
- elif tool == 'lib':
- out, flags = extract_unary_flag("/out:", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'lib', files, libflags = flags)
-
- elif tool == 'link':
- objects, flags = partition(flags, lambda x: x.endswith('.obj') or x.endswith('.res'))
- out, flags = extract_unary_flag("/out:", flags)
- libs, flags = partition(flags, lambda x: not x.startswith("/") and x.endswith(".lib"))
- libpaths = get_unary_flags("/libpath:", flags)
- deps = get_built_libs(libs, libpaths, ninja.targets)
- if out in mtflags :
- ninja.build(out, 'link_mt', objects, deps = sorted(deps),
- libs = libs, linkflags = flags, mtflags = mtflags[out])
- else :
- ninja.build(out, 'link', objects, deps = sorted(deps),
- libs = libs, linkflags = flags)
-
- elif tool == 'rc':
- out, flags = extract_unary_flag("/fo", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'rc', files[0], order_deps = '_generated_headers', rcflags = flags)
-
- elif tool == 'mt':
- # Already handled
- pass
-
- ############################################################
- # Qt tools
- ############################################################
-
- elif tool == 'moc':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'moc', files, mocflags = flags)
-
- elif tool == 'uic':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'uic', files, uicflags = flags)
-
- elif tool == 'lrelease':
- out, flags = extract_binary_flag("-qm", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'lrelease', files, lreleaseflags = flags)
-
- elif tool == 'rcc':
- out, flags = extract_binary_flag("-o", flags)
- name, flags = extract_binary_flag("-name", flags)
- files, flags = extract_non_flags(flags)
- deps = list(set(get_dependencies(out, ninja.targets)) - set(files))
- ninja.build(out, 'rcc', files, deps = sorted(deps), name = name, rccflags = flags)
-
- ############################################################
- # OS X tools
- ############################################################
-
- elif tool == 'ibtool':
- out, flags = extract_binary_flag("--compile", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'ibtool', files, ibtoolflags = flags)
-
- elif tool == 'dsymutil':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'dsymutil', files, dsymutilflags = flags)
-
- elif tool == 'sdef' :
- source = flags[0];
- outdir, flags = extract_binary_flag("-o", flags)
- basename, flags = extract_binary_flag("--basename", flags)
- ninja.build(os.path.join(outdir, basename + ".h"), 'sdef', [source],
- basename = basename,
- outdir = outdir)
-
-
- elif not ninja_custom_command(ninja, line) :
- raise Exception("Unknown tool: '" + line + "'")
-
-
+ # Custom python function
+ m = re.match('^(\w+)\(\[([^\]]*)\]', line)
+ if m :
+ out = [x[1:-1] for x in m.group(2).split(',')]
+ for x in out :
+            # Note: To be more correct, deps should also include $scons_dependencies,
+ # but this regenerates a bit too often, so leaving it out for now.
+ ninja.build(x, 'scons', None, deps = sorted(get_dependencies(x, ninja.targets)))
+ continue
+
+
+ # TextFile
+ m = re.match("^Creating '([^']+)'", line)
+ if m :
+ out = m.group(1)
+ # Note: To be more correct, deps should also include $scons_dependencies,
+ # but this regenerates a bit too often, so leaving it out for now.
+ ninja.build(out, 'scons', None, deps = sorted(get_dependencies(out, ninja.targets)))
+ continue
+
+ # Install
+ m = re.match('^Install file: "(.*)" as "(.*)"', line)
+ if m :
+ ninja.build(m.group(2), 'install', m.group(1))
+ continue
+
+ m = re.match('^Install directory: "(.*)" as "(.*)"', line)
+ if m :
+ for source in rglob('*', m.group(1)) :
+ if os.path.isdir(source) :
+ continue
+ target = os.path.join(m.group(2), os.path.relpath(source, m.group(1)))
+ ninja.build(target, 'install', source)
+ continue
+
+ # Tools
+ tool, command, flags = parse_tool_command(line)
+ tools[tool] = command[0]
+
+ ############################################################
+ # clang/gcc tools
+ ############################################################
+
+ if tool == 'cc':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'cc', files, order_deps = '_generated_headers', ccflags = flags)
+
+ elif tool == 'cxx':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'cxx', files, order_deps = '_generated_headers', cxxflags = flags)
+
+ elif tool == 'glink':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ libs = get_unary_flags('-l', flags)
+ libpaths = get_unary_flags("-L", flags)
+ deps = get_built_libs(libs, libpaths, ninja.targets)
+ ninja.build(out, 'link', files, deps = sorted(deps), linkflags = flags)
+
+ elif tool == 'ar':
+ objects, flags = partition(flags, lambda x: x.endswith('.o'))
+ libs, flags = partition(flags, lambda x: x.endswith('.a'))
+ out = libs[0]
+ ninja.build(out, 'ar', objects, arflags = flags)
+
+ elif tool == 'ranlib':
+ pass
+
+
+ ############################################################
+ # MSVC tools
+ ############################################################
+
+ elif tool == 'cl':
+ out, flags = extract_unary_flag("/Fo", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'cl', files, order_deps = '_generated_headers', clflags = flags)
+
+ elif tool == 'lib':
+ out, flags = extract_unary_flag("/out:", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'lib', files, libflags = flags)
+
+ elif tool == 'link':
+ objects, flags = partition(flags, lambda x: x.endswith('.obj') or x.endswith('.res'))
+ out, flags = extract_unary_flag("/out:", flags)
+ libs, flags = partition(flags, lambda x: not x.startswith("/") and x.endswith(".lib"))
+ libpaths = get_unary_flags("/libpath:", flags)
+ deps = get_built_libs(libs, libpaths, ninja.targets)
+ if out in mtflags :
+ ninja.build(out, 'link_mt', objects, deps = sorted(deps),
+ libs = libs, linkflags = flags, mtflags = mtflags[out])
+ else :
+ ninja.build(out, 'link', objects, deps = sorted(deps),
+ libs = libs, linkflags = flags)
+
+ elif tool == 'rc':
+ out, flags = extract_unary_flag("/fo", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'rc', files[0], order_deps = '_generated_headers', rcflags = flags)
+
+ elif tool == 'mt':
+ # Already handled
+ pass
+
+ ############################################################
+ # Qt tools
+ ############################################################
+
+ elif tool == 'moc':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'moc', files, mocflags = flags)
+
+ elif tool == 'uic':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'uic', files, uicflags = flags)
+
+ elif tool == 'lrelease':
+ out, flags = extract_binary_flag("-qm", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'lrelease', files, lreleaseflags = flags)
+
+ elif tool == 'rcc':
+ out, flags = extract_binary_flag("-o", flags)
+ name, flags = extract_binary_flag("-name", flags)
+ files, flags = extract_non_flags(flags)
+ deps = list(set(get_dependencies(out, ninja.targets)) - set(files))
+ ninja.build(out, 'rcc', files, deps = sorted(deps), name = name, rccflags = flags)
+
+ ############################################################
+ # OS X tools
+ ############################################################
+
+ elif tool == 'ibtool':
+ out, flags = extract_binary_flag("--compile", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'ibtool', files, ibtoolflags = flags)
+
+ elif tool == 'dsymutil':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'dsymutil', files, dsymutilflags = flags)
+
+ elif tool == 'sdef' :
+ source = flags[0];
+ outdir, flags = extract_binary_flag("-o", flags)
+ basename, flags = extract_binary_flag("--basename", flags)
+ ninja.build(os.path.join(outdir, basename + ".h"), 'sdef', [source],
+ basename = basename,
+ outdir = outdir)
+
+
+ elif not ninja_custom_command(ninja, line) :
+ raise Exception("Unknown tool: '" + line + "'")
+
+
 # Phony target for all generated headers, used as an order-only dependency from all C/C++ sources
ninja.build('_generated_headers', 'phony', ninja.header_targets())
@@ -607,11 +607,11 @@ ninja.header("# This file is generated by " + SCRIPT)
ninja.variable("ninja_required_version", "1.3")
ninja.variable("scons_args", SCONS_ARGS)
for k, v in tools.iteritems() :
- ninja.variable(k, v)
+ ninja.variable(k, v)
# Extra customizations
if 'ninja_post' in dir() :
- ninja_post(ninja)
+ ninja_post(ninja)
################################################################################