Diffstat (limited to 'BuildTools')
-rwxr-xr-x  BuildTools/CheckHeaders.py | 46
-rwxr-xr-x  BuildTools/CheckTabs.py | 36
-rwxr-xr-x  BuildTools/CheckTests.py | 48
-rwxr-xr-x  BuildTools/CheckTranslations.py | 70
-rwxr-xr-x  BuildTools/Copyright/find-contribs.py | 80
-rwxr-xr-x  BuildTools/Copyrighter.py | 276
-rwxr-xr-x  BuildTools/Coverage/FilterLCovData.py | 24
-rwxr-xr-x  BuildTools/Coverage/GenerateSummary.py | 32
-rwxr-xr-x  BuildTools/Cppcheck.sh | 36
-rw-r--r--  BuildTools/CrashReportAnalysis/WindowsMinidumpAnalyse.py | 134
-rw-r--r--  BuildTools/DocBook/SCons/DocBook.py | 162
-rw-r--r--  BuildTools/DocBook/SCons/FO.py | 64
-rw-r--r--  BuildTools/DocBook/SCons/XSLT.py | 106
-rwxr-xr-x  BuildTools/FilterScanBuildResults.py | 38
-rwxr-xr-x  BuildTools/FixIncludes.py | 198
-rwxr-xr-x  BuildTools/GenerateAppCastFeeds.py | 153
-rwxr-xr-x  BuildTools/GetBuildVersion.py | 16
-rwxr-xr-x  BuildTools/Gource/GetGravatars.py | 56
-rwxr-xr-x  BuildTools/InstallSwiftDependencies.sh | 48
-rw-r--r--  BuildTools/SCons/SConscript.boot | 511
-rw-r--r--  BuildTools/SCons/SConstruct | 920
-rw-r--r--  BuildTools/SCons/Tools/AppBundle.py | 108
-rw-r--r--  BuildTools/SCons/Tools/BuildVersion.py | 10
-rw-r--r--  BuildTools/SCons/Tools/DoxyGen.py | 38
-rw-r--r--  BuildTools/SCons/Tools/Flags.py | 16
-rw-r--r--  BuildTools/SCons/Tools/InstallWithSymLinks.py | 114
-rw-r--r--  BuildTools/SCons/Tools/Nib.py | 14
-rw-r--r--  BuildTools/SCons/Tools/ReplacePragmaOnce.py | 36
-rw-r--r--  BuildTools/SCons/Tools/SLOCCount.py | 22
-rw-r--r--  BuildTools/SCons/Tools/Test.py | 96
-rw-r--r--  BuildTools/SCons/Tools/WindowsBundle.py | 222
-rw-r--r--  BuildTools/SCons/Tools/WriteVal.py | 16
-rw-r--r--  BuildTools/SCons/Tools/nsis.py | 48
-rw-r--r--  BuildTools/SCons/Tools/qt4.py | 1015
-rw-r--r--  BuildTools/SCons/Tools/textfile.py | 218
-rw-r--r--  BuildTools/SCons/Tools/wix.py | 86
-rw-r--r--  BuildTools/SCons/Version.py | 110
-rwxr-xr-x  BuildTools/UpdateDebianChangelog.py | 32
-rwxr-xr-x  BuildTools/scons2ninja.py | 918
39 files changed, 3338 insertions(+), 2835 deletions(-)
diff --git a/BuildTools/CheckHeaders.py b/BuildTools/CheckHeaders.py
index 8d20f4e..79ff85c 100755
--- a/BuildTools/CheckHeaders.py
+++ b/BuildTools/CheckHeaders.py
@@ -3,18 +3,18 @@
import os, sys
FORBIDDEN_INCLUDES = [
- ("iostream", ["Swiften/Base/format.h"]),
- ("Base/Log.h", []),
- ("Base/format.h", []),
- ("algorithm", ["Swiften/Base/Algorithm.h", "Swiften/Base/SafeAllocator.h", "Swiften/Base/Listenable.h", "Swiften/Base/Concat.h"]),
- ("boost/bind.hpp", ["Swiften/Base/Listenable.h", "Swiften/FileTransfer/S5BTransportSession.h"]),
- ("boost/filesystem.hpp", []),
- ("Base/foreach.h", []),
- ("boost/date_time/date_time.hpp", []),
- ("boost/filesystem/filesystem.hpp", []),
-
- # To avoid
- ("Base/Algorithm.h", ["Swiften/StringCodecs/HMAC.h"]),
+ ("iostream", ["Swiften/Base/format.h"]),
+ ("Base/Log.h", []),
+ ("Base/format.h", []),
+ ("algorithm", ["Swiften/Base/Algorithm.h", "Swiften/Base/SafeAllocator.h", "Swiften/Base/Listenable.h", "Swiften/Base/Concat.h"]),
+ ("boost/bind.hpp", ["Swiften/Base/Listenable.h", "Swiften/FileTransfer/S5BTransportSession.h"]),
+ ("boost/filesystem.hpp", []),
+ ("Base/foreach.h", []),
+ ("boost/date_time/date_time.hpp", []),
+ ("boost/filesystem/filesystem.hpp", []),
+
+ # To avoid
+ ("Base/Algorithm.h", ["Swiften/StringCodecs/HMAC.h"]),
]
foundBadHeaders = False
@@ -22,21 +22,21 @@ foundBadHeaders = False
filename = sys.argv[1]
if "3rdParty" in filename or ".sconf" in filename or ".framework" in filename or not filename.endswith(".h") :
- sys.exit(0)
+ sys.exit(0)
if not "Swiften" in filename :
- sys.exit(0)
+ sys.exit(0)
if filename.endswith("Swiften.h") :
- sys.exit(0)
+ sys.exit(0)
file = open(filename, "r")
for line in file.readlines() :
- if not "#include" in line :
- continue
- if "Base/Log.h" in filename :
- continue
- for forbiddenInclude, ignores in FORBIDDEN_INCLUDES :
- if forbiddenInclude in line and len([x for x in ignores if x in filename]) == 0 :
- print "Found " + forbiddenInclude + " include in " + filename
- foundBadHeaders = True
+ if not "#include" in line :
+ continue
+ if "Base/Log.h" in filename :
+ continue
+ for forbiddenInclude, ignores in FORBIDDEN_INCLUDES :
+ if forbiddenInclude in line and len([x for x in ignores if x in filename]) == 0 :
+ print "Found " + forbiddenInclude + " include in " + filename
+ foundBadHeaders = True
sys.exit(foundBadHeaders)
diff --git a/BuildTools/CheckTabs.py b/BuildTools/CheckTabs.py
index e2029b0..f0ec0ab 100755
--- a/BuildTools/CheckTabs.py
+++ b/BuildTools/CheckTabs.py
@@ -5,27 +5,15 @@ import os, sys
foundExpandedTabs = False
filename = sys.argv[1]
-if (filename.endswith(".cpp") or filename.endswith(".h") or filename.endswith(".py") or filename.endswith("SConscript") or filename.endswith("SConscript.boot") or filename.endswith("SConstruct")) and not "3rdParty" in filename :
- file = open(filename, "r")
- contents = []
- contentsChanged = False
- for line in file.readlines() :
- newline = ""
- previousChar = None
- pastInitialSpace = False
- for char in line :
- if not pastInitialSpace :
- if char == ' ' and previousChar == ' ' :
- contentsChanged = True
- previousChar = '\t'
- continue
- pastInitialSpace = (char != ' ')
- if previousChar :
- newline += previousChar
- previousChar = char
- if previousChar :
- newline += previousChar
- contents.append(newline)
- file.close()
- if contentsChanged :
- sys.exit(-1)
+if (filename.endswith(".cpp") or filename.endswith(".h") or filename.endswith(".c") or filename.endswith(".mm") or filename.endswith(".ipp") or filename.endswith(".hpp") or filename.endswith(".py") or filename.endswith("SConscript") or filename.endswith("SConscript.boot") or filename.endswith("SConstruct")) and not "3rdParty" in filename :
+ file = open(filename, "r")
+ contents = []
+ contentsChanged = False
+ for line in file.readlines() :
+ if "\t" in line:
+ print("File %s contains hard tabs. This is not allowed." % filename)
+ file.close()
+ sys.exit(-1)
+ file.close()
+ if contentsChanged :
+ sys.exit(-1)
diff --git a/BuildTools/CheckTests.py b/BuildTools/CheckTests.py
index 0ea56bd..7f160e7 100755
--- a/BuildTools/CheckTests.py
+++ b/BuildTools/CheckTests.py
@@ -5,29 +5,29 @@ import os, sys, re
foundUnregisteredTests = False
for (path, dirs, files) in os.walk(".") :
- if not "3rdParty" in path :
- for filename in [os.path.join(path, file) for file in files if file.endswith("Test.cpp") and file != "IdleQuerierTest.cpp" and file != "NotifierTest.cpp" and file != "ClientTest.cpp" and file != "ConnectivityTest.cpp" and file != "ReconnectTest.cpp"] :
- file = open(filename, "r")
- isRegistered = False
- registeredTests = set()
- declaredTests = set()
- for line in file.readlines() :
- m = re.match("\s*CPPUNIT_TEST_SUITE_REGISTRATION\((.*)\)", line)
- if m :
- isRegistered = True
- m = re.match("\s*CPPUNIT_TEST\((.*)\)", line)
- if m :
- registeredTests.add(m.group(1))
- continue
- m = re.match("\s*void (test.*)\(\)", line)
- if m :
- declaredTests.add(m.group(1))
- if not isRegistered :
- print filename + ": Registration missing"
- foundUnregisteredTests = True
- if registeredTests - declaredTests != set([]) :
- print filename + ": " + str(registeredTests - declaredTests)
- foundUnregisteredTests = True
- file.close()
+ if not "3rdParty" in path :
+ for filename in [os.path.join(path, file) for file in files if file.endswith("Test.cpp") and file != "IdleQuerierTest.cpp" and file != "NotifierTest.cpp" and file != "ClientTest.cpp" and file != "ConnectivityTest.cpp" and file != "ReconnectTest.cpp"] :
+ file = open(filename, "r")
+ isRegistered = False
+ registeredTests = set()
+ declaredTests = set()
+ for line in file.readlines() :
+ m = re.match("\s*CPPUNIT_TEST_SUITE_REGISTRATION\((.*)\)", line)
+ if m :
+ isRegistered = True
+ m = re.match("\s*CPPUNIT_TEST\((.*)\)", line)
+ if m :
+ registeredTests.add(m.group(1))
+ continue
+ m = re.match("\s*void (test.*)\(\)", line)
+ if m :
+ declaredTests.add(m.group(1))
+ if not isRegistered :
+ print filename + ": Registration missing"
+ foundUnregisteredTests = True
+ if registeredTests - declaredTests != set([]) :
+ print filename + ": " + str(registeredTests - declaredTests)
+ foundUnregisteredTests = True
+ file.close()
sys.exit(foundUnregisteredTests)
diff --git a/BuildTools/CheckTranslations.py b/BuildTools/CheckTranslations.py
index a9b6afd..615f81f 100755
--- a/BuildTools/CheckTranslations.py
+++ b/BuildTools/CheckTranslations.py
@@ -3,48 +3,48 @@
import os, sys, re, xml.dom.minidom
def getText(nodelist):
- text = ""
- for node in nodelist:
- if node.nodeType == node.TEXT_NODE:
- text += node.data
- return text
+ text = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ text += node.data
+ return text
desktop_generic_names = set()
desktop_comments = set()
f = open("Swift/resources/swift.desktop", "r")
for l in f.readlines() :
- m = re.match("GenericName\[(\w+)\].*", l)
- if m :
- desktop_generic_names.add(m.group(1))
- m = re.match("Comment\[(\w+)\].*", l)
- if m :
- desktop_comments.add(m.group(1))
+ m = re.match("GenericName\[(\w+)\].*", l)
+ if m :
+ desktop_generic_names.add(m.group(1))
+ m = re.match("Comment\[(\w+)\].*", l)
+ if m :
+ desktop_comments.add(m.group(1))
f.close()
for filename in os.listdir("Swift/Translations") :
- m = re.match("swift_(.*)\.ts", filename)
- if m :
- language = m.group(1)
- finished = True
- f = open("Swift/Translations/" + filename, "r")
- document = xml.dom.minidom.parse(f)
- f.close()
+ m = re.match("swift_(.*)\.ts", filename)
+ if m :
+ language = m.group(1)
+ finished = True
+ f = open("Swift/Translations/" + filename, "r")
+ document = xml.dom.minidom.parse(f)
+ f.close()
- for message in document.getElementsByTagName("message") :
- source = message.getElementsByTagName("source")[0]
- sourceText = getText(source.childNodes)
- sourcePlaceholders = set(re.findall("%\d+%?", sourceText))
- translation = message.getElementsByTagName("translation")[0]
- if "type" in translation.attributes.keys() and translation.attributes["type"]. value == "unfinished" :
- finished = False
- translationText = getText(translation.childNodes)
- translationPlaceholders = set(re.findall("%\d+%?", translationText))
- if translationPlaceholders != sourcePlaceholders :
- print "[Error] " + filename + ": Placeholder mismatch in translation '" + sourceText + "'"
- if not finished :
- print "[Warning] " + filename + ": Unfinished"
- if language not in desktop_generic_names and language != "en" :
- print "[Warning] GenericName field missing in desktop entry for " + language
- if language not in desktop_comments and language != "en" :
- print "[Warning] Comment field missing in desktop entry for " + language
+ for message in document.getElementsByTagName("message") :
+ source = message.getElementsByTagName("source")[0]
+ sourceText = getText(source.childNodes)
+ sourcePlaceholders = set(re.findall("%\d+%?", sourceText))
+ translation = message.getElementsByTagName("translation")[0]
+ if "type" in translation.attributes.keys() and translation.attributes["type"]. value == "unfinished" :
+ finished = False
+ translationText = getText(translation.childNodes)
+ translationPlaceholders = set(re.findall("%\d+%?", translationText))
+ if translationPlaceholders != sourcePlaceholders :
+ print "[Error] " + filename + ": Placeholder mismatch in translation '" + sourceText + "'"
+ if not finished :
+ print "[Warning] " + filename + ": Unfinished"
+ if language not in desktop_generic_names and language != "en" :
+ print "[Warning] GenericName field missing in desktop entry for " + language
+ if language not in desktop_comments and language != "en" :
+ print "[Warning] Comment field missing in desktop entry for " + language
diff --git a/BuildTools/Copyright/find-contribs.py b/BuildTools/Copyright/find-contribs.py
index 63c454e..ac30afb 100755
--- a/BuildTools/Copyright/find-contribs.py
+++ b/BuildTools/Copyright/find-contribs.py
@@ -3,46 +3,46 @@
import subprocess
def print_log(full_log):
- full_log_lines = full_log.split("\n")
-
- commits = []
-
- commit_bit = "commit "
- author_bit = "Author: "
- date_bit = "Date: "
-
- commit = None
- for line in full_log_lines:
-
- if line[0:len(commit_bit)] == commit_bit:
- if commit:
- commits.append(commit)
- commit = {'text':''}
- handled = False
- for bit in [commit_bit, author_bit, date_bit]:
- if line[0:len(bit)] == bit:
- commit[bit] = line
- handled = True
- if not handled:
- commit['text'] += line
-
- commits.append(commit)
-
- contributions = []
-
- for commit in commits:
- if not "git@kismith.co.uk" in commit[author_bit] and not "git@el-tramo.be" in commit[author_bit]:
- contributions.append(commit)
-
- #print contributions
- contributors = {}
- for commit in contributions:
- if not commit[author_bit] in contributors:
- contributors[commit[author_bit]] = []
- contributors[commit[author_bit]].append(commit[commit_bit])
-
- for contributor in contributors:
- print contributor + " has contributed patches " + ", ".join([commit[len(commit_bit):] for commit in contributors[contributor]])
+ full_log_lines = full_log.split("\n")
+
+ commits = []
+
+ commit_bit = "commit "
+ author_bit = "Author: "
+ date_bit = "Date: "
+
+ commit = None
+ for line in full_log_lines:
+
+ if line[0:len(commit_bit)] == commit_bit:
+ if commit:
+ commits.append(commit)
+ commit = {'text':''}
+ handled = False
+ for bit in [commit_bit, author_bit, date_bit]:
+ if line[0:len(bit)] == bit:
+ commit[bit] = line
+ handled = True
+ if not handled:
+ commit['text'] += line
+
+ commits.append(commit)
+
+ contributions = []
+
+ for commit in commits:
+ if not "git@kismith.co.uk" in commit[author_bit] and not "git@el-tramo.be" in commit[author_bit]:
+ contributions.append(commit)
+
+ #print contributions
+ contributors = {}
+ for commit in contributions:
+ if not commit[author_bit] in contributors:
+ contributors[commit[author_bit]] = []
+ contributors[commit[author_bit]].append(commit[commit_bit])
+
+ for contributor in contributors:
+ print contributor + " has contributed patches " + ", ".join([commit[len(commit_bit):] for commit in contributors[contributor]])
full_swiften_log = subprocess.check_output(["git", "log", "--", "Swiften"])
diff --git a/BuildTools/Copyrighter.py b/BuildTools/Copyrighter.py
index a3b6379..a16050c 100755
--- a/BuildTools/Copyrighter.py
+++ b/BuildTools/Copyrighter.py
@@ -11,32 +11,32 @@ LICENSE_DIR = "Documentation/Licenses"
reParseLicenseCommentBlocks = re.compile(ur'(\/\*\n\s\*\sCopyright \(c\) (?P<startYear>\d\d\d\d)(-(?P<endYear>\d\d\d\d))? (?P<author>[^\n\.]*)\.?\n.\* (?P<license>[^\n]*)\n \* (?P<seeMore>[^\n]+)\n *\*\/)')
class License :
- def __init__(self, name, file) :
- self.name = name
- self.file = file
+ def __init__(self, name, file) :
+ self.name = name
+ self.file = file
licenses = {
- "default": License("All rights reserved.", "See the COPYING file for more information."),
- "gpl3" : License("Licensed under the GNU General Public License v3.", "See " + LICENSE_DIR + "/" + "GPLv3.txt" + " for more information."),
- "mit" : License("Licensed under the MIT License.", "See " + LICENSE_DIR + "/" + "MIT.txt" + " for more information."),
- }
+ "default": License("All rights reserved.", "See the COPYING file for more information."),
+ "gpl3" : License("Licensed under the GNU General Public License v3.", "See " + LICENSE_DIR + "/" + "GPLv3.txt" + " for more information."),
+ "mit" : License("Licensed under the MIT License.", "See " + LICENSE_DIR + "/" + "MIT.txt" + " for more information."),
+ }
class Copyright :
- def __init__(self, author, year, license) :
- self.author = author
- self.year = year
- self.license = license
-
- def to_string(self, comment_chars) :
- return "\n".join([
- comment_chars[0],
- comment_chars[1] + " Copyright (c) %(year)s %(name)s" % {"year" : self.year, "name" : self.author },
- comment_chars[1] + licenses[self.license].name,
- comment_chars[1] + licenses[self.license].file,
- comment_chars[2],
- "\n"])
- def __str__(self):
- return """/*
+ def __init__(self, author, year, license) :
+ self.author = author
+ self.year = year
+ self.license = license
+
+ def to_string(self, comment_chars) :
+ return "\n".join([
+ comment_chars[0],
+ comment_chars[1] + " Copyright (c) %(year)s %(name)s" % {"year" : self.year, "name" : self.author },
+ comment_chars[1] + licenses[self.license].name,
+ comment_chars[1] + licenses[self.license].file,
+ comment_chars[2],
+ "\n"])
+ def __str__(self):
+ return """/*
* Copyright (c) %s %s.
* %s
* %s
@@ -44,145 +44,145 @@ class Copyright :
""" % (self.year, self.author, licenses[self.license].name, licenses[self.license].file)
class ContentRef :
- def __init__(self, begin, end, content):
- self.begin = begin
- self.end = end
- self.content = content
+ def __init__(self, begin, end, content):
+ self.begin = begin
+ self.end = end
+ self.content = content
class CopyrightBlock :
- def __init__(self, yearBegin, yearEnd, author, license, seeMore, total):
- self.yearBegin = yearBegin
- self.yearEnd = yearEnd
- self.author = author
- self.license = license
- self.seeMore = seeMore
- self.total = total
+ def __init__(self, yearBegin, yearEnd, author, license, seeMore, total):
+ self.yearBegin = yearBegin
+ self.yearEnd = yearEnd
+ self.author = author
+ self.license = license
+ self.seeMore = seeMore
+ self.total = total
def cref_from_group(match, group):
- if match.group(group):
- return ContentRef(match.start(group), match.end(group), match.group(group))
- else :
- return None
+ if match.group(group):
+ return ContentRef(match.start(group), match.end(group), match.group(group))
+ else :
+ return None
def parse_file_new(filename):
- copyrightBlocks = []
- with open(filename, 'r') as file:
- content = file.read()
- for match in re.finditer(reParseLicenseCommentBlocks, content):
- copyrightBlocks.append(CopyrightBlock(
- cref_from_group(match, "startYear"),
- cref_from_group(match, "endYear"),
- cref_from_group(match, "author"),
- cref_from_group(match, "license"),
- cref_from_group(match, "seeMore"),
- cref_from_group(match, 0)))
- return copyrightBlocks
+ copyrightBlocks = []
+ with open(filename, 'r') as file:
+ content = file.read()
+ for match in re.finditer(reParseLicenseCommentBlocks, content):
+ copyrightBlocks.append(CopyrightBlock(
+ cref_from_group(match, "startYear"),
+ cref_from_group(match, "endYear"),
+ cref_from_group(match, "author"),
+ cref_from_group(match, "license"),
+ cref_from_group(match, "seeMore"),
+ cref_from_group(match, 0)))
+ return copyrightBlocks
def get_userinfo() :
- p = subprocess.Popen("git config user.name", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
- username = p.stdout.read().rstrip()
- p.stdin.close()
- if p.wait() != 0 :
- return None
- p = subprocess.Popen("git config user.email", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
- email = p.stdout.read().rstrip()
- p.stdin.close()
- if p.wait() != 0 :
- return None
- return (username, email)
+ p = subprocess.Popen("git config user.name", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
+ username = p.stdout.read().rstrip()
+ p.stdin.close()
+ if p.wait() != 0 :
+ return None
+ p = subprocess.Popen("git config user.email", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
+ email = p.stdout.read().rstrip()
+ p.stdin.close()
+ if p.wait() != 0 :
+ return None
+ return (username, email)
def get_copyright(username, email) :
- if email in ["git@el-tramo.be", "git@kismith.co.uk"] :
- license = DEFAULT_LICENSE
- else :
- license = CONTRIBUTOR_LICENSE
- return Copyright(username, datetime.date.today().strftime("%Y"), license)
+ if email in ["git@el-tramo.be", "git@kismith.co.uk"] :
+ license = DEFAULT_LICENSE
+ else :
+ license = CONTRIBUTOR_LICENSE
+ return Copyright(username, datetime.date.today().strftime("%Y"), license)
def get_copyright_setting(username, email) :
- config = os.getenv("SWIFT_LICENSE_CONFIG")
- if config :
- copyrightHolder, license = config.split("|")
- else :
- if email.endswith("isode.com") or email in ["git@el-tramo.be", "git@kismith.co.uk", "tm@ayena.de"] :
- copyrightHolder, license = "Isode Limited", "default"
- else :
- copyrightHolder, license = username, "mit"
- return Copyright(copyrightHolder, datetime.date.today().year, license)
+ config = os.getenv("SWIFT_LICENSE_CONFIG")
+ if config :
+ copyrightHolder, license = config.split("|")
+ else :
+ if email.endswith("isode.com") or email in ["git@el-tramo.be", "git@kismith.co.uk", "tm@ayena.de"] :
+ copyrightHolder, license = "Isode Limited", "default"
+ else :
+ copyrightHolder, license = username, "mit"
+ return Copyright(copyrightHolder, datetime.date.today().year, license)
def check_copyright(filename, hints) :
- copyrightBlocks = parse_file_new(filename)
- if copyrightBlocks :
- # looking for copyright block for current author
- username, email = get_userinfo()
- copyrightSetting = get_copyright_setting(username, email)
- for block in copyrightBlocks :
- if block.author.content == copyrightSetting.author:
- year = block.yearBegin.content if not block.yearEnd else block.yearEnd.content
- if int(year) == copyrightSetting.year:
- return True
- else :
- if hints :
- print "Copyright block for " + copyrightSetting.author + " does not cover current year in: " + filename
- return False
- if hints :
- print "Missing copyright block for " + copyrightSetting.author + " in: " + filename
- return False
- else :
- if hints :
- print "No copyright found in: " + filename
- return False
+ copyrightBlocks = parse_file_new(filename)
+ if copyrightBlocks :
+ # looking for copyright block for current author
+ username, email = get_userinfo()
+ copyrightSetting = get_copyright_setting(username, email)
+ for block in copyrightBlocks :
+ if block.author.content == copyrightSetting.author:
+ year = block.yearBegin.content if not block.yearEnd else block.yearEnd.content
+ if int(year) == copyrightSetting.year:
+ return True
+ else :
+ if hints :
+ print "Copyright block for " + copyrightSetting.author + " does not cover current year in: " + filename
+ return False
+ if hints :
+ print "Missing copyright block for " + copyrightSetting.author + " in: " + filename
+ return False
+ else :
+ if hints :
+ print "No copyright found in: " + filename
+ return False
def replace_data_in_file(filename, begin, end, replaceWith) :
- with open(filename, 'r') as file:
- content = file.read()
- with open(filename, 'w') as file:
- file.write(content[:begin] + replaceWith + content[end:])
+ with open(filename, 'r') as file:
+ content = file.read()
+ with open(filename, 'w') as file:
+ file.write(content[:begin] + replaceWith + content[end:])
def set_or_update_copyright(filename) :
- if check_copyright(filename, False) :
- print "No update required for file: " + filename
- else :
- copyrightBlocks = parse_file_new(filename)
- username, email = get_userinfo()
- copyrightSetting = get_copyright_setting(username, email)
- lastBlock = 0
- for block in copyrightBlocks :
- if block.author.content == copyrightSetting.author :
- if not block.yearEnd :
- # replace year with range
- replace_data_in_file(filename, block.yearBegin.begin, block.yearBegin.end, "%s-%s" % (block.yearBegin.content, str(copyrightSetting.year)))
- else :
- # replace end of range with current year
- replace_data_in_file(filename, block.yearEnd.begin, block.yearEnd.end, "%s" % str(copyrightSetting.year))
- return
- lastBlock = block.total.end
-
- # No copyright block found. Append a new one.
- replace_data_in_file(filename, lastBlock+1, lastBlock+1, "\n" + str(copyrightSetting))
+ if check_copyright(filename, False) :
+ print "No update required for file: " + filename
+ else :
+ copyrightBlocks = parse_file_new(filename)
+ username, email = get_userinfo()
+ copyrightSetting = get_copyright_setting(username, email)
+ lastBlock = 0
+ for block in copyrightBlocks :
+ if block.author.content == copyrightSetting.author :
+ if not block.yearEnd :
+ # replace year with range
+ replace_data_in_file(filename, block.yearBegin.begin, block.yearBegin.end, "%s-%s" % (block.yearBegin.content, str(copyrightSetting.year)))
+ else :
+ # replace end of range with current year
+ replace_data_in_file(filename, block.yearEnd.begin, block.yearEnd.end, "%s" % str(copyrightSetting.year))
+ return
+ lastBlock = block.total.end
+
+ # No copyright block found. Append a new one.
+ replace_data_in_file(filename, lastBlock+1, lastBlock+1, "\n" + str(copyrightSetting))
def print_help() :
- print """Usage:
- Copyrighter.py check-copyright $filename
- Cheks for the existence of a copyright comment block.
+ print """Usage:
+ Copyrighter.py check-copyright $filename
+ Cheks for the existence of a copyright comment block.
- Copyrighter.py set-copyright $filename
- Adds or updates the existing copyright comment block.
+ Copyrighter.py set-copyright $filename
+ Adds or updates the existing copyright comment block.
- License setting:
- A users license configuration can be set via the SWIFT_LICENSE_CONFIG environment variable
- in the format "$copyright holder|$license", e.g. "Jane Doe|mit". Possible values for
- $license are default, mit and gpl.
- """
+ License setting:
+ A users license configuration can be set via the SWIFT_LICENSE_CONFIG environment variable
+ in the format "$copyright holder|$license", e.g. "Jane Doe|mit". Possible values for
+ $license are default, mit and gpl.
+ """
if sys.argv[1] == "check-copyright" :
- file = sys.argv[2]
- if (file.endswith(".cpp") or file.endswith(".h")) and not "3rdParty" in file :
- if not check_copyright(file, True) :
- sys.exit(-1)
+ file = sys.argv[2]
+ if (file.endswith(".cpp") or file.endswith(".h")) and not "3rdParty" in file :
+ if not check_copyright(file, True) :
+ sys.exit(-1)
elif sys.argv[1] == "set-copyright" :
- file = sys.argv[2]
- set_or_update_copyright(file)
+ file = sys.argv[2]
+ set_or_update_copyright(file)
else :
- print "Unknown command: " + sys.argv[1]
- print_help()
- sys.exit(-1)
+ print "Unknown command: " + sys.argv[1]
+ print_help()
+ sys.exit(-1)
diff --git a/BuildTools/Coverage/FilterLCovData.py b/BuildTools/Coverage/FilterLCovData.py
index 33fd682..e0f5c92 100755
--- a/BuildTools/Coverage/FilterLCovData.py
+++ b/BuildTools/Coverage/FilterLCovData.py
@@ -7,23 +7,23 @@ import sys, re, os.path
assert(len(sys.argv) == 2)
def isIgnored(file) :
- return (file.find("/Swiften/") == -1 and file.find("/Slimber/") == -1 and file.find("/Swift/") == -1) or (file.find("/UnitTest/") != -1 or file.find("/QA/") != -1)
+ return (file.find("/Swiften/") == -1 and file.find("/Slimber/") == -1 and file.find("/Swift/") == -1) or (file.find("/UnitTest/") != -1 or file.find("/QA/") != -1)
output = []
inputFile = open(sys.argv[1])
inIgnoredFile = False
for line in inputFile.readlines() :
- if inIgnoredFile :
- if line == "end_of_record\n" :
- inIgnoredFile = False
- else :
- if line.startswith("SF:") and isIgnored(line) :
- inIgnoredFile = True
- else :
- m = re.match("SF:(.*)", line)
- if m :
- line = "SF:" + os.path.realpath(m.group(1)) + "\n"
- output.append(line)
+ if inIgnoredFile :
+ if line == "end_of_record\n" :
+ inIgnoredFile = False
+ else :
+ if line.startswith("SF:") and isIgnored(line) :
+ inIgnoredFile = True
+ else :
+ m = re.match("SF:(.*)", line)
+ if m :
+ line = "SF:" + os.path.realpath(m.group(1)) + "\n"
+ output.append(line)
inputFile.close()
outputFile = open(sys.argv[1], 'w')
diff --git a/BuildTools/Coverage/GenerateSummary.py b/BuildTools/Coverage/GenerateSummary.py
index e572082..9de0f4d 100755
--- a/BuildTools/Coverage/GenerateSummary.py
+++ b/BuildTools/Coverage/GenerateSummary.py
@@ -8,27 +8,27 @@ inputFile = open(sys.argv[1])
currentFile = ""
coverage = {}
for line in inputFile.readlines() :
- line = line.strip()
- m = re.match("^SF:(.*)", line)
- if m :
- currentFile = m.group(1)
- else :
- m = re.match("^DA:(\d+),(\d+)", line)
- if m :
- currentFileCoverage = coverage.get(currentFile, {})
- line = int(m.group(1))
- count = int(m.group(2))
- currentFileCoverage[line] = currentFileCoverage.get(line, 0) + count
- coverage[currentFile] = currentFileCoverage
+ line = line.strip()
+ m = re.match("^SF:(.*)", line)
+ if m :
+ currentFile = m.group(1)
+ else :
+ m = re.match("^DA:(\d+),(\d+)", line)
+ if m :
+ currentFileCoverage = coverage.get(currentFile, {})
+ line = int(m.group(1))
+ count = int(m.group(2))
+ currentFileCoverage[line] = currentFileCoverage.get(line, 0) + count
+ coverage[currentFile] = currentFileCoverage
inputFile.close()
totalLines = 0
coveredLines = 0
for c in coverage.values() :
- totalLines += len(c)
- for l in c.values() :
- if l > 0 :
- coveredLines += 1
+ totalLines += len(c)
+ for l in c.values() :
+ if l > 0 :
+ coveredLines += 1
outputFile = open(sys.argv[2], 'w')
outputFile.write(str(coveredLines) + "/" + str(totalLines))
diff --git a/BuildTools/Cppcheck.sh b/BuildTools/Cppcheck.sh
index 9f14cb2..ee52b9b 100755
--- a/BuildTools/Cppcheck.sh
+++ b/BuildTools/Cppcheck.sh
@@ -1,21 +1,21 @@
#!/bin/sh
cppcheck $@ \
- --enable=all \
- --inline-suppr \
- --suppress=postfixOperator:3rdParty/hippomocks.h \
- --suppress=stlSize:3rdParty/hippomocks.h \
- --suppress=noConstructor \
- --suppress=publicAllocationError:Swift/Controllers/Chat/UnitTest/ChatsManagerTest.cpp \
- -i 3rdParty -i .git -i .sconf_temp \
- -i Swift/QtUI/EventViewer/main.cpp \
- -i Swift/QtUI/ApplicationTest \
- -i Swift/QtUI/ChatView/main.cpp \
- -i Swift/QtUI/Roster/main.cpp \
- -i Swift/QtUI/NotifierTest/NotifierTest.cpp \
- -DSWIFTEN_BUILDING -DSWIFTEN_STATIC \
- -U__BEOS__ -U__CYGWIN__ -U__QNNXTO__ -U__amigaos__ -Uhpux -U__sgi \
- \
- -I . \
- -I Swift/QtUI \
- .
+ --enable=all \
+ --inline-suppr \
+ --suppress=postfixOperator:3rdParty/hippomocks.h \
+ --suppress=stlSize:3rdParty/hippomocks.h \
+ --suppress=noConstructor \
+ --suppress=publicAllocationError:Swift/Controllers/Chat/UnitTest/ChatsManagerTest.cpp \
+ -i 3rdParty -i .git -i .sconf_temp \
+ -i Swift/QtUI/EventViewer/main.cpp \
+ -i Swift/QtUI/ApplicationTest \
+ -i Swift/QtUI/ChatView/main.cpp \
+ -i Swift/QtUI/Roster/main.cpp \
+ -i Swift/QtUI/NotifierTest/NotifierTest.cpp \
+ -DSWIFTEN_BUILDING -DSWIFTEN_STATIC \
+ -U__BEOS__ -U__CYGWIN__ -U__QNNXTO__ -U__amigaos__ -Uhpux -U__sgi \
+ \
+ -I . \
+ -I Swift/QtUI \
+ .
diff --git a/BuildTools/CrashReportAnalysis/WindowsMinidumpAnalyse.py b/BuildTools/CrashReportAnalysis/WindowsMinidumpAnalyse.py
new file mode 100644
index 0000000..dada920
--- /dev/null
+++ b/BuildTools/CrashReportAnalysis/WindowsMinidumpAnalyse.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Note
+# ----
+# This script requires:
+# - cdb, the Windows command line debugger installed and available in PATH.
+# - the SWIFT_DIST environment variable set to a locatioon that contains msi and pdb.gz files.
+
+import sys
+from subprocess import call
+from subprocess import Popen, PIPE
+import ntpath
+import shutil
+import re
+import urllib2
+import os
+import gzip
+import time
+
+
+swiftWindowBuildsPathPrefix = os.getenv("SWIFT_DIST")
+if swiftWindowBuildsPathPrefix == None :
+ print "Please set the SWIFT_DIST environment variable to a location containing msi and pdb.gz files."
+ sys.exit(1)
+
+if len(sys.argv) != 3:
+ print "Usage: python WindowsMinidumpAnalyse.py VERSION MINIDUMP_FILE"
+ sys.exit(1)
+
+version = sys.argv[1]
+minidump_file = sys.argv[2]
+minidump_filename = ntpath.basename(minidump_file)
+minidump_fullpath = os.path.abspath(minidump_file)
+humantext_fullpath = os.path.splitext(minidump_fullpath)[0]+".txt"
+symbol_cache_path = os.path.join(os.getenv("TEMP"), "\symbols")
+working_folder = "tmp-crash-{0}".format(minidump_filename)
+commit = ""
+
+def downloadInstaller(version) :
+ onlineFilename = "{0}.msi".format(version.capitalize())
+ url = "{0}{1}".format(swiftWindowBuildsPathPrefix, onlineFilename)
+ print("Download {0}.".format(url))
+ file = urllib2.urlopen(url)
+ with open(onlineFilename,'wb') as output:
+ output.write(file.read())
+
+def unpackInstaller(version) :
+ msiFilename = "{0}.msi".format(version.capitalize())
+ msiExtractDirectory = os.getcwd() + "\\msi"
+ if not os.path.exists(msiExtractDirectory):
+ os.makedirs(msiExtractDirectory)
+ print("Unpack {0} to {1}.".format(msiFilename, os.getcwd()))
+ call(["msiexec", "/a", msiFilename, "/qb", "TARGETDIR={0}".format(msiExtractDirectory)], shell=True)
+
+def unpackDebugSymbols(version) :
+ symbolsFilename = "{0}.pdb.gz".format(version.capitalize())
+ print("Unpack {0}.".format(symbolsFilename))
+ if not os.path.isdir(symbolsFilename):
+ with gzip.open(symbolsFilename, 'rb') as in_file:
+ s = in_file.read()
+
+ path_to_store = symbolsFilename[:-3]
+
+ with open("msi\PFiles\Swift\{0}".format("Swift.pdb"), 'wb') as f:
+ f.write(s)
+
+def downloadDebugSymbols(version) :
+ onlineFilename = "{0}.pdb.gz".format(version.capitalize())
+ url = "{0}{1}".format(swiftWindowBuildsPathPrefix, onlineFilename)
+ print("Download {0}.".format(url))
+ file = urllib2.urlopen(url)
+ with open(onlineFilename,'wb') as output:
+ output.write(file.read())
+
+def copyMinidump(filename) :
+ shutil.copyfile(filename, "msi\PFiles\Swift\{0}".format(minidump_filename))
+
+def printHumanReadableReport():
+ oldDir = os.getcwd()
+
+ # change dir to Swift.exe dir
+ os.chdir("msi\PFiles\Swift")
+
+ # print all stacks and analyze crash for exceptions
+ cdbCommand = ".symopt+0x40;.lines -e;.kframes 200;!analyze -v -p;!uniqstack -vp;.ecxr;k;q"
+
+ symbolPath = "cache*{0};srv*https://msdl.microsoft.com/download/symbols;C:\\Qt\\Qt5.4.2\\5.4\\msvc2013_opengl\\bin;C:\\Qt\\Qt5.4.2\\5.4\\msvc2013_opengl\\lib;{1}".format(symbol_cache_path, os.getcwd())
+
+ cdbFullCommand = ["cdb", "-i", os.getcwd(), "-y", symbolPath, "-z", minidump_filename, "-srcpath", oldDir, "-logo", humantext_fullpath, "-c", cdbCommand ]
+ print("Run command: " + str(cdbFullCommand))
+ call(cdbFullCommand)
+
+# for testing, delete the old folder
+try:
+ shutil.rmtree(working_folder)
+except:
+ print ""
+
+# clone local git repository into dedicated directory
+call(["git", "clone", ".", working_folder], shell=True)
+
+# git version from swift version
+match = re.match( r"(.*)-dev(\d+)", version)
+if match:
+ basetag = match.group(1)
+ commits = int(match.group(2))
+ process = Popen(["git", "-C", working_folder, "log", "--ancestry-path", "--format=%H", "{0}..HEAD".format(basetag)], stdout=PIPE)
+ (output, err) = process.communicate()
+ exit_code = process.wait()
+ commit = output.splitlines()[-commits].strip()
+else:
+ basetag = version
+ process = Popen(["git", "-C", working_folder, "log", "--format=%H", "-n", "1" "{0}".format(basetag)], stdout=PIPE)
+ (output, err) = process.communicate()
+ exit_code = process.wait()
+ commit = output.strip()
+
+assert(len(commit) > 0)
+
+# Create symbol cache directory
+if not os.path.exists(symbol_cache_path):
+ os.makedirs(symbol_cache_path)
+
+#print "Checking out commit {0}.".format(commit)
+call(["git", "-C", working_folder, "checkout", commit])
+
+os.chdir(working_folder)
+
+downloadInstaller(version)
+downloadDebugSymbols(version)
+unpackInstaller(version)
+unpackDebugSymbols(version)
+copyMinidump(minidump_fullpath)
+time.sleep(10)
+printHumanReadableReport()
diff --git a/BuildTools/DocBook/SCons/DocBook.py b/BuildTools/DocBook/SCons/DocBook.py
index 7641b65..ffb0bfc 100644
--- a/BuildTools/DocBook/SCons/DocBook.py
+++ b/BuildTools/DocBook/SCons/DocBook.py
@@ -6,100 +6,100 @@ import SCons.Util, SCons.Action
import xml.dom.minidom, re, os.path, sys
def generate(env) :
- # Location of stylesheets and catalogs
- docbook_dir = "#/BuildTools/DocBook"
- docbook_xsl_style_dir = env.Dir(docbook_dir + "/Stylesheets").abspath
- docbook_xml_catalog = env.File("catalog.xml").abspath
- if "DOCBOOK_XML_DIR" in env :
- docbook_xml_dir = env.Dir("$DOCBOOK_XML_DIR").abspath
- else :
- docbook_xml_dir = env.Dir("#/3rdParty/DocBook/XML").abspath
- if "DOCBOOK_XSL_DIR" in env :
- docbook_xsl_dir = env.Dir("$DOCBOOK_XSL_DIR").abspath
- else :
- docbook_xsl_dir = env.Dir("#/3rdParty/DocBook/XSL").abspath
- fop_fonts_dir = env.Dir(docbook_dir + "/Fonts").abspath
-
- # Generates a catalog from paths to external tools
- def buildCatalog(target, source, env) :
- catalog = """<?xml version='1.0'?>
+ # Location of stylesheets and catalogs
+ docbook_dir = "#/BuildTools/DocBook"
+ docbook_xsl_style_dir = env.Dir(docbook_dir + "/Stylesheets").abspath
+ docbook_xml_catalog = env.File("catalog.xml").abspath
+ if "DOCBOOK_XML_DIR" in env :
+ docbook_xml_dir = env.Dir("$DOCBOOK_XML_DIR").abspath
+ else :
+ docbook_xml_dir = env.Dir("#/3rdParty/DocBook/XML").abspath
+ if "DOCBOOK_XSL_DIR" in env :
+ docbook_xsl_dir = env.Dir("$DOCBOOK_XSL_DIR").abspath
+ else :
+ docbook_xsl_dir = env.Dir("#/3rdParty/DocBook/XSL").abspath
+ fop_fonts_dir = env.Dir(docbook_dir + "/Fonts").abspath
+
+ # Generates a catalog from paths to external tools
+ def buildCatalog(target, source, env) :
+ catalog = """<?xml version='1.0'?>
<catalog xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog" prefer="public">
- <rewriteSystem
- systemIdStartString="http://www.oasis-open.org/docbook/xml/4.5/"
- rewritePrefix="%(docbook_xml_dir)s/" />
- <rewriteSystem
- systemIdStartString="docbook-xsl:/"
- rewritePrefix="%(docbook_xsl_dir)s/" />
+ <rewriteSystem
+ systemIdStartString="http://www.oasis-open.org/docbook/xml/4.5/"
+ rewritePrefix="%(docbook_xml_dir)s/" />
+ <rewriteSystem
+ systemIdStartString="docbook-xsl:/"
+ rewritePrefix="%(docbook_xsl_dir)s/" />
</catalog>"""
- docbook_xml_dir = source[0].get_contents()
- docbook_xsl_dir = source[1].get_contents()
- if env["PLATFORM"] == "win32" :
- docbook_xml_dir = docbook_xml_dir.replace("\\","/")
- docbook_xsl_dir = docbook_xsl_dir.replace("\\","/")
- file = open(target[0].abspath, "w")
- file.write(catalog % {
- "docbook_xml_dir" : docbook_xml_dir,
- "docbook_xsl_dir" : docbook_xsl_dir,
- })
- file.close()
-
- # Generates a FOP config file
- def buildFopConfig(target, source, env) :
- fopcfg = """<fop version=\"1.0\">
- <renderers>
- <renderer mime=\"application/pdf\">
- <fonts>
- <directory recursive=\"true\">%(fonts_dir)s</directory>
- </fonts>
- </renderer>
- </renderers>
+ docbook_xml_dir = source[0].get_contents()
+ docbook_xsl_dir = source[1].get_contents()
+ if env["PLATFORM"] == "win32" :
+ docbook_xml_dir = docbook_xml_dir.replace("\\","/")
+ docbook_xsl_dir = docbook_xsl_dir.replace("\\","/")
+ file = open(target[0].abspath, "w")
+ file.write(catalog % {
+ "docbook_xml_dir" : docbook_xml_dir,
+ "docbook_xsl_dir" : docbook_xsl_dir,
+ })
+ file.close()
+
+ # Generates a FOP config file
+ def buildFopConfig(target, source, env) :
+ fopcfg = """<fop version=\"1.0\">
+ <renderers>
+ <renderer mime=\"application/pdf\">
+ <fonts>
+ <directory recursive=\"true\">%(fonts_dir)s</directory>
+ </fonts>
+ </renderer>
+ </renderers>
</fop>"""
- file = open(target[0].abspath, "w")
- file.write(fopcfg % {
- "fonts_dir" : source[0].get_contents()
- })
- file.close()
+ file = open(target[0].abspath, "w")
+ file.write(fopcfg % {
+ "fonts_dir" : source[0].get_contents()
+ })
+ file.close()
- # Builds a DocBook file
- def buildDocBook(env, source) :
- db_env = env.Clone()
- db_env["XMLCATALOGS"] = [docbook_xml_catalog]
- db_env["ENV"].update({"OS" : os.environ.get("OS", "")})
+ # Builds a DocBook file
+ def buildDocBook(env, source) :
+ db_env = env.Clone()
+ db_env["XMLCATALOGS"] = [docbook_xml_catalog]
+ db_env["ENV"].update({"OS" : os.environ.get("OS", "")})
- db_env["XMLLINT"] = env.WhereIs("xmllint")
- db_env["XSLT"] = env.WhereIs("xsltproc")
- db_env["FO"] = env.WhereIs("fop")
+ db_env["XMLLINT"] = env.WhereIs("xmllint")
+ db_env["XSLT"] = env.WhereIs("xsltproc")
+ db_env["FO"] = env.WhereIs("fop")
- if not db_env["XMLLINT"] or not db_env["XSLT"] :
- return
+ if not db_env["XMLLINT"] or not db_env["XSLT"] :
+ return
- # PDF generation
- if db_env["FO"] :
- fo = db_env.XSLT(os.path.splitext(source)[0] + ".fo", source,
- XSLTSTYLESHEET = db_env["DOCBOOK_XSL_FO"])
- pdf = db_env.FO(fo)
+ # PDF generation
+ if db_env["FO"] :
+ fo = db_env.XSLT(os.path.splitext(source)[0] + ".fo", source,
+ XSLTSTYLESHEET = db_env["DOCBOOK_XSL_FO"])
+ pdf = db_env.FO(fo)
- # HTML generation
- db_env.XSLT(os.path.splitext(source)[0] + ".html", source,
- XSLTSTYLESHEET = db_env["DOCBOOK_XSL_HTML"])
+ # HTML generation
+ db_env.XSLT(os.path.splitext(source)[0] + ".html", source,
+ XSLTSTYLESHEET = db_env["DOCBOOK_XSL_HTML"])
- # Import tools
- env.Tool("FO", toolpath = [docbook_dir + "/SCons"])
- env.Tool("XSLT", toolpath = [docbook_dir + "/SCons"])
+ # Import tools
+ env.Tool("FO", toolpath = [docbook_dir + "/SCons"])
+ env.Tool("XSLT", toolpath = [docbook_dir + "/SCons"])
- # Catalog file generation
- env.Command("catalog.xml", [env.Value(docbook_xml_dir), env.Value(docbook_xsl_dir)], SCons.Action.Action(buildCatalog, cmdstr = "$GENCOMSTR"))
+ # Catalog file generation
+ env.Command("catalog.xml", [env.Value(docbook_xml_dir), env.Value(docbook_xsl_dir)], SCons.Action.Action(buildCatalog, cmdstr = "$GENCOMSTR"))
- # FO config file generation
- env["FOCFG"] = env.File("fop.cfg").abspath
- env.Command("fop.cfg", [env.Value(fop_fonts_dir)], SCons.Action.Action(buildFopConfig, cmdstr = "$GENCOMSTR"))
+ # FO config file generation
+ env["FOCFG"] = env.File("fop.cfg").abspath
+ env.Command("fop.cfg", [env.Value(fop_fonts_dir)], SCons.Action.Action(buildFopConfig, cmdstr = "$GENCOMSTR"))
- # DocBook stylesheets
- env["DOCBOOK_XSL_FO"] = docbook_xsl_style_dir + "/fo/docbook.xsl"
- env["DOCBOOK_XSL_HTML"] = docbook_xsl_style_dir + "/html/docbook.xsl"
- env.AddMethod(buildDocBook, "DocBook")
+ # DocBook stylesheets
+ env["DOCBOOK_XSL_FO"] = docbook_xsl_style_dir + "/fo/docbook.xsl"
+ env["DOCBOOK_XSL_HTML"] = docbook_xsl_style_dir + "/html/docbook.xsl"
+ env.AddMethod(buildDocBook, "DocBook")
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/DocBook/SCons/FO.py b/BuildTools/DocBook/SCons/FO.py
index a4079d7..c1c5614 100644
--- a/BuildTools/DocBook/SCons/FO.py
+++ b/BuildTools/DocBook/SCons/FO.py
@@ -6,40 +6,40 @@ import xml.dom.minidom, re
################################################################################
def generate(env) :
- def generate_actions(source, target, env, for_signature) :
- if len(env["FOCFG"]) > 0 :
- cmd = "$FO -c $FOCFG $FOFLAGS $SOURCE $TARGET"
- else :
- cmd = "$FO $FOFLAGS $SOURCE $TARGET"
- return SCons.Action.Action(cmd, cmdstr = "$FOCOMSTR")
+ def generate_actions(source, target, env, for_signature) :
+ if len(env["FOCFG"]) > 0 :
+ cmd = "$FO -c $FOCFG $FOFLAGS $SOURCE $TARGET"
+ else :
+ cmd = "$FO $FOFLAGS $SOURCE $TARGET"
+ return SCons.Action.Action(cmd, cmdstr = "$FOCOMSTR")
- def modify_sources(target, source, env) :
- if len(env["FOCFG"]) > 0 :
- source.append(env["FOCFG"])
- return target, source
+ def modify_sources(target, source, env) :
+ if len(env["FOCFG"]) > 0 :
+ source.append(env["FOCFG"])
+ return target, source
- def scan_fo(node, env, path) :
- dependencies = set()
- try :
- document = xml.dom.minidom.parseString(node.get_contents())
- except xml.parsers.expat.ExpatError:
- return []
- for include in document.getElementsByTagNameNS("http://www.w3.org/1999/XSL/Format", "external-graphic") :
- m = re.match("url\((.*)\)", include.getAttribute("src"))
- if m :
- dependencies.add(m.group(1))
- return list(dependencies)
+ def scan_fo(node, env, path) :
+ dependencies = set()
+ try :
+ document = xml.dom.minidom.parseString(node.get_contents())
+ except xml.parsers.expat.ExpatError:
+ return []
+ for include in document.getElementsByTagNameNS("http://www.w3.org/1999/XSL/Format", "external-graphic") :
+ m = re.match("url\((.*)\)", include.getAttribute("src"))
+ if m :
+ dependencies.add(m.group(1))
+ return list(dependencies)
- env["FO"] = "fop"
- env["FOFLAGS"] = ""
- env["FOCFG"] = ""
- env["BUILDERS"]["FO"] = SCons.Builder.Builder(
- generator = generate_actions,
- emitter = modify_sources,
- source_scanner = SCons.Scanner.Scanner(function = scan_fo, skeys = [".fo"]),
- suffix = ".pdf",
- src_suffix = ".fo"
- )
+ env["FO"] = "fop"
+ env["FOFLAGS"] = ""
+ env["FOCFG"] = ""
+ env["BUILDERS"]["FO"] = SCons.Builder.Builder(
+ generator = generate_actions,
+ emitter = modify_sources,
+ source_scanner = SCons.Scanner.Scanner(function = scan_fo, skeys = [".fo"]),
+ suffix = ".pdf",
+ src_suffix = ".fo"
+ )
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/DocBook/SCons/XSLT.py b/BuildTools/DocBook/SCons/XSLT.py
index 825f129..38e36c5 100644
--- a/BuildTools/DocBook/SCons/XSLT.py
+++ b/BuildTools/DocBook/SCons/XSLT.py
@@ -6,58 +6,58 @@ import xml.dom.minidom, os, os.path
################################################################################
def generate(env) :
- def generate_actions(source, target, env, for_signature) :
- if not env.has_key("XSLTSTYLESHEET") :
- raise SCons.Errors.UserError, "The XSLTSTYLESHEET construction variable must be defined"
-
- # Process the XML catalog files
- # FIXME: It's probably not clean to do an ENV assignment globally
- env["ENV"]["XML_CATALOG_FILES"] = " ".join(env.get("XMLCATALOGS", ""))
-
- # Build the XMLLint command
- xmllintcmd = ["$XMLLINT", "--nonet", "--xinclude", "--postvalid", "--noout", "$SOURCE"]
-
- # Build the XSLT command
- xsltcmd = ["$XSLT", "--nonet", "--xinclude"]
- for (param, value) in env["XSLTPARAMS"] :
- xsltcmd += ["--stringparam", param, value]
- xsltcmd += ["-o", "$TARGET", "$XSLTSTYLESHEET", "$SOURCE"]
-
- return [
- SCons.Action.Action([xmllintcmd], cmdstr = "$XMLLINTCOMSTR"),
- SCons.Action.Action([xsltcmd], cmdstr = "$XSLTCOMSTR")]
-
- def modify_sources(target, source, env) :
- if len(env["FOCFG"]) > 0 :
- source.append(env["FOCFG"])
- source.append(env.get("XMLCATALOGS", []))
- return target, source
-
- def scan_xml(node, env, path) :
- dependencies = set()
- nodes = [node]
- while len(nodes) > 0 :
- node = nodes.pop()
- try :
- document = xml.dom.minidom.parseString(node.get_contents())
- except xml.parsers.expat.ExpatError:
- continue
- for include in document.getElementsByTagNameNS("http://www.w3.org/2001/XInclude", "include") :
- include_file = include.getAttribute("href")
- dependencies.add(include_file)
- if include.getAttribute("parse") != "text" :
- nodes.append(env.File(include_file))
- return list(dependencies)
-
- env["XMLLINT"] = "xmllint"
- env["XSLT"] = "xsltproc"
- env["XSLTPARAMS"] = []
- env["BUILDERS"]["XSLT"] = SCons.Builder.Builder(
- generator = generate_actions,
- emitter = modify_sources,
- source_scanner = SCons.Scanner.Scanner(function = scan_xml),
- src_suffix = ".xml"
- )
+ def generate_actions(source, target, env, for_signature) :
+ if not env.has_key("XSLTSTYLESHEET") :
+ raise SCons.Errors.UserError, "The XSLTSTYLESHEET construction variable must be defined"
+
+ # Process the XML catalog files
+ # FIXME: It's probably not clean to do an ENV assignment globally
+ env["ENV"]["XML_CATALOG_FILES"] = " ".join(env.get("XMLCATALOGS", ""))
+
+ # Build the XMLLint command
+ xmllintcmd = ["$XMLLINT", "--nonet", "--xinclude", "--postvalid", "--noout", "$SOURCE"]
+
+ # Build the XSLT command
+ xsltcmd = ["$XSLT", "--nonet", "--xinclude"]
+ for (param, value) in env["XSLTPARAMS"] :
+ xsltcmd += ["--stringparam", param, value]
+ xsltcmd += ["-o", "$TARGET", "$XSLTSTYLESHEET", "$SOURCE"]
+
+ return [
+ SCons.Action.Action([xmllintcmd], cmdstr = "$XMLLINTCOMSTR"),
+ SCons.Action.Action([xsltcmd], cmdstr = "$XSLTCOMSTR")]
+
+ def modify_sources(target, source, env) :
+ if len(env["FOCFG"]) > 0 :
+ source.append(env["FOCFG"])
+ source.append(env.get("XMLCATALOGS", []))
+ return target, source
+
+ def scan_xml(node, env, path) :
+ dependencies = set()
+ nodes = [node]
+ while len(nodes) > 0 :
+ node = nodes.pop()
+ try :
+ document = xml.dom.minidom.parseString(node.get_contents())
+ except xml.parsers.expat.ExpatError:
+ continue
+ for include in document.getElementsByTagNameNS("http://www.w3.org/2001/XInclude", "include") :
+ include_file = include.getAttribute("href")
+ dependencies.add(include_file)
+ if include.getAttribute("parse") != "text" :
+ nodes.append(env.File(include_file))
+ return list(dependencies)
+
+ env["XMLLINT"] = "xmllint"
+ env["XSLT"] = "xsltproc"
+ env["XSLTPARAMS"] = []
+ env["BUILDERS"]["XSLT"] = SCons.Builder.Builder(
+ generator = generate_actions,
+ emitter = modify_sources,
+ source_scanner = SCons.Scanner.Scanner(function = scan_xml),
+ src_suffix = ".xml"
+ )
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/FilterScanBuildResults.py b/BuildTools/FilterScanBuildResults.py
index ed4a55f..a4861ac 100755
--- a/BuildTools/FilterScanBuildResults.py
+++ b/BuildTools/FilterScanBuildResults.py
@@ -6,23 +6,23 @@ resultsDir = sys.argv[1]
resultDirs = [ d for d in os.listdir(resultsDir) if os.path.isdir(os.path.join(resultsDir, d)) ]
resultDirs.sort()
if len(resultDirs) > 0 :
- resultDir = os.path.join(resultsDir, resultDirs[-1])
- resultFileName = os.path.join(resultDir, "index.html")
- resultData = []
- f = open(resultFileName, "r")
- skipLines = 0
- for line in f.readlines() :
- if skipLines > 0 :
- skipLines -= 1
- else :
- if ("3rdParty" in line or "SHA1.cpp" in line or "lua.c" in line) :
- m = re.match(".*(report-.*\.html)", line)
- os.remove(os.path.join(resultDir, m.group(1)))
- skipLines = 2
- else :
- resultData.append(line)
- f.close()
+ resultDir = os.path.join(resultsDir, resultDirs[-1])
+ resultFileName = os.path.join(resultDir, "index.html")
+ resultData = []
+ f = open(resultFileName, "r")
+ skipLines = 0
+ for line in f.readlines() :
+ if skipLines > 0 :
+ skipLines -= 1
+ else :
+ if ("3rdParty" in line or "SHA1.cpp" in line or "lua.c" in line) :
+ m = re.match(".*(report-.*\.html)", line)
+ os.remove(os.path.join(resultDir, m.group(1)))
+ skipLines = 2
+ else :
+ resultData.append(line)
+ f.close()
- f = open(resultFileName, "w")
- f.writelines(resultData)
- f.close()
+ f = open(resultFileName, "w")
+ f.writelines(resultData)
+ f.close()
diff --git a/BuildTools/FixIncludes.py b/BuildTools/FixIncludes.py
index 8ecbd4a..8984944 100755
--- a/BuildTools/FixIncludes.py
+++ b/BuildTools/FixIncludes.py
@@ -9,7 +9,7 @@ filename = sys.argv[1]
inPlace = False
if "-i" in sys.argv:
- inPlace = True
+ inPlace = True
filename_base = os.path.basename(filename)
(filename_name, filename_ext) = os.path.splitext(filename_base)
@@ -19,135 +19,147 @@ c_stdlib_headers = Set(["assert.h", "limits.h", "signal.h", "stdlib.h", "ctyp
cpp_stdlib_headers = Set(["algorithm", "fstream", "list", "regex", "typeindex", "array", "functional", "locale", "set", "typeinfo", "atomic", "future", "map", "sstream", "type_traits", "bitset", "initializer_list", "memory", "stack", "unordered_map", "chrono", "iomanip", "mutex", "stdexcept", "unordered_set", "codecvt", "ios", "new", "streambuf", "utility", "complex", "iosfwd", "numeric", "string", "valarray", "condition_variable", "iostream", "ostream", "strstream", "vector", "deque", "istream", "queue", "system_error", "exception", "iterator", "random", "thread", "forward_list", "limits", "ratio", "tuple", "cassert", "ciso646", "csetjmp", "cstdio", "ctime", "cctype", "climits", "csignal", "cstdlib", "cwchar", "cerrno", "clocale", "cstdarg", "cstring", "cwctype", "cfloat", "cmath", "cstddef"])
class HeaderType:
- PRAGMA_ONCE, CORRESPONDING_HEADER, C_STDLIB, CPP_STDLIB, BOOST, QT, OTHER, SWIFTEN, SWIFT_CONTROLLERS, SWIFTOOLS, SWIFT = range(11)
+ PRAGMA_ONCE, CORRESPONDING_HEADER, C_STDLIB, CPP_STDLIB, BOOST, QT, SWIFTEN_BASE_DEBUG, OTHER, SWIFTEN, LIMBER, SLIMBER, SWIFT_CONTROLLERS, SLUIFT, SWIFTOOLS, SWIFT = range(15)
def findHeaderBlock(lines):
- start = False
- end = False
- lastLine = None
-
- for idx, line in enumerate(lines):
- if not start and line.startswith("#"):
- start = idx
- elif start and (not end) and (not line.startswith("#")) and line.strip():
- end = idx-1
- break
- if not end:
- end = len(lines)
- return (start, end)
+ start = False
+ end = False
+ lastLine = None
+
+ for idx, line in enumerate(lines):
+ if not start and line.startswith("#"):
+ start = idx
+ elif start and (not end) and (not line.startswith("#")) and line.strip():
+ end = idx-1
+ break
+ if not end:
+ end = len(lines)
+ return (start, end)
def lineToFileName(line):
- match = re.match( r'#include "(.*)"', line)
- if match:
- return match.group(1)
- match = re.match( r'#include <(.*)>', line)
- if match:
- return match.group(1)
- return False
+ match = re.match( r'#include "(.*)"', line)
+ if match:
+ return match.group(1)
+ match = re.match( r'#include <(.*)>', line)
+ if match:
+ return match.group(1)
+ return False
def fileNameToHeaderType(name):
- if name.endswith(filename_name + ".h"):
- return HeaderType.CORRESPONDING_HEADER
+ if name.endswith("/" + filename_name + ".h"):
+ return HeaderType.CORRESPONDING_HEADER
- if name in c_stdlib_headers:
- return HeaderType.C_STDLIB
+ if name in c_stdlib_headers:
+ return HeaderType.C_STDLIB
- if name in cpp_stdlib_headers:
- return HeaderType.CPP_STDLIB
+ if name in cpp_stdlib_headers:
+ return HeaderType.CPP_STDLIB
- if name.startswith("boost"):
- return HeaderType.BOOST
+ if name.startswith("boost"):
+ return HeaderType.BOOST
- if name.startswith("Q"):
- return HeaderType.QT
+ if name.startswith("Q"):
+ return HeaderType.QT
- if name.startswith("Swiften"):
- return HeaderType.SWIFTEN
+ if name.startswith("Swiften/Base/Debug.h"):
+ return HeaderType.SWIFTEN_BASE_DEBUG
- if name.startswith("Swift/Controllers"):
- return HeaderType.SWIFT_CONTROLLERS
+ if name.startswith("Swiften"):
+ return HeaderType.SWIFTEN
- if name.startswith("SwifTools"):
- return HeaderType.SWIFTOOLS
+ if name.startswith("Limber"):
+ return HeaderType.LIMBER
- if name.startswith("Swift"):
- return HeaderType.SWIFT
+ if name.startswith("Slimber"):
+ return HeaderType.SLIMBER
- return HeaderType.OTHER
+ if name.startswith("Swift/Controllers"):
+ return HeaderType.SWIFT_CONTROLLERS
+
+ if name.startswith("Sluift"):
+ return HeaderType.SLUIFT
+
+ if name.startswith("SwifTools"):
+ return HeaderType.SWIFTOOLS
+
+ if name.startswith("Swift"):
+ return HeaderType.SWIFT
+
+ return HeaderType.OTHER
def serializeHeaderGroups(groups):
- headerList = []
- for group in range(0, HeaderType.SWIFT + 1):
- if group in groups:
- # sorted and without duplicates
- headers = sorted(list(set(groups[group])))
- headerList.extend(headers)
- headerList.extend(["\n"])
- headerList.pop()
- return headerList
+ headerList = []
+ for group in range(0, HeaderType.SWIFT + 1):
+ if group in groups:
+ # sorted and without duplicates
+ headers = sorted(list(set(groups[group])))
+ headerList.extend(headers)
+ headerList.extend(["\n"])
+ headerList.pop()
+ return headerList
def overwriteFile(filename, content):
- with open(filename, 'w') as f:
- for line in content:
- f.write(line)
+ with open(filename, 'w') as f:
+ for line in content:
+ f.write(line)
def cleanHeaderFile(content, headerStart, headerEnd, headerGroups):
- del content[headerStart:headerEnd]
- newHeaders = serializeHeaderGroups(headerGroups)
- content[headerStart:1] = newHeaders
+ del content[headerStart:headerEnd]
+ newHeaders = serializeHeaderGroups(headerGroups)
+ content[headerStart:1] = newHeaders
- if inPlace :
- overwriteFile(filename, content)
- else :
- for line in content:
- print line,
+ if inPlace :
+ overwriteFile(filename, content)
+ else :
+ for line in content:
+ print line,
def cleanImplementationFile(content, headerStart, headerEnd, headerGroups):
- del content[headerStart:headerEnd]
- newHeaders = serializeHeaderGroups(headerGroups)
- content[headerStart:1] = newHeaders
+ del content[headerStart:headerEnd]
+ newHeaders = serializeHeaderGroups(headerGroups)
+ content[headerStart:1] = newHeaders
- if inPlace :
- overwriteFile(filename, content)
- else :
- for line in content:
- print line,
+ if inPlace :
+ overwriteFile(filename, content)
+ else :
+ for line in content:
+ print line,
containsComplexPreprocessorDirectives = False
with open(filename) as f:
- content = f.readlines()
+ content = f.readlines()
(headerStart, headerEnd) = findHeaderBlock(content)
headerGroups = {}
for line in content[headerStart:headerEnd]:
- if line.strip():
- if line.strip().startswith("#pragma once"):
- headerType = HeaderType.PRAGMA_ONCE
- elif line.strip().startswith("#if") or line.strip().startswith("#def") or line.strip().startswith("#undef") or line.strip().startswith("#pragma "):
- containsComplexPreprocessorDirectives = True
- break
- else:
- #print line
- headerType = fileNameToHeaderType(lineToFileName(line))
-
- #filename = lineToFileName(line)
- if headerType in headerGroups:
- headerGroups[headerType].append(line)
- else:
- headerGroups[headerType] = [line]
+ if line.strip():
+ if line.strip().startswith("#pragma once"):
+ headerType = HeaderType.PRAGMA_ONCE
+ elif line.strip().startswith("#if") or line.strip().startswith("#def") or line.strip().startswith("#undef") or line.strip().startswith("#pragma "):
+ containsComplexPreprocessorDirectives = True
+ break
+ else:
+ #print line
+ headerType = fileNameToHeaderType(lineToFileName(line))
+
+ #filename = lineToFileName(line)
+ if headerType in headerGroups:
+ headerGroups[headerType].append(line)
+ else:
+ headerGroups[headerType] = [line]
if containsComplexPreprocessorDirectives:
- print "Cannot format headers containing preprocessor #if, #pragma, #define or #undef statements!"
- exit(1)
+ print "Cannot format headers containing preprocessor #if, #pragma, #define or #undef statements!"
+ exit(1)
if filename_base.endswith(".h"):
- if not HeaderType.PRAGMA_ONCE in headerGroups:
- print "Missing #pragma once!"
- exit(2)
- cleanHeaderFile(content, headerStart, headerEnd, headerGroups)
-elif filename_base.endswith(".cpp"):
- cleanImplementationFile(content, headerStart, headerEnd, headerGroups)
+ if not HeaderType.PRAGMA_ONCE in headerGroups:
+ print "Missing #pragma once!"
+ exit(2)
+ cleanHeaderFile(content, headerStart, headerEnd, headerGroups)
+elif filename_base.endswith(".cpp") or filename_base.endswith(".mm"):
+ cleanImplementationFile(content, headerStart, headerEnd, headerGroups)
diff --git a/BuildTools/GenerateAppCastFeeds.py b/BuildTools/GenerateAppCastFeeds.py
new file mode 100755
index 0000000..8135134
--- /dev/null
+++ b/BuildTools/GenerateAppCastFeeds.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python2
+
+# This script generates three appcast feeds (stable, testing and development) for macOS Sparkle updates from the Swift releases in the download folder on the Swift website.
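+#
+# Illustrative invocation (hypothetical paths, reusing the example values from the argument help strings below):
+#   python2 GenerateAppCastFeeds.py /Users/foo/website/downloads/ https://swift.im/downloads/ /Users/foo/website/downloads/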
+
+from xml.etree import ElementTree as ET
+import argparse
+import datetime
+import email.utils as eut
+import fnmatch
+import jinja2
+import os.path
+import re
+import time
+import urllib2
+import urlparse
+
+class Release:
+ def __init__(self, version, absoluteURL, sizeInBytes, date):
+ # This is the version string used for update detection.
+ self.fullVersion = version.split('-', 1)[1]
+ # This is a human readable version string, only used for presentation.
+ self.presentationVersion = version
+ self.url = absoluteURL
+ self.sizeInBytes = sizeInBytes
+ self.date = date
+        dateTuple = date.timetuple()
+        dateTimestamp = time.mktime(dateTuple)
+ self.dateString = eut.formatdate(dateTimestamp)
+
+ def __str__(self):
+ return "Release(%s, %s, %s, %s)" % (self.fullVersion, self.url, self.sizeInBytes, self.date)
+
+ def __repr__(self):
+ return "Release(%s, %s, %s, %s)" % (self.fullVersion, self.url, self.sizeInBytes, self.date)
+
+def getReleaseFromAbsoluteFilePath(absolutePath, downloadsFolder, releasesURL):
+ version = os.path.splitext(absolutePath.split('/')[-1])[0]
+ sizeInBytes = os.path.getsize(absolutePath)
+ date = datetime.datetime.fromtimestamp(os.path.getmtime(absolutePath))
+ absoluteURL = urlparse.urljoin(releasesURL, os.path.relpath(absolutePath, downloadsFolder))
+ return Release(version, absoluteURL, sizeInBytes, date)
+
+def getReleaseFromReleaseFolder(releaseFolder, downloadsFolder, releasesURL, extension):
+ release = None
+ regex = re.compile(fnmatch.translate(extension))
+
+ files = [f for f in os.listdir(releaseFolder) if os.path.isfile(os.path.join(releaseFolder, f))]
+ for file in files:
+ fileFullPath = os.path.join(releaseFolder, file)
+ if regex.match(fileFullPath):
+ release = getReleaseFromAbsoluteFilePath(fileFullPath, downloadsFolder, releasesURL)
+ return release
+
+def getReleaseFilesInReleasesFolder(releasesFolder, releasesURL, extension):
+ releases = []
+
+ dirs = [d for d in os.listdir(releasesFolder) if os.path.isdir(os.path.join(releasesFolder, d))]
+ for d in dirs:
+ release = getReleaseFromReleaseFolder(os.path.join(releasesFolder, d), releasesFolder, releasesURL, extension)
+ if release:
+ releases.append(release)
+
+ return releases
+
+def getReleaseFilesInDevelopmentFolder(developmentMacFolder, developmentMacURL, extension):
+ extensionRegex = re.compile(fnmatch.translate(extension))
+ devPatternRegex = re.compile(".+-dev\d+")
+
+ releases = []
+
+ files = [f for f in os.listdir(developmentMacFolder) if os.path.isfile(os.path.join(developmentMacFolder, f))]
+ for f in files:
+ # Only use dev builds from the development folder.
+ if devPatternRegex.match(f):
+ fileFullPath = os.path.join(developmentMacFolder, f)
+ if extensionRegex.match(fileFullPath):
+ releases.append(getReleaseFromAbsoluteFilePath(fileFullPath, developmentMacFolder, developmentMacURL))
+
+ return releases
+
+def writeAppcastFile(filename, title, description, regexPattern, appcastURL, releases):
+ template = jinja2.Template('''<rss xmlns:sparkle="http://www.andymatuschak.org/xml-namespaces/sparkle" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
+ <channel>
+ <title>{{ title }}</title>
+ <link>{{ appcast_url }}</link>
+ <description>{{ description }}</description>
+ <language>en</language>
+ {% for item in releases %}<item>
+ <title>Swift version {{ item.fullVersion }}</title>
+ <pubDate>{{ item.dateString }}</pubDate>
+ <enclosure url="{{ item.url }}"
+ sparkle:version="{{ item.fullVersion }}"
+ sparkle:shortVersionString="{{ item.presentationVersion }}"
+ length="{{ item.sizeInBytes }}"
+ type="application/octet-stream" />
+ </item>
+ {% endfor %}</channel>
+</rss>''')
+
+ matchingReleases = [i for i in releases if re.match(regexPattern, i.fullVersion)]
+ matchingReleases = matchingReleases[:2] # only include the first two matches in the appcast
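+    # For illustration: with regexPattern "^\d+(\.\d+)?(\.\d+)?$" only plain release versions
+    # such as "4.0" or "4.0.2" are kept, while pre-release versions like "4.0beta1" or
+    # "4.0-dev42" are only picked up by the testing/development patterns passed in further below.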
+
+ appcastContent = template.render(title=title, appcast_url=appcastURL, description=description, releases=matchingReleases)
+
+ contentParsesOK = False
+ try:
+ x = ET.fromstring(appcastContent)
+ contentParsesOK = True
+ except :
+ contentParsesOK = False
+
+ if contentParsesOK:
+ with open(filename, 'w') as file:
+ file.write(appcastContent)
+ else:
+ print("Failed to generate valid appcast feed %s." % filename)
+
+parser = argparse.ArgumentParser(description='Generate stable/testing/development appcast feeds for Sparkle updater.')
+parser.add_argument('downloadsFolder', type=str, help="e.g. /Users/foo/website/downloads/")
+parser.add_argument('downloadsURL', type=str, help="e.g. https://swift.im/downloads/")
+parser.add_argument('outputFolder', type=str, help="e.g. /Users/foo/website/downloads/")
+
+args = parser.parse_args()
+
+releasesPath = os.path.join(args.downloadsFolder, "releases")
+developmentMacPath = os.path.join(args.downloadsFolder, "development", "mac")
+
+manualReleases = getReleaseFilesInReleasesFolder(releasesPath, urlparse.urljoin(args.downloadsURL, "releases/"), "*.dmg")
+manualReleases.sort(key=lambda release: release.date, reverse=True)
+
+automaticReleases = list(manualReleases)  # copy, so that development builds are not also appended to manualReleases
+automaticReleases.extend(getReleaseFilesInDevelopmentFolder(developmentMacPath, urlparse.urljoin(args.downloadsURL, "development/mac/"), "*.dmg"))
+automaticReleases.sort(key=lambda release: release.date, reverse=True)
+
+
+writeAppcastFile(filename=os.path.join(args.outputFolder, "swift-stable-appcast-mac.xml"),
+ title="Swift Stable Releases",
+ description="",
+ regexPattern="^\d+(\.\d+)?(\.\d+)?$",
+ appcastURL=urlparse.urljoin(args.downloadsURL, "swift-stable-appcast-mac.xml"),
+ releases=manualReleases)
+writeAppcastFile(filename=os.path.join(args.outputFolder, "swift-testing-appcast-mac.xml"),
+ title="Swift Testing Releases",
+ description="",
+ regexPattern="^\d+(\.\d+)?(\.\d+)?(beta\d+)?(rc\d+)?$",
+ appcastURL=urlparse.urljoin(args.downloadsURL, "swift-testing-appcast-mac.xml"),
+ releases=manualReleases)
+writeAppcastFile(filename=os.path.join(args.outputFolder, "swift-development-appcast-mac.xml"),
+ title="Swift Development Releases",
+ description="",
+ regexPattern="^\d+(\.\d+)?(\.\d+)?(alpha)?(beta\d+)?(rc\d+)?(-dev\d+)?$",
+ appcastURL=urlparse.urljoin(args.downloadsURL, "swift-development-appcast-mac.xml"),
+ releases=automaticReleases)
diff --git a/BuildTools/GetBuildVersion.py b/BuildTools/GetBuildVersion.py
index fc92d15..70fdc5c 100755
--- a/BuildTools/GetBuildVersion.py
+++ b/BuildTools/GetBuildVersion.py
@@ -8,14 +8,14 @@ assert(len(sys.argv) >= 2)
only_major = False
if "--major" in sys.argv :
- only_major = True
+ only_major = True
if only_major :
- v = Version.getBuildVersion(os.path.dirname(sys.argv[0]) + "/..", sys.argv[1])
- version_match = re.match("(\d+)\.(\d+).*", v)
- if version_match :
- print version_match.group(1)
- else :
- print "0"
+ v = Version.getBuildVersion(os.path.dirname(sys.argv[0]) + "/..", sys.argv[1])
+ version_match = re.match("(\d+)\.(\d+).*", v)
+ if version_match :
+ print version_match.group(1)
+ else :
+ print "0"
else :
- print Version.getBuildVersion(os.path.dirname(sys.argv[0]) + "/..", sys.argv[1])
+ print Version.getBuildVersion(os.path.dirname(sys.argv[0]) + "/..", sys.argv[1])
diff --git a/BuildTools/Gource/GetGravatars.py b/BuildTools/Gource/GetGravatars.py
index 47f8a68..d1f40a4 100755
--- a/BuildTools/Gource/GetGravatars.py
+++ b/BuildTools/Gource/GetGravatars.py
@@ -5,8 +5,8 @@ import subprocess, os, sys, hashlib, urllib
GRAVATAR_URL = "http://www.gravatar.com/avatar/%(id)s?d=404"
if len(sys.argv) != 2 :
- print "Usage: " + sys.argv[0] + " <output-dir>"
- sys.exit(-1)
+ print "Usage: " + sys.argv[0] + " <output-dir>"
+ sys.exit(-1)
output_dir = sys.argv[1]
@@ -14,36 +14,36 @@ output_dir = sys.argv[1]
authors = {}
p = subprocess.Popen("git log --pretty=format:'%ae|%an'", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
for line in p.stdout.readlines() :
- author_components = line.rstrip().split("|")
- authors[author_components[0]] = author_components[1]
+ author_components = line.rstrip().split("|")
+ authors[author_components[0]] = author_components[1]
p.stdin.close()
if p.wait() != 0 :
- print "Error"
- sys.exit(-1)
+ print "Error"
+ sys.exit(-1)
# Get & save the avatars
if not os.path.isdir(output_dir) :
- os.makedirs(output_dir)
+ os.makedirs(output_dir)
for email, name in authors.items() :
- print "Processing avatar for " + name + " <" + email + ">"
- filename = os.path.join(output_dir, name + ".png")
- if os.path.isfile(filename) :
- print "-> Already there. Skipping."
- continue
+ print "Processing avatar for " + name + " <" + email + ">"
+ filename = os.path.join(output_dir, name + ".png")
+ if os.path.isfile(filename) :
+ print "-> Already there. Skipping."
+ continue
- m = hashlib.md5()
- m.update(email)
- url = GRAVATAR_URL % {"id" : m.hexdigest()}
- print "- Downloading " + url
- f = urllib.urlopen(url)
- input = None
- if f.getcode() == 200 :
- input = f.read()
- f.close()
- if input :
- print "- Saving file " + filename
- f = open(filename, "w")
- f.write(input)
- f.close()
- else :
- print "- No Gravatar found"
+ m = hashlib.md5()
+ m.update(email)
+ url = GRAVATAR_URL % {"id" : m.hexdigest()}
+ print "- Downloading " + url
+ f = urllib.urlopen(url)
+ input = None
+ if f.getcode() == 200 :
+ input = f.read()
+ f.close()
+ if input :
+ print "- Saving file " + filename
+ f = open(filename, "w")
+ f.write(input)
+ f.close()
+ else :
+ print "- No Gravatar found"
diff --git a/BuildTools/InstallSwiftDependencies.sh b/BuildTools/InstallSwiftDependencies.sh
index 64cdc5e..f957e3d 100755
--- a/BuildTools/InstallSwiftDependencies.sh
+++ b/BuildTools/InstallSwiftDependencies.sh
@@ -6,27 +6,31 @@ SYSTEM_NAME=$(uname)
if [ "$SYSTEM_NAME" == "Linux" ]
then
- # handle linux distributions
- SYSTEM_DISTRO=$(lsb_release -i -s)
- if [ "$SYSTEM_DISTRO" == "Debian" ]
- then
- sudo apt-get install pkg-config libssl-dev qt5-default libqt5x11extras5-dev libqt5webkit5-dev qtmultimedia5-dev qttools5-dev-tools
- elif [ "$SYSTEM_DISTRO" == "Ubuntu" ]
- then
- sudo apt-get install pkg-config libssl-dev qt5-default libqt5x11extras5-dev libqt5webkit5-dev qtmultimedia5-dev qttools5-dev-tools
- elif [ "$SYSTEM_DISTRO" == "Arch" ]
- then
- sudo pacman -S qt5-base qt5-x11extras qt5-webkit qt5-multimedia qt5-tools
- elif [ "$SYSTEM_DISTRO" == "openSUSE project" ]
- then
- sudo zypper in pkg-config libopenssl-devel libQt5Core-devel libQt5WebKit5-devel libQt5WebKitWidgets-devel libqt5-qtmultimedia-devel libqt5-qtx11extras-devel libqt5-qttools-devel libQt5Gui-devel libQt5Network-devel libQt5DBus-devel
- elif [ "$SYSTEM_DISTRO" == "Fedora" ]
- then
- sudo dnf groups install "C Development Tools and Libraries"
- sudo dnf install openssl-devel qt5-qtbase-devel qt5-linguist qt5-qtwebkit-devel qt5-qtmultimedia-devel qt5-qtx11extras-devel
- else
- echo "Unsupported Linux distribution."
- fi
+ # handle linux distributions
+ SYSTEM_DISTRO=$(lsb_release -i -s)
+ if [ "$SYSTEM_DISTRO" == "Debian" ]
+ then
+ sudo apt-get install build-essential pkg-config libssl-dev qt5-default libqt5x11extras5-dev libqt5webkit5-dev qtmultimedia5-dev qttools5-dev-tools qt5-image-formats-plugins libqt5svg5-dev libminiupnpc-dev libnatpmp-dev libhunspell-dev
+ elif [ "$SYSTEM_DISTRO" == "Ubuntu" ]
+ then
+ sudo apt-get install build-essential pkg-config libssl-dev qt5-default libqt5x11extras5-dev libqt5webkit5-dev qtmultimedia5-dev qttools5-dev-tools qt5-image-formats-plugins libqt5svg5-dev libminiupnpc-dev libnatpmp-dev libhunspell-dev
+ elif [ "$SYSTEM_DISTRO" == "Arch" ]
+ then
+ sudo pacman -S qt5-base qt5-x11extras qt5-webkit qt5-multimedia qt5-tools qt5-svg hunspell
+ elif [ "$SYSTEM_DISTRO" == "openSUSE project" ] || [ "$SYSTEM_DISTRO" == "SUSE LINUX" ]
+ then
+ sudo zypper "$@" in --type pattern devel_basis
+ sudo zypper "$@" in pkg-config libopenssl-devel libQt5Core-devel libQt5WebKit5-devel libQt5WebKitWidgets-devel libqt5-qtmultimedia-devel libqt5-qtx11extras-devel libqt5-qttools-devel libQt5Gui-devel libQt5Network-devel libQt5DBus-devel libQt5Svg-devel libQt5Svg5 python-xml hunspell-devel
+ elif [ "$SYSTEM_DISTRO" == "Fedora" ]
+ then
+ sudo dnf groups install "C Development Tools and Libraries"
+ sudo dnf install openssl-devel qt5-qtbase-devel qt5-linguist qt5-qtwebkit-devel qt5-qtmultimedia-devel qt5-qtx11extras-devel qt5-qtsvg-devel hunspell-devel
+ elif [ "$SYSTEM_DISTRO" == "Sabayon" ]
+ then
+ sudo -E equo install sys-devel/autoconf sys-devel/automake sys-devel/gcc sys-devel/g++ virtual/os-headers virtual/pkgconfig sys-libs/glibc dev-qt/linguist-tools dev-qt/qtcore dev-qt/qtmultimedia dev-qt/qtdbus dev-qt/qtgui dev-qt/qtimageformats dev-qt/qtsvg dev-qt/qtwebkit dev-qt/qtwidgets dev-qt/qtx11extras dev-libs/openssl net-libs/miniupnpc net-libs/libnatpmp app-text/hunspell
+ else
+ echo "Unsupported Linux distribution."
+ fi
else
- echo "Unsupported system."
+ echo "Unsupported system."
fi
diff --git a/BuildTools/SCons/SConscript.boot b/BuildTools/SCons/SConscript.boot
index 14f72c7..031c556 100644
--- a/BuildTools/SCons/SConscript.boot
+++ b/BuildTools/SCons/SConscript.boot
@@ -14,8 +14,9 @@ vars.Add('link', "Linker")
vars.Add('linkflags', "Extra linker flags")
vars.Add('ar', "Archiver (ar or lib)")
if os.name == "nt":
- vars.Add('mt', "manifest tool")
+ vars.Add('mt', "manifest tool")
vars.Add(BoolVariable("ccache", "Use CCache", "no"))
+vars.Add(BoolVariable("distcc", "Use distcc", "no"))
vars.Add(EnumVariable("test", "Compile and run tests", "none", ["none", "all", "unit", "system"]))
vars.Add(BoolVariable("optimize", "Compile with optimizations turned on", "no"))
vars.Add(BoolVariable("debug", "Compile with debug information", "yes"))
@@ -28,29 +29,30 @@ vars.Add('android_sdk_bin', "Path to Android SDK's tools directory")
vars.Add(BoolVariable("swift_mobile", "Build mobile Swift", "no"))
vars.Add(BoolVariable("swiften_dll", "Build Swiften as dynamically linked library", "no"))
if os.name != "nt" :
- vars.Add(BoolVariable("coverage", "Compile with coverage information", "no"))
+ vars.Add(BoolVariable("coverage", "Compile with coverage information", "no"))
if os.name == "posix" :
- vars.Add(BoolVariable("valgrind", "Run tests with valgrind", "no"))
+ vars.Add(BoolVariable("valgrind", "Run tests with valgrind", "no"))
if os.name == "mac" or (os.name == "posix" and os.uname()[0] == "Darwin"):
- vars.Add(BoolVariable("universal", "Create universal binaries", "no"))
- vars.Add(BoolVariable("mac105", "Link against the 10.5 frameworks", "no"))
- vars.Add(BoolVariable("mac106", "Link against the 10.6 frameworks", "no"))
+ vars.Add(BoolVariable("universal", "Create universal binaries", "no"))
+ vars.Add(BoolVariable("mac105", "Link against the 10.5 frameworks", "no"))
+ vars.Add(BoolVariable("mac106", "Link against the 10.6 frameworks", "no"))
if os.name == "nt" :
- vars.Add(PathVariable("vcredist", "MSVC redistributable dir", None, PathVariable.PathAccept))
+ vars.Add(PathVariable("vcredist", "MSVC redistributable dir", None, PathVariable.PathAccept))
if os.name == "nt" :
- vars.Add(PathVariable("wix_bindir", "Path to WiX binaries", "", PathVariable.PathAccept))
+ vars.Add(PathVariable("wix_bindir", "Path to WiX binaries", "", PathVariable.PathAccept))
if os.name == "nt" :
- vars.Add(PackageVariable("bonjour", "Bonjour SDK location", "yes"))
+ vars.Add(PackageVariable("bonjour", "Bonjour SDK location", "yes"))
+vars.Add(EnumVariable("tls_backend", "Choose the TLS backend", "native", ["native", "openssl", "openssl_bundled"]))
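+# e.g. run "scons tls_backend=openssl" to select the OpenSSL backend instead of the default "native" one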
vars.Add(PackageVariable("openssl", "OpenSSL location", "yes"))
vars.Add("openssl_libnames", "Comma-separated openssl library names to override defaults", None)
-vars.Add(BoolVariable("openssl_force_bundled", "Force use of the bundled OpenSSL", "no"))
vars.Add("openssl_include", "Location of OpenSSL include files (if not under (openssl)/include)", None)
vars.Add("openssl_libdir", "Location of OpenSSL library files (if not under (openssl)/lib)", None)
vars.Add(PackageVariable("hunspell_prefix", "Hunspell location", False))
-vars.Add(BoolVariable("hunspell_enable", "Build with Hunspell support", False))
+vars.Add(BoolVariable("hunspell_enable", "Build with Hunspell support", True))
vars.Add(PathVariable("boost_includedir", "Boost headers location", None, PathVariable.PathAccept))
vars.Add(PathVariable("boost_libdir", "Boost library location", None, PathVariable.PathAccept))
vars.Add(BoolVariable("boost_bundled_enable", "Allow use of bundled Boost as last resort", "true"))
+vars.Add(BoolVariable("boost_force_bundled", "Force use of bundled Boost.", False))
vars.Add(PathVariable("zlib_includedir", "Zlib headers location", None, PathVariable.PathAccept))
vars.Add(PathVariable("zlib_libdir", "Zlib library location", None, PathVariable.PathAccept))
vars.Add(PathVariable("zlib_libfile", "Zlib library file (full path to file)", None, PathVariable.PathAccept))
@@ -68,12 +70,17 @@ vars.Add(PathVariable("libidn_includedir", "LibIDN headers location", None, Path
vars.Add(PathVariable("libidn_libdir", "LibIDN library location", None, PathVariable.PathAccept))
vars.Add("libidn_libname", "LibIDN library name", os.name == "nt" and "libidn" or "idn")
vars.Add(BoolVariable("need_idn", "Whether an IDN library is required. Without this, most internal binaries will fail", "true"))
+
vars.Add(PathVariable("libminiupnpc_includedir", "LibMiniUPNPC headers location", None, PathVariable.PathAccept))
vars.Add(PathVariable("libminiupnpc_libdir", "LibMiniUPNPC library location", None, PathVariable.PathAccept))
vars.Add("libminiupnpc_libname", "LibMiniUPNPC library name", os.name == "nt" and "libminiupnpc" or "miniupnpc")
+vars.Add(BoolVariable("libminiupnpc_force_bundled", "Force use of bundled LibMiniUPNPC", False))
+
vars.Add(PathVariable("libnatpmp_includedir", "LibNATPMP headers location", None, PathVariable.PathAccept))
vars.Add(PathVariable("libnatpmp_libdir", "LibNATPMP library location", None, PathVariable.PathAccept))
vars.Add("libnatpmp_libname", "LibNATPMP library name", os.name == "nt" and "libnatpmp" or "natpmp")
+vars.Add(BoolVariable("libnatpmp_force_bundled", "Force use of bundled LibNATPMP", False))
+
vars.Add(PathVariable("sqlite_includedir", "SQLite headers location", None, PathVariable.PathAccept))
vars.Add(PathVariable("sqlite_libdir", "SQLite library location", None, PathVariable.PathAccept))
vars.Add("sqlite_libname", "SQLite library name", os.name == "nt" and "libsqlite3" or "sqlite3")
@@ -104,36 +111,45 @@ vars.Add(BoolVariable("check_headers", "Independently build compilation units fo
vars.Add("win_target_arch", "Target architecture for Windows builds. x86 for 32-bit (default) or x86_64 for 64-bit.", "x86")
vars.Add(BoolVariable("install_git_hooks", "Install git hooks", "true"))
+# Code Signing Options
+vars.Add("codesign_identity", "macOS code signing identity to be passed to codesign when building the distribution package. Must match the Common Name of the Subject of the code signing certificate.", "")
+vars.Add("signtool_key_pfx", "The keyfile (.pfx) that will be used to sign the Windows installer.", None)
+vars.Add("signtool_timestamp_url", "The timestamp server that will be queried for a signed time stamp in the signing process.", None)
+
+# Automatic Software Update Options
+vars.Add(PathVariable("sparkle_public_dsa_key", "Optional path to a public DSA key used to verify Sparkle software updates. Without specifying this option, the app needs to be code signed for Sparkle to work.", None, PathVariable.PathIsFile))
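+# Example invocation (hypothetical identity and key path, for illustration only):
+#   scons codesign_identity="Developer ID Application: Example Corp" sparkle_public_dsa_key=Sparkle/dsa_pub.pem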
+
+
################################################################################
# Set up default build & configure environment
################################################################################
env_ENV = {
- 'PATH' : os.environ['PATH'],
- 'LD_LIBRARY_PATH' : os.environ.get("LD_LIBRARY_PATH", ""),
- 'TERM' : os.environ.get("TERM", ""),
+ 'PATH' : os.environ['PATH'],
+ 'LD_LIBRARY_PATH' : os.environ.get("LD_LIBRARY_PATH", ""),
+ 'TERM' : os.environ.get("TERM", ""),
}
if "MSVC_VERSION" in ARGUMENTS :
- env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None))
- env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None), TARGET_ARCH=env["win_target_arch"])
+ env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None))
+ env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None), TARGET_ARCH=env["win_target_arch"])
else :
- env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None))
- env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None), TARGET_ARCH=env["win_target_arch"])
+ env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None))
+ env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None), TARGET_ARCH=env["win_target_arch"])
Help(vars.GenerateHelpText(env))
# Workaround for missing Visual Studio 2012 support in SCons
# Requires scons to be run from a VS2012 console
if env.get("MSVC_VERSION", "").startswith("11.0") :
- env["ENV"]["LIB"] = os.environ["LIB"]
- env["ENV"]["INCLUDE"] = os.environ["INCLUDE"]
+ env["ENV"]["LIB"] = os.environ["LIB"]
+ env["ENV"]["INCLUDE"] = os.environ["INCLUDE"]
# Default environment variables
env["PLATFORM_FLAGS"] = {
- "LIBPATH": [],
- "LIBS": [],
- "FRAMEWORKS": [],
+ "LIBPATH": [],
+ "LIBS": [],
+ "FRAMEWORKS": [],
}
# Default custom tools
@@ -142,24 +158,24 @@ env.Tool("WriteVal", toolpath = ["#/BuildTools/SCons/Tools"])
env.Tool("BuildVersion", toolpath = ["#/BuildTools/SCons/Tools"])
env.Tool("Flags", toolpath = ["#/BuildTools/SCons/Tools"])
if env["PLATFORM"] == "darwin" :
- env.Tool("Nib", toolpath = ["#/BuildTools/SCons/Tools"])
- env.Tool("AppBundle", toolpath = ["#/BuildTools/SCons/Tools"])
+ env.Tool("Nib", toolpath = ["#/BuildTools/SCons/Tools"])
+ env.Tool("AppBundle", toolpath = ["#/BuildTools/SCons/Tools"])
if env["PLATFORM"] == "win32" :
- env.Tool("WindowsBundle", toolpath = ["#/BuildTools/SCons/Tools"])
- #So we don't need to escalate with UAC
- if "TMP" in os.environ.keys() :
- env['ENV']['TMP'] = os.environ['TMP']
+ env.Tool("WindowsBundle", toolpath = ["#/BuildTools/SCons/Tools"])
+ #So we don't need to escalate with UAC
+ if "TMP" in os.environ.keys() :
+ env['ENV']['TMP'] = os.environ['TMP']
env.Tool("SLOCCount", toolpath = ["#/BuildTools/SCons/Tools"])
# Max out the number of jobs
if env["max_jobs"] :
- try :
- import multiprocessing
- SetOption("num_jobs", multiprocessing.cpu_count())
- except NotImplementedError :
- pass
- except ImportError :
- pass
+ try :
+ import multiprocessing
+ SetOption("num_jobs", multiprocessing.cpu_count())
+ except NotImplementedError :
+ pass
+ except ImportError :
+ pass
# Set speed options
env.Decider("MD5-timestamp")
@@ -168,19 +184,24 @@ env.SetOption("implicit_cache", True)
# Set the default compiler to CLang on OS X, and set the necessary flags
if env["PLATFORM"] == "darwin" and env["target"] == "native" :
- if "cc" not in env :
- env["CC"] = "clang"
- if platform.machine() == "x86_64" :
- env["CCFLAGS"] = ["-arch", "x86_64"]
- if "cxx" not in env :
- env["CXX"] = "clang++"
- # Compiling Qt5 in C++0x mode includes headers that we don't have
- if not env["qt5"] :
- env["CXXFLAGS"] = ["-std=c++11"]
- if "link" not in env :
- env["LINK"] = "clang"
- if platform.machine() == "x86_64" :
- env.Append(LINKFLAGS = ["-arch", "x86_64"])
+ if "cc" not in env :
+ env["CC"] = "clang"
+ if platform.machine() == "x86_64" :
+ env["CCFLAGS"] = ["-arch", "x86_64"]
+ if "cxx" not in env :
+ env["CXX"] = "clang++"
+ if "link" not in env :
+ # Use clang++ instead of clang, otherwise XCode's clang will cause linking errors due to missing C++ standard lib.
+ env["LINK"] = "clang++"
+ if platform.machine() == "x86_64" :
+ env.Append(LINKFLAGS = ["-arch", "x86_64"])
+
+# Set QT_SELECT variable to enable building on systems that have Qt4 and Qt5 installed and use qtselect
+if env["PLATFORM"] != "darwin" and env["PLATFORM"] != "win32" :
+ if env["qt5"] :
+ env["ENV"]["QT_SELECT"] = "qt5"
+ else:
+ env["ENV"]["QT_SELECT"] = "qt4"
# Set QT_SELECT variable to enable building on systems that have Qt4 and Qt5 installed and use qtselect
if env["PLATFORM"] != "darwin" and env["PLATFORM"] != "win32" :
@@ -191,198 +212,209 @@ if env["PLATFORM"] != "darwin" and env["PLATFORM"] != "win32" :
# Check whether we are running inside scan-build, and override compiler if so
if "CCC_ANALYZER_HTML" in os.environ :
- for key, value in os.environ.items() :
- if key.startswith("CCC_") or key.startswith("CLANG") :
- env["ENV"][key] = value
- env["CC"] = os.environ["CC"]
- env["CXX"] = os.environ["CXX"]
+ for key, value in os.environ.items() :
+ if key.startswith("CCC_") or key.startswith("CLANG") :
+ env["ENV"][key] = value
+ env["CC"] = os.environ["CC"]
+ env["CXX"] = os.environ["CXX"]
# Override the compiler with custom variables set at config time
if "cc" in env :
- env["CC"] = env["cc"]
+ env["CC"] = env["cc"]
if "cxx" in env :
- env["CXX"] = env["cxx"]
+ env["CXX"] = env["cxx"]
if "ar" in env :
- env["AR"] = env["ar"]
+ env["AR"] = env["ar"]
if "link" in env :
- env["SHLINK"] = env["link"]
- env["LINK"] = env["link"]
+ env["SHLINK"] = env["link"]
+ env["LINK"] = env["link"]
+
+# Process user-defined external flags
for flags_type in ["ccflags", "cxxflags", "linkflags"] :
- if flags_type in env :
- if isinstance(env[flags_type], str) :
- # FIXME: Make the splitting more robust
- env[flags_type.upper()] = env[flags_type].split(" ")
- else :
- env[flags_type.upper()] = env[flags_type]
+ if flags_type in env :
+ if isinstance(env[flags_type], str) :
+ # FIXME: Make the splitting more robust
+ env[flags_type.upper()] = env[flags_type].split(" ")
+ else :
+ env[flags_type.upper()] = env[flags_type]
# This isn't a real flag (yet) AFAIK. Be sure to append it to the CXXFLAGS
# where you need it
env["OBJCCFLAGS"] = []
+# Compile code as C++11
+if env["PLATFORM"] != "win32" :
+ env.Append(CXXFLAGS = ["-std=c++11"])
+
if env["optimize"] :
- if env["PLATFORM"] == "win32" :
- env.Append(CCFLAGS = ["/O2"])
- else :
- env.Append(CCFLAGS = ["-O2"])
+ if env["PLATFORM"] == "win32" :
+ env.Append(CCFLAGS = ["/O2"])
+ else :
+ env.Append(CCFLAGS = ["-O2"])
if env["target"] == "xcode" and os.environ["CONFIGURATION"] == "Release" :
- env.Append(CCFLAGS = ["-Os"])
+ env.Append(CCFLAGS = ["-Os"])
if env["debug"] :
- if env["PLATFORM"] == "win32" :
- env.Append(CCFLAGS = ["/Zi"])
- env.Append(LINKFLAGS = ["/DEBUG"])
- if GetOption("num_jobs") > 1 :
- env["CCPDBFLAGS"] = '/Fd${TARGET}.pdb'
- env["PDB"] = '${TARGET.base}.pdb'
- if env["set_iterator_debug_level"] :
- env.Append(CPPDEFINES = ["_ITERATOR_DEBUG_LEVEL=0"])
- if env["optimize"] :
- env.Append(LINKFLAGS = ["/OPT:NOREF"])
- env.Append(CCFLAGS = ["/MD"])
- else :
- env.Append(CCFLAGS = ["/MDd"])
- else :
- env.Append(CCFLAGS = ["-g"])
+ if env["PLATFORM"] == "win32" :
+ env.Append(CCFLAGS = ["/Zi"])
+ env.Append(LINKFLAGS = ["/DEBUG"])
+ if GetOption("num_jobs") > 1 :
+ env["CCPDBFLAGS"] = '/Fd${TARGET}.pdb'
+ env["PDB"] = '${TARGET.base}.pdb'
+ if env["set_iterator_debug_level"] :
+ env.Append(CPPDEFINES = ["_ITERATOR_DEBUG_LEVEL=0"])
+ env.Append(LINKFLAGS = ["/OPT:NOREF"])
+ env.Append(CCFLAGS = ["/MD"])
+ else :
+ env.Append(CCFLAGS = ["-g"])
elif env["PLATFORM"] == "win32" :
- env.Append(CCFLAGS = ["/MD"])
+ env.Append(CCFLAGS = ["/MD"])
if env.get("universal", 0) :
- assert(env["PLATFORM"] == "darwin")
- env.Append(CCFLAGS = [
- "-isysroot", "/Developer/SDKs/MacOSX10.4u.sdk",
- "-arch", "i386",
- "-arch", "ppc"])
- env.Append(LINKFLAGS = [
- "-mmacosx-version-min=10.4",
- "-isysroot", "/Developer/SDKs/MacOSX10.4u.sdk",
- "-arch", "i386",
- "-arch", "ppc"])
+ assert(env["PLATFORM"] == "darwin")
+ env.Append(CCFLAGS = [
+ "-isysroot", "/Developer/SDKs/MacOSX10.4u.sdk",
+ "-arch", "i386",
+ "-arch", "ppc"])
+ env.Append(LINKFLAGS = [
+ "-mmacosx-version-min=10.4",
+ "-isysroot", "/Developer/SDKs/MacOSX10.4u.sdk",
+ "-arch", "i386",
+ "-arch", "ppc"])
# Link against other versions of the OS X SDKs.
# FIXME: This method does not work anymore, we need to set deployment targets.
if env.get("mac105", 0) :
- assert(env["PLATFORM"] == "darwin")
- env.Append(CCFLAGS = [
- "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk",
- "-arch", "i386"])
- env.Append(LINKFLAGS = [
- "-mmacosx-version-min=10.5",
- "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk",
- "-arch", "i386"])
+ assert(env["PLATFORM"] == "darwin")
+ env.Append(CCFLAGS = [
+ "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk",
+ "-arch", "i386"])
+ env.Append(LINKFLAGS = [
+ "-mmacosx-version-min=10.5",
+ "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk",
+ "-arch", "i386"])
if env.get("mac106", 0) :
- assert(env["PLATFORM"] == "darwin")
- env.Append(CCFLAGS = [
- "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk",
- "-arch", "i386"])
- env.Append(LINKFLAGS = [
- "-mmacosx-version-min=10.6",
- "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk",
- "-arch", "i386"])
+ assert(env["PLATFORM"] == "darwin")
+ env.Append(CCFLAGS = [
+ "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk",
+ "-arch", "i386"])
+ env.Append(LINKFLAGS = [
+ "-mmacosx-version-min=10.6",
+ "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk",
+ "-arch", "i386"])
if not env["assertions"] :
- env.Append(CPPDEFINES = ["NDEBUG"])
+ env.Append(CPPDEFINES = ["NDEBUG"])
# disable file-transfer support on iOS
if env["target"] in ["iphone-device", "iphone-simulator", "xcode"] :
- env["experimental_ft"] = False
+ env["experimental_ft"] = False
if env["experimental_ft"] :
- env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_FT"])
+ env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_FT"])
if env["experimental"] :
- env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_HISTORY", "SWIFT_EXPERIMENTAL_WB"])
+ env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_HISTORY", "SWIFT_EXPERIMENTAL_WB"])
# If we build shared libs on AMD64, we need -fPIC.
 # This should have no performance impact on AMD64
if env["PLATFORM"] == "posix" and platform.machine() in ["x86_64", "amd64"] :
- env.Append(CCFLAGS = ["-fPIC"])
+ env.Append(CCFLAGS = ["-fPIC"])
# Warnings
if env["PLATFORM"] == "win32" :
- env.Append(CXXFLAGS = ["/wd4068"])
+ env.Append(CXXFLAGS = ["/wd4068"])
+ env.Append(CXXFLAGS = ["/wd4503"]) # Disable 'decorated name length exceeded, name was truncated' warning
+ if not env.get("allow_warnings", "False") :
+ env.Append(CXXFLAGS = ["/WX"])
elif env["PLATFORM"] == "hpux" :
- # HP-UX gives a flood of minor warnings if this is enabled
- #env.Append(CXXFLAGS = ["+w"])
- pass
+ # HP-UX gives a flood of minor warnings if this is enabled
+ #env.Append(CXXFLAGS = ["+w"])
+ pass
elif env["PLATFORM"] == "sunos" :
- #env.Append(CXXFLAGS = ["-z verbose"])
- pass
+ #env.Append(CXXFLAGS = ["-z verbose"])
+ pass
else :
- if "clang" in env["CXX"] :
- env.Append(CXXFLAGS = [
- "-Weverything",
- "-Wno-unknown-warning-option", # To stay compatible between CLang versions
- "-Wno-unknown-pragmas", # To stay compatible between CLang versions
- "-Wno-weak-vtables", # Virtually none of our elements have outlined methods. This also seems to affect classes in .cpp files, which in turn affects all our tests, which may need fixing in CLang
- "-Wno-shadow", # Also warns for shadowing on constructor arguments, which we do a lot
- "-Wno-documentation", # We don't care about documentation warnings
- "-Wno-documentation-unknown-command", # We don't care about documentation warnings
- "-Wno-exit-time-destructors", # Used a lot in e.g. CPPUnit
- "-Wno-c++98-compat-pedantic", # We do different things that violate this, but they could be fixed
- "-Wno-global-constructors", # We depend on this for e.g. string constants
- "-Wno-disabled-macro-expansion", # Caused due to system headers
- "-Wno-c++11-extensions", # We use C++11; turn this off when we use -std=c++11
- "-Wno-long-long", # We use long long
- "-Wno-padded",
- "-Wno-missing-variable-declarations", # Getting rid of CPPUnit warnings
- "-Wno-direct-ivar-access", # Obj-C code warning
- "-Wno-potentially-evaluated-expression", # Caused due to calling shared_ptr::get() inside typeid()
- ])
- else :
- env.Append(CXXFLAGS = ["-Wextra", "-Wall", "-Wnon-virtual-dtor", "-Wundef", "-Wold-style-cast", "-Wno-long-long", "-Woverloaded-virtual", "-Wfloat-equal", "-Wredundant-decls", "-Wno-unknown-pragmas"])
- gccVersion = env.get("CCVERSION", "0.0.0").split(".")
- if gccVersion >= ["4", "5", "0"] and not "clang" in env["CC"] :
- env.Append(CXXFLAGS = ["-Wlogical-op"])
- if not env.get("allow_warnings", False) :
- env.Append(CXXFLAGS = ["-Werror"])
+ if os.path.basename(env["CXX"]) in ["clang", "clang++"] :
+ env.Append(CXXFLAGS = [
+ "-Weverything",
+ "-Wno-unknown-warning-option", # To stay compatible between CLang versions
+ "-Wno-unknown-pragmas", # To stay compatible between CLang versions
+ "-Wno-weak-vtables", # Virtually none of our elements have outlined methods. This also seems to affect classes in .cpp files, which in turn affects all our tests, which may need fixing in CLang
+ "-Wno-shadow", # Also warns for shadowing on constructor arguments, which we do a lot
+ "-Wno-documentation", # We don't care about documentation warnings
+ "-Wno-documentation-unknown-command", # We don't care about documentation warnings
+ "-Wno-exit-time-destructors", # Used a lot in e.g. CPPUnit
+ "-Wno-c++98-compat-pedantic", # We do different things that violate this, but they could be fixed
+ "-Wno-global-constructors", # We depend on this for e.g. string constants
+ "-Wno-disabled-macro-expansion", # Caused due to system headers
+ "-Wno-long-long", # We use long long
+ "-Wno-padded",
+ "-Wno-missing-variable-declarations", # Getting rid of CPPUnit warnings
+ "-Wno-direct-ivar-access", # Obj-C code warning
+ "-Wno-potentially-evaluated-expression", # Caused due to calling shared_ptr::get() inside typeid()
+ ])
+ else :
+ env.Append(CXXFLAGS = ["-Wextra", "-Wall", "-Wnon-virtual-dtor", "-Wundef", "-Wold-style-cast", "-Wno-long-long", "-Woverloaded-virtual", "-Wfloat-equal", "-Wredundant-decls", "-Wno-unknown-pragmas"])
+ gccVersion = env.get("CCVERSION", "0.0.0").split(".")
+ if gccVersion >= ["4", "5", "0"] and not "clang" in env["CC"] :
+ env.Append(CXXFLAGS = ["-Wlogical-op"])
+ if not env.get("allow_warnings", False) :
+ env.Append(CXXFLAGS = ["-Werror"])
if env.get("coverage", 0) :
- assert(env["PLATFORM"] != "win32")
- env.Append(CCFLAGS = ["-fprofile-arcs", "-ftest-coverage"])
- env.Append(LINKFLAGS = ["-fprofile-arcs", "-ftest-coverage"])
+ assert(env["PLATFORM"] != "win32")
+ env.Append(CCFLAGS = ["-fprofile-arcs", "-ftest-coverage"])
+ env.Append(LINKFLAGS = ["-fprofile-arcs", "-ftest-coverage"])
if env["PLATFORM"] == "win32" :
- env.Append(LIBS = ["user32", "crypt32", "dnsapi", "iphlpapi", "ws2_32", "wsock32", "Advapi32", "ntdsapi"])
- env.Append(CCFLAGS = ["/EHsc", "/nologo", "/Zm256"])
- env.Append(LINKFLAGS = ["/INCREMENTAL:no", "/NOLOGO"])
- if int(env["MSVS_VERSION"].split(".")[0]) < 10 :
- mt = env.get('mt')
- if not mt:
- mt = 'mt.exe'
- env["LINKCOM"] = [env["LINKCOM"], '%s -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;1' % mt]
- env["SHLINKCOM"] = [env["SHLINKCOM"], '%s -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;2' % mt]
+ env.Append(LIBS = ["user32", "crypt32", "dnsapi", "iphlpapi", "ws2_32", "wsock32", "Advapi32", "ntdsapi"])
+ env.Append(CCFLAGS = ["/EHsc", "/nologo", "/Zm256"])
+ env.Append(LINKFLAGS = ["/INCREMENTAL:no", "/NOLOGO"])
+ if int(env["MSVS_VERSION"].split(".")[0]) < 10 :
+ mt = env.get('mt')
+ if not mt:
+ mt = 'mt.exe'
+ env["LINKCOM"] = [env["LINKCOM"], '%s -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;1' % mt]
+ env["SHLINKCOM"] = [env["SHLINKCOM"], '%s -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;2' % mt]
if env["PLATFORM"] == "darwin" and not env["target"] in ["iphone-device", "iphone-simulator", "xcode", "android"] :
- env["PLATFORM_FLAGS"]["FRAMEWORKS"] += ["IOKit", "AppKit", "SystemConfiguration", "Security", "SecurityInterface"]
+ env["PLATFORM_FLAGS"]["FRAMEWORKS"] += ["IOKit", "AppKit", "SystemConfiguration", "Security", "SecurityInterface"]
# Required by boost headers on HP-UX
if env["PLATFORM"] == "hpux" :
- env.Append(CXXFLAGS = ["+hpxstd98", "-mt", "-AA"])
- # FIXME: Need -AA for linking C++ but not C
- #env.Append(LINKFLAGS = ["-AA"])
+ env.Append(CXXFLAGS = ["+hpxstd98", "-mt", "-AA"])
+ # FIXME: Need -AA for linking C++ but not C
+ #env.Append(LINKFLAGS = ["-AA"])
+# Code signing
+if env["PLATFORM"] == "darwin" :
+ env["CODE_SIGN_IDENTITY"] = env["codesign_identity"]
+if env["PLATFORM"] == "win32" :
+ env["SIGNTOOL_KEY_PFX"] = env.get("signtool_key_pfx", None)
+ env["SIGNTOOL_TIMESTAMP_URL"] = env.get("signtool_timestamp_url", None)
# Testing
env["TEST_TYPE"] = env["test"]
if "check" in ARGUMENTS :
- env["TEST_TYPE"] = "unit"
+ env["TEST_TYPE"] = "unit"
env["checker_report"] = ARGUMENTS.get("checker_report", False)
env["TEST"] = (env["TEST_TYPE"] != "none") or env.GetOption("clean")
if env.get("valgrind", 0) :
- env["TEST_RUNNER"] = "valgrind --suppressions=QA/valgrind.supp -q --leak-check=full --track-origins=yes "
+ env["TEST_RUNNER"] = "valgrind --suppressions=QA/valgrind.supp -q --leak-check=full --track-origins=yes "
env["TEST_IGNORE_RESULT"] = "ignore_test_result" in ARGUMENTS
env["TEST_CREATE_LIBRARIES"] = "create_test_libraries" in ARGUMENTS
# Packaging
env["DIST"] = "dist" in ARGUMENTS or env.GetOption("clean")
for path in ["SWIFT_INSTALLDIR", "SWIFTEN_INSTALLDIR", "SLUIFT_INSTALLDIR"] :
- if ARGUMENTS.get(path, "") :
- if os.path.isabs(ARGUMENTS[path]) :
- env[path] = Dir(ARGUMENTS[path]).abspath
- else :
- env[path] = Dir("#/" + ARGUMENTS[path]).abspath
+ if ARGUMENTS.get(path, "") :
+ if os.path.isabs(ARGUMENTS[path]) :
+ env[path] = Dir(ARGUMENTS[path]).abspath
+ else :
+ env[path] = Dir("#/" + ARGUMENTS[path]).abspath
################################################################################
@@ -391,70 +423,77 @@ for path in ["SWIFT_INSTALLDIR", "SWIFTEN_INSTALLDIR", "SLUIFT_INSTALLDIR"] :
target = env["target"]
if target in ["iphone-device", "iphone-simulator", "xcode"] :
- # Extract/initialize all the information we need
- if target == "xcode" :
- # Get the information from the XCode environment
- env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = os.environ["PLATFORM_DEVELOPER_BIN_DIR"]
- env["XCODE_SDKROOT"] = os.environ["SDKROOT"]
- env["XCODE_ARCH_FLAGS"] = sum([["-arch", arch] for arch in os.environ["ARCHS"].split(" ")], [])
- env["IPHONEOS_DEPLOYMENT_TARGET"] = os.environ["IPHONEOS_DEPLOYMENT_TARGET"]
- # Use absolute path sources so Xcode can highlight compilation errors in swiften
- env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM ${SOURCES.abspath}'
- else :
- # Hard code values
- env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = "/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin"
- if target == "iphone-device":
- env["XCODE_ARCH_FLAGS"] = ["-arch", "armv6", "-arch", "armv7"]
- sdkPart = "iPhoneOS"
- else :
- env["XCODE_ARCH_FLAGS"] = ["-arch", "i386"]
- sdkPart = "iPhoneSimulator"
- sdkVer = "6.0"
- env["XCODE_SDKROOT"] = "/Applications/Xcode.app/Contents/Developer/Platforms/" + sdkPart + ".platform/Developer/SDKs/" + sdkPart + sdkVer + ".sdk"
- env["IPHONEOS_DEPLOYMENT_TARGET"] = "4.1"
-
- # Set the build flags
- env["CC"] = os.environ["DEVELOPER_BIN_DIR"] + "/gcc"
- env["CXX"] = os.environ["DEVELOPER_BIN_DIR"] + "/g++"
- env["OBJCCFLAGS"] = ["-fobjc-abi-version=2", "-fobjc-legacy-dispatch"]
- env["LD"] = env["CC"]
- env.Append(CCFLAGS = env["XCODE_ARCH_FLAGS"] + ["-fvisibility=hidden", "-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"]])
- env.Append(LINKFLAGS = "-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"])
- if os.environ.get("GCC_THUMB_SUPPORT", False) :
- env.Append(CCFLAGS = ["-mthumb"])
- env.Append(LINKFLAGS = env["XCODE_ARCH_FLAGS"])
- env.Append(CPPFLAGS = ["-isysroot", "$XCODE_SDKROOT"])
- env.Append(FRAMEWORKS = ["CoreFoundation", "Foundation", "UIKit", "CoreGraphics"])
- env.Append(LINKFLAGS = env["XCODE_ARCH_FLAGS"] + ["-isysroot", "$XCODE_SDKROOT", "-L\"$XCODE_SDKROOT/usr/lib\"", "-F\"$XCODE_SDKROOT/System/Library/Frameworks\"", "-F\"$XCODE_SDKROOT/System/Library/PrivateFrameworks\""])
- # Bit of a hack, because BOOST doesn't know the endianness for ARM
- env.Append(CPPDEFINES = ["_LITTLE_ENDIAN"])
+ # Extract/initialize all the information we need
+ if target == "xcode" :
+ # Get the information from the XCode environment
+ env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = os.environ["PLATFORM_DEVELOPER_BIN_DIR"]
+ env["XCODE_SDKROOT"] = os.environ["SDKROOT"]
+ env["XCODE_ARCH_FLAGS"] = sum([["-arch", arch] for arch in os.environ["ARCHS"].split(" ")], [])
+ env["IPHONEOS_DEPLOYMENT_TARGET"] = os.environ["IPHONEOS_DEPLOYMENT_TARGET"]
+ # Use absolute path sources so Xcode can highlight compilation errors in swiften
+ env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM ${SOURCES.abspath}'
+ else :
+ # Hard code values
+ env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = "/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin"
+ if target == "iphone-device":
+ env["XCODE_ARCH_FLAGS"] = ["-arch", "armv6", "-arch", "armv7"]
+ sdkPart = "iPhoneOS"
+ else :
+ env["XCODE_ARCH_FLAGS"] = ["-arch", "i386"]
+ sdkPart = "iPhoneSimulator"
+ sdkVer = "6.0"
+ env["XCODE_SDKROOT"] = "/Applications/Xcode.app/Contents/Developer/Platforms/" + sdkPart + ".platform/Developer/SDKs/" + sdkPart + sdkVer + ".sdk"
+ env["IPHONEOS_DEPLOYMENT_TARGET"] = "4.1"
+
+ # Set the build flags
+ env["CC"] = os.environ["DEVELOPER_BIN_DIR"] + "/gcc"
+ env["CXX"] = os.environ["DEVELOPER_BIN_DIR"] + "/g++"
+ env["OBJCCFLAGS"] = ["-fobjc-abi-version=2", "-fobjc-legacy-dispatch"]
+ env["LD"] = env["CC"]
+ env.Append(CCFLAGS = env["XCODE_ARCH_FLAGS"] + ["-fvisibility=hidden", "-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"]])
+ env.Append(LINKFLAGS = "-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"])
+ if os.environ.get("GCC_THUMB_SUPPORT", False) :
+ env.Append(CCFLAGS = ["-mthumb"])
+ env.Append(LINKFLAGS = env["XCODE_ARCH_FLAGS"])
+ env.Append(CPPFLAGS = ["-isysroot", "$XCODE_SDKROOT"])
+ env.Append(FRAMEWORKS = ["CoreFoundation", "Foundation", "UIKit", "CoreGraphics"])
+ env.Append(LINKFLAGS = env["XCODE_ARCH_FLAGS"] + ["-isysroot", "$XCODE_SDKROOT", "-L\"$XCODE_SDKROOT/usr/lib\"", "-F\"$XCODE_SDKROOT/System/Library/Frameworks\"", "-F\"$XCODE_SDKROOT/System/Library/PrivateFrameworks\""])
+ # Bit of a hack, because BOOST doesn't know the endianness for ARM
+ env.Append(CPPDEFINES = ["_LITTLE_ENDIAN"])
################################################################################
# Android
################################################################################
if target in ["android"] :
- env["ENV"]["PATH"] = env["android_toolchain"] + "/bin:" + env["ENV"]["PATH"]
- env["CC"] = "arm-linux-androideabi-gcc"
- env["CXX"] = "arm-linux-androideabi-g++"
- env["AR"] = "arm-linux-androideabi-ar"
- env["RANLIB"] = "arm-linux-androideabi-ranlib"
- env.Append(CPPDEFINES = ["ANDROID"])
- env.Append(CPPDEFINES = ["_REENTRANT", "_GLIBCXX__PTHREADS"])
+ env["ENV"]["PATH"] = env["android_toolchain"] + "/bin:" + env["ENV"]["PATH"]
+ env["CC"] = "arm-linux-androideabi-gcc"
+ env["CXX"] = "arm-linux-androideabi-g++"
+ env["AR"] = "arm-linux-androideabi-ar"
+ env["RANLIB"] = "arm-linux-androideabi-ranlib"
+ env.Append(CPPDEFINES = ["ANDROID"])
+ env.Append(CPPDEFINES = ["_REENTRANT", "_GLIBCXX__PTHREADS"])
# CCache
if env.get("ccache", False) :
- env["ENV"]["HOME"] = os.environ["HOME"]
- for var in os.environ :
- if var.startswith("CCACHE_") :
- env["ENV"][var] = os.environ[var]
- if env.get("CC", "") != "" :
- env["CC"] = "ccache " + env["CC"]
- else :
- env["CC"] = "ccache gcc"
- if env.get("CXX", "") != "" :
- env["CXX"] = "ccache " + env["CXX"]
- else :
- env["CC"] = "ccache g++"
+ env["ENV"]["HOME"] = os.environ["HOME"]
+ for var in os.environ :
+ if var.startswith("CCACHE_") :
+ env["ENV"][var] = os.environ[var]
+ if env.get("CC", "") != "" :
+ env["CC"] = "ccache " + env["CC"]
+ else :
+ env["CC"] = "ccache gcc"
+ if env.get("CXX", "") != "" :
+ env["CXX"] = "ccache " + env["CXX"]
+ else :
+ env["CC"] = "ccache g++"
+
+# distcc
+if env.get("distcc", False) :
+ env["ENV"]["HOME"] = os.environ["HOME"]
+ for var in os.environ :
+ if var.startswith("DISTCC_") :
+ env["ENV"][var] = os.environ[var]
conf_env = env.Clone()
@@ -463,12 +502,12 @@ Export("conf_env")
variant = ""
if env["enable_variants"] :
- fingerprint = ",".join([flag for flag in env["CXXFLAGS"] + env["CCFLAGS"] if not flag.startswith("-W") and not flag.startswith("-fvisibility")])
- variant = "build/" + fingerprint
- if not os.path.exists(Dir("#/build").abspath) :
- os.mkdir(Dir("#/build").abspath)
- if os.path.exists(Dir("#/build/current").abspath) :
- os.unlink(Dir("#/build/current").abspath)
- os.symlink(os.path.basename(variant), Dir("#/build/current").abspath)
+ fingerprint = ",".join([flag for flag in env["CXXFLAGS"] + env["CCFLAGS"] if not flag.startswith("-W") and not flag.startswith("-fvisibility")])
+ variant = "build/" + fingerprint
+ if not os.path.exists(Dir("#/build").abspath) :
+ os.mkdir(Dir("#/build").abspath)
+ if os.path.exists(Dir("#/build/current").abspath) :
+ os.unlink(Dir("#/build/current").abspath)
+ os.symlink(os.path.basename(variant), Dir("#/build/current").abspath)
Return("variant")
diff --git a/BuildTools/SCons/SConstruct b/BuildTools/SCons/SConstruct
index de24728..c084cff 100644
--- a/BuildTools/SCons/SConstruct
+++ b/BuildTools/SCons/SConstruct
@@ -8,8 +8,8 @@ root = Dir("../..").abspath
# Override SConscript to handle tests
oldSConscript = SConscript
def SConscript(*arguments, **keywords) :
- if not keywords.get("test_only", False) or env["TEST"] :
- return apply(oldSConscript, arguments, keywords)
+ if not keywords.get("test_only", False) or env["TEST"] :
+ return apply(oldSConscript, arguments, keywords)
env.SConscript = SConscript
################################################################################
@@ -21,313 +21,352 @@ env.SConscript = SConscript
################################################################################
#if env["PLATFORM"] == "win32" :
-# env["MSVC_BATCH"] = 1
+# env["MSVC_BATCH"] = 1
# Pretty output
def colorize(command, target, color) :
- colors = { "red": "31", "green": "32", "yellow": "33", "blue": "34" }
- prefix = ""
- suffix = ""
- if sys.stdout.isatty() and env["PLATFORM"] != "win32":
- prefix = "\033[0;" + colors[color] + ";140m"
- suffix = "\033[0m"
- return " " + prefix + command + suffix + " " + target
+ colors = { "red": "31", "green": "32", "yellow": "33", "blue": "34" }
+ prefix = ""
+ suffix = ""
+ if sys.stdout.isatty() and env["PLATFORM"] != "win32":
+ prefix = "\033[0;" + colors[color] + ";140m"
+ suffix = "\033[0m"
+ return " " + prefix + command + suffix + " " + target
if int(ARGUMENTS.get("V", 0)) == 0 and not ARGUMENTS.get("dump_trace", False) :
- env["CCCOMSTR"] = colorize("CC", "$TARGET", "green")
- env["SHCCCOMSTR"] = colorize("CC", "$TARGET", "green")
- env["CXXCOMSTR"] = colorize("CXX", "$TARGET", "green")
- env["SHCXXCOMSTR"] = colorize("CXX", "$TARGET", "green")
- env["LINKCOMSTR"] = colorize("LINK", "$TARGET", "red")
- env["SHLINKCOMSTR"] = colorize("LINK", "$TARGET", "red")
- env["ARCOMSTR"] = colorize("AR", "$TARGET", "red")
- env["RANLIBCOMSTR"] = colorize("RANLIB", "$TARGET", "red")
- env["PCHCOMSTR"] = colorize("PCH", "$TARGET", "blue")
- env["QT4_RCCCOMSTR"] = colorize("RCC", "$TARGET", "blue")
- env["QT4_UICCOMSTR"] = colorize("UIC", "$TARGET", "blue")
- env["QT4_MOCFROMHCOMSTR"] = colorize("MOC", "$TARGET", "blue")
- env["QT4_MOCFROMCXXCOMSTR"] = colorize("MOC", "$TARGET", "blue")
- env["QT4_LRELEASECOMSTR"] = colorize("LRELEASE", "$TARGET", "blue")
- env["QT4_LUPDATECOMSTR"] = colorize("LUPDATE", "$TARGET", "blue")
- env["GENCOMSTR"] = colorize("GEN", "$TARGET", "blue")
- env["RCCOMSTR"] = colorize("RC", "$TARGET", "blue")
- env["BUNDLECOMSTR"] = colorize("BUNDLE", "$TARGET", "blue")
- env["NIBCOMSTR"] = colorize("NIB", "$TARGET", "blue")
- env["NSISCOMSTR"] = colorize("NSIS", "$TARGET", "blue")
- env["INSTALLSTR"] = colorize("INSTALL", "$TARGET", "blue")
- env["TESTCOMSTR"] = colorize("TEST", "$SOURCE", "yellow")
- env["FOCOMSTR"] = colorize("FO", "$TARGET", "blue")
- env["XSLTCOMSTR"] = colorize("XSLT", "$TARGET", "blue")
- env["XMLLINTCOMSTR"] = colorize("XMLLINT", "$SOURCE", "blue")
- env["DOXYCOMSTR"] = colorize("DOXY", "$SOURCE", "blue")
- #Progress(colorize("DEP", "$TARGET", "red")
+ env["CCCOMSTR"] = colorize("CC", "$TARGET", "green")
+ env["SHCCCOMSTR"] = colorize("CC", "$TARGET", "green")
+ env["CXXCOMSTR"] = colorize("CXX", "$TARGET", "green")
+ env["SHCXXCOMSTR"] = colorize("CXX", "$TARGET", "green")
+ env["LINKCOMSTR"] = colorize("LINK", "$TARGET", "red")
+ env["SHLINKCOMSTR"] = colorize("LINK", "$TARGET", "red")
+ env["ARCOMSTR"] = colorize("AR", "$TARGET", "red")
+ env["RANLIBCOMSTR"] = colorize("RANLIB", "$TARGET", "red")
+ env["PCHCOMSTR"] = colorize("PCH", "$TARGET", "blue")
+ env["QT4_RCCCOMSTR"] = colorize("RCC", "$TARGET", "blue")
+ env["QT4_UICCOMSTR"] = colorize("UIC", "$TARGET", "blue")
+ env["QT4_MOCFROMHCOMSTR"] = colorize("MOC", "$TARGET", "blue")
+ env["QT4_MOCFROMCXXCOMSTR"] = colorize("MOC", "$TARGET", "blue")
+ env["QT4_LRELEASECOMSTR"] = colorize("LRELEASE", "$TARGET", "blue")
+ env["QT4_LUPDATECOMSTR"] = colorize("LUPDATE", "$TARGET", "blue")
+ env["GENCOMSTR"] = colorize("GEN", "$TARGET", "blue")
+ env["RCCOMSTR"] = colorize("RC", "$TARGET", "blue")
+ env["BUNDLECOMSTR"] = colorize("BUNDLE", "$TARGET", "blue")
+ env["NIBCOMSTR"] = colorize("NIB", "$TARGET", "blue")
+ env["NSISCOMSTR"] = colorize("NSIS", "$TARGET", "blue")
+ env["INSTALLSTR"] = colorize("INSTALL", "$TARGET", "blue")
+ env["TESTCOMSTR"] = colorize("TEST", "$SOURCE", "yellow")
+ env["FOCOMSTR"] = colorize("FO", "$TARGET", "blue")
+ env["XSLTCOMSTR"] = colorize("XSLT", "$TARGET", "blue")
+ env["XMLLINTCOMSTR"] = colorize("XMLLINT", "$SOURCE", "blue")
+ env["DOXYCOMSTR"] = colorize("DOXY", "$SOURCE", "blue")
+ #Progress(colorize("DEP", "$TARGET", "red")
def checkObjCHeader(context, header) :
- context.Message("Checking for Objective-C header " + header + " ... ")
- ret = context.TryCompile("#include <Cocoa/Cocoa.h>\n#include <" + header + ">", ".m")
- context.Result(ret)
- return ret
+ context.Message("Checking for Objective-C header " + header + " ... ")
+ ret = context.TryCompile("#include <Cocoa/Cocoa.h>\n#include <" + header + ">", ".m")
+ context.Result(ret)
+ return ret
+
+def checkForCpp11Support(context) :
+ context.Message('Checking whether the C++ compiler supports C++11... ')
+ result = context.TryLink(
+ """
+#include <memory>
+
+int main(int, char **) {
+ // shared_ptr test
+ std::shared_ptr<int> intPtr = std::make_shared<int>();
+
+ // unique_ptr test
+ std::unique_ptr<int> intPtrUnique = std::unique_ptr<int>(new int(1));
+
+ // auto test
+ auto otherIntPtr = intPtr;
+ std::shared_ptr<int> fooIntPtr = otherIntPtr;
+
+ // lambda test
+ auto someFunction = [](int i){ i = i * i; };
+ someFunction(2);
+
+ // nullptr test
+ double* fooDouble = nullptr;
+ double bazDouble = 8.0;
+ fooDouble = &bazDouble;
+ bazDouble = *fooDouble;
+
+ return 0;
+}
+""", '.cpp')
+ context.Result(result)
+ return result
+
################################################################################
# Platform configuration
################################################################################
if ARGUMENTS.get("force-configure", 0) :
- SCons.SConf.SetCacheMode("force")
+ SCons.SConf.SetCacheMode("force")
def CheckPKG(context, name):
- context.Message( 'Checking for package %s... ' % name )
- ret = context.TryAction('pkg-config --exists \'%s\'' % name)[0]
- context.Result( ret )
- return ret
+ context.Message( 'Checking for package %s... ' % name )
+ ret = context.TryAction('pkg-config --exists \'%s\'' % name)[0]
+ context.Result( ret )
+ return ret
def CheckVersion(context, library, version, define, header, value) :
- context.Message("Checking " + library + " version (>= " + version + ") ...")
- version = GetVersion(context, define, header)
- ok = version >= value
- context.Result(ok)
- return ok
+ context.Message("Checking " + library + " version (>= " + version + ") ...")
+ version = GetVersion(context, define, header)
+ ok = version >= value
+ context.Result(ok)
+ return ok
def GetVersion(context, define, header, extension = ".c") :
- ret = context.TryRun("""
+ ret = context.TryRun("""
#include <%(header)s>
#include <stdio.h>
int main(int argc, char* argv[]) {
- printf("%%d\\n", %(define)s);
- return 0;
+ printf("%%d\\n", %(define)s);
+ return 0;
}
""" % { "header" : header, "define": define }, extension)
- if ret[0] :
- return int(ret[1])
- else :
- return -1
-
+ if ret[0] :
+ return int(ret[1])
+ else :
+ return -1
-conf = Configure(conf_env)
+conf = Configure(conf_env, custom_tests = {
+ 'CheckCpp11Support' : checkForCpp11Support,
+ })
if not conf.CheckCXX() or not conf.CheckCC() :
- print "Error: You need a working compiler"
- Exit(1)
+ print "Error: You need a working compiler"
+ Exit(1)
+
+if not conf.CheckCpp11Support() :
+ print "Error: You need a compiler with support for the C++11 standard"
+ Exit(1)
+
env["HAVE_ZLIB"] = True
zlib_flags = {}
zlib_okay = False
if env.get("zlib_libdir", None) :
- zlib_flags["LIBPATH"] = [env["zlib_libdir"]]
- zlib_okay = True
+ zlib_flags["LIBPATH"] = [env["zlib_libdir"]]
+ zlib_okay = True
if env.get("zlib_includedir", None) :
- zlib_flags["CPPPATH"] = [env["zlib_includedir"]]
- zlib_okay = True
+ zlib_flags["CPPPATH"] = [env["zlib_includedir"]]
+ zlib_okay = True
if env.get("zlib_libfile", None) :
- zlib_flags["LIBS"] = [File(env["zlib_libfile"])]
- zlib_okay = True
+ zlib_flags["LIBS"] = [File(env["zlib_libfile"])]
+ zlib_okay = True
elif zlib_okay :
- zlib_flags["LIBS"] = ["z"]
+ zlib_flags["LIBS"] = ["z"]
if (not zlib_okay) and conf.CheckLib("z") :
- zlib_flags["LIBS"] = ["z"]
- zlib_okay = True
+ zlib_flags["LIBS"] = ["z"]
+ zlib_okay = True
if zlib_okay :
- env["ZLIB_FLAGS"] = zlib_flags
+ env["ZLIB_FLAGS"] = zlib_flags
elif not env.get("zlib_bundled_enable", True) :
- print "Error: Zlib not found and zlib_bundled_enable is false"
- Exit(1)
+ print "Error: Zlib not found and zlib_bundled_enable is false"
+ Exit(1)
else :
- env["ZLIB_BUNDLED"] = True
+ env["ZLIB_BUNDLED"] = True
if conf.CheckLib("resolv") :
- env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["resolv"]
+ env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["resolv"]
if env["PLATFORM"] != "win32" :
- if conf.CheckLib("pthread") :
- env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["pthread"]
+ if conf.CheckLib("pthread") :
+ env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["pthread"]
if conf.CheckLib("dl") :
- env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["dl"]
+ env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["dl"]
if conf.CheckLib("m") :
- env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["m"]
+ env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["m"]
if conf.CheckLib("c") :
- env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["c"]
+ env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["c"]
# Even if you find stdc++ on HP-UX, it is the wrong one for aCC
if env["PLATFORM"] != "hpux" :
- if conf.CheckLib("stdc++", language='CXX') :
- env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["stdc++"]
+ if conf.CheckLib("stdc++", language='CXX') :
+ env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["stdc++"]
conf.Finish()
# Boost
boost_conf_env = conf_env.Clone()
boost_flags = {}
if env.get("boost_libdir", None) :
- boost_flags["LIBPATH"] = [env["boost_libdir"]]
+ boost_flags["LIBPATH"] = [env["boost_libdir"]]
if env.get("boost_includedir", None) :
- if env["PLATFORM"] == "win32" or env["PLATFORM"] == "hpux" or env["PLATFORM"] == "sunos" :
- boost_flags["CPPPATH"] = [env["boost_includedir"]]
- else :
- # Using isystem to avoid getting warnings from a system boost
- # Unfortunately, this also disables dependency tracking
- boost_flags["CPPFLAGS"] = [("-isystem", env["boost_includedir"])]
+ if env["PLATFORM"] == "win32" or env["PLATFORM"] == "hpux" or env["PLATFORM"] == "sunos" :
+ boost_flags["CPPPATH"] = [env["boost_includedir"]]
+ else :
+ # Using isystem to avoid getting warnings from a system boost
+ # Unfortunately, this also disables dependency tracking
+ boost_flags["CPPFLAGS"] = [("-isystem", env["boost_includedir"])]
boost_conf_env.MergeFlags(boost_flags)
conf = Configure(boost_conf_env)
-boostLibs = [("signals", None), ("system", "system/system_error.hpp"), ("thread", None), ("regex", None), ("program_options", None), ("filesystem", None), ("serialization", "archive/text_oarchive.hpp"), ("date_time", "date_time/date.hpp")]
+boostLibs = [(None, "signals2.hpp"), ("system", "system/system_error.hpp"), ("thread", None), ("regex", None), ("program_options", None), ("filesystem", None), ("serialization", "archive/text_oarchive.hpp"), ("date_time", "date_time/date.hpp")]
allLibsPresent = True
libNames = []
for (lib, header) in boostLibs :
- if header :
- header = "boost/" + header
- else :
- header = "boost/" + lib + ".hpp"
- if not conf.CheckCXXHeader(header) :
- allLibsPresent = False
- break
- if env["PLATFORM"] != "win32" :
- libName = "boost_" + lib
- if not conf.CheckLib(libName, language='CXX') :
- libName += "-mt"
- if not conf.CheckLib(libName, language='CXX') :
- allLibsPresent = False
- break
- libNames.append(libName)
-if allLibsPresent :
- boost_flags["CPPDEFINES"] = ["BOOST_SIGNALS_NO_DEPRECATION_WARNING"]
- env["BOOST_FLAGS"] = boost_flags
- if env["PLATFORM"] != "win32" :
- env["BOOST_FLAGS"].update({"LIBS": libNames})
- if not conf.CheckCXXHeader("boost/uuid/uuid.hpp") :
- # FIXME: Remove this workaround when UUID is available in most distros
- env["BOOST_BUNDLED_UUID_ONLY"] = True
- env["BOOST_FLAGS"]["CPPDEFINES"] = ["BOOST_SIGNALS_NO_DEPRECATION_WARNING"]
+ if header :
+ header = "boost/" + header
+ else :
+ header = "boost/" + lib + ".hpp"
+ if not conf.CheckCXXHeader(header) :
+ allLibsPresent = False
+ break
+ if lib and env["PLATFORM"] != "win32" :
+ libName = "boost_" + lib
+ if not conf.CheckLib(libName, language='CXX') :
+ libName += "-mt"
+ if not conf.CheckLib(libName, language='CXX') :
+ allLibsPresent = False
+ break
+ libNames.append(libName)
+if not env.get("boost_force_bundled") and allLibsPresent :
+ env["BOOST_FLAGS"] = boost_flags
+ if env["PLATFORM"] != "win32" :
+ env["BOOST_FLAGS"].update({"LIBS": libNames})
+ if not conf.CheckCXXHeader("boost/uuid/uuid.hpp") :
+ # FIXME: Remove this workaround when UUID is available in most distros
+ env["BOOST_BUNDLED_UUID_ONLY"] = True
elif not env.get("boost_bundled_enable", True) :
- print "Error: Boost not found and boost_bundled_enable is false"
- Exit(1)
+ print "Error: Boost not found and boost_bundled_enable is false"
+ Exit(1)
else :
- env["BOOST_BUNDLED"] = True
+ env["BOOST_BUNDLED"] = True
conf.Finish()
# Xss
env["HAVE_XSS"] = 0
if env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" :
- xss_flags = {
- "LIBPATH": ["/usr/X11R6/lib"],
- "LIBS": ["Xss"]
- }
- xss_env = conf_env.Clone()
- xss_env.MergeFlags(xss_flags)
- conf = Configure(xss_env)
- if conf.CheckFunc("XScreenSaverQueryExtension") :
- env["HAVE_XSS"] = 1
- env["XSS_FLAGS"] = xss_flags
- conf.Finish()
+ xss_flags = {
+ "LIBPATH": ["/usr/X11R6/lib"],
+ "LIBS": ["Xss"]
+ }
+ xss_env = conf_env.Clone()
+ xss_env.MergeFlags(xss_flags)
+ conf = Configure(xss_env)
+ if conf.CheckFunc("XScreenSaverQueryExtension") :
+ env["HAVE_XSS"] = 1
+ env["XSS_FLAGS"] = xss_flags
+ conf.Finish()
# GConf
env["HAVE_GCONF"] = 0
if env.get("try_gconf", True) and env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" :
- gconf_env = conf_env.Clone()
- conf = Configure(gconf_env, custom_tests = {"CheckPKG": CheckPKG})
- if conf.CheckPKG("gconf-2.0") :
- gconf_bare_env = Environment()
- gconf_bare_env.ParseConfig('pkg-config --cflags gconf-2.0 gobject-2.0 --libs gconf-2.0 gobject-2.0')
- gconf_flags = {
- "LIBS": gconf_bare_env["LIBS"],
- "CCFLAGS": gconf_bare_env["CCFLAGS"],
- "CPPPATH": gconf_bare_env["CPPPATH"],
- "CPPDEFINES": gconf_bare_env.get("CPPDEFINES", []),
- }
- gconf_env.MergeFlags(gconf_flags)
- if conf.CheckCHeader("gconf/gconf-client.h") and conf.CheckLib("gconf-2") :
- env["HAVE_GCONF"] = 1
- env["GCONF_FLAGS"] = {
- "LIBS": gconf_env["LIBS"],
- "CCFLAGS": gconf_env["CCFLAGS"],
- "CPPPATH": gconf_env["CPPPATH"],
- "CPPDEFINES": gconf_env.get("CPPDEFINES", []),
- }
- conf.Finish()
+ gconf_env = conf_env.Clone()
+ conf = Configure(gconf_env, custom_tests = {"CheckPKG": CheckPKG})
+ if conf.CheckPKG("gconf-2.0") :
+ gconf_bare_env = Environment()
+ gconf_bare_env.ParseConfig('pkg-config --cflags gconf-2.0 gobject-2.0 --libs gconf-2.0 gobject-2.0')
+ gconf_flags = {
+ "LIBS": gconf_bare_env["LIBS"],
+ "CCFLAGS": gconf_bare_env["CCFLAGS"],
+ "CPPPATH": gconf_bare_env["CPPPATH"],
+ "CPPDEFINES": gconf_bare_env.get("CPPDEFINES", []),
+ }
+ gconf_env.MergeFlags(gconf_flags)
+ if conf.CheckCHeader("gconf/gconf-client.h") and conf.CheckLib("gconf-2") :
+ env["HAVE_GCONF"] = 1
+ env["GCONF_FLAGS"] = {
+ "LIBS": gconf_env["LIBS"],
+ "CCFLAGS": gconf_env["CCFLAGS"],
+ "CPPPATH": gconf_env["CPPPATH"],
+ "CPPDEFINES": gconf_env.get("CPPDEFINES", []),
+ }
+ conf.Finish()
# Sparkle
env["HAVE_SPARKLE"] = 0
if env["PLATFORM"] == "darwin" :
- sparkle_flags = {
- "FRAMEWORKPATH": ["/Library/Frameworks"],
- "FRAMEWORKS": ["Sparkle"]
- }
- sparkle_env = conf_env.Clone()
- sparkle_env.MergeFlags(sparkle_flags)
- conf = Configure(sparkle_env, custom_tests = { "CheckObjCHeader" : checkObjCHeader })
- if conf.CheckObjCHeader("Sparkle/Sparkle.h") :
- env["HAVE_SPARKLE"] = 1
- env["SPARKLE_FLAGS"] = sparkle_flags
- env["SPARKLE_FRAMEWORK"] = "/Library/Frameworks/Sparkle.framework"
- conf.Finish()
+ sparkle_flags = {
+ "FRAMEWORKPATH": ["3rdParty/Sparkle/Sparkle-1.14.0"],
+ "FRAMEWORKS": ["Sparkle"]
+ }
+ sparkle_env = conf_env.Clone()
+ sparkle_env.MergeFlags(sparkle_flags)
+ conf = Configure(sparkle_env, custom_tests = { "CheckObjCHeader" : checkObjCHeader })
+ if conf.CheckObjCHeader("Sparkle/Sparkle.h") :
+ env["HAVE_SPARKLE"] = 1
+ env["SPARKLE_FLAGS"] = sparkle_flags
+ env["SPARKLE_FRAMEWORK"] = Dir("../../3rdParty/Sparkle/Sparkle-1.14.0/Sparkle.framework")
+ conf.Finish()
+
+ if env.get("sparkle_public_dsa_key", None) != None :
+ env["SWIFT_SPARKLE_PUBLIC_DSA_KEY"] = File(env.get("sparkle_public_dsa_key"))
+ else :
+ env["SWIFT_SPARKLE_PUBLIC_DSA_KEY"] = None
# Growl
env["HAVE_GROWL"] = 0
if env["PLATFORM"] == "darwin" :
- growl_flags = {
- "FRAMEWORKPATH": ["/Library/Frameworks"],
- "FRAMEWORKS": ["Growl"]
- }
- growl_env = conf_env.Clone()
- growl_env.MergeFlags(growl_flags)
- conf = Configure(growl_env, custom_tests = { "CheckObjCHeader" : checkObjCHeader })
- if conf.CheckObjCHeader("Growl/Growl.h") :
- env["HAVE_GROWL"] = 1
- env["GROWL_FLAGS"] = growl_flags
- env["GROWL_FRAMEWORK"] = "/Library/Frameworks/Growl.framework"
- conf.Finish()
-
-# Snarl
-if env["PLATFORM"] == "win32" :
- env["HAVE_SNARL"] = True
+ growl_flags = {
+ "FRAMEWORKPATH": ["/Library/Frameworks"],
+ "FRAMEWORKS": ["Growl"]
+ }
+ growl_env = conf_env.Clone()
+ growl_env.MergeFlags(growl_flags)
+ conf = Configure(growl_env, custom_tests = { "CheckObjCHeader" : checkObjCHeader })
+ if conf.CheckObjCHeader("Growl/Growl.h") :
+ env["HAVE_GROWL"] = 1
+ env["GROWL_FLAGS"] = growl_flags
+ env["GROWL_FRAMEWORK"] = "/Library/Frameworks/Growl.framework"
+ conf.Finish()
# LibXML
conf = Configure(conf_env, custom_tests = {"CheckVersion": CheckVersion})
if env.get("try_libxml", True) and conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") :
#and conf.CheckVersion("LibXML", "2.6.23", "LIBXML_VERSION", "libxml/xmlversion.h", 20623) :
- env["HAVE_LIBXML"] = 1
- env["LIBXML_FLAGS"] = { "LIBS": ["xml2"] }
+ env["HAVE_LIBXML"] = 1
+ env["LIBXML_FLAGS"] = { "LIBS": ["xml2"] }
conf.Finish()
if env.get("try_libxml", True) and not env.get("HAVE_LIBXML", 0) :
- libxml_env = conf_env.Clone()
- libxml_env.Append(CPPPATH = ["/usr/include/libxml2"])
- conf = Configure(libxml_env, custom_tests = {"CheckVersion": CheckVersion})
- if conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") :
-# and conf.CheckVersion("LibXML", "2.6.23", "LIBXML_VERSION", "libxml/xmlversion.h", 20623):
- env["HAVE_LIBXML"] = 1
- libxml_env.Append()
- if os.path.basename(env["CC"]) in ("clang", "gcc"):
- env["LIBXML_FLAGS"] = { "CXXFLAGS": ["-isystem/usr/include/libxml2"], "LIBS": ["xml2"] }
- else:
- env["LIBXML_FLAGS"] = { "CPPPATH": ["/usr/include/libxml2"], "LIBS": ["xml2"] }
- conf.Finish()
+ libxml_env = conf_env.Clone()
+ libxml_env.Append(CPPPATH = ["/usr/include/libxml2"])
+ conf = Configure(libxml_env, custom_tests = {"CheckVersion": CheckVersion})
+ if conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") :
+# and conf.CheckVersion("LibXML", "2.6.23", "LIBXML_VERSION", "libxml/xmlversion.h", 20623):
+ env["HAVE_LIBXML"] = 1
+ libxml_env.Append()
+ if os.path.basename(env["CC"]) in ("clang", "gcc"):
+ env["LIBXML_FLAGS"] = { "CXXFLAGS": ["-isystem/usr/include/libxml2"], "LIBS": ["xml2"] }
+ else:
+ env["LIBXML_FLAGS"] = { "CPPPATH": ["/usr/include/libxml2"], "LIBS": ["xml2"] }
+ conf.Finish()
# Expat
if env.get("try_expat", True) and not env.get("HAVE_LIBXML",0) :
- expat_conf_env = conf_env.Clone()
- expat_flags = {}
- if env.get("expat_libdir", None) :
- expat_flags["LIBPATH"] = [env["expat_libdir"]]
- if env.get("expat_includedir", None) :
- expat_flags["CPPPATH"] = [env["expat_includedir"]]
- expat_conf_env.MergeFlags(expat_flags)
- conf = Configure(expat_conf_env)
- if conf.CheckCHeader("expat.h") and conf.CheckLib(env["expat_libname"]) :
- env["HAVE_EXPAT"] = 1
- env["EXPAT_FLAGS"] = { "LIBS": [env["expat_libname"]] }
- env["EXPAT_FLAGS"].update(expat_flags)
- conf.Finish()
+ expat_conf_env = conf_env.Clone()
+ expat_flags = {}
+ if env.get("expat_libdir", None) :
+ expat_flags["LIBPATH"] = [env["expat_libdir"]]
+ if env.get("expat_includedir", None) :
+ expat_flags["CPPPATH"] = [env["expat_includedir"]]
+ expat_conf_env.MergeFlags(expat_flags)
+ conf = Configure(expat_conf_env)
+ if conf.CheckCHeader("expat.h") and conf.CheckLib(env["expat_libname"]) :
+ env["HAVE_EXPAT"] = 1
+ env["EXPAT_FLAGS"] = { "LIBS": [env["expat_libname"]] }
+ env["EXPAT_FLAGS"].update(expat_flags)
+ conf.Finish()
# Bundled expat
bundledExpat = False
if not env.get("HAVE_EXPAT", 0) and not env.get("HAVE_LIBXML", 0) :
- print "Expat or LibXML not found. Using bundled Expat"
- SConscript("#/3rdParty/Expat/SConscript")
- env["HAVE_EXPAT"] = 1
- env["EXPAT_BUNDLED"] = True
+ print "Expat or LibXML not found. Using bundled Expat"
+ SConscript("#/3rdParty/Expat/SConscript")
+ env["HAVE_EXPAT"] = 1
+ env["EXPAT_BUNDLED"] = True
################################################################################
# IDN library
@@ -340,292 +379,294 @@ icu_env = conf_env.Clone()
use_icu = bool(env["icu"])
icu_prefix = ""
if isinstance(env["icu"], str) :
- icu_prefix = env["icu"]
+ icu_prefix = env["icu"]
icu_flags = {}
if icu_prefix :
- icu_flags = { "CPPPATH": [os.path.join(icu_prefix, "include")] }
- icu_flags["LIBPATH"] = [os.path.join(icu_prefix, "lib")]
- icu_env.MergeFlags(icu_flags)
+ icu_flags = { "CPPPATH": [os.path.join(icu_prefix, "include")] }
+ icu_flags["LIBPATH"] = [os.path.join(icu_prefix, "lib")]
+ icu_env.MergeFlags(icu_flags)
icu_conf = Configure(icu_env)
if use_icu and icu_conf.CheckCHeader("unicode/usprep.h") :
- env["HAVE_ICU"] = 1
- env["ICU_FLAGS"] = icu_flags
- env["ICU_FLAGS"]["LIBS"] = ["icuuc"]
+ env["HAVE_ICU"] = 1
+ env["ICU_FLAGS"] = icu_flags
+ env["ICU_FLAGS"]["LIBS"] = ["icuuc"]
icu_conf.Finish()
# LibIDN
libidn_conf_env = conf_env.Clone()
libidn_flags = {}
if env.get("libidn_libdir", None) :
- libidn_flags["LIBPATH"] = [env["libidn_libdir"]]
+ libidn_flags["LIBPATH"] = [env["libidn_libdir"]]
if env.get("libidn_includedir", None) :
- libidn_flags["CPPPATH"] = [env["libidn_includedir"]]
+ libidn_flags["CPPPATH"] = [env["libidn_includedir"]]
libidn_conf_env.MergeFlags(libidn_flags)
conf = Configure(libidn_conf_env)
if env.get("try_libidn", True) and not env.get("HAVE_ICU") and conf.CheckCHeader("idna.h") and conf.CheckLib(env["libidn_libname"]) :
- env["HAVE_LIBIDN"] = 1
- env["LIBIDN_FLAGS"] = { "LIBS": [env["libidn_libname"]] }
- env["LIBIDN_FLAGS"].update(libidn_flags)
+ env["HAVE_LIBIDN"] = 1
+ env["LIBIDN_FLAGS"] = { "LIBS": [env["libidn_libname"]] }
+ env["LIBIDN_FLAGS"].update(libidn_flags)
conf.Finish()
# Fallback to bundled LibIDN
if not env.get("HAVE_ICU", False) and not env.get("HAVE_LIBIDN", False) :
- if env.get("libidn_bundled_enable", True) :
- env["HAVE_LIBIDN"] = 1
- env["LIBIDN_BUNDLED"] = 1
- elif env.get("need_idn", True):
- print "Error: ICU and LIBIDN not found, and libidn_bundled_enable is false"
- Exit(1)
- else:
- print "Proceeding without an IDN library because need_idn was false. This will break all internal binaries"
+ if env.get("libidn_bundled_enable", True) :
+ env["HAVE_LIBIDN"] = 1
+ env["LIBIDN_BUNDLED"] = 1
+ elif env.get("need_idn", True):
+ print "Error: ICU and LIBIDN not found, and libidn_bundled_enable is false"
+ Exit(1)
+ else:
+ print "Proceeding without an IDN library because need_idn was false. This will break all internal binaries"
# Unbound
if env["unbound"] :
- env["LDNS_BUNDLED"] = 1
- env["UNBOUND_BUNDLED"] = 1
+ env["LDNS_BUNDLED"] = 1
+ env["UNBOUND_BUNDLED"] = 1
else :
- env["LDNS_FLAGS"] = {}
- env["UNBOUND_FLAGS"] = {}
+ env["LDNS_FLAGS"] = {}
+ env["UNBOUND_FLAGS"] = {}
# LibMiniUPnPc
if env["experimental_ft"] :
- libminiupnpc_flags = {"CPPPATH": ["/usr/include/miniupnpc/"]}
- libminiupnpc_conf_env = conf_env.Clone()
- if env.get("libminiupnpc_libdir", None) :
- libminiupnpc_flags["LIBPATH"] = [env["libminiupnpc_libdir"]]
- if env.get("libminiupnpc_includedir", None) :
- libminiupnpc_flags["CPPPATH"] = [env["libminiupnpc_includedir"]]
- libminiupnpc_conf_env.MergeFlags(libminiupnpc_flags)
- conf = Configure(libminiupnpc_conf_env)
- if conf.CheckCHeader("miniupnpc.h") and conf.CheckLib(env["libminiupnpc_libname"]) :
- env["HAVE_LIBMINIUPNPC"] = 1
- env["LIBMINIUPNPC_FLAGS"] = { "LIBS": ["miniupnpc"] }
- env["LIBMINIUPNPC_FLAGS"].update(libminiupnpc_flags)
- else :
- env["LIBMINIUPNPC_BUNDLED"] = 1
- conf.Finish()
+ libminiupnpc_flags = {"CPPPATH": ["/usr/include/miniupnpc/"]}
+ libminiupnpc_conf_env = conf_env.Clone()
+ if env.get("libminiupnpc_libdir", None) :
+ libminiupnpc_flags["LIBPATH"] = [env["libminiupnpc_libdir"]]
+ if env.get("libminiupnpc_includedir", None) :
+ libminiupnpc_flags["CPPPATH"] = [env["libminiupnpc_includedir"]]
+ libminiupnpc_conf_env.MergeFlags(libminiupnpc_flags)
+ conf = Configure(libminiupnpc_conf_env)
+ if not env.get("libminiupnpc_force_bundled") and conf.CheckCHeader("miniupnpc.h") and conf.CheckLib(env["libminiupnpc_libname"]) :
+ env["HAVE_LIBMINIUPNPC"] = 1
+ env["LIBMINIUPNPC_FLAGS"] = { "LIBS": ["miniupnpc"] }
+ env["LIBMINIUPNPC_FLAGS"].update(libminiupnpc_flags)
+ else :
+ env["LIBMINIUPNPC_BUNDLED"] = 1
+ conf.Finish()
else :
- env["LIBMINIUPNPC_FLAGS"] = {}
+ env["LIBMINIUPNPC_FLAGS"] = {}
# LibNATPMP
if env["experimental_ft"] :
- libnatpmp_flags = {}
- libnatpmp_conf_env = conf_env.Clone()
- if env.get("libnatpmp_libdir", None) :
- libnatpmp_flags["LIBPATH"] = [env["libnatpmp_libdir"]]
- if env.get("libnatpmp_includedir", None) :
- libnatpmp_flags["CPPPATH"] = [env["libnatpmp_includedir"]]
- libnatpmp_conf_env.MergeFlags(libnatpmp_flags)
- conf = Configure(libnatpmp_conf_env)
- if conf.CheckCHeader("natpmp.h") and conf.CheckLib(env["libnatpmp_libname"]) :
- env["HAVE_LIBNATPMP"] = 1
- env["LIBNATPMP_FLAGS"] = { "LIBS": ["natpmp"] }
- env["LIBNATPMP_FLAGS"].update(libnatpmp_flags)
- else :
- env["LIBNATPMP_BUNDLED"] = 1
- conf.Finish()
+ libnatpmp_flags = {}
+ libnatpmp_conf_env = conf_env.Clone()
+ if env.get("libnatpmp_libdir", None) :
+ libnatpmp_flags["LIBPATH"] = [env["libnatpmp_libdir"]]
+ if env.get("libnatpmp_includedir", None) :
+ libnatpmp_flags["CPPPATH"] = [env["libnatpmp_includedir"]]
+ libnatpmp_conf_env.MergeFlags(libnatpmp_flags)
+ conf = Configure(libnatpmp_conf_env)
+ if not env.get("libnatpmp_force_bundled") and conf.CheckCHeader("natpmp.h") and conf.CheckLib(env["libnatpmp_libname"]) :
+ env["HAVE_LIBNATPMP"] = 1
+ env["LIBNATPMP_FLAGS"] = { "LIBS": ["natpmp"] }
+ env["LIBNATPMP_FLAGS"].update(libnatpmp_flags)
+ else :
+ env["LIBNATPMP_BUNDLED"] = 1
+ conf.Finish()
else :
- env["LIBNATPMP_FLAGS"] = {}
+ env["LIBNATPMP_FLAGS"] = {}
# SQLite
if env["experimental"] :
- sqlite_conf_env = conf_env.Clone()
- sqlite_flags = {}
- if env.get("sqlite_libdir", None) :
- sqlite_flags["LIBPATH"] = [env["sqlite_libdir"]]
- if env.get("sqlite_includedir", None) :
- sqlite_flags["CPPPATH"] = [env["sqlite_includedir"]]
- sqlite_conf_env.MergeFlags(sqlite_flags)
- conf = Configure(sqlite_conf_env)
- if conf.CheckCHeader("sqlite3.h") and conf.CheckLib(env["sqlite_libname"]) and not env.get("sqlite_force_bundled", False):
- env["HAVE_SQLITE"] = 1
- env["SQLITE_FLAGS"] = { "LIBS": [env["sqlite_libname"]] }
- env["SQLITE_FLAGS"].update(sqlite_flags)
- else :
- env["SQLITE_BUNDLED"] = 1
- conf.Finish()
+ sqlite_conf_env = conf_env.Clone()
+ sqlite_flags = {}
+ if env.get("sqlite_libdir", None) :
+ sqlite_flags["LIBPATH"] = [env["sqlite_libdir"]]
+ if env.get("sqlite_includedir", None) :
+ sqlite_flags["CPPPATH"] = [env["sqlite_includedir"]]
+ sqlite_conf_env.MergeFlags(sqlite_flags)
+ conf = Configure(sqlite_conf_env)
+ if conf.CheckCHeader("sqlite3.h") and conf.CheckLib(env["sqlite_libname"]) and not env.get("sqlite_force_bundled", False):
+ env["HAVE_SQLITE"] = 1
+ env["SQLITE_FLAGS"] = { "LIBS": [env["sqlite_libname"]] }
+ env["SQLITE_FLAGS"].update(sqlite_flags)
+ else :
+ env["SQLITE_BUNDLED"] = 1
+ conf.Finish()
else :
- env["SQLITE_FLAGS"] = {}
+ env["SQLITE_FLAGS"] = {}
# Lua
lua_conf_env = conf_env.Clone()
lua_flags = {}
if env.get("lua_libdir", None) :
- lua_flags["LIBPATH"] = [env["lua_libdir"]]
+ lua_flags["LIBPATH"] = [env["lua_libdir"]]
if env.get("lua_includedir", None) :
- lua_flags["CPPPATH"] = [env["lua_includedir"]]
+ lua_flags["CPPPATH"] = [env["lua_includedir"]]
lua_conf_env.MergeFlags(lua_flags)
conf = Configure(lua_conf_env)
if not env.get("lua_force_bundled", False) and conf.CheckLibWithHeader(env["lua_libname"], "lua.hpp", "cxx", autoadd = 0) :
- env["HAVE_LUA"] = 1
- env["LUA_FLAGS"] = { "LIBS": [env["lua_libname"]] }
- lua_version = GetVersion(conf, "LUA_VERSION_NUM", "lua.h")
- if lua_version > 0 :
- env["LUA_FLAGS"]["LUA_VERSION"] = str(lua_version // 100) + "." + str(lua_version % 100)
- else :
- print "Warning: Unable to determine Lua version. Not installing Lua libraries."
- env["LUA_FLAGS"].update(lua_flags)
+ env["HAVE_LUA"] = 1
+ env["LUA_FLAGS"] = { "LIBS": [env["lua_libname"]] }
+ lua_version = GetVersion(conf, "LUA_VERSION_NUM", "lua.h")
+ if lua_version > 0 :
+ env["LUA_FLAGS"]["LUA_VERSION"] = str(lua_version // 100) + "." + str(lua_version % 100)
+ else :
+ print "Warning: Unable to determine Lua version. Not installing Lua libraries."
+ env["LUA_FLAGS"].update(lua_flags)
else :
- env["LUA_BUNDLED"] = 1
+ env["LUA_BUNDLED"] = 1
conf.Finish()
# Readline
editline_conf_env = conf_env.Clone()
editline_flags = {}
if env.get("editline_libdir", None) :
- editline_flags["LIBPATH"] = [env["editline_libdir"]]
+ editline_flags["LIBPATH"] = [env["editline_libdir"]]
if env.get("editline_includedir", None) :
- editline_flags["CPPPATH"] = [env["editline_includedir"]]
+ editline_flags["CPPPATH"] = [env["editline_includedir"]]
editline_conf_env.MergeFlags(editline_flags)
conf = Configure(editline_conf_env)
if conf.CheckLibWithHeader(env["editline_libname"], ["stdio.h", "editline/readline.h"], "c") :
- env["HAVE_EDITLINE"] = 1
- env["EDITLINE_FLAGS"] = { "LIBS": [env["editline_libname"]] }
- env["EDITLINE_FLAGS"].update(editline_flags)
+ env["HAVE_EDITLINE"] = 1
+ env["EDITLINE_FLAGS"] = { "LIBS": [env["editline_libname"]] }
+ env["EDITLINE_FLAGS"].update(editline_flags)
conf.Finish()
# Avahi
avahi_conf_env = conf_env.Clone()
avahi_flags = {}
if env.get("avahi_libdir", None) :
- avahi_flags["LIBPATH"] = [env["avahi_libdir"]]
+ avahi_flags["LIBPATH"] = [env["avahi_libdir"]]
if env.get("avahi_includedir", None) :
- avahi_flags["CPPPATH"] = [env["avahi_includedir"]]
+ avahi_flags["CPPPATH"] = [env["avahi_includedir"]]
avahi_conf_env.MergeFlags(avahi_flags)
conf = Configure(avahi_conf_env)
if env.get("try_avahi", True) and conf.CheckCHeader("avahi-client/client.h") and conf.CheckLib("avahi-client") and conf.CheckLib("avahi-common") :
- env["HAVE_AVAHI"] = True
- env["AVAHI_FLAGS"] = { "LIBS": ["avahi-client", "avahi-common"] }
- env["AVAHI_FLAGS"].update(avahi_flags)
+ env["HAVE_AVAHI"] = True
+ env["AVAHI_FLAGS"] = { "LIBS": ["avahi-client", "avahi-common"] }
+ env["AVAHI_FLAGS"].update(avahi_flags)
conf.Finish()
# Qt
if env["qt"] :
- env["QTDIR"] = env["qt"]
+ env["QTDIR"] = env["qt"]
-# Check for OS X Secure Transport
-if not env.get("openssl_force_bundled", False) and env["PLATFORM"] == "darwin" and env["target"] == "native" :
- env["HAVE_SECURETRANSPORT"] = True
-else :
- env["HAVE_SECURETRANSPORT"] = False
+################################################################################
+# TLS backend selection
+################################################################################
+env["OPENSSL_FLAGS"] = {}
+if env.get("tls_backend") == "native" :
+ if env["PLATFORM"] == "win32" :
+ env["HAVE_SCHANNEL"] = True
+ elif env["PLATFORM"] == "darwin" and env["target"] == "native":
+ env["HAVE_SECURETRANSPORT"] = True
+ elif env["target"] in ("iphone-device", "iphone-simulator", "xcode", "android") :
+ env["tls_backend"] = "openssl_bundled"
+ else :
+ env["tls_backend"] = "openssl"
# OpenSSL
-openssl_env = conf_env.Clone()
-if env.get("openssl_force_bundled", False) or env["target"] in ("iphone-device", "iphone-simulator", "xcode", "android") :
- env["OPENSSL_BUNDLED"] = True
- env["HAVE_OPENSSL"] = True
-elif not env["HAVE_SECURETRANSPORT"] :
- use_openssl = bool(env["openssl"])
- openssl_prefix = ""
- if isinstance(env["openssl"], str) :
- openssl_prefix = env["openssl"]
- openssl_flags = {}
- if openssl_prefix :
- openssl_include = env.get("openssl_include", None)
- openssl_libdir = env.get("openssl_libdir", None)
- if openssl_include:
- openssl_flags = {"CPPPATH":[openssl_include]}
- else:
- openssl_flags = { "CPPPATH": [os.path.join(openssl_prefix, "include")] }
- if openssl_libdir:
- openssl_flags["LIBPATH"] = [openssl_libdir]
- env["OPENSSL_DIR"] = openssl_prefix
- elif env["PLATFORM"] == "win32" :
- openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib", "VC")]
- env["OPENSSL_DIR"] = openssl_prefix
- else :
- openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib")]
- openssl_env.MergeFlags(openssl_flags)
-
- openssl_conf = Configure(openssl_env)
- if use_openssl and openssl_conf.CheckCHeader("openssl/ssl.h") :
- env["HAVE_OPENSSL"] = 1
- env["OPENSSL_FLAGS"] = openssl_flags
- openssl_libnames = env.get("openssl_libnames", None)
- if openssl_libnames:
- env["OPENSSL_FLAGS"]["LIBS"] = openssl_libnames.split(',')
- elif env["PLATFORM"] == "win32" :
- env["OPENSSL_FLAGS"]["LIBS"] = ["libeay32MD", "ssleay32MD"]
- else:
- env["OPENSSL_FLAGS"]["LIBS"] = ["ssl", "crypto"]
- if env["PLATFORM"] == "darwin" :
- if platform.mac_ver()[0].startswith("10.5") :
- env["OPENSSL_FLAGS"]["FRAMEWORKS"] = ["Security"]
- else :
- env["OPENSSL_FLAGS"] = {}
- if env["PLATFORM"] == "win32" :
- # If we're compiling for Windows and OpenSSL isn't being used, use Schannel
- env["HAVE_SCHANNEL"] = True
-
- openssl_conf.Finish()
+if env.get("tls_backend") == "openssl_bundled" :
+ env["OPENSSL_BUNDLED"] = True
+ env["HAVE_OPENSSL"] = True
+elif env.get("tls_backend") == "openssl" :
+ openssl_env = conf_env.Clone()
+ use_openssl = bool(env["openssl"])
+ openssl_prefix = ""
+ if isinstance(env["openssl"], str) :
+ openssl_prefix = env["openssl"]
+ openssl_flags = {}
+ if openssl_prefix :
+ openssl_include = env.get("openssl_include")
+ openssl_libdir = env.get("openssl_libdir")
+ if openssl_include:
+ openssl_flags = {"CPPPATH":[openssl_include]}
+ else:
+ openssl_flags = { "CPPPATH": [os.path.join(openssl_prefix, "include")] }
+ if openssl_libdir:
+ openssl_flags["LIBPATH"] = [openssl_libdir]
+ env["OPENSSL_DIR"] = openssl_prefix
+ elif env["PLATFORM"] == "win32" :
+ openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib", "VC")]
+ env["OPENSSL_DIR"] = openssl_prefix
+ else :
+ openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib")]
+ openssl_env.MergeFlags(openssl_flags)
+
+ openssl_conf = Configure(openssl_env)
+ if use_openssl and openssl_conf.CheckCHeader("openssl/ssl.h") :
+ env["HAVE_OPENSSL"] = 1
+ env["OPENSSL_FLAGS"] = openssl_flags
+ openssl_libnames = env.get("openssl_libnames")
+ if openssl_libnames:
+ env["OPENSSL_FLAGS"]["LIBS"] = openssl_libnames.split(',')
+ elif env["PLATFORM"] == "win32" :
+ env["OPENSSL_FLAGS"]["LIBS"] = ["libeay32MD", "ssleay32MD"]
+ else:
+ env["OPENSSL_FLAGS"]["LIBS"] = ["ssl", "crypto"]
+ if env["PLATFORM"] == "darwin" :
+ if platform.mac_ver()[0].startswith("10.5") :
+ env["OPENSSL_FLAGS"]["FRAMEWORKS"] = ["Security"]
+ openssl_conf.Finish()
if env["PLATFORM"] == "win32" :
- # On Windows link to secur32. It is needed by Swiften/SASL/WindowsAuthentication
- env.Append(LIBS = ["secur32"])
+ # On Windows link to secur32. It is needed by Swiften/SASL/WindowsAuthentication
+ env.Append(LIBS = ["secur32"])
#Hunspell
hunspell_env = conf_env.Clone()
hunspell_prefix = isinstance(env.get("hunspell_prefix", False), str) and env["hunspell_prefix"] or ""
hunspell_flags = {}
if hunspell_prefix :
- hunspell_flags = {"CPPPATH":[os.path.join(hunspell_prefix, "include")], "LIBPATH":[os.path.join(hunspell_prefix, "lib")]}
+ hunspell_flags = {"CPPPATH":[os.path.join(hunspell_prefix, "include")], "LIBPATH":[os.path.join(hunspell_prefix, "lib")]}
hunspell_env.MergeFlags(hunspell_flags)
env["HAVE_HUNSPELL"] = 0;
if env.get("hunspell_enable", False) :
- hunspell_conf = Configure(hunspell_env)
- if hunspell_conf.CheckCXXHeader("hunspell/hunspell.hxx") and hunspell_conf.CheckLib("hunspell") :
- env["HAVE_HUNSPELL"] = 1
- hunspell_flags["LIBS"] = ["hunspell"]
- env["HUNSPELL_FLAGS"] = hunspell_flags
- hunspell_conf.Finish()
+ hunspell_conf = Configure(hunspell_env)
+ if hunspell_conf.CheckCXXHeader("hunspell/hunspell.hxx") and hunspell_conf.CheckLib("hunspell") :
+ env["HAVE_HUNSPELL"] = 1
+ hunspell_flags["LIBS"] = ["hunspell"]
+ env["HUNSPELL_FLAGS"] = hunspell_flags
+ hunspell_conf.Finish()
# Bonjour
if env["PLATFORM"] == "darwin" and env["target"] == "native" :
- env["HAVE_BONJOUR"] = 1
+ env["HAVE_BONJOUR"] = 1
elif env.get("bonjour", False) :
- bonjour_env = conf_env.Clone()
- bonjour_conf = Configure(bonjour_env)
- bonjour_flags = {}
- if env.get("bonjour") != True :
- bonjour_prefix = env["bonjour"]
- bonjour_flags["CPPPATH"] = [os.path.join(bonjour_prefix, "include")]
- bonjour_flags["LIBPATH"] = [os.path.join(bonjour_prefix, "lib", "win32")]
- bonjour_env.MergeFlags(bonjour_flags)
- if bonjour_conf.CheckCHeader("dns_sd.h") and bonjour_conf.CheckLib("dnssd") :
- env["HAVE_BONJOUR"] = 1
- env["BONJOUR_FLAGS"] = bonjour_flags
- env["BONJOUR_FLAGS"]["LIBS"] = ["dnssd"]
- bonjour_conf.Finish()
+ bonjour_env = conf_env.Clone()
+ bonjour_conf = Configure(bonjour_env)
+ bonjour_flags = {}
+ if env.get("bonjour") != True :
+ bonjour_prefix = env["bonjour"]
+ bonjour_flags["CPPPATH"] = [os.path.join(bonjour_prefix, "include")]
+ bonjour_flags["LIBPATH"] = [os.path.join(bonjour_prefix, "lib", "win32")]
+ bonjour_env.MergeFlags(bonjour_flags)
+ if bonjour_conf.CheckCHeader("dns_sd.h") and bonjour_conf.CheckLib("dnssd") :
+ env["HAVE_BONJOUR"] = 1
+ env["BONJOUR_FLAGS"] = bonjour_flags
+ env["BONJOUR_FLAGS"]["LIBS"] = ["dnssd"]
+ bonjour_conf.Finish()
# Cocoa & IOKit
if env["PLATFORM"] == "darwin" :
- cocoa_conf = Configure(conf_env)
- if cocoa_conf.CheckCHeader("IOKit/IOKitLib.h") :
- env["HAVE_IOKIT"] = True
- cocoa_conf.Finish()
+ cocoa_conf = Configure(conf_env)
+ if cocoa_conf.CheckCHeader("IOKit/IOKitLib.h") :
+ env["HAVE_IOKIT"] = True
+ cocoa_conf.Finish()
# Qt
try :
- myenv = env.Clone()
- myenv.Tool("qt4", toolpath = ["#/BuildTools/SCons/Tools"])
- env["HAVE_QT"] = True
+ myenv = env.Clone()
+ myenv.Tool("qt4", toolpath = ["#/BuildTools/SCons/Tools"])
+ env["HAVE_QT"] = True
except SCons.Errors.StopError:
- env["HAVE_QT"] = False
+ env["HAVE_QT"] = False
except Exception as e:
- print "Info: %s" % str(e)
- env["HAVE_QT"] = False
+ print "Info: %s" % str(e)
+ env["HAVE_QT"] = False
################################################################################
# DocBook setup
################################################################################
if env.get("docbook_xml") :
- env["DOCBOOK_XML_DIR"] = env["docbook_xml"]
+ env["DOCBOOK_XML_DIR"] = env["docbook_xml"]
if env.get("docbook_xsl") :
- env["DOCBOOK_XSL_DIR"] = env["docbook_xsl"]
+ env["DOCBOOK_XSL_DIR"] = env["docbook_xsl"]
################################################################################
@@ -633,11 +674,11 @@ if env.get("docbook_xsl") :
################################################################################
try:
- if env.Dir("#/.git").exists() :
- if not env.GetOption("clean") and env.get("install_git_hooks", True) :
- env.Install("#/.git/hooks", Glob("#/BuildTools/Git/Hooks/*"))
+ if env.Dir("#/.git").exists() :
+ if not env.GetOption("clean") and env.get("install_git_hooks", True) :
+ env.Install("#/.git/hooks", Glob("#/BuildTools/Git/Hooks/*"))
except TypeError:
- print "You seem to be using Swift in a Git submodule. Not installing hooks."
+ print "You seem to be using Swift in a Git submodule. Not installing hooks."
################################################################################
@@ -645,23 +686,23 @@ except TypeError:
################################################################################
if ARGUMENTS.get("replace_pragma_once", False) :
- env.Tool("ReplacePragmaOnce", toolpath = ["#/BuildTools/SCons/Tools"])
-
- def relpath(path, start) :
- i = len(os.path.commonprefix([path, start]))
- return path[i+1:]
-
- for actual_root, dirs, files in os.walk(root) :
- if "3rdParty" in actual_root :
- continue
- for file in files :
- if not file.endswith(".h") :
- continue
- include = relpath(os.path.join(actual_root, file), root)
- env.ReplacePragmaOnce("#/include/" + include, "#/" + include)
- env.Append(CPPPATH = ["#/include"])
+ env.Tool("ReplacePragmaOnce", toolpath = ["#/BuildTools/SCons/Tools"])
+
+ def relpath(path, start) :
+ i = len(os.path.commonprefix([path, start]))
+ return path[i+1:]
+
+ for actual_root, dirs, files in os.walk(root) :
+ if "3rdParty" in actual_root :
+ continue
+ for file in files :
+ if not file.endswith(".h") :
+ continue
+ include = relpath(os.path.join(actual_root, file), root)
+ env.ReplacePragmaOnce("#/include/" + include, "#/" + include)
+ env.Append(CPPPATH = ["#/include"])
else :
- env.Append(CPPPATH = [root])
+ env.Append(CPPPATH = [root])
################################################################################
@@ -669,29 +710,29 @@ else :
################################################################################
if ARGUMENTS.get("dump_trace", False) :
- env.SetOption("no_exec", True)
- env["TEST"] = True
- env["BOOST_BUILD_BCP"] = True
- env.Decider(lambda x, y, z : True)
- SCons.Node.Python.Value.changed_since_last_build = (lambda x, y, z: True)
+ env.SetOption("no_exec", True)
+ env["TEST"] = True
+ env["BOOST_BUILD_BCP"] = True
+ env.Decider(lambda x, y, z : True)
+ SCons.Node.Python.Value.changed_since_last_build = (lambda x, y, z: True)
# Modules
modules = []
if os.path.isdir(Dir("#/3rdParty").abspath) :
- for dir in os.listdir(Dir("#/3rdParty").abspath) :
- full_dir = os.path.join(Dir("#/3rdParty").abspath, dir)
- if not os.path.isdir(full_dir) :
- continue
- sconscript = os.path.join(full_dir, "SConscript")
- if os.path.isfile(sconscript) :
- modules.append("3rdParty/" + dir)
+ for dir in os.listdir(Dir("#/3rdParty").abspath) :
+ full_dir = os.path.join(Dir("#/3rdParty").abspath, dir)
+ if not os.path.isdir(full_dir) :
+ continue
+ sconscript = os.path.join(full_dir, "SConscript")
+ if os.path.isfile(sconscript) :
+ modules.append("3rdParty/" + dir)
for dir in os.listdir(Dir("#").abspath) :
- full_dir = os.path.join(Dir("#").abspath, dir)
- if not os.path.isdir(full_dir) :
- continue
- sconscript = os.path.join(full_dir, "SConscript")
- if os.path.isfile(sconscript) :
- modules.append(dir)
+ full_dir = os.path.join(Dir("#").abspath, dir)
+ if not os.path.isdir(full_dir) :
+ continue
+ sconscript = os.path.join(full_dir, "SConscript")
+ if os.path.isfile(sconscript) :
+ modules.append(dir)
# QA comes last
modules.remove("QA")
@@ -700,13 +741,13 @@ modules.append("QA")
# Flags
env["PROJECTS"] = [m for m in modules if m not in ["Documentation", "QA", "SwifTools"] and not m.startswith("3rdParty")]
for stage in ["flags", "build"] :
- env["SCONS_STAGE"] = stage
- SConscript(dirs = map(lambda x : root + "/" + x, modules))
+ env["SCONS_STAGE"] = stage
+ SConscript(dirs = map(lambda x : root + "/" + x, modules))
# SLOCCount
if ARGUMENTS.get("sloccount", False) :
- for project in env["PROJECTS"] :
- env.SLOCCount("#/" + project)
+ for project in env["PROJECTS"] :
+ env.SLOCCount("#/" + project)
################################################################################
@@ -719,11 +760,11 @@ print " -------------------"
parsers = []
if env.get("HAVE_LIBXML", 0):
- parsers.append("LibXML")
+ parsers.append("LibXML")
if env.get("HAVE_EXPAT", 0):
- parsers.append("Expat")
- if env.get("EXPAT_BUNDLED", False) :
- parsers.append("(Bundled)")
+ parsers.append("Expat")
+ if env.get("EXPAT_BUNDLED", False) :
+ parsers.append("(Bundled)")
print " Projects: " + ' '.join(env["PROJECTS"])
print ""
print " XML Parsers: " + ' '.join(parsers)
@@ -731,3 +772,8 @@ print " XML Parsers: " + ' '.join(parsers)
print " TLS Support: " + (env.get("HAVE_OPENSSL",0) and "OpenSSL" or env.get("HAVE_SECURETRANSPORT",0) and "Secure Transport" or env.get("HAVE_SCHANNEL", 0) and "Schannel" or "Disabled")
print " DNSSD Support: " + (env.get("HAVE_BONJOUR") and "Bonjour" or (env.get("HAVE_AVAHI") and "Avahi" or "Disabled"))
print
+
+if not GetOption("help") and not env.get("HAVE_OPENSSL", 0) and not env.get("HAVE_SCHANNEL", 0) and not env.get("HAVE_SECURETRANSPORT", 0):
+ print "Error: A working TLS backend is required. Please check the documentation for more information."
+ Exit(1)
+
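For reference, a minimal sketch (not part of this patch) of how a project SConscript might consume the flag dictionaries and HAVE_* switches that the configure steps above store in env. The target name, source file, and the particular flag selection are hypothetical, and it assumes the Flags tool (Flags.py, below) has already been loaded into env:

    # Hypothetical SConscript fragment -- illustration only, not part of the patch.
    Import("env")
    if env["SCONS_STAGE"] == "build" :
        myenv = env.Clone()
        # Merge the optional dependency flags detected during configuration.
        for flags in ["BOOST_FLAGS", "ZLIB_FLAGS", "OPENSSL_FLAGS"] :
            myenv.UseFlags(env.get(flags, {}))
        if myenv.get("HAVE_OPENSSL", 0) :
            myenv.Append(CPPDEFINES = ["HAVE_OPENSSL"])
        myenv.Program("ExampleTool", ["ExampleTool.cpp"])
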
diff --git a/BuildTools/SCons/Tools/AppBundle.py b/BuildTools/SCons/Tools/AppBundle.py
index f1072f5..337e83f 100644
--- a/BuildTools/SCons/Tools/AppBundle.py
+++ b/BuildTools/SCons/Tools/AppBundle.py
@@ -1,64 +1,72 @@
import SCons.Util, os.path
+from datetime import date
def generate(env) :
- def createAppBundle(env, bundle, version = "1.0", resources = [], frameworks = [], info = {}, handlesXMPPURIs = False) :
- bundleDir = bundle + ".app"
- bundleContentsDir = bundleDir + "/Contents"
- resourcesDir = bundleContentsDir + "/Resources"
- frameworksDir = bundleContentsDir + "/Frameworks"
- env.Install(bundleContentsDir + "/MacOS", bundle)
- env.WriteVal(bundleContentsDir + "/PkgInfo", env.Value("APPL\77\77\77\77"))
+ def createAppBundle(env, bundle, version = "1.0", resources = [], frameworks = [], info = {}, handlesXMPPURIs = False, sparklePublicDSAKey = None) :
+ env.Tool("InstallWithSymLinks", toolpath = ["#/BuildTools/SCons/Tools"])
- infoDict = {
- "CFBundleDevelopmentRegion" : "English",
- "CFBundleExecutable" : bundle,
- "CFBundleIdentifier" : "im.swift." + bundle,
- "CFBundleInfoDictionaryVersion" : "6.0",
- "CFBundleName" : bundle,
- "CFBundlePackageType" : "APPL",
- "CFBundleSignature": "\77\77\77\77",
- "CFBundleVersion" : version,
- "CFBundleIconFile" : bundle,
- "NSPrincipalClass" : "NSApplication",
- "NSHumanReadableCopyright" : unichr(0xA9) + " 2010 Swift Development Team.\nAll Rights Reserved."
- }
- infoDict.update(info)
+ bundleDir = bundle + ".app"
+ bundleContentsDir = bundleDir + "/Contents"
+ resourcesDir = bundleContentsDir + "/Resources"
+ frameworksDir = bundleContentsDir + "/Frameworks"
+ env.Install(bundleContentsDir + "/MacOS", bundle)
+ env.WriteVal(bundleContentsDir + "/PkgInfo", env.Value("APPL\77\77\77\77"))
- plist = """<?xml version="1.0" encoding="UTF-8"?>
- <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
- <plist version="1.0">
- <dict>
- """
- for key, value in infoDict.items() :
- plist += "<key>" + key + "</key>\n"
- plist += "<string>" + value.encode("utf-8") + "</string>\n"
- if handlesXMPPURIs :
- plist += """<key>CFBundleURLTypes</key>
+ infoDict = {
+ "CFBundleDevelopmentRegion" : "English",
+ "CFBundleExecutable" : bundle,
+ "CFBundleIdentifier" : "im.swift." + bundle,
+ "CFBundleInfoDictionaryVersion" : "6.0",
+ "CFBundleName" : bundle,
+ "CFBundlePackageType" : "APPL",
+ "CFBundleSignature": "\77\77\77\77",
+ "CFBundleVersion" : version,
+ "CFBundleIconFile" : bundle,
+ "NSPrincipalClass" : "NSApplication",
+ "NSHumanReadableCopyright" : "(c) 2010-%d Isode Ltd.\nAll Rights Reserved." % date.today().year
+ }
+ infoDict.update(info)
+
+ plist = """<?xml version="1.0" encoding="UTF-8"?>
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+ <plist version="1.0">
+ <dict>
+ """
+ for key, value in infoDict.items() :
+ plist += "<key>" + key + "</key>\n"
+ plist += "<string>" + value.encode("utf-8") + "</string>\n"
+ if handlesXMPPURIs :
+ plist += """<key>CFBundleURLTypes</key>
<array>
- <dict>
- <key>CFBundleURLName</key>
- <string>XMPP URL</string>
- <key>CFBundleURLSchemes</key>
- <array>
- <string>xmpp</string>
- </array>
- </dict>
+ <dict>
+ <key>CFBundleURLName</key>
+ <string>XMPP URL</string>
+ <key>CFBundleURLSchemes</key>
+ <array>
+ <string>xmpp</string>
+ </array>
+ </dict>
</array>\n"""
- plist += """</dict>
- </plist>
- """
- env.WriteVal(bundleContentsDir + "/Info.plist", env.Value(plist))
- for (target, resource) in resources.items() :
- env.Install(os.path.join(resourcesDir, target), resource)
+ if sparklePublicDSAKey :
+ plist += "<key>SUPublicDSAKeyFile</key>"
+ plist += "<string>" + sparklePublicDSAKey.name.encode("utf-8") + "</string>"
+ env.Install(resourcesDir, sparklePublicDSAKey)
+ plist += """</dict>
+ </plist>
+ """
+ env.WriteVal(bundleContentsDir + "/Info.plist", env.Value(plist))
+
+ for (target, resource) in resources.items() :
+ env.Install(os.path.join(resourcesDir, target), resource)
- for framework in frameworks :
- env.Install(frameworksDir, framework)
+ for framework in frameworks :
+ env.InstallWithSymLinks(frameworksDir, framework)
- return env.Dir(bundleDir)
+ return env.Dir(bundleDir)
- env.AddMethod(createAppBundle, "AppBundle")
+ env.AddMethod(createAppBundle, "AppBundle")
def exists(env) :
- return env["PLATFORM"] == "darwin"
+ return env["PLATFORM"] == "darwin"
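A hedged usage sketch of the extended AppBundle method; the bundle name, version string and resource map are placeholders, and SPARKLE_FRAMEWORK / SWIFT_SPARKLE_PUBLIC_DSA_KEY are the values set by the SConstruct changes above (either may be unset when Sparkle or the key is not configured):

    # Hypothetical call site -- illustration only, not part of the patch.
    env.Tool("AppBundle", toolpath = ["#/BuildTools/SCons/Tools"])
    app = env.AppBundle("Swift", version = "4.0",
            resources = {"": ["Swift.icns"]},
            frameworks = [env["SPARKLE_FRAMEWORK"]] if env.get("HAVE_SPARKLE") else [],
            handlesXMPPURIs = True,
            sparklePublicDSAKey = env.get("SWIFT_SPARKLE_PUBLIC_DSA_KEY"))

When sparklePublicDSAKey is a File node, the key is installed into Contents/Resources and referenced from Info.plist via SUPublicDSAKeyFile.
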
diff --git a/BuildTools/SCons/Tools/BuildVersion.py b/BuildTools/SCons/Tools/BuildVersion.py
index b15448a..7968282 100644
--- a/BuildTools/SCons/Tools/BuildVersion.py
+++ b/BuildTools/SCons/Tools/BuildVersion.py
@@ -3,16 +3,16 @@ import SCons.Util
import Version
def generate(env) :
- def createBuildVersion(env, target, project) :
- buildVersion = """#pragma once
+ def createBuildVersion(env, target, project) :
+ buildVersion = """#pragma once
static const char* buildVersion = \"%(buildVersion)s\";\n
#define SWIFT_VERSION_STRING \"%(buildVersion)s\";\n
""" % { "buildVersion" : Version.getBuildVersion(env.Dir("#").abspath, project) }
- env.WriteVal(target, env.Value(buildVersion))
+ env.WriteVal(target, env.Value(buildVersion))
- env.AddMethod(createBuildVersion, "BuildVersion")
+ env.AddMethod(createBuildVersion, "BuildVersion")
def exists(env) :
- return true
+    return True
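A hedged sketch of invoking the BuildVersion method from a project SConscript (the header path and project name are placeholders):

    # Hypothetical call site -- illustration only, not part of the patch.
    env.BuildVersion("BuildVersion.h", project = "swift")

This writes a header that defines buildVersion and SWIFT_VERSION_STRING using Version.getBuildVersion().
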
diff --git a/BuildTools/SCons/Tools/DoxyGen.py b/BuildTools/SCons/Tools/DoxyGen.py
index 79af1c9..66a9111 100644
--- a/BuildTools/SCons/Tools/DoxyGen.py
+++ b/BuildTools/SCons/Tools/DoxyGen.py
@@ -1,26 +1,26 @@
import SCons.Util, os
def generate(env) :
- def modify_targets(target, source, env) :
- target = [env.File("html/index.html")]
- return target, source
+ def modify_targets(target, source, env) :
+ target = [env.File("html/index.html")]
+ return target, source
- def generate_actions(source, target, env, for_signature) :
- if env.WhereIs("$DOXYGEN") and env.WhereIs("$DOT") :
- return [SCons.Action.Action("$DOXYGEN $SOURCE", cmdstr = "$DOXYCOMSTR")]
- else :
- return []
+ def generate_actions(source, target, env, for_signature) :
+ if env.WhereIs("$DOXYGEN") and env.WhereIs("$DOT") :
+ return [SCons.Action.Action("$DOXYGEN $SOURCE", cmdstr = "$DOXYCOMSTR")]
+ else :
+ return []
- env["DOXYGEN"] = "doxygen"
- # FIXME: For some reason, things go incredibly slow (at least on OS X)
- # when not doing this. Some environment flag is having an effect on
- # this; find out which
- env["ENV"] = os.environ
- env["DOT"] = "dot"
- env["BUILDERS"]["DoxyGen"] = SCons.Builder.Builder(
- emitter = modify_targets,
- generator = generate_actions,
- single_source = True)
+ env["DOXYGEN"] = "doxygen"
+ # FIXME: For some reason, things go incredibly slow (at least on OS X)
+ # when not doing this. Some environment flag is having an effect on
+ # this; find out which
+ env["ENV"] = os.environ
+ env["DOT"] = "dot"
+ env["BUILDERS"]["DoxyGen"] = SCons.Builder.Builder(
+ emitter = modify_targets,
+ generator = generate_actions,
+ single_source = True)
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/SCons/Tools/Flags.py b/BuildTools/SCons/Tools/Flags.py
index 0768181..fe0cfcc 100644
--- a/BuildTools/SCons/Tools/Flags.py
+++ b/BuildTools/SCons/Tools/Flags.py
@@ -1,13 +1,13 @@
import SCons.Util
def generate(env) :
- def useFlags(env, flags) :
- for flag in flags :
- if flag in env :
- env[flag] = env[flag] + flags[flag]
- else :
- env[flag] = flags[flag]
- env.AddMethod(useFlags, "UseFlags")
+ def useFlags(env, flags) :
+ for flag in flags :
+ if flag in env :
+ env[flag] = env[flag] + flags[flag]
+ else :
+ env[flag] = flags[flag]
+ env.AddMethod(useFlags, "UseFlags")
def exists(env) :
- return True
+ return True
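For illustration, UseFlags merges a flags dictionary into the construction environment, concatenating with any existing value rather than replacing it. A minimal sketch with placeholder values:

    # Hypothetical usage -- illustration only, not part of the patch.
    myenv = env.Clone()
    myenv["LIBS"] = ["z"]
    myenv.UseFlags({"LIBS": ["xml2"]})
    # myenv["LIBS"] is now ["z", "xml2"]
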
diff --git a/BuildTools/SCons/Tools/InstallWithSymLinks.py b/BuildTools/SCons/Tools/InstallWithSymLinks.py
new file mode 100644
index 0000000..23d12ed
--- /dev/null
+++ b/BuildTools/SCons/Tools/InstallWithSymLinks.py
@@ -0,0 +1,114 @@
+"""SCons.Tool.install
+
+Tool-specific initialization for the install tool.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001 - 2015 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+from SCons.Script import Action, Builder
+from SCons.Node import FS
+import shutil
+import stat
+import os
+from os import path
+
+class CopytreeError(EnvironmentError):
+ pass
+
+# This is a patched version of shutil.copytree from python 2.5. It
+# doesn't fail if the dir exists, which regular copytree does
+# (annoyingly). Note the XXX comment in the docstring.
+def scons_copytree(src, dst, symlinks=False):
+ """Recursively copy a directory tree using copy2().
+
+ The destination directory must not already exist.
+    If exception(s) occur, a CopytreeError is raised with a list of reasons.
+
+ If the optional symlinks flag is true, symbolic links in the
+ source tree result in symbolic links in the destination tree; if
+ it is false, the contents of the files pointed to by symbolic
+ links are copied.
+
+ XXX Consider this example code rather than the ultimate tool.
+
+ """
+ names = os.listdir(src)
+ # garyo@genarts.com fix: check for dir before making dirs.
+ if not os.path.exists(dst):
+ os.makedirs(dst)
+ errors = []
+ for name in names:
+ srcname = os.path.join(src, name)
+ dstname = os.path.join(dst, name)
+ try:
+ if symlinks and os.path.islink(srcname):
+ linkto = os.readlink(srcname)
+ os.symlink(linkto, dstname)
+ elif os.path.isdir(srcname):
+ scons_copytree(srcname, dstname, symlinks)
+ else:
+ shutil.copy2(srcname, dstname)
+ # XXX What about devices, sockets etc.?
+ except (IOError, os.error), why:
+ errors.append((srcname, dstname, str(why)))
+ # catch the CopytreeError from the recursive copytree so that we can
+ # continue with other files
+ except CopytreeError, err:
+ errors.extend(err.args[0])
+ try:
+ shutil.copystat(src, dst)
+ except WindowsError:
+ # can't copy file access times on Windows
+ pass
+ except OSError, why:
+ errors.extend((src, dst, str(why)))
+ if errors:
+ raise CopytreeError, errors
+
+
+def symlinkBuilderImpl(target, source, env):
+ lnk = target[0].abspath
+ src = source[0].abspath
+ lnkdir,lnkname = path.split(lnk)
+ srcdir,srcname = path.split(src)
+
+ scons_copytree(src, os.path.join(lnk, srcname), True)
+
+ return None
+
+def symlinkBuilderPrinter(target, source, env):
+ lnk = path.basename(target[0].abspath)
+ src = path.basename(source[0].abspath)
+ return 'INSTALL PRESERVING SYMLINKS ' + target[0].get_internal_path()
+
+def generate(env) :
+ symlinkBuilder = Builder(action = Action(symlinkBuilderImpl, symlinkBuilderPrinter), target_factory = FS.Entry, source_factory = FS.Entry)
+ env.Append(BUILDERS = {'InstallWithSymLinks' : symlinkBuilder})
+
+def exists(env) :
+ return True \ No newline at end of file
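The point of this tool (as suggested by its use for frameworks in AppBundle.py above) is that a plain env.Install would flatten the internal symlinks of an OS X framework bundle such as Sparkle.framework, while this builder copies the tree with symlinks preserved. A hedged usage sketch with placeholder paths:

    # Hypothetical usage -- illustration only, not part of the patch.
    env.Tool("InstallWithSymLinks", toolpath = ["#/BuildTools/SCons/Tools"])
    env.InstallWithSymLinks("Swift.app/Contents/Frameworks",
            env.Dir("#/3rdParty/Sparkle/Sparkle-1.14.0/Sparkle.framework"))
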
diff --git a/BuildTools/SCons/Tools/Nib.py b/BuildTools/SCons/Tools/Nib.py
index 91eb064..cf5b0dc 100644
--- a/BuildTools/SCons/Tools/Nib.py
+++ b/BuildTools/SCons/Tools/Nib.py
@@ -1,12 +1,12 @@
import SCons.Util
def generate(env) :
- env["IBTOOL"] = "ibtool"
- env["BUILDERS"]["Nib"] = SCons.Builder.Builder(
- action = SCons.Action.Action("$IBTOOL --errors --warnings --notices --output-format human-readable-text --compile $TARGET $SOURCE", cmdstr = "$NIBCOMSTR"),
- suffix = ".nib",
- src_suffix = ".xib",
- single_source = True)
+ env["IBTOOL"] = "ibtool"
+ env["BUILDERS"]["Nib"] = SCons.Builder.Builder(
+ action = SCons.Action.Action("$IBTOOL --errors --warnings --notices --output-format human-readable-text --compile $TARGET $SOURCE", cmdstr = "$NIBCOMSTR"),
+ suffix = ".nib",
+ src_suffix = ".xib",
+ single_source = True)
def exists(env) :
- return env["PLATFORM"] == "darwin"
+ return env["PLATFORM"] == "darwin"
diff --git a/BuildTools/SCons/Tools/ReplacePragmaOnce.py b/BuildTools/SCons/Tools/ReplacePragmaOnce.py
index 3df0f41..cb49bbb 100644
--- a/BuildTools/SCons/Tools/ReplacePragmaOnce.py
+++ b/BuildTools/SCons/Tools/ReplacePragmaOnce.py
@@ -1,25 +1,25 @@
import SCons.Util, os.path
def generate(env) :
- root = env.Dir("#").abspath
- def relpath(path, start) :
- i = len(os.path.commonprefix([path, start]))
- return path[i+1:]
+ root = env.Dir("#").abspath
+ def relpath(path, start) :
+ i = len(os.path.commonprefix([path, start]))
+ return path[i+1:]
- def replacePragmaOnce(env, target, source) :
- guard = relpath(source[0].abspath, root).replace("/", "_").replace(".", "_").upper()
- data = source[0].get_contents()
- f = open(str(target[0]), 'wb')
- if "#pragma once" in data :
- f.write(data.replace("#pragma once", "#ifndef %(guard)s\n#define %(guard)s" % {"guard": guard}))
- f.write("\n#endif\n")
- else :
- f.write(data)
- f.close()
+ def replacePragmaOnce(env, target, source) :
+ guard = relpath(source[0].abspath, root).replace("/", "_").replace(".", "_").upper()
+ data = source[0].get_contents()
+ f = open(str(target[0]), 'wb')
+ if "#pragma once" in data :
+ f.write(data.replace("#pragma once", "#ifndef %(guard)s\n#define %(guard)s" % {"guard": guard}))
+ f.write("\n#endif\n")
+ else :
+ f.write(data)
+ f.close()
- env["BUILDERS"]["ReplacePragmaOnce"] = SCons.Builder.Builder(
- action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"),
- single_source = True)
+ env["BUILDERS"]["ReplacePragmaOnce"] = SCons.Builder.Builder(
+ action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"),
+ single_source = True)
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/SCons/Tools/SLOCCount.py b/BuildTools/SCons/Tools/SLOCCount.py
index 682a797..be31672 100644
--- a/BuildTools/SCons/Tools/SLOCCount.py
+++ b/BuildTools/SCons/Tools/SLOCCount.py
@@ -1,17 +1,17 @@
import SCons.Util, os.path, os
def generate(env) :
- def createSLOCCount(env, source) :
- myenv = env.Clone()
- myenv["ENV"]["HOME"] = os.environ["HOME"]
- source = myenv.Dir(source)
- target = myenv.File("#/" + source.path + ".sloccount")
- # FIXME: There's probably a better way to force building the .sc
- if os.path.exists(target.abspath) :
- os.unlink(target.abspath)
- return myenv.Command(target, source, [SCons.Action.Action("sloccount --duplicates --wide --details " + source.path + " | grep -v qrc_ > $TARGET", cmdstr = "$GENCOMSTR")])
+ def createSLOCCount(env, source) :
+ myenv = env.Clone()
+ myenv["ENV"]["HOME"] = os.environ["HOME"]
+ source = myenv.Dir(source)
+ target = myenv.File("#/" + source.path + ".sloccount")
+ # FIXME: There's probably a better way to force building the .sc
+ if os.path.exists(target.abspath) :
+ os.unlink(target.abspath)
+ return myenv.Command(target, source, [SCons.Action.Action("sloccount --duplicates --wide --details " + source.path + " | grep -v qrc_ > $TARGET", cmdstr = "$GENCOMSTR")])
- env.AddMethod(createSLOCCount, "SLOCCount")
+ env.AddMethod(createSLOCCount, "SLOCCount")
def exists(env) :
- return True
+ return True
diff --git a/BuildTools/SCons/Tools/Test.py b/BuildTools/SCons/Tools/Test.py
index 1eee4f6..72acb22 100644
--- a/BuildTools/SCons/Tools/Test.py
+++ b/BuildTools/SCons/Tools/Test.py
@@ -1,53 +1,53 @@
import SCons.Util, os
def generate(env) :
- def registerTest(env, target, type = "unit", is_checker = False) :
- if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type :
- if SCons.Util.is_List(target) :
- cmd = target[0].abspath
- else :
- cmd = target.abspath
- params = ""
-
- # Special support for unittest checker
- if is_checker and env.get("checker_report", False) :
- params = " --xml > " + os.path.join(target[0].dir.path, "checker-report.xml")
-
- ignore_prefix = ""
- if env.get("TEST_IGNORE_RESULT", False) :
- ignore_prefix = "-"
-
- # Set environment variables for running the test
- test_env = env.Clone()
- for i in ["HOME", "USERPROFILE", "APPDATA", "ASAN_OPTIONS", "LSAN_OPTIONS", "SWIFT_NETWORK_TEST_IPV4", "SWIFT_NETWORK_TEST_IPV6"]:
- if os.environ.get(i, "") :
- test_env["ENV"][i] = os.environ[i]
- if env["target"] == "android" :
- test_env["ENV"]["PATH"] = env["android_sdk_bin"] + ";" + test_env["ENV"]["PATH"]
- else :
- if test_env["PLATFORM"] == "darwin" :
- test_env["ENV"]["DYLD_FALLBACK_LIBRARY_PATH"] = ":".join(map(lambda x : str(x), test_env.get("LIBPATH", [])))
- elif test_env["PLATFORM"] == "win32" :
- test_env["ENV"]["PATH"] = ";".join(map(lambda x : str(x), test_env.get("LIBRUNPATH", []))) + ";" + test_env["ENV"]["PATH"]
-
-
- # Run the test
- if env["target"] == "android":
- exec_name = os.path.basename(cmd)
- test_env.Command("**dummy**", target, SCons.Action.Action(
- ["adb shell mount -o rw,remount /system",
- "adb push " + cmd + " /system/bin/" + exec_name,
- "adb shell SWIFT_CLIENTTEST_JID=\"" + os.getenv("SWIFT_CLIENTTEST_JID") + "\" SWIFT_CLIENTTEST_PASS=\"" + os.getenv("SWIFT_CLIENTTEST_PASS") + "\" " + env.get("TEST_RUNNER", "") + "/system/bin/" + exec_name], cmdstr = "$TESTCOMSTR"))
- else :
- test_env.Command("**dummy**", target,
- SCons.Action.Action(ignore_prefix + env.get("TEST_RUNNER", "") + cmd + " " + params, cmdstr = "$TESTCOMSTR"))
-
- def registerScriptTests(env, scripts, name, type) :
- if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type :
- pass
-
- env.AddMethod(registerTest, "Test")
- env.AddMethod(registerScriptTests, "ScriptTests")
+ def registerTest(env, target, type = "unit", is_checker = False) :
+ if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type :
+ if SCons.Util.is_List(target) :
+ cmd = target[0].abspath
+ else :
+ cmd = target.abspath
+ params = ""
+
+ # Special support for unittest checker
+ if is_checker and env.get("checker_report", False) :
+ params = " --xml > " + os.path.join(target[0].dir.path, "checker-report.xml")
+
+ ignore_prefix = ""
+ if env.get("TEST_IGNORE_RESULT", False) :
+ ignore_prefix = "-"
+
+ # Set environment variables for running the test
+ test_env = env.Clone()
+ for i in ["HOME", "USERPROFILE", "APPDATA", "ASAN_OPTIONS", "LSAN_OPTIONS", "SWIFT_NETWORK_TEST_IPV4", "SWIFT_NETWORK_TEST_IPV6"]:
+ if os.environ.get(i, "") :
+ test_env["ENV"][i] = os.environ[i]
+ if env["target"] == "android" :
+ test_env["ENV"]["PATH"] = env["android_sdk_bin"] + ";" + test_env["ENV"]["PATH"]
+ else :
+ if test_env["PLATFORM"] == "darwin" :
+ test_env["ENV"]["DYLD_FALLBACK_LIBRARY_PATH"] = ":".join(map(lambda x : str(x), test_env.get("LIBPATH", [])))
+ elif test_env["PLATFORM"] == "win32" :
+ test_env["ENV"]["PATH"] = ";".join(map(lambda x : str(x), test_env.get("LIBRUNPATH", []))) + ";" + test_env["ENV"]["PATH"]
+
+
+ # Run the test
+ if env["target"] == "android":
+ exec_name = os.path.basename(cmd)
+ test_env.Command("**dummy**", target, SCons.Action.Action(
+ ["adb shell mount -o rw,remount /system",
+ "adb push " + cmd + " /system/bin/" + exec_name,
+ "adb shell SWIFT_CLIENTTEST_JID=\"" + os.getenv("SWIFT_CLIENTTEST_JID") + "\" SWIFT_CLIENTTEST_PASS=\"" + os.getenv("SWIFT_CLIENTTEST_PASS") + "\" " + env.get("TEST_RUNNER", "") + "/system/bin/" + exec_name], cmdstr = "$TESTCOMSTR"))
+ else :
+ test_env.Command("**dummy**", target,
+ SCons.Action.Action(ignore_prefix + env.get("TEST_RUNNER", "") + cmd + " " + params, cmdstr = "$TESTCOMSTR"))
+
+ def registerScriptTests(env, scripts, name, type) :
+ if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type :
+ pass
+
+ env.AddMethod(registerTest, "Test")
+ env.AddMethod(registerScriptTests, "ScriptTests")
def exists(env) :
- return True
+ return True
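A hedged sketch of registering a unit-test binary through the Test method; the program name is a placeholder, while TEST_TYPE, TEST_RUNNER, checker_report and TEST_IGNORE_RESULT are the existing build settings the method consults:

    # Hypothetical call site -- illustration only, not part of the patch.
    checker = myenv.Program("checker", ["checker.cpp"])
    myenv.Test(checker, type = "unit", is_checker = True)

With checker_report set, the checker runs with --xml and its output is redirected to checker-report.xml next to the binary; with TEST_IGNORE_RESULT set, a failing test does not abort the build.
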
diff --git a/BuildTools/SCons/Tools/WindowsBundle.py b/BuildTools/SCons/Tools/WindowsBundle.py
index ef77acb..20d41ff 100644
--- a/BuildTools/SCons/Tools/WindowsBundle.py
+++ b/BuildTools/SCons/Tools/WindowsBundle.py
@@ -4,119 +4,119 @@ import re
import shutil
def which(program_name):
- if hasattr(shutil, "which"):
- return shutil.which(program_name)
- else:
- path = os.getenv('PATH')
- for p in path.split(os.path.pathsep):
- p = os.path.join(p,program_name)
- if os.path.exists(p) and os.access(p,os.X_OK):
- return p
+ if hasattr(shutil, "which"):
+ return shutil.which(program_name)
+ else:
+ path = os.getenv('PATH')
+ for p in path.split(os.path.pathsep):
+ p = os.path.join(p,program_name)
+ if os.path.exists(p) and os.access(p,os.X_OK):
+ return p
def generate(env) :
- def captureWinDeployQtMapping(release = True):
- p = False
-
- qt_bin_folder = ""
- if "QTDIR" in env:
- qt_bin_folder = env["QTDIR"] + "\\bin;"
-
- environ = {"PATH": qt_bin_folder + os.getenv("PATH"), "TEMP": os.getenv("TEMP"), "TMP": os.getenv("TMP")}
-
- if release:
- p = subprocess.Popen(['windeployqt', '--release', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)
- else:
- p = subprocess.Popen(['windeployqt', '--debug', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)
-
- if p:
- stdout, stderr = p.communicate()
-
- mappings = []
-
- p = re.compile(ur'"([^\"]*)" "([^\"]*)"')
-
- matches = re.findall(p, stdout)
- for match in matches:
- mappings.append(match)
- return mappings
- else:
- return False
-
- def createWindowsBundleManual(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') :
- all_files = []
- all_files += env.Install(bundle, bundle + ".exe")
- for lib in qtlibs :
- all_files += env.Install(bundle, os.path.join(env["QTDIR"], "bin", lib + ".dll"))
- plugins_suffix = '4'
- if qtversion == '5' :
- plugins_suffix = ''
- for plugin_type in qtplugins:
- all_files += env.Install(os.path.join(bundle, plugin_type), [os.path.join(env["QTDIR"], "plugins", plugin_type, "q" + plugin + plugins_suffix + ".dll") for plugin in qtplugins[plugin_type]])
- for dir, resourceFiles in resources.items() :
- for resource in resourceFiles :
- e = env.Entry(resource)
- if e.isdir() :
- for subresource in env.Glob(str(e) + "/*") :
- all_files += env.Install(os.path.join(bundle, dir, e.name), subresource)
- else :
- all_files += env.Install(os.path.join(bundle, dir), resource)
- return all_files
-
- # This version of uses windeployqt tool
- def createWindowsBundleWithWinDeployQt(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') :
- assert(qtversion == '5')
- all_files = []
-
- # add swift executable
- all_files += env.Install(bundle, bundle + ".exe")
-
- # adding resources (swift sounds/images/translations)
- for dir, resourceFiles in resources.items() :
- for resource in resourceFiles :
- e = env.Entry(resource)
- if e.isdir() :
- for subresource in env.Glob(str(e) + "/*") :
- all_files += env.Install(os.path.join(bundle, dir, e.name), subresource)
- else :
- all_files += env.Install(os.path.join(bundle, dir), resource)
-
- qtmappings = captureWinDeployQtMapping()
- assert(qtmappings)
-
- # handle core DLLs
- qt_corelib_regex = re.compile(ur".*bin.*\\(.*)\.dll")
-
- for qtlib in qtlibs:
- if qtlib.startswith("Qt5"):
- (src_path, target_path) = next(((src_path, target_path) for (src_path, target_path) in qtmappings if qt_corelib_regex.match(src_path) and qt_corelib_regex.match(src_path).group(1) == qtlib), (None, None))
- if src_path != None:
- all_files += env.Install(bundle, src_path)
-
- # handle core dependencies
- for (src_path, target_path) in qtmappings:
- if qt_corelib_regex.match(src_path) and not qt_corelib_regex.match(src_path).group(1).startswith("Qt5"):
- all_files += env.Install(bundle, src_path)
-
- # handle plugins
- qt_plugin_regex = re.compile(ur".*plugins.*\\(.*)\\(.*)\.dll")
- for (src_path, target_path) in qtmappings:
- if qt_plugin_regex.match(src_path):
- plugin_folder, filename = qt_plugin_regex.match(src_path).groups()
- try:
- if plugin_folder in ["audio"] or filename[1:] in qtplugins[plugin_folder]:
- all_files += env.Install(os.path.join(bundle, plugin_folder), src_path)
- except:
- pass
- return all_files
-
- def createWindowsBundle(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4'):
- if which("windeployqt.exe"):
- return createWindowsBundleWithWinDeployQt(env, bundle, resources, qtplugins, qtlibs, qtversion)
- else:
- return createWindowsBundleManual(env, bundle, resources, qtplugins, qtlibs, qtversion)
-
- env.AddMethod(createWindowsBundle, "WindowsBundle")
+ def captureWinDeployQtMapping(release = True):
+ p = False
+
+ qt_bin_folder = ""
+ if "QTDIR" in env:
+ qt_bin_folder = env["QTDIR"] + "\\bin;"
+
+ environ = {"PATH": qt_bin_folder + os.getenv("PATH"), "TEMP": os.getenv("TEMP"), "TMP": os.getenv("TMP")}
+
+ if release:
+ p = subprocess.Popen(['windeployqt', '--release', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)
+ else:
+ p = subprocess.Popen(['windeployqt', '--debug', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)
+
+ if p:
+ stdout, stderr = p.communicate()
+
+ mappings = []
+
+ p = re.compile(ur'"([^\"]*)" "([^\"]*)"')
+
+ matches = re.findall(p, stdout)
+ for match in matches:
+ mappings.append(match)
+ return mappings
+ else:
+ return False
+
+ def createWindowsBundleManual(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') :
+ all_files = []
+ all_files += env.Install(bundle, bundle + ".exe")
+ for lib in qtlibs :
+ all_files += env.Install(bundle, os.path.join(env["QTDIR"], "bin", lib + ".dll"))
+ plugins_suffix = '4'
+ if qtversion == '5' :
+ plugins_suffix = ''
+ for plugin_type in qtplugins:
+ all_files += env.Install(os.path.join(bundle, plugin_type), [os.path.join(env["QTDIR"], "plugins", plugin_type, "q" + plugin + plugins_suffix + ".dll") for plugin in qtplugins[plugin_type]])
+ for dir, resourceFiles in resources.items() :
+ for resource in resourceFiles :
+ e = env.Entry(resource)
+ if e.isdir() :
+ for subresource in env.Glob(str(e) + "/*") :
+ all_files += env.Install(os.path.join(bundle, dir, e.name), subresource)
+ else :
+ all_files += env.Install(os.path.join(bundle, dir), resource)
+ return all_files
+
+    # This version uses the windeployqt tool
+ def createWindowsBundleWithWinDeployQt(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') :
+ assert(qtversion == '5')
+ all_files = []
+
+ # add swift executable
+ all_files += env.Install(bundle, bundle + ".exe")
+
+ # adding resources (swift sounds/images/translations)
+ for dir, resourceFiles in resources.items() :
+ for resource in resourceFiles :
+ e = env.Entry(resource)
+ if e.isdir() :
+ for subresource in env.Glob(str(e) + "/*") :
+ all_files += env.Install(os.path.join(bundle, dir, e.name), subresource)
+ else :
+ all_files += env.Install(os.path.join(bundle, dir), resource)
+
+ qtmappings = captureWinDeployQtMapping()
+ assert(qtmappings)
+
+ # handle core DLLs
+ qt_corelib_regex = re.compile(ur".*bin.*\\(.*)\.dll")
+
+ for qtlib in qtlibs:
+ if qtlib.startswith("Qt5"):
+ (src_path, target_path) = next(((src_path, target_path) for (src_path, target_path) in qtmappings if qt_corelib_regex.match(src_path) and qt_corelib_regex.match(src_path).group(1) == qtlib), (None, None))
+ if src_path != None:
+ all_files += env.Install(bundle, src_path)
+
+ # handle core dependencies
+ for (src_path, target_path) in qtmappings:
+ if qt_corelib_regex.match(src_path) and not qt_corelib_regex.match(src_path).group(1).startswith("Qt5"):
+ all_files += env.Install(bundle, src_path)
+
+ # handle plugins
+ qt_plugin_regex = re.compile(ur".*plugins.*\\(.*)\\(.*)\.dll")
+ for (src_path, target_path) in qtmappings:
+ if qt_plugin_regex.match(src_path):
+ plugin_folder, filename = qt_plugin_regex.match(src_path).groups()
+ try:
+ if plugin_folder in ["audio"] or filename[1:] in qtplugins[plugin_folder]:
+ all_files += env.Install(os.path.join(bundle, plugin_folder), src_path)
+ except:
+ pass
+ return all_files
+
+ def createWindowsBundle(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4'):
+ if which("windeployqt.exe"):
+ return createWindowsBundleWithWinDeployQt(env, bundle, resources, qtplugins, qtlibs, qtversion)
+ else:
+ return createWindowsBundleManual(env, bundle, resources, qtplugins, qtlibs, qtversion)
+
+ env.AddMethod(createWindowsBundle, "WindowsBundle")
def exists(env) :
- return env["PLATFORM"] == "win32"
+ return env["PLATFORM"] == "win32"
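As a rough usage sketch (resource paths, plugin and library names are assumptions), an SConscript would call the registered WindowsBundle method as below; it falls back to the manual copy when windeployqt.exe is not found on PATH:

    # Hypothetical invocation of the WindowsBundle method
    files = env.WindowsBundle("Swift",
        resources = {"sounds": ["resources/connected.wav"]},
        qtplugins = {"imageformats": ["gif", "svg"]},
        qtlibs = ["Qt5Core", "Qt5Gui", "Qt5Widgets"],
        qtversion = "5")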
diff --git a/BuildTools/SCons/Tools/WriteVal.py b/BuildTools/SCons/Tools/WriteVal.py
index 4e8d3bb..ad77a99 100644
--- a/BuildTools/SCons/Tools/WriteVal.py
+++ b/BuildTools/SCons/Tools/WriteVal.py
@@ -1,15 +1,15 @@
import SCons.Util
def generate(env) :
- def replacePragmaOnce(env, target, source) :
- f = open(str(target[0]), 'wb')
- f.write(source[0].get_contents())
- f.close()
+ def replacePragmaOnce(env, target, source) :
+ f = open(str(target[0]), 'wb')
+ f.write(source[0].get_contents())
+ f.close()
- env["BUILDERS"]["WriteVal"] = SCons.Builder.Builder(
- action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"),
- single_source = True)
+ env["BUILDERS"]["WriteVal"] = SCons.Builder.Builder(
+ action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"),
+ single_source = True)
def exists(env) :
- return True
+ return True
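A minimal sketch of the WriteVal builder in use (target name and value are assumptions); note that the tool references SCons.Builder and SCons.Action although only SCons.Util is imported here, so it relies on SCons having loaded those modules already:

    # Hypothetical usage: dump a computed value into a file at build time
    env.WriteVal("BuildVersion.h", env.Value('#define BUILD_VERSION "1.0"'))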
diff --git a/BuildTools/SCons/Tools/nsis.py b/BuildTools/SCons/Tools/nsis.py
index f5b2905..393beb8 100644
--- a/BuildTools/SCons/Tools/nsis.py
+++ b/BuildTools/SCons/Tools/nsis.py
@@ -6,34 +6,34 @@ nsisIncludes_re = re.compile(r'^\s*!include (translations-\S*)', re.M)
"""
TODO:
- - Extract the target from the nsis file
- - When a target is provided use the output function
+ - Extract the target from the nsis file
+ - When a target is provided use the output function
"""
def generate(env) :
- """Add Builders and construction variables for qt to an Environment."""
- Builder = SCons.Builder.Builder
+ """Add Builders and construction variables for qt to an Environment."""
+ Builder = SCons.Builder.Builder
- env['NSIS_MAKENSIS'] = 'makensis'
- env['NSIS_OPTIONS'] = ["/V2"]
- def winToLocalReformat(path) :
- return os.path.join(*path.split("\\"))
- def scanNsisContent(node, env, path, arg):
- contents = node.get_contents()
- includes = nsisFiles_re.findall(contents) + nsisIncludes_re.findall(contents)
- includes = [ winToLocalReformat(include) for include in includes ]
- return filter(lambda x: x.rfind('*')==-1, includes)
- nsisscanner = env.Scanner(name = 'nsisfile',
- function = scanNsisContent,
- argument = None,
- skeys = ['.nsi'])
- nsisbuilder = Builder(
- action = SCons.Action.Action('$NSIS_MAKENSIS $NSIS_OPTIONS $SOURCE', cmdstr = '$NSISCOMSTR'),
- source_scanner = nsisscanner,
- single_source = True
- )
- env.Append( BUILDERS={'Nsis' : nsisbuilder} )
+ env['NSIS_MAKENSIS'] = 'makensis'
+ env['NSIS_OPTIONS'] = ["/V2"]
+ def winToLocalReformat(path) :
+ return os.path.join(*path.split("\\"))
+ def scanNsisContent(node, env, path, arg):
+ contents = node.get_contents()
+ includes = nsisFiles_re.findall(contents) + nsisIncludes_re.findall(contents)
+ includes = [ winToLocalReformat(include) for include in includes ]
+ return filter(lambda x: x.rfind('*')==-1, includes)
+ nsisscanner = env.Scanner(name = 'nsisfile',
+ function = scanNsisContent,
+ argument = None,
+ skeys = ['.nsi'])
+ nsisbuilder = Builder(
+ action = SCons.Action.Action('$NSIS_MAKENSIS $NSIS_OPTIONS $SOURCE', cmdstr = '$NSISCOMSTR'),
+ source_scanner = nsisscanner,
+ single_source = True
+ )
+ env.Append( BUILDERS={'Nsis' : nsisbuilder} )
def exists(env) :
- return True
+ return True
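A hedged usage sketch, assuming an installer script path; makensis itself decides the output file, so the target given here is only a build-graph placeholder, and the scanner rebuilds the installer when files referenced by File/!include statements in the .nsi change:

    # Hypothetical invocation of the Nsis builder
    env.Nsis("swift-installer", "Packaging/nsis/swift.nsi")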
diff --git a/BuildTools/SCons/Tools/qt4.py b/BuildTools/SCons/Tools/qt4.py
index ec33b97..b965e06 100644
--- a/BuildTools/SCons/Tools/qt4.py
+++ b/BuildTools/SCons/Tools/qt4.py
@@ -47,24 +47,24 @@ import SCons.Util
import SCons.SConf
class ToolQtWarning(SCons.Warnings.Warning):
- pass
+ pass
class GeneratedMocFileNotIncluded(ToolQtWarning):
- pass
+ pass
class QtdirNotFound(ToolQtWarning):
- pass
+ pass
SCons.Warnings.enableWarningClass(ToolQtWarning)
qrcinclude_re = re.compile(r'<file (alias=\"[^\"]*\")?>([^<]*)</file>', re.M)
def transformToWinePath(path) :
- return os.popen('winepath -w "%s"'%path).read().strip().replace('\\','/')
+ return os.popen('winepath -w "%s"'%path).read().strip().replace('\\','/')
header_extensions = [".h", ".hxx", ".hpp", ".hh"]
if SCons.Util.case_sensitive_suffixes('.h', '.H'):
- header_extensions.append('.H')
+ header_extensions.append('.H')
# TODO: The following two lines will work when integrated back to SCons
# TODO: Meanwhile the third line will do the work
#cplusplus = __import__('c++', globals(), locals(), [])
@@ -72,519 +72,522 @@ if SCons.Util.case_sensitive_suffixes('.h', '.H'):
cxx_suffixes = [".c", ".cxx", ".cpp", ".cc"]
def checkMocIncluded(target, source, env):
- moc = target[0]
- cpp = source[0]
- # looks like cpp.includes is cleared before the build stage :-(
- # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/
- path = SCons.Defaults.CScan.path_function(env, moc.cwd)
- includes = SCons.Defaults.CScan(cpp, env, path)
- if not moc in includes:
- SCons.Warnings.warn(
- GeneratedMocFileNotIncluded,
- "Generated moc file '%s' is not included by '%s'" %
- (str(moc), str(cpp)))
+ moc = target[0]
+ cpp = source[0]
+ # looks like cpp.includes is cleared before the build stage :-(
+ # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/
+ path = SCons.Defaults.CScan.path_function(env, moc.cwd)
+ includes = SCons.Defaults.CScan(cpp, env, path)
+ if not moc in includes:
+ SCons.Warnings.warn(
+ GeneratedMocFileNotIncluded,
+ "Generated moc file '%s' is not included by '%s'" %
+ (str(moc), str(cpp)))
def find_file(filename, paths, node_factory):
- for dir in paths:
- node = node_factory(filename, dir)
- if node.rexists():
- return node
- return None
+ for dir in paths:
+ node = node_factory(filename, dir)
+ if node.rexists():
+ return node
+ return None
class _Automoc:
- """
- Callable class, which works as an emitter for Programs, SharedLibraries and
- StaticLibraries.
- """
-
- def __init__(self, objBuilderName):
- self.objBuilderName = objBuilderName
-
- def __call__(self, target, source, env):
- """
- Smart autoscan function. Gets the list of objects for the Program
- or Lib. Adds objects and builders for the special qt files.
- """
- try:
- if int(env.subst('$QT4_AUTOSCAN')) == 0:
- return target, source
- except ValueError:
- pass
- try:
- debug = int(env.subst('$QT4_DEBUG'))
- except ValueError:
- debug = 0
-
- # some shortcuts used in the scanner
- splitext = SCons.Util.splitext
- objBuilder = getattr(env, self.objBuilderName)
-
- # some regular expressions:
- # Q_OBJECT detection
- q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]')
- # cxx and c comment 'eater'
- #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)')
- # CW: something must be wrong with the regexp. See also bug #998222
- # CURRENTLY THERE IS NO TEST CASE FOR THAT
-
- # The following is kind of hacky to get builders working properly (FIXME)
- objBuilderEnv = objBuilder.env
- objBuilder.env = env
- mocBuilderEnv = env.Moc4.env
- env.Moc4.env = env
-
- # make a deep copy for the result; MocH objects will be appended
- out_sources = source[:]
-
- for obj in source:
- if isinstance(obj,basestring): # big kludge!
- print "scons: qt4: '%s' MAYBE USING AN OLD SCONS VERSION AND NOT CONVERTED TO 'File'. Discarded." % str(obj)
- continue
- if not obj.has_builder():
- # binary obj file provided
- if debug:
- print "scons: qt: '%s' seems to be a binary. Discarded." % str(obj)
- continue
- cpp = obj.sources[0]
- if not splitext(str(cpp))[1] in cxx_suffixes:
- if debug:
- print "scons: qt: '%s' is no cxx file. Discarded." % str(cpp)
- # c or fortran source
- continue
- #cpp_contents = comment.sub('', cpp.get_contents())
- try:
- cpp_contents = cpp.get_contents()
- except: continue # may be an still not generated source
- h=None
- for h_ext in header_extensions:
- # try to find the header file in the corresponding source
- # directory
- hname = splitext(cpp.name)[0] + h_ext
- h = find_file(hname, (cpp.get_dir(),), env.File)
- if h:
- if debug:
- print "scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))
- #h_contents = comment.sub('', h.get_contents())
- h_contents = h.get_contents()
- break
- if not h and debug:
- print "scons: qt: no header for '%s'." % (str(cpp))
- if h and q_object_search.search(h_contents):
- # h file with the Q_OBJECT macro found -> add moc_cpp
- moc_cpp = env.Moc4(h)
- moc_o = objBuilder(moc_cpp)
- out_sources.append(moc_o)
- #moc_cpp.target_scanner = SCons.Defaults.CScan
- if debug:
- print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))
- if cpp and q_object_search.search(cpp_contents):
- # cpp file with Q_OBJECT macro found -> add moc
- # (to be included in cpp)
- moc = env.Moc4(cpp)
- env.Ignore(moc, moc)
- if debug:
- print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))
- #moc.source_scanner = SCons.Defaults.CScan
- # restore the original env attributes (FIXME)
- objBuilder.env = objBuilderEnv
- env.Moc4.env = mocBuilderEnv
-
- return (target, out_sources)
+ """
+ Callable class, which works as an emitter for Programs, SharedLibraries and
+ StaticLibraries.
+ """
+
+ def __init__(self, objBuilderName):
+ self.objBuilderName = objBuilderName
+
+ def __call__(self, target, source, env):
+ """
+ Smart autoscan function. Gets the list of objects for the Program
+ or Lib. Adds objects and builders for the special qt files.
+ """
+ try:
+ if int(env.subst('$QT4_AUTOSCAN')) == 0:
+ return target, source
+ except ValueError:
+ pass
+ try:
+ debug = int(env.subst('$QT4_DEBUG'))
+ except ValueError:
+ debug = 0
+
+ # some shortcuts used in the scanner
+ splitext = SCons.Util.splitext
+ objBuilder = getattr(env, self.objBuilderName)
+
+ # some regular expressions:
+ # Q_OBJECT detection
+ q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]')
+ # cxx and c comment 'eater'
+ #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)')
+ # CW: something must be wrong with the regexp. See also bug #998222
+ # CURRENTLY THERE IS NO TEST CASE FOR THAT
+
+ # The following is kind of hacky to get builders working properly (FIXME)
+ objBuilderEnv = objBuilder.env
+ objBuilder.env = env
+ mocBuilderEnv = env.Moc4.env
+ env.Moc4.env = env
+
+ # make a deep copy for the result; MocH objects will be appended
+ out_sources = source[:]
+
+ for obj in source:
+ if isinstance(obj,basestring): # big kludge!
+ print "scons: qt4: '%s' MAYBE USING AN OLD SCONS VERSION AND NOT CONVERTED TO 'File'. Discarded." % str(obj)
+ continue
+ if not obj.has_builder():
+ # binary obj file provided
+ if debug:
+ print "scons: qt: '%s' seems to be a binary. Discarded." % str(obj)
+ continue
+ cpp = obj.sources[0]
+ if not splitext(str(cpp))[1] in cxx_suffixes:
+ if debug:
+ print "scons: qt: '%s' is no cxx file. Discarded." % str(cpp)
+ # c or fortran source
+ continue
+ #cpp_contents = comment.sub('', cpp.get_contents())
+ try:
+ cpp_contents = cpp.get_contents()
+            except: continue # may be a source that has not yet been generated
+ h=None
+ for h_ext in header_extensions:
+ # try to find the header file in the corresponding source
+ # directory
+ hname = splitext(cpp.name)[0] + h_ext
+ h = find_file(hname, (cpp.get_dir(),), env.File)
+ if h:
+ if debug:
+ print "scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))
+ #h_contents = comment.sub('', h.get_contents())
+ h_contents = h.get_contents()
+ break
+ if not h and debug:
+ print "scons: qt: no header for '%s'." % (str(cpp))
+ if h and q_object_search.search(h_contents):
+ # h file with the Q_OBJECT macro found -> add moc_cpp
+ moc_cpp = env.Moc4(h)
+ moc_o = objBuilder(moc_cpp)
+ out_sources.append(moc_o)
+ #moc_cpp.target_scanner = SCons.Defaults.CScan
+ if debug:
+ print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))
+ if cpp and q_object_search.search(cpp_contents):
+ # cpp file with Q_OBJECT macro found -> add moc
+ # (to be included in cpp)
+ moc = env.Moc4(cpp)
+ env.Ignore(moc, moc)
+ if debug:
+ print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))
+ #moc.source_scanner = SCons.Defaults.CScan
+ # restore the original env attributes (FIXME)
+ objBuilder.env = objBuilderEnv
+ env.Moc4.env = mocBuilderEnv
+
+ return (target, out_sources)
AutomocShared = _Automoc('SharedObject')
AutomocStatic = _Automoc('StaticObject')
def _detect(env):
- """Not really safe, but fast method to detect the QT library"""
- if 'QTDIR' in env :
- return env['QTDIR']
-
- if 'QTDIR' in os.environ :
- return os.environ['QTDIR']
-
- moc = None
- if env["qt5"]:
- moc = env.WhereIs('moc-qt5') or env.WhereIs('moc5') or env.WhereIs('moc')
- else :
- moc = env.WhereIs('moc-qt4') or env.WhereIs('moc4') or env.WhereIs('moc')
- if moc:
- # Test whether the moc command we found is real, or whether it is just the qtchooser dummy.
- p = subprocess.Popen([moc, "-v"], shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- p.communicate()
- if p.returncode == 0:
- import sys
- if sys.platform == "darwin" :
- return ""
- QTDIR = os.path.dirname(os.path.dirname(moc))
- return QTDIR
-
- raise SCons.Errors.StopError(
- QtdirNotFound,
- "Could not detect Qt 4 installation")
- return None
+ """Not really safe, but fast method to detect the QT library"""
+ if 'QTDIR' in env :
+ return env['QTDIR']
+
+ if 'QTDIR' in os.environ :
+ return os.environ['QTDIR']
+
+ moc = None
+ if env["qt5"]:
+ moc = env.WhereIs('moc-qt5') or env.WhereIs('moc5') or env.WhereIs('moc')
+ else :
+ moc = env.WhereIs('moc-qt4') or env.WhereIs('moc4') or env.WhereIs('moc')
+ if moc:
+ # Test whether the moc command we found is real, or whether it is just the qtchooser dummy.
+ p = subprocess.Popen([moc, "-v"], shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env["ENV"])
+ p.communicate()
+ if p.returncode == 0:
+ import sys
+ if sys.platform == "darwin" :
+ return ""
+ QTDIR = os.path.dirname(os.path.dirname(moc))
+ return QTDIR
+
+ raise SCons.Errors.StopError(
+ QtdirNotFound,
+ "Could not detect Qt 4 installation")
+ return None
def generate(env):
- """Add Builders and construction variables for qt to an Environment."""
-
- def locateQt4Command(env, command, qtdir) :
- if len(qtdir) == 0 :
- qtdir = "/usr"
- if env["qt5"]:
- suffixes = [
- '-qt5',
- '-qt5.exe',
- '5',
- '5.exe',
- '',
- '.exe',
- ]
- else :
- suffixes = [
- '-qt4',
- '-qt4.exe',
- '4',
- '4.exe',
- '',
- '.exe',
- ]
- triedPaths = []
- for suffix in suffixes :
- fullpath = os.path.join(qtdir,'bin',command + suffix)
- if os.access(fullpath, os.X_OK) :
- return fullpath
- triedPaths.append(fullpath)
-
- fullpath = env.Detect([command+'-qt4', command+'4', command])
- if not (fullpath is None) : return fullpath
-
- raise Exception("Qt4 command '" + command + "' not found. Tried: " + ', '.join(triedPaths))
-
-
- CLVar = SCons.Util.CLVar
- Action = SCons.Action.Action
- Builder = SCons.Builder.Builder
- splitext = SCons.Util.splitext
-
- env['QTDIR'] = _detect(env)
- # TODO: 'Replace' should be 'SetDefault'
-# env.SetDefault(
- env.Replace(
- QTDIR = _detect(env),
- # TODO: This is not reliable to QTDIR value changes but needed in order to support '-qt4' variants
- QT4_MOC = locateQt4Command(env,'moc', env['QTDIR']),
- QT4_UIC = locateQt4Command(env,'uic', env['QTDIR']),
- QT4_RCC = locateQt4Command(env,'rcc', env['QTDIR']),
- QT4_LUPDATE = locateQt4Command(env,'lupdate', env['QTDIR']),
- QT4_LRELEASE = locateQt4Command(env,'lrelease', env['QTDIR']),
- QT4_LIB = '', # KLUDGE to avoid linking qt3 library
-
- QT4_AUTOSCAN = 1, # Should the qt tool try to figure out, which sources are to be moc'ed?
-
- # Some QT specific flags. I don't expect someone wants to
- # manipulate those ...
- QT4_UICFLAGS = CLVar(''),
- QT4_MOCFROMHFLAGS = CLVar(''),
- QT4_MOCFROMCXXFLAGS = CLVar('-i'),
- QT4_QRCFLAGS = '',
-
- # suffixes/prefixes for the headers / sources to generate
- QT4_UISUFFIX = '.ui',
- QT4_UICDECLPREFIX = 'ui_',
- QT4_UICDECLSUFFIX = '.h',
- QT4_MOCHPREFIX = 'moc_',
- QT4_MOCHSUFFIX = '$CXXFILESUFFIX',
- QT4_MOCCXXPREFIX = '',
- QT4_MOCCXXSUFFIX = '.moc',
- QT4_QRCSUFFIX = '.qrc',
- QT4_QRCCXXSUFFIX = '$CXXFILESUFFIX',
- QT4_QRCCXXPREFIX = 'qrc_',
- QT4_MOCCPPPATH = [],
- QT4_MOCINCFLAGS = '$( ${_concat("-I", QT4_MOCCPPPATH, INCSUFFIX, __env__, RDirs)} $)',
-
- # Commands for the qt support ...
- QT4_UICCOM = '$QT4_UIC $QT4_UICFLAGS -o $TARGET $SOURCE',
- # FIXME: The -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED flag is a hack to work
- # around an issue in Qt
- # See https://bugreports.qt-project.org/browse/QTBUG-22829
- QT4_MOCFROMHCOM = '$QT4_MOC -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED $QT4_MOCFROMHFLAGS $QT4_MOCINCFLAGS -o $TARGET $SOURCE',
- QT4_MOCFROMCXXCOM = [
- '$QT4_MOC -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED $QT4_MOCFROMCXXFLAGS $QT4_MOCINCFLAGS -o $TARGET $SOURCE',
- Action(checkMocIncluded,None)],
- QT4_LUPDATECOM = '$QT4_LUPDATE $SOURCE -ts $TARGET',
- QT4_LRELEASECOM = '$QT4_LRELEASE -silent $SOURCE -qm $TARGET',
- QT4_RCCCOM = '$QT4_RCC $QT4_QRCFLAGS -name $SOURCE $SOURCE -o $TARGET',
- )
- if len(env["QTDIR"]) > 0 :
- env.Replace(QT4_LIBPATH = os.path.join('$QTDIR', 'lib'))
-
- # Translation builder
- tsbuilder = Builder(
- action = SCons.Action.Action('$QT4_LUPDATECOM'), #,'$QT4_LUPDATECOMSTR'),
- multi=1
- )
- env.Append( BUILDERS = { 'Ts': tsbuilder } )
- qmbuilder = Builder(
- action = SCons.Action.Action('$QT4_LRELEASECOM', cmdstr = '$QT4_LRELEASECOMSTR'),
- src_suffix = '.ts',
- suffix = '.qm',
- single_source = True
- )
- env.Append( BUILDERS = { 'Qm': qmbuilder } )
-
- # Resource builder
- def scanResources(node, env, path, arg):
- # I've being careful on providing names relative to the qrc file
- # If that was not needed that code could be simplified a lot
- def recursiveFiles(basepath, path) :
- result = []
- for item in os.listdir(os.path.join(basepath, path)) :
- itemPath = os.path.join(path, item)
- if os.path.isdir(os.path.join(basepath, itemPath)) :
- result += recursiveFiles(basepath, itemPath)
- else:
- result.append(itemPath)
- return result
- contents = node.get_contents()
- includes = [included[1] for included in qrcinclude_re.findall(contents)]
- qrcpath = os.path.dirname(node.path)
- dirs = [included for included in includes if os.path.isdir(os.path.join(qrcpath,included))]
- # dirs need to include files recursively
- for dir in dirs :
- includes.remove(dir)
- includes+=recursiveFiles(qrcpath,dir)
- return includes
- qrcscanner = SCons.Scanner.Scanner(name = 'qrcfile',
- function = scanResources,
- argument = None,
- skeys = ['.qrc'])
- qrcbuilder = Builder(
- action = SCons.Action.Action('$QT4_RCCCOM', cmdstr = '$QT4_RCCCOMSTR'),
- source_scanner = qrcscanner,
- src_suffix = '$QT4_QRCSUFFIX',
- suffix = '$QT4_QRCCXXSUFFIX',
- prefix = '$QT4_QRCCXXPREFIX',
- single_source = True
- )
- env.Append( BUILDERS = { 'Qrc': qrcbuilder } )
-
- # Interface builder
- uic4builder = Builder(
- action = SCons.Action.Action('$QT4_UICCOM', cmdstr = '$QT4_UICCOMSTR'),
- src_suffix='$QT4_UISUFFIX',
- suffix='$QT4_UICDECLSUFFIX',
- prefix='$QT4_UICDECLPREFIX',
- single_source = True
- #TODO: Consider the uiscanner on new scons version
- )
- env['BUILDERS']['Uic4'] = uic4builder
-
- # Metaobject builder
- mocBld = Builder(action={}, prefix={}, suffix={})
- for h in header_extensions:
- act = SCons.Action.Action('$QT4_MOCFROMHCOM', cmdstr = '$QT4_MOCFROMHCOMSTR')
- mocBld.add_action(h, act)
- mocBld.prefix[h] = '$QT4_MOCHPREFIX'
- mocBld.suffix[h] = '$QT4_MOCHSUFFIX'
- for cxx in cxx_suffixes:
- act = SCons.Action.Action('$QT4_MOCFROMCXXCOM', cmdstr = '$QT4_MOCFROMCXXCOMSTR')
- mocBld.add_action(cxx, act)
- mocBld.prefix[cxx] = '$QT4_MOCCXXPREFIX'
- mocBld.suffix[cxx] = '$QT4_MOCCXXSUFFIX'
- env['BUILDERS']['Moc4'] = mocBld
-
- # er... no idea what that was for
- static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
- static_obj.src_builder.append('Uic4')
- shared_obj.src_builder.append('Uic4')
-
- # We use the emitters of Program / StaticLibrary / SharedLibrary
- # to scan for moc'able files
- # We can't refer to the builders directly, we have to fetch them
- # as Environment attributes because that sets them up to be called
- # correctly later by our emitter.
- env.AppendUnique(PROGEMITTER =[AutomocStatic],
- SHLIBEMITTER=[AutomocShared],
- LIBEMITTER =[AutomocStatic],
- # Of course, we need to link against the qt libraries
- LIBPATH=["$QT4_LIBPATH"],
- LIBS=['$QT4_LIB'])
-
- # TODO: Does dbusxml2cpp need an adapter
- env.AddMethod(enable_modules, "EnableQt4Modules")
+ """Add Builders and construction variables for qt to an Environment."""
+
+ def locateQt4Command(env, command, qtdir) :
+ if len(qtdir) == 0 :
+ qtdir = "/usr"
+ if env["qt5"]:
+ suffixes = [
+ '-qt5',
+ '-qt5.exe',
+ '5',
+ '5.exe',
+ '',
+ '.exe',
+ ]
+ else :
+ suffixes = [
+ '-qt4',
+ '-qt4.exe',
+ '4',
+ '4.exe',
+ '',
+ '.exe',
+ ]
+ triedPaths = []
+ for suffix in suffixes :
+ fullpath = os.path.join(qtdir,'bin',command + suffix)
+ if os.access(fullpath, os.X_OK) :
+ return fullpath
+ triedPaths.append(fullpath)
+
+ fullpath = env.Detect([command+'-qt4', command+'4', command])
+ if not (fullpath is None) : return fullpath
+
+ raise Exception("Qt4 command '" + command + "' not found. Tried: " + ', '.join(triedPaths))
+
+
+ CLVar = SCons.Util.CLVar
+ Action = SCons.Action.Action
+ Builder = SCons.Builder.Builder
+ splitext = SCons.Util.splitext
+
+ env['QTDIR'] = _detect(env)
+ # TODO: 'Replace' should be 'SetDefault'
+# env.SetDefault(
+ env.Replace(
+ QTDIR = _detect(env),
+        # TODO: This is not robust against QTDIR value changes but is needed in order to support '-qt4' variants
+ QT4_MOC = locateQt4Command(env,'moc', env['QTDIR']),
+ QT4_UIC = locateQt4Command(env,'uic', env['QTDIR']),
+ QT4_RCC = locateQt4Command(env,'rcc', env['QTDIR']),
+ QT4_LUPDATE = locateQt4Command(env,'lupdate', env['QTDIR']),
+ QT4_LRELEASE = locateQt4Command(env,'lrelease', env['QTDIR']),
+ QT4_LIB = '', # KLUDGE to avoid linking qt3 library
+
+        QT4_AUTOSCAN = 1, # Should the qt tool try to figure out which sources are to be moc'ed?
+
+ # Some QT specific flags. I don't expect someone wants to
+ # manipulate those ...
+ QT4_UICFLAGS = CLVar(''),
+ QT4_MOCFROMHFLAGS = CLVar(''),
+ QT4_MOCFROMCXXFLAGS = CLVar('-i'),
+ QT4_QRCFLAGS = '--compress 9 --threshold 5',
+
+ # suffixes/prefixes for the headers / sources to generate
+ QT4_UISUFFIX = '.ui',
+ QT4_UICDECLPREFIX = 'ui_',
+ QT4_UICDECLSUFFIX = '.h',
+ QT4_MOCHPREFIX = 'moc_',
+ QT4_MOCHSUFFIX = '$CXXFILESUFFIX',
+ QT4_MOCCXXPREFIX = '',
+ QT4_MOCCXXSUFFIX = '.moc',
+ QT4_QRCSUFFIX = '.qrc',
+ QT4_QRCCXXSUFFIX = '$CXXFILESUFFIX',
+ QT4_QRCCXXPREFIX = 'qrc_',
+ QT4_MOCCPPPATH = [],
+ QT4_MOCINCFLAGS = '$( ${_concat("-I", QT4_MOCCPPPATH, INCSUFFIX, __env__, RDirs)} $)',
+
+ # Commands for the qt support ...
+ QT4_UICCOM = '$QT4_UIC $QT4_UICFLAGS -o $TARGET $SOURCE',
+ # FIXME: The -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED flag is a hack to work
+ # around an issue in Qt
+ # See https://bugreports.qt-project.org/browse/QTBUG-22829
+ QT4_MOCFROMHCOM = '$QT4_MOC -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED $QT4_MOCFROMHFLAGS $QT4_MOCINCFLAGS -o $TARGET $SOURCE',
+ QT4_MOCFROMCXXCOM = [
+ '$QT4_MOC -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED $QT4_MOCFROMCXXFLAGS $QT4_MOCINCFLAGS -o $TARGET $SOURCE',
+ Action(checkMocIncluded,None)],
+ QT4_LUPDATECOM = '$QT4_LUPDATE $SOURCE -ts $TARGET',
+ QT4_LRELEASECOM = '$QT4_LRELEASE -silent $SOURCE -qm $TARGET',
+ QT4_RCCCOM = '$QT4_RCC $QT4_QRCFLAGS -name $SOURCE $SOURCE -o $TARGET',
+ )
+ if len(env["QTDIR"]) > 0 :
+ env.Replace(QT4_LIBPATH = os.path.join('$QTDIR', 'lib'))
+
+ # Translation builder
+ tsbuilder = Builder(
+ action = SCons.Action.Action('$QT4_LUPDATECOM'), #,'$QT4_LUPDATECOMSTR'),
+ multi=1
+ )
+ env.Append( BUILDERS = { 'Ts': tsbuilder } )
+ qmbuilder = Builder(
+ action = SCons.Action.Action('$QT4_LRELEASECOM', cmdstr = '$QT4_LRELEASECOMSTR'),
+ src_suffix = '.ts',
+ suffix = '.qm',
+ single_source = True
+ )
+ env.Append( BUILDERS = { 'Qm': qmbuilder } )
+
+ # Resource builder
+ def scanResources(node, env, path, arg):
+        # I've been careful to provide names relative to the qrc file;
+        # if that were not needed, this code could be simplified a lot
+ def recursiveFiles(basepath, path) :
+ result = []
+ for item in os.listdir(os.path.join(basepath, path)) :
+ itemPath = os.path.join(path, item)
+ if os.path.isdir(os.path.join(basepath, itemPath)) :
+ result += recursiveFiles(basepath, itemPath)
+ else:
+ result.append(itemPath)
+ return result
+ contents = node.get_contents()
+ includes = [included[1] for included in qrcinclude_re.findall(contents)]
+ qrcpath = os.path.dirname(node.path)
+ dirs = [included for included in includes if os.path.isdir(os.path.join(qrcpath,included))]
+ # dirs need to include files recursively
+ for dir in dirs :
+ includes.remove(dir)
+ includes+=recursiveFiles(qrcpath,dir)
+ return includes
+ qrcscanner = SCons.Scanner.Scanner(name = 'qrcfile',
+ function = scanResources,
+ argument = None,
+ skeys = ['.qrc'])
+ qrcbuilder = Builder(
+ action = SCons.Action.Action('$QT4_RCCCOM', cmdstr = '$QT4_RCCCOMSTR'),
+ source_scanner = qrcscanner,
+ src_suffix = '$QT4_QRCSUFFIX',
+ suffix = '$QT4_QRCCXXSUFFIX',
+ prefix = '$QT4_QRCCXXPREFIX',
+ single_source = True
+ )
+ env.Append( BUILDERS = { 'Qrc': qrcbuilder } )
+
+ # Interface builder
+ uic4builder = Builder(
+ action = SCons.Action.Action('$QT4_UICCOM', cmdstr = '$QT4_UICCOMSTR'),
+ src_suffix='$QT4_UISUFFIX',
+ suffix='$QT4_UICDECLSUFFIX',
+ prefix='$QT4_UICDECLPREFIX',
+ single_source = True
+ #TODO: Consider the uiscanner on new scons version
+ )
+ env['BUILDERS']['Uic4'] = uic4builder
+
+ # Metaobject builder
+ mocBld = Builder(action={}, prefix={}, suffix={})
+ for h in header_extensions:
+ act = SCons.Action.Action('$QT4_MOCFROMHCOM', cmdstr = '$QT4_MOCFROMHCOMSTR')
+ mocBld.add_action(h, act)
+ mocBld.prefix[h] = '$QT4_MOCHPREFIX'
+ mocBld.suffix[h] = '$QT4_MOCHSUFFIX'
+ for cxx in cxx_suffixes:
+ act = SCons.Action.Action('$QT4_MOCFROMCXXCOM', cmdstr = '$QT4_MOCFROMCXXCOMSTR')
+ mocBld.add_action(cxx, act)
+ mocBld.prefix[cxx] = '$QT4_MOCCXXPREFIX'
+ mocBld.suffix[cxx] = '$QT4_MOCCXXSUFFIX'
+ env['BUILDERS']['Moc4'] = mocBld
+
+ # er... no idea what that was for
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+ static_obj.src_builder.append('Uic4')
+ shared_obj.src_builder.append('Uic4')
+
+ # We use the emitters of Program / StaticLibrary / SharedLibrary
+ # to scan for moc'able files
+ # We can't refer to the builders directly, we have to fetch them
+ # as Environment attributes because that sets them up to be called
+ # correctly later by our emitter.
+ env.AppendUnique(PROGEMITTER =[AutomocStatic],
+ SHLIBEMITTER=[AutomocShared],
+ LIBEMITTER =[AutomocStatic],
+ # Of course, we need to link against the qt libraries
+ LIBPATH=["$QT4_LIBPATH"],
+ LIBS=['$QT4_LIB'])
+
+ # TODO: Does dbusxml2cpp need an adapter
+ env.AddMethod(enable_modules, "EnableQt4Modules")
def enable_modules(self, modules, debug=False, crosscompiling=False, version='4') :
- import sys
-
- validModules = [
- 'QtCore',
- 'QtGui',
- 'QtOpenGL',
- 'Qt3Support',
- 'QtAssistant',
- 'QtScript',
- 'QtDBus',
- 'QtSql',
- # The next modules have not been tested yet so, please
- # maybe they require additional work on non Linux platforms
- 'QtNetwork',
- 'QtSvg',
- 'QtTest',
- 'QtXml',
- 'QtXmlPatterns',
- 'QtUiTools',
- 'QtDesigner',
- 'QtDesignerComponents',
- 'QtWebKit',
- 'QtHelp',
- 'QtScript',
-
- # Qt5 modules
- 'QtWidgets',
- 'QtMultimedia',
- 'QtWebKitWidgets',
- 'QtWebChannel',
- ]
- if sys.platform != "win32" and sys.platform != "darwin" and not crosscompiling :
- validModules += ['QtX11Extras']
- staticModules = [
- 'QtUiTools',
- ]
- invalidModules=[]
- for module in modules:
- if module not in validModules :
- invalidModules.append(module)
- if invalidModules :
- raise Exception("Modules %s are not Qt4 modules. Valid Qt4 modules are: %s"% (
- str(invalidModules),str(validModules)))
-
- moduleDefines = {
- 'QtScript' : ['QT_SCRIPT_LIB'],
- 'QtSvg' : ['QT_SVG_LIB'],
- 'Qt3Support' : ['QT_QT3SUPPORT_LIB','QT3_SUPPORT'],
- 'QtSql' : ['QT_SQL_LIB'],
- 'QtXml' : ['QT_XML_LIB'],
- 'QtOpenGL' : ['QT_OPENGL_LIB'],
- 'QtGui' : ['QT_GUI_LIB'],
- 'QtWidgets' : ['QT_WIDGETS_LIB'],
- 'QtWebKitWidgets' : [],
- 'QtNetwork' : ['QT_NETWORK_LIB'],
- 'QtCore' : ['QT_CORE_LIB'],
- }
- for module in modules :
- try : self.AppendUnique(CPPDEFINES=moduleDefines[module])
- except: pass
- debugSuffix = ''
-
-
- include_flag = "-I"
- if os.path.basename(self["CC"]) in ("gcc", "clang"):
- include_flag = "-isystem"
-
-
- if sys.platform != "win32" and sys.platform != "darwin" and not crosscompiling :
- if self["qt"]:
- # The user specified qt path in config.py and we are going to use the
- # installation under that location.
- UsePkgConfig = False
- else:
- # The user did not specify a qt path in config py and we are going to
- # ask pkg-config for the correct flags.
- UsePkgConfig = True
- if not UsePkgConfig:
- if debug : debugSuffix = '_debug'
- if version == '4' :
- self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR", "include", "phonon")])
- for module in modules :
- module_str = module
- if not version == '4' :
- module_str = module_str.replace('Qt', 'Qt5')
- self.AppendUnique(LIBS=[module_str+debugSuffix])
- self.AppendUnique(LIBPATH=[os.path.join("$QTDIR","lib")])
- self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR","include")])
- self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR","include", module)])
- self["QT4_MOCCPPPATH"] = self["CPPPATH"]
- return
- else:
- test_conf = self.Configure()
- modules_str = " ".join(modules)
- if not version == '4' :
- modules_str = modules_str.replace('Qt', 'Qt5')
-
- # Check if Qt is registered at pkg-config
- ret = test_conf.TryAction('pkg-config --exists \'%s\'' % modules_str)[0]
- if ret != 1:
- test_conf.Finish()
- raise Exception('Qt installation is missing packages. The following are required: %s' % modules_str)
- return
- test_conf.env.ParseConfig("pkg-config --cflags --libs " + modules_str)
- self.AppendUnique(LIBS=test_conf.env["LIBS"], LIBPATH=test_conf.env["LIBPATH"], CPPPATH=test_conf.env["CPPPATH"])
- self["QT4_MOCCPPPATH"] = self["CPPPATH"]
- test_conf.Finish()
- return
-
- if sys.platform == "win32" or crosscompiling :
- if crosscompiling:
- transformedQtdir = transformToWinePath(self['QTDIR'])
- self['QT4_MOC'] = "QTDIR=%s %s"%( transformedQtdir, self['QT4_MOC'])
- self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include")])
- try: modules.remove("QtDBus")
- except: pass
- if debug : debugSuffix = 'd'
- if "QtAssistant" in modules:
- self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include","QtAssistant")])
- modules.remove("QtAssistant")
- modules.append("QtAssistantClient")
- if version == '4' :
- # FIXME: Phonon Hack
- self.AppendUnique(LIBS=['phonon'+debugSuffix+version])
- self.AppendUnique(LIBS=[lib+debugSuffix+version for lib in modules if lib not in staticModules])
- else :
- self.AppendUnique(LIBS=[lib.replace('Qt', 'Qt5') + debugSuffix for lib in modules if lib not in staticModules])
- self.PrependUnique(LIBS=[lib+debugSuffix for lib in modules if lib in staticModules])
- if 'QtOpenGL' in modules:
- self.AppendUnique(LIBS=['opengl32'])
- elif version == '5' :
- self.Append(CPPDEFINES = ["QT_NO_OPENGL"])
- self.AppendUnique(CPPPATH=[ '$QTDIR/include/'])
- self.AppendUnique(CPPPATH=[ '$QTDIR/include/'+module for module in modules])
- if crosscompiling :
- self["QT4_MOCCPPPATH"] = [
- path.replace('$QTDIR', transformedQtdir)
- for path in self['CPPPATH'] ]
- else :
- self["QT4_MOCCPPPATH"] = self["CPPPATH"]
- self.AppendUnique(LIBPATH=[os.path.join('$QTDIR','lib')])
- self.PrependUnique(LIBS=["shell32"])
- return
-
- if sys.platform=="darwin" :
- if debug : debugSuffix = 'd'
-
- if len(self["QTDIR"]) > 0 :
- self.AppendUnique(LIBPATH=[os.path.join('$QTDIR','lib')])
- self.AppendUnique(LINKFLAGS="-F$QTDIR/lib")
- self.AppendUnique(CPPFLAGS="-iframework$QTDIR/lib")
- self.AppendUnique(LINKFLAGS="-L$QTDIR/lib") #TODO clean!
-
- # FIXME: Phonon Hack
- if version == '4' :
- self.Append(LINKFLAGS=['-framework', "phonon"])
-
- for module in modules :
- if module in staticModules :
- self.AppendUnique(LIBS=[module+debugSuffix]) # TODO: Add the debug suffix
- self.AppendUnique(LIBPATH=[os.path.join("$QTDIR","lib")])
- else :
- if len(self["QTDIR"]) > 0 :
- self.Append(CPPFLAGS = [include_flag + os.path.join("$QTDIR", "lib", module + ".framework", "Versions", version, "Headers")])
- else :
- self.Append(CPPFLAGS = [include_flag + os.path.join("/Library/Frameworks", module + ".framework", "Versions", version, "Headers")])
- self.Append(LINKFLAGS=['-framework', module])
- if 'QtOpenGL' in modules:
- self.AppendUnique(LINKFLAGS="-F/System/Library/Frameworks")
- self.Append(LINKFLAGS=['-framework', 'AGL']) #TODO ughly kludge to avoid quotes
- self.Append(LINKFLAGS=['-framework', 'OpenGL'])
- self["QT4_MOCCPPPATH"] = self["CPPPATH"]
+ import sys
+
+ validModules = [
+ 'QtCore',
+ 'QtGui',
+ 'QtOpenGL',
+ 'Qt3Support',
+ 'QtAssistant',
+ 'QtScript',
+ 'QtDBus',
+ 'QtSql',
+        # The following modules have not been tested yet; they may require
+        # additional work on non-Linux platforms
+ 'QtNetwork',
+ 'QtSvg',
+ 'QtTest',
+ 'QtXml',
+ 'QtXmlPatterns',
+ 'QtUiTools',
+ 'QtDesigner',
+ 'QtDesignerComponents',
+ 'QtWebKit',
+ 'QtHelp',
+ 'QtScript',
+
+ # Qt5 modules
+ 'QtWidgets',
+ 'QtMultimedia',
+ 'QtWebKitWidgets',
+ 'QtWebChannel',
+ ]
+ if sys.platform != "win32" and sys.platform != "darwin" and not crosscompiling :
+ validModules += ['QtX11Extras']
+ staticModules = [
+ 'QtUiTools',
+ ]
+ invalidModules=[]
+ for module in modules:
+ if module not in validModules :
+ invalidModules.append(module)
+ if invalidModules :
+ raise Exception("Modules %s are not Qt4 modules. Valid Qt4 modules are: %s"% (
+ str(invalidModules),str(validModules)))
+
+ moduleDefines = {
+ 'QtScript' : ['QT_SCRIPT_LIB'],
+ 'QtSvg' : ['QT_SVG_LIB'],
+ 'Qt3Support' : ['QT_QT3SUPPORT_LIB','QT3_SUPPORT'],
+ 'QtSql' : ['QT_SQL_LIB'],
+ 'QtXml' : ['QT_XML_LIB'],
+ 'QtOpenGL' : ['QT_OPENGL_LIB'],
+ 'QtGui' : ['QT_GUI_LIB'],
+ 'QtWidgets' : ['QT_WIDGETS_LIB'],
+ 'QtWebKitWidgets' : [],
+ 'QtNetwork' : ['QT_NETWORK_LIB'],
+ 'QtCore' : ['QT_CORE_LIB'],
+ }
+ for module in modules :
+ try : self.AppendUnique(CPPDEFINES=moduleDefines[module])
+ except: pass
+ debugSuffix = ''
+
+
+ include_flag = "-I"
+ if os.path.basename(self["CC"]) in ("gcc", "clang"):
+ include_flag = "-isystem"
+
+
+ if sys.platform != "win32" and sys.platform != "darwin" and not crosscompiling :
+ if self["qt"]:
+            # The user specified a qt path in config.py, so we are going to use the
+            # installation under that location.
+ UsePkgConfig = False
+ else:
+            # The user did not specify a qt path in config.py, so we are going to
+            # ask pkg-config for the correct flags.
+ UsePkgConfig = True
+ if not UsePkgConfig:
+ if debug : debugSuffix = '_debug'
+ if version == '4' :
+ self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR", "include", "phonon")])
+ for module in modules :
+ module_str = module
+ if not version == '4' :
+ module_str = module_str.replace('Qt', 'Qt5')
+ self.AppendUnique(LIBS=[module_str+debugSuffix])
+ self.AppendUnique(LIBPATH=[os.path.join("$QTDIR","lib")])
+ self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR","include")])
+ self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR","include", module)])
+ self["QT4_MOCCPPPATH"] = self["CPPPATH"]
+ return
+ else:
+ test_conf = self.Configure()
+ modules_str = " ".join(modules)
+ if not version == '4' :
+ modules_str = modules_str.replace('Qt', 'Qt5')
+
+ # Check if Qt is registered at pkg-config
+ ret = test_conf.TryAction('pkg-config --exists \'%s\'' % modules_str)[0]
+ if ret != 1:
+ test_conf.Finish()
+ raise Exception('Qt installation is missing packages. The following are required: %s' % modules_str)
+ return
+
+ def parse_conf_as_system(env, cmd, unique=1):
+ return env.MergeFlags(cmd.replace("-I/", include_flag + "/"), unique)
+
+ test_conf.env.ParseConfig("pkg-config --cflags --libs " + modules_str, parse_conf_as_system)
+ self["QT4_MOCCPPPATH"] = self["CPPPATH"]
+ test_conf.Finish()
+ return
+
+ if sys.platform == "win32" or crosscompiling :
+ if crosscompiling:
+ transformedQtdir = transformToWinePath(self['QTDIR'])
+ self['QT4_MOC'] = "QTDIR=%s %s"%( transformedQtdir, self['QT4_MOC'])
+ self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include")])
+ try: modules.remove("QtDBus")
+ except: pass
+ if debug : debugSuffix = 'd'
+ if "QtAssistant" in modules:
+ self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include","QtAssistant")])
+ modules.remove("QtAssistant")
+ modules.append("QtAssistantClient")
+ if version == '4' :
+ # FIXME: Phonon Hack
+ self.AppendUnique(LIBS=['phonon'+debugSuffix+version])
+ self.AppendUnique(LIBS=[lib+debugSuffix+version for lib in modules if lib not in staticModules])
+ else :
+ self.AppendUnique(LIBS=[lib.replace('Qt', 'Qt5') + debugSuffix for lib in modules if lib not in staticModules])
+ self.PrependUnique(LIBS=[lib+debugSuffix for lib in modules if lib in staticModules])
+ if 'QtOpenGL' in modules:
+ self.AppendUnique(LIBS=['opengl32'])
+ elif version == '5' :
+ self.Append(CPPDEFINES = ["QT_NO_OPENGL"])
+ self.AppendUnique(CPPPATH=[ '$QTDIR/include/'])
+ self.AppendUnique(CPPPATH=[ '$QTDIR/include/'+module for module in modules])
+ if crosscompiling :
+ self["QT4_MOCCPPPATH"] = [
+ path.replace('$QTDIR', transformedQtdir)
+ for path in self['CPPPATH'] ]
+ else :
+ self["QT4_MOCCPPPATH"] = self["CPPPATH"]
+ self.AppendUnique(LIBPATH=[os.path.join('$QTDIR','lib')])
+ self.PrependUnique(LIBS=["shell32"])
+ return
+
+ if sys.platform=="darwin" :
+ if debug : debugSuffix = 'd'
+
+ if len(self["QTDIR"]) > 0 :
+ self.AppendUnique(LIBPATH=[os.path.join('$QTDIR','lib')])
+ self.AppendUnique(LINKFLAGS="-F$QTDIR/lib")
+ self.AppendUnique(CPPFLAGS=["-iframework$QTDIR/lib", include_flag + os.path.join("$QTDIR", "include")])
+ self.Append(LINKFLAGS="-Wl,-rpath,$QTDIR/lib")
+
+ # FIXME: Phonon Hack
+ if version == '4' :
+ self.Append(LINKFLAGS=['-framework', "phonon"])
+
+ for module in modules :
+ if module in staticModules :
+ self.AppendUnique(LIBS=[module+debugSuffix]) # TODO: Add the debug suffix
+ self.AppendUnique(LIBPATH=[os.path.join("$QTDIR","lib")])
+ else :
+ if len(self["QTDIR"]) > 0 :
+ self.Append(CPPFLAGS = [include_flag + os.path.join("$QTDIR", "lib", module + ".framework", "Headers")])
+ else :
+ self.Append(CPPFLAGS = [include_flag + os.path.join("/Library/Frameworks", module + ".framework", "Headers")])
+ self.Append(LINKFLAGS=['-framework', module])
+ if 'QtOpenGL' in modules:
+ self.AppendUnique(LINKFLAGS="-F/System/Library/Frameworks")
+            self.Append(LINKFLAGS=['-framework', 'AGL']) #TODO ugly kludge to avoid quotes
+ self.Append(LINKFLAGS=['-framework', 'OpenGL'])
+ self["QT4_MOCCPPPATH"] = self["CPPPATH"]
def exists(env):
- return _detect(env)
+ return _detect(env)
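To tie the pieces together, a hedged sketch of how a build script might consume this tool (the module list and qt5 flag are assumptions); EnableQt4Modules then adjusts include paths, libraries and moc settings per platform as implemented above:

    # Hypothetical SConscript fragment
    qtenv = env.Clone()
    qtenv["qt5"] = True                      # picks moc-qt5/moc5 in _detect()
    qtenv.Tool("qt4", toolpath = ["BuildTools/SCons/Tools"])
    qtenv.EnableQt4Modules(["QtCore", "QtGui", "QtWidgets", "QtNetwork"], version = "5")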
diff --git a/BuildTools/SCons/Tools/textfile.py b/BuildTools/SCons/Tools/textfile.py
index b290125..89f8963 100644
--- a/BuildTools/SCons/Tools/textfile.py
+++ b/BuildTools/SCons/Tools/textfile.py
@@ -25,23 +25,23 @@
__doc__ = """
Textfile/Substfile builder for SCons.
- Create file 'target' which typically is a textfile. The 'source'
- may be any combination of strings, Nodes, or lists of same. A
- 'linesep' will be put between any part written and defaults to
- os.linesep.
-
- The only difference between the Textfile builder and the Substfile
- builder is that strings are converted to Value() nodes for the
- former and File() nodes for the latter. To insert files in the
- former or strings in the latter, wrap them in a File() or Value(),
- respectively.
-
- The values of SUBST_DICT first have any construction variables
- expanded (its keys are not expanded). If a value of SUBST_DICT is
- a python callable function, it is called and the result is expanded
- as the value. Values are substituted in a "random" order; if any
- substitution could be further expanded by another subsitition, it
- is unpredictible whether the expansion will occur.
+ Create file 'target' which typically is a textfile. The 'source'
+ may be any combination of strings, Nodes, or lists of same. A
+ 'linesep' will be put between any part written and defaults to
+ os.linesep.
+
+ The only difference between the Textfile builder and the Substfile
+ builder is that strings are converted to Value() nodes for the
+ former and File() nodes for the latter. To insert files in the
+ former or strings in the latter, wrap them in a File() or Value(),
+ respectively.
+
+ The values of SUBST_DICT first have any construction variables
+ expanded (its keys are not expanded). If a value of SUBST_DICT is
+ a python callable function, it is called and the result is expanded
+ as the value. Values are substituted in a "random" order; if any
+    substitution could be further expanded by another substitution, it
+    is unpredictable whether the expansion will occur.
"""
__revision__ = "src/engine/SCons/Tool/textfile.py 5357 2011/09/09 21:31:03 bdeegan"
@@ -56,117 +56,117 @@ from SCons.Node.Python import Value
from SCons.Util import is_String, is_Sequence, is_Dict
def _do_subst(node, subs):
- """
- Fetch the node contents and replace all instances of the keys with
- their values. For example, if subs is
- {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'},
- then all instances of %VERSION% in the file will be replaced with
- 1.2345 and so forth.
- """
- contents = node.get_text_contents()
- if not subs: return contents
- for (k,v) in subs:
- contents = re.sub(k, v, contents)
- return contents
+ """
+ Fetch the node contents and replace all instances of the keys with
+ their values. For example, if subs is
+ {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'},
+ then all instances of %VERSION% in the file will be replaced with
+ 1.2345 and so forth.
+ """
+ contents = node.get_text_contents()
+ if not subs: return contents
+ for (k,v) in subs:
+ contents = re.sub(k, v, contents)
+ return contents
def _action(target, source, env):
- # prepare the line separator
- linesep = env['LINESEPARATOR']
- if linesep is None:
- linesep = os.linesep
- elif is_String(linesep):
- pass
- elif isinstance(linesep, Value):
- linesep = linesep.get_text_contents()
- else:
- raise SCons.Errors.UserError(
- 'unexpected type/class for LINESEPARATOR: %s'
- % repr(linesep), None)
-
- # create a dictionary to use for the substitutions
- if 'SUBST_DICT' not in env:
- subs = None # no substitutions
- else:
- d = env['SUBST_DICT']
- if is_Dict(d):
- d = list(d.items())
- elif is_Sequence(d):
- pass
- else:
- raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence')
- subs = []
- for (k,v) in d:
- if callable(v):
- v = v()
- if is_String(v):
- v = env.subst(v)
- else:
- v = str(v)
- subs.append((k,v))
-
- # write the file
- try:
- fd = open(target[0].get_path(), "wb")
- except (OSError,IOError), e:
- raise SCons.Errors.UserError("Can't write target file %s" % target[0])
- # separate lines by 'linesep' only if linesep is not empty
- lsep = None
- for s in source:
- if lsep: fd.write(lsep)
- fd.write(_do_subst(s, subs))
- lsep = linesep
- fd.close()
+ # prepare the line separator
+ linesep = env['LINESEPARATOR']
+ if linesep is None:
+ linesep = os.linesep
+ elif is_String(linesep):
+ pass
+ elif isinstance(linesep, Value):
+ linesep = linesep.get_text_contents()
+ else:
+ raise SCons.Errors.UserError(
+ 'unexpected type/class for LINESEPARATOR: %s'
+ % repr(linesep), None)
+
+ # create a dictionary to use for the substitutions
+ if 'SUBST_DICT' not in env:
+ subs = None # no substitutions
+ else:
+ d = env['SUBST_DICT']
+ if is_Dict(d):
+ d = list(d.items())
+ elif is_Sequence(d):
+ pass
+ else:
+ raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence')
+ subs = []
+ for (k,v) in d:
+ if callable(v):
+ v = v()
+ if is_String(v):
+ v = env.subst(v)
+ else:
+ v = str(v)
+ subs.append((k,v))
+
+ # write the file
+ try:
+ fd = open(target[0].get_path(), "wb")
+ except (OSError,IOError), e:
+ raise SCons.Errors.UserError("Can't write target file %s" % target[0])
+ # separate lines by 'linesep' only if linesep is not empty
+ lsep = None
+ for s in source:
+ if lsep: fd.write(lsep)
+ fd.write(_do_subst(s, subs))
+ lsep = linesep
+ fd.close()
def _strfunc(target, source, env):
- return "Creating '%s'" % target[0]
+ return "Creating '%s'" % target[0]
def _convert_list_R(newlist, sources):
- for elem in sources:
- if is_Sequence(elem):
- _convert_list_R(newlist, elem)
- elif isinstance(elem, Node):
- newlist.append(elem)
- else:
- newlist.append(Value(elem))
+ for elem in sources:
+ if is_Sequence(elem):
+ _convert_list_R(newlist, elem)
+ elif isinstance(elem, Node):
+ newlist.append(elem)
+ else:
+ newlist.append(Value(elem))
def _convert_list(target, source, env):
- if len(target) != 1:
- raise SCons.Errors.UserError("Only one target file allowed")
- newlist = []
- _convert_list_R(newlist, source)
- return target, newlist
+ if len(target) != 1:
+ raise SCons.Errors.UserError("Only one target file allowed")
+ newlist = []
+ _convert_list_R(newlist, source)
+ return target, newlist
_common_varlist = ['SUBST_DICT', 'LINESEPARATOR']
_text_varlist = _common_varlist + ['TEXTFILEPREFIX', 'TEXTFILESUFFIX']
_text_builder = SCons.Builder.Builder(
- action = SCons.Action.Action(_action, _strfunc, varlist = _text_varlist),
- source_factory = Value,
- emitter = _convert_list,
- prefix = '$TEXTFILEPREFIX',
- suffix = '$TEXTFILESUFFIX',
- )
+ action = SCons.Action.Action(_action, _strfunc, varlist = _text_varlist),
+ source_factory = Value,
+ emitter = _convert_list,
+ prefix = '$TEXTFILEPREFIX',
+ suffix = '$TEXTFILESUFFIX',
+ )
_subst_varlist = _common_varlist + ['SUBSTFILEPREFIX', 'TEXTFILESUFFIX']
_subst_builder = SCons.Builder.Builder(
- action = SCons.Action.Action(_action, _strfunc, varlist = _subst_varlist),
- source_factory = SCons.Node.FS.File,
- emitter = _convert_list,
- prefix = '$SUBSTFILEPREFIX',
- suffix = '$SUBSTFILESUFFIX',
- src_suffix = ['.in'],
- )
+ action = SCons.Action.Action(_action, _strfunc, varlist = _subst_varlist),
+ source_factory = SCons.Node.FS.File,
+ emitter = _convert_list,
+ prefix = '$SUBSTFILEPREFIX',
+ suffix = '$SUBSTFILESUFFIX',
+ src_suffix = ['.in'],
+ )
def generate(env):
- env['LINESEPARATOR'] = os.linesep
- env['BUILDERS']['MyTextfile'] = _text_builder
- env['TEXTFILEPREFIX'] = ''
- env['TEXTFILESUFFIX'] = '.txt'
- env['BUILDERS']['MySubstfile'] = _subst_builder
- env['SUBSTFILEPREFIX'] = ''
- env['SUBSTFILESUFFIX'] = ''
+ env['LINESEPARATOR'] = os.linesep
+ env['BUILDERS']['MyTextfile'] = _text_builder
+ env['TEXTFILEPREFIX'] = ''
+ env['TEXTFILESUFFIX'] = '.txt'
+ env['BUILDERS']['MySubstfile'] = _subst_builder
+ env['SUBSTFILEPREFIX'] = ''
+ env['SUBSTFILESUFFIX'] = ''
def exists(env):
- return 1
+ return 1
# Local Variables:
# tab-width:4
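A short usage sketch of the two builders registered here (file names and keys are assumptions; the %VERSION%/%BASE% placeholders mirror the docstring above):

    # Hypothetical usage of the Substfile/Textfile variants registered above
    env.MySubstfile("config.h", "config.h.in",
        SUBST_DICT = {"%VERSION%": "1.2345", "%BASE%": "MyProg"})
    env.MyTextfile("notes", ["line one", "line two"])   # written as notes.txt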
diff --git a/BuildTools/SCons/Tools/wix.py b/BuildTools/SCons/Tools/wix.py
index 705d249..907b6d9 100644
--- a/BuildTools/SCons/Tools/wix.py
+++ b/BuildTools/SCons/Tools/wix.py
@@ -3,49 +3,49 @@ import SCons.Util
from subprocess import call
def generate(env) :
- wixPath = env.get("wix_bindir", "")
- if len(wixPath) > 0 and wixPath[-1] != "\\":
- wixPath += "\\"
- env['WIX_HEAT'] = wixPath + 'heat.exe'
- env['WIX_HEAT_OPTIONS'] = '-nologo -ag -sfrag -suid -template fragment -dr ProgramFilesFolder'
- env['WIX_CANDLE'] = wixPath + 'candle.exe'
- env['WIX_CANDLE_OPTIONS'] = '-nologo'
- env['WIX_LIGHT'] = wixPath + 'light.exe'
- env['WIX_LIGHT_OPTIONS'] = '-nologo -ext WixUIExtension'
-
- def WiX_IncludeScanner(source, env, path, arg):
- wixIncludeRegexp = re.compile(r'^\s*\<\?include (\S+.wxs)\s*\?\>\S*', re.M)
- contents = source.get_contents()
- includes = wixIncludeRegexp.findall(contents)
- return [ "" + include for include in includes ]
-
- heat_builder = SCons.Builder.Builder(
- action = '"$WIX_HEAT" dir "$WIX_SOURCE_OBJECT_DIR" -cg Files $WIX_HEAT_OPTIONS -o ${TARGET} -t Swift\\Packaging\\WiX\\include.xslt',
- suffix = '.wxi')
-
-
- candle_scanner = env.Scanner(name = 'wixincludefile',
- function = WiX_IncludeScanner,
- argument = None,
- skeys = ['.wxs'])
-
- candle_builder = SCons.Builder.Builder(
- action = '"$WIX_CANDLE" $WIX_CANDLE_OPTIONS ${SOURCES} -o ${TARGET}',
- src_suffix = '.wxs',
- suffix = '.wixobj',
- source_scanner = candle_scanner,
- src_builder = heat_builder)
-
-
- light_builder = SCons.Builder.Builder(
- action = '"$WIX_LIGHT" $WIX_LIGHT_OPTIONS -b "$WIX_SOURCE_OBJECT_DIR" ${SOURCES} -o ${TARGET}',
- src_suffix = '.wixobj',
- src_builder = candle_builder)
-
- env['BUILDERS']['WiX_Heat'] = heat_builder
- env['BUILDERS']['WiX_Candle'] = candle_builder
- env['BUILDERS']['WiX_Light'] = light_builder
+ wixPath = env.get("wix_bindir", "")
+ if len(wixPath) > 0 and wixPath[-1] != "\\":
+ wixPath += "\\"
+ env['WIX_HEAT'] = wixPath + 'heat.exe'
+ env['WIX_HEAT_OPTIONS'] = '-nologo -ag -sfrag -suid -template fragment -dr ProgramFilesFolder'
+ env['WIX_CANDLE'] = wixPath + 'candle.exe'
+ env['WIX_CANDLE_OPTIONS'] = '-nologo'
+ env['WIX_LIGHT'] = wixPath + 'light.exe'
+ env['WIX_LIGHT_OPTIONS'] = '-nologo -ext WixUIExtension'
+
+ def WiX_IncludeScanner(source, env, path, arg):
+ wixIncludeRegexp = re.compile(r'^\s*\<\?include (\S+.wxs)\s*\?\>\S*', re.M)
+ contents = source.get_contents()
+ includes = wixIncludeRegexp.findall(contents)
+ return [ "" + include for include in includes ]
+
+ heat_builder = SCons.Builder.Builder(
+ action = '"$WIX_HEAT" dir "$WIX_SOURCE_OBJECT_DIR" -cg Files $WIX_HEAT_OPTIONS -o ${TARGET} -t Swift\\Packaging\\WiX\\include.xslt',
+ suffix = '.wxi')
+
+
+ candle_scanner = env.Scanner(name = 'wixincludefile',
+ function = WiX_IncludeScanner,
+ argument = None,
+ skeys = ['.wxs'])
+
+ candle_builder = SCons.Builder.Builder(
+ action = '"$WIX_CANDLE" $WIX_CANDLE_OPTIONS ${SOURCES} -o ${TARGET}',
+ src_suffix = '.wxs',
+ suffix = '.wixobj',
+ source_scanner = candle_scanner,
+ src_builder = heat_builder)
+
+
+ light_builder = SCons.Builder.Builder(
+ action = '"$WIX_LIGHT" $WIX_LIGHT_OPTIONS -b "$WIX_SOURCE_OBJECT_DIR" ${SOURCES} -o ${TARGET}',
+ src_suffix = '.wixobj',
+ src_builder = candle_builder)
+
+ env['BUILDERS']['WiX_Heat'] = heat_builder
+ env['BUILDERS']['WiX_Candle'] = candle_builder
+ env['BUILDERS']['WiX_Light'] = light_builder
def exists(env) :
- return True
+ return True
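The wix.py tool above chains heat (harvest a directory into a .wxi fragment), candle (compile .wxs into .wixobj) and light (link .wixobj files into an installer). A rough sketch of driving it from an SConscript, assuming an illustrative WiX install path and harvest directory:

    # Hypothetical usage of the WiX builders registered above (paths are placeholders).
    env = Environment(tools = ['default', 'wix'], toolpath = ['BuildTools/SCons/Tools'],
                      wix_bindir = 'C:\\Program Files (x86)\\WiX Toolset v3.11\\bin')
    env['WIX_SOURCE_OBJECT_DIR'] = 'Swift\\QtUI\\Swift'    # directory heat harvests and light binds against
    wixobj = env.WiX_Candle('Swift.wixobj', 'Swift.wxs')   # compile; a fragment can also be harvested via WiX_Heat
    env.WiX_Light('Swift.msi', wixobj)                     # link the final installer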
diff --git a/BuildTools/SCons/Version.py b/BuildTools/SCons/Version.py
index 6482664..d34c2a7 100644
--- a/BuildTools/SCons/Version.py
+++ b/BuildTools/SCons/Version.py
@@ -1,68 +1,68 @@
import subprocess, os, datetime, re, os.path
def getGitBuildVersion(root, project) :
- tag = git("describe --tags --exact --match \"" + project + "-*\"", root)
- if tag :
- return tag.rstrip()[len(project)+1:]
- tag = git("describe --tags --match \"" + project + "-*\"", root)
- if tag :
- m = re.match(project + "-(.*)-(.*)-(.*)", tag)
- if m :
- return m.group(1) + "-dev" + m.group(2)
- return None
+ tag = git("describe --tags --exact --match \"" + project + "-*\"", root)
+ if tag :
+ return tag.rstrip()[len(project)+1:]
+ tag = git("describe --tags --match \"" + project + "-*\"", root)
+ if tag :
+ m = re.match(project + "-(.*)-(.*)-(.*)", tag)
+ if m :
+ return m.group(1) + "-dev" + m.group(2)
+ return None
def git(cmd, root) :
- full_cmd = "git " + cmd
- p = subprocess.Popen(full_cmd, cwd=root, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
- gitVersion = p.stdout.read()
- # error = p.stderr.read()
- # if error:
- # print "Git error: " + error
- p.stdin.close()
- if p.wait() == 0 :
- return gitVersion
- return None
+ full_cmd = "git " + cmd
+ p = subprocess.Popen(full_cmd, cwd=root, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt"))
+ gitVersion = p.stdout.read()
+ # error = p.stderr.read()
+ # if error:
+ # print "Git error: " + error
+ p.stdin.close()
+ if p.wait() == 0 :
+ return gitVersion
+ return None
def getBuildVersion(root, project) :
- versionFilename = os.path.join(root, "VERSION." + project)
- if os.path.isfile(versionFilename) :
- f = open(versionFilename)
- version = f.read().strip()
- f.close()
- return version
+ versionFilename = os.path.join(root, "VERSION." + project)
+ if os.path.isfile(versionFilename) :
+ f = open(versionFilename)
+ version = f.read().strip()
+ f.close()
+ return version
- gitVersion = getGitBuildVersion(root, project)
- if gitVersion :
- return gitVersion
+ gitVersion = getGitBuildVersion(root, project)
+ if gitVersion :
+ return gitVersion
- return datetime.date.today().strftime("%Y%m%d")
+ return datetime.date.today().strftime("%Y%m%d")
def convertToWindowsVersion(version) :
- version_match = re.match("(\d+)\.(\d+)(.*)", version)
- major = version_match and int(version_match.group(1)) or 0
- minor = version_match and int(version_match.group(2)) or 0
- if version_match and len(version_match.group(3)) == 0 :
- patch = 60000
- else :
- match = re.match("^beta(\d+)(.*)", version_match.group(3))
- build_string = ""
- if match :
- patch = 1000*int(match.group(1))
- build_string = match.group(2)
- else :
- rc_match = re.match("^rc(\d+)(.*)", version_match.group(3))
- if rc_match :
- patch = 10000*int(rc_match.group(1))
- build_string = rc_match.group(2)
- else :
- patch = 0
- alpha_match = re.match("^alpha(.*)", version_match.group(3))
- if alpha_match :
- build_string = alpha_match.group(1)
+ version_match = re.match("(\d+)\.(\d+)(.*)", version)
+ major = version_match and int(version_match.group(1)) or 0
+ minor = version_match and int(version_match.group(2)) or 0
+ if version_match and len(version_match.group(3)) == 0 :
+ patch = 60000
+ else :
+ match = re.match("^beta(\d+)(.*)", version_match.group(3))
+ build_string = ""
+ if match :
+ patch = 1000*int(match.group(1))
+ build_string = match.group(2)
+ else :
+ rc_match = re.match("^rc(\d+)(.*)", version_match.group(3))
+ if rc_match :
+ patch = 10000*int(rc_match.group(1))
+ build_string = rc_match.group(2)
+ else :
+ patch = 0
+ alpha_match = re.match("^alpha(.*)", version_match.group(3))
+ if alpha_match :
+ build_string = alpha_match.group(1)
- if len(build_string) > 0 :
- build_match = re.match("^-dev(\d+)", build_string)
- if build_match :
- patch += int(build_match.group(1))
+ if len(build_string) > 0 :
+ build_match = re.match("^-dev(\d+)", build_string)
+ if build_match :
+ patch += int(build_match.group(1))
- return (major, minor, patch)
+ return (major, minor, patch)
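As a worked example of the mapping implemented by convertToWindowsVersion() above, the following quick check should hold (it assumes BuildTools/SCons is importable from the repository root; the version strings are illustrative):

    import sys
    sys.path.insert(0, "BuildTools/SCons")
    import Version

    assert Version.convertToWindowsVersion("2.0") == (2, 0, 60000)            # plain release -> patch 60000
    assert Version.convertToWindowsVersion("2.0beta1") == (2, 0, 1000)        # betaN -> patch 1000*N
    assert Version.convertToWindowsVersion("2.0rc2") == (2, 0, 20000)         # rcN -> patch 10000*N
    assert Version.convertToWindowsVersion("2.0beta1-dev42") == (2, 0, 1042)  # -devM (git commit count) adds M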
diff --git a/BuildTools/UpdateDebianChangelog.py b/BuildTools/UpdateDebianChangelog.py
index 0693461..1d0e3ea 100755
--- a/BuildTools/UpdateDebianChangelog.py
+++ b/BuildTools/UpdateDebianChangelog.py
@@ -14,28 +14,28 @@ project = ""
last_version = ""
m = re.match("([\w-]+) \((.*)-\d+\)", last_version_line)
if m :
- project = m.group(1)
- last_version = m.group(2)
+ project = m.group(1)
+ last_version = m.group(2)
if project == "" :
- project="swift-im"
+ project="swift-im"
if "dev" in version :
- distribution = "development"
+ distribution = "development"
elif "beta" in version or "rc" in version :
- distribution = "beta development"
+ distribution = "beta development"
else :
- distribution = "release beta development"
+ distribution = "release beta development"
if last_version != version :
- changelog = open(sys.argv[1])
- changelog_data = changelog.read()
- changelog.close()
- changelog = open(sys.argv[1], "w")
- changelog.write(project + " (" + version + "-1)" + " " + distribution + "; urgency=low\n\n")
- changelog.write(" * Upstream development snapshot\n\n")
- changelog.write(" -- Swift Package Maintainer <packages@swift.im> " + email.utils.formatdate() + "\n")
- changelog.write("\n")
- changelog.write(changelog_data)
- changelog.close()
+ changelog = open(sys.argv[1])
+ changelog_data = changelog.read()
+ changelog.close()
+ changelog = open(sys.argv[1], "w")
+ changelog.write(project + " (" + version + "-1)" + " " + distribution + "; urgency=low\n\n")
+ changelog.write(" * Upstream development snapshot\n\n")
+ changelog.write(" -- Swift Package Maintainer <packages@swift.im> " + email.utils.formatdate() + "\n")
+ changelog.write("\n")
+ changelog.write(changelog_data)
+ changelog.close()
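For reference, when the version has changed, the script above prepends a stanza of roughly this shape to the Debian changelog (project, version and date are illustrative; a version containing "dev" selects the "development" distribution):

    swift-im (3.0beta2-dev42-1) development; urgency=low

      * Upstream development snapshot

     -- Swift Package Maintainer <packages@swift.im> Tue, 01 Jan 2013 12:00:00 -0000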
diff --git a/BuildTools/scons2ninja.py b/BuildTools/scons2ninja.py
index 5d084cd..6c77c88 100755
--- a/BuildTools/scons2ninja.py
+++ b/BuildTools/scons2ninja.py
@@ -24,202 +24,202 @@ SCONS_ARGS = ' '.join(sys.argv[1:])
BINARY_FLAGS = ["-framework", "-arch", "-x", "--output-format", "-isystem", "-include"]
if sys.platform == 'win32' :
- LIB_PREFIX = ""
- LIB_SUFFIX = ""
- EXE_SUFFIX = ".exe"
+ LIB_PREFIX = ""
+ LIB_SUFFIX = ""
+ EXE_SUFFIX = ".exe"
else :
- LIB_PREFIX = "lib"
- LIB_SUFFIX = ".a"
- EXE_SUFFIX = ""
+ LIB_PREFIX = "lib"
+ LIB_SUFFIX = ".a"
+ EXE_SUFFIX = ""
def is_regexp(x) :
- return 'match' in dir(x)
+ return 'match' in dir(x)
def is_list(l) :
- return type(l) is list
+ return type(l) is list
def escape(s) :
- return s.replace(' ', '$ ').replace(':', '$:')
+ return s.replace(' ', '$ ').replace(':', '$:')
def quote_spaces(s) :
- if ' ' in s :
- return '"' + s + '"'
- else :
- return s
+ if ' ' in s :
+ return '"' + s + '"'
+ else :
+ return s
def to_list(l) :
- if not l :
- return []
- if is_list(l) :
- return l
- return [l]
+ if not l :
+ return []
+ if is_list(l) :
+ return l
+ return [l]
def partition(l, f) :
- x = []
- y = []
- for v in l :
- if f(v) :
- x.append(v)
- else :
- y.append(v)
- return (x, y)
+ x = []
+ y = []
+ for v in l :
+ if f(v) :
+ x.append(v)
+ else :
+ y.append(v)
+ return (x, y)
def get_unary_flags(prefix, flags) :
- return [x[len(prefix):] for x in flags if x.lower().startswith(prefix.lower())]
+ return [x[len(prefix):] for x in flags if x.lower().startswith(prefix.lower())]
def extract_unary_flags(prefix, flags) :
- f1, f2 = partition(flags, lambda x : x.lower().startswith(prefix.lower()))
- return ([f[len(prefix):] for f in f1], f2)
+ f1, f2 = partition(flags, lambda x : x.lower().startswith(prefix.lower()))
+ return ([f[len(prefix):] for f in f1], f2)
def extract_unary_flag(prefix, flags) :
- flag, flags = extract_unary_flags(prefix, flags)
- return (flag[0], flags)
+ flag, flags = extract_unary_flags(prefix, flags)
+ return (flag[0], flags)
def extract_binary_flag(prefix, flags) :
- i = flags.index(prefix)
- flag = flags[i + 1]
- del flags[i]
- del flags[i]
- return (flag, flags)
+ i = flags.index(prefix)
+ flag = flags[i + 1]
+ del flags[i]
+ del flags[i]
+ return (flag, flags)
def get_non_flags(flags) :
- skip = False
- result = []
- for f in flags :
- if skip :
- skip = False
- elif f in BINARY_FLAGS :
- skip = True
- elif not f.startswith("/") and not f.startswith("-") :
- result.append(f)
- return result
+ skip = False
+ result = []
+ for f in flags :
+ if skip :
+ skip = False
+ elif f in BINARY_FLAGS :
+ skip = True
+ elif not f.startswith("/") and not f.startswith("-") :
+ result.append(f)
+ return result
def extract_non_flags(flags) :
- non_flags = get_non_flags(flags)
- return (non_flags, filter(lambda x : x not in non_flags, flags))
+ non_flags = get_non_flags(flags)
+ return (non_flags, filter(lambda x : x not in non_flags, flags))
def get_dependencies(target, build_targets) :
- result = []
- queue = list(dependencies.get(target, []))
- while len(queue) > 0 :
- n = queue.pop()
- # Filter out Value() results
- if n in build_targets or os.path.exists(n) :
- result.append(n)
- queue += list(dependencies.get(n, []))
- return result
+ result = []
+ queue = list(dependencies.get(target, []))
+ while len(queue) > 0 :
+ n = queue.pop()
+ # Filter out Value() results
+ if n in build_targets or os.path.exists(n) :
+ result.append(n)
+ queue += list(dependencies.get(n, []))
+ return result
def get_built_libs(libs, libpaths, outputs) :
- canonical_outputs = [os.path.abspath(p) for p in outputs]
- result = []
- for libpath in libpaths :
- for lib in libs :
- lib_libpath = os.path.join(libpath, LIB_PREFIX + lib + LIB_SUFFIX)
- if os.path.abspath(lib_libpath) in canonical_outputs :
- result.append(lib_libpath)
- return result
+ canonical_outputs = [os.path.abspath(p) for p in outputs]
+ result = []
+ for libpath in libpaths :
+ for lib in libs :
+ lib_libpath = os.path.join(libpath, LIB_PREFIX + lib + LIB_SUFFIX)
+ if os.path.abspath(lib_libpath) in canonical_outputs :
+ result.append(lib_libpath)
+ return result
def parse_tool_command(line) :
- command = shlex.split(line, False, False if sys.platform == 'win32' else True)
- flags = command[1:]
- tool = os.path.splitext(os.path.basename(command[0]))[0]
- if tool.startswith('clang++') or tool.startswith('g++') :
- tool = "cxx"
- elif tool.startswith('clang') or tool.startswith('gcc') :
- tool = "cc"
- if tool in ["cc", "cxx"] and not "-c" in flags :
- tool = "glink"
- tool = tool.replace('-qt4', '')
- return tool, command, flags
+ command = shlex.split(line, False, False if sys.platform == 'win32' else True)
+ flags = command[1:]
+ tool = os.path.splitext(os.path.basename(command[0]))[0]
+ if tool.startswith('clang++') or tool.startswith('g++') :
+ tool = "cxx"
+ elif tool.startswith('clang') or tool.startswith('gcc') :
+ tool = "cc"
+ if tool in ["cc", "cxx"] and not "-c" in flags :
+ tool = "glink"
+ tool = tool.replace('-qt4', '')
+ return tool, command, flags
def rglob(pattern, root = '.') :
- return [os.path.join(path, f) for path, dirs, files in os.walk(root) for f in fnmatch.filter(files, pattern)]
+ return [os.path.join(path, f) for path, dirs, files in os.walk(root) for f in fnmatch.filter(files, pattern)]
################################################################################
# Helper for building Ninja files
################################################################################
class NinjaBuilder :
- def __init__(self) :
- self._header = ""
- self.variables = ""
- self.rules = ""
- self._build = ""
- self.pools = ""
- self._flags = {}
- self.targets = []
-
- def header(self, text) :
- self._header += text + "\n"
-
- def rule(self, name, **kwargs) :
- self.rules += "rule " + name + "\n"
- for k, v in kwargs.iteritems() :
- self.rules += " " + str(k) + " = " + str(v) + "\n"
- self.rules += "\n"
-
- def pool(self, name, **kwargs) :
- self.pools += "pool " + name + "\n"
- for k, v in kwargs.iteritems() :
- self.pools += " " + str(k) + " = " + str(v) + "\n"
- self.pools += "\n"
-
- def variable(self, name, value) :
- self.variables += str(name) + " = " + str(value) + "\n"
-
- def build(self, target, rule, sources = None, **kwargs) :
- self._build += "build " + self.to_string(target) + ": " + rule
- if sources :
- self._build += " " + self.to_string(sources)
- if 'deps' in kwargs and kwargs['deps'] :
- self._build += " | " + self.to_string(kwargs["deps"])
- if 'order_deps' in kwargs :
- self._build += " || " + self.to_string(kwargs['order_deps'])
- self._build += "\n"
- for var, value in kwargs.iteritems() :
- if var in ['deps', 'order_deps'] :
- continue
- value = self.to_string(value, quote = True)
- if var.endswith("flags") :
- value = self.get_flags_variable(var, value)
- self._build += " " + var + " = " + value + "\n"
- self.targets += to_list(target)
-
- def header_targets(self) :
- return [x for x in self.targets if x.endswith('.h') or x.endswith('.hh')]
-
- def serialize(self) :
- result = ""
- result += self._header + "\n"
- result += self.variables + "\n"
- for prefix in self._flags.values() :
- for k, v in prefix.iteritems() :
- result += v + " = " + k + "\n"
- result += "\n"
- result += self.pools + "\n"
- result += self.rules + "\n"
- result += self._build + "\n"
- return result
-
- def to_string(self, lst, quote = False) :
- if is_list(lst) :
- if quote :
- return ' '.join([quote_spaces(x) for x in lst])
- else :
- return ' '.join([escape(x) for x in lst])
- if is_regexp(lst) :
- return ' '.join([escape(x) for x in self.targets if lst.match(x)])
- return escape(lst)
-
- def get_flags_variable(self, flags_type, flags) :
- if len(flags) == 0 :
- return ''
- if flags_type not in self._flags :
- self._flags[flags_type] = {}
- type_flags = self._flags[flags_type]
- if flags not in type_flags :
- type_flags[flags] = flags_type + "_" + str(len(type_flags))
- return "$" + type_flags[flags]
+ def __init__(self) :
+ self._header = ""
+ self.variables = ""
+ self.rules = ""
+ self._build = ""
+ self.pools = ""
+ self._flags = {}
+ self.targets = []
+
+ def header(self, text) :
+ self._header += text + "\n"
+
+ def rule(self, name, **kwargs) :
+ self.rules += "rule " + name + "\n"
+ for k, v in kwargs.iteritems() :
+ self.rules += " " + str(k) + " = " + str(v) + "\n"
+ self.rules += "\n"
+
+ def pool(self, name, **kwargs) :
+ self.pools += "pool " + name + "\n"
+ for k, v in kwargs.iteritems() :
+ self.pools += " " + str(k) + " = " + str(v) + "\n"
+ self.pools += "\n"
+
+ def variable(self, name, value) :
+ self.variables += str(name) + " = " + str(value) + "\n"
+
+ def build(self, target, rule, sources = None, **kwargs) :
+ self._build += "build " + self.to_string(target) + ": " + rule
+ if sources :
+ self._build += " " + self.to_string(sources)
+ if 'deps' in kwargs and kwargs['deps'] :
+ self._build += " | " + self.to_string(kwargs["deps"])
+ if 'order_deps' in kwargs :
+ self._build += " || " + self.to_string(kwargs['order_deps'])
+ self._build += "\n"
+ for var, value in kwargs.iteritems() :
+ if var in ['deps', 'order_deps'] :
+ continue
+ value = self.to_string(value, quote = True)
+ if var.endswith("flags") :
+ value = self.get_flags_variable(var, value)
+ self._build += " " + var + " = " + value + "\n"
+ self.targets += to_list(target)
+
+ def header_targets(self) :
+ return [x for x in self.targets if x.endswith('.h') or x.endswith('.hh')]
+
+ def serialize(self) :
+ result = ""
+ result += self._header + "\n"
+ result += self.variables + "\n"
+ for prefix in self._flags.values() :
+ for k, v in prefix.iteritems() :
+ result += v + " = " + k + "\n"
+ result += "\n"
+ result += self.pools + "\n"
+ result += self.rules + "\n"
+ result += self._build + "\n"
+ return result
+
+ def to_string(self, lst, quote = False) :
+ if is_list(lst) :
+ if quote :
+ return ' '.join([quote_spaces(x) for x in lst])
+ else :
+ return ' '.join([escape(x) for x in lst])
+ if is_regexp(lst) :
+ return ' '.join([escape(x) for x in self.targets if lst.match(x)])
+ return escape(lst)
+
+ def get_flags_variable(self, flags_type, flags) :
+ if len(flags) == 0 :
+ return ''
+ if flags_type not in self._flags :
+ self._flags[flags_type] = {}
+ type_flags = self._flags[flags_type]
+ if flags not in type_flags :
+ type_flags[flags] = flags_type + "_" + str(len(type_flags))
+ return "$" + type_flags[flags]
################################################################################
@@ -231,7 +231,7 @@ scons_cmd = "scons"
scons_dependencies = ['SConstruct'] + rglob('SConscript')
def ninja_custom_command(ninja, line) :
- return False
+ return False
CONFIGURATION_FILE = '.scons2ninja.conf'
execfile(CONFIGURATION_FILE)
@@ -248,104 +248,104 @@ ninja = NinjaBuilder()
ninja.pool('scons_pool', depth = 1)
if sys.platform == 'win32' :
- ninja.rule('cl',
- deps = 'msvc',
- command = '$cl /showIncludes $clflags -c $in /Fo$out',
- description = 'CXX $out')
-
- ninja.rule('link',
- command = '$link $in $linkflags $libs /out:$out',
- description = 'LINK $out')
-
- ninja.rule('link_mt',
- command = '$link $in $linkflags $libs /out:$out ; $mt $mtflags',
- description = 'LINK $out')
-
- ninja.rule('lib',
- command = '$lib $libflags /out:$out $in',
- description = 'AR $out')
-
- ninja.rule('rc',
- command = '$rc $rcflags /Fo$out $in',
- description = 'RC $out')
-
- # SCons doesn't touch files if they didn't change, which makes
- # ninja rebuild the file over and over again. There's no touch on Windows :(
- # Could implement it with a script, but for now, delete the file if
- # this problem occurs. I'll fix it if it occurs too much.
- ninja.rule('scons',
- command = scons_cmd + " ${scons_args} $out",
- pool = 'scons_pool',
- description = 'GEN $out')
-
- ninja.rule('install', command = 'cmd /c copy $in $out')
- ninja.rule('run', command = '$in')
+ ninja.rule('cl',
+ deps = 'msvc',
+ command = '$cl /showIncludes $clflags -c $in /Fo$out',
+ description = 'CXX $out')
+
+ ninja.rule('link',
+ command = '$link $in $linkflags $libs /out:$out',
+ description = 'LINK $out')
+
+ ninja.rule('link_mt',
+ command = '$link $in $linkflags $libs /out:$out ; $mt $mtflags',
+ description = 'LINK $out')
+
+ ninja.rule('lib',
+ command = '$lib $libflags /out:$out $in',
+ description = 'AR $out')
+
+ ninja.rule('rc',
+ command = '$rc $rcflags /Fo$out $in',
+ description = 'RC $out')
+
+ # SCons doesn't touch files if they didn't change, which makes
+ # ninja rebuild the file over and over again. There's no touch on Windows :(
+ # Could implement it with a script, but for now, delete the file if
+ # this problem occurs. I'll fix it if it occurs too much.
+ ninja.rule('scons',
+ command = scons_cmd + " ${scons_args} $out",
+ pool = 'scons_pool',
+ description = 'GEN $out')
+
+ ninja.rule('install', command = 'cmd /c copy $in $out')
+ ninja.rule('run', command = '$in')
else :
- ninja.rule('cxx',
- deps = 'gcc',
- depfile = '$out.d',
- command = '$cxx -MMD -MF $out.d $cxxflags -c $in -o $out',
- description = 'CXX $out')
+ ninja.rule('cxx',
+ deps = 'gcc',
+ depfile = '$out.d',
+ command = '$cxx -MMD -MF $out.d $cxxflags -c $in -o $out',
+ description = 'CXX $out')
- ninja.rule('cc',
- deps = 'gcc',
- depfile = '$out.d',
- command = '$cc -MMD -MF $out.d $ccflags -c $in -o $out',
- description = 'CC $out')
+ ninja.rule('cc',
+ deps = 'gcc',
+ depfile = '$out.d',
+ command = '$cc -MMD -MF $out.d $ccflags -c $in -o $out',
+ description = 'CC $out')
- ninja.rule('link',
- command = '$glink -o $out $in $linkflags',
- description = 'LINK $out')
+ ninja.rule('link',
+ command = '$glink -o $out $in $linkflags',
+ description = 'LINK $out')
- ninja.rule('ar',
- command = 'ar $arflags $out $in && ranlib $out',
- description = 'AR $out')
+ ninja.rule('ar',
+ command = 'ar $arflags $out $in && ranlib $out',
+ description = 'AR $out')
- # SCons doesn't touch files if they didn't change, which makes
- # ninja rebuild the file over and over again. Touching solves this.
- ninja.rule('scons',
- command = scons_cmd + " $out && touch $out",
- pool = 'scons_pool',
- description = 'GEN $out')
+ # SCons doesn't touch files if they didn't change, which makes
+ # ninja rebuild the file over and over again. Touching solves this.
+ ninja.rule('scons',
+ command = scons_cmd + " $out && touch $out",
+ pool = 'scons_pool',
+ description = 'GEN $out')
- ninja.rule('install', command = 'install $in $out')
- ninja.rule('run', command = './$in')
+ ninja.rule('install', command = 'install $in $out')
+ ninja.rule('run', command = './$in')
ninja.rule('moc',
- command = '$moc $mocflags -o $out $in',
- description = 'MOC $out')
+ command = '$moc $mocflags -o $out $in',
+ description = 'MOC $out')
ninja.rule('rcc',
- command = '$rcc $rccflags -name $name -o $out $in',
- description = 'RCC $out')
+ command = '$rcc $rccflags -name $name -o $out $in',
+ description = 'RCC $out')
ninja.rule('uic',
- command = '$uic $uicflags -o $out $in',
- description = 'UIC $out')
+ command = '$uic $uicflags -o $out $in',
+ description = 'UIC $out')
ninja.rule('lrelease',
- command = '$lrelease $lreleaseflags $in -qm $out',
- description = 'LRELEASE $out')
+ command = '$lrelease $lreleaseflags $in -qm $out',
+ description = 'LRELEASE $out')
ninja.rule('ibtool',
- command = '$ibtool $ibtoolflags --compile $out $in',
- description = 'IBTOOL $out')
+ command = '$ibtool $ibtoolflags --compile $out $in',
+ description = 'IBTOOL $out')
ninja.rule('dsymutil',
- command = '$dsymutil $dsymutilflags -o $out $in',
- description = 'DSYMUTIL $out')
+ command = '$dsymutil $dsymutilflags -o $out $in',
+ description = 'DSYMUTIL $out')
ninja.rule('generator',
- command = "python " + SCRIPT + " ${scons_args}",
- depfile = ".scons2ninja.deps",
- pool = 'scons_pool',
- generator = '1',
- description = 'Regenerating build.ninja')
+ command = "python " + SCRIPT + " ${scons_args}",
+ depfile = ".scons2ninja.deps",
+ pool = 'scons_pool',
+ generator = '1',
+ description = 'Regenerating build.ninja')
ninja.rule('sdef',
- command = 'sdef $in | sdp -fh --basename $basename -o $outdir',
- description = 'SDEF $out')
+ command = 'sdef $in | sdp -fh --basename $basename -o $outdir',
+ description = 'SDEF $out')
################################################################################
# Build Statements
@@ -365,235 +365,237 @@ stage = 'preamble'
skip_nth_line = -1
stack = ['.']
for line in f.stdout :
- line = line.rstrip()
-
- # Skip lines if requested from previous command
- if skip_nth_line >= 0 :
- skip_nth_line -= 1
- if skip_nth_line == 0 :
- continue
-
- if line.startswith('scons: done building targets') :
- break
-
- if stage == "preamble" :
- # Pass all lines from the SCons configuration step to output
- if re.match("^scons: Building targets ...", line) :
- stage = "build"
- else :
- print line
-
- elif stage == "build" :
- if line.startswith('+-') :
- stage = "dependencies"
- elif re.match("^Using tempfile", line) :
- # Ignore response files from MSVS
- skip_nth_line = 2
- else :
- build_lines.append(line)
-
- # Already detect targets that will need 'mt'
- tool, _, flags = parse_tool_command(line)
- if tool == 'mt' :
- target = get_unary_flags("-outputresource:", flags)[0]
- target = target[0:target.index(';')]
- mtflags[target] = flags
-
- elif stage == "dependencies" :
- if not re.match('^[\s|]+\+\-', line) :
- # Work around bug in SCons that splits output over multiple lines
- continue
-
- level = line.index('+-') / 2
- filename = line[level*2+2:]
- if filename.startswith('[') :
- filename = filename[1:-1]
-
- # Check if we use the 'fixed' format which escapes filenamenames
- if filename.startswith('\'') and filename.endswith('\'') :
- filename = eval(filename)
-
- if level < len(stack) :
- stack = stack[0:level]
- elif level > len(stack) :
- if level != len(stack) + 1 :
- raise Exception("Internal Error" )
- stack.append(previous_filename)
-
- # Skip absolute paths
- if not os.path.isabs(filename) :
- target = stack[-1]
- if target not in dependencies :
- dependencies[target] = []
- dependencies[target].append(filename)
- previous_filename = filename
+ line = line.rstrip()
+
+ # Skip lines if requested from previous command
+ if skip_nth_line >= 0 :
+ skip_nth_line -= 1
+ if skip_nth_line == 0 :
+ continue
+
+ if line.startswith('scons: done building targets') :
+ break
+
+ if stage == "preamble" :
+ # Pass all lines from the SCons configuration step to output
+ if re.match("^scons: Building targets ...", line) :
+ stage = "build"
+ else :
+ print line
+
+ elif stage == "build" :
+ if line.startswith('+-') :
+ stage = "dependencies"
+ elif re.match("^Using tempfile", line) :
+ # Ignore response files from MSVS
+ skip_nth_line = 2
+ else :
+ build_lines.append(line)
+
+ # Already detect targets that will need 'mt'
+ tool, _, flags = parse_tool_command(line)
+ if tool == 'mt' :
+ target = get_unary_flags("-outputresource:", flags)[0]
+ target = target[0:target.index(';')]
+ mtflags[target] = flags
+
+ elif stage == "dependencies" :
+ if not re.match('^[\s|]+\+\-', line) :
+ # Work around bug in SCons that splits output over multiple lines
+ continue
+
+ level = line.index('+-') / 2
+ filename = line[level*2+2:]
+ if filename.startswith('[') :
+ filename = filename[1:-1]
+
+        # Check if we use the 'fixed' format which escapes filenames
+ if filename.startswith('\'') and filename.endswith('\'') :
+ filename = eval(filename)
+
+ if level < len(stack) :
+ stack = stack[0:level]
+ elif level > len(stack) :
+ if level != len(stack) + 1 :
+ raise Exception("Internal Error" )
+ stack.append(previous_filename)
+
+ # Skip absolute paths
+ if not os.path.isabs(filename) :
+ target = stack[-1]
+ if target not in dependencies :
+ dependencies[target] = []
+ dependencies[target].append(filename)
+ previous_filename = filename
if f.wait() != 0 :
- print "Error calling '" + scons_generate_cmd + "'"
- print f.stderr.read()
- exit(-1)
+ print "Error calling '" + scons_generate_cmd + "'"
+ print f.stderr.read()
+ exit(-1)
# Pass 2: Parse build rules
tools = {}
for line in build_lines :
- # Custom python function
- m = re.match('^(\w+)\(\[([^\]]*)\]', line)
- if m :
- out = [x[1:-1] for x in m.group(2).split(',')]
- for x in out :
- # 'Note' = To be more correct, deps should also include $scons_dependencies,
- # but this regenerates a bit too often, so leaving it out for now.
- ninja.build(x, 'scons', None, deps = sorted(get_dependencies(x, ninja.targets)))
- continue
-
-
- # TextFile
- m = re.match("^Creating '([^']+)'", line)
- if m :
- out = m.group(1)
- # Note: To be more correct, deps should also include $scons_dependencies,
- # but this regenerates a bit too often, so leaving it out for now.
- ninja.build(out, 'scons', None, deps = sorted(get_dependencies(out, ninja.targets)))
- continue
-
- # Install
- m = re.match('^Install file: "(.*)" as "(.*)"', line)
- if m :
- ninja.build(m.group(2), 'install', m.group(1))
- continue
-
- m = re.match('^Install directory: "(.*)" as "(.*)"', line)
- if m :
- for source in rglob('*', m.group(1)) :
- if os.path.isdir(source) :
- continue
- target = os.path.join(m.group(2), os.path.relpath(source, m.group(1)))
- ninja.build(target, 'install', source)
- continue
-
- # Tools
- tool, command, flags = parse_tool_command(line)
- tools[tool] = command[0]
-
- ############################################################
- # clang/gcc tools
- ############################################################
-
- if tool == 'cc':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'cc', files, order_deps = '_generated_headers', ccflags = flags)
-
- elif tool == 'cxx':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'cxx', files, order_deps = '_generated_headers', cxxflags = flags)
-
- elif tool == 'glink':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- libs = get_unary_flags('-l', flags)
- libpaths = get_unary_flags("-L", flags)
- deps = get_built_libs(libs, libpaths, ninja.targets)
- ninja.build(out, 'link', files, deps = sorted(deps), linkflags = flags)
-
- elif tool == 'ar':
- objects, flags = partition(flags, lambda x: x.endswith('.o'))
- libs, flags = partition(flags, lambda x: x.endswith('.a'))
- out = libs[0]
- ninja.build(out, 'ar', objects, arflags = flags)
-
- elif tool == 'ranlib':
- pass
-
-
- ############################################################
- # MSVC tools
- ############################################################
-
- elif tool == 'cl':
- out, flags = extract_unary_flag("/Fo", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'cl', files, order_deps = '_generated_headers', clflags = flags)
-
- elif tool == 'lib':
- out, flags = extract_unary_flag("/out:", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'lib', files, libflags = flags)
-
- elif tool == 'link':
- objects, flags = partition(flags, lambda x: x.endswith('.obj') or x.endswith('.res'))
- out, flags = extract_unary_flag("/out:", flags)
- libs, flags = partition(flags, lambda x: not x.startswith("/") and x.endswith(".lib"))
- libpaths = get_unary_flags("/libpath:", flags)
- deps = get_built_libs(libs, libpaths, ninja.targets)
- if out in mtflags :
- ninja.build(out, 'link_mt', objects, deps = sorted(deps),
- libs = libs, linkflags = flags, mtflags = mtflags[out])
- else :
- ninja.build(out, 'link', objects, deps = sorted(deps),
- libs = libs, linkflags = flags)
-
- elif tool == 'rc':
- out, flags = extract_unary_flag("/fo", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'rc', files[0], order_deps = '_generated_headers', rcflags = flags)
-
- elif tool == 'mt':
- # Already handled
- pass
-
- ############################################################
- # Qt tools
- ############################################################
-
- elif tool == 'moc':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'moc', files, mocflags = flags)
-
- elif tool == 'uic':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'uic', files, uicflags = flags)
-
- elif tool == 'lrelease':
- out, flags = extract_binary_flag("-qm", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'lrelease', files, lreleaseflags = flags)
-
- elif tool == 'rcc':
- out, flags = extract_binary_flag("-o", flags)
- name, flags = extract_binary_flag("-name", flags)
- files, flags = extract_non_flags(flags)
- deps = list(set(get_dependencies(out, ninja.targets)) - set(files))
- ninja.build(out, 'rcc', files, deps = sorted(deps), name = name, rccflags = flags)
-
- ############################################################
- # OS X tools
- ############################################################
-
- elif tool == 'ibtool':
- out, flags = extract_binary_flag("--compile", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'ibtool', files, ibtoolflags = flags)
-
- elif tool == 'dsymutil':
- out, flags = extract_binary_flag("-o", flags)
- files, flags = extract_non_flags(flags)
- ninja.build(out, 'dsymutil', files, dsymutilflags = flags)
-
- elif tool == 'sdef' :
- source = flags[0];
- outdir, flags = extract_binary_flag("-o", flags)
- basename, flags = extract_binary_flag("--basename", flags)
- ninja.build(os.path.join(outdir, basename + ".h"), 'sdef', [source],
- basename = basename,
- outdir = outdir)
-
-
- elif not ninja_custom_command(ninja, line) :
- raise Exception("Unknown tool: '" + line + "'")
+ # Custom python function
+ m = re.match('^(\w+)\(\[([^\]]*)\]', line)
+ if m :
+ out = [x[1:-1] for x in m.group(2).split(',')]
+ for x in out :
+            # Note: To be more correct, deps should also include $scons_dependencies,
+ # but this regenerates a bit too often, so leaving it out for now.
+ ninja.build(x, 'scons', None, deps = sorted(get_dependencies(x, ninja.targets)))
+ continue
+
+
+ # TextFile
+ m = re.match("^Creating '([^']+)'", line)
+ if m :
+ out = m.group(1)
+ # Note: To be more correct, deps should also include $scons_dependencies,
+ # but this regenerates a bit too often, so leaving it out for now.
+ ninja.build(out, 'scons', None, deps = sorted(get_dependencies(out, ninja.targets)))
+ continue
+
+ # Install
+ m = re.match('^Install file: "(.*)" as "(.*)"', line)
+ if m :
+ ninja.build(m.group(2), 'install', m.group(1))
+ continue
+
+ m = re.match('^Install directory: "(.*)" as "(.*)"', line)
+ if m :
+ for source in rglob('*', m.group(1)) :
+ if os.path.isdir(source) :
+ continue
+ target = os.path.join(m.group(2), os.path.relpath(source, m.group(1)))
+ ninja.build(target, 'install', source)
+ continue
+
+ # Tools
+ tool, command, flags = parse_tool_command(line)
+ tools[tool] = command[0]
+
+ ############################################################
+ # clang/gcc tools
+ ############################################################
+
+ if tool == 'cc':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'cc', files, order_deps = '_generated_headers', ccflags = flags)
+
+ elif tool == 'cxx':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'cxx', files, order_deps = '_generated_headers', cxxflags = flags)
+
+ elif tool == 'glink':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ libs = get_unary_flags('-l', flags)
+ libpaths = get_unary_flags("-L", flags)
+ deps = get_built_libs(libs, libpaths, ninja.targets)
+ ninja.build(out, 'link', files, deps = sorted(deps), linkflags = flags)
+
+ elif tool == 'ar':
+ objects, flags = partition(flags, lambda x: x.endswith('.o'))
+ libs, flags = partition(flags, lambda x: x.endswith('.a'))
+ out = libs[0]
+ ninja.build(out, 'ar', objects, arflags = flags)
+
+ elif tool == 'ranlib':
+ pass
+
+
+ ############################################################
+ # MSVC tools
+ ############################################################
+
+ elif tool == 'cl':
+ out, flags = extract_unary_flag("/Fo", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'cl', files, order_deps = '_generated_headers', clflags = flags)
+
+ elif tool == 'lib':
+ out, flags = extract_unary_flag("/out:", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'lib', files, libflags = flags)
+
+ elif tool == 'link':
+ objects, flags = partition(flags, lambda x: x.endswith('.obj') or x.endswith('.res'))
+ out, flags = extract_unary_flag("/out:", flags)
+ libs, flags = partition(flags, lambda x: not x.startswith("/") and x.endswith(".lib"))
+ libpaths = get_unary_flags("/libpath:", flags)
+ deps = get_built_libs(libs, libpaths, ninja.targets)
+ if out in mtflags :
+ ninja.build(out, 'link_mt', objects, deps = sorted(deps),
+ libs = libs, linkflags = flags, mtflags = mtflags[out])
+ else :
+ ninja.build(out, 'link', objects, deps = sorted(deps),
+ libs = libs, linkflags = flags)
+
+ elif tool == 'rc':
+ out, flags = extract_unary_flag("/fo", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'rc', files[0], order_deps = '_generated_headers', rcflags = flags)
+
+ elif tool == 'mt':
+ # Already handled
+ pass
+
+ ############################################################
+ # Qt tools
+ ############################################################
+
+ elif tool == 'moc':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'moc', files, mocflags = flags)
+
+ elif tool == 'uic':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'uic', files, uicflags = flags)
+
+ elif tool == 'lrelease':
+ out, flags = extract_binary_flag("-qm", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'lrelease', files, lreleaseflags = flags)
+
+ elif tool == 'rcc':
+ out, flags = extract_binary_flag("-o", flags)
+ name, flags = extract_binary_flag("-name", flags)
+ compress, flags = extract_binary_flag("--compress", flags)
+ threshold, flags = extract_binary_flag("--threshold", flags)
+ files, flags = extract_non_flags(flags)
+ deps = list(set(get_dependencies(out, ninja.targets)) - set(files))
+ ninja.build(out, 'rcc', files, deps = sorted(deps), name = name, rccflags = ["--compress", compress, "--threshold", threshold])
+
+ ############################################################
+ # OS X tools
+ ############################################################
+
+ elif tool == 'ibtool':
+ out, flags = extract_binary_flag("--compile", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'ibtool', files, ibtoolflags = flags)
+
+ elif tool == 'dsymutil':
+ out, flags = extract_binary_flag("-o", flags)
+ files, flags = extract_non_flags(flags)
+ ninja.build(out, 'dsymutil', files, dsymutilflags = flags)
+
+ elif tool == 'sdef' :
+ source = flags[0];
+ outdir, flags = extract_binary_flag("-o", flags)
+ basename, flags = extract_binary_flag("--basename", flags)
+ ninja.build(os.path.join(outdir, basename + ".h"), 'sdef', [source],
+ basename = basename,
+ outdir = outdir)
+
+
+ elif not ninja_custom_command(ninja, line) :
+ raise Exception("Unknown tool: '" + line + "'")
# Phony target for all generated headers, used as an order-only dependency from all C/C++ sources
@@ -607,11 +609,11 @@ ninja.header("# This file is generated by " + SCRIPT)
ninja.variable("ninja_required_version", "1.3")
ninja.variable("scons_args", SCONS_ARGS)
for k, v in tools.iteritems() :
- ninja.variable(k, v)
+ ninja.variable(k, v)
# Extra customizations
if 'ninja_post' in dir() :
- ninja_post(ninja)
+ ninja_post(ninja)
################################################################################
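The ninja_custom_command() default defined earlier and the ninja_post() hook checked just above are both meant to be supplied by the .scons2ninja.conf file that the script loads with execfile(); a minimal sketch of such a configuration (the GenerateThing output line it handles is purely hypothetical) could look like:

    # .scons2ninja.conf -- executed in scons2ninja.py's own namespace via execfile(),
    # so 're' and the NinjaBuilder API are already available.
    scons_cmd = "scons -j4"    # optional: override how SCons itself is invoked

    def ninja_custom_command(ninja, line) :
        # Translate a build line the generator does not recognise; return True when handled.
        m = re.match(r'^GenerateThing\("([^"]+)"\)', line)    # hypothetical SCons output line
        if m :
            ninja.build(m.group(1), 'scons', None)
            return True
        return False

    def ninja_post(ninja) :
        # Last-chance tweaks before build.ninja is serialized.
        ninja.variable('builddir', 'build')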