Diffstat (limited to 'BuildTools')
53 files changed, 4537 insertions, 2365 deletions
diff --git a/BuildTools/CLang/.gitignore b/BuildTools/CLang/.gitignore
deleted file mode 100644
index df682c0..0000000
--- a/BuildTools/CLang/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-CLangDiagnosticsFlags
-CLangDiagnosticsFlagsTool.sh
-CLangDiagnosticsFlagsTool
-clang-diagnostics-overview.*
diff --git a/BuildTools/CLang/CLangDiagnosticsFlagsTool.cpp b/BuildTools/CLang/CLangDiagnosticsFlagsTool.cpp
deleted file mode 100644
index ccd5925..0000000
--- a/BuildTools/CLang/CLangDiagnosticsFlagsTool.cpp
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Copyright (c) 2011 Remko Tronçon
- * Licensed under the GNU General Public License v3.
- * See Documentation/Licenses/GPLv3.txt for more information.
- */
-
-#include <iostream>
-#include <set>
-#include <vector>
-#include <cassert>
-#include <boost/algorithm/string/predicate.hpp>
-#include <boost/graph/graph_traits.hpp>
-#include <boost/graph/adjacency_list.hpp>
-#include <boost/graph/topological_sort.hpp>
-#include <boost/graph/topological_sort.hpp>
-#include <boost/graph/graphviz.hpp>
-
-// -----------------------------------------------------------------------------
-// Include diagnostics data from CLang
-// -----------------------------------------------------------------------------
-
-#define DIAG(name, a, b, c, d, e, f, g) name,
-
-namespace diag {
-	enum LexKinds {
-#include <clang/Basic/DiagnosticLexKinds.inc>
-#include <clang/Basic/DiagnosticParseKinds.inc>
-#include <clang/Basic/DiagnosticCommonKinds.inc>
-#include <clang/Basic/DiagnosticDriverKinds.inc>
-#include <clang/Basic/DiagnosticFrontendKinds.inc>
-#include <clang/Basic/DiagnosticSemaKinds.inc>
-	};
-}
-
-#define GET_DIAG_ARRAYS
-#include <clang/Basic/DiagnosticGroups.inc>
-#undef GET_DIAG_ARRAYS
-
-struct DiagTableEntry {
-	const char* name;
-	const short* array;
-	const short* group;
-};
-
-static const DiagTableEntry diagnostics[] = {
-#define GET_DIAG_TABLE
-#include <clang/Basic/DiagnosticGroups.inc>
-#undef GET_DIAG_TABLE
-};
-static const size_t diagnostics_count = sizeof(diagnostics) / sizeof(diagnostics[0]);
-
-// -----------------------------------------------------------------------------
-
-using namespace boost;
-
-struct Properties {
-	Properties() : have(false), implicitHave(false), dontWant(false), implicitDontWant(false), ignored(false), available(false), missing(false), redundant(false), alreadyCovered(false) {
-	}
-
-	std::string name;
-	bool have;
-	bool implicitHave;
-	bool dontWant;
-	bool implicitDontWant;
-	bool ignored;
-	bool available;
-	bool missing;
-	bool redundant;
-	bool alreadyCovered;
-};
-
-class GraphVizLabelWriter {
-	public:
-		GraphVizLabelWriter(const std::vector<Properties>& properties) : properties(properties) {
-		}
-
-		template <class VertexOrEdge>
-		void operator()(std::ostream& out, const VertexOrEdge& v) const {
-			std::string color;
-			if (properties[v].missing) {
-				color = "orange";
-			}
-			else if (properties[v].redundant) {
-				color = "lightblue";
-			}
-			else if (properties[v].have) {
-				color = "darkgreen";
-			}
-			else if (properties[v].implicitHave) {
-				color = "green";
-			}
-			else if (properties[v].dontWant) {
-				color = "red";
-			}
-			else if (properties[v].implicitDontWant) {
-				color = "pink";
-			}
-			else if (properties[v].ignored) {
-				color = "white";
-			}
-			else if (properties[v].available) {
-				color = "yellow";
-			}
-			else {
-				assert(false);
-			}
-			out << "[label=" << escape_dot_string(properties[v].name) << " fillcolor=\"" << color << "\" style=filled]";
-		}
-
-	private:
-		const std::vector<Properties> properties;
-};
-
-int main(int argc, char* argv[]) {
-	// Parse command-line arguments
-	std::set<std::string> have;
-	std::set<std::string> dontWant;
-	std::string outputDir;
-	for (int i = 1; i < argc; ++i) {
-		std::string arg(argv[i]);
-		if (starts_with(arg, "-W")) {
-			have.insert(arg.substr(2, arg.npos));
-		}
-		else if (starts_with(arg, "-w")) {
-			dontWant.insert(arg.substr(2, arg.npos));
-		}
-		else if (starts_with(arg, "-O")) {
-			outputDir = arg.substr(2, arg.npos) + "/";
-		}
-	}
-
-	// Build the graph and initialize properties
-	typedef adjacency_list<vecS, vecS, bidirectionalS> Graph;
-	typedef graph_traits<Graph>::vertex_descriptor Vertex;
-	Graph g(diagnostics_count);
-	std::vector<Properties> properties(num_vertices(g));
-	for (size_t i = 0; i < diagnostics_count; ++i) {
-		std::string name(diagnostics[i].name);
-		properties[i].name = name;
-		properties[i].implicitHave = properties[i].have = have.find(name) != have.end();
-		properties[i].implicitDontWant = properties[i].dontWant = dontWant.find(name) != dontWant.end();
-		properties[i].ignored = diagnostics[i].group == 0 && diagnostics[i].array == 0;
-		properties[i].alreadyCovered = false;
-		properties[i].available = true;
-		for (const short* j = diagnostics[i].group; j && *j != -1; ++j) {
-			add_edge(i, *j, g);
-		}
-	}
-
-	// Sort the diagnostics
-	std::list<Vertex> sortedDiagnostics;
-	boost::topological_sort(g, std::front_inserter(sortedDiagnostics));
-
-	// Propagate dontWant and have properties down
-	for(std::list<Vertex>::const_iterator i = sortedDiagnostics.begin(); i != sortedDiagnostics.end(); ++i) {
-		graph_traits<Graph>::adjacency_iterator adjacentIt, adjacentEnd;
-		for (tie(adjacentIt, adjacentEnd) = adjacent_vertices(*i, g); adjacentIt != adjacentEnd; ++adjacentIt) {
-			properties[*adjacentIt].implicitDontWant = properties[*i].implicitDontWant || properties[*adjacentIt].implicitDontWant;
-			properties[*adjacentIt].implicitHave = properties[*i].implicitHave || properties[*adjacentIt].implicitHave;
-		}
-	}
-
-	// Propagate 'available' property upwards
-	for(std::list<Vertex>::const_reverse_iterator i = sortedDiagnostics.rbegin(); i != sortedDiagnostics.rend(); ++i) {
-		properties[*i].available = properties[*i].available && !properties[*i].implicitDontWant;
-		graph_traits<Graph>::in_edge_iterator edgesIt, edgesEnd;
-		graph_traits<Graph>::edge_descriptor edge;
-		for (tie(edgesIt, edgesEnd) = in_edges(*i, g); edgesIt != edgesEnd; ++edgesIt) {
-			properties[source(*edgesIt, g)].available = properties[source(*edgesIt, g)].available && properties[*i].available;
-		}
-	}
-
-	// Collect missing & redundant flags
-	std::set<std::string> missing;
-	std::set<std::string> redundant;
-	for(std::list<Vertex>::const_iterator i = sortedDiagnostics.begin(); i != sortedDiagnostics.end(); ++i) {
-		bool markChildrenCovered = true;
-		if (properties[*i].alreadyCovered) {
-			if (properties[*i].have) {
-				properties[*i].redundant = true;
-				redundant.insert(properties[*i].name);
-			}
-		}
-		else {
-			if (properties[*i].available) {
-				if (!properties[*i].implicitHave && !properties[*i].ignored) {
-					properties[*i].missing = true;
-					missing.insert(properties[*i].name);
-				}
-			}
-			else {
-				markChildrenCovered = false;
-			}
-		}
-		if (markChildrenCovered) {
-			graph_traits<Graph>::adjacency_iterator adjacentIt, adjacentEnd;
-			for (tie(adjacentIt, adjacentEnd) = adjacent_vertices(*i, g); adjacentIt != adjacentEnd; ++adjacentIt) {
-				properties[*adjacentIt].alreadyCovered = true;
-			}
-		}
-	}
-
-	// Write information
-	if (!missing.empty()) {
-		std::cout << "Missing diagnostic flags: ";
-		for(std::set<std::string>::const_iterator i = missing.begin(); i != missing.end(); ++i) {
-			std::cout << "-W" << *i << " ";
-		}
-		std::cout<< std::endl;
-	}
-
-	if (!redundant.empty()) {
-		std::cout << "Redundant diagnostic flags: ";
-		for(std::set<std::string>::const_iterator i = redundant.begin(); i != redundant.end(); ++i) {
-			std::cout << "-W" << *i << " ";
-		}
-		std::cout<< std::endl;
-	}
-
-	// Write graphviz file
-	if (!outputDir.empty()) {
-		std::ofstream f((outputDir + "clang-diagnostics-overview.dot").c_str());
-		write_graphviz(f, g, GraphVizLabelWriter(properties));
-		f.close();
-	}
-
-	return 0;
-}
diff --git a/BuildTools/CLang/SConscript b/BuildTools/CLang/SConscript
deleted file mode 100644
index 850c35c..0000000
--- a/BuildTools/CLang/SConscript
+++ /dev/null
@@ -1,15 +0,0 @@
-Import("env")
-
-#myenv = Environment()
-#myenv.Append(CPPPATH = ["."])
-#myenv.Program("CLangDiagnosticsFlagsTool", ["CLangDiagnosticsFlagsTool.cpp"])
-#
-#disabledDiagnostics = ["-wunreachable-code", "-wunused-macros", "-wmissing-noreturn", "-wlong-long", "-wcast-align", "-wglobal-constructors", "-wmissing-prototypes", "-wpadded", "-wshadow"]
-#clangDiagnosticsFlagsToolCommand = "BuildTools/CLang/CLangDiagnosticsFlagsTool -O" + env.Dir(".").abspath + " " + " ".join(disabledDiagnostics) + " "
-#clangDiagnosticsFlagsToolCommand += " ".join([flag for flag in env["CXXFLAGS"] if flag.startswith("-W")])
-#clangDiagnosticsFlagsToolCommand += "\n"
-#clangDiagnosticsFlagsToolCommand += "dot -Tpng " + env.Dir(".").abspath + "/clang-diagnostics-overview.dot > " + env.Dir(".").abspath + "/clang-diagnostics-overview.png\n"
-#v = env.WriteVal("#/BuildTools/CLang/CLangDiagnosticsFlagsTool.sh", env.Value(clangDiagnosticsFlagsToolCommand))
-#env.AddPostAction(v, Chmod(v[0], 0755))
-#
-#
diff --git a/BuildTools/CheckHeaders.py b/BuildTools/CheckHeaders.py
index 73f49db..752d257 100755
--- a/BuildTools/CheckHeaders.py
+++ b/BuildTools/CheckHeaders.py
@@ -4,17 +4,38 @@ import os, sys
+FORBIDDEN_INCLUDES = [
+    ("iostream", ["Swiften/Base/format.h"]),
+    ("Base/Log.h", []),
+    ("Base/format.h", []),
+    ("algorithm", ["Swiften/Base/Algorithm.h", "Swiften/Base/SafeAllocator.h", "Swiften/Base/Listenable.h", "Swiften/Base/Concat.h"]),
+    ("boost/bind.hpp", ["Swiften/Base/Listenable.h", "Swiften/FileTransfer/S5BTransportSession.h"]),
+    ("boost/filesystem.hpp", []),
+    ("Base/foreach.h", []),
+    ("boost/date_time/date_time.hpp", []),
+    ("boost/filesystem/filesystem.hpp", []),
+
+    # To avoid
+    ("Base/Algorithm.h", ["Swiften/StringCodecs/HMAC.h"]),
+]
+
 foundBadHeaders = False
-for (path, dirs, files) in os.walk(".") :
-	if "3rdParty" in path or ".sconf" in path or ".framework" in path :
-		continue
-	if not "Swiften" in path :
-		continue
-
-	for filename in [os.path.join(path, file) for file in files if file.endswith(".h")] :
-		file = open(filename, "r")
-		for line in file.readlines() :
-			for include in ["iostream", "algorithm", "cassert", "boost/bind.hpp", "boost/filesystem.hpp", "Base/foreach.h", "Base/Log.h", "boost/date_time/date_time.hpp", "boost/filesystem/filesystem.hpp"] :
-				if "#include" in line and include in line and not "Base/Log" in filename :
-					print "Found " + include + " include in " + filename
-					foundBadHeaders = True
+filename = sys.argv[1]
+
+if "3rdParty" in filename or ".sconf" in filename or ".framework" in filename or not filename.endswith(".h") :
+    sys.exit(0)
+if not "Swiften" in filename :
+    sys.exit(0)
+if filename.endswith("Swiften.h") :
+    sys.exit(0)
+
+file = open(filename, "r")
+for line in file.readlines() :
+    if not "#include" in line :
+        continue
+    if "Base/Log.h" in filename :
+        continue
+    for forbiddenInclude, ignores in FORBIDDEN_INCLUDES :
+        if forbiddenInclude in line and len([x for x in ignores if x in filename]) == 0 :
+            print("Found " + forbiddenInclude + " include in " + filename)
+            foundBadHeaders = True
diff --git a/BuildTools/CheckTabs.py b/BuildTools/CheckTabs.py
index e007a68..f0ec0ab 100755
--- a/BuildTools/CheckTabs.py
+++ b/BuildTools/CheckTabs.py
@@ -7,25 +7,13 @@ foundExpandedTabs = False
 filename = sys.argv[1]
-if (filename.endswith(".cpp") or filename.endswith(".h")) and not "3rdParty" in filename :
-	file = open(filename, "r")
-	contents = []
-	contentsChanged = False
-	for line in file.readlines() :
-		newline = ""
-		previousChar = None
-		pastInitialSpace = False
-		for char in line :
-			if not pastInitialSpace :
-				if char == ' ' and previousChar == ' ' :
-					contentsChanged = True
-					previousChar = '\t'
-					continue
-				pastInitialSpace = (char != ' ')
-			if previousChar :
-				newline += previousChar
-			previousChar = char
-		if previousChar :
-			newline += previousChar
-		contents.append(newline)
-	file.close()
-	if contentsChanged :
-		sys.exit(-1)
+if (filename.endswith(".cpp") or filename.endswith(".h") or filename.endswith(".c") or filename.endswith(".mm") or filename.endswith(".ipp") or filename.endswith(".hpp") or filename.endswith(".py") or filename.endswith("SConscript") or filename.endswith("SConscript.boot") or filename.endswith("SConstruct")) and not "3rdParty" in filename :
+    file = open(filename, "r")
+    contents = []
+    contentsChanged = False
+    for line in file.readlines() :
+        if "\t" in line:
+            print("File %s contains hard tabs. This is not allowed." % filename)
+            file.close()
+            sys.exit(-1)
+    file.close()
+    if contentsChanged :
+        sys.exit(-1)
diff --git a/BuildTools/CheckTests.py b/BuildTools/CheckTests.py
index 0ea56bd..7f160e7 100755
--- a/BuildTools/CheckTests.py
+++ b/BuildTools/CheckTests.py
@@ -7,26 +7,26 @@ foundUnregisteredTests = False
 for (path, dirs, files) in os.walk(".") :
-	if not "3rdParty" in path :
-		for filename in [os.path.join(path, file) for file in files if file.endswith("Test.cpp") and file != "IdleQuerierTest.cpp" and file != "NotifierTest.cpp" and file != "ClientTest.cpp" and file != "ConnectivityTest.cpp" and file != "ReconnectTest.cpp"] :
-			file = open(filename, "r")
-			isRegistered = False
-			registeredTests = set()
-			declaredTests = set()
-			for line in file.readlines() :
-				m = re.match("\s*CPPUNIT_TEST_SUITE_REGISTRATION\((.*)\)", line)
-				if m :
-					isRegistered = True
-				m = re.match("\s*CPPUNIT_TEST\((.*)\)", line)
-				if m :
-					registeredTests.add(m.group(1))
-					continue
-				m = re.match("\s*void (test.*)\(\)", line)
-				if m :
-					declaredTests.add(m.group(1))
-			if not isRegistered :
-				print filename + ": Registration missing"
-				foundUnregisteredTests = True
-			if registeredTests - declaredTests != set([]) :
-				print filename + ": " + str(registeredTests - declaredTests)
-				foundUnregisteredTests = True
-			file.close()
+    if not "3rdParty" in path :
+        for filename in [os.path.join(path, file) for file in files if file.endswith("Test.cpp") and file != "IdleQuerierTest.cpp" and file != "NotifierTest.cpp" and file != "ClientTest.cpp" and file != "ConnectivityTest.cpp" and file != "ReconnectTest.cpp"] :
+            file = open(filename, "r")
+            isRegistered = False
+            registeredTests = set()
+            declaredTests = set()
+            for line in file.readlines() :
+                m = re.match("\s*CPPUNIT_TEST_SUITE_REGISTRATION\((.*)\)", line)
+                if m :
+                    isRegistered = True
+                m = re.match("\s*CPPUNIT_TEST\((.*)\)", line)
re.match("\s*CPPUNIT_TEST\((.*)\)", line) + if m : + registeredTests.add(m.group(1)) + continue + m = re.match("\s*void (test.*)\(\)", line) + if m : + declaredTests.add(m.group(1)) + if not isRegistered : + print filename + ": Registration missing" + foundUnregisteredTests = True + if registeredTests - declaredTests != set([]) : + print filename + ": " + str(registeredTests - declaredTests) + foundUnregisteredTests = True + file.close() diff --git a/BuildTools/CheckTranslations.py b/BuildTools/CheckTranslations.py index b39af08..0617fba 100755 --- a/BuildTools/CheckTranslations.py +++ b/BuildTools/CheckTranslations.py @@ -5,7 +5,7 @@ import os, sys, re, xml.dom.minidom def getText(nodelist): - text = "" - for node in nodelist: - if node.nodeType == node.TEXT_NODE: - text += node.data - return text + text = "" + for node in nodelist: + if node.nodeType == node.TEXT_NODE: + text += node.data + return text @@ -16,8 +16,8 @@ f = open("Swift/resources/swift.desktop", "r") for l in f.readlines() : - m = re.match("GenericName\[(\w+)\].*", l) - if m : - desktop_generic_names.add(m.group(1)) - m = re.match("Comment\[(\w+)\].*", l) - if m : - desktop_comments.add(m.group(1)) + m = re.match("GenericName\[(\w+)\].*", l) + if m : + desktop_generic_names.add(m.group(1)) + m = re.match("Comment\[(\w+)\].*", l) + if m : + desktop_comments.add(m.group(1)) f.close() @@ -25,26 +25,26 @@ f.close() for filename in os.listdir("Swift/Translations") : - m = re.match("swift_(.*)\.ts", filename) - if m : - language = m.group(1) - finished = True - f = open("Swift/Translations/" + filename, "r") - document = xml.dom.minidom.parse(f) - f.close() + m = re.match("swift_(.*)\.ts", filename) + if m : + language = m.group(1) + finished = True + f = open("Swift/Translations/" + filename, "r") + document = xml.dom.minidom.parse(f) + f.close() - for message in document.getElementsByTagName("message") : - source = message.getElementsByTagName("source")[0] - sourceText = getText(source.childNodes) - sourcePlaceholders = set(re.findall("%\d+%?", sourceText)) - translation = message.getElementsByTagName("translation")[0] - if "type" in translation.attributes.keys() and translation.attributes["type"]. value == "unfinished" : - finished = False - translationText = getText(translation.childNodes) - translationPlaceholders = set(re.findall("%\d+%?", translationText)) - if translationPlaceholders != sourcePlaceholders : - print "[Error] " + filename + ": Placeholder mismatch in translation '" + sourceText + "'" - if not finished : - print "[Warning] " + filename + ": Unfinished" - if language not in desktop_generic_names and language != "en" : - print "[Warning] GenericName field missing in desktop entry for " + language - if language not in desktop_comments and language != "en" : - print "[Warning] Comment field missing in desktop entry for " + language + for message in document.getElementsByTagName("message") : + source = message.getElementsByTagName("source")[0] + sourceText = getText(source.childNodes) + sourcePlaceholders = set(re.findall("%\d+%?", sourceText)) + translation = message.getElementsByTagName("translation")[0] + if "type" in translation.attributes.keys() and translation.attributes["type"]. 
value == "unfinished" : + finished = False + translationText = getText(translation.childNodes) + translationPlaceholders = set(re.findall("%\d+%?", translationText)) + if translationPlaceholders != sourcePlaceholders : + print("[Error] " + filename + ": Placeholder mismatch in translation '" + sourceText + "'") + if not finished : + print("[Warning] " + filename + ": Unfinished") + if language not in desktop_generic_names and language != "en" : + print("[Warning] GenericName field missing in desktop entry for " + language) + if language not in desktop_comments and language != "en" : + print("[Warning] Comment field missing in desktop entry for " + language) diff --git a/BuildTools/Copyright/find-contribs.py b/BuildTools/Copyright/find-contribs.py new file mode 100755 index 0000000..799ae7b --- /dev/null +++ b/BuildTools/Copyright/find-contribs.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +import subprocess + +def print_log(full_log): + full_log_lines = full_log.split("\n") + + commits = [] + + commit_bit = "commit " + author_bit = "Author: " + date_bit = "Date: " + + commit = None + for line in full_log_lines: + + if line[0:len(commit_bit)] == commit_bit: + if commit: + commits.append(commit) + commit = {'text':''} + handled = False + for bit in [commit_bit, author_bit, date_bit]: + if line[0:len(bit)] == bit: + commit[bit] = line + handled = True + if not handled: + commit['text'] += line + + commits.append(commit) + + contributions = [] + + for commit in commits: + if not "git@kismith.co.uk" in commit[author_bit] and not "git@el-tramo.be" in commit[author_bit]: + contributions.append(commit) + + #print contributions + contributors = {} + for commit in contributions: + if not commit[author_bit] in contributors: + contributors[commit[author_bit]] = [] + contributors[commit[author_bit]].append(commit[commit_bit]) + + for contributor in contributors: + print contributor + " has contributed patches " + ", ".join([commit[len(commit_bit):] for commit in contributors[contributor]]) + +full_swiften_log = subprocess.check_output(["git", "log", "--", "Swiften"]) + +print("Contributors for Swiften/ subtree:\n") +print_log(full_swiften_log) + +full_all_log = subprocess.check_output(["git", "log"]) + +print("\n\n\n\n") + +print("Contributors for full tree:\n") +print_log(full_all_log) diff --git a/BuildTools/Copyrighter.py b/BuildTools/Copyrighter.py index ccb2019..4c7bfeb 100755 --- a/BuildTools/Copyrighter.py +++ b/BuildTools/Copyrighter.py @@ -9,134 +9,180 @@ LICENSE_DIR = "Documentation/Licenses" +# The following regex parses license comment blocks and its part out of a complete source file. +reParseLicenseCommentBlocks = re.compile(r'(\/\*\n\s\*\sCopyright \(c\) (?P<startYear>\d\d\d\d)(-(?P<endYear>\d\d\d\d))? 
(?P<author>[^\n\.]*)\.?\n.\* (?P<license>[^\n]*)\n \* (?P<seeMore>[^\n]+)\n *\*\/)') + class License : - def __init__(self, name, file) : - self.name = name - self.file = file + def __init__(self, name, file) : + self.name = name + self.file = file licenses = { - "gpl3" : License("GNU General Public License v3", "GPLv3.txt"), - "mit" : License("MIT License", "MIT.txt"), - } - + "default": License("All rights reserved.", "See the COPYING file for more information."), + "gpl3" : License("Licensed under the GNU General Public License v3.", "See " + LICENSE_DIR + "/" + "GPLv3.txt" + " for more information."), + "mit" : License("Licensed under the MIT License.", "See " + LICENSE_DIR + "/" + "MIT.txt" + " for more information."), + } class Copyright : - def __init__(self, author, year, license) : - self.author = author - self.year = year - self.license = license - - def to_string(self, comment_chars) : - return "\n".join([ - comment_chars[0], - comment_chars[1] + " Copyright (c) %(year)s %(name)s" % {"year" : self.year, "name" : self.author }, - comment_chars[1] + " Licensed under the " + licenses[self.license].name + ".", - comment_chars[1] + " See " + LICENSE_DIR + "/" + licenses[self.license].file + " for more information.", - comment_chars[2], - "\n"]) - -def get_comment_chars_for_filename(filename) : - return ("/*", " *", " */") - -def get_comment_chars_re_for_filename(filename) : - comment_chars = get_comment_chars_for_filename(filename) - return "|".join(comment_chars).replace("*", "\\*") - -def parse_file(filename) : - file = open(filename) - copyright_text = [] - prolog = "" - epilog = "" - inProlog = True - inCopyright = False - inEpilog = False - for line in file.readlines() : - if inProlog : - if line.startswith("#!") or len(line.strip()) == 0 : - prolog += line - continue - else : - inProlog = False - inCopyright = True - - if inCopyright : - if re.match(get_comment_chars_re_for_filename(filename), line) != None : - copyright_text.append(line.rstrip()) - continue - else : - inCopyright = False - inEpilog = True - if len(line.strip()) == 0 : - continue - - if inEpilog : - epilog += line - continue - - file.close() - - # Parse the copyright - copyright = None - if len(copyright_text) == 5 : - comment_chars = get_comment_chars_for_filename(filename) - if copyright_text[0] == comment_chars[0] and copyright_text[4] == comment_chars[2] : - matchstring = "(" + get_comment_chars_re_for_filename(filename) + ") Copyright \(c\) (?P<startYear>\d\d\d\d)(-(?P<endYear>\d\d\d\d))? (?P<author>.*)" - m = re.match(matchstring, copyright_text[1]) - if m != None : - # FIXME: Do better copyright reconstruction here - copyright = True - if not copyright : - epilog = "\n".join(copyright_text) + epilog - return (prolog, copyright, epilog) + def __init__(self, author, year, license) : + self.author = author + self.year = year + self.license = license + + def to_string(self, comment_chars) : + return "\n".join([ + comment_chars[0], + comment_chars[1] + " Copyright (c) %(year)s %(name)s" % {"year" : self.year, "name" : self.author }, + comment_chars[1] + licenses[self.license].name, + comment_chars[1] + licenses[self.license].file, + comment_chars[2], + "\n"]) + def __str__(self): + return """/* + * Copyright (c) %s %s. 
+ * %s + * %s + */ +""" % (self.year, self.author, licenses[self.license].name, licenses[self.license].file) + +class ContentRef : + def __init__(self, begin, end, content): + self.begin = begin + self.end = end + self.content = content + +class CopyrightBlock : + def __init__(self, yearBegin, yearEnd, author, license, seeMore, total): + self.yearBegin = yearBegin + self.yearEnd = yearEnd + self.author = author + self.license = license + self.seeMore = seeMore + self.total = total + +def cref_from_group(match, group): + if match.group(group): + return ContentRef(match.start(group), match.end(group), match.group(group)) + else : + return None + +def parse_file_new(filename): + copyrightBlocks = [] + with open(filename, 'r') as file: + content = file.read() + for match in re.finditer(reParseLicenseCommentBlocks, content): + copyrightBlocks.append(CopyrightBlock( + cref_from_group(match, "startYear"), + cref_from_group(match, "endYear"), + cref_from_group(match, "author"), + cref_from_group(match, "license"), + cref_from_group(match, "seeMore"), + cref_from_group(match, 0))) + return copyrightBlocks def get_userinfo() : - p = subprocess.Popen("git config user.name", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt")) - username = p.stdout.read().rstrip() - p.stdin.close() - if p.wait() != 0 : - return None - p = subprocess.Popen("git config user.email", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt")) - email = p.stdout.read().rstrip() - p.stdin.close() - if p.wait() != 0 : - return None - return (username, email) + p = subprocess.Popen("git config user.name", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt")) + username = p.stdout.read().rstrip() + p.stdin.close() + if p.wait() != 0 : + return None + p = subprocess.Popen("git config user.email", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt")) + email = p.stdout.read().rstrip() + p.stdin.close() + if p.wait() != 0 : + return None + return (username, email) def get_copyright(username, email) : - if email in ["git@el-tramo.be", "git@kismith.co.uk"] : - license = DEFAULT_LICENSE - else : - license = CONTRIBUTOR_LICENSE - return Copyright(username, datetime.date.today().strftime("%Y"), license) - -def check_copyright(filename) : - (prolog, copyright, epilog) = parse_file(filename) - if copyright == None : - print "No copyright found in: " + filename - #print "Please run '" + sys.argv[0] + " set-copyright " + filename + "'" - return False - else : - return True - -def set_copyright(filename, copyright) : - (prolog, c, epilog) = parse_file(filename) - comment_chars = get_comment_chars_for_filename(filename) - copyright_text = copyright.to_string(comment_chars) - file = open(filename, "w") - if prolog != "": - file.write(prolog) - file.write(copyright_text) - if epilog != "" : - file.write(epilog) - file.close() + if email in ["git@el-tramo.be", "git@kismith.co.uk"] : + license = DEFAULT_LICENSE + else : + license = CONTRIBUTOR_LICENSE + return Copyright(username, datetime.date.today().strftime("%Y"), license) + +def get_copyright_setting(username, email) : + config = os.getenv("SWIFT_LICENSE_CONFIG") + if config : + copyrightHolder, license = config.split("|") + else : + if email.endswith("isode.com") or email in ["git@el-tramo.be", "git@kismith.co.uk", "tm@ayena.de"] : + copyrightHolder, license = "Isode 
Limited", "default" + else : + copyrightHolder, license = username, "mit" + return Copyright(copyrightHolder, datetime.date.today().year, license) + +def check_copyright(filename, hints) : + copyrightBlocks = parse_file_new(filename) + if copyrightBlocks : + # looking for copyright block for current author + username, email = get_userinfo() + copyrightSetting = get_copyright_setting(username, email) + for block in copyrightBlocks : + if block.author.content == copyrightSetting.author: + year = block.yearBegin.content if not block.yearEnd else block.yearEnd.content + if int(year) == copyrightSetting.year: + return True + else : + if hints : + print("Copyright block for " + copyrightSetting.author + " does not cover current year in: " + filename) + return False + if hints : + print("Missing copyright block for " + copyrightSetting.author + " in: " + filename) + return False + else : + if hints : + print("No copyright found in: " + filename) + return False + +def replace_data_in_file(filename, begin, end, replaceWith) : + with open(filename, 'r') as file: + content = file.read() + with open(filename, 'w') as file: + file.write(content[:begin] + replaceWith + content[end:]) + +def set_or_update_copyright(filename) : + if check_copyright(filename, False) : + print("No update required for file: " + filename) + else : + copyrightBlocks = parse_file_new(filename) + username, email = get_userinfo() + copyrightSetting = get_copyright_setting(username, email) + lastBlock = 0 + for block in copyrightBlocks : + if block.author.content == copyrightSetting.author : + if not block.yearEnd : + # replace year with range + replace_data_in_file(filename, block.yearBegin.begin, block.yearBegin.end, "%s-%s" % (block.yearBegin.content, str(copyrightSetting.year))) + else : + # replace end of range with current year + replace_data_in_file(filename, block.yearEnd.begin, block.yearEnd.end, "%s" % str(copyrightSetting.year)) + return + lastBlock = block.total.end + + # No copyright block found. Append a new one. + replace_data_in_file(filename, lastBlock+1, lastBlock+1, "\n" + str(copyrightSetting)) + +def print_help() : + print("""Usage: + Copyrighter.py check-copyright $filename + Cheks for the existence of a copyright comment block. + + Copyrighter.py set-copyright $filename + Adds or updates the existing copyright comment block. + + License setting: + A users license configuration can be set via the SWIFT_LICENSE_CONFIG environment variable + in the format "$copyright holder|$license", e.g. "Jane Doe|mit". Possible values for + $license are default, mit and gpl. 
+ """) if sys.argv[1] == "check-copyright" : - file = sys.argv[2] - if (file.endswith(".cpp") or file.endswith(".h")) and not "3rdParty" in file : - if not check_copyright(file) : - sys.exit(-1) + file = sys.argv[2] + if (file.endswith(".cpp") or file.endswith(".h")) and not "3rdParty" in file : + if not check_copyright(file, True) : + sys.exit(-1) elif sys.argv[1] == "set-copyright" : - (username, email) = get_userinfo() - copyright = get_copyright(username, email) - set_copyright(sys.argv[2], copyright) + file = sys.argv[2] + set_or_update_copyright(file) else : - print "Unknown command: " + sys.argv[1] - sys.exit(-1) + print("Unknown command: " + sys.argv[1]) + print_help() + sys.exit(-1) diff --git a/BuildTools/Coverage/FilterLCovData.py b/BuildTools/Coverage/FilterLCovData.py index a3a7ee5..e0f5c92 100755 --- a/BuildTools/Coverage/FilterLCovData.py +++ b/BuildTools/Coverage/FilterLCovData.py @@ -9,4 +9,3 @@ assert(len(sys.argv) == 2) def isIgnored(file) : - return (file.find("/Swiften/") == -1 and file.find("/Slimber/") == -1 and file.find("/Swift/") == -1) or (file.find("/UnitTest/") != -1 or file.find("/QA/") != -1) - + return (file.find("/Swiften/") == -1 and file.find("/Slimber/") == -1 and file.find("/Swift/") == -1) or (file.find("/UnitTest/") != -1 or file.find("/QA/") != -1) @@ -16,13 +15,13 @@ inIgnoredFile = False for line in inputFile.readlines() : - if inIgnoredFile : - if line == "end_of_record\n" : - inIgnoredFile = False - else : - if line.startswith("SF:") and isIgnored(line) : - inIgnoredFile = True + if inIgnoredFile : + if line == "end_of_record\n" : + inIgnoredFile = False else : - m = re.match("SF:(.*)", line) - if m : - line = "SF:" + os.path.realpath(m.group(1)) + "\n" - output.append(line) + if line.startswith("SF:") and isIgnored(line) : + inIgnoredFile = True + else : + m = re.match("SF:(.*)", line) + if m : + line = "SF:" + os.path.realpath(m.group(1)) + "\n" + output.append(line) inputFile.close() diff --git a/BuildTools/Coverage/GenerateSummary.py b/BuildTools/Coverage/GenerateSummary.py index ec94a4f..9de0f4d 100755 --- a/BuildTools/Coverage/GenerateSummary.py +++ b/BuildTools/Coverage/GenerateSummary.py @@ -10,14 +10,14 @@ coverage = {} for line in inputFile.readlines() : - line = line.strip() - m = re.match("^SF:(.*)", line) - if m : - currentFile = m.group(1) - else : - m = re.match("^DA:(\d+),(\d+)", line) + line = line.strip() + m = re.match("^SF:(.*)", line) if m : - currentFileCoverage = coverage.get(currentFile, {}) - line = int(m.group(1)) - count = int(m.group(2)) - currentFileCoverage[line] = currentFileCoverage.get(line, 0) + count - coverage[currentFile] = currentFileCoverage + currentFile = m.group(1) + else : + m = re.match("^DA:(\d+),(\d+)", line) + if m : + currentFileCoverage = coverage.get(currentFile, {}) + line = int(m.group(1)) + count = int(m.group(2)) + currentFileCoverage[line] = currentFileCoverage.get(line, 0) + count + coverage[currentFile] = currentFileCoverage inputFile.close() @@ -27,6 +27,6 @@ coveredLines = 0 for c in coverage.values() : - totalLines += len(c) - for l in c.values() : - if l > 0 : - coveredLines += 1 + totalLines += len(c) + for l in c.values() : + if l > 0 : + coveredLines += 1 diff --git a/BuildTools/Cppcheck.sh b/BuildTools/Cppcheck.sh index d318150..ee52b9b 100755 --- a/BuildTools/Cppcheck.sh +++ b/BuildTools/Cppcheck.sh @@ -3,20 +3,19 @@ cppcheck $@ \ - --enable=all \ - --inline-suppr \ - --suppress=postfixOperator:3rdParty/hippomocks.h \ - --suppress=stlSize:3rdParty/hippomocks.h \ - 
--suppress=noConstructor \ - --suppress=publicAllocationError:Swift/Controllers/Chat/UnitTest/ChatsManagerTest.cpp \ - -i 3rdParty -i .git -i .sconf_temp \ - -i Swiftob/linit.cpp \ - -i Swift/QtUI/EventViewer/main.cpp \ - -i Swift/QtUI/ApplicationTest \ - -i Swift/QtUI/ChatView/main.cpp \ - -i Swift/QtUI/Roster/main.cpp \ - -i Swift/QtUI/NotifierTest/NotifierTest.cpp \ - -DSWIFTEN_BUILDING -DSWIFTEN_STATIC \ - -U__BEOS__ -U__CYGWIN__ -U__QNNXTO__ -U__amigaos__ -Uhpux -U__sgi \ - \ - -I . \ - -I Swift/QtUI \ - . + --enable=all \ + --inline-suppr \ + --suppress=postfixOperator:3rdParty/hippomocks.h \ + --suppress=stlSize:3rdParty/hippomocks.h \ + --suppress=noConstructor \ + --suppress=publicAllocationError:Swift/Controllers/Chat/UnitTest/ChatsManagerTest.cpp \ + -i 3rdParty -i .git -i .sconf_temp \ + -i Swift/QtUI/EventViewer/main.cpp \ + -i Swift/QtUI/ApplicationTest \ + -i Swift/QtUI/ChatView/main.cpp \ + -i Swift/QtUI/Roster/main.cpp \ + -i Swift/QtUI/NotifierTest/NotifierTest.cpp \ + -DSWIFTEN_BUILDING -DSWIFTEN_STATIC \ + -U__BEOS__ -U__CYGWIN__ -U__QNNXTO__ -U__amigaos__ -Uhpux -U__sgi \ + \ + -I . \ + -I Swift/QtUI \ + . diff --git a/BuildTools/CrashReportAnalysis/WindowsMinidumpAnalyse.py b/BuildTools/CrashReportAnalysis/WindowsMinidumpAnalyse.py new file mode 100644 index 0000000..19e5f87 --- /dev/null +++ b/BuildTools/CrashReportAnalysis/WindowsMinidumpAnalyse.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python +# Note +# ---- +# This script requires: +# - cdb, the Windows command line debugger installed and available in PATH. +# - the SWIFT_DIST environment variable set to a location that contains msi and pdb.gz files. + +import sys +from subprocess import call +from subprocess import Popen, PIPE +import ntpath +import shutil +import re +import urllib2 +import os +import gzip +import time + + +swiftWindowBuildsPathPrefix = os.getenv("SWIFT_DIST") +if swiftWindowBuildsPathPrefix == None : + print("Please set the SWIFT_DIST environment variable to a location containing msi and pdb.gz files.") + sys.exit(1) + +if len(sys.argv) != 3: + print("Usage: python WindowsMinidumpAnalyse.py VERSION MINIDUMP_FILE") + sys.exit(1) + +version = sys.argv[1] +minidump_file = sys.argv[2] +minidump_filename = ntpath.basename(minidump_file) +minidump_fullpath = os.path.abspath(minidump_file) +humantext_fullpath = os.path.splitext(minidump_fullpath)[0]+".txt" +symbol_cache_path = os.path.join(os.getenv("TEMP"), "\symbols") +working_folder = "tmp-crash-{0}".format(minidump_filename) +commit = "" + +def downloadInstaller(version) : + onlineFilename = "{0}.msi".format(version.capitalize()) + url = "{0}{1}".format(swiftWindowBuildsPathPrefix, onlineFilename) + print("Download {0}.".format(url)) + file = urllib2.urlopen(url) + with open(onlineFilename,'wb') as output: + output.write(file.read()) + +def unpackInstaller(version) : + msiFilename = "{0}.msi".format(version.capitalize()) + msiExtractDirectory = os.getcwd() + "\\msi" + if not os.path.exists(msiExtractDirectory): + os.makedirs(msiExtractDirectory) + print("Unpack {0} to {1}.".format(msiFilename, os.getcwd())) + call(["msiexec", "/a", msiFilename, "/qb", "TARGETDIR={0}".format(msiExtractDirectory)], shell=True) + +def unpackDebugSymbols(version) : + symbolsFilename = "{0}.pdb.gz".format(version.capitalize()) + print("Unpack {0}.".format(symbolsFilename)) + if not os.path.isdir(symbolsFilename): + with gzip.open(symbolsFilename, 'rb') as in_file: + s = in_file.read() + + path_to_store = symbolsFilename[:-3] + + with 
open("msi\PFiles\Swift\{0}".format("Swift.pdb"), 'wb') as f: + f.write(s) + +def downloadDebugSymbols(version) : + onlineFilename = "{0}.pdb.gz".format(version.capitalize()) + url = "{0}{1}".format(swiftWindowBuildsPathPrefix, onlineFilename) + print("Download {0}.".format(url)) + file = urllib2.urlopen(url) + with open(onlineFilename,'wb') as output: + output.write(file.read()) + +def copyMinidump(filename) : + shutil.copyfile(filename, "msi\PFiles\Swift\{0}".format(minidump_filename)) + +def printHumanReadableReport(): + oldDir = os.getcwd() + + # change dir to Swift.exe dir + os.chdir("msi\PFiles\Swift") + + # print all stacks and analyze crash for exceptions + cdbCommand = ".symopt+0x40;.lines -e;.kframes 200;!analyze -v -p;!uniqstack -vp;.ecxr;k;q" + + symbolPath = "cache*{0};srv*https://msdl.microsoft.com/download/symbols;C:\\Qt\\Qt5.4.2\\5.4\\msvc2013_opengl\\bin;C:\\Qt\\Qt5.4.2\\5.4\\msvc2013_opengl\\lib;{1}".format(symbol_cache_path, os.getcwd()) + + cdbFullCommand = ["cdb", "-i", os.getcwd(), "-y", symbolPath, "-z", minidump_filename, "-srcpath", oldDir, "-logo", humantext_fullpath, "-c", cdbCommand ] + print("Run command: " + str(cdbFullCommand)) + call(cdbFullCommand) + +# for testing, delete the old folder +try: + shutil.rmtree(working_folder) +except: + print("") + +# clone local git repository into dedicated directory +call(["git", "clone", ".", working_folder], shell=True) + +# git version from swift version +match = re.match( r"(.*)-dev(\d+)", version) +if match: + basetag = match.group(1) + commits = int(match.group(2)) + process = Popen(["git", "-C", working_folder, "log", "--ancestry-path", "--format=%H", "{0}..HEAD".format(basetag)], stdout=PIPE) + (output, err) = process.communicate() + exit_code = process.wait() + commit = output.splitlines()[-commits].strip() +else: + basetag = version + process = Popen(["git", "-C", working_folder, "log", "--format=%H", "-n", "1" "{0}".format(basetag)], stdout=PIPE) + (output, err) = process.communicate() + exit_code = process.wait() + commit = output.strip() + +assert(len(commit) > 0) + +# Create symbol cache directory +if not os.path.exists(symbol_cache_path): + os.makedirs(symbol_cache_path) + +#print("Checking out commit {0}.".format(commit)) +call(["git", "-C", working_folder, "checkout", commit]) + +os.chdir(working_folder) + +downloadInstaller(version) +downloadDebugSymbols(version) +unpackInstaller(version) +unpackDebugSymbols(version) +copyMinidump(minidump_fullpath) +time.sleep(10) +printHumanReadableReport() diff --git a/BuildTools/DocBook/SCons/DocBook.py b/BuildTools/DocBook/SCons/DocBook.py index be3d4f6..d7c95ba 100644 --- a/BuildTools/DocBook/SCons/DocBook.py +++ b/BuildTools/DocBook/SCons/DocBook.py @@ -7,99 +7,112 @@ import xml.dom.minidom, re, os.path, sys +def maybeBytesToString(s): + if isinstance(s, bytes): + return s.decode('utf-8') + return s + +def prepareForWrite(s): + try: + if isinstance(s, unicode): + return s.encode('utf-8') + except NameError: + pass + return s + def generate(env) : - # Location of stylesheets and catalogs - docbook_dir = "#/BuildTools/DocBook" - docbook_xsl_style_dir = env.Dir(docbook_dir + "/Stylesheets").abspath - docbook_xml_catalog = env.File("catalog.xml").abspath - if "DOCBOOK_XML_DIR" in env : - docbook_xml_dir = env.Dir("$DOCBOOK_XML_DIR").abspath - else : - docbook_xml_dir = env.Dir("#/3rdParty/DocBook/XML").abspath - if "DOCBOOK_XSL_DIR" in env : - docbook_xsl_dir = env.Dir("$DOCBOOK_XSL_DIR").abspath - else : - docbook_xsl_dir = env.Dir("#/3rdParty/DocBook/XSL").abspath - 
fop_fonts_dir = env.Dir(docbook_dir + "/Fonts").abspath - - # Generates a catalog from paths to external tools - def buildCatalog(target, source, env) : - catalog = """<?xml version='1.0'?> + # Location of stylesheets and catalogs + docbook_dir = "#/BuildTools/DocBook" + docbook_xsl_style_dir = env.Dir(docbook_dir + "/Stylesheets").abspath + docbook_xml_catalog = env.File("catalog.xml").abspath + if "DOCBOOK_XML_DIR" in env : + docbook_xml_dir = env.Dir("$DOCBOOK_XML_DIR").abspath + else : + docbook_xml_dir = env.Dir("#/3rdParty/DocBook/XML").abspath + if "DOCBOOK_XSL_DIR" in env : + docbook_xsl_dir = env.Dir("$DOCBOOK_XSL_DIR").abspath + else : + docbook_xsl_dir = env.Dir("#/3rdParty/DocBook/XSL").abspath + fop_fonts_dir = env.Dir(docbook_dir + "/Fonts").abspath + + # Generates a catalog from paths to external tools + def buildCatalog(target, source, env) : + catalog = """<?xml version='1.0'?> <catalog xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog" prefer="public"> - <rewriteSystem - systemIdStartString="http://www.oasis-open.org/docbook/xml/4.5/" - rewritePrefix="%(docbook_xml_dir)s/" /> - <rewriteSystem - systemIdStartString="docbook-xsl:/" - rewritePrefix="%(docbook_xsl_dir)s/" /> + <rewriteSystem + systemIdStartString="http://www.oasis-open.org/docbook/xml/4.5/" + rewritePrefix="%(docbook_xml_dir)s/" /> + <rewriteSystem + systemIdStartString="docbook-xsl:/" + rewritePrefix="%(docbook_xsl_dir)s/" /> </catalog>""" - - docbook_xml_dir = source[0].get_contents() - docbook_xsl_dir = source[1].get_contents() - if env["PLATFORM"] == "win32" : - docbook_xml_dir = docbook_xml_dir.replace("\\","/") - docbook_xsl_dir = docbook_xsl_dir.replace("\\","/") - file = open(target[0].abspath, "w") - file.write(catalog % { - "docbook_xml_dir" : docbook_xml_dir, - "docbook_xsl_dir" : docbook_xsl_dir, - }) - file.close() - - # Generates a FOP config file - def buildFopConfig(target, source, env) : - fopcfg = """<fop version=\"1.0\"> - <renderers> - <renderer mime=\"application/pdf\"> - <fonts> - <directory recursive=\"true\">%(fonts_dir)s</directory> - </fonts> - </renderer> - </renderers> + + docbook_xml_dir = maybeBytesToString(source[0].get_contents()) + docbook_xsl_dir = maybeBytesToString(source[1].get_contents()) + if env["PLATFORM"] == "win32" : + docbook_xml_dir = docbook_xml_dir.replace("\\","/") + docbook_xsl_dir = docbook_xsl_dir.replace("\\","/") + file = open(target[0].abspath, "w") + file.write(catalog % { + "docbook_xml_dir" : prepareForWrite(docbook_xml_dir), + "docbook_xsl_dir" : prepareForWrite(docbook_xsl_dir), + }) + file.close() + + # Generates a FOP config file + def buildFopConfig(target, source, env) : + fopcfg = """<fop version=\"1.0\"> + <renderers> + <renderer mime=\"application/pdf\"> + <fonts> + <directory recursive=\"true\">%(fonts_dir)s</directory> + </fonts> + </renderer> + </renderers> </fop>""" - file = open(target[0].abspath, "w") - file.write(fopcfg % { - "fonts_dir" : source[0].get_contents() - }) - file.close() - - # Builds a DocBook file - def buildDocBook(env, source) : - db_env = env.Clone() - db_env["XMLCATALOGS"] = [docbook_xml_catalog] - db_env["ENV"].update({"OS" : os.environ.get("OS", "")}) - - db_env["XMLLINT"] = env.WhereIs("xmllint") - db_env["XSLT"] = env.WhereIs("xsltproc") - db_env["FO"] = env.WhereIs("fop") - - if not db_env["XMLLINT"] or not db_env["XSLT"] : - return - - # PDF generation - if db_env["FO"] : - fo = db_env.XSLT(os.path.splitext(source)[0] + ".fo", source, - XSLTSTYLESHEET = db_env["DOCBOOK_XSL_FO"]) - pdf = db_env.FO(fo) - - # HTML 
generation - db_env.XSLT(os.path.splitext(source)[0] + ".html", source, - XSLTSTYLESHEET = db_env["DOCBOOK_XSL_HTML"]) - - # Import tools - env.Tool("FO", toolpath = [docbook_dir + "/SCons"]) - env.Tool("XSLT", toolpath = [docbook_dir + "/SCons"]) - - # Catalog file generation - env.Command("catalog.xml", [env.Value(docbook_xml_dir), env.Value(docbook_xsl_dir)], SCons.Action.Action(buildCatalog, cmdstr = "$GENCOMSTR")) - - # FO config file generation - env["FOCFG"] = env.File("fop.cfg").abspath - env.Command("fop.cfg", [env.Value(fop_fonts_dir)], SCons.Action.Action(buildFopConfig, cmdstr = "$GENCOMSTR")) - - # DocBook stylesheets - env["DOCBOOK_XSL_FO"] = docbook_xsl_style_dir + "/fo/docbook.xsl" - env["DOCBOOK_XSL_HTML"] = docbook_xsl_style_dir + "/html/docbook.xsl" - env.AddMethod(buildDocBook, "DocBook") - + file = open(target[0].abspath, "w") + file.write(fopcfg % { + "fonts_dir" : source[0].get_contents() + }) + file.close() + + # Builds a DocBook file + def buildDocBook(env, source) : + db_env = env.Clone() + db_env["XMLCATALOGS"] = [docbook_xml_catalog] + db_env["ENV"].update({"OS" : os.environ.get("OS", "")}) + + db_env["XMLLINT"] = env.WhereIs("xmllint") + db_env["XSLT"] = env.WhereIs("xsltproc") + db_env["FO"] = env.WhereIs("fop") + + if not db_env["XMLLINT"] or not db_env["XSLT"] : + return + + # PDF generation + if db_env["FO"] : + fo = db_env.XSLT(os.path.splitext(source)[0] + ".fo", source, + XSLTSTYLESHEET = db_env["DOCBOOK_XSL_FO"]) + pdf = db_env.FO(fo) + + # HTML generation + db_env.XSLT(os.path.splitext(source)[0] + ".html", source, + XSLTSTYLESHEET = db_env["DOCBOOK_XSL_HTML"]) + + # Import tools + env.Tool("FO", toolpath = [docbook_dir + "/SCons"]) + env.Tool("XSLT", toolpath = [docbook_dir + "/SCons"]) + + # Catalog file generation + env.Command("catalog.xml", [env.Value(docbook_xml_dir), env.Value(docbook_xsl_dir)], SCons.Action.Action(buildCatalog, cmdstr = "$GENCOMSTR")) + + # FO config file generation + env["FOCFG"] = env.File("fop.cfg").abspath + env.Command("fop.cfg", [env.Value(fop_fonts_dir)], SCons.Action.Action(buildFopConfig, cmdstr = "$GENCOMSTR")) + + # DocBook stylesheets + env["DOCBOOK_XSL_FO"] = docbook_xsl_style_dir + "/fo/docbook.xsl" + env["DOCBOOK_XSL_HTML"] = docbook_xsl_style_dir + "/html/docbook.xsl" + env.AddMethod(buildDocBook, "DocBook") + def exists(env) : - return True + return True diff --git a/BuildTools/DocBook/SCons/FO.py b/BuildTools/DocBook/SCons/FO.py index c7596d6..c1c5614 100644 --- a/BuildTools/DocBook/SCons/FO.py +++ b/BuildTools/DocBook/SCons/FO.py @@ -8,38 +8,38 @@ import xml.dom.minidom, re def generate(env) : - def generate_actions(source, target, env, for_signature) : - if len(env["FOCFG"]) > 0 : - cmd = "$FO -c $FOCFG $FOFLAGS $SOURCE $TARGET" - else : - cmd = "$FO $FOFLAGS $SOURCE $TARGET" - return SCons.Action.Action(cmd, cmdstr = "$FOCOMSTR") + def generate_actions(source, target, env, for_signature) : + if len(env["FOCFG"]) > 0 : + cmd = "$FO -c $FOCFG $FOFLAGS $SOURCE $TARGET" + else : + cmd = "$FO $FOFLAGS $SOURCE $TARGET" + return SCons.Action.Action(cmd, cmdstr = "$FOCOMSTR") - def modify_sources(target, source, env) : - if len(env["FOCFG"]) > 0 : - source.append(env["FOCFG"]) - return target, source + def modify_sources(target, source, env) : + if len(env["FOCFG"]) > 0 : + source.append(env["FOCFG"]) + return target, source - def scan_fo(node, env, path) : - dependencies = set() - try : - document = xml.dom.minidom.parseString(node.get_contents()) - except xml.parsers.expat.ExpatError: - return [] - for include in 
document.getElementsByTagNameNS("http://www.w3.org/1999/XSL/Format", "external-graphic") : - m = re.match("url\((.*)\)", include.getAttribute("src")) - if m : - dependencies.add(m.group(1)) - return list(dependencies) + def scan_fo(node, env, path) : + dependencies = set() + try : + document = xml.dom.minidom.parseString(node.get_contents()) + except xml.parsers.expat.ExpatError: + return [] + for include in document.getElementsByTagNameNS("http://www.w3.org/1999/XSL/Format", "external-graphic") : + m = re.match("url\((.*)\)", include.getAttribute("src")) + if m : + dependencies.add(m.group(1)) + return list(dependencies) - env["FO"] = "fop" - env["FOFLAGS"] = "" - env["FOCFG"] = "" - env["BUILDERS"]["FO"] = SCons.Builder.Builder( - generator = generate_actions, - emitter = modify_sources, - source_scanner = SCons.Scanner.Scanner(function = scan_fo, skeys = [".fo"]), - suffix = ".pdf", - src_suffix = ".fo" - ) + env["FO"] = "fop" + env["FOFLAGS"] = "" + env["FOCFG"] = "" + env["BUILDERS"]["FO"] = SCons.Builder.Builder( + generator = generate_actions, + emitter = modify_sources, + source_scanner = SCons.Scanner.Scanner(function = scan_fo, skeys = [".fo"]), + suffix = ".pdf", + src_suffix = ".fo" + ) def exists(env) : - return True + return True diff --git a/BuildTools/DocBook/SCons/XSLT.py b/BuildTools/DocBook/SCons/XSLT.py index 83b5ec2..6a40b62 100644 --- a/BuildTools/DocBook/SCons/XSLT.py +++ b/BuildTools/DocBook/SCons/XSLT.py @@ -8,56 +8,56 @@ import xml.dom.minidom, os, os.path def generate(env) : - def generate_actions(source, target, env, for_signature) : - if not env.has_key("XSLTSTYLESHEET") : - raise SCons.Errors.UserError, "The XSLTSTYLESHEET construction variable must be defined" - - # Process the XML catalog files - # FIXME: It's probably not clean to do an ENV assignment globally - env["ENV"]["XML_CATALOG_FILES"] = " ".join(env.get("XMLCATALOGS", "")) - - # Build the XMLLint command - xmllintcmd = ["$XMLLINT", "--nonet", "--xinclude", "--postvalid", "--noout", "$SOURCE"] - - # Build the XSLT command - xsltcmd = ["$XSLT", "--nonet", "--xinclude"] - for (param, value) in env["XSLTPARAMS"] : - xsltcmd += ["--stringparam", param, value] - xsltcmd += ["-o", "$TARGET", "$XSLTSTYLESHEET", "$SOURCE"] - - return [ - SCons.Action.Action([xmllintcmd], cmdstr = "$XMLLINTCOMSTR"), - SCons.Action.Action([xsltcmd], cmdstr = "$XSLTCOMSTR")] - - def modify_sources(target, source, env) : - if len(env["FOCFG"]) > 0 : - source.append(env["FOCFG"]) - source.append(env.get("XMLCATALOGS", [])) - return target, source - - def scan_xml(node, env, path) : - dependencies = set() - nodes = [node] - while len(nodes) > 0 : - node = nodes.pop() - try : - document = xml.dom.minidom.parseString(node.get_contents()) - except xml.parsers.expat.ExpatError: - continue - for include in document.getElementsByTagNameNS("http://www.w3.org/2001/XInclude", "include") : - include_file = include.getAttribute("href") - dependencies.add(include_file) - if include.getAttribute("parse") != "text" : - nodes.append(env.File(include_file)) - return list(dependencies) - - env["XMLLINT"] = "xmllint" - env["XSLT"] = "xsltproc" - env["XSLTPARAMS"] = [] - env["BUILDERS"]["XSLT"] = SCons.Builder.Builder( - generator = generate_actions, - emitter = modify_sources, - source_scanner = SCons.Scanner.Scanner(function = scan_xml), - src_suffix = ".xml" - ) + def generate_actions(source, target, env, for_signature) : + if not env.has_key("XSLTSTYLESHEET") : + raise SCons.Errors.UserError("The XSLTSTYLESHEET construction variable must be 
defined") + + # Process the XML catalog files + # FIXME: It's probably not clean to do an ENV assignment globally + env["ENV"]["XML_CATALOG_FILES"] = " ".join(env.get("XMLCATALOGS", "")) + + # Build the XMLLint command + xmllintcmd = ["$XMLLINT", "--nonet", "--xinclude", "--postvalid", "--noout", "$SOURCE"] + + # Build the XSLT command + xsltcmd = ["$XSLT", "--nonet", "--xinclude"] + for (param, value) in env["XSLTPARAMS"] : + xsltcmd += ["--stringparam", param, value] + xsltcmd += ["-o", "$TARGET", "$XSLTSTYLESHEET", "$SOURCE"] + + return [ + SCons.Action.Action([xmllintcmd], cmdstr = "$XMLLINTCOMSTR"), + SCons.Action.Action([xsltcmd], cmdstr = "$XSLTCOMSTR")] + + def modify_sources(target, source, env) : + if len(env["FOCFG"]) > 0 : + source.append(env["FOCFG"]) + source.append(env.get("XMLCATALOGS", [])) + return target, source + + def scan_xml(node, env, path) : + dependencies = set() + nodes = [node] + while len(nodes) > 0 : + node = nodes.pop() + try : + document = xml.dom.minidom.parseString(node.get_contents()) + except xml.parsers.expat.ExpatError: + continue + for include in document.getElementsByTagNameNS("http://www.w3.org/2001/XInclude", "include") : + include_file = include.getAttribute("href") + dependencies.add(include_file) + if include.getAttribute("parse") != "text" : + nodes.append(env.File(include_file)) + return list(dependencies) + + env["XMLLINT"] = "xmllint" + env["XSLT"] = "xsltproc" + env["XSLTPARAMS"] = [] + env["BUILDERS"]["XSLT"] = SCons.Builder.Builder( + generator = generate_actions, + emitter = modify_sources, + source_scanner = SCons.Scanner.Scanner(function = scan_xml), + src_suffix = ".xml" + ) def exists(env) : - return True + return True diff --git a/BuildTools/Eclipse/CodeTemplates.xml b/BuildTools/Eclipse/CodeTemplates.xml index 17ff187..5021e13 100644 --- a/BuildTools/Eclipse/CodeTemplates.xml +++ b/BuildTools/Eclipse/CodeTemplates.xml @@ -5,5 +5,5 @@ */</template><template autoinsert="false" context="org.eclipse.cdt.ui.text.codetemplates.filecomment_context" deleted="false" description="Comment for created C/C++ files" enabled="true" id="org.eclipse.cdt.ui.text.codetemplates.filecomment" name="filecomment">/* - * Copyright (c) ${year} Remko Tronçon - * Licensed under the GNU General Public License v3. - * See Documentation/Licenses/GPLv3.txt for more information. + * Copyright (c) ${year} Isode Limited. + * All rights reserved. + * See the COPYING file for more information. */</template><template autoinsert="true" context="org.eclipse.cdt.ui.text.codetemplates.typecomment_context" deleted="false" description="Comment for created classes" enabled="true" id="org.eclipse.cdt.ui.text.codetemplates.typecomment" name="typecomment">/* @@ -37,2 +37,2 @@ ${declarations} -</template></templates>
\ No newline at end of file +</template></templates> diff --git a/BuildTools/EmojisGenerator/EmojiMapper.cpp b/BuildTools/EmojisGenerator/EmojiMapper.cpp new file mode 100644 index 0000000..097ddcc --- /dev/null +++ b/BuildTools/EmojisGenerator/EmojiMapper.cpp @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2016-2017 Isode Limited. + * All rights reserved. + * See the COPYING file for more information. + */ + +#include <SwifTools/EmojiMapper.h> + +#include <algorithm> +#include <string> +#include <unordered_map> + +namespace Swift { + + //AUTO-GENERATED CONTENT + <%= mapping %> + + std::vector<std::string> EmojiMapper::getCategories() { + std::vector<std::string> categories = { + "people", + "nature", + "food", + "activity", + "travel", + "objects", + "symbols", + "flags" + }; + + for (const auto& keyValuePair : emojisInCategory) { + if (std::find(std::begin(categories), std::end(categories), keyValuePair.first) == std::end(categories)) { + categories.push_back(keyValuePair.first); + } + } + return categories; + } + + std::string EmojiMapper::shortnameToUnicode(const std::string& shortname) { + auto unicodeSequenceIterator = shortnameUnicode.find(shortname); + if (unicodeSequenceIterator != shortnameUnicode.end()) { + return unicodeSequenceIterator->second; + } + else { + return std::string(); + } + } + + std::string EmojiMapper::unicodeToShortname(const std::string& unicode) { + auto shortnameIterator = unicodeShortname.find(unicode); + if (shortnameIterator != unicodeShortname.end()) { + return shortnameIterator->second; + } + else { + return std::string(); + } + } + + std::vector<std::string> EmojiMapper::categoryNameToEmojis(const std::string& categoryName) { + auto emojiIterator = emojisInCategory.find(categoryName); + if (emojiIterator != emojisInCategory.end()) { + return emojiIterator->second; + } + else { + return std::vector<std::string>(); + } + } + + std::string EmojiMapper::categoryToFlagshipUnicodeEmoji(const std::string& category) { + if (category == "recent") { + return shortnameToUnicode(":clock3:"); + } else if (category == "people") { + return shortnameToUnicode(":smiley:"); + } else if (category == "nature" ) { + return shortnameToUnicode(":dog:"); + } else if (category == "food") { + return shortnameToUnicode(":apple:"); + } else if (category == "activity") { + return shortnameToUnicode(":soccer:"); + } else if (category == "travel") { + return shortnameToUnicode(":red_car:"); + } else if (category == "objects") { + return shortnameToUnicode(":bulb:"); + } else if (category == "symbols") { + return shortnameToUnicode(":heavy_division_sign:"); + } else if (category == "flags") { + return shortnameToUnicode(":flag_white:"); + } + return std::string(); + } +} diff --git a/BuildTools/EmojisGenerator/generate.js b/BuildTools/EmojisGenerator/generate.js new file mode 100644 index 0000000..177e26e --- /dev/null +++ b/BuildTools/EmojisGenerator/generate.js @@ -0,0 +1,107 @@ +var util = require("util"), + fs = require("fs"), + _ = require("underscore"); + +// Load emojis +var emojis = require("./emoji.json"); + +var unicodeCodepointToByteArray = function (codepoint) { + var utf8 = unescape(encodeURIComponent(String.fromCodePoint(parseInt(codepoint, 16)))); + + var arr = []; + for (var i = 0; i < utf8.length; i++) { + arr.push(utf8.charCodeAt(i)); + } + return arr; +} + +var byteArrayToCStringLiteral = function (byteArray) { + var literalString = ""; + for (var i = 0; i < byteArray.length; i++) { + literalString += "\\x" + byteArray[i].toString(16); + } + return literalString; +} + +var 
mapping = ''; +// Generate C++ mapping for shortnameUnicode_ +mapping += 'const std::unordered_map<std::string, std::string> EmojiMapper::shortnameUnicode = std::unordered_map<std::string, std::string>{'; +mapping += _(emojis).filter(function(data) { + // Filter out regional category. + return data.category != 'regional'; +}).filter(function(data) { + // Only use emojis with 2 or fewer codepoints, as Qt's harfbuzz version + // has issues rendering those as a single glyph. + return data.unicode.split("-").length < 3; +}).map(function(data) { + var shortname = data.shortname; + // Get codepoints + var codepoints = _(data.unicode.split("-")).map(function (code) { + //return "\\U" + "0".repeat(8-code.length) + code; + return byteArrayToCStringLiteral(unicodeCodepointToByteArray(code)); + }); + + // .join('\\U0000200D') -> join characters with a zero width joiner, required by + // some emoji patterns (:family_mwg: for example) + // Currently do not join them with a ZWJ as it breaks rendering of a lot of simple emojis + // like $EMOJI followed by skin tone modifier. + //return '{"' + shortname + '", "' + codepoints.join(byteArrayToCStringLiteral(unicodeCodepointToByteArray('200D'))) + '"}'; + return '{"' + shortname + '", "' + codepoints.join("") + '"}'; +}).join(", ") + '};\n\n'; + +// Generate C++ code for the reverse mapping (i.e. unicodeShortname_ ) +mapping += ' const std::unordered_map<std::string, std::string> EmojiMapper::unicodeShortname = [](){\n' + + ' std::unordered_map<std::string, std::string> unicodeSequenceToShortname;\n' + + ' const auto& shortnameToUnicodeMap = EmojiMapper::shortnameUnicode;\n' + + ' for (const auto& shortnameToUnicode : shortnameToUnicodeMap) {\n' + + ' unicodeSequenceToShortname[shortnameToUnicode.second] = shortnameToUnicode.first;\n' + + ' }\n' + + ' return unicodeSequenceToShortname;\n' + + ' }();\n\n'; + +// Generate C++ mapping for categories +var CategoryMapping = new Map(); +_(emojis).filter(function(data) { + // Filter out regional category. + return data.category != 'regional'; +}).filter(function(data) { + // Only use emojis with 2 or fewer codepoints, as Qt's harfbuzz version + // has issues rendering those as a single glyph. + return data.unicode.split("-").length < 3; +}).map(function(data, category) { + // Get codepoints + var codepoints = _(data.unicode.split("-")).map(function (code) { + //return "\\U" + "0".repeat(8-code.length) + code; + return byteArrayToCStringLiteral(unicodeCodepointToByteArray(code)); + }); + if (!CategoryMapping.has(data.category)) { + CategoryMapping = CategoryMapping.set(data.category, []); + } + //CategoryMapping.get(data.category).push(codepoints.join(byteArrayToCStringLiteral(unicodeCodepointToByteArray('200D')))); + // Currently do not join them with a ZWJ as it breaks rendering of a lot of simple emojis + // like $EMOJI followed by skin tone modifier.
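+ // Illustrative note (editor's sketch with assumed emojione sample values, not part of the original generator): for an entry with shortname ":dog:", unicode "1f436" and category "nature", codepoints.join("") evaluates to the C string literal \xf0\x9f\x90\xb6 (the UTF-8 bytes of U+1F436), and that literal is what gets pushed into the "nature" bucket below.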
+ CategoryMapping.get(data.category).push(codepoints.join("")); + //return 'categories_["' + data.category + '"].push_back("' + codepoints.join('\\U0000200D') + '");'; +}); + +mapping += ' const std::unordered_map<std::string, std::vector<std::string>> EmojiMapper::emojisInCategory = std::unordered_map<std::string, std::vector<std::string>>{'; +categoryMappings = []; +for (var category of CategoryMapping.keys()) { + categoryMappings.push('{"' + category + '", {' + CategoryMapping.get(category).map(function (literal) { + return '"' + literal + '"'; + }).join(', ') + '}}') +} +mapping += categoryMappings.join(', '); +mapping += '};\n' + +mapping.replace(/^\s*\n/gm, ''); + +// Generate C++ class from template +var input = fs.readFileSync("./EmojiMapper.cpp"); +var output = _(input.toString()).template()({ mapping: mapping }); + +// Write C++ class to file +var output_path = "../../SwifTools/EmojiMapper.cpp"; +fs.writeFileSync(output_path, output); + +console.log("Generated " + output_path); diff --git a/BuildTools/EmojisGenerator/package.json b/BuildTools/EmojisGenerator/package.json new file mode 100644 index 0000000..8b719ba --- /dev/null +++ b/BuildTools/EmojisGenerator/package.json @@ -0,0 +1,7 @@ +{ + "name": "emojione-android", + "version": "0.1.0", + "dependencies": { + "underscore": "^1.7.0" + } +} diff --git a/BuildTools/FilterScanBuildResults.py b/BuildTools/FilterScanBuildResults.py new file mode 100755 index 0000000..a4861ac --- /dev/null +++ b/BuildTools/FilterScanBuildResults.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python + +import os, os.path, sys, re + +resultsDir = sys.argv[1] +resultDirs = [ d for d in os.listdir(resultsDir) if os.path.isdir(os.path.join(resultsDir, d)) ] +resultDirs.sort() +if len(resultDirs) > 0 : + resultDir = os.path.join(resultsDir, resultDirs[-1]) + resultFileName = os.path.join(resultDir, "index.html") + resultData = [] + f = open(resultFileName, "r") + skipLines = 0 + for line in f.readlines() : + if skipLines > 0 : + skipLines -= 1 + else : + if ("3rdParty" in line or "SHA1.cpp" in line or "lua.c" in line) : + m = re.match(".*(report-.*\.html)", line) + os.remove(os.path.join(resultDir, m.group(1))) + skipLines = 2 + else : + resultData.append(line) + f.close() + + f = open(resultFileName, "w") + f.writelines(resultData) + f.close() diff --git a/BuildTools/FixIncludes.py b/BuildTools/FixIncludes.py new file mode 100755 index 0000000..e532464 --- /dev/null +++ b/BuildTools/FixIncludes.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python + +import sys; +import os; +import re; +from sets import Set + +filename = sys.argv[1] + +inPlace = False +if "-i" in sys.argv: + inPlace = True + +filename_base = os.path.basename(filename) +(filename_name, filename_ext) = os.path.splitext(filename_base) + +c_stdlib_headers = Set(["assert.h", "limits.h", "signal.h", "stdlib.h", "ctype.h", "locale.h", "stdarg.h", "string.h", "errno.h", "math.h", "stddef.h", "time.h", "float.h", "setjmp.h", "stdio.h", "iso646.h", "wchar.h", "wctype.h", "complex.h", "inttypes.h", "stdint.h", "tgmath.h", "fenv.h", "stdbool.h"]) + +cpp_stdlib_headers = Set(["algorithm", "fstream", "list", "regex", "typeindex", "array", "functional", "locale", "set", "typeinfo", "atomic", "future", "map", "sstream", "type_traits", "bitset", "initializer_list", "memory", "stack", "unordered_map", "chrono", "iomanip", "mutex", "stdexcept", "unordered_set", "codecvt", "ios", "new", "streambuf", "utility", "complex", "iosfwd", "numeric", "string", "valarray", "condition_variable", "iostream", "ostream", "strstream", 
"vector", "deque", "istream", "queue", "system_error", "exception", "iterator", "random", "thread", "forward_list", "limits", "ratio", "tuple", "cassert", "ciso646", "csetjmp", "cstdio", "ctime", "cctype", "climits", "csignal", "cstdlib", "cwchar", "cerrno", "clocale", "cstdarg", "cstring", "cwctype", "cfloat", "cmath", "cstddef"]) + +class HeaderType: + PRAGMA_ONCE, CORRESPONDING_HEADER, C_STDLIB, CPP_STDLIB, BOOST, QT, SWIFTEN_BASE_DEBUG, OTHER, SWIFTEN, LIMBER, SLIMBER, SWIFT_CONTROLLERS, SLUIFT, SWIFTOOLS, SWIFT = range(15) + +def findHeaderBlock(lines): + start = False + end = False + lastLine = None + + for idx, line in enumerate(lines): + if not start and line.startswith("#"): + start = idx + elif start and (not end) and (not line.startswith("#")) and line.strip(): + end = idx-1 + break + if not end: + end = len(lines) + return (start, end) + +def lineToFileName(line): + match = re.match( r'#include "(.*)"', line) + if match: + return match.group(1) + match = re.match( r'#include <(.*)>', line) + if match: + return match.group(1) + return False + +def fileNameToHeaderType(name): + if name.endswith("/" + filename_name + ".h"): + return HeaderType.CORRESPONDING_HEADER + + if name in c_stdlib_headers: + return HeaderType.C_STDLIB + + if name in cpp_stdlib_headers: + return HeaderType.CPP_STDLIB + + if name.startswith("boost"): + return HeaderType.BOOST + + if name.startswith("Q"): + return HeaderType.QT + + if name.startswith("Swiften/Base/Debug.h"): + return HeaderType.SWIFTEN_BASE_DEBUG + + if name.startswith("Swiften"): + return HeaderType.SWIFTEN + + if name.startswith("Limber"): + return HeaderType.LIMBER + + if name.startswith("Slimber"): + return HeaderType.SLIMBER + + if name.startswith("Swift/Controllers"): + return HeaderType.SWIFT_CONTROLLERS + + if name.startswith("Sluift"): + return HeaderType.SLUIFT + + if name.startswith("SwifTools"): + return HeaderType.SWIFTOOLS + + if name.startswith("Swift"): + return HeaderType.SWIFT + + return HeaderType.OTHER + +def serializeHeaderGroups(groups): + headerList = [] + for group in range(0, HeaderType.SWIFT + 1): + if group in groups: + # sorted and without duplicates + headers = sorted(list(set(groups[group]))) + headerList.extend(headers) + headerList.extend(["\n"]) + headerList.pop() + return headerList + +def overwriteFile(filename, content): + with open(filename, 'w') as f: + for line in content: + f.write(line) + +def cleanHeaderFile(content, headerStart, headerEnd, headerGroups): + del content[headerStart:headerEnd] + newHeaders = serializeHeaderGroups(headerGroups) + content[headerStart:1] = newHeaders + + if inPlace : + overwriteFile(filename, content) + else : + for line in content: + print line, + +def cleanImplementationFile(content, headerStart, headerEnd, headerGroups): + del content[headerStart:headerEnd] + newHeaders = serializeHeaderGroups(headerGroups) + content[headerStart:1] = newHeaders + + if inPlace : + overwriteFile(filename, content) + else : + for line in content: + print line, + + +containsComplexPreprocessorDirectives = False + +with open(filename) as f: + content = f.readlines() + +(headerStart, headerEnd) = findHeaderBlock(content) + +headerGroups = {} + +for line in content[headerStart:headerEnd]: + if line.strip(): + if line.strip().startswith("#pragma once"): + headerType = HeaderType.PRAGMA_ONCE + elif line.strip().startswith("#if") or line.strip().startswith("#def") or line.strip().startswith("#undef") or line.strip().startswith("#pragma "): + containsComplexPreprocessorDirectives = True + break + 
else: + #print line + headerType = fileNameToHeaderType(lineToFileName(line)) + + #filename = lineToFileName(line) + if headerType in headerGroups: + headerGroups[headerType].append(line) + else: + headerGroups[headerType] = [line] + +if containsComplexPreprocessorDirectives: + print("Cannot format headers containing preprocessor #if, #pragma, #define or #undef statements!") + exit(1) + +if filename_base.endswith(".h"): + if not HeaderType.PRAGMA_ONCE in headerGroups: + print("Missing #pragma once!") + exit(2) + cleanHeaderFile(content, headerStart, headerEnd, headerGroups) +elif filename_base.endswith(".cpp") or filename_base.endswith(".mm"): + cleanImplementationFile(content, headerStart, headerEnd, headerGroups) diff --git a/BuildTools/GenerateAppCastFeeds.py b/BuildTools/GenerateAppCastFeeds.py new file mode 100755 index 0000000..e7493df --- /dev/null +++ b/BuildTools/GenerateAppCastFeeds.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python2 + +# This script generates three app cast feeds for macOS Sparkle updates from Swift releases in the download folder on the Swift website. + +from xml.etree import ElementTree as ET +import argparse +import datetime +import email.utils as eut +import fnmatch +import jinja2 +import os.path +import re +import time +import urllib2 +import urlparse + +class Release: + def __init__(self, version, absoluteURL, sizeInBytes, date): + # This is the version string used for update detection. + self.fullVersion = version.split('-', 1)[1] + # This is a human readable version string, only used for presentation. + self.presentationVersion = version + self.url = absoluteURL + self.sizeInBytes = sizeInBytes + self.date = date + dateTumple = date.timetuple() + dateTimestamp = time.mktime(dateTumple) + self.dateString = eut.formatdate(dateTimestamp) + + def __str__(self): + return "Release(%s, %s, %s, %s)" % (self.fullVersion, self.url, self.sizeInBytes, self.date) + + def __repr__(self): + return "Release(%s, %s, %s, %s)" % (self.fullVersion, self.url, self.sizeInBytes, self.date) + +def getReleaseFromAbsoluteFilePath(absolutePath, downloadsFolder, releasesURL): + version = os.path.splitext(absolutePath.split('/')[-1])[0] + sizeInBytes = os.path.getsize(absolutePath) + date = datetime.datetime.fromtimestamp(os.path.getmtime(absolutePath)) + absoluteURL = urlparse.urljoin(releasesURL, os.path.relpath(absolutePath, downloadsFolder)) + return Release(version, absoluteURL, sizeInBytes, date) + +def getReleaseFromReleaseFolder(releaseFolder, downloadsFolder, releasesURL, extension): + release = None + regex = re.compile(fnmatch.translate(extension)) + + files = [f for f in os.listdir(releaseFolder) if os.path.isfile(os.path.join(releaseFolder, f))] + for file in files: + fileFullPath = os.path.join(releaseFolder, file) + if regex.match(fileFullPath): + release = getReleaseFromAbsoluteFilePath(fileFullPath, downloadsFolder, releasesURL) + return release + +def getReleaseFilesInReleasesFolder(releasesFolder, releasesURL, extension): + releases = [] + + dirs = [d for d in os.listdir(releasesFolder) if os.path.isdir(os.path.join(releasesFolder, d))] + for d in dirs: + release = getReleaseFromReleaseFolder(os.path.join(releasesFolder, d), releasesFolder, releasesURL, extension) + if release: + releases.append(release) + + return releases + +def getReleaseFilesInDevelopmentFolder(developmentMacFolder, developmentMacURL, extension): + extensionRegex = re.compile(fnmatch.translate(extension)) + devPatternRegex = re.compile(".+-dev\d+") + + releases = [] + + files = [f for f in 
os.listdir(developmentMacFolder) if os.path.isfile(os.path.join(developmentMacFolder, f))] + for f in files: + # Only use dev builds from the development folder. + if devPatternRegex.match(f): + fileFullPath = os.path.join(developmentMacFolder, f) + if extensionRegex.match(fileFullPath): + releases.append(getReleaseFromAbsoluteFilePath(fileFullPath, developmentMacFolder, developmentMacURL)) + + return releases + +def writeAppcastFile(filename, title, description, regexPattern, appcastURL, releases): + template = jinja2.Template('''<rss xmlns:sparkle="http://www.andymatuschak.org/xml-namespaces/sparkle" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0"> + <channel> + <title>{{ title }}</title> + <link>{{ appcast_url }}</link> + <description>{{ description }}</description> + <language>en</language> + {% for item in releases %}<item> + <title>Swift version {{ item.fullVersion }}</title> + <pubDate>{{ item.dateString }}</pubDate> + <enclosure url="{{ item.url }}" + sparkle:version="{{ item.fullVersion }}" + sparkle:shortVersionString="{{ item.presentationVersion }}" + length="{{ item.sizeInBytes }}" + type="application/octet-stream" /> + </item> + {% endfor %}</channel> +</rss>''') + + matchingReleases = [i for i in releases if re.match(regexPattern, i.fullVersion)] + matchingReleases = matchingReleases[:2] # only include the first two matches in the appcast + + appcastContent = template.render(title=title, appcast_url=appcastURL, description=description, releases=matchingReleases) + + contentParsesOK = False + try: + x = ET.fromstring(appcastContent) + contentParsesOK = True + except : + contentParsesOK = False + + if contentParsesOK: + with open(filename, 'w') as file: + file.write(appcastContent) + else: + print("Failed to generate valid appcast feed %s." % filename) + +parser = argparse.ArgumentParser(description='Generate stable/testing/development appcast feeds for Sparkle updater.') +parser.add_argument('downloadsFolder', type=str, help="e.g. /Users/foo/website/downloads/") +parser.add_argument('downloadsURL', type=str, help="e.g. https://swift.im/downloads/") +parser.add_argument('outputFolder', type=str, help="e.g. 
/Users/foo/website/downloads/") + +args = parser.parse_args() + +releasesPath = os.path.join(args.downloadsFolder, "releases") +developmentMacPath = os.path.join(args.downloadsFolder, "development", "mac") + +manualReleases = getReleaseFilesInReleasesFolder(releasesPath, urlparse.urljoin(args.downloadsURL, "releases/"), "*.dmg") +manualReleases.sort(key=lambda release: release.date, reverse=True) + +automaticReleases = manualReleases +automaticReleases.extend(getReleaseFilesInDevelopmentFolder(developmentMacPath, urlparse.urljoin(args.downloadsURL, "development/mac/"), "*.dmg")) +automaticReleases.sort(key=lambda release: release.date, reverse=True) + + +writeAppcastFile(filename=os.path.join(args.outputFolder, "swift-stable-appcast-mac.xml"), + title="Swift Stable Releases", + description="", + regexPattern="^\d+(\.\d+)?(\.\d+)?$", + appcastURL=urlparse.urljoin(args.downloadsURL, "swift-stable-appcast-mac.xml"), + releases=manualReleases) +writeAppcastFile(filename=os.path.join(args.outputFolder, "swift-testing-appcast-mac.xml"), + title="Swift Testing Releases", + description="", + regexPattern="^\d+(\.\d+)?(\.\d+)?(beta\d+)?(rc\d+)?$", + appcastURL=urlparse.urljoin(args.downloadsURL, "swift-testing-appcast-mac.xml"), + releases=manualReleases) +writeAppcastFile(filename=os.path.join(args.outputFolder, "swift-development-appcast-mac.xml"), + title="Swift Development Releases", + description="", + regexPattern="^\d+(\.\d+)?(\.\d+)?(alpha\d*)?(beta\d+)?(rc\d+)?(-dev\d+)?$", + appcastURL=urlparse.urljoin(args.downloadsURL, "swift-development-appcast-mac.xml"), + releases=automaticReleases) diff --git a/BuildTools/GetBuildVersion.py b/BuildTools/GetBuildVersion.py index 007fec8..be7cc03 100755 --- a/BuildTools/GetBuildVersion.py +++ b/BuildTools/GetBuildVersion.py @@ -10,12 +10,12 @@ only_major = False if "--major" in sys.argv : - only_major = True + only_major = True if only_major : - v = Version.getBuildVersion(os.path.dirname(sys.argv[0] + "/.."), sys.argv[1]) - version_match = re.match("(\d+)\.(\d+).*", v) - if version_match : - print version_match.group(1) - else : - print "0" + v = Version.getBuildVersion(os.path.dirname(sys.argv[0]) + "/..", sys.argv[1]) + version_match = re.match("(\d+)\.(\d+).*", v) + if version_match : + print version_match.group(1) + else : + print("0") else : - print Version.getBuildVersion(os.path.dirname(sys.argv[0] + "/.."), sys.argv[1]) + print Version.getBuildVersion(os.path.dirname(sys.argv[0]) + "/..", sys.argv[1]) diff --git a/BuildTools/Git/Hooks/pre-commit b/BuildTools/Git/Hooks/pre-commit index 28bebfc..987f127 100755 --- a/BuildTools/Git/Hooks/pre-commit +++ b/BuildTools/Git/Hooks/pre-commit @@ -6,3 +6,15 @@ IFS=' echo "Checking tabs & copyrights ..." +for file in $(git diff --cached --diff-filter=A --name-only); do + if [ ! -f $file ]; then + continue + fi + if ! BuildTools/Copyrighter.py check-copyright $file; then + echo "ERROR: '$file' has a copyright error. Aborting commit." + exit -1 + fi +done for file in $(git diff --cached --name-only); do + if [ ! -f $file ]; then + continue + fi if ! BuildTools/CheckTabs.py $file; then @@ -11,4 +23,4 @@ for file in $(git diff --cached --name-only); do fi - if ! BuildTools/Copyrighter.py check-copyright $file; then - echo "ERROR: '$file' has a copyright error. Aborting commit." + if ! BuildTools/CheckHeaders.py $file; then + echo "ERROR: '$file' failed header sanity test. Aborting commit." 
exit -1 diff --git a/BuildTools/Gource/GetGravatars.py b/BuildTools/Gource/GetGravatars.py index 8adb13c..17198aa 100755 --- a/BuildTools/Gource/GetGravatars.py +++ b/BuildTools/Gource/GetGravatars.py @@ -7,4 +7,4 @@ GRAVATAR_URL = "http://www.gravatar.com/avatar/%(id)s?d=404" if len(sys.argv) != 2 : - print "Usage: " + sys.argv[0] + " <output-dir>" - sys.exit(-1) + print("Usage: " + sys.argv[0] + " <output-dir>") + sys.exit(-1) @@ -16,8 +16,8 @@ p = subprocess.Popen("git log --pretty=format:'%ae|%an'", shell=True, stdin=subp for line in p.stdout.readlines() : - author_components = line.rstrip().split("|") - authors[author_components[0]] = author_components[1] + author_components = line.rstrip().split("|") + authors[author_components[0]] = author_components[1] p.stdin.close() if p.wait() != 0 : - print "Error" - sys.exit(-1) + print("Error") + sys.exit(-1) @@ -25,25 +25,25 @@ if p.wait() != 0 : if not os.path.isdir(output_dir) : - os.makedirs(output_dir) + os.makedirs(output_dir) for email, name in authors.items() : - print "Processing avatar for " + name + " <" + email + ">" - filename = os.path.join(output_dir, name + ".png") - if os.path.isfile(filename) : - print "-> Already there. Skipping." - continue + print("Processing avatar for " + name + " <" + email + ">") + filename = os.path.join(output_dir, name + ".png") + if os.path.isfile(filename) : + print("-> Already there. Skipping.") + continue - m = hashlib.md5() - m.update(email) - url = GRAVATAR_URL % {"id" : m.hexdigest()} - print "- Downloading " + url - f = urllib.urlopen(url) - input = None - if f.getcode() == 200 : - input = f.read() - f.close() - if input : - print "- Saving file " + filename - f = open(filename, "w") - f.write(input) + m = hashlib.md5() + m.update(email) + url = GRAVATAR_URL % {"id" : m.hexdigest()} + print("- Downloading " + url) + f = urllib.urlopen(url) + input = None + if f.getcode() == 200 : + input = f.read() f.close() - else : - print "- No Gravatar found" + if input : + print("- Saving file " + filename) + f = open(filename, "w") + f.write(input) + f.close() + else : + print("- No Gravatar found") diff --git a/BuildTools/InstallSwiftDependencies.sh b/BuildTools/InstallSwiftDependencies.sh new file mode 100755 index 0000000..438e395 --- /dev/null +++ b/BuildTools/InstallSwiftDependencies.sh @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +# This script installs the external dependencies to build Swift with Qt5. 
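+# Illustrative usage (editor's sketch, assuming invocation from the repository root; extra arguments are forwarded only to the zypper calls below): +# ./BuildTools/InstallSwiftDependencies.sh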
+ +SYSTEM_NAME=$(uname) + +if [ "$SYSTEM_NAME" == "Linux" ] +then + # handle linux distributions + SYSTEM_DISTRO=$(lsb_release -i -s) + if [ "$SYSTEM_DISTRO" == "Debian" ] + then + sudo apt-get install build-essential pkg-config libssl-dev qt5-default libqt5x11extras5-dev libqt5webkit5-dev qtmultimedia5-dev qttools5-dev-tools qt5-image-formats-plugins libqt5svg5-dev libminiupnpc-dev libnatpmp-dev libhunspell-dev + elif [ "$SYSTEM_DISTRO" == "Ubuntu" ] || [ "$SYSTEM_DISTRO" == "LinuxMint" ] || [ "$SYSTEM_DISTRO" == "neon" ] + then + sudo apt-get install build-essential pkg-config libssl-dev qt5-default libqt5x11extras5-dev libqt5webkit5-dev qtmultimedia5-dev qttools5-dev-tools qt5-image-formats-plugins libqt5svg5-dev libminiupnpc-dev libnatpmp-dev libhunspell-dev + elif [ "$SYSTEM_DISTRO" == "Arch" ] + then + sudo pacman -S qt5-base qt5-x11extras qt5-webkit qt5-multimedia qt5-tools qt5-svg hunspell + elif [ "$SYSTEM_DISTRO" == "openSUSE project" ] || [ "$SYSTEM_DISTRO" == "openSUSE" ] || [ "$SYSTEM_DISTRO" == "SUSE LINUX" ] + then + sudo zypper "$@" in --type pattern devel_basis + sudo zypper "$@" in pkg-config libopenssl-devel libQt5Core-devel libQt5WebKit5-devel libQt5WebKitWidgets-devel libqt5-qtmultimedia-devel libqt5-qtx11extras-devel libqt5-qttools-devel libQt5Gui-devel libQt5Network-devel libQt5DBus-devel libQt5Svg-devel libQt5Svg5 python-xml hunspell-devel + elif [ "$SYSTEM_DISTRO" == "Fedora" ] + then + sudo dnf groups install "C Development Tools and Libraries" + sudo dnf install openssl-devel qt5-qtbase-devel qt5-linguist qt5-qtwebkit-devel qt5-qtmultimedia-devel qt5-qtx11extras-devel qt5-qtsvg-devel hunspell-devel + elif [ "$SYSTEM_DISTRO" == "Sabayon" ] + then + sudo -E equo install sys-devel/autoconf sys-devel/automake sys-devel/gcc sys-devel/g++ virtual/os-headers virtual/pkgconfig sys-libs/glibc dev-qt/linguist-tools dev-qt/qtcore dev-qt/qtmultimedia dev-qt/qtdbus dev-qt/qtgui dev-qt/qtimageformats dev-qt/qtsvg dev-qt/qtwebkit dev-qt/qtwidgets dev-qt/qtx11extras dev-libs/openssl net-libs/miniupnpc net-libs/libnatpmp app-text/hunspell + else + echo "Unsupported Linux distribution." + fi +else + echo "Unsupported system." 
+fi diff --git a/BuildTools/MSVS/.gitignore b/BuildTools/MSVS/.gitignore deleted file mode 100644 index 95a4834..0000000 --- a/BuildTools/MSVS/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -*.suo -*.ncp -Slimber -Swift diff --git a/BuildTools/MSVS/GenerateProjects.py b/BuildTools/MSVS/GenerateProjects.py deleted file mode 100644 index d13df08..0000000 --- a/BuildTools/MSVS/GenerateProjects.py +++ /dev/null @@ -1,100 +0,0 @@ -import os, os.path - -projects = [("Swift", "Swift\QtUI\Swift.exe"), ("Slimber", "Slimber\Qt\Slimber.exe")] - -for (project, outputbin) in projects : - if not os.path.exists(project) : - os.mkdir(project) - output = open(os.path.join(project, project + ".vcproj"), "w") - - headers = [] - sources = [] - for root, dirs, files in os.walk(os.path.join("..", "..", project)) : - for file in files : - if file.endswith(".h") : - headers.append('<File RelativePath="' + os.path.join("..", root, file) + '" />') - elif file.endswith(".cpp") : - sources.append('<File RelativePath="' + os.path.join("..", root, file) + '" />') - - output.write("""<?xml version="1.0" encoding="Windows-1252"?> -<VisualStudioProject - ProjectType="Visual C++" - Version="9.00" - Name="%(project)s" - Keyword="MakeFileProj" - TargetFrameworkVersion="196613" - > - <Platforms> - <Platform - Name="Win32" - /> - </Platforms> - <ToolFiles> - </ToolFiles> - <Configurations> - <Configuration - Name="Debug|Win32" - OutputDirectory="$(ConfigurationName)" - IntermediateDirectory="$(ConfigurationName)" - ConfigurationType="0" - > - <Tool - Name="VCNMakeTool" - BuildCommandLine="cd ..\..\..\ && scons debug=1 %(project)s" - ReBuildCommandLine="" - CleanCommandLine="cd ..\..\..\ && scons -c debug=1 %(project)s" - Output="..\..\..\%(output)s" - PreprocessorDefinitions="WIN32;_DEBUG" - IncludeSearchPath="" - ForcedIncludes="" - AssemblySearchPath="" - ForcedUsingAssemblies="" - CompileAsManaged="" - /> - </Configuration> - <Configuration - Name="Release|Win32" - OutputDirectory="$(ConfigurationName)" - IntermediateDirectory="$(ConfigurationName)" - ConfigurationType="0" - > - <Tool - Name="VCNMakeTool" - BuildCommandLine="cd ..\..\..\ && scons %(project)s" - ReBuildCommandLine="" - CleanCommandLine="cd ..\..\..\ && scons -c %(project)s" - Output="..\..\..\%(output)s" - PreprocessorDefinitions="WIN32;NDEBUG" - IncludeSearchPath="" - ForcedIncludes="" - AssemblySearchPath="" - ForcedUsingAssemblies="" - CompileAsManaged="" - /> - </Configuration> - </Configurations> - <References> - </References> - <Files> - <Filter - Name="Source Files" - Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx" - > - %(sources)s - </Filter> - <Filter - Name="Header Files" - Filter="h;hpp;hxx;hm;inl;inc;xsd" - > - %(headers)s - </Filter> - <Filter - Name="Resource Files" - Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav" - > - </Filter> - </Files> - <Globals> - </Globals> -</VisualStudioProject>""" % { "project": project, "output" : outputbin, "headers" : '\n'.join(headers), "sources": '\n'.join(sources) }) - output.close() diff --git a/BuildTools/MSVS/Swift.sln b/BuildTools/MSVS/Swift.sln deleted file mode 100644 index 2724f81..0000000 --- a/BuildTools/MSVS/Swift.sln +++ /dev/null @@ -1,26 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 10.00 -# Visual C++ Express 2008 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Swift", "Swift\Swift.vcproj", "{C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}" -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Slimber", "Slimber\Slimber.vcproj", 
"{597242B2-A667-47A1-B69E-D2C4281183D0}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Win32 = Debug|Win32 - Release|Win32 = Release|Win32 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}.Debug|Win32.ActiveCfg = Debug|Win32 - {C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}.Debug|Win32.Build.0 = Debug|Win32 - {C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}.Release|Win32.ActiveCfg = Release|Win32 - {C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}.Release|Win32.Build.0 = Release|Win32 - {597242B2-A667-47A1-B69E-D2C4281183D0}.Debug|Win32.ActiveCfg = Debug|Win32 - {597242B2-A667-47A1-B69E-D2C4281183D0}.Debug|Win32.Build.0 = Debug|Win32 - {597242B2-A667-47A1-B69E-D2C4281183D0}.Release|Win32.ActiveCfg = Release|Win32 - {597242B2-A667-47A1-B69E-D2C4281183D0}.Release|Win32.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/BuildTools/ProjectCopyrightSummary.py b/BuildTools/ProjectCopyrightSummary.py new file mode 100755 index 0000000..f6b18a0 --- /dev/null +++ b/BuildTools/ProjectCopyrightSummary.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +import fnmatch +import os +import re +from sets import Set + +projectSourceFiles = [] +for directory in ['Documentation', 'Limber', 'Packages', 'QA', 'Slimber', 'Sluift', 'Swift', 'Swiften', 'SwifTools']: + for root, dirnames, filenames in os.walk(directory): + for filename in filenames: + if any(fnmatch.fnmatch(filename, pattern) for pattern in ['*.cpp', '*.h', '*.mm', '*.m', '*.c']): + projectSourceFiles.append(os.path.join(root, filename)) + + +def CopyrightNames(filename): + names = [] + with open(filename, 'r') as file: + data = file.read() + p = re.compile(r'\* Copyright.*\d\d\d\d (.*?)\.?$', re.MULTILINE) + names = re.findall(p, data) + return names + + +names = Set() +for file in projectSourceFiles: + copyrightNames = CopyrightNames(file) + for name in copyrightNames: + names.add(name) + +for name in sorted(list(names)): + print(name) diff --git a/BuildTools/SCons/SConscript.boot b/BuildTools/SCons/SConscript.boot index 361004f..7b29c06 100644 --- a/BuildTools/SCons/SConscript.boot +++ b/BuildTools/SCons/SConscript.boot @@ -10,8 +10,11 @@ vars.Add('cc', "C compiler") vars.Add('cxx', "C++ compiler") -vars.Add('ccflags', "Extra C(++) compiler flags") +vars.Add('ccflags', "Extra C/C++/ObjC compiler flags") +vars.Add('cxxflags', "Extra C++ compiler flags") vars.Add('link', "Linker") vars.Add('linkflags', "Extra linker flags") +vars.Add('ar', "Archiver (ar or lib)") +if os.name == "nt": + vars.Add('mt', "manifest tool") vars.Add(BoolVariable("ccache", "Use CCache", "no")) -vars.Add(BoolVariable("distcc", "Use DistCC", "no")) -vars.Add('distcc_hosts', "DistCC hosts (overrides DISTCC_HOSTS)") +vars.Add(BoolVariable("distcc", "Use distcc", "no")) vars.Add(EnumVariable("test", "Compile and run tests", "none", ["none", "all", "unit", "system"])) @@ -22,3 +25,5 @@ vars.Add(BoolVariable("assertions", "Compile with assertions", "yes")) vars.Add(BoolVariable("max_jobs", "Build with maximum number of parallel jobs", "no")) -vars.Add(EnumVariable("target", "Choose a target platform for compilation", "native", ["native", "iphone-simulator", "iphone-device", "xcode"])) +vars.Add(EnumVariable("target", "Choose a target platform for compilation", "native", ["native", "iphone-simulator", "iphone-device", "xcode", "android"])) +vars.Add('android_toolchain', "Path to Android 
toolchain") +vars.Add('android_sdk_bin', "Path to Android SDK's tools directory") vars.Add(BoolVariable("swift_mobile", "Build mobile Swift", "no")) @@ -26,29 +31,70 @@ vars.Add(BoolVariable("swiften_dll", "Build Swiften as dynamically linked librar if os.name != "nt" : - vars.Add(BoolVariable("coverage", "Compile with coverage information", "no")) + vars.Add(BoolVariable("coverage", "Compile with coverage information", "no")) if os.name == "posix" : - vars.Add(BoolVariable("valgrind", "Run tests with valgrind", "no")) + vars.Add(BoolVariable("valgrind", "Run tests with valgrind", "no")) if os.name == "mac" or (os.name == "posix" and os.uname()[0] == "Darwin"): - vars.Add(BoolVariable("universal", "Create universal binaries", "no")) - vars.Add(BoolVariable("mac105", "Link against the 10.5 frameworks", "no")) - vars.Add(BoolVariable("mac106", "Link against the 10.6 frameworks", "no")) + vars.Add(BoolVariable("universal", "Create universal binaries", "no")) + vars.Add(BoolVariable("mac105", "Link against the 10.5 frameworks", "no")) + vars.Add(BoolVariable("mac106", "Link against the 10.6 frameworks", "no")) if os.name == "nt" : - vars.Add(PathVariable("vcredist", "MSVC redistributable dir", None, PathVariable.PathAccept)) + vars.Add(PathVariable("vcredist", "MSVC redistributable path", None, PathVariable.PathAccept)) + vars.Add(PathVariable("vcredistdir", "MSVC redistributable dir", None, PathVariable.PathAccept)) if os.name == "nt" : - vars.Add(PathVariable("wix_bindir", "Path to WiX binaries", "", PathVariable.PathAccept)) + vars.Add(PathVariable("wix_bindir", "Path to WiX binaries", "", PathVariable.PathAccept)) if os.name == "nt" : - vars.Add(PackageVariable("bonjour", "Bonjour SDK location", "yes")) + vars.Add(PackageVariable("bonjour", "Bonjour SDK location", "yes")) +vars.Add(EnumVariable("tls_backend", "Choose the TLS backend", "native", ["native", "openssl", "openssl_bundled"])) vars.Add(PackageVariable("openssl", "OpenSSL location", "yes")) +vars.Add("openssl_libnames", "Comma-separated openssl library names to override defaults", None) +vars.Add("openssl_include", "Location of OpenSSL include files (if not under (openssl)/include)", None) +vars.Add("openssl_libdir", "Location of OpenSSL library files (if not under (openssl)/lib)", None) +vars.Add(PackageVariable("hunspell_prefix", "Hunspell location", False)) +vars.Add(BoolVariable("hunspell_enable", "Build with Hunspell support", True)) vars.Add(PathVariable("boost_includedir", "Boost headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("boost_libdir", "Boost library location", None, PathVariable.PathAccept)) +vars.Add(BoolVariable("boost_bundled_enable", "Allow use of bundled Boost as last resort", "true")) +vars.Add(BoolVariable("boost_force_bundled", "Force use of bundled Boost.", False)) +vars.Add(BoolVariable("allow_boost_1_64", "Allow use of Boost 1.64", False)) +vars.Add(PathVariable("zlib_includedir", "Zlib headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("zlib_libdir", "Zlib library location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("zlib_libfile", "Zlib library file (full path to file)", None, PathVariable.PathAccept)) +vars.Add(BoolVariable("zlib_bundled_enable", "Allow use of bundled Zlib as last resort", "true")) +vars.Add(BoolVariable("try_gconf", "Try configuring for GConf?", "true")) +vars.Add(BoolVariable("try_libxml", "Try configuring for libXML?", "true")) +vars.Add(BoolVariable("try_expat", "Try configuring for expat?", "true")) 
vars.Add(PathVariable("expat_includedir", "Expat headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("expat_libdir", "Expat library location", None, PathVariable.PathAccept)) -vars.Add("expat_libname", "Expat library name", "libexpat" if os.name == "nt" else "expat") +vars.Add("expat_libname", "Expat library name", os.name == "nt" and "libexpat" or "expat") vars.Add(PackageVariable("icu", "ICU library location", "no")) +vars.Add(BoolVariable("libidn_bundled_enable", "Allow use of bundled LibIDN", "true")) +vars.Add(BoolVariable("try_libidn", "Try configuring for LibIDN?", "true")) vars.Add(PathVariable("libidn_includedir", "LibIDN headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("libidn_libdir", "LibIDN library location", None, PathVariable.PathAccept)) -vars.Add("libidn_libname", "LibIDN library name", "libidn" if os.name == "nt" else "idn") +vars.Add("libidn_libname", "LibIDN library name", os.name == "nt" and "libidn" or "idn") +vars.Add(BoolVariable("need_idn", "Whether an IDN library is required. Without this, most internal binaries will fail", "true")) + +vars.Add(PathVariable("libminiupnpc_includedir", "LibMiniUPNPC headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("libminiupnpc_libdir", "LibMiniUPNPC library location", None, PathVariable.PathAccept)) +vars.Add("libminiupnpc_libname", "LibMiniUPNPC library name", os.name == "nt" and "libminiupnpc" or "miniupnpc") +vars.Add(BoolVariable("libminiupnpc_force_bundled", "Force use of bundled LibMiniUPNPC", False)) + +vars.Add(PathVariable("libnatpmp_includedir", "LibNATPMP headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("libnatpmp_libdir", "LibNATPMP library location", None, PathVariable.PathAccept)) +vars.Add("libnatpmp_libname", "LibNATPMP library name", os.name == "nt" and "libnatpmp" or "natpmp") +vars.Add(BoolVariable("libnatpmp_force_bundled", "Force use of bundled LibNATPMP", False)) + vars.Add(PathVariable("sqlite_includedir", "SQLite headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("sqlite_libdir", "SQLite library location", None, PathVariable.PathAccept)) -vars.Add("sqlite_libname", "SQLite library name", "libsqlite3" if os.name == "nt" else "sqlite3") +vars.Add("sqlite_libname", "SQLite library name", os.name == "nt" and "libsqlite3" or "sqlite3") vars.Add("sqlite_force_bundled", "Force use of the bundled SQLite", None) +vars.Add(PathVariable("lua_includedir", "Lua headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("lua_libdir", "Lua library location", None, PathVariable.PathAccept)) +vars.Add("lua_libname", "Lua library name", os.name == "nt" and "liblua" or "lua") +vars.Add("lua_force_bundled", "Force use of the bundled Lua", None) + +vars.Add(PathVariable("editline_includedir", "Readline headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("editline_libdir", "Readline library location", None, PathVariable.PathAccept)) +vars.Add("editline_libname", "Readline library name", os.name == "nt" and "libedit" or "edit") + +vars.Add(BoolVariable("try_avahi", "Try configuring for avahi?", "true")) vars.Add(PathVariable("avahi_includedir", "Avahi headers location", None, PathVariable.PathAccept)) @@ -56,2 +102,3 @@ vars.Add(PathVariable("avahi_libdir", "Avahi library location", None, PathVariab vars.Add(PathVariable("qt", "Qt location", "", PathVariable.PathAccept)) +vars.Add(BoolVariable("qt5", "Compile in Qt5 mode", "yes")) vars.Add(PathVariable("docbook_xml", "DocBook 
XML", None, PathVariable.PathAccept)) @@ -60,4 +107,20 @@ vars.Add(BoolVariable("build_examples", "Build example programs", "yes")) vars.Add(BoolVariable("enable_variants", "Build in a separate dir under build/, depending on compile flags", "no")) +vars.Add(BoolVariable("experimental_ft", "Build experimental file transfer", "yes")) vars.Add(BoolVariable("experimental", "Build experimental features", "no")) vars.Add(BoolVariable("set_iterator_debug_level", "Set _ITERATOR_DEBUG_LEVEL=0", "yes")) +vars.Add(EnumVariable("msvc_runtime", "Choose MSVC runtime library", "MD", ["MT", "MTd", "MD", "MDd"])) +vars.Add(BoolVariable("unbound", "Build bundled ldns and unbound. Use them for DNS lookup.", "no")) +vars.Add(BoolVariable("check_headers", "Independently build compilation units for all Swiften headers for detecting missing dependencies.", "no")) +vars.Add("win_target_arch", "Target architecture for Windows builds. x86 for 32-bit (default) or x86_64 for 64-bit.", "x86") +vars.Add(BoolVariable("install_git_hooks", "Install git hooks", "true")) +vars.Add(BoolVariable("help2man", "Run help2man to geneate man pages", "false")) + +# Code Signing Options +vars.Add("codesign_identity", "macOS code signing identity to be passed to codesign when building the distribution package. Must match the Commen Name of the Subject of the code signing certificate.", "") +vars.Add("signtool_key_pfx", "The keyfile (.pfx) that will be used to sign the Windows installer.", None) +vars.Add("signtool_timestamp_url", "The timestamp server that will be queried for a signed time stamp in the signing process.", None) + +# Automatic Software Update Options +vars.Add(PathVariable("sparkle_public_dsa_key", "Optional path to a public DSA key used to verify Sparkle software updates. Without specifiying this option, the app needs to be code signed for Sparkle to work.", None, PathVariable.PathIsFile)) + @@ -68,9 +131,19 @@ vars.Add(BoolVariable("set_iterator_debug_level", "Set _ITERATOR_DEBUG_LEVEL=0", env_ENV = { - 'PATH' : os.environ['PATH'], - 'LD_LIBRARY_PATH' : os.environ.get("LD_LIBRARY_PATH", ""), + 'PATH' : os.environ['PATH'], + 'LD_LIBRARY_PATH' : os.environ.get("LD_LIBRARY_PATH", ""), + 'TERM' : os.environ.get("TERM", ""), + 'SDKROOT' : os.environ.get("SDKROOT", ""), } + if "MSVC_VERSION" in ARGUMENTS : - env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"]) + env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None)) + env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None), TARGET_ARCH=env["win_target_arch"]) else : - env = Environment(ENV = env_ENV, variables = vars) + env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None)) + env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None), TARGET_ARCH=env["win_target_arch"]) + +if env["PLATFORM"] == "win32" and env["qt"] : + # This adds Qt's binary directory at the end of the PATH variable, so that + # windeployqt.exe is automatically detected and used in WindowsBundle.py. 
+ os.environ["PATH"] = env['ENV']['PATH'] + ";" + env['qt'] + "\\bin" @@ -78,4 +151,14 @@ Help(vars.GenerateHelpText(env)) +# Workaround for missing Visual Studio 2012 support in SCons +# Requires scons to be run from a VS2012 console +if env.get("MSVC_VERSION", "").startswith("11.0") : + env["ENV"]["LIB"] = os.environ["LIB"] + env["ENV"]["INCLUDE"] = os.environ["INCLUDE"] + # Default environment variables -env["PLATFORM_FLAGS"] = {} +env["PLATFORM_FLAGS"] = { + "LIBPATH": [], + "LIBS": [], + "FRAMEWORKS": [], +} @@ -87,9 +170,9 @@ env.Tool("Flags", toolpath = ["#/BuildTools/SCons/Tools"]) if env["PLATFORM"] == "darwin" : - env.Tool("Nib", toolpath = ["#/BuildTools/SCons/Tools"]) - env.Tool("AppBundle", toolpath = ["#/BuildTools/SCons/Tools"]) + env.Tool("Nib", toolpath = ["#/BuildTools/SCons/Tools"]) + env.Tool("AppBundle", toolpath = ["#/BuildTools/SCons/Tools"]) if env["PLATFORM"] == "win32" : - env.Tool("WindowsBundle", toolpath = ["#/BuildTools/SCons/Tools"]) - #So we don't need to escalate with UAC - if "TMP" in os.environ.keys() : - env['ENV']['TMP'] = os.environ['TMP'] + env.Tool("WindowsBundle", toolpath = ["#/BuildTools/SCons/Tools"]) + #So we don't need to escalate with UAC + if "TMP" in os.environ.keys() : + env['ENV']['TMP'] = os.environ['TMP'] env.Tool("SLOCCount", toolpath = ["#/BuildTools/SCons/Tools"]) @@ -98,37 +181,71 @@ env.Tool("SLOCCount", toolpath = ["#/BuildTools/SCons/Tools"]) if env["max_jobs"] : - try : - import multiprocessing - SetOption("num_jobs", multiprocessing.cpu_count()) - except NotImplementedError : - pass - except ImportError : - pass - -# Default compiler flags -if env.get("distcc", False) : - env["ENV"]["HOME"] = os.environ["HOME"] - env["ENV"]["DISTCC_HOSTS"] = os.environ.get("DISTCC_HOSTS", "") - if "distcc_hosts" in env : - env["ENV"]["DISTCC_HOSTS"] = env["distcc_hosts"] - env["CC"] = "distcc gcc" - env["CXX"] = "distcc g++" + try : + import multiprocessing + SetOption("num_jobs", multiprocessing.cpu_count()) + except NotImplementedError : + pass + except ImportError : + pass + +# Set speed options +env.Decider("MD5-timestamp") +env.SetOption("max_drift", 1) +env.SetOption("implicit_cache", True) + +# Set the default compiler to CLang on OS X, and set the necessary flags +if env["PLATFORM"] == "darwin" and env["target"] == "native" : + if "cc" not in env : + env["CC"] = "clang" + if platform.machine() == "x86_64" : + env["CCFLAGS"] = ["-arch", "x86_64"] + if "cxx" not in env : + env["CXX"] = "clang++" + if "link" not in env : + # Use clang++ instead of clang, otherwise XCode's clang will cause linking errors due to missing C++ standard lib. 
+ env["LINK"] = "clang++" + if platform.machine() == "x86_64" : + env.Append(LINKFLAGS = ["-arch", "x86_64"]) + +# Set QT_SELECT variable to enable building on systems that have Qt4 and Qt5 installed and use qtselect +if env["PLATFORM"] != "darwin" and env["PLATFORM"] != "win32" : + if env["qt5"] : + env["ENV"]["QT_SELECT"] = "qt5" + else: + env["ENV"]["QT_SELECT"] = "qt4" + +# Set QT_SELECT variable to enable building on systems that have Qt4 and Qt5 installed and use qtselect +if env["PLATFORM"] != "darwin" and env["PLATFORM"] != "win32" : + if env["qt5"] : + env["ENV"]["QT_SELECT"] = "qt5" + else: + env["ENV"]["QT_SELECT"] = "qt4" + +# Check whether we are running inside scan-build, and override compiler if so +if "CCC_ANALYZER_HTML" in os.environ : + for key, value in os.environ.items() : + if key.startswith("CCC_") or key.startswith("CLANG") : + env["ENV"][key] = value + env["CC"] = os.environ["CC"] + env["CXX"] = os.environ["CXX"] + +# Override the compiler with custom variables set at config time if "cc" in env : - env["CC"] = env["cc"] + env["CC"] = env["cc"] if "cxx" in env : - env["CXX"] = env["cxx"] -ccflags = env.get("ccflags", []) -if isinstance(ccflags, str) : - # FIXME: Make the splitting more robust - env["CCFLAGS"] = ccflags.split(" ") -else : - env["CCFLAGS"] = ccflags + env["CXX"] = env["cxx"] +if "ar" in env : + env["AR"] = env["ar"] + if "link" in env : - env["SHLINK"] = env["link"] - env["LINK"] = env["link"] -linkflags = env.get("linkflags", []) -if isinstance(linkflags, str) : - # FIXME: Make the splitting more robust - env["LINKFLAGS"] = linkflags.split(" ") -else : - env["LINKFLAGS"] = linkflags + env["SHLINK"] = env["link"] + env["LINK"] = env["link"] + +# Process user-defined external flags +for flags_type in ["ccflags", "cxxflags", "linkflags"] : + if flags_type in env : + if isinstance(env[flags_type], str) : + # FIXME: Make the splitting more robust + env[flags_type.upper()] = env[flags_type].split(" ") + else : + env[flags_type.upper()] = env[flags_type] # This isn't a real flag (yet) AFAIK. 
Be sure to append it to the CXXFLAGS @@ -137,38 +254,47 @@ env["OBJCCFLAGS"] = [] +if env["PLATFORM"] != "win32" : + env.AppendUnique(CCFLAGS=['-isystem', Dir('#').abspath + '/Backport/']) + +# Compile code as C++11 +if env["PLATFORM"] != "win32" : + env.Append(CXXFLAGS = ["-std=c++11"]) + if env["optimize"] : - if env["PLATFORM"] == "win32" : - env.Append(CCFLAGS = ["/O2"]) - else : - env.Append(CCFLAGS = ["-O2"]) + if env["PLATFORM"] == "win32" : + env.Append(CCFLAGS = ["/O2"]) + else : + env.Append(CCFLAGS = ["-O2"]) if env["target"] == "xcode" and os.environ["CONFIGURATION"] == "Release" : - env.Append(CCFLAGS = ["-Os"]) + env.Append(CCFLAGS = ["-Os"]) if env["debug"] : - if env["PLATFORM"] == "win32" : - env.Append(CCFLAGS = ["/Z7"]) - env.Append(LINKFLAGS = ["/DEBUG"]) - if env["set_iterator_debug_level"] : - env.Append(CPPDEFINES = ["_ITERATOR_DEBUG_LEVEL=0"]) - if env["optimize"] : - env.Append(LINKFLAGS = ["/OPT:NOREF"]) - env.Append(CCFLAGS = ["/MD"]) - else : - env.Append(CCFLAGS = ["/MDd"]) - else : - env.Append(CCFLAGS = ["-g"]) -elif env["PLATFORM"] == "win32" : - env.Append(CCFLAGS = ["/MD"]) + if env["PLATFORM"] == "win32" : + env.Append(CCFLAGS = ["/Zi"]) + env.Append(LINKFLAGS = ["/DEBUG"]) + if GetOption("num_jobs") > 1 : + env["CCPDBFLAGS"] = '/Fd${TARGET}.pdb' + env["PDB"] = '${TARGET.base}.pdb' + if env["set_iterator_debug_level"] : + env.Append(CPPDEFINES = ["_ITERATOR_DEBUG_LEVEL=0"]) + env.Append(LINKFLAGS = ["/OPT:NOREF"]) + else : + env.Append(CCFLAGS = ["-g"]) + +if env["PLATFORM"] == "win32" : + env.AppendUnique(CCFLAGS = ["/{}".format(env.get("msvc_runtime"))]) + # debug builds against debug MSVC runtime can cause some more sections in the object file + env.AppendUnique(CCFLAGS = ["/bigobj"]) if env.get("universal", 0) : - assert(env["PLATFORM"] == "darwin") - env.Append(CCFLAGS = [ - "-isysroot", "/Developer/SDKs/MacOSX10.4u.sdk", - "-arch", "i386", - "-arch", "ppc"]) - env.Append(LINKFLAGS = [ - "-mmacosx-version-min=10.4", - "-isysroot", "/Developer/SDKs/MacOSX10.4u.sdk", - "-arch", "i386", - "-arch", "ppc"]) + assert(env["PLATFORM"] == "darwin") + env.Append(CCFLAGS = [ + "-isysroot", "/Developer/SDKs/MacOSX10.4u.sdk", + "-arch", "i386", + "-arch", "ppc"]) + env.Append(LINKFLAGS = [ + "-mmacosx-version-min=10.4", + "-isysroot", "/Developer/SDKs/MacOSX10.4u.sdk", + "-arch", "i386", + "-arch", "ppc"]) @@ -178,27 +304,32 @@ if env.get("universal", 0) : if env.get("mac105", 0) : - assert(env["PLATFORM"] == "darwin") - env.Append(CCFLAGS = [ - "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk", - "-arch", "i386"]) - env.Append(LINKFLAGS = [ - "-mmacosx-version-min=10.5", - "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk", - "-arch", "i386"]) - env.Append(FRAMEWORKS = ["Security"]) + assert(env["PLATFORM"] == "darwin") + env.Append(CCFLAGS = [ + "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk", + "-arch", "i386"]) + env.Append(LINKFLAGS = [ + "-mmacosx-version-min=10.5", + "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk", + "-arch", "i386"]) if env.get("mac106", 0) : - assert(env["PLATFORM"] == "darwin") - env.Append(CCFLAGS = [ - "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk", - "-arch", "i386"]) - env.Append(LINKFLAGS = [ - "-mmacosx-version-min=10.6", - "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk", - "-arch", "i386"]) - env.Append(FRAMEWORKS = ["Security"]) + assert(env["PLATFORM"] == "darwin") + env.Append(CCFLAGS = [ + "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk", + "-arch", "i386"]) + env.Append(LINKFLAGS = [ + "-mmacosx-version-min=10.6", + "-isysroot", 
"/Developer/SDKs/MacOSX10.6.sdk", + "-arch", "i386"]) if not env["assertions"] : - env.Append(CPPDEFINES = ["NDEBUG"]) + env.Append(CPPDEFINES = ["NDEBUG"]) + +# disable file-transfer support on iOS +if env["target"] in ["iphone-device", "iphone-simulator", "xcode"] : + env["experimental_ft"] = False + +if env["experimental_ft"] : + env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_FT"]) if env["experimental"] : - env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_FT", "SWIFT_EXPERIMENTAL_HISTORY", "SWIFT_EXPERIMENTAL_WB"]) + env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_HISTORY", "SWIFT_EXPERIMENTAL_WB"]) @@ -206,4 +337,4 @@ if env["experimental"] : # This should have no performance impact om AMD64 -if env["PLATFORM"] == "posix" and platform.machine() == "x86_64" : - env.Append(CCFLAGS = ["-fPIC"]) +if env["PLATFORM"] == "posix" and platform.machine() in ["x86_64", "amd64"] : + env.Append(CCFLAGS = ["-fPIC"]) @@ -211,36 +342,75 @@ if env["PLATFORM"] == "posix" and platform.machine() == "x86_64" : if env["PLATFORM"] == "win32" : - # TODO: Find the ideal set of warnings - #env.Append(CCFLAGS = ["/Wall"]) - pass + env.Append(CXXFLAGS = ["/wd4068"]) + env.Append(CXXFLAGS = ["/wd4503"]) # Disable 'decorated name length exceeded, name was truncated' warning + if not env.get("allow_warnings", "False") : + env.Append(CXXFLAGS = ["/WX"]) +elif env["PLATFORM"] == "hpux" : + # HP-UX gives a flood of minor warnings if this is enabled + #env.Append(CXXFLAGS = ["+w"]) + pass +elif env["PLATFORM"] == "sunos" : + #env.Append(CXXFLAGS = ["-z verbose"]) + pass else : - env.Append(CXXFLAGS = ["-Wextra", "-Wall", "-Wnon-virtual-dtor", "-Wundef", "-Wold-style-cast", "-Wno-long-long", "-Woverloaded-virtual", "-Wfloat-equal", "-Wredundant-decls"]) - if not env.get("allow_warnings", False) : - env.Append(CXXFLAGS = ["-Werror"]) - gccVersion = env.get("CCVERSION", "0.0.0").split(".") - if gccVersion >= ["4", "5", "0"] and not "clang" in env["CC"] : - env.Append(CXXFLAGS = ["-Wlogical-op"]) - if "clang" in env["CC"] : - env.Append(CXXFLAGS = ["-W#warnings", "-Wc++0x-compat", "-Waddress-of-temporary", "-Wambiguous-member-template", "-Warray-bounds", "-Watomic-properties", "-Wbind-to-temporary-copy", "-Wbuiltin-macro-redefined", "-Wc++-compat", "-Wc++0x-extensions", "-Wcomments", "-Wconditional-uninitialized", "-Wconstant-logical-operand", "-Wdeclaration-after-statement", "-Wdeprecated", "-Wdeprecated-implementations", "-Wdeprecated-writable-strings", "-Wduplicate-method-arg", "-Wempty-body", "-Wendif-labels", "-Wenum-compare", "-Wformat=2", "-Wfour-char-constants", "-Wgnu", "-Wincomplete-implementation", "-Winvalid-noreturn", "-Winvalid-offsetof", "-Winvalid-token-paste", "-Wlocal-type-template-args", "-Wmethod-signatures", "-Wmicrosoft", "-Wmissing-declarations", "-Wnon-pod-varargs", "-Wnull-dereference", "-Wout-of-line-declaration", "-Woverlength-strings", "-Wpacked", "-Wpointer-arith", "-Wpointer-sign", "-Wprotocol", "-Wreadonly-setter-attrs", "-Wselector", "-Wshift-overflow", "-Wshift-sign-overflow", "-Wstrict-selector-match", "-Wsuper-class-method-mismatch", "-Wtautological-compare", "-Wtypedef-redefinition", "-Wundeclared-selector", "-Wunknown-warning-option", "-Wunnamed-type-template-args", "-Wunused-exception-parameter", "-Wunused-member-function", "-Wused-but-marked-unused", "-Wvariadic-macros", "-Wno-c++11-extensions"]) -# To enable: -# "-Wnonfragile-abi2" -> deprecated, is now -Warc-abi i think -# "-Wheader-hygiene" -# "-Wnon-gcc", -# "-Wweak-vtables", -# "-Wlarge-by-value-copy", + if 
os.path.basename(env["CXX"]).startswith(("clang", "clang++")) : + env.Append(CXXFLAGS = [ + "-Weverything", + "-Wno-unknown-warning-option", # To stay compatible between CLang versions + "-Wno-unknown-pragmas", # To stay compatible between CLang versions + "-Wno-weak-vtables", # Virtually none of our elements have outlined methods. This also seems to affect classes in .cpp files, which in turn affects all our tests, which may need fixing in CLang + "-Wno-shadow", # Also warns for shadowing on constructor arguments, which we do a lot + "-Wno-documentation", # We don't care about documentation warnings + "-Wno-documentation-unknown-command", # We don't care about documentation warnings + "-Wno-exit-time-destructors", # Used a lot in e.g. CPPUnit + "-Wno-c++98-compat-pedantic", # We do different things that violate this, but they could be fixed + "-Wno-global-constructors", # We depend on this for e.g. string constants + "-Wno-disabled-macro-expansion", # Caused due to system headers + "-Wno-long-long", # We use long long + "-Wno-padded", + "-Wno-missing-variable-declarations", # Getting rid of CPPUnit warnings + "-Wno-direct-ivar-access", # Obj-C code warning + "-Wno-potentially-evaluated-expression", # Caused due to calling shared_ptr::get() inside typeid() + + "-Wno-shadow-field", # FIXME: fix source code issues regarding this warning later + "-Wno-unused-template", # FIXME: fix source code issues regarding this warning later + ]) + else : + env.Append(CXXFLAGS = ["-Wextra", "-Wall", "-Wnon-virtual-dtor", "-Wundef", "-Wold-style-cast", "-Wno-long-long", "-Woverloaded-virtual", "-Wfloat-equal", "-Wredundant-decls", "-Wno-unknown-pragmas"]) + gccVersion = env.get("CCVERSION", "0.0.0").split(".") + if gccVersion >= ["4", "5", "0"] and not "clang" in env["CC"] : + env.Append(CXXFLAGS = ["-Wlogical-op"]) + if not env.get("allow_warnings", False) : + env.Append(CXXFLAGS = ["-Werror"]) if env.get("coverage", 0) : - assert(env["PLATFORM"] != "win32") - env.Append(CCFLAGS = ["-fprofile-arcs", "-ftest-coverage"]) - env.Append(LINKFLAGS = ["-fprofile-arcs", "-ftest-coverage"]) + assert(env["PLATFORM"] != "win32") + env.Append(CCFLAGS = ["-fprofile-arcs", "-ftest-coverage"]) + env.Append(LINKFLAGS = ["-fprofile-arcs", "-ftest-coverage"]) if env["PLATFORM"] == "win32" : - env.Append(LIBS = ["user32", "crypt32", "dnsapi", "iphlpapi", "ws2_32", "wsock32", "Advapi32"]) - env.Append(CCFLAGS = ["/EHsc", "/nologo"]) - env.Append(LINKFLAGS = ["/INCREMENTAL:no", "/NOLOGO"]) - if int(env["MSVS_VERSION"].split(".")[0]) < 10 : - env["LINKCOM"] = [env["LINKCOM"], 'mt.exe -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;1'] - env["SHLINKCOM"] = [env["SHLINKCOM"], 'mt.exe -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;2'] - -if env["PLATFORM"] == "darwin" and not env["target"] in ["iphone-device", "iphone-simulator", "xcode"] : - env.Append(FRAMEWORKS = ["IOKit", "AppKit", "SystemConfiguration", "Security", "SecurityInterface"]) + env.Append(LIBS = ["user32", "crypt32", "dnsapi", "iphlpapi", "ws2_32", "wsock32", "Advapi32", "ntdsapi"]) + env.Append(CCFLAGS = ["/EHsc", "/nologo", "/Zm256"]) + env.Append(LINKFLAGS = ["/INCREMENTAL:no", "/NOLOGO"]) + if int(env["MSVS_VERSION"].split(".")[0]) < 10 : + mt = env.get('mt') + if not mt: + mt = 'mt.exe' + env["LINKCOM"] = [env["LINKCOM"], '%s -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;1' % mt] + env["SHLINKCOM"] = [env["SHLINKCOM"], '%s -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;2' % mt] + +if env["PLATFORM"] == 
"darwin" and not env["target"] in ["iphone-device", "iphone-simulator", "xcode", "android"] : + env["PLATFORM_FLAGS"]["FRAMEWORKS"] += ["IOKit", "AppKit", "SystemConfiguration", "Security", "SecurityInterface"] + +# Required by boost headers on HP-UX +if env["PLATFORM"] == "hpux" : + env.Append(CXXFLAGS = ["+hpxstd98", "-mt", "-AA"]) + # FIXME: Need -AA for linking C++ but not C + #env.Append(LINKFLAGS = ["-AA"]) + +# Code signing +if env["PLATFORM"] == "darwin" : + env["CODE_SIGN_IDENTITY"] = env["codesign_identity"] +if env["PLATFORM"] == "win32" : + env["SIGNTOOL_KEY_PFX"] = env.get("signtool_key_pfx", None) + env["SIGNTOOL_TIMESTAMP_URL"] = env.get("signtool_timestamp_url", None) @@ -249,3 +419,3 @@ env["TEST_TYPE"] = env["test"] if "check" in ARGUMENTS : - env["TEST_TYPE"] = "unit" + env["TEST_TYPE"] = "unit" env["checker_report"] = ARGUMENTS.get("checker_report", False) @@ -253,3 +423,3 @@ env["TEST"] = (env["TEST_TYPE"] != "none") or env.GetOption("clean") if env.get("valgrind", 0) : - env["TEST_RUNNER"] = "valgrind --suppressions=QA/valgrind.supp -q --leak-check=full --track-origins=yes " + env["TEST_RUNNER"] = "valgrind --suppressions=QA/valgrind.supp -q --leak-check=full --track-origins=yes " env["TEST_IGNORE_RESULT"] = "ignore_test_result" in ARGUMENTS @@ -259,8 +429,11 @@ env["TEST_CREATE_LIBRARIES"] = "create_test_libraries" in ARGUMENTS env["DIST"] = "dist" in ARGUMENTS or env.GetOption("clean") -for path in ["SWIFT_INSTALLDIR", "SWIFTEN_INSTALLDIR"] : - if ARGUMENTS.get(path, "") : - if os.path.isabs(ARGUMENTS[path]) : - env[path] = Dir(ARGUMENTS[path]).abspath - else : - env[path] = Dir("#/" + ARGUMENTS[path]).abspath +for path in ["SWIFT_INSTALLDIR", "SWIFTEN_INSTALLDIR", "SLUIFT_INSTALLDIR"] : + if ARGUMENTS.get(path, "") : + if os.path.isabs(ARGUMENTS[path]) : + env[path] = Dir(ARGUMENTS[path]).abspath + else : + env[path] = Dir("#/" + ARGUMENTS[path]).abspath +if ARGUMENTS.get("SWIFTEN_LIBDIR", "") : + env["SWIFTEN_LIBDIR"] = ARGUMENTS["SWIFTEN_LIBDIR"] + @@ -272,38 +445,51 @@ target = env["target"] if target in ["iphone-device", "iphone-simulator", "xcode"] : - # Extract/initialize all the information we need - if target == "xcode" : - # Get the information from the XCode environment - env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = os.environ["PLATFORM_DEVELOPER_BIN_DIR"] - env["XCODE_SDKROOT"] = os.environ["SDKROOT"] - env["XCODE_ARCH_FLAGS"] = sum([["-arch", arch] for arch in os.environ["ARCHS"].split(" ")], []) - env["IPHONEOS_DEPLOYMENT_TARGET"] = os.environ["IPHONEOS_DEPLOYMENT_TARGET"] - # Use absolute path sources so Xcode can highlight compilation errors in swiften - env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM ${SOURCES.abspath}' - else : - # Hard code values - env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = "/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin" - if target == "iphone-device": - env["XCODE_ARCH_FLAGS"] = ["-arch", "armv6", "-arch", "armv7"] - sdkPart = "iPhoneOS" - else : - env["XCODE_ARCH_FLAGS"] = ["-arch", "i386"] - sdkPart = "iPhoneSimulator" - sdkVer = "4.3" - env["XCODE_SDKROOT"] = "/Developer/Platforms/" + sdkPart + ".platform/Developer/SDKs/" + sdkPart + sdkVer + ".sdk" - env["IPHONEOS_DEPLOYMENT_TARGET"] = "4.1" - - # Set the build flags - env["CC"] = "$XCODE_PLATFORM_DEVELOPER_BIN_DIR/gcc" - env["CXX"] = "$XCODE_PLATFORM_DEVELOPER_BIN_DIR/g++" - env["OBJCCFLAGS"] = ["-fobjc-abi-version=2", "-fobjc-legacy-dispatch"] - env["LD"] = env["CC"] - env.Append(CCFLAGS = env["XCODE_ARCH_FLAGS"] + ["-fvisibility=hidden", 
"-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"]]) - if os.environ.get("GCC_THUMB_SUPPORT", False) : - env.Append(CCFLAGS = ["-mthumb"]) - env.Append(LINKFLAGS = env["XCODE_ARCH_FLAGS"]) - env.Append(CPPFLAGS = ["-isysroot", "$XCODE_SDKROOT"]) - env.Append(FRAMEWORKS = ["CoreFoundation", "Foundation", "UIKit", "CoreGraphics"]) - env.Append(LINKFLAGS = env["XCODE_ARCH_FLAGS"] + ["-isysroot", "$XCODE_SDKROOT", "-L\"$XCODE_SDKROOT/usr/lib\"", "-F\"$XCODE_SDKROOT/System/Library/Frameworks\"", "-F\"$XCODE_SDKROOT/System/Library/PrivateFrameworks\""]) - # Bit of a hack, because BOOST doesn't know the endianness for ARM - env.Append(CPPDEFINES = ["_LITTLE_ENDIAN"]) + # Extract/initialize all the information we need + if target == "xcode" : + # Get the information from the XCode environment + env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = os.environ["PLATFORM_DEVELOPER_BIN_DIR"] + env["XCODE_SDKROOT"] = os.environ["SDKROOT"] + env["XCODE_ARCH_FLAGS"] = sum([["-arch", arch] for arch in os.environ["ARCHS"].split(" ")], []) + env["IPHONEOS_DEPLOYMENT_TARGET"] = os.environ["IPHONEOS_DEPLOYMENT_TARGET"] + # Use absolute path sources so Xcode can highlight compilation errors in swiften + env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM ${SOURCES.abspath}' + else : + # Hard code values + env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = "/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin" + if target == "iphone-device": + env["XCODE_ARCH_FLAGS"] = ["-arch", "armv6", "-arch", "armv7"] + sdkPart = "iPhoneOS" + else : + env["XCODE_ARCH_FLAGS"] = ["-arch", "i386"] + sdkPart = "iPhoneSimulator" + sdkVer = "6.0" + env["XCODE_SDKROOT"] = "/Applications/Xcode.app/Contents/Developer/Platforms/" + sdkPart + ".platform/Developer/SDKs/" + sdkPart + sdkVer + ".sdk" + env["IPHONEOS_DEPLOYMENT_TARGET"] = "4.1" + + # Set the build flags + env["CC"] = os.environ["DEVELOPER_BIN_DIR"] + "/gcc" + env["CXX"] = os.environ["DEVELOPER_BIN_DIR"] + "/g++" + env["OBJCCFLAGS"] = ["-fobjc-abi-version=2", "-fobjc-legacy-dispatch"] + env["LD"] = env["CC"] + env.Append(CCFLAGS = env["XCODE_ARCH_FLAGS"] + ["-fvisibility=hidden", "-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"]]) + env.Append(LINKFLAGS = "-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"]) + if os.environ.get("GCC_THUMB_SUPPORT", False) : + env.Append(CCFLAGS = ["-mthumb"]) + env.Append(LINKFLAGS = env["XCODE_ARCH_FLAGS"]) + env.Append(CPPFLAGS = ["-isysroot", "$XCODE_SDKROOT"]) + env.Append(FRAMEWORKS = ["CoreFoundation", "Foundation", "UIKit", "CoreGraphics"]) + env.Append(LINKFLAGS = env["XCODE_ARCH_FLAGS"] + ["-isysroot", "$XCODE_SDKROOT", "-L\"$XCODE_SDKROOT/usr/lib\"", "-F\"$XCODE_SDKROOT/System/Library/Frameworks\"", "-F\"$XCODE_SDKROOT/System/Library/PrivateFrameworks\""]) + # Bit of a hack, because BOOST doesn't know the endianness for ARM + env.Append(CPPDEFINES = ["_LITTLE_ENDIAN"]) + +################################################################################ +# Android +################################################################################ +if target in ["android"] : + env["ENV"]["PATH"] = env["android_toolchain"] + "/bin:" + env["ENV"]["PATH"] + env["CC"] = "arm-linux-androideabi-gcc" + env["CXX"] = "arm-linux-androideabi-g++" + env["AR"] = "arm-linux-androideabi-ar" + env["RANLIB"] = "arm-linux-androideabi-ranlib" + env.Append(CPPDEFINES = ["ANDROID"]) + env.Append(CPPDEFINES = ["_REENTRANT", "_GLIBCXX__PTHREADS"]) @@ -311,14 +497,21 @@ if target in 
["iphone-device", "iphone-simulator", "xcode"] : if env.get("ccache", False) : - env["ENV"]["HOME"] = os.environ["HOME"] - for var in os.environ : - if var.startswith("CCACHE_") : - env["ENV"][var] = os.environ[var] - if env.get("CC", "") != "" : - env["CC"] = "ccache " + env["CC"] - else : - env["CC"] = "ccache gcc" - if env.get("CXX", "") != "" : - env["CXX"] = "ccache " + env["CXX"] - else : - env["CC"] = "ccache g++" + env["ENV"]["HOME"] = os.environ["HOME"] + for var in os.environ : + if var.startswith("CCACHE_") : + env["ENV"][var] = os.environ[var] + if env.get("CC", "") != "" : + env["CC"] = "ccache " + env["CC"] + else : + env["CC"] = "ccache gcc" + if env.get("CXX", "") != "" : + env["CXX"] = "ccache " + env["CXX"] + else : + env["CC"] = "ccache g++" + +# distcc +if env.get("distcc", False) : + env["ENV"]["HOME"] = os.environ["HOME"] + for var in os.environ : + if var.startswith("DISTCC_") : + env["ENV"][var] = os.environ[var] @@ -331,9 +524,9 @@ variant = "" if env["enable_variants"] : - fingerprint = ",".join([flag for flag in env["CXXFLAGS"] + env["CCFLAGS"] if not flag.startswith("-W") and not flag.startswith("-fvisibility")]) - variant = "build/" + fingerprint - if not os.path.exists(Dir("#/build").abspath) : - os.mkdir(Dir("#/build").abspath) - if os.path.exists(Dir("#/build/current").abspath) : - os.unlink(Dir("#/build/current").abspath) - os.symlink(os.path.basename(variant), Dir("#/build/current").abspath) + fingerprint = ",".join([flag for flag in env["CXXFLAGS"] + env["CCFLAGS"] if not flag.startswith("-W") and not flag.startswith("-fvisibility")]) + variant = "build/" + fingerprint + if not os.path.exists(Dir("#/build").abspath) : + os.mkdir(Dir("#/build").abspath) + if os.path.exists(Dir("#/build/current").abspath) : + os.unlink(Dir("#/build/current").abspath) + os.symlink(os.path.basename(variant), Dir("#/build/current").abspath) diff --git a/BuildTools/SCons/SConstruct b/BuildTools/SCons/SConstruct index a0a6e8d..78f388b 100644 --- a/BuildTools/SCons/SConstruct +++ b/BuildTools/SCons/SConstruct @@ -9,7 +9,7 @@ root = Dir("../..").abspath oldSConscript = SConscript -def SConscript(*arguments, **keywords) : - if not keywords.get("test_only", False) or env["TEST"] : - return apply(oldSConscript, arguments, keywords) +def SConscript(*args, **kwargs) : + if not kwargs.get("test_only", False) or env["TEST"] : + return oldSConscript(*args, **kwargs) env.SConscript = SConscript - + ################################################################################ @@ -23,3 +23,3 @@ env.SConscript = SConscript #if env["PLATFORM"] == "win32" : -# env["MSVC_BATCH"] = 1 +# env["MSVC_BATCH"] = 1 @@ -27,43 +27,78 @@ env.SConscript = SConscript def colorize(command, target, color) : - colors = { "red": "31", "green": "32", "yellow": "33", "blue": "34" } - prefix = "" - suffix = "" - if sys.stdout.isatty() and env["PLATFORM"] != "win32": - prefix = "\033[0;" + colors[color] + ";140m" - suffix = "\033[0m" - return " " + prefix + command + suffix + " " + target - -if int(ARGUMENTS.get("V", 0)) == 0: - env["CCCOMSTR"] = colorize("CC", "$TARGET", "green") - env["SHCCCOMSTR"] = colorize("CC", "$TARGET", "green") - env["CXXCOMSTR"] = colorize("CXX", "$TARGET", "green") - env["SHCXXCOMSTR"] = colorize("CXX", "$TARGET", "green") - env["LINKCOMSTR"] = colorize("LINK", "$TARGET", "red") - env["SHLINKCOMSTR"] = colorize("LINK", "$TARGET", "red") - env["ARCOMSTR"] = colorize("AR", "$TARGET", "red") - env["RANLIBCOMSTR"] = colorize("RANLIB", "$TARGET", "red") - env["QT4_RCCCOMSTR"] = 
colorize("RCC", "$TARGET", "blue") - env["QT4_UICCOMSTR"] = colorize("UIC", "$TARGET", "blue") - env["QT4_MOCFROMHCOMSTR"] = colorize("MOC", "$TARGET", "blue") - env["QT4_MOCFROMCXXCOMSTR"] = colorize("MOC", "$TARGET", "blue") - env["QT4_LRELEASECOMSTR"] = colorize("LRELEASE", "$TARGET", "blue") - env["QT4_LUPDATECOMSTR"] = colorize("LUPDATE", "$TARGET", "blue") - env["GENCOMSTR"] = colorize("GEN", "$TARGET", "blue") - env["RCCOMSTR"] = colorize("RC", "$TARGET", "blue") - env["BUNDLECOMSTR"] = colorize("BUNDLE", "$TARGET", "blue") - env["NIBCOMSTR"] = colorize("NIB", "$TARGET", "blue") - env["NSISCOMSTR"] = colorize("NSIS", "$TARGET", "blue") - env["INSTALLSTR"] = colorize("INSTALL", "$TARGET", "blue") - env["TESTCOMSTR"] = colorize("TEST", "$SOURCE", "yellow") - env["FOCOMSTR"] = colorize("FO", "$TARGET", "blue") - env["XSLTCOMSTR"] = colorize("XSLT", "$TARGET", "blue") - env["XMLLINTCOMSTR"] = colorize("XMLLINT", "$SOURCE", "blue") - env["DOXYCOMSTR"] = colorize("DOXY", "$SOURCE", "blue") - #Progress(colorize("DEP", "$TARGET", "red") + colors = { "red": "31", "green": "32", "yellow": "33", "blue": "34" } + prefix = "" + suffix = "" + if sys.stdout.isatty() and env["PLATFORM"] != "win32": + prefix = "\033[0;" + colors[color] + ";140m" + suffix = "\033[0m" + return " " + prefix + command + suffix + " " + target + +if int(ARGUMENTS.get("V", 0)) == 0 and not ARGUMENTS.get("dump_trace", False) : + env["CCCOMSTR"] = colorize("CC", "$TARGET", "green") + env["SHCCCOMSTR"] = colorize("CC", "$TARGET", "green") + env["CXXCOMSTR"] = colorize("CXX", "$TARGET", "green") + env["SHCXXCOMSTR"] = colorize("CXX", "$TARGET", "green") + env["LINKCOMSTR"] = colorize("LINK", "$TARGET", "red") + env["SHLINKCOMSTR"] = colorize("LINK", "$TARGET", "red") + env["ARCOMSTR"] = colorize("AR", "$TARGET", "red") + env["RANLIBCOMSTR"] = colorize("RANLIB", "$TARGET", "red") + env["PCHCOMSTR"] = colorize("PCH", "$TARGET", "blue") + env["QT4_RCCCOMSTR"] = colorize("RCC", "$TARGET", "blue") + env["QT4_UICCOMSTR"] = colorize("UIC", "$TARGET", "blue") + env["QT4_MOCFROMHCOMSTR"] = colorize("MOC", "$TARGET", "blue") + env["QT4_MOCFROMCXXCOMSTR"] = colorize("MOC", "$TARGET", "blue") + env["QT4_LRELEASECOMSTR"] = colorize("LRELEASE", "$TARGET", "blue") + env["QT4_LUPDATECOMSTR"] = colorize("LUPDATE", "$TARGET", "blue") + env["GENCOMSTR"] = colorize("GEN", "$TARGET", "blue") + env["RCCOMSTR"] = colorize("RC", "$TARGET", "blue") + env["BUNDLECOMSTR"] = colorize("BUNDLE", "$TARGET", "blue") + env["NIBCOMSTR"] = colorize("NIB", "$TARGET", "blue") + env["NSISCOMSTR"] = colorize("NSIS", "$TARGET", "blue") + env["INSTALLSTR"] = colorize("INSTALL", "$TARGET", "blue") + env["TESTCOMSTR"] = colorize("TEST", "$SOURCE", "yellow") + env["FOCOMSTR"] = colorize("FO", "$TARGET", "blue") + env["XSLTCOMSTR"] = colorize("XSLT", "$TARGET", "blue") + env["XMLLINTCOMSTR"] = colorize("XMLLINT", "$SOURCE", "blue") + env["DOXYCOMSTR"] = colorize("DOXY", "$SOURCE", "blue") + #Progress(colorize("DEP", "$TARGET", "red") def checkObjCHeader(context, header) : - context.Message("Checking for Objective-C header " + header + " ... ") - ret = context.TryCompile("#include <Cocoa/Cocoa.h>\n#include <" + header + ">", ".m") - context.Result(ret) - return ret + context.Message("Checking for Objective-C header " + header + " ... ") + ret = context.TryCompile("#include <Cocoa/Cocoa.h>\n#include <" + header + ">", ".m") + context.Result(ret) + return ret + +def checkForCpp11Support(context) : + context.Message('Checking whether the C++ compiler supports C++11... 
') + result = context.TryLink( + """ +#include <memory> + +int main(int, char **) { + // shared_ptr test + std::shared_ptr<int> intPtr = std::make_shared<int>(); + + // unique_ptr test + std::unique_ptr<int> intPtrUnique = std::unique_ptr<int>(new int(1)); + + // auto test + auto otherIntPtr = intPtr; + std::shared_ptr<int> fooIntPtr = otherIntPtr; + + // lambda test + auto someFunction = [](int i){ i = i * i; }; + someFunction(2); + + // nullptr test + double* fooDouble = nullptr; + double bazDouble = 8.0; + fooDouble = &bazDouble; + bazDouble = *fooDouble; + + return 0; +} +""", '.cpp') + context.Result(result) + return result + @@ -74,13 +109,19 @@ def checkObjCHeader(context, header) : if ARGUMENTS.get("force-configure", 0) : - SCons.SConf.SetCacheMode("force") + SCons.SConf.SetCacheMode("force") def CheckPKG(context, name): - context.Message( 'Checking for package %s... ' % name ) - ret = context.TryAction('pkg-config --exists \'%s\'' % name)[0] - context.Result( ret ) - return ret + context.Message( 'Checking for package %s... ' % name ) + ret = context.TryAction('pkg-config --exists \'%s\'' % name)[0] + context.Result( ret ) + return ret def CheckVersion(context, library, version, define, header, value) : - context.Message("Checking " + library + " version (>= " + version + ") ...") - ret = context.TryRun(""" + context.Message("Checking " + library + " version (>= " + version + ") ...") + version = GetVersion(context, define, header) + ok = version >= value + context.Result(ok) + return ok + +def GetVersion(context, define, header, extension = ".c") : + ret = context.TryRun(""" #include <%(header)s> @@ -89,41 +130,69 @@ def CheckVersion(context, library, version, define, header, value) : int main(int argc, char* argv[]) { - printf("%%d\\n", %(define)s); - return 0; + printf("%%d\\n", %(define)s); + return 0; } -""" % { "header" : header, "define": define }, ".c") - ok = ret[0] and int(ret[1]) >= value - context.Result(ok) - return ok +""" % { "header" : header, "define": define }, extension) + if ret[0] : + return int(ret[1]) + else : + return -1 -conf = Configure(conf_env) +conf = Configure(conf_env, custom_tests = { + 'CheckCpp11Support' : checkForCpp11Support, + }) if not conf.CheckCXX() or not conf.CheckCC() : - print "Error: You need a working compiler" - Exit(1) + print("Error: You need a working compiler") + Exit(1) + +if not conf.CheckCpp11Support() : + print("Error: You need a compiler with support for the C++11 standard") + Exit(1) + env["HAVE_ZLIB"] = True -if conf.CheckLib("z") : - env["ZLIB_FLAGS"] = {"LIBS": ["z"]} +zlib_flags = {} +zlib_okay = False +if env.get("zlib_libdir", None) : + zlib_flags["LIBPATH"] = [env["zlib_libdir"]] + zlib_okay = True +if env.get("zlib_includedir", None) : + zlib_flags["CPPPATH"] = [env["zlib_includedir"]] + zlib_okay = True +if env.get("zlib_libfile", None) : + zlib_flags["LIBS"] = [File(env["zlib_libfile"])] + zlib_okay = True +elif zlib_okay : + zlib_flags["LIBS"] = ["z"] +if (not zlib_okay) and conf.CheckLib("z") : + zlib_flags["LIBS"] = ["z"] + zlib_okay = True +if zlib_okay : + env["ZLIB_FLAGS"] = zlib_flags +elif not env.get("zlib_bundled_enable", True) : + print("Error: Zlib not found and zlib_bundled_enable is false") + Exit(1) else : - env["ZLIB_BUNDLED"] = True + env["ZLIB_BUNDLED"] = True if conf.CheckLib("resolv") : - env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["resolv"] + env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["resolv"] if env["PLATFORM"] != "win32" : - if 
conf.CheckLib("pthread") : - env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["pthread"] + if conf.CheckLib("pthread") : + env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["pthread"] if conf.CheckLib("dl") : - env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["dl"] + env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["dl"] if conf.CheckLib("m") : - env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["m"] + env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["m"] if conf.CheckLib("c") : - env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["c"] - -if conf.CheckLib("stdc++") : - env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["stdc++"] + env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["c"] +# Even if you find stdc++ on HP-UX, it is the wrong one for aCC +if env["PLATFORM"] != "hpux" : + if conf.CheckLib("stdc++", language='CXX') : + env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["stdc++"] conf.Finish() @@ -134,13 +203,13 @@ boost_flags = {} if env.get("boost_libdir", None) : - boost_flags["LIBPATH"] = [env["boost_libdir"]] + boost_flags["LIBPATH"] = [env["boost_libdir"]] if env.get("boost_includedir", None) : - if env["PLATFORM"] == "win32" : - boost_flags["CPPPATH"] = [env["boost_includedir"]] - else : - # Using isystem to avoid getting warnings from a system boost - # Unfortunately, this also disables dependency tracking - boost_flags["CPPFLAGS"] = [("-isystem", env["boost_includedir"])] + if env["PLATFORM"] == "win32" or env["PLATFORM"] == "hpux" or env["PLATFORM"] == "sunos" : + boost_flags["CPPPATH"] = [env["boost_includedir"]] + else : + # Using isystem to avoid getting warnings from a system boost + # Unfortunately, this also disables dependency tracking + boost_flags["CPPFLAGS"] = [("-isystem", env["boost_includedir"])] boost_conf_env.MergeFlags(boost_flags) conf = Configure(boost_conf_env) -boostLibs = [("signals", None), ("thread", None), ("regex", None), ("program_options", None), ("filesystem", None), ("system", "system/system_error.hpp"), ("date_time", "date_time/date.hpp")] +boostLibs = [(None, "signals2.hpp"), ("system", "system/system_error.hpp"), ("thread", None), ("regex", None), ("program_options", None), ("filesystem", None), ("serialization", "archive/text_oarchive.hpp"), ("date_time", "date_time/date.hpp")] allLibsPresent = True @@ -148,26 +217,35 @@ libNames = [] for (lib, header) in boostLibs : - if header : - header = "boost/" + header - else : - header = "boost/" + lib + ".hpp" - if not conf.CheckCXXHeader(header) : - allLibsPresent = False - break - if env["PLATFORM"] != "win32" : - libName = "boost_" + lib - if not conf.CheckLib(libName) : - libName += "-mt" - if not conf.CheckLib(libName) : - allLibsPresent = False - break - libNames.append(libName) -if allLibsPresent : - env["BOOST_FLAGS"] = boost_flags - if env["PLATFORM"] != "win32" : - env["BOOST_FLAGS"].update({"LIBS": libNames}) - if not conf.CheckCXXHeader("boost/uuid/uuid.hpp") : - # FIXME: Remove this workaround when UUID is available in most distros - env["BOOST_BUNDLED_UUID_ONLY"] = True + if header : + header = "boost/" + header + else : + header = "boost/" + lib + ".hpp" + if not conf.CheckCXXHeader(header) : + allLibsPresent = False + break + if lib and env["PLATFORM"] != "win32" : + libName = "boost_" + lib + if not conf.CheckLib(libName, language='CXX') : + libName += 
"-mt" + if not conf.CheckLib(libName, language='CXX') : + allLibsPresent = False + break + libNames.append(libName) +if not env.get("boost_force_bundled") and allLibsPresent : + env["BOOST_FLAGS"] = boost_flags + if env["PLATFORM"] != "win32" : + env["BOOST_FLAGS"].update({"LIBS": libNames}) + if not conf.CheckCXXHeader("boost/uuid/uuid.hpp") : + # FIXME: Remove this workaround when UUID is available in most distros + env["BOOST_BUNDLED_UUID_ONLY"] = True +elif not env.get("boost_bundled_enable", True) : + print("Error: Boost not found and boost_bundled_enable is false") + Exit(1) else : - env["BOOST_BUNDLED"] = True + env["BOOST_BUNDLED"] = True +boost_version = GetVersion(conf, "BOOST_VERSION", "boost/version.hpp") +if boost_version == 106400 : + #Version 1.64 has some issues with the serialization of boost::optional, see https://svn.boost.org/trac10/ticket/13050 + env["BOOST_1_64_DETECTED"] = True +else: + env["BOOST_1_64_DETECTED"] = False conf.Finish() @@ -178,13 +256,13 @@ env["HAVE_XSS"] = 0 if env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" : - xss_flags = { - "LIBPATH": ["/usr/X11R6/lib"], - "LIBS": ["Xss"] - } - xss_env = conf_env.Clone() - xss_env.MergeFlags(xss_flags) - conf = Configure(xss_env) - if conf.CheckFunc("XScreenSaverQueryExtension") : - env["HAVE_XSS"] = 1 - env["XSS_FLAGS"] = xss_flags - conf.Finish() + xss_flags = { + "LIBPATH": ["/usr/X11R6/lib"], + "LIBS": ["Xss"] + } + xss_env = conf_env.Clone() + xss_env.MergeFlags(xss_flags) + conf = Configure(xss_env) + if conf.CheckFunc("XScreenSaverQueryExtension") : + env["HAVE_XSS"] = 1 + env["XSS_FLAGS"] = xss_flags + conf.Finish() @@ -192,24 +270,27 @@ if env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" : env["HAVE_GCONF"] = 0 -if env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" : - gconf_env = conf_env.Clone() - conf = Configure(gconf_env, custom_tests = {"CheckPKG": CheckPKG}) - if conf.CheckPKG("gconf-2.0") : - gconf_bare_env = Environment() - gconf_bare_env.ParseConfig('pkg-config --cflags gconf-2.0 gobject-2.0 --libs gconf-2.0 gobject-2.0') - gconf_flags = { - "LIBS": gconf_bare_env["LIBS"], - "CCFLAGS": gconf_bare_env["CCFLAGS"], - "CPPPATH": gconf_bare_env["CPPPATH"], - "CPPDEFINES": gconf_bare_env.get("CPPDEFINES", []), - } - gconf_env.MergeFlags(gconf_flags) - if conf.CheckCHeader("gconf/gconf-client.h") and conf.CheckLib("gconf-2") : - env["HAVE_GCONF"] = 1 - env["GCONF_FLAGS"] = { - "LIBS": gconf_env["LIBS"], - "CCFLAGS": gconf_env["CCFLAGS"], - "CPPPATH": gconf_env["CPPPATH"], - "CPPDEFINES": gconf_env.get("CPPDEFINES", []), - } - conf.Finish() +if env.get("try_gconf", True) and env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" : + gconf_env = conf_env.Clone() + conf = Configure(gconf_env, custom_tests = {"CheckPKG": CheckPKG}) + if conf.CheckPKG("gconf-2.0") : + gconf_bare_env = Environment() + gconf_bare_env.ParseConfig('pkg-config --cflags gconf-2.0 gobject-2.0 --libs gconf-2.0 gobject-2.0') + if os.path.basename(env["CXX"]).startswith(("g++", "clang++")) : + gconf_bare_env["CCFLAGS"] = [("-isystem" + ccflag) for ccflag in gconf_bare_env["CPPPATH"]] + gconf_bare_env["CPPPATH"] = [] + gconf_flags = { + "LIBS": gconf_bare_env["LIBS"], + "CCFLAGS": gconf_bare_env["CCFLAGS"], + "CPPPATH": gconf_bare_env["CPPPATH"], + "CPPDEFINES": gconf_bare_env.get("CPPDEFINES", []), + } + gconf_env.MergeFlags(gconf_flags) + if conf.CheckCHeader("gconf/gconf-client.h") and conf.CheckLib("gconf-2") : + env["HAVE_GCONF"] = 1 + env["GCONF_FLAGS"] = { + "LIBS": gconf_env["LIBS"], + 
"CCFLAGS": gconf_env["CCFLAGS"], + "CPPPATH": gconf_env.get("CPPPATH", []), + "CPPDEFINES": gconf_env.get("CPPDEFINES", []), + } + conf.Finish() @@ -218,14 +299,20 @@ env["HAVE_SPARKLE"] = 0 if env["PLATFORM"] == "darwin" : - sparkle_flags = { - "FRAMEWORKPATH": ["/Library/Frameworks"], - "FRAMEWORKS": ["Sparkle"] - } - sparkle_env = conf_env.Clone() - sparkle_env.MergeFlags(sparkle_flags) - conf = Configure(sparkle_env, custom_tests = { "CheckObjCHeader" : checkObjCHeader }) - if conf.CheckObjCHeader("Sparkle/Sparkle.h") : - env["HAVE_SPARKLE"] = 1 - env["SPARKLE_FLAGS"] = sparkle_flags - env["SPARKLE_FRAMEWORK"] = "/Library/Frameworks/Sparkle.framework" - conf.Finish() + sparkle_flags = { + "FRAMEWORKPATH": ["3rdParty/Sparkle/Sparkle-1.14.0"], + "FRAMEWORKS": ["Sparkle"] + } + sparkle_env = conf_env.Clone() + sparkle_env.MergeFlags(sparkle_flags) + conf = Configure(sparkle_env, custom_tests = { "CheckObjCHeader" : checkObjCHeader }) + if conf.CheckObjCHeader("Sparkle/Sparkle.h") : + env["HAVE_SPARKLE"] = 1 + env["SPARKLE_FLAGS"] = sparkle_flags + env["SPARKLE_FRAMEWORK"] = Dir("../../3rdParty/Sparkle/Sparkle-1.14.0/Sparkle.framework") + env["SPARKLE_COPYING"] = File("../../3rdParty/Sparkle/Sparkle-1.14.0/LICENSE") + conf.Finish() + + if env.get("sparkle_public_dsa_key", None) != None : + env["SWIFT_SPARKLE_PUBLIC_DSA_KEY"] = File(env.get("sparkle_public_dsa_key")) + else : + env["SWIFT_SPARKLE_PUBLIC_DSA_KEY"] = None @@ -234,18 +321,14 @@ env["HAVE_GROWL"] = 0 if env["PLATFORM"] == "darwin" : - growl_flags = { - "FRAMEWORKPATH": ["/Library/Frameworks"], - "FRAMEWORKS": ["Growl"] - } - growl_env = conf_env.Clone() - growl_env.MergeFlags(growl_flags) - conf = Configure(growl_env, custom_tests = { "CheckObjCHeader" : checkObjCHeader }) - if conf.CheckObjCHeader("Growl/Growl.h") : - env["HAVE_GROWL"] = 1 - env["GROWL_FLAGS"] = growl_flags - env["GROWL_FRAMEWORK"] = "/Library/Frameworks/Growl.framework" - conf.Finish() - -# Snarl -if env["PLATFORM"] == "win32" : - env["HAVE_SNARL"] = True + growl_flags = { + "FRAMEWORKPATH": ["/Library/Frameworks"], + "FRAMEWORKS": ["Growl"] + } + growl_env = conf_env.Clone() + growl_env.MergeFlags(growl_flags) + conf = Configure(growl_env, custom_tests = { "CheckObjCHeader" : checkObjCHeader }) + if conf.CheckObjCHeader("Growl/Growl.h") : + env["HAVE_GROWL"] = 1 + env["GROWL_FLAGS"] = growl_flags + env["GROWL_FRAMEWORK"] = "/Library/Frameworks/Growl.framework" + conf.Finish() @@ -253,33 +336,37 @@ if env["PLATFORM"] == "win32" : conf = Configure(conf_env, custom_tests = {"CheckVersion": CheckVersion}) -if conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") : +if env.get("try_libxml", True) and conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") : #and conf.CheckVersion("LibXML", "2.6.23", "LIBXML_VERSION", "libxml/xmlversion.h", 20623) : - env["HAVE_LIBXML"] = 1 - env["LIBXML_FLAGS"] = { "LIBS": ["xml2"] } + env["HAVE_LIBXML"] = 1 + env["LIBXML_FLAGS"] = { "LIBS": ["xml2"] } conf.Finish() -if not env.get("HAVE_LIBXML", 0) : - libxml_env = conf_env.Clone() - libxml_env.Append(CPPPATH = ["/usr/include/libxml2"]) - conf = Configure(libxml_env, custom_tests = {"CheckVersion": CheckVersion}) - if conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") : -# and conf.CheckVersion("LibXML", "2.6.23", "LIBXML_VERSION", "libxml/xmlversion.h", 20623): - env["HAVE_LIBXML"] = 1 - env["LIBXML_FLAGS"] = { "CPPPATH": ["/usr/include/libxml2"], "LIBS": ["xml2"] } - conf.Finish() +if env.get("try_libxml", True) and not env.get("HAVE_LIBXML", 0) : + 
libxml_env = conf_env.Clone() + libxml_env.Append(CPPPATH = ["/usr/include/libxml2"]) + conf = Configure(libxml_env, custom_tests = {"CheckVersion": CheckVersion}) + if conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") : +# and conf.CheckVersion("LibXML", "2.6.23", "LIBXML_VERSION", "libxml/xmlversion.h", 20623): + env["HAVE_LIBXML"] = 1 + libxml_env.Append() + if os.path.basename(env["CC"]) in ("clang", "gcc"): + env["LIBXML_FLAGS"] = { "CXXFLAGS": ["-isystem/usr/include/libxml2"], "LIBS": ["xml2"] } + else: + env["LIBXML_FLAGS"] = { "CPPPATH": ["/usr/include/libxml2"], "LIBS": ["xml2"] } + conf.Finish() # Expat -if not env.get("HAVE_LIBXML",0) : - expat_conf_env = conf_env.Clone() - expat_flags = {} - if env.get("expat_libdir", None) : - expat_flags["LIBPATH"] = [env["expat_libdir"]] - if env.get("expat_includedir", None) : - expat_flags["CPPPATH"] = [env["expat_includedir"]] - expat_conf_env.MergeFlags(expat_flags) - conf = Configure(expat_conf_env) - if conf.CheckCHeader("expat.h") and conf.CheckLib(env["expat_libname"]) : - env["HAVE_EXPAT"] = 1 - env["EXPAT_FLAGS"] = { "LIBS": [env["expat_libname"]] } - env["EXPAT_FLAGS"].update(expat_flags) - conf.Finish() +if env.get("try_expat", True) and not env.get("HAVE_LIBXML",0) : + expat_conf_env = conf_env.Clone() + expat_flags = {} + if env.get("expat_libdir", None) : + expat_flags["LIBPATH"] = [env["expat_libdir"]] + if env.get("expat_includedir", None) : + expat_flags["CPPPATH"] = [env["expat_includedir"]] + expat_conf_env.MergeFlags(expat_flags) + conf = Configure(expat_conf_env) + if conf.CheckCHeader("expat.h") and conf.CheckLib(env["expat_libname"]) : + env["HAVE_EXPAT"] = 1 + env["EXPAT_FLAGS"] = { "LIBS": [env["expat_libname"]] } + env["EXPAT_FLAGS"].update(expat_flags) + conf.Finish() @@ -288,6 +375,6 @@ bundledExpat = False if not env.get("HAVE_EXPAT", 0) and not env.get("HAVE_LIBXML", 0) : - print "Expat or LibXML not found. Using bundled Expat" - SConscript("#/3rdParty/Expat/SConscript") - env["HAVE_EXPAT"] = 1 - env["EXPAT_BUNDLED"] = True + print("Expat or LibXML not found. 
Using bundled Expat") + SConscript("#/3rdParty/Expat/SConscript") + env["HAVE_EXPAT"] = 1 + env["EXPAT_BUNDLED"] = True @@ -297,2 +384,4 @@ if not env.get("HAVE_EXPAT", 0) and not env.get("HAVE_LIBXML", 0) : +env["NEED_IDN"] = env.get("need_idn", True) + # ICU @@ -300,8 +389,10 @@ icu_env = conf_env.Clone() use_icu = bool(env["icu"]) -icu_prefix = env["icu"] if isinstance(env["icu"], str) else "" +icu_prefix = "" +if isinstance(env["icu"], str) : + icu_prefix = env["icu"] icu_flags = {} if icu_prefix : - icu_flags = { "CPPPATH": [os.path.join(icu_prefix, "include")] } - icu_flags["LIBPATH"] = [os.path.join(icu_prefix, "lib")] - icu_env.MergeFlags(icu_flags) + icu_flags = { "CPPPATH": [os.path.join(icu_prefix, "include")] } + icu_flags["LIBPATH"] = [os.path.join(icu_prefix, "lib")] + icu_env.MergeFlags(icu_flags) @@ -309,5 +400,5 @@ icu_conf = Configure(icu_env) if use_icu and icu_conf.CheckCHeader("unicode/usprep.h") : - env["HAVE_ICU"] = 1 - env["ICU_FLAGS"] = icu_flags - env["ICU_FLAGS"]["LIBS"] = ["icuuc"] + env["HAVE_ICU"] = 1 + env["ICU_FLAGS"] = icu_flags + env["ICU_FLAGS"]["LIBS"] = ["icuuc"] icu_conf.Finish() @@ -318,11 +409,11 @@ libidn_flags = {} if env.get("libidn_libdir", None) : - libidn_flags["LIBPATH"] = [env["libidn_libdir"]] + libidn_flags["LIBPATH"] = [env["libidn_libdir"]] if env.get("libidn_includedir", None) : - libidn_flags["CPPPATH"] = [env["libidn_includedir"]] + libidn_flags["CPPPATH"] = [env["libidn_includedir"]] libidn_conf_env.MergeFlags(libidn_flags) conf = Configure(libidn_conf_env) -if not env.get("HAVE_ICU") and conf.CheckCHeader("idna.h") and conf.CheckLib(env["libidn_libname"]) : - env["HAVE_LIBIDN"] = 1 - env["LIBIDN_FLAGS"] = { "LIBS": [env["libidn_libname"]] } - env["LIBIDN_FLAGS"].update(libidn_flags) +if env.get("try_libidn", True) and not env.get("HAVE_ICU") and conf.CheckCHeader("idna.h") and conf.CheckLib(env["libidn_libname"]) : + env["HAVE_LIBIDN"] = 1 + env["LIBIDN_FLAGS"] = { "LIBS": [env["libidn_libname"]] } + env["LIBIDN_FLAGS"].update(libidn_flags) conf.Finish() @@ -331,28 +422,58 @@ conf.Finish() if not env.get("HAVE_ICU", False) and not env.get("HAVE_LIBIDN", False) : - env["HAVE_LIBIDN"] = 1 - env["LIBIDN_BUNDLED"] = 1 + if env.get("libidn_bundled_enable", True) : + env["HAVE_LIBIDN"] = 1 + env["LIBIDN_BUNDLED"] = 1 + elif env.get("need_idn", True): + print("Error: ICU and LIBIDN not found, and libidn_bundled_enable is false") + Exit(1) + else: + print("Proceeding without an IDN library because need_idn was false. 
This will break all internal binaries") + +# Unbound +if env["unbound"] : + env["LDNS_BUNDLED"] = 1 + env["UNBOUND_BUNDLED"] = 1 +else : + env["LDNS_FLAGS"] = {} + env["UNBOUND_FLAGS"] = {} # LibMiniUPnPc -if env["experimental"] : - #libminiupnpc_conf_env = conf_env.Clone() - #conf = Configure(libminiupnpc_conf_env) - #if conf.CheckCHeader("miniupnpc.h") and conf.CheckLib(env["libminiupnpc_libname"]) : - # print "NOT IMPLEMENTED YET" - #else : - env["LIBMINIUPNPC_BUNDLED"] = 1 - #conf.Finish() +if env["experimental_ft"] : + libminiupnpc_flags = {"CPPPATH": ["/usr/include/miniupnpc/"]} + libminiupnpc_conf_env = conf_env.Clone() + if env.get("libminiupnpc_libdir", None) : + libminiupnpc_flags["LIBPATH"] = [env["libminiupnpc_libdir"]] + if env.get("libminiupnpc_includedir", None) : + libminiupnpc_flags["CPPPATH"] = [env["libminiupnpc_includedir"]] + libminiupnpc_conf_env.MergeFlags(libminiupnpc_flags) + conf = Configure(libminiupnpc_conf_env) + if not env.get("libminiupnpc_force_bundled") and conf.CheckCHeader("miniupnpc.h") and conf.CheckLib(env["libminiupnpc_libname"]) : + env["HAVE_LIBMINIUPNPC"] = 1 + env["LIBMINIUPNPC_FLAGS"] = { "LIBS": ["miniupnpc"] } + env["LIBMINIUPNPC_FLAGS"].update(libminiupnpc_flags) + else : + env["LIBMINIUPNPC_BUNDLED"] = 1 + conf.Finish() else : - env["LIBMINIUPNPC_FLAGS"] = {} + env["LIBMINIUPNPC_FLAGS"] = {} # LibNATPMP -if env["experimental"] : - #libnatpmp_conf_env = conf_env.Clone() - #conf = Configure(libnatpmp_conf_env) - #if conf.CheckCHeader("natpmp.h") and conf.CheckLib(env["libnatpmp_libname"]) : - # print "NOT IMPLEMENTED YET" - #else : - env["LIBNATPMP_BUNDLED"] = 1 - #conf.Finish() +if env["experimental_ft"] : + libnatpmp_flags = {} + libnatpmp_conf_env = conf_env.Clone() + if env.get("libnatpmp_libdir", None) : + libnatpmp_flags["LIBPATH"] = [env["libnatpmp_libdir"]] + if env.get("libnatpmp_includedir", None) : + libnatpmp_flags["CPPPATH"] = [env["libnatpmp_includedir"]] + libnatpmp_conf_env.MergeFlags(libnatpmp_flags) + conf = Configure(libnatpmp_conf_env) + if not env.get("libnatpmp_force_bundled") and conf.CheckCHeader("natpmp.h") and conf.CheckLib(env["libnatpmp_libname"]) : + env["HAVE_LIBNATPMP"] = 1 + env["LIBNATPMP_FLAGS"] = { "LIBS": ["natpmp"] } + env["LIBNATPMP_FLAGS"].update(libnatpmp_flags) + else : + env["LIBNATPMP_BUNDLED"] = 1 + conf.Finish() else : - env["LIBNATPMP_FLAGS"] = {} + env["LIBNATPMP_FLAGS"] = {} @@ -360,20 +481,19 @@ else : if env["experimental"] : - sqlite_conf_env = conf_env.Clone() - sqlite_flags = {} - if env.get("sqlite_libdir", None) : - sqlite_flags["LIBPATH"] = [env["sqlite_libdir"]] - if env.get("sqlite_includedir", None) : - sqlite_flags["CPPPATH"] = [env["sqlite_includedir"]] - sqlite_conf_env.MergeFlags(sqlite_flags) - conf = Configure(sqlite_conf_env) - if conf.CheckCHeader("sqlite3.h") and conf.CheckLib(env["sqlite_libname"]) and not env.get("sqlite_force_bundled", False): - env["HAVE_SQLITE"] = 1 - env["SQLITE_FLAGS"] = { "LIBS": [env["sqlite_libname"]] } - env["SQLITE_FLAGS"].update(sqlite_flags) - else : - env["SQLITE_BUNDLED"] = 1 - env["SQLITE_ASYNC_BUNDLED"] = 1 - conf.Finish() + sqlite_conf_env = conf_env.Clone() + sqlite_flags = {} + if env.get("sqlite_libdir", None) : + sqlite_flags["LIBPATH"] = [env["sqlite_libdir"]] + if env.get("sqlite_includedir", None) : + sqlite_flags["CPPPATH"] = [env["sqlite_includedir"]] + sqlite_conf_env.MergeFlags(sqlite_flags) + conf = Configure(sqlite_conf_env) + if conf.CheckCHeader("sqlite3.h") and conf.CheckLib(env["sqlite_libname"]) and not 
env.get("sqlite_force_bundled", False): + env["HAVE_SQLITE"] = 1 + env["SQLITE_FLAGS"] = { "LIBS": [env["sqlite_libname"]] } + env["SQLITE_FLAGS"].update(sqlite_flags) + else : + env["SQLITE_BUNDLED"] = 1 + conf.Finish() else : - env["SQLITE_FLAGS"] = {} + env["SQLITE_FLAGS"] = {} @@ -381,9 +501,36 @@ else : # Lua -env["LUA_BUNDLED"] = 1 +lua_conf_env = conf_env.Clone() +lua_flags = {} +if env.get("lua_libdir", None) : + lua_flags["LIBPATH"] = [env["lua_libdir"]] +if env.get("lua_includedir", None) : + lua_flags["CPPPATH"] = [env["lua_includedir"]] +lua_conf_env.MergeFlags(lua_flags) +conf = Configure(lua_conf_env) +if not env.get("lua_force_bundled", False) and conf.CheckLibWithHeader(env["lua_libname"], "lua.hpp", "cxx", autoadd = 0) : + env["HAVE_LUA"] = 1 + env["LUA_FLAGS"] = { "LIBS": [env["lua_libname"]] } + lua_version = GetVersion(conf, "LUA_VERSION_NUM", "lua.h") + if lua_version > 0 : + env["LUA_FLAGS"]["LUA_VERSION"] = str(lua_version // 100) + "." + str(lua_version % 100) + else : + print("Warning: Unable to determine Lua version. Not installing Lua libraries.") + env["LUA_FLAGS"].update(lua_flags) +else : + env["LUA_BUNDLED"] = 1 +conf.Finish() # Readline -conf = Configure(conf_env) -if conf.CheckCHeader(["stdio.h", "readline/readline.h"]) and conf.CheckLib("readline") : - env["HAVE_READLINE"] = True - env["READLINE_FLAGS"] = { "LIBS": ["readline"] } +editline_conf_env = conf_env.Clone() +editline_flags = {} +if env.get("editline_libdir", None) : + editline_flags["LIBPATH"] = [env["editline_libdir"]] +if env.get("editline_includedir", None) : + editline_flags["CPPPATH"] = [env["editline_includedir"]] +editline_conf_env.MergeFlags(editline_flags) +conf = Configure(editline_conf_env) +if conf.CheckLibWithHeader(env["editline_libname"], ["stdio.h", "editline/readline.h"], "c") : + env["HAVE_EDITLINE"] = 1 + env["EDITLINE_FLAGS"] = { "LIBS": [env["editline_libname"]] } + env["EDITLINE_FLAGS"].update(editline_flags) conf.Finish() @@ -394,11 +541,11 @@ avahi_flags = {} if env.get("avahi_libdir", None) : - avahi_flags["LIBPATH"] = [env["avahi_libdir"]] + avahi_flags["LIBPATH"] = [env["avahi_libdir"]] if env.get("avahi_includedir", None) : - avahi_flags["CPPPATH"] = [env["avahi_includedir"]] + avahi_flags["CPPPATH"] = [env["avahi_includedir"]] avahi_conf_env.MergeFlags(avahi_flags) conf = Configure(avahi_conf_env) -if conf.CheckCHeader("avahi-client/client.h") and conf.CheckLib("avahi-client") and conf.CheckLib("avahi-common") : - env["HAVE_AVAHI"] = True - env["AVAHI_FLAGS"] = { "LIBS": ["avahi-client", "avahi-common"] } - env["AVAHI_FLAGS"].update(avahi_flags) +if env.get("try_avahi", True) and conf.CheckCHeader("avahi-client/client.h") and conf.CheckLib("avahi-client") and conf.CheckLib("avahi-common") : + env["HAVE_AVAHI"] = True + env["AVAHI_FLAGS"] = { "LIBS": ["avahi-client", "avahi-common"] } + env["AVAHI_FLAGS"].update(avahi_flags) conf.Finish() @@ -407,58 +554,104 @@ conf.Finish() if env["qt"] : - env["QTDIR"] = env["qt"] + env["QTDIR"] = env["qt"] + +if env["PLATFORM"] == "win32" : + systemIncludeFlag = "/I" +else: + systemIncludeFlag = "-isystem" +################################################################################ +# TLS backend selection +################################################################################ +env["OPENSSL_FLAGS"] = {} +if env.get("tls_backend") == "native" : + if env["PLATFORM"] == "win32" : + env["HAVE_SCHANNEL"] = True + elif env["PLATFORM"] == "darwin" and env["target"] == "native": + env["HAVE_SECURETRANSPORT"] = True + elif 
env["target"] in ("iphone-device", "iphone-simulator", "xcode", "android") : + env["tls_backend"] = "openssl_bundled" + else : + env["tls_backend"] = "openssl" # OpenSSL -openssl_env = conf_env.Clone() -use_openssl = bool(env["openssl"]) -openssl_prefix = env["openssl"] if isinstance(env["openssl"], str) else "" -openssl_flags = {} -if openssl_prefix : - openssl_flags = { "CPPPATH": [os.path.join(openssl_prefix, "include")] } - if env["PLATFORM"] == "win32" : - openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib", "VC")] - env["OPENSSL_DIR"] = openssl_prefix - else : - openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib")] - openssl_env.MergeFlags(openssl_flags) - -openssl_conf = Configure(openssl_env) -if use_openssl and openssl_conf.CheckCHeader("openssl/ssl.h") : - env["HAVE_OPENSSL"] = 1 - env["OPENSSL_FLAGS"] = openssl_flags - if env["PLATFORM"] == "win32" : - env["OPENSSL_FLAGS"]["LIBS"] = ["libeay32MD", "ssleay32MD"] - else: - env["OPENSSL_FLAGS"]["LIBS"] = ["ssl", "crypto"] - if env["PLATFORM"] == "darwin" : - if platform.mac_ver()[0].startswith("10.5") : - env["OPENSSL_FLAGS"]["FRAMEWORKS"] = ["Security"] -elif env["target"] in ("iphone-device", "iphone-simulator", "xcode") : - env["OPENSSL_BUNDLED"] = True - env["HAVE_OPENSSL"] = True -else : - env["OPENSSL_FLAGS"] = {} - if env["PLATFORM"] == "win32" : - env["HAVE_SCHANNEL"] = True - # If we're compiling for Windows and OpenSSL isn't being used, use Schannel - env.Append(LIBS = ["secur32"]) +if env.get("tls_backend") == "openssl_bundled" : + env["OPENSSL_BUNDLED"] = True + env["HAVE_OPENSSL"] = True +elif env.get("tls_backend") == "openssl" : + openssl_env = conf_env.Clone() + use_openssl = bool(env["openssl"]) + openssl_prefix = "" + if isinstance(env["openssl"], str) : + openssl_prefix = env["openssl"] + openssl_flags = {} + if openssl_prefix : + openssl_include = env.get("openssl_include") + openssl_libdir = env.get("openssl_libdir") + if openssl_include: + openssl_flags = { "CPPFLAGS": [systemIncludeFlag + openssl_include]} + else: + openssl_flags = { "CPPFLAGS": [systemIncludeFlag + os.path.join(openssl_prefix, "include")] } + if openssl_libdir: + openssl_flags["LIBPATH"] = [openssl_libdir] + env["OPENSSL_DIR"] = openssl_prefix + elif env["PLATFORM"] == "win32" : + openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib", "VC")] + env["OPENSSL_DIR"] = openssl_prefix + else : + openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib")] + openssl_env.MergeFlags(openssl_flags) + + openssl_conf = Configure(openssl_env) + if use_openssl and openssl_conf.CheckCHeader("openssl/ssl.h") : + env["HAVE_OPENSSL"] = 1 + env["OPENSSL_FLAGS"] = openssl_flags + openssl_libnames = env.get("openssl_libnames") + if openssl_libnames: + env["OPENSSL_FLAGS"]["LIBS"] = openssl_libnames.split(',') + elif env["PLATFORM"] == "win32" : + env["OPENSSL_FLAGS"]["LIBS"] = ["libeay32MD", "ssleay32MD"] + else: + env["OPENSSL_FLAGS"]["LIBS"] = ["ssl", "crypto"] + if env["PLATFORM"] == "darwin" : + if platform.mac_ver()[0].startswith("10.5") : + env["OPENSSL_FLAGS"]["FRAMEWORKS"] = ["Security"] + openssl_conf.Finish() -openssl_conf.Finish() +if env["PLATFORM"] == "win32" : + # On Windows link to secur32. 
It is needed by Swiften/SASL/WindowsAuthentication + env.Append(LIBS = ["secur32"]) + +#Hunspell +hunspell_env = conf_env.Clone() +hunspell_prefix = isinstance(env.get("hunspell_prefix", False), str) and env["hunspell_prefix"] or "" +hunspell_flags = {} +if hunspell_prefix : + hunspell_flags = {"CPPPATH":[os.path.join(hunspell_prefix, "include")], "LIBPATH":[os.path.join(hunspell_prefix, "lib")]} +hunspell_env.MergeFlags(hunspell_flags) + +env["HAVE_HUNSPELL"] = 0; +if env.get("hunspell_enable", False) : + hunspell_conf = Configure(hunspell_env) + if hunspell_conf.CheckCXXHeader("hunspell/hunspell.hxx") and hunspell_conf.CheckLib("hunspell") : + env["HAVE_HUNSPELL"] = 1 + hunspell_flags["LIBS"] = ["hunspell"] + env["HUNSPELL_FLAGS"] = hunspell_flags + hunspell_conf.Finish() # Bonjour -if env["PLATFORM"] == "darwin" : - env["HAVE_BONJOUR"] = 1 +if env["PLATFORM"] == "darwin" and env["target"] == "native" : + env["HAVE_BONJOUR"] = 1 elif env.get("bonjour", False) : - bonjour_env = conf_env.Clone() - bonjour_conf = Configure(bonjour_env) - bonjour_flags = {} - if env.get("bonjour") != True : - bonjour_prefix = env["bonjour"] - bonjour_flags["CPPPATH"] = [os.path.join(bonjour_prefix, "include")] - bonjour_flags["LIBPATH"] = [os.path.join(bonjour_prefix, "lib", "win32")] - bonjour_env.MergeFlags(bonjour_flags) - if bonjour_conf.CheckCHeader("dns_sd.h") and bonjour_conf.CheckLib("dnssd") : - env["HAVE_BONJOUR"] = 1 - env["BONJOUR_FLAGS"] = bonjour_flags - env["BONJOUR_FLAGS"]["LIBS"] = ["dnssd"] - bonjour_conf.Finish() + bonjour_env = conf_env.Clone() + bonjour_conf = Configure(bonjour_env) + bonjour_flags = {} + if env.get("bonjour") != True : + bonjour_prefix = env["bonjour"] + bonjour_flags["CPPPATH"] = [os.path.join(bonjour_prefix, "include")] + bonjour_flags["LIBPATH"] = [os.path.join(bonjour_prefix, "lib", "win32")] + bonjour_env.MergeFlags(bonjour_flags) + if bonjour_conf.CheckCHeader("dns_sd.h") and bonjour_conf.CheckLib("dnssd") : + env["HAVE_BONJOUR"] = 1 + env["BONJOUR_FLAGS"] = bonjour_flags + env["BONJOUR_FLAGS"]["LIBS"] = ["dnssd"] + bonjour_conf.Finish() @@ -466,6 +659,6 @@ elif env.get("bonjour", False) : if env["PLATFORM"] == "darwin" : - cocoa_conf = Configure(conf_env) - if cocoa_conf.CheckCHeader("IOKit/IOKitLib.h") : - env["HAVE_IOKIT"] = True - cocoa_conf.Finish() + cocoa_conf = Configure(conf_env) + if cocoa_conf.CheckCHeader("IOKit/IOKitLib.h") : + env["HAVE_IOKIT"] = True + cocoa_conf.Finish() @@ -473,7 +666,10 @@ if env["PLATFORM"] == "darwin" : try : - myenv = env.Clone() - myenv.Tool("qt4", toolpath = ["#/BuildTools/SCons/Tools"]) - env["HAVE_QT"] = True -except : - env["HAVE_QT"] = False + myenv = env.Clone() + myenv.Tool("qt4", toolpath = ["#/BuildTools/SCons/Tools"]) + env["HAVE_QT"] = True +except SCons.Errors.StopError: + env["HAVE_QT"] = False +except Exception as e: + print("Info: %s" % str(e)) + env["HAVE_QT"] = False @@ -484,5 +680,5 @@ except : if env.get("docbook_xml") : - env["DOCBOOK_XML_DIR"] = env["docbook_xml"] + env["DOCBOOK_XML_DIR"] = env["docbook_xml"] if env.get("docbook_xsl") : - env["DOCBOOK_XSL_DIR"] = env["docbook_xsl"] + env["DOCBOOK_XSL_DIR"] = env["docbook_xsl"] @@ -493,5 +689,8 @@ if env.get("docbook_xsl") : -if env.Dir("#/.git").exists() : - if not env.GetOption("clean") : - env.Install("#/.git/hooks", Glob("#/BuildTools/Git/Hooks/*")) +try: + if env.Dir("#/.git").exists() : + if not env.GetOption("clean") and env.get("install_git_hooks", True) : + env.Install("#/.git/hooks", Glob("#/BuildTools/Git/Hooks/*")) +except TypeError: + 
print("You seem to be using Swift in a Git submodule. Not installing hooks.") @@ -503,19 +702,21 @@ if env.Dir("#/.git").exists() : if ARGUMENTS.get("replace_pragma_once", False) : - env.Tool("ReplacePragmaOnce", toolpath = ["#/BuildTools/SCons/Tools"]) - - def relpath(path, start) : - i = len(os.path.commonprefix([path, start])) - return path[i+1:] - - for actual_root, dirs, files in os.walk(root) : - if "3rdParty" in actual_root : - continue - for file in files : - if not file.endswith(".h") : - continue - include = relpath(os.path.join(actual_root, file), root) - env.ReplacePragmaOnce("#/include/" + include, "#/" + include) - env.Append(CPPPATH = ["#/include"]) + env.Tool("ReplacePragmaOnce", toolpath = ["#/BuildTools/SCons/Tools"]) + + def relpath(path, start) : + i = len(os.path.commonprefix([path, start])) + return path[i+1:] + + for actual_root, dirs, files in os.walk(root) : + dirs.sort() + files.sort() + if "3rdParty" in actual_root : + continue + for file in files : + if not file.endswith(".h") : + continue + include = relpath(os.path.join(actual_root, file), root) + env.ReplacePragmaOnce("#/include/" + include, "#/" + include) + env.Append(CPPPATH = ["#/include"]) else : - env.Append(CPPPATH = [root]) + env.Append(CPPPATH = [root]) @@ -526,4 +727,8 @@ else : -# Build tools -env.SConscript(dirs = ["#/BuildTools/CLang"]) +if ARGUMENTS.get("dump_trace", False) : + env.SetOption("no_exec", True) + env["TEST"] = True + env["BOOST_BUILD_BCP"] = True + env.Decider(lambda x, y, z : True) + SCons.Node.Python.Value.changed_since_last_build = (lambda x, y, z: True) @@ -531,16 +736,21 @@ env.SConscript(dirs = ["#/BuildTools/CLang"]) modules = [] -for dir in os.listdir(Dir("#/3rdParty").abspath) : - full_dir = os.path.join(Dir("#/3rdParty").abspath, dir) - if not os.path.isdir(full_dir) : - continue - sconscript = os.path.join(full_dir, "SConscript") - if os.path.isfile(sconscript) : - modules.append("3rdParty/" + dir) -for dir in os.listdir(Dir("#").abspath) : - full_dir = os.path.join(Dir("#").abspath, dir) - if not os.path.isdir(full_dir) : - continue - sconscript = os.path.join(full_dir, "SConscript") - if os.path.isfile(sconscript) : - modules.append(dir) +if os.path.isdir(Dir("#/3rdParty").abspath) : + for dir in sorted(os.listdir(Dir("#/3rdParty").abspath)) : + full_dir = os.path.join(Dir("#/3rdParty").abspath, dir) + if not os.path.isdir(full_dir) : + continue + sconscript = os.path.join(full_dir, "SConscript") + if os.path.isfile(sconscript) : + modules.append("3rdParty/" + dir) +for dir in sorted(os.listdir(Dir("#").abspath)) : + full_dir = os.path.join(Dir("#").abspath, dir) + if not os.path.isdir(full_dir) : + continue + sconscript = os.path.join(full_dir, "SConscript") + if os.path.isfile(sconscript) : + modules.append(dir) + +# QA comes last +modules.remove("QA") +modules.append("QA") @@ -548,5 +758,5 @@ for dir in os.listdir(Dir("#").abspath) : env["PROJECTS"] = [m for m in modules if m not in ["Documentation", "QA", "SwifTools"] and not m.startswith("3rdParty")] -for stage in ["flags", "build", "test"] : - env["SCONS_STAGE"] = stage - SConscript(dirs = map(lambda x : root + "/" + x, modules)) +for stage in ["flags", "build"] : + env["SCONS_STAGE"] = stage + SConscript(dirs = list(map(lambda x : root + "/" + x, modules))) @@ -554,4 +764,4 @@ for stage in ["flags", "build", "test"] : if ARGUMENTS.get("sloccount", False) : - for project in env["PROJECTS"] : - env.SLOCCount("#/" + project) + for project in env["PROJECTS"] : + env.SLOCCount("#/" + project) @@ -562,5 +772,5 @@ if 
ARGUMENTS.get("sloccount", False) : -print -print " Build Configuration" -print " -------------------" +print("") +print(" Build Configuration") +print(" -------------------") @@ -568,13 +778,17 @@ parsers = [] if env.get("HAVE_LIBXML", 0): - parsers.append("LibXML") + parsers.append("LibXML") if env.get("HAVE_EXPAT", 0): - parsers.append("Expat") - if env.get("EXPAT_BUNDLED", False) : - parsers.append("(Bundled)") -print " Projects: " + ' '.join(env["PROJECTS"]) -print "" -print " XML Parsers: " + ' '.join(parsers) - -print " TLS Support: " + ("OpenSSL" if env.get("HAVE_OPENSSL",0) else ("Schannel" if env.get("HAVE_SCHANNEL", 0) else "Disabled")) -print " DNSSD Support: " + ("Bonjour" if env.get("HAVE_BONJOUR") else ("Avahi" if env.get("HAVE_AVAHI") else "Disabled")) -print + parsers.append("Expat") + if env.get("EXPAT_BUNDLED", False) : + parsers.append("(Bundled)") +print(" Projects: " + ' '.join(env["PROJECTS"])) +print("") +print(" XML Parsers: " + ' '.join(parsers)) + +print(" TLS Support: " + (env.get("HAVE_OPENSSL",0) and "OpenSSL" or env.get("HAVE_SECURETRANSPORT",0) and "Secure Transport" or env.get("HAVE_SCHANNEL", 0) and "Schannel" or "Disabled")) +print(" DNSSD Support: " + (env.get("HAVE_BONJOUR") and "Bonjour" or (env.get("HAVE_AVAHI") and "Avahi" or "Disabled"))) +print("") + +if not GetOption("help") and not env.get("HAVE_OPENSSL", 0) and not env.get("HAVE_SCHANNEL", 0) and not env.get("HAVE_SECURETRANSPORT", 0): + print("Error: A working TLS backend is required. Please check the documentation for more information.") + Exit(1) diff --git a/BuildTools/SCons/Tools/AppBundle.py b/BuildTools/SCons/Tools/AppBundle.py index 6a343f6..31cfef1 100644 --- a/BuildTools/SCons/Tools/AppBundle.py +++ b/BuildTools/SCons/Tools/AppBundle.py @@ -1,61 +1,69 @@ import SCons.Util, os.path +from datetime import date def generate(env) : - def createAppBundle(env, bundle, version = "1.0", resources = [], frameworks = [], info = {}, handlesXMPPURIs = False) : - bundleDir = bundle + ".app" - bundleContentsDir = bundleDir + "/Contents" - resourcesDir = bundleContentsDir + "/Resources" - frameworksDir = bundleContentsDir + "/Frameworks" - env.Install(bundleContentsDir + "/MacOS", bundle) - env.WriteVal(bundleContentsDir + "/PkgInfo", env.Value("APPL\77\77\77\77")) + def createAppBundle(env, bundle, version = "1.0", resources = [], frameworks = [], info = {}, handlesXMPPURIs = False, sparklePublicDSAKey = None) : + env.Tool("InstallWithSymLinks", toolpath = ["#/BuildTools/SCons/Tools"]) - infoDict = { - "CFBundleDevelopmentRegion" : "English", - "CFBundleExecutable" : bundle, - "CFBundleIdentifier" : "im.swift." + bundle, - "CFBundleInfoDictionaryVersion" : "6.0", - "CFBundleName" : bundle, - "CFBundlePackageType" : "APPL", - "CFBundleSignature": "\77\77\77\77", - "CFBundleVersion" : version, - "CFBundleIconFile" : bundle, - "NSPrincipalClass" : "NSApplication", - "NSHumanReadableCopyright" : unichr(0xA9) + " 2010 Swift Development Team.\nAll Rights Reserved." 
- } - infoDict.update(info) - - plist = """<?xml version="1.0" encoding="UTF-8"?> - <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> - <plist version="1.0"> - <dict> - """ - for key, value in infoDict.items() : - plist += "<key>" + key + "</key>\n" - plist += "<string>" + value.encode("utf-8") + "</string>\n" - if handlesXMPPURIs : - plist += """<key>CFBundleURLTypes</key> -<array> + bundleDir = bundle + ".app" + bundleContentsDir = bundleDir + "/Contents" + resourcesDir = bundleContentsDir + "/Resources" + frameworksDir = bundleContentsDir + "/Frameworks" + env.Install(bundleContentsDir + "/MacOS", bundle) + env.WriteVal(bundleContentsDir + "/PkgInfo", env.Value("APPL\77\77\77\77")) + + infoDict = { + "CFBundleDevelopmentRegion" : "English", + "CFBundleExecutable" : bundle, + "CFBundleIdentifier" : "im.swift." + bundle, + "CFBundleInfoDictionaryVersion" : "6.0", + "CFBundleName" : bundle, + "CFBundlePackageType" : "APPL", + "CFBundleSignature": "\77\77\77\77", + "CFBundleVersion" : version, + "CFBundleIconFile" : bundle, + "NSPrincipalClass" : "NSApplication", + "NSHumanReadableCopyright" : "(c) 2010-%d Isode Ltd.\nAll Rights Reserved." % date.today().year + } + infoDict.update(info) + + plist = """<?xml version="1.0" encoding="UTF-8"?> + <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> + <plist version="1.0"> <dict> - <key>CFBundleURLName</key> - <string>XMPP URL</string> - <key>CFBundleURLSchemes</key> - <array> - <string>xmpp</string> - </array> - </dict> + """ + for key, value in infoDict.items() : + plist += "<key>" + key + "</key>\n" + plist += "<string>" + value + "</string>\n" + if handlesXMPPURIs : + plist += """<key>CFBundleURLTypes</key> +<array> + <dict> + <key>CFBundleURLName</key> + <string>XMPP URL</string> + <key>CFBundleURLSchemes</key> + <array> + <string>xmpp</string> + </array> + </dict> </array>\n""" - plist += """</dict> - </plist> - """ - env.WriteVal(bundleContentsDir + "/Info.plist", env.Value(plist)) - for (target, resource) in resources.items() : - env.Install(os.path.join(resourcesDir, target), resource) + if sparklePublicDSAKey : + plist += "<key>SUPublicDSAKeyFile</key>" + plist += "<string>" + sparklePublicDSAKey.name + "</string>" + env.Install(resourcesDir, sparklePublicDSAKey) + plist += """</dict> + </plist> + """ + env.WriteVal(bundleContentsDir + "/Info.plist", env.Value(plist)) + + for (target, resource) in resources.items() : + env.Install(os.path.join(resourcesDir, target), resource) - for framework in frameworks : - env.Install(frameworksDir, framework) + for framework in frameworks : + env.InstallWithSymLinks(frameworksDir, framework) - return env.Dir(bundleDir) + return env.Dir(bundleDir) - env.AddMethod(createAppBundle, "AppBundle") + env.AddMethod(createAppBundle, "AppBundle") @@ -63,2 +71,2 @@ def generate(env) : def exists(env) : - return env["PLATFORM"] == "darwin" + return env["PLATFORM"] == "darwin" diff --git a/BuildTools/SCons/Tools/BuildVersion.py b/BuildTools/SCons/Tools/BuildVersion.py index 41e6d8d..7968282 100644 --- a/BuildTools/SCons/Tools/BuildVersion.py +++ b/BuildTools/SCons/Tools/BuildVersion.py @@ -5,4 +5,4 @@ import Version def generate(env) : - def createBuildVersion(env, target, project) : - buildVersion = """#pragma once + def createBuildVersion(env, target, project) : + buildVersion = """#pragma once @@ -11,5 +11,5 @@ static const char* buildVersion = \"%(buildVersion)s\";\n """ % { "buildVersion" : 
Version.getBuildVersion(env.Dir("#").abspath, project) } - env.WriteVal(target, env.Value(buildVersion)) + env.WriteVal(target, env.Value(buildVersion)) - env.AddMethod(createBuildVersion, "BuildVersion") + env.AddMethod(createBuildVersion, "BuildVersion") @@ -17,2 +17,2 @@ static const char* buildVersion = \"%(buildVersion)s\";\n def exists(env) : - return true + return true diff --git a/BuildTools/SCons/Tools/DoxyGen.py b/BuildTools/SCons/Tools/DoxyGen.py index 3fc53c4..66a9111 100644 --- a/BuildTools/SCons/Tools/DoxyGen.py +++ b/BuildTools/SCons/Tools/DoxyGen.py @@ -3,24 +3,24 @@ import SCons.Util, os def generate(env) : - def modify_targets(target, source, env) : - target = [env.File("html/index.html")] - return target, source + def modify_targets(target, source, env) : + target = [env.File("html/index.html")] + return target, source - def generate_actions(source, target, env, for_signature) : - if env.WhereIs("$DOXYGEN") and env.WhereIs("$DOT") : - return [SCons.Action.Action("$DOXYGEN $SOURCE", cmdstr = "$DOXYCOMSTR")] - else : - return [] + def generate_actions(source, target, env, for_signature) : + if env.WhereIs("$DOXYGEN") and env.WhereIs("$DOT") : + return [SCons.Action.Action("$DOXYGEN $SOURCE", cmdstr = "$DOXYCOMSTR")] + else : + return [] - env["DOXYGEN"] = "doxygen" - # FIXME: For some reason, things go incredibly slow (at least on OS X) - # when not doing this. Some environment flag is having an effect on - # this; find out which - env["ENV"] = os.environ - env["DOT"] = "dot" - env["BUILDERS"]["DoxyGen"] = SCons.Builder.Builder( - emitter = modify_targets, - generator = generate_actions, - single_source = True) + env["DOXYGEN"] = "doxygen" + # FIXME: For some reason, things go incredibly slow (at least on OS X) + # when not doing this. Some environment flag is having an effect on + # this; find out which + env["ENV"] = os.environ + env["DOT"] = "dot" + env["BUILDERS"]["DoxyGen"] = SCons.Builder.Builder( + emitter = modify_targets, + generator = generate_actions, + single_source = True) def exists(env) : - return True + return True diff --git a/BuildTools/SCons/Tools/Flags.py b/BuildTools/SCons/Tools/Flags.py index 13fbb32..fe0cfcc 100644 --- a/BuildTools/SCons/Tools/Flags.py +++ b/BuildTools/SCons/Tools/Flags.py @@ -3,8 +3,11 @@ import SCons.Util def generate(env) : - def useFlags(env, flags) : - for flag in flags : - env[flag] = env.get(flag, []) + flags[flag] - env.AddMethod(useFlags, "UseFlags") + def useFlags(env, flags) : + for flag in flags : + if flag in env : + env[flag] = env[flag] + flags[flag] + else : + env[flag] = flags[flag] + env.AddMethod(useFlags, "UseFlags") def exists(env) : - return True + return True diff --git a/BuildTools/SCons/Tools/InstallWithSymLinks.py b/BuildTools/SCons/Tools/InstallWithSymLinks.py new file mode 100644 index 0000000..4955192 --- /dev/null +++ b/BuildTools/SCons/Tools/InstallWithSymLinks.py @@ -0,0 +1,114 @@ +"""SCons.Tool.install + +Tool-specific initialization for the install tool. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+""" + +# +# Copyright (c) 2001 - 2015 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +from SCons.Script import Action, Builder +from SCons.Node import FS +import shutil +import stat +import os +from os import path + +class CopytreeError(EnvironmentError): + pass + +# This is a patched version of shutil.copytree from python 2.5. It +# doesn't fail if the dir exists, which regular copytree does +# (annoyingly). Note the XXX comment in the docstring. +def scons_copytree(src, dst, symlinks=False): + """Recursively copy a directory tree using copy2(). + + The destination directory must not already exist. + If exception(s) occur, an CopytreeError is raised with a list of reasons. + + If the optional symlinks flag is true, symbolic links in the + source tree result in symbolic links in the destination tree; if + it is false, the contents of the files pointed to by symbolic + links are copied. + + XXX Consider this example code rather than the ultimate tool. + + """ + names = os.listdir(src) + # garyo@genarts.com fix: check for dir before making dirs. + if not os.path.exists(dst): + os.makedirs(dst) + errors = [] + for name in names: + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if symlinks and os.path.islink(srcname): + linkto = os.readlink(srcname) + os.symlink(linkto, dstname) + elif os.path.isdir(srcname): + scons_copytree(srcname, dstname, symlinks) + else: + shutil.copy2(srcname, dstname) + # XXX What about devices, sockets etc.? 
+ except (IOError, os.error) as why: + errors.append((srcname, dstname, str(why))) + # catch the CopytreeError from the recursive copytree so that we can + # continue with other files + except CopytreeError as err: + errors.extend(err.args[0]) + try: + shutil.copystat(src, dst) + except WindowsError: + # can't copy file access times on Windows + pass + except OSError as why: + errors.extend((src, dst, str(why))) + if errors: + raise CopytreeError(errors) + + +def symlinkBuilderImpl(target, source, env): + lnk = target[0].abspath + src = source[0].abspath + lnkdir,lnkname = path.split(lnk) + srcdir,srcname = path.split(src) + + scons_copytree(src, os.path.join(lnk, srcname), True) + + return None + +def symlinkBuilderPrinter(target, source, env): + lnk = path.basename(target[0].abspath) + src = path.basename(source[0].abspath) + return 'INSTALL PRESERVING SYMLINKS ' + target[0].get_internal_path() + +def generate(env) : + symlinkBuilder = Builder(action = Action(symlinkBuilderImpl, symlinkBuilderPrinter), target_factory = FS.Entry, source_factory = FS.Entry) + env.Append(BUILDERS = {'InstallWithSymLinks' : symlinkBuilder}) + +def exists(env) : + return True
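# (Usage sketch, not part of the patch: the InstallWithSymLinks builder
#  registered above copies a directory tree while recreating symbolic
#  links instead of dereferencing them, which plain Install would do.
#  Assuming an SConstruct with this tool on its toolpath; the bundle and
#  staging names below are placeholders.)
#
#   env = Environment(tools = ["default", "InstallWithSymLinks"],
#                     toolpath = ["BuildTools/SCons/Tools"])
#   # Per symlinkBuilderImpl above, the source directory is recreated
#   # inside the target directory (here: staging/Swift.app), with any
#   # symbolic links preserved as links.
#   env.InstallWithSymLinks("staging", "Swift.app")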
\ No newline at end of file diff --git a/BuildTools/SCons/Tools/Nib.py b/BuildTools/SCons/Tools/Nib.py index ccfd884..cf5b0dc 100644 --- a/BuildTools/SCons/Tools/Nib.py +++ b/BuildTools/SCons/Tools/Nib.py @@ -3,10 +3,10 @@ import SCons.Util def generate(env) : - env["IBTOOL"] = "ibtool" - env["BUILDERS"]["Nib"] = SCons.Builder.Builder( - action = SCons.Action.Action("$IBTOOL --errors --warnings --notices --output-format human-readable-text --compile $TARGET $SOURCE", cmdstr = "$NIBCOMSTR"), - suffix = ".nib", - src_suffix = ".xib", - single_source = True) + env["IBTOOL"] = "ibtool" + env["BUILDERS"]["Nib"] = SCons.Builder.Builder( + action = SCons.Action.Action("$IBTOOL --errors --warnings --notices --output-format human-readable-text --compile $TARGET $SOURCE", cmdstr = "$NIBCOMSTR"), + suffix = ".nib", + src_suffix = ".xib", + single_source = True) def exists(env) : - return env["PLATFORM"] == "darwin" + return env["PLATFORM"] == "darwin" diff --git a/BuildTools/SCons/Tools/ReplacePragmaOnce.py b/BuildTools/SCons/Tools/ReplacePragmaOnce.py index 466c31e..cb49bbb 100644 --- a/BuildTools/SCons/Tools/ReplacePragmaOnce.py +++ b/BuildTools/SCons/Tools/ReplacePragmaOnce.py @@ -3,23 +3,23 @@ import SCons.Util, os.path def generate(env) : - root = env.Dir("#").abspath - def relpath(path, start) : - i = len(os.path.commonprefix([path, start])) - return path[i+1:] + root = env.Dir("#").abspath + def relpath(path, start) : + i = len(os.path.commonprefix([path, start])) + return path[i+1:] - def replacePragmaOnce(env, target, source) : - guard = relpath(source[0].abspath, root).replace("/", "_").replace(".", "_").upper() - data = source[0].get_contents() - f = open(str(target[0]), 'wb') - if "#pragma once" in data : - f.write(data.replace("#pragma once", "#ifndef %(guard)s\n#define %(guard)s" % {"guard": guard})) - f.write("\n#endif\n") - else : - f.write(data) - f.close() + def replacePragmaOnce(env, target, source) : + guard = relpath(source[0].abspath, root).replace("/", "_").replace(".", "_").upper() + data = source[0].get_contents() + f = open(str(target[0]), 'wb') + if "#pragma once" in data : + f.write(data.replace("#pragma once", "#ifndef %(guard)s\n#define %(guard)s" % {"guard": guard})) + f.write("\n#endif\n") + else : + f.write(data) + f.close() - env["BUILDERS"]["ReplacePragmaOnce"] = SCons.Builder.Builder( - action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"), - single_source = True) + env["BUILDERS"]["ReplacePragmaOnce"] = SCons.Builder.Builder( + action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"), + single_source = True) def exists(env) : - return True + return True diff --git a/BuildTools/SCons/Tools/SLOCCount.py b/BuildTools/SCons/Tools/SLOCCount.py index abf4a3c..be31672 100644 --- a/BuildTools/SCons/Tools/SLOCCount.py +++ b/BuildTools/SCons/Tools/SLOCCount.py @@ -3,15 +3,15 @@ import SCons.Util, os.path, os def generate(env) : - def createSLOCCount(env, source) : - myenv = env.Clone() - myenv["ENV"]["HOME"] = os.environ["HOME"] - source = myenv.Dir(source) - target = myenv.File("#/" + source.path + ".sloccount") - # FIXME: There's probably a better way to force building the .sc - if os.path.exists(target.abspath) : - os.unlink(target.abspath) - return myenv.Command(target, source, [SCons.Action.Action("sloccount --duplicates --wide --details " + source.path + " | grep -v qrc_ > $TARGET", cmdstr = "$GENCOMSTR")]) + def createSLOCCount(env, source) : + myenv = env.Clone() + myenv["ENV"]["HOME"] = os.environ["HOME"] + source = myenv.Dir(source) 
+ target = myenv.File("#/" + source.path + ".sloccount") + # FIXME: There's probably a better way to force building the .sc + if os.path.exists(target.abspath) : + os.unlink(target.abspath) + return myenv.Command(target, source, [SCons.Action.Action("sloccount --duplicates --wide --details " + source.path + " | grep -v qrc_ > $TARGET", cmdstr = "$GENCOMSTR")]) - env.AddMethod(createSLOCCount, "SLOCCount") + env.AddMethod(createSLOCCount, "SLOCCount") def exists(env) : - return True + return True diff --git a/BuildTools/SCons/Tools/Test.py b/BuildTools/SCons/Tools/Test.py index c52f448..2106af4 100644 --- a/BuildTools/SCons/Tools/Test.py +++ b/BuildTools/SCons/Tools/Test.py @@ -3,37 +3,51 @@ import SCons.Util, os def generate(env) : - def registerTest(env, target, type = "unit", is_checker = False) : - if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type : - cmd = target[0].abspath if SCons.Util.is_List(target) else target.abspath - params = "" - - # Special support for unittest checker - if is_checker and env.get("checker_report", False) : - params = " --xml > " + os.path.join(target[0].dir.path, "checker-report.xml") - - ignore_prefix = "" - if env.get("TEST_IGNORE_RESULT", False) : - ignore_prefix = "-" - - # Set environment variables for running the test - test_env = env.Clone() - for i in ["HOME", "USERPROFILE", "APPDATA"]: - if os.environ.get(i, "") : - test_env["ENV"][i] = os.environ[i] - if test_env["PLATFORM"] == "darwin" : - test_env["ENV"]["DYLD_FALLBACK_LIBRARY_PATH"] = ":".join(map(lambda x : str(x), test_env.get("LIBPATH", []))) - elif test_env["PLATFORM"] == "win32" : - test_env["ENV"]["PATH"] = ";".join(map(lambda x : str(x), test_env.get("LIBRUNPATH", []))) + ";" + test_env["ENV"]["PATH"] - - # Run the test - test_env.Command("**dummy**", target, - SCons.Action.Action(ignore_prefix + env.get("TEST_RUNNER", "") + cmd + " " + params, cmdstr = "$TESTCOMSTR")) - - def registerScriptTests(env, scripts, name, type) : - if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type : - pass - - env.AddMethod(registerTest, "Test") - env.AddMethod(registerScriptTests, "ScriptTests") + def registerTest(env, target, type = "unit", is_checker = False) : + if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type : + if SCons.Util.is_List(target) : + cmd = target[0].abspath + else : + cmd = target.abspath + params = "" + + # Special support for unittest checker + if is_checker and env.get("checker_report", False) : + params = " --xml" + + ignore_prefix = "" + if env.get("TEST_IGNORE_RESULT", False) : + ignore_prefix = "-" + + # Set environment variables for running the test + test_env = env.Clone() + for i in ["HOME", "PATH", "USERPROFILE", "APPDATA", "GTEST_FILTER", "ASAN_OPTIONS", "LSAN_OPTIONS", "SWIFT_NETWORK_TEST_IPV4", "SWIFT_NETWORK_TEST_IPV6"]: + if os.environ.get(i, "") : + test_env["ENV"][i] = os.environ[i] + if env["target"] == "android" : + test_env["ENV"]["PATH"] = env["android_sdk_bin"] + ";" + test_env["ENV"]["PATH"] + else : + if test_env["PLATFORM"] == "darwin" : + test_env["ENV"]["DYLD_FALLBACK_LIBRARY_PATH"] = ":".join(map(lambda x : str(x), test_env.get("LIBPATH", []))) + elif test_env["PLATFORM"] == "win32" : + test_env["ENV"]["PATH"] = ";".join(map(lambda x : str(x), test_env.get("LIBRUNPATH", []))) + ";" + test_env["ENV"]["PATH"] + + + # Run the test + if env["target"] == "android": + exec_name = os.path.basename(cmd) + test_env.Command("**dummy**", target, SCons.Action.Action( + ["adb shell mount -o rw,remount /system", + "adb push " + cmd + " /system/bin/" + 
exec_name, + "adb shell SWIFT_CLIENTTEST_JID=\"" + os.getenv("SWIFT_CLIENTTEST_JID") + "\" SWIFT_CLIENTTEST_PASS=\"" + os.getenv("SWIFT_CLIENTTEST_PASS") + "\" " + env.get("TEST_RUNNER", "") + "/system/bin/" + exec_name], cmdstr = "$TESTCOMSTR")) + else : + test_env.Command("**dummy**", target, + SCons.Action.Action(ignore_prefix + env.get("TEST_RUNNER", "") + cmd + " " + params, cmdstr = "$TESTCOMSTR")) + + def registerScriptTests(env, scripts, name, type) : + if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type : + pass + + env.AddMethod(registerTest, "Test") + env.AddMethod(registerScriptTests, "ScriptTests") def exists(env) : - return True + return True diff --git a/BuildTools/SCons/Tools/WindowsBundle.py b/BuildTools/SCons/Tools/WindowsBundle.py index c3c77aa..9781deb 100644 --- a/BuildTools/SCons/Tools/WindowsBundle.py +++ b/BuildTools/SCons/Tools/WindowsBundle.py @@ -1,25 +1,138 @@ import SCons.Util, os +import subprocess +import re +import shutil + +def which(program_name): + if hasattr(shutil, "which"): + return shutil.which(program_name) + else: + path = os.getenv('PATH') + for p in path.split(os.path.pathsep): + p = os.path.join(p,program_name) + if os.path.exists(p) and os.access(p,os.X_OK): + return p def generate(env) : - def createWindowsBundle(env, bundle, resources = {}, qtimageformats = [], qtlibs = []) : - all_files = [] - all_files += env.Install(bundle, bundle + ".exe") - for lib in qtlibs : - all_files += env.Install(bundle, os.path.join(env["QTDIR"], "bin", lib + ".dll")) - all_files += env.Install(os.path.join(bundle, "imageformats"), [os.path.join(env["QTDIR"], "plugins", "imageformats", "q" + codec + "4.dll") for codec in qtimageformats]) - - for dir, resourceFiles in resources.items() : - for resource in resourceFiles : - e = env.Entry(resource) - if e.isdir() : - for subresource in env.Glob(str(e) + "/*") : - all_files += env.Install(os.path.join(bundle, dir, e.name), subresource) - else : - all_files += env.Install(os.path.join(bundle, dir), resource) - return all_files - - env.AddMethod(createWindowsBundle, "WindowsBundle") + def captureWinDeployQtMapping(release = True): + p = False -def exists(env) : - return env["PLATFORM"] == "win32" + qt_bin_folder = "" + if "QTDIR" in env: + qt_bin_folder = env["QTDIR"] + "\\bin;" + + environ = {"PATH": qt_bin_folder + os.getenv("PATH"), "TEMP": os.getenv("TEMP"), "TMP": os.getenv("TMP")} + + if release: + p = subprocess.Popen(['windeployqt', '--release', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ) + else: + p = subprocess.Popen(['windeployqt', '--debug', '--dry-run', '--list', 'mapping', 'Swift.exe'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ) + + if p: + stdout, stderr = p.communicate() + + mappings = [] + + regex = re.compile(r'"([^\"]*)" "([^\"]*)"') + + if SCons.Util.PY3: + matches = re.findall(regex, stdout.decode('utf8')) + else: + matches = re.findall(regex, stdout) + + for match in matches: + mappings.append(match) + return mappings + else: + return False + + def createWindowsBundleManual(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') : + all_files = [] + all_files += env.Install(bundle, bundle + ".exe") + for lib in qtlibs : + all_files += env.Install(bundle, os.path.join(env["QTDIR"], "bin", lib + ".dll")) + plugins_suffix = '4' + if qtversion == '5' : + plugins_suffix = '' + for plugin_type in qtplugins: + all_files += env.Install(os.path.join(bundle, plugin_type), [os.path.join(env["QTDIR"], 
"plugins", plugin_type, "q" + plugin + plugins_suffix + ".dll") for plugin in qtplugins[plugin_type]]) + for dir, resourceFiles in resources.items() : + for resource in resourceFiles : + e = env.Entry(resource) + if e.isdir() : + for subresource in env.Glob(str(e) + "/*") : + all_files += env.Install(os.path.join(bundle, dir, e.name), subresource) + else : + all_files += env.Install(os.path.join(bundle, dir), resource) + return all_files + + # This version of uses windeployqt tool + def createWindowsBundleWithWinDeployQt(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') : + assert(qtversion == '5') + all_files = [] + # add swift executable + all_files += env.Install(bundle, bundle + ".exe") + + # adding resources (swift sounds/images/translations) + for dir, resourceFiles in resources.items() : + for resource in resourceFiles : + e = env.Entry(resource) + if e.isdir() : + for subresource in env.Glob(str(e) + "/*") : + all_files += env.Install(os.path.join(bundle, dir, e.name), subresource) + else : + all_files += env.Install(os.path.join(bundle, dir), resource) + + qtmappings = captureWinDeployQtMapping() + assert(qtmappings) + + # Add QtWebKit dependencies. + # This is needed as QtWebKit since 5.6 is developed and released seperately + # of Qt and windeployqt does not know about its dependencies anymore. + for map_from, map_to in qtmappings: + if map_to == "Qt5WebKit.dll": + # hidden Qt5WebKit dependencies + hidden_dependencies = ["libxml2.dll", "libxslt.dll"] + for dependency in hidden_dependencies: + dependency_from_path = os.path.join(env["QTDIR"], "bin", dependency) + if os.path.isfile(dependency_from_path): + qtmappings.append((dependency_from_path, dependency)) + break + + # handle core DLLs + qt_corelib_regex = re.compile(r".*bin.*\\(.*)\.dll") + + for qtlib in qtlibs: + if qtlib.startswith("Qt5"): + (src_path, target_path) = next(((src_path, target_path) for (src_path, target_path) in qtmappings if qt_corelib_regex.match(src_path) and qt_corelib_regex.match(src_path).group(1) == qtlib), (None, None)) + if src_path != None: + all_files += env.Install(bundle, src_path) + + # handle core dependencies + for (src_path, target_path) in qtmappings: + if qt_corelib_regex.match(src_path) and not qt_corelib_regex.match(src_path).group(1).startswith("Qt5"): + all_files += env.Install(bundle, src_path) + + # handle plugins + qt_plugin_regex = re.compile(r".*plugins.*\\(.*)\\(.*)\.dll") + for (src_path, target_path) in qtmappings: + if qt_plugin_regex.match(src_path): + plugin_folder, filename = qt_plugin_regex.match(src_path).groups() + try: + if plugin_folder in ["audio"] or filename[1:] in qtplugins[plugin_folder]: + all_files += env.Install(os.path.join(bundle, plugin_folder), src_path) + except: + pass + return all_files + + def createWindowsBundle(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4'): + if which("windeployqt.exe"): + return createWindowsBundleWithWinDeployQt(env, bundle, resources, qtplugins, qtlibs, qtversion) + else: + return createWindowsBundleManual(env, bundle, resources, qtplugins, qtlibs, qtversion) + + env.AddMethod(createWindowsBundle, "WindowsBundle") + +def exists(env) : + return env["PLATFORM"] == "win32" diff --git a/BuildTools/SCons/Tools/WriteVal.py b/BuildTools/SCons/Tools/WriteVal.py index 0a1e1ad..ad77a99 100644 --- a/BuildTools/SCons/Tools/WriteVal.py +++ b/BuildTools/SCons/Tools/WriteVal.py @@ -3,13 +3,13 @@ import SCons.Util def generate(env) : - def replacePragmaOnce(env, target, source) : - f = 
open(str(target[0]), 'wb') - f.write(source[0].get_contents()) - f.close() + def replacePragmaOnce(env, target, source) : + f = open(str(target[0]), 'wb') + f.write(source[0].get_contents()) + f.close() - env["BUILDERS"]["WriteVal"] = SCons.Builder.Builder( - action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"), - single_source = True) + env["BUILDERS"]["WriteVal"] = SCons.Builder.Builder( + action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"), + single_source = True) def exists(env) : - return True + return True diff --git a/BuildTools/SCons/Tools/nsis.py b/BuildTools/SCons/Tools/nsis.py index f5b2905..393beb8 100644 --- a/BuildTools/SCons/Tools/nsis.py +++ b/BuildTools/SCons/Tools/nsis.py @@ -8,4 +8,4 @@ nsisIncludes_re = re.compile(r'^\s*!include (translations-\S*)', re.M) TODO: - - Extract the target from the nsis file - - When a target is provided use the output function + - Extract the target from the nsis file + - When a target is provided use the output function """ @@ -13,27 +13,27 @@ TODO: def generate(env) : - """Add Builders and construction variables for qt to an Environment.""" - Builder = SCons.Builder.Builder + """Add Builders and construction variables for qt to an Environment.""" + Builder = SCons.Builder.Builder - env['NSIS_MAKENSIS'] = 'makensis' - env['NSIS_OPTIONS'] = ["/V2"] - def winToLocalReformat(path) : - return os.path.join(*path.split("\\")) - def scanNsisContent(node, env, path, arg): - contents = node.get_contents() - includes = nsisFiles_re.findall(contents) + nsisIncludes_re.findall(contents) - includes = [ winToLocalReformat(include) for include in includes ] - return filter(lambda x: x.rfind('*')==-1, includes) - nsisscanner = env.Scanner(name = 'nsisfile', - function = scanNsisContent, - argument = None, - skeys = ['.nsi']) - nsisbuilder = Builder( - action = SCons.Action.Action('$NSIS_MAKENSIS $NSIS_OPTIONS $SOURCE', cmdstr = '$NSISCOMSTR'), - source_scanner = nsisscanner, - single_source = True - ) - env.Append( BUILDERS={'Nsis' : nsisbuilder} ) + env['NSIS_MAKENSIS'] = 'makensis' + env['NSIS_OPTIONS'] = ["/V2"] + def winToLocalReformat(path) : + return os.path.join(*path.split("\\")) + def scanNsisContent(node, env, path, arg): + contents = node.get_contents() + includes = nsisFiles_re.findall(contents) + nsisIncludes_re.findall(contents) + includes = [ winToLocalReformat(include) for include in includes ] + return filter(lambda x: x.rfind('*')==-1, includes) + nsisscanner = env.Scanner(name = 'nsisfile', + function = scanNsisContent, + argument = None, + skeys = ['.nsi']) + nsisbuilder = Builder( + action = SCons.Action.Action('$NSIS_MAKENSIS $NSIS_OPTIONS $SOURCE', cmdstr = '$NSISCOMSTR'), + source_scanner = nsisscanner, + single_source = True + ) + env.Append( BUILDERS={'Nsis' : nsisbuilder} ) def exists(env) : - return True + return True diff --git a/BuildTools/SCons/Tools/qt4.py b/BuildTools/SCons/Tools/qt4.py index 671fd08..372b261 100644 --- a/BuildTools/SCons/Tools/qt4.py +++ b/BuildTools/SCons/Tools/qt4.py @@ -38,2 +38,4 @@ import os.path import re +import subprocess +from string import Template @@ -45,11 +47,12 @@ import SCons.Tool import SCons.Util +import SCons.SConf class ToolQtWarning(SCons.Warnings.Warning): - pass + pass class GeneratedMocFileNotIncluded(ToolQtWarning): - pass + pass class QtdirNotFound(ToolQtWarning): - pass + pass @@ -60,3 +63,3 @@ qrcinclude_re = re.compile(r'<file (alias=\"[^\"]*\")?>([^<]*)</file>', re.M) def transformToWinePath(path) : - return os.popen('winepath -w 
"%s"'%path).read().strip().replace('\\','/') + return os.popen('winepath -w "%s"'%path).read().strip().replace('\\','/') @@ -64,3 +67,3 @@ header_extensions = [".h", ".hxx", ".hpp", ".hh"] if SCons.Util.case_sensitive_suffixes('.h', '.H'): - header_extensions.append('.H') + header_extensions.append('.H') # TODO: The following two lines will work when integrated back to SCons @@ -72,120 +75,122 @@ cxx_suffixes = [".c", ".cxx", ".cpp", ".cc"] def checkMocIncluded(target, source, env): - moc = target[0] - cpp = source[0] - # looks like cpp.includes is cleared before the build stage :-( - # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/ - path = SCons.Defaults.CScan.path_function(env, moc.cwd) - includes = SCons.Defaults.CScan(cpp, env, path) - if not moc in includes: - SCons.Warnings.warn( - GeneratedMocFileNotIncluded, - "Generated moc file '%s' is not included by '%s'" % - (str(moc), str(cpp))) + moc = target[0] + cpp = source[0] + # looks like cpp.includes is cleared before the build stage :-( + # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/ + path = SCons.Defaults.CScan.path_function(env, moc.cwd) + includes = SCons.Defaults.CScan(cpp, env, path) + if not moc in includes: + SCons.Warnings.warn( + GeneratedMocFileNotIncluded, + "Generated moc file '%s' is not included by '%s'" % + (str(moc), str(cpp))) def find_file(filename, paths, node_factory): - for dir in paths: - node = node_factory(filename, dir) - if node.rexists(): - return node - return None + for dir in paths: + node = node_factory(filename, dir) + if node.rexists(): + return node + return None class _Automoc: - """ - Callable class, which works as an emitter for Programs, SharedLibraries and - StaticLibraries. - """ - - def __init__(self, objBuilderName): - self.objBuilderName = objBuilderName - - def __call__(self, target, source, env): - """ - Smart autoscan function. Gets the list of objects for the Program - or Lib. Adds objects and builders for the special qt files. - """ - try: - if int(env.subst('$QT4_AUTOSCAN')) == 0: - return target, source - except ValueError: - pass - try: - debug = int(env.subst('$QT4_DEBUG')) - except ValueError: - debug = 0 - - # some shortcuts used in the scanner - splitext = SCons.Util.splitext - objBuilder = getattr(env, self.objBuilderName) - - # some regular expressions: - # Q_OBJECT detection - q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]') - # cxx and c comment 'eater' - #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)') - # CW: something must be wrong with the regexp. See also bug #998222 - # CURRENTLY THERE IS NO TEST CASE FOR THAT - - # The following is kind of hacky to get builders working properly (FIXME) - objBuilderEnv = objBuilder.env - objBuilder.env = env - mocBuilderEnv = env.Moc4.env - env.Moc4.env = env - - # make a deep copy for the result; MocH objects will be appended - out_sources = source[:] - - for obj in source: - if isinstance(obj,basestring): # big kludge! - print "scons: qt4: '%s' MAYBE USING AN OLD SCONS VERSION AND NOT CONVERTED TO 'File'. Discarded." % str(obj) - continue - if not obj.has_builder(): - # binary obj file provided - if debug: - print "scons: qt: '%s' seems to be a binary. Discarded." % str(obj) - continue - cpp = obj.sources[0] - if not splitext(str(cpp))[1] in cxx_suffixes: - if debug: - print "scons: qt: '%s' is no cxx file. Discarded." 
% str(cpp) - # c or fortran source - continue - #cpp_contents = comment.sub('', cpp.get_contents()) - try: - cpp_contents = cpp.get_contents() - except: continue # may be an still not generated source - h=None - for h_ext in header_extensions: - # try to find the header file in the corresponding source - # directory - hname = splitext(cpp.name)[0] + h_ext - h = find_file(hname, (cpp.get_dir(),), env.File) - if h: - if debug: - print "scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp)) - #h_contents = comment.sub('', h.get_contents()) - h_contents = h.get_contents() - break - if not h and debug: - print "scons: qt: no header for '%s'." % (str(cpp)) - if h and q_object_search.search(h_contents): - # h file with the Q_OBJECT macro found -> add moc_cpp - moc_cpp = env.Moc4(h) - moc_o = objBuilder(moc_cpp) - out_sources.append(moc_o) - #moc_cpp.target_scanner = SCons.Defaults.CScan - if debug: - print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp)) - if cpp and q_object_search.search(cpp_contents): - # cpp file with Q_OBJECT macro found -> add moc - # (to be included in cpp) - moc = env.Moc4(cpp) - env.Ignore(moc, moc) - if debug: - print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc)) - #moc.source_scanner = SCons.Defaults.CScan - # restore the original env attributes (FIXME) - objBuilder.env = objBuilderEnv - env.Moc4.env = mocBuilderEnv - - return (target, out_sources) + """ + Callable class, which works as an emitter for Programs, SharedLibraries and + StaticLibraries. + """ + + def __init__(self, objBuilderName): + self.objBuilderName = objBuilderName + + def __call__(self, target, source, env): + """ + Smart autoscan function. Gets the list of objects for the Program + or Lib. Adds objects and builders for the special qt files. + """ + try: + if int(env.subst('$QT4_AUTOSCAN')) == 0: + return target, source + except ValueError: + pass + try: + debug = int(env.subst('$QT4_DEBUG')) + except ValueError: + debug = 0 + + # some shortcuts used in the scanner + splitext = SCons.Util.splitext + objBuilder = getattr(env, self.objBuilderName) + + # some regular expressions: + # Q_OBJECT detection + q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]') + # cxx and c comment 'eater' + #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)') + # CW: something must be wrong with the regexp. See also bug #998222 + # CURRENTLY THERE IS NO TEST CASE FOR THAT + + # The following is kind of hacky to get builders working properly (FIXME) + objBuilderEnv = objBuilder.env + objBuilder.env = env.Clone() + if os.path.basename(objBuilder.env ["CXX"]).startswith(("gcc", "clang")): + objBuilder.env.Append(CXXFLAGS = "-w") + mocBuilderEnv = env.Moc4.env + env.Moc4.env = env + + # make a deep copy for the result; MocH objects will be appended + out_sources = source[:] + + for obj in source: + if isinstance(obj,str): # big kludge! + print("scons: qt4: '%s' MAYBE USING AN OLD SCONS VERSION AND NOT CONVERTED TO 'File'. Discarded." % str(obj)) + continue + if not obj.has_builder(): + # binary obj file provided + if debug: + print("scons: qt: '%s' seems to be a binary. Discarded." % str(obj)) + continue + cpp = obj.sources[0] + if not splitext(str(cpp))[1] in cxx_suffixes: + if debug: + print("scons: qt: '%s' is no cxx file. Discarded." 
% str(cpp) ) + # c or fortran source + continue + #cpp_contents = comment.sub('', cpp.get_contents()) + try: + cpp_contents = str(cpp.get_contents()) + except: continue # may be an still not generated source + h=None + for h_ext in header_extensions: + # try to find the header file in the corresponding source + # directory + hname = splitext(cpp.name)[0] + h_ext + h = find_file(hname, (cpp.get_dir(),), env.File) + if h: + if debug: + print("scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))) + #h_contents = comment.sub('', h.get_contents()) + h_contents = str(h.get_contents()) + break + if not h and debug: + print("scons: qt: no header for '%s'." % (str(cpp))) + if h and q_object_search.search(h_contents): + # h file with the Q_OBJECT macro found -> add moc_cpp + moc_cpp = env.Moc4(h) + moc_o = objBuilder(moc_cpp) + out_sources.append(moc_o) + #moc_cpp.target_scanner = SCons.Defaults.CScan + if debug: + print("scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))) + if cpp and q_object_search.search(cpp_contents): + # cpp file with Q_OBJECT macro found -> add moc + # (to be included in cpp) + moc = env.Moc4(cpp) + env.Ignore(moc, moc) + if debug: + print("scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))) + #moc.source_scanner = SCons.Defaults.CScan + # restore the original env attributes (FIXME) + objBuilder.env = objBuilderEnv + env.Moc4.env = mocBuilderEnv + + return (target, out_sources) @@ -195,326 +200,419 @@ AutomocStatic = _Automoc('StaticObject') def _detect(env): - """Not really safe, but fast method to detect the QT library""" - try: return env['QTDIR'] - except KeyError: pass - - try: return os.environ['QTDIR'] - except KeyError: pass - - moc = env.WhereIs('moc-qt4') or env.WhereIs('moc4') or env.WhereIs('moc') - if moc: - import sys - if sys.platform == "darwin" : - return "" - QTDIR = os.path.dirname(os.path.dirname(moc)) - SCons.Warnings.warn( - QtdirNotFound, - "QTDIR variable is not defined, using moc executable as a hint (QTDIR=%s)" % QTDIR) - return QTDIR - - raise SCons.Errors.StopError( - QtdirNotFound, - "Could not detect Qt 4 installation") - return None + """Not really safe, but fast method to detect the QT library""" + if 'QTDIR' in env : + return env['QTDIR'] + + if 'QTDIR' in os.environ : + return os.environ['QTDIR'] + + moc = None + if env["qt5"]: + moc = env.WhereIs('moc-qt5') or env.WhereIs('moc5') or env.WhereIs('moc') + else : + moc = env.WhereIs('moc-qt4') or env.WhereIs('moc4') or env.WhereIs('moc') + if moc: + # Test whether the moc command we found is real, or whether it is just the qtchooser dummy. 
+ p = subprocess.Popen([moc, "-v"], shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env["ENV"]) + p.communicate() + if p.returncode == 0: + import sys + if sys.platform == "darwin" : + return "" + QTDIR = os.path.dirname(os.path.dirname(moc)) + return QTDIR + + raise SCons.Errors.StopError( + QtdirNotFound, + "Could not detect Qt 4 installation") + return None def generate(env): - """Add Builders and construction variables for qt to an Environment.""" - - def locateQt4Command(env, command, qtdir) : - if len(qtdir) == 0 : - qtdir = "/usr" - suffixes = [ - '-qt4', - '-qt4.exe', - '4', - '4.exe', - '', - '.exe', - ] - triedPaths = [] - for suffix in suffixes : - fullpath = os.path.join(qtdir,'bin',command + suffix) - if os.access(fullpath, os.X_OK) : - return fullpath - triedPaths.append(fullpath) - - fullpath = env.Detect([command+'-qt4', command+'4', command]) - if not (fullpath is None) : return fullpath - - raise Exception("Qt4 command '" + command + "' not found. Tried: " + ', '.join(triedPaths)) - - - CLVar = SCons.Util.CLVar - Action = SCons.Action.Action - Builder = SCons.Builder.Builder - splitext = SCons.Util.splitext - - env['QTDIR'] = _detect(env) - # TODO: 'Replace' should be 'SetDefault' -# env.SetDefault( - env.Replace( - QTDIR = _detect(env), - # TODO: This is not reliable to QTDIR value changes but needed in order to support '-qt4' variants - QT4_MOC = locateQt4Command(env,'moc', env['QTDIR']), - QT4_UIC = locateQt4Command(env,'uic', env['QTDIR']), - QT4_RCC = locateQt4Command(env,'rcc', env['QTDIR']), - QT4_LUPDATE = locateQt4Command(env,'lupdate', env['QTDIR']), - QT4_LRELEASE = locateQt4Command(env,'lrelease', env['QTDIR']), - QT4_LIB = '', # KLUDGE to avoid linking qt3 library - - QT4_AUTOSCAN = 1, # Should the qt tool try to figure out, which sources are to be moc'ed? - - # Some QT specific flags. I don't expect someone wants to - # manipulate those ... - QT4_UICFLAGS = CLVar(''), - QT4_MOCFROMHFLAGS = CLVar(''), - QT4_MOCFROMCXXFLAGS = CLVar('-i'), - QT4_QRCFLAGS = '', - - # suffixes/prefixes for the headers / sources to generate - QT4_UISUFFIX = '.ui', - QT4_UICDECLPREFIX = 'ui_', - QT4_UICDECLSUFFIX = '.h', - QT4_MOCHPREFIX = 'moc_', - QT4_MOCHSUFFIX = '$CXXFILESUFFIX', - QT4_MOCCXXPREFIX = '', - QT4_MOCCXXSUFFIX = '.moc', - QT4_QRCSUFFIX = '.qrc', - QT4_QRCCXXSUFFIX = '$CXXFILESUFFIX', - QT4_QRCCXXPREFIX = 'qrc_', - QT4_MOCCPPPATH = [], - QT4_MOCINCFLAGS = '$( ${_concat("-I", QT4_MOCCPPPATH, INCSUFFIX, __env__, RDirs)} $)', - - # Commands for the qt support ... 
- QT4_UICCOM = '$QT4_UIC $QT4_UICFLAGS -o $TARGET $SOURCE', - # FIXME: The -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED flag is a hack to work - # around an issue in Qt - # See https://bugreports.qt-project.org/browse/QTBUG-22829 - QT4_MOCFROMHCOM = '$QT4_MOC -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED $QT4_MOCFROMHFLAGS $QT4_MOCINCFLAGS -o $TARGET $SOURCE', - QT4_MOCFROMCXXCOM = [ - '$QT4_MOC -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED $QT4_MOCFROMCXXFLAGS $QT4_MOCINCFLAGS -o $TARGET $SOURCE', - Action(checkMocIncluded,None)], - QT4_LUPDATECOM = '$QT4_LUPDATE $SOURCE -ts $TARGET', - QT4_LRELEASECOM = '$QT4_LRELEASE -silent $SOURCE -qm $TARGET', - QT4_RCCCOM = '$QT4_RCC $QT4_QRCFLAGS -name $SOURCE $SOURCE -o $TARGET', - ) - if len(env["QTDIR"]) > 0 : - env.Replace(QT4_LIBPATH = os.path.join('$QTDIR', 'lib')) - - # Translation builder - tsbuilder = Builder( - action = SCons.Action.Action('$QT4_LUPDATECOM'), #,'$QT4_LUPDATECOMSTR'), - multi=1 - ) - env.Append( BUILDERS = { 'Ts': tsbuilder } ) - qmbuilder = Builder( - action = SCons.Action.Action('$QT4_LRELEASECOM', cmdstr = '$QT4_LRELEASECOMSTR'), - src_suffix = '.ts', - suffix = '.qm', - single_source = True - ) - env.Append( BUILDERS = { 'Qm': qmbuilder } ) - - # Resource builder - def scanResources(node, env, path, arg): - # I've being careful on providing names relative to the qrc file - # If that was not needed that code could be simplified a lot - def recursiveFiles(basepath, path) : - result = [] - for item in os.listdir(os.path.join(basepath, path)) : - itemPath = os.path.join(path, item) - if os.path.isdir(os.path.join(basepath, itemPath)) : - result += recursiveFiles(basepath, itemPath) - else: - result.append(itemPath) - return result - contents = node.get_contents() - includes = [included[1] for included in qrcinclude_re.findall(contents)] - qrcpath = os.path.dirname(node.path) - dirs = [included for included in includes if os.path.isdir(os.path.join(qrcpath,included))] - # dirs need to include files recursively - for dir in dirs : - includes.remove(dir) - includes+=recursiveFiles(qrcpath,dir) - return includes - qrcscanner = SCons.Scanner.Scanner(name = 'qrcfile', - function = scanResources, - argument = None, - skeys = ['.qrc']) - qrcbuilder = Builder( - action = SCons.Action.Action('$QT4_RCCCOM', cmdstr = '$QT4_RCCCOMSTR'), - source_scanner = qrcscanner, - src_suffix = '$QT4_QRCSUFFIX', - suffix = '$QT4_QRCCXXSUFFIX', - prefix = '$QT4_QRCCXXPREFIX', - single_source = True - ) - env.Append( BUILDERS = { 'Qrc': qrcbuilder } ) - - # Interface builder - uic4builder = Builder( - action = SCons.Action.Action('$QT4_UICCOM', cmdstr = '$QT4_UICCOMSTR'), - src_suffix='$QT4_UISUFFIX', - suffix='$QT4_UICDECLSUFFIX', - prefix='$QT4_UICDECLPREFIX', - single_source = True - #TODO: Consider the uiscanner on new scons version - ) - env['BUILDERS']['Uic4'] = uic4builder - - # Metaobject builder - mocBld = Builder(action={}, prefix={}, suffix={}) - for h in header_extensions: - act = SCons.Action.Action('$QT4_MOCFROMHCOM', cmdstr = '$QT4_MOCFROMHCOMSTR') - mocBld.add_action(h, act) - mocBld.prefix[h] = '$QT4_MOCHPREFIX' - mocBld.suffix[h] = '$QT4_MOCHSUFFIX' - for cxx in cxx_suffixes: - act = SCons.Action.Action('$QT4_MOCFROMCXXCOM', cmdstr = '$QT4_MOCFROMCXXCOMSTR') - mocBld.add_action(cxx, act) - mocBld.prefix[cxx] = '$QT4_MOCCXXPREFIX' - mocBld.suffix[cxx] = '$QT4_MOCCXXSUFFIX' - env['BUILDERS']['Moc4'] = mocBld - - # er... 
no idea what that was for - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - static_obj.src_builder.append('Uic4') - shared_obj.src_builder.append('Uic4') - - # We use the emitters of Program / StaticLibrary / SharedLibrary - # to scan for moc'able files - # We can't refer to the builders directly, we have to fetch them - # as Environment attributes because that sets them up to be called - # correctly later by our emitter. - env.AppendUnique(PROGEMITTER =[AutomocStatic], - SHLIBEMITTER=[AutomocShared], - LIBEMITTER =[AutomocStatic], - # Of course, we need to link against the qt libraries - LIBPATH=["$QT4_LIBPATH"], - LIBS=['$QT4_LIB']) - - # TODO: Does dbusxml2cpp need an adapter - env.AddMethod(enable_modules, "EnableQt4Modules") - -def enable_modules(self, modules, debug=False, crosscompiling=False) : - import sys - - validModules = [ - 'QtCore', - 'QtGui', - 'QtOpenGL', - 'Qt3Support', - 'QtAssistant', - 'QtScript', - 'QtDBus', - 'QtSql', - # The next modules have not been tested yet so, please - # maybe they require additional work on non Linux platforms - 'QtNetwork', - 'QtSvg', - 'QtTest', - 'QtXml', - 'QtXmlPatterns', - 'QtUiTools', - 'QtDesigner', - 'QtDesignerComponents', - 'QtWebKit', - 'QtHelp', - 'QtScript', - ] - staticModules = [ - 'QtUiTools', - ] - invalidModules=[] - for module in modules: - if module not in validModules : - invalidModules.append(module) - if invalidModules : - raise Exception("Modules %s are not Qt4 modules. Valid Qt4 modules are: %s"% ( - str(invalidModules),str(validModules))) - - moduleDefines = { - 'QtScript' : ['QT_SCRIPT_LIB'], - 'QtSvg' : ['QT_SVG_LIB'], - 'Qt3Support' : ['QT_QT3SUPPORT_LIB','QT3_SUPPORT'], - 'QtSql' : ['QT_SQL_LIB'], - 'QtXml' : ['QT_XML_LIB'], - 'QtOpenGL' : ['QT_OPENGL_LIB'], - 'QtGui' : ['QT_GUI_LIB'], - 'QtNetwork' : ['QT_NETWORK_LIB'], - 'QtCore' : ['QT_CORE_LIB'], - } - for module in modules : - try : self.AppendUnique(CPPDEFINES=moduleDefines[module]) - except: pass - debugSuffix = '' - - if sys.platform.startswith("linux") and not crosscompiling : - if debug : debugSuffix = '_debug' - self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include", "phonon")]) - for module in modules : - self.AppendUnique(LIBS=[module+debugSuffix]) - self.AppendUnique(LIBPATH=[os.path.join("$QTDIR","lib")]) - self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include")]) - self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include",module)]) - self["QT4_MOCCPPPATH"] = self["CPPPATH"] - return - - if sys.platform == "win32" or crosscompiling : - if crosscompiling: - transformedQtdir = transformToWinePath(self['QTDIR']) - self['QT4_MOC'] = "QTDIR=%s %s"%( transformedQtdir, self['QT4_MOC']) - self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include")]) - try: modules.remove("QtDBus") - except: pass - if debug : debugSuffix = 'd' - if "QtAssistant" in modules: - self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include","QtAssistant")]) - modules.remove("QtAssistant") - modules.append("QtAssistantClient") - # FIXME: Phonon Hack - self.AppendUnique(LIBS=['phonon'+debugSuffix+'4']) - self.AppendUnique(LIBS=[lib+debugSuffix+'4' for lib in modules if lib not in staticModules]) - self.PrependUnique(LIBS=[lib+debugSuffix for lib in modules if lib in staticModules]) - if 'QtOpenGL' in modules: - self.AppendUnique(LIBS=['opengl32']) - self.AppendUnique(CPPPATH=[ '$QTDIR/include/']) - self.AppendUnique(CPPPATH=[ '$QTDIR/include/'+module for module in modules]) - if crosscompiling : - self["QT4_MOCCPPPATH"] = [ - path.replace('$QTDIR', 
transformedQtdir) - for path in self['CPPPATH'] ] - else : - self["QT4_MOCCPPPATH"] = self["CPPPATH"] - self.AppendUnique(LIBPATH=[os.path.join('$QTDIR','lib')]) - return - - if sys.platform=="darwin" : - if debug : debugSuffix = 'd' - - if len(self["QTDIR"]) > 0 : - self.AppendUnique(LIBPATH=[os.path.join('$QTDIR','lib')]) - self.AppendUnique(LINKFLAGS="-F$QTDIR/lib") - self.AppendUnique(CPPFLAGS="-F$QTDIR/lib") - self.AppendUnique(LINKFLAGS="-L$QTDIR/lib") #TODO clean! - - # FIXME: Phonon Hack - self.Append(LINKFLAGS=['-framework', "phonon"]) - - for module in modules : - if module in staticModules : - self.AppendUnique(LIBS=[module+debugSuffix]) # TODO: Add the debug suffix - self.AppendUnique(LIBPATH=[os.path.join("$QTDIR","lib")]) - else : - if len(self["QTDIR"]) > 0 : - self.Append(CPPFLAGS = ["-I" + os.path.join("$QTDIR", "lib", module + ".framework", "Versions", "4", "Headers")]) - else : - self.Append(CPPFLAGS = ["-I" + os.path.join("/Library/Frameworks", module + ".framework", "Versions", "4", "Headers")]) - self.Append(LINKFLAGS=['-framework', module]) - if 'QtOpenGL' in modules: - self.AppendUnique(LINKFLAGS="-F/System/Library/Frameworks") - self.Append(LINKFLAGS=['-framework', 'AGL']) #TODO ughly kludge to avoid quotes - self.Append(LINKFLAGS=['-framework', 'OpenGL']) - self["QT4_MOCCPPPATH"] = self["CPPPATH"] + """Add Builders and construction variables for qt to an Environment.""" + + def locateQt4Command(env, command, qtdir) : + if len(qtdir) == 0 : + qtdir = "/usr" + if env["qt5"]: + suffixes = [ + '-qt5', + '-qt5.exe', + '5', + '5.exe', + '', + '.exe', + ] + else : + suffixes = [ + '-qt4', + '-qt4.exe', + '4', + '4.exe', + '', + '.exe', + ] + triedPaths = [] + for suffix in suffixes : + fullpath = os.path.join(qtdir,'bin',command + suffix) + if os.access(fullpath, os.X_OK) : + return fullpath + triedPaths.append(fullpath) + + fullpath = env.Detect([command+'-qt4', command+'4', command]) + if not (fullpath is None) : return fullpath + + raise Exception("Qt4 command '" + command + "' not found. Tried: " + ', '.join(triedPaths)) + + + CLVar = SCons.Util.CLVar + Action = SCons.Action.Action + Builder = SCons.Builder.Builder + splitext = SCons.Util.splitext + + env['QTDIR'] = _detect(env) + # TODO: 'Replace' should be 'SetDefault' +# env.SetDefault( + env.Replace( + QTDIR = _detect(env), + # TODO: This is not reliable to QTDIR value changes but needed in order to support '-qt4' variants + QT4_MOC = locateQt4Command(env,'moc', env['QTDIR']), + QT4_UIC = locateQt4Command(env,'uic', env['QTDIR']), + QT4_RCC = locateQt4Command(env,'rcc', env['QTDIR']), + QT4_LUPDATE = locateQt4Command(env,'lupdate', env['QTDIR']), + QT4_LRELEASE = locateQt4Command(env,'lrelease', env['QTDIR']), + QT4_LIB = '', # KLUDGE to avoid linking qt3 library + + QT4_AUTOSCAN = 1, # Should the qt tool try to figure out, which sources are to be moc'ed? + + # Some QT specific flags. I don't expect someone wants to + # manipulate those ... 
+ QT4_UICFLAGS = CLVar(''), + QT4_MOCFROMHFLAGS = CLVar(''), + QT4_MOCFROMCXXFLAGS = CLVar('-i'), + QT4_QRCFLAGS = '--compress 9 --threshold 5', + + # suffixes/prefixes for the headers / sources to generate + QT4_UISUFFIX = '.ui', + QT4_UICDECLPREFIX = 'ui_', + QT4_UICDECLSUFFIX = '.h', + QT4_MOCHPREFIX = 'moc_', + QT4_MOCHSUFFIX = '$CXXFILESUFFIX', + QT4_MOCCXXPREFIX = '', + QT4_MOCCXXSUFFIX = '.moc', + QT4_QRCSUFFIX = '.qrc', + QT4_QRCCXXSUFFIX = '$CXXFILESUFFIX', + QT4_QRCCXXPREFIX = 'qrc_', + QT4_MOCCPPPATH = [], + QT4_MOCINCFLAGS = '$( ${_concat("-I", QT4_MOCCPPPATH, INCSUFFIX, __env__, RDirs)} $)', + + # Commands for the qt support ... + QT4_UICCOM = '$QT4_UIC $QT4_UICFLAGS -o $TARGET $SOURCE', + # FIXME: The -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED flag is a hack to work + # around an issue in Qt + # See https://bugreports.qt-project.org/browse/QTBUG-22829 + QT4_MOCFROMHCOM = '$QT4_MOC -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED $QT4_MOCFROMHFLAGS $QT4_MOCINCFLAGS -o $TARGET $SOURCE', + QT4_MOCFROMCXXCOM = [ + '$QT4_MOC -DBOOST_TT_HAS_OPERATOR_HPP_INCLUDED $QT4_MOCFROMCXXFLAGS $QT4_MOCINCFLAGS -o $TARGET $SOURCE', + Action(checkMocIncluded,None)], + QT4_LUPDATECOM = '$QT4_LUPDATE $SOURCE -ts $TARGET', + QT4_LRELEASECOM = '$QT4_LRELEASE -silent $SOURCE -qm $TARGET', + QT4_RCCCOM = '$QT4_RCC $QT4_QRCFLAGS -name $SOURCE $SOURCE -o $TARGET', + ) + if len(env["QTDIR"]) > 0 : + env.Replace(QT4_LIBPATH = os.path.join('$QTDIR', 'lib')) + + # Translation builder + tsbuilder = Builder( + action = SCons.Action.Action('$QT4_LUPDATECOM'), #,'$QT4_LUPDATECOMSTR'), + multi=1 + ) + env.Append( BUILDERS = { 'Ts': tsbuilder } ) + qmbuilder = Builder( + action = SCons.Action.Action('$QT4_LRELEASECOM', cmdstr = '$QT4_LRELEASECOMSTR'), + src_suffix = '.ts', + suffix = '.qm', + single_source = True + ) + env.Append( BUILDERS = { 'Qm': qmbuilder } ) + + # Resource builder + def scanResources(node, env, path, arg): + # I've being careful on providing names relative to the qrc file + # If that was not needed that code could be simplified a lot + def recursiveFiles(basepath, path) : + result = [] + for item in os.listdir(os.path.join(basepath, path)) : + itemPath = os.path.join(path, item) + if os.path.isdir(os.path.join(basepath, itemPath)) : + result += recursiveFiles(basepath, itemPath) + else: + result.append(itemPath) + return result + contents = str(node.get_contents()) + includes = [included[1] for included in qrcinclude_re.findall(contents)] + qrcpath = os.path.dirname(node.path) + dirs = [included for included in includes if os.path.isdir(os.path.join(qrcpath,included))] + # dirs need to include files recursively + for dir in dirs : + includes.remove(dir) + includes+=recursiveFiles(qrcpath,dir) + return includes + qrcscanner = SCons.Scanner.Scanner(name = 'qrcfile', + function = scanResources, + argument = None, + skeys = ['.qrc']) + qrcbuilder = Builder( + action = SCons.Action.Action('$QT4_RCCCOM', cmdstr = '$QT4_RCCCOMSTR'), + source_scanner = qrcscanner, + src_suffix = '$QT4_QRCSUFFIX', + suffix = '$QT4_QRCCXXSUFFIX', + prefix = '$QT4_QRCCXXPREFIX', + single_source = True + ) + env.Append( BUILDERS = { 'Qrc': qrcbuilder } ) + + # Interface builder + def addDisableWarningsPragmaToFile(target, source, env): + assert( len(target) == 1 ) + assert( len(source) == 1 ) + srcf = str(source[0]) + dstf = str(target[0]) + with open(dstf, 'r+') as uiHeader: + data=uiHeader.read() + + template = Template( +"""#pragma once +#pragma warning(push, 0) +#pragma GCC system_header +#pragma clang diagnostic push +#pragma clang 
diagnostic ignored "-Wall" +$uiheadertext +#pragma clang diagnostic pop +#pragma warning(pop) +""") + uiHeader.seek(0) + uiHeader.write(template.substitute(uiheadertext=data)) + uiHeader.truncate() + + uic4builder = Builder( + action = [SCons.Action.Action('$QT4_UICCOM', cmdstr = '$QT4_UICCOMSTR'), SCons.Action.Action(addDisableWarningsPragmaToFile, None)], + src_suffix='$QT4_UISUFFIX', + suffix='$QT4_UICDECLSUFFIX', + prefix='$QT4_UICDECLPREFIX', + single_source = True + #TODO: Consider the uiscanner on new scons version + ) + env['BUILDERS']['Uic4'] = uic4builder + + # Metaobject builder + mocBld = Builder(action={}, prefix={}, suffix={}) + for h in header_extensions: + act = SCons.Action.Action('$QT4_MOCFROMHCOM', cmdstr = '$QT4_MOCFROMHCOMSTR') + mocBld.add_action(h, act) + mocBld.prefix[h] = '$QT4_MOCHPREFIX' + mocBld.suffix[h] = '$QT4_MOCHSUFFIX' + for cxx in cxx_suffixes: + act = SCons.Action.Action('$QT4_MOCFROMCXXCOM', cmdstr = '$QT4_MOCFROMCXXCOMSTR') + mocBld.add_action(cxx, act) + mocBld.prefix[cxx] = '$QT4_MOCCXXPREFIX' + mocBld.suffix[cxx] = '$QT4_MOCCXXSUFFIX' + env['BUILDERS']['Moc4'] = mocBld + + # er... no idea what that was for + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + static_obj.src_builder.append('Uic4') + shared_obj.src_builder.append('Uic4') + + # We use the emitters of Program / StaticLibrary / SharedLibrary + # to scan for moc'able files + # We can't refer to the builders directly, we have to fetch them + # as Environment attributes because that sets them up to be called + # correctly later by our emitter. + env.AppendUnique(PROGEMITTER =[AutomocStatic], + SHLIBEMITTER=[AutomocShared], + LIBEMITTER =[AutomocStatic], + # Of course, we need to link against the qt libraries + LIBPATH=["$QT4_LIBPATH"], + LIBS=['$QT4_LIB']) + + # TODO: Does dbusxml2cpp need an adapter + env.AddMethod(enable_modules, "EnableQt4Modules") + +def enable_modules(self, modules, debug=False, crosscompiling=False, version='4') : + import sys + + validModules = [ + 'QtCore', + 'QtGui', + 'QtOpenGL', + 'Qt3Support', + 'QtAssistant', + 'QtScript', + 'QtDBus', + 'QtSql', + # The next modules have not been tested yet so, please + # maybe they require additional work on non Linux platforms + 'QtNetwork', + 'QtSvg', + 'QtTest', + 'QtXml', + 'QtXmlPatterns', + 'QtUiTools', + 'QtDesigner', + 'QtDesignerComponents', + 'QtWebKit', + 'QtHelp', + 'QtScript', + + # Qt5 modules + 'QtWidgets', + 'QtMultimedia', + 'QtWebKitWidgets', + 'QtWebChannel', + ] + if sys.platform != "win32" and sys.platform != "darwin" and not crosscompiling : + validModules += ['QtX11Extras'] + staticModules = [ + 'QtUiTools', + ] + invalidModules=[] + for module in modules: + if module not in validModules : + invalidModules.append(module) + if invalidModules : + raise Exception("Modules %s are not Qt4 modules. 
Valid Qt4 modules are: %s"% ( + str(invalidModules),str(validModules))) + + moduleDefines = { + 'QtScript' : ['QT_SCRIPT_LIB'], + 'QtSvg' : ['QT_SVG_LIB'], + 'Qt3Support' : ['QT_QT3SUPPORT_LIB','QT3_SUPPORT'], + 'QtSql' : ['QT_SQL_LIB'], + 'QtXml' : ['QT_XML_LIB'], + 'QtOpenGL' : ['QT_OPENGL_LIB'], + 'QtGui' : ['QT_GUI_LIB'], + 'QtWidgets' : ['QT_WIDGETS_LIB'], + 'QtWebKitWidgets' : [], + 'QtNetwork' : ['QT_NETWORK_LIB'], + 'QtCore' : ['QT_CORE_LIB'], + } + for module in modules : + try : self.AppendUnique(CPPDEFINES=moduleDefines[module]) + except: pass + debugSuffix = '' + + + include_flag = "-I" + if os.path.basename(self["CC"]).startswith(("gcc", "clang")): + include_flag = "-isystem" + + + if sys.platform != "win32" and sys.platform != "darwin" and not crosscompiling : + if self["qt"]: + # The user specified qt path in config.py and we are going to use the + # installation under that location. + UsePkgConfig = False + else: + # The user did not specify a qt path in config py and we are going to + # ask pkg-config for the correct flags. + UsePkgConfig = True + if not UsePkgConfig: + if debug : debugSuffix = '_debug' + if version == '4' : + self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR", "include", "phonon")]) + for module in modules : + module_str = module + if not version == '4' : + module_str = module_str.replace('Qt', 'Qt5') + self.AppendUnique(LIBS=[module_str+debugSuffix]) + self.AppendUnique(LIBPATH=[os.path.join("$QTDIR","lib")]) + self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR","include")]) + self.AppendUnique(CPPFLAGS = [include_flag + os.path.join("$QTDIR","include", module)]) + self["QT4_MOCCPPPATH"] = self["CPPPATH"] + return + else: + test_conf = self.Configure() + modules_str = " ".join(modules) + if not version == '4' : + modules_str = modules_str.replace('Qt', 'Qt5') + + # Check if Qt is registered at pkg-config + ret = test_conf.TryAction('pkg-config --exists \'%s\'' % modules_str)[0] + if ret != 1: + test_conf.Finish() + raise Exception('Qt installation is missing packages. 
The following are required: %s' % modules_str) + return + + def parse_conf_as_system(env, cmd, unique=1): + return env.MergeFlags(cmd.replace("-I/", include_flag + "/"), unique) + + test_conf.env.ParseConfig("pkg-config --cflags --libs " + modules_str, parse_conf_as_system) + self["QT4_MOCCPPPATH"] = self["CPPPATH"] + test_conf.Finish() + return + + if sys.platform == "win32" or crosscompiling : + if crosscompiling: + transformedQtdir = transformToWinePath(self['QTDIR']) + self['QT4_MOC'] = "QTDIR=%s %s"%( transformedQtdir, self['QT4_MOC']) + self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include")]) + try: modules.remove("QtDBus") + except: pass + if debug : debugSuffix = 'd' + if "QtAssistant" in modules: + self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include","QtAssistant")]) + modules.remove("QtAssistant") + modules.append("QtAssistantClient") + if version == '4' : + # FIXME: Phonon Hack + self.AppendUnique(LIBS=['phonon'+debugSuffix+version]) + self.AppendUnique(LIBS=[lib+debugSuffix+version for lib in modules if lib not in staticModules]) + else : + self.AppendUnique(LIBS=[lib.replace('Qt', 'Qt5') + debugSuffix for lib in modules if lib not in staticModules]) + self.PrependUnique(LIBS=[lib+debugSuffix for lib in modules if lib in staticModules]) + if 'QtOpenGL' in modules: + self.AppendUnique(LIBS=['opengl32']) + elif version == '5' : + self.Append(CPPDEFINES = ["QT_NO_OPENGL"]) + self.AppendUnique(CPPPATH=[ '$QTDIR/include/']) + self.AppendUnique(CPPPATH=[ '$QTDIR/include/'+module for module in modules]) + if crosscompiling : + self["QT4_MOCCPPPATH"] = [ + path.replace('$QTDIR', transformedQtdir) + for path in self['CPPPATH'] ] + else : + self["QT4_MOCCPPPATH"] = self["CPPPATH"] + self.AppendUnique(LIBPATH=[os.path.join('$QTDIR','lib')]) + self.PrependUnique(LIBS=["shell32"]) + return + + if sys.platform=="darwin" : + if debug : debugSuffix = 'd' + + if len(self["QTDIR"]) > 0 : + self.AppendUnique(LIBPATH=[os.path.join('$QTDIR','lib')]) + self.AppendUnique(LINKFLAGS="-F$QTDIR/lib") + self.AppendUnique(CPPFLAGS=["-iframework$QTDIR/lib", include_flag + os.path.join("$QTDIR", "include")]) + self.Append(LINKFLAGS="-Wl,-rpath,$QTDIR/lib") + + # FIXME: Phonon Hack + if version == '4' : + self.Append(LINKFLAGS=['-framework', "phonon"]) + + for module in modules : + if module in staticModules : + self.AppendUnique(LIBS=[module+debugSuffix]) # TODO: Add the debug suffix + self.AppendUnique(LIBPATH=[os.path.join("$QTDIR","lib")]) + else : + if len(self["QTDIR"]) > 0 : + self.Append(CPPFLAGS = [include_flag + os.path.join("$QTDIR", "lib", module + ".framework", "Headers")]) + else : + self.Append(CPPFLAGS = [include_flag + os.path.join("/Library/Frameworks", module + ".framework", "Headers")]) + self.Append(LINKFLAGS=['-framework', module]) + if 'QtOpenGL' in modules: + self.AppendUnique(LINKFLAGS="-F/System/Library/Frameworks") + self.Append(LINKFLAGS=['-framework', 'AGL']) #TODO ughly kludge to avoid quotes + self.Append(LINKFLAGS=['-framework', 'OpenGL']) + self["QT4_MOCCPPPATH"] = self["CPPPATH"] def exists(env): - return _detect(env) + return _detect(env) diff --git a/BuildTools/SCons/Tools/textfile.py b/BuildTools/SCons/Tools/textfile.py deleted file mode 100644 index 89f8963..0000000 --- a/BuildTools/SCons/Tools/textfile.py +++ /dev/null @@ -1,175 +0,0 @@ -# -*- python -*- -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software 
and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = """ -Textfile/Substfile builder for SCons. - - Create file 'target' which typically is a textfile. The 'source' - may be any combination of strings, Nodes, or lists of same. A - 'linesep' will be put between any part written and defaults to - os.linesep. - - The only difference between the Textfile builder and the Substfile - builder is that strings are converted to Value() nodes for the - former and File() nodes for the latter. To insert files in the - former or strings in the latter, wrap them in a File() or Value(), - respectively. - - The values of SUBST_DICT first have any construction variables - expanded (its keys are not expanded). If a value of SUBST_DICT is - a python callable function, it is called and the result is expanded - as the value. Values are substituted in a "random" order; if any - substitution could be further expanded by another subsitition, it - is unpredictible whether the expansion will occur. -""" - -__revision__ = "src/engine/SCons/Tool/textfile.py 5357 2011/09/09 21:31:03 bdeegan" - -import SCons - -import os -import re - -from SCons.Node import Node -from SCons.Node.Python import Value -from SCons.Util import is_String, is_Sequence, is_Dict - -def _do_subst(node, subs): - """ - Fetch the node contents and replace all instances of the keys with - their values. For example, if subs is - {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'}, - then all instances of %VERSION% in the file will be replaced with - 1.2345 and so forth. 
- """ - contents = node.get_text_contents() - if not subs: return contents - for (k,v) in subs: - contents = re.sub(k, v, contents) - return contents - -def _action(target, source, env): - # prepare the line separator - linesep = env['LINESEPARATOR'] - if linesep is None: - linesep = os.linesep - elif is_String(linesep): - pass - elif isinstance(linesep, Value): - linesep = linesep.get_text_contents() - else: - raise SCons.Errors.UserError( - 'unexpected type/class for LINESEPARATOR: %s' - % repr(linesep), None) - - # create a dictionary to use for the substitutions - if 'SUBST_DICT' not in env: - subs = None # no substitutions - else: - d = env['SUBST_DICT'] - if is_Dict(d): - d = list(d.items()) - elif is_Sequence(d): - pass - else: - raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence') - subs = [] - for (k,v) in d: - if callable(v): - v = v() - if is_String(v): - v = env.subst(v) - else: - v = str(v) - subs.append((k,v)) - - # write the file - try: - fd = open(target[0].get_path(), "wb") - except (OSError,IOError), e: - raise SCons.Errors.UserError("Can't write target file %s" % target[0]) - # separate lines by 'linesep' only if linesep is not empty - lsep = None - for s in source: - if lsep: fd.write(lsep) - fd.write(_do_subst(s, subs)) - lsep = linesep - fd.close() - -def _strfunc(target, source, env): - return "Creating '%s'" % target[0] - -def _convert_list_R(newlist, sources): - for elem in sources: - if is_Sequence(elem): - _convert_list_R(newlist, elem) - elif isinstance(elem, Node): - newlist.append(elem) - else: - newlist.append(Value(elem)) -def _convert_list(target, source, env): - if len(target) != 1: - raise SCons.Errors.UserError("Only one target file allowed") - newlist = [] - _convert_list_R(newlist, source) - return target, newlist - -_common_varlist = ['SUBST_DICT', 'LINESEPARATOR'] - -_text_varlist = _common_varlist + ['TEXTFILEPREFIX', 'TEXTFILESUFFIX'] -_text_builder = SCons.Builder.Builder( - action = SCons.Action.Action(_action, _strfunc, varlist = _text_varlist), - source_factory = Value, - emitter = _convert_list, - prefix = '$TEXTFILEPREFIX', - suffix = '$TEXTFILESUFFIX', - ) - -_subst_varlist = _common_varlist + ['SUBSTFILEPREFIX', 'TEXTFILESUFFIX'] -_subst_builder = SCons.Builder.Builder( - action = SCons.Action.Action(_action, _strfunc, varlist = _subst_varlist), - source_factory = SCons.Node.FS.File, - emitter = _convert_list, - prefix = '$SUBSTFILEPREFIX', - suffix = '$SUBSTFILESUFFIX', - src_suffix = ['.in'], - ) - -def generate(env): - env['LINESEPARATOR'] = os.linesep - env['BUILDERS']['MyTextfile'] = _text_builder - env['TEXTFILEPREFIX'] = '' - env['TEXTFILESUFFIX'] = '.txt' - env['BUILDERS']['MySubstfile'] = _subst_builder - env['SUBSTFILEPREFIX'] = '' - env['SUBSTFILESUFFIX'] = '' - -def exists(env): - return 1 - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/BuildTools/SCons/Tools/wix.py b/BuildTools/SCons/Tools/wix.py index 69622f6..986ea23 100644 --- a/BuildTools/SCons/Tools/wix.py +++ b/BuildTools/SCons/Tools/wix.py @@ -5,47 +5,49 @@ from subprocess import call def generate(env) : - wixPath = env.get("wix_bindir", "") - if len(wixPath) > 0 and wixPath[-1] != "\\": - wixPath += "\\" - env['WIX_HEAT'] = wixPath + 'heat.exe' - env['WIX_HEAT_OPTIONS'] = '-nologo -gg -sfrag -suid -template fragment -dr ProgramFilesFolder' - env['WIX_CANDLE'] = wixPath + 'candle.exe' - env['WIX_CANDLE_OPTIONS'] = '-nologo' - env['WIX_LIGHT'] = wixPath + 'light.exe' - 
env['WIX_LIGHT_OPTIONS'] = '-nologo -ext WixUIExtension' - - def WiX_IncludeScanner(source, env, path, arg): - wixIncludeRegexp = re.compile(r'^\s*\<\?include (\S+.wxs)\s*\?\>\S*', re.M) - contents = source.get_contents() - includes = wixIncludeRegexp.findall(contents) - return [ "" + include for include in includes ] - - heat_builder = SCons.Builder.Builder( - action = '"$WIX_HEAT" dir Swift\\QtUI\\Swift -cg Files $WIX_HEAT_OPTIONS -o ${TARGET} -t Swift\\Packaging\\WiX\\include.xslt', - suffix = '.wxi') - - - candle_scanner = env.Scanner(name = 'wixincludefile', - function = WiX_IncludeScanner, - argument = None, - skeys = ['.wxs']) - - candle_builder = SCons.Builder.Builder( - action = '"$WIX_CANDLE" $WIX_CANDLE_OPTIONS ${SOURCES} -o ${TARGET}', - src_suffix = '.wxs', - suffix = '.wixobj', - source_scanner = candle_scanner, - src_builder = heat_builder) - - - light_builder = SCons.Builder.Builder( - action = '"$WIX_LIGHT" $WIX_LIGHT_OPTIONS -b Swift\\QtUI\\Swift ${SOURCES} -o ${TARGET}', - src_suffix = '.wixobj', - src_builder = candle_builder) - - env['BUILDERS']['WiX_Heat'] = heat_builder - env['BUILDERS']['WiX_Candle'] = candle_builder - env['BUILDERS']['WiX_Light'] = light_builder + wixPath = env.get("wix_bindir", "") + if len(wixPath) > 0 and wixPath[-1] != "\\": + wixPath += "\\" + env['WIX_HEAT'] = wixPath + 'heat.exe' + env['WIX_HEAT_OPTIONS'] = '-nologo -ag -sfrag -suid -template fragment -dr ProgramFilesFolder' + env['WIX_CANDLE'] = wixPath + 'candle.exe' + env['WIX_CANDLE_OPTIONS'] = '-nologo' + env['WIX_LIGHT'] = wixPath + 'light.exe' + env['WIX_LIGHT_OPTIONS'] = '-nologo -ext WixUIExtension' + + def WiX_IncludeScanner(source, env, path, arg): + wixIncludeRegexp = re.compile(r'^\s*\<\?include (\S+.wxs)\s*\?\>\S*', re.M) + if SCons.Util.PY3: + contents = source.get_contents().decode("utf8") + else: + contents = source.get_contents() + includes = wixIncludeRegexp.findall(contents) + return [ "" + include for include in includes ] + + heat_builder = SCons.Builder.Builder( + action = '"$WIX_HEAT" dir "$WIX_SOURCE_OBJECT_DIR" -cg Files $WIX_HEAT_OPTIONS -o ${TARGET} -t Swift\\Packaging\\WiX\\include.xslt', + suffix = '.wxi') + + + candle_scanner = env.Scanner(name = 'wixincludefile', + function = WiX_IncludeScanner, + argument = None, + skeys = ['.wxs']) + + candle_builder = SCons.Builder.Builder( + action = '"$WIX_CANDLE" $WIX_CANDLE_OPTIONS ${SOURCES} -o ${TARGET}', + src_suffix = '.wxs', + suffix = '.wixobj', + source_scanner = candle_scanner, + src_builder = heat_builder) + + + light_builder = SCons.Builder.Builder( + action = '"$WIX_LIGHT" $WIX_LIGHT_OPTIONS -b "$WIX_SOURCE_OBJECT_DIR" ${SOURCES} -loc Swift\\Packaging\\WiX\\Swift_en-us.wxl -o ${TARGET}', + src_suffix = '.wixobj', + src_builder = candle_builder) + + env['BUILDERS']['WiX_Heat'] = heat_builder + env['BUILDERS']['WiX_Candle'] = candle_builder + env['BUILDERS']['WiX_Light'] = light_builder def exists(env) : - return True - + return True diff --git a/BuildTools/SCons/Version.py b/BuildTools/SCons/Version.py index fe5fde1..23cd850 100644 --- a/BuildTools/SCons/Version.py +++ b/BuildTools/SCons/Version.py @@ -1,63 +1,231 @@ -import subprocess, os, datetime, re, os.path - -def getGitBuildVersion(project) : - tag = git("describe --tags --exact --match \"" + project + "-*\"") - if tag : - return tag.rstrip()[len(project)+1:] - tag = git("describe --tags --match \"" + project + "-*\"") - if tag : - m = re.match(project + "-(.*)-(.*)-(.*)", tag) - if m : - return m.group(1) + "-dev" + m.group(2) - return None - -def 
git(cmd) : - p = subprocess.Popen("git " + cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt")) - gitVersion = p.stdout.read() - p.stdin.close() - return gitVersion if p.wait() == 0 else None +from __future__ import print_function +import subprocess, os, datetime, re, os.path, sys, unittest + +def gitDescribeToVersion(tag, tagPrefix): + m = re.match(r'^' + tagPrefix + r'-(.+?)(?:-(.+?)-(.+?))?$', tag) + if not m: + return None + result = m.group(1) + if m.group(2): + result += "-dev" + m.group(2) + return result + +def getGitBuildVersion(root, tagPrefix) : + tag = git("describe --tags --match \"" + tagPrefix + "-*\"", root) + if tag: + return gitDescribeToVersion(tag, tagPrefix) + return None + +def git(cmd, root): + full_cmd = "git " + cmd + # Would've used with .. as p, but that's not supported by Python 2.7 + p = subprocess.Popen(full_cmd, cwd=root, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt")) + gitVersion = p.stdout.read() + try: + p.stdin.close() + p.stdout.close() + p.stderr.close() + except: + pass + if p.wait() == 0: + return bytes(gitVersion).decode('utf-8') + return None def getBuildVersion(root, project) : - versionFilename = os.path.join(root, "VERSION." + project) - if os.path.isfile(versionFilename) : - f = open(versionFilename) - version = f.read().strip() - f.close() - return version - - gitVersion = getGitBuildVersion(project) - if gitVersion : - return gitVersion - - return datetime.date.today().strftime("%Y%m%d") - -def convertToWindowsVersion(version) : - version_match = re.match("(\d+)\.(\d+)(.*)", version) - major = int(version_match.group(1)) if version_match else 0 - minor = int(version_match.group(2)) if version_match else 0 - if version_match and len(version_match.group(3)) == 0 : - patch = 99999 - else : - match = re.match("^beta(\d+)(.*)", version_match.group(3)) - build_string = "" - if match : - patch = 1000*int(match.group(1)) - build_string = match.group(2) - else : - rc_match = re.match("^rc(\d+)(.*)", version_match.group(3)) - if rc_match : - patch = 10000*int(rc_match.group(1)) - build_string = rc_match.group(2) - else : - patch = 0 - alpha_match = re.match("^alpha(.*)", version_match.group(3)) - if alpha_match : - build_string = alpha_match.group(1) - - if len(build_string) > 0 : - build_match = re.match("^-dev(\d+)", build_string) - if build_match : - patch += int(build_match.group(1)) + versionFilename = os.path.join(root, "VERSION." + project) + if os.path.isfile(versionFilename) : + f = open(versionFilename) + version = f.read().strip() + f.close() + return version + + gitVersion = getGitBuildVersion(root, project) + if gitVersion : + return gitVersion + + return datetime.date.today().strftime("%Y%m%d") + +# The following conversion allows us to use version tags the format: +# major.0 +# major.0.(0 to 9) +# +# Either from above followed by: +# alpha(0 to 4) +# beta(0 to 6) +# rc(1 to 11) +# +# Followed by an optional -dev(1-65535) for off tag builds. 
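+#
+# Illustrative mappings, derived from the rules implemented below (values are
+# computed by this function, not taken from an external specification):
+#
+#   "4.0alpha"      -> (4, 1, 0)     alpha releases occupy minor 1..5
+#   "4.0beta2"      -> (4, 8, 0)     beta releases occupy minor 6..12
+#   "4.0rc2-dev30"  -> (4, 14, 31)   rc releases occupy minor 13..23; -devN sets patch to N + 1
+#   "4.0"           -> (4, 24, 0)    stable releases start at minor 24
+#
+# This keeps the resulting Windows version triples strictly increasing across
+# alpha -> beta -> rc -> stable, as verified by the unit tests further down.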
+def convertToWindowsVersion(version): + match = re.match(r"(?P<major>\d+)\.(?P<minor>\d+)\.?(?P<patch>\d+)?(?:(?P<stage>rc|beta|alpha)(?P<stage_number>\d+)?)?(?:-dev(?P<dev>\d+))?", version) + assert(match) + major, minor, patch = (0, 0, 0) + + groups = match.groupdict() + assert(groups['major']) + major = int(groups['major']) + + if groups['minor']: + assert(int(groups['minor']) == 0) + + if groups['patch']: + assert(0 <= int(groups['patch']) <= 9) + minor = int(groups['patch']) * 25 + stage = groups["stage"] + if stage: + stageNumber = groups['stage_number'] + if not stageNumber or stageNumber == "": + stageNumber = 0 + else: + stageNumber = int(stageNumber) + + if stage == "alpha": + assert(0 <= stageNumber <= 4) + minor = 1 + stageNumber + elif stage == "beta": + assert(0 <= stageNumber <= 6) + minor = 6 + stageNumber + elif stage == "rc": + assert(1 <= stageNumber <= 11) + minor = 12 + stageNumber + else: + assert(False) + else: + minor = minor + 24 + + if groups['dev']: + patch = 1 + int(groups['dev']) + + # The following constraints are set by Windows Installer framework + assert(0 <= major <= 255) + assert(0 <= minor <= 255) + assert(0 <= patch <= 65535) return (major, minor, patch) +# Test Windows version mapping scheme +class TestCases(unittest.TestCase): + def testWindowsVersionsAreDescending(self): + versionStringsWithOldVersions = [ + ("5.0rc11", None), + ("5.0rc1", None), + ("5.0beta6", None), + ("5.0alpha4", None), + ("5.0alpha2", None), + ("5.0alpha", None), + ("4.0.9", None), + ("4.0.1", None), + ("4.0", (4, 0, 60000)), + ("4.0rc6", (4, 0, 60000)), + ("4.0rc5", (4, 0, 50000)), + ("4.0rc4", (4, 0, 40000)), + ("4.0rc3", (4, 0, 30000)), + ('4.0rc2-dev39', (4, 0, 20039)), + ('4.0rc2-dev34', (4, 0, 20034)), + ('4.0rc2-dev33', (4, 0, 20033)), + ('4.0rc2-dev31', (4, 0, 20031)), + ('4.0rc2-dev30', (4, 0, 20030)), + ('4.0rc2-dev29', (4, 0, 20029)), + ('4.0rc2-dev27', (4, 0, 20027)), + ('4.0rc2', (4, 0, 20000)), + ('4.0rc1', (4, 0, 10000)), + ('4.0beta2-dev203', (4, 0, 2203)), + ('4.0beta2-dev195', (4, 0, 2195)), + ('4.0beta2-dev177', (4, 0, 2177)), + ('4.0beta2-dev171', (4, 0, 2171)), + ('4.0beta2-dev154', (4, 0, 2154)), + ('4.0beta2-dev150', (4, 0, 2150)), + ('4.0beta2-dev142', (4, 0, 2142)), + ('4.0beta2-dev140', (4, 0, 2140)), + ('4.0beta2-dev133', (4, 0, 2133)), + ('4.0beta2-dev118', (4, 0, 2118)), + ('4.0beta2-dev112', (4, 0, 2112)), + ('4.0beta2-dev93', (4, 0, 2093)), + ('4.0beta2-dev80', (4, 0, 2080)), + ('4.0beta2-dev72', (4, 0, 2072)), + ('4.0beta2-dev57', (4, 0, 2057)), + ('4.0beta2-dev44', (4, 0, 2044)), + ('4.0beta2-dev38', (4, 0, 2038)), + ('4.0beta2-dev29', (4, 0, 2029)), + ('4.0beta2-dev15', (4, 0, 2015)), + ('4.0beta2', (4, 0, 2000)), + ('4.0beta1', (4, 0, 1000)), + ('4.0alpha-dev80', (4, 0, 80)), + ('4.0alpha-dev50', (4, 0, 50)), + ('4.0alpha-dev43', (4, 0, 43)), + ('4.0alpha-dev21', (4, 0, 21)), + ('3.0', (3, 0, 60000)), + ('3.0rc3', (3, 0, 30000)), + ('3.0rc2', (3, 0, 20000)), + ('3.0rc1', (3, 0, 10000)), + ('3.0beta2-dev124', (3, 0, 2124)), + ('3.0beta2-dev81', (3, 0, 2081)), + ('3.0beta2-dev50', (3, 0, 2050)), + ('3.0beta2-dev44', (3, 0, 2044)), + ('3.0beta2-dev40', (3, 0, 2040)), + ('3.0beta2-dev26', (3, 0, 2026)), + ('3.0beta2', (3, 0, 2000)), + ('3.0beta1', (3, 0, 1000)), + ('3.0alpha-dev529', (3, 0, 529)), + ('3.0alpha-dev528', (3, 0, 528)), + ('3.0alpha-dev526', (3, 0, 526)), + ('3.0alpha-dev524', (3, 0, 524)), + ('3.0alpha-dev515', (3, 0, 515)), + ] + prevParsed = None + prevOldVersion = None + for string, oldVersion in versionStringsWithOldVersions: + 
parsed = convertToWindowsVersion(string) + if prevOldVersion and oldVersion: + self.assertTrue(oldVersion <= prevOldVersion, "Old version %r must come before %r" % (oldVersion, prevOldVersion)) + if prevParsed: + self.assertTrue(parsed < prevParsed, "%s => %r must be smaller than %s => %r" % (string, parsed, prevString, prevParsed)) + prevString = string + prevParsed = parsed + prevOldVersion = oldVersion + + def testThatBetaIsHigherThanAlpha(self): + self.assertTrue(convertToWindowsVersion("3.0beta0") > convertToWindowsVersion("3.0alpha0")) + self.assertTrue(convertToWindowsVersion("3.0beta6") > convertToWindowsVersion("3.0alpha1")) + self.assertTrue(convertToWindowsVersion("3.0beta6") > convertToWindowsVersion("3.0alpha4")) + + def testThatRcIsHigherThanAlphaAndBeta(self): + self.assertTrue(convertToWindowsVersion("3.0rc11") > convertToWindowsVersion("3.0alpha0")) + self.assertTrue(convertToWindowsVersion("3.0rc11") > convertToWindowsVersion("3.0alpha4")) + self.assertTrue(convertToWindowsVersion("3.0rc1") > convertToWindowsVersion("3.0alpha0")) + self.assertTrue(convertToWindowsVersion("3.0rc1") > convertToWindowsVersion("3.0alpha4")) + self.assertTrue(convertToWindowsVersion("3.0rc11") > convertToWindowsVersion("3.0beta0")) + self.assertTrue(convertToWindowsVersion("3.0rc11") > convertToWindowsVersion("3.0beta6")) + self.assertTrue(convertToWindowsVersion("3.0rc1") > convertToWindowsVersion("3.0beta0")) + self.assertTrue(convertToWindowsVersion("3.0rc1") > convertToWindowsVersion("3.0beta6")) + + def testThatStableIsHigherThanAlphaAndBetaAndRc(self): + self.assertTrue(convertToWindowsVersion("3.0") > convertToWindowsVersion("3.0alpha0")) + self.assertTrue(convertToWindowsVersion("3.0") > convertToWindowsVersion("3.0alpha4")) + self.assertTrue(convertToWindowsVersion("3.0") > convertToWindowsVersion("3.0alpha0")) + self.assertTrue(convertToWindowsVersion("3.0") > convertToWindowsVersion("3.0alpha4")) + self.assertTrue(convertToWindowsVersion("3.0") > convertToWindowsVersion("3.0beta0")) + self.assertTrue(convertToWindowsVersion("3.0") > convertToWindowsVersion("3.0beta6")) + self.assertTrue(convertToWindowsVersion("3.0") > convertToWindowsVersion("3.0rc1")) + self.assertTrue(convertToWindowsVersion("3.0") > convertToWindowsVersion("3.0rc11")) + + def testGitDescribeToVersion(self): + self.assertEqual("5.0alpha2-dev100", gitDescribeToVersion("swift-5.0alpha2-100-g33d5f6571", "swift")) + self.assertEqual("5.0alpha2", gitDescribeToVersion("swift-5.0alpha2", "swift")) + self.assertEqual("5.0-dev1", gitDescribeToVersion("swift-5.0-1-g012312312", "swift")) + self.assertIsNone(gitDescribeToVersion("swiften-5.0-1-g012312312", "swift")) + self.assertIsNone(gitDescribeToVersion("quickly-5.0-1-g012312312", "swift")) + self.assertIsNone(gitDescribeToVersion("swift-", "swift")) + self.assertIsNone(gitDescribeToVersion("swift", "swift")) + +if __name__ == '__main__': + if len(sys.argv) == 1: + unittest.main() + elif len(sys.argv) == 2: + if sys.argv[1] == "--git-build-version": + print(getGitBuildVersion(os.path.dirname(os.path.dirname(__file__)), "swift")) + else: + print(convertToWindowsVersion(sys.argv[1])) + sys.exit(0) + else: + print("Error: Simply run the script without arguments or pass a single argument.") + sys.exit(-1) diff --git a/BuildTools/TranslationCoverage.xslt b/BuildTools/TranslationCoverage.xslt new file mode 100644 index 0000000..4329174 --- /dev/null +++ b/BuildTools/TranslationCoverage.xslt @@ -0,0 +1,50 @@ +<?xml version="1.0"?> +<xsl:stylesheet version="1.0" 
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"> + +<!-- + +To use this XSLT, run it with xsltproc: + + xsltproc BuildTools/TranslationCoverage.xslt Swift/Translations/swift_nl.ts + +Supported parameters: + + full if set to 1 or true, generate a full report instead of the default oneliner. + +--> + +<xsl:param name="full" select="'0'" /> +<xsl:param name="filename" /> +<xsl:output method="text" /> + +<xsl:variable name="contexts" select="count(TS/context)"/> +<xsl:variable name="vanished" select="count(TS/context/message/translation[@type = 'vanished'])"/> +<xsl:variable name="obsolete" select="count(TS/context/message/translation[@type = 'obsolete'])"/> +<xsl:variable name="strings" select="count(TS/context/message/source) - $vanished - $obsolete"/> +<xsl:variable name="translations" select="count(TS/context/message/translation[not(@type) and text()])"/> +<xsl:variable name="missing" select="$strings - $translations"/> +<xsl:variable name="percent_done" select="floor($translations div $strings * 100)"/> + +<xsl:template match="/TS"> + <xsl:choose> + <xsl:when test="$full != '0' and $full != 'false'"><xsl:call-template name="full" /></xsl:when> + <xsl:otherwise><xsl:call-template name="terse" /></xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="terse"><xsl:value-of select="$percent_done"/>% complete, <xsl:value-of select="$vanished"/> vanished, <xsl:value-of select="$obsolete"/> obsolete, <xsl:value-of select="$missing"/> missing +</xsl:template> + +<xsl:template name="full"><xsl:if test="$filename">Report for <xsl:value-of select="$filename" />: + +</xsl:if>Contexts: <xsl:value-of select="$contexts"/> +Active Strings: <xsl:value-of select="$strings"/> +Translations: <xsl:value-of select="$translations"/>, <xsl:value-of select="$percent_done"/>% complete +Missing: <xsl:value-of select="$missing"/> +Vanished: <xsl:value-of select="$vanished"/> +Obsolete: <xsl:value-of select="$obsolete"/> + +Note: Strings and Translations do not include vanished or obsolete messages. 
+</xsl:template> + +</xsl:stylesheet> diff --git a/BuildTools/UpdateDebianChangelog.py b/BuildTools/UpdateDebianChangelog.py index 20e72da..d7e1f1b 100755 --- a/BuildTools/UpdateDebianChangelog.py +++ b/BuildTools/UpdateDebianChangelog.py @@ -16,26 +16,25 @@ m = re.match("([\w-]+) \((.*)-\d+\)", last_version_line) if m : - project = m.group(1) - last_version = m.group(2) + project = m.group(1) + last_version = m.group(2) if project == "" : - project="swift-im" + project="swift-im" -if "dev" in version : - distribution = "development" +if "dev" in version or "alpha" in version : + distribution = "development" elif "beta" in version or "rc" in version : - distribution = "beta development" + distribution = "beta development" else : - distribution = "release beta development" + distribution = "release beta development" if last_version != version : - changelog = open(sys.argv[1]) - changelog_data = changelog.read() - changelog.close() - changelog = open(sys.argv[1], "w") - changelog.write(project + " (" + version + "-1)" + " " + distribution + "; urgency=low\n\n") - changelog.write(" * Upstream development snapshot\n\n") - changelog.write(" -- Swift Package Maintainer <packages@swift.im> " + email.utils.formatdate() + "\n") - changelog.write("\n") - changelog.write(changelog_data) - changelog.close() - + changelog = open(sys.argv[1]) + changelog_data = changelog.read() + changelog.close() + changelog = open(sys.argv[1], "w") + changelog.write(project + " (" + version + "-1)" + " " + distribution + "; urgency=low\n\n") + changelog.write(" * Upstream development snapshot\n\n") + changelog.write(" -- Swift Package Maintainer <packages@swift.im> " + email.utils.formatdate() + "\n") + changelog.write("\n") + changelog.write(changelog_data) + changelog.close() diff --git a/BuildTools/iOS/swiften.xcodeproj/project.pbxproj b/BuildTools/iOS/swiften.xcodeproj/project.pbxproj new file mode 100644 index 0000000..1fcaa30 --- /dev/null +++ b/BuildTools/iOS/swiften.xcodeproj/project.pbxproj @@ -0,0 +1,230 @@ +// !$*UTF8*$! 
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 46; + objects = { + +/* Begin PBXFileReference section */ + F6CD8AEF1A1293000039D508 /* libswift.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libswift.a; sourceTree = BUILT_PRODUCTS_DIR; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + F6CD8AEC1A1293000039D508 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + F6CD8AE41A1292C00039D508 = { + isa = PBXGroup; + children = ( + F6CD8AF11A1293000039D508 /* swiften */, + F6CD8AF01A1293000039D508 /* Products */, + ); + sourceTree = "<group>"; + }; + F6CD8AF01A1293000039D508 /* Products */ = { + isa = PBXGroup; + children = ( + F6CD8AEF1A1293000039D508 /* libswift.a */, + ); + name = Products; + sourceTree = "<group>"; + }; + F6CD8AF11A1293000039D508 /* swiften */ = { + isa = PBXGroup; + children = ( + ); + path = swiften; + sourceTree = "<group>"; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + F6CD8AEE1A1293000039D508 /* swiften */ = { + isa = PBXNativeTarget; + buildConfigurationList = F6CD8B011A1293000039D508 /* Build configuration list for PBXNativeTarget "swiften" */; + buildPhases = ( + F6CD8B071A1293A10039D508 /* Build Swift */, + F6CD8AEC1A1293000039D508 /* Frameworks */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = swiften; + productName = swiften; + productReference = F6CD8AEF1A1293000039D508 /* libswift.a */; + productType = "com.apple.product-type.library.static"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + F6CD8AE51A1292C00039D508 /* Project object */ = { + isa = PBXProject; + attributes = { + LastUpgradeCheck = 0610; + TargetAttributes = { + F6CD8AEE1A1293000039D508 = { + CreatedOnToolsVersion = 6.1; + }; + }; + }; + buildConfigurationList = F6CD8AE81A1292C00039D508 /* Build configuration list for PBXProject "swiften" */; + compatibilityVersion = "Xcode 3.2"; + developmentRegion = English; + hasScannedForEncodings = 0; + knownRegions = ( + en, + ); + mainGroup = F6CD8AE41A1292C00039D508; + productRefGroup = F6CD8AF01A1293000039D508 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + F6CD8AEE1A1293000039D508 /* swiften */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXShellScriptBuildPhase section */ + F6CD8B071A1293A10039D508 /* Build Swift */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "Build Swift"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "cd $PWD/../../\n./scons enable_variants=1 build_examples=0 target=xcode Swift/Controllers Swiften"; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + F6CD8AE91A1292C00039D508 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + }; + name = Debug; + }; + F6CD8AEA1A1292C00039D508 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + }; + name = Release; + }; + F6CD8B021A1293000039D508 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + 
CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_DYNAMIC_NO_PIC = NO; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_SYMBOLS_PRIVATE_EXTERN = NO; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 8.1; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + OTHER_LDFLAGS = "-ObjC"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SDKROOT = iphoneos; + SKIP_INSTALL = YES; + }; + name = Debug; + }; + F6CD8B031A1293000039D508 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = YES; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 8.1; + MTL_ENABLE_DEBUG_INFO = NO; + OTHER_LDFLAGS = "-ObjC"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SDKROOT = iphoneos; + SKIP_INSTALL = YES; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + F6CD8AE81A1292C00039D508 /* Build configuration list for PBXProject "swiften" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + F6CD8AE91A1292C00039D508 /* Debug */, + F6CD8AEA1A1292C00039D508 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + F6CD8B011A1293000039D508 /* Build configuration list for PBXNativeTarget "swiften" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + F6CD8B021A1293000039D508 /* Debug */, + F6CD8B031A1293000039D508 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = F6CD8AE51A1292C00039D508 /* Project object */; +} diff --git a/BuildTools/scons2ninja.py b/BuildTools/scons2ninja.py new file mode 100755 index 0000000..df4c655 --- /dev/null +++ b/BuildTools/scons2ninja.py @@ -0,0 +1,631 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# +# scons2ninja: A script to create a Ninja build file from SCons. +# +# Copyright (c) 2013 Remko Tronçon +# Licensed under the simplified BSD license. +# See COPYING for details. 
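+#
+# Typical usage (illustrative; the exact SCons arguments depend on your setup):
+#
+#   python BuildTools/scons2ninja.py <your usual scons arguments>
+#   ninja
+#
+# The script runs SCons with --tree=all,prune to discover the build graph and
+# writes an equivalent build.ninja, plus a .scons2ninja.deps file so that ninja
+# can regenerate build.ninja when the SCons input files change. It expects a
+# .scons2ninja.conf configuration file in the current directory (see the
+# Configuration section below).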
+# +################################################################################ + +import re, os, os.path, subprocess, sys, fnmatch, shlex + +################################################################################ +# Helper methods & variables +################################################################################ + +SCRIPT = sys.argv[0] +SCONS_ARGS = ' '.join(sys.argv[1:]) + +# TODO: Make this a tool-specific map +BINARY_FLAGS = ["-framework", "-arch", "-x", "--output-format", "-isystem", "-include"] + +if sys.platform == 'win32' : + LIB_PREFIX = "" + LIB_SUFFIX = "" + EXE_SUFFIX = ".exe" +else : + LIB_PREFIX = "lib" + LIB_SUFFIX = ".a" + EXE_SUFFIX = "" + +def is_regexp(x) : + return 'match' in dir(x) + +def is_list(l) : + return type(l) is list + +def escape(s) : + return s.replace(' ', '$ ').replace(':', '$:') + +def quote_spaces(s) : + if ' ' in s : + return '"' + s + '"' + else : + return s + +def to_list(l) : + if not l : + return [] + if is_list(l) : + return l + return [l] + +def partition(l, f) : + x = [] + y = [] + for v in l : + if f(v) : + x.append(v) + else : + y.append(v) + return (x, y) + +def get_unary_flags(prefix, flags) : + return [x[len(prefix):] for x in flags if x.lower().startswith(prefix.lower())] + +def extract_unary_flags(prefix, flags) : + f1, f2 = partition(flags, lambda x : x.lower().startswith(prefix.lower())) + return ([f[len(prefix):] for f in f1], f2) + +def extract_unary_flag(prefix, flags) : + flag, flags = extract_unary_flags(prefix, flags) + return (flag[0], flags) + +def extract_binary_flag(prefix, flags) : + i = flags.index(prefix) + flag = flags[i + 1] + del flags[i] + del flags[i] + return (flag, flags) + +def get_non_flags(flags) : + skip = False + result = [] + for f in flags : + if skip : + skip = False + elif f in BINARY_FLAGS : + skip = True + elif not f.startswith("/") and not f.startswith("-") : + result.append(f) + return result + +def extract_non_flags(flags) : + non_flags = get_non_flags(flags) + return (non_flags, filter(lambda x : x not in non_flags, flags)) + +def get_dependencies(target, build_targets) : + result = [] + queue = list(dependencies.get(target, [])) + while len(queue) > 0 : + n = queue.pop() + # Filter out Value() results + if n in build_targets or os.path.exists(n) : + result.append(n) + queue += list(dependencies.get(n, [])) + return result + +def get_built_libs(libs, libpaths, outputs) : + canonical_outputs = [os.path.abspath(p) for p in outputs] + result = [] + for libpath in libpaths : + for lib in libs : + lib_libpath = os.path.join(libpath, LIB_PREFIX + lib + LIB_SUFFIX) + if os.path.abspath(lib_libpath) in canonical_outputs : + result.append(lib_libpath) + return result + +def parse_tool_command(line) : + command = shlex.split(line, False, False if sys.platform == 'win32' else True) + flags = command[1:] + tool = os.path.splitext(os.path.basename(command[0]))[0] + if tool.startswith('clang++') or tool.startswith('g++') : + tool = "cxx" + elif tool.startswith('clang') or tool.startswith('gcc') : + tool = "cc" + if tool in ["cc", "cxx"] and not "-c" in flags : + tool = "glink" + tool = tool.replace('-qt4', '') + return tool, command, flags + +def rglob(pattern, root = '.') : + return [os.path.join(path, f) for path, dirs, files in os.walk(root) for f in fnmatch.filter(files, pattern)] + +################################################################################ +# Helper for building Ninja files +################################################################################ + +class 
NinjaBuilder : + def __init__(self) : + self._header = "" + self.variables = "" + self.rules = "" + self._build = "" + self.pools = "" + self._flags = {} + self.targets = [] + + def header(self, text) : + self._header += text + "\n" + + def rule(self, name, **kwargs) : + self.rules += "rule " + name + "\n" + for k, v in kwargs.iteritems() : + self.rules += " " + str(k) + " = " + str(v) + "\n" + self.rules += "\n" + + def pool(self, name, **kwargs) : + self.pools += "pool " + name + "\n" + for k, v in kwargs.iteritems() : + self.pools += " " + str(k) + " = " + str(v) + "\n" + self.pools += "\n" + + def variable(self, name, value) : + self.variables += str(name) + " = " + str(value) + "\n" + + def build(self, target, rule, sources = None, **kwargs) : + self._build += "build " + self.to_string(target) + ": " + rule + if sources : + self._build += " " + self.to_string(sources) + if 'deps' in kwargs and kwargs['deps'] : + self._build += " | " + self.to_string(kwargs["deps"]) + if 'order_deps' in kwargs : + self._build += " || " + self.to_string(kwargs['order_deps']) + self._build += "\n" + for var, value in kwargs.iteritems() : + if var in ['deps', 'order_deps'] : + continue + value = self.to_string(value, quote = True) + if var.endswith("flags") : + value = self.get_flags_variable(var, value) + self._build += " " + var + " = " + value + "\n" + self.targets += to_list(target) + + def header_targets(self) : + return [x for x in self.targets if x.endswith('.h') or x.endswith('.hh')] + + def serialize(self) : + result = "" + result += self._header + "\n" + result += self.variables + "\n" + for prefix in self._flags.values() : + for k, v in prefix.iteritems() : + result += v + " = " + k + "\n" + result += "\n" + result += self.pools + "\n" + result += self.rules + "\n" + result += self._build + "\n" + return result + + def to_string(self, lst, quote = False) : + if is_list(lst) : + if quote : + return ' '.join([quote_spaces(x) for x in lst]) + else : + return ' '.join([escape(x) for x in lst]) + if is_regexp(lst) : + return ' '.join([escape(x) for x in self.targets if lst.match(x)]) + return escape(lst) + + def get_flags_variable(self, flags_type, flags) : + if len(flags) == 0 : + return '' + if flags_type not in self._flags : + self._flags[flags_type] = {} + type_flags = self._flags[flags_type] + if flags not in type_flags : + type_flags[flags] = flags_type + "_" + str(len(type_flags)) + return "$" + type_flags[flags] + + +################################################################################ +# Configuration +################################################################################ + +ninja_post = [] +scons_cmd = "scons" +scons_dependencies = ['SConstruct'] + rglob('SConscript') + +def ninja_custom_command(ninja, line) : + return False + +CONFIGURATION_FILE = '.scons2ninja.conf' +execfile(CONFIGURATION_FILE) + +scons_dependencies = [os.path.normpath(x) for x in scons_dependencies] + + +################################################################################ +# Rules +################################################################################ + +ninja = NinjaBuilder() + +ninja.pool('scons_pool', depth = 1) + +if sys.platform == 'win32' : + ninja.rule('cl', + deps = 'msvc', + command = '$cl /showIncludes $clflags -c $in /Fo$out', + description = 'CXX $out') + + ninja.rule('link', + command = '$link $in $linkflags $libs /out:$out', + description = 'LINK $out') + + ninja.rule('link_mt', + command = '$link $in $linkflags $libs /out:$out ; $mt $mtflags', + description = 
'LINK $out') + + ninja.rule('lib', + command = '$lib $libflags /out:$out $in', + description = 'AR $out') + + ninja.rule('rc', + command = '$rc $rcflags /Fo$out $in', + description = 'RC $out') + + # SCons doesn't touch files if they didn't change, which makes + # ninja rebuild the file over and over again. There's no touch on Windows :( + # Could implement it with a script, but for now, delete the file if + # this problem occurs. I'll fix it if it occurs too much. + ninja.rule('scons', + command = scons_cmd + " ${scons_args} $out", + pool = 'scons_pool', + description = 'GEN $out') + + ninja.rule('install', command = 'cmd /c copy $in $out') + ninja.rule('run', command = '$in') +else : + ninja.rule('cxx', + deps = 'gcc', + depfile = '$out.d', + command = '$cxx -MMD -MF $out.d $cxxflags -c $in -o $out', + description = 'CXX $out') + + ninja.rule('cc', + deps = 'gcc', + depfile = '$out.d', + command = '$cc -MMD -MF $out.d $ccflags -c $in -o $out', + description = 'CC $out') + + ninja.rule('link', + command = '$glink -o $out $in $linkflags', + description = 'LINK $out') + + ninja.rule('ar', + command = 'ar $arflags $out $in && ranlib $out', + description = 'AR $out') + + # SCons doesn't touch files if they didn't change, which makes + # ninja rebuild the file over and over again. Touching solves this. + ninja.rule('scons', + command = scons_cmd + " $out && touch $out", + pool = 'scons_pool', + description = 'GEN $out') + + ninja.rule('install', command = 'install $in $out') + ninja.rule('run', command = './$in') + + +ninja.rule('moc', + command = '$moc $mocflags -o $out $in', + description = 'MOC $out') + +ninja.rule('rcc', + command = '$rcc $rccflags -name $name -o $out $in', + description = 'RCC $out') + +ninja.rule('uic', + command = '$uic $uicflags -o $out $in', + description = 'UIC $out') + +ninja.rule('lrelease', + command = '$lrelease $lreleaseflags $in -qm $out', + description = 'LRELEASE $out') + +ninja.rule('ibtool', + command = '$ibtool $ibtoolflags --compile $out $in', + description = 'IBTOOL $out') + +ninja.rule('dsymutil', + command = '$dsymutil $dsymutilflags -o $out $in', + description = 'DSYMUTIL $out') + +ninja.rule('generator', + command = "python " + SCRIPT + " ${scons_args}", + depfile = ".scons2ninja.deps", + pool = 'scons_pool', + generator = '1', + description = 'Regenerating build.ninja') + +ninja.rule('sdef', + command = 'sdef $in | sdp -fh --basename $basename -o $outdir', + description = 'SDEF $out') + +################################################################################ +# Build Statements +################################################################################ + +scons_generate_cmd = scons_cmd + " " + SCONS_ARGS + " --tree=all,prune dump_trace=1" +#scons_generate_cmd = 'cmd /c type scons2ninja.in' +#scons_generate_cmd = 'cat scons2ninja.in' + +# Pass 1: Parse dependencies (and prefilter some build rules) +build_lines = [] +dependencies = {} +mtflags = {} +previous_file = None +f = subprocess.Popen(scons_generate_cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell=True) +stage = 'preamble' +skip_nth_line = -1 +stack = ['.'] +for line in f.stdout : + line = line.rstrip() + + # Skip lines if requested from previous command + if skip_nth_line >= 0 : + skip_nth_line -= 1 + if skip_nth_line == 0 : + continue + + if line.startswith('scons: done building targets') : + break + + if stage == "preamble" : + # Pass all lines from the SCons configuration step to output + if re.match("^scons: Building targets ...", line) : + stage = "build" + 
else : + print line + + elif stage == "build" : + if line.startswith('+-') : + stage = "dependencies" + elif re.match("^Using tempfile", line) : + # Ignore response files from MSVS + skip_nth_line = 2 + else : + build_lines.append(line) + + # Already detect targets that will need 'mt' + tool, _, flags = parse_tool_command(line) + if tool == 'mt' : + target = get_unary_flags("-outputresource:", flags)[0] + target = target[0:target.index(';')] + mtflags[target] = flags + + elif stage == "dependencies" : + if not re.match('^[\s|]+\+\-', line) : + # Work around bug in SCons that splits output over multiple lines + continue + + level = line.index('+-') / 2 + filename = line[level*2+2:] + if filename.startswith('[') : + filename = filename[1:-1] + + # Check if we use the 'fixed' format which escapes filenamenames + if filename.startswith('\'') and filename.endswith('\'') : + filename = eval(filename) + + if level < len(stack) : + stack = stack[0:level] + elif level > len(stack) : + if level != len(stack) + 1 : + raise Exception("Internal Error" ) + stack.append(previous_filename) + + # Skip absolute paths + if not os.path.isabs(filename) : + target = stack[-1] + if target not in dependencies : + dependencies[target] = [] + dependencies[target].append(filename) + previous_filename = filename + +if f.wait() != 0 : + print("Error calling '" + scons_generate_cmd + "'") + print f.stderr.read() + exit(-1) + +# Pass 2: Parse build rules +tools = {} +for line in build_lines : + # Custom python function + m = re.match('^(\w+)\(\[([^\]]*)\]', line) + if m : + out = [x[1:-1] for x in m.group(2).split(',')] + for x in out : + # 'Note' = To be more correct, deps should also include $scons_dependencies, + # but this regenerates a bit too often, so leaving it out for now. + ninja.build(x, 'scons', None, deps = sorted(get_dependencies(x, ninja.targets))) + continue + + + # TextFile + m = re.match("^Creating '([^']+)'", line) + if m : + out = m.group(1) + # Note: To be more correct, deps should also include $scons_dependencies, + # but this regenerates a bit too often, so leaving it out for now. 
+ ninja.build(out, 'scons', None, deps = sorted(get_dependencies(out, ninja.targets))) + continue + + # Install + m = re.match('^Install file: "(.*)" as "(.*)"', line) + if m : + ninja.build(m.group(2), 'install', m.group(1)) + continue + + m = re.match('^Install directory: "(.*)" as "(.*)"', line) + if m : + for source in rglob('*', m.group(1)) : + if os.path.isdir(source) : + continue + target = os.path.join(m.group(2), os.path.relpath(source, m.group(1))) + ninja.build(target, 'install', source) + continue + + # Tools + tool, command, flags = parse_tool_command(line) + tools[tool] = command[0] + + ############################################################ + # clang/gcc tools + ############################################################ + + if tool == 'cc': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'cc', files, order_deps = '_generated_headers', ccflags = flags) + + elif tool == 'cxx': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'cxx', files, order_deps = '_generated_headers', cxxflags = flags) + + elif tool == 'glink': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + libs = get_unary_flags('-l', flags) + libpaths = get_unary_flags("-L", flags) + deps = get_built_libs(libs, libpaths, ninja.targets) + ninja.build(out, 'link', files, deps = sorted(deps), linkflags = flags) + + elif tool == 'ar': + objects, flags = partition(flags, lambda x: x.endswith('.o')) + libs, flags = partition(flags, lambda x: x.endswith('.a')) + out = libs[0] + ninja.build(out, 'ar', objects, arflags = flags) + + elif tool == 'ranlib': + pass + + + ############################################################ + # MSVC tools + ############################################################ + + elif tool == 'cl': + out, flags = extract_unary_flag("/Fo", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'cl', files, order_deps = '_generated_headers', clflags = flags) + + elif tool == 'lib': + out, flags = extract_unary_flag("/out:", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'lib', files, libflags = flags) + + elif tool == 'link': + objects, flags = partition(flags, lambda x: x.endswith('.obj') or x.endswith('.res')) + out, flags = extract_unary_flag("/out:", flags) + libs, flags = partition(flags, lambda x: not x.startswith("/") and x.endswith(".lib")) + libpaths = get_unary_flags("/libpath:", flags) + deps = get_built_libs(libs, libpaths, ninja.targets) + if out in mtflags : + ninja.build(out, 'link_mt', objects, deps = sorted(deps), + libs = libs, linkflags = flags, mtflags = mtflags[out]) + else : + ninja.build(out, 'link', objects, deps = sorted(deps), + libs = libs, linkflags = flags) + + elif tool == 'rc': + out, flags = extract_unary_flag("/fo", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'rc', files[0], order_deps = '_generated_headers', rcflags = flags) + + elif tool == 'mt': + # Already handled + pass + + ############################################################ + # Qt tools + ############################################################ + + elif tool == 'moc': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'moc', files, mocflags = flags) + + elif tool == 'uic': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'uic', files, uicflags = flags) + + 
elif tool == 'lrelease': + out, flags = extract_binary_flag("-qm", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'lrelease', files, lreleaseflags = flags) + + elif tool == 'rcc': + out, flags = extract_binary_flag("-o", flags) + name, flags = extract_binary_flag("-name", flags) + compress, flags = extract_binary_flag("--compress", flags) + threshold, flags = extract_binary_flag("--threshold", flags) + files, flags = extract_non_flags(flags) + deps = list(set(get_dependencies(out, ninja.targets)) - set(files)) + ninja.build(out, 'rcc', files, deps = sorted(deps), name = name, rccflags = ["--compress", compress, "--threshold", threshold]) + + ############################################################ + # OS X tools + ############################################################ + + elif tool == 'ibtool': + out, flags = extract_binary_flag("--compile", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'ibtool', files, ibtoolflags = flags) + + elif tool == 'dsymutil': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'dsymutil', files, dsymutilflags = flags) + + elif tool == 'sdef' : + source = flags[0]; + outdir, flags = extract_binary_flag("-o", flags) + basename, flags = extract_binary_flag("--basename", flags) + ninja.build(os.path.join(outdir, basename + ".h"), 'sdef', [source], + basename = basename, + outdir = outdir) + + elif tool == 'checker': + pass + + elif not ninja_custom_command(ninja, line) : + raise Exception("Unknown tool: '" + line + "'") + + +# Phony target for all generated headers, used as an order-only depency from all C/C++ sources +ninja.build('_generated_headers', 'phony', ninja.header_targets()) + +# Regenerate build.ninja file +ninja.build('build.ninja', 'generator', [], deps = [SCRIPT, CONFIGURATION_FILE]) + +# Header & variables +ninja.header("# This file is generated by " + SCRIPT) +ninja.variable("ninja_required_version", "1.3") +ninja.variable("scons_args", SCONS_ARGS) +for k, v in tools.iteritems() : + ninja.variable(k, v) + +# Extra customizations +if 'ninja_post' in dir() : + ninja_post(ninja) + + +################################################################################ +# Result +################################################################################ + +f = open(".scons2ninja.deps", "w") +f.write("build.ninja: " + " ".join([d for d in scons_dependencies if os.path.exists(d)]) + "\n") +f.close() + +f = open("build.ninja", "w") +f.write(ninja.serialize()) +f.close() |
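
Note on configuration: scons2ninja.py loads its settings with execfile() from a
.scons2ninja.conf file in the working directory; because execfile() runs in the
script's own global namespace, the file can override scons_cmd and
scons_dependencies directly and use helpers such as rglob(). The generator also
calls ninja_post(ninja) at the end, so the configuration file is expected to
redefine it as a function (the built-in default is an empty list). A minimal
sketch of such a file, assuming the layout above; the 'all' convenience target
is illustrative and not taken from the repository:

    # .scons2ninja.conf -- plain Python 2, executed by scons2ninja.py via
    # execfile(), so it shares the generator's globals (rglob, ninja, ...).

    # Command the generated rules use to (re)run SCons.
    scons_cmd = "./scons"

    # Changes to any of these files cause ninja to regenerate build.ninja.
    scons_dependencies = ['SConstruct'] + rglob('SConscript')

    def ninja_post(ninja):
        # Called after all build statements have been emitted; add an 'all'
        # convenience target that depends on everything generated so far.
        ninja.build('all', 'phony', ninja.targets)

    def ninja_custom_command(ninja, line):
        # Optional hook for project-specific build lines the generator does not
        # recognize; return True when the line has been handled.
        return False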