Diffstat (limited to 'BuildTools')
29 files changed, 1718 insertions, 586 deletions
diff --git a/BuildTools/CLang/.gitignore b/BuildTools/CLang/.gitignore deleted file mode 100644 index df682c0..0000000 --- a/BuildTools/CLang/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -CLangDiagnosticsFlags -CLangDiagnosticsFlagsTool.sh -CLangDiagnosticsFlagsTool -clang-diagnostics-overview.* diff --git a/BuildTools/CLang/CLangDiagnosticsFlagsTool.cpp b/BuildTools/CLang/CLangDiagnosticsFlagsTool.cpp deleted file mode 100644 index ccd5925..0000000 --- a/BuildTools/CLang/CLangDiagnosticsFlagsTool.cpp +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright (c) 2011 Remko Tronçon - * Licensed under the GNU General Public License v3. - * See Documentation/Licenses/GPLv3.txt for more information. - */ - -#include <iostream> -#include <set> -#include <vector> -#include <cassert> -#include <boost/algorithm/string/predicate.hpp> -#include <boost/graph/graph_traits.hpp> -#include <boost/graph/adjacency_list.hpp> -#include <boost/graph/topological_sort.hpp> -#include <boost/graph/topological_sort.hpp> -#include <boost/graph/graphviz.hpp> - -// ----------------------------------------------------------------------------- -// Include diagnostics data from CLang -// ----------------------------------------------------------------------------- - -#define DIAG(name, a, b, c, d, e, f, g) name, - -namespace diag { - enum LexKinds { -#include <clang/Basic/DiagnosticLexKinds.inc> -#include <clang/Basic/DiagnosticParseKinds.inc> -#include <clang/Basic/DiagnosticCommonKinds.inc> -#include <clang/Basic/DiagnosticDriverKinds.inc> -#include <clang/Basic/DiagnosticFrontendKinds.inc> -#include <clang/Basic/DiagnosticSemaKinds.inc> - }; -} - -#define GET_DIAG_ARRAYS -#include <clang/Basic/DiagnosticGroups.inc> -#undef GET_DIAG_ARRAYS - -struct DiagTableEntry { - const char* name; - const short* array; - const short* group; -}; - -static const DiagTableEntry diagnostics[] = { -#define GET_DIAG_TABLE -#include <clang/Basic/DiagnosticGroups.inc> -#undef GET_DIAG_TABLE -}; -static const size_t diagnostics_count = sizeof(diagnostics) / sizeof(diagnostics[0]); - -// ----------------------------------------------------------------------------- - -using namespace boost; - -struct Properties { - Properties() : have(false), implicitHave(false), dontWant(false), implicitDontWant(false), ignored(false), available(false), missing(false), redundant(false), alreadyCovered(false) { - } - - std::string name; - bool have; - bool implicitHave; - bool dontWant; - bool implicitDontWant; - bool ignored; - bool available; - bool missing; - bool redundant; - bool alreadyCovered; -}; - -class GraphVizLabelWriter { - public: - GraphVizLabelWriter(const std::vector<Properties>& properties) : properties(properties) { - } - - template <class VertexOrEdge> - void operator()(std::ostream& out, const VertexOrEdge& v) const { - std::string color; - if (properties[v].missing) { - color = "orange"; - } - else if (properties[v].redundant) { - color = "lightblue"; - } - else if (properties[v].have) { - color = "darkgreen"; - } - else if (properties[v].implicitHave) { - color = "green"; - } - else if (properties[v].dontWant) { - color = "red"; - } - else if (properties[v].implicitDontWant) { - color = "pink"; - } - else if (properties[v].ignored) { - color = "white"; - } - else if (properties[v].available) { - color = "yellow"; - } - else { - assert(false); - } - out << "[label=" << escape_dot_string(properties[v].name) << " fillcolor=\"" << color << "\" style=filled]"; - } - - private: - const std::vector<Properties> properties; -}; - -int main(int argc, char* 
argv[]) { - // Parse command-line arguments - std::set<std::string> have; - std::set<std::string> dontWant; - std::string outputDir; - for (int i = 1; i < argc; ++i) { - std::string arg(argv[i]); - if (starts_with(arg, "-W")) { - have.insert(arg.substr(2, arg.npos)); - } - else if (starts_with(arg, "-w")) { - dontWant.insert(arg.substr(2, arg.npos)); - } - else if (starts_with(arg, "-O")) { - outputDir = arg.substr(2, arg.npos) + "/"; - } - } - - // Build the graph and initialize properties - typedef adjacency_list<vecS, vecS, bidirectionalS> Graph; - typedef graph_traits<Graph>::vertex_descriptor Vertex; - Graph g(diagnostics_count); - std::vector<Properties> properties(num_vertices(g)); - for (size_t i = 0; i < diagnostics_count; ++i) { - std::string name(diagnostics[i].name); - properties[i].name = name; - properties[i].implicitHave = properties[i].have = have.find(name) != have.end(); - properties[i].implicitDontWant = properties[i].dontWant = dontWant.find(name) != dontWant.end(); - properties[i].ignored = diagnostics[i].group == 0 && diagnostics[i].array == 0; - properties[i].alreadyCovered = false; - properties[i].available = true; - for (const short* j = diagnostics[i].group; j && *j != -1; ++j) { - add_edge(i, *j, g); - } - } - - // Sort the diagnostics - std::list<Vertex> sortedDiagnostics; - boost::topological_sort(g, std::front_inserter(sortedDiagnostics)); - - // Propagate dontWant and have properties down - for(std::list<Vertex>::const_iterator i = sortedDiagnostics.begin(); i != sortedDiagnostics.end(); ++i) { - graph_traits<Graph>::adjacency_iterator adjacentIt, adjacentEnd; - for (tie(adjacentIt, adjacentEnd) = adjacent_vertices(*i, g); adjacentIt != adjacentEnd; ++adjacentIt) { - properties[*adjacentIt].implicitDontWant = properties[*i].implicitDontWant || properties[*adjacentIt].implicitDontWant; - properties[*adjacentIt].implicitHave = properties[*i].implicitHave || properties[*adjacentIt].implicitHave; - } - } - - // Propagate 'available' property upwards - for(std::list<Vertex>::const_reverse_iterator i = sortedDiagnostics.rbegin(); i != sortedDiagnostics.rend(); ++i) { - properties[*i].available = properties[*i].available && !properties[*i].implicitDontWant; - graph_traits<Graph>::in_edge_iterator edgesIt, edgesEnd; - graph_traits<Graph>::edge_descriptor edge; - for (tie(edgesIt, edgesEnd) = in_edges(*i, g); edgesIt != edgesEnd; ++edgesIt) { - properties[source(*edgesIt, g)].available = properties[source(*edgesIt, g)].available && properties[*i].available; - } - } - - // Collect missing & redundant flags - std::set<std::string> missing; - std::set<std::string> redundant; - for(std::list<Vertex>::const_iterator i = sortedDiagnostics.begin(); i != sortedDiagnostics.end(); ++i) { - bool markChildrenCovered = true; - if (properties[*i].alreadyCovered) { - if (properties[*i].have) { - properties[*i].redundant = true; - redundant.insert(properties[*i].name); - } - } - else { - if (properties[*i].available) { - if (!properties[*i].implicitHave && !properties[*i].ignored) { - properties[*i].missing = true; - missing.insert(properties[*i].name); - } - } - else { - markChildrenCovered = false; - } - } - if (markChildrenCovered) { - graph_traits<Graph>::adjacency_iterator adjacentIt, adjacentEnd; - for (tie(adjacentIt, adjacentEnd) = adjacent_vertices(*i, g); adjacentIt != adjacentEnd; ++adjacentIt) { - properties[*adjacentIt].alreadyCovered = true; - } - } - } - - // Write information - if (!missing.empty()) { - std::cout << "Missing diagnostic flags: "; - 
for(std::set<std::string>::const_iterator i = missing.begin(); i != missing.end(); ++i) { - std::cout << "-W" << *i << " "; - } - std::cout<< std::endl; - } - - if (!redundant.empty()) { - std::cout << "Redundant diagnostic flags: "; - for(std::set<std::string>::const_iterator i = redundant.begin(); i != redundant.end(); ++i) { - std::cout << "-W" << *i << " "; - } - std::cout<< std::endl; - } - - // Write graphviz file - if (!outputDir.empty()) { - std::ofstream f((outputDir + "clang-diagnostics-overview.dot").c_str()); - write_graphviz(f, g, GraphVizLabelWriter(properties)); - f.close(); - } - - return 0; -} diff --git a/BuildTools/CLang/SConscript b/BuildTools/CLang/SConscript deleted file mode 100644 index 850c35c..0000000 --- a/BuildTools/CLang/SConscript +++ /dev/null @@ -1,15 +0,0 @@ -Import("env") - -#myenv = Environment() -#myenv.Append(CPPPATH = ["."]) -#myenv.Program("CLangDiagnosticsFlagsTool", ["CLangDiagnosticsFlagsTool.cpp"]) -# -#disabledDiagnostics = ["-wunreachable-code", "-wunused-macros", "-wmissing-noreturn", "-wlong-long", "-wcast-align", "-wglobal-constructors", "-wmissing-prototypes", "-wpadded", "-wshadow"] -#clangDiagnosticsFlagsToolCommand = "BuildTools/CLang/CLangDiagnosticsFlagsTool -O" + env.Dir(".").abspath + " " + " ".join(disabledDiagnostics) + " " -#clangDiagnosticsFlagsToolCommand += " ".join([flag for flag in env["CXXFLAGS"] if flag.startswith("-W")]) -#clangDiagnosticsFlagsToolCommand += "\n" -#clangDiagnosticsFlagsToolCommand += "dot -Tpng " + env.Dir(".").abspath + "/clang-diagnostics-overview.dot > " + env.Dir(".").abspath + "/clang-diagnostics-overview.png\n" -#v = env.WriteVal("#/BuildTools/CLang/CLangDiagnosticsFlagsTool.sh", env.Value(clangDiagnosticsFlagsToolCommand)) -#env.AddPostAction(v, Chmod(v[0], 0755)) -# -# diff --git a/BuildTools/CheckHeaders.py b/BuildTools/CheckHeaders.py index 73f49db..274a760 100755 --- a/BuildTools/CheckHeaders.py +++ b/BuildTools/CheckHeaders.py @@ -3,18 +3,39 @@ import os, sys +FORBIDDEN_INCLUDES = [ + ("iostream", ["Swiften/Base/format.h"]), + ("Base/Log.h", []), + ("Base/format.h", []), + ("algorithm", ["Swiften/Base/Algorithm.h", "Swiften/Base/SafeAllocator.h", "Swiften/Base/Listenable.h"]), + ("boost/bind.hpp", ["Swiften/Base/Listenable.h"]), + ("boost/filesystem.hpp", []), + ("Base/foreach.h", []), + ("boost/date_time/date_time.hpp", []), + ("boost/filesystem/filesystem.hpp", []), + + # To avoid + ("Base/Algorithm.h", ["Swiften/StringCodecs/HMAC.h"]), +] + foundBadHeaders = False -for (path, dirs, files) in os.walk(".") : - if "3rdParty" in path or ".sconf" in path or ".framework" in path : - continue - if not "Swiften" in path : - continue +filename = sys.argv[1] + +if "3rdParty" in filename or ".sconf" in filename or ".framework" in filename or not filename.endswith(".h") : + sys.exit(0) +if not "Swiften" in filename : + sys.exit(0) +if filename.endswith("Swiften.h") : + sys.exit(0) - for filename in [os.path.join(path, file) for file in files if file.endswith(".h")] : file = open(filename, "r") for line in file.readlines() : - for include in ["iostream", "algorithm", "cassert", "boost/bind.hpp", "boost/filesystem.hpp", "Base/foreach.h", "Base/Log.h", "boost/date_time/date_time.hpp", "boost/filesystem/filesystem.hpp"] : - if "#include" in line and include in line and not "Base/Log" in filename : - print "Found " + include + " include in " + filename + if not "#include" in line : + continue + if "Base/Log.h" in filename : + continue + for forbiddenInclude, ignores in FORBIDDEN_INCLUDES : + if 
forbiddenInclude in line and len([x for x in ignores if x in filename]) == 0 : + print "Found " + forbiddenInclude + " include in " + filename foundBadHeaders = True diff --git a/BuildTools/CheckTabs.py b/BuildTools/CheckTabs.py index c685fc4..e007a68 100755 --- a/BuildTools/CheckTabs.py +++ b/BuildTools/CheckTabs.py @@ -5,7 +5,6 @@ import os, sys foundExpandedTabs = False -for (path, dirs, files) in os.walk(".") : - if not "3rdParty" in path and not ".sconf" in path and not ".framework" in path and not path.startswith("build") : - for filename in [os.path.join(path, file) for file in files if (file.endswith(".cpp") or file.endswith(".h")) and not "ui_" in file and not "moc_" in file and not "qrc_" in file] : +filename = sys.argv[1] +if (filename.endswith(".cpp") or filename.endswith(".h")) and not "3rdParty" in filename : file = open(filename, "r") contents = [] @@ -30,12 +29,3 @@ for (path, dirs, files) in os.walk(".") : file.close() if contentsChanged : - if len(sys.argv) > 1 and sys.argv[1] == "--fix" : - print("Fixing tabs in " + filename) - file = open(filename, "w") - file.write(''.join(contents)) - file.close() - else : - foundExpandedTabs = True - print(filename + " contains expanded tabs") - -sys.exit(foundExpandedTabs) + sys.exit(-1) diff --git a/BuildTools/CheckTranslations.py b/BuildTools/CheckTranslations.py index 95c7e91..b39af08 100755 --- a/BuildTools/CheckTranslations.py +++ b/BuildTools/CheckTranslations.py @@ -45,6 +45,6 @@ for filename in os.listdir("Swift/Translations") : if not finished : print "[Warning] " + filename + ": Unfinished" - if language not in desktop_generic_names : + if language not in desktop_generic_names and language != "en" : print "[Warning] GenericName field missing in desktop entry for " + language - if language not in desktop_comments : + if language not in desktop_comments and language != "en" : print "[Warning] Comment field missing in desktop entry for " + language diff --git a/BuildTools/Copyright/find-contribs.py b/BuildTools/Copyright/find-contribs.py new file mode 100755 index 0000000..63c454e --- /dev/null +++ b/BuildTools/Copyright/find-contribs.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +import subprocess + +def print_log(full_log): + full_log_lines = full_log.split("\n") + + commits = [] + + commit_bit = "commit " + author_bit = "Author: " + date_bit = "Date: " + + commit = None + for line in full_log_lines: + + if line[0:len(commit_bit)] == commit_bit: + if commit: + commits.append(commit) + commit = {'text':''} + handled = False + for bit in [commit_bit, author_bit, date_bit]: + if line[0:len(bit)] == bit: + commit[bit] = line + handled = True + if not handled: + commit['text'] += line + + commits.append(commit) + + contributions = [] + + for commit in commits: + if not "git@kismith.co.uk" in commit[author_bit] and not "git@el-tramo.be" in commit[author_bit]: + contributions.append(commit) + + #print contributions + contributors = {} + for commit in contributions: + if not commit[author_bit] in contributors: + contributors[commit[author_bit]] = [] + contributors[commit[author_bit]].append(commit[commit_bit]) + + for contributor in contributors: + print contributor + " has contributed patches " + ", ".join([commit[len(commit_bit):] for commit in contributors[contributor]]) + +full_swiften_log = subprocess.check_output(["git", "log", "--", "Swiften"]) + +print "Contributors for Swiften/ subtree:\n" +print_log(full_swiften_log) + +full_all_log = subprocess.check_output(["git", "log"]) + +print "\n\n\n\n" + +print "Contributors for 
full tree:\n" +print_log(full_all_log) diff --git a/BuildTools/Copyrighter.py b/BuildTools/Copyrighter.py index 248873b..ccb2019 100755 --- a/BuildTools/Copyrighter.py +++ b/BuildTools/Copyrighter.py @@ -130,14 +130,7 @@ def set_copyright(filename, copyright) : if sys.argv[1] == "check-copyright" : - if not check_copyright(sys.argv[2]) : - sys.exit(-1) -elif sys.argv[1] == "check-all-copyrights" : - ok = True - for (path, dirs, files) in os.walk(".") : - if "3rdParty" in path or ".sconf" in path or "Swift.app" in path or path.startswith("build") or "xmppbench" in path : - continue - for filename in [os.path.join(path, file) for file in files if (file.endswith(".cpp") or file.endswith(".h")) and not "ui_" in file and not "moc_" in file and not "qrc_" in file and not "BuildVersion.h" in file and not "Swiften.h" in file and not "Version.h" in file and not "swiften-config.h" in file and not "linit.cpp" in file ] : - ok &= check_copyright(filename) - if not ok : + file = sys.argv[2] + if (file.endswith(".cpp") or file.endswith(".h")) and not "3rdParty" in file : + if not check_copyright(file) : sys.exit(-1) elif sys.argv[1] == "set-copyright" : @@ -145,12 +138,4 @@ elif sys.argv[1] == "set-copyright" : copyright = get_copyright(username, email) set_copyright(sys.argv[2], copyright) -elif sys.argv[1] == "set-all-copyrights" : - (username, email) = get_userinfo() - copyright = get_copyright(username, email) - for (path, dirs, files) in os.walk(".") : - if "3rdParty" in path or ".sconf" in path or "Swift.app" in path : - continue - for filename in [os.path.join(path, file) for file in files if (file.endswith(".cpp") or file.endswith(".h")) and not "ui_" in file and not "moc_" in file and not "qrc_" in file and not "BuildVersion.h" in file and not "swiften-config.h" in file] : - set_copyright(filename, copyright) else : print "Unknown command: " + sys.argv[1] diff --git a/BuildTools/Cppcheck.sh b/BuildTools/Cppcheck.sh index 7b6a33b..d318150 100755 --- a/BuildTools/Cppcheck.sh +++ b/BuildTools/Cppcheck.sh @@ -15,4 +15,6 @@ cppcheck $@ \ -i Swift/QtUI/Roster/main.cpp \ -i Swift/QtUI/NotifierTest/NotifierTest.cpp \ + -DSWIFTEN_BUILDING -DSWIFTEN_STATIC \ + -U__BEOS__ -U__CYGWIN__ -U__QNNXTO__ -U__amigaos__ -Uhpux -U__sgi \ \ -I . 
\ diff --git a/BuildTools/FilterScanBuildResults.py b/BuildTools/FilterScanBuildResults.py new file mode 100755 index 0000000..53a345f --- /dev/null +++ b/BuildTools/FilterScanBuildResults.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python + +import os, os.path, sys, re + +resultsDir = sys.argv[1] +resultDirs = [ d for d in os.listdir(resultsDir) if os.path.isdir(os.path.join(resultsDir, d)) ] +resultDirs.sort() +if len(resultDirs) > 0 : + resultDir = os.path.join(resultsDir, resultDirs[-1]) + resultFileName = os.path.join(resultDir, "index.html") + resultData = [] + f = open(resultFileName, "r") + skipLines = 0 + for line in f.readlines() : + if skipLines > 0 : + skipLines -= 1 + else : + if ("3rdParty" in line or "SHA1.cpp" in line or "lua.c" in line) : + m = re.match(".*(report-.*\.html)", line) + os.remove(os.path.join(resultDir, m.group(1))) + skipLines = 2 + else : + resultData.append(line) + f.close() + + f = open(resultFileName, "w") + f.writelines(resultData) + f.close() diff --git a/BuildTools/GetBuildVersion.py b/BuildTools/GetBuildVersion.py index 007fec8..fc92d15 100755 --- a/BuildTools/GetBuildVersion.py +++ b/BuildTools/GetBuildVersion.py @@ -12,5 +12,5 @@ if "--major" in sys.argv : if only_major : - v = Version.getBuildVersion(os.path.dirname(sys.argv[0] + "/.."), sys.argv[1]) + v = Version.getBuildVersion(os.path.dirname(sys.argv[0]) + "/..", sys.argv[1]) version_match = re.match("(\d+)\.(\d+).*", v) if version_match : @@ -19,3 +19,3 @@ if only_major : print "0" else : - print Version.getBuildVersion(os.path.dirname(sys.argv[0] + "/.."), sys.argv[1]) + print Version.getBuildVersion(os.path.dirname(sys.argv[0]) + "/..", sys.argv[1]) diff --git a/BuildTools/Git/Hooks/commit-msg b/BuildTools/Git/Hooks/commit-msg new file mode 100755 index 0000000..e2c3df2 --- /dev/null +++ b/BuildTools/Git/Hooks/commit-msg @@ -0,0 +1,174 @@ +#!/bin/sh +# From Gerrit Code Review 2.4.2 +# +# Part of Gerrit Code Review (http://code.google.com/p/gerrit/) +# +# Copyright (C) 2009 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +CHANGE_ID_AFTER="Bug|Issue" +MSG="$1" + +# Check for, and add if missing, a unique Change-Id +# +add_ChangeId() { + clean_message=`sed -e ' + /^diff --git a\/.*/{ + s/// + q + } + /^Signed-off-by:/d + /^#/d + ' "$MSG" | git stripspace` + if test -z "$clean_message" + then + return + fi + + # Does Change-Id: already exist? if so, exit (no change). 
+ if grep -i '^Change-Id:' "$MSG" >/dev/null + then + return + fi + + id=`_gen_ChangeId` + T="$MSG.tmp.$$" + AWK=awk + if [ -x /usr/xpg4/bin/awk ]; then + # Solaris AWK is just too broken + AWK=/usr/xpg4/bin/awk + fi + + # How this works: + # - parse the commit message as (textLine+ blankLine*)* + # - assume textLine+ to be a footer until proven otherwise + # - exception: the first block is not footer (as it is the title) + # - read textLine+ into a variable + # - then count blankLines + # - once the next textLine appears, print textLine+ blankLine* as these + # aren't footer + # - in END, the last textLine+ block is available for footer parsing + $AWK ' + BEGIN { + # while we start with the assumption that textLine+ + # is a footer, the first block is not. + isFooter = 0 + footerComment = 0 + blankLines = 0 + } + + # Skip lines starting with "#" without any spaces before it. + /^#/ { next } + + # Skip the line starting with the diff command and everything after it, + # up to the end of the file, assuming it is only patch data. + # If more than one line before the diff was empty, strip all but one. + /^diff --git a/ { + blankLines = 0 + while (getline) { } + next + } + + # Count blank lines outside footer comments + /^$/ && (footerComment == 0) { + blankLines++ + next + } + + # Catch footer comment + /^\[[a-zA-Z0-9-]+:/ && (isFooter == 1) { + footerComment = 1 + } + + /]$/ && (footerComment == 1) { + footerComment = 2 + } + + # We have a non-blank line after blank lines. Handle this. + (blankLines > 0) { + print lines + for (i = 0; i < blankLines; i++) { + print "" + } + + lines = "" + blankLines = 0 + isFooter = 1 + footerComment = 0 + } + + # Detect that the current block is not the footer + (footerComment == 0) && (!/^\[?[a-zA-Z0-9-]+:/ || /^[a-zA-Z0-9-]+:\/\//) { + isFooter = 0 + } + + { + # We need this information about the current last comment line + if (footerComment == 2) { + footerComment = 0 + } + if (lines != "") { + lines = lines "\n"; + } + lines = lines $0 + } + + # Footer handling: + # If the last block is considered a footer, splice in the Change-Id at the + # right place. + # Look for the right place to inject Change-Id by considering + # CHANGE_ID_AFTER. Keys listed in it (case insensitive) come first, + # then Change-Id, then everything else (eg. Signed-off-by:). + # + # Otherwise just print the last block, a new line and the Change-Id as a + # block of its own. + END { + unprinted = 1 + if (isFooter == 0) { + print lines "\n" + lines = "" + } + changeIdAfter = "^(" tolower("'"$CHANGE_ID_AFTER"'") "):" + numlines = split(lines, footer, "\n") + for (line = 1; line <= numlines; line++) { + if (unprinted && match(tolower(footer[line]), changeIdAfter) != 1) { + unprinted = 0 + print "Change-Id: I'"$id"'" + } + print footer[line] + } + if (unprinted) { + print "Change-Id: I'"$id"'" + } + }' "$MSG" > $T && mv $T "$MSG" || rm -f $T +} +_gen_ChangeIdInput() { + echo "tree `git write-tree`" + if parent=`git rev-parse "HEAD^0" 2>/dev/null` + then + echo "parent $parent" + fi + echo "author `git var GIT_AUTHOR_IDENT`" + echo "committer `git var GIT_COMMITTER_IDENT`" + echo + printf '%s' "$clean_message" +} +_gen_ChangeId() { + _gen_ChangeIdInput | + git hash-object -t commit --stdin +} + + +add_ChangeId diff --git a/BuildTools/Git/Hooks/pre-commit b/BuildTools/Git/Hooks/pre-commit index 8130ad6..ad0945e 100755 --- a/BuildTools/Git/Hooks/pre-commit +++ b/BuildTools/Git/Hooks/pre-commit @@ -1,13 +1,23 @@ #!/bin/sh -echo "Checking tabs ..." -if ! 
BuildTools/CheckTabs.py; then - echo "Expanded tabs found. Aborting commit." +IFS=' +' + +echo "Checking tabs & copyrights ..." +for file in $(git diff --cached --name-only); do + if [ ! -f $file ]; then + continue + fi + if ! BuildTools/CheckTabs.py $file; then + echo "ERROR: '$file' contains expanded tabs. Aborting commit." exit -1 fi - -echo "Checking copyrights ..." -if ! BuildTools/Copyrighter.py check-all-copyrights; then - echo "Copyright error found. Aborting commit." + if ! BuildTools/Copyrighter.py check-copyright $file; then + echo "ERROR: '$file' has a copyright error. Aborting commit." + exit -1 + fi + if ! BuildTools/CheckHeaders.py $file; then + echo "ERROR: '$file' failed header sanity test. Aborting commit." exit -1 fi +done diff --git a/BuildTools/MSVS/.gitignore b/BuildTools/MSVS/.gitignore deleted file mode 100644 index 95a4834..0000000 --- a/BuildTools/MSVS/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -*.suo -*.ncp -Slimber -Swift diff --git a/BuildTools/MSVS/GenerateProjects.py b/BuildTools/MSVS/GenerateProjects.py deleted file mode 100644 index d13df08..0000000 --- a/BuildTools/MSVS/GenerateProjects.py +++ /dev/null @@ -1,100 +0,0 @@ -import os, os.path - -projects = [("Swift", "Swift\QtUI\Swift.exe"), ("Slimber", "Slimber\Qt\Slimber.exe")] - -for (project, outputbin) in projects : - if not os.path.exists(project) : - os.mkdir(project) - output = open(os.path.join(project, project + ".vcproj"), "w") - - headers = [] - sources = [] - for root, dirs, files in os.walk(os.path.join("..", "..", project)) : - for file in files : - if file.endswith(".h") : - headers.append('<File RelativePath="' + os.path.join("..", root, file) + '" />') - elif file.endswith(".cpp") : - sources.append('<File RelativePath="' + os.path.join("..", root, file) + '" />') - - output.write("""<?xml version="1.0" encoding="Windows-1252"?> -<VisualStudioProject - ProjectType="Visual C++" - Version="9.00" - Name="%(project)s" - Keyword="MakeFileProj" - TargetFrameworkVersion="196613" - > - <Platforms> - <Platform - Name="Win32" - /> - </Platforms> - <ToolFiles> - </ToolFiles> - <Configurations> - <Configuration - Name="Debug|Win32" - OutputDirectory="$(ConfigurationName)" - IntermediateDirectory="$(ConfigurationName)" - ConfigurationType="0" - > - <Tool - Name="VCNMakeTool" - BuildCommandLine="cd ..\..\..\ && scons debug=1 %(project)s" - ReBuildCommandLine="" - CleanCommandLine="cd ..\..\..\ && scons -c debug=1 %(project)s" - Output="..\..\..\%(output)s" - PreprocessorDefinitions="WIN32;_DEBUG" - IncludeSearchPath="" - ForcedIncludes="" - AssemblySearchPath="" - ForcedUsingAssemblies="" - CompileAsManaged="" - /> - </Configuration> - <Configuration - Name="Release|Win32" - OutputDirectory="$(ConfigurationName)" - IntermediateDirectory="$(ConfigurationName)" - ConfigurationType="0" - > - <Tool - Name="VCNMakeTool" - BuildCommandLine="cd ..\..\..\ && scons %(project)s" - ReBuildCommandLine="" - CleanCommandLine="cd ..\..\..\ && scons -c %(project)s" - Output="..\..\..\%(output)s" - PreprocessorDefinitions="WIN32;NDEBUG" - IncludeSearchPath="" - ForcedIncludes="" - AssemblySearchPath="" - ForcedUsingAssemblies="" - CompileAsManaged="" - /> - </Configuration> - </Configurations> - <References> - </References> - <Files> - <Filter - Name="Source Files" - Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx" - > - %(sources)s - </Filter> - <Filter - Name="Header Files" - Filter="h;hpp;hxx;hm;inl;inc;xsd" - > - %(headers)s - </Filter> - <Filter - Name="Resource Files" - 
Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav" - > - </Filter> - </Files> - <Globals> - </Globals> -</VisualStudioProject>""" % { "project": project, "output" : outputbin, "headers" : '\n'.join(headers), "sources": '\n'.join(sources) }) - output.close() diff --git a/BuildTools/MSVS/Swift.sln b/BuildTools/MSVS/Swift.sln deleted file mode 100644 index 2724f81..0000000 --- a/BuildTools/MSVS/Swift.sln +++ /dev/null @@ -1,26 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 10.00 -# Visual C++ Express 2008 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Swift", "Swift\Swift.vcproj", "{C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}" -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Slimber", "Slimber\Slimber.vcproj", "{597242B2-A667-47A1-B69E-D2C4281183D0}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Win32 = Debug|Win32 - Release|Win32 = Release|Win32 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}.Debug|Win32.ActiveCfg = Debug|Win32 - {C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}.Debug|Win32.Build.0 = Debug|Win32 - {C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}.Release|Win32.ActiveCfg = Release|Win32 - {C67C3A5B-1382-4B4A-88F7-3BFC98DA43A2}.Release|Win32.Build.0 = Release|Win32 - {597242B2-A667-47A1-B69E-D2C4281183D0}.Debug|Win32.ActiveCfg = Debug|Win32 - {597242B2-A667-47A1-B69E-D2C4281183D0}.Debug|Win32.Build.0 = Debug|Win32 - {597242B2-A667-47A1-B69E-D2C4281183D0}.Release|Win32.ActiveCfg = Release|Win32 - {597242B2-A667-47A1-B69E-D2C4281183D0}.Release|Win32.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/BuildTools/SCons/SConscript.boot b/BuildTools/SCons/SConscript.boot index a3a5c6d..10fc7d3 100644 --- a/BuildTools/SCons/SConscript.boot +++ b/BuildTools/SCons/SConscript.boot @@ -9,10 +9,12 @@ vars = Variables(os.path.join(Dir("#").abspath, "config.py")) vars.Add('cc', "C compiler") vars.Add('cxx', "C++ compiler") -vars.Add('ccflags', "Extra C(++) compiler flags") +vars.Add('ccflags', "Extra C/C++/ObjC compiler flags") +vars.Add('cxxflags', "Extra C++ compiler flags") vars.Add('link', "Linker") vars.Add('linkflags', "Extra linker flags") +vars.Add('ar', "Archiver (ar or lib)") +if os.name == "nt": + vars.Add('mt', "manifest tool") vars.Add(BoolVariable("ccache", "Use CCache", "no")) -vars.Add(BoolVariable("distcc", "Use DistCC", "no")) -vars.Add('distcc_hosts', "DistCC hosts (overrides DISTCC_HOSTS)") vars.Add(EnumVariable("test", "Compile and run tests", "none", ["none", "all", "unit", "system"])) vars.Add(BoolVariable("optimize", "Compile with optimizations turned on", "no")) @@ -21,6 +23,9 @@ vars.Add(BoolVariable("allow_warnings", "Allow compilation warnings during compi vars.Add(BoolVariable("assertions", "Compile with assertions", "yes")) vars.Add(BoolVariable("max_jobs", "Build with maximum number of parallel jobs", "no")) -vars.Add(EnumVariable("target", "Choose a target platform for compilation", "native", ["native", "iphone-simulator", "iphone-device", "xcode"])) +vars.Add(EnumVariable("target", "Choose a target platform for compilation", "native", ["native", "iphone-simulator", "iphone-device", "xcode", "android"])) +vars.Add('android_toolchain', "Path to Android toolchain") +vars.Add('android_sdk_bin', "Path to Android SDK's tools directory") vars.Add(BoolVariable("swift_mobile", "Build mobile 
Swift", "no")) +vars.Add(BoolVariable("swiften_dll", "Build Swiften as dynamically linked library", "no")) if os.name != "nt" : vars.Add(BoolVariable("coverage", "Compile with coverage information", "no")) @@ -30,4 +35,5 @@ if os.name == "mac" or (os.name == "posix" and os.uname()[0] == "Darwin"): vars.Add(BoolVariable("universal", "Create universal binaries", "no")) vars.Add(BoolVariable("mac105", "Link against the 10.5 frameworks", "no")) + vars.Add(BoolVariable("mac106", "Link against the 10.6 frameworks", "no")) if os.name == "nt" : vars.Add(PathVariable("vcredist", "MSVC redistributable dir", None, PathVariable.PathAccept)) @@ -37,24 +43,63 @@ if os.name == "nt" : vars.Add(PackageVariable("bonjour", "Bonjour SDK location", "yes")) vars.Add(PackageVariable("openssl", "OpenSSL location", "yes")) +vars.Add("openssl_libnames", "Comma-separated openssl library names to override defaults", None) +vars.Add(BoolVariable("openssl_force_bundled", "Force use of the bundled OpenSSL", "no")) +vars.Add("openssl_include", "Location of OpenSSL include files (if not under (openssl)/include)", None) +vars.Add("openssl_libdir", "Location of OpenSSL library files (if not under (openssl)/lib)", None) +vars.Add(PackageVariable("hunspell", "Hunspell location", False)) vars.Add(PathVariable("boost_includedir", "Boost headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("boost_libdir", "Boost library location", None, PathVariable.PathAccept)) +vars.Add(BoolVariable("boost_bundled_enable", "Allow use of bundled Boost as last resort", "true")) +vars.Add(PathVariable("zlib_includedir", "Zlib headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("zlib_libdir", "Zlib library location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("zlib_libfile", "Zlib library file (full path to file)", None, PathVariable.PathAccept)) +vars.Add(BoolVariable("zlib_bundled_enable", "Allow use of bundled Zlib as last resort", "true")) +vars.Add(BoolVariable("try_gconf", "Try configuring for GConf?", "true")) +vars.Add(BoolVariable("try_libxml", "Try configuring for libXML?", "true")) +vars.Add(BoolVariable("try_expat", "Try configuring for expat?", "true")) vars.Add(PathVariable("expat_includedir", "Expat headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("expat_libdir", "Expat library location", None, PathVariable.PathAccept)) -vars.Add("expat_libname", "Expat library name", "libexpat" if os.name == "nt" else "expat") +vars.Add("expat_libname", "Expat library name", os.name == "nt" and "libexpat" or "expat") +vars.Add(PackageVariable("icu", "ICU library location", "no")) +vars.Add(BoolVariable("libidn_bundled_enable", "Allow use of bunded Expat", "true")) +vars.Add(BoolVariable("try_libidn", "Try configuring for LibIDN?", "true")) vars.Add(PathVariable("libidn_includedir", "LibIDN headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("libidn_libdir", "LibIDN library location", None, PathVariable.PathAccept)) -vars.Add("libidn_libname", "LibIDN library name", "libidn" if os.name == "nt" else "idn") +vars.Add("libidn_libname", "LibIDN library name", os.name == "nt" and "libidn" or "idn") +vars.Add(BoolVariable("need_idn", "Whether an IDN library is required. 
Without this, most internal binaries will fail", "true")) +vars.Add(PathVariable("libminiupnpc_includedir", "LibMiniUPNPC headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("libminiupnpc_libdir", "LibMiniUPNPC library location", None, PathVariable.PathAccept)) +vars.Add("libminiupnpc_libname", "LibMiniUPNPC library name", os.name == "nt" and "libminiupnpc" or "miniupnpc") +vars.Add(PathVariable("libnatpmp_includedir", "LibNATPMP headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("libnatpmp_libdir", "LibNATPMP library location", None, PathVariable.PathAccept)) +vars.Add("libnatpmp_libname", "LibNATPMP library name", os.name == "nt" and "libnatpmp" or "natpmp") vars.Add(PathVariable("sqlite_includedir", "SQLite headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("sqlite_libdir", "SQLite library location", None, PathVariable.PathAccept)) -vars.Add("sqlite_libname", "SQLite library name", "libsqlite3" if os.name == "nt" else "sqlite3") +vars.Add("sqlite_libname", "SQLite library name", os.name == "nt" and "libsqlite3" or "sqlite3") +vars.Add("sqlite_force_bundled", "Force use of the bundled SQLite", None) +vars.Add(PathVariable("lua_includedir", "Lua headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("lua_libdir", "Lua library location", None, PathVariable.PathAccept)) +vars.Add("lua_libname", "Lua library name", os.name == "nt" and "liblua" or "lua") +vars.Add("lua_force_bundled", "Force use of the bundled Lua", None) + +vars.Add(PathVariable("editline_includedir", "Readline headers location", None, PathVariable.PathAccept)) +vars.Add(PathVariable("editline_libdir", "Readline library location", None, PathVariable.PathAccept)) +vars.Add("editline_libname", "Readline library name", os.name == "nt" and "libedit" or "edit") + +vars.Add(BoolVariable("try_avahi", "Try configuring for avahi?", "true")) vars.Add(PathVariable("avahi_includedir", "Avahi headers location", None, PathVariable.PathAccept)) vars.Add(PathVariable("avahi_libdir", "Avahi library location", None, PathVariable.PathAccept)) vars.Add(PathVariable("qt", "Qt location", "", PathVariable.PathAccept)) +vars.Add(BoolVariable("qt5", "Compile in Qt5 mode", "no")) # TODO: auto-detect this vars.Add(PathVariable("docbook_xml", "DocBook XML", None, PathVariable.PathAccept)) vars.Add(PathVariable("docbook_xsl", "DocBook XSL", None, PathVariable.PathAccept)) vars.Add(BoolVariable("build_examples", "Build example programs", "yes")) vars.Add(BoolVariable("enable_variants", "Build in a separate dir under build/, depending on compile flags", "no")) +vars.Add(BoolVariable("experimental_ft", "Build experimental file transfer", "yes")) vars.Add(BoolVariable("experimental", "Build experimental features", "no")) vars.Add(BoolVariable("set_iterator_debug_level", "Set _ITERATOR_DEBUG_LEVEL=0", "yes")) +vars.Add(BoolVariable("unbound", "Build bundled ldns and unbound. 
Use them for DNS lookup.", "no")) + +vars.Add(BoolVariable("install_git_hooks", "Install git hooks", "true")) ################################################################################ @@ -62,13 +107,29 @@ vars.Add(BoolVariable("set_iterator_debug_level", "Set _ITERATOR_DEBUG_LEVEL=0", ################################################################################ -env = Environment(CPPPATH = ["#"], ENV = { +env_ENV = { 'PATH' : os.environ['PATH'], 'LD_LIBRARY_PATH' : os.environ.get("LD_LIBRARY_PATH", ""), - }, variables = vars) + 'TERM' : os.environ.get("TERM", ""), +} + +if "MSVC_VERSION" in ARGUMENTS : + env = Environment(ENV = env_ENV, variables = vars, MSVC_VERSION = ARGUMENTS["MSVC_VERSION"], platform = ARGUMENTS.get("PLATFORM", None)) +else : + env = Environment(ENV = env_ENV, variables = vars, platform = ARGUMENTS.get("PLATFORM", None)) Help(vars.GenerateHelpText(env)) +# Workaround for missing Visual Studio 2012 support in SCons +# Requires scons to be run from a VS2012 console +if env.get("MSVC_VERSION", "").startswith("11.0") : + env["ENV"]["LIB"] = os.environ["LIB"] + env["ENV"]["INCLUDE"] = os.environ["INCLUDE"] + # Default environment variables -env["PLATFORM_FLAGS"] = {} +env["PLATFORM_FLAGS"] = { + "LIBPATH": [], + "LIBS": [], + "FRAMEWORKS": [], +} # Default custom tools @@ -97,38 +158,58 @@ if env["max_jobs"] : pass -# Default compiler flags -if env.get("distcc", False) : - env["ENV"]["HOME"] = os.environ["HOME"] - env["ENV"]["DISTCC_HOSTS"] = os.environ.get("DISTCC_HOSTS", "") - if "distcc_hosts" in env : - env["ENV"]["DISTCC_HOSTS"] = env["distcc_hosts"] - env["CC"] = "distcc gcc" - env["CXX"] = "distcc g++" +# Set speed options +env.Decider("MD5-timestamp") +env.SetOption("max_drift", 1) +env.SetOption("implicit_cache", True) + +# Set the default compiler to CLang on OS X, and set the necessary flags +if env["PLATFORM"] == "darwin" and env["target"] == "native" : + if "cc" not in env : + env["CC"] = "clang" + if platform.machine() == "x86_64" : + env["CCFLAGS"] = ["-arch", "x86_64"] + if "cxx" not in env : + env["CXX"] = "clang++" + # Compiling Qt5 in C++0x mode includes headers that we don't have + if not env["qt5"] : + env["CXXFLAGS"] = ["-std=c++11"] + if "link" not in env : + env["LINK"] = "clang" + if platform.machine() == "x86_64" : + env.Append(LINKFLAGS = ["-arch", "x86_64"]) + +# Check whether we are running inside scan-build, and override compiler if so +if "CCC_ANALYZER_HTML" in os.environ : + for key, value in os.environ.items() : + if key.startswith("CCC_") or key.startswith("CLANG") : + env["ENV"][key] = value + env["CC"] = os.environ["CC"] + env["CXX"] = os.environ["CXX"] + +# Override the compiler with custom variables set at config time if "cc" in env : env["CC"] = env["cc"] if "cxx" in env : env["CXX"] = env["cxx"] -ccflags = env.get("ccflags", []) -if isinstance(ccflags, str) : - # FIXME: Make the splitting more robust - env["CCFLAGS"] = ccflags.split(" ") -else : - env["CCFLAGS"] = ccflags +if "ar" in env : + env["AR"] = env["ar"] + if "link" in env : env["SHLINK"] = env["link"] env["LINK"] = env["link"] -linkflags = env.get("linkflags", []) -if isinstance(linkflags, str) : +for flags_type in ["ccflags", "cxxflags", "linkflags"] : + if flags_type in env : + if isinstance(env[flags_type], str) : # FIXME: Make the splitting more robust - env["LINKFLAGS"] = linkflags.split(" ") + env[flags_type.upper()] = env[flags_type].split(" ") else : - env["LINKFLAGS"] = linkflags + env[flags_type.upper()] = env[flags_type] # This isn't a real flag (yet) 
AFAIK. Be sure to append it to the CXXFLAGS # where you need it env["OBJCCFLAGS"] = [] + if env["optimize"] : if env["PLATFORM"] == "win32" : - env.Append(CCFLAGS = ["/O2", "/GL"]) - env.Append(LINKFLAGS = ["/INCREMENTAL:NO", "/LTCG"]) + env.Append(CCFLAGS = ["/O2"]) else : env.Append(CCFLAGS = ["-O2"]) @@ -139,8 +220,13 @@ if env["target"] == "xcode" and os.environ["CONFIGURATION"] == "Release" : if env["debug"] : if env["PLATFORM"] == "win32" : - env.Append(CCFLAGS = ["/Zi", "/MDd"]) + env.Append(CCFLAGS = ["/Zi"]) env.Append(LINKFLAGS = ["/DEBUG"]) if env["set_iterator_debug_level"] : env.Append(CPPDEFINES = ["_ITERATOR_DEBUG_LEVEL=0"]) + if env["optimize"] : + env.Append(LINKFLAGS = ["/OPT:NOREF"]) + env.Append(CCFLAGS = ["/MD"]) + else : + env.Append(CCFLAGS = ["/MDd"]) else : env.Append(CCFLAGS = ["-g"]) @@ -160,4 +246,7 @@ if env.get("universal", 0) : "-arch", "ppc"]) + +# Link against other versions of the OS X SDKs. +# FIXME: This method does not work anymore, we need to set deployment targets. if env.get("mac105", 0) : assert(env["PLATFORM"] == "darwin") @@ -169,12 +258,23 @@ if env.get("mac105", 0) : "-isysroot", "/Developer/SDKs/MacOSX10.5.sdk", "-arch", "i386"]) - env.Append(FRAMEWORKS = ["Security"]) +if env.get("mac106", 0) : + assert(env["PLATFORM"] == "darwin") + env.Append(CCFLAGS = [ + "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk", + "-arch", "i386"]) + env.Append(LINKFLAGS = [ + "-mmacosx-version-min=10.6", + "-isysroot", "/Developer/SDKs/MacOSX10.6.sdk", + "-arch", "i386"]) if not env["assertions"] : env.Append(CPPDEFINES = ["NDEBUG"]) -if env["experimental"] : +if env["experimental_ft"] : env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_FT"]) +if env["experimental"] : + env.Append(CPPDEFINES = ["SWIFT_EXPERIMENTAL_HISTORY", "SWIFT_EXPERIMENTAL_WB"]) + # If we build shared libs on AMD64, we need -fPIC. # This should have no performance impact om AMD64 @@ -184,21 +284,38 @@ if env["PLATFORM"] == "posix" and platform.machine() == "x86_64" : # Warnings if env["PLATFORM"] == "win32" : - # TODO: Find the ideal set of warnings - #env.Append(CCFLAGS = ["/Wall"]) + env.Append(CXXFLAGS = ["/wd4068"]) +elif env["PLATFORM"] == "hpux" : + # HP-UX gives a flood of minor warnings if this is enabled + #env.Append(CXXFLAGS = ["+w"]) + pass +elif env["PLATFORM"] == "sunos" : + #env.Append(CXXFLAGS = ["-z verbose"]) pass else : - env.Append(CXXFLAGS = ["-Wextra", "-Wall", "-Wnon-virtual-dtor", "-Wundef", "-Wold-style-cast", "-Wno-long-long", "-Woverloaded-virtual", "-Wfloat-equal", "-Wredundant-decls"]) - if not env.get("allow_warnings", False) : - env.Append(CXXFLAGS = ["-Werror"]) + if "clang" in env["CXX"] : + env.Append(CXXFLAGS = [ + "-Weverything", + "-Wno-unknown-warning-option", # To stay compatible between CLang versions + "-Wno-unknown-pragmas", # To stay compatible between CLang versions + "-Wno-weak-vtables", # Virtually none of our elements have outlined methods. This also seems to affect classes in .cpp files, which in turn affects all our tests, which may need fixing in CLang + "-Wno-shadow", # Also warns for shadowing on constructor arguments, which we do a lot + "-Wno-documentation", # We don't care about documentation warnings + "-Wno-exit-time-destructors", # Used a lot in e.g. CPPUnit + "-Wno-c++98-compat-pedantic", # We do different things that violate this, but they could be fixed + "-Wno-global-constructors", # We depend on this for e.g. 
string constants + "-Wno-disabled-macro-expansion", # Caused due to system headers + "-Wno-c++11-extensions", # We use C++11; turn this off when we use -std=c++11 + "-Wno-long-long", # We use long long + "-Wno-padded", + "-Wno-missing-variable-declarations", # Getting rid of CPPUnit warnings + "-Wno-direct-ivar-access", # Obj-C code warning + ]) + else : + env.Append(CXXFLAGS = ["-Wextra", "-Wall", "-Wnon-virtual-dtor", "-Wundef", "-Wold-style-cast", "-Wno-long-long", "-Woverloaded-virtual", "-Wfloat-equal", "-Wredundant-decls", "-Wno-unknown-pragmas"]) gccVersion = env.get("CCVERSION", "0.0.0").split(".") if gccVersion >= ["4", "5", "0"] and not "clang" in env["CC"] : env.Append(CXXFLAGS = ["-Wlogical-op"]) - if "clang" in env["CC"] : - env.Append(CXXFLAGS = ["-W#warnings", "-Wc++0x-compat", "-Waddress-of-temporary", "-Wambiguous-member-template", "-Warray-bounds", "-Watomic-properties", "-Wbind-to-temporary-copy", "-Wbuiltin-macro-redefined", "-Wc++-compat", "-Wc++0x-extensions", "-Wcomments", "-Wconditional-uninitialized", "-Wconstant-logical-operand", "-Wdeclaration-after-statement", "-Wdeprecated", "-Wdeprecated-implementations", "-Wdeprecated-writable-strings", "-Wduplicate-method-arg", "-Wempty-body", "-Wendif-labels", "-Wenum-compare", "-Wformat=2", "-Wfour-char-constants", "-Wgnu", "-Wincomplete-implementation", "-Winvalid-noreturn", "-Winvalid-offsetof", "-Winvalid-token-paste", "-Wlocal-type-template-args", "-Wmethod-signatures", "-Wmicrosoft", "-Wmissing-declarations", "-Wnon-pod-varargs", "-Wnonfragile-abi2", "-Wnull-dereference", "-Wout-of-line-declaration", "-Woverlength-strings", "-Wpacked", "-Wpointer-arith", "-Wpointer-sign", "-Wprotocol", "-Wreadonly-setter-attrs", "-Wselector", "-Wshift-overflow", "-Wshift-sign-overflow", "-Wstrict-selector-match", "-Wsuper-class-method-mismatch", "-Wtautological-compare", "-Wtypedef-redefinition", "-Wundeclared-selector", "-Wunknown-warning-option", "-Wunnamed-type-template-args", "-Wunused-exception-parameter", "-Wunused-member-function", "-Wused-but-marked-unused", "-Wvariadic-macros"]) -# To enable: -# "-Wheader-hygiene" -# "-Wnon-gcc", -# "-Wweak-vtables", -# "-Wlarge-by-value-copy", + if not env.get("allow_warnings", False) : + env.Append(CXXFLAGS = ["-Werror"]) if env.get("coverage", 0) : @@ -209,12 +326,22 @@ if env.get("coverage", 0) : if env["PLATFORM"] == "win32" : env.Append(LIBS = ["user32", "crypt32", "dnsapi", "iphlpapi", "ws2_32", "wsock32", "Advapi32"]) - env.Append(CCFLAGS = ["/EHsc", "/nologo"]) - # FIXME: We should find a decent solution for MSVS 10 + env.Append(CCFLAGS = ["/EHsc", "/nologo", "/Zm256"]) + env.Append(LINKFLAGS = ["/INCREMENTAL:no", "/NOLOGO"]) if int(env["MSVS_VERSION"].split(".")[0]) < 10 : - env["LINKCOM"] = [env["LINKCOM"], 'mt.exe -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;1'] - env["SHLINKCOM"] = [env["SHLINKCOM"], 'mt.exe -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;2'] + mt = env.get('mt') + if not mt: + mt = 'mt.exe' + env["LINKCOM"] = [env["LINKCOM"], '%s -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;1' % mt] + env["SHLINKCOM"] = [env["SHLINKCOM"], '%s -nologo -manifest ${TARGET}.manifest -outputresource:$TARGET;2' % mt] + +if env["PLATFORM"] == "darwin" and not env["target"] in ["iphone-device", "iphone-simulator", "xcode", "android"] : + env["PLATFORM_FLAGS"]["FRAMEWORKS"] += ["IOKit", "AppKit", "SystemConfiguration", "Security", "SecurityInterface"] + +# Required by boost headers on HP-UX +if env["PLATFORM"] == "hpux" : + env.Append(CXXFLAGS 
= ["+hpxstd98", "-mt", "-AA"]) + # FIXME: Need -AA for linking C++ but not C + #env.Append(LINKFLAGS = ["-AA"]) -if env["PLATFORM"] == "darwin" and not env["target"] in ["iphone-device", "iphone-simulator", "xcode"] : - env.Append(FRAMEWORKS = ["IOKit", "AppKit", "SystemConfiguration"]) # Testing @@ -231,5 +358,5 @@ env["TEST_CREATE_LIBRARIES"] = "create_test_libraries" in ARGUMENTS # Packaging env["DIST"] = "dist" in ARGUMENTS or env.GetOption("clean") -for path in ["SWIFT_INSTALLDIR", "SWIFTEN_INSTALLDIR"] : +for path in ["SWIFT_INSTALLDIR", "SWIFTEN_INSTALLDIR", "SLUIFT_INSTALLDIR"] : if ARGUMENTS.get(path, "") : if os.path.isabs(ARGUMENTS[path]) : @@ -238,4 +365,5 @@ for path in ["SWIFT_INSTALLDIR", "SWIFTEN_INSTALLDIR"] : env[path] = Dir("#/" + ARGUMENTS[path]).abspath + ################################################################################ # XCode / iPhone / ... @@ -255,5 +383,5 @@ if target in ["iphone-device", "iphone-simulator", "xcode"] : else : # Hard code values - env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = "/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin" + env["XCODE_PLATFORM_DEVELOPER_BIN_DIR"] = "/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin" if target == "iphone-device": env["XCODE_ARCH_FLAGS"] = ["-arch", "armv6", "-arch", "armv7"] @@ -262,6 +390,6 @@ if target in ["iphone-device", "iphone-simulator", "xcode"] : env["XCODE_ARCH_FLAGS"] = ["-arch", "i386"] sdkPart = "iPhoneSimulator" - sdkVer = "4.3" - env["XCODE_SDKROOT"] = "/Developer/Platforms/" + sdkPart + ".platform/Developer/SDKs/" + sdkPart + sdkVer + ".sdk" + sdkVer = "6.0" + env["XCODE_SDKROOT"] = "/Applications/Xcode.app/Contents/Developer/Platforms/" + sdkPart + ".platform/Developer/SDKs/" + sdkPart + sdkVer + ".sdk" env["IPHONEOS_DEPLOYMENT_TARGET"] = "4.1" @@ -272,4 +400,5 @@ if target in ["iphone-device", "iphone-simulator", "xcode"] : env["LD"] = env["CC"] env.Append(CCFLAGS = env["XCODE_ARCH_FLAGS"] + ["-fvisibility=hidden", "-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"]]) + env.Append(LINKFLAGS = "-miphoneos-version-min=" + env["IPHONEOS_DEPLOYMENT_TARGET"]) if os.environ.get("GCC_THUMB_SUPPORT", False) : env.Append(CCFLAGS = ["-mthumb"]) @@ -281,4 +410,15 @@ if target in ["iphone-device", "iphone-simulator", "xcode"] : env.Append(CPPDEFINES = ["_LITTLE_ENDIAN"]) +################################################################################ +# Android +################################################################################ +if target in ["android"] : + env["ENV"]["PATH"] = env["android_toolchain"] + "/bin:" + env["ENV"]["PATH"] + env["CC"] = "arm-linux-androideabi-gcc" + env["CXX"] = "arm-linux-androideabi-g++" + env["RANLIB"] = "arm-linux-androideabi-ranlib" + env.Append(CPPDEFINES = ["ANDROID"]) + env.Append(CPPDEFINES = ["_REENTRANT", "_GLIBCXX__PTHREADS"]) + # CCache if env.get("ccache", False) : diff --git a/BuildTools/SCons/SConstruct b/BuildTools/SCons/SConstruct index e91dd6e..d314ff3 100644 --- a/BuildTools/SCons/SConstruct +++ b/BuildTools/SCons/SConstruct @@ -34,5 +34,5 @@ def colorize(command, target, color) : return " " + prefix + command + suffix + " " + target -if int(ARGUMENTS.get("V", 0)) == 0: +if int(ARGUMENTS.get("V", 0)) == 0 and not ARGUMENTS.get("dump_trace", False) : env["CCCOMSTR"] = colorize("CC", "$TARGET", "green") env["SHCCCOMSTR"] = colorize("CC", "$TARGET", "green") @@ -43,4 +43,5 @@ if int(ARGUMENTS.get("V", 0)) == 0: env["ARCOMSTR"] = colorize("AR", "$TARGET", "red") env["RANLIBCOMSTR"] = 
colorize("RANLIB", "$TARGET", "red") + env["PCHCOMSTR"] = colorize("PCH", "$TARGET", "blue") env["QT4_RCCCOMSTR"] = colorize("RCC", "$TARGET", "blue") env["QT4_UICCOMSTR"] = colorize("UIC", "$TARGET", "blue") @@ -72,6 +73,4 @@ def checkObjCHeader(context, header) : ################################################################################ -env.Append(CPPPATH = [root]) - if ARGUMENTS.get("force-configure", 0) : SCons.SConf.SetCacheMode("force") @@ -85,4 +84,10 @@ def CheckPKG(context, name): def CheckVersion(context, library, version, define, header, value) : context.Message("Checking " + library + " version (>= " + version + ") ...") + version = GetVersion(context, define, header) + ok = version >= value + context.Result(ok) + return ok + +def GetVersion(context, define, header, extension = ".c") : ret = context.TryRun(""" #include <%(header)s> @@ -93,8 +98,10 @@ int main(int argc, char* argv[]) { return 0; } -""" % { "header" : header, "define": define }, ".c") - ok = ret[0] and int(ret[1]) >= value - context.Result(ok) - return ok +""" % { "header" : header, "define": define }, extension) + if ret[0] : + return int(ret[1]) + else : + return -1 + conf = Configure(conf_env) @@ -105,6 +112,25 @@ if not conf.CheckCXX() or not conf.CheckCC() : env["HAVE_ZLIB"] = True -if conf.CheckLib("z") : - env["ZLIB_FLAGS"] = {"LIBS": ["z"]} +zlib_flags = {} +zlib_okay = False +if env.get("zlib_libdir", None) : + zlib_flags["LIBPATH"] = [env["zlib_libdir"]] + zlib_okay = True +if env.get("zlib_includedir", None) : + zlib_flags["CPPPATH"] = [env["zlib_includedir"]] + zlib_okay = True +if env.get("zlib_libfile", None) : + zlib_flags["LIBS"] = [File(env["zlib_libfile"])] + zlib_okay = True +elif zlib_okay : + zlib_flags["LIBS"] = ["z"] +if (not zlib_okay) and conf.CheckLib("z") : + zlib_flags["LIBS"] = ["z"] + zlib_okay = True +if zlib_okay : + env["ZLIB_FLAGS"] = zlib_flags +elif not env.get("zlib_bundled_enable", True) : + print "Error: Zlib not found and zlib_bundled_enable is false" + Exit(1) else : env["ZLIB_BUNDLED"] = True @@ -126,7 +152,8 @@ if conf.CheckLib("c") : env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["c"] -if conf.CheckLib("stdc++") : +# Even if you find stdc++ on HP-UX, it is the wrong one for aCC +if env["PLATFORM"] != "hpux" : + if conf.CheckLib("stdc++", language='CXX') : env["PLATFORM_FLAGS"]["LIBS"] = env["PLATFORM_FLAGS"].get("LIBS", []) + ["stdc++"] - conf.Finish() @@ -134,9 +161,8 @@ conf.Finish() boost_conf_env = conf_env.Clone() boost_flags = {} -boost_flags["CPPDEFINES"] = [("BOOST_FILESYSTEM_VERSION", "2")] if env.get("boost_libdir", None) : boost_flags["LIBPATH"] = [env["boost_libdir"]] if env.get("boost_includedir", None) : - if env["PLATFORM"] == "win32" : + if env["PLATFORM"] == "win32" or env["PLATFORM"] == "hpux" or env["PLATFORM"] == "sunos" : boost_flags["CPPPATH"] = [env["boost_includedir"]] else : @@ -146,5 +172,5 @@ if env.get("boost_includedir", None) : boost_conf_env.MergeFlags(boost_flags) conf = Configure(boost_conf_env) -boostLibs = [("signals", None), ("thread", None), ("regex", None), ("program_options", None), ("filesystem", None), ("system", "system/system_error.hpp"), ("date_time", "date_time/date.hpp")] +boostLibs = [("signals", None), ("system", "system/system_error.hpp"), ("thread", None), ("regex", None), ("program_options", None), ("filesystem", None), ("serialization", "archive/text_oarchive.hpp"), ("date_time", "date_time/date.hpp")] allLibsPresent = True libNames = [] @@ -159,7 +185,7 @@ for (lib, header) in 
boostLibs : if env["PLATFORM"] != "win32" : libName = "boost_" + lib - if not conf.CheckLib(libName) : + if not conf.CheckLib(libName, language='CXX') : libName += "-mt" - if not conf.CheckLib(libName) : + if not conf.CheckLib(libName, language='CXX') : allLibsPresent = False break @@ -172,4 +198,8 @@ if allLibsPresent : # FIXME: Remove this workaround when UUID is available in most distros env["BOOST_BUNDLED_UUID_ONLY"] = True + env["BOOST_FLAGS"]["CPPDEFINES"] = ["BOOST_SIGNALS_NO_DEPRECATION_WARNING"] +elif not env.get("boost_bundled_enable", True) : + print "Error: Boost not found and boost_bundled_enable is false" + Exit(1) else : env["BOOST_BUNDLED"] = True @@ -194,5 +224,5 @@ if env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" : # GConf env["HAVE_GCONF"] = 0 -if env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" : +if env.get("try_gconf", True) and env["PLATFORM"] != "win32" and env["PLATFORM"] != "darwin" : gconf_env = conf_env.Clone() conf = Configure(gconf_env, custom_tests = {"CheckPKG": CheckPKG}) @@ -255,5 +285,5 @@ if env["PLATFORM"] == "win32" : # LibXML conf = Configure(conf_env, custom_tests = {"CheckVersion": CheckVersion}) -if conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") : +if env.get("try_libxml", True) and conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") : #and conf.CheckVersion("LibXML", "2.6.23", "LIBXML_VERSION", "libxml/xmlversion.h", 20623) : env["HAVE_LIBXML"] = 1 @@ -261,5 +291,5 @@ if conf.CheckCHeader("libxml/parser.h") and conf.CheckLib("xml2") : conf.Finish() -if not env.get("HAVE_LIBXML", 0) : +if env.get("try_libxml", True) and not env.get("HAVE_LIBXML", 0) : libxml_env = conf_env.Clone() libxml_env.Append(CPPPATH = ["/usr/include/libxml2"]) @@ -272,5 +302,5 @@ if not env.get("HAVE_LIBXML", 0) : # Expat -if not env.get("HAVE_LIBXML",0) : +if env.get("try_expat", True) and not env.get("HAVE_LIBXML",0) : expat_conf_env = conf_env.Clone() expat_flags = {} @@ -295,4 +325,29 @@ if not env.get("HAVE_EXPAT", 0) and not env.get("HAVE_LIBXML", 0) : env["EXPAT_BUNDLED"] = True +################################################################################ +# IDN library +################################################################################ + +env["NEED_IDN"] = env.get("need_idn", True) + +# ICU +icu_env = conf_env.Clone() +use_icu = bool(env["icu"]) +icu_prefix = "" +if isinstance(env["icu"], str) : + icu_prefix = env["icu"] +icu_flags = {} +if icu_prefix : + icu_flags = { "CPPPATH": [os.path.join(icu_prefix, "include")] } + icu_flags["LIBPATH"] = [os.path.join(icu_prefix, "lib")] + icu_env.MergeFlags(icu_flags) + +icu_conf = Configure(icu_env) +if use_icu and icu_conf.CheckCHeader("unicode/usprep.h") : + env["HAVE_ICU"] = 1 + env["ICU_FLAGS"] = icu_flags + env["ICU_FLAGS"]["LIBS"] = ["icuuc"] +icu_conf.Finish() + # LibIDN libidn_conf_env = conf_env.Clone() @@ -304,61 +359,127 @@ if env.get("libidn_includedir", None) : libidn_conf_env.MergeFlags(libidn_flags) conf = Configure(libidn_conf_env) -if conf.CheckCHeader("idna.h") and conf.CheckLib(env["libidn_libname"]) : +if env.get("try_libidn", True) and not env.get("HAVE_ICU") and conf.CheckCHeader("idna.h") and conf.CheckLib(env["libidn_libname"]) : env["HAVE_LIBIDN"] = 1 env["LIBIDN_FLAGS"] = { "LIBS": [env["libidn_libname"]] } env["LIBIDN_FLAGS"].update(libidn_flags) -else : - env["LIBIDN_BUNDLED"] = 1 conf.Finish() +# Fallback to bundled LibIDN +if not env.get("HAVE_ICU", False) and not env.get("HAVE_LIBIDN", False) : + if 
env.get("libidn_bundled_enable", True) : + env["HAVE_LIBIDN"] = 1 + env["LIBIDN_BUNDLED"] = 1 + elif env.get("need_idn", True): + print "Error: ICU and LIBIDN not found, and libidn_bundled_enable is false" + Exit(1) + else: + print "Proceeding without an IDN library because need_idn was false. This will break all internal binaries" + +# Unbound +if env["unbound"] : + env["LDNS_BUNDLED"] = 1 + env["UNBOUND_BUNDLED"] = 1 +else : + env["LDNS_FLAGS"] = {} + env["UNBOUND_FLAGS"] = {} + # LibMiniUPnPc -if env["experimental"] : - #libminiupnpc_conf_env = conf_env.Clone() - #conf = Configure(libminiupnpc_conf_env) - #if conf.CheckCHeader("miniupnpc.h") and conf.CheckLib(env["libminiupnpc_libname"]) : - # print "NOT IMPLEMENTED YET" - #else : +if env["experimental_ft"] : + libminiupnpc_flags = {"CPPPATH": ["/usr/include/miniupnpc/"]} + libminiupnpc_conf_env = conf_env.Clone() + if env.get("libminiupnpc_libdir", None) : + libminiupnpc_flags["LIBPATH"] = [env["libminiupnpc_libdir"]] + if env.get("libminiupnpc_includedir", None) : + libminiupnpc_flags["CPPPATH"] = [env["libminiupnpc_includedir"]] + libminiupnpc_conf_env.MergeFlags(libminiupnpc_flags) + conf = Configure(libminiupnpc_conf_env) + if conf.CheckCHeader("miniupnpc.h") and conf.CheckLib(env["libminiupnpc_libname"]) and False : + # ^ False because APIs aren't stable + env["HAVE_LIBMINIUPNPC"] = 1 + env["LIBMINIUPNPC_FLAGS"] = { "LIBS": ["miniupnpc"] } + env["LIBMINIUPNPC_FLAGS"].update(libminiupnpc_flags) + else : env["LIBMINIUPNPC_BUNDLED"] = 1 - #conf.Finish() + conf.Finish() else : env["LIBMINIUPNPC_FLAGS"] = {} # LibNATPMP -if env["experimental"] : - #libnatpmp_conf_env = conf_env.Clone() - #conf = Configure(libnatpmp_conf_env) - #if conf.CheckCHeader("natpmp.h") and conf.CheckLib(env["libnatpmp_libname"]) : - # print "NOT IMPLEMENTED YET" - #else : +if env["experimental_ft"] : + libnatpmp_flags = {} + libnatpmp_conf_env = conf_env.Clone() + if env.get("libnatpmp_libdir", None) : + libnatpmp_flags["LIBPATH"] = [env["libnatpmp_libdir"]] + if env.get("libnatpmp_includedir", None) : + libnatpmp_flags["CPPPATH"] = [env["libnatpmp_includedir"]] + libnatpmp_conf_env.MergeFlags(libnatpmp_flags) + conf = Configure(libnatpmp_conf_env) + if conf.CheckCHeader("natpmp.h") and conf.CheckLib(env["libnatpmp_libname"]) and False: + # ^ False because APIs aren't stable + env["HAVE_LIBNATPMP"] = 1 + env["LIBNATPMP_FLAGS"] = { "LIBS": ["natpmp"] } + env["LIBNATPMP_FLAGS"].update(libnatpmp_flags) + else : env["LIBNATPMP_BUNDLED"] = 1 - #conf.Finish() + conf.Finish() else : env["LIBNATPMP_FLAGS"] = {} # SQLite -#sqlite_conf_env = conf_env.Clone() -#sqlite_flags = {} -#if env.get("sqlite_libdir", None) : -# sqlite_flags["LIBPATH"] = [env["sqlite_libdir"]] -#if env.get("sqlite_includedir", None) : -# sqlite_flags["CPPPATH"] = [env["sqlite_includedir"]] -#sqlite_conf_env.MergeFlags(sqlite_flags) -#conf = Configure(sqlite_conf_env) -#if conf.CheckCHeader("sqlite3.h") and conf.CheckLib(env["sqlite_libname"]) : -# env["HAVE_SQLITE"] = 1 -# env["SQLITE_FLAGS"] = { "LIBS": [env["sqlite_libname"]] } -# env["SQLITE_FLAGS"].update(sqlite_flags) -#else : -# env["SQLITE_BUNDLED"] = 1 -#conf.Finish() +if env["experimental"] : + sqlite_conf_env = conf_env.Clone() + sqlite_flags = {} + if env.get("sqlite_libdir", None) : + sqlite_flags["LIBPATH"] = [env["sqlite_libdir"]] + if env.get("sqlite_includedir", None) : + sqlite_flags["CPPPATH"] = [env["sqlite_includedir"]] + sqlite_conf_env.MergeFlags(sqlite_flags) + conf = Configure(sqlite_conf_env) + if 
conf.CheckCHeader("sqlite3.h") and conf.CheckLib(env["sqlite_libname"]) and not env.get("sqlite_force_bundled", False): + env["HAVE_SQLITE"] = 1 + env["SQLITE_FLAGS"] = { "LIBS": [env["sqlite_libname"]] } + env["SQLITE_FLAGS"].update(sqlite_flags) + else : + env["SQLITE_BUNDLED"] = 1 + conf.Finish() +else : + env["SQLITE_FLAGS"] = {} + # Lua +lua_conf_env = conf_env.Clone() +lua_flags = {} +if env.get("lua_libdir", None) : + lua_flags["LIBPATH"] = [env["lua_libdir"]] +if env.get("lua_includedir", None) : + lua_flags["CPPPATH"] = [env["lua_includedir"]] +lua_conf_env.MergeFlags(lua_flags) +conf = Configure(lua_conf_env) +if not env.get("lua_force_bundled", False) and conf.CheckLibWithHeader(env["lua_libname"], "lua.hpp", "cxx", autoadd = 0) : + env["HAVE_LUA"] = 1 + env["LUA_FLAGS"] = { "LIBS": [env["lua_libname"]] } + lua_version = GetVersion(conf, "LUA_VERSION_NUM", "lua.h") + if lua_version > 0 : + env["LUA_FLAGS"]["LUA_VERSION"] = str(lua_version // 100) + "." + str(lua_version % 100) + else : + print "Warning: Unable to determine Lua version. Not installing Lua libraries." + env["LUA_FLAGS"].update(lua_flags) +else : env["LUA_BUNDLED"] = 1 +conf.Finish() # Readline -conf = Configure(conf_env) -if conf.CheckCHeader(["stdio.h", "readline/readline.h"]) and conf.CheckLib("readline") : - env["HAVE_READLINE"] = True - env["READLINE_FLAGS"] = { "LIBS": ["readline"] } +editline_conf_env = conf_env.Clone() +editline_flags = {} +if env.get("editline_libdir", None) : + editline_flags["LIBPATH"] = [env["editline_libdir"]] +if env.get("editline_includedir", None) : + editline_flags["CPPPATH"] = [env["editline_includedir"]] +editline_conf_env.MergeFlags(editline_flags) +conf = Configure(editline_conf_env) +if conf.CheckLibWithHeader(env["editline_libname"], ["stdio.h", "editline/readline.h"], "c") : + env["HAVE_EDITLINE"] = 1 + env["EDITLINE_FLAGS"] = { "LIBS": [env["editline_libname"]] } + env["EDITLINE_FLAGS"].update(editline_flags) conf.Finish() @@ -372,5 +493,5 @@ if env.get("avahi_includedir", None) : avahi_conf_env.MergeFlags(avahi_flags) conf = Configure(avahi_conf_env) -if conf.CheckCHeader("avahi-client/client.h") and conf.CheckLib("avahi-client") and conf.CheckLib("avahi-common") : +if env.get("try_avahi", True) and conf.CheckCHeader("avahi-client/client.h") and conf.CheckLib("avahi-client") and conf.CheckLib("avahi-common") : env["HAVE_AVAHI"] = True env["AVAHI_FLAGS"] = { "LIBS": ["avahi-client", "avahi-common"] } @@ -384,10 +505,24 @@ if env["qt"] : # OpenSSL openssl_env = conf_env.Clone() +if env.get("openssl_force_bundled", False) or env["target"] in ("iphone-device", "iphone-simulator", "xcode", "android") : + env["OPENSSL_BUNDLED"] = True + env["HAVE_OPENSSL"] = True +else : use_openssl = bool(env["openssl"]) -openssl_prefix = env["openssl"] if isinstance(env["openssl"], str) else "" + openssl_prefix = "" + if isinstance(env["openssl"], str) : + openssl_prefix = env["openssl"] openssl_flags = {} if openssl_prefix : + openssl_include = env.get("openssl_include", None) + openssl_libdir = env.get("openssl_libdir", None) + if openssl_include: + openssl_flags = {"CPPPATH":[openssl_include]} + else: openssl_flags = { "CPPPATH": [os.path.join(openssl_prefix, "include")] } - if env["PLATFORM"] == "win32" : + if openssl_libdir: + openssl_flags["LIBPATH"] = [openssl_libdir] + env["OPENSSL_DIR"] = openssl_prefix + elif env["PLATFORM"] == "win32" : openssl_flags["LIBPATH"] = [os.path.join(openssl_prefix, "lib", "VC")] env["OPENSSL_DIR"] = openssl_prefix @@ -400,5 +535,8 @@ if use_openssl and 
openssl_conf.CheckCHeader("openssl/ssl.h") : env["HAVE_OPENSSL"] = 1 env["OPENSSL_FLAGS"] = openssl_flags - if env["PLATFORM"] == "win32" : + openssl_libnames = env.get("openssl_libnames", None) + if openssl_libnames: + env["OPENSSL_FLAGS"]["LIBS"] = openssl_libnames.split(',') + elif env["PLATFORM"] == "win32" : env["OPENSSL_FLAGS"]["LIBS"] = ["libeay32MD", "ssleay32MD"] else: @@ -407,9 +545,6 @@ if use_openssl and openssl_conf.CheckCHeader("openssl/ssl.h") : if platform.mac_ver()[0].startswith("10.5") : env["OPENSSL_FLAGS"]["FRAMEWORKS"] = ["Security"] -elif env["target"] in ("iphone-device", "iphone-simulator", "xcode") : - env["OPENSSL_BUNDLED"] = True - env["HAVE_OPENSSL"] = True else : - env["OPENSSL_FLAGS"] = "" + env["OPENSSL_FLAGS"] = {} if env["PLATFORM"] == "win32" : env["HAVE_SCHANNEL"] = True @@ -419,6 +554,22 @@ else : openssl_conf.Finish() +#Hunspell +hunspell_env = conf_env.Clone() +hunspell_prefix = isinstance(env.get("hunspell", False), str) and env["hunspell"] or "" +hunspell_flags = {} +if hunspell_prefix : + hunspell_flags = {"CPPPATH":[os.path.join(hunspell_prefix, "include")], "LIBPATH":[os.path.join(hunspell_prefix, "lib")]} +hunspell_env.MergeFlags(hunspell_flags) + +env["HAVE_HUNSPELL"] = 0; +hunspell_conf = Configure(hunspell_env) +if hunspell_conf.CheckCXXHeader("hunspell/hunspell.hxx") and hunspell_conf.CheckLib("hunspell") : + env["HAVE_HUNSPELL"] = 1 + hunspell_flags["LIBS"] = ["hunspell"] + env["HUNSPELL_FLAGS"] = hunspell_flags +hunspell_conf.Finish() + # Bonjour -if env["PLATFORM"] == "darwin" : +if env["PLATFORM"] == "darwin" and env["target"] == "native" : env["HAVE_BONJOUR"] = 1 elif env.get("bonjour", False) : @@ -466,7 +617,35 @@ if env.get("docbook_xsl") : ################################################################################ +try: if env.Dir("#/.git").exists() : - if not env.GetOption("clean") : + if not env.GetOption("clean") and env.get("install_git_hooks", True) : env.Install("#/.git/hooks", Glob("#/BuildTools/Git/Hooks/*")) +except TypeError: + print "You seem to be using Swift in a Git submodule. Not installing hooks." 
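All of the detection blocks above (zlib, LibIDN, SQLite, Lua, editline and the rest) follow the same detect-or-bundle shape: merge any explicit *_libdir/*_includedir overrides into a cloned Configure environment, probe for the header and the library, record HAVE_* and *_FLAGS on success, and otherwise fall back to the bundled copy, aborting if bundling has been disabled. A minimal SConstruct-style sketch of that shape, using a hypothetical library "foo" and made-up option names (foo_libdir, foo_includedir, foo_bundled_enable), would be:

    # Sketch only: "foo" and its options are placeholders, not real Swift options.
    foo_flags = {}
    if env.get("foo_libdir", None) :
        foo_flags["LIBPATH"] = [env["foo_libdir"]]
    if env.get("foo_includedir", None) :
        foo_flags["CPPPATH"] = [env["foo_includedir"]]
    foo_conf_env = conf_env.Clone()
    foo_conf_env.MergeFlags(foo_flags)
    conf = Configure(foo_conf_env)
    if conf.CheckCHeader("foo.h") and conf.CheckLib("foo") :
        env["HAVE_FOO"] = 1
        env["FOO_FLAGS"] = { "LIBS": ["foo"] }
        env["FOO_FLAGS"].update(foo_flags)
    conf.Finish()
    if not env.get("HAVE_FOO", 0) :
        if env.get("foo_bundled_enable", True) :
            env["FOO_BUNDLED"] = 1
        else :
            print "Error: foo not found and foo_bundled_enable is false"
            Exit(1)

The per-library variations above (forced bundling options, ICU taking precedence over LibIDN, the HP-UX stdc++ exception) are all refinements of this basic sequence.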
+ + +################################################################################ +# Replace #pragma once with proper guards on platforms that require it +################################################################################ + +if ARGUMENTS.get("replace_pragma_once", False) : + env.Tool("ReplacePragmaOnce", toolpath = ["#/BuildTools/SCons/Tools"]) + + def relpath(path, start) : + i = len(os.path.commonprefix([path, start])) + return path[i+1:] + + for actual_root, dirs, files in os.walk(root) : + if "3rdParty" in actual_root : + continue + for file in files : + if not file.endswith(".h") : + continue + include = relpath(os.path.join(actual_root, file), root) + env.ReplacePragmaOnce("#/include/" + include, "#/" + include) + env.Append(CPPPATH = ["#/include"]) +else : + env.Append(CPPPATH = [root]) + ################################################################################ @@ -474,9 +653,14 @@ if env.Dir("#/.git").exists() : ################################################################################ -# Build tools -env.SConscript(dirs = ["#/BuildTools/CLang"]) +if ARGUMENTS.get("dump_trace", False) : + env.SetOption("no_exec", True) + env["TEST"] = True + env["BOOST_BUILD_BCP"] = True + env.Decider(lambda x, y, z : True) + SCons.Node.Python.Value.changed_since_last_build = (lambda x, y, z: True) # Modules modules = [] +if os.path.isdir(Dir("#/3rdParty").abspath) : for dir in os.listdir(Dir("#/3rdParty").abspath) : full_dir = os.path.join(Dir("#/3rdParty").abspath, dir) @@ -494,7 +678,11 @@ for dir in os.listdir(Dir("#").abspath) : modules.append(dir) +# QA comes last +modules.remove("QA") +modules.append("QA") + # Flags env["PROJECTS"] = [m for m in modules if m not in ["Documentation", "QA", "SwifTools"] and not m.startswith("3rdParty")] -for stage in ["flags", "build", "test"] : +for stage in ["flags", "build"] : env["SCONS_STAGE"] = stage SConscript(dirs = map(lambda x : root + "/" + x, modules)) @@ -505,4 +693,5 @@ if ARGUMENTS.get("sloccount", False) : env.SLOCCount("#/" + project) + ################################################################################ # Print summary @@ -524,5 +713,5 @@ print "" print " XML Parsers: " + ' '.join(parsers) -print " TLS Support: " + ("OpenSSL" if env.get("HAVE_OPENSSL",0) else ("Schannel" if env.get("HAVE_SCHANNEL", 0) else "Disabled")) -print " DNSSD Support: " + ("Bonjour" if env.get("HAVE_BONJOUR") else ("Avahi" if env.get("HAVE_AVAHI") else "Disabled")) +print " TLS Support: " + (env.get("HAVE_OPENSSL",0) and "OpenSSL" or env.get("HAVE_SCHANNEL", 0) and "Schannel" or "Disabled") +print " DNSSD Support: " + (env.get("HAVE_BONJOUR") and "Bonjour" or (env.get("HAVE_AVAHI") and "Avahi" or "Disabled")) print diff --git a/BuildTools/SCons/Tools/Flags.py b/BuildTools/SCons/Tools/Flags.py index 13fbb32..c130faf 100644 --- a/BuildTools/SCons/Tools/Flags.py +++ b/BuildTools/SCons/Tools/Flags.py @@ -4,5 +4,8 @@ def generate(env) : def useFlags(env, flags) : for flag in flags : - env[flag] = env.get(flag, []) + flags[flag] + if flag in env : + env[flag] = env[flag] + flags[flag] + else : + env[flag] = flags[flag] env.AddMethod(useFlags, "UseFlags") diff --git a/BuildTools/SCons/Tools/ReplacePragmaOnce.py b/BuildTools/SCons/Tools/ReplacePragmaOnce.py new file mode 100644 index 0000000..466c31e --- /dev/null +++ b/BuildTools/SCons/Tools/ReplacePragmaOnce.py @@ -0,0 +1,25 @@ +import SCons.Util, os.path + +def generate(env) : + root = env.Dir("#").abspath + def relpath(path, start) : + i = len(os.path.commonprefix([path, start])) 
+ return path[i+1:] + + def replacePragmaOnce(env, target, source) : + guard = relpath(source[0].abspath, root).replace("/", "_").replace(".", "_").upper() + data = source[0].get_contents() + f = open(str(target[0]), 'wb') + if "#pragma once" in data : + f.write(data.replace("#pragma once", "#ifndef %(guard)s\n#define %(guard)s" % {"guard": guard})) + f.write("\n#endif\n") + else : + f.write(data) + f.close() + + env["BUILDERS"]["ReplacePragmaOnce"] = SCons.Builder.Builder( + action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"), + single_source = True) + +def exists(env) : + return True diff --git a/BuildTools/SCons/Tools/Test.py b/BuildTools/SCons/Tools/Test.py index 40eaeb1..7e4609d 100644 --- a/BuildTools/SCons/Tools/Test.py +++ b/BuildTools/SCons/Tools/Test.py @@ -4,5 +4,8 @@ def generate(env) : def registerTest(env, target, type = "unit", is_checker = False) : if env["TEST_TYPE"] == "all" or env["TEST_TYPE"] == type : - cmd = target[0].abspath if SCons.Util.is_List(target) else target.abspath + if SCons.Util.is_List(target) : + cmd = target[0].abspath + else : + cmd = target.abspath params = "" @@ -14,5 +17,28 @@ def generate(env) : if env.get("TEST_IGNORE_RESULT", False) : ignore_prefix = "-" - env.Command("**dummy**", target, + + # Set environment variables for running the test + test_env = env.Clone() + for i in ["HOME", "USERPROFILE", "APPDATA"]: + if os.environ.get(i, "") : + test_env["ENV"][i] = os.environ[i] + if env["target"] == "android" : + test_env["ENV"]["PATH"] = env["android_sdk_bin"] + ";" + test_env["ENV"]["PATH"] + else : + if test_env["PLATFORM"] == "darwin" : + test_env["ENV"]["DYLD_FALLBACK_LIBRARY_PATH"] = ":".join(map(lambda x : str(x), test_env.get("LIBPATH", []))) + elif test_env["PLATFORM"] == "win32" : + test_env["ENV"]["PATH"] = ";".join(map(lambda x : str(x), test_env.get("LIBRUNPATH", []))) + ";" + test_env["ENV"]["PATH"] + + + # Run the test + if env["target"] == "android": + exec_name = os.path.basename(cmd) + test_env.Command("**dummy**", target, SCons.Action.Action( + ["adb shell mount -o rw,remount /system", + "adb push " + cmd + " /system/bin/" + exec_name, + "adb shell SWIFT_CLIENTTEST_JID=\"" + os.getenv("SWIFT_CLIENTTEST_JID") + "\" SWIFT_CLIENTTEST_PASS=\"" + os.getenv("SWIFT_CLIENTTEST_PASS") + "\" " + env.get("TEST_RUNNER", "") + "/system/bin/" + exec_name], cmdstr = "$TESTCOMSTR")) + else : + test_env.Command("**dummy**", target, SCons.Action.Action(ignore_prefix + env.get("TEST_RUNNER", "") + cmd + " " + params, cmdstr = "$TESTCOMSTR")) diff --git a/BuildTools/SCons/Tools/WindowsBundle.py b/BuildTools/SCons/Tools/WindowsBundle.py index e351884..2915141 100644 --- a/BuildTools/SCons/Tools/WindowsBundle.py +++ b/BuildTools/SCons/Tools/WindowsBundle.py @@ -2,9 +2,14 @@ import SCons.Util, os def generate(env) : - def createWindowsBundle(env, bundle, resources = {}, qtimageformats = [], qtlibs = []) : - env.Install(bundle, bundle + ".exe") + def createWindowsBundle(env, bundle, resources = {}, qtplugins = {}, qtlibs = [], qtversion = '4') : + all_files = [] + all_files += env.Install(bundle, bundle + ".exe") for lib in qtlibs : - env.Install(bundle, os.path.join(env["QTDIR"], "bin", lib + ".dll")) - env.Install(os.path.join(bundle, "imageformats"), [os.path.join(env["QTDIR"], "plugins", "imageformats", "q" + codec + "4.dll") for codec in qtimageformats]) + all_files += env.Install(bundle, os.path.join(env["QTDIR"], "bin", lib + ".dll")) + plugins_suffix = '4' + if qtversion == '5' : + plugins_suffix = '' + for plugin_type in 
qtplugins: + all_files += env.Install(os.path.join(bundle, plugin_type), [os.path.join(env["QTDIR"], "plugins", plugin_type, "q" + plugin + plugins_suffix + ".dll") for plugin in qtplugins[plugin_type]]) for dir, resourceFiles in resources.items() : @@ -13,7 +18,8 @@ def generate(env) : if e.isdir() : for subresource in env.Glob(str(e) + "/*") : - env.Install(os.path.join(bundle, dir, e.name), subresource) + all_files += env.Install(os.path.join(bundle, dir, e.name), subresource) else : - env.Install(os.path.join(bundle, dir), resource) + all_files += env.Install(os.path.join(bundle, dir), resource) + return all_files env.AddMethod(createWindowsBundle, "WindowsBundle") diff --git a/BuildTools/SCons/Tools/WriteVal.py b/BuildTools/SCons/Tools/WriteVal.py index e39ad82..0a1e1ad 100644 --- a/BuildTools/SCons/Tools/WriteVal.py +++ b/BuildTools/SCons/Tools/WriteVal.py @@ -2,5 +2,5 @@ import SCons.Util def generate(env) : - def writeVal(env, target, source) : + def replacePragmaOnce(env, target, source) : f = open(str(target[0]), 'wb') f.write(source[0].get_contents()) @@ -8,7 +8,8 @@ def generate(env) : env["BUILDERS"]["WriteVal"] = SCons.Builder.Builder( - action = SCons.Action.Action(writeVal, cmdstr = "$GENCOMSTR"), + action = SCons.Action.Action(replacePragmaOnce, cmdstr = "$GENCOMSTR"), single_source = True) def exists(env) : return True + diff --git a/BuildTools/SCons/Tools/qt4.py b/BuildTools/SCons/Tools/qt4.py index d9e41d8..ad4f1c0 100644 --- a/BuildTools/SCons/Tools/qt4.py +++ b/BuildTools/SCons/Tools/qt4.py @@ -56,5 +56,5 @@ class QtdirNotFound(ToolQtWarning): SCons.Warnings.enableWarningClass(ToolQtWarning) -qrcinclude_re = re.compile(r'<file>([^<]*)</file>', re.M) +qrcinclude_re = re.compile(r'<file (alias=\"[^\"]*\")?>([^<]*)</file>', re.M) def transformToWinePath(path) : @@ -329,5 +329,5 @@ def generate(env): return result contents = node.get_contents() - includes = qrcinclude_re.findall(contents) + includes = [included[1] for included in qrcinclude_re.findall(contents)] qrcpath = os.path.dirname(node.path) dirs = [included for included in includes if os.path.isdir(os.path.join(qrcpath,included))] @@ -396,5 +396,5 @@ def generate(env): env.AddMethod(enable_modules, "EnableQt4Modules") -def enable_modules(self, modules, debug=False, crosscompiling=False) : +def enable_modules(self, modules, debug=False, crosscompiling=False, version='4') : import sys @@ -421,4 +421,9 @@ def enable_modules(self, modules, debug=False, crosscompiling=False) : 'QtHelp', 'QtScript', + + # Qt5 modules + 'QtWidgets', + 'QtMultimedia', + 'QtWebKitWidgets', ] staticModules = [ @@ -441,4 +446,6 @@ def enable_modules(self, modules, debug=False, crosscompiling=False) : 'QtOpenGL' : ['QT_OPENGL_LIB'], 'QtGui' : ['QT_GUI_LIB'], + 'QtWidgets' : ['QT_WIDGETS_LIB'], + 'QtWebKitWidgets' : [], 'QtNetwork' : ['QT_NETWORK_LIB'], 'QtCore' : ['QT_CORE_LIB'], @@ -449,6 +456,7 @@ def enable_modules(self, modules, debug=False, crosscompiling=False) : debugSuffix = '' - if sys.platform.startswith("linux") and not crosscompiling : + if sys.platform != "win32" and sys.platform != "darwin" and not crosscompiling : if debug : debugSuffix = '_debug' + if version == '4' : self.AppendUnique(CPPPATH=[os.path.join("$QTDIR","include", "phonon")]) for module in modules : @@ -472,10 +480,15 @@ def enable_modules(self, modules, debug=False, crosscompiling=False) : modules.remove("QtAssistant") modules.append("QtAssistantClient") + if version == '4' : # FIXME: Phonon Hack - self.AppendUnique(LIBS=['phonon'+debugSuffix+'4']) - 
self.AppendUnique(LIBS=[lib+debugSuffix+'4' for lib in modules if lib not in staticModules]) + self.AppendUnique(LIBS=['phonon'+debugSuffix+version]) + self.AppendUnique(LIBS=[lib+debugSuffix+version for lib in modules if lib not in staticModules]) + else : + self.AppendUnique(LIBS=[lib.replace('Qt', 'Qt5') + debugSuffix for lib in modules if lib not in staticModules]) self.PrependUnique(LIBS=[lib+debugSuffix for lib in modules if lib in staticModules]) if 'QtOpenGL' in modules: self.AppendUnique(LIBS=['opengl32']) + elif version == '5' : + self.Append(CPPDEFINES = ["QT_NO_OPENGL"]) self.AppendUnique(CPPPATH=[ '$QTDIR/include/']) self.AppendUnique(CPPPATH=[ '$QTDIR/include/'+module for module in modules]) @@ -499,4 +512,5 @@ def enable_modules(self, modules, debug=False, crosscompiling=False) : # FIXME: Phonon Hack + if version == '4' : self.Append(LINKFLAGS=['-framework', "phonon"]) @@ -507,7 +521,7 @@ def enable_modules(self, modules, debug=False, crosscompiling=False) : else : if len(self["QTDIR"]) > 0 : - self.Append(CPPFLAGS = ["-I" + os.path.join("$QTDIR", "lib", module + ".framework", "Versions", "4", "Headers")]) + self.Append(CPPFLAGS = ["-I" + os.path.join("$QTDIR", "lib", module + ".framework", "Versions", version, "Headers")]) else : - self.Append(CPPFLAGS = ["-I" + os.path.join("/Library/Frameworks", module + ".framework", "Versions", "4", "Headers")]) + self.Append(CPPFLAGS = ["-I" + os.path.join("/Library/Frameworks", module + ".framework", "Versions", version, "Headers")]) self.Append(LINKFLAGS=['-framework', module]) if 'QtOpenGL' in modules: diff --git a/BuildTools/SCons/Tools/textfile.py b/BuildTools/SCons/Tools/textfile.py new file mode 100644 index 0000000..89f8963 --- /dev/null +++ b/BuildTools/SCons/Tools/textfile.py @@ -0,0 +1,175 @@ +# -*- python -*- +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = """ +Textfile/Substfile builder for SCons. + + Create file 'target' which typically is a textfile. The 'source' + may be any combination of strings, Nodes, or lists of same. A + 'linesep' will be put between any part written and defaults to + os.linesep. + + The only difference between the Textfile builder and the Substfile + builder is that strings are converted to Value() nodes for the + former and File() nodes for the latter. To insert files in the + former or strings in the latter, wrap them in a File() or Value(), + respectively. 
+ + The values of SUBST_DICT first have any construction variables + expanded (its keys are not expanded). If a value of SUBST_DICT is + a python callable function, it is called and the result is expanded + as the value. Values are substituted in a "random" order; if any + substitution could be further expanded by another subsitition, it + is unpredictible whether the expansion will occur. +""" + +__revision__ = "src/engine/SCons/Tool/textfile.py 5357 2011/09/09 21:31:03 bdeegan" + +import SCons + +import os +import re + +from SCons.Node import Node +from SCons.Node.Python import Value +from SCons.Util import is_String, is_Sequence, is_Dict + +def _do_subst(node, subs): + """ + Fetch the node contents and replace all instances of the keys with + their values. For example, if subs is + {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'}, + then all instances of %VERSION% in the file will be replaced with + 1.2345 and so forth. + """ + contents = node.get_text_contents() + if not subs: return contents + for (k,v) in subs: + contents = re.sub(k, v, contents) + return contents + +def _action(target, source, env): + # prepare the line separator + linesep = env['LINESEPARATOR'] + if linesep is None: + linesep = os.linesep + elif is_String(linesep): + pass + elif isinstance(linesep, Value): + linesep = linesep.get_text_contents() + else: + raise SCons.Errors.UserError( + 'unexpected type/class for LINESEPARATOR: %s' + % repr(linesep), None) + + # create a dictionary to use for the substitutions + if 'SUBST_DICT' not in env: + subs = None # no substitutions + else: + d = env['SUBST_DICT'] + if is_Dict(d): + d = list(d.items()) + elif is_Sequence(d): + pass + else: + raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence') + subs = [] + for (k,v) in d: + if callable(v): + v = v() + if is_String(v): + v = env.subst(v) + else: + v = str(v) + subs.append((k,v)) + + # write the file + try: + fd = open(target[0].get_path(), "wb") + except (OSError,IOError), e: + raise SCons.Errors.UserError("Can't write target file %s" % target[0]) + # separate lines by 'linesep' only if linesep is not empty + lsep = None + for s in source: + if lsep: fd.write(lsep) + fd.write(_do_subst(s, subs)) + lsep = linesep + fd.close() + +def _strfunc(target, source, env): + return "Creating '%s'" % target[0] + +def _convert_list_R(newlist, sources): + for elem in sources: + if is_Sequence(elem): + _convert_list_R(newlist, elem) + elif isinstance(elem, Node): + newlist.append(elem) + else: + newlist.append(Value(elem)) +def _convert_list(target, source, env): + if len(target) != 1: + raise SCons.Errors.UserError("Only one target file allowed") + newlist = [] + _convert_list_R(newlist, source) + return target, newlist + +_common_varlist = ['SUBST_DICT', 'LINESEPARATOR'] + +_text_varlist = _common_varlist + ['TEXTFILEPREFIX', 'TEXTFILESUFFIX'] +_text_builder = SCons.Builder.Builder( + action = SCons.Action.Action(_action, _strfunc, varlist = _text_varlist), + source_factory = Value, + emitter = _convert_list, + prefix = '$TEXTFILEPREFIX', + suffix = '$TEXTFILESUFFIX', + ) + +_subst_varlist = _common_varlist + ['SUBSTFILEPREFIX', 'TEXTFILESUFFIX'] +_subst_builder = SCons.Builder.Builder( + action = SCons.Action.Action(_action, _strfunc, varlist = _subst_varlist), + source_factory = SCons.Node.FS.File, + emitter = _convert_list, + prefix = '$SUBSTFILEPREFIX', + suffix = '$SUBSTFILESUFFIX', + src_suffix = ['.in'], + ) + +def generate(env): + env['LINESEPARATOR'] = os.linesep + env['BUILDERS']['MyTextfile'] = 
_text_builder + env['TEXTFILEPREFIX'] = '' + env['TEXTFILESUFFIX'] = '.txt' + env['BUILDERS']['MySubstfile'] = _subst_builder + env['SUBSTFILEPREFIX'] = '' + env['SUBSTFILESUFFIX'] = '' + +def exists(env): + return 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/BuildTools/SCons/Tools/wix.py b/BuildTools/SCons/Tools/wix.py index 7d1e4a2..7b62508 100644 --- a/BuildTools/SCons/Tools/wix.py +++ b/BuildTools/SCons/Tools/wix.py @@ -8,9 +8,9 @@ def generate(env) : wixPath += "\\" env['WIX_HEAT'] = wixPath + 'heat.exe' - env['WIX_HEAT_OPTIONS'] = '-gg -sfrag -suid -template fragment -dr ProgramFilesFolder' + env['WIX_HEAT_OPTIONS'] = '-nologo -gg -sfrag -suid -template fragment -dr ProgramFilesFolder' env['WIX_CANDLE'] = wixPath + 'candle.exe' - env['WIX_CANDLE_OPTIONS'] = '' + env['WIX_CANDLE_OPTIONS'] = '-nologo' env['WIX_LIGHT'] = wixPath + 'light.exe' - env['WIX_LIGHT_OPTIONS'] = '-ext WixUIExtension' + env['WIX_LIGHT_OPTIONS'] = '-nologo -ext WixUIExtension' def WiX_IncludeScanner(source, env, path, arg): @@ -21,5 +21,5 @@ def generate(env) : heat_builder = SCons.Builder.Builder( - action = '"$WIX_HEAT" dir Swift\\QtUI\\Swift -cg Files $WIX_HEAT_OPTIONS -o ${TARGET} -t Swift\\Packaging\\WiX\\include.xslt', + action = '"$WIX_HEAT" dir "$WIX_SOURCE_OBJECT_DIR" -cg Files $WIX_HEAT_OPTIONS -o ${TARGET} -t Swift\\Packaging\\WiX\\include.xslt', suffix = '.wxi') @@ -39,5 +39,5 @@ def generate(env) : light_builder = SCons.Builder.Builder( - action = '"$WIX_LIGHT" $WIX_LIGHT_OPTIONS -b Swift\\QtUI\\Swift ${SOURCES} -o ${TARGET}', + action = '"$WIX_LIGHT" $WIX_LIGHT_OPTIONS -b "$WIX_SOURCE_OBJECT_DIR" ${SOURCES} -o ${TARGET}', src_suffix = '.wixobj', src_builder = candle_builder) diff --git a/BuildTools/SCons/Version.py b/BuildTools/SCons/Version.py index 57ef96d..4fd9171 100644 --- a/BuildTools/SCons/Version.py +++ b/BuildTools/SCons/Version.py @@ -1,9 +1,9 @@ import subprocess, os, datetime, re, os.path -def getGitBuildVersion(project) : - tag = git("describe --tags --exact --match \"" + project + "-*\"") +def getGitBuildVersion(root, project) : + tag = git("describe --tags --exact --match \"" + project + "-*\"", root) if tag : return tag.rstrip()[len(project)+1:] - tag = git("describe --tags --match \"" + project + "-*\"") + tag = git("describe --tags --match \"" + project + "-*\"", root) if tag : m = re.match(project + "-(.*)-(.*)-(.*)", tag) @@ -12,9 +12,15 @@ def getGitBuildVersion(project) : return None -def git(cmd) : - p = subprocess.Popen("git " + cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt")) +def git(cmd, root) : + full_cmd = "git " + cmd + p = subprocess.Popen(full_cmd, cwd=root, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=(os.name != "nt")) gitVersion = p.stdout.read() + # error = p.stderr.read() + # if error: + # print "Git error: " + error p.stdin.close() - return gitVersion if p.wait() == 0 else None + if p.wait() == 0 : + return gitVersion + return None def getBuildVersion(root, project) : @@ -26,7 +32,37 @@ def getBuildVersion(root, project) : return version - gitVersion = getGitBuildVersion(project) + gitVersion = getGitBuildVersion(root, project) if gitVersion : return gitVersion return datetime.date.today().strftime("%Y%m%d") + +def convertToWindowsVersion(version) : + version_match = re.match("(\d+)\.(\d+)(.*)", version) + major = version_match and int(version_match.group(1)) or 0 
+ minor = version_match and int(version_match.group(2)) or 0 + if version_match and len(version_match.group(3)) == 0 : + patch = 60000 + else : + match = re.match("^beta(\d+)(.*)", version_match.group(3)) + build_string = "" + if match : + patch = 1000*int(match.group(1)) + build_string = match.group(2) + else : + rc_match = re.match("^rc(\d+)(.*)", version_match.group(3)) + if rc_match : + patch = 10000*int(rc_match.group(1)) + build_string = rc_match.group(2) + else : + patch = 0 + alpha_match = re.match("^alpha(.*)", version_match.group(3)) + if alpha_match : + build_string = alpha_match.group(1) + + if len(build_string) > 0 : + build_match = re.match("^-dev(\d+)", build_string) + if build_match : + patch += int(build_match.group(1)) + + return (major, minor, patch) diff --git a/BuildTools/UpdateDebianChangelog.py b/BuildTools/UpdateDebianChangelog.py index f7d9e65..20e72da 100755 --- a/BuildTools/UpdateDebianChangelog.py +++ b/BuildTools/UpdateDebianChangelog.py @@ -35,5 +35,5 @@ if last_version != version : changelog.write(project + " (" + version + "-1)" + " " + distribution + "; urgency=low\n\n") changelog.write(" * Upstream development snapshot\n\n") - changelog.write(" -- Swift Package Maintainers <packages@swift.im> " + email.utils.formatdate() + "\n") + changelog.write(" -- Swift Package Maintainer <packages@swift.im> " + email.utils.formatdate() + "\n") changelog.write("\n") changelog.write(changelog_data) diff --git a/BuildTools/scons2ninja.py b/BuildTools/scons2ninja.py new file mode 100755 index 0000000..a39ed32 --- /dev/null +++ b/BuildTools/scons2ninja.py @@ -0,0 +1,627 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# +# scons2ninja: A script to create a Ninja build file from SCons. +# +# Copyright (c) 2013 Remko Tronçon +# Licensed under the simplified BSD license. +# See COPYING for details. 
+# +################################################################################ + +import re, os, os.path, subprocess, sys, fnmatch, shlex + +################################################################################ +# Helper methods & variables +################################################################################ + +SCRIPT = sys.argv[0] +SCONS_ARGS = ' '.join(sys.argv[1:]) + +# TODO: Make this a tool-specific map +BINARY_FLAGS = ["-framework", "-arch", "-x", "--output-format", "-isystem", "-include"] + +if sys.platform == 'win32' : + LIB_PREFIX = "" + LIB_SUFFIX = "" + EXE_SUFFIX = ".exe" +else : + LIB_PREFIX = "lib" + LIB_SUFFIX = ".a" + EXE_SUFFIX = "" + +def is_regexp(x) : + return 'match' in dir(x) + +def is_list(l) : + return type(l) is list + +def escape(s) : + return s.replace(' ', '$ ').replace(':', '$:') + +def quote_spaces(s) : + if ' ' in s : + return '"' + s + '"' + else : + return s + +def to_list(l) : + if not l : + return [] + if is_list(l) : + return l + return [l] + +def partition(l, f) : + x = [] + y = [] + for v in l : + if f(v) : + x.append(v) + else : + y.append(v) + return (x, y) + +def get_unary_flags(prefix, flags) : + return [x[len(prefix):] for x in flags if x.lower().startswith(prefix.lower())] + +def extract_unary_flags(prefix, flags) : + f1, f2 = partition(flags, lambda x : x.lower().startswith(prefix.lower())) + return ([f[len(prefix):] for f in f1], f2) + +def extract_unary_flag(prefix, flags) : + flag, flags = extract_unary_flags(prefix, flags) + return (flag[0], flags) + +def extract_binary_flag(prefix, flags) : + i = flags.index(prefix) + flag = flags[i + 1] + del flags[i] + del flags[i] + return (flag, flags) + +def get_non_flags(flags) : + skip = False + result = [] + for f in flags : + if skip : + skip = False + elif f in BINARY_FLAGS : + skip = True + elif not f.startswith("/") and not f.startswith("-") : + result.append(f) + return result + +def extract_non_flags(flags) : + non_flags = get_non_flags(flags) + return (non_flags, filter(lambda x : x not in non_flags, flags)) + +def get_dependencies(target, build_targets) : + result = [] + queue = list(dependencies.get(target, [])) + while len(queue) > 0 : + n = queue.pop() + # Filter out Value() results + if n in build_targets or os.path.exists(n) : + result.append(n) + queue += list(dependencies.get(n, [])) + return result + +def get_built_libs(libs, libpaths, outputs) : + canonical_outputs = [os.path.abspath(p) for p in outputs] + result = [] + for libpath in libpaths : + for lib in libs : + lib_libpath = os.path.join(libpath, LIB_PREFIX + lib + LIB_SUFFIX) + if os.path.abspath(lib_libpath) in canonical_outputs : + result.append(lib_libpath) + return result + +def parse_tool_command(line) : + command = shlex.split(line) + flags = command[1:] + tool = os.path.splitext(os.path.basename(command[0]))[0] + if tool.startswith('clang++') or tool.startswith('g++') : + tool = "cxx" + elif tool.startswith('clang') or tool.startswith('gcc') : + tool = "cc" + if tool in ["cc", "cxx"] and not "-c" in flags : + tool = "glink" + tool = tool.replace('-qt4', '') + return tool, command, flags + +def rglob(pattern, root = '.') : + return [os.path.join(path, f) for path, dirs, files in os.walk(root) for f in fnmatch.filter(files, pattern)] + +################################################################################ +# Helper for building Ninja files +################################################################################ + +class NinjaBuilder : + def __init__(self) : + self._header 
= "" + self.variables = "" + self.rules = "" + self._build = "" + self.pools = "" + self._flags = {} + self.targets = [] + + def header(self, text) : + self._header += text + "\n" + + def rule(self, name, **kwargs) : + self.rules += "rule " + name + "\n" + for k, v in kwargs.iteritems() : + self.rules += " " + str(k) + " = " + str(v) + "\n" + self.rules += "\n" + + def pool(self, name, **kwargs) : + self.pools += "pool " + name + "\n" + for k, v in kwargs.iteritems() : + self.pools += " " + str(k) + " = " + str(v) + "\n" + self.pools += "\n" + + def variable(self, name, value) : + self.variables += str(name) + " = " + str(value) + "\n" + + def build(self, target, rule, sources = None, **kwargs) : + self._build += "build " + self.to_string(target) + ": " + rule + if sources : + self._build += " " + self.to_string(sources) + if 'deps' in kwargs and kwargs['deps'] : + self._build += " | " + self.to_string(kwargs["deps"]) + if 'order_deps' in kwargs : + self._build += " || " + self.to_string(kwargs['order_deps']) + self._build += "\n" + for var, value in kwargs.iteritems() : + if var in ['deps', 'order_deps'] : + continue + value = self.to_string(value, quote = True) + if var.endswith("flags") : + value = self.get_flags_variable(var, value) + self._build += " " + var + " = " + value + "\n" + self.targets += to_list(target) + + def header_targets(self) : + return [x for x in self.targets if x.endswith('.h') or x.endswith('.hh')] + + def serialize(self) : + result = "" + result += self._header + "\n" + result += self.variables + "\n" + for prefix in self._flags.values() : + for k, v in prefix.iteritems() : + result += v + " = " + k + "\n" + result += "\n" + result += self.pools + "\n" + result += self.rules + "\n" + result += self._build + "\n" + return result + + def to_string(self, lst, quote = False) : + if is_list(lst) : + if quote : + return ' '.join([quote_spaces(x) for x in lst]) + else : + return ' '.join([escape(x) for x in lst]) + if is_regexp(lst) : + return ' '.join([escape(x) for x in self.targets if lst.match(x)]) + return escape(lst) + + def get_flags_variable(self, flags_type, flags) : + if len(flags) == 0 : + return '' + if flags_type not in self._flags : + self._flags[flags_type] = {} + type_flags = self._flags[flags_type] + if flags not in type_flags : + type_flags[flags] = flags_type + "_" + str(len(type_flags)) + return "$" + type_flags[flags] + + +################################################################################ +# Configuration +################################################################################ + +ninja_post = [] +scons_cmd = "scons" +scons_dependencies = ['SConstruct'] + rglob('SConscript') + +def ninja_custom_command(ninja, line) : + return False + +CONFIGURATION_FILE = '.scons2ninja.conf' +execfile(CONFIGURATION_FILE) + +scons_dependencies = [os.path.normpath(x) for x in scons_dependencies] + + +################################################################################ +# Rules +################################################################################ + +ninja = NinjaBuilder() + +ninja.pool('scons_pool', depth = 1) + +if sys.platform == 'win32' : + ninja.rule('cl', + deps = 'msvc', + command = '$cl /showIncludes $clflags -c $in /Fo$out', + description = 'CXX $out') + + ninja.rule('link', + command = '$link $in $linkflags $libs /out:$out', + description = 'LINK $out') + + ninja.rule('link_mt', + command = '$link $in $linkflags $libs /out:$out ; $mt $mtflags', + description = 'LINK $out') + + ninja.rule('lib', + command = '$lib 
$libflags /out:$out $in', + description = 'AR $out') + + ninja.rule('rc', + command = '$rc $rcflags /Fo$out $in', + description = 'RC $out') + + # SCons doesn't touch files if they didn't change, which makes + # ninja rebuild the file over and over again. There's no touch on Windows :( + # Could implement it with a script, but for now, delete the file if + # this problem occurs. I'll fix it if it occurs too much. + ninja.rule('scons', + command = scons_cmd + " ${scons_args} $out", + pool = 'scons_pool', + description = 'GEN $out') + + ninja.rule('install', command = 'cmd /c copy $in $out') + ninja.rule('run', command = '$in') +else : + ninja.rule('cxx', + deps = 'gcc', + depfile = '$out.d', + command = '$cxx -MMD -MF $out.d $cxxflags -c $in -o $out', + description = 'CXX $out') + + ninja.rule('cc', + deps = 'gcc', + depfile = '$out.d', + command = '$cc -MMD -MF $out.d $ccflags -c $in -o $out', + description = 'CC $out') + + ninja.rule('link', + command = '$glink -o $out $in $linkflags', + description = 'LINK $out') + + ninja.rule('ar', + command = 'ar $arflags $out $in && ranlib $out', + description = 'AR $out') + + # SCons doesn't touch files if they didn't change, which makes + # ninja rebuild the file over and over again. Touching solves this. + ninja.rule('scons', + command = scons_cmd + " $out && touch $out", + pool = 'scons_pool', + description = 'GEN $out') + + ninja.rule('install', command = 'install $in $out') + ninja.rule('run', command = './$in') + + +ninja.rule('moc', + command = '$moc $mocflags -o $out $in', + description = 'MOC $out') + +ninja.rule('rcc', + command = '$rcc $rccflags -name $name -o $out $in', + description = 'RCC $out') + +ninja.rule('uic', + command = '$uic $uicflags -o $out $in', + description = 'UIC $out') + +ninja.rule('lrelease', + command = '$lrelease $lreleaseflags $in -qm $out', + description = 'LRELEASE $out') + +ninja.rule('ibtool', + command = '$ibtool $ibtoolflags --compile $out $in', + description = 'IBTOOL $out') + +ninja.rule('dsymutil', + command = '$dsymutil $dsymutilflags -o $out $in', + description = 'DSYMUTIL $out') + +ninja.rule('generator', + command = "python " + SCRIPT + " ${scons_args}", + depfile = ".scons2ninja.deps", + pool = 'scons_pool', + generator = '1', + description = 'Regenerating build.ninja') + +ninja.rule('sdef', + command = 'sdef $in | sdp -fh --basename $basename -o $outdir', + description = 'SDEF $out') + +################################################################################ +# Build Statements +################################################################################ + +scons_generate_cmd = scons_cmd + " " + SCONS_ARGS + " --tree=all,prune dump_trace=1" +#scons_generate_cmd = 'cmd /c type scons2ninja.in' +#scons_generate_cmd = 'cat scons2ninja.in' + +# Pass 1: Parse dependencies (and prefilter some build rules) +build_lines = [] +dependencies = {} +mtflags = {} +previous_file = None +f = subprocess.Popen(scons_generate_cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell=True) +stage = 'preamble' +skip_nth_line = -1 +stack = ['.'] +for line in f.stdout : + line = line.rstrip() + + # Skip lines if requested from previous command + if skip_nth_line >= 0 : + skip_nth_line -= 1 + if skip_nth_line == 0 : + continue + + if line.startswith('scons: done building targets') : + break + + if stage == "preamble" : + # Pass all lines from the SCons configuration step to output + if re.match("^scons: Building targets ...", line) : + stage = "build" + else : + print line + + elif stage == "build" : + if 
line.startswith('+-') : + stage = "dependencies" + elif re.match("^Using tempfile", line) : + # Ignore response files from MSVS + skip_nth_line = 2 + else : + build_lines.append(line) + + # Already detect targets that will need 'mt' + tool, _, flags = parse_tool_command(line) + if tool == 'mt' : + target = get_unary_flags("-outputresource:", flags)[0] + target = target[0:target.index(';')] + mtflags[target] = flags + + elif stage == "dependencies" : + if not re.match('^[\s|]+\+\-', line) : + # Work around bug in SCons that splits output over multiple lines + continue + + level = line.index('+-') / 2 + filename = line[level*2+2:] + if filename.startswith('[') : + filename = filename[1:-1] + + # Check if we use the 'fixed' format which escapes filenamenames + if filename.startswith('\'') and filename.endswith('\'') : + filename = eval(filename) + + if level < len(stack) : + stack = stack[0:level] + elif level > len(stack) : + if level != len(stack) + 1 : + raise Exception("Internal Error" ) + stack.append(previous_filename) + + # Skip absolute paths + if not os.path.isabs(filename) : + target = stack[-1] + if target not in dependencies : + dependencies[target] = [] + dependencies[target].append(filename) + previous_filename = filename + +if f.wait() != 0 : + print "Error calling '" + scons_generate_cmd + "'" + print f.stderr.read() + exit(-1) + +# Pass 2: Parse build rules +tools = {} +for line in build_lines : + # Custom python function + m = re.match('^(\w+)\(\[([^\]]*)\]', line) + if m : + out = [x[1:-1] for x in m.group(2).split(',')] + for x in out : + # 'Note' = To be more correct, deps should also include $scons_dependencies, + # but this regenerates a bit too often, so leaving it out for now. + ninja.build(x, 'scons', None, deps = sorted(get_dependencies(x, ninja.targets))) + continue + + + # TextFile + m = re.match("^Creating '([^']+)'", line) + if m : + out = m.group(1) + # Note: To be more correct, deps should also include $scons_dependencies, + # but this regenerates a bit too often, so leaving it out for now. 
+ ninja.build(out, 'scons', None, deps = sorted(get_dependencies(out, ninja.targets))) + continue + + # Install + m = re.match('^Install file: "(.*)" as "(.*)"', line) + if m : + ninja.build(m.group(2), 'install', m.group(1)) + continue + + m = re.match('^Install directory: "(.*)" as "(.*)"', line) + if m : + for source in rglob('*', m.group(1)) : + if os.path.isdir(source) : + continue + target = os.path.join(m.group(2), os.path.relpath(source, m.group(1))) + ninja.build(target, 'install', source) + continue + + # Tools + tool, command, flags = parse_tool_command(line) + tools[tool] = command[0] + + ############################################################ + # clang/gcc tools + ############################################################ + + if tool == 'cc': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'cc', files, order_deps = '_generated_headers', ccflags = flags) + + elif tool == 'cxx': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'cxx', files, order_deps = '_generated_headers', cxxflags = flags) + + elif tool == 'glink': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + libs = get_unary_flags('-l', flags) + libpaths = get_unary_flags("-L", flags) + deps = get_built_libs(libs, libpaths, ninja.targets) + ninja.build(out, 'link', files, deps = sorted(deps), linkflags = flags) + + elif tool == 'ar': + objects, flags = partition(flags, lambda x: x.endswith('.o')) + libs, flags = partition(flags, lambda x: x.endswith('.a')) + out = libs[0] + ninja.build(out, 'ar', objects, arflags = flags) + + elif tool == 'ranlib': + pass + + + ############################################################ + # MSVC tools + ############################################################ + + elif tool == 'cl': + out, flags = extract_unary_flag("/Fo", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'cl', files, order_deps = '_generated_headers', clflags = flags) + + elif tool == 'lib': + out, flags = extract_unary_flag("/out:", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'lib', files, libflags = flags) + + elif tool == 'link': + objects, flags = partition(flags, lambda x: x.endswith('.obj') or x.endswith('.res')) + out, flags = extract_unary_flag("/out:", flags) + libs, flags = partition(flags, lambda x: not x.startswith("/") and x.endswith(".lib")) + libpaths = get_unary_flags("/libpath:", flags) + deps = get_built_libs(libs, libpaths, ninja.targets) + if out in mtflags : + ninja.build(out, 'link_mt', objects, deps = sorted(deps), + libs = libs, linkflags = flags, mtflags = mtflags[out]) + else : + ninja.build(out, 'link', objects, deps = sorted(deps), + libs = libs, linkflags = flags) + + elif tool == 'rc': + out, flags = extract_unary_flag("/fo", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'rc', files[0], order_deps = '_generated_headers', rcflags = flags) + + elif tool == 'mt': + # Already handled + pass + + ############################################################ + # Qt tools + ############################################################ + + elif tool == 'moc': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'moc', files, mocflags = flags) + + elif tool == 'uic': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'uic', files, uicflags = flags) + + 
elif tool == 'lrelease': + out, flags = extract_binary_flag("-qm", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'lrelease', files, lreleaseflags = flags) + + elif tool == 'rcc': + out, flags = extract_binary_flag("-o", flags) + name, flags = extract_binary_flag("-name", flags) + files, flags = extract_non_flags(flags) + deps = list(set(get_dependencies(out, ninja.targets)) - set(files)) + ninja.build(out, 'rcc', files, deps = sorted(deps), name = name, rccflags = flags) + + ############################################################ + # OS X tools + ############################################################ + + elif tool == 'ibtool': + out, flags = extract_binary_flag("--compile", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'ibtool', files, ibtoolflags = flags) + + elif tool == 'dsymutil': + out, flags = extract_binary_flag("-o", flags) + files, flags = extract_non_flags(flags) + ninja.build(out, 'dsymutil', files, dsymutilflags = flags) + + elif tool == 'sdef' : + source = flags[0]; + outdir, flags = extract_binary_flag("-o", flags) + basename, flags = extract_binary_flag("--basename", flags) + ninja.build(os.path.join(outdir, basename + ".h"), 'sdef', [source], + basename = basename, + outdir = outdir) + + + elif not ninja_custom_command(ninja, line) : + raise Exception("Unknown tool: '" + line + "'") + + +# Phony target for all generated headers, used as an order-only depency from all C/C++ sources +ninja.build('_generated_headers', 'phony', ninja.header_targets()) + +# Regenerate build.ninja file +ninja.build('build.ninja', 'generator', [], deps = [SCRIPT, CONFIGURATION_FILE]) + +# Header & variables +ninja.header("# This file is generated by " + SCRIPT) +ninja.variable("ninja_required_version", "1.3") +ninja.variable("scons_args", SCONS_ARGS) +for k, v in tools.iteritems() : + ninja.variable(k, v) + +# Extra customizations +if 'ninja_post' in dir() : + ninja_post(ninja) + + +################################################################################ +# Result +################################################################################ + +f = open(".scons2ninja.deps", "w") +f.write("build.ninja: " + " ".join([d for d in scons_dependencies if os.path.exists(d)]) + "\n") +f.close() + +f = open("build.ninja", "w") +f.write(ninja.serialize()) +f.close() |
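Each line handled in the second pass above is decomposed with the helper functions defined near the top of scons2ninja.py. A short worked example for the 'cxx' case, assuming those helpers (parse_tool_command, extract_binary_flag, extract_non_flags) and a made-up compile line, shows the intended flow:

    # Hypothetical SCons output line; the file names are illustrative only.
    line = "g++ -o Swiften/Client.o -c -O2 -Iinclude Swiften/Client.cpp"
    tool, command, flags = parse_tool_command(line)
    # tool == "cxx" because the command is g++ and "-c" is present
    out, flags = extract_binary_flag("-o", flags)
    # out == "Swiften/Client.o"; "-o" and its argument are removed from flags
    files, flags = extract_non_flags(flags)
    # files == ["Swiften/Client.cpp"], flags == ["-c", "-O2", "-Iinclude"]
    ninja.build(out, "cxx", files, order_deps = "_generated_headers", cxxflags = flags)
    # emits roughly: build Swiften/Client.o: cxx Swiften/Client.cpp || _generated_headers
    #                  cxxflags = $cxxflags_0   (flag strings are deduplicated into variables)

Extracting "-o" before calling extract_non_flags matters: "-o" is not listed in BINARY_FLAGS, so the object path would otherwise be misclassified as a source file.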
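scons2ninja.py reads its settings from .scons2ninja.conf in the current directory; the file is loaded with execfile, so it is plain Python executed in the script's namespace. A minimal configuration, with illustrative values only, might look like:

    # .scons2ninja.conf (plain Python, loaded via execfile by scons2ninja.py)
    scons_cmd = "scons"                                          # command used in generated 'scons' rules
    scons_dependencies = ['SConstruct'] + rglob('SConscript')    # changes here re-trigger generation

    def ninja_custom_command(ninja, line) :
        # Return True if this hook handled 'line'; returning False lets the
        # script fall through to its normal "Unknown tool" error.
        return False

    def ninja_post(ninja) :
        # Optional hook, called just before build.ninja is written, to adjust
        # the NinjaBuilder (extra rules, variables, targets).
        pass

With the configuration in place, running the script once from the directory containing .scons2ninja.conf (for example: python BuildTools/scons2ninja.py <scons arguments>) writes build.ninja and .scons2ninja.deps; the generated 'generator' rule then re-runs the script whenever the script itself, the configuration file or the recorded SCons files change.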