
github.com/moses-smt/mosesdecoder.git
author     Kenneth Heafield <kenlm@kheafield.com>  2011-11-18 22:14:09 +0400
committer  Kenneth Heafield <kenlm@kheafield.com>  2011-11-18 22:14:09 +0400
commit     9102477a072e92c574367ce4ba6c6692c2a92bb4 (patch)
tree       35d01c49b87e3b3bf62a4cd1b78b9bf84f1a70ce /jam-files/boost-build
parent     f15eee0abf7e1d2795cbffd26b82992f23c1f36b (diff)
bjam for people who don't have it
Diffstat (limited to 'jam-files/boost-build')
-rw-r--r--  jam-files/boost-build/boost-build.jam | 8
-rw-r--r--  jam-files/boost-build/bootstrap.jam | 18
-rw-r--r--  jam-files/boost-build/build-system.jam | 1008
-rw-r--r--  jam-files/boost-build/build/ac.jam | 198
-rw-r--r--  jam-files/boost-build/build/alias.jam | 73
-rw-r--r--  jam-files/boost-build/build/build-request.jam | 322
-rw-r--r--  jam-files/boost-build/build/configure.jam | 237
-rw-r--r--  jam-files/boost-build/build/feature.jam | 1335
-rw-r--r--  jam-files/boost-build/build/generators.jam | 1408
-rw-r--r--  jam-files/boost-build/build/modifiers.jam | 232
-rw-r--r--  jam-files/boost-build/build/project.jam | 1110
-rw-r--r--  jam-files/boost-build/build/property-set.jam | 481
-rw-r--r--  jam-files/boost-build/build/property.jam | 788
-rw-r--r--  jam-files/boost-build/build/readme.txt | 13
-rw-r--r--  jam-files/boost-build/build/scanner.jam | 153
-rw-r--r--  jam-files/boost-build/build/targets.jam | 1659
-rw-r--r--  jam-files/boost-build/build/toolset.jam | 502
-rw-r--r--  jam-files/boost-build/build/type.jam | 425
-rw-r--r--  jam-files/boost-build/build/version.jam | 161
-rw-r--r--  jam-files/boost-build/build/virtual-target.jam | 1317
-rw-r--r--  jam-files/boost-build/kernel/boost-build.jam | 5
-rw-r--r--  jam-files/boost-build/kernel/bootstrap.jam | 263
-rw-r--r--  jam-files/boost-build/kernel/class.jam | 420
-rw-r--r--  jam-files/boost-build/kernel/errors.jam | 274
-rw-r--r--  jam-files/boost-build/kernel/modules.jam | 354
-rw-r--r--  jam-files/boost-build/options/help.jam | 212
-rw-r--r--  jam-files/boost-build/site-config.jam | 4
-rw-r--r--  jam-files/boost-build/tools/acc.jam | 118
-rw-r--r--  jam-files/boost-build/tools/bison.jam | 32
-rw-r--r--  jam-files/boost-build/tools/boostbook-config.jam | 13
-rw-r--r--  jam-files/boost-build/tools/boostbook.jam | 727
-rw-r--r--  jam-files/boost-build/tools/borland.jam | 220
-rw-r--r--  jam-files/boost-build/tools/builtin.jam | 960
-rw-r--r--  jam-files/boost-build/tools/cast.jam | 91
-rw-r--r--  jam-files/boost-build/tools/clang-darwin.jam | 170
-rw-r--r--  jam-files/boost-build/tools/clang-linux.jam | 196
-rw-r--r--  jam-files/boost-build/tools/clang.jam | 27
-rw-r--r--  jam-files/boost-build/tools/common.jam | 986
-rw-r--r--  jam-files/boost-build/tools/como-linux.jam | 103
-rw-r--r--  jam-files/boost-build/tools/como-win.jam | 117
-rw-r--r--  jam-files/boost-build/tools/como.jam | 29
-rw-r--r--  jam-files/boost-build/tools/convert.jam | 62
-rw-r--r--  jam-files/boost-build/tools/cw-config.jam | 34
-rw-r--r--  jam-files/boost-build/tools/cw.jam | 246
-rw-r--r--  jam-files/boost-build/tools/darwin.jam | 568
-rw-r--r--  jam-files/boost-build/tools/dmc.jam | 134
-rw-r--r--  jam-files/boost-build/tools/docutils.jam | 84
-rw-r--r--  jam-files/boost-build/tools/doxygen-config.jam | 11
-rw-r--r--  jam-files/boost-build/tools/doxygen.jam | 776
-rw-r--r--  jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile | 3
-rw-r--r--  jam-files/boost-build/tools/doxygen/windows-paths-check.hpp | 0
-rw-r--r--  jam-files/boost-build/tools/fop.jam | 69
-rw-r--r--  jam-files/boost-build/tools/fortran.jam | 55
-rw-r--r--  jam-files/boost-build/tools/gcc.jam | 1185
-rw-r--r--  jam-files/boost-build/tools/generate.jam | 108
-rw-r--r--  jam-files/boost-build/tools/gettext.jam | 230
-rw-r--r--  jam-files/boost-build/tools/gfortran.jam | 39
-rw-r--r--  jam-files/boost-build/tools/hp_cxx.jam | 181
-rw-r--r--  jam-files/boost-build/tools/hpfortran.jam | 35
-rw-r--r--  jam-files/boost-build/tools/ifort.jam | 44
-rw-r--r--  jam-files/boost-build/tools/intel-darwin.jam | 220
-rw-r--r--  jam-files/boost-build/tools/intel-linux.jam | 250
-rw-r--r--  jam-files/boost-build/tools/intel-win.jam | 184
-rw-r--r--  jam-files/boost-build/tools/intel.jam | 34
-rw-r--r--  jam-files/boost-build/tools/lex.jam | 33
-rw-r--r--  jam-files/boost-build/tools/make.jam | 72
-rw-r--r--  jam-files/boost-build/tools/mc.jam | 44
-rw-r--r--  jam-files/boost-build/tools/message.jam | 55
-rw-r--r--  jam-files/boost-build/tools/midl.jam | 142
-rw-r--r--  jam-files/boost-build/tools/mipspro.jam | 145
-rw-r--r--  jam-files/boost-build/tools/mpi.jam | 583
-rw-r--r--  jam-files/boost-build/tools/msvc-config.jam | 12
-rw-r--r--  jam-files/boost-build/tools/msvc.jam | 1392
-rw-r--r--  jam-files/boost-build/tools/notfile.jam | 74
-rw-r--r--  jam-files/boost-build/tools/package.jam | 165
-rw-r--r--  jam-files/boost-build/tools/pathscale.jam | 168
-rw-r--r--  jam-files/boost-build/tools/pch.jam | 95
-rw-r--r--  jam-files/boost-build/tools/pgi.jam | 147
-rw-r--r--  jam-files/boost-build/tools/python-config.jam | 27
-rw-r--r--  jam-files/boost-build/tools/python.jam | 1267
-rw-r--r--  jam-files/boost-build/tools/qcc.jam | 236
-rw-r--r--  jam-files/boost-build/tools/qt.jam | 17
-rw-r--r--  jam-files/boost-build/tools/qt3.jam | 209
-rw-r--r--  jam-files/boost-build/tools/qt4.jam | 713
-rw-r--r--  jam-files/boost-build/tools/quickbook-config.jam | 44
-rw-r--r--  jam-files/boost-build/tools/quickbook.jam | 361
-rw-r--r--  jam-files/boost-build/tools/rc.jam | 156
-rw-r--r--  jam-files/boost-build/tools/stage.jam | 524
-rw-r--r--  jam-files/boost-build/tools/stlport.jam | 303
-rw-r--r--  jam-files/boost-build/tools/sun.jam | 142
-rw-r--r--  jam-files/boost-build/tools/symlink.jam | 140
-rw-r--r--  jam-files/boost-build/tools/testing-aux.jam | 210
-rw-r--r--  jam-files/boost-build/tools/testing.jam | 581
-rw-r--r--  jam-files/boost-build/tools/types/asm.jam | 4
-rw-r--r--  jam-files/boost-build/tools/types/cpp.jam | 86
-rw-r--r--  jam-files/boost-build/tools/types/exe.jam | 9
-rw-r--r--  jam-files/boost-build/tools/types/html.jam | 4
-rw-r--r--  jam-files/boost-build/tools/types/lib.jam | 74
-rw-r--r--  jam-files/boost-build/tools/types/obj.jam | 9
-rw-r--r--  jam-files/boost-build/tools/types/objc.jam | 26
-rw-r--r--  jam-files/boost-build/tools/types/preprocessed.jam | 9
-rw-r--r--  jam-files/boost-build/tools/types/qt.jam | 10
-rw-r--r--  jam-files/boost-build/tools/types/register.jam | 39
-rw-r--r--  jam-files/boost-build/tools/types/rsp.jam | 4
-rw-r--r--  jam-files/boost-build/tools/unix.jam | 224
-rw-r--r--  jam-files/boost-build/tools/vacpp.jam | 150
-rw-r--r--  jam-files/boost-build/tools/whale.jam | 116
-rw-r--r--  jam-files/boost-build/tools/xlf.jam | 39
-rw-r--r--  jam-files/boost-build/tools/xsltproc-config.jam | 37
-rw-r--r--  jam-files/boost-build/tools/xsltproc.jam | 194
-rw-r--r--  jam-files/boost-build/tools/xsltproc/included.xsl | 11
-rw-r--r--  jam-files/boost-build/tools/xsltproc/test.xml | 2
-rw-r--r--  jam-files/boost-build/tools/xsltproc/test.xsl | 12
-rw-r--r--  jam-files/boost-build/tools/zlib.jam | 92
-rw-r--r--  jam-files/boost-build/user-config.jam | 92
-rw-r--r--  jam-files/boost-build/util/assert.jam | 336
-rw-r--r--  jam-files/boost-build/util/container.jam | 339
-rw-r--r--  jam-files/boost-build/util/doc.jam | 997
-rw-r--r--  jam-files/boost-build/util/indirect.jam | 115
-rw-r--r--  jam-files/boost-build/util/numbers.jam | 218
-rw-r--r--  jam-files/boost-build/util/option.jam | 109
-rw-r--r--  jam-files/boost-build/util/order.jam | 169
-rw-r--r--  jam-files/boost-build/util/os.jam | 171
-rw-r--r--  jam-files/boost-build/util/path.jam | 934
-rw-r--r--  jam-files/boost-build/util/print.jam | 488
-rw-r--r--  jam-files/boost-build/util/regex.jam | 193
-rw-r--r--  jam-files/boost-build/util/sequence.jam | 335
-rw-r--r--  jam-files/boost-build/util/set.jam | 93
-rw-r--r--  jam-files/boost-build/util/string.jam | 189
-rw-r--r--  jam-files/boost-build/util/utility.jam | 235
130 files changed, 35542 insertions, 0 deletions
diff --git a/jam-files/boost-build/boost-build.jam b/jam-files/boost-build/boost-build.jam
new file mode 100644
index 000000000..73db0497b
--- /dev/null
+++ b/jam-files/boost-build/boost-build.jam
@@ -0,0 +1,8 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+boost-build kernel ;
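The boost-build rule above points the bjam engine at the directory holding the build system's bootstrap.jam (here, the kernel/ subdirectory). A project that ships this bundled copy would normally have a boost-build.jam of its own at the source root doing the same with a relative path; a minimal sketch (the path below is only an illustration of how such a file might look for this layout, it is not taken from this commit):

    # boost-build.jam at the project root: tells a bare bjam where the bundled
    # Boost.Build lives (the named directory must contain a bootstrap.jam).
    boost-build jam-files/boost-build ;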
diff --git a/jam-files/boost-build/bootstrap.jam b/jam-files/boost-build/bootstrap.jam
new file mode 100644
index 000000000..af3e8bf50
--- /dev/null
+++ b/jam-files/boost-build/bootstrap.jam
@@ -0,0 +1,18 @@
+# Copyright (c) 2003 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file handles the initial phase of Boost.Build loading.
+# Boost.Jam has already figured out where Boost.Build is
+# and loads this file, which is responsible for initialization
+# of basic facilities such as the module system and for loading the
+# main Boost.Build module, build-system.jam.
+#
+# The exact operation of this module is not interesting; it makes
+# sense to look at build-system.jam right away.
+
+# Load the kernel/bootstrap.jam, which does all the work.
+.bootstrap-file = $(.bootstrap-file:D)/kernel/bootstrap.jam ;
+include $(.bootstrap-file) ; \ No newline at end of file
diff --git a/jam-files/boost-build/build-system.jam b/jam-files/boost-build/build-system.jam
new file mode 100644
index 000000000..9f9c884cc
--- /dev/null
+++ b/jam-files/boost-build/build-system.jam
@@ -0,0 +1,1008 @@
+# Copyright 2003, 2005, 2007 Dave Abrahams
+# Copyright 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is part of Boost Build version 2. You can think of it as forming the
+# main() routine. It is invoked by the bootstrapping code in bootstrap.jam.
+
+import build-request ;
+import builtin ;
+import "class" : new ;
+import errors ;
+import feature ;
+import make ;
+import modules ;
+import os ;
+import path ;
+import project ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
+import utility ;
+import version ;
+import virtual-target ;
+import generators ;
+import configure ;
+
+################################################################################
+#
+# Module global data.
+#
+################################################################################
+
+# Shortcut used in this module for accessing used command-line parameters.
+.argv = [ modules.peek : ARGV ] ;
+
+# Flag indicating we should display additional debugging information related to
+# locating and loading Boost Build configuration files.
+.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ;
+
+# Legacy option doing too many things, some of which are not even documented.
+# Should be phased out.
+# * Disables loading site and user configuration files.
+# * Disables auto-configuration for toolsets specified explicitly on the
+# command-line.
+# * Causes --toolset command-line options to be ignored.
+# * Prevents the default toolset from being used even if no toolset has been
+# configured at all.
+.legacy-ignore-config = [ MATCH ^(--ignore-config)$ : $(.argv) ] ;
+
+# Cleaning is tricky. Say the user runs 'bjam --clean foo' where 'foo' is a
+# directory: we then want to clean targets which are in 'foo' as well as those
+# in any child Jamfiles under 'foo', but not in any unrelated Jamfiles. To
+# achieve this we collect a list of projects under which cleaning is allowed.
+.project-targets = ;
+
+# Virtual targets obtained when building main targets referenced on the command
+# line. When running 'bjam --clean main_target' we want to clean only files
+# belonging to that main target, so we need to record which targets are produced
+# for it.
+.results-of-main-targets = ;
+
+# Was an XML dump requested?
+.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ;
+
+# Default toolset & version to be used in case no other toolset has been used
+# explicitly by either the loaded configuration files, the loaded project build
+# scripts or an explicit toolset request on the command line. If not specified,
+# an arbitrary default will be used based on the current host OS. This value,
+# while not strictly necessary, has been added to allow testing Boost-Build's
+# default toolset usage functionality.
+.default-toolset = ;
+.default-toolset-version = ;
+
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Returns the property set with the free features from the currently processed
+# build request.
+#
+rule command-line-free-features ( )
+{
+ return $(.command-line-free-features) ;
+}
+
+
+# Returns the location of the build system. The primary use case is building
+# Boost where it is sometimes needed to get the location of other components
+# (e.g. BoostBook files) and it is convenient to use locations relative to the
+# Boost Build path.
+#
+rule location ( )
+{
+ local r = [ modules.binding build-system ] ;
+ return $(r:P) ;
+}
+
+
+# Sets the default toolset & version to be used in case no other toolset has
+# been used explicitly by either the loaded configuration files, the loaded
+# project build scripts or an explicit toolset request on the command line. For
+# more detailed information see the comment related to used global variables.
+#
+rule set-default-toolset ( toolset : version ? )
+{
+ .default-toolset = $(toolset) ;
+ .default-toolset-version = $(version) ;
+}
+
+rule set-pre-build-hook ( function )
+{
+ .pre-build-hook = $(function) ;
+}
+
+rule set-post-build-hook ( function )
+{
+ .post-build-hook = $(function) ;
+}
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Returns actual Jam targets to be used for executing a clean request.
+#
+local rule actual-clean-targets ( )
+{
+ # Construct a list of projects explicitly detected as targets on this build
+ # system run. These are the projects under which cleaning is allowed.
+ for local t in $(targets)
+ {
+ if [ class.is-a $(t) : project-target ]
+ {
+ .project-targets += [ $(t).project-module ] ;
+ }
+ }
+
+ # Construct a list of targets explicitly detected on this build system run
+ # as a result of building main targets.
+ local targets-to-clean ;
+ for local t in $(.results-of-main-targets)
+ {
+ # Do not include roots or sources.
+ targets-to-clean += [ virtual-target.traverse $(t) ] ;
+ }
+ targets-to-clean = [ sequence.unique $(targets-to-clean) ] ;
+
+ local to-clean ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ local p = [ $(t).project ] ;
+
+ # Remove only derived targets.
+ if [ $(t).action ]
+ {
+ if $(t) in $(targets-to-clean) ||
+ [ should-clean-project [ $(p).project-module ] ] = true
+ {
+ to-clean += $(t) ;
+ }
+ }
+ }
+
+ local to-clean-actual ;
+ for local t in $(to-clean)
+ {
+ to-clean-actual += [ $(t).actualize ] ;
+ }
+ return $(to-clean-actual) ;
+}
+
+
+# Given a target id, try to find and return the corresponding target. This is
+# only invoked when there is no Jamfile in ".". This code somewhat duplicates
+# code in project-target.find but we cannot reuse that code without a
+# project-target instance.
+#
+local rule find-target ( target-id )
+{
+ local split = [ MATCH (.*)//(.*) : $(target-id) ] ;
+
+ local pm ;
+ if $(split)
+ {
+ pm = [ project.find $(split[1]) : "." ] ;
+ }
+ else
+ {
+ pm = [ project.find $(target-id) : "." ] ;
+ }
+
+ local result ;
+ if $(pm)
+ {
+ result = [ project.target $(pm) ] ;
+ }
+
+ if $(split)
+ {
+ result = [ $(result).find $(split[2]) ] ;
+ }
+
+ return $(result) ;
+}
+
+
+# Initializes a new configuration module.
+#
+local rule initialize-config-module ( module-name : location ? )
+{
+ project.initialize $(module-name) : $(location) ;
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
+}
+
+
+# Helper rule used to load configuration files. Loads the first configuration
+# file with the given 'filename' at 'path' into module with name 'module-name'.
+# Not finding the requested file may or may not be treated as an error depending
+# on the must-find parameter. Returns a normalized path to the loaded
+# configuration file or nothing if no file was loaded.
+#
+local rule load-config ( module-name : filename : path + : must-find ? )
+{
+ if $(.debug-config)
+ {
+ ECHO "notice: Searching" "$(path)" "for" "$(module-name)"
+ "configuration file" "$(filename)" "." ;
+ }
+ local where = [ GLOB $(path) : $(filename) ] ;
+ if $(where)
+ {
+ where = [ NORMALIZE_PATH $(where[1]) ] ;
+ if $(.debug-config)
+ {
+ ECHO "notice: Loading" "$(module-name)" "configuration file"
+ "$(filename)" "from" $(where) "." ;
+ }
+
+ # Set source location so that path-constant in config files
+ # with relative paths work. This is of most importance
+ # for project-config.jam, but may be used in other
+ # config files as well.
+ local attributes = [ project.attributes $(module-name) ] ;
+ $(attributes).set source-location : $(where:D) : exact ;
+ modules.load $(module-name) : $(filename) : $(path) ;
+ project.load-used-projects $(module-name) ;
+ }
+ else
+ {
+ if $(must-find)
+ {
+ errors.user-error "Configuration file" "$(filename)" "not found in"
+ "$(path)" "." ;
+ }
+ if $(.debug-config)
+ {
+ ECHO "notice:" "Configuration file" "$(filename)" "not found in"
+ "$(path)" "." ;
+ }
+ }
+ return $(where) ;
+}
+
+
+# Loads all the configuration files used by Boost Build in the following order:
+#
+# -- test-config --
+# Loaded only if specified on the command-line using the --test-config
+# command-line parameter. It is ok for this file not to exist even if specified.
+# If this configuration file is loaded, regular site and user configuration
+# files will not be. If a relative path is specified, file is searched for in
+# the current folder.
+#
+# -- site-config --
+# Always named site-config.jam. Will only be found if located on the system
+# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
+# path, in that order. Not loaded in case the test-config configuration file is
+# loaded or either the --ignore-site-config or the --ignore-config command-line
+# option is specified.
+#
+# -- user-config --
+# Named user-config.jam by default or may be named explicitly using the
+# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
+# variable. If named explicitly the file is looked for from the current working
+# directory and if the default one is used then it is searched for in the
+# user's home directory and the Boost Build path, in that order. Not loaded in
+# case either the test-config configuration file is loaded, --ignore-config
+# command-line option is specified or an empty file name is explicitly
+# specified. If the file name has been given explicitly then the file must
+# exist.
+#
+# Test configurations have been added primarily for use by Boost Build's
+# internal unit testing system but may be used freely in other places as well.
+#
+local rule load-configuration-files
+{
+ # Flag indicating that site configuration should not be loaded.
+ local ignore-site-config =
+ [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ;
+
+ if $(.legacy-ignore-config) && $(.debug-config)
+ {
+ ECHO "notice: Regular site and user configuration files will be ignored" ;
+ ECHO "notice: due to the --ignore-config command-line option." ;
+ }
+
+ initialize-config-module test-config ;
+ local test-config = [ MATCH ^--test-config=(.*)$ : $(.argv) ] ;
+ local uq = [ MATCH \"(.*)\" : $(test-config) ] ;
+ if $(uq)
+ {
+ test-config = $(uq) ;
+ }
+ if $(test-config)
+ {
+ local where =
+ [ load-config test-config : $(test-config:BS) : $(test-config:D) ] ;
+ if $(where)
+ {
+ if $(.debug-config) && ! $(.legacy-ignore-config)
+ {
+ ECHO "notice: Regular site and user configuration files will" ;
+ ECHO "notice: be ignored due to the test configuration being"
+ "loaded." ;
+ }
+ }
+ else
+ {
+ test-config = ;
+ }
+ }
+
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local site-path = /etc $(user-path) ;
+ if [ os.name ] in NT CYGWIN
+ {
+ site-path = [ modules.peek : SystemRoot ] $(user-path) ;
+ }
+
+ if $(ignore-site-config) && ! $(.legacy-ignore-config)
+ {
+ ECHO "notice: Site configuration files will be ignored due to the" ;
+ ECHO "notice: --ignore-site-config command-line option." ;
+ }
+
+ initialize-config-module site-config ;
+ if ! $(test-config) && ! $(ignore-site-config) && ! $(.legacy-ignore-config)
+ {
+ load-config site-config : site-config.jam : $(site-path) ;
+ }
+
+ initialize-config-module user-config ;
+ if ! $(test-config) && ! $(.legacy-ignore-config)
+ {
+ local user-config = [ MATCH ^--user-config=(.*)$ : $(.argv) ] ;
+ user-config = $(user-config[-1]) ;
+ user-config ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ;
+ # Special handling for the case when the OS does not strip the quotes
+ # around the file name, as is the case when using Cygwin bash.
+ user-config = [ utility.unquote $(user-config) ] ;
+ local explicitly-requested = $(user-config) ;
+ user-config ?= user-config.jam ;
+
+ if $(user-config)
+ {
+ if $(explicitly-requested)
+ {
+ # Treat explicitly entered user paths as native OS path
+ # references and, if non-absolute, root them at the current
+ # working directory.
+ user-config = [ path.make $(user-config) ] ;
+ user-config = [ path.root $(user-config) [ path.pwd ] ] ;
+ user-config = [ path.native $(user-config) ] ;
+
+ if $(.debug-config)
+ {
+ ECHO "notice: Loading explicitly specified user"
+ "configuration file:" ;
+ ECHO " $(user-config)" ;
+ }
+
+ load-config user-config : $(user-config:BS) : $(user-config:D)
+ : must-exist ;
+ }
+ else
+ {
+ load-config user-config : $(user-config) : $(user-path) ;
+ }
+ }
+ else if $(.debug-config)
+ {
+ ECHO "notice: User configuration file loading explicitly disabled." ;
+ }
+ }
+
+ # We look for project-config.jam from "." upward.
+ # I am not sure this is the 100% right decision; we might as well check for
+ # it only alongside the Jamroot file. However:
+ #
+ # - We need to load project-root.jam before Jamroot
+ # - We probably would need to load project-root.jam even if there's no
+ # Jamroot - e.g. to implement automake-style out-of-tree builds.
+ local file = [ path.glob "." : project-config.jam ] ;
+ if ! $(file)
+ {
+ file = [ path.glob-in-parents "." : project-config.jam ] ;
+ }
+ if $(file)
+ {
+ initialize-config-module project-config : $(file:D) ;
+ load-config project-config : project-config.jam : $(file:D) ;
+ }
+}
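As described in the comment above, user-config.jam is the usual place to declare toolsets. A minimal hedged sketch of such a file (toolset versions and commands below are illustrative):

    # user-config.jam, picked up from the user's home directory or from
    # BOOST_BUILD_PATH.
    using gcc : 4.6 : g++-4.6 ;   # declare a specific GCC and the command to invoke
    using msvc : 10.0 ;           # several toolsets may be declared side by side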
+
+
+# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
+# toolset=xx,yy,...zz in the command line. May return additional properties to
+# be processed as if they had been specified by the user.
+#
+local rule process-explicit-toolset-requests
+{
+ local extra-properties ;
+
+ local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ;
+ local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ;
+
+ for local t in $(option-toolsets) $(feature-toolsets)
+ {
+ # Parse toolset-version/properties.
+ local (t-v,t,v) = [ MATCH (([^-/]+)-?([^/]+)?)/?.* : $(t) ] ;
+ local toolset-version = $((t-v,t,v)[1]) ;
+ local toolset = $((t-v,t,v)[2]) ;
+ local version = $((t-v,t,v)[3]) ;
+
+ if $(.debug-config)
+ {
+ ECHO notice: [cmdline-cfg] Detected command-line request for
+ $(toolset-version): "toolset=" $(toolset) "version="
+ $(version) ;
+ }
+
+ # If the toolset is not known, configure it now.
+ local known ;
+ if $(toolset) in [ feature.values <toolset> ]
+ {
+ known = true ;
+ }
+ if $(known) && $(version) && ! [ feature.is-subvalue toolset
+ : $(toolset) : version : $(version) ]
+ {
+ known = ;
+ }
+ # TODO: we should do 'using $(toolset)' in case no version has been
+ # specified and there are no versions defined for the given toolset to
+ # allow the toolset to configure its default version. For this we need
+ # to know how to detect whether a given toolset has any versions
+ # defined. An alternative would be to do this whenever version is not
+ # specified but that would require that toolsets correctly handle the
+ # case when their default version is configured multiple times which
+ # should be checked for all existing toolsets first.
+
+ if ! $(known)
+ {
+ if $(.debug-config)
+ {
+ ECHO "notice: [cmdline-cfg] toolset $(toolset-version) not"
+ "previously configured; attempting to auto-configure now" ;
+ }
+ toolset.using $(toolset) : $(version) ;
+ }
+ else
+ {
+ if $(.debug-config)
+ {
+ ECHO notice: [cmdline-cfg] toolset $(toolset-version) already
+ configured ;
+ }
+ }
+
+ # Make sure we get an appropriate property into the build request in
+ # case toolset has been specified using the "--toolset=..." command-line
+ # option form.
+ if ! $(t) in $(.argv) && ! $(t) in $(feature-toolsets)
+ {
+ if $(.debug-config)
+ {
+ ECHO notice: [cmdline-cfg] adding toolset=$(t) to the build
+ request. ;
+ }
+ extra-properties += toolset=$(t) ;
+ }
+ }
+
+ return $(extra-properties) ;
+}
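For reference, the two request forms this rule parses correspond to invocations like the following (toolset names and versions are illustrative):

    bjam --toolset=gcc-4.6,msvc release   # option form: auto-configures gcc-4.6 and msvc if needed
    bjam toolset=gcc variant=release      # feature form: goes straight into the build request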
+
+
+# Returns 'true' if the given 'project' is equal to or is a (possibly indirect)
+# child of any of the projects requested to be cleaned in this build system run.
+# Returns 'false' otherwise. Expects the .project-targets list to have already
+# been constructed.
+#
+local rule should-clean-project ( project )
+{
+ if ! $(.should-clean-project.$(project))
+ {
+ local r = false ;
+ if $(project) in $(.project-targets)
+ {
+ r = true ;
+ }
+ else
+ {
+ local parent = [ project.attribute $(project) parent-module ] ;
+ if $(parent) && $(parent) != user-config
+ {
+ r = [ should-clean-project $(parent) ] ;
+ }
+ }
+ .should-clean-project.$(project) = $(r) ;
+ }
+
+ return $(.should-clean-project.$(project)) ;
+}
+
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+{
+ if --version in $(.argv)
+ {
+ version.print ;
+ EXIT ;
+ }
+
+ version.verify-engine-version ;
+
+ load-configuration-files ;
+
+ local extra-properties ;
+ # Note that this causes --toolset options to be ignored if --ignore-config
+ # is specified.
+ if ! $(.legacy-ignore-config)
+ {
+ extra-properties = [ process-explicit-toolset-requests ] ;
+ }
+
+
+ # We always load the project in "." so that 'use-project' directives have any
+ # chance of being seen. Otherwise, we would not be able to refer to
+ # subprojects using target ids.
+ local current-project ;
+ if [ project.find "." : "." ]
+ {
+ current-project = [ project.target [ project.load "." ] ] ;
+ }
+
+
+ # If no toolsets are currently defined, make the build run using the
+ # default toolset.
+ if ! $(.legacy-ignore-config) && ! [ feature.values <toolset> ]
+ {
+ local default-toolset = $(.default-toolset) ;
+ local default-toolset-version = ;
+ if $(default-toolset)
+ {
+ default-toolset-version = $(.default-toolset-version) ;
+ }
+ else
+ {
+ default-toolset = gcc ;
+ if [ os.name ] = NT
+ {
+ default-toolset = msvc ;
+ }
+ else if [ os.name ] = MACOSX
+ {
+ default-toolset = darwin ;
+ }
+ }
+
+ ECHO "warning: No toolsets are configured." ;
+ ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ;
+ ECHO "warning: If the default is wrong, your build may not work correctly." ;
+ ECHO "warning: Use the \"toolset=xxxxx\" option to override our guess." ;
+ ECHO "warning: For more configuration options, please consult" ;
+ ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
+
+ toolset.using $(default-toolset) : $(default-toolset-version) ;
+ }
+
+
+ # Parse command line for targets and properties. Note that this requires
+ # that all project files already be loaded.
+ local build-request = [ build-request.from-command-line $(.argv)
+ $(extra-properties) ] ;
+ local target-ids = [ $(build-request).get-at 1 ] ;
+ local properties = [ $(build-request).get-at 2 ] ;
+
+
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ if $(properties)
+ {
+ expanded = [ build-request.expand-no-defaults $(properties) ] ;
+ local xexpanded ;
+ for local e in $(expanded)
+ {
+ xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
+ }
+ expanded = $(xexpanded) ;
+ }
+ else
+ {
+ expanded = [ property-set.empty ] ;
+ }
+
+
+ # Check that we actually found something to build.
+ if ! $(current-project) && ! $(target-ids)
+ {
+ errors.user-error "error: no Jamfile in current directory found, and no"
+ "target references specified." ;
+ EXIT ;
+ }
+
+
+ # Flags indicating that this build system run has been started in order to
+ # clean existing targets instead of creating new ones. Note that these are
+ # not the final flag values, as they may get changed later on due to some
+ # special targets being specified on the command line.
+ local clean ; if "--clean" in $(.argv) { clean = true ; }
+ local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; }
+
+
+ # List of explicitly requested files to build. Any target references read
+ # from the command line parameter not recognized as one of the targets
+ # defined in the loaded Jamfiles will be interpreted as an explicitly
+ # requested file to build. If any such files are explicitly requested then
+ # only those files and the targets they depend on will be built and they
+ # will be searched for among targets that would have been built had there
+ # been no explicitly requested files.
+ local explicitly-requested-files ;
+
+
+ # List of Boost Build meta-targets, virtual-targets and actual Jam targets
+ # constructed in this build system run.
+ local targets ;
+ local virtual-targets ;
+ local actual-targets ;
+
+
+ # Process each target specified on the command-line and convert it into an
+ # internal Boost Build target object. Detect the special 'clean' target. If
+ # no main Boost Build targets were explicitly requested, use the current
+ # project as the target.
+ for local id in $(target-ids)
+ {
+ if $(id) = clean
+ {
+ clean = true ;
+ }
+ else
+ {
+ local t ;
+ if $(current-project)
+ {
+ t = [ $(current-project).find $(id) : no-error ] ;
+ }
+ else
+ {
+ t = [ find-target $(id) ] ;
+ }
+
+ if ! $(t)
+ {
+ ECHO "notice: could not find main target" $(id) ;
+ ECHO "notice: assuming it is the name of a file to create." ;
+ explicitly-requested-files += $(id) ;
+ }
+ else
+ {
+ targets += $(t) ;
+ }
+ }
+ }
+ if ! $(targets)
+ {
+ targets += [ project.target [ project.module-name "." ] ] ;
+ }
+
+ if [ option.get dump-generators : : true ]
+ {
+ generators.dump ;
+ }
+
+ # We wish to put config.log in the build directory corresponding
+ # to Jamroot, so that its location does not differ depending on
+ # the directory from which we build. The amount of indirection
+ # necessary here is scary.
+ local first-project = [ $(targets[0]).project ] ;
+ local first-project-root-location = [ $(first-project).get project-root ] ;
+ local first-project-root-module = [ project.load $(first-project-root-location) ] ;
+ local first-project-root = [ project.target $(first-project-root-module) ] ;
+ local first-build-build-dir = [ $(first-project-root).build-dir ] ;
+ configure.set-log-file $(first-build-build-dir)/config.log ;
+
+ # Now that we have a set of targets to build and a set of property sets to
+ # build the targets with, we can start the main build process by using each
+ # property set to generate virtual targets from all of our listed targets
+ # and any of their dependants.
+ for local p in $(expanded)
+ {
+ .command-line-free-features = [ property-set.create [ $(p).free ] ] ;
+ for local t in $(targets)
+ {
+ local g = [ $(t).generate $(p) ] ;
+ if ! [ class.is-a $(t) : project-target ]
+ {
+ .results-of-main-targets += $(g[2-]) ;
+ }
+ virtual-targets += $(g[2-]) ;
+ }
+ }
+
+
+ # Convert collected virtual targets into actual raw Jam targets.
+ for t in $(virtual-targets)
+ {
+ actual-targets += [ $(t).actualize ] ;
+ }
+
+
+ # If XML data output has been requested, prepare additional rules and
+ # targets so we can hook into Jam to collect build data while it is
+ # building and have it trigger the final XML report generation after all
+ # the planned targets have been built.
+ if $(.out-xml)
+ {
+ # Get a qualified virtual target name.
+ rule full-target-name ( target )
+ {
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+ local project-path = [ $(project).get location ] ;
+ return $(project-path)//$(name) ;
+ }
+
+ # Generate an XML file containing build statistics for each constituent.
+ #
+ rule out-xml ( xml-file : constituents * )
+ {
+ # Prepare valid XML header and footer with some basic info.
+ local nl = "
+" ;
+ local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
+ local timestamp = [ modules.peek : JAMDATE ] ;
+ local cwd = [ PWD ] ;
+ local command = $(.argv) ;
+ local bb-version = [ version.boost-build ] ;
+ .header on $(xml-file) =
+ "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
+ "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
+ "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
+ "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
+ "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
+ "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
+ ;
+ .footer on $(xml-file) =
+ "$(nl)</build>" ;
+
+ # Generate the target dependency graph.
+ .contents on $(xml-file) +=
+ "$(nl) <targets>" ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ local action = [ $(t).action ] ;
+ if $(action)
+ # If a target has no action, it has no dependencies.
+ {
+ local name = [ full-target-name $(t) ] ;
+ local sources = [ $(action).sources ] ;
+ local dependencies ;
+ for local s in $(sources)
+ {
+ dependencies += [ full-target-name $(s) ] ;
+ }
+
+ local path = [ $(t).path ] ;
+ local jam-target = [ $(t).actual-name ] ;
+
+ .contents on $(xml-file) +=
+ "$(nl) <target>"
+ "$(nl) <name><![CDATA[$(name)]]></name>"
+ "$(nl) <dependencies>"
+ "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
+ "$(nl) </dependencies>"
+ "$(nl) <path><![CDATA[$(path)]]></path>"
+ "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
+ "$(nl) </target>"
+ ;
+ }
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </targets>" ;
+
+ # Build $(xml-file) after $(constituents). Do so even if a
+ # constituent action fails and regenerate the xml on every bjam run.
+ INCLUDES $(xml-file) : $(constituents) ;
+ ALWAYS $(xml-file) ;
+ __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ;
+ out-xml.generate $(xml-file) ;
+ }
+
+ # The actual build actions are here; if we did this work in the actions
+ # clause we would have to form a valid command line containing the
+ # result of @(...) below (the name of the XML file).
+ #
+ rule out-xml.generate-action ( args * : xml-file
+ : command status start end user system : output ? )
+ {
+ local contents =
+ [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
+ local f = @($(xml-file):E=$(contents)) ;
+ }
+
+ # Nothing to do here; the *real* actions happen in
+ # out-xml.generate-action.
+ actions quietly out-xml.generate { }
+
+ # Define the out-xml file target, which depends on all the targets so
+ # that it runs the collection after the targets have run.
+ out-xml $(.out-xml) : $(actual-targets) ;
+
+ # Set up a global __ACTION_RULE__ that records all the available
+ # statistics about each actual target in a variable "on" the --out-xml
+ # target.
+ #
+ rule out-xml.collect ( xml-file : target : command status start end user
+ system : output ? )
+ {
+ local nl = "
+" ;
+ # Open the action with some basic info.
+ .contents on $(xml-file) +=
+ "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
+
+ # If we have an action object we can print out more detailed info.
+ local action = [ on $(target) return $(.action) ] ;
+ if $(action)
+ {
+ local action-name = [ $(action).action-name ] ;
+ local action-sources = [ $(action).sources ] ;
+ local action-props = [ $(action).properties ] ;
+
+ # The qualified name of the action with which we created the target.
+ .contents on $(xml-file) +=
+ "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
+
+ # The sources that made up the target.
+ .contents on $(xml-file) +=
+ "$(nl) <sources>" ;
+ for local source in $(action-sources)
+ {
+ local source-actual = [ $(source).actual-name ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </sources>" ;
+
+ # The properties that define the conditions under which the
+ # target was built.
+ .contents on $(xml-file) +=
+ "$(nl) <properties>" ;
+ for local prop in [ $(action-props).raw ]
+ {
+ local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </properties>" ;
+ }
+
+ local locate = [ on $(target) return $(LOCATE) ] ;
+ locate ?= "" ;
+ .contents on $(xml-file) +=
+ "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
+ "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
+ "$(nl) <command><![CDATA[$(command)]]></command>"
+ "$(nl) <output><![CDATA[$(output)]]></output>" ;
+ .contents on $(xml-file) +=
+ "$(nl) </action>" ;
+ }
+
+ # When no __ACTION_RULE__ is set "on" a target, the search falls back to
+ # the global module.
+ module
+ {
+ __ACTION_RULE__ = build-system.out-xml.collect
+ [ modules.peek build-system : .out-xml ] ;
+ }
+
+ IMPORT
+ build-system :
+ out-xml.collect
+ out-xml.generate-action
+ : :
+ build-system.out-xml.collect
+ build-system.out-xml.generate-action
+ ;
+ }
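The machinery above is only active when bjam is invoked with the --out-xml option matched near the top of this file; an illustrative invocation (the file name is arbitrary):

    bjam --out-xml=build-report.xml release

The named file then receives the target dependency graph plus the per-action status, timing and output collected by out-xml.collect.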
+
+ local j = [ option.get jobs ] ;
+ if $(j)
+ {
+ modules.poke : PARALLELISM : $(j) ;
+ }
+
+ local k = [ option.get keep-going : true : true ] ;
+ if $(k) in "on" "yes" "true"
+ {
+ modules.poke : KEEP_GOING : 1 ;
+ }
+ else if $(k) in "off" "no" "false"
+ {
+ modules.poke : KEEP_GOING : 0 ;
+ }
+ else
+ {
+ ECHO "error: Invalid value for the --keep-going option" ;
+ EXIT ;
+ }
+
+ # The 'all' pseudo target is not strictly needed except in the case when we
+ # use it below, but people often assume they always have this target
+ # available and do not declare it themselves before use, which may cause
+ # build failures with an error message about not being able to build the
+ # 'all' target.
+ NOTFILE all ;
+
+ # And now that all the actual raw Jam targets and all the dependencies
+ # between them have been prepared all that is left is to tell Jam to update
+ # those targets.
+ if $(explicitly-requested-files)
+ {
+ # Note that this case cannot be joined with the regular one when only
+ # exact Boost Build targets are requested, as here we do not build those
+ # requested targets but only use them to construct the dependency tree
+ # needed to build the explicitly requested files.
+ UPDATE $(explicitly-requested-files:G=e) $(.out-xml) ;
+ }
+ else if $(cleanall)
+ {
+ UPDATE clean-all ;
+ }
+ else if $(clean)
+ {
+ common.Clean clean : [ actual-clean-targets ] ;
+ UPDATE clean ;
+ }
+ else
+ {
+ configure.print-configure-checks-summary ;
+
+ if $(.pre-build-hook)
+ {
+ $(.pre-build-hook) ;
+ }
+
+ DEPENDS all : $(actual-targets) ;
+ if UPDATE_NOW in [ RULENAMES ]
+ {
+ local ok = [ UPDATE_NOW all $(.out-xml) ] ;
+ if $(.post-build-hook)
+ {
+ $(.post-build-hook) $(ok) ;
+ }
+ # Prevent automatic update of the 'all' target, now that
+ # we have explicitly updated what we wanted.
+ UPDATE ;
+ }
+ else
+ {
+ UPDATE all $(.out-xml) ;
+ }
+ }
+}
diff --git a/jam-files/boost-build/build/ac.jam b/jam-files/boost-build/build/ac.jam
new file mode 100644
index 000000000..6768f358c
--- /dev/null
+++ b/jam-files/boost-build/build/ac.jam
@@ -0,0 +1,198 @@
+# Copyright (c) 2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property-set ;
+import path ;
+import modules ;
+import "class" ;
+import errors ;
+import configure ;
+
+rule find-include-path ( variable : properties : header
+ : provided-path ? )
+{
+ # FIXME: document which properties affect this function by
+ # default.
+ local target-os = [ $(properties).get <target-os> ] ;
+ properties = [ property-set.create <target-os>$(toolset) ] ;
+ if $($(variable)-$(properties))
+ {
+ return $($(variable)-$(properties)) ;
+ }
+ else
+ {
+ provided-path ?= [ modules.peek : $(variable) ] ;
+ includes = $(provided-path) ;
+ includes += [ $(properties).get <include> ] ;
+ if [ $(properties).get <target-os> ] != windows
+ {
+ # FIXME: use sysroot
+ includes += /usr/include ;
+ }
+
+ local result ;
+ while ! $(result) && $(includes)
+ {
+ local f = [ path.root $(header) $(includes[1]) ] ;
+ ECHO "Checking " $(f) ;
+ if [ path.exists $(f) ]
+ {
+ result = $(includes[1]) ;
+ }
+ else if $(provided-path)
+ {
+ errors.user-error "Could not find header" $(header)
+ : "in the user-specified directory" $(provided-path) ;
+ }
+ includes = $(includes[2-]) ;
+ }
+ $(variable)-$(properties) = $(result) ;
+ return $(result) ;
+ }
+}
+
+rule find-library ( variable : properties : names + : provided-path ? )
+{
+ local target-os = [ $(properties).get <target-os> ] ;
+ properties = [ property-set.create <target-os>$(toolset) ] ;
+ if $($(variable)-$(properties))
+ {
+ return $($(variable)-$(properties)) ;
+ }
+ else
+ {
+ provided-path ?= [ modules.peek : $(variable) ] ;
+ paths = $(provided-path) ;
+ paths += [ $(properties).get <library-path> ] ;
+ if [ $(properties).get <target-os> ] != windows
+ {
+ paths += /usr/lib /usr/lib32 /usr/lib64 ;
+ }
+
+ local result ;
+ while ! $(result) && $(paths)
+ {
+ while ! $(result) && $(names)
+ {
+ local f ;
+ if $(target-os) = windows
+ {
+ f = $(paths[1])/$(names[1]).lib ;
+ if [ path.exists $(f) ]
+ {
+ result = $(f) ;
+ }
+ }
+ else
+ {
+ # FIXME: check for .a as well, depending on
+ # the 'link' feature.
+ f = $(paths[1])/lib$(names[1]).so ;
+ ECHO "CHECKING $(f) " ;
+ if [ path.exists $(f) ]
+ {
+ result = $(f) ;
+ }
+ }
+ if ! $(result) && $(provided-path)
+ {
+ errors.user-error "Could not find either of: " $(names)
+ : "in the user-specified directory" $(provided-path) ;
+
+ }
+ names = $(names[2-]) ;
+ }
+ paths = $(paths[2-]) ;
+ }
+ $(variable)-$(properties) = $(result) ;
+ return $(result) ;
+ }
+}
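find-include-path and find-library are normally driven by the ac-library class below, but they can also be called directly. A hypothetical sketch (the variable, header and library names are illustrative, not part of this module):

    local ps = [ property-set.create <target-os>linux ] ;
    local zlib-include = [ ac.find-include-path ZLIB_INCLUDE : $(ps) : zlib.h ] ;
    local zlib-library = [ ac.find-library ZLIB_LIBRARY : $(ps) : z zlib ] ;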
+
+class ac-library : basic-target
+{
+ import errors ;
+ import indirect ;
+ import virtual-target ;
+ import ac ;
+ import configure ;
+
+ rule __init__ ( name : project : * : * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources)
+ : $(requirements) ;
+
+ reconfigure $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule set-header ( header )
+ {
+ self.header = $(header) ;
+ }
+
+ rule set-default-names ( names + )
+ {
+ self.default-names = $(names) ;
+ }
+
+ rule reconfigure ( * : * )
+ {
+ ECHO "XXX" $(1) ;
+ if ! $(1)
+ {
+ # This is 'using xxx ;'. Nothing to configure, really.
+ }
+ else
+ {
+ for i in 1 2 3 4 5 6 7 8 9
+ {
+ # FIXME: this naming is inconsistent with XXX_INCLUDE/XXX_LIBRARY
+ if ! ( $($(i)[1]) in root include-path library-path library-name condition )
+ {
+ errors.user-error "Invalid named parameter" $($(i)[1]) ;
+ }
+ local name = $($(i)[1]) ;
+ local value = $($(i)[2-]) ;
+ if $($(name)) && $($(name)) != $(value)
+ {
+ errors.user-error "Attempt to change value of '$(name)'" ;
+ }
+ $(name) = $(value) ;
+ }
+
+ include-path ?= $(root)/include ;
+ library-path ?= $(root)/lib ;
+ }
+ }
+
+ rule construct ( name : sources * : property-set )
+ {
+ # FIXME: log results.
+ local libnames = $(library-name) ;
+ if ! $(libnames) && ! $(include-path) && ! $(library-path)
+ {
+ libnames = [ modules.peek : $(name:U)_NAME ] ;
+ # Backward compatibility only.
+ libnames ?= [ modules.peek : $(name:U)_BINARY ] ;
+ }
+ libnames ?= $(self.default-names) ;
+
+ local includes = [
+ ac.find-include-path $(name:U)_INCLUDE : $(property-set) : $(self.header) : $(include-path) ] ;
+ local library = [ ac.find-library $(name:U)_LIBRARY : $(property-set) : $(libnames) : $(library-path) ] ;
+ if $(includes) && $(library)
+ {
+ library = [ virtual-target.from-file $(library) : . : $(self.project) ] ;
+ configure.log-library-search-result $(name) : "found" ;
+ return [ property-set.create <include>$(includes) <source>$(library) ] ;
+ }
+ else
+ {
+ configure.log-library-search-result $(name) : "not found" ;
+ }
+ }
+}
+
diff --git a/jam-files/boost-build/build/alias.jam b/jam-files/boost-build/build/alias.jam
new file mode 100644
index 000000000..48019cb98
--- /dev/null
+++ b/jam-files/boost-build/build/alias.jam
@@ -0,0 +1,73 @@
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'alias' rule and the associated target class.
+#
+# Alias is just a main target which returns its source targets without any
+# processing. For example:
+#
+# alias bin : hello test_hello ;
+# alias lib : helpers xml_parser ;
+#
+# Another important use of 'alias' is to conveniently group source files:
+#
+# alias platform-src : win.cpp : <os>NT ;
+# alias platform-src : linux.cpp : <os>LINUX ;
+# exe main : main.cpp platform-src ;
+#
+# Lastly, it is possible to create a local alias for some target, with different
+# properties:
+#
+# alias big_lib : : @/external_project/big_lib/<link>static ;
+#
+
+import "class" : new ;
+import project ;
+import property-set ;
+import targets ;
+
+
+class alias-target-class : basic-target
+{
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ return [ property-set.empty ] $(source-targets) ;
+ }
+
+ rule compute-usage-requirements ( subvariant )
+ {
+ local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
+ return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
+ }
+}
+
+
+# Declares the 'alias' target. It will process its source virtual-targets by
+# returning them unaltered as its own constructed virtual-targets.
+#
+rule alias ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new alias-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project)
+ ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ]
+ ] ;
+}
+
+
+IMPORT $(__name__) : alias : : alias ;
diff --git a/jam-files/boost-build/build/build-request.jam b/jam-files/boost-build/build/build-request.jam
new file mode 100644
index 000000000..8a1f7b0eb
--- /dev/null
+++ b/jam-files/boost-build/build/build-request.jam
@@ -0,0 +1,322 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import sequence ;
+import set ;
+import regex ;
+import feature ;
+import property ;
+import container ;
+import string ;
+
+
+# Transform property-set by applying f to each component property.
+#
+local rule apply-to-property-set ( f property-set )
+{
+ local properties = [ feature.split $(property-set) ] ;
+ return [ string.join [ $(f) $(properties) ] : / ] ;
+}
+
+
+# Expand the given build request by combining all property-sets which do not
+# specify conflicting non-free features. Expects all the project files to
+# already be loaded.
+#
+rule expand-no-defaults ( property-sets * )
+{
+ # First make all features and subfeatures explicit.
+ local expanded-property-sets = [ sequence.transform apply-to-property-set
+ feature.expand-subfeatures : $(property-sets) ] ;
+
+ # Now combine all of the expanded property-sets
+ local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
+
+ return $(product) ;
+}
+
+
+# Implementation of x-product, below. Expects all the project files to already
+# be loaded.
+#
+local rule x-product-aux ( property-sets + )
+{
+ local result ;
+ local p = [ feature.split $(property-sets[1]) ] ;
+ local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
+ local seen ;
+ # No conflict with things used at a higher level?
+ if ! [ set.intersection $(f) : $(x-product-used) ]
+ {
+ local x-product-seen ;
+ {
+ # Do not mix in any conflicting features.
+ local x-product-used = $(x-product-used) $(f) ;
+
+ if $(property-sets[2])
+ {
+ local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
+ result = $(property-sets[1])/$(rest) ;
+ }
+
+ result ?= $(property-sets[1]) ;
+ }
+
+ # If we did not encounter a conflicting feature lower down, do not
+ # recurse again.
+ if ! [ set.intersection $(f) : $(x-product-seen) ]
+ {
+ property-sets = ;
+ }
+
+ seen = $(x-product-seen) ;
+ }
+
+ if $(property-sets[2])
+ {
+ result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
+ }
+
+ # Note that we have seen these features so that higher levels will recurse
+ # again without them set.
+ x-product-seen += $(f) $(seen) ;
+ return $(result) ;
+}
+
+
+# Return the cross-product of all elements of property-sets, less any that would
+# contain conflicting values for single-valued features. Expects all the project
+# files to already be loaded.
+#
+local rule x-product ( property-sets * )
+{
+ if $(property-sets).non-empty
+ {
+ # Prepare some "scoped globals" that can be used by the implementation
+ # function, x-product-aux.
+ local x-product-seen x-product-used ;
+ return [ x-product-aux $(property-sets) : $(feature-space) ] ;
+ }
+ # Otherwise return empty.
+}
+
+
+# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
+# an implicit value. Expects all the project files to already be loaded.
+#
+local rule looks-like-implicit-value ( v )
+{
+ if [ feature.is-implicit-value $(v) ]
+ {
+ return true ;
+ }
+ else
+ {
+ local split = [ regex.split $(v) - ] ;
+ if [ feature.is-implicit-value $(split[1]) ]
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Takes the command line tokens (such as taken from the ARGV rule) and
+# constructs a build request from them. Returns a vector of two vectors (where
+# "vector" means container.jam's "vector"). First is the set of targets
+# specified in the command line, and second is the set of requested build
+# properties. Expects all the project files to already be loaded.
+#
+rule from-command-line ( command-line * )
+{
+ local targets ;
+ local properties ;
+
+ command-line = $(command-line[2-]) ;
+ local skip-next = ;
+ for local e in $(command-line)
+ {
+ if $(skip-next)
+ {
+ skip-next = ;
+ }
+ else if ! [ MATCH "^(-).*" : $(e) ]
+ {
+ # Build request spec either has "=" in it or completely consists of
+ # implicit feature values.
+ local fs = feature-space ;
+ if [ MATCH "(.*=.*)" : $(e) ]
+ || [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
+ {
+ properties += [ convert-command-line-element $(e) :
+ $(feature-space) ] ;
+ }
+ else
+ {
+ targets += $(e) ;
+ }
+ }
+ else if [ MATCH "^(-[-ldjfsto])$" : $(e) ]
+ {
+ skip-next = true ;
+ }
+ }
+ return [ new vector
+ [ new vector $(targets) ]
+ [ new vector $(properties) ] ] ;
+}
+
+
+# Converts one element of command line build request specification into internal
+# form. Expects all the project files to already be loaded.
+#
+local rule convert-command-line-element ( e )
+{
+ local result ;
+ local parts = [ regex.split $(e) "/" ] ;
+ while $(parts)
+ {
+ local p = $(parts[1]) ;
+ local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
+ local lresult ;
+ local feature ;
+ local values ;
+ if $(m)
+ {
+ feature = $(m[1]) ;
+ values = [ regex.split $(m[2]) "," ] ;
+ lresult = <$(feature)>$(values) ;
+ }
+ else
+ {
+ lresult = [ regex.split $(p) "," ] ;
+ }
+
+ if $(feature) && free in [ feature.attributes $(feature) ]
+ {
+ # If we have a free feature, then the value is everything
+ # until the end of the command line token. Slashes in
+ # the following string are not taken to mean separation
+ # of properties. Commas are also not interpreted specially.
+ values = $(values:J=,) ;
+ values = $(values) $(parts[2-]) ;
+ values = $(values:J=/) ;
+ lresult = <$(feature)>$(values) ;
+ parts = ;
+ }
+
+ if ! [ MATCH (.*-.*) : $(p) ]
+ {
+ # property.validate cannot handle subfeatures, so we avoid the check
+ # here.
+ for local p in $(lresult)
+ {
+ property.validate $(p) : $(feature-space) ;
+ }
+ }
+
+ if ! $(result)
+ {
+ result = $(lresult) ;
+ }
+ else
+ {
+ result = $(result)/$(lresult) ;
+ }
+
+ parts = $(parts[2-]) ;
+ }
+
+ return $(result) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import feature ;
+
+ feature.prepare-test build-request-test-temp ;
+
+ import build-request ;
+ import build-request : expand-no-defaults : build-request.expand-no-defaults ;
+ import errors : try catch ;
+ import feature : feature subfeature ;
+
+ feature toolset : gcc msvc borland : implicit ;
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
+ 3.0 3.0.1 3.0.2 : optional ;
+
+ feature variant : debug release : implicit composite ;
+ feature inlining : on off ;
+ feature "include" : : free ;
+
+ feature stdlib : native stlport : implicit ;
+
+ feature runtime-link : dynamic static : symmetric ;
+
+ # Empty build requests should expand to empty.
+ assert.result
+ : build-request.expand-no-defaults ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ <variant>debug/<toolset>msvc/<stdlib>stlport
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
+ : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
+
+ assert.result
+ <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
+ : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
+
+ local r ;
+
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+
+ try ;
+ {
+ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
+ }
+ catch \"static\" is not a value of an implicit feature ;
+
+ r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : target ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
+ gcc/<runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
+ borland/<runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam gcc-3.0 ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
+
+ feature.finish-test build-request-test-temp ;
+}
diff --git a/jam-files/boost-build/build/configure.jam b/jam-files/boost-build/build/configure.jam
new file mode 100644
index 000000000..14c1328af
--- /dev/null
+++ b/jam-files/boost-build/build/configure.jam
@@ -0,0 +1,237 @@
+# Copyright (c) 2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines functions to help with two main tasks:
+#
+# - Discovering build-time configuration for the purposes of adjusting the
+# build process.
+# - Reporting what is built, and how it is configured.
+
+import targets ;
+import errors ;
+import targets ;
+import sequence ;
+import property ;
+import property-set ;
+import "class" : new ;
+import common ;
+import path ;
+
+rule log-summary ( )
+{
+
+}
+
+.width = 30 ;
+
+rule set-width ( width )
+{
+ .width = $(width) ;
+}
+
+# Declare that the components specified by the parameter exist.
+rule register-components ( components * )
+{
+ .components += $(components) ;
+}
+
+# Declare that the components specified by the parameters will
+# be built.
+rule components-building ( components * )
+{
+ .built-components += $(components) ;
+}
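# A minimal usage sketch (hypothetical component names, not part of this
# module): a Jamroot can register its optional components and mark the ones
# that will actually be built, and print-component-configuration then reports
# one line per component.
#
#   import configure ;
#   configure.register-components mylib mytool ;
#   configure.components-building mylib ;
#   configure.print-component-configuration ;  # normally invoked by the build system itself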
+
+# Report something about the component configuration that the
+# user should know about.
+rule log-component-configuration ( component : message )
+{
+ # FIXME: implement per-property-set logs
+ .component-logs.$(component) += $(message) ;
+}
+
+
+
+rule log-check-result ( result )
+{
+ if ! $(.announced-checks)
+ {
+ ECHO "Performing configuration checks\n" ;
+ .announced-checks = 1 ;
+ }
+
+ ECHO $(result) ;
+ #.check-results += $(result) ;
+}
+
+rule log-library-search-result ( library : result )
+{
+ local x = [ PAD " - $(library) : $(result)" : $(.width) ] ;
+ log-check-result "$(x)" ;
+}
+
+rule print-component-configuration ( )
+{
+ local c = [ sequence.unique $(.components) ] ;
+
+ ECHO "\nComponent configuration:\n" ;
+ for c in $(.components)
+ {
+ local s ;
+ if $(c) in $(.built-components)
+ {
+ s = "building" ;
+ }
+ else
+ {
+ s = "not building" ;
+ }
+ ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ;
+ for local m in $(.component-logs.$(c))
+ {
+ ECHO " -" $(m) ;
+ }
+ }
+ ECHO ;
+}
+
+rule print-configure-checks-summary ( )
+{
+ # FIXME: the problem with this approach is that the user sees the
+ # checks summary only when all the checks are done, and has no progress
+ # reporting while the checks are being executed.
+ if $(.check-results)
+ {
+ ECHO "Configuration checks summary\n" ;
+
+ for local r in $(.check-results)
+ {
+ ECHO $(r) ;
+ }
+ ECHO ;
+ }
+}
+
+# Attempt to build the metatarget named by 'metatarget-reference'
+# in the context of 'project' with properties 'ps'.
+# Returns a non-empty value if the build succeeds.
+rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
+{
+ local result ;
+
+ if ! $(retry) && ! $(.$(what)-tested.$(ps))
+ {
+ .$(what)-tested.$(ps) = true ;
+
+ local targets = [ targets.generate-from-reference
+ $(metatarget-reference) : $(project) : $(ps) ] ;
+
+ local jam-targets ;
+ for local t in $(targets[2-])
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+
+ if ! UPDATE_NOW in [ RULENAMES ]
+ {
+ # Cannot determine. Assume existence.
+ }
+ else
+ {
+ local x = [ PAD " - $(what)" : $(.width) ] ;
+ if [ UPDATE_NOW $(jam-targets) :
+ $(.log-fd) : ignore-minus-n : ignore-minus-q ]
+ {
+ .$(what)-supported.$(ps) = yes ;
+ result = true ;
+ log-check-result "$(x) : yes" ;
+ }
+ else
+ {
+ log-check-result "$(x) : no" ;
+ }
+ }
+ return $(result) ;
+ }
+ else
+ {
+ return $(.$(what)-supported.$(ps)) ;
+ }
+}
+
+rule builds ( metatarget-reference : properties * : what ? : retry ? )
+{
+ what ?= "$(metatarget-reference) builds" ;
+
+ # FIXME: this should not be hardcoded. Other checks might
+ # want to consider a different set of features as relevant.
+ local toolset = [ property.select <toolset> : $(properties) ] ;
+ local toolset-version-property = "<toolset-$(toolset:G=):version>" ;
+ local relevant = [ property.select <target-os> <toolset> $(toolset-version-property)
+ <address-model> <architecture>
+ : $(properties) ] ;
+ local ps = [ property-set.create $(relevant) ] ;
+ local t = [ targets.current ] ;
+ local p = [ $(t).project ] ;
+
+ return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) : $(retry) ] ;
+}
+
+
+# Called by Boost.Build startup code to specify the name of a file
+# that will receive the results of configure checks. This rule
+# should never be called by users.
+rule set-log-file ( log-file )
+{
+ path.makedirs [ path.parent $(log-file) ] ;
+
+ .log-fd = [ FILE_OPEN $(log-file) : "w" ] ;
+}
+
+# Frontend rules
+
+class check-target-builds-worker
+{
+ import configure ;
+ import property-set ;
+ import targets ;
+ import property ;
+
+ rule __init__ ( target message ? : true-properties * : false-properties * )
+ {
+ self.target = $(target) ;
+ self.message = $(message) ;
+ self.true-properties = $(true-properties) ;
+ self.false-properties = $(false-properties) ;
+ }
+
+ rule check ( properties * )
+ {
+ local choosen ;
+ if [ configure.builds $(self.target) : $(properties) : $(self.message) ]
+ {
+ choosen = $(self.true-properties) ;
+ }
+ else
+ {
+ choosen = $(self.false-properties) ;
+ }
+ return [ property.evaluate-conditionals-in-context $(choosen) : $(properties) ] ;
+ }
+}
+
+
+rule check-target-builds ( target message ? : true-properties * : false-properties * )
+{
+ local instance = [ new check-target-builds-worker $(target) $(message) : $(true-properties)
+ : $(false-properties) ] ;
+ return <conditional>@$(instance).check ;
+}
+
+IMPORT $(__name__) : check-target-builds : : check-target-builds ;
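# A minimal usage sketch (hypothetical Jamfile; 'has_lzma' is assumed to be a
# buildable probe target declared elsewhere in the project): the conditional
# property returned by check-target-builds picks one of the two property lists
# depending on whether the probe target builds.
#
#   exe app : app.cpp
#       : [ check-target-builds has_lzma "lzma support"
#           : <define>HAVE_LZMA : <define>NO_LZMA ]
#       ;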
+
+
diff --git a/jam-files/boost-build/build/feature.jam b/jam-files/boost-build/build/feature.jam
new file mode 100644
index 000000000..6f54adefb
--- /dev/null
+++ b/jam-files/boost-build/build/feature.jam
@@ -0,0 +1,1335 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import assert : * ;
+import "class" : * ;
+import errors : lol->list ;
+import indirect ;
+import modules ;
+import regex ;
+import sequence ;
+import set ;
+import utility ;
+
+
+local rule setup ( )
+{
+ .all-attributes =
+ implicit
+ composite
+ optional
+ symmetric
+ free
+ incidental
+ path
+ dependency
+ propagated
+ link-incompatible
+ subfeature
+ order-sensitive
+ ;
+
+ .all-features = ;
+ .all-subfeatures = ;
+ .all-top-features = ; # non-subfeatures
+ .all-implicit-values = ;
+}
+setup ;
+
+
+# Prepare a fresh space to test in by moving all global variable settings into
+# the given temporary module and erasing them here.
+#
+rule prepare-test ( temp-module )
+{
+ DELETE_MODULE $(temp-module) ;
+
+ # Transfer globals to temp-module.
+ for local v in [ VARNAMES feature ]
+ {
+ if [ MATCH (\\.) : $(v) ]
+ {
+ modules.poke $(temp-module) : $(v) : $($(v)) ;
+ $(v) = ;
+ }
+ }
+ setup ;
+}
+
+
+# Clear out all global variables and recover all variables from the given
+# temporary module.
+#
+rule finish-test ( temp-module )
+{
+ # Clear globals.
+ for local v in [ VARNAMES feature ]
+ {
+ if [ MATCH (\\.) : $(v) ]
+ {
+ $(v) = ;
+ }
+ }
+
+ for local v in [ VARNAMES $(temp-module) ]
+ {
+ $(v) = [ modules.peek $(temp-module) : $(v) ] ;
+ }
+ DELETE_MODULE $(temp-module) ;
+}
+
+
+# Transform features by bracketing any elements which are not already bracketed
+# by "<>".
+#
+local rule grist ( features * )
+{
+ local empty = "" ;
+ return $(empty:G=$(features)) ;
+}
+
+
+# Declare a new feature with the given name, values, and attributes.
+#
+rule feature (
+ name # Feature name.
+ : values * # Allowable values - may be extended later using feature.extend.
+ : attributes * # Feature attributes (e.g. implicit, free, propagated...).
+)
+{
+ name = [ grist $(name) ] ;
+
+ local error ;
+
+ # Check for any unknown attributes.
+ if ! ( $(attributes) in $(.all-attributes) )
+ {
+ error = unknown attributes:
+ [ set.difference $(attributes) : $(.all-attributes) ] ;
+ }
+ else if $(name) in $(.all-features)
+ {
+ error = feature already defined: ;
+ }
+ else if implicit in $(attributes) && free in $(attributes)
+ {
+ error = free features cannot also be implicit ;
+ }
+ else if free in $(attributes) && propagated in $(attributes)
+ {
+ error = free features cannot be propagated ;
+ }
+ else
+ {
+ local m = [ MATCH (.*=.*) : $(values) ] ;
+ if $(m[1])
+ {
+ error = "feature value may not contain '='" ;
+ }
+ }
+
+ if $(error)
+ {
+ errors.error $(error)
+ : "in" feature declaration:
+ : feature [ lol->list $(1) : $(2) : $(3) ] ;
+ }
+
+ $(name).values ?= ;
+ $(name).attributes = $(attributes) ;
+ $(name).subfeatures ?= ;
+ $(attributes).features += $(name) ;
+
+ .all-features += $(name) ;
+ if subfeature in $(attributes)
+ {
+ .all-subfeatures += $(name) ;
+ }
+ else
+ {
+ .all-top-features += $(name) ;
+ }
+ extend $(name) : $(values) ;
+}
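# For illustration (hypothetical declarations in the style of the tests at the
# end of this module): a plain enumerated feature, a free feature and an
# implicit composite feature could be declared as
#
#   feature optimization : off on : propagated ;
#   feature define : : free ;
#   feature variant : debug release : implicit composite ;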
+
+
+# Sets the default value of the given feature, overriding any previous default.
+#
+rule set-default ( feature : value )
+{
+ local f = [ grist $(feature) ] ;
+ local a = $($(f).attributes) ;
+ local bad-attribute = ;
+ if free in $(a)
+ {
+ bad-attribute = free ;
+ }
+ else if optional in $(a)
+ {
+ bad-attribute = optional ;
+ }
+ if $(bad-attribute)
+ {
+ errors.error "$(bad-attribute) property $(f) cannot have a default." ;
+ }
+ if ! $(value) in $($(f).values)
+ {
+ errors.error "The specified default value, '$(value)' is invalid"
+ : "allowed values are: " $($(f).values) ;
+ }
+ $(f).default = $(value) ;
+}
+
+
+# Returns the default property values for the given features.
+#
+rule defaults ( features * )
+{
+ local result ;
+ for local f in $(features)
+ {
+ local gf = $(:E=:G=$(f)) ;
+ local a = $($(gf).attributes) ;
+ if ( free in $(a) ) || ( optional in $(a) )
+ {
+ }
+ else
+ {
+ result += $(gf)$($(gf).default) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns true iff all 'names' elements are valid features.
+#
+rule valid ( names + )
+{
+ if $(names) in $(.all-features)
+ {
+ return true ;
+ }
+}
+
+
+# Returns the attributes of the given feature.
+#
+rule attributes ( feature )
+{
+ return $($(:E=:G=$(feature)).attributes) ;
+}
+
+
+# Returns the values of the given feature.
+#
+rule values ( feature )
+{
+ return $($(:E=:G=$(feature)).values) ;
+}
+
+
+# Returns true iff 'value-string' is a value-string of an implicit feature.
+#
+rule is-implicit-value ( value-string )
+{
+ local v = [ regex.split $(value-string) - ] ;
+ local failed ;
+ if ! $(v[1]) in $(.all-implicit-values)
+ {
+ failed = true ;
+ }
+ else
+ {
+ local feature = $($(v[1]).implicit-feature) ;
+ for local subvalue in $(v[2-])
+ {
+ if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ]
+ {
+ failed = true ;
+ }
+ }
+ }
+
+ if ! $(failed)
+ {
+ return true ;
+ }
+}
+
+
+# Returns the implicit feature associated with the given implicit value.
+#
+rule implied-feature ( implicit-value )
+{
+ local components = [ regex.split $(implicit-value) "-" ] ;
+
+ local feature = $($(components[1]).implicit-feature) ;
+ if ! $(feature)
+ {
+ errors.error \"$(implicit-value)\" is not a value of an implicit feature ;
+ feature = "" ; # Keep testing happy; it expects a result.
+ }
+ return $(feature) ;
+}
+
+
+local rule find-implied-subfeature ( feature subvalue : value-string ? )
+{
+ # Feature should be of the form <feature-name>.
+ if $(feature) != $(feature:G)
+ {
+ errors.error invalid feature $(feature) ;
+ }
+
+ return $($(feature)$(value-string:E="")<>$(subvalue).subfeature) ;
+}
+
+
+# Given a feature and a value of one of its subfeatures, find the name of the
+# subfeature. If value-string is supplied, looks for implied subfeatures that
+# are specific to that value of the feature.
+#
+rule implied-subfeature (
+ feature # The main feature name.
+ subvalue # The value of one of its subfeatures.
+ : value-string ? # The value of the main feature.
+)
+{
+ local subfeature = [ find-implied-subfeature $(feature) $(subvalue)
+ : $(value-string) ] ;
+ if ! $(subfeature)
+ {
+ value-string ?= "" ;
+ errors.error \"$(subvalue)\" is not a known subfeature value of
+ $(feature)$(value-string) ;
+ }
+ return $(subfeature) ;
+}
+
+
+# Generate an error if the feature is unknown.
+#
+local rule validate-feature ( feature )
+{
+ if ! $(feature) in $(.all-features)
+ {
+ errors.error unknown feature \"$(feature)\" ;
+ }
+}
+
+
+# Given a feature and its value or just a value corresponding to an implicit
+# feature, returns a property set consisting of all component subfeatures and
+# their values. For example all the following calls:
+#
+# expand-subfeatures-aux <toolset>gcc-2.95.2-linux-x86
+# expand-subfeatures-aux gcc-2.95.2-linux-x86
+#
+# return:
+#
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+#
+local rule expand-subfeatures-aux (
+ feature ? # Feature name or empty if value corresponds to an
+ # implicit property.
+ : value # Feature value.
+ : dont-validate ? # If set, no value string validation will be done.
+)
+{
+ if $(feature)
+ {
+ feature = $(feature) ;
+ }
+
+ if ! $(feature)
+ {
+ feature = [ implied-feature $(value) ] ;
+ }
+ else
+ {
+ validate-feature $(feature) ;
+ }
+ if ! $(dont-validate)
+ {
+ validate-value-string $(feature) $(value) ;
+ }
+
+ local components = [ regex.split $(value) "-" ] ;
+
+ # Get the top-level feature's value.
+ local value = $(components[1]:G=) ;
+
+ local result = $(components[1]:G=$(feature)) ;
+
+ local subvalues = $(components[2-]) ;
+ while $(subvalues)
+ {
+ local subvalue = $(subvalues[1]) ; # Pop the head off of subvalues.
+ subvalues = $(subvalues[2-]) ;
+
+ local subfeature = [ find-implied-subfeature $(feature) $(subvalue) :
+ $(value) ] ;
+
+ # If no subfeature was found reconstitute the value string and use that.
+ if ! $(subfeature)
+ {
+ result = $(components:J=-) ;
+ result = $(result:G=$(feature)) ;
+ subvalues = ; # Stop looping.
+ }
+ else
+ {
+ local f = [ MATCH ^<(.*)>$ : $(feature) ] ;
+ result += $(subvalue:G=$(f)-$(subfeature)) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Make all elements of properties corresponding to implicit features explicit,
+# and express all subfeature values as separate properties in their own right.
+# For example, all of the following properties
+#
+# gcc-2.95.2-linux-x86
+# <toolset>gcc-2.95.2-linux-x86
+#
+# might expand to
+#
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+#
+rule expand-subfeatures (
+ properties * # Property set with elements of the form
+ # <feature>value-string or just value-string in the case
+ # of implicit features.
+ : dont-validate ?
+)
+{
+ local result ;
+ for local p in $(properties)
+ {
+ # Don't expand subfeatures in subfeatures
+ if ! [ MATCH "(:)" : $(p:G) ]
+ {
+ result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Helper for extend, below. Handles the feature case.
+#
+local rule extend-feature ( feature : values * )
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+ if implicit in $($(feature).attributes)
+ {
+ for local v in $(values)
+ {
+ if $($(v).implicit-feature)
+ {
+ errors.error $(v) is already associated with the \"$($(v).implicit-feature)\" feature ;
+ }
+ $(v).implicit-feature = $(feature) ;
+ }
+
+ .all-implicit-values += $(values) ;
+ }
+ if ! $($(feature).values)
+ {
+ # This is the first value specified for this feature, so make it the
+ # default.
+ $(feature).default = $(values[1]) ;
+ }
+ $(feature).values += $(values) ;
+}
+
+
+# Checks that value-string is a valid value-string for the given feature.
+#
+rule validate-value-string ( feature value-string )
+{
+ if ! (
+ free in $($(feature).attributes)
+ || ( $(value-string) in $(feature).values )
+ )
+ {
+ local values = $(value-string) ;
+
+ if $($(feature).subfeatures)
+ {
+ if ! ( $(value-string) in $($(feature).values) )
+ && ! ( $(value-string) in $($(feature).subfeatures) )
+ {
+ values = [ regex.split $(value-string) - ] ;
+ }
+ }
+
+ if ! ( $(values[1]) in $($(feature).values) ) &&
+
+ # An empty value is allowed for optional features.
+ ( $(values[1]) || ! ( optional in $($(feature).attributes) ) )
+ {
+ errors.error \"$(values[1])\" is not a known value of feature $(feature)
+ : legal values: \"$($(feature).values)\" ;
+ }
+
+ for local v in $(values[2-])
+ {
+ # This will validate any subfeature values in value-string.
+ implied-subfeature $(feature) $(v) : $(values[1]) ;
+ }
+ }
+}
+
+
+# A helper that computes:
+# * the name(s) of the module-local variable(s) used to record the
+# correspondence between subvalue(s) and a subfeature, and
+# * the value such a variable holds once the subfeature/subvalue has been
+# defined,
+# and returns a list consisting of the latter followed by the former.
+#
+local rule subvalue-var (
+ feature # Main feature name.
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the subfeature values are valid.
+ : subfeature # Subfeature name.
+ : subvalues * # Subfeature values.
+)
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+ if $(value-string)
+ {
+ validate-value-string $(feature) $(value-string) ;
+ }
+
+ local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
+
+ return $(subfeature-name)
+ $(feature)$(value-string:E="")<>$(subvalues).subfeature ;
+}
+
+
+# Extends the given subfeature with the subvalues. If the optional value-string
+# is provided, the subvalues are only valid for the given value of the feature.
+# Thus, you could say that <target-platform>mingw is specific to
+# <toolset>gcc-2.95.2 as follows:
+#
+# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
+#
+rule extend-subfeature (
+ feature # The feature whose subfeature is being extended.
+
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the new subfeature values are valid.
+
+ : subfeature # Subfeature name.
+ : subvalues * # Additional subfeature values.
+)
+{
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalues) ] ;
+
+ local f = [ utility.ungrist [ grist $(feature) ] ] ;
+ extend $(f)-$(subfeature-vars[1]) : $(subvalues) ;
+
+ # Provide a way to get from the given feature or property and subfeature
+ # value to the subfeature name.
+ $(subfeature-vars[2-]) = $(subfeature-vars[1]) ;
+}
+
+
+# Returns true iff the subvalues are valid for the feature. When the optional
+# value-string is provided, returns true iff the subvalues are valid for the
+# given value of the feature.
+#
+rule is-subvalue ( feature : value-string ? : subfeature : subvalue )
+{
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalue) ] ;
+
+ if $($(subfeature-vars[2])) = $(subfeature-vars[1])
+ {
+ return true ;
+ }
+}
+
+
+# Can be called three ways:
+#
+# 1. extend feature : values *
+# 2. extend <feature> subfeature : values *
+# 3. extend <feature>value-string subfeature : values *
+#
+# * Form 1 adds the given values to the given feature.
+# * Forms 2 and 3 add subfeature values to the given feature.
+# * Form 3 adds the subfeature values as specific to the given property
+# value-string.
+#
+rule extend ( feature-or-property subfeature ? : values * )
+{
+ local feature ; # If a property was specified this is its feature.
+ local value-string ; # E.g., the gcc-2.95.2 part of <toolset>gcc-2.95.2.
+
+ # If a property was specified.
+ if $(feature-or-property:G) && $(feature-or-property:G=)
+ {
+ # Extract the feature and value-string, if any.
+ feature = $(feature-or-property:G) ;
+ value-string = $(feature-or-property:G=) ;
+ }
+ else
+ {
+ feature = [ grist $(feature-or-property) ] ;
+ }
+
+ # Dispatch to the appropriate handler.
+ if $(subfeature)
+ {
+ extend-subfeature $(feature) $(value-string) : $(subfeature)
+ : $(values) ;
+ }
+ else
+ {
+ # If no subfeature was specified, we do not expect to see a
+ # value-string.
+ if $(value-string)
+ {
+ errors.error can only specify a property as the first argument when
+ extending a subfeature
+ : usage:
+ : " extend" feature ":" values...
+ : " | extend" <feature>value-string subfeature ":" values...
+ ;
+ }
+
+ extend-feature $(feature) : $(values) ;
+ }
+}
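# For illustration, the three call forms listed above (hypothetical values;
# 'toolset' is assumed to be an implicit feature whose 'version' subfeature has
# already been declared, both generically and specifically for gcc):
#
#   extend toolset : clang ;                # form 1: add a value to <toolset>
#   extend <toolset> version : 4.2 ;        # form 2: add a subfeature value
#   extend <toolset>gcc version : 4.2.1 ;   # form 3: value specific to <toolset>gcc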
+
+
+local rule get-subfeature-name ( subfeature value-string ? )
+{
+ local prefix = $(value-string): ;
+ return $(prefix:E="")$(subfeature) ;
+}
+
+
+# Declares a subfeature.
+#
+rule subfeature (
+ feature # Root feature that is not a subfeature.
+ value-string ? # A value-string specifying which feature or subfeature
+ # values this subfeature is specific to, if any.
+ : subfeature # The name of the subfeature being declared.
+ : subvalues * # The allowed values of this subfeature.
+ : attributes * # The attributes of the subfeature.
+)
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+
+ # Add grist to the subfeature name if a value-string was supplied.
+ local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
+
+ if $(subfeature-name) in $($(feature).subfeatures)
+ {
+ errors.error \"$(subfeature)\" already declared as a subfeature of \"$(feature)\"
+ "specific to "$(value-string) ;
+ }
+ $(feature).subfeatures += $(subfeature-name) ;
+
+ # First declare the subfeature as a feature in its own right.
+ local f = [ utility.ungrist $(feature) ] ;
+ feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ;
+
+ # Now make sure the subfeature values are known.
+ extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ;
+}
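# For illustration (matching the declarations used in the tests at the end of
# this module): a 'version' subfeature valid only for the gcc value of the
# toolset feature is declared as
#
#   subfeature toolset gcc : version : 2.95.2 2.95.3 3.0 ;
#
# which behind the scenes creates the <toolset-gcc:version> feature.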
+
+
+# Set components of the given composite property.
+#
+rule compose ( composite-property : component-properties * )
+{
+ local feature = $(composite-property:G) ;
+ if ! ( composite in [ attributes $(feature) ] )
+ {
+ errors.error "$(feature)" is not a composite feature ;
+ }
+
+ $(composite-property).components ?= ;
+ if $($(composite-property).components)
+ {
+ errors.error components of "$(composite-property)" already set:
+ $($(composite-property).components) ;
+ }
+
+ if $(composite-property) in $(component-properties)
+ {
+ errors.error composite property "$(composite-property)" cannot have itself as a component ;
+ }
+ $(composite-property).components = $(component-properties) ;
+}
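# For illustration (mirroring the tests at the end of this module), a composite
# variant value is given its component properties as
#
#   compose <variant>debug : <define>_DEBUG <optimization>off ;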
+
+
+local rule expand-composite ( property )
+{
+ return $(property)
+ [ sequence.transform expand-composite : $($(property).components) ] ;
+}
+
+
+# Return all values of the given feature specified by the given property set.
+#
+rule get-values ( feature : properties * )
+{
+ local result ;
+
+ feature = $(:E=:G=$(feature)) ; # Add <> if necessary.
+ for local p in $(properties)
+ {
+ if $(p:G) = $(feature)
+ {
+ # Use MATCH instead of :G= to get the value, in order to preserve
+ # the value intact instead of having bjam treat it as a decomposable
+ # path.
+ result += [ MATCH ">(.*)" : $(p) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
+rule free-features ( )
+{
+ return $(free.features) ;
+}
+
+
+# Expand all composite properties in the set so that all components are
+# explicitly expressed.
+#
+rule expand-composites ( properties * )
+{
+ local explicit-features = $(properties:G) ;
+ local result ;
+
+ # Now expand composite features.
+ for local p in $(properties)
+ {
+ local expanded = [ expand-composite $(p) ] ;
+
+ for local x in $(expanded)
+ {
+ if ! $(x) in $(result)
+ {
+ local f = $(x:G) ;
+
+ if $(f) in $(free.features)
+ {
+ result += $(x) ;
+ }
+ else if ! $(x) in $(properties) # x is the result of expansion
+ {
+ if ! $(f) in $(explicit-features) # not explicitly-specified
+ {
+ if $(f) in $(result:G)
+ {
+ errors.error expansions of composite features result
+ in conflicting values for $(f)
+ : values: [ get-values $(f) : $(result) ] $(x:G=)
+ : one contributing composite property was $(p) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+ }
+ else if $(f) in $(result:G)
+ {
+ errors.error explicitly-specified values of non-free feature
+ $(f) conflict :
+ "existing values:" [ get-values $(f) : $(properties) ] :
+ "value from expanding " $(p) ":" $(x:G=) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Return true iff f is an ordinary subfeature of the parent-property's feature,
+# or if f is a subfeature of the parent-property's feature specific to the
+# parent-property's value.
+#
+local rule is-subfeature-of ( parent-property f )
+{
+ if subfeature in $($(f).attributes)
+ {
+ local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ;
+ if $(specific-subfeature)
+ {
+ # The feature has the form <topfeature-topvalue:subfeature>, e.g.
+ # <toolset-msvc:version>.
+ local feature-value = [ split-top-feature $(specific-subfeature[1])
+ ] ;
+ if <$(feature-value[1])>$(feature-value[2]) = $(parent-property)
+ {
+ return true ;
+ }
+ }
+ else
+ {
+ # The feature has the form <topfeature-subfeature>, e.g.
+ # <toolset-version>
+ local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ;
+ if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G)
+ {
+ return true ;
+ }
+ }
+ }
+}
+
+
+# As for is-subfeature-of but for subproperties.
+#
+local rule is-subproperty-of ( parent-property p )
+{
+ return [ is-subfeature-of $(parent-property) $(p:G) ] ;
+}
+
+
+# Given a property, return the subset of features consisting of all ordinary
+# subfeatures of the property's feature, and all specific subfeatures of the
+# property's feature which are conditional on the property's value.
+#
+local rule select-subfeatures ( parent-property : features * )
+{
+ return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ;
+}
+
+
+# As for select-subfeatures but for subproperties.
+#
+local rule select-subproperties ( parent-property : properties * )
+{
+ return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ;
+}
+
+
+# Given a property set which may consist of composite and implicit properties
+# and combined subfeature values, returns an expanded, normalized property set
+# with all implicit features expressed explicitly, all subfeature values
+# individually expressed, and all components of composite properties expanded.
+# Non-free features directly expressed in the input properties cause any values
+# of those features due to composite feature expansion to be dropped. If two
+# values of a given non-free feature are directly expressed in the input, an
+# error is issued.
+#
+rule expand ( properties * )
+{
+ local expanded = [ expand-subfeatures $(properties) ] ;
+ return [ expand-composites $(expanded) ] ;
+}
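# For illustration (using the feature declarations from the tests at the end of
# this module), expanding an implicit combined value together with a composite
# variant, e.g.
#
#   expand gcc-3.0.1 debug <optimization>on ;
#
# would yield something like
#
#   <toolset>gcc <toolset-gcc:version>3.0.1 <variant>debug <define>_DEBUG <optimization>on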
+
+
+# Helper rule for minimize. Returns true iff property's feature is present in
+# the contents of the variable named by feature-set-var.
+#
+local rule in-features ( feature-set-var property )
+{
+ if $(property:G) in $($(feature-set-var))
+ {
+ return true ;
+ }
+}
+
+
+# Helper rule for minimize. Returns the list with the same properties, but with
+# all subfeatures moved to the end of the list.
+#
+local rule move-subfeatures-to-the-end ( properties * )
+{
+ local x1 ;
+ local x2 ;
+ for local p in $(properties)
+ {
+ if subfeature in $($(p:G).attributes)
+ {
+ x2 += $(p) ;
+ }
+ else
+ {
+ x1 += $(p) ;
+ }
+ }
+ return $(x1) $(x2) ;
+}
+
+
+# Given an expanded property set, eliminate all redundancy: properties that are
+# elements of other (composite) properties in the set will be eliminated.
+# Non-symmetric properties equal to default values will be eliminated unless
+# they override a value from some composite property. Implicit properties will
+# be expressed without feature grist, and sub-property values will be expressed
+# as elements joined to the corresponding main property.
+#
+rule minimize ( properties * )
+{
+ # Precondition checking
+ local implicits = [ set.intersection $(properties:G=) : $(properties:G) ] ;
+ if $(implicits)
+ {
+ errors.error minimize requires an expanded property set, but
+ \"$(implicits[1])\" appears to be the value of an un-expanded
+ implicit feature ;
+ }
+
+ # Remove properties implied by composite features.
+ local components = $($(properties).components) ;
+ local x = [ set.difference $(properties) : $(components) ] ;
+
+ # Handle subfeatures and implicit features.
+ x = [ move-subfeatures-to-the-end $(x) ] ;
+ local result ;
+ while $(x)
+ {
+ local p fullp = $(x[1]) ;
+ local f = $(p:G) ;
+ local v = $(p:G=) ;
+
+ # Eliminate features in implicit properties.
+ if implicit in [ attributes $(f) ]
+ {
+ p = $(v) ;
+ }
+
+ # Locate all subproperties of $(x[1]) in the property set.
+ local subproperties = [ select-subproperties $(fullp) : $(x) ] ;
+ if $(subproperties)
+ {
+ # Reconstitute the joined property name.
+ local sorted = [ sequence.insertion-sort $(subproperties) ] ;
+ result += $(p)-$(sorted:G="":J=-) ;
+
+ x = [ set.difference $(x[2-]) : $(subproperties) ] ;
+ }
+ else
+ {
+ # Eliminate properties whose value is equal to feature's default,
+ # which are not symmetric and which do not contradict values implied
+ # by composite properties.
+
+ # Since all component properties of composites in the set have been
+ # eliminated, any remaining property whose feature is the same as a
+ # component of a composite in the set must have a non-redundant
+ # value.
+ if $(fullp) != [ defaults $(f) ]
+ || symmetric in [ attributes $(f) ]
+ || $(fullp:G) in $(components:G)
+ {
+ result += $(p) ;
+ }
+
+ x = $(x[2-]) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Combine all subproperties into their parent properties
+#
+# Requires: for every subproperty, there is a parent property. All features are
+# explicitly expressed.
+#
+# This rule probably should not be needed, but build-request.expand-no-defaults
+# is being abused for unintended purposes and it needs help.
+#
+rule compress-subproperties ( properties * )
+{
+ local all-subs ;
+ local matched-subs ;
+ local result ;
+
+ for local p in $(properties)
+ {
+ if ! $(p:G)
+ {
+ # Expecting fully-gristed properties.
+ assert.variable-not-empty p:G ;
+ }
+
+ if ! subfeature in $($(p:G).attributes)
+ {
+ local subs = [ sequence.insertion-sort
+ [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ;
+
+ matched-subs += $(subs) ;
+
+ local subvalues = -$(subs:G=:J=-) ;
+ subvalues ?= "" ;
+ result += $(p)$(subvalues) ;
+ }
+ else
+ {
+ all-subs += $(p) ;
+ }
+ }
+ assert.result true : set.equal $(all-subs) : $(matched-subs) ;
+ return $(result) ;
+}
+
+
+# Given an ungristed string, finds the longest prefix which is a top-level
+# feature name followed by a dash, and returns a pair consisting of the parts
+# before and after that dash. More interesting than a simple split because
+# feature names may contain dashes.
+#
+local rule split-top-feature ( feature-plus )
+{
+ local e = [ regex.split $(feature-plus) - ] ;
+ local f = $(e[1]) ;
+ local v ;
+ while $(e)
+ {
+ if <$(f)> in $(.all-top-features)
+ {
+ v = $(f) $(e[2-]:J=-) ;
+ }
+ e = $(e[2-]) ;
+ f = $(f)-$(e[1]) ;
+ }
+ return $(v) ;
+}
+
+
+# Given a set of properties, add default values for features not represented in
+# the set.
+#
+# Note: if there's an ordinary feature F1 and a composite feature F2 which
+# includes some value for F1 and both features have default values, then the
+# default value of F1 will be added (as opposed to the value in F2). This might
+# not be the right idea, e.g. consider:
+#
+# feature variant : debug ... ;
+# <variant>debug : .... <runtime-debugging>on
+# feature <runtime-debugging> : off on ;
+#
+# Here, when adding default for an empty property set, we'll get
+#
+# <variant>debug <runtime-debugging>off
+#
+# and that's kind of strange.
+#
+rule add-defaults ( properties * )
+{
+ for local v in $(properties:G=)
+ {
+ if $(v) in $(properties)
+ {
+ errors.error add-defaults requires explicitly specified features,
+ but \"$(v)\" appears to be the value of an un-expanded implicit
+ feature ;
+ }
+ }
+ # We don't add default for elements with ":" inside. This catches:
+ # 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
+# to be taken as a specified value for <variant>
+ # 2. Free properties with ":" in values. We don't care, since free
+ # properties don't have defaults.
+ local xproperties = [ MATCH "^([^:]+)$" : $(properties) ] ;
+ local missing-top = [ set.difference $(.all-top-features) : $(xproperties:G) ] ;
+ local more = [ defaults $(missing-top) ] ;
+ properties += $(more) ;
+ xproperties += $(more) ;
+
+ # Add defaults for subfeatures of features which are present.
+ for local p in $(xproperties)
+ {
+ local s = $($(p:G).subfeatures) ;
+ local f = [ utility.ungrist $(p:G) ] ;
+ local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
+ properties += [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
+ }
+
+ return $(properties) ;
+}
+
+
+# Given a property-set of the form
+# v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
+#
+# Returns
+# v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
+#
+# Note that vN...vM may contain slashes. This needs to be resilient to the
+# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps
+# slash direction on NT.
+#
+rule split ( property-set )
+{
+ local pieces = [ regex.split $(property-set) [\\/] ] ;
+ local result ;
+
+ for local x in $(pieces)
+ {
+ if ( ! $(x:G) ) && $(result[-1]:G)
+ {
+ result = $(result[1--2]) $(result[-1])/$(x) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Tests of module feature.
+#
+rule __test__ ( )
+{
+ # Use a fresh copy of the feature module.
+ prepare-test feature-test-temp ;
+
+ import assert ;
+ import errors : try catch ;
+
+ # These are local rules and so must be explicitly reimported into the
+ # testing module.
+ import feature : extend-feature validate-feature select-subfeatures ;
+
+ feature toolset : gcc : implicit ;
+ feature define : : free ;
+ feature runtime-link : dynamic static : symmetric ;
+ feature optimization : on off ;
+ feature variant : debug release profile : implicit composite symmetric ;
+ feature stdlib : native stlport ;
+ feature magic : : free ;
+
+ compose <variant>debug : <define>_DEBUG <optimization>off ;
+ compose <variant>release : <define>NDEBUG <optimization>on ;
+
+ assert.result dynamic static : values <runtime-link> ;
+ assert.result dynamic static : values runtime-link ;
+
+ try ;
+ {
+ compose <variant>profile : <variant>profile ;
+ }
+ catch composite property <variant>profile cannot have itself as a component ;
+
+ extend-feature toolset : msvc metrowerks ;
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ;
+
+ assert.true is-subvalue toolset : gcc : version : 2.95.3 ;
+ assert.false is-subvalue toolset : gcc : version : 1.1 ;
+
+ assert.false is-subvalue toolset : msvc : version : 2.95.3 ;
+ assert.false is-subvalue toolset : : version : yabba ;
+
+ feature yabba ;
+ subfeature yabba : version : dabba ;
+ assert.true is-subvalue yabba : : version : dabba ;
+
+ subfeature toolset gcc : platform : linux cygwin : optional ;
+
+ assert.result <toolset-gcc:version>
+ : select-subfeatures <toolset>gcc
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib> ;
+
+ subfeature stdlib : version : 3 4 : optional ;
+
+ assert.result <stdlib-version>
+ : select-subfeatures <stdlib>native
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib-version> ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand-subfeatures <toolset>gcc-3.0.1 ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1 <toolset-gcc:platform>linux
+ : expand-subfeatures <toolset>gcc-3.0.1-linux ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand <toolset>gcc <toolset-gcc:version>3.0.1 ;
+
+ assert.result <define>foo=x-y
+ : expand-subfeatures <define>foo=x-y ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand-subfeatures gcc-3.0.1 ;
+
+ assert.result a c e
+ : get-values <x> : <x>a <y>b <x>c <y>d <x>e ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ <variant>debug <define>_DEBUG <optimization>on
+ : expand gcc-3.0.1 debug <optimization>on ;
+
+ assert.result <variant>debug <define>_DEBUG <optimization>on
+ : expand debug <optimization>on ;
+
+ assert.result <optimization>on <variant>debug <define>_DEBUG
+ : expand <optimization>on debug ;
+
+ assert.result <runtime-link>dynamic <optimization>on
+ : defaults <runtime-link> <define> <optimization> ;
+
+ # Make sure defaults is resilient to missing grist.
+ assert.result <runtime-link>dynamic <optimization>on
+ : defaults runtime-link define optimization ;
+
+ feature dummy : dummy1 dummy2 ;
+ subfeature dummy : subdummy : x y z : optional ;
+
+ feature fu : fu1 fu2 : optional ;
+ subfeature fu : subfu : x y z : optional ;
+ subfeature fu : subfu2 : q r s ;
+
+ assert.result optional : attributes <fu> ;
+ assert.result optional : attributes fu ;
+
+ assert.result <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <toolset>gcc <variant>debug <stdlib>native
+ <dummy>dummy1 <toolset-gcc:version>2.95.2
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO ;
+
+ assert.result <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <fu>fu1 <toolset>gcc <variant>debug
+ <stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <fu>fu1 ;
+
+ set-default <runtime-link> : static ;
+ assert.result <runtime-link>static : defaults <runtime-link> ;
+
+ assert.result gcc-3.0.1 debug <optimization>on
+ : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ] ;
+
+ assert.result gcc-3.0.1 debug <runtime-link>dynamic
+ : minimize
+ [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ] ;
+
+ assert.result gcc-3.0.1 debug
+ : minimize [ expand gcc-3.0.1 debug <optimization>off ] ;
+
+ assert.result debug <optimization>on
+ : minimize [ expand debug <optimization>on ] ;
+
+ assert.result gcc-3.0
+ : minimize <toolset>gcc <toolset-gcc:version>3.0 ;
+
+ assert.result gcc-3.0
+ : minimize <toolset-gcc:version>3.0 <toolset>gcc ;
+
+ assert.result <x>y/z <a>b/c <d>e/f
+ : split <x>y/z/<a>b/c/<d>e/f ;
+
+ assert.result <x>y/z <a>b/c <d>e/f
+ : split <x>y\\z\\<a>b\\c\\<d>e\\f ;
+
+ assert.result a b c <d>e/f/g <h>i/j/k
+ : split a/b/c/<d>e/f/g/<h>i/j/k ;
+
+ assert.result a b c <d>e/f/g <h>i/j/k
+ : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k ;
+
+ # Test error checking.
+
+ try ;
+ {
+ expand release <optimization>off <optimization>on ;
+ }
+ catch explicitly-specified values of non-free feature <optimization> conflict ;
+
+ try ;
+ {
+ validate-feature <foobar> ;
+ }
+ catch unknown feature ;
+
+ validate-value-string <toolset> gcc ;
+ validate-value-string <toolset> gcc-3.0.1 ;
+
+ try ;
+ {
+ validate-value-string <toolset> digital_mars ;
+ }
+ catch \"digital_mars\" is not a known value of <toolset> ;
+
+ try ;
+ {
+ feature foobar : : baz ;
+ }
+ catch unknown attributes: baz ;
+
+ feature feature1 ;
+ try ;
+ {
+ feature feature1 ;
+ }
+ catch feature already defined: ;
+
+ try ;
+ {
+ feature feature2 : : free implicit ;
+ }
+ catch free features cannot also be implicit ;
+
+ try ;
+ {
+ feature feature3 : : free propagated ;
+ }
+ catch free features cannot be propagated ;
+
+ try ;
+ {
+ implied-feature lackluster ;
+ }
+ catch \"lackluster\" is not a value of an implicit feature ;
+
+ try ;
+ {
+ implied-subfeature <toolset> 3.0.1 ;
+ }
+ catch \"3.0.1\" is not a known subfeature value of <toolset> ;
+
+ try ;
+ {
+ implied-subfeature <toolset> not-a-version : gcc ;
+ }
+ catch \"not-a-version\" is not a known subfeature value of <toolset>gcc ;
+
+ # Leave a clean copy of the features module behind.
+ finish-test feature-test-temp ;
+}
diff --git a/jam-files/boost-build/build/generators.jam b/jam-files/boost-build/build/generators.jam
new file mode 100644
index 000000000..1515525f2
--- /dev/null
+++ b/jam-files/boost-build/build/generators.jam
@@ -0,0 +1,1408 @@
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Manages 'generators' --- objects which can do transformations between
+# different target types and contain the algorithm for finding a transformation
+# from sources to targets.
+#
+# The main entry point to this module is generators.construct rule. It is given
+# a list of source targets, desired target type and a set of properties. It
+# starts by selecting 'viable generators', which have any chances of producing
+# the desired target type with the required properties. Generators are ranked
+# and a set of the most specific ones is selected.
+#
+# The most specific generators have their 'run' methods called, with the
+# properties and list of sources. Each one selects a target which can be
+# directly consumed, and tries to convert the remaining ones to the types it can
+# consume. This is done by recursively calling 'construct' with all consumable
+# types.
+#
+# If the generator has collected all the targets it needs, it creates targets
+# corresponding to result, and returns it. When all generators have been run,
+# results of one of them are selected and returned as a result.
+#
+# It is quite possible for 'construct' to return more targets than it was asked
+# for. For example, it may be asked to generate a target of type EXE, but the
+# only generator found produces both EXE and TDS (a file with debug
+# information). The extra target will be returned.
+#
+# Likewise, when a generator tries to convert sources to consumable types, it
+# can get more targets than it was asked for. The question is what to do with extra
+# targets. Boost.Build attempts to convert them to requested types, and attempts
+# that as early as possible. Specifically, this is done after invoking each
+# generator. TODO: An example is needed to document the rationale for trying
+# extra target conversion at that point.
+#
+# In order for the system to be able to use a specific generator instance 'when
+# needed', the instance needs to be registered with the system using
+# generators.register() or one of its related rules. Unregistered generators may
+# only be run explicitly and will not be considered by Boost.Build when
+# converting between given target types.
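# A minimal registration sketch (hypothetical toolset module; the rule name,
# type names and toolset value are placeholders): a one-to-one generator that
# turns CPP sources into OBJ files for a toolset 'mycc' could be registered as
#
#   import generators ;
#   generators.register-standard mycc.compile : CPP : OBJ : <toolset>mycc ;
#
# after which the construct rule may select it whenever OBJ targets need to be
# produced from CPP sources with <toolset>mycc in the build properties.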
+
+import "class" : new ;
+import errors ;
+import property-set ;
+import sequence ;
+import set ;
+import type ;
+import utility ;
+import virtual-target ;
+
+
+if "--debug-generators" in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+
+# Updates cached viable source target type information as needed after a new
+# target type gets defined. This is needed because if a target type is a viable
+# source target type for some generator then all of the target type's derived
+# target types should automatically be considered as viable source target types
+# for the same generator as well. Does nothing if a non-derived target type is
+# passed to it.
+#
+rule update-cached-information-with-a-new-type ( type )
+{
+ local base-type = [ type.base $(type) ] ;
+ if $(base-type)
+ {
+ for local g in $(.vstg-cached-generators)
+ {
+ if $(base-type) in $(.vstg.$(g))
+ {
+ .vstg.$(g) += $(type) ;
+ }
+ }
+
+ for local t in $(.vst-cached-types)
+ {
+ if $(base-type) in $(.vst.$(t))
+ {
+ .vst.$(t) += $(type) ;
+ }
+ }
+ }
+}
+
+
+# Clears cached viable source target type information except for target types
+# and generators with all source types listed as viable. Should be called when
+# something invalidates those cached values by possibly causing some new source
+# types to become viable.
+#
+local rule invalidate-extendable-viable-source-target-type-cache ( )
+{
+ local generators-with-cached-source-types = $(.vstg-cached-generators) ;
+ .vstg-cached-generators = ;
+ for local g in $(generators-with-cached-source-types)
+ {
+ if $(.vstg.$(g)) = *
+ {
+ .vstg-cached-generators += $(g) ;
+ }
+ else
+ {
+ .vstg.$(g) = ;
+ }
+ }
+
+ local types-with-cached-source-types = $(.vst-cached-types) ;
+ .vst-cached-types = ;
+ for local t in $(types-with-cached-source-types)
+ {
+ if $(.vst.$(t)) = *
+ {
+ .vst-cached-types += $(t) ;
+ }
+ else
+ {
+ .vst.$(t) = ;
+ }
+ }
+}
+
+
+# Outputs a debug message if generators debugging is on. Each element of
+# 'message' is checked to see if it is a class instance. If so, instead of the
+# value, the result of calling 'str' on it is output.
+#
+local rule generators.dout ( message * )
+{
+ if $(.debug)
+ {
+ ECHO [ sequence.transform utility.str : $(message) ] ;
+ }
+}
+
+
+local rule indent ( )
+{
+ return $(.indent:J="") ;
+}
+
+
+local rule increase-indent ( )
+{
+ .indent += " " ;
+}
+
+
+local rule decrease-indent ( )
+{
+ .indent = $(.indent[2-]) ;
+}
+
+
+# Models a generator.
+#
+class generator
+{
+ import generators : indent increase-indent decrease-indent generators.dout ;
+ import set ;
+ import utility ;
+ import feature ;
+ import errors ;
+ import sequence ;
+ import type ;
+ import virtual-target ;
+ import "class" : new ;
+ import property ;
+ import path ;
+
+ EXPORT class@generator : indent increase-indent decrease-indent
+ generators.dout ;
+
+ rule __init__ (
+ id # Identifies the generator - should be name
+ # of the rule which sets up the build
+ # actions.
+
+ composing ? # Whether generator processes each source
+ # target in turn, converting it to required
+ # types. Ordinary generators pass all
+ # sources together to the recursive
+ # generators.construct-types call.
+
+ : source-types * # Types that this generator can handle. If
+ # empty, the generator can consume anything.
+
+ : target-types-and-names + # Types the generator will create and,
+ # optionally, names for created targets.
+ # Each element should have the form
+ # type["(" name-pattern ")"], for example,
+ # obj(%_x). Generated target name will be
+ # found by replacing % with the name of
+ # source, provided an explicit name was not
+ # specified.
+
+ : requirements *
+ )
+ {
+ self.id = $(id) ;
+ self.rule-name = $(id) ;
+ self.composing = $(composing) ;
+ self.source-types = $(source-types) ;
+ self.target-types-and-names = $(target-types-and-names) ;
+ self.requirements = $(requirements) ;
+
+ for local e in $(target-types-and-names)
+ {
+ # Create three parallel lists: one with the list of target types,
+ # and two other with prefixes and postfixes to be added to target
+ # name. We use parallel lists for prefix and postfix (as opposed to
+ # mapping), because given target type might occur several times, for
+ # example "H H(%_symbols)".
+ local m = [ MATCH ([^\\(]*)(\\((.*)%(.*)\\))? : $(e) ] ;
+ self.target-types += $(m[1]) ;
+ self.name-prefix += $(m[3]:E="") ;
+ self.name-postfix += $(m[4]:E="") ;
+ }
+
+ # Note that 'transform' here, is the same as 'for_each'.
+ sequence.transform type.validate : $(self.source-types) ;
+ sequence.transform type.validate : $(self.target-types) ;
+ }
+
+ ################# End of constructor #################
+
+ rule id ( )
+ {
+ return $(self.id) ;
+ }
+
+ # Returns the list of target types the generator accepts.
+ #
+ rule source-types ( )
+ {
+ return $(self.source-types) ;
+ }
+
+ # Returns the list of target types that this generator produces. It is
+ # assumed to be always the same -- i.e. it can not change depending on some
+ # provided list of sources.
+ #
+ rule target-types ( )
+ {
+ return $(self.target-types) ;
+ }
+
+ # Returns the required properties for this generator. Properties in the
+ # returned set must be present in the build properties if this generator is
+ # to be used. If the result has a grist-only element, the build properties
+ # must include some value of that feature.
+ #
+ # XXX: remove this method?
+ #
+ rule requirements ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule set-rule-name ( rule-name )
+ {
+ self.rule-name = $(rule-name) ;
+ }
+
+ rule rule-name ( )
+ {
+ return $(self.rule-name) ;
+ }
+
+ # Returns a true value if the generator can be run with the specified
+ # properties.
+ #
+ rule match-rank ( property-set-to-match )
+ {
+ # See if generator requirements are satisfied by 'properties'. Treat a
+ # feature name in requirements (i.e. grist-only element), as matching
+ # any value of the feature.
+ local all-requirements = [ requirements ] ;
+
+ local property-requirements feature-requirements ;
+ for local r in $(all-requirements)
+ {
+ if $(r:G=)
+ {
+ property-requirements += $(r) ;
+ }
+ else
+ {
+ feature-requirements += $(r) ;
+ }
+ }
+
+ local properties-to-match = [ $(property-set-to-match).raw ] ;
+ if $(property-requirements) in $(properties-to-match) &&
+ $(feature-requirements) in $(properties-to-match:G)
+ {
+ return true ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ # Returns another generator which differs from $(self) in:
+ #   - its id
+ #   - the value of the <toolset> feature in its requirements
+ #
+ rule clone ( new-id : new-toolset-properties + )
+ {
+ local g = [ new $(__class__) $(new-id) $(self.composing) :
+ $(self.source-types) : $(self.target-types-and-names) :
+ # Note: this does not remove any subfeatures of <toolset> which
+ # might cause problems.
+ [ property.change $(self.requirements) : <toolset> ]
+ $(new-toolset-properties) ] ;
+ return $(g) ;
+ }
+
+ # Creates another generator that is the same as $(self), except that if
+ # 'base' is in the target types of $(self), 'type' will be in the target
+ # types of the new generator.
+ #
+ rule clone-and-change-target-type ( base : type )
+ {
+ local target-types ;
+ for local t in $(self.target-types-and-names)
+ {
+ local m = [ MATCH ([^\\(]*)(\\(.*\\))? : $(t) ] ;
+ if $(m) = $(base)
+ {
+ target-types += $(type)$(m[2]:E="") ;
+ }
+ else
+ {
+ target-types += $(t) ;
+ }
+ }
+
+ local g = [ new $(__class__) $(self.id) $(self.composing) :
+ $(self.source-types) : $(target-types) : $(self.requirements) ] ;
+ if $(self.rule-name)
+ {
+ $(g).set-rule-name $(self.rule-name) ;
+ }
+ return $(g) ;
+ }
+
+ # Tries to invoke this generator on the given sources. Returns a list of
+ # generated targets (instances of 'virtual-target') and optionally a set of
+ # properties to be added to the usage-requirements for all the generated
+ # targets. Returning nothing from run indicates that the generator was
+ # unable to create the target.
+ #
+ rule run
+ (
+ project # Project for which the targets are generated.
+ name ? # Used when determining the 'name' attribute for all
+ # generated targets. See the 'generated-targets' method.
+ : property-set # Desired properties for generated targets.
+ : sources + # Source targets.
+ )
+ {
+ generators.dout [ indent ] " ** generator" $(self.id) ;
+ generators.dout [ indent ] " composing:" $(self.composing) ;
+
+ if ! $(self.composing) && $(sources[2]) && $(self.source-types[2])
+ {
+ errors.error "Unsupported source/source-type combination" ;
+ }
+
+ # We do not run composing generators if no name is specified. The reason
+ # is that a composing generator combines several targets, which can have
+ # different names, and it cannot decide which name to give the produced
+ # target. Therefore, the name must be passed.
+ #
+ # This, in effect, means that composing generators are runnable only at
+ # the top level of a transformation graph, or if their name is passed
+ # explicitly. Thus, we disallow composing generators in the middle. For
+ # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE
+ # will not be allowed as the OBJ -> STATIC_LIB generator is composing.
+ if ! $(self.composing) || $(name)
+ {
+ run-really $(project) $(name) : $(property-set) : $(sources) ;
+ }
+ }
+
+ rule run-really ( project name ? : property-set : sources + )
+ {
+ # Targets that this generator will consume directly.
+ local consumed = ;
+ # Targets that can not be consumed and will be returned as-is.
+ local bypassed = ;
+
+ if $(self.composing)
+ {
+ convert-multiple-sources-to-consumable-types $(project)
+ : $(property-set) : $(sources) : consumed bypassed ;
+ }
+ else
+ {
+ convert-to-consumable-types $(project) $(name) : $(property-set)
+ : $(sources) : : consumed bypassed ;
+ }
+
+ local result ;
+ if $(consumed)
+ {
+ result = [ construct-result $(consumed) : $(project) $(name) :
+ $(property-set) ] ;
+ }
+
+ if $(result)
+ {
+ generators.dout [ indent ] " SUCCESS: " $(result) ;
+ }
+ else
+ {
+ generators.dout [ indent ] " FAILURE" ;
+ }
+ generators.dout ;
+ return $(result) ;
+ }
+
+ # Constructs the dependency graph to be returned by this generator.
+ #
+ rule construct-result
+ (
+ consumed + # Already prepared list of consumable targets.
+ # Composing generators may receive multiple sources
+ # all of which will have types matching those in
+ # $(self.source-types). Non-composing generators with
+ # multiple $(self.source-types) will receive exactly
+ # len $(self.source-types) sources with types matching
+ # those in $(self.source-types). And non-composing
+ # generators with only a single source type may
+ # receive multiple sources with all of them of the
+ # type listed in $(self.source-types).
+ : project name ?
+ : property-set # Properties to be used for all actions created here.
+ )
+ {
+ local result ;
+ # If this is 1->1 transformation, apply it to all consumed targets in
+ # order.
+ if ! $(self.source-types[2]) && ! $(self.composing)
+ {
+ for local r in $(consumed)
+ {
+ result += [ generated-targets $(r) : $(property-set) :
+ $(project) $(name) ] ;
+ }
+ }
+ else if $(consumed)
+ {
+ result += [ generated-targets $(consumed) : $(property-set) :
+ $(project) $(name) ] ;
+ }
+ return $(result) ;
+ }
+
+ # Determine the target name from 'fullname' (which may include path
+ # components) and place the optional prefix and postfix around the basename.
+ #
+ rule determine-target-name ( fullname : prefix ? : postfix ? )
+ {
+ # See if we need to add directory to the target name.
+ local dir = $(fullname:D) ;
+ local name = $(fullname:B) ;
+
+ name = $(prefix:E=)$(name) ;
+ name = $(name)$(postfix:E=) ;
+
+ if $(dir) &&
+ # Never append '..' to target path.
+ ! [ MATCH .*(\\.\\.).* : $(dir) ]
+ &&
+ ! [ path.is-rooted $(dir) ]
+ {
+ # Relative path is always relative to the source
+ # directory. Retain it, so that users can have files
+ # with the same in two different subdirectories.
+ name = $(dir)/$(name) ;
+ }
+ return $(name) ;
+ }
+
+ # Determine the name of the produced target from the names of the sources.
+ #
+ rule determine-output-name ( sources + )
+ {
+ # The simple case is when the name of a source has a single dot. Then, we
+ # take the part before the dot. Several dots can be caused by:
+ # - using a source file like a.host.cpp, or
+ # - a type whose suffix contains a dot, say a type 'host_cpp' with
+ # extension 'host.cpp'.
+ # In the first case, we want to take the part up to the last dot. In the
+ # second case -- not sure, but for now take the part up to the last dot
+ # too.
+ name = [ utility.basename [ $(sources[1]).name ] ] ;
+
+ for local s in $(sources[2])
+ {
+ local n2 = [ utility.basename [ $(s).name ] ] ;
+ if $(n2) != $(name)
+ {
+ errors.error "$(self.id): source targets have different names: cannot determine target name" ;
+ }
+ }
+ name = [ determine-target-name [ $(sources[1]).name ] ] ;
+ return $(name) ;
+ }
+
+ # Constructs targets that are created after consuming 'sources'. The result
+ # will be the list of virtual-target, which has the same length as the
+ # 'target-types' attribute and with corresponding types.
+ #
+ # When 'name' is empty, all source targets must have the same 'name'
+ # attribute value, which will be used instead of the 'name' argument.
+ #
+ # The 'name' attribute value for each generated target will be equal to
+ # the 'name' parameter if there is no name pattern for this type. Otherwise,
+ # the '%' symbol in the name pattern will be replaced with the 'name'
+ # parameter to obtain the 'name' attribute.
+ #
+ # For example, if target types are T1 and T2 (with name pattern "%_x"),
+ # suffixes for T1 and T2 are .t1 and .t2, and source is foo.z, then created
+ # files would be "foo.t1" and "foo_x.t2". The 'name' attribute actually
+ # determines the basename of a file.
+ #
+ # Note that this pattern mechanism has nothing to do with implicit patterns
+ # in make. It is a way to produce a target whose name is different than the
+ # name of its source.
+ #
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ if ! $(name)
+ {
+ name = [ determine-output-name $(sources) ] ;
+ }
+
+ # Assign an action for each target.
+ local action = [ action-class ] ;
+ local a = [ class.new $(action) $(sources) : $(self.rule-name) :
+ $(property-set) ] ;
+
+ # Create generated target for each target type.
+ local targets ;
+ local pre = $(self.name-prefix) ;
+ local post = $(self.name-postfix) ;
+ for local t in $(self.target-types)
+ {
+ local generated-name = $(pre[1])$(name:BS)$(post[1]) ;
+ generated-name = $(generated-name:R=$(name:D)) ;
+ pre = $(pre[2-]) ;
+ post = $(post[2-]) ;
+
+ targets += [ class.new file-target $(generated-name) : $(t) :
+ $(project) : $(a) ] ;
+ }
+
+ return [ sequence.transform virtual-target.register : $(targets) ] ;
+ }
+
+ # Attempts to convert 'sources' to targets of types that this generator can
+ # handle. The intention is to produce the set of targets that can be used
+ # when the generator is run.
+ #
+ rule convert-to-consumable-types
+ (
+ project name ?
+ : property-set
+ : sources +
+ : only-one ? # Convert 'source' to only one of the source types. If
+ # there is more than one possibility, report an error.
+ : consumed-var # Name of the variable which receives all targets which
+ # can be consumed.
+ bypassed-var # Name of the variable which receives all targets which
+ # can not be consumed.
+ )
+ {
+ # We are likely to be passed 'consumed' and 'bypassed' var names. Use
+ # '_' to avoid name conflicts.
+ local _consumed ;
+ local _bypassed ;
+ local missing-types ;
+
+ if $(sources[2])
+ {
+ # Do not know how to handle several sources yet. Just try to pass
+ # the request to another generator.
+ missing-types = $(self.source-types) ;
+ }
+ else
+ {
+ consume-directly $(sources) : _consumed : missing-types ;
+ }
+
+ # No need to search for a transformation if some source type has already
+ # consumed the source and no more source types are needed.
+ if $(only-one) && $(_consumed)
+ {
+ missing-types = ;
+ }
+
+ # TODO: we should check that only one source type is created if
+ # 'only-one' is true.
+ # TODO: consider if consumed/bypassed separation should be done by
+ # 'construct-types'.
+
+ if $(missing-types)
+ {
+ local transformed = [ generators.construct-types $(project) $(name)
+ : $(missing-types) : $(property-set) : $(sources) ] ;
+
+ # Add targets of the right type to 'consumed'. Add others to 'bypassed'.
+ # The 'generators.construct' rule has done its best to convert
+ # everything to the required type. There is no need to rerun it on
+ # targets of different types.
+
+ # NOTE: ignoring usage requirements.
+ for local t in $(transformed[2-])
+ {
+ if [ $(t).type ] in $(missing-types)
+ {
+ _consumed += $(t) ;
+ }
+ else
+ {
+ _bypassed += $(t) ;
+ }
+ }
+ }
+
+ _consumed = [ sequence.unique $(_consumed) ] ;
+ _bypassed = [ sequence.unique $(_bypassed) ] ;
+
+ # Remove elements of '_bypassed' that are in '_consumed'.
+
+ # Suppose the target type of the current generator, X, is produced from X_1
+ # and X_2, which are produced from Y by one generator. When creating X_1
+ # from Y, X_2 will be added to 'bypassed'. Likewise, when creating X_2
+ # from Y, X_1 will be added to 'bypassed', but they are also in
+ # 'consumed'. We have to remove them from bypassed, so that generators
+ # up the call stack do not try to convert them.
+
+ # In this particular case, X_1 instance in 'consumed' and X_1 instance
+ # in 'bypassed' will be the same: because they have the same source and
+ # action name, and 'virtual-target.register' will not allow two
+ # different instances. Therefore, it is OK to use 'set.difference'.
+
+ _bypassed = [ set.difference $(_bypassed) : $(_consumed) ] ;
+
+ $(consumed-var) += $(_consumed) ;
+ $(bypassed-var) += $(_bypassed) ;
+ }
+
+ # Converts several files to consumable types. Called for composing
+ # generators only.
+ #
+ rule convert-multiple-sources-to-consumable-types ( project : property-set :
+ sources * : consumed-var bypassed-var )
+ {
+ # We process each source one-by-one, trying to convert it to a usable
+ # type.
+ for local source in $(sources)
+ {
+ local _c ;
+ local _b ;
+ # TODO: need to check for failure on each source.
+ convert-to-consumable-types $(project) : $(property-set) : $(source)
+ : true : _c _b ;
+ if ! $(_c)
+ {
+ generators.dout [ indent ] " failed to convert " $(source) ;
+ }
+ $(consumed-var) += $(_c) ;
+ $(bypassed-var) += $(_b) ;
+ }
+ }
+
+ rule consume-directly ( source : consumed-var : missing-types-var )
+ {
+ local real-source-type = [ $(source).type ] ;
+
+ # If there are no source types, we can consume anything.
+ local source-types = $(self.source-types) ;
+ source-types ?= $(real-source-type) ;
+
+ for local st in $(source-types)
+ {
+ # The 'source' is of the right type already.
+ if $(real-source-type) = $(st) || [ type.is-derived
+ $(real-source-type) $(st) ]
+ {
+ $(consumed-var) += $(source) ;
+ }
+ else
+ {
+ $(missing-types-var) += $(st) ;
+ }
+ }
+ }
+
+ # Returns the class to be used for actions. The default implementation returns
+ # "action".
+ #
+ rule action-class ( )
+ {
+ return "action" ;
+ }
+}
+
+
+# Registers a new generator instance 'g'.
+#
+rule register ( g )
+{
+ .all-generators += $(g) ;
+
+ # A generator can produce several targets of the same type. We want a unique
+ # occurrence of that generator in .generators.$(t) in that case; otherwise,
+ # it will be tried twice and we will get a false ambiguity.
+ for local t in [ sequence.unique [ $(g).target-types ] ]
+ {
+ .generators.$(t) += $(g) ;
+ }
+
+ # Update the set of generators for toolset.
+
+ # TODO: should we check that a generator with this id is not already
+ # registered? For example, the fop.jam module intentionally declares two
+ # generators with the same id, so such a check would break it.
+ local id = [ $(g).id ] ;
+
+ # Some generators have multiple periods in their name, so a simple $(id:S=)
+ # will not generate the right toolset name. E.g. if id = gcc.compile.c++,
+ # then .generators-for-toolset.$(id:S=) will append to
+ # .generators-for-toolset.gcc.compile, which is a separate value from
+ # .generators-for-toolset.gcc. Correcting this makes generator inheritance
+ # work properly. See also inherit-generators in the toolset module.
+ local base = $(id) ;
+ while $(base:S)
+ {
+ base = $(base:B) ;
+ }
+ .generators-for-toolset.$(base) += $(g) ;
+
+
+ # After adding a new generator that can construct new target types, we need
+ # to clear the related cached viable source target type information for
+ # constructing a specific target type or using a specific generator. Cached
+ # viable source target type lists affected by this are those containing any
+ # of the target types constructed by the new generator or any of their base
+ # target types.
+ #
+ # A more advanced alternative to clearing that cached viable source target
+ # type information would be to expand it with additional source types or
+ # even better - mark it as needing to be expanded on next use.
+ #
+ # Also see the http://thread.gmane.org/gmane.comp.lib.boost.build/19077
+ # mailing list thread for an even more advanced idea of how we could convert
+ # Boost Build's Jamfile processing, target selection and generator selection
+ # into separate steps which would prevent these caches from ever being
+ # invalidated.
+ #
+ # For now we just clear all the cached viable source target type information
+ # that does not simply state 'all types' and may implement a more detailed
+ # algorithm later on if it becomes needed.
+
+ invalidate-extendable-viable-source-target-type-cache ;
+}
+
+
+# Creates a new non-composing 'generator' class instance and registers it.
+# Returns the created instance. Rationale: the instance is returned so that it
+# is possible to first register a generator and then call its 'run' method,
+# bypassing the whole generator selection process.
+#
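+# For illustration only (hypothetical toolset id), a typical registration
+# looks like:
+#
+#   generators.register-standard mytool.compile : CPP : OBJ : <toolset>mytool ;
+#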
+rule register-standard ( id : source-types * : target-types + : requirements * )
+{
+ local g = [ new generator $(id) : $(source-types) : $(target-types) :
+ $(requirements) ] ;
+ register $(g) ;
+ return $(g) ;
+}
+
+
+# Creates a new composing 'generator' class instance and registers it.
+#
+rule register-composing ( id : source-types * : target-types + : requirements *
+ )
+{
+ local g = [ new generator $(id) true : $(source-types) : $(target-types) :
+ $(requirements) ] ;
+ register $(g) ;
+ return $(g) ;
+}
+
+
+# Returns all generators belonging to the given 'toolset', i.e. whose ids are
+# '$(toolset).<something>'.
+#
+rule generators-for-toolset ( toolset )
+{
+ return $(.generators-for-toolset.$(toolset)) ;
+}
+
+
+# Make generator 'overrider-id' be preferred to 'overridee-id'. If, when
+# searching for generators that could produce a target of a certain type, both
+# those generators are among viable generators, the overridden generator is
+# immediately discarded.
+#
+# The overridden generators are discarded immediately after computing the list
+# of viable generators but before running any of them.
+#
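+# For example (illustrative ids), a toolset could prefer its own linking
+# generator over a generic one:
+#
+#   generators.override mytool.link : generic.link ;
+#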
+rule override ( overrider-id : overridee-id )
+{
+ .override.$(overrider-id) += $(overridee-id) ;
+}
+
+
+# Returns a list of source types which can possibly be converted to 'target-type'
+# by some chain of generator invocation.
+#
+# More formally, takes all generators for 'target-type' and returns a union of
+# source types for those generators and result of calling itself recursively on
+# source types.
+#
+# Returns '*' in case any type should be considered a viable source type for the
+# given type.
+#
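+# For example (illustrative types): if OBJ can be produced from CPP and CPP can
+# be produced from YACC, then the viable source types for OBJ include CPP, YACC
+# and all types derived from them.
+#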
+local rule viable-source-types-real ( target-type )
+{
+ local result ;
+
+ # 't0' is the initial list of target types we need to process to get a list
+ # of their viable source target types. New target types will not be added to
+ # this list.
+ local t0 = [ type.all-bases $(target-type) ] ;
+
+ # 't' is the list of target types which have not yet been processed to get a
+ # list of their viable source target types. This list will get expanded as
+ # we locate more target types to process.
+ local t = $(t0) ;
+
+ while $(t)
+ {
+ # Find all generators for the current type. Unlike
+ # 'find-viable-generators' we do not care about the property-set.
+ local generators = $(.generators.$(t[1])) ;
+ t = $(t[2-]) ;
+
+ while $(generators)
+ {
+ local g = $(generators[1]) ;
+ generators = $(generators[2-]) ;
+
+ if ! [ $(g).source-types ]
+ {
+ # Empty source types -- everything can be accepted.
+ result = * ;
+ # This will terminate this loop.
+ generators = ;
+ # This will terminate the outer loop.
+ t = ;
+ }
+
+ for local source-type in [ $(g).source-types ]
+ {
+ if ! $(source-type) in $(result)
+ {
+ # If a generator accepts a 'source-type' it will also
+ # happily accept any type derived from it.
+ for local n in [ type.all-derived $(source-type) ]
+ {
+ if ! $(n) in $(result)
+ {
+ # Here there is no point in adding target types to
+ # the list of types to process in case they are or
+ # have already been on that list. We optimize this
+ # check by realizing that we only need to avoid the
+ # original target type's base types. Other target
+ # types that are or have been on the list of target
+ # types to process have been added to the 'result'
+ # list as well and have thus already been eliminated
+ # by the previous if.
+ if ! $(n) in $(t0)
+ {
+ t += $(n) ;
+ }
+ result += $(n) ;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Helper rule, caches the result of 'viable-source-types-real'.
+#
+rule viable-source-types ( target-type )
+{
+ local key = .vst.$(target-type) ;
+ if ! $($(key))
+ {
+ .vst-cached-types += $(target-type) ;
+ local v = [ viable-source-types-real $(target-type) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $($(key)) != none
+ {
+ return $($(key)) ;
+ }
+}
+
+
+# Returns the list of source types which, when passed to the 'run' method of
+# 'generator', have some chance of being eventually used (probably after
+# conversion by other generators).
+#
+# Returns '*' in case any type should be considered a viable source type for the
+# given generator.
+#
+rule viable-source-types-for-generator-real ( generator )
+{
+ local source-types = [ $(generator).source-types ] ;
+ if ! $(source-types)
+ {
+ # If generator does not specify any source types, it might be a special
+ # generator like builtin.lib-generator which just relays to other
+ # generators. Return '*' to indicate that any source type is possibly
+ # OK, since we do not know for sure.
+ return * ;
+ }
+ else
+ {
+ local result ;
+ while $(source-types)
+ {
+ local s = $(source-types[1]) ;
+ source-types = $(source-types[2-]) ;
+ local viable-sources = [ generators.viable-source-types $(s) ] ;
+ if $(viable-sources) = *
+ {
+ result = * ;
+ source-types = ; # Terminate the loop.
+ }
+ else
+ {
+ result += [ type.all-derived $(s) ] $(viable-sources) ;
+ }
+ }
+ return [ sequence.unique $(result) ] ;
+ }
+}
+
+
+# Helper rule, caches the result of 'viable-source-types-for-generator-real'.
+#
+local rule viable-source-types-for-generator ( generator )
+{
+ local key = .vstg.$(generator) ;
+ if ! $($(key))
+ {
+ .vstg-cached-generators += $(generator) ;
+ local v = [ viable-source-types-for-generator-real $(generator) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $($(key)) != none
+ {
+ return $($(key)) ;
+ }
+}
+
+
+# Returns usage requirements + list of created targets.
+#
+local rule try-one-generator-really ( project name ? : generator : target-type
+ : property-set : sources * )
+{
+ local targets =
+ [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ;
+
+ local usage-requirements ;
+ local success ;
+
+ generators.dout [ indent ] returned $(targets) ;
+
+ if $(targets)
+ {
+ success = true ;
+
+ if [ class.is-a $(targets[1]) : property-set ]
+ {
+ usage-requirements = $(targets[1]) ;
+ targets = $(targets[2-]) ;
+ }
+ else
+ {
+ usage-requirements = [ property-set.empty ] ;
+ }
+ }
+
+ generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ;
+ generators.dout [ indent ] " " $(targets) ;
+ if $(usage-requirements)
+ {
+ generators.dout [ indent ] " with usage requirements:" $(usage-requirements) ;
+ }
+
+ if $(success)
+ {
+ return $(usage-requirements) $(targets) ;
+ }
+}
+
+
+# Checks if generator invocation can be pruned, because it is guaranteed to
+# fail. If so, quickly returns an empty list. Otherwise, calls
+# try-one-generator-really.
+#
+local rule try-one-generator ( project name ? : generator : target-type
+ : property-set : sources * )
+{
+ local source-types ;
+ for local s in $(sources)
+ {
+ source-types += [ $(s).type ] ;
+ }
+ local viable-source-types = [ viable-source-types-for-generator $(generator)
+ ] ;
+
+ if $(source-types) && $(viable-source-types) != * &&
+ ! [ set.intersection $(source-types) : $(viable-source-types) ]
+ {
+ local id = [ $(generator).id ] ;
+ generators.dout [ indent ] " ** generator '$(id)' pruned" ;
+ #generators.dout [ indent ] "source-types" '$(source-types)' ;
+ #generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ;
+ }
+ else
+ {
+ return [ try-one-generator-really $(project) $(name) : $(generator) :
+ $(target-type) : $(property-set) : $(sources) ] ;
+ }
+}
+
+
+rule construct-types ( project name ? : target-types + : property-set
+ : sources + )
+{
+ local result ;
+ local matched-types ;
+ local usage-requirements = [ property-set.empty ] ;
+ for local t in $(target-types)
+ {
+ local r = [ construct $(project) $(name) : $(t) : $(property-set) :
+ $(sources) ] ;
+ if $(r)
+ {
+ usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
+ result += $(r[2-]) ;
+ matched-types += $(t) ;
+ }
+ }
+ # TODO: have to introduce parameter controlling if several types can be
+ # matched and add appropriate checks.
+
+ # TODO: need to review the documentation for 'construct' to see if it should
+ # return $(source) even if nothing can be done with it. Current docs seem
+ # to imply that, contrary to the actual behaviour.
+ if $(result)
+ {
+ return $(usage-requirements) $(result) ;
+ }
+ else
+ {
+ return $(usage-requirements) $(sources) ;
+ }
+}
+
+
+# Ensures all 'targets' have their type. If this is not so, exits with an error.
+#
+local rule ensure-type ( targets * )
+{
+ for local t in $(targets)
+ {
+ if ! [ $(t).type ]
+ {
+ errors.error "target" [ $(t).str ] "has no type" ;
+ }
+ }
+}
+
+
+# Returns generators which can be used to construct a target of the specified
+# type with the specified properties. Uses the following algorithm:
+# - iterates over the requested target-type and all its bases (in the order
+# returned by type.all-bases).
+# - for each type, finds all generators that generate that type and whose
+# requirements are satisfied by the properties.
+# - if the set of generators is not empty, returns that set.
+#
+# Note: this algorithm explicitly ignores generators for base types if there
+# is at least one generator for the requested target-type.
+#
+local rule find-viable-generators-aux ( target-type : property-set )
+{
+ # Select generators that can create the required target type.
+ local viable-generators = ;
+ local generator-rank = ;
+
+ import type ;
+ local t = [ type.all-bases $(target-type) ] ;
+
+ generators.dout [ indent ] find-viable-generators target-type= $(target-type)
+ property-set= [ $(property-set).as-path ] ;
+
+ # Get the list of generators for the requested type. If no generator is
+ # registered, try base type, and so on.
+ local generators ;
+ while $(t[1])
+ {
+ generators.dout [ indent ] "trying type" $(t[1]) ;
+ if $(.generators.$(t[1]))
+ {
+ generators.dout [ indent ] "there are generators for this type" ;
+ generators = $(.generators.$(t[1])) ;
+
+ if $(t[1]) != $(target-type)
+ {
+ # We are here because there were no generators found for
+ # target-type but there are some generators for its base type.
+ # We will try to use them, but they will produce targets of
+ # base type, not of 'target-type'. So, we clone the generators
+ # and modify the list of target types.
+ local generators2 ;
+ for local g in $(generators)
+ {
+ # generators.register adds a generator to the list of
+ # generators for toolsets, which is a bit strange, but
+ # should work. That list is only used when inheriting a
+ # toolset, which should have been done before running
+ # generators.
+ generators2 += [ $(g).clone-and-change-target-type $(t[1]) :
+ $(target-type) ] ;
+ generators.register $(generators2[-1]) ;
+ }
+ generators = $(generators2) ;
+ }
+ t = ;
+ }
+ t = $(t[2-]) ;
+ }
+
+ for local g in $(generators)
+ {
+ generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ;
+
+ local m = [ $(g).match-rank $(property-set) ] ;
+ if $(m)
+ {
+ generators.dout [ indent ] " is viable" ;
+ viable-generators += $(g) ;
+ }
+ }
+
+ return $(viable-generators) ;
+}
+
+
+rule find-viable-generators ( target-type : property-set )
+{
+ local key = $(target-type).$(property-set) ;
+ local l = $(.fv.$(key)) ;
+ if ! $(l)
+ {
+ l = [ find-viable-generators-aux $(target-type) : $(property-set) ] ;
+ if ! $(l)
+ {
+ l = none ;
+ }
+ .fv.$(key) = $(l) ;
+ }
+
+ if $(l) = none
+ {
+ l = ;
+ }
+
+ local viable-generators ;
+ for local g in $(l)
+ {
+ # Avoid trying the same generator twice on different levels.
+ if ! $(g) in $(.active-generators)
+ {
+ viable-generators += $(g) ;
+ }
+ else
+ {
+ generators.dout [ indent ] " generator " [ $(g).id ] "is active, discarding" ;
+ }
+ }
+
+ # Generators which override 'all'.
+ local all-overrides ;
+ # Generators which are overridden.
+ local overriden-ids ;
+ for local g in $(viable-generators)
+ {
+ local id = [ $(g).id ] ;
+ local this-overrides = $(.override.$(id)) ;
+ overriden-ids += $(this-overrides) ;
+ if all in $(this-overrides)
+ {
+ all-overrides += $(g) ;
+ }
+ }
+ if $(all-overrides)
+ {
+ viable-generators = $(all-overrides) ;
+ }
+ local result ;
+ for local g in $(viable-generators)
+ {
+ if ! [ $(g).id ] in $(overriden-ids)
+ {
+ result += $(g) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+.construct-stack = ;
+
+
+# Attempts to construct a target by finding viable generators, running them and
+# selecting the dependency graph.
+#
+local rule construct-really ( project name ? : target-type : property-set :
+ sources * )
+{
+ viable-generators = [ find-viable-generators $(target-type) :
+ $(property-set) ] ;
+
+ generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ]
+ " viable generators" ;
+
+ local result ;
+ local generators-that-succeeded ;
+ for local g in $(viable-generators)
+ {
+ # This variable will be restored on exit from this scope.
+ local .active-generators = $(g) $(.active-generators) ;
+
+ local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type)
+ : $(property-set) : $(sources) ] ;
+
+ if $(r)
+ {
+ generators-that-succeeded += $(g) ;
+ if $(result)
+ {
+ ECHO "Error: ambiguity found when searching for best transformation" ;
+ ECHO "Trying to produce type '$(target-type)' from: " ;
+ for local s in $(sources)
+ {
+ ECHO " - " [ $(s).str ] ;
+ }
+ ECHO "Generators that succeeded:" ;
+ for local g in $(generators-that-succeeded)
+ {
+ ECHO " - " [ $(g).id ] ;
+ }
+ ECHO "First generator produced: " ;
+ for local t in $(result[2-])
+ {
+ ECHO " - " [ $(t).str ] ;
+ }
+ ECHO "Second generator produced: " ;
+ for local t in $(r[2-])
+ {
+ ECHO " - " [ $(t).str ] ;
+ }
+ EXIT ;
+ }
+ else
+ {
+ result = $(r) ;
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Attempts to create a target of 'target-type' with 'properties' from 'sources'.
+# The 'sources' are treated as a collection of *possible* ingredients, i.e.
+# there is no obligation to consume them all.
+#
+# Returns a list of targets. When this invocation is the first instance of
+# 'construct' in the stack, returns only targets of the requested
+# 'target-type'; otherwise, also returns unused sources and additionally
+# generated targets.
+#
+# If 'top-level' is set, does not suppress generators that are already
+# used in the stack. This may be useful in cases where a generator
+# has to build a metatarget -- for example, a target corresponding to a
+# built tool.
+#
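+# For illustration (hypothetical values), a typical invocation looks like:
+#
+#   local result = [ generators.construct $(project) $(name) : EXE
+#       : $(property-set) : $(sources) ] ;
+#
+# where the first element of the result holds the usage requirements (a
+# property-set) and the remaining elements are the created targets.
+#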
+rule construct ( project name ? : target-type : property-set * : sources * : top-level ? )
+{
+ local saved-active ;
+ if $(top-level)
+ {
+ saved-active = $(.active-generators) ;
+ .active-generators = ;
+ }
+
+ if $(.construct-stack)
+ {
+ ensure-type $(sources) ;
+ }
+
+ .construct-stack += 1 ;
+
+ increase-indent ;
+
+ if $(.debug)
+ {
+ generators.dout [ indent ] "*** construct" $(target-type) ;
+
+ for local s in $(sources)
+ {
+ generators.dout [ indent ] " from" $(s) ;
+ }
+ generators.dout [ indent ] " properties:" [ $(property-set).raw ] ;
+ }
+
+ local result = [ construct-really $(project) $(name) : $(target-type) :
+ $(property-set) : $(sources) ] ;
+
+ decrease-indent ;
+
+ .construct-stack = $(.construct-stack[2-]) ;
+
+ if $(top-level)
+ {
+ .active-generators = $(saved-active) ;
+ }
+
+ return $(result) ;
+}
+
+# Given 'result', obtained from some generator or generators.construct, adds
+# 'raw-properties' as usage requirements to it. If the result already contains
+# usage requirements -- that is, if the first element of the result is an
+# instance of the property-set class -- the existing usage requirements and
+# 'raw-properties' are combined.
+#
+rule add-usage-requirements ( result * : raw-properties * )
+{
+ if $(result)
+ {
+ if [ class.is-a $(result[1]) : property-set ]
+ {
+ return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
+ }
+ else
+ {
+ return [ property-set.create $(raw-properties) ] $(result) ;
+ }
+ }
+}
+
+rule dump ( )
+{
+ for local g in $(.all-generators)
+ {
+ ECHO [ $(g).id ] ":" [ $(g).source-types ] -> [ $(g).target-types ] ;
+ }
+}
+
diff --git a/jam-files/boost-build/build/modifiers.jam b/jam-files/boost-build/build/modifiers.jam
new file mode 100644
index 000000000..6b0093433
--- /dev/null
+++ b/jam-files/boost-build/build/modifiers.jam
@@ -0,0 +1,232 @@
+# Copyright 2003 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Modifiers are generalized generators that mutate targets in specific ways.
+# This structure allows for grouping a variety of functionality in an
+# orthogonal way to the functionality in toolsets, and without specifying
+# more target variations. In turn the modifiers can be used as building
+# blocks to implement simple requests, like the <version> feature.
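+#
+# For illustration (hypothetical class, __init__ omitted), a concrete modifier
+# derives from the 'modifier' class below and overrides one of its hooks:
+#
+#   class my-modifier : modifier
+#   {
+#       rule modify-name-before ( project name ? : property-set : sources + )
+#       {
+#           return $(name)_tagged ;
+#       }
+#   }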
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import property ;
+import virtual-target ;
+import numbers ;
+import sequence ;
+import symlink ;
+import property-set ;
+
+# Base generator for creating targets that are modifications of existing
+# targets.
+#
+class modifier : generator
+{
+ rule __init__ (
+ id
+ composing ?
+ : source-types *
+ : target-types-and-names +
+ : requirements *
+ )
+ {
+ generator.__init__ $(id) $(composing)
+ : $(source-types)
+ : $(target-types-and-names)
+ : $(requirements) ;
+
+ self.targets-in-progress = ;
+ }
+
+ # Wraps the generation of the target to call before and after rules to
+ # affect the real target.
+ #
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result ;
+ local current-target = $(project)^$(name) ;
+ if ! $(current-target) in $(self.targets-in-progress)
+ {
+ # Before modifications...
+ local project_ =
+ [ modify-project-before
+ $(project) $(name) : $(property-set) : $(sources) ] ;
+ local name_ =
+ [ modify-name-before
+ $(project) $(name) : $(property-set) : $(sources) ] ;
+ local property-set_ =
+ [ modify-properties-before
+ $(project) $(name) : $(property-set) : $(sources) ] ;
+ local sources_ =
+ [ modify-sources-before
+ $(project) $(name) : $(property-set) : $(sources) ] ;
+ project = $(project_) ;
+ name = $(name_) ;
+ property-set = $(property-set_) ;
+ sources = $(sources_) ;
+
+ # Generate the real target...
+ local target-type-p =
+ [ property.select <main-target-type> : [ $(property-set).raw ] ] ;
+ self.targets-in-progress += $(current-target) ;
+ result =
+ [ generators.construct $(project) $(name)
+ : $(target-type-p:G=)
+ : $(property-set)
+ : $(sources) ] ;
+ self.targets-in-progress = $(self.targets-in-progress[1--2]) ;
+
+ # After modifications...
+ result =
+ [ modify-target-after $(result)
+ : $(project) $(name)
+ : $(property-set)
+ : $(sources) ] ;
+ }
+ return $(result) ;
+ }
+
+ rule modify-project-before ( project name ? : property-set : sources + )
+ {
+ return $(project) ;
+ }
+
+ rule modify-name-before ( project name ? : property-set : sources + )
+ {
+ return $(name) ;
+ }
+
+ rule modify-properties-before ( project name ? : property-set : sources + )
+ {
+ return $(property-set) ;
+ }
+
+ rule modify-sources-before ( project name ? : property-set : sources + )
+ {
+ return $(sources) ;
+ }
+
+ rule modify-target-after ( target : project name ? : property-set : sources + )
+ {
+ return $(target) ;
+ }
+
+ # Utility, clones a file-target with optional changes to the name, type and
+ # project of the target.
+ # NOTE: This functionality should be moved, and generalized, to
+ # virtual-targets.
+ #
+ rule clone-file-target ( target : new-name ? : new-type ? : new-project ? )
+ {
+ # Need a MUCH better way to clone a target...
+ new-name ?= [ $(target).name ] ;
+ new-type ?= [ $(target).type ] ;
+ new-project ?= [ $(target).project ] ;
+ local result = [ new file-target $(new-name) : $(new-type) : $(new-project) ] ;
+
+ if [ $(target).dependencies ] { $(result).depends [ $(target).dependencies ] ; }
+ $(result).root [ $(target).root ] ;
+ $(result).set-usage-requirements [ $(target).usage-requirements ] ;
+
+ local action = [ $(target).action ] ;
+ local action-class = [ modules.peek $(action) : __class__ ] ;
+
+ local ps = [ $(action).properties ] ;
+ local cloned-action = [ new $(action-class) $(result) :
+ [ $(action).sources ] : [ $(action).action-name ] : $(ps) ] ;
+ $(result).action $(cloned-action) ;
+
+ return $(result) ;
+ }
+}
+
+
+# A modifier that changes the name of a target, after it is generated, given a
+# regular expression to split the name and a set of tokens to insert between
+# the split tokens of the name. This also optionally exposes the target for
+# other uses with a symlink to the original name.
+#
+class name-modifier : modifier
+{
+ rule __init__ ( )
+ {
+ # Apply ourselves to EXE and LIB targets, for now.
+ modifier.__init__ name.modifier : : EXE LIB : <name-modify>yes ;
+ }
+
+ # Modifies the name, by cloning the target with the new name.
+ #
+ rule modify-target-after ( target : project name ? : property-set : sources + )
+ {
+ local result = $(target) ;
+
+ local name-mod-p = [ property.select <name-modifier> : [ $(property-set).raw ] ] ;
+ if $(name-mod-p)
+ {
+ local new-name = [ modify-name [ $(target).name ] : $(name-mod-p:G=) ] ;
+ if $(new-name) != [ $(target).name ]
+ {
+ result = [ clone-file-target $(target) : $(new-name) ] ;
+ }
+ local expose-original-as-symlink = [ MATCH "<symlink>(.*)" : $(name-mod-p) ] ;
+ if $(expose-original-as-symlink)
+ {
+ local symlink-t = [ new symlink-targets $(project) : $(name) : [ $(result).name ] ] ;
+ result = [ $(symlink-t).construct $(result)
+ : [ property-set.create [ $(property-set).raw ] <symlink-location>build-relative ] ] ;
+ }
+ }
+
+ return $(result) ;
+ }
+
+ # Do the transformation of the name.
+ #
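+ # For example (illustrative values):
+ #   modify-name libfoo.so : <match>"^([^.]*)(.*)" <2>.1.2.3
+ # splits "libfoo.so" into "libfoo" and ".so" and inserts ".1.2.3" before the
+ # second part, yielding "libfoo.1.2.3.so".
+ #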
+ rule modify-name ( name : modifier-spec + )
+ {
+ local match = [ MATCH "<match>(.*)" : $(modifier-spec) ] ;
+ local name-parts = [ MATCH $(match) : $(name) ] ;
+ local insertions = [ sequence.insertion-sort [ MATCH "(<[0123456789]+>.*)" : $(modifier-spec) ] ] ;
+ local new-name-parts ;
+ local insert-position = 1 ;
+ while $(insertions)
+ {
+ local insertion = [ MATCH "<$(insert-position)>(.*)" : $(insertions[1]) ] ;
+ if $(insertion)
+ {
+ new-name-parts += $(insertion) ;
+ insertions = $(insertions[2-]) ;
+ }
+ new-name-parts += $(name-parts[1]) ;
+ name-parts = $(name-parts[2-]) ;
+ insert-position = [ numbers.increment $(insert-position) ] ;
+ }
+ new-name-parts += $(name-parts) ;
+ return [ sequence.join $(new-name-parts) ] ;
+ }
+
+ rule optional-properties ( )
+ {
+ return <name-modify>yes ;
+ }
+}
+feature.feature name-modifier : : free ;
+feature.feature name-modify : no yes : incidental optional ;
+generators.register [ new name-modifier ] ;
+
+# Translates the <version> property to a set of modification properties
+# that are applied by the name-modifier and symlink-modifier.
+#
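+# For example (illustrative version), <version>1.2.3 expands to properties that
+# insert ".1.2.3" after the first component of the generated file name and
+# expose the unversioned name as a symlink.
+#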
+rule version-to-modifier ( property : properties * )
+{
+ return
+ <name-modify>yes
+ <name-modifier><match>"^([^.]*)(.*)" <name-modifier><2>.$(property:G=)
+ <name-modifier><symlink>yes
+ ;
+}
+feature.action <version> : version-to-modifier ;
diff --git a/jam-files/boost-build/build/project.jam b/jam-files/boost-build/build/project.jam
new file mode 100644
index 000000000..c9967613b
--- /dev/null
+++ b/jam-files/boost-build/build/project.jam
@@ -0,0 +1,1110 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements project representation and loading. Each project is represented by:
+# - a module where all the Jamfile content lives.
+# - an instance of 'project-attributes' class.
+# (given a module name, can be obtained using the 'attributes' rule)
+# - an instance of 'project-target' class (from targets.jam)
+# (given a module name, can be obtained using the 'target' rule)
+#
+# Typically, projects are created as a result of loading a Jamfile, which is
+# done by the rules 'load' and 'initialize', below. First, the module for the
+# Jamfile is loaded and a new project-attributes instance is created. Some
+# rules necessary for the project are added to the module (see the
+# 'project-rules' module at the bottom of this file). Default project
+# attributes are set (inheriting attributes of the parent project, if it
+# exists). After that, the Jamfile is read. It can declare its own attributes
+# using the 'project' rule, which will be combined with any already set
+# attributes.
+#
+# The 'project' rule can also declare a project id which will be associated with
+# the project module.
+#
+# There can also be 'standalone' projects. They are created by calling
+# 'initialize' on an arbitrary module and not specifying their location. After
+# the call, the module can call the 'project' rule, declare main targets and
+# behave as a regular project except that, since it is not associated with any
+# location, it should not declare targets that are not prebuilt.
+#
+# The list of all loaded Jamfiles is stored in the .project-locations variable.
+# It is possible to obtain a module name for a location using the 'module-name'
+# rule. Standalone projects are not recorded and can only be referenced using
+# their project id.
+
+import "class" : new ;
+import errors ;
+import modules ;
+import path ;
+import print ;
+import property-set ;
+import sequence ;
+
+
+# Loads the Jamfile at the given location. After loading, the project global
+# file and any Jamfiles needed by the requested one will be loaded recursively.
+# If the Jamfile at that location is loaded already, does nothing. Returns the
+# project module for the Jamfile.
+#
+rule load ( jamfile-location )
+{
+ if --debug-loading in [ modules.peek : ARGV ]
+ {
+ ECHO "Loading Jamfile at" '$(jamfile-location)' ;
+ }
+
+ local module-name = [ module-name $(jamfile-location) ] ;
+ # If Jamfile is already loaded, don't try again.
+ if ! $(module-name) in $(.jamfile-modules)
+ {
+ load-jamfile $(jamfile-location) : $(module-name) ;
+
+ # We want to make sure that child projects are loaded only after their
+ # parent projects, because parent projects define attributes
+ # which are inherited by children, and we do not want children to be
+ # loaded before the parent has defined everything.
+ #
+ # While "build-project" and "use-project" can potentially refer to child
+ # projects from parent projects, we don't immediately load child
+ # projects when seeing those attributes. Instead, we record the minimal
+ # information to be used only later.
+ load-used-projects $(module-name) ;
+ }
+ return $(module-name) ;
+}
+
+
+rule load-used-projects ( module-name )
+{
+ local used = [ modules.peek $(module-name) : .used-projects ] ;
+ local location = [ attribute $(module-name) location ] ;
+ import project ;
+ while $(used)
+ {
+ local id = $(used[1]) ;
+ local where = $(used[2]) ;
+
+ project.use $(id) : [ path.root [ path.make $(where) ] $(location) ] ;
+ used = $(used[3-]) ;
+ }
+}
+
+
+# Note the use of character groups, as opposed to listing 'Jamroot' and
+# 'jamroot'. With the latter, we would get duplicate matches on Windows and
+# would have to eliminate duplicates.
+JAMROOT ?= [ modules.peek : JAMROOT ] ;
+JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ;
+
+
+# Loads parent of Jamfile at 'location'. Issues an error if nothing is found.
+#
+rule load-parent ( location )
+{
+ local found = [ path.glob-in-parents $(location) : $(JAMROOT) $(JAMFILE) ] ;
+
+ if ! $(found)
+ {
+ ECHO error: Could not find parent for project at '$(location)' ;
+ EXIT error: Did not find Jamfile.jam or Jamroot.jam in any parent
+ directory. ;
+ }
+
+ return [ load $(found[1]:D) ] ;
+}
+
+
+# Makes the specified 'module' act as if it were a regularly loaded Jamfile at
+# 'location'. Reports an error if a Jamfile has already been loaded for that
+# location.
+#
+rule act-as-jamfile ( module : location )
+{
+ if [ module-name $(location) ] in $(.jamfile-modules)
+ {
+ errors.error "Jamfile was already loaded for '$(location)'" ;
+ }
+ # Set up non-default mapping from location to module.
+ .module.$(location) = $(module) ;
+
+ # Add the location to the list of project locations so that we don't try to
+ # reload the same Jamfile in the future.
+ .jamfile-modules += [ module-name $(location) ] ;
+
+ initialize $(module) : $(location) ;
+}
+
+
+# Returns the project module corresponding to the given project-id or plain
+# directory name. Returns nothing if such a project can not be found.
+#
+rule find ( name : current-location )
+{
+ local project-module ;
+
+ # Try interpreting name as project id.
+ if [ path.is-rooted $(name) ]
+ {
+ project-module = $($(name).jamfile-module) ;
+ }
+
+ if ! $(project-module)
+ {
+ local location = [ path.root [ path.make $(name) ] $(current-location) ]
+ ;
+
+ # If no project is registered for the given location, try to load it.
+ # First see if we have a Jamfile. If not, then see if we might have a
+ # project root willing to act as a Jamfile. In that case, the project
+ # root must be placed in the directory referred to by the id.
+
+ project-module = [ module-name $(location) ] ;
+ if ! $(project-module) in $(.jamfile-modules)
+ {
+ if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ]
+ {
+ project-module = [ load $(location) ] ;
+ }
+ else
+ {
+ project-module = ;
+ }
+ }
+ }
+
+ return $(project-module) ;
+}
+
+
+# Returns the name of the module corresponding to 'jamfile-location'. If no
+# module corresponds to that location yet, associates the default module name
+# with that location.
+#
+rule module-name ( jamfile-location )
+{
+ if ! $(.module.$(jamfile-location))
+ {
+ # Root the path, so that locations are always unambiguous. Without this,
+ # we can't decide if '../../exe/program1' and '.' are the same paths.
+ jamfile-location = [ path.root $(jamfile-location) [ path.pwd ] ] ;
+ .module.$(jamfile-location) = Jamfile<$(jamfile-location)> ;
+ }
+ return $(.module.$(jamfile-location)) ;
+}
+
+
+# Default patterns to search for the Jamfiles to use for build declarations.
+#
+JAMFILE = [ modules.peek : JAMFILE ] ;
+JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ;
+
+
+# Find the Jamfile at the given location. This returns the exact names of all
+# the Jamfiles in the given directory. The optional parent-root argument causes
+# this to search not the given directory but the ones above it up to the
+# directory given in it.
+#
+rule find-jamfile (
+ dir # The directory(s) to look for a Jamfile.
+ parent-root ? # Optional flag indicating to search for the parent Jamfile.
+ : no-errors ?
+ )
+{
+ # Glob for all the possible Jamfiles according to the match pattern.
+ #
+ local jamfile-glob = ;
+ if $(parent-root)
+ {
+ if ! $(.parent-jamfile.$(dir))
+ {
+ .parent-jamfile.$(dir) = [ path.glob-in-parents $(dir) : $(JAMFILE)
+ ] ;
+ }
+ jamfile-glob = $(.parent-jamfile.$(dir)) ;
+ }
+ else
+ {
+ if ! $(.jamfile.$(dir))
+ {
+ .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ;
+ }
+ jamfile-glob = $(.jamfile.$(dir)) ;
+
+ }
+
+ local jamfile-to-load = $(jamfile-glob) ;
+ # Multiple Jamfiles found in the same place. Warn about this and ensure we
+ # use only one of them. As a temporary convenience measure, if there is a
+ # Jamfile.v2 among the found files, suppress the warning and use it.
+ #
+ if $(jamfile-to-load[2-])
+ {
+ local v2-jamfiles = [ MATCH (.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam) : $(jamfile-to-load) ] ;
+
+ if $(v2-jamfiles) && ! $(v2-jamfiles[2])
+ {
+ jamfile-to-load = $(v2-jamfiles) ;
+ }
+ else
+ {
+ local jamfile = [ path.basename $(jamfile-to-load[1]) ] ;
+ ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!"
+ "Loading the first one: '$(jamfile)'." ;
+ }
+
+ jamfile-to-load = $(jamfile-to-load[1]) ;
+ }
+
+ # Could not find it, error.
+ #
+ if ! $(no-errors) && ! $(jamfile-to-load)
+ {
+ errors.error Unable to load Jamfile.
+ : Could not find a Jamfile in directory '$(dir)'.
+ : Attempted to find it with pattern '"$(JAMFILE:J=" ")"'.
+ : Please consult the documentation at 'http://www.boost.org'. ;
+ }
+
+ return $(jamfile-to-load) ;
+}
+
+
+# Load a Jamfile at the given directory. Returns nothing. Will attempt to load
+# the file as indicated by the JAMFILE patterns. Effect of calling this rule
+# twice with the same 'dir' is undefined.
+#
+local rule load-jamfile (
+ dir # The directory of the project Jamfile.
+ : jamfile-module
+ )
+{
+ # See if the Jamfile is where it should be.
+ #
+ local jamfile-to-load = [ path.glob $(dir) : $(JAMROOT) ] ;
+ if ! $(jamfile-to-load)
+ {
+ jamfile-to-load = [ find-jamfile $(dir) ] ;
+ }
+
+ if $(jamfile-to-load[2])
+ {
+ errors.error "Multiple Jamfiles found at '$(dir)'"
+ : "Filenames are: " $(jamfile-to-load:D=) ;
+ }
+
+ # Now load the Jamfile in its own context.
+ # The call to 'initialize' may load the parent Jamfile, which might have
+ # a 'use-project' statement that causes a second attempt to load the
+ # same project we are loading now. Checking inside .jamfile-modules
+ # prevents that second attempt from messing things up.
+ if ! $(jamfile-module) in $(.jamfile-modules)
+ {
+ .jamfile-modules += $(jamfile-module) ;
+
+ # Initialize the Jamfile module before loading.
+ #
+ initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ]
+ : $(jamfile-to-load:BS) ;
+
+ local saved-project = $(.current-project) ;
+
+ mark-as-user $(jamfile-module) ;
+ modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] : . ;
+ if [ MATCH ($(JAMROOT)) : $(jamfile-to-load:BS) ]
+ {
+ jamfile = [ find-jamfile $(dir) : no-errors ] ;
+ if $(jamfile)
+ {
+ load-aux $(jamfile-module) : [ path.native $(jamfile) ] ;
+ }
+ }
+
+ # Now do some checks.
+ if $(.current-project) != $(saved-project)
+ {
+ errors.error "The value of the .current-project variable has magically"
+ : "changed after loading a Jamfile. This means some of the targets"
+ : "might be defined in the wrong project."
+ : "after loading" $(jamfile-module)
+ : "expected value" $(saved-project)
+ : "actual value" $(.current-project) ;
+ }
+
+ if $(.global-build-dir)
+ {
+ local id = [ attribute $(jamfile-module) id ] ;
+ local project-root = [ attribute $(jamfile-module) project-root ] ;
+ local location = [ attribute $(jamfile-module) location ] ;
+
+ if $(location) && $(project-root) = $(dir)
+ {
+ # This is Jamroot.
+ if ! $(id)
+ {
+ ECHO "warning: the --build-dir option was specified" ;
+ ECHO "warning: but Jamroot at '$(dir)'" ;
+ ECHO "warning: specified no project id" ;
+ ECHO "warning: the --build-dir option will be ignored" ;
+ }
+ }
+ }
+ }
+}
+
+
+rule mark-as-user ( module-name )
+{
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
+}
+
+
+rule load-aux ( module-name : file )
+{
+ mark-as-user $(module-name) ;
+
+ module $(module-name)
+ {
+ include $(2) ;
+ local rules = [ RULENAMES $(1) ] ;
+ IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
+ }
+}
+
+
+.global-build-dir = [ MATCH --build-dir=(.*) : [ modules.peek : ARGV ] ] ;
+if $(.global-build-dir)
+{
+ # If the option is specified several times, take the last value.
+ .global-build-dir = [ path.make $(.global-build-dir[-1]) ] ;
+}
+
+
+# Initialize the module for a project.
+#
+rule initialize (
+ module-name # The name of the project module.
+ : location ? # The location (directory) of the project to initialize. If
+ # not specified, a standalone project will be initialized.
+ : basename ?
+ )
+{
+ if --debug-loading in [ modules.peek : ARGV ]
+ {
+ ECHO "Initializing project '$(module-name)'" ;
+ }
+
+ # TODO: need to consider if standalone projects can do anything but define
+ # prebuilt targets. If so, we need to give it a more sensible "location", so
+ # that source paths are correct.
+ location ?= "" ;
+ # Create the module for the Jamfile first.
+ module $(module-name)
+ {
+ }
+ $(module-name).attributes = [ new project-attributes $(location)
+ $(module-name) ] ;
+ local attributes = $($(module-name).attributes) ;
+
+ if $(location)
+ {
+ $(attributes).set source-location : [ path.make $(location) ] : exact ;
+ }
+ else if ! $(module-name) in test-config site-config user-config project-config
+ {
+ # This is a standalone project with a known location. Set the source
+ # location so that it can declare targets. This is intended so that you
+ # can put a .jam file in your sources and use it via 'using'. Standard
+ # modules (in the 'tools' subdir) may not assume the source dir is set.
+ local s = [ modules.binding $(module-name) ] ;
+ if ! $(s)
+ {
+ errors.error "Could not determine project location $(module-name)" ;
+ }
+ $(attributes).set source-location : $(s:D) : exact ;
+ }
+
+ $(attributes).set requirements : [ property-set.empty ] : exact ;
+ $(attributes).set usage-requirements : [ property-set.empty ] : exact ;
+
+ # Import rules common to all project modules from project-rules module,
+ # defined at the end of this file.
+ local rules = [ RULENAMES project-rules ] ;
+ IMPORT project-rules : $(rules) : $(module-name) : $(rules) ;
+
+ local jamroot ;
+
+ local parent-module ;
+ if $(module-name) = test-config
+ {
+ # No parent.
+ }
+ else if $(module-name) = site-config
+ {
+ parent-module = test-config ;
+ }
+ else if $(module-name) = user-config
+ {
+ parent-module = site-config ;
+ }
+ else if $(module-name) = project-config
+ {
+ parent-module = user-config ;
+ }
+ else
+ {
+ # We search for the parent/project-root only if a Jamfile was specified, i.e.
+ # if the project is not standalone.
+ if $(location) && ! [ MATCH ($(JAMROOT)) : $(basename) ]
+ {
+ parent-module = [ load-parent $(location) ] ;
+ }
+ else
+ {
+ # It is either a Jamroot or a standalone project. If it is a Jamroot,
+ # inherit from user-config.
+ if $(location)
+ {
+ # If the project-config module exists, inherit from it.
+ if $(project-config.attributes)
+ {
+ parent-module = project-config ;
+ }
+ else
+ {
+ parent-module = user-config ;
+ }
+ jamroot = true ;
+ }
+ }
+ }
+
+ if $(parent-module)
+ {
+ inherit-attributes $(module-name) : $(parent-module) ;
+ $(attributes).set parent-module : $(parent-module) : exact ;
+ }
+
+ if $(jamroot)
+ {
+ $(attributes).set project-root : $(location) : exact ;
+ }
+
+ local parent ;
+ if $(parent-module)
+ {
+ parent = [ target $(parent-module) ] ;
+ }
+
+ if ! $(.target.$(module-name))
+ {
+ .target.$(module-name) = [ new project-target $(module-name)
+ : $(module-name) $(parent)
+ : [ attribute $(module-name) requirements ] ] ;
+
+ if --debug-loading in [ modules.peek : ARGV ]
+ {
+ ECHO "Assigned project target" $(.target.$(module-name))
+ "to '$(module-name)'" ;
+ }
+ }
+
+ .current-project = [ target $(module-name) ] ;
+}
+
+
+# Make 'project-module' inherit attributes of project root and parent module.
+#
+rule inherit-attributes ( project-module : parent-module )
+{
+ local attributes = $($(project-module).attributes) ;
+ local pattributes = [ attributes $(parent-module) ] ;
+ # The parent module might be a locationless configuration module.
+ if [ modules.binding $(parent-module) ]
+ {
+ $(attributes).set parent : [ path.parent
+ [ path.make [ modules.binding $(parent-module) ] ] ] ;
+ }
+ local v = [ $(pattributes).get project-root ] ;
+ $(attributes).set project-root : $(v) : exact ;
+ $(attributes).set default-build
+ : [ $(pattributes).get default-build ] ;
+ $(attributes).set requirements
+ : [ $(pattributes).get requirements ] : exact ;
+ $(attributes).set usage-requirements
+ : [ $(pattributes).get usage-requirements ] : exact ;
+
+ local parent-build-dir = [ $(pattributes).get build-dir ] ;
+ if $(parent-build-dir)
+ {
+ # Have to compute the relative path from the parent dir to our dir.
+ # Convert both paths to absolute, since we cannot find a relative path
+ # from ".." to ".".
+
+ local location = [ attribute $(project-module) location ] ;
+ local parent-location = [ attribute $(parent-module) location ] ;
+
+ local pwd = [ path.pwd ] ;
+ local parent-dir = [ path.root $(parent-location) $(pwd) ] ;
+ local our-dir = [ path.root $(location) $(pwd) ] ;
+ $(attributes).set build-dir : [ path.join $(parent-build-dir)
+ [ path.relative $(our-dir) $(parent-dir) ] ] : exact ;
+ }
+}
+
+
+# Associate the given id with the given project module.
+#
+rule register-id ( id : module )
+{
+ $(id).jamfile-module = $(module) ;
+}
+
+
+# Class keeping all the attributes of a project.
+#
+# The standard attributes are "id", "location", "project-root", "parent",
+# "requirements", "default-build", "source-location" and "projects-to-build".
+#
+class project-attributes
+{
+ import property ;
+ import property-set ;
+ import errors ;
+ import path ;
+ import print ;
+ import sequence ;
+ import project ;
+
+ rule __init__ ( location project-module )
+ {
+ self.location = $(location) ;
+ self.project-module = $(project-module) ;
+ }
+
+ # Set the named attribute from the specification given by the user. The
+ # value actually set may be different.
+ #
+ rule set ( attribute : specification *
+ : exact ? # Sets value from 'specification' without any processing.
+ )
+ {
+ if $(exact)
+ {
+ self.$(attribute) = $(specification) ;
+ }
+ else if $(attribute) = "requirements"
+ {
+ local result = [ property-set.refine-from-user-input
+ $(self.requirements) : $(specification)
+ : $(self.project-module) : $(self.location) ] ;
+
+ if $(result[1]) = "@error"
+ {
+ errors.error Requirements for project at '$(self.location)'
+ conflict with parent's. : Explanation: $(result[2-]) ;
+ }
+ else
+ {
+ self.requirements = $(result) ;
+ }
+ }
+ else if $(attribute) = "usage-requirements"
+ {
+ local unconditional ;
+ for local p in $(specification)
+ {
+ local split = [ property.split-conditional $(p) ] ;
+ split ?= nothing $(p) ;
+ unconditional += $(split[2]) ;
+ }
+
+ local non-free = [ property.remove free : $(unconditional) ] ;
+ if $(non-free)
+ {
+ errors.error usage-requirements $(specification) have non-free
+ properties $(non-free) ;
+ }
+ local t = [ property.translate-paths $(specification)
+ : $(self.location) ] ;
+ if $(self.usage-requirements)
+ {
+ self.usage-requirements = [ property-set.create
+ [ $(self.usage-requirements).raw ] $(t) ] ;
+ }
+ else
+ {
+ self.usage-requirements = [ property-set.create $(t) ] ;
+ }
+ }
+ else if $(attribute) = "default-build"
+ {
+ self.default-build = [ property.make $(specification) ] ;
+ }
+ else if $(attribute) = "source-location"
+ {
+ self.source-location = ;
+ for local src-path in $(specification)
+ {
+ self.source-location += [ path.root [ path.make $(src-path) ]
+ $(self.location) ] ;
+ }
+ }
+ else if $(attribute) = "build-dir"
+ {
+ self.build-dir = [ path.root
+ [ path.make $(specification) ] $(self.location) ] ;
+ }
+ else if $(attribute) = "id"
+ {
+ id = [ path.root $(specification) / ] ;
+ project.register-id $(id) : $(self.project-module) ;
+ self.id = $(id) ;
+ }
+ else if ! $(attribute) in "default-build" "location" "parent"
+ "projects-to-build" "project-root" "source-location"
+ {
+ errors.error Invalid project attribute '$(attribute)' specified for
+ project at '$(self.location)' ;
+ }
+ else
+ {
+ self.$(attribute) = $(specification) ;
+ }
+ }
+
+ # Returns the value of the given attribute.
+ #
+ rule get ( attribute )
+ {
+ return $(self.$(attribute)) ;
+ }
+
+ # Prints the project attributes.
+ #
+ rule print ( )
+ {
+ local id = $(self.id) ; id ?= (none) ;
+ local parent = $(self.parent) ; parent ?= (none) ;
+ print.section "'"$(id)"'" ;
+ print.list-start ;
+ print.list-item "Parent project:" $(parent) ;
+ print.list-item "Requirements:" [ $(self.requirements).raw ] ;
+ print.list-item "Default build:" $(self.default-build) ;
+ print.list-item "Source location:" $(self.source-location) ;
+ print.list-item "Projects to build:"
+ [ sequence.insertion-sort $(self.projects-to-build) ] ;
+ print.list-end ;
+ }
+}
+
+
+# Returns the project which is currently being loaded.
+#
+rule current ( )
+{
+ return $(.current-project) ;
+}
+
+
+# Temporarily changes the current project to 'project'. Should be followed by
+# 'pop-current'.
+#
+rule push-current ( project )
+{
+ .saved-current-project += $(.current-project) ;
+ .current-project = $(project) ;
+}
+
+
+rule pop-current ( )
+{
+ .current-project = $(.saved-current-project[-1]) ;
+ .saved-current-project = $(.saved-current-project[1--2]) ;
+}
+
+
+# Returns the project-attributes instance for the specified Jamfile module.
+#
+rule attributes ( project )
+{
+ return $($(project).attributes) ;
+}
+
+
+# Returns the value of the specified attribute in the specified Jamfile module.
+#
+rule attribute ( project attribute )
+{
+ return [ $($(project).attributes).get $(attribute) ] ;
+}
+
+
+# Returns the project target corresponding to the 'project-module'.
+#
+rule target ( project-module )
+{
+ if ! $(.target.$(project-module))
+ {
+ .target.$(project-module) = [ new project-target $(project-module)
+ : $(project-module)
+ : [ attribute $(project-module) requirements ] ] ;
+ }
+ return $(.target.$(project-module)) ;
+}
+
+
+# Use/load a project.
+#
+rule use ( id : location )
+{
+ local saved-project = $(.current-project) ;
+ local project-module = [ project.load $(location) ] ;
+ local declared-id = [ project.attribute $(project-module) id ] ;
+
+ if ! $(declared-id) || $(declared-id) != $(id)
+ {
+ # The project at 'location' either has no id or that id is not equal to
+ # the 'id' parameter.
+ if $($(id).jamfile-module) && ( $($(id).jamfile-module) !=
+ $(project-module) )
+ {
+ errors.user-error Attempt to redeclare already existing project id
+ '$(id)'
+ location '$(location)' ;
+ }
+ $(id).jamfile-module = $(project-module) ;
+ }
+ .current-project = $(saved-project) ;
+}
+
+
+# Defines a Boost.Build extension project. Such extensions usually contain
+# library targets and features that can be used by many people. Even though
+# extensions are really projects, they can be initialized as a module would be
+# with the "using" (project.project-rules.using) mechanism.
+#
+rule extension ( id : options * : * )
+{
+ # The caller is a standalone module for the extension.
+ local mod = [ CALLER_MODULE ] ;
+
+ # We need to do the rest within the extension module.
+ module $(mod)
+ {
+ import path ;
+
+ # Find the root project.
+ local root-project = [ project.current ] ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config
+ {
+ root-project = [ project.attribute $(root-project) parent-module ] ;
+ }
+
+ # Create the project data, and bring the project rules into the
+ # module.
+ project.initialize $(__name__) : [ path.join [ project.attribute
+ $(root-project) location ] ext $(1:L) ] ;
+
+ # Create the project itself, i.e. the attributes. All extensions are
+ # created in the "/ext" project space.
+ project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) ;
+ local attributes = [ project.attributes $(__name__) ] ;
+
+ # Inherit from the root project of whoever is defining us.
+ project.inherit-attributes $(__name__) : $(root-project) ;
+ $(attributes).set parent-module : $(root-project) : exact ;
+ }
+}
+
+
+rule glob-internal ( project : wildcards + : excludes * : rule-name )
+{
+ local location = [ $(project).get source-location ] ;
+
+ local result ;
+ local paths = [ path.$(rule-name) $(location) :
+ [ sequence.transform path.make : $(wildcards) ] :
+ [ sequence.transform path.make : $(excludes) ] ] ;
+ if $(wildcards:D) || $(rule-name) != glob
+ {
+ # The paths we have found are relative to the current directory, but the
+ # names specified in the sources list are assumed to be relative to the
+ # source directory of the corresponding project. So, just make the names
+ # absolute.
+ for local p in $(paths)
+ {
+ # If the path is below source location, use relative path.
+ # Otherwise, use full path just to avoid any ambiguities.
+ local rel = [ path.relative $(p) $(location) : no-error ] ;
+ if $(rel) = not-a-child
+ {
+ result += [ path.root $(p) [ path.pwd ] ] ;
+ }
+ else
+ {
+ result += $(rel) ;
+ }
+ }
+ }
+ else
+ {
+ # There were no wildcards in the directory path, so the files are all in
+ # the source directory of the project. Just drop the directory, instead
+ # of making paths absolute.
+ result = $(paths:D="") ;
+ }
+
+ return $(result) ;
+}
+
+
+# This module defines rules common to all projects.
+#
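+# For illustration (hypothetical Jamfile content), these are the rules that
+# make declarations such as the following work inside a Jamfile:
+#
+#   project my-lib : requirements <threading>multi : build-dir bin ;
+#   use-project /other : ../other ;
+#   build-project tests ;
+#   constant VERSION : 1.0 ;
+#   path-constant TOP : . ;
+#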
+module project-rules
+{
+ rule using ( toolset-module : * )
+ {
+ import toolset ;
+ import modules ;
+ import project ;
+
+ # Temporarily change the search path so the module referred to by
+ # 'using' can be placed in the same directory as the Jamfile. The user
+ # will expect the module to be found even though the directory is not in
+ # BOOST_BUILD_PATH.
+ local x = [ modules.peek : BOOST_BUILD_PATH ] ;
+ local caller = [ CALLER_MODULE ] ;
+ local caller-location = [ modules.binding $(caller) ] ;
+ modules.poke : BOOST_BUILD_PATH : $(caller-location:D) $(x) ;
+ toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ modules.poke : BOOST_BUILD_PATH : $(x) ;
+
+ # The above might have clobbered .current-project. Restore the correct
+ # value.
+ modules.poke project : .current-project
+ : [ project.target $(caller) ] ;
+ }
+
+ import modules ;
+
+ rule import ( * : * : * )
+ {
+ modules.import project ;
+
+ local caller = [ CALLER_MODULE ] ;
+ local saved = [ modules.peek project : .current-project ] ;
+ module $(caller)
+ {
+ modules.import $(1) : $(2) : $(3) ;
+ }
+ modules.poke project : .current-project : $(saved) ;
+ }
+
+ rule project ( id ? : options * : * )
+ {
+ import errors ;
+ import path ;
+ import project ;
+
+ local caller = [ CALLER_MODULE ] ;
+ local attributes = [ project.attributes $(caller) ] ;
+ if $(id)
+ {
+ $(attributes).set id : $(id) ;
+ }
+
+ local explicit-build-dir ;
+
+ for n in 2 3 4 5 6 7 8 9
+ {
+ local option = $($(n)) ;
+ if $(option)
+ {
+ $(attributes).set $(option[1]) : $(option[2-]) ;
+ }
+ if $(option[1]) = "build-dir"
+ {
+ explicit-build-dir = [ path.make $(option[2-]) ] ;
+ }
+ }
+
+ # If '--build-dir' is specified, change the build dir for the project.
+ local global-build-dir =
+ [ modules.peek project : .global-build-dir ] ;
+
+ if $(global-build-dir)
+ {
+ local location = [ $(attributes).get location ] ;
+ # A project with an empty location is a 'standalone' project, such as
+ # user-config or qt. It has no build dir. If we tried to set a build
+ # dir for user-config, we would then try to inherit it, with either
+ # weird or wrong consequences.
+ if $(location) && $(location) = [ $(attributes).get project-root ]
+ {
+ # Re-read the project id, since it might have been changed in
+ # the project's attributes.
+ id = [ $(attributes).get id ] ;
+ # This is Jamroot.
+ if $(id)
+ {
+ if $(explicit-build-dir) &&
+ [ path.is-rooted $(explicit-build-dir) ]
+ {
+ errors.user-error Absolute directory specified via
+ 'build-dir' project attribute : Do not know how to
+ combine that with the --build-dir option. ;
+ }
+ # Strip the leading slash from id.
+ local rid = [ MATCH /(.*) : $(id) ] ;
+ local p = [ path.join
+ $(global-build-dir) $(rid) $(explicit-build-dir) ] ;
+
+ $(attributes).set build-dir : $(p) : exact ;
+ }
+ }
+ else
+ {
+ # Not Jamroot.
+ if $(explicit-build-dir)
+ {
+ errors.user-error When --build-dir is specified, the
+ 'build-dir' project : attribute is allowed only for
+ top-level 'project' invocations ;
+ }
+ }
+ }
+ }
+
+ # Declare and set a project global constant. Project global constants are
+ # normal variables but should not be changed. They are applied to every
+ # child Jamfile.
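+ # For example (an illustrative sketch; the constant name and value are
+ # made up):
+ #
+ #   constant VERSION : 1.0 ;
+ #
+ # Every child Jamfile of this project would then see $(VERSION).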
+ #
+ rule constant (
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local p = [ project.target $(caller) ] ;
+ $(p).add-constant $(name) : $(value) ;
+ }
+
+ # Declare and set a project global constant, whose value is a path. The path
+ # is adjusted to be relative to the invocation directory. The given value
+ # path is taken to be either absolute, or relative to this project root.
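+ # For example (an illustrative sketch; the name and directory are made up):
+ #
+ #   path-constant DATA : data ;
+ #
+ # would resolve 'data' against this project's own directory, so every child
+ # Jamfile sees the same absolute location in $(DATA).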
+ #
+ rule path-constant (
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local p = [ project.target $(caller) ] ;
+ $(p).add-constant $(name) : $(value) : path ;
+ }
+
+ rule use-project ( id : where )
+ {
+ import modules ;
+ # See comment in 'load' for explanation.
+ local caller = [ CALLER_MODULE ] ;
+ modules.poke $(caller) : .used-projects :
+ [ modules.peek $(caller) : .used-projects ]
+ $(id) $(where) ;
+ }
+
+ rule build-project ( dir )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local attributes = [ project.attributes $(caller) ] ;
+
+ local now = [ $(attributes).get projects-to-build ] ;
+ $(attributes).set projects-to-build : $(now) $(dir) ;
+ }
+
+ rule explicit ( target-names * )
+ {
+ import project ;
+ # If 'explicit' is used in a helper rule defined in Jamroot and
+ # inherited by children, then most of the time we want 'explicit' to
+ # operate on the Jamfile where the helper rule is invoked.
+ local t = [ project.current ] ;
+ for local n in $(target-names)
+ {
+ $(t).mark-target-as-explicit $(n) ;
+ }
+ }
+
+ rule always ( target-names * )
+ {
+ import project ;
+ local t = [ project.current ] ;
+ for local n in $(target-names)
+ {
+ $(t).mark-target-as-always $(n) ;
+ }
+ }
+
+ rule glob ( wildcards + : excludes * )
+ {
+ import project ;
+ return [ project.glob-internal [ project.current ] : $(wildcards) :
+ $(excludes) : glob ] ;
+ }
+
+ rule glob-tree ( wildcards + : excludes * )
+ {
+ import project ;
+
+ if $(wildcards:D) || $(excludes:D)
+ {
+ errors.user-error The patterns to 'glob-tree' may not include
+ directory ;
+ }
+ return [ project.glob-internal [ project.current ] : $(wildcards) :
+ $(excludes) : glob-tree ] ;
+ }
+
+ # Calculates conditional requirements for multiple requirements at once.
+ # This is a shorthand to reduce duplication and to keep an inline
+ # declarative syntax. For example:
+ #
+ # lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
+ # <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
+ #
+ rule conditional ( condition + : requirements * )
+ {
+ local condition = $(condition:J=,) ;
+ if [ MATCH (:) : $(condition) ]
+ {
+ return $(condition)$(requirements) ;
+ }
+ else
+ {
+ return $(condition):$(requirements) ;
+ }
+ }
+
+ rule option ( name : value )
+ {
+ if $(__name__) != site-config && $(__name__) != user-config && $(__name__) != project-config
+ {
+ import errors ;
+ errors.error "The 'option' rule may be used only in site-config or user-config" ;
+ }
+ import option ;
+ option.set $(name) : $(value) ;
+ }
+}
diff --git a/jam-files/boost-build/build/property-set.jam b/jam-files/boost-build/build/property-set.jam
new file mode 100644
index 000000000..70fd90cde
--- /dev/null
+++ b/jam-files/boost-build/build/property-set.jam
@@ -0,0 +1,481 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import feature ;
+import path ;
+import project ;
+import property ;
+import sequence ;
+import set ;
+import option ;
+
+# Class for storing a set of properties.
+#
+# There is a 1<->1 correspondence between identity and value: no two distinct
+# instances of the class hold equal property sets. To maintain this invariant,
+# the 'property-set.create' rule should be used to create new instances.
+# Instances are immutable.
+#
+# Each property is classified with regard to its effect on build results.
+# Incidental properties have no effect on build results, from Boost.Build's
+# point of view. Others are either free, or non-free and we refer to non-free
+# ones as 'base'. Each property belongs to exactly one of those categories.
+#
+# It is possible to get a list of properties belonging to each category as
+# well as a list of properties with a specific attribute.
+#
+# Several operations, such as 'refine' and 'as-path', are provided. They all
+# use caching whenever possible.
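+#
+# A minimal usage sketch (the property values are illustrative only):
+#
+#   local ps = [ property-set.create <toolset>gcc <variant>debug ] ;
+#   ECHO [ $(ps).raw ] ;           # the stored properties
+#   ECHO [ $(ps).get <variant> ] ; # "debug"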
+#
+class property-set
+{
+ import errors ;
+ import feature ;
+ import path ;
+ import property ;
+ import property-set ;
+ import set ;
+
+ rule __init__ ( raw-properties * )
+ {
+ self.raw = $(raw-properties) ;
+
+ for local p in $(raw-properties)
+ {
+ if ! $(p:G)
+ {
+ errors.error "Invalid property: '$(p)'" ;
+ }
+
+ local att = [ feature.attributes $(p:G) ] ;
+ # A feature can be both incidental and free, in which case we add it
+ # to incidental.
+ if incidental in $(att)
+ {
+ self.incidental += $(p) ;
+ }
+ else if free in $(att)
+ {
+ self.free += $(p) ;
+ }
+ else
+ {
+ self.base += $(p) ;
+ }
+
+ if dependency in $(att)
+ {
+ self.dependency += $(p) ;
+ }
+ else
+ {
+ self.non-dependency += $(p) ;
+ }
+
+ if [ MATCH (:) : $(p:G=) ]
+ {
+ self.conditional += $(p) ;
+ }
+ else
+ {
+ self.non-conditional += $(p) ;
+ }
+
+ if propagated in $(att)
+ {
+ self.propagated += $(p) ;
+ }
+ if link-incompatible in $(att)
+ {
+ self.link-incompatible += $(p) ;
+ }
+ }
+ }
+
+ # Returns Jam list of stored properties.
+ #
+ rule raw ( )
+ {
+ return $(self.raw) ;
+ }
+
+ rule str ( )
+ {
+ return "[" $(self.raw) "]" ;
+ }
+
+ # Returns properties that are neither incidental nor free.
+ #
+ rule base ( )
+ {
+ return $(self.base) ;
+ }
+
+ # Returns free properties which are not incidental.
+ #
+ rule free ( )
+ {
+ return $(self.free) ;
+ }
+
+ # Returns dependency properties.
+ #
+ rule dependency ( )
+ {
+ return $(self.dependency) ;
+ }
+
+ rule non-dependency ( )
+ {
+ return $(self.non-dependency) ;
+ }
+
+ rule conditional ( )
+ {
+ return $(self.conditional) ;
+ }
+
+ rule non-conditional ( )
+ {
+ return $(self.non-conditional) ;
+ }
+
+ # Returns incidental properties.
+ #
+ rule incidental ( )
+ {
+ return $(self.incidental) ;
+ }
+
+ rule refine ( ps )
+ {
+ if ! $(self.refined.$(ps))
+ {
+ local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
+ if $(r[1]) != "@error"
+ {
+ self.refined.$(ps) = [ property-set.create $(r) ] ;
+ }
+ else
+ {
+ self.refined.$(ps) = $(r) ;
+ }
+ }
+ return $(self.refined.$(ps)) ;
+ }
+
+ rule expand ( )
+ {
+ if ! $(self.expanded)
+ {
+ self.expanded = [ property-set.create [ feature.expand $(self.raw) ] ] ;
+ }
+ return $(self.expanded) ;
+ }
+
+ rule expand-composites ( )
+ {
+ if ! $(self.composites)
+ {
+ self.composites = [ property-set.create
+ [ feature.expand-composites $(self.raw) ] ] ;
+ }
+ return $(self.composites) ;
+ }
+
+ rule evaluate-conditionals ( context ? )
+ {
+ context ?= $(__name__) ;
+ if ! $(self.evaluated.$(context))
+ {
+ self.evaluated.$(context) = [ property-set.create
+ [ property.evaluate-conditionals-in-context $(self.raw) : [ $(context).raw ] ] ] ;
+ }
+ return $(self.evaluated.$(context)) ;
+ }
+
+ rule propagated ( )
+ {
+ if ! $(self.propagated-ps)
+ {
+ self.propagated-ps = [ property-set.create $(self.propagated) ] ;
+ }
+ return $(self.propagated-ps) ;
+ }
+
+ rule link-incompatible ( )
+ {
+ if ! $(self.link-incompatible-ps)
+ {
+ self.link-incompatible-ps =
+ [ property-set.create $(self.link-incompatible) ] ;
+ }
+ return $(self.link-incompatible-ps) ;
+ }
+
+ rule run-actions ( )
+ {
+ if ! $(self.run)
+ {
+ self.run = [ property-set.create [ feature.run-actions $(self.raw) ] ] ;
+ }
+ return $(self.run) ;
+ }
+
+ rule add-defaults ( )
+ {
+ if ! $(self.defaults)
+ {
+ self.defaults = [ property-set.create
+ [ feature.add-defaults $(self.raw) ] ] ;
+ }
+ return $(self.defaults) ;
+ }
+
+ rule as-path ( )
+ {
+ if ! $(self.as-path)
+ {
+ self.as-path = [ property.as-path $(self.base) ] ;
+ }
+ return $(self.as-path) ;
+ }
+
+ # Computes the path to be used for a target with the given properties.
+ # Returns a list of
+ # - the computed path
+ # - if the path is relative to the build directory, a value of 'true'.
+ #
+ rule target-path ( )
+ {
+ if ! $(self.target-path)
+ {
+ # The <location> feature can be used to explicitly change the
+ # location of generated targets.
+ local l = [ get <location> ] ;
+ if $(l)
+ {
+ self.target-path = $(l) ;
+ }
+ else
+ {
+ local p = [ as-path ] ;
+ p = [ property-set.hash-maybe $(p) ] ;
+
+ # A really ugly hack: the Boost regression test system requires
+ # specific target paths, and it seems that changing it to handle
+ # other directory layouts is really hard. For that reason, we
+ # teach V2 to do the things the regression system requires. The
+ # value of '<location-prefix>' is prepended to the path.
+ local prefix = [ get <location-prefix> ] ;
+ if $(prefix)
+ {
+ self.target-path = [ path.join $(prefix) $(p) ] ;
+ }
+ else
+ {
+ self.target-path = $(p) ;
+ }
+ if ! $(self.target-path)
+ {
+ self.target-path = . ;
+ }
+ # The path is relative to build dir.
+ self.target-path += true ;
+ }
+ }
+ return $(self.target-path) ;
+ }
+
+ rule add ( ps )
+ {
+ if ! $(self.added.$(ps))
+ {
+ self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ] ;
+ }
+ return $(self.added.$(ps)) ;
+ }
+
+ rule add-raw ( properties * )
+ {
+ return [ add [ property-set.create $(properties) ] ] ;
+ }
+
+ rule link-incompatible-with ( ps )
+ {
+ if ! $(.li.$(ps))
+ {
+ local li1 = [ $(__name__).link-incompatible ] ;
+ local li2 = [ $(ps).link-incompatible ] ;
+ if [ set.equal $(li1) : $(li2) ]
+ {
+ .li.$(ps) = false ;
+ }
+ else
+ {
+ .li.$(ps) = true ;
+ }
+ }
+ if $(.li.$(ps)) = true
+ {
+ return true ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ # Returns all values of 'feature'.
+ #
+ rule get ( feature )
+ {
+ if ! $(self.map-built)
+ {
+ # For each feature, create a member var and assign all values to it.
+ # Since all regular member vars start with 'self', there will be no
+ # conflicts between names.
+ self.map-built = true ;
+ for local v in $(self.raw)
+ {
+ $(v:G) += $(v:G=) ;
+ }
+ }
+ return $($(feature)) ;
+ }
+}
+
+
+# Creates a new 'property-set' instance for the given raw properties or returns
+# an already existing one.
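+#
+# For example (illustrative properties), the following two calls return the
+# same instance, since the raw properties are sorted and cached by value:
+#
+#   [ property-set.create <variant>debug <toolset>gcc ]
+#   [ property-set.create <toolset>gcc <variant>debug ]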
+#
+rule create ( raw-properties * )
+{
+ raw-properties = [ sequence.unique
+ [ sequence.insertion-sort $(raw-properties) ] ] ;
+
+ local key = $(raw-properties:J=-:E=) ;
+
+ if ! $(.ps.$(key))
+ {
+ .ps.$(key) = [ new property-set $(raw-properties) ] ;
+ }
+ return $(.ps.$(key)) ;
+}
+NATIVE_RULE property-set : create ;
+
+
+# Creates a new 'property-set' instance after checking that all properties are
+# valid and converting implicit properties into gristed form.
+#
+rule create-with-validation ( raw-properties * )
+{
+ property.validate $(raw-properties) ;
+ return [ create [ property.make $(raw-properties) ] ] ;
+}
+
+
+# Creates a property-set from the input given by the user, in the context of
+# 'jamfile-module' at 'location'.
+#
+rule create-from-user-input ( raw-properties * : jamfile-module location )
+{
+ local specification = [ property.translate-paths $(raw-properties)
+ : $(location) ] ;
+ specification = [ property.translate-indirect $(specification)
+ : $(jamfile-module) ] ;
+ local project-id = [ project.attribute $(jamfile-module) id ] ;
+ project-id ?= [ path.root $(location) [ path.pwd ] ] ;
+ specification = [ property.translate-dependencies
+ $(specification) : $(project-id) : $(location) ] ;
+ specification =
+ [ property.expand-subfeatures-in-conditions $(specification) ] ;
+ specification = [ property.make $(specification) ] ;
+ return [ property-set.create $(specification) ] ;
+}
+
+
+# Refines requirements with requirements provided by the user. Specially handles
+# "-<property>value" syntax in specification to remove given requirements.
+# - parent-requirements -- property-set object with requirements to refine.
+# - specification -- string list of requirements provided by the user.
+# - project-module -- module to which context indirect features will be
+# bound.
+# - location -- path to which path features are relative.
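+#
+# For example (an illustrative specification),
+#
+#   <define>FOO -<threading>multi
+#
+# removes <threading>multi from the parent requirements and adds <define>FOO.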
+#
+rule refine-from-user-input ( parent-requirements : specification * :
+ project-module : location )
+{
+ if ! $(specification)
+ {
+ return $(parent-requirements) ;
+ }
+ else
+ {
+ local add-requirements ;
+ local remove-requirements ;
+
+ for local r in $(specification)
+ {
+ local m = [ MATCH "^-(.*)" : $(r) ] ;
+ if $(m)
+ {
+ remove-requirements += $(m) ;
+ }
+ else
+ {
+ add-requirements += $(r) ;
+ }
+ }
+
+ if $(remove-requirements)
+ {
+ # Need to create a property set, so that path features and indirect
+ # features are translated just like they are in project
+ # requirements.
+ local ps = [ property-set.create-from-user-input
+ $(remove-requirements) : $(project-module) $(location) ] ;
+
+ parent-requirements = [ property-set.create
+ [ set.difference [ $(parent-requirements).raw ]
+ : [ $(ps).raw ] ] ] ;
+ specification = $(add-requirements) ;
+ }
+
+ local requirements = [ property-set.create-from-user-input
+ $(specification) : $(project-module) $(location) ] ;
+
+ return [ $(parent-requirements).refine $(requirements) ] ;
+ }
+}
+
+
+# Returns a property-set with an empty set of properties.
+#
+rule empty ( )
+{
+ if ! $(.empty)
+ {
+ .empty = [ create ] ;
+ }
+ return $(.empty) ;
+}
+
+if [ option.get hash : : yes ] = yes
+{
+ rule hash-maybe ( path ? )
+ {
+ path ?= "" ;
+ return [ MD5 $(path) ] ;
+ }
+}
+else
+{
+ rule hash-maybe ( path ? )
+ {
+ return $(path) ;
+ }
+}
+
diff --git a/jam-files/boost-build/build/property.jam b/jam-files/boost-build/build/property.jam
new file mode 100644
index 000000000..a2ad5226b
--- /dev/null
+++ b/jam-files/boost-build/build/property.jam
@@ -0,0 +1,788 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+import feature ;
+import indirect ;
+import path ;
+import regex ;
+import string ;
+import sequence ;
+import set ;
+import utility ;
+
+
+# Refines 'properties' by overriding any non-free and non-conditional properties
+# for which a different value is specified in 'requirements'. Returns the
+# resulting list of properties.
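+#
+# For example, the unit test below expects:
+#
+#   [ refine <toolset>gcc <optimization>off : <optimization>on ]
+#     -> <toolset>gcc <optimization>on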
+#
+rule refine ( properties * : requirements * )
+{
+ local result ;
+ local error ;
+
+ # All the 'requirements' elements should be present in the result. Record
+ # them so that we can handle 'properties'.
+ for local r in $(requirements)
+ {
+ # Do not consider conditional requirements.
+ if ! [ MATCH (:) : $(r:G=) ]
+ {
+ # Note: cannot use a local variable here, so use an ugly name.
+ __require__$(r:G) = $(r:G=) ;
+ }
+ }
+
+ for local p in $(properties)
+ {
+ if [ MATCH (:) : $(p:G=) ]
+ {
+ # Do not modify conditional properties.
+ result += $(p) ;
+ }
+ else if free in [ feature.attributes $(p:G) ]
+ {
+ # Do not modify free properties.
+ result += $(p) ;
+ }
+ else
+ {
+ local required-value = $(__require__$(p:G)) ;
+ if $(required-value)
+ {
+ if $(p:G=) != $(required-value)
+ {
+ result += $(p:G)$(required-value) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ }
+
+ # Unset our ugly map.
+ for local r in $(requirements)
+ {
+ __require__$(r:G) = ;
+ }
+
+ if $(error)
+ {
+ return $(error) ;
+ }
+ else
+ {
+ return [ sequence.unique $(result) $(requirements) ] ;
+ }
+}
+
+
+# Removes all conditional properties whose conditions are not met. For those
+# with met conditions, removes the condition. Properties in conditions are
+# looked up in 'context'.
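+#
+# For example, the unit test below expects:
+#
+#   [ evaluate-conditionals-in-context
+#       <variant>release,<rtti>off:<define>MY_RELEASE
+#       : <toolset>gcc <variant>release <rtti>off ]
+#     -> <define>MY_RELEASE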
+#
+rule evaluate-conditionals-in-context ( properties * : context * )
+{
+ local base ;
+ local conditionals ;
+ for local p in $(properties)
+ {
+ if [ MATCH (:<) : $(p) ]
+ {
+ conditionals += $(p) ;
+ }
+ else
+ {
+ base += $(p) ;
+ }
+ }
+
+ local result = $(base) ;
+ for local p in $(conditionals)
+ {
+ # Separate condition and property.
+ local s = [ MATCH (.*):(<.*) : $(p) ] ;
+ # Split condition into individual properties.
+ local condition = [ regex.split $(s[1]) "," ] ;
+ # Evaluate condition.
+ if ! [ MATCH (!).* : $(condition:G=) ]
+ {
+ # Only positive checks
+ if $(condition) in $(context)
+ {
+ result += $(s[2]) ;
+ }
+ }
+ else
+ {
+ # Have negative checks
+ local fail ;
+ while $(condition)
+ {
+ local c = $(condition[1]) ;
+ local m = [ MATCH !(.*) : $(c) ] ;
+ if $(m)
+ {
+ local p = $(m:G=$(c:G)) ;
+ if $(p) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ else
+ {
+ if ! $(c) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ condition = $(condition[2-]) ;
+ }
+ if ! $(fail)
+ {
+ result += $(s[2]) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+rule expand-subfeatures-in-conditions ( properties * )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local s = [ MATCH (.*):(<.*) : $(p) ] ;
+ if ! $(s)
+ {
+ result += $(p) ;
+ }
+ else
+ {
+ local condition = $(s[1]) ;
+ local value = $(s[2]) ;
+ # Condition might include several elements.
+ condition = [ regex.split $(condition) "," ] ;
+ local e ;
+ for local c in $(condition)
+ {
+ # It is common for a condition to include a toolset or
+ # subfeatures that have not been defined. In that case we want
+ # the condition to simply 'never be satisfied' and validation
+ # would only produce a spurious error so we prevent it by
+ # passing 'true' as the second parameter.
+ e += [ feature.expand-subfeatures $(c) : true ] ;
+ }
+ if $(e) = $(condition)
+ {
+ # (todo)
+ # This is just an optimization and possibly a premature one at
+ # that.
+ # (todo) (12.07.2008.) (Jurko)
+ result += $(p) ;
+ }
+ else
+ {
+ result += $(e:J=,):$(value) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Helper for as-path, below. Orders properties with the implicit ones first, and
+# within the two sections in alphabetical order of feature name.
+#
+local rule path-order ( x y )
+{
+ if $(y:G) && ! $(x:G)
+ {
+ return true ;
+ }
+ else if $(x:G) && ! $(y:G)
+ {
+ return ;
+ }
+ else
+ {
+ if ! $(x:G)
+ {
+ x = [ feature.expand-subfeatures $(x) ] ;
+ y = [ feature.expand-subfeatures $(y) ] ;
+ }
+
+ if $(x[1]) < $(y[1])
+ {
+ return true ;
+ }
+ }
+}
+
+
+local rule abbreviate-dashed ( string )
+{
+ local r ;
+ for local part in [ regex.split $(string) - ]
+ {
+ r += [ string.abbreviate $(part) ] ;
+ }
+ return $(r:J=-) ;
+}
+
+
+local rule identity ( string )
+{
+ return $(string) ;
+}
+
+
+if --abbreviate-paths in [ modules.peek : ARGV ]
+{
+ .abbrev = abbreviate-dashed ;
+}
+else
+{
+ .abbrev = identity ;
+}
+
+
+# Returns a path representing the given expanded property set.
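+#
+# For example, the unit test below expects:
+#
+#   [ as-path <optimization>off <variant>debug ]
+#     -> "debug"
+#   [ as-path <toolset>gcc <optimization>off <rtti>off <variant>debug ]
+#     -> "gcc/debug/rtti-off"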
+#
+rule as-path ( properties * )
+{
+ local entry = .result.$(properties:J=-) ;
+
+ if ! $($(entry))
+ {
+ # Trim redundancy.
+ properties = [ feature.minimize $(properties) ] ;
+
+ # Sort according to path-order.
+ properties = [ sequence.insertion-sort $(properties) : path-order ] ;
+
+ local components ;
+ for local p in $(properties)
+ {
+ if $(p:G)
+ {
+ local f = [ utility.ungrist $(p:G) ] ;
+ p = $(f)-$(p:G=) ;
+ }
+ components += [ $(.abbrev) $(p) ] ;
+ }
+
+ $(entry) = $(components:J=/) ;
+ }
+
+ return $($(entry)) ;
+}
+
+
+# Exit with error if property is not valid.
+#
+local rule validate1 ( property )
+{
+ local msg ;
+ if $(property:G)
+ {
+ local feature = $(property:G) ;
+ local value = $(property:G=) ;
+
+ if ! [ feature.valid $(feature) ]
+ {
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "unknown feature '$(feature)'" ;
+ }
+ else if $(value) && ! free in [ feature.attributes $(feature) ]
+ {
+ feature.validate-value-string $(feature) $(value) ;
+ }
+ else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) )
+ {
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "No value specified for feature '$(feature)'" ;
+ }
+ }
+ else
+ {
+ local feature = [ feature.implied-feature $(property) ] ;
+ feature.validate-value-string $(feature) $(property) ;
+ }
+ if $(msg)
+ {
+ errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
+ }
+}
+
+
+rule validate ( properties * )
+{
+ for local p in $(properties)
+ {
+ validate1 $(p) ;
+ }
+}
+
+
+rule validate-property-sets ( property-sets * )
+{
+ for local s in $(property-sets)
+ {
+ validate [ feature.split $(s) ] ;
+ }
+}
+
+
+# Expands any implicit property values in the given property 'specification' so
+# they explicitly state their feature.
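+#
+# For example (assuming 'gcc' is a value of the implicit <toolset> feature, as
+# in the unit test below):
+#
+#   [ make gcc <define>FOO ]  ->  <toolset>gcc <define>FOO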
+#
+rule make ( specification * )
+{
+ local result ;
+ for local e in $(specification)
+ {
+ if $(e:G)
+ {
+ result += $(e) ;
+ }
+ else if [ feature.is-implicit-value $(e) ]
+ {
+ local feature = [ feature.implied-feature $(e) ] ;
+ result += $(feature)$(e) ;
+ }
+ else
+ {
+ errors.error "'$(e)' is not a valid property specification" ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a property set containing all the elements in 'properties' that do not
+# have their attributes listed in 'attributes'.
+#
+rule remove ( attributes + : properties * )
+{
+ local result ;
+ for local e in $(properties)
+ {
+ if ! [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a property set containing all the elements in 'properties' that have
+# their attributes listed in 'attributes'.
+#
+rule take ( attributes + : properties * )
+{
+ local result ;
+ for local e in $(properties)
+ {
+ if [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Selects properties corresponding to any of the given features.
+#
+rule select ( features * : properties * )
+{
+ local result ;
+
+ # Add any missing angle brackets.
+ local empty = "" ;
+ features = $(empty:G=$(features)) ;
+
+ for local p in $(properties)
+ {
+ if $(p:G) in $(features)
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a modified version of properties with all values of the given feature
+# replaced by the given value. If 'value' is empty the feature will be removed.
+#
+rule change ( properties * : feature value ? )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ if $(p:G) = $(feature)
+ {
+ result += $(value:G=$(feature)) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# If 'property' is a conditional property, returns the condition and the
+# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become
+# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty
+# string.
+#
+rule split-conditional ( property )
+{
+ local m = [ MATCH "(.+):<(.+)" : $(property) ] ;
+ if $(m)
+ {
+ return $(m[1]) <$(m[2]) ;
+ }
+}
+
+
+# Interpret all path properties in 'properties' as relative to 'path'. The
+# property values are assumed to be in system-specific form, and will be
+# translated into normalized form.
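+#
+# For example (an illustrative sketch, assuming <include> is declared as a
+# path feature and 'path' is libs/foo):
+#
+#   [ translate-paths <include>detail : libs/foo ]  ->  <include>libs/foo/detail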
+#
+rule translate-paths ( properties * : path )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition = "" ;
+ if $(split)
+ {
+ condition = $(split[1]): ;
+ p = $(split[2]) ;
+ }
+
+ if path in [ feature.attributes $(p:G) ]
+ {
+ local values = [ regex.split $(p:TG=) "&&" ] ;
+ local t ;
+ for local v in $(values)
+ {
+ t += [ path.root [ path.make $(v) ] $(path) ] ;
+ }
+ t = $(t:J="&&") ;
+ result += $(condition)$(t:TG=$(p:G)) ;
+ }
+ else
+ {
+ result += $(condition)$(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Assumes that all feature values that start with '@' are names of rules, used
+# in 'context-module'. Such rules can be either local to the module or global.
+# Converts such values into 'indirect-rule' format (see indirect.jam), so they
+# can be called from other modules. Does nothing for such values that are
+# already in the 'indirect-rule' format.
+#
+rule translate-indirect ( specification * : context-module )
+{
+ local result ;
+ for local p in $(specification)
+ {
+ local m = [ MATCH ^@(.+) : $(p:G=) ] ;
+ if $(m)
+ {
+ local v ;
+ if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ]
+ {
+ # Rule is already in the 'indirect-rule' format.
+ v = $(m) ;
+ }
+ else
+ {
+ if ! [ MATCH ".*([.]).*" : $(m) ]
+ {
+ # This is an unqualified rule name. The user might want to
+ # set flags on this rule name and toolset.flag
+ # auto-qualifies it. Need to do the same here so flag
+ # setting works. We can arrange for toolset.flag to *not*
+ # auto-qualify the argument but then two rules defined in
+ # two Jamfiles would conflict.
+ m = $(context-module).$(m) ;
+ }
+ v = [ indirect.make $(m) : $(context-module) ] ;
+ }
+
+ v = @$(v) ;
+ result += $(v:G=$(p:G)) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Binds all dependency properties in a list relative to the given project.
+# Targets with absolute paths will be left unchanged and targets which have a
+# project specified will have the path to the project interpreted relative to
+# the specified location.
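+#
+# For example, assuming a dependency feature <dep> (a hypothetical name) and
+# project-id "/myproj": <dep>util becomes <dep>/myproj//util, while in
+# <dep>other//util the 'other' part is first rooted at 'location'.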
+#
+rule translate-dependencies ( specification * : project-id : location )
+{
+ local result ;
+ for local p in $(specification)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition = "" ;
+ if $(split)
+ {
+ condition = $(split[1]): ;
+ p = $(split[2]) ;
+ }
+ if dependency in [ feature.attributes $(p:G) ]
+ {
+ local split-target = [ regex.match (.*)//(.*) : $(p:G=) ] ;
+ if $(split-target)
+ {
+ local rooted = [ path.root [ path.make $(split-target[1]) ]
+ [ path.root $(location) [ path.pwd ] ] ] ;
+ result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ;
+ }
+ else if [ path.is-rooted $(p:G=) ]
+ {
+ result += $(condition)$(p) ;
+ }
+ else
+ {
+ result += $(condition)$(p:G)$(project-id)//$(p:G=) ;
+ }
+ }
+ else
+ {
+ result += $(condition)$(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Class maintaining a property set -> string mapping.
+#
+class property-map
+{
+ import errors ;
+ import numbers ;
+ import sequence ;
+
+ rule __init__ ( )
+ {
+ self.next-flag = 1 ;
+ }
+
+ # Associate 'value' with 'properties'.
+ #
+ rule insert ( properties + : value )
+ {
+ self.all-flags += $(self.next-flag) ;
+ self.properties.$(self.next-flag) = $(properties) ;
+ self.value.$(self.next-flag) = $(value) ;
+
+ self.next-flag = [ numbers.increment $(self.next-flag) ] ;
+ }
+
+ # Returns the value associated with 'properties' or any subset of it. If
+ # more than one subset has a value assigned to it, returns the value for the
+ # longest subset, if it is unique.
+ #
+ rule find ( properties + )
+ {
+ return [ find-replace $(properties) ] ;
+ }
+
+ # Returns the value associated with 'properties'. If 'value' parameter is
+ # given, replaces the found value.
+ #
+ rule find-replace ( properties + : value ? )
+ {
+ # First find all matches.
+ local matches ;
+ local match-ranks ;
+ for local i in $(self.all-flags)
+ {
+ if $(self.properties.$(i)) in $(properties)
+ {
+ matches += $(i) ;
+ match-ranks += [ sequence.length $(self.properties.$(i)) ] ;
+ }
+ }
+ local best = [ sequence.select-highest-ranked $(matches)
+ : $(match-ranks) ] ;
+ if $(best[2])
+ {
+ errors.error "Ambiguous key $(properties:J= :E=)" ;
+ }
+ local original = $(self.value.$(best)) ;
+ if $(value)
+ {
+ self.value.$(best) = $(value) ;
+ }
+ return $(original) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+ import feature ;
+
+ # Local rules must be explicitly re-imported.
+ import property : path-order abbreviate-dashed ;
+
+ feature.prepare-test property-test-temp ;
+
+ feature.feature toolset : gcc : implicit symmetric ;
+ feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1
+ 3.0.2 : optional ;
+ feature.feature define : : free ;
+ feature.feature runtime-link : dynamic static : symmetric link-incompatible ;
+ feature.feature optimization : on off ;
+ feature.feature variant : debug release : implicit composite symmetric ;
+ feature.feature rtti : on off : link-incompatible ;
+
+ feature.compose <variant>debug : <define>_DEBUG <optimization>off ;
+ feature.compose <variant>release : <define>NDEBUG <optimization>on ;
+
+ validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
+
+ assert.true path-order $(test-space) debug <define>foo ;
+ assert.false path-order $(test-space) <define>foo debug ;
+ assert.true path-order $(test-space) gcc debug ;
+ assert.false path-order $(test-space) debug gcc ;
+ assert.true path-order $(test-space) <optimization>on <rtti>on ;
+ assert.false path-order $(test-space) <rtti>on <optimization>on ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off <define>FOO
+ : refine <toolset>gcc <rtti>off
+ : <define>FOO
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <optimization>on
+ : refine <toolset>gcc <optimization>off
+ : <optimization>on
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off
+ : refine <toolset>gcc : <rtti>off : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO
+ : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
+ : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
+ : $(test-space) ;
+
+ assert.result <define>MY_RELEASE
+ : evaluate-conditionals-in-context
+ <variant>release,<rtti>off:<define>MY_RELEASE
+ : <toolset>gcc <variant>release <rtti>off ;
+
+ assert.result debug
+ : as-path <optimization>off <variant>debug
+ : $(test-space) ;
+
+ assert.result gcc/debug/rtti-off
+ : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
+ : $(test-space) ;
+
+ assert.result optmz-off : abbreviate-dashed optimization-off ;
+ assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ;
+
+ try ;
+ validate <feature>value : $(test-space) ;
+ catch "Invalid property '<feature>value': unknown feature 'feature'." ;
+
+ try ;
+ validate <rtti>default : $(test-space) ;
+ catch \"default\" is not a known value of feature <rtti> ;
+
+ validate <define>WHATEVER : $(test-space) ;
+
+ try ;
+ validate <rtti> : $(test-space) ;
+ catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
+
+ try ;
+ validate value : $(test-space) ;
+ catch "value" is not a value of an implicit feature ;
+
+ assert.result-set-equal <rtti>on
+ : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
+
+ assert.result-set-equal <include>a
+ : select include : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <include>a
+ : select include bar : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <include>a <toolset>gcc
+ : select include <bar> <toolset> : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <toolset>kylix <include>a
+ : change <toolset>gcc <include>a : <toolset> kylix ;
+
+ pm = [ new property-map ] ;
+ $(pm).insert <toolset>gcc : o ;
+ $(pm).insert <toolset>gcc <os>NT : obj ;
+ $(pm).insert <toolset>gcc <os>CYGWIN : obj ;
+
+ assert.equal o : [ $(pm).find <toolset>gcc ] ;
+
+ assert.equal obj : [ $(pm).find <toolset>gcc <os>NT ] ;
+
+ try ;
+ $(pm).find <toolset>gcc <os>NT <os>CYGWIN ;
+ catch "Ambiguous key <toolset>gcc <os>NT <os>CYGWIN" ;
+
+ # Test ordinary properties.
+ assert.result : split-conditional <toolset>gcc ;
+
+ # Test properties with ":".
+ assert.result : split-conditional <define>FOO=A::B ;
+
+ # Test conditional feature.
+ assert.result-set-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
+ : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ;
+
+ feature.finish-test property-test-temp ;
+}
diff --git a/jam-files/boost-build/build/readme.txt b/jam-files/boost-build/build/readme.txt
new file mode 100644
index 000000000..c3dddd8d7
--- /dev/null
+++ b/jam-files/boost-build/build/readme.txt
@@ -0,0 +1,13 @@
+Copyright 2001, 2002 Dave Abrahams
+Copyright 2002 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+Development code for new build system. To run unit tests for jam code, execute:
+
+ bjam --debug --build-system=test
+
+Comprehensive tests require Python. See ../test/readme.txt
+
+
+
diff --git a/jam-files/boost-build/build/scanner.jam b/jam-files/boost-build/build/scanner.jam
new file mode 100644
index 000000000..d6042ea2c
--- /dev/null
+++ b/jam-files/boost-build/build/scanner.jam
@@ -0,0 +1,153 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements scanners: objects that compute implicit dependencies for
+# files, such as includes in C++.
+#
+# Scanner has a regular expression used to find dependencies, some
+# data needed to interpret those dependencies (for example, include
+# paths), and code which actually establishes the needed relationships
+# between actual jam targets.
+#
+# Scanner objects are created by actions, when they try to actualize
+# virtual targets, passed to 'virtual-target.actualize' method and are
+# then associated with actual targets. It is possible to use
+# several scanners for a virtual-target. For example, a single source
+# might be used by two compile actions, with different include paths.
+# In this case, two different actual targets will be created, each
+# having a scanner of its own.
+#
+# Typically, scanners are created from target type and action's
+# properties, using the rule 'get' in this module. Directly creating
+# scanners is not recommended, because it might create many equivalent
+# but different instances, and lead to unneeded duplication of
+# actual targets. However, actions can also create scanners in a special
+# way, instead of relying on just target type.
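+#
+# A typical definition therefore looks roughly like this (a sketch only; the
+# class name, pattern and relevant property are made up):
+#
+#   class my-scanner : common-scanner
+#   {
+#       rule pattern ( )
+#       {
+#           return "^[ \t]*import[ ]*([^ ;]*)" ;
+#       }
+#   }
+#   scanner.register my-scanner : include ;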
+
+import "class" : new ;
+import property virtual-target property-set ;
+import errors : error ;
+
+# Base scanner class.
+class scanner
+{
+ rule __init__ ( )
+ {
+ }
+
+ # Returns a pattern to use for scanning
+ rule pattern ( )
+ {
+ error "method must be overridden" ;
+ }
+
+ # Establish necessary relationship between targets,
+ # given the actual target being scanned, and a list of
+ # pattern matches in that file.
+ rule process ( target : matches * )
+ {
+ error "method must be overridden" ;
+ }
+}
+
+# Registers a new scanner class, specifying a set of
+# properties relevant to this scanner. The constructor of that class
+# should have one parameter: a list of properties.
+rule register ( scanner-class : relevant-properties * )
+{
+ .registered += $(scanner-class) ;
+ .relevant-properties.$(scanner-class) = $(relevant-properties) ;
+}
+
+# Common scanner class, which can be used when there's only one
+# kind of includes (unlike C, where "" and <> includes have different
+# search paths).
+class common-scanner : scanner
+{
+ import scanner ;
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ NOCARE $(matches) ;
+ INCLUDES $(target) : $(matches) ;
+ SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+ ISFILE $(matches) ;
+
+ scanner.propagate $(__name__) : $(matches) : $(target) ;
+ }
+}
+
+
+# Returns an instance of previously registered scanner,
+# with the specified properties.
+rule get ( scanner-class : property-set )
+{
+ if ! $(scanner-class) in $(.registered)
+ {
+ error "attempt to get unregistered scanner" ;
+ }
+
+ local r = $(.rv-cache.$(property-set)) ;
+ if ! $(r)
+ {
+ r = [ property-set.create
+ [ property.select $(.relevant-properties.$(scanner-class)) :
+ [ $(property-set).raw ] ] ] ;
+ .rv-cache.$(property-set) = $(r) ;
+ }
+
+ if ! $(scanner.$(scanner-class).$(r:J=-))
+ {
+ scanner.$(scanner-class).$(r:J=-) = [ new $(scanner-class) [ $(r).raw ] ] ;
+ }
+ return $(scanner.$(scanner-class).$(r:J=-)) ;
+}
+
+
+# Installs the specified scanner on actual target 'target'.
+rule install ( scanner : target
+ vtarget # virtual target from which 'target' was actualized
+)
+{
+ HDRSCAN on $(target) = [ $(scanner).pattern ] ;
+ SCANNER on $(target) = $(scanner) ;
+ HDRRULE on $(target) = scanner.hdrrule ;
+
+ # The scanner reflects differences in properties affecting the
+ # binding of 'target'; this will be known when processing
+ # includes for it and will give information on how to
+ # interpret quoted includes.
+ HDRGRIST on $(target) = $(scanner) ;
+}
+
+# Propagate scanner setting from 'including-target' to 'targets'.
+rule propagate ( scanner : targets * : including-target )
+{
+ HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ;
+ SCANNER on $(targets) = $(scanner) ;
+ HDRRULE on $(targets) = scanner.hdrrule ;
+ HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ;
+}
+
+
+rule hdrrule ( target : matches * : binding )
+{
+ local scanner = [ on $(target) return $(SCANNER) ] ;
+ $(scanner).process $(target) : $(matches) : $(binding) ;
+}
+# hdrrule must be available at global scope so that it can be invoked
+# by header scanning
+IMPORT scanner : hdrrule : : scanner.hdrrule ;
+
+
+
+
diff --git a/jam-files/boost-build/build/targets.jam b/jam-files/boost-build/build/targets.jam
new file mode 100644
index 000000000..a70532ce7
--- /dev/null
+++ b/jam-files/boost-build/build/targets.jam
@@ -0,0 +1,1659 @@
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports 'abstract' targets, which are targets explicitly defined in a
+# Jamfile.
+#
+# Abstract targets are represented by classes derived from 'abstract-target'
+# class. The first abstract target is 'project-target', which is created for
+# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's module
+# (see project.jam).
+#
+# Project targets keep a list of 'main-target' instances. A main target is what
+# the user explicitly defines in a Jamfile. It is possible to have several
+# definitions for a main target, for example to have different lists of sources
+# for different platforms. So, main targets keep a list of alternatives.
+#
+# Each alternative is an instance of 'abstract-target'. When a main target
+# subvariant is defined by some rule, that rule will decide what class to use,
+# create an instance of that class and add it to the list of alternatives for
+# the main target.
+#
+# Rules supplied by the build system will use only targets derived from
+# 'basic-target' class, which will provide some default behaviour. There will be
+# different classes derived from it such as 'make-target', created by the 'make'
+# rule, and 'typed-target', created by rules such as 'exe' and 'lib'.
+
+#
+# +------------------------+
+# |abstract-target |
+# +========================+
+# |name |
+# |project |
+# | |
+# |generate(properties) = 0|
+# +-----------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------------+------+------------------------------+
+# | | |
+# | | |
+# +----------+-----------+ +------+------+ +------+-------+
+# | project-target | | main-target | | basic-target |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----------+ generate |<>------------->| generate |
+# | main-target | +-------------+ | construct = 0|
+# +----------------------+ +--------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+----------------+------------------+----------------+---+
+# | | | |
+# | | | |
+# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
+# | | typed-target | | make-target | | stage-target |
+# . +==============+ +=============+ +==============+
+# . | construct | | construct | | construct |
+# +--------------+ +-------------+ +--------------+
+
+import assert ;
+import "class" : new ;
+import errors ;
+import feature ;
+import indirect ;
+import path ;
+import property ;
+import property-set ;
+import sequence ;
+import set ;
+import toolset ;
+import build-request ;
+
+
+# Base class for all abstract targets.
+#
+class abstract-target
+{
+ import project ;
+ import assert ;
+ import "class" ;
+ import errors ;
+
+ rule __init__ ( name # Name of the target in Jamfile.
+ : project-target # The project target to which this one belongs.
+ )
+ {
+ # Note: it might seem that we don't need either name or project at all.
+ # However, there are places where we really need it. One example is
+ # error messages which should name problematic targets. Another is
+ # setting correct paths for sources and generated files.
+
+ self.name = $(name) ;
+ self.project = $(project-target) ;
+ self.location = [ errors.nearest-user-location ] ;
+ }
+
+ # Returns the name of this target.
+ rule name ( )
+ {
+ return $(self.name) ;
+ }
+
+ # Returns the project for this target.
+ rule project ( )
+ {
+ return $(self.project) ;
+ }
+
+ # Return the location where the target was declared.
+ rule location ( )
+ {
+ return $(self.location) ;
+ }
+
+ # Returns a user-readable name for this target.
+ rule full-name ( )
+ {
+ local location = [ $(self.project).get location ] ;
+ return $(location)/$(self.name) ;
+ }
+
+ # Generates virtual targets for this abstract target using the specified
+ # properties, unless a different value of some feature is required by the
+ # target.
+ # On success, returns:
+ # - a property-set with the usage requirements to be applied to dependants
+ # - a list of produced virtual targets, which may be empty.
+ # If 'property-set' is empty, performs the default build of this target, in
+ # a way specific to the derived class.
+ #
+ rule generate ( property-set )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ rule rename ( new-name )
+ {
+ self.name = $(new-name) ;
+ }
+}
+
+
+if --debug-building in [ modules.peek : ARGV ]
+{
+ modules.poke : .debug-building : true ;
+}
+
+
+rule indent ( )
+{
+ return $(.indent:J="") ;
+}
+
+
+rule increase-indent ( )
+{
+ .indent += " " ;
+}
+
+
+rule decrease-indent ( )
+{
+ .indent = $(.indent[2-]) ;
+}
+
+
+# Project target class (derived from 'abstract-target').
+#
+# This class has the following responsibilities:
+# - Maintaining a list of main targets in this project and building them.
+#
+# Main targets are constructed in two stages:
+# - When a Jamfile is read, a number of calls to 'add-alternative' are made. At
+# that time, alternatives can also be renamed to account for inline targets.
+# - The first time 'main-target' or 'has-main-target' rule is called, all
+# alternatives are enumerated and main targets are created.
+#
+class project-target : abstract-target
+{
+ import project ;
+ import targets ;
+ import path ;
+ import print ;
+ import property-set ;
+ import set ;
+ import sequence ;
+ import "class" : new ;
+ import errors ;
+
+ rule __init__ ( name : project-module parent-project ?
+ : requirements * : default-build * )
+ {
+ abstract-target.__init__ $(name) : $(__name__) ;
+
+ self.project-module = $(project-module) ;
+ self.location = [ project.attribute $(project-module) location ] ;
+ self.requirements = $(requirements) ;
+ self.default-build = $(default-build) ;
+
+ if $(parent-project)
+ {
+ inherit $(parent-project) ;
+ }
+ }
+
+ # This is needed only by the 'make' rule. Need to find the way to make
+ # 'make' work without this method.
+ #
+ rule project-module ( )
+ {
+ return $(self.project-module) ;
+ }
+
+ rule get ( attribute )
+ {
+ return [ project.attribute $(self.project-module) $(attribute) ] ;
+ }
+
+ rule build-dir ( )
+ {
+ if ! $(self.build-dir)
+ {
+ self.build-dir = [ get build-dir ] ;
+ if ! $(self.build-dir)
+ {
+ self.build-dir = [ path.join [ $(self.project).get location ]
+ bin ] ;
+ }
+ }
+ return $(self.build-dir) ;
+ }
+
+ # Generates all possible targets contained in this project.
+ #
+ rule generate ( property-set * )
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "building project" [ name ] " ('$(__name__)') with" [ $(property-set).raw ] ;
+ targets.increase-indent ;
+ }
+
+ local usage-requirements = [ property-set.empty ] ;
+ local targets ;
+
+ for local t in [ targets-to-build ]
+ {
+ local g = [ $(t).generate $(property-set) ] ;
+ usage-requirements = [ $(usage-requirements).add $(g[1]) ] ;
+ targets += $(g[2-]) ;
+ }
+ targets.decrease-indent ;
+ return $(usage-requirements) [ sequence.unique $(targets) ] ;
+ }
+
+ # Computes and returns a list of abstract-target instances which must be
+ # built when this project is built.
+ #
+ rule targets-to-build ( )
+ {
+ local result ;
+
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+
+ # Collect all main targets here, except for "explicit" ones.
+ for local t in $(self.main-targets)
+ {
+ if ! [ $(t).name ] in $(self.explicit-targets)
+ {
+ result += $(t) ;
+ }
+ }
+
+ # Collect all projects referenced via "projects-to-build" attribute.
+ local self-location = [ get location ] ;
+ for local pn in [ get projects-to-build ]
+ {
+ result += [ find $(pn)/ ] ;
+ }
+
+ return $(result) ;
+ }
+
+ # Add 'target' to the list of targets in this project that should be built
+ # only by explicit request.
+ #
+ rule mark-target-as-explicit ( target-name * )
+ {
+ # Record the name of the target, not instance, since this rule is called
+ # before main target instances are created.
+ self.explicit-targets += $(target-name) ;
+ }
+
+ rule mark-target-as-always ( target-name * )
+ {
+ # Record the name of the target, not instance, since this rule is called
+ # before main target instances are created.
+ self.always-targets += $(target-name) ;
+ }
+
+ # Add new target alternative
+ #
+ rule add-alternative ( target-instance )
+ {
+ if $(self.built-main-targets)
+ {
+ errors.error add-alternative called when main targets are already
+ created. : in project [ full-name ] ;
+ }
+ self.alternatives += $(target-instance) ;
+ }
+
+ # Returns a 'main-target' class instance corresponding to 'name'.
+ #
+ rule main-target ( name )
+ {
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+ return $(self.main-target.$(name)) ;
+ }
+
+ # Returns whether a main target with the specified name exists.
+ #
+ rule has-main-target ( name )
+ {
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+
+ if $(self.main-target.$(name))
+ {
+ return true ;
+ }
+ }
+
+ # Worker function for the 'find' rule. Implements no caching and simply
+ # returns nothing in case the target cannot be found.
+ #
+ rule find-really ( id )
+ {
+ local result ;
+ local current-location = [ get location ] ;
+
+ local split = [ MATCH (.*)//(.*) : $(id) ] ;
+ local project-part = $(split[1]) ;
+ local target-part = $(split[2]) ;
+
+ local extra-error-message ;
+ if $(project-part)
+ {
+ # There is an explicitly specified project part in id. Looks up the
+ # project and passes the request to it.
+ local pm = [ project.find $(project-part) : $(current-location) ] ;
+ if $(pm)
+ {
+ project-target = [ project.target $(pm) ] ;
+ result = [ $(project-target).find $(target-part) : no-error ] ;
+ }
+ else
+ {
+ # TODO: This extra error message will not get displayed most
+ # likely due to some buggy refactoring. Refactor the code so the
+ # message gets displayed again.
+ extra-error-message = error: could not find project
+ '$(project-part)' ;
+ }
+ }
+ else
+ {
+ # Interpret target-name as name of main target. Need to do this
+ # before checking for file. Consider the following scenario with a
+ # toolset not modifying its executable's names, e.g. gcc on
+ # Unix-like platforms:
+ #
+ # exe test : test.cpp ;
+ # install s : test : <location>. ;
+ #
+ # After the first build we would have a target named 'test' in the
+ # Jamfile and a file named 'test' on the disk. We need the target to
+ # override the file.
+ result = [ main-target $(id) ] ;
+
+ # Interpret id as an existing file reference.
+ if ! $(result)
+ {
+ result = [ new file-reference [ path.make $(id) ] :
+ $(self.project) ] ;
+ if ! [ $(result).exists ]
+ {
+ result = ;
+ }
+ }
+
+ # Interpret id as project-id.
+ if ! $(result)
+ {
+ local project-module = [ project.find $(id) :
+ $(current-location) ] ;
+ if $(project-module)
+ {
+ result = [ project.target $(project-module) ] ;
+ }
+ }
+ }
+
+ return $(result) ;
+ }
+
+ # Find and return the target with the specified id, treated relative to
+ # self. Id may specify either a target or a file name with the target taking
+ # priority. May report an error or return nothing if the target is not found
+ # depending on the 'no-error' parameter.
+ #
+ rule find ( id : no-error ? )
+ {
+ local v = $(.id.$(id)) ;
+ if ! $(v)
+ {
+ v = [ find-really $(id) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ .id.$(id) = $(v) ;
+ }
+
+ if $(v) != none
+ {
+ return $(v) ;
+ }
+ else
+ {
+ if ! $(no-error)
+ {
+ local current-location = [ get location ] ;
+ ECHO "error: Unable to find file or target named" ;
+ ECHO "error: '$(id)'" ;
+ ECHO "error: referred from project at" ;
+ ECHO "error: '$(current-location)'" ;
+ ECHO $(extra-error-message) ;
+ EXIT ;
+ }
+ }
+ }
+
+ rule build-main-targets ( )
+ {
+ self.built-main-targets = true ;
+ for local a in $(self.alternatives)
+ {
+ local name = [ $(a).name ] ;
+ local target = $(self.main-target.$(name)) ;
+ if ! $(target)
+ {
+ local t = [ new main-target $(name) : $(self.project) ] ;
+ self.main-target.$(name) = $(t) ;
+ self.main-targets += $(t) ;
+ target = $(self.main-target.$(name)) ;
+ }
+
+ if $(name) in $(self.always-targets)
+ {
+ $(a).always ;
+ }
+
+ $(target).add-alternative $(a) ;
+ }
+ }
+
+ # Accessor, add a constant.
+ #
+ rule add-constant (
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ : type ? # Optional type of value.
+ )
+ {
+ switch $(type)
+ {
+ case path :
+ local r ;
+ for local v in $(value)
+ {
+ local l = $(self.location) ;
+ if ! $(l)
+ {
+ # Projects corresponding to config files do not have a
+ # 'location' attribute, but do have a source location.
+ # It might be more reasonable to make every project have
+ # a location and use some other approach to prevent buildable
+ # targets in config files, but that's for later.
+ l = [ get source-location ] ;
+ }
+ v = [ path.root [ path.make $(v) ] $(l) ] ;
+ # Now make the value absolute path.
+ v = [ path.root $(v) [ path.pwd ] ] ;
+ # Constants should be in platform-native form.
+ v = [ path.native $(v) ] ;
+ r += $(v) ;
+ }
+ value = $(r) ;
+ }
+ if ! $(name) in $(self.constants)
+ {
+ self.constants += $(name) ;
+ }
+ self.constant.$(name) = $(value) ;
+ # Inject the constant in the scope of the Jamroot module.
+ modules.poke $(self.project-module) : $(name) : $(value) ;
+ }
+
+ rule inherit ( parent )
+ {
+ for local c in [ modules.peek $(parent) : self.constants ]
+ {
+ # No need to pass the type. Path constants were converted to
+ # absolute paths already by parent.
+ add-constant $(c)
+ : [ modules.peek $(parent) : self.constant.$(c) ] ;
+ }
+
+ # Import rules from parent.
+ local this-module = [ project-module ] ;
+ local parent-module = [ $(parent).project-module ] ;
+ # Do not import rules coming from 'project-rules' as they must be
+ # imported localized.
+ local user-rules = [ set.difference
+ [ RULENAMES $(parent-module) ] :
+ [ RULENAMES project-rules ] ] ;
+ IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
+ EXPORT $(this-module) : $(user-rules) ;
+ }
+}
+
+
+# Helper rules to detect cycles in main target references.
+#
+local rule start-building ( main-target-instance )
+{
+ if $(main-target-instance) in $(.targets-being-built)
+ {
+ local names ;
+ for local t in $(.targets-being-built) $(main-target-instance)
+ {
+ names += [ $(t).full-name ] ;
+ }
+
+ errors.error "Recursion in main target references"
+ : "the following targets are being built currently:"
+ : $(names) ;
+ }
+ .targets-being-built += $(main-target-instance) ;
+}
+
+
+local rule end-building ( main-target-instance )
+{
+ .targets-being-built = $(.targets-being-built[1--2]) ;
+}
+
+
+# A named top-level target in Jamfile.
+#
+class main-target : abstract-target
+{
+ import assert ;
+ import errors ;
+ import feature ;
+ import print ;
+ import property-set ;
+ import sequence ;
+ import targets : start-building end-building ;
+
+ rule __init__ ( name : project )
+ {
+ abstract-target.__init__ $(name) : $(project) ;
+ }
+
+ # Add a new alternative for this target
+ rule add-alternative ( target )
+ {
+ local d = [ $(target).default-build ] ;
+ if $(self.alternatives) && ( $(self.default-build) != $(d) )
+ {
+ errors.error "default build must be identical in all alternatives"
+ : "main target is" [ full-name ]
+ : "with" [ $(d).raw ]
+ : "differing from previous default build" [ $(self.default-build).raw ] ;
+ }
+ else
+ {
+ self.default-build = $(d) ;
+ }
+ self.alternatives += $(target) ;
+ }
+
+ # Returns the best viable alternative for this property-set. See the
+ # documentation for selection rules.
+ #
+ local rule select-alternatives ( property-set debug ? )
+ {
+ # When selecting alternatives we have to consider defaults, for example:
+ # lib l : l.cpp : <variant>debug ;
+ # lib l : l_opt.cpp : <variant>release ;
+ # won't work unless we add default value <variant>debug.
+ property-set = [ $(property-set).add-defaults ] ;
+
+ # The algorithm: we keep the current best viable alternative. When we've
+ # got a new best viable alternative, we compare it with the current one.
+
+ local best ;
+ local best-properties ;
+
+ if $(self.alternatives[2-])
+ {
+ local bad ;
+ local worklist = $(self.alternatives) ;
+ while $(worklist) && ! $(bad)
+ {
+ local v = $(worklist[1]) ;
+ local properties = [ $(v).match $(property-set) $(debug) ] ;
+
+ if $(properties) != no-match
+ {
+ if ! $(best)
+ {
+ best = $(v) ;
+ best-properties = $(properties) ;
+ }
+ else
+ {
+ if $(properties) = $(best-properties)
+ {
+ bad = true ;
+ }
+ else if $(properties) in $(best-properties)
+ {
+ # Do nothing, this alternative is worse
+ }
+ else if $(best-properties) in $(properties)
+ {
+ best = $(v) ;
+ best-properties = $(properties) ;
+ }
+ else
+ {
+ bad = true ;
+ }
+ }
+ }
+ worklist = $(worklist[2-]) ;
+ }
+ if ! $(bad)
+ {
+ return $(best) ;
+ }
+ }
+ else
+ {
+ return $(self.alternatives) ;
+ }
+ }
+
+ rule apply-default-build ( property-set )
+ {
+ return [ targets.apply-default-build $(property-set)
+ : $(self.default-build) ] ;
+ }
+
+ # Select an alternative for this main target, by finding all alternatives
+ # whose requirements are satisfied by 'properties' and picking the one with
+ # the longest requirements set. Returns the result of calling 'generate' on
+ # that alternative.
+ #
+ rule generate ( property-set )
+ {
+ start-building $(__name__) ;
+
+ # We want composite properties in the build request to act as if all the
+ # properties they expand to were explicitly specified.
+ property-set = [ $(property-set).expand ] ;
+
+ local all-property-sets = [ apply-default-build $(property-set) ] ;
+ local usage-requirements = [ property-set.empty ] ;
+ local result ;
+ for local p in $(all-property-sets)
+ {
+ local r = [ generate-really $(p) ] ;
+ if $(r)
+ {
+ usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
+ result += $(r[2-]) ;
+ }
+ }
+ end-building $(__name__) ;
+ return $(usage-requirements) [ sequence.unique $(result) ] ;
+ }
+
+ # Generates the main target with the given property set and returns a list
+ # whose first element is a property-set object containing the usage
+ # requirements of the generated targets, followed by the generated virtual
+ # targets. It is possible that no targets are generated.
+ #
+ local rule generate-really ( property-set )
+ {
+ local best-alternatives = [ select-alternatives $(property-set) ] ;
+ if ! $(best-alternatives)
+ {
+ ECHO "error: No best alternative for" [ full-name ] ;
+ select-alternatives $(property-set) debug ;
+ return [ property-set.empty ] ;
+ }
+ else
+ {
+ # Now return virtual targets for the only alternative.
+ return [ $(best-alternatives).generate $(property-set) ] ;
+ }
+ }
+
+ rule rename ( new-name )
+ {
+ abstract-target.rename $(new-name) ;
+ for local a in $(self.alternatives)
+ {
+ $(a).rename $(new-name) ;
+ }
+ }
+}
+
+
+# Abstract target referring to a source file. This is an artificial entity
+# allowing the sources of a target to be represented using a list of abstract
+# target instances.
+#
+class file-reference : abstract-target
+{
+ import virtual-target ;
+ import property-set ;
+ import path ;
+
+ rule __init__ ( file : project )
+ {
+ abstract-target.__init__ $(file) : $(project) ;
+ }
+
+ rule generate ( properties )
+ {
+ return [ property-set.empty ] [ virtual-target.from-file $(self.name) :
+ [ location ] : $(self.project) ] ;
+ }
+
+ # Returns true if the referred-to file really exists.
+ rule exists ( )
+ {
+ location ;
+ return $(self.file-path) ;
+ }
+
+ # Returns the location of target. Needed by 'testing.jam'.
+ rule location ( )
+ {
+ if ! $(self.file-location)
+ {
+ local source-location = [ $(self.project).get source-location ] ;
+ for local src-dir in $(source-location)
+ {
+ if ! $(self.file-location)
+ {
+ local location = [ path.root $(self.name) $(src-dir) ] ;
+ if [ CHECK_IF_FILE [ path.native $(location) ] ]
+ {
+ self.file-location = $(src-dir) ;
+ self.file-path = $(location) ;
+ }
+ }
+ }
+ }
+ return $(self.file-location) ;
+ }
+}
+
+
+# Given a target-reference, made in the context of 'project', returns the
+# abstract-target instance that is referred to, as well as properties explicitly
+# specified for this reference.
+#
+rule resolve-reference ( target-reference : project )
+{
+ # Separate target name from properties override.
+ local split = [ MATCH "^([^<]*)(/(<.*))?$" : $(target-reference) ] ;
+ local id = $(split[1]) ;
+ local sproperties = ;
+ if $(split[3])
+ {
+ sproperties = [ property.make [ feature.split $(split[3]) ] ] ;
+ sproperties = [ feature.expand-composites $(sproperties) ] ;
+ }
+
+ # Find the target.
+ local target = [ $(project).find $(id) ] ;
+
+ return $(target) [ property-set.create $(sproperties) ] ;
+}
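+
+# Illustrative example (the target and project names here are hypothetical): a
+# reference such as "lib1/<link>static" would be split into the id "lib1" and a
+# property-set containing <link>static, so a caller could write:
+#
+#   local r = [ targets.resolve-reference lib1/<link>static : $(some-project) ] ;
+#   # $(r[1]) -- abstract-target instance found via $(some-project)
+#   # $(r[2]) -- property-set created from <link>static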
+
+
+# Attempts to generate the target given by a target reference, which can refer
+# either to a main target or to a file. Returns a list consisting of
+# - usage requirements
+# - generated virtual targets, if any
+#
+rule generate-from-reference (
+ target-reference # Target reference.
+ : project # Project where the reference is made.
+ : property-set # Properties of the main target that makes the reference.
+)
+{
+ local r = [ resolve-reference $(target-reference) : $(project) ] ;
+ local target = $(r[1]) ;
+ local sproperties = $(r[2]) ;
+
+ # Take properties which should be propagated and refine them with
+ # source-specific requirements.
+ local propagated = [ $(property-set).propagated ] ;
+ local rproperties = [ $(propagated).refine $(sproperties) ] ;
+ if $(rproperties[1]) = "@error"
+ {
+ errors.error
+ "When building" [ full-name ] " with properties " $(properties) :
+ "Invalid properties specified for " $(source) ":"
+ $(rproperties[2-]) ;
+ }
+ return [ $(target).generate $(rproperties) ] ;
+}
+
+rule apply-default-build ( property-set : default-build )
+{
+ # 1. First, see what properties from default-build are already present
+ # in property-set.
+
+ local raw = [ $(property-set).raw ] ;
+ local specified-features = $(raw:G) ;
+
+ local defaults-to-apply ;
+ for local d in [ $(default-build).raw ]
+ {
+ if ! $(d:G) in $(specified-features)
+ {
+ defaults-to-apply += $(d) ;
+ }
+ }
+
+ # 2. If there are any defaults to be applied, form a new build request.
+ # Pass it through to 'expand-no-defaults' since default-build might
+ # contain "release debug" resulting in two property-sets.
+ local result ;
+ if $(defaults-to-apply)
+ {
+ properties = [
+ build-request.expand-no-defaults
+
+ # We have to compress subproperties here to prevent property
+ # lists like:
+ #
+ # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
+ #
+ # from being expanded into:
+ #
+ # <toolset-msvc:version>7.1/<threading>multi
+ # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
+ #
+ # due to a cross-product property combination. That may be an
+ # indication that build-request.expand-no-defaults is the wrong
+ # rule to use here.
+ [ feature.compress-subproperties $(raw) ]
+ $(defaults-to-apply)
+ ] ;
+
+ if $(properties)
+ {
+ for local p in $(properties)
+ {
+ result += [ property-set.create
+ [ feature.expand [ feature.split $(p) ] ] ] ;
+ }
+ }
+ else
+ {
+ result = [ property-set.empty ] ;
+ }
+ }
+ else
+ {
+ result = $(property-set) ;
+ }
+ return $(result) ;
+}
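+
+# A worked example for the rule above (values are illustrative): if the build
+# request is <toolset>gcc and the default build is "<variant>debug
+# <variant>release", then <variant> is not yet specified, both defaults are
+# applied and two property-sets are returned, roughly
+# <toolset>gcc/<variant>debug and <toolset>gcc/<variant>release. If the build
+# request already contains some <variant> value, the default build adds nothing
+# and the original property-set is returned unchanged.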
+
+
+# Given a build request and requirements, returns the properties common to the
+# dependency build request and the target requirements.
+#
+# TODO: Document exactly what 'common properties' are, whether they should
+# include default property values, whether they should contain any conditional
+# properties or should those be already processed, etc. See whether there are
+# any differences between use cases with empty and non-empty build-request as
+# well as with requirements containing and those not containing any non-free
+# features.
+#
+rule common-properties ( build-request requirements )
+{
+ # For optimization, we add free requirements directly, without using a
+ # complex algorithm. This gives the complex algorithm a better chance of
+ # caching results.
+ local free = [ $(requirements).free ] ;
+ local non-free = [ property-set.create [ $(requirements).base ]
+ [ $(requirements).incidental ] ] ;
+
+ local key = .rp.$(build-request)-$(non-free) ;
+ if ! $($(key))
+ {
+ $(key) = [ common-properties2 $(build-request) $(non-free) ] ;
+ }
+ result = [ $($(key)).add-raw $(free) ] ;
+}
+
+
+# Given a 'context' -- a set of already present properties, and 'requirements',
+# decide which extra properties should be applied to 'context'. For conditional
+# requirements, this means evaluating the condition. For indirect conditional
+# requirements, this means calling a rule. Ordinary requirements are always
+# applied.
+#
+# Handles the situation where evaluating one conditional requirement affects
+# the conditions of other conditional requirements, such as:
+# <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+#
+# If 'what' is 'refined', returns the context refined with the new requirements.
+# If 'what' is 'added', returns just the requirements to be applied.
+#
+rule evaluate-requirements ( requirements : context : what )
+{
+ # Apply non-conditional requirements. It is possible that a later
+ # conditional requirement changes a value set by the non-conditional
+ # requirements. For example:
+ #
+ # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
+ #
+ # I am not sure if this should be an error, or not, especially given that
+ #
+ # <threading>single
+ #
+ # might come from project's requirements.
+
+ local unconditional = [ feature.expand [ $(requirements).non-conditional ] ] ;
+
+ local raw = [ $(context).raw ] ;
+ raw = [ property.refine $(raw) : $(unconditional) ] ;
+
+ # We have collected properties that surely must be present in common
+ # properties. We now try to figure out what other properties should be added
+ # in order to satisfy rules (4)-(6) from the docs.
+
+ local conditionals = [ $(requirements).conditional ] ;
+ # The 'count' variable has one element for each conditional requirement and
+ # for each occurrence of the indirect-conditional '<conditional>' feature. It
+ # is used as a loop counter: on each iteration we remove one element, and the
+ # property set should stabilize before the list runs out. It is assumed that
+ # #conditionals iterations are enough for properties to propagate along
+ # conditions in any direction.
+ local count = $(conditionals)
+ [ $(requirements).get <conditional> ]
+ and-once-more ;
+
+ local added-requirements ;
+
+ local current = $(raw) ;
+
+ # It is assumed that ordinary conditional requirements can not add
+ # <conditional> properties (a.k.a. indirect conditional properties), and
+ # that rules referred to by <conditional> properties can not add new
+ # <conditional> properties. So the list of indirect conditionals does not
+ # change.
+ local indirect = [ $(requirements).get <conditional> ] ;
+ indirect = [ MATCH ^@(.*) : $(indirect) ] ;
+
+ local ok ;
+ while $(count)
+ {
+ # Evaluate conditionals in context of current properties.
+ local e = [ property.evaluate-conditionals-in-context $(conditionals)
+ : $(current) ] ;
+
+ # Evaluate indirect conditionals.
+ for local i in $(indirect)
+ {
+ e += [ indirect.call $(i) $(current) ] ;
+ }
+
+ if $(e) = $(added-requirements)
+ {
+ # If we got the same result, we have found the final properties.
+ count = ;
+ ok = true ;
+ }
+ else
+ {
+ # Oops, conditional evaluation results have changed. Also 'current'
+ # contains leftovers from a previous evaluation. Recompute 'current'
+ # using initial properties and conditional requirements.
+ added-requirements = $(e) ;
+ current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ;
+ }
+ count = $(count[2-]) ;
+ }
+ if ! $(ok)
+ {
+ errors.error "Can not evaluate conditional properties " $(conditionals) ;
+ }
+
+ if $(what) = added
+ {
+ return [ property-set.create $(unconditional) $(added-requirements) ] ;
+ }
+ else if $(what) = refined
+ {
+ return [ property-set.create $(current) ] ;
+ }
+ else
+ {
+ errors.error "Invalid value of the 'what' parameter." ;
+ }
+}
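+
+# A short worked example for the rule above (purely illustrative): with the
+# conditional requirements
+#   <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+# and a context containing <toolset>gcc, the first pass adds <variant>release,
+# the second pass additionally yields <define>RELEASE, and the third pass
+# produces the same result, so the loop stops with both properties applied.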
+
+
+rule common-properties2 ( build-request requirements )
+{
+ # This guarantees that default properties are present in the result, unless
+ # they are overridden by some requirement. FIXME: There is a possibility that
+ # we have added <foo>bar, which is composite and expands to <foo2>bar2, but
+ # default value of <foo2> is not bar2, in which case it is not clear what to
+ # do.
+ #
+ build-request = [ $(build-request).add-defaults ] ;
+ # Features added by 'add-defaults' can be composite and expand to features
+ # without default values -- so they are not added yet. It could be clearer or
+ # faster to expand only the newly added properties, but that is not critical.
+ build-request = [ $(build-request).expand ] ;
+
+ return [ evaluate-requirements $(requirements) : $(build-request) :
+ refined ] ;
+}
+
+rule push-target ( target )
+{
+ .targets = $(target) $(.targets) ;
+}
+
+rule pop-target ( )
+{
+ .targets = $(.targets[2-]) ;
+}
+
+# Return the metatarget that is currently being generated.
+rule current ( )
+{
+ return $(.targets[1]) ;
+}
+
+
+# Implements the most standard way of constructing a main target alternative
+# from sources. Allows sources to be either files or other main targets and
+# handles generation of those dependency targets.
+#
+class basic-target : abstract-target
+{
+ import build-request ;
+ import build-system ;
+ import "class" : new ;
+ import errors ;
+ import feature ;
+ import property ;
+ import property-set ;
+ import sequence ;
+ import set ;
+ import targets ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ abstract-target.__init__ $(name) : $(project) ;
+
+ self.sources = $(sources) ;
+ if ! $(requirements) {
+ requirements = [ property-set.empty ] ;
+ }
+ self.requirements = $(requirements) ;
+ if ! $(default-build)
+ {
+ default-build = [ property-set.empty ] ;
+ }
+ self.default-build = $(default-build) ;
+ if ! $(usage-requirements)
+ {
+ usage-requirements = [ property-set.empty ] ;
+ }
+ self.usage-requirements = $(usage-requirements) ;
+
+ if $(sources:G)
+ {
+ errors.user-error properties found in the 'sources' parameter for
+ [ full-name ] ;
+ }
+ }
+
+ rule always ( )
+ {
+ self.always = 1 ;
+ }
+
+ # Returns the list of abstract-targets which are used as sources. The extra
+ # properties specified for sources are not represented. The only user for
+ # this rule at the moment is the "--dump-tests" feature of the test system.
+ #
+ rule sources ( )
+ {
+ if ! $(self.source-targets)
+ {
+ for local s in $(self.sources)
+ {
+ self.source-targets +=
+ [ targets.resolve-reference $(s) : $(self.project) ] ;
+ }
+ }
+ return $(self.source-targets) ;
+ }
+
+ rule requirements ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule default-build ( )
+ {
+ return $(self.default-build) ;
+ }
+
+ # Returns the alternative condition for this alternative, if the condition
+ # is satisfied by 'property-set'.
+ #
+ rule match ( property-set debug ? )
+ {
+ # The condition is composed of all base non-conditional properties. It
+ # is not clear if we should expand 'self.requirements' or not. For one
+ # thing, it would be nice to be able to put
+ # <toolset>msvc-6.0
+ # in requirements. On the other hand, if we have <variant>release as a
+ # condition it does not make sense to require <optimization>full to be
+ # in the build request just to select this variant.
+ local bcondition = [ $(self.requirements).base ] ;
+ local ccondition = [ $(self.requirements).conditional ] ;
+ local condition = [ set.difference $(bcondition) : $(ccondition) ] ;
+ if $(debug)
+ {
+ ECHO " next alternative: required properties:" $(condition:E=(empty)) ;
+ }
+
+ if $(condition) in [ $(property-set).raw ]
+ {
+ if $(debug)
+ {
+ ECHO " matched" ;
+ }
+ return $(condition) ;
+ }
+ else
+ {
+ if $(debug)
+ {
+ ECHO " not matched" ;
+ }
+ return no-match ;
+ }
+ }
+
+ # Takes a target reference, which might be either a target id or a dependency
+ # property, and generates that target using 'property-set' as the build
+ # request.
+ #
+ # The results are added to the variable called 'result-var'. Usage
+ # requirements are added to the variable called 'usage-requirements-var'.
+ #
+ rule generate-dependencies ( dependencies * : property-set
+ : result-var usage-requirements-var )
+ {
+ for local dependency in $(dependencies)
+ {
+ local grist = $(dependency:G) ;
+ local id = $(dependency:G=) ;
+
+ local result = [ targets.generate-from-reference $(id) :
+ $(self.project) : $(property-set) ] ;
+
+ $(result-var) += $(result[2-]:G=$(grist)) ;
+ $(usage-requirements-var) += [ $(result[1]).raw ] ;
+ }
+ }
+
+ # Determines final build properties, generates sources, and calls
+ # 'construct'. This method should not be overridden.
+ #
+ rule generate ( property-set )
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ local fn = [ full-name ] ;
+ ECHO [ targets.indent ] "Building target '$(fn)'" ;
+ targets.increase-indent ;
+ ECHO [ targets.indent ] "Build request: " $(property-set) [ $(property-set).raw ] ;
+ local cf = [ build-system.command-line-free-features ] ;
+ ECHO [ targets.indent ] "Command line free features: " [ $(cf).raw ] ;
+ ECHO [ targets.indent ] "Target requirements: " [ $(self.requirements).raw ] ;
+ }
+ targets.push-target $(__name__) ;
+
+ if ! $(self.generated.$(property-set))
+ {
+ # Apply free features from the command line. If the user said
+ # define=FOO
+ # they most likely want this define to be set for all compiles.
+ property-set = [ $(property-set).refine
+ [ build-system.command-line-free-features ] ] ;
+ local rproperties = [ targets.common-properties $(property-set)
+ $(self.requirements) ] ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ ECHO [ targets.indent ] "Common properties: " [ $(rproperties).raw ] ;
+ }
+
+ if ( $(rproperties[1]) != "@error" ) && ( [ $(rproperties).get
+ <build> ] != no )
+ {
+ local source-targets ;
+ local properties = [ $(rproperties).non-dependency ] ;
+ local usage-requirements ;
+
+ generate-dependencies [ $(rproperties).dependency ] :
+ $(rproperties) : properties usage-requirements ;
+
+ generate-dependencies $(self.sources) : $(rproperties) :
+ source-targets usage-requirements ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ ECHO [ targets.indent ] "Usage requirements for"
+ $(self.name)": " $(usage-requirements) ;
+ }
+
+ rproperties = [ property-set.create $(properties)
+ $(usage-requirements) ] ;
+ usage-requirements = [ property-set.create $(usage-requirements) ] ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Build properties: "
+ [ $(rproperties).raw ] ;
+ }
+
+ local extra = [ $(rproperties).get <source> ] ;
+ source-targets += $(extra:G=) ;
+ # We might get duplicate sources, for example if we link to two
+ # libraries having the same <library> usage requirement.
+ # Use a stable sort, since for some targets the order is
+ # important, e.g. the RUN_PY target needs the Python source
+ # to come first.
+ source-targets = [ sequence.unique $(source-targets) : stable ] ;
+
+ local result = [ construct $(self.name) : $(source-targets) :
+ $(rproperties) ] ;
+
+ if $(result)
+ {
+ local gur = $(result[1]) ;
+ result = $(result[2-]) ;
+
+ if $(self.always)
+ {
+ for local t in $(result)
+ {
+ $(t).always ;
+ }
+ }
+
+ local s = [ create-subvariant $(result)
+ : [ virtual-target.recent-targets ]
+ : $(property-set) : $(source-targets)
+ : $(rproperties) : $(usage-requirements) ] ;
+ virtual-target.clear-recent-targets ;
+
+ local ur = [ compute-usage-requirements $(s) ] ;
+ ur = [ $(ur).add $(gur) ] ;
+ $(s).set-usage-requirements $(ur) ;
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Usage requirements from"
+ $(self.name)": " [ $(ur).raw ] ;
+ }
+
+ self.generated.$(property-set) = $(ur) $(result) ;
+ }
+ }
+ else
+ {
+ if $(rproperties[1]) = "@error"
+ {
+ ECHO [ targets.indent ] "Skipping build of:" [ full-name ]
+ "cannot compute common properties" ;
+ }
+ else if [ $(rproperties).get <build> ] = no
+ {
+ # If we just see <build>no, we cannot produce any reasonable
+ # diagnostics. The code that adds this property is expected
+ # to explain why a target is not built, for example using
+ # the configure.log-component-configuration function.
+ }
+ else
+ {
+ ECHO [ targets.indent ] "Skipping build of: " [ full-name ]
+ " unknown reason" ;
+ }
+
+ # We are here either because there has been an error computing
+ # properties or there is <build>no in properties. In the latter
+ # case we do not want any diagnostic. In the former case, we
+ # need diagnostics. FIXME
+
+ # If this target fails to build, add <build>no to properties to
+ # cause any parent target to fail to build. Except that it
+ # - does not work now, since we check for <build>no only in
+ # common properties, but not in properties that came from
+ # dependencies
+ # - it is not clear if that is a good idea anyway. The alias
+ # target, for example, should not fail to build if a
+ # dependency fails.
+ self.generated.$(property-set) = [ property-set.create <build>no ] ;
+ }
+ }
+ else
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Already built" ;
+ local ur = $(self.generated.$(property-set)) ;
+ ur = $(ur[0]) ;
+ targets.increase-indent ;
+ ECHO [ targets.indent ] "Usage requirements from"
+ $(self.name)": " [ $(ur).raw ] ;
+ targets.decrease-indent ;
+ }
+ }
+
+ targets.pop-target ;
+ targets.decrease-indent ;
+ return $(self.generated.$(property-set)) ;
+ }
+
+ # Given the set of generated targets, and refined build properties,
+ # determines and sets appropriate usage requirements on those targets.
+ #
+ rule compute-usage-requirements ( subvariant )
+ {
+ local rproperties = [ $(subvariant).build-properties ] ;
+ xusage-requirements = [ targets.evaluate-requirements
+ $(self.usage-requirements) : $(rproperties) : added ] ;
+
+ # We generate all dependency properties and add them, as well as their
+ # usage requirements, to the result.
+ local extra ;
+ generate-dependencies [ $(xusage-requirements).dependency ] :
+ $(rproperties) : extra extra ;
+
+ local result = [ property-set.create
+ [ $(xusage-requirements).non-dependency ] $(extra) ] ;
+
+ # Propagate usage requirements we got from sources, except for the
+ # <pch-header> and <pch-file> features.
+ #
+ # That feature specifies which pch file to use, and should apply only to
+ # direct dependents. Consider:
+ #
+ # pch pch1 : ...
+ # lib lib1 : ..... pch1 ;
+ # pch pch2 :
+ # lib lib2 : pch2 lib1 ;
+ #
+ # Here, lib2 should not get <pch-header> property from pch1.
+ #
+ # Essentially, when those two features are in usage requirements, they
+ # are propagated only to direct dependents. We might need a more general
+ # mechanism, but for now, only those two features are special.
+ #
+ # TODO - Actually there are more possible candidates like for instance
+ # when listing static library X as a source for another static library.
+ # Then static library X will be added as a <source> property to the
+ # second library's usage requirements but those requirements should last
+ # only up to the first executable or shared library that actually links
+ # to it.
+ local raw = [ $(subvariant).sources-usage-requirements ] ;
+ raw = [ $(raw).raw ] ;
+ raw = [ property.change $(raw) : <pch-header> ] ;
+ raw = [ property.change $(raw) : <pch-file> ] ;
+ return [ $(result).add [ property-set.create $(raw) ] ] ;
+ }
+
+ # Creates a new subvariant instance for the given targets.
+ # 'root-targets' - virtual targets to be returned to dependants
+ # 'all-targets' - virtual targets created while building this main target
+ # 'build-request' - property-set instance with requested build properties
+ #
+ local rule create-subvariant ( root-targets * : all-targets * :
+ build-request : sources * : rproperties : usage-requirements )
+ {
+ for local e in $(root-targets)
+ {
+ $(e).root true ;
+ }
+
+ # Process all virtual targets that will be created if this main target
+ # is created.
+ local s = [ new subvariant $(__name__) : $(build-request) : $(sources) :
+ $(rproperties) : $(usage-requirements) : $(all-targets) ] ;
+ for local v in $(all-targets)
+ {
+ if ! [ $(v).creating-subvariant ]
+ {
+ $(v).creating-subvariant $(s) ;
+ }
+ }
+ return $(s) ;
+ }
+
+ # Constructs virtual targets for this abstract target and the dependency
+ # graph. Returns a usage-requirements property-set and a list of virtual
+ # targets. Should be overridden in derived classes.
+ #
+ rule construct ( name : source-targets * : properties * )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+}
+
+
+class typed-target : basic-target
+{
+ import generators ;
+
+ rule __init__ ( name : project : type : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+
+ self.type = $(type) ;
+ }
+
+ rule type ( )
+ {
+ return $(self.type) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local r = [ generators.construct $(self.project) $(name:S=) : $(self.type)
+ : [ property-set.create [ $(property-set).raw ]
+ <main-target-type>$(self.type) ]
+ : $(source-targets) : true ] ;
+ if ! $(r)
+ {
+ ECHO "warn: Unable to construct" [ full-name ] ;
+
+ # Are there any top-level generators for this type/property set.
+ if ! [ generators.find-viable-generators $(self.type)
+ : $(property-set) ]
+ {
+ ECHO "error: no generators were found for type '$(self.type)'" ;
+ ECHO "error: and the requested properties" ;
+ ECHO "error: make sure you've configured the needed tools" ;
+ ECHO "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
+ ECHO "To debug this problem, try the --debug-generators option." ;
+ EXIT ;
+ }
+ }
+ return $(r) ;
+ }
+}
+
+
+# Returns the list of sources to use when a main target rule is invoked with
+# 'sources'. If there are any objects in 'sources', they are treated as main
+# target instances, and the names of such targets are adjusted to be
+# '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled if
+# a non-empty value is passed as the 'no-renaming' parameter.
+#
+rule main-target-sources ( sources * : main-target-name : no-renaming ? )
+{
+ local result ;
+ for local t in $(sources)
+ {
+ if [ class.is-instance $(t) ]
+ {
+ local name = [ $(t).name ] ;
+ if ! $(no-renaming)
+ {
+ name = $(main-target-name)__$(name) ;
+ $(t).rename $(name) ;
+ }
+ # Inline targets are not built by default.
+ local p = [ $(t).project ] ;
+ $(p).mark-target-as-explicit $(name) ;
+ result += $(name) ;
+ }
+ else
+ {
+ result += $(t) ;
+ }
+ }
+ return $(result) ;
+}
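+
+# For illustration (all names here are made up): a Jamfile declaration such as
+#
+#   exe hello : hello.cpp [ obj helper : helper.cpp ] ;
+#
+# passes the inline 'obj' instance through this rule, which renames it to
+# "hello__helper" and marks it as explicit, so it is built only as a part of
+# 'hello'.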
+
+
+# Returns the requirements to use when declaring a main target, obtained by
+# translating all specified property paths and refining project requirements
+# with the ones specified for the target.
+#
+rule main-target-requirements (
+ specification * # Properties explicitly specified for the main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ specification += [ toolset.requirements ] ;
+
+ local requirements = [ property-set.refine-from-user-input
+ [ $(project).get requirements ] : $(specification) :
+ [ $(project).project-module ] : [ $(project).get location ] ] ;
+ if $(requirements[1]) = "@error"
+ {
+ errors.error "Conflicting requirements for target:" $(requirements) ;
+ }
+ return $(requirements) ;
+}
+
+
+# Returns the usage requirements to use when declaring a main target, which are
+# obtained by translating all specified property paths and adding project's
+# usage requirements.
+#
+rule main-target-usage-requirements (
+ specification * # Use-properties explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local project-usage-requirements = [ $(project).get usage-requirements ] ;
+
+ # We do not use 'refine-from-user-input' because:
+ # - I am not sure if removing parent's usage requirements makes sense
+ # - refining usage requirements is not needed, since usage requirements are
+ # always free.
+ local usage-requirements = [ property-set.create-from-user-input
+ $(specification)
+ : [ $(project).project-module ] [ $(project).get location ] ] ;
+
+ return [ $(project-usage-requirements).add $(usage-requirements) ] ;
+}
+
+
+# Returns the default build value to use when declaring a main target, which is
+# obtained by using the specified value if it is not empty and the project's
+# default-build attribute otherwise.
+#
+rule main-target-default-build (
+ specification * # Default build explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local result ;
+ if $(specification)
+ {
+ result = $(specification) ;
+ }
+ else
+ {
+ result = [ $(project).get default-build ] ;
+ }
+ return [ property-set.create-with-validation $(result) ] ;
+}
+
+
+# Registers the specified target as a main target alternative and returns it.
+#
+rule main-target-alternative ( target )
+{
+ local ptarget = [ $(target).project ] ;
+ $(ptarget).add-alternative $(target) ;
+ return $(target) ;
+}
+
+# Creates a new metatarget with the specified properties, using 'klass' as the
+# class. The 'name', 'sources', 'requirements', 'default-build' and
+# 'usage-requirements' are assumed to be in the form specified by the user in
+# the Jamfile corresponding to 'project'.
+#
+rule create-metatarget ( klass : project : name : sources * : requirements * :
+ default-build * : usage-requirements * )
+{
+ return [
+ targets.main-target-alternative
+ [ new $(klass) $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+}
+
+# Creates a typed-target with the specified properties. The 'name', 'sources',
+# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
+# the form specified by the user in the Jamfile corresponding to 'project'.
+#
+rule create-typed-target ( type : project : name : sources * : requirements * :
+ default-build * : usage-requirements * )
+{
+ return [
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : $(type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+}
diff --git a/jam-files/boost-build/build/toolset.jam b/jam-files/boost-build/build/toolset.jam
new file mode 100644
index 000000000..f2036d999
--- /dev/null
+++ b/jam-files/boost-build/build/toolset.jam
@@ -0,0 +1,502 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for toolset definition.
+
+import errors ;
+import feature ;
+import generators ;
+import numbers ;
+import path ;
+import property ;
+import regex ;
+import sequence ;
+import set ;
+
+
+.flag-no = 1 ;
+
+.ignore-requirements = ;
+
+# This is used only for testing, to make sure we do not get random extra
+# elements in paths.
+if --ignore-toolset-requirements in [ modules.peek : ARGV ]
+{
+ .ignore-requirements = 1 ;
+}
+
+
+# Initializes an additional toolset-like module. First loads the given
+# 'toolset-module' and then calls its 'init' rule with the trailing arguments.
+#
+rule using ( toolset-module : * )
+{
+ import $(toolset-module) ;
+ $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+}
+
+
+# Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
+# converted to '<toolset>gcc/<toolset-version>3.2'.
+#
+local rule normalize-condition ( property-sets * )
+{
+ local result ;
+ for local p in $(property-sets)
+ {
+ local split = [ feature.split $(p) ] ;
+ local expanded = [ feature.expand-subfeatures [ feature.split $(p) ] ] ;
+ result += $(expanded:J=/) ;
+ }
+ return $(result) ;
+}
+
+
+# Specifies if the 'flags' rule should check that the invoking module is the
+# same as the module we are setting the flag for. 'v' can be either 'checked' or
+# 'unchecked'. A subsequent call to 'pop-checking-for-flags-module' will restore
+# the setting that was in effect before calling this rule.
+#
+rule push-checking-for-flags-module ( v )
+{
+ .flags-module-checking = $(v) $(.flags-module-checking) ;
+}
+
+rule pop-checking-for-flags-module ( )
+{
+ .flags-module-checking = $(.flags-module-checking[2-]) ;
+}
+
+
+# Specifies the flags (variables) that must be set on targets under certain
+# conditions, described by arguments.
+#
+rule flags (
+ rule-or-module # If it contains a dot, it should be a rule name. The flags
+ # will be applied when that rule is used to set up build
+ # actions.
+ #
+ # If it does not contain a dot, it should be a module name.
+ # The flag will be applied for all rules in that module. If
+ # the module for the rule is different from the calling
+ # module, an error is issued.
+
+ variable-name # Variable that should be set on target.
+ condition * : # A condition when this flag should be applied. Should be a
+ # set of property sets. If one of those property sets is
+ # contained in the build properties, the flag will be used.
+ # Implied values are not allowed: "<toolset>gcc" should be
+ # used, not just "gcc". Subfeatures, like in
+ # "<toolset>gcc-3.2" are allowed. If left empty, the flag
+ # will be used unconditionally.
+ #
+ # Property sets may use value-less properties ('<a>' vs.
+ # '<a>value') to match absent properties. This allows
+ # separately matching:
+ #
+ # <architecture>/<address-model>64
+ # <architecture>ia64/<address-model>
+ #
+ # Where both features are optional. Without this syntax
+ # we would be forced to define "default" values.
+
+ values * : # The value to add to the variable. If <feature> is
+ # specified, then the value of that feature will be added.
+ unchecked ? # If value 'unchecked' is passed, will not test that flags
+ # are set for the calling module.
+ : hack-hack ? # For
+ # flags rule OPTIONS <cxx-abi> : -model ansi
+ # Treat <cxx-abi> as condition
+ # FIXME: ugly hack.
+)
+{
+ local caller = [ CALLER_MODULE ] ;
+ if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
+ && [ MATCH "(Jamfile<.*)" : $(caller) ]
+ {
+ # Unqualified rule name, used inside a Jamfile. Most likely used with
+ # the 'make' or 'notfile' rules. This prevents setting flags on the
+ # entire Jamfile module (it would be considered a rule), but who cares?
+ # Probably, 'flags' rule should be split into 'flags' and
+ # 'flags-on-module'.
+ rule-or-module = $(caller).$(rule-or-module) ;
+ }
+ else
+ {
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ if $(unchecked) != unchecked
+ && $(.flags-module-checking[1]) != unchecked
+ && $(module_) != $(caller)
+ {
+ errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
+ }
+ }
+
+ if $(condition) && ! $(condition:G=) && ! $(hack-hack)
+ {
+ # We have a condition in the form '<feature>', that is, without a value.
+ # That is an older syntax:
+ # flags gcc.link RPATH <dll-path> ;
+ # for compatibility, convert it to
+ # flags gcc.link RPATH : <dll-path> ;
+ values = $(condition) ;
+ condition = ;
+ }
+
+ if $(condition)
+ {
+ property.validate-property-sets $(condition) ;
+ condition = [ normalize-condition $(condition) ] ;
+ }
+
+ add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ;
+}
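+
+# Typical usage, as it might appear in a toolset module (shown only as an
+# illustration):
+#
+#   flags gcc.compile OPTIONS <optimization>speed : -O3 ;
+#
+# This adds -O3 to the OPTIONS variable on targets set up by gcc.compile
+# whenever the build properties contain <optimization>speed.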
+
+
+# Adds a new flag setting with the specified values. Does no checking.
+#
+local rule add-flag ( rule-or-module : variable-name : condition * : values * )
+{
+ .$(rule-or-module).flags += $(.flag-no) ;
+
+ # Store all flags for a module.
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ .module-flags.$(module_) += $(.flag-no) ;
+ # Store flag-no -> rule-or-module mapping.
+ .rule-or-module.$(.flag-no) = $(rule-or-module) ;
+
+ .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
+ .$(rule-or-module).values.$(.flag-no) += $(values) ;
+ .$(rule-or-module).condition.$(.flag-no) += $(condition) ;
+
+ .flag-no = [ numbers.increment $(.flag-no) ] ;
+}
+
+
+# Returns the first element of 'property-sets' which is a subset of
+# 'properties' or an empty list if no such element exists.
+#
+rule find-property-subset ( property-sets * : properties * )
+{
+ # Cut property values off.
+ local prop-keys = $(properties:G) ;
+
+ local result ;
+ for local s in $(property-sets)
+ {
+ if ! $(result)
+ {
+ # Handle value-less properties like '<architecture>' (compare with
+ # '<architecture>x86').
+
+ local set = [ feature.split $(s) ] ;
+
+ # Find the set of features that
+ # - have no property specified in required property set
+ # - are omitted in the build property set.
+ local default-props ;
+ for local i in $(set)
+ {
+ # If $(i) is a value-less property it should match default value
+ # of an optional property. See the first line in the example
+ # below:
+ #
+ # property set properties result
+ # <a> <b>foo <b>foo match
+ # <a> <b>foo <a>foo <b>foo no match
+ # <a>foo <b>foo <b>foo no match
+ # <a>foo <b>foo <a>foo <b>foo match
+ if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) )
+ {
+ default-props += $(i) ;
+ }
+ }
+
+ if $(set) in $(properties) $(default-props)
+ {
+ result = $(s) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a value to be added to some flag for some target based on the flag's
+# value definition and the given target's property set.
+#
+rule handle-flag-value ( value * : properties * )
+{
+ local result ;
+ if $(value:G)
+ {
+ local matches = [ property.select $(value) : $(properties) ] ;
+ for local p in $(matches)
+ {
+ local att = [ feature.attributes $(p:G) ] ;
+ if dependency in $(att)
+ {
+ # The value of a dependency feature is a target and needs to be
+ # actualized.
+ result += [ $(p:G=).actualize ] ;
+ }
+ else if path in $(att) || free in $(att)
+ {
+ local values ;
+ # Treat features with && in the value specially -- each
+ # &&-separated element is considered a separate value. This is
+ # needed to handle searched libraries or include paths, which
+ # may need to be in a specific order.
+ if ! [ MATCH (&&) : $(p:G=) ]
+ {
+ values = $(p:G=) ;
+ }
+ else
+ {
+ values = [ regex.split $(p:G=) "&&" ] ;
+ }
+ if path in $(att)
+ {
+ result += [ sequence.transform path.native : $(values) ] ;
+ }
+ else
+ {
+ result += $(values) ;
+ }
+ }
+ else
+ {
+ result += $(p:G=) ;
+ }
+ }
+ }
+ else
+ {
+ result += $(value) ;
+ }
+ return $(result) ;
+}
+
+
+# Given a rule name and a property set, returns a list of interleaved variables
+# names and values which must be set on targets for that rule/property-set
+# combination.
+#
+rule set-target-variables-aux ( rule-or-module : property-set )
+{
+ local result ;
+ properties = [ $(property-set).raw ] ;
+ for local f in $(.$(rule-or-module).flags)
+ {
+ local variable = $(.$(rule-or-module).variable.$(f)) ;
+ local condition = $(.$(rule-or-module).condition.$(f)) ;
+ local values = $(.$(rule-or-module).values.$(f)) ;
+
+ if ! $(condition) ||
+ [ find-property-subset $(condition) : $(properties) ]
+ {
+ local processed ;
+ for local v in $(values)
+ {
+ # The value might be <feature-name> so needs special treatment.
+ processed += [ handle-flag-value $(v) : $(properties) ] ;
+ }
+ for local r in $(processed)
+ {
+ result += $(variable) $(r) ;
+ }
+ }
+ }
+
+ # Strip away last dot separated part and recurse.
+ local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
+ if $(next)
+ {
+ result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
+ }
+ return $(result) ;
+}
+
+
+rule set-target-variables ( rule-or-module targets + : property-set )
+{
+ properties = [ $(property-set).raw ] ;
+ local key = $(rule-or-module).$(property-set) ;
+ local settings = $(.stv.$(key)) ;
+ if ! $(settings)
+ {
+ settings = [ set-target-variables-aux $(rule-or-module) :
+ $(property-set) ] ;
+
+ if ! $(settings)
+ {
+ settings = none ;
+ }
+ .stv.$(key) = $(settings) ;
+ }
+
+ if $(settings) != none
+ {
+ local var-name = ;
+ for local name-or-value in $(settings)
+ {
+ if $(var-name)
+ {
+ $(var-name) on $(targets) += $(name-or-value) ;
+ var-name = ;
+ }
+ else
+ {
+ var-name = $(name-or-value) ;
+ }
+ }
+ }
+}
+
+
+# Make toolset 'toolset', defined in a module of the same name, inherit from
+# 'base'.
+# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
+# Another 'init' is called, which forwards to the base one.
+# 2. All generators from 'base' are cloned. The ids are adjusted and <toolset>
+# property in requires is adjusted too.
+# 3. All flags are inherited.
+# 4. All rules are imported.
+#
+rule inherit ( toolset : base )
+{
+ import $(base) ;
+ inherit-generators $(toolset) : $(base) ;
+ inherit-flags $(toolset) : $(base) ;
+ inherit-rules $(toolset) : $(base) ;
+}
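+
+# For illustration, a derived toolset module (the name 'mygcc' is hypothetical)
+# could reuse everything its base defines:
+#
+#   import toolset ;
+#   toolset.inherit mygcc : gcc ;
+#
+# after which 'mygcc' has gcc's generators, flags and rules and only needs to
+# override whatever actually differs.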
+
+
+rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
+{
+ properties ?= <toolset>$(toolset) ;
+ local base-generators = [ generators.generators-for-toolset $(base) ] ;
+ for local g in $(base-generators)
+ {
+ local id = [ $(g).id ] ;
+
+ if ! $(id) in $(generators-to-ignore)
+ {
+ # Some generator names have multiple periods in their name, so
+ # $(id:B=$(toolset)) does not generate the right new-id name. E.g.
+ # if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is
+ # not what we want. Manually parse the base and suffix. If there is
+ # a better way to do this, I would love to see it. See also the
+ # register() rule in the generators module.
+ local base = $(id) ;
+ local suffix = "" ;
+ while $(base:S)
+ {
+ suffix = $(base:S)$(suffix) ;
+ base = $(base:B) ;
+ }
+ local new-id = $(toolset)$(suffix) ;
+
+ generators.register [ $(g).clone $(new-id) : $(properties) ] ;
+ }
+ }
+}
+
+
+# Brings all flag definitions from the 'base' toolset into the 'toolset'
+# toolset. Flag definitions whose conditions make use of properties in
+# 'prohibited-properties' are ignored. Note that a property is not the same as
+# a feature: <debug-symbols>on and <debug-symbols>off are different properties
+# of the same feature, so blocking one of them does not block the other one.
+#
+# The flag conditions are not altered at all, so if a condition includes a name,
+# or version of a base toolset, it will not ever match the inheriting toolset.
+# When such flag settings must be inherited, define a rule in base toolset
+# module and call it as needed.
+#
+rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * )
+{
+ for local f in $(.module-flags.$(base))
+ {
+ local rule-or-module = $(.rule-or-module.$(f)) ;
+ if ( [ set.difference
+ $(.$(rule-or-module).condition.$(f)) :
+ $(prohibited-properties) ]
+ || ! $(.$(rule-or-module).condition.$(f))
+ ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) )
+ {
+ local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ;
+ local new-rule-or-module ;
+ if $(rule_)
+ {
+ new-rule-or-module = $(toolset).$(rule_) ;
+ }
+ else
+ {
+ new-rule-or-module = $(toolset) ;
+ }
+
+ add-flag
+ $(new-rule-or-module)
+ : $(.$(rule-or-module).variable.$(f))
+ : $(.$(rule-or-module).condition.$(f))
+ : $(.$(rule-or-module).values.$(f)) ;
+ }
+ }
+}
+
+
+rule inherit-rules ( toolset : base : localize ? )
+{
+ # It appears that "action" creates a local rule.
+ local base-generators = [ generators.generators-for-toolset $(base) ] ;
+ local rules ;
+ for local g in $(base-generators)
+ {
+ rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ;
+ }
+ rules = [ sequence.unique $(rules) ] ;
+ IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ;
+ IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
+}
+
+
+# Return the list of global 'toolset requirements'. Those requirements will be
+# automatically added to the requirements of any main target.
+#
+rule requirements ( )
+{
+ return $(.requirements) ;
+}
+
+
+# Adds elements to the list of global 'toolset requirements'. The requirements
+# will be automatically added to the requirements for all main targets, as if
+# they were specified literally. For best results, all requirements added should
+# be conditional or indirect conditional.
+#
+rule add-requirements ( requirements * )
+{
+ if ! $(.ignore-requirements)
+ {
+ .requirements += $(requirements) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ local p = <b>0 <c>1 <d>2 <e>3 <f>4 ;
+ assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ;
+ assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;
+
+ local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
+ assert.result <a>/<b> : find-property-subset $(p-set) : ;
+ assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
+ assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
+ assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
+}
diff --git a/jam-files/boost-build/build/type.jam b/jam-files/boost-build/build/type.jam
new file mode 100644
index 000000000..1a7a57823
--- /dev/null
+++ b/jam-files/boost-build/build/type.jam
@@ -0,0 +1,425 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Deals with target type declaration and defines target class which supports
+# typed targets.
+
+import "class" : new ;
+import errors ;
+import feature ;
+import generators : * ;
+import project ;
+import property ;
+import scanner ;
+import os ;
+
+# The following import would create a circular dependency:
+# project -> project-root -> builtin -> type -> targets -> project
+# import targets ;
+
+# The feature is optional so it would never get added implicitly. It is used
+# only for internal purposes and in all cases we want to use it explicitly.
+feature.feature target-type : : composite optional ;
+
+feature.feature main-target-type : : optional incidental ;
+feature.feature base-target-type : : composite optional free ;
+
+
+# Registers a target type, possibly derived from a 'base-type'. Providing a list
+# of 'suffixes' here is a shortcut for separately calling the register-suffixes
+# rule with the given suffixes and the set-generated-target-suffix rule with the
+# first given suffix.
+#
+rule register ( type : suffixes * : base-type ? )
+{
+ # Type names cannot contain hyphens, because when used as feature-values
+ # they would be interpreted as composite features which need to be
+ # decomposed.
+ switch $(type)
+ {
+ case *-* : errors.error "type name \"$(type)\" contains a hyphen" ;
+ }
+
+ if $(type) in $(.types)
+ {
+ errors.error "Type $(type) is already registered." ;
+ }
+ else
+ {
+ .types += $(type) ;
+ .base.$(type) = $(base-type) ;
+ .derived.$(base-type) += $(type) ;
+
+ if $(suffixes)-is-not-empty
+ {
+ # Specify mapping from suffixes to type.
+ register-suffixes $(suffixes) : $(type) ;
+ # By default generated targets of 'type' will use the first of
+ # 'suffixes'. This may be overridden.
+ set-generated-target-suffix $(type) : : $(suffixes[1]) ;
+ }
+
+ feature.extend target-type : $(type) ;
+ feature.extend main-target-type : $(type) ;
+ feature.extend base-target-type : $(type) ;
+
+ feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
+ feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;
+
+ # We used to declare the main target rule only when a 'main' parameter
+ # has been specified. However, it is hard to decide that a type will
+ # *never* need a main target rule and so from time to time we needed to
+ # make yet another type 'main'. So now a main target rule is defined for
+ # each type.
+ main-rule-name = [ type-to-rule-name $(type) ] ;
+ .main-target-type.$(main-rule-name) = $(type) ;
+ IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;
+
+ # Adding a new derived type affects generator selection so we need to
+ # make the generator selection module update any of its cached
+ # information related to a new derived type being defined.
+ generators.update-cached-information-with-a-new-type $(type) ;
+ }
+}
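+
+# Illustrative registrations (the concrete suffix lists are examples only):
+#
+#   type.register CPP : cpp cxx cc ;
+#   type.register LIB ;
+#   type.register STATIC_LIB : a : LIB ;
+#
+# The first call also maps the listed suffixes to CPP and makes "cpp" the
+# generated-target suffix. The last call derives STATIC_LIB from LIB, so a
+# STATIC_LIB may be used wherever a LIB is expected, and defines the main
+# target rule "static-lib".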
+
+
+# Given a type, returns the name of the main target rule which creates targets
+# of that type.
+#
+rule type-to-rule-name ( type )
+{
+ # Lowercase everything. Convert underscores to dashes.
+ import regex ;
+ local n = [ regex.split $(type:L) "_" ] ;
+ return $(n:J=-) ;
+}
+
+
+# Given a main target rule name, returns the type for which it creates targets.
+#
+rule type-from-rule-name ( rule-name )
+{
+ return $(.main-target-type.$(rule-name)) ;
+}
+
+
+# Specifies that files with suffix from 'suffixes' be recognized as targets of
+# type 'type'. Issues an error if a different type is already specified for any
+# of the suffixes.
+#
+rule register-suffixes ( suffixes + : type )
+{
+ for local s in $(suffixes)
+ {
+ if ! $(.type.$(s))
+ {
+ .type.$(s) = $(type) ;
+ }
+ else if $(.type.$(s)) != $(type)
+ {
+ errors.error Attempting to specify multiple types for suffix
+ \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ;
+ }
+ }
+}
+
+
+# Returns true iff type has been registered.
+#
+rule registered ( type )
+{
+ if $(type) in $(.types)
+ {
+ return true ;
+ }
+}
+
+
+# Issues an error if 'type' is unknown.
+#
+rule validate ( type )
+{
+ if ! [ registered $(type) ]
+ {
+ errors.error "Unknown target type $(type)" ;
+ }
+}
+
+
+# Sets a scanner class that will be used for this 'type'.
+#
+rule set-scanner ( type : scanner )
+{
+ validate $(type) ;
+ .scanner.$(type) = $(scanner) ;
+}
+
+
+# Returns a scanner instance appropriate to 'type' and 'properties'.
+#
+rule get-scanner ( type : property-set )
+{
+ if $(.scanner.$(type))
+ {
+ return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
+ }
+}
+
+
+# Returns a base type for the given type or nothing in case the given type is
+# not derived.
+#
+rule base ( type )
+{
+ return $(.base.$(type)) ;
+}
+
+
+# Returns the given type and all of its base types in order of their distance
+# from type.
+#
+rule all-bases ( type )
+{
+ local result = $(type) ;
+ while $(type)
+ {
+ type = [ base $(type) ] ;
+ result += $(type) ;
+ }
+ return $(result) ;
+}
+
+
+# Returns the given type and all of its derived types in order of their distance
+# from type.
+#
+rule all-derived ( type )
+{
+ local result = $(type) ;
+ for local d in $(.derived.$(type))
+ {
+ result += [ all-derived $(d) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Returns true if 'type' is equal to 'base' or has 'base' as its direct or
+# indirect base.
+#
+rule is-derived ( type base )
+{
+ if $(base) in [ all-bases $(type) ]
+ {
+ return true ;
+ }
+}
+
+# Returns true if 'type' is either derived from or is equal to 'base'.
+#
+# TODO: It might be that is-derived and is-subtype were meant to be different
+# rules - one returning true for type = base and one not, but as currently
+# implemented they are actually the same. Clean this up.
+#
+rule is-subtype ( type base )
+{
+ return [ is-derived $(type) $(base) ] ;
+}
+
+
+# Store suffixes for generated targets.
+.suffixes = [ new property-map ] ;
+
+# Store prefixes for generated targets (e.g. "lib" for library).
+.prefixes = [ new property-map ] ;
+
+
+# Sets a file suffix to be used when generating a target of 'type' with the
+# specified properties. Can be called with no properties if no suffix has
+# already been specified for the 'type'. The 'suffix' parameter can be an empty
+# string ("") to indicate that no suffix should be used.
+#
+# Note that this does not cause files with 'suffix' to be automatically
+# recognized as being of 'type'. Two different types can use the same suffix for
+# their generated files but only one type can be auto-detected for a file with
+# that suffix. The user should explicitly specify which one using the
+# register-suffixes rule.
+#
+rule set-generated-target-suffix ( type : properties * : suffix )
+{
+ set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
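+
+# For example (the suffix values shown are illustrative), executables could use
+# a suffix only when targeting Windows:
+#
+#   type.set-generated-target-suffix EXE : : "" ;
+#   type.set-generated-target-suffix EXE : <target-os>windows : exe ;
+#
+# Generated EXE targets then get no suffix by default and ".exe" when the build
+# properties contain <target-os>windows.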
+
+
+# Change the suffix previously registered for this type/properties combination.
+# If suffix is not yet specified, sets it.
+#
+rule change-generated-target-suffix ( type : properties * : suffix )
+{
+ change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
+
+
+# Returns the suffix used when generating a file of 'type' with the given
+# properties.
+#
+rule generated-target-suffix ( type : property-set )
+{
+ return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
+}
+
+
+# Sets a target prefix that should be used when generating targets of 'type'
+# with the specified properties. Can be called with empty properties if no
+# prefix for 'type' has been specified yet.
+#
+# The 'prefix' parameter can be empty string ("") to indicate that no prefix
+# should be used.
+#
+# Usage example: library names use the "lib" prefix on unix.
+#
+rule set-generated-target-prefix ( type : properties * : prefix )
+{
+ set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
+
+
+# Change the prefix previously registered for this type/properties combination.
+# If prefix is not yet specified, sets it.
+#
+rule change-generated-target-prefix ( type : properties * : prefix )
+{
+ change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
+
+
+rule generated-target-prefix ( type : property-set )
+{
+ return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
+}
+
+
+# Common rules for prefix/suffix provisioning follow.
+
+local rule set-generated-target-ps ( ps : type : properties * : psval )
+{
+ properties = <target-type>$(type) $(properties) ;
+ $(.$(ps)es).insert $(properties) : $(psval) ;
+}
+
+
+local rule change-generated-target-ps ( ps : type : properties * : psval )
+{
+ properties = <target-type>$(type) $(properties) ;
+ local prev = [ $(.$(ps)es).find-replace $(properties) : $(psval) ] ;
+ if ! $(prev)
+ {
+ set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
+ }
+}
+
+
+# Returns either prefix or suffix (as indicated by 'ps') that should be used
+# when generating a target of 'type' with the specified properties. Parameter
+# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for
+# 'type', returns prefix/suffix for base type, if any.
+#
+local rule generated-target-ps-real ( ps : type : properties * )
+{
+ local result ;
+ local found ;
+ while $(type) && ! $(found)
+ {
+ result = [ $(.$(ps)es).find <target-type>$(type) $(properties) ] ;
+ # If the prefix/suffix is explicitly set to an empty string, we consider
+ # prefix/suffix to be found. If we were not to compare with "", there
+ # would be no way to specify an empty prefix/suffix.
+ if $(result)-is-not-empty
+ {
+ found = true ;
+ }
+ type = $(.base.$(type)) ;
+ }
+ if $(result) = ""
+ {
+ result = ;
+ }
+ return $(result) ;
+}
+
+
+local rule generated-target-ps ( ps : type : property-set )
+{
+ local key = .$(ps).$(type).$(property-set) ;
+ local v = $($(key)) ;
+ if ! $(v)
+ {
+ v = [ generated-target-ps-real $(ps) : $(type) : [ $(property-set).raw ]
+ ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $(v) != none
+ {
+ return $(v) ;
+ }
+}
+
+
+# Returns the file type given its name. If there are several dots in the
+# filename, each suffix is tried. E.g. for the name "file.so.1.2" the suffixes
+# "2", "1", and "so" will be tried.
+#
+rule type ( filename )
+{
+ if [ os.name ] in NT CYGWIN
+ {
+ filename = $(filename:L) ;
+ }
+ local type ;
+ while ! $(type) && $(filename:S)
+ {
+ local suffix = $(filename:S) ;
+ type = $(.type$(suffix)) ;
+ filename = $(filename:S=) ;
+ }
+ return $(type) ;
+}
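+
+# For example, assuming "cpp" has been registered as a suffix of the CPP type,
+# [ type.type "hello.cpp" ] returns CPP, while [ type.type "libfoo.so.1.2" ]
+# tries the suffixes "2", "1" and "so" in turn and returns whatever type, if
+# any, has been registered for ".so".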
+
+
+# Rule used to construct all main targets. Note that this rule gets imported
+# into the global namespace under different alias names and the exact target
+# type to construct is selected based on the alias used to actually invoke this
+# rule.
+#
+rule main-target-rule ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ # First discover the required target type based on the exact alias used to
+ # invoke this rule.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = $(bt[4]) ;
+ local target-type = [ type-from-rule-name $(rulename) ] ;
+
+ # This is a circular module dependency and so must be imported here.
+ import targets ;
+
+ return [ targets.create-typed-target $(target-type) : [ project.current ] :
+ $(name) : $(sources) : $(requirements) : $(default-build) :
+ $(usage-requirements) ] ;
+}
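+
+# As an illustration of the mechanism above, registering a hypothetical type,
+#
+#   type.register WIDGET : widget ;
+#
+# makes a global main target rule named "widget" available, so a Jamfile line
+# like "widget foo : foo.widget ;" ends up here. BACKTRACE reports the alias
+# "widget", type-from-rule-name maps it back to WIDGET, and a typed-target of
+# that type is declared in the current project.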
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ # TODO: Add tests for all the is-derived, is-base & related type relation
+ # checking rules.
+}
diff --git a/jam-files/boost-build/build/version.jam b/jam-files/boost-build/build/version.jam
new file mode 100644
index 000000000..7626ddda8
--- /dev/null
+++ b/jam-files/boost-build/build/version.jam
@@ -0,0 +1,161 @@
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+import numbers ;
+
+major = "2011" ;
+minor = "04" ;
+
+rule boost-build ( )
+{
+ return "$(major).$(minor)-svn" ;
+}
+
+rule print ( )
+{
+ if [ verify-engine-version ]
+ {
+ ECHO "Boost.Build" [ boost-build ] ;
+ }
+}
+
+rule verify-engine-version ( )
+{
+ local v = [ modules.peek : JAM_VERSION ] ;
+
+ if $(v[1]) != $(major) || $(v[2]) != $(minor)
+ {
+ local argv = [ modules.peek : ARGV ] ;
+ local e = $(argv[1]) ;
+ local l = [ modules.binding version ] ;
+ l = $(l:D) ;
+ l = $(l:D) ;
+ ECHO "warning: mismatched versions of Boost.Build engine and core" ;
+ ECHO "warning: Boost.Build engine ($(e)) is $(v:J=.)" ;
+ ECHO "warning: Boost.Build core (at $(l)) is" [ boost-build ] ;
+ }
+ else
+ {
+ return true ;
+ }
+}
+
+
+
+# Utility rule for testing whether all elements in a sequence are equal to 0.
+#
+local rule is-all-zeroes ( sequence * )
+{
+ local result = "true" ;
+ for local e in $(sequence)
+ {
+ if $(e) != "0"
+ {
+ result = "" ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns "true" if the first version is less than the second one.
+#
+rule version-less ( lhs + : rhs + )
+{
+ numbers.check $(lhs) ;
+ numbers.check $(rhs) ;
+
+ local done ;
+ local result ;
+
+ while ! $(done) && $(lhs) && $(rhs)
+ {
+ if [ numbers.less $(lhs[1]) $(rhs[1]) ]
+ {
+ done = "true" ;
+ result = "true" ;
+ }
+ else if [ numbers.less $(rhs[1]) $(lhs[1]) ]
+ {
+ done = "true" ;
+ }
+ else
+ {
+ lhs = $(lhs[2-]) ;
+ rhs = $(rhs[2-]) ;
+ }
+ }
+ if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] )
+ {
+ result = "true" ;
+ }
+
+ return $(result) ;
+}
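+
+# For example (values mirroring the tests below; illustrative only):
+#
+#   version-less 3 1 20 : 3 4 10      # "true" -- decided at the second component
+#   version-less 3 1 10 : 3 1 10      # empty  -- equal versions are not "less"
+#   version-less 3 1 10 : 3 1 10 0 0  # empty  -- trailing zeroes are ignored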
+
+
+# Returns "true" if the current JAM version version is at least the given
+# version.
+#
+rule check-jam-version ( version + )
+{
+ local version-tag = $(version:J=.) ;
+ if ! $(version-tag)
+ {
+ errors.error Invalid version specifier: : $(version:E="(undefined)") ;
+ }
+
+ if ! $(.jam-version-check.$(version-tag))-is-not-empty
+ {
+ local jam-version = [ modules.peek : JAM_VERSION ] ;
+ if ! $(jam-version)
+ {
+ errors.error "Unable to deduce Boost Jam version. Your Boost Jam"
+ "installation is most likely terribly outdated." ;
+ }
+ .jam-version-check.$(version-tag) = "true" ;
+ if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ]
+ {
+ .jam-version-check.$(version-tag) = "" ;
+ }
+ }
+ return $(.jam-version-check.$(version-tag)) ;
+}
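+
+# For example, a project that needs a sufficiently recent engine might guard
+# itself like this (an illustrative sketch; the version numbers are just an
+# assumption):
+#
+#   import version ;
+#   if ! [ version.check-jam-version 3 1 17 ]
+#   {
+#       ECHO "error: Boost Jam 3.1.17 or later is required." ;
+#       EXIT ;
+#   }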
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local jam-version = [ modules.peek : JAM_VERSION ] ;
+ local future-version = $(jam-version) ;
+ future-version += "1" ;
+
+ assert.true check-jam-version $(jam-version) ;
+ assert.false check-jam-version $(future-version) ;
+
+ assert.true version-less 0 : 1 ;
+ assert.false version-less 0 : 0 ;
+ assert.true version-less 1 : 2 ;
+ assert.false version-less 1 : 1 ;
+ assert.false version-less 2 : 1 ;
+ assert.true version-less 3 1 20 : 3 4 10 ;
+ assert.false version-less 3 1 10 : 3 1 10 ;
+ assert.false version-less 3 4 10 : 3 1 20 ;
+ assert.true version-less 3 1 20 5 1 : 3 4 10 ;
+ assert.false version-less 3 1 10 5 1 : 3 1 10 ;
+ assert.false version-less 3 4 10 5 1 : 3 1 20 ;
+ assert.true version-less 3 1 20 : 3 4 10 5 1 ;
+ assert.true version-less 3 1 10 : 3 1 10 5 1 ;
+ assert.false version-less 3 4 10 : 3 1 20 5 1 ;
+ assert.false version-less 3 1 10 : 3 1 10 0 0 ;
+ assert.false version-less 3 1 10 0 0 : 3 1 10 ;
+ assert.false version-less 3 1 10 0 : 3 1 10 0 0 ;
+ assert.false version-less 3 1 10 0 : 03 1 10 0 0 ;
+ assert.false version-less 03 1 10 0 : 3 1 10 0 0 ;
+
+ # TODO: Add tests for invalid input data being sent to version-less.
+}
diff --git a/jam-files/boost-build/build/virtual-target.jam b/jam-files/boost-build/build/virtual-target.jam
new file mode 100644
index 000000000..2e8446bcc
--- /dev/null
+++ b/jam-files/boost-build/build/virtual-target.jam
@@ -0,0 +1,1317 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements virtual targets, which correspond to actual files created during a
+# build but are not yet targets in the Jam sense. They are needed, for example,
+# when searching for possible transformation sequences, while it is not yet
+# known whether a particular target should be created at all.
+
+import "class" : new ;
+import errors ;
+import path ;
+import sequence ;
+import set ;
+import type ;
+import utility ;
+
+
+# +--------------------------+
+# | virtual-target |
+# +==========================+
+# | actualize |
+# +--------------------------+
+# | actualize-action() = 0 |
+# | actualize-location() = 0 |
+# +----------------+---------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# +---------------------+ +-------+--------------+
+# | action | | abstract-file-target |
+# +=====================| * +======================+
+# | action-name | +--+ action |
+# | properties | | +----------------------+
+# +---------------------+--+ | actualize-action() |
+# | actualize() |0..1 +-----------+----------+
+# | path() | |
+# | adjust-properties() | sources |
+# | actualize-sources() | targets |
+# +------+--------------+ ^
+# | / \
+# ^ +-+-+
+# / \ |
+# +-+-+ +-------------+-------------+
+# | | |
+# | +------+---------------+ +--------+-------------+
+# | | file-target | | searched-lib-target |
+# | +======================+ +======================+
+# | | actualize-location() | | actualize-location() |
+# | +----------------------+ +----------------------+
+# |
+# +-+------------------------------+
+# | |
+# +----+----------------+ +---------+-----------+
+# | compile-action | | link-action |
+# +=====================+ +=====================+
+# | adjust-properties() | | adjust-properties() |
+# +---------------------+ | actualize-sources() |
+# +---------------------+
+#
+# The 'compile-action' and 'link-action' classes are not defined here but in
+# builtin.jam modules. They are shown in the diagram to give the big picture.
+
+
+# Models a potential target. It can be converted into a Jam target and used in
+# building, if needed. However, it can be also dropped, which allows us to
+# search for different transformations and select only one.
+#
+class virtual-target
+{
+ import scanner ;
+ import sequence ;
+ import utility ;
+ import virtual-target ;
+
+ rule __init__ (
+ name # Target/project name.
+ : project # Project to which this target belongs.
+ )
+ {
+ self.name = $(name) ;
+ self.project = $(project) ;
+ self.dependencies = ;
+ }
+
+ # Name of this target.
+ #
+ rule name ( )
+ {
+ return $(self.name) ;
+ }
+
+ # Project of this target.
+ #
+ rule project ( )
+ {
+ return $(self.project) ;
+ }
+
+ # Adds additional 'virtual-target' instances this one depends on.
+ #
+ rule depends ( d + )
+ {
+ self.dependencies = [ sequence.merge $(self.dependencies) :
+ [ sequence.insertion-sort $(d) ] ] ;
+ }
+
+ rule dependencies ( )
+ {
+ return $(self.dependencies) ;
+ }
+
+ rule always ( )
+ {
+ .always = 1 ;
+ }
+
+ # Generates all the actual targets and sets up build actions for this
+ # target.
+ #
+ # If 'scanner' is specified, creates an additional target with the same
+ # location as the actual target, which will depend on the actual target and
+ # be associated with a 'scanner'. That additional target is returned. See
+ # the docs (#dependency_scanning) for rationale. Target must correspond to a
+ # file if 'scanner' is specified.
+ #
+ # If scanner is not specified then the actual target is returned.
+ #
+ rule actualize ( scanner ? )
+ {
+ local actual-name = [ actualize-no-scanner ] ;
+
+ if $(.always)
+ {
+ ALWAYS $(actual-name) ;
+ }
+
+ if ! $(scanner)
+ {
+ return $(actual-name) ;
+ }
+ else
+ {
+ # Add the scanner instance to the grist for name.
+ local g = [ sequence.join
+ [ utility.ungrist $(actual-name:G) ] $(scanner) : - ] ;
+ local name = $(actual-name:G=$(g)) ;
+
+ if ! $(self.made.$(name))
+ {
+ self.made.$(name) = true ;
+
+ DEPENDS $(name) : $(actual-name) ;
+
+ actualize-location $(name) ;
+
+ scanner.install $(scanner) : $(name) $(__name__) ;
+ }
+ return $(name) ;
+ }
+ }
+
+# private: (overridables)
+
+ # Sets up build actions for 'target'. Should call appropriate rules and set
+ # target variables.
+ #
+ rule actualize-action ( target )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Sets up variables on 'target' which specify its location.
+ #
+ rule actualize-location ( target )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # If the target is a generated one, returns the path where it will be
+ # generated. Otherwise, returns an empty list.
+ #
+ rule path ( )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Returns the actual target name to be used in case when no scanner is
+ # involved.
+ #
+ rule actual-name ( )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+# implementation
+ rule actualize-no-scanner ( )
+ {
+ # In fact, we just need to merge virtual-target with
+ # abstract-file-target as the latter is the only class derived from the
+ # former. But that has been left for later.
+
+ errors.error "method should be defined in derived classes" ;
+ }
+}
+
+
+# Target corresponding to a file. The exact mapping for file is not yet
+# specified in this class. (TODO: Actually, the class name could be better...)
+#
+# May be a source file (when no action is specified) or a derived file
+# (otherwise).
+#
+# The target's grist is a concatenation of its project's location, action
+# properties (for derived targets) and, optionally, value identifying the main
+# target.
+#
+class abstract-file-target : virtual-target
+{
+ import project ;
+ import regex ;
+ import sequence ;
+ import path ;
+ import type ;
+ import property-set ;
+ import indirect ;
+
+ rule __init__ (
+ name # Target's name.
+ exact ? # If non-empty, the name is exactly the name the created file
+ # should have. Otherwise, the '__init__' method will add a
+ # suffix obtained from 'type' by calling
+ # 'type.generated-target-suffix'.
+ : type ? # Target's type.
+ : project
+ : action ?
+ )
+ {
+ virtual-target.__init__ $(name) : $(project) ;
+
+ self.type = $(type) ;
+ self.action = $(action) ;
+ if $(action)
+ {
+ $(action).add-targets $(__name__) ;
+
+ if $(self.type) && ! $(exact)
+ {
+ _adjust-name $(name) ;
+ }
+ }
+ }
+
+ rule type ( )
+ {
+ return $(self.type) ;
+ }
+
+ # Sets the path. When generating target name, it will override any path
+ # computation from properties.
+ #
+ rule set-path ( path )
+ {
+ self.path = [ path.native $(path) ] ;
+ }
+
+ # Returns the currently set action.
+ #
+ rule action ( )
+ {
+ return $(self.action) ;
+ }
+
+ # Sets/gets the 'root' flag. Target is root if it directly corresponds to
+ # some variant of a main target.
+ #
+ rule root ( set ? )
+ {
+ if $(set)
+ {
+ self.root = true ;
+ }
+ return $(self.root) ;
+ }
+
+ # Gets or sets the subvariant which created this target. The subvariant is
+ # set when the target is brought into existence and is never changed after
+ # that. In particular, if a target is shared by several subvariants, only
+ # the first one is stored.
+ #
+ rule creating-subvariant ( s ? # If specified, specifies the value to set,
+ # which should be a 'subvariant' class
+ # instance.
+ )
+ {
+ if $(s) && ! $(self.creating-subvariant)
+ {
+ self.creating-subvariant = $(s) ;
+ }
+ return $(self.creating-subvariant) ;
+ }
+
+ rule actualize-action ( target )
+ {
+ if $(self.action)
+ {
+ $(self.action).actualize ;
+ }
+ }
+
+ # Return a human-readable representation of this target. If this target has
+ # an action, that is:
+ #
+ # { <action-name>-<self.name>.<self.type> <action-sources>... }
+ #
+ # otherwise, it is:
+ #
+ # { <self.name>.<self.type> }
+ #
+ rule str ( )
+ {
+ local action = [ action ] ;
+ local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ;
+
+ if $(action)
+ {
+ local sources = [ $(action).sources ] ;
+ local action-name = [ $(action).action-name ] ;
+
+ local ss ;
+ for local s in $(sources)
+ {
+ ss += [ $(s).str ] ;
+ }
+
+ return "{" $(action-name)-$(name-dot-type) $(ss) "}" ;
+ }
+ else
+ {
+ return "{" $(name-dot-type) "}" ;
+ }
+ }
+
+ rule less ( a )
+ {
+ if [ str ] < [ $(a).str ]
+ {
+ return true ;
+ }
+ }
+
+ rule equal ( a )
+ {
+ if [ str ] = [ $(a).str ]
+ {
+ return true ;
+ }
+ }
+
+# private:
+ rule actual-name ( )
+ {
+ if ! $(self.actual-name)
+ {
+ local grist = [ grist ] ;
+ local basename = [ path.native $(self.name) ] ;
+ self.actual-name = <$(grist)>$(basename) ;
+ }
+ return $(self.actual-name) ;
+ }
+
+ # Helper to 'actual-name', above. Computes a unique prefix used to
+ # distinguish this target from other targets with the same name creating
+ # different files.
+ #
+ rule grist ( )
+ {
+ # Depending on target, there may be different approaches to generating
+ # unique prefixes. We generate prefixes in the form:
+ # <one letter approach code> <the actual prefix>
+ local path = [ path ] ;
+ if $(path)
+ {
+ # The target will be generated to a known path. Just use the path
+ # for identification, since path is as unique as it can get.
+ return p$(path) ;
+ }
+ else
+ {
+ # File is either source, which will be searched for, or is not a
+ # file at all. Use the location of project for distinguishing.
+ local project-location = [ $(self.project).get location ] ;
+ local location-grist = [ sequence.join [ regex.split
+ $(project-location) "/" ] : "!" ] ;
+
+ if $(self.action)
+ {
+ local ps = [ $(self.action).properties ] ;
+ local property-grist = [ $(ps).as-path ] ;
+ # 'property-grist' can be empty when 'ps' is an empty property
+ # set.
+ if $(property-grist)
+ {
+ location-grist = $(location-grist)/$(property-grist) ;
+ }
+ }
+
+ return l$(location-grist) ;
+ }
+ }
+
+ # Given the target name specified in the constructor, returns the name that
+ # should really be used, by looking at the <tag> properties. Tag properties
+ # need to be specified as <tag>@rule-name. This makes Boost.Build call the
+ # specified rule with the target name, type and properties to get the new
+ # name. If no <tag> property is specified, or the rule specified by <tag>
+ # returns nothing, returns the result of calling
+ # virtual-target.add-prefix-and-suffix.
+ #
+ rule _adjust-name ( specified-name )
+ {
+ local ps ;
+ if $(self.action)
+ {
+ ps = [ $(self.action).properties ] ;
+ }
+ else
+ {
+ ps = [ property-set.empty ] ;
+ }
+
+ # We add ourselves to the properties so that any tag rule can get more
+ # direct information about the target than just that available through
+ # the properties. This is useful in implementing name changes based on
+ # the sources of the target. For example to make unique names of object
+ # files based on the source file. --grafik
+ ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
+
+ local tag = [ $(ps).get <tag> ] ;
+
+ if $(tag)
+ {
+ local rule-name = [ MATCH ^@(.*) : $(tag) ] ;
+ if $(rule-name)
+ {
+ if $(tag[2])
+ {
+ errors.error "<tag>@rulename is present but is not the only"
+ "<tag> feature" ;
+ }
+
+ self.name = [ indirect.call $(rule-name) $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
+ else
+ {
+ errors.error
+ "The value of the <tag> feature must be '@rule-name'" ;
+ }
+ }
+
+ # If there is no tag or the tag rule returned nothing.
+ if ! $(tag) || ! $(self.name)
+ {
+ self.name = [ virtual-target.add-prefix-and-suffix $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
+ }
+
+ rule actualize-no-scanner ( )
+ {
+ local name = [ actual-name ] ;
+
+ # Do anything only on the first invocation.
+ if ! $(self.made.$(name))
+ {
+ self.made.$(name) = true ;
+
+ if $(self.action)
+ {
+ # Only targets with an action are registered. For a non-derived
+ # target we do not care if there are several virtual targets that
+ # refer to the same name. One case when this is unavoidable is
+ # when the file name is main.cpp and two targets have types CPP
+ # (for compiling) and MOCCABLE_CPP (for conversion to H via Qt
+ # tools).
+ virtual-target.register-actual-name $(name) : $(__name__) ;
+ }
+
+ for local i in $(self.dependencies)
+ {
+ DEPENDS $(name) : [ $(i).actualize ] ;
+ }
+
+ actualize-location $(name) ;
+ actualize-action $(name) ;
+ }
+ return $(name) ;
+ }
+}
+
+
+# Prepends the prefix and appends the suffix appropriate to the
+# 'type'/'property-set' combination to the specified name and returns the
+# result.
+#
+rule add-prefix-and-suffix ( specified-name : type ? : property-set )
+{
+ local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ;
+
+ # Handle suffixes for which no leading dot is desired. Those are specified
+ # by enclosing them in <...>. Needed by python so it can create "_d.so"
+ # extensions, for example.
+ if $(suffix:G)
+ {
+ suffix = [ utility.ungrist $(suffix) ] ;
+ }
+ else
+ {
+ suffix = .$(suffix) ;
+ }
+
+ local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ;
+
+ if [ MATCH ^($(prefix)) : $(specified-name) ]
+ {
+ prefix = ;
+ }
+ return $(prefix:E="")$(specified-name)$(suffix:E="") ;
+}
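+
+# For example, with a hypothetical configuration where the LIB type uses the
+# prefix "lib" and the suffix "a" for the given property set (illustrative
+# values only):
+#
+#   add-prefix-and-suffix hello : LIB : $(ps)     # -> libhello.a
+#   add-prefix-and-suffix libhello : LIB : $(ps)  # -> libhello.a
+#                                                 # (an existing prefix is not duplicated)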
+
+
+# File targets with explicitly known location.
+#
+# The file path is determined as
+# * Value passed to the 'set-path' method, if any.
+# * For derived files, project's build dir, joined with components that
+# describe action properties. If free properties are not equal to the
+# project's reference properties an element with the name of the main
+# target is added.
+# * For source files, project's source dir.
+#
+# The file suffix is determined as:
+# * The value passed to the 'suffix' method, if any.
+# * The suffix corresponding to the target's type.
+#
+class file-target : abstract-file-target
+{
+ import "class" : new ;
+ import common ;
+ import errors ;
+
+ rule __init__ (
+ name exact ?
+ : type ? # Optional type for this target.
+ : project
+ : action ?
+ : path ?
+ )
+ {
+ abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project) :
+ $(action) ;
+
+ self.path = $(path) ;
+ }
+
+ rule clone-with-different-type ( new-type )
+ {
+ return [ new file-target $(self.name) exact : $(new-type) :
+ $(self.project) : $(self.action) : $(self.path) ] ;
+ }
+
+ rule actualize-location ( target )
+ {
+ if $(self.action)
+ {
+ # This is a derived file.
+ local path = [ path ] ;
+ LOCATE on $(target) = $(path) ;
+
+ # Make sure the path exists.
+ DEPENDS $(target) : $(path) ;
+ common.MkDir $(path) ;
+
+ # It is possible that the target name includes a directory too, for
+ # example when installing headers. Create that directory.
+ if $(target:D)
+ {
+ local d = $(target:D) ;
+ d = $(d:R=$(path)) ;
+ DEPENDS $(target) : $(d) ;
+ common.MkDir $(d) ;
+ }
+
+ # For a real file target, we create a fake target depending on the
+ # real target. This allows us to run
+ #
+ # bjam hello.o
+ #
+ # without trying to guess the name of the real target. Note that the
+ # target has no directory name and uses a special <e> grist.
+ #
+ # First, that means that "bjam hello.o" will build all known hello.o
+ # targets. Second, the <e> grist makes sure this target will not be
+ # confused with other targets. For example, if we have a subdir 'test'
+ # with a target 'test' in it that includes a 'test.o' file, then the
+ # target for the directory will be just 'test', the target for test.o
+ # will be <ptest/bin/gcc/debug>test.o, and the target we create below
+ # will be <e>test.o.
+ DEPENDS $(target:G=e) : $(target) ;
+ # Allow bjam <path-to-file>/<file> to work. This will not catch all
+ # possible ways to refer to the path (relative/absolute, extra ".",
+ # various ".."), but should help in obvious cases.
+ DEPENDS $(target:G=e:R=$(path)) : $(target) ;
+ }
+ else
+ {
+ SEARCH on $(target) = [ path.native $(self.path) ] ;
+ }
+ }
+
+ # Returns the directory for this target.
+ #
+ rule path ( )
+ {
+ if ! $(self.path)
+ {
+ if $(self.action)
+ {
+ local p = [ $(self.action).properties ] ;
+ local path,relative-to-build-dir = [ $(p).target-path ] ;
+ local path = $(path,relative-to-build-dir[1]) ;
+ local relative-to-build-dir = $(path,relative-to-build-dir[2]) ;
+
+ if $(relative-to-build-dir)
+ {
+ path = [ path.join [ $(self.project).build-dir ] $(path) ] ;
+ }
+
+ self.path = [ path.native $(path) ] ;
+ }
+ }
+ return $(self.path) ;
+ }
+}
+
+
+class notfile-target : abstract-file-target
+{
+ rule __init__ ( name : project : action ? )
+ {
+ abstract-file-target.__init__ $(name) : : $(project) : $(action) ;
+ }
+
+ # Returns nothing to indicate that the target's path is not known.
+ #
+ rule path ( )
+ {
+ return ;
+ }
+
+ rule actualize-location ( target )
+ {
+ NOTFILE $(target) ;
+ ALWAYS $(target) ;
+ # TEMPORARY $(target) ;
+ NOUPDATE $(target) ;
+ }
+}
+
+
+# Class representing an action. Both 'targets' and 'sources' should list
+# instances of 'virtual-target'. Action name should name a rule with this
+# prototype:
+# rule action-name ( targets + : sources * : properties * )
+# Targets and sources are passed as actual Jam targets. The rule may not
+# establish additional dependency relationships.
+#
+class action
+{
+ import "class" ;
+ import errors ;
+ import type ;
+ import toolset ;
+ import property-set ;
+ import indirect ;
+ import path ;
+ import set : difference ;
+
+ rule __init__ ( sources * : action-name + : property-set ? )
+ {
+ self.sources = $(sources) ;
+
+ self.action-name = [ indirect.make-qualified $(action-name) ] ;
+
+ if ! $(property-set)
+ {
+ property-set = [ property-set.empty ] ;
+ }
+
+ if ! [ class.is-instance $(property-set) ]
+ {
+ errors.error "Property set instance required" ;
+ }
+
+ self.properties = $(property-set) ;
+ }
+
+ rule add-targets ( targets * )
+ {
+ self.targets += $(targets) ;
+ }
+
+ rule replace-targets ( old-targets * : new-targets * )
+ {
+ self.targets = [ set.difference $(self.targets) : $(old-targets) ] ;
+ self.targets += $(new-targets) ;
+ }
+
+ rule targets ( )
+ {
+ return $(self.targets) ;
+ }
+
+ rule sources ( )
+ {
+ return $(self.sources) ;
+ }
+
+ rule action-name ( )
+ {
+ return $(self.action-name) ;
+ }
+
+ rule properties ( )
+ {
+ return $(self.properties) ;
+ }
+
+ # Generates actual build instructions.
+ #
+ rule actualize ( )
+ {
+ if ! $(self.actualized)
+ {
+ self.actualized = true ;
+
+ local ps = [ properties ] ;
+ local properties = [ adjust-properties $(ps) ] ;
+
+ local actual-targets ;
+ for local i in [ targets ]
+ {
+ actual-targets += [ $(i).actualize ] ;
+ }
+
+ actualize-sources [ sources ] : $(properties) ;
+
+ DEPENDS $(actual-targets) : $(self.actual-sources)
+ $(self.dependency-only-sources) ;
+
+ # This works around a bug with -j and actions that
+ # produce multiple targets, where:
+ # - a dependency on the first output is found, and
+ # the action is started;
+ # - a dependency on the second output is found, and
+ # bjam notices that the command is already running;
+ # - instead of waiting for the command, dependents
+ # of the second target are immediately updated.
+ if $(actual-targets[2])
+ {
+ INCLUDES $(actual-targets) : $(actual-targets) ;
+ }
+
+ # The action name can include an additional argument to the rule,
+ # which should not be passed to 'set-target-variables'.
+ toolset.set-target-variables
+ [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets)
+ : $(properties) ;
+
+ # Reflect ourselves in a variable for the target. This allows
+ # looking up additional info for the action given the raw target.
+ # For example to debug or output action information from action
+ # rules.
+ .action on $(actual-targets) = $(__name__) ;
+
+ indirect.call $(self.action-name) $(actual-targets)
+ : $(self.actual-sources) : [ $(properties).raw ] ;
+
+ # Since we set up the creating action here, we set up the action for
+ # cleaning up as well.
+ common.Clean clean-all : $(actual-targets) ;
+ }
+ }
+
+ # Helper for 'actualize-sources'. For each passed source, actualizes it with
+ # the appropriate scanner. Returns the actualized virtual targets.
+ #
+ rule actualize-source-type ( sources * : property-set )
+ {
+ local result = ;
+ for local i in $(sources)
+ {
+ local scanner ;
+ if [ $(i).type ]
+ {
+ scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ;
+ }
+ result += [ $(i).actualize $(scanner) ] ;
+ }
+ return $(result) ;
+ }
+
+ # Creates actual Jam targets for sources. Initializes the following member
+ # variables:
+ # 'self.actual-sources' -- sources passed to the updating action.
+ # 'self.dependency-only-sources' -- sources that are marked as
+ # dependencies but are not otherwise used.
+ #
+ # New values will be *appended* to the variables. They may be non-empty if
+ # the caller wants it.
+ #
+ rule actualize-sources ( sources * : property-set )
+ {
+ local dependencies = [ $(self.properties).get <dependency> ] ;
+
+ self.dependency-only-sources +=
+ [ actualize-source-type $(dependencies) : $(property-set) ] ;
+ self.actual-sources +=
+ [ actualize-source-type $(sources) : $(property-set) ] ;
+
+ # This is used to help bjam find dependencies in generated headers and
+ # other main targets, e.g. in:
+ #
+ # make a.h : ....... ;
+ # exe hello : hello.cpp : <implicit-dependency>a.h ;
+ #
+ # For bjam to find the dependency the generated target must be
+ # actualized (i.e. have its Jam target constructed). In the above case,
+ # if we are building just hello ("bjam hello"), 'a.h' will not be
+ # actualized unless we do it here.
+ local implicit = [ $(self.properties).get <implicit-dependency> ] ;
+ for local i in $(implicit)
+ {
+ $(i:G=).actualize ;
+ }
+ }
+
+ # Determines real properties when trying to build with 'properties'. This is
+ # the last chance to fix properties, for example to adjust includes to get
+ # generated headers correctly. Default implementation simply returns its
+ # argument.
+ #
+ rule adjust-properties ( property-set )
+ {
+ return $(property-set) ;
+ }
+}
+
+
+# Action class which does nothing --- it produces the targets with specific
+# properties out of nowhere. It is needed to distinguish virtual targets with
+# different properties that are known to exist and have no actions which create
+# them.
+#
+class null-action : action
+{
+ rule __init__ ( property-set ? )
+ {
+ action.__init__ : .no-action : $(property-set) ;
+ }
+
+ rule actualize ( )
+ {
+ if ! $(self.actualized)
+ {
+ self.actualized = true ;
+ for local i in [ targets ]
+ {
+ $(i).actualize ;
+ }
+ }
+ }
+}
+
+
+# Class which acts exactly like 'action', except that its sources are not
+# scanned for dependencies.
+#
+class non-scanning-action : action
+{
+ rule __init__ ( sources * : action-name + : property-set ? )
+ {
+ action.__init__ $(sources) : $(action-name) : $(property-set) ;
+ }
+
+ rule actualize-source-type ( sources * : property-set )
+ {
+ local result ;
+ for local i in $(sources)
+ {
+ result += [ $(i).actualize ] ;
+ }
+ return $(result) ;
+ }
+}
+
+
+# Creates a virtual target with an appropriate name and type from 'file'. If a
+# target with that name in that project already exists, returns that already
+# created target.
+#
+# FIXME: a more correct way would be to compute the path to the file, based on
+# name and source location for the project, and use that path to determine if
+# the target has already been created. This logic should be shared with how we
+# usually find targets identified by a specific target id. It should also be
+# updated to work correctly when the file is specified using both relative and
+# absolute paths.
+#
+# TODO: passing a project with all virtual targets is starting to be annoying.
+#
+rule from-file ( file : file-loc : project )
+{
+ import type ; # Had to do this here to break a circular dependency.
+
+ # Check whether we already created a target corresponding to this file.
+ local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ;
+
+ if $(.files.$(path))
+ {
+ return $(.files.$(path)) ;
+ }
+ else
+ {
+ local name = [ path.make $(file) ] ;
+ local type = [ type.type $(file) ] ;
+ local result ;
+
+ result = [ new file-target $(file) : $(type) : $(project) : :
+ $(file-loc) ] ;
+
+ .files.$(path) = $(result) ;
+ return $(result) ;
+ }
+}
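+
+# For example (illustrative; 'p' stands for some project instance):
+#
+#   local t1 = [ virtual-target.from-file hello.cpp : src : $(p) ] ;
+#   local t2 = [ virtual-target.from-file hello.cpp : src : $(p) ] ;
+#   # t1 and t2 refer to the same file-target instance thanks to the cache.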
+
+
+# Registers a new virtual target. Checks if there is already a registered target
+# with the same name, type, project and subvariant properties as well as the
+# same sources and an equal action. If such a target is found, it is returned
+# and the new 'target' is not registered. Otherwise, 'target' is registered and
+# returned.
+#
+rule register ( target )
+{
+ local signature = [ sequence.join
+ [ $(target).path ] [ $(target).name ] : - ] ;
+
+ local result ;
+ for local t in $(.cache.$(signature))
+ {
+ local a1 = [ $(t).action ] ;
+ local a2 = [ $(target).action ] ;
+
+ if ! $(result)
+ {
+ if ! $(a1) && ! $(a2)
+ {
+ result = $(t) ;
+ }
+ else
+ {
+ if $(a1) && $(a2) &&
+ ( [ $(a1).action-name ] = [ $(a2).action-name ] ) &&
+ ( [ $(a1).sources ] = [ $(a2).sources ] )
+ {
+ local ps1 = [ $(a1).properties ] ;
+ local ps2 = [ $(a2).properties ] ;
+ local p1 = [ $(ps1).base ] [ $(ps1).free ] [ set.difference
+ [ $(ps1).dependency ] : [ $(ps1).incidental ] ] ;
+ local p2 = [ $(ps2).base ] [ $(ps2).free ] [ set.difference
+ [ $(ps2).dependency ] : [ $(ps2).incidental ] ] ;
+ if $(p1) = $(p2)
+ {
+ result = $(t) ;
+ }
+ }
+ }
+ }
+ }
+
+ if ! $(result)
+ {
+ .cache.$(signature) += $(target) ;
+ result = $(target) ;
+ }
+
+ .recent-targets += $(result) ;
+ .all-targets += $(result) ;
+
+ return $(result) ;
+}
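+
+# For example (illustrative):
+#
+#   local actual = [ virtual-target.register $(candidate) ] ;
+#   # If an equivalent target was registered earlier, $(actual) is that
+#   # earlier instance and the caller should use it instead of $(candidate).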
+
+
+# Each target returned by 'register' is added to the .recent-targets list,
+# returned by this function. This allows us to find all virtual targets created
+# when building a specific main target, even those constructed only as
+# intermediate targets.
+#
+rule recent-targets ( )
+{
+ return $(.recent-targets) ;
+}
+
+
+rule clear-recent-targets ( )
+{
+ .recent-targets = ;
+}
+
+
+# Returns all virtual targets ever created.
+#
+rule all-targets ( )
+{
+ return $(.all-targets) ;
+}
+
+
+# Returns all targets from 'targets' with types equal to 'type' or derived from
+# it.
+#
+rule select-by-type ( type : targets * )
+{
+ local result ;
+ for local t in $(targets)
+ {
+ if [ type.is-subtype [ $(t).type ] $(type) ]
+ {
+ result += $(t) ;
+ }
+ }
+ return $(result) ;
+}
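+
+# For example, assuming the OBJ type is registered (illustrative):
+#
+#   local objects = [ virtual-target.select-by-type OBJ : $(targets) ] ;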
+
+
+rule register-actual-name ( actual-name : virtual-target )
+{
+ if $(.actual.$(actual-name))
+ {
+ local cs1 = [ $(.actual.$(actual-name)).creating-subvariant ] ;
+ local cs2 = [ $(virtual-target).creating-subvariant ] ;
+ local cmt1 = [ $(cs1).main-target ] ;
+ local cmt2 = [ $(cs2).main-target ] ;
+
+ local action1 = [ $(.actual.$(actual-name)).action ] ;
+ local action2 = [ $(virtual-target).action ] ;
+ local properties-added ;
+ local properties-removed ;
+ if $(action1) && $(action2)
+ {
+ local p1 = [ $(action1).properties ] ;
+ p1 = [ $(p1).raw ] ;
+ local p2 = [ $(action2).properties ] ;
+ p2 = [ $(p2).raw ] ;
+ properties-removed = [ set.difference $(p1) : $(p2) ] ;
+ properties-removed ?= "none" ;
+ properties-added = [ set.difference $(p2) : $(p1) ] ;
+ properties-added ?= "none" ;
+ }
+ errors.error "Duplicate name of actual target:" $(actual-name)
+ : "previous virtual target" [ $(.actual.$(actual-name)).str ]
+ : "created from" [ $(cmt1).full-name ]
+ : "another virtual target" [ $(virtual-target).str ]
+ : "created from" [ $(cmt2).full-name ]
+ : "added properties:" $(properties-added)
+ : "removed properties:" $(properties-removed) ;
+ }
+ else
+ {
+ .actual.$(actual-name) = $(virtual-target) ;
+ }
+}
+
+
+# Traverses the dependency graph of 'target' and returns all targets that will
+# be created before this one is created.
+# found during traversal, it is either included or not, depending on the
+# 'include-roots' value. In either case traversal stops at root targets, i.e.
+# root target sources are not traversed.
+#
+rule traverse ( target : include-roots ? : include-sources ? )
+{
+ local result ;
+ if [ $(target).action ]
+ {
+ local action = [ $(target).action ] ;
+ # This includes the 'target' as well.
+ result += [ $(action).targets ] ;
+
+ for local t in [ $(action).sources ]
+ {
+ if ! [ $(t).root ]
+ {
+ result += [ traverse $(t) : $(include-roots) : $(include-sources) ] ;
+ }
+ else if $(include-roots)
+ {
+ result += $(t) ;
+ }
+ }
+ }
+ else if $(include-sources)
+ {
+ result = $(target) ;
+ }
+ return $(result) ;
+}
+
+
+# Takes an 'action' instance and creates a new instance of it, together with
+# all targets produced by the action. The action name and properties are set
+# to 'new-action-name' and 'new-properties', if those are specified. Returns
+# the cloned action.
+#
+rule clone-action ( action : new-project : new-action-name ? : new-properties ? )
+{
+ if ! $(new-action-name)
+ {
+ new-action-name = [ $(action).action-name ] ;
+ }
+ if ! $(new-properties)
+ {
+ new-properties = [ $(action).properties ] ;
+ }
+
+ local action-class = [ modules.peek $(action) : __class__ ] ;
+ local cloned-action = [ class.new $(action-class)
+ [ $(action).sources ] : $(new-action-name) : $(new-properties) ] ;
+
+ local cloned-targets ;
+ for local target in [ $(action).targets ]
+ {
+ local n = [ $(target).name ] ;
+ # Do not modify produced target names.
+ local cloned-target = [ class.new file-target $(n) exact :
+ [ $(target).type ] : $(new-project) : $(cloned-action) ] ;
+ local d = [ $(target).dependencies ] ;
+ if $(d)
+ {
+ $(cloned-target).depends $(d) ;
+ }
+ $(cloned-target).root [ $(target).root ] ;
+ $(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ;
+
+ cloned-targets += $(cloned-target) ;
+ }
+
+ return $(cloned-action) ;
+}
+
+
+class subvariant
+{
+ import sequence ;
+ import type ;
+
+ rule __init__ ( main-target # The instance of main-target class.
+ : property-set # Properties requested for this target.
+ : sources *
+ : build-properties # Actually used properties.
+ : sources-usage-requirements # Properties propagated from sources.
+ : created-targets * ) # Top-level created targets.
+ {
+ self.main-target = $(main-target) ;
+ self.properties = $(property-set) ;
+ self.sources = $(sources) ;
+ self.build-properties = $(build-properties) ;
+ self.sources-usage-requirements = $(sources-usage-requirements) ;
+ self.created-targets = $(created-targets) ;
+
+ # Pre-compose a list of other dependency graphs this one depends on.
+ local deps = [ $(build-properties).get <implicit-dependency> ] ;
+ for local d in $(deps)
+ {
+ self.other-dg += [ $(d:G=).creating-subvariant ] ;
+ }
+
+ self.other-dg = [ sequence.unique $(self.other-dg) ] ;
+ }
+
+ rule main-target ( )
+ {
+ return $(self.main-target) ;
+ }
+
+ rule created-targets ( )
+ {
+ return $(self.created-targets) ;
+ }
+
+ rule requested-properties ( )
+ {
+ return $(self.properties) ;
+ }
+
+ rule build-properties ( )
+ {
+ return $(self.build-properties) ;
+ }
+
+ rule sources-usage-requirements ( )
+ {
+ return $(self.sources-usage-requirements) ;
+ }
+
+ rule set-usage-requirements ( usage-requirements )
+ {
+ self.usage-requirements = $(usage-requirements) ;
+ }
+
+ rule usage-requirements ( )
+ {
+ return $(self.usage-requirements) ;
+ }
+
+ # Returns all targets referenced by this subvariant, either directly or
+ # indirectly, and either as sources, or as dependency properties. Targets
+ # referred to using the dependency property are returned as properties, not
+ # targets.
+ #
+ rule all-referenced-targets ( theset )
+ {
+ # Find directly referenced targets.
+ local deps = [ $(self.build-properties).dependency ] ;
+ local all-targets = $(self.sources) $(deps) ;
+
+ # Find other subvariants.
+ local r ;
+ for local t in $(all-targets)
+ {
+ if ! [ $(theset).contains $(t) ]
+ {
+ $(theset).add $(t) ;
+ r += [ $(t:G=).creating-subvariant ] ;
+ }
+ }
+ r = [ sequence.unique $(r) ] ;
+ for local s in $(r)
+ {
+ if $(s) != $(__name__)
+ {
+ $(s).all-referenced-targets $(theset) ;
+ }
+ }
+ }
+
+ # Returns the properties specifying implicit include paths to generated
+ # headers. This traverses all targets in this subvariant and the subvariants
+ # referred to by <implicit-dependency> properties. For all targets of type
+ # 'target-type' (or for all targets, if 'target-type' is not specified), the
+ # result will contain <$(feature)>path-to-that-target.
+ #
+ rule implicit-includes ( feature : target-type ? )
+ {
+ local key = ii$(feature)-$(target-type:E="") ;
+ if ! $($(key))-is-not-empty
+ {
+ local target-paths = [ all-target-directories $(target-type) ] ;
+ target-paths = [ sequence.unique $(target-paths) ] ;
+ local result = $(target-paths:G=$(feature)) ;
+ if ! $(result)
+ {
+ result = "" ;
+ }
+ $(key) = $(result) ;
+ }
+ if $($(key)) = ""
+ {
+ return ;
+ }
+ else
+ {
+ return $($(key)) ;
+ }
+ }
+
+ rule all-target-directories ( target-type ? )
+ {
+ if ! $(self.target-directories)
+ {
+ compute-target-directories $(target-type) ;
+ }
+ return $(self.target-directories) ;
+ }
+
+ rule compute-target-directories ( target-type ? )
+ {
+ local result ;
+ for local t in $(self.created-targets)
+ {
+ # Skip targets of the wrong type.
+ if ! $(target-type) ||
+ [ type.is-derived [ $(t).type ] $(target-type) ]
+ {
+ result = [ sequence.merge $(result) : [ $(t).path ] ] ;
+ }
+ }
+ for local d in $(self.other-dg)
+ {
+ result += [ $(d).all-target-directories $(target-type) ] ;
+ }
+ self.target-directories = $(result) ;
+ }
+}
diff --git a/jam-files/boost-build/kernel/boost-build.jam b/jam-files/boost-build/kernel/boost-build.jam
new file mode 100644
index 000000000..377f6ec02
--- /dev/null
+++ b/jam-files/boost-build/kernel/boost-build.jam
@@ -0,0 +1,5 @@
+# Copyright 2003 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+boost-build . ;
diff --git a/jam-files/boost-build/kernel/bootstrap.jam b/jam-files/boost-build/kernel/bootstrap.jam
new file mode 100644
index 000000000..89048af92
--- /dev/null
+++ b/jam-files/boost-build/kernel/bootstrap.jam
@@ -0,0 +1,263 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005, 2006 Rene Rivera
+# Copyright 2003, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# First of all, check the jam version
+
+if $(JAM_VERSION:J="") < 030112
+{
+ ECHO "error: Boost.Jam version 3.1.12 or later required" ;
+ EXIT ;
+}
+
+local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ;
+
+for local r in $(required-rules)
+{
+ if ! $(r) in [ RULENAMES ]
+ {
+ ECHO "error: builtin rule '$(r)' is not present" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+local native =
+ regex transform 2
+ ;
+while $(native)
+{
+ if ! [ HAS_NATIVE_RULE $(native[1]) :
+ $(native[2]) :
+ $(native[3]) ]
+ {
+ ECHO "error: missing native rule '$(native[1]).$(native[2])'" ;
+ ECHO "error: or interface version of that rule is too low" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+ native = $(native[4-]) ;
+}
+
+# Check that the builtin .ENVIRON module is present. We don't have a
+# builtin to check that a module is present, so we assume that the PATH
+# environment variable is always set and verify that the .ENVIRON module
+# has a non-empty value for that variable.
+module .ENVIRON
+{
+ local p = $(PATH) $(Path) $(path) ;
+ if ! $(p)
+ {
+ ECHO "error: no builtin module .ENVIRON is found" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+# Check that @() functionality is present. As with modules, we don't
+# have a way to test for that directly. Instead we check that $(TMPNAME)
+# functionality is present, which was added at roughly the same time
+# (more precisely, just before).
+{
+ if ! $(TMPNAME)
+ {
+ ECHO "error: no @() functionality found" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+# Make sure that the \n escape is available.
+if "\n" = "n"
+{
+ if $(OS) = CYGWIN
+ {
+ ECHO "warning: escape sequences are not supported" ;
+ ECHO "warning: this will cause major misbehaviour on cygwin" ;
+ ECHO "warning: your version of bjam is likely out of date" ;
+ ECHO "warning: please get a fresh version from SVN." ;
+ }
+}
+
+# Bootstrap the module system. Then bring the import rule into the global module.
+#
+SEARCH on <module@>modules.jam = $(.bootstrap-file:D) ;
+module modules { include <module@>modules.jam ; }
+IMPORT modules : import : : import ;
+
+{
+ # Add module subdirectories to the BOOST_BUILD_PATH, which allows
+ # us to make an incremental refactoring step by moving modules to
+ # the appropriate subdirectories, thereby achieving some physical
+ # separation of different layers without changing all of our code
+ # to specify subdirectories in import statements or use an extra
+ # level of qualification on imported names.
+
+ local subdirs =
+ kernel # only the most-intrinsic modules: modules, errors
+ util # low-level substrate: string/number handling, etc.
+ build # essential elements of the build system architecture
+ tools # toolsets for handling specific build jobs and targets.
+ contrib # user contributed (unreviewed) modules
+ . # build-system.jam lives here
+ ;
+ local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ;
+ BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ;
+
+ modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
+
+ modules.poke : EXTRA_PYTHONPATH : $(whereami) ;
+}
+
+# Reload the modules, to clean up things. The modules module can tolerate
+# being included twice.
+#
+import modules ;
+
+# Process option plugins first to allow them to prevent loading
+# the rest of the build system.
+#
+import option ;
+local dont-build = [ option.process ] ;
+
+# Should we skip building, i.e. loading the build system, according
+# to the options processed?
+#
+if ! $(dont-build)
+{
+ if ! --python in $(ARGV)
+ {
+ # Allow users to override the build system file from the
+ # command-line (mostly for testing)
+ local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
+ build-system ?= build-system ;
+
+ # Use last element in case of multiple command-line options
+ import $(build-system[-1]) ;
+ }
+ else
+ {
+ ECHO "Boost.Build V2 Python port (experimental)" ;
+
+ # Define an additional interface that is exposed to Python code. Python code
+ # will also have access to select bjam builtins in the 'bjam' module, but
+ # some things are easier to define outside of C.
+ module python_interface
+ {
+ rule load ( module-name : location )
+ {
+ USER_MODULE $(module-name) ;
+ # Make all rules in the loaded module available in
+ # the global namespace, so that we do not have to
+ # bother specifying the "right" module when calling
+ # from Python.
+ module $(module-name)
+ {
+ __name__ = $(1) ;
+ include $(2) ;
+ local rules = [ RULENAMES $(1) ] ;
+ IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
+ }
+ }
+
+ rule peek ( module-name ? : variables + )
+ {
+ module $(<)
+ {
+ return $($(>)) ;
+ }
+ }
+
+ rule set-variable ( module-name : name : value * )
+ {
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+ }
+
+ rule set-top-level-targets ( targets * )
+ {
+ DEPENDS all : $(targets) ;
+ }
+
+ rule call-in-module ( m : rulename : * )
+ {
+ module $(m)
+ {
+ return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ }
+
+
+ rule set-update-action ( action : targets * : sources * : properties * )
+ {
+ $(action) $(targets) : $(sources) : $(properties) ;
+ }
+
+ rule set-update-action-in-module ( m : action : targets * : sources * : properties * )
+ {
+ module $(m)
+ {
+ $(2) $(3) : $(4) : $(5) ;
+ }
+ }
+
+ rule set-target-variable ( targets + : variable : value * : append ? )
+ {
+ if $(append)
+ {
+ $(variable) on $(targets) += $(value) ;
+ }
+ else
+ {
+ $(variable) on $(targets) = $(value) ;
+ }
+ }
+
+ rule get-target-variable ( targets + : variable )
+ {
+ return [ on $(targets) return $($(variable)) ] ;
+ }
+
+ rule import-rules-from-parent ( parent-module : this-module : user-rules * )
+ {
+ IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
+ EXPORT $(this-module) : $(user-rules) ;
+ }
+
+ rule mark-included ( targets * : includes * ) {
+ NOCARE $(includes) ;
+ INCLUDES $(targets) : $(includes) ;
+ ISFILE $(includes) ;
+ }
+ }
+
+ PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ;
+ modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ;
+
+ module PyBB
+ {
+ local ok = [ bootstrap $(root) ] ;
+ if ! $(ok)
+ {
+ EXIT ;
+ }
+ }
+
+
+ #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ;
+
+ #module PyBB
+ #{
+ # main ;
+ #}
+
+ }
+}
diff --git a/jam-files/boost-build/kernel/class.jam b/jam-files/boost-build/kernel/class.jam
new file mode 100644
index 000000000..b8e55af35
--- /dev/null
+++ b/jam-files/boost-build/kernel/class.jam
@@ -0,0 +1,420 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Polymorphic class system built on top of core Jam facilities.
+#
+# Classes are defined by 'class' keywords::
+#
+# class myclass
+# {
+# rule __init__ ( arg1 ) # constructor
+# {
+# self.attribute = $(arg1) ;
+# }
+#
+# rule method1 ( ) # method
+# {
+# return [ method2 ] ;
+# }
+#
+# rule method2 ( ) # method
+# {
+# return $(self.attribute) ;
+# }
+# }
+#
+# The __init__ rule is the constructor, and sets member variables.
+#
+# New instances are created by invoking [ new <class> <args...> ]:
+#
+# local x = [ new myclass foo ] ; # x is a new myclass object
+# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo"
+#
+# Derived classes are created by mentioning base classes in the declaration::
+#
+# class derived : myclass
+# {
+# rule __init__ ( arg )
+# {
+# myclass.__init__ $(arg) ; # call base __init__
+#
+# }
+#
+# rule method2 ( ) # method override
+# {
+# return $(self.attribute)XXX ;
+# }
+# }
+#
+# All methods operate virtually, replacing behavior in the base classes. For
+# example::
+#
+# local y = [ new derived foo ] ; # y is a new derived object
+# assert.result fooXXX : [ $(y).method1 ] ; # $(y).method1 returns "fooXXX"
+#
+# Each class instance is its own core Jam module. All instance attributes and
+# methods are accessible without additional qualification from within the class
+# instance. All rules imported in the class declaration, or visible in base
+# classes, are also visible. Base methods are available in qualified form:
+# base-name.method-name. By convention, attribute names are prefixed with
+# "self.".
+
+import modules ;
+import numbers ;
+
+
+rule xinit ( instance : class )
+{
+ module $(instance)
+ {
+ __class__ = $(2) ;
+ __name__ = $(1) ;
+ }
+}
+
+
+rule new ( class args * : * )
+{
+ .next-instance ?= 1 ;
+ local id = object($(class))@$(.next-instance) ;
+
+ xinit $(id) : $(class) ;
+
+ INSTANCE $(id) : class@$(class) ;
+ IMPORT_MODULE $(id) ;
+ $(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+
+ # Bump the next unique object name.
+ .next-instance = [ numbers.increment $(.next-instance) ] ;
+
+ # Return the name of the new instance.
+ return $(id) ;
+}
+
+
+rule bases ( class )
+{
+ module class@$(class)
+ {
+ return $(__bases__) ;
+ }
+}
+
+
+rule is-derived ( class : bases + )
+{
+ local stack = $(class) ;
+ local visited found ;
+ while ! $(found) && $(stack)
+ {
+ local top = $(stack[1]) ;
+ stack = $(stack[2-]) ;
+ if ! ( $(top) in $(visited) )
+ {
+ visited += $(top) ;
+ stack += [ bases $(top) ] ;
+
+ if $(bases) in $(visited)
+ {
+ found = true ;
+ }
+ }
+ }
+ return $(found) ;
+}
+
+
+# Returns true if the 'value' is a class instance.
+#
+rule is-instance ( value )
+{
+ return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ;
+}
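+
+# Instance names have the form "object(<class>)@<number>", so for example
+# (illustrative):
+#
+#   is-instance [ new myclass ]   # non-empty -- a class instance
+#   is-instance plain-string      # empty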
+
+
+# Check if the given value is of the given type.
+#
+rule is-a (
+ instance # The value to check.
+ : type # The type to test for.
+)
+{
+ if [ is-instance $(instance) ]
+ {
+ return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ;
+ }
+}
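+
+# For example (illustrative, using classes like those in the header above):
+#
+#   local x = [ new derived foo ] ;
+#   is-a $(x) : myclass   # "true" -- derived inherits from myclass
+#   is-a foo : myclass    # empty  -- not a class instance at all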
+
+
+local rule typecheck ( x )
+{
+ local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ;
+ if ! [ is-a $(x) : $(class-name) ]
+ {
+ return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+
+ # This will be the construction function for a class called 'myclass'.
+ #
+ class myclass
+ {
+ import assert ;
+
+ rule __init__ ( x_ * : y_ * )
+ {
+ # Set some instance variables.
+ x = $(x_) ;
+ y = $(y_) ;
+ foo += 10 ;
+ }
+
+ rule set-x ( newx * )
+ {
+ x = $(newx) ;
+ }
+
+ rule get-x ( )
+ {
+ return $(x) ;
+ }
+
+ rule set-y ( newy * )
+ {
+ y = $(newy) ;
+ }
+
+ rule get-y ( )
+ {
+ return $(y) ;
+ }
+
+ rule f ( )
+ {
+ return [ g $(x) ] ;
+ }
+
+ rule g ( args * )
+ {
+ if $(x) in $(y)
+ {
+ return $(x) ;
+ }
+ else if $(y) in $(x)
+ {
+ return $(y) ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ rule get-class ( )
+ {
+ return $(__class__) ;
+ }
+
+ rule get-instance ( )
+ {
+ return $(__name__) ;
+ }
+
+ rule invariant ( )
+ {
+ assert.equal 1 : 1 ;
+ }
+
+ rule get-foo ( )
+ {
+ return $(foo) ;
+ }
+ }
+# class myclass ;
+
+ class derived1 : myclass
+ {
+ rule __init__ ( z_ )
+ {
+ myclass.__init__ $(z_) : X ;
+ z = $(z_) ;
+ }
+
+ # Override g.
+ #
+ rule g ( args * )
+ {
+ return derived1.g ;
+ }
+
+ rule h ( )
+ {
+ return derived1.h ;
+ }
+
+ rule get-z ( )
+ {
+ return $(z) ;
+ }
+
+ # Check that 'assert.equal' visible in base class is visible here.
+ #
+ rule invariant2 ( )
+ {
+ assert.equal 2 : 2 ;
+ }
+
+ # Check that 'assert.variable-not-empty' visible in base class is
+ # visible here.
+ #
+ rule invariant3 ( )
+ {
+ local v = 10 ;
+ assert.variable-not-empty v ;
+ }
+ }
+# class derived1 : myclass ;
+
+ class derived2 : myclass
+ {
+ rule __init__ ( )
+ {
+ myclass.__init__ 1 : 2 ;
+ }
+
+ # Override g.
+ #
+ rule g ( args * )
+ {
+ return derived2.g ;
+ }
+
+ # Test the ability to call base class functions with qualification.
+ #
+ rule get-x ( )
+ {
+ return [ myclass.get-x ] ;
+ }
+ }
+# class derived2 : myclass ;
+
+ class derived2a : derived2
+ {
+ rule __init__
+ {
+ derived2.__init__ ;
+ }
+ }
+# class derived2a : derived2 ;
+
+ local rule expect_derived2 ( [derived2] x ) { }
+
+ local a = [ new myclass 3 4 5 : 4 5 ] ;
+ local b = [ new derived1 4 ] ;
+ local b2 = [ new derived1 4 ] ;
+ local c = [ new derived2 ] ;
+ local d = [ new derived2 ] ;
+ local e = [ new derived2a ] ;
+
+ expect_derived2 $(d) ;
+ expect_derived2 $(e) ;
+
+ # Argument checking is set up to call exit(1) directly on failure, and we
+ # cannot hijack that with try, so we had better not run this test by
+ # default. We could fix this by having errors look up and invoke the EXIT
+ # rule instead; EXIT can be hijacked (;-)
+ if --fail-typecheck in [ modules.peek : ARGV ]
+ {
+ try ;
+ {
+ expect_derived2 $(a) ;
+ }
+ catch
+ "Expected an instance of derived2 but got" instead
+ ;
+ }
+
+ #try ;
+ #{
+ # new bad_subclass ;
+ #}
+ #catch
+ # bad_subclass.bad_subclass failed to call base class constructor myclass.__init__
+ # ;
+
+ #try ;
+ #{
+ # class bad_subclass ;
+ #}
+ #catch bad_subclass has already been declared ;
+
+ assert.result 3 4 5 : $(a).get-x ;
+ assert.result 4 5 : $(a).get-y ;
+ assert.result 4 : $(b).get-x ;
+ assert.result X : $(b).get-y ;
+ assert.result 4 : $(b).get-z ;
+ assert.result 1 : $(c).get-x ;
+ assert.result 2 : $(c).get-y ;
+ assert.result 4 5 : $(a).f ;
+ assert.result derived1.g : $(b).f ;
+ assert.result derived2.g : $(c).f ;
+ assert.result derived2.g : $(d).f ;
+
+ assert.result 10 : $(b).get-foo ;
+
+ $(a).invariant ;
+ $(b).invariant2 ;
+ $(b).invariant3 ;
+
+ # Check that the __class__ attribute is getting properly set.
+ assert.result myclass : $(a).get-class ;
+ assert.result derived1 : $(b).get-class ;
+ assert.result $(a) : $(a).get-instance ;
+
+ $(a).set-x a.x ;
+ $(b).set-x b.x ;
+ $(c).set-x c.x ;
+ $(d).set-x d.x ;
+ assert.result a.x : $(a).get-x ;
+ assert.result b.x : $(b).get-x ;
+ assert.result c.x : $(c).get-x ;
+ assert.result d.x : $(d).get-x ;
+
+ class derived3 : derived1 derived2
+ {
+ rule __init__ ( )
+ {
+ }
+ }
+
+ assert.result : bases myclass ;
+ assert.result myclass : bases derived1 ;
+ assert.result myclass : bases derived2 ;
+ assert.result derived1 derived2 : bases derived3 ;
+
+ assert.true is-derived derived1 : myclass ;
+ assert.true is-derived derived2 : myclass ;
+ assert.true is-derived derived3 : derived1 ;
+ assert.true is-derived derived3 : derived2 ;
+ assert.true is-derived derived3 : derived1 derived2 myclass ;
+ assert.true is-derived derived3 : myclass ;
+
+ assert.false is-derived myclass : derived1 ;
+
+ assert.true is-instance $(a) ;
+ assert.false is-instance bar ;
+
+ assert.true is-a $(a) : myclass ;
+ assert.true is-a $(c) : derived2 ;
+ assert.true is-a $(d) : myclass ;
+ assert.false is-a literal : myclass ;
+}
diff --git a/jam-files/boost-build/kernel/errors.jam b/jam-files/boost-build/kernel/errors.jam
new file mode 100644
index 000000000..63b11e867
--- /dev/null
+++ b/jam-files/boost-build/kernel/errors.jam
@@ -0,0 +1,274 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Print a stack backtrace leading to this rule's caller. Each argument
+# represents a line of output to be printed after the first line of the
+# backtrace.
+#
+rule backtrace ( skip-frames prefix messages * : * )
+{
+ local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ;
+ local drop-elements = $(frame-skips[$(skip-frames)]) ;
+ if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 )
+ {
+ ECHO "warning: backtrace doesn't support skipping $(skip-frames) frames;"
+ "using 1 instead." ;
+ drop-elements = 5 ;
+ }
+
+ local args = $(.args) ;
+ if $(.user-modules-only)
+ {
+ local bt = [ nearest-user-location ] ;
+ ECHO "$(prefix) at $(bt) " ;
+ for local n in $(args)
+ {
+ if $($(n))-is-not-empty
+ {
+ ECHO $(prefix) $($(n)) ;
+ }
+ }
+ }
+ else
+ {
+ # Get the whole backtrace, then drop the initial quadruples
+ # corresponding to the frames that must be skipped.
+ local bt = [ BACKTRACE ] ;
+ bt = $(bt[$(drop-elements)-]) ;
+
+ while $(bt)
+ {
+ local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
+ ECHO $(bt[1]):$(bt[2]): "in" $(bt[4]) "from module" $(m) ;
+
+ # The first time through, print each argument on a separate line.
+ for local n in $(args)
+ {
+ if $($(n))-is-not-empty
+ {
+ ECHO $(prefix) $($(n)) ;
+ }
+ }
+ args = ; # Kill args so that this never happens again.
+
+ # Move on to the next quadruple.
+ bt = $(bt[5-]) ;
+ }
+ }
+}
+
+.args ?= messages 2 3 4 5 6 7 8 9 ;
+.disabled ?= ;
+.last-error-$(.args) ?= ;
+
+
+# try-catch --
+#
+# This is not really an exception-handling mechanism, but it does allow us to
+# perform some error-checking on our error-checking. Errors are suppressed after
+# a try, and the first one is recorded. Use catch to check that the error
+# message matches expectations.
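+#
+# For example (mirroring the tests at the end of this module):
+#
+#   try ;
+#   {
+#       error an error occurred : somewhere ;
+#   }
+#   catch an error occurred : somewhere ;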
+
+# Begin looking for error messages.
+#
+rule try ( )
+{
+ .disabled += true ;
+ .last-error-$(.args) = ;
+}
+
+
+# Stop looking for error messages; generate an error if any of the expected
+# messages is not found in the corresponding argument of the recorded error
+# call.
+#
+rule catch ( messages * : * )
+{
+ .disabled = $(.disabled[2-]) ; # Pop the stack.
+
+ import sequence ;
+
+ if ! $(.last-error-$(.args))-is-not-empty
+ {
+ error-skip-frames 3 expected an error, but none occurred ;
+ }
+ else
+ {
+ for local n in $(.args)
+ {
+ if ! $($(n)) in $(.last-error-$(n))
+ {
+ local v = [ sequence.join $($(n)) : " " ] ;
+ v ?= "" ;
+ local joined = [ sequence.join $(.last-error-$(n)) : " " ] ;
+
+ .last-error-$(.args) = ;
+ error-skip-frames 3 expected \"$(v)\" in argument $(n) of error
+ : got \"$(joined)\" instead ;
+ }
+ }
+ }
+}
+
+
+rule error-skip-frames ( skip-frames messages * : * )
+{
+ if ! $(.disabled)
+ {
+ backtrace $(skip-frames) error: $(messages) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ EXIT ;
+ }
+ else if ! $(.last-error-$(.args))
+ {
+ for local n in $(.args)
+ {
+ # Add an extra empty string so that we always have
+ # something in the event of an error
+ .last-error-$(n) = $($(n)) "" ;
+ }
+ }
+}
+
+if --no-error-backtrace in [ modules.peek : ARGV ]
+{
+ .no-error-backtrace = true ;
+}
+
+
+# Print an error message with a stack backtrace and exit.
+#
+rule error ( messages * : * )
+{
+ if $(.no-error-backtrace)
+ {
+ # Print each argument on a separate line.
+ for local n in $(.args)
+ {
+ if $($(n))-is-not-empty
+ {
+ if ! $(first-printed)
+ {
+ ECHO error: $($(n)) ;
+ first-printed = true ;
+ }
+ else
+ {
+ ECHO $($(n)) ;
+ }
+ }
+ }
+ EXIT ;
+ }
+ else
+ {
+ error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+}
+
+
+# Same as 'error', but the generated backtrace will include only user files.
+#
+rule user-error ( messages * : * )
+{
+ .user-modules-only = 1 ;
+ error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+}
+
+
+# Print a warning message with a stack backtrace.
+#
+rule warning
+{
+ backtrace 2 warning: $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+}
+
+
+# Convert an arbitrary argument list into a list with ":" separators and quoted
+# elements representing the same information. This is mostly useful for
+# formatting descriptions of arguments with which a rule was called when
+# reporting an error.
+#
+rule lol->list ( * )
+{
+ local result ;
+ local remaining = 1 2 3 4 5 6 7 8 9 ;
+ while $($(remaining))
+ {
+ local n = $(remaining[1]) ;
+ remaining = $(remaining[2-]) ;
+
+ if $(n) != 1
+ {
+ result += ":" ;
+ }
+ result += \"$($(n))\" ;
+ }
+ return $(result) ;
+}
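+
+# For illustration (not in the upstream source): [ lol->list a b : c ] returns
+# a quoted, colon-separated rendering that ECHOs as: "a" "b" : "c"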
+
+
+# Return the file:line for the nearest entry in the backtrace which corresponds
+# to a user module.
+#
+rule nearest-user-location ( )
+{
+ local bt = [ BACKTRACE ] ;
+
+ local result ;
+ while $(bt) && ! $(result)
+ {
+ local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
+        local user-modules = ([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-root.jam)) ;
+
+ if [ MATCH $(user-modules) : $(bt[1]:D=) ]
+ {
+ result = $(bt[1]):$(bt[2]) ;
+ }
+ bt = $(bt[5-]) ;
+ }
+ return $(result) ;
+}
+
+
+# If the optimized rule is available in Jam, use it.
+if NEAREST_USER_LOCATION in [ RULENAMES ]
+{
+ rule nearest-user-location ( )
+ {
+ local r = [ NEAREST_USER_LOCATION ] ;
+ return $(r[1]):$(r[2]) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ # Show that we can correctly catch an expected error.
+ try ;
+ {
+ error an error occurred : somewhere ;
+ }
+ catch an error occurred : somewhere ;
+
+ # Show that unexpected errors generate real errors.
+ try ;
+ {
+ try ;
+ {
+ error an error occurred : somewhere ;
+ }
+ catch an error occurred : nowhere ;
+ }
+ catch expected \"nowhere\" in argument 2 ;
+
+ # Show that not catching an error where one was expected is an error.
+ try ;
+ {
+ try ;
+ {
+ }
+ catch ;
+ }
+ catch expected an error, but none occurred ;
+}
diff --git a/jam-files/boost-build/kernel/modules.jam b/jam-files/boost-build/kernel/modules.jam
new file mode 100644
index 000000000..1f75354fc
--- /dev/null
+++ b/jam-files/boost-build/kernel/modules.jam
@@ -0,0 +1,354 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Essentially an include guard; ensures that no module is loaded multiple times.
+.loaded ?= ;
+
+# A list of modules currently being loaded for error reporting of circular
+# dependencies.
+.loading ?= ;
+
+# A list of modules needing to be tested using their __test__ rule.
+.untested ?= ;
+
+# A list of modules which have been tested using their __test__ rule.
+.tested ?= ;
+
+
+# Runs internal Boost Build unit tests for the specified module. The module's
+# __test__ rule is executed in its own module to eliminate any inadvertent
+# effects of testing module dependencies (such as assert) on the module itself.
+#
+local rule run-module-test ( m )
+{
+ local tested-modules = [ modules.peek modules : .tested ] ;
+
+ if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations.
+ && ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) )
+ {
+ modules.poke modules : .tested : $(tested-modules) $(m) ;
+
+ if ! ( __test__ in [ RULENAMES $(m) ] )
+ {
+ local argv = [ peek : ARGV ] ;
+ if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) )
+ {
+ ECHO warning: no __test__ rule defined in module $(m) ;
+ }
+ }
+ else
+ {
+ if ! ( --quiet in $(argv) )
+ {
+ ECHO testing module $(m)... ;
+ }
+
+ local test-module = __test-$(m)__ ;
+ IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m) ] ;
+ IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ;
+ module $(test-module)
+ {
+ __test__ ;
+ }
+ }
+ }
+}
+
+
+# Return the binding of the given module.
+#
+rule binding ( module )
+{
+ return $($(module).__binding__) ;
+}
+
+
+# Sets the module-local value of a variable. This is the most reliable way to
+# set a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
+#
+rule poke ( module-name ? : variables + : value * )
+{
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+}
+
+
+# Returns the module-local value of a variable. This is the most reliable way to
+# examine a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
+#
+rule peek ( module-name ? : variables + )
+{
+ module $(<)
+ {
+ return $($(>)) ;
+ }
+}
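+
+# Illustrative sketch (not from the upstream source): peek and poke are the
+# usual way to read and write another module's variables; the module and
+# variable names below, apart from the global ARGV, are hypothetical.
+#
+#   local argv = [ modules.peek : ARGV ] ;              # the global module
+#   local cache = [ modules.peek my-module : .cache ] ;
+#   modules.poke my-module : .cache : $(cache) extra-value ;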
+
+
+# Call the given rule locally in the given module. Use this for rules accepting
+# rule names as arguments, so that the passed rule may be invoked in the context
+# of the rule's caller (for example, if the rule accesses module globals or is a
+# local rule). Note that rules called this way may accept at most 8 parameters.
+#
+rule call-in ( module-name ? : rule-name args * : * )
+{
+ module $(module-name)
+ {
+ return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+}
+
+
+# Given a possibly qualified rule name and arguments, remove any initial module
+# qualification from the rule and invoke it in that module. If there is no
+# module qualification, the rule is invoked in the global module. Note that
+# rules called this way may accept at most 8 parameters.
+#
+rule call-locally ( qualified-rule-name args * : * )
+{
+ local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ;
+ local rule-name = $(module-rule[2]) ;
+ rule-name ?= $(qualified-rule-name) ;
+ # We pass only 8 parameters here since Boost Jam allows at most 9 rule
+ # parameter positions and the call-in rule already uses up the initial
+ # position for the module name.
+ return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) :
+ $(4) : $(5) : $(6) : $(7) : $(8) ] ;
+}
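+
+# Illustrative sketch (not from the upstream source): a qualified rule name
+# received as data can be invoked later without knowing its module up front;
+# 'path.make' is just an example of such a rule.
+#
+#   local converter = path.make ;
+#   local result = [ call-locally $(converter) some/dir ] ;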
+
+
+# Load the indicated module if it is not already loaded.
+#
+rule load (
+ module-name # Name of module to load. Rules will be defined in this
+ # module.
+    : filename ?  # (Partial) path to the file; defaults to $(module-name).jam.
+ : search * # Directories in which to search for filename. Defaults to
+ # $(BOOST_BUILD_PATH).
+)
+{
+ # Avoid loading modules twice.
+ if ! ( $(module-name) in $(.loaded) )
+ {
+ filename ?= $(module-name).jam ;
+
+ # Mark the module loaded so we do not try to load it recursively.
+ .loaded += $(module-name) ;
+
+ # Suppress tests if any module loads are already in progress.
+ local suppress-test = $(.loading[1]) ;
+
+ # Push this module on the loading stack.
+ .loading += $(module-name) ;
+
+ # Remember that it is untested.
+ .untested += $(module-name) ;
+
+ # Insert the new module's __name__ and __file__ globals.
+ poke $(module-name) : __name__ : $(module-name) ;
+ poke $(module-name) : __file__ : $(filename) ;
+
+ module $(module-name)
+ {
+ # Add some grist so that the module will have a unique target name.
+ local module-target = $(__file__:G=module@) ;
+
+ local search = $(3) ;
+ search ?= [ modules.peek : BOOST_BUILD_PATH ] ;
+ SEARCH on $(module-target) = $(search) ;
+ BINDRULE on $(module-target) = modules.record-binding ;
+
+ include $(module-target) ;
+
+ # Allow the module to see its own names with full qualification.
+ local rules = [ RULENAMES $(__name__) ] ;
+ IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ;
+ }
+
+ if $(module-name) != modules && ! [ binding $(module-name) ]
+ {
+ import errors ;
+ errors.error "Could not find module" $(module-name) in $(search) ;
+ }
+
+ # Pop the loading stack. Must happen before testing or we will run into
+ # a circular loading dependency.
+ .loading = $(.loading[1--2]) ;
+
+ # Run any pending tests if this is an outer load.
+ if ! $(suppress-test)
+ {
+ local argv = [ peek : ARGV ] ;
+ for local m in $(.untested)
+ {
+ run-module-test $(m) ;
+ }
+ .untested = ;
+ }
+ }
+ else if $(module-name) in $(.loading)
+ {
+ import errors ;
+ errors.error loading \"$(module-name)\"
+ : circular module loading dependency:
+ : $(.loading)" ->" $(module-name) ;
+ }
+}
+
+
+# This helper is used by load (above) to record the binding (path) of each
+# loaded module.
+#
+rule record-binding ( module-target : binding )
+{
+ $(.loading[-1]).__binding__ = $(binding) ;
+}
+
+
+# Return each path in the list with all backslashes converted to forward
+# slashes and all detectable redundancy removed. Something like this is
+# probably needed in path.jam, but I am not sure of that, I do not understand
+# it, and I am not ready to move all of path.jam into the kernel.
+#
+local rule normalize-raw-paths ( paths * )
+{
+ local result ;
+ for p in $(paths:T)
+ {
+ result += [ NORMALIZE_PATH $(p) ] ;
+ }
+ return $(result) ;
+}
+
+
+.cwd = [ PWD ] ;
+
+
+# Load the indicated module and import rule names into the current module. Any
+# members of rules-opt will be available without qualification in the caller's
+# module. Any members of rename-opt will be taken as the names of the rules in
+# the caller's module, in place of the names they have in the imported module.
+# If rules-opt = '*', all rules from the indicated module are imported into the
+# caller's module. If rename-opt is supplied, it must have the same number of
+# elements as rules-opt.
+#
+rule import ( module-names + : rules-opt * : rename-opt * )
+{
+ if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt)
+ {
+ import errors ;
+ errors.error "Rule aliasing is only available for explicit imports." ;
+ }
+
+ if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) )
+ {
+ import errors ;
+ errors.error "When loading multiple modules, no specific rules or"
+ "renaming is allowed" ;
+ }
+
+ local caller = [ CALLER_MODULE ] ;
+
+ # Import each specified module
+ for local m in $(module-names)
+ {
+ if ! $(m) in $(.loaded)
+ {
+            # If the directory of the importing module is not already in
+            # BOOST_BUILD_PATH, prepend it to the search path. We do not want
+            # to invert the search order of directories that are already there.
+
+ local caller-location ;
+ if $(caller)
+ {
+ caller-location = [ binding $(caller) ] ;
+ caller-location = $(caller-location:D) ;
+ caller-location = [ normalize-raw-paths $(caller-location:R=$(.cwd)) ] ;
+ }
+
+ local search = [ peek : BOOST_BUILD_PATH ] ;
+ search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ;
+
+ if $(caller-location) && ! $(caller-location) in $(search)
+ {
+ search = $(caller-location) $(search) ;
+ }
+
+ load $(m) : : $(search) ;
+ }
+
+ IMPORT_MODULE $(m) : $(caller) ;
+
+ if $(rules-opt)
+ {
+ local source-names ;
+ if $(rules-opt) = *
+ {
+ local all-rules = [ RULENAMES $(m) ] ;
+ source-names = $(all-rules) ;
+ }
+ else
+ {
+ source-names = $(rules-opt) ;
+ }
+ local target-names = $(rename-opt) ;
+ target-names ?= $(source-names) ;
+ IMPORT $(m) : $(source-names) : $(caller) : $(target-names) ;
+ }
+ }
+}
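+
+# Illustrative usage (not part of the upstream source): typical import forms;
+# the rename in the last line is hypothetical.
+#
+#   import path ;                              # qualified use: path.join ...
+#   import "class" : new ;                     # unqualified 'new'
+#   import toolset : flags : toolset-flags ;   # import 'flags' under a new name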
+
+
+# Define exported copies in $(target-module) of all rules exported from
+# $(source-module). Also make them available in the global module with
+# qualification, so that it is just as though the rules were defined originally
+# in $(target-module).
+#
+rule clone-rules ( source-module target-module )
+{
+ local rules = [ RULENAMES $(source-module) ] ;
+
+ IMPORT $(source-module) : $(rules) : $(target-module) : $(rules) : LOCALIZE ;
+ EXPORT $(target-module) : $(rules) ;
+ IMPORT $(target-module) : $(rules) : : $(target-module).$(rules) ;
+}
+
+
+# These rules need to be available in all modules to implement module loading
+# itself and other fundamental operations.
+local globalize = peek poke record-binding ;
+IMPORT modules : $(globalize) : : modules.$(globalize) ;
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import modules : normalize-raw-paths ;
+
+ module modules.__test__
+ {
+ foo = bar ;
+ }
+
+ assert.result bar : peek modules.__test__ : foo ;
+
+ poke modules.__test__ : foo : bar baz ;
+ assert.result bar baz : peek modules.__test__ : foo ;
+
+ assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ;
+ assert.result . : normalize-raw-paths . ;
+ assert.result .. : normalize-raw-paths .. ;
+ assert.result ../.. : normalize-raw-paths ../.. ;
+ assert.result .. : normalize-raw-paths ./.. ;
+ assert.result / / : normalize-raw-paths / \\ ;
+ assert.result a : normalize-raw-paths a ;
+ assert.result a : normalize-raw-paths a/ ;
+ assert.result /a : normalize-raw-paths /a/ ;
+ assert.result / : normalize-raw-paths /a/.. ;
+}
diff --git a/jam-files/boost-build/options/help.jam b/jam-files/boost-build/options/help.jam
new file mode 100644
index 000000000..b507e1edd
--- /dev/null
+++ b/jam-files/boost-build/options/help.jam
@@ -0,0 +1,212 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2006 Rene Rivera
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module is the plug-in handler for the --help and --help-.*
+# command-line options
+import modules ;
+import assert ;
+import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ;
+import sequence ;
+import set ;
+import project ;
+import print ;
+import os ;
+import version ;
+import path ;
+
+# A list of names that look like modules but really are not.
+#
+.not-modules =
+ boost-build bootstrap site-config test user-config
+ -tools allyourbase boost-base features python stlport testing unit-tests ;
+
+# The help system options are parsed here and handed off to the doc
+# module to translate into documentation requests and actions. The
+# understood options are:
+#
+# --help-disable-<option>
+# --help-doc-options
+# --help-enable-<option>
+# --help-internal
+# --help-options
+# --help-usage
+# --help-output <type>
+# --help-output-file <file>
+# --help [<module-or-class>]
+#
+rule process (
+ command # The option.
+ : values * # The values, starting after the "=".
+ )
+{
+ assert.result --help : MATCH ^(--help).* : $(command) ;
+ local did-help = ;
+ switch $(command)
+ {
+ case --help-internal :
+ local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
+ path-to-modules ?= . ;
+ local possible-modules = [ GLOB $(path-to-modules) : *\\.jam ] ;
+ local not-modules = [ GLOB $(path-to-modules) : *$(.not-modules)\\.jam ] ;
+ local modules-to-list =
+ [ sequence.insertion-sort
+ [ set.difference $(possible-modules:D=:S=) : $(not-modules:D=:S=) ] ] ;
+ local modules-to-scan ;
+ for local m in $(modules-to-list)
+ {
+ local module-files = [ GLOB $(path-to-modules) : $(m)\\.jam ] ;
+ modules-to-scan += $(module-files[1]) ;
+ }
+ do-scan $(modules-to-scan) : print-help-all ;
+ did-help = true ;
+
+ case --help-enable-* :
+ local option = [ MATCH --help-enable-(.*) : $(command) ] ; option = $(option:L) ;
+ set-option $(option) : enabled ;
+ did-help = true ;
+
+ case --help-disable-* :
+ local option = [ MATCH --help-disable-(.*) : $(command) ] ; option = $(option:L) ;
+ set-option $(option) ;
+ did-help = true ;
+
+ case --help-output :
+ set-output $(values[1]) ;
+ did-help = true ;
+
+ case --help-output-file :
+ set-output-file $(values[1]) ;
+ did-help = true ;
+
+ case --help-doc-options :
+ local doc-module-spec = [ split-symbol doc ] ;
+ do-scan $(doc-module-spec[1]) : print-help-options ;
+ did-help = true ;
+
+ case --help-options :
+ print-help-usage ;
+ did-help = true ;
+
+ case --help :
+ local spec = $(values[1]) ;
+ if $(spec)
+ {
+ local spec-parts = [ split-symbol $(spec) ] ;
+ if $(spec-parts)
+ {
+ if $(spec-parts[2])
+ {
+ do-scan $(spec-parts[1]) : print-help-classes $(spec-parts[2]) ;
+ do-scan $(spec-parts[1]) : print-help-rules $(spec-parts[2]) ;
+ do-scan $(spec-parts[1]) : print-help-variables $(spec-parts[2]) ;
+ }
+ else
+ {
+ do-scan $(spec-parts[1]) : print-help-module ;
+ }
+ }
+ else
+ {
+ EXIT "Unrecognized help option '"$(command)" "$(spec)"'." ;
+ }
+ }
+ else
+ {
+ version.print ;
+ ECHO ;
+ # First print documentation from the current Jamfile, if any.
+ # FIXME: Generally, this duplication of project.jam logic is bad.
+ local names = [ modules.peek project : JAMROOT ]
+ [ modules.peek project : JAMFILE ] ;
+ local project-file = [ path.glob . : $(names) ] ;
+ if ! $(project-file)
+ {
+ project-file = [ path.glob-in-parents . : $(names) ] ;
+ }
+
+ for local p in $(project-file)
+ {
+ do-scan $(p) : print-help-project $(p) ;
+ }
+
+ # Next any user-config help.
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local user-config = [ GLOB $(user-path) : user-config.jam ] ;
+ if $(user-config)
+ {
+ do-scan $(user-config[1]) : print-help-config user $(user-config[1]) ;
+ }
+
+ # Next any site-config help.
+ local site-config = [ GLOB $(user-path) : site-config.jam ] ;
+ if $(site-config)
+ {
+ do-scan $(site-config[1]) : print-help-config site $(site-config[1]) ;
+ }
+
+ # Then the overall help.
+ print-help-top ;
+ }
+ did-help = true ;
+ }
+ if $(did-help)
+ {
+ UPDATE all ;
+ NOCARE all ;
+ }
+ return $(did-help) ;
+}
+
+# Split a reference to a symbol into module and symbol parts.
+#
+local rule split-symbol (
+ symbol # The symbol to split.
+ )
+{
+ local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
+ path-to-modules ?= . ;
+ local module-name = $(symbol) ;
+ local symbol-name = ;
+ local result = ;
+ while ! $(result)
+ {
+ local module-path = [ GLOB $(path-to-modules) : $(module-name)\\.jam ] ;
+ if $(module-path)
+ {
+            # The 'module-name' in fact refers to a module. Return the full
+            # module path and a symbol within it. If the 'symbol' passed to
+            # this rule already names a module, 'symbol-name' will be empty.
+            # Otherwise, it was initialized on a previous loop iteration. If
+            # several modules share this name, use the first one.
+ result = $(module-path[1]) $(symbol-name) ;
+ }
+ else
+ {
+ if ! $(module-name:S)
+ {
+ result = - ;
+ }
+ else
+ {
+ local next-symbol-part = [ MATCH ^.(.*) : $(module-name:S) ] ;
+ if $(symbol-name)
+ {
+ symbol-name = $(next-symbol-part).$(symbol-name) ;
+ }
+ else
+ {
+ symbol-name = $(next-symbol-part) ;
+ }
+ module-name = $(module-name:B) ;
+ }
+ }
+ }
+ if $(result) != -
+ {
+ return $(result) ;
+ }
+}
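+
+# For illustration (not in the upstream source): given "doc.writing",
+# split-symbol strips suffixes from the right until the remainder names a
+# module file, returning e.g. a path such as ".../util/doc.jam" together with
+# the symbol "writing"; a plain module name like "doc" yields just the module
+# path. The paths shown are hypothetical.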
diff --git a/jam-files/boost-build/site-config.jam b/jam-files/boost-build/site-config.jam
new file mode 100644
index 000000000..ad22d6744
--- /dev/null
+++ b/jam-files/boost-build/site-config.jam
@@ -0,0 +1,4 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
diff --git a/jam-files/boost-build/tools/acc.jam b/jam-files/boost-build/tools/acc.jam
new file mode 100644
index 000000000..f04c9dc87
--- /dev/null
+++ b/jam-files/boost-build/tools/acc.jam
@@ -0,0 +1,118 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Toon Knapen 2004.
+# Copyright Boris Gubenko 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Boost.Build V2 toolset for the HP aC++ compiler.
+#
+
+import toolset : flags ;
+import feature ;
+import generators ;
+import common ;
+
+feature.extend toolset : acc ;
+toolset.inherit acc : unix ;
+generators.override builtin.lib-generator : acc.prebuilt ;
+generators.override acc.searched-lib-generator : searched-lib-generator ;
+
+# Configures the acc toolset.
+rule init ( version ? : user-provided-command * : options * )
+{
+ local condition = [ common.check-init-parameters acc
+ : version $(version) ] ;
+
+ local command = [ common.get-invocation-command acc : aCC
+ : $(user-provided-command) ] ;
+
+ common.handle-options acc : $(condition) : $(command) : $(options) ;
+}
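+
+# Illustrative configuration sketch (not part of the upstream file): the
+# toolset is normally enabled from user-config.jam or site-config.jam; the
+# explicit path below is hypothetical.
+#
+#   using acc ;                          # pick up aCC from the PATH
+#   using acc : : /opt/aCC/bin/aCC ;     # explicit compiler command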
+
+
+# Declare generators
+generators.register-c-compiler acc.compile.c : C : OBJ : <toolset>acc ;
+generators.register-c-compiler acc.compile.c++ : CPP : OBJ : <toolset>acc ;
+
+# Declare flags.
+flags acc CFLAGS <optimization>off : ;
+flags acc CFLAGS <optimization>speed : -O3 ;
+flags acc CFLAGS <optimization>space : -O2 ;
+
+flags acc CFLAGS <inlining>off : +d ;
+flags acc CFLAGS <inlining>on : ;
+flags acc CFLAGS <inlining>full : ;
+
+flags acc C++FLAGS <exception-handling>off : ;
+flags acc C++FLAGS <exception-handling>on : ;
+
+flags acc C++FLAGS <rtti>off : ;
+flags acc C++FLAGS <rtti>on : ;
+
+# We want the full path to the sources in the debug symbols because otherwise
+# the debugger won't find the sources when we use boost.build.
+flags acc CFLAGS <debug-symbols>on : -g ;
+flags acc LINKFLAGS <debug-symbols>on : -g ;
+flags acc LINKFLAGS <debug-symbols>off : -s ;
+
+# V2 does not have <shared-linkable>, not sure what this meant in V1.
+# flags acc CFLAGS <shared-linkable>true : +Z ;
+
+flags acc CFLAGS <profiling>on : -pg ;
+flags acc LINKFLAGS <profiling>on : -pg ;
+
+flags acc CFLAGS <address-model>64 : +DD64 ;
+flags acc LINKFLAGS <address-model>64 : +DD64 ;
+
+# It is unknown whether there is a separate option for an rpath used only at
+# link time, similar to -rpath-link in GNU ld. We will use -L.
+flags acc RPATH_LINK : <xdll-path> ;
+
+flags acc CFLAGS <cflags> ;
+flags acc C++FLAGS <cxxflags> ;
+flags acc DEFINES <define> ;
+flags acc UNDEFS <undef> ;
+flags acc HDRS <include> ;
+flags acc STDHDRS <sysinclude> ;
+flags acc LINKFLAGS <linkflags> ;
+flags acc ARFLAGS <arflags> ;
+
+flags acc LIBPATH <library-path> ;
+flags acc NEEDLIBS <library-file> ;
+flags acc FINDLIBS <find-shared-library> ;
+flags acc FINDLIBS <find-static-library> ;
+
+# Select the compiler name according to the threading model.
+flags acc CFLAGS <threading>multi : -mt ;
+flags acc LINKFLAGS <threading>multi : -mt ;
+
+flags acc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+
+actions acc.link bind NEEDLIBS
+{
+ $(CONFIG_COMMAND) -AA $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
+}
+
+SPACE = " " ;
+actions acc.link.dll bind NEEDLIBS
+{
+ $(CONFIG_COMMAND) -AA -b $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -Wl,+h$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
+}
+
+actions acc.compile.c
+{
+ cc -c -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
+}
+
+actions acc.compile.c++
+{
+ $(CONFIG_COMMAND) -AA -c -Wc,--pending_instantiations=$(TEMPLATE_DEPTH) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
+}
+
+actions updated together piecemeal acc.archive
+{
+ ar ru$(ARFLAGS:E="") "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/bison.jam b/jam-files/boost-build/tools/bison.jam
new file mode 100644
index 000000000..0689d4bd8
--- /dev/null
+++ b/jam-files/boost-build/tools/bison.jam
@@ -0,0 +1,32 @@
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import generators ;
+import feature ;
+import type ;
+import property ;
+
+feature.feature bison.prefix : : free ;
+type.register Y : y ;
+type.register YY : yy ;
+generators.register-standard bison.bison : Y : C H ;
+generators.register-standard bison.bison : YY : CPP HPP ;
+
+rule init ( )
+{
+}
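+
+# Illustrative usage (not from the upstream file): with "using bison ;" in
+# effect, .y/.yy sources listed in a Jamfile are run through bison
+# automatically; the target and file names below are made up.
+#
+#   exe calc : calc.yy main.cpp ;
+#   exe lang : grammar.y driver.c : <bison.prefix>lang_ ;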
+
+rule bison ( dst dst_header : src : properties * )
+{
+ local r = [ property.select bison.prefix : $(properties) ] ;
+ if $(r)
+ {
+ PREFIX_OPT on $(<) = -p $(r:G=) ;
+ }
+}
+
+actions bison
+{
+ bison $(PREFIX_OPT) -d -o $(<[1]) $(>)
+}
diff --git a/jam-files/boost-build/tools/boostbook-config.jam b/jam-files/boost-build/tools/boostbook-config.jam
new file mode 100644
index 000000000..6e3f3ddc1
--- /dev/null
+++ b/jam-files/boost-build/tools/boostbook-config.jam
@@ -0,0 +1,13 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for BoostBook tools. To use, just import this module.
+#
+# This module is deprecated.
+# using boostbook ;
+# with no arguments now suffices.
+
+import toolset : using ;
+
+using boostbook ;
diff --git a/jam-files/boost-build/tools/boostbook.jam b/jam-files/boost-build/tools/boostbook.jam
new file mode 100644
index 000000000..3a5964c62
--- /dev/null
+++ b/jam-files/boost-build/tools/boostbook.jam
@@ -0,0 +1,727 @@
+# Copyright 2003, 2004, 2005 Dave Abrahams
+# Copyright 2003, 2004, 2005 Douglas Gregor
+# Copyright 2005, 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines rules to handle generation of documentation
+# from BoostBook sources.
+#
+# The type of output is controlled by the <format> feature which can
+# have the following values::
+#
+# * html: Generates html documentation. This is the default.
+# * xhtml: Generates xhtml documentation
+# * htmlhelp: Generates html help output.
+# * onehtml: Generates a single html page.
+# * man: Generates man pages.
+# * pdf: Generates pdf documentation.
+# * ps: Generates postscript output.
+# * docbook: Generates docbook XML.
+# * fo: Generates XSL formatting objects.
+# * tests: Extracts test cases from the boostbook XML.
+#
+# format is an implicit feature, so typing pdf on the command
+# line (for example) is a short-cut for format=pdf.
+
+import "class" : new ;
+import common ;
+import errors ;
+import targets ;
+import feature ;
+import generators ;
+import print ;
+import property ;
+import project ;
+import property-set ;
+import regex ;
+import scanner ;
+import sequence ;
+import make ;
+import os ;
+import type ;
+import modules path project ;
+import build-system ;
+
+import xsltproc : xslt xslt-dir ;
+
+# Make this module into a project.
+project.initialize $(__name__) ;
+project boostbook ;
+
+
+feature.feature format : html xhtml htmlhelp onehtml man pdf ps docbook fo tests
+ : incidental implicit composite propagated ;
+
+type.register DTDXML : dtdxml ;
+type.register XML : xml ;
+type.register BOOSTBOOK : boostbook : XML ;
+type.register DOCBOOK : docbook : XML ;
+type.register FO : fo : XML ;
+type.register PDF : pdf ;
+type.register PS : ps ;
+type.register XSLT : xsl : XML ;
+type.register HTMLDIR ;
+type.register XHTMLDIR ;
+type.register HTMLHELP ;
+type.register MANPAGES ;
+type.register TESTS : tests ;
+# Artificial target type, used to require invocation of top-level
+# BoostBook generator.
+type.register BOOSTBOOK_MAIN ;
+
+
+# Initialize BoostBook support.
+rule init (
+ docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not
+ # provided, we use DOCBOOK_XSL_DIR from the environment
+ # (if available) or look in standard locations.
+ # Otherwise, we let the XML processor load the
+ # stylesheets remotely.
+
+ : docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use
+                        # DOCBOOK_DTD_DIR from the environment (if available) or
+ # look in standard locations. Otherwise, we let the XML
+ # processor load the DTD remotely.
+
+ : boostbook-dir ? # The BoostBook directory with the DTD and XSL subdirs.
+)
+{
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ check-boostbook-dir $(boostbook-dir) ;
+ find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ;
+
+        # Register generators only if we were called via "using boostbook ;".
+ generators.register-standard boostbook.dtdxml-to-boostbook : DTDXML : XML ;
+ generators.register-standard boostbook.boostbook-to-docbook : XML : DOCBOOK ;
+ generators.register-standard boostbook.boostbook-to-tests : XML : TESTS ;
+ generators.register-standard boostbook.docbook-to-onehtml : DOCBOOK : HTML ;
+ generators.register-standard boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ;
+ generators.register-standard boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ;
+ generators.register-standard boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ;
+ generators.register-standard boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ;
+ generators.register-standard boostbook.docbook-to-fo : DOCBOOK : FO ;
+
+ # The same about Jamfile main target rules.
+ IMPORT $(__name__) : boostbook : : boostbook ;
+ }
+ else
+ {
+ if $(docbook-xsl-dir)
+ {
+ modify-config ;
+ .docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ;
+ check-docbook-xsl-dir ;
+ }
+ if $(docbook-dtd-dir)
+ {
+ modify-config ;
+ .docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ;
+ check-docbook-dtd-dir ;
+ }
+ if $(boostbook-dir)
+ {
+ modify-config ;
+ check-boostbook-dir $(boostbook-dir) ;
+ local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ;
+ local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ;
+ .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
+ .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
+ check-boostbook-xsl-dir ;
+ check-boostbook-dtd-dir ;
+ }
+ }
+}
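+
+# Illustrative configuration sketch (not part of the upstream file): in
+# user-config.jam one might write (the directories are examples only):
+#
+#   using boostbook ;     # rely on the automatic search below
+#   using boostbook : /usr/share/xml/docbook/stylesheet/nwalsh
+#                   : /usr/share/xml/docbook/schema/dtd/4.2 ;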
+
+rule lock-config ( )
+{
+ if ! $(.initialized)
+ {
+ errors.user-error "BoostBook has not been configured." ;
+ }
+ if ! $(.config-locked)
+ {
+ .config-locked = true ;
+ }
+}
+
+rule modify-config ( )
+{
+ if $(.config-locked)
+ {
+ errors.user-error "BoostBook configuration cannot be changed after it has been used." ;
+ }
+}
+
+rule find-boost-in-registry ( keys * )
+{
+ local boost-root = ;
+ for local R in $(keys)
+ {
+ local installed-boost = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)"
+ : "InstallRoot" ] ;
+ if $(installed-boost)
+ {
+ boost-root += [ path.make $(installed-boost) ] ;
+ }
+ }
+ return $(boost-root) ;
+}
+
+rule check-docbook-xsl-dir ( )
+{
+ if $(.docbook-xsl-dir)
+ {
+ if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ]
+ {
+ errors.user-error "BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
+ }
+ else
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: BoostBook: found docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
+ }
+ }
+ }
+}
+
+rule check-docbook-dtd-dir ( )
+{
+ if $(.docbook-dtd-dir)
+ {
+ if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ]
+ {
+ errors.user-error "error: BoostBook: could not find docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
+ }
+ else
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: BoostBook: found docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
+ }
+ }
+ }
+}
+
+rule check-boostbook-xsl-dir ( )
+{
+ if ! $(.boostbook-xsl-dir)
+ {
+ errors.user-error "error: BoostBook: could not find boostbook XSL stylesheets." ;
+ }
+ else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ]
+ {
+ errors.user-error "error: BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
+ }
+ else
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: BoostBook: found boostbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
+ }
+ }
+}
+
+rule check-boostbook-dtd-dir ( )
+{
+ if ! $(.boostbook-dtd-dir)
+ {
+ errors.user-error "error: BoostBook: could not find boostbook DTD." ;
+ }
+ else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ]
+ {
+ errors.user-error "error: BoostBook: could not find boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
+ }
+ else
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: BoostBook: found boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
+ }
+ }
+}
+
+rule check-boostbook-dir ( boostbook-dir ? )
+{
+ if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ]
+ {
+ errors.user-error "error: BoostBook: could not find boostbook in:" [ path.native $(boostbook-dir) ] ;
+ }
+}
+
+rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? )
+{
+ docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ;
+ docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ;
+ boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ;
+
+ # Look for the boostbook stylesheets relative to BOOST_ROOT
+ # and Boost.Build.
+ local boost-build-root = [ path.make [ build-system.location ] ] ;
+ local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. ] ;
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ if $(boost-root)
+ {
+ boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ] ;
+ }
+ boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ;
+
+ # Try to find the tools in platform specific locations
+ if [ os.name ] = NT
+ {
+ # If installed by the Boost installer.
+ local boost-root = ;
+
+ local boost-installer-versions = snapshot cvs 1.33.0 ;
+ local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ;
+ local boostpro-installer-versions =
+ 1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0
+ 1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ;
+
+ local old-installer-root = [ find-boost-in-registry Boost.org\\$(boost-installer-versions) ] ;
+
+ # Make sure that the most recent version is searched for first
+ boost-root += [ sequence.reverse
+ [ find-boost-in-registry
+ Boost-Consulting.com\\$(boost-consulting-installer-versions)
+ boostpro.com\\$(boostpro-installer-versions) ] ] ;
+
+ # Plausible locations.
+ local root = [ PWD ] ;
+ while $(root) != $(root:D) { root = $(root:D) ; }
+ root = [ path.make $(root) ] ;
+ local search-dirs = ;
+ local docbook-search-dirs = ;
+ for local p in $(boost-root) {
+ search-dirs += [ path.join $(p) tools ] ;
+ }
+ for local p in $(old-installer-root)
+ {
+ search-dirs += [ path.join $(p) share ] ;
+ docbook-search-dirs += [ path.join $(p) share ] ;
+ }
+ search-dirs += [ path.join $(root) Boost tools ] ;
+ search-dirs += [ path.join $(root) Boost share ] ;
+ docbook-search-dirs += [ path.join $(root) Boost share ] ;
+
+ docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ;
+ docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ;
+ boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ;
+ }
+ else
+ {
+ # Plausible locations.
+
+ local share = /usr/local/share /usr/share /opt/share /opt/local/share ;
+ local dtd-versions = 4.2 ;
+
+ docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ;
+ docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ] ;
+ docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ;
+
+ docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ;
+ docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook : xml-dtd-$(dtd-versions)* ] ;
+ docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ] ;
+
+ boostbook-dir ?= [ path.glob $(share) : boostbook* ] ;
+
+ # Ubuntu Linux
+ docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet : nwalsh ] ;
+ docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd : $(dtd-versions) ] ;
+ }
+
+ if $(docbook-xsl-dir)
+ {
+ .docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ;
+ }
+ if $(docbook-dtd-dir)
+ {
+ .docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ;
+ }
+
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice: Boost.Book: searching XSL/DTD in" ;
+ ECHO "notice:" [ sequence.transform path.native : $(boostbook-dir) ] ;
+ }
+ local boostbook-xsl-dir ;
+ for local dir in $(boostbook-dir) {
+ boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ;
+ }
+ local boostbook-dtd-dir ;
+ for local dir in $(boostbook-dir) {
+ boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ;
+ }
+ .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
+ .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
+
+ check-docbook-xsl-dir ;
+ check-docbook-dtd-dir ;
+ check-boostbook-xsl-dir ;
+ check-boostbook-dtd-dir ;
+}
+
+rule xsl-dir
+{
+ lock-config ;
+ return $(.boostbook-xsl-dir) ;
+}
+
+rule dtd-dir
+{
+ lock-config ;
+ return $(.boostbook-dtd-dir) ;
+}
+
+rule docbook-xsl-dir
+{
+ lock-config ;
+ return $(.docbook-xsl-dir) ;
+}
+
+rule docbook-dtd-dir
+{
+ lock-config ;
+ return $(.docbook-dtd-dir) ;
+}
+
+rule dtdxml-to-boostbook ( target : source : properties * )
+{
+ lock-config ;
+ xslt $(target) : $(source) "$(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl"
+ : $(properties) ;
+}
+
+rule boostbook-to-docbook ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ;
+ xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+rule docbook-to-onehtml ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ;
+ xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+rule docbook-to-htmldir ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ;
+ xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html ;
+}
+
+rule docbook-to-xhtmldir ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ;
+ xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : xhtml ;
+}
+
+rule docbook-to-htmlhelp ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ;
+ xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : htmlhelp ;
+}
+
+rule docbook-to-manpages ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ;
+ xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : man ;
+}
+
+rule docbook-to-fo ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ;
+ xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+rule format-catalog-path ( path )
+{
+ local result = $(path) ;
+ if [ xsltproc.is-cygwin ]
+ {
+ if [ os.name ] = NT
+ {
+ drive = [ MATCH ^/(.):(.*)$ : $(path) ] ;
+ result = /cygdrive/$(drive[1])$(drive[2]) ;
+ }
+ }
+ else
+ {
+ if [ os.name ] = CYGWIN
+ {
+ local native-path = [ path.native $(path) ] ;
+ result = [ path.make $(native-path:W) ] ;
+ }
+ }
+ return [ regex.replace $(result) " " "%20" ] ;
+}
+
+rule generate-xml-catalog ( target : sources * : properties * )
+{
+ print.output $(target) ;
+
+ # BoostBook DTD catalog entry
+ local boostbook-dtd-dir = [ boostbook.dtd-dir ] ;
+ if $(boostbook-dtd-dir)
+ {
+ boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ;
+ }
+
+ print.text
+ "<?xml version=\"1.0\"?>"
+ "<!DOCTYPE catalog "
+ " PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\""
+ " \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">"
+ "<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">"
+ " <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>"
+ : true ;
+
+ local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ;
+ if ! $(docbook-xsl-dir)
+ {
+ ECHO "BoostBook warning: no DocBook XSL directory specified." ;
+ ECHO " If you have the DocBook XSL stylesheets installed, please " ;
+ ECHO " set DOCBOOK_XSL_DIR to the stylesheet directory on either " ;
+ ECHO " the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ;
+ ECHO " Boost.Jam configuration file. The DocBook XSL stylesheets " ;
+ ECHO " are available here: http://docbook.sourceforge.net/ " ;
+ ECHO " Stylesheets will be downloaded on-the-fly (very slow!) " ;
+ }
+ else
+ {
+ docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ;
+ print.text " <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ;
+ }
+
+ local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ;
+ if ! $(docbook-dtd-dir)
+ {
+ ECHO "BoostBook warning: no DocBook DTD directory specified." ;
+ ECHO " If you have the DocBook DTD installed, please set " ;
+ ECHO " DOCBOOK_DTD_DIR to the DTD directory on either " ;
+ ECHO " the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ;
+ ECHO " Boost.Jam configuration file. The DocBook DTD is available " ;
+ ECHO " here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ;
+ ECHO " The DTD will be downloaded on-the-fly (very slow!) " ;
+ }
+ else
+ {
+ docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ;
+ print.text " <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ;
+ }
+
+ print.text "</catalog>" ;
+}
+
+rule xml-catalog ( )
+{
+ if ! $(.xml-catalog)
+ {
+ # The target is created as part of the root project. But ideally
+ # it would be created as part of the boostbook project. This is not
+        # currently possible as such global projects don't inherit things like
+ # the build directory.
+
+ # Find the root project.
+ local root-project = [ project.current ] ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config &&
+ [ project.attribute $(root-project) parent-module ] != project-config
+ {
+ root-project = [ project.attribute $(root-project) parent-module ] ;
+ }
+ .xml-catalog = [ new file-target boostbook_catalog
+ : XML
+ : [ project.target $(root-project) ]
+ : [ new action : boostbook.generate-xml-catalog ]
+ :
+ ] ;
+ .xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ;
+ .xml-catalog-file = $(.xml-catalog-file:J=/) ;
+ }
+ return $(.xml-catalog) $(.xml-catalog-file) ;
+}
+
+class boostbook-generator : generator
+{
+ import feature ;
+ import virtual-target ;
+ import generators ;
+ import boostbook ;
+
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ # Generate the catalog, but only once...
+ local global-catalog = [ boostbook.xml-catalog ] ;
+ local catalog = $(global-catalog[1]) ;
+ local catalog-file = $(global-catalog[2]) ;
+ local targets ;
+
+ # Add the catalog to the property set
+ property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ;
+
+ local type = none ;
+ local manifest ;
+ local format = [ $(property-set).get <format> ] ;
+ switch $(format)
+ {
+ case html :
+ {
+ type = HTMLDIR ;
+ manifest = HTML.manifest ;
+ }
+ case xhtml :
+ {
+ type = XHTMLDIR ;
+ manifest = HTML.manifest ;
+ }
+ case htmlhelp :
+ {
+ type = HTMLHELP ;
+ manifest = HTML.manifest ;
+ }
+
+ case onehtml : type = HTML ;
+
+ case man :
+ {
+ type = MANPAGES ;
+ manifest = man.manifest ;
+ }
+
+ case docbook : type = DOCBOOK ;
+ case fo : type = FO ;
+ case pdf : type = PDF ;
+ case ps : type = PS ;
+ case tests : type = TESTS ;
+ }
+
+ if $(manifest)
+ {
+ # Create DOCBOOK file from BOOSTBOOK sources.
+ local base-target = [ generators.construct $(project)
+ : DOCBOOK : $(property-set) : $(sources) ] ;
+ base-target = $(base-target[2]) ;
+ $(base-target).depends $(catalog) ;
+
+ # Generate HTML/PDF/PS from DOCBOOK.
+ local target = [ generators.construct $(project) $(name)_$(manifest)
+ : $(type)
+ : [ $(property-set).add-raw
+ <xsl:param>manifest=$(name)_$(manifest) ]
+ : $(base-target) ] ;
+ local name = [ $(property-set).get <name> ] ;
+ name ?= $(format) ;
+ $(target[2]).set-path $(name) ;
+ $(target[2]).depends $(catalog) ;
+
+ targets += $(target[2]) ;
+ }
+ else {
+ local target = [ generators.construct $(project)
+ : $(type) : $(property-set) : $(sources) ] ;
+
+ if ! $(target)
+ {
+ errors.error "Cannot build documentation type '$(format)'" ;
+ }
+ else
+ {
+ $(target[2]).depends $(catalog) ;
+ targets += $(target[2]) ;
+ }
+ }
+
+ return $(targets) ;
+ }
+}
+
+generators.register [ new boostbook-generator boostbook.main : : BOOSTBOOK_MAIN ] ;
+
+# Creates a boostbook target.
+rule boostbook ( target-name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target-name) : $(project) : BOOSTBOOK_MAIN
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
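+
+# Illustrative usage (not from the upstream file): a documentation Jamfile
+# might declare (the names are examples only):
+#
+#   boostbook mydoc : mydoc.xml : <format>html ;
+#
+# Running "bjam pdf" then rebuilds the same target as PDF, since <format> is
+# an implicit feature.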
+
+#############################################################################
+# Dependency scanners
+#############################################################################
+# XInclude scanner. Mostly stolen from c-scanner :)
+# Note that this assumes an "xi" prefix for XIncludes. This isn't always the
+# case for XML documents, but we'll assume it's true for anything we encounter.
+class xinclude-scanner : scanner
+{
+ import virtual-target ;
+ import path ;
+ import scanner ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "xi:include[ ]*href=\"([^\"]*)\"" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ NOCARE $(matches) ;
+ INCLUDES $(target) : $(matches) ;
+ SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+
+ scanner.propagate $(__name__) : $(matches) : $(target) ;
+ }
+}
+
+scanner.register xinclude-scanner : xsl:path ;
+type.set-scanner XML : xinclude-scanner ;
+
+rule boostbook-to-tests ( target : source : properties * )
+{
+ lock-config ;
+ local boost_root = [ modules.peek : BOOST_ROOT ] ;
+ local native-path =
+ [ path.native [ path.join $(.boostbook-xsl-dir) testing Jamfile ] ] ;
+ local stylesheet = $(native-path:S=.xsl) ;
+ xslt $(target) : $(source) $(stylesheet)
+ : $(properties) <xsl:param>boost.root=$(boost_root)
+ ;
+}
+
+
diff --git a/jam-files/boost-build/tools/borland.jam b/jam-files/boost-build/tools/borland.jam
new file mode 100644
index 000000000..6e43ca93a
--- /dev/null
+++ b/jam-files/boost-build/tools/borland.jam
@@ -0,0 +1,220 @@
+# Copyright 2005 Dave Abrahams
+# Copyright 2003 Rene Rivera
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Borland's command-line compiler.
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature : get-values ;
+import type ;
+import common ;
+
+feature.extend toolset : borland ;
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters borland :
+ version $(version) ] ;
+
+ local command = [ common.get-invocation-command borland : bcc32.exe
+ : $(command) ] ;
+
+ common.handle-options borland : $(condition) : $(command) : $(options) ;
+
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ root = $(command:D) ;
+
+ flags borland.compile STDHDRS $(condition) : $(root)/include/ ;
+ flags borland.link STDLIBPATH $(condition) : $(root)/lib ;
+ flags borland.link RUN_PATH $(condition) : $(root)/bin ;
+ flags borland .root $(condition) : $(root)/bin/ ;
+}
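+
+# Illustrative configuration sketch (not part of the upstream file); the
+# version and path below are hypothetical.
+#
+#   using borland ;                                             # bcc32.exe on PATH
+#   using borland : 5.5.1 : "C:/Borland/BCC55/Bin/bcc32.exe" ;  # explicit command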
+
+
+# A borland-specific target type
+type.register BORLAND.TDS : tds ;
+
+# Declare generators
+
+generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ;
+generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ;
+
+generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ;
+generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ;
+generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ;
+generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ;
+
+# Declare flags
+
+flags borland.compile OPTIONS <debug-symbols>on : -v ;
+flags borland.link OPTIONS <debug-symbols>on : -v ;
+
+flags borland.compile OPTIONS <optimization>off : -Od ;
+flags borland.compile OPTIONS <optimization>speed : -O2 ;
+flags borland.compile OPTIONS <optimization>space : -O1 ;
+
+if $(.BORLAND_HAS_FIXED_INLINING_BUGS)
+{
+ flags borland CFLAGS <inlining>off : -vi- ;
+ flags borland CFLAGS <inlining>on : -vi -w-inl ;
+ flags borland CFLAGS <inlining>full : -vi -w-inl ;
+}
+else
+{
+ flags borland CFLAGS : -vi- ;
+}
+
+flags borland.compile OPTIONS <warnings>off : -w- ;
+flags borland.compile OPTIONS <warnings>all : -w ;
+flags borland.compile OPTIONS <warnings-as-errors>on : -w! ;
+
+
+# Deal with various runtime configs...
+
+# This should not be used when building a DLL.
+flags borland OPTIONS <user-interface>console : -tWC ;
+
+# -tWR sets -tW as well, so we turn it off here and then turn it
+# on again later if we need it:
+flags borland OPTIONS <runtime-link>shared : -tWR -tWC ;
+flags borland OPTIONS <user-interface>gui : -tW ;
+
+flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ;
+# Hmm.. not sure what's going on here.
+flags borland OPTIONS : -WM- ;
+flags borland OPTIONS <threading>multi : -tWM ;
+
+
+
+flags borland.compile OPTIONS <cxxflags> ;
+flags borland.compile DEFINES <define> ;
+flags borland.compile INCLUDES <include> ;
+
+flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ;
+
+#
+# for C++ compiles the following options are turned on by default:
+#
+# -j5 stops after 5 errors
+# -g255 allow an unlimited number of warnings
+# -q no banner
+# -c compile to object
+# -P C++ code regardless of file extension
+# -a8 8 byte alignment; this option is on in the IDE by default
+# and affects binary compatibility.
+#
+
+# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+}
+
+# For C, we don't pass -P flag
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+}
+
+
+# Declare flags and action for linking
+toolset.flags borland.link OPTIONS <debug-symbols>on : -v ;
+toolset.flags borland.link LIBRARY_PATH <library-path> ;
+toolset.flags borland.link FINDLIBS_ST <find-static-library> ;
+toolset.flags borland.link FINDLIBS_SA <find-shared-library> ;
+toolset.flags borland.link LIBRARIES <library-file> ;
+
+flags borland.link OPTIONS <linkflags> ;
+flags borland.link OPTIONS <link>shared : -tWD ;
+
+flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ;
+flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ;
+
+
+
+# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly
+# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add
+# $(BCC_TOOL_PATH) to the path
+# The NEED_IMPLIB variable controls whether we need to invoke implib.
+
+flags borland.archive AROPTIONS <archiveflags> ;
+
+# Declare the action for archives. We do not use a response file since it is
+# hard to get "+-" into it. The /P256 option increases the 'page' size --
+# with values that are too low, tlib fails when building large applications.
+# CONSIDER: don't know what 'together' is for...
+actions updated together piecemeal archive
+{
+ $(.set-path)$(.root:W)$(.old-path)
+ tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)"
+}
+
+
+if [ os.name ] = CYGWIN
+{
+ .set-path = "cmd /S /C set \"PATH=" ;
+ .old-path = ";%PATH%\" \"&&\"" ;
+
+
+ # Couldn't get TLIB to stop being confused about pathnames
+ # containing dashes (it seemed to treat them as option separators
+ # when passed through from bash), so we explicitly write the
+ # command into a .bat file and execute that. TLIB is also finicky
+ # about pathname style! Forward slashes, too, are treated as
+ # options.
+ actions updated together piecemeal archive
+ {
+ chdir $(<:D)
+ echo +-$(>:BS) > $(<:BS).rsp
+ $(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp
+ }
+}
+else if [ os.name ] = NT
+{
+ .set-path = "set \"PATH=" ;
+ .old-path = ";%PATH%\"
+ " ;
+}
+else
+{
+ .set-path = "PATH=\"" ;
+ .old-path = "\":$PATH
+ export PATH
+ " ;
+}
+
+RM = [ common.rm-command ] ;
+
+nl = "
+" ;
+
+actions link
+{
+ $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
+
+actions link.dll bind LIBRARIES RSP
+{
+ $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)"
+}
+
+# It seems impossible to specify an output file with a directory when
+# compiling asm files using bcc32, so use tasm32 directly.
+# /ml makes all symbol names case-sensitive.
+actions asm
+{
+ $(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)"
+}
+
diff --git a/jam-files/boost-build/tools/builtin.jam b/jam-files/boost-build/tools/builtin.jam
new file mode 100644
index 000000000..148e7308d
--- /dev/null
+++ b/jam-files/boost-build/tools/builtin.jam
@@ -0,0 +1,960 @@
+# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
+# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera
+# Copyright 2006 Juergen Hunold
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines standard features and rules.
+
+import alias ;
+import "class" : new ;
+import errors ;
+import feature ;
+import generators ;
+import numbers ;
+import os ;
+import path ;
+import print ;
+import project ;
+import property ;
+import regex ;
+import scanner ;
+import sequence ;
+import stage ;
+import symlink ;
+import toolset ;
+import type ;
+import targets ;
+import types/register ;
+import utility ;
+import virtual-target ;
+import message ;
+import convert ;
+
+# FIXME: the following generate module import is not needed here but removing it
+# too hastily will break using code (e.g. the main Boost library Jamroot file)
+# that forgot to import the generate module before calling the generate rule.
+import generate ;
+
+
+.os-names = aix bsd cygwin darwin freebsd hpux iphone linux netbsd
+ openbsd osf qnx qnxnto sgi solaris unix unixware windows
+ elf # Not actually an OS -- used for targeting bare metal where
+ # object format is ELF. This catches both -elf and -eabi gcc
+        # targets as well as other compilers targeting ELF. It is not
+        # clear how often we need to key off ELF specifically as opposed
+ # to other bare metal targets, but let's stick with gcc naming.
+ ;
+
+# Feature used to determine which OS we're on. New <target-os> and <host-os>
+# features should be used instead.
+local os = [ modules.peek : OS ] ;
+feature.feature os : $(os) : propagated link-incompatible ;
+
+
+# Translates the OS bjam is currently running on to the os tags used in
+# host-os and target-os, i.e. returns the running host-os.
+#
+local rule default-host-os ( )
+{
+ local host-os ;
+ if [ os.name ] in $(.os-names:U)
+ {
+ host-os = [ os.name ] ;
+ }
+ else
+ {
+ switch [ os.name ]
+ {
+ case NT : host-os = windows ;
+ case AS400 : host-os = unix ;
+ case MINGW : host-os = windows ;
+ case BSDI : host-os = bsd ;
+ case COHERENT : host-os = unix ;
+ case DRAGONFLYBSD : host-os = bsd ;
+ case IRIX : host-os = sgi ;
+ case MACOSX : host-os = darwin ;
+ case KFREEBSD : host-os = freebsd ;
+ case LINUX : host-os = linux ;
+ case SUNOS :
+ ECHO "SunOS is not a supported operating system." ;
+            ECHO "We believe the last version of SunOS was released in 1992, " ;
+            ECHO "so if you get this message, something is very wrong with the configuration logic. " ;
+ ECHO "Please report this as a bug. " ;
+ EXIT ;
+ case * : host-os = unix ;
+ }
+ }
+ return $(host-os:L) ;
+}
+
+
+# The two OS features define a known set of abstract OS names. The host-os is
+# the OS under which bjam is running. Even though this should really be a fixed
+# property we need to list all the values to prevent unknown value errors. Both
+# set the default value to the current OS to account for the default use case of
+# building on the target OS.
+feature.feature host-os : $(.os-names) ;
+feature.set-default host-os : [ default-host-os ] ;
+
+feature.feature target-os : $(.os-names) : propagated link-incompatible ;
+feature.set-default target-os : [ default-host-os ] ;
+
+
+feature.feature toolset : : implicit propagated symmetric ;
+feature.feature stdlib : native : propagated composite ;
+feature.feature link : shared static : propagated ;
+feature.feature runtime-link : shared static : propagated ;
+feature.feature runtime-debugging : on off : propagated ;
+feature.feature optimization : off speed space none : propagated ;
+feature.feature profiling : off on : propagated ;
+feature.feature inlining : off on full : propagated ;
+feature.feature threading : single multi : propagated ;
+feature.feature rtti : on off : propagated ;
+feature.feature exception-handling : on off : propagated ;
+
+# Whether there is support for asynchronous EH (e.g. catching SEGVs).
+feature.feature asynch-exceptions : off on : propagated ;
+
+# Whether all extern "C" functions are considered nothrow by default.
+feature.feature extern-c-nothrow : off on : propagated ;
+
+feature.feature debug-symbols : on off none : propagated ;
+# Controls whether the binary should be stripped -- that is, have everything
+# not necessary for running it removed. This option should not be needed very
+# often. Also, this feature will show up in the target paths of everything, not
+# just binaries. That should be fixed when implementing feature relevance.
+feature.feature strip : off on : propagated ;
+feature.feature define : : free ;
+feature.feature undef : : free ;
+feature.feature "include" : : free path ; #order-sensitive ;
+feature.feature cflags : : free ;
+feature.feature cxxflags : : free ;
+feature.feature fflags : : free ;
+feature.feature asmflags : : free ;
+feature.feature linkflags : : free ;
+feature.feature archiveflags : : free ;
+feature.feature version : : free ;
+
+# Generic, i.e. non-language specific, flags for tools.
+feature.feature flags : : free ;
+feature.feature location-prefix : : free ;
+
+
+# The following features are incidental since they have no effect on built
+# products. Not making them incidental will result in problems in corner cases,
+# e.g.:
+#
+# unit-test a : a.cpp : <use>b ;
+# lib b : a.cpp b ;
+#
+# Here, if <use> is not incidental, we would decide we have two targets for
+# a.obj with different properties and complain about it.
+#
+# Note that making a feature incidental does not mean it is ignored. It may be
+# ignored when creating a virtual target, but the rest of the build process will
+# still use it.
+feature.feature use : : free dependency incidental ;
+feature.feature dependency : : free dependency incidental ;
+feature.feature implicit-dependency : : free dependency incidental ;
+
+feature.feature warnings :
+ on # Enable default/"reasonable" warning level for the tool.
+ all # Enable all possible warnings issued by the tool.
+ off # Disable all warnings issued by the tool.
+ : incidental propagated ;
+
+feature.feature warnings-as-errors :
+ off # Do not fail the compilation if there are warnings.
+ on # Fail the compilation if there are warnings.
+ : incidental propagated ;
+
+# Feature that allows us to configure the maximal template instantiation depth
+# level allowed by a C++ compiler. Applies only to C++ toolsets whose compilers
+# actually support this configuration setting.
+#
+# Note that Boost Build currently does not allow defining features that take any
+# positive integral value as a parameter, which is what we need here, so we just
+# define some of the values here and leave it up to the user to extend this set
+# as needed using the feature.extend rule.
+#
+# TODO: This should be upgraded as soon as Boost Build adds support for custom
+# validated feature values or at least features allowing any positive integral
+# value. See the related Boost Build trac ticket #194.
+#
+feature.feature c++-template-depth
+ :
+ [ numbers.range 64 1024 : 64 ]
+ [ numbers.range 20 1000 : 10 ]
+ # Maximum template instantiation depth guaranteed for ANSI/ISO C++
+ # conforming programs.
+ 17
+ :
+ incidental optional propagated ;
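+
+# For illustration: a project that needs a depth outside the predefined ranges
+# could extend this feature itself, e.g. in its Jamroot (the target name and
+# value below are hypothetical):
+#
+#   import feature ;
+#   feature.extend c++-template-depth : 2048 ;
+#   exe deep : deep.cpp : <c++-template-depth>2048 ;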
+
+feature.feature source : : free dependency incidental ;
+feature.feature library : : free dependency incidental ;
+feature.feature file : : free dependency incidental ;
+feature.feature find-shared-library : : free ; #order-sensitive ;
+feature.feature find-static-library : : free ; #order-sensitive ;
+feature.feature library-path : : free path ; #order-sensitive ;
+
+# Internal feature.
+feature.feature library-file : : free dependency ;
+
+feature.feature name : : free ;
+feature.feature tag : : free ;
+feature.feature search : : free path ; #order-sensitive ;
+feature.feature location : : free path ;
+feature.feature dll-path : : free path ;
+feature.feature hardcode-dll-paths : true false : incidental ;
+
+
+# An internal feature that holds the paths of all dependency shared libraries.
+# On Windows, it is needed so that we can add all those paths to PATH when
+# running applications. On Linux, it is needed to add proper -rpath-link command
+# line options.
+feature.feature xdll-path : : free path ;
+
+# Provides means to specify def-file for windows DLLs.
+feature.feature def-file : : free dependency ;
+
+feature.feature suppress-import-lib : false true : incidental ;
+
+# Internal feature used to store the name of a bjam action to call when building
+# a target.
+feature.feature action : : free ;
+
+# This feature is used to allow specific generators to run. For example, QT
+# tools can only be invoked when QT library is used. In that case, <allow>qt
+# will be in usage requirement of the library.
+feature.feature allow : : free ;
+
+# The addressing model to generate code for. Currently a limited set only
+# specifying the bit size of pointers.
+feature.feature address-model : 16 32 64 32_64 : propagated optional ;
+
+# Type of CPU architecture to compile for.
+feature.feature architecture :
+ # x86 and x86-64
+ x86
+
+ # ia64
+ ia64
+
+ # Sparc
+ sparc
+
+ # RS/6000 & PowerPC
+ power
+
+ # MIPS/SGI
+ mips1 mips2 mips3 mips4 mips32 mips32r2 mips64
+
+ # HP/PA-RISC
+ parisc
+
+ # Advanced RISC Machines
+ arm
+
+ # Combined architectures for platforms/toolsets that support building for
+ # multiple architectures at once. "combined" would be the default multi-arch
+ # for the toolset.
+ combined
+ combined-x86-power
+
+ : propagated optional ;
+
+# The specific instruction set in an architecture to compile.
+feature.feature instruction-set :
+ # x86 and x86-64
+ native i386 i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3
+ pentium3m pentium-m pentium4 pentium4m prescott nocona core2 conroe conroe-xe
+ conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp
+ athlon-mp k8 opteron athlon64 athlon-fx winchip-c6 winchip2 c3 c3-2
+
+ # ia64
+ itanium itanium1 merced itanium2 mckinley
+
+ # Sparc
+ v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934
+ sparclet tsc701 v9 ultrasparc ultrasparc3
+
+ # RS/6000 & PowerPC
+ 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400
+ 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2
+ power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a
+
+ # MIPS
+ 4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650
+ r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300
+ vr5000 vr5400 vr5500
+
+ # HP/PA-RISC
+ 700 7100 7100lc 7200 7300 8000
+
+ # Advanced RISC Machines
+ armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312
+
+ : propagated optional ;
+
+# Used to select a specific variant of C++ ABI if the compiler supports several.
+feature.feature c++abi : : propagated optional ;
+
+feature.feature conditional : : incidental free ;
+
+# The value of 'no' prevents building of a target.
+feature.feature build : yes no : optional ;
+
+# Windows-specific features
+
+feature.feature user-interface : console gui wince native auto ;
+
+feature.feature variant : : implicit composite propagated symmetric ;
+
+
+# Declares a new variant.
+#
+# First determines explicit properties for this variant, by refining parents'
+# explicit properties with the passed explicit properties. The result is
+# remembered and will be used if this variant is used as parent.
+#
+# Second, determines the full property set for this variant by adding to the
+# explicit properties default values for all missing non-symmetric properties.
+#
+# Lastly, makes appropriate value of 'variant' property expand to the full
+# property set.
+#
+rule variant ( name # Name of the variant
+ : parents-or-properties * # Specifies parent variants if
+ # 'explicit-properties' are given, and either
+ # explicit properties or parents otherwise.
+ : explicit-properties * # Explicit properties.
+ )
+{
+ local parents ;
+ if ! $(explicit-properties)
+ {
+ if $(parents-or-properties[1]:G)
+ {
+ explicit-properties = $(parents-or-properties) ;
+ }
+ else
+ {
+ parents = $(parents-or-properties) ;
+ }
+ }
+ else
+ {
+ parents = $(parents-or-properties) ;
+ }
+
+ # The problem is that we have to check for conflicts between base variants.
+ if $(parents[2])
+ {
+ errors.error "multiple base variants are not yet supported" ;
+ }
+
+ local inherited ;
+ # Add explicitly specified properties for parents.
+ for local p in $(parents)
+ {
+ # TODO: This check may be made stricter.
+ if ! [ feature.is-implicit-value $(p) ]
+ {
+ errors.error "Invalid base variant" $(p) ;
+ }
+
+ inherited += $(.explicit-properties.$(p)) ;
+ }
+ property.validate $(explicit-properties) ;
+ explicit-properties = [ property.refine $(inherited)
+ : $(explicit-properties) ] ;
+
+ # Record explicitly specified properties for this variant. We do this after
+ # inheriting parents' properties so they affect other variants derived from
+ # this one.
+ .explicit-properties.$(name) = $(explicit-properties) ;
+
+ feature.extend variant : $(name) ;
+ feature.compose <variant>$(name) : $(explicit-properties) ;
+}
+IMPORT $(__name__) : variant : : variant ;
+
+
+variant debug : <optimization>off <debug-symbols>on <inlining>off
+ <runtime-debugging>on ;
+variant release : <optimization>speed <debug-symbols>off <inlining>full
+ <runtime-debugging>off <define>NDEBUG ;
+variant profile : release : <profiling>on <debug-symbols>on ;
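+
+# For illustration: user code may declare its own variant with the rule above,
+# deriving from one of the built-in ones, e.g. (hypothetical names):
+#
+#   variant release-symbols : release : <debug-symbols>on ;
+#   exe hello : hello.cpp : <variant>release-symbols ;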
+
+
+class searched-lib-target : abstract-file-target
+{
+ rule __init__ ( name
+ : project
+ : shared ?
+ : search *
+ : action
+ )
+ {
+ abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
+ : $(action) : ;
+
+ self.shared = $(shared) ;
+ self.search = $(search) ;
+ }
+
+ rule shared ( )
+ {
+ return $(self.shared) ;
+ }
+
+ rule search ( )
+ {
+ return $(self.search) ;
+ }
+
+ rule actualize-location ( target )
+ {
+ NOTFILE $(target) ;
+ }
+
+ rule path ( )
+ {
+ }
+}
+
+
+# The generator class for libraries (target type LIB). Depending on properties
+# it will request building of the appropriate specific library type --
+# SHARED_LIB, STATIC_LIB or SEARCHED_LIB.
+#
+class lib-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ # The lib generator is composing, and can be only invoked with an
+ # explicit name. This check is present in generator.run (and so in
+ # builtin.linking-generator) but duplicated here to avoid doing extra
+ # work.
+ if $(name)
+ {
+ local properties = [ $(property-set).raw ] ;
+ # Determine the needed target type.
+ local actual-type ;
+ # <source> files can be generated by the <conditional>@rule feature,
+ # in which case we do not consider this a SEARCHED_LIB type.
+ if ! <source> in $(properties:G) &&
+ ( <search> in $(properties:G) || <name> in $(properties:G) )
+ {
+ actual-type = SEARCHED_LIB ;
+ }
+ else if <file> in $(properties:G)
+ {
+ actual-type = LIB ;
+ }
+ else if <link>shared in $(properties)
+ {
+ actual-type = SHARED_LIB ;
+ }
+ else
+ {
+ actual-type = STATIC_LIB ;
+ }
+ property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
+ # Construct the target.
+ return [ generators.construct $(project) $(name) : $(actual-type)
+ : $(property-set) : $(sources) ] ;
+ }
+ }
+
+ rule viable-source-types ( )
+ {
+ return * ;
+ }
+}
+
+
+generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
+
+
+# The implementation of the 'lib' rule. Beyond standard syntax that rule allows
+# simplified: "lib a b c ;".
+#
+rule lib ( names + : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ if $(names[2])
+ {
+ if <name> in $(requirements:G)
+ {
+ errors.user-error "When several names are given to the 'lib' rule" :
+ "it is not allowed to specify the <name> feature." ;
+ }
+ if $(sources)
+ {
+ errors.user-error "When several names are given to the 'lib' rule" :
+ "it is not allowed to specify sources." ;
+ }
+ }
+
+ # This is a circular module dependency so it must be imported here.
+ import targets ;
+
+ local project = [ project.current ] ;
+ local result ;
+
+ for local name in $(names)
+ {
+ local r = $(requirements) ;
+ # Support " lib a ; " and " lib a b c ; " syntax.
+ if ! $(sources) && ! <name> in $(requirements:G)
+ && ! <file> in $(requirements:G)
+ {
+ r += <name>$(name) ;
+ }
+ result += [ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : LIB
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(r) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+ }
+ return $(result) ;
+}
+IMPORT $(__name__) : lib : : lib ;
+
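+# For illustration, some typical uses of the 'lib' rule (hypothetical names and
+# paths):
+#
+#   lib z : : <name>z <search>/usr/lib ;  # searched (pre-installed) library
+#   lib png : z : <name>png ;             # searched library depending on z
+#   lib util : util.cpp ;                 # library built from sources
+#   lib a b c ;                           # simplified multi-name form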
+
+class searched-lib-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( )
+ {
+ # The requirements cause the generators to be tried *only* when we're
+ # building a lib target with a 'search' feature. This seems ugly --- all
+ # we want is to make sure searched-lib-generator is not invoked deep
+ # inside transformation search to produce intermediate targets.
+ generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if $(name)
+ {
+ # If 'name' is empty, it means we have not been called to build a
+ # top-level target. In this case, we just fail immediately, because
+ # searched-lib-generator cannot be used to produce intermediate
+ # targets.
+
+ local properties = [ $(property-set).raw ] ;
+ local shared ;
+ if <link>shared in $(properties)
+ {
+ shared = true ;
+ }
+
+ local search = [ feature.get-values <search> : $(properties) ] ;
+
+ local a = [ new null-action $(property-set) ] ;
+ local lib-name = [ feature.get-values <name> : $(properties) ] ;
+ lib-name ?= $(name) ;
+ local t = [ new searched-lib-target $(lib-name) : $(project)
+ : $(shared) : $(search) : $(a) ] ;
+ # We return sources for a simple reason. If there is
+ # lib png : z : <name>png ;
+ # the 'z' target should be returned, so that apps linking to 'png'
+ # will link to 'z', too.
+ return [ property-set.create <xdll-path>$(search) ]
+ [ virtual-target.register $(t) ] $(sources) ;
+ }
+ }
+}
+
+generators.register [ new searched-lib-generator ] ;
+
+
+class prebuilt-lib-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local f = [ $(property-set).get <file> ] ;
+ return $(f) $(sources) ;
+ }
+}
+
+generators.register
+ [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
+
+generators.override builtin.prebuilt : builtin.lib-generator ;
+
+class preprocessed-target-class : basic-target
+{
+ import generators ;
+ rule construct ( name : sources * : property-set )
+ {
+ local result = [ generators.construct [ project ]
+ $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ;
+ if ! $(result)
+ {
+ result = [ generators.construct [ project ]
+ $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ;
+ }
+ if ! $(result)
+ {
+ local s ;
+ for x in $(sources)
+ {
+ s += [ $(x).name ] ;
+ }
+ local p = [ project ] ;
+ errors.user-error
+ "In project" [ $(p).name ] :
+ "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." ;
+ }
+ return $(result) ;
+ }
+}
+
+rule preprocessed ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+ return [ targets.main-target-alternative
+ [ new preprocessed-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+}
+
+IMPORT $(__name__) : preprocessed : : preprocessed ;
+
+class compile-action : action
+{
+ import sequence ;
+
+ rule __init__ ( targets * : sources * : action-name : properties * )
+ {
+ action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
+ }
+
+ # For all virtual targets in the same dependency graph as self, i.e. those
+ # that belong to the same main target, add their directories to the include path.
+ #
+ rule adjust-properties ( property-set )
+ {
+ local s = [ $(self.targets[1]).creating-subvariant ] ;
+ return [ $(property-set).add-raw
+ [ $(s).implicit-includes "include" : H ] ] ;
+ }
+}
+
+
+# Declare a special compiler generator. The only thing it does is change the
+# type used to represent 'action' in the constructed dependency graph to
+# 'compile-action'. That class in turn adds additional include paths to handle
+# cases when a source file includes headers which are themselves generated.
+#
+class C-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) :
+ $(requirements) : $(optional-properties) ;
+ }
+
+ rule action-class ( )
+ {
+ return compile-action ;
+ }
+}
+
+
+rule register-c-compiler ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
+{
+ generators.register [ new C-compiling-generator $(id) : $(source-types) :
+ $(target-types) : $(requirements) : $(optional-properties) ] ;
+}
+
+# FIXME: this is ugly, should find a better way (we would like client code to
+# register all generators as "generators.some-rule" instead of
+# "some-module.some-rule".)
+#
+IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
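+
+# For illustration: a toolset module would typically register its compile
+# actions through this rule, along the lines of (hypothetical toolset name):
+#
+#   generators.register-c-compiler my-toolset.compile.c++ : CPP : OBJ : <toolset>my-toolset ;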
+
+
+# The generator class for handling EXE and SHARED_LIB creation.
+#
+class linking-generator : generator
+{
+ import path ;
+ import project ;
+ import property-set ;
+ import type ;
+
+ rule __init__ ( id
+ composing ? : # The generator will be composing if a non-empty
+ # string is passed or the parameter is not given. To
+ # make the generator non-composing, pass an empty
+ # string ("").
+ source-types + :
+ target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ # Add <library-path> properties for all searched libraries.
+ local extra ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = SEARCHED_LIB
+ {
+ local search = [ $(s).search ] ;
+ extra += <library-path>$(search) ;
+ }
+ }
+
+ # It is possible that sources include shared libraries that did not come
+ # from 'lib' targets, e.g. .so files specified as sources. In this case
+ # we have to add extra dll-path properties and propagate extra xdll-path
+ # properties so that an application linking to us will get an xdll-path to
+ # those libraries.
+ local extra-xdll-paths ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
+ {
+ # Unfortunately, we do not have a good way to find the path to a
+ # file, so use this nasty approach.
+ #
+ # TODO: This needs to be done better. One thing that is really
+ # broken with this is that it does not work correctly with
+ # projects having multiple source locations.
+ local p = [ $(s).project ] ;
+ local location = [ path.root [ $(s).name ]
+ [ $(p).get source-location ] ] ;
+ extra-xdll-paths += [ path.parent $(location) ] ;
+ }
+ }
+
+ # Hardcode DLL paths only when linking executables.
+ # Pros: do not need to relink libraries when installing.
+ # Cons: "standalone" libraries (plugins, python extensions) can not
+ # hardcode paths to dependent libraries.
+ if [ $(property-set).get <hardcode-dll-paths> ] = true
+ && [ type.is-derived $(self.target-types[1]) EXE ]
+ {
+ local xdll-path = [ $(property-set).get <xdll-path> ] ;
+ extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
+ }
+
+ if $(extra)
+ {
+ property-set = [ $(property-set).add-raw $(extra) ] ;
+ }
+
+ local result = [ generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ local ur ;
+ if $(result)
+ {
+ ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
+ ur = [ $(ur).add
+ [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
+ }
+ return $(ur) $(result) ;
+ }
+
+ rule extra-usage-requirements ( created-targets * : property-set )
+ {
+ local result = [ property-set.empty ] ;
+ local extra ;
+
+ # Add appropriate <xdll-path> usage requirements.
+ local raw = [ $(property-set).raw ] ;
+ if <link>shared in $(raw)
+ {
+ local paths ;
+ local pwd = [ path.pwd ] ;
+ for local t in $(created-targets)
+ {
+ if [ type.is-derived [ $(t).type ] SHARED_LIB ]
+ {
+ paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
+ }
+ }
+ extra += $(paths:G=<xdll-path>) ;
+ }
+
+ # We need to pass on <xdll-path> features that we got from sources because,
+ # if a shared library is built, an exe using it needs to know the paths to
+ # the other shared libraries this one depends on in order to be able to
+ # find them all at runtime.
+
+ # Just pass all features in property-set; it is theoretically possible
+ # that we will propagate <xdll-path> features explicitly specified by
+ # the user, but then the user is to blame for using an internal feature.
+ local values = [ $(property-set).get <xdll-path> ] ;
+ extra += $(values:G=<xdll-path>) ;
+
+ if $(extra)
+ {
+ result = [ property-set.create $(extra) ] ;
+ }
+ return $(result) ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local sources2 ; # Sources to pass to inherited rule.
+ local properties2 ; # Properties to pass to inherited rule.
+ local libraries ; # Library sources.
+
+ # Searched libraries are not passed as arguments to the linker but via
+ # some option. So, we pass them to the action using a property.
+ properties2 = [ $(property-set).raw ] ;
+ local fsa ;
+ local fst ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
+ {
+ local name = [ $(s).name ] ;
+ if [ $(s).shared ]
+ {
+ fsa += $(name) ;
+ }
+ else
+ {
+ fst += $(name) ;
+ }
+ }
+ else
+ {
+ sources2 += $(s) ;
+ }
+ }
+ properties2 += <find-shared-library>$(fsa:J=&&)
+ <find-static-library>$(fst:J=&&) ;
+
+ return [ generator.generated-targets $(sources2)
+ : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
+ }
+}
+
+
+rule register-linker ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new linking-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+
+# The generator class for handling STATIC_LIB creation.
+#
+class archive-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( id composing ? : source-types + : target-types +
+ : requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ local result = [ generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ # For static linking, if we get a library in source, we can not directly
+ # link to it so we need to cause our dependencies to link to that
+ # library. There are two approaches:
+ # - adding the library to the list of returned targets.
+ # - using the <library> usage requirements.
+ # The problem with the first is:
+ #
+ # lib a1 : : <file>liba1.a ;
+ # lib a2 : a2.cpp a1 : <link>static ;
+ # install dist : a2 ;
+ #
+ # here we would try to install 'a1', even though that is not necessary in
+ # the general case. With the second approach, even indirect dependants
+ # will link to the library, but that should not cause any harm. So, we take
+ # the second approach: return all LIB sources as <library> usage requirements
+ # together with the created targets, so that dependants link to them.
+ local usage-requirements ;
+ if [ $(property-set).get <link> ] = static
+ {
+ for local t in $(sources)
+ {
+ if [ type.is-derived [ $(t).type ] LIB ]
+ {
+ usage-requirements += <library>$(t) ;
+ }
+ }
+ }
+
+ usage-requirements = [ property-set.create $(usage-requirements) ] ;
+
+ return $(usage-requirements) $(result) ;
+ }
+}
+
+
+rule register-archiver ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new archive-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+
+# Generator that accepts everything and produces nothing. Useful as a general
+# fallback for toolset-specific actions like PCH generation.
+#
+class dummy-generator : generator
+{
+ import property-set ;
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ return [ property-set.empty ] ;
+ }
+}
+
+IMPORT $(__name__) : register-linker register-archiver
+ : : generators.register-linker generators.register-archiver ;
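+
+# For illustration: toolset modules can register their link and archive actions
+# through the two rules above, along the lines of (hypothetical toolset name):
+#
+#   generators.register-linker my-toolset.link : OBJ SEARCHED_LIB STATIC_LIB SHARED_LIB : EXE : <toolset>my-toolset ;
+#   generators.register-archiver my-toolset.archive : OBJ : STATIC_LIB : <toolset>my-toolset ;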
diff --git a/jam-files/boost-build/tools/cast.jam b/jam-files/boost-build/tools/cast.jam
new file mode 100644
index 000000000..6c84922f1
--- /dev/null
+++ b/jam-files/boost-build/tools/cast.jam
@@ -0,0 +1,91 @@
+# Copyright 2005 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target 'cast', used to change the type of a target. For example,
+# in the Qt library one wants two kinds of CPP files -- those that are just compiled
+# and those that are passed through the MOC tool.
+#
+# This is done with:
+#
+# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
+#
+# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
+# the cast rule will change the target type of widget.cpp to MOCCABLE-CPP, and Qt
+# support will run the MOC tool as part of the build process.
+#
+# At the moment, the 'cast' rule only works for non-derived (source) targets.
+#
+# TODO: The following comment is unclear or incorrect. Clean it up.
+# > Another solution would be to add a separate main target 'moc-them' that
+# > would moc all the passed sources, no matter what their type is, but I prefer
+# > cast, as defining a new target type + generator for that type is somewhat
+# > simpler than defining a main target rule.
+
+import "class" : new ;
+import errors ;
+import project ;
+import property-set ;
+import targets ;
+import type ;
+
+
+class cast-target-class : typed-target
+{
+ import type ;
+
+ rule __init__ ( name : project : type : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ typed-target.__init__ $(name) : $(project) : $(type) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local result ;
+ for local s in $(source-targets)
+ {
+ if ! [ class.is-a $(s) : file-target ]
+ {
+ import errors ;
+ errors.user-error Source to the 'cast' rule is not a file! ;
+ }
+ if [ $(s).action ]
+ {
+ import errors ;
+ errors.user-error Only non-derived targets are allowed for
+ 'cast'. : when building [ full-name ] ;
+ }
+ local r = [ $(s).clone-with-different-type $(self.type) ] ;
+ result += [ virtual-target.register $(r) ] ;
+ }
+ return [ property-set.empty ] $(result) ;
+ }
+}
+
+
+rule cast ( name type : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ local real-type = [ type.type-from-rule-name $(type) ] ;
+ if ! $(real-type)
+ {
+ errors.user-error No type corresponds to the main target rule name
+ '$(type)' : "Hint: try a lowercase name" ;
+ }
+
+ targets.main-target-alternative [ new cast-target-class $(name) : $(project)
+ : $(real-type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ;
+}
+
+
+IMPORT $(__name__) : cast : : cast ;
diff --git a/jam-files/boost-build/tools/clang-darwin.jam b/jam-files/boost-build/tools/clang-darwin.jam
new file mode 100644
index 000000000..a8abc7d6a
--- /dev/null
+++ b/jam-files/boost-build/tools/clang-darwin.jam
@@ -0,0 +1,170 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import clang ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset clang : platform : darwin ;
+
+toolset.inherit-generators clang-darwin
+ <toolset>clang <toolset-clang:platform>darwin
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override clang-darwin.prebuilt : builtin.lib-generator ;
+generators.override clang-darwin.prebuilt : builtin.prebuilt ;
+generators.override clang-darwin.searched-lib-generator : searched-lib-generator ;
+
+toolset.inherit-rules clang-darwin : gcc ;
+toolset.inherit-flags clang-darwin : gcc
+ : <inlining>off <inlining>on <inlining>full <optimization>space
+ <warnings>off <warnings>all <warnings>on
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# vectorization diagnostics
+feature vectorize : off on full ;
+
+# Initializes the clang-darwin toolset.
+# version is optional
+# name (default clang++) is used to invoke the specified clang compiler
+# compile and link options allow you to specify additional command line options for each version
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command clang-darwin : clang++
+ : $(command) ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang-darwin
+ : version $(version) ] ;
+
+ common.handle-options clang-darwin : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags clang-darwin darwin $(condition) ;
+
+}
+
+SPACE = " " ;
+
+flags clang-darwin.compile OPTIONS <cflags> ;
+flags clang-darwin.compile OPTIONS <cxxflags> ;
+# flags clang-darwin.compile INCLUDES <include> ;
+
+# Declare flags and action for compilation.
+toolset.flags clang-darwin.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags clang-darwin.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags clang-darwin.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags clang-darwin.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ;
+toolset.flags clang-darwin.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ;
+toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+flags clang-darwin ARFLAGS <archiveflags> ;
+
+# Default value. Mostly for the sake of clang-linux
+# that inherits from gcc, but does not have the same
+# logic to set the .AR variable. We could put the same
+# logic in clang-linux, but that is hardly worth the trouble
+# as on Linux, 'ar' is always available.
+.AR = ar ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here's rationale from
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a.
+ # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
+ # My program was crashing with absurd errors.
+ # After some debugging I traced it back to the fact that a1.o was *still*
+ # in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM
+ # onto the archive action. That makes archives fail to build on NT
+ # when they have many files because it will no longer execute the
+ # action directly and blow the line length limit. Instead we
+ # remove the file in a different action, just before the building
+ # of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "ranlib" -cs "$(<)"
+}
+
+flags clang-darwin.link USER_OPTIONS <linkflags> ;
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
diff --git a/jam-files/boost-build/tools/clang-linux.jam b/jam-files/boost-build/tools/clang-linux.jam
new file mode 100644
index 000000000..036d749e6
--- /dev/null
+++ b/jam-files/boost-build/tools/clang-linux.jam
@@ -0,0 +1,196 @@
+# Copyright (c) 2003 Michael Stevens
+# Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer)
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+
+import clang ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+import type ;
+import numbers ;
+
+feature.extend-subfeature toolset clang : platform : linux ;
+
+toolset.inherit-generators clang-linux
+ <toolset>clang <toolset-clang:platform>linux : gcc
+ : gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ;
+generators.override clang-linux.prebuilt : builtin.lib-generator ;
+generators.override clang-linux.prebuilt : builtin.prebuilt ;
+generators.override clang-linux.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override clang-linux.compile.c.pch : pch.default-c-pch-generator ;
+generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH
+ : <toolset>clang <toolset-clang:platform>linux : pth ;
+
+toolset.inherit-rules clang-linux : gcc ;
+toolset.inherit-flags clang-linux : gcc
+ : <inlining>off <inlining>on <inlining>full
+ <optimization>space <optimization>speed
+ <warnings>off <warnings>all <warnings>on ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] {
+ .debug-configuration = true ;
+}
+
+rule init ( version ? : command * : options * ) {
+ command = [ common.get-invocation-command clang-linux : clang++
+ : $(command) ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+
+ if $(command) {
+ version ?= [ MATCH "version ([0-9.]+)"
+ : [ SHELL "$(command-string) --version" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang-linux
+ : version $(version) ] ;
+
+ common.handle-options clang-linux : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags clang-linux gnu $(condition) ;
+}
+
+###############################################################################
+# Flags
+
+toolset.flags clang-linux.compile OPTIONS <cflags> ;
+toolset.flags clang-linux.compile OPTIONS <cxxflags> ;
+
+toolset.flags clang-linux.compile OPTIONS <optimization>off : ;
+toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ;
+
+# note: clang silently ignores some of these inlining options
+toolset.flags clang-linux.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags clang-linux.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags clang-linux.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ;
+toolset.flags clang-linux.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ;
+toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ;
+
+###############################################################################
+# C and C++ compilation
+
+rule compile.c++ ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ local pth-file = [ on $(<) return $(PCH_FILE) ] ;
+
+ if $(pth-file) {
+ DEPENDS $(<) : $(pth-file) ;
+ compile.c++.with-pch $(targets) : $(sources) ;
+ }
+ else {
+ compile.c++.without-pth $(targets) : $(sources) ;
+ }
+}
+
+actions compile.c++.without-pth {
+ "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+actions compile.c++.with-pch bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)"
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ local pth-file = [ on $(<) return $(PCH_FILE) ] ;
+
+ if $(pth-file) {
+ DEPENDS $(<) : $(pth-file) ;
+ compile.c.with-pch $(targets) : $(sources) ;
+ }
+ else {
+ compile.c.without-pth $(targets) : $(sources) ;
+ }
+}
+
+actions compile.c.without-pth
+{
+ "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c.with-pch bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+###############################################################################
+# PCH emission
+
+rule compile.c++.pch ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++.pch {
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
+}
+
+###############################################################################
+# Linking
+
+SPACE = " " ;
+
+rule link ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
+}
+
+actions link bind LIBRARIES {
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule link.dll ( targets * : sources * : properties * ) {
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
+}
+
+# Differ from 'link' above only by -shared.
+actions link.dll bind LIBRARIES {
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
diff --git a/jam-files/boost-build/tools/clang.jam b/jam-files/boost-build/tools/clang.jam
new file mode 100644
index 000000000..e0ac9a553
--- /dev/null
+++ b/jam-files/boost-build/tools/clang.jam
@@ -0,0 +1,27 @@
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This is a generic 'clang' toolset. Depending on the current system, it
+# forwards either to the 'clang-linux' or 'clang-darwin' module.
+
+import feature ;
+import os ;
+import toolset ;
+
+feature.extend toolset : clang ;
+feature.subfeature toolset clang : platform : : propagated link-incompatible ;
+
+rule init ( * : * )
+{
+ if [ os.name ] = MACOSX
+ {
+ toolset.using clang-darwin :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using clang-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+}
diff --git a/jam-files/boost-build/tools/common.jam b/jam-files/boost-build/tools/common.jam
new file mode 100644
index 000000000..df914d9d4
--- /dev/null
+++ b/jam-files/boost-build/tools/common.jam
@@ -0,0 +1,986 @@
+# Copyright 2003, 2005 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides actions common to all toolsets, such as creating directories and
+# removing files.
+
+import os ;
+import modules ;
+import utility ;
+import print ;
+import type ;
+import feature ;
+import errors ;
+import path ;
+import sequence ;
+import toolset ;
+import virtual-target ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ]
+{
+ .show-configuration = true ;
+}
+
+# Configurations
+#
+# The following class helps to manage toolset configurations. Each configuration
+# has a unique ID and one or more parameters. A typical example of a unique ID
+# is a condition generated by 'common.check-init-parameters' rule. Other kinds
+# of IDs can be used. Parameters may include any details about the configuration
+# like 'command', 'path', etc.
+#
+# A toolset configuration may be in one of the following states:
+#
+# - registered
+# Configuration has been registered (e.g. explicitly or by auto-detection
+# code) but has not yet been marked as used, i.e. 'toolset.using' rule has
+# not yet been called for it.
+# - used
+# Once called 'toolset.using' rule marks the configuration as 'used'.
+#
+# The main difference between the states above is that while a configuration is
+# 'registered' its options can be freely changed. This is useful in particular
+# for autodetection code - all detected configurations may be safely overwritten
+# by user code.
+
+class configurations
+{
+ import errors ;
+
+ rule __init__ ( )
+ {
+ }
+
+ # Registers a configuration.
+ #
+ # Returns 'true' if the configuration has been added and an empty value if
+ # it already exists. Reports an error if the configuration is 'used'.
+ #
+ rule register ( id )
+ {
+ if $(id) in $(self.used)
+ {
+ errors.error "common: the configuration '$(id)' is in use" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.all)
+ {
+ self.all += $(id) ;
+
+ # Indicate that a new configuration has been added.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Mark a configuration as 'used'.
+ #
+ # Returns 'true' if the state of the configuration has been changed to
+ # 'used' and an empty value if the state has not been changed. Reports an
+ # error if the configuration is not known.
+ #
+ rule use ( id )
+ {
+ if ! $(id) in $(self.all)
+ {
+ errors.error "common: the configuration '$(id)' is not known" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.used)
+ {
+ self.used += $(id) ;
+
+ # Indicate that the configuration has been marked as 'used'.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Return all registered configurations.
+ #
+ rule all ( )
+ {
+ return $(self.all) ;
+ }
+
+ # Return all used configurations.
+ #
+ rule used ( )
+ {
+ return $(self.used) ;
+ }
+
+ # Returns the value of a configuration parameter.
+ #
+ rule get ( id : param )
+ {
+ return $(self.$(param).$(id)) ;
+ }
+
+ # Sets the value of a configuration parameter.
+ #
+ rule set ( id : param : value * )
+ {
+ self.$(param).$(id) = $(value) ;
+ }
+}
+
+
+# The rule for checking toolset parameters. Trailing parameters should all be
+# parameter name/value pairs. The rule will check that each parameter either has
+# a value in each invocation or has no value in each invocation. Also, the rule
+# will check that the combination of all parameter values is unique in all
+# invocations.
+#
+# Each parameter name corresponds to a subfeature. This rule will declare a
+# subfeature the first time a non-empty parameter value is passed and will
+# extend it with all the values.
+#
+# The return value from this rule is a condition to be used for flags settings.
+#
+rule check-init-parameters ( toolset requirement * : * )
+{
+ local sig = $(toolset) ;
+ local condition = <toolset>$(toolset) ;
+ local subcondition ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local name = $($(index)[1]) ;
+ local value = $($(index)[2]) ;
+
+ if $(value)-is-not-empty
+ {
+ condition = $(condition)-$(value) ;
+ if $(.had-unspecified-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "no value was specified in earlier"
+ "initialization" : "an explicit value is specified now" ;
+ }
+ # The logic below is for the Intel compiler. It calls this rule with
+ # 'intel-linux' and 'intel-win' as the toolset, so we need to get the
+ # base part of the toolset name. We can not pass 'intel' as the toolset
+ # because in that case it would be impossible to register versionless
+ # intel-linux and intel-win toolsets of a specific version.
+ local t = $(toolset) ;
+ local m = [ MATCH ([^-]*)- : $(toolset) ] ;
+ if $(m)
+ {
+ t = $(m[1]) ;
+ }
+ if ! $(.had-value.$(toolset).$(name))
+ {
+ if ! $(.declared-subfeature.$(t).$(name))
+ {
+ feature.subfeature toolset $(t) : $(name) : : propagated ;
+ .declared-subfeature.$(t).$(name) = true ;
+ }
+ .had-value.$(toolset).$(name) = true ;
+ }
+ feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
+ subcondition += <toolset-$(t):$(name)>$(value) ;
+ }
+ else
+ {
+ if $(.had-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "an explicit value was specified in an"
+ "earlier initialization" : "no value is specified now" ;
+ }
+ .had-unspecified-value.$(toolset).$(name) = true ;
+ }
+ sig = $(sig)$(value:E="")- ;
+ }
+ if $(sig) in $(.all-signatures)
+ {
+ local message =
+ "duplicate initialization of $(toolset) with the following parameters: " ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local p = $($(index)) ;
+ if $(p)
+ {
+ message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
+ }
+ }
+ message += "previous initialization at $(.init-loc.$(sig))" ;
+ errors.user-error
+ $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
+ $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
+ }
+ .all-signatures += $(sig) ;
+ .init-loc.$(sig) = [ errors.nearest-user-location ] ;
+
+ # If we have a requirement, this version should only be applied under that
+ # condition. To accomplish this we add a toolset requirement that imposes
+ # the toolset subcondition, which encodes the version.
+ if $(requirement)
+ {
+ local r = <toolset>$(toolset) $(requirement) ;
+ r = $(r:J=,) ;
+ toolset.add-requirements $(r):$(subcondition) ;
+ }
+
+ # We add the requirements, if any, to the condition to scope the toolset
+ # variables and options to this specific version.
+ condition += $(requirement) ;
+
+ if $(.show-configuration)
+ {
+ ECHO notice: $(condition) ;
+ }
+ return $(condition:J=/) ;
+}
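+
+# For illustration: a call such as
+#
+#   [ common.check-init-parameters my-toolset : version 1.0 ]
+#
+# declares a 'version' subfeature of <toolset>my-toolset, extends it with the
+# value 1.0 and returns a condition such as <toolset>my-toolset-1.0 that can be
+# used in toolset.flags settings ('my-toolset' and the version are hypothetical).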
+
+
+# A helper rule to get the command to invoke some tool. If
+# 'user-provided-command' is not given, tries to find a binary named 'tool' in
+# PATH and in the passed 'additional-paths'. Otherwise, verifies that the first
+# element of 'user-provided-command' is an existing program.
+#
+# This rule returns the command to be used when invoking the tool. If we can not
+# find the tool, a warning is issued. If 'path-last' is specified, PATH is
+# checked after 'additional-paths' when searching for 'tool'.
+#
+rule get-invocation-command-nodefault ( toolset : tool :
+ user-provided-command * : additional-paths * : path-last ? )
+{
+ local command ;
+ if ! $(user-provided-command)
+ {
+ command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO "warning: toolset $(toolset) initialization: can not find tool $(tool)" ;
+ ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+ else
+ {
+ command = [ check-tool $(user-provided-command) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO "warning: toolset $(toolset) initialization: " ;
+ ECHO "warning: can not find user-provided command " '$(user-provided-command)' ;
+ ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+
+ return $(command) ;
+}
+
+
+# Same as get-invocation-command-nodefault, except that if no tool is found,
+# returns either the user-provided-command, if present, or the 'tool' parameter.
+#
+rule get-invocation-command ( toolset : tool : user-provided-command * :
+ additional-paths * : path-last ? )
+{
+ local result = [ get-invocation-command-nodefault $(toolset) : $(tool) :
+ $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
+
+ if ! $(result)
+ {
+ if $(user-provided-command)
+ {
+ result = $(user-provided-command) ;
+ }
+ else
+ {
+ result = $(tool) ;
+ }
+ }
+ return $(result) ;
+}
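+
+# For illustration: a toolset init rule typically resolves its compiler command
+# with this rule, as the clang toolsets above do (hypothetical names):
+#
+#   command = [ common.get-invocation-command my-toolset : my-compiler : $(command) ] ;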
+
+
+# Given an invocation command return the absolute path to the command. This
+# works even if command has no path element and was found on the PATH.
+#
+rule get-absolute-tool-path ( command )
+{
+ if $(command:D)
+ {
+ return $(command:D) ;
+ }
+ else
+ {
+ local m = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ;
+ return $(m[1]:D) ;
+ }
+}
+
+
+# Attempts to find a tool (binary) named 'name' in PATH and in 'additional-paths'.
+# If found in PATH, returns 'name'; if found in the additional paths, returns the
+# absolute name. If the tool is found in several directories, returns the
+# first path found. Otherwise, returns an empty string. If 'path-last' is
+# specified, PATH is searched after 'additional-paths'.
+#
+local rule find-tool ( name : additional-paths * : path-last ? )
+{
+ local path = [ path.programs-path ] ;
+ local match = [ path.glob $(path) : $(name) $(name).exe ] ;
+ local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ;
+
+ local result ;
+ if $(path-last)
+ {
+ result = $(additional-match) ;
+ if ! $(result) && $(match)
+ {
+ result = $(name) ;
+ }
+ }
+ else
+ {
+ if $(match)
+ {
+ result = $(name) ;
+ }
+ else
+ {
+ result = $(additional-match) ;
+ }
+ }
+ if $(result)
+ {
+ return [ path.native $(result[1]) ] ;
+ }
+}
+
+
+# Checks if 'command' can be found either in path or is a full name to an
+# existing file.
+#
+local rule check-tool-aux ( command )
+{
+ if $(command:D)
+ {
+ if [ path.exists $(command) ]
+ # Both NT and Cygwin will run .exe files by their unqualified names.
+ || ( [ os.on-windows ] && [ path.exists $(command).exe ] )
+ # Only NT will run .bat & .cmd files by their unqualified names.
+ || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] ||
+ [ path.exists $(command).cmd ] ) )
+ {
+ return $(command) ;
+ }
+ }
+ else
+ {
+ if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
+ {
+ return $(command) ;
+ }
+ }
+}
+
+
+# Checks that a tool can be invoked by 'command'. If command is not an absolute
+# path, checks if it can be found in 'path'. If command is an absolute path,
+# checks that it exists. Returns 'command' if ok or an empty string otherwise.
+#
+local rule check-tool ( xcommand + )
+{
+ if [ check-tool-aux $(xcommand[1]) ] ||
+ [ check-tool-aux $(xcommand[-1]) ]
+ {
+ return $(xcommand) ;
+ }
+}
+
+
+# Handle common options for toolset, specifically sets the following flag
+# variables:
+# - CONFIG_COMMAND to $(command)
+# - OPTIONS for compile to the value of <compileflags> in $(options)
+# - OPTIONS for compile.c to the value of <cflags> in $(options)
+# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options)
+# - OPTIONS for compile.fortran to the value of <fflags> in $(options)
+# - OPTIONS for link to the value of <linkflags> in $(options)
+#
+rule handle-options ( toolset : condition * : command * : options * )
+{
+ if $(.debug-configuration)
+ {
+ ECHO "notice: will use '$(command)' for $(toolset), condition $(condition:E=(empty))" ;
+ }
+
+ # The last parameter ('unchecked') says it is OK to set flags for another
+ # module.
+ toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command)
+ : unchecked ;
+
+ toolset.flags $(toolset).compile OPTIONS $(condition) :
+ [ feature.get-values <compileflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c OPTIONS $(condition) :
+ [ feature.get-values <cflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c++ OPTIONS $(condition) :
+ [ feature.get-values <cxxflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).link OPTIONS $(condition) :
+ [ feature.get-values <linkflags> : $(options) ] : unchecked ;
+}
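+
+# For illustration: a toolset init rule usually forwards its arguments here once
+# it has computed a condition, as the clang toolsets above do (hypothetical name):
+#
+#   common.handle-options my-toolset : $(condition) : $(command) : $(options) ;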
+
+
+# Returns the location of the "program files" directory on a Windows platform.
+#
+rule get-program-files-dir ( )
+{
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ ProgramFiles = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ ProgramFiles = "c:\\Program Files" ;
+ }
+ return $(ProgramFiles) ;
+}
+
+
+if [ os.name ] = NT
+{
+ RM = del /f /q ;
+ CP = copy /b ;
+ IGNORE = "2>nul >nul & setlocal" ;
+ LN ?= $(CP) ;
+ # Ugly hack to convince copy to set the timestamp of the
+ # destination to the current time by concatenating the
+ # source with a nonexistent file. Note that this requires
+ # /b (binary) as the default when concatenating files is /a (ascii).
+ WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ;
+}
+else
+{
+ RM = rm -f ;
+ CP = cp ;
+ LN = ln ;
+}
+
+
+rule rm-command ( )
+{
+ return $(RM) ;
+}
+
+
+rule copy-command ( )
+{
+ return $(CP) ;
+}
+
+
+if "\n" = "n"
+{
+ # Escape characters are not supported. Use ugly hacks that won't work,
+ # see below.
+ nl = "
+" ;
+ q = "" ;
+}
+else
+{
+ nl = "\n" ;
+ q = "\"" ;
+}
+
+# Returns the command needed to set an environment variable on the current
+# platform. The variable setting persists through all following commands and is
+# visible in the environment seen by subsequently executed commands. In other
+# words, on Unix systems, the variable is exported, which is consistent with the
+# only possible behavior on Windows systems.
+#
+rule variable-setting-command ( variable : value )
+{
+ if [ os.name ] = NT
+ {
+ return "set $(variable)=$(value)$(nl)" ;
+ }
+ else
+ {
+ # If bjam has no escape character support, the command below blows up on
+ # CYGWIN, since the $(nl) variable holds a Windows new-line \r\n sequence
+ # that messes up the executed export command, which then reports that the
+ # passed variable name is incorrect.
+ # But we have a check for cygwin in kernel/bootstrap.jam already.
+ return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ;
+ }
+}
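+
+# For example, mirroring the __test__ cases at the end of this module: on a
+# LINUX host this returns "FOO=\"bar\"\nexport FOO\n", while on NT it returns
+# "set FOO=bar\n".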
+
+
+# Returns a command that sets a named shell path variable to the given NATIVE
+# paths on the current platform.
+#
+rule path-variable-setting-command ( variable : paths * )
+{
+ local sep = [ os.path-separator ] ;
+ return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
+}
+
+
+# Returns a command that prepends the given paths to the named path variable on
+# the current platform.
+#
+rule prepend-path-variable-command ( variable : paths * )
+{
+ return [ path-variable-setting-command $(variable)
+ : $(paths) [ os.expand-variable $(variable) ] ] ;
+}
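+
+# For example, the docutils module below uses this to extend PYTHONPATH before
+# running a tool (a usage sketch; the actual directories depend on the setup):
+#
+#   common.prepend-path-variable-command PYTHONPATH
+#       : $(.docutils-dir) $(.docutils-dir)/extras ;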
+
+
+# Returns a command which can create a file. If 'r' is the result of the
+# invocation, then 'r foobar' will create foobar with unspecified content. What
+# happens if the file already exists is unspecified.
+#
+rule file-creation-command ( )
+{
+ if [ os.name ] = NT
+ {
+ # A few alternative implementations on Windows:
+ #
+ # 'type NUL >> '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also not change
+ # the target's timestamp in case the file already exists.
+ #
+ # 'type NUL > '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also destroy an
+ # already existing file by overwriting it with an empty one.
+ #
+ # I guess the best solution would be to allow Boost Jam to define
+ # built-in functions such as 'create a file', 'touch a file' or 'copy a
+ # file' which could be used from inside action code. That would allow
+ # completely portable operations without this kind of kludge.
+ # (22.02.2009.) (Jurko)
+ return "echo. > " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+# Returns a command that may be used for 'touching' files. It is not a real
+# 'touch' command on NT because it adds an empty line at the end of the file,
+# but it works with source files.
+#
+rule file-touch-command ( )
+{
+ if [ os.name ] = NT
+ {
+ return "echo. >> " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+rule MkDir
+{
+ # If dir exists, do not update it. Do this even for $(DOT).
+ NOUPDATE $(<) ;
+
+ if $(<) != $(DOT) && ! $($(<)-mkdir)
+ {
+ # Cheesy gate to prevent multiple invocations on same dir.
+ $(<)-mkdir = true ;
+
+ # Schedule the mkdir build action.
+ common.mkdir $(<) ;
+
+ # Prepare a Jam 'dirs' target that can be used to make the build only
+ # construct all the target directories.
+ DEPENDS dirs : $(<) ;
+
+ # Recursively create parent directories. $(<:P) = $(<)'s parent & we
+ # recurse until root.
+
+ local s = $(<:P) ;
+ if [ os.name ] = NT
+ {
+ switch $(s)
+ {
+ case *: : s = ;
+ case *:\\ : s = ;
+ }
+ }
+
+ if $(s)
+ {
+ if $(s) != $(<)
+ {
+ DEPENDS $(<) : $(s) ;
+ MkDir $(s) ;
+ }
+ else
+ {
+ NOTFILE $(s) ;
+ }
+ }
+ }
+}
+
+
+#actions MkDir1
+#{
+# mkdir "$(<)"
+#}
+
+# The following quick-fix actions should be replaced using the original MkDir1
+# action once Boost Jam gets updated to correctly detect different paths leading
+# up to the same filesystem target and trigger their build action only once.
+# (todo) (04.07.2008.) (Jurko)
+
+if [ os.name ] = NT
+{
+ actions mkdir
+ {
+ if not exist "$(<)\\" mkdir "$(<)"
+ }
+}
+else
+{
+ actions mkdir
+ {
+ mkdir -p "$(<)"
+ }
+}
+
+actions piecemeal together existing Clean
+{
+ $(RM) "$(>)"
+}
+
+
+rule copy
+{
+}
+
+
+actions copy
+{
+ $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)"
+}
+
+
+rule RmTemps
+{
+}
+
+
+actions quietly updated piecemeal together RmTemps
+{
+ $(RM) "$(>)" $(IGNORE)
+}
+
+
+actions hard-link
+{
+ $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
+ $(LN) "$(>)" "$(<)" $(NULL_OUT)
+}
+
+
+# Given a target, as given to a custom tag rule, returns a string formatted
+# according to the passed format. Format is a list of properties that are
+# represented in the result. For each element of the format the corresponding
+# target information is obtained and added to the result string. For all but
+# the literal, the format value is taken as the string to prepend to the output
+# to join the item to the rest of the result. If not given, "-" is used as the
+# joiner.
+#
+# The format options can be:
+#
+# <base>[joiner]
+# :: The basename of the target name.
+# <toolset>[joiner]
+# :: The abbreviated toolset tag being used to build the target.
+# <threading>[joiner]
+# :: Indication of a multi-threaded build.
+# <runtime>[joiner]
+# :: Collective tag of the build runtime.
+# <version:/version-feature | X.Y[.Z]/>[joiner]
+# :: Short version tag taken from the given "version-feature" in the
+# build properties. Or if not present, the literal value as the
+# version number.
+# <property:/property-name/>[joiner]
+# :: Direct lookup of the given property-name value in the build
+# properties. /property-name/ is a regular expression. E.g.
+# <property:toolset-.*:flavor> will match every toolset.
+# /otherwise/
+# :: The literal value of the format argument.
+#
+# For example this format:
+#
+# boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
+#
+# Might return:
+#
+# boost_thread-vc80-mt-gd-1_33.dll, or
+# boost_regex-vc80-gd-1_33.dll
+#
+# The returned name also has the target type specific prefix and suffix which
+# puts it in a ready form to use as the value from a custom tag rule.
+#
+rule format-name ( format * : name : type ? : property-set )
+{
+ local result = "" ;
+ for local f in $(format)
+ {
+ switch $(f:G)
+ {
+ case <base> :
+ result += $(name:B) ;
+
+ case <toolset> :
+ result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <threading> :
+ result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type)
+ : $(property-set) ] ] ;
+
+ case <runtime> :
+ result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <qt> :
+ result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <address-model> :
+ result += [ join-tag $(f:G=) : [ address-model-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <version:*> :
+ local key = [ MATCH <version:(.*)> : $(f:G) ] ;
+ local version = [ $(property-set).get <$(key)> ] ;
+ version ?= $(key) ;
+ version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ;
+ result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
+
+ case <property:*> :
+ local key = [ MATCH <property:(.*)> : $(f:G) ] ;
+ local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ;
+ if $(p0)
+ {
+ local p = [ $(property-set).get <$(p0)> ] ;
+ if $(p)
+ {
+ result += [ join-tag $(f:G=) : $(p) ] ;
+ }
+ }
+
+ case * :
+ result += $(f:G=) ;
+ }
+ }
+ result = [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) :
+ $(property-set) ] ;
+ return $(result) ;
+}
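+
+# A usage sketch: a project's custom tag rule can delegate here to build
+# decorated library names (the chosen format elements and the version feature
+# name are assumptions):
+#
+#   rule tag ( name : type ? : property-set )
+#   {
+#       return [ common.format-name
+#           <base> <toolset> <threading> <runtime> <version:my-version>
+#           : $(name) : $(type) : $(property-set) ] ;
+#   }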
+
+
+local rule join-tag ( joiner ? : tag ? )
+{
+ if ! $(joiner) { joiner = - ; }
+ return $(joiner)$(tag) ;
+}
+
+
+local rule toolset-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ switch [ $(property-set).get <toolset> ]
+ {
+ case borland* : tag += bcb ;
+ case clang* :
+ {
+ switch [ $(property-set).get <toolset-clang:platform> ]
+ {
+ case darwin : tag += clang-darwin ;
+ case linux : tag += clang ;
+ }
+ }
+ case como* : tag += como ;
+ case cw : tag += cw ;
+ case darwin* : tag += xgcc ;
+ case edg* : tag += edg ;
+ case gcc* :
+ {
+ switch [ $(property-set).get <toolset-gcc:flavor> ]
+ {
+ case *mingw* : tag += mgw ;
+ case * : tag += gcc ;
+ }
+ }
+ case intel :
+ if [ $(property-set).get <toolset-intel:platform> ] = win
+ {
+ tag += iw ;
+ }
+ else
+ {
+ tag += il ;
+ }
+ case kcc* : tag += kcc ;
+ case kylix* : tag += bck ;
+ #case metrowerks* : tag += cw ;
+ #case mingw* : tag += mgw ;
+ case mipspro* : tag += mp ;
+ case msvc* : tag += vc ;
+ case qcc* : tag += qcc ;
+ case sun* : tag += sw ;
+ case tru64cxx* : tag += tru ;
+ case vacpp* : tag += xlc ;
+ }
+ local version = [ MATCH "<toolset.*version>([0123456789]+)[.]([0123456789]*)"
+ : $(properties) ] ;
+ # For historical reasons, vc6.0 and vc7.0 use different naming.
+ if $(tag) = vc
+ {
+ if $(version[1]) = 6
+ {
+ # Cancel minor version.
+ version = 6 ;
+ }
+ else if $(version[1]) = 7 && $(version[2]) = 0
+ {
+ version = 7 ;
+ }
+ }
+ # On intel, version is not added, because it does not matter and it is the
+ # version of vc used as backend that matters. Ideally, we should encode the
+ # backend version but that would break compatibility with V1.
+ if $(tag) = iw
+ {
+ version = ;
+ }
+
+ # On borland, version is not added for compatibility with V1.
+ if $(tag) = bcb
+ {
+ version = ;
+ }
+
+ tag += $(version) ;
+
+ return $(tag:J=) ;
+}
+
+
+local rule threading-tag ( name : type ? : property-set )
+{
+ local tag = ;
+ local properties = [ $(property-set).raw ] ;
+ if <threading>multi in $(properties) { tag = mt ; }
+
+ return $(tag:J=) ;
+}
+
+
+local rule runtime-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ if <runtime-link>static in $(properties) { tag += s ; }
+
+ # This is an ugly thing. In V1, there is code to automatically detect which
+ # properties affect a target. So, if <runtime-debugging> does not affect the
+ # gcc toolset, the tag rules will not even see <runtime-debugging>. Similar
+ # functionality in V2 is not implemented yet, so we just check for toolsets
+ # known to care about runtime debugging.
+ if ( <toolset>msvc in $(properties) ) ||
+ ( <stdlib>stlport in $(properties) ) ||
+ ( <toolset-intel:platform>win in $(properties) )
+ {
+ if <runtime-debugging>on in $(properties) { tag += g ; }
+ }
+
+ if <python-debugging>on in $(properties) { tag += y ; }
+ if <variant>debug in $(properties) { tag += d ; }
+ if <stdlib>stlport in $(properties) { tag += p ; }
+ if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }
+
+ return $(tag:J=) ;
+}
+
+# Create a tag for the Qt library version
+# "<qt>4.6.0" will result in tag "qt460"
+local rule qt-tag ( name : type ? : property-set )
+{
+ local properties = [ $(property-set).get <qt> ] ;
+ local version = [ MATCH "([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*)"
+ : $(properties) ] ;
+ local tag = "qt"$(version:J=) ;
+ return $(tag) ;
+}
+
+# Create a tag for the address-model
+# <address-model>64 will simply generate "64"
+local rule address-model-tag ( name : type ? : property-set )
+{
+ local tag = ;
+ local version = [ $(property-set).get <address-model> ] ;
+ return $(version) ;
+}
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local nl = "
+" ;
+
+ local save-os = [ modules.peek os : .name ] ;
+
+ modules.poke os : .name : LINUX ;
+
+ assert.result "PATH=\"foo:bar:baz\"$(nl)export PATH$(nl)"
+ : path-variable-setting-command PATH : foo bar baz ;
+
+ assert.result "PATH=\"foo:bar:$PATH\"$(nl)export PATH$(nl)"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : NT ;
+
+ assert.result "set PATH=foo;bar;baz$(nl)"
+ : path-variable-setting-command PATH : foo bar baz ;
+
+ assert.result "set PATH=foo;bar;%PATH%$(nl)"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : $(save-os) ;
+}
diff --git a/jam-files/boost-build/tools/como-linux.jam b/jam-files/boost-build/tools/como-linux.jam
new file mode 100644
index 000000000..5c554c8f8
--- /dev/null
+++ b/jam-files/boost-build/tools/como-linux.jam
@@ -0,0 +1,103 @@
+# Copyright 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include a version
+# number.
+#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+import common ;
+import generators ;
+
+import unix ;
+import como ;
+
+feature.extend-subfeature toolset como : platform : linux ;
+
+toolset.inherit-generators como-linux
+ <toolset>como <toolset-como:platform>linux : unix ;
+generators.override como-linux.prebuilt : builtin.lib-generator ;
+generators.override como-linux.searched-lib-generator : searched-lib-generator ;
+toolset.inherit-flags como-linux : unix ;
+toolset.inherit-rules como-linux : gcc ;
+
+generators.register-c-compiler como-linux.compile.c++ : CPP : OBJ
+ : <toolset>como <toolset-como:platform>linux ;
+generators.register-c-compiler como-linux.compile.c : C : OBJ
+ : <toolset>como <toolset-como:platform>linux ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters como-linux
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command como-linux : como
+ : $(command) ] ;
+
+ common.handle-options como-linux : $(condition) : $(command) : $(options) ;
+}
+
+
+flags como-linux C++FLAGS <exception-handling>off : --no_exceptions ;
+flags como-linux C++FLAGS <exception-handling>on : --exceptions ;
+
+flags como-linux CFLAGS <inlining>off : --no_inlining ;
+flags como-linux CFLAGS <inlining>on <inlining>full : --inlining ;
+
+flags como-linux CFLAGS <optimization>off : -O0 ;
+flags como-linux CFLAGS <optimization>speed : -O3 ;
+flags como-linux CFLAGS <optimization>space : -Os ;
+
+flags como-linux CFLAGS <debug-symbols>on : -g ;
+flags como-linux LINKFLAGS <debug-symbols>on : -g ;
+
+flags como-linux FINDLIBS : m ;
+flags como-linux FINDLIBS : rt ;
+
+flags como-linux CFLAGS <cflags> ;
+flags como-linux C++FLAGS <cxxflags> ;
+flags como-linux DEFINES <define> ;
+flags como-linux UNDEFS <undef> ;
+flags como-linux HDRS <include> ;
+flags como-linux STDHDRS <sysinclude> ;
+flags como-linux LINKFLAGS <linkflags> ;
+flags como-linux ARFLAGS <arflags> ;
+
+flags como-linux.link LIBRARIES <library-file> ;
+flags como-linux.link LINKPATH <library-path> ;
+flags como-linux.link FINDLIBS-ST <find-static-library> ;
+flags como-linux.link FINDLIBS-SA <find-shared-library> ;
+
+flags como-linux.link RPATH <dll-path> ;
+flags como-linux.link RPATH_LINK <xdll-path> ;
+
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(LINKFLAGS) -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
+}
+
+actions link.dll bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(LINKFLAGS) -shared -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
+}
+
+actions compile.c
+{
+ $(CONFIG_COMMAND) -c --c99 --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -tused -c --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
+}
+
+actions archive
+{
+ ar rcu $(<) $(>)
+}
diff --git a/jam-files/boost-build/tools/como-win.jam b/jam-files/boost-build/tools/como-win.jam
new file mode 100644
index 000000000..d21a70d6f
--- /dev/null
+++ b/jam-files/boost-build/tools/como-win.jam
@@ -0,0 +1,117 @@
+# (C) Copyright David Abrahams 2001.
+# (C) Copyright MetaCommunications, Inc. 2004.
+
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include a version
+# number.
+#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
+
+import common ;
+import como ;
+import feature ;
+import generators ;
+import toolset : flags ;
+
+feature.extend-subfeature toolset como : platform : win ;
+
+
+# Initializes the Comeau toolset for Windows. The command is the command which
+# invokes the compiler. You should either set the environment variable
+# COMO_XXX_INCLUDE, where XXX is the used backend (as described in the
+# documentation), or pass that as part of the command, e.g.:
+#
+# using como-win : 4.3 : "set COMO_BCC_INCLUDE=C:/include &&" como.exe ;
+#
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters como-win
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command como-win : como.exe :
+ $(command) ] ;
+
+ common.handle-options como-win : $(condition) : $(command) : $(options) ;
+}
+
+generators.register-c-compiler como-win.compile.c++ : CPP : OBJ
+ : <toolset>como <toolset-como:platform>win ;
+generators.register-c-compiler como-win.compile.c : C : OBJ
+ : <toolset>como <toolset-como:platform>win ;
+
+
+generators.register-linker como-win.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>como <toolset-como:platform>win ;
+
+# Note that the status of shared library support is not clear, so we do not define
+# the link.dll generator.
+generators.register-archiver como-win.archive
+ : OBJ : STATIC_LIB
+ : <toolset>como <toolset-como:platform>win ;
+
+
+flags como-win C++FLAGS <exception-handling>off : --no_exceptions ;
+flags como-win C++FLAGS <exception-handling>on : --exceptions ;
+
+flags como-win CFLAGS <inlining>off : --no_inlining ;
+flags como-win CFLAGS <inlining>on <inlining>full : --inlining ;
+
+
+# The following seem to be VC-specific options. At least, when I uncomment
+# them, Comeau with bcc as backend reports that the bcc32 invocation failed.
+#
+#flags como-win CFLAGS <debug-symbols>on : /Zi ;
+#flags como-win CFLAGS <optimization>off : /Od ;
+
+
+flags como-win CFLAGS <cflags> ;
+flags como-win CFLAGS : -D_WIN32 ; # Make sure that we get the Boost Win32 platform config header.
+flags como-win CFLAGS <threading>multi : -D_MT ; # Make sure that our config knows that threading is on.
+flags como-win C++FLAGS <cxxflags> ;
+flags como-win DEFINES <define> ;
+flags como-win UNDEFS <undef> ;
+flags como-win HDRS <include> ;
+flags como-win SYSHDRS <sysinclude> ;
+flags como-win LINKFLAGS <linkflags> ;
+flags como-win ARFLAGS <arflags> ;
+flags como-win NO_WARN <no-warn> ;
+
+#flags como-win STDHDRS : $(COMO_INCLUDE_PATH) ;
+#flags como-win STDLIB_PATH : $(COMO_STDLIB_PATH)$(SLASH) ;
+
+flags como-win LIBPATH <library-path> ;
+flags como-win LIBRARIES <library-file> ;
+flags como-win FINDLIBS <find-shared-library> ;
+flags como-win FINDLIBS <find-static-library> ;
+
+nl = "
+" ;
+
+
+# For como, we repeat all libraries so that dependencies are always resolved.
+#
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) --no_version --no_prelink_verbose $(LINKFLAGS) -o "$(<[1]:S=)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" "$(FINDLIBS:S=.lib)"
+}
+
+actions compile.c
+{
+ $(CONFIG_COMMAND) -c --c99 -e5 --no_version --display_error_number --diag_suppress=9,21,161,748,940,962 -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<:D=)" "$(>)"
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -c -e5 --no_version --no_prelink_verbose --display_error_number --long_long --diag_suppress=9,21,161,748,940,962 --diag_error=461 -D__STL_LONG_LONG -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<)" "$(>)"
+}
+
+actions archive
+{
+ $(CONFIG_COMMAND) --no_version --no_prelink_verbose --prelink_object @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
+ lib $(ARFLAGS) /nologo /out:"$(<:S=.lib)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
+}
diff --git a/jam-files/boost-build/tools/como.jam b/jam-files/boost-build/tools/como.jam
new file mode 100644
index 000000000..04a05a94b
--- /dev/null
+++ b/jam-files/boost-build/tools/como.jam
@@ -0,0 +1,29 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This is a generic 'como' toolset. Depending on the current system, it
+# forwards to either the 'como-linux' or the 'como-win' module.
+
+import feature ;
+import os ;
+import toolset ;
+
+feature.extend toolset : como ;
+feature.subfeature toolset como : platform : : propagated link-incompatible ;
+
+rule init ( * : * )
+{
+ if [ os.name ] = LINUX
+ {
+ toolset.using como-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using como-win :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+
+ }
+}
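+
+# A configuration sketch: a user-config.jam can request the generic toolset and
+# let this rule dispatch to the platform-specific module:
+#
+#   using como ;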
diff --git a/jam-files/boost-build/tools/convert.jam b/jam-files/boost-build/tools/convert.jam
new file mode 100644
index 000000000..ac1d70101
--- /dev/null
+++ b/jam-files/boost-build/tools/convert.jam
@@ -0,0 +1,62 @@
+# Copyright (c) 2009 Vladimir Prus
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements the 'convert' target that takes a bunch of sources and
+# tries to convert each one to the specified type.
+#
+# For example:
+#
+# convert objects obj : a.cpp b.cpp ;
+#
+
+import targets ;
+import generators ;
+import project ;
+import type ;
+import "class" : new ;
+
+class convert-target-class : typed-target
+{
+ rule __init__ ( name : project : type
+ : sources * : requirements * : default-build * : usage-requirements * )
+ {
+ typed-target.__init__ $(name) : $(project) : $(type)
+ : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local r = [ generators.construct $(self.project) : $(self.type)
+ : [ property-set.create [ $(property-set).raw ] # [ feature.expand
+ <main-target-type>$(self.type) ]
+ # ]
+ : $(source-targets) ] ;
+ if ! $(r)
+ {
+ errors.error "unable to construct" [ full-name ] ;
+ }
+
+ return $(r) ;
+ }
+
+}
+
+rule convert ( name type : sources * : requirements * : default-build *
+ : usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ # This is a circular module dependency, so it must be imported here
+ modules.import targets ;
+ targets.main-target-alternative
+ [ new convert-target-class $(name) : $(project) : [ type.type-from-rule-name $(type) ]
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
+}
+IMPORT $(__name__) : convert : : convert ;
diff --git a/jam-files/boost-build/tools/cw-config.jam b/jam-files/boost-build/tools/cw-config.jam
new file mode 100644
index 000000000..1211b7c04
--- /dev/null
+++ b/jam-files/boost-build/tools/cw-config.jam
@@ -0,0 +1,34 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for CodeWarrior toolset. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ for local R in 9 8 7
+ {
+ local cw-path = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
+ : "PATH" ] ;
+ local cw-version = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
+ : "VERSION" ] ;
+ cw-path ?= [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior for Windows\\$(R).0"
+ : "PATH" ] ;
+ cw-version ?= $(R).0 ;
+
+ if $(cw-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using cw ":" $(cw-version) ":" "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
+ }
+ using cw : $(cw-version) : "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
+ }
+ }
+}
diff --git a/jam-files/boost-build/tools/cw.jam b/jam-files/boost-build/tools/cw.jam
new file mode 100644
index 000000000..ddcbfeb2b
--- /dev/null
+++ b/jam-files/boost-build/tools/cw.jam
@@ -0,0 +1,246 @@
+# Copyright (C) Reece H Dunn 2004
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# based on the msvc.jam toolset
+
+import property ;
+import generators ;
+import os ;
+import type ;
+import toolset : flags ;
+import errors : error ;
+import feature : feature get-values ;
+import path ;
+import sequence : unique ;
+import common ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+feature.extend toolset : cw ;
+
+toolset.add-requirements <toolset>cw,<runtime-link>shared:<threading>multi ;
+
+nl = "
+" ;
+
+rule init ( version ? : command * : options * )
+{
+ # TODO: fix the $(command[1]) = $(compiler) issue
+
+ setup = [ get-values <setup> : $(options) ] ;
+ setup ?= cwenv.bat ;
+ compiler = [ get-values <compiler> : $(options) ] ;
+ compiler ?= mwcc ;
+ linker = [ get-values <linker> : $(options) ] ;
+ linker ?= mwld ;
+
+ local condition = [ common.check-init-parameters cw :
+ version $(version) ] ;
+
+ command = [ common.get-invocation-command cw : mwcc.exe : $(command) :
+ [ default-paths $(version) ] ] ;
+
+ common.handle-options cw : $(condition) : $(command) : $(options) ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ local tool-root = $(command) ;
+
+ setup = $(tool-root)\\$(setup) ;
+
+ # map the batch file in setup so it can be executed
+
+ other-tools = $(tool-root:D) ;
+ root ?= $(other-tools:D) ;
+
+ flags cw.link RUN_PATH $(condition) :
+ "$(root)\\Win32-x86 Support\\Libraries\\Runtime"
+ "$(root)\\Win32-x86 Support\\Libraries\\Runtime\\Libs\\MSL_All-DLLs" ;
+
+ setup = "set \"CWFOLDER="$(root)"\" && call \""$(setup)"\" > nul " ;
+
+ if [ os.name ] = NT
+ {
+ setup = $(setup)"
+" ;
+ }
+ else
+ {
+ setup = "cmd /S /C "$(setup)" \"&&\" " ;
+ }
+
+ # bind the setup command to the tool so it can be executed before the
+ # command
+
+ local prefix = $(setup) ;
+
+ flags cw.compile .CC $(condition) : $(prefix)$(compiler) ;
+ flags cw.link .LD $(condition) : $(prefix)$(linker) ;
+ flags cw.archive .LD $(condition) : $(prefix)$(linker) ;
+
+ if [ MATCH ^([89]\\.) : $(version) ]
+ {
+ if [ os.name ] = NT
+ {
+ # The runtime libraries
+ flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>off : -runtime ss ;
+ flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>on : -runtime ssd ;
+
+ flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>off : -runtime sm ;
+ flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>on : -runtime smd ;
+
+ flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>off : -runtime dm ;
+ flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>on : -runtime dmd ;
+ }
+ }
+}
+
+
+local rule default-paths ( version ? ) # FIXME
+{
+ local possible-paths ;
+ local ProgramFiles = [ common.get-program-files-dir ] ;
+
+ # TODO: add support for cw8 and cw9 detection
+
+ local version-6-path = $(ProgramFiles)"\\Metrowerks\\CodeWarrior" ;
+ possible-paths += $(version-6-path) ;
+
+ # perform post-processing
+
+ possible-paths
+ = $(possible-paths)"\\Other Metrowerks Tools\\Command Line Tools" ;
+
+ possible-paths += [ modules.peek : PATH Path path ] ;
+
+ return $(possible-paths) ;
+}
+
+
+
+
+## declare generators
+
+generators.register-c-compiler cw.compile.c++ : CPP : OBJ : <toolset>cw ;
+generators.register-c-compiler cw.compile.c : C : OBJ : <toolset>cw ;
+
+generators.register-linker cw.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>cw
+ ;
+generators.register-linker cw.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : SHARED_LIB IMPORT_LIB
+ : <toolset>cw
+ ;
+
+generators.register-archiver cw.archive
+ : OBJ
+ : STATIC_LIB
+ : <toolset>cw
+ ;
+
+## compilation phase
+
+flags cw WHATEVER <toolset-cw:version> ;
+
+flags cw.compile CFLAGS <debug-symbols>on : -g ;
+flags cw.compile CFLAGS <optimization>off : -O0 ;
+flags cw.compile CFLAGS <optimization>speed : -O4,p ;
+flags cw.compile CFLAGS <optimization>space : -O4,s ;
+flags cw.compile CFLAGS <inlining>off : -inline off ;
+flags cw.compile CFLAGS <inlining>on : -inline on ;
+flags cw.compile CFLAGS <inlining>full : -inline all ;
+flags cw.compile CFLAGS <exception-handling>off : -Cpp_exceptions off ;
+
+
+flags cw.compile CFLAGS <rtti>on : -RTTI on ;
+flags cw.compile CFLAGS <rtti>off : -RTTI off ;
+
+flags cw.compile CFLAGS <warnings>on : -w on ;
+flags cw.compile CFLAGS <warnings>off : -w off ;
+flags cw.compile CFLAGS <warnings>all : -w all ;
+flags cw.compile CFLAGS <warnings-as-errors>on : -w error ;
+
+flags cw.compile USER_CFLAGS <cflags> : ;
+flags cw.compile.c++ USER_CFLAGS <cxxflags> : ;
+
+flags cw.compile DEFINES <define> ;
+flags cw.compile UNDEFS <undef> ;
+flags cw.compile INCLUDES <include> ;
+
+actions compile.c
+{
+ $(.CC) -c -cwd include -lang c -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
+}
+actions compile.c++
+{
+ $(.CC) -c -cwd include -lang c++ -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
+}
+
+## linking phase
+
+flags cw.link DEF_FILE <def-file> ;
+
+flags cw LINKFLAGS : -search ;
+flags cw LINKFLAGS <debug-symbols>on : -g ;
+flags cw LINKFLAGS <user-interface>console : -subsystem console ;
+flags cw LINKFLAGS <user-interface>gui : -subsystem windows ;
+flags cw LINKFLAGS <user-interface>wince : -subsystem wince ;
+flags cw LINKFLAGS <user-interface>native : -subsystem native ;
+flags cw LINKFLAGS <user-interface>auto : -subsystem auto ;
+
+flags cw LINKFLAGS <main-target-type>LIB/<link>static : -library ;
+
+flags cw.link USER_LINKFLAGS <linkflags> ;
+flags cw.link LINKPATH <library-path> ;
+
+flags cw.link FINDLIBS_ST <find-static-library> ;
+flags cw.link FINDLIBS_SA <find-shared-library> ;
+flags cw.link LIBRARY_OPTION <toolset>cw : "" : unchecked ;
+flags cw.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
+
+rule link.dll ( targets + : sources * : properties * )
+{
+ DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
+}
+
+if [ os.name ] in NT
+{
+ actions archive
+ {
+ if exist "$(<[1])" DEL "$(<[1])"
+ $(.LD) -library -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+ }
+}
+else # cygwin
+{
+ actions archive
+ {
+ _bbv2_out_="$(<)"
+ if test -f "$_bbv2_out_" ; then
+ _bbv2_existing_="$(<:W)"
+ fi
+ $(.LD) -library -o "$(<:W)" $_bbv2_existing_ @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+ }
+}
+
+actions link bind DEF_FILE
+{
+ $(.LD) -o "$(<[1]:W)" -L"$(LINKPATH)" $(LINKFLAGS) $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
+actions link.dll bind DEF_FILE
+{
+ $(.LD) -shared -o "$(<[1]:W)" -implib "$(<[2]:W)" -L"$(LINKPATH)" $(LINKFLAGS) -f"$(DEF_FILE)" $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
diff --git a/jam-files/boost-build/tools/darwin.jam b/jam-files/boost-build/tools/darwin.jam
new file mode 100644
index 000000000..283dface9
--- /dev/null
+++ b/jam-files/boost-build/tools/darwin.jam
@@ -0,0 +1,568 @@
+# Copyright 2003 Christopher Currie
+# Copyright 2006 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2005-2007 Mat Marcus
+# Copyright 2005-2007 Adobe Systems Incorporated
+# Copyright 2007-2010 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
+# for explanation why it's a separate toolset.
+
+import feature : feature ;
+import toolset : flags ;
+import type ;
+import common ;
+import generators ;
+import path : basename ;
+import version ;
+import property-set ;
+import regex ;
+import errors ;
+
+## Use a framework.
+feature framework : : free ;
+
+## The MacOSX version to compile for, which maps to the SDK to use (sysroot).
+feature macosx-version : : propagated link-incompatible symmetric optional ;
+
+## The minimal MacOSX version to target.
+feature macosx-version-min : : propagated optional ;
+
+## A dependency, that is forced to be included in the link.
+feature force-load : : free dependency incidental ;
+
+#############################################################################
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+feature.extend toolset : darwin ;
+import gcc ;
+toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ;
+
+generators.override darwin.prebuilt : builtin.prebuilt ;
+generators.override darwin.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override darwin.compile.c.pch : pch.default-c-pch-generator ;
+generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH : <toolset>darwin : gch ;
+
+toolset.inherit-rules darwin : gcc : localize ;
+toolset.inherit-flags darwin : gcc
+ : <runtime-link>static
+ <architecture>arm/<address-model>32
+ <architecture>arm/<address-model>64
+ <architecture>arm/<instruction-set>
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ <architecture>x86/<instruction-set>
+ <architecture>power/<address-model>32
+ <architecture>power/<address-model>64
+ <architecture>power/<instruction-set> ;
+
+# Options:
+#
+# <root>PATH
+# Platform root path. The common autodetection will set this to
+# "/Developer". And when a command is given it will be set to
+# the corresponding "*.platform/Developer" directory.
+#
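+# For example, a configuration might pass an explicit compiler command and
+# platform root (a sketch; the command and root path are assumptions):
+#
+#   using darwin : 4.2 : /usr/bin/g++-4.2 : <root>/Developer ;
+#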
+rule init ( version ? : command * : options * : requirement * )
+{
+ # First time around, figure out what the host OSX version is.
+ if ! $(.host-osx-version)
+ {
+ .host-osx-version = [ MATCH "^([0-9.]+)"
+ : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: OSX version on this machine is $(.host-osx-version) ;
+ }
+ }
+
+ # - The root directory of the tool install.
+ local root = [ feature.get-values <root> : $(options) ] ;
+
+ # - The bin directory where to find the commands to execute.
+ local bin ;
+
+ # - The configured compile driver command.
+ local command = [ common.get-invocation-command darwin : g++ : $(command) ] ;
+
+ # The version as reported by the compiler
+ local real-version ;
+
+ # - Autodetect the root and bin dir if not given.
+ if $(command)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[1]) ] ;
+ if $(bin) = "/usr/bin"
+ {
+ root ?= /Developer ;
+ }
+ else
+ {
+ local r = $(bin:D) ;
+ r = $(r:D) ;
+ root ?= $(r) ;
+ }
+ }
+
+ # - Autodetect the version if not given.
+ if $(command)
+ {
+ # - The 'command' variable can have multiple elements. When calling
+ # the SHELL builtin we need a single string.
+ local command-string = $(command:J=" ") ;
+ real-version = [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ version ?= $(real-version) ;
+ }
+
+ .real-version.$(version) = $(real-version) ;
+
+ # - Define the condition for this toolset instance.
+ local condition =
+ [ common.check-init-parameters darwin $(requirement) : version $(version) ] ;
+
+ # - Set the toolset generic common options.
+ common.handle-options darwin : $(condition) : $(command) : $(options) ;
+
+ # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates.
+ if $(real-version) < "4.0.0"
+ {
+ flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ;
+ }
+ # - GCC 4.2 and higher in Darwin does not have -Wno-long-double.
+ if $(real-version) < "4.2.0"
+ {
+ flags darwin.compile OPTIONS $(condition) : -Wno-long-double ;
+ }
+
+ # - Set the link flags common with the GCC toolset.
+ gcc.init-link-flags darwin darwin $(condition) ;
+
+ # - The symbol strip program.
+ local strip ;
+ if <striper> in $(options)
+ {
+ # We can turn off strip by specifying it as empty, in which case we
+ # switch to using the linker to do the strip.
+ flags darwin.link.dll OPTIONS
+ $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ;
+ flags darwin.link.dll OPTIONS
+ $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ;
+ flags darwin.link OPTIONS
+ $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ;
+ flags darwin.link OPTIONS
+ $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ;
+ }
+ else
+ {
+ # Otherwise we need to find a strip program to use, and hence also tell
+ # the link action that we need to use a strip post-process.
+ flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ;
+ strip =
+ [ common.get-invocation-command darwin
+ : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ;
+ flags darwin.link .STRIP $(condition) : $(strip[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using strip for $(condition) at $(strip[1]) ;
+ }
+ }
+
+ # - The archive builder (libtool is the default, as creating
+ # archives in darwin is complicated).
+ local archiver =
+ [ common.get-invocation-command darwin
+ : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ;
+ flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using archiver for $(condition) at $(archiver[1]) ;
+ }
+
+ # - Initialize the SDKs available in the root for this tool.
+ local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ;
+
+ #~ ECHO --- ;
+ #~ ECHO --- bin :: $(bin) ;
+ #~ ECHO --- root :: $(root) ;
+ #~ ECHO --- version :: $(version) ;
+ #~ ECHO --- condition :: $(condition) ;
+ #~ ECHO --- strip :: $(strip) ;
+ #~ ECHO --- archiver :: $(archiver) ;
+ #~ ECHO --- sdks :: $(sdks) ;
+ #~ ECHO --- ;
+ #~ EXIT ;
+}
+
+# Add and set options for a discovered SDK version.
+local rule init-sdk ( condition * : root ? : version + : version-feature ? )
+{
+ local rule version-to-feature ( version + )
+ {
+ switch $(version[1])
+ {
+ case iphone* :
+ {
+ return $(version[1])-$(version[2-]:J=.) ;
+ }
+ case mac* :
+ {
+ return $(version[2-]:J=.) ;
+ }
+ case * :
+ {
+ return $(version:J=.) ;
+ }
+ }
+ }
+
+ if $(version-feature)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(sdk) ;
+ }
+
+ # Add the version to the features for specifying them.
+ if ! $(version-feature) in [ feature.values macosx-version ]
+ {
+ feature.extend macosx-version : $(version-feature) ;
+ }
+ if ! $(version-feature) in [ feature.values macosx-version-min ]
+ {
+ feature.extend macosx-version-min : $(version-feature) ;
+ }
+
+ # Set the flags the version needs to compile with, first
+ # generic options.
+ flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature)
+ : -isysroot $(sdk) ;
+ flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature)
+ : -isysroot $(sdk) ;
+
+ # Then device variation options.
+ switch $(version[1])
+ {
+ case iphonesim* :
+ {
+ local N = $(version[2]) ;
+ if ! $(version[3]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; }
+ else { N += 0$(version[3]) ; }
+ if ! $(version[4]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; }
+ else { N += 0$(version[4]) ; }
+ N = $(N:J=) ;
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ }
+
+ case iphone* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -miphoneos-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -miphoneos-version-min=$(version[2-]:J=.) ;
+ }
+
+ case mac* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -mmacosx-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -mmacosx-version-min=$(version[2-]:J=.) ;
+ }
+ }
+
+ return $(version-feature) ;
+ }
+ else if $(version[4])
+ {
+ # We have a patch version of an SDK. We want to set up
+ # both the specific patch version, and the minor version.
+ # So we recurse to set up the minor version in addition to the patch version.
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ]
+ [ init-sdk $(condition) : $(root)
+ : $(version) : [ version-to-feature $(version) ] ] ;
+ }
+ else
+ {
+ # Yes, this is intentionally recursive.
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version) : [ version-to-feature $(version) ] ] ;
+ }
+}
+
+# Determine the MacOSX SDK versions installed and their locations.
+local rule init-available-sdk-versions ( condition * : root ? )
+{
+ root ?= /Developer ;
+ local sdks-root = $(root)/SDKs ;
+ local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ;
+ local result ;
+ for local sdk in $(sdks)
+ {
+ local sdk-match = [ MATCH ([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)? : $(sdk:D=) ] ;
+ local sdk-platform = $(sdk-match[1]:L) ;
+ local sdk-version = $(sdk-match[2-]) ;
+ if $(sdk-version)
+ {
+ switch $(sdk-platform)
+ {
+ case macosx :
+ {
+ sdk-version = mac $(sdk-version) ;
+ }
+ case iphoneos :
+ {
+ sdk-version = iphone $(sdk-version) ;
+ }
+ case iphonesimulator :
+ {
+ sdk-version = iphonesim $(sdk-version) ;
+ }
+ case * :
+ {
+ sdk-version = $(sdk-version:J=-) ;
+ }
+ }
+ result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+# Generic options.
+flags darwin.compile OPTIONS <flags> ;
+
+# The following adds objective-c support to darwin.
+# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759
+
+generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ;
+generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;
+
+rule setup-address-model ( targets * : sources * : properties * )
+{
+ local ps = [ property-set.create $(properties) ] ;
+ local arch = [ $(ps).get <architecture> ] ;
+ local address-model = [ $(ps).get <address-model> ] ;
+ local osx-version = [ $(ps).get <macosx-version> ] ;
+ local gcc-version = [ $(ps).get <toolset-darwin:version> ] ;
+ gcc-version = $(.real-version.$(gcc-version)) ;
+ local options ;
+
+ local support-ppc64 = 1 ;
+
+ osx-version ?= $(.host-osx-version) ;
+
+ switch $(osx-version)
+ {
+ case iphone* :
+ {
+ support-ppc64 = ;
+ }
+
+ case * :
+ if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. ] : 10 6 ]
+ {
+ # When targeting 10.6:
+ # - gcc 4.2 will give a compiler error if ppc64 compilation is requested
+ # - gcc 4.0 will compile fine, somehow, but then fail at link time
+ support-ppc64 = ;
+ }
+ }
+ switch $(arch)
+ {
+ case combined :
+ {
+ if $(address-model) = 32_64 {
+ if $(support-ppc64) {
+ options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ;
+ } else {
+ # Build 3-way binary
+ options = -arch i386 -arch ppc -arch x86_64 ;
+ }
+ } else if $(address-model) = 64 {
+ if $(support-ppc64) {
+ options = -arch x86_64 -arch ppc64 ;
+ } else {
+ errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
+ }
+ } else {
+ options = -arch i386 -arch ppc ;
+ }
+ }
+
+ case x86 :
+ {
+ if $(address-model) = 32_64 {
+ options = -arch i386 -arch x86_64 ;
+ } else if $(address-model) = 64 {
+ options = -arch x86_64 ;
+ } else {
+ options = -arch i386 ;
+ }
+ }
+
+ case power :
+ {
+ if ! $(support-ppc64)
+ && ( $(address-model) = 32_64 || $(address-model) = 64 )
+ {
+ errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
+ }
+
+ if $(address-model) = 32_64 {
+ options = -arch ppc -arch ppc64 ;
+ } else if $(address-model) = 64 {
+ options = -arch ppc64 ;
+ } else {
+ options = -arch ppc ;
+ }
+ }
+
+ case arm :
+ {
+ options = -arch armv6 ;
+ }
+ }
+
+ if $(options)
+ {
+ OPTIONS on $(targets) += $(options) ;
+ }
+}
+
+rule setup-threading ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+}
+
+rule setup-fpic ( targets * : sources * : properties * )
+{
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile.m ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x objective-c" ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.m
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.mm ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x objective-c++" ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.mm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Set the max header padding to allow renaming of libs for installation.
+flags darwin.link.dll OPTIONS : -headerpad_max_install_names ;
+
+# To link the static runtime we need to link to all the core runtime libraries.
+flags darwin.link OPTIONS <runtime-link>static
+ : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ;
+
+# Strip as much as possible when optimizing.
+flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+
+# Dynamic/shared linking.
+flags darwin.compile OPTIONS <link>shared : -dynamic ;
+
+# Misc options.
+flags darwin.compile OPTIONS : -no-cpp-precomp -gdwarf-2 -fexceptions ;
+#~ flags darwin.link OPTIONS : -fexceptions ;
+
+# Add the framework names to use.
+flags darwin.link FRAMEWORK <framework> ;
+
+#
+flags darwin.link FORCE_LOAD <force-load> ;
+
+# This flag is useful for debugging the link step;
+# uncomment it to see what libtool is doing under the hood.
+#~ flags darwin.link.dll OPTIONS : -Wl,-v ;
+
+_ = " " ;
+
+# set up the -F option to include the paths to any frameworks used.
+local rule prepare-framework-path ( target + )
+{
+ # The -framework option only takes the basename of the framework.
+ # The -F option specifies the directories where a framework
+ # is searched for. So, if we find the <framework> feature
+ # with some path, we need to generate a corresponding -F option.
+ local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ;
+
+ # Be sure to generate no -F if there's no path.
+ for local framework-path in $(framework-paths)
+ {
+ if $(framework-path) != ""
+ {
+ FRAMEWORK_PATH on $(target) += -F$(framework-path) ;
+ }
+ }
+}
+
+rule link ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ prepare-framework-path $(<) ;
+}
+
+# Note that using strip without any options was reported to result in broken
+# binaries, at least on OS X 10.5.5, see:
+# http://svn.boost.org/trac/boost/ticket/2347
+# So we pass -S -x.
+actions link bind LIBRARIES FORCE_LOAD
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+ $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)"
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ prepare-framework-path $(<) ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+}
+
+# We use libtool instead of ar to support universal binary linking
+# TODO: Find a way to use the underlying tools, i.e. lipo, to do this.
+actions piecemeal archive
+{
+ "$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)"
+}
diff --git a/jam-files/boost-build/tools/dmc.jam b/jam-files/boost-build/tools/dmc.jam
new file mode 100644
index 000000000..8af8725a8
--- /dev/null
+++ b/jam-files/boost-build/tools/dmc.jam
@@ -0,0 +1,134 @@
+# Digital Mars C++
+
+# (C) Copyright Christof Meerwald 2003.
+# (C) Copyright Aleksey Gurtovoy 2004.
+# (C) Copyright Arjan Knepper 2006.
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include version number.
+#//<a href="http://www.digitalmars.com/">Digital<br>Mars C++</a>
+
+import feature generators common ;
+import toolset : flags ;
+import sequence regex ;
+
+feature.extend toolset : dmc ;
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters dmc : version $(version) ] ;
+
+ local command = [ common.get-invocation-command dmc : dmc : $(command) ] ;
+ command ?= dmc ;
+
+ common.handle-options dmc : $(condition) : $(command) : $(options) ;
+
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ root = $(command:D) ;
+
+ if $(root)
+ {
+ # The DMC linker is sensitive to the direction of slashes and
+ # won't link if forward slashes are used in the command.
+ root = [ sequence.join [ regex.split $(root) "/" ] : "\\" ] ;
+ flags dmc .root $(condition) : $(root)\\bin\\ ;
+ }
+ else
+ {
+ flags dmc .root $(condition) : "" ;
+ }
+}
+
+
+# Declare generators
+generators.register-linker dmc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>dmc ;
+generators.register-linker dmc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>dmc ;
+
+generators.register-archiver dmc.archive : OBJ : STATIC_LIB : <toolset>dmc ;
+generators.register-c-compiler dmc.compile.c++ : CPP : OBJ : <toolset>dmc ;
+generators.register-c-compiler dmc.compile.c : C : OBJ : <toolset>dmc ;
+
+
+# Declare flags
+# dmc optlink has some limitations on the amount of debug info included, therefore only line numbers are enabled in debug builds.
+# flags dmc.compile OPTIONS <debug-symbols>on : -g ;
+flags dmc.compile OPTIONS <debug-symbols>on : -gl ;
+flags dmc.link OPTIONS <debug-symbols>on : /CO /NOPACKF /DEBUGLI ;
+flags dmc.link OPTIONS <debug-symbols>off : /PACKF ;
+
+flags dmc.compile OPTIONS <optimization>off : -S -o+none ;
+flags dmc.compile OPTIONS <optimization>speed : -o+time ;
+flags dmc.compile OPTIONS <optimization>space : -o+space ;
+flags dmc.compile OPTIONS <exception-handling>on : -Ae ;
+flags dmc.compile OPTIONS <rtti>on : -Ar ;
+# FIXME:
+# When compiling sources to be linked into a shared lib (dll), the -WD cflag should be used.
+# When compiling sources to be linked into a static lib (lib) or an executable, the -WA cflag should be used.
+# But for some reason the -WD cflag is always in use.
+# flags dmc.compile OPTIONS <link>shared : -WD ;
+# flags dmc.compile OPTIONS <link>static : -WA ;
+
+# Note that these two options actually imply multithreading support on DMC
+# because there is no single-threaded dynamic runtime library. Specifying
+# <threading>multi would be a bad idea, though, because no option would be
+# matched when the build uses the default settings of <runtime-link>dynamic
+# and <threading>single.
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>shared : -ND ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>shared : -ND ;
+
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>single : ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>single : ;
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>multi : -D_MT ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>multi : -D_MT ;
+
+flags dmc.compile OPTIONS : <cflags> ;
+flags dmc.compile.c++ OPTIONS : <cxxflags> ;
+
+flags dmc.compile DEFINES : <define> ;
+flags dmc.compile INCLUDES : <include> ;
+
+flags dmc.link <linkflags> ;
+flags dmc.archive OPTIONS <arflags> ;
+
+flags dmc LIBPATH <library-path> ;
+flags dmc LIBRARIES <library-file> ;
+flags dmc FINDLIBS <find-library-sa> ;
+flags dmc FINDLIBS <find-library-st> ;
+
+actions together link bind LIBRARIES
+{
+ "$(.root)link" $(OPTIONS) /NOI /DE /XN "$(>)" , "$(<[1])" ,, $(LIBRARIES) user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
+}
+
+actions together link.dll bind LIBRARIES
+{
+ echo LIBRARY "$(<[1])" > $(<[2]:B).def
+ echo DESCRIPTION 'A Library' >> $(<[2]:B).def
+ echo EXETYPE NT >> $(<[2]:B).def
+ echo SUBSYSTEM WINDOWS >> $(<[2]:B).def
+ echo CODE EXECUTE READ >> $(<[2]:B).def
+ echo DATA READ WRITE >> $(<[2]:B).def
+ "$(.root)link" $(OPTIONS) /NOI /DE /XN /ENTRY:_DllMainCRTStartup /IMPLIB:"$(<[2])" "$(>)" $(LIBRARIES) , "$(<[1])" ,, user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
+}
+
+actions compile.c
+{
+ "$(.root)dmc" -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(.root)dmc" -cpp -c -Ab $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
+}
+
+actions together piecemeal archive
+{
+ "$(.root)lib" $(OPTIONS) -c -n -p256 "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/docutils.jam b/jam-files/boost-build/tools/docutils.jam
new file mode 100644
index 000000000..bf0616174
--- /dev/null
+++ b/jam-files/boost-build/tools/docutils.jam
@@ -0,0 +1,84 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for docutils ReStructuredText processing.
+
+import type ;
+import scanner ;
+import generators ;
+import os ;
+import common ;
+import toolset ;
+import path ;
+import feature : feature ;
+import property ;
+
+.initialized = ;
+
+type.register ReST : rst ;
+
+class rst-scanner : common-scanner
+{
+ rule __init__ ( paths * )
+ {
+ common-scanner.__init__ . $(paths) ;
+ }
+
+ rule pattern ( )
+ {
+ return "^[ ]*\\.\\.[ ]+include::[ ]+([^
+]+)"
+ "^[ ]*\\.\\.[ ]+image::[ ]+([^
+]+)"
+ "^[ ]*\\.\\.[ ]+figure::[ ]+([^
+]+)"
+ ;
+ }
+}
+
+scanner.register rst-scanner : include ;
+type.set-scanner ReST : rst-scanner ;
+
+generators.register-standard docutils.html : ReST : HTML ;
+
+rule init ( docutils-dir ? : tools-dir ? )
+{
+ docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ;
+ tools-dir ?= $(docutils-dir)/tools ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+ .docutils-dir = $(docutils-dir) ;
+ .tools-dir = $(tools-dir:R="") ;
+
+ .setup = [
+ common.prepend-path-variable-command PYTHONPATH
+ : $(.docutils-dir) $(.docutils-dir)/extras ] ;
+ }
+}
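+
+# A configuration sketch (the docutils checkout location is an assumption):
+#
+#   using docutils : /path/to/docutils ;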
+
+rule html ( target : source : properties * )
+{
+ if ! [ on $(target) return $(RST2XXX) ]
+ {
+ local python-cmd = [ property.select <python.interpreter> : $(properties) ] ;
+ RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ;
+ }
+}
+
+
+feature docutils : : free ;
+feature docutils-html : : free ;
+feature docutils-cmd : : free ;
+toolset.flags docutils COMMON-FLAGS : <docutils> ;
+toolset.flags docutils HTML-FLAGS : <docutils-html> ;
+toolset.flags docutils RST2XXX : <docutils-cmd> ;
+
+actions html
+{
+ $(.setup)
+ "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
+}
+
diff --git a/jam-files/boost-build/tools/doxygen-config.jam b/jam-files/boost-build/tools/doxygen-config.jam
new file mode 100644
index 000000000..2cd2ccaeb
--- /dev/null
+++ b/jam-files/boost-build/tools/doxygen-config.jam
@@ -0,0 +1,11 @@
+#~ Copyright 2005, 2006 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for Doxygen tools. To use, just import this module.
+
+import toolset : using ;
+
+ECHO "warning: doxygen-config.jam is deprecated. Use 'using doxygen ;' instead." ;
+
+using doxygen ;
diff --git a/jam-files/boost-build/tools/doxygen.jam b/jam-files/boost-build/tools/doxygen.jam
new file mode 100644
index 000000000..8394848dd
--- /dev/null
+++ b/jam-files/boost-build/tools/doxygen.jam
@@ -0,0 +1,776 @@
+# Copyright 2003, 2004 Douglas Gregor
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Copyright 2006 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines rules to handle generation of various outputs from source
+# files documented with doxygen comments. The supported transformations are:
+#
+# * Source -> Doxygen XML -> BoostBook XML
+# * Source -> Doxygen HTML
+#
+# The type of transformation is selected based on the target requested. For
+# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an
+# empty suffix, will produce a <target>.xml and <target>.boostbook. For Doxygen
+# HTML specifying a target with an ".html" suffix will produce a directory
+# <target> with the Doxygen html files, and a <target>.html file redirecting to
+# that directory.
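+#
+# As an illustrative sketch (target and header names here are hypothetical,
+# and the toolset is assumed to have been configured with "using doxygen ;"),
+# a Jamfile could request both kinds of output:
+#
+#   doxygen autodoc.xml  : [ glob include/*.hpp ]
+#                        : <doxygen:param>EXTRACT_ALL=YES ;
+#   doxygen autodoc.html : [ glob include/*.hpp ] ;
+#
+# The first declaration yields autodoc.xml and autodoc.boostbook; the second
+# yields an autodoc/ directory of HTML plus an autodoc.html redirect page.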
+
+import "class" : new ;
+import targets ;
+import feature ;
+import property ;
+import generators ;
+import boostbook ;
+import type ;
+import path ;
+import print ;
+import regex ;
+import stage ;
+import project ;
+import xsltproc ;
+import make ;
+import os ;
+import toolset : flags ;
+import alias ;
+import common ;
+import modules ;
+import project ;
+import utility ;
+import errors ;
+
+
+# Used to specify extra configuration parameters. These get translated
+# into a doxyfile which configures the building of the docs.
+feature.feature doxygen:param : : free ;
+
+# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option.
+feature.feature prefix : : free ;
+
+# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option.
+feature.feature reftitle : : free ;
+
+# Which processor to use for various translations from Doxygen.
+feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ;
+
+# To generate, or not, index sections.
+feature.feature doxygen.doxproc.index : no yes : propagated incidental ;
+
+# The ID for the resulting BoostBook reference section.
+feature.feature doxygen.doxproc.id : : free ;
+
+# The title for the resulting BoostBook reference section.
+feature.feature doxygen.doxproc.title : : free ;
+
+# Location for images when generating XML
+feature.feature doxygen:xml-imagedir : : free ;
+
+# Indicates whether the entire directory should be deleted
+feature.feature doxygen.rmdir : off on : optional incidental ;
+
+# Doxygen configuration input file.
+type.register DOXYFILE : doxyfile ;
+
+# Doxygen XML multi-file output.
+type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ;
+
+# Doxygen XML coalesced output.
+type.register DOXYGEN_XML : doxygen : XML ;
+
+# Doxygen HTML multifile directory.
+type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ;
+
+# Redirection HTML file to HTML multifile directory.
+type.register DOXYGEN_HTML : : HTML ;
+
+type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ;
+
+# Initialize the Doxygen module. Parameters are:
+# name: the name of the 'doxygen' executable. If not specified, the name
+# 'doxygen' will be used
+#
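+# For example (the path is only illustrative):
+#
+#   using doxygen : /usr/local/bin/doxygen ;
+#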
+rule init ( name ? )
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ .doxproc = [ modules.binding $(__name__) ] ;
+ .doxproc = $(.doxproc:D)/doxproc.py ;
+
+ generators.register-composing doxygen.headers-to-doxyfile
+ : H HPP CPP : DOXYFILE ;
+ generators.register-standard doxygen.run
+ : DOXYFILE : DOXYGEN_XML_MULTIFILE ;
+ generators.register-standard doxygen.xml-dir-to-boostbook
+ : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ;
+ generators.register-standard doxygen.xml-to-boostbook
+ : DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ;
+ generators.register-standard doxygen.collect
+ : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ;
+ generators.register-standard doxygen.run
+ : DOXYFILE : DOXYGEN_HTML_MULTIFILE ;
+ generators.register-standard doxygen.html-redirect
+ : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ;
+ generators.register-standard doxygen.copy-latex-pngs
+ : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ;
+
+ IMPORT $(__name__) : doxygen : : doxygen ;
+ }
+
+ if $(name)
+ {
+ modify-config ;
+ .doxygen = $(name) ;
+ check-doxygen ;
+ }
+
+ if ! $(.doxygen)
+ {
+ check-doxygen ;
+ }
+}
+
+rule freeze-config ( )
+{
+ if ! $(.initialized)
+ {
+ errors.user-error "doxygen must be initialized before it can be used." ;
+ }
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+
+ if [ .is-cygwin ]
+ {
+ .is-cygwin = true ;
+ }
+ }
+}
+
+rule modify-config ( )
+{
+ if $(.config-frozen)
+ {
+ errors.user-error "Cannot change doxygen after it has been used." ;
+ }
+}
+
+rule check-doxygen ( )
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using doxygen ":" $(.doxygen) ;
+ }
+ local extra-paths ;
+ if [ os.name ] = NT
+ {
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ extra-paths = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ extra-paths = "C:\\Program Files" ;
+ }
+ }
+ .doxygen = [ common.get-invocation-command doxygen :
+ doxygen : $(.doxygen) : $(extra-paths) ] ;
+}
+
+rule name ( )
+{
+ freeze-config ;
+ return $(.doxygen) ;
+}
+
+rule .is-cygwin ( )
+{
+ if [ os.on-windows ]
+ {
+ local file = [ path.make [ modules.binding $(__name__) ] ] ;
+ local dir = [ path.native
+ [ path.join [ path.parent $(file) ] doxygen ] ] ;
+ local command =
+ "cd \"$(dir)\" && \"$(.doxygen)\" windows-paths-check.doxyfile 2>&1" ;
+ result = [ SHELL $(command) ] ;
+ if [ MATCH "(Parsing file /)" : $(result) ]
+ {
+ return true ;
+ }
+ }
+}
+
+# Runs Doxygen on the given Doxygen configuration file (the source) to generate
+# the Doxygen files. The output is dumped according to the settings in the
+# Doxygen configuration file, not according to the target! Because of this, we
+# essentially "touch" the target file, in effect making it look like we have
+# really written something useful to it. Anyone that uses this action must deal
+# with this behavior.
+#
+actions doxygen-action
+{
+ $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)"
+}
+
+
+# Runs the Python doxproc XML processor.
+#
+actions doxproc
+{
+ python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)"
+}
+
+
+rule translate-path ( path )
+{
+ freeze-config ;
+ if [ os.on-windows ]
+ {
+ if [ os.name ] = CYGWIN
+ {
+ if $(.is-cygwin)
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return $(path:W) ;
+ }
+ }
+ else
+ {
+ if $(.is-cygwin)
+ {
+ match = [ MATCH ^(.):(.*) : $(path) ] ;
+ if $(match)
+ {
+ return /cygdrive/$(match[1])$(match[2]:T) ;
+ }
+ else
+ {
+ return $(path:T) ;
+ }
+ }
+ else
+ {
+ return $(path) ;
+ }
+ }
+ }
+ else
+ {
+ return $(path) ;
+ }
+}
+
+
+# Generates a doxygen configuration file (doxyfile) given a set of C++ sources
+# and a property list that may contain <doxygen:param> features.
+#
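+# For example, a hypothetical requirement
+#
+#   <doxygen:param>EXTRACT_PRIVATE=YES
+#
+# is written to the generated doxyfile verbatim as "EXTRACT_PRIVATE = YES",
+# while OUTPUT_DIRECTORY values are additionally path-translated and quoted.
+#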
+rule headers-to-doxyfile ( target : sources * : properties * )
+{
+ local text = "# Generated by Boost.Build version 2" ;
+
+ local output-dir ;
+
+ # Translate <doxygen:param> into command line flags.
+ for local param in [ feature.get-values <doxygen:param> : $(properties) ]
+ {
+ local namevalue = [ regex.match ([^=]*)=(.*) : $(param) ] ;
+ if $(namevalue[1]) = OUTPUT_DIRECTORY
+ {
+ output-dir = [ translate-path
+ [ utility.unquote $(namevalue[2]) ] ] ;
+ text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
+ }
+ else
+ {
+ text += "$(namevalue[1]) = $(namevalue[2])" ;
+ }
+ }
+
+ if ! $(output-dir)
+ {
+ output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ;
+ text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
+ }
+
+ local headers = ;
+ for local header in $(sources:G=)
+ {
+ header = [ translate-path $(header) ] ;
+ headers += \"$(header)\" ;
+ }
+
+ # Doxygen generates LaTeX by default, so disable it unconditionally, or at
+ # least until someone needs, and hence writes support for, LaTeX output.
+ text += "GENERATE_LATEX = NO" ;
+ text += "INPUT = $(headers:J= )" ;
+ print.output $(target) plain ;
+ print.text $(text) : true ;
+}
+
+
+# Run Doxygen. See doxygen-action for a description of the strange properties of
+# this rule.
+#
+rule run ( target : source : properties * )
+{
+ freeze-config ;
+ if <doxygen.rmdir>on in $(properties)
+ {
+ local output-dir =
+ [ path.make
+ [ MATCH <doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*) :
+ $(properties) ] ] ;
+ local html-dir =
+ [ path.make
+ [ MATCH <doxygen:param>HTML_OUTPUT=(.*) :
+ $(properties) ] ] ;
+ if $(output-dir) && $(html-dir) &&
+ [ path.glob $(output-dir) : $(html-dir) ]
+ {
+ HTMLDIR on $(target) =
+ [ path.native [ path.join $(output-dir) $(html-dir) ] ] ;
+ rm-htmldir $(target) ;
+ }
+ }
+ doxygen-action $(target) : $(source) ;
+ NAME on $(target) = $(.doxygen) ;
+ RM on $(target) = [ modules.peek common : RM ] ;
+ *.XML on $(target) =
+ [ path.native
+ [ path.join
+ [ path.make [ on $(target) return $(LOCATE) ] ]
+ $(target:B:S=)
+ *.xml ] ] ;
+}
+
+if [ os.name ] = NT
+{
+ RMDIR = rmdir /s /q ;
+}
+else
+{
+ RMDIR = rm -rf ;
+}
+
+actions quietly rm-htmldir
+{
+ $(RMDIR) $(HTMLDIR)
+}
+
+# The rules below require BoostBook stylesheets, so we need some code to check
+# that the boostbook module has actually been initialized.
+#
+rule check-boostbook ( )
+{
+ if ! [ modules.peek boostbook : .initialized ]
+ {
+ ECHO "error: the boostbook module is not initialized" ;
+ ECHO "error: you've attempted to use the 'doxygen' toolset, " ;
+ ECHO "error: which requires BoostBook," ;
+ ECHO "error: but never initialized BoostBook." ;
+ EXIT "error: Hint: add 'using boostbook ;' to your user-config.jam" ;
+ }
+}
+
+
+# Collect the set of Doxygen XML files into a single XML source file that can be
+# handled by an XSLT processor. The source is completely ignored (see
+# doxygen-action), because this action picks up the Doxygen XML index file
+# xml/index.xml. This is because we can not teach Doxygen to act like a NORMAL
+# program and take a "-o output.xml" argument (grrrr). The target of the
+# collection will be a single Doxygen XML file.
+#
+rule collect ( target : source : properties * )
+{
+ check-boostbook ;
+ local collect-xsl-dir
+ = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ;
+ local source-path
+ = [ path.make [ on $(source) return $(LOCATE) ] ] ;
+ local collect-path
+ = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ;
+ local native-path
+ = [ path.native $(collect-path) ] ;
+ local real-source
+ = [ path.native [ path.join $(collect-path) index.xml ] ] ;
+ xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl)
+ : <xsl:param>doxygen.xml.path=$(native-path) ;
+}
+
+
+# Translate Doxygen XML into BoostBook.
+#
+rule xml-to-boostbook ( target : source : properties * )
+{
+ check-boostbook ;
+ local xsl-dir = [ boostbook.xsl-dir ] ;
+ local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen
+ doxygen2boostbook.xsl ] ] ;
+
+ local xslt-properties = $(properties) ;
+ for local prefix in [ feature.get-values <prefix> : $(properties) ]
+ {
+ xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ;
+ }
+ for local title in [ feature.get-values <reftitle> : $(properties) ]
+ {
+ xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ;
+ }
+
+ xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ;
+}
+
+
+flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes : --enable-index ;
+flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ;
+flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ;
+
+
+rule xml-dir-to-boostbook ( target : source : properties * )
+{
+ DOXPROC on $(target) = $(.doxproc) ;
+
+ LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ;
+
+ doxygen.doxproc $(target) : $(source:S=) ;
+}
+
+
+# Generate the HTML redirect to HTML dir index.html file.
+#
+rule html-redirect ( target : source : properties * )
+{
+ local uri = "$(target:B)/index.html" ;
+ print.output $(target) plain ;
+ print.text
+"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\"
+ \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">
+<html xmlns=\"http://www.w3.org/1999/xhtml\">
+<head>
+ <meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" />
+
+ <title></title>
+</head>
+
+<body>
+ Automatic redirection failed, please go to <a href=
+ \"$(uri)\">$(uri)</a>.
+</body>
+</html>
+"
+ : true ;
+}
+
+rule copy-latex-pngs ( target : source : requirements * )
+{
+ local directory = [ path.native
+ [ feature.get-values <doxygen:xml-imagedir> :
+ $(requirements) ] ] ;
+
+ local location = [ on $(target) return $(LOCATE) ] ;
+
+ local pdf-location =
+ [ path.native
+ [ path.join
+ [ path.make $(location) ]
+ [ path.make $(directory) ] ] ] ;
+ local html-location =
+ [ path.native
+ [ path.join
+ .
+ html
+ [ path.make $(directory) ] ] ] ;
+
+ common.MkDir $(pdf-location) ;
+ common.MkDir $(html-location) ;
+
+ DEPENDS $(target) : $(pdf-location) $(html-location) ;
+
+ if [ os.name ] = NT
+ {
+ CP on $(target) = copy /y ;
+ FROM on $(target) = \\*.png ;
+ TOHTML on $(target) = .\\html\\$(directory) ;
+ TOPDF on $(target) = \\$(directory) ;
+ }
+ else
+ {
+ CP on $(target) = cp ;
+ FROM on $(target) = /*.png ;
+ TOHTML on $(target) = ./html/$(directory) ;
+ TOPDF on $(target) = $(target:D)/$(directory) ;
+ }
+}
+
+actions copy-latex-pngs
+{
+ $(CP) $(>:S=)$(FROM) $(TOHTML)
+ $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF)
+ echo "Stamped" > "$(<)"
+}
+
+# Building LaTeX images for Doxygen XML depends
+# on latex, dvips, and gs being in your PATH.
+# This is true for most Unix installs, but
+# not on Win32, where you will need to install
+# MiKTeX and Ghostscript and add these tools
+# to your path.
+
+actions check-latex
+{
+ latex -version >$(<)
+}
+
+actions check-dvips
+{
+ dvips -version >$(<)
+}
+
+if [ os.name ] = "NT"
+{
+ actions check-gs
+ {
+ gswin32c -version >$(<)
+ }
+}
+else
+{
+ actions check-gs
+ {
+ gs -version >$(<)
+ }
+}
+
+rule check-tools ( )
+{
+ if ! $(.check-tools-targets)
+ {
+ # Find the root project.
+ local root-project = [ project.current ] ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config
+ {
+ root-project =
+ [ project.attribute $(root-project) parent-module ] ;
+ }
+
+ .latex.check = [ new file-target latex.check
+ :
+ : [ project.target $(root-project) ]
+ : [ new action : doxygen.check-latex ]
+ :
+ ] ;
+ .dvips.check = [ new file-target dvips.check
+ :
+ : [ project.target $(root-project) ]
+ : [ new action : doxygen.check-dvips ]
+ :
+ ] ;
+ .gs.check = [ new file-target gs.check
+ :
+ : [ project.target $(root-project) ]
+ : [ new action : doxygen.check-gs ]
+ :
+ ] ;
+ .check-tools-targets = $(.latex.check) $(.dvips.check) $(.gs.check) ;
+ }
+ return $(.check-tools-targets) ;
+}
+
+project.initialize $(__name__) ;
+project doxygen ;
+
+class doxygen-check-tools-target-class : basic-target
+{
+ import doxygen ;
+ rule construct ( name : sources * : property-set )
+ {
+ return [ property-set.empty ] [ doxygen.check-tools ] ;
+ }
+}
+
+local project = [ project.current ] ;
+
+targets.main-target-alternative
+ [ new doxygen-check-tools-target-class check-tools : $(project)
+ : [ targets.main-target-sources : check-tools : no-renaming ]
+ : [ targets.main-target-requirements : $(project) ]
+ : [ targets.main-target-default-build : $(project) ]
+ : [ targets.main-target-usage-requirements : $(project) ]
+ ] ;
+
+# User-level rule to generate BoostBook XML from a set of headers via Doxygen.
+#
+rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * )
+{
+ freeze-config ;
+ local project = [ project.current ] ;
+
+ if $(target:S) = .html
+ {
+ # Build an HTML directory from the sources.
+ local html-location = [ feature.get-values <location> : $(requirements) ] ;
+ local output-dir ;
+ if [ $(project).get build-dir ]
+ {
+ # Explicitly specified build dir. Add html at the end.
+ output-dir = [ path.join [ $(project).build-dir ] $(html-location:E=html) ] ;
+ }
+ else
+ {
+ # Trim 'bin' from the implicit build dir, for no other reason than backward
+ # compatibility.
+ output-dir = [ path.join [ path.parent [ $(project).build-dir ] ]
+ $(html-location:E=html) ] ;
+ }
+ output-dir = [ path.root $(output-dir) [ path.pwd ] ] ;
+ local output-dir-native = [ path.native $(output-dir) ] ;
+ requirements = [ property.change $(requirements) : <location> ] ;
+
+ ## The doxygen configuration file.
+ targets.main-target-alternative
+ [ new typed-target $(target:S=.tag) : $(project) : DOXYFILE
+ : [ targets.main-target-sources $(sources) : $(target:S=.tag) ]
+ : [ targets.main-target-requirements $(requirements)
+ <doxygen:param>GENERATE_HTML=YES
+ <doxygen:param>GENERATE_XML=NO
+ <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
+ <doxygen:param>HTML_OUTPUT=$(target:B)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target:S=.tag) ;
+
+ ## The html directory to generate by running doxygen.
+ targets.main-target-alternative
+ [ new typed-target $(target:S=.dir) : $(project) : DOXYGEN_HTML_MULTIFILE
+ : $(target:S=.tag)
+ : [ targets.main-target-requirements $(requirements)
+ <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
+ <doxygen:param>HTML_OUTPUT=$(target:B)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target:S=.dir) ;
+
+ ## The redirect html file into the generated html.
+ targets.main-target-alternative
+ [ new typed-target $(target) : $(project) : DOXYGEN_HTML
+ : $(target:S=.dir)
+ : [ targets.main-target-requirements $(requirements)
+ <location>$(output-dir)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ }
+ else
+ {
+ # Build a BoostBook XML file from the sources.
+ local location-xml = [ feature.get-values <location> : $(requirements) ] ;
+ requirements = [ property.change $(requirements) : <location> ] ;
+ local target-xml = $(target:B=$(target:B)-xml) ;
+
+ # Check whether we need to build images
+ local images-location =
+ [ feature.get-values <doxygen:xml-imagedir> : $(requirements) ] ;
+ if $(images-location)
+ {
+ doxygen $(target).doxygen-xml-images.html : $(sources)
+ : $(requirements)
+ <doxygen.rmdir>on
+ <doxygen:param>QUIET=YES
+ <doxygen:param>WARNINGS=NO
+ <doxygen:param>WARN_IF_UNDOCUMENTED=NO
+ <dependency>/doxygen//check-tools ;
+ $(project).mark-target-as-explicit
+ $(target).doxygen-xml-images.html ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target).doxygen-xml-images
+ : $(project) : DOXYGEN_XML_IMAGES
+ : $(target).doxygen-xml-images.html
+ : [ targets.main-target-requirements $(requirements)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build)
+ : $(project) ]
+ ] ;
+
+ $(project).mark-target-as-explicit
+ $(target).doxygen-xml-images ;
+
+ if ! [ regex.match "^(.*/)$" : $(images-location) ]
+ {
+ images-location = $(images-location)/ ;
+ }
+
+ requirements +=
+ <dependency>$(target).doxygen-xml-images
+ <xsl:param>boost.doxygen.formuladir=$(images-location) ;
+ }
+
+ ## The doxygen configuration file.
+ targets.main-target-alternative
+ [ new typed-target $(target-xml:S=.tag) : $(project) : DOXYFILE
+ : [ targets.main-target-sources $(sources) : $(target-xml:S=.tag) ]
+ : [ targets.main-target-requirements $(requirements)
+ <doxygen:param>GENERATE_HTML=NO
+ <doxygen:param>GENERATE_XML=YES
+ <doxygen:param>XML_OUTPUT=$(target-xml)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target-xml:S=.tag) ;
+
+ ## The Doxygen XML directory of the processed source files.
+ targets.main-target-alternative
+ [ new typed-target $(target-xml:S=.dir) : $(project) : DOXYGEN_XML_MULTIFILE
+ : $(target-xml:S=.tag)
+ : [ targets.main-target-requirements $(requirements)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target-xml:S=.dir) ;
+
+ ## The resulting BoostBook file is generated by the processor tool. The
+ ## tool can be either xsltproc plus the accompanying XSL scripts, or the
+ ## Python doxproc.py script.
+ targets.main-target-alternative
+ [ new typed-target $(target-xml) : $(project) : BOOSTBOOK
+ : $(target-xml:S=.dir)
+ : [ targets.main-target-requirements $(requirements)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target-xml) ;
+
+ targets.main-target-alternative
+ [ new install-target-class $(target:S=.xml) : $(project)
+ : $(target-xml)
+ : [ targets.main-target-requirements $(requirements)
+ <location>$(location-xml:E=.)
+ <name>$(target:S=.xml)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(target:S=.xml) ;
+
+ targets.main-target-alternative
+ [ new alias-target-class $(target) : $(project)
+ :
+ : [ targets.main-target-requirements $(requirements)
+ : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements)
+ <dependency>$(target:S=.xml)
+ : $(project) ]
+ ] ;
+ }
+}
diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile b/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile
new file mode 100644
index 000000000..9b969df9c
--- /dev/null
+++ b/jam-files/boost-build/tools/doxygen/windows-paths-check.doxyfile
@@ -0,0 +1,3 @@
+INPUT = windows-paths-check.hpp
+GENERATE_HTML = NO
+GENERATE_LATEX = NO
diff --git a/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp b/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/jam-files/boost-build/tools/doxygen/windows-paths-check.hpp
diff --git a/jam-files/boost-build/tools/fop.jam b/jam-files/boost-build/tools/fop.jam
new file mode 100644
index 000000000..c24b8725f
--- /dev/null
+++ b/jam-files/boost-build/tools/fop.jam
@@ -0,0 +1,69 @@
+# Copyright (C) 2003-2004 Doug Gregor and Dave Abrahams. Distributed
+# under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+# This module defines rules to handle generation of PDF and
+# PostScript files from XSL Formatting Objects via Apache FOP
+
+import generators ;
+import common ;
+import boostbook ;
+
+generators.register-standard fop.render.pdf : FO : PDF ;
+generators.register-standard fop.render.ps : FO : PS ;
+
+# Initializes the fop toolset.
+#
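+# A configuration sketch (all paths below are hypothetical):
+#
+#   using fop : /usr/local/fop/fop
+#             : /usr/lib/jvm/java
+#             ;
+#
+# The second argument sets JAVA_HOME; an optional third argument names the
+# java executable itself (JAVACMD) for non-standard installations.
+#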
+rule init ( fop-command ? : java-home ? : java ? )
+{
+ local has-command = $(.has-command) ;
+
+ if $(fop-command)
+ {
+ .has-command = true ;
+ }
+
+ if $(fop-command) || ! $(has-command)
+ {
+ fop-command = [ common.get-invocation-command fop : fop : $(fop-command)
+ : [ modules.peek : FOP_DIR ] ] ;
+ }
+
+ if $(fop-command)
+ {
+ .FOP_COMMAND = $(fop-command) ;
+ }
+
+ if $(java-home) || $(java)
+ {
+ .FOP_SETUP = ;
+
+
+ # JAVA_HOME is the location where Java was installed.
+
+ if $(java-home)
+ {
+ .FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ;
+ }
+
+ # JAVACMD is the location of the java executable, useful for a
+ # non-standard java installation, where the executable isn't at
+ # $JAVA_HOME/bin/java.
+
+ if $(java)
+ {
+ .FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ;
+ }
+ }
+}
+
+actions render.pdf
+{
+ $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) $(<)
+}
+
+actions render.ps
+{
+ $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) -ps $(<)
+}
diff --git a/jam-files/boost-build/tools/fortran.jam b/jam-files/boost-build/tools/fortran.jam
new file mode 100644
index 000000000..37665825e
--- /dev/null
+++ b/jam-files/boost-build/tools/fortran.jam
@@ -0,0 +1,55 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# This file contains common settings for all fortran tools
+#
+
+import "class" : new ;
+import feature : feature ;
+
+import type ;
+import generators ;
+import common ;
+
+type.register FORTRAN : f F for f77 ;
+type.register FORTRAN90 : f90 F90 ;
+
+feature fortran : : free ;
+feature fortran90 : : free ;
+
+class fortran-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
+ }
+}
+
+rule register-fortran-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
+{
+ local g = [ new fortran-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
+ generators.register $(g) ;
+}
+
+class fortran90-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
+ }
+}
+
+rule register-fortran90-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
+{
+ local g = [ new fortran90-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
+ generators.register $(g) ;
+}
+
+# FIXME: this is ugly, should find a better way (we'd want client code to
+# register all generators as "generator.some-rule", not with "some-module.some-rule".)
+IMPORT $(__name__) : register-fortran-compiler : : generators.register-fortran-compiler ;
+IMPORT $(__name__) : register-fortran90-compiler : : generators.register-fortran90-compiler ;
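+
+# For reference, a toolset module then registers its Fortran compiler through
+# the imported name; for example, gcc.jam in this patch does exactly that:
+#
+#   generators.register-fortran-compiler gcc.compile.fortran
+#     : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ;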
diff --git a/jam-files/boost-build/tools/gcc.jam b/jam-files/boost-build/tools/gcc.jam
new file mode 100644
index 000000000..f7b0da542
--- /dev/null
+++ b/jam-files/boost-build/tools/gcc.jam
@@ -0,0 +1,1185 @@
+# Copyright 2001 David Abrahams.
+# Copyright 2002-2006 Rene Rivera.
+# Copyright 2002-2003 Vladimir Prus.
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov.
+# Copyright 2007 Roland Schwarz
+# Copyright 2007 Boris Gubenko.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import pch ;
+import property ;
+import property-set ;
+import toolset ;
+import type ;
+import rc ;
+import regex ;
+import set ;
+import unix ;
+import fortran ;
+
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+
+feature.extend toolset : gcc ;
+# feature.subfeature toolset gcc : flavor : : optional ;
+
+toolset.inherit-generators gcc : unix : unix.link unix.link.dll ;
+toolset.inherit-flags gcc : unix ;
+toolset.inherit-rules gcc : unix ;
+
+generators.override gcc.prebuilt : builtin.prebuilt ;
+generators.override gcc.searched-lib-generator : searched-lib-generator ;
+
+# Make gcc toolset object files use the "o" suffix on all platforms.
+type.set-generated-target-suffix OBJ : <toolset>gcc : o ;
+type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>windows : o ;
+type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>cygwin : o ;
+
+# Initializes the gcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or
+# sun and the default value will be selected based on the current OS.
+# Example:
+# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+#
+# The compiler command to use is detected in a three step manner:
+# 1) If an explicit command is specified by the user, it will be used and must be available.
+# 2) If only a certain version is specified, it is enforced:
+# - either a command 'g++-VERSION' must be available
+# - or the default command 'g++' must be available and match the exact version.
+# 3) Without user-provided restrictions, use the default 'g++'.
+rule init ( version ? : command * : options * )
+{
+ #1): use user-provided command
+ local tool-command = ;
+ if $(command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ : $(command) ] ;
+ if ! $(tool-command)
+ {
+ errors.error "toolset gcc initialization:" :
+ "provided command '$(command)' not found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+ #2): enforce user-provided version
+ else if $(version)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : "g++-$(version[1])" ] ;
+
+ #2.1) fallback: check whether "g++" reports the requested version
+ if ! $(tool-command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
+ if $(tool-command)
+ {
+ local tool-command-string = $(tool-command:J=" ") ;
+ local tool-version = [ MATCH "^([0-9.]+)" : [ SHELL "$(tool-command-string) -dumpversion" ] ] ;
+ if $(tool-version) != $(version)
+ {
+ # Permit a match between the two-digit version specified by the user
+ # (e.g. 4.4) and the 3-digit version reported by gcc.
+ # Since only two digits are present in the binary name anyway,
+ # insisting that the user specify a 3-digit version when
+ # configuring Boost.Build, while it's not required on the
+ # command line, would be strange.
+ local stripped = [ MATCH "^([0-9]+\.[0-9]+).*" : $(tool-version) ] ;
+ if $(stripped) != $(version)
+ {
+ errors.error "toolset gcc initialization:" :
+ "version '$(version)' requested but 'g++-$(version)' not found and version '$(tool-version)' of default '$(tool-command)' does not match" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ tool-command = ;
+ }
+ # Use full 3-digit version to be compatible with the 'using gcc ;' case
+ version = $(tool-version) ;
+ }
+ }
+ else
+ {
+ errors.error "toolset gcc initialization:" :
+ "version '$(version)' requested but neither 'g++-$(version)' nor default 'g++' found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+ }
+ #3) default: no command and no version specified, try using default command "g++"
+ else
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
+ if ! $(tool-command)
+ {
+ errors.error "toolset gcc initialization:" :
+ "no command provided, default command 'g++' not found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+
+
+ # Information about the gcc command...
+ # The command.
+ local command = $(tool-command) ;
+ # The root directory of the tool install.
+ local root = [ feature.get-values <root> : $(options) ] ;
+ # The bin directory where to find the command to execute.
+ local bin ;
+ # The flavor of compiler.
+ local flavor = [ feature.get-values <flavor> : $(options) ] ;
+ # Autodetect the root and bin dir if not given.
+ if $(command)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+ }
+ # The 'command' variable can have multiple elements. When calling
+ # the SHELL builtin we need a single string.
+ local command-string = $(command:J=" ") ;
+ # Autodetect the version and flavor if not given.
+ if $(command)
+ {
+ local machine = [ MATCH "^([^ ]+)"
+ : [ SHELL "$(command-string) -dumpmachine" ] ] ;
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ switch $(machine:L)
+ {
+ case *mingw* : flavor ?= mingw ;
+ }
+ }
+
+ local condition ;
+ if $(flavor)
+ {
+ condition = [ common.check-init-parameters gcc
+ : version $(version)
+ : flavor $(flavor)
+ ] ;
+ }
+ else
+ {
+ condition = [ common.check-init-parameters gcc
+ : version $(version)
+ ] ;
+ condition = $(condition) ; #/<toolset-gcc:flavor> ;
+ }
+
+ common.handle-options gcc : $(condition) : $(command) : $(options) ;
+
+ local linker = [ feature.get-values <linker-type> : $(options) ] ;
+ # The logic below should actually be keyed on <target-os>
+ if ! $(linker)
+ {
+ if [ os.name ] = OSF
+ {
+ linker = osf ;
+ }
+ else if [ os.name ] = HPUX
+ {
+ linker = hpux ;
+ }
+ else if [ os.name ] = AIX
+ {
+ linker = aix ;
+ }
+ else if [ os.name ] = SOLARIS
+ {
+ linker = sun ;
+ }
+ else
+ {
+ linker = gnu ;
+ }
+ }
+ init-link-flags gcc $(linker) $(condition) ;
+
+
+ # If gcc is installed in non-standard location, we'd need to add
+ # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
+ # rules).
+ if $(command)
+ {
+ # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+ # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+ # right ones. Note that we don't provide a clean way to build a 32-bit
+ # binary with a 64-bit compiler, but the user can always pass -m32 manually.
+ local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc libraries :: $(condition) :: $(lib_path) ;
+ }
+ toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+
+ # If it's not a system gcc install we should adjust the various programs as
+ # needed to prefer using the install specific versions. This is essential
+ # for correct use of MinGW and for cross-compiling.
+
+ local nl = "
+" ;
+
+ # - The archive builder.
+ local archiver = [ common.get-invocation-command gcc
+ : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ar" ] ] ]
+ : [ feature.get-values <archiver> : $(options) ]
+ : $(bin)
+ : search-path ] ;
+ toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc archiver :: $(condition) :: $(archiver[1]) ;
+ }
+
+ # - Ranlib
+ local ranlib = [ common.get-invocation-command gcc
+ : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ranlib" ] ] ]
+ : [ feature.get-values <ranlib> : $(options) ]
+ : $(bin)
+ : search-path ] ;
+ toolset.flags gcc.archive .RANLIB $(condition) : $(ranlib[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc ranlib :: $(condition) :: $(ranlib[1]) ;
+ }
+
+
+ # - The resource compiler.
+ local rc =
+ [ common.get-invocation-command-nodefault gcc
+ : windres : [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ;
+ local rc-type =
+ [ feature.get-values <rc-type> : $(options) ] ;
+ rc-type ?= windres ;
+ if ! $(rc)
+ {
+ # If we can't find an RC compiler, we fall back to a null RC compiler that
+ # creates empty object files. This allows the same Jamfiles to work
+ # across the board. The null RC uses the assembler to create the empty
+ # objects, so configure that.
+ rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ] ;
+ rc-type = null ;
+ }
+ rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ;
+}
+
+if [ os.name ] = NT
+{
+ # This causes single-line command invocation to not go through .bat files,
+ # thus avoiding command-line length limitations.
+ JAMSHELL = % ;
+}
+
+generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
+generators.register-fortran-compiler gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ;
+
+# pch support
+
+# The compiler looks for a precompiled header in each directory just before it
+# looks for the include file in that directory. The name searched for is the
+# name specified in the #include directive with ".gch" suffix appended. The
+# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is
+# appended to the full name of the header.
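+#
+# As a usage sketch (target and header names are hypothetical, relying on the
+# standard pch module's cpp-pch main-target rule), a Jamfile could declare:
+#
+#   cpp-pch my_pch : my_pch.hpp ;
+#   exe app : app.cpp my_pch ;
+#
+# With this toolset the precompiled header is produced as my_pch.hpp.gch and
+# the matching -I and -Winvalid-pch options are added when compiling app.cpp.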
+
+type.set-generated-target-suffix PCH : <toolset>gcc : gch ;
+
+# GCC-specific pch generator.
+class gcc-pch-generator : pch-generator
+{
+ import project ;
+ import property-set ;
+ import type ;
+
+ rule run-pch ( project name ? : property-set : sources + )
+ {
+ # Find the header in sources. Ignore any CPP sources.
+ local header ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] H ]
+ {
+ header = $(s) ;
+ }
+ }
+
+ # Error handling: Base header file name should be the same as the base
+ # precompiled header name.
+ local header-name = [ $(header).name ] ;
+ local header-basename = $(header-name:B) ;
+ if $(header-basename) != $(name)
+ {
+ local location = [ $(project).project-module ] ;
+ errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
+ }
+
+ local pch-file = [ generator.run $(project) $(name) : $(property-set)
+ : $(header) ] ;
+
+ # return result of base class and pch-file property as usage-requirements
+ return
+ [ property-set.create <pch-file>$(pch-file) <cflags>-Winvalid-pch ]
+ $(pch-file)
+ ;
+ }
+
+ # Calls the base version, specifying the source's name as the name of the
+ # created target. As a result, the PCH will be named whatever.hpp.gch, and
+ # not whatever.gch.
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ name = [ $(sources[1]).name ] ;
+ return [ generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
+# latter have HPP type, but HPP type is derived from H. The type of compilation
+# is determined entirely by the destination type.
+generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : <pch>on <toolset>gcc ] ;
+generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ;
+
+# Override default do-nothing generators.
+generators.override gcc.compile.c.pch : pch.default-c-pch-generator ;
+generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+toolset.flags gcc.compile PCH_FILE <pch>on : <pch-file> ;
+
+# Declare flags and action for compilation.
+toolset.flags gcc.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags gcc.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags gcc.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags gcc.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags gcc.compile OPTIONS <warnings>off : -w ;
+toolset.flags gcc.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags gcc.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags gcc.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags gcc.compile OPTIONS <profiling>on : -pg ;
+toolset.flags gcc.compile OPTIONS <rtti>off : -fno-rtti ;
+
+rule setup-fpic ( targets * : sources * : properties * )
+{
+ local link = [ feature.get-values link : $(properties) ] ;
+ if $(link) = shared
+ {
+ local target = [ feature.get-values target-os : $(properties) ] ;
+
+ # This logic will add -fPIC for all compilations:
+ #
+ # lib a : a.cpp b ;
+ # obj b : b.cpp ;
+ # exe c : c.cpp a d ;
+ # obj d : d.cpp ;
+ #
+ # This is all fine, except that 'd' will be compiled with -fPIC even though
+ # it is not needed, as 'd' is used only in an exe. However, it is hard to
+ # detect where a target is going to be used. Alternatively, we can set -fPIC
+ # only when the main target type is LIB, but then 'b' would be compiled without
+ # -fPIC, which would lead to link errors on x86-64. So, compile everything
+ # with -fPIC.
+ #
+ # Yet another alternative would be to create a propagated <sharedable>
+ # feature and set it when building shared libraries, but that would be hard
+ # to implement and would increase the target path length even more.
+
+ # On Windows, fPIC is the default; specifying -fPIC explicitly leads to
+ # a warning.
+ if $(target) != cygwin && $(target) != windows
+ {
+ OPTIONS on $(targets) += -fPIC ;
+ }
+ }
+}
+
+rule setup-address-model ( targets * : sources * : properties * )
+{
+ local model = [ feature.get-values address-model : $(properties) ] ;
+ if $(model)
+ {
+ local option ;
+ local os = [ feature.get-values target-os : $(properties) ] ;
+ if $(os) = aix
+ {
+ if $(model) = 32
+ {
+ option = -maix32 ;
+ }
+ else
+ {
+ option = -maix64 ;
+ }
+ }
+ else if $(os) = hpux
+ {
+ if $(model) = 32
+ {
+ option = -milp32 ;
+ }
+ else
+ {
+ option = -mlp64 ;
+ }
+ }
+ else
+ {
+ if $(model) = 32
+ {
+ option = -m32 ;
+ }
+ else if $(model) = 64
+ {
+ option = -m64 ;
+ }
+ # For darwin, the model can be 32_64. darwin.jam will handle that
+ # on its own.
+ }
+ OPTIONS on $(targets) += $(option) ;
+ }
+}
+
+
+# FIXME: this should not use os.name.
+if [ os.name ] != NT && [ os.name ] != OSF && [ os.name ] != HPUX && [ os.name ] != AIX
+{
+ # OSF does have an option called -soname but it does not seem to work as
+ # expected, therefore it has been disabled.
+ HAVE_SONAME = "" ;
+ SONAME_OPTION = -h ;
+}
+
+# HPUX, for some reason, seems to use '+h', not '-h'.
+if [ os.name ] = HPUX
+{
+ HAVE_SONAME = "" ;
+ SONAME_OPTION = +h ;
+}
+
+toolset.flags gcc.compile USER_OPTIONS <cflags> ;
+toolset.flags gcc.compile.c++ USER_OPTIONS <cxxflags> ;
+toolset.flags gcc.compile DEFINES <define> ;
+toolset.flags gcc.compile INCLUDES <include> ;
+toolset.flags gcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+toolset.flags gcc.compile.fortran USER_OPTIONS <fflags> ;
+
+rule compile.c++.pch ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++.pch
+{
+ "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c.pch
+{
+ "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c++.preprocess ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
+ {
+ LANG on $(<) = "-x c++" ;
+ }
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c.preprocess ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ # If we use the name g++ then default file suffix -> language mapping does
+ # not work. So have to pass -x option. Maybe, we can work around this by
+ # allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ LANG on $(<) = "-x c" ;
+ #}
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
+ {
+ LANG on $(<) = "-x c++" ;
+ }
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+
+ # Here we want to raise the template-depth parameter value to something
+ # higher than the default value of 17. Note that we could do this using the
+ # feature.set-default rule but we do not want to set the default value for
+ # all toolsets as well.
+ #
+ # TODO: This 'modified default' has been inherited from some 'older Boost
+ # Build implementation' and has most likely been added to make some Boost
+ # library parts compile correctly. We should see what exactly prompted this
+ # and whether we can get around the problem more locally.
+ local template-depth = [ on $(<) return $(TEMPLATE_DEPTH) ] ;
+ if ! $(template-depth)
+ {
+ TEMPLATE_DEPTH on $(<) = 128 ;
+ }
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ # If we use the name g++ then default file suffix -> language mapping does
+ # not work. So have to pass -x option. Maybe, we can work around this by
+ # allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ LANG on $(<) = "-x c" ;
+ #}
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.fortran ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++ bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<:W)" "$(>:W)"
+}
+
+actions compile.c bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++.preprocess bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)"
+}
+
+actions compile.c.preprocess bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<)
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.asm ( targets * : sources * : properties * )
+{
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ LANG on $(<) = "-x assembler-with-cpp" ;
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# The class which checks that we don't try to use the <runtime-link>static
+# property while creating or using a shared library, since it's not supported
+# by gcc/libc.
+class gcc-linking-generator : unix-linking-generator
+{
+ rule run ( project name ? : property-set : sources + )
+ {
+ # TODO: Replace this with the use of a target-os property.
+ local no-static-link = ;
+ if [ modules.peek : UNIX ]
+ {
+ switch [ modules.peek : JAMUNAME ]
+ {
+ case * : no-static-link = true ;
+ }
+ }
+
+ local properties = [ $(property-set).raw ] ;
+ local reason ;
+ if $(no-static-link) && <runtime-link>static in $(properties)
+ {
+ if <link>shared in $(properties)
+ {
+ reason =
+ "On gcc, DLL can't be built with '<runtime-link>static'." ;
+ }
+ else if [ type.is-derived $(self.target-types[1]) EXE ]
+ {
+ for local s in $(sources)
+ {
+ local type = [ $(s).type ] ;
+ if $(type) && [ type.is-derived $(type) SHARED_LIB ]
+ {
+ reason =
+ "On gcc, using DLLs together with the"
+ "<runtime-link>static option is not possible " ;
+ }
+ }
+ }
+ }
+ if $(reason)
+ {
+ ECHO warning:
+ $(reason) ;
+ ECHO warning:
+ "It is suggested to use '<runtime-link>static' together"
+ "with '<link>static'." ;
+ return ;
+ }
+ else
+ {
+ local generated-targets = [ unix-linking-generator.run $(project)
+ $(name) : $(property-set) : $(sources) ] ;
+ return $(generated-targets) ;
+ }
+ }
+}
+
+# The set of permissible input types is different on mingw.
+# So, define two sets of generators, with mingw generators
+# selected when target-os=windows.
+
+local g ;
+g = [ new gcc-linking-generator gcc.mingw.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>gcc <target-os>windows ] ;
+$(g).set-rule-name gcc.link ;
+generators.register $(g) ;
+
+g = [ new gcc-linking-generator gcc.mingw.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : IMPORT_LIB SHARED_LIB
+ : <toolset>gcc <target-os>windows ] ;
+$(g).set-rule-name gcc.link.dll ;
+generators.register $(g) ;
+
+generators.register
+ [ new gcc-linking-generator gcc.link
+ : LIB OBJ
+ : EXE
+ : <toolset>gcc ] ;
+generators.register
+ [ new gcc-linking-generator gcc.link.dll
+ : LIB OBJ
+ : SHARED_LIB
+ : <toolset>gcc ] ;
+
+generators.override gcc.mingw.link : gcc.link ;
+generators.override gcc.mingw.link.dll : gcc.link.dll ;
+
+# Cygwin is similar to msvc and mingw in that it uses import libraries.
+# While in simple cases it can link directly to a shared library,
+# this is believed to be slower, and not always possible. Define cygwin-specific
+# generators here.
+
+g = [ new gcc-linking-generator gcc.cygwin.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>gcc <target-os>cygwin ] ;
+$(g).set-rule-name gcc.link ;
+generators.register $(g) ;
+
+g = [ new gcc-linking-generator gcc.cygwin.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : IMPORT_LIB SHARED_LIB
+ : <toolset>gcc <target-os>cygwin ] ;
+$(g).set-rule-name gcc.link.dll ;
+generators.register $(g) ;
+
+generators.override gcc.cygwin.link : gcc.link ;
+generators.override gcc.cygwin.link.dll : gcc.link.dll ;
+
+# Declare flags for linking.
+# First, the common flags.
+toolset.flags gcc.link OPTIONS <debug-symbols>on : -g ;
+toolset.flags gcc.link OPTIONS <profiling>on : -pg ;
+toolset.flags gcc.link USER_OPTIONS <linkflags> ;
+toolset.flags gcc.link LINKPATH <library-path> ;
+toolset.flags gcc.link FINDLIBS-ST <find-static-library> ;
+toolset.flags gcc.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags gcc.link LIBRARIES <library-file> ;
+
+toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ;
+toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ;
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link. On HP-UX not all system libraries exist as archived libraries (for
+# example, there is no libunwind.a), so, on this platform, the -static option
+# cannot be specified.
+if [ os.name ] != HPUX
+{
+ toolset.flags gcc.link OPTIONS <runtime-link>static : -static ;
+}
+
+# Now, the vendor specific flags.
+# The parameter linker can be either aix, darwin, gnu, hpux, osf or sun.
+rule init-link-flags ( toolset linker condition )
+{
+ switch $(linker)
+ {
+ case aix :
+ {
+ #
+ # On AIX we *have* to use the native linker.
+ #
+ # Using -brtl, the AIX linker will look for libraries with both the .a
+ # and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the
+ # AIX linker looks only for libfoo.a. Note that libfoo.a is an archived
+ # file that may contain shared objects and is different from static libs
+ # as on Linux.
+ #
+ # The -bnoipath strips the prepending (relative) path of libraries from
+ # the loader section in the target library or executable. Hence, during
+ # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
+ # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
+ # this option, the prepending (relative) path + library name is
+ # hard-coded in the loader section, causing *only* this path to be
+ # searched during load-time. Note that the AIX linker does not have an
+ # -soname equivalent, this is as close as it gets.
+ #
+ # The above options are definitely for AIX 5.x, and most likely also for
+ # AIX 4.x and AIX 6.x. For details about the AIX linker see:
+ # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
+ #
+
+ toolset.flags $(toolset).link OPTIONS : -Wl,-brtl -Wl,-bnoipath
+ : unchecked ;
+ }
+
+ case darwin :
+ {
+ # On Darwin, the -s option to ld does not work unless we pass -static,
+ # and passing -static unconditionally is a bad idea. So, don't pass -s
+ # at all; darwin.jam will use a separate 'strip' invocation.
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
+ toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
+ }
+
+ case gnu :
+ {
+ # Strip the binary when no debugging is needed. We use --strip-all flag
+ # as opposed to -s since icc (intel's compiler) is generally
+ # option-compatible with and inherits from the gcc toolset, but does not
+ # support -s.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,--strip-all : unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
+ toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
+ toolset.flags $(toolset).link START-GROUP $(condition) : -Wl,--start-group : unchecked ;
+ toolset.flags $(toolset).link END-GROUP $(condition) : -Wl,--end-group : unchecked ;
+
+ # GNU ld has the ability to change the search behaviour for libraries
+ # referenced by the -l switch. The modifiers are -Bstatic and -Bdynamic,
+ # and they change the search for -l switches that follow them. The
+ # following list shows the variants tried.
+ # The search stops at the first variant that has a match.
+ # *nix: -Bstatic -lxxx
+ # libxxx.a
+ #
+ # *nix: -Bdynamic -lxxx
+ # libxxx.so
+ # libxxx.a
+ #
+ # windows (mingw,cygwin) -Bstatic -lxxx
+ # libxxx.a
+ # xxx.lib
+ #
+ # windows (mingw,cygwin) -Bdynamic -lxxx
+ # libxxx.dll.a
+ # xxx.dll.a
+ # libxxx.a
+ # xxx.lib
+ # cygxxx.dll (*)
+ # libxxx.dll
+ # xxx.dll
+ # libxxx.a
+ #
+ # (*) This is for cygwin
+ # Please note that -Bstatic and -Bdynamic are not a guarantee that a
+ # static or dynamic lib indeed gets linked in. The switches only change
+ # search patterns!
+
+ # On *nix mixing shared libs with static runtime is not a good idea.
+ toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>shared
+ : -Wl,-Bstatic : unchecked ;
+ toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>shared
+ : -Wl,-Bdynamic : unchecked ;
+
+ # On windows allow mixing of static and dynamic libs with static
+ # runtime.
+ toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>static/<target-os>windows
+ : -Wl,-Bstatic : unchecked ;
+ toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>static/<target-os>windows
+ : -Wl,-Bdynamic : unchecked ;
+ toolset.flags $(toolset).link OPTIONS $(condition)/<runtime-link>static/<target-os>windows
+ : -Wl,-Bstatic : unchecked ;
+ }
+
+ case hpux :
+ {
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
+ : -Wl,-s : unchecked ;
+ toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared
+ : -fPIC : unchecked ;
+ }
+
+ case osf :
+ {
+ # No --strip-all, just -s.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
+ : -Wl,-s : unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path>
+ : unchecked ;
+ # This does not support -R.
+ toolset.flags $(toolset).link RPATH_OPTION $(condition) : -rpath
+ : unchecked ;
+ # -rpath-link is not supported at all.
+ }
+
+ case sun :
+ {
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
+ : -Wl,-s : unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path>
+ : unchecked ;
+ # The Solaris linker does not have a separate -rpath-link, but allows
+ # using -L for the same purpose.
+ toolset.flags $(toolset).link LINKPATH $(condition) : <xdll-path>
+ : unchecked ;
+
+ # This permits shared libraries with non-PIC code on Solaris.
+ # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
+ # following is not needed. Whether -fPIC should be hardcoded is a
+ # separate question.
+ # AH, 2004/10/16: it is still necessary because some tests link against
+ # static libraries that were compiled without PIC.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared
+ : -mimpure-text : unchecked ;
+ }
+
+ case * :
+ {
+ errors.user-error
+ "$(toolset) initialization: invalid linker '$(linker)'" :
+ "The value '$(linker)' specified for <linker> is not recognized." :
+ "Possible values are 'aix', 'darwin', 'gnu', 'hpux', 'osf' or 'sun'" ;
+ }
+ }
+}
+
+# Enclose the RPATH variable on 'targets' in (double) quotes, unless it is
+# already enclosed in single quotes. This special casing is done because it
+# is common to pass '$ORIGIN' to the linker -- it has to be in single quotes
+# to prevent expansion by the shell -- and adding double quotes around it
+# would destroy that protection.
+rule quote-rpath ( targets * )
+{
+ local r = [ on $(targets[1]) return $(RPATH) ] ;
+ if ! [ MATCH "('.*')" : $(r) ]
+ {
+ r = "\"$(r)\"" ;
+ }
+ RPATH on $(targets) = $(r) ;
+}
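+# Illustration (hypothetical values): an RPATH of /opt/foo/lib is rewritten to
+# "/opt/foo/lib", while an RPATH of '$ORIGIN/../lib' is left as-is so that the
+# single quotes keep the shell from expanding $ORIGIN when the link command runs.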
+
+# Declare actions for linking.
+rule link ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+ # parallel is just slower. For now, serialize only gcc links; it might be
+ # a good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ quote-rpath $(targets) ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+
+}
+
+# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
+# does not have the same logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
+# always available.
+.AR = ar ;
+.RANLIB = ranlib ;
+
+toolset.flags gcc.archive AROPTIONS <archiveflags> ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove the archive and start again. Here is the rationale from
+ #
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+ # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+ # errors. After some debugging I traced it back to the fact that a1.o was
+ # *still* in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM onto the
+ # archive action. That makes archives fail to build on NT when they have
+ # many files because it will no longer execute the action directly and blow
+ # the line length limit. Instead we remove the file in a different action,
+ # just before building the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+# Declare action for creating static libraries.
+# The letter 'r' means to add files to the archive with replacement. Since we
+# remove the archive, we do not care about replacement, but there is no option
+# "add without replacement".
+# The letter 'c' suppresses the warning in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reason.
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "$(.RANLIB)" "$(<)"
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ quote-rpath $(targets) ;
+}
+
+# Differs from 'link' above only by -shared.
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule setup-threading ( targets * : sources * : properties * )
+{
+ local threading = [ feature.get-values threading : $(properties) ] ;
+ if $(threading) = multi
+ {
+ local target = [ feature.get-values target-os : $(properties) ] ;
+ local option ;
+ local libs ;
+
+ switch $(target)
+ {
+ case windows :
+ {
+ option = -mthreads ;
+ }
+ case cygwin :
+ {
+ option = -mthreads ;
+ }
+ case solaris :
+ {
+ option = -pthreads ;
+ libs = rt ;
+ }
+ case beos :
+ {
+ # BeOS has no threading options, so do not set anything here.
+ }
+ case *bsd :
+ {
+ option = -pthread ;
+ # There is no -lrt on BSD.
+ }
+ case sgi :
+ {
+ # gcc on IRIX does not support multi-threading so do not set anything
+ # here.
+ }
+ case darwin :
+ {
+ # Darwin has no threading options so do not set anything here.
+ }
+ case * :
+ {
+ option = -pthread ;
+ libs = rt ;
+ }
+ }
+
+ if $(option)
+ {
+ OPTIONS on $(targets) += $(option) ;
+ }
+ if $(libs)
+ {
+ FINDLIBS-SA on $(targets) += $(libs) ;
+ }
+ }
+}
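+# Illustration (hypothetical build request): for <threading>multi the rule
+# above adds -pthread to OPTIONS and rt to FINDLIBS-SA on a generic *nix
+# target-os, adds only -mthreads on windows/cygwin, only -pthread on *bsd,
+# and leaves darwin, beos and sgi untouched.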
+
+local rule cpu-flags ( toolset variable : architecture : instruction-set + : values + : default ? )
+{
+ if $(default)
+ {
+ toolset.flags $(toolset) $(variable)
+ <architecture>$(architecture)/<instruction-set>
+ : $(values) ;
+ }
+ toolset.flags $(toolset) $(variable)
+ <architecture>/<instruction-set>$(instruction-set)
+ <architecture>$(architecture)/<instruction-set>$(instruction-set)
+ : $(values) ;
+}
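+# Illustration (hypothetical instruction set): a call such as
+#     cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ;
+# adds -march=core2 whenever <instruction-set>core2 is requested (with or
+# without <architecture>x86). Passing the extra 'default' argument, as the
+# i386 entry below does, additionally applies the flag when <architecture>x86
+# is requested with no instruction set at all.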
+
+# Set architecture/instruction-set options.
+#
+# x86 and compatible
+# The 'native' option appeared in gcc 4.2 so we cannot safely use it
+# as default. Use conservative i386 instead.
+cpu-flags gcc OPTIONS : x86 : native : -march=native ;
+cpu-flags gcc OPTIONS : x86 : i386 : -march=i386 : default ;
+cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ;
+cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ;
+cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ;
+cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ;
+cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ;
+cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ;
+cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ;
+cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ;
+cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ;
+cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ;
+cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ;
+cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ;
+cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ;
+cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ;
+cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ;
+cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ;
+cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ;
+cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ;
+cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ;
+cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ;
+cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ;
+cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ;
+##
+cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ;
+cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ;
+cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ;
+cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ;
+cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ;
+cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ;
+cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ;
+cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ;
+# Sparc
+cpu-flags gcc OPTIONS : sparc : c3 : -mcpu=c3 : default ;
+cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 ;
+cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ;
+cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ;
+cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ;
+cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ;
+cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ;
+cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ;
+cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ;
+cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ;
+cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ;
+cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ;
+cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ;
+cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ;
+cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ;
+# RS/6000 & PowerPC
+cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ;
+cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ;
+cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ;
+cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ;
+cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ;
+cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ;
+cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ;
+cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ;
+cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ;
+cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ;
+cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ;
+cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ;
+cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ;
+cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ;
+cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ;
+cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ;
+cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ;
+cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ;
+cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ;
+cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ;
+cpu-flags gcc OPTIONS : power : power : -mcpu=power ;
+cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ;
+cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ;
+cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ;
+cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ;
+cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ;
+cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ;
+cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ;
+cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ;
+cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ;
+cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ;
+cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ;
+# AIX variant of RS/6000 & PowerPC
+toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X 64" ;
diff --git a/jam-files/boost-build/tools/generate.jam b/jam-files/boost-build/tools/generate.jam
new file mode 100644
index 000000000..6732fa355
--- /dev/null
+++ b/jam-files/boost-build/tools/generate.jam
@@ -0,0 +1,108 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Declares main target 'generate' used to produce targets by calling a
+# user-provided rule that takes and produces virtual targets.
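+#
+# A minimal usage sketch (hypothetical rule and file names), matching the
+# signature that the 'construct' rule below passes to the generating rule:
+#
+#   rule my-generating-rule ( project name : property-set : sources * )
+#   {
+#       # return virtual targets, optionally preceded by a usage-requirements
+#       # property-set
+#   }
+#
+#   generate output : input.txt : <generating-rule>@my-generating-rule ;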
+
+import "class" : new ;
+import errors ;
+import feature ;
+import project ;
+import property ;
+import property-set ;
+import targets ;
+import regex ;
+
+
+feature.feature generating-rule : : free ;
+
+
+class generated-target-class : basic-target
+{
+ import errors ;
+ import indirect ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources)
+ : $(requirements) : $(default-build) : $(usage-requirements) ;
+
+ if ! [ $(self.requirements).get <generating-rule> ]
+ {
+ errors.user-error "The generate rule requires the <generating-rule>"
+ "property to be set" ;
+ }
+ }
+
+ rule construct ( name : sources * : property-set )
+ {
+ local result ;
+ local gr = [ $(property-set).get <generating-rule> ] ;
+
+ # FIXME: this is a copy-paste from virtual-target.jam. We should add a
+ # utility rule to call a rule like this.
+ local rule-name = [ MATCH ^@(.*) : $(gr) ] ;
+ if $(rule-name)
+ {
+ if $(gr[2])
+ {
+ local target-name = [ full-name ] ;
+ errors.user-error "Multiple <generating-rule> properties"
+ "encountered for target $(target-name)." ;
+ }
+
+ result = [ indirect.call $(rule-name) $(self.project) $(name)
+ : $(property-set) : $(sources) ] ;
+
+ if ! $(result)
+ {
+ ECHO "warning: Unable to construct" [ full-name ] ;
+ }
+ }
+
+ local ur ;
+ local targets ;
+
+ if $(result)
+ {
+ if [ class.is-a $(result[1]) : property-set ]
+ {
+ ur = $(result[1]) ;
+ targets = $(result[2-]) ;
+ }
+ else
+ {
+ ur = [ property-set.empty ] ;
+ targets = $(result) ;
+ }
+ }
+ # FIXME: the following loop should be doable using sequence.transform or
+ # some similar utility rule.
+ local rt ;
+ for local t in $(targets)
+ {
+ rt += [ virtual-target.register $(t) ] ;
+ }
+ return $(ur) $(rt) ;
+ }
+}
+
+
+rule generate ( name : sources * : requirements * : default-build *
+ : usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new generated-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
+}
+
+IMPORT $(__name__) : generate : : generate ;
diff --git a/jam-files/boost-build/tools/gettext.jam b/jam-files/boost-build/tools/gettext.jam
new file mode 100644
index 000000000..99a43ffe9
--- /dev/null
+++ b/jam-files/boost-build/tools/gettext.jam
@@ -0,0 +1,230 @@
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module supports the GNU gettext internationalization utilities.
+#
+# It provides two main target rules: 'gettext.catalog', used for
+# creating machine-readable catalogs from translation files, and
+# 'gettext.update', used for updating translation files from modified
+# sources.
+#
+# To add i18n support to your application you should follow these
+# steps.
+#
+# - Decide on a file name which will contain translations and
+# what main target name will be used to update it. For example::
+#
+# gettext.update update-russian : russian.po a.cpp my_app ;
+#
+# - Create the initial translation file by running::
+#
+# bjam update-russian
+#
+# - Edit russian.po. For example, you might change fields like LastTranslator.
+#
+# - Create a main target for final message catalog::
+#
+# gettext.catalog russian : russian.po ;
+#
+# The machine-readable catalog will be updated whenever you update
+# "russian.po". The "russian.po" file will be updated only on explicit
+# request. When you're ready to update translations, you should
+#
+# - Run::
+#
+# bjam update-russian
+#
+# - Edit "russian.po" in appropriate editor.
+#
+# The next bjam run will convert "russian.po" into machine-readable form.
+#
+# By default, translations are marked by the 'i18n' call. The
+# 'gettext.keyword' feature can be used to alter this.
+
+
+import targets ;
+import property-set ;
+import virtual-target ;
+import "class" : new ;
+import project ;
+import type ;
+import generators ;
+import errors ;
+import feature : feature ;
+import toolset : flags ;
+import regex ;
+
+.path = "" ;
+
+# Initializes the gettext module.
+rule init ( path ? # Path where all tools are located. If not specified,
+ # they should be in PATH.
+ )
+{
+ if $(.initialized) && $(.path) != $(path)
+ {
+ errors.error "Attempt to reconfigure with different path" ;
+ }
+ .initialized = true ;
+ if $(path)
+ {
+ .path = $(path)/ ;
+ }
+}
+
+# Creates a main target 'name' which, when updated, will cause the file
+# 'existing-translation' to be updated with translations extracted from
+# 'sources'. It is possible to specify main targets in sources, in which
+# case all targets from the dependency graph of those main targets will be
+# scanned, provided they are of an appropriate type. The 'gettext.types'
+# feature can be used to control the types.
+#
+# The target will be updated only if explicitly requested on the command
+# line.
+rule update ( name : existing-translation sources + : requirements * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : gettext.UPDATE :
+ $(existing-translation) $(sources)
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(name) ;
+}
+
+
+# The human editable source, containing translation.
+type.register gettext.PO : po ;
+# The machine readable message catalog.
+type.register gettext.catalog : mo ;
+# Intermediate type produced by extracting translations from
+# sources.
+type.register gettext.POT : pot ;
+# Pseudo type used to invoke update-translations generator
+type.register gettext.UPDATE ;
+
+# Identifies the keyword that should be used when scanning sources.
+# Default: i18n
+feature gettext.keyword : : free ;
+# Contains a space-separated list of source types which should be scanned.
+# Default: "C CPP"
+feature gettext.types : : free ;
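+
+# Illustration (hypothetical target and marker): both features are free, so
+# they can be passed as requirements of an update target, e.g.
+#     gettext.update update-russian : russian.po my_app
+#         : <gettext.types>CPP <gettext.keyword>_ ;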
+
+generators.register-standard gettext.compile : gettext.PO : gettext.catalog ;
+
+class update-translations-generator : generator
+{
+ import regex : split ;
+ import property-set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ # The rule should be called with at least two sources. The first source
+ # is the translation (.po) file to update. The remaining sources are targets
+ # which should be scanned for new messages. All source files for those targets
+ # will be found and passed to the 'xgettext' utility, which extracts the
+ # messages for localization. Those messages will be merged into the .po file.
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local types = [ $(property-set).get <gettext.types> ] ;
+ types ?= "C CPP" ;
+ types = [ regex.split $(types) " " ] ;
+
+ local keywords = [ $(property-set).get <gettext.keyword> ] ;
+ property-set = [ property-set.create $(keywords:G=<gettext.keyword>) ] ;
+
+ # First determine the list of sources that must be scanned for
+ # messages.
+ local all-sources ;
+ # CONSIDER: I'm not sure if the logic should be the same as for 'stage':
+ # i.e. following dependency properties as well.
+ for local s in $(sources[2-])
+ {
+ all-sources += [ virtual-target.traverse $(s) : : include-sources ] ;
+ }
+ local right-sources ;
+ for local s in $(all-sources)
+ {
+ if [ $(s).type ] in $(types)
+ {
+ right-sources += $(s) ;
+ }
+ }
+
+ local .constructed ;
+ if $(right-sources)
+ {
+ # Create the POT file, which will contain list of messages extracted
+ # from the sources.
+ local extract =
+ [ new action $(right-sources) : gettext.extract : $(property-set) ] ;
+ local new-messages = [ new file-target $(name) : gettext.POT
+ : $(project) : $(extract) ] ;
+
+ # Create a notfile target which will update the existing translation file
+ # with new messages.
+ local a = [ new action $(sources[1]) $(new-messages)
+ : gettext.update-po-dispatch ] ;
+ local r = [ new notfile-target $(name) : $(project) : $(a) ] ;
+ .constructed = [ virtual-target.register $(r) ] ;
+ }
+ else
+ {
+ errors.error "No source could be scanned by gettext tools" ;
+ }
+ return $(.constructed) ;
+ }
+}
+generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ;
+
+flags gettext.extract KEYWORD <gettext.keyword> ;
+actions extract
+{
+ $(.path)xgettext -k$(KEYWORD:E=i18n) -o $(<) $(>)
+}
+
+# Does the real updating of the po file. The tricky part is that we are
+# actually updating one of the sources:
+# $(<) is the NOTFILE target we are updating
+# $(>[1]) is the PO file to be really updated.
+# $(>[2]) is the PO file created from the sources.
+#
+# When the file to be updated does not exist (during the first run), we need
+# to copy the file created from the sources. In all other cases, we need to
+# update the file.
+rule update-po-dispatch
+{
+ NOCARE $(>[1]) ;
+ gettext.create-po $(<) : $(>) ;
+ gettext.update-po $(<) : $(>) ;
+ _ on $(<) = " " ;
+ ok on $(<) = "" ;
+ EXISTING_PO on $(<) = $(>[1]) ;
+}
+
+# Due to the fancy interaction of 'existing' and 'updated', this rule can be
+# called with one source, in which case we copy the lonely source into
+# EXISTING_PO, or with two sources, in which case the action body expands to
+# nothing. I would really like to have a "missing" action modifier.
+actions quietly existing updated create-po bind EXISTING_PO
+{
+ cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok))
+}
+
+actions updated update-po bind EXISTING_PO
+{
+ $(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])"
+}
+
+actions gettext.compile
+{
+ $(.path)msgfmt -o $(<) $(>)
+}
+
+IMPORT $(__name__) : update : : gettext.update ;
diff --git a/jam-files/boost-build/tools/gfortran.jam b/jam-files/boost-build/tools/gfortran.jam
new file mode 100644
index 000000000..0aa69b85c
--- /dev/null
+++ b/jam-files/boost-build/tools/gfortran.jam
@@ -0,0 +1,39 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags gfortran OPTIONS <fflags> ;
+
+flags gfortran OPTIONS <optimization>off : -O0 ;
+flags gfortran OPTIONS <optimization>speed : -O3 ;
+flags gfortran OPTIONS <optimization>space : -Os ;
+
+flags gfortran OPTIONS <debug-symbols>on : -g ;
+flags gfortran OPTIONS <profiling>on : -pg ;
+
+flags gfortran OPTIONS <link>shared/<main-target-type>LIB : -fPIC ;
+
+flags gfortran DEFINES <define> ;
+flags gfortran INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ gcc -Wall $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler gfortran.compile.fortran : FORTRAN FORTRAN90 : OBJ ;
diff --git a/jam-files/boost-build/tools/hp_cxx.jam b/jam-files/boost-build/tools/hp_cxx.jam
new file mode 100644
index 000000000..86cd783e2
--- /dev/null
+++ b/jam-files/boost-build/tools/hp_cxx.jam
@@ -0,0 +1,181 @@
+# Copyright 2001 David Abrahams.
+# Copyright 2004, 2005 Markus Schoepflin.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# HP CXX compiler
+# See http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN
+#
+#
+# Notes on this toolset:
+#
+# - Because of very subtle issues with the default ansi mode, strict_ansi mode
+# is used for compilation. One example of things that don't work correctly in
+# the default ansi mode is overload resolution of function templates when
+# mixed with non-template functions.
+#
+# - For template instantiation "-timplicit_local" is used. Previously,
+# "-tlocal" has been tried to avoid the need for a template repository
+# but this doesn't work with manually instantiated templates. "-tweak"
+# has not been used to avoid the stream of warning messages issued by
+# ar or ld when creating a library or linking an application.
+#
+# - Debug symbols are generated with "-g3", as this works both in debug and
+# release mode. When compiling C++ code without optimization, we additionally
+# use "-gall", which generates full symbol table information for all classes,
+# structs, and unions. As this turns off optimization, it can't be used when
+# optimization is needed.
+#
+
+import feature generators common ;
+import toolset : flags ;
+
+feature.extend toolset : hp_cxx ;
+feature.extend c++abi : cxxarm ;
+
+# Inherit from Unix toolset to get library ordering magic.
+toolset.inherit hp_cxx : unix ;
+
+generators.override hp_cxx.prebuilt : builtin.lib-generator ;
+generators.override hp_cxx.prebuilt : builtin.prebuilt ;
+generators.override hp_cxx.searched-lib-generator : searched-lib-generator ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters hp_cxx : version $(version) ] ;
+
+ local command = [ common.get-invocation-command hp_cxx : cxx : $(command) ] ;
+
+ if $(command)
+ {
+ local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+
+ if $(root)
+ {
+ flags hp_cxx .root $(condition) : "\"$(root)\"/" ;
+ }
+ }
+ # If we can't find 'cxx' anyway, at least show 'cxx' in the commands
+ command ?= cxx ;
+
+ common.handle-options hp_cxx : $(condition) : $(command) : $(options) ;
+}
+
+generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : <toolset>hp_cxx ;
+generators.register-c-compiler hp_cxx.compile.c : C : OBJ : <toolset>hp_cxx ;
+
+
+
+# No static linking as far as I can tell.
+# flags cxx LINKFLAGS <runtime-link>static : -bstatic ;
+flags hp_cxx.compile OPTIONS <debug-symbols>on : -g3 ;
+flags hp_cxx.compile OPTIONS <optimization>off/<debug-symbols>on : -gall ;
+flags hp_cxx.link OPTIONS <debug-symbols>on : -g ;
+flags hp_cxx.link OPTIONS <debug-symbols>off : -s ;
+
+flags hp_cxx.compile OPTIONS <optimization>off : -O0 ;
+flags hp_cxx.compile OPTIONS <optimization>speed/<inlining>on : -O2 ;
+flags hp_cxx.compile OPTIONS <optimization>speed : -O2 ;
+
+# This (undocumented) macro needs to be defined to get all C function
+# overloads required by the C++ standard.
+flags hp_cxx.compile.c++ OPTIONS : -D__CNAME_OVERLOADS ;
+
+# Added for threading support
+flags hp_cxx.compile OPTIONS <threading>multi : -pthread ;
+flags hp_cxx.link OPTIONS <threading>multi : -pthread ;
+
+flags hp_cxx.compile OPTIONS <optimization>space/<inlining>on : <inlining>size ;
+flags hp_cxx.compile OPTIONS <optimization>space : -O1 ;
+flags hp_cxx.compile OPTIONS <inlining>off : -inline none ;
+
+# The compiler versions tried (up to V6.5-040) hang when compiling Boost code
+# with full inlining enabled. So leave it at the default level for now.
+#
+# flags hp_cxx.compile OPTIONS <inlining>full : -inline all ;
+
+flags hp_cxx.compile OPTIONS <profiling>on : -pg ;
+flags hp_cxx.link OPTIONS <profiling>on : -pg ;
+
+# Selection of the object model. This flag is needed on both the C++ compiler
+# and linker command line.
+
+# An unspecified ABI translates to '-model ansi', as that is the most
+# standard-conforming option.
+flags hp_cxx.compile.c++ OPTIONS <c++abi> : -model ansi : : hack-hack ;
+flags hp_cxx.compile.c++ OPTIONS <c++abi>cxxarm : -model arm ;
+flags hp_cxx.link OPTIONS <c++abi> : -model ansi : : hack-hack ;
+flags hp_cxx.link OPTIONS <c++abi>cxxarm : -model arm ;
+
+# Display a descriptive tag together with each compiler message. This tag can
+# be used by the user to explicitly suppress the compiler message.
+flags hp_cxx.compile OPTIONS : -msg_display_tag ;
+
+flags hp_cxx.compile OPTIONS <cflags> ;
+flags hp_cxx.compile.c++ OPTIONS <cxxflags> ;
+flags hp_cxx.compile DEFINES <define> ;
+flags hp_cxx.compile INCLUDES <include> ;
+flags hp_cxx.link OPTIONS <linkflags> ;
+
+flags hp_cxx.link LIBPATH <library-path> ;
+flags hp_cxx.link LIBRARIES <library-file> ;
+flags hp_cxx.link FINDLIBS-ST <find-static-library> ;
+flags hp_cxx.link FINDLIBS-SA <find-shared-library> ;
+
+flags hp_cxx.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) -noimplicit_include $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lrt -lm
+}
+
+# When creating dynamic libraries, we do not want to be warned about
+# unresolved symbols; therefore all unresolved symbols are marked as expected
+# by '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
+# chain.
+
+actions link.dll bind LIBRARIES
+{
+ $(CONFIG_COMMAND) -shared -expect_unresolved \* -noimplicit_include $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lm
+}
+
+
+# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI
+# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std
+# is the default, no special flag is needed.
+actions compile.c
+{
+ $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Note: The compiler is forced to compile the files as C++ (-x cxx) because
+# otherwise it will silently ignore files with no file extension.
+#
+# Note: We deliberately don't suppress any warnings on the compiler command
+# line, the user can always do this in a customized toolset later on.
+
+rule compile.c++
+{
+ # We preprocess the TEMPLATE_DEPTH command line option here because we found
+ # no way to do it correctly in the actual action code. There we either get
+ # the -pending_instantiations parameter when no c++-template-depth property
+ # has been specified or we get additional quotes around
+ # "-pending_instantiations ".
+ local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
+ TEMPLATE_DEPTH on $(1) = "-pending_instantiations "$(template-depth) ;
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -x cxx -c -std strict_ansi -nopure_cname -noimplicit_include -timplicit_local -ptr "$(<[1]:D)/cxx_repository" $(OPTIONS) $(TEMPLATE_DEPTH) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Always create the archive from scratch. See the gcc toolset for the rationale.
+RM = [ common.rm-command ] ;
+actions together piecemeal archive
+{
+ $(RM) "$(<)"
+ ar rc $(<) $(>)
+}
diff --git a/jam-files/boost-build/tools/hpfortran.jam b/jam-files/boost-build/tools/hpfortran.jam
new file mode 100644
index 000000000..96e8d18b5
--- /dev/null
+++ b/jam-files/boost-build/tools/hpfortran.jam
@@ -0,0 +1,35 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags hpfortran OPTIONS <optimization>off : -O0 ;
+flags hpfortran OPTIONS <optimization>speed : -O3 ;
+flags hpfortran OPTIONS <optimization>space : -O1 ;
+
+flags hpfortran OPTIONS <debug-symbols>on : -g ;
+flags hpfortran OPTIONS <profiling>on : -pg ;
+
+flags hpfortran DEFINES <define> ;
+flags hpfortran INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ f77 +DD64 $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler hpfortran.compile.fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/ifort.jam b/jam-files/boost-build/tools/ifort.jam
new file mode 100644
index 000000000..eb7c19881
--- /dev/null
+++ b/jam-files/boost-build/tools/ifort.jam
@@ -0,0 +1,44 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags ifort OPTIONS <fflags> ;
+
+flags ifort OPTIONS <optimization>off : /Od ;
+flags ifort OPTIONS <optimization>speed : /O3 ;
+flags ifort OPTIONS <optimization>space : /O1 ;
+
+flags ifort OPTIONS <debug-symbols>on : /debug:full ;
+flags ifort OPTIONS <profiling>on : /Qprof_gen ;
+
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
+
+flags ifort DEFINES <define> ;
+flags ifort INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ ifort $(FFLAGS) $(OPTIONS) /names:lowercase /D$(DEFINES) /I"$(INCLUDES)" /c /object:"$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler ifort.compile.fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/intel-darwin.jam b/jam-files/boost-build/tools/intel-darwin.jam
new file mode 100644
index 000000000..aa0fd8fb6
--- /dev/null
+++ b/jam-files/boost-build/tools/intel-darwin.jam
@@ -0,0 +1,220 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import intel ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset intel : platform : darwin ;
+
+toolset.inherit-generators intel-darwin
+ <toolset>intel <toolset-intel:platform>darwin
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override intel-darwin.prebuilt : builtin.lib-generator ;
+generators.override intel-darwin.prebuilt : builtin.prebuilt ;
+generators.override intel-darwin.searched-lib-generator : searched-lib-generator ;
+
+toolset.inherit-rules intel-darwin : gcc ;
+toolset.inherit-flags intel-darwin : gcc
+ : <inlining>off <inlining>on <inlining>full <optimization>space
+ <warnings>off <warnings>all <warnings>on
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# vectorization diagnostics
+feature vectorize : off on full ;
+
+# Initializes the intel-darwin toolset.
+# version is mandatory.
+# name (default icc) is used to invoke the specified Intel compiler.
+# compile and link options allow you to specify additional command line
+# options for each version.
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters intel-darwin
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command intel-darwin : icc
+ : $(command) : /opt/intel_cc_80/bin ] ;
+
+ common.handle-options intel-darwin : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags intel-darwin darwin $(condition) ;
+
+ # handle <library-path>
+ # local library-path = [ feature.get-values <library-path> : $(options) ] ;
+ # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ local bin ;
+ if $(command) || $(root)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+
+ if $(root)
+ {
+ # Libraries required to run the executable may be in either
+ # $(root)/lib (10.1 and earlier)
+ # or
+ # $(root)/lib/architecture-name (11.0 and later).
+ local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
+ }
+ flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+ }
+
+ local m = [ MATCH (..).* : $(version) ] ;
+ local n = [ MATCH (.)\\. : $(m) ] ;
+ if $(n) {
+ m = $(n) ;
+ }
+
+ local major = $(m) ;
+
+ if $(major) = "9" {
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ;
+ }
+ else {
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ;
+ }
+
+ local minor = [ MATCH ".*\\.(.).*" : $(version) ] ;
+
+ # wchar_t char_traits workaround for compilers older than 10.2
+ if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) {
+ flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ;
+ }
+}
+
+SPACE = " " ;
+
+flags intel-darwin.compile OPTIONS <cflags> ;
+flags intel-darwin.compile OPTIONS <cxxflags> ;
+# flags intel-darwin.compile INCLUDES <include> ;
+
+flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc
+
+#
+cpu-type-em64t = prescott nocona ;
+flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>32 : -m32 ; # -mcmodel=small ;
+flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>64 : -m64 ; # -mcmodel=large ;
+
+flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ;
+flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ;
+flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ;
+
+flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ;
+flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ;
+flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+flags intel-darwin ARFLAGS <archiveflags> ;
+
+# Default value. Mostly for the sake of intel-linux
+# that inherits from gcc, but does not have the same
+# logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble
+# as on Linux, 'ar' is always available.
+.AR = ar ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove the archive and start again. Here is the rationale from
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a.
+ # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
+ # My program was crashing with absurd errors.
+ # After some debugging I traced it back to the fact that a1.o was *still*
+ # in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM
+ # onto the archive action. That makes archives fail to build on NT
+ # when they have many files because it will no longer execute the
+ # action directly and blow the line length limit. Instead we
+ # remove the file in a different action, just before the building
+ # of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "ranlib" -cs "$(<)"
+}
+
+flags intel-darwin.link USER_OPTIONS <linkflags> ;
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
diff --git a/jam-files/boost-build/tools/intel-linux.jam b/jam-files/boost-build/tools/intel-linux.jam
new file mode 100644
index 000000000..d9164add8
--- /dev/null
+++ b/jam-files/boost-build/tools/intel-linux.jam
@@ -0,0 +1,250 @@
+# Copyright (c) 2003 Michael Stevens
+# Copyright (c) 2011 Bryce Lelbach
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+
+import intel ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+import type ;
+import numbers ;
+
+feature.extend-subfeature toolset intel : platform : linux ;
+
+toolset.inherit-generators intel-linux
+ <toolset>intel <toolset-intel:platform>linux : gcc : gcc.mingw.link gcc.mingw.link.dll ;
+generators.override intel-linux.prebuilt : builtin.lib-generator ;
+generators.override intel-linux.prebuilt : builtin.prebuilt ;
+generators.override intel-linux.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override intel-linux.compile.c.pch : pch.default-c-pch-generator ;
+generators.override intel-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH : <toolset>intel <toolset-intel:platform>linux : pchi ;
+
+toolset.inherit-rules intel-linux : gcc ;
+toolset.inherit-flags intel-linux : gcc
+ : <inlining>off <inlining>on <inlining>full
+ <optimization>space <optimization>speed
+ <warnings>off <warnings>all <warnings>on
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Initializes the intel-linux toolset.
+# version is mandatory.
+# name (default icpc) is used to invoke the specified intel-linux compiler.
+# compile and link options allow you to specify additional command line
+# options for each version.
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters intel-linux
+ : version $(version) ] ;
+
+ if $(.debug-configuration)
+ {
+ ECHO "notice: intel-linux version is" $(version) ;
+ }
+
+ local default_path ;
+
+ # Intel C++ Composer XE 2011 for Linux, aka Intel C++ Compiler XE 12.0,
+ # aka intel-linux-12.0. In this version, Intel thankfully decides to install
+ # to a sane 'intel' folder in /opt.
+ if [ MATCH "(12[.]0|12)" : $(version) ]
+ { default_path = /opt/intel/bin ; }
+ # Intel C++ Compiler 11.1.
+ else if [ MATCH "(11[.]1)" : $(version) ]
+ { default_path = /opt/intel_cce_11.1.064.x86_64/bin ; }
+ # Intel C++ Compiler 11.0.
+ else if [ MATCH "(11[.]0|11)" : $(version) ]
+ { default_path = /opt/intel_cce_11.0.074.x86_64/bin ; }
+ # Intel C++ Compiler 10.1.
+ else if [ MATCH "(10[.]1)" : $(version) ]
+ { default_path = /opt/intel_cce_10.1.013_x64/bin ; }
+ # Intel C++ Compiler 9.1.
+ else if [ MATCH "(9[.]1)" : $(version) ]
+ { default_path = /opt/intel_cc_91/bin ; }
+ # Intel C++ Compiler 9.0.
+ else if [ MATCH "(9[.]0|9)" : $(version) ]
+ { default_path = /opt/intel_cc_90/bin ; }
+ # Intel C++ Compiler 8.1.
+ else if [ MATCH "(8[.]1)" : $(version) ]
+ { default_path = /opt/intel_cc_81/bin ; }
+ # Intel C++ Compiler 8.0 - this used to be the default, so now it's the
+ # fallback.
+ else
+ { default_path = /opt/intel_cc_80/bin ; }
+
+ if $(.debug-configuration)
+ {
+ ECHO "notice: default search path for intel-linux is" $(default_path) ;
+ }
+
+ command = [ common.get-invocation-command intel-linux : icpc
+ : $(command) : $(default_path) ] ;
+
+ common.handle-options intel-linux : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags intel-linux gnu $(condition) ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ local bin ;
+ if $(command) || $(root)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+
+ local command-string = $(command:J=" ") ;
+ local version-output = [ SHELL "$(command-string) --version" ] ;
+ local real-version = [ MATCH "([0-9.]+)" : $(version-output) ] ;
+ local major = [ MATCH "([0-9]+).*" : $(real-version) ] ;
+
+ # If we failed to determine major version, use the behaviour for
+ # the current compiler.
+ if $(major) && [ numbers.less $(major) 10 ]
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-Ob0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-Ob1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-Ob2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+ else if $(major) && [ numbers.less $(major) 11 ]
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+ else # newer versions of intel do have -Os (at least 11+; not sure about 10)
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-Os" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+
+ if $(root)
+ {
+ # Libraries required to run the executable may be in either
+ # $(root)/lib (10.1 and earlier)
+ # or
+ # $(root)/lib/architecture-name (11.0 and later).
+ local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
+ }
+ flags intel-linux.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+ }
+}
+
+SPACE = " " ;
+
+flags intel-linux.compile OPTIONS <warnings>off : -w0 ;
+flags intel-linux.compile OPTIONS <warnings>on : -w1 ;
+flags intel-linux.compile OPTIONS <warnings>all : -w2 ;
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+actions compile.c++ bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -xc++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+actions compile.c bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -xc $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c++.pch ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+#
+# Compiling a pch first deletes any existing *.pchi file, as Intel's compiler
+# won't overwrite an existing pch: instead it creates filename$1.pchi, filename$2.pchi
+# etc., which appear to do nothing except take up disk space :-(
+#
+actions compile.c++.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "ifort" -c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
+}
+
+rule link ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
+}
+
+# Differs from 'link' above only by -shared.
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
+
+
diff --git a/jam-files/boost-build/tools/intel-win.jam b/jam-files/boost-build/tools/intel-win.jam
new file mode 100644
index 000000000..691b5dce9
--- /dev/null
+++ b/jam-files/boost-build/tools/intel-win.jam
@@ -0,0 +1,184 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Importing common is needed because the rules we inherit here depend on it.
+# That is nasty.
+import common ;
+import errors ;
+import feature ;
+import intel ;
+import msvc ;
+import os ;
+import toolset ;
+import generators ;
+import type ;
+
+feature.extend-subfeature toolset intel : platform : win ;
+
+toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ;
+toolset.inherit-flags intel-win : msvc : : YLOPTION ;
+toolset.inherit-rules intel-win : msvc ;
+
+# Override default do-nothing generators.
+generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ;
+generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ;
+generators.override intel-win.compile.rc : rc.compile.resource ;
+generators.override intel-win.compile.mc : mc.compile ;
+
+toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ;
+
+toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ;
+
+# Initializes the intel toolset for windows
+rule init ( version ? : # the compiler version
+ command * : # the command to invoke the compiler itself
+ options * # Additional option: <compatibility>
+ # either 'vc6', 'vc7', 'vc7.1'
+ # or 'native' (default).
+ )
+{
+ local compatibility =
+ [ feature.get-values <compatibility> : $(options) ] ;
+ local condition = [ common.check-init-parameters intel-win
+ : version $(version) : compatibility $(compatibility) ] ;
+
+ command = [ common.get-invocation-command intel-win : icl.exe :
+ $(command) ] ;
+
+ common.handle-options intel-win : $(condition) : $(command) : $(options) ;
+
+ local root ;
+ if $(command)
+ {
+ root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root = $(root)/ ;
+ }
+
+ local setup ;
+ setup = [ GLOB $(root) : iclvars_*.bat ] ;
+ if ! $(setup)
+ {
+ setup = $(root)/iclvars.bat ;
+ }
+ setup = "call \""$(setup)"\" > nul " ;
+
+ if [ os.name ] = NT
+ {
+ setup = $(setup)"
+" ;
+ }
+ else
+ {
+ setup = "cmd /S /C "$(setup)" \"&&\" " ;
+ }
+
+ toolset.flags intel-win.compile .CC $(condition) : $(setup)icl ;
+ toolset.flags intel-win.link .LD $(condition) : $(setup)xilink ;
+ toolset.flags intel-win.archive .LD $(condition) : $(setup)xilink /lib ;
+ toolset.flags intel-win.link .MT $(condition) : $(setup)mt -nologo ;
+ toolset.flags intel-win.compile .MC $(condition) : $(setup)mc ;
+ toolset.flags intel-win.compile .RC $(condition) : $(setup)rc ;
+
+ local m = [ MATCH (.).* : $(version) ] ;
+ local major = $(m[1]) ;
+
+ local C++FLAGS ;
+
+ C++FLAGS += /nologo ;
+
+ # Reduce the number of spurious error messages
+ C++FLAGS += /Qwn5 /Qwd985 ;
+
+ # Enable ADL
+ C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too
+
+ # Disable Microsoft "secure" overloads in Dinkumware libraries since they
+ # cause compile errors with Intel versions 9 and 10.
+ C++FLAGS += -D_SECURE_SCL=0 ;
+
+ if $(major) > 5
+ {
+ C++FLAGS += /Zc:forScope ; # Add support for correct for loop scoping.
+ }
+
+ # Add options recognized only by intel7 and above.
+ if $(major) >= 7
+ {
+ C++FLAGS += /Qansi_alias ;
+ }
+
+ if $(compatibility) = vc6
+ {
+ C++FLAGS +=
+ # Emulate VC6
+ /Qvc6
+
+ # No wchar_t support in vc6 dinkum library. Furthermore, in vc6
+ # compatibility-mode, wchar_t is not a distinct type from unsigned
+ # short.
+ -DBOOST_NO_INTRINSIC_WCHAR_T
+ ;
+ }
+ else
+ {
+ if $(major) > 5
+ {
+ # Add support for wchar_t
+ C++FLAGS += /Zc:wchar_t
+ # Tell the dinkumware library about it.
+ -D_NATIVE_WCHAR_T_DEFINED
+ ;
+ }
+ }
+
+ if $(compatibility) && $(compatibility) != native
+ {
+ C++FLAGS += /Q$(base-vc) ;
+ }
+ else
+ {
+ C++FLAGS +=
+ -Qoption,cpp,--arg_dep_lookup
+ # The following options were intended to disable the Intel compiler's
+ # 'bug-emulation' mode, but were later reported to be causing ICE with
+ # Intel-Win 9.0. It is not yet clear which options can be safely used.
+ # -Qoption,cpp,--const_string_literals
+ # -Qoption,cpp,--new_for_init
+ # -Qoption,cpp,--no_implicit_typename
+ # -Qoption,cpp,--no_friend_injection
+ # -Qoption,cpp,--no_microsoft_bugs
+ ;
+ }
+
+ toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ;
+ # By default, when creating a PCH, intel adds 'i' to the explicitly
+ # specified name of the PCH file. Of course, Boost.Build is not
+ # happy when the compiler produces a file other than the one it was
+ # asked for. The option below stops this behaviour.
+ toolset.flags intel-win CFLAGS : -Qpchi- ;
+
+ if ! $(compatibility)
+ {
+ # If there's no backend version, assume 7.1.
+ compatibility = vc7.1 ;
+ }
+
+ local extract-version = [ MATCH ^vc(.*) : $(compatibility) ] ;
+ if ! $(extract-version)
+ {
+ errors.user-error "Invalid value for compatibility option:"
+ $(compatibility) ;
+ }
+
+ # Depending on the settings, running tests may require some runtime DLLs.
+ toolset.flags intel-win RUN_PATH $(condition) : $(root) ;
+
+ msvc.configure-version-specific intel-win : $(extract-version[1]) : $(condition) ;
+}
+
+toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ;
+
+toolset.flags intel-win YLOPTION ;
+
diff --git a/jam-files/boost-build/tools/intel.jam b/jam-files/boost-build/tools/intel.jam
new file mode 100644
index 000000000..67038aa28
--- /dev/null
+++ b/jam-files/boost-build/tools/intel.jam
@@ -0,0 +1,34 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This is a generic 'intel' toolset. Depending on the current
+# system, it forwards either to 'intel-linux' or 'intel-win'
+# modules.
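+#
+# Illustrative usage sketch (not part of the original file): an entry such as
+# the one below in user-config.jam would be forwarded to the platform-specific
+# module; the version number and compiler path are hypothetical.
+#
+#   using intel : 11.1 : /opt/intel/bin/icpc ;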
+
+import feature ;
+import os ;
+import toolset ;
+
+feature.extend toolset : intel ;
+feature.subfeature toolset intel : platform : : propagated link-incompatible ;
+
+rule init ( * : * )
+{
+ if [ os.name ] = LINUX
+ {
+ toolset.using intel-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else if [ os.name ] = MACOSX
+ {
+ toolset.using intel-darwin :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using intel-win :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+}
diff --git a/jam-files/boost-build/tools/lex.jam b/jam-files/boost-build/tools/lex.jam
new file mode 100644
index 000000000..75d641318
--- /dev/null
+++ b/jam-files/boost-build/tools/lex.jam
@@ -0,0 +1,33 @@
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+import generators ;
+import feature ;
+import property ;
+
+
+feature.feature flex.prefix : : free ;
+type.register LEX : l ;
+type.register LEX++ : ll ;
+generators.register-standard lex.lex : LEX : C ;
+generators.register-standard lex.lex : LEX++ : CPP ;
+
+rule init ( )
+{
+}
+
+rule lex ( target : source : properties * )
+{
+ local r = [ property.select flex.prefix : $(properties) ] ;
+ if $(r)
+ {
+ PREFIX on $(<) = $(r:G=) ;
+ }
+}
+
+actions lex
+{
+ flex -P$(PREFIX) -o$(<) $(>)
+}
diff --git a/jam-files/boost-build/tools/make.jam b/jam-files/boost-build/tools/make.jam
new file mode 100644
index 000000000..085672857
--- /dev/null
+++ b/jam-files/boost-build/tools/make.jam
@@ -0,0 +1,72 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'make' main target rule.
+
+import "class" : new ;
+import errors : error ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import targets ;
+
+
+class make-target-class : basic-target
+{
+ import type regex virtual-target ;
+ import "class" : new ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local action-name = [ $(property-set).get <action> ] ;
+ # 'm' will always be set -- we add '@' ourselves in the 'make' rule
+ # below.
+ local m = [ MATCH ^@(.*) : $(action-name) ] ;
+
+ local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ;
+ local t = [ new file-target $(self.name) exact : [ type.type
+ $(self.name) ] : $(self.project) : $(a) ] ;
+ return [ property-set.empty ] [ virtual-target.register $(t) ] ;
+ }
+}
+
+
+# Declares the 'make' main target.
+#
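+# A minimal usage sketch (not part of the original file), assuming a Jamfile
+# that supplies its own generating action; the target, source, and action
+# names are hypothetical:
+#
+#   make out.txt : in.txt : @do-convert ;
+#   actions do-convert
+#   {
+#       cp "$(>)" "$(<)"
+#   }
+#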
+rule make ( target-name : sources * : generating-rule + : requirements * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ # The '@' sign causes the feature.jam module to qualify the rule name with
+ # the module name of the current project, if needed.
+ local m = [ MATCH ^(@).* : $(generating-rule) ] ;
+ if ! $(m)
+ {
+ generating-rule = @$(generating-rule) ;
+ }
+ requirements += <action>$(generating-rule) ;
+
+ targets.main-target-alternative
+ [ new make-target-class $(target-name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ;
+}
+
+
+IMPORT $(__name__) : make : : make ;
diff --git a/jam-files/boost-build/tools/mc.jam b/jam-files/boost-build/tools/mc.jam
new file mode 100644
index 000000000..578377735
--- /dev/null
+++ b/jam-files/boost-build/tools/mc.jam
@@ -0,0 +1,44 @@
+#~ Copyright 2005 Alexey Pakhunov.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for the Microsoft message compiler tool.
+# Notes:
+# - there is only a message compiler tool; there is no tool for
+# extracting message strings from sources
+# - this file allows using the Microsoft message compiler
+# with any toolset. In msvc.jam, there is a more specific
+# message compiling action.
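+#
+# Illustrative usage sketch (not part of the original file): listing a .mc file
+# among an exe's sources and selecting encodings via the features declared
+# below; the target and file names are hypothetical.
+#
+#   exe server : server.cpp messages.mc
+#       : <mc-input-encoding>ansi <mc-output-encoding>unicode ;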
+
+import common ;
+import generators ;
+import feature : feature get-values ;
+import toolset : flags ;
+import type ;
+import rc ;
+
+rule init ( )
+{
+}
+
+type.register MC : mc ;
+
+
+# Command line options
+feature mc-input-encoding : ansi unicode : free ;
+feature mc-output-encoding : unicode ansi : free ;
+feature mc-set-customer-bit : no yes : free ;
+
+flags mc.compile MCFLAGS <mc-input-encoding>ansi : -a ;
+flags mc.compile MCFLAGS <mc-input-encoding>unicode : -u ;
+flags mc.compile MCFLAGS <mc-output-encoding>ansi : -A ;
+flags mc.compile MCFLAGS <mc-output-encoding>unicode : -U ;
+flags mc.compile MCFLAGS <mc-set-customer-bit>no : ;
+flags mc.compile MCFLAGS <mc-set-customer-bit>yes : -c ;
+
+generators.register-standard mc.compile : MC : H RC ;
+
+actions compile
+{
+ mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
+}
diff --git a/jam-files/boost-build/tools/message.jam b/jam-files/boost-build/tools/message.jam
new file mode 100644
index 000000000..212d8542c
--- /dev/null
+++ b/jam-files/boost-build/tools/message.jam
@@ -0,0 +1,55 @@
+# Copyright 2008 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target type 'message', which prints a message when built for
+# the first time.
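+#
+# Illustrative usage sketch (not part of the original file); the target name
+# and text are hypothetical:
+#
+#   message notice : "This is printed only the first time the target is built." ;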
+
+import project ;
+import "class" : new ;
+import targets ;
+import property-set ;
+
+class message-target-class : basic-target
+{
+ rule __init__ ( name-and-dir : project : * )
+ {
+ basic-target.__init__ $(name-and-dir) : $(project) ;
+ self.3 = $(3) ;
+ self.4 = $(4) ;
+ self.5 = $(5) ;
+ self.6 = $(6) ;
+ self.7 = $(7) ;
+ self.8 = $(8) ;
+ self.9 = $(9) ;
+ self.built = ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ if ! $(self.built)
+ {
+ for i in 3 4 5 6 7 8 9
+ {
+ if $(self.$(i))
+ {
+ ECHO $(self.$(i)) ;
+ }
+ }
+ self.built = 1 ;
+ }
+
+ return [ property-set.empty ] ;
+ }
+}
+
+
+rule message ( name : * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new message-target-class $(name) : $(project)
+ : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) ] ;
+}
+IMPORT $(__name__) : message : : message ; \ No newline at end of file
diff --git a/jam-files/boost-build/tools/midl.jam b/jam-files/boost-build/tools/midl.jam
new file mode 100644
index 000000000..0aa5dda31
--- /dev/null
+++ b/jam-files/boost-build/tools/midl.jam
@@ -0,0 +1,142 @@
+# Copyright (c) 2005 Alexey Pakhunov.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Microsoft Interface Definition Language (MIDL) related routines
+
+import common ;
+import generators ;
+import feature : feature get-values ;
+import os ;
+import scanner ;
+import toolset : flags ;
+import type ;
+
+rule init ( )
+{
+}
+
+type.register IDL : idl ;
+
+# A type library (.tlb) is generated by the MIDL compiler and can be included
+# in an application's resources (.rc). In order to be found by the resource
+# compiler, its target type should be derived from 'H' - otherwise
+# the '<implicit-dependency>' property will be ignored.
+type.register MSTYPELIB : tlb : H ;
+
+
+# Register scanner for MIDL files
+class midl-scanner : scanner
+{
+ import path property-set regex scanner type virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ self.includes = $(includes) ;
+
+ # List of quoted strings
+ self.re-strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
+
+ # 'import' and 'importlib' directives
+ self.re-import = "import"$(self.re-strings)"[ \t]*;" ;
+ self.re-importlib = "importlib[ \t]*[(]"$(self.re-strings)"[)][ \t]*;" ;
+
+ # C preprocessor 'include' directive
+ self.re-include-angle = "#[ \t]*include[ \t]*<(.*)>" ;
+ self.re-include-quoted = "#[ \t]*include[ \t]*\"(.*)\"" ;
+ }
+
+ rule pattern ( )
+ {
+ # Match '#include', 'import' and 'importlib' directives
+ return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local included-angle = [ regex.transform $(matches) : $(self.re-include-angle) : 1 ] ;
+ local included-quoted = [ regex.transform $(matches) : $(self.re-include-quoted) : 1 ] ;
+ local imported = [ regex.transform $(matches) : $(self.re-import) : 1 3 ] ;
+ local imported_tlbs = [ regex.transform $(matches) : $(self.re-importlib) : 1 3 ] ;
+
+ # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ # Attach the binding of the including file to the included targets.
+ # When a target is created directly from a virtual target,
+ # this extra information is unnecessary. But in other
+ # cases, it makes it possible to distinguish between two headers of
+ # the same name included from different places.
+ local g2 = $(g)"#"$(b) ;
+
+ included-angle = $(included-angle:G=$(g)) ;
+ included-quoted = $(included-quoted:G=$(g2)) ;
+ imported = $(imported:G=$(g2)) ;
+ imported_tlbs = $(imported_tlbs:G=$(g2)) ;
+
+ local all = $(included-angle) $(included-quoted) $(imported) ;
+
+ INCLUDES $(target) : $(all) ;
+ DEPENDS $(target) : $(imported_tlbs) ;
+ NOCARE $(all) $(imported_tlbs) ;
+ SEARCH on $(included-angle) = $(self.includes:G=) ;
+ SEARCH on $(included-quoted) = $(b) $(self.includes:G=) ;
+ SEARCH on $(imported) = $(b) $(self.includes:G=) ;
+ SEARCH on $(imported_tlbs) = $(b) $(self.includes:G=) ;
+
+ scanner.propagate
+ [ type.get-scanner CPP : [ property-set.create $(self.includes) ] ] :
+ $(included-angle) $(included-quoted) : $(target) ;
+
+ scanner.propagate $(__name__) : $(imported) : $(target) ;
+ }
+}
+
+scanner.register midl-scanner : include ;
+type.set-scanner IDL : midl-scanner ;
+
+
+# Command line options
+feature midl-stubless-proxy : yes no : propagated ;
+feature midl-robust : yes no : propagated ;
+
+flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>yes : /Oicf ;
+flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>no : /Oic ;
+flags midl.compile.idl MIDLFLAGS <midl-robust>yes : /robust ;
+flags midl.compile.idl MIDLFLAGS <midl-robust>no : /no_robust ;
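+
+# Illustrative sketch (not part of the original file): like other propagated
+# features, these can be set in a target's requirements; the target and file
+# names below are hypothetical.
+#
+#   exe server : server.cpp server.idl : <midl-robust>no <midl-stubless-proxy>no ;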
+
+# Architecture-specific options
+architecture-x86 = <architecture> <architecture>x86 ;
+address-model-32 = <address-model> <address-model>32 ;
+address-model-64 = <address-model> <address-model>64 ;
+
+flags midl.compile.idl MIDLFLAGS $(architecture-x86)/$(address-model-32) : /win32 ;
+flags midl.compile.idl MIDLFLAGS $(architecture-x86)/<address-model>64 : /x64 ;
+flags midl.compile.idl MIDLFLAGS <architecture>ia64/$(address-model-64) : /ia64 ;
+
+
+flags midl.compile.idl DEFINES <define> ;
+flags midl.compile.idl UNDEFS <undef> ;
+flags midl.compile.idl INCLUDES <include> ;
+
+
+generators.register-c-compiler midl.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) ;
+
+
+# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
+# depends on the contents of the source IDL file. Calling TOUCH_FILE below
+# ensures that both files will be created so that bjam will not try to recreate
+# them constantly.
+TOUCH_FILE = [ common.file-touch-command ] ;
+
+actions compile.idl
+{
+ midl /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")"
+ $(TOUCH_FILE) "$(<[4]:W)"
+ $(TOUCH_FILE) "$(<[5]:W)"
+}
diff --git a/jam-files/boost-build/tools/mipspro.jam b/jam-files/boost-build/tools/mipspro.jam
new file mode 100644
index 000000000..417eaefcf
--- /dev/null
+++ b/jam-files/boost-build/tools/mipspro.jam
@@ -0,0 +1,145 @@
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import fortran ;
+import type ;
+import common ;
+
+feature.extend toolset : mipspro ;
+toolset.inherit mipspro : unix ;
+generators.override mipspro.prebuilt : builtin.lib-generator ;
+generators.override mipspro.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description are located at
+# http://www.sgi.com/products/software/irix/tools/
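+#
+# Illustrative configuration sketch (not part of the original file); the
+# version and compiler command are hypothetical:
+#
+#   using mipspro : 7.4 : CC ;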
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters mipspro : version $(version) ] ;
+
+ command = [ common.get-invocation-command mipspro : CC : $(command) ] ;
+
+ common.handle-options mipspro : $(condition) : $(command) : $(options) ;
+
+ command_c = $(command_c[1--2]) $(command[-1]:B=cc) ;
+
+ toolset.flags mipspro CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ # fortran support
+ local command = [
+ common.get-invocation-command mipspro : f77 : $(command) : $(install_dir) ] ;
+
+ command_f = $(command_f[1--2]) $(command[-1]:B=f77) ;
+ toolset.flags mipspro CONFIG_F_COMMAND $(condition) : $(command_f) ;
+
+ # set link flags
+ flags mipspro.link FINDLIBS-ST : [
+ feature.get-values <find-static-library> : $(options) ] : unchecked ;
+
+ flags mipspro.link FINDLIBS-SA : [
+ feature.get-values <find-shared-library> : $(options) ] : unchecked ;
+}
+
+# Declare generators
+generators.register-c-compiler mipspro.compile.c : C : OBJ : <toolset>mipspro ;
+generators.register-c-compiler mipspro.compile.c++ : CPP : OBJ : <toolset>mipspro ;
+generators.register-fortran-compiler mipspro.compile.fortran : FORTRAN : OBJ : <toolset>mipspro ;
+
+cpu-arch-32 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32 ;
+
+cpu-arch-64 =
+ <architecture>/<address-model>64 ;
+
+flags mipspro.compile OPTIONS $(cpu-arch-32) : -n32 ;
+flags mipspro.compile OPTIONS $(cpu-arch-64) : -64 ;
+
+# Declare flags and actions for compilation
+flags mipspro.compile OPTIONS <debug-symbols>on : -g ;
+# flags mipspro.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags mipspro.compile OPTIONS <warnings>off : -w ;
+flags mipspro.compile OPTIONS <warnings>on : -ansiW -diag_suppress 1429 ; # suppress 'long long is nonstandard' warning
+flags mipspro.compile OPTIONS <warnings>all : -fullwarn ;
+flags mipspro.compile OPTIONS <optimization>speed : -Ofast ;
+flags mipspro.compile OPTIONS <optimization>space : -O2 ;
+flags mipspro.compile OPTIONS <cflags> : -LANG:std ;
+flags mipspro.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
+flags mipspro.compile.c++ OPTIONS <cxxflags> ;
+flags mipspro.compile DEFINES <define> ;
+flags mipspro.compile INCLUDES <include> ;
+
+
+flags mipspro.compile.fortran OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags mipspro.link OPTIONS <debug-symbols>on : -g ;
+# Strip the binary when no debugging is needed
+# flags mipspro.link OPTIONS <debug-symbols>off : -s ;
+# flags mipspro.link OPTIONS <profiling>on : -xprofile=tcov ;
+# flags mipspro.link OPTIONS <threading>multi : -mt ;
+
+flags mipspro.link OPTIONS $(cpu-arch-32) : -n32 ;
+flags mipspro.link OPTIONS $(cpu-arch-64) : -64 ;
+
+flags mipspro.link OPTIONS <optimization>speed : -Ofast ;
+flags mipspro.link OPTIONS <optimization>space : -O2 ;
+flags mipspro.link OPTIONS <linkflags> ;
+flags mipspro.link LINKPATH <library-path> ;
+flags mipspro.link FINDLIBS-ST <find-static-library> ;
+flags mipspro.link FINDLIBS-SA <find-shared-library> ;
+flags mipspro.link FINDLIBS-SA <threading>multi : pthread ;
+flags mipspro.link LIBRARIES <library-file> ;
+flags mipspro.link LINK-RUNTIME <runtime-link>static : static ;
+flags mipspro.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags mipspro.link RPATH <dll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -lm
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Declare action for creating static libraries
+actions piecemeal archive
+{
+ ar -cr "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/mpi.jam b/jam-files/boost-build/tools/mpi.jam
new file mode 100644
index 000000000..0fe490bec
--- /dev/null
+++ b/jam-files/boost-build/tools/mpi.jam
@@ -0,0 +1,583 @@
+# Support for the Message Passing Interface (MPI)
+#
+# (C) Copyright 2005, 2006 Trustees of Indiana University
+# (C) Copyright 2005 Douglas Gregor
+#
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
+#
+# Authors: Douglas Gregor
+# Andrew Lumsdaine
+#
+# ==== MPI Configuration ====
+#
+# For many users, MPI support can be enabled simply by adding the following
+# line to your user-config.jam file:
+#
+# using mpi ;
+#
+# This should auto-detect MPI settings based on the MPI wrapper compiler in
+# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or
+# has a different name, you can pass the name of the wrapper compiler as the
+# first argument to the mpi module:
+#
+# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ;
+#
+# If your MPI implementation does not have a wrapper compiler, or the MPI
+# auto-detection code does not work with your MPI's wrapper compiler,
+# you can pass MPI-related options explicitly via the second parameter to the
+# mpi module:
+#
+# using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++
+# <find-shared-library>mpi <find-shared-library>lam
+# <find-shared-library>dl ;
+#
+# To see the results of MPI auto-detection, pass "--debug-configuration" on
+# the bjam command line.
+#
+# The (optional) fourth argument configures Boost.MPI for running
+# regression tests. These parameters specify the executable used to
+# launch jobs (default: "mpirun"), followed by any arguments needed to
+# run tests and to tell the launcher that the number of processes
+# follows (default: "-np"). With the default parameters,
+# for instance, the test harness will execute, e.g.,
+#
+# mpirun -np 4 all_gather_test
+#
+# ==== Linking Against the MPI Libraries ===
+#
+# To link against the MPI libraries, import the "mpi" module and add the
+# following requirement to your target:
+#
+# <library>/mpi//mpi
+#
+# Since MPI support is not always available, you should check
+# "mpi.configured" before trying to link against the MPI libraries.
+
+import "class" : new ;
+import common ;
+import feature : feature ;
+import generators ;
+import os ;
+import project ;
+import property ;
+import testing ;
+import toolset ;
+import type ;
+import path ;
+
+# Make this module a project
+project.initialize $(__name__) ;
+project mpi ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Assuming the first part of the command line is the given prefix
+# followed by some non-empty value, remove the first argument. Returns
+# either nothing (if there was no prefix or no value) or a pair
+#
+# <name>value rest-of-cmdline
+#
+# This is a subroutine of cmdline_to_features
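+#
+# For illustration (not part of the original file): a call such as
+#
+#   [ add_feature "-I" "include" "-I/usr/include -O2" ]
+#
+# would be expected to return
+#
+#   <include>/usr/include "-O2"
+#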
+rule add_feature ( prefix name cmdline )
+{
+ local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+
+ # If there was no value associated with the prefix, abort
+ if ! $(match) {
+ return ;
+ }
+
+ local value = $(match[1]) ;
+
+ if [ MATCH " +" : $(value) ] {
+ value = "\"$(value)\"" ;
+ }
+
+ return "<$(name)>$(value)" $(match[2]) ;
+}
+
+# Strip any end-of-line characters off the given string and return the
+# result.
+rule strip-eol ( string )
+{
+ local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ;
+
+ if $(match)
+ {
+ return $(match[1]) ;
+ }
+ else
+ {
+ return $(string) ;
+ }
+}
+
+# Split a command-line into a set of features. Certain kinds of
+# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced
+# with their Boost.Build equivalents (e.g., <include>, <define>,
+# <library-path>, <find-library>). All other arguments are introduced
+# using the features in the unknown-features parameter, because we
+# don't know how to deal with them. The incoming command line should
+# be a string starting with an executable (e.g., "g++ -I/include/path")
+# and may contain any number of command-line arguments thereafter. The
+# result is a list of features corresponding to the given command line,
+# ignoring the executable.
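+#
+# For illustration (not part of the original file): a hypothetical wrapper
+# command line such as
+#
+#   "mpic++ -I/usr/lib/mpi/include -DMPICH_IGNORE_CXX_SEEK -L/usr/lib -lmpich -pthread"
+#
+# would be expected to yield roughly
+#
+#   <include>/usr/lib/mpi/include <define>MPICH_IGNORE_CXX_SEEK
+#   <library-path>/usr/lib <find-shared-library>mpich <threading>multi
+#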
+rule cmdline_to_features ( cmdline : unknown-features ? )
+{
+ local executable ;
+ local features ;
+ local otherflags ;
+ local result ;
+
+ unknown-features ?= <cxxflags> <linkflags> ;
+
+ # Pull the executable out of the command line. At this point, the
+ # executable is just thrown away.
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+ executable = $(match[1]) ;
+ cmdline = $(match[2]) ;
+
+ # List the prefix/feature pairs that we will be able to transform.
+ # Every kind of parameter not mentioned here will be placed in both
+ # cxxflags and linkflags, because we don't know where they should go.
+ local feature_kinds-D = "define" ;
+ local feature_kinds-I = "include" ;
+ local feature_kinds-L = "library-path" ;
+ local feature_kinds-l = "find-shared-library" ;
+
+ while $(cmdline) {
+
+ # Check for one of the feature prefixes we know about. If we
+ # find one (and the associated value is nonempty), convert it
+ # into a feature.
+ local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ;
+ local matched ;
+ if $(match) && $(match[2]) {
+ local prefix = $(match[1]) ;
+ if $(feature_kinds$(prefix)) {
+ local name = $(feature_kinds$(prefix)) ;
+ local add = [ add_feature $(prefix) $(name) $(cmdline) ] ;
+
+ if $(add) {
+
+ if $(add[1]) = <find-shared-library>pthread
+ {
+ # Uhm. It's not really nice that this MPI implementation
+ # uses -lpthread as opposed to -pthread. We do want to
+ # set <threading>multi, instead of -lpthread.
+ result += "<threading>multi" ;
+ MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
+ }
+ else
+ {
+ result += $(add[1]) ;
+ }
+
+ cmdline = $(add[2]) ;
+ matched = yes ;
+ }
+ }
+ }
+
+ # If we haven't matched a feature prefix, just grab the command-line
+ # argument itself. If we can map this argument to a feature
+ # (e.g., -pthread -> <threading>multi), then do so; otherwise,
+ # add it to the list of "other" flags that we don't
+ # understand.
+ if ! $(matched) {
+ match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+ local value = $(match[1]) ;
+ cmdline = $(match[2]) ;
+
+ # Check for multithreading support
+ if $(value) = "-pthread" || $(value) = "-pthreads"
+ {
+ result += "<threading>multi" ;
+
+ # DPG: This is a hack intended to work around a BBv2 bug where
+ # requirements propagated from libraries are not checked for
+ # conflicts when BBv2 determines which "common" properties to
+ # apply to a target. In our case, the <threading>single property
+ # gets propagated from the common properties to Boost.MPI
+ # targets, even though <threading>multi is in the usage
+ # requirements of <library>/mpi//mpi.
+ MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
+ }
+ else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] {
+ otherflags += $(value) ;
+ }
+ }
+ }
+
+ # If there are other flags that we don't understand, add them to the
+ # result as both <cxxflags> and <linkflags>
+ if $(otherflags) {
+ for unknown in $(unknown-features)
+ {
+ result += "$(unknown)$(otherflags:J= )" ;
+ }
+ }
+
+ return $(result) ;
+}
+
+# Determine if it is safe to execute the given shell command by trying
+# to execute it and determining whether the exit code is zero or
+# not. Returns true for an exit code of zero, false otherwise.
+local rule safe-shell-command ( cmdline )
+{
+ local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ;
+ return [ MATCH ".*(SSCOK).*" : $(result) ] ;
+}
+
+# Initialize the MPI module.
+rule init ( mpicxx ? : options * : mpirun-with-options * )
+{
+ if ! $(options) && $(.debug-configuration)
+ {
+ ECHO "===============MPI Auto-configuration===============" ;
+ }
+
+ if ! $(mpicxx) && [ os.on-windows ]
+ {
+ # Try to auto-configure to the Microsoft Compute Cluster Pack
+ local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ;
+ local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ;
+ if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ;
+ }
+
+ # Pick up either the 32-bit or 64-bit library, depending on which address
+ # model the user has selected. Default to 32-bit.
+ options = <include>$(cluster_pack_path)/Include
+ <address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64
+ <library-path>$(cluster_pack_path)/Lib/i386
+ <find-static-library>msmpi
+ <toolset>msvc:<define>_SECURE_SCL=0
+ ;
+
+ # Setup the "mpirun" equivalent (mpiexec)
+ .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ;
+ .mpirun_flags = -n ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ;
+ }
+ }
+
+ if ! $(options)
+ {
+ # Try to auto-detect options based on the wrapper compiler
+ local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ;
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpiCC", which is used by MPICH
+ command = [ common.get-invocation-command mpi : mpiCC ] ;
+ }
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpicxx", which is used by OpenMPI and MPICH2
+ command = [ common.get-invocation-command mpi : mpicxx ] ;
+ }
+
+ local result ;
+ local compile_flags ;
+ local link_flags ;
+
+ if ! $(command)
+ {
+ # Do nothing: we'll complain later
+ }
+ # OpenMPI and newer versions of LAM-MPI have -showme:compile and
+ # -showme:link.
+ else if [ safe-shell-command "$(command) -showme:compile" ] &&
+ [ safe-shell-command "$(command) -showme:link" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ;
+ }
+
+ compile_flags = [ SHELL "$(command) -showme:compile" ] ;
+ link_flags = [ SHELL "$(command) -showme:link" ] ;
+
+ # Prepend COMPILER as the executable name, to match the format of
+ # other compilation commands.
+ compile_flags = "COMPILER $(compile_flags)" ;
+ link_flags = "COMPILER $(link_flags)" ;
+ }
+ # Look for LAM-MPI's -showme
+ else if [ safe-shell-command "$(command) -showme" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found older LAM-MPI wrapper compiler: $(command)" ;
+ }
+
+ result = [ SHELL "$(command) -showme" ] ;
+ }
+ # Look for MPICH
+ else if [ safe-shell-command "$(command) -show" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found MPICH wrapper compiler: $(command)" ;
+ }
+ compile_flags = [ SHELL "$(command) -compile_info" ] ;
+ link_flags = [ SHELL "$(command) -link_info" ] ;
+ }
+ # Sun HPC and Ibm POE
+ else if [ SHELL "$(command) -v 2>/dev/null" ]
+ {
+ compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ;
+
+ local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ;
+ if $(back)
+ {
+ # Sun HPC
+ if $(.debug-configuration)
+ {
+ ECHO "Found Sun MPI wrapper compiler: $(command)" ;
+ }
+
+ compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ;
+ compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ;
+ link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ;
+ link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ;
+ link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ;
+
+ # strip out -v from compile options
+ local front = [ MATCH "(.*)-v" : $(link_flags) ] ;
+ local back = [ MATCH "-v(.*)" : $(link_flags) ] ;
+ link_flags = "$(front) $(back)" ;
+ front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ;
+ back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ;
+ link_flags = "$(front) $(back)" ;
+ }
+ else
+ {
+ # Ibm POE
+ if $(.debug-configuration)
+ {
+ ECHO "Found IBM MPI wrapper compiler: $(command)" ;
+ }
+
+ #
+ compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ;
+ compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ;
+ local front = [ MATCH "(.*)-v" : $(compile_flags) ] ;
+ local back = [ MATCH "-v(.*)" : $(compile_flags) ] ;
+ compile_flags = "$(front) $(back)" ;
+ front = [ MATCH "(.*)-c" : $(compile_flags) ] ;
+ back = [ MATCH "-c(.*)" : $(compile_flags) ] ;
+ compile_flags = "$(front) $(back)" ;
+ link_flags = $(compile_flags) ;
+
+ # get location of mpif.h from mpxlf
+ local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ;
+ f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ;
+ front = [ MATCH "(.*)-v" : $(f_flags) ] ;
+ back = [ MATCH "-v(.*)" : $(f_flags) ] ;
+ f_flags = "$(front) $(back)" ;
+ f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ;
+ f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ;
+ compile_flags = [ strip-eol $(compile_flags) ] ;
+ compile_flags = "$(compile_flags) $(f_flags)" ;
+ }
+ }
+
+ if $(result) || $(compile_flags) && $(link_flags)
+ {
+ if $(result)
+ {
+ result = [ strip-eol $(result) ] ;
+ options = [ cmdline_to_features $(result) ] ;
+ }
+ else
+ {
+ compile_flags = [ strip-eol $(compile_flags) ] ;
+ link_flags = [ strip-eol $(link_flags) ] ;
+
+ # Separately process compilation and link features, then combine
+ # them at the end.
+ local compile_features = [ cmdline_to_features $(compile_flags)
+ : "<cxxflags>" ] ;
+ local link_features = [ cmdline_to_features $(link_flags)
+ : "<linkflags>" ] ;
+ options = $(compile_features) $(link_features) ;
+ }
+
+ # If requested, display MPI configuration information.
+ if $(.debug-configuration)
+ {
+ if $(result)
+ {
+ ECHO " Wrapper compiler command line: $(result)" ;
+ }
+ else
+ {
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
+ : $(compile_flags) ] ;
+ ECHO "MPI compilation flags: $(match[2])" ;
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
+ : $(link_flags) ] ;
+ ECHO "MPI link flags: $(match[2])" ;
+ }
+ }
+ }
+ else
+ {
+ if $(command)
+ {
+ ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
+ ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
+ }
+ else if $(mpicxx)
+ {
+ ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
+ }
+ else
+ {
+ ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
+ }
+ ECHO "You will need to manually configure MPI support." ;
+ }
+
+ }
+
+ # Find mpirun (or its equivalent) and its flags
+ if ! $(.mpirun)
+ {
+ .mpirun =
+ [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
+ .mpirun_flags = $(mpirun-with-options[2-]) ;
+ .mpirun_flags ?= -np ;
+ }
+
+ if $(.debug-configuration)
+ {
+ if $(options)
+ {
+ echo "MPI build features: " ;
+ ECHO $(options) ;
+ }
+
+ if $(.mpirun)
+ {
+ echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ;
+ }
+
+ ECHO "====================================================" ;
+ }
+
+ if $(options)
+ {
+ .configured = true ;
+
+ # Set up the "mpi" alias
+ alias mpi : : : : $(options) ;
+ }
+}
+
+# States whether MPI has been configured.
+rule configured ( )
+{
+ return $(.configured) ;
+}
+
+# Returns the "extra" requirements needed to build MPI. These requirements are
+# part of the /mpi//mpi library target, but they need to be added to anything
+# that uses MPI directly to work around bugs in BBv2's propagation of
+# requirements.
+rule extra-requirements ( )
+{
+ return $(MPI_EXTRA_REQUIREMENTS) ;
+}
+
+# Support for testing; borrowed from Python
+type.register RUN_MPI_OUTPUT ;
+type.register RUN_MPI : : TEST ;
+
+class mpi-test-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ self.composing = true ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ # Generate an executable from the sources. This is the executable we will run.
+ local executable =
+ [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;
+
+ result =
+ [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
+ }
+}
+
+# Use mpi-test-generator to generate MPI tests from sources
+generators.register
+ [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;
+
+generators.register-standard testing.expect-success
+ : RUN_MPI_OUTPUT : RUN_MPI ;
+
+# The number of processes to spawn when executing an MPI test.
+feature mpi:processes : : free incidental ;
+
+# The flag settings on testing.capture-output do not
+# apply to mpi.capture-output at the moment.
+# Redo this explicitly.
+toolset.flags mpi.capture-output ARGS <testing.arg> ;
+rule capture-output ( target : sources * : properties * )
+{
+ # Use the standard capture-output rule to run the tests
+ testing.capture-output $(target) : $(sources[1]) : $(properties) ;
+
+ # Determine the number of processes we should run on.
+ local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
+ num_processes = $(num_processes:G=) ;
+
+ # serialize the MPI tests to avoid overloading systems
+ JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ;
+
+ # We launch MPI processes using the "mpirun" equivalent specified by the user.
+ LAUNCHER on $(target) =
+ [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
+}
+
+# Creates a set of test cases to be run through the MPI launcher. The name, sources,
+# and requirements are the same as for any other test generator. However, schedule is
+# a list of numbers, which indicates how many processes each test run will use. For
+# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then
+# 7 processes. The name provided is just the base name: the actual tests will be
+# the name followed by a hyphen, then the number of processes.
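+#
+# Illustrative sketch (not part of the original file); the test name, source
+# and schedule are hypothetical:
+#
+#   mpi-test all_gather : all_gather_test.cpp : : 2 4 ;
+#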
+rule mpi-test ( name : sources * : requirements * : schedule * )
+{
+ sources ?= $(name).cpp ;
+ schedule ?= 1 2 3 4 7 8 13 17 ;
+
+ local result ;
+ for processes in $(schedule)
+ {
+ result += [ testing.make-test
+ run-mpi : $(sources) /boost/mpi//boost_mpi
+ : $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes) : $(name)-$(processes) ] ;
+ }
+ return $(result) ;
+}
diff --git a/jam-files/boost-build/tools/msvc-config.jam b/jam-files/boost-build/tools/msvc-config.jam
new file mode 100644
index 000000000..6c71e3b00
--- /dev/null
+++ b/jam-files/boost-build/tools/msvc-config.jam
@@ -0,0 +1,12 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for the Visual Studio toolset. To use, just import this module.
+
+import toolset : using ;
+
+ECHO "warning: msvc-config.jam is deprecated. Use 'using msvc : all ;' instead." ;
+
+using msvc : all ;
+
diff --git a/jam-files/boost-build/tools/msvc.jam b/jam-files/boost-build/tools/msvc.jam
new file mode 100644
index 000000000..e33a66d22
--- /dev/null
+++ b/jam-files/boost-build/tools/msvc.jam
@@ -0,0 +1,1392 @@
+# Copyright (c) 2003 David Abrahams.
+# Copyright (c) 2005 Vladimir Prus.
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2006 Bojan Resnik.
+# Copyright (c) 2006 Ilya Sokolov.
+# Copyright (c) 2007 Rene Rivera
+# Copyright (c) 2008 Jurko Gospodnetic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+################################################################################
+#
+# MSVC Boost Build toolset module.
+# --------------------------------
+#
+# All toolset versions need to have their location either auto-detected or
+# explicitly specified except for the special 'default' version that expects the
+# environment to find the needed tools or report an error.
+#
+################################################################################
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import mc ;
+import midl ;
+import os ;
+import path ;
+import pch ;
+import property ;
+import rc ;
+import toolset ;
+import type ;
+
+
+type.register MANIFEST : manifest ;
+feature.feature embed-manifest : on off : incidental propagated ;
+
+type.register PDB : pdb ;
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Initialize a specific toolset version configuration. As a result, the path to
+# the compiler and, possibly, program names are set up, and will be used when
+# that version of the compiler is requested. For example, you might have:
+#
+# using msvc : 6.5 : cl.exe ;
+# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
+#
+# The version parameter may be omitted:
+#
+# using msvc : : Z:/foo/bar/cl.exe ;
+#
+# The following keywords have special meanings when specified as versions:
+# - all - all detected but not yet used versions will be marked as used
+# with their default options.
+# - default - this is an equivalent to an empty version.
+#
+# Depending on the supplied version, the detected configurations, and the presence
+# of 'cl.exe' in the path, different results may be achieved. The following table describes
+# the possible scenarios:
+#
+# Nothing "x.y"
+# Passed Nothing "x.y" detected, detected,
+# version detected detected cl.exe in path cl.exe in path
+#
+# default Error Use "x.y" Create "default" Use "x.y"
+# all None Use all None Use all
+# x.y - Use "x.y" - Use "x.y"
+# a.b Error Error Create "a.b" Create "a.b"
+#
+# "x.y" - refers to a detected version;
+# "a.b" - refers to an undetected version.
+#
+# FIXME: Currently the command parameter and the <compiler> property parameter
+# seem to overlap in duties. Remove this duplication. This seems to be related
+# to why someone started preparing to replace init with configure rules.
+#
+rule init (
+ # The msvc version being configured. When omitted, the tools invoked when no
+ # explicit version is given will be configured.
+ version ?
+
+ # The command used to invoke the compiler. If not specified:
+ # - if version is given, default location for that version will be
+ # searched
+ #
+ # - if version is not given, default locations for MSVC 9.0, 8.0, 7.1, 7.0
+ # and 6.* will be searched
+ #
+ # - if compiler is not found in the default locations, PATH will be
+ # searched.
+ : command *
+
+ # Options may include:
+ #
+ # All options shared by multiple toolset types as handled by the
+ # common.handle-options() rule, e.g. <cflags>, <compileflags>, <cxxflags>,
+ # <fflags> & <linkflags>.
+ #
+ # <assembler>
+ # <compiler>
+ # <idl-compiler>
+ # <linker>
+ # <mc-compiler>
+ # <resource-compiler>
+ # Exact tool names to be used by this msvc toolset configuration.
+ #
+ # <compiler-filter>
+ # Command through which to pipe the output of running the compiler.
+ # For example to pass the output to STLfilt.
+ #
+ # <setup>
+ # Global setup command to invoke before running any of the msvc tools.
+ # It will be passed additional option parameters depending on the actual
+ # target platform.
+ #
+ # <setup-amd64>
+ # <setup-i386>
+ # <setup-ia64>
+ # Platform-specific setup command to invoke before running any of the
+ # msvc tools used when building a target for a specific platform, e.g.
+ # when building a 32 or 64 bit executable.
+ : options *
+)
+{
+ if $(command)
+ {
+ options += <command>$(command) ;
+ }
+ configure $(version) : $(options) ;
+}
+
+
+# 'configure' is a newer version of 'init'. The parameter 'command' is passed as
+# a part of the 'options' list. See the 'init' rule comment for more detailed
+# information.
+#
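+# An illustrative call (not part of the original file); the version and the
+# compiler path are hypothetical:
+#
+#   configure 9.0 : <command>"C:/Program Files/Microsoft Visual Studio 9.0/VC/bin/cl.exe" ;
+#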
+rule configure ( version ? : options * )
+{
+ switch $(version)
+ {
+ case "all" :
+ if $(options)
+ {
+ errors.error "MSVC toolset configuration: options should be"
+ "empty when '$(version)' is specified." ;
+ }
+
+ # Configure (i.e. mark as used) all registered versions.
+ local all-versions = [ $(.versions).all ] ;
+ if ! $(all-versions)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "notice: [msvc-cfg] Asked to configure all registered"
+ "msvc toolset versions when there are none currently"
+ "registered." ;
+ }
+ }
+ else
+ {
+ for local v in $(all-versions)
+ {
+ # Note that there is no need to skip already-configured
+ # versions here: this requests the configure-really rule
+ # to configure the version using default options, which will
+ # in turn cause it to simply do nothing in case the version
+ # has already been configured.
+ configure-really $(v) ;
+ }
+ }
+
+ case "default" :
+ configure-really : $(options) ;
+
+ case * :
+ configure-really $(version) : $(options) ;
+ }
+}
+
+
+# Sets up flag definitions dependent on the compiler version used.
+# - 'version' is the version of the compiler in N.M format.
+# - 'conditions' is the property set to be used as flag conditions.
+# - 'toolset' is the toolset for which the flag settings are to be defined.
+# This makes the rule reusable for other msvc-option-compatible compilers.
+#
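+# For illustration (not part of the original file): intel-win.jam reuses this
+# rule for its msvc-compatible backend; a call with hypothetical values would
+# look like
+#
+#   msvc.configure-version-specific intel-win : 9.0 : <toolset>intel ;
+#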
+rule configure-version-specific ( toolset : version : conditions )
+{
+ toolset.push-checking-for-flags-module unchecked ;
+ # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
+ # /Zc:wchar_t options that improve C++ standard conformance, but those
+ # options are off by default. If we are sure that the msvc version is at
+ # least 7.*, add those options explicitly. We can be sure either if the user
+ # specified version 7.* explicitly or if we auto-detected the version ourselves.
+ if ! [ MATCH ^(6\\.) : $(version) ]
+ {
+ toolset.flags $(toolset).compile CFLAGS $(conditions) : /Zc:forScope /Zc:wchar_t ;
+ toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ;
+
+ # Explicitly disable the 'function is deprecated' warning. Some msvc
+ # versions have a bug, causing them to emit the deprecation warning even
+ # with /W0.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>off : /wd4996 ;
+
+ if [ MATCH ^([78]\\.) : $(version) ]
+ {
+ # 64-bit compatibility warning deprecated since 9.0, see
+ # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>all : /Wp64 ;
+ }
+ }
+
+ #
+ # Processor-specific optimization.
+ #
+
+ if [ MATCH ^([67]) : $(version) ]
+ {
+ # 8.0 deprecates some of the options.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed $(conditions)/<optimization>space : /Ogiy /Gs ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed : /Ot ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>space : /Os ;
+
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set> : /GB ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i386 : /G3 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i486 : /G4 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g5) : /G5 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g6) : /G6 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g7) : /G7 ;
+
+ # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
+ # tests will fail.
+ toolset.flags $(toolset).compile CFLAGS $(conditions) : /Op ;
+
+ # 7.1 and below have single-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
+ }
+ else
+ {
+ # 8.0 and above adds some more options.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set> : /favor:blend ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-em64t) : /favor:EM64T ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-amd64) : /favor:AMD64 ;
+
+ # 8.0 and above only has multi-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ;
+
+ # Specify target machine type so the linker will not need to guess.
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : /MACHINE:X64 ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386) : /MACHINE:X86 ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64) : /MACHINE:IA64 ;
+
+ # Make sure that the manifest will be generated even if there are no
+ # dependencies to put in it.
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/<embed-manifest>off : /MANIFEST ;
+ }
+ toolset.pop-checking-for-flags-module ;
+}
+
+
+# Registers this toolset including all of its flags, features & generators. Does
+# nothing on repeated calls.
+#
+rule register-toolset ( )
+{
+ if ! msvc in [ feature.values toolset ]
+ {
+ register-toolset-really ;
+ }
+}
+
+
+# Declare the action for creating static libraries. If the library exists, remove
+# it before adding files. See
+# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
+if [ os.name ] in NT
+{
+ # The 'DEL' command would issue a message to stdout if the file does not
+ # exist, so we need a check.
+ actions archive
+ {
+ if exist "$(<[1])" DEL "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+}
+else
+{
+ actions archive
+ {
+ $(.RM) "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+}
+
+
+# For the assembler the following options are turned on by default:
+#
+# -Zp4 align structures to 4 bytes
+# -Cp preserve case of user identifiers
+# -Cx preserve case in publics, externs
+#
+actions compile.asm
+{
+ $(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"
+}
+
+
+rule compile.c ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets) : -TC ;
+ compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c.preprocess ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets) : -TC ;
+ preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c.pch ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets[1]) : -TC ;
+ get-rspline $(targets[2]) : -TC ;
+ local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
+ if $(pch-source)
+ {
+ DEPENDS $(<) : $(pch-source) ;
+ compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
+ }
+ else
+ {
+ compile-c-c++-pch $(targets) : $(sources) ;
+ }
+}
+
+toolset.flags msvc YLOPTION : "-Yl" ;
+
+# Action for running the C/C++ compiler without using precompiled headers.
+#
+# WARNING: Synchronize any changes in this action with intel-win
+#
+# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database
+#
+# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
+#
+# 2. When compiling an executable's source files, PDB_NAME is set on a per-source-file basis by rule compile-c-c++.
+# The linker will pull these into the executable's PDB.
+#
+# 3. When compiling a library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,
+# as in this case the compiler must be used to create a single PDB for our library.
+#
+actions compile-c-c++ bind PDB_NAME
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
+}
+
+actions preprocess-c-c++ bind PDB_NAME
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"
+}
+
+rule compile-c-c++ ( targets + : sources * )
+{
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
+ PDB_NAME on $(<) = $(<:S=.pdb) ;
+}
+
+rule preprocess-c-c++ ( targets + : sources * )
+{
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
+ PDB_NAME on $(<) = $(<:S=.pdb) ;
+}
+
+# Action for running the C/C++ compiler using precompiled headers. In addition
+# to whatever else it needs to compile, this action also adds a temporary source
+# .cpp file used to compile the precompiled headers themselves.
+#
+# The global .escaped-double-quote variable is used to avoid messing up Emacs
+# syntax highlighting in the messy N-quoted code below.
+actions compile-c-c++-pch
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)
+}
+
+
+# Action for running the C/C++ compiler using precompiled headers. An already
+# built source file for compiling the precompiled headers is expected to be
+# given as one of the source parameters.
+actions compile-c-c++-pch-s
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
+}
+
+
+rule compile.c++ ( targets + : sources * : properties * )
+{
+ get-rspline $(targets) : -TP ;
+ compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+rule compile.c++.preprocess ( targets + : sources * : properties * )
+{
+ get-rspline $(targets) : -TP ;
+ preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c++.pch ( targets + : sources * : properties * )
+{
+ get-rspline $(targets[1]) : -TP ;
+ get-rspline $(targets[2]) : -TP ;
+ local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
+ if $(pch-source)
+ {
+ DEPENDS $(<) : $(pch-source) ;
+ compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
+ }
+ else
+ {
+ compile-c-c++-pch $(targets) : $(sources) ;
+ }
+}
+
+
+# See midl.jam for details.
+#
+actions compile.idl
+{
+ $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")"
+ $(.TOUCH_FILE) "$(<[4]:W)"
+ $(.TOUCH_FILE) "$(<[5]:W)"
+}
+
+
+actions compile.mc
+{
+ $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
+}
+
+
+actions compile.rc
+{
+ $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"
+}
+
+
+rule link ( targets + : sources * : properties * )
+{
+ if <embed-manifest>on in $(properties)
+ {
+ msvc.manifest $(targets) : $(sources) : $(properties) ;
+ }
+}
+
+rule link.dll ( targets + : sources * : properties * )
+{
+ DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
+ if <embed-manifest>on in $(properties)
+ {
+ msvc.manifest.dll $(targets) : $(sources) : $(properties) ;
+ }
+}
+
+# Incrementally linking a DLL causes no end of problems: if the actual exports do
+# not change, the import .lib file is never updated. Therefore, the .lib is
+# always out-of-date and gets rebuilt every time. I am not sure that incremental
+# linking is such a great idea in general, but in this case I am sure we do not
+# want it.
+
+# Windows manifest is a new way to specify dependencies on managed DotNet
+# assemblies and Windows native DLLs. The manifests are embedded as resources
+# and are useful in any PE target (both DLL and EXE).
+
+if [ os.name ] in NT
+{
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
+ }
+
+ actions manifest
+ {
+ if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+ )
+ }
+
+ actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
+ }
+
+ actions manifest.dll
+ {
+ if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+ )
+ }
+}
+else
+{
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+
+ actions manifest
+ {
+ if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);1"
+ fi
+ }
+
+ actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+
+ actions manifest.dll
+ {
+ if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2"
+ fi
+ }
+}
+
+# This rule sets up the .pdb file that will be used when generating static
+# libraries with the debug-store option set to 'database', so that the compiler
+# puts all debug info into a single .pdb file named after the library.
+#
+# Poking at source targets this way is probably not clean, but it's the
+# easiest approach.
+rule archive ( targets + : sources * : properties * )
+{
+ PDB_NAME on $(>) = $(<:S=.pdb) ;
+}
+
+################################################################################
+#
+# Classes.
+#
+################################################################################
+
+class msvc-pch-generator : pch-generator
+{
+ import property-set ;
+
+ rule run-pch ( project name ? : property-set : sources * )
+ {
+ # Searching for the header and source file in the sources.
+ local pch-header ;
+ local pch-source ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] H ]
+ {
+ pch-header = $(s) ;
+ }
+ else if
+ [ type.is-derived [ $(s).type ] CPP ] ||
+ [ type.is-derived [ $(s).type ] C ]
+ {
+ pch-source = $(s) ;
+ }
+ }
+
+ if ! $(pch-header)
+ {
+ errors.user-error "can not build pch without pch-header" ;
+ }
+
+ # If we do not have the PCH source - that is fine. We will just create a
+ # temporary .cpp file in the action.
+
+ local generated = [ generator.run $(project) $(name)
+ : [ property-set.create
+ # Passing of <pch-source> is a dirty trick, needed because
+ # non-composing generators with multiple inputs are subtly
+ # broken. For more detailed information see:
+ # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
+ <pch-source>$(pch-source)
+ [ $(property-set).raw ] ]
+ : $(pch-header) ] ;
+
+ local pch-file ;
+ for local g in $(generated)
+ {
+ if [ type.is-derived [ $(g).type ] PCH ]
+ {
+ pch-file = $(g) ;
+ }
+ }
+
+ return [ property-set.create <pch-header>$(pch-header)
+ <pch-file>$(pch-file) ] $(generated) ;
+ }
+}
+
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Detects versions listed in '.known-versions' by checking registry information,
+# environment variables & default paths. Supports both native Windows and
+# Cygwin.
+#
+local rule auto-detect-toolset-versions ( )
+{
+ if [ os.name ] in NT CYGWIN
+ {
+ # Get installation paths from the registry.
+ for local i in $(.known-versions)
+ {
+ if $(.version-$(i)-reg)
+ {
+ local vc-path ;
+ for local x in "" "Wow6432Node\\"
+ {
+ vc-path += [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg)
+ : "ProductDir" ] ;
+ }
+
+ if $(vc-path)
+ {
+ vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ;
+ register-configuration $(i) : [ path.native $(vc-path[1]) ] ;
+ }
+ }
+ }
+ }
+
+ # Check environment and default installation paths.
+ for local i in $(.known-versions)
+ {
+ if ! $(i) in [ $(.versions).all ]
+ {
+ register-configuration $(i) : [ default-path $(i) ] ;
+ }
+ }
+}
+
+
+# Worker rule for toolset version configuration. Takes an explicit version id or
+# nothing in case it should configure the default toolset version (the first
+# registered one or a new 'default' one in case no toolset versions have been
+# registered yet).
+#
+local rule configure-really ( version ? : options * )
+{
+ local v = $(version) ;
+
+ # Decide what the 'default' version is.
+ if ! $(v)
+ {
+ # Take the first registered (i.e. auto-detected) version.
+ version = [ $(.versions).all ] ;
+ version = $(version[1]) ;
+ v = $(version) ;
+
+ # Note: 'version' can still be empty at this point if no versions have
+ # been auto-detected.
+ version ?= "default" ;
+ }
+
+ # Version alias -> real version number.
+ if $(.version-alias-$(version))
+ {
+ version = $(.version-alias-$(version)) ;
+ }
+
+ # Check whether the selected configuration is already in use.
+ if $(version) in [ $(.versions).used ]
+ {
+ # Allow multiple 'toolset.using' calls for the same configuration if
+ # identical sets of options are used.
+ if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] )
+ {
+ errors.error "MSVC toolset configuration: Toolset version"
+ "'$(version)' already configured." ;
+ }
+ }
+ else
+ {
+ # Register a new configuration.
+ $(.versions).register $(version) ;
+
+ # Add user-supplied options to the auto-detected options.
+ options = [ $(.versions).get $(version) : options ] $(options) ;
+
+ # Mark the configuration as 'used'.
+ $(.versions).use $(version) ;
+
+ # Generate conditions and save them.
+ local conditions = [ common.check-init-parameters msvc : version $(v) ]
+ ;
+
+ $(.versions).set $(version) : conditions : $(conditions) ;
+
+ local command = [ feature.get-values <command> : $(options) ] ;
+
+ # If version is specified, we try to search first in default paths, and
+ # only then in PATH.
+ command = [ common.get-invocation-command msvc : cl.exe : $(command) :
+ [ default-paths $(version) ] : $(version) ] ;
+
+ common.handle-options msvc : $(conditions) : $(command) : $(options) ;
+
+ if ! $(version)
+ {
+ # Even if version is not explicitly specified, try to detect the
+ # version from the path.
+ # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
+ # 9.0express as 9.0 here.
+ if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ]
+ {
+ version = 10.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ]
+ {
+ version = 9.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ]
+ {
+ version = 8.0 ;
+ }
+ else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ]
+ {
+ version = 7.1 ;
+ }
+ else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" :
+ $(command) ]
+ {
+ version = 7.1toolkit ;
+ }
+ else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ]
+ {
+ version = 7.0 ;
+ }
+ else
+ {
+ version = 6.0 ;
+ }
+ }
+
+ # Generate and register setup command.
+
+ local below-8.0 = [ MATCH ^([67]\\.) : $(version) ] ;
+
+ local cpu = i386 amd64 ia64 ;
+ if $(below-8.0)
+ {
+ cpu = i386 ;
+ }
+
+ local setup-amd64 ;
+ local setup-i386 ;
+ local setup-ia64 ;
+
+ if $(command)
+ {
+ # TODO: Note that if we specify a non-existent toolset version then
+ # this rule may find and use a compiler executable belonging to an
+ # incorrect toolset version. For example, if you have only MSVC 7.1
+ # installed, have its executable on the path and ask Boost Build to
+ # use MSVC 9.0, you would want Boost Build to report an error, but it
+ # may instead silently use the MSVC 7.1 compiler while thinking it is
+ # using the msvc-9.0 toolset version.
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+
+ if $(command)
+ {
+ local parent = [ path.make $(command) ] ;
+ parent = [ path.parent $(parent) ] ;
+ parent = [ path.native $(parent) ] ;
+
+ # Setup will be used if the command name has been specified. If
+ # setup is not specified explicitly then a default setup script will
+ # be used instead. Setup scripts may be global or
+ # architecture/platform/cpu specific. Setup options are used only in
+ # the case of global setup scripts.
+
+ # Default setup scripts provided with different VC distributions:
+ #
+ # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
+ # builds. It was located in the bin folder for the regular version
+ # and in the root folder for the free VC 7.1 tools.
+ #
+ # Later 8.0 & 9.0 versions introduce separate platform specific
+ # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
+ # located in or under the bin folder. Most also include a global
+ # vcvarsall.bat helper script located in the root folder which runs
+ # one of the aforementioned vcvars*.bat scripts based on the options
+ # passed to it. So far only the version coming with some PlatformSDK
+ # distributions does not include this top-level script, so to support
+ # those we need to fall back to using the worker scripts directly in
+ # case the top-level script can not be found.
+
+ local global-setup = [ feature.get-values <setup> : $(options) ] ;
+ global-setup = $(global-setup[1]) ;
+ if ! $(below-8.0)
+ {
+ global-setup ?= [ locate-default-setup $(command) : $(parent) :
+ vcvarsall.bat ] ;
+ }
+
+ local default-setup-amd64 = vcvarsx86_amd64.bat ;
+ local default-setup-i386 = vcvars32.bat ;
+ local default-setup-ia64 = vcvarsx86_ia64.bat ;
+
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
+ # mention an x86_IPF option, but that seems to be a documentation bug;
+ # x86_ia64 is the correct option.
+ local default-global-setup-options-amd64 = x86_amd64 ;
+ local default-global-setup-options-i386 = x86 ;
+ local default-global-setup-options-ia64 = x86_ia64 ;
+
+ # When using 64-bit Windows, and targeting 64-bit, it is possible to
+ # use a native 64-bit compiler, selected by the "amd64" & "ia64"
+ # parameters to vcvarsall.bat. There are two variables we can use --
+ # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
+ # 'x86' when running 32-bit Windows, no matter which processor is
+ # used, and 'AMD64' when running 64-bit Windows on an x86-family
+ # processor (either AMD64 or EM64T).
+ #
+ if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ]
+ {
+ default-global-setup-options-amd64 = amd64 ;
+ }
+ # TODO: The same 'native compiler usage' should be implemented for
+ # the Itanium platform by using the "ia64" parameter. For this
+ # though we need someone with access to this platform who can find
+ # out how to correctly detect this case.
+ else if $(somehow-detect-the-itanium-platform)
+ {
+ default-global-setup-options-ia64 = ia64 ;
+ }
+
+ local setup-prefix = "call " ;
+ local setup-suffix = " >nul"$(.nl) ;
+ if ! [ os.name ] in NT
+ {
+ setup-prefix = "cmd.exe /S /C call " ;
+ setup-suffix = " \">nul\" \"&&\" " ;
+ }
+
+ for local c in $(cpu)
+ {
+ local setup-options ;
+
+ setup-$(c) = [ feature.get-values <setup-$(c)> : $(options) ] ;
+
+ if ! $(setup-$(c))-is-not-empty
+ {
+ if $(global-setup)-is-not-empty
+ {
+ setup-$(c) = $(global-setup) ;
+
+ # If needed we can easily add using configuration flags
+ # here for overriding which options get passed to the
+ # global setup command for which target platform:
+ # setup-options = [ feature.get-values <setup-options-$(c)> : $(options) ] ;
+
+ setup-options ?= $(default-global-setup-options-$(c)) ;
+ }
+ else
+ {
+ setup-$(c) = [ locate-default-setup $(command) : $(parent) : $(default-setup-$(c)) ] ;
+ }
+ }
+
+ # Cygwin to Windows path translation.
+ setup-$(c) = "\""$(setup-$(c):W)"\"" ;
+
+ # Append setup options to the setup name and add the final setup
+ # prefix & suffix.
+ setup-options ?= "" ;
+ setup-$(c) = $(setup-prefix)$(setup-$(c):J=" ")" "$(setup-options:J=" ")$(setup-suffix) ;
+ }
+ }
+
+ # Get tool names (if any) and finish setup.
+
+ compiler = [ feature.get-values <compiler> : $(options) ] ;
+ compiler ?= cl ;
+
+ linker = [ feature.get-values <linker> : $(options) ] ;
+ linker ?= link ;
+
+ resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ;
+ resource-compiler ?= rc ;
+
+ # Turn on some options for the i386 assembler:
+ #   -coff: generate a COFF format object file (compatible with cl.exe output)
+ local default-assembler-amd64 = ml64 ;
+ local default-assembler-i386 = "ml -coff" ;
+ local default-assembler-ia64 = ias ;
+
+ assembler = [ feature.get-values <assembler> : $(options) ] ;
+
+ idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ;
+ idl-compiler ?= midl ;
+
+ mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ;
+ mc-compiler ?= mc ;
+
+ manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ;
+ manifest-tool ?= mt ;
+
+ local cc-filter = [ feature.get-values <compiler-filter> : $(options) ] ;
+
+ for local c in $(cpu)
+ {
+ # Setup script is not required in some configurations.
+ setup-$(c) ?= "" ;
+
+ local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ;
+
+ if $(.debug-configuration)
+ {
+ for local cpu-condition in $(cpu-conditions)
+ {
+ ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c))'" ;
+ }
+ }
+
+ local cpu-assembler = $(assembler) ;
+ cpu-assembler ?= $(default-assembler-$(c)) ;
+
+ toolset.flags msvc.compile .CC $(cpu-conditions) : $(setup-$(c))$(compiler) /Zm800 -nologo ;
+ toolset.flags msvc.compile .RC $(cpu-conditions) : $(setup-$(c))$(resource-compiler) ;
+ toolset.flags msvc.compile .ASM $(cpu-conditions) : $(setup-$(c))$(cpu-assembler) -nologo ;
+ toolset.flags msvc.link .LD $(cpu-conditions) : $(setup-$(c))$(linker) /NOLOGO /INCREMENTAL:NO ;
+ toolset.flags msvc.archive .LD $(cpu-conditions) : $(setup-$(c))$(linker) /lib /NOLOGO ;
+ toolset.flags msvc.compile .IDL $(cpu-conditions) : $(setup-$(c))$(idl-compiler) ;
+ toolset.flags msvc.compile .MC $(cpu-conditions) : $(setup-$(c))$(mc-compiler) ;
+
+ toolset.flags msvc.link .MT $(cpu-conditions) : $(setup-$(c))$(manifest-tool) -nologo ;
+
+ if $(cc-filter)
+ {
+ toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ;
+ }
+ }
+
+ # Set version-specific flags.
+ configure-version-specific msvc : $(version) : $(conditions) ;
+ }
+}
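+
+# For reference, a typical user-level configuration that eventually reaches the
+# rules above could look like the following in user-config.jam (the version,
+# setup path and flags are illustrative only, not taken from this module):
+#
+#   using msvc : 9.0 : : <setup>"C:/Tools/vcvarsall.bat" <cxxflags>/Zc:wchar_t ;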
+
+
+# Returns the default installation path for the given version.
+#
+local rule default-path ( version )
+{
+ # Use auto-detected path if possible.
+ local path = [ feature.get-values <command> : [ $(.versions).get $(version)
+ : options ] ] ;
+
+ if $(path)
+ {
+ path = $(path:D) ;
+ }
+ else
+ {
+ # Check environment.
+ if $(.version-$(version)-env)
+ {
+ local vc-path = [ os.environ $(.version-$(version)-env) ] ;
+ if $(vc-path)
+ {
+ vc-path = [ path.make $(vc-path) ] ;
+ vc-path = [ path.join $(vc-path) $(.version-$(version)-envpath) ] ;
+ vc-path = [ path.native $(vc-path) ] ;
+
+ path = $(vc-path) ;
+ }
+ }
+
+ # Check default path.
+ if ! $(path) && $(.version-$(version)-path)
+ {
+ path = [ path.native [ path.join $(.ProgramFiles) $(.version-$(version)-path) ] ] ;
+ }
+ }
+
+ return $(path) ;
+}
+
+
+# Returns either the default installation path (if 'version' is not empty) or
+# a list of all known default paths (if no version is given).
+#
+local rule default-paths ( version ? )
+{
+ local possible-paths ;
+
+ if $(version)
+ {
+ possible-paths += [ default-path $(version) ] ;
+ }
+ else
+ {
+ for local i in $(.known-versions)
+ {
+ possible-paths += [ default-path $(i) ] ;
+ }
+ }
+
+ return $(possible-paths) ;
+}
+
+
+rule get-rspline ( target : lang-opt )
+{
+ CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS)
+ $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(.nl)-D$(DEFINES)
+ $(.nl)\"-I$(INCLUDES:W)\" ] ;
+}
+
+class msvc-linking-generator : linking-generator
+{
+ # Calls the base version. If necessary, also creates additional targets
+ # for the .pdb file (when debug symbols are on) and for the .manifest
+ # file (when manifest embedding is off), forcing exact names so they pair
+ # with the main target.
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local result = [ linking-generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+
+ if $(result)
+ {
+ local name-main = [ $(result[0]).name ] ;
+ local action = [ $(result[0]).action ] ;
+
+ if [ $(property-set).get <debug-symbols> ] = "on"
+ {
+ # We force an exact name on the PDB. The reason is tagging -- the tag rule
+ # may reasonably special-case some target types, like SHARED_LIB. The tag
+ # rule will not catch PDB, and it cannot easily figure out whether the PDB
+ # is paired with a SHARED_LIB, an EXE or something else. Because the PDB
+ # always gets the same name as the main target, with .pdb as the extension,
+ # we just force it.
+ local target = [ class.new file-target $(name-main:S=.pdb) exact : PDB : $(project) : $(action) ] ;
+ local registered-target = [ virtual-target.register $(target) ] ;
+ if $(target) != $(registered-target)
+ {
+ $(action).replace-targets $(target) : $(registered-target) ;
+ }
+ result += $(registered-target) ;
+ }
+
+ if [ $(property-set).get <embed-manifest> ] = "off"
+ {
+ # The manifest is an evil target. It has .manifest appended to the name of
+ # the main target, including its extension, e.g. a.exe.manifest. We use an
+ # 'exact' name to achieve this effect.
+ local target = [ class.new file-target $(name-main).manifest exact : MANIFEST : $(project) : $(action) ] ;
+ local registered-target = [ virtual-target.register $(target) ] ;
+ if $(target) != $(registered-target)
+ {
+ $(action).replace-targets $(target) : $(registered-target) ;
+ }
+ result += $(registered-target) ;
+ }
+ }
+ return $(result) ;
+ }
+}
+
+
+
+# Unsafe worker rule for the register-toolset() rule. Must not be called
+# multiple times.
+#
+local rule register-toolset-really ( )
+{
+ feature.extend toolset : msvc ;
+
+ # Intel and msvc supposedly have link-compatible objects.
+ feature.subfeature toolset msvc : vendor : intel : propagated optional ;
+
+ # Inherit MIDL flags.
+ toolset.inherit-flags msvc : midl ;
+
+ # Inherit MC flags.
+ toolset.inherit-flags msvc : mc ;
+
+ # Dynamic runtime comes only in MT flavour.
+ toolset.add-requirements
+ <toolset>msvc,<runtime-link>shared:<threading>multi ;
+
+ # Declare msvc toolset specific features.
+ {
+ feature.feature debug-store : object database : propagated ;
+ feature.feature pch-source : : dependency free ;
+ }
+
+ # Declare generators.
+ {
+ # TODO: Is it possible to combine these? Make the generators
+ # non-composing so that they do not convert each source into a separate
+ # .rsp file.
+ generators.register [ new msvc-linking-generator
+ msvc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ] ;
+ generators.register [ new msvc-linking-generator
+ msvc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>msvc ] ;
+
+ generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>msvc ;
+
+ # Using 'register-c-compiler' adds the build directory to INCLUDES.
+ generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : <toolset>msvc ;
+ generators.override msvc.compile.rc : rc.compile.resource ;
+ generators.register-standard msvc.compile.asm : ASM : OBJ : <toolset>msvc ;
+
+ generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : <toolset>msvc ;
+ generators.override msvc.compile.idl : midl.compile.idl ;
+
+ generators.register-standard msvc.compile.mc : MC : H RC : <toolset>msvc ;
+ generators.override msvc.compile.mc : mc.compile ;
+
+ # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
+ # the latter have their HPP type derived from H. The type of compilation
+ # is determined entirely by the destination type.
+ generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : <pch>on <toolset>msvc ] ;
+ generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ;
+
+ generators.override msvc.compile.c.pch : pch.default-c-pch-generator ;
+ generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ;
+ }
+
+ toolset.flags msvc.compile PCH_FILE <pch>on : <pch-file> ;
+ toolset.flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ;
+ toolset.flags msvc.compile PCH_HEADER <pch>on : <pch-header> ;
+
+ #
+ # Declare flags for compilation.
+ #
+
+ toolset.flags msvc.compile CFLAGS <optimization>speed : /O2 ;
+ toolset.flags msvc.compile CFLAGS <optimization>space : /O1 ;
+
+ toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium) : /G1 ;
+ toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium2) : /G2 ;
+
+ toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>object : /Z7 ;
+ toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>database : /Zi ;
+ toolset.flags msvc.compile CFLAGS <optimization>off : /Od ;
+ toolset.flags msvc.compile CFLAGS <inlining>off : /Ob0 ;
+ toolset.flags msvc.compile CFLAGS <inlining>on : /Ob1 ;
+ toolset.flags msvc.compile CFLAGS <inlining>full : /Ob2 ;
+
+ toolset.flags msvc.compile CFLAGS <warnings>on : /W3 ;
+ toolset.flags msvc.compile CFLAGS <warnings>off : /W0 ;
+ toolset.flags msvc.compile CFLAGS <warnings>all : /W4 ;
+ toolset.flags msvc.compile CFLAGS <warnings-as-errors>on : /WX ;
+
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off : /EHs ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on : /EHsc ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ;
+
+ # By default 8.0 enables rtti support while prior versions disabled it. We
+ # simply enable or disable it explicitly so we do not have to depend on this
+ # default behaviour.
+ toolset.flags msvc.compile CFLAGS <rtti>on : /GR ;
+ toolset.flags msvc.compile CFLAGS <rtti>off : /GR- ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
+
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
+
+ toolset.flags msvc.compile OPTIONS <cflags> : ;
+ toolset.flags msvc.compile.c++ OPTIONS <cxxflags> : ;
+
+ toolset.flags msvc.compile PDB_CFLAG <debug-symbols>on/<debug-store>database : /Fd ;
+
+ toolset.flags msvc.compile DEFINES <define> ;
+ toolset.flags msvc.compile UNDEFS <undef> ;
+ toolset.flags msvc.compile INCLUDES <include> ;
+
+ # Declare flags for the assembler.
+ toolset.flags msvc.compile.asm USER_ASMFLAGS <asmflags> ;
+
+ toolset.flags msvc.compile.asm ASMFLAGS <debug-symbols>on : "/Zi /Zd" ;
+
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>on : /W3 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>off : /W0 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>all : /W4 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings-as-errors>on : /WX ;
+
+ toolset.flags msvc.compile.asm DEFINES <define> ;
+
+ # Declare flags for linking.
+ {
+ toolset.flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ; # not used yet
+ toolset.flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ;
+ toolset.flags msvc.link DEF_FILE <def-file> ;
+
+ # The linker disables the default optimizations when using /DEBUG so we
+ # have to enable them manually for release builds with debug symbols.
+ toolset.flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : /OPT:REF,ICF ;
+
+ toolset.flags msvc LINKFLAGS <user-interface>console : /subsystem:console ;
+ toolset.flags msvc LINKFLAGS <user-interface>gui : /subsystem:windows ;
+ toolset.flags msvc LINKFLAGS <user-interface>wince : /subsystem:windowsce ;
+ toolset.flags msvc LINKFLAGS <user-interface>native : /subsystem:native ;
+ toolset.flags msvc LINKFLAGS <user-interface>auto : /subsystem:posix ;
+
+ toolset.flags msvc.link OPTIONS <linkflags> ;
+ toolset.flags msvc.link LINKPATH <library-path> ;
+
+ toolset.flags msvc.link FINDLIBS_ST <find-static-library> ;
+ toolset.flags msvc.link FINDLIBS_SA <find-shared-library> ;
+ toolset.flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ;
+ toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
+ }
+
+ toolset.flags msvc.archive AROPTIONS <archiveflags> ;
+}
+
+
+# Locates the requested setup script under the given folder and returns its full
+# path or nothing in case the script can not be found. In case multiple scripts
+# are found only the first one is returned.
+#
+# TODO: There used to be a code comment for the msvc.init rule stating that we
+# do not correctly detect the location of the vcvars32.bat setup script for the
+# free VC7.1 tools in case the user explicitly provides a path. This should be
+# tested, or this whole comment simply removed if that toolset version is no
+# longer important.
+#
+local rule locate-default-setup ( command : parent : setup-name )
+{
+ local result = [ GLOB $(command) $(parent) : $(setup-name) ] ;
+ if $(result[1])
+ {
+ return $(result[1]) ;
+ }
+}
+
+
+# Validates given path, registers found configuration and prints debug
+# information about it.
+#
+local rule register-configuration ( version : path ? )
+{
+ if $(path)
+ {
+ local command = [ GLOB $(path) : cl.exe ] ;
+
+ if $(command)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "notice: [msvc-cfg] msvc-$(version) detected, command: '$(command)'" ;
+ }
+
+ $(.versions).register $(version) ;
+ $(.versions).set $(version) : options : <command>$(command) ;
+ }
+ }
+}
+
+
+################################################################################
+#
+# Startup code executed when loading this module.
+#
+################################################################################
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Miscellaneous constants.
+.RM = [ common.rm-command ] ;
+.nl = "
+" ;
+.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ;
+.escaped-double-quote = "\"" ;
+.TOUCH_FILE = [ common.file-touch-command ] ;
+
+# List of all registered configurations.
+.versions = [ new configurations ] ;
+
+# Supported CPU architectures.
+.cpu-arch-i386 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32
+ <architecture>x86/<address-model>
+ <architecture>x86/<address-model>32 ;
+
+.cpu-arch-amd64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.cpu-arch-ia64 =
+ <architecture>ia64/<address-model>
+ <architecture>ia64/<address-model>64 ;
+
+
+# Supported CPU types (only Itanium optimization options are supported from
+# VC++ 2005 on). See
+# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
+# detailed information.
+.cpu-type-g5 = i586 pentium pentium-mmx ;
+.cpu-type-g6 = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6
+ k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 ;
+.cpu-type-em64t = prescott nocona conroe conroe-xe conroe-l allendale mermon
+ mermon-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem ;
+.cpu-type-amd64 = k8 opteron athlon64 athlon-fx ;
+.cpu-type-g7 = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp
+ athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ;
+.cpu-type-itanium = itanium itanium1 merced ;
+.cpu-type-itanium2 = itanium2 mckinley ;
+
+
+# Known toolset versions, in order of preference.
+.known-versions = 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 7.1toolkit 7.0 6.0 ;
+
+# Version aliases.
+.version-alias-6 = 6.0 ;
+.version-alias-6.5 = 6.0 ;
+.version-alias-7 = 7.0 ;
+.version-alias-8 = 8.0 ;
+.version-alias-9 = 9.0 ;
+.version-alias-10 = 10.0 ;
+
+# Names of registry keys containing the Visual C++ installation path (relative
+# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
+.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ;
+.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ;
+.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ;
+.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ;
+.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ;
+.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ;
+.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ;
+.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ;
+.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ;
+
+# Visual C++ Toolkit 2003 does not store its installation path in the registry.
+# The environment variable 'VCToolkitInstallDir' and the default installation
+# path will be checked instead.
+.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003" "bin" ;
+.version-7.1toolkit-env = VCToolkitInstallDir ;
+
+# Path to the folder containing "cl.exe" relative to the value of the
+# corresponding environment variable.
+.version-7.1toolkit-envpath = "bin" ;
+
+
+# Auto-detect all the available msvc installations on the system.
+auto-detect-toolset-versions ;
+
+
+# And finally trigger the actual Boost Build toolset registration.
+register-toolset ;
diff --git a/jam-files/boost-build/tools/notfile.jam b/jam-files/boost-build/tools/notfile.jam
new file mode 100644
index 000000000..97a5b0e87
--- /dev/null
+++ b/jam-files/boost-build/tools/notfile.jam
@@ -0,0 +1,74 @@
+# Copyright (c) 2005 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+import project ;
+import targets ;
+import toolset ;
+import type ;
+
+
+type.register NOTFILE_MAIN ;
+
+
+class notfile-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local action ;
+ local action-name = [ $(property-set).get <action> ] ;
+
+ local m = [ MATCH ^@(.*) : $(action-name) ] ;
+
+ if $(m)
+ {
+ action = [ new action $(sources) : $(m[1])
+ : $(property-set) ] ;
+ }
+ else
+ {
+ action = [ new action $(sources) : notfile.run
+ : $(property-set) ] ;
+ }
+ return [ virtual-target.register
+ [ new notfile-target $(name) : $(project) : $(action) ] ] ;
+ }
+}
+
+
+generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ;
+
+
+toolset.flags notfile.run ACTION : <action> ;
+
+
+actions run
+{
+ $(ACTION)
+}
+
+
+rule notfile ( target-name : action + : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ requirements += <action>$(action) ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target-name) : $(project) : NOTFILE_MAIN
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
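+
+# A minimal usage sketch for a Jamfile (the target name and command are
+# illustrative): building the 'hello' target simply runs the given command.
+#
+#   notfile hello : "echo hello" ;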
+
+IMPORT $(__name__) : notfile : : notfile ;
diff --git a/jam-files/boost-build/tools/package.jam b/jam-files/boost-build/tools/package.jam
new file mode 100644
index 000000000..198c22315
--- /dev/null
+++ b/jam-files/boost-build/tools/package.jam
@@ -0,0 +1,165 @@
+# Copyright (c) 2005 Vladimir Prus.
+# Copyright 2006 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides a mechanism for installing whole packages into a specific directory
+# structure. This is opposed to the 'install' rule, which installs a number of
+# targets into a single directory and does not care about directory structure
+# at all.
+
+# Example usage:
+#
+# package.install boost : <properties>
+# : <binaries>
+# : <libraries>
+# : <headers>
+# ;
+#
+# This will install binaries, libraries and headers to the 'proper' location,
+# given by command line options --prefix, --exec-prefix, --bindir, --libdir and
+# --includedir.
+#
+# The rule is just a convenient wrapper, avoiding the need to define several
+# 'install' targets.
+#
+# The only install-related feature is <install-source-root>. It applies to
+# headers only and, if present, the paths of headers relative to the source
+# root will be retained after installation. If it is not specified, then "." is
+# assumed, so relative paths in headers are always preserved.
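+#
+# A more concrete sketch (all target and path names below are illustrative):
+#
+#   package.install mypackage : <install-source-root>include
+#       : myprogram
+#       : mylibrary
+#       : include/mylib.hpp
+#       ;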
+
+import "class" : new ;
+import option ;
+import project ;
+import feature ;
+import property ;
+import stage ;
+import targets ;
+import modules ;
+
+feature.feature install-default-prefix : : free incidental ;
+
+rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * )
+{
+ package-name ?= $(name) ;
+ if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
+ {
+ # If --prefix is explicitly specified on the command line, then we need
+ # to wipe away any settings of bindir/libdir/includedir specified via
+ # options in config files.
+ option.set bindir : ;
+ option.set libdir : ;
+ option.set includedir : ;
+ }
+
+ # If <install-source-root> is not specified, all headers are installed to
+ # prefix/include, no matter what their relative path is. Sometimes that is
+ # what is needed.
+ local install-source-root = [ property.select <install-source-root> :
+ $(requirements) ] ;
+ install-source-root = $(install-source-root:G=) ;
+ requirements = [ property.change $(requirements) : <install-source-root> ] ;
+
+ local install-header-subdir = [ property.select <install-header-subdir> :
+ $(requirements) ] ;
+ install-header-subdir = /$(install-header-subdir:G=) ;
+ install-header-subdir ?= "" ;
+ requirements = [ property.change $(requirements) : <install-header-subdir> ]
+ ;
+
+ # First, figure out all locations. Use the default if no prefix option
+ # given.
+ local prefix = [ get-prefix $(name) : $(requirements) ] ;
+
+ # Architecture dependent files.
+ local exec-locate = [ option.get exec-prefix : $(prefix) ] ;
+
+ # Binaries.
+ local bin-locate = [ option.get bindir : $(prefix)/bin ] ;
+
+ # Object code libraries.
+ local lib-locate = [ option.get libdir : $(prefix)/lib ] ;
+
+ # Source header files.
+ local include-locate = [ option.get includedir : $(prefix)/include ] ;
+
+ stage.install $(name)-bin : $(binaries) : $(requirements)
+ <location>$(bin-locate) ;
+ alias $(name)-lib : $(name)-lib-shared $(name)-lib-static ;
+
+ # Since the install location of shared libraries differs between 'universe'
+ # (the default case) and cygwin, use target alternatives to make different
+ # targets. We should have used indirect conditional requirements, but it is
+ # awkward to pass bin-locate and lib-locate from there to another rule.
+ alias $(name)-lib-shared : $(name)-lib-shared-universe ;
+ alias $(name)-lib-shared : $(name)-lib-shared-cygwin : <target-os>cygwin ;
+
+ # For shared libraries, we install both explicitly specified one and the
+ # shared libraries that the installed executables depend on.
+ stage.install $(name)-lib-shared-universe : $(binaries) $(libraries) : $(requirements)
+ <location>$(lib-locate) <install-dependencies>on <install-type>SHARED_LIB ;
+ stage.install $(name)-lib-shared-cygwin : $(binaries) $(libraries) : $(requirements)
+ <location>$(bin-locate) <install-dependencies>on <install-type>SHARED_LIB ;
+
+ # For static libraries, we do not care about executable dependencies, since
+ # static libraries are already incorporated into them.
+ stage.install $(name)-lib-static : $(libraries) : $(requirements)
+ <location>$(lib-locate) <install-dependencies>on <install-type>STATIC_LIB ;
+ stage.install $(name)-headers : $(headers) : $(requirements)
+ <location>$(include-locate)$(install-header-subdir)
+ <install-source-root>$(install-source-root) ;
+ alias $(name) : $(name)-bin $(name)-lib $(name)-headers ;
+
+ local c = [ project.current ] ;
+ local project-module = [ $(c).project-module ] ;
+ module $(project-module)
+ {
+ explicit $(1)-bin $(1)-lib $(1)-headers $(1) $(1)-lib-shared $(1)-lib-static
+ $(1)-lib-shared-universe $(1)-lib-shared-cygwin ;
+ }
+}
+
+rule install-data ( target-name : package-name ? : data * : requirements * )
+{
+ package-name ?= $(target-name) ;
+ if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
+ {
+ # If --prefix is explicitly specified on the command line, then we need
+ # to wipe away any settings of datarootdir.
+ option.set datarootdir : ;
+ }
+
+ local prefix = [ get-prefix $(package-name) : $(requirements) ] ;
+ local datadir = [ option.get datarootdir : $(prefix)/share ] ;
+
+ stage.install $(target-name)
+ : $(data)
+ : $(requirements) <location>$(datadir)/$(package-name)
+ ;
+
+ local c = [ project.current ] ;
+ local project-module = [ $(c).project-module ] ;
+ module $(project-module)
+ {
+ explicit $(1) ;
+ }
+}
+
+local rule get-prefix ( package-name : requirements * )
+{
+ local prefix = [ option.get prefix : [ property.select
+ <install-default-prefix> : $(requirements) ] ] ;
+ prefix = $(prefix:G=) ;
+ requirements = [ property.change $(requirements) : <install-default-prefix>
+ ] ;
+ # Or some likely defaults if neither is given.
+ if ! $(prefix)
+ {
+ if [ modules.peek : NT ] { prefix = C:\\$(package-name) ; }
+ else if [ modules.peek : UNIX ] { prefix = /usr/local ; }
+ }
+ return $(prefix) ;
+}
+
diff --git a/jam-files/boost-build/tools/pathscale.jam b/jam-files/boost-build/tools/pathscale.jam
new file mode 100644
index 000000000..454e34547
--- /dev/null
+++ b/jam-files/boost-build/tools/pathscale.jam
@@ -0,0 +1,168 @@
+# Copyright 2006 Noel Belcourt
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import toolset : flags ;
+import feature ;
+import type ;
+import common ;
+import fortran ;
+
+feature.extend toolset : pathscale ;
+toolset.inherit pathscale : unix ;
+generators.override pathscale.prebuilt : builtin.prebuilt ;
+generators.override pathscale.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description located at
+# http://www.pathscale.com/docs.html
+
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command pathscale : pathCC : $(command)
+ : /opt/ekopath/bin ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters pathscale
+ : version $(version) ] ;
+
+ common.handle-options pathscale : $(condition) : $(command) : $(options) ;
+
+ toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ;
+
+ toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ # fortran support
+ local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ;
+ local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
+ local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
+
+ toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ;
+ toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ;
+
+ # always link lib rt to resolve clock_gettime()
+ flags pathscale.link FINDLIBS-SA : rt : unchecked ;
+}
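+
+# A typical configuration in user-config.jam might look like this (the version
+# and command path are illustrative):
+#
+#   using pathscale : 3.2 : /opt/ekopath/bin/pathCC ;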
+
+# Declare generators
+generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ;
+generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ;
+generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ;
+generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ;
+
+# Declare flags and actions for compilation
+flags pathscale.compile OPTIONS <optimization>off : -O0 ;
+flags pathscale.compile OPTIONS <optimization>speed : -O3 ;
+flags pathscale.compile OPTIONS <optimization>space : -Os ;
+
+flags pathscale.compile OPTIONS <inlining>off : -noinline ;
+flags pathscale.compile OPTIONS <inlining>on : -inline ;
+flags pathscale.compile OPTIONS <inlining>full : -inline ;
+
+flags pathscale.compile OPTIONS <warnings>off : -woffall ;
+flags pathscale.compile OPTIONS <warnings>on : -Wall ;
+flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ;
+flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ;
+flags pathscale.compile OPTIONS <profiling>on : -pg ;
+flags pathscale.compile OPTIONS <link>shared : -fPIC ;
+flags pathscale.compile OPTIONS <address-model>32 : -m32 ;
+flags pathscale.compile OPTIONS <address-model>64 : -m64 ;
+
+flags pathscale.compile USER_OPTIONS <cflags> ;
+flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ;
+flags pathscale.compile DEFINES <define> ;
+flags pathscale.compile INCLUDES <include> ;
+
+flags pathscale.compile.fortran USER_OPTIONS <fflags> ;
+flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.fortran90 ( targets * : sources * : properties * )
+{
+ # The SPACE variable inserts spaces between targets; this is necessary.
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the compile.fortran90 action. F90 sources must be
+ # compiled in a particular order, so we serialize the build because a
+ # parallel F90 compile might fail.
+ JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ;
+}
+
+actions compile.fortran90
+{
+ "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ;
+# Strip the binary when no debugging is needed
+flags pathscale.link OPTIONS <debug-symbols>off : -g0 ;
+flags pathscale.link OPTIONS <profiling>on : -pg ;
+flags pathscale.link USER_OPTIONS <linkflags> ;
+flags pathscale.link LINKPATH <library-path> ;
+flags pathscale.link FINDLIBS-ST <find-static-library> ;
+flags pathscale.link FINDLIBS-SA <find-shared-library> ;
+flags pathscale.link FINDLIBS-SA <threading>multi : pthread ;
+flags pathscale.link LIBRARIES <library-file> ;
+flags pathscale.link LINK-RUNTIME <runtime-link>static : static ;
+flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags pathscale.link RPATH <dll-path> ;
+# On gcc, there are separate options for the dll path at runtime and at link
+# time. On Solaris, there is only one: -R, so we have to use it, even though
+# it is a bad idea.
+flags pathscale.link RPATH <xdll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+# Declare action for creating static libraries
+# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)"
+actions piecemeal archive
+{
+ ar $(ARFLAGS) ru "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/pch.jam b/jam-files/boost-build/tools/pch.jam
new file mode 100644
index 000000000..0c6e98fac
--- /dev/null
+++ b/jam-files/boost-build/tools/pch.jam
@@ -0,0 +1,95 @@
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+##### Using Precompiled Headers (Quick Guide) #####
+#
+# Make precompiled mypch.hpp:
+#
+# import pch ;
+#
+# cpp-pch mypch
+# : # sources
+# mypch.hpp
+# : # requirements
+# <toolset>msvc:<source>mypch.cpp
+# ;
+#
+# Add cpp-pch to sources:
+#
+# exe hello
+# : main.cpp hello.cpp mypch
+# ;
+
+import "class" : new ;
+import type ;
+import feature ;
+import generators ;
+
+type.register PCH : pch ;
+
+type.register C_PCH : : PCH ;
+type.register CPP_PCH : : PCH ;
+
+# Control precompiled header (PCH) generation.
+feature.feature pch :
+ on
+ off
+ : propagated ;
+
+
+feature.feature pch-header : : free dependency ;
+feature.feature pch-file : : free dependency ;
+
+# Base PCH generator. The 'run' method has the logic to prevent this generator
+# from being run unless it's being used for a top-level PCH target.
+class pch-generator : generator
+{
+ import property-set ;
+
+ rule action-class ( )
+ {
+ return compile-action ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ if ! $(name)
+ {
+ # Unless this generator is invoked as the top-most generator for a
+ # main target, fail. This allows using the 'H' type as an input type for
+ # this generator while preventing Boost.Build from trying this generator
+ # when not explicitly asked for.
+ #
+ # One bad example is msvc, where the pch generator produces both a PCH
+ # target and an OBJ target, so if any header is generated (e.g. by
+ # bison or by msidl), we would try to use the pch generator to get an OBJ
+ # from that H, which is completely wrong. By restricting this generator
+ # to pch main targets only, this problem is solved.
+ }
+ else
+ {
+ local r = [ run-pch $(project) $(name)
+ : [ $(property-set).add-raw <define>BOOST_BUILD_PCH_ENABLED ]
+ : $(sources) ] ;
+ return [ generators.add-usage-requirements $(r)
+ : <define>BOOST_BUILD_PCH_ENABLED ] ;
+ }
+ }
+
+ # This rule must be overridden by the derived classes.
+ rule run-pch ( project name ? : property-set : sources + )
+ {
+ }
+}
+
+
+# NOTE: The requirements are empty so that the default pch generator can be
+# applied even when pch=off.
+generators.register
+ [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
+generators.register
+ [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
diff --git a/jam-files/boost-build/tools/pgi.jam b/jam-files/boost-build/tools/pgi.jam
new file mode 100644
index 000000000..3a35c6447
--- /dev/null
+++ b/jam-files/boost-build/tools/pgi.jam
@@ -0,0 +1,147 @@
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import fortran ;
+import type ;
+import common ;
+import gcc ;
+
+feature.extend toolset : pgi ;
+toolset.inherit pgi : unix ;
+generators.override pgi.prebuilt : builtin.lib-generator ;
+generators.override pgi.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description located at
+# http://www.pgroup.com/resources/docs.htm
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters pgi : version $(version) ] ;
+
+ local l_command = [ common.get-invocation-command pgi : pgCC : $(command) ] ;
+
+ common.handle-options pgi : $(condition) : $(l_command) : $(options) ;
+
+ command_c = $(command_c[1--2]) $(l_command[-1]:B=cc) ;
+
+ toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ flags pgi.compile DEFINES $(condition) :
+ [ feature.get-values <define> : $(options) ] : unchecked ;
+
+ # IOV_MAX support
+ flags pgi.compile DEFINES $(condition) : __need_IOV_MAX : unchecked ;
+
+ # set link flags
+ flags pgi.link FINDLIBS-ST : [
+ feature.get-values <find-static-library> : $(options) ] : unchecked ;
+
+ # always link lib rt to resolve clock_gettime()
+ flags pgi.link FINDLIBS-SA : rt [
+ feature.get-values <find-shared-library> : $(options) ] : unchecked ;
+
+ gcc.init-link-flags pgi gnu $(condition) ;
+}
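+
+# A typical configuration in user-config.jam might look like this (the version
+# and command are illustrative):
+#
+#   using pgi : 7.1 : pgCC ;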
+
+# Declare generators
+generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ;
+generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ;
+generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ;
+
+# Declare flags and actions for compilation
+flags pgi.compile OPTIONS : -Kieee ;
+flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ;
+flags pgi.compile OPTIONS <debug-symbols>on : -gopt ;
+flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ;
+flags pgi.compile OPTIONS <optimization>space : -xO2 -xspace ;
+# flags pgi.compile OPTIONS <threading>multi : -mt ;
+
+flags pgi.compile OPTIONS <warnings>off : -Minform=severe ;
+flags pgi.compile OPTIONS <warnings>on : -Minform=warn ;
+
+flags pgi.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
+
+flags pgi.compile OPTIONS <cflags> ;
+flags pgi.compile.c++ OPTIONS <cxxflags> ;
+flags pgi.compile DEFINES <define> ;
+flags pgi.compile INCLUDES <include> ;
+
+flags pgi.compile.fortran OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags pgi.link OPTIONS <debug-symbols>on : -gopt ;
+# Strip the binary when no debugging is needed
+flags pgi.link OPTIONS <debug-symbols>off : -s ;
+flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ;
+flags pgi.link OPTIONS <linkflags> ;
+flags pgi.link OPTIONS <link>shared : -fpic -fPIC ;
+flags pgi.link LINKPATH <library-path> ;
+flags pgi.link FINDLIBS-ST <find-static-library> ;
+flags pgi.link FINDLIBS-SA <find-shared-library> ;
+flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ;
+flags pgi.link LIBRARIES <library-file> ;
+flags pgi.link LINK-RUNTIME <runtime-link>static : static ;
+flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags pgi.link RPATH <dll-path> ;
+
+# On gcc, there are separate options for the dll path at runtime and at link
+# time. On Solaris, there is only one: -R, so we have to use it, even though
+# it is a bad idea.
+flags pgi.link RPATH <xdll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+# reddish can only link statically and, somehow, the presence of -Bdynamic on
+# the link line marks the executable as dynamically linked even though no
+# dynamic libraries are supplied. Yod on redstorm refuses to load an executable
+# that is dynamically linked. Removing the dynamic link options should get us
+# where we need to be on redstorm.
+# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bstatic -l$(FINDLIBS-ST) -Bdynamic -l$(FINDLIBS-SA) -B$(LINK-RUNTIME)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+actions updated together piecemeal pgi.archive
+{
+ ar -rc$(ARFLAGS:E=) "$(<)" "$(>)"
+}
+
diff --git a/jam-files/boost-build/tools/python-config.jam b/jam-files/boost-build/tools/python-config.jam
new file mode 100644
index 000000000..40aa825bc
--- /dev/null
+++ b/jam-files/boost-build/tools/python-config.jam
@@ -0,0 +1,27 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for Python tools and libraries. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ for local R in 2.4 2.3 2.2
+ {
+ local python-path = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\$(R)\\InstallPath" ] ;
+ local python-version = $(R) ;
+
+ if $(python-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using python ":" $(python-version) ":" $(python-path) ;
+ }
+ using python : $(python-version) : $(python-path) ;
+ }
+ }
+}
diff --git a/jam-files/boost-build/tools/python.jam b/jam-files/boost-build/tools/python.jam
new file mode 100644
index 000000000..66f2aabec
--- /dev/null
+++ b/jam-files/boost-build/tools/python.jam
@@ -0,0 +1,1267 @@
+# Copyright 2004 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Python and the Boost.Python library.
+#
+# This module defines
+#
+# - a project 'python' with a target 'python' in it, that corresponds to the
+# python library
+#
+# - a main target rule 'python-extension' which can be used to build a python
+# extension.
+#
+# Extensions that use Boost.Python must explicitly link to it.
+
+import type ;
+import testing ;
+import generators ;
+import project ;
+import errors ;
+import targets ;
+import "class" : new ;
+import os ;
+import common ;
+import toolset ;
+import regex ;
+import numbers ;
+import string ;
+import property ;
+import sequence ;
+import path ;
+import feature ;
+import set ;
+import builtin ;
+import version ;
+
+
+# Make this module a project.
+project.initialize $(__name__) ;
+project python ;
+
+# Save the project so that if 'init' is called several times we define new
+# targets in the python project, not in whatever project we were called by.
+.project = [ project.current ] ;
+
+# Dynamic linker lib. Necessary to specify it explicitly on some platforms.
+lib dl ;
+# This contains the 'openpty' function needed by Python. Again, on some systems
+# it needs to be passed to the linker explicitly.
+lib util ;
+# Python uses pthread symbols.
+lib pthread ;
+# Extra library needed by pthread on some platforms.
+lib rt ;
+
+# The pythonpath feature specifies additional elements for the PYTHONPATH
+# environment variable, set by run-pyd. For example, pythonpath can be used to
+# access Python modules that are part of the product being built, but are not
+# installed in the development system's default paths.
+feature.feature pythonpath : : free optional path ;
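+#
+# For example (the path is illustrative), a target's requirements could include
+# the property
+#
+#   <pythonpath>libs/mymodule/build
+#
+# so that locally built modules are importable when the target is run.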
+
+# Initializes the Python toolset. Note that all parameters are optional.
+#
+# - version -- the version of Python to use. Should be in Major.Minor format,
+# for example 2.3. Do not include the subminor version.
+#
+# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter.
+# Alternatively, the installation prefix for Python libraries and includes. If
+# empty, will be guessed from the version, the platform's installation
+# patterns, and the python executables that can be found in PATH.
+#
+# - includes: the include path to Python headers. If empty, will be guessed.
+#
+# - libraries: the path to Python library binaries. If empty, will be guessed.
+# On MacOS/Darwin, you can also pass the path of the Python framework.
+#
+# - condition: if specified, should be a set of properties that are matched
+# against the build configuration when Boost.Build selects a Python
+# configuration to use.
+#
+# - extension-suffix: A string to append to the name of extension modules before
+# the true filename extension. Ordinarily we would just compute this based on
+# the value of the <python-debugging> feature. However ubuntu's python-dbg
+# package uses the windows convention of appending _d to debug-build extension
+# modules. We have no way of detecting ubuntu, or of probing python for the
+# "_d" requirement, and if you configure and build python using
+# --with-pydebug, you'll be using the standard *nix convention. Defaults to ""
+# (or "_d" when targeting windows and <python-debugging> is set).
+#
+# Example usage:
+#
+# using python : 2.3 ;
+# using python : 2.3 : /usr/local/bin/python ;
+#
+rule init ( version ? : cmd-or-prefix ? : includes * : libraries ?
+ : condition * : extension-suffix ? )
+{
+ project.push-current $(.project) ;
+
+ debug-message Configuring python... ;
+ for local v in version cmd-or-prefix includes libraries condition
+ {
+ if $($(v))
+ {
+ debug-message " user-specified "$(v): \"$($(v))\" ;
+ }
+ }
+
+ configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ;
+
+ project.pop-current ;
+}
+
+# A simpler version of SHELL that grabs stderr as well as stdout, but returns
+# nothing if there was an error.
+#
+local rule shell-cmd ( cmd )
+{
+ debug-message running command '$(cmd)" 2>&1"' ;
+ x = [ SHELL $(cmd)" 2>&1" : exit-status ] ;
+ if $(x[2]) = 0
+ {
+ return $(x[1]) ;
+ }
+ else
+ {
+ return ;
+ }
+}
+
+
+# Try to identify Cygwin symlinks. Invoking such a file directly as an NT
+# executable from a native Windows build of bjam would be fatal to the bjam
+# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove
+# that those are not also symlinks. ;-)
+#
+# If a symlink is found returns non-empty; we try to extract the target of the
+# symlink from the file and return that.
+#
+# Note: 1. Only works on NT. 2. 'path' is a native path.
+local rule is-cygwin-symlink ( path )
+{
+ local is-symlink = ;
+
+ # Look for a file with the given path having the S attribute set, as cygwin
+ # symlinks do. /-C means "do not use thousands separators in file sizes."
+ local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ;
+
+ if $(dir-listing)
+ {
+ # Escape any special regex characters in the base part of the path.
+ local base-pat = [ regex.escape $(path:D=) : ].[()*+?|\\$^ : \\ ] ;
+
+ # Extract the file's size from the directory listing.
+ local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ;
+
+ # If the file has a reasonably small size, look for the special symlink
+ # identification text.
+ if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ]
+ {
+ local link = [ SHELL "FIND /OFF \"!<symlink>\" \""$(path)"\" 2>&1" ] ;
+ if $(link[2]) != 0
+ {
+ local nl = "
+
+" ;
+ is-symlink = [ MATCH ".*!<symlink>([^"$(nl)"]*)" : $(link[1]) : 1 ] ;
+ if $(is-symlink)
+ {
+ is-symlink = [ *nix-path-to-native $(is-symlink) ] ;
+ is-symlink = $(is-symlink:R=$(path:D)) ;
+ }
+
+ }
+ }
+ }
+ return $(is-symlink) ;
+}
+
+
+# Append ext to each member of names that does not contain '.'.
+#
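+# For example (illustrative), [ default-extension python run.bat : .exe ] would
+# yield "python.exe run.bat".
+#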
+local rule default-extension ( names * : ext * )
+{
+ local result ;
+ for local n in $(names)
+ {
+ switch $(n)
+ {
+ case *.* : result += $(n) ;
+ case * : result += $(n)$(ext) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Tries to determine whether invoking "cmd" would actually attempt to launch a
+# cygwin symlink.
+#
+# Note: only works on NT.
+#
+local rule invokes-cygwin-symlink ( cmd )
+{
+ local dirs = $(cmd:D) ;
+ if ! $(dirs)
+ {
+ dirs = . [ os.executable-path ] ;
+ }
+ local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ;
+ local paths = [ GLOB $(dirs) : $(base) ] ;
+ if $(paths)
+ {
+ # Make sure we have not run into a Cygwin symlink. Invoking such a file
+ # as an NT executable would be fatal for the bjam process.
+ return [ is-cygwin-symlink $(paths[1]) ] ;
+ }
+}
+
+
+local rule debug-message ( message * )
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO notice: [python-cfg] $(message) ;
+ }
+}
+
+
+# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and
+# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result
+# found. Also accounts for the fact that on 64-bit machines, 32-bit software has
+# its own area, under SOFTWARE\Wow6432node.
+#
+local rule software-registry-value ( path : data ? )
+{
+ local result ;
+ for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE
+ {
+ for local x64elt in "" Wow6432node\\ # Account for 64-bit windows
+ {
+ if ! $(result)
+ {
+ result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ;
+ }
+ }
+
+ }
+ return $(result) ;
+}
+
+
+.windows-drive-letter-re = ^([A-Za-z]):[\\/](.*) ;
+.cygwin-drive-letter-re = ^/cygdrive/([a-z])/(.*) ;
+
+.working-directory = [ PWD ] ;
+.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ;
+.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ;
+
+
+local rule windows-to-cygwin-path ( path )
+{
+ # If path is rooted with a drive letter, rewrite it using the /cygdrive
+ # mountpoint.
+ local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ;
+
+ # Else if path is rooted without a drive letter, use the working directory.
+ p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$2 ] ;
+
+ # Else return the path unchanged.
+ return $(p:E=$(path:T)) ;
+}
+
+
+# :W only works in Cygwin builds of bjam. This one works on NT builds as well.
+#
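+# For example (illustrative), "/cygdrive/c/tools/python" would be converted to
+# "c:\tools\python".
+#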
+local rule cygwin-to-windows-path ( path )
+{
+ path = $(path:R="") ; # strip any trailing slash
+
+ local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) $1:/$2 ] ;
+ if $(drive-letter)
+ {
+ path = $(drive-letter) ;
+ }
+ else if $(path:R=/x) = $(path) # already rooted?
+ {
+ # Look for a cygwin mount that includes each head sequence in $(path).
+ local head = $(path) ;
+ local tail = "" ;
+
+ while $(head)
+ {
+ local root = [ software-registry-value
+ "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ;
+
+ if $(root)
+ {
+ path = $(tail:R=$(root)) ;
+ head = ;
+ }
+ tail = $(tail:R=$(head:D=)) ;
+
+ if $(head) = /
+ {
+ head = ;
+ }
+ else
+ {
+ head = $(head:D) ;
+ }
+ }
+ }
+ return [ regex.replace $(path:R="") / \\ ] ;
+}
+
+
+# Convert a *nix path to native.
+#
+local rule *nix-path-to-native ( path )
+{
+ if [ os.name ] = NT
+ {
+ path = [ cygwin-to-windows-path $(path) ] ;
+ }
+ return $(path) ;
+}
+
+
+# Convert an NT path to native.
+#
+local rule windows-path-to-native ( path )
+{
+ if [ os.name ] = NT
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return [ windows-to-cygwin-path $(path) ] ;
+ }
+}
+
+
+# Return nonempty if path looks like a windows path, i.e. it starts with a drive
+# letter or contains backslashes.
+#
+local rule guess-windows-path ( path )
+{
+ return [ SUBST $(path) ($(.windows-drive-letter-re)|.*([\\]).*) $1 ] ;
+}
+
+
+local rule path-to-native ( paths * )
+{
+ local result ;
+
+ for local p in $(paths)
+ {
+ if [ guess-windows-path $(p) ]
+ {
+ result += [ windows-path-to-native $(p) ] ;
+ }
+ else
+ {
+ result += [ *nix-path-to-native $(p:T) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Validate the version string and extract the major/minor part we care about.
+#
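+# For example (illustrative), [ split-version 2.6 ] evaluates to the list 2 6,
+# while [ split-version 2.6.1 ] warns about the extra component and still
+# evaluates to 2 6.
+#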
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
+
+
+# Build a list of versions from 3.0 down to 1.5. Because bjam can not enumerate
+# registry sub-keys, we have no way of finding a version with a 2-digit minor
+# version, e.g. 2.10 -- let us hope that never happens.
+#
+.version-countdown = ;
+for local v in [ numbers.range 15 30 ]
+{
+ .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ;
+}
+
+
+local rule windows-installed-pythons ( version ? )
+{
+ version ?= $(.version-countdown) ;
+ local interpreters ;
+
+ for local v in $(version)
+ {
+ local install-path = [
+ software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ;
+
+ if $(install-path)
+ {
+ install-path = [ windows-path-to-native $(install-path) ] ;
+ debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ;
+ }
+
+ interpreters += $(:E=python:R=$(install-path)) ;
+ }
+ return $(interpreters) ;
+}
+
+
+local rule darwin-installed-pythons ( version ? )
+{
+ version ?= $(.version-countdown) ;
+
+ local prefix
+ = [ GLOB /System/Library/Frameworks /Library/Frameworks
+ : Python.framework ] ;
+
+ return $(prefix)/Versions/$(version)/bin/python ;
+}
+
+
+# Assume "python-cmd" invokes a python interpreter and invoke it to extract all
+# the information we care about from its "sys" module. Returns void if
+# unsuccessful.
+#
+local rule probe ( python-cmd )
+{
+ # Avoid invoking a Cygwin symlink on NT.
+ local skip-symlink ;
+ if [ os.name ] = NT
+ {
+ skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ;
+ }
+
+ if $(skip-symlink)
+ {
+ debug-message -------------------------------------------------------------------- ;
+ debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ;
+ debug-message causing a bjam built for Windows to hang. ;
+ debug-message ;
+ debug-message If you intend to target a Cygwin build of Python, please ;
+ debug-message replace the path to the link with the path to a real executable ;
+ debug-message (guessing: \"$(skip-symlink)\") "in" your 'using python' line ;
+ debug-message "in" user-config.jam or site-config.jam. Do not forget to escape ;
+ debug-message backslashes ;
+ debug-message -------------------------------------------------------------------- ;
+ }
+ else
+ {
+ # Prepare a List of Python format strings and expressions that can be
+ # used to print the constants we want from the sys module.
+
+ # We do not really want sys.version since that is a complicated string,
+ # so get the information from sys.version_info instead.
+ local format = "version=%d.%d" ;
+ local exprs = "version_info[0]" "version_info[1]" ;
+
+ for local s in $(sys-elements[2-])
+ {
+ format += $(s)=%s ;
+ exprs += $(s) ;
+ }
+
+ # Invoke Python and ask it for all those values.
+ if [ version.check-jam-version 3 1 17 ] || ( [ os.name ] != NT )
+ {
+ # Prior to version 3.1.17 Boost Jam's SHELL command did not support
+ # quoted commands correctly on Windows. This means that on that
+ # platform we do not support using a Python command interpreter
+ # executable whose path contains a space character.
+ python-cmd = \"$(python-cmd)\" ;
+ }
+ local full-cmd =
+ $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ;
+
+ local output = [ shell-cmd $(full-cmd) ] ;
+ if $(output)
+ {
+ # Parse the output to get all the results.
+ local nl = "
+
+" ;
+ for s in $(sys-elements)
+ {
+ # These variables are expected to be declared local in the
+ # caller, so Jam's dynamic scoping will set their values there.
+ sys.$(s) = [ SUBST $(output) \\<$(s)=([^$(nl)]+) $1 ] ;
+ }
+ }
+ return $(output) ;
+ }
+}
+
+
+# Make sure the "libraries" and "includes" variables (in an enclosing scope)
+# have a value based on the information given.
+#
+local rule compute-default-paths ( target-os : version ? : prefix ? :
+ exec-prefix ? )
+{
+ exec-prefix ?= $(prefix) ;
+
+ if $(target-os) = windows
+ {
+ # The exec_prefix is where you're supposed to look for machine-specific
+ # libraries.
+ local default-library-path = $(exec-prefix)\\libs ;
+ local default-include-path = $(:E=Include:R=$(prefix)) ;
+
+ # If the interpreter was found in a directory called "PCBuild" or
+ # "PCBuild8," assume we're looking at a Python built from the source
+ # distro, and go up one additional level to the default root. Otherwise,
+ # the default root is the directory where the interpreter was found.
+
+ # We ask Python itself what the executable path is in case of
+ # intermediate symlinks or shell scripts.
+ local executable-dir = $(sys.executable:D) ;
+
+ if [ MATCH ^(PCBuild) : $(executable-dir:D=) ]
+ {
+ debug-message "This Python appears to reside in a source distribution;" ;
+ debug-message "prepending \""$(executable-dir)"\" to default library search path" ;
+
+ default-library-path = $(executable-dir) $(default-library-path) ;
+
+ default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ;
+
+ debug-message "and \""$(default-include-path[1])"\" to default #include path" ;
+ }
+
+ libraries ?= $(default-library-path) ;
+ includes ?= $(default-include-path) ;
+ }
+ else
+ {
+ includes ?= $(prefix)/include/python$(version) ;
+
+ local lib = $(exec-prefix)/lib ;
+ libraries ?= $(lib)/python$(version)/config $(lib) ;
+ }
+}
+
+# The version of the python interpreter to use.
+feature.feature python : : propagated ;
+feature.feature python.interpreter : : free ;
+
+toolset.flags python.capture-output PYTHON : <python.interpreter> ;
+
+#
+# Support for Python configured --with-pydebug
+#
+feature.feature python-debugging : off on : propagated ;
+builtin.variant debug-python : debug : <python-debugging>on ;
+
+
+# Return a list of candidate commands to try when looking for a Python
+# interpreter. prefix is expected to be a native path.
+#
+local rule candidate-interpreters ( version ? : prefix ? : target-os )
+{
+ local bin-path = bin ;
+ if $(target-os) = windows
+ {
+ # On Windows, look in the root directory itself and, to work with the
+ # result of a build-from-source, the PCBuild directory.
+ bin-path = PCBuild8 PCBuild "" ;
+ }
+
+ bin-path = $(bin-path:R=$(prefix)) ;
+
+ if $(target-os) in windows darwin
+ {
+ return # Search:
+ $(:E=python:R=$(bin-path)) # Relative to the prefix, if any
+ python # In the PATH
+ [ $(target-os)-installed-pythons $(version) ] # Standard install locations
+ ;
+ }
+ else
+ {
+ # Search relative to the prefix, or if none supplied, in PATH.
+ local unversioned = $(:E=python:R=$(bin-path:E=)) ;
+
+ # If a version was specified, look for a python with that specific
+        # version appended before looking for one called, simply, "python".
+ return $(unversioned)$(version) $(unversioned) ;
+ }
+}
+
+
+# Compute system library dependencies for targets linking with static Python
+# libraries.
+#
+# On many systems, Python uses libraries such as pthreads or libdl. Since static
+# libraries carry no library dependency information of their own that the linker
+# can extract, these extra dependencies have to be given explicitly on the link
+# line of the client. The information about these dependencies is packaged into
+# the "python" target below.
+#
+# Even where Python itself uses pthreads, it never allows extension modules to
+# be entered concurrently (unless they explicitly give up the interpreter lock).
+# Therefore, extension modules do not need the efficiency overhead of threadsafe
+# code as produced by <threading>multi, and we handle libpthread along with
+# other libraries here. Note: this optimization is based on an assumption that
+# the compiler generates link-compatible code in both the single- and
+# multi-threaded cases, and that system libraries do not change their ABIs
+# either.
+#
+# Returns a list of usage-requirements that link to the necessary system
+# libraries.
+#
+local rule system-library-dependencies ( target-os )
+{
+ switch $(target-os)
+ {
+ case s[uo][nl]* : # solaris, sun, sunos
+ # Add a librt dependency for the gcc toolset on SunOS (the sun
+ # toolset adds -lrt unconditionally). While this appears to
+ # duplicate the logic already in gcc.jam, it does not as long as
+ # we are not forcing <threading>multi.
+
+ # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields
+ # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem
+ # to be the right list for extension modules. For example, on my
+ # installation, adding -ldl causes at least one test to fail because
+ # the library can not be found and removing it causes no failures.
+
+ # Apparently, though, we need to add -lrt for gcc.
+ return <toolset>gcc:<library>rt ;
+
+ case osf : return <library>pthread <toolset>gcc:<library>rt ;
+
+ case qnx* : return ;
+ case darwin : return ;
+ case windows : return ;
+
+ case hpux : return <library>rt ;
+ case *bsd : return <library>pthread <toolset>gcc:<library>util ;
+
+ case aix : return <library>pthread <library>dl ;
+
+ case * : return <library>pthread <library>dl
+ <toolset>gcc:<library>util <toolset-intel:platform>linux:<library>util ;
+ }
+}
+
+
+# Declare a target to represent Python's library.
+#
+local rule declare-libpython-target ( version ? : requirements * )
+{
+ # Compute the representation of Python version in the name of Python's
+ # library file.
+ local lib-version = $(version) ;
+ if <target-os>windows in $(requirements)
+ {
+ local major-minor = [ split-version $(version) ] ;
+ lib-version = $(major-minor:J="") ;
+ if <python-debugging>on in $(requirements)
+ {
+ lib-version = $(lib-version)_d ;
+ }
+ }
+
+ if ! $(lib-version)
+ {
+ ECHO *** warning: could not determine Python version, which will ;
+ ECHO *** warning: probably prevent us from linking with the python ;
+ ECHO *** warning: library. Consider explicitly passing the version ;
+ ECHO *** warning: to 'using python'. ;
+ }
+
+ # Declare it.
+ lib python.lib : : <name>python$(lib-version) $(requirements) ;
+}
+
+
+# Implementation of init.
+local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? :
+ condition * : extension-suffix ? )
+{
+ local prefix ;
+ local exec-prefix ;
+ local cmds-to-try ;
+ local interpreter-cmd ;
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+
+ if $(target-os) = windows && <python-debugging>on in $(condition)
+ {
+ extension-suffix ?= _d ;
+ }
+ extension-suffix ?= "" ;
+
+ # Normalize and dissect any version number.
+ local major-minor ;
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+
+ local cmds-to-try ;
+
+ if ! $(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ]
+ {
+ # If the user did not pass a command, whatever we got was a prefix.
+ prefix = $(cmd-or-prefix) ;
+ cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ;
+ }
+ else
+ {
+ # Work with the command the user gave us.
+ cmds-to-try = $(cmd-or-prefix) ;
+
+ # On Windows, do not nail down the interpreter command just yet in case
+ # the user specified something that turns out to be a cygwin symlink,
+ # which could bring down bjam if we invoke it.
+ if $(target-os) != windows
+ {
+ interpreter-cmd = $(cmd-or-prefix) ;
+ }
+ }
+
+ # Values to use in case we can not really find anything in the system.
+ local fallback-cmd = $(cmds-to-try[1]) ;
+ local fallback-version ;
+
+ # Anything left to find or check?
+ if ! ( $(interpreter-cmd) && $(includes) && $(libraries) )
+ {
+ # Values to be extracted from python's sys module. These will be set by
+ # the probe rule, above, using Jam's dynamic scoping.
+ local sys-elements = version platform prefix exec_prefix executable ;
+ local sys.$(sys-elements) ;
+
+ # Compute the string Python's sys.platform needs to match. If not
+ # targeting Windows or cygwin we will assume only native builds can
+ # possibly run, so we will not require a match and we leave sys.platform
+ # blank.
+ local platform ;
+ switch $(target-os)
+ {
+ case windows : platform = win32 ;
+ case cygwin : platform = cygwin ;
+ }
+
+ while $(cmds-to-try)
+ {
+ # Pop top command.
+ local cmd = $(cmds-to-try[1]) ;
+ cmds-to-try = $(cmds-to-try[2-]) ;
+
+ debug-message Checking interpreter command \"$(cmd)\"... ;
+ if [ probe $(cmd) ]
+ {
+ fallback-version ?= $(sys.version) ;
+
+ # Check for version/platform validity.
+ for local x in version platform
+ {
+ if $($(x)) && $($(x)) != $(sys.$(x))
+ {
+ debug-message ...$(x) "mismatch (looking for"
+ $($(x)) but found $(sys.$(x))")" ;
+ cmd = ;
+ }
+ }
+
+ if $(cmd)
+ {
+ debug-message ...requested configuration matched! ;
+
+ exec-prefix = $(sys.exec_prefix) ;
+
+ compute-default-paths $(target-os) : $(sys.version) :
+ $(sys.prefix) : $(sys.exec_prefix) ;
+
+ version = $(sys.version) ;
+ interpreter-cmd ?= $(cmd) ;
+ cmds-to-try = ; # All done.
+ }
+ }
+ else
+ {
+ debug-message ...does not invoke a working interpreter ;
+ }
+ }
+ }
+
+ # Anything left to compute?
+ if $(includes) && $(libraries)
+ {
+ .configured = true ;
+ }
+ else
+ {
+ version ?= $(fallback-version) ;
+ version ?= 2.5 ;
+ exec-prefix ?= $(prefix) ;
+ compute-default-paths $(target-os) : $(version) : $(prefix:E=) ;
+ }
+
+ if ! $(interpreter-cmd)
+ {
+ fallback-cmd ?= python ;
+ debug-message No working Python interpreter found. ;
+ if [ os.name ] != NT || ! [ invokes-cygwin-symlink $(fallback-cmd) ]
+ {
+ interpreter-cmd = $(fallback-cmd) ;
+ debug-message falling back to \"$(interpreter-cmd)\" ;
+ }
+ }
+
+ includes = [ path-to-native $(includes) ] ;
+ libraries = [ path-to-native $(libraries) ] ;
+
+ debug-message "Details of this Python configuration:" ;
+ debug-message " interpreter command:" \"$(interpreter-cmd:E=<empty>)\" ;
+ debug-message " include path:" \"$(includes:E=<empty>)\" ;
+ debug-message " library path:" \"$(libraries:E=<empty>)\" ;
+ if $(target-os) = windows
+ {
+ debug-message " DLL search path:" \"$(exec-prefix:E=<empty>)\" ;
+ }
+
+ #
+ # End autoconfiguration sequence.
+ #
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values python ]
+ {
+ feature.extend python : $(version) ;
+ }
+ target-requirements += <python>$(version:E=default) ;
+ }
+
+ target-requirements += <target-os>$(target-os) ;
+
+ # See if we can find a framework directory on darwin.
+ local framework-directory ;
+ if $(target-os) = darwin
+ {
+ # Search upward for the framework directory.
+ local framework-directory = $(libraries[-1]) ;
+ while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework
+ {
+ framework-directory = $(framework-directory:D) ;
+ }
+
+ if $(framework-directory:D=) = Python.framework
+ {
+ debug-message framework directory is \"$(framework-directory)\" ;
+ }
+ else
+ {
+ debug-message "no framework directory found; using library path" ;
+ framework-directory = ;
+ }
+ }
+
+ local dll-path = $(libraries) ;
+
+ # Make sure that we can find the Python DLL on Windows.
+ if ( $(target-os) = windows ) && $(exec-prefix)
+ {
+ dll-path += $(exec-prefix) ;
+ }
+
+ #
+ # Prepare usage requirements.
+ #
+ local usage-requirements = [ system-library-dependencies $(target-os) ] ;
+ usage-requirements += <include>$(includes) <python.interpreter>$(interpreter-cmd) ;
+ if <python-debugging>on in $(condition)
+ {
+ if $(target-os) = windows
+ {
+ # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define
+ # Py_DEBUG we will get multiple definition warnings.
+ usage-requirements += <define>_DEBUG ;
+ }
+ else
+ {
+ usage-requirements += <define>Py_DEBUG ;
+ }
+ }
+
+ # Global, but conditional, requirements to give access to the interpreter
+ # for general utilities, like other toolsets, that run Python scripts.
+ toolset.add-requirements
+ $(target-requirements:J=,):<python.interpreter>$(interpreter-cmd) ;
+
+ # Register the right suffix for extensions.
+ register-extension-suffix $(extension-suffix) : $(target-requirements) ;
+
+ #
+ # Declare the "python" target. This should really be called
+ # python_for_embedding.
+ #
+
+ if $(framework-directory)
+ {
+ alias python
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <framework>$(framework-directory)
+ ;
+ }
+ else
+ {
+ declare-libpython-target $(version) : $(target-requirements) ;
+
+        # This is an evil hack. On Windows, when Python is embedded, nothing
+ # seems to set up sys.path to include Python's standard library
+ # (http://article.gmane.org/gmane.comp.python.general/544986). The evil
+ # here, aside from the workaround necessitated by Python's bug, is that:
+ #
+ # a. we're guessing the location of the python standard library from the
+ # location of pythonXX.lib
+ #
+ # b. we're hijacking the <testing.launcher> property to get the
+ # environment variable set up, and the user may want to use it for
+ # something else (e.g. launch the debugger).
+ local set-PYTHONPATH ;
+ if $(target-os) = windows
+ {
+ set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH :
+ $(libraries:D)/Lib ] ;
+ }
+
+ alias python
+ :
+ : $(target-requirements)
+ :
+ # Why python.lib must be listed here instead of along with the
+ # system libs is a mystery, but if we do not do it, on cygwin,
+ # -lpythonX.Y never appears in the command line (although it does on
+ # linux).
+ : $(usage-requirements)
+ <testing.launcher>$(set-PYTHONPATH)
+ <library-path>$(libraries) <dll-path>$(dll-path) <library>python.lib
+ ;
+ }
+
+ # On *nix, we do not want to link either Boost.Python or Python extensions
+ # to libpython, because the Python interpreter itself provides all those
+ # symbols. If we linked to libpython, we would get duplicate symbols. So
+ # declare two targets -- one for building extensions and another for
+ # embedding.
+ #
+ # Unlike most *nix systems, Mac OS X's linker does not permit undefined
+ # symbols when linking a shared library. So, we still need to link against
+ # the Python framework, even when building extensions. Note that framework
+ # builds of Python always use shared libraries, so we do not need to worry
+ # about duplicate Python symbols.
+ if $(target-os) in windows cygwin darwin
+ {
+ alias python_for_extensions : python : $(target-requirements) ;
+ }
+ # On AIX we need Python extensions and Boost.Python to import symbols from
+ # the Python interpreter. Dynamic libraries opened with dlopen() do not
+ # inherit the symbols from the Python interpreter.
+ else if $(target-os) = aix
+ {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <linkflags>-Wl,-bI:$(libraries[1])/python.exp
+ ;
+ }
+ else
+ {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements)
+ ;
+ }
+}
+
+
+rule configured ( )
+{
+ return $(.configured) ;
+}
+
+
+type.register PYTHON_EXTENSION : : SHARED_LIB ;
+
+
+local rule register-extension-suffix ( root : condition * )
+{
+ local suffix ;
+
+ switch [ feature.get-values target-os : $(condition) ]
+ {
+ case windows : suffix = pyd ;
+ case cygwin : suffix = dll ;
+ case hpux :
+ {
+ if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4
+ {
+ suffix = sl ;
+ }
+ else
+ {
+ suffix = so ;
+ }
+ }
+ case * : suffix = so ;
+ }
+
+ type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ;
+}
+
+
+# Unset 'lib' prefix for PYTHON_EXTENSION
+type.set-generated-target-prefix PYTHON_EXTENSION : : "" ;
+
+
+rule python-extension ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ if [ configured ]
+ {
+ requirements += <use>/python//python_for_extensions ;
+ }
+ requirements += <suppress-import-lib>true ;
+
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : PYTHON_EXTENSION
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
+
+IMPORT python : python-extension : : python-extension ;
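+
+# An illustrative Jamfile fragment (the target and source names are
+# hypothetical): because python-extension is imported into the global module, a
+# project can simply write
+#
+#   python-extension my_ext : my_ext.cpp ;
+#
+# to build a loadable Python extension module from my_ext.cpp.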
+
+rule py2to3
+{
+ common.copy $(>) $(<) ;
+ 2to3 $(<) ;
+}
+
+actions 2to3
+{
+ 2to3 -wn "$(<)"
+ 2to3 -dwn "$(<)"
+}
+
+
+# Support for testing.
+type.register PY : py ;
+type.register RUN_PYD_OUTPUT ;
+type.register RUN_PYD : : TEST ;
+
+
+class python-test-generator : generator
+{
+ import set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ self.composing = true ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local pyversion = [ $(property-set).get <python> ] ;
+ local python ;
+ local other-pythons ;
+
+        # Make a new target that converts the Python source with 2to3 when running with Python 3.
+ local rule make-2to3-source ( source )
+ {
+ if $(pyversion) >= 3.0
+ {
+ local a = [ new action $(source) : python.py2to3 : $(property-set) ] ;
+ local t = [ utility.basename [ $(s).name ] ] ;
+ local p = [ new file-target $(t) : PY : $(project) : $(a) ] ;
+ return $(p) ;
+ }
+ else
+ {
+ return $(source) ;
+ }
+ }
+
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PY
+ {
+ if ! $(python)
+ {
+ # First Python source ends up on command line.
+ python = [ make-2to3-source $(s) ] ;
+
+ }
+ else
+ {
+ # Other Python sources become dependencies.
+ other-pythons += [ make-2to3-source $(s) ] ;
+ }
+ }
+ }
+
+ local extensions ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PYTHON_EXTENSION
+ {
+ extensions += $(s) ;
+ }
+ }
+
+ local libs ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] LIB ]
+ && ! $(s) in $(extensions)
+ {
+ libs += $(s) ;
+ }
+ }
+
+ local new-sources ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] CPP ]
+ {
+ local name = [ utility.basename [ $(s).name ] ] ;
+ if $(name) = [ utility.basename [ $(python).name ] ]
+ {
+ name = $(name)_ext ;
+ }
+ local extension = [ generators.construct $(project) $(name) :
+ PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ;
+
+                # The important part of the usage requirements returned from the
+                # PYTHON_EXTENSION generator is the set of xdll-path properties
+                # that will allow us to find the python extension at runtime.
+ property-set = [ $(property-set).add $(extension[1]) ] ;
+
+ # Ignore usage requirements. We're a top-level generator and
+ # nobody is going to use what we generate.
+ new-sources += $(extension[2-]) ;
+ }
+ }
+
+ property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ;
+
+ result = [ construct-result $(python) $(extensions) $(new-sources) :
+ $(project) $(name) : $(property-set) ] ;
+ }
+}
+
+
+generators.register
+ [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ;
+
+generators.register-standard testing.expect-success
+ : RUN_PYD_OUTPUT : RUN_PYD ;
+
+
+# There are two different ways of spelling OS names. One is used for [ os.name ]
+# and the other is used for the <host-os> and <target-os> properties. Until that
+# is remedied, this sets up a crude mapping from the latter to the former, that
+# will work *for the purposes of cygwin/NT cross-builds only*. Could not think
+# of a better name than "translate".
+#
+.translate-os-windows = NT ;
+.translate-os-cygwin = CYGWIN ;
+local rule translate-os ( src-os )
+{
+ local x = $(.translate-os-$(src-os)) [ os.name ] ;
+ return $(x[1]) ;
+}
+
+
+# Extract the path to a single ".pyd" source. This is used to build the
+# PYTHONPATH for running bpl tests.
+#
+local rule pyd-pythonpath ( source )
+{
+ return [ on $(source) return $(LOCATE) $(SEARCH) ] ;
+}
+
+
+# The flag settings on testing.capture-output do not apply to
+# python.capture-output at the moment. Redo this explicitly.
+toolset.flags python.capture-output ARGS <testing.arg> ;
+
+
+rule capture-output ( target : sources * : properties * )
+{
+    # Set up a proper DLL search path. Here, $(sources[1]) is a python module
+ # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to
+ # testing.capture-output, so RUN_PATH variable on $(sources[2]) is not
+ # consulted. Move it over explicitly.
+ RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ;
+
+ PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ;
+ PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ;
+
+    # After the test is run, we remove the Python module, but not the Python script.
+ testing.capture-output $(target) : $(sources[1]) : $(properties) :
+ $(sources[2-]) ;
+
+    # PYTHONPATH is different; it will be interpreted by whichever Python is
+    # invoked and so must follow path rules for the target OS. The only OSes
+    # where we can run Python for other OSes currently are NT and CYGWIN, so we
+    # only need to handle those cases.
+ local target-os = [ feature.get-values target-os : $(properties) ] ;
+ # Oddly, host-os is not in properties, so grab the default value.
+ local host-os = [ feature.defaults host-os ] ;
+ host-os = $(host-os:G=) ;
+ if $(target-os) != $(host-os)
+ {
+ PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path :
+ $(PYTHONPATH) ] ;
+ }
+ local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ;
+ local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH :
+ $(PYTHONPATH:J=$(path-separator)) ] ;
+ LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ;
+}
+
+
+rule bpl-test ( name : sources * : requirements * )
+{
+ local s ;
+ sources ?= $(name).py $(name).cpp ;
+ return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python
+ : $(requirements) : $(name) ] ;
+}
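+
+# An illustrative example (hypothetical test name): with the default sources,
+#
+#   bpl-test greet ;
+#
+# declares a run-pyd test built from greet.py and greet.cpp, linked against
+# /boost/python//boost_python.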
+
+
+IMPORT $(__name__) : bpl-test : : bpl-test ;
diff --git a/jam-files/boost-build/tools/qcc.jam b/jam-files/boost-build/tools/qcc.jam
new file mode 100644
index 000000000..4f2a4fc14
--- /dev/null
+++ b/jam-files/boost-build/tools/qcc.jam
@@ -0,0 +1,236 @@
+# Copyright (c) 2001 David Abrahams.
+# Copyright (c) 2002-2003 Rene Rivera.
+# Copyright (c) 2002-2003 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import property ;
+import set ;
+import toolset ;
+import type ;
+import unix ;
+
+feature.extend toolset : qcc ;
+
+toolset.inherit-generators qcc : unix : unix.link unix.link.dll ;
+generators.override builtin.lib-generator : qcc.prebuilt ;
+toolset.inherit-flags qcc : unix ;
+toolset.inherit-rules qcc : unix ;
+
+# Initializes the qcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one being specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted values for linker-type are gnu and sun, gnu being the
+# default.
+#
+# Example:
+# using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+#
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters qcc : version $(version) ] ;
+ local command = [ common.get-invocation-command qcc : QCC : $(command) ] ;
+ common.handle-options qcc : $(condition) : $(command) : $(options) ;
+}
+
+
+generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ;
+generators.register-c-compiler qcc.compile.c : C : OBJ : <toolset>qcc ;
+generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ;
+
+
+# Declare flags for compilation.
+toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ;
+
+# Declare flags and action for compilation.
+toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags qcc.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ;
+toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ;
+toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ;
+
+toolset.flags qcc.compile OPTIONS <warnings>off : -w ;
+toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ;
+toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ;
+
+toolset.flags qcc.compile OPTIONS <profiling>on : -p ;
+
+toolset.flags qcc.compile OPTIONS <cflags> ;
+toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ;
+toolset.flags qcc.compile DEFINES <define> ;
+toolset.flags qcc.compile INCLUDES <include> ;
+
+toolset.flags qcc.compile OPTIONS <link>shared : -shared ;
+
+toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+
+rule compile.c++
+{
+ # Here we want to raise the template-depth parameter value to something
+ # higher than the default value of 17. Note that we could do this using the
+ # feature.set-default rule but we do not want to set the default value for
+ # all toolsets as well.
+ #
+ # TODO: This 'modified default' has been inherited from some 'older Boost
+ # Build implementation' and has most likely been added to make some Boost
+ # library parts compile correctly. We should see what exactly prompted this
+ # and whether we can get around the problem more locally.
+ local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
+ if ! $(template-depth)
+ {
+ TEMPLATE_DEPTH on $(1) = 128 ;
+ }
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+
+# This class checks that we do not try to use the <runtime-link>static property
+# while creating or using a shared library, since it is not supported by
+# qcc/libc.
+#
+class qcc-linking-generator : unix-linking-generator
+{
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ if <runtime-link>static in [ $(property-set).raw ]
+ {
+ local m ;
+ if [ id ] = "qcc.link.dll"
+ {
+ m = "on qcc, DLL can't be build with <runtime-link>static" ;
+ }
+ if ! $(m)
+ {
+ for local s in $(sources)
+ {
+ local type = [ $(s).type ] ;
+ if $(type) && [ type.is-derived $(type) SHARED_LIB ]
+ {
+ m = "on qcc, using DLLS together with the <runtime-link>static options is not possible " ;
+ }
+ }
+ }
+ if $(m)
+ {
+ errors.user-error $(m) : "It is suggested to use"
+ "<runtime-link>static together with <link>static." ;
+ }
+ }
+
+ return [ unix-linking-generator.generated-targets
+ $(sources) : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE
+ : <toolset>qcc ] ;
+
+generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ
+ : SHARED_LIB : <toolset>qcc ] ;
+
+generators.override qcc.prebuilt : builtin.prebuilt ;
+generators.override qcc.searched-lib-generator : searched-lib-generator ;
+
+
+# Declare flags for linking.
+# First, the common flags.
+toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ;
+toolset.flags qcc.link OPTIONS <profiling>on : -p ;
+toolset.flags qcc.link OPTIONS <linkflags> ;
+toolset.flags qcc.link LINKPATH <library-path> ;
+toolset.flags qcc.link FINDLIBS-ST <find-static-library> ;
+toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags qcc.link LIBRARIES <library-file> ;
+
+toolset.flags qcc.link FINDLIBS-SA : m ;
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link.
+toolset.flags qcc.link OPTIONS <runtime-link>static : -static ;
+
+# Assuming this is just like with gcc.
+toolset.flags qcc.link RPATH : <dll-path> : unchecked ;
+toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ;
+
+
+# Declare actions for linking.
+#
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+ # parallel is just slower. For now, serialize only qcc links while it might
+ # be a good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
+
+
+# Always remove archive and start again. Here is the rationale from Andre Hentz:
+# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c,
+# updated my Jamfiles and rebuilt. My program was crashing with absurd errors.
+# After some debugging I traced it back to the fact that a1.o was *still* in
+# liba.a
+RM = [ common.rm-command ] ;
+if [ os.name ] = NT
+{
+ RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ;
+}
+
+
+# Declare action for creating static libraries. The 'r' letter means to add
+# files to the archive with replacement. Since we remove the archive, we do not
+# care about replacement, but there is no option to "add without replacement".
+# The 'c' letter suppresses warnings in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reason.
+#
+actions piecemeal archive
+{
+ $(RM) "$(<)"
+ ar rc "$(<)" "$(>)"
+}
+
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
+}
+
+
+# Differ from 'link' above only by -shared.
+#
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
diff --git a/jam-files/boost-build/tools/qt.jam b/jam-files/boost-build/tools/qt.jam
new file mode 100644
index 000000000..8aa7ca266
--- /dev/null
+++ b/jam-files/boost-build/tools/qt.jam
@@ -0,0 +1,17 @@
+# Copyright (c) 2006 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Forwarding toolset file for the Qt GUI library. Forwards to the toolset file
+# for the current version of Qt.
+
+import qt4 ;
+
+rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
+{
+ qt4.init $(prefix) : $(full_bin) : $(full_inc) : $(full_lib) : $(version) : $(condition) ;
+}
+
+
diff --git a/jam-files/boost-build/tools/qt3.jam b/jam-files/boost-build/tools/qt3.jam
new file mode 100644
index 000000000..f82cf0ac3
--- /dev/null
+++ b/jam-files/boost-build/tools/qt3.jam
@@ -0,0 +1,209 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for the Qt GUI library version 3
+# (http://www.trolltech.com/products/qt3/index.html).
+# For new developments, it is recommended to use Qt4 via the qt4 Boost.Build
+# module.
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+
+# Convert this module into a project, so that we can declare targets here.
+project.initialize $(__name__) ;
+project qt3 ;
+
+
+# Initializes the Qt support module. The 'prefix' parameter tells where Qt is
+# installed. When not given, the QTDIR environment variable should be set.
+#
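+# Example (the installation path below is hypothetical):
+#
+#   using qt3 : /usr/local/qt3 ;
+#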
+rule init ( prefix ? )
+{
+ if ! $(prefix)
+ {
+ prefix = [ modules.peek : QTDIR ] ;
+ if ! $(prefix)
+ {
+ errors.error
+ "QT installation prefix not given and QTDIR variable is empty" ;
+ }
+ }
+
+ if $(.initialized)
+ {
+ if $(prefix) != $(.prefix)
+ {
+ errors.error
+ "Attempt the reinitialize QT with different installation prefix" ;
+ }
+ }
+ else
+ {
+ .initialized = true ;
+ .prefix = $(prefix) ;
+
+ generators.register-standard qt3.moc : H : CPP(moc_%) : <allow>qt3 ;
+ # Note: the OBJ target type here is fake, take a look at
+ # qt4.jam/uic-h-generator for explanations that apply in this case as
+ # well.
+ generators.register [ new moc-h-generator-qt3
+ qt3.moc.cpp : MOCCABLE_CPP : OBJ : <allow>qt3 ] ;
+
+ # The UI type is defined in types/qt.jam, and UIC_H is only used in
+ # qt.jam, but not in qt4.jam, so define it here.
+ type.register UIC_H : : H ;
+
+ generators.register-standard qt3.uic-h : UI : UIC_H : <allow>qt3 ;
+
+ # The following generator is used to convert UI files to CPP. It creates
+ # UIC_H from UI, and constructs CPP from UI/UIC_H. In addition, it also
+ # returns UIC_H target, so that it can be mocced.
+ class qt::uic-cpp-generator : generator
+ {
+ rule __init__ ( )
+ {
+ generator.__init__ qt3.uic-cpp : UI UIC_H : CPP : <allow>qt3 ;
+ }
+
+ rule run ( project name ? : properties * : sources + )
+ {
+ # Consider this:
+ # obj test : test_a.cpp : <optimization>off ;
+ #
+ # This generator will somehow be called in this case, and,
+ # will fail -- which is okay. However, if there are <library>
+ # properties they will be converted to sources, so the size of
+ # 'sources' will be more than 1. In this case, the base generator
+ # will just crash -- and that's not good. Just use a quick test
+ # here.
+
+ local result ;
+ if ! $(sources[2])
+ {
+ # Construct CPP as usual
+ result = [ generator.run $(project) $(name)
+ : $(properties) : $(sources) ] ;
+
+ # If OK, process UIC_H with moc. It's pretty clear that
+ # the object generated with UIC will have Q_OBJECT macro.
+ if $(result)
+ {
+ local action = [ $(result[1]).action ] ;
+ local sources = [ $(action).sources ] ;
+ local mocced = [ generators.construct $(project) $(name)
+ : CPP : $(properties) : $(sources[2]) ] ;
+ result += $(mocced[2-]) ;
+ }
+ }
+
+ return $(result) ;
+ }
+ }
+
+ generators.register [ new qt::uic-cpp-generator ] ;
+
+ # Finally, declare prebuilt target for QT library.
+ local usage-requirements =
+ <include>$(.prefix)/include
+ <dll-path>$(.prefix)/lib
+ <library-path>$(.prefix)/lib
+ <allow>qt3
+ ;
+ lib qt : : <name>qt-mt <threading>multi : : $(usage-requirements) ;
+ lib qt : : <name>qt <threading>single : : $(usage-requirements) ;
+ }
+}
+
+class moc-h-generator-qt3 : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
+ {
+ name = [ $(sources[1]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt3.moc.cpp :
+ $(property-set) ] ;
+
+ local target = [
+ new file-target $(name) : MOC : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+            # Since this generator will return an H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that the dependency from
+ # sources to this generated header is detected -- if Jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.prefix) ;
+}
+
+# -f forces moc to include the processed source file. Without it, it would think
+# that .qpp is not a header and would not include it from the generated file.
+#
+actions moc
+{
+ $(.prefix)/bin/moc -f $(>) -o $(<)
+}
+
+# When moccing .cpp files, we don't need -f, otherwise generated code will
+# include .cpp and we'll get duplicated symbols.
+#
+actions moc.cpp
+{
+ $(.prefix)/bin/moc $(>) -o $(<)
+}
+
+
+space = " " ;
+
+# Sometimes it is required to make 'plugins' available during uic invocation. To
+# help with this we add the paths to all dependency libraries to the uic command
+# line. The intention is that it is possible to write
+#
+# exe a : ... a.ui ... : <uses>some_plugin ;
+#
+# and have everything work. We'd add quite a bunch of unrelated paths but it
+# won't hurt.
+#
+flags qt3.uic-h LIBRARY_PATH <xdll-path> ;
+actions uic-h
+{
+ $(.prefix)/bin/uic $(>) -o $(<) -L$(space)$(LIBRARY_PATH)
+}
+
+
+flags qt3.uic-cpp LIBRARY_PATH <xdll-path> ;
+# The second target is the uic-generated header name. It is placed in the build
+# dir, but we want to include it using only the basename.
+actions uic-cpp
+{
+ $(.prefix)/bin/uic $(>[1]) -i $(>[2]:D=) -o $(<) -L$(space)$(LIBRARY_PATH)
+}
diff --git a/jam-files/boost-build/tools/qt4.jam b/jam-files/boost-build/tools/qt4.jam
new file mode 100644
index 000000000..771b9344f
--- /dev/null
+++ b/jam-files/boost-build/tools/qt4.jam
@@ -0,0 +1,713 @@
+# Copyright 2002-2006 Vladimir Prus
+# Copyright 2005 Alo Sarv
+# Copyright 2005-2009 Juergen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Qt4 library support module
+#
+# The module attempts to auto-detect QT installation location from QTDIR
+# environment variable; failing that, installation location can be passed as
+# argument:
+#
+# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ;
+#
+# The module supports code generation from .ui and .qrc files, as well as
+# running the moc preprocessor on headers. Note that you must list all your
+# moc-able headers in sources.
+#
+# Example:
+#
+# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
+# /qt4//QtGui /qt4//QtNetwork ;
+#
+# It's also possible to run moc on cpp sources:
+#
+# import cast ;
+#
+# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ;
+#
+# When moccing the source file myapp.cpp you need to include "myapp.moc" from
+# myapp.cpp. When moccing .h files, the output of moc will be automatically
+# compiled and linked in; you do not need any includes.
+#
+# This is consistent with Qt guidelines:
+# http://doc.trolltech.com/4.0/moc.html
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+import os ;
+import virtual-target ;
+import scanner ;
+
+# Qt3Support control feature
+#
+# Qt4's configure defaults to building the Qt4 libraries with Qt3Support.
+# The autodetection is missing, so we default to disabling Qt3Support.
+# This prevents the user from inadvertently using a deprecated API.
+#
+# The Qt3Support library can be activated by adding
+# "<qt3support>on" to the requirements.
+#
+# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS"
+# to get warnings about deprecated Qt3 support functions and classes.
+# Files ported by the "qt3to4" conversion tool contain _tons_ of
+# warnings, so this define is not set as default.
+#
+# Todo: Detect Qt3Support from Qt's configure data.
+# Or add more auto-configuration (like python).
+feature.feature qt3support : off on : propagated link-incompatible ;
+
+# The Qt version used for requirements.
+# Valid values are e.g. <qt>4.4 or <qt>4.5.0.
+# Auto-detection via qmake sets '<qt>major.minor.patch'.
+feature.feature qt : : propagated ;
+
+project.initialize $(__name__) ;
+project qt ;
+
+# Save the project so that we tolerate 'import + using' combo.
+.project = [ project.current ] ;
+
+# Helper utils for easy debug output
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = TRUE ;
+}
+
+local rule debug-message ( message * )
+{
+ if $(.debug-configuration) = TRUE
+ {
+ ECHO notice: [qt4-cfg] $(message) ;
+ }
+}
+
+# Capture qmake output line by line
+local rule read-output ( content )
+{
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ return $(lines) ;
+}
+
+# Capture Qt version from qmake
+local rule check-version ( bin_prefix )
+{
+ full-cmd = $(bin_prefix)"/qmake -v" ;
+ debug-message Running '$(full-cmd)' ;
+ local output = [ SHELL $(full-cmd) ] ;
+ for line in [ read-output $(output) ]
+ {
+ # Parse the output to get all the results.
+ if [ MATCH "QMake" : $(line) ]
+ {
+ # Skip first line of output
+ }
+ else
+ {
+ temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
+ }
+ }
+ return $(temp) ;
+}
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
+
+# Initialize the QT support module.
+# Parameters:
+# - 'prefix' parameter tells where Qt is installed.
+# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc)
+# - 'full_inc' optional full path to Qt top-level include directory
+# - 'full_lib' optional full path to Qt library directory
+# - 'version' optional version of Qt, else autodetected via 'qmake -v'
+# - 'condition' optional requirements
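+#
+# Example (the installation path and version below are hypothetical):
+#
+#   using qt4 : /usr/local/Trolltech/Qt-4.5.0 : : : : 4.5 ;
+#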
+rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
+{
+ project.push-current $(.project) ;
+
+ debug-message "==== Configuring Qt ... ====" ;
+    for local v in prefix full_bin full_inc full_lib version condition
+ {
+ if $($(v))
+ {
+ debug-message " user-specified "$(v): '$($(v))' ;
+ }
+ }
+
+ # Needed as default value
+ .prefix = $(prefix) ;
+
+    # Pre-build the paths so that reinitialization with changed paths can be detected.
+ local inc_prefix lib_prefix bin_prefix ;
+ if $(full_inc)
+ {
+ inc_prefix = $(full_inc) ;
+ }
+ else
+ {
+ inc_prefix = $(prefix)/include ;
+ }
+ if $(full_lib)
+ {
+ lib_prefix = $(full_lib) ;
+ }
+ else
+ {
+ lib_prefix = $(prefix)/lib ;
+ }
+ if $(full_bin)
+ {
+ bin_prefix = $(full_bin) ;
+ }
+ else
+ {
+ bin_prefix = $(prefix)/bin ;
+ }
+
+ # Globally needed variables
+ .incprefix = $(inc_prefix) ;
+ .libprefix = $(lib_prefix) ;
+ .binprefix = $(bin_prefix) ;
+
+ if ! $(.initialized)
+ {
+ # Make sure this is initialised only once
+ .initialized = true ;
+
+ # Generates cpp files from header files using "moc" tool
+ generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ;
+
+        # The OBJ result type is a fake; an 'H' target will really be produced.
+        # See the comments on the generator class defined below the 'init' function.
+ generators.register [ new uic-generator qt4.uic : UI : OBJ :
+ <allow>qt4 ] ;
+
+ # The OBJ result type is a fake here too.
+ generators.register [ new moc-h-generator
+ qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ;
+
+ generators.register [ new moc-inc-generator
+ qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ;
+
+ # Generates .cpp files from .qrc files.
+ generators.register-standard qt4.rcc : QRC : CPP(qrc_%) ;
+
+ # dependency scanner for wrapped files.
+ type.set-scanner QRC : qrc-scanner ;
+
+        # Save the value of the first occurring prefix.
+ .PREFIX = $(prefix) ;
+ }
+
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+ else
+ {
+ version = [ check-version $(bin_prefix) ] ;
+ if $(version)
+ {
+ version = $(version:J=.) ;
+ }
+ debug-message Detected version '$(version)' ;
+ }
+
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values qt ]
+ {
+ feature.extend qt : $(version) ;
+ }
+ target-requirements += <qt>$(version:E=default) ;
+ }
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ if ! $(target-os)
+ {
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+ target-requirements += <target-os>$(target-os) ;
+ }
+
+ # Build exact requirements for the tools
+ local tools-requirements = $(target-requirements:J=/) ;
+
+ debug-message "Details of this Qt configuration:" ;
+ debug-message " prefix: " '$(prefix:E=<empty>)' ;
+ debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
+ debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
+ debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
+ debug-message " target requirements:" '$(target-requirements)' ;
+ debug-message " tool requirements: " '$(tools-requirements)' ;
+
+ # setup the paths for the tools
+ toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+
+    # TODO: 2009-02-12: Better support for directories.
+    # Most likely needed are separate getters for: include, libraries, binaries and sources.
+ toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ;
+
+ # Test for a buildable Qt.
+ if [ glob $(.prefix)/Jamroot ]
+ {
+        .bjam-qt = true ;
+
+ # this will declare QtCore (and qtmain on <target-os>windows)
+ add-shared-library QtCore ;
+ }
+ else
+ # Setup common pre-built Qt.
+ # Special setup for QtCore on which everything depends
+ {
+ local usage-requirements =
+ <include>$(.incprefix)
+ <library-path>$(.libprefix)
+ <dll-path>$(.libprefix)
+ <threading>multi
+ <allow>qt4 ;
+
+ local suffix ;
+
+ # Since Qt-4.2, debug versions on unix have to be built
+ # separately and therefore have no suffix.
+ .suffix_version = "" ;
+ .suffix_debug = "" ;
+
+ # Control flag for auto-configuration of the debug libraries.
+ # This setup requires Qt 'configure -debug-and-release'.
+ # Only available on some platforms.
+ # ToDo: 2009-02-12: Maybe throw this away and
+ # require separate setup with <variant>debug as condition.
+ .have_separate_debug = FALSE ;
+
+ # Setup other platforms
+ if $(target-os) in windows cygwin
+ {
+ .have_separate_debug = TRUE ;
+
+ # On NT, the libs have "4" suffix, and "d" suffix in debug builds.
+ .suffix_version = "4" ;
+ .suffix_debug = "d" ;
+
+ # On Windows we must link against the qtmain library
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain$(.suffix_debug)
+ <variant>debug
+ $(target-requirements)
+ ;
+
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain
+ $(target-requirements)
+ ;
+ }
+ else if $(target-os) = darwin
+ {
+ # On MacOS X, both debug and release libraries are available.
+ .suffix_debug = "_debug" ;
+
+ .have_separate_debug = TRUE ;
+
+ alias qtmain ;
+ }
+ else
+ {
+ alias qtmain : : $(target-requirements) ;
+ }
+
+ lib QtCore : qtmain
+ : # requirements
+ <name>QtCore$(.suffix_version)
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <define>QT_NO_DEBUG
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
+
+ lib QtCore : $(main)
+ : # requirements
+ <name>QtCore$(.suffix_debug)$(.suffix_version)
+ <variant>debug
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+ }
+ }
+
+    # Initialising the remaining libraries is canonical.
+    # Parameters: 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include'
+    # 'include' is only needed for non-canonical include paths.
+ add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ;
+ add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
+ add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
+ add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
+
+ add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql
+ : QT_QT3SUPPORT_LIB QT3_SUPPORT
+ : <qt3support>on $(target-requirements) ;
+
+ # Dummy target to enable "<qt3support>off" and
+ # "<library>/qt//Qt3Support" at the same time. This enables quick
+ # switching from one to the other for test/porting purposes.
+ alias Qt3Support : : <qt3support>off $(target-requirements) ;
+
+ # OpenGl Support
+ add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
+
+ # SVG-Support (Qt 4.1)
+ add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
+
+ # Test-Support (Qt 4.1)
+ add-shared-library QtTest : QtCore : : $(target-requirements) ;
+
+ # Qt designer library
+ add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
+
+ # Support for dynamic Widgets (Qt 4.1)
+ add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
+
+ # DBus-Support (Qt 4.2)
+ add-shared-library QtDBus : QtXml : : $(target-requirements) ;
+
+ # Script-Engine (Qt 4.3)
+ add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
+
+ # Tools for the Script-Engine (Qt 4.5)
+ add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
+
+ # WebKit (Qt 4.4)
+ add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
+
+ # Phonon Multimedia (Qt 4.4)
+ add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ;
+
+ # Multimedia engine (Qt 4.6)
+ add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
+
+ # XmlPatterns-Engine (Qt 4.4)
+ add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
+
+ # Help-Engine (Qt 4.4)
+ add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
+
+ # AssistantClient Support
+ # Compat library
+ # Pre-4.4 help system, use QtHelp for new programs
+ add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ;
+
+ debug-message "==== Configured Qt-$(version) ====" ;
+
+ project.pop-current ;
+}
+
+rule initialized ( )
+{
+ return $(.initialized) ;
+}
+
+
+
+# This custom generator is needed because in QT4, UI files are translated only
+# into H files, and no C++ files are created. Further, the H files need not be
+# passed via MOC. The header is used only via inclusion. If we define a standard
+# UI -> H generator, Boost.Build will run MOC on H, and then compile the
+# resulting cpp. It will give a warning, since output from moc will be empty.
+#
+# This generator is declared with a UI -> OBJ signature, so it gets invoked when
+# linking generator tries to convert sources to OBJ, but it produces target of
+# type H. This is non-standard, but allowed. That header won't be mocced.
+#
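+# For illustration only (an editor's sketch; the target and file names below
+# are hypothetical), a Jamfile can simply list the .ui file among the sources
+# and this generator produces the matching ui_*.h header:
+#
+#   exe viewer : main.cpp viewer.ui : <library>/qt//QtGui ;
+#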
+class uic-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(name)
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+ }
+
+ local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ;
+
+ # The 'ui_' prefix is to match qmake's default behavior.
+ local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However, we
+ # need the target to be seen by bjam, so that dependency from sources to
+ # this generated header is detected -- if jam does not know about this
+ # target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+}
+
+
+class moc-h-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt4.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target $(name) : MOC : $(project) : $(a)
+ ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+class moc-inc-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt4.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target moc_$(name) : CPP : $(project) :
+ $(a) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(target).actualize ] ;
+
+ return [ virtual-target.register $(target) ] ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.PREFIX) ;
+}
+
+# Add a shared Qt library.
+rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a static Qt library.
+rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a Qt library.
+# Static libs are unversioned, whereas shared libs have the major number as suffix.
+# Creates both release and debug versions on platforms where both are enabled by Qt configure.
+# Parameters:
+# - lib-name       Qt library name
+# - version        Qt major number used as the shared-library suffix (QtCore4.so)
+# - depends-on     other Qt libraries
+# - usage-defines  these are set by qmake, so set them when using this library
+# - requirements   additional requirements
+# - include        non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
+rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
+{
+ if $(.bjam-qt)
+ {
+ # Import Qt module
+        # Everything will be set up there
+ alias $(lib-name)
+ : $(.prefix)//$(lib-name)
+ :
+ :
+ : <allow>qt4 ;
+ }
+ else
+ {
+ local real_include ;
+ real_include ?= $(include) ;
+ real_include ?= $(lib-name) ;
+
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>$(lib-name)$(version)
+ $(requirements)
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>$(lib-name)$(.suffix_debug)$(version)
+ $(requirements)
+ <variant>debug
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+ }
+ }
+
+ # Make library explicit so that a simple <use>qt4 will not bring in everything.
+ # And some components like QtDBus/Phonon may not be available on all platforms.
+ explicit $(lib-name) ;
+}
+
+# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
+# The exact match is the last one.
+
+# Get <include> and <define> from the current toolset.
+flags qt4.moc INCLUDES <include> ;
+flags qt4.moc DEFINES <define> ;
+
+# Processes headers to create Qt MetaObject information. Qt4's moc has its own
+# C++ parser, so pass INCLUDES and DEFINES.
+#
+actions moc
+{
+ $(.BINPREFIX[-1])/moc -I"$(INCLUDES)" -D$(DEFINES) -f $(>) -o $(<)
+}
+
+
+# When moccing files for include only, we don't need -f, otherwise the generated
+# code will include the .cpp and we'll get duplicated symbols.
+#
+actions moc.inc
+{
+ $(.BINPREFIX[-1])/moc -I"$(INCLUDES)" -D$(DEFINES) $(>) -o $(<)
+}
+
+
+# Generates source files from resource files.
+#
+actions rcc
+{
+ $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) -o $(<)
+}
+
+
+# Generates user-interface source from .ui files.
+#
+actions uic
+{
+ $(.BINPREFIX[-1])/uic $(>) -o $(<)
+}
+
+
+# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
+# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
+# detailed documentation of the Qt Resource System.
+#
+class qrc-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "<file.*>(.*)</file>" ;
+ }
+}
+
+
+# Wrapped files are "included".
+scanner.register qrc-scanner : include ;
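+
+# For illustration only (an editor's sketch; file names are hypothetical), a
+# .qrc resource file can also be listed among the sources; the rcc action above
+# then turns it into a generated C++ source that is compiled and linked in:
+#
+#   exe viewer : main.cpp viewer.ui resources.qrc : <library>/qt//QtGui ;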
diff --git a/jam-files/boost-build/tools/quickbook-config.jam b/jam-files/boost-build/tools/quickbook-config.jam
new file mode 100644
index 000000000..e983a78a8
--- /dev/null
+++ b/jam-files/boost-build/tools/quickbook-config.jam
@@ -0,0 +1,44 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for QuickBook tools. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ local boost-dir = ;
+ for local R in snapshot cvs 1.33.0
+ {
+ boost-dir += [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Boost.org\\$(R)"
+ : "InstallRoot" ] ;
+ }
+ local quickbook-path = [ GLOB "$(boost-dir)\\bin" "\\Boost\\bin" : quickbook.exe ] ;
+ quickbook-path = $(quickbook-path[1]) ;
+
+ if $(quickbook-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using quickbook ":" $(quickbook-path) ;
+ }
+ using quickbook : $(quickbook-path) ;
+ }
+}
+else
+{
+ local quickbook-path = [ GLOB "/usr/local/bin" "/usr/bin" "/opt/bin" : quickbook ] ;
+ quickbook-path = $(quickbook-path[1]) ;
+
+ if $(quickbook-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using quickbook ":" $(quickbook-path) ;
+ }
+ using quickbook : $(quickbook-path) ;
+ }
+}
diff --git a/jam-files/boost-build/tools/quickbook.jam b/jam-files/boost-build/tools/quickbook.jam
new file mode 100644
index 000000000..6de2d42f8
--- /dev/null
+++ b/jam-files/boost-build/tools/quickbook.jam
@@ -0,0 +1,361 @@
+#
+# Copyright (c) 2005 João Abecasis
+# Copyright (c) 2005 Vladimir Prus
+# Copyright (c) 2006 Rene Rivera
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+
+# This toolset defines a generator to translate QuickBook to BoostBook. It can
+# be used to generate nice (!) user documentation in different formats
+# (pdf/html/...), from a single text file with simple markup.
+#
+# The toolset defines the QUICKBOOK type (file extension 'qbk') and
+# a QUICKBOOK to XML (BOOSTBOOK) generator.
+#
+#
+# ===========================================================================
+# Q & A
+# ===========================================================================
+#
+# If you don't know what this is all about, some Q & A will hopefully get you
+# up to speed with QuickBook and this toolset.
+#
+#
+# What is QuickBook ?
+#
+# QuickBook is a WikiWiki style documentation tool geared towards C++
+# documentation using simple rules and markup for simple formatting tasks.
+# QuickBook extends the WikiWiki concept. Like the WikiWiki, QuickBook
+# documents are simple text files. A single QuickBook document can
+# generate a fully linked set of nice HTML and PostScript/PDF documents
+# complete with images and syntax-colorized source code.
+#
+#
+# Where can I get QuickBook ?
+#
+#       QuickBook can be found in Boost's repository, under the tools/quickbook
+#       directory. It was added there in January 2005, some time after the
+#       release of Boost v1.32.0, and has been an integral part of the Boost
+#       distribution since v1.33.
+#
+# Here's a link to the SVN repository:
+# https://svn.boost.org/svn/boost/trunk/tools/quickbook
+#
+# And to QuickBook's QuickBook-generated docs:
+# http://www.boost.org/doc/libs/release/tools/quickbook/index.html
+#
+#
+# How do I use QuickBook and this toolset in my projects ?
+#
+# The minimal example is:
+#
+# using boostbook ;
+# import quickbook ;
+#
+# boostbook my_docs : my_docs_source.qbk ;
+#
+# where my_docs is a target name and my_docs_source.qbk is a QuickBook
+# file. The documentation format to be generated is determined by the
+# boostbook toolset. By default html documentation should be generated,
+# but you should check BoostBook's docs to be sure.
+#
+#
+# What do I need ?
+#
+# You should start by setting up the BoostBook toolset. Please refer to
+# boostbook.jam and the BoostBook documentation for information on how to
+# do this.
+#
+# A QuickBook executable is also needed. The toolset will generate this
+# executable if it can find the QuickBook sources. The following
+# directories will be searched:
+#
+# BOOST_ROOT/tools/quickbook/
+# BOOST_BUILD_PATH/../../quickbook/
+#
+# (BOOST_ROOT and BOOST_BUILD_PATH are environment variables)
+#
+# If QuickBook sources are not found the toolset will then try to use
+# the shell command 'quickbook'.
+#
+#
+# How do I provide a custom QuickBook executable ?
+#
+# You may put the following in your user-config.jam or site-config.jam:
+#
+# using quickbook : /path/to/quickbook ;
+#
+# or, if 'quickbook' can be found in your PATH,
+#
+# using quickbook : quickbook ;
+#
+#
+# For convenience three alternatives are tried to get a QuickBook executable:
+#
+#    1. If the user points us to a QuickBook executable, that is used.
+#
+# 2. Otherwise, we search for the QuickBook sources and compile QuickBook
+# using the default toolset.
+#
+# 3. As a last resort, we rely on the shell for finding 'quickbook'.
+#
+
+import boostbook ;
+import "class" : new ;
+import feature ;
+import generators ;
+import toolset ;
+import type ;
+import scanner ;
+import project ;
+import targets ;
+import build-system ;
+import path ;
+import common ;
+import errors ;
+
+# The one and only QUICKBOOK type!
+type.register QUICKBOOK : qbk ;
+
+# <quickbook-binary> shell command to run QuickBook
+# <quickbook-binary-dependencies> targets to build QuickBook from sources.
+feature.feature <quickbook-binary> : : free ;
+feature.feature <quickbook-binary-dependencies> : : free dependency ;
+feature.feature <quickbook-define> : : free ;
+feature.feature <quickbook-indent> : : free ;
+feature.feature <quickbook-line-width> : : free ;
+
+
+# quickbook-binary-generator handles generation of the QuickBook executable, by
+# marking it as a dependency for QuickBook docs.
+#
+# If the user supplied the QuickBook command, that will be used.
+#
+# Otherwise we search some sensible places for the QuickBook sources and compile
+# from scratch using the default toolset.
+#
+# As a last resort we rely on the shell to find 'quickbook'.
+#
+class quickbook-binary-generator : generator
+{
+ import modules path targets quickbook ;
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ quickbook.freeze-config ;
+ # QuickBook invocation command and dependencies.
+ local quickbook-binary = [ modules.peek quickbook : .quickbook-binary ] ;
+ local quickbook-binary-dependencies ;
+
+ if ! $(quickbook-binary)
+ {
+ # If the QuickBook source directory was found, mark its main target
+ # as a dependency for the current project. Otherwise, try to find
+ # 'quickbook' in user's PATH
+ local quickbook-dir = [ modules.peek quickbook : .quickbook-dir ] ;
+ if $(quickbook-dir)
+ {
+ # Get the main-target in QuickBook directory.
+ local quickbook-main-target = [ targets.resolve-reference $(quickbook-dir) : $(project) ] ;
+
+                # The first element holds the actual targets, the second the
+                # properties found in the target id. We do not care about the
+                # latter since we have passed the id ourselves.
+ quickbook-main-target =
+ [ $(quickbook-main-target[1]).main-target quickbook ] ;
+
+ quickbook-binary-dependencies =
+ [ $(quickbook-main-target).generate [ $(property-set).propagated ] ] ;
+
+ # Ignore usage-requirements returned as first element.
+ quickbook-binary-dependencies = $(quickbook-binary-dependencies[2-]) ;
+
+ # Some toolsets generate extra targets (e.g. RSP). We must mark
+ # all targets as dependencies for the project, but we will only
+ # use the EXE target for quickbook-to-boostbook translation.
+ for local target in $(quickbook-binary-dependencies)
+ {
+ if [ $(target).type ] = EXE
+ {
+ quickbook-binary =
+ [ path.native
+ [ path.join
+ [ $(target).path ]
+ [ $(target).name ]
+ ]
+ ] ;
+ }
+ }
+ }
+ }
+
+ # Add $(quickbook-binary-dependencies) as a dependency of the current
+ # project and set it as the <quickbook-binary> feature for the
+ # quickbook-to-boostbook rule, below.
+ property-set = [ $(property-set).add-raw
+ <dependency>$(quickbook-binary-dependencies)
+ <quickbook-binary>$(quickbook-binary)
+ <quickbook-binary-dependencies>$(quickbook-binary-dependencies)
+ ] ;
+
+ return [ generator.run $(project) $(name) : $(property-set) : $(sources) : $(multiple) ] ;
+ }
+}
+
+
+# Define a scanner for tracking QBK include dependencies.
+#
+class qbk-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "\\[[ ]*include[ ]+([^]]+)\\]"
+ "\\[[ ]*include:[a-zA-Z0-9_]+[ ]+([^]]+)\\]"
+ "\\[[ ]*import[ ]+([^]]+)\\]" ;
+ }
+}
+
+
+scanner.register qbk-scanner : include ;
+
+type.set-scanner QUICKBOOK : qbk-scanner ;
+
+
+# Initialization of toolset.
+#
+# Parameters:
+# command ? -> path to QuickBook executable.
+#
+# When the command is not supplied, the toolset will search for the QuickBook
+# directory and compile the executable from source. If that fails, we still
+# search the PATH for 'quickbook'.
+#
+rule init (
+ command ? # path to the QuickBook executable.
+ )
+{
+ if $(command)
+ {
+ if $(.config-frozen)
+ {
+ errors.user-error "quickbook: configuration cannot be changed after it has been used." ;
+ }
+ .command = $(command) ;
+ }
+}
+
+rule freeze-config ( )
+{
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+
+ # QuickBook invocation command and dependencies.
+
+ .quickbook-binary = $(.command) ;
+
+ if $(.quickbook-binary)
+ {
+ # Use user-supplied command.
+ .quickbook-binary = [ common.get-invocation-command quickbook : quickbook : $(.quickbook-binary) ] ;
+ }
+ else
+ {
+ # Search for QuickBook sources in sensible places, like
+ # $(BOOST_ROOT)/tools/quickbook
+ # $(BOOST_BUILD_PATH)/../../quickbook
+
+ # And build quickbook executable from sources.
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ local boost-build-path = [ build-system.location ] ;
+
+ if $(boost-root)
+ {
+ .quickbook-dir += [ path.join $(boost-root) tools ] ;
+ }
+
+ if $(boost-build-path)
+ {
+ .quickbook-dir += $(boost-build-path)/../.. ;
+ }
+
+ .quickbook-dir = [ path.glob $(.quickbook-dir) : quickbook ] ;
+
+ # If the QuickBook source directory was found, mark its main target
+ # as a dependency for the current project. Otherwise, try to find
+ # 'quickbook' in user's PATH
+ if $(.quickbook-dir)
+ {
+ .quickbook-dir = [ path.make $(.quickbook-dir[1]) ] ;
+ }
+ else
+ {
+ ECHO "QuickBook warning: The path to the quickbook executable was" ;
+ ECHO " not provided. Additionally, couldn't find QuickBook" ;
+ ECHO " sources searching in" ;
+ ECHO " * BOOST_ROOT/tools/quickbook" ;
+ ECHO " * BOOST_BUILD_PATH/../../quickbook" ;
+ ECHO " Will now try to find a precompiled executable by searching" ;
+ ECHO " the PATH for 'quickbook'." ;
+ ECHO " To disable this warning in the future, or to completely" ;
+ ECHO " avoid compilation of quickbook, you can explicitly set the" ;
+ ECHO " path to a quickbook executable command in user-config.jam" ;
+ ECHO " or site-config.jam with the call" ;
+ ECHO " using quickbook : /path/to/quickbook ;" ;
+
+ # As a last resort, search for 'quickbook' command in path. Note
+ # that even if the 'quickbook' command is not found,
+ # get-invocation-command will still return 'quickbook' and might
+ # generate an error while generating the virtual-target.
+
+ .quickbook-binary = [ common.get-invocation-command quickbook : quickbook ] ;
+ }
+ }
+ }
+}
+
+
+generators.register [ new quickbook-binary-generator quickbook.quickbook-to-boostbook : QUICKBOOK : XML ] ;
+
+
+# <quickbook-binary> shell command to run QuickBook
+# <quickbook-binary-dependencies> targets to build QuickBook from sources.
+toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND <quickbook-binary> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES <quickbook-binary-dependencies> ;
+toolset.flags quickbook.quickbook-to-boostbook INCLUDES <include> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES <quickbook-define> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-INDENT <quickbook-indent> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH <quickbook-line-width> ;
+
+
+rule quickbook-to-boostbook ( target : source : properties * )
+{
+ # Signal dependency of quickbook sources on <quickbook-binary-dependencies>
+ # upon invocation of quickbook-to-boostbook.
+ DEPENDS $(target) : [ on $(target) return $(QB-DEPENDENCIES) ] ;
+}
+
+
+actions quickbook-to-boostbook
+{
+ "$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" --output-file="$(1)" "$(2)"
+}
+
+
+# Declare a main target to convert a quickbook source into a boostbook XML file.
+#
+rule to-boostbook ( target-name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target-name) : $(project) : XML
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
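+
+# For illustration only (an editor's sketch; the target and file names are
+# hypothetical), a project that wants the intermediate BoostBook XML directly
+# could write:
+#
+#   import quickbook ;
+#   quickbook.to-boostbook my_docs_xml : my_docs.qbk ;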
diff --git a/jam-files/boost-build/tools/rc.jam b/jam-files/boost-build/tools/rc.jam
new file mode 100644
index 000000000..9964d339b
--- /dev/null
+++ b/jam-files/boost-build/tools/rc.jam
@@ -0,0 +1,156 @@
+# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+#
+# Copyright (c) 2006 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+import generators ;
+import feature ;
+import errors ;
+import scanner ;
+import toolset : flags ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+type.register RC : rc ;
+
+rule init ( )
+{
+}
+
+# Configures a new resource compilation command specific to a condition,
+# usually a toolset selection condition. The possible options are:
+#
+# * <rc-type>(rc|windres) - Indicates the type of options the command
+# accepts.
+#
+# Even though the arguments are all optional, only when a command, condition,
+# and at minimum the rc-type option are given will the command be configured.
+# This is so that callers don't have to check auto-configuration values
+# before calling this, and still get build failures when the resource
+# compiler can't be found.
+#
+rule configure ( command ? : condition ? : options * )
+{
+ local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
+
+ if $(command) && $(condition) && $(rc-type)
+ {
+ flags rc.compile.resource .RC $(condition) : $(command) ;
+ flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ;
+ flags rc.compile.resource DEFINES <define> ;
+ flags rc.compile.resource INCLUDES <include> ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using rc compiler :: $(condition) :: $(command) ;
+ }
+ }
+}
+
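+# For illustration only (an editor's sketch; the command path and condition are
+# placeholders), a toolset module that ships windres might configure it roughly
+# like this:
+#
+#   import rc ;
+#   rc.configure /usr/bin/windres : <toolset>gcc : <rc-type>windres ;
+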
+rule compile.resource ( target : sources * : properties * )
+{
+ local rc-type = [ on $(target) return $(.RC_TYPE) ] ;
+ rc-type ?= null ;
+ compile.resource.$(rc-type) $(target) : $(sources[1]) ;
+}
+
+actions compile.resource.rc
+{
+ "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"
+}
+
+actions compile.resource.windres
+{
+ "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"
+}
+
+actions quietly compile.resource.null
+{
+ as /dev/null -o "$(<)"
+}
+
+# Since it's a common practice to write
+#   exe hello : hello.cpp hello.rc ;
+# we change the name of the object created from the RC file to
+# avoid a conflict with hello.cpp.
+# The reason we generate OBJ and not RES is that gcc does not
+# seem to like RES files, but works OK with OBJ.
+# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
+#
+# Using 'register-c-compiler' adds the build directory to INCLUDES
+generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ;
+
+# Register scanner for resources
+class res-scanner : scanner
+{
+ import regex virtual-target path scanner ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ self.includes = $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ;
+ local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ;
+ local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ;
+
+        # Icons and other includes may be referenced as
+ #
+ # IDR_MAINFRAME ICON "res\\icon.ico"
+ #
+        # so we have to replace the double backslashes with forward slashes.
+ res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ;
+
+        # CONSIDER: the new scoping rule seems to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+        # Attach the binding of the including file to the included targets.
+        # When the target is created directly from a virtual target, this
+        # extra information is unnecessary. But in other cases it allows us
+        # to distinguish between two headers of the same name included from
+        # different places.
+        # We don't need this extra information for angle includes, since
+        # they should not depend on the including file (we can't get a
+        # literal "." in the include path).
+ local g2 = $(g)"#"$(b) ;
+
+ angle = $(angle:G=$(g)) ;
+ quoted = $(quoted:G=$(g2)) ;
+ res = $(res:G=$(g2)) ;
+
+ local all = $(angle) $(quoted) ;
+
+ INCLUDES $(target) : $(all) ;
+ DEPENDS $(target) : $(res) ;
+ NOCARE $(all) $(res) ;
+ SEARCH on $(angle) = $(self.includes:G=) ;
+ SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
+ SEARCH on $(res) = $(b) $(self.includes:G=) ;
+
+        # Just propagate the current scanner to includes, in the hope
+        # that the includes do not change scanners.
+ scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
+ }
+}
+
+scanner.register res-scanner : include ;
+type.set-scanner RC : res-scanner ;
diff --git a/jam-files/boost-build/tools/stage.jam b/jam-files/boost-build/tools/stage.jam
new file mode 100644
index 000000000..296e7558e
--- /dev/null
+++ b/jam-files/boost-build/tools/stage.jam
@@ -0,0 +1,524 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'install' rule, used to copy a set of targets to a
+# single location.
+
+import targets ;
+import "class" : new ;
+import errors ;
+import type ;
+import generators ;
+import feature ;
+import project ;
+import virtual-target ;
+import path ;
+import types/register ;
+
+
+feature.feature <install-dependencies> : off on : incidental ;
+feature.feature <install-type> : : free incidental ;
+feature.feature <install-source-root> : : free path ;
+feature.feature <so-version> : : free incidental ;
+
+# If 'on', version symlinks for shared libraries will not be created. Affects
+# Unix builds only.
+feature.feature <install-no-version-symlinks> : on : optional incidental ;
+
+
+class install-target-class : basic-target
+{
+ import feature ;
+ import project ;
+ import type ;
+ import errors ;
+ import generators ;
+ import path ;
+ import stage ;
+ import "class" : new ;
+ import property ;
+ import property-set ;
+
+ rule __init__ ( name-and-dir : project : sources * : requirements * : default-build * )
+ {
+ basic-target.__init__ $(name-and-dir) : $(project) : $(sources) :
+ $(requirements) : $(default-build) ;
+ }
+
+ # If <location> is not set, sets it based on the project data.
+ #
+ rule update-location ( property-set )
+ {
+ local loc = [ $(property-set).get <location> ] ;
+ if ! $(loc)
+ {
+ loc = [ path.root $(self.name) [ $(self.project).get location ] ] ;
+ property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ;
+ }
+
+ return $(property-set) ;
+ }
+
+ # Takes a target that is installed and a property set which is used when
+ # installing.
+ #
+ rule adjust-properties ( target : build-property-set )
+ {
+ local ps-raw ;
+ local a = [ $(target).action ] ;
+ if $(a)
+ {
+ local ps = [ $(a).properties ] ;
+ ps-raw = [ $(ps).raw ] ;
+
+ # Unless <hardcode-dll-paths>true is in properties, which can happen
+ # only if the user has explicitly requested it, nuke all <dll-path>
+ # properties.
+ if [ $(build-property-set).get <hardcode-dll-paths> ] != true
+ {
+ ps-raw = [ property.change $(ps-raw) : <dll-path> ] ;
+ }
+
+ # If any <dll-path> properties were specified for installing, add
+ # them.
+ local l = [ $(build-property-set).get <dll-path> ] ;
+ ps-raw += $(l:G=<dll-path>) ;
+
+ # Also copy <linkflags> feature from current build set, to be used
+ # for relinking.
+ local l = [ $(build-property-set).get <linkflags> ] ;
+ ps-raw += $(l:G=<linkflags>) ;
+
+ # Remove the <tag> feature on original targets.
+ ps-raw = [ property.change $(ps-raw) : <tag> ] ;
+
+ # And <location>. If stage target has another stage target in
+ # sources, then we shall get virtual targets with the <location>
+ # property set.
+ ps-raw = [ property.change $(ps-raw) : <location> ] ;
+ }
+
+ local d = [ $(build-property-set).get <dependency> ] ;
+ ps-raw += $(d:G=<dependency>) ;
+
+ local d = [ $(build-property-set).get <location> ] ;
+ ps-raw += $(d:G=<location>) ;
+
+ local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ;
+ ps-raw += $(ns:G=<install-no-version-symlinks>) ;
+
+ local d = [ $(build-property-set).get <install-source-root> ] ;
+ # Make the path absolute: we shall use it to compute relative paths and
+ # making the path absolute will help.
+ if $(d)
+ {
+ d = [ path.root $(d) [ path.pwd ] ] ;
+ ps-raw += $(d:G=<install-source-root>) ;
+ }
+
+ if $(ps-raw)
+ {
+ return [ property-set.create $(ps-raw) ] ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ source-targets = [ targets-to-stage $(source-targets) :
+ $(property-set) ] ;
+
+ property-set = [ update-location $(property-set) ] ;
+
+ local ename = [ $(property-set).get <name> ] ;
+
+ if $(ename) && $(source-targets[2])
+ {
+ errors.error "When <name> property is used in 'install', only one"
+ "source is allowed" ;
+ }
+
+ local result ;
+ for local i in $(source-targets)
+ {
+ local staged-targets ;
+
+ local new-properties = [ adjust-properties $(i) :
+ $(property-set) ] ;
+
+ # See if something special should be done when staging this type. It
+ # is indicated by the presence of a special "INSTALLED_" type.
+ local t = [ $(i).type ] ;
+ if $(t) && [ type.registered INSTALLED_$(t) ]
+ {
+ if $(ename)
+ {
+ errors.error "In 'install': <name> property specified with target that requires relinking." ;
+ }
+ else
+ {
+ local targets = [ generators.construct $(self.project)
+ $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ;
+ staged-targets += $(targets[2-]) ;
+ }
+ }
+ else
+ {
+ staged-targets = [ stage.copy-file $(self.project) $(ename) :
+ $(i) : $(new-properties) ] ;
+ }
+
+ if ! $(staged-targets)
+ {
+                errors.error "Unable to generate staged version of " [ $(i).str ] ;
+ }
+
+ for t in $(staged-targets)
+ {
+ result += [ virtual-target.register $(t) ] ;
+ }
+ }
+
+ return [ property-set.empty ] $(result) ;
+ }
+
+ # Given the list of source targets explicitly passed to 'stage', returns the
+ # list of targets which must be staged.
+ #
+ rule targets-to-stage ( source-targets * : property-set )
+ {
+ local result ;
+
+ # Traverse the dependencies, if needed.
+ if [ $(property-set).get <install-dependencies> ] = "on"
+ {
+ source-targets = [ collect-targets $(source-targets) ] ;
+ }
+
+ # Filter the target types, if needed.
+ local included-types = [ $(property-set).get <install-type> ] ;
+ for local r in $(source-targets)
+ {
+ local ty = [ $(r).type ] ;
+ if $(ty)
+ {
+ # Do not stage searched libs.
+ if $(ty) != SEARCHED_LIB
+ {
+ if $(included-types)
+ {
+ if [ include-type $(ty) : $(included-types) ]
+ {
+ result += $(r) ;
+ }
+ }
+ else
+ {
+ result += $(r) ;
+ }
+ }
+ }
+ else if ! $(included-types)
+ {
+ # Don't install typeless target if there is an explicit list of
+ # allowed types.
+ result += $(r) ;
+ }
+ }
+
+ return $(result) ;
+ }
+
+ # CONSIDER: figure out why we can not use virtual-target.traverse here.
+ #
+ rule collect-targets ( targets * )
+ {
+ # Find subvariants
+ local s ;
+ for local t in $(targets)
+ {
+ s += [ $(t).creating-subvariant ] ;
+ }
+ s = [ sequence.unique $(s) ] ;
+
+ local result = [ new set ] ;
+ $(result).add $(targets) ;
+
+ for local i in $(s)
+ {
+ $(i).all-referenced-targets $(result) ;
+ }
+ local result2 ;
+ for local r in [ $(result).list ]
+ {
+ if $(r:G) != <use>
+ {
+ result2 += $(r:G=) ;
+ }
+ }
+ DELETE_MODULE $(result) ;
+ result = [ sequence.unique $(result2) ] ;
+ }
+
+ # Returns true iff 'type' is subtype of some element of 'types-to-include'.
+ #
+ local rule include-type ( type : types-to-include * )
+ {
+ local found ;
+ while $(types-to-include) && ! $(found)
+ {
+ if [ type.is-subtype $(type) $(types-to-include[1]) ]
+ {
+ found = true ;
+ }
+ types-to-include = $(types-to-include[2-]) ;
+ }
+
+ return $(found) ;
+ }
+}
+
+
+# Creates a copy of target 'source'. The 'properties' object should have a
+# <location> property which specifies where the target must be placed.
+#
+rule copy-file ( project name ? : source : properties )
+{
+ name ?= [ $(source).name ] ;
+ local relative ;
+
+ local new-a = [ new non-scanning-action $(source) : common.copy :
+ $(properties) ] ;
+ local source-root = [ $(properties).get <install-source-root> ] ;
+ if $(source-root)
+ {
+ # Get the real path of the target. We probably need to strip relative
+ # path from the target name at construction.
+ local path = [ $(source).path ] ;
+ path = [ path.root $(name:D) $(path) ] ;
+ # Make the path absolute. Otherwise, it would be hard to compute the
+ # relative path. The 'source-root' is already absolute, see the
+ # 'adjust-properties' method above.
+ path = [ path.root $(path) [ path.pwd ] ] ;
+
+ relative = [ path.relative-to $(source-root) $(path) ] ;
+ }
+
+ # Note: Using $(name:D=$(relative)) might be faster here, but then we would
+ # need to explicitly check that relative is not ".", otherwise we might get
+ # paths like '<prefix>/boost/.', try to create it and mkdir would obviously
+ # fail.
+ name = [ path.join $(relative) $(name:D=) ] ;
+
+ return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
+ $(new-a) ] ;
+}
+
+
+rule symlink ( name : project : source : properties )
+{
+ local a = [ new action $(source) : symlink.ln : $(properties) ] ;
+ return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
+ $(a) ] ;
+}
+
+
+rule relink-file ( project : source : property-set )
+{
+ local action = [ $(source).action ] ;
+ local cloned-action = [ virtual-target.clone-action $(action) : $(project) :
+ "" : $(property-set) ] ;
+ return [ $(cloned-action).targets ] ;
+}
+
+
+# Declare installed version of the EXE type. Generator for this type will cause
+# relinking to the new location.
+type.register INSTALLED_EXE : : EXE ;
+
+
+class installed-exe-generator : generator
+{
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
+ rule __init__ ( )
+ {
+ generator.__init__ install-exe : EXE : INSTALLED_EXE ;
+ }
+
+ rule run ( project name ? : property-set : source : multiple ? )
+ {
+ local need-relink ;
+
+ if [ $(property-set).get <os> ] in NT CYGWIN ||
+ [ $(property-set).get <target-os> ] in windows cygwin
+ {
+ }
+ else
+ {
+            # Check whether the dll-path properties change during install.
+            # If they do not, copy instead of relinking.
+ local a = [ $(source).action ] ;
+ local p = [ $(a).properties ] ;
+ local original = [ $(p).get <dll-path> ] ;
+ local current = [ $(property-set).get <dll-path> ] ;
+
+ if $(current) != $(original)
+ {
+ need-relink = true ;
+ }
+ }
+
+
+ if $(need-relink)
+ {
+ return [ stage.relink-file $(project)
+ : $(source) : $(property-set) ] ;
+ }
+ else
+ {
+ return [ stage.copy-file $(project)
+ : $(source) : $(property-set) ] ;
+ }
+ }
+}
+
+
+generators.register [ new installed-exe-generator ] ;
+
+
+# Installing a shared library on Unix might cause the creation of versioned
+# symbolic links.
+type.register INSTALLED_SHARED_LIB : : SHARED_LIB ;
+
+
+class installed-shared-lib-generator : generator
+{
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
+ rule __init__ ( )
+ {
+ generator.__init__ install-shared-lib : SHARED_LIB
+ : INSTALLED_SHARED_LIB ;
+ }
+
+ rule run ( project name ? : property-set : source : multiple ? )
+ {
+ if [ $(property-set).get <os> ] in NT CYGWIN ||
+ [ $(property-set).get <target-os> ] in windows cygwin
+ {
+ local copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ return [ virtual-target.register $(copied) ] ;
+ }
+ else
+ {
+ local a = [ $(source).action ] ;
+ local copied ;
+ if ! $(a)
+ {
+ # Non-derived file, just copy.
+ copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ else
+ {
+ local cp = [ $(a).properties ] ;
+ local current-dll-path = [ $(cp).get <dll-path> ] ;
+ local new-dll-path = [ $(property-set).get <dll-path> ] ;
+
+ if $(current-dll-path) != $(new-dll-path)
+ {
+ # Rpath changed, need to relink.
+ copied = [ stage.relink-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ else
+ {
+ copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ }
+
+ copied = [ virtual-target.register $(copied) ] ;
+
+ local result = $(copied) ;
+ # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
+ # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
+ # symbolic links.
+ local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$
+ : [ $(copied).name ] ] ;
+ if $(m)
+ {
+                # A symlink without any version is used to make
+                # -lsome_library work.
+ result += [ stage.symlink $(m[1]) : $(project) : $(copied) :
+ $(property-set) ] ;
+
+                # Symlinks to libfoo.N and libfoo.N.M are used so that the
+                # library can be found at runtime, if libfoo.N.M.X has a soname
+                # of libfoo.N. That happens when the library makes some binary
+                # compatibility guarantees. If not, it is possible to skip
+                # those symlinks.
+ local suppress =
+ [ $(property-set).get <install-no-version-symlinks> ] ;
+
+ if $(suppress) != "on"
+ {
+ result += [ stage.symlink $(m[1]).$(m[2]) : $(project)
+ : $(copied) : $(property-set) ] ;
+ result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) : $(project)
+ : $(copied) : $(property-set) ] ;
+ }
+ }
+
+ return $(result) ;
+ }
+ }
+}
+
+generators.register [ new installed-shared-lib-generator ] ;
+
+
+# Main target rule for 'install'.
+#
+rule install ( name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ # Unless the user has explicitly asked us to hardcode dll paths, add
+ # <hardcode-dll-paths>false in requirements, to override default value.
+ if ! <hardcode-dll-paths>true in $(requirements)
+ {
+ requirements += <hardcode-dll-paths>false ;
+ }
+
+ if <tag> in $(requirements:G)
+ {
+ errors.user-error
+ "The <tag> property is not allowed for the 'install' rule" ;
+ }
+
+ targets.main-target-alternative
+ [ new install-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
+
+
+IMPORT $(__name__) : install : : install ;
+IMPORT $(__name__) : install : : stage ;
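+
+# For illustration only (an editor's sketch; the target and directory names are
+# hypothetical), a typical use of the 'install' rule in a Jamfile copies the
+# 'myapp' target and its EXE/SHARED_LIB dependencies into dist/bin:
+#
+#   install dist-bin
+#       : myapp
+#       : <location>dist/bin
+#         <install-dependencies>on
+#         <install-type>EXE
+#         <install-type>SHARED_LIB
+#       ;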
diff --git a/jam-files/boost-build/tools/stlport.jam b/jam-files/boost-build/tools/stlport.jam
new file mode 100644
index 000000000..62eebda5f
--- /dev/null
+++ b/jam-files/boost-build/tools/stlport.jam
@@ -0,0 +1,303 @@
+# Copyright Gennadiy Rozental
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# STLport is usable by means of the 'stdlib' feature. When stdlib=stlport is
+# specified, the default version of STLport will be used, while
+# stdlib=stlport-4.5 will use a specific version.
+# The subfeature value 'hostios' means to use the host compiler's iostreams.
+#
+# The specific STLport library is selected by features:
+# the <runtime-link> feature selects between the static and shared library,
+# and <runtime-debugging>on selects STLport with debug symbols and STL
+# debugging.
+# There's no way to use STLport with debug symbols but without STL debugging.
+
+# TODO: must implement selection of different STLPort installations based
+# on the toolset used.
+# Also, finish various flags:
+#
+# This is copied from V1 toolset, "+" means "implemented"
+#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ;
+# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ;
+
+
+import feature : feature subfeature ;
+import project ;
+import "class" : new ;
+import targets ;
+import property-set ;
+import common ;
+import type ;
+
+# Make this module into a project.
+project.initialize $(__name__) ;
+project stlport ;
+
+# The problem: how to request use of the host compiler's iostreams?
+#
+# Solution 1: A global 'stlport-iostream' feature.
+#    That's ugly; a subfeature makes more sense for an stlport-specific thing.
+# Solution 2: Use a subfeature with two values, one of which ("use STLport
+#    iostreams") is the default.
+#    The problem is that such a subfeature will appear in target paths, and
+#    that's ugly.
+# Solution 3: Use an optional subfeature with only one value.
+
+feature.extend stdlib : stlport ;
+feature.compose <stdlib>stlport : <library>/stlport//stlport ;
+
+# STLport iostreams or native iostreams
+subfeature stdlib stlport : iostream : hostios : optional propagated ;
+
+# STLport extensions
+subfeature stdlib stlport : extensions : noext : optional propagated ;
+
+# STLport anachronisms -- NOT YET SUPPORTED
+# subfeature stdlib stlport : anachronisms : on off ;
+
+# STLport debug allocation -- NOT YET SUPPORTED
+#subfeature stdlib stlport : debug-alloc : off on ;
+
+# Declare a special target class to handle the creation of search-lib-target
+# instances for STLport. We need a special class because otherwise we'd have to:
+# - declare prebuilt targets for all possible toolsets, even though by the time
+#   'init' is called we don't even know the list of registered toolsets;
+# - produce nothing when host iostreams are used, which would be hard or
+#   impossible to achieve using prebuilt targets.
+
+class stlport-target-class : basic-target
+{
+ import feature project type errors generators ;
+ import set : difference ;
+
+ rule __init__ ( project : headers ? : libraries * : version ? )
+ {
+ basic-target.__init__ stlport : $(project) ;
+ self.headers = $(headers) ;
+ self.libraries = $(libraries) ;
+ self.version = $(version) ;
+ self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ;
+
+ local requirements ;
+ requirements += <stdlib-stlport:version>$(self.version) ;
+ self.requirements = [ property-set.create $(requirements) ] ;
+ }
+
+ rule generate ( property-set )
+ {
+ # Since this target is built with <stdlib>stlport, it will also
+ # have <library>/stlport//stlport in requirements, which will
+ # cause a loop in main target references. Remove that property
+ # manually.
+
+ property-set = [ property-set.create
+ [ difference
+ [ $(property-set).raw ] :
+ <library>/stlport//stlport
+ <stdlib>stlport
+ ]
+ ] ;
+ return [ basic-target.generate $(property-set) ] ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ # Deduce the name of stlport library, based on toolset and
+ # debug setting.
+ local raw = [ $(property-set).raw ] ;
+ local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ;
+ local toolset = [ feature.get-values <toolset> : $(raw) ] ;
+
+ if $(self.version.5)
+ {
+ # Version 5.x
+
+            # STLport host IO streams are no longer supported, so we always
+            # need libraries.
+
+ # name: stlport(stl)?[dg]?(_static)?.M.R
+ local name = stlport ;
+ if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
+ {
+ name += stl ;
+ switch $(toolset)
+ {
+ case gcc* : name += g ;
+ case darwin* : name += g ;
+ case * : name += d ;
+ }
+ }
+
+ if [ feature.get-values <runtime-link> : $(raw) ] = "static"
+ {
+ name += _static ;
+ }
+
+ # Starting with version 5.2.0, the STLport static libraries no longer
+ # include a version number in their name
+ local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(version) ] ;
+ if $(version.pre.5.2) || [ feature.get-values <runtime-link> : $(raw) ] != "static"
+ {
+ name += .$(self.version.5) ;
+ }
+
+ name = $(name:J=) ;
+
+ if [ feature.get-values <install-dependencies> : $(raw) ] = "on"
+ {
+ #~ Allow explicitly asking to install the STLport lib by
+                #~ referring to it directly: /stlport//stlport/<install-dependencies>on
+ #~ This allows for install packaging of all libs one might need for
+ #~ a standalone distribution.
+ import path : make : path-make ;
+ local runtime-link
+ = [ feature.get-values <runtime-link> : $(raw) ] ;
+ local lib-file.props
+ = [ property-set.create $(raw) <link>$(runtime-link) ] ;
+ local lib-file.prefix
+ = [ type.generated-target-prefix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
+ local lib-file.suffix
+ = [ type.generated-target-suffix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
+ lib-file.prefix
+ ?= "" "lib" ;
+ lib-file.suffix
+ ?= "" ;
+ local lib-file
+ = [ GLOB $(self.libraries) [ modules.peek : PATH ] :
+ $(lib-file.prefix)$(name).$(lib-file.suffix) ] ;
+ lib-file
+ = [ new file-reference [ path-make $(lib-file[1]) ] : $(self.project) ] ;
+ lib-file
+ = [ $(lib-file).generate "" ] ;
+ local lib-file.requirements
+ = [ targets.main-target-requirements
+ [ $(lib-file.props).raw ] <file>$(lib-file[-1])
+ : $(self.project) ] ;
+ return [ generators.construct $(self.project) $(name) : LIB : $(lib-file.requirements) ] ;
+ }
+ else
+ {
+ #~ Otherwise, it's just a regular usage of the library.
+ return [ generators.construct
+ $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
+ }
+ }
+ else if ! $(hostios) && $(toolset) != msvc
+ {
+            # We don't need libraries if host iostreams are used. For msvc,
+            # automatic library selection will be used.
+
+ # name: stlport_<toolset>(_stldebug)?
+ local name = stlport ;
+ name = $(name)_$(toolset) ;
+ if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
+ {
+ name = $(name)_stldebug ;
+ }
+
+ return [ generators.construct
+ $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule compute-usage-requirements ( subvariant )
+ {
+ local usage-requirements =
+ <include>$(self.headers)
+ <dll-path>$(self.libraries)
+ <library-path>$(self.libraries)
+ ;
+
+ local rproperties = [ $(subvariant).build-properties ] ;
+ # CONSIDER: should this "if" sequence be replaced with
+ # some use of 'property-map' class?
+ if [ $(rproperties).get <runtime-debugging> ] = "on"
+ {
+ usage-requirements +=
+ <define>_STLP_DEBUG=1
+ <define>_STLP_DEBUG_UNINITIALIZED=1 ;
+ }
+ if [ $(rproperties).get <runtime-link> ] = "shared"
+ {
+ usage-requirements +=
+ <define>_STLP_USE_DYNAMIC_LIB=1 ;
+ }
+ if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext
+ {
+ usage-requirements +=
+ <define>_STLP_NO_EXTENSIONS=1 ;
+ }
+ if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios
+ {
+ usage-requirements +=
+ <define>_STLP_NO_OWN_IOSTREAMS=1
+ <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ;
+ }
+ if $(self.version.5)
+ {
+ # Version 5.x
+ if [ $(rproperties).get <threading> ] = "single"
+ {
+                # Since STLport5 doesn't normally support single-threaded
+                # builds, we force STLport5 into multi-threaded mode. As a
+                # result, single-threaded code from other libs ends up linking
+                # against a multi-threaded lib.
+ usage-requirements +=
+ <define>_STLP_THREADS=1 ;
+ }
+ }
+
+ return [ property-set.create $(usage-requirements) ] ;
+ }
+}
+
+rule stlport-target ( headers ? : libraries * : version ? )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new stlport-target-class $(project) : $(headers) : $(libraries)
+ : $(version)
+ ] ;
+}
+
+local .version-subfeature-defined ;
+
+# Initialize stlport support.
+rule init (
+ version ? :
+ headers : # Location of header files
+ libraries * # Location of libraries, lib and bin subdirs of STLport.
+ )
+{
+ # FIXME: need to use common.check-init-parameters here.
+ # At the moment, that rule always tries to define subfeature
+ # of the 'toolset' feature, while we need to define subfeature
+ # of <stdlib>stlport, so tweaks to check-init-parameters are needed.
+ if $(version)
+ {
+ if ! $(.version-subfeature-defined)
+ {
+ feature.subfeature stdlib stlport : version : : propagated ;
+ .version-subfeature-defined = true ;
+ }
+ feature.extend-subfeature stdlib stlport : version : $(version) ;
+ }
+
+ # Declare the main target for this STLPort version.
+ stlport-target $(headers) : $(libraries) : $(version) ;
+}
+
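+# For illustration only (an editor's sketch; the version and paths are
+# placeholders), STLport is typically enabled from user-config.jam and then
+# selected per build:
+#
+#   using stlport : 5.2.1 : /usr/local/include/stlport : /usr/local/lib ;
+#
+# followed by a build with, e.g., 'bjam stdlib=stlport-5.2.1'.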
diff --git a/jam-files/boost-build/tools/sun.jam b/jam-files/boost-build/tools/sun.jam
new file mode 100644
index 000000000..0ca927d3e
--- /dev/null
+++ b/jam-files/boost-build/tools/sun.jam
@@ -0,0 +1,142 @@
+# Copyright (C) Christopher Currie 2003. Permission to copy, use,
+# modify, sell and distribute this software is granted provided this
+# copyright notice appears in all copies. This software is provided
+# "as is" without express or implied warranty, and with no claim as
+# to its suitability for any purpose.
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import type ;
+import common ;
+
+feature.extend toolset : sun ;
+toolset.inherit sun : unix ;
+generators.override sun.prebuilt : builtin.lib-generator ;
+generators.override sun.prebuilt : builtin.prebuilt ;
+generators.override sun.searched-lib-generator : searched-lib-generator ;
+
+feature.extend stdlib : sun-stlport ;
+feature.compose <stdlib>sun-stlport
+ : <cxxflags>-library=stlport4 <linkflags>-library=stlport4
+ ;
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters sun : version $(version) ] ;
+
+ command = [ common.get-invocation-command sun : CC
+ : $(command) : "/opt/SUNWspro/bin" ] ;
+
+    # Even if the real compiler is not found, put CC on the command line so
+    # that the user sees the command line that would have been executed.
+ command ?= CC ;
+
+ common.handle-options sun : $(condition) : $(command) : $(options) ;
+
+ command_c = $(command[1--2]) $(command[-1]:B=cc) ;
+
+ toolset.flags sun CONFIG_C_COMMAND $(condition) : $(command_c) ;
+}
+
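+# For illustration only (an editor's sketch; the version and path are
+# placeholders), the Sun compiler is typically configured from user-config.jam:
+#
+#   using sun : 12.1 : /opt/SUNWspro/bin/CC ;
+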
+# Declare generators
+generators.register-c-compiler sun.compile.c : C : OBJ : <toolset>sun ;
+generators.register-c-compiler sun.compile.c++ : CPP : OBJ : <toolset>sun ;
+
+# Declare flags and actions for compilation
+flags sun.compile OPTIONS <debug-symbols>on : -g ;
+flags sun.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags sun.compile OPTIONS <optimization>speed : -xO4 ;
+flags sun.compile OPTIONS <optimization>space : -xO2 -xspace ;
+flags sun.compile OPTIONS <threading>multi : -mt ;
+flags sun.compile OPTIONS <warnings>off : -erroff ;
+flags sun.compile OPTIONS <warnings>on : -erroff=%none ;
+flags sun.compile OPTIONS <warnings>all : -erroff=%none ;
+flags sun.compile OPTIONS <warnings-as-errors>on : -errwarn ;
+
+flags sun.compile.c++ OPTIONS <inlining>off : +d ;
+
+# The -m32 and -m64 options are supported starting
+# with Sun Studio 12. On earlier compilers, the
+# 'address-model' feature is not supported and should not
+# be used. Instead, use the -xarch=generic64 command line
+# option.
+# See http://svn.boost.org/trac/boost/ticket/1186
+# for details.
+flags sun OPTIONS <address-model>32 : -m32 ;
+flags sun OPTIONS <address-model>64 : -m64 ;
+# On sparc, there's a difference between -Kpic
+# and -KPIC. The first is slightly more efficient,
+# but has limits on the size of the GOT table.
+# For minimal fuss on user side, we use -KPIC here.
+# See http://svn.boost.org/trac/boost/ticket/1186#comment:6
+# for detailed explanation.
+flags sun OPTIONS <link>shared : -KPIC ;
+
+flags sun.compile OPTIONS <cflags> ;
+flags sun.compile.c++ OPTIONS <cxxflags> ;
+flags sun.compile DEFINES <define> ;
+flags sun.compile INCLUDES <include> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags sun.link OPTIONS <debug-symbols>on : -g ;
+# Strip the binary when no debugging is needed
+flags sun.link OPTIONS <debug-symbols>off : -s ;
+flags sun.link OPTIONS <profiling>on : -xprofile=tcov ;
+flags sun.link OPTIONS <threading>multi : -mt ;
+flags sun.link OPTIONS <linkflags> ;
+flags sun.link LINKPATH <library-path> ;
+flags sun.link FINDLIBS-ST <find-static-library> ;
+flags sun.link FINDLIBS-SA <find-shared-library> ;
+flags sun.link LIBRARIES <library-file> ;
+flags sun.link LINK-RUNTIME <runtime-link>static : static ;
+flags sun.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags sun.link RPATH <dll-path> ;
+# On gcc, there are separate options for dll path at runtime and
+# link time. On Solaris, there's only one: -R, so we have to use
+# it, even though it's a bad idea.
+flags sun.link RPATH <xdll-path> ;
+
+# The POSIX real-time library is always needed (nanosleep, clock_gettime etc.)
+flags sun.link FINDLIBS-SA : rt ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Declare action for creating static libraries
+actions piecemeal archive
+{
+ "$(CONFIG_COMMAND)" -xar -o "$(<)" "$(>)"
+}
+
diff --git a/jam-files/boost-build/tools/symlink.jam b/jam-files/boost-build/tools/symlink.jam
new file mode 100644
index 000000000..b33e8260c
--- /dev/null
+++ b/jam-files/boost-build/tools/symlink.jam
@@ -0,0 +1,140 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the "symlink" special target. 'symlink' targets make symbolic links
+# to the sources.
+
+import targets modules path class os feature project property-set ;
+
+.count = 0 ;
+
+feature.feature symlink-location : project-relative build-relative : incidental ;
+
+# The class representing "symlink" targets.
+#
+class symlink-targets : basic-target
+{
+ import numbers modules class property project path ;
+
+ rule __init__ (
+ project
+ : targets *
+ : sources *
+ )
+ {
+ # Generate a fake name for now. Need unnamed targets eventually.
+ local c = [ modules.peek symlink : .count ] ;
+ modules.poke symlink : .count : [ numbers.increment $(c) ] ;
+ local fake-name = symlink#$(c) ;
+
+ basic-target.__init__ $(fake-name) : $(project) : $(sources) ;
+
+ # Remember the targets to map the sources onto. Pad or truncate
+ # to fit the sources given.
+ self.targets = ;
+ for local source in $(sources)
+ {
+ if $(targets)
+ {
+ self.targets += $(targets[1]) ;
+ targets = $(targets[2-]) ;
+ }
+ else
+ {
+ self.targets += $(source) ;
+ }
+ }
+
+ # The virtual targets corresponding to the given targets.
+ self.virtual-targets = ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local i = 1 ;
+ for local t in $(source-targets)
+ {
+ local s = $(self.targets[$(i)]) ;
+ local a = [ class.new action $(t) : symlink.ln : $(property-set) ] ;
+ local vt = [ class.new file-target $(s:D=)
+ : [ $(t).type ] : $(self.project) : $(a) ] ;
+
+ # Place the symlink in the directory relative to the project
+ # location, instead of placing it in the build directory.
+ if [ property.select <symlink-location> : [ $(property-set).raw ] ] = <symlink-location>project-relative
+ {
+ $(vt).set-path [ path.root $(s:D) [ $(self.project).get location ] ] ;
+ }
+
+ self.virtual-targets += $(vt) ;
+ i = [ numbers.increment $(i) ] ;
+ }
+ return [ property-set.empty ] $(self.virtual-targets) ;
+ }
+}
+
+# Creates a symbolic link from a set of targets to a set of sources.
+# The targets and sources map one to one. The symlinks generated are
+# limited to be the ones given as the sources. That is, the targets
+# are either padded or trimmed to equate to the sources. The padding
+# is done with the name of the corresponding source. For example::
+#
+# symlink : one two ;
+#
+# Is equal to::
+#
+# symlink one two : one two ;
+#
+# Names for symlink are relative to the project location. They cannot
+# include ".." path components.
+rule symlink (
+ targets *
+ : sources *
+ )
+{
+ local project = [ project.current ] ;
+
+ return [ targets.main-target-alternative
+ [ class.new symlink-targets $(project) : $(targets) :
+ # Note: inline targets are intentionally not supported for symlink,
+ # since it is used to link existing non-local targets.
+ $(sources) ] ] ;
+}
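+
+# A usage sketch (hypothetical Jamfile; the target names are illustrative):
+#
+#   symlink hello-link : hello ;   # creates 'hello-link' pointing at 'hello'
+#   symlink : one two ;            # link names default to the source names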
+
+rule ln
+{
+ local os ;
+ if [ modules.peek : UNIX ] { os = UNIX ; }
+ else { os ?= [ os.name ] ; }
+ # Remember the path to make the link relative to where the symlink is located.
+ local path-to-source = [ path.relative-to
+ [ path.make [ on $(<) return $(LOCATE) ] ]
+ [ path.make [ on $(>) return $(LOCATE) ] ] ] ;
+ if $(path-to-source) = .
+ {
+ PATH_TO_SOURCE on $(<) = "" ;
+ }
+ else
+ {
+ PATH_TO_SOURCE on $(<) = [ path.native $(path-to-source) ] ;
+ }
+ ln-$(os) $(<) : $(>) ;
+}
+
+actions ln-UNIX
+{
+ ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'
+}
+
+# there is a way to do this; we fall back to a copy for now
+actions ln-NT
+{
+ echo "NT symlinks not supported yet, making copy"
+ del /f /q "$(<)" 2>nul >nul
+ copy "$(>)" "$(<)" $(NULL_OUT)
+}
+
+IMPORT $(__name__) : symlink : : symlink ;
diff --git a/jam-files/boost-build/tools/testing-aux.jam b/jam-files/boost-build/tools/testing-aux.jam
new file mode 100644
index 000000000..525dafd0c
--- /dev/null
+++ b/jam-files/boost-build/tools/testing-aux.jam
@@ -0,0 +1,210 @@
+# This module is imported by testing.py. The definitions here are
+# too tricky to do in Python.
+
+# Causes the 'target' to exist after bjam invocation if and only if all the
+# dependencies were successfully built.
+#
+rule expect-success ( target : dependency + : requirements * )
+{
+ **passed** $(target) : $(dependency) ;
+}
+IMPORT testing : expect-success : : testing.expect-success ;
+
+# Causes the 'target' to exist after bjam invocation if and only if some of
+# the dependencies were not successfully built.
+#
+rule expect-failure ( target : dependency + : properties * )
+{
+ local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
+ local marker = $(dependency:G=$(grist)*fail) ;
+ (failed-as-expected) $(marker) ;
+ FAIL_EXPECTED $(dependency) ;
+ LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
+ RMOLD $(marker) ;
+ DEPENDS $(marker) : $(dependency) ;
+ DEPENDS $(target) : $(marker) ;
+ **passed** $(target) : $(marker) ;
+}
+IMPORT testing : expect-failure : : testing.expect-failure ;
+
+# The rule/action combination used to report successful passing of a test.
+#
+rule **passed**
+{
+ # Force deletion of the target, in case any dependencies failed to build.
+ RMOLD $(<) ;
+}
+
+
+# Used to create test files signifying passed tests.
+#
+actions **passed**
+{
+ echo passed > "$(<)"
+}
+
+
+# Used to create replacement object files that do not get created during tests
+# that are expected to fail.
+#
+actions (failed-as-expected)
+{
+ echo failed as expected > "$(<)"
+}
+
+# Runs the executable 'source' and stores its stdout in the file 'target'. Unless
+# the --preserve-test-targets command line option has been specified, removes the
+# executable. The 'targets-to-remove' parameter controls what should be removed:
+# - if 'none', does not remove anything, ever
+# - if empty, removes 'source'
+# - if non-empty and not 'none', contains a list of sources to remove.
+#
+rule capture-output ( target : source : properties * : targets-to-remove * )
+{
+ output-file on $(target) = $(target:S=.output) ;
+ LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
+
+ # The INCLUDES kills a warning about an independent target...
+ INCLUDES $(target) : $(target:S=.output) ;
+ # but it also puts .output into the dependency graph, so we must tell jam it
+ # is OK if it cannot find the target or an updating rule.
+ NOCARE $(target:S=.output) ;
+
+ # This has a two-fold effect. First, it adds input files to the dependency
+ # graph, preventing a warning. Second, it causes input files to be bound
+ # before the target is created. Therefore, they are bound using the SEARCH
+ # setting on them and not the LOCATE setting of $(target), as in the other
+ # case (due to a jam bug).
+ DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
+
+ if $(targets-to-remove) = none
+ {
+ targets-to-remove = ;
+ }
+ else if ! $(targets-to-remove)
+ {
+ targets-to-remove = $(source) ;
+ }
+
+ if [ on $(target) return $(REMOVE_TEST_TARGETS) ]
+ {
+ TEMPORARY $(targets-to-remove) ;
+ # Set a second action on target that will be executed after capture
+ # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
+ # always considered succeeded. This is needed for 'run-fail' test. For
+ # that test the target will be marked with FAIL_EXPECTED, and without
+ # 'ignore' successful execution will be negated and be reported as
+ # failure. With 'ignore' we do not detect a case where removing files
+ # fails, but it is not likely to happen.
+ RmTemps $(target) : $(targets-to-remove) ;
+ }
+}
+
+
+if [ os.name ] = NT
+{
+ .STATUS = %status% ;
+ .SET_STATUS = "set status=%ERRORLEVEL%" ;
+ .RUN_OUTPUT_NL = "echo." ;
+ .STATUS_0 = "%status% EQU 0 (" ;
+ .STATUS_NOT_0 = "%status% NEQ 0 (" ;
+ .VERBOSE = "%verbose% EQU 1 (" ;
+ .ENDIF = ")" ;
+ .SHELL_SET = "set " ;
+ .CATENATE = type ;
+ .CP = copy ;
+}
+else
+{
+ .STATUS = "$status" ;
+ .SET_STATUS = "status=$?" ;
+ .RUN_OUTPUT_NL = "echo" ;
+ .STATUS_0 = "test $status -eq 0 ; then" ;
+ .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
+ .VERBOSE = "test $verbose -eq 1 ; then" ;
+ .ENDIF = "fi" ;
+ .SHELL_SET = "" ;
+ .CATENATE = cat ;
+ .CP = cp ;
+}
+
+
+.VERBOSE_TEST = 0 ;
+if --verbose-test in [ modules.peek : ARGV ]
+{
+ .VERBOSE_TEST = 1 ;
+}
+
+
+.RM = [ common.rm-command ] ;
+
+
+actions capture-output bind INPUT_FILES output-file
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
+ $(.SET_STATUS)
+ $(.RUN_OUTPUT_NL) >> "$(output-file)"
+ echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
+ if $(.STATUS_0)
+ $(.CP) "$(output-file)" "$(<)"
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ echo ====== BEGIN OUTPUT ======
+ $(.CATENATE) "$(output-file)"
+ echo ====== END OUTPUT ======
+ $(.ENDIF)
+ exit $(.STATUS)
+}
+
+IMPORT testing : capture-output : : testing.capture-output ;
+
+
+actions quietly updated ignore piecemeal together RmTemps
+{
+ $(.RM) "$(>)"
+}
+
+
+.MAKE_FILE = [ common.file-creation-command ] ;
+
+actions unit-test
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
+}
+
+rule record-time ( target : source : start end user system )
+{
+ local src-string = [$(source:G=:J=",")"] " ;
+ USER_TIME on $(target) += $(src-string)$(user) ;
+ SYSTEM_TIME on $(target) += $(src-string)$(system) ;
+}
+
+# Calling this rule requests that Boost Build time how long it takes to build the
+# 'source' target and display the results both on the standard output and in the
+# 'target' file.
+#
+rule time ( target : source : properties * )
+{
+ # Set up rule for recording timing information.
+ __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
+
+ # Make sure that the source is rebuilt any time we need to retrieve that
+ # information.
+ REBUILDS $(target) : $(source) ;
+}
+
+
+actions time
+{
+ echo user: $(USER_TIME)
+ echo system: $(SYSTEM_TIME)
+
+ echo user: $(USER_TIME)" seconds" > "$(<)"
+ echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
+}
diff --git a/jam-files/boost-build/tools/testing.jam b/jam-files/boost-build/tools/testing.jam
new file mode 100644
index 000000000..c42075b78
--- /dev/null
+++ b/jam-files/boost-build/tools/testing.jam
@@ -0,0 +1,581 @@
+# Copyright 2005 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements the regression testing framework. It declares a number of
+# main target rules which perform some action and, if the results are OK,
+# create an output file.
+#
+# The exact list of rules is:
+# 'compile' -- creates .test file if compilation of sources was
+# successful.
+# 'compile-fail' -- creates .test file if compilation of sources failed.
+# 'run' -- creates a .test file if running the executable produced from
+# the sources was successful. Also leaves behind a .output file
+# with the output from the program run.
+# 'run-fail' -- same as above, but .test file is created if running fails.
+#
+# In all cases, presence of .test file is an indication that the test passed.
+# For more convenient reporting, you might want to use C++ Boost regression
+# testing utilities (see http://www.boost.org/more/regression.html).
+#
+# For historical reasons, a 'unit-test' rule is available which has the same
+# syntax as 'exe' and behaves just like 'run'.
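+#
+# A minimal usage sketch (hypothetical Jamfile; the file names are illustrative):
+#
+#   import testing ;
+#
+#   run hello_test.cpp ;            # passes if the program runs and exits with 0
+#   run-fail crash_test.cpp ;       # passes if the program exits with non-zero status
+#   compile-fail ill_formed.cpp ;   # passes if compiling the source fails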
+
+# Things to do:
+# - Teach compiler_status to handle Jamfile.v2.
+# Notes:
+# - <no-warn> is not implemented, since it is Como-specific, and it is not
+# clear how to implement it
+# - std::locale-support is not implemented (it is used in one test).
+
+
+import alias ;
+import "class" ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import path ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
+import type ;
+import virtual-target ;
+
+
+rule init ( )
+{
+}
+
+
+# Feature controlling the command used to launch test programs.
+feature.feature testing.launcher : : free optional ;
+
+feature.feature test-info : : free incidental ;
+feature.feature testing.arg : : free incidental ;
+feature.feature testing.input-file : : free dependency ;
+
+feature.feature preserve-test-targets : on off : incidental propagated ;
+
+# Register target types.
+type.register TEST : test ;
+type.register COMPILE : : TEST ;
+type.register COMPILE_FAIL : : TEST ;
+type.register RUN_OUTPUT : run ;
+type.register RUN : : TEST ;
+type.register RUN_FAIL : : TEST ;
+type.register LINK_FAIL : : TEST ;
+type.register LINK : : TEST ;
+type.register UNIT_TEST : passed : TEST ;
+
+
+# Declare the rules which create main targets. While the 'type' module already
+# creates rules with the same names for us, we need extra convenience: default
+# name of main target, so write our own versions.
+
+# Helper rule. Creates a test target, using the basename of the first source if no
+# target name is explicitly passed. Remembers the created target in a global variable.
+#
+rule make-test ( target-type : sources + : requirements * : target-name ? )
+{
+ target-name ?= $(sources[1]:D=:S=) ;
+
+ # Having periods (".") in the target name is problematic because the typed
+ # generator will strip the suffix and use the bare name for the file
+ # targets. Even though the location-prefix averts problems most times it
+ # does not prevent ambiguity issues when referring to the test targets. For
+ # example when using the XML log output. So we rename the target to remove
+ # the periods, and provide an alias for users.
+ local real-name = [ regex.replace $(target-name) "[.]" "~" ] ;
+
+ local project = [ project.current ] ;
+ # The <location-prefix> forces the build system to generate paths in the
+ # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
+ # post-processing tools to work.
+ local t = [ targets.create-typed-target [ type.type-from-rule-name
+ $(target-type) ] : $(project) : $(real-name) : $(sources) :
+ $(requirements) <location-prefix>$(real-name).test ] ;
+
+ # The alias to the real target, per period replacement above.
+ if $(real-name) != $(target-name)
+ {
+ alias $(target-name) : $(t) ;
+ }
+
+ # Remember the test (for --dump-tests). A better way would be to collect all
+ # tests for a given project. This has some technical problems: e.g. we cannot call
+ # this dump from a Jamfile since projects referred to by 'build-project' are
+ # not available until the whole Jamfile has been loaded.
+ .all-tests += $(t) ;
+ return $(t) ;
+}
+
+
+# Note: passing more than one cpp file here is known to fail. Passing a cpp file
+# and a library target works.
+#
+rule compile ( sources + : requirements * : target-name ? )
+{
+ return [ make-test compile : $(sources) : $(requirements) : $(target-name) ]
+ ;
+}
+
+
+rule compile-fail ( sources + : requirements * : target-name ? )
+{
+ return [ make-test compile-fail : $(sources) : $(requirements) :
+ $(target-name) ] ;
+}
+
+
+rule link ( sources + : requirements * : target-name ? )
+{
+ return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ;
+}
+
+
+rule link-fail ( sources + : requirements * : target-name ? )
+{
+ return [ make-test link-fail : $(sources) : $(requirements) : $(target-name)
+ ] ;
+}
+
+
+rule handle-input-files ( input-files * )
+{
+ if $(input-files[2])
+ {
+ # Check that the sorting done when creating the property-set instance will
+ # not change the ordering.
+ if [ sequence.insertion-sort $(input-files) ] != $(input-files)
+ {
+ errors.user-error "Names of input files must be sorted alphabetically"
+ : "due to internal limitations" ;
+ }
+ }
+ return <testing.input-file>$(input-files) ;
+}
+
+
+rule run ( sources + : args * : input-files * : requirements * : target-name ? :
+ default-build * )
+{
+ requirements += <testing.arg>$(args:J=" ") ;
+ requirements += [ handle-input-files $(input-files) ] ;
+ return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ;
+}
+
+
+rule run-fail ( sources + : args * : input-files * : requirements * :
+ target-name ? : default-build * )
+{
+ requirements += <testing.arg>$(args:J=" ") ;
+ requirements += [ handle-input-files $(input-files) ] ;
+ return [ make-test run-fail : $(sources) : $(requirements) : $(target-name)
+ ] ;
+}
+
+
+# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
+IMPORT : alias : : test-suite ;
+
+
+# For all main targets in 'project-module', which are typed targets with type
+# derived from 'TEST', produce some interesting information.
+#
+rule dump-tests
+{
+ for local t in $(.all-tests)
+ {
+ dump-test $(t) ;
+ }
+}
+
+
+# Given a project location in normalized form (slashes are forward), compute the
+# name of the Boost library.
+#
+local rule get-library-name ( path )
+{
+ # Path is in normalized form, so all slashes are forward.
+ local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ;
+ local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ;
+ local match3 = [ MATCH (/status$) : $(path) ] ;
+
+ if $(match1) { return $(match1[2]) ; }
+ else if $(match2) { return $(match2[2]) ; }
+ else if $(match3) { return "" ; }
+ else if --dump-tests in [ modules.peek : ARGV ]
+ {
+ # The 'run' rule and others might be used outside boost. In that case,
+ # just return the path, since the 'library name' makes no sense.
+ return $(path) ;
+ }
+}
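+
+# For illustration (hypothetical path):
+#
+#   [ get-library-name /home/user/boost/libs/regex/test ]   # -> "regex"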
+
+
+# Was an XML dump requested?
+.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
+
+
+# Takes a target (instance of 'basic-target') and prints
+# - its type
+# - its name
+# - comments specified via the <test-info> property
+# - relative location of all sources from the project root.
+#
+rule dump-test ( target )
+{
+ local type = [ $(target).type ] ;
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+
+ local project-root = [ $(project).get project-root ] ;
+ local library = [ get-library-name [ path.root [ $(project).get location ]
+ [ path.pwd ] ] ] ;
+ if $(library)
+ {
+ name = $(library)/$(name) ;
+ }
+
+ local sources = [ $(target).sources ] ;
+ local source-files ;
+ for local s in $(sources)
+ {
+ if [ class.is-a $(s) : file-reference ]
+ {
+ local location = [ path.root [ path.root [ $(s).name ]
+ [ $(s).location ] ] [ path.pwd ] ] ;
+
+ source-files += [ path.relative-to [ path.root $(project-root)
+ [ path.pwd ] ] $(location) ] ;
+ }
+ }
+
+ local target-name = [ $(project).get location ] // [ $(target).name ] .test
+ ;
+ target-name = $(target-name:J=) ;
+
+ local r = [ $(target).requirements ] ;
+ # Extract values of the <test-info> feature.
+ local test-info = [ $(r).get <test-info> ] ;
+
+ # If the user requested XML output on the command-line, add the test info to
+ # that XML file rather than dumping it to stdout.
+ if $(.out-xml)
+ {
+ local nl = "
+" ;
+ .contents on $(.out-xml) +=
+ "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
+ "$(nl) <target><![CDATA[$(target-name)]]></target>"
+ "$(nl) <info><![CDATA[$(test-info)]]></info>"
+ "$(nl) <source><![CDATA[$(source-files)]]></source>"
+ "$(nl) </test>"
+ ;
+ }
+ else
+ {
+ # Format them into a single string of quoted strings.
+ test-info = \"$(test-info:J=\"\ \")\" ;
+
+ ECHO boost-test($(type)) \"$(name)\" [$(test-info)] ":"
+ \"$(source-files)\" ;
+ }
+}
+
+
+# Register generators. Depending on target type, either 'expect-success' or
+# 'expect-failure' rule will be used.
+generators.register-standard testing.expect-success : OBJ : COMPILE ;
+generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ;
+generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
+generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ;
+generators.register-standard testing.expect-failure : EXE : LINK_FAIL ;
+generators.register-standard testing.expect-success : EXE : LINK ;
+
+# Generator which runs an EXE and captures output.
+generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
+
+# Generator which creates a target if sources run successfully. Differs from RUN
+# in that run output is not captured. The reason why it exists is that the 'run'
+# rule is much better for automated testing, but is not user-friendly (see
+# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
+generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
+
+
+# The action rules called by generators.
+
+# Causes the 'target' to exist after bjam invocation if and only if all the
+# dependencies were successfully built.
+#
+rule expect-success ( target : dependency + : requirements * )
+{
+ **passed** $(target) : $(dependency) ;
+}
+
+
+# Causes the 'target' to exist after bjam invocation if and only if some of
+# the dependencies were not successfully built.
+#
+rule expect-failure ( target : dependency + : properties * )
+{
+ local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
+ local marker = $(dependency:G=$(grist)*fail) ;
+ (failed-as-expected) $(marker) ;
+ FAIL_EXPECTED $(dependency) ;
+ LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
+ RMOLD $(marker) ;
+ DEPENDS $(marker) : $(dependency) ;
+ DEPENDS $(target) : $(marker) ;
+ **passed** $(target) : $(marker) ;
+}
+
+
+# The rule/action combination used to report successful passing of a test.
+#
+rule **passed**
+{
+ # Dump all the tests, if needed. We do it here, since dump should happen
+ # only after all Jamfiles have been read, and there is no such place
+ # currently defined (but there should be).
+ if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] )
+ {
+ .dumped-tests = true ;
+ dump-tests ;
+ }
+
+ # Force deletion of the target, in case any dependencies failed to build.
+ RMOLD $(<) ;
+}
+
+
+# Used to create test files signifying passed tests.
+#
+actions **passed**
+{
+ echo passed > "$(<)"
+}
+
+
+# Used to create replacement object files that do not get created during tests
+# that are expected to fail.
+#
+actions (failed-as-expected)
+{
+ echo failed as expected > "$(<)"
+}
+
+
+rule run-path-setup ( target : source : properties * )
+{
+ # For testing, we need to make sure that all dynamic libraries needed by the
+ # test are found. So, we collect all paths from dependency libraries (via
+ # xdll-path property) and add whatever explicit dll-path the user has specified.
+ # The resulting paths are added to the environment on each test invocation.
+ local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ;
+ dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ;
+ dll-paths += [ on $(source) return $(RUN_PATH) ] ;
+ dll-paths = [ sequence.unique $(dll-paths) ] ;
+ if $(dll-paths)
+ {
+ dll-paths = [ sequence.transform path.native : $(dll-paths) ] ;
+ PATH_SETUP on $(target) = [ common.prepend-path-variable-command
+ [ os.shared-library-path-variable ] : $(dll-paths) ] ;
+ }
+}
+
+
+local argv = [ modules.peek : ARGV ] ;
+
+toolset.flags testing.capture-output ARGS <testing.arg> ;
+toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
+toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
+
+
+# Runs the executable 'source' and stores its stdout in the file 'target'. Unless
+# the --preserve-test-targets command line option has been specified, removes the
+# executable. The 'targets-to-remove' parameter controls what should be removed:
+# - if 'none', does not remove anything, ever
+# - if empty, removes 'source'
+# - if non-empty and not 'none', contains a list of sources to remove.
+#
+rule capture-output ( target : source : properties * : targets-to-remove * )
+{
+ output-file on $(target) = $(target:S=.output) ;
+ LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
+
+ # The INCLUDES kills a warning about an independent target...
+ INCLUDES $(target) : $(target:S=.output) ;
+ # but it also puts .output into the dependency graph, so we must tell jam it
+ # is OK if it cannot find the target or an updating rule.
+ NOCARE $(target:S=.output) ;
+
+ # This has a two-fold effect. First, it adds input files to the dependency
+ # graph, preventing a warning. Second, it causes input files to be bound
+ # before the target is created. Therefore, they are bound using the SEARCH
+ # setting on them and not the LOCATE setting of $(target), as in the other
+ # case (due to a jam bug).
+ DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
+
+ if $(targets-to-remove) = none
+ {
+ targets-to-remove = ;
+ }
+ else if ! $(targets-to-remove)
+ {
+ targets-to-remove = $(source) ;
+ }
+
+ run-path-setup $(target) : $(source) : $(properties) ;
+
+ if [ feature.get-values preserve-test-targets : $(properties) ] = off
+ {
+ TEMPORARY $(targets-to-remove) ;
+ # Set a second action on target that will be executed after capture
+ # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
+ # always considered succeeded. This is needed for 'run-fail' test. For
+ # that test the target will be marked with FAIL_EXPECTED, and without
+ # 'ignore' successful execution will be negated and be reported as
+ # failure. With 'ignore' we do not detect a case where removing files
+ # fails, but it is not likely to happen.
+ RmTemps $(target) : $(targets-to-remove) ;
+ }
+}
+
+
+if [ os.name ] = NT
+{
+ .STATUS = %status% ;
+ .SET_STATUS = "set status=%ERRORLEVEL%" ;
+ .RUN_OUTPUT_NL = "echo." ;
+ .STATUS_0 = "%status% EQU 0 (" ;
+ .STATUS_NOT_0 = "%status% NEQ 0 (" ;
+ .VERBOSE = "%verbose% EQU 1 (" ;
+ .ENDIF = ")" ;
+ .SHELL_SET = "set " ;
+ .CATENATE = type ;
+ .CP = copy ;
+}
+else
+{
+ .STATUS = "$status" ;
+ .SET_STATUS = "status=$?" ;
+ .RUN_OUTPUT_NL = "echo" ;
+ .STATUS_0 = "test $status -eq 0 ; then" ;
+ .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
+ .VERBOSE = "test $verbose -eq 1 ; then" ;
+ .ENDIF = "fi" ;
+ .SHELL_SET = "" ;
+ .CATENATE = cat ;
+ .CP = cp ;
+}
+
+
+.VERBOSE_TEST = 0 ;
+if --verbose-test in [ modules.peek : ARGV ]
+{
+ .VERBOSE_TEST = 1 ;
+}
+
+
+.RM = [ common.rm-command ] ;
+
+
+actions capture-output bind INPUT_FILES output-file
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
+ $(.SET_STATUS)
+ $(.RUN_OUTPUT_NL) >> "$(output-file)"
+ echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
+ if $(.STATUS_0)
+ $(.CP) "$(output-file)" "$(<)"
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ echo ====== BEGIN OUTPUT ======
+ $(.CATENATE) "$(output-file)"
+ echo ====== END OUTPUT ======
+ $(.ENDIF)
+ exit $(.STATUS)
+}
+
+
+actions quietly updated ignore piecemeal together RmTemps
+{
+ $(.RM) "$(>)"
+}
+
+
+.MAKE_FILE = [ common.file-creation-command ] ;
+
+toolset.flags testing.unit-test LAUNCHER <testing.launcher> ;
+toolset.flags testing.unit-test ARGS <testing.arg> ;
+
+
+rule unit-test ( target : source : properties * )
+{
+ run-path-setup $(target) : $(source) : $(properties) ;
+}
+
+
+actions unit-test
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
+}
+
+
+IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
+ : : compile compile-fail run run-fail link link-fail ;
+
+
+type.register TIME : time ;
+generators.register-standard testing.time : : TIME ;
+
+
+rule record-time ( target : source : start end user system )
+{
+ local src-string = [$(source:G=:J=",")"] " ;
+ USER_TIME on $(target) += $(src-string)$(user) ;
+ SYSTEM_TIME on $(target) += $(src-string)$(system) ;
+}
+
+
+IMPORT testing : record-time : : testing.record-time ;
+
+
+# Calling this rule requests that Boost Build time how long it takes to build the
+# 'source' target and display the results both on the standard output and in the
+# 'target' file.
+#
+rule time ( target : source : properties * )
+{
+ # Set up rule for recording timing information.
+ __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
+
+ # Make sure that the source is rebuilt any time we need to retrieve that
+ # information.
+ REBUILDS $(target) : $(source) ;
+}
+
+
+actions time
+{
+ echo user: $(USER_TIME)
+ echo system: $(SYSTEM_TIME)
+
+ echo user: $(USER_TIME)" seconds" > "$(<)"
+ echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
+}
diff --git a/jam-files/boost-build/tools/types/asm.jam b/jam-files/boost-build/tools/types/asm.jam
new file mode 100644
index 000000000..a340db36a
--- /dev/null
+++ b/jam-files/boost-build/tools/types/asm.jam
@@ -0,0 +1,4 @@
+# Copyright Craig Rodrigues 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type ASM : s S asm ;
diff --git a/jam-files/boost-build/tools/types/cpp.jam b/jam-files/boost-build/tools/types/cpp.jam
new file mode 100644
index 000000000..3159cdd77
--- /dev/null
+++ b/jam-files/boost-build/tools/types/cpp.jam
@@ -0,0 +1,86 @@
+# Copyright David Abrahams 2004.
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2010 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+import type ;
+import scanner ;
+
+class c-scanner : scanner
+{
+ import path ;
+ import regex ;
+ import scanner ;
+ import sequence ;
+ import virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ for local i in $(includes)
+ {
+ self.includes += [ sequence.transform path.native
+ : [ regex.split $(i:G=) "&&" ] ] ;
+ }
+ }
+
+ rule pattern ( )
+ {
+ return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
+ angle = [ sequence.transform path.native : $(angle) ] ;
+ local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
+ quoted = [ sequence.transform path.native : $(quoted) ] ;
+
+ # CONSIDER: the new scoping rule seem to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ # Attach binding of including file to included targets. When a target is
+ # directly created from virtual target this extra information is
+ # unnecessary. But in other cases, it allows us to distinguish between
+ # two headers of the same name included from different places. We do not
+ # need this extra information for angle includes, since they should not
+ # depend on including file (we can not get literal "." in include path).
+ local g2 = $(g)"#"$(b) ;
+
+ angle = $(angle:G=$(g)) ;
+ quoted = $(quoted:G=$(g2)) ;
+
+ local all = $(angle) $(quoted) ;
+
+ INCLUDES $(target) : $(all) ;
+ NOCARE $(all) ;
+ SEARCH on $(angle) = $(self.includes:G=) ;
+ SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
+
+ # Just propagate the current scanner to includes in hope that includes
+ # do not change scanners.
+ scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
+
+ ISFILE $(angle) $(quoted) ;
+ }
+}
+
+scanner.register c-scanner : include ;
+
+type.register CPP : cpp cxx cc ;
+type.register H : h ;
+type.register HPP : hpp : H ;
+type.register C : c ;
+
+# In most cases where a CPP file or an H file is a source of some action, we
+# should rebuild the result if any of the files included by the CPP/H are changed.
+# One case when this is not needed is installation, which is handled specially.
+type.set-scanner CPP : c-scanner ;
+type.set-scanner C : c-scanner ;
+# One case where scanning of H/HPP files is necessary is PCH generation -- if
+# any header included by the HPP being precompiled changes, we need to recompile the
+# header.
+type.set-scanner H : c-scanner ;
+type.set-scanner HPP : c-scanner ;
diff --git a/jam-files/boost-build/tools/types/exe.jam b/jam-files/boost-build/tools/types/exe.jam
new file mode 100644
index 000000000..47109513a
--- /dev/null
+++ b/jam-files/boost-build/tools/types/exe.jam
@@ -0,0 +1,9 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register EXE ;
+type.set-generated-target-suffix EXE : <target-os>windows : "exe" ;
+type.set-generated-target-suffix EXE : <target-os>cygwin : "exe" ;
diff --git a/jam-files/boost-build/tools/types/html.jam b/jam-files/boost-build/tools/types/html.jam
new file mode 100644
index 000000000..5cd337d09
--- /dev/null
+++ b/jam-files/boost-build/tools/types/html.jam
@@ -0,0 +1,4 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type HTML : html ;
diff --git a/jam-files/boost-build/tools/types/lib.jam b/jam-files/boost-build/tools/types/lib.jam
new file mode 100644
index 000000000..854ab8fd5
--- /dev/null
+++ b/jam-files/boost-build/tools/types/lib.jam
@@ -0,0 +1,74 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ; # for set-generated-target-suffix
+import os ;
+
+# The following naming scheme is used for libraries.
+#
+# On *nix:
+# libxxx.a static library
+# libxxx.so shared library
+#
+# On windows (msvc)
+# libxxx.lib static library
+# xxx.dll DLL
+# xxx.lib import library
+#
+# On windows (mingw):
+# libxxx.a static library
+# libxxx.dll DLL
+# libxxx.dll.a import library
+#
+# On cygwin i.e. <target-os>cygwin
+# libxxx.a static library
+# cygxxx.dll DLL
+# libxxx.dll.a import library
+#
+
+type.register LIB ;
+
+# FIXME: should not register both extensions on both platforms.
+type.register STATIC_LIB : a lib : LIB ;
+
+# The 'lib' prefix is used everywhere
+type.set-generated-target-prefix STATIC_LIB : : lib ;
+
+# Use '.lib' suffix for windows
+type.set-generated-target-suffix STATIC_LIB : <target-os>windows : lib ;
+
+# Except with gcc.
+type.set-generated-target-suffix STATIC_LIB : <toolset>gcc <target-os>windows : a ;
+
+# Use xxx.lib for import libs
+type IMPORT_LIB : : STATIC_LIB ;
+type.set-generated-target-prefix IMPORT_LIB : : "" ;
+type.set-generated-target-suffix IMPORT_LIB : : lib ;
+
+# Except with gcc (mingw or cygwin), where libxxx.dll.a is used.
+type.set-generated-target-prefix IMPORT_LIB : <toolset>gcc : lib ;
+type.set-generated-target-suffix IMPORT_LIB : <toolset>gcc : dll.a ;
+
+type.register SHARED_LIB : so dll dylib : LIB ;
+
+# Both mingw and cygwin use libxxx.dll naming scheme.
+# On Linux, use "lib" prefix
+type.set-generated-target-prefix SHARED_LIB : : lib ;
+# But don't use it on windows
+type.set-generated-target-prefix SHARED_LIB : <target-os>windows : "" ;
+# But use it again on mingw
+type.set-generated-target-prefix SHARED_LIB : <toolset>gcc <target-os>windows : lib ;
+# And use 'cyg' on cygwin
+type.set-generated-target-prefix SHARED_LIB : <target-os>cygwin : cyg ;
+
+
+type.set-generated-target-suffix SHARED_LIB : <target-os>windows : dll ;
+type.set-generated-target-suffix SHARED_LIB : <target-os>cygwin : dll ;
+type.set-generated-target-suffix SHARED_LIB : <target-os>darwin : dylib ;
+
+type SEARCHED_LIB : : LIB ;
+# This is needed so that when we create a target of SEARCHED_LIB
+# type, there's no prefix or suffix automatically added.
+type.set-generated-target-prefix SEARCHED_LIB : : "" ;
+type.set-generated-target-suffix SEARCHED_LIB : : "" ;
diff --git a/jam-files/boost-build/tools/types/obj.jam b/jam-files/boost-build/tools/types/obj.jam
new file mode 100644
index 000000000..6afbcaa6f
--- /dev/null
+++ b/jam-files/boost-build/tools/types/obj.jam
@@ -0,0 +1,9 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register OBJ : o obj ;
+type.set-generated-target-suffix OBJ : <target-os>windows : obj ;
+type.set-generated-target-suffix OBJ : <target-os>cygwin : obj ;
diff --git a/jam-files/boost-build/tools/types/objc.jam b/jam-files/boost-build/tools/types/objc.jam
new file mode 100644
index 000000000..709cbd0c7
--- /dev/null
+++ b/jam-files/boost-build/tools/types/objc.jam
@@ -0,0 +1,26 @@
+# Copyright Rene Rivera 2008, 2010.
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+import type ;
+import scanner ;
+import types/cpp ;
+
+class objc-scanner : c-scanner
+{
+ rule __init__ ( includes * )
+ {
+ c-scanner.__init__ $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "#[ \t]*include|import[ ]*(<(.*)>|\"(.*)\")" ;
+ }
+}
+
+scanner.register objc-scanner : include ;
+
+type.register OBJECTIVE_C : m ;
+type.register OBJECTIVE_CPP : mm ;
+type.set-scanner OBJECTIVE_C : objc-scanner ;
+type.set-scanner OBJECTIVE_CPP : objc-scanner ;
diff --git a/jam-files/boost-build/tools/types/preprocessed.jam b/jam-files/boost-build/tools/types/preprocessed.jam
new file mode 100644
index 000000000..c9187ba67
--- /dev/null
+++ b/jam-files/boost-build/tools/types/preprocessed.jam
@@ -0,0 +1,9 @@
+# Copyright Steven Watanabe 2011
+# Distributed under the Boost Software License Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register PREPROCESSED_C : i : C ;
+type.register PREPROCESSED_CPP : ii : CPP ;
diff --git a/jam-files/boost-build/tools/types/qt.jam b/jam-files/boost-build/tools/types/qt.jam
new file mode 100644
index 000000000..6d1dfbd42
--- /dev/null
+++ b/jam-files/boost-build/tools/types/qt.jam
@@ -0,0 +1,10 @@
+# Copyright Vladimir Prus 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+type UI : ui ;
+type QRC : qrc ;
+type MOCCABLE_CPP ;
+type MOCCABLE_H ;
+# Result of running moc.
+type MOC : moc : H ;
diff --git a/jam-files/boost-build/tools/types/register.jam b/jam-files/boost-build/tools/types/register.jam
new file mode 100644
index 000000000..203992ca9
--- /dev/null
+++ b/jam-files/boost-build/tools/types/register.jam
@@ -0,0 +1,39 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This module's job is to automatically import all the type
+# registration modules in its directory.
+import type os path modules ;
+
+# Register the given type on the specified OSes, or on remaining OSes
+# if os is not specified. This rule is injected into each of the type
+# modules for the sake of convenience.
+local rule type ( type : suffixes * : base-type ? : os * )
+{
+ if ! [ type.registered $(type) ]
+ {
+ if ( ! $(os) ) || [ os.name ] in $(os)
+ {
+ type.register $(type) : $(suffixes) : $(base-type) ;
+ }
+ }
+}
+
+.this-module's-file = [ modules.binding $(__name__) ] ;
+.this-module's-dir = [ path.parent $(.this-module's-file) ] ;
+.sibling-jamfiles = [ path.glob $(.this-module's-dir) : *.jam ] ;
+.sibling-modules = [ MATCH ^(.*)\.jam$ : $(.sibling-jamfiles) ] ;
+
+# A loop over all modules in this directory
+for m in $(.sibling-modules)
+{
+ m = [ path.basename $(m) ] ;
+ m = types/$(m) ;
+
+ # Inject the type rule into the new module
+ IMPORT $(__name__) : type : $(m) : type ;
+ import $(m) ;
+}
+
+
diff --git a/jam-files/boost-build/tools/types/rsp.jam b/jam-files/boost-build/tools/types/rsp.jam
new file mode 100644
index 000000000..bdf8a7c98
--- /dev/null
+++ b/jam-files/boost-build/tools/types/rsp.jam
@@ -0,0 +1,4 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type RSP : rsp ;
diff --git a/jam-files/boost-build/tools/unix.jam b/jam-files/boost-build/tools/unix.jam
new file mode 100644
index 000000000..75949851a
--- /dev/null
+++ b/jam-files/boost-build/tools/unix.jam
@@ -0,0 +1,224 @@
+# Copyright (c) 2004 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file implements linking semantics common to all Unixes. On Unix, static
+# libraries must be specified in a fixed order on the linker command line. The
+# generators declared here store information about the order and use it properly.
+
+import feature ;
+import "class" : new ;
+import generators ;
+import type ;
+import set ;
+import order ;
+import builtin ;
+
+class unix-linking-generator : linking-generator
+{
+ import property-set ;
+ import type ;
+ import unix ;
+
+ rule __init__ ( id
+ composing ? : # Specify if generator is composing. The generator will be
+ # composing if non-empty string is passed, or parameter is
+ # not given. To make generator non-composing, pass empty
+ # string ("")
+ source-types + : target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
+ $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result = [ linking-generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local sources2 ;
+ local libraries ;
+ for local l in $(sources)
+ {
+ if [ type.is-derived [ $(l).type ] LIB ]
+ {
+ libraries += $(l) ;
+ }
+ else
+ {
+ sources2 += $(l) ;
+ }
+ }
+
+ sources = $(sources2) [ unix.order-libraries $(libraries) ] ;
+
+ return [ linking-generator.generated-targets $(sources) : $(property-set)
+ : $(project) $(name) ] ;
+ }
+
+}
+
+class unix-archive-generator : archive-generator
+{
+ import unix ;
+
+ rule __init__ ( id composing ? : source-types + : target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ archive-generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
+ $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result = [ archive-generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+
+ }
+}
+
+class unix-searched-lib-generator : searched-lib-generator
+{
+ import unix ;
+ rule __init__ ( * : * )
+ {
+ generator.__init__
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule optional-properties ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local result = [ searched-lib-generator.run $(project) $(name)
+ : $(property-set) : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+ }
+}
+
+class unix-prebuilt-lib-generator : generator
+{
+ import unix ;
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local f = [ $(property-set).get <file> ] ;
+ unix.set-library-order-aux $(f) : $(sources) ;
+ return $(f) $(sources) ;
+ }
+}
+
+generators.register
+ [ new unix-prebuilt-lib-generator unix.prebuilt : : LIB
+ : <file> <toolset>unix ] ;
+
+generators.override unix.prebuilt : builtin.lib-generator ;
+
+
+# Declare generators
+generators.register [ new unix-linking-generator unix.link : LIB OBJ : EXE
+ : <toolset>unix ] ;
+
+generators.register [ new unix-archive-generator unix.archive : OBJ : STATIC_LIB
+ : <toolset>unix ] ;
+
+generators.register [ new unix-linking-generator unix.link.dll : LIB OBJ : SHARED_LIB
+ : <toolset>unix ] ;
+
+generators.register [ new unix-searched-lib-generator
+ unix.searched-lib-generator : : SEARCHED_LIB : <toolset>unix ] ;
+
+
+# Derived toolsets must specify their own actions.
+actions link {
+}
+
+actions link.dll {
+}
+
+actions archive {
+}
+
+actions searched-lib-generator {
+}
+
+actions prebuilt {
+}
+
+
+
+
+
+.order = [ new order ] ;
+
+rule set-library-order-aux ( from * : to * )
+{
+ for local f in $(from)
+ {
+ for local t in $(to)
+ {
+ if $(f) != $(t)
+ {
+ $(.order).add-pair $(f) $(t) ;
+ }
+ }
+ }
+}
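+
+# A small illustration (hypothetical library targets): linking 'libconsumer'
+# against 'libdep' records the pair (libconsumer, libdep), so that
+#
+#   order-libraries libdep libconsumer ;   # -> libconsumer libdep
+#
+# i.e. a static library is placed before the libraries it depends on.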
+
+rule set-library-order ( sources * : property-set : result * )
+{
+ local used-libraries ;
+ local deps = [ $(property-set).dependency ] ;
+ for local l in $(sources) $(deps:G=)
+ {
+ if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]
+ {
+ used-libraries += $(l) ;
+ }
+ }
+
+ local created-libraries ;
+ for local l in $(result)
+ {
+ if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]
+ {
+ created-libraries += $(l) ;
+ }
+ }
+
+ created-libraries = [ set.difference $(created-libraries) : $(used-libraries) ] ;
+ set-library-order-aux $(created-libraries) : $(used-libraries) ;
+}
+
+rule order-libraries ( libraries * )
+{
+ local r = [ $(.order).order $(libraries) ] ;
+ return $(r) ;
+}
+ \ No newline at end of file
diff --git a/jam-files/boost-build/tools/vacpp.jam b/jam-files/boost-build/tools/vacpp.jam
new file mode 100644
index 000000000..f4080fc04
--- /dev/null
+++ b/jam-files/boost-build/tools/vacpp.jam
@@ -0,0 +1,150 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Toon Knapen 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Boost.Build V2 toolset for the IBM XL C++ compiler
+#
+
+import toolset : flags ;
+import feature ;
+import common ;
+import generators ;
+import os ;
+
+feature.extend toolset : vacpp ;
+toolset.inherit vacpp : unix ;
+generators.override vacpp.prebuilt : builtin.prebuilt ;
+generators.override vacpp.searched-lib-generator : searched-lib-generator ;
+
+# Configure the vacpp toolset
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters vacpp : version $(version) ] ;
+
+ command = [ common.get-invocation-command vacpp : xlC
+ : $(command) : "/usr/vacpp/bin/xlC" ] ;
+
+ common.handle-options vacpp : $(condition) : $(command) : $(options) ;
+}
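+
+# A hypothetical configuration sketch (the command path is illustrative only);
+# in user-config.jam one might write:
+#
+#   using vacpp : : /usr/vacpp/bin/xlC ;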
+
+# Declare generators
+generators.register-c-compiler vacpp.compile.c : C : OBJ : <toolset>vacpp ;
+generators.register-c-compiler vacpp.compile.c++ : CPP : OBJ : <toolset>vacpp ;
+
+# Allow C++ style comments in C files
+flags vacpp CFLAGS : -qcpluscmt ;
+
+# Declare flags
+flags vacpp CFLAGS <optimization>off : -qNOOPTimize ;
+flags vacpp CFLAGS <optimization>speed : -O3 -qstrict ;
+flags vacpp CFLAGS <optimization>space : -O2 -qcompact ;
+
+# Discretionary inlining (not recommended)
+flags vacpp CFLAGS <inlining>off : -qnoinline ;
+flags vacpp CFLAGS <inlining>on : -qinline ;
+#flags vacpp CFLAGS <inlining>full : -qinline ;
+flags vacpp CFLAGS <inlining>full : ;
+
+# Exception handling
+flags vacpp C++FLAGS <exception-handling>off : -qnoeh ;
+flags vacpp C++FLAGS <exception-handling>on : -qeh ;
+
+# Run-time Type Identification
+flags vacpp C++FLAGS <rtti>off : -qnortti ;
+flags vacpp C++FLAGS <rtti>on : -qrtti ;
+
+# Enable 64-bit memory addressing model
+flags vacpp CFLAGS <address-model>64 : -q64 ;
+flags vacpp LINKFLAGS <address-model>64 : -q64 ;
+flags vacpp ARFLAGS <target-os>aix/<address-model>64 : -X 64 ;
+
+# Use absolute path when generating debug information
+flags vacpp CFLAGS <debug-symbols>on : -g -qfullpath ;
+flags vacpp LINKFLAGS <debug-symbols>on : -g -qfullpath ;
+flags vacpp LINKFLAGS <debug-symbols>off : -s ;
+
+if [ os.name ] = AIX
+{
+ flags vacpp.compile C++FLAGS : -qfuncsect ;
+
+ # The -bnoipath strips the prepending (relative) path of libraries from
+ # the loader section in the target library or executable. Hence, during
+ # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
+ # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
+ # this option, the prepending (relative) path + library name is
+ # hard-coded in the loader section, causing *only* this path to be
+ # searched during load-time. Note that the AIX linker does not have an
+ # -soname equivalent, this is as close as it gets.
+ #
+ # The above options are definitely for AIX 5.x, and most likely also for
+ # AIX 4.x and AIX 6.x. For details about the AIX linker see:
+ # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
+ #
+ flags vacpp.link LINKFLAGS <link>shared : -bnoipath ;
+
+ # Run-time linking
+ flags vacpp.link EXE-LINKFLAGS <link>shared : -brtl ;
+}
+else
+{
+ # Linux PPC
+ flags vacpp.compile CFLAGS <link>shared : -qpic=large ;
+ flags vacpp FINDLIBS : rt ;
+}
+
+# Profiling
+flags vacpp CFLAGS <profiling>on : -pg ;
+flags vacpp LINKFLAGS <profiling>on : -pg ;
+
+flags vacpp.compile OPTIONS <cflags> ;
+flags vacpp.compile.c++ OPTIONS <cxxflags> ;
+flags vacpp DEFINES <define> ;
+flags vacpp UNDEFS <undef> ;
+flags vacpp HDRS <include> ;
+flags vacpp STDHDRS <sysinclude> ;
+flags vacpp.link OPTIONS <linkflags> ;
+flags vacpp ARFLAGS <arflags> ;
+
+flags vacpp LIBPATH <library-path> ;
+flags vacpp NEEDLIBS <library-file> ;
+flags vacpp FINDLIBS <find-shared-library> ;
+flags vacpp FINDLIBS <find-static-library> ;
+
+# Select the compiler name according to the threading model.
+flags vacpp VA_C_COMPILER <threading>single : xlc ;
+flags vacpp VA_C_COMPILER <threading>multi : xlc_r ;
+flags vacpp VA_CXX_COMPILER <threading>single : xlC ;
+flags vacpp VA_CXX_COMPILER <threading>multi : xlC_r ;
+
+SPACE = " " ;
+
+flags vacpp.link.dll HAVE_SONAME <target-os>linux : "" ;
+
+actions vacpp.link bind NEEDLIBS
+{
+ $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
+}
+
+actions vacpp.link.dll bind NEEDLIBS
+{
+ xlC_r -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
+}
+
+actions vacpp.compile.c
+{
+ $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
+}
+
+actions vacpp.compile.c++
+{
+ $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
+}
+
+actions updated together piecemeal vacpp.archive
+{
+ ar $(ARFLAGS) ru "$(<)" "$(>)"
+}
diff --git a/jam-files/boost-build/tools/whale.jam b/jam-files/boost-build/tools/whale.jam
new file mode 100644
index 000000000..9335ff0c0
--- /dev/null
+++ b/jam-files/boost-build/tools/whale.jam
@@ -0,0 +1,116 @@
+# Copyright (C) Vladimir Prus 2002-2005.
+
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements support for Whale/Dolphin/WD parser/lexer tools.
+# See http://www.cs.queensu.ca/home/okhotin/whale/ for details.
+#
+# There are three interesting target types:
+# - WHL (the parser sources), that are converted to CPP and H
+# - DLP (the lexer sources), that are converted to CPP and H
+# - WD (combined parser/lexer sources), that are converted to WHL + DLP
+
+import type ;
+import generators ;
+import path ;
+import "class" : new ;
+import errors ;
+
+rule init ( path # path to the Whale/Dolphin/WD binaries
+ )
+{
+ if $(.configured) && $(.path) != $(path)
+ {
+ errors.user-error "Attempt to reconfigure Whale support" :
+ "Previously configured with path \"$(.path:E=<empty>)\"" :
+ "Now configuring with path \"$(path:E=<empty>)\"" ;
+
+ }
+ .configured = true ;
+ .path = $(path) ;
+
+ .whale = [ path.join $(path) whale ] ;
+ .dolphin = [ path.join $(path) dolphin ] ;
+ .wd = [ path.join $(path) wd ] ;
+}
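+
+# A hypothetical configuration sketch (the directory is illustrative only);
+# in user-config.jam one might write:
+#
+#   using whale : /opt/whale/bin ;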
+
+
+# Declare the types.
+type.register WHL : whl ;
+type.register DLP : dlp ;
+type.register WHL_LR0 : lr0 ;
+type.register WD : wd ;
+
+# Declare standard generators.
+generators.register-standard whale.whale : WHL : CPP H H(%_symbols) ;
+generators.register-standard whale.dolphin : DLP : CPP H ;
+generators.register-standard whale.wd : WD : WHL(%_parser) DLP(%_lexer) ;
+
+# The conversions defined above are ambiguous when we generate CPP from WD:
+# we can go either via the WHL type or via the DLP type.
+# The following custom generator handles this by running both conversions.
+
+class wd-to-cpp : generator
+{
+ rule __init__ ( * : * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) ;
+ }
+
+ rule run ( project name ? : property-set : source * )
+ {
+ if ! $(source[2])
+ {
+ local new-sources ;
+ if ! [ $(source).type ] in WHL DLP
+ {
+ local r1 = [ generators.construct $(project) $(name)
+ : WHL : $(property-set) : $(source) ] ;
+ local r2 = [ generators.construct $(project) $(name)
+ : DLP : $(property-set) : $(source) ] ;
+
+ new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ;
+ }
+ else
+ {
+ new-sources = $(source) ;
+ }
+
+ local result ;
+ for local i in $(new-sources)
+ {
+ local t = [ generators.construct $(project) $(name) : CPP
+ : $(property-set) : $(i) ] ;
+ result += $(t[2-]) ;
+ }
+ return $(result) ;
+ }
+ }
+
+}
+
+
+generators.override whale.wd-to-cpp : whale.whale ;
+generators.override whale.wd-to-cpp : whale.dolphin ;
+
+
+generators.register [ new wd-to-cpp whale.wd-to-cpp : : CPP ] ;
+
+
+actions whale
+{
+ $(.whale) -d $(<[1]:D) $(>)
+}
+
+actions dolphin
+{
+ $(.dolphin) -d $(<[1]:D) $(>)
+}
+
+actions wd
+{
+ $(.wd) -d $(<[1]:D) -g $(>)
+}
+
diff --git a/jam-files/boost-build/tools/xlf.jam b/jam-files/boost-build/tools/xlf.jam
new file mode 100644
index 000000000..e7fcc6086
--- /dev/null
+++ b/jam-files/boost-build/tools/xlf.jam
@@ -0,0 +1,39 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# toolset configuration for the IBM Fortran compiler (xlf)
+#
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags xlf OPTIONS <optimization>off : -O0 ;
+flags xlf OPTIONS <optimization>speed : -O3 ;
+flags xlf OPTIONS <optimization>space : -Os ;
+
+flags xlf OPTIONS <debug-symbols>on : -g ;
+flags xlf OPTIONS <profiling>on : -pg ;
+
+flags xlf DEFINES <define> ;
+flags xlf INCLUDES <include> ;
+
+rule compile-fortran
+{
+}
+
+actions compile-fortran
+{
+ xlf $(OPTIONS) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler xlf.compile-fortran : FORTRAN : OBJ ;
diff --git a/jam-files/boost-build/tools/xsltproc-config.jam b/jam-files/boost-build/tools/xsltproc-config.jam
new file mode 100644
index 000000000..de54a2eb3
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc-config.jam
@@ -0,0 +1,37 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for the xsltproc tool. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ local xsltproc-path = [ GLOB [ modules.peek : PATH ] "C:\\Boost\\bin" : xsltproc\.exe ] ;
+ xsltproc-path = $(xsltproc-path[1]) ;
+
+ if $(xsltproc-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using xsltproc ":" $(xsltproc-path) ;
+ }
+ using xsltproc : $(xsltproc-path) ;
+ }
+}
+else
+{
+ local xsltproc-path = [ GLOB [ modules.peek : PATH ] : xsltproc ] ;
+ xsltproc-path = $(xsltproc-path[1]) ;
+
+ if $(xsltproc-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using xsltproc ":" $(xsltproc-path) ;
+ }
+ using xsltproc : $(xsltproc-path) ;
+ }
+}
diff --git a/jam-files/boost-build/tools/xsltproc.jam b/jam-files/boost-build/tools/xsltproc.jam
new file mode 100644
index 000000000..96f5170be
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc.jam
@@ -0,0 +1,194 @@
+# Copyright (C) 2003 Doug Gregor. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+# This module defines rules to apply an XSLT stylesheet to an XML file using the
+# xsltproc driver, part of libxslt.
+#
+# Note: except for 'init', this module does not provide any rules for end
+# users.
+
+import feature ;
+import regex ;
+import sequence ;
+import common ;
+import os ;
+import modules ;
+import path ;
+import errors ;
+
+feature.feature xsl:param : : free ;
+feature.feature xsl:path : : free ;
+feature.feature catalog : : free ;
+
+
+# Initialize xsltproc support. The parameters are:
+# xsltproc: The xsltproc executable
+#
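+# A minimal usage sketch (the path below is illustrative); in a configuration
+# file this is typically written as:
+#
+#   using xsltproc : /usr/bin/xsltproc ;
+#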
+rule init ( xsltproc ? )
+{
+ if $(xsltproc)
+ {
+ modify-config ;
+ .xsltproc = $(xsltproc) ;
+ check-xsltproc ;
+ }
+}
+
+rule freeze-config ( )
+{
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+ .xsltproc ?= [ modules.peek : XSLTPROC ] ;
+ .xsltproc ?= xsltproc ;
+ check-xsltproc ;
+ .is-cygwin = [ .is-cygwin $(.xsltproc) ] ;
+ }
+}
+
+rule modify-config
+{
+ if $(.config-frozen)
+ {
+ errors.user-error "xsltproc: Cannot change xsltproc command after it has been used." ;
+ }
+}
+
+rule check-xsltproc ( )
+{
+ if $(.xsltproc)
+ {
+ local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ] ;
+ if $(status[2]) != "0"
+ {
+ errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ;
+ }
+ }
+}
+
+# Returns a non-empty string if a cygwin xsltproc binary was specified.
+rule is-cygwin ( )
+{
+ freeze-config ;
+ return $(.is-cygwin) ;
+}
+
+rule .is-cygwin ( xsltproc )
+{
+ if [ os.on-windows ]
+ {
+ local file = [ path.make [ modules.binding $(__name__) ] ] ;
+ local dir = [ path.native
+ [ path.join [ path.parent $(file) ] xsltproc ] ] ;
+ if [ os.name ] = CYGWIN
+ {
+ dir = $(dir:W) ;
+ }
+ local command =
+ "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ;
+ local status = [ SHELL $(command) : no-output : exit-status ] ;
+ if $(status[2]) != "0"
+ {
+ return true ;
+ }
+ }
+}
+
+rule compute-xslt-flags ( target : properties * )
+{
+ local flags ;
+
+ # Raw flags.
+ flags += [ feature.get-values <flags> : $(properties) ] ;
+
+ # Translate <xsl:param> into command line flags.
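+ # For example (illustrative parameter), <xsl:param>chapter.autolabel=1
+ # becomes: --stringparam chapter.autolabel "1"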
+ for local param in [ feature.get-values <xsl:param> : $(properties) ]
+ {
+ local namevalue = [ regex.split $(param) "=" ] ;
+ flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ;
+ }
+
+ # Translate <xsl:path>.
+ for local path in [ feature.get-values <xsl:path> : $(properties) ]
+ {
+ flags += --path \"$(path:G=)\" ;
+ }
+
+ # Take care of implicit dependencies.
+ local other-deps ;
+ for local dep in [ feature.get-values <implicit-dependency> : $(properties) ]
+ {
+ other-deps += [ $(dep:G=).creating-subvariant ] ;
+ }
+
+ local implicit-target-directories ;
+ for local dep in [ sequence.unique $(other-deps) ]
+ {
+ implicit-target-directories += [ $(dep).all-target-directories ] ;
+ }
+
+ for local dir in $(implicit-target-directories)
+ {
+ flags += --path \"$(dir:T)\" ;
+ }
+
+ return $(flags) ;
+}
+
+
+local rule .xsltproc ( target : source stylesheet : properties * : dirname ? : action )
+{
+ freeze-config ;
+ STYLESHEET on $(target) = $(stylesheet) ;
+ FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ;
+ NAME on $(target) = $(.xsltproc) ;
+
+ for local catalog in [ feature.get-values <catalog> : $(properties) ]
+ {
+ CATALOG = [ common.variable-setting-command XML_CATALOG_FILES : $(catalog:T) ] ;
+ }
+
+ if [ os.on-windows ] && ! [ is-cygwin ]
+ {
+ action = $(action).windows ;
+ }
+
+ $(action) $(target) : $(source) ;
+}
+
+
+rule xslt ( target : source stylesheet : properties * )
+{
+ return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : : xslt-xsltproc ] ;
+}
+
+
+rule xslt-dir ( target : source stylesheet : properties * : dirname )
+{
+ return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : $(dirname) : xslt-xsltproc-dir ] ;
+}
+
+actions xslt-xsltproc.windows
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)"
+}
+
+
+actions xslt-xsltproc bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)"
+}
+
+
+actions xslt-xsltproc-dir.windows bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)"
+}
+
+
+actions xslt-xsltproc-dir bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)"
+}
diff --git a/jam-files/boost-build/tools/xsltproc/included.xsl b/jam-files/boost-build/tools/xsltproc/included.xsl
new file mode 100644
index 000000000..ef86394a9
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc/included.xsl
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (c) 2010 Steven Watanabe
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+ -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+</xsl:stylesheet>
diff --git a/jam-files/boost-build/tools/xsltproc/test.xml b/jam-files/boost-build/tools/xsltproc/test.xml
new file mode 100644
index 000000000..57c8ba187
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc/test.xml
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root/>
diff --git a/jam-files/boost-build/tools/xsltproc/test.xsl b/jam-files/boost-build/tools/xsltproc/test.xsl
new file mode 100644
index 000000000..a142c91dd
--- /dev/null
+++ b/jam-files/boost-build/tools/xsltproc/test.xsl
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (c) 2010 Steven Watanabe
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+ -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+ <xsl:include href="included.xsl"/>
+</xsl:stylesheet>
diff --git a/jam-files/boost-build/tools/zlib.jam b/jam-files/boost-build/tools/zlib.jam
new file mode 100644
index 000000000..f9138fd57
--- /dev/null
+++ b/jam-files/boost-build/tools/zlib.jam
@@ -0,0 +1,92 @@
+# Copyright (c) 2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the zlib library
+#
+# After 'using zlib', the following targets are available:
+#
+# /zlib//zlib -- The zlib library
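+#
+# For example (a minimal sketch; target and source names are illustrative), a
+# Jamfile can link against it with:
+#
+#   exe hello : hello.cpp /zlib//zlib ;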
+
+
+# In addition to its direct purpose of supporting zlib, this module also
+# serves as a canonical example of how third-party configuration works
+# in Boost.Build. The operation is as follows:
+#
+# - For each 'using zlib : condition ... : ...' we create a target alternative
+# for zlib, with the specified condition.
+# - There's one target alternative for 'zlib' with no specific condition
+# properties.
+#
+# Two invocations of 'using zlib' with the same condition but different
+# properties are not permitted, e.g.:
+#
+# using zlib : condition <target-os>windows : include foo ;
+# using zlib : condition <target-os>windows : include bar ;
+#
+# is an error. The one exception is the empty condition: 'using' without any
+# parameters is overridable. That is:
+#
+# using zlib ;
+# using zlib : include foo ;
+#
+# is OK, in which case the first 'using' is ignored. The same holds if the order
+# of the statements is reversed.
+#
+# When the 'zlib' target is built, a target alternative is selected as usual for
+# Boost.Build. The selected alternative is a custom target class, which:
+#
+# - calls ac.find-include-path to find the header path. If an explicit path is
+#   provided in 'using', only that path is checked, and if no header is found
+#   there, an error is emitted. Otherwise, we check the directory specified by
+#   the ZLIB_INCLUDE environment variable and, failing that, the standard
+#   directories.
+# [TODO: document sysroot handling]
+# - calls ac.find-library to find the library, in an identical fashion.
+#
+
+import project ;
+import ac ;
+import errors ;
+import "class" : new ;
+import targets ;
+
+project.initialize $(__name__) ;
+project = [ project.current ] ;
+project zlib ;
+
+header = zlib.h ;
+names = z zlib zll zdll ;
+
+.default-alternative = [ new ac-library zlib : $(project) ] ;
+$(.default-alternative).set-header $(header) ;
+$(.default-alternative).set-default-names $(names) ;
+targets.main-target-alternative $(.default-alternative) ;
+
+rule init ( * : * )
+{
+ if ! $(condition)
+ {
+ # Special case the no-condition case so that 'using' without parameters
+ # can mix with more specific 'using'.
+ $(.default-alternative).reconfigure $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ # FIXME: consider if we should allow overriding definitions for a given
+ # condition -- e.g. project-config.jam might want to override whatever is
+ # in user-config.jam.
+ local mt = [ new ac-library zlib : $(project)
+ : $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+}
+
+
+
+
+
+
diff --git a/jam-files/boost-build/user-config.jam b/jam-files/boost-build/user-config.jam
new file mode 100644
index 000000000..fbbf13fd0
--- /dev/null
+++ b/jam-files/boost-build/user-config.jam
@@ -0,0 +1,92 @@
+# Copyright 2003, 2005 Douglas Gregor
+# Copyright 2004 John Maddock
+# Copyright 2002, 2003, 2004, 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is used to configure your Boost.Build installation. You can modify
+# this file in place, or you can place it in a permanent location so that it
+# does not get overwritten should you get a new version of Boost.Build. See:
+#
+# http://www.boost.org/boost-build2/doc/html/bbv2/overview/configuration.html
+#
+# for documentation about possible permanent locations.
+
+# This file specifies which toolsets (C++ compilers), libraries, and other
+# tools are available. Often, you should be able to just uncomment existing
+# example lines and adjust them to taste. The complete list of supported tools,
+# and configuration instructions can be found at:
+#
+# http://boost.org/boost-build2/doc/html/bbv2/reference/tools.html
+#
+
+# This file uses Jam language syntax to describe available tools. Mostly,
+# there are 'using' lines that contain the name of the used tool and the
+# parameters to pass to that tool -- where parameters are separated by
+# colons. Important syntax notes:
+#
+# - Both ':' and ';' must be separated from other tokens by whitespace
+# - The '\' symbol is a quote character, so when specifying Windows paths you
+# should use '/' or '\\' instead.
+#
+# More details about the syntax can be found at:
+#
+# http://boost.org/boost-build2/doc/html/bbv2/advanced.html#bbv2.advanced.jam_language
+#
+
+# ------------------
+# GCC configuration.
+# ------------------
+
+# Configure gcc (default version).
+# using gcc ;
+
+# Configure specific gcc version, giving alternative name to use.
+# using gcc : 3.2 : g++-3.2 ;
+
+
+# -------------------
+# MSVC configuration.
+# -------------------
+
+# Configure msvc (default version, searched for in standard locations and PATH).
+# using msvc ;
+
+# Configure specific msvc version (searched for in standard locations and PATH).
+# using msvc : 8.0 ;
+
+
+# ----------------------
+# Borland configuration.
+# ----------------------
+# using borland ;
+
+
+# ----------------------
+# STLPort configuration.
+# ----------------------
+
+# Configure specifying location of STLPort headers. Libraries must be either
+# not needed or available to the compiler by default.
+# using stlport : : /usr/include/stlport ;
+
+# Configure specifying location of both headers and libraries explicitly.
+# using stlport : : /usr/include/stlport /usr/lib ;
+
+
+# -----------------
+# QT configuration.
+# -----------------
+
+# Configure assuming QTDIR gives the installation prefix.
+# using qt ;
+
+# Configure with an explicit installation prefix.
+# using qt : /usr/opt/qt ;
+
+# ---------------------
+# Python configuration.
+# ---------------------
+
+# Configure specific Python version.
+# using python : 3.1 : /usr/bin/python3 : /usr/include/python3.1 : /usr/lib ;
diff --git a/jam-files/boost-build/util/assert.jam b/jam-files/boost-build/util/assert.jam
new file mode 100644
index 000000000..abedad525
--- /dev/null
+++ b/jam-files/boost-build/util/assert.jam
@@ -0,0 +1,336 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+import modules ;
+
+
+################################################################################
+#
+# Private implementation details.
+#
+################################################################################
+
+# Rule added as a replacement for the regular Jam = operator but which does not
+# ignore trailing empty string elements.
+#
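+# For example (illustrative values): with
+#
+#   local a = x "" ;
+#   local b = x ;
+#
+# the plain comparison '$(a) = $(b)' succeeds, because the trailing empty string
+# is ignored, while '[ exact-equal-test $(a) : $(b) ]' returns nothing.
+#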
+local rule exact-equal-test ( lhs * : rhs * )
+{
+ local lhs_extended = $(lhs) xxx ;
+ local rhs_extended = $(rhs) xxx ;
+ if $(lhs_extended) = $(rhs_extended)
+ {
+ return true ;
+ }
+}
+
+
+# Two lists are considered set-equal if they contain the same elements, ignoring
+# duplicates and ordering.
+#
+local rule set-equal-test ( set1 * : set2 * )
+{
+ if ( $(set1) in $(set2) ) && ( $(set2) in $(set1) )
+ {
+ return true ;
+ }
+}
+
+
+################################################################################
+#
+# Public interface.
+#
+################################################################################
+
+# Assert the equality of A and B, ignoring trailing empty string elements.
+#
+rule equal ( a * : b * )
+{
+ if $(a) != $(b)
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\"
+ (ignoring trailing empty strings) ;
+ }
+}
+
+
+# Assert that the result of calling RULE-NAME on the given arguments has a false
+# logical value (is either an empty list or all empty strings).
+#
+rule false ( rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
+ : $(7) : $(8) : $(9) ] ;
+ }
+
+ if $(result)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected false result from
+ "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
+ $(5) : $(6) : $(7) : $(8) : $(9) ] "]" : Got: "[" \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert that ELEMENT is present in LIST.
+#
+rule "in" ( element : list * )
+{
+ if ! $(element) in $(list)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected \"$(element)\" in
+ "[" \"$(list)\" "]" ;
+ }
+}
+
+
+# Assert the inequality of A and B, ignoring trailing empty string elements.
+#
+rule not-equal ( a * : b * )
+{
+ if $(a) = $(b)
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\"
+ (ignoring trailing empty strings) ;
+ }
+}
+
+
+# Assert that ELEMENT is not present in LIST.
+#
+rule not-in ( element : list * )
+{
+ if $(element) in $(list)
+ {
+ errors.error-skip-frames 3 assertion failure: Did not expect
+ \"$(element)\" in "[" \"$(list)\" "]" ;
+ }
+}
+
+
+# Assert the inequality of A and B as sets.
+#
+rule not-set-equal ( a * : b * )
+{
+ if [ set-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
+ and "[" \"$(b)\" "]" to not be equal as sets ;
+ }
+}
+
+
+# Assert that A and B are not exactly equal, not ignoring trailing empty string
+# elements.
+#
+rule not-exact-equal ( a * : b * )
+{
+ if [ exact-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\" ;
+ }
+}
+
+
+# Assert that EXPECTED is the result of calling RULE-NAME with the given
+# arguments.
+#
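+# For example (illustrative; 'make-triple' is a hypothetical rule):
+#
+#   assert.result 1 2 3 : make-triple ;
+#
+# passes only if [ make-triple ] returns exactly 1 2 3.
+#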
+rule result ( expected * : rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
+ : $(8) : $(9) ] ;
+ }
+
+ if ! [ exact-equal-test $(result) : $(expected) ]
+ {
+ errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
+ errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
+ \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert that EXPECTED is set-equal (i.e. duplicates and ordering are ignored)
+# to the result of calling RULE-NAME with the given arguments. Note that rules
+# called this way may accept at most 8 parameters.
+#
+rule result-set-equal ( expected * : rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
+ : $(8) : $(9) ] ;
+ }
+
+ if ! [ set-equal-test $(result) : $(expected) ]
+ {
+ errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
+ errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
+ \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert the equality of A and B as sets.
+#
+rule set-equal ( a * : b * )
+{
+ if ! [ set-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
+ and "[" \"$(b)\" "]" to be equal as sets ;
+ }
+}
+
+
+# Assert that the result of calling RULE-NAME on the given arguments has a true
+# logical value (is neither an empty list nor all empty strings).
+#
+rule true ( rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
+ : $(7) : $(8) : $(9) ] ;
+ }
+
+ if ! $(result)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected true result from
+ "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
+ $(5) : $(6) : $(7) : $(8) : $(9) ] "]" ;
+ }
+}
+
+
+# Assert the exact equality of A and B, not ignoring trailing empty string
+# elements.
+#
+rule exact-equal ( a * : b * )
+{
+ if ! [ exact-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\" ;
+ }
+}
+
+
+# Assert that the given variable is not an empty list.
+#
+rule variable-not-empty ( name )
+{
+ local value = [ modules.peek [ CALLER_MODULE ] : $(name) ] ;
+ if ! $(value)-is-not-empty
+ {
+ errors.error-skip-frames 3 assertion failure: Expected variable
+ \"$(name)\" not to be an empty list ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ # Helper rule used to avoid test duplication related to different list
+ # equality test rules.
+ #
+ local rule run-equality-test ( equality-assert : ignore-trailing-empty-strings ? )
+ {
+ local not-equality-assert = not-$(equality-assert) ;
+
+ # When the given equality test is expected to ignore trailing empty
+ # strings some of the test results should be inverted.
+ local not-equality-assert-i = not-$(equality-assert) ;
+ if $(ignore-trailing-empty-strings)
+ {
+ not-equality-assert-i = $(equality-assert) ;
+ }
+
+ $(equality-assert) : ;
+ $(equality-assert) "" "" : "" "" ;
+ $(not-equality-assert-i) : "" "" ;
+ $(equality-assert) x : x ;
+ $(not-equality-assert) : x ;
+ $(not-equality-assert) "" : x ;
+ $(not-equality-assert) "" "" : x ;
+ $(not-equality-assert-i) x : x "" ;
+ $(equality-assert) x "" : x "" ;
+ $(not-equality-assert) x : "" x ;
+ $(equality-assert) "" x : "" x ;
+
+ $(equality-assert) 1 2 3 : 1 2 3 ;
+ $(not-equality-assert) 1 2 3 : 3 2 1 ;
+ $(not-equality-assert) 1 2 3 : 1 5 3 ;
+ $(not-equality-assert) 1 2 3 : 1 "" 3 ;
+ $(not-equality-assert) 1 2 3 : 1 1 2 3 ;
+ $(not-equality-assert) 1 2 3 : 1 2 2 3 ;
+ $(not-equality-assert) 1 2 3 : 5 6 7 ;
+
+ # Extra variables used here just to make sure Boost Jam or Boost Build
+ # do not handle lists with empty strings differently depending on
+ # whether they are literals or stored in variables.
+
+ local empty = ;
+ local empty-strings = "" "" ;
+ local x-empty-strings = x "" "" ;
+ local empty-strings-x = "" "" x ;
+
+ $(equality-assert) : $(empty) ;
+ $(not-equality-assert-i) "" : $(empty) ;
+ $(not-equality-assert-i) "" "" : $(empty) ;
+ $(not-equality-assert-i) : $(empty-strings) ;
+ $(not-equality-assert-i) "" : $(empty-strings) ;
+ $(equality-assert) "" "" : $(empty-strings) ;
+ $(equality-assert) $(empty) : $(empty) ;
+ $(equality-assert) $(empty-strings) : $(empty-strings) ;
+ $(not-equality-assert-i) $(empty) : $(empty-strings) ;
+ $(equality-assert) $(x-empty-strings) : $(x-empty-strings) ;
+ $(equality-assert) $(empty-strings-x) : $(empty-strings-x) ;
+ $(not-equality-assert) $(empty-strings-x) : $(x-empty-strings) ;
+ $(not-equality-assert-i) x : $(x-empty-strings) ;
+ $(not-equality-assert) x : $(empty-strings-x) ;
+ $(not-equality-assert-i) x : $(x-empty-strings) ;
+ $(not-equality-assert-i) x "" : $(x-empty-strings) ;
+ $(equality-assert) x "" "" : $(x-empty-strings) ;
+ $(not-equality-assert) x : $(empty-strings-x) ;
+ $(not-equality-assert) "" x : $(empty-strings-x) ;
+ $(equality-assert) "" "" x : $(empty-strings-x) ;
+ }
+
+
+ # ---------------
+ # Equality tests.
+ # ---------------
+
+ run-equality-test equal : ignore-trailing-empty-strings ;
+ run-equality-test exact-equal ;
+
+
+ # -------------------------
+ # assert.set-equal() tests.
+ # -------------------------
+
+ set-equal : ;
+ not-set-equal "" "" : ;
+ set-equal "" "" : "" ;
+ set-equal "" "" : "" "" ;
+ set-equal a b c : a b c ;
+ set-equal a b c : b c a ;
+ set-equal a b c a : a b c ;
+ set-equal a b c : a b c a ;
+ not-set-equal a b c : a b c d ;
+ not-set-equal a b c d : a b c ;
+}
diff --git a/jam-files/boost-build/util/container.jam b/jam-files/boost-build/util/container.jam
new file mode 100644
index 000000000..dd4963938
--- /dev/null
+++ b/jam-files/boost-build/util/container.jam
@@ -0,0 +1,339 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Various container classes.
+
+# Base for container objects. This lets us construct recursive structures. That
+# is, containers with containers in them, specifically so we can tell literal
+# values from node values.
+#
+class node
+{
+ rule __init__ (
+ value ? # Optional value to set node to initially.
+ )
+ {
+ self.value = $(value) ;
+ }
+
+ # Set the value of this node, passing nothing will clear it.
+ #
+ rule set ( value * )
+ {
+ self.value = $(value) ;
+ }
+
+ # Get the value of this node.
+ #
+ rule get ( )
+ {
+ return $(self.value) ;
+ }
+}
+
+
+# A simple vector. Interface mimics the C++ std::vector and std::list, with the
+# exception that indices are one (1) based, following the Jam convention.
+#
+# TODO: Possibly add assertion checks.
+#
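+# A minimal usage sketch (illustrative; assumes 'new' is imported from the
+# "class" module, as in the __test__ rule below):
+#
+#   local v = [ new vector a b c ] ;
+#   $(v).push-back d ;
+#   ECHO [ $(v).at 2 ] ;  # prints "b"
+#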
+class vector : node
+{
+ import numbers ;
+ import utility ;
+ import sequence ;
+
+ rule __init__ (
+ values * # Initial contents of vector.
+ )
+ {
+ node.__init__ ;
+ self.value = $(values) ;
+ }
+
+ # Get the value of the first element.
+ #
+ rule front ( )
+ {
+ return $(self.value[1]) ;
+ }
+
+ # Get the value of the last element.
+ #
+ rule back ( )
+ {
+ return $(self.value[-1]) ;
+ }
+
+ # Get the value of the element at the given index, one based. Access to
+ # elements of recursive structures is supported directly. Specifying
+ # additional index values recursively accesses the elements as containers.
+ # For example: [ $(v).at 1 : 2 ] would retrieve the second element of our
+ # first element, assuming the first element is a container.
+ #
+ rule at (
+ index # The element index, one based.
+ : * # Additional indices to access recursively.
+ )
+ {
+ local r = $(self.value[$(index)]) ;
+ if $(2)
+ {
+ r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ return $(r) ;
+ }
+
+ # Get the value contained in the given element. This has the same
+ # functionality and interface as "at" but in addition gets the value of the
+ # referenced element, assuming it is a "node".
+ #
+ rule get-at (
+ index # The element index, one based.
+ : * # Additional indices to access recursively.
+ )
+ {
+ local r = $(self.value[$(index)]) ;
+ if $(2)
+ {
+ r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ return [ $(r).get ] ;
+ }
+
+ # Insert the given value into the front of the vector pushing the rest of
+ # the elements back.
+ #
+ rule push-front (
+ value # Value to become first element.
+ )
+ {
+ self.value = $(value) $(self.value) ;
+ }
+
+ # Remove the front element from the vector. Does not return the value. No
+ # effect if vector is empty.
+ #
+ rule pop-front ( )
+ {
+ self.value = $(self.value[2-]) ;
+ }
+
+ # Add the given value at the end of the vector.
+ #
+ rule push-back (
+ value # Value to become back element.
+ )
+ {
+ self.value += $(value) ;
+ }
+
+ # Remove the back element from the vector. Does not return the value. No
+ # effect if vector is empty.
+ #
+ rule pop-back ( )
+ {
+ self.value = $(self.value[1--2]) ;
+ }
+
+ # Insert the given value at the given index, one based. The values at and to
+ # the right of the index are pushed back to make room for the new value.
+ # If the index is past the end of the vector, the element is added to the
+ # end.
+ #
+ rule insert (
+ index # The index to insert at, one based.
+ : value # The value to insert.
+ )
+ {
+ local left = $(self.value[1-$(index)]) ;
+ local right = $(self.value[$(index)-]) ;
+ if $(right)-is-not-empty
+ {
+ left = $(left[1--2]) ;
+ }
+ self.value = $(left) $(value) $(right) ;
+ }
+
+ # Remove one or more elements from the vector. The range is inclusive, and
+ # not specifying an end is equivalent to the [start, start] range.
+ #
+ rule erase (
+ start # Index of first element to remove.
+ end ? # Optional, index of last element to remove.
+ )
+ {
+ end ?= $(start) ;
+ local left = $(self.value[1-$(start)]) ;
+ left = $(left[1--2]) ;
+ local right = $(self.value[$(end)-]) ;
+ right = $(right[2-]) ;
+ self.value = $(left) $(right) ;
+ }
+
+ # Remove all elements from the vector.
+ #
+ rule clear ( )
+ {
+ self.value = ;
+ }
+
+ # The number of elements in the vector.
+ #
+ rule size ( )
+ {
+ return [ sequence.length $(self.value) ] ;
+ }
+
+ # Returns "true" if there are NO elements in the vector, empty otherwise.
+ #
+ rule empty ( )
+ {
+ if ! $(self.value)-is-not-empty
+ {
+ return true ;
+ }
+ }
+
+ # Returns the textual representation of content.
+ #
+ rule str ( )
+ {
+ return "[" [ sequence.transform utility.str : $(self.value) ] "]" ;
+ }
+
+ # Sorts the vector in place, calling 'utility.less' for comparisons.
+ #
+ rule sort ( )
+ {
+ self.value = [ sequence.insertion-sort $(self.value) : utility.less ] ;
+ }
+
+ # Returns true if the content is equal to the content of the other vector.
+ # Uses 'utility.equal' for comparison.
+ #
+ rule equal ( another )
+ {
+ local mismatch ;
+ local size = [ size ] ;
+ if $(size) = [ $(another).size ]
+ {
+ for local i in [ numbers.range 1 $(size) ]
+ {
+ if ! [ utility.equal [ at $(i) ] [ $(another).at $(i) ] ]
+ {
+ mismatch = true ;
+ }
+ }
+ }
+ else
+ {
+ mismatch = true ;
+ }
+
+ if ! $(mismatch)
+ {
+ return true ;
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+
+ local v1 = [ new vector ] ;
+ assert.true $(v1).equal $(v1) ;
+ assert.true $(v1).empty ;
+ assert.result 0 : $(v1).size ;
+ assert.result "[" "]" : $(v1).str ;
+ $(v1).push-back b ;
+ $(v1).push-front a ;
+ assert.result "[" a b "]" : $(v1).str ;
+ assert.result a : $(v1).front ;
+ assert.result b : $(v1).back ;
+ $(v1).insert 2 : d ;
+ $(v1).insert 2 : c ;
+ $(v1).insert 4 : f ;
+ $(v1).insert 4 : e ;
+ $(v1).pop-back ;
+ assert.result 5 : $(v1).size ;
+ assert.result d : $(v1).at 3 ;
+ $(v1).pop-front ;
+ assert.result c : $(v1).front ;
+ assert.false $(v1).empty ;
+ $(v1).erase 3 4 ;
+ assert.result 2 : $(v1).size ;
+
+ local v2 = [ new vector q w e r t y ] ;
+ assert.result 6 : $(v2).size ;
+ $(v1).push-back $(v2) ;
+ assert.result 3 : $(v1).size ;
+ local v2-alias = [ $(v1).back ] ;
+ assert.result e : $(v2-alias).at 3 ;
+ $(v1).clear ;
+ assert.true $(v1).empty ;
+ assert.false $(v2-alias).empty ;
+ $(v2).pop-back ;
+ assert.result t : $(v2-alias).back ;
+
+ local v3 = [ new vector ] ;
+ $(v3).push-back [ new vector 1 2 3 4 5 ] ;
+ $(v3).push-back [ new vector a b c ] ;
+ assert.result "[" "[" 1 2 3 4 5 "]" "[" a b c "]" "]" : $(v3).str ;
+ $(v3).push-back [ new vector [ new vector x y z ] [ new vector 7 8 9 ] ] ;
+ assert.result 1 : $(v3).at 1 : 1 ;
+ assert.result b : $(v3).at 2 : 2 ;
+ assert.result a b c : $(v3).get-at 2 ;
+ assert.result 7 8 9 : $(v3).get-at 3 : 2 ;
+
+ local v4 = [ new vector 4 3 6 ] ;
+ $(v4).sort ;
+ assert.result 3 4 6 : $(v4).get ;
+ assert.false $(v4).equal $(v3) ;
+
+ local v5 = [ new vector 3 4 6 ] ;
+ assert.true $(v4).equal $(v5) ;
+ # Check that vectors of different sizes are considered non-equal.
+ $(v5).pop-back ;
+ assert.false $(v4).equal $(v5) ;
+
+ local v6 = [ new vector [ new vector 1 2 3 ] ] ;
+ assert.true $(v6).equal [ new vector [ new vector 1 2 3 ] ] ;
+
+ local v7 = [ new vector 111 222 333 ] ;
+ assert.true $(v7).equal $(v7) ;
+ $(v7).insert 4 : 444 ;
+ assert.result 111 222 333 444 : $(v7).get ;
+ $(v7).insert 999 : xxx ;
+ assert.result 111 222 333 444 xxx : $(v7).get ;
+
+ local v8 = [ new vector "" "" "" ] ;
+ assert.true $(v8).equal $(v8) ;
+ assert.false $(v8).empty ;
+ assert.result 3 : $(v8).size ;
+ assert.result "" : $(v8).at 1 ;
+ assert.result "" : $(v8).at 2 ;
+ assert.result "" : $(v8).at 3 ;
+ assert.result : $(v8).at 4 ;
+ $(v8).insert 2 : 222 ;
+ assert.result 4 : $(v8).size ;
+ assert.result "" 222 "" "" : $(v8).get ;
+ $(v8).insert 999 : "" ;
+ assert.result 5 : $(v8).size ;
+ assert.result "" 222 "" "" "" : $(v8).get ;
+ $(v8).insert 999 : xxx ;
+ assert.result 6 : $(v8).size ;
+ assert.result "" 222 "" "" "" xxx : $(v8).get ;
+
+ # Regression test for a bug causing vector.equal to compare only the first
+ # and the last element in the given vectors.
+ local v9 = [ new vector 111 xxx 222 ] ;
+ local v10 = [ new vector 111 yyy 222 ] ;
+ assert.false $(v9).equal $(v10) ;
+}
diff --git a/jam-files/boost-build/util/doc.jam b/jam-files/boost-build/util/doc.jam
new file mode 100644
index 000000000..a75155882
--- /dev/null
+++ b/jam-files/boost-build/util/doc.jam
@@ -0,0 +1,997 @@
+# Copyright 2002, 2005 Dave Abrahams
+# Copyright 2002, 2003, 2006 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Documentation system, handles --help requests.
+# It defines rules that attach documentation to modules, rules, and variables.
+# Collects and generates documentation for the various parts of the build
+# system. The documentation is collected from comments integrated into the code.
+
+import modules ;
+import print ;
+import set ;
+import container ;
+import "class" ;
+import sequence ;
+import path ;
+
+
+# The type of output to generate.
+# "console" is formatted text echoed to the console (the default);
+# "text" is formatted text appended to the output file;
+# "html" is HTML output to the file.
+#
+help-output = console ;
+
+
+# The file to output documentation to when generating "text" or "html" help.
+# This is without extension as the extension is determined by the type of
+# output.
+#
+help-output-file = help ;
+
+# Whether to include local rules in help output.
+#
+.option.show-locals ?= ;
+
+# When showing documentation for a module, whether to also generate
+# automatically the detailed docs for each item in the module.
+#
+.option.detailed ?= ;
+
+# Generate debug output as the help is generated and modules are parsed.
+#
+.option.debug ?= ;
+
+# Enable or disable a documentation option.
+#
+local rule set-option (
+ option # The option name.
+ : value ? # Enabled (non-empty), or disabled (empty)
+)
+{
+ .option.$(option) = $(value) ;
+}
+
+
+# Set the type of output.
+#
+local rule set-output ( type )
+{
+ help-output = $(type) ;
+}
+
+
+# Set the output to a file.
+#
+local rule set-output-file ( file )
+{
+ help-output-file = $(file) ;
+}
+
+
+# Extracts the brief comment from a complete comment. The brief comment is the
+# first sentence.
+#
+local rule brief-comment (
+ docs * # The comment documentation.
+)
+{
+ local d = $(docs:J=" ") ;
+ local p = [ MATCH ".*([.])$" : $(d) ] ;
+ if ! $(p) { d = $(d)"." ; }
+ d = $(d)" " ;
+ local m = [ MATCH "^([^.]+[.])(.*)" : $(d) ] ;
+ local brief = $(m[1]) ;
+ while $(m[2]) && [ MATCH "^([^ ])" : $(m[2]) ]
+ {
+ m = [ MATCH "^([^.]+[.])(.*)" : $(m[2]) ] ;
+ brief += $(m[1]) ;
+ }
+ return $(brief:J="") ;
+}
+
+
+# Specifies the documentation for the current module.
+#
+local rule set-module-doc (
+ module-name ? # The name of the module to document.
+ : docs * # The documentation for the module.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).docs = $(docs) ;
+
+ if ! $(module-name) in $(documented-modules)
+ {
+ documented-modules += $(module-name) ;
+ }
+}
+
+
+# Specifies the copyright notice for the current module.
+#
+local rule set-module-copyright (
+ module-name ? # The name of the module to document.
+ : copyright * # The copyright for the module.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).copy-brief = [ brief-comment $(copyright) ] ;
+ $(module-name).copy-docs = $(copyright) ;
+
+ if ! $(module-name) in $(documented-modules)
+ {
+ documented-modules += $(module-name) ;
+ }
+}
+
+
+# Specifies the documentation for a rule in the current module. If called in the
+# global module, this documents a global rule.
+#
+local rule set-rule-doc (
+ name # The name of the rule.
+ module-name ? # The name of the module to document.
+ is-local ? # Whether the rule is local to the module.
+ : docs * # The documentation for the rule.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).$(name).docs = $(docs) ;
+ $(module-name).$(name).is-local = $(is-local) ;
+
+ if ! $(name) in $($(module-name).rules)
+ {
+ $(module-name).rules += $(name) ;
+ }
+}
+
+
+# Specify a class; this will turn a rule into a class.
+#
+local rule set-class-doc (
+ name # The name of the class.
+ module-name ? # The name of the module to document.
+ : super-name ? # The super class name.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).is-class = true ;
+ $(module-name).$(name).super-name = $(super-name) ;
+ $(module-name).$(name).class-rules =
+ [ MATCH "^($(name)[.].*)" : $($(module-name).rules) ] ;
+ $(module-name).$($(module-name).$(name).class-rules).is-class-rule = true ;
+
+ $(module-name).classes += $(name) ;
+ $(module-name).class-rules += $($(module-name).$(name).class-rules) ;
+ $(module-name).rules =
+ [ set.difference $($(module-name).rules) :
+ $(name) $($(module-name).$(name).class-rules) ] ;
+}
+
+
+# Set the argument call signature of a rule.
+#
+local rule set-rule-arguments-signature (
+ name # The name of the rule.
+ module-name ? # The name of the module to document.
+ : signature * # The arguments signature.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).signature = $(signature) ;
+}
+
+
+# Specifies the documentation for an argument of a rule.
+#
+local rule set-argument-doc (
+ name # The name of the argument.
+ qualifier # Argument syntax qualifier, "*", "+", etc.
+ rule-name # The name of the rule.
+ module-name ? # The optional name of the module.
+ : docs * # The documentation.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(rule-name).args.$(name).qualifier = $(qualifier) ;
+ $(module-name).$(rule-name).args.$(name).docs = $(docs) ;
+
+ if ! $(name) in $($(module-name).$(rule-name).args)
+ {
+ $(module-name).$(rule-name).args += $(name) ;
+ }
+}
+
+
+# Specifies the documentation for a variable in the current module. If called in
+# the global module, the global variable is documented.
+#
+local rule set-variable-doc (
+ name # The name of the variable.
+ default # The default value.
+ initial # The initial value.
+ module-name ? # The name of the module to document.
+ : docs * # The documentation for the variable.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).$(name).default = $(default) ;
+ $(module-name).$(name).initial = $(initial) ;
+ $(module-name).$(name).docs = $(docs) ;
+
+ if ! $(name) in $($(module-name).variables)
+ {
+ $(module-name).variables += $(name) ;
+ }
+}
+
+
+# Generates a general description of the documentation and help system.
+#
+local rule print-help-top ( )
+{
+ print.section "General command line usage" ;
+
+ print.text " bjam [options] [properties] [targets]
+
+ Options, properties and targets can be specified in any order.
+ " ;
+
+ print.section "Important Options" ;
+
+ print.list-start ;
+ print.list-item "--clean Remove targets instead of building" ;
+ print.list-item "-a Rebuild everything" ;
+ print.list-item "-n Don't execute the commands, only print them" ;
+ print.list-item "-d+2 Show commands as they are executed" ;
+ print.list-item "-d0 Suppress all informational messages" ;
+ print.list-item "-q Stop at first error" ;
+ print.list-item "--debug-configuration Diagnose configuration" ;
+ print.list-item "--debug-building Report which targets are built with what properties" ;
+ print.list-item "--debug-generator Diagnose generator search/execution" ;
+ print.list-end ;
+
+ print.section "Further Help"
+ The following options can be used to obtain additional documentation.
+ ;
+
+ print.list-start ;
+ print.list-item "--help-options Print more obscure command line options." ;
+ print.list-item "--help-internal Boost.Build implementation details." ;
+ print.list-item "--help-doc-options Implementation details doc formatting." ;
+ print.list-end ;
+}
+
+
+# Generate Jam/Boost.Jam command usage information.
+#
+local rule print-help-usage ( )
+{
+ print.section "Boost.Jam Usage"
+ "bjam [ options... ] targets..."
+ ;
+ print.list-start ;
+ print.list-item -a;
+ Build all targets, even if they are current. ;
+ print.list-item -fx;
+ Read '"x"' as the Jamfile for building instead of searching for the
+ Boost.Build system. ;
+ print.list-item -jx;
+ Run up to '"x"' commands concurrently. ;
+ print.list-item -n;
+ Do not execute build commands. Instead print out the commands as they
+ would be executed if building. ;
+ print.list-item -ox;
+ Output the used build commands to file '"x"'. ;
+ print.list-item -q;
+ Quit as soon as a build failure is encountered. Without this option
+ Boost.Jam will continue building as many targets as it can. ;
+ print.list-item -sx=y;
+ Sets a Jam variable '"x"' to the value '"y"', overriding any value that
+ variable would have from the environment. ;
+ print.list-item -tx;
+ Rebuild the target '"x"', even if it is up-to-date. ;
+ print.list-item -v;
+ Display the version of bjam. ;
+ print.list-item --x;
+ Any option not explicitly handled by Boost.Jam remains available to
+ build scripts using the '"ARGV"' variable. ;
+ print.list-item -dn;
+ Enables output of diagnostic messages. The debug level '"n"' and all
+ below it are enabled by this option. ;
+ print.list-item -d+n;
+ Enables output of diagnostic messages. Only the output for debug level
+ '"n"' is enabled. ;
+ print.list-end ;
+ print.section "Debug Levels"
+ Each debug level shows a different set of information. Usually with
+ higher levels producing more verbose information. The following levels
+ are supported: ;
+ print.list-start ;
+ print.list-item 0;
+ Turn off all diagnostic output. Only errors are reported. ;
+ print.list-item 1;
+ Show the actions taken for building targets, as they are executed. ;
+ print.list-item 2;
+ Show "quiet" actions and display all action text, as they are executed. ;
+ print.list-item 3;
+ Show dependency analysis, and target/source timestamps/paths. ;
+ print.list-item 4;
+ Show arguments of shell invocations. ;
+ print.list-item 5;
+ Show rule invocations and variable expansions. ;
+ print.list-item 6;
+ Show directory/header file/archive scans, and attempts at binding to targets. ;
+ print.list-item 7;
+ Show variable settings. ;
+ print.list-item 8;
+ Show variable fetches, variable expansions, and evaluation of '"if"' expressions. ;
+ print.list-item 9;
+ Show variable manipulation, scanner tokens, and memory usage. ;
+ print.list-item 10;
+ Show execution times for rules. ;
+ print.list-item 11;
+ Show parsing progress of Jamfiles. ;
+ print.list-item 12;
+ Show graph for target dependencies. ;
+ print.list-item 13;
+ Show changes in target status (fate). ;
+ print.list-end ;
+}
+
+
+# Generates a description of the options controlling the help system. This
+# automatically reads the options as all variables in the doc module of the form
+# ".option.*".
+#
+local rule print-help-options (
+ module-name # The doc module.
+)
+{
+ print.section "Help Options"
+ These are all the options available for controlling the help system in
+ various ways. Options can be enabled or disabled with
+ '"--help-enable-<option>"' and '"--help-disable-<option>"'
+ respectively.
+ ;
+ local options-to-list = [ MATCH ^[.]option[.](.*) : $($(module-name).variables) ] ;
+ if $(options-to-list)
+ {
+ print.list-start ;
+ for local option in [ sequence.insertion-sort $(options-to-list) ]
+ {
+ local def = disabled ;
+ if $($(module-name)..option.$(option).default) != "(empty)"
+ {
+ def = enabled ;
+ }
+ print.list-item $(option): $($(module-name)..option.$(option).docs)
+ Default is $(def). ;
+ }
+ print.list-end ;
+ }
+}
+
+
+# Generate brief documentation for all the known items in the section for a
+# module. Possible sections are: "rules", and "variables".
+#
+local rule print-help-module-section (
+ module # The module name.
+ section # rules or variables.
+ : section-head # The title of the section.
+ section-description * # The detailed description of the section.
+)
+{
+ if $($(module).$(section))
+ {
+ print.section $(section-head) $(section-description) ;
+ print.list-start ;
+ for local item in [ sequence.insertion-sort $($(module).$(section)) ]
+ {
+ local show = ;
+ if ! $($(module).$(item).is-local)
+ {
+ show = yes ;
+ }
+ if $(.option.show-locals)
+ {
+ show = yes ;
+ }
+ if $(show)
+ {
+ print.list-item $(item): $($(module).$(item).brief) ;
+ }
+ }
+ print.list-end ;
+ }
+}
+
+
+# Generate documentation for all possible modules. We attempt to list all known
+# modules together with a brief description of each.
+#
+local rule print-help-all (
+ ignored # Usually the module name, but is ignored here.
+)
+{
+ print.section "Modules"
+ "These are all the known modules. Use --help <module> to get more"
+ "detailed information."
+ ;
+ if $(documented-modules)
+ {
+ print.list-start ;
+ for local module-name in [ sequence.insertion-sort $(documented-modules) ]
+ {
+ # The brief docs for each module.
+ print.list-item $(module-name): $($(module-name).brief) ;
+ }
+ print.list-end ;
+ }
+ # The documentation for each module when details are requested.
+ if $(documented-modules) && $(.option.detailed)
+ {
+ for local module-name in [ sequence.insertion-sort $(documented-modules) ]
+ {
+ # The brief docs for each module.
+ print-help-module $(module-name) ;
+ }
+ }
+}
+
+
+# Generate documentation for a module. Basic information about the module is
+# generated.
+#
+local rule print-help-module (
+ module-name # The module to generate docs for.
+)
+{
+ # Print the docs.
+ print.section "Module '$(module-name)'" $($(module-name).docs) ;
+
+ # Print out the documented classes.
+ print-help-module-section $(module-name) classes : "Module '$(module-name)' classes"
+ Use --help $(module-name).<class-name> to get more information. ;
+
+ # Print out the documented rules.
+ print-help-module-section $(module-name) rules : "Module '$(module-name)' rules"
+ Use --help $(module-name).<rule-name> to get more information. ;
+
+ # Print out the documented variables.
+ print-help-module-section $(module-name) variables : "Module '$(module-name)' variables"
+ Use --help $(module-name).<variable-name> to get more information. ;
+
+ # Print out all the same information, but in detailed form.
+ if $(.option.detailed)
+ {
+ print-help-classes $(module-name) ;
+ print-help-rules $(module-name) ;
+ print-help-variables $(module-name) ;
+ }
+}
+
+
+# Generate documentation for a set of rules in a module.
+#
+local rule print-help-rules (
+ module-name # Module of the rules.
+ : name * # Optional list of rules to describe.
+)
+{
+ name ?= $($(module-name).rules) ;
+ if [ set.intersection $(name) : $($(module-name).rules) $($(module-name).class-rules) ]
+ {
+ # Print out the given rules.
+ for local rule-name in [ sequence.insertion-sort $(name) ]
+ {
+ if $(.option.show-locals) || ! $($(module-name).$(rule-name).is-local)
+ {
+ local signature = $($(module-name).$(rule-name).signature:J=" ") ;
+ signature ?= "" ;
+ print.section "Rule '$(module-name).$(rule-name) ( $(signature) )'"
+ $($(module-name).$(rule-name).docs) ;
+ if $($(module-name).$(rule-name).args)
+ {
+ print.list-start ;
+ for local arg-name in $($(module-name).$(rule-name).args)
+ {
+ print.list-item $(arg-name): $($(module-name).$(rule-name).args.$(arg-name).docs) ;
+ }
+ print.list-end ;
+ }
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a set of classes in a module.
+#
+local rule print-help-classes (
+ module-name # Module of the classes.
+ : name * # Optional list of classes to describe.
+)
+{
+ name ?= $($(module-name).classes) ;
+ if [ set.intersection $(name) : $($(module-name).classes) ]
+ {
+ # Print out the given classes.
+ for local class-name in [ sequence.insertion-sort $(name) ]
+ {
+ if $(.option.show-locals) || ! $($(module-name).$(class-name).is-local)
+ {
+ local signature = $($(module-name).$(class-name).signature:J=" ") ;
+ signature ?= "" ;
+ print.section "Class '$(module-name).$(class-name) ( $(signature) )'"
+ $($(module-name).$(class-name).docs)
+ "Inherits from '"$($(module-name).$(class-name).super-name)"'." ;
+ if $($(module-name).$(class-name).args)
+ {
+ print.list-start ;
+ for local arg-name in $($(module-name).$(class-name).args)
+ {
+ print.list-item $(arg-name): $($(module-name).$(class-name).args.$(arg-name).docs) ;
+ }
+ print.list-end ;
+ }
+ }
+
+ # Print out the documented rules of the class.
+ print-help-module-section $(module-name) $(class-name).class-rules : "Class '$(module-name).$(class-name)' rules"
+ Use --help $(module-name).<rule-name> to get more information. ;
+
+ # Print out all the rules if details are requested.
+ if $(.option.detailed)
+ {
+ print-help-rules $(module-name) : $($(module-name).$(class-name).class-rules) ;
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a set of variables in a module.
+#
+local rule print-help-variables (
+ module-name ? # Module of the variables.
+ : name * # Optional list of variables to describe.
+)
+{
+ name ?= $($(module-name).variables) ;
+ if [ set.intersection $(name) : $($(module-name).variables) ]
+ {
+ # Print out the given variables.
+ for local variable-name in [ sequence.insertion-sort $(name) ]
+ {
+ print.section "Variable '$(module-name).$(variable-name)'" $($(module-name).$(variable-name).docs) ;
+ if $($(module-name).$(variable-name).default) ||
+ $($(module-name).$(variable-name).initial)
+ {
+ print.list-start ;
+ if $($(module-name).$(variable-name).default)
+ {
+ print.list-item "default value:" '$($(module-name).$(variable-name).default:J=" ")' ;
+ }
+ if $($(module-name).$(variable-name).initial)
+ {
+ print.list-item "initial value:" '$($(module-name).$(variable-name).initial:J=" ")' ;
+ }
+ print.list-end ;
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a project.
+#
+local rule print-help-project (
+ unused ?
+ : jamfile * # The project Jamfile.
+)
+{
+ if $(jamfile<$(jamfile)>.docs)
+ {
+ # Print the docs.
+ print.section "Project-specific help"
+ Project has jamfile at $(jamfile) ;
+
+ print.lines $(jamfile<$(jamfile)>.docs) "" ;
+ }
+}
+
+
+# Generate documentation for a config file.
+#
+local rule print-help-config (
+ unused ?
+ : type # The type of configuration file user or site.
+ config-file # The configuration Jamfile.
+)
+{
+ if $(jamfile<$(config-file)>.docs)
+ {
+ # Print the docs.
+ print.section "Configuration help"
+ Configuration file at $(config-file) ;
+
+ print.lines $(jamfile<$(config-file)>.docs) "" ;
+ }
+}
+
+
+ws = " " ;
+
+# Extract the text from a block of comments.
+#
+local rule extract-comment (
+ var # The name of the variable to extract from.
+)
+{
+ local comment = ;
+ local line = $($(var)[1]) ;
+ local l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
+ while $(l[1]) && $($(var))
+ {
+ if $(l[2]) { comment += [ MATCH "^[$(ws)]?(.*)$" : $(l[2]) ] ; }
+ else { comment += "" ; }
+ $(var) = $($(var)[2-]) ;
+ line = $($(var)[1]) ;
+ l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
+ }
+ return $(comment) ;
+}
+
+
+# Extract a single line of Jam syntax, ignoring any comments.
+#
+local rule extract-syntax (
+ var # The name of the variable to extract from.
+)
+{
+ local syntax = ;
+ local line = $($(var)[1]) ;
+ while ! $(syntax) && ! [ MATCH "^[$(ws)]*(#)" : $(line) ] && $($(var))
+ {
+ local m = [ MATCH "^[$(ws)]*(.*)$" : $(line) ] ;
+ if $(m) && ! $(m) = ""
+ {
+ syntax = $(m) ;
+ }
+ $(var) = $($(var)[2-]) ;
+ line = $($(var)[1]) ;
+ }
+ return $(syntax) ;
+}
+
+
+# Extract the next token; this is either a single Jam construct or a comment,
+# returned as a single token.
+#
+local rule extract-token (
+ var # The name of the variable to extract from.
+)
+{
+ local parts = ;
+ while ! $(parts)
+ {
+ parts = [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]*(.*)" : $($(var)[1]) ] ;
+ if ! $(parts)
+ {
+ $(var) = $($(var)[2-]) ;
+ }
+ }
+ local token = ;
+ if [ MATCH "^(#)" : $(parts[1]) ]
+ {
+ token = $(parts:J=" ") ;
+ $(var) = $($(var)[2-]) ;
+ }
+ else
+ {
+ token = $(parts[1]) ;
+ $(var) = $(parts[2-]:J=" ") $($(var)[2-]) ;
+ }
+ return $(token) ;
+}
+
+
+# Scan for a rule declaration as the next item in the variable.
+#
+local rule scan-rule (
+ syntax ? # The first part of the text which contains the rule declaration.
+ : var # The name of the variable to extract from.
+)
+{
+ local rule-parts =
+ [ MATCH "^[$(ws)]*(rule|local[$(ws)]*rule)[$(ws)]+([^$(ws)]+)[$(ws)]*(.*)" : $(syntax:J=" ") ] ;
+ if $(rule-parts[1])
+ {
+ # Mark as doc for rule.
+ local rule-name = $(rule-parts[2]) ;
+ if $(scope-name)
+ {
+ rule-name = $(scope-name).$(rule-name) ;
+ }
+ local is-local = [ MATCH "^(local).*" : $(rule-parts[1]) ] ;
+ if $(comment-block)
+ {
+ set-rule-doc $(rule-name) $(module-name) $(is-local) : $(comment-block) ;
+ }
+ # Parse args of rule.
+ $(var) = $(rule-parts[3-]) $($(var)) ;
+ set-rule-arguments-signature $(rule-name) $(module-name) : [ scan-rule-arguments $(var) ] ;
+ # Scan within this rules scope.
+ local scope-level = [ extract-token $(var) ] ;
+ local scope-name = $(rule-name) ;
+ while $(scope-level)
+ {
+ local comment-block = [ extract-comment $(var) ] ;
+ local syntax-block = [ extract-syntax $(var) ] ;
+ if [ scan-rule $(syntax-block) : $(var) ]
+ {
+ }
+ else if [ MATCH "^(\\{)" : $(syntax-block) ]
+ {
+ scope-level += "{" ;
+ }
+ else if [ MATCH "^[^\\}]*([\\}])[$(ws)]*$" : $(syntax-block) ]
+ {
+ scope-level = $(scope-level[2-]) ;
+ }
+ }
+
+ return true ;
+ }
+}
+
+
+# Scan the arguments of a rule.
+#
+local rule scan-rule-arguments (
+ var # The name of the variable to extract from.
+)
+{
+ local arg-syntax = ;
+ local token = [ extract-token $(var) ] ;
+ while $(token) != "(" && $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ if $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ local arg-signature = ;
+ while $(token) != ")" && $(token) != "{"
+ {
+ local arg-name = ;
+ local arg-qualifier = " " ;
+ local arg-doc = ;
+ if $(token) = ":"
+ {
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ arg-name = $(token) ;
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ if [ MATCH "^([\\*\\+\\?])" : $(token) ]
+ {
+ arg-qualifier = $(token) ;
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ if $(token) = ":"
+ {
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ if [ MATCH "^(#)" : $(token) ]
+ {
+ $(var) = $(token) $($(var)) ;
+ arg-doc = [ extract-comment $(var) ] ;
+ token = [ extract-token $(var) ] ;
+ }
+ set-argument-doc $(arg-name) $(arg-qualifier) $(rule-name) $(module-name) : $(arg-doc) ;
+ }
+ while $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ $(var) = "{" $($(var)) ;
+ arg-signature ?= "" ;
+ return $(arg-signature) ;
+}
+
+
+# Scan for a variable declaration.
+#
+local rule scan-variable (
+ syntax ? # The first part of the text which contains the variable declaration.
+ : var # The name of the variable to extract from.
+)
+{
+ # [1] = name, [2] = value(s)
+ local var-parts =
+ [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([\\?\\=]*)[$(ws)]+([^\\;]*)\\;" : $(syntax) ] ;
+ if $(var-parts)
+ {
+ local value = [ MATCH "^(.*)[ ]$" : $(var-parts[3-]:J=" ") ] ;
+ local default-value = "" ;
+ local initial-value = "" ;
+ if $(var-parts[2]) = "?="
+ {
+ default-value = $(value) ;
+ default-value ?= "(empty)" ;
+ }
+ else
+ {
+ initial-value = $(value) ;
+ initial-value ?= "(empty)" ;
+ }
+ if $(comment-block)
+ {
+ set-variable-doc $(var-parts[1]) $(default-value) $(initial-value) $(module-name) : $(comment-block) ;
+ }
+ return true ;
+ }
+}
+
+
+# Scan a class declaration.
+#
+local rule scan-class (
+ syntax ? # The syntax text for the class declaration.
+)
+{
+ # [1] = class?, [2] = name, [3] = superclass
+ local class-parts =
+ [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([^$(ws)]+)[$(ws)]+:*[$(ws)]*([^$(ws);]*)" : $(syntax) ] ;
+ if $(class-parts[1]) = "class" || $(class-parts[1]) = "class.class"
+ {
+ set-class-doc $(class-parts[2]) $(module-name) : $(class-parts[3]) ;
+ }
+}
+
+
+# Scan a module file for documentation comments. This also invokes any actions
+# assigned to the module. The actions are the rules that do the actual output of
+# the documentation. This rule is invoked as the header scan rule for the module
+# file.
+#
+rule scan-module (
+ target # The module file.
+ : text * # The text in the file, one item per line.
+ : action * # Rule to call to output docs for the module.
+)
+{
+ if $(.option.debug) { ECHO "HELP:" scanning module target '$(target)' ; }
+ local module-name = $(target:B) ;
+ local module-documented = ;
+ local comment-block = ;
+ local syntax-block = ;
+ # This is a hack because we cannot get the last line of a file if it happens
+ # to not have a new-line termination.
+ text += "}" ;
+ while $(text)
+ {
+ comment-block = [ extract-comment text ] ;
+ syntax-block = [ extract-syntax text ] ;
+ if $(.option.debug)
+ {
+ ECHO "HELP:" comment block; '$(comment-block)' ;
+ ECHO "HELP:" syntax block; '$(syntax-block)' ;
+ }
+ if [ scan-rule $(syntax-block) : text ] { }
+ else if [ scan-variable $(syntax-block) : text ] { }
+ else if [ scan-class $(syntax-block) ] { }
+ else if [ MATCH .*([cC]opyright).* : $(comment-block:J=" ") ]
+ {
+ # mark as the copy for the module.
+ set-module-copyright $(module-name) : $(comment-block) ;
+ }
+ else if $(action[1]) in "print-help-project" "print-help-config"
+ && ! $(jamfile<$(target)>.docs)
+ {
+ # special module docs for the project jamfile.
+ jamfile<$(target)>.docs = $(comment-block) ;
+ }
+ else if ! $(module-documented)
+ {
+ # document the module.
+ set-module-doc $(module-name) : $(comment-block) ;
+ module-documented = true ;
+ }
+ }
+ if $(action)
+ {
+ $(action[1]) $(module-name) : $(action[2-]) ;
+ }
+}
+
+
+# Import scan-module to global scope, so that it is available during header
+# scanning phase.
+#
+IMPORT $(__name__) : scan-module : : doc.scan-module ;
+
+
+# Read in a file using the SHELL builtin and return the individual lines as
+# would be done for header scanning.
+#
+local rule read-file (
+ file # The file to read in.
+)
+{
+ file = [ path.native [ path.root [ path.make $(file) ] [ path.pwd ] ] ] ;
+ if ! $(.file<$(file)>.lines)
+ {
+ local content ;
+ switch [ modules.peek : OS ]
+ {
+ case NT :
+ content = [ SHELL "TYPE \"$(file)\"" ] ;
+
+ case * :
+ content = [ SHELL "cat \"$(file)\"" ] ;
+ }
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ .file<$(file)>.lines = $(lines) ;
+ }
+ return $(.file<$(file)>.lines) ;
+}
+
+
+# Add a scan action to perform to generate the help documentation. The action
+# rule is passed the name of the module as the first argument. The second
+# argument(s) are optional and passed directly as specified here.
+#
+local rule do-scan (
+ modules + # The modules to scan and perform the action on.
+ : action * # The action rule, plus the secondary arguments to pass to the action rule.
+)
+{
+ if $(help-output) = text
+ {
+ print.output $(help-output-file).txt plain ;
+ ALWAYS $(help-output-file).txt ;
+ DEPENDS all : $(help-output-file).txt ;
+ }
+ if $(help-output) = html
+ {
+ print.output $(help-output-file).html html ;
+ ALWAYS $(help-output-file).html ;
+ DEPENDS all : $(help-output-file).html ;
+ }
+ for local module-file in $(modules[1--2])
+ {
+ scan-module $(module-file) : [ read-file $(module-file) ] ;
+ }
+ scan-module $(modules[-1]) : [ read-file $(modules[-1]) ] : $(action) ;
+}
diff --git a/jam-files/boost-build/util/indirect.jam b/jam-files/boost-build/util/indirect.jam
new file mode 100644
index 000000000..ec63f1920
--- /dev/null
+++ b/jam-files/boost-build/util/indirect.jam
@@ -0,0 +1,115 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import numbers ;
+
+
+# The pattern that indirect rules must match: module%rule
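+# For example, the indirect rule string "path%native" designates the rule
+# 'native' defined in the 'path' module (illustrative names).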
+.pattern = ^([^%]*)%([^%]+)$ ;
+
+
+#
+# Type checking rules.
+#
+local rule indirect-rule ( x )
+{
+ if ! [ MATCH $(.pattern) : $(x) ]
+ {
+ return "expected a string of the form module%rule, but got \""$(x)"\" for argument" ;
+ }
+}
+
+
+# Make an indirect rule which calls the given rule. If context is supplied it is
+# expected to be the module in which to invoke the rule by the 'call' rule
+# below. Otherwise, the rule will be invoked in the module of this rule's
+# caller.
+#
+rule make ( rulename bound-args * : context ? )
+{
+ context ?= [ CALLER_MODULE ] ;
+ context ?= "" ;
+ return $(context)%$(rulename) $(bound-args) ;
+}
+
+
+# Make an indirect rule which calls the given rule. 'rulename' may be a
+# qualified rule; if so, it is returned unchanged. Otherwise, if 'frames' is
+# not supplied, the result will be invoked (by 'call', below) in the module of
+# the caller; frames > 1 specifies additional call frames to back up in order
+# to find the module context.
+#
+rule make-qualified ( rulename bound-args * : frames ? )
+{
+ if [ MATCH $(.pattern) : $(rulename) ]
+ {
+ return $(rulename) $(bound-args) ;
+ }
+ else
+ {
+ frames ?= 1 ;
+ # If the rule name includes a Jamfile module, grab it.
+ local module-context = [ MATCH ^(Jamfile<[^>]*>)\\..* : $(rulename) ] ;
+
+ if ! $(module-context)
+ {
+ # Take the first dot-separated element as module name. This disallows
+ # module names with dots, but allows rule names with dots.
+ module-context = [ MATCH ^([^.]*)\\..* : $(rulename) ] ;
+ }
+ module-context ?= [ CALLER_MODULE $(frames) ] ;
+ return [ make $(rulename) $(bound-args) : $(module-context) ] ;
+ }
+}
+
+
+# Returns the module name in which the given indirect rule will be invoked.
+#
+rule get-module ( [indirect-rule] x )
+{
+ local m = [ MATCH $(.pattern) : $(x) ] ;
+ if ! $(m[1])
+ {
+ m = ;
+ }
+ return $(m[1]) ;
+}
+
+
+# Returns the rulename that will be called when x is invoked.
+#
+rule get-rule ( [indirect-rule] x )
+{
+ local m = [ MATCH $(.pattern) : $(x) ] ;
+ return $(m[2]) ;
+}
+
+
+# Invoke the given indirect-rule.
+#
+rule call ( [indirect-rule] r args * : * )
+{
+ return [ modules.call-in [ get-module $(r) ] : [ get-rule $(r) ] $(args)
+ : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+}
+
+
+rule __test__
+{
+ import assert ;
+
+ rule foo-barr! ( x )
+ {
+ assert.equal $(x) : x ;
+ }
+
+ assert.equal [ get-rule [ make foo-barr! ] ] : foo-barr! ;
+ assert.equal [ get-module [ make foo-barr! ] ] : [ CALLER_MODULE ] ;
+
+ call [ make foo-barr! ] x ;
+ call [ make foo-barr! x ] ;
+ call [ make foo-barr! : [ CALLER_MODULE ] ] x ;
+}
diff --git a/jam-files/boost-build/util/numbers.jam b/jam-files/boost-build/util/numbers.jam
new file mode 100644
index 000000000..665347d31
--- /dev/null
+++ b/jam-files/boost-build/util/numbers.jam
@@ -0,0 +1,218 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+
+
+rule trim-leading-zeroes ( value )
+{
+ return [ CALC $(value) + 0 ] ;
+}
+
+
+rule check ( numbers * )
+{
+ for local n in $(numbers)
+ {
+ switch $(n)
+ {
+ case *[^0-9]* :
+ errors.error $(n) "in" $(numbers) : is not a number ;
+ }
+ }
+}
+
+
+rule increment ( number )
+{
+ return [ CALC $(number) + 1 ] ;
+}
+
+
+rule decrement ( number )
+{
+ return [ CALC $(number) - 1 ] ;
+}
+
+
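+# Returns the numbers from 'start' to 'finish' inclusive, in increments of
+# 'step' (default 1); with a single argument, counts from 1. For instance,
+# [ range 3 ] yields 1 2 3 and [ range 2 10 : 2 ] yields 2 4 6 8 10, as
+# exercised in the __test__ rule below.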
+rule range ( start finish ? : step ? )
+{
+ if ! $(finish)
+ {
+ finish = $(start) ;
+ start = 1 ;
+ }
+ step ?= 1 ;
+
+ check $(start) $(finish) $(step) ;
+
+ if $(finish) != 0
+ {
+ local result ;
+ while [ less $(start) $(finish) ] || $(start) = $(finish)
+ {
+ result += $(start) ;
+ start = [ CALC $(start) + $(step) ] ;
+ }
+ return $(result) ;
+ }
+}
+
+
+rule less ( n1 n2 )
+{
+ switch [ CALC $(n2) - $(n1) ]
+ {
+ case [1-9]* : return true ;
+ }
+}
+
+
+rule log10 ( number )
+{
+ switch $(number)
+ {
+ case *[^0-9]* : errors.error $(number) is not a number ;
+ case 0 : errors.error can't take log of zero ;
+ case [1-9] : return 0 ;
+ case [1-9]? : return 1 ;
+ case [1-9]?? : return 2 ;
+ case [1-9]??? : return 3 ;
+ case [1-9]???? : return 4 ;
+ case [1-9]????? : return 5 ;
+ case [1-9]?????? : return 6 ;
+ case [1-9]??????? : return 7 ;
+ case [1-9]???????? : return 8 ;
+ case [1-9]????????? : return 9 ;
+ case * :
+ {
+ import sequence ;
+ import string ;
+ local chars = [ string.chars $(number) ] ;
+ while $(chars[1]) = 0
+ {
+ chars = $(chars[2-]) ;
+ }
+ if ! $(chars)
+ {
+ errors.error can't take log of zero ;
+ }
+ else
+ {
+ return [ decrement [ sequence.length $(chars) ] ] ;
+ }
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result 1 : increment 0 ;
+ assert.result 2 : increment 1 ;
+ assert.result 1 : decrement 2 ;
+ assert.result 0 : decrement 1 ;
+ assert.result 50 : increment 49 ;
+ assert.result 49 : decrement 50 ;
+ assert.result 99 : increment 98 ;
+ assert.result 99 : decrement 100 ;
+ assert.result 100 : increment 99 ;
+ assert.result 999 : decrement 1000 ;
+ assert.result 1000 : increment 999 ;
+
+ assert.result 1 2 3 : range 3 ;
+ assert.result 1 2 3 4 5 6 7 8 9 10 11 12 : range 12 ;
+ assert.result 3 4 5 6 7 8 9 10 11 : range 3 11 ;
+ assert.result : range 0 ;
+ assert.result 1 4 7 10 : range 10 : 3 ;
+ assert.result 2 4 6 8 10 : range 2 10 : 2 ;
+ assert.result 25 50 75 100 : range 25 100 : 25 ;
+
+ assert.result 0 : trim-leading-zeroes 0 ;
+ assert.result 1234 : trim-leading-zeroes 1234 ;
+ assert.result 123456 : trim-leading-zeroes 0000123456 ;
+ assert.result 1000123456 : trim-leading-zeroes 1000123456 ;
+ assert.result 10000 : trim-leading-zeroes 10000 ;
+ assert.result 10000 : trim-leading-zeroes 00010000 ;
+
+ assert.true less 1 2 ;
+ assert.true less 1 12 ;
+ assert.true less 1 21 ;
+ assert.true less 005 217 ;
+ assert.false less 0 0 ;
+ assert.false less 03 3 ;
+ assert.false less 3 03 ;
+ assert.true less 005 217 ;
+ assert.true less 0005 217 ;
+ assert.true less 5 00217 ;
+
+ # TEMPORARILY disabled, because nested "try"/"catch" do not work and I do
+ # not have the time to fix that right now.
+ if $(0)
+ {
+ try ;
+ {
+ decrement 0 ;
+ }
+ catch can't decrement zero! ;
+
+ try ;
+ {
+ check foo ;
+ }
+ catch : not a number ;
+
+ try ;
+ {
+ increment foo ;
+ }
+ catch : not a number ;
+
+ try ;
+ {
+ log10 0 ;
+ }
+ catch can't take log of zero ;
+
+ try ;
+ {
+ log10 000 ;
+ }
+ catch can't take log of zero ;
+
+ }
+
+ assert.result 0 : log10 1 ;
+ assert.result 0 : log10 9 ;
+ assert.result 1 : log10 10 ;
+ assert.result 1 : log10 99 ;
+ assert.result 2 : log10 100 ;
+ assert.result 2 : log10 101 ;
+ assert.result 2 : log10 125 ;
+ assert.result 2 : log10 999 ;
+ assert.result 3 : log10 1000 ;
+ assert.result 10 : log10 12345678901 ;
+
+ for local x in [ range 75 110 : 5 ]
+ {
+ for local y in [ range $(x) 111 : 3 ]
+ {
+ if $(x) != $(y)
+ {
+ assert.true less $(x) $(y) ;
+ }
+ }
+ }
+
+ for local x in [ range 90 110 : 2 ]
+ {
+ for local y in [ range 80 $(x) : 4 ]
+ {
+ assert.false less $(x) $(y) ;
+ }
+ }
+}
diff --git a/jam-files/boost-build/util/option.jam b/jam-files/boost-build/util/option.jam
new file mode 100644
index 000000000..f6dc37522
--- /dev/null
+++ b/jam-files/boost-build/util/option.jam
@@ -0,0 +1,109 @@
+# Copyright (c) 2005 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+
+# Set a value for a named option, to be used when not overridden on the command
+# line.
+rule set ( name : value ? )
+{
+ .option.$(name) = $(value) ;
+}
+
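+# Returns the value of the named option: the "--<name>=<value>" command-line
+# argument if present, otherwise the implied value for a bare "--<name>" flag,
+# otherwise the value registered with 'set', and finally the given default.
+# Illustrative usage (option names and values are hypothetical): with
+# "--toolset=gcc" on the command line, [ option.get toolset : msvc ] returns
+# "gcc"; a bare "--debug" with [ option.get debug : : on ] returns "on".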
+rule get ( name : default-value ? : implied-value ? )
+{
+ local m = [ MATCH --$(name)=(.*) : [ modules.peek : ARGV ] ] ;
+ if $(m)
+ {
+ return $(m[1]) ;
+ }
+ else
+ {
+ m = [ MATCH (--$(name)) : [ modules.peek : ARGV ] ] ;
+ if $(m) && $(implied-value)
+ {
+ return $(implied-value) ;
+ }
+ else if $(.option.$(name))
+ {
+ return $(.option.$(name)) ;
+ }
+ else
+ {
+ return $(default-value) ;
+ }
+ }
+}
+
+
+# Check command-line args as soon as possible. For each option, try to load a
+# module named after the option. If that succeeds, invoke the 'process' rule in
+# the module. The rule may return "true" to indicate that the regular build
+# process should not be attempted.
+#
+# Options take the general form of: --<name>[=<value>] [<value>]
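+# For example (hypothetical options), "--build-dir=bin" is parsed into the
+# name "build-dir" with value "bin", while "--user-config site.jam" takes
+# "site.jam" from the following argument; in both cases a module named after
+# the option (or its prefix) is looked up in the "options" subdirectories of
+# BOOST_BUILD_PATH and its 'process' rule, if present, is invoked.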
+#
+rule process ( )
+{
+ local ARGV = [ modules.peek : ARGV ] ;
+ local BOOST_BUILD_PATH = [ modules.peek : BOOST_BUILD_PATH ] ;
+
+ local dont-build ;
+ local args = $(ARGV) ;
+ while $(args)
+ {
+ local arg = [ MATCH ^--(.*) : $(args[1]) ] ;
+ while $(args[2-]) && ! $(arg)
+ {
+ args = $(args[2-]) ;
+ arg = [ MATCH ^--(.*) : $(args[1]) ] ;
+ }
+ args = $(args[2-]) ;
+
+ if $(arg)
+ {
+ local split = [ MATCH ^(([^-=]+)[^=]*)(=?)(.*)$ : $(arg) ] ;
+ local full-name = $(split[1]) ;
+ local prefix = $(split[2]) ;
+ local values ;
+
+ if $(split[3])
+ {
+ values = $(split[4]) ;
+ }
+ if $(args) && ! [ MATCH ^(--).* : $(args[1]) ]
+ {
+ values += $(args[1]) ;
+ args = $(args[2-]) ;
+ }
+
+ # Look in "options" subdirectories of BOOST_BUILD_PATH for modules
+ # matching the full option name and then its prefix.
+ local plugin-dir = options ;
+ local option-files = [ GLOB $(plugin-dir:D=$(BOOST_BUILD_PATH)) :
+ $(full-name).jam $(prefix).jam ] ;
+
+ if $(option-files)
+ {
+ # Load the file into a module named for the option.
+ local f = $(option-files[1]) ;
+ local module-name = --$(f:D=:S=) ;
+ modules.load $(module-name) : $(f:D=) : $(f:D) ;
+
+ # If there is a process rule, call it with the full option name
+ # and its value (if any). If there was no "=" in the option, the
+ # value will be empty.
+ if process in [ RULENAMES $(module-name) ]
+ {
+ dont-build += [ modules.call-in $(module-name) : process
+ --$(full-name) : $(values) ] ;
+ }
+ }
+ }
+ }
+
+ return $(dont-build) ;
+}
diff --git a/jam-files/boost-build/util/order.jam b/jam-files/boost-build/util/order.jam
new file mode 100644
index 000000000..a74fc8c84
--- /dev/null
+++ b/jam-files/boost-build/util/order.jam
@@ -0,0 +1,169 @@
+# Copyright (C) 2003 Vladimir Prus
+# Use, modification, and distribution is subject to the Boost Software
+# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
+# at http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines a class which allows ordering arbitrary objects with
+# regard to an arbitrary binary relation.
+#
+# The primary use case is the gcc toolset, which is sensitive to library order:
+# if library 'a' uses symbols from library 'b', then 'a' must be present before
+# 'b' on the linker's command line.
+#
+# This requirement can be lifted for gcc with GNU ld, but for gcc with Solaris
+# LD (and for Solaris toolset as well), the order always matters.
+#
+# So, we need to store order requirements and then order libraries according to
+# them. It is not possible to use the dependency graph as order requirements.
+# What we need is a "use symbols" relationship while dependency graph provides
+# the "needs to be updated" relationship.
+#
+# For example::
+# lib a : a.cpp b ;
+# lib b ;
+#
+# For static linking, library 'a' need not depend on 'b'. However, it should
+# still come before 'b' on the command line.
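+#
+# Typical use, mirroring the __test__ rule at the end of this file: create an
+# instance with [ new order ], register constraints with add-pair, and call
+# order on the full list of objects to obtain an ordering compatible with the
+# registered constraints.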
+
+class order
+{
+ rule __init__ ( )
+ {
+ }
+
+ # Adds the constraint that 'first' should precede 'second'.
+ rule add-pair ( first second )
+ {
+ .constraints += $(first)--$(second) ;
+ }
+ NATIVE_RULE class@order : add-pair ;
+
+ # Given a list of objects, reorder them so that the constraints specified by
+ # 'add-pair' are satisfied.
+ #
+ # The algorithm was adapted from an awk script by Nikita Youshchenko
+ # (yoush at cs dot msu dot su)
+ rule order ( objects * )
+ {
+ # The algorithm used is the same as the standard transitive closure, except
+ # that we're not keeping in-degree for all vertices, but rather removing
+ # edges.
+ local result ;
+ if $(objects)
+ {
+ local constraints = [ eliminate-unused-constraints $(objects) ] ;
+
+ # Find some library that nobody depends upon and add it to the
+ # 'result' array.
+ local obj ;
+ while $(objects)
+ {
+ local new_objects ;
+ while $(objects)
+ {
+ obj = $(objects[1]) ;
+ if [ has-no-dependents $(obj) : $(constraints) ]
+ {
+ # Emulate break ;
+ new_objects += $(objects[2-]) ;
+ objects = ;
+ }
+ else
+ {
+ new_objects += $(obj) ;
+ obj = ;
+ objects = $(objects[2-]) ;
+ }
+ }
+
+ if ! $(obj)
+ {
+ errors.error "Circular order dependencies" ;
+ }
+ # No problem with placing first.
+ result += $(obj) ;
+ # Remove all constraints where 'obj' comes first, since they are
+ # already satisfied.
+ constraints = [ remove-satisfied $(constraints) : $(obj) ] ;
+
+ # Add the remaining objects for further processing on the next
+ # iteration
+ objects = $(new_objects) ;
+ }
+
+ }
+ return $(result) ;
+ }
+ NATIVE_RULE class@order : order ;
+
+ # Eliminate constraints which mention objects not in 'objects'. In
+ # graph-theory terms, this is finding a subgraph induced by ordered
+ # vertices.
+ rule eliminate-unused-constraints ( objects * )
+ {
+ local result ;
+ for local c in $(.constraints)
+ {
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[1]) in $(objects) && $(m[2]) in $(objects)
+ {
+ result += $(c) ;
+ }
+ }
+ return $(result) ;
+ }
+
+ # Returns true if there's no constraint in 'constraints' where 'obj' comes
+ # second.
+ rule has-no-dependents ( obj : constraints * )
+ {
+ local failed ;
+ while $(constraints) && ! $(failed)
+ {
+ local c = $(constraints[1]) ;
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[2]) = $(obj)
+ {
+ failed = true ;
+ }
+ constraints = $(constraints[2-]) ;
+ }
+ if ! $(failed)
+ {
+ return true ;
+ }
+ }
+
+ rule remove-satisfied ( constraints * : obj )
+ {
+ local result ;
+ for local c in $(constraints)
+ {
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[1]) != $(obj)
+ {
+ result += $(c) ;
+ }
+ }
+ return $(result) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import "class" : new ;
+ import assert ;
+
+ c1 = [ new order ] ;
+ $(c1).add-pair l1 l2 ;
+
+ assert.result l1 l2 : $(c1).order l1 l2 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+
+ $(c1).add-pair l2 l3 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+ $(c1).add-pair x l2 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+ assert.result l1 l2 l3 : $(c1).order l2 l3 l1 ;
+}
diff --git a/jam-files/boost-build/util/os.jam b/jam-files/boost-build/util/os.jam
new file mode 100644
index 000000000..daef27f77
--- /dev/null
+++ b/jam-files/boost-build/util/os.jam
@@ -0,0 +1,171 @@
+# Copyright 2001, 2002, 2003, 2005 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import string ;
+
+
+# Return the value(s) of the given environment variable(s) at the time bjam was
+# invoked.
+rule environ ( variable-names + )
+{
+ return [ modules.peek .ENVIRON : $(variable-names) ] ;
+}
+
+.name = [ modules.peek : OS ] ;
+.platform = [ modules.peek : OSPLAT ] ;
+.version = [ modules.peek : OSVER ] ;
+
+
+local rule constant ( c : os ? )
+{
+ os ?= $(.name) ;
+ # First look for a platform-specific name, then the general value.
+ local variables = .$(c)-$(os) .$(c) ;
+ local result = $($(variables)) ;
+ return $(result[1]) ;
+}
+
+rule get-constant ( os ? )
+{
+ # Find the name of the constant being accessed, which is equal to the name
+ # used to invoke us.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = [ MATCH ([^.]*)$ : $(bt[4]) ] ;
+ return [ constant $(rulename) : $(os) ] ;
+}
+
+
+# export all the common constants
+.constants = name platform version shared-library-path-variable path-separator executable-path-variable executable-suffix ;
+for local constant in $(.constants)
+{
+ IMPORT $(__name__) : get-constant : $(__name__) : $(constant) ;
+}
+EXPORT $(__name__) : $(.constants) ;
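+
+# As an illustration of the mechanism above, [ os.shared-library-path-variable ]
+# resolves through get-constant and, given the platform-specific and default
+# values defined below, would return "PATH" on NT and "LD_LIBRARY_PATH" on a
+# typical Linux system.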
+
+.executable-path-variable-NT = PATH ;
+# On Windows the case and capitalization of PATH is not always predictable, so
+# let's find out what variable name was really set.
+if $(.name) = NT
+{
+ for local n in [ VARNAMES .ENVIRON ]
+ {
+ if $(n:L) = path
+ {
+ .executable-path-variable-NT = $(n) ;
+ }
+ }
+}
+
+# Specific constants for various platforms. There's no need to define any
+# constant whose value would be the same as the default, below.
+.shared-library-path-variable-NT = $(.executable-path-variable-NT) ;
+.path-separator-NT = ";" ;
+.expand-variable-prefix-NT = % ;
+.expand-variable-suffix-NT = % ;
+.executable-suffix-NT = .exe ;
+
+.shared-library-path-variable-CYGWIN = PATH ;
+
+.shared-library-path-variable-MACOSX = DYLD_LIBRARY_PATH ;
+
+.shared-library-path-variable-AIX = LIBPATH ;
+
+# Default constants
+.shared-library-path-variable = LD_LIBRARY_PATH ;
+.path-separator = ":" ;
+.expand-variable-prefix = $ ;
+.expand-variable-suffix = "" ;
+.executable-path-variable = PATH ;
+.executable-suffix = "" ;
+
+
+# Return a list of the directories in the PATH. Yes, that information is (sort
+# of) available in the global module, but jam code can change those values, and
+# it isn't always clear what case/capitalization to use when looking. This rule
+# is a more reliable way to get there.
+rule executable-path ( )
+{
+ return [ string.words [ environ [ constant executable-path-variable ] ]
+ : [ constant path-separator ] ] ;
+}
+
+
+# Initialize the list of home directories for the current user depending on the
+# OS.
+if $(.name) = NT
+{
+ local home = [ environ HOMEDRIVE HOMEPATH ] ;
+ .home-directories = $(home[1])$(home[2]) [ environ HOME ] [ environ USERPROFILE ] ;
+}
+else
+{
+ .home-directories = [ environ HOME ] ;
+}
+
+
+# Can't use 'constant' mechanism because it only returns 1-element values.
+rule home-directories ( )
+{
+ return $(.home-directories) ;
+}
+
+
+# Return the string needed to represent the expansion of the named shell
+# variable.
+rule expand-variable ( variable )
+{
+ local prefix = [ constant expand-variable-prefix ] ;
+ local suffix = [ constant expand-variable-suffix ] ;
+ return $(prefix)$(variable)$(suffix) ;
+}
+
+
+# Returns true if running on Windows, whether in Cygwin or not.
+rule on-windows ( )
+{
+ local result ;
+ if [ modules.peek : NT ]
+ {
+ result = true ;
+ }
+ else if [ modules.peek : UNIX ]
+ {
+ switch [ modules.peek : JAMUNAME ]
+ {
+ case CYGWIN* :
+ {
+ result = true ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+if ! [ on-windows ]
+{
+ .on-unix = 1 ;
+}
+
+
+rule on-unix
+{
+ return $(.on-unix) ;
+}
+
+
+rule __test__
+{
+ import assert ;
+ if ! ( --quiet in [ modules.peek : ARGV ] )
+ {
+ ECHO os: name= [ name ] ;
+ ECHO os: version= [ version ] ;
+ }
+ assert.true name ;
+}
diff --git a/jam-files/boost-build/util/path.jam b/jam-files/boost-build/util/path.jam
new file mode 100644
index 000000000..ea26b816b
--- /dev/null
+++ b/jam-files/boost-build/util/path.jam
@@ -0,0 +1,934 @@
+# Copyright Vladimir Prus 2002-2006.
+# Copyright Dave Abrahams 2003-2004.
+# Copyright Rene Rivera 2003-2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Performs various path manipulations. Paths are always in a 'normalized'
+# representation. In it, a path may be either:
+#
+# - '.', or
+#
+# - ['/'] [ ( '..' '/' )* (token '/')* token ]
+#
+# In plain English, a path may be rooted, '..' elements are allowed only at
+# the beginning, and it never ends in a slash, except for the path consisting
+# of a slash only.
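+#
+# For example, "foo/bar/.." normalizes to "foo", a trailing slash as in "foo/"
+# is dropped, and a rooted native Windows path such as "D:\My Documents" is
+# represented as "/D:/My Documents" in this notation (see __test__ below).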
+
+import errors ;
+import modules ;
+import regex ;
+import sequence ;
+import set ;
+import version ;
+
+
+os = [ modules.peek : OS ] ;
+if [ modules.peek : UNIX ]
+{
+ local uname = [ modules.peek : JAMUNAME ] ;
+ switch $(uname)
+ {
+ case CYGWIN* : os = CYGWIN ;
+ case * : os = UNIX ;
+ }
+}
+
+
+# Converts the native path into normalized form.
+#
+rule make ( native )
+{
+ return [ make-$(os) $(native) ] ;
+}
+
+
+# Builds native representation of the path.
+#
+rule native ( path )
+{
+ return [ native-$(os) $(path) ] ;
+}
+
+
+# Tests if a path is rooted.
+#
+rule is-rooted ( path )
+{
+ return [ MATCH "^(/)" : $(path) ] ;
+}
+
+
+# Tests if a path has a parent.
+#
+rule has-parent ( path )
+{
+ if $(path) != /
+ {
+ return 1 ;
+ }
+ else
+ {
+ return ;
+ }
+}
+
+
+# Returns the path without any directory components.
+#
+rule basename ( path )
+{
+ return [ MATCH "([^/]+)$" : $(path) ] ;
+}
+
+
+# Returns the parent directory of the path. If no parent exists, an error is
+# issued.
+#
+rule parent ( path )
+{
+ if [ has-parent $(path) ]
+ {
+ if $(path) = .
+ {
+ return .. ;
+ }
+ else
+ {
+ # Strip everything at the end of path up to and including the last
+ # slash.
+ local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
+
+ # Did we strip what we shouldn't?
+ if $(result[2]) = ".."
+ {
+ return $(path)/.. ;
+ }
+ else
+ {
+ if ! $(result[1])
+ {
+ if [ is-rooted $(path) ]
+ {
+ result = / ;
+ }
+ else
+ {
+ result = . ;
+ }
+ }
+ return $(result[1]) ;
+ }
+ }
+ }
+ else
+ {
+ errors.error "Path '$(path)' has no parent" ;
+ }
+}
+
+
+# Returns path2 such that "[ join path path2 ] = .". The path may not contain
+# ".." element or be rooted.
+#
+rule reverse ( path )
+{
+ if $(path) = .
+ {
+ return $(path) ;
+ }
+ else
+ {
+ local tokens = [ regex.split $(path) "/" ] ;
+ local tokens2 ;
+ for local i in $(tokens)
+ {
+ tokens2 += .. ;
+ }
+ return [ sequence.join $(tokens2) : "/" ] ;
+ }
+}
+
+
+# Concatenates the passed path elements. Generates an error if any element other
+# than the first one is rooted. Skips any empty or undefined path elements.
+#
+rule join ( elements + )
+{
+ if ! $(elements[2-])
+ {
+ return $(elements[1]) ;
+ }
+ else
+ {
+ for local e in $(elements[2-])
+ {
+ if [ is-rooted $(e) ]
+ {
+ errors.error only the first element may be rooted ;
+ }
+ }
+ if [ version.check-jam-version 3 1 17 ]
+ {
+ return [ NORMALIZE_PATH "$(elements)" ] ;
+ }
+ else
+ {
+ # Boost Jam prior to version 3.1.17 had problems with its
+ # NORMALIZE_PATH rule in case you passed it a leading backslash
+ # instead of a slash, in some cases when you sent it an empty
+ # initial path element and possibly some others. At least some of
+ # those cases were being hit and relied upon when calling this rule
+ # from the path.make-NT rule.
+ if ! $(elements[1]) && $(elements[2])
+ {
+ return [ NORMALIZE_PATH "/" "$(elements[2-])" ] ;
+ }
+ else
+ {
+ return [ NORMALIZE_PATH "$(elements)" ] ;
+ }
+ }
+ }
+}
+
+
+# If 'path' is relative, it is rooted at 'root'. Otherwise, it is unchanged.
+#
+rule root ( path root )
+{
+ if [ is-rooted $(path) ]
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return [ join $(root) $(path) ] ;
+ }
+}
+
+
+# Returns the current working directory.
+#
+rule pwd ( )
+{
+ if ! $(.pwd)
+ {
+ .pwd = [ make [ PWD ] ] ;
+ }
+ return $(.pwd) ;
+}
+
+
+# Returns the list of files matching the given pattern in the specified
+# directory. Both directories and patterns are supplied as portable paths. Each
+# pattern should be a non-absolute path and can not contain "." or ".."
+# elements. Each slash-separated element of a pattern can contain the
+# following special characters:
+# - '?', which matches any single character
+# - '*', which matches an arbitrary number of characters.
+# A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and
+# only if e1 matches p1, e2 matches p2 and so on.
+#
+# For example:
+# [ glob . : *.cpp ]
+# [ glob . : */build/Jamfile ]
+#
+rule glob ( dirs * : patterns + : exclude-patterns * )
+{
+ local result ;
+ local real-patterns ;
+ local real-exclude-patterns ;
+ for local d in $(dirs)
+ {
+ for local p in $(patterns)
+ {
+ local pattern = [ path.root $(p) $(d) ] ;
+ real-patterns += [ path.native $(pattern) ] ;
+ }
+
+ for local p in $(exclude-patterns)
+ {
+ local pattern = [ path.root $(p) $(d) ] ;
+ real-exclude-patterns += [ path.native $(pattern) ] ;
+ }
+ }
+
+ local inc = [ GLOB-RECURSIVELY $(real-patterns) ] ;
+ inc = [ sequence.transform NORMALIZE_PATH : $(inc) ] ;
+ local exc = [ GLOB-RECURSIVELY $(real-exclude-patterns) ] ;
+ exc = [ sequence.transform NORMALIZE_PATH : $(exc) ] ;
+
+ return [ sequence.transform path.make : [ set.difference $(inc) : $(exc) ] ]
+ ;
+}
+
+
+# Recursive version of GLOB. Builds the glob of files while also searching in
+# the subdirectories of the given roots. An optional set of exclusion patterns
+# will filter out the matching entries from the result. The exclusions also
+# apply to the subdirectory scanning, such that directories that match the
+# exclusion patterns will not be searched.
+#
+rule glob-tree ( roots * : patterns + : exclude-patterns * )
+{
+ return [ sequence.transform path.make : [ .glob-tree [ sequence.transform
+ path.native : $(roots) ] : $(patterns) : $(exclude-patterns) ] ] ;
+}
+
+
+local rule .glob-tree ( roots * : patterns * : exclude-patterns * )
+{
+ local excluded ;
+ if $(exclude-patterns)
+ {
+ excluded = [ GLOB $(roots) : $(exclude-patterns) ] ;
+ }
+ local result = [ set.difference [ GLOB $(roots) : $(patterns) ] :
+ $(excluded) ] ;
+ local subdirs ;
+ for local d in [ set.difference [ GLOB $(roots) : * ] : $(excluded) ]
+ {
+ if ! ( $(d:D=) in . .. ) && ! [ CHECK_IF_FILE $(d) ]
+ {
+ subdirs += $(d) ;
+ }
+ }
+ if $(subdirs)
+ {
+ result += [ .glob-tree $(subdirs) : $(patterns) : $(exclude-patterns) ]
+ ;
+ }
+ return $(result) ;
+}
+
+
+# Returns true if the specified file exists.
+#
+rule exists ( file )
+{
+ return [ path.glob $(file:D) : $(file:D=) ] ;
+}
+NATIVE_RULE path : exists ;
+
+
+# Finds out the absolute name of the path and returns the list of all its
+# parents, starting with the immediate one. Parents are returned as relative
+# names. If 'upper_limit' is specified, directories above it will be pruned.
+#
+rule all-parents ( path : upper_limit ? : cwd ? )
+{
+ cwd ?= [ pwd ] ;
+ local path_ele = [ regex.split [ root $(path) $(cwd) ] "/" ] ;
+
+ if ! $(upper_limit)
+ {
+ upper_limit = / ;
+ }
+ local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] "/" ] ;
+
+ # Leave only elements in 'path_ele' below 'upper_ele'.
+ while $(path_ele) && ( $(upper_ele[1]) = $(path_ele[1]) )
+ {
+ upper_ele = $(upper_ele[2-]) ;
+ path_ele = $(path_ele[2-]) ;
+ }
+
+ # Have all upper elements been removed ?
+ if $(upper_ele)
+ {
+ errors.error "$(upper_limit) is not prefix of $(path)" ;
+ }
+
+ # Create the relative paths to parents, number of elements in 'path_ele'.
+ local result ;
+ for local i in $(path_ele)
+ {
+ path = [ parent $(path) ] ;
+ result += $(path) ;
+ }
+ return $(result) ;
+}
+
+
+# Searches for 'patterns' in the parent directories of 'dir', up to and
+# including 'upper_limit' if it is specified, or up to the filesystem root
+# otherwise.
+#
+rule glob-in-parents ( dir : patterns + : upper-limit ? )
+{
+ local result ;
+ local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ;
+
+ while $(parent-dirs) && ! $(result)
+ {
+ result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
+ parent-dirs = $(parent-dirs[2-]) ;
+ }
+ return $(result) ;
+}
+
+
+# Assuming 'child' is a subdirectory of 'parent', return the relative path from
+# 'parent' to 'child'.
+#
+rule relative ( child parent : no-error ? )
+{
+ local not-a-child ;
+ if $(parent) = "."
+ {
+ return $(child) ;
+ }
+ else
+ {
+ local split1 = [ regex.split $(parent) / ] ;
+ local split2 = [ regex.split $(child) / ] ;
+
+ while $(split1)
+ {
+ if $(split1[1]) = $(split2[1])
+ {
+ split1 = $(split1[2-]) ;
+ split2 = $(split2[2-]) ;
+ }
+ else
+ {
+ not-a-child = true ;
+ split1 = ;
+ }
+ }
+ if $(split2)
+ {
+ if $(not-a-child)
+ {
+ if $(no-error)
+ {
+ return not-a-child ;
+ }
+ else
+ {
+ errors.error $(child) is not a subdir of $(parent) ;
+ }
+ }
+ else
+ {
+ return [ join $(split2) ] ;
+ }
+ }
+ else
+ {
+ return "." ;
+ }
+ }
+}
+
+
+# Returns the minimal path to path2 that is relative to path1.
+#
+rule relative-to ( path1 path2 )
+{
+ local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
+ local split1 = [ regex.split $(path1) / ] ;
+ local split2 = [ regex.split $(path2) / ] ;
+
+ while $(split1) && $(root_1)
+ {
+ if $(split1[1]) = $(split2[1])
+ {
+ root_1 = $(root_1[2-]) ;
+ split1 = $(split1[2-]) ;
+ split2 = $(split2[2-]) ;
+ }
+ else
+ {
+ split1 = ;
+ }
+ }
+ return [ join . $(root_1) $(split2) ] ;
+}
+
+
+# Returns the list of paths which are used by the operating system for looking
+# up programs.
+#
+rule programs-path ( )
+{
+ local result ;
+ local raw = [ modules.peek : PATH Path path ] ;
+ for local p in $(raw)
+ {
+ if $(p)
+ {
+ result += [ path.make $(p) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+rule makedirs ( path )
+{
+ local result = true ;
+ local native = [ native $(path) ] ;
+ if ! [ exists $(native) ]
+ {
+ if [ makedirs [ parent $(path) ] ]
+ {
+ if ! [ MAKEDIR $(native) ]
+ {
+ errors.error "Could not create directory '$(path)'" ;
+ result = ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+# Converts native Windows paths into our internal canonical path representation.
+# Supports 'invalid' paths containing multiple successive path separator
+# characters.
+#
+# TODO: Check and if needed add support for Windows 'X:file' path format where
+# the file is located in the current folder on drive X.
+#
+rule make-NT ( native )
+{
+ local result ;
+
+ if [ version.check-jam-version 3 1 17 ]
+ {
+ result = [ NORMALIZE_PATH $(native) ] ;
+ }
+ else
+ {
+ # This old implementation is really fragile due to the not-so-clear way
+ # the NORMALIZE_PATH rule worked in Boost.Jam versions prior to 3.1.17.
+ # E.g. path.join would mostly ignore empty path elements but would root
+ # the joined path in case the initial two path elements were empty, or
+ # exhibit some similar accidental weirdness.
+ result = [ path.join [ regex.split $(native) "[/\\]" ] ] ;
+ }
+
+ # We need to add an extra '/' in front in case this is a rooted Windows path
+ # starting with a drive letter and not a path separator character since the
+ # builtin NORMALIZE_PATH rule has no knowledge of this leading drive letter
+ # and treats it as a regular folder name.
+ if [ regex.match "(^.:)" : $(native) ]
+ {
+ result = /$(result) ;
+ }
+
+ return $(result) ;
+}
+
+
+rule native-NT ( path )
+{
+ local result ;
+ if [ is-rooted $(path) ] && ! [ regex.match "^/(.:)" : $(path) ]
+ {
+ result = $(path) ;
+ }
+ else
+ {
+ result = [ MATCH "^/?(.*)" : $(path) ] ;
+ }
+ result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
+ return $(result) ;
+}
+
+
+rule make-UNIX ( native )
+{
+ # VP: I have no idea how 'native' can be empty here! But it can!
+ if ! $(native)
+ {
+ errors.error "Empty path passed to 'make-UNIX'" ;
+ }
+ else
+ {
+ return [ NORMALIZE_PATH $(native:T) ] ;
+ }
+}
+
+
+rule native-UNIX ( path )
+{
+ return $(path) ;
+}
+
+
+rule make-CYGWIN ( path )
+{
+ return [ make-NT $(path) ] ;
+}
+
+
+rule native-CYGWIN ( path )
+{
+ local result = $(path) ;
+ if [ regex.match "(^/.:)" : $(path) ] # Windows absolute path.
+ {
+ result = [ MATCH "^/?(.*)" : $(path) ] ; # Remove leading '/'.
+ }
+ return [ native-UNIX $(result) ] ;
+}
+
+
+# split-path-VMS: splits input native path into device dir file (each part is
+# optional).
+#
+# example:
+#
+# dev:[dir]file.c => dev: [dir] file.c
+#
+rule split-path-VMS ( native )
+{
+ local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
+ local device = $(matches[1]) ;
+ local dir = $(matches[2]) ;
+ local file = $(matches[3]) ;
+
+ return $(device) $(dir) $(file) ;
+}
+
+
+# Converts a native VMS path into a portable path spec.
+#
+# Does not handle current-device absolute paths such as "[dir]File.c" as it is
+# not clear how to represent them in the portable path notation.
+#
+# Adds a trailing dot (".") to the file part if no extension is present (helps
+# when converting it back into native path).
+#
+rule make-VMS ( native )
+{
+ if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
+ {
+ errors.error "Can't handle default-device absolute paths: " $(native) ;
+ }
+
+ local parts = [ split-path-VMS $(native) ] ;
+ local device = $(parts[1]) ;
+ local dir = $(parts[2]) ;
+ local file = $(parts[3]) ;
+ local elems ;
+
+ if $(device)
+ {
+ #
+ # rooted
+ #
+ elems = /$(device) ;
+ }
+
+ if $(dir) = "[]"
+ {
+ #
+ # Special case: current directory
+ #
+ elems = $(elems) "." ;
+ }
+ else if $(dir)
+ {
+ dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
+ local dir_parts = [ regex.split $(dir) \\. ] ;
+
+ if $(dir_parts[1]) = ""
+ {
+ #
+ # Relative path
+ #
+ dir_parts = $(dir_parts[2--1]) ;
+ }
+
+ #
+ # replace "parent-directory" parts (- => ..)
+ #
+ dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
+
+ elems = $(elems) $(dir_parts) ;
+ }
+
+ if $(file)
+ {
+ if ! [ MATCH (\\.) : $(file) ]
+ {
+ #
+ # Always add "." to end of non-extension file.
+ #
+ file = $(file). ;
+ }
+ elems = $(elems) $(file) ;
+ }
+
+ local portable = [ path.join $(elems) ] ;
+
+ return $(portable) ;
+}
+
+
+# Converts a portable path spec into a native VMS path.
+#
+# Relies on having at least one dot (".") included in the file name to be able
+# to differentiate it from the directory part.
+#
+rule native-VMS ( path )
+{
+ local device = "" ;
+ local dir = $(path) ;
+ local file = "" ;
+ local native ;
+ local split ;
+
+ #
+ # Has device ?
+ #
+ if [ is-rooted $(dir) ]
+ {
+ split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
+ device = $(split[1]) ;
+ dir = $(split[2]) ;
+ }
+
+ #
+ # Has file ?
+ #
+ # This is no exact science, just guess work:
+ #
+ # If the last part of the current path spec
+ # includes some chars, followed by a dot,
+ # optionally followed by more chars -
+ # then it is a file (keep your fingers crossed).
+ #
+ split = [ regex.split $(dir) / ] ;
+ local maybe_file = $(split[-1]) ;
+
+ if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
+ {
+ file = $(maybe_file) ;
+ dir = [ sequence.join $(split[1--2]) : / ] ;
+ }
+
+ #
+ # Has dir spec ?
+ #
+ if $(dir) = "."
+ {
+ dir = "[]" ;
+ }
+ else if $(dir)
+ {
+ dir = [ regex.replace $(dir) \\.\\. - ] ;
+ dir = [ regex.replace $(dir) / . ] ;
+
+ if $(device) = ""
+ {
+ #
+ # Relative directory
+ #
+ dir = "."$(dir) ;
+ }
+ dir = "["$(dir)"]" ;
+ }
+
+ native = [ sequence.join $(device) $(dir) $(file) ] ;
+
+ return $(native) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import errors : try catch ;
+
+ assert.true is-rooted "/" ;
+ assert.true is-rooted "/foo" ;
+ assert.true is-rooted "/foo/bar" ;
+ assert.result : is-rooted "." ;
+ assert.result : is-rooted "foo" ;
+ assert.result : is-rooted "foo/bar" ;
+
+ assert.true has-parent "foo" ;
+ assert.true has-parent "foo/bar" ;
+ assert.true has-parent "." ;
+ assert.result : has-parent "/" ;
+
+ assert.result "." : basename "." ;
+ assert.result ".." : basename ".." ;
+ assert.result "foo" : basename "foo" ;
+ assert.result "foo" : basename "bar/foo" ;
+ assert.result "foo" : basename "gaz/bar/foo" ;
+ assert.result "foo" : basename "/gaz/bar/foo" ;
+
+ assert.result "." : parent "foo" ;
+ assert.result "/" : parent "/foo" ;
+ assert.result "foo/bar" : parent "foo/bar/giz" ;
+ assert.result ".." : parent "." ;
+ assert.result ".." : parent "../foo" ;
+ assert.result "../../foo" : parent "../../foo/bar" ;
+
+ assert.result "." : reverse "." ;
+ assert.result ".." : reverse "foo" ;
+ assert.result "../../.." : reverse "foo/bar/giz" ;
+
+ assert.result "foo" : join "foo" ;
+ assert.result "/foo" : join "/" "foo" ;
+ assert.result "foo/bar" : join "foo" "bar" ;
+ assert.result "foo/bar" : join "foo/giz" "../bar" ;
+ assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
+ assert.result ".." : join "." ".." ;
+ assert.result ".." : join "foo" "../.." ;
+ assert.result "../.." : join "../foo" "../.." ;
+ assert.result "/foo" : join "/bar" "../foo" ;
+ assert.result "foo/giz" : join "foo/giz" "." ;
+ assert.result "." : join lib2 ".." ;
+ assert.result "/" : join "/a" ".." ;
+
+ assert.result /a/b : join /a/b/c .. ;
+
+ assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
+ assert.result "giz" : join "foo" ".." "giz" ;
+ assert.result "foo/giz" : join "foo" "." "giz" ;
+
+ try ;
+ {
+ join "a" "/b" ;
+ }
+ catch only first element may be rooted ;
+
+ local CWD = "/home/ghost/build" ;
+ assert.result : all-parents . : . : $(CWD) ;
+ assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
+ assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
+ assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
+
+ local CWD = "/home/ghost" ;
+ assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
+ assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
+
+ assert.result "c/d" : relative "a/b/c/d" "a/b" ;
+ assert.result "foo" : relative "foo" "." ;
+
+ local save-os = [ modules.peek path : os ] ;
+ modules.poke path : os : NT ;
+
+ assert.result "foo/bar/giz" : make "foo/bar/giz" ;
+ assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
+ assert.result "foo" : make "foo/" ;
+ assert.result "foo" : make "foo\\" ;
+ assert.result "foo" : make "foo/." ;
+ assert.result "foo" : make "foo/bar/.." ;
+ assert.result "foo" : make "foo/bar/../" ;
+ assert.result "foo" : make "foo/bar/..\\" ;
+ assert.result "foo/bar" : make "foo/././././bar" ;
+ assert.result "/foo" : make "\\foo" ;
+ assert.result "/D:/My Documents" : make "D:\\My Documents" ;
+ assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
+
+ # Test processing 'invalid' paths containing multiple successive path
+ # separators.
+ assert.result "foo" : make "foo//" ;
+ assert.result "foo" : make "foo///" ;
+ assert.result "foo" : make "foo\\\\" ;
+ assert.result "foo" : make "foo\\\\\\" ;
+ assert.result "/foo" : make "//foo" ;
+ assert.result "/foo" : make "///foo" ;
+ assert.result "/foo" : make "\\\\foo" ;
+ assert.result "/foo" : make "\\\\\\foo" ;
+ assert.result "/foo" : make "\\/\\/foo" ;
+ assert.result "foo/bar" : make "foo//\\//\\\\bar//\\//\\\\\\//\\//\\\\" ;
+ assert.result "foo" : make "foo/bar//.." ;
+ assert.result "foo/bar" : make "foo/bar/giz//.." ;
+ assert.result "foo/giz" : make "foo//\\//\\\\bar///\\\\//\\\\////\\/..///giz\\//\\\\\\//\\//\\\\" ;
+ assert.result "../../../foo" : make "..///.//..///.//..////foo///" ;
+
+ # Test processing 'invalid' rooted paths with too many '..' path elements
+ # that would place them before the root.
+ assert.result : make "/.." ;
+ assert.result : make "/../" ;
+ assert.result : make "/../." ;
+ assert.result : make "/.././" ;
+ assert.result : make "/foo/../bar/giz/.././././../../." ;
+ assert.result : make "/foo/../bar/giz/.././././../.././" ;
+ assert.result : make "//foo/../bar/giz/.././././../../." ;
+ assert.result : make "//foo/../bar/giz/.././././../.././" ;
+ assert.result : make "\\\\foo/../bar/giz/.././././../../." ;
+ assert.result : make "\\\\foo/../bar/giz/.././././../.././" ;
+ assert.result : make "/..///.//..///.//..////foo///" ;
+
+ assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
+ assert.result "foo" : native "foo" ;
+ assert.result "\\foo" : native "/foo" ;
+ assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
+
+ modules.poke path : os : UNIX ;
+
+ assert.result "foo/bar/giz" : make "foo/bar/giz" ;
+ assert.result "/sub1" : make "/sub1/." ;
+ assert.result "/sub1" : make "/sub1/sub2/.." ;
+ assert.result "sub1" : make "sub1/." ;
+ assert.result "sub1" : make "sub1/sub2/.." ;
+ assert.result "/foo/bar" : native "/foo/bar" ;
+
+ modules.poke path : os : VMS ;
+
+ #
+ # Don't really need to poke os before these
+ #
+ assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
+ assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
+ assert.result "disk:" "" "" : split-path-VMS "disk:" ;
+ assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
+ assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
+ assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
+ assert.result "" "" "file" : split-path-VMS "file" ;
+ assert.result "" "" "" : split-path-VMS "" ;
+
+ #
+ # Special case: current directory
+ #
+ assert.result "" "[]" "" : split-path-VMS "[]" ;
+ assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
+ assert.result "" "[]" "file" : split-path-VMS "[]file" ;
+ assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
+
+ #
+ # Make portable paths
+ #
+ assert.result "/disk:" : make "disk:" ;
+ assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
+ assert.result "foo" : make "[.foo]" ;
+ assert.result "foo" : make "[.foo.bar.-]" ;
+ assert.result ".." : make "[.-]" ;
+ assert.result ".." : make "[-]" ;
+ assert.result "." : make "[]" ;
+ assert.result "giz.h" : make "giz.h" ;
+ assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
+ assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
+ assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
+
+ #
+ # Special case (adds '.' to end of file w/o extension to
+ # disambiguate from directory in portable path spec).
+ #
+ assert.result "Jamfile." : make "Jamfile" ;
+ assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
+ assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
+
+ #
+ # Make native paths
+ #
+ assert.result "disk:" : native "/disk:" ;
+ assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
+ assert.result "[.foo]" : native "foo" ;
+ assert.result "[.-]" : native ".." ;
+ assert.result "[.foo.-]" : native "foo/.." ;
+ assert.result "[]" : native "." ;
+ assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
+ assert.result "giz.h" : native "giz.h" ;
+ assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
+ assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
+
+ modules.poke path : os : $(save-os) ;
+}
diff --git a/jam-files/boost-build/util/print.jam b/jam-files/boost-build/util/print.jam
new file mode 100644
index 000000000..708d21aba
--- /dev/null
+++ b/jam-files/boost-build/util/print.jam
@@ -0,0 +1,488 @@
+# Copyright 2003 Douglas Gregor
+# Copyright 2002, 2003, 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Utilities for generating format-independent output. Using these helps to
+# generate documentation in, at a minimum, plain/console and html formats.
+
+import modules ;
+import numbers ;
+import string ;
+import regex ;
+import "class" ;
+import scanner ;
+import path ;
+
+# The current output target. Defaults to console.
+output-target = console ;
+
+# The current output type. Defaults to plain. Other possible values are "html".
+output-type = plain ;
+
+# Whitespace.
+.whitespace = [ string.whitespace ] ;
+
+
+# Set the target and type of output to generate. This sets both the destination
+# output and the type of docs to generate to that output. The target can be
+# either a file or "console" for echoing to the console. If the type of output
+# is not specified it defaults to plain text.
+#
+rule output (
+ target # The target file or device; file or "console".
+ type ? # The type of output; "plain" or "html".
+)
+{
+ type ?= plain ;
+ if $(output-target) != $(target)
+ {
+ output-target = $(target) ;
+ output-type = $(type) ;
+ if $(output-type) = html
+ {
+ text
+ "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">"
+ "<html>"
+ "<head>"
+ "</head>"
+ "<body link=\"#0000ff\" vlink=\"#800080\">"
+ : true
+ : prefix ;
+ text
+ "</body>"
+ "</html>"
+ :
+ : suffix ;
+ }
+ }
+}
+
+
+# Generate a section with a description. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule section (
+ name # The name of the section.
+ description * # A number of description lines.
+)
+{
+ if $(output-type) = plain
+ {
+ lines [ split-at-words $(name): ] ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ name = [ escape-html $(name) ] ;
+ text <h3>$(name)</h3> <p> ;
+ }
+ local pre = ;
+ while $(description)
+ {
+ local paragraph = ;
+ while $(description) && [ string.is-whitespace $(description[1]) ] { description = $(description[2-]) ; }
+ if $(pre)
+ {
+ while $(description) && (
+ $(pre) = " $(description[1])" ||
+ ( $(pre) < [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(description[1])" ] ] )
+ )
+ { paragraph += $(description[1]) ; description = $(description[2-]) ; }
+ while [ string.is-whitespace $(paragraph[-1]) ] { paragraph = $(paragraph[1--2]) ; }
+ pre = ;
+ if $(output-type) = plain
+ {
+ lines $(paragraph) "" : " " " " ;
+ }
+ else if $(output-type) = html
+ {
+ text <blockquote> ;
+ lines $(paragraph) ;
+ text </blockquote> ;
+ }
+ }
+ else
+ {
+ while $(description) && ! [ string.is-whitespace $(description[1]) ]
+ { paragraph += $(description[1]) ; description = $(description[2-]) ; }
+ if $(paragraph[1]) = :: && ! $(paragraph[2])
+ {
+ pre = " " ;
+ }
+ if $(paragraph[1]) = ::
+ {
+ if $(output-type) = plain
+ {
+ lines $(paragraph[2-]) "" : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text <blockquote> ;
+ lines $(paragraph[2-]) ;
+ text </blockquote> ;
+ }
+ }
+ else
+ {
+ local p = [ MATCH "(.*)(::)$" : $(paragraph[-1]) ] ;
+ local pws = [ MATCH "([ ]*)$" : $(p[1]) ] ;
+ p = [ MATCH "(.*)($(pws))($(p[2]))$" : $(paragraph[-1]) ] ;
+ if $(p[3]) = ::
+ {
+ pre = [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(p[1])" ] ] ;
+ if ! $(p[2]) || $(p[2]) = "" { paragraph = $(paragraph[1--2]) $(p[1]): ; }
+ else { paragraph = $(paragraph[1--2]) $(p[1]) ; }
+ if $(output-type) = plain
+ {
+ lines [ split-at-words " " $(paragraph) ] : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </p> <p> [ escape-html $(paragraph) ] ;
+ }
+ }
+ else
+ {
+ if $(output-type) = plain
+ {
+ lines [ split-at-words " " $(paragraph) ] : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </p> <p> [ escape-html $(paragraph) ] ;
+ }
+ }
+ }
+ }
+ }
+ if $(output-type) = html
+ {
+ text </p> ;
+ }
+}
+
+
+# Generate the start of a list of items. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule list-start ( )
+{
+ if $(output-type) = plain
+ {
+ }
+ else if $(output-type) = html
+ {
+ text <ul> ;
+ }
+}
+
+
+# Generate an item in a list. The type of output can be controlled by the value
+# of the 'output-type' variable.
+#
+rule list-item (
+ item + # The item to list.
+)
+{
+ if $(output-type) = plain
+ {
+ lines [ split-at-words "*" $(item) ] : " " " " ;
+ }
+ else if $(output-type) = html
+ {
+ text <li> [ escape-html $(item) ] </li> ;
+ }
+}
+
+
+# Generate the end of a list of items. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule list-end ( )
+{
+ if $(output-type) = plain
+ {
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </ul> ;
+ }
+}
+
+
+# Split the given text into separate lines, word-wrapping to a margin. The
+# default margin is 78 characters.
+#
+rule split-at-words (
+ text + # The text to split.
+ : margin ? # An optional margin, default is 78.
+)
+{
+ local lines = ;
+ text = [ string.words $(text:J=" ") ] ;
+ text = $(text:J=" ") ;
+ margin ?= 78 ;
+ local char-match-1 = ".?" ;
+ local char-match = "" ;
+ while $(margin) != 0
+ {
+ char-match = $(char-match)$(char-match-1) ;
+ margin = [ numbers.decrement $(margin) ] ;
+ }
+ while $(text)
+ {
+ local s = "" ;
+ local t = "" ;
+ # divide s into the first X characters and the rest
+ s = [ MATCH "^($(char-match))(.*)" : $(text) ] ;
+
+ if $(s[2])
+ {
+ # split the first half at a space
+ t = [ MATCH "^(.*)[\\ ]([^\\ ]*)$" : $(s[1]) ] ;
+ }
+ else
+ {
+ t = $(s) ;
+ }
+
+ if ! $(t[2])
+ {
+ t += "" ;
+ }
+
+ text = $(t[2])$(s[2]) ;
+ lines += $(t[1]) ;
+ }
+ return $(lines) ;
+}
+
+
+# Generate a set of fixed lines. Each single item passed in is output on a
+# separate line. For console this just echoes each line, but for html this will
+# split them with <br>.
+#
+rule lines (
+ text * # The lines of text.
+ : indent ? # Optional indentation prepended to each line after the first one.
+ outdent ? # Optional indentation to prepend to the first line.
+)
+{
+ text ?= "" ;
+ indent ?= "" ;
+ outdent ?= "" ;
+ if $(output-type) = plain
+ {
+ text $(outdent)$(text[1]) $(indent)$(text[2-]) ;
+ }
+ else if $(output-type) = html
+ {
+ local indent-chars = [ string.chars $(indent) ] ;
+ indent = "" ;
+ for local c in $(indent-chars)
+ {
+ if $(c) = " " { c = "&nbsp;" ; }
+ else if $(c) = "	" { c = "&nbsp;&nbsp;&nbsp;&nbsp;" ; }
+ indent = $(indent)$(c) ;
+ }
+ local html-text = [ escape-html $(text) : "&nbsp;" ] ;
+ text $(html-text[1])<br> $(indent)$(html-text[2-])<br> ;
+ }
+}
+
+
+# Output text directly to the current target. When doing output to a file, one
+# can indicate whether the text should go into the "prefix", the "body"
+# (default), or the "suffix" of the file. This is independent of the actual
+# execution order of the text rule. This rule invokes a singular action, one
+# action only once, which does the build of the file. Therefore actions on the
+# target outside of this rule will happen entirely before and/or after all
+# output using this rule.
+#
+rule text (
+ strings * # The strings of text to output.
+ : overwrite ? # true to overwrite the output (if it is a file)
+ : prefix-body-suffix ? # Indication to output prefix, body, or suffix (for a file).
+)
+{
+ prefix-body-suffix ?= body ;
+ if $(output-target) = console
+ {
+ if ! $(strings)
+ {
+ ECHO ;
+ }
+ else
+ {
+ for local s in $(strings)
+ {
+ ECHO $(s) ;
+ }
+ }
+ }
+ if ! $($(output-target).did-action)
+ {
+ $(output-target).did-action = yes ;
+ $(output-target).text-prefix = ;
+ $(output-target).text-body = ;
+ $(output-target).text-suffix = ;
+
+ nl on $(output-target) = "
+" ;
+ text-redirect on $(output-target) = ">>" ;
+ if $(overwrite)
+ {
+ text-redirect on $(output-target) = ">" ;
+ }
+ text-content on $(output-target) = ;
+
+ text-action $(output-target) ;
+
+ if $(overwrite) && $(output-target) != console
+ {
+ check-for-update $(output-target) ;
+ }
+ }
+ $(output-target).text-$(prefix-body-suffix) += $(strings) ;
+ text-content on $(output-target) =
+ $($(output-target).text-prefix)
+ $($(output-target).text-body)
+ $($(output-target).text-suffix) ;
+}
+
+
+# Outputs the text to the current targets, after word-wrapping it.
+#
+rule wrapped-text ( text + )
+{
+ local lines = [ split-at-words $(text) ] ;
+ text $(lines) ;
+}
+
+
+# Escapes text into html/xml printable equivalents. Does not know about tags,
+# so any tags fed into this will also be escaped. Currently escapes "&", "<",
+# ">", and optionally spaces.
+#
+rule escape-html (
+ text + # The text to escape.
+ : space ? # What to replace spaces with, defaults to " ".
+)
+{
+ local html-text = ;
+ while $(text)
+ {
+ local html = $(text[1]) ;
+ text = $(text[2-]) ;
+ html = [ regex.replace $(html) "&" "&amp;" ] ;
+ html = [ regex.replace $(html) "<" "&lt;" ] ;
+ html = [ regex.replace $(html) ">" "&gt;" ] ;
+ if $(space)
+ {
+ html = [ regex.replace $(html) " " "$(space)" ] ;
+ }
+ html-text += $(html) ;
+ }
+ return $(html-text) ;
+}
+
+
+# Outputs the text strings collected by the text rule to the output file.
+#
+actions quietly text-action
+{
+ @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) "$(<)"
+}
+
+
+rule get-scanner ( )
+{
+ if ! $(.scanner)
+ {
+ .scanner = [ class.new print-scanner ] ;
+ }
+ return $(.scanner) ;
+}
+
+
+# The following code to update print targets when their contents
+# change is a horrible hack. It basically creates a target which
+# binds to this file (print.jam) and installs a scanner on it
+# which reads the target and compares its contents to the new
+# contents that we're writing.
+#
+rule check-for-update ( target )
+{
+ local scanner = [ get-scanner ] ;
+ local file = [ path.native [ modules.binding $(__name__) ] ] ;
+ local g = [ MATCH <(.*)> : $(target:G) ] ;
+ local dependency-target = $(__file__:G=$(g:E=)-$(target:G=)-$(scanner)) ;
+ DEPENDS $(target) : $(dependency-target) ;
+ SEARCH on $(dependency-target) = $(file:D) ;
+ ISFILE $(dependency-target) ;
+ NOUPDATE $(dependency-target) ;
+ base on $(dependency-target) = $(target) ;
+ scanner.install $(scanner) : $(dependency-target) none ;
+ return $(dependency-target) ;
+}
+
+
+class print-scanner : scanner
+{
+ import path ;
+ import os ;
+
+ rule pattern ( )
+ {
+ return "(One match...)" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local base = [ on $(target) return $(base) ] ;
+ local nl = [ on $(base) return $(nl) ] ;
+ local text-content = [ on $(base) return $(text-content) ] ;
+ local dir = [ on $(base) return $(LOCATE) ] ;
+ if $(dir)
+ {
+ dir = [ path.make $(dir) ] ;
+ }
+ local file = [ path.native [ path.join $(dir) $(base:G=) ] ] ;
+ local actual-content ;
+ if [ os.name ] = NT
+ {
+ actual-content = [ SHELL "type \"$(file)\" 2>nul" ] ;
+ }
+ else
+ {
+ actual-content = [ SHELL "cat \"$(file)\" 2>/dev/null" ] ;
+ }
+ if $(text-content:J=$(nl)) != $(actual-content)
+ {
+ ALWAYS $(base) ;
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result one two three : split-at-words one two three : 5 ;
+ assert.result "one two" three : split-at-words one two three : 8 ;
+ assert.result "one two" three : split-at-words one two three : 9 ;
+ assert.result "one two three" : split-at-words one two three ;
+
+ # VP, 2004-12-03 The following test fails for some reason, so commenting it
+ # out.
+ #assert.result "one&nbsp;two&nbsp;three" "&amp;&lt;&gt;" :
+ # escape-html "one two three" "&<>" ;
+}
diff --git a/jam-files/boost-build/util/regex.jam b/jam-files/boost-build/util/regex.jam
new file mode 100644
index 000000000..234c36f62
--- /dev/null
+++ b/jam-files/boost-build/util/regex.jam
@@ -0,0 +1,193 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Returns a list of the following substrings:
+# 1) from beginning till the first occurrence of 'separator' or till the end,
+# 2) between each occurrence of 'separator' and the next occurrence,
+# 3) from the last occurrence of 'separator' till the end.
+# If no separator is present, the result will contain only one element.
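+# For example, [ split "/a//b/c" / ] returns "" a "" b c, with empty strings
+# marking the leading and doubled separators (see __test__ below).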
+#
+
+rule split ( string separator )
+{
+ local result ;
+ local s = $(string) ;
+
+ # Break pieces off 's' until it has no separators left.
+ local match = 1 ;
+ while $(match)
+ {
+ match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ;
+ if $(match)
+ {
+ match += "" ; # in case 3rd item was empty - works around MATCH bug
+ result = $(match[3]) $(result) ;
+ s = $(match[1]) ;
+ }
+ }
+ # Combine the remaining part at the beginning, which does not have
+ # separators, with the pieces broken off. Note that the rule's signature
+ # does not allow the initial s to be empty.
+ return $(s) $(result) ;
+}
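
As a rough usage sketch (not part of the patch itself), and assuming the regex module is imported into a Jamfile, split breaks a string on a regular-expression separator and preserves empty fields, as the tests further below confirm:

import regex ;

local parts = [ regex.split "a/b/c" "/" ] ;    # parts = a b c
local fields = [ regex.split "/a//b/c" "/" ] ; # fields = "" a "" b c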
+
+
+# Returns the concatenated results of applying regex.split to every element of
+# the list using the separator pattern.
+#
+rule split-list ( list * : separator )
+{
+ local result ;
+ for s in $(list)
+ {
+ result += [ split $(s) $(separator) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Matches 'string' against 'pattern' and returns the match groups indicated by
+# 'indices'.
+#
+rule match ( pattern : string : indices * )
+{
+ indices ?= 1 2 3 4 5 6 7 8 9 ;
+ local x = [ MATCH $(pattern) : $(string) ] ;
+ return $(x[$(indices)]) ;
+}
+
+
+# Matches all elements of 'list' against the 'pattern' and returns a list of
+# the elements indicated by 'indices' of all successful matches. If 'indices'
+# is omitted, returns a list of the first parenthesised groups of all
+# successful matches.
+#
+rule transform ( list * : pattern : indices * )
+{
+ indices ?= 1 ;
+ local result ;
+ for local e in $(list)
+ {
+ local m = [ MATCH $(pattern) : $(e) ] ;
+ if $(m)
+ {
+ result += $(m[$(indices)]) ;
+ }
+ }
+ return $(result) ;
+}
+
+NATIVE_RULE regex : transform ;
+
+
+# Escapes all occurrences in 'string' of the characters listed in 'symbols' by
+# prefixing each with 'escape-symbol', and returns the escaped string.
+#
+rule escape ( string : symbols : escape-symbol )
+{
+ local result = "" ;
+ local m = 1 ;
+ while $(m)
+ {
+ m = [ MATCH ^([^$(symbols)]*)([$(symbols)])(.*) : $(string) ] ;
+ if $(m)
+ {
+ m += "" ; # Supposedly a bug fix; borrowed from regex.split
+ result = "$(result)$(m[1])$(escape-symbol)$(m[2])" ;
+ string = $(m[3]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(result)$(string)" ;
+ return $(result) ;
+}
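
A minimal sketch of escape, assuming regex is imported; every character from the 'symbols' class gets the escape symbol prepended:

import regex ;

# Prefix '&', '<' and '>' with '^' (for example for cmd.exe quoting).
ECHO [ regex.escape "a<b>&c" : "&<>" : "^" ] ;  # prints: a^<b^>^&c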
+
+
+# Replaces occurrences of a match string in a given string and returns the new
+# string. The match string can be a regular expression.
+#
+rule replace (
+ string # The string to modify.
+ match # The pattern to replace.
+ replacement # The string to replace with.
+ )
+{
+ local result = "" ;
+ local parts = 1 ;
+ while $(parts)
+ {
+ parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ;
+ if $(parts)
+ {
+ parts += "" ;
+ result = "$(replacement)$(parts[3])$(result)" ;
+ string = $(parts[1]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(string)$(result)" ;
+ return $(result) ;
+}
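
A brief sketch of replace, assuming regex is imported; every match of the pattern is substituted, working from the end of the string towards the front:

import regex ;

ECHO [ regex.replace "one two three" " " "_" ] ;  # prints: one_two_three
ECHO [ regex.replace "a&b&c" "&" "+" ] ;          # prints: a+b+c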
+
+
+# Replaces occurrences of a match string in a given list of strings and returns
+# a list of new strings. The match string can be a regular expression.
+#
+# list - the list of strings to modify.
+# match - the search expression.
+# replacement - the string to replace with.
+#
+rule replace-list ( list * : match : replacement )
+{
+ local result ;
+ for local e in $(list)
+ {
+ result += [ replace $(e) $(match) $(replacement) ] ;
+ }
+ return $(result) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result a b c : split "a/b/c" / ;
+ assert.result "" a b c : split "/a/b/c" / ;
+ assert.result "" "" a b c : split "//a/b/c" / ;
+ assert.result "" a "" b c : split "/a//b/c" / ;
+ assert.result "" a "" b c "" : split "/a//b/c/" / ;
+ assert.result "" a "" b c "" "" : split "/a//b/c//" / ;
+
+ assert.result a c b d
+ : match (.)(.)(.)(.) : abcd : 1 3 2 4 ;
+
+ assert.result a b c d
+ : match (.)(.)(.)(.) : abcd ;
+
+ assert.result ababab cddc
+ : match ((ab)*)([cd]+) : abababcddc : 1 3 ;
+
+ assert.result a.h c.h
+ : transform <a.h> \"b.h\" <c.h> : <(.*)> ;
+
+ assert.result a.h b.h c.h
+ : transform <a.h> \"b.h\" <c.h> : <([^>]*)>|\"([^\"]*)\" : 1 2 ;
+
+ assert.result "^<?xml version=\"1.0\"^>"
+ : escape "<?xml version=\"1.0\">" : "&|()<>^" : "^" ;
+
+ assert.result "<?xml version=\\\"1.0\\\">"
+ : escape "<?xml version=\"1.0\">" : "\\\"" : "\\" ;
+
+ assert.result "string&nbsp;string&nbsp;" : replace "string string " " " "&nbsp;" ;
+ assert.result "&nbsp;string&nbsp;string" : replace " string string" " " "&nbsp;" ;
+ assert.result "string&nbsp;&nbsp;string" : replace "string string" " " "&nbsp;" ;
+ assert.result "-" : replace "&" "&" "-" ;
+
+ assert.result "-" "a-b" : replace-list "&" "a&b" : "&" : "-" ;
+}
diff --git a/jam-files/boost-build/util/sequence.jam b/jam-files/boost-build/util/sequence.jam
new file mode 100644
index 000000000..73919a65d
--- /dev/null
+++ b/jam-files/boost-build/util/sequence.jam
@@ -0,0 +1,335 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import assert ;
+import numbers ;
+import modules ;
+
+
+# Note that algorithms in this module execute largely in the caller's module
+# namespace, so that local rules can be used as function objects. Also note that
+# most predicates can be multi-element lists. In that case, the first element
+# names the rule to call and the remaining elements are passed as leading
+# arguments, before the sequence element being processed.
+
+
+# Return the elements e of $(sequence) for which [ $(predicate) e ] has a
+# non-null value.
+#
+rule filter ( predicate + : sequence * )
+{
+ local caller = [ CALLER_MODULE ] ;
+ local result ;
+
+ for local e in $(sequence)
+ {
+ if [ modules.call-in $(caller) : $(predicate) $(e) ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
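
To illustrate the argument binding described in the note above (a hypothetical sketch; the starts-with rule is not part of this module), extra elements of the predicate are passed before each sequence element, so a two-argument rule can be partially applied:

import sequence ;

rule starts-with ( prefix string )
{
    if [ MATCH "^($(prefix))" : $(string) ]
    {
        return true ;
    }
}

# "starts-with lib" binds lib as the first argument of every call.
ECHO [ sequence.filter starts-with lib : libfoo bar libbaz ] ;  # prints: libfoo libbaz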
+
+
+# Return a new sequence consisting of [ $(function) $(e) ] for each element e of
+# $(sequence).
+#
+rule transform ( function + : sequence * )
+{
+ local caller = [ CALLER_MODULE ] ;
+ local result ;
+
+ for local e in $(sequence)
+ {
+ result += [ modules.call-in $(caller) : $(function) $(e) ] ;
+ }
+ return $(result) ;
+}
+
+
+rule reverse ( s * )
+{
+ local r ;
+ for local x in $(s)
+ {
+ r = $(x) $(r) ;
+ }
+ return $(r) ;
+}
+
+
+rule less ( a b )
+{
+ if $(a) < $(b)
+ {
+ return true ;
+ }
+}
+
+
+# Insertion-sort s using the BinaryPredicate ordered.
+#
+rule insertion-sort ( s * : ordered * )
+{
+ if ! $(ordered)
+ {
+ return [ SORT $(s) ] ;
+ }
+ else
+ {
+ local caller = [ CALLER_MODULE ] ;
+ ordered ?= sequence.less ;
+ local result = $(s[1]) ;
+ if $(ordered) = sequence.less
+ {
+ local head tail ;
+ for local x in $(s[2-])
+ {
+ head = ;
+ tail = $(result) ;
+ while $(tail) && ( $(tail[1]) < $(x) )
+ {
+ head += $(tail[1]) ;
+ tail = $(tail[2-]) ;
+ }
+ result = $(head) $(x) $(tail) ;
+ }
+ }
+ else
+ {
+ for local x in $(s[2-])
+ {
+ local head tail ;
+ tail = $(result) ;
+ while $(tail) && [ modules.call-in $(caller) : $(ordered) $(tail[1]) $(x) ]
+ {
+ head += $(tail[1]) ;
+ tail = $(tail[2-]) ;
+ }
+ result = $(head) $(x) $(tail) ;
+ }
+ }
+
+ return $(result) ;
+ }
+}
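
A short sketch of insertion-sort with a user-supplied ordering (the descending rule below is hypothetical); with no predicate the built-in SORT is used instead:

import sequence ;

rule descending ( a b )
{
    if $(a) > $(b)
    {
        return true ;
    }
}

ECHO [ sequence.insertion-sort 3 1 2 ] ;               # prints: 1 2 3
ECHO [ sequence.insertion-sort 3 1 2 : descending ] ;  # prints: 3 2 1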
+
+
+# Merge two ordered sequences using the BinaryPredicate ordered.
+#
+rule merge ( s1 * : s2 * : ordered * )
+{
+ ordered ?= sequence.less ;
+ local result__ ;
+ local caller = [ CALLER_MODULE ] ;
+
+ while $(s1) && $(s2)
+ {
+ if [ modules.call-in $(caller) : $(ordered) $(s1[1]) $(s2[1]) ]
+ {
+ result__ += $(s1[1]) ;
+ s1 = $(s1[2-]) ;
+ }
+ else if [ modules.call-in $(caller) : $(ordered) $(s2[1]) $(s1[1]) ]
+ {
+ result__ += $(s2[1]) ;
+ s2 = $(s2[2-]) ;
+ }
+ else
+ {
+ s2 = $(s2[2-]) ;
+ }
+
+ }
+ result__ += $(s1) ;
+ result__ += $(s2) ;
+
+ return $(result__) ;
+}
+
+
+# Join the elements of s into one long string. If joint is supplied, it is used
+# as a separator.
+#
+rule join ( s * : joint ? )
+{
+ joint ?= "" ;
+ return $(s:J=$(joint)) ;
+}
+
+
+# Find the length of any sequence.
+#
+rule length ( s * )
+{
+ local result = 0 ;
+ for local i in $(s)
+ {
+ result = [ CALC $(result) + 1 ] ;
+ }
+ return $(result) ;
+}
+
+
+rule unique ( list * : stable ? )
+{
+ local result ;
+ local prev ;
+ if $(stable)
+ {
+ for local f in $(list)
+ {
+ if ! $(f) in $(result)
+ {
+ result += $(f) ;
+ }
+ }
+ }
+ else
+ {
+ for local i in [ SORT $(list) ]
+ {
+ if $(i) != $(prev)
+ {
+ result += $(i) ;
+ }
+ prev = $(i) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns the maximum element of 'elements'. Uses 'ordered' for comparisons or
+# 'numbers.less' if none is provided.
+#
+rule max-element ( elements + : ordered ? )
+{
+ ordered ?= numbers.less ;
+
+ local max = $(elements[1]) ;
+ for local e in $(elements[2-])
+ {
+ if [ $(ordered) $(max) $(e) ]
+ {
+ max = $(e) ;
+ }
+ }
+ return $(max) ;
+}
+
+
+# Returns all of 'elements' for which the corresponding element in the parallel
+# list 'ranks' is equal to the maximum value in 'ranks'.
+#
+rule select-highest-ranked ( elements * : ranks * )
+{
+ if $(elements)
+ {
+ local max-rank = [ max-element $(ranks) ] ;
+ local result ;
+ while $(elements)
+ {
+ if $(ranks[1]) = $(max-rank)
+ {
+ result += $(elements[1]) ;
+ }
+ elements = $(elements[2-]) ;
+ ranks = $(ranks[2-]) ;
+ }
+ return $(result) ;
+ }
+}
+NATIVE_RULE sequence : select-highest-ranked ;
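
As a rough illustration (the values are made up), an element is kept whenever its parallel rank equals the maximum rank:

import sequence ;

ECHO [ sequence.select-highest-ranked gcc clang msvc : 2 5 5 ] ;  # prints: clang msvc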
+
+
+rule __test__ ( )
+{
+ # Use a unique module so we can test the use of local rules.
+ module sequence.__test__
+ {
+ import assert ;
+ import sequence ;
+
+ local rule is-even ( n )
+ {
+ if $(n) in 0 2 4 6 8
+ {
+ return true ;
+ }
+ }
+
+ assert.result 4 6 4 2 8 : sequence.filter is-even : 1 4 6 3 4 7 2 3 8 ;
+
+ # Test that argument binding works.
+ local rule is-equal-test ( x y )
+ {
+ if $(x) = $(y)
+ {
+ return true ;
+ }
+ }
+
+ assert.result 3 3 3 : sequence.filter is-equal-test 3 : 1 2 3 4 3 5 3 5 7 ;
+
+ local rule append-x ( n )
+ {
+ return $(n)x ;
+ }
+
+ assert.result 1x 2x 3x : sequence.transform append-x : 1 2 3 ;
+
+ local rule repeat2 ( x )
+ {
+ return $(x) $(x) ;
+ }
+
+ assert.result 1 1 2 2 3 3 : sequence.transform repeat2 : 1 2 3 ;
+
+ local rule test-greater ( a b )
+ {
+ if $(a) > $(b)
+ {
+ return true ;
+ }
+ }
+ assert.result 1 2 3 4 5 6 7 8 9 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 ;
+ assert.result 9 8 7 6 5 4 3 2 1 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 : test-greater ;
+ assert.result 1 2 3 4 5 6 : sequence.merge 1 3 5 : 2 4 6 ;
+ assert.result 6 5 4 3 2 1 : sequence.merge 5 3 1 : 6 4 2 : test-greater ;
+ assert.result 1 2 3 : sequence.merge 1 2 3 : ;
+ assert.result 1 : sequence.merge 1 : 1 ;
+
+ assert.result foo-bar-baz : sequence.join foo bar baz : - ;
+ assert.result substandard : sequence.join sub stan dard ;
+ assert.result 3.0.1 : sequence.join 3.0.1 : - ;
+
+ assert.result 0 : sequence.length ;
+ assert.result 3 : sequence.length a b c ;
+ assert.result 17 : sequence.length 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 ;
+
+ assert.result 1 : sequence.length a ;
+ assert.result 10 : sequence.length a b c d e f g h i j ;
+ assert.result 11 : sequence.length a b c d e f g h i j k ;
+ assert.result 12 : sequence.length a b c d e f g h i j k l ;
+
+ local p2 = x ;
+ for local i in 1 2 3 4 5 6 7 8
+ {
+ p2 = $(p2) $(p2) ;
+ }
+ assert.result 256 : sequence.length $(p2) ;
+
+ assert.result 1 2 3 4 5 : sequence.unique 1 2 3 2 4 3 3 5 5 5 ;
+
+ assert.result 5 : sequence.max-element 1 3 5 0 4 ;
+
+ assert.result e-3 h-3 : sequence.select-highest-ranked e-1 e-3 h-3 m-2 : 1 3 3 2 ;
+
+ assert.result 7 6 5 4 3 2 1 : sequence.reverse 1 2 3 4 5 6 7 ;
+ }
+}
diff --git a/jam-files/boost-build/util/set.jam b/jam-files/boost-build/util/set.jam
new file mode 100644
index 000000000..fc179134f
--- /dev/null
+++ b/jam-files/boost-build/util/set.jam
@@ -0,0 +1,93 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+class set
+{
+ rule __init__ ( )
+ {
+ }
+
+ rule add ( elements * )
+ {
+ for local e in $(elements)
+ {
+ if ! $($(e))
+ {
+ $(e) = 1 ;
+ self.result += $(e) ;
+ }
+ }
+ }
+
+ rule contains ( element )
+ {
+ return $($(element)) ;
+ }
+
+ rule list ( )
+ {
+ return $(self.result) ;
+ }
+}
+
+
+
+# Returns the elements of set1 that are not in set2.
+#
+rule difference ( set1 * : set2 * )
+{
+ local result = ;
+ for local element in $(set1)
+ {
+ if ! ( $(element) in $(set2) )
+ {
+ result += $(element) ;
+ }
+ }
+ return $(result) ;
+}
+
+NATIVE_RULE set : difference ;
+
+
+# Returns all the items appearing in both set1 & set2.
+#
+rule intersection ( set1 * : set2 * )
+{
+ local result ;
+ for local v in $(set1)
+ {
+ if $(v) in $(set2)
+ {
+ result += $(v) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns whether set1 & set2 contain the same elements. Note that this ignores
+# any element ordering differences as well as any element duplication.
+#
+rule equal ( set1 * : set2 * )
+{
+ if $(set1) in $(set2) && ( $(set2) in $(set1) )
+ {
+ return true ;
+ }
+}
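
A quick sketch of the module-level rules, assuming the set module is imported; lists are treated as unordered collections:

import set ;

ECHO [ set.difference a b c : b ] ;        # prints: a c
ECHO [ set.intersection a b c : b c d ] ;  # prints: b c

if [ set.equal a b : b a a ]
{
    ECHO "same elements (order and duplicates are ignored)" ;
}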
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result 0 1 4 6 8 9 : difference 0 1 2 3 4 5 6 7 8 9 : 2 3 5 7 ;
+ assert.result 2 5 7 : intersection 0 1 2 4 5 6 7 8 9 : 2 3 5 7 ;
+
+ assert.true equal : ;
+ assert.true equal 1 1 2 3 : 3 2 2 1 ;
+ assert.false equal 2 3 : 3 2 2 1 ;
+}
diff --git a/jam-files/boost-build/util/string.jam b/jam-files/boost-build/util/string.jam
new file mode 100644
index 000000000..a39ed119e
--- /dev/null
+++ b/jam-files/boost-build/util/string.jam
@@ -0,0 +1,189 @@
+# Copyright 2002 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import regex ;
+
+
+# Characters considered whitespace, as a list.
+.whitespace-chars = " " " " "
+" ;
+
+# Characters considered whitespace, as a single string.
+.whitespace = $(.whitespace-chars:J="") ;
+
+
+# Returns the canonical set of whitespace characters, as a list.
+#
+rule whitespace-chars ( )
+{
+ return $(.whitespace-chars) ;
+}
+
+
+# Returns the canonical set of whitespace characters, as a single string.
+#
+rule whitespace ( )
+{
+ return $(.whitespace) ;
+}
+
+
+# Splits the given string into a list of strings composed of each character of
+# the string in sequence.
+#
+rule chars (
+ string # The string to split.
+ )
+{
+ local result ;
+ while $(string)
+ {
+ local s = [ MATCH (.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.*) : $(string) ] ;
+ string = $(s[9]) ;
+ result += $(s[1-8]) ;
+ }
+
+ # Trim off empty strings.
+ while $(result[1]) && ! $(result[-1])
+ {
+ result = $(result[1--2]) ;
+ }
+
+ return $(result) ;
+}
+
+
+# Apply a set of standard transformations to string to produce an abbreviation
+# no more than 5 characters long.
+#
+rule abbreviate ( string )
+{
+ local r = $(.abbreviated-$(string)) ;
+ if $(r)
+ {
+ return $(r) ;
+ }
+ # Anything less than 4 characters gets no abbreviation.
+ else if ! [ MATCH (....) : $(string) ]
+ {
+ .abbreviated-$(string) = $(string) ;
+ return $(string) ;
+ }
+ else
+ {
+ # Separate the initial letter in case it's a vowel.
+ local s1 = [ MATCH ^(.)(.*) : $(string) ] ;
+
+ # Drop trailing "ing".
+ local s2 = [ MATCH ^(.*)ing$ : $(s1[2]) ] ;
+ s2 ?= $(s1[2]) ;
+
+ # Reduce all doubled characters to one.
+ local last = "" ;
+ for local c in [ chars $(s2) ]
+ {
+ if $(c) != $(last)
+ {
+ r += $(c) ;
+ last = $(c) ;
+ }
+ }
+ s2 = $(r:J="") ;
+
+ # Chop all vowels out of the remainder.
+ s2 = [ regex.replace $(s2) [AEIOUaeiou] "" ] ;
+
+ # Shorten remaining consonants to 4 characters.
+ s2 = [ MATCH ^(.?.?.?.?) : $(s2) ] ;
+
+ # Glue the initial character back on to the front.
+ s2 = $(s1[1])$(s2) ;
+
+ .abbreviated-$(string) = $(s2) ;
+ return $(s2) ;
+ }
+}
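
Two illustrative inputs, traced by hand against the steps above, so treat the expected outputs as a sketch rather than a specification:

import string ;

ECHO [ string.abbreviate optimization ] ;  # expected: optmz
ECHO [ string.abbreviate linking ] ;       # expected: lnk (trailing "ing" dropped, vowels removed)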
+
+
+# Concatenates the given strings, inserting the given separator between each
+# string.
+#
+rule join (
+ strings * # The strings to join.
+ : separator ? # The optional separator.
+ )
+{
+ separator ?= "" ;
+ return $(strings:J=$(separator)) ;
+}
+
+
+# Splits a string into whitespace-separated words.
+#
+rule words (
+ string # The string to split.
+ : whitespace * # Optional, characters to consider as whitespace.
+ )
+{
+ whitespace = $(whitespace:J="") ;
+ whitespace ?= $(.whitespace) ;
+ local w = ;
+ while $(string)
+ {
+ string = [ MATCH "^[$(whitespace)]*([^$(whitespace)]*)(.*)" : $(string) ] ;
+ if $(string[1]) && $(string[1]) != ""
+ {
+ w += $(string[1]) ;
+ }
+ string = $(string[2]) ;
+ }
+ return $(w) ;
+}
+
+
+# Checks whether the given string is composed entirely of whitespace.
+#
+rule is-whitespace (
+ string ? # The string to test.
+ )
+{
+ if ! $(string) { return true ; }
+ else if $(string) = "" { return true ; }
+ else if [ MATCH "^([$(.whitespace)]+)$" : $(string) ] { return true ; }
+ else { return ; }
+}
+
+rule __test__ ( )
+{
+ import assert ;
+ assert.result a b c : chars abc ;
+
+ assert.result rntm : abbreviate runtime ;
+ assert.result ovrld : abbreviate overload ;
+ assert.result dbg : abbreviate debugging ;
+ assert.result async : abbreviate asynchronous ;
+ assert.result pop : abbreviate pop ;
+ assert.result aaa : abbreviate aaa ;
+ assert.result qck : abbreviate quack ;
+ assert.result sttc : abbreviate static ;
+
+ # Check boundary cases.
+ assert.result a : chars a ;
+ assert.result : chars "" ;
+ assert.result a b c d e f g h : chars abcdefgh ;
+ assert.result a b c d e f g h i : chars abcdefghi ;
+ assert.result a b c d e f g h i j : chars abcdefghij ;
+ assert.result a b c d e f g h i j k : chars abcdefghijk ;
+
+ assert.result a//b/c/d : join a "" b c d : / ;
+ assert.result abcd : join a "" b c d ;
+
+ assert.result a b c : words "a b c" ;
+
+ assert.true is-whitespace " " ;
+ assert.false is-whitespace " a b c " ;
+ assert.true is-whitespace "" ;
+ assert.true is-whitespace ;
+}
diff --git a/jam-files/boost-build/util/utility.jam b/jam-files/boost-build/util/utility.jam
new file mode 100644
index 000000000..c46747f58
--- /dev/null
+++ b/jam-files/boost-build/util/utility.jam
@@ -0,0 +1,235 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : is-instance ;
+import errors ;
+
+
+# For all elements of 'list' which do not already have 'suffix', add 'suffix'.
+#
+rule apply-default-suffix ( suffix : list * )
+{
+ local result ;
+ for local i in $(list)
+ {
+ if $(i:S) = $(suffix)
+ {
+ result += $(i) ;
+ }
+ else
+ {
+ result += $(i)$(suffix) ;
+ }
+ }
+ return $(result) ;
+}
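
A short sketch of the behaviour, assuming utility is imported; note that an element with a different suffix still gets the default appended after it:

import utility ;

ECHO [ utility.apply-default-suffix .lib : foo bar.lib ] ;  # prints: foo.lib bar.lib
ECHO [ utility.apply-default-suffix .lib : baz.a ] ;        # prints: baz.a.lib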
+
+
+# If 'name' contains a dot, returns the part before the last dot. If 'name'
+# contains no dot, returns it unmodified.
+#
+rule basename ( name )
+{
+ if $(name:S)
+ {
+ name = $(name:B) ;
+ }
+ return $(name) ;
+}
+
+
+# Return the file of the caller of the rule that called caller-file.
+#
+rule caller-file ( )
+{
+ local bt = [ BACKTRACE ] ;
+ return $(bt[9]) ;
+}
+
+
+# Tests if 'a' is equal to 'b'. If 'a' is a class instance, calls its 'equal'
+# method. Uses ordinary Jam comparison otherwise.
+#
+rule equal ( a b )
+{
+ if [ is-instance $(a) ]
+ {
+ return [ $(a).equal $(b) ] ;
+ }
+ else
+ {
+ if $(a) = $(b)
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Tests if 'a' is less than 'b'. If 'a' is a class instance, calls its 'less'
+# method. Uses ordinary Jam comparison otherwise.
+#
+rule less ( a b )
+{
+ if [ is-instance $(a) ]
+ {
+ return [ $(a).less $(b) ] ;
+ }
+ else
+ {
+ if $(a) < $(b)
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Returns the textual representation of argument. If it is a class instance,
+# calls its 'str' method. Otherwise, returns the argument.
+#
+rule str ( value )
+{
+ if [ is-instance $(value) ]
+ {
+ return [ $(value).str ] ;
+ }
+ else
+ {
+ return $(value) ;
+ }
+}
+
+
+# Accepts a list of gristed values and returns them ungristed. Reports an error
+# in case any of the passed parameters is not gristed, i.e. surrounded by angle
+# brackets < and >.
+#
+rule ungrist ( names * )
+{
+ local result ;
+ for local name in $(names)
+ {
+ local stripped = [ MATCH ^<(.*)>$ : $(name) ] ;
+ if ! $(stripped)
+ {
+ errors.error "in ungrist $(names) : $(name) is not of the form <.*>" ;
+ }
+ result += $(stripped) ;
+ }
+ return $(result) ;
+}
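
An illustrative call (the property names are arbitrary); every value must already be wrapped in angle brackets, otherwise an error is reported:

import utility ;

ECHO [ utility.ungrist <include> <threading> ] ;  # prints: include threading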
+
+
+# If the passed value is quoted, unquotes it. Otherwise returns the value
+# unchanged.
+#
+rule unquote ( value ? )
+{
+ local match-result = [ MATCH ^(\")(.*)(\")$ : $(value) ] ;
+ if $(match-result)
+ {
+ return $(match-result[2]) ;
+ }
+ else
+ {
+ return $(value) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+
+ assert.result 123 : str 123 ;
+
+ class test-class__
+ {
+ rule __init__ ( ) { }
+ rule str ( ) { return "str-test-class" ; }
+ rule less ( a ) { return "yes, of course!" ; }
+ rule equal ( a ) { return "not sure" ; }
+ }
+
+ assert.result "str-test-class" : str [ new test-class__ ] ;
+ assert.true less 1 2 ;
+ assert.false less 2 1 ;
+ assert.result "yes, of course!" : less [ new test-class__ ] 1 ;
+ assert.true equal 1 1 ;
+ assert.false equal 1 2 ;
+ assert.result "not sure" : equal [ new test-class__ ] 1 ;
+
+ assert.result foo.lib foo.lib : apply-default-suffix .lib : foo.lib foo.lib
+ ;
+
+ assert.result foo : basename foo ;
+ assert.result foo : basename foo.so ;
+ assert.result foo.so : basename foo.so.1 ;
+
+ assert.result : unquote ;
+ assert.result "" : unquote "" ;
+ assert.result foo : unquote foo ;
+ assert.result \"foo : unquote \"foo ;
+ assert.result foo\" : unquote foo\" ;
+ assert.result foo : unquote \"foo\" ;
+ assert.result \"foo\" : unquote \"\"foo\"\" ;
+
+ assert.result : ungrist ;
+ assert.result foo : ungrist <foo> ;
+ assert.result <foo> : ungrist <<foo>> ;
+ assert.result foo bar : ungrist <foo> <bar> ;
+
+ try ;
+ {
+ ungrist "" ;
+ }
+ catch "in ungrist : is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist <> ;
+ }
+ catch "in ungrist <> : <> is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist foo ;
+ }
+ catch "in ungrist foo : foo is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist <foo ;
+ }
+ catch "in ungrist <foo : <foo is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist foo> ;
+ }
+ catch "in ungrist foo> : foo> is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist foo bar ;
+ }
+ catch "in ungrist foo : foo is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist foo <bar> ;
+ }
+ catch "in ungrist foo : foo is not of the form <.*>" ;
+
+ try ;
+ {
+ ungrist <foo> bar ;
+ }
+ catch "in ungrist bar : bar is not of the form <.*>" ;
+}